Fix ABI config in mediaparser JNI am: ef893d404d

Original change: https://googleplex-android-review.googlesource.com/c/platform/frameworks/av/+/13054154

Change-Id: I0dc02d1a51f170b3367052a14985344cdf125eab
diff --git a/Android.bp b/Android.bp
new file mode 100644
index 0000000..87a8f41
--- /dev/null
+++ b/Android.bp
@@ -0,0 +1,27 @@
+aidl_interface {
+    name: "av-types-aidl",
+    unstable: true,
+    host_supported: true,
+    vendor_available: true,
+    double_loadable: true,
+    local_include_dir: "aidl",
+    srcs: [
+        "aidl/android/media/InterpolatorConfig.aidl",
+        "aidl/android/media/InterpolatorType.aidl",
+        "aidl/android/media/VolumeShaperConfiguration.aidl",
+        "aidl/android/media/VolumeShaperConfigurationOptionFlag.aidl",
+        "aidl/android/media/VolumeShaperConfigurationType.aidl",
+        "aidl/android/media/VolumeShaperOperation.aidl",
+        "aidl/android/media/VolumeShaperOperationFlag.aidl",
+        "aidl/android/media/VolumeShaperState.aidl",
+    ],
+    backend: {
+        cpp: {
+            min_sdk_version: "29",
+            apex_available: [
+                "//apex_available:platform",
+                "com.android.media",
+            ],
+        },
+    },
+}
diff --git a/METADATA b/METADATA
new file mode 100644
index 0000000..d97975c
--- /dev/null
+++ b/METADATA
@@ -0,0 +1,3 @@
+third_party {
+  license_type: NOTICE
+}
diff --git a/PREUPLOAD.cfg b/PREUPLOAD.cfg
index ae920c0..8fe48c2 100644
--- a/PREUPLOAD.cfg
+++ b/PREUPLOAD.cfg
@@ -1,2 +1,11 @@
 [Hook Scripts]
 mainline_hook = ${REPO_ROOT}/frameworks/av/tools/mainline_hook_partial.sh ${REPO_ROOT} ${PREUPLOAD_FILES}
+
+[Builtin Hooks]
+clang_format = true
+
+[Builtin Hooks Options]
+# Only turn on clang-format check for the following subfolders.
+clang_format = --commit ${PREUPLOAD_COMMIT} --style file --extensions c,h,cc,cpp
+               media/libmediatranscoding/
+               services/mediatranscoding/
diff --git a/aidl/android/media/InterpolatorConfig.aidl b/aidl/android/media/InterpolatorConfig.aidl
new file mode 100644
index 0000000..ef7486e
--- /dev/null
+++ b/aidl/android/media/InterpolatorConfig.aidl
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.InterpolatorType;
+
+/**
+ * {@hide}
+ */
+parcelable InterpolatorConfig {
+    InterpolatorType type;
+    /** For cubic interpolation, the boundary conditions expressed as slopes. */
+    float firstSlope;
+    float lastSlope;
+    /** A flattened list of <x, y> pairs, monotonically increasing in x. */
+    float[] xy;
+}
diff --git a/aidl/android/media/InterpolatorType.aidl b/aidl/android/media/InterpolatorType.aidl
new file mode 100644
index 0000000..b722cad
--- /dev/null
+++ b/aidl/android/media/InterpolatorType.aidl
@@ -0,0 +1,34 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+/**
+ * Polynomial spline interpolators.
+ *
+ * {@hide}
+ */
+@Backing(type="int")
+enum InterpolatorType {
+    /** Not continuous. */
+    STEP,
+    /** C0. */
+    LINEAR,
+    /** C1. */
+    CUBIC,
+    /** C1 (to provide locally monotonic curves). */
+    CUBIC_MONOTONIC,
+    // CUBIC_C2, // TODO - requires global computation / cache
+}
diff --git a/aidl/android/media/VolumeShaperConfiguration.aidl b/aidl/android/media/VolumeShaperConfiguration.aidl
new file mode 100644
index 0000000..6361851
--- /dev/null
+++ b/aidl/android/media/VolumeShaperConfiguration.aidl
@@ -0,0 +1,33 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.InterpolatorConfig;
+import android.media.VolumeShaperConfigurationOptionFlag;
+import android.media.VolumeShaperConfigurationType;
+
+/**
+ * {@hide}
+ */
+parcelable VolumeShaperConfiguration {
+    VolumeShaperConfigurationType type;
+    int id;
+    /** Bitmask, indexed by VolumeShaperConfigurationOptionFlag. */
+    int optionFlags;
+    double durationMs;
+    InterpolatorConfig interpolatorConfig;
+}
diff --git a/aidl/android/media/VolumeShaperConfigurationOptionFlag.aidl b/aidl/android/media/VolumeShaperConfigurationOptionFlag.aidl
new file mode 100644
index 0000000..f583cee
--- /dev/null
+++ b/aidl/android/media/VolumeShaperConfigurationOptionFlag.aidl
@@ -0,0 +1,22 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+@Backing(type="int")
+enum VolumeShaperConfigurationOptionFlag {
+    VOLUME_IN_DBFS,
+    CLOCK_TIME,
+}
diff --git a/aidl/android/media/VolumeShaperConfigurationType.aidl b/aidl/android/media/VolumeShaperConfigurationType.aidl
new file mode 100644
index 0000000..aa6334e
--- /dev/null
+++ b/aidl/android/media/VolumeShaperConfigurationType.aidl
@@ -0,0 +1,22 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+@Backing(type="int")
+enum VolumeShaperConfigurationType {
+    ID,
+    SCALE,
+}
diff --git a/aidl/android/media/VolumeShaperOperation.aidl b/aidl/android/media/VolumeShaperOperation.aidl
new file mode 100644
index 0000000..dd9a0e7
--- /dev/null
+++ b/aidl/android/media/VolumeShaperOperation.aidl
@@ -0,0 +1,29 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * {@hide}
+ */
+parcelable VolumeShaperOperation {
+    /** Operations to do. Bitmask of VolumeShaperOperationFlag. */
+    int flags;
+    /** If >= 0 the id to remove in a replace operation. */
+    int replaceId;
+    /** Position in the curve to set if a valid number (not nan). */
+    float xOffset;
+}
diff --git a/aidl/android/media/VolumeShaperOperationFlag.aidl b/aidl/android/media/VolumeShaperOperationFlag.aidl
new file mode 100644
index 0000000..8fe5275
--- /dev/null
+++ b/aidl/android/media/VolumeShaperOperationFlag.aidl
@@ -0,0 +1,26 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+@Backing(type="int")
+enum VolumeShaperOperationFlag {
+    /** The absence of this flag indicates "play". */
+    REVERSE,
+    TERMINATE,
+    JOIN,
+    DELAY,
+    CREATE_IF_NECESSARY,
+}
diff --git a/aidl/android/media/VolumeShaperState.aidl b/aidl/android/media/VolumeShaperState.aidl
new file mode 100644
index 0000000..4085e2b
--- /dev/null
+++ b/aidl/android/media/VolumeShaperState.aidl
@@ -0,0 +1,27 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * {@hide}
+ */
+parcelable VolumeShaperState {
+    /** Linear volume in the range MIN_LINEAR_VOLUME to MAX_LINEAR_VOLUME. */
+    float volume;
+    /** Position on curve expressed from MIN_CURVE_TIME to MAX_CURVE_TIME. */
+    float xOffset;
+}
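
Note: the AIDL files above are built by the av-types-aidl interface declared in Android.bp. With the cpp backend enabled, Soong conventionally emits a library (presumably named av-types-aidl-cpp) whose generated parcelables expose the AIDL fields as public members in the android::media namespace. The following is only a sketch of filling one in from native code under those assumptions; header paths, field names, and the generated library name are not confirmed by this change.

    // Sketch only: header paths, the android::media namespace, and public field
    // names are assumptions based on typical AIDL cpp-backend output.
    #include <android/media/InterpolatorConfig.h>
    #include <android/media/InterpolatorType.h>
    #include <android/media/VolumeShaperConfiguration.h>
    #include <android/media/VolumeShaperConfigurationOptionFlag.h>
    #include <android/media/VolumeShaperConfigurationType.h>

    using namespace android::media;

    static VolumeShaperConfiguration makeFadeIn() {
        InterpolatorConfig interp;
        interp.type = InterpolatorType::CUBIC;
        interp.firstSlope = 0.f;           // boundary slopes for cubic interpolation
        interp.lastSlope = 0.f;
        interp.xy = {0.f, 0.f, 1.f, 1.f};  // flattened <x, y> pairs, monotonically increasing in x

        VolumeShaperConfiguration config;
        config.type = VolumeShaperConfigurationType::SCALE;
        config.id = 1;
        // Bitmask indexed by VolumeShaperConfigurationOptionFlag (here: CLOCK_TIME).
        config.optionFlags =
                1 << static_cast<int>(VolumeShaperConfigurationOptionFlag::CLOCK_TIME);
        config.durationMs = 1000.0;
        config.interpolatorConfig = interp;
        return config;
    }
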
diff --git a/apex/Android.bp b/apex/Android.bp
index 2ba6267..6ba9cb9 100644
--- a/apex/Android.bp
+++ b/apex/Android.bp
@@ -62,6 +62,15 @@
     name: "com.android.media",
     manifest: "manifest.json",
     defaults: ["com.android.media-defaults"],
+    prebuilts: [
+        "media-linker-config",
+    ],
+}
+
+linker_config {
+    name: "media-linker-config",
+    src: "linker.config.json",
+    installable: false,
 }
 
 filegroup {
diff --git a/apex/TEST_MAPPING b/apex/TEST_MAPPING
index f036516..09c46d6 100644
--- a/apex/TEST_MAPPING
+++ b/apex/TEST_MAPPING
@@ -14,17 +14,9 @@
         },
         {
           "include-filter": "com.google.android.media.gts.WidevineGenericOpsTests"
-        }
-      ]
-    },
-    {
-      "name": "GtsExoPlayerTestCases",
-      "options" : [
-        {
-          "include-annotation": "android.platform.test.annotations.SocPresubmit"
         },
         {
-          "include-filter": "com.google.android.exoplayer.gts.DashTest#testWidevine23FpsH264Fixed"
+          "include-filter": "com.google.android.media.gts.WidevineYouTubePerformanceTests"
         }
       ]
     }
diff --git a/apex/linker.config.json b/apex/linker.config.json
new file mode 100644
index 0000000..67c076e
--- /dev/null
+++ b/apex/linker.config.json
@@ -0,0 +1,3 @@
+{
+    "visible": true
+}
diff --git a/apex/mediaswcodec.rc b/apex/mediaswcodec.rc
index d17481b..0c9b8c8 100644
--- a/apex/mediaswcodec.rc
+++ b/apex/mediaswcodec.rc
@@ -2,6 +2,5 @@
     class main
     user mediacodec
     group camera drmrpc mediadrm
-    override
     ioprio rt 4
     writepid /dev/cpuset/foreground/tasks
diff --git a/apex/testing/Android.bp b/apex/testing/Android.bp
index a04ab3f..d86094e 100644
--- a/apex/testing/Android.bp
+++ b/apex/testing/Android.bp
@@ -17,7 +17,10 @@
     manifest: "test_manifest.json",
     file_contexts: ":com.android.media-file_contexts",
     defaults: ["com.android.media-defaults"],
-    prebuilts: ["sdkinfo_45"],
+    prebuilts: [
+        "sdkinfo_45",
+        "media-linker-config",
+    ],
     installable: false,
 }
 
diff --git a/camera/Android.bp b/camera/Android.bp
index fa36bb3..b777d74 100644
--- a/camera/Android.bp
+++ b/camera/Android.bp
@@ -38,7 +38,6 @@
         "ICamera.cpp",
         "ICameraClient.cpp",
         "ICameraRecordingProxy.cpp",
-        "ICameraRecordingProxyListener.cpp",
         "camera2/CaptureRequest.cpp",
         "camera2/ConcurrentCamera.cpp",
         "camera2/OutputConfiguration.cpp",
diff --git a/camera/Camera.cpp b/camera/Camera.cpp
index 84d1d93..f7d194e 100644
--- a/camera/Camera.cpp
+++ b/camera/Camera.cpp
@@ -25,7 +25,6 @@
 #include <binder/IMemory.h>
 
 #include <Camera.h>
-#include <ICameraRecordingProxyListener.h>
 #include <android/hardware/ICameraService.h>
 #include <android/hardware/ICamera.h>
 
@@ -77,63 +76,6 @@
     return CameraBaseT::connect(cameraId, clientPackageName, clientUid, clientPid);
 }
 
-status_t Camera::connectLegacy(int cameraId, int halVersion,
-        const String16& clientPackageName,
-        int clientUid,
-        sp<Camera>& camera)
-{
-    ALOGV("%s: connect legacy camera device", __FUNCTION__);
-    sp<Camera> c = new Camera(cameraId);
-    sp<::android::hardware::ICameraClient> cl = c;
-    status_t status = NO_ERROR;
-    const sp<::android::hardware::ICameraService>& cs = CameraBaseT::getCameraService();
-
-    binder::Status ret;
-    if (cs != nullptr) {
-        ret = cs.get()->connectLegacy(cl, cameraId, halVersion, clientPackageName,
-                clientUid, /*out*/&(c->mCamera));
-    }
-    if (ret.isOk() && c->mCamera != nullptr) {
-        IInterface::asBinder(c->mCamera)->linkToDeath(c);
-        c->mStatus = NO_ERROR;
-        camera = c;
-    } else {
-        switch(ret.serviceSpecificErrorCode()) {
-            case hardware::ICameraService::ERROR_DISCONNECTED:
-                status = -ENODEV;
-                break;
-            case hardware::ICameraService::ERROR_CAMERA_IN_USE:
-                status = -EBUSY;
-                break;
-            case hardware::ICameraService::ERROR_INVALID_OPERATION:
-                status = -EINVAL;
-                break;
-            case hardware::ICameraService::ERROR_MAX_CAMERAS_IN_USE:
-                status = -EUSERS;
-                break;
-            case hardware::ICameraService::ERROR_ILLEGAL_ARGUMENT:
-                status = BAD_VALUE;
-                break;
-            case hardware::ICameraService::ERROR_DEPRECATED_HAL:
-                status = -EOPNOTSUPP;
-                break;
-            case hardware::ICameraService::ERROR_DISABLED:
-                status = -EACCES;
-                break;
-            case hardware::ICameraService::ERROR_PERMISSION_DENIED:
-                status = PERMISSION_DENIED;
-                break;
-            default:
-                status = -EINVAL;
-                ALOGW("An error occurred while connecting to camera %d: %s", cameraId,
-                        (cs != nullptr) ? "Service not available" : ret.toString8().string());
-                break;
-        }
-        c.clear();
-    }
-    return status;
-}
-
 status_t Camera::reconnect()
 {
     ALOGV("reconnect");
@@ -214,10 +156,6 @@
 void Camera::stopRecording()
 {
     ALOGV("stopRecording");
-    {
-        Mutex::Autolock _l(mLock);
-        mRecordingProxyListener.clear();
-    }
     sp <::android::hardware::ICamera> c = mCamera;
     if (c == 0) return;
     c->stopRecording();
@@ -325,12 +263,6 @@
     mListener = listener;
 }
 
-void Camera::setRecordingProxyListener(const sp<ICameraRecordingProxyListener>& listener)
-{
-    Mutex::Autolock _l(mLock);
-    mRecordingProxyListener = listener;
-}
-
 void Camera::setPreviewCallbackFlags(int flag)
 {
     ALOGV("setPreviewCallbackFlags");
@@ -384,19 +316,6 @@
 // callback from camera service when timestamped frame is ready
 void Camera::dataCallbackTimestamp(nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr)
 {
-    // If recording proxy listener is registered, forward the frame and return.
-    // The other listener (mListener) is ignored because the receiver needs to
-    // call releaseRecordingFrame.
-    sp<ICameraRecordingProxyListener> proxylistener;
-    {
-        Mutex::Autolock _l(mLock);
-        proxylistener = mRecordingProxyListener;
-    }
-    if (proxylistener != NULL) {
-        proxylistener->dataCallbackTimestamp(timestamp, msgType, dataPtr);
-        return;
-    }
-
     sp<CameraListener> listener;
     {
         Mutex::Autolock _l(mLock);
@@ -413,19 +332,6 @@
 
 void Camera::recordingFrameHandleCallbackTimestamp(nsecs_t timestamp, native_handle_t* handle)
 {
-    // If recording proxy listener is registered, forward the frame and return.
-    // The other listener (mListener) is ignored because the receiver needs to
-    // call releaseRecordingFrameHandle.
-    sp<ICameraRecordingProxyListener> proxylistener;
-    {
-        Mutex::Autolock _l(mLock);
-        proxylistener = mRecordingProxyListener;
-    }
-    if (proxylistener != NULL) {
-        proxylistener->recordingFrameHandleCallbackTimestamp(timestamp, handle);
-        return;
-    }
-
     sp<CameraListener> listener;
     {
         Mutex::Autolock _l(mLock);
@@ -444,19 +350,6 @@
         const std::vector<nsecs_t>& timestamps,
         const std::vector<native_handle_t*>& handles)
 {
-    // If recording proxy listener is registered, forward the frame and return.
-    // The other listener (mListener) is ignored because the receiver needs to
-    // call releaseRecordingFrameHandle.
-    sp<ICameraRecordingProxyListener> proxylistener;
-    {
-        Mutex::Autolock _l(mLock);
-        proxylistener = mRecordingProxyListener;
-    }
-    if (proxylistener != NULL) {
-        proxylistener->recordingFrameHandleCallbackTimestampBatch(timestamps, handles);
-        return;
-    }
-
     sp<CameraListener> listener;
     {
         Mutex::Autolock _l(mLock);
@@ -476,10 +369,9 @@
     return new RecordingProxy(this);
 }
 
-status_t Camera::RecordingProxy::startRecording(const sp<ICameraRecordingProxyListener>& listener)
+status_t Camera::RecordingProxy::startRecording()
 {
     ALOGV("RecordingProxy::startRecording");
-    mCamera->setRecordingProxyListener(listener);
     mCamera->reconnect();
     return mCamera->startRecording();
 }
@@ -490,23 +382,6 @@
     mCamera->stopRecording();
 }
 
-void Camera::RecordingProxy::releaseRecordingFrame(const sp<IMemory>& mem)
-{
-    ALOGV("RecordingProxy::releaseRecordingFrame");
-    mCamera->releaseRecordingFrame(mem);
-}
-
-void Camera::RecordingProxy::releaseRecordingFrameHandle(native_handle_t* handle) {
-    ALOGV("RecordingProxy::releaseRecordingFrameHandle");
-    mCamera->releaseRecordingFrameHandle(handle);
-}
-
-void Camera::RecordingProxy::releaseRecordingFrameHandleBatch(
-        const std::vector<native_handle_t*>& handles) {
-    ALOGV("RecordingProxy::releaseRecordingFrameHandleBatch");
-    mCamera->releaseRecordingFrameHandleBatch(handles);
-}
-
 Camera::RecordingProxy::RecordingProxy(const sp<Camera>& camera)
 {
     mCamera = camera;
diff --git a/camera/CameraBase.cpp b/camera/CameraBase.cpp
index aecb70a..0b0f584 100644
--- a/camera/CameraBase.cpp
+++ b/camera/CameraBase.cpp
@@ -29,6 +29,7 @@
 #include <binder/IMemory.h>
 
 #include <camera/CameraBase.h>
+#include <camera/CameraUtils.h>
 
 // needed to instantiate
 #include <camera/Camera.h>
@@ -124,9 +125,7 @@
 {
     Mutex::Autolock _l(gLock);
     if (gCameraService.get() == 0) {
-        char value[PROPERTY_VALUE_MAX];
-        property_get("config.disable_cameraservice", value, "0");
-        if (strncmp(value, "0", 2) != 0 && strncasecmp(value, "false", 6) != 0) {
+        if (CameraUtils::isCameraServiceDisabled()) {
             return gCameraService;
         }
 
diff --git a/camera/CameraMetadata.cpp b/camera/CameraMetadata.cpp
index 135384a..96ea5f2 100644
--- a/camera/CameraMetadata.cpp
+++ b/camera/CameraMetadata.cpp
@@ -22,6 +22,7 @@
 
 #include <binder/Parcel.h>
 #include <camera/CameraMetadata.h>
+#include <camera_metadata_hidden.h>
 
 namespace android {
 
@@ -169,6 +170,11 @@
     return entryCount() == 0;
 }
 
+size_t CameraMetadata::bufferSize() const {
+    return (mBuffer == NULL) ? 0 :
+            get_camera_metadata_size(mBuffer);
+}
+
 status_t CameraMetadata::sort() {
     if (mLocked) {
         ALOGE("%s: CameraMetadata is locked", __FUNCTION__);
@@ -872,5 +878,8 @@
     return OK;
 }
 
+metadata_vendor_id_t CameraMetadata::getVendorId() {
+    return get_camera_metadata_vendor_id(mBuffer);
+}
 
 }; // namespace android
diff --git a/camera/CameraParameters.cpp b/camera/CameraParameters.cpp
index 68969cf..e95c91c 100644
--- a/camera/CameraParameters.cpp
+++ b/camera/CameraParameters.cpp
@@ -20,6 +20,7 @@
 
 #include <string.h>
 #include <stdlib.h>
+#include <unistd.h>
 #include <camera/CameraParameters.h>
 #include <system/graphics.h>
 
diff --git a/camera/CameraParameters2.cpp b/camera/CameraParameters2.cpp
index c29233c..a1cf355 100644
--- a/camera/CameraParameters2.cpp
+++ b/camera/CameraParameters2.cpp
@@ -21,6 +21,7 @@
 
 #include <string.h>
 #include <stdlib.h>
+#include <unistd.h>
 #include <camera/CameraParameters2.h>
 
 namespace android {
diff --git a/camera/CameraUtils.cpp b/camera/CameraUtils.cpp
index 67fc116..f9b1b37 100644
--- a/camera/CameraUtils.cpp
+++ b/camera/CameraUtils.cpp
@@ -23,6 +23,7 @@
 #include <system/window.h>
 #include <system/graphics.h>
 
+#include <cutils/properties.h>
 #include <utils/Log.h>
 
 namespace android {
@@ -122,4 +123,10 @@
     return OK;
 }
 
+bool CameraUtils::isCameraServiceDisabled() {
+    char value[PROPERTY_VALUE_MAX];
+    property_get("config.disable_cameraservice", value, "0");
+    return (strncmp(value, "0", 2) != 0 && strncasecmp(value, "false", 6) != 0);
+}
+
 } /* namespace android */
diff --git a/camera/ICameraClient.cpp b/camera/ICameraClient.cpp
index c02c81b..bef2ea0 100644
--- a/camera/ICameraClient.cpp
+++ b/camera/ICameraClient.cpp
@@ -142,7 +142,8 @@
             camera_frame_metadata_t metadata;
             if (data.dataAvail() > 0) {
                 metadata.number_of_faces = data.readInt32();
-                if (metadata.number_of_faces <= 0 ||
+                // Zero faces is a valid case, to notify clients that no faces are now visible
+                if (metadata.number_of_faces < 0 ||
                         metadata.number_of_faces > (int32_t)(INT32_MAX / sizeof(camera_face_t))) {
                     ALOGE("%s: Too large face count: %d", __FUNCTION__, metadata.number_of_faces);
                     return BAD_VALUE;
diff --git a/camera/ICameraRecordingProxy.cpp b/camera/ICameraRecordingProxy.cpp
index bd6af75..97523a5 100644
--- a/camera/ICameraRecordingProxy.cpp
+++ b/camera/ICameraRecordingProxy.cpp
@@ -18,7 +18,6 @@
 #define LOG_TAG "ICameraRecordingProxy"
 #include <camera/CameraUtils.h>
 #include <camera/ICameraRecordingProxy.h>
-#include <camera/ICameraRecordingProxyListener.h>
 #include <binder/IMemory.h>
 #include <binder/Parcel.h>
 #include <media/hardware/HardwareAPI.h>
@@ -29,10 +28,7 @@
 
 enum {
     START_RECORDING = IBinder::FIRST_CALL_TRANSACTION,
-    STOP_RECORDING,
-    RELEASE_RECORDING_FRAME,
-    RELEASE_RECORDING_FRAME_HANDLE,
-    RELEASE_RECORDING_FRAME_HANDLE_BATCH,
+    STOP_RECORDING
 };
 
 
@@ -44,12 +40,11 @@
     {
     }
 
-    status_t startRecording(const sp<ICameraRecordingProxyListener>& listener)
+    status_t startRecording()
     {
         ALOGV("startRecording");
         Parcel data, reply;
         data.writeInterfaceToken(ICameraRecordingProxy::getInterfaceDescriptor());
-        data.writeStrongBinder(IInterface::asBinder(listener));
         remote()->transact(START_RECORDING, data, &reply);
         return reply.readInt32();
     }
@@ -61,46 +56,6 @@
         data.writeInterfaceToken(ICameraRecordingProxy::getInterfaceDescriptor());
         remote()->transact(STOP_RECORDING, data, &reply);
     }
-
-    void releaseRecordingFrame(const sp<IMemory>& mem)
-    {
-        ALOGV("releaseRecordingFrame");
-        Parcel data, reply;
-        data.writeInterfaceToken(ICameraRecordingProxy::getInterfaceDescriptor());
-        data.writeStrongBinder(IInterface::asBinder(mem));
-        remote()->transact(RELEASE_RECORDING_FRAME, data, &reply);
-    }
-
-    void releaseRecordingFrameHandle(native_handle_t *handle) {
-        ALOGV("releaseRecordingFrameHandle");
-        Parcel data, reply;
-        data.writeInterfaceToken(ICameraRecordingProxy::getInterfaceDescriptor());
-        data.writeNativeHandle(handle);
-
-        remote()->transact(RELEASE_RECORDING_FRAME_HANDLE, data, &reply);
-
-        // Close the native handle because camera received a dup copy.
-        native_handle_close(handle);
-        native_handle_delete(handle);
-    }
-
-    void releaseRecordingFrameHandleBatch(const std::vector<native_handle_t*>& handles) {
-        ALOGV("releaseRecordingFrameHandleBatch");
-        Parcel data, reply;
-        data.writeInterfaceToken(ICameraRecordingProxy::getInterfaceDescriptor());
-        uint32_t n = handles.size();
-        data.writeUint32(n);
-        for (auto& handle : handles) {
-            data.writeNativeHandle(handle);
-        }
-        remote()->transact(RELEASE_RECORDING_FRAME_HANDLE_BATCH, data, &reply);
-
-        // Close the native handle because camera received a dup copy.
-        for (auto& handle : handles) {
-            native_handle_close(handle);
-            native_handle_delete(handle);
-        }
-    }
 };
 
 IMPLEMENT_META_INTERFACE(CameraRecordingProxy, "android.hardware.ICameraRecordingProxy");
@@ -114,9 +69,7 @@
         case START_RECORDING: {
             ALOGV("START_RECORDING");
             CHECK_INTERFACE(ICameraRecordingProxy, data, reply);
-            sp<ICameraRecordingProxyListener> listener =
-                interface_cast<ICameraRecordingProxyListener>(data.readStrongBinder());
-            reply->writeInt32(startRecording(listener));
+            reply->writeInt32(startRecording());
             return NO_ERROR;
         } break;
         case STOP_RECORDING: {
@@ -125,46 +78,6 @@
             stopRecording();
             return NO_ERROR;
         } break;
-        case RELEASE_RECORDING_FRAME: {
-            ALOGV("RELEASE_RECORDING_FRAME");
-            CHECK_INTERFACE(ICameraRecordingProxy, data, reply);
-            sp<IMemory> mem = interface_cast<IMemory>(data.readStrongBinder());
-            releaseRecordingFrame(mem);
-            return NO_ERROR;
-        } break;
-        case RELEASE_RECORDING_FRAME_HANDLE: {
-            ALOGV("RELEASE_RECORDING_FRAME_HANDLE");
-            CHECK_INTERFACE(ICameraRecordingProxy, data, reply);
-
-            // releaseRecordingFrameHandle will be responsble to close the native handle.
-            releaseRecordingFrameHandle(data.readNativeHandle());
-            return NO_ERROR;
-        } break;
-        case RELEASE_RECORDING_FRAME_HANDLE_BATCH: {
-            ALOGV("RELEASE_RECORDING_FRAME_HANDLE_BATCH");
-            CHECK_INTERFACE(ICameraRecordingProxy, data, reply);
-            uint32_t n = 0;
-            status_t res = data.readUint32(&n);
-            if (res != OK) {
-                ALOGE("%s: Failed to read batch size: %s (%d)", __FUNCTION__, strerror(-res), res);
-                return BAD_VALUE;
-            }
-            std::vector<native_handle_t*> handles;
-            handles.reserve(n);
-            for (uint32_t i = 0; i < n; i++) {
-                native_handle_t* handle = data.readNativeHandle();
-                if (handle == nullptr) {
-                    ALOGE("%s: Received a null native handle at handles[%d]",
-                            __FUNCTION__, i);
-                    return BAD_VALUE;
-                }
-                handles.push_back(handle);
-            }
-
-            // releaseRecordingFrameHandleBatch will be responsble to close the native handle.
-            releaseRecordingFrameHandleBatch(handles);
-            return NO_ERROR;
-        } break;
         default:
             return BBinder::onTransact(code, data, reply, flags);
     }
@@ -173,4 +86,3 @@
 // ----------------------------------------------------------------------------
 
 }; // namespace android
-
diff --git a/camera/ICameraRecordingProxyListener.cpp b/camera/ICameraRecordingProxyListener.cpp
deleted file mode 100644
index 66faf8f..0000000
--- a/camera/ICameraRecordingProxyListener.cpp
+++ /dev/null
@@ -1,180 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "ICameraRecordingProxyListener"
-#include <camera/CameraUtils.h>
-#include <camera/ICameraRecordingProxyListener.h>
-#include <binder/IMemory.h>
-#include <binder/Parcel.h>
-#include <media/hardware/HardwareAPI.h>
-#include <utils/Log.h>
-
-namespace android {
-
-enum {
-    DATA_CALLBACK_TIMESTAMP = IBinder::FIRST_CALL_TRANSACTION,
-    RECORDING_FRAME_HANDLE_CALLBACK_TIMESTAMP,
-    RECORDING_FRAME_HANDLE_CALLBACK_TIMESTAMP_BATCH
-};
-
-class BpCameraRecordingProxyListener: public BpInterface<ICameraRecordingProxyListener>
-{
-public:
-    explicit BpCameraRecordingProxyListener(const sp<IBinder>& impl)
-        : BpInterface<ICameraRecordingProxyListener>(impl)
-    {
-    }
-
-    void dataCallbackTimestamp(nsecs_t timestamp, int32_t msgType, const sp<IMemory>& imageData)
-    {
-        ALOGV("dataCallback");
-        Parcel data, reply;
-        data.writeInterfaceToken(ICameraRecordingProxyListener::getInterfaceDescriptor());
-        data.writeInt64(timestamp);
-        data.writeInt32(msgType);
-        data.writeStrongBinder(IInterface::asBinder(imageData));
-        remote()->transact(DATA_CALLBACK_TIMESTAMP, data, &reply, IBinder::FLAG_ONEWAY);
-    }
-
-    void recordingFrameHandleCallbackTimestamp(nsecs_t timestamp, native_handle_t* handle) {
-        ALOGV("recordingFrameHandleCallbackTimestamp");
-        Parcel data, reply;
-        data.writeInterfaceToken(ICameraRecordingProxyListener::getInterfaceDescriptor());
-        data.writeInt64(timestamp);
-        data.writeNativeHandle(handle);
-        remote()->transact(RECORDING_FRAME_HANDLE_CALLBACK_TIMESTAMP, data, &reply,
-                IBinder::FLAG_ONEWAY);
-
-        // The native handle is dupped in ICameraClient so we need to free it here.
-        native_handle_close(handle);
-        native_handle_delete(handle);
-    }
-
-    void recordingFrameHandleCallbackTimestampBatch(
-            const std::vector<nsecs_t>& timestamps,
-            const std::vector<native_handle_t*>& handles) {
-        ALOGV("recordingFrameHandleCallbackTimestampBatch");
-        Parcel data, reply;
-        data.writeInterfaceToken(ICameraRecordingProxyListener::getInterfaceDescriptor());
-
-        uint32_t n = timestamps.size();
-        if (n != handles.size()) {
-            ALOGE("%s: size of timestamps(%zu) and handles(%zu) mismatch!",
-                    __FUNCTION__, timestamps.size(), handles.size());
-            return;
-        }
-        data.writeUint32(n);
-        for (auto ts : timestamps) {
-            data.writeInt64(ts);
-        }
-        for (auto& handle : handles) {
-            data.writeNativeHandle(handle);
-        }
-        remote()->transact(RECORDING_FRAME_HANDLE_CALLBACK_TIMESTAMP_BATCH, data, &reply,
-                IBinder::FLAG_ONEWAY);
-
-        // The native handle is dupped in ICameraClient so we need to free it here.
-        for (auto& handle : handles) {
-            native_handle_close(handle);
-            native_handle_delete(handle);
-        }
-    }
-};
-
-IMPLEMENT_META_INTERFACE(CameraRecordingProxyListener, "android.hardware.ICameraRecordingProxyListener");
-
-// ----------------------------------------------------------------------
-
-status_t BnCameraRecordingProxyListener::onTransact(
-    uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
-{
-    switch(code) {
-        case DATA_CALLBACK_TIMESTAMP: {
-            ALOGV("DATA_CALLBACK_TIMESTAMP");
-            CHECK_INTERFACE(ICameraRecordingProxyListener, data, reply);
-            nsecs_t timestamp = data.readInt64();
-            int32_t msgType = data.readInt32();
-            sp<IMemory> imageData = interface_cast<IMemory>(data.readStrongBinder());
-            dataCallbackTimestamp(timestamp, msgType, imageData);
-            return NO_ERROR;
-        } break;
-        case RECORDING_FRAME_HANDLE_CALLBACK_TIMESTAMP: {
-            ALOGV("RECORDING_FRAME_HANDLE_CALLBACK_TIMESTAMP");
-            CHECK_INTERFACE(ICameraRecordingProxyListener, data, reply);
-            nsecs_t timestamp;
-            status_t res = data.readInt64(&timestamp);
-            if (res != OK) {
-                ALOGE("%s: Failed to read timestamp: %s (%d)", __FUNCTION__, strerror(-res), res);
-                return BAD_VALUE;
-            }
-
-            native_handle_t* handle = data.readNativeHandle();
-            if (handle == nullptr) {
-                ALOGE("%s: Received a null native handle", __FUNCTION__);
-                return BAD_VALUE;
-            }
-            // The native handle will be freed in
-            // BpCameraRecordingProxy::releaseRecordingFrameHandle.
-            recordingFrameHandleCallbackTimestamp(timestamp, handle);
-            return NO_ERROR;
-        } break;
-        case RECORDING_FRAME_HANDLE_CALLBACK_TIMESTAMP_BATCH: {
-            ALOGV("RECORDING_FRAME_HANDLE_CALLBACK_TIMESTAMP_BATCH");
-            CHECK_INTERFACE(ICameraRecordingProxyListener, data, reply);
-            uint32_t n = 0;
-            status_t res = data.readUint32(&n);
-            if (res != OK) {
-                ALOGE("%s: Failed to read batch size: %s (%d)", __FUNCTION__, strerror(-res), res);
-                return BAD_VALUE;
-            }
-            std::vector<nsecs_t> timestamps;
-            std::vector<native_handle_t*> handles;
-            timestamps.reserve(n);
-            handles.reserve(n);
-            for (uint32_t i = 0; i < n; i++) {
-                nsecs_t t;
-                res = data.readInt64(&t);
-                if (res != OK) {
-                    ALOGE("%s: Failed to read timestamp[%d]: %s (%d)",
-                            __FUNCTION__, i, strerror(-res), res);
-                    return BAD_VALUE;
-                }
-                timestamps.push_back(t);
-            }
-            for (uint32_t i = 0; i < n; i++) {
-                native_handle_t* handle = data.readNativeHandle();
-                if (handle == nullptr) {
-                    ALOGE("%s: Received a null native handle at handles[%d]",
-                            __FUNCTION__, i);
-                    return BAD_VALUE;
-                }
-                handles.push_back(handle);
-            }
-            // The native handle will be freed in
-            // BpCameraRecordingProxy::releaseRecordingFrameHandleBatch.
-            recordingFrameHandleCallbackTimestampBatch(timestamps, handles);
-            return NO_ERROR;
-        } break;
-        default:
-            return BBinder::onTransact(code, data, reply, flags);
-    }
-}
-
-// ----------------------------------------------------------------------------
-
-}; // namespace android
-
diff --git a/camera/TEST_MAPPING b/camera/TEST_MAPPING
new file mode 100644
index 0000000..683e183
--- /dev/null
+++ b/camera/TEST_MAPPING
@@ -0,0 +1,11 @@
+{
+  "postsubmit": [
+    {
+      "name": "CtsCameraTestCases"
+    },
+    {
+      "name": "CtsCameraTestCases",
+      "keywords": ["primary-device"]
+    }
+  ]
+}
diff --git a/camera/VendorTagDescriptor.cpp b/camera/VendorTagDescriptor.cpp
index d713d2d..24fa912 100644
--- a/camera/VendorTagDescriptor.cpp
+++ b/camera/VendorTagDescriptor.cpp
@@ -660,6 +660,16 @@
     return sGlobalVendorTagDescriptorCache;
 }
 
+bool VendorTagDescriptorCache::isVendorCachePresent(metadata_vendor_id_t vendorId) {
+    Mutex::Autolock al(sLock);
+    if ((sGlobalVendorTagDescriptorCache.get() != nullptr) &&
+            (sGlobalVendorTagDescriptorCache->getVendorIdsAndTagDescriptors().find(vendorId) !=
+             sGlobalVendorTagDescriptorCache->getVendorIdsAndTagDescriptors().end())) {
+        return true;
+    }
+    return false;
+}
+
 extern "C" {
 
 int vendor_tag_descriptor_get_tag_count(const vendor_tag_ops_t* /*v*/) {
diff --git a/camera/aidl/android/hardware/ICameraService.aidl b/camera/aidl/android/hardware/ICameraService.aidl
index ac7a35b..8af704d 100644
--- a/camera/aidl/android/hardware/ICameraService.aidl
+++ b/camera/aidl/android/hardware/ICameraService.aidl
@@ -69,7 +69,7 @@
 
     /**
      * Default UID/PID values for non-privileged callers of
-     * connect(), connectDevice(), and connectLegacy()
+     * connect() and connectDevice()
      */
     const int USE_CALLING_UID = -1;
     const int USE_CALLING_PID = -1;
@@ -93,20 +93,6 @@
             int clientUid);
 
     /**
-     * halVersion constant for connectLegacy
-     */
-    const int CAMERA_HAL_API_VERSION_UNSPECIFIED = -1;
-
-    /**
-     * Open a camera device in legacy mode, if supported by the camera module HAL.
-     */
-    ICamera connectLegacy(ICameraClient client,
-            int cameraId,
-            int halVersion,
-            String opPackageName,
-            int clientUid);
-
-    /**
      * Add listener for changes to camera device and flashlight state.
      *
      * Also returns the set of currently-known camera IDs and state of each device.
diff --git a/camera/camera2/CaptureRequest.cpp b/camera/camera2/CaptureRequest.cpp
index 1843ec4..ebc09d7 100644
--- a/camera/camera2/CaptureRequest.cpp
+++ b/camera/camera2/CaptureRequest.cpp
@@ -94,12 +94,12 @@
     // Do not distinguish null arrays from 0-sized arrays.
     for (int32_t i = 0; i < size; ++i) {
         // Parcel.writeParcelableArray
-        size_t len;
-        const char16_t* className = parcel->readString16Inplace(&len);
+        std::optional<std::string> className;
+        parcel->readUtf8FromUtf16(&className);
         ALOGV("%s: Read surface class = %s", __FUNCTION__,
-              className != NULL ? String8(className).string() : "<null>");
+              className.value_or("<null>").c_str());
 
-        if (className == NULL) {
+        if (className == std::nullopt) {
             continue;
         }
 
diff --git a/camera/cameraserver/Android.bp b/camera/cameraserver/Android.bp
index dc7f88a..09a333b 100644
--- a/camera/cameraserver/Android.bp
+++ b/camera/cameraserver/Android.bp
@@ -37,7 +37,7 @@
         "android.hardware.camera.device@3.2",
         "android.hardware.camera.device@3.4",
     ],
-    compile_multilib: "32",
+    compile_multilib: "prefer32",
     cflags: [
         "-Wall",
         "-Wextra",
diff --git a/camera/include/camera/Camera.h b/camera/include/camera/Camera.h
index 2cdb617..5579183 100644
--- a/camera/include/camera/Camera.h
+++ b/camera/include/camera/Camera.h
@@ -24,7 +24,6 @@
 #include <gui/IGraphicBufferProducer.h>
 #include <system/camera.h>
 #include <camera/ICameraRecordingProxy.h>
-#include <camera/ICameraRecordingProxyListener.h>
 #include <camera/android/hardware/ICamera.h>
 #include <camera/android/hardware/ICameraClient.h>
 #include <camera/CameraBase.h>
@@ -84,10 +83,6 @@
                                 const String16& clientPackageName,
                                 int clientUid, int clientPid);
 
-    static  status_t  connectLegacy(int cameraId, int halVersion,
-                                     const String16& clientPackageName,
-                                     int clientUid, sp<Camera>& camera);
-
             virtual     ~Camera();
 
             status_t    reconnect();
@@ -154,7 +149,6 @@
             status_t    setVideoTarget(const sp<IGraphicBufferProducer>& bufferProducer);
 
             void        setListener(const sp<CameraListener>& listener);
-            void        setRecordingProxyListener(const sp<ICameraRecordingProxyListener>& listener);
 
             // Configure preview callbacks to app. Only one of the older
             // callbacks or the callback surface can be active at the same time;
@@ -187,12 +181,8 @@
         explicit RecordingProxy(const sp<Camera>& camera);
 
         // ICameraRecordingProxy interface
-        virtual status_t startRecording(const sp<ICameraRecordingProxyListener>& listener);
+        virtual status_t startRecording();
         virtual void stopRecording();
-        virtual void releaseRecordingFrame(const sp<IMemory>& mem);
-        virtual void releaseRecordingFrameHandle(native_handle_t* handle);
-        virtual void releaseRecordingFrameHandleBatch(
-                const std::vector<native_handle_t*>& handles);
 
     private:
         sp<Camera>         mCamera;
@@ -203,8 +193,6 @@
                         Camera(const Camera&);
                         Camera& operator=(const Camera);
 
-    sp<ICameraRecordingProxyListener>  mRecordingProxyListener;
-
     friend class        CameraBase;
 };
 
diff --git a/camera/include/camera/CameraMetadata.h b/camera/include/camera/CameraMetadata.h
index 9d1b5c7..c56ee6d 100644
--- a/camera/include/camera/CameraMetadata.h
+++ b/camera/include/camera/CameraMetadata.h
@@ -128,6 +128,11 @@
     bool isEmpty() const;
 
     /**
+     * Return the allocated camera metadata buffer size in bytes.
+     */
+    size_t bufferSize() const;
+
+    /**
      * Sort metadata buffer for faster find
      */
     status_t sort();
@@ -237,6 +242,11 @@
     static status_t getTagFromName(const char *name,
             const VendorTagDescriptor* vTags, uint32_t *tag);
 
+    /**
+     * Return the current vendor tag id associated with this metadata.
+     */
+    metadata_vendor_id_t getVendorId();
+
   private:
     camera_metadata_t *mBuffer;
     mutable bool       mLocked;
diff --git a/camera/include/camera/CameraUtils.h b/camera/include/camera/CameraUtils.h
index f596f80..a397ccd 100644
--- a/camera/include/camera/CameraUtils.h
+++ b/camera/include/camera/CameraUtils.h
@@ -47,6 +47,11 @@
          */
         static bool isNativeHandleMetadata(const sp<IMemory>& imageData);
 
+        /**
+         * Check if camera service is disabled on this device
+         */
+        static bool isCameraServiceDisabled();
+
     private:
         CameraUtils();
 };
diff --git a/camera/include/camera/ICameraRecordingProxy.h b/camera/include/camera/ICameraRecordingProxy.h
index 02af2f3..4306dc1 100644
--- a/camera/include/camera/ICameraRecordingProxy.h
+++ b/camera/include/camera/ICameraRecordingProxy.h
@@ -24,13 +24,11 @@
 
 namespace android {
 
-class ICameraRecordingProxyListener;
-class IMemory;
 class Parcel;
 
 /*
- * The purpose of ICameraRecordingProxy and ICameraRecordingProxyListener is to
- * allow applications using the camera during recording.
+ * The purpose of ICameraRecordingProxy is to
+ * allow applications to use the camera during recording with the old camera API.
  *
  * Camera service allows only one client at a time. Since camcorder application
  * needs to own the camera to do things like zoom, the media recorder cannot
@@ -42,35 +40,29 @@
  * ICameraRecordingProxy
  *   startRecording()
  *   stopRecording()
- *   releaseRecordingFrame()
  *
- * ICameraRecordingProxyListener
- *   dataCallbackTimestamp()
-
  * The camcorder app opens the camera and starts the preview. The app passes
  * ICamera and ICameraRecordingProxy to the media recorder by
  * MediaRecorder::setCamera(). The recorder uses ICamera to setup the camera in
  * MediaRecorder::start(). After setup, the recorder disconnects from camera
- * service. The recorder calls ICameraRecordingProxy::startRecording() and
- * passes a ICameraRecordingProxyListener to the app. The app connects back to
- * camera service and starts the recording. The app owns the camera and can do
- * things like zoom. The media recorder receives the video frames from the
- * listener and releases them by ICameraRecordingProxy::releaseRecordingFrame.
- * The recorder calls ICameraRecordingProxy::stopRecording() to stop the
- * recording.
+ * service. The recorder calls ICameraRecordingProxy::startRecording(). The
+ * app owns the camera and can do things like zoom. The media recorder receives
+ * the video frames via a buffer queue. The recorder calls
+ * ICameraRecordingProxy::stopRecording() to stop the recording.
  *
  * The call sequences are as follows:
  * 1. The app: Camera.unlock().
  * 2. The app: MediaRecorder.setCamera().
  * 3. Start recording
  *    (1) The app: MediaRecorder.start().
- *    (2) The recorder: ICamera.unlock() and ICamera.disconnect().
- *    (3) The recorder: ICameraRecordingProxy.startRecording().
- *    (4) The app: ICamera.reconnect().
- *    (5) The app: ICamera.startRecording().
+ *    (2) The recorder: ICamera.setVideoTarget(buffer queue).
+ *    (3) The recorder: ICamera.unlock() and ICamera.disconnect().
+ *    (4) The recorder: ICameraRecordingProxy.startRecording().
+ *    (5) The app: ICamera.reconnect().
+ *    (6) The app: ICamera.startRecording().
  * 4. During recording
- *    (1) The recorder: receive frames from ICameraRecordingProxyListener.dataCallbackTimestamp()
- *    (2) The recorder: release frames by ICameraRecordingProxy.releaseRecordingFrame().
+ *    (1) The recorder: receive frames via a buffer queue
+ *    (2) The recorder: release frames via a buffer queue
  * 5. Stop recording
  *    (1) The app: MediaRecorder.stop()
  *    (2) The recorder: ICameraRecordingProxy.stopRecording().
@@ -82,12 +74,8 @@
 public:
     DECLARE_META_INTERFACE(CameraRecordingProxy);
 
-    virtual status_t        startRecording(const sp<ICameraRecordingProxyListener>& listener) = 0;
+    virtual status_t        startRecording() = 0;
     virtual void            stopRecording() = 0;
-    virtual void            releaseRecordingFrame(const sp<IMemory>& mem) = 0;
-    virtual void            releaseRecordingFrameHandle(native_handle_t *handle) = 0;
-    virtual void            releaseRecordingFrameHandleBatch(
-                                    const std::vector<native_handle_t*>& handles) = 0;
 };
 
 // ----------------------------------------------------------------------------
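Note: to illustrate the slimmed-down recorder-side sequence described in the header comment above (steps 3(2)-3(4) and 5(2)), here is a rough sketch. The call order comes from that comment; the helper function names, parameters, and error handling are hypothetical and not taken from the media recorder sources.

    // Illustrative only: shows the order of calls a recorder makes against the
    // reduced ICameraRecordingProxy; helper names and structure are hypothetical.
    #include <camera/ICameraRecordingProxy.h>
    #include <camera/android/hardware/ICamera.h>
    #include <gui/IGraphicBufferProducer.h>
    #include <utils/Errors.h>
    #include <utils/StrongPointer.h>

    using namespace android;

    status_t startVideoCapture(const sp<hardware::ICamera>& camera,
                               const sp<ICameraRecordingProxy>& proxy,
                               const sp<IGraphicBufferProducer>& videoTarget) {
        // 3(2): route frames into the recorder's buffer queue instead of a listener.
        status_t err = camera->setVideoTarget(videoTarget);
        if (err != OK) return err;

        // 3(3): give the camera back to the app before recording starts.
        camera->unlock();
        camera->disconnect();

        // 3(4): ask the app-side proxy to start recording; frames now arrive through
        // the buffer queue, so no ICameraRecordingProxyListener is passed.
        return proxy->startRecording();
    }

    void stopVideoCapture(const sp<ICameraRecordingProxy>& proxy) {
        // 5(2): stop recording through the proxy; buffers are returned via the queue.
        proxy->stopRecording();
    }
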
diff --git a/camera/include/camera/ICameraRecordingProxyListener.h b/camera/include/camera/ICameraRecordingProxyListener.h
deleted file mode 100644
index da03c56..0000000
--- a/camera/include/camera/ICameraRecordingProxyListener.h
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_HARDWARE_ICAMERA_RECORDING_PROXY_LISTENER_H
-#define ANDROID_HARDWARE_ICAMERA_RECORDING_PROXY_LISTENER_H
-
-#include <vector>
-#include <binder/IInterface.h>
-#include <cutils/native_handle.h>
-#include <stdint.h>
-#include <utils/RefBase.h>
-#include <utils/Timers.h>
-
-namespace android {
-
-class Parcel;
-class IMemory;
-
-class ICameraRecordingProxyListener: public IInterface
-{
-public:
-    DECLARE_META_INTERFACE(CameraRecordingProxyListener);
-
-    virtual void dataCallbackTimestamp(nsecs_t timestamp, int32_t msgType,
-                                       const sp<IMemory>& data) = 0;
-
-    virtual void recordingFrameHandleCallbackTimestamp(nsecs_t timestamp,
-                                                       native_handle_t* handle) = 0;
-
-    virtual void recordingFrameHandleCallbackTimestampBatch(
-            const std::vector<nsecs_t>& timestamps,
-            const std::vector<native_handle_t*>& handles) = 0;
-};
-
-// ----------------------------------------------------------------------------
-
-class BnCameraRecordingProxyListener: public BnInterface<ICameraRecordingProxyListener>
-{
-public:
-    virtual status_t    onTransact( uint32_t code,
-                                    const Parcel& data,
-                                    Parcel* reply,
-                                    uint32_t flags = 0);
-};
-
-}; // namespace android
-
-#endif
diff --git a/camera/include/camera/VendorTagDescriptor.h b/camera/include/camera/VendorTagDescriptor.h
index b2fbf3a..b3440d5 100644
--- a/camera/include/camera/VendorTagDescriptor.h
+++ b/camera/include/camera/VendorTagDescriptor.h
@@ -249,6 +249,12 @@
      */
     static void clearGlobalVendorTagCache();
 
+    /**
+     * Return true if the given vendor id is present in the vendor tag caches;
+     * return false otherwise.
+     */
+    static bool isVendorCachePresent(metadata_vendor_id_t vendorId);
+
 };
 
 } /* namespace android */
diff --git a/camera/ndk/impl/ACameraManager.cpp b/camera/ndk/impl/ACameraManager.cpp
index b58ebe2..73cabbf 100644
--- a/camera/ndk/impl/ACameraManager.cpp
+++ b/camera/ndk/impl/ACameraManager.cpp
@@ -24,6 +24,7 @@
 #include <utils/Vector.h>
 #include <cutils/properties.h>
 #include <stdlib.h>
+#include <camera/CameraUtils.h>
 #include <camera/VendorTagDescriptor.h>
 
 using namespace android::acam;
@@ -70,12 +71,6 @@
     mCameraService.clear();
 }
 
-static bool isCameraServiceDisabled() {
-    char value[PROPERTY_VALUE_MAX];
-    property_get("config.disable_cameraservice", value, "0");
-    return (strncmp(value, "0", 2) != 0 && strncasecmp(value, "false", 6) != 0);
-}
-
 sp<hardware::ICameraService> CameraManagerGlobal::getCameraService() {
     Mutex::Autolock _l(mLock);
     return getCameraServiceLocked();
@@ -83,7 +78,7 @@
 
 sp<hardware::ICameraService> CameraManagerGlobal::getCameraServiceLocked() {
     if (mCameraService.get() == nullptr) {
-        if (isCameraServiceDisabled()) {
+        if (CameraUtils::isCameraServiceDisabled()) {
             return mCameraService;
         }
 
@@ -743,7 +738,7 @@
     // No way to get package name from native.
     // Send a zero length package name and let camera service figure it out from UID
     binder::Status serviceRet = cs->connectDevice(
-            callbacks, String16(cameraId), String16(""), std::unique_ptr<String16>(),
+            callbacks, String16(cameraId), String16(""), {},
             hardware::ICameraService::USE_CALLING_UID, /*out*/&deviceRemote);
 
     if (!serviceRet.isOk()) {
diff --git a/camera/ndk/impl/ACameraMetadata.cpp b/camera/ndk/impl/ACameraMetadata.cpp
index bfa60d9..895514e 100644
--- a/camera/ndk/impl/ACameraMetadata.cpp
+++ b/camera/ndk/impl/ACameraMetadata.cpp
@@ -426,6 +426,7 @@
             camera_metadata_ro_entry_t entry;
             int ret = get_camera_metadata_ro_entry(rawMetadata, i, &entry);
             if (ret != 0) {
+                mData->unlock(rawMetadata);
                 ALOGE("%s: error reading metadata index %zu", __FUNCTION__, i);
                 return ACAMERA_ERROR_UNKNOWN;
             }
@@ -527,6 +528,7 @@
         case ACAMERA_LENS_OPTICAL_STABILIZATION_MODE:
         case ACAMERA_NOISE_REDUCTION_MODE:
         case ACAMERA_SCALER_CROP_REGION:
+        case ACAMERA_SCALER_ROTATE_AND_CROP:
         case ACAMERA_SENSOR_EXPOSURE_TIME:
         case ACAMERA_SENSOR_FRAME_DURATION:
         case ACAMERA_SENSOR_SENSITIVITY:
diff --git a/camera/ndk/include/camera/NdkCameraMetadata.h b/camera/ndk/include/camera/NdkCameraMetadata.h
index 072bb02..a840bd1 100644
--- a/camera/ndk/include/camera/NdkCameraMetadata.h
+++ b/camera/ndk/include/camera/NdkCameraMetadata.h
@@ -36,6 +36,7 @@
 #ifndef _NDK_CAMERA_METADATA_H
 #define _NDK_CAMERA_METADATA_H
 
+#include <stdbool.h>
 #include <stdint.h>
 #include <sys/cdefs.h>
 
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index 1354fce..2d54bd1 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -3736,6 +3736,108 @@
     ACAMERA_SCALER_AVAILABLE_RECOMMENDED_INPUT_OUTPUT_FORMATS_MAP = 
                                                                 // int32
             ACAMERA_SCALER_START + 15,
+    /**
+     * <p>List of rotate-and-crop modes for ACAMERA_SCALER_ROTATE_AND_CROP that are supported by this camera device.</p>
+     *
+     * @see ACAMERA_SCALER_ROTATE_AND_CROP
+     *
+     * <p>Type: byte[n]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>This entry lists the valid modes for ACAMERA_SCALER_ROTATE_AND_CROP for this camera device.</p>
+     * <p>Starting with API level 30, all devices will list at least <code>ROTATE_AND_CROP_NONE</code>.
+     * Devices with support for rotate-and-crop will additionally list at least
+     * <code>ROTATE_AND_CROP_AUTO</code> and <code>ROTATE_AND_CROP_90</code>.</p>
+     *
+     * @see ACAMERA_SCALER_ROTATE_AND_CROP
+     */
+    ACAMERA_SCALER_AVAILABLE_ROTATE_AND_CROP_MODES =            // byte[n]
+            ACAMERA_SCALER_START + 16,
+    /**
+     * <p>Whether a rotation-and-crop operation is applied to processed
+     * outputs from the camera.</p>
+     *
+     * <p>Type: byte (acamera_metadata_enum_android_scaler_rotate_and_crop_t)</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+     *   <li>ACaptureRequest</li>
+     * </ul></p>
+     *
+     * <p>This control is primarily intended to help camera applications with no support for
+     * multi-window modes to work correctly on devices where multi-window scenarios are
+     * unavoidable, such as foldables or other devices with variable display geometry or more
+     * free-form window placement (such as laptops, which often place portrait-orientation apps
+     * in landscape with pillarboxing).</p>
+     * <p>If supported, the default value is <code>ROTATE_AND_CROP_AUTO</code>, which allows the camera API
+     * to enable backwards-compatibility support for applications that do not support resizing
+     * / multi-window modes, when the device is in fact in a multi-window mode (such as inset
+     * portrait on laptops, or on a foldable device in some fold states).  In addition,
+     * <code>ROTATE_AND_CROP_NONE</code> and <code>ROTATE_AND_CROP_90</code> will always be available if this control
+     * is supported by the device.  If not supported, devices API level 30 or higher will always
+     * list only <code>ROTATE_AND_CROP_NONE</code>.</p>
+     * <p>When <code>CROP_AUTO</code> is in use, and the camera API activates backward-compatibility mode,
+     * several metadata fields will also be parsed differently to ensure that coordinates are
+     * correctly handled for features like drawing face detection boxes or passing in
+     * tap-to-focus coordinates.  The camera API will convert positions in the active array
+     * coordinate system to/from the cropped-and-rotated coordinate system to make the
+     * operation transparent for applications.  The following controls are affected:</p>
+     * <ul>
+     * <li>ACAMERA_CONTROL_AE_REGIONS</li>
+     * <li>ACAMERA_CONTROL_AF_REGIONS</li>
+     * <li>ACAMERA_CONTROL_AWB_REGIONS</li>
+     * <li>android.statistics.faces</li>
+     * </ul>
+     * <p>Capture results will contain the actual value selected by the API;
+     * <code>ROTATE_AND_CROP_AUTO</code> will never be seen in a capture result.</p>
+     * <p>Applications can also select their preferred cropping mode, either to opt out of the
+     * backwards-compatibility treatment, or to use the cropping feature themselves as needed.
+     * In this case, no coordinate translation will be done automatically, and all controls
+     * will continue to use the normal active array coordinates.</p>
+     * <p>Cropping and rotating is done after the application of digital zoom (via either
+     * ACAMERA_SCALER_CROP_REGION or ACAMERA_CONTROL_ZOOM_RATIO), but before each individual
+     * output is further cropped and scaled. It only affects processed outputs such as
+     * YUV, PRIVATE, and JPEG.  It has no effect on RAW outputs.</p>
+     * <p>When <code>ROTATE_AND_CROP_90</code> or <code>ROTATE_AND_CROP_270</code> is selected, there is a significant loss of
+     * field of view. For example, with a 4:3 aspect ratio output of 1600x1200, <code>ROTATE_AND_CROP_90</code> will still
+     * produce 1600x1200 output, but these buffers are cropped from a vertical 3:4 slice at the
+     * center of the 4:3 area, then rotated to be 4:3, and then upscaled to 1600x1200.  Only
+     * 56.25% of the original FOV is still visible.  In general, for an aspect ratio of <code>w:h</code>,
+     * the crop and rotate operation leaves <code>(h/w)^2</code> of the field of view visible. For 16:9,
+     * this is ~31.6%.</p>
+     * <p>As a visual example, the figure below shows the effect of <code>ROTATE_AND_CROP_90</code> on the
+     * outputs for the following parameters:</p>
+     * <ul>
+     * <li>Sensor active array: <code>2000x1500</code></li>
+     * <li>Crop region: top-left: <code>(500, 375)</code>, size: <code>(1000, 750)</code> (4:3 aspect ratio)</li>
+     * <li>Output streams: YUV <code>640x480</code> and YUV <code>1280x720</code></li>
+     * <li><code>ROTATE_AND_CROP_90</code></li>
+     * </ul>
+     * <p><img alt="Effect of ROTATE_AND_CROP_90" src="../images/camera2/metadata/android.scaler.rotateAndCrop/crop-region-rotate-90-43-ratio.png" /></p>
+     * <p>With these settings, the regions of the active array covered by the output streams are:</p>
+     * <ul>
+     * <li>640x480 stream crop: top-left: <code>(219, 375)</code>, size: <code>(562, 750)</code></li>
+     * <li>1280x720 stream crop: top-left: <code>(289, 375)</code>, size: <code>(422, 750)</code></li>
+     * </ul>
+     * <p>Since the buffers are rotated, the buffers as seen by the application are:</p>
+     * <ul>
+     * <li>640x480 stream: top-left: <code>(781, 375)</code> on active array, size: <code>(640, 480)</code>, downscaled 1.17x from sensor pixels</li>
+     * <li>1280x720 stream: top-left: <code>(711, 375)</code> on active array, size: <code>(1280, 720)</code>, upscaled 1.71x from sensor pixels</li>
+     * </ul>
+     *
+     * @see ACAMERA_CONTROL_AE_REGIONS
+     * @see ACAMERA_CONTROL_AF_REGIONS
+     * @see ACAMERA_CONTROL_AWB_REGIONS
+     * @see ACAMERA_CONTROL_ZOOM_RATIO
+     * @see ACAMERA_SCALER_CROP_REGION
+     */
+    ACAMERA_SCALER_ROTATE_AND_CROP =                            // byte (acamera_metadata_enum_android_scaler_rotate_and_crop_t)
+            ACAMERA_SCALER_START + 17,
     ACAMERA_SCALER_END,
 
     /**
@@ -4779,7 +4881,7 @@
      * rectangle, and cropping to the rectangle given in ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.</p>
      * <p>E.g. to calculate position of a pixel, (x,y), in a processed YUV output image with the
      * dimensions in ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE given the position of a pixel,
-     * (x', y'), in the raw pixel array with dimensions give in
+     * (x', y'), in the raw pixel array with dimensions given in
      * ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE:</p>
      * <ol>
      * <li>Choose a pixel (x', y') within the active array region of the raw buffer given in
@@ -8298,6 +8400,51 @@
 
 } acamera_metadata_enum_android_scaler_available_recommended_stream_configurations_t;
 
+// ACAMERA_SCALER_ROTATE_AND_CROP
+typedef enum acamera_metadata_enum_acamera_scaler_rotate_and_crop {
+    /**
+     * <p>No rotate and crop is applied. Processed outputs are in the sensor orientation.</p>
+     */
+    ACAMERA_SCALER_ROTATE_AND_CROP_NONE                              = 0,
+
+    /**
+     * <p>Processed images are rotated by 90 degrees clockwise, and then cropped
+     * to the original aspect ratio.</p>
+     */
+    ACAMERA_SCALER_ROTATE_AND_CROP_90                                = 1,
+
+    /**
+     * <p>Processed images are rotated by 180 degrees.  Since the aspect ratio does not
+     * change, no cropping is performed.</p>
+     */
+    ACAMERA_SCALER_ROTATE_AND_CROP_180                               = 2,
+
+    /**
+     * <p>Processed images are rotated by 270 degrees clockwise, and then cropped
+     * to the original aspect ratio.</p>
+     */
+    ACAMERA_SCALER_ROTATE_AND_CROP_270                               = 3,
+
+    /**
+     * <p>The camera API automatically selects the best concrete value for
+     * rotate-and-crop based on the application's support for resizability and the current
+     * multi-window mode.</p>
+     * <p>If the application does not support resizing but the display mode for its main
+     * Activity is not in a typical orientation, the camera API will set <code>ROTATE_AND_CROP_90</code>
+     * or some other supported rotation value, depending on device configuration,
+     * to ensure preview and captured images are correctly shown to the user. Otherwise,
+     * <code>ROTATE_AND_CROP_NONE</code> will be selected.</p>
+     * <p>When a value other than NONE is selected, several metadata fields will also be parsed
+     * differently to ensure that coordinates are correctly handled for features like drawing
+     * face detection boxes or passing in tap-to-focus coordinates.  The camera API will
+     * convert positions in the active array coordinate system to/from the cropped-and-rotated
+     * coordinate system to make the operation transparent for applications.</p>
+     * <p>No coordinate mapping will be done when the application selects a non-AUTO mode.</p>
+     */
+    ACAMERA_SCALER_ROTATE_AND_CROP_AUTO                              = 4,
+
+} acamera_metadata_enum_android_scaler_rotate_and_crop_t;
+
 
 // ACAMERA_SENSOR_REFERENCE_ILLUMINANT1
 typedef enum acamera_metadata_enum_acamera_sensor_reference_illuminant1 {
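As a cross-check of the arithmetic in the new documentation: a 4:3 output under ROTATE_AND_CROP_90 keeps (3/4)^2 = 56.25% of the field of view and a 16:9 output keeps (9/16)^2 ≈ 31.6%, matching the figures quoted above. Below is a minimal, illustrative sketch (not part of this change) of how an NDK client might consume the two new tags; it assumes `chars` was obtained from ACameraManager_getCameraCharacteristics() and `request` from ACameraDevice_createCaptureRequest().

    #include <camera/NdkCameraMetadata.h>
    #include <camera/NdkCameraMetadataTags.h>
    #include <camera/NdkCaptureRequest.h>
    #include <stdbool.h>
    #include <stdint.h>

    // Request ROTATE_AND_CROP_AUTO when the device advertises it; otherwise leave the
    // request untouched (API level 30+ devices always list at least ROTATE_AND_CROP_NONE).
    static bool requestAutoRotateAndCrop(const ACameraMetadata* chars, ACaptureRequest* request) {
        ACameraMetadata_const_entry entry;
        if (ACameraMetadata_getConstEntry(chars,
                ACAMERA_SCALER_AVAILABLE_ROTATE_AND_CROP_MODES, &entry) != ACAMERA_OK) {
            return false;  // tag absent: device does not report rotate-and-crop modes
        }
        for (uint32_t i = 0; i < entry.count; i++) {
            if (entry.data.u8[i] == ACAMERA_SCALER_ROTATE_AND_CROP_AUTO) {
                uint8_t mode = ACAMERA_SCALER_ROTATE_AND_CROP_AUTO;
                return ACaptureRequest_setEntry_u8(request,
                        ACAMERA_SCALER_ROTATE_AND_CROP, 1, &mode) == ACAMERA_OK;
            }
        }
        return false;  // only fixed modes (at minimum ROTATE_AND_CROP_NONE) are listed
    }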
diff --git a/camera/ndk/include/camera/NdkCameraWindowType.h b/camera/ndk/include/camera/NdkCameraWindowType.h
index 99f67e9..df977da 100644
--- a/camera/ndk/include/camera/NdkCameraWindowType.h
+++ b/camera/ndk/include/camera/NdkCameraWindowType.h
@@ -44,7 +44,7 @@
  */
 #ifdef __ANDROID_VNDK__
 #include <cutils/native_handle.h>
-typedef native_handle_t ACameraWindowType;
+typedef const native_handle_t ACameraWindowType;
 #else
 #include <android/native_window.h>
 typedef ANativeWindow ACameraWindowType;
diff --git a/camera/ndk/ndk_vendor/impl/ACameraCaptureSessionVendor.h b/camera/ndk/ndk_vendor/impl/ACameraCaptureSessionVendor.h
index e1af8c1..5a1af79 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraCaptureSessionVendor.h
+++ b/camera/ndk/ndk_vendor/impl/ACameraCaptureSessionVendor.h
@@ -18,7 +18,7 @@
 #include "utils.h"
 
 struct ACaptureSessionOutput {
-    explicit ACaptureSessionOutput(native_handle_t* window, bool isShared = false,
+    explicit ACaptureSessionOutput(const native_handle_t* window, bool isShared = false,
             const char* physicalCameraId = "") :
             mWindow(window), mIsShared(isShared), mPhysicalCameraId(physicalCameraId) {};
 
diff --git a/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp b/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
index e511a3f..0fcb700 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
+++ b/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
@@ -355,7 +355,7 @@
     std::vector<int32_t> requestStreamIdxList;
     std::vector<int32_t> requestSurfaceIdxList;
     for (auto outputTarget : request->targets->mOutputs) {
-        native_handle_t* anw = outputTarget.mWindow;
+        const native_handle_t* anw = outputTarget.mWindow;
         bool found = false;
         req->mSurfaceList.push_back(anw);
         // lookup stream/surface ID
@@ -434,7 +434,7 @@
     }
     pRequest->targets = new ACameraOutputTargets();
     for (size_t i = 0; i < req->mSurfaceList.size(); i++) {
-        native_handle_t* anw = req->mSurfaceList[i];
+        const native_handle_t* anw = req->mSurfaceList[i];
         ACameraOutputTarget outputTarget(anw);
         pRequest->targets->mOutputs.insert(outputTarget);
     }
@@ -611,7 +611,7 @@
 
     std::set<std::pair<native_handle_ptr_wrapper, OutputConfigurationWrapper>> outputSet;
     for (auto outConfig : outputs->mOutputs) {
-        native_handle_t* anw = outConfig.mWindow;
+        const native_handle_t* anw = outConfig.mWindow;
         OutputConfigurationWrapper outConfigInsertW;
         OutputConfiguration &outConfigInsert = outConfigInsertW.mOutputConfiguration;
         outConfigInsert.rotation = utils::convertToHidl(outConfig.mRotation);
@@ -846,8 +846,7 @@
             for (auto streamAndWindowId : request->mCaptureRequest.streamAndWindowIds) {
                 int32_t windowId = streamAndWindowId.windowId;
                 if (utils::isWindowNativeHandleEqual(windowHandles[windowId],outHandle)) {
-                    native_handle_t* anw =
-                        const_cast<native_handle_t *>(windowHandles[windowId].getNativeHandle());
+                    const native_handle_t* anw = windowHandles[windowId].getNativeHandle();
                     ALOGV("Camera %s Lost output buffer for ANW %p frame %" PRId64,
                             getId(), anw, frameNumber);
 
@@ -1244,7 +1243,7 @@
                         return;
                     }
 
-                    native_handle_t* anw;
+                    const native_handle_t* anw;
                     found = msg->findPointer(kAnwKey, (void**) &anw);
                     if (!found) {
                         ALOGE("%s: Cannot find native_handle_t!", __FUNCTION__);
diff --git a/camera/ndk/ndk_vendor/impl/ACaptureRequestVendor.h b/camera/ndk/ndk_vendor/impl/ACaptureRequestVendor.h
index ed67615..5715d77 100644
--- a/camera/ndk/ndk_vendor/impl/ACaptureRequestVendor.h
+++ b/camera/ndk/ndk_vendor/impl/ACaptureRequestVendor.h
@@ -17,7 +17,7 @@
 #include "utils.h"
 
 struct ACameraOutputTarget {
-    explicit ACameraOutputTarget(native_handle_t* window) : mWindow(window) {};
+    explicit ACameraOutputTarget(const native_handle_t* window) : mWindow(window) {};
 
     bool operator == (const ACameraOutputTarget& other) const {
         return mWindow == other.mWindow;
diff --git a/camera/ndk/ndk_vendor/impl/utils.h b/camera/ndk/ndk_vendor/impl/utils.h
index f389f03..6f5820e 100644
--- a/camera/ndk/ndk_vendor/impl/utils.h
+++ b/camera/ndk/ndk_vendor/impl/utils.h
@@ -42,7 +42,7 @@
 // Utility class so that CaptureRequest can be stored by sp<>
 struct CaptureRequest : public RefBase {
   frameworks::cameraservice::device::V2_0::CaptureRequest mCaptureRequest;
-  std::vector<native_handle_t *> mSurfaceList;
+  std::vector<const native_handle_t *> mSurfaceList;
   //Physical camera settings metadata is stored here, since the capture request
   //might not contain it. That's since, fmq might have consumed it.
   hidl_vec<PhysicalCameraSettings> mPhysicalCameraSettings;
@@ -62,13 +62,13 @@
 // Utility class so the native_handle_t can be compared with  its contents instead
 // of just raw pointer comparisons.
 struct native_handle_ptr_wrapper {
-    native_handle_t *mWindow = nullptr;
+    const native_handle_t *mWindow = nullptr;
 
-    native_handle_ptr_wrapper(native_handle_t *nh) : mWindow(nh) { }
+    native_handle_ptr_wrapper(const native_handle_t *nh) : mWindow(nh) { }
 
     native_handle_ptr_wrapper() = default;
 
-    operator native_handle_t *() const { return mWindow; }
+    operator const native_handle_t *() const { return mWindow; }
 
     bool operator ==(const native_handle_ptr_wrapper other) const {
         return isWindowNativeHandleEqual(mWindow, other.mWindow);
diff --git a/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp b/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp
index 938b5f5..ba14c5c 100644
--- a/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp
+++ b/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp
@@ -50,7 +50,7 @@
 static constexpr int kTestImageFormat = AIMAGE_FORMAT_YUV_420_888;
 
 using android::hardware::camera::common::V1_0::helper::VendorTagDescriptorCache;
-using ConfiguredWindows = std::set<native_handle_t *>;
+using ConfiguredWindows = std::set<const native_handle_t *>;
 
 class CameraHelper {
    public:
@@ -60,11 +60,11 @@
 
     struct PhysicalImgReaderInfo {
         const char* physicalCameraId;
-        native_handle_t* anw;
+        const native_handle_t* anw;
     };
 
     // Retaining the error code in case the caller needs to analyze it.
-    std::variant<int, ConfiguredWindows> initCamera(native_handle_t* imgReaderAnw,
+    std::variant<int, ConfiguredWindows> initCamera(const native_handle_t* imgReaderAnw,
             const std::vector<PhysicalImgReaderInfo>& physicalImgReaders,
             bool usePhysicalSettings) {
         ConfiguredWindows configuredWindows;
@@ -257,7 +257,7 @@
     ACameraDevice_StateCallbacks mDeviceCb{this, nullptr, nullptr};
     ACameraCaptureSession_stateCallbacks mSessionCb{ this, nullptr, nullptr, nullptr};
 
-    native_handle_t* mImgReaderAnw = nullptr;  // not owned by us.
+    const native_handle_t* mImgReaderAnw = nullptr;  // not owned by us.
 
     // Camera device
     ACameraDevice* mDevice = nullptr;
@@ -396,7 +396,7 @@
         return 0;
     }
 
-    native_handle_t* getNativeWindow() { return mImgReaderAnw; }
+    const native_handle_t* getNativeWindow() { return mImgReaderAnw; }
 
     int getAcquiredImageCount() {
         std::lock_guard<std::mutex> lock(mMutex);
diff --git a/camera/tests/CameraBinderTests.cpp b/camera/tests/CameraBinderTests.cpp
index 8ccded2..eee05ff 100644
--- a/camera/tests/CameraBinderTests.cpp
+++ b/camera/tests/CameraBinderTests.cpp
@@ -378,7 +378,7 @@
         sp<TestCameraDeviceCallbacks> callbacks(new TestCameraDeviceCallbacks());
         sp<hardware::camera2::ICameraDeviceUser> device;
         res = service->connectDevice(callbacks, cameraId, String16("meeeeeeeee!"),
-                std::unique_ptr<String16>(), hardware::ICameraService::USE_CALLING_UID,
+                {}, hardware::ICameraService::USE_CALLING_UID,
                 /*out*/&device);
         EXPECT_TRUE(res.isOk()) << res;
         ASSERT_NE(nullptr, device.get());
@@ -421,7 +421,7 @@
         {
             SCOPED_TRACE("openNewDevice");
             binder::Status res = service->connectDevice(callbacks, deviceId, String16("meeeeeeeee!"),
-                    std::unique_ptr<String16>(), hardware::ICameraService::USE_CALLING_UID,
+                    {}, hardware::ICameraService::USE_CALLING_UID,
                     /*out*/&device);
             EXPECT_TRUE(res.isOk()) << res;
         }
diff --git a/cmds/screenrecord/screenrecord.cpp b/cmds/screenrecord/screenrecord.cpp
index f4fb626..b31a58b 100644
--- a/cmds/screenrecord/screenrecord.cpp
+++ b/cmds/screenrecord/screenrecord.cpp
@@ -273,14 +273,11 @@
         SurfaceComposerClient::Transaction& t,
         const sp<IBinder>& dpy,
         const ui::DisplayState& displayState) {
-    const ui::Size& viewport = displayState.viewport;
-
-    // Set the region of the layer stack we're interested in, which in our
-    // case is "all of it".
-    Rect layerStackRect(viewport);
+    // Set the region of the layer stack we're interested in, which in our case is "all of it".
+    Rect layerStackRect(displayState.layerStackSpaceRect);
 
     // We need to preserve the aspect ratio of the display.
-    float displayAspect = viewport.getHeight() / static_cast<float>(viewport.getWidth());
+    float displayAspect = layerStackRect.getHeight() / static_cast<float>(layerStackRect.getWidth());
 
 
     // Set the way we map the output onto the display surface (which will
@@ -699,20 +696,21 @@
         return err;
     }
 
-    const ui::Size& viewport = displayState.viewport;
+    const ui::Size& layerStackSpaceRect = displayState.layerStackSpaceRect;
     if (gVerbose) {
         printf("Display is %dx%d @%.2ffps (orientation=%s), layerStack=%u\n",
-                viewport.getWidth(), viewport.getHeight(), displayConfig.refreshRate,
-                toCString(displayState.orientation), displayState.layerStack);
+                layerStackSpaceRect.getWidth(), layerStackSpaceRect.getHeight(),
+                displayConfig.refreshRate, toCString(displayState.orientation),
+                displayState.layerStack);
         fflush(stdout);
     }
 
     // Encoder can't take odd number as config
     if (gVideoWidth == 0) {
-        gVideoWidth = floorToEven(viewport.getWidth());
+        gVideoWidth = floorToEven(layerStackSpaceRect.getWidth());
     }
     if (gVideoHeight == 0) {
-        gVideoHeight = floorToEven(viewport.getHeight());
+        gVideoHeight = floorToEven(layerStackSpaceRect.getHeight());
     }
 
     // Configure and start the encoder.
@@ -1170,14 +1168,14 @@
             }
             break;
         case 'd':
-            gPhysicalDisplayId = atoll(optarg);
-            if (gPhysicalDisplayId == 0) {
+            gPhysicalDisplayId = PhysicalDisplayId(atoll(optarg));
+            if (gPhysicalDisplayId.value == 0) {
                 fprintf(stderr, "Please specify a valid physical display id\n");
                 return 2;
             } else if (SurfaceComposerClient::
                     getPhysicalDisplayToken(gPhysicalDisplayId) == nullptr) {
-                fprintf(stderr, "Invalid physical display id: %"
-                        ANDROID_PHYSICAL_DISPLAY_ID_FORMAT "\n", gPhysicalDisplayId);
+                fprintf(stderr, "Invalid physical display id: %s\n",
+                        to_string(gPhysicalDisplayId).c_str());
                 return 2;
             }
             break;
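As a worked example of the sizing logic above (numbers are illustrative, not taken from the change): with a layerStackSpaceRect of 1080x2249 and no explicit size on the command line, the default encoder dimensions are floored to even values while the aspect ratio comes from the full rect.

    // Same rounding as screenrecord's floorToEven() helper (num & ~1).
    const uint32_t rectW = 1080, rectH = 2249;
    const uint32_t videoW = rectW & ~1u;                            // 1080 (already even)
    const uint32_t videoH = rectH & ~1u;                            // 2248
    const float displayAspect = rectH / static_cast<float>(rectW);  // ~2.082, preserved when mapping the output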
diff --git a/cmds/stagefright/AudioPlayer.cpp b/cmds/stagefright/AudioPlayer.cpp
index eb76953..55427ca 100644
--- a/cmds/stagefright/AudioPlayer.cpp
+++ b/cmds/stagefright/AudioPlayer.cpp
@@ -134,15 +134,18 @@
     success = format->findInt32(kKeySampleRate, &mSampleRate);
     CHECK(success);
 
-    int32_t numChannels, channelMask;
+    int32_t numChannels;
     success = format->findInt32(kKeyChannelCount, &numChannels);
     CHECK(success);
 
-    if(!format->findInt32(kKeyChannelMask, &channelMask)) {
+    audio_channel_mask_t channelMask;
+    if (int32_t rawChannelMask; !format->findInt32(kKeyChannelMask, &rawChannelMask)) {
         // log only when there's a risk of ambiguity of channel mask selection
         ALOGI_IF(numChannels > 2,
                 "source format didn't specify channel mask, using (%d) channel order", numChannels);
         channelMask = CHANNEL_MASK_USE_CHANNEL_ORDER;
+    } else {
+        channelMask = static_cast<audio_channel_mask_t>(rawChannelMask);
     }
 
     audio_format_t audioFormat = AUDIO_FORMAT_PCM_16_BIT;
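The replacement above relies on a C++17 if-statement initializer so that rawChannelMask is scoped to the branch that converts it; a standalone illustration of the idiom (unrelated to the audio types) would be:

    #include <cstdio>

    int main() {
        // `raw` exists only inside the if/else, like rawChannelMask above.
        if (int raw = 0; raw == 0) {
            std::printf("no value provided, using fallback\n");
        } else {
            std::printf("got %d\n", raw);
        }
        return 0;
    }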
diff --git a/cmds/stagefright/record.cpp b/cmds/stagefright/record.cpp
index 37091c4..098c278 100644
--- a/cmds/stagefright/record.cpp
+++ b/cmds/stagefright/record.cpp
@@ -259,31 +259,6 @@
     printf("$\n");
 #endif
 
-#if 0
-    CameraSource *source = CameraSource::Create(
-            String16(argv[0], strlen(argv[0])));
-    source->start();
-
-    printf("source = %p\n", source);
-
-    for (int i = 0; i < 100; ++i) {
-        MediaBuffer *buffer;
-        status_t err = source->read(&buffer);
-        CHECK_EQ(err, (status_t)OK);
-
-        printf("got a frame, data=%p, size=%d\n",
-               buffer->data(), buffer->range_length());
-
-        buffer->release();
-        buffer = NULL;
-    }
-
-    err = source->stop();
-
-    delete source;
-    source = NULL;
-#endif
-
     if (err != OK && err != ERROR_END_OF_STREAM) {
         fprintf(stderr, "record failed: %d\n", err);
         return 1;
diff --git a/drm/TEST_MAPPING b/drm/TEST_MAPPING
index 2595e3e..9f6a532 100644
--- a/drm/TEST_MAPPING
+++ b/drm/TEST_MAPPING
@@ -9,17 +9,9 @@
         },
         {
           "include-filter": "com.google.android.media.gts.WidevineGenericOpsTests"
-        }
-      ]
-    },
-    {
-      "name": "GtsExoPlayerTestCases",
-      "options" : [
-        {
-          "include-annotation": "android.platform.test.annotations.SocPresubmit"
         },
         {
-          "include-filter": "com.google.android.exoplayer.gts.DashTest#testWidevine23FpsH264Fixed"
+          "include-filter": "com.google.android.media.gts.WidevineYouTubePerformanceTests"
         }
       ]
     }
diff --git a/drm/common/Android.bp b/drm/common/Android.bp
index 272684c..248570e 100644
--- a/drm/common/Android.bp
+++ b/drm/common/Android.bp
@@ -14,7 +14,7 @@
 // limitations under the License.
 //
 
-cc_library_static {
+cc_library {
     name: "libdrmframeworkcommon",
 
     srcs: [
@@ -35,7 +35,11 @@
 
     cflags: ["-Wall", "-Werror"],
 
-    shared_libs: ["libbinder"],
+    shared_libs: [
+        "libbinder",
+        "liblog",
+        "libutils"
+    ],
 
     export_include_dirs: ["include"],
 }
diff --git a/drm/common/include/DrmEngineBase.h b/drm/common/include/DrmEngineBase.h
index 73f11a4..c0a5e3b 100644
--- a/drm/common/include/DrmEngineBase.h
+++ b/drm/common/include/DrmEngineBase.h
@@ -309,7 +309,7 @@
 
     /**
      * Removes all the rights information of each plug-in associated with
-     * DRM framework. Will be used in master reset
+     * DRM framework.
      *
      * @param[in] uniqueId Unique identifier for a session
      * @return status_t
diff --git a/drm/common/include/IDrmEngine.h b/drm/common/include/IDrmEngine.h
index 1837a11..a545941 100644
--- a/drm/common/include/IDrmEngine.h
+++ b/drm/common/include/IDrmEngine.h
@@ -250,7 +250,7 @@
 
     /**
      * Removes all the rights information of each plug-in associated with
-     * DRM framework. Will be used in master reset
+     * DRM framework.
      *
      * @param[in] uniqueId Unique identifier for a session
      * @return status_t
diff --git a/drm/drmserver/Android.bp b/drm/drmserver/Android.bp
index b68e6c2..8b7c551 100644
--- a/drm/drmserver/Android.bp
+++ b/drm/drmserver/Android.bp
@@ -31,19 +31,18 @@
         "liblog",
         "libbinder",
         "libdl",
+        "libdrmframeworkcommon",
         "libselinux",
         "libstagefright_foundation",
     ],
 
-    static_libs: ["libdrmframeworkcommon"],
-
     cflags: [
         "-Wall",
         "-Wextra",
         "-Werror",
     ],
 
-    compile_multilib: "32",
+    compile_multilib: "prefer32",
 
     init_rc: ["drmserver.rc"],
 }
diff --git a/drm/drmserver/DrmManager.cpp b/drm/drmserver/DrmManager.cpp
index 9a32cc5..74e3223 100644
--- a/drm/drmserver/DrmManager.cpp
+++ b/drm/drmserver/DrmManager.cpp
@@ -99,13 +99,13 @@
         }
         default:
         {
-            ALOGW("Unrecognized message type: %zd", msg->what());
+            ALOGW("Unrecognized message type: %u", msg->what());
         }
     }
 }
 
 int64_t DrmManager::getMetricsFlushPeriodUs() {
-    return 1000 * 1000 * std::max(1ll, property_get_int64("drmmanager.metrics.period", 86400));
+    return 1000 * 1000 * std::max(1ll, (long long)property_get_int64("drmmanager.metrics.period", 86400));
 }
 
 void DrmManager::recordEngineMetrics(
diff --git a/drm/libdrmframework/Android.bp b/drm/libdrmframework/Android.bp
index 940c17d..b4a7b25 100644
--- a/drm/libdrmframework/Android.bp
+++ b/drm/libdrmframework/Android.bp
@@ -29,12 +29,11 @@
         "liblog",
         "libbinder",
         "libdl",
+        "libdrmframeworkcommon",
     ],
 
-    static_libs: ["libdrmframeworkcommon"],
-
     export_include_dirs: ["include"],
-    export_static_lib_headers: ["libdrmframeworkcommon"],
+    export_shared_lib_headers: ["libdrmframeworkcommon"],
 
     cflags: ["-Werror"],
 }
diff --git a/drm/libdrmframework/include/DrmManagerClientImpl.h b/drm/libdrmframework/include/DrmManagerClientImpl.h
index 3858675..8c8783b 100644
--- a/drm/libdrmframework/include/DrmManagerClientImpl.h
+++ b/drm/libdrmframework/include/DrmManagerClientImpl.h
@@ -230,7 +230,7 @@
 
     /**
      * Removes all the rights information of each plug-in associated with
-     * DRM framework. Will be used in master reset
+     * DRM framework.
      *
      * @param[in] uniqueId Unique identifier for a session
      * @return status_t
diff --git a/drm/libdrmframework/plugins/forward-lock/FwdLockEngine/Android.bp b/drm/libdrmframework/plugins/forward-lock/FwdLockEngine/Android.bp
index bb9d7ec..9f52f7a 100644
--- a/drm/libdrmframework/plugins/forward-lock/FwdLockEngine/Android.bp
+++ b/drm/libdrmframework/plugins/forward-lock/FwdLockEngine/Android.bp
@@ -36,11 +36,11 @@
         "libcrypto",
         "libssl",
         "libdrmframework",
+        "libdrmframeworkcommon",
     ],
 
     static_libs: [
         "libdrmutility",
-        "libdrmframeworkcommon",
         "libfwdlock-common",
         "libfwdlock-converter",
         "libfwdlock-decoder",
diff --git a/drm/libdrmframework/plugins/forward-lock/FwdLockEngine/include/FwdLockEngine.h b/drm/libdrmframework/plugins/forward-lock/FwdLockEngine/include/FwdLockEngine.h
index b62ddb9..eb5b0f6 100644
--- a/drm/libdrmframework/plugins/forward-lock/FwdLockEngine/include/FwdLockEngine.h
+++ b/drm/libdrmframework/plugins/forward-lock/FwdLockEngine/include/FwdLockEngine.h
@@ -252,8 +252,7 @@
 
 /**
  * Removes all the rights information of each plug-in associated with
- * DRM framework. Will be used in master reset but does nothing for
- * Forward Lock Engine.
+ * DRM framework. Does nothing for Forward Lock Engine.
  *
  * @param uniqueId Unique identifier for a session
  * @return status_t
diff --git a/drm/libdrmframework/plugins/forward-lock/internal-format/doc/FwdLock.html b/drm/libdrmframework/plugins/forward-lock/internal-format/doc/FwdLock.html
index 8f95cd2..c1d5b3d 100644
--- a/drm/libdrmframework/plugins/forward-lock/internal-format/doc/FwdLock.html
+++ b/drm/libdrmframework/plugins/forward-lock/internal-format/doc/FwdLock.html
@@ -488,7 +488,7 @@
 <p class=MsoBodyText><b>Note:</b> The key-encryption key must be unique to each
 device; this is what makes the files forward lock–protected. Ideally, it should
 be derived from secret hardware parameters, but at the very least it should be
-persistent from one master reset to the next.</p>
+persistent from one factory reset to the next.</p>
 
 <div style='margin-bottom:24.0pt;border:solid windowtext 1.0pt;padding:1.0pt 4.0pt 1.0pt 4.0pt;
 background:#F2F2F2'>
diff --git a/drm/libdrmframework/plugins/passthru/Android.bp b/drm/libdrmframework/plugins/passthru/Android.bp
index 05b6440..8045586 100644
--- a/drm/libdrmframework/plugins/passthru/Android.bp
+++ b/drm/libdrmframework/plugins/passthru/Android.bp
@@ -19,12 +19,11 @@
 
     srcs: ["src/DrmPassthruPlugIn.cpp"],
 
-    static_libs: ["libdrmframeworkcommon"],
-
     shared_libs: [
         "libutils",
         "liblog",
         "libdl",
+        "libdrmframeworkcommon",
     ],
 
     local_include_dirs: ["include"],
diff --git a/drm/libmediadrm/DrmMetricsConsumer.cpp b/drm/libmediadrm/DrmMetricsConsumer.cpp
index b47b4ff..5f0b26e 100644
--- a/drm/libmediadrm/DrmMetricsConsumer.cpp
+++ b/drm/libmediadrm/DrmMetricsConsumer.cpp
@@ -37,8 +37,8 @@
 template <> std::string GetAttributeName<KeyStatusType>(KeyStatusType type) {
     static const char *type_names[] = {"USABLE", "EXPIRED",
                                        "OUTPUT_NOT_ALLOWED", "STATUS_PENDING",
-                                       "INTERNAL_ERROR"};
-    if (((size_t)type) > arraysize(type_names)) {
+                                       "INTERNAL_ERROR", "USABLE_IN_FUTURE"};
+    if (((size_t)type) >= arraysize(type_names)) {
         return "UNKNOWN_TYPE";
     }
     return type_names[(size_t)type];
@@ -48,7 +48,7 @@
     static const char *type_names[] = {"PROVISION_REQUIRED", "KEY_NEEDED",
                                        "KEY_EXPIRED", "VENDOR_DEFINED",
                                        "SESSION_RECLAIMED"};
-    if (((size_t)type) > arraysize(type_names)) {
+    if (((size_t)type) >= arraysize(type_names)) {
         return "UNKNOWN_TYPE";
     }
     return type_names[(size_t)type];
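The comparison fix above closes an off-by-one: for an array of N names the valid indices are 0..N-1, so an index equal to the array size must also map to "UNKNOWN_TYPE". A minimal standalone illustration (std::size standing in for the local arraysize helper):

    #include <cstddef>
    #include <iterator>
    #include <string>

    std::string typeLabel(std::size_t idx) {
        static const char* names[] = {"USABLE", "EXPIRED", "INTERNAL_ERROR"};
        // idx == 3 is one past the end; ">=" (not ">") keeps it out of bounds.
        return idx >= std::size(names) ? "UNKNOWN_TYPE" : names[idx];
    }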
diff --git a/drm/libmediadrm/DrmSessionManager.cpp b/drm/libmediadrm/DrmSessionManager.cpp
index 5292705..e31395d 100644
--- a/drm/libmediadrm/DrmSessionManager.cpp
+++ b/drm/libmediadrm/DrmSessionManager.cpp
@@ -66,7 +66,7 @@
     std::vector<MediaResourceParcel> resources;
     MediaResourceParcel resource{
             Type::kDrmSession, SubType::kUnspecifiedSubType,
-            toStdVec<int8_t>(sessionId), value};
+            toStdVec<>(sessionId), value};
     resources.push_back(resource);
     return resources;
 }
diff --git a/drm/libmediadrm/include/mediadrm/DrmSessionManager.h b/drm/libmediadrm/include/mediadrm/DrmSessionManager.h
index 9e43504..c56bf01 100644
--- a/drm/libmediadrm/include/mediadrm/DrmSessionManager.h
+++ b/drm/libmediadrm/include/mediadrm/DrmSessionManager.h
@@ -62,7 +62,7 @@
     void removeSession(const Vector<uint8_t>& sessionId);
     bool reclaimSession(int callingPid);
 
-    // sanity check APIs
+    // inspection APIs
     size_t getSessionCount() const;
     bool containsSession(const Vector<uint8_t>& sessionId) const;
 
diff --git a/drm/libmediadrm/protos/Android.bp b/drm/libmediadrm/protos/Android.bp
new file mode 100644
index 0000000..b26cda4
--- /dev/null
+++ b/drm/libmediadrm/protos/Android.bp
@@ -0,0 +1,38 @@
+// Copyright (C) 2020 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// This is the version of the drm metrics protos built with protobuf full for the host.
+// It is used by the metrics_dump tool.
+
+cc_library_host_shared {
+    name: "libdrm_metrics_protos_full_host",
+    vendor_available: true,
+
+    include_dirs: ["external/protobuf/src"],
+
+    srcs: [
+        "metrics.proto",
+    ],
+
+    proto: {
+        export_proto_headers: true,
+        type: "full",
+    },
+
+    cflags: [
+        // Suppress unused parameter error. This error occurs
+        // when using the map type in a proto definition.
+        "-Wno-unused-parameter",
+    ],
+}
diff --git a/drm/mediacas/plugins/clearkey/ClearKeyFetcher.cpp b/drm/mediacas/plugins/clearkey/ClearKeyFetcher.cpp
index cb69f91..466e571 100644
--- a/drm/mediacas/plugins/clearkey/ClearKeyFetcher.cpp
+++ b/drm/mediacas/plugins/clearkey/ClearKeyFetcher.cpp
@@ -62,8 +62,8 @@
     }
     ALOGV("descriptor_size=%zu", container.descriptor_size());
 
-    // Sanity check to verify that the BroadcastEncryptor is sending a properly
-    // formed EcmContainer. If it contains two Ecms, the ids should have different
+    // Validate that the BroadcastEncryptor is sending a properly formed
+    // EcmContainer. If it contains two Ecms, the ids should have different
     // parity (one odd, one even). This does not necessarily affect decryption
     // but indicates a problem with Ecm generation.
     if (container.descriptor_size() == 2) {
diff --git a/drm/mediadrm/plugins/clearkey/hidl/MemoryFileSystem.cpp b/drm/mediadrm/plugins/clearkey/hidl/MemoryFileSystem.cpp
index 2dcd00f..051a968 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/MemoryFileSystem.cpp
+++ b/drm/mediadrm/plugins/clearkey/hidl/MemoryFileSystem.cpp
@@ -15,7 +15,7 @@
 namespace clearkey {
 
 std::string MemoryFileSystem::GetFileName(const std::string& path) {
-    size_t index = path.find_last_of("/");
+    size_t index = path.find_last_of('/');
     if (index != std::string::npos) {
         return path.substr(index+1);
     } else {
diff --git a/include/drm/DrmManagerClient.h b/include/drm/DrmManagerClient.h
index 866edac..a38aa9b 100644
--- a/include/drm/DrmManagerClient.h
+++ b/include/drm/DrmManagerClient.h
@@ -318,7 +318,7 @@
 
     /**
      * Removes all the rights information of each plug-in associated with
-     * DRM framework. Will be used in master reset
+     * DRM framework.
      *
      * @return status_t
      *     Returns DRM_NO_ERROR for success, DRM_ERROR_UNKNOWN for failure
diff --git a/include/drm/TEST_MAPPING b/include/drm/TEST_MAPPING
index 28e432e..512e844 100644
--- a/include/drm/TEST_MAPPING
+++ b/include/drm/TEST_MAPPING
@@ -8,17 +8,9 @@
         },
         {
           "include-filter": "com.google.android.media.gts.WidevineGenericOpsTests"
-        }
-      ]
-    },
-    {
-      "name": "GtsExoPlayerTestCases",
-      "options" : [
-        {
-          "include-annotation": "android.platform.test.annotations.SocPresubmit"
         },
         {
-          "include-filter": "com.google.android.exoplayer.gts.DashTest#testWidevine23FpsH264Fixed"
+          "include-filter": "com.google.android.media.gts.WidevineYouTubePerformanceTests"
         }
       ]
     }
diff --git a/include/media/Interpolator.h b/include/media/Interpolator.h
index 703cf77..2004acb 100644
--- a/include/media/Interpolator.h
+++ b/include/media/Interpolator.h
@@ -21,6 +21,7 @@
 #include <sstream>
 #include <unordered_map>
 
+#include <android/media/InterpolatorConfig.h>
 #include <binder/Parcel.h>
 #include <utils/RefBase.h>
 
@@ -39,17 +40,10 @@
 class Interpolator : public std::map<S, T> {
 public:
     // Polynomial spline interpolators
-    // Extend only at the end of enum, as this must match order in VolumeShapers.java.
-    enum InterpolatorType : int32_t {
-        INTERPOLATOR_TYPE_STEP,   // Not continuous
-        INTERPOLATOR_TYPE_LINEAR, // C0
-        INTERPOLATOR_TYPE_CUBIC,  // C1
-        INTERPOLATOR_TYPE_CUBIC_MONOTONIC, // C1 (to provide locally monotonic curves)
-        // INTERPOLATOR_TYPE_CUBIC_C2, // TODO - requires global computation / cache
-    };
+    using InterpolatorType = media::InterpolatorType;
 
     explicit Interpolator(
-            InterpolatorType interpolatorType = INTERPOLATOR_TYPE_LINEAR,
+            InterpolatorType interpolatorType = InterpolatorType::LINEAR,
             bool cache = true)
         : mCache(cache)
         , mFirstSlope(0)
@@ -82,13 +76,13 @@
 
         // now that we have two adjacent points:
         switch (mInterpolatorType) {
-        case INTERPOLATOR_TYPE_STEP:
+        case InterpolatorType::STEP:
             return high->first == x ? high->second : low->second;
-        case INTERPOLATOR_TYPE_LINEAR:
+        case InterpolatorType::LINEAR:
             return ((high->first - x) * low->second + (x - low->first) * high->second)
                     / (high->first - low->first);
-        case INTERPOLATOR_TYPE_CUBIC:
-        case INTERPOLATOR_TYPE_CUBIC_MONOTONIC:
+        case InterpolatorType::CUBIC:
+        case InterpolatorType::CUBIC_MONOTONIC:
         default: {
             // See https://en.wikipedia.org/wiki/Cubic_Hermite_spline
 
@@ -116,7 +110,7 @@
             // non catmullRom (finite difference) with regular cubic;
             // the choices here minimize computation.
             bool monotonic, catmullRom;
-            if (mInterpolatorType == INTERPOLATOR_TYPE_CUBIC_MONOTONIC) {
+            if (mInterpolatorType == InterpolatorType::CUBIC_MONOTONIC) {
                 monotonic = true;
                 catmullRom = false;
             } else {
@@ -202,11 +196,11 @@
 
     status_t setInterpolatorType(InterpolatorType interpolatorType) {
         switch (interpolatorType) {
-        case INTERPOLATOR_TYPE_STEP:   // Not continuous
-        case INTERPOLATOR_TYPE_LINEAR: // C0
-        case INTERPOLATOR_TYPE_CUBIC:  // C1
-        case INTERPOLATOR_TYPE_CUBIC_MONOTONIC: // C1 + other constraints
-        // case INTERPOLATOR_TYPE_CUBIC_C2:
+        case InterpolatorType::STEP:   // Not continuous
+        case InterpolatorType::LINEAR: // C0
+        case InterpolatorType::CUBIC:  // C1
+        case InterpolatorType::CUBIC_MONOTONIC: // C1 + other constraints
+        // case InterpolatorType::CUBIC_C2:
             mInterpolatorType = interpolatorType;
             return NO_ERROR;
         default:
@@ -235,49 +229,50 @@
         mMemo.clear();
     }
 
+    // TODO(ytai): remove this method once it is no longer used.
     status_t writeToParcel(Parcel *parcel) const {
-        if (parcel == nullptr) {
-            return BAD_VALUE;
-        }
-        status_t res = parcel->writeInt32(mInterpolatorType)
-                ?: parcel->writeFloat(mFirstSlope)
-                ?: parcel->writeFloat(mLastSlope)
-                ?: parcel->writeUint32((uint32_t)this->size()); // silent truncation
-        if (res != NO_ERROR) {
-            return res;
-        }
-        for (const auto &pt : *this) {
-            res = parcel->writeFloat(pt.first)
-                    ?: parcel->writeFloat(pt.second);
-            if (res != NO_ERROR) {
-                return res;
-            }
-        }
-        return NO_ERROR;
+        media::InterpolatorConfig config;
+        writeToConfig(&config);
+        return config.writeToParcel(parcel);
     }
 
+    void writeToConfig(media::InterpolatorConfig *config) const {
+        config->type = mInterpolatorType;
+        config->firstSlope = mFirstSlope;
+        config->lastSlope = mLastSlope;
+        for (const auto &pt : *this) {
+            config->xy.push_back(pt.first);
+            config->xy.push_back(pt.second);
+        }
+    }
+
+    // TODO(ytai): remove this method once it is no longer used.
     status_t readFromParcel(const Parcel &parcel) {
-        this->clear();
-        int32_t type;
-        uint32_t size;
-        status_t res = parcel.readInt32(&type)
-                        ?: parcel.readFloat(&mFirstSlope)
-                        ?: parcel.readFloat(&mLastSlope)
-                        ?: parcel.readUint32(&size)
-                        ?: setInterpolatorType((InterpolatorType)type);
+        media::InterpolatorConfig config;
+        status_t res = config.readFromParcel(&parcel);
         if (res != NO_ERROR) {
             return res;
         }
+        return readFromConfig(config);
+    }
+
+    status_t readFromConfig(const media::InterpolatorConfig &config) {
+        this->clear();
+        setInterpolatorType(config.type);
+        if ((config.xy.size() & 1) != 0) {
+            // xy size must be even.
+            return BAD_VALUE;
+        }
+        uint32_t size = config.xy.size() / 2;
+        mFirstSlope = config.firstSlope;
+        mLastSlope = config.lastSlope;
+
         // Note: the number of points is implicitly bounded by config.xy.size(),
         // so no separate size check is needed here.
         float lastx;
         for (uint32_t i = 0; i < size; ++i) {
-            float x, y;
-            res = parcel.readFloat(&x)
-                    ?: parcel.readFloat(&y);
-            if (res != NO_ERROR) {
-                return res;
-            }
+            float x = config.xy[i * 2];
+            float y = config.xy[i * 2 + 1];
             if ((i > 0 && !(x > lastx)) /* handle nan */
                     || y != y /* handle nan */) {
                 // This is a std::map object which imposes sorted order
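A brief usage sketch of the new config path (illustrative; it assumes the Interpolator's existing findY() accessor and the android::media types generated from InterpolatorConfig.aidl): the xy vector is the flattened <x, y> list, so the control points (0,0) and (1,1) are stored as {0, 0, 1, 1}.

    #include <media/Interpolator.h>

    void configExample() {
        android::media::InterpolatorConfig config;
        config.type = android::media::InterpolatorType::LINEAR;
        config.firstSlope = 0.f;
        config.lastSlope = 0.f;
        config.xy = {0.f, 0.f, 1.f, 1.f};   // two points: (0,0) and (1,1)

        android::Interpolator<float, float> interp;
        if (interp.readFromConfig(config) == android::NO_ERROR) {
            const float mid = interp.findY(0.5f);  // linear curve -> 0.5
            (void)mid;
        }
    }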
diff --git a/include/media/MmapStreamInterface.h b/include/media/MmapStreamInterface.h
index b3bf16d..61de987 100644
--- a/include/media/MmapStreamInterface.h
+++ b/include/media/MmapStreamInterface.h
@@ -22,6 +22,8 @@
 #include <utils/Errors.h>
 #include <utils/RefBase.h>
 
+#include <time.h>
+
 namespace android {
 
 class MmapStreamCallback;
@@ -103,6 +105,19 @@
     virtual status_t getMmapPosition(struct audio_mmap_position *position) = 0;
 
     /**
+     * Get a recent count of the number of audio frames presented/received to/from an
+     * external observer.
+     *
+     * \param[out] position count of presented audio frames
+     * \param[out] timeNanos associated clock time
+     *
+     * \return OK if the external position is set correctly.
+     *         NO_INIT in case of initialization error
+     *         INVALID_OPERATION if the interface is not implemented
+     */
+    virtual status_t getExternalPosition(uint64_t* position, int64_t* timeNanos) = 0;
+
+    /**
      * Start a stream operating in mmap mode.
      * createMmapBuffer() must be called before calling start()
      *
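A hedged caller sketch for the new getExternalPosition() method; `stream` is assumed to be a valid sp<MmapStreamInterface>, and the control flow here is illustrative only.

    uint64_t frames = 0;
    int64_t timeNanos = 0;
    android::status_t res = stream->getExternalPosition(&frames, &timeNanos);
    if (res == android::INVALID_OPERATION) {
        // Backend does not report an external position; fall back to getMmapPosition().
    } else if (res == android::OK) {
        // frames/timeNanos give the externally observable presentation position.
    }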
diff --git a/include/media/VolumeShaper.h b/include/media/VolumeShaper.h
index fe519bb..f8ead2f 100644
--- a/include/media/VolumeShaper.h
+++ b/include/media/VolumeShaper.h
@@ -22,6 +22,11 @@
 #include <math.h>
 #include <sstream>
 
+#include <android/media/VolumeShaperConfiguration.h>
+#include <android/media/VolumeShaperConfigurationOptionFlag.h>
+#include <android/media/VolumeShaperOperation.h>
+#include <android/media/VolumeShaperOperationFlag.h>
+#include <android/media/VolumeShaperState.h>
 #include <binder/Parcel.h>
 #include <media/Interpolator.h>
 #include <utils/Mutex.h>
@@ -284,30 +289,38 @@
             clampVolume();
         }
 
-        // The parcel layout must match VolumeShaper.java
         status_t writeToParcel(Parcel *parcel) const override {
-            if (parcel == nullptr) return BAD_VALUE;
-            return parcel->writeInt32((int32_t)mType)
-                    ?: parcel->writeInt32(mId)
-                    ?: mType == TYPE_ID
-                        ? NO_ERROR
-                        : parcel->writeInt32((int32_t)mOptionFlags)
-                            ?: parcel->writeDouble(mDurationMs)
-                            ?: Interpolator<S, T>::writeToParcel(parcel);
+            VolumeShaperConfiguration parcelable;
+            writeToParcelable(&parcelable);
+            return parcelable.writeToParcel(parcel);
         }
 
-        status_t readFromParcel(const Parcel *parcel) override {
-            int32_t type, optionFlags;
-            return parcel->readInt32(&type)
-                    ?: setType((Type)type)
-                    ?: parcel->readInt32(&mId)
-                    ?: mType == TYPE_ID
-                        ? NO_ERROR
-                        : parcel->readInt32(&optionFlags)
-                            ?: setOptionFlags((OptionFlag)optionFlags)
-                            ?: parcel->readDouble(&mDurationMs)
-                            ?: Interpolator<S, T>::readFromParcel(*parcel)
-                            ?: checkCurve();
+        void writeToParcelable(VolumeShaperConfiguration *parcelable) const {
+            parcelable->id = getId();
+            parcelable->type = getTypeAsAidl();
+            parcelable->optionFlags = 0;
+            if (mType != TYPE_ID) {
+                parcelable->optionFlags = getOptionFlagsAsAidl();
+                parcelable->durationMs = getDurationMs();
+                Interpolator<S, T>::writeToConfig(&parcelable->interpolatorConfig);
+            }
+        }
+
+        status_t readFromParcel(const Parcel* parcel) override {
+            VolumeShaperConfiguration data;
+            return data.readFromParcel(parcel)
+                   ?: readFromParcelable(data);
+        }
+
+        status_t readFromParcelable(const VolumeShaperConfiguration& parcelable) {
+            setId(parcelable.id);
+            return setTypeFromAidl(parcelable.type)
+                   ?: mType == TYPE_ID
+                      ? NO_ERROR
+                      : setOptionFlagsFromAidl(parcelable.optionFlags)
+                        ?: setDurationMs(parcelable.durationMs)
+                           ?: Interpolator<S, T>::readFromConfig(parcelable.interpolatorConfig)
+                              ?: checkCurve();
         }
 
         // Returns a string for debug printing.
@@ -329,6 +342,51 @@
         int32_t mId;             // A valid id is >= 0.
         OptionFlag mOptionFlags; // option flags for the configuration.
         double mDurationMs;      // duration, must be > 0; default is 1000 ms.
+
+        int32_t getOptionFlagsAsAidl() const {
+            int32_t result = 0;
+            if (getOptionFlags() & OPTION_FLAG_VOLUME_IN_DBFS) {
+                result |=
+                        1 << static_cast<int>(VolumeShaperConfigurationOptionFlag::VOLUME_IN_DBFS);
+            }
+            if (getOptionFlags() & OPTION_FLAG_CLOCK_TIME) {
+                result |= 1 << static_cast<int>(VolumeShaperConfigurationOptionFlag::CLOCK_TIME);
+            }
+            return result;
+        }
+
+        status_t setOptionFlagsFromAidl(int32_t aidl) {
+            std::underlying_type_t<OptionFlag> options = 0;
+            if (aidl & (1 << static_cast<int>(VolumeShaperConfigurationOptionFlag::VOLUME_IN_DBFS))) {
+                options |= OPTION_FLAG_VOLUME_IN_DBFS;
+            }
+            if (aidl & (1 << static_cast<int>(VolumeShaperConfigurationOptionFlag::CLOCK_TIME))) {
+                options |= OPTION_FLAG_CLOCK_TIME;
+            }
+            return setOptionFlags(static_cast<OptionFlag>(options));
+        }
+
+        status_t setTypeFromAidl(VolumeShaperConfigurationType aidl) {
+            switch (aidl) {
+                case VolumeShaperConfigurationType::ID:
+                    return setType(TYPE_ID);
+                case VolumeShaperConfigurationType::SCALE:
+                    return setType(TYPE_SCALE);
+                default:
+                    return BAD_VALUE;
+            }
+        }
+
+        VolumeShaperConfigurationType getTypeAsAidl() const {
+            switch (getType()) {
+                case TYPE_ID:
+                    return VolumeShaperConfigurationType::ID;
+                case TYPE_SCALE:
+                    return VolumeShaperConfigurationType::SCALE;
+                default:
+                    LOG_ALWAYS_FATAL("Shouldn't get here");
+            }
+        }
     }; // Configuration
 
     /* VolumeShaper::Operation expresses an operation to perform on the
@@ -420,19 +478,29 @@
             return NO_ERROR;
         }
 
-        status_t writeToParcel(Parcel *parcel) const override {
+        status_t writeToParcel(Parcel* parcel) const override {
             if (parcel == nullptr) return BAD_VALUE;
-            return parcel->writeInt32((int32_t)mFlags)
-                    ?: parcel->writeInt32(mReplaceId)
-                    ?: parcel->writeFloat(mXOffset);
+            VolumeShaperOperation op;
+            writeToParcelable(&op);
+            return op.writeToParcel(parcel);
         }
 
-        status_t readFromParcel(const Parcel *parcel) override {
-            int32_t flags;
-            return parcel->readInt32(&flags)
-                    ?: parcel->readInt32(&mReplaceId)
-                    ?: parcel->readFloat(&mXOffset)
-                    ?: setFlags((Flag)flags);
+        void writeToParcelable(VolumeShaperOperation* op) const {
+            op->flags = getFlagsAsAidl();
+            op->replaceId = mReplaceId;
+            op->xOffset = mXOffset;
+        }
+
+        status_t readFromParcel(const Parcel* parcel) override {
+            VolumeShaperOperation op;
+            return op.readFromParcel(parcel)
+                   ?: readFromParcelable(op);
+        }
+
+        status_t readFromParcelable(const VolumeShaperOperation& op) {
+            mReplaceId = op.replaceId;
+            mXOffset = op.xOffset;
+            return setFlagsFromAidl(op.flags);
         }
 
         std::string toString() const {
@@ -445,6 +513,48 @@
         }
 
     private:
+        status_t setFlagsFromAidl(int32_t aidl) {
+            std::underlying_type_t<Flag> flags = 0;
+            if (aidl & (1 << static_cast<int>(VolumeShaperOperationFlag::REVERSE))) {
+                flags |= FLAG_REVERSE;
+            }
+            if (aidl & (1 << static_cast<int>(VolumeShaperOperationFlag::TERMINATE))) {
+                flags |= FLAG_TERMINATE;
+            }
+            if (aidl & (1 << static_cast<int>(VolumeShaperOperationFlag::JOIN))) {
+                flags |= FLAG_JOIN;
+            }
+            if (aidl & (1 << static_cast<int>(VolumeShaperOperationFlag::DELAY))) {
+                flags |= FLAG_DELAY;
+            }
+            if (aidl & (1 << static_cast<int>(VolumeShaperOperationFlag::CREATE_IF_NECESSARY))) {
+                flags |= FLAG_CREATE_IF_NECESSARY;
+            }
+            return setFlags(static_cast<Flag>(flags));
+        }
+
+        int32_t getFlagsAsAidl() const {
+            int32_t aidl = 0;
+            std::underlying_type_t<Flag> flags = getFlags();
+            if (flags & FLAG_REVERSE) {
+                aidl |= (1 << static_cast<int>(VolumeShaperOperationFlag::REVERSE));
+            }
+            if (flags & FLAG_TERMINATE) {
+                aidl |= (1 << static_cast<int>(VolumeShaperOperationFlag::TERMINATE));
+            }
+            if (flags & FLAG_JOIN) {
+                aidl |= (1 << static_cast<int>(VolumeShaperOperationFlag::JOIN));
+            }
+            if (flags & FLAG_DELAY) {
+                aidl |= (1 << static_cast<int>(VolumeShaperOperationFlag::DELAY));
+            }
+            if (flags & FLAG_CREATE_IF_NECESSARY) {
+                aidl |= (1 << static_cast<int>(VolumeShaperOperationFlag::CREATE_IF_NECESSARY));
+            }
+            return aidl;
+        }
+
+    private:
         Flag mFlags;        // operation to do
         int32_t mReplaceId; // if >= 0 the id to remove in a replace operation.
         S mXOffset;         // position in the curve to set if a valid number (not nan)
@@ -483,15 +593,28 @@
             mXOffset = xOffset;
         }
 
-        status_t writeToParcel(Parcel *parcel) const override {
+        status_t writeToParcel(Parcel* parcel) const override {
             if (parcel == nullptr) return BAD_VALUE;
-            return parcel->writeFloat(mVolume)
-                    ?: parcel->writeFloat(mXOffset);
+            VolumeShaperState state;
+            writeToParcelable(&state);
+            return state.writeToParcel(parcel);
         }
 
-        status_t readFromParcel(const Parcel *parcel) override {
-            return parcel->readFloat(&mVolume)
-                     ?: parcel->readFloat(&mXOffset);
+        void writeToParcelable(VolumeShaperState* parcelable) const {
+            parcelable->volume = mVolume;
+            parcelable->xOffset = mXOffset;
+        }
+
+        status_t readFromParcel(const Parcel* parcel) override {
+            VolumeShaperState state;
+            return state.readFromParcel(parcel)
+                   ?: readFromParcelable(state);
+        }
+
+        status_t readFromParcelable(const VolumeShaperState& parcelable) {
+            mVolume = parcelable.volume;
+            mXOffset = parcelable.xOffset;
+            return OK;
         }
 
         std::string toString() const {
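A hedged round-trip sketch for the new parcelable path added in this header (assumes a Configuration whose curve has already been populated; error handling elided):

    #include <media/VolumeShaper.h>

    android::status_t roundTrip(const android::media::VolumeShaper::Configuration& config) {
        android::media::VolumeShaperConfiguration parcelable;
        config.writeToParcelable(&parcelable);        // type, option flags, duration, curve

        android::media::VolumeShaper::Configuration restored;
        // Returns NO_ERROR when the curve in `parcelable` passes checkCurve().
        return restored.readFromParcelable(parcelable);
    }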
diff --git a/media/OWNERS b/media/OWNERS
index 1afc253..3e194f0 100644
--- a/media/OWNERS
+++ b/media/OWNERS
@@ -8,13 +8,14 @@
 hunga@google.com
 jiabin@google.com
 jmtrivi@google.com
-krocard@google.com
 lajos@google.com
 marcone@google.com
 mnaganov@google.com
+nchalko@google.com
 pawin@google.com
 philburk@google.com
 pmclean@google.com
+quxiangfang@google.com
 rachad@google.com
 rago@google.com
 robertshih@google.com
diff --git a/media/TEST_MAPPING b/media/TEST_MAPPING
index a6dfb21..50facfb 100644
--- a/media/TEST_MAPPING
+++ b/media/TEST_MAPPING
@@ -1,32 +1,63 @@
+// for frameworks/av/media
 {
-  "presubmit": [
-    {
-      "name": "GtsMediaTestCases",
-      "options" : [
+    "presubmit": [
+        // runs whenever we change something in this tree
         {
-          "include-annotation": "android.platform.test.annotations.Presubmit"
+            "name": "CtsMediaTestCases",
+            "options": [
+                {
+                    "include-filter": "android.media.cts.EncodeDecodeTest"
+                }
+            ]
         },
         {
-          "include-filter": "com.google.android.media.gts.WidevineGenericOpsTests"
-        }
-      ]
-    },
-    {
-      "name": "GtsExoPlayerTestCases",
-      "options" : [
-        {
-          "include-annotation": "android.platform.test.annotations.SocPresubmit"
+            "name": "CtsMediaTestCases",
+            "options": [
+                {
+                    "include-filter": "android.media.cts.DecodeEditEncodeTest"
+                }
+            ]
         },
         {
-          "include-filter": "com.google.android.exoplayer.gts.DashTest#testWidevine23FpsH264Fixed"
+            "name": "GtsMediaTestCases",
+            "options" : [
+                {
+                    "include-annotation": "android.platform.test.annotations.Presubmit"
+                },
+                {
+                    "include-filter": "com.google.android.media.gts.WidevineGenericOpsTests"
+                },
+                {
+                    "include-filter": "com.google.android.media.gts.WidevineYouTubePerformanceTests"
+                }
+            ]
         }
-      ]
-    }
-  ],
-  "imports": [
-    {
-      "path": "frameworks/av/drm/mediadrm/plugins"
-    }
-  ]
-}
+    ],
 
+    "imports": [
+        {
+            "path": "frameworks/av/drm/mediadrm/plugins"
+        }
+    ],
+
+    "platinum-postsubmit": [
+        // runs regularly, independent of changes in this tree.
+        // signals if changes elsewhere break media functionality
+        {
+            "name": "CtsMediaTestCases",
+            "options": [
+                {
+                    "include-filter": "android.media.cts.EncodeDecodeTest"
+                }
+            ]
+        },
+        {
+            "name": "CtsMediaTestCases",
+            "options": [
+                {
+                    "include-filter": "android.media.cts.DecodeEditEncodeTest"
+                }
+            ]
+        }
+    ]
+}
diff --git a/media/audioserver/Android.bp b/media/audioserver/Android.bp
new file mode 100644
index 0000000..ca3c81c
--- /dev/null
+++ b/media/audioserver/Android.bp
@@ -0,0 +1,58 @@
+cc_binary {
+    name: "audioserver",
+
+    srcs: [
+        "main_audioserver.cpp",
+    ],
+
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
+
+    header_libs: [
+        "libaudiohal_headers",
+        "libmediametrics_headers",
+    ],
+
+    shared_libs: [
+        "libaaudioservice",
+        "libaudioflinger",
+        "libaudiopolicyservice",
+        "libaudioprocessing",
+        "libbinder",
+        "libcutils",
+        "libhidlbase",
+        "liblog",
+        "libmedia",
+        "libmedialogservice",
+        "libmediautils",
+        "libnbaio",
+        "libnblog",
+        "libpowermanager",
+        "libutils",
+        "libvibrator",
+
+    ],
+
+    // TODO check if we still need all of these include directories
+    include_dirs: [
+        "external/sonic",
+        "frameworks/av/media/libaaudio/include",
+        "frameworks/av/media/libaaudio/src",
+        "frameworks/av/media/libaaudio/src/binding",
+        "frameworks/av/media/libmedia/include",
+        "frameworks/av/services/audioflinger",
+        "frameworks/av/services/audiopolicy",
+        "frameworks/av/services/audiopolicy/common/include",
+        "frameworks/av/services/audiopolicy/common/managerdefinitions/include",
+        "frameworks/av/services/audiopolicy/engine/interface",
+        "frameworks/av/services/audiopolicy/service",
+        "frameworks/av/services/medialog",
+
+        // TODO oboeservice is the old folder name for aaudioservice. It will be changed.
+        "frameworks/av/services/oboeservice",
+    ],
+
+    init_rc: ["audioserver.rc"],
+}
diff --git a/media/audioserver/Android.mk b/media/audioserver/Android.mk
deleted file mode 100644
index cf1c14c..0000000
--- a/media/audioserver/Android.mk
+++ /dev/null
@@ -1,51 +0,0 @@
-LOCAL_PATH:= $(call my-dir)
-
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES := \
-	main_audioserver.cpp \
-
-LOCAL_SHARED_LIBRARIES := \
-	libaaudioservice \
-	libaudioflinger \
-	libaudiopolicyservice \
-	libaudioprocessing \
-	libbinder \
-	libcutils \
-	liblog \
-	libhidlbase \
-	libmedia \
-	libmedialogservice \
-	libmediautils \
-	libnbaio \
-	libnblog \
-	libutils \
-	libvibrator
-
-LOCAL_HEADER_LIBRARIES := \
-	libaudiohal_headers \
-	libmediametrics_headers \
-
-# TODO oboeservice is the old folder name for aaudioservice. It will be changed.
-LOCAL_C_INCLUDES := \
-	frameworks/av/services/audioflinger \
-	frameworks/av/services/audiopolicy \
-	frameworks/av/services/audiopolicy/common/managerdefinitions/include \
-	frameworks/av/services/audiopolicy/common/include \
-	frameworks/av/services/audiopolicy/engine/interface \
-	frameworks/av/services/audiopolicy/service \
-	frameworks/av/services/medialog \
-	frameworks/av/services/oboeservice \
-	frameworks/av/media/libaaudio/include \
-	frameworks/av/media/libaaudio/src \
-	frameworks/av/media/libaaudio/src/binding \
-	frameworks/av/media/libmedia/include \
-	external/sonic \
-
-LOCAL_MODULE := audioserver
-
-LOCAL_INIT_RC := audioserver.rc
-
-LOCAL_CFLAGS := -Werror -Wall
-
-include $(BUILD_EXECUTABLE)
diff --git a/media/audioserver/audioserver.rc b/media/audioserver/audioserver.rc
index f05c2d2..c4a6601 100644
--- a/media/audioserver/audioserver.rc
+++ b/media/audioserver/audioserver.rc
@@ -6,8 +6,12 @@
     capabilities BLOCK_SUSPEND
     ioprio rt 4
     task_profiles ProcessCapacityHigh HighPerformance
-
-    onrestart setprop sys.audio.restart.hal 1
+    onrestart restart vendor.audio-hal
+    onrestart restart vendor.audio-hal-4-0-msd
+    onrestart restart audio_proxy_service
+    # Keep the original service names for backward compatibility
+    onrestart restart vendor.audio-hal-2-0
+    onrestart restart audio-hal-2-0
 
 on property:vts.native_server.on=1
     stop audioserver
@@ -17,6 +21,7 @@
 on property:init.svc.audioserver=stopped
     stop vendor.audio-hal
     stop vendor.audio-hal-4-0-msd
+    stop audio_proxy_service
     # Keep the original service names for backward compatibility
     stop vendor.audio-hal-2-0
     stop audio-hal-2-0
@@ -25,6 +30,7 @@
     # audioserver bringing it back into running state.
     start vendor.audio-hal
     start vendor.audio-hal-4-0-msd
+    start audio_proxy_service
     # Keep the original service names for backward compatibility
     start vendor.audio-hal-2-0
     start audio-hal-2-0
@@ -32,16 +38,24 @@
 on property:init.svc.audioserver=running
     start vendor.audio-hal
     start vendor.audio-hal-4-0-msd
+    start audio_proxy_service
     # Keep the original service names for backward compatibility
     start vendor.audio-hal-2-0
     start audio-hal-2-0
 
 on property:sys.audio.restart.hal=1
-    restart vendor.audio-hal
-    restart vendor.audio-hal-4-0-msd
+    # See b/159966243. Avoid restart loop between audioserver and HAL.
     # Keep the original service names for backward compatibility
-    restart vendor.audio-hal-2-0
-    restart audio-hal-2-0
+    stop vendor.audio-hal
+    stop vendor.audio-hal-4-0-msd
+    stop audio_proxy_service
+    stop vendor.audio-hal-2-0
+    stop audio-hal-2-0
+    start vendor.audio-hal
+    start vendor.audio-hal-4-0-msd
+    start audio_proxy_service
+    start vendor.audio-hal-2-0
+    start audio-hal-2-0
     # reset the property
     setprop sys.audio.restart.hal 0
 
diff --git a/media/audioserver/main_audioserver.cpp b/media/audioserver/main_audioserver.cpp
index f9f4f31..8ee1efb 100644
--- a/media/audioserver/main_audioserver.cpp
+++ b/media/audioserver/main_audioserver.cpp
@@ -29,8 +29,8 @@
 #include <mediautils/LimitProcessMemory.h>
 #include <utils/Log.h>
 
-// from LOCAL_C_INCLUDES
-#include "aaudio/AAudioTesting.h"
+// from include_dirs
+#include "aaudio/AAudioTesting.h" // aaudio_policy_t, AAUDIO_PROP_MMAP_POLICY, AAUDIO_POLICY_*
 #include "AudioFlinger.h"
 #include "AudioPolicyService.h"
 #include "AAudioService.h"
@@ -49,7 +49,12 @@
 
     signal(SIGPIPE, SIG_IGN);
 
+#if 1
+    // FIXME See bug 165702394 and bug 168511485
+    const bool doLog = false;
+#else
     bool doLog = (bool) property_get_bool("ro.test_harness", 0);
+#endif
 
     pid_t childPid;
     // FIXME The advantage of making the process containing media.log service the parent process of
diff --git a/media/bufferpool/1.0/TEST_MAPPING b/media/bufferpool/1.0/TEST_MAPPING
new file mode 100644
index 0000000..a1e6a58
--- /dev/null
+++ b/media/bufferpool/1.0/TEST_MAPPING
@@ -0,0 +1,8 @@
+// mappings for frameworks/av/media/bufferpool/1.0
+{
+  "presubmit": [
+
+    { "name": "VtsVndkHidlBufferpoolV1_0TargetSingleTest" },
+    { "name": "VtsVndkHidlBufferpoolV1_0TargetMultiTest"}
+  ]
+}
diff --git a/media/bufferpool/1.0/vts/Android.bp b/media/bufferpool/1.0/vts/Android.bp
index ee5a757..691ed40 100644
--- a/media/bufferpool/1.0/vts/Android.bp
+++ b/media/bufferpool/1.0/vts/Android.bp
@@ -16,6 +16,7 @@
 
 cc_test {
     name: "VtsVndkHidlBufferpoolV1_0TargetSingleTest",
+    test_suites: ["device-tests"],
     defaults: ["VtsHalTargetTestDefaults"],
     srcs: [
         "allocator.cpp",
@@ -34,6 +35,7 @@
 
 cc_test {
     name: "VtsVndkHidlBufferpoolV1_0TargetMultiTest",
+    test_suites: ["device-tests"],
     defaults: ["VtsHalTargetTestDefaults"],
     srcs: [
         "allocator.cpp",
diff --git a/media/bufferpool/1.0/vts/multi.cpp b/media/bufferpool/1.0/vts/multi.cpp
index 1796819..d8cc285 100644
--- a/media/bufferpool/1.0/vts/multi.cpp
+++ b/media/bufferpool/1.0/vts/multi.cpp
@@ -215,7 +215,7 @@
 }  // anonymous namespace
 
 int main(int argc, char** argv) {
-  setenv("TREBLE_TESTING_OVERRIDE", "true", true);
+  android::hardware::details::setTrebleTestingOverride(true);
   ::testing::InitGoogleTest(&argc, argv);
   int status = RUN_ALL_TESTS();
   LOG(INFO) << "Test result = " << status;
diff --git a/media/bufferpool/2.0/TEST_MAPPING b/media/bufferpool/2.0/TEST_MAPPING
new file mode 100644
index 0000000..65dee2c
--- /dev/null
+++ b/media/bufferpool/2.0/TEST_MAPPING
@@ -0,0 +1,7 @@
+// mappings for frameworks/av/media/bufferpool/2.0
+{
+  "presubmit": [
+    { "name": "VtsVndkHidlBufferpoolV2_0TargetSingleTest"},
+    { "name": "VtsVndkHidlBufferpoolV2_0TargetMultiTest"}
+  ]
+}
diff --git a/media/bufferpool/2.0/tests/Android.bp b/media/bufferpool/2.0/tests/Android.bp
index 8b44f61..8492939 100644
--- a/media/bufferpool/2.0/tests/Android.bp
+++ b/media/bufferpool/2.0/tests/Android.bp
@@ -16,6 +16,7 @@
 
 cc_test {
     name: "VtsVndkHidlBufferpoolV2_0TargetSingleTest",
+    test_suites: ["device-tests"],
     defaults: ["VtsHalTargetTestDefaults"],
     srcs: [
         "allocator.cpp",
@@ -34,6 +35,7 @@
 
 cc_test {
     name: "VtsVndkHidlBufferpoolV2_0TargetMultiTest",
+    test_suites: ["device-tests"],
     defaults: ["VtsHalTargetTestDefaults"],
     srcs: [
         "allocator.cpp",
diff --git a/media/bufferpool/2.0/tests/multi.cpp b/media/bufferpool/2.0/tests/multi.cpp
index 68b6992..b40838e 100644
--- a/media/bufferpool/2.0/tests/multi.cpp
+++ b/media/bufferpool/2.0/tests/multi.cpp
@@ -215,7 +215,7 @@
 }  // anonymous namespace
 
 int main(int argc, char** argv) {
-  setenv("TREBLE_TESTING_OVERRIDE", "true", true);
+  android::hardware::details::setTrebleTestingOverride(true);
   ::testing::InitGoogleTest(&argc, argv);
   int status = RUN_ALL_TESTS();
   LOG(INFO) << "Test result = " << status;
diff --git a/media/codec2/TEST_MAPPING b/media/codec2/TEST_MAPPING
index 8afa1a8..fca3477 100644
--- a/media/codec2/TEST_MAPPING
+++ b/media/codec2/TEST_MAPPING
@@ -1,5 +1,10 @@
 {
   "presubmit": [
+    // TODO failing 4 of 13
+    // { "name": "codec2_core_param_test"},
+    // TODO(b/155516524)
+    // { "name": "codec2_vndk_interface_test"},
+    { "name": "codec2_vndk_test"},
     {
       "name": "CtsMediaTestCases",
       "options": [
diff --git a/media/codec2/components/aac/DrcPresModeWrap.cpp b/media/codec2/components/aac/DrcPresModeWrap.cpp
index bee969b..7ce5c9d 100644
--- a/media/codec2/components/aac/DrcPresModeWrap.cpp
+++ b/media/codec2/components/aac/DrcPresModeWrap.cpp
@@ -161,7 +161,7 @@
     int newHeavy          = mDesHeavy;
 
     if (mDataUpdate) {
-        // sanity check
+        // Validation check
         if ((mDesTarget < MAX_TARGET_LEVEL) && (mDesTarget != -1)){
             mDesTarget = MAX_TARGET_LEVEL;  // limit target level to -10 dB or below
             newTarget = MAX_TARGET_LEVEL;
@@ -217,7 +217,7 @@
         }
         else { // handle other used encoder target levels
 
-            // Sanity check: DRC presentation mode is only specified for max. 5.1 channels
+            // Validation check: DRC presentation mode is only specified for max. 5.1 channels
             if (mStreamNrAACChan > 6) {
                 drcPresMode = 0;
             }
@@ -308,7 +308,7 @@
             } // switch()
         } // if (mEncoderTarget  == GPM_ENCODER_TARGET_LEVEL)
 
-        // sanity again
+        // Validation check again
         if (newHeavy == 1) {
             newBoostFactor=127; // not really needed as the same would be done by the decoder anyway
             newAttFactor = 127;
diff --git a/media/codec2/components/aom/C2SoftAomDec.cpp b/media/codec2/components/aom/C2SoftAomDec.cpp
index c7046cb..9ba3b697 100644
--- a/media/codec2/components/aom/C2SoftAomDec.cpp
+++ b/media/codec2/components/aom/C2SoftAomDec.cpp
@@ -506,30 +506,28 @@
 }
 
 static void copyOutputBufferToYuvPlanarFrame(
-        uint8_t *dst, const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV,
+        uint8_t *dstY, uint8_t *dstU, uint8_t *dstV,
+        const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV,
         size_t srcYStride, size_t srcUStride, size_t srcVStride,
         size_t dstYStride, size_t dstUVStride,
         uint32_t width, uint32_t height) {
-    uint8_t* dstStart = dst;
 
     for (size_t i = 0; i < height; ++i) {
-        memcpy(dst, srcY, width);
+        memcpy(dstY, srcY, width);
         srcY += srcYStride;
-        dst += dstYStride;
+        dstY += dstYStride;
     }
 
-    dst = dstStart + dstYStride * height;
     for (size_t i = 0; i < height / 2; ++i) {
-         memcpy(dst, srcV, width / 2);
+        memcpy(dstV, srcV, width / 2);
         srcV += srcVStride;
-        dst += dstUVStride;
+        dstV += dstUVStride;
     }
 
-    dst = dstStart + (dstYStride * height) + (dstUVStride * height / 2);
     for (size_t i = 0; i < height / 2; ++i) {
-         memcpy(dst, srcU, width / 2);
+        memcpy(dstU, srcU, width / 2);
         srcU += srcUStride;
-        dst += dstUVStride;
+        dstU += dstUVStride;
     }
 }
 
@@ -596,16 +594,12 @@
     return;
 }
 
-static void convertYUV420Planar16ToYUV420Planar(uint8_t *dst,
+static void convertYUV420Planar16ToYUV420Planar(
+        uint8_t *dstY, uint8_t *dstU, uint8_t *dstV,
         const uint16_t *srcY, const uint16_t *srcU, const uint16_t *srcV,
         size_t srcYStride, size_t srcUStride, size_t srcVStride,
-        size_t dstYStride, size_t dstUVStride, size_t width, size_t height) {
-
-    uint8_t *dstY = (uint8_t *)dst;
-    size_t dstYSize = dstYStride * height;
-    size_t dstUVSize = dstUVStride * height / 2;
-    uint8_t *dstV = dstY + dstYSize;
-    uint8_t *dstU = dstV + dstUVSize;
+        size_t dstYStride, size_t dstUVStride,
+        size_t width, size_t height) {
 
     for (size_t y = 0; y < height; ++y) {
         for (size_t x = 0; x < width; ++x) {
@@ -696,7 +690,9 @@
           block->width(), block->height(), mWidth, mHeight,
           (int)*(int64_t*)img->user_priv);
 
-    uint8_t* dst = const_cast<uint8_t*>(wView.data()[C2PlanarLayout::PLANE_Y]);
+    uint8_t* dstY = const_cast<uint8_t*>(wView.data()[C2PlanarLayout::PLANE_Y]);
+    uint8_t* dstU = const_cast<uint8_t*>(wView.data()[C2PlanarLayout::PLANE_U]);
+    uint8_t* dstV = const_cast<uint8_t*>(wView.data()[C2PlanarLayout::PLANE_V]);
     size_t srcYStride = img->stride[AOM_PLANE_Y];
     size_t srcUStride = img->stride[AOM_PLANE_U];
     size_t srcVStride = img->stride[AOM_PLANE_V];
@@ -710,13 +706,14 @@
         const uint16_t *srcV = (const uint16_t *)img->planes[AOM_PLANE_V];
 
         if (format == HAL_PIXEL_FORMAT_RGBA_1010102) {
-            convertYUV420Planar16ToY410((uint32_t *)dst, srcY, srcU, srcV, srcYStride / 2,
+            convertYUV420Planar16ToY410((uint32_t *)dstY, srcY, srcU, srcV, srcYStride / 2,
                                     srcUStride / 2, srcVStride / 2,
                                     dstYStride / sizeof(uint32_t),
                                     mWidth, mHeight);
         } else {
-            convertYUV420Planar16ToYUV420Planar(dst, srcY, srcU, srcV, srcYStride / 2,
-                                    srcUStride / 2, srcVStride / 2,
+            convertYUV420Planar16ToYUV420Planar(dstY, dstU, dstV,
+                                    srcY, srcU, srcV,
+                                    srcYStride / 2, srcUStride / 2, srcVStride / 2,
                                     dstYStride, dstUVStride,
                                     mWidth, mHeight);
         }
@@ -725,7 +722,7 @@
         const uint8_t *srcU = (const uint8_t *)img->planes[AOM_PLANE_U];
         const uint8_t *srcV = (const uint8_t *)img->planes[AOM_PLANE_V];
         copyOutputBufferToYuvPlanarFrame(
-                dst, srcY, srcU, srcV,
+                dstY, dstU, dstV, srcY, srcU, srcV,
                 srcYStride, srcUStride, srcVStride,
                 dstYStride, dstUVStride,
                 mWidth, mHeight);
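The aom hunk above, and the gav1, mpeg4/h263 and vpx hunks further down, all apply the same refactor: the destination Y/U/V plane pointers now come straight from the C2GraphicView layout instead of being derived from a single base pointer. A minimal standalone sketch of the resulting copy pattern, assuming caller-supplied plane pointers and strides (the helper name is illustrative, not the plugin code):

#include <cstdint>
#include <cstring>

// Copy a YUV420 planar image plane by plane. Each destination plane is
// addressed through its own pointer and row stride, so layouts with padding
// between planes (as reported by C2PlanarLayout) are handled correctly.
static void copyYuv420Planar(uint8_t *dstY, uint8_t *dstU, uint8_t *dstV,
                             const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV,
                             size_t srcYStride, size_t srcUStride, size_t srcVStride,
                             size_t dstYStride, size_t dstUVStride,
                             uint32_t width, uint32_t height) {
    for (uint32_t i = 0; i < height; ++i) {
        std::memcpy(dstY + i * dstYStride, srcY + i * srcYStride, width);
    }
    for (uint32_t i = 0; i < height / 2; ++i) {
        std::memcpy(dstU + i * dstUVStride, srcU + i * srcUStride, width / 2);
        std::memcpy(dstV + i * dstUVStride, srcV + i * srcVStride, width / 2);
    }
}

The removed code computed dstU/dstV as fixed offsets from dstY, which silently assumed the three planes are packed back to back; taking the pointers from wView.data() keeps the copy correct when the buffer layout says otherwise.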
diff --git a/media/codec2/components/cmds/codec2.cpp b/media/codec2/components/cmds/codec2.cpp
index d6025de..a17b04e 100644
--- a/media/codec2/components/cmds/codec2.cpp
+++ b/media/codec2/components/cmds/codec2.cpp
@@ -138,7 +138,7 @@
 
 SimplePlayer::SimplePlayer()
     : mListener(new Listener(this)),
-      mProducerListener(new DummyProducerListener),
+      mProducerListener(new StubProducerListener),
       mLinearPoolId(C2BlockPool::PLATFORM_START),
       mComposerClient(new SurfaceComposerClient) {
     CHECK_EQ(mComposerClient->initCheck(), (status_t)OK);
diff --git a/media/codec2/components/flac/C2SoftFlacEnc.cpp b/media/codec2/components/flac/C2SoftFlacEnc.cpp
index 408db7e..72910c5 100644
--- a/media/codec2/components/flac/C2SoftFlacEnc.cpp
+++ b/media/codec2/components/flac/C2SoftFlacEnc.cpp
@@ -262,9 +262,10 @@
         work->result = C2_NO_MEMORY;
         return;
     }
-    C2WriteView wView = mOutputBlock->map().get();
-    if (wView.error()) {
-        ALOGE("write view map failed %d", wView.error());
+
+    err = mOutputBlock->map().get().error();
+    if (err) {
+        ALOGE("write view map failed %d", err);
         work->result = C2_CORRUPTED;
         return;
     }
diff --git a/media/codec2/components/g711/Android.bp b/media/codec2/components/g711/Android.bp
index 3ede68c..0101b1a 100644
--- a/media/codec2/components/g711/Android.bp
+++ b/media/codec2/components/g711/Android.bp
@@ -7,6 +7,8 @@
 
     srcs: ["C2SoftG711Dec.cpp"],
 
+    static_libs: ["codecs_g711dec"],
+
     cflags: [
         "-DALAW",
     ],
@@ -20,4 +22,6 @@
     ],
 
     srcs: ["C2SoftG711Dec.cpp"],
+
+    static_libs: ["codecs_g711dec"],
 }
diff --git a/media/codec2/components/g711/C2SoftG711Dec.cpp b/media/codec2/components/g711/C2SoftG711Dec.cpp
index 4ff0793..7f9c34e 100644
--- a/media/codec2/components/g711/C2SoftG711Dec.cpp
+++ b/media/codec2/components/g711/C2SoftG711Dec.cpp
@@ -22,7 +22,7 @@
 
 #include <C2PlatformSupport.h>
 #include <SimpleC2Interface.h>
-
+#include <g711Dec.h>
 #include "C2SoftG711Dec.h"
 
 namespace android {
@@ -224,53 +224,6 @@
     return C2_OK;
 }
 
-#ifdef ALAW
-void C2SoftG711Dec::DecodeALaw(
-        int16_t *out, const uint8_t *in, size_t inSize) {
-    while (inSize > 0) {
-        inSize--;
-        int32_t x = *in++;
-
-        int32_t ix = x ^ 0x55;
-        ix &= 0x7f;
-
-        int32_t iexp = ix >> 4;
-        int32_t mant = ix & 0x0f;
-
-        if (iexp > 0) {
-            mant += 16;
-        }
-
-        mant = (mant << 4) + 8;
-
-        if (iexp > 1) {
-            mant = mant << (iexp - 1);
-        }
-
-        *out++ = (x > 127) ? mant : -mant;
-    }
-}
-#else
-void C2SoftG711Dec::DecodeMLaw(
-        int16_t *out, const uint8_t *in, size_t inSize) {
-    while (inSize > 0) {
-        inSize--;
-        int32_t x = *in++;
-
-        int32_t mantissa = ~x;
-        int32_t exponent = (mantissa >> 4) & 7;
-        int32_t segment = exponent + 1;
-        mantissa &= 0x0f;
-
-        int32_t step = 4 << segment;
-
-        int32_t abs = (0x80l << exponent) + step * mantissa + step / 2 - 4 * 33;
-
-        *out++ = (x < 0x80) ? -abs : abs;
-    }
-}
-#endif
-
 class C2SoftG711DecFactory : public C2ComponentFactory {
 public:
     C2SoftG711DecFactory() : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
diff --git a/media/codec2/components/g711/C2SoftG711Dec.h b/media/codec2/components/g711/C2SoftG711Dec.h
index 23e8ffc..f93840b 100644
--- a/media/codec2/components/g711/C2SoftG711Dec.h
+++ b/media/codec2/components/g711/C2SoftG711Dec.h
@@ -45,12 +45,6 @@
     std::shared_ptr<IntfImpl> mIntf;
     bool mSignalledOutputEos;
 
-#ifdef ALAW
-    void DecodeALaw(int16_t *out, const uint8_t *in, size_t inSize);
-#else
-    void DecodeMLaw(int16_t *out, const uint8_t *in, size_t inSize);
-#endif
-
     C2_DO_NOT_COPY(C2SoftG711Dec);
 };
 
diff --git a/media/codec2/components/gav1/C2SoftGav1Dec.cpp b/media/codec2/components/gav1/C2SoftGav1Dec.cpp
index 120ba7a..a1929e7 100644
--- a/media/codec2/components/gav1/C2SoftGav1Dec.cpp
+++ b/media/codec2/components/gav1/C2SoftGav1Dec.cpp
@@ -458,32 +458,28 @@
   }
 }
 
-static void copyOutputBufferToYuvPlanarFrame(uint8_t *dst, const uint8_t *srcY,
-                                             const uint8_t *srcU,
-                                             const uint8_t *srcV, size_t srcYStride,
-                                             size_t srcUStride, size_t srcVStride,
-                                             size_t dstYStride, size_t dstUVStride,
-                                             uint32_t width, uint32_t height) {
-  uint8_t *const dstStart = dst;
+static void copyOutputBufferToYV12Frame(uint8_t *dstY, uint8_t *dstU, uint8_t *dstV,
+                                        const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV,
+                                        size_t srcYStride, size_t srcUStride, size_t srcVStride,
+                                        size_t dstYStride, size_t dstUVStride,
+                                        uint32_t width, uint32_t height) {
 
   for (size_t i = 0; i < height; ++i) {
-    memcpy(dst, srcY, width);
+    memcpy(dstY, srcY, width);
     srcY += srcYStride;
-    dst += dstYStride;
+    dstY += dstYStride;
   }
 
-  dst = dstStart + dstYStride * height;
   for (size_t i = 0; i < height / 2; ++i) {
-    memcpy(dst, srcV, width / 2);
+    memcpy(dstV, srcV, width / 2);
     srcV += srcVStride;
-    dst += dstUVStride;
+    dstV += dstUVStride;
   }
 
-  dst = dstStart + (dstYStride * height) + (dstUVStride * height / 2);
   for (size_t i = 0; i < height / 2; ++i) {
-    memcpy(dst, srcU, width / 2);
+    memcpy(dstU, srcU, width / 2);
     srcU += srcUStride;
-    dst += dstUVStride;
+    dstU += dstUVStride;
   }
 }
 
@@ -555,15 +551,11 @@
 }
 
 static void convertYUV420Planar16ToYUV420Planar(
-    uint8_t *dst, const uint16_t *srcY, const uint16_t *srcU,
-    const uint16_t *srcV, size_t srcYStride, size_t srcUStride,
-    size_t srcVStride, size_t dstYStride, size_t dstUVStride,
+    uint8_t *dstY, uint8_t *dstU, uint8_t *dstV,
+    const uint16_t *srcY, const uint16_t *srcU, const uint16_t *srcV,
+    size_t srcYStride, size_t srcUStride, size_t srcVStride,
+    size_t dstYStride, size_t dstUVStride,
     size_t width, size_t height) {
-  uint8_t *dstY = (uint8_t *)dst;
-  size_t dstYSize = dstYStride * height;
-  size_t dstUVSize = dstUVStride * height / 2;
-  uint8_t *dstV = dstY + dstYSize;
-  uint8_t *dstU = dstV + dstUVSize;
 
   for (size_t y = 0; y < height; ++y) {
     for (size_t x = 0; x < width; ++x) {
@@ -667,10 +659,13 @@
   ALOGV("provided (%dx%d) required (%dx%d), out frameindex %d", block->width(),
         block->height(), mWidth, mHeight, (int)buffer->user_private_data);
 
-  uint8_t *dst = const_cast<uint8_t *>(wView.data()[C2PlanarLayout::PLANE_Y]);
+  uint8_t *dstY = const_cast<uint8_t *>(wView.data()[C2PlanarLayout::PLANE_Y]);
+  uint8_t *dstU = const_cast<uint8_t *>(wView.data()[C2PlanarLayout::PLANE_U]);
+  uint8_t *dstV = const_cast<uint8_t *>(wView.data()[C2PlanarLayout::PLANE_V]);
   size_t srcYStride = buffer->stride[0];
   size_t srcUStride = buffer->stride[1];
   size_t srcVStride = buffer->stride[2];
+
   C2PlanarLayout layout = wView.layout();
   size_t dstYStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
   size_t dstUVStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
@@ -682,20 +677,24 @@
 
     if (format == HAL_PIXEL_FORMAT_RGBA_1010102) {
       convertYUV420Planar16ToY410(
-          (uint32_t *)dst, srcY, srcU, srcV, srcYStride / 2, srcUStride / 2,
+          (uint32_t *)dstY, srcY, srcU, srcV, srcYStride / 2, srcUStride / 2,
           srcVStride / 2, dstYStride / sizeof(uint32_t), mWidth, mHeight);
     } else {
-      convertYUV420Planar16ToYUV420Planar(dst, srcY, srcU, srcV, srcYStride / 2,
-                                          srcUStride / 2, srcVStride / 2,
-                                          dstYStride, dstUVStride, mWidth, mHeight);
+      convertYUV420Planar16ToYUV420Planar(dstY, dstU, dstV,
+                                          srcY, srcU, srcV,
+                                          srcYStride / 2, srcUStride / 2, srcVStride / 2,
+                                          dstYStride, dstUVStride,
+                                          mWidth, mHeight);
     }
   } else {
     const uint8_t *srcY = (const uint8_t *)buffer->plane[0];
     const uint8_t *srcU = (const uint8_t *)buffer->plane[1];
     const uint8_t *srcV = (const uint8_t *)buffer->plane[2];
-    copyOutputBufferToYuvPlanarFrame(dst, srcY, srcU, srcV, srcYStride, srcUStride,
-                                     srcVStride, dstYStride, dstUVStride,
-                                     mWidth, mHeight);
+    copyOutputBufferToYV12Frame(dstY, dstU, dstV,
+                                srcY, srcU, srcV,
+                                srcYStride, srcUStride, srcVStride,
+                                dstYStride, dstUVStride,
+                                mWidth, mHeight);
   }
   finishWork(buffer->user_private_data, work, std::move(block));
   block = nullptr;
diff --git a/media/codec2/components/gsm/C2SoftGsmDec.h b/media/codec2/components/gsm/C2SoftGsmDec.h
index 2b209fe..edd273b 100644
--- a/media/codec2/components/gsm/C2SoftGsmDec.h
+++ b/media/codec2/components/gsm/C2SoftGsmDec.h
@@ -19,10 +19,7 @@
 
 #include <SimpleC2Component.h>
 
-
-extern "C" {
-    #include "gsm.h"
-}
+#include "gsm.h"
 
 namespace android {
 
diff --git a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
index 61b286c..13cc0ec 100644
--- a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
+++ b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
@@ -464,34 +464,34 @@
 /* TODO: can remove temporary copy after library supports writing to display
  * buffer Y, U and V plane pointers using stride info. */
 static void copyOutputBufferToYuvPlanarFrame(
-        uint8_t *dst, uint8_t *src,
+        uint8_t *dstY, uint8_t *dstU, uint8_t *dstV, uint8_t *src,
         size_t dstYStride, size_t dstUVStride,
         size_t srcYStride, uint32_t width,
         uint32_t height) {
     size_t srcUVStride = srcYStride / 2;
     uint8_t *srcStart = src;
-    uint8_t *dstStart = dst;
+
     size_t vStride = align(height, 16);
     for (size_t i = 0; i < height; ++i) {
-         memcpy(dst, src, width);
+         memcpy(dstY, src, width);
          src += srcYStride;
-         dst += dstYStride;
+         dstY += dstYStride;
     }
+
     /* U buffer */
     src = srcStart + vStride * srcYStride;
-    dst = dstStart + (dstYStride * height) + (dstUVStride * height / 2);
     for (size_t i = 0; i < height / 2; ++i) {
-         memcpy(dst, src, width / 2);
+         memcpy(dstU, src, width / 2);
          src += srcUVStride;
-         dst += dstUVStride;
+         dstU += dstUVStride;
     }
+
     /* V buffer */
     src = srcStart + vStride * srcYStride * 5 / 4;
-    dst = dstStart + (dstYStride * height);
     for (size_t i = 0; i < height / 2; ++i) {
-         memcpy(dst, src, width / 2);
+         memcpy(dstV, src, width / 2);
          src += srcUVStride;
-         dst += dstUVStride;
+         dstV += dstUVStride;
     }
 }
 
@@ -672,11 +672,14 @@
         }
 
         uint8_t *outputBufferY = wView.data()[C2PlanarLayout::PLANE_Y];
+        uint8_t *outputBufferU = wView.data()[C2PlanarLayout::PLANE_U];
+        uint8_t *outputBufferV = wView.data()[C2PlanarLayout::PLANE_V];
+
         C2PlanarLayout layout = wView.layout();
         size_t dstYStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
         size_t dstUVStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
         (void)copyOutputBufferToYuvPlanarFrame(
-                outputBufferY,
+                outputBufferY, outputBufferU, outputBufferV,
                 mOutputBuffer[mNumSamplesOutput & 1],
                 dstYStride, dstUVStride,
                 align(mWidth, 16), mWidth, mHeight);
diff --git a/media/codec2/components/vorbis/C2SoftVorbisDec.cpp b/media/codec2/components/vorbis/C2SoftVorbisDec.cpp
index a8b5377..d3b6e31 100644
--- a/media/codec2/components/vorbis/C2SoftVorbisDec.cpp
+++ b/media/codec2/components/vorbis/C2SoftVorbisDec.cpp
@@ -359,6 +359,10 @@
     }
     memcpy(&numPageFrames, data + inSize - sizeof(numPageFrames), sizeof(numPageFrames));
     inSize -= sizeof(numPageFrames);
+    if (inSize == 0) {
+        // empty buffer, ignore
+        return;
+    }
     if (numPageFrames >= 0) {
         mNumFramesLeftOnPage = numPageFrames;
     }
@@ -409,7 +413,7 @@
                 mState,  reinterpret_cast<int16_t *> (wView.data()),
                 kMaxNumSamplesPerChannel);
         if (numFrames < 0) {
-            ALOGD("vorbis_dsp_pcmout returned %d", numFrames);
+            ALOGD("vorbis_dsp_pcmout returned %d frames", numFrames);
             numFrames = 0;
         }
     }
diff --git a/media/codec2/components/vpx/C2SoftVpxDec.cpp b/media/codec2/components/vpx/C2SoftVpxDec.cpp
index 3eef1e3..91238e8 100644
--- a/media/codec2/components/vpx/C2SoftVpxDec.cpp
+++ b/media/codec2/components/vpx/C2SoftVpxDec.cpp
@@ -631,31 +631,30 @@
 }
 
 static void copyOutputBufferToYuvPlanarFrame(
-        uint8_t *dst, const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV,
+        uint8_t *dstY, uint8_t *dstU, uint8_t *dstV,
+        const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV,
         size_t srcYStride, size_t srcUStride, size_t srcVStride,
         size_t dstYStride, size_t dstUVStride,
         uint32_t width, uint32_t height) {
-    uint8_t *dstStart = dst;
 
     for (size_t i = 0; i < height; ++i) {
-         memcpy(dst, srcY, width);
+         memcpy(dstY, srcY, width);
          srcY += srcYStride;
-         dst += dstYStride;
+         dstY += dstYStride;
     }
 
-    dst = dstStart + dstYStride * height;
     for (size_t i = 0; i < height / 2; ++i) {
-         memcpy(dst, srcV, width / 2);
+         memcpy(dstV, srcV, width / 2);
          srcV += srcVStride;
-         dst += dstUVStride;
+         dstV += dstUVStride;
     }
 
-    dst = dstStart + (dstYStride * height) + (dstUVStride * height / 2);
     for (size_t i = 0; i < height / 2; ++i) {
-         memcpy(dst, srcU, width / 2);
+         memcpy(dstU, srcU, width / 2);
          srcU += srcUStride;
-         dst += dstUVStride;
+         dstU += dstUVStride;
     }
+
 }
 
 static void convertYUV420Planar16ToY410(uint32_t *dst,
@@ -721,16 +720,12 @@
     return;
 }
 
-static void convertYUV420Planar16ToYUV420Planar(uint8_t *dst,
+static void convertYUV420Planar16ToYUV420Planar(
+        uint8_t *dstY, uint8_t *dstU, uint8_t *dstV,
         const uint16_t *srcY, const uint16_t *srcU, const uint16_t *srcV,
         size_t srcYStride, size_t srcUStride, size_t srcVStride,
-        size_t dstYStride, size_t dstUVStride, size_t width, size_t height) {
-
-    uint8_t *dstY = (uint8_t *)dst;
-    size_t dstYSize = dstYStride * height;
-    size_t dstUVSize = dstUVStride * height / 2;
-    uint8_t *dstV = dstY + dstYSize;
-    uint8_t *dstU = dstV + dstUVSize;
+        size_t dstYStride, size_t dstUVStride,
+        size_t width, size_t height) {
 
     for (size_t y = 0; y < height; ++y) {
         for (size_t x = 0; x < width; ++x) {
@@ -823,7 +818,10 @@
            block->width(), block->height(), mWidth, mHeight,
            ((c2_cntr64_t *)img->user_priv)->peekll());
 
-    uint8_t *dst = const_cast<uint8_t *>(wView.data()[C2PlanarLayout::PLANE_Y]);
+    uint8_t *dstY = const_cast<uint8_t *>(wView.data()[C2PlanarLayout::PLANE_Y]);
+    uint8_t *dstU = const_cast<uint8_t *>(wView.data()[C2PlanarLayout::PLANE_U]);
+    uint8_t *dstV = const_cast<uint8_t *>(wView.data()[C2PlanarLayout::PLANE_V]);
+
     size_t srcYStride = img->stride[VPX_PLANE_Y];
     size_t srcUStride = img->stride[VPX_PLANE_U];
     size_t srcVStride = img->stride[VPX_PLANE_V];
@@ -842,18 +840,18 @@
             constexpr size_t kHeight = 64;
             for (; i < mHeight; i += kHeight) {
                 queue->entries.push_back(
-                        [dst, srcY, srcU, srcV,
+                        [dstY, srcY, srcU, srcV,
                          srcYStride, srcUStride, srcVStride, dstYStride,
                          width = mWidth, height = std::min(mHeight - i, kHeight)] {
                             convertYUV420Planar16ToY410(
-                                    (uint32_t *)dst, srcY, srcU, srcV, srcYStride / 2,
+                                    (uint32_t *)dstY, srcY, srcU, srcV, srcYStride / 2,
                                     srcUStride / 2, srcVStride / 2, dstYStride / sizeof(uint32_t),
                                     width, height);
                         });
                 srcY += srcYStride / 2 * kHeight;
                 srcU += srcUStride / 2 * (kHeight / 2);
                 srcV += srcVStride / 2 * (kHeight / 2);
-                dst += dstYStride * kHeight;
+                dstY += dstYStride * kHeight;
             }
             CHECK_EQ(0u, queue->numPending);
             queue->numPending = queue->entries.size();
@@ -862,8 +860,9 @@
                 queue.waitForCondition(queue->cond);
             }
         } else {
-            convertYUV420Planar16ToYUV420Planar(dst, srcY, srcU, srcV, srcYStride / 2,
-                                                srcUStride / 2, srcVStride / 2,
+            convertYUV420Planar16ToYUV420Planar(dstY, dstU, dstV,
+                                                srcY, srcU, srcV,
+                                                srcYStride / 2, srcUStride / 2, srcVStride / 2,
                                                 dstYStride, dstUVStride,
                                                 mWidth, mHeight);
         }
@@ -871,8 +870,10 @@
         const uint8_t *srcY = (const uint8_t *)img->planes[VPX_PLANE_Y];
         const uint8_t *srcU = (const uint8_t *)img->planes[VPX_PLANE_U];
         const uint8_t *srcV = (const uint8_t *)img->planes[VPX_PLANE_V];
+
         copyOutputBufferToYuvPlanarFrame(
-                dst, srcY, srcU, srcV,
+                dstY, dstU, dstV,
+                srcY, srcU, srcV,
                 srcYStride, srcUStride, srcVStride,
                 dstYStride, dstUVStride,
                 mWidth, mHeight);
diff --git a/media/codec2/core/Android.bp b/media/codec2/core/Android.bp
index ce1c9ac..33fafa7 100644
--- a/media/codec2/core/Android.bp
+++ b/media/codec2/core/Android.bp
@@ -21,6 +21,10 @@
         "-Werror",
     ],
 
+    header_abi_checker: {
+        check_all_apis: true,
+    },
+
     header_libs: [
         "libcodec2_headers",
         "libhardware_headers",
diff --git a/media/codec2/core/include/C2Buffer.h b/media/codec2/core/include/C2Buffer.h
index 3d3587c..fe37b05 100644
--- a/media/codec2/core/include/C2Buffer.h
+++ b/media/codec2/core/include/C2Buffer.h
@@ -734,6 +734,22 @@
     }
 
     virtual ~C2Allocator() = default;
+
+    /**
+     * Returns true if the handle looks valid for this allocator.
+     *
+     * It does not actually validate that the handle represents a valid allocation (by this
+     * allocator), only that the handle could have been returned by this allocator. As such,
+     * multiple allocators may return true from checkHandle for the same handle.
+     *
+     * This method MUST be "non-blocking", MUST not access kernel and/or device drivers, and
+     * MUST return within 1us.
+     *
+     * \param handle      the handle for an existing allocation (possibly from another
+     *                    allocator)
+     */
+    virtual bool checkHandle(const C2Handle *const handle) const = 0;
+
 protected:
     C2Allocator() = default;
 };
@@ -2156,9 +2172,12 @@
 };
 
 /**
- * An extension of C2Info objects that can contain arbitrary buffer data.
+ * A const metadata object that can contain arbitrary buffer data.
  *
- * \note This object is not describable and contains opaque data.
+ * This object is not an actual C2Info and is not attached to buffers (C2Buffer), but rather to
+ * frames (C2FrameData). It is not describable via C2ParamDescriptor.
+ *
+ * C2InfoBuffer is a const object that can be allocated on the stack and is copyable.
  */
 class C2InfoBuffer {
 public:
@@ -2167,14 +2186,65 @@
      *
      * \return the parameter index.
      */
-    const C2Param::Index index() const;
+    const C2Param::Index index() const { return mIndex; }
 
     /**
      * Gets the buffer's data.
      *
      * \return the buffer's data.
      */
-    const C2BufferData data() const;
+    const C2BufferData data() const { return mData; }
+
+    /// Returns a clone of this as a global info buffer.
+    C2InfoBuffer asGlobal() const {
+        C2Param::Index index = mIndex;
+        index.convertToGlobal();
+        return C2InfoBuffer(index, mData);
+    }
+
+    /// Returns a clone of this as a port info buffer.
+    C2InfoBuffer asPort(bool output) const {
+        C2Param::Index index = mIndex;
+        index.convertToPort(output);
+        return C2InfoBuffer(index, mData);
+    }
+
+    /// Returns a clone of this as a stream info buffer.
+    C2InfoBuffer asStream(bool output, unsigned stream) const {
+        C2Param::Index index = mIndex;
+        index.convertToStream(output, stream);
+        return C2InfoBuffer(index, mData);
+    }
+
+    /**
+     * Creates a global info buffer containing a single linear block.
+     *
+     * \param index the core parameter index of this info buffer.
+     * \param block the content of the info buffer.
+     *
+     * \return shared pointer to the created info buffer.
+     */
+    static C2InfoBuffer CreateLinearBuffer(C2Param::CoreIndex index, const C2ConstLinearBlock &block);
+
+    /**
+     * Creates a global info buffer containing a single graphic block.
+     *
+     * \param index the core parameter index of this info buffer.
+     * \param block the content of the info buffer.
+     *
+     * \return shared pointer to the created info buffer.
+     */
+    static C2InfoBuffer CreateGraphicBuffer(C2Param::CoreIndex index, const C2ConstGraphicBlock &block);
+
+protected:
+    // no public constructor
+    explicit C2InfoBuffer(C2Param::Index index, const std::vector<C2ConstLinearBlock> &blocks);
+    explicit C2InfoBuffer(C2Param::Index index, const std::vector<C2ConstGraphicBlock> &blocks);
+
+private:
+    C2Param::Index mIndex;
+    C2BufferData mData;
+    explicit C2InfoBuffer(C2Param::Index index, const C2BufferData &data);
 };
 
 /// @}
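A hedged sketch of how the now-concrete C2InfoBuffer API above can be used; the helper name and its call site are illustrative only and not part of this patch:

#include <C2Buffer.h>
#include <C2Work.h>

// Hypothetical helper: wrap a linear block as an info buffer and attach it
// to a frame on a given output stream. The index is whatever core parameter
// index the component and client agreed on.
void attachLinearInfo(C2FrameData &frame, C2Param::CoreIndex index,
                      const C2ConstLinearBlock &block, unsigned stream) {
    // CreateLinearBuffer() yields a global info buffer; asStream() clones it
    // into the requested stream domain.
    C2InfoBuffer info = C2InfoBuffer::CreateLinearBuffer(index, block);
    // C2FrameData::infoBuffers now holds C2InfoBuffer by value (see the
    // C2Work.h change later in this patch), so no shared_ptr wrapping is needed.
    frame.infoBuffers.push_back(info.asStream(true /* output */, stream));
}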
diff --git a/media/codec2/core/include/C2Config.h b/media/codec2/core/include/C2Config.h
index 29bccd5..38f7389 100644
--- a/media/codec2/core/include/C2Config.h
+++ b/media/codec2/core/include/C2Config.h
@@ -249,6 +249,13 @@
 
     // low latency mode
     kParamIndexLowLatencyMode, // bool
+
+    // tunneled codec
+    kParamIndexTunneledMode, // struct
+    kParamIndexTunnelHandle, // int32[]
+    kParamIndexTunnelSystemTime, // int64
+
+    kParamIndexStoreDmaBufUsage,  // store, struct
 };
 
 }
@@ -2036,6 +2043,33 @@
         C2StoreIonUsageInfo;
 
 /**
+ * This structure describes the preferred DMA-Buf allocation parameters for a given memory usage.
+ */
+struct C2StoreDmaBufUsageStruct {
+    inline C2StoreDmaBufUsageStruct() { memset(this, 0, sizeof(*this)); }
+
+    inline C2StoreDmaBufUsageStruct(size_t flexCount, uint64_t usage_, uint32_t capacity_)
+        : usage(usage_), capacity(capacity_), allocFlags(0) {
+        memset(heapName, 0, flexCount);
+    }
+
+    uint64_t usage;                         ///< C2MemoryUsage
+    uint32_t capacity;                      ///< capacity
+    int32_t allocFlags;                     ///< dmabuf allocation flags
+    char heapName[];                        ///< dmabuf heap name
+
+    DEFINE_AND_DESCRIBE_FLEX_C2STRUCT(StoreDmaBufUsage, heapName)
+    C2FIELD(usage, "usage")
+    C2FIELD(capacity, "capacity")
+    C2FIELD(allocFlags, "alloc-flags")
+    C2FIELD(heapName, "heap-name")
+};
+
+// store, private
+typedef C2GlobalParam<C2Info, C2StoreDmaBufUsageStruct, kParamIndexStoreDmaBufUsage>
+        C2StoreDmaBufUsageInfo;
+
+/**
  * Flexible pixel format descriptors
  */
 struct C2FlexiblePixelFormatDescriptorStruct {
@@ -2182,6 +2216,79 @@
 typedef C2PortParam<C2Tuning, C2TimestampGapAdjustmentStruct> C2PortTimestampGapTuning;
 constexpr char C2_PARAMKEY_INPUT_SURFACE_TIMESTAMP_ADJUSTMENT[] = "input-surface.timestamp-adjustment";
 
+/* ===================================== TUNNELED CODEC ==================================== */
+
+/**
+ * Tunneled codec control.
+ */
+struct C2TunneledModeStruct {
+    /// mode
+    enum mode_t : uint32_t;
+    /// sync type
+    enum sync_type_t : uint32_t;
+
+    inline C2TunneledModeStruct() = default;
+
+    inline C2TunneledModeStruct(
+            size_t flexCount, mode_t mode_, sync_type_t type, std::vector<int32_t> id)
+        : mode(mode_), syncType(type) {
+        memcpy(&syncId, &id[0], c2_min(id.size(), flexCount) * FLEX_SIZE);
+    }
+
+    inline C2TunneledModeStruct(size_t flexCount, mode_t mode_, sync_type_t type, int32_t id)
+        : mode(mode_), syncType(type) {
+        if (flexCount >= 1) {
+            syncId[0] = id;
+        }
+    }
+
+    mode_t mode;          ///< tunneled mode
+    sync_type_t syncType; ///< type of sync used for tunneled mode
+    int32_t syncId[];     ///< sync id
+
+    DEFINE_AND_DESCRIBE_FLEX_C2STRUCT(TunneledMode, syncId)
+    C2FIELD(mode, "mode")
+    C2FIELD(syncType, "sync-type")
+    C2FIELD(syncId, "sync-id")
+
+};
+
+C2ENUM(C2TunneledModeStruct::mode_t, uint32_t,
+    NONE,
+    SIDEBAND,
+);
+
+
+C2ENUM(C2TunneledModeStruct::sync_type_t, uint32_t,
+    REALTIME,
+    AUDIO_HW_SYNC,
+    HW_AV_SYNC,
+);
+
+/**
+ * Configure tunneled mode
+ */
+typedef C2PortParam<C2Tuning, C2TunneledModeStruct, kParamIndexTunneledMode>
+        C2PortTunneledModeTuning;
+constexpr char C2_PARAMKEY_TUNNELED_RENDER[] = "output.tunneled-render";
+
+/**
+ * Tunneled mode handle. The meaning of this handle depends on the
+ * tunneled mode. If the tunneled mode is SIDEBAND, this is the
+ * sideband handle.
+ */
+typedef C2PortParam<C2Tuning, C2Int32Array, kParamIndexTunnelHandle> C2PortTunnelHandleTuning;
+constexpr char C2_PARAMKEY_OUTPUT_TUNNEL_HANDLE[] = "output.tunnel-handle";
+
+/**
+ * The system time using CLOCK_MONOTONIC in nanoseconds at the tunnel endpoint.
+ * For decoders, this is the render time of the output frame, which
+ * corresponds to the media timestamp of the output frame.
+ */
+typedef C2PortParam<C2Info, C2SimpleValueStruct<int64_t>, kParamIndexTunnelSystemTime>
+        C2PortTunnelSystemTime;
+constexpr char C2_PARAMKEY_OUTPUT_RENDER_TIME[] = "output.render-time";
+
 /// @}
 
 #endif  // C2CONFIG_H_
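A sketch, under stated assumptions, of how a tunneled decoder might publish the render time defined above. reportRenderTime is an illustrative name, and the timestamp is assumed to be taken with CLOCK_MONOTONIC at the tunnel endpoint, as the parameter requires:

#include <C2Config.h>
#include <C2Work.h>

// Attach the render time of an output frame to its frame data so the client
// can match it against the frame's media timestamp.
void reportRenderTime(C2FrameData &output, int64_t renderTimeNs) {
    // C2PortTunnelSystemTime wraps a single int64 (nanoseconds, CLOCK_MONOTONIC).
    output.configUpdate.emplace_back(
            new C2PortTunnelSystemTime::output(renderTimeNs));
}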
diff --git a/media/codec2/core/include/C2Enum.h b/media/codec2/core/include/C2Enum.h
index b0fad8f..da1f43b 100644
--- a/media/codec2/core/include/C2Enum.h
+++ b/media/codec2/core/include/C2Enum.h
@@ -54,7 +54,7 @@
 /// \note this will contain any initialization, which we will remove when converting to lower-case
 #define _C2_GET_ENUM_NAME(x, y) #x
 /// mapper to get value of enum
-#define _C2_GET_ENUM_VALUE(x, type) (_C2EnumConst<type>)x
+#define _C2_GET_ENUM_VALUE(x, type_) (_C2EnumConst<typename std::underlying_type<type_>::type>)type_::x
 
 /// \endcond
 
@@ -106,7 +106,7 @@
 template<> \
 C2FieldDescriptor::NamedValuesType C2FieldDescriptor::namedValuesFor(const name &r __unused) { \
     return _C2EnumUtils::sanitizeEnumValues( \
-            std::vector<C2Value::Primitive> { _C2_MAP(_C2_GET_ENUM_VALUE, type, __VA_ARGS__) }, \
+            std::vector<C2Value::Primitive> { _C2_MAP(_C2_GET_ENUM_VALUE, name, __VA_ARGS__) }, \
             { _C2_MAP(_C2_GET_ENUM_NAME, type, __VA_ARGS__) }, \
             prefix); \
 }
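The macro fix above comes down to one C++ idiom: qualify the enumerator with its enum type and cast through std::underlying_type so the value mapping also works for scoped enums. A tiny self-contained illustration (TunnelMode is a made-up enum, not a Codec2 type):

#include <cstdint>
#include <type_traits>

enum class TunnelMode : uint32_t { NONE, SIDEBAND };

// Scoping the constant (TunnelMode::SIDEBAND) and casting through the
// underlying type recovers its numeric value without relying on implicit
// conversion, which scoped enums do not provide.
constexpr auto kSidebandValue =
        static_cast<std::underlying_type<TunnelMode>::type>(TunnelMode::SIDEBAND);
static_assert(kSidebandValue == 1, "enumerators are numbered from 0");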
diff --git a/media/codec2/core/include/C2Param.h b/media/codec2/core/include/C2Param.h
index 51d417a..e938f96 100644
--- a/media/codec2/core/include/C2Param.h
+++ b/media/codec2/core/include/C2Param.h
@@ -317,7 +317,8 @@
         DEFINE_FIELD_BASED_COMPARISON_OPERATORS(Index, mIndex)
 
     private:
-        friend struct C2Param;           // for setStream, MakeStreamId, isValid
+        friend class C2InfoBuffer;       // for convertTo*
+        friend struct C2Param;           // for setStream, MakeStreamId, isValid, convertTo*
         friend struct _C2ParamInspector; // for testing
 
         /**
@@ -508,6 +509,14 @@
         return _mIndex.setPort(output);
     }
 
+    /// sets the size of this parameter.
+    inline void setSize(size_t size) {
+        if (size < sizeof(C2Param)) {
+            size = 0;
+        }
+        _mSize = c2_min(size, _mSize);
+    }
+
 public:
     /// invalidate this parameter. There is no recovery from this call; e.g. parameter
     /// cannot be 'corrected' to be valid.
diff --git a/media/codec2/core/include/C2ParamDef.h b/media/codec2/core/include/C2ParamDef.h
index 0a33283..d578820 100644
--- a/media/codec2/core/include/C2ParamDef.h
+++ b/media/codec2/core/include/C2ParamDef.h
@@ -97,6 +97,9 @@
         PARAM_TYPE = CoreIndex | TypeFlags
     };
 
+    // the underlying param struct type
+    typedef S Struct;
+
 protected:
     enum : uint32_t {
         FLEX_SIZE = 0,
@@ -270,6 +273,11 @@
         } \
         return 0; \
     } \
+    inline void setFlexCount(size_t count) { \
+        if (count < flexCount()) { \
+            this->setSize(sizeof(_Type) + _Type::FLEX_SIZE * count); \
+        } \
+    } \
 
 /// Mark flexible member variable and make structure flexible.
 #define FLEX(cls, m) \
diff --git a/media/codec2/core/include/C2Work.h b/media/codec2/core/include/C2Work.h
index 6923f3e..67084cc 100644
--- a/media/codec2/core/include/C2Work.h
+++ b/media/codec2/core/include/C2Work.h
@@ -161,7 +161,7 @@
     //< for initial work item, these may also come from the parser - if provided
     //< for output buffers, these are the responses to requestedInfos
     std::vector<std::unique_ptr<C2Param>>      configUpdate;
-    std::vector<std::shared_ptr<C2InfoBuffer>> infoBuffers;
+    std::vector<C2InfoBuffer> infoBuffers;
 };
 
 struct C2Worklet {
diff --git a/media/codec2/hidl/1.0/utils/types.cpp b/media/codec2/hidl/1.0/utils/types.cpp
index c73cb52..1f0c856 100644
--- a/media/codec2/hidl/1.0/utils/types.cpp
+++ b/media/codec2/hidl/1.0/utils/types.cpp
@@ -943,14 +943,9 @@
 
     d->infoBuffers.resize(s.infoBuffers.size());
     i = 0;
-    for (const std::shared_ptr<C2InfoBuffer>& sInfoBuffer : s.infoBuffers) {
+    for (const C2InfoBuffer& sInfoBuffer : s.infoBuffers) {
         InfoBuffer& dInfoBuffer = d->infoBuffers[i++];
-        if (!sInfoBuffer) {
-            LOG(ERROR) << "Null C2FrameData::infoBuffers["
-                       << i - 1 << "].";
-            return false;
-        }
-        if (!objcpy(&dInfoBuffer, *sInfoBuffer,
+        if (!objcpy(&dInfoBuffer, sInfoBuffer,
                 bufferPoolSender, baseBlocks, baseBlockIndices)) {
             LOG(ERROR) << "Invalid C2FrameData::infoBuffers["
                        << i - 1 << "].";
diff --git a/media/codec2/hidl/1.1/utils/Android.bp b/media/codec2/hidl/1.1/utils/Android.bp
index 386f6e2..ab8635b 100644
--- a/media/codec2/hidl/1.1/utils/Android.bp
+++ b/media/codec2/hidl/1.1/utils/Android.bp
@@ -44,6 +44,12 @@
         "libstagefright_bufferpool@2.0.1",
         "libui",
     ],
+
+    // Device does not boot when global ThinLTO is enabled for this library.
+    // http://b/170595429
+    lto: {
+        never: true,
+    },
 }
 
 
diff --git a/media/codec2/hidl/services/Android.bp b/media/codec2/hidl/services/Android.bp
index a16b106..3780a5a 100644
--- a/media/codec2/hidl/services/Android.bp
+++ b/media/codec2/hidl/services/Android.bp
@@ -52,6 +52,9 @@
     // directly in the main device manifest.xml file or via vintf_fragments.
     // (Remove the line below if the entry is already in the main manifest.)
     vintf_fragments: ["manifest_media_c2_V1_1_default.xml"],
+
+    // Remove this line to enable this module.
+    enabled: false,
 }
 
 // seccomp policy file.
diff --git a/media/codec2/hidl/services/vendor.cpp b/media/codec2/hidl/services/vendor.cpp
index 81bffeb..3ddb039 100644
--- a/media/codec2/hidl/services/vendor.cpp
+++ b/media/codec2/hidl/services/vendor.cpp
@@ -122,6 +122,18 @@
                 })
                 .withSetter(SetIonUsage)
                 .build());
+
+            addParameter(
+                DefineParam(mDmaBufUsageInfo, "dmabuf-usage")
+                .withDefault(new C2StoreDmaBufUsageInfo())
+                .withFields({
+                    C2F(mDmaBufUsageInfo, usage).flags({C2MemoryUsage::CPU_READ | C2MemoryUsage::CPU_WRITE}),
+                    C2F(mDmaBufUsageInfo, capacity).inRange(0, UINT32_MAX, 1024),
+                    C2F(mDmaBufUsageInfo, heapName).any(),
+                    C2F(mDmaBufUsageInfo, allocFlags).flags({}),
+                })
+                .withSetter(SetDmaBufUsage)
+                .build());
         }
 
         virtual ~Interface() = default;
@@ -135,7 +147,16 @@
             return C2R::Ok();
         }
 
+        static C2R SetDmaBufUsage(bool /* mayBlock */, C2P<C2StoreDmaBufUsageInfo> &me) {
+            // Vendor's TODO: add the appropriate mapping logic here
+            strncpy(me.set().m.heapName, "system", me.v.flexCount());
+            me.set().m.allocFlags = 0;
+            return C2R::Ok();
+        }
+
+
         std::shared_ptr<C2StoreIonUsageInfo> mIonUsageInfo;
+        std::shared_ptr<C2StoreDmaBufUsageInfo> mDmaBufUsageInfo;
     };
     std::shared_ptr<C2ReflectorHelper> mReflectorHelper;
     Interface mInterface;
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index 1972d3f..f816778 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -1331,8 +1331,6 @@
         mCallback->onError(err2, ACTION_CODE_FATAL);
         return;
     }
-    // We're not starting after flush.
-    (void)mSentConfigAfterResume.test_and_set();
     err2 = mChannel->start(inputFormat, outputFormat, buffersBoundToCodec);
     if (err2 != OK) {
         mCallback->onError(err2, ACTION_CODE_FATAL);
@@ -1580,7 +1578,6 @@
         return;
     }
 
-    mSentConfigAfterResume.clear();
     {
         Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
         const std::unique_ptr<Config> &config = *configLocked;
@@ -1797,7 +1794,7 @@
             // handle configuration changes in work done
             Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
             const std::unique_ptr<Config> &config = *configLocked;
-            bool changed = !mSentConfigAfterResume.test_and_set();
+            bool changed = false;
             Config::Watcher<C2StreamInitDataInfo::output> initData =
                 config->watch<C2StreamInitDataInfo::output>();
             if (!work->worklets.empty()
@@ -1816,15 +1813,18 @@
                         // move all info into output-stream #0 domain
                         updates.emplace_back(C2Param::CopyAsStream(*info, true /* output */, stream));
                     }
-                    for (const C2ConstGraphicBlock &block : buf->data().graphicBlocks()) {
+
+                    const std::vector<C2ConstGraphicBlock> blocks = buf->data().graphicBlocks();
+                    // for now only do the first block
+                    if (!blocks.empty()) {
                         // ALOGV("got output buffer with crop %u,%u+%u,%u and size %u,%u",
                         //      block.crop().left, block.crop().top,
                         //      block.crop().width, block.crop().height,
                         //      block.width(), block.height());
+                        const C2ConstGraphicBlock &block = blocks[0];
                         updates.emplace_back(new C2StreamCropRectInfo::output(stream, block.crop()));
                         updates.emplace_back(new C2StreamPictureSizeInfo::output(
                                 stream, block.crop().width, block.crop().height));
-                        break; // for now only do the first block
                     }
                     ++stream;
                 }
@@ -1836,7 +1836,7 @@
                 // copy standard infos to graphic buffers if not already present (otherwise, we
                 // may overwrite the actual intermediate value with a final value)
                 stream = 0;
-                const static std::vector<C2Param::Index> stdGfxInfos = {
+                const static C2Param::Index stdGfxInfos[] = {
                     C2StreamRotationInfo::output::PARAM_TYPE,
                     C2StreamColorAspectsInfo::output::PARAM_TYPE,
                     C2StreamDataSpaceInfo::output::PARAM_TYPE,
@@ -2168,15 +2168,17 @@
             return OK;
         }
     }
-    uint64_t minUsage = usage.expected;
-    uint64_t maxUsage = ~0ull;
     std::set<C2Allocator::id_t> allocators;
     GetCommonAllocatorIds(names, C2Allocator::LINEAR, &allocators);
     if (allocators.empty()) {
         *isCompatible = false;
         return OK;
     }
+
+    uint64_t minUsage = 0;
+    uint64_t maxUsage = ~0ull;
     CalculateMinMaxUsage(names, &minUsage, &maxUsage);
+    minUsage |= usage.expected;
     *isCompatible = ((maxUsage & minUsage) == minUsage);
     return OK;
 }
@@ -2203,14 +2205,16 @@
 // static
 std::shared_ptr<C2LinearBlock> CCodec::FetchLinearBlock(
         size_t capacity, const C2MemoryUsage &usage, const std::vector<std::string> &names) {
-    uint64_t minUsage = usage.expected;
-    uint64_t maxUsage = ~0ull;
     std::set<C2Allocator::id_t> allocators;
     GetCommonAllocatorIds(names, C2Allocator::LINEAR, &allocators);
     if (allocators.empty()) {
         allocators.insert(C2PlatformAllocatorStore::DEFAULT_LINEAR);
     }
+
+    uint64_t minUsage = 0;
+    uint64_t maxUsage = ~0ull;
     CalculateMinMaxUsage(names, &minUsage, &maxUsage);
+    minUsage |= usage.expected;
     if ((maxUsage & minUsage) != minUsage) {
         allocators.clear();
         allocators.insert(C2PlatformAllocatorStore::DEFAULT_LINEAR);
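Both CCodec.cpp hunks above converge on the same compatibility rule: start the required-usage mask from the components' needs, OR in the client's expected usage afterwards, and accept only if every required bit is allowed. A condensed sketch with illustrative names:

#include <cstdint>

// A usage is workable when every bit that is required (minUsage) is also
// permitted by the allocators' combined capability mask (maxUsage).
static bool isUsageCompatible(uint64_t requiredByComponents, uint64_t expectedByClient,
                              uint64_t maxUsage) {
    const uint64_t minUsage = requiredByComponents | expectedByClient;
    return (maxUsage & minUsage) == minUsage;
}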
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index 0626c8d..6e0c295 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -618,25 +618,26 @@
 }
 
 void CCodecBufferChannel::feedInputBufferIfAvailableInternal() {
-    if (mInputMetEos || mPipelineWatcher.lock()->pipelineFull()) {
+    if (mInputMetEos) {
         return;
     }
     {
         Mutexed<Output>::Locked output(mOutput);
         if (!output->buffers ||
                 output->buffers->hasPending() ||
-                output->buffers->numClientBuffers() >= output->numSlots) {
+                output->buffers->numActiveSlots() >= output->numSlots) {
             return;
         }
     }
-    size_t numInputSlots = mInput.lock()->numSlots;
-    for (size_t i = 0; i < numInputSlots; ++i) {
+    size_t numActiveSlots = 0;
+    while (!mPipelineWatcher.lock()->pipelineFull()) {
         sp<MediaCodecBuffer> inBuffer;
         size_t index;
         {
             Mutexed<Input>::Locked input(mInput);
-            if (input->buffers->numClientBuffers() >= input->numSlots) {
-                return;
+            numActiveSlots = input->buffers->numActiveSlots();
+            if (numActiveSlots >= input->numSlots) {
+                break;
             }
             if (!input->buffers->requestNewBuffer(&index, &inBuffer)) {
                 ALOGV("[%s] no new buffer available", mName);
@@ -646,6 +647,7 @@
         ALOGV("[%s] new input index = %zu [%p]", mName, index, inBuffer.get());
         mCallback->onInputBufferAvailable(index, inBuffer);
     }
+    ALOGV("[%s] # active slots after feedInputBufferIfAvailable = %zu", mName, numActiveSlots);
 }
 
 status_t CCodecBufferChannel::renderOutputBuffer(
@@ -814,6 +816,9 @@
     status_t result = mComponent->queueToOutputSurface(block, qbi, &qbo);
     if (result != OK) {
         ALOGI("[%s] queueBuffer failed: %d", mName, result);
+        if (result == NO_INIT) {
+            mCCodecCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
+        }
         return result;
     }
     ALOGV("[%s] queue buffer successful", mName);
@@ -1718,7 +1723,13 @@
         }
     }
 
-    if (notifyClient && !buffer && !flags) {
+    bool drop = false;
+    if (worklet->output.flags & C2FrameData::FLAG_DROP_FRAME) {
+        ALOGV("[%s] onWorkDone: drop buffer but keep metadata", mName);
+        drop = true;
+    }
+
+    if (notifyClient && !buffer && !flags && !(drop && outputFormat)) {
         ALOGV("[%s] onWorkDone: Not reporting output buffer (%lld)",
               mName, work->input.ordinal.frameIndex.peekull());
         notifyClient = false;
@@ -1745,7 +1756,7 @@
             return false;
         }
         output->buffers->pushToStash(
-                buffer,
+                drop ? nullptr : buffer,
                 notifyClient,
                 timestamp.peek(),
                 flags,
diff --git a/media/codec2/sfplugin/CCodecBuffers.cpp b/media/codec2/sfplugin/CCodecBuffers.cpp
index e58a1e4..692da58 100644
--- a/media/codec2/sfplugin/CCodecBuffers.cpp
+++ b/media/codec2/sfplugin/CCodecBuffers.cpp
@@ -91,7 +91,9 @@
             newFormat->setInt32(KEY_STRIDE, stride);
             ALOGD("[%s] updating stride = %d", mName, stride);
             if (img->mNumPlanes > 1 && stride > 0) {
-                int32_t vstride = (img->mPlane[1].mOffset - img->mPlane[0].mOffset) / stride;
+                int64_t offsetDelta =
+                    (int64_t)img->mPlane[1].mOffset - (int64_t)img->mPlane[0].mOffset;
+                int32_t vstride = int32_t(offsetDelta / stride);
                 newFormat->setInt32(KEY_SLICE_HEIGHT, vstride);
                 ALOGD("[%s] updating vstride = %d", mName, vstride);
             }
@@ -272,8 +274,6 @@
 
     // The output format can be processed without a registered slot.
     if (outputFormat) {
-        ALOGD("[%s] popFromStashAndRegister: output format changed to %s",
-                mName, outputFormat->debugString().c_str());
         updateSkipCutBuffer(outputFormat, entry.notify);
     }
 
@@ -301,6 +301,10 @@
     }
 
     if (!entry.notify) {
+        if (outputFormat) {
+            ALOGD("[%s] popFromStashAndRegister: output format changed to %s",
+                    mName, outputFormat->debugString().c_str());
+        }
         mPending.pop_front();
         return DISCARD;
     }
@@ -317,6 +321,10 @@
     // Append information from the front stash entry to outBuffer.
     (*outBuffer)->meta()->setInt64("timeUs", entry.timestamp);
     (*outBuffer)->meta()->setInt32("flags", entry.flags);
+    if (outputFormat) {
+        ALOGD("[%s] popFromStashAndRegister: output format changed to %s",
+                mName, outputFormat->debugString().c_str());
+    }
     ALOGV("[%s] popFromStashAndRegister: "
           "out buffer index = %zu [%p] => %p + %zu (%lld)",
           mName, *index, outBuffer->get(),
@@ -487,11 +495,12 @@
     mBuffers.clear();
 }
 
-size_t FlexBuffersImpl::numClientBuffers() const {
+size_t FlexBuffersImpl::numActiveSlots() const {
     return std::count_if(
             mBuffers.begin(), mBuffers.end(),
             [](const Entry &entry) {
-                return (entry.clientBuffer != nullptr);
+                return (entry.clientBuffer != nullptr
+                        || !entry.compBuffer.expired());
             });
 }
 
@@ -637,11 +646,11 @@
     }
 }
 
-size_t BuffersArrayImpl::numClientBuffers() const {
+size_t BuffersArrayImpl::numActiveSlots() const {
     return std::count_if(
             mBuffers.begin(), mBuffers.end(),
             [](const Entry &entry) {
-                return entry.ownedByClient;
+                return entry.ownedByClient || !entry.compBuffer.expired();
             });
 }
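
Both count functions above now treat a slot as active while either side still holds it: the client has not released its buffer, or the component-side C2Buffer is still alive (its weak_ptr has not expired). A minimal sketch of that rule with a simplified Entry:

    #include <algorithm>
    #include <memory>
    #include <vector>

    struct Entry {
        std::shared_ptr<int> clientBuffer;  // stand-in for sp<Codec2Buffer>
        std::weak_ptr<int> compBuffer;      // stand-in for std::weak_ptr<C2Buffer>
    };

    // A slot is "active" while either the client or the component holds on to it.
    size_t numActiveSlots(const std::vector<Entry> &buffers) {
        return std::count_if(buffers.begin(), buffers.end(), [](const Entry &entry) {
            return entry.clientBuffer != nullptr || !entry.compBuffer.expired();
        });
    }
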
 
@@ -691,8 +700,8 @@
     mImpl.flush();
 }
 
-size_t InputBuffersArray::numClientBuffers() const {
-    return mImpl.numClientBuffers();
+size_t InputBuffersArray::numActiveSlots() const {
+    return mImpl.numActiveSlots();
 }
 
 sp<Codec2Buffer> InputBuffersArray::createNewBuffer() {
@@ -729,8 +738,8 @@
     return nullptr;
 }
 
-size_t SlotInputBuffers::numClientBuffers() const {
-    return mImpl.numClientBuffers();
+size_t SlotInputBuffers::numActiveSlots() const {
+    return mImpl.numActiveSlots();
 }
 
 sp<Codec2Buffer> SlotInputBuffers::createNewBuffer() {
@@ -781,8 +790,8 @@
     return std::move(array);
 }
 
-size_t LinearInputBuffers::numClientBuffers() const {
-    return mImpl.numClientBuffers();
+size_t LinearInputBuffers::numActiveSlots() const {
+    return mImpl.numActiveSlots();
 }
 
 // static
@@ -958,8 +967,8 @@
     return std::move(array);
 }
 
-size_t GraphicMetadataInputBuffers::numClientBuffers() const {
-    return mImpl.numClientBuffers();
+size_t GraphicMetadataInputBuffers::numActiveSlots() const {
+    return mImpl.numActiveSlots();
 }
 
 sp<Codec2Buffer> GraphicMetadataInputBuffers::createNewBuffer() {
@@ -1023,8 +1032,8 @@
     return std::move(array);
 }
 
-size_t GraphicInputBuffers::numClientBuffers() const {
-    return mImpl.numClientBuffers();
+size_t GraphicInputBuffers::numActiveSlots() const {
+    return mImpl.numActiveSlots();
 }
 
 sp<Codec2Buffer> GraphicInputBuffers::createNewBuffer() {
@@ -1113,8 +1122,8 @@
     mImpl.getArray(array);
 }
 
-size_t OutputBuffersArray::numClientBuffers() const {
-    return mImpl.numClientBuffers();
+size_t OutputBuffersArray::numActiveSlots() const {
+    return mImpl.numActiveSlots();
 }
 
 void OutputBuffersArray::realloc(const std::shared_ptr<C2Buffer> &c2buffer) {
@@ -1224,8 +1233,8 @@
     return array;
 }
 
-size_t FlexOutputBuffers::numClientBuffers() const {
-    return mImpl.numClientBuffers();
+size_t FlexOutputBuffers::numActiveSlots() const {
+    return mImpl.numActiveSlots();
 }
 
 // LinearOutputBuffers
diff --git a/media/codec2/sfplugin/CCodecBuffers.h b/media/codec2/sfplugin/CCodecBuffers.h
index 0d4fa81..c383a7c 100644
--- a/media/codec2/sfplugin/CCodecBuffers.h
+++ b/media/codec2/sfplugin/CCodecBuffers.h
@@ -33,8 +33,8 @@
 class SkipCutBuffer;
 
 constexpr size_t kLinearBufferSize = 1048576;
-// This can fit 4K RGBA frame, and most likely client won't need more than this.
-constexpr size_t kMaxLinearBufferSize = 4096 * 2304 * 4;
+// This can fit an 8K frame.
+constexpr size_t kMaxLinearBufferSize = 7680 * 4320 * 2;
 
 /**
  * Base class for representation of buffers at one port.
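
For scale, the new ceiling works out as follows (a quick check of the constants above):

    static_assert(7680 * 4320 * 2 == 66355200, "8K frame at 2 bytes per pixel, ~63 MiB");
    static_assert(4096 * 2304 * 4 == 37748736, "previous 4K RGBA limit, 36 MiB");
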
@@ -72,7 +72,7 @@
     /**
      * Return number of buffers the client owns.
      */
-    virtual size_t numClientBuffers() const = 0;
+    virtual size_t numActiveSlots() const = 0;
 
     /**
      * Examine image data from the buffer and update the format if necessary.
@@ -584,7 +584,7 @@
      * Return the number of buffers that are sent to the client but not released
      * yet.
      */
-    size_t numClientBuffers() const;
+    size_t numActiveSlots() const;
 
     /**
      * Return the number of buffers that are sent to the component but not
@@ -705,7 +705,7 @@
      * Return the number of buffers that are sent to the client but not released
      * yet.
      */
-    size_t numClientBuffers() const;
+    size_t numActiveSlots() const;
 
     /**
      * Return the size of the array.
@@ -765,7 +765,7 @@
 
     void flush() override;
 
-    size_t numClientBuffers() const final;
+    size_t numActiveSlots() const final;
 
 protected:
     sp<Codec2Buffer> createNewBuffer() override;
@@ -796,7 +796,7 @@
 
     std::unique_ptr<InputBuffers> toArrayMode(size_t size) final;
 
-    size_t numClientBuffers() const final;
+    size_t numActiveSlots() const final;
 
 protected:
     sp<Codec2Buffer> createNewBuffer() final;
@@ -826,7 +826,7 @@
 
     std::unique_ptr<InputBuffers> toArrayMode(size_t size) override;
 
-    size_t numClientBuffers() const final;
+    size_t numActiveSlots() const final;
 
 protected:
     sp<Codec2Buffer> createNewBuffer() override;
@@ -894,7 +894,7 @@
 
     std::unique_ptr<InputBuffers> toArrayMode(size_t size) final;
 
-    size_t numClientBuffers() const final;
+    size_t numActiveSlots() const final;
 
 protected:
     sp<Codec2Buffer> createNewBuffer() override;
@@ -924,7 +924,7 @@
     std::unique_ptr<InputBuffers> toArrayMode(
             size_t size) final;
 
-    size_t numClientBuffers() const final;
+    size_t numActiveSlots() const final;
 
 protected:
     sp<Codec2Buffer> createNewBuffer() override;
@@ -965,7 +965,7 @@
         array->clear();
     }
 
-    size_t numClientBuffers() const final {
+    size_t numActiveSlots() const final {
         return 0u;
     }
 
@@ -1019,7 +1019,7 @@
 
     void getArray(Vector<sp<MediaCodecBuffer>> *array) const final;
 
-    size_t numClientBuffers() const final;
+    size_t numActiveSlots() const final;
 
     /**
      * Reallocate the array, filled with buffers with the same size as given
@@ -1073,7 +1073,7 @@
 
     std::unique_ptr<OutputBuffersArray> toArrayMode(size_t size) override;
 
-    size_t numClientBuffers() const final;
+    size_t numActiveSlots() const final;
 
     /**
      * Return an appropriate Codec2Buffer object for the type of buffers.
diff --git a/media/codec2/sfplugin/TEST_MAPPING b/media/codec2/sfplugin/TEST_MAPPING
new file mode 100644
index 0000000..045e5b5
--- /dev/null
+++ b/media/codec2/sfplugin/TEST_MAPPING
@@ -0,0 +1,12 @@
+// mappings for frameworks/av/media/codec2/sfplugin
+{
+  "presubmit": [
+    // failing 1 of 11
+    // TODO(b/156167471)
+    // { "name": "ccodec_unit_test" },
+
+    // failing 4 of 17, around max-input-size defaults & overrides
+    // TODO(b/156167471)
+    //{ "name": "mc_sanity_test"}
+  ]
+}
diff --git a/media/codec2/sfplugin/include/media/stagefright/CCodec.h b/media/codec2/sfplugin/include/media/stagefright/CCodec.h
index ecb2506..dbbb5d5 100644
--- a/media/codec2/sfplugin/include/media/stagefright/CCodec.h
+++ b/media/codec2/sfplugin/include/media/stagefright/CCodec.h
@@ -193,7 +193,6 @@
 
     Mutexed<std::unique_ptr<CCodecConfig>> mConfig;
     Mutexed<std::list<std::unique_ptr<C2Work>>> mWorkDoneQueue;
-    std::atomic_flag mSentConfigAfterResume;
 
     friend class CCodecCallbackImpl;
 
diff --git a/media/codec2/sfplugin/tests/Android.bp b/media/codec2/sfplugin/tests/Android.bp
index 8d1a9c3..5c774a2 100644
--- a/media/codec2/sfplugin/tests/Android.bp
+++ b/media/codec2/sfplugin/tests/Android.bp
@@ -1,5 +1,6 @@
 cc_test {
     name: "ccodec_unit_test",
+    test_suites: ["device-tests"],
 
     srcs: [
         "CCodecBuffers_test.cpp",
@@ -43,6 +44,7 @@
 
 cc_test {
     name: "mc_sanity_test",
+    test_suites: ["device-tests"],
 
     srcs: [
         "MediaCodec_sanity_test.cpp",
diff --git a/media/codec2/tests/Android.bp b/media/codec2/tests/Android.bp
index fce6e21..c9169a9 100644
--- a/media/codec2/tests/Android.bp
+++ b/media/codec2/tests/Android.bp
@@ -1,5 +1,6 @@
 cc_test {
     name: "codec2_core_param_test",
+    test_suites: ["device-tests"],
 
     srcs: [
         "C2Param_test.cpp",
@@ -28,6 +29,7 @@
 
 cc_test {
     name: "codec2_vndk_test",
+    test_suites: ["device-tests"],
 
     srcs: [
         "C2_test.cpp",
diff --git a/media/codec2/tests/C2Param_test.cpp b/media/codec2/tests/C2Param_test.cpp
index 564d4d2..bb8130c 100644
--- a/media/codec2/tests/C2Param_test.cpp
+++ b/media/codec2/tests/C2Param_test.cpp
@@ -96,7 +96,7 @@
     const static std::vector<C2FieldDescriptor> _FIELD_LIST;
     static const std::vector<C2FieldDescriptor> FieldList();  // <= needed for C2FieldDescriptor
     const static FD::type_t TYPE = (FD::type_t)(CORE_INDEX | FD::STRUCT_FLAG);
-};
+} C2_PACK;
 
 DEFINE_NO_NAMED_VALUES_FOR(C2SizeStruct)
 
@@ -111,11 +111,13 @@
 
 struct C2TestStruct_A {
     int32_t signed32;
+    // 4-byte padding
     int64_t signed64[2];
     uint32_t unsigned32[1];
+    // 4-byte padding
     uint64_t unsigned64;
     float fp32;
-    C2SizeStruct sz[3];
+    C2SizeStruct sz[3]; // 8-byte structure, but 4-byte aligned
     uint8_t blob[100];
     char string[100];
     bool yesNo[100];
@@ -124,21 +126,21 @@
     static const std::vector<C2FieldDescriptor> FieldList();
     // enum : uint32_t { CORE_INDEX = kParamIndexTest };
     // typedef C2TestStruct_A _type;
-} __attribute__((packed));
+} __attribute__((aligned(4)));
 
 const std::vector<C2FieldDescriptor> C2TestStruct_A::FieldList() {
     return _FIELD_LIST;
 }
 const std::vector<C2FieldDescriptor> C2TestStruct_A::_FIELD_LIST =
     { { FD::INT32,    1, "s32",   0, 4 },
-      { FD::INT64,    2, "s64",   4, 8 },
-      { FD::UINT32,   1, "u32",  20, 4 },
-      { FD::UINT64,   1, "u64",  24, 8 },
-      { FD::FLOAT,    1, "fp",   32, 4 },
-      { C2SizeStruct::TYPE, 3, "size", 36, 8 },
-      { FD::BLOB,   100, "blob", 60, 1 },
-      { FD::STRING, 100, "str", 160, 1 },
-      { FD::BLOB,   100, "y-n", 260, 1 } };
+      { FD::INT64,    2, "s64",   8, 8 },
+      { FD::UINT32,   1, "u32",  24, 4 },
+      { FD::UINT64,   1, "u64",  32, 8 },
+      { FD::FLOAT,    1, "fp",   40, 4 },
+      { C2SizeStruct::TYPE, 3, "size", 44, 8 },
+      { FD::BLOB,   100, "blob", 68, 1 },
+      { FD::STRING, 100, "str", 168, 1 },
+      { FD::BLOB,   100, "y-n", 268, 1 } };
 
 TEST_P(C2ParamTest_ParamFieldList, VerifyStruct) {
     std::vector<C2FieldDescriptor> fields = GetParam(), expected = C2TestStruct_A::_FIELD_LIST;
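
The updated offsets follow from dropping the packed attribute: members fall back to their natural alignment, so each int64_t/uint64_t lands on an 8-byte boundary and padding appears after signed32 and unsigned32. A compile-time sketch of the leading part of the layout (the C2SizeStruct member is omitted here), assuming a 64-bit ABI where alignof(int64_t) == 8:

    #include <cstddef>
    #include <cstdint>

    // Simplified mirror of the front of C2TestStruct_A.
    struct LayoutSketch {
        int32_t signed32;     // offset 0
        // 4 bytes of padding
        int64_t signed64[2];  // offset 8
        uint32_t unsigned32;  // offset 24
        // 4 bytes of padding
        uint64_t unsigned64;  // offset 32
        float fp32;           // offset 40
    };

    static_assert(offsetof(LayoutSketch, signed64) == 8, "int64_t is 8-byte aligned");
    static_assert(offsetof(LayoutSketch, unsigned32) == 24, "follows the 16-byte array");
    static_assert(offsetof(LayoutSketch, unsigned64) == 32, "padded back to 8");
    static_assert(offsetof(LayoutSketch, fp32) == 40, "no padding needed for float");
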
@@ -198,11 +200,13 @@
 
 struct C2TestAStruct {
     int32_t signed32;
+    // 4-byte padding
     int64_t signed64[2];
     uint32_t unsigned32[1];
+    // 4-byte padding
     uint64_t unsigned64;
     float fp32;
-    C2SizeStruct sz[3];
+    C2SizeStruct sz[3]; // 8-byte structure, but 4-byte aligned
     uint8_t blob[100];
     char string[100];
     bool yesNo[100];
@@ -229,11 +233,13 @@
 
 struct C2TestBStruct {
     int32_t signed32;
+    // 4-byte padding
     int64_t signed64[2];
     uint32_t unsigned32[1];
+    // 4-byte padding
     uint64_t unsigned64;
     float fp32;
-    C2SizeStruct sz[3];
+    C2SizeStruct sz[3]; // 8-byte structure, but 4-byte aligned
     uint8_t blob[100];
     char string[100];
     bool yesNo[100];
@@ -286,7 +292,7 @@
         if (fields.size() > 1) {
             EXPECT_EQ(2u, fields.size());
             EXPECT_EQ(C2FieldDescriptor(FD::INT32, 1, "s32", 0, 4), fields[0]);
-            EXPECT_EQ(C2FieldDescriptor(this->FlexType, 0, "flex", 4, this->FLEX_SIZE),
+            EXPECT_EQ(C2FieldDescriptor(this->FlexType, 0, "flex", alignof(TypeParam) /* offset */, this->FLEX_SIZE),
                       fields[1]);
         } else {
             EXPECT_EQ(1u, fields.size());
@@ -392,6 +398,7 @@
 
 struct C2TestStruct_FlexEndS64 {
     int32_t signed32;
+    // 4-byte padding
     int64_t mSigned64Flex[];
 
     const static std::vector<C2FieldDescriptor> _FIELD_LIST;
@@ -406,7 +413,7 @@
 }
 const std::vector<C2FieldDescriptor> C2TestStruct_FlexEndS64::_FIELD_LIST = {
     { FD::INT32, 1, "s32", 0, 4 },
-    { FD::INT64, 0, "flex", 4, 8 },
+    { FD::INT64, 0, "flex", 8, 8 },
 };
 
 struct C2TestFlexS64Struct {
@@ -419,6 +426,7 @@
 
 struct C2TestFlexEndS64Struct {
     int32_t signed32;
+    // 4-byte padding
     int64_t mFlexSigned64[];
     C2TestFlexEndS64Struct() {}
 
@@ -468,7 +476,7 @@
     // enum : uint32_t { CORE_INDEX = C2TestStruct_FlexEndSize, FLEX_SIZE = 8 };
     // typedef C2TestStruct_FlexEndSize _type;
     // typedef C2SizeStruct FlexType;
-};
+} __attribute__((aligned(4)));
 
 const std::vector<C2FieldDescriptor> C2TestStruct_FlexEndSize::FieldList() {
     return _FIELD_LIST;
@@ -539,14 +547,14 @@
 TEST_F(C2ParamTest, FieldId) {
     // pointer constructor
     EXPECT_EQ(_C2FieldId(0, 4), _C2FieldId(&((C2TestStruct_A*)0)->signed32));
-    EXPECT_EQ(_C2FieldId(4, 8), _C2FieldId(&((C2TestStruct_A*)0)->signed64));
-    EXPECT_EQ(_C2FieldId(20, 4), _C2FieldId(&((C2TestStruct_A*)0)->unsigned32));
-    EXPECT_EQ(_C2FieldId(24, 8), _C2FieldId(&((C2TestStruct_A*)0)->unsigned64));
-    EXPECT_EQ(_C2FieldId(32, 4), _C2FieldId(&((C2TestStruct_A*)0)->fp32));
-    EXPECT_EQ(_C2FieldId(36, 8), _C2FieldId(&((C2TestStruct_A*)0)->sz));
-    EXPECT_EQ(_C2FieldId(60, 1), _C2FieldId(&((C2TestStruct_A*)0)->blob));
-    EXPECT_EQ(_C2FieldId(160, 1), _C2FieldId(&((C2TestStruct_A*)0)->string));
-    EXPECT_EQ(_C2FieldId(260, 1), _C2FieldId(&((C2TestStruct_A*)0)->yesNo));
+    EXPECT_EQ(_C2FieldId(8, 8), _C2FieldId(&((C2TestStruct_A*)0)->signed64));
+    EXPECT_EQ(_C2FieldId(24, 4), _C2FieldId(&((C2TestStruct_A*)0)->unsigned32));
+    EXPECT_EQ(_C2FieldId(32, 8), _C2FieldId(&((C2TestStruct_A*)0)->unsigned64));
+    EXPECT_EQ(_C2FieldId(40, 4), _C2FieldId(&((C2TestStruct_A*)0)->fp32));
+    EXPECT_EQ(_C2FieldId(44, 8), _C2FieldId(&((C2TestStruct_A*)0)->sz));
+    EXPECT_EQ(_C2FieldId(68, 1), _C2FieldId(&((C2TestStruct_A*)0)->blob));
+    EXPECT_EQ(_C2FieldId(168, 1), _C2FieldId(&((C2TestStruct_A*)0)->string));
+    EXPECT_EQ(_C2FieldId(268, 1), _C2FieldId(&((C2TestStruct_A*)0)->yesNo));
 
     EXPECT_EQ(_C2FieldId(0, 4), _C2FieldId(&((C2TestFlexEndSizeStruct*)0)->signed32));
     EXPECT_EQ(_C2FieldId(4, 8), _C2FieldId(&((C2TestFlexEndSizeStruct*)0)->mFlexSize));
@@ -556,14 +564,14 @@
 
     // member pointer constructor
     EXPECT_EQ(_C2FieldId(0, 4), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::signed32));
-    EXPECT_EQ(_C2FieldId(4, 8), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::signed64));
-    EXPECT_EQ(_C2FieldId(20, 4), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::unsigned32));
-    EXPECT_EQ(_C2FieldId(24, 8), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::unsigned64));
-    EXPECT_EQ(_C2FieldId(32, 4), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::fp32));
-    EXPECT_EQ(_C2FieldId(36, 8), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::sz));
-    EXPECT_EQ(_C2FieldId(60, 1), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::blob));
-    EXPECT_EQ(_C2FieldId(160, 1), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::string));
-    EXPECT_EQ(_C2FieldId(260, 1), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::yesNo));
+    EXPECT_EQ(_C2FieldId(8, 8), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::signed64));
+    EXPECT_EQ(_C2FieldId(24, 4), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::unsigned32));
+    EXPECT_EQ(_C2FieldId(32, 8), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::unsigned64));
+    EXPECT_EQ(_C2FieldId(40, 4), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::fp32));
+    EXPECT_EQ(_C2FieldId(44, 8), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::sz));
+    EXPECT_EQ(_C2FieldId(68, 1), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::blob));
+    EXPECT_EQ(_C2FieldId(168, 1), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::string));
+    EXPECT_EQ(_C2FieldId(268, 1), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::yesNo));
 
     EXPECT_EQ(_C2FieldId(0, 4), _C2FieldId((C2TestFlexEndSizeStruct*)0, &C2TestFlexEndSizeStruct::signed32));
     EXPECT_EQ(_C2FieldId(4, 8), _C2FieldId((C2TestFlexEndSizeStruct*)0, &C2TestFlexEndSizeStruct::mFlexSize));
@@ -573,14 +581,14 @@
 
     // member pointer sans type pointer
     EXPECT_EQ(_C2FieldId(0, 4), _C2FieldId(&C2TestStruct_A::signed32));
-    EXPECT_EQ(_C2FieldId(4, 8), _C2FieldId(&C2TestStruct_A::signed64));
-    EXPECT_EQ(_C2FieldId(20, 4), _C2FieldId(&C2TestStruct_A::unsigned32));
-    EXPECT_EQ(_C2FieldId(24, 8), _C2FieldId(&C2TestStruct_A::unsigned64));
-    EXPECT_EQ(_C2FieldId(32, 4), _C2FieldId(&C2TestStruct_A::fp32));
-    EXPECT_EQ(_C2FieldId(36, 8), _C2FieldId(&C2TestStruct_A::sz));
-    EXPECT_EQ(_C2FieldId(60, 1), _C2FieldId(&C2TestStruct_A::blob));
-    EXPECT_EQ(_C2FieldId(160, 1), _C2FieldId(&C2TestStruct_A::string));
-    EXPECT_EQ(_C2FieldId(260, 1), _C2FieldId(&C2TestStruct_A::yesNo));
+    EXPECT_EQ(_C2FieldId(8, 8), _C2FieldId(&C2TestStruct_A::signed64));
+    EXPECT_EQ(_C2FieldId(24, 4), _C2FieldId(&C2TestStruct_A::unsigned32));
+    EXPECT_EQ(_C2FieldId(32, 8), _C2FieldId(&C2TestStruct_A::unsigned64));
+    EXPECT_EQ(_C2FieldId(40, 4), _C2FieldId(&C2TestStruct_A::fp32));
+    EXPECT_EQ(_C2FieldId(44, 8), _C2FieldId(&C2TestStruct_A::sz));
+    EXPECT_EQ(_C2FieldId(68, 1), _C2FieldId(&C2TestStruct_A::blob));
+    EXPECT_EQ(_C2FieldId(168, 1), _C2FieldId(&C2TestStruct_A::string));
+    EXPECT_EQ(_C2FieldId(268, 1), _C2FieldId(&C2TestStruct_A::yesNo));
 
     EXPECT_EQ(_C2FieldId(0, 4), _C2FieldId(&C2TestFlexEndSizeStruct::signed32));
     EXPECT_EQ(_C2FieldId(4, 8), _C2FieldId(&C2TestFlexEndSizeStruct::mFlexSize));
@@ -594,14 +602,14 @@
 
     // pointer constructor in C2Param
     EXPECT_EQ(_C2FieldId(8, 4), _C2FieldId(&((C2TestAInfo*)0)->signed32));
-    EXPECT_EQ(_C2FieldId(12, 8), _C2FieldId(&((C2TestAInfo*)0)->signed64));
-    EXPECT_EQ(_C2FieldId(28, 4), _C2FieldId(&((C2TestAInfo*)0)->unsigned32));
-    EXPECT_EQ(_C2FieldId(32, 8), _C2FieldId(&((C2TestAInfo*)0)->unsigned64));
-    EXPECT_EQ(_C2FieldId(40, 4), _C2FieldId(&((C2TestAInfo*)0)->fp32));
-    EXPECT_EQ(_C2FieldId(44, 8), _C2FieldId(&((C2TestAInfo*)0)->sz));
-    EXPECT_EQ(_C2FieldId(68, 1), _C2FieldId(&((C2TestAInfo*)0)->blob));
-    EXPECT_EQ(_C2FieldId(168, 1), _C2FieldId(&((C2TestAInfo*)0)->string));
-    EXPECT_EQ(_C2FieldId(268, 1), _C2FieldId(&((C2TestAInfo*)0)->yesNo));
+    EXPECT_EQ(_C2FieldId(16, 8), _C2FieldId(&((C2TestAInfo*)0)->signed64));
+    EXPECT_EQ(_C2FieldId(32, 4), _C2FieldId(&((C2TestAInfo*)0)->unsigned32));
+    EXPECT_EQ(_C2FieldId(40, 8), _C2FieldId(&((C2TestAInfo*)0)->unsigned64));
+    EXPECT_EQ(_C2FieldId(48, 4), _C2FieldId(&((C2TestAInfo*)0)->fp32));
+    EXPECT_EQ(_C2FieldId(52, 8), _C2FieldId(&((C2TestAInfo*)0)->sz));
+    EXPECT_EQ(_C2FieldId(76, 1), _C2FieldId(&((C2TestAInfo*)0)->blob));
+    EXPECT_EQ(_C2FieldId(176, 1), _C2FieldId(&((C2TestAInfo*)0)->string));
+    EXPECT_EQ(_C2FieldId(276, 1), _C2FieldId(&((C2TestAInfo*)0)->yesNo));
 
     EXPECT_EQ(_C2FieldId(8, 4), _C2FieldId(&((C2TestFlexEndSizeInfo*)0)->m.signed32));
     EXPECT_EQ(_C2FieldId(12, 8), _C2FieldId(&((C2TestFlexEndSizeInfo*)0)->m.mFlexSize));
@@ -611,14 +619,14 @@
 
     // member pointer in C2Param
     EXPECT_EQ(_C2FieldId(8, 4), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::signed32));
-    EXPECT_EQ(_C2FieldId(12, 8), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::signed64));
-    EXPECT_EQ(_C2FieldId(28, 4), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::unsigned32));
-    EXPECT_EQ(_C2FieldId(32, 8), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::unsigned64));
-    EXPECT_EQ(_C2FieldId(40, 4), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::fp32));
-    EXPECT_EQ(_C2FieldId(44, 8), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::sz));
-    EXPECT_EQ(_C2FieldId(68, 1), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::blob));
-    EXPECT_EQ(_C2FieldId(168, 1), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::string));
-    EXPECT_EQ(_C2FieldId(268, 1), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::yesNo));
+    EXPECT_EQ(_C2FieldId(16, 8), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::signed64));
+    EXPECT_EQ(_C2FieldId(32, 4), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::unsigned32));
+    EXPECT_EQ(_C2FieldId(40, 8), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::unsigned64));
+    EXPECT_EQ(_C2FieldId(48, 4), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::fp32));
+    EXPECT_EQ(_C2FieldId(52, 8), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::sz));
+    EXPECT_EQ(_C2FieldId(76, 1), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::blob));
+    EXPECT_EQ(_C2FieldId(176, 1), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::string));
+    EXPECT_EQ(_C2FieldId(276, 1), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::yesNo));
 
     // NOTE: cannot use a member pointer for flex params due to introduction of 'm'
     // EXPECT_EQ(_C2FieldId(8, 4), _C2FieldId(&C2TestFlexEndSizeInfo::m.signed32));
@@ -2328,6 +2336,17 @@
         static_assert(std::is_same<decltype(blobValue->m.value), uint8_t[]>::value, "should be uint8_t[]");
         EXPECT_EQ(0, memcmp(blobValue->m.value, "ABCD\0", 6));
         EXPECT_EQ(6u, blobValue->flexCount());
+        blobValue->setFlexCount(7u); // increasing the count does not change it
+        EXPECT_EQ(6u, blobValue->flexCount());
+        blobValue->setFlexCount(2u); // decreasing the count changes it
+        EXPECT_EQ(2u, blobValue->flexCount());
+        blobValue->setFlexCount(0u); // can decrease to 0 and blob remains valid
+        EXPECT_EQ(0u, blobValue->flexCount());
+        EXPECT_TRUE(*blobValue);
+        blobValue->invalidate(); // flex params can be invalidated => results in 0 size
+        EXPECT_FALSE(*blobValue);
+        EXPECT_EQ(0u, blobValue->size());
+
         std::vector<C2FieldDescriptor> fields = blobValue->FieldList();
         EXPECT_EQ(1u, fields.size());
         EXPECT_EQ(FD::BLOB, fields.cbegin()->type());
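
The new assertions pin down the setFlexCount contract for flexible params: the count may only shrink in place (growing would need a reallocation), it may drop to zero while the param stays valid, and invalidate() leaves a zero-sized, falsy param. A tiny model of the shrink-only rule, not the real C2Param implementation:

    #include <cstddef>

    // Shrink-only counter mirroring the behavior asserted above.
    struct FlexCountModel {
        size_t flexCount;
        void setFlexCount(size_t count) {
            if (count < flexCount) {  // requests to grow are ignored
                flexCount = count;
            }
        }
    };
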
diff --git a/media/codec2/tests/C2UtilTest.cpp b/media/codec2/tests/C2UtilTest.cpp
index 59cd313..2d66df1 100644
--- a/media/codec2/tests/C2UtilTest.cpp
+++ b/media/codec2/tests/C2UtilTest.cpp
@@ -78,7 +78,7 @@
       { "value2", Enum3Value2 },
       { "value4", Enum3Value4 },
       { "invalid", Invalid } });
-    Enum3 e3;
+    Enum3 e3(Invalid);
     C2FieldDescriptor::namedValuesFor(e3);
 
     // upper case
diff --git a/media/codec2/tests/vndk/C2BufferTest.cpp b/media/codec2/tests/vndk/C2BufferTest.cpp
index 780994a..a9f8e17 100644
--- a/media/codec2/tests/vndk/C2BufferTest.cpp
+++ b/media/codec2/tests/vndk/C2BufferTest.cpp
@@ -765,4 +765,54 @@
     }
 }
 
+TEST_F(C2BufferTest, InfoBufferTest) {
+    constexpr size_t kCapacity = 524288u;
+
+    // allocate a linear block
+    std::shared_ptr<C2BlockPool> linearPool(makeLinearBlockPool());
+    std::shared_ptr<C2LinearBlock> linearBlock;
+    ASSERT_EQ(C2_OK, linearPool->fetchLinearBlock(
+            kCapacity,
+            { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE },
+            &linearBlock));
+
+    C2InfoBuffer info = C2InfoBuffer::CreateLinearBuffer(
+            kParamIndexNumber1, linearBlock->share(1024, kCapacity / 2, C2Fence()));
+    std::shared_ptr<C2InfoBuffer> spInfo(new C2InfoBuffer(info));
+    ASSERT_EQ(kParamIndexNumber1, spInfo->index().coreIndex());
+    ASSERT_TRUE(spInfo->index().isGlobal());
+    ASSERT_EQ(C2Param::INFO, spInfo->index().kind());
+    ASSERT_EQ(C2BufferData::LINEAR, spInfo->data().type());
+    ASSERT_EQ(1024, spInfo->data().linearBlocks()[0].offset());
+    ASSERT_EQ(kCapacity / 2, spInfo->data().linearBlocks()[0].size());
+    // handles must actually be identical after sharing into an info buffer
+    ASSERT_EQ(linearBlock->handle(), spInfo->data().linearBlocks()[0].handle());
+    ASSERT_EQ(linearPool->getAllocatorId(), spInfo->data().linearBlocks()[0].getAllocatorId());
+
+    C2InfoBuffer streamInfo = info.asStream(false /* output */, 1u);
+    ASSERT_EQ(kParamIndexNumber1, streamInfo.index().coreIndex());
+    ASSERT_TRUE(streamInfo.index().forStream());
+    ASSERT_TRUE(streamInfo.index().forInput());
+    ASSERT_EQ(1u, streamInfo.index().stream());
+    ASSERT_EQ(C2Param::INFO, streamInfo.index().kind());
+    ASSERT_EQ(C2BufferData::LINEAR, streamInfo.data().type());
+    ASSERT_EQ(1024, streamInfo.data().linearBlocks()[0].offset());
+    ASSERT_EQ(kCapacity / 2, streamInfo.data().linearBlocks()[0].size());
+    // handles must actually be identical after sharing into an info buffer
+    ASSERT_EQ(linearBlock->handle(), streamInfo.data().linearBlocks()[0].handle());
+    ASSERT_EQ(linearPool->getAllocatorId(), streamInfo.data().linearBlocks()[0].getAllocatorId());
+
+    C2InfoBuffer portInfo = streamInfo.asPort(true /* output */);
+    ASSERT_EQ(kParamIndexNumber1, portInfo.index().coreIndex());
+    ASSERT_TRUE(portInfo.index().forPort());
+    ASSERT_TRUE(portInfo.index().forOutput());
+    ASSERT_EQ(C2Param::INFO, portInfo.index().kind());
+    ASSERT_EQ(C2BufferData::LINEAR, portInfo.data().type());
+    ASSERT_EQ(1024, portInfo.data().linearBlocks()[0].offset());
+    ASSERT_EQ(kCapacity / 2, portInfo.data().linearBlocks()[0].size());
+    // handles must actually be identical after sharing into an info buffer
+    ASSERT_EQ(linearBlock->handle(), portInfo.data().linearBlocks()[0].handle());
+    ASSERT_EQ(linearPool->getAllocatorId(), portInfo.data().linearBlocks()[0].getAllocatorId());
+}
+
 } // namespace android
diff --git a/media/codec2/vndk/Android.bp b/media/codec2/vndk/Android.bp
index 6f7acce..60f4736 100644
--- a/media/codec2/vndk/Android.bp
+++ b/media/codec2/vndk/Android.bp
@@ -26,6 +26,7 @@
         "C2AllocatorGralloc.cpp",
         "C2Buffer.cpp",
         "C2Config.cpp",
+        "C2DmaBufAllocator.cpp",
         "C2PlatformStorePluginLoader.cpp",
         "C2Store.cpp",
         "platform/C2BqBuffer.cpp",
@@ -64,6 +65,7 @@
         "libhardware",
         "libhidlbase",
         "libion",
+        "libdmabufheap",
         "libfmq",
         "liblog",
         "libnativewindow",
diff --git a/media/codec2/vndk/C2AllocatorBlob.cpp b/media/codec2/vndk/C2AllocatorBlob.cpp
index 50c9e59..565137c 100644
--- a/media/codec2/vndk/C2AllocatorBlob.cpp
+++ b/media/codec2/vndk/C2AllocatorBlob.cpp
@@ -175,12 +175,12 @@
 }
 
 // static
-bool C2AllocatorBlob::isValid(const C2Handle* const o) {
+bool C2AllocatorBlob::CheckHandle(const C2Handle* const o) {
     size_t capacity;
     // Distinguish C2Handle purely allocated by C2AllocatorGralloc, or one allocated through
     // C2AllocatorBlob, by checking the handle's height is 1, and its format is
     // PixelFormat::BLOB by GetCapacityFromHandle().
-    return C2AllocatorGralloc::isValid(o) && GetCapacityFromHandle(o, &capacity) == C2_OK;
+    return C2AllocatorGralloc::CheckHandle(o) && GetCapacityFromHandle(o, &capacity) == C2_OK;
 }
 
 }  // namespace android
diff --git a/media/codec2/vndk/C2AllocatorGralloc.cpp b/media/codec2/vndk/C2AllocatorGralloc.cpp
index e1e1377..59471a2 100644
--- a/media/codec2/vndk/C2AllocatorGralloc.cpp
+++ b/media/codec2/vndk/C2AllocatorGralloc.cpp
@@ -25,6 +25,7 @@
 #include <hardware/gralloc.h>
 #include <ui/GraphicBufferAllocator.h>
 #include <ui/GraphicBufferMapper.h>
+#include <ui/Rect.h>
 
 #include <C2AllocatorGralloc.h>
 #include <C2Buffer.h>
@@ -103,7 +104,7 @@
     const static uint32_t MAGIC = '\xc2gr\x00';
 
     static
-    const ExtraData* getExtraData(const C2Handle *const handle) {
+    const ExtraData* GetExtraData(const C2Handle *const handle) {
         if (handle == nullptr
                 || native_handle_is_invalid(handle)
                 || handle->numInts < NUM_INTS) {
@@ -114,23 +115,23 @@
     }
 
     static
-    ExtraData *getExtraData(C2Handle *const handle) {
-        return const_cast<ExtraData *>(getExtraData(const_cast<const C2Handle *const>(handle)));
+    ExtraData *GetExtraData(C2Handle *const handle) {
+        return const_cast<ExtraData *>(GetExtraData(const_cast<const C2Handle *const>(handle)));
     }
 
 public:
     void getIgbpData(uint32_t *generation, uint64_t *igbp_id, uint32_t *igbp_slot) const {
-        const ExtraData *ed = getExtraData(this);
+        const ExtraData *ed = GetExtraData(this);
         *generation = ed->generation;
         *igbp_id = unsigned(ed->igbp_id_lo) | uint64_t(unsigned(ed->igbp_id_hi)) << 32;
         *igbp_slot = ed->igbp_slot;
     }
 
-    static bool isValid(const C2Handle *const o) {
+    static bool IsValid(const C2Handle *const o) {
         if (o == nullptr) { // null handle is always valid
             return true;
         }
-        const ExtraData *xd = getExtraData(o);
+        const ExtraData *xd = GetExtraData(o);
         // we cannot validate width/height/format/usage without accessing gralloc driver
         return xd != nullptr && xd->magic == MAGIC;
     }
@@ -152,7 +153,7 @@
         native_handle_t *res = native_handle_create(handle->numFds, handle->numInts + NUM_INTS);
         if (res != nullptr) {
             memcpy(&res->data, &handle->data, sizeof(int) * (handle->numFds + handle->numInts));
-            *getExtraData(res) = xd;
+            *GetExtraData(res) = xd;
         }
         return reinterpret_cast<C2HandleGralloc *>(res);
     }
@@ -180,10 +181,10 @@
     static bool MigrateNativeHandle(
             native_handle_t *handle,
             uint32_t generation, uint64_t igbp_id, uint32_t igbp_slot) {
-        if (handle == nullptr || !isValid(handle)) {
+        if (handle == nullptr || !IsValid(handle)) {
             return false;
         }
-        ExtraData *ed = getExtraData(handle);
+        ExtraData *ed = GetExtraData(handle);
         if (!ed) return false;
         ed->generation = generation;
         ed->igbp_id_lo = uint32_t(igbp_id & 0xFFFFFFFF);
@@ -195,7 +196,7 @@
 
     static native_handle_t* UnwrapNativeHandle(
             const C2Handle *const handle) {
-        const ExtraData *xd = getExtraData(handle);
+        const ExtraData *xd = GetExtraData(handle);
         if (xd == nullptr || xd->magic != MAGIC) {
             return nullptr;
         }
@@ -211,7 +212,7 @@
             uint32_t *width, uint32_t *height, uint32_t *format,
             uint64_t *usage, uint32_t *stride,
             uint32_t *generation, uint64_t *igbp_id, uint32_t *igbp_slot) {
-        const ExtraData *xd = getExtraData(handle);
+        const ExtraData *xd = GetExtraData(handle);
         if (xd == nullptr) {
             return nullptr;
         }
@@ -253,7 +254,7 @@
     virtual ~C2AllocationGralloc() override;
 
     virtual c2_status_t map(
-            C2Rect rect, C2MemoryUsage usage, C2Fence *fence,
+            C2Rect c2Rect, C2MemoryUsage usage, C2Fence *fence,
             C2PlanarLayout *layout /* nonnull */, uint8_t **addr /* nonnull */) override;
     virtual c2_status_t unmap(
             uint8_t **addr /* nonnull */, C2Rect rect, C2Fence *fence /* nullable */) override;
@@ -336,8 +337,12 @@
 }
 
 c2_status_t C2AllocationGralloc::map(
-        C2Rect rect, C2MemoryUsage usage, C2Fence *fence,
+        C2Rect c2Rect, C2MemoryUsage usage, C2Fence *fence,
         C2PlanarLayout *layout /* nonnull */, uint8_t **addr /* nonnull */) {
+    const Rect rect{(int32_t)c2Rect.left, (int32_t)c2Rect.top,
+                    (int32_t)(c2Rect.left + c2Rect.width) /* right */,
+                    (int32_t)(c2Rect.top + c2Rect.height) /* bottom */};
+
     uint64_t grallocUsage = static_cast<C2AndroidMemoryUsage>(usage).asGrallocUsage();
     ALOGV("mapping buffer with usage %#llx => %#llx",
           (long long)usage.expected, (long long)grallocUsage);
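
map() now builds the ui Rect once from the incoming C2Rect instead of repeating the brace initializer at every lock call. Note that the two conventions differ: C2Rect carries (left, top, width, height) while the mapper Rect wants (left, top, right, bottom), so right = left + width and bottom = top + height. A small worked example with a stand-in rect type:

    #include <cstdint>
    #include <cstdio>

    struct SimpleRect { int32_t left, top, right, bottom; };  // stand-in for android::Rect

    SimpleRect toMapperRect(uint32_t left, uint32_t top, uint32_t width, uint32_t height) {
        return SimpleRect{(int32_t)left, (int32_t)top,
                          (int32_t)(left + width),   // right
                          (int32_t)(top + height)};  // bottom
    }

    int main() {
        SimpleRect r = toMapperRect(16, 32, 1920, 1080);
        printf("%d %d %d %d\n", r.left, r.top, r.right, r.bottom);  // 16 32 1936 1112
        return 0;
    }
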
@@ -386,10 +391,7 @@
             void *pointer = nullptr;
             // TODO: fence
             status_t err = GraphicBufferMapper::get().lock(
-                                const_cast<native_handle_t *>(mBuffer), grallocUsage,
-                                { (int32_t)rect.left, (int32_t)rect.top,
-                                  (int32_t)rect.width, (int32_t)rect.height },
-                                &pointer);
+                    const_cast<native_handle_t *>(mBuffer), grallocUsage, rect, &pointer);
             if (err) {
                 ALOGE("failed transaction: lock(RGBA_1010102)");
                 return C2_CORRUPTED;
@@ -464,10 +466,7 @@
             void *pointer = nullptr;
             // TODO: fence
             status_t err = GraphicBufferMapper::get().lock(
-                                const_cast<native_handle_t*>(mBuffer), grallocUsage,
-                                { (int32_t)rect.left, (int32_t)rect.top,
-                                  (int32_t)rect.width, (int32_t)rect.height },
-                                &pointer);
+                    const_cast<native_handle_t*>(mBuffer), grallocUsage, rect, &pointer);
             if (err) {
                 ALOGE("failed transaction: lock(RGBA_8888)");
                 return C2_CORRUPTED;
@@ -524,10 +523,7 @@
             void *pointer = nullptr;
             // TODO: fence
             status_t err = GraphicBufferMapper::get().lock(
-                                const_cast<native_handle_t*>(mBuffer), grallocUsage,
-                                { (int32_t)rect.left, (int32_t)rect.top,
-                                  (int32_t)rect.width, (int32_t)rect.height },
-                                &pointer);
+                    const_cast<native_handle_t*>(mBuffer), grallocUsage, rect, &pointer);
             if (err) {
                 ALOGE("failed transaction: lock(BLOB)");
                 return C2_CORRUPTED;
@@ -544,10 +540,7 @@
             android_ycbcr ycbcrLayout;
 
             status_t err = GraphicBufferMapper::get().lockYCbCr(
-                        const_cast<native_handle_t*>(mBuffer), grallocUsage,
-                        { (int32_t)rect.left, (int32_t)rect.top,
-                          (int32_t)rect.width, (int32_t)rect.height },
-                        &ycbcrLayout);
+                    const_cast<native_handle_t*>(mBuffer), grallocUsage, rect, &ycbcrLayout);
             if (err) {
                 ALOGE("failed transaction: lockYCbCr");
                 return C2_CORRUPTED;
@@ -784,8 +777,9 @@
     return mImpl->status();
 }
 
-bool C2AllocatorGralloc::isValid(const C2Handle* const o) {
-    return C2HandleGralloc::isValid(o);
+// static
+bool C2AllocatorGralloc::CheckHandle(const C2Handle* const o) {
+    return C2HandleGralloc::IsValid(o);
 }
 
 } // namespace android
diff --git a/media/codec2/vndk/C2AllocatorIon.cpp b/media/codec2/vndk/C2AllocatorIon.cpp
index 6d27a02..85623b8 100644
--- a/media/codec2/vndk/C2AllocatorIon.cpp
+++ b/media/codec2/vndk/C2AllocatorIon.cpp
@@ -73,7 +73,7 @@
 };
 
 // static
-bool C2HandleIon::isValid(const C2Handle * const o) {
+bool C2HandleIon::IsValid(const C2Handle * const o) {
     if (!o || memcmp(o, &cHeader, sizeof(cHeader))) {
         return false;
     }
@@ -579,7 +579,7 @@
         return mInit;
     }
 
-    if (!C2HandleIon::isValid(handle)) {
+    if (!C2HandleIon::IsValid(handle)) {
         return C2_BAD_VALUE;
     }
 
@@ -596,9 +596,8 @@
     return ret;
 }
 
-bool C2AllocatorIon::isValid(const C2Handle* const o) {
-    return C2HandleIon::isValid(o);
+bool C2AllocatorIon::CheckHandle(const C2Handle* const o) {
+    return C2HandleIon::IsValid(o);
 }
 
 } // namespace android
-
diff --git a/media/codec2/vndk/C2Buffer.cpp b/media/codec2/vndk/C2Buffer.cpp
index 0b08f31..143355f 100644
--- a/media/codec2/vndk/C2Buffer.cpp
+++ b/media/codec2/vndk/C2Buffer.cpp
@@ -106,6 +106,7 @@
 class BufferDataBuddy : public C2BufferData {
     using C2BufferData::C2BufferData;
     friend class ::C2Buffer;
+    friend class ::C2InfoBuffer;
 };
 
 }  // namespace
@@ -396,26 +397,18 @@
 std::shared_ptr<C2LinearBlock> _C2BlockFactory::CreateLinearBlock(
         const C2Handle *handle) {
     // TODO: get proper allocator? and mutex?
-    static std::unique_ptr<C2Allocator> sAllocator = []{
-        std::unique_ptr<C2Allocator> allocator;
-        if (android::GetPreferredLinearAllocatorId(android::GetCodec2PoolMask()) ==
-                android::C2PlatformAllocatorStore::BLOB) {
-            allocator = std::make_unique<C2AllocatorBlob>(android::C2PlatformAllocatorStore::BLOB);
-        } else {
-            allocator = std::make_unique<C2AllocatorIon>(android::C2PlatformAllocatorStore::ION);
-        }
+    static std::shared_ptr<C2Allocator> sAllocator = []{
+        std::shared_ptr<C2Allocator> allocator;
+        std::shared_ptr<C2AllocatorStore> allocatorStore = android::GetCodec2PlatformAllocatorStore();
+        allocatorStore->fetchAllocator(C2AllocatorStore::DEFAULT_LINEAR, &allocator);
+
         return allocator;
     }();
 
     if (sAllocator == nullptr)
         return nullptr;
 
-    bool isValidHandle = false;
-    if (sAllocator->getId() == android::C2PlatformAllocatorStore::BLOB) {
-        isValidHandle = C2AllocatorBlob::isValid(handle);
-    } else {
-        isValidHandle = C2AllocatorIon::isValid(handle);
-    }
+    bool isValidHandle = sAllocator->checkHandle(handle);
 
     std::shared_ptr<C2LinearAllocation> alloc;
     if (isValidHandle) {
@@ -431,26 +424,18 @@
 std::shared_ptr<C2LinearBlock> _C2BlockFactory::CreateLinearBlock(
         const C2Handle *cHandle, const std::shared_ptr<BufferPoolData> &data) {
     // TODO: get proper allocator? and mutex?
-    static std::unique_ptr<C2Allocator> sAllocator = []{
-        std::unique_ptr<C2Allocator> allocator;
-        if (android::GetPreferredLinearAllocatorId(android::GetCodec2PoolMask()) ==
-                android::C2PlatformAllocatorStore::BLOB) {
-            allocator = std::make_unique<C2AllocatorBlob>(android::C2PlatformAllocatorStore::BLOB);
-        } else {
-            allocator = std::make_unique<C2AllocatorIon>(android::C2PlatformAllocatorStore::ION);
-        }
+    static std::shared_ptr<C2Allocator> sAllocator = []{
+        std::shared_ptr<C2Allocator> allocator;
+        std::shared_ptr<C2AllocatorStore> allocatorStore = android::GetCodec2PlatformAllocatorStore();
+        allocatorStore->fetchAllocator(C2AllocatorStore::DEFAULT_LINEAR, &allocator);
+
         return allocator;
     }();
 
     if (sAllocator == nullptr)
         return nullptr;
 
-    bool isValidHandle = false;
-    if (sAllocator->getId() == android::C2PlatformAllocatorStore::BLOB) {
-        isValidHandle = C2AllocatorBlob::isValid(cHandle);
-    } else {
-        isValidHandle = C2AllocatorIon::isValid(cHandle);
-    }
+    bool isValidHandle = sAllocator->checkHandle(cHandle);
 
     std::shared_ptr<C2LinearAllocation> alloc;
     if (isValidHandle) {
@@ -1148,7 +1133,7 @@
     static std::unique_ptr<C2AllocatorGralloc> sAllocator = std::make_unique<C2AllocatorGralloc>(0);
 
     std::shared_ptr<C2GraphicAllocation> alloc;
-    if (C2AllocatorGralloc::isValid(cHandle)) {
+    if (sAllocator->isValid(cHandle)) {
         c2_status_t err = sAllocator->priorGraphicAllocation(cHandle, &alloc);
         const std::shared_ptr<C2PooledBlockPoolData> poolData =
                 std::make_shared<C2PooledBlockPoolData>(data);
@@ -1185,6 +1170,7 @@
     type_t mType;
     std::vector<C2ConstLinearBlock> mLinearBlocks;
     std::vector<C2ConstGraphicBlock> mGraphicBlocks;
+    friend class C2InfoBuffer;
 };
 
 C2BufferData::C2BufferData(const std::vector<C2ConstLinearBlock> &blocks) : mImpl(new Impl(blocks)) {}
@@ -1200,6 +1186,35 @@
     return mImpl->graphicBlocks();
 }
 
+C2InfoBuffer::C2InfoBuffer(
+    C2Param::Index index, const std::vector<C2ConstLinearBlock> &blocks)
+    : mIndex(index), mData(BufferDataBuddy(blocks)) {
+}
+
+C2InfoBuffer::C2InfoBuffer(
+    C2Param::Index index, const std::vector<C2ConstGraphicBlock> &blocks)
+    : mIndex(index), mData(BufferDataBuddy(blocks)) {
+}
+
+C2InfoBuffer::C2InfoBuffer(
+    C2Param::Index index, const C2BufferData &data)
+    : mIndex(index), mData(data) {
+}
+
+// static
+C2InfoBuffer C2InfoBuffer::CreateLinearBuffer(
+        C2Param::CoreIndex index, const C2ConstLinearBlock &block) {
+    return C2InfoBuffer(index.coreIndex() | C2Param::Index::KIND_INFO | C2Param::Index::DIR_GLOBAL,
+                        { block });
+}
+
+// static
+C2InfoBuffer C2InfoBuffer::CreateGraphicBuffer(
+        C2Param::CoreIndex index, const C2ConstGraphicBlock &block) {
+    return C2InfoBuffer(index.coreIndex() | C2Param::Index::KIND_INFO | C2Param::Index::DIR_GLOBAL,
+                        { block });
+}
+
 class C2Buffer::Impl {
 public:
     Impl(C2Buffer *thiz, const std::vector<C2ConstLinearBlock> &blocks)
@@ -1330,4 +1345,3 @@
 std::shared_ptr<C2Buffer> C2Buffer::CreateGraphicBuffer(const C2ConstGraphicBlock &block) {
     return std::shared_ptr<C2Buffer>(new C2Buffer({ block }));
 }
-
diff --git a/media/codec2/vndk/C2DmaBufAllocator.cpp b/media/codec2/vndk/C2DmaBufAllocator.cpp
new file mode 100644
index 0000000..59e82e2
--- /dev/null
+++ b/media/codec2/vndk/C2DmaBufAllocator.cpp
@@ -0,0 +1,401 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2DmaBufAllocator"
+#include <BufferAllocator/BufferAllocator.h>
+#include <C2Buffer.h>
+#include <C2Debug.h>
+#include <C2DmaBufAllocator.h>
+#include <C2ErrnoUtils.h>
+#include <linux/ion.h>
+#include <sys/mman.h>
+#include <unistd.h>  // getpagesize, size_t, close, dup
+#include <utils/Log.h>
+
+#include <list>
+
+#ifdef __ANDROID_APEX__
+#include <android-base/properties.h>
+#endif
+
+namespace android {
+
+namespace {
+constexpr size_t USAGE_LRU_CACHE_SIZE = 1024;
+}
+
+/* =========================== BUFFER HANDLE =========================== */
+/**
+ * Buffer handle
+ *
+ * Stores dmabuf fd & metadata
+ *
+ * This handle will not capture mapped fd-s as updating that would require a
+ * global mutex.
+ */
+
+struct C2HandleBuf : public C2Handle {
+    C2HandleBuf(int bufferFd, size_t size)
+        : C2Handle(cHeader),
+          mFds{bufferFd},
+          mInts{int(size & 0xFFFFFFFF), int((uint64_t(size) >> 32) & 0xFFFFFFFF), kMagic} {}
+
+    static bool IsValid(const C2Handle* const o);
+
+    int bufferFd() const { return mFds.mBuffer; }
+    size_t size() const {
+        return size_t(unsigned(mInts.mSizeLo)) | size_t(uint64_t(unsigned(mInts.mSizeHi)) << 32);
+    }
+
+   protected:
+    struct {
+        int mBuffer;  // dmabuf fd
+    } mFds;
+    struct {
+        int mSizeLo;  // low 32-bits of size
+        int mSizeHi;  // high 32-bits of size
+        int mMagic;
+    } mInts;
+
+   private:
+    typedef C2HandleBuf _type;
+    enum {
+        kMagic = '\xc2io\x00',
+        numFds = sizeof(mFds) / sizeof(int),
+        numInts = sizeof(mInts) / sizeof(int),
+        version = sizeof(C2Handle)
+    };
+    // constexpr static C2Handle cHeader = { version, numFds, numInts, {} };
+    const static C2Handle cHeader;
+};
+
+const C2Handle C2HandleBuf::cHeader = {
+        C2HandleBuf::version, C2HandleBuf::numFds, C2HandleBuf::numInts, {}};
+
+// static
+bool C2HandleBuf::IsValid(const C2Handle* const o) {
+    if (!o || memcmp(o, &cHeader, sizeof(cHeader))) {
+        return false;
+    }
+    const C2HandleBuf* other = static_cast<const C2HandleBuf*>(o);
+    return other->mInts.mMagic == kMagic;
+}
+
+/* =========================== DMABUF ALLOCATION =========================== */
+class C2DmaBufAllocation : public C2LinearAllocation {
+   public:
+    /* Interface methods */
+    virtual c2_status_t map(size_t offset, size_t size, C2MemoryUsage usage, C2Fence* fence,
+                            void** addr /* nonnull */) override;
+    virtual c2_status_t unmap(void* addr, size_t size, C2Fence* fenceFd) override;
+    virtual ~C2DmaBufAllocation() override;
+    virtual const C2Handle* handle() const override;
+    virtual id_t getAllocatorId() const override;
+    virtual bool equals(const std::shared_ptr<C2LinearAllocation>& other) const override;
+
+    // internal methods
+    C2DmaBufAllocation(BufferAllocator& alloc, size_t size, C2String heap_name, unsigned flags,
+                       C2Allocator::id_t id);
+    C2DmaBufAllocation(size_t size, int shareFd, C2Allocator::id_t id);
+
+    c2_status_t status() const;
+
+   protected:
+    virtual c2_status_t mapInternal(size_t mapSize, size_t mapOffset, size_t alignmentBytes,
+                                    int prot, int flags, void** base, void** addr) {
+        c2_status_t err = C2_OK;
+        *base = mmap(nullptr, mapSize, prot, flags, mHandle.bufferFd(), mapOffset);
+        ALOGV("mmap(size = %zu, prot = %d, flags = %d, mapFd = %d, offset = %zu) "
+              "returned (%d)",
+              mapSize, prot, flags, mHandle.bufferFd(), mapOffset, errno);
+        if (*base == MAP_FAILED) {
+            *base = *addr = nullptr;
+            err = c2_map_errno<EINVAL>(errno);
+        } else {
+            *addr = (uint8_t*)*base + alignmentBytes;
+        }
+        return err;
+    }
+
+    C2Allocator::id_t mId;
+    C2HandleBuf mHandle;
+    c2_status_t mInit;
+    struct Mapping {
+        void* addr;
+        size_t alignmentBytes;
+        size_t size;
+    };
+    std::list<Mapping> mMappings;
+
+    // TODO: we could make this encapsulate shared_ptr and copiable
+    C2_DO_NOT_COPY(C2DmaBufAllocation);
+};
+
+c2_status_t C2DmaBufAllocation::map(size_t offset, size_t size, C2MemoryUsage usage, C2Fence* fence,
+                                    void** addr) {
+    (void)fence;  // TODO: wait for fence
+    *addr = nullptr;
+    if (!mMappings.empty()) {
+        ALOGV("multiple map");
+        // TODO: technically we should return DUPLICATE here, but our block views
+        // don't actually unmap, so we end up remapping the buffer multiple times.
+        //
+        // return C2_DUPLICATE;
+    }
+    if (size == 0) {
+        return C2_BAD_VALUE;
+    }
+
+    int prot = PROT_NONE;
+    int flags = MAP_SHARED;
+    if (usage.expected & C2MemoryUsage::CPU_READ) {
+        prot |= PROT_READ;
+    }
+    if (usage.expected & C2MemoryUsage::CPU_WRITE) {
+        prot |= PROT_WRITE;
+    }
+
+    size_t alignmentBytes = offset % PAGE_SIZE;
+    size_t mapOffset = offset - alignmentBytes;
+    size_t mapSize = size + alignmentBytes;
+    Mapping map = {nullptr, alignmentBytes, mapSize};
+
+    c2_status_t err =
+            mapInternal(mapSize, mapOffset, alignmentBytes, prot, flags, &(map.addr), addr);
+    if (map.addr) {
+        mMappings.push_back(map);
+    }
+    return err;
+}
+
+c2_status_t C2DmaBufAllocation::unmap(void* addr, size_t size, C2Fence* fence) {
+    if (mMappings.empty()) {
+        ALOGD("tried to unmap unmapped buffer");
+        return C2_NOT_FOUND;
+    }
+    for (auto it = mMappings.begin(); it != mMappings.end(); ++it) {
+        if (addr != (uint8_t*)it->addr + it->alignmentBytes ||
+            size + it->alignmentBytes != it->size) {
+            continue;
+        }
+        int err = munmap(it->addr, it->size);
+        if (err != 0) {
+            ALOGD("munmap failed");
+            return c2_map_errno<EINVAL>(errno);
+        }
+        if (fence) {
+            *fence = C2Fence();  // not using fences
+        }
+        (void)mMappings.erase(it);
+        ALOGV("successfully unmapped: %d", mHandle.bufferFd());
+        return C2_OK;
+    }
+    ALOGD("unmap failed to find specified map");
+    return C2_BAD_VALUE;
+}
+
+c2_status_t C2DmaBufAllocation::status() const {
+    return mInit;
+}
+
+C2Allocator::id_t C2DmaBufAllocation::getAllocatorId() const {
+    return mId;
+}
+
+bool C2DmaBufAllocation::equals(const std::shared_ptr<C2LinearAllocation>& other) const {
+    if (!other || other->getAllocatorId() != getAllocatorId()) {
+        return false;
+    }
+    // get user handle to compare objects
+    std::shared_ptr<C2DmaBufAllocation> otherAsBuf =
+            std::static_pointer_cast<C2DmaBufAllocation>(other);
+    return mHandle.bufferFd() == otherAsBuf->mHandle.bufferFd();
+}
+
+const C2Handle* C2DmaBufAllocation::handle() const {
+    return &mHandle;
+}
+
+C2DmaBufAllocation::~C2DmaBufAllocation() {
+    if (!mMappings.empty()) {
+        ALOGD("Dangling mappings!");
+        for (const Mapping& map : mMappings) {
+            int err = munmap(map.addr, map.size);
+            if (err) ALOGD("munmap failed");
+        }
+    }
+    if (mInit == C2_OK) {
+        native_handle_close(&mHandle);
+    }
+}
+
+C2DmaBufAllocation::C2DmaBufAllocation(BufferAllocator& alloc, size_t size, C2String heap_name,
+                                       unsigned flags, C2Allocator::id_t id)
+    : C2LinearAllocation(size), mHandle(-1, 0) {
+    int bufferFd = -1;
+    int ret = 0;
+
+    bufferFd = alloc.Alloc(heap_name, size, flags);
+    if (bufferFd < 0) ret = bufferFd;
+
+    mHandle = C2HandleBuf(bufferFd, size);
+    mId = id;
+    mInit = c2_status_t(c2_map_errno<ENOMEM, EACCES, EINVAL>(ret));
+}
+
+C2DmaBufAllocation::C2DmaBufAllocation(size_t size, int shareFd, C2Allocator::id_t id)
+    : C2LinearAllocation(size), mHandle(-1, 0) {
+    mHandle = C2HandleBuf(shareFd, size);
+    mId = id;
+    mInit = c2_status_t(c2_map_errno<ENOMEM, EACCES, EINVAL>(0));
+}
+
+/* =========================== DMABUF ALLOCATOR =========================== */
+C2DmaBufAllocator::C2DmaBufAllocator(id_t id) : mInit(C2_OK) {
+    C2MemoryUsage minUsage = {0, 0};
+    C2MemoryUsage maxUsage = {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
+    Traits traits = {"android.allocator.dmabuf", id, LINEAR, minUsage, maxUsage};
+    mTraits = std::make_shared<Traits>(traits);
+}
+
+C2Allocator::id_t C2DmaBufAllocator::getId() const {
+    std::lock_guard<std::mutex> lock(mUsageMapperLock);
+    return mTraits->id;
+}
+
+C2String C2DmaBufAllocator::getName() const {
+    std::lock_guard<std::mutex> lock(mUsageMapperLock);
+    return mTraits->name;
+}
+
+std::shared_ptr<const C2Allocator::Traits> C2DmaBufAllocator::getTraits() const {
+    std::lock_guard<std::mutex> lock(mUsageMapperLock);
+    return mTraits;
+}
+
+void C2DmaBufAllocator::setUsageMapper(const UsageMapperFn& mapper __unused, uint64_t minUsage,
+                                       uint64_t maxUsage, uint64_t blockSize) {
+    std::lock_guard<std::mutex> lock(mUsageMapperLock);
+    mUsageMapperCache.clear();
+    mUsageMapperLru.clear();
+    mUsageMapper = mapper;
+    Traits traits = {mTraits->name, mTraits->id, LINEAR, C2MemoryUsage(minUsage),
+                     C2MemoryUsage(maxUsage)};
+    mTraits = std::make_shared<Traits>(traits);
+    mBlockSize = blockSize;
+}
+
+std::size_t C2DmaBufAllocator::MapperKeyHash::operator()(const MapperKey& k) const {
+    return std::hash<uint64_t>{}(k.first) ^ std::hash<size_t>{}(k.second);
+}
+
+c2_status_t C2DmaBufAllocator::mapUsage(C2MemoryUsage usage, size_t capacity, C2String* heap_name,
+                                        unsigned* flags) {
+    std::lock_guard<std::mutex> lock(mUsageMapperLock);
+    c2_status_t res = C2_OK;
+    // align capacity
+    capacity = (capacity + mBlockSize - 1) & ~(mBlockSize - 1);
+    MapperKey key = std::make_pair(usage.expected, capacity);
+    auto entry = mUsageMapperCache.find(key);
+    if (entry == mUsageMapperCache.end()) {
+        if (mUsageMapper) {
+            res = mUsageMapper(usage, capacity, heap_name, flags);
+        } else {
+            // No system-uncached yet, so disabled for now
+            if (0 && !(usage.expected & (C2MemoryUsage::CPU_READ | C2MemoryUsage::CPU_WRITE)))
+                *heap_name = "system-uncached";
+            else
+                *heap_name = "system";
+            *flags = 0;
+            res = C2_NO_INIT;
+        }
+        // add usage to cache
+        MapperValue value = std::make_tuple(*heap_name, *flags, res);
+        mUsageMapperLru.emplace_front(key, value);
+        mUsageMapperCache.emplace(std::make_pair(key, mUsageMapperLru.begin()));
+        if (mUsageMapperCache.size() > USAGE_LRU_CACHE_SIZE) {
+            // remove LRU entry
+            MapperKey lruKey = mUsageMapperLru.front().first;
+            mUsageMapperCache.erase(lruKey);
+            mUsageMapperLru.pop_back();
+        }
+    } else {
+        // move entry to MRU
+        mUsageMapperLru.splice(mUsageMapperLru.begin(), mUsageMapperLru, entry->second);
+        const MapperValue& value = entry->second->second;
+        std::tie(*heap_name, *flags, res) = value;
+    }
+    return res;
+}
+
+c2_status_t C2DmaBufAllocator::newLinearAllocation(
+        uint32_t capacity, C2MemoryUsage usage, std::shared_ptr<C2LinearAllocation>* allocation) {
+    if (allocation == nullptr) {
+        return C2_BAD_VALUE;
+    }
+
+    allocation->reset();
+    if (mInit != C2_OK) {
+        return mInit;
+    }
+
+    C2String heap_name;
+    unsigned flags = 0;
+    c2_status_t ret = mapUsage(usage, capacity, &heap_name, &flags);
+    if (ret && ret != C2_NO_INIT) {
+        return ret;
+    }
+
+    std::shared_ptr<C2DmaBufAllocation> alloc = std::make_shared<C2DmaBufAllocation>(
+            mBufferAllocator, capacity, heap_name, flags, getId());
+    ret = alloc->status();
+    if (ret == C2_OK) {
+        *allocation = alloc;
+    }
+    return ret;
+}
+
+c2_status_t C2DmaBufAllocator::priorLinearAllocation(
+        const C2Handle* handle, std::shared_ptr<C2LinearAllocation>* allocation) {
+    *allocation = nullptr;
+    if (mInit != C2_OK) {
+        return mInit;
+    }
+
+    if (!C2HandleBuf::IsValid(handle)) {
+        return C2_BAD_VALUE;
+    }
+
+    // TODO: get capacity and validate it
+    const C2HandleBuf* h = static_cast<const C2HandleBuf*>(handle);
+    std::shared_ptr<C2DmaBufAllocation> alloc =
+            std::make_shared<C2DmaBufAllocation>(h->size(), h->bufferFd(), getId());
+    c2_status_t ret = alloc->status();
+    if (ret == C2_OK) {
+        *allocation = alloc;
+        native_handle_delete(
+                const_cast<native_handle_t*>(reinterpret_cast<const native_handle_t*>(handle)));
+    }
+    return ret;
+}
+
+// static
+bool C2DmaBufAllocator::CheckHandle(const C2Handle* const o) {
+    return C2HandleBuf::IsValid(o);
+}
+
+}  // namespace android
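
C2DmaBufAllocation::map() above can be asked for an arbitrary offset, but mmap only accepts page-aligned file offsets, so the offset is split into an aligned base plus a remainder and the caller gets back base + remainder. A worked sketch of that arithmetic, assuming a 4096-byte page:

    #include <cstddef>
    #include <cstdio>

    // Split a caller-visible offset into a page-aligned mmap offset (mapOffset)
    // and the remainder (alignmentBytes); the mapping grows by the remainder.
    void splitOffset(size_t offset, size_t size, size_t pageSize,
                     size_t *mapOffset, size_t *mapSize, size_t *alignmentBytes) {
        *alignmentBytes = offset % pageSize;
        *mapOffset = offset - *alignmentBytes;
        *mapSize = size + *alignmentBytes;
    }

    int main() {
        size_t mapOffset, mapSize, alignmentBytes;
        splitOffset(/* offset */ 5000, /* size */ 1024, /* pageSize */ 4096,
                    &mapOffset, &mapSize, &alignmentBytes);
        // offset 5000 -> alignmentBytes 904, mapOffset 4096, mapSize 1928;
        // the address returned to the caller is base + 904.
        printf("%zu %zu %zu\n", mapOffset, mapSize, alignmentBytes);
        return 0;
    }
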
diff --git a/media/codec2/vndk/C2PlatformStorePluginLoader.cpp b/media/codec2/vndk/C2PlatformStorePluginLoader.cpp
index 4c330e5..bee028a 100644
--- a/media/codec2/vndk/C2PlatformStorePluginLoader.cpp
+++ b/media/codec2/vndk/C2PlatformStorePluginLoader.cpp
@@ -33,7 +33,8 @@
 }  // unnamed
 
 C2PlatformStorePluginLoader::C2PlatformStorePluginLoader(const char *libPath)
-    : mCreateBlockPool(nullptr) {
+    : mCreateBlockPool(nullptr),
+      mCreateAllocator(nullptr) {
     mLibHandle = dlopen(libPath, RTLD_NOW | RTLD_NODELETE);
     if (mLibHandle == nullptr) {
         ALOGD("Failed to load library: %s (%s)", libPath, dlerror());
diff --git a/media/codec2/vndk/C2Store.cpp b/media/codec2/vndk/C2Store.cpp
index d16527e..1e907c1 100644
--- a/media/codec2/vndk/C2Store.cpp
+++ b/media/codec2/vndk/C2Store.cpp
@@ -21,6 +21,7 @@
 #include <C2AllocatorBlob.h>
 #include <C2AllocatorGralloc.h>
 #include <C2AllocatorIon.h>
+#include <C2DmaBufAllocator.h>
 #include <C2BufferPriv.h>
 #include <C2BqBufferPriv.h>
 #include <C2Component.h>
@@ -82,6 +83,7 @@
 
     /// returns a shared-singleton ion allocator
     std::shared_ptr<C2Allocator> fetchIonAllocator();
+    std::shared_ptr<C2Allocator> fetchDmaBufAllocator();
 
     /// returns a shared-singleton gralloc allocator
     std::shared_ptr<C2Allocator> fetchGrallocAllocator();
@@ -99,6 +101,20 @@
 C2PlatformAllocatorStoreImpl::C2PlatformAllocatorStoreImpl() {
 }
 
+static bool using_ion(void) {
+    static int cached_result = -1;
+
+    if (cached_result == -1) {
+        struct stat buffer;
+        cached_result = (stat("/dev/ion", &buffer) == 0);
+        if (cached_result)
+            ALOGD("Using ION\n");
+        else
+            ALOGD("Using DMABUF Heaps\n");
+    }
+    return (cached_result == 1);
+}
+
 c2_status_t C2PlatformAllocatorStoreImpl::fetchAllocator(
         id_t id, std::shared_ptr<C2Allocator> *const allocator) {
     allocator->reset();
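
using_ion() above probes /dev/ion once and caches the answer in a static int. An equivalent sketch using a function-local static initialized by a lambda, shown only as an alternative formulation of the same probe:

    #include <sys/stat.h>

    // One-time probe: true if the legacy ION device node exists.
    static bool usingIonSketch() {
        static const bool result = [] {
            struct stat buffer;
            return stat("/dev/ion", &buffer) == 0;
        }();
        return result;
    }
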
@@ -107,8 +123,11 @@
     }
     switch (id) {
     // TODO: should we implement a generic registry for all, and use that?
-    case C2PlatformAllocatorStore::ION:
-        *allocator = fetchIonAllocator();
+    case C2PlatformAllocatorStore::ION: /* also ::DMABUFHEAP */
+        if (using_ion())
+            *allocator = fetchIonAllocator();
+        else
+            *allocator = fetchDmaBufAllocator();
         break;
 
     case C2PlatformAllocatorStore::GRALLOC:
@@ -142,7 +161,9 @@
 namespace {
 
 std::mutex gIonAllocatorMutex;
+std::mutex gDmaBufAllocatorMutex;
 std::weak_ptr<C2AllocatorIon> gIonAllocator;
+std::weak_ptr<C2DmaBufAllocator> gDmaBufAllocator;
 
 void UseComponentStoreForIonAllocator(
         const std::shared_ptr<C2AllocatorIon> allocator,
@@ -197,6 +218,65 @@
     allocator->setUsageMapper(mapper, minUsage, maxUsage, blockSize);
 }
 
+void UseComponentStoreForDmaBufAllocator(const std::shared_ptr<C2DmaBufAllocator> allocator,
+                                         std::shared_ptr<C2ComponentStore> store) {
+    C2DmaBufAllocator::UsageMapperFn mapper;
+    const size_t maxHeapNameLen = 128;
+    uint64_t minUsage = 0;
+    uint64_t maxUsage = C2MemoryUsage(C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE).expected;
+    size_t blockSize = getpagesize();
+
+    // query min and max usage as well as block size via supported values
+    std::unique_ptr<C2StoreDmaBufUsageInfo> usageInfo;
+    usageInfo = C2StoreDmaBufUsageInfo::AllocUnique(maxHeapNameLen);
+
+    std::vector<C2FieldSupportedValuesQuery> query = {
+            C2FieldSupportedValuesQuery::Possible(C2ParamField::Make(*usageInfo, usageInfo->m.usage)),
+            C2FieldSupportedValuesQuery::Possible(
+                    C2ParamField::Make(*usageInfo, usageInfo->m.capacity)),
+    };
+    c2_status_t res = store->querySupportedValues_sm(query);
+    if (res == C2_OK) {
+        if (query[0].status == C2_OK) {
+            const C2FieldSupportedValues& fsv = query[0].values;
+            if (fsv.type == C2FieldSupportedValues::FLAGS && !fsv.values.empty()) {
+                minUsage = fsv.values[0].u64;
+                maxUsage = 0;
+                for (C2Value::Primitive v : fsv.values) {
+                    maxUsage |= v.u64;
+                }
+            }
+        }
+        if (query[1].status == C2_OK) {
+            const C2FieldSupportedValues& fsv = query[1].values;
+            if (fsv.type == C2FieldSupportedValues::RANGE && fsv.range.step.u32 > 0) {
+                blockSize = fsv.range.step.u32;
+            }
+        }
+
+        mapper = [store](C2MemoryUsage usage, size_t capacity, C2String* heapName,
+                         unsigned* flags) -> c2_status_t {
+            if (capacity > UINT32_MAX) {
+                return C2_BAD_VALUE;
+            }
+
+            std::unique_ptr<C2StoreDmaBufUsageInfo> usageInfo;
+            usageInfo = C2StoreDmaBufUsageInfo::AllocUnique(maxHeapNameLen, usage.expected, capacity);
+            std::vector<std::unique_ptr<C2SettingResult>> failures;  // TODO: remove
+
+            c2_status_t res = store->config_sm({&*usageInfo}, &failures);
+            if (res == C2_OK) {
+                *heapName = C2String(usageInfo->m.heapName);
+                *flags = usageInfo->m.allocFlags;
+            }
+
+            return res;
+        };
+    }
+
+    allocator->setUsageMapper(mapper, minUsage, maxUsage, blockSize);
+}
+
 }
 
 void C2PlatformAllocatorStoreImpl::setComponentStore(std::shared_ptr<C2ComponentStore> store) {
@@ -233,6 +313,22 @@
     return allocator;
 }
 
+std::shared_ptr<C2Allocator> C2PlatformAllocatorStoreImpl::fetchDmaBufAllocator() {
+    std::lock_guard<std::mutex> lock(gDmaBufAllocatorMutex);
+    std::shared_ptr<C2DmaBufAllocator> allocator = gDmaBufAllocator.lock();
+    if (allocator == nullptr) {
+        std::shared_ptr<C2ComponentStore> componentStore;
+        {
+            std::lock_guard<std::mutex> lock(_mComponentStoreReadLock);
+            componentStore = _mComponentStore;
+        }
+        allocator = std::make_shared<C2DmaBufAllocator>(C2PlatformAllocatorStore::DMABUFHEAP);
+        UseComponentStoreForDmaBufAllocator(allocator, componentStore);
+        gDmaBufAllocator = allocator;
+    }
+    return allocator;
+}
+
 std::shared_ptr<C2Allocator> C2PlatformAllocatorStoreImpl::fetchBlobAllocator() {
     static std::mutex mutex;
     static std::weak_ptr<C2Allocator> blobAllocator;
@@ -347,7 +443,7 @@
             allocatorId = GetPreferredLinearAllocatorId(GetCodec2PoolMask());
         }
         switch(allocatorId) {
-            case C2PlatformAllocatorStore::ION:
+            case C2PlatformAllocatorStore::ION: /* also ::DMABUFHEAP */
                 res = allocatorStore->fetchAllocator(
                         C2PlatformAllocatorStore::ION, &allocator);
                 if (res == C2_OK) {
@@ -645,6 +741,7 @@
 
     struct Interface : public C2InterfaceHelper {
         std::shared_ptr<C2StoreIonUsageInfo> mIonUsageInfo;
+        std::shared_ptr<C2StoreDmaBufUsageInfo> mDmaBufUsageInfo;
 
         Interface(std::shared_ptr<C2ReflectorHelper> reflector)
             : C2InterfaceHelper(reflector) {
@@ -680,7 +777,13 @@
                     me.set().minAlignment = 0;
 #endif
                     return C2R::Ok();
-                }
+                };
+
+                static C2R setDmaBufUsage(bool /* mayBlock */, C2P<C2StoreDmaBufUsageInfo> &me) {
+                    strncpy(me.set().m.heapName, "system", me.v.flexCount());
+                    me.set().m.allocFlags = 0;
+                    return C2R::Ok();
+                };
             };
 
             addParameter(
@@ -695,6 +798,18 @@
                 })
                 .withSetter(Setter::setIonUsage)
                 .build());
+
+            addParameter(
+                DefineParam(mDmaBufUsageInfo, "dmabuf-usage")
+                .withDefault(C2StoreDmaBufUsageInfo::AllocShared(0))
+                .withFields({
+                    C2F(mDmaBufUsageInfo, m.usage).flags({C2MemoryUsage::CPU_READ | C2MemoryUsage::CPU_WRITE}),
+                    C2F(mDmaBufUsageInfo, m.capacity).inRange(0, UINT32_MAX, 1024),
+                    C2F(mDmaBufUsageInfo, m.allocFlags).flags({}),
+                    C2F(mDmaBufUsageInfo, m.heapName).any(),
+                })
+                .withSetter(Setter::setDmaBufUsage)
+                .build());
         }
     };
 
diff --git a/media/codec2/vndk/include/C2AllocatorBlob.h b/media/codec2/vndk/include/C2AllocatorBlob.h
index 89ce949..fc67af7 100644
--- a/media/codec2/vndk/include/C2AllocatorBlob.h
+++ b/media/codec2/vndk/include/C2AllocatorBlob.h
@@ -44,7 +44,12 @@
 
     virtual ~C2AllocatorBlob() override;
 
-    static bool isValid(const C2Handle* const o);
+    virtual bool checkHandle(const C2Handle* const o) const override { return CheckHandle(o); }
+
+    static bool CheckHandle(const C2Handle* const o);
+
+    // deprecated
+    static bool isValid(const C2Handle* const o) { return CheckHandle(o); }
 
 private:
     std::shared_ptr<const Traits> mTraits;
diff --git a/media/codec2/vndk/include/C2AllocatorGralloc.h b/media/codec2/vndk/include/C2AllocatorGralloc.h
index ee7524e..578cf76 100644
--- a/media/codec2/vndk/include/C2AllocatorGralloc.h
+++ b/media/codec2/vndk/include/C2AllocatorGralloc.h
@@ -84,7 +84,12 @@
 
     virtual ~C2AllocatorGralloc() override;
 
-    static bool isValid(const C2Handle* const o);
+    virtual bool checkHandle(const C2Handle* const o) const override { return CheckHandle(o); }
+
+    static bool CheckHandle(const C2Handle* const o);
+
+    // deprecated
+    static bool isValid(const C2Handle* const o) { return CheckHandle(o); }
 
 private:
     class Impl;
diff --git a/media/codec2/vndk/include/C2AllocatorIon.h b/media/codec2/vndk/include/C2AllocatorIon.h
index 1b2051f..6a49b7d 100644
--- a/media/codec2/vndk/include/C2AllocatorIon.h
+++ b/media/codec2/vndk/include/C2AllocatorIon.h
@@ -57,7 +57,12 @@
 
     virtual ~C2AllocatorIon() override;
 
-    static bool isValid(const C2Handle* const o);
+    virtual bool checkHandle(const C2Handle* const o) const override { return CheckHandle(o); }
+
+    static bool CheckHandle(const C2Handle* const o);
+
+    // deprecated
+    static bool isValid(const C2Handle* const o) { return CheckHandle(o); }
 
     /**
      * Updates the usage mapper for subsequent new allocations, as well as the supported
diff --git a/media/codec2/vndk/include/C2DmaBufAllocator.h b/media/codec2/vndk/include/C2DmaBufAllocator.h
new file mode 100644
index 0000000..abb8307
--- /dev/null
+++ b/media/codec2/vndk/include/C2DmaBufAllocator.h
@@ -0,0 +1,112 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef STAGEFRIGHT_CODEC2_ALLOCATOR_BUF_H_
+#define STAGEFRIGHT_CODEC2_ALLOCATOR_BUF_H_
+
+#include <BufferAllocator/BufferAllocator.h>
+#include <C2Buffer.h>
+#include <sys/stat.h>  // stat
+
+#include <functional>
+#include <list>
+#include <mutex>
+#include <tuple>
+#include <unordered_map>
+
+namespace android {
+
+class C2DmaBufAllocator : public C2Allocator {
+   public:
+    virtual c2_status_t newLinearAllocation(
+            uint32_t capacity, C2MemoryUsage usage,
+            std::shared_ptr<C2LinearAllocation>* allocation) override;
+
+    virtual c2_status_t priorLinearAllocation(
+            const C2Handle* handle, std::shared_ptr<C2LinearAllocation>* allocation) override;
+
+    C2DmaBufAllocator(id_t id);
+
+    virtual c2_status_t status() const { return mInit; }
+
+    virtual bool checkHandle(const C2Handle* const o) const override { return CheckHandle(o); }
+
+    static bool CheckHandle(const C2Handle* const o);
+
+    virtual id_t getId() const override;
+
+    virtual C2String getName() const override;
+
+    virtual std::shared_ptr<const Traits> getTraits() const override;
+
+    // Usage mapper function used by the allocator
+    //   (usage, capacity) => (heapName, flags)
+    //
+    // capacity is aligned to the default block-size (defaults to page size) to
+    // reduce caching overhead
+    typedef std::function<c2_status_t(C2MemoryUsage, size_t,
+                                      /* => */ C2String*, unsigned*)>
+            UsageMapperFn;
+
+    /**
+     * Updates the usage mapper for subsequent new allocations, as well as the
+     * supported minimum and maximum usage masks and default block-size to use
+     * for the mapper.
+     *
+     * \param mapper          This method is called to map Codec 2.0 buffer usage
+     *                        to dmabuf heap name and flags required by the dma
+     *                        buf heap device
+     *
+     * \param minUsage        Minimum buffer usage required for supported
+     *                        allocations (defaults to 0)
+     *
+     * \param maxUsage        Maximum buffer usage supported by the dmabuf
+     *                        allocator (defaults to SW_READ | SW_WRITE)
+     *
+     * \param blockSize       Alignment used prior to calling |mapper| for the
+     *                        buffer capacity. This also helps reduce the size of
+     *                        cache required for caching mapper results.
+     *                        (defaults to the page size)
+     */
+    void setUsageMapper(const UsageMapperFn& mapper, uint64_t minUsage, uint64_t maxUsage,
+                        uint64_t blockSize);
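+
+    // Illustrative sketch only (not part of this API): a trivial mapper that
+    // always selects the system heap, consistent with the UsageMapperFn
+    // signature above, could be installed as
+    //
+    //   C2DmaBufAllocator::UsageMapperFn mapper =
+    //           [](C2MemoryUsage usage, size_t capacity,
+    //              C2String* heapName, unsigned* flags) -> c2_status_t {
+    //       (void)usage; (void)capacity;
+    //       *heapName = "system";
+    //       *flags = 0;
+    //       return C2_OK;
+    //   };
+    //   allocator->setUsageMapper(mapper, 0 /* minUsage */, ~0ull /* maxUsage */,
+    //                             getpagesize() /* blockSize */);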
+
+   private:
+    c2_status_t mInit;
+    BufferAllocator mBufferAllocator;
+
+    c2_status_t mapUsage(C2MemoryUsage usage, size_t size,
+                         /* => */ C2String* heap_name, unsigned* flags);
+
+    // this locks mTraits, mBlockSize, mUsageMapper, mUsageMapperLru and
+    // mUsageMapperCache
+    mutable std::mutex mUsageMapperLock;
+    std::shared_ptr<const Traits> mTraits;
+    size_t mBlockSize;
+    UsageMapperFn mUsageMapper;
+    typedef std::pair<uint64_t, size_t> MapperKey;
+    struct MapperKeyHash {
+        std::size_t operator()(const MapperKey&) const;
+    };
+    typedef std::tuple<C2String, unsigned, c2_status_t> MapperValue;
+    typedef std::pair<MapperKey, MapperValue> MapperKeyValue;
+    typedef std::list<MapperKeyValue>::iterator MapperKeyValuePointer;
+    std::list<MapperKeyValue> mUsageMapperLru;
+    std::unordered_map<MapperKey, MapperKeyValuePointer, MapperKeyHash> mUsageMapperCache;
+};
+}  // namespace android
+
+#endif  // STAGEFRIGHT_CODEC2_ALLOCATOR_BUF_H_
diff --git a/media/codec2/vndk/include/C2PlatformSupport.h b/media/codec2/vndk/include/C2PlatformSupport.h
index a14e0d3..4814494 100644
--- a/media/codec2/vndk/include/C2PlatformSupport.h
+++ b/media/codec2/vndk/include/C2PlatformSupport.h
@@ -47,6 +47,17 @@
          */
         ION = PLATFORM_START,
 
+        /**
+         * ID of the DMA-Buf Heap (ion replacement) backed platform allocator.
+         *
+         * C2Handle consists of:
+         *   fd  shared dmabuf buffer handle
+         *   int size (lo 32 bits)
+         *   int size (hi 32 bits)
+         *   int magic '\xc2io\x00'
+         */
+        DMABUFHEAP = ION,
+
         /**
          * ID of the gralloc backed platform allocator.
          *
diff --git a/media/codec2/vndk/internal/C2HandleIonInternal.h b/media/codec2/vndk/internal/C2HandleIonInternal.h
index c0e1d83..c67698c 100644
--- a/media/codec2/vndk/internal/C2HandleIonInternal.h
+++ b/media/codec2/vndk/internal/C2HandleIonInternal.h
@@ -28,7 +28,10 @@
           mFds{ bufferFd },
           mInts{ int(size & 0xFFFFFFFF), int((uint64_t(size) >> 32) & 0xFFFFFFFF), kMagic } { }
 
-    static bool isValid(const C2Handle * const o);
+    static bool IsValid(const C2Handle * const o);
+
+    // deprecated
+    static bool isValid(const C2Handle * const o) { return IsValid(o); }
 
     int bufferFd() const { return mFds.mBuffer; }
     size_t size() const {
diff --git a/media/codec2/vndk/platform/C2BqBuffer.cpp b/media/codec2/vndk/platform/C2BqBuffer.cpp
index 62936f6..fff12c4 100644
--- a/media/codec2/vndk/platform/C2BqBuffer.cpp
+++ b/media/codec2/vndk/platform/C2BqBuffer.cpp
@@ -223,7 +223,7 @@
     static std::unique_ptr<C2AllocatorGralloc> sAllocator = std::make_unique<C2AllocatorGralloc>(0);
 
     std::shared_ptr<C2GraphicAllocation> alloc;
-    if (C2AllocatorGralloc::isValid(handle)) {
+    if (C2AllocatorGralloc::CheckHandle(handle)) {
         uint32_t width;
         uint32_t height;
         uint32_t format;
diff --git a/media/codecs/g711/decoder/Android.bp b/media/codecs/g711/decoder/Android.bp
new file mode 100644
index 0000000..efff60b
--- /dev/null
+++ b/media/codecs/g711/decoder/Android.bp
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+cc_library_static {
+    name: "codecs_g711dec",
+    vendor_available: true,
+    host_supported: true,
+
+    srcs: [
+        "g711DecAlaw.cpp",
+        "g711DecMlaw.cpp",
+    ],
+
+    export_include_dirs: ["."],
+
+    cflags: ["-Werror"],
+
+    sanitize: {
+        misc_undefined: [
+            "signed-integer-overflow",
+            "unsigned-integer-overflow",
+        ],
+        cfi: true,
+    },
+    apex_available: ["com.android.media.swcodec"],
+    min_sdk_version: "29",
+
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
+}
diff --git a/media/codecs/g711/decoder/g711Dec.h b/media/codecs/g711/decoder/g711Dec.h
new file mode 100644
index 0000000..ca357a5
--- /dev/null
+++ b/media/codecs/g711/decoder/g711Dec.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef G711_DEC_H_
+#define G711_DEC_H_
+
+/**
+ * @file g711Dec.h
+ * @brief g711 Decoder API: DecodeALaw and DecodeMLaw
+ */
+
+/** Decodes input bytes of size inSize according to ALAW
+ *
+ * @param [out] out <tt>int16_t*</tt>: output buffer to be filled with decoded samples.
+ * @param [in] in <tt>const uint8_t*</tt>: input buffer containing bytes to be decoded.
+ * @param [in] inSize <tt>size_t</tt>: size of the input buffer.
+ */
+void DecodeALaw(int16_t *out, const uint8_t *in, size_t inSize);
+
+/** Decodes input bytes of size inSize according to MLAW
+ *
+ * @param [out] out <tt>int16_t*</tt>: output buffer to be filled with decoded samples.
+ * @param [in] in <tt>const uint8_t*</tt>: input buffer containing bytes to be decoded.
+ * @param [in] inSize <tt>size_t</tt>: size of the input buffer.
+ */
+void DecodeMLaw(int16_t *out, const uint8_t *in, size_t inSize);
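+
+/* Illustrative usage (a sketch, not part of this header):
+ *
+ *   uint8_t alaw[160];                // hypothetical A-law encoded input
+ *   int16_t pcm[160];                 // one 16-bit sample per input byte
+ *   DecodeALaw(pcm, alaw, sizeof(alaw));
+ */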
+
+#endif  // G711_DEC_H_
diff --git a/media/codecs/g711/decoder/g711DecAlaw.cpp b/media/codecs/g711/decoder/g711DecAlaw.cpp
new file mode 100644
index 0000000..e41a7b4
--- /dev/null
+++ b/media/codecs/g711/decoder/g711DecAlaw.cpp
@@ -0,0 +1,45 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <stddef.h>
+#include <stdint.h>
+
+void DecodeALaw(int16_t *out, const uint8_t *in, size_t inSize) {
+  if (out != nullptr && in != nullptr) {
+    while (inSize > 0) {
+      inSize--;
+      int32_t x = *in++;
+
+      int32_t ix = x ^ 0x55;
+      ix &= 0x7f;
+
+      int32_t iexp = ix >> 4;
+      int32_t mant = ix & 0x0f;
+
+      if (iexp > 0) {
+        mant += 16;
+      }
+
+      mant = (mant << 4) + 8;
+
+      if (iexp > 1) {
+        mant = mant << (iexp - 1);
+      }
+
+      *out++ = (x > 127) ? mant : -mant;
+    }
+  }
+}
diff --git a/media/codecs/g711/decoder/g711DecMlaw.cpp b/media/codecs/g711/decoder/g711DecMlaw.cpp
new file mode 100644
index 0000000..bb2caea
--- /dev/null
+++ b/media/codecs/g711/decoder/g711DecMlaw.cpp
@@ -0,0 +1,38 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <stddef.h>
+#include <stdint.h>
+
+void DecodeMLaw(int16_t *out, const uint8_t *in, size_t inSize) {
+  if (out != nullptr && in != nullptr) {
+    while (inSize > 0) {
+      inSize--;
+      int32_t x = *in++;
+
+      int32_t mantissa = ~x;
+      int32_t exponent = (mantissa >> 4) & 7;
+      int32_t segment = exponent + 1;
+      mantissa &= 0x0f;
+
+      int32_t step = 4 << segment;
+
+      int32_t abs = (0x80l << exponent) + step * mantissa + step / 2 - 4 * 33;
+
+      *out++ = (x < 0x80) ? -abs : abs;
+    }
+  }
+}
diff --git a/media/codecs/g711/fuzzer/Android.bp b/media/codecs/g711/fuzzer/Android.bp
new file mode 100644
index 0000000..ff5efa9
--- /dev/null
+++ b/media/codecs/g711/fuzzer/Android.bp
@@ -0,0 +1,56 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+cc_fuzz {
+    name: "g711alaw_dec_fuzzer",
+    host_supported: true,
+    srcs: [
+        "g711_dec_fuzzer.cpp",
+    ],
+    static_libs: [
+        "codecs_g711dec",
+    ],
+    cflags: [
+        "-DALAW",
+    ],
+    fuzz_config: {
+        cc: [
+            "android-media-fuzzing-reports@google.com",
+        ],
+        componentid: 155276,
+    },
+}
+
+cc_fuzz {
+    name: "g711mlaw_dec_fuzzer",
+    host_supported: true,
+    srcs: [
+        "g711_dec_fuzzer.cpp",
+    ],
+    static_libs: [
+        "codecs_g711dec",
+    ],
+    fuzz_config: {
+        cc: [
+            "android-media-fuzzing-reports@google.com",
+        ],
+        componentid: 155276,
+    },
+}
diff --git a/media/codecs/g711/fuzzer/README.md b/media/codecs/g711/fuzzer/README.md
new file mode 100644
index 0000000..0c1c36b
--- /dev/null
+++ b/media/codecs/g711/fuzzer/README.md
@@ -0,0 +1,49 @@
+# Fuzzer for libstagefright_g711dec decoder
+
+## Plugin Design Considerations
+The fuzzer plugin for G711 is designed based on the understanding of the
+codec and tries to achieve the following:
+
+##### Maximize code coverage
+G711 supports two types of decoding:
+1. DecodeALaw
+2. DecodeMLaw
+
+These two decoder APIs are fuzzed separately using g711alaw_dec_fuzzer and
+g711mlaw_dec_fuzzer, respectively.
+
+##### Maximize utilization of input data
+The plugin feeds the entire input data to the codec, as expected by the decoder API.
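+
+For reference, a compressed sketch of the harness (the complete source is
+g711_dec_fuzzer.cpp; the `std::vector` output buffer here is an illustrative
+simplification):
+
+```
+#include <cstdint>
+#include <vector>
+
+#include "g711Dec.h"
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+  if (size == 0) return 0;
+  std::vector<int16_t> out(size);  // one 16-bit sample per input byte
+#ifdef ALAW
+  DecodeALaw(out.data(), data, size);
+#else
+  DecodeMLaw(out.data(), data, size);
+#endif
+  return 0;
+}
+```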
+
+## Build
+
+This describes steps to build the g711alaw_dec_fuzzer and g711mlaw_dec_fuzzer binaries.
+
+### Android
+
+#### Steps to build
+Build the fuzzer
+```
+  $ mm -j$(nproc) g711alaw_dec_fuzzer
+  $ mm -j$(nproc) g711mlaw_dec_fuzzer
+```
+
+#### Steps to run
+Create a directory CORPUS_DIR and copy some g711 files to that folder.
+Push this directory to device.
+
+To run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/g711alaw_dec_fuzzer/g711alaw_dec_fuzzer CORPUS_DIR
+  $ adb shell /data/fuzz/arm64/g711mlaw_dec_fuzzer/g711mlaw_dec_fuzzer CORPUS_DIR
+```
+To run on host
+```
+  $ $ANDROID_HOST_OUT/fuzz/x86_64/g711alaw_dec_fuzzer/g711alaw_dec_fuzzer CORPUS_DIR
+  $ $ANDROID_HOST_OUT/fuzz/x86_64/g711mlaw_dec_fuzzer/g711mlaw_dec_fuzzer CORPUS_DIR
+```
+
+## References:
+ * http://llvm.org/docs/LibFuzzer.html
+ * https://github.com/google/oss-fuzz
diff --git a/media/codecs/g711/fuzzer/g711_dec_fuzzer.cpp b/media/codecs/g711/fuzzer/g711_dec_fuzzer.cpp
new file mode 100644
index 0000000..adfbcf5
--- /dev/null
+++ b/media/codecs/g711/fuzzer/g711_dec_fuzzer.cpp
@@ -0,0 +1,58 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+#include <stdint.h>
+#include <stdlib.h>
+#include <string.h>
+#include "g711Dec.h"
+
+class Codec {
+ public:
+  Codec() = default;
+  ~Codec() = default;
+  void decodeFrames(const uint8_t *data, size_t size);
+};
+
+void Codec::decodeFrames(const uint8_t *data, size_t size) {
+  // One 16-bit sample is produced per input byte, so `size` elements suffice.
+  int16_t *out = new int16_t[size];
+  if (!out) {
+    return;
+  }
+#ifdef ALAW
+  DecodeALaw(out, data, size);
+#else
+  DecodeMLaw(out, data, size);
+#endif
+  delete[] out;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+  if (size < 1) {
+    return 0;
+  }
+  Codec *codec = new Codec();
+  if (!codec) {
+    return 0;
+  }
+  codec->decodeFrames(data, size);
+  delete codec;
+  return 0;
+}
diff --git a/media/extractors/Android.bp b/media/extractors/Android.bp
index 7c4e62f..f9abfe3 100644
--- a/media/extractors/Android.bp
+++ b/media/extractors/Android.bp
@@ -21,7 +21,6 @@
 
     shared_libs: [
         "liblog",
-        "libmediandk#29",
     ],
 
     // extractors are supposed to work on Q(29)
@@ -39,6 +38,21 @@
 
     version_script: "exports.lds",
 
+    target: {
+        android: {
+            shared_libs: [
+                "libmediandk#29",
+            ],
+        },
+        host: {
+            static_libs: [
+                "libutils",
+                "libmediandk_format",
+                "libmedia_ndkformatpriv",
+            ],
+        },
+    },
+
     sanitize: {
         cfi: true,
         misc_undefined: [
diff --git a/media/extractors/TEST_MAPPING b/media/extractors/TEST_MAPPING
index abefb0f..4984b8f 100644
--- a/media/extractors/TEST_MAPPING
+++ b/media/extractors/TEST_MAPPING
@@ -1,5 +1,6 @@
 {
   "presubmit": [
+
     // TODO(b/153661591) enable test once the bug is fixed
     // This tests the extractor path
     // {
@@ -13,5 +14,14 @@
     //      }
     //    ]
     //  }
+  ],
+
+  // tests which require dynamic content
+  // invoke with: atest -- --enable-module-dynamic-download=true
+  // TODO(b/148094059): unit tests not allowed to download content
+  "dynamic-presubmit": [
+    { "name": "ExtractorUnitTest" }
   ]
+
+
 }
diff --git a/media/extractors/aac/Android.bp b/media/extractors/aac/Android.bp
index 60d3ae1..c036bb5 100644
--- a/media/extractors/aac/Android.bp
+++ b/media/extractors/aac/Android.bp
@@ -10,4 +10,11 @@
         "libutils",
     ],
 
+    host_supported: true,
+
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
 }
diff --git a/media/extractors/amr/Android.bp b/media/extractors/amr/Android.bp
index 49c9567..440065f 100644
--- a/media/extractors/amr/Android.bp
+++ b/media/extractors/amr/Android.bp
@@ -8,4 +8,10 @@
         "libstagefright_foundation",
     ],
 
+    host_supported: true,
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    }
 }
diff --git a/media/extractors/flac/Android.bp b/media/extractors/flac/Android.bp
index 826c1a0..2593000 100644
--- a/media/extractors/flac/Android.bp
+++ b/media/extractors/flac/Android.bp
@@ -21,4 +21,12 @@
         "libutils",
     ],
 
+    host_supported: true,
+
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
+
 }
diff --git a/media/extractors/fuzzers/Android.bp b/media/extractors/fuzzers/Android.bp
new file mode 100644
index 0000000..e900e57
--- /dev/null
+++ b/media/extractors/fuzzers/Android.bp
@@ -0,0 +1,336 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+cc_defaults {
+    name: "extractor-fuzzerbase-defaults",
+
+    local_include_dirs: [
+        "include",
+    ],
+
+    export_include_dirs: [
+        "include",
+    ],
+
+    static_libs: [
+        "liblog",
+        "libstagefright_foundation",
+        "libmediandk_format",
+        "libmedia_ndkformatpriv",
+    ],
+
+    shared_libs: [
+        "libutils",
+        "libbinder",
+        "libbase",
+        "libcutils",
+    ],
+
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
+}
+
+cc_defaults {
+    name: "extractor-fuzzer-defaults",
+    defaults: ["extractor-fuzzerbase-defaults"],
+
+    static_libs: [
+        "libextractorfuzzerbase",
+    ],
+
+    fuzz_config: {
+        cc: [
+            "android-media-fuzzing-reports@google.com",
+        ],
+        componentid: 155276,
+    },
+}
+
+cc_defaults {
+    name: "mpeg2-extractor-fuzzer-defaults",
+    defaults: ["extractor-fuzzer-defaults"],
+    host_supported: true,
+
+    include_dirs: [
+        "frameworks/av/media/extractors/mpeg2",
+        "frameworks/av/media/libstagefright",
+    ],
+
+    static_libs: [
+        "libstagefright_foundation_without_imemory",
+        "libstagefright_mpeg2support",
+        "libstagefright_mpeg2extractor",
+        "libstagefright_esds",
+        "libmpeg2extractor",
+        "libmedia_helper",
+    ],
+
+    shared_libs: [
+        "android.hardware.cas@1.0",
+        "android.hardware.cas.native@1.0",
+        "android.hidl.token@1.0-utils",
+        "android.hidl.allocator@1.0",
+        "libcrypto",
+        "libhidlmemory",
+        "libhidlbase",
+    ],
+}
+
+cc_library_static {
+    name: "libextractorfuzzerbase",
+    defaults: ["extractor-fuzzerbase-defaults"],
+    host_supported: true,
+
+    srcs: [
+        "ExtractorFuzzerBase.cpp",
+    ],
+}
+
+cc_fuzz {
+    name: "mp4_extractor_fuzzer",
+    defaults: ["extractor-fuzzer-defaults"],
+    host_supported: true,
+
+    srcs: [
+        "mp4_extractor_fuzzer.cpp",
+    ],
+
+    include_dirs: [
+        "frameworks/av/media/extractors/mp4",
+    ],
+
+    header_libs: [
+        "libaudioclient_headers",
+    ],
+
+    static_libs: [
+        "libstagefright_id3",
+        "libstagefright_esds",
+        "libmp4extractor",
+    ],
+
+    dictionary: "mp4_extractor_fuzzer.dict",
+}
+
+cc_fuzz {
+    name: "wav_extractor_fuzzer",
+    defaults: ["extractor-fuzzer-defaults"],
+    host_supported: true,
+
+    srcs: [
+        "wav_extractor_fuzzer.cpp",
+    ],
+
+    include_dirs: [
+        "frameworks/av/media/extractors/wav",
+    ],
+
+    static_libs: [
+        "libfifo",
+        "libwavextractor",
+    ],
+
+    shared_libs: [
+        "libbinder_ndk",
+    ],
+}
+
+cc_fuzz {
+    name: "amr_extractor_fuzzer",
+    defaults: ["extractor-fuzzer-defaults"],
+    host_supported: true,
+
+    srcs: [
+        "amr_extractor_fuzzer.cpp",
+    ],
+
+    include_dirs: [
+        "frameworks/av/media/extractors/amr",
+    ],
+
+    static_libs: [
+        "libamrextractor",
+    ],
+
+    dictionary: "amr_extractor_fuzzer.dict",
+}
+
+cc_fuzz {
+    name: "mkv_extractor_fuzzer",
+    defaults: ["extractor-fuzzer-defaults"],
+    host_supported: true,
+
+    srcs: [
+        "mkv_extractor_fuzzer.cpp",
+    ],
+
+    include_dirs: [
+        "frameworks/av/media/extractors/mkv",
+    ],
+
+    static_libs: [
+        "libwebm",
+        "libstagefright_flacdec",
+        "libstagefright_metadatautils",
+        "libmkvextractor",
+        "libFLAC",
+    ],
+
+    dictionary: "mkv_extractor_fuzzer.dict",
+}
+
+cc_fuzz {
+    name: "ogg_extractor_fuzzer",
+    defaults: ["extractor-fuzzer-defaults"],
+    host_supported: true,
+
+    srcs: [
+        "ogg_extractor_fuzzer.cpp",
+    ],
+
+    include_dirs: [
+        "frameworks/av/media/extractors/ogg",
+    ],
+
+    static_libs: [
+        "libstagefright_metadatautils",
+        "libvorbisidec",
+        "liboggextractor",
+    ],
+
+    dictionary: "ogg_extractor_fuzzer.dict",
+}
+
+cc_fuzz {
+    name: "mpeg2ps_extractor_fuzzer",
+    defaults: ["mpeg2-extractor-fuzzer-defaults"],
+
+    srcs: [
+        "mpeg2_extractor_fuzzer.cpp",
+    ],
+
+    cflags: [
+        "-DMPEG2PS",
+    ],
+
+    dictionary: "mpeg2ps_extractor_fuzzer.dict",
+}
+
+cc_fuzz {
+    name: "mpeg2ts_extractor_fuzzer",
+    defaults: ["mpeg2-extractor-fuzzer-defaults"],
+
+    srcs: [
+        "mpeg2_extractor_fuzzer.cpp",
+    ],
+
+    dictionary: "mpeg2ts_extractor_fuzzer.dict",
+}
+
+cc_fuzz {
+    name: "mp3_extractor_fuzzer",
+    defaults: ["extractor-fuzzer-defaults"],
+    host_supported: true,
+
+    srcs: [
+        "mp3_extractor_fuzzer.cpp",
+    ],
+
+    include_dirs: [
+        "frameworks/av/media/extractors/mp3",
+    ],
+
+    static_libs: [
+        "libfifo",
+        "libmp3extractor",
+        "libstagefright_id3",
+    ],
+}
+
+cc_fuzz {
+    name: "aac_extractor_fuzzer",
+    defaults: ["extractor-fuzzer-defaults"],
+    host_supported: true,
+
+    srcs: [
+        "aac_extractor_fuzzer.cpp",
+    ],
+
+    include_dirs: [
+        "frameworks/av/media/extractors/aac",
+    ],
+
+    static_libs: [
+        "libaacextractor",
+        "libstagefright_metadatautils",
+    ],
+}
+
+cc_fuzz {
+    name: "flac_extractor_fuzzer",
+    defaults: ["extractor-fuzzer-defaults"],
+    host_supported: true,
+
+    srcs: [
+        "flac_extractor_fuzzer.cpp",
+    ],
+
+    include_dirs: [
+        "frameworks/av/media/extractors/flac",
+    ],
+
+    static_libs: [
+        "libstagefright_metadatautils",
+        "libFLAC",
+        "libflacextractor",
+    ],
+
+    shared_libs: [
+        "libbinder_ndk",
+    ],
+
+    dictionary: "flac_extractor_fuzzer.dict",
+}
+
+cc_fuzz {
+    name: "midi_extractor_fuzzer",
+    defaults: ["extractor-fuzzer-defaults"],
+
+    srcs: [
+        "midi_extractor_fuzzer.cpp",
+    ],
+
+    include_dirs: [
+        "frameworks/av/media/extractors/midi",
+    ],
+
+    static_libs: [
+        "libsonivox",
+        "libmedia_midiiowrapper",
+        "libmidiextractor",
+        "libwatchdog",
+    ],
+
+    dictionary: "midi_extractor_fuzzer.dict",
+
+    host_supported: true,
+}
diff --git a/media/extractors/fuzzers/ExtractorFuzzerBase.cpp b/media/extractors/fuzzers/ExtractorFuzzerBase.cpp
new file mode 100644
index 0000000..1be8466
--- /dev/null
+++ b/media/extractors/fuzzers/ExtractorFuzzerBase.cpp
@@ -0,0 +1,201 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ExtractorFuzzerBase"
+#include <utils/Log.h>
+
+#include "ExtractorFuzzerBase.h"
+
+using namespace android;
+
+bool ExtractorFuzzerBase::setDataSource(const uint8_t* data, size_t size) {
+  if ((!data) || (size == 0)) {
+    return false;
+  }
+  mBufferSource = new BufferSource(data, size);
+  mDataSource = reinterpret_cast<DataSource*>(mBufferSource.get());
+  if (!mDataSource) {
+    return false;
+  }
+  return true;
+}
+
+void ExtractorFuzzerBase::getExtractorDef() {
+  float confidence;
+  void* meta = nullptr;
+  FreeMetaFunc freeMeta = nullptr;
+
+  ExtractorDef extractorDef = GETEXTRACTORDEF();
+  if (extractorDef.def_version == EXTRACTORDEF_VERSION_NDK_V1) {
+    extractorDef.u.v2.sniff(mDataSource->wrap(), &confidence, &meta, &freeMeta);
+  } else if (extractorDef.def_version == EXTRACTORDEF_VERSION_NDK_V2) {
+    extractorDef.u.v3.sniff(mDataSource->wrap(), &confidence, &meta, &freeMeta);
+  }
+
+  if (meta != nullptr && freeMeta != nullptr) {
+    freeMeta(meta);
+  }
+}
+
+void ExtractorFuzzerBase::extractTracks() {
+  MediaBufferGroup* bufferGroup = new MediaBufferGroup();
+  if (!bufferGroup) {
+    return;
+  }
+  size_t trackCount = mExtractor->countTracks();
+  for (size_t trackIndex = 0; trackIndex < trackCount; ++trackIndex) {
+    MediaTrackHelper* track = mExtractor->getTrack(trackIndex);
+    if (!track) {
+      continue;
+    }
+    extractTrack(track, bufferGroup);
+    delete track;
+  }
+  delete bufferGroup;
+}
+
+void ExtractorFuzzerBase::extractTrack(MediaTrackHelper* track, MediaBufferGroup* bufferGroup) {
+  CMediaTrack* cTrack = wrap(track);
+  if (!cTrack) {
+    return;
+  }
+
+  media_status_t status = cTrack->start(track, bufferGroup->wrap());
+  if (status != AMEDIA_OK) {
+    free(cTrack);
+    return;
+  }
+
+  do {
+    MediaBufferHelper* buffer = nullptr;
+    status = track->read(&buffer);
+    if (buffer) {
+      buffer->release();
+    }
+  } while (status == AMEDIA_OK);
+
+  cTrack->stop(track);
+  free(cTrack);
+}
+
+void ExtractorFuzzerBase::getTracksMetadata() {
+  AMediaFormat* format = AMediaFormat_new();
+  uint32_t flags = MediaExtractorPluginHelper::kIncludeExtensiveMetaData;
+
+  size_t trackCount = mExtractor->countTracks();
+  for (size_t trackIndex = 0; trackIndex < trackCount; ++trackIndex) {
+    mExtractor->getTrackMetaData(format, trackIndex, flags);
+  }
+
+  AMediaFormat_delete(format);
+}
+
+void ExtractorFuzzerBase::getMetadata() {
+  AMediaFormat* format = AMediaFormat_new();
+  mExtractor->getMetaData(format);
+  AMediaFormat_delete(format);
+}
+
+void ExtractorFuzzerBase::setDataSourceFlags(uint32_t flags) {
+  mBufferSource->setFlags(flags);
+}
+
+void ExtractorFuzzerBase::seekAndExtractTracks() {
+  MediaBufferGroup* bufferGroup = new MediaBufferGroup();
+  if (!bufferGroup) {
+    return;
+  }
+  size_t trackCount = mExtractor->countTracks();
+  for (size_t trackIndex = 0; trackIndex < trackCount; ++trackIndex) {
+    MediaTrackHelper* track = mExtractor->getTrack(trackIndex);
+    if (!track) {
+      continue;
+    }
+
+    AMediaFormat* trackMetaData = AMediaFormat_new();
+    int64_t trackDuration = 0;
+    uint32_t flags = MediaExtractorPluginHelper::kIncludeExtensiveMetaData;
+    mExtractor->getTrackMetaData(trackMetaData, trackIndex, flags);
+    AMediaFormat_getInt64(trackMetaData, AMEDIAFORMAT_KEY_DURATION, &trackDuration);
+
+    seekAndExtractTrack(track, bufferGroup, trackDuration);
+    AMediaFormat_delete(trackMetaData);
+    delete track;
+  }
+  delete bufferGroup;
+}
+
+void ExtractorFuzzerBase::seekAndExtractTrack(MediaTrackHelper* track,
+                                              MediaBufferGroup* bufferGroup,
+                                              int64_t trackDuration) {
+  CMediaTrack* cTrack = wrap(track);
+  if (!cTrack) {
+    return;
+  }
+
+  media_status_t status = cTrack->start(track, bufferGroup->wrap());
+  if (status != AMEDIA_OK) {
+    free(cTrack);
+    return;
+  }
+
+  int32_t seekCount = 0;
+  std::vector<int64_t> seekToTimeStamp;
+  while (seekCount <= kFuzzerMaxSeekPointsCount) {
+    /* This ensures kFuzzerMaxSeekPointsCount seek points are within the clipDuration and 1 seek
+     * point is outside of the clipDuration.
+     */
+    int64_t timeStamp = (seekCount * trackDuration) / (kFuzzerMaxSeekPointsCount - 1);
+    seekToTimeStamp.push_back(timeStamp);
+    seekCount++;
+  }
+
+  std::vector<uint32_t> seekOptions;
+  seekOptions.push_back(CMediaTrackReadOptions::SEEK | CMediaTrackReadOptions::SEEK_CLOSEST);
+  seekOptions.push_back(CMediaTrackReadOptions::SEEK | CMediaTrackReadOptions::SEEK_CLOSEST_SYNC);
+  seekOptions.push_back(CMediaTrackReadOptions::SEEK | CMediaTrackReadOptions::SEEK_PREVIOUS_SYNC);
+  seekOptions.push_back(CMediaTrackReadOptions::SEEK | CMediaTrackReadOptions::SEEK_NEXT_SYNC);
+  seekOptions.push_back(CMediaTrackReadOptions::SEEK | CMediaTrackReadOptions::SEEK_FRAME_INDEX);
+
+  for (uint32_t seekOption : seekOptions) {
+    for (int64_t seekPts : seekToTimeStamp) {
+      MediaTrackHelper::ReadOptions* options =
+          new MediaTrackHelper::ReadOptions(seekOption, seekPts);
+      MediaBufferHelper* buffer = nullptr;
+      track->read(&buffer, options);
+      if (buffer) {
+        buffer->release();
+      }
+      delete options;
+    }
+  }
+
+  cTrack->stop(track);
+  free(cTrack);
+}
+
+void ExtractorFuzzerBase::processData(const uint8_t* data, size_t size) {
+  if (setDataSource(data, size)) {
+    if (createExtractor()) {
+      getExtractorDef();
+      getMetadata();
+      extractTracks();
+      getTracksMetadata();
+      seekAndExtractTracks();
+    }
+  }
+}
diff --git a/media/extractors/fuzzers/README.md b/media/extractors/fuzzers/README.md
new file mode 100644
index 0000000..fb1d52f
--- /dev/null
+++ b/media/extractors/fuzzers/README.md
@@ -0,0 +1,362 @@
+# Fuzzer for extractors
+
+## Table of contents
++ [libextractorfuzzerbase](#ExtractorFuzzerBase)
++ [libmp4extractor](#mp4ExtractorFuzzer)
++ [libwavextractor](#wavExtractorFuzzer)
++ [libamrextractor](#amrExtractorFuzzer)
++ [libmkvextractor](#mkvExtractorFuzzer)
++ [liboggextractor](#oggExtractorFuzzer)
++ [libmpeg2extractor](#mpeg2ExtractorFuzzer)
++ [libmp3extractor](#mp3ExtractorFuzzer)
++ [libaacextractor](#aacExtractorFuzzer)
++ [libflacextractor](#flacExtractor)
++ [libmidiextractor](#midiExtractorFuzzer)
+
+# <a name="ExtractorFuzzerBase"></a> Fuzzer for libextractorfuzzerbase
+All the extractors share a common set of APIs - creating a data source,
+extracting all the tracks, etc. These common APIs have been abstracted into a
+base class called `ExtractorFuzzerBase` to ensure code is reused between
+fuzzer plugins.
+
+Additionally, `ExtractorFuzzerBase` also has support for a memory-based buffer
+source, `BufferSource`, since the fuzzing engine feeds data using memory
+buffers and usage of standard data source objects like FileSource, HTTPSource,
+etc. is not feasible.
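+
+As a rough illustration (the `Foo` names below are placeholders, not classes in
+the tree), a plugin built on this base typically only implements
+`createExtractor`:
+
+```
+#include "ExtractorFuzzerBase.h"
+#include "FooExtractor.h"  // hypothetical extractor header
+
+using namespace android;
+
+class FooExtractorFuzzer : public ExtractorFuzzerBase {
+ public:
+  bool createExtractor() {
+    mExtractor = new FooExtractor(new DataSourceHelper(mDataSource->wrap()));
+    return mExtractor != nullptr;
+  }
+};
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+  if ((!data) || (size == 0)) {
+    return 0;
+  }
+  FooExtractorFuzzer extractor;
+  extractor.processData(data, size);
+  return 0;
+}
+```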
+
+# <a name="mp4ExtractorFuzzer"></a> Fuzzer for libmp4extractor
+
+## Plugin Design Considerations
+The fuzzer plugin for MP4 extractor uses the `ExtractorFuzzerBase` class and
+implements only the `createExtractor` to create the MP4 extractor class.
+
+##### Maximize code coverage
+A dict file (dictionary file) is created for MP4 to ensure that the required MP4
+atoms are present in every input file that goes to the fuzzer.
+This ensures that more code gets covered, as a range of MP4 atoms will be
+present in the input data.
+
+
+## Build
+
+This describes steps to build the mp4_extractor_fuzzer binary.
+
+### Android
+
+#### Steps to build
+Build the fuzzer
+```
+  $ mm -j$(nproc) mp4_extractor_fuzzer
+```
+
+#### Steps to run
+Create a directory CORPUS_DIR and copy some MP4 files to that folder.
+Push this directory to device.
+
+To run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/mp4_extractor_fuzzer/mp4_extractor_fuzzer CORPUS_DIR
+```
+
+# <a name="wavExtractorFuzzer"></a> Fuzzer for libwavextractor
+
+## Plugin Design Considerations
+The fuzzer plugin for WAV extractor uses the `ExtractorFuzzerBase` class and
+implements only the `createExtractor` to create the WAV extractor class.
+
+
+## Build
+
+This describes steps to build the wav_extractor_fuzzer binary.
+
+### Android
+
+#### Steps to build
+Build the fuzzer
+```
+  $ mm -j$(nproc) wav_extractor_fuzzer
+```
+
+#### Steps to run
+Create a directory CORPUS_DIR and copy some wav files to that folder.
+Push this directory to device.
+
+To run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/wav_extractor_fuzzer/wav_extractor_fuzzer CORPUS_DIR
+```
+
+# <a name="amrExtractorFuzzer"></a> Fuzzer for libamrextractor
+
+## Plugin Design Considerations
+The fuzzer plugin for AMR extractor uses the `ExtractorFuzzerBase` class and
+implements only the `createExtractor` to create the AMR extractor class.
+
+##### Maximize code coverage
+A dict file (dictionary file) is created for AMR to ensure that the required start
+bytes are present in every input file that goes to the fuzzer.
+This ensures that more code gets covered.
+
+
+## Build
+
+This describes steps to build the amr_extractor_fuzzer binary.
+
+### Android
+
+#### Steps to build
+Build the fuzzer
+```
+  $ mm -j$(nproc) amr_extractor_fuzzer
+```
+
+#### Steps to run
+Create a directory CORPUS_DIR and copy some AMR files to that folder.
+Push this directory to device.
+
+To run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/amr_extractor_fuzzer/amr_extractor_fuzzer CORPUS_DIR
+```
+
+# <a name="mkvExtractorFuzzer"></a> Fuzzer for libmkvextractor
+
+## Plugin Design Considerations
+The fuzzer plugin for MKV extractor uses the `ExtractorFuzzerBase` class and
+implements only the `createExtractor` to create the MKV extractor class.
+
+##### Maximize code coverage
+A dict file (dictionary file) is created for MKV to ensure that the required element
+IDs are present in every input file that goes to the fuzzer.
+This ensures that more code gets covered.
+
+
+## Build
+
+This describes steps to build the mkv_extractor_fuzzer binary.
+
+### Android
+
+#### Steps to build
+Build the fuzzer
+```
+  $ mm -j$(nproc) mkv_extractor_fuzzer
+```
+
+#### Steps to run
+Create a directory CORPUS_DIR and copy some mkv files to that folder.
+Push this directory to device.
+
+To run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/mkv_extractor_fuzzer/mkv_extractor_fuzzer CORPUS_DIR
+```
+
+# <a name="oggExtractorFuzzer"></a> Fuzzer for liboggextractor
+
+## Plugin Design Considerations
+The fuzzer plugin for OGG extractor uses the `ExtractorFuzzerBase` class and
+implements only the `createExtractor` to create the OGG extractor object.
+
+##### Maximize code coverage
+A dict file (dictionary file) is created for OGG to ensure that the required start
+bytes are present in every input file that goes to the fuzzer.
+This ensures that more code gets covered.
+
+
+## Build
+
+This describes steps to build the ogg_extractor_fuzzer binary.
+
+### Android
+
+#### Steps to build
+Build the fuzzer
+```
+  $ mm -j$(nproc) ogg_extractor_fuzzer
+```
+
+#### Steps to run
+Create a directory CORPUS_DIR and copy some ogg files to that folder.
+Push this directory to device.
+
+To run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/ogg_extractor_fuzzer/ogg_extractor_fuzzer CORPUS_DIR
+```
+
+# <a name="mpeg2ExtractorFuzzer"></a> Fuzzer for libmpeg2extractor
+
+## Plugin Design Considerations
+The fuzzer plugins for MPEG2-PS and MPEG2-TS extractor use the `ExtractorFuzzerBase` class and
+implement only the `createExtractor` to create the MPEG2-PS or MPEG2-TS extractor
+object respectively.
+
+##### Maximize code coverage
+Dict files (dictionary files) are created for MPEG2-PS and MPEG2-TS to ensure that the
+required start bytes are present in every input file that goes to the fuzzer.
+This ensures that more code gets covered.
+
+##### Other considerations
+Two fuzzer binaries - mpeg2ps_extractor_fuzzer and mpeg2ts_extractor_fuzzer - are
+generated based on the presence of the `MPEG2PS` flag.
+
+
+## Build
+
+This describes steps to build the mpeg2ps_extractor_fuzzer and mpeg2ts_extractor_fuzzer binaries.
+
+### Android
+
+#### Steps to build
+Build the fuzzer
+```
+  $ mm -j$(nproc) mpeg2ps_extractor_fuzzer
+  $ mm -j$(nproc) mpeg2ts_extractor_fuzzer
+```
+
+#### Steps to run
+Create a directory CORPUS_DIR and copy some mpeg2 files to that folder.
+Push this directory to device.
+
+To run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/mpeg2ps_extractor_fuzzer/mpeg2ps_extractor_fuzzer CORPUS_DIR
+  $ adb shell /data/fuzz/arm64/mpeg2ts_extractor_fuzzer/mpeg2ts_extractor_fuzzer CORPUS_DIR
+```
+
+# <a name="mp3ExtractorFuzzer"></a> Fuzzer for libmp3extractor
+
+## Plugin Design Considerations
+The fuzzer plugin for MP3 extractor uses the `ExtractorFuzzerBase` class and
+implements only the `createExtractor` to create the MP3 extractor class.
+
+
+## Build
+
+This describes steps to build the mp3_extractor_fuzzer binary.
+
+### Android
+
+#### Steps to build
+Build the fuzzer
+```
+  $ mm -j$(nproc) mp3_extractor_fuzzer
+```
+
+#### Steps to run
+Create a directory CORPUS_DIR and copy some mp3 files to that folder.
+Push this directory to device.
+
+To run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/mp3_extractor_fuzzer/mp3_extractor_fuzzer CORPUS_DIR
+```
+
+# <a name="aacExtractorFuzzer"></a> Fuzzer for libaacextractor
+
+## Plugin Design Considerations
+The fuzzer plugin for AAC extractor uses the `ExtractorFuzzerBase` class and
+implements only the `createExtractor` to create the AAC extractor class.
+
+
+## Build
+
+This describes steps to build the aac_extractor_fuzzer binary.
+
+### Android
+
+#### Steps to build
+Build the fuzzer
+```
+  $ mm -j$(nproc) aac_extractor_fuzzer
+```
+
+#### Steps to run
+Create a directory CORPUS_DIR and copy some aac files to that folder.
+Push this directory to device.
+
+To run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/aac_extractor_fuzzer/aac_extractor_fuzzer CORPUS_DIR
+```
+
+# <a name="flacExtractor"></a> Fuzzer for libflacextractor
+
+## Plugin Design Considerations
+The fuzzer plugin for FLAC extractor uses the `ExtractorFuzzerBase` class and
+implements only the `createExtractor` to create the FLAC extractor object.
+
+##### Maximize code coverage
+A dict file (dictionary file) is created for FLAC to ensure that the required start
+bytes are present in every input file that goes to the fuzzer.
+This ensures that more code gets covered.
+
+
+## Build
+
+This describes steps to build the flac_extractor_fuzzer binary.
+
+### Android
+
+#### Steps to build
+Build the fuzzer
+```
+  $ mm -j$(nproc) flac_extractor_fuzzer
+```
+
+#### Steps to run
+Create a directory CORPUS_DIR and copy some flac files to that folder.
+Push this directory to device.
+
+To run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/flac_extractor_fuzzer/flac_extractor_fuzzer CORPUS_DIR
+```
+
+# <a name="midiExtractorFuzzer"></a> Fuzzer for libmidiextractor
+
+## Plugin Design Considerations
+The fuzzer plugin for MIDI extractor uses the `ExtractorFuzzerBase` class and
+implements only the `createExtractor` to create the MIDI extractor class.
+
+##### Maximize code coverage
+A dict file (dictionary file) is created for MIDI to ensure that the required MIDI
+headers are present in every input file that goes to the fuzzer.
+This ensures that more code gets covered, as a range of MIDI headers will be
+present in the input data.
+
+
+## Build
+
+This describes steps to build the midi_extractor_fuzzer binary.
+
+### Android
+
+#### Steps to build
+Build the fuzzer
+```
+  $ mm -j$(nproc) midi_extractor_fuzzer
+```
+
+#### Steps to run
+Create a directory CORPUS_DIR and copy some MIDI files to that folder.
+Push this directory to device.
+
+To run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/midi_extractor_fuzzer/midi_extractor_fuzzer CORPUS_DIR
+```
+
+## References:
+ * http://llvm.org/docs/LibFuzzer.html
+ * https://github.com/google/oss-fuzz
diff --git a/media/extractors/fuzzers/aac_extractor_fuzzer.cpp b/media/extractors/fuzzers/aac_extractor_fuzzer.cpp
new file mode 100644
index 0000000..98a6cc9
--- /dev/null
+++ b/media/extractors/fuzzers/aac_extractor_fuzzer.cpp
@@ -0,0 +1,54 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+#include "AACExtractor.h"
+
+#include "ExtractorFuzzerBase.h"
+
+using namespace android;
+
+class AacExtractor : public ExtractorFuzzerBase {
+ public:
+  AacExtractor() = default;
+  ~AacExtractor() = default;
+
+  bool createExtractor();
+};
+
+bool AacExtractor::createExtractor() {
+  mExtractor = new AACExtractor(new DataSourceHelper(mDataSource->wrap()), 0);
+  if (!mExtractor) {
+    return false;
+  }
+  mExtractor->name();
+  return true;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+  if ((!data) || (size == 0)) {
+    return 0;
+  }
+  AacExtractor* extractor = new AacExtractor();
+  if (extractor) {
+    extractor->processData(data, size);
+    delete extractor;
+  }
+  return 0;
+}
diff --git a/media/extractors/fuzzers/amr_extractor_fuzzer.cpp b/media/extractors/fuzzers/amr_extractor_fuzzer.cpp
new file mode 100644
index 0000000..6c9e1a5
--- /dev/null
+++ b/media/extractors/fuzzers/amr_extractor_fuzzer.cpp
@@ -0,0 +1,54 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+#include "ExtractorFuzzerBase.h"
+
+#include "AMRExtractor.h"
+
+using namespace android;
+
+class AmrExtractor : public ExtractorFuzzerBase {
+ public:
+  AmrExtractor() = default;
+  ~AmrExtractor() = default;
+
+  bool createExtractor();
+};
+
+bool AmrExtractor::createExtractor() {
+  mExtractor = new AMRExtractor(new DataSourceHelper(mDataSource->wrap()));
+  if (!mExtractor) {
+    return false;
+  }
+  mExtractor->name();
+  return true;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+  if ((!data) || (size == 0)) {
+    return 0;
+  }
+  AmrExtractor* extractor = new AmrExtractor();
+  if (extractor) {
+    extractor->processData(data, size);
+    delete extractor;
+  }
+  return 0;
+}
diff --git a/media/extractors/fuzzers/amr_extractor_fuzzer.dict b/media/extractors/fuzzers/amr_extractor_fuzzer.dict
new file mode 100644
index 0000000..bc5726c
--- /dev/null
+++ b/media/extractors/fuzzers/amr_extractor_fuzzer.dict
@@ -0,0 +1,2 @@
+# Start code
+kw1="#!AMR"
diff --git a/media/extractors/fuzzers/flac_extractor_fuzzer.cpp b/media/extractors/fuzzers/flac_extractor_fuzzer.cpp
new file mode 100644
index 0000000..8734d45
--- /dev/null
+++ b/media/extractors/fuzzers/flac_extractor_fuzzer.cpp
@@ -0,0 +1,54 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+#include "ExtractorFuzzerBase.h"
+
+#include "FLACExtractor.h"
+
+using namespace android;
+
+class FlacExtractor : public ExtractorFuzzerBase {
+ public:
+  FlacExtractor() = default;
+  ~FlacExtractor() = default;
+
+  bool createExtractor();
+};
+
+bool FlacExtractor::createExtractor() {
+  mExtractor = new FLACExtractor(new DataSourceHelper(mDataSource->wrap()));
+  if (!mExtractor) {
+    return false;
+  }
+  mExtractor->name();
+  return true;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+  if ((!data) || (size == 0)) {
+    return 0;
+  }
+  FlacExtractor* extractor = new FlacExtractor();
+  if (extractor) {
+    extractor->processData(data, size);
+    delete extractor;
+  }
+  return 0;
+}
diff --git a/media/extractors/fuzzers/flac_extractor_fuzzer.dict b/media/extractors/fuzzers/flac_extractor_fuzzer.dict
new file mode 100644
index 0000000..53ad44f
--- /dev/null
+++ b/media/extractors/fuzzers/flac_extractor_fuzzer.dict
@@ -0,0 +1,3 @@
+# Start code (bytes 0-3)
+# The 4 bytes below correspond to "fLaC" in ASCII
+kw1="\x66\x4C\x61\x43"
diff --git a/media/extractors/fuzzers/include/ExtractorFuzzerBase.h b/media/extractors/fuzzers/include/ExtractorFuzzerBase.h
new file mode 100644
index 0000000..6a2a1c1
--- /dev/null
+++ b/media/extractors/fuzzers/include/ExtractorFuzzerBase.h
@@ -0,0 +1,139 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+#ifndef __EXTRACTOR_FUZZER_BASE_H__
+#define __EXTRACTOR_FUZZER_BASE_H__
+
+#include <media/DataSource.h>
+#include <media/MediaExtractorPluginHelper.h>
+#include <media/stagefright/MediaBufferGroup.h>
+#include <vector>
+
+extern "C" {
+android::ExtractorDef GETEXTRACTORDEF();
+}
+
+constexpr int32_t kFuzzerMaxSeekPointsCount = 5;
+
+namespace android {
+
+class ExtractorFuzzerBase {
+ public:
+  ExtractorFuzzerBase() = default;
+  virtual ~ExtractorFuzzerBase() {
+    if (mExtractor) {
+      delete mExtractor;
+      mExtractor = nullptr;
+    }
+    if (mBufferSource) {
+      mBufferSource.clear();
+      mBufferSource = nullptr;
+    }
+  }
+
+  /** Function to create the media extractor component.
+   * To be implemented by the derived class.
+   */
+  virtual bool createExtractor() = 0;
+
+  /** Parent class functions to be reused by derived classes.
+   * These are common to all media extractor components.
+   */
+  bool setDataSource(const uint8_t* data, size_t size);
+
+  void getExtractorDef();
+
+  void extractTracks();
+
+  void getMetadata();
+
+  void getTracksMetadata();
+
+  void setDataSourceFlags(uint32_t flags);
+
+  void seekAndExtractTracks();
+
+  void processData(const uint8_t* data, size_t size);
+
+ protected:
+  class BufferSource : public DataSource {
+   public:
+    BufferSource(const uint8_t* data, size_t length) : mData(data), mLength(length) {}
+    virtual ~BufferSource() { mData = nullptr; }
+
+    void setFlags(uint32_t flags) { mFlags = flags; }
+
+    uint32_t flags() { return mFlags; }
+
+    status_t initCheck() const { return mData != nullptr ? OK : NO_INIT; }
+
+    ssize_t readAt(off64_t offset, void* data, size_t size) {
+      if (!mData) {
+        return NO_INIT;
+      }
+
+      Mutex::Autolock autoLock(mLock);
+      if ((offset >= static_cast<off64_t>(mLength)) || (offset < 0)) {
+        return 0;  // read beyond bounds.
+      }
+      size_t numAvailable = mLength - static_cast<size_t>(offset);
+      if (size > numAvailable) {
+        size = numAvailable;
+      }
+      return readAt_l(offset, data, size);
+    }
+
+    status_t getSize(off64_t* size) {
+      if (!mData) {
+        return NO_INIT;
+      }
+
+      Mutex::Autolock autoLock(mLock);
+      *size = static_cast<off64_t>(mLength);
+      return OK;
+    }
+
+   protected:
+    ssize_t readAt_l(off64_t offset, void* data, size_t size) {
+      void* result = memcpy(data, mData + offset, size);
+      return result != nullptr ? size : 0;
+    }
+
+    const uint8_t* mData = nullptr;
+    size_t mLength = 0;
+    Mutex mLock;
+    uint32_t mFlags = 0;
+
+   private:
+    DISALLOW_EVIL_CONSTRUCTORS(BufferSource);
+  };
+
+  sp<BufferSource> mBufferSource;
+  DataSource* mDataSource = nullptr;
+  MediaExtractorPluginHelper* mExtractor = nullptr;
+
+  virtual void extractTrack(MediaTrackHelper* track, MediaBufferGroup* bufferGroup);
+  virtual void seekAndExtractTrack(MediaTrackHelper* track, MediaBufferGroup* bufferGroup,
+                                   int64_t trackDuration);
+};
+
+}  // namespace android
+
+#endif  // __EXTRACTOR_FUZZER_BASE_H__
diff --git a/media/extractors/fuzzers/midi_extractor_fuzzer.cpp b/media/extractors/fuzzers/midi_extractor_fuzzer.cpp
new file mode 100644
index 0000000..e02a12b
--- /dev/null
+++ b/media/extractors/fuzzers/midi_extractor_fuzzer.cpp
@@ -0,0 +1,54 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+#include "ExtractorFuzzerBase.h"
+
+#include "MidiExtractor.h"
+
+using namespace android;
+
+class MIDIExtractor : public ExtractorFuzzerBase {
+ public:
+  MIDIExtractor() = default;
+  ~MIDIExtractor() = default;
+
+  bool createExtractor();
+};
+
+bool MIDIExtractor::createExtractor() {
+  mExtractor = new MidiExtractor(mDataSource->wrap());
+  if (!mExtractor) {
+    return false;
+  }
+  mExtractor->name();
+  return true;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+  if ((!data) || (size == 0)) {
+    return 0;
+  }
+  MIDIExtractor* extractor = new MIDIExtractor();
+  if (extractor) {
+    extractor->processData(data, size);
+    delete extractor;
+  }
+  return 0;
+}
diff --git a/media/extractors/fuzzers/midi_extractor_fuzzer.dict b/media/extractors/fuzzers/midi_extractor_fuzzer.dict
new file mode 100644
index 0000000..5b6bb8b
--- /dev/null
+++ b/media/extractors/fuzzers/midi_extractor_fuzzer.dict
@@ -0,0 +1,3 @@
+# MIDI Chunks
+kw1="MThd"
+kw2="MTrk"
diff --git a/media/extractors/fuzzers/mkv_extractor_fuzzer.cpp b/media/extractors/fuzzers/mkv_extractor_fuzzer.cpp
new file mode 100644
index 0000000..eceb93f
--- /dev/null
+++ b/media/extractors/fuzzers/mkv_extractor_fuzzer.cpp
@@ -0,0 +1,54 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+#include "ExtractorFuzzerBase.h"
+
+#include "MatroskaExtractor.h"
+
+using namespace android;
+
+class MKVExtractor : public ExtractorFuzzerBase {
+ public:
+  MKVExtractor() = default;
+  ~MKVExtractor() = default;
+
+  bool createExtractor();
+};
+
+bool MKVExtractor::createExtractor() {
+  mExtractor = new MatroskaExtractor(new DataSourceHelper(mDataSource->wrap()));
+  if (!mExtractor) {
+    return false;
+  }
+  mExtractor->name();
+  return true;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+  if ((!data) || (size == 0)) {
+    return 0;
+  }
+  MKVExtractor* extractor = new MKVExtractor();
+  if (extractor) {
+    extractor->processData(data, size);
+    delete extractor;
+  }
+  return 0;
+}
diff --git a/media/extractors/fuzzers/mkv_extractor_fuzzer.dict b/media/extractors/fuzzers/mkv_extractor_fuzzer.dict
new file mode 100644
index 0000000..b3815dc
--- /dev/null
+++ b/media/extractors/fuzzers/mkv_extractor_fuzzer.dict
@@ -0,0 +1,244 @@
+# Element IDs
+kw1="\x42\x86"
+kw2="\x42\xF7"
+kw3="\x42\xF2"
+kw4="\x42\xF3"
+kw5="\x42\x87"
+kw6="\x42\x85"
+kw7="\x18\x53\x80\x67"
+kw8="\x11\x4D\x9B\x74"
+kw9="\x4D\xBB"
+kw10="\x53\xAB"
+kw11="\x53\xAC"
+kw12="\x15\x49\xA9\x66"
+kw13="\x73\xA4"
+kw14="\x73\x84"
+kw15="\x3C\xB9\x23"
+kw16="\x3C\x83\xAB"
+kw17="\x3C\xB9\x23"
+kw18="\x3E\x83\xBB"
+kw19="\x44\x44"
+kw20="\x69\x24"
+kw21="\x69\xFC"
+kw22="\x69\xBF"
+kw23="\x69\xA5"
+kw24="\x2A\xD7\xB1"
+kw25="\x44\x89"
+kw26="\x44\x61"
+kw27="\x7B\xA9"
+kw28="\x4D\x80"
+kw29="\x57\x41"
+kw30="\x1F\x43\xB6\x75"
+kw31="\xE7"
+kw32="\x58\x54"
+kw33="\x58\xD7"
+kw34="\xA7"
+kw35="\xAB"
+kw36="\xA3"
+kw37="\xA0"
+kw38="\xA1"
+kw39="\xA2"
+kw40="\x75\xA1"
+kw41="\x2A\xD7\xB1"
+kw42="\xA6"
+kw43="\xEE"
+kw44="\xA5"
+kw45="\x9A"
+kw46="\xFA"
+kw47="\xFB"
+kw48="\xFD"
+kw49="\xA4"
+kw50="\x75\xA2"
+kw51="\x8E"
+kw52="\xE8"
+kw53="\xCC"
+kw54="\xCD"
+kw55="\xCB"
+kw56="\xCE"
+kw57="\xCF"
+kw58="\xC8"
+kw59="\xC9"
+kw60="\xCA"
+kw61="\xAF"
+kw62="\x16\x54\xAE\x6B"
+kw63="\xAE"
+kw64="\xD7"
+kw65="\x73\xC5"
+kw66="\x83"
+kw67="\xB9"
+kw68="\x88"
+kw69="\x55\xAA"
+kw70="\x9C"
+kw71="\x6D\xE7"
+kw72="\x6D\xF8"
+kw73="\x23\xE3\x83"
+kw74="\x23\x4E\x7A"
+kw75="\x23\x31\x4F"
+kw76="\x53\x7F"
+kw77="\x55\xEE"
+kw78="\x53\x6E"
+kw79="\x22\xB5\x9C"
+kw80="\x22\xB5\x9D"
+kw81="\x86"
+kw82="\x63\xA2"
+kw83="\x25\x86\x88"
+kw84="\x26\xB2\x40"
+kw85="\xAA"
+kw86="\x6F\xAB"
+kw87="\x56\xAA"
+kw88="\x56\xBB"
+kw89="\x66\x24"
+kw90="\x66\xFC"
+kw91="\x66\xBF"
+kw92="\xE0"
+kw93="\x9A"
+kw94="\x9D"
+kw95="\x53\xB8"
+kw96="\x53\xC0"
+kw97="\x53\xB9"
+kw98="\xB0"
+kw99="\xBA"
+kw100="\x54\xAA"
+kw101="\x54\xBB"
+kw102="\x54\xCC"
+kw103="\x54\xDD"
+kw104="\x54\xB0"
+kw105="\x54\xBA"
+kw106="\x54\xB2"
+kw107="\x54\xB3"
+kw108="\x2E\xB5\x24"
+kw109="\x2F\xB5\x23"
+kw110="\x23\x83\xE3"
+kw111="\x55\xB0"
+kw112="\x55\xB1"
+kw113="\x55\xB2"
+kw114="\x55\xB3"
+kw115="\x55\xB4"
+kw116="\x55\xB5"
+kw117="\x55\xB6"
+kw118="\x55\xB7"
+kw119="\x55\xB8"
+kw120="\x55\xB9"
+kw121="\x55\xBA"
+kw122="\x55\xBB"
+kw123="\x55\xBC"
+kw124="\x55\xBD"
+kw125="\x55\xD0"
+kw126="\x55\xD1"
+kw127="\x55\xD2"
+kw128="\x55\xD3"
+kw129="\x55\xD4"
+kw130="\x55\xD5"
+kw131="\x55\xD6"
+kw132="\x55\xD7"
+kw133="\x55\xD8"
+kw134="\x55\xD9"
+kw135="\x55\xDA"
+kw136="\x76\x70"
+kw137="\x76\x71"
+kw138="\x76\x72"
+kw139="\x76\x73"
+kw140="\x76\x74"
+kw141="\x76\x75"
+kw142="\xE1"
+kw143="\xB5"
+kw144="\x78\xB5"
+kw145="\x9F"
+kw146="\x7D\x7B"
+kw147="\x62\x64"
+kw148="\xE2"
+kw149="\xE3"
+kw150="\xE4"
+kw151="\xE5"
+kw152="\xE6"
+kw153="\xE9"
+kw154="\xED"
+kw155="\xC0"
+kw156="\xC1"
+kw157="\xC6"
+kw158="\xC7"
+kw159="\xC4"
+kw160="\x6D\x80"
+kw161="\x62\x40"
+kw162="\x50\x31"
+kw163="\x50\x32"
+kw164="\x50\x33"
+kw165="\x50\x34"
+kw166="\x50\x35"
+kw167="\x42\x54"
+kw168="\x42\x55"
+kw169="\x47\xE1"
+kw170="\x47\xE2"
+kw171="\x47\xE7"
+kw172="\x47\xE8"
+kw173="\x47\xE3"
+kw174="\x47\xE4"
+kw175="\x47\xE5"
+kw176="\x47\xE6"
+kw177="\x1C\x53\xBB\x6B"
+kw178="\xBB"
+kw179="\xB3"
+kw180="\xB7"
+kw181="\xF7"
+kw182="\xF1"
+kw183="\xF0"
+kw184="\xB2"
+kw185="\x53\x78"
+kw186="\xEA"
+kw187="\xDB"
+kw188="\x96"
+kw189="\x97"
+kw190="\x53\x5F"
+kw191="\xEB"
+kw192="\x19\x41\xA4\x69"
+kw193="\x46\x7E"
+kw194="\x46\x6E"
+kw195="\x46\x60"
+kw196="\x46\x5C"
+kw197="\x46\xAE"
+kw198="\x46\x75"
+kw199="\x46\x61"
+kw200="\x46\x62"
+kw201="\x10\x43\xA7\x70"
+kw202="\x45\xB9"
+kw203="\x45\xBC"
+kw204="\x45\xBD"
+kw205="\x45\xDB"
+kw206="\x45\xDD"
+kw207="\xB6"
+kw208="\x73\xC4"
+kw209="\x56\x54"
+kw210="\x91"
+kw211="\x92"
+kw212="\x98"
+kw213="\x45\x98"
+kw214="\x6E\x67"
+kw215="\x6E\xBC"
+kw216="\x63\xC3"
+kw217="\x8F"
+kw218="\x89"
+kw219="\x80"
+kw220="\x85"
+kw221="\x43\x7C"
+kw222="\x43\x7D"
+kw223="\x43\x7E"
+kw224="\x69\x44"
+kw225="\x69\x55"
+kw226="\x45\x0D"
+kw227="\x69\x11"
+kw228="\x69\x22"
+kw229="\x69\x33"
+kw230="\x12\x54\xC3\x67"
+kw231="\x73\x73"
+kw232="\x63\xC0"
+kw233="\x68\xCA"
+kw234="\x63\xCA"
+kw235="\x63\xC5"
+kw236="\x63\xC9"
+kw237="\x67\xC8"
+kw238="\x45\xA3"
+kw239="\x44\x7A"
+kw240="\x44\x7B"
+kw241="\x44\x84"
+kw242="\x44\x87"
+kw243="\x44\x85"
diff --git a/media/extractors/fuzzers/mp3_extractor_fuzzer.cpp b/media/extractors/fuzzers/mp3_extractor_fuzzer.cpp
new file mode 100644
index 0000000..9a47c18
--- /dev/null
+++ b/media/extractors/fuzzers/mp3_extractor_fuzzer.cpp
@@ -0,0 +1,54 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+#include "ExtractorFuzzerBase.h"
+
+#include "MP3Extractor.h"
+
+using namespace android;
+
+class Mp3Extractor : public ExtractorFuzzerBase {
+ public:
+  Mp3Extractor() = default;
+  ~Mp3Extractor() = default;
+
+  bool createExtractor();
+};
+
+bool Mp3Extractor::createExtractor() {
+  mExtractor = new MP3Extractor(new DataSourceHelper(mDataSource->wrap()), nullptr);
+  if (!mExtractor) {
+    return false;
+  }
+  mExtractor->name();
+  return true;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+  if ((!data) || (size == 0)) {
+    return 0;
+  }
+  Mp3Extractor* extractor = new Mp3Extractor();
+  if (extractor) {
+    extractor->processData(data, size);
+    delete extractor;
+  }
+  return 0;
+}
diff --git a/media/extractors/fuzzers/mp4_extractor_fuzzer.cpp b/media/extractors/fuzzers/mp4_extractor_fuzzer.cpp
new file mode 100644
index 0000000..3903519
--- /dev/null
+++ b/media/extractors/fuzzers/mp4_extractor_fuzzer.cpp
@@ -0,0 +1,56 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+#include "ExtractorFuzzerBase.h"
+
+#include "MPEG4Extractor.h"
+#include "SampleTable.h"
+
+using namespace android;
+
+class MP4Extractor : public ExtractorFuzzerBase {
+ public:
+  MP4Extractor() = default;
+  ~MP4Extractor() = default;
+
+  bool createExtractor();
+};
+
+bool MP4Extractor::createExtractor() {
+  mExtractor = new MPEG4Extractor(new DataSourceHelper(mDataSource->wrap()));
+  if (!mExtractor) {
+    return false;
+  }
+  mExtractor->name();
+  setDataSourceFlags(DataSourceBase::kWantsPrefetching | DataSourceBase::kIsCachingDataSource);
+  return true;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+  if ((!data) || (size == 0)) {
+    return 0;
+  }
+  MP4Extractor* extractor = new MP4Extractor();
+  if (extractor) {
+    extractor->processData(data, size);
+    delete extractor;
+  }
+  return 0;
+}
diff --git a/media/extractors/fuzzers/mp4_extractor_fuzzer.dict b/media/extractors/fuzzers/mp4_extractor_fuzzer.dict
new file mode 100644
index 0000000..3683649
--- /dev/null
+++ b/media/extractors/fuzzers/mp4_extractor_fuzzer.dict
@@ -0,0 +1,248 @@
+# MP4 Atoms/Boxes
+kw1="ftyp"
+kw2="free"
+kw3="mdat"
+kw4="moov"
+kw5="mvhd"
+kw6="trak"
+kw7="tkhd"
+kw8="edts"
+kw9="elst"
+kw10="mdia"
+kw11="mdhd"
+kw12="hdlr"
+kw13="minf"
+kw14="vmhd"
+kw15="dinf"
+kw16="dref"
+kw17="url "
+kw18="stbl"
+kw19="stsd"
+kw20="avc1"
+kw21="avcC"
+kw22="stts"
+kw23="stss"
+kw24="ctts"
+kw25="stsc"
+kw26="stsz"
+kw27="stco"
+kw28="mp4a"
+kw29="esds"
+kw30="udta"
+kw31="meta"
+kw32="ilst"
+kw33="samr"
+kw34="sawb"
+kw35="ec-3"
+kw36="mp4v"
+kw37="s263"
+kw38="h263"
+kw39="H263"
+kw40="avc1"
+kw41="hvc1"
+kw42="hev1"
+kw43="ac-4"
+kw44="Opus"
+kw45="twos"
+kw46="sowt"
+kw47="alac"
+kw48="fLaC"
+kw49="av01"
+kw50=".mp3"
+kw51="keys"
+kw52="cprt"
+kw53="covr"
+kw54="mvex"
+kw55="moof"
+kw56="traf"
+kw57="mfra"
+kw58="sinf"
+kw59="schi"
+kw60="wave"
+kw61="schm"
+kw62="cbc1"
+kw63="cbcs"
+kw64="cenc"
+kw65="cens"
+kw66="frma"
+kw67="tenc"
+kw68="tref"
+kw69="thmb"
+kw70="pssh"
+kw71="mett"
+kw72="enca"
+kw73="encv"
+kw74="co64"
+kw75="stz2"
+kw76="\xA9xyz"
+kw77="btrt"
+kw78="hvcC"
+kw79="av1C"
+kw80="d263"
+kw81="iloc"
+kw82="iinf"
+kw83="iprp"
+kw84="pitm"
+kw85="idat"
+kw86="iref"
+kw87="ipro"
+kw88="mean"
+kw89="name"
+kw90="data"
+kw91="mehd"
+kw92="text"
+kw93="sbtl"
+kw94="trex"
+kw95="tx3g"
+kw96="colr"
+kw97="titl"
+kw98="perf"
+kw99="auth"
+kw100="gnre"
+kw101="albm"
+kw102="yrrc"
+kw103="ID32"
+kw104="----"
+kw105="sidx"
+kw106="ac-3"
+kw107="qt  "
+kw108="mif1"
+kw109="heic"
+kw110="dac4"
+kw111="dec3"
+kw112="dac3"
+kw113="\xA9alb"
+kw114="\xA9ART"
+kw115="aART"
+kw116="\xA9day"
+kw117="\xA9nam"
+kw118="\xA9wrt"
+kw119="\xA9gen"
+kw120="cpil"
+kw121="trkn"
+kw122="disk"
+kw123="nclx"
+kw124="nclc"
+kw125="tfhd"
+kw126="trun"
+kw127="saiz"
+kw128="saio"
+kw129="senc"
+kw130="isom"
+kw131="iso2"
+kw132="3gp4"
+kw133="mp41"
+kw134="mp42"
+kw135="dash"
+kw136="nvr1"
+kw137="MSNV"
+kw138="wmf "
+kw139="3g2a"
+kw140="3g2b"
+kw141="msf1"
+kw142="hevc"
+kw143="pdin"
+kw144="trgr"
+kw145="smhd"
+kw146="hmhd"
+kw147="nmhd"
+kw148="cslg"
+kw149="stsh"
+kw150="padb"
+kw151="stdp"
+kw152="sdtp"
+kw153="sbgp"
+kw154="sgpd"
+kw155="subs"
+kw156="leva"
+kw157="mfhd"
+kw158="tfdt"
+kw159="tfra"
+kw160="mfro"
+kw161="skip"
+kw162="tsel"
+kw163="strk"
+kw164="stri"
+kw165="strd"
+kw166="xml "
+kw167="bxml"
+kw168="fiin"
+kw169="paen"
+kw170="fire"
+kw171="fpar"
+kw172="fecr"
+kw173="segr"
+kw174="gitn"
+kw175="meco"
+kw176="mere"
+kw177="styp"
+kw178="ssix"
+kw179="prft"
+kw180="hint"
+kw181="cdsc"
+kw182="hind"
+kw183="vdep"
+kw184="vplx"
+kw185="msrc"
+kw186="urn "
+kw187="enct"
+kw188="encs"
+kw189="rinf"
+kw190="srpp"
+kw191="stsg"
+kw192="stvi"
+kw193="tims"
+kw194="tsro"
+kw195="snro"
+kw196="rtp "
+kw197="srtp"
+kw198="rtpo"
+kw199="hnti"
+kw200="sdp "
+kw201="trpy"
+kw202="nump"
+kw203="tpyl"
+kw204="totl"
+kw205="npck"
+kw206="tpay"
+kw207="maxr"
+kw208="dmed"
+kw209="dimm"
+kw210="drep"
+kw211="tmin"
+kw212="tmax"
+kw213="pmax"
+kw214="dmax"
+kw215="payt"
+kw216="fdp "
+kw217="fdsa"
+kw218="fdpa"
+kw219="extr"
+kw220="feci"
+kw221="rm2t"
+kw222="sm2t"
+kw223="tPAT"
+kw224="tPMT"
+kw225="tOD "
+kw226="tsti"
+kw227="istm"
+kw228="pm2t"
+kw229="rrtp"
+kw230="rssr"
+kw231="rscr"
+kw232="rsrp"
+kw233="rssr"
+kw234="ccid"
+kw235="sroc"
+kw236="prtp"
+kw237="roll"
+kw238="rash"
+kw239="alst"
+kw240="rap "
+kw241="tele"
+kw242="mp71"
+kw243="iso3"
+kw244="iso4"
+kw245="iso5"
+kw246="resv"
+kw247="iso6"
diff --git a/media/extractors/fuzzers/mpeg2_extractor_fuzzer.cpp b/media/extractors/fuzzers/mpeg2_extractor_fuzzer.cpp
new file mode 100644
index 0000000..240ef66
--- /dev/null
+++ b/media/extractors/fuzzers/mpeg2_extractor_fuzzer.cpp
@@ -0,0 +1,62 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+#include "ExtractorFuzzerBase.h"
+
+#ifdef MPEG2PS
+#include "MPEG2PSExtractor.h"
+#else
+#include "MPEG2TSExtractor.h"
+#endif
+
+using namespace android;
+
+class MPEG2Extractor : public ExtractorFuzzerBase {
+ public:
+  MPEG2Extractor() = default;
+  ~MPEG2Extractor() = default;
+
+  bool createExtractor();
+};
+
+bool MPEG2Extractor::createExtractor() {
+#ifdef MPEG2PS
+  mExtractor = new MPEG2PSExtractor(new DataSourceHelper(mDataSource->wrap()));
+#else
+  mExtractor = new MPEG2TSExtractor(new DataSourceHelper(mDataSource->wrap()));
+#endif
+  if (!mExtractor) {
+    return false;
+  }
+  mExtractor->name();
+  return true;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+  if ((!data) || (size == 0)) {
+    return 0;
+  }
+  MPEG2Extractor* extractor = new MPEG2Extractor();
+  if (extractor) {
+    extractor->processData(data, size);
+    delete extractor;
+  }
+  return 0;
+}
diff --git a/media/extractors/fuzzers/mpeg2ps_extractor_fuzzer.dict b/media/extractors/fuzzers/mpeg2ps_extractor_fuzzer.dict
new file mode 100644
index 0000000..69d390a
--- /dev/null
+++ b/media/extractors/fuzzers/mpeg2ps_extractor_fuzzer.dict
@@ -0,0 +1,2 @@
+# Start code (bytes 0-3)
+kw1="\x00\x00\x01\xBA"
diff --git a/media/extractors/fuzzers/mpeg2ts_extractor_fuzzer.dict b/media/extractors/fuzzers/mpeg2ts_extractor_fuzzer.dict
new file mode 100644
index 0000000..006a1eb
--- /dev/null
+++ b/media/extractors/fuzzers/mpeg2ts_extractor_fuzzer.dict
@@ -0,0 +1,2 @@
+# Start byte
+kw1="\x47"
diff --git a/media/extractors/fuzzers/ogg_extractor_fuzzer.cpp b/media/extractors/fuzzers/ogg_extractor_fuzzer.cpp
new file mode 100644
index 0000000..bd2fcc5
--- /dev/null
+++ b/media/extractors/fuzzers/ogg_extractor_fuzzer.cpp
@@ -0,0 +1,54 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+#include "ExtractorFuzzerBase.h"
+
+#include "OggExtractor.h"
+
+using namespace android;
+
+class OGGExtractor : public ExtractorFuzzerBase {
+ public:
+  OGGExtractor() = default;
+  ~OGGExtractor() = default;
+
+  bool createExtractor();
+};
+
+bool OGGExtractor::createExtractor() {
+  mExtractor = new OggExtractor(new DataSourceHelper(mDataSource->wrap()));
+  if (!mExtractor) {
+    return false;
+  }
+  mExtractor->name();
+  return true;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+  if ((!data) || (size == 0)) {
+    return 0;
+  }
+  OGGExtractor* extractor = new OGGExtractor();
+  if (extractor) {
+    extractor->processData(data, size);
+    delete extractor;
+  }
+  return 0;
+}
diff --git a/media/extractors/fuzzers/ogg_extractor_fuzzer.dict b/media/extractors/fuzzers/ogg_extractor_fuzzer.dict
new file mode 100644
index 0000000..df2fc38
--- /dev/null
+++ b/media/extractors/fuzzers/ogg_extractor_fuzzer.dict
@@ -0,0 +1,3 @@
+# Start code (bytes 0-3)
+# The 4 bytes below correspond to "OggS" in ASCII
+kw1="\x4F\x67\x67\x53"
diff --git a/media/extractors/fuzzers/wav_extractor_fuzzer.cpp b/media/extractors/fuzzers/wav_extractor_fuzzer.cpp
new file mode 100644
index 0000000..cb11ebd
--- /dev/null
+++ b/media/extractors/fuzzers/wav_extractor_fuzzer.cpp
@@ -0,0 +1,54 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+#include "ExtractorFuzzerBase.h"
+
+#include "WAVExtractor.h"
+
+using namespace android;
+
+class wavExtractor : public ExtractorFuzzerBase {
+ public:
+  wavExtractor() = default;
+  ~wavExtractor() = default;
+
+  bool createExtractor();
+};
+
+bool wavExtractor::createExtractor() {
+  mExtractor = new WAVExtractor(new DataSourceHelper(mDataSource->wrap()));
+  if (!mExtractor) {
+    return false;
+  }
+  mExtractor->name();
+  return true;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+  if ((!data) || (size == 0)) {
+    return 0;
+  }
+  wavExtractor* extractor = new wavExtractor();
+  if (extractor) {
+    extractor->processData(data, size);
+    delete extractor;
+  }
+  return 0;
+}
diff --git a/media/extractors/midi/Android.bp b/media/extractors/midi/Android.bp
index b8255fc..1c69bb8 100644
--- a/media/extractors/midi/Android.bp
+++ b/media/extractors/midi/Android.bp
@@ -5,7 +5,7 @@
     srcs: ["MidiExtractor.cpp"],
 
     header_libs: [
-        "libmedia_headers",
+        "libmedia_datasource_headers",
     ],
 
     static_libs: [
@@ -18,4 +18,12 @@
     shared_libs: [
         "libbase",
     ],
+
+    host_supported: true,
+
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
 }
diff --git a/media/extractors/mkv/Android.bp b/media/extractors/mkv/Android.bp
index 7ad8cc1..330d4fe 100644
--- a/media/extractors/mkv/Android.bp
+++ b/media/extractors/mkv/Android.bp
@@ -21,4 +21,12 @@
         "libutils",
     ],
 
+    host_supported: true,
+
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
+
 }
diff --git a/media/extractors/mkv/MatroskaExtractor.cpp b/media/extractors/mkv/MatroskaExtractor.cpp
index 27bd357..4fd3a56 100644
--- a/media/extractors/mkv/MatroskaExtractor.cpp
+++ b/media/extractors/mkv/MatroskaExtractor.cpp
@@ -1888,13 +1888,12 @@
 
     for(size_t i = 0; i < track->GetContentEncodingCount(); i++) {
         const mkvparser::ContentEncoding *encoding = track->GetContentEncodingByIndex(i);
-        for(size_t j = 0; j < encoding->GetEncryptionCount(); j++) {
+        if (encoding->GetEncryptionCount() > 0) {
             const mkvparser::ContentEncoding::ContentEncryption *encryption;
-            encryption = encoding->GetEncryptionByIndex(j);
+            encryption = encoding->GetEncryptionByIndex(0);
             AMediaFormat_setBuffer(trackInfo->mMeta,
                     AMEDIAFORMAT_KEY_CRYPTO_KEY, encryption->key_id, encryption->key_id_len);
             trackInfo->mEncrypted = true;
-            break;
         }
 
         for(size_t j = 0; j < encoding->GetCompressionCount(); j++) {
diff --git a/media/extractors/mp3/Android.bp b/media/extractors/mp3/Android.bp
index 102ac81..7d70548 100644
--- a/media/extractors/mp3/Android.bp
+++ b/media/extractors/mp3/Android.bp
@@ -13,4 +13,11 @@
         "libstagefright_foundation",
     ],
 
+    host_supported: true,
+
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
 }
diff --git a/media/extractors/mp4/Android.bp b/media/extractors/mp4/Android.bp
index e48e1b7..afa055f 100644
--- a/media/extractors/mp4/Android.bp
+++ b/media/extractors/mp4/Android.bp
@@ -16,4 +16,12 @@
         "libstagefright_id3",
         "libutils",
     ],
+
+    host_supported: true,
+
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
 }
diff --git a/media/extractors/mp4/ItemTable.cpp b/media/extractors/mp4/ItemTable.cpp
index 0773387..ded3d1a 100644
--- a/media/extractors/mp4/ItemTable.cpp
+++ b/media/extractors/mp4/ItemTable.cpp
@@ -76,6 +76,7 @@
     size_t size;
     sp<ABuffer> hvcc;
     sp<ABuffer> icc;
+    sp<ABuffer> av1c;
 
     Vector<uint32_t> thumbnails;
     Vector<uint32_t> dimgRefs;
@@ -546,11 +547,11 @@
                 continue;
             }
             ALOGV("Image item id %d uses thumbnail item id %d", mRefs[i], mItemId);
-            ImageItem &masterImage = itemIdToItemMap.editValueAt(itemIndex);
-            if (!masterImage.thumbnails.empty()) {
+            ImageItem &imageItem = itemIdToItemMap.editValueAt(itemIndex);
+            if (!imageItem.thumbnails.empty()) {
                 ALOGW("already has thumbnails!");
             }
-            masterImage.thumbnails.push_back(mItemId);
+            imageItem.thumbnails.push_back(mItemId);
         }
         break;
     }
@@ -764,6 +765,39 @@
     return OK;
 }
 
+struct Av1cBox : public Box, public ItemProperty {
+    Av1cBox(DataSourceHelper *source) :
+        Box(source, FOURCC("av1C")) {}
+
+    status_t parse(off64_t offset, size_t size) override;
+
+    void attachTo(ImageItem &image) const override {
+        image.av1c = mAv1c;
+    }
+
+private:
+    sp<ABuffer> mAv1c;
+};
+
+status_t Av1cBox::parse(off64_t offset, size_t size) {
+    ALOGV("%s: offset %lld, size %zu", __FUNCTION__, (long long)offset, size);
+
+    mAv1c = new ABuffer(size);
+
+    if (mAv1c->data() == NULL) {
+        ALOGE("b/28471206");
+        return NO_MEMORY;
+    }
+
+    if (source()->readAt(offset, mAv1c->data(), size) < (ssize_t)size) {
+        return ERROR_IO;
+    }
+
+    ALOGV("property av1C");
+
+    return OK;
+}
+
 struct IrotBox : public Box, public ItemProperty {
     IrotBox(DataSourceHelper *source) :
         Box(source, FOURCC("irot")), mAngle(0) {}
@@ -929,7 +963,7 @@
 
 status_t IpcoBox::parse(off64_t offset, size_t size) {
     ALOGV("%s: offset %lld, size %zu", __FUNCTION__, (long long)offset, size);
-    // push dummy as the index is 1-based
+    // push a placeholder as the index is 1-based
     mItemProperties->push_back(new ItemProperty());
     return parseChunks(offset, size);
 }
@@ -957,6 +991,11 @@
             itemProperty = new ColrBox(source());
             break;
         }
+        case FOURCC("av1C"):
+        {
+            itemProperty = new Av1cBox(source());
+            break;
+        }
         default:
         {
             // push dummy to maintain correct item property index
@@ -1203,8 +1242,9 @@
 
 //////////////////////////////////////////////////////////////////
 
-ItemTable::ItemTable(DataSourceHelper *source)
+ItemTable::ItemTable(DataSourceHelper *source, bool isHeif)
     : mDataSource(source),
+      mIsHeif(isHeif),
       mPrimaryItemId(0),
       mIdatOffset(0),
       mIdatSize(0),
@@ -1363,7 +1403,8 @@
         //   'Exif': EXIF metadata
         if (info.itemType != FOURCC("grid") &&
             info.itemType != FOURCC("hvc1") &&
-            info.itemType != FOURCC("Exif")) {
+            info.itemType != FOURCC("Exif") &&
+            info.itemType != FOURCC("av01")) {
             continue;
         }
 
@@ -1509,7 +1550,9 @@
     }
 
     AMediaFormat *meta = AMediaFormat_new();
-    AMediaFormat_setString(meta, AMEDIAFORMAT_KEY_MIME, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC);
+    AMediaFormat_setString(
+        meta, AMEDIAFORMAT_KEY_MIME,
+        mIsHeif ? MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC : MEDIA_MIMETYPE_IMAGE_AVIF);
 
     if (image->itemId == mPrimaryItemId) {
         AMediaFormat_setInt32(meta, AMEDIAFORMAT_KEY_IS_DEFAULT, 1);
@@ -1539,15 +1582,24 @@
         ssize_t thumbItemIndex = mItemIdToItemMap.indexOfKey(image->thumbnails[0]);
         if (thumbItemIndex >= 0) {
             const ImageItem &thumbnail = mItemIdToItemMap[thumbItemIndex];
-
-            if (thumbnail.hvcc != NULL) {
+            if (thumbnail.hvcc != NULL || thumbnail.av1c != NULL) {
                 AMediaFormat_setInt32(meta,
                         AMEDIAFORMAT_KEY_THUMBNAIL_WIDTH, thumbnail.width);
                 AMediaFormat_setInt32(meta,
                         AMEDIAFORMAT_KEY_THUMBNAIL_HEIGHT, thumbnail.height);
-                AMediaFormat_setBuffer(meta,
-                        AMEDIAFORMAT_KEY_THUMBNAIL_CSD_HEVC,
-                        thumbnail.hvcc->data(), thumbnail.hvcc->size());
+                if (thumbnail.hvcc != NULL) {
+                    AMediaFormat_setBuffer(meta,
+                            AMEDIAFORMAT_KEY_THUMBNAIL_CSD_HEVC,
+                            thumbnail.hvcc->data(), thumbnail.hvcc->size());
+                } else {
+                    // We use a hard-coded string here instead of
+                    // AMEDIAFORMAT_KEY_THUMBNAIL_CSD_AV1C. The key is available only from SDK 31.
+                    // The mp4 extractor is part of mainline and builds against SDK 29 as of
+                    // writing. This hard-coded string can be replaced with the named constant once
+                    // the mp4 extractor is built against SDK >= 31.
+                    AMediaFormat_setBuffer(meta,
+                            "thumbnail-csd-av1c", thumbnail.av1c->data(), thumbnail.av1c->size());
+                }
                 ALOGV("image[%u]: thumbnail: size %dx%d, item index %zd",
                         imageIndex, thumbnail.width, thumbnail.height, thumbItemIndex);
             } else {
@@ -1574,12 +1626,21 @@
                 AMEDIAFORMAT_KEY_MAX_INPUT_SIZE, image->width * image->height * 3 / 2);
     }
 
-    if (image->hvcc == NULL) {
-        ALOGE("%s: hvcc is missing for image[%u]!", __FUNCTION__, imageIndex);
-        return NULL;
+    if (mIsHeif) {
+        if (image->hvcc == NULL) {
+            ALOGE("%s: hvcc is missing for image[%u]!", __FUNCTION__, imageIndex);
+            return NULL;
+        }
+        AMediaFormat_setBuffer(meta,
+                AMEDIAFORMAT_KEY_CSD_HEVC, image->hvcc->data(), image->hvcc->size());
+    } else {
+        if (image->av1c == NULL) {
+            ALOGE("%s: av1c is missing for image[%u]!", __FUNCTION__, imageIndex);
+            return NULL;
+        }
+        AMediaFormat_setBuffer(meta,
+                AMEDIAFORMAT_KEY_CSD_0, image->av1c->data(), image->av1c->size());
     }
-    AMediaFormat_setBuffer(meta,
-            AMEDIAFORMAT_KEY_CSD_HEVC, image->hvcc->data(), image->hvcc->size());
 
     if (image->icc != NULL) {
         AMediaFormat_setBuffer(meta,
@@ -1614,17 +1675,17 @@
         return BAD_VALUE;
     }
 
-    uint32_t masterItemIndex = mDisplayables[imageIndex];
+    uint32_t imageItemIndex = mDisplayables[imageIndex];
 
-    const ImageItem &masterImage = mItemIdToItemMap[masterItemIndex];
-    if (masterImage.thumbnails.empty()) {
-        *itemIndex = masterItemIndex;
+    const ImageItem &imageItem = mItemIdToItemMap[imageItemIndex];
+    if (imageItem.thumbnails.empty()) {
+        *itemIndex = imageItemIndex;
         return OK;
     }
 
-    ssize_t thumbItemIndex = mItemIdToItemMap.indexOfKey(masterImage.thumbnails[0]);
+    ssize_t thumbItemIndex = mItemIdToItemMap.indexOfKey(imageItem.thumbnails[0]);
     if (thumbItemIndex < 0) {
-        // Do not return the master image in this case, fail it so that the
+        // Do not return the image item in this case, fail it so that the
         // thumbnail extraction code knows we really don't have it.
         return INVALID_OPERATION;
     }
diff --git a/media/extractors/mp4/ItemTable.h b/media/extractors/mp4/ItemTable.h
index be81b59..b19dc18 100644
--- a/media/extractors/mp4/ItemTable.h
+++ b/media/extractors/mp4/ItemTable.h
@@ -42,12 +42,12 @@
 
 /*
  * ItemTable keeps track of all image items (including coded images, grids and
- * tiles) inside a HEIF still image (ISO/IEC FDIS 23008-12.2:2017(E)).
+ * tiles) inside a HEIF/AVIF still image (ISO/IEC FDIS 23008-12.2:2017(E)).
  */
 
 class ItemTable : public RefBase {
 public:
-    explicit ItemTable(DataSourceHelper *source);
+    ItemTable(DataSourceHelper *source, bool isHeif);
 
     status_t parse(uint32_t type, off64_t offset, size_t size);
 
@@ -65,6 +65,8 @@
 
 private:
     DataSourceHelper *mDataSource;
+    // If this is true, then this item table is for a HEIF image. Otherwise it is for an AVIF image.
+    bool mIsHeif;
 
     KeyedVector<uint32_t, ItemLoc> mItemLocs;
     Vector<ItemInfo> mItemInfos;
diff --git a/media/extractors/mp4/MPEG4Extractor.cpp b/media/extractors/mp4/MPEG4Extractor.cpp
old mode 100755
new mode 100644
index a976a2b..7989d4b
--- a/media/extractors/mp4/MPEG4Extractor.cpp
+++ b/media/extractors/mp4/MPEG4Extractor.cpp
@@ -149,6 +149,7 @@
     uint8_t *mSrcBuffer;
 
     bool mIsHeif;
+    bool mIsAvif;
     bool mIsAudio;
     bool mIsUsac = false;
     sp<ItemTable> mItemTable;
@@ -202,8 +203,8 @@
         uint32_t duration;
         int32_t compositionOffset;
         uint8_t iv[16];
-        Vector<size_t> clearsizes;
-        Vector<size_t> encryptedsizes;
+        Vector<uint32_t> clearsizes;
+        Vector<uint32_t> encryptedsizes;
     };
     Vector<Sample> mCurrentSamples;
     std::map<off64_t, uint32_t> mDrmOffsets;
@@ -372,6 +373,8 @@
             return MEDIA_MIMETYPE_AUDIO_FLAC;
         case FOURCC("av01"):
             return MEDIA_MIMETYPE_VIDEO_AV1;
+        case FOURCC("vp09"):
+            return MEDIA_MIMETYPE_VIDEO_VP9;
         case FOURCC(".mp3"):
         case 0x6D730055: // "ms U" mp3 audio
             return MEDIA_MIMETYPE_AUDIO_MPEG;
@@ -412,6 +415,7 @@
       mIsHeif(false),
       mHasMoovBox(false),
       mPreferHeif(mime != NULL && !strcasecmp(mime, MEDIA_MIMETYPE_CONTAINER_HEIF)),
+      mIsAvif(false),
       mFirstTrack(NULL),
       mLastTrack(NULL) {
     ALOGV("mime=%s, mPreferHeif=%d", mime, mPreferHeif);
@@ -668,7 +672,7 @@
         }
     }
 
-    if (mIsHeif && (mItemTable != NULL) && (mItemTable->countImages() > 0)) {
+    if ((mIsAvif || mIsHeif) && (mItemTable != NULL) && (mItemTable->countImages() > 0)) {
         off64_t exifOffset;
         size_t exifSize;
         if (mItemTable->getExifOffsetAndSize(&exifOffset, &exifSize) == OK) {
@@ -694,7 +698,7 @@
             }
             mInitCheck = OK;
 
-            ALOGV("adding HEIF image track %u", imageIndex);
+            ALOGV("adding %s image track %u", mIsHeif ? "HEIF" : "AVIF", imageIndex);
             Track *track = new Track;
             if (mLastTrack != NULL) {
                 mLastTrack->next = track;
@@ -720,6 +724,10 @@
                 MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC) != NULL) {
             AMediaFormat_setString(mFileMetaData,
                     AMEDIAFORMAT_KEY_MIME, MEDIA_MIMETYPE_CONTAINER_HEIF);
+        } else if (findTrackByMimePrefix(
+                MEDIA_MIMETYPE_IMAGE_AVIF) != NULL) {
+            AMediaFormat_setString(mFileMetaData,
+                    AMEDIAFORMAT_KEY_MIME, MEDIA_MIMETYPE_IMAGE_AVIF);
         } else {
             AMediaFormat_setString(mFileMetaData,
                     AMEDIAFORMAT_KEY_MIME, "application/octet-stream");
@@ -1149,7 +1157,7 @@
             } else if (chunk_type == FOURCC("moov")) {
                 mInitCheck = OK;
 
-                return UNKNOWN_ERROR;  // Return a dummy error.
+                return UNKNOWN_ERROR;  // Return a generic error.
             }
             break;
         }
@@ -1787,7 +1795,7 @@
                 return ERROR_IO;
             }
 
-            uint16_t data_ref_index __unused = U16_AT(&buffer[6]);
+            // data_ref_index (unused) can be read from U16_AT(&buffer[6])
             uint16_t version = U16_AT(&buffer[8]);
             uint32_t num_channels = U16_AT(&buffer[16]);
 
@@ -1972,6 +1980,7 @@
         case FOURCC("dvh1"):
         case FOURCC("dav1"):
         case FOURCC("av01"):
+        case FOURCC("vp09"):
         {
             uint8_t buffer[78];
             if (chunk_data_size < (ssize_t)sizeof(buffer)) {
@@ -1984,7 +1993,7 @@
                 return ERROR_IO;
             }
 
-            uint16_t data_ref_index __unused = U16_AT(&buffer[6]);
+            // data_ref_index (unused) can be read from U16_AT(&buffer[6])
             uint16_t width = U16_AT(&buffer[6 + 18]);
             uint16_t height = U16_AT(&buffer[6 + 20]);
 
@@ -2434,6 +2443,8 @@
             *offset += chunk_size;
             break;
         }
+
+        case FOURCC("vpcC"):
         case FOURCC("av1C"):
         {
             auto buffer = heapbuffer<uint8_t>(chunk_data_size);
@@ -2571,9 +2582,9 @@
         case FOURCC("iref"):
         case FOURCC("ipro"):
         {
-            if (mIsHeif) {
+            if (mIsHeif || mIsAvif) {
                 if (mItemTable == NULL) {
-                    mItemTable = new ItemTable(mDataSource);
+                    mItemTable = new ItemTable(mDataSource, mIsHeif);
                 }
                 status_t err = mItemTable->parse(
                         chunk_type, data_offset, chunk_data_size);
@@ -2875,6 +2886,21 @@
             break;
         }
 
+        case FOURCC("pasp"):
+        {
+            *offset += chunk_size;
+            // this must be in a VisualSampleEntry box under the Sample Description Box ('stsd')
+            // ignore otherwise
+            if (depth >= 2 && mPath[depth - 2] == FOURCC("stsd")) {
+                status_t err = parsePaspBox(data_offset, chunk_data_size);
+                if (err != OK) {
+                    return err;
+                }
+            }
+
+            break;
+        }
+
         case FOURCC("titl"):
         case FOURCC("perf"):
         case FOURCC("auth"):
@@ -2999,14 +3025,20 @@
                     mIsHeif = true;
                     brandSet.erase(FOURCC("mif1"));
                     brandSet.erase(FOURCC("heic"));
+                } else if (brandSet.count(FOURCC("avif")) > 0 ||
+                       brandSet.count(FOURCC("avis")) > 0) {
+                    ALOGV("identified AVIF image");
+                    mIsAvif = true;
+                    brandSet.erase(FOURCC("avif"));
+                    brandSet.erase(FOURCC("avis"));
                 }
 
                 if (!brandSet.empty()) {
                     // This means that the file should have moov box.
                     // It could be any iso files (mp4, heifs, etc.)
                     mHasMoovBox = true;
-                    if (mIsHeif) {
-                        ALOGV("identified HEIF image with other tracks");
+                    if (mIsHeif || mIsAvif) {
+                        ALOGV("identified %s image with other tracks", mIsHeif ? "HEIF" : "AVIF");
                     }
                 }
             }
@@ -3407,7 +3439,7 @@
     }
 
     // skip
-    unsigned bsmod __unused = br.getBits(3);
+    br.skipBits(3); // bsmod
 
     unsigned acmod = br.getBits(3);
     unsigned lfeon = br.getBits(1);
@@ -3718,19 +3750,18 @@
         return ERROR_IO;
     }
 
-    uint64_t ctime __unused, mtime __unused, duration __unused;
     int32_t id;
 
     if (version == 1) {
-        ctime = U64_AT(&buffer[4]);
-        mtime = U64_AT(&buffer[12]);
+        // ctime (unused) can be read from U64_AT(&buffer[4])
+        // mtime (unused) can be read from U64_AT(&buffer[12])
         id = U32_AT(&buffer[20]);
-        duration = U64_AT(&buffer[28]);
+        // duration (unused) can be read from U64_AT(&buffer[28])
     } else if (version == 0) {
-        ctime = U32_AT(&buffer[4]);
-        mtime = U32_AT(&buffer[8]);
+        // ctime (unused) can be read from U32_AT(&buffer[4])
+        // mtime (unused) can be read from U32_AT(&buffer[8])
         id = U32_AT(&buffer[12]);
-        duration = U32_AT(&buffer[20]);
+        // duration (unused) can be read from U32_AT(&buffer[20])
     } else {
         return ERROR_UNSUPPORTED;
     }
@@ -4048,6 +4079,26 @@
     return OK;
 }
 
+status_t MPEG4Extractor::parsePaspBox(off64_t offset, size_t size) {
+    if (size < 8 || size == SIZE_MAX || mLastTrack == NULL) {
+        return ERROR_MALFORMED;
+    }
+
+    uint32_t data[2]; // hSpacing, vSpacing
+    if (mDataSource->readAt(offset, data, 8) < 8) {
+        return ERROR_IO;
+    }
+    uint32_t hSpacing = ntohl(data[0]);
+    uint32_t vSpacing = ntohl(data[1]);
+
+    if (hSpacing != 0 && vSpacing != 0) {
+        AMediaFormat_setInt32(mLastTrack->meta, AMEDIAFORMAT_KEY_SAR_WIDTH, hSpacing);
+        AMediaFormat_setInt32(mLastTrack->meta, AMEDIAFORMAT_KEY_SAR_HEIGHT, vSpacing);
+    }
+
+    return OK;
+}
+
 status_t MPEG4Extractor::parse3GPPMetaData(off64_t offset, size_t size, int depth) {
     if (size < 4 || size == SIZE_MAX) {
         return ERROR_MALFORMED;
@@ -4325,7 +4376,8 @@
         if (size != 24 || ((ptr[0] != 1 || ptr[1] != 0) && (ptr[0] != 2 || ptr[1] != 1))) {
             return NULL;
         }
-   } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AV1)) {
+   } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AV1)
+           || !strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_AVIF)) {
         void *data;
         size_t size;
         if (!AMediaFormat_getBuffer(track->meta, AMEDIAFORMAT_KEY_CSD_0, &data, &size)) {
@@ -4334,7 +4386,22 @@
 
         const uint8_t *ptr = (const uint8_t *)data;
 
-        if (size < 5 || ptr[0] != 0x81) {  // configurationVersion == 1
+        if (size < 4 || ptr[0] != 0x81) {  // configurationVersion == 1
+            return NULL;
+        }
+        if (!strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_AVIF)) {
+            itemTable = mItemTable;
+        }
+    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_VP9)) {
+        void *data;
+        size_t size;
+        if (!AMediaFormat_getBuffer(track->meta, AMEDIAFORMAT_KEY_CSD_0, &data, &size)) {
+            return NULL;
+        }
+
+        const uint8_t *ptr = (const uint8_t *)data;
+
+        if (size < 5 || ptr[0] != 0x01) {  // configurationVersion == 1
             return NULL;
         }
     }
@@ -4391,6 +4458,10 @@
         if (!AMediaFormat_getBuffer(track->meta, AMEDIAFORMAT_KEY_CSD_0, &data, &size)) {
             return ERROR_MALFORMED;
         }
+    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_VP9)) {
+        if (!AMediaFormat_getBuffer(track->meta, AMEDIAFORMAT_KEY_CSD_0, &data, &size)) {
+            return ERROR_MALFORMED;
+        }
     } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_MPEG4)
             || !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_MPEG2)
             || !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) {
@@ -4638,18 +4709,17 @@
     if (objectType == AOT_SBR || objectType == AOT_PS) {//SBR specific config per 14496-3 tbl 1.13
         if (br.numBitsLeft() < 4) return ERROR_MALFORMED;
         uint32_t extFreqIndex = br.getBits(4);
-        int32_t extSampleRate __unused;
         if (extFreqIndex == 15) {
             if (csd_size < 8) {
                 return ERROR_MALFORMED;
             }
             if (br.numBitsLeft() < 24) return ERROR_MALFORMED;
-            extSampleRate = br.getBits(24);
+            br.skipBits(24); // extSampleRate
         } else {
             if (extFreqIndex == 13 || extFreqIndex == 14) {
                 return ERROR_MALFORMED;
             }
-            extSampleRate = kSamplingRate[extFreqIndex];
+            //extSampleRate = kSamplingRate[extFreqIndex];
         }
         //TODO: save the extension sampling rate value in meta data =>
         //      AMediaFormat_setInt32(mLastTrack->meta, kKeyExtSampleRate, extSampleRate);
@@ -4692,13 +4762,13 @@
                 objectType == AOT_ER_AAC_LD || objectType == AOT_ER_AAC_SCAL ||
                 objectType == AOT_ER_BSAC) {
             if (br.numBitsLeft() < 2) return ERROR_MALFORMED;
-            const int32_t frameLengthFlag __unused = br.getBits(1);
+            br.skipBits(1); // frameLengthFlag
 
             const int32_t dependsOnCoreCoder = br.getBits(1);
 
             if (dependsOnCoreCoder ) {
                 if (br.numBitsLeft() < 14) return ERROR_MALFORMED;
-                const int32_t coreCoderDelay __unused = br.getBits(14);
+                br.skipBits(14); // coreCoderDelay
             }
 
             int32_t extensionFlag = -1;
@@ -4730,64 +4800,64 @@
                 if (br.numBitsLeft() < 32) {
                     return ERROR_MALFORMED;
                 }
-                const int32_t ElementInstanceTag __unused = br.getBits(4);
-                const int32_t Profile __unused = br.getBits(2);
-                const int32_t SamplingFrequencyIndex __unused = br.getBits(4);
+                br.skipBits(4); // ElementInstanceTag
+                br.skipBits(2); // Profile
+                br.skipBits(4); // SamplingFrequencyIndex
                 const int32_t NumFrontChannelElements = br.getBits(4);
                 const int32_t NumSideChannelElements = br.getBits(4);
                 const int32_t NumBackChannelElements = br.getBits(4);
                 const int32_t NumLfeChannelElements = br.getBits(2);
-                const int32_t NumAssocDataElements __unused = br.getBits(3);
-                const int32_t NumValidCcElements __unused = br.getBits(4);
+                br.skipBits(3); // NumAssocDataElements
+                br.skipBits(4); // NumValidCcElements
 
                 const int32_t MonoMixdownPresent = br.getBits(1);
 
                 if (MonoMixdownPresent != 0) {
                     if (br.numBitsLeft() < 4) return ERROR_MALFORMED;
-                    const int32_t MonoMixdownElementNumber __unused = br.getBits(4);
+                    br.skipBits(4); // MonoMixdownElementNumber
                 }
 
                 if (br.numBitsLeft() < 1) return ERROR_MALFORMED;
                 const int32_t StereoMixdownPresent = br.getBits(1);
                 if (StereoMixdownPresent != 0) {
                     if (br.numBitsLeft() < 4) return ERROR_MALFORMED;
-                    const int32_t StereoMixdownElementNumber __unused = br.getBits(4);
+                    br.skipBits(4); // StereoMixdownElementNumber
                 }
 
                 if (br.numBitsLeft() < 1) return ERROR_MALFORMED;
                 const int32_t MatrixMixdownIndexPresent = br.getBits(1);
                 if (MatrixMixdownIndexPresent != 0) {
                     if (br.numBitsLeft() < 3) return ERROR_MALFORMED;
-                    const int32_t MatrixMixdownIndex __unused = br.getBits(2);
-                    const int32_t PseudoSurroundEnable __unused = br.getBits(1);
+                    br.skipBits(2); // MatrixMixdownIndex
+                    br.skipBits(1); // PseudoSurroundEnable
                 }
 
                 int i;
                 for (i=0; i < NumFrontChannelElements; i++) {
                     if (br.numBitsLeft() < 5) return ERROR_MALFORMED;
                     const int32_t FrontElementIsCpe = br.getBits(1);
-                    const int32_t FrontElementTagSelect __unused = br.getBits(4);
+                    br.skipBits(4); // FrontElementTagSelect
                     channelsNum += FrontElementIsCpe ? 2 : 1;
                 }
 
                 for (i=0; i < NumSideChannelElements; i++) {
                     if (br.numBitsLeft() < 5) return ERROR_MALFORMED;
                     const int32_t SideElementIsCpe = br.getBits(1);
-                    const int32_t SideElementTagSelect __unused = br.getBits(4);
+                    br.skipBits(4); // SideElementTagSelect
                     channelsNum += SideElementIsCpe ? 2 : 1;
                 }
 
                 for (i=0; i < NumBackChannelElements; i++) {
                     if (br.numBitsLeft() < 5) return ERROR_MALFORMED;
                     const int32_t BackElementIsCpe = br.getBits(1);
-                    const int32_t BackElementTagSelect __unused = br.getBits(4);
+                    br.skipBits(4); // BackElementTagSelect
                     channelsNum += BackElementIsCpe ? 2 : 1;
                 }
                 channelsEffectiveNum = channelsNum;
 
                 for (i=0; i < NumLfeChannelElements; i++) {
                     if (br.numBitsLeft() < 4) return ERROR_MALFORMED;
-                    const int32_t LfeElementTagSelect __unused = br.getBits(4);
+                    br.skipBits(4); // LfeElementTagSelect
                     channelsNum += 1;
                 }
                 ALOGV("mpeg4 audio channelsNum = %d", channelsNum);
@@ -4883,7 +4953,6 @@
       mStarted(false),
       mBuffer(NULL),
       mSrcBuffer(NULL),
-      mIsHeif(itemTable != NULL),
       mItemTable(itemTable),
       mElstShiftStartTicks(elstShiftStartTicks),
       mElstInitialEmptyEditTicks(elstInitialEmptyEditTicks) {
@@ -4918,6 +4987,8 @@
               !strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC);
     mIsAC4 = !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AC4);
     mIsDolbyVision = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_DOLBY_VISION);
+    mIsHeif = !strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC) && mItemTable != NULL;
+    mIsAvif = !strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_AVIF) && mItemTable != NULL;
 
     if (mIsAVC) {
         void *data;
@@ -5766,7 +5837,7 @@
             return -EINVAL;
         }
 
-        // apply some sanity (vs strict legality) checks
+        // apply some quick (vs strict legality) checks
         //
         static constexpr uint32_t kMaxTrunSampleCount = 10000;
         if (sampleCount > kMaxTrunSampleCount) {
@@ -5912,7 +5983,7 @@
 
     if (options && options->getSeekTo(&seekTimeUs, &mode)) {
         ALOGV("seekTimeUs:%" PRId64, seekTimeUs);
-        if (mIsHeif) {
+        if (mIsHeif || mIsAvif) {
             CHECK(mSampleTable == NULL);
             CHECK(mItemTable != NULL);
             int32_t imageIndex;
@@ -6057,7 +6128,7 @@
         newBuffer = true;
 
         status_t err;
-        if (!mIsHeif) {
+        if (!mIsHeif && !mIsAvif) {
             err = mSampleTable->getMetaDataForSample(mCurrentSampleIndex, &offset, &size,
                                                     (uint64_t*)&cts, &isSyncSample, &stts);
             if(err == OK) {
@@ -6485,9 +6556,9 @@
     if (smpl->encryptedsizes.size()) {
         // store clear/encrypted lengths in metadata
         AMediaFormat_setBuffer(bufmeta, AMEDIAFORMAT_KEY_CRYPTO_PLAIN_SIZES,
-                smpl->clearsizes.array(), smpl->clearsizes.size() * 4);
+                smpl->clearsizes.array(), smpl->clearsizes.size() * sizeof(uint32_t));
         AMediaFormat_setBuffer(bufmeta, AMEDIAFORMAT_KEY_CRYPTO_ENCRYPTED_SIZES,
-                smpl->encryptedsizes.array(), smpl->encryptedsizes.size() * 4);
+                smpl->encryptedsizes.array(), smpl->encryptedsizes.size() * sizeof(uint32_t));
         AMediaFormat_setInt32(bufmeta, AMEDIAFORMAT_KEY_CRYPTO_DEFAULT_IV_SIZE, mDefaultIVSize);
         AMediaFormat_setInt32(bufmeta, AMEDIAFORMAT_KEY_CRYPTO_MODE, mCryptoMode);
         AMediaFormat_setBuffer(bufmeta, AMEDIAFORMAT_KEY_CRYPTO_KEY, mCryptoKey, 16);
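Replacing the literal 4 with sizeof(uint32_t) makes the intended element size of the clear/encrypted size buffers explicit. For context, the two buffers follow the usual CENC subsample layout: parallel arrays of clear and encrypted byte counts, one entry each per subsample. A minimal sketch of how a consumer might walk them (the function name and signature are illustrative, not an existing API):

```
// Sketch only: summing parallel clear/encrypted subsample sizes of a CENC sample.
#include <cstddef>
#include <cstdint>

static size_t totalSampleSize(const uint32_t *clearSizes, const uint32_t *encryptedSizes,
                              size_t numSubsamples) {
    size_t total = 0;
    for (size_t i = 0; i < numSubsamples; ++i) {
        // Each subsample is clearSizes[i] unencrypted bytes followed by
        // encryptedSizes[i] protected bytes.
        total += clearSizes[i] + encryptedSizes[i];
    }
    return total;
}
```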
@@ -6696,7 +6767,8 @@
         || !memcmp(header, "ftypM4A ", 8) || !memcmp(header, "ftypf4v ", 8)
         || !memcmp(header, "ftypkddi", 8) || !memcmp(header, "ftypM4VP", 8)
         || !memcmp(header, "ftypmif1", 8) || !memcmp(header, "ftypheic", 8)
-        || !memcmp(header, "ftypmsf1", 8) || !memcmp(header, "ftyphevc", 8)) {
+        || !memcmp(header, "ftypmsf1", 8) || !memcmp(header, "ftyphevc", 8)
+        || !memcmp(header, "ftypavif", 8) || !memcmp(header, "ftypavis", 8)) {
         *confidence = 0.4;
 
         return true;
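The strings above are compared against the bytes starting at the 'ftyp' fourcc, so adding "ftypavif" and "ftypavis" lets the sniffer accept AVIF still images and image sequences by their major brand. A minimal sketch of that kind of check, assuming (as in the code above) that `header` holds those 8 bytes:

```
// Sketch only: brand check against the 8 bytes starting at the 'ftyp' fourcc.
#include <cstring>

static bool looksLikeAvif(const char header[8]) {
    // "avif" is the brand for AVIF still images, "avis" for AVIF image sequences.
    return memcmp(header, "ftypavif", 8) == 0 || memcmp(header, "ftypavis", 8) == 0;
}
```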
@@ -6713,6 +6785,7 @@
         FOURCC("hvc1"),
         FOURCC("hev1"),
         FOURCC("av01"),
+        FOURCC("vp09"),
         FOURCC("3gp4"),
         FOURCC("mp41"),
         FOURCC("mp42"),
@@ -6731,6 +6804,8 @@
         FOURCC("heic"),  // HEIF image
         FOURCC("msf1"),  // HEIF image sequence
         FOURCC("hevc"),  // HEIF image sequence
+        FOURCC("avif"),  // AVIF image
+        FOURCC("avis"),  // AVIF image sequence
     };
 
     for (size_t i = 0;
diff --git a/media/extractors/mp4/MPEG4Extractor.h b/media/extractors/mp4/MPEG4Extractor.h
index 1e49d50..542a3e6 100644
--- a/media/extractors/mp4/MPEG4Extractor.h
+++ b/media/extractors/mp4/MPEG4Extractor.h
@@ -144,6 +144,7 @@
     bool mIsHeif;
     bool mHasMoovBox;
     bool mPreferHeif;
+    bool mIsAvif;
 
     Track *mFirstTrack, *mLastTrack;
 
@@ -160,6 +161,7 @@
     status_t parseChunk(off64_t *offset, int depth);
     status_t parseITunesMetaData(off64_t offset, size_t size);
     status_t parseColorInfo(off64_t offset, size_t size);
+    status_t parsePaspBox(off64_t offset, size_t size);
     status_t parse3GPPMetaData(off64_t offset, size_t size, int depth);
     void parseID3v2MetaData(off64_t offset, uint64_t size);
     status_t parseQTMetaKey(off64_t data_offset, size_t data_size);
diff --git a/media/extractors/mpeg2/Android.bp b/media/extractors/mpeg2/Android.bp
index bc8632c..4c25314 100644
--- a/media/extractors/mpeg2/Android.bp
+++ b/media/extractors/mpeg2/Android.bp
@@ -1,6 +1,16 @@
 cc_library {
     name: "libmpeg2extractor",
 
+    host_supported: true,
+    target: {
+        darwin: {
+            enabled: false,
+        },
+        android: {
+            shared_libs: ["libvndksupport#29"],
+        },
+    },
+
     defaults: ["extractor-defaults"],
 
     srcs: [
@@ -12,14 +22,13 @@
     shared_libs: [
         "libbase",
         "libcgrouprc#29",
-        "libvndksupport#29",
     ],
 
     header_libs: [
         "libaudioclient_headers",
         "libbase_headers",
         "libstagefright_headers",
-        "libmedia_headers",
+        "libmedia_datasource_headers",
     ],
 
     static_libs: [
@@ -37,7 +46,7 @@
         "libstagefright_esds",
         "libstagefright_foundation_without_imemory",
         "libstagefright_mpeg2extractor",
-        "libstagefright_mpeg2support",
+        "libstagefright_mpeg2support_nocrypto",
         "libutils",
     ],
 
diff --git a/media/extractors/ogg/Android.bp b/media/extractors/ogg/Android.bp
index 7aed683..579065e 100644
--- a/media/extractors/ogg/Android.bp
+++ b/media/extractors/ogg/Android.bp
@@ -20,4 +20,11 @@
         "libvorbisidec",
     ],
 
+    host_supported: true,
+
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
 }
diff --git a/media/extractors/ogg/OggExtractor.cpp b/media/extractors/ogg/OggExtractor.cpp
index eb2246d..62f0808 100644
--- a/media/extractors/ogg/OggExtractor.cpp
+++ b/media/extractors/ogg/OggExtractor.cpp
@@ -43,6 +43,9 @@
     long vorbis_packet_blocksize(vorbis_info *vi,ogg_packet *op);
 }
 
+static constexpr int OGG_PAGE_FLAG_CONTINUED_PACKET = 1;
+static constexpr int OGG_PAGE_FLAG_END_OF_STREAM = 4;
+
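These constants name two bits of the Ogg page header_type field (RFC 3533): 0x01 marks a page whose first packet continues from the previous page, and 0x04 marks the last page of a logical stream (0x02, beginning-of-stream, is not needed here). A minimal sketch of testing those bits on a page's flags byte:

```
// Sketch only: interpreting the Ogg page header_type bits named above.
#include <cstdint>

static bool pageContinuesPacket(uint8_t headerType) {
    return (headerType & 0x01) != 0;  // OGG_PAGE_FLAG_CONTINUED_PACKET
}

static bool pageEndsStream(uint8_t headerType) {
    return (headerType & 0x04) != 0;  // OGG_PAGE_FLAG_END_OF_STREAM
}
```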
 namespace android {
 
 struct OggSource : public MediaTrackHelper {
@@ -297,7 +300,8 @@
     AMediaFormat_setInt32(meta, AMEDIAFORMAT_KEY_IS_SYNC_FRAME, 1);
 
     *out = packet;
-    ALOGV("returning buffer %p", packet);
+    ALOGV("returning buffer %p, size %zu, length %zu",
+          packet, packet->size(), packet->range_length());
     return AMEDIA_OK;
 }
 
@@ -358,10 +362,10 @@
 
         if (!memcmp(signature, "OggS", 4)) {
             if (*pageOffset > startOffset) {
-                ALOGV("skipped %lld bytes of junk to reach next frame",
-                     (long long)(*pageOffset - startOffset));
+                ALOGV("skipped %lld bytes of junk at %lld to reach next frame",
+                     (long long)(*pageOffset - startOffset), (long long)(startOffset));
             }
-
+            ALOGV("found frame at %lld", (long long)(*pageOffset));
             return OK;
         }
 
@@ -629,7 +633,8 @@
     // Calculate timestamps by accumulating durations starting from the first sample of a page;
     // We assume that we only seek to page boundaries.
     AMediaFormat *meta = (*out)->meta_data();
-    if (AMediaFormat_getInt32(meta, AMEDIAFORMAT_KEY_VALID_SAMPLES, &currentPageSamples)) {
+    if (AMediaFormat_getInt32(meta, AMEDIAFORMAT_KEY_VALID_SAMPLES, &currentPageSamples) &&
+            (mCurrentPage.mFlags & OGG_PAGE_FLAG_END_OF_STREAM)) {
         // first packet in page
         if (mOffset == mFirstDataOffset) {
             currentPageSamples -= mStartGranulePosition;
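As the comment above says, timestamps are rebuilt by accumulating packet durations from the start of a page; with this change the per-page sample count is only attached when the page carries the end-of-stream flag. For reference, a minimal sketch of the Vorbis per-packet duration rule that such accumulation relies on (a packet yields (previous blocksize + current blocksize) / 4 samples per channel); names are illustrative:

```
// Sketch only: Vorbis packet duration rule used when accumulating timestamps.
// blocksizePrev/blocksizeCur would come from vorbis_packet_blocksize() on
// consecutive packets of the same logical stream.
static long packetDurationSamples(long blocksizePrev, long blocksizeCur) {
    // Overlap-add of two adjacent blocks produces (prev + cur) / 4 PCM samples.
    return (blocksizePrev + blocksizeCur) / 4;
}
```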
@@ -812,6 +817,7 @@
             }
             buffer = tmp;
 
+            ALOGV("reading %zu bytes @ %zu", packetSize, size_t(dataOffset));
             ssize_t n = mSource->readAt(
                     dataOffset,
                     (uint8_t *)buffer->data() + buffer->range_length(),
@@ -830,8 +836,9 @@
 
             if (gotFullPacket) {
                 // We've just read the entire packet.
+                ALOGV("got full packet, size %zu", fullSize);
 
-                if (mFirstPacketInPage) {
+                if (mFirstPacketInPage && (mCurrentPage.mFlags & OGG_PAGE_FLAG_END_OF_STREAM)) {
                     AMediaFormat *meta = buffer->meta_data();
                     AMediaFormat_setInt32(
                             meta, AMEDIAFORMAT_KEY_VALID_SAMPLES, mCurrentPageSamples);
@@ -864,6 +871,9 @@
             }
 
             // fall through, the buffer now contains the start of the packet.
+            ALOGV("have start of packet, getting rest");
+        } else {
+            ALOGV("moving to next page");
         }
 
         CHECK_EQ(mNextLaceIndex, mCurrentPage.mNumSegments);
@@ -899,9 +909,10 @@
         mNextLaceIndex = 0;
 
         if (buffer != NULL) {
-            if ((mCurrentPage.mFlags & 1) == 0) {
+            if ((mCurrentPage.mFlags & OGG_PAGE_FLAG_CONTINUED_PACKET) == 0) {
                 // This page does not continue the packet, i.e. the packet
                 // is already complete.
+                ALOGV("packet was already complete?!");
 
                 if (timeUs >= 0) {
                     AMediaFormat *meta = buffer->meta_data();
@@ -909,8 +920,10 @@
                 }
 
                 AMediaFormat *meta = buffer->meta_data();
-                AMediaFormat_setInt32(
-                        meta, AMEDIAFORMAT_KEY_VALID_SAMPLES, mCurrentPageSamples);
+                if (mCurrentPage.mFlags & OGG_PAGE_FLAG_END_OF_STREAM) {
+                    AMediaFormat_setInt32(
+                            meta, AMEDIAFORMAT_KEY_VALID_SAMPLES, mCurrentPageSamples);
+                }
                 mFirstPacketInPage = false;
 
                 *out = buffer;
@@ -929,6 +942,7 @@
     for (size_t i = 0; i < mNumHeaders; ++i) {
         // ignore timestamp for configuration packets
         if ((err = _readNextPacket(&packet, /* calcVorbisTimestamp = */ false)) != AMEDIA_OK) {
+            ALOGV("readNextPacket failed");
             return err;
         }
         ALOGV("read packet of size %zu\n", packet->range_length());
@@ -1008,6 +1022,10 @@
 
     size_t size = buffer->range_length();
 
+    if (size == 0) {
+        return 0;
+    }
+
     ogg_buffer buf;
     buf.data = (uint8_t *)data;
     buf.size = size;
diff --git a/media/extractors/tests/Android.bp b/media/extractors/tests/Android.bp
index b3afe2f..0bca6f5 100644
--- a/media/extractors/tests/Android.bp
+++ b/media/extractors/tests/Android.bp
@@ -17,6 +17,7 @@
 cc_test {
     name: "ExtractorUnitTest",
     gtest: true,
+    test_suites: ["device-tests"],
 
     srcs: ["ExtractorUnitTest.cpp"],
 
diff --git a/media/extractors/tests/AndroidTest.xml b/media/extractors/tests/AndroidTest.xml
index 6bb2c8a..fc8152c 100644
--- a/media/extractors/tests/AndroidTest.xml
+++ b/media/extractors/tests/AndroidTest.xml
@@ -19,7 +19,7 @@
         <option name="cleanup" value="true" />
         <option name="push" value="ExtractorUnitTest->/data/local/tmp/ExtractorUnitTest" />
         <option name="push-file"
-            key="https://storage.googleapis.com/android_media/frameworks/av/media/extractors/tests/extractor.zip?unzip=true"
+            key="https://storage.googleapis.com/android_media/frameworks/av/media/extractors/tests/extractor-1.4.zip?unzip=true"
             value="/data/local/tmp/ExtractorUnitTestRes/" />
     </target_preparer>
 
diff --git a/media/extractors/tests/ExtractorUnitTest.cpp b/media/extractors/tests/ExtractorUnitTest.cpp
index 518166e..d91fffa 100644
--- a/media/extractors/tests/ExtractorUnitTest.cpp
+++ b/media/extractors/tests/ExtractorUnitTest.cpp
@@ -20,8 +20,10 @@
 
 #include <datasource/FileSource.h>
 #include <media/stagefright/MediaBufferGroup.h>
+#include <media/stagefright/MediaCodecConstants.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MetaDataUtils.h>
+#include <media/stagefright/foundation/OpusHeader.h>
 
 #include "aac/AACExtractor.h"
 #include "amr/AMRExtractor.h"
@@ -43,11 +45,76 @@
 #define OUTPUT_DUMP_FILE "/data/local/tmp/extractorOutput"
 
 constexpr int32_t kMaxCount = 10;
-constexpr int32_t kOpusSeekPreRollUs = 80000;  // 80 ms;
+constexpr int32_t kAudioDefaultSampleDuration = 20000;                       // 20ms
+constexpr int32_t kRandomSeekToleranceUs = 2 * kAudioDefaultSampleDuration;  // 40 ms;
+constexpr int32_t kRandomSeed = 700;
+constexpr int32_t kUndefined = -1;
+
+enum inputID {
+    // audio streams
+    AAC_1,
+    AMR_NB_1,
+    AMR_WB_1,
+    FLAC_1,
+    GSM_1,
+    MIDI_1,
+    MP3_1,
+    OPUS_1,
+    VORBIS_1,
+    // video streams
+    HEVC_1,
+    HEVC_2,
+    MPEG2_PS_1,
+    MPEG2_TS_1,
+    MPEG4_1,
+    VP9_1,
+    UNKNOWN_ID,
+};
+
+// Lookup table of clips and metadata for component testing
+static const struct InputData {
+    inputID inpId;
+    string mime;
+    string inputFile;
+    int32_t firstParam;
+    int32_t secondParam;
+    int32_t profile;
+    int32_t frameRate;
+} kInputData[] = {
+        {AAC_1, MEDIA_MIMETYPE_AUDIO_AAC, "test_mono_44100Hz_aac.aac", 44100, 1, AACObjectLC,
+         kUndefined},
+        {AMR_NB_1, MEDIA_MIMETYPE_AUDIO_AMR_NB, "bbb_mono_8kHz_amrnb.amr", 8000, 1, kUndefined,
+         kUndefined},
+        {AMR_WB_1, MEDIA_MIMETYPE_AUDIO_AMR_WB, "bbb_mono_16kHz_amrwb.amr", 16000, 1, kUndefined,
+         kUndefined},
+        {FLAC_1, MEDIA_MIMETYPE_AUDIO_RAW, "bbb_stereo_48kHz_flac.flac", 48000, 2, kUndefined,
+         kUndefined},
+        {GSM_1, MEDIA_MIMETYPE_AUDIO_MSGSM, "test_mono_8kHz_gsm.wav", 8000, 1, kUndefined,
+         kUndefined},
+        {MIDI_1, MEDIA_MIMETYPE_AUDIO_RAW, "midi_a.mid", 22050, 2, kUndefined, kUndefined},
+        {MP3_1, MEDIA_MIMETYPE_AUDIO_MPEG, "bbb_stereo_48kHz_mp3.mp3", 48000, 2, kUndefined,
+         kUndefined},
+        {OPUS_1, MEDIA_MIMETYPE_AUDIO_OPUS, "test_stereo_48kHz_opus.opus", 48000, 2, kUndefined,
+         kUndefined},
+        {VORBIS_1, MEDIA_MIMETYPE_AUDIO_VORBIS, "bbb_stereo_48kHz_vorbis.ogg", 48000, 2, kUndefined,
+         kUndefined},
+
+        // Test (b/151677264) for MP4 extractor
+        {HEVC_1, MEDIA_MIMETYPE_VIDEO_HEVC, "crowd_508x240_25fps_hevc.mp4", 508, 240,
+         HEVCProfileMain, 25},
+        {HEVC_2, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC, "test3.heic", 820, 460, kUndefined, kUndefined},
+        {MPEG2_PS_1, MEDIA_MIMETYPE_VIDEO_MPEG2, "swirl_144x136_mpeg2.mpg", 144, 136,
+         MPEG2ProfileMain, 12},
+        {MPEG2_TS_1, MEDIA_MIMETYPE_VIDEO_MPEG2, "bbb_cif_768kbps_30fps_mpeg2.ts", 352, 288,
+         MPEG2ProfileMain, 30},
+        {MPEG4_1, MEDIA_MIMETYPE_VIDEO_MPEG4, "bbb_cif_768kbps_30fps_mpeg4.mkv", 352, 288,
+         MPEG4ProfileSimple, 30},
+        {VP9_1, MEDIA_MIMETYPE_VIDEO_VP9, "bbb_340x280_30fps_vp9.webm", 340, 280, VP9Profile0, 30},
+};
 
 static ExtractorUnitTestEnvironment *gEnv = nullptr;
 
-class ExtractorUnitTest : public ::testing::TestWithParam<pair<string, string>> {
+class ExtractorUnitTest {
   public:
     ExtractorUnitTest() : mInputFp(nullptr), mDataSource(nullptr), mExtractor(nullptr) {}
 
@@ -66,16 +133,29 @@
         }
     }
 
-    virtual void SetUp() override {
+    void setupExtractor(string writerFormat) {
         mExtractorName = unknown_comp;
         mDisableTest = false;
 
         static const std::map<std::string, standardExtractors> mapExtractor = {
-                {"aac", AAC},     {"amr", AMR},         {"mp3", MP3},        {"ogg", OGG},
-                {"wav", WAV},     {"mkv", MKV},         {"flac", FLAC},      {"midi", MIDI},
-                {"mpeg4", MPEG4}, {"mpeg2ts", MPEG2TS}, {"mpeg2ps", MPEG2PS}};
+                {"aac", AAC},
+                {"amr", AMR},
+                {"flac", FLAC},
+                {"mid", MIDI},
+                {"midi", MIDI},
+                {"mkv", MKV},
+                {"mp3", MP3},
+                {"mp4", MPEG4},
+                {"mpeg2ps", MPEG2PS},
+                {"mpeg2ts", MPEG2TS},
+                {"mpeg4", MPEG4},
+                {"mpg", MPEG2PS},
+                {"ogg", OGG},
+                {"opus", OGG},
+                {"ts", MPEG2TS},
+                {"wav", WAV},
+                {"webm", MKV}};
         // Find the component type
-        string writerFormat = GetParam().first;
         if (mapExtractor.find(writerFormat) != mapExtractor.end()) {
             mExtractorName = mapExtractor.at(writerFormat);
         }
@@ -112,6 +192,39 @@
     MediaExtractorPluginHelper *mExtractor;
 };
 
+class ExtractorFunctionalityTest
+    : public ExtractorUnitTest,
+      public ::testing::TestWithParam<tuple<string /* container */, string /* InputFile */,
+                                            int32_t /* numTracks */, bool /* seekSupported */>> {
+  public:
+    virtual void SetUp() override {
+        tuple<string, string, int32_t, bool> params = GetParam();
+        mContainer = get<0>(params);
+        mNumTracks = get<2>(params);
+        setupExtractor(mContainer);
+    }
+    string mContainer;
+    int32_t mNumTracks;
+};
+
+class ConfigParamTest : public ExtractorUnitTest,
+                        public ::testing::TestWithParam<pair<string, inputID>> {
+  public:
+    virtual void SetUp() override { setupExtractor(GetParam().first); }
+
+    struct configFormat {
+        string mime;
+        int32_t width;
+        int32_t height;
+        int32_t sampleRate;
+        int32_t channelCount;
+        int32_t profile;
+        int32_t frameRate;
+    };
+
+    void getFileProperties(inputID inputId, string &inputFile, configFormat &configParam);
+};
+
 int32_t ExtractorUnitTest::setDataSource(string inputFileName) {
     mInputFp = fopen(inputFileName.c_str(), "rb");
     if (!mInputFp) {
@@ -168,6 +281,75 @@
     return 0;
 }
 
+void ConfigParamTest::getFileProperties(inputID inputId, string &inputFile,
+                                        configFormat &configParam) {
+    int32_t inputDataSize = sizeof(kInputData) / sizeof(kInputData[0]);
+    int32_t inputIdx = 0;
+    for (; inputIdx < inputDataSize; inputIdx++) {
+        if (inputId == kInputData[inputIdx].inpId) {
+            break;
+        }
+    }
+    if (inputIdx == inputDataSize) {
+        return;
+    }
+    inputFile += kInputData[inputIdx].inputFile;
+    configParam.mime = kInputData[inputIdx].mime;
+    size_t found = configParam.mime.find("audio/");
+    // Check if 'audio/' is present at the beginning of the mime type
+    if (found == 0) {
+        configParam.sampleRate = kInputData[inputIdx].firstParam;
+        configParam.channelCount = kInputData[inputIdx].secondParam;
+    } else {
+        configParam.width = kInputData[inputIdx].firstParam;
+        configParam.height = kInputData[inputIdx].secondParam;
+    }
+    configParam.profile = kInputData[inputIdx].profile;
+    configParam.frameRate = kInputData[inputIdx].frameRate;
+    return;
+}
+
+void randomSeekTest(MediaTrackHelper *track, int64_t clipDuration) {
+    int32_t status = 0;
+    int32_t seekCount = 0;
+    bool hasTimestamp = false;
+    vector<int64_t> seekToTimeStamp;
+    string seekPtsString;
+
+    srand(kRandomSeed);
+    while (seekCount < kMaxCount) {
+        int64_t timeStamp = ((double)rand() / RAND_MAX) * clipDuration;
+        seekToTimeStamp.push_back(timeStamp);
+        seekPtsString.append(to_string(timeStamp));
+        seekPtsString.append(", ");
+        seekCount++;
+    }
+
+    for (int64_t seekPts : seekToTimeStamp) {
+        MediaTrackHelper::ReadOptions *options = new MediaTrackHelper::ReadOptions(
+                CMediaTrackReadOptions::SEEK_CLOSEST | CMediaTrackReadOptions::SEEK, seekPts);
+        ASSERT_NE(options, nullptr) << "Cannot create read option";
+
+        MediaBufferHelper *buffer = nullptr;
+        status = track->read(&buffer, options);
+        if (buffer) {
+            AMediaFormat *metaData = buffer->meta_data();
+            int64_t timeStamp = 0;
+            hasTimestamp = AMediaFormat_getInt64(metaData, AMEDIAFORMAT_KEY_TIME_US, &timeStamp);
+            ASSERT_TRUE(hasTimestamp) << "Extractor didn't set timestamp for the given sample";
+
+            buffer->release();
+            EXPECT_LE(abs(timeStamp - seekPts), kRandomSeekToleranceUs)
+                    << "Seek unsuccessful. Expected timestamp range ["
+                    << seekPts - kRandomSeekToleranceUs << ", " << seekPts + kRandomSeekToleranceUs
+                    << "] "
+                    << "received " << timeStamp << ", list of input seek timestamps ["
+                    << seekPtsString << "]";
+        }
+        delete options;
+    }
+}
+
 void getSeekablePoints(vector<int64_t> &seekablePoints, MediaTrackHelper *track) {
     int32_t status = 0;
     if (!seekablePoints.empty()) {
@@ -190,20 +372,21 @@
     }
 }
 
-TEST_P(ExtractorUnitTest, CreateExtractorTest) {
+TEST_P(ExtractorFunctionalityTest, CreateExtractorTest) {
     if (mDisableTest) return;
 
     ALOGV("Checks if a valid extractor is created for a given input file");
-    string inputFileName = gEnv->getRes() + GetParam().second;
+    string inputFileName = gEnv->getRes() + get<1>(GetParam());
 
-    ASSERT_EQ(setDataSource(inputFileName), 0)
-            << "SetDataSource failed for" << GetParam().first << "extractor";
+    int32_t status = setDataSource(inputFileName);
+    ASSERT_EQ(status, 0) << "SetDataSource failed for" << mContainer << "extractor";
 
-    ASSERT_EQ(createExtractor(), 0)
-            << "Extractor creation failed for" << GetParam().first << "extractor";
+    status = createExtractor();
+    ASSERT_EQ(status, 0) << "Extractor creation failed for" << mContainer << "extractor";
 
-    // A valid extractor instace should return success for following calls
-    ASSERT_GT(mExtractor->countTracks(), 0);
+    int32_t numTracks = mExtractor->countTracks();
+    ASSERT_EQ(numTracks, mNumTracks)
+            << "Extractor reported wrong number of tracks for the given clip";
 
     AMediaFormat *format = AMediaFormat_new();
     ASSERT_NE(format, nullptr) << "AMediaFormat_new returned null AMediaformat";
@@ -212,20 +395,21 @@
     AMediaFormat_delete(format);
 }
 
-TEST_P(ExtractorUnitTest, ExtractorTest) {
+TEST_P(ExtractorFunctionalityTest, ExtractorTest) {
     if (mDisableTest) return;
 
-    ALOGV("Validates %s Extractor for a given input file", GetParam().first.c_str());
-    string inputFileName = gEnv->getRes() + GetParam().second;
+    ALOGV("Validates %s Extractor for a given input file", mContainer.c_str());
+    string inputFileName = gEnv->getRes() + get<1>(GetParam());
 
     int32_t status = setDataSource(inputFileName);
-    ASSERT_EQ(status, 0) << "SetDataSource failed for" << GetParam().first << "extractor";
+    ASSERT_EQ(status, 0) << "SetDataSource failed for" << mContainer << "extractor";
 
     status = createExtractor();
-    ASSERT_EQ(status, 0) << "Extractor creation failed for" << GetParam().first << "extractor";
+    ASSERT_EQ(status, 0) << "Extractor creation failed for" << mContainer << "extractor";
 
     int32_t numTracks = mExtractor->countTracks();
-    ASSERT_GT(numTracks, 0) << "Extractor didn't find any track for the given clip";
+    ASSERT_EQ(numTracks, mNumTracks)
+            << "Extractor reported wrong number of tracks for the given clip";
 
     for (int32_t idx = 0; idx < numTracks; idx++) {
         MediaTrackHelper *track = mExtractor->getTrack(idx);
@@ -262,20 +446,21 @@
     }
 }
 
-TEST_P(ExtractorUnitTest, MetaDataComparisonTest) {
+TEST_P(ExtractorFunctionalityTest, MetaDataComparisonTest) {
     if (mDisableTest) return;
 
     ALOGV("Validates Extractor's meta data for a given input file");
-    string inputFileName = gEnv->getRes() + GetParam().second;
+    string inputFileName = gEnv->getRes() + get<1>(GetParam());
 
     int32_t status = setDataSource(inputFileName);
-    ASSERT_EQ(status, 0) << "SetDataSource failed for" << GetParam().first << "extractor";
+    ASSERT_EQ(status, 0) << "SetDataSource failed for" << mContainer << "extractor";
 
     status = createExtractor();
-    ASSERT_EQ(status, 0) << "Extractor creation failed for" << GetParam().first << "extractor";
+    ASSERT_EQ(status, 0) << "Extractor creation failed for" << mContainer << "extractor";
 
     int32_t numTracks = mExtractor->countTracks();
-    ASSERT_GT(numTracks, 0) << "Extractor didn't find any track for the given clip";
+    ASSERT_EQ(numTracks, mNumTracks)
+            << "Extractor reported wrong number of tracks for the given clip";
 
     AMediaFormat *extractorFormat = AMediaFormat_new();
     ASSERT_NE(extractorFormat, nullptr) << "AMediaFormat_new returned null AMediaformat";
@@ -337,20 +522,21 @@
     AMediaFormat_delete(extractorFormat);
 }
 
-TEST_P(ExtractorUnitTest, MultipleStartStopTest) {
+TEST_P(ExtractorFunctionalityTest, MultipleStartStopTest) {
     if (mDisableTest) return;
 
-    ALOGV("Test %s extractor for multiple start and stop calls", GetParam().first.c_str());
-    string inputFileName = gEnv->getRes() + GetParam().second;
+    ALOGV("Test %s extractor for multiple start and stop calls", mContainer.c_str());
+    string inputFileName = gEnv->getRes() + get<1>(GetParam());
 
     int32_t status = setDataSource(inputFileName);
-    ASSERT_EQ(status, 0) << "SetDataSource failed for" << GetParam().first << "extractor";
+    ASSERT_EQ(status, 0) << "SetDataSource failed for" << mContainer << "extractor";
 
     status = createExtractor();
-    ASSERT_EQ(status, 0) << "Extractor creation failed for" << GetParam().first << "extractor";
+    ASSERT_EQ(status, 0) << "Extractor creation failed for" << mContainer << "extractor";
 
     int32_t numTracks = mExtractor->countTracks();
-    ASSERT_GT(numTracks, 0) << "Extractor didn't find any track for the given clip";
+    ASSERT_EQ(numTracks, mNumTracks)
+            << "Extractor reported wrong number of tracks for the given clip";
 
     // start/stop the tracks multiple times
     for (int32_t count = 0; count < kMaxCount; count++) {
@@ -379,27 +565,28 @@
     }
 }
 
-TEST_P(ExtractorUnitTest, SeekTest) {
-    // Both Flac and Wav extractor can give samples from any pts and mark the given sample as
-    // sync frame. So, this seek test is not applicable to FLAC and WAV extractors
-    if (mDisableTest || mExtractorName == FLAC || mExtractorName == WAV) return;
+TEST_P(ExtractorFunctionalityTest, SeekTest) {
+    if (mDisableTest) return;
 
-    ALOGV("Validates %s Extractor behaviour for different seek modes", GetParam().first.c_str());
-    string inputFileName = gEnv->getRes() + GetParam().second;
+    ALOGV("Validates %s Extractor behaviour for different seek modes", mContainer.c_str());
+    string inputFileName = gEnv->getRes() + get<1>(GetParam());
 
     int32_t status = setDataSource(inputFileName);
-    ASSERT_EQ(status, 0) << "SetDataSource failed for" << GetParam().first << "extractor";
+    ASSERT_EQ(status, 0) << "SetDataSource failed for" << mContainer << "extractor";
 
     status = createExtractor();
-    ASSERT_EQ(status, 0) << "Extractor creation failed for" << GetParam().first << "extractor";
+    ASSERT_EQ(status, 0) << "Extractor creation failed for" << mContainer << "extractor";
 
     int32_t numTracks = mExtractor->countTracks();
-    ASSERT_GT(numTracks, 0) << "Extractor didn't find any track for the given clip";
+    ASSERT_EQ(numTracks, mNumTracks)
+            << "Extractor reported wrong number of tracks for the given clip";
 
     uint32_t seekFlag = mExtractor->flags();
-    if (!(seekFlag & MediaExtractorPluginHelper::CAN_SEEK)) {
-        cout << "[   WARN   ] Test Skipped. " << GetParam().first
-             << " Extractor doesn't support seek\n";
+    bool seekSupported = get<3>(GetParam());
+    bool seekable = seekFlag & MediaExtractorPluginHelper::CAN_SEEK;
+    if (!seekable) {
+        ASSERT_FALSE(seekSupported) << mContainer << "Extractor is expected to support seek ";
+        cout << "[   WARN   ] Test Skipped. " << mContainer << " Extractor doesn't support seek\n";
         return;
     }
 
@@ -415,19 +602,73 @@
         MediaBufferGroup *bufferGroup = new MediaBufferGroup();
         status = cTrack->start(track, bufferGroup->wrap());
         ASSERT_EQ(OK, (media_status_t)status) << "Failed to start the track";
-        getSeekablePoints(seekablePoints, track);
-        ASSERT_GT(seekablePoints.size(), 0)
-                << "Failed to get seekable points for " << GetParam().first << " extractor";
+
+        // For the Flac, Wav and Midi extractors, all samples are seek points.
+        // We cannot create a list of all seekable points for these.
+        // This means that if we pass a seekToTimeStamp between two seek points, we may
+        // end up getting the timestamp of the next sample as a seekable timestamp.
+        // This timestamp may or may not be part of the seekable point vector, thereby
+        // failing the test. So we test these extractors using a random seek test instead.
+        if (mExtractorName == FLAC || mExtractorName == WAV || mExtractorName == MIDI) {
+            AMediaFormat *trackMeta = AMediaFormat_new();
+            ASSERT_NE(trackMeta, nullptr) << "AMediaFormat_new returned null AMediaformat";
+
+            status = mExtractor->getTrackMetaData(trackMeta, idx, 1);
+            ASSERT_EQ(OK, (media_status_t)status) << "Failed to get trackMetaData";
+
+            int64_t clipDuration = 0;
+            AMediaFormat_getInt64(trackMeta, AMEDIAFORMAT_KEY_DURATION, &clipDuration);
+            ASSERT_GT(clipDuration, 0) << "Invalid clip duration ";
+            randomSeekTest(track, clipDuration);
+            AMediaFormat_delete(trackMeta);
+            continue;
+        }
 
         AMediaFormat *trackFormat = AMediaFormat_new();
         ASSERT_NE(trackFormat, nullptr) << "AMediaFormat_new returned null format";
         status = track->getFormat(trackFormat);
         ASSERT_EQ(OK, (media_status_t)status) << "Failed to get track meta data";
 
-        bool isOpus = false;
         const char *mime;
-        AMediaFormat_getString(trackFormat, AMEDIAFORMAT_KEY_MIME, &mime);
-        if (!strcmp(mime, "audio/opus")) isOpus = true;
+        ASSERT_TRUE(AMediaFormat_getString(trackFormat, AMEDIAFORMAT_KEY_MIME, &mime))
+                << "Failed to get mime";
+
+        // Image formats are not expected to be seekable
+        if (!strncmp(mime, "image/", 6)) continue;
+
+        // Request seekable points for the remaining extractors; these are used to validate
+        // seek accuracy. Depending on the seek mode, we expect the extractors to return the
+        // expected sync frame. We don't use the random seek test for these extractors because
+        // they aren't expected to seek to arbitrary samples. MP4, for instance, can seek to
+        // the next/previous sync frame but not to samples between two sync frames.
+        getSeekablePoints(seekablePoints, track);
+        ASSERT_GT(seekablePoints.size(), 0)
+                << "Failed to get seekable points for " << mContainer << " extractor";
+
+        bool isOpus = false;
+        int64_t opusSeekPreRollUs = 0;
+        if (!strcmp(mime, "audio/opus")) {
+            isOpus = true;
+            void *seekPreRollBuf = nullptr;
+            size_t size = 0;
+            if (!AMediaFormat_getBuffer(trackFormat, "csd-2", &seekPreRollBuf, &size)) {
+                size_t opusHeadSize = 0;
+                size_t codecDelayBufSize = 0;
+                size_t seekPreRollBufSize = 0;
+                void *csdBuffer = nullptr;
+                void *opusHeadBuf = nullptr;
+                void *codecDelayBuf = nullptr;
+                AMediaFormat_getBuffer(trackFormat, "csd-0", &csdBuffer, &size);
+                ASSERT_NE(csdBuffer, nullptr);
+
+                GetOpusHeaderBuffers((uint8_t *)csdBuffer, size, &opusHeadBuf, &opusHeadSize,
+                                     &codecDelayBuf, &codecDelayBufSize, &seekPreRollBuf,
+                                     &seekPreRollBufSize);
+            }
+            ASSERT_NE(seekPreRollBuf, nullptr)
+                    << "Invalid track format. SeekPreRoll info missing for Opus file";
+            opusSeekPreRollUs = *((int64_t *)seekPreRollBuf);
+        }
         AMediaFormat_delete(trackFormat);
 
         int32_t seekIdx = 0;
@@ -448,7 +689,7 @@
                 // extractor is calculated based on (seekPts - seekPreRollUs).
                 // So we add the preRoll value to the timeStamp we want to seek to.
                 if (isOpus) {
-                    seekToTimeStamp += kOpusSeekPreRollUs;
+                    seekToTimeStamp += opusSeekPreRollUs;
                 }
 
                 MediaTrackHelper::ReadOptions *options = new MediaTrackHelper::ReadOptions(
@@ -496,24 +737,517 @@
     seekablePoints.clear();
 }
 
-// TODO: (b/145332185)
-// Add MIDI inputs
-INSTANTIATE_TEST_SUITE_P(ExtractorUnitTestAll, ExtractorUnitTest,
-                         ::testing::Values(make_pair("aac", "loudsoftaac.aac"),
-                                           make_pair("amr", "testamr.amr"),
-                                           make_pair("amr", "amrwb.wav"),
-                                           make_pair("ogg", "john_cage.ogg"),
-                                           make_pair("wav", "monotestgsm.wav"),
-                                           make_pair("mpeg2ts", "segment000001.ts"),
-                                           make_pair("flac", "sinesweepflac.flac"),
-                                           make_pair("ogg", "testopus.opus"),
-                                           make_pair("mkv", "sinesweepvorbis.mkv"),
-                                           make_pair("mpeg4", "sinesweepoggmp4.mp4"),
-                                           make_pair("mp3", "sinesweepmp3lame.mp3"),
-                                           make_pair("mkv", "swirl_144x136_vp9.webm"),
-                                           make_pair("mkv", "swirl_144x136_vp8.webm"),
-                                           make_pair("mpeg2ps", "swirl_144x136_mpeg2.mpg"),
-                                           make_pair("mpeg4", "swirl_132x130_mpeg4.mp4")));
+// Tests the extractors for seek beyond range : (0, ClipDuration)
+TEST_P(ExtractorFunctionalityTest, MonkeySeekTest) {
+    if (mDisableTest) return;
+    // TODO(b/155630778): Enable test for wav extractors
+    if (mExtractorName == WAV) return;
+
+    ALOGV("Validates %s Extractor behaviour for invalid seek points", mContainer.c_str());
+    string inputFileName = gEnv->getRes() + get<1>(GetParam());
+
+    int32_t status = setDataSource(inputFileName);
+    ASSERT_EQ(status, 0) << "SetDataSource failed for" << mContainer << "extractor";
+
+    status = createExtractor();
+    ASSERT_EQ(status, 0) << "Extractor creation failed for" << mContainer << "extractor";
+
+    int32_t numTracks = mExtractor->countTracks();
+    ASSERT_EQ(numTracks, mNumTracks)
+            << "Extractor reported wrong number of tracks for the given clip";
+
+    uint32_t seekFlag = mExtractor->flags();
+    bool seekSupported = get<3>(GetParam());
+    bool seekable = seekFlag & MediaExtractorPluginHelper::CAN_SEEK;
+    if (!seekable) {
+        ASSERT_FALSE(seekSupported) << mContainer << "Extractor is expected to support seek ";
+        cout << "[   WARN   ] Test Skipped. " << mContainer << " Extractor doesn't support seek\n";
+        return;
+    }
+
+    for (int32_t idx = 0; idx < numTracks; idx++) {
+        MediaTrackHelper *track = mExtractor->getTrack(idx);
+        ASSERT_NE(track, nullptr) << "Failed to get track for index " << idx;
+
+        CMediaTrack *cTrack = wrap(track);
+        ASSERT_NE(cTrack, nullptr) << "Failed to get track wrapper for index " << idx;
+
+        MediaBufferGroup *bufferGroup = new MediaBufferGroup();
+        status = cTrack->start(track, bufferGroup->wrap());
+        ASSERT_EQ(OK, (media_status_t)status) << "Failed to start the track";
+
+        AMediaFormat *trackMeta = AMediaFormat_new();
+        ASSERT_NE(trackMeta, nullptr) << "AMediaFormat_new returned null AMediaformat";
+
+        status = mExtractor->getTrackMetaData(
+                trackMeta, idx, MediaExtractorPluginHelper::kIncludeExtensiveMetaData);
+        ASSERT_EQ(OK, (media_status_t)status) << "Failed to get trackMetaData";
+
+        const char *mime;
+        ASSERT_TRUE(AMediaFormat_getString(trackMeta, AMEDIAFORMAT_KEY_MIME, &mime))
+                << "Failed to get mime";
+
+        int64_t clipDuration = 0;
+        AMediaFormat_getInt64(trackMeta, AMEDIAFORMAT_KEY_DURATION, &clipDuration);
+        // Image formats are not expected to have duration information
+        ASSERT_TRUE(clipDuration > 0 || !strncmp(mime, "image/", 6)) << "Invalid clip duration ";
+        AMediaFormat_delete(trackMeta);
+
+        int64_t seekToTimeStampUs[] = {-clipDuration, clipDuration / 2, clipDuration,
+                                       clipDuration * 2};
+        for (int32_t mode = CMediaTrackReadOptions::SEEK_PREVIOUS_SYNC;
+             mode <= CMediaTrackReadOptions::SEEK_CLOSEST; mode++) {
+            for (int64_t seekTimeUs : seekToTimeStampUs) {
+                MediaTrackHelper::ReadOptions *options = new MediaTrackHelper::ReadOptions(
+                        mode | CMediaTrackReadOptions::SEEK, seekTimeUs);
+                ASSERT_NE(options, nullptr) << "Cannot create read option";
+
+                MediaBufferHelper *buffer = nullptr;
+                status = track->read(&buffer, options);
+                if (status == AMEDIA_ERROR_END_OF_STREAM) {
+                    delete options;
+                    continue;
+                }
+                if (buffer) {
+                    AMediaFormat *metaData = buffer->meta_data();
+                    int64_t timeStamp;
+                    AMediaFormat_getInt64(metaData, AMEDIAFORMAT_KEY_TIME_US, &timeStamp);
+                    ALOGV("Seeked to timestamp : %lld, requested : %lld", (long long)timeStamp,
+                          (long long)seekTimeUs);
+                    buffer->release();
+                }
+                delete options;
+            }
+        }
+        status = cTrack->stop(track);
+        ASSERT_EQ(OK, status) << "Failed to stop the track";
+        delete bufferGroup;
+        delete track;
+    }
+}
+
+// Tests extractors for invalid tracks
+TEST_P(ExtractorFunctionalityTest, SanityTest) {
+    if (mDisableTest) return;
+    // TODO(b/155626946): Enable test for MPEG2 TS/PS extractors
+    if (mExtractorName == MPEG2TS || mExtractorName == MPEG2PS) return;
+
+    ALOGV("Validates %s Extractor behaviour for invalid tracks", mContainer.c_str());
+    string inputFileName = gEnv->getRes() + get<1>(GetParam());
+
+    int32_t status = setDataSource(inputFileName);
+    ASSERT_EQ(status, 0) << "SetDataSource failed for" << mContainer << "extractor";
+
+    status = createExtractor();
+    ASSERT_EQ(status, 0) << "Extractor creation failed for" << mContainer << "extractor";
+
+    int32_t numTracks = mExtractor->countTracks();
+    ASSERT_EQ(numTracks, mNumTracks)
+            << "Extractor reported wrong number of tracks for the given clip";
+
+    int32_t trackIdx[] = {-1, numTracks};
+    for (int32_t idx : trackIdx) {
+        MediaTrackHelper *track = mExtractor->getTrack(idx);
+        ASSERT_EQ(track, nullptr) << "Failed to get track for index " << idx << "\n";
+
+        AMediaFormat *extractorFormat = AMediaFormat_new();
+        ASSERT_NE(extractorFormat, nullptr) << "AMediaFormat_new returned null AMediaformat";
+
+        status = mExtractor->getTrackMetaData(
+                extractorFormat, idx, MediaExtractorPluginHelper::kIncludeExtensiveMetaData);
+        ASSERT_NE(OK, status) << "getTrackMetaData should return error for invalid index " << idx;
+        AMediaFormat_delete(extractorFormat);
+    }
+
+    // Validate Extractor's getTrackMetaData for null format
+    AMediaFormat *mediaFormat = nullptr;
+    status = mExtractor->getTrackMetaData(mediaFormat, 0,
+                                          MediaExtractorPluginHelper::kIncludeExtensiveMetaData);
+    ASSERT_NE(OK, status) << "getTrackMetaData should return error for null Media format";
+}
+
+// This test validates config params for a given input file.
+// For this test we only take single-track files, since the focus of this test is
+// to validate the file properties reported by the extractor and not multi-track behavior.
+TEST_P(ConfigParamTest, ConfigParamValidation) {
+    if (mDisableTest) return;
+
+    string container = GetParam().first;
+    ALOGV("Validates %s Extractor for input's file properties", container.c_str());
+    string inputFileName = gEnv->getRes();
+    inputID inputFileId = GetParam().second;
+    configFormat configParam;
+    getFileProperties(inputFileId, inputFileName, configParam);
+
+    int32_t status = setDataSource(inputFileName);
+    ASSERT_EQ(status, 0) << "SetDataSource failed for " << container << "extractor";
+
+    status = createExtractor();
+    ASSERT_EQ(status, 0) << "Extractor creation failed for " << container << "extractor";
+
+    int32_t numTracks = mExtractor->countTracks();
+    ASSERT_GT(numTracks, 0) << "Extractor didn't find any track for the given clip";
+
+    MediaTrackHelper *track = mExtractor->getTrack(0);
+    ASSERT_NE(track, nullptr) << "Failed to get track for index 0";
+
+    AMediaFormat *trackFormat = AMediaFormat_new();
+    ASSERT_NE(trackFormat, nullptr) << "AMediaFormat_new returned null format";
+
+    status = track->getFormat(trackFormat);
+    ASSERT_EQ(OK, (media_status_t)status) << "Failed to get track meta data";
+
+    const char *trackMime;
+    bool valueFound = AMediaFormat_getString(trackFormat, AMEDIAFORMAT_KEY_MIME, &trackMime);
+    ASSERT_TRUE(valueFound) << "Mime type not set by extractor";
+    ASSERT_STREQ(configParam.mime.c_str(), trackMime) << "Invalid track format";
+
+    if (!strncmp(trackMime, "audio/", 6)) {
+        int32_t trackSampleRate, trackChannelCount;
+        ASSERT_TRUE(AMediaFormat_getInt32(trackFormat, AMEDIAFORMAT_KEY_CHANNEL_COUNT,
+                                          &trackChannelCount));
+        ASSERT_TRUE(
+                AMediaFormat_getInt32(trackFormat, AMEDIAFORMAT_KEY_SAMPLE_RATE, &trackSampleRate));
+        ASSERT_EQ(configParam.sampleRate, trackSampleRate) << "SampleRate not as expected";
+        ASSERT_EQ(configParam.channelCount, trackChannelCount) << "ChannelCount not as expected";
+    } else {
+        int32_t trackWidth, trackHeight;
+        ASSERT_TRUE(AMediaFormat_getInt32(trackFormat, AMEDIAFORMAT_KEY_WIDTH, &trackWidth));
+        ASSERT_TRUE(AMediaFormat_getInt32(trackFormat, AMEDIAFORMAT_KEY_HEIGHT, &trackHeight));
+        ASSERT_EQ(configParam.width, trackWidth) << "Width not as expected";
+        ASSERT_EQ(configParam.height, trackHeight) << "Height not as expected";
+
+        if (configParam.frameRate != kUndefined) {
+            int32_t frameRate;
+            ASSERT_TRUE(
+                    AMediaFormat_getInt32(trackFormat, AMEDIAFORMAT_KEY_FRAME_RATE, &frameRate));
+            ASSERT_EQ(configParam.frameRate, frameRate) << "frameRate not as expected";
+        }
+    }
+    // validate the profile for the input clip
+    int32_t profile;
+    if (configParam.profile != kUndefined) {
+        if (AMediaFormat_getInt32(trackFormat, AMEDIAFORMAT_KEY_PROFILE, &profile)) {
+            ASSERT_EQ(configParam.profile, profile) << "profile not as expected";
+        } else if (mExtractorName == AAC &&
+                   AMediaFormat_getInt32(trackFormat, AMEDIAFORMAT_KEY_AAC_PROFILE, &profile)) {
+            ASSERT_EQ(configParam.profile, profile) << "profile not as expected";
+        } else {
+            ASSERT_TRUE(false) << "profile not returned in extractor";
+        }
+    }
+
+    delete track;
+    AMediaFormat_delete(trackFormat);
+}
+
+class ExtractorComparison
+    : public ExtractorUnitTest,
+      public ::testing::TestWithParam<pair<string /* InputFile0 */, string /* InputFile1 */>> {
+  public:
+    ~ExtractorComparison() {
+        for (int8_t *extractorOp : mExtractorOutput) {
+            if (extractorOp != nullptr) {
+                free(extractorOp);
+            }
+        }
+    }
+
+    int8_t *mExtractorOutput[2]{};
+    size_t mExtractorOuputSize[2]{};
+};
+
+size_t allocateOutputBuffers(string inputFileName, AMediaFormat *extractorFormat) {
+    size_t bufferSize = 0u;
+    // allocating the buffer size as sampleRate * channelCount * clipDuration since
+    // some extractors like flac, midi and wav decode the file. These extractors
+    // advertise the mime type as raw.
+    const char *mime;
+    AMediaFormat_getString(extractorFormat, AMEDIAFORMAT_KEY_MIME, &mime);
+    if (!strcmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) {
+        int64_t clipDurationUs = -1;
+        int32_t channelCount = -1;
+        int32_t sampleRate = -1;
+        int32_t bitsPerSample = -1;
+        if (!AMediaFormat_getInt32(extractorFormat, AMEDIAFORMAT_KEY_CHANNEL_COUNT,
+                                   &channelCount) || channelCount <= 0) {
+            ALOGE("Invalid channelCount for input file : %s", inputFileName.c_str());
+            return 0;
+        }
+        if (!AMediaFormat_getInt32(extractorFormat, AMEDIAFORMAT_KEY_SAMPLE_RATE, &sampleRate) ||
+            sampleRate <= 0) {
+            ALOGE("Invalid sampleRate for input file : %s", inputFileName.c_str());
+            return 0;
+        }
+        if (!AMediaFormat_getInt64(extractorFormat, AMEDIAFORMAT_KEY_DURATION, &clipDurationUs) ||
+            clipDurationUs <= 0) {
+            ALOGE("Invalid clip duration for input file : %s", inputFileName.c_str());
+            return 0;
+        }
+        if (!AMediaFormat_getInt32(extractorFormat, AMEDIAFORMAT_KEY_PCM_ENCODING,
+                                   &bitsPerSample) || bitsPerSample <= 0) {
+            ALOGE("Invalid bits per sample for input file : %s", inputFileName.c_str());
+            return 0;
+        }
+        bufferSize = bitsPerSample * channelCount * sampleRate * (clipDurationUs / 1000000 + 1);
+    } else {
+        struct stat buf;
+        int32_t status = stat(inputFileName.c_str(), &buf);
+        if (status != 0) {
+            ALOGE("Unable to get file properties for: %s", inputFileName.c_str());
+            return 0;
+        }
+        bufferSize = buf.st_size;
+    }
+    return bufferSize;
+}
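To make the sizing above concrete: for a decoded (raw PCM) track the output buffer has to hold roughly sampleRate × channelCount × bytes-per-sample × duration bytes. A back-of-the-envelope sketch with hypothetical clip parameters (not values taken from the test inputs):

```
// Sketch only: rough upper bound for the decoded output of a hypothetical clip.
#include <cstdint>
#include <cstdio>

int main() {
    const int64_t sampleRate = 48000;        // Hz
    const int64_t channelCount = 2;          // stereo
    const int64_t bytesPerSample = 2;        // 16-bit PCM
    const int64_t clipDurationSec = 10 + 1;  // round the duration up, as the code above does

    const int64_t bytesNeeded = sampleRate * channelCount * bytesPerSample * clipDurationSec;
    printf("%lld bytes (~2 MiB)\n", (long long)bytesNeeded);  // 2,112,000
    return 0;
}
```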
+
+// Compare output of two extractors for identical content
+TEST_P(ExtractorComparison, ExtractorComparisonTest) {
+    vector<string> inputFileNames = {GetParam().first, GetParam().second};
+    size_t extractedOutputSize[2]{};
+    AMediaFormat *extractorFormat[2]{};
+    int32_t status = OK;
+
+    for (int32_t idx = 0; idx < inputFileNames.size(); idx++) {
+        string containerFormat = inputFileNames[idx].substr(inputFileNames[idx].find(".") + 1);
+        setupExtractor(containerFormat);
+        if (mDisableTest) {
+            ALOGV("Unknown extractor %s. Skipping the test", containerFormat.c_str());
+            return;
+        }
+
+        ALOGV("Validates %s Extractor for %s", containerFormat.c_str(),
+              inputFileNames[idx].c_str());
+        string inputFileName = gEnv->getRes() + inputFileNames[idx];
+
+        status = setDataSource(inputFileName);
+        ASSERT_EQ(status, 0) << "SetDataSource failed for" << containerFormat << "extractor";
+
+        status = createExtractor();
+        ASSERT_EQ(status, 0) << "Extractor creation failed for " << containerFormat << " extractor";
+
+        int32_t numTracks = mExtractor->countTracks();
+        ASSERT_EQ(numTracks, 1) << "This test expects inputs with one track only";
+
+        int32_t trackIdx = 0;
+        MediaTrackHelper *track = mExtractor->getTrack(trackIdx);
+        ASSERT_NE(track, nullptr) << "Failed to get track for index " << trackIdx;
+
+        extractorFormat[idx] = AMediaFormat_new();
+        ASSERT_NE(extractorFormat[idx], nullptr) << "AMediaFormat_new returned null AMediaformat";
+
+        status = track->getFormat(extractorFormat[idx]);
+        ASSERT_EQ(OK, (media_status_t)status) << "Failed to get track meta data";
+
+        CMediaTrack *cTrack = wrap(track);
+        ASSERT_NE(cTrack, nullptr) << "Failed to get track wrapper for index " << trackIdx;
+
+        mExtractorOutputSize[idx] = allocateOutputBuffers(inputFileName, extractorFormat[idx]);
+        ASSERT_GT(mExtractorOutputSize[idx], 0u) << " Invalid size for output buffers";
+
+        mExtractorOutput[idx] = (int8_t *)calloc(1, mExtractorOutputSize[idx]);
+        ASSERT_NE(mExtractorOutput[idx], nullptr)
+                << "Unable to allocate memory for writing extractor's output";
+
+        MediaBufferGroup *bufferGroup = new MediaBufferGroup();
+        status = cTrack->start(track, bufferGroup->wrap());
+        ASSERT_EQ(OK, (media_status_t)status) << "Failed to start the track";
+
+        int32_t offset = 0;
+        while (status != AMEDIA_ERROR_END_OF_STREAM) {
+            MediaBufferHelper *buffer = nullptr;
+            status = track->read(&buffer);
+            ALOGV("track->read Status = %d buffer %p", status, buffer);
+            if (buffer) {
+                ASSERT_LE(offset + buffer->range_length(), mExtractorOutputSize[idx])
+                        << "Memory overflow. Extracted output size more than expected";
+
+                memcpy(mExtractorOutput[idx] + offset, buffer->data(), buffer->range_length());
+                extractedOutputSize[idx] += buffer->range_length();
+                offset += buffer->range_length();
+                buffer->release();
+            }
+        }
+        status = cTrack->stop(track);
+        ASSERT_EQ(OK, status) << "Failed to stop the track";
+
+        fclose(mInputFp);
+        delete bufferGroup;
+        delete track;
+        mDataSource.clear();
+        delete mExtractor;
+        mInputFp = nullptr;
+        mExtractor = nullptr;
+    }
+
+    // Compare the meta data from both the extractors
+    const char *mime[2];
+    AMediaFormat_getString(extractorFormat[0], AMEDIAFORMAT_KEY_MIME, &mime[0]);
+    AMediaFormat_getString(extractorFormat[1], AMEDIAFORMAT_KEY_MIME, &mime[1]);
+    ASSERT_STREQ(mime[0], mime[1]) << "Mismatch between extractor's format";
+
+    if (!strncmp(mime[0], "audio/", 6)) {
+        int32_t channelCount0, channelCount1;
+        int32_t sampleRate0, sampleRate1;
+        ASSERT_TRUE(AMediaFormat_getInt32(extractorFormat[0], AMEDIAFORMAT_KEY_CHANNEL_COUNT,
+                                          &channelCount0));
+        ASSERT_TRUE(AMediaFormat_getInt32(extractorFormat[0], AMEDIAFORMAT_KEY_SAMPLE_RATE,
+                                          &sampleRate0));
+        ASSERT_TRUE(AMediaFormat_getInt32(extractorFormat[1], AMEDIAFORMAT_KEY_CHANNEL_COUNT,
+                                          &channelCount1));
+        ASSERT_TRUE(AMediaFormat_getInt32(extractorFormat[1], AMEDIAFORMAT_KEY_SAMPLE_RATE,
+                                          &sampleRate1));
+        ASSERT_EQ(channelCount0, channelCount1) << "Mismatch between extractor's channelCount";
+        ASSERT_EQ(sampleRate0, sampleRate1) << "Mismatch between extractor's sampleRate";
+    } else if (!strncmp(mime[0], "video/", 6)) {
+        int32_t width0, height0;
+        int32_t width1, height1;
+        ASSERT_TRUE(AMediaFormat_getInt32(extractorFormat[0], AMEDIAFORMAT_KEY_WIDTH, &width0));
+        ASSERT_TRUE(AMediaFormat_getInt32(extractorFormat[0], AMEDIAFORMAT_KEY_HEIGHT, &height0));
+        ASSERT_TRUE(AMediaFormat_getInt32(extractorFormat[1], AMEDIAFORMAT_KEY_WIDTH, &width1));
+        ASSERT_TRUE(AMediaFormat_getInt32(extractorFormat[1], AMEDIAFORMAT_KEY_HEIGHT, &height1));
+        ASSERT_EQ(width0, width1) << "Mismatch between extractor's width";
+        ASSERT_EQ(height0, height1) << "Mismatch between extractor's height";
+    } else {
+        ASSERT_TRUE(false) << "Invalid mime type " << mime[0];
+    }
+
+    for (AMediaFormat *exFormat : extractorFormat) {
+        AMediaFormat_delete(exFormat);
+    }
+
+    // Compare the extracted outputs of both extractor
+    ASSERT_EQ(extractedOutputSize[0], extractedOutputSize[1])
+            << "Extractor's output size doesn't match between " << inputFileNames[0] << "and "
+            << inputFileNames[1] << " extractors";
+    status = memcmp(mExtractorOutput[0], mExtractorOutput[1], extractedOutputSize[0]);
+    ASSERT_EQ(status, 0) << "Extracted content mismatch between " << inputFileNames[0] << "and "
+                         << inputFileNames[1] << " extractors";
+}
+
+INSTANTIATE_TEST_SUITE_P(
+        ExtractorComparisonAll, ExtractorComparison,
+        ::testing::Values(make_pair("swirl_144x136_vp9.mp4", "swirl_144x136_vp9.webm"),
+                          make_pair("video_480x360_mp4_vp9_333kbps_25fps.mp4",
+                                    "video_480x360_webm_vp9_333kbps_25fps.webm"),
+                          make_pair("video_1280x720_av1_hdr_static_3mbps.mp4",
+                                    "video_1280x720_av1_hdr_static_3mbps.webm"),
+                          make_pair("swirl_132x130_mpeg4.3gp", "swirl_132x130_mpeg4.mkv"),
+                          make_pair("swirl_144x136_avc.mkv", "swirl_144x136_avc.mp4"),
+                          make_pair("swirl_132x130_mpeg4.mp4", "swirl_132x130_mpeg4.mkv"),
+                          make_pair("crowd_508x240_25fps_hevc.mp4","crowd_508x240_25fps_hevc.mkv"),
+                          make_pair("bbb_cif_768kbps_30fps_mpeg2.mp4",
+                                    "bbb_cif_768kbps_30fps_mpeg2.ts"),
+
+                          make_pair("loudsoftaac.aac", "loudsoftaac.mkv"),
+                          make_pair("sinesweepflacmkv.mkv", "sinesweepflacmp4.mp4"),
+                          make_pair("sinesweepmp3lame.mp3", "sinesweepmp3lame.mkv"),
+                          make_pair("sinesweepoggmp4.mp4", "sinesweepogg.ogg"),
+                          make_pair("sinesweepvorbis.mp4", "sinesweepvorbis.ogg"),
+                          make_pair("sinesweepvorbis.mkv", "sinesweepvorbis.ogg"),
+                          make_pair("testopus.mkv", "testopus.mp4"),
+                          make_pair("testopus.mp4", "testopus.opus"),
+
+                          make_pair("loudsoftaac.aac", "loudsoftaac.aac"),
+                          make_pair("testamr.amr", "testamr.amr"),
+                          make_pair("sinesweepflac.flac", "sinesweepflac.flac"),
+                          make_pair("midi_a.mid", "midi_a.mid"),
+                          make_pair("sinesweepvorbis.mkv", "sinesweepvorbis.mkv"),
+                          make_pair("sinesweepmp3lame.mp3", "sinesweepmp3lame.mp3"),
+                          make_pair("sinesweepoggmp4.mp4", "sinesweepoggmp4.mp4"),
+                          make_pair("testopus.opus", "testopus.opus"),
+                          make_pair("john_cage.ogg", "john_cage.ogg"),
+                          make_pair("monotestgsm.wav", "monotestgsm.wav"),
+
+                          make_pair("swirl_144x136_mpeg2.mpg", "swirl_144x136_mpeg2.mpg"),
+                          make_pair("swirl_132x130_mpeg4.mp4", "swirl_132x130_mpeg4.mp4"),
+                          make_pair("swirl_144x136_vp9.webm", "swirl_144x136_vp9.webm"),
+                          make_pair("swirl_144x136_vp8.webm", "swirl_144x136_vp8.webm")));
+
+INSTANTIATE_TEST_SUITE_P(ConfigParamTestAll, ConfigParamTest,
+                         ::testing::Values(make_pair("aac", AAC_1),
+                                           make_pair("amr", AMR_NB_1),
+                                           make_pair("amr", AMR_WB_1),
+                                           make_pair("flac", FLAC_1),
+                                           make_pair("wav", GSM_1),
+                                           make_pair("midi", MIDI_1),
+                                           make_pair("mp3", MP3_1),
+                                           make_pair("ogg", OPUS_1),
+                                           make_pair("ogg", VORBIS_1),
+
+                                           make_pair("mpeg4", HEVC_1),
+                                           make_pair("mpeg4", HEVC_2),
+                                           make_pair("mpeg2ps", MPEG2_PS_1),
+                                           make_pair("mpeg2ts", MPEG2_TS_1),
+                                           make_pair("mkv", MPEG4_1),
+                                           make_pair("mkv", VP9_1)));
+
+// Validate extractors for container format, input file, no. of tracks and supports seek flag
+INSTANTIATE_TEST_SUITE_P(
+        ExtractorUnitTestAll, ExtractorFunctionalityTest,
+        ::testing::Values(
+                make_tuple("aac", "loudsoftaac.aac", 1, true),
+                make_tuple("amr", "testamr.amr", 1, true),
+                make_tuple("amr", "amrwb.wav", 1, true),
+                make_tuple("flac", "sinesweepflac.flac", 1, true),
+                make_tuple("midi", "midi_a.mid", 1, true),
+                make_tuple("mkv", "sinesweepvorbis.mkv", 1, true),
+                make_tuple("mkv", "sinesweepmp3lame.mkv", 1, true),
+                make_tuple("mkv", "loudsoftaac.mkv", 1, true),
+                make_tuple("mp3", "sinesweepmp3lame.mp3", 1, true),
+                make_tuple("mp3", "id3test10.mp3", 1, true),
+                make_tuple("mpeg2ts", "segment000001.ts", 2, false),
+                make_tuple("mpeg2ts", "testac3ts.ts", 1, false),
+                make_tuple("mpeg2ts", "testac4ts.ts", 1, false),
+                make_tuple("mpeg2ts", "testeac3ts.ts", 1, false),
+                make_tuple("mpeg4", "audio_aac_mono_70kbs_44100hz.mp4", 2, true),
+                make_tuple("mpeg4", "multi0_ac4.mp4", 1, true),
+                make_tuple("mpeg4", "noise_6ch_44khz_aot5_dr_sbr_sig2_mp4.m4a", 1, true),
+                make_tuple("mpeg4", "sinesweepalac.mov", 1, true),
+                make_tuple("mpeg4", "sinesweepflacmp4.mp4", 1, true),
+                make_tuple("mpeg4", "sinesweepm4a.m4a", 1, true),
+                make_tuple("mpeg4", "sinesweepoggmp4.mp4", 1, true),
+                make_tuple("mpeg4", "sinesweepopusmp4.mp4", 1, true),
+                make_tuple("mpeg4", "testac3mp4.mp4", 1, true),
+                make_tuple("mpeg4", "testeac3mp4.mp4", 1, true),
+                make_tuple("ogg", "john_cage.ogg", 1, true),
+                make_tuple("ogg", "testopus.opus", 1, true),
+                make_tuple("ogg", "sinesweepoggalbumart.ogg", 1, true),
+                make_tuple("wav", "loudsoftwav.wav", 1, true),
+                make_tuple("wav", "monotestgsm.wav", 1, true),
+                make_tuple("wav", "noise_5ch_44khz_aot2_wave.wav", 1, true),
+                make_tuple("wav", "sine1khzm40db_alaw.wav", 1, true),
+                make_tuple("wav", "sine1khzm40db_f32le.wav", 1, true),
+                make_tuple("wav", "sine1khzm40db_mulaw.wav", 1, true),
+
+                make_tuple("mkv", "swirl_144x136_avc.mkv", 1, true),
+                make_tuple("mkv", "withoutcues.mkv", 2, true),
+                make_tuple("mkv", "swirl_144x136_vp9.webm", 1, true),
+                make_tuple("mkv", "swirl_144x136_vp8.webm", 1, true),
+                make_tuple("mpeg2ps", "swirl_144x136_mpeg2.mpg", 1, false),
+                make_tuple("mpeg2ps", "programstream.mpeg", 2, false),
+                make_tuple("mpeg4", "color_176x144_bt601_525_lr_sdr_h264.mp4", 1, true),
+                make_tuple("mpeg4", "heifwriter_input.heic", 4, false),
+                make_tuple("mpeg4", "psshtest.mp4", 1, true),
+                make_tuple("mpeg4", "swirl_132x130_mpeg4.mp4", 1, true),
+                make_tuple("mpeg4", "testvideo.3gp", 4, true),
+                make_tuple("mpeg4", "testvideo_with_2_timedtext_tracks.3gp", 4, true),
+                make_tuple("mpeg4",
+                           "video_176x144_3gp_h263_300kbps_25fps_aac_stereo_128kbps_11025hz_"
+                           "metadata_gyro_compliant.3gp",
+                           3, true),
+                make_tuple(
+                        "mpeg4",
+                        "video_1920x1080_mp4_mpeg2_12000kbps_30fps_aac_stereo_128kbps_48000hz.mp4",
+                        2, true),
+                make_tuple("mpeg4",
+                           "video_480x360_mp4_hevc_650kbps_30fps_aac_stereo_128kbps_48000hz.mp4", 2,
+                           true),
+                make_tuple(
+                        "mpeg4",
+                        "video_480x360_mp4_h264_1350kbps_30fps_aac_stereo_128kbps_44100hz_dash.mp4",
+                        2, true)));
 
 int main(int argc, char **argv) {
     gEnv = new ExtractorUnitTestEnvironment();
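
Note on the parameter lists above: the tuples fed to `INSTANTIATE_TEST_SUITE_P` are consumed through gtest's value-parameterized interface. The sketch below is illustrative only, not the actual `ExtractorFunctionalityTest` fixture from ExtractorUnitTest.cpp; the fixture name, test name, and assertion are assumptions. It shows how a `TEST_P` unpacks a (container, fileName, trackCount, seekSupported) tuple via `GetParam()`.

```cpp
#include <string>
#include <tuple>

#include <gtest/gtest.h>

// Tuple layout mirrors the ExtractorUnitTestAll instantiation above:
// (container, fileName, trackCount, seekSupported).
using ExtractorParams = std::tuple<std::string, std::string, int, bool>;

// Hypothetical fixture used only to demonstrate parameter plumbing.
class ExtractorParamSketch : public ::testing::TestWithParam<ExtractorParams> {};

TEST_P(ExtractorParamSketch, HasExpectedTrackCount) {
    const auto &[container, fileName, trackCount, seekSupported] = GetParam();
    // A real test would create the extractor for `container`, point it at
    // `fileName`, then compare countTracks() and the reported seek flag.
    EXPECT_GT(trackCount, 0) << container << "/" << fileName;
    (void)seekSupported;
}

INSTANTIATE_TEST_SUITE_P(Sketch, ExtractorParamSketch,
                         ::testing::Values(std::make_tuple(std::string("mp3"),
                                                           std::string("sinesweepmp3lame.mp3"),
                                                           1, true)));
```
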
diff --git a/media/extractors/tests/README.md b/media/extractors/tests/README.md
index 69538b6..cff09ca 100644
--- a/media/extractors/tests/README.md
+++ b/media/extractors/tests/README.md
@@ -22,7 +22,7 @@
 adb push ${OUT}/data/nativetest/ExtractorUnitTest/ExtractorUnitTest /data/local/tmp/
 ```
 
-The resource file for the tests is taken from [here](https://storage.googleapis.com/android_media/frameworks/av/media/extractors/tests/extractor.zip). Download, unzip and push these files into device for testing.
+The resource file for the tests is taken from [here](https://storage.googleapis.com/android_media/frameworks/av/media/extractors/tests/extractor-1.4.zip). Download, unzip and push these files to the device for testing.
 
 ```
 adb push extractor /data/local/tmp/
diff --git a/media/extractors/wav/Android.bp b/media/extractors/wav/Android.bp
index 5d38a81..85d4cce 100644
--- a/media/extractors/wav/Android.bp
+++ b/media/extractors/wav/Android.bp
@@ -19,4 +19,11 @@
         "libfifo",
         "libstagefright_foundation",
     ],
+
+    host_supported: true,
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
 }
diff --git a/media/janitors/OWNERS-codecs b/media/janitors/OWNERS-codecs
new file mode 100644
index 0000000..e201399
--- /dev/null
+++ b/media/janitors/OWNERS-codecs
@@ -0,0 +1,5 @@
+# gerrit owner/approvers for the actual software codec libraries
+# differentiated from plugins connecting those codecs to either omx or codec2 infrastructure
+essick@google.com
+lajos@google.com
+marcone@google.com
diff --git a/media/janitors/README b/media/janitors/README
new file mode 100644
index 0000000..9db8e0e
--- /dev/null
+++ b/media/janitors/README
@@ -0,0 +1,4 @@
+A collection of OWNERS files that we reference from other projects,
+such as the software codecs in directories like external/libavc.
+This is to simplify our owner/approver management across the multiple
+projects related to media.
diff --git a/media/libaaudio/Android.bp b/media/libaaudio/Android.bp
index 140052f..e81ab06 100644
--- a/media/libaaudio/Android.bp
+++ b/media/libaaudio/Android.bp
@@ -32,5 +32,6 @@
 cc_library_headers {
     name: "libaaudio_headers",
     export_include_dirs: ["include"],
+    export_header_lib_headers: ["aaudio-aidl-cpp"],
+    header_libs: ["aaudio-aidl-cpp"],
 }
-
diff --git a/media/libaaudio/Doxyfile.orig b/media/libaaudio/Doxyfile.orig
deleted file mode 100644
index 137facb..0000000
--- a/media/libaaudio/Doxyfile.orig
+++ /dev/null
@@ -1,2303 +0,0 @@
-# Doxyfile 1.8.6
-
-# This file describes the settings to be used by the documentation system
-# doxygen (www.doxygen.org) for a project.
-#
-# All text after a double hash (##) is considered a comment and is placed in
-# front of the TAG it is preceding.
-#
-# All text after a single hash (#) is considered a comment and will be ignored.
-# The format is:
-# TAG = value [value, ...]
-# For lists, items can also be appended using:
-# TAG += value [value, ...]
-# Values that contain spaces should be placed between quotes (\" \").
-
-#---------------------------------------------------------------------------
-# Project related configuration options
-#---------------------------------------------------------------------------
-
-# This tag specifies the encoding used for all characters in the config file
-# that follow. The default is UTF-8 which is also the encoding used for all text
-# before the first occurrence of this tag. Doxygen uses libiconv (or the iconv
-# built into libc) for the transcoding. See http://www.gnu.org/software/libiconv
-# for the list of possible encodings.
-# The default value is: UTF-8.
-
-DOXYFILE_ENCODING      = UTF-8
-
-# The PROJECT_NAME tag is a single word (or a sequence of words surrounded by
-# double-quotes, unless you are using Doxywizard) that should identify the
-# project for which the documentation is generated. This name is used in the
-# title of most generated pages and in a few other places.
-# The default value is: My Project.
-
-PROJECT_NAME           = "My Project"
-
-# The PROJECT_NUMBER tag can be used to enter a project or revision number. This
-# could be handy for archiving the generated documentation or if some version
-# control system is used.
-
-PROJECT_NUMBER         =
-
-# Using the PROJECT_BRIEF tag one can provide an optional one line description
-# for a project that appears at the top of each page and should give viewer a
-# quick idea about the purpose of the project. Keep the description short.
-
-PROJECT_BRIEF          =
-
-# With the PROJECT_LOGO tag one can specify an logo or icon that is included in
-# the documentation. The maximum height of the logo should not exceed 55 pixels
-# and the maximum width should not exceed 200 pixels. Doxygen will copy the logo
-# to the output directory.
-
-PROJECT_LOGO           =
-
-# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) path
-# into which the generated documentation will be written. If a relative path is
-# entered, it will be relative to the location where doxygen was started. If
-# left blank the current directory will be used.
-
-OUTPUT_DIRECTORY       =
-
-# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create 4096 sub-
-# directories (in 2 levels) under the output directory of each output format and
-# will distribute the generated files over these directories. Enabling this
-# option can be useful when feeding doxygen a huge amount of source files, where
-# putting all generated files in the same directory would otherwise causes
-# performance problems for the file system.
-# The default value is: NO.
-
-CREATE_SUBDIRS         = NO
-
-# The OUTPUT_LANGUAGE tag is used to specify the language in which all
-# documentation generated by doxygen is written. Doxygen will use this
-# information to generate all constant output in the proper language.
-# Possible values are: Afrikaans, Arabic, Armenian, Brazilian, Catalan, Chinese,
-# Chinese-Traditional, Croatian, Czech, Danish, Dutch, English (United States),
-# Esperanto, Farsi (Persian), Finnish, French, German, Greek, Hungarian,
-# Indonesian, Italian, Japanese, Japanese-en (Japanese with English messages),
-# Korean, Korean-en (Korean with English messages), Latvian, Lithuanian,
-# Macedonian, Norwegian, Persian (Farsi), Polish, Portuguese, Romanian, Russian,
-# Serbian, Serbian-Cyrillic, Slovak, Slovene, Spanish, Swedish, Turkish,
-# Ukrainian and Vietnamese.
-# The default value is: English.
-
-OUTPUT_LANGUAGE        = English
-
-# If the BRIEF_MEMBER_DESC tag is set to YES doxygen will include brief member
-# descriptions after the members that are listed in the file and class
-# documentation (similar to Javadoc). Set to NO to disable this.
-# The default value is: YES.
-
-BRIEF_MEMBER_DESC      = YES
-
-# If the REPEAT_BRIEF tag is set to YES doxygen will prepend the brief
-# description of a member or function before the detailed description
-#
-# Note: If both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the
-# brief descriptions will be completely suppressed.
-# The default value is: YES.
-
-REPEAT_BRIEF           = YES
-
-# This tag implements a quasi-intelligent brief description abbreviator that is
-# used to form the text in various listings. Each string in this list, if found
-# as the leading text of the brief description, will be stripped from the text
-# and the result, after processing the whole list, is used as the annotated
-# text. Otherwise, the brief description is used as-is. If left blank, the
-# following values are used ($name is automatically replaced with the name of
-# the entity):The $name class, The $name widget, The $name file, is, provides,
-# specifies, contains, represents, a, an and the.
-
-ABBREVIATE_BRIEF       =
-
-# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then
-# doxygen will generate a detailed section even if there is only a brief
-# description.
-# The default value is: NO.
-
-ALWAYS_DETAILED_SEC    = NO
-
-# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all
-# inherited members of a class in the documentation of that class as if those
-# members were ordinary class members. Constructors, destructors and assignment
-# operators of the base classes will not be shown.
-# The default value is: NO.
-
-INLINE_INHERITED_MEMB  = NO
-
-# If the FULL_PATH_NAMES tag is set to YES doxygen will prepend the full path
-# before files name in the file list and in the header files. If set to NO the
-# shortest path that makes the file name unique will be used
-# The default value is: YES.
-
-FULL_PATH_NAMES        = YES
-
-# The STRIP_FROM_PATH tag can be used to strip a user-defined part of the path.
-# Stripping is only done if one of the specified strings matches the left-hand
-# part of the path. The tag can be used to show relative paths in the file list.
-# If left blank the directory from which doxygen is run is used as the path to
-# strip.
-#
-# Note that you can specify absolute paths here, but also relative paths, which
-# will be relative from the directory where doxygen is started.
-# This tag requires that the tag FULL_PATH_NAMES is set to YES.
-
-STRIP_FROM_PATH        =
-
-# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of the
-# path mentioned in the documentation of a class, which tells the reader which
-# header file to include in order to use a class. If left blank only the name of
-# the header file containing the class definition is used. Otherwise one should
-# specify the list of include paths that are normally passed to the compiler
-# using the -I flag.
-
-STRIP_FROM_INC_PATH    =
-
-# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter (but
-# less readable) file names. This can be useful is your file systems doesn't
-# support long names like on DOS, Mac, or CD-ROM.
-# The default value is: NO.
-
-SHORT_NAMES            = NO
-
-# If the JAVADOC_AUTOBRIEF tag is set to YES then doxygen will interpret the
-# first line (until the first dot) of a Javadoc-style comment as the brief
-# description. If set to NO, the Javadoc-style will behave just like regular Qt-
-# style comments (thus requiring an explicit @brief command for a brief
-# description.)
-# The default value is: NO.
-
-JAVADOC_AUTOBRIEF      = NO
-
-# If the QT_AUTOBRIEF tag is set to YES then doxygen will interpret the first
-# line (until the first dot) of a Qt-style comment as the brief description. If
-# set to NO, the Qt-style will behave just like regular Qt-style comments (thus
-# requiring an explicit \brief command for a brief description.)
-# The default value is: NO.
-
-QT_AUTOBRIEF           = NO
-
-# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make doxygen treat a
-# multi-line C++ special comment block (i.e. a block of //! or /// comments) as
-# a brief description. This used to be the default behavior. The new default is
-# to treat a multi-line C++ comment block as a detailed description. Set this
-# tag to YES if you prefer the old behavior instead.
-#
-# Note that setting this tag to YES also means that rational rose comments are
-# not recognized any more.
-# The default value is: NO.
-
-MULTILINE_CPP_IS_BRIEF = NO
-
-# If the INHERIT_DOCS tag is set to YES then an undocumented member inherits the
-# documentation from any documented member that it re-implements.
-# The default value is: YES.
-
-INHERIT_DOCS           = YES
-
-# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce a
-# new page for each member. If set to NO, the documentation of a member will be
-# part of the file/class/namespace that contains it.
-# The default value is: NO.
-
-SEPARATE_MEMBER_PAGES  = NO
-
-# The TAB_SIZE tag can be used to set the number of spaces in a tab. Doxygen
-# uses this value to replace tabs by spaces in code fragments.
-# Minimum value: 1, maximum value: 16, default value: 4.
-
-TAB_SIZE               = 4
-
-# This tag can be used to specify a number of aliases that act as commands in
-# the documentation. An alias has the form:
-# name=value
-# For example adding
-# "sideeffect=@par Side Effects:\n"
-# will allow you to put the command \sideeffect (or @sideeffect) in the
-# documentation, which will result in a user-defined paragraph with heading
-# "Side Effects:". You can put \n's in the value part of an alias to insert
-# newlines.
-
-ALIASES                =
-
-# This tag can be used to specify a number of word-keyword mappings (TCL only).
-# A mapping has the form "name=value". For example adding "class=itcl::class"
-# will allow you to use the command class in the itcl::class meaning.
-
-TCL_SUBST              =
-
-# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C sources
-# only. Doxygen will then generate output that is more tailored for C. For
-# instance, some of the names that are used will be different. The list of all
-# members will be omitted, etc.
-# The default value is: NO.
-
-OPTIMIZE_OUTPUT_FOR_C  = NO
-
-# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java or
-# Python sources only. Doxygen will then generate output that is more tailored
-# for that language. For instance, namespaces will be presented as packages,
-# qualified scopes will look different, etc.
-# The default value is: NO.
-
-OPTIMIZE_OUTPUT_JAVA   = NO
-
-# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran
-# sources. Doxygen will then generate output that is tailored for Fortran.
-# The default value is: NO.
-
-OPTIMIZE_FOR_FORTRAN   = NO
-
-# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL
-# sources. Doxygen will then generate output that is tailored for VHDL.
-# The default value is: NO.
-
-OPTIMIZE_OUTPUT_VHDL   = NO
-
-# Doxygen selects the parser to use depending on the extension of the files it
-# parses. With this tag you can assign which parser to use for a given
-# extension. Doxygen has a built-in mapping, but you can override or extend it
-# using this tag. The format is ext=language, where ext is a file extension, and
-# language is one of the parsers supported by doxygen: IDL, Java, Javascript,
-# C#, C, C++, D, PHP, Objective-C, Python, Fortran, VHDL. For instance to make
-# doxygen treat .inc files as Fortran files (default is PHP), and .f files as C
-# (default is Fortran), use: inc=Fortran f=C.
-#
-# Note For files without extension you can use no_extension as a placeholder.
-#
-# Note that for custom extensions you also need to set FILE_PATTERNS otherwise
-# the files are not read by doxygen.
-
-EXTENSION_MAPPING      =
-
-# If the MARKDOWN_SUPPORT tag is enabled then doxygen pre-processes all comments
-# according to the Markdown format, which allows for more readable
-# documentation. See http://daringfireball.net/projects/markdown/ for details.
-# The output of markdown processing is further processed by doxygen, so you can
-# mix doxygen, HTML, and XML commands with Markdown formatting. Disable only in
-# case of backward compatibilities issues.
-# The default value is: YES.
-
-MARKDOWN_SUPPORT       = YES
-
-# When enabled doxygen tries to link words that correspond to documented
-# classes, or namespaces to their corresponding documentation. Such a link can
-# be prevented in individual cases by by putting a % sign in front of the word
-# or globally by setting AUTOLINK_SUPPORT to NO.
-# The default value is: YES.
-
-AUTOLINK_SUPPORT       = YES
-
-# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want
-# to include (a tag file for) the STL sources as input, then you should set this
-# tag to YES in order to let doxygen match functions declarations and
-# definitions whose arguments contain STL classes (e.g. func(std::string);
-# versus func(std::string) {}). This also make the inheritance and collaboration
-# diagrams that involve STL classes more complete and accurate.
-# The default value is: NO.
-
-BUILTIN_STL_SUPPORT    = NO
-
-# If you use Microsoft's C++/CLI language, you should set this option to YES to
-# enable parsing support.
-# The default value is: NO.
-
-CPP_CLI_SUPPORT        = NO
-
-# Set the SIP_SUPPORT tag to YES if your project consists of sip (see:
-# http://www.riverbankcomputing.co.uk/software/sip/intro) sources only. Doxygen
-# will parse them like normal C++ but will assume all classes use public instead
-# of private inheritance when no explicit protection keyword is present.
-# The default value is: NO.
-
-SIP_SUPPORT            = NO
-
-# For Microsoft's IDL there are propget and propput attributes to indicate
-# getter and setter methods for a property. Setting this option to YES will make
-# doxygen to replace the get and set methods by a property in the documentation.
-# This will only work if the methods are indeed getting or setting a simple
-# type. If this is not the case, or you want to show the methods anyway, you
-# should set this option to NO.
-# The default value is: YES.
-
-IDL_PROPERTY_SUPPORT   = YES
-
-# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC
-# tag is set to YES, then doxygen will reuse the documentation of the first
-# member in the group (if any) for the other members of the group. By default
-# all members of a group must be documented explicitly.
-# The default value is: NO.
-
-DISTRIBUTE_GROUP_DOC   = NO
-
-# Set the SUBGROUPING tag to YES to allow class member groups of the same type
-# (for instance a group of public functions) to be put as a subgroup of that
-# type (e.g. under the Public Functions section). Set it to NO to prevent
-# subgrouping. Alternatively, this can be done per class using the
-# \nosubgrouping command.
-# The default value is: YES.
-
-SUBGROUPING            = YES
-
-# When the INLINE_GROUPED_CLASSES tag is set to YES, classes, structs and unions
-# are shown inside the group in which they are included (e.g. using \ingroup)
-# instead of on a separate page (for HTML and Man pages) or section (for LaTeX
-# and RTF).
-#
-# Note that this feature does not work in combination with
-# SEPARATE_MEMBER_PAGES.
-# The default value is: NO.
-
-INLINE_GROUPED_CLASSES = NO
-
-# When the INLINE_SIMPLE_STRUCTS tag is set to YES, structs, classes, and unions
-# with only public data fields or simple typedef fields will be shown inline in
-# the documentation of the scope in which they are defined (i.e. file,
-# namespace, or group documentation), provided this scope is documented. If set
-# to NO, structs, classes, and unions are shown on a separate page (for HTML and
-# Man pages) or section (for LaTeX and RTF).
-# The default value is: NO.
-
-INLINE_SIMPLE_STRUCTS  = NO
-
-# When TYPEDEF_HIDES_STRUCT tag is enabled, a typedef of a struct, union, or
-# enum is documented as struct, union, or enum with the name of the typedef. So
-# typedef struct TypeS {} TypeT, will appear in the documentation as a struct
-# with name TypeT. When disabled the typedef will appear as a member of a file,
-# namespace, or class. And the struct will be named TypeS. This can typically be
-# useful for C code in case the coding convention dictates that all compound
-# types are typedef'ed and only the typedef is referenced, never the tag name.
-# The default value is: NO.
-
-TYPEDEF_HIDES_STRUCT   = NO
-
-# The size of the symbol lookup cache can be set using LOOKUP_CACHE_SIZE. This
-# cache is used to resolve symbols given their name and scope. Since this can be
-# an expensive process and often the same symbol appears multiple times in the
-# code, doxygen keeps a cache of pre-resolved symbols. If the cache is too small
-# doxygen will become slower. If the cache is too large, memory is wasted. The
-# cache size is given by this formula: 2^(16+LOOKUP_CACHE_SIZE). The valid range
-# is 0..9, the default is 0, corresponding to a cache size of 2^16=65536
-# symbols. At the end of a run doxygen will report the cache usage and suggest
-# the optimal cache size from a speed point of view.
-# Minimum value: 0, maximum value: 9, default value: 0.
-
-LOOKUP_CACHE_SIZE      = 0
-
-#---------------------------------------------------------------------------
-# Build related configuration options
-#---------------------------------------------------------------------------
-
-# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in
-# documentation are documented, even if no documentation was available. Private
-# class members and static file members will be hidden unless the
-# EXTRACT_PRIVATE respectively EXTRACT_STATIC tags are set to YES.
-# Note: This will also disable the warnings about undocumented members that are
-# normally produced when WARNINGS is set to YES.
-# The default value is: NO.
-
-EXTRACT_ALL            = NO
-
-# If the EXTRACT_PRIVATE tag is set to YES all private members of a class will
-# be included in the documentation.
-# The default value is: NO.
-
-EXTRACT_PRIVATE        = NO
-
-# If the EXTRACT_PACKAGE tag is set to YES all members with package or internal
-# scope will be included in the documentation.
-# The default value is: NO.
-
-EXTRACT_PACKAGE        = NO
-
-# If the EXTRACT_STATIC tag is set to YES all static members of a file will be
-# included in the documentation.
-# The default value is: NO.
-
-EXTRACT_STATIC         = NO
-
-# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) defined
-# locally in source files will be included in the documentation. If set to NO
-# only classes defined in header files are included. Does not have any effect
-# for Java sources.
-# The default value is: YES.
-
-EXTRACT_LOCAL_CLASSES  = YES
-
-# This flag is only useful for Objective-C code. When set to YES local methods,
-# which are defined in the implementation section but not in the interface are
-# included in the documentation. If set to NO only methods in the interface are
-# included.
-# The default value is: NO.
-
-EXTRACT_LOCAL_METHODS  = NO
-
-# If this flag is set to YES, the members of anonymous namespaces will be
-# extracted and appear in the documentation as a namespace called
-# 'anonymous_namespace{file}', where file will be replaced with the base name of
-# the file that contains the anonymous namespace. By default anonymous namespace
-# are hidden.
-# The default value is: NO.
-
-EXTRACT_ANON_NSPACES   = NO
-
-# If the HIDE_UNDOC_MEMBERS tag is set to YES, doxygen will hide all
-# undocumented members inside documented classes or files. If set to NO these
-# members will be included in the various overviews, but no documentation
-# section is generated. This option has no effect if EXTRACT_ALL is enabled.
-# The default value is: NO.
-
-HIDE_UNDOC_MEMBERS     = NO
-
-# If the HIDE_UNDOC_CLASSES tag is set to YES, doxygen will hide all
-# undocumented classes that are normally visible in the class hierarchy. If set
-# to NO these classes will be included in the various overviews. This option has
-# no effect if EXTRACT_ALL is enabled.
-# The default value is: NO.
-
-HIDE_UNDOC_CLASSES     = NO
-
-# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, doxygen will hide all friend
-# (class|struct|union) declarations. If set to NO these declarations will be
-# included in the documentation.
-# The default value is: NO.
-
-HIDE_FRIEND_COMPOUNDS  = NO
-
-# If the HIDE_IN_BODY_DOCS tag is set to YES, doxygen will hide any
-# documentation blocks found inside the body of a function. If set to NO these
-# blocks will be appended to the function's detailed documentation block.
-# The default value is: NO.
-
-HIDE_IN_BODY_DOCS      = NO
-
-# The INTERNAL_DOCS tag determines if documentation that is typed after a
-# \internal command is included. If the tag is set to NO then the documentation
-# will be excluded. Set it to YES to include the internal documentation.
-# The default value is: NO.
-
-INTERNAL_DOCS          = NO
-
-# If the CASE_SENSE_NAMES tag is set to NO then doxygen will only generate file
-# names in lower-case letters. If set to YES upper-case letters are also
-# allowed. This is useful if you have classes or files whose names only differ
-# in case and if your file system supports case sensitive file names. Windows
-# and Mac users are advised to set this option to NO.
-# The default value is: system dependent.
-
-CASE_SENSE_NAMES       = YES
-
-# If the HIDE_SCOPE_NAMES tag is set to NO then doxygen will show members with
-# their full class and namespace scopes in the documentation. If set to YES the
-# scope will be hidden.
-# The default value is: NO.
-
-HIDE_SCOPE_NAMES       = NO
-
-# If the SHOW_INCLUDE_FILES tag is set to YES then doxygen will put a list of
-# the files that are included by a file in the documentation of that file.
-# The default value is: YES.
-
-SHOW_INCLUDE_FILES     = YES
-
-# If the SHOW_GROUPED_MEMB_INC tag is set to YES then Doxygen will add for each
-# grouped member an include statement to the documentation, telling the reader
-# which file to include in order to use the member.
-# The default value is: NO.
-
-SHOW_GROUPED_MEMB_INC  = NO
-
-# If the FORCE_LOCAL_INCLUDES tag is set to YES then doxygen will list include
-# files with double quotes in the documentation rather than with sharp brackets.
-# The default value is: NO.
-
-FORCE_LOCAL_INCLUDES   = NO
-
-# If the INLINE_INFO tag is set to YES then a tag [inline] is inserted in the
-# documentation for inline members.
-# The default value is: YES.
-
-INLINE_INFO            = YES
-
-# If the SORT_MEMBER_DOCS tag is set to YES then doxygen will sort the
-# (detailed) documentation of file and class members alphabetically by member
-# name. If set to NO the members will appear in declaration order.
-# The default value is: YES.
-
-SORT_MEMBER_DOCS       = YES
-
-# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the brief
-# descriptions of file, namespace and class members alphabetically by member
-# name. If set to NO the members will appear in declaration order. Note that
-# this will also influence the order of the classes in the class list.
-# The default value is: NO.
-
-SORT_BRIEF_DOCS        = NO
-
-# If the SORT_MEMBERS_CTORS_1ST tag is set to YES then doxygen will sort the
-# (brief and detailed) documentation of class members so that constructors and
-# destructors are listed first. If set to NO the constructors will appear in the
-# respective orders defined by SORT_BRIEF_DOCS and SORT_MEMBER_DOCS.
-# Note: If SORT_BRIEF_DOCS is set to NO this option is ignored for sorting brief
-# member documentation.
-# Note: If SORT_MEMBER_DOCS is set to NO this option is ignored for sorting
-# detailed member documentation.
-# The default value is: NO.
-
-SORT_MEMBERS_CTORS_1ST = NO
-
-# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the hierarchy
-# of group names into alphabetical order. If set to NO the group names will
-# appear in their defined order.
-# The default value is: NO.
-
-SORT_GROUP_NAMES       = NO
-
-# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be sorted by
-# fully-qualified names, including namespaces. If set to NO, the class list will
-# be sorted only by class name, not including the namespace part.
-# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES.
-# Note: This option applies only to the class list, not to the alphabetical
-# list.
-# The default value is: NO.
-
-SORT_BY_SCOPE_NAME     = NO
-
-# If the STRICT_PROTO_MATCHING option is enabled and doxygen fails to do proper
-# type resolution of all parameters of a function it will reject a match between
-# the prototype and the implementation of a member function even if there is
-# only one candidate or it is obvious which candidate to choose by doing a
-# simple string match. By disabling STRICT_PROTO_MATCHING doxygen will still
-# accept a match between prototype and implementation in such cases.
-# The default value is: NO.
-
-STRICT_PROTO_MATCHING  = NO
-
-# The GENERATE_TODOLIST tag can be used to enable ( YES) or disable ( NO) the
-# todo list. This list is created by putting \todo commands in the
-# documentation.
-# The default value is: YES.
-
-GENERATE_TODOLIST      = YES
-
-# The GENERATE_TESTLIST tag can be used to enable ( YES) or disable ( NO) the
-# test list. This list is created by putting \test commands in the
-# documentation.
-# The default value is: YES.
-
-GENERATE_TESTLIST      = YES
-
-# The GENERATE_BUGLIST tag can be used to enable ( YES) or disable ( NO) the bug
-# list. This list is created by putting \bug commands in the documentation.
-# The default value is: YES.
-
-GENERATE_BUGLIST       = YES
-
-# The GENERATE_DEPRECATEDLIST tag can be used to enable ( YES) or disable ( NO)
-# the deprecated list. This list is created by putting \deprecated commands in
-# the documentation.
-# The default value is: YES.
-
-GENERATE_DEPRECATEDLIST= YES
-
-# The ENABLED_SECTIONS tag can be used to enable conditional documentation
-# sections, marked by \if <section_label> ... \endif and \cond <section_label>
-# ... \endcond blocks.
-
-ENABLED_SECTIONS       =
-
-# The MAX_INITIALIZER_LINES tag determines the maximum number of lines that the
-# initial value of a variable or macro / define can have for it to appear in the
-# documentation. If the initializer consists of more lines than specified here
-# it will be hidden. Use a value of 0 to hide initializers completely. The
-# appearance of the value of individual variables and macros / defines can be
-# controlled using \showinitializer or \hideinitializer command in the
-# documentation regardless of this setting.
-# Minimum value: 0, maximum value: 10000, default value: 30.
-
-MAX_INITIALIZER_LINES  = 30
-
-# Set the SHOW_USED_FILES tag to NO to disable the list of files generated at
-# the bottom of the documentation of classes and structs. If set to YES the list
-# will mention the files that were used to generate the documentation.
-# The default value is: YES.
-
-SHOW_USED_FILES        = YES
-
-# Set the SHOW_FILES tag to NO to disable the generation of the Files page. This
-# will remove the Files entry from the Quick Index and from the Folder Tree View
-# (if specified).
-# The default value is: YES.
-
-SHOW_FILES             = YES
-
-# Set the SHOW_NAMESPACES tag to NO to disable the generation of the Namespaces
-# page. This will remove the Namespaces entry from the Quick Index and from the
-# Folder Tree View (if specified).
-# The default value is: YES.
-
-SHOW_NAMESPACES        = YES
-
-# The FILE_VERSION_FILTER tag can be used to specify a program or script that
-# doxygen should invoke to get the current version for each file (typically from
-# the version control system). Doxygen will invoke the program by executing (via
-# popen()) the command command input-file, where command is the value of the
-# FILE_VERSION_FILTER tag, and input-file is the name of an input file provided
-# by doxygen. Whatever the program writes to standard output is used as the file
-# version. For an example see the documentation.
-
-FILE_VERSION_FILTER    =
-
-# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed
-# by doxygen. The layout file controls the global structure of the generated
-# output files in an output format independent way. To create the layout file
-# that represents doxygen's defaults, run doxygen with the -l option. You can
-# optionally specify a file name after the option, if omitted DoxygenLayout.xml
-# will be used as the name of the layout file.
-#
-# Note that if you run doxygen from a directory containing a file called
-# DoxygenLayout.xml, doxygen will parse it automatically even if the LAYOUT_FILE
-# tag is left empty.
-
-LAYOUT_FILE            =
-
-# The CITE_BIB_FILES tag can be used to specify one or more bib files containing
-# the reference definitions. This must be a list of .bib files. The .bib
-# extension is automatically appended if omitted. This requires the bibtex tool
-# to be installed. See also http://en.wikipedia.org/wiki/BibTeX for more info.
-# For LaTeX the style of the bibliography can be controlled using
-# LATEX_BIB_STYLE. To use this feature you need bibtex and perl available in the
-# search path. Do not use file names with spaces, bibtex cannot handle them. See
-# also \cite for info how to create references.
-
-CITE_BIB_FILES         =
-
-#---------------------------------------------------------------------------
-# Configuration options related to warning and progress messages
-#---------------------------------------------------------------------------
-
-# The QUIET tag can be used to turn on/off the messages that are generated to
-# standard output by doxygen. If QUIET is set to YES this implies that the
-# messages are off.
-# The default value is: NO.
-
-QUIET                  = NO
-
-# The WARNINGS tag can be used to turn on/off the warning messages that are
-# generated to standard error ( stderr) by doxygen. If WARNINGS is set to YES
-# this implies that the warnings are on.
-#
-# Tip: Turn warnings on while writing the documentation.
-# The default value is: YES.
-
-WARNINGS               = YES
-
-# If the WARN_IF_UNDOCUMENTED tag is set to YES, then doxygen will generate
-# warnings for undocumented members. If EXTRACT_ALL is set to YES then this flag
-# will automatically be disabled.
-# The default value is: YES.
-
-WARN_IF_UNDOCUMENTED   = YES
-
-# If the WARN_IF_DOC_ERROR tag is set to YES, doxygen will generate warnings for
-# potential errors in the documentation, such as not documenting some parameters
-# in a documented function, or documenting parameters that don't exist or using
-# markup commands wrongly.
-# The default value is: YES.
-
-WARN_IF_DOC_ERROR      = YES
-
-# This WARN_NO_PARAMDOC option can be enabled to get warnings for functions that
-# are documented, but have no documentation for their parameters or return
-# value. If set to NO doxygen will only warn about wrong or incomplete parameter
-# documentation, but not about the absence of documentation.
-# The default value is: NO.
-
-WARN_NO_PARAMDOC       = NO
-
-# The WARN_FORMAT tag determines the format of the warning messages that doxygen
-# can produce. The string should contain the $file, $line, and $text tags, which
-# will be replaced by the file and line number from which the warning originated
-# and the warning text. Optionally the format may contain $version, which will
-# be replaced by the version of the file (if it could be obtained via
-# FILE_VERSION_FILTER)
-# The default value is: $file:$line: $text.
-
-WARN_FORMAT            = "$file:$line: $text"
-
-# The WARN_LOGFILE tag can be used to specify a file to which warning and error
-# messages should be written. If left blank the output is written to standard
-# error (stderr).
-
-WARN_LOGFILE           =
-
-#---------------------------------------------------------------------------
-# Configuration options related to the input files
-#---------------------------------------------------------------------------
-
-# The INPUT tag is used to specify the files and/or directories that contain
-# documented source files. You may enter file names like myfile.cpp or
-# directories like /usr/src/myproject. Separate the files or directories with
-# spaces.
-# Note: If this tag is empty the current directory is searched.
-
-INPUT                  =
-
-# This tag can be used to specify the character encoding of the source files
-# that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses
-# libiconv (or the iconv built into libc) for the transcoding. See the libiconv
-# documentation (see: http://www.gnu.org/software/libiconv) for the list of
-# possible encodings.
-# The default value is: UTF-8.
-
-INPUT_ENCODING         = UTF-8
-
-# If the value of the INPUT tag contains directories, you can use the
-# FILE_PATTERNS tag to specify one or more wildcard patterns (like *.cpp and
-# *.h) to filter out the source-files in the directories. If left blank the
-# following patterns are tested:*.c, *.cc, *.cxx, *.cpp, *.c++, *.java, *.ii,
-# *.ixx, *.ipp, *.i++, *.inl, *.idl, *.ddl, *.odl, *.h, *.hh, *.hxx, *.hpp,
-# *.h++, *.cs, *.d, *.php, *.php4, *.php5, *.phtml, *.inc, *.m, *.markdown,
-# *.md, *.mm, *.dox, *.py, *.f90, *.f, *.for, *.tcl, *.vhd, *.vhdl, *.ucf,
-# *.qsf, *.as and *.js.
-
-FILE_PATTERNS          =
-
-# The RECURSIVE tag can be used to specify whether or not subdirectories should
-# be searched for input files as well.
-# The default value is: NO.
-
-RECURSIVE              = NO
-
-# The EXCLUDE tag can be used to specify files and/or directories that should be
-# excluded from the INPUT source files. This way you can easily exclude a
-# subdirectory from a directory tree whose root is specified with the INPUT tag.
-#
-# Note that relative paths are relative to the directory from which doxygen is
-# run.
-
-EXCLUDE                =
-
-# The EXCLUDE_SYMLINKS tag can be used to select whether or not files or
-# directories that are symbolic links (a Unix file system feature) are excluded
-# from the input.
-# The default value is: NO.
-
-EXCLUDE_SYMLINKS       = NO
-
-# If the value of the INPUT tag contains directories, you can use the
-# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude
-# certain files from those directories.
-#
-# Note that the wildcards are matched against the file with absolute path, so to
-# exclude all test directories for example use the pattern */test/*
-
-EXCLUDE_PATTERNS       =
-
-# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names
-# (namespaces, classes, functions, etc.) that should be excluded from the
-# output. The symbol name can be a fully qualified name, a word, or if the
-# wildcard * is used, a substring. Examples: ANamespace, AClass,
-# AClass::ANamespace, ANamespace::*Test
-#
-# Note that the wildcards are matched against the file with absolute path, so to
-# exclude all test directories use the pattern */test/*
-
-EXCLUDE_SYMBOLS        =
-
-# The EXAMPLE_PATH tag can be used to specify one or more files or directories
-# that contain example code fragments that are included (see the \include
-# command).
-
-EXAMPLE_PATH           =
-
-# If the value of the EXAMPLE_PATH tag contains directories, you can use the
-# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp and
-# *.h) to filter out the source-files in the directories. If left blank all
-# files are included.
-
-EXAMPLE_PATTERNS       =
-
-# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be
-# searched for input files to be used with the \include or \dontinclude commands
-# irrespective of the value of the RECURSIVE tag.
-# The default value is: NO.
-
-EXAMPLE_RECURSIVE      = NO
-
-# The IMAGE_PATH tag can be used to specify one or more files or directories
-# that contain images that are to be included in the documentation (see the
-# \image command).
-
-IMAGE_PATH             =
-
-# The INPUT_FILTER tag can be used to specify a program that doxygen should
-# invoke to filter for each input file. Doxygen will invoke the filter program
-# by executing (via popen()) the command:
-#
-# <filter> <input-file>
-#
-# where <filter> is the value of the INPUT_FILTER tag, and <input-file> is the
-# name of an input file. Doxygen will then use the output that the filter
-# program writes to standard output. If FILTER_PATTERNS is specified, this tag
-# will be ignored.
-#
-# Note that the filter must not add or remove lines; it is applied before the
-# code is scanned, but not when the output code is generated. If lines are added
-# or removed, the anchors will not be placed correctly.
-
-INPUT_FILTER           =
-
-# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern
-# basis. Doxygen will compare the file name with each pattern and apply the
-# filter if there is a match. The filters are a list of the form: pattern=filter
-# (like *.cpp=my_cpp_filter). See INPUT_FILTER for further information on how
-# filters are used. If the FILTER_PATTERNS tag is empty or if none of the
-# patterns match the file name, INPUT_FILTER is applied.
-
-FILTER_PATTERNS        =
-
-# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using
-# INPUT_FILTER ) will also be used to filter the input files that are used for
-# producing the source files to browse (i.e. when SOURCE_BROWSER is set to YES).
-# The default value is: NO.
-
-FILTER_SOURCE_FILES    = NO
-
-# The FILTER_SOURCE_PATTERNS tag can be used to specify source filters per file
-# pattern. A pattern will override the setting for FILTER_PATTERN (if any) and
-# it is also possible to disable source filtering for a specific pattern using
-# *.ext= (so without naming a filter).
-# This tag requires that the tag FILTER_SOURCE_FILES is set to YES.
-
-FILTER_SOURCE_PATTERNS =
-
-# If the USE_MDFILE_AS_MAINPAGE tag refers to the name of a markdown file that
-# is part of the input, its contents will be placed on the main page
-# (index.html). This can be useful if you have a project on for instance GitHub
-# and want to reuse the introduction page also for the doxygen output.
-
-USE_MDFILE_AS_MAINPAGE =
-
-#---------------------------------------------------------------------------
-# Configuration options related to source browsing
-#---------------------------------------------------------------------------
-
-# If the SOURCE_BROWSER tag is set to YES then a list of source files will be
-# generated. Documented entities will be cross-referenced with these sources.
-#
-# Note: To get rid of all source code in the generated output, make sure that
-# also VERBATIM_HEADERS is set to NO.
-# The default value is: NO.
-
-SOURCE_BROWSER         = NO
-
-# Setting the INLINE_SOURCES tag to YES will include the body of functions,
-# classes and enums directly into the documentation.
-# The default value is: NO.
-
-INLINE_SOURCES         = NO
-
-# Setting the STRIP_CODE_COMMENTS tag to YES will instruct doxygen to hide any
-# special comment blocks from generated source code fragments. Normal C, C++ and
-# Fortran comments will always remain visible.
-# The default value is: YES.
-
-STRIP_CODE_COMMENTS    = YES
-
-# If the REFERENCED_BY_RELATION tag is set to YES then for each documented
-# function all documented functions referencing it will be listed.
-# The default value is: NO.
-
-REFERENCED_BY_RELATION = NO
-
-# If the REFERENCES_RELATION tag is set to YES then for each documented function
-# all documented entities called/used by that function will be listed.
-# The default value is: NO.
-
-REFERENCES_RELATION    = NO
-
-# If the REFERENCES_LINK_SOURCE tag is set to YES and SOURCE_BROWSER tag is set
-# to YES, then the hyperlinks from functions in REFERENCES_RELATION and
-# REFERENCED_BY_RELATION lists will link to the source code. Otherwise they will
-# link to the documentation.
-# The default value is: YES.
-
-REFERENCES_LINK_SOURCE = YES
-
-# If SOURCE_TOOLTIPS is enabled (the default) then hovering a hyperlink in the
-# source code will show a tooltip with additional information such as prototype,
-# brief description and links to the definition and documentation. Since this
-# will make the HTML file larger and loading of large files a bit slower, you
-# can opt to disable this feature.
-# The default value is: YES.
-# This tag requires that the tag SOURCE_BROWSER is set to YES.
-
-SOURCE_TOOLTIPS        = YES
-
-# If the USE_HTAGS tag is set to YES then the references to source code will
-# point to the HTML generated by the htags(1) tool instead of doxygen built-in
-# source browser. The htags tool is part of GNU's global source tagging system
-# (see http://www.gnu.org/software/global/global.html). You will need version
-# 4.8.6 or higher.
-#
-# To use it do the following:
-# - Install the latest version of global
-# - Enable SOURCE_BROWSER and USE_HTAGS in the config file
-# - Make sure the INPUT points to the root of the source tree
-# - Run doxygen as normal
-#
-# Doxygen will invoke htags (and that will in turn invoke gtags), so these
-# tools must be available from the command line (i.e. in the search path).
-#
-# The result: instead of the source browser generated by doxygen, the links to
-# source code will now point to the output of htags.
-# The default value is: NO.
-# This tag requires that the tag SOURCE_BROWSER is set to YES.
-
-USE_HTAGS              = NO
-
-# If the VERBATIM_HEADERS tag is set the YES then doxygen will generate a
-# verbatim copy of the header file for each class for which an include is
-# specified. Set to NO to disable this.
-# See also: Section \class.
-# The default value is: YES.
-
-VERBATIM_HEADERS       = YES
-
-#---------------------------------------------------------------------------
-# Configuration options related to the alphabetical class index
-#---------------------------------------------------------------------------
-
-# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index of all
-# compounds will be generated. Enable this if the project contains a lot of
-# classes, structs, unions or interfaces.
-# The default value is: YES.
-
-ALPHABETICAL_INDEX     = YES
-
-# The COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns in
-# which the alphabetical index list will be split.
-# Minimum value: 1, maximum value: 20, default value: 5.
-# This tag requires that the tag ALPHABETICAL_INDEX is set to YES.
-
-COLS_IN_ALPHA_INDEX    = 5
-
-# In case all classes in a project start with a common prefix, all classes will
-# be put under the same header in the alphabetical index. The IGNORE_PREFIX tag
-# can be used to specify a prefix (or a list of prefixes) that should be ignored
-# while generating the index headers.
-# This tag requires that the tag ALPHABETICAL_INDEX is set to YES.
-
-IGNORE_PREFIX          =
-
-#---------------------------------------------------------------------------
-# Configuration options related to the HTML output
-#---------------------------------------------------------------------------
-
-# If the GENERATE_HTML tag is set to YES doxygen will generate HTML output
-# The default value is: YES.
-
-GENERATE_HTML          = YES
-
-# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. If a
-# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
-# it.
-# The default directory is: html.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_OUTPUT            = html
-
-# The HTML_FILE_EXTENSION tag can be used to specify the file extension for each
-# generated HTML page (for example: .htm, .php, .asp).
-# The default value is: .html.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_FILE_EXTENSION    = .html
-
-# The HTML_HEADER tag can be used to specify a user-defined HTML header file for
-# each generated HTML page. If the tag is left blank doxygen will generate a
-# standard header.
-#
-# To get valid HTML the header file that includes any scripts and style sheets
-# that doxygen needs, which is dependent on the configuration options used (e.g.
-# the setting GENERATE_TREEVIEW). It is highly recommended to start with a
-# default header using
-# doxygen -w html new_header.html new_footer.html new_stylesheet.css
-# YourConfigFile
-# and then modify the file new_header.html. See also section "Doxygen usage"
-# for information on how to generate the default header that doxygen normally
-# uses.
-# Note: The header is subject to change so you typically have to regenerate the
-# default header when upgrading to a newer version of doxygen. For a description
-# of the possible markers and block names see the documentation.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_HEADER            =
-
-# The HTML_FOOTER tag can be used to specify a user-defined HTML footer for each
-# generated HTML page. If the tag is left blank doxygen will generate a standard
-# footer. See HTML_HEADER for more information on how to generate a default
-# footer and what special commands can be used inside the footer. See also
-# section "Doxygen usage" for information on how to generate the default footer
-# that doxygen normally uses.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_FOOTER            =
-
-# The HTML_STYLESHEET tag can be used to specify a user-defined cascading style
-# sheet that is used by each HTML page. It can be used to fine-tune the look of
-# the HTML output. If left blank doxygen will generate a default style sheet.
-# See also section "Doxygen usage" for information on how to generate the style
-# sheet that doxygen normally uses.
-# Note: It is recommended to use HTML_EXTRA_STYLESHEET instead of this tag, as
-# it is more robust and this tag (HTML_STYLESHEET) will in the future become
-# obsolete.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_STYLESHEET        =
-
-# The HTML_EXTRA_STYLESHEET tag can be used to specify an additional user-
-# defined cascading style sheet that is included after the standard style sheets
-# created by doxygen. Using this option one can overrule certain style aspects.
-# This is preferred over using HTML_STYLESHEET since it does not replace the
-# standard style sheet and is therefor more robust against future updates.
-# Doxygen will copy the style sheet file to the output directory. For an example
-# see the documentation.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_EXTRA_STYLESHEET  =
-
-# The HTML_EXTRA_FILES tag can be used to specify one or more extra images or
-# other source files which should be copied to the HTML output directory. Note
-# that these files will be copied to the base HTML output directory. Use the
-# $relpath^ marker in the HTML_HEADER and/or HTML_FOOTER files to load these
-# files. In the HTML_STYLESHEET file, use the file name only. Also note that the
-# files will be copied as-is; there are no commands or markers available.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_EXTRA_FILES       =
-
-# The HTML_COLORSTYLE_HUE tag controls the color of the HTML output. Doxygen
-# will adjust the colors in the stylesheet and background images according to
-# this color. Hue is specified as an angle on a colorwheel, see
-# http://en.wikipedia.org/wiki/Hue for more information. For instance the value
-# 0 represents red, 60 is yellow, 120 is green, 180 is cyan, 240 is blue, 300
-# purple, and 360 is red again.
-# Minimum value: 0, maximum value: 359, default value: 220.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_COLORSTYLE_HUE    = 220
-
-# The HTML_COLORSTYLE_SAT tag controls the purity (or saturation) of the colors
-# in the HTML output. For a value of 0 the output will use grayscales only. A
-# value of 255 will produce the most vivid colors.
-# Minimum value: 0, maximum value: 255, default value: 100.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_COLORSTYLE_SAT    = 100
-
-# The HTML_COLORSTYLE_GAMMA tag controls the gamma correction applied to the
-# luminance component of the colors in the HTML output. Values below 100
-# gradually make the output lighter, whereas values above 100 make the output
-# darker. The value divided by 100 is the actual gamma applied, so 80 represents
-# a gamma of 0.8, The value 220 represents a gamma of 2.2, and 100 does not
-# change the gamma.
-# Minimum value: 40, maximum value: 240, default value: 80.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_COLORSTYLE_GAMMA  = 80
-
-# If the HTML_TIMESTAMP tag is set to YES then the footer of each generated HTML
-# page will contain the date and time when the page was generated. Setting this
-# to NO can help when comparing the output of multiple runs.
-# The default value is: YES.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_TIMESTAMP         = YES
-
-# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML
-# documentation will contain sections that can be hidden and shown after the
-# page has loaded.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_DYNAMIC_SECTIONS  = NO
-
-# With HTML_INDEX_NUM_ENTRIES one can control the preferred number of entries
-# shown in the various tree structured indices initially; the user can expand
-# and collapse entries dynamically later on. Doxygen will expand the tree to
-# such a level that at most the specified number of entries are visible (unless
-# a fully collapsed tree already exceeds this amount). So setting the number of
-# entries 1 will produce a full collapsed tree by default. 0 is a special value
-# representing an infinite number of entries and will result in a full expanded
-# tree by default.
-# Minimum value: 0, maximum value: 9999, default value: 100.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_INDEX_NUM_ENTRIES = 100
-
-# If the GENERATE_DOCSET tag is set to YES, additional index files will be
-# generated that can be used as input for Apple's Xcode 3 integrated development
-# environment (see: http://developer.apple.com/tools/xcode/), introduced with
-# OSX 10.5 (Leopard). To create a documentation set, doxygen will generate a
-# Makefile in the HTML output directory. Running make will produce the docset in
-# that directory and running make install will install the docset in
-# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find it at
-# startup. See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html
-# for more information.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-GENERATE_DOCSET        = NO
-
-# This tag determines the name of the docset feed. A documentation feed provides
-# an umbrella under which multiple documentation sets from a single provider
-# (such as a company or product suite) can be grouped.
-# The default value is: Doxygen generated docs.
-# This tag requires that the tag GENERATE_DOCSET is set to YES.
-
-DOCSET_FEEDNAME        = "Doxygen generated docs"
-
-# This tag specifies a string that should uniquely identify the documentation
-# set bundle. This should be a reverse domain-name style string, e.g.
-# com.mycompany.MyDocSet. Doxygen will append .docset to the name.
-# The default value is: org.doxygen.Project.
-# This tag requires that the tag GENERATE_DOCSET is set to YES.
-
-DOCSET_BUNDLE_ID       = org.doxygen.Project
-
-# The DOCSET_PUBLISHER_ID tag specifies a string that should uniquely identify
-# the documentation publisher. This should be a reverse domain-name style
-# string, e.g. com.mycompany.MyDocSet.documentation.
-# The default value is: org.doxygen.Publisher.
-# This tag requires that the tag GENERATE_DOCSET is set to YES.
-
-DOCSET_PUBLISHER_ID    = org.doxygen.Publisher
-
-# The DOCSET_PUBLISHER_NAME tag identifies the documentation publisher.
-# The default value is: Publisher.
-# This tag requires that the tag GENERATE_DOCSET is set to YES.
-
-DOCSET_PUBLISHER_NAME  = Publisher
-
-# If the GENERATE_HTMLHELP tag is set to YES then doxygen generates three
-# additional HTML index files: index.hhp, index.hhc, and index.hhk. The
-# index.hhp is a project file that can be read by Microsoft's HTML Help Workshop
-# (see: http://www.microsoft.com/en-us/download/details.aspx?id=21138) on
-# Windows.
-#
-# The HTML Help Workshop contains a compiler that can convert all HTML output
-# generated by doxygen into a single compiled HTML file (.chm). Compiled HTML
-# files are now used as the Windows 98 help format, and will replace the old
-# Windows help format (.hlp) on all Windows platforms in the future. Compressed
-# HTML files also contain an index, a table of contents, and you can search for
-# words in the documentation. The HTML workshop also contains a viewer for
-# compressed HTML files.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-GENERATE_HTMLHELP      = NO
-
-# The CHM_FILE tag can be used to specify the file name of the resulting .chm
-# file. You can add a path in front of the file if the result should not be
-# written to the html output directory.
-# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
-
-CHM_FILE               =
-
-# The HHC_LOCATION tag can be used to specify the location (absolute path
-# including file name) of the HTML help compiler ( hhc.exe). If non-empty
-# doxygen will try to run the HTML help compiler on the generated index.hhp.
-# The file has to be specified with full path.
-# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
-
-HHC_LOCATION           =
-
-# The GENERATE_CHI flag controls if a separate .chi index file is generated (
-# YES) or that it should be included in the master .chm file ( NO).
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
-
-GENERATE_CHI           = NO
-
-# The CHM_INDEX_ENCODING is used to encode HtmlHelp index ( hhk), content ( hhc)
-# and project file content.
-# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
-
-CHM_INDEX_ENCODING     =
-
-# The BINARY_TOC flag controls whether a binary table of contents is generated (
-# YES) or a normal table of contents ( NO) in the .chm file.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
-
-BINARY_TOC             = NO
-
-# The TOC_EXPAND flag can be set to YES to add extra items for group members to
-# the table of contents of the HTML help documentation and to the tree view.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
-
-TOC_EXPAND             = NO
-
-# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and
-# QHP_VIRTUAL_FOLDER are set, an additional index file will be generated that
-# can be used as input for Qt's qhelpgenerator to generate a Qt Compressed Help
-# (.qch) of the generated HTML documentation.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-GENERATE_QHP           = NO
-
-# If the QHG_LOCATION tag is specified, the QCH_FILE tag can be used to specify
-# the file name of the resulting .qch file. The path specified is relative to
-# the HTML output folder.
-# This tag requires that the tag GENERATE_QHP is set to YES.
-
-QCH_FILE               =
-
-# The QHP_NAMESPACE tag specifies the namespace to use when generating Qt Help
-# Project output. For more information please see Qt Help Project / Namespace
-# (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#namespace).
-# The default value is: org.doxygen.Project.
-# This tag requires that the tag GENERATE_QHP is set to YES.
-
-QHP_NAMESPACE          = org.doxygen.Project
-
-# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating Qt
-# Help Project output. For more information please see Qt Help Project / Virtual
-# Folders (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#virtual-
-# folders).
-# The default value is: doc.
-# This tag requires that the tag GENERATE_QHP is set to YES.
-
-QHP_VIRTUAL_FOLDER     = doc
-
-# If the QHP_CUST_FILTER_NAME tag is set, it specifies the name of a custom
-# filter to add. For more information please see Qt Help Project / Custom
-# Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom-
-# filters).
-# This tag requires that the tag GENERATE_QHP is set to YES.
-
-QHP_CUST_FILTER_NAME   =
-
-# The QHP_CUST_FILTER_ATTRS tag specifies the list of the attributes of the
-# custom filter to add. For more information please see Qt Help Project / Custom
-# Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom-
-# filters).
-# This tag requires that the tag GENERATE_QHP is set to YES.
-
-QHP_CUST_FILTER_ATTRS  =
-
-# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this
-# project's filter section matches. Qt Help Project / Filter Attributes (see:
-# http://qt-project.org/doc/qt-4.8/qthelpproject.html#filter-attributes).
-# This tag requires that the tag GENERATE_QHP is set to YES.
-
-QHP_SECT_FILTER_ATTRS  =
-
-# The QHG_LOCATION tag can be used to specify the location of Qt's
-# qhelpgenerator. If non-empty doxygen will try to run qhelpgenerator on the
-# generated .qhp file.
-# This tag requires that the tag GENERATE_QHP is set to YES.
-
-QHG_LOCATION           =
-
-# If the GENERATE_ECLIPSEHELP tag is set to YES, additional index files will be
-# generated, together with the HTML files, they form an Eclipse help plugin. To
-# install this plugin and make it available under the help contents menu in
-# Eclipse, the contents of the directory containing the HTML and XML files needs
-# to be copied into the plugins directory of eclipse. The name of the directory
-# within the plugins directory should be the same as the ECLIPSE_DOC_ID value.
-# After copying Eclipse needs to be restarted before the help appears.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-GENERATE_ECLIPSEHELP   = NO
-
-# A unique identifier for the Eclipse help plugin. When installing the plugin
-# the directory name containing the HTML and XML files should also have this
-# name. Each documentation set should have its own identifier.
-# The default value is: org.doxygen.Project.
-# This tag requires that the tag GENERATE_ECLIPSEHELP is set to YES.
-
-ECLIPSE_DOC_ID         = org.doxygen.Project
-
-# If you want full control over the layout of the generated HTML pages it might
-# be necessary to disable the index and replace it with your own. The
-# DISABLE_INDEX tag can be used to turn on/off the condensed index (tabs) at top
-# of each HTML page. A value of NO enables the index and the value YES disables
-# it. Since the tabs in the index contain the same information as the navigation
-# tree, you can set this option to YES if you also set GENERATE_TREEVIEW to YES.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-DISABLE_INDEX          = NO
-
-# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index
-# structure should be generated to display hierarchical information. If the tag
-# value is set to YES, a side panel will be generated containing a tree-like
-# index structure (just like the one that is generated for HTML Help). For this
-# to work a browser that supports JavaScript, DHTML, CSS and frames is required
-# (i.e. any modern browser). Windows users are probably better off using the
-# HTML help feature. Via custom stylesheets (see HTML_EXTRA_STYLESHEET) one can
-# further fine-tune the look of the index. As an example, the default style
-# sheet generated by doxygen has an example that shows how to put an image at
-# the root of the tree instead of the PROJECT_NAME. Since the tree basically has
-# the same information as the tab index, you could consider setting
-# DISABLE_INDEX to YES when enabling this option.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-GENERATE_TREEVIEW      = NO
-
-# The ENUM_VALUES_PER_LINE tag can be used to set the number of enum values that
-# doxygen will group on one line in the generated HTML documentation.
-#
-# Note that a value of 0 will completely suppress the enum values from appearing
-# in the overview section.
-# Minimum value: 0, maximum value: 20, default value: 4.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-ENUM_VALUES_PER_LINE   = 4
-
-# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be used
-# to set the initial width (in pixels) of the frame in which the tree is shown.
-# Minimum value: 0, maximum value: 1500, default value: 250.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-TREEVIEW_WIDTH         = 250
-
-# When the EXT_LINKS_IN_WINDOW option is set to YES doxygen will open links to
-# external symbols imported via tag files in a separate window.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-EXT_LINKS_IN_WINDOW    = NO
-
-# Use this tag to change the font size of LaTeX formulas included as images in
-# the HTML documentation. When you change the font size after a successful
-# doxygen run you need to manually remove any form_*.png images from the HTML
-# output directory to force them to be regenerated.
-# Minimum value: 8, maximum value: 50, default value: 10.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-FORMULA_FONTSIZE       = 10
-
-# Use the FORMULA_TRANPARENT tag to determine whether or not the images
-# generated for formulas are transparent PNGs. Transparent PNGs are not
-# supported properly for IE 6.0, but are supported on all modern browsers.
-#
-# Note that when changing this option you need to delete any form_*.png files in
-# the HTML output directory before the changes have effect.
-# The default value is: YES.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-FORMULA_TRANSPARENT    = YES
-
-# Enable the USE_MATHJAX option to render LaTeX formulas using MathJax (see
-# http://www.mathjax.org) which uses client side Javascript for the rendering
-# instead of using prerendered bitmaps. Use this if you do not have LaTeX
-# installed or if you want to formulas look prettier in the HTML output. When
-# enabled you may also need to install MathJax separately and configure the path
-# to it using the MATHJAX_RELPATH option.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-USE_MATHJAX            = NO
-
-# When MathJax is enabled you can set the default output format to be used for
-# the MathJax output. See the MathJax site (see:
-# http://docs.mathjax.org/en/latest/output.html) for more details.
-# Possible values are: HTML-CSS (which is slower, but has the best
-# compatibility), NativeMML (i.e. MathML) and SVG.
-# The default value is: HTML-CSS.
-# This tag requires that the tag USE_MATHJAX is set to YES.
-
-MATHJAX_FORMAT         = HTML-CSS
-
-# When MathJax is enabled you need to specify the location relative to the HTML
-# output directory using the MATHJAX_RELPATH option. The destination directory
-# should contain the MathJax.js script. For instance, if the mathjax directory
-# is located at the same level as the HTML output directory, then
-# MATHJAX_RELPATH should be ../mathjax. The default value points to the MathJax
-# Content Delivery Network so you can quickly see the result without installing
-# MathJax. However, it is strongly recommended to install a local copy of
-# MathJax from http://www.mathjax.org before deployment.
-# The default value is: http://cdn.mathjax.org/mathjax/latest.
-# This tag requires that the tag USE_MATHJAX is set to YES.
-
-MATHJAX_RELPATH        = http://cdn.mathjax.org/mathjax/latest
-
-# The MATHJAX_EXTENSIONS tag can be used to specify one or more MathJax
-# extension names that should be enabled during MathJax rendering. For example
-# MATHJAX_EXTENSIONS = TeX/AMSmath TeX/AMSsymbols
-# This tag requires that the tag USE_MATHJAX is set to YES.
-
-MATHJAX_EXTENSIONS     =
-
-# The MATHJAX_CODEFILE tag can be used to specify a file with javascript pieces
-# of code that will be used on startup of the MathJax code. See the MathJax site
-# (see: http://docs.mathjax.org/en/latest/output.html) for more details. For an
-# example see the documentation.
-# This tag requires that the tag USE_MATHJAX is set to YES.
-
-MATHJAX_CODEFILE       =
-
-# When the SEARCHENGINE tag is enabled doxygen will generate a search box for
-# the HTML output. The underlying search engine uses javascript and DHTML and
-# should work on any modern browser. Note that when using HTML help
-# (GENERATE_HTMLHELP), Qt help (GENERATE_QHP), or docsets (GENERATE_DOCSET)
-# there is already a search function so this one should typically be disabled.
-# For large projects the javascript based search engine can be slow, then
-# enabling SERVER_BASED_SEARCH may provide a better solution. It is possible to
-# search using the keyboard; to jump to the search box use <access key> + S
-# (what the <access key> is depends on the OS and browser, but it is typically
-# <CTRL>, <ALT>/<option>, or both). Inside the search box use the <cursor down
-# key> to jump into the search results window, the results can be navigated
-# using the <cursor keys>. Press <Enter> to select an item or <escape> to cancel
-# the search. The filter options can be selected when the cursor is inside the
-# search box by pressing <Shift>+<cursor down>. Also here use the <cursor keys>
-# to select a filter and <Enter> or <escape> to activate or cancel the filter
-# option.
-# The default value is: YES.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-SEARCHENGINE           = YES
-
-# When the SERVER_BASED_SEARCH tag is enabled the search engine will be
-# implemented using a web server instead of a web client using Javascript. There
-# are two flavours of web server based searching depending on the
-# EXTERNAL_SEARCH setting. When disabled, doxygen will generate a PHP script for
-# searching and an index file used by the script. When EXTERNAL_SEARCH is
-# enabled the indexing and searching needs to be provided by external tools. See
-# the section "External Indexing and Searching" for details.
-# The default value is: NO.
-# This tag requires that the tag SEARCHENGINE is set to YES.
-
-SERVER_BASED_SEARCH    = NO
-
-# When EXTERNAL_SEARCH tag is enabled doxygen will no longer generate the PHP
-# script for searching. Instead the search results are written to an XML file
-# which needs to be processed by an external indexer. Doxygen will invoke an
-# external search engine pointed to by the SEARCHENGINE_URL option to obtain the
-# search results.
-#
-# Doxygen ships with an example indexer ( doxyindexer) and search engine
-# (doxysearch.cgi) which are based on the open source search engine library
-# Xapian (see: http://xapian.org/).
-#
-# See the section "External Indexing and Searching" for details.
-# The default value is: NO.
-# This tag requires that the tag SEARCHENGINE is set to YES.
-
-EXTERNAL_SEARCH        = NO
-
-# The SEARCHENGINE_URL should point to a search engine hosted by a web server
-# which will return the search results when EXTERNAL_SEARCH is enabled.
-#
-# Doxygen ships with an example indexer ( doxyindexer) and search engine
-# (doxysearch.cgi) which are based on the open source search engine library
-# Xapian (see: http://xapian.org/). See the section "External Indexing and
-# Searching" for details.
-# This tag requires that the tag SEARCHENGINE is set to YES.
-
-SEARCHENGINE_URL       =
-
-# When SERVER_BASED_SEARCH and EXTERNAL_SEARCH are both enabled the unindexed
-# search data is written to a file for indexing by an external tool. With the
-# SEARCHDATA_FILE tag the name of this file can be specified.
-# The default file is: searchdata.xml.
-# This tag requires that the tag SEARCHENGINE is set to YES.
-
-SEARCHDATA_FILE        = searchdata.xml
-
-# When SERVER_BASED_SEARCH and EXTERNAL_SEARCH are both enabled the
-# EXTERNAL_SEARCH_ID tag can be used as an identifier for the project. This is
-# useful in combination with EXTRA_SEARCH_MAPPINGS to search through multiple
-# projects and redirect the results back to the right project.
-# This tag requires that the tag SEARCHENGINE is set to YES.
-
-EXTERNAL_SEARCH_ID     =
-
-# The EXTRA_SEARCH_MAPPINGS tag can be used to enable searching through doxygen
-# projects other than the one defined by this configuration file, but that are
-# all added to the same external search index. Each project needs to have a
-# unique id set via EXTERNAL_SEARCH_ID. The search mapping then maps the id of
-# to a relative location where the documentation can be found. The format is:
-# EXTRA_SEARCH_MAPPINGS = tagname1=loc1 tagname2=loc2 ...
-# This tag requires that the tag SEARCHENGINE is set to YES.
-
-EXTRA_SEARCH_MAPPINGS  =
-
-#---------------------------------------------------------------------------
-# Configuration options related to the LaTeX output
-#---------------------------------------------------------------------------
-
-# If the GENERATE_LATEX tag is set to YES doxygen will generate LaTeX output.
-# The default value is: YES.
-
-GENERATE_LATEX         = YES
-
-# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. If a
-# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
-# it.
-# The default directory is: latex.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-LATEX_OUTPUT           = latex
-
-# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be
-# invoked.
-#
-# Note that when enabling USE_PDFLATEX this option is only used for generating
-# bitmaps for formulas in the HTML output, but not in the Makefile that is
-# written to the output directory.
-# The default file is: latex.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-LATEX_CMD_NAME         = latex
-
-# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to generate
-# index for LaTeX.
-# The default file is: makeindex.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-MAKEINDEX_CMD_NAME     = makeindex
-
-# If the COMPACT_LATEX tag is set to YES doxygen generates more compact LaTeX
-# documents. This may be useful for small projects and may help to save some
-# trees in general.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-COMPACT_LATEX          = NO
-
-# The PAPER_TYPE tag can be used to set the paper type that is used by the
-# printer.
-# Possible values are: a4 (210 x 297 mm), letter (8.5 x 11 inches), legal (8.5 x
-# 14 inches) and executive (7.25 x 10.5 inches).
-# The default value is: a4.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-PAPER_TYPE             = a4
-
-# The EXTRA_PACKAGES tag can be used to specify one or more LaTeX package names
-# that should be included in the LaTeX output. To get the times font for
-# instance you can specify
-# EXTRA_PACKAGES=times
-# If left blank no extra packages will be included.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-EXTRA_PACKAGES         =
-
-# The LATEX_HEADER tag can be used to specify a personal LaTeX header for the
-# generated LaTeX document. The header should contain everything until the first
-# chapter. If it is left blank doxygen will generate a standard header. See
-# section "Doxygen usage" for information on how to let doxygen write the
-# default header to a separate file.
-#
-# Note: Only use a user-defined header if you know what you are doing! The
-# following commands have a special meaning inside the header: $title,
-# $datetime, $date, $doxygenversion, $projectname, $projectnumber. Doxygen will
-# replace them by respectively the title of the page, the current date and time,
-# only the current date, the version number of doxygen, the project name (see
-# PROJECT_NAME), or the project number (see PROJECT_NUMBER).
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-LATEX_HEADER           =
-
-# The LATEX_FOOTER tag can be used to specify a personal LaTeX footer for the
-# generated LaTeX document. The footer should contain everything after the last
-# chapter. If it is left blank doxygen will generate a standard footer.
-#
-# Note: Only use a user-defined footer if you know what you are doing!
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-LATEX_FOOTER           =
-
-# The LATEX_EXTRA_FILES tag can be used to specify one or more extra images or
-# other source files which should be copied to the LATEX_OUTPUT output
-# directory. Note that the files will be copied as-is; there are no commands or
-# markers available.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-LATEX_EXTRA_FILES      =
-
-# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated is
-# prepared for conversion to PDF (using ps2pdf or pdflatex). The PDF file will
-# contain links (just like the HTML output) instead of page references. This
-# makes the output suitable for online browsing using a PDF viewer.
-# The default value is: YES.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-PDF_HYPERLINKS         = YES
-
-# If the LATEX_PDFLATEX tag is set to YES, doxygen will use pdflatex to generate
-# the PDF file directly from the LaTeX files. Set this option to YES to get a
-# higher quality PDF documentation.
-# The default value is: YES.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-USE_PDFLATEX           = YES
-
-# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \batchmode
-# command to the generated LaTeX files. This will instruct LaTeX to keep running
-# if errors occur, instead of asking the user for help. This option is also used
-# when generating formulas in HTML.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-LATEX_BATCHMODE        = NO
-
-# If the LATEX_HIDE_INDICES tag is set to YES then doxygen will not include the
-# index chapters (such as File Index, Compound Index, etc.) in the output.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-LATEX_HIDE_INDICES     = NO
-
-# If the LATEX_SOURCE_CODE tag is set to YES then doxygen will include source
-# code with syntax highlighting in the LaTeX output.
-#
-# Note that which sources are shown also depends on other settings such as
-# SOURCE_BROWSER.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-LATEX_SOURCE_CODE      = NO
-
-# The LATEX_BIB_STYLE tag can be used to specify the style to use for the
-# bibliography, e.g. plainnat, or ieeetr. See
-# http://en.wikipedia.org/wiki/BibTeX and \cite for more info.
-# The default value is: plain.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-LATEX_BIB_STYLE        = plain
-
-#---------------------------------------------------------------------------
-# Configuration options related to the RTF output
-#---------------------------------------------------------------------------
-
-# If the GENERATE_RTF tag is set to YES doxygen will generate RTF output. The
-# RTF output is optimized for Word 97 and may not look too pretty with other RTF
-# readers/editors.
-# The default value is: NO.
-
-GENERATE_RTF           = NO
-
-# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. If a
-# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
-# it.
-# The default directory is: rtf.
-# This tag requires that the tag GENERATE_RTF is set to YES.
-
-RTF_OUTPUT             = rtf
-
-# If the COMPACT_RTF tag is set to YES doxygen generates more compact RTF
-# documents. This may be useful for small projects and may help to save some
-# trees in general.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_RTF is set to YES.
-
-COMPACT_RTF            = NO
-
-# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated will
-# contain hyperlink fields. The RTF file will contain links (just like the HTML
-# output) instead of page references. This makes the output suitable for online
-# browsing using Word or some other Word compatible readers that support those
-# fields.
-#
-# Note: WordPad (write) and others do not support links.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_RTF is set to YES.
-
-RTF_HYPERLINKS         = NO
-
-# Load stylesheet definitions from file. Syntax is similar to doxygen's config
-# file, i.e. a series of assignments. You only have to provide replacements,
-# missing definitions are set to their default value.
-#
-# See also section "Doxygen usage" for information on how to generate the
-# default style sheet that doxygen normally uses.
-# This tag requires that the tag GENERATE_RTF is set to YES.
-
-RTF_STYLESHEET_FILE    =
-
-# Set optional variables used in the generation of an RTF document. Syntax is
-# similar to doxygen's config file. A template extensions file can be generated
-# using doxygen -e rtf extensionFile.
-# This tag requires that the tag GENERATE_RTF is set to YES.
-
-RTF_EXTENSIONS_FILE    =
-
-#---------------------------------------------------------------------------
-# Configuration options related to the man page output
-#---------------------------------------------------------------------------
-
-# If the GENERATE_MAN tag is set to YES doxygen will generate man pages for
-# classes and files.
-# The default value is: NO.
-
-GENERATE_MAN           = NO
-
-# The MAN_OUTPUT tag is used to specify where the man pages will be put. If a
-# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
-# it. A directory man3 will be created inside the directory specified by
-# MAN_OUTPUT.
-# The default directory is: man.
-# This tag requires that the tag GENERATE_MAN is set to YES.
-
-MAN_OUTPUT             = man
-
-# The MAN_EXTENSION tag determines the extension that is added to the generated
-# man pages. In case the manual section does not start with a number, the number
-# 3 is prepended. The dot (.) at the beginning of the MAN_EXTENSION tag is
-# optional.
-# The default value is: .3.
-# This tag requires that the tag GENERATE_MAN is set to YES.
-
-MAN_EXTENSION          = .3
-
-# If the MAN_LINKS tag is set to YES and doxygen generates man output, then it
-# will generate one additional man file for each entity documented in the real
-# man page(s). These additional files only source the real man page, but without
-# them the man command would be unable to find the correct page.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_MAN is set to YES.
-
-MAN_LINKS              = NO
-
-#---------------------------------------------------------------------------
-# Configuration options related to the XML output
-#---------------------------------------------------------------------------
-
-# If the GENERATE_XML tag is set to YES doxygen will generate an XML file that
-# captures the structure of the code including all documentation.
-# The default value is: NO.
-
-GENERATE_XML           = NO
-
-# The XML_OUTPUT tag is used to specify where the XML pages will be put. If a
-# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
-# it.
-# The default directory is: xml.
-# This tag requires that the tag GENERATE_XML is set to YES.
-
-XML_OUTPUT             = xml
-
-# The XML_SCHEMA tag can be used to specify a XML schema, which can be used by a
-# validating XML parser to check the syntax of the XML files.
-# This tag requires that the tag GENERATE_XML is set to YES.
-
-XML_SCHEMA             =
-
-# The XML_DTD tag can be used to specify a XML DTD, which can be used by a
-# validating XML parser to check the syntax of the XML files.
-# This tag requires that the tag GENERATE_XML is set to YES.
-
-XML_DTD                =
-
-# If the XML_PROGRAMLISTING tag is set to YES doxygen will dump the program
-# listings (including syntax highlighting and cross-referencing information) to
-# the XML output. Note that enabling this will significantly increase the size
-# of the XML output.
-# The default value is: YES.
-# This tag requires that the tag GENERATE_XML is set to YES.
-
-XML_PROGRAMLISTING     = YES
-
-#---------------------------------------------------------------------------
-# Configuration options related to the DOCBOOK output
-#---------------------------------------------------------------------------
-
-# If the GENERATE_DOCBOOK tag is set to YES doxygen will generate Docbook files
-# that can be used to generate PDF.
-# The default value is: NO.
-
-GENERATE_DOCBOOK       = NO
-
-# The DOCBOOK_OUTPUT tag is used to specify where the Docbook pages will be put.
-# If a relative path is entered the value of OUTPUT_DIRECTORY will be put in
-# front of it.
-# The default directory is: docbook.
-# This tag requires that the tag GENERATE_DOCBOOK is set to YES.
-
-DOCBOOK_OUTPUT         = docbook
-
-#---------------------------------------------------------------------------
-# Configuration options for the AutoGen Definitions output
-#---------------------------------------------------------------------------
-
-# If the GENERATE_AUTOGEN_DEF tag is set to YES doxygen will generate an AutoGen
-# Definitions (see http://autogen.sf.net) file that captures the structure of
-# the code including all documentation. Note that this feature is still
-# experimental and incomplete at the moment.
-# The default value is: NO.
-
-GENERATE_AUTOGEN_DEF   = NO
-
-#---------------------------------------------------------------------------
-# Configuration options related to the Perl module output
-#---------------------------------------------------------------------------
-
-# If the GENERATE_PERLMOD tag is set to YES doxygen will generate a Perl module
-# file that captures the structure of the code including all documentation.
-#
-# Note that this feature is still experimental and incomplete at the moment.
-# The default value is: NO.
-
-GENERATE_PERLMOD       = NO
-
-# If the PERLMOD_LATEX tag is set to YES doxygen will generate the necessary
-# Makefile rules, Perl scripts and LaTeX code to be able to generate PDF and DVI
-# output from the Perl module output.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_PERLMOD is set to YES.
-
-PERLMOD_LATEX          = NO
-
-# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be nicely
-# formatted so it can be parsed by a human reader. This is useful if you want to
-# understand what is going on. On the other hand, if this tag is set to NO the
-# size of the Perl module output will be much smaller and Perl will parse it
-# just the same.
-# The default value is: YES.
-# This tag requires that the tag GENERATE_PERLMOD is set to YES.
-
-PERLMOD_PRETTY         = YES
-
-# The names of the make variables in the generated doxyrules.make file are
-# prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. This is useful
-# so different doxyrules.make files included by the same Makefile don't
-# overwrite each other's variables.
-# This tag requires that the tag GENERATE_PERLMOD is set to YES.
-
-PERLMOD_MAKEVAR_PREFIX =
-
-#---------------------------------------------------------------------------
-# Configuration options related to the preprocessor
-#---------------------------------------------------------------------------
-
-# If the ENABLE_PREPROCESSING tag is set to YES doxygen will evaluate all
-# C-preprocessor directives found in the sources and include files.
-# The default value is: YES.
-
-ENABLE_PREPROCESSING   = YES
-
-# If the MACRO_EXPANSION tag is set to YES doxygen will expand all macro names
-# in the source code. If set to NO only conditional compilation will be
-# performed. Macro expansion can be done in a controlled way by setting
-# EXPAND_ONLY_PREDEF to YES.
-# The default value is: NO.
-# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
-
-MACRO_EXPANSION        = NO
-
-# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES then
-# the macro expansion is limited to the macros specified with the PREDEFINED and
-# EXPAND_AS_DEFINED tags.
-# The default value is: NO.
-# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
-
-EXPAND_ONLY_PREDEF     = NO
-
-# If the SEARCH_INCLUDES tag is set to YES the includes files in the
-# INCLUDE_PATH will be searched if a #include is found.
-# The default value is: YES.
-# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
-
-SEARCH_INCLUDES        = YES
-
-# The INCLUDE_PATH tag can be used to specify one or more directories that
-# contain include files that are not input files but should be processed by the
-# preprocessor.
-# This tag requires that the tag SEARCH_INCLUDES is set to YES.
-
-INCLUDE_PATH           =
-
-# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard
-# patterns (like *.h and *.hpp) to filter out the header-files in the
-# directories. If left blank, the patterns specified with FILE_PATTERNS will be
-# used.
-# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
-
-INCLUDE_FILE_PATTERNS  =
-
-# The PREDEFINED tag can be used to specify one or more macro names that are
-# defined before the preprocessor is started (similar to the -D option of e.g.
-# gcc). The argument of the tag is a list of macros of the form: name or
-# name=definition (no spaces). If the definition and the "=" are omitted, "=1"
-# is assumed. To prevent a macro definition from being undefined via #undef or
-# recursively expanded use the := operator instead of the = operator.
-# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
-
-PREDEFINED             =
-
-# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then this
-# tag can be used to specify a list of macro names that should be expanded. The
-# macro definition that is found in the sources will be used. Use the PREDEFINED
-# tag if you want to use a different macro definition that overrules the
-# definition found in the source code.
-# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
-
-EXPAND_AS_DEFINED      =
-
-# If the SKIP_FUNCTION_MACROS tag is set to YES then doxygen's preprocessor will
-# remove all refrences to function-like macros that are alone on a line, have an
-# all uppercase name, and do not end with a semicolon. Such function macros are
-# typically used for boiler-plate code, and will confuse the parser if not
-# removed.
-# The default value is: YES.
-# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
-
-SKIP_FUNCTION_MACROS   = YES
-
-#---------------------------------------------------------------------------
-# Configuration options related to external references
-#---------------------------------------------------------------------------
-
-# The TAGFILES tag can be used to specify one or more tag files. For each tag
-# file the location of the external documentation should be added. The format of
-# a tag file without this location is as follows:
-# TAGFILES = file1 file2 ...
-# Adding location for the tag files is done as follows:
-# TAGFILES = file1=loc1 "file2 = loc2" ...
-# where loc1 and loc2 can be relative or absolute paths or URLs. See the
-# section "Linking to external documentation" for more information about the use
-# of tag files.
-# Note: Each tag file must have an unique name (where the name does NOT include
-# the path). If a tag file is not located in the directory in which doxygen is
-# run, you must also specify the path to the tagfile here.
-
-TAGFILES               =
-
-# When a file name is specified after GENERATE_TAGFILE, doxygen will create a
-# tag file that is based on the input files it reads. See section "Linking to
-# external documentation" for more information about the usage of tag files.
-
-GENERATE_TAGFILE       =
-
-# If the ALLEXTERNALS tag is set to YES all external class will be listed in the
-# class index. If set to NO only the inherited external classes will be listed.
-# The default value is: NO.
-
-ALLEXTERNALS           = NO
-
-# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed in
-# the modules index. If set to NO, only the current project's groups will be
-# listed.
-# The default value is: YES.
-
-EXTERNAL_GROUPS        = YES
-
-# If the EXTERNAL_PAGES tag is set to YES all external pages will be listed in
-# the related pages index. If set to NO, only the current project's pages will
-# be listed.
-# The default value is: YES.
-
-EXTERNAL_PAGES         = YES
-
-# The PERL_PATH should be the absolute path and name of the perl script
-# interpreter (i.e. the result of 'which perl').
-# The default file (with absolute path) is: /usr/bin/perl.
-
-PERL_PATH              = /usr/bin/perl
-
-#---------------------------------------------------------------------------
-# Configuration options related to the dot tool
-#---------------------------------------------------------------------------
-
-# If the CLASS_DIAGRAMS tag is set to YES doxygen will generate a class diagram
-# (in HTML and LaTeX) for classes with base or super classes. Setting the tag to
-# NO turns the diagrams off. Note that this option also works with HAVE_DOT
-# disabled, but it is recommended to install and use dot, since it yields more
-# powerful graphs.
-# The default value is: YES.
-
-CLASS_DIAGRAMS         = YES
-
-# You can define message sequence charts within doxygen comments using the \msc
-# command. Doxygen will then run the mscgen tool (see:
-# http://www.mcternan.me.uk/mscgen/)) to produce the chart and insert it in the
-# documentation. The MSCGEN_PATH tag allows you to specify the directory where
-# the mscgen tool resides. If left empty the tool is assumed to be found in the
-# default search path.
-
-MSCGEN_PATH            =
-
-# You can include diagrams made with dia in doxygen documentation. Doxygen will
-# then run dia to produce the diagram and insert it in the documentation. The
-# DIA_PATH tag allows you to specify the directory where the dia binary resides.
-# If left empty dia is assumed to be found in the default search path.
-
-DIA_PATH               =
-
-# If set to YES, the inheritance and collaboration graphs will hide inheritance
-# and usage relations if the target is undocumented or is not a class.
-# The default value is: YES.
-
-HIDE_UNDOC_RELATIONS   = YES
-
-# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is
-# available from the path. This tool is part of Graphviz (see:
-# http://www.graphviz.org/), a graph visualization toolkit from AT&T and Lucent
-# Bell Labs. The other options in this section have no effect if this option is
-# set to NO
-# The default value is: NO.
-
-HAVE_DOT               = NO
-
-# The DOT_NUM_THREADS specifies the number of dot invocations doxygen is allowed
-# to run in parallel. When set to 0 doxygen will base this on the number of
-# processors available in the system. You can set it explicitly to a value
-# larger than 0 to get control over the balance between CPU load and processing
-# speed.
-# Minimum value: 0, maximum value: 32, default value: 0.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOT_NUM_THREADS        = 0
-
-# When you want a differently looking font n the dot files that doxygen
-# generates you can specify the font name using DOT_FONTNAME. You need to make
-# sure dot is able to find the font, which can be done by putting it in a
-# standard location or by setting the DOTFONTPATH environment variable or by
-# setting DOT_FONTPATH to the directory containing the font.
-# The default value is: Helvetica.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOT_FONTNAME           = Helvetica
-
-# The DOT_FONTSIZE tag can be used to set the size (in points) of the font of
-# dot graphs.
-# Minimum value: 4, maximum value: 24, default value: 10.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOT_FONTSIZE           = 10
-
-# By default doxygen will tell dot to use the default font as specified with
-# DOT_FONTNAME. If you specify a different font using DOT_FONTNAME you can set
-# the path where dot can find it using this tag.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOT_FONTPATH           =
-
-# If the CLASS_GRAPH tag is set to YES then doxygen will generate a graph for
-# each documented class showing the direct and indirect inheritance relations.
-# Setting this tag to YES will force the CLASS_DIAGRAMS tag to NO.
-# The default value is: YES.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-CLASS_GRAPH            = YES
-
-# If the COLLABORATION_GRAPH tag is set to YES then doxygen will generate a
-# graph for each documented class showing the direct and indirect implementation
-# dependencies (inheritance, containment, and class references variables) of the
-# class with other documented classes.
-# The default value is: YES.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-COLLABORATION_GRAPH    = YES
-
-# If the GROUP_GRAPHS tag is set to YES then doxygen will generate a graph for
-# groups, showing the direct groups dependencies.
-# The default value is: YES.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-GROUP_GRAPHS           = YES
-
-# If the UML_LOOK tag is set to YES doxygen will generate inheritance and
-# collaboration diagrams in a style similar to the OMG's Unified Modeling
-# Language.
-# The default value is: NO.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-UML_LOOK               = NO
-
-# If the UML_LOOK tag is enabled, the fields and methods are shown inside the
-# class node. If there are many fields or methods and many nodes the graph may
-# become too big to be useful. The UML_LIMIT_NUM_FIELDS threshold limits the
-# number of items for each type to make the size more manageable. Set this to 0
-# for no limit. Note that the threshold may be exceeded by 50% before the limit
-# is enforced. So when you set the threshold to 10, up to 15 fields may appear,
-# but if the number exceeds 15, the total amount of fields shown is limited to
-# 10.
-# Minimum value: 0, maximum value: 100, default value: 10.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-UML_LIMIT_NUM_FIELDS   = 10
-
-# If the TEMPLATE_RELATIONS tag is set to YES then the inheritance and
-# collaboration graphs will show the relations between templates and their
-# instances.
-# The default value is: NO.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-TEMPLATE_RELATIONS     = NO
-
-# If the INCLUDE_GRAPH, ENABLE_PREPROCESSING and SEARCH_INCLUDES tags are set to
-# YES then doxygen will generate a graph for each documented file showing the
-# direct and indirect include dependencies of the file with other documented
-# files.
-# The default value is: YES.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-INCLUDE_GRAPH          = YES
-
-# If the INCLUDED_BY_GRAPH, ENABLE_PREPROCESSING and SEARCH_INCLUDES tags are
-# set to YES then doxygen will generate a graph for each documented file showing
-# the direct and indirect include dependencies of the file with other documented
-# files.
-# The default value is: YES.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-INCLUDED_BY_GRAPH      = YES
-
-# If the CALL_GRAPH tag is set to YES then doxygen will generate a call
-# dependency graph for every global function or class method.
-#
-# Note that enabling this option will significantly increase the time of a run.
-# So in most cases it will be better to enable call graphs for selected
-# functions only using the \callgraph command.
-# The default value is: NO.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-CALL_GRAPH             = NO
-
-# If the CALLER_GRAPH tag is set to YES then doxygen will generate a caller
-# dependency graph for every global function or class method.
-#
-# Note that enabling this option will significantly increase the time of a run.
-# So in most cases it will be better to enable caller graphs for selected
-# functions only using the \callergraph command.
-# The default value is: NO.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-CALLER_GRAPH           = NO
-
-# If the GRAPHICAL_HIERARCHY tag is set to YES then doxygen will graphical
-# hierarchy of all classes instead of a textual one.
-# The default value is: YES.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-GRAPHICAL_HIERARCHY    = YES
-
-# If the DIRECTORY_GRAPH tag is set to YES then doxygen will show the
-# dependencies a directory has on other directories in a graphical way. The
-# dependency relations are determined by the #include relations between the
-# files in the directories.
-# The default value is: YES.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DIRECTORY_GRAPH        = YES
-
-# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images
-# generated by dot.
-# Note: If you choose svg you need to set HTML_FILE_EXTENSION to xhtml in order
-# to make the SVG files visible in IE 9+ (other browsers do not have this
-# requirement).
-# Possible values are: png, jpg, gif and svg.
-# The default value is: png.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOT_IMAGE_FORMAT       = png
-
-# If DOT_IMAGE_FORMAT is set to svg, then this option can be set to YES to
-# enable generation of interactive SVG images that allow zooming and panning.
-#
-# Note that this requires a modern browser other than Internet Explorer. Tested
-# and working are Firefox, Chrome, Safari, and Opera.
-# Note: For IE 9+ you need to set HTML_FILE_EXTENSION to xhtml in order to make
-# the SVG files visible. Older versions of IE do not have SVG support.
-# The default value is: NO.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-INTERACTIVE_SVG        = NO
-
-# The DOT_PATH tag can be used to specify the path where the dot tool can be
-# found. If left blank, it is assumed the dot tool can be found in the path.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOT_PATH               =
-
-# The DOTFILE_DIRS tag can be used to specify one or more directories that
-# contain dot files that are included in the documentation (see the \dotfile
-# command).
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOTFILE_DIRS           =
-
-# The MSCFILE_DIRS tag can be used to specify one or more directories that
-# contain msc files that are included in the documentation (see the \mscfile
-# command).
-
-MSCFILE_DIRS           =
-
-# The DIAFILE_DIRS tag can be used to specify one or more directories that
-# contain dia files that are included in the documentation (see the \diafile
-# command).
-
-DIAFILE_DIRS           =
-
-# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of nodes
-# that will be shown in the graph. If the number of nodes in a graph becomes
-# larger than this value, doxygen will truncate the graph, which is visualized
-# by representing a node as a red box. Note that doxygen if the number of direct
-# children of the root node in a graph is already larger than
-# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note that
-# the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH.
-# Minimum value: 0, maximum value: 10000, default value: 50.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOT_GRAPH_MAX_NODES    = 50
-
-# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the graphs
-# generated by dot. A depth value of 3 means that only nodes reachable from the
-# root by following a path via at most 3 edges will be shown. Nodes that lay
-# further from the root node will be omitted. Note that setting this option to 1
-# or 2 may greatly reduce the computation time needed for large code bases. Also
-# note that the size of a graph can be further restricted by
-# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction.
-# Minimum value: 0, maximum value: 1000, default value: 0.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-MAX_DOT_GRAPH_DEPTH    = 0
-
-# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent
-# background. This is disabled by default, because dot on Windows does not seem
-# to support this out of the box.
-#
-# Warning: Depending on the platform used, enabling this option may lead to
-# badly anti-aliased labels on the edges of a graph (i.e. they become hard to
-# read).
-# The default value is: NO.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOT_TRANSPARENT        = NO
-
-# Set the DOT_MULTI_TARGETS tag to YES allow dot to generate multiple output
-# files in one run (i.e. multiple -o and -T options on the command line). This
-# makes dot run faster, but since only newer versions of dot (>1.8.10) support
-# this, this feature is disabled by default.
-# The default value is: NO.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOT_MULTI_TARGETS      = YES
-
-# If the GENERATE_LEGEND tag is set to YES doxygen will generate a legend page
-# explaining the meaning of the various boxes and arrows in the dot generated
-# graphs.
-# The default value is: YES.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-GENERATE_LEGEND        = YES
-
-# If the DOT_CLEANUP tag is set to YES doxygen will remove the intermediate dot
-# files that are used to generate the various graphs.
-# The default value is: YES.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOT_CLEANUP            = YES
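
The Doxyfile deleted above is largely self-documenting, but a couple of its options refer to per-declaration Doxygen commands such as \callgraph (see CALL_GRAPH) and \dotfile (see DOTFILE_DIRS). For readers unfamiliar with those, here is a small illustrative C++ doc comment — not taken from this tree, function name and parameters are made up — showing how such a command is attached to one function rather than enabled globally:

/**
 * Computes the RMS level of an audio buffer.
 *
 * The \callgraph command below asks Doxygen (when HAVE_DOT is enabled) to
 * emit a call graph for this one function, instead of turning on CALL_GRAPH
 * for the whole project.
 * \callgraph
 *
 * \param data   interleaved float samples
 * \param count  number of samples in the buffer
 * \return       root-mean-square level, in the range [0.0, 1.0]
 */
float computeRms(const float* data, int count);
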
diff --git a/media/libaaudio/examples/loopback/Android.bp b/media/libaaudio/examples/loopback/Android.bp
index 5b7d956..4de632f 100644
--- a/media/libaaudio/examples/loopback/Android.bp
+++ b/media/libaaudio/examples/loopback/Android.bp
@@ -4,9 +4,11 @@
     srcs: ["src/loopback.cpp"],
     cflags: ["-Wall", "-Werror"],
     static_libs: ["libsndfile"],
+    include_dirs: ["external/oboe/apps/OboeTester/app/src/main/cpp"],
     shared_libs: [
         "libaaudio",
         "libaudioutils",
+        "liblog"
         ],
     header_libs: ["libaaudio_example_utils"],
 }
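
The hunk above points include_dirs at the OboeTester analyzer sources and links the loopback example against liblog. A plausible reason, judging from the local analyzer header removed below, is that those analyzer headers log through the ALOG* macros, which need liblog to resolve at link time. The following is a minimal, illustrative sketch — not part of the patch; the tag name and the trivial main() are invented — of what that dependency looks like from C++:

#include <android/log.h>            // declarations backed by liblog

#define LOG_TAG "aaudio_loopback"   // hypothetical tag, for illustration only
#define ALOGD(...) __android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, __VA_ARGS__)

int main() {
    // Without "liblog" in shared_libs, __android_log_print is an
    // unresolved symbol and the link step fails.
    ALOGD("glitch count = %d", 0);
    return 0;
}

In platform code the ALOG* macros normally come from <log/log.h> with LOG_TAG defined beforehand; the explicit define here only keeps the sketch self-contained.
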
diff --git a/media/libaaudio/examples/loopback/src/analyzer/GlitchAnalyzer.h b/media/libaaudio/examples/loopback/src/analyzer/GlitchAnalyzer.h
deleted file mode 100644
index 04435d1..0000000
--- a/media/libaaudio/examples/loopback/src/analyzer/GlitchAnalyzer.h
+++ /dev/null
@@ -1,445 +0,0 @@
-/*
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANALYZER_GLITCH_ANALYZER_H
-#define ANALYZER_GLITCH_ANALYZER_H
-
-#include <algorithm>
-#include <cctype>
-#include <iomanip>
-#include <iostream>
-
-#include "LatencyAnalyzer.h"
-#include "PseudoRandom.h"
-
-/**
- * Output a steady sine wave and analyze the return signal.
- *
- * Use a cosine transform to measure the predicted magnitude and relative phase of the
- * looped back sine wave. Then generate a predicted signal and compare with the actual signal.
- */
-class GlitchAnalyzer : public LoopbackProcessor {
-public:
-
-    int32_t getState() const {
-        return mState;
-    }
-
-    double getPeakAmplitude() const {
-        return mPeakFollower.getLevel();
-    }
-
-    double getTolerance() {
-        return mTolerance;
-    }
-
-    void setTolerance(double tolerance) {
-        mTolerance = tolerance;
-        mScaledTolerance = mMagnitude * mTolerance;
-    }
-
-    void setMagnitude(double magnitude) {
-        mMagnitude = magnitude;
-        mScaledTolerance = mMagnitude * mTolerance;
-    }
-
-    int32_t getGlitchCount() const {
-        return mGlitchCount;
-    }
-
-    int32_t getStateFrameCount(int state) const {
-        return mStateFrameCounters[state];
-    }
-
-    double getSignalToNoiseDB() {
-        static const double threshold = 1.0e-14;
-        if (mMeanSquareSignal < threshold || mMeanSquareNoise < threshold) {
-            return 0.0;
-        } else {
-            double signalToNoise = mMeanSquareSignal / mMeanSquareNoise; // power ratio
-            double signalToNoiseDB = 10.0 * log(signalToNoise);
-            if (signalToNoiseDB < MIN_SNR_DB) {
-                ALOGD("ERROR - signal to noise ratio is too low! < %d dB. Adjust volume.",
-                     MIN_SNR_DB);
-                setResult(ERROR_VOLUME_TOO_LOW);
-            }
-            return signalToNoiseDB;
-        }
-    }
-
-    std::string analyze() override {
-        std::stringstream report;
-        report << "GlitchAnalyzer ------------------\n";
-        report << LOOPBACK_RESULT_TAG "peak.amplitude     = " << std::setw(8)
-               << getPeakAmplitude() << "\n";
-        report << LOOPBACK_RESULT_TAG "sine.magnitude     = " << std::setw(8)
-               << mMagnitude << "\n";
-        report << LOOPBACK_RESULT_TAG "rms.noise          = " << std::setw(8)
-               << mMeanSquareNoise << "\n";
-        report << LOOPBACK_RESULT_TAG "signal.to.noise.db = " << std::setw(8)
-               << getSignalToNoiseDB() << "\n";
-        report << LOOPBACK_RESULT_TAG "frames.accumulated = " << std::setw(8)
-               << mFramesAccumulated << "\n";
-        report << LOOPBACK_RESULT_TAG "sine.period        = " << std::setw(8)
-               << mSinePeriod << "\n";
-        report << LOOPBACK_RESULT_TAG "test.state         = " << std::setw(8)
-               << mState << "\n";
-        report << LOOPBACK_RESULT_TAG "frame.count        = " << std::setw(8)
-               << mFrameCounter << "\n";
-        // Did we ever get a lock?
-        bool gotLock = (mState == STATE_LOCKED) || (mGlitchCount > 0);
-        if (!gotLock) {
-            report << "ERROR - failed to lock on reference sine tone.\n";
-            setResult(ERROR_NO_LOCK);
-        } else {
-            // Only print if meaningful.
-            report << LOOPBACK_RESULT_TAG "glitch.count       = " << std::setw(8)
-                   << mGlitchCount << "\n";
-            report << LOOPBACK_RESULT_TAG "max.glitch         = " << std::setw(8)
-                   << mMaxGlitchDelta << "\n";
-            if (mGlitchCount > 0) {
-                report << "ERROR - number of glitches > 0\n";
-                setResult(ERROR_GLITCHES);
-            }
-        }
-        return report.str();
-    }
-
-    void printStatus() override {
-        ALOGD("st = %d, #gl = %3d,", mState, mGlitchCount);
-    }
-    /**
-     * Calculate the magnitude of the component of the input signal
-     * that matches the analysis frequency.
-     * Also calculate the phase that we can use to create a
-     * signal that matches that component.
-     * The phase will be between -PI and +PI.
-     */
-    double calculateMagnitude(double *phasePtr = nullptr) {
-        if (mFramesAccumulated == 0) {
-            return 0.0;
-        }
-        double sinMean = mSinAccumulator / mFramesAccumulated;
-        double cosMean = mCosAccumulator / mFramesAccumulated;
-        double magnitude = 2.0 * sqrt((sinMean * sinMean) + (cosMean * cosMean));
-        if (phasePtr != nullptr) {
-            double phase = M_PI_2 - atan2(sinMean, cosMean);
-            *phasePtr = phase;
-        }
-        return magnitude;
-    }
-
-    /**
-     * @param frameData contains microphone data with sine signal feedback
-     * @param channelCount
-     */
-    result_code processInputFrame(float *frameData, int /* channelCount */) override {
-        result_code result = RESULT_OK;
-
-        float sample = frameData[0];
-        float peak = mPeakFollower.process(sample);
-
-        // Force a periodic glitch to test the detector!
-        if (mForceGlitchDuration > 0) {
-            if (mForceGlitchCounter == 0) {
-                ALOGE("%s: force a glitch!!", __func__);
-                mForceGlitchCounter = getSampleRate();
-            } else if (mForceGlitchCounter <= mForceGlitchDuration) {
-                // Force an abrupt offset.
-                sample += (sample > 0.0) ? -0.5f : 0.5f;
-            }
-            --mForceGlitchCounter;
-        }
-
-        mStateFrameCounters[mState]++; // count how many frames we are in each state
-
-        switch (mState) {
-            case STATE_IDLE:
-                mDownCounter--;
-                if (mDownCounter <= 0) {
-                    mState = STATE_IMMUNE;
-                    mDownCounter = IMMUNE_FRAME_COUNT;
-                    mInputPhase = 0.0; // prevent spike at start
-                    mOutputPhase = 0.0;
-                }
-                break;
-
-            case STATE_IMMUNE:
-                mDownCounter--;
-                if (mDownCounter <= 0) {
-                    mState = STATE_WAITING_FOR_SIGNAL;
-                }
-                break;
-
-            case STATE_WAITING_FOR_SIGNAL:
-                if (peak > mThreshold) {
-                    mState = STATE_WAITING_FOR_LOCK;
-                    //ALOGD("%5d: switch to STATE_WAITING_FOR_LOCK", mFrameCounter);
-                    resetAccumulator();
-                }
-                break;
-
-            case STATE_WAITING_FOR_LOCK:
-                mSinAccumulator += sample * sinf(mInputPhase);
-                mCosAccumulator += sample * cosf(mInputPhase);
-                mFramesAccumulated++;
-                // Must be a multiple of the period or the calculation will not be accurate.
-                if (mFramesAccumulated == mSinePeriod * PERIODS_NEEDED_FOR_LOCK) {
-                    double phaseOffset = 0.0;
-                    setMagnitude(calculateMagnitude(&phaseOffset));
-//                    ALOGD("%s() mag = %f, offset = %f, prev = %f",
-//                            __func__, mMagnitude, mPhaseOffset, mPreviousPhaseOffset);
-                    if (mMagnitude > mThreshold) {
-                        if (abs(phaseOffset) < kMaxPhaseError) {
-                            mState = STATE_LOCKED;
-//                            ALOGD("%5d: switch to STATE_LOCKED", mFrameCounter);
-                        }
-                        // Adjust mInputPhase to match measured phase
-                        mInputPhase += phaseOffset;
-                    }
-                    resetAccumulator();
-                }
-                incrementInputPhase();
-                break;
-
-            case STATE_LOCKED: {
-                // Predict next sine value
-                double predicted = sinf(mInputPhase) * mMagnitude;
-                double diff = predicted - sample;
-                double absDiff = fabs(diff);
-                mMaxGlitchDelta = std::max(mMaxGlitchDelta, absDiff);
-                if (absDiff > mScaledTolerance) {
-                    result = ERROR_GLITCHES;
-                    onGlitchStart();
-//                    LOGI("diff glitch detected, absDiff = %g", absDiff);
-                } else {
-                    mSumSquareSignal += predicted * predicted;
-                    mSumSquareNoise += diff * diff;
-                    // Track incoming signal and slowly adjust magnitude to account
-                    // for drift in the DRC or AGC.
-                    mSinAccumulator += sample * sinf(mInputPhase);
-                    mCosAccumulator += sample * cosf(mInputPhase);
-                    mFramesAccumulated++;
-                    // Must be a multiple of the period or the calculation will not be accurate.
-                    if (mFramesAccumulated == mSinePeriod) {
-                        const double coefficient = 0.1;
-                        double phaseOffset = 0.0;
-                        double magnitude = calculateMagnitude(&phaseOffset);
-                        // One pole averaging filter.
-                        setMagnitude((mMagnitude * (1.0 - coefficient)) + (magnitude * coefficient));
-
-                        mMeanSquareNoise = mSumSquareNoise * mInverseSinePeriod;
-                        mMeanSquareSignal = mSumSquareSignal * mInverseSinePeriod;
-                        resetAccumulator();
-
-                        if (abs(phaseOffset) > kMaxPhaseError) {
-                            result = ERROR_GLITCHES;
-                            onGlitchStart();
-                            ALOGD("phase glitch detected, phaseOffset = %g", phaseOffset);
-                        } else if (mMagnitude < mThreshold) {
-                            result = ERROR_GLITCHES;
-                            onGlitchStart();
-                            ALOGD("magnitude glitch detected, mMagnitude = %g", mMagnitude);
-                        }
-                    }
-                }
-                incrementInputPhase();
-            } break;
-
-            case STATE_GLITCHING: {
-                // Predict next sine value
-                mGlitchLength++;
-                double predicted = sinf(mInputPhase) * mMagnitude;
-                double diff = predicted - sample;
-                double absDiff = fabs(diff);
-                mMaxGlitchDelta = std::max(mMaxGlitchDelta, absDiff);
-                if (absDiff < mScaledTolerance) { // close enough?
-                    // If we get a full sine period of non-glitch samples in a row then consider the glitch over.
-                    // We don't want to just consider a zero crossing the end of a glitch.
-                    if (mNonGlitchCount++ > mSinePeriod) {
-                        onGlitchEnd();
-                    }
-                } else {
-                    mNonGlitchCount = 0;
-                    if (mGlitchLength > (4 * mSinePeriod)) {
-                        relock();
-                    }
-                }
-                incrementInputPhase();
-            } break;
-
-            case NUM_STATES: // not a real state
-                break;
-        }
-
-        mFrameCounter++;
-
-        return result;
-    }
-
-    // advance and wrap phase
-    void incrementInputPhase() {
-        mInputPhase += mPhaseIncrement;
-        if (mInputPhase > M_PI) {
-            mInputPhase -= (2.0 * M_PI);
-        }
-    }
-
-    // advance and wrap phase
-    void incrementOutputPhase() {
-        mOutputPhase += mPhaseIncrement;
-        if (mOutputPhase > M_PI) {
-            mOutputPhase -= (2.0 * M_PI);
-        }
-    }
-
-    /**
-     * @param frameData upon return, contains the reference sine wave
-     * @param channelCount
-     */
-    result_code processOutputFrame(float *frameData, int channelCount) override {
-        float output = 0.0f;
-        // Output sine wave so we can measure it.
-        if (mState != STATE_IDLE) {
-            float sinOut = sinf(mOutputPhase);
-            incrementOutputPhase();
-            output = (sinOut * mOutputAmplitude)
-                     + (mWhiteNoise.nextRandomDouble() * kNoiseAmplitude);
-            // ALOGD("sin(%f) = %f, %f\n", mOutputPhase, sinOut,  mPhaseIncrement);
-        }
-        frameData[0] = output;
-        for (int i = 1; i < channelCount; i++) {
-            frameData[i] = 0.0f;
-        }
-        return RESULT_OK;
-    }
-
-    void onGlitchStart() {
-        mGlitchCount++;
-//        ALOGD("%5d: STARTED a glitch # %d", mFrameCounter, mGlitchCount);
-        mState = STATE_GLITCHING;
-        mGlitchLength = 1;
-        mNonGlitchCount = 0;
-    }
-
-    void onGlitchEnd() {
-//        ALOGD("%5d: ENDED a glitch # %d, length = %d", mFrameCounter, mGlitchCount, mGlitchLength);
-        mState = STATE_LOCKED;
-        resetAccumulator();
-    }
-
-    // reset the sine wave detector
-    void resetAccumulator() {
-        mFramesAccumulated = 0;
-        mSinAccumulator = 0.0;
-        mCosAccumulator = 0.0;
-        mSumSquareSignal = 0.0;
-        mSumSquareNoise = 0.0;
-    }
-
-    void relock() {
-//        ALOGD("relock: %d because of a very long %d glitch", mFrameCounter, mGlitchLength);
-        mState = STATE_WAITING_FOR_LOCK;
-        resetAccumulator();
-    }
-
-    void reset() override {
-        LoopbackProcessor::reset();
-        mState = STATE_IDLE;
-        mDownCounter = IDLE_FRAME_COUNT;
-        resetAccumulator();
-    }
-
-    void prepareToTest() override {
-        LoopbackProcessor::prepareToTest();
-        mSinePeriod = getSampleRate() / kTargetGlitchFrequency;
-        mOutputPhase = 0.0f;
-        mInverseSinePeriod = 1.0 / mSinePeriod;
-        mPhaseIncrement = 2.0 * M_PI * mInverseSinePeriod;
-        mGlitchCount = 0;
-        mMaxGlitchDelta = 0.0;
-        for (int i = 0; i < NUM_STATES; i++) {
-            mStateFrameCounters[i] = 0;
-        }
-    }
-
-private:
-
-    // These must match the values in GlitchActivity.java
-    enum sine_state_t {
-        STATE_IDLE,               // beginning
-        STATE_IMMUNE,             // ignoring input, waiting for HW to settle
-        STATE_WAITING_FOR_SIGNAL, // looking for a loud signal
-        STATE_WAITING_FOR_LOCK,   // trying to lock onto the phase of the sine
-        STATE_LOCKED,             // locked on the sine wave, looking for glitches
-        STATE_GLITCHING,           // locked on the sine wave but glitching
-        NUM_STATES
-    };
-
-    enum constants {
-        // Arbitrary durations, assuming 48000 Hz
-        IDLE_FRAME_COUNT = 48 * 100,
-        IMMUNE_FRAME_COUNT = 48 * 100,
-        PERIODS_NEEDED_FOR_LOCK = 8,
-        MIN_SNR_DB = 65
-    };
-
-    static constexpr float kNoiseAmplitude = 0.00; // Used to experiment with warbling caused by DRC.
-    static constexpr int kTargetGlitchFrequency = 607;
-    static constexpr double kMaxPhaseError = M_PI * 0.05;
-
-    float   mTolerance = 0.10; // scaled from 0.0 to 1.0
-    double  mThreshold = 0.005;
-    int     mSinePeriod = 1; // this will be set before use
-    double  mInverseSinePeriod = 1.0;
-
-    int32_t mStateFrameCounters[NUM_STATES];
-
-    double  mPhaseIncrement = 0.0;
-    double  mInputPhase = 0.0;
-    double  mOutputPhase = 0.0;
-    double  mMagnitude = 0.0;
-    int32_t mFramesAccumulated = 0;
-    double  mSinAccumulator = 0.0;
-    double  mCosAccumulator = 0.0;
-    double  mMaxGlitchDelta = 0.0;
-    int32_t mGlitchCount = 0;
-    int32_t mNonGlitchCount = 0;
-    int32_t mGlitchLength = 0;
-    // This is used for processing every frame so we cache it here.
-    double  mScaledTolerance = 0.0;
-    int     mDownCounter = IDLE_FRAME_COUNT;
-    int32_t mFrameCounter = 0;
-    double  mOutputAmplitude = 0.75;
-
-    int32_t mForceGlitchDuration = 0; // if > 0 then force a glitch for debugging
-    int32_t mForceGlitchCounter = 4 * 48000; // count down and trigger at zero
-
-    // measure background noise continuously as a deviation from the expected signal
-    double  mSumSquareSignal = 0.0;
-    double  mSumSquareNoise = 0.0;
-    double  mMeanSquareSignal = 0.0;
-    double  mMeanSquareNoise = 0.0;
-
-    PeakDetector  mPeakFollower;
-
-    PseudoRandom  mWhiteNoise;
-
-    sine_state_t  mState = STATE_IDLE;
-};
-
-
-#endif //ANALYZER_GLITCH_ANALYZER_H
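The GlitchAnalyzer removed above locks onto its reference tone by accumulating sine/cosine products over a whole number of periods and converting them into a magnitude and phase (calculateMagnitude()). Below is a minimal standalone sketch of that quadrature detection, reusing the analyzer's own constants (48000 Hz sample rate, 607 Hz target frequency, 8 periods for lock); the synthesized test tone and its amplitude/phase are demo values only.

#include <cmath>
#include <cstdio>
#include <vector>

int main() {
    const int sampleRate = 48000;                       // kDefaultSampleRate
    const int sinePeriod = sampleRate / 607;            // kTargetGlitchFrequency
    const double phaseIncrement = 2.0 * M_PI / sinePeriod;
    const int numFrames = sinePeriod * 8;               // PERIODS_NEEDED_FOR_LOCK

    // Synthesize a tone with a known magnitude and phase offset (demo values).
    const double trueMagnitude = 0.5;
    const double truePhase = 0.3;
    std::vector<double> input(numFrames);
    double phase = 0.0;
    for (int i = 0; i < numFrames; i++) {
        input[i] = trueMagnitude * sin(phase + truePhase);
        phase += phaseIncrement;
    }

    // Accumulate sin/cos products over whole periods, as processInputFrame() does.
    double sinAccumulator = 0.0;
    double cosAccumulator = 0.0;
    phase = 0.0;
    for (int i = 0; i < numFrames; i++) {
        sinAccumulator += input[i] * sin(phase);
        cosAccumulator += input[i] * cos(phase);
        phase += phaseIncrement;
    }

    // Same math as calculateMagnitude().
    const double sinMean = sinAccumulator / numFrames;
    const double cosMean = cosAccumulator / numFrames;
    const double magnitude = 2.0 * sqrt((sinMean * sinMean) + (cosMean * cosMean));
    const double measuredPhase = M_PI_2 - atan2(sinMean, cosMean);

    printf("magnitude = %f (expected %f), phase = %f (expected %f)\n",
           magnitude, trueMagnitude, measuredPhase, truePhase);
    return 0;
}

Once locked, the analyzer compares each incoming sample against sinf(mInputPhase) * mMagnitude and flags a glitch whenever the difference exceeds mScaledTolerance.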
diff --git a/media/libaaudio/examples/loopback/src/analyzer/LatencyAnalyzer.h b/media/libaaudio/examples/loopback/src/analyzer/LatencyAnalyzer.h
deleted file mode 100644
index e506791..0000000
--- a/media/libaaudio/examples/loopback/src/analyzer/LatencyAnalyzer.h
+++ /dev/null
@@ -1,606 +0,0 @@
-/*
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * Tools for measuring latency and for detecting glitches.
- * These classes are pure math and can be used with any audio system.
- */
-
-#ifndef ANALYZER_LATENCY_ANALYZER_H
-#define ANALYZER_LATENCY_ANALYZER_H
-
-#include <algorithm>
-#include <assert.h>
-#include <cctype>
-#include <iomanip>
-#include <iostream>
-#include <math.h>
-#include <memory>
-#include <sstream>
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <vector>
-
-#include "PeakDetector.h"
-#include "PseudoRandom.h"
-#include "RandomPulseGenerator.h"
-
-// This is used when the code is in Oboe.
-#ifndef ALOGD
-#define ALOGD printf
-#define ALOGE printf
-#define ALOGW printf
-#endif
-
-#define LOOPBACK_RESULT_TAG  "RESULT: "
-
-static constexpr int32_t kDefaultSampleRate = 48000;
-static constexpr int32_t kMillisPerSecond   = 1000;
-static constexpr int32_t kMaxLatencyMillis  = 700;  // arbitrary and generous
-static constexpr double  kMinimumConfidence = 0.2;
-
-struct LatencyReport {
-    int32_t latencyInFrames = 0;
-    double confidence = 0.0;
-
-    void reset() {
-        latencyInFrames = 0;
-        confidence = 0.0;
-    }
-};
-
-// Calculate a normalized cross correlation.
-static double calculateNormalizedCorrelation(const float *a,
-                                             const float *b,
-                                             int windowSize) {
-    double correlation = 0.0;
-    double sumProducts = 0.0;
-    double sumSquares = 0.0;
-
-    // Correlate a against b.
-    for (int i = 0; i < windowSize; i++) {
-        float s1 = a[i];
-        float s2 = b[i];
-        // Use a normalized cross-correlation.
-        sumProducts += s1 * s2;
-        sumSquares += ((s1 * s1) + (s2 * s2));
-    }
-
-    if (sumSquares >= 1.0e-9) {
-        correlation = 2.0 * sumProducts / sumSquares;
-    }
-    return correlation;
-}
-
-static double calculateRootMeanSquare(float *data, int32_t numSamples) {
-    double sum = 0.0;
-    for (int32_t i = 0; i < numSamples; i++) {
-        float sample = data[i];
-        sum += sample * sample;
-    }
-    return sqrt(sum / numSamples);
-}
-
-/**
- * Monophonic recording with processing.
- */
-class AudioRecording
-{
-public:
-
-    void allocate(int maxFrames) {
-        mData = std::make_unique<float[]>(maxFrames);
-        mMaxFrames = maxFrames;
-    }
-
-    // Write SHORT data from the first channel.
-    int32_t write(int16_t *inputData, int32_t inputChannelCount, int32_t numFrames) {
-        // stop at end of buffer
-        if ((mFrameCounter + numFrames) > mMaxFrames) {
-            numFrames = mMaxFrames - mFrameCounter;
-        }
-        for (int i = 0; i < numFrames; i++) {
-            mData[mFrameCounter++] = inputData[i * inputChannelCount] * (1.0f / 32768);
-        }
-        return numFrames;
-    }
-
-    // Write FLOAT data from the first channel.
-    int32_t write(float *inputData, int32_t inputChannelCount, int32_t numFrames) {
-        // stop at end of buffer
-        if ((mFrameCounter + numFrames) > mMaxFrames) {
-            numFrames = mMaxFrames - mFrameCounter;
-        }
-        for (int i = 0; i < numFrames; i++) {
-            mData[mFrameCounter++] = inputData[i * inputChannelCount];
-        }
-        return numFrames;
-    }
-
-    // Write FLOAT data from the first channel.
-    int32_t write(float sample) {
-        // stop at end of buffer
-        if (mFrameCounter < mMaxFrames) {
-            mData[mFrameCounter++] = sample;
-            return 1;
-        }
-        return 0;
-    }
-
-    void clear() {
-        mFrameCounter = 0;
-    }
-    int32_t size() const {
-        return mFrameCounter;
-    }
-
-    bool isFull() const {
-        return mFrameCounter >= mMaxFrames;
-    }
-
-    float *getData() const {
-        return mData.get();
-    }
-
-    void setSampleRate(int32_t sampleRate) {
-        mSampleRate = sampleRate;
-    }
-
-    int32_t getSampleRate() const {
-        return mSampleRate;
-    }
-
-    /**
-     * Square the samples so they are all positive and so the peaks are emphasized.
-     */
-    void square() {
-        float *x = mData.get();
-        for (int i = 0; i < mFrameCounter; i++) {
-            x[i] *= x[i];
-        }
-    }
-
-    /**
-     * Amplify a signal so that the peak matches the specified target.
-     *
-     * @param target final max value
-     * @return gain applied to signal
-     */
-    float normalize(float target) {
-        float maxValue = 1.0e-9f;
-        for (int i = 0; i < mFrameCounter; i++) {
-            maxValue = std::max(maxValue, abs(mData[i]));
-        }
-        float gain = target / maxValue;
-        for (int i = 0; i < mFrameCounter; i++) {
-            mData[i] *= gain;
-        }
-        return gain;
-    }
-
-private:
-    std::unique_ptr<float[]> mData;
-    int32_t       mFrameCounter = 0;
-    int32_t       mMaxFrames = 0;
-    int32_t       mSampleRate = kDefaultSampleRate; // common default
-};
-
-static int measureLatencyFromPulse(AudioRecording &recorded,
-                                   AudioRecording &pulse,
-                                   LatencyReport *report) {
-
-    report->latencyInFrames = 0;
-    report->confidence = 0.0;
-
-    int numCorrelations = recorded.size() - pulse.size();
-    if (numCorrelations < 10) {
-        ALOGE("%s() recording too small = %d frames\n", __func__, recorded.size());
-        return -1;
-    }
-    std::unique_ptr<float[]> correlations = std::make_unique<float[]>(numCorrelations);
-
-    // Correlate pulse against the recorded data.
-    for (int i = 0; i < numCorrelations; i++) {
-        float correlation = (float) calculateNormalizedCorrelation(&recorded.getData()[i],
-                                                                   &pulse.getData()[0],
-                                                                   pulse.size());
-        correlations[i] = correlation;
-    }
-
-    // Find highest peak in correlation array.
-    float peakCorrelation = 0.0;
-    int peakIndex = -1;
-    for (int i = 0; i < numCorrelations; i++) {
-        float value = abs(correlations[i]);
-        if (value > peakCorrelation) {
-            peakCorrelation = value;
-            peakIndex = i;
-        }
-    }
-    if (peakIndex < 0) {
-        ALOGE("%s() no signal for correlation\n", __func__);
-        return -2;
-    }
-
-    report->latencyInFrames = peakIndex;
-    report->confidence = peakCorrelation;
-
-    return 0;
-}
-
-// ====================================================================================
-class LoopbackProcessor {
-public:
-    virtual ~LoopbackProcessor() = default;
-
-    enum result_code {
-        RESULT_OK = 0,
-        ERROR_NOISY = -99,
-        ERROR_VOLUME_TOO_LOW,
-        ERROR_VOLUME_TOO_HIGH,
-        ERROR_CONFIDENCE,
-        ERROR_INVALID_STATE,
-        ERROR_GLITCHES,
-        ERROR_NO_LOCK
-    };
-
-    virtual void prepareToTest() {
-        reset();
-    }
-
-    virtual void reset() {
-        mResult = 0;
-        mResetCount++;
-    }
-
-    virtual result_code processInputFrame(float *frameData, int channelCount) = 0;
-    virtual result_code processOutputFrame(float *frameData, int channelCount) = 0;
-
-    void process(float *inputData, int inputChannelCount, int numInputFrames,
-                 float *outputData, int outputChannelCount, int numOutputFrames) {
-        int numBoth = std::min(numInputFrames, numOutputFrames);
-        // Process one frame at a time.
-        for (int i = 0; i < numBoth; i++) {
-            processInputFrame(inputData, inputChannelCount);
-            inputData += inputChannelCount;
-            processOutputFrame(outputData, outputChannelCount);
-            outputData += outputChannelCount;
-        }
-        // If there is more input than output.
-        for (int i = numBoth; i < numInputFrames; i++) {
-            processInputFrame(inputData, inputChannelCount);
-            inputData += inputChannelCount;
-        }
-        // If there is more output than input.
-        for (int i = numBoth; i < numOutputFrames; i++) {
-            processOutputFrame(outputData, outputChannelCount);
-            outputData += outputChannelCount;
-        }
-    }
-
-    virtual std::string analyze() = 0;
-
-    virtual void printStatus() {};
-
-    int32_t getResult() {
-        return mResult;
-    }
-
-    void setResult(int32_t result) {
-        mResult = result;
-    }
-
-    virtual bool isDone() {
-        return false;
-    }
-
-    virtual int save(const char *fileName) {
-        (void) fileName;
-        return -1;
-    }
-
-    virtual int load(const char *fileName) {
-        (void) fileName;
-        return -1;
-    }
-
-    virtual void setSampleRate(int32_t sampleRate) {
-        mSampleRate = sampleRate;
-    }
-
-    int32_t getSampleRate() const {
-        return mSampleRate;
-    }
-
-    int32_t getResetCount() const {
-        return mResetCount;
-    }
-
-    /** Called when not enough input frames could be read after synchronization.
-     */
-    virtual void onInsufficientRead() {
-        reset();
-    }
-
-protected:
-    int32_t   mResetCount = 0;
-
-private:
-    int32_t mSampleRate = kDefaultSampleRate;
-    int32_t mResult = 0;
-};
-
-class LatencyAnalyzer : public LoopbackProcessor {
-public:
-
-    LatencyAnalyzer() : LoopbackProcessor() {}
-    virtual ~LatencyAnalyzer() = default;
-
-    virtual int32_t getProgress() const = 0;
-
-    virtual int getState() = 0;
-
-    // @return latency in frames
-    virtual int32_t getMeasuredLatency() = 0;
-
-    virtual double getMeasuredConfidence() = 0;
-
-    virtual double getBackgroundRMS() = 0;
-
-    virtual double getSignalRMS() = 0;
-
-};
-
-// ====================================================================================
-/**
- * Measure latency given loopback stream data.
- * Use an encoded bit train as the sound source because it
- * has an unambiguous correlation value.
- * Uses a state machine to cycle through various stages.
- *
- */
-class PulseLatencyAnalyzer : public LatencyAnalyzer {
-public:
-
-    PulseLatencyAnalyzer() : LatencyAnalyzer() {
-        int32_t maxLatencyFrames = getSampleRate() * kMaxLatencyMillis / kMillisPerSecond;
-        int32_t numPulseBits = getSampleRate() * kPulseLengthMillis
-                / (kFramesPerEncodedBit * kMillisPerSecond);
-        int32_t  pulseLength = numPulseBits * kFramesPerEncodedBit;
-        mFramesToRecord = pulseLength + maxLatencyFrames;
-        mAudioRecording.allocate(mFramesToRecord);
-        mAudioRecording.setSampleRate(getSampleRate());
-        generateRandomPulse(pulseLength);
-    }
-
-    void generateRandomPulse(int32_t pulseLength) {
-        mPulse.allocate(pulseLength);
-        RandomPulseGenerator pulser(kFramesPerEncodedBit);
-        for (int i = 0; i < pulseLength; i++) {
-            mPulse.write(pulser.nextFloat());
-        }
-    }
-
-    int getState() override {
-        return mState;
-    }
-
-    void setSampleRate(int32_t sampleRate) override {
-        LoopbackProcessor::setSampleRate(sampleRate);
-        mAudioRecording.setSampleRate(sampleRate);
-    }
-
-    void reset() override {
-        LoopbackProcessor::reset();
-        mDownCounter = getSampleRate() / 2;
-        mLoopCounter = 0;
-
-        mPulseCursor = 0;
-        mBackgroundSumSquare = 0.0f;
-        mBackgroundSumCount = 0;
-        mBackgroundRMS = 0.0f;
-        mSignalRMS = 0.0f;
-
-        mState = STATE_MEASURE_BACKGROUND;
-        mAudioRecording.clear();
-        mLatencyReport.reset();
-    }
-
-    bool hasEnoughData() {
-        return mAudioRecording.isFull();
-    }
-
-    bool isDone() override {
-        return mState == STATE_DONE;
-    }
-
-    int32_t getProgress() const override {
-        return mAudioRecording.size();
-    }
-
-    std::string analyze() override {
-        std::stringstream report;
-        report << "PulseLatencyAnalyzer ---------------\n";
-        report << LOOPBACK_RESULT_TAG "test.state             = "
-                << std::setw(8) << mState << "\n";
-        report << LOOPBACK_RESULT_TAG "test.state.name        = "
-                << convertStateToText(mState) << "\n";
-        report << LOOPBACK_RESULT_TAG "background.rms         = "
-                << std::setw(8) << mBackgroundRMS << "\n";
-
-        int32_t newResult = RESULT_OK;
-        if (mState != STATE_GOT_DATA) {
-            report << "WARNING - Bad state. Check volume on device.\n";
-            // setResult(ERROR_INVALID_STATE);
-        } else {
-            float gain = mAudioRecording.normalize(1.0f);
-            measureLatencyFromPulse(mAudioRecording,
-                                    mPulse,
-                                    &mLatencyReport);
-
-            if (mLatencyReport.confidence < kMinimumConfidence) {
-                report << "   ERROR - confidence too low!";
-                newResult = ERROR_CONFIDENCE;
-            } else {
-                mSignalRMS = calculateRootMeanSquare(
-                        &mAudioRecording.getData()[mLatencyReport.latencyInFrames], mPulse.size())
-                                / gain;
-            }
-            double latencyMillis = kMillisPerSecond * (double) mLatencyReport.latencyInFrames
-                                   / getSampleRate();
-            report << LOOPBACK_RESULT_TAG "latency.frames         = " << std::setw(8)
-                   << mLatencyReport.latencyInFrames << "\n";
-            report << LOOPBACK_RESULT_TAG "latency.msec           = " << std::setw(8)
-                   << latencyMillis << "\n";
-            report << LOOPBACK_RESULT_TAG "latency.confidence     = " << std::setw(8)
-                   << mLatencyReport.confidence << "\n";
-        }
-        mState = STATE_DONE;
-        if (getResult() == RESULT_OK) {
-            setResult(newResult);
-        }
-
-        return report.str();
-    }
-
-    int32_t getMeasuredLatency() override {
-        return mLatencyReport.latencyInFrames;
-    }
-
-    double getMeasuredConfidence() override {
-        return mLatencyReport.confidence;
-    }
-
-    double getBackgroundRMS() override {
-        return mBackgroundRMS;
-    }
-
-    double getSignalRMS() override {
-        return mSignalRMS;
-    }
-
-    void printStatus() override {
-        ALOGD("st = %d", mState);
-    }
-
-    result_code processInputFrame(float *frameData, int channelCount) override {
-        echo_state nextState = mState;
-        mLoopCounter++;
-
-        switch (mState) {
-            case STATE_MEASURE_BACKGROUND:
-                // Measure background RMS on channel 0
-                mBackgroundSumSquare += frameData[0] * frameData[0];
-                mBackgroundSumCount++;
-                mDownCounter--;
-                if (mDownCounter <= 0) {
-                    mBackgroundRMS = sqrtf(mBackgroundSumSquare / mBackgroundSumCount);
-                    nextState = STATE_IN_PULSE;
-                    mPulseCursor = 0;
-                }
-                break;
-
-            case STATE_IN_PULSE:
-                // Record input until the mAudioRecording is full.
-                mAudioRecording.write(frameData, channelCount, 1);
-                if (hasEnoughData()) {
-                    nextState = STATE_GOT_DATA;
-                }
-                break;
-
-            case STATE_GOT_DATA:
-            case STATE_DONE:
-            default:
-                break;
-        }
-
-        mState = nextState;
-        return RESULT_OK;
-    }
-
-    result_code processOutputFrame(float *frameData, int channelCount) override {
-        switch (mState) {
-            case STATE_IN_PULSE:
-                if (mPulseCursor < mPulse.size()) {
-                    float pulseSample = mPulse.getData()[mPulseCursor++];
-                    for (int i = 0; i < channelCount; i++) {
-                        frameData[i] = pulseSample;
-                    }
-                } else {
-                    for (int i = 0; i < channelCount; i++) {
-                        frameData[i] = 0;
-                    }
-                }
-                break;
-
-            case STATE_MEASURE_BACKGROUND:
-            case STATE_GOT_DATA:
-            case STATE_DONE:
-            default:
-                for (int i = 0; i < channelCount; i++) {
-                    frameData[i] = 0.0f; // silence
-                }
-                break;
-        }
-
-        return RESULT_OK;
-    }
-
-private:
-
-    enum echo_state {
-        STATE_MEASURE_BACKGROUND,
-        STATE_IN_PULSE,
-        STATE_GOT_DATA, // must match RoundTripLatencyActivity.java
-        STATE_DONE,
-    };
-
-    const char *convertStateToText(echo_state state) {
-        switch (state) {
-            case STATE_MEASURE_BACKGROUND:
-                return "INIT";
-            case STATE_IN_PULSE:
-                return "PULSE";
-            case STATE_GOT_DATA:
-                return "GOT_DATA";
-            case STATE_DONE:
-                return "DONE";
-        }
-        return "UNKNOWN";
-    }
-
-    int32_t         mDownCounter = 500;
-    int32_t         mLoopCounter = 0;
-    echo_state      mState = STATE_MEASURE_BACKGROUND;
-
-    static constexpr int32_t kFramesPerEncodedBit = 8; // multiple of 2
-    static constexpr int32_t kPulseLengthMillis = 500;
-
-    AudioRecording     mPulse;
-    int32_t            mPulseCursor = 0;
-
-    double             mBackgroundSumSquare = 0.0;
-    int32_t            mBackgroundSumCount = 0;
-    double             mBackgroundRMS = 0.0;
-    double             mSignalRMS = 0.0;
-    int32_t            mFramesToRecord = 0;
-
-    AudioRecording     mAudioRecording; // contains only the input after starting the pulse
-    LatencyReport      mLatencyReport;
-};
-
-#endif // ANALYZER_LATENCY_ANALYZER_H
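measureLatencyFromPulse() above estimates latency by sliding the known pulse across the recording and taking the offset with the largest normalized correlation as the latency in frames, with the peak value as the confidence. The following self-contained sketch shows that idea; the square pulse, the attenuation, and the injected delay are arbitrary demo values (the real analyzer uses a random Manchester-encoded bit train).

#include <cmath>
#include <cstdio>
#include <vector>

static double normalizedCorrelation(const float *a, const float *b, int n) {
    double sumProducts = 0.0;
    double sumSquares = 0.0;
    for (int i = 0; i < n; i++) {
        sumProducts += a[i] * b[i];
        sumSquares += (a[i] * a[i]) + (b[i] * b[i]);
    }
    return (sumSquares >= 1.0e-9) ? 2.0 * sumProducts / sumSquares : 0.0;
}

int main() {
    const int pulseLength = 256;
    const int trueLatency = 480;               // frames of delay to recover
    const int recordingLength = 4800;

    // A crude square pulse; the real analyzer uses a random Manchester-encoded
    // bit train so the correlation peak is unambiguous.
    std::vector<float> pulse(pulseLength);
    for (int i = 0; i < pulseLength; i++) {
        pulse[i] = ((i / 8) & 1) ? 1.0f : -1.0f;
    }

    // Simulate a recording containing an attenuated, delayed echo of the pulse.
    std::vector<float> recording(recordingLength, 0.0f);
    for (int i = 0; i < pulseLength; i++) {
        recording[trueLatency + i] = 0.4f * pulse[i];
    }

    // Slide the pulse across the recording and keep the strongest correlation.
    int peakIndex = -1;
    double peakCorrelation = 0.0;
    const int numCorrelations = recordingLength - pulseLength;
    for (int i = 0; i < numCorrelations; i++) {
        double c = fabs(normalizedCorrelation(&recording[i], &pulse[0], pulseLength));
        if (c > peakCorrelation) {
            peakCorrelation = c;
            peakIndex = i;
        }
    }
    printf("latency = %d frames (expected %d), confidence = %f\n",
           peakIndex, trueLatency, peakCorrelation);
    return 0;
}

With these demo numbers the confidence works out to roughly 0.69, comfortably above kMinimumConfidence (0.2).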
diff --git a/media/libaaudio/examples/loopback/src/analyzer/ManchesterEncoder.h b/media/libaaudio/examples/loopback/src/analyzer/ManchesterEncoder.h
deleted file mode 100644
index 0a4bd5b..0000000
--- a/media/libaaudio/examples/loopback/src/analyzer/ManchesterEncoder.h
+++ /dev/null
@@ -1,98 +0,0 @@
-/*
- * Copyright 2019 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANALYZER_MANCHESTER_ENCODER_H
-#define ANALYZER_MANCHESTER_ENCODER_H
-
-#include <cstdint>
-
-/**
- * Encode bytes using Manchester Coding scheme.
- *
- * Manchester Code is self clocking.
- * There is a transition in the middle of every bit.
- * Zero is high then low.
- * One is low then high.
- *
- * This avoids having long DC sections that would droop when
- * passed through analog circuits with AC coupling.
- *
- * IEEE 802.3 compatible.
- */
-
-class ManchesterEncoder {
-public:
-    ManchesterEncoder(int samplesPerPulse)
-            : mSamplesPerPulse(samplesPerPulse)
-            , mSamplesPerPulseHalf(samplesPerPulse / 2)
-            , mCursor(samplesPerPulse) {
-    }
-
-    virtual ~ManchesterEncoder() = default;
-
-    /**
-     * This will be called when the next byte is needed.
-     * @return
-     */
-    virtual uint8_t onNextByte() = 0;
-
-    /**
-     * Generate the next floating point sample.
-     * @return
-     */
-    virtual float nextFloat() {
-        advanceSample();
-        if (mCurrentBit) {
-            return (mCursor < mSamplesPerPulseHalf) ? -1.0f : 1.0f; // one
-        } else {
-            return (mCursor < mSamplesPerPulseHalf) ? 1.0f : -1.0f; // zero
-        }
-    }
-
-protected:
-    /**
-     * This will be called when a new bit is ready to be encoded.
-     * It can be used to prepare the encoded samples.
-     * @param current
-     */
-    virtual void onNextBit(bool /* current */) {};
-
-    void advanceSample() {
-        // Are we ready for a new bit?
-        if (++mCursor >= mSamplesPerPulse) {
-            mCursor = 0;
-            if (mBitsLeft == 0) {
-                mCurrentByte = onNextByte();
-                mBitsLeft = 8;
-            }
-            --mBitsLeft;
-            mCurrentBit = (mCurrentByte >> mBitsLeft) & 1;
-            onNextBit(mCurrentBit);
-        }
-    }
-
-    bool getCurrentBit() {
-        return mCurrentBit;
-    }
-
-    const int mSamplesPerPulse;
-    const int mSamplesPerPulseHalf;
-    int       mCursor;
-    int       mBitsLeft = 0;
-    uint8_t   mCurrentByte = 0;
-    bool      mCurrentBit = false;
-};
-#endif //ANALYZER_MANCHESTER_ENCODER_H
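As the header above describes, Manchester code puts a transition in the middle of every bit: a zero is high-then-low and a one is low-then-high, which keeps the signal free of long DC stretches. A tiny standalone illustration of that rule (the byte value and pulse width are arbitrary demo choices):

#include <cstdint>
#include <cstdio>

int main() {
    const int samplesPerPulse = 8;            // arbitrary demo pulse width
    const int half = samplesPerPulse / 2;
    const uint8_t byte = 0xB2;                // 1011 0010, sent MSB first

    for (int bit = 7; bit >= 0; bit--) {
        const bool one = (byte >> bit) & 1;
        printf("bit %d:", one ? 1 : 0);
        for (int s = 0; s < samplesPerPulse; s++) {
            // Transition in the middle of every bit.
            float sample = one ? ((s < half) ? -1.0f : 1.0f)   // one: low then high
                               : ((s < half) ? 1.0f : -1.0f);  // zero: high then low
            printf(" %+.0f", sample);
        }
        printf("\n");
    }
    return 0;
}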
diff --git a/media/libaaudio/examples/loopback/src/analyzer/PeakDetector.h b/media/libaaudio/examples/loopback/src/analyzer/PeakDetector.h
deleted file mode 100644
index 4b3b4e7..0000000
--- a/media/libaaudio/examples/loopback/src/analyzer/PeakDetector.h
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- * Copyright 2015 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANALYZER_PEAK_DETECTOR_H
-#define ANALYZER_PEAK_DETECTOR_H
-
-#include <math.h>
-
-/**
- * Measure a peak envelope by rising with the peaks,
- * and decaying exponentially after each peak.
- * The absolute value of the input signal is used.
- */
-class PeakDetector {
-public:
-
-    void reset() {
-        mLevel = 0.0;
-    }
-
-    double process(double input) {
-        mLevel *= mDecay; // exponential decay
-        input = fabs(input);
-        // never fall below the input signal
-        if (input > mLevel) {
-            mLevel = input;
-        }
-        return mLevel;
-    }
-
-    double getLevel() const {
-        return mLevel;
-    }
-
-    double getDecay() const {
-        return mDecay;
-    }
-
-    /**
-     * Multiply the level by this amount on every iteration.
-     * This provides an exponential decay curve.
- * A value just under 1.0 is best, for example, 0.99.
-     * @param decay scale level for each input
-     */
-    void setDecay(double decay) {
-        mDecay = decay;
-    }
-
-private:
-    static constexpr double kDefaultDecay = 0.99f;
-
-    double mLevel = 0.0;
-    double mDecay = kDefaultDecay;
-};
-#endif //ANALYZER_PEAK_DETECTOR_H
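The PeakDetector above tracks an envelope that rises instantly to |input| and decays exponentially between peaks (level *= decay on every sample). A short standalone sketch with the class's default decay of 0.99 and a single made-up impulse:

#include <cmath>
#include <cstdio>

int main() {
    const double decay = 0.99;   // the class default (kDefaultDecay)
    double level = 0.0;

    for (int i = 0; i < 20; i++) {
        double input = (i == 0) ? 0.8 : 0.0;   // one impulse, then silence
        level *= decay;                        // exponential decay
        if (fabs(input) > level) {
            level = fabs(input);               // never fall below the input
        }
        printf("sample %2d: level = %f\n", i, level);
    }
    return 0;
}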
diff --git a/media/libaaudio/examples/loopback/src/analyzer/PseudoRandom.h b/media/libaaudio/examples/loopback/src/analyzer/PseudoRandom.h
deleted file mode 100644
index 1c4938c..0000000
--- a/media/libaaudio/examples/loopback/src/analyzer/PseudoRandom.h
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-#ifndef ANALYZER_PSEUDORANDOM_H
-#define ANALYZER_PSEUDORANDOM_H
-
-#include <cctype>
-
-class PseudoRandom {
-public:
-    PseudoRandom(int64_t seed = 99887766)
-            :    mSeed(seed)
-    {}
-
-    /**
-     * Returns the next random double from -1.0 to 1.0
-     *
-     * @return value from -1.0 to 1.0
-     */
-    double nextRandomDouble() {
-        return nextRandomInteger() * (0.5 / (((int32_t)1) << 30));
-    }
-
-    /** Calculate a random 32-bit number using a linear-congruential method
-     * with known real-time performance.
-     */
-    int32_t nextRandomInteger() {
-#if __has_builtin(__builtin_mul_overflow) && __has_builtin(__builtin_add_overflow)
-        int64_t prod;
-        // Use values for 64-bit sequence from MMIX by Donald Knuth.
-        __builtin_mul_overflow(mSeed, (int64_t)6364136223846793005, &prod);
-        __builtin_add_overflow(prod, (int64_t)1442695040888963407, &mSeed);
-#else
-        mSeed = (mSeed * (int64_t)6364136223846793005) + (int64_t)1442695040888963407;
-#endif
-        return (int32_t) (mSeed >> 32); // The higher bits have a longer sequence.
-    }
-
-private:
-    int64_t mSeed;
-};
-
-#endif //ANALYZER_PSEUDORANDOM_H
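PseudoRandom above is a 64-bit linear-congruential generator using Knuth's MMIX multiplier and increment; the high 32 bits are taken as the output and scaled by 2^-31 into [-1.0, 1.0). A standalone sketch of that recurrence, done in unsigned arithmetic here to avoid signed overflow (which the original sidesteps with the overflow builtins):

#include <cstdint>
#include <cstdio>

int main() {
    uint64_t seed = 99887766u;   // same default seed as PseudoRandom
    for (int i = 0; i < 5; i++) {
        // One LCG step with Knuth's MMIX constants.
        seed = seed * UINT64_C(6364136223846793005) + UINT64_C(1442695040888963407);
        // The higher bits have a longer sequence, so take the top 32 bits.
        int32_t high = (int32_t)(seed >> 32);
        // Scale into [-1.0, 1.0), as nextRandomDouble() does.
        double value = high * (0.5 / (1LL << 30));
        printf("%11d -> %9.6f\n", high, value);
    }
    return 0;
}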
diff --git a/media/libaaudio/examples/loopback/src/analyzer/RandomPulseGenerator.h b/media/libaaudio/examples/loopback/src/analyzer/RandomPulseGenerator.h
deleted file mode 100644
index 030050b..0000000
--- a/media/libaaudio/examples/loopback/src/analyzer/RandomPulseGenerator.h
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Copyright 2015 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANALYZER_RANDOM_PULSE_GENERATOR_H
-#define ANALYZER_RANDOM_PULSE_GENERATOR_H
-
-#include <stdlib.h>
-#include "RoundedManchesterEncoder.h"
-
-/**
- * Encode random ones and zeros using Manchester Code per IEEE 802.3.
- */
-class RandomPulseGenerator : public RoundedManchesterEncoder {
-public:
-    RandomPulseGenerator(int samplesPerPulse)
-    : RoundedManchesterEncoder(samplesPerPulse) {
-    }
-
-    virtual ~RandomPulseGenerator() = default;
-
-    /**
-     * This will be called when the next byte is needed.
-     * @return random byte
-     */
-    uint8_t onNextByte() override {
-        return static_cast<uint8_t>(rand());
-    }
-};
-
-#endif //ANALYZER_RANDOM_PULSE_GENERATOR_H
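A minimal usage sketch for the generator above, mirroring PulseLatencyAnalyzer::generateRandomPulse() from LatencyAnalyzer.h; it assumes RandomPulseGenerator.h is still reachable on the include path (per the loopback.cpp change below, the analyzers are now presumably pulled in from OboeTester in external/oboe).

#include <cstdio>

#include "RandomPulseGenerator.h"

int main() {
    const int kFramesPerEncodedBit = 8;                 // same value the analyzer uses
    const int pulseLength = 8 * kFramesPerEncodedBit;   // a few encoded bits' worth
    RandomPulseGenerator pulser(kFramesPerEncodedBit);
    for (int i = 0; i < pulseLength; i++) {
        printf("%8.5f\n", pulser.nextFloat());
    }
    return 0;
}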
diff --git a/media/libaaudio/examples/loopback/src/analyzer/RoundedManchesterEncoder.h b/media/libaaudio/examples/loopback/src/analyzer/RoundedManchesterEncoder.h
deleted file mode 100644
index f2eba84..0000000
--- a/media/libaaudio/examples/loopback/src/analyzer/RoundedManchesterEncoder.h
+++ /dev/null
@@ -1,88 +0,0 @@
-/*
- * Copyright 2019 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANALYZER_ROUNDED_MANCHESTER_ENCODER_H
-#define ANALYZER_ROUNDED_MANCHESTER_ENCODER_H
-
-#include <math.h>
-#include <memory.h>
-#include <stdlib.h>
-#include "ManchesterEncoder.h"
-
-/**
- * Encode bytes using Manchester Code.
- * Round the edges using a half cosine to reduce ringing caused by a hard edge.
- */
-
-class RoundedManchesterEncoder : public ManchesterEncoder {
-public:
-    RoundedManchesterEncoder(int samplesPerPulse)
-            : ManchesterEncoder(samplesPerPulse) {
-        int rampSize = samplesPerPulse / 4;
-        mZeroAfterZero = std::make_unique<float[]>(samplesPerPulse);
-        mZeroAfterOne = std::make_unique<float[]>(samplesPerPulse);
-
-        int sampleIndex = 0;
-        for (int rampIndex = 0; rampIndex < rampSize; rampIndex++) {
-            float phase = (rampIndex + 1) * M_PI / rampSize;
-            float sample = -cosf(phase);
-            mZeroAfterZero[sampleIndex] = sample;
-            mZeroAfterOne[sampleIndex] = 1.0f;
-            sampleIndex++;
-        }
-        for (int rampIndex = 0; rampIndex < rampSize; rampIndex++) {
-            mZeroAfterZero[sampleIndex] = 1.0f;
-            mZeroAfterOne[sampleIndex] = 1.0f;
-            sampleIndex++;
-        }
-        for (int rampIndex = 0; rampIndex < rampSize; rampIndex++) {
-            float phase = (rampIndex + 1) * M_PI / rampSize;
-            float sample = cosf(phase);
-            mZeroAfterZero[sampleIndex] = sample;
-            mZeroAfterOne[sampleIndex] = sample;
-            sampleIndex++;
-        }
-        for (int rampIndex = 0; rampIndex < rampSize; rampIndex++) {
-            mZeroAfterZero[sampleIndex] = -1.0f;
-            mZeroAfterOne[sampleIndex] = -1.0f;
-            sampleIndex++;
-        }
-    }
-
-    void onNextBit(bool current) override {
-        // Do we need to use the rounded edge?
-        mCurrentSamples = (current ^ mPreviousBit)
-                          ? mZeroAfterOne.get()
-                          : mZeroAfterZero.get();
-        mPreviousBit = current;
-    }
-
-    float nextFloat() override {
-        advanceSample();
-        float output = mCurrentSamples[mCursor];
-        if (getCurrentBit()) output = -output;
-        return output;
-    }
-
-private:
-
-    bool mPreviousBit = false;
-    float *mCurrentSamples = nullptr;
-    std::unique_ptr<float[]> mZeroAfterZero;
-    std::unique_ptr<float[]> mZeroAfterOne;
-};
-
-#endif //ANALYZER_ROUNDED_MANCHESTER_ENCODER_H
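The encoder above rounds each Manchester transition by crossfading through a half cosine over the first quarter of the pulse instead of jumping between -1 and +1. This standalone snippet prints one such ramp (samplesPerPulse is an arbitrary demo value):

#include <cmath>
#include <cstdio>

int main() {
    const int samplesPerPulse = 16;            // arbitrary demo value
    const int rampSize = samplesPerPulse / 4;  // quarter of the pulse, as above
    for (int rampIndex = 0; rampIndex < rampSize; rampIndex++) {
        float phase = (rampIndex + 1) * M_PI / rampSize;
        float sample = -cosf(phase);           // rises along a half cosine to +1.0
        printf("ramp[%d] = %+f\n", rampIndex, sample);
    }
    return 0;
}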
diff --git a/media/libaaudio/examples/loopback/src/loopback.cpp b/media/libaaudio/examples/loopback/src/loopback.cpp
index 0d2ec70..6fff568 100644
--- a/media/libaaudio/examples/loopback/src/loopback.cpp
+++ b/media/libaaudio/examples/loopback/src/loopback.cpp
@@ -36,8 +36,12 @@
 #include "AAudioSimpleRecorder.h"
 #include "AAudioExampleUtils.h"
 
+// Get logging macros from OboeTester
+#include "android_debug.h"
+// Get signal analyzers from OboeTester
 #include "analyzer/GlitchAnalyzer.h"
 #include "analyzer/LatencyAnalyzer.h"
+
 #include "../../utils/AAudioExampleUtils.h"
 
 // V0.4.00 = rectify and low-pass filter the echos, auto-correlate entire echo
@@ -45,8 +49,9 @@
 //           fix -n option to set output buffer for -tm
 //           plot first glitch
 // V0.4.02 = allow -n0 for minimal buffer size
-// V0.5.00 = use latency analyzer from OboeTester, uses random noise for latency
-#define APP_VERSION             "0.5.00"
+// V0.5.00 = use latency analyzer copied from OboeTester, uses random noise for latency
+// V0.5.01 = use latency analyzer directly from OboeTester in external/oboe
+#define APP_VERSION             "0.5.01"
 
 // Tag for machine readable results as property = value pairs
 #define RESULT_TAG              "RESULT: "
diff --git a/media/libaaudio/examples/utils/dummy.cpp b/media/libaaudio/examples/utils/dummy.cpp
deleted file mode 100644
index 8ef7e36..0000000
--- a/media/libaaudio/examples/utils/dummy.cpp
+++ /dev/null
@@ -1,5 +0,0 @@
-/**
- * Dummy file needed to get Android Studio to scan this folder.
- */
-
-int g_DoNotUseThisVariable = 0;
diff --git a/media/libaaudio/examples/utils/unused.cpp b/media/libaaudio/examples/utils/unused.cpp
new file mode 100644
index 0000000..9a5205e
--- /dev/null
+++ b/media/libaaudio/examples/utils/unused.cpp
@@ -0,0 +1,5 @@
+/**
+ * Unused file required to get Android Studio to scan this folder.
+ */
+
+int g_DoNotUseThisVariable = 0;
diff --git a/media/libaaudio/include/aaudio/AAudio.h b/media/libaaudio/include/aaudio/AAudio.h
index a47f189..6666788 100644
--- a/media/libaaudio/include/aaudio/AAudio.h
+++ b/media/libaaudio/include/aaudio/AAudio.h
@@ -29,6 +29,8 @@
 #ifndef AAUDIO_AAUDIO_H
 #define AAUDIO_AAUDIO_H
 
+#include <stdbool.h>
+#include <stdint.h>
 #include <time.h>
 
 #ifdef __cplusplus
@@ -687,7 +689,7 @@
         aaudio_performance_mode_t mode) __INTRODUCED_IN(26);
 
 /**
- * Set the intended use case for the stream.
+ * Set the intended use case for the output stream.
  *
  * The AAudio system will use this information to optimize the
  * behavior of the stream.
@@ -704,7 +706,7 @@
         aaudio_usage_t usage) __INTRODUCED_IN(28);
 
 /**
- * Set the type of audio data that the stream will carry.
+ * Set the type of audio data that the output stream will carry.
  *
  * The AAudio system will use this information to optimize the
  * behavior of the stream.
@@ -1035,6 +1037,11 @@
  * but still allow queries to the stream to occur from other threads. This often
  * happens if you are monitoring stream progress from a UI thread.
  *
+ * NOTE: This function is only fully implemented for MMAP streams,
+ * which are low latency streams supported by some devices.
+ * On other "Legacy" streams some audio resources will still be in use
+ * and some callbacks may still be in process after this call.
+ *
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  * @return {@link #AAUDIO_OK} or a negative error.
  */
diff --git a/media/libaaudio/src/Android.bp b/media/libaaudio/src/Android.bp
index 717f31a..aafcccc 100644
--- a/media/libaaudio/src/Android.bp
+++ b/media/libaaudio/src/Android.bp
@@ -21,6 +21,7 @@
     ],
 
     cflags: [
+        "-Wthread-safety",
         "-Wno-unused-parameter",
         "-Wall",
         "-Werror",
@@ -85,6 +86,7 @@
         "libcutils",
         "libutils",
         "libbinder",
+        "aaudio-aidl-cpp",
     ],
 
     cflags: [
@@ -114,11 +116,10 @@
         "client/AudioStreamInternalPlay.cpp",
         "client/IsochronousClockModel.cpp",
         "binding/AudioEndpointParcelable.cpp",
+        "binding/AAudioBinderAdapter.cpp",
         "binding/AAudioBinderClient.cpp",
         "binding/AAudioStreamRequest.cpp",
         "binding/AAudioStreamConfiguration.cpp",
-        "binding/IAAudioClient.cpp",
-        "binding/IAAudioService.cpp",
         "binding/RingBufferParcelable.cpp",
         "binding/SharedMemoryParcelable.cpp",
         "binding/SharedRegionParcelable.cpp",
@@ -138,3 +139,33 @@
         misc_undefined: ["bounds"],
     },
 }
+
+aidl_interface {
+    name: "aaudio-aidl",
+    unstable: true,
+    local_include_dir: "binding/aidl",
+    srcs: [
+        "binding/aidl/aaudio/Endpoint.aidl",
+        "binding/aidl/aaudio/RingBuffer.aidl",
+        "binding/aidl/aaudio/SharedRegion.aidl",
+        "binding/aidl/aaudio/StreamParameters.aidl",
+        "binding/aidl/aaudio/StreamRequest.aidl",
+        "binding/aidl/aaudio/IAAudioClient.aidl",
+        "binding/aidl/aaudio/IAAudioService.aidl",
+    ],
+    imports: [
+        "audio_common-aidl",
+        "shared-file-region-aidl",
+    ],
+    backend:
+    {
+        cpp: {
+            enabled: true,
+        },
+        java: {
+            // TODO: need to have audio_common-aidl available in Java to enable
+            //       this.
+            enabled: false,
+        },
+    },
+}
diff --git a/media/libaaudio/src/binding/AAudioBinderAdapter.cpp b/media/libaaudio/src/binding/AAudioBinderAdapter.cpp
new file mode 100644
index 0000000..2b2fe6d
--- /dev/null
+++ b/media/libaaudio/src/binding/AAudioBinderAdapter.cpp
@@ -0,0 +1,125 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <binding/AAudioBinderAdapter.h>
+#include <utility/AAudioUtilities.h>
+
+namespace aaudio {
+
+using android::binder::Status;
+
+AAudioBinderAdapter::AAudioBinderAdapter(IAAudioService* delegate)
+        : mDelegate(delegate) {}
+
+void AAudioBinderAdapter::registerClient(const android::sp<IAAudioClient>& client) {
+    mDelegate->registerClient(client);
+}
+
+aaudio_handle_t AAudioBinderAdapter::openStream(const AAudioStreamRequest& request,
+                                                AAudioStreamConfiguration& config) {
+    aaudio_handle_t result;
+    StreamParameters params;
+    Status status = mDelegate->openStream(request.parcelable(),
+                                          &params,
+                                          &result);
+    if (!status.isOk()) {
+        result = AAudioConvert_androidToAAudioResult(status.transactionError());
+    }
+    config = params;
+    return result;
+}
+
+aaudio_result_t AAudioBinderAdapter::closeStream(aaudio_handle_t streamHandle) {
+    aaudio_result_t result;
+    Status status = mDelegate->closeStream(streamHandle, &result);
+    if (!status.isOk()) {
+        result = AAudioConvert_androidToAAudioResult(status.transactionError());
+    }
+    return result;
+}
+
+aaudio_result_t AAudioBinderAdapter::getStreamDescription(aaudio_handle_t streamHandle,
+                                                          AudioEndpointParcelable& endpointOut) {
+    aaudio_result_t result;
+    Endpoint endpoint;
+    Status status = mDelegate->getStreamDescription(streamHandle,
+                                                    &endpoint,
+                                                    &result);
+    if (!status.isOk()) {
+        result = AAudioConvert_androidToAAudioResult(status.transactionError());
+    }
+    endpointOut = std::move(endpoint);
+    return result;
+}
+
+aaudio_result_t AAudioBinderAdapter::startStream(aaudio_handle_t streamHandle) {
+    aaudio_result_t result;
+    Status status = mDelegate->startStream(streamHandle, &result);
+    if (!status.isOk()) {
+        result = AAudioConvert_androidToAAudioResult(status.transactionError());
+    }
+    return result;
+}
+
+aaudio_result_t AAudioBinderAdapter::pauseStream(aaudio_handle_t streamHandle) {
+    aaudio_result_t result;
+    Status status = mDelegate->pauseStream(streamHandle, &result);
+    if (!status.isOk()) {
+        result = AAudioConvert_androidToAAudioResult(status.transactionError());
+    }
+    return result;
+}
+
+aaudio_result_t AAudioBinderAdapter::stopStream(aaudio_handle_t streamHandle) {
+    aaudio_result_t result;
+    Status status = mDelegate->stopStream(streamHandle, &result);
+    if (!status.isOk()) {
+        result = AAudioConvert_androidToAAudioResult(status.transactionError());
+    }
+    return result;
+}
+
+aaudio_result_t AAudioBinderAdapter::flushStream(aaudio_handle_t streamHandle) {
+    aaudio_result_t result;
+    Status status = mDelegate->flushStream(streamHandle, &result);
+    if (!status.isOk()) {
+        result = AAudioConvert_androidToAAudioResult(status.transactionError());
+    }
+    return result;
+}
+
+aaudio_result_t AAudioBinderAdapter::registerAudioThread(aaudio_handle_t streamHandle,
+                                                         pid_t clientThreadId,
+                                                         int64_t periodNanoseconds) {
+    aaudio_result_t result;
+    Status status = mDelegate->registerAudioThread(streamHandle, clientThreadId, periodNanoseconds, &result);
+    if (!status.isOk()) {
+        result = AAudioConvert_androidToAAudioResult(status.transactionError());
+    }
+    return result;
+}
+
+aaudio_result_t AAudioBinderAdapter::unregisterAudioThread(aaudio_handle_t streamHandle,
+                                                           pid_t clientThreadId) {
+    aaudio_result_t result;
+    Status status = mDelegate->unregisterAudioThread(streamHandle, clientThreadId, &result);
+    if (!status.isOk()) {
+        result = AAudioConvert_androidToAAudioResult(status.transactionError());
+    }
+    return result;
+}
+
+}  // namespace aaudio
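Every method above repeats the same shape: forward to the delegate, then fall back to AAudioConvert_androidToAAudioResult(status.transactionError()) if the binder transaction failed. A hypothetical helper that captures that pattern might look like the sketch below; it is not part of this patch, the name statusToResult is made up, and only AAudioConvert_androidToAAudioResult() is an existing utility.

#include <aaudio/AAudio.h>
#include <binder/Status.h>
#include <utility/AAudioUtilities.h>

namespace aaudio {

// Keep the delegate's result unless the binder transaction itself failed.
static aaudio_result_t statusToResult(const android::binder::Status& status,
                                      aaudio_result_t result) {
    return status.isOk() ? result
                         : AAudioConvert_androidToAAudioResult(status.transactionError());
}

// Equivalent to the body of AAudioBinderAdapter::startStream() above:
//     aaudio_result_t result;
//     Status status = mDelegate->startStream(streamHandle, &result);
//     return statusToResult(status, result);

}  // namespace aaudio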
diff --git a/media/libaaudio/src/binding/AAudioBinderAdapter.h b/media/libaaudio/src/binding/AAudioBinderAdapter.h
new file mode 100644
index 0000000..5e9ab57
--- /dev/null
+++ b/media/libaaudio/src/binding/AAudioBinderAdapter.h
@@ -0,0 +1,64 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <aaudio/IAAudioService.h>
+#include <binding/AAudioServiceInterface.h>
+
+namespace aaudio {
+
+/**
+ * An adapter that takes in an underlying IAAudioService and exposes an
+ * AAudioServiceInterface.
+ *
+ * This class is abstract: the client is expected to inherit from this class and implement those
+ * methods from AAudioServiceInterface that don't have counterparts in IAAudioService.
+ */
+class AAudioBinderAdapter : public AAudioServiceInterface {
+public:
+    explicit AAudioBinderAdapter(IAAudioService* delegate);
+
+    void registerClient(const android::sp<IAAudioClient>& client) override;
+
+    aaudio_handle_t openStream(const AAudioStreamRequest& request,
+                               AAudioStreamConfiguration& configuration) override;
+
+    aaudio_result_t closeStream(aaudio_handle_t streamHandle) override;
+
+    aaudio_result_t getStreamDescription(aaudio_handle_t streamHandle,
+                                         AudioEndpointParcelable& endpoint) override;
+
+    aaudio_result_t startStream(aaudio_handle_t streamHandle) override;
+
+    aaudio_result_t pauseStream(aaudio_handle_t streamHandle) override;
+
+    aaudio_result_t stopStream(aaudio_handle_t streamHandle) override;
+
+    aaudio_result_t flushStream(aaudio_handle_t streamHandle) override;
+
+    aaudio_result_t registerAudioThread(aaudio_handle_t streamHandle,
+                                        pid_t clientThreadId,
+                                        int64_t periodNanoseconds) override;
+
+    aaudio_result_t unregisterAudioThread(aaudio_handle_t streamHandle,
+                                          pid_t clientThreadId) override;
+
+private:
+    IAAudioService* const mDelegate;
+};
+
+}  // namespace aaudio
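As the class comment notes, AAudioBinderAdapter is meant to be subclassed by the client. A hedged sketch of that pattern follows; the class name is made up, and the real subclass is the Adapter that AAudioBinderClient constructs in the .cpp diff further below.

#include <binding/AAudioBinderAdapter.h>

namespace aaudio {

class MyBinderAdapter : public AAudioBinderAdapter {
public:
    explicit MyBinderAdapter(const android::sp<IAAudioService>& service)
            : AAudioBinderAdapter(service.get()),
              mService(service) {}   // keep the binder proxy alive

    // Any AAudioServiceInterface methods that have no IAAudioService
    // counterpart would be implemented here.

private:
    const android::sp<IAAudioService> mService;
};

}  // namespace aaudio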
diff --git a/media/libaaudio/src/binding/AAudioBinderClient.cpp b/media/libaaudio/src/binding/AAudioBinderClient.cpp
index 7b0d31f..fa5a2da 100644
--- a/media/libaaudio/src/binding/AAudioBinderClient.cpp
+++ b/media/libaaudio/src/binding/AAudioBinderClient.cpp
@@ -19,35 +19,30 @@
 //#define LOG_NDEBUG 0
 #include <utils/Log.h>
 
-#include <binder/IInterface.h>
 #include <binder/IServiceManager.h>
 #include <binder/ProcessState.h>
 #include <utils/Mutex.h>
 #include <utils/RefBase.h>
 #include <utils/Singleton.h>
-#include <media/AudioSystem.h>
-
 #include <aaudio/AAudio.h>
 
 #include "AudioEndpointParcelable.h"
-#include "binding/AAudioBinderClient.h"
-//#include "binding/AAudioStreamRequest.h"
-//#include "binding/AAudioStreamConfiguration.h"
-//#include "binding/IAAudioService.h"
-//#include "binding/AAudioServiceMessage.h"
 
-//#include "AAudioServiceInterface.h"
+#include "binding/AAudioBinderClient.h"
+
+#define AAUDIO_SERVICE_NAME  "media.aaudio"
 
 using android::String16;
 using android::IServiceManager;
 using android::defaultServiceManager;
 using android::interface_cast;
 using android::IInterface;
-using android::IAAudioService;
 using android::Mutex;
 using android::ProcessState;
 using android::sp;
+using android::status_t;
 using android::wp;
+using android::binder::Status;
 
 using namespace aaudio;
 
@@ -67,20 +62,18 @@
 AAudioBinderClient::~AAudioBinderClient() {
     ALOGV("%s - destroying %p", __func__, this);
     Mutex::Autolock _l(mServiceLock);
-    if (mAAudioService != 0) {
-        IInterface::asBinder(mAAudioService)->unlinkToDeath(mAAudioClient);
-    }
 }
 
 // TODO Share code with other service clients.
 // Helper function to get access to the "AAudioService" service.
 // This code was modeled after frameworks/av/media/libaudioclient/AudioSystem.cpp
-const sp<IAAudioService> AAudioBinderClient::getAAudioService() {
+std::shared_ptr<AAudioServiceInterface> AAudioBinderClient::getAAudioService() {
+    std::shared_ptr<AAudioServiceInterface> result;
     sp<IAAudioService> aaudioService;
     bool needToRegister = false;
     {
         Mutex::Autolock _l(mServiceLock);
-        if (mAAudioService.get() == nullptr) {
+        if (mAdapter == nullptr) {
             sp<IBinder> binder;
             sp<IServiceManager> sm = defaultServiceManager();
             // Try several times to get the service.
@@ -99,7 +92,8 @@
                 if (status != NO_ERROR) {
                     ALOGE("%s() - linkToDeath() returned %d", __func__, status);
                 }
-                mAAudioService = interface_cast<IAAudioService>(binder);
+                aaudioService = interface_cast<IAAudioService>(binder);
+                mAdapter.reset(new Adapter(aaudioService, mAAudioClient));
                 needToRegister = true;
                 // Make sure callbacks can be received by mAAudioClient
                 ProcessState::self()->startThreadPool();
@@ -107,18 +101,18 @@
                 ALOGE("AAudioBinderClient could not connect to %s", AAUDIO_SERVICE_NAME);
             }
         }
-        aaudioService = mAAudioService;
+        result = mAdapter;
     }
     // Do this outside the mutex lock.
     if (needToRegister && aaudioService.get() != nullptr) { // new client?
         aaudioService->registerClient(mAAudioClient);
     }
-    return aaudioService;
+    return result;
 }
 
 void AAudioBinderClient::dropAAudioService() {
     Mutex::Autolock _l(mServiceLock);
-    mAAudioService.clear(); // force a reconnect
+    mAdapter.reset();
 }
 
 /**
@@ -127,13 +121,13 @@
 * @return handle to the stream or a negative error
 */
 aaudio_handle_t AAudioBinderClient::openStream(const AAudioStreamRequest &request,
-                                               AAudioStreamConfiguration &configurationOutput) {
+                                               AAudioStreamConfiguration &configuration) {
     aaudio_handle_t stream;
     for (int i = 0; i < 2; i++) {
-        const sp<IAAudioService> &service = getAAudioService();
+        std::shared_ptr<AAudioServiceInterface> service = getAAudioService();
         if (service.get() == nullptr) return AAUDIO_ERROR_NO_SERVICE;
 
-        stream = service->openStream(request, configurationOutput);
+        stream = service->openStream(request, configuration);
 
         if (stream == AAUDIO_ERROR_NO_SERVICE) {
             ALOGE("openStream lost connection to AAudioService.");
@@ -146,8 +140,9 @@
 }
 
 aaudio_result_t AAudioBinderClient::closeStream(aaudio_handle_t streamHandle) {
-    const sp<IAAudioService> service = getAAudioService();
+    std::shared_ptr<AAudioServiceInterface> service = getAAudioService();
     if (service.get() == nullptr) return AAUDIO_ERROR_NO_SERVICE;
+
     return service->closeStream(streamHandle);
 }
 
@@ -155,33 +150,38 @@
 * used to communicate with the underlying HAL or Service.
 */
 aaudio_result_t AAudioBinderClient::getStreamDescription(aaudio_handle_t streamHandle,
-                                                         AudioEndpointParcelable &parcelable) {
-    const sp<IAAudioService> service = getAAudioService();
+                                                         AudioEndpointParcelable& endpointOut) {
+    std::shared_ptr<AAudioServiceInterface> service = getAAudioService();
     if (service.get() == nullptr) return AAUDIO_ERROR_NO_SERVICE;
-    return service->getStreamDescription(streamHandle, parcelable);
+
+    return service->getStreamDescription(streamHandle, endpointOut);
 }
 
 aaudio_result_t AAudioBinderClient::startStream(aaudio_handle_t streamHandle) {
-    const sp<IAAudioService> service = getAAudioService();
+    std::shared_ptr<AAudioServiceInterface> service = getAAudioService();
     if (service.get() == nullptr) return AAUDIO_ERROR_NO_SERVICE;
+
     return service->startStream(streamHandle);
 }
 
 aaudio_result_t AAudioBinderClient::pauseStream(aaudio_handle_t streamHandle) {
-    const sp<IAAudioService> service = getAAudioService();
+    std::shared_ptr<AAudioServiceInterface> service = getAAudioService();
     if (service.get() == nullptr) return AAUDIO_ERROR_NO_SERVICE;
+
     return service->pauseStream(streamHandle);
 }
 
 aaudio_result_t AAudioBinderClient::stopStream(aaudio_handle_t streamHandle) {
-    const sp<IAAudioService> service = getAAudioService();
+    std::shared_ptr<AAudioServiceInterface> service = getAAudioService();
     if (service.get() == nullptr) return AAUDIO_ERROR_NO_SERVICE;
+
     return service->stopStream(streamHandle);
 }
 
 aaudio_result_t AAudioBinderClient::flushStream(aaudio_handle_t streamHandle) {
-    const sp<IAAudioService> service = getAAudioService();
+    std::shared_ptr<AAudioServiceInterface> service = getAAudioService();
     if (service.get() == nullptr) return AAUDIO_ERROR_NO_SERVICE;
+
     return service->flushStream(streamHandle);
 }
 
@@ -191,17 +191,16 @@
 aaudio_result_t AAudioBinderClient::registerAudioThread(aaudio_handle_t streamHandle,
                                                         pid_t clientThreadId,
                                                         int64_t periodNanoseconds) {
-    const sp<IAAudioService> service = getAAudioService();
+    std::shared_ptr<AAudioServiceInterface> service = getAAudioService();
     if (service.get() == nullptr) return AAUDIO_ERROR_NO_SERVICE;
-    return service->registerAudioThread(streamHandle,
-                                        clientThreadId,
-                                        periodNanoseconds);
+
+    return service->registerAudioThread(streamHandle, clientThreadId, periodNanoseconds);
 }
 
 aaudio_result_t AAudioBinderClient::unregisterAudioThread(aaudio_handle_t streamHandle,
                                                           pid_t clientThreadId) {
-    const sp<IAAudioService> service = getAAudioService();
+    std::shared_ptr<AAudioServiceInterface> service = getAAudioService();
     if (service.get() == nullptr) return AAUDIO_ERROR_NO_SERVICE;
-    return service->unregisterAudioThread(streamHandle,
-                                          clientThreadId);
+
+    return service->unregisterAudioThread(streamHandle, clientThreadId);
 }
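
The proxy methods in this file all follow the same shape: take a shared_ptr snapshot of the adapter under the lock, then call through it outside the lock; openStream() additionally retries once, dropping the cached connection when the call returns AAUDIO_ERROR_NO_SERVICE so the second attempt reconnects. A self-contained sketch of that retry idiom, with hypothetical helper and parameter names:

    #include <memory>

    #include <aaudio/AAudio.h>

    // Hypothetical free function, for illustration only. getService, dropService and
    // call stand in for getAAudioService(), dropAAudioService() and the binder call.
    template <typename GetService, typename DropService, typename Call>
    aaudio_result_t callWithOneRetry(GetService getService, DropService dropService, Call call) {
        aaudio_result_t result = AAUDIO_ERROR_NO_SERVICE;
        for (int attempt = 0; attempt < 2; ++attempt) {
            auto service = getService();  // e.g. std::shared_ptr<AAudioServiceInterface>
            if (service == nullptr) return AAUDIO_ERROR_NO_SERVICE;
            result = call(*service);
            if (result != AAUDIO_ERROR_NO_SERVICE) break;
            dropService();                // force a reconnect on the next attempt
        }
        return result;
    }
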
diff --git a/media/libaaudio/src/binding/AAudioBinderClient.h b/media/libaaudio/src/binding/AAudioBinderClient.h
index e8c91fc..6a7b639 100644
--- a/media/libaaudio/src/binding/AAudioBinderClient.h
+++ b/media/libaaudio/src/binding/AAudioBinderClient.h
@@ -21,12 +21,15 @@
 #include <utils/Singleton.h>
 
 #include <aaudio/AAudio.h>
-#include "AAudioServiceDefinitions.h"
+#include <binder/IInterface.h>
+
+#include "aaudio/BnAAudioClient.h"
+#include "aaudio/IAAudioService.h"
 #include "AAudioServiceInterface.h"
+#include "binding/AAudioBinderAdapter.h"
 #include "binding/AAudioStreamRequest.h"
-#include "binding/AAudioStreamConfiguration.h"
 #include "binding/AudioEndpointParcelable.h"
-#include "binding/IAAudioService.h"
+#include "core/AAudioStreamParameters.h"
 
 /**
  * Implements the AAudioServiceInterface by talking to the service through Binder.
@@ -44,11 +47,7 @@
 
     virtual ~AAudioBinderClient();
 
-    const android::sp<android::IAAudioService> getAAudioService();
-
-    void dropAAudioService();
-
-    void registerClient(const android::sp<android::IAAudioClient>& client __unused) override {}
+    void registerClient(const android::sp<IAAudioClient>& client __unused) override {}
 
     /**
      * @param request info needed to create the stream
@@ -64,7 +63,7 @@
     * used to communicate with the underlying HAL or Service.
     */
     aaudio_result_t getStreamDescription(aaudio_handle_t streamHandle,
-                                                 AudioEndpointParcelable &parcelable) override;
+                                         AudioEndpointParcelable &endpointOut) override;
 
     /**
      * Start the flow of data.
@@ -115,8 +114,7 @@
         ALOGW("onStreamChange called!");
     }
 
-    class AAudioClient : public android::IBinder::DeathRecipient , public android::BnAAudioClient
-    {
+    class AAudioClient : public android::IBinder::DeathRecipient, public BnAAudioClient {
     public:
         AAudioClient(android::wp<AAudioBinderClient> aaudioBinderClient)
                 : mBinderClient(aaudioBinderClient) {
@@ -132,21 +130,66 @@
         }
 
         // implement BnAAudioClient
-        void onStreamChange(aaudio_handle_t handle, int32_t opcode, int32_t value) {
+        android::binder::Status onStreamChange(int32_t handle, int32_t opcode, int32_t value) {
+            static_assert(std::is_same_v<aaudio_handle_t, int32_t>);
             android::sp<AAudioBinderClient> client = mBinderClient.promote();
             if (client.get() != nullptr) {
                 client->onStreamChange(handle, opcode, value);
             }
+            return android::binder::Status::ok();
         }
     private:
         android::wp<AAudioBinderClient> mBinderClient;
     };
 
-private:
+    // This adapter converts the binder interface (the delegate) to the AAudioServiceInterface
+    // conventions: it translates between data types and their parcelable representations, and
+    // between error codes and calling conventions.
+    // The adapter also owns the underlying service object and is responsible for unlinking its
+    // death listener when destroyed.
+    class Adapter : public AAudioBinderAdapter {
+    public:
+        Adapter(const android::sp<IAAudioService>& delegate,
+                const android::sp<AAudioClient>& aaudioClient)
+                : AAudioBinderAdapter(delegate.get()),
+                  mDelegate(delegate),
+                  mAAudioClient(aaudioClient) {}
 
-    android::Mutex                  mServiceLock;
-    android::sp<android::IAAudioService>  mAAudioService;
-    android::sp<AAudioClient>       mAAudioClient;
+        virtual ~Adapter() {
+            if (mDelegate != nullptr) {
+                android::IInterface::asBinder(mDelegate)->unlinkToDeath(mAAudioClient);
+            }
+        }
+
+        // This should never be called (the call is rejected at the AAudioBinderClient level).
+        aaudio_result_t startClient(aaudio_handle_t streamHandle __unused,
+                                    const android::AudioClient& client __unused,
+                                    const audio_attributes_t* attr __unused,
+                                    audio_port_handle_t* clientHandle __unused) override {
+            LOG_ALWAYS_FATAL("Shouldn't get here");
+            return AAUDIO_ERROR_UNAVAILABLE;
+        }
+
+        // This should never be called (the call is rejected at the AAudioBinderClient level).
+        aaudio_result_t stopClient(aaudio_handle_t streamHandle __unused,
+                                   audio_port_handle_t clientHandle __unused) override {
+            LOG_ALWAYS_FATAL("Shouldn't get here");
+            return AAUDIO_ERROR_UNAVAILABLE;
+        }
+
+    private:
+        android::sp<IAAudioService> mDelegate;
+        android::sp<AAudioClient> mAAudioClient;
+    };
+
+private:
+    android::Mutex                          mServiceLock;
+    std::shared_ptr<AAudioServiceInterface> mAdapter;
+    android::sp<AAudioClient>               mAAudioClient;
+
+    std::shared_ptr<AAudioServiceInterface> getAAudioService();
+
+    void dropAAudioService();
 
 };
 
diff --git a/media/libaaudio/src/binding/AAudioServiceInterface.h b/media/libaaudio/src/binding/AAudioServiceInterface.h
index 9c28cc7..5d11512 100644
--- a/media/libaaudio/src/binding/AAudioServiceInterface.h
+++ b/media/libaaudio/src/binding/AAudioServiceInterface.h
@@ -20,11 +20,11 @@
 #include <utils/StrongPointer.h>
 #include <media/AudioClient.h>
 
+#include "aaudio/IAAudioClient.h"
 #include "binding/AAudioServiceDefinitions.h"
 #include "binding/AAudioStreamRequest.h"
 #include "binding/AAudioStreamConfiguration.h"
 #include "binding/AudioEndpointParcelable.h"
-#include "binding/IAAudioClient.h"
 
 /**
  * This has the same methods as IAAudioService but without the Binder features.
@@ -40,7 +40,7 @@
     AAudioServiceInterface() {};
     virtual ~AAudioServiceInterface() = default;
 
-    virtual void registerClient(const android::sp<android::IAAudioClient>& client) = 0;
+    virtual void registerClient(const android::sp<IAAudioClient>& client) = 0;
 
     /**
      * @param request info needed to create the stream
diff --git a/media/libaaudio/src/binding/AAudioStreamConfiguration.cpp b/media/libaaudio/src/binding/AAudioStreamConfiguration.cpp
index b785f88..2d501ef 100644
--- a/media/libaaudio/src/binding/AAudioStreamConfiguration.cpp
+++ b/media/libaaudio/src/binding/AAudioStreamConfiguration.cpp
@@ -23,101 +23,66 @@
 #include <sys/mman.h>
 #include <aaudio/AAudio.h>
 
-#include <binder/Parcel.h>
-#include <binder/Parcelable.h>
-
 #include "binding/AAudioStreamConfiguration.h"
 
-using android::NO_ERROR;
-using android::status_t;
-using android::Parcel;
-using android::Parcelable;
-
 using namespace aaudio;
 
-AAudioStreamConfiguration::AAudioStreamConfiguration() {}
-AAudioStreamConfiguration::~AAudioStreamConfiguration() {}
+using android::media::audio::common::AudioFormat;
 
-status_t AAudioStreamConfiguration::writeToParcel(Parcel* parcel) const {
-    status_t status;
-
-    status = parcel->writeInt32(getDeviceId());
-    if (status != NO_ERROR) goto error;
-    status = parcel->writeInt32(getSampleRate());
-    if (status != NO_ERROR) goto error;
-    status = parcel->writeInt32(getSamplesPerFrame());
-    if (status != NO_ERROR) goto error;
-    status = parcel->writeInt32((int32_t) getSharingMode());
-    if (status != NO_ERROR) goto error;
-    status = parcel->writeInt32((int32_t) getFormat());
-    if (status != NO_ERROR) goto error;
-
-    status = parcel->writeInt32((int32_t) getDirection());
-    if (status != NO_ERROR) goto error;
-    status = parcel->writeInt32(getBufferCapacity());
-    if (status != NO_ERROR) goto error;
-    status = parcel->writeInt32((int32_t) getUsage());
-    if (status != NO_ERROR) goto error;
-    status = parcel->writeInt32((int32_t) getContentType());
-    if (status != NO_ERROR) goto error;
-    status = parcel->writeInt32((int32_t) getInputPreset());
-    if (status != NO_ERROR) goto error;
-    status = parcel->writeInt32((int32_t) getAllowedCapturePolicy());
-    if (status != NO_ERROR) goto error;
-    status = parcel->writeInt32(getSessionId());
-    if (status != NO_ERROR) goto error;
-    status = parcel->writeInt32(isPrivacySensitive() ? 1 : 0);
-    if (status != NO_ERROR) goto error;
-    return NO_ERROR;
-error:
-    ALOGE("%s(): write failed = %d", __func__, status);
-    return status;
+AAudioStreamConfiguration::AAudioStreamConfiguration(const StreamParameters& parcelable) {
+    setSamplesPerFrame(parcelable.samplesPerFrame);
+    setSampleRate(parcelable.sampleRate);
+    setDeviceId(parcelable.deviceId);
+    static_assert(sizeof(aaudio_sharing_mode_t) == sizeof(parcelable.sharingMode));
+    setSharingMode(parcelable.sharingMode);
+    static_assert(sizeof(audio_format_t) == sizeof(parcelable.audioFormat));
+    setFormat(static_cast<audio_format_t>(parcelable.audioFormat));
+    static_assert(sizeof(aaudio_direction_t) == sizeof(parcelable.direction));
+    setDirection(parcelable.direction);
+    static_assert(sizeof(audio_usage_t) == sizeof(parcelable.usage));
+    setUsage(parcelable.usage);
+    static_assert(sizeof(aaudio_content_type_t) == sizeof(parcelable.contentType));
+    setContentType(parcelable.contentType);
+    static_assert(sizeof(aaudio_input_preset_t) == sizeof(parcelable.inputPreset));
+    setInputPreset(parcelable.inputPreset);
+    setBufferCapacity(parcelable.bufferCapacity);
+    static_assert(
+            sizeof(aaudio_allowed_capture_policy_t) == sizeof(parcelable.allowedCapturePolicy));
+    setAllowedCapturePolicy(parcelable.allowedCapturePolicy);
+    static_assert(sizeof(aaudio_session_id_t) == sizeof(parcelable.sessionId));
+    setSessionId(parcelable.sessionId);
+    setPrivacySensitive(parcelable.isPrivacySensitive);
 }
 
-status_t AAudioStreamConfiguration::readFromParcel(const Parcel* parcel) {
-    int32_t value;
-    status_t status = parcel->readInt32(&value);
-    if (status != NO_ERROR) goto error;
-    setDeviceId(value);
-    status = parcel->readInt32(&value);
-    if (status != NO_ERROR) goto error;
-    setSampleRate(value);
-    status = parcel->readInt32(&value);
-    if (status != NO_ERROR) goto error;
-    setSamplesPerFrame(value);
-    status = parcel->readInt32(&value);
-    if (status != NO_ERROR) goto error;
-    setSharingMode((aaudio_sharing_mode_t) value);
-    status = parcel->readInt32(&value);
-    if (status != NO_ERROR) goto error;
-    setFormat((audio_format_t) value);
+AAudioStreamConfiguration&
+AAudioStreamConfiguration::operator=(const StreamParameters& parcelable) {
+    this->~AAudioStreamConfiguration();
+    new (this) AAudioStreamConfiguration(parcelable);
+    return *this;
+}
 
-    status = parcel->readInt32(&value);
-    if (status != NO_ERROR) goto error;
-    setDirection((aaudio_direction_t) value);
-    status = parcel->readInt32(&value);
-    if (status != NO_ERROR) goto error;
-    setBufferCapacity(value);
-    status = parcel->readInt32(&value);
-    if (status != NO_ERROR) goto error;
-    setUsage((aaudio_usage_t) value);
-    status = parcel->readInt32(&value);
-    if (status != NO_ERROR) goto error;
-    setContentType((aaudio_content_type_t) value);
-    status = parcel->readInt32(&value);
-    if (status != NO_ERROR) goto error;
-    setInputPreset((aaudio_input_preset_t) value);
-    status = parcel->readInt32(&value);
-    if (status != NO_ERROR) goto error;
-    setAllowedCapturePolicy((aaudio_allowed_capture_policy_t) value);
-    status = parcel->readInt32(&value);
-    if (status != NO_ERROR) goto error;
-    setSessionId(value);
-    status = parcel->readInt32(&value);
-    if (status != NO_ERROR) goto error;
-    setPrivacySensitive(value == 1);
-    return NO_ERROR;
-error:
-    ALOGE("%s(): read failed = %d", __func__, status);
-    return status;
+StreamParameters AAudioStreamConfiguration::parcelable() const {
+    StreamParameters result;
+    result.samplesPerFrame = getSamplesPerFrame();
+    result.sampleRate = getSampleRate();
+    result.deviceId = getDeviceId();
+    static_assert(sizeof(aaudio_sharing_mode_t) == sizeof(result.sharingMode));
+    result.sharingMode = getSharingMode();
+    static_assert(sizeof(audio_format_t) == sizeof(result.audioFormat));
+    result.audioFormat = static_cast<AudioFormat>(getFormat());
+    static_assert(sizeof(aaudio_direction_t) == sizeof(result.direction));
+    result.direction = getDirection();
+    static_assert(sizeof(audio_usage_t) == sizeof(result.usage));
+    result.usage = getUsage();
+    static_assert(sizeof(aaudio_content_type_t) == sizeof(result.contentType));
+    result.contentType = getContentType();
+    static_assert(sizeof(aaudio_input_preset_t) == sizeof(result.inputPreset));
+    result.inputPreset = getInputPreset();
+    result.bufferCapacity = getBufferCapacity();
+    static_assert(sizeof(aaudio_allowed_capture_policy_t) == sizeof(result.allowedCapturePolicy));
+    result.allowedCapturePolicy = getAllowedCapturePolicy();
+    static_assert(sizeof(aaudio_session_id_t) == sizeof(result.sessionId));
+    result.sessionId = getSessionId();
+    result.isPrivacySensitive = isPrivacySensitive();
+    return result;
 }
diff --git a/media/libaaudio/src/binding/AAudioStreamConfiguration.h b/media/libaaudio/src/binding/AAudioStreamConfiguration.h
index b324896..f428eb0 100644
--- a/media/libaaudio/src/binding/AAudioStreamConfiguration.h
+++ b/media/libaaudio/src/binding/AAudioStreamConfiguration.h
@@ -20,24 +20,24 @@
 #include <stdint.h>
 
 #include <aaudio/AAudio.h>
+#include <aaudio/StreamParameters.h>
 #include <binder/Parcel.h>
 #include <binder/Parcelable.h>
 #include "core/AAudioStreamParameters.h"
 
-using android::status_t;
-using android::Parcel;
-using android::Parcelable;
-
 namespace aaudio {
 
-class AAudioStreamConfiguration : public AAudioStreamParameters, public Parcelable {
+// This is a holder for AAudioStreamParameters, which allows conversion to/from its parcelable
+// representation, StreamParameters.
+class AAudioStreamConfiguration : public AAudioStreamParameters {
 public:
-    AAudioStreamConfiguration();
-    virtual ~AAudioStreamConfiguration();
+    AAudioStreamConfiguration() = default;
 
-    virtual status_t writeToParcel(Parcel* parcel) const override;
+    explicit AAudioStreamConfiguration(const StreamParameters& parcelable);
 
-    virtual status_t readFromParcel(const Parcel* parcel) override;
+    AAudioStreamConfiguration& operator=(const StreamParameters& parcelable);
+
+    StreamParameters parcelable() const;
 };
 
 } /* namespace aaudio */
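
A short usage sketch of the conversion surface declared above, with hypothetical function and variable names: a configuration is flattened into its StreamParameters parcelable for the AIDL call, and an equivalent configuration is rebuilt from the parcelable on the receiving side.

    #include "binding/AAudioStreamConfiguration.h"

    // Hypothetical round trip, for illustration only.
    void roundTripStreamConfiguration() {
        aaudio::AAudioStreamConfiguration config;
        config.setSampleRate(48000);
        config.setSamplesPerFrame(2);

        aaudio::StreamParameters wire = config.parcelable();  // flatten for the binder call
        aaudio::AAudioStreamConfiguration rebuilt(wire);      // rebuild on the other side
        (void) rebuilt;  // carries the same settings as config
    }
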
diff --git a/media/libaaudio/src/binding/AAudioStreamRequest.cpp b/media/libaaudio/src/binding/AAudioStreamRequest.cpp
index c30c5b9..536395a 100644
--- a/media/libaaudio/src/binding/AAudioStreamRequest.cpp
+++ b/media/libaaudio/src/binding/AAudioStreamRequest.cpp
@@ -21,67 +21,32 @@
 #include <stdint.h>
 
 #include <sys/mman.h>
-#include <binder/Parcel.h>
-#include <binder/Parcelable.h>
 
 #include <aaudio/AAudio.h>
 
 #include "binding/AAudioStreamConfiguration.h"
 #include "binding/AAudioStreamRequest.h"
 
-using android::NO_ERROR;
-using android::status_t;
-using android::Parcel;
-using android::Parcelable;
-
 using namespace aaudio;
 
-AAudioStreamRequest::AAudioStreamRequest()
-    : mConfiguration()
-    {}
-
-AAudioStreamRequest::~AAudioStreamRequest() {}
-
-status_t AAudioStreamRequest::writeToParcel(Parcel* parcel) const {
-    status_t status = parcel->writeInt32((int32_t) mUserId);
-    if (status != NO_ERROR) goto error;
-
-    status = parcel->writeBool(mSharingModeMatchRequired);
-    if (status != NO_ERROR) goto error;
-
-    status = parcel->writeBool(mInService);
-    if (status != NO_ERROR) goto error;
-
-    status = mConfiguration.writeToParcel(parcel);
-    if (status != NO_ERROR) goto error;
-
-    return NO_ERROR;
-
-error:
-    ALOGE("writeToParcel(): write failed = %d", status);
-    return status;
+AAudioStreamRequest::AAudioStreamRequest(const StreamRequest& parcelable) :
+        mConfiguration(std::move(parcelable.params)),
+        mUserId(parcelable.userId),
+        mProcessId(parcelable.processId),
+        mSharingModeMatchRequired(parcelable.sharingModeMatchRequired),
+        mInService(parcelable.inService) {
+    static_assert(sizeof(mUserId) == sizeof(parcelable.userId));
+    static_assert(sizeof(mProcessId) == sizeof(parcelable.processId));
 }
 
-status_t AAudioStreamRequest::readFromParcel(const Parcel* parcel) {
-    int32_t temp;
-    status_t status = parcel->readInt32(&temp);
-    if (status != NO_ERROR) goto error;
-    mUserId = (uid_t) temp;
-
-    status = parcel->readBool(&mSharingModeMatchRequired);
-    if (status != NO_ERROR) goto error;
-
-    status = parcel->readBool(&mInService);
-    if (status != NO_ERROR) goto error;
-
-    status = mConfiguration.readFromParcel(parcel);
-    if (status != NO_ERROR) goto error;
-
-    return NO_ERROR;
-
-error:
-    ALOGE("readFromParcel(): read failed = %d", status);
-    return status;
+StreamRequest AAudioStreamRequest::parcelable() const {
+    StreamRequest result;
+    result.params = std::move(mConfiguration).parcelable();
+    result.userId = mUserId;
+    result.processId = mProcessId;
+    result.sharingModeMatchRequired = mSharingModeMatchRequired;
+    result.inService = mInService;
+    return result;
 }
 
 aaudio_result_t AAudioStreamRequest::validate() const {
diff --git a/media/libaaudio/src/binding/AAudioStreamRequest.h b/media/libaaudio/src/binding/AAudioStreamRequest.h
index 492f69d..31d3ea1 100644
--- a/media/libaaudio/src/binding/AAudioStreamRequest.h
+++ b/media/libaaudio/src/binding/AAudioStreamRequest.h
@@ -20,21 +20,18 @@
 #include <stdint.h>
 
 #include <aaudio/AAudio.h>
-#include <binder/Parcel.h>
-#include <binder/Parcelable.h>
+#include <aaudio/StreamRequest.h>
 
 #include "binding/AAudioStreamConfiguration.h"
 
-using android::status_t;
-using android::Parcel;
-using android::Parcelable;
-
 namespace aaudio {
 
-class AAudioStreamRequest : public Parcelable {
+class AAudioStreamRequest {
 public:
-    AAudioStreamRequest();
-    virtual ~AAudioStreamRequest();
+    AAudioStreamRequest() = default;
+
+    // Construct based on a parcelable representation.
+    explicit AAudioStreamRequest(const StreamRequest& parcelable);
 
     uid_t getUserId() const {
         return mUserId;
@@ -76,15 +73,14 @@
         mInService = inService;
     }
 
-    virtual status_t writeToParcel(Parcel* parcel) const override;
-
-    virtual status_t readFromParcel(const Parcel* parcel) override;
-
     aaudio_result_t validate() const;
 
     void dump() const;
 
-protected:
+    // Extract a parcelable representation of this object.
+    StreamRequest parcelable() const;
+
+private:
     AAudioStreamConfiguration  mConfiguration;
     uid_t                      mUserId = (uid_t) -1;
     pid_t                      mProcessId = (pid_t) -1;
diff --git a/media/libaaudio/src/binding/AudioEndpointParcelable.cpp b/media/libaaudio/src/binding/AudioEndpointParcelable.cpp
index 61d7d27..aa4ac27 100644
--- a/media/libaaudio/src/binding/AudioEndpointParcelable.cpp
+++ b/media/libaaudio/src/binding/AudioEndpointParcelable.cpp
@@ -29,22 +29,43 @@
 #include "binding/AudioEndpointParcelable.h"
 
 using android::base::unique_fd;
+using android::media::SharedFileRegion;
 using android::NO_ERROR;
 using android::status_t;
-using android::Parcel;
-using android::Parcelable;
 
 using namespace aaudio;
 
-/**
- * Container for information about the message queues plus
- * general stream information needed by AAudio clients.
- * It contains no addresses, just sizes, offsets and file descriptors for
- * shared memory that can be passed through Binder.
- */
-AudioEndpointParcelable::AudioEndpointParcelable() {}
+AudioEndpointParcelable::AudioEndpointParcelable(Endpoint&& parcelable)
+        : mUpMessageQueueParcelable(std::move(parcelable.upMessageQueueParcelable)),
+          mDownMessageQueueParcelable(std::move(parcelable.downMessageQueueParcelable)),
+          mUpDataQueueParcelable(std::move(parcelable.upDataQueueParcelable)),
+          mDownDataQueueParcelable(std::move(parcelable.downDataQueueParcelable)),
+          mNumSharedMemories(parcelable.sharedMemories.size()) {
+    for (size_t i = 0; i < parcelable.sharedMemories.size() && i < MAX_SHARED_MEMORIES; ++i) {
+        // Re-construct in place from the incoming parcelable (moves its unique FD).
+        mSharedMemories[i].~SharedMemoryParcelable();
+        new(&mSharedMemories[i]) SharedMemoryParcelable(std::move(parcelable.sharedMemories[i]));
+    }
+}
 
-AudioEndpointParcelable::~AudioEndpointParcelable() {}
+AudioEndpointParcelable& AudioEndpointParcelable::operator=(Endpoint&& parcelable) {
+    this->~AudioEndpointParcelable();
+    new(this) AudioEndpointParcelable(std::move(parcelable));
+    return *this;
+}
+
+Endpoint AudioEndpointParcelable::parcelable()&& {
+    Endpoint result;
+    result.upMessageQueueParcelable = std::move(mUpMessageQueueParcelable).parcelable();
+    result.downMessageQueueParcelable = std::move(mDownMessageQueueParcelable).parcelable();
+    result.upDataQueueParcelable = std::move(mUpDataQueueParcelable).parcelable();
+    result.downDataQueueParcelable = std::move(mDownDataQueueParcelable).parcelable();
+    result.sharedMemories.reserve(std::min(mNumSharedMemories, MAX_SHARED_MEMORIES));
+    for (size_t i = 0; i < mNumSharedMemories && i < MAX_SHARED_MEMORIES; ++i) {
+        result.sharedMemories.emplace_back(std::move(mSharedMemories[i]).parcelable());
+    }
+    return result;
+}
 
 /**
  * Add the file descriptor to the table.
@@ -60,60 +81,6 @@
     return index;
 }
 
-/**
- * The read and write must be symmetric.
- */
-status_t AudioEndpointParcelable::writeToParcel(Parcel* parcel) const {
-    status_t status = AAudioConvert_aaudioToAndroidStatus(validate());
-    if (status != NO_ERROR) goto error;
-
-    status = parcel->writeInt32(mNumSharedMemories);
-    if (status != NO_ERROR) goto error;
-
-    for (int i = 0; i < mNumSharedMemories; i++) {
-        status = mSharedMemories[i].writeToParcel(parcel);
-        if (status != NO_ERROR) goto error;
-    }
-    status = mUpMessageQueueParcelable.writeToParcel(parcel);
-    if (status != NO_ERROR) goto error;
-    status = mDownMessageQueueParcelable.writeToParcel(parcel);
-    if (status != NO_ERROR) goto error;
-    status = mUpDataQueueParcelable.writeToParcel(parcel);
-    if (status != NO_ERROR) goto error;
-    status = mDownDataQueueParcelable.writeToParcel(parcel);
-    if (status != NO_ERROR) goto error;
-
-    return NO_ERROR;
-
-error:
-    ALOGE("%s returning %d", __func__, status);
-    return status;
-}
-
-status_t AudioEndpointParcelable::readFromParcel(const Parcel* parcel) {
-    status_t status = parcel->readInt32(&mNumSharedMemories);
-    if (status != NO_ERROR) goto error;
-
-    for (int i = 0; i < mNumSharedMemories; i++) {
-        mSharedMemories[i].readFromParcel(parcel);
-        if (status != NO_ERROR) goto error;
-    }
-    status = mUpMessageQueueParcelable.readFromParcel(parcel);
-    if (status != NO_ERROR) goto error;
-    status = mDownMessageQueueParcelable.readFromParcel(parcel);
-    if (status != NO_ERROR) goto error;
-    status = mUpDataQueueParcelable.readFromParcel(parcel);
-    if (status != NO_ERROR) goto error;
-    status = mDownDataQueueParcelable.readFromParcel(parcel);
-    if (status != NO_ERROR) goto error;
-
-    return AAudioConvert_aaudioToAndroidStatus(validate());
-
-error:
-    ALOGE("%s returning %d", __func__, status);
-    return status;
-}
-
 aaudio_result_t AudioEndpointParcelable::resolve(EndpointDescriptor *descriptor) {
     aaudio_result_t result = mUpMessageQueueParcelable.resolve(mSharedMemories,
                                                            &descriptor->upMessageQueueDescriptor);
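
Both converting assignment operators introduced in this change (AAudioStreamConfiguration::operator= earlier and AudioEndpointParcelable::operator= here) reuse their converting constructor by destroying *this and re-constructing it in place. A generic sketch of that idiom with a hypothetical type; the idiom relies on the converting constructor not throwing and on the source being a different type (so self-assignment cannot occur), which holds for these parcelable wrappers:

    #include <new>
    #include <string>
    #include <utility>

    // Hypothetical type, for illustration only.
    class Holder {
    public:
        explicit Holder(std::string value) : mValue(std::move(value)) {}

        Holder& operator=(std::string value) {
            this->~Holder();                      // end the current object's lifetime
            new (this) Holder(std::move(value));  // re-construct from the new source
            return *this;
        }

    private:
        std::string mValue;
    };
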
diff --git a/media/libaaudio/src/binding/AudioEndpointParcelable.h b/media/libaaudio/src/binding/AudioEndpointParcelable.h
index e4f8b9e..5237a1a 100644
--- a/media/libaaudio/src/binding/AudioEndpointParcelable.h
+++ b/media/libaaudio/src/binding/AudioEndpointParcelable.h
@@ -20,16 +20,13 @@
 #include <stdint.h>
 
 //#include <sys/mman.h>
+#include <aaudio/Endpoint.h>
 #include <android-base/unique_fd.h>
-#include <binder/Parcel.h>
-#include <binder/Parcelable.h>
 
 #include "binding/AAudioServiceDefinitions.h"
 #include "binding/RingBufferParcelable.h"
 
 using android::status_t;
-using android::Parcel;
-using android::Parcelable;
 
 namespace aaudio {
 
@@ -39,10 +36,15 @@
  * It contains no addresses, just sizes, offsets and file descriptors for
  * shared memory that can be passed through Binder.
  */
-class AudioEndpointParcelable : public Parcelable {
+class AudioEndpointParcelable {
 public:
-    AudioEndpointParcelable();
-    virtual ~AudioEndpointParcelable();
+    AudioEndpointParcelable() = default;
+
+    // Ctor/assignment from a parcelable representation.
+    // Since the parcelable object owns unique FDs (for shared memory blocks), move semantics are
+    // provided to avoid the need to dupe.
+    AudioEndpointParcelable(Endpoint&& parcelable);
+    AudioEndpointParcelable& operator=(Endpoint&& parcelable);
 
     /**
      * Add the file descriptor to the table.
@@ -50,16 +52,17 @@
      */
     int32_t addFileDescriptor(const android::base::unique_fd& fd, int32_t sizeInBytes);
 
-    virtual status_t writeToParcel(Parcel* parcel) const override;
-
-    virtual status_t readFromParcel(const Parcel* parcel) override;
-
     aaudio_result_t resolve(EndpointDescriptor *descriptor);
 
     aaudio_result_t close();
 
     void dump();
 
+    // Extract a parcelable representation of this object.
+    // Since our shared memory objects own a unique FD, move semantics are provided to avoid the
+    // need to dupe.
+    Endpoint parcelable()&&;
+
 public: // TODO add getters
     // Set capacityInFrames to zero if Queue is unused.
     RingBufferParcelable    mUpMessageQueueParcelable;   // server to client
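
Because the shared memory entries own unique file descriptors, conversion to and from the Endpoint parcelable is deliberately move-only, as the comments above state. A small sketch of the call shape, with a hypothetical function name:

    #include <utility>

    #include "binding/AudioEndpointParcelable.h"

    // Hypothetical helper, for illustration only.
    void endpointMoveExample(aaudio::AudioEndpointParcelable&& endpoint) {
        // Rvalue-qualified extraction: the FDs are moved out, never dup()'ed.
        aaudio::Endpoint wire = std::move(endpoint).parcelable();

        // Moving in the other direction consumes the parcelable and adopts its FDs.
        aaudio::AudioEndpointParcelable rebuilt = std::move(wire);
        (void) rebuilt;
    }
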
diff --git a/media/libaaudio/src/binding/IAAudioClient.cpp b/media/libaaudio/src/binding/IAAudioClient.cpp
deleted file mode 100644
index c69c4e8..0000000
--- a/media/libaaudio/src/binding/IAAudioClient.cpp
+++ /dev/null
@@ -1,85 +0,0 @@
-/*
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#define LOG_TAG "AAudio"
-//#define LOG_NDEBUG 0
-#include <utils/Log.h>
-
-#include <aaudio/AAudio.h>
-
-#include "binding/AAudioBinderClient.h"
-#include "binding/AAudioServiceDefinitions.h"
-#include "binding/IAAudioClient.h"
-#include "utility/AAudioUtilities.h"
-
-namespace android {
-
-using aaudio::aaudio_handle_t;
-
-/**
- * This is used by the AAudio Service to talk to an AAudio Client.
- *
- * The order of parameters in the Parcels must match with code in AAudioClient.cpp.
- */
-class BpAAudioClient : public BpInterface<IAAudioClient>
-{
-public:
-    explicit BpAAudioClient(const sp<IBinder>& impl)
-        : BpInterface<IAAudioClient>(impl)
-    {
-    }
-
-    void onStreamChange(aaudio_handle_t handle, int32_t opcode, int32_t value) override {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAAudioClient::getInterfaceDescriptor());
-        data.writeInt32(handle);
-        data.writeInt32(opcode);
-        data.writeInt32(value);
-        remote()->transact(ON_STREAM_CHANGE, data,  &reply, IBinder::FLAG_ONEWAY);
-    }
-
-};
-
-// Implement an interface to the service.
-IMPLEMENT_META_INTERFACE(AAudioClient, "IAAudioClient");
-
-// The order of parameters in the Parcels must match with code in BpAAudioClient
-
-status_t BnAAudioClient::onTransact(uint32_t code, const Parcel& data,
-                                        Parcel* reply, uint32_t flags) {
-    aaudio_handle_t streamHandle;
-    int32_t opcode = 0;
-    int32_t value = 0;
-    ALOGV("BnAAudioClient::onTransact(%u) %u", code, flags);
-
-    switch(code) {
-        case ON_STREAM_CHANGE: {
-            CHECK_INTERFACE(IAAudioClient, data, reply);
-            data.readInt32(&streamHandle);
-            data.readInt32(&opcode);
-            data.readInt32(&value);
-            onStreamChange(streamHandle, opcode, value);
-            ALOGD("BnAAudioClient onStreamChange(%x, %d, %d)", streamHandle, opcode, value);
-            return NO_ERROR;
-        } break;
-
-        default:
-            // ALOGW("BnAAudioClient::onTransact not handled %u", code);
-            return BBinder::onTransact(code, data, reply, flags);
-    }
-}
-
-} /* namespace android */
diff --git a/media/libaaudio/src/binding/IAAudioClient.h b/media/libaaudio/src/binding/IAAudioClient.h
deleted file mode 100644
index f21fd93..0000000
--- a/media/libaaudio/src/binding/IAAudioClient.h
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_AAUDIO_IAAUDIO_CLIENT_H
-#define ANDROID_AAUDIO_IAAUDIO_CLIENT_H
-
-#include <stdint.h>
-#include <binder/IInterface.h>
-
-#include <aaudio/AAudio.h>
-
-#include "binding/AAudioCommon.h"
-
-namespace android {
-
-
-// Interface (our AIDL) - client methods called by service
-class IAAudioClient : public IInterface {
-public:
-
-    DECLARE_META_INTERFACE(AAudioClient);
-
-    virtual void onStreamChange(aaudio::aaudio_handle_t handle, int32_t opcode, int32_t value) = 0;
-
-};
-
-class BnAAudioClient : public BnInterface<IAAudioClient> {
-public:
-    virtual status_t onTransact(uint32_t code, const Parcel& data,
-                                Parcel* reply, uint32_t flags = 0);
-};
-
-} /* namespace android */
-
-#endif //ANDROID_AAUDIO_IAAUDIO_SERVICE_H
diff --git a/media/libaaudio/src/binding/IAAudioService.cpp b/media/libaaudio/src/binding/IAAudioService.cpp
deleted file mode 100644
index e017b3a..0000000
--- a/media/libaaudio/src/binding/IAAudioService.cpp
+++ /dev/null
@@ -1,424 +0,0 @@
-/*
- * Copyright (C) 2016 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#define LOG_TAG "AAudio"
-//#define LOG_NDEBUG 0
-#include <utils/Log.h>
-
-#include <aaudio/AAudio.h>
-#include <binder/IPCThreadState.h>
-
-#include "binding/AudioEndpointParcelable.h"
-#include "binding/AAudioStreamRequest.h"
-#include "binding/AAudioServiceDefinitions.h"
-#include "binding/AAudioStreamConfiguration.h"
-#include "binding/IAAudioService.h"
-#include "utility/AAudioUtilities.h"
-
-namespace android {
-
-using aaudio::aaudio_handle_t;
-
-/**
- * This is used by the AAudio Client to talk to the AAudio Service.
- *
- * The order of parameters in the Parcels must match with code in AAudioService.cpp.
- */
-class BpAAudioService : public BpInterface<IAAudioService>
-{
-public:
-    explicit BpAAudioService(const sp<IBinder>& impl)
-        : BpInterface<IAAudioService>(impl)
-    {
-    }
-
-    void registerClient(const sp<IAAudioClient>& client) override
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAAudioService::getInterfaceDescriptor());
-        data.writeStrongBinder(IInterface::asBinder(client));
-        remote()->transact(REGISTER_CLIENT, data, &reply);
-    }
-
-    aaudio_handle_t openStream(const aaudio::AAudioStreamRequest &request,
-                               aaudio::AAudioStreamConfiguration &configurationOutput) override {
-        Parcel data, reply;
-        // send command
-        data.writeInterfaceToken(IAAudioService::getInterfaceDescriptor());
-        // request.dump();
-        request.writeToParcel(&data);
-        status_t err = remote()->transact(OPEN_STREAM, data, &reply);
-        if (err != NO_ERROR) {
-            ALOGE("BpAAudioService::client openStream transact failed %d", err);
-            return AAudioConvert_androidToAAudioResult(err);
-        }
-        // parse reply
-        aaudio_handle_t stream;
-        err = reply.readInt32(&stream);
-        if (err != NO_ERROR) {
-            ALOGE("BpAAudioService::client transact(OPEN_STREAM) readInt %d", err);
-            return AAudioConvert_androidToAAudioResult(err);
-        } else if (stream < 0) {
-            return stream;
-        }
-        err = configurationOutput.readFromParcel(&reply);
-        if (err != NO_ERROR) {
-            ALOGE("BpAAudioService::client openStream readFromParcel failed %d", err);
-            closeStream(stream);
-            return AAudioConvert_androidToAAudioResult(err);
-        }
-        return stream;
-    }
-
-    virtual aaudio_result_t closeStream(aaudio_handle_t streamHandle) override {
-        Parcel data, reply;
-        // send command
-        data.writeInterfaceToken(IAAudioService::getInterfaceDescriptor());
-        data.writeInt32(streamHandle);
-        status_t err = remote()->transact(CLOSE_STREAM, data, &reply);
-        if (err != NO_ERROR) {
-            ALOGE("BpAAudioService::client closeStream transact failed %d", err);
-            return AAudioConvert_androidToAAudioResult(err);
-        }
-        // parse reply
-        aaudio_result_t res;
-        reply.readInt32(&res);
-        return res;
-    }
-
-    virtual aaudio_result_t getStreamDescription(aaudio_handle_t streamHandle,
-                                               aaudio::AudioEndpointParcelable &parcelable)   {
-        Parcel data, reply;
-        // send command
-        data.writeInterfaceToken(IAAudioService::getInterfaceDescriptor());
-        data.writeInt32(streamHandle);
-        status_t err = remote()->transact(GET_STREAM_DESCRIPTION, data, &reply);
-        if (err != NO_ERROR) {
-            ALOGE("BpAAudioService::client transact(GET_STREAM_DESCRIPTION) returns %d", err);
-            return AAudioConvert_androidToAAudioResult(err);
-        }
-        // parse reply
-        aaudio_result_t result;
-        err = reply.readInt32(&result);
-        if (err != NO_ERROR) {
-            ALOGE("BpAAudioService::client transact(GET_STREAM_DESCRIPTION) readInt %d", err);
-            return AAudioConvert_androidToAAudioResult(err);
-        } else if (result != AAUDIO_OK) {
-            ALOGE("BpAAudioService::client GET_STREAM_DESCRIPTION passed result %d", result);
-            return result;
-        }
-        err = parcelable.readFromParcel(&reply);
-        if (err != NO_ERROR) {
-            ALOGE("BpAAudioService::client transact(GET_STREAM_DESCRIPTION) read endpoint %d", err);
-            return AAudioConvert_androidToAAudioResult(err);
-        }
-        return result;
-    }
-
-    // TODO should we wait for a reply?
-    virtual aaudio_result_t startStream(aaudio_handle_t streamHandle) override {
-        Parcel data, reply;
-        // send command
-        data.writeInterfaceToken(IAAudioService::getInterfaceDescriptor());
-        data.writeInt32(streamHandle);
-        status_t err = remote()->transact(START_STREAM, data, &reply);
-        if (err != NO_ERROR) {
-            return AAudioConvert_androidToAAudioResult(err);
-        }
-        // parse reply
-        aaudio_result_t res;
-        reply.readInt32(&res);
-        return res;
-    }
-
-    virtual aaudio_result_t pauseStream(aaudio_handle_t streamHandle) override {
-        Parcel data, reply;
-        // send command
-        data.writeInterfaceToken(IAAudioService::getInterfaceDescriptor());
-        data.writeInt32(streamHandle);
-        status_t err = remote()->transact(PAUSE_STREAM, data, &reply);
-        if (err != NO_ERROR) {
-            return AAudioConvert_androidToAAudioResult(err);
-        }
-        // parse reply
-        aaudio_result_t res;
-        reply.readInt32(&res);
-        return res;
-    }
-
-    virtual aaudio_result_t stopStream(aaudio_handle_t streamHandle) override {
-        Parcel data, reply;
-        // send command
-        data.writeInterfaceToken(IAAudioService::getInterfaceDescriptor());
-        data.writeInt32(streamHandle);
-        status_t err = remote()->transact(STOP_STREAM, data, &reply);
-        if (err != NO_ERROR) {
-            return AAudioConvert_androidToAAudioResult(err);
-        }
-        // parse reply
-        aaudio_result_t res;
-        reply.readInt32(&res);
-        return res;
-    }
-
-    virtual aaudio_result_t flushStream(aaudio_handle_t streamHandle) override {
-        Parcel data, reply;
-        // send command
-        data.writeInterfaceToken(IAAudioService::getInterfaceDescriptor());
-        data.writeInt32(streamHandle);
-        status_t err = remote()->transact(FLUSH_STREAM, data, &reply);
-        if (err != NO_ERROR) {
-            return AAudioConvert_androidToAAudioResult(err);
-        }
-        // parse reply
-        aaudio_result_t res;
-        reply.readInt32(&res);
-        return res;
-    }
-
-    virtual aaudio_result_t registerAudioThread(aaudio_handle_t streamHandle,
-                                                pid_t clientThreadId,
-                                                int64_t periodNanoseconds)
-    override {
-        Parcel data, reply;
-        // send command
-        data.writeInterfaceToken(IAAudioService::getInterfaceDescriptor());
-        data.writeInt32(streamHandle);
-        data.writeInt32((int32_t) clientThreadId);
-        data.writeInt64(periodNanoseconds);
-        status_t err = remote()->transact(REGISTER_AUDIO_THREAD, data, &reply);
-        if (err != NO_ERROR) {
-            return AAudioConvert_androidToAAudioResult(err);
-        }
-        // parse reply
-        aaudio_result_t res;
-        reply.readInt32(&res);
-        return res;
-    }
-
-    virtual aaudio_result_t unregisterAudioThread(aaudio_handle_t streamHandle,
-                                                  pid_t clientThreadId)
-    override {
-        Parcel data, reply;
-        // send command
-        data.writeInterfaceToken(IAAudioService::getInterfaceDescriptor());
-        data.writeInt32(streamHandle);
-        data.writeInt32((int32_t) clientThreadId);
-        status_t err = remote()->transact(UNREGISTER_AUDIO_THREAD, data, &reply);
-        if (err != NO_ERROR) {
-            return AAudioConvert_androidToAAudioResult(err);
-        }
-        // parse reply
-        aaudio_result_t res;
-        reply.readInt32(&res);
-        return res;
-    }
-
-};
-
-// Implement an interface to the service.
-// This is here so that you don't have to link with libaaudio static library.
-IMPLEMENT_META_INTERFACE(AAudioService, "IAAudioService");
-
-// The order of parameters in the Parcels must match with code in BpAAudioService
-
-status_t BnAAudioService::onTransact(uint32_t code, const Parcel& data,
-                                        Parcel* reply, uint32_t flags) {
-    aaudio_handle_t streamHandle = 0;
-    aaudio::AAudioStreamRequest request;
-    aaudio::AAudioStreamConfiguration configuration;
-    pid_t tid = 0;
-    int64_t nanoseconds = 0;
-    aaudio_result_t result = AAUDIO_OK;
-    status_t status = NO_ERROR;
-    ALOGV("BnAAudioService::onTransact(%i) %i", code, flags);
-
-    switch(code) {
-        case REGISTER_CLIENT: {
-            CHECK_INTERFACE(IAAudioService, data, reply);
-            sp<IAAudioClient> client = interface_cast<IAAudioClient>(
-                    data.readStrongBinder());
-            // readStrongBinder() can return null
-            if (client.get() == nullptr) {
-                ALOGE("BnAAudioService::%s(REGISTER_CLIENT) client is NULL!", __func__);
-                android_errorWriteLog(0x534e4554, "116230453");
-                return DEAD_OBJECT;
-            } else {
-                registerClient(client);
-                return NO_ERROR;
-            }
-        } break;
-
-        case OPEN_STREAM: {
-            CHECK_INTERFACE(IAAudioService, data, reply);
-            request.readFromParcel(&data);
-            result = request.validate();
-            if (result != AAUDIO_OK) {
-                streamHandle = result;
-            } else {
-                //ALOGD("BnAAudioService::client openStream request dump --------------------");
-                //request.dump();
-                // Override the uid and pid from the client in case they are incorrect.
-                request.setUserId(IPCThreadState::self()->getCallingUid());
-                request.setProcessId(IPCThreadState::self()->getCallingPid());
-                streamHandle = openStream(request, configuration);
-                //ALOGD("BnAAudioService::onTransact OPEN_STREAM server handle = 0x%08X",
-                //        streamHandle);
-            }
-            reply->writeInt32(streamHandle);
-            configuration.writeToParcel(reply);
-            return NO_ERROR;
-        } break;
-
-        case CLOSE_STREAM: {
-            CHECK_INTERFACE(IAAudioService, data, reply);
-            status = data.readInt32(&streamHandle);
-            if (status != NO_ERROR) {
-                ALOGE("BnAAudioService::%s(CLOSE_STREAM) streamHandle failed!", __func__);
-                return status;
-            }
-            result = closeStream(streamHandle);
-            //ALOGD("BnAAudioService::onTransact CLOSE_STREAM 0x%08X, result = %d",
-            //      streamHandle, result);
-            reply->writeInt32(result);
-            return NO_ERROR;
-        } break;
-
-        case GET_STREAM_DESCRIPTION: {
-            CHECK_INTERFACE(IAAudioService, data, reply);
-            status = data.readInt32(&streamHandle);
-            if (status != NO_ERROR) {
-                ALOGE("BnAAudioService::%s(GET_STREAM_DESCRIPTION) streamHandle failed!", __func__);
-                return status;
-            }
-            aaudio::AudioEndpointParcelable parcelable;
-            result = getStreamDescription(streamHandle, parcelable);
-            if (result != AAUDIO_OK) {
-                return AAudioConvert_aaudioToAndroidStatus(result);
-            }
-            status = reply->writeInt32(result);
-            if (status != NO_ERROR) {
-                return status;
-            }
-            return parcelable.writeToParcel(reply);
-        } break;
-
-        case START_STREAM: {
-            CHECK_INTERFACE(IAAudioService, data, reply);
-            status = data.readInt32(&streamHandle);
-            if (status != NO_ERROR) {
-                ALOGE("BnAAudioService::%s(START_STREAM) streamHandle failed!", __func__);
-                return status;
-            }
-            result = startStream(streamHandle);
-            ALOGV("BnAAudioService::onTransact START_STREAM 0x%08X, result = %d",
-                    streamHandle, result);
-            reply->writeInt32(result);
-            return NO_ERROR;
-        } break;
-
-        case PAUSE_STREAM: {
-            CHECK_INTERFACE(IAAudioService, data, reply);
-            status = data.readInt32(&streamHandle);
-            if (status != NO_ERROR) {
-                ALOGE("BnAAudioService::%s(PAUSE_STREAM) streamHandle failed!", __func__);
-                return status;
-            }
-            result = pauseStream(streamHandle);
-            ALOGV("BnAAudioService::onTransact PAUSE_STREAM 0x%08X, result = %d",
-                  streamHandle, result);
-            reply->writeInt32(result);
-            return NO_ERROR;
-        } break;
-
-        case STOP_STREAM: {
-            CHECK_INTERFACE(IAAudioService, data, reply);
-            status = data.readInt32(&streamHandle);
-            if (status != NO_ERROR) {
-                ALOGE("BnAAudioService::%s(STOP_STREAM) streamHandle failed!", __func__);
-                return status;
-            }
-            result = stopStream(streamHandle);
-            ALOGV("BnAAudioService::onTransact STOP_STREAM 0x%08X, result = %d",
-                  streamHandle, result);
-            reply->writeInt32(result);
-            return NO_ERROR;
-        } break;
-
-        case FLUSH_STREAM: {
-            CHECK_INTERFACE(IAAudioService, data, reply);
-            status = data.readInt32(&streamHandle);
-            if (status != NO_ERROR) {
-                ALOGE("BnAAudioService::%s(FLUSH_STREAM) streamHandle failed!", __func__);
-                return status;
-            }
-            result = flushStream(streamHandle);
-            ALOGV("BnAAudioService::onTransact FLUSH_STREAM 0x%08X, result = %d",
-                    streamHandle, result);
-            reply->writeInt32(result);
-            return NO_ERROR;
-        } break;
-
-        case REGISTER_AUDIO_THREAD: {
-            CHECK_INTERFACE(IAAudioService, data, reply);
-            status = data.readInt32(&streamHandle);
-            if (status != NO_ERROR) {
-                ALOGE("BnAAudioService::%s(REGISTER_AUDIO_THREAD) streamHandle failed!", __func__);
-                return status;
-            }
-            status = data.readInt32(&tid);
-            if (status != NO_ERROR) {
-                ALOGE("BnAAudioService::%s(REGISTER_AUDIO_THREAD) tid failed!", __func__);
-                return status;
-            }
-            status = data.readInt64(&nanoseconds);
-            if (status != NO_ERROR) {
-                ALOGE("BnAAudioService::%s(REGISTER_AUDIO_THREAD) nanoseconds failed!", __func__);
-                return status;
-            }
-            result = registerAudioThread(streamHandle, tid, nanoseconds);
-            ALOGV("BnAAudioService::%s(REGISTER_AUDIO_THREAD) 0x%08X, result = %d",
-                    __func__, streamHandle, result);
-            reply->writeInt32(result);
-            return NO_ERROR;
-        } break;
-
-        case UNREGISTER_AUDIO_THREAD: {
-            CHECK_INTERFACE(IAAudioService, data, reply);
-            status = data.readInt32(&streamHandle);
-            if (status != NO_ERROR) {
-                ALOGE("BnAAudioService::%s(UNREGISTER_AUDIO_THREAD) streamHandle failed!", __func__);
-                return status;
-            }
-            status = data.readInt32(&tid);
-            if (status != NO_ERROR) {
-                ALOGE("BnAAudioService::%s(UNREGISTER_AUDIO_THREAD) tid failed!", __func__);
-                return status;
-            }
-            result = unregisterAudioThread(streamHandle, tid);
-            ALOGV("BnAAudioService::onTransact UNREGISTER_AUDIO_THREAD 0x%08X, result = %d",
-                    streamHandle, result);
-            reply->writeInt32(result);
-            return NO_ERROR;
-        } break;
-
-        default:
-            // ALOGW("BnAAudioService::onTransact not handled %u", code);
-            return BBinder::onTransact(code, data, reply, flags);
-    }
-}
-
-} /* namespace android */
diff --git a/media/libaaudio/src/binding/IAAudioService.h b/media/libaaudio/src/binding/IAAudioService.h
deleted file mode 100644
index 6bdb826..0000000
--- a/media/libaaudio/src/binding/IAAudioService.h
+++ /dev/null
@@ -1,109 +0,0 @@
-/*
- * Copyright (C) 2016 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_AAUDIO_IAAUDIO_SERVICE_H
-#define ANDROID_AAUDIO_IAAUDIO_SERVICE_H
-
-#include <stdint.h>
-#include <utils/RefBase.h>
-#include <binder/TextOutput.h>
-#include <binder/IInterface.h>
-
-#include <aaudio/AAudio.h>
-
-#include "binding/AAudioCommon.h"
-#include "binding/AAudioServiceDefinitions.h"
-#include "binding/AAudioStreamConfiguration.h"
-#include "binding/AAudioStreamRequest.h"
-#include "binding/AudioEndpointParcelable.h"
-#include "binding/IAAudioClient.h"
-
-namespace android {
-
-#define AAUDIO_SERVICE_NAME  "media.aaudio"
-
-// Interface (our AIDL) - service methods called by client
-class IAAudioService : public IInterface {
-public:
-
-    DECLARE_META_INTERFACE(AAudioService);
-
-    // Register an object to receive audio input/output change and track notifications.
-    // For a given calling pid, AAudio service disregards any registrations after the first.
-    // Thus the IAAudioClient must be a singleton per process.
-    virtual void registerClient(const sp<IAAudioClient>& client) = 0;
-
-    /**
-     * @param request info needed to create the stream
-     * @param configuration contains information about the created stream
-     * @return handle to the stream or a negative error
-     */
-    virtual aaudio::aaudio_handle_t openStream(const aaudio::AAudioStreamRequest &request,
-                                     aaudio::AAudioStreamConfiguration &configurationOutput) = 0;
-
-    virtual aaudio_result_t closeStream(aaudio::aaudio_handle_t streamHandle) = 0;
-
-    /* Get an immutable description of the in-memory queues
-    * used to communicate with the underlying HAL or Service.
-    */
-    virtual aaudio_result_t getStreamDescription(aaudio::aaudio_handle_t streamHandle,
-                                               aaudio::AudioEndpointParcelable &parcelable) = 0;
-
-    /**
-     * Start the flow of data.
-     * This is asynchronous. When complete, the service will send a STARTED event.
-     */
-    virtual aaudio_result_t startStream(aaudio::aaudio_handle_t streamHandle) = 0;
-
-    /**
-     * Stop the flow of data such that start() can resume without loss of data.
-     * This is asynchronous. When complete, the service will send a PAUSED event.
-     */
-    virtual aaudio_result_t pauseStream(aaudio::aaudio_handle_t streamHandle) = 0;
-
-    /**
-     * Stop the flow of data such that the data currently in the buffer is played.
-     * This is asynchronous. When complete, the service will send a STOPPED event.
-     */
-    virtual aaudio_result_t stopStream(aaudio::aaudio_handle_t streamHandle) = 0;
-
-    /**
-     *  Discard any data held by the underlying HAL or Service.
-     * This is asynchronous. When complete, the service will send a FLUSHED event.
-     */
-    virtual aaudio_result_t flushStream(aaudio::aaudio_handle_t streamHandle) = 0;
-
-    /**
-     * Manage the specified thread as a low latency audio thread.
-     */
-    virtual aaudio_result_t registerAudioThread(aaudio::aaudio_handle_t streamHandle,
-                                              pid_t clientThreadId,
-                                              int64_t periodNanoseconds) = 0;
-
-    virtual aaudio_result_t unregisterAudioThread(aaudio::aaudio_handle_t streamHandle,
-                                                pid_t clientThreadId) = 0;
-};
-
-class BnAAudioService : public BnInterface<IAAudioService> {
-public:
-    virtual status_t onTransact(uint32_t code, const Parcel& data,
-                                Parcel* reply, uint32_t flags = 0);
-
-};
-
-} /* namespace android */
-
-#endif //ANDROID_AAUDIO_IAAUDIO_SERVICE_H
diff --git a/media/libaaudio/src/binding/RingBufferParcelable.cpp b/media/libaaudio/src/binding/RingBufferParcelable.cpp
index 4996b3f..a4b3cec 100644
--- a/media/libaaudio/src/binding/RingBufferParcelable.cpp
+++ b/media/libaaudio/src/binding/RingBufferParcelable.cpp
@@ -29,8 +29,29 @@
 
 using namespace aaudio;
 
-RingBufferParcelable::RingBufferParcelable() {}
-RingBufferParcelable::~RingBufferParcelable() {}
+RingBufferParcelable::RingBufferParcelable(const RingBuffer& parcelable)
+        : mReadCounterParcelable(std::move(parcelable.readCounterParcelable)),
+          mWriteCounterParcelable(std::move(parcelable.writeCounterParcelable)),
+          mDataParcelable(std::move(parcelable.dataParcelable)),
+          mBytesPerFrame(parcelable.bytesPerFrame),
+          mFramesPerBurst(parcelable.framesPerBurst),
+          mCapacityInFrames(parcelable.capacityInFrames),
+          mFlags(static_cast<RingbufferFlags>(parcelable.flags)) {
+    static_assert(sizeof(mFlags) == sizeof(parcelable.flags));
+}
+
+RingBuffer RingBufferParcelable::parcelable() const {
+    RingBuffer result;
+    result.readCounterParcelable = std::move(mReadCounterParcelable).parcelable();
+    result.writeCounterParcelable = std::move(mWriteCounterParcelable).parcelable();
+    result.dataParcelable = std::move(mDataParcelable).parcelable();
+    result.bytesPerFrame = mBytesPerFrame;
+    result.framesPerBurst = mFramesPerBurst;
+    result.capacityInFrames = mCapacityInFrames;
+    static_assert(sizeof(mFlags) == sizeof(result.flags));
+    result.flags = static_cast<int32_t>(mFlags);
+    return result;
+}
 
 // TODO This assumes that all three use the same SharedMemoryParcelable
 void RingBufferParcelable::setupMemory(int32_t sharedMemoryIndex,
@@ -76,58 +97,6 @@
     mCapacityInFrames = capacityInFrames;
 }
 
-/**
- * The read and write must be symmetric.
- */
-status_t RingBufferParcelable::writeToParcel(Parcel* parcel) const {
-    status_t status = AAudioConvert_aaudioToAndroidStatus(validate());
-    if (status != NO_ERROR) goto error;
-
-    status = parcel->writeInt32(mCapacityInFrames);
-    if (status != NO_ERROR) goto error;
-    if (mCapacityInFrames > 0) {
-        status = parcel->writeInt32(mBytesPerFrame);
-        if (status != NO_ERROR) goto error;
-        status = parcel->writeInt32(mFramesPerBurst);
-        if (status != NO_ERROR) goto error;
-        status = parcel->writeInt32(mFlags);
-        if (status != NO_ERROR) goto error;
-        status = mReadCounterParcelable.writeToParcel(parcel);
-        if (status != NO_ERROR) goto error;
-        status = mWriteCounterParcelable.writeToParcel(parcel);
-        if (status != NO_ERROR) goto error;
-        status = mDataParcelable.writeToParcel(parcel);
-        if (status != NO_ERROR) goto error;
-    }
-    return NO_ERROR;
-error:
-    ALOGE("%s returning %d", __func__, status);
-    return status;
-}
-
-status_t RingBufferParcelable::readFromParcel(const Parcel* parcel) {
-    status_t status = parcel->readInt32(&mCapacityInFrames);
-    if (status != NO_ERROR) goto error;
-    if (mCapacityInFrames > 0) {
-        status = parcel->readInt32(&mBytesPerFrame);
-        if (status != NO_ERROR) goto error;
-        status = parcel->readInt32(&mFramesPerBurst);
-        if (status != NO_ERROR) goto error;
-        status = parcel->readInt32((int32_t *)&mFlags);
-        if (status != NO_ERROR) goto error;
-        status = mReadCounterParcelable.readFromParcel(parcel);
-        if (status != NO_ERROR) goto error;
-        status = mWriteCounterParcelable.readFromParcel(parcel);
-        if (status != NO_ERROR) goto error;
-        status = mDataParcelable.readFromParcel(parcel);
-        if (status != NO_ERROR) goto error;
-    }
-    return AAudioConvert_aaudioToAndroidStatus(validate());
-error:
-    ALOGE("%s returning %d", __func__, status);
-    return status;
-}
-
 aaudio_result_t RingBufferParcelable::resolve(SharedMemoryParcelable *memoryParcels, RingBufferDescriptor *descriptor) {
     aaudio_result_t result;
 
diff --git a/media/libaaudio/src/binding/RingBufferParcelable.h b/media/libaaudio/src/binding/RingBufferParcelable.h
index 1dbcf07..2508cea 100644
--- a/media/libaaudio/src/binding/RingBufferParcelable.h
+++ b/media/libaaudio/src/binding/RingBufferParcelable.h
@@ -19,6 +19,7 @@
 
 #include <stdint.h>
 
+#include <aaudio/RingBuffer.h>
 #include <binder/Parcelable.h>
 
 #include "binding/AAudioServiceDefinitions.h"
@@ -26,10 +27,12 @@
 
 namespace aaudio {
 
-class RingBufferParcelable : public Parcelable {
+class RingBufferParcelable {
 public:
-    RingBufferParcelable();
-    virtual ~RingBufferParcelable();
+    RingBufferParcelable() = default;
+
+    // Construct based on a parcelable representation.
+    explicit RingBufferParcelable(const RingBuffer& parcelable);
 
     // TODO This assumes that all three use the same SharedMemoryParcelable
     void setupMemory(int32_t sharedMemoryIndex,
@@ -57,21 +60,14 @@
 
     bool isFileDescriptorSafe(SharedMemoryParcelable *memoryParcels);
 
-    /**
-     * The read and write must be symmetric.
-     */
-    virtual status_t writeToParcel(Parcel* parcel) const override;
-
-    virtual status_t readFromParcel(const Parcel* parcel) override;
-
     aaudio_result_t resolve(SharedMemoryParcelable *memoryParcels, RingBufferDescriptor *descriptor);
 
     void dump();
 
+    // Extract a parcelable representation of this object.
+    RingBuffer parcelable() const;
+
 private:
-
-    aaudio_result_t validate() const;
-
     SharedRegionParcelable  mReadCounterParcelable;
     SharedRegionParcelable  mWriteCounterParcelable;
     SharedRegionParcelable  mDataParcelable;
@@ -79,6 +75,8 @@
     int32_t                 mFramesPerBurst = 0;    // for ISOCHRONOUS queues
     int32_t                 mCapacityInFrames = 0;  // zero if unused
     RingbufferFlags         mFlags = RingbufferFlags::NONE;
+
+    aaudio_result_t validate() const;
 };
 
 } /* namespace aaudio */
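
With the hand-rolled writeToParcel()/readFromParcel() pair removed, serialization round-trips through the AIDL-generated aaudio::RingBuffer type declared in RingBuffer.aidl. A minimal round-trip sketch based on the declarations above (the helper names are hypothetical):

    #include <aaudio/RingBuffer.h>
    #include "binding/RingBufferParcelable.h"

    // Hand the native wrapper to the AIDL layer as a stable parcelable.
    aaudio::RingBuffer toAidl(const aaudio::RingBufferParcelable& native) {
        return native.parcelable();
    }

    // Rebuild the native wrapper from a received parcelable.
    aaudio::RingBufferParcelable fromAidl(const aaudio::RingBuffer& wire) {
        return aaudio::RingBufferParcelable(wire);
    }
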
diff --git a/media/libaaudio/src/binding/SharedMemoryParcelable.cpp b/media/libaaudio/src/binding/SharedMemoryParcelable.cpp
index b6e8472..685b779 100644
--- a/media/libaaudio/src/binding/SharedMemoryParcelable.cpp
+++ b/media/libaaudio/src/binding/SharedMemoryParcelable.cpp
@@ -18,6 +18,7 @@
 //#define LOG_NDEBUG 0
 #include <utils/Log.h>
 
+#include <inttypes.h>
 #include <stdint.h>
 #include <stdio.h>
 
@@ -33,61 +34,36 @@
 using android::base::unique_fd;
 using android::NO_ERROR;
 using android::status_t;
-using android::Parcel;
-using android::Parcelable;
+using android::media::SharedFileRegion;
 
 using namespace aaudio;
 
-SharedMemoryParcelable::SharedMemoryParcelable() {}
-SharedMemoryParcelable::~SharedMemoryParcelable() {};
+SharedMemoryParcelable::SharedMemoryParcelable(SharedFileRegion&& parcelable) {
+    mFd = parcelable.fd.release();
+    mSizeInBytes = parcelable.size;
+    mOffsetInBytes = parcelable.offset;
+}
+
+SharedFileRegion SharedMemoryParcelable::parcelable() && {
+    SharedFileRegion result;
+    result.fd.reset(std::move(mFd));
+    result.size = mSizeInBytes;
+    result.offset = mOffsetInBytes;
+    return result;
+}
+
+SharedMemoryParcelable SharedMemoryParcelable::dup() const {
+    SharedMemoryParcelable result;
+    result.setup(mFd, static_cast<int32_t>(mSizeInBytes));
+    return result;
+}
 
 void SharedMemoryParcelable::setup(const unique_fd& fd, int32_t sizeInBytes) {
-    mFd.reset(dup(fd.get())); // store a duplicate fd
+    mFd.reset(::dup(fd.get())); // store a duplicate fd
     ALOGV("setup(fd = %d -> %d, size = %d) this = %p\n", fd.get(), mFd.get(), sizeInBytes, this);
     mSizeInBytes = sizeInBytes;
 }
 
-status_t SharedMemoryParcelable::writeToParcel(Parcel* parcel) const {
-    status_t status = AAudioConvert_aaudioToAndroidStatus(validate());
-    if (status != NO_ERROR) return status;
-
-    status = parcel->writeInt32(mSizeInBytes);
-    if (status != NO_ERROR) return status;
-    if (mSizeInBytes > 0) {
-        ALOGV("writeToParcel() mFd = %d, this = %p\n", mFd.get(), this);
-        status = parcel->writeUniqueFileDescriptor(mFd);
-        ALOGE_IF(status != NO_ERROR, "SharedMemoryParcelable writeDupFileDescriptor failed : %d",
-                 status);
-    }
-    return status;
-}
-
-status_t SharedMemoryParcelable::readFromParcel(const Parcel* parcel) {
-    status_t status = parcel->readInt32(&mSizeInBytes);
-    if (status != NO_ERROR) goto error;
-
-    if (mSizeInBytes > 0) {
-        // The Parcel owns the file descriptor and will close it later.
-        unique_fd mmapFd;
-        status = parcel->readUniqueFileDescriptor(&mmapFd);
-        if (status != NO_ERROR) {
-            ALOGE("readFromParcel() readUniqueFileDescriptor() failed : %d", status);
-            goto error;
-        }
-
-        // Resolve the memory now while we still have the FD from the Parcel.
-        // Closing the FD will not affect the shared memory once mmap() has been called.
-        aaudio_result_t result = resolveSharedMemory(mmapFd);
-        status = AAudioConvert_aaudioToAndroidStatus(result);
-        if (status != NO_ERROR) goto error;
-    }
-
-    return AAudioConvert_aaudioToAndroidStatus(validate());
-
-error:
-    return status;
-}
-
 aaudio_result_t SharedMemoryParcelable::close() {
     if (mResolvedAddress != MMAP_UNRESOLVED_ADDRESS) {
         int err = munmap(mResolvedAddress, mSizeInBytes);
@@ -104,7 +80,7 @@
     mResolvedAddress = (uint8_t *) mmap(0, mSizeInBytes, PROT_READ | PROT_WRITE,
                                         MAP_SHARED, fd.get(), 0);
     if (mResolvedAddress == MMAP_UNRESOLVED_ADDRESS) {
-        ALOGE("mmap() failed for fd = %d, nBytes = %d, errno = %s",
+        ALOGE("mmap() failed for fd = %d, nBytes = %" PRId64 ", errno = %s",
               fd.get(), mSizeInBytes, strerror(errno));
         return AAUDIO_ERROR_INTERNAL;
     }
@@ -118,7 +94,7 @@
         return AAUDIO_ERROR_OUT_OF_RANGE;
     } else if ((offsetInBytes + sizeInBytes) > mSizeInBytes) {
         ALOGE("out of range, offsetInBytes = %d, "
-                      "sizeInBytes = %d, mSizeInBytes = %d",
+                      "sizeInBytes = %d, mSizeInBytes = %" PRId64,
               offsetInBytes, sizeInBytes, mSizeInBytes);
         return AAUDIO_ERROR_OUT_OF_RANGE;
     }
@@ -148,7 +124,11 @@
 
 aaudio_result_t SharedMemoryParcelable::validate() const {
     if (mSizeInBytes < 0 || mSizeInBytes >= MAX_MMAP_SIZE_BYTES) {
-        ALOGE("invalid mSizeInBytes = %d", mSizeInBytes);
+        ALOGE("invalid mSizeInBytes = %" PRId64, mSizeInBytes);
+        return AAUDIO_ERROR_OUT_OF_RANGE;
+    }
+    if (mOffsetInBytes != 0) {
+        ALOGE("invalid mOffsetInBytes = %" PRId64, mOffsetInBytes);
         return AAUDIO_ERROR_OUT_OF_RANGE;
     }
     return AAUDIO_OK;
@@ -156,5 +136,5 @@
 
 void SharedMemoryParcelable::dump() {
     ALOGD("mFd = %d", mFd.get());
-    ALOGD("mSizeInBytes = %d", mSizeInBytes);
+    ALOGD("mSizeInBytes = %" PRId64, mSizeInBytes);
 }
diff --git a/media/libaaudio/src/binding/SharedMemoryParcelable.h b/media/libaaudio/src/binding/SharedMemoryParcelable.h
index 4ec38c5..1f2c335 100644
--- a/media/libaaudio/src/binding/SharedMemoryParcelable.h
+++ b/media/libaaudio/src/binding/SharedMemoryParcelable.h
@@ -21,12 +21,11 @@
 #include <sys/mman.h>
 
 #include <android-base/unique_fd.h>
-#include <binder/Parcel.h>
-#include <binder/Parcelable.h>
+#include <android/media/SharedFileRegion.h>
 
 namespace aaudio {
 
-// Arbitrary limits for sanity checks. TODO remove after debugging.
+// Arbitrary limits for range checks.
 #define MAX_SHARED_MEMORIES (32)
 #define MAX_MMAP_OFFSET_BYTES (32 * 1024 * 8)
 #define MAX_MMAP_SIZE_BYTES (32 * 1024 * 8)
@@ -36,10 +35,14 @@
  * It may be divided into several regions.
  * The memory can be shared using Binder or simply shared between threads.
  */
-class SharedMemoryParcelable : public android::Parcelable {
+class SharedMemoryParcelable {
 public:
-    SharedMemoryParcelable();
-    virtual ~SharedMemoryParcelable();
+    SharedMemoryParcelable() = default;
+
+    // Ctor from a parcelable representation.
+    // Since the parcelable object owns a unique FD, move semantics are provided to avoid the need
+    // to dupe.
+    explicit SharedMemoryParcelable(android::media::SharedFileRegion&& parcelable);
 
     /**
      * Make a dup() of the fd and store it for later use.
@@ -49,10 +52,6 @@
      */
     void setup(const android::base::unique_fd& fd, int32_t sizeInBytes);
 
-    virtual android::status_t writeToParcel(android::Parcel* parcel) const override;
-
-    virtual android::status_t readFromParcel(const android::Parcel* parcel) override;
-
     // mmap() shared memory
     aaudio_result_t resolve(int32_t offsetInBytes, int32_t sizeInBytes, void **regionAddressPtr);
 
@@ -63,20 +62,23 @@
 
     void dump();
 
-protected:
+    // Extract a parcelable representation of this object.
+    // Since we own a unique FD, move semantics are provided to avoid the need to dupe.
+    android::media::SharedFileRegion parcelable() &&;
 
-#define MMAP_UNRESOLVED_ADDRESS    reinterpret_cast<uint8_t*>(MAP_FAILED)
-
-    aaudio_result_t resolveSharedMemory(const android::base::unique_fd& fd);
-
-    android::base::unique_fd   mFd;
-    int32_t                    mSizeInBytes = 0;
-    uint8_t                   *mResolvedAddress = MMAP_UNRESOLVED_ADDRESS;
+    // Copy this instance. Duplicates the underlying FD.
+    SharedMemoryParcelable dup() const;
 
 private:
+#define MMAP_UNRESOLVED_ADDRESS    reinterpret_cast<uint8_t*>(MAP_FAILED)
 
+    android::base::unique_fd   mFd;
+    int64_t                    mSizeInBytes = 0;
+    int64_t                    mOffsetInBytes = 0;
+    uint8_t                   *mResolvedAddress = MMAP_UNRESOLVED_ADDRESS;
+
+    aaudio_result_t resolveSharedMemory(const android::base::unique_fd& fd);
     aaudio_result_t validate() const;
-
 };
 
 } /* namespace aaudio */
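
Because the wrapped file descriptor is unique, the conversion to android::media::SharedFileRegion is rvalue-qualified: calling parcelable() on an rvalue moves the FD out instead of duplicating it, while dup() makes an explicit copy first. A short sketch under those assumptions (the helper name is hypothetical):

    #include <utility>
    #include <android/media/SharedFileRegion.h>
    #include "binding/SharedMemoryParcelable.h"

    // Transfer ownership of the FD to the AIDL parcelable; 'shm' must not be reused afterwards.
    android::media::SharedFileRegion takeRegion(aaudio::SharedMemoryParcelable&& shm) {
        return std::move(shm).parcelable();
    }

    // To keep using the native object, duplicate the FD explicitly instead:
    //     android::media::SharedFileRegion copy = shm.dup().parcelable();
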
diff --git a/media/libaaudio/src/binding/SharedRegionParcelable.cpp b/media/libaaudio/src/binding/SharedRegionParcelable.cpp
index c776116..56b99c0 100644
--- a/media/libaaudio/src/binding/SharedRegionParcelable.cpp
+++ b/media/libaaudio/src/binding/SharedRegionParcelable.cpp
@@ -36,8 +36,18 @@
 
 using namespace aaudio;
 
-SharedRegionParcelable::SharedRegionParcelable() {}
-SharedRegionParcelable::~SharedRegionParcelable() {}
+SharedRegionParcelable::SharedRegionParcelable(const SharedRegion& parcelable)
+        : mSharedMemoryIndex(parcelable.sharedMemoryIndex),
+          mOffsetInBytes(parcelable.offsetInBytes),
+          mSizeInBytes(parcelable.sizeInBytes) {}
+
+SharedRegion SharedRegionParcelable::parcelable() const {
+    SharedRegion result;
+    result.sharedMemoryIndex = mSharedMemoryIndex;
+    result.offsetInBytes = mOffsetInBytes;
+    result.sizeInBytes = mSizeInBytes;
+    return result;
+}
 
 void SharedRegionParcelable::setup(int32_t sharedMemoryIndex,
                                    int32_t offsetInBytes,
@@ -47,41 +57,6 @@
     mSizeInBytes = sizeInBytes;
 }
 
-status_t SharedRegionParcelable::writeToParcel(Parcel* parcel) const {
-    status_t status = AAudioConvert_aaudioToAndroidStatus(validate());
-    if (status != NO_ERROR) goto error;
-
-    status = parcel->writeInt32(mSizeInBytes);
-    if (status != NO_ERROR) goto error;
-    if (mSizeInBytes > 0) {
-        status = parcel->writeInt32(mSharedMemoryIndex);
-        if (status != NO_ERROR) goto error;
-        status = parcel->writeInt32(mOffsetInBytes);
-        if (status != NO_ERROR) goto error;
-    }
-    return NO_ERROR;
-
-error:
-    ALOGE("%s returning %d", __func__, status);
-    return status;
-}
-
-status_t SharedRegionParcelable::readFromParcel(const Parcel* parcel) {
-    status_t status = parcel->readInt32(&mSizeInBytes);
-    if (status != NO_ERROR) goto error;
-    if (mSizeInBytes > 0) {
-        status = parcel->readInt32(&mSharedMemoryIndex);
-        if (status != NO_ERROR) goto error;
-        status = parcel->readInt32(&mOffsetInBytes);
-        if (status != NO_ERROR) goto error;
-    }
-    return AAudioConvert_aaudioToAndroidStatus(validate());
-
-error:
-    ALOGE("%s returning %d", __func__, status);
-    return status;
-}
-
 aaudio_result_t SharedRegionParcelable::resolve(SharedMemoryParcelable *memoryParcels,
                                               void **regionAddressPtr) {
     if (mSizeInBytes == 0) {
diff --git a/media/libaaudio/src/binding/SharedRegionParcelable.h b/media/libaaudio/src/binding/SharedRegionParcelable.h
index 0cd8c04..c15fc30 100644
--- a/media/libaaudio/src/binding/SharedRegionParcelable.h
+++ b/media/libaaudio/src/binding/SharedRegionParcelable.h
@@ -20,41 +20,39 @@
 #include <stdint.h>
 
 #include <sys/mman.h>
-#include <binder/Parcelable.h>
 
 #include <aaudio/AAudio.h>
+#include <aaudio/SharedRegion.h>
 
 #include "binding/SharedMemoryParcelable.h"
 
 using android::status_t;
-using android::Parcel;
-using android::Parcelable;
 
 namespace aaudio {
 
-class SharedRegionParcelable : public Parcelable {
+class SharedRegionParcelable {
 public:
-    SharedRegionParcelable();
-    virtual ~SharedRegionParcelable();
+    SharedRegionParcelable() = default;
+
+    // Construct based on a parcelable representation.
+    explicit SharedRegionParcelable(const SharedRegion& parcelable);
 
     void setup(int32_t sharedMemoryIndex, int32_t offsetInBytes, int32_t sizeInBytes);
 
-    virtual status_t writeToParcel(Parcel* parcel) const override;
-
-    virtual status_t readFromParcel(const Parcel* parcel) override;
-
     aaudio_result_t resolve(SharedMemoryParcelable *memoryParcels, void **regionAddressPtr);
 
     bool isFileDescriptorSafe(SharedMemoryParcelable *memoryParcels);
 
     void dump();
 
-protected:
+    // Extract a parcelable representation of this object.
+    SharedRegion parcelable() const;
+
+private:
     int32_t mSharedMemoryIndex = -1;
     int32_t mOffsetInBytes     = 0;
     int32_t mSizeInBytes       = 0;
 
-private:
     aaudio_result_t validate() const;
 };
 
diff --git a/media/libaaudio/src/binding/aidl/aaudio/Endpoint.aidl b/media/libaaudio/src/binding/aidl/aaudio/Endpoint.aidl
new file mode 100644
index 0000000..3600b6a
--- /dev/null
+++ b/media/libaaudio/src/binding/aidl/aaudio/Endpoint.aidl
@@ -0,0 +1,29 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package aaudio;
+
+import aaudio.RingBuffer;
+import android.media.SharedFileRegion;
+
+parcelable Endpoint {
+    // Set capacityInFrames to zero if a queue is unused.
+    RingBuffer upMessageQueueParcelable;   // server to client
+    RingBuffer downMessageQueueParcelable; // client to server
+    RingBuffer upDataQueueParcelable;      // e.g. record; could share the same queue
+    RingBuffer downDataQueueParcelable;    // e.g. playback
+    SharedFileRegion[] sharedMemories;
+}
diff --git a/media/libaaudio/src/binding/aidl/aaudio/IAAudioClient.aidl b/media/libaaudio/src/binding/aidl/aaudio/IAAudioClient.aidl
new file mode 100644
index 0000000..a010dbc
--- /dev/null
+++ b/media/libaaudio/src/binding/aidl/aaudio/IAAudioClient.aidl
@@ -0,0 +1,21 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package aaudio;
+
+interface IAAudioClient {
+    oneway void onStreamChange(int handle, int opcode, int value);
+}
diff --git a/media/libaaudio/src/binding/aidl/aaudio/IAAudioService.aidl b/media/libaaudio/src/binding/aidl/aaudio/IAAudioService.aidl
new file mode 100644
index 0000000..44d2211
--- /dev/null
+++ b/media/libaaudio/src/binding/aidl/aaudio/IAAudioService.aidl
@@ -0,0 +1,81 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package aaudio;
+
+import aaudio.Endpoint;
+import aaudio.IAAudioClient;
+import aaudio.StreamParameters;
+import aaudio.StreamRequest;
+
+interface IAAudioService {
+    /**
+     * Register an object to receive audio input/output change and track notifications.
+     * For a given calling pid, AAudio service disregards any registrations after the first.
+     * Thus the IAAudioClient must be a singleton per process.
+     */
+    void registerClient(IAAudioClient client);
+
+    /**
+     * @param request info needed to create the stream
+     * @param paramsOut contains information about the created stream
+     * @return handle to the stream or a negative error
+     */
+    int openStream(in StreamRequest request,
+                   out StreamParameters paramsOut);
+
+    int closeStream(int streamHandle);
+
+    /**
+     * Get an immutable description of the in-memory queues
+     * used to communicate with the underlying HAL or Service.
+     */
+    int getStreamDescription(int streamHandle, out Endpoint endpoint);
+
+    /**
+     * Start the flow of data.
+     * This is asynchronous. When complete, the service will send a STARTED event.
+     */
+    int startStream(int streamHandle);
+
+    /**
+     * Stop the flow of data such that start() can resume without loss of data.
+     * This is asynchronous. When complete, the service will send a PAUSED event.
+     */
+    int pauseStream(int streamHandle);
+
+    /**
+     * Stop the flow of data such that the data currently in the buffer is played.
+     * This is asynchronous. When complete, the service will send a STOPPED event.
+     */
+    int stopStream(int streamHandle);
+
+    /**
+     * Discard any data held by the underlying HAL or Service.
+     * This is asynchronous. When complete, the service will send a FLUSHED event.
+     */
+    int flushStream(int streamHandle);
+
+    /**
+     * Manage the specified thread as a low latency audio thread.
+     */
+    int registerAudioThread(int streamHandle,
+                            int clientThreadId,
+                            long periodNanoseconds);
+
+    int unregisterAudioThread(int streamHandle,
+                              int clientThreadId);
+}
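
For reference, a rough client-side sketch of calling this interface through the generated C++ binding, assuming the standard AIDL C++ backend conventions (the int return value becomes a trailing out-parameter) and the "media.aaudio" service name used by the old hand-rolled interface; this is illustrative only, not the code path AAudio itself takes:

    #include <binder/IInterface.h>
    #include <binder/IServiceManager.h>
    #include <aaudio/IAAudioService.h>  // assumed location of the generated header

    // Open a stream and return its handle, or a negative value on error (hypothetical helper).
    int32_t openStreamExample(const aaudio::StreamRequest& request,
                              aaudio::StreamParameters* paramsOut) {
        android::sp<aaudio::IAAudioService> service =
                android::interface_cast<aaudio::IAAudioService>(
                        android::defaultServiceManager()->getService(
                                android::String16("media.aaudio")));
        if (service == nullptr) {
            return -1;
        }
        int32_t handle = -1;
        android::binder::Status status = service->openStream(request, paramsOut, &handle);
        return status.isOk() ? handle : -1;
    }
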
diff --git a/media/libaaudio/src/binding/aidl/aaudio/RingBuffer.aidl b/media/libaaudio/src/binding/aidl/aaudio/RingBuffer.aidl
new file mode 100644
index 0000000..a58b33a
--- /dev/null
+++ b/media/libaaudio/src/binding/aidl/aaudio/RingBuffer.aidl
@@ -0,0 +1,29 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package aaudio;
+
+import aaudio.SharedRegion;
+
+parcelable RingBuffer {
+    SharedRegion        readCounterParcelable;
+    SharedRegion        writeCounterParcelable;
+    SharedRegion        dataParcelable;
+    int                 bytesPerFrame;     // index is in frames
+    int                 framesPerBurst;    // for ISOCHRONOUS queues
+    int                 capacityInFrames;  // zero if unused
+    int /* RingbufferFlags */ flags;  // = RingbufferFlags::NONE;
+}
\ No newline at end of file
diff --git a/media/libaaudio/src/binding/aidl/aaudio/SharedRegion.aidl b/media/libaaudio/src/binding/aidl/aaudio/SharedRegion.aidl
new file mode 100644
index 0000000..26153e8
--- /dev/null
+++ b/media/libaaudio/src/binding/aidl/aaudio/SharedRegion.aidl
@@ -0,0 +1,23 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package aaudio;
+
+parcelable SharedRegion {
+    int sharedMemoryIndex;
+    int offsetInBytes;
+    int sizeInBytes;
+}
diff --git a/media/libaaudio/src/binding/aidl/aaudio/StreamParameters.aidl b/media/libaaudio/src/binding/aidl/aaudio/StreamParameters.aidl
new file mode 100644
index 0000000..b7c4f70
--- /dev/null
+++ b/media/libaaudio/src/binding/aidl/aaudio/StreamParameters.aidl
@@ -0,0 +1,35 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package aaudio;
+
+import android.media.audio.common.AudioFormat;
+
+parcelable StreamParameters {
+    int                                       samplesPerFrame;  //      = AAUDIO_UNSPECIFIED;
+    int                                       sampleRate;  //           = AAUDIO_UNSPECIFIED;
+    int                                       deviceId;  //             = AAUDIO_UNSPECIFIED;
+    int /* aaudio_sharing_mode_t */           sharingMode;  //          = AAUDIO_SHARING_MODE_SHARED;
+    AudioFormat                               audioFormat;  //          = AUDIO_FORMAT_DEFAULT;
+    int /* aaudio_direction_t */              direction;  //            = AAUDIO_DIRECTION_OUTPUT;
+    int /* aaudio_usage_t */                  usage;  //                = AAUDIO_UNSPECIFIED;
+    int /* aaudio_content_type_t */           contentType;  //          = AAUDIO_UNSPECIFIED;
+    int /* aaudio_input_preset_t */           inputPreset;  //          = AAUDIO_UNSPECIFIED;
+    int                                       bufferCapacity;  //       = AAUDIO_UNSPECIFIED;
+    int /* aaudio_allowed_capture_policy_t */ allowedCapturePolicy;  // = AAUDIO_UNSPECIFIED;
+    int /* aaudio_session_id_t */             sessionId;  //            = AAUDIO_SESSION_ID_NONE;
+    boolean                                   isPrivacySensitive;  //   = false;
+}
diff --git a/media/libaaudio/src/binding/aidl/aaudio/StreamRequest.aidl b/media/libaaudio/src/binding/aidl/aaudio/StreamRequest.aidl
new file mode 100644
index 0000000..9bf4077
--- /dev/null
+++ b/media/libaaudio/src/binding/aidl/aaudio/StreamRequest.aidl
@@ -0,0 +1,27 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package aaudio;
+
+import aaudio.StreamParameters;
+
+parcelable StreamRequest {
+    StreamParameters    params;
+    int                 userId; // = (uid_t) -1;
+    int                 processId; // = (pid_t) -1;
+    boolean             sharingModeMatchRequired; // = false;
+    boolean             inService; // = false; // Stream opened by the AAudio service
+}
\ No newline at end of file
diff --git a/media/libaaudio/src/client/AudioEndpoint.cpp b/media/libaaudio/src/client/AudioEndpoint.cpp
index 06f66d3..0a19d17 100644
--- a/media/libaaudio/src/client/AudioEndpoint.cpp
+++ b/media/libaaudio/src/client/AudioEndpoint.cpp
@@ -137,7 +137,7 @@
         return AAUDIO_ERROR_INTERNAL;
     }
 
-    mUpCommandQueue = std::make_unique<FifoBuffer>(
+    mUpCommandQueue = std::make_unique<FifoBufferIndirect>(
             descriptor->bytesPerFrame,
             descriptor->capacityInFrames,
             descriptor->readCounterAddress,
@@ -166,7 +166,7 @@
                                   ? &mDataWriteCounter
                                   : descriptor->writeCounterAddress;
 
-    mDataQueue = std::make_unique<FifoBuffer>(
+    mDataQueue = std::make_unique<FifoBufferIndirect>(
             descriptor->bytesPerFrame,
             descriptor->capacityInFrames,
             readCounterAddress,
diff --git a/media/libaaudio/src/client/AudioEndpoint.h b/media/libaaudio/src/client/AudioEndpoint.h
index 484d917..4c8d60f 100644
--- a/media/libaaudio/src/client/AudioEndpoint.h
+++ b/media/libaaudio/src/client/AudioEndpoint.h
@@ -93,8 +93,8 @@
     void dump() const;
 
 private:
-    std::unique_ptr<android::FifoBuffer> mUpCommandQueue;
-    std::unique_ptr<android::FifoBuffer> mDataQueue;
+    std::unique_ptr<android::FifoBufferIndirect> mUpCommandQueue;
+    std::unique_ptr<android::FifoBufferIndirect> mDataQueue;
     bool                    mFreeRunning;
     android::fifo_counter_t mDataReadCounter; // only used if free-running
     android::fifo_counter_t mDataWriteCounter; // only used if free-running
diff --git a/media/libaaudio/src/client/AudioStreamInternal.cpp b/media/libaaudio/src/client/AudioStreamInternal.cpp
index 4520823..809c76e 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternal.cpp
@@ -34,7 +34,6 @@
 #include "AudioEndpointParcelable.h"
 #include "binding/AAudioStreamRequest.h"
 #include "binding/AAudioStreamConfiguration.h"
-#include "binding/IAAudioService.h"
 #include "binding/AAudioServiceMessage.h"
 #include "core/AudioGlobal.h"
 #include "core/AudioStreamBuilder.h"
@@ -76,6 +75,7 @@
 }
 
 AudioStreamInternal::~AudioStreamInternal() {
+    ALOGD("%s() %p called", __func__, this);
 }
 
 aaudio_result_t AudioStreamInternal::open(const AudioStreamBuilder &builder) {
@@ -211,10 +211,10 @@
         result = AAUDIO_ERROR_OUT_OF_RANGE;
         goto error;
     }
-    mFramesPerBurst = framesPerBurst; // only save good value
+    setFramesPerBurst(framesPerBurst); // only save good value
 
     mBufferCapacityInFrames = mEndpointDescriptor.dataQueueDescriptor.capacityInFrames;
-    if (mBufferCapacityInFrames < mFramesPerBurst
+    if (mBufferCapacityInFrames < getFramesPerBurst()
             || mBufferCapacityInFrames > MAX_BUFFER_CAPACITY_IN_FRAMES) {
         ALOGE("%s - bufferCapacity out of range = %d", __func__, mBufferCapacityInFrames);
         result = AAUDIO_ERROR_OUT_OF_RANGE;
@@ -239,7 +239,7 @@
 
         }
         if (mCallbackFrames == AAUDIO_UNSPECIFIED) {
-            mCallbackFrames = mFramesPerBurst;
+            mCallbackFrames = getFramesPerBurst();
         }
 
         const int32_t callbackBufferSize = mCallbackFrames * getBytesPerFrame();
@@ -271,21 +271,21 @@
     return result;
 
 error:
-    releaseCloseFinal();
+    safeReleaseClose();
     return result;
 }
 
 // This must be called under mStreamLock.
 aaudio_result_t AudioStreamInternal::release_l() {
     aaudio_result_t result = AAUDIO_OK;
-    ALOGV("%s(): mServiceStreamHandle = 0x%08X", __func__, mServiceStreamHandle);
+    ALOGD("%s(): mServiceStreamHandle = 0x%08X", __func__, mServiceStreamHandle);
     if (mServiceStreamHandle != AAUDIO_HANDLE_INVALID) {
         aaudio_stream_state_t currentState = getState();
         // Don't release a stream while it is running. Stop it first.
         // If DISCONNECTED then we should still try to stop in case the
         // error callback is still running.
         if (isActive() || currentState == AAUDIO_STREAM_STATE_DISCONNECTED) {
-            requestStop();
+            requestStop_l();
         }
 
         logReleaseBufferState();
@@ -331,7 +331,7 @@
  * The processing code will then save the current offset
  * between client and server and apply that to any position given to the app.
  */
-aaudio_result_t AudioStreamInternal::requestStart()
+aaudio_result_t AudioStreamInternal::requestStart_l()
 {
     int64_t startTime;
     if (mServiceStreamHandle == AAUDIO_HANDLE_INVALID) {
@@ -353,6 +353,8 @@
     // Clear any stale timestamps from the previous run.
     drainTimestampsFromService();
 
+    prepareBuffersForStart(); // tell subclasses to get ready
+
     aaudio_result_t result = mServiceInterface.startStream(mServiceStreamHandle);
     if (result == AAUDIO_ERROR_INVALID_HANDLE) {
         ALOGD("%s() INVALID_HANDLE, stream was probably stolen", __func__);
@@ -372,7 +374,7 @@
                               * AAUDIO_NANOS_PER_SECOND
                               / getSampleRate();
         mCallbackEnabled.store(true);
-        result = createThread(periodNanos, aaudio_callback_thread_proc, this);
+        result = createThread_l(periodNanos, aaudio_callback_thread_proc, this);
     }
     if (result != AAUDIO_OK) {
         setState(originalState);
@@ -398,33 +400,36 @@
 }
 
 // This must be called under mStreamLock.
-aaudio_result_t AudioStreamInternal::stopCallback()
+aaudio_result_t AudioStreamInternal::stopCallback_l()
 {
     if (isDataCallbackSet()
             && (isActive() || getState() == AAUDIO_STREAM_STATE_DISCONNECTED)) {
         mCallbackEnabled.store(false);
-        aaudio_result_t result = joinThread(NULL); // may temporarily unlock mStreamLock
+        aaudio_result_t result = joinThread_l(NULL); // may temporarily unlock mStreamLock
         if (result == AAUDIO_ERROR_INVALID_HANDLE) {
             ALOGD("%s() INVALID_HANDLE, stream was probably stolen", __func__);
             result = AAUDIO_OK;
         }
         return result;
     } else {
+        ALOGD("%s() skipped, isDataCallbackSet() = %d, isActive() = %d, getState()  = %d", __func__,
+            isDataCallbackSet(), isActive(), getState());
         return AAUDIO_OK;
     }
 }
 
-// This must be called under mStreamLock.
-aaudio_result_t AudioStreamInternal::requestStop() {
-    aaudio_result_t result = stopCallback();
+aaudio_result_t AudioStreamInternal::requestStop_l() {
+    aaudio_result_t result = stopCallback_l();
     if (result != AAUDIO_OK) {
+        ALOGW("%s() stop callback returned %d, returning early", __func__, result);
         return result;
     }
     // The stream may have been unlocked temporarily to let a callback finish
     // and the callback may have stopped the stream.
     // Check to make sure the stream still needs to be stopped.
-    // See also AudioStream::safeStop().
+    // See also AudioStream::safeStop_l().
     if (!(isActive() || getState() == AAUDIO_STREAM_STATE_DISCONNECTED)) {
+        ALOGD("%s() returning early, not active or disconnected", __func__);
         return AAUDIO_OK;
     }
 
@@ -755,9 +760,9 @@
 
 aaudio_result_t AudioStreamInternal::setBufferSize(int32_t requestedFrames) {
     int32_t adjustedFrames = requestedFrames;
-    const int32_t maximumSize = getBufferCapacity() - mFramesPerBurst;
+    const int32_t maximumSize = getBufferCapacity() - getFramesPerBurst();
     // Minimum size should be a multiple number of bursts.
-    const int32_t minimumSize = 1 * mFramesPerBurst;
+    const int32_t minimumSize = 1 * getFramesPerBurst();
 
     // Clip to minimum size so that rounding up will work better.
     adjustedFrames = std::max(minimumSize, adjustedFrames);
@@ -767,9 +772,9 @@
         adjustedFrames = maximumSize;
     } else {
         // Round to the next highest burst size.
-        int32_t numBursts = (adjustedFrames + mFramesPerBurst - 1) / mFramesPerBurst;
-        adjustedFrames = numBursts * mFramesPerBurst;
-        // Clip just in case maximumSize is not a multiple of mFramesPerBurst.
+        int32_t numBursts = (adjustedFrames + getFramesPerBurst() - 1) / getFramesPerBurst();
+        adjustedFrames = numBursts * getFramesPerBurst();
+        // Clip just in case maximumSize is not a multiple of getFramesPerBurst().
         adjustedFrames = std::min(maximumSize, adjustedFrames);
     }
 
@@ -804,15 +809,6 @@
     return mBufferCapacityInFrames;
 }
 
-int32_t AudioStreamInternal::getFramesPerBurst() const {
-    return mFramesPerBurst;
-}
-
-// This must be called under mStreamLock.
-aaudio_result_t AudioStreamInternal::joinThread(void** returnArg) {
-    return AudioStream::joinThread(returnArg, calculateReasonableTimeout(getFramesPerBurst()));
-}
-
 bool AudioStreamInternal::isClockModelInControl() const {
     return isActive() && mAudioEndpoint->isFreeRunning() && mClockModel.isRunning();
 }
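
The setBufferSize() change above rounds the requested size up to a whole number of bursts using getFramesPerBurst(); a standalone toy version of that arithmetic, for illustration only:

    #include <algorithm>
    #include <cstdint>

    // Mirrors the clamp-and-round logic in AudioStreamInternal::setBufferSize().
    int32_t roundToBursts(int32_t requestedFrames, int32_t framesPerBurst, int32_t capacityInFrames) {
        const int32_t maximumSize = capacityInFrames - framesPerBurst;
        const int32_t minimumSize = 1 * framesPerBurst;
        int32_t adjusted = std::max(minimumSize, requestedFrames);
        if (adjusted > maximumSize) {
            adjusted = maximumSize;
        } else {
            // Round up to the next whole burst, then clip in case maximumSize is not a multiple.
            const int32_t numBursts = (adjusted + framesPerBurst - 1) / framesPerBurst;
            adjusted = std::min(maximumSize, numBursts * framesPerBurst);
        }
        return adjusted;
    }
    // Example: requestedFrames = 1000, framesPerBurst = 192, capacityInFrames = 4800
    //          -> 6 bursts = 1152 frames.
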
diff --git a/media/libaaudio/src/client/AudioStreamInternal.h b/media/libaaudio/src/client/AudioStreamInternal.h
index 61591b3..fbe4c13 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.h
+++ b/media/libaaudio/src/client/AudioStreamInternal.h
@@ -20,7 +20,6 @@
 #include <stdint.h>
 #include <aaudio/AAudio.h>
 
-#include "binding/IAAudioService.h"
 #include "binding/AudioEndpointParcelable.h"
 #include "binding/AAudioServiceInterface.h"
 #include "client/IsochronousClockModel.h"
@@ -29,7 +28,6 @@
 #include "utility/AudioClock.h"
 
 using android::sp;
-using android::IAAudioService;
 
 namespace aaudio {
 
@@ -46,10 +44,6 @@
     AudioStreamInternal(AAudioServiceInterface  &serviceInterface, bool inService);
     virtual ~AudioStreamInternal();
 
-    aaudio_result_t requestStart() override;
-
-    aaudio_result_t requestStop() override;
-
     aaudio_result_t getTimestamp(clockid_t clockId,
                                        int64_t *framePosition,
                                        int64_t *timeNanoseconds) override;
@@ -58,16 +52,12 @@
 
     aaudio_result_t open(const AudioStreamBuilder &builder) override;
 
-    aaudio_result_t release_l() override;
-
     aaudio_result_t setBufferSize(int32_t requestedFrames) override;
 
     int32_t getBufferSize() const override;
 
     int32_t getBufferCapacity() const override;
 
-    int32_t getFramesPerBurst() const override;
-
     int32_t getXRunCount() const override {
         return mXRunCount;
     }
@@ -76,12 +66,9 @@
 
     aaudio_result_t unregisterThread() override;
 
-    aaudio_result_t joinThread(void** returnArg);
-
     // Called internally from 'C'
     virtual void *callbackLoop() = 0;
 
-
     bool isMMap() override {
         return true;
     }
@@ -100,6 +87,10 @@
     }
 
 protected:
+    aaudio_result_t requestStart_l() REQUIRES(mStreamLock) override;
+    aaudio_result_t requestStop_l() REQUIRES(mStreamLock) override;
+
+    aaudio_result_t release_l() REQUIRES(mStreamLock) override;
 
     aaudio_result_t processData(void *buffer,
                          int32_t numFrames,
@@ -121,9 +112,11 @@
 
     aaudio_result_t processCommands();
 
-    aaudio_result_t stopCallback();
+    aaudio_result_t stopCallback_l();
 
-    virtual void advanceClientToMatchServerPosition() = 0;
+    virtual void prepareBuffersForStart() {}
+
+    virtual void advanceClientToMatchServerPosition(int32_t serverMargin = 0) = 0;
 
     virtual void onFlushFromServer() {}
 
@@ -159,7 +152,6 @@
 
     aaudio_handle_t          mServiceStreamHandle; // opaque handle returned from service
 
-    int32_t                  mFramesPerBurst = MIN_FRAMES_PER_BURST; // frames per HAL transfer
     int32_t                  mXRunCount = 0;      // how many underrun events?
 
     // Offset from underlying frame position.
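
The requestStart_l()/requestStop_l() overrides above carry REQUIRES(mStreamLock) so Clang's -Wthread-safety analysis can verify that callers hold the stream lock. A minimal sketch of that pattern, assuming the usual android-base/thread_annotations.h macros and an annotated std::mutex (class and member names here are illustrative):

    #include <mutex>
    #include <android-base/thread_annotations.h>

    class Example {
    public:
        void stop() {
            std::lock_guard<std::mutex> lock(mLock);
            stop_l();                      // OK: the analysis can see mLock is held
        }
    private:
        std::mutex mLock;
        void stop_l() REQUIRES(mLock) {}   // callers must hold mLock
        int mState GUARDED_BY(mLock) = 0;  // reads and writes must also hold mLock
    };
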
diff --git a/media/libaaudio/src/client/AudioStreamInternalCapture.cpp b/media/libaaudio/src/client/AudioStreamInternalCapture.cpp
index 9fa2e40..5d311fc 100644
--- a/media/libaaudio/src/client/AudioStreamInternalCapture.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternalCapture.cpp
@@ -14,8 +14,6 @@
  * limitations under the License.
  */
 
-#define LOG_TAG (mInService ? "AudioStreamInternalCapture_Service" \
-                          : "AudioStreamInternalCapture_Client")
 //#define LOG_NDEBUG 0
 #include <utils/Log.h>
 
@@ -29,6 +27,14 @@
 #define ATRACE_TAG ATRACE_TAG_AUDIO
 #include <utils/Trace.h>
 
+// We do this after the #includes because if a header used ALOG,
+// it would fail on the reference to mInService.
+#undef LOG_TAG
+// This file is used in both the client and server processes.
+// Separate tags make it easier to tell the logs apart.
+#define LOG_TAG (mInService ? "AudioStreamInternalCapture_Service" \
+                          : "AudioStreamInternalCapture_Client")
+
 using android::WrappingBuffer;
 
 using namespace aaudio;
@@ -41,9 +47,9 @@
 
 AudioStreamInternalCapture::~AudioStreamInternalCapture() {}
 
-void AudioStreamInternalCapture::advanceClientToMatchServerPosition() {
+void AudioStreamInternalCapture::advanceClientToMatchServerPosition(int32_t serverMargin) {
     int64_t readCounter = mAudioEndpoint->getDataReadCounter();
-    int64_t writeCounter = mAudioEndpoint->getDataWriteCounter();
+    int64_t writeCounter = mAudioEndpoint->getDataWriteCounter() + serverMargin;
 
     // Bump offset so caller does not see the retrograde motion in getFramesRead().
     int64_t offset = readCounter - writeCounter;
@@ -143,7 +149,7 @@
                 // Calculate frame position based off of the readCounter because
                 // the writeCounter might have just advanced in the background,
                 // causing us to sleep until a later burst.
-                int64_t nextPosition = mAudioEndpoint->getDataReadCounter() + mFramesPerBurst;
+                int64_t nextPosition = mAudioEndpoint->getDataReadCounter() + getFramesPerBurst();
                 wakeTime = mClockModel.convertPositionToLatestTime(nextPosition);
             }
                 break;
diff --git a/media/libaaudio/src/client/AudioStreamInternalCapture.h b/media/libaaudio/src/client/AudioStreamInternalCapture.h
index 6436a53..251a7f2 100644
--- a/media/libaaudio/src/client/AudioStreamInternalCapture.h
+++ b/media/libaaudio/src/client/AudioStreamInternalCapture.h
@@ -23,7 +23,6 @@
 #include "client/AudioStreamInternal.h"
 
 using android::sp;
-using android::IAAudioService;
 
 namespace aaudio {
 
@@ -46,7 +45,7 @@
     }
 protected:
 
-    void advanceClientToMatchServerPosition() override;
+    void advanceClientToMatchServerPosition(int32_t serverMargin = 0) override;
 
 /**
  * Low level data processing that will not block. It will just read or write as much as it can.
diff --git a/media/libaaudio/src/client/AudioStreamInternalPlay.cpp b/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
index 1303daf..b81e5e4 100644
--- a/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
@@ -14,8 +14,6 @@
  * limitations under the License.
  */
 
-#define LOG_TAG (mInService ? "AudioStreamInternalPlay_Service" \
-                          : "AudioStreamInternalPlay_Client")
 //#define LOG_NDEBUG 0
 #include <utils/Log.h>
 
@@ -26,6 +24,15 @@
 #include "client/AudioStreamInternalPlay.h"
 #include "utility/AudioClock.h"
 
+// We do this after the #includes because if a header used ALOG,
+// it would fail on the reference to mInService.
+#undef LOG_TAG
+// This file is used in both the client and server processes.
+// Separate tags make it easier to tell the logs apart.
+#define LOG_TAG (mInService ? "AudioStreamInternalPlay_Service" \
+                            : "AudioStreamInternalPlay_Client")
+
+using android::status_t;
 using android::WrappingBuffer;
 
 using namespace aaudio;
@@ -49,7 +56,7 @@
                              getDeviceChannelCount());
 
         if (result != AAUDIO_OK) {
-            releaseCloseFinal();
+            safeReleaseClose();
         }
         // Sample rate is constrained to common values by now and should not overflow.
         int32_t numFrames = kRampMSec * getSampleRate() / AAUDIO_MILLIS_PER_SECOND;
@@ -59,9 +66,9 @@
 }
 
 // This must be called under mStreamLock.
-aaudio_result_t AudioStreamInternalPlay::requestPause()
+aaudio_result_t AudioStreamInternalPlay::requestPause_l()
 {
-    aaudio_result_t result = stopCallback();
+    aaudio_result_t result = stopCallback_l();
     if (result != AAUDIO_OK) {
         return result;
     }
@@ -76,7 +83,7 @@
     return mServiceInterface.pauseStream(mServiceStreamHandle);
 }
 
-aaudio_result_t AudioStreamInternalPlay::requestFlush() {
+aaudio_result_t AudioStreamInternalPlay::requestFlush_l() {
     if (mServiceStreamHandle == AAUDIO_HANDLE_INVALID) {
         ALOGW("%s() mServiceStreamHandle invalid", __func__);
         return AAUDIO_ERROR_INVALID_STATE;
@@ -86,8 +93,13 @@
     return mServiceInterface.flushStream(mServiceStreamHandle);
 }
 
-void AudioStreamInternalPlay::advanceClientToMatchServerPosition() {
-    int64_t readCounter = mAudioEndpoint->getDataReadCounter();
+void AudioStreamInternalPlay::prepareBuffersForStart() {
+    // Prevent stale data from being played.
+    mAudioEndpoint->eraseDataMemory();
+}
+
+void AudioStreamInternalPlay::advanceClientToMatchServerPosition(int32_t serverMargin) {
+    int64_t readCounter = mAudioEndpoint->getDataReadCounter() + serverMargin;
     int64_t writeCounter = mAudioEndpoint->getDataWriteCounter();
 
     // Bump offset so caller does not see the retrograde motion in getFramesRead().
@@ -145,7 +157,9 @@
     if (mNeedCatchUp.isRequested()) {
         // Catch an MMAP pointer that is already advancing.
         // This will avoid initial underruns caused by a slow cold start.
-        advanceClientToMatchServerPosition();
+        // We add a one-burst margin in case the DSP advances before we can write the data.
+        // This can help prevent the beginning of the stream from being skipped.
+        advanceClientToMatchServerPosition(getFramesPerBurst());
         mNeedCatchUp.acknowledge();
     }
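
The one-burst margin above shifts the apparent read position forward before the client/server offset is computed, so the first writes land ahead of a DSP pointer that may already be advancing. A toy version of the offset arithmetic, with illustrative numbers:

    #include <cstdint>

    // Offset applied to positions reported to the app after catching up to the server,
    // so the caller never observes retrograde motion in getFramesRead().
    int64_t catchUpOffset(int64_t readCounter, int64_t writeCounter, int32_t serverMargin) {
        return (readCounter + serverMargin) - writeCounter;
    }
    // Example: readCounter = 960, writeCounter = 0, serverMargin = framesPerBurst = 192
    //          -> offset = 1152 frames.
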
 
diff --git a/media/libaaudio/src/client/AudioStreamInternalPlay.h b/media/libaaudio/src/client/AudioStreamInternalPlay.h
index 2e93157..03c957d 100644
--- a/media/libaaudio/src/client/AudioStreamInternalPlay.h
+++ b/media/libaaudio/src/client/AudioStreamInternalPlay.h
@@ -25,7 +25,6 @@
 #include "client/AudioStreamInternal.h"
 
 using android::sp;
-using android::IAAudioService;
 
 namespace aaudio {
 
@@ -36,9 +35,9 @@
 
     aaudio_result_t open(const AudioStreamBuilder &builder) override;
 
-    aaudio_result_t requestPause() override;
+    aaudio_result_t requestPause_l() override;
 
-    aaudio_result_t requestFlush() override;
+    aaudio_result_t requestFlush_l() override;
 
     bool isFlushSupported() const override {
         // Only implement FLUSH for OUTPUT streams.
@@ -65,7 +64,9 @@
 
 protected:
 
-    void advanceClientToMatchServerPosition() override;
+    void prepareBuffersForStart() override;
+
+    void advanceClientToMatchServerPosition(int32_t serverMargin = 0) override;
 
     void onFlushFromServer() override;
 
diff --git a/media/libaaudio/src/core/AAudioAudio.cpp b/media/libaaudio/src/core/AAudioAudio.cpp
index 8965875..cfa7221 100644
--- a/media/libaaudio/src/core/AAudioAudio.cpp
+++ b/media/libaaudio/src/core/AAudioAudio.cpp
@@ -255,17 +255,16 @@
     if (audioStream != nullptr) {
         aaudio_stream_id_t id = audioStream->getId();
         ALOGD("%s(s#%u) called ---------------", __func__, id);
-        result = audioStream->safeRelease();
-        // safeRelease will only fail if called illegally, for example, from a callback.
+        result = audioStream->safeReleaseClose();
+        // safeReleaseClose will only fail if called illegally, for example, from a callback.
         // That would result in deleting an active stream, which would cause a crash.
         if (result != AAUDIO_OK) {
             ALOGW("%s(s#%u) failed. Close it from another thread.",
                   __func__, id);
         } else {
             audioStream->unregisterPlayerBase();
-             // Mark CLOSED to keep destructors from asserting.
-            audioStream->closeFinal();
-            delete audioStream;
+            // Allow the stream to be deleted.
+            AudioStreamBuilder::stopUsingStream(audioStream);
         }
         ALOGD("%s(s#%u) returned %d ---------", __func__, id, result);
     }
diff --git a/media/libaaudio/src/core/AudioStream.cpp b/media/libaaudio/src/core/AudioStream.cpp
index f5c75ca..57c4c16 100644
--- a/media/libaaudio/src/core/AudioStream.cpp
+++ b/media/libaaudio/src/core/AudioStream.cpp
@@ -39,15 +39,23 @@
 }
 
 AudioStream::AudioStream()
-        : mPlayerBase(new MyPlayerBase(this))
+        : mPlayerBase(new MyPlayerBase())
         , mStreamId(AAudio_getNextStreamId())
         {
-    // mThread is a pthread_t of unknown size so we need memset.
-    memset(&mThread, 0, sizeof(mThread));
     setPeriodNanoseconds(0);
 }
 
 AudioStream::~AudioStream() {
+    // Please preserve these logs because there have been several bugs related to
+    // AudioStream deletion and late callbacks.
+    ALOGD("%s(s#%u) mPlayerBase strongCount = %d",
+            __func__, getId(), mPlayerBase->getStrongCount());
+
+    ALOGE_IF(pthread_equal(pthread_self(), mThread),
+            "%s() destructor running in callback", __func__);
+
+    ALOGE_IF(mHasThread, "%s() callback thread never join()ed", __func__);
+
     // If the stream is deleted when OPEN or in use then audio resources will leak.
     // This would indicate an internal error. So we want to find this ASAP.
     LOG_ALWAYS_FATAL_IF(!(getState() == AAUDIO_STREAM_STATE_CLOSED
@@ -55,8 +63,6 @@
                           || getState() == AAUDIO_STREAM_STATE_DISCONNECTED),
                         "~AudioStream() - still in use, state = %s",
                         AudioGlobal_convertStreamStateToText(getState()));
-
-    mPlayerBase->clearParentReference(); // remove reference to this AudioStream
 }
 
 aaudio_result_t AudioStream::open(const AudioStreamBuilder& builder)
@@ -162,7 +168,7 @@
             return AAUDIO_ERROR_INVALID_STATE;
     }
 
-    aaudio_result_t result = requestStart();
+    aaudio_result_t result = requestStart_l();
     if (result == AAUDIO_OK) {
         // We only call this for logging in "dumpsys audio". So ignore return code.
         (void) mPlayerBase->start();
@@ -212,7 +218,7 @@
             return AAUDIO_ERROR_INVALID_STATE;
     }
 
-    aaudio_result_t result = requestPause();
+    aaudio_result_t result = requestPause_l();
     if (result == AAUDIO_OK) {
         // We only call this for logging in "dumpsys audio". So ignore return code.
         (void) mPlayerBase->pause();
@@ -237,12 +243,12 @@
         return result;
     }
 
-    return requestFlush();
+    return requestFlush_l();
 }
 
 aaudio_result_t AudioStream::systemStopFromCallback() {
     std::lock_guard<std::mutex> lock(mStreamLock);
-    aaudio_result_t result = safeStop();
+    aaudio_result_t result = safeStop_l();
     if (result == AAUDIO_OK) {
         // We only call this for logging in "dumpsys audio". So ignore return code.
         (void) mPlayerBase->stop();
@@ -256,7 +262,7 @@
         ALOGE("stream cannot be stopped by calling from a callback!");
         return AAUDIO_ERROR_INVALID_STATE;
     }
-    aaudio_result_t result = safeStop();
+    aaudio_result_t result = safeStop_l();
     if (result == AAUDIO_OK) {
         // We only call this for logging in "dumpsys audio". So ignore return code.
         (void) mPlayerBase->stop();
@@ -264,8 +270,7 @@
     return result;
 }
 
-// This must be called under mStreamLock.
-aaudio_result_t AudioStream::safeStop() {
+aaudio_result_t AudioStream::safeStop_l() {
 
     switch (getState()) {
         // Proceed with stopping.
@@ -297,26 +302,47 @@
             return AAUDIO_ERROR_INVALID_STATE;
     }
 
-    return requestStop();
+    return requestStop_l();
 }
 
 aaudio_result_t AudioStream::safeRelease() {
-    // This get temporarily unlocked in the release() when joining callback threads.
+    // This may get temporarily unlocked in the MMAP release() when joining callback threads.
     std::lock_guard<std::mutex> lock(mStreamLock);
     if (collidesWithCallback()) {
         ALOGE("%s cannot be called from a callback!", __func__);
         return AAUDIO_ERROR_INVALID_STATE;
     }
-    if (getState() == AAUDIO_STREAM_STATE_CLOSING) {
+    if (getState() == AAUDIO_STREAM_STATE_CLOSING) { // already released?
         return AAUDIO_OK;
     }
     return release_l();
 }
 
+aaudio_result_t AudioStream::safeReleaseClose() {
+    // This may get temporarily unlocked in the MMAP release() when joining callback threads.
+    std::lock_guard<std::mutex> lock(mStreamLock);
+    if (collidesWithCallback()) {
+        ALOGE("%s cannot be called from a callback!", __func__);
+        return AAUDIO_ERROR_INVALID_STATE;
+    }
+    releaseCloseFinal_l();
+    return AAUDIO_OK;
+}
+
+aaudio_result_t AudioStream::safeReleaseCloseFromCallback() {
+    // This may get temporarily unlocked in the MMAP release() when joining callback threads.
+    std::lock_guard<std::mutex> lock(mStreamLock);
+    releaseCloseFinal_l();
+    return AAUDIO_OK;
+}
+
 void AudioStream::setState(aaudio_stream_state_t state) {
     ALOGD("%s(s#%d) from %d to %d", __func__, getId(), mState, state);
+    if (state == mState) {
+        return; // no change
+    }
     // Track transition to DISCONNECTED state.
-    if (state == AAUDIO_STREAM_STATE_DISCONNECTED && mState != state) {
+    if (state == AAUDIO_STREAM_STATE_DISCONNECTED) {
         android::mediametrics::LogItem(mMetricsId)
                 .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_DISCONNECT)
                 .set(AMEDIAMETRICS_PROP_STATE, AudioGlobal_convertStreamStateToText(getState()))
@@ -324,18 +350,18 @@
     }
     // CLOSED is a final state
     if (mState == AAUDIO_STREAM_STATE_CLOSED) {
-        ALOGE("%s(%d) tried to set to %d but already CLOSED", __func__, getId(), state);
+        ALOGW("%s(%d) tried to set to %d but already CLOSED", __func__, getId(), state);
 
     // Once CLOSING, we can only move to CLOSED state.
     } else if (mState == AAUDIO_STREAM_STATE_CLOSING
                && state != AAUDIO_STREAM_STATE_CLOSED) {
-        ALOGE("%s(%d) tried to set to %d but already CLOSING", __func__, getId(), state);
+        ALOGW("%s(%d) tried to set to %d but already CLOSING", __func__, getId(), state);
 
     // Once DISCONNECTED, we can only move to CLOSING or CLOSED state.
     } else if (mState == AAUDIO_STREAM_STATE_DISCONNECTED
                && !(state == AAUDIO_STREAM_STATE_CLOSING
                    || state == AAUDIO_STREAM_STATE_CLOSED)) {
-        ALOGE("%s(%d) tried to set to %d but already DISCONNECTED", __func__, getId(), state);
+        ALOGW("%s(%d) tried to set to %d but already DISCONNECTED", __func__, getId(), state);
 
     } else {
         mState = state;
@@ -387,21 +413,28 @@
     return procResult;
 }
 
-// This is the entry point for the new thread created by createThread().
+
+// This is the entry point for the new thread created by createThread_l().
 // It converts the 'C' function call to a C++ method call.
 static void* AudioStream_internalThreadProc(void* threadArg) {
     AudioStream *audioStream = (AudioStream *) threadArg;
-    return audioStream->wrapUserThread();
+    // Prevent the stream from being deleted while being used.
+    // This is just for extra safety. It is probably not needed because
+    // this callback should be joined before the stream is closed.
+    android::sp<AudioStream> protectedStream(audioStream);
+    // Balance the incStrong() in createThread_l().
+    protectedStream->decStrong(nullptr);
+    return protectedStream->wrapUserThread();
 }
 
 // This is not exposed in the API.
 // But it is still used internally to implement callbacks for MMAP mode.
-aaudio_result_t AudioStream::createThread(int64_t periodNanoseconds,
-                                     aaudio_audio_thread_proc_t threadProc,
-                                     void* threadArg)
+aaudio_result_t AudioStream::createThread_l(int64_t periodNanoseconds,
+                                            aaudio_audio_thread_proc_t threadProc,
+                                            void* threadArg)
 {
     if (mHasThread) {
-        ALOGE("createThread() - mHasThread already true");
+        ALOGE("%s() - mHasThread already true", __func__);
         return AAUDIO_ERROR_INVALID_STATE;
     }
     if (threadProc == nullptr) {
@@ -411,10 +444,14 @@
     mThreadProc = threadProc;
     mThreadArg = threadArg;
     setPeriodNanoseconds(periodNanoseconds);
+    // Prevent this object from getting deleted before the thread has a chance to create
+    // its strong pointer. Assume the thread will call decStrong().
+    this->incStrong(nullptr);
     int err = pthread_create(&mThread, nullptr, AudioStream_internalThreadProc, this);
     if (err != 0) {
         android::status_t status = -errno;
-        ALOGE("createThread() - pthread_create() failed, %d", status);
+        ALOGE("%s() - pthread_create() failed, %d", __func__, status);
+        this->decStrong(nullptr); // Because the thread won't do it.
         return AAudioConvert_androidToAAudioResult(status);
     } else {
         // TODO Use AAudioThread or maybe AndroidThread
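
The incStrong()/decStrong() pair above is a reference-count handoff: createThread_l() takes a strong reference on behalf of the thread it is about to spawn, and AudioStream_internalThreadProc() adopts that reference into a scoped sp<> before doing any work, so the stream cannot be deleted in the window between pthread_create() and the first instruction of the new thread. A minimal sketch of the same pattern, assuming any android::RefBase-derived class (Worker and startThread() are illustrative names, not part of this change):

#include <pthread.h>
#include <utils/RefBase.h>
#include <utils/StrongPointer.h>

class Worker : public android::RefBase {
public:
    // Take a reference for the thread before it exists, so the object
    // cannot be destroyed before the thread gets a chance to run.
    bool startThread() {
        incStrong(nullptr);
        pthread_t thread;
        int err = pthread_create(&thread, nullptr, &Worker::entry, this);
        if (err != 0) {
            decStrong(nullptr);  // The thread will never balance it.
            return false;
        }
        return true;             // join/detach handling omitted for brevity
    }

private:
    static void* entry(void* arg) {
        // Adopt the reference taken in startThread(): wrap in an sp<> first,
        // then drop the extra count so the sp<> is the only owner here.
        android::sp<Worker> self(static_cast<Worker*>(arg));
        self->decStrong(nullptr);
        self->run();
        return nullptr;
    }

    void run() { /* thread body */ }
};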
@@ -434,36 +471,39 @@
     }
 }
 
+aaudio_result_t AudioStream::joinThread(void** returnArg) {
+    // This may get temporarily unlocked in the MMAP release() when joining callback threads.
+    std::lock_guard<std::mutex> lock(mStreamLock);
+    return joinThread_l(returnArg);
+}
+
 // This must be called under mStreamLock.
-aaudio_result_t AudioStream::joinThread(void** returnArg, int64_t timeoutNanoseconds __unused)
-{
+aaudio_result_t AudioStream::joinThread_l(void** returnArg) {
     if (!mHasThread) {
-        ALOGE("joinThread() - but has no thread");
+        ALOGD("%s() - but has no thread", __func__);
         return AAUDIO_ERROR_INVALID_STATE;
     }
     aaudio_result_t result = AAUDIO_OK;
     // If the callback is stopping the stream because the app passed back STOP
     // then we don't need to join(). The thread is already about to exit.
-    if (pthread_self() != mThread) {
+    if (!pthread_equal(pthread_self(), mThread)) {
         // Called from an app thread. Not the callback.
         // Unlock because the callback may be trying to stop the stream but is blocked.
         mStreamLock.unlock();
-#if 0
-        // TODO implement equivalent of pthread_timedjoin_np()
-        struct timespec abstime;
-        int err = pthread_timedjoin_np(mThread, returnArg, &abstime);
-#else
         int err = pthread_join(mThread, returnArg);
-#endif
         mStreamLock.lock();
         if (err) {
             ALOGE("%s() pthread_join() returns err = %d", __func__, err);
             result = AAudioConvert_androidToAAudioResult(-err);
+        } else {
+            ALOGD("%s() pthread_join succeeded", __func__);
+            // This must be set false so that the callback thread can be created
+            // when the stream is restarted.
+            mHasThread = false;
         }
+    } else {
+        ALOGD("%s() pthread_join() called on itself!", __func__);
     }
-    // This must be set false so that the callback thread can be created
-    // when the stream is restarted.
-    mHasThread = false;
     return (result != AAUDIO_OK) ? result : mThreadRegistrationResult;
 }
 
@@ -520,11 +560,18 @@
 }
 
 #if AAUDIO_USE_VOLUME_SHAPER
-android::media::VolumeShaper::Status AudioStream::applyVolumeShaper(
-        const android::media::VolumeShaper::Configuration& configuration __unused,
-        const android::media::VolumeShaper::Operation& operation __unused) {
-    ALOGW("applyVolumeShaper() is not supported");
-    return android::media::VolumeShaper::Status::ok();
+::android::binder::Status AudioStream::MyPlayerBase::applyVolumeShaper(
+        const ::android::media::VolumeShaper::Configuration& configuration,
+        const ::android::media::VolumeShaper::Operation& operation) {
+    android::sp<AudioStream> audioStream;
+    {
+        std::lock_guard<std::mutex> lock(mParentLock);
+        audioStream = mParent.promote();
+    }
+    if (audioStream) {
+        return audioStream->applyVolumeShaper(configuration, operation);
+    }
+    return android::NO_ERROR;
 }
 #endif
 
@@ -534,26 +581,36 @@
     doSetVolume(); // apply this change
 }
 
-AudioStream::MyPlayerBase::MyPlayerBase(AudioStream *parent) : mParent(parent) {
-}
-
-AudioStream::MyPlayerBase::~MyPlayerBase() {
-}
-
-void AudioStream::MyPlayerBase::registerWithAudioManager() {
+void AudioStream::MyPlayerBase::registerWithAudioManager(const android::sp<AudioStream>& parent) {
+    std::lock_guard<std::mutex> lock(mParentLock);
+    mParent = parent;
     if (!mRegistered) {
-        init(android::PLAYER_TYPE_AAUDIO, AAudioConvert_usageToInternal(mParent->getUsage()));
+        init(android::PLAYER_TYPE_AAUDIO, AAudioConvert_usageToInternal(parent->getUsage()));
         mRegistered = true;
     }
 }
 
 void AudioStream::MyPlayerBase::unregisterWithAudioManager() {
+    std::lock_guard<std::mutex> lock(mParentLock);
     if (mRegistered) {
         baseDestroy();
         mRegistered = false;
     }
 }
 
+android::status_t AudioStream::MyPlayerBase::playerSetVolume() {
+    android::sp<AudioStream> audioStream;
+    {
+        std::lock_guard<std::mutex> lock(mParentLock);
+        audioStream = mParent.promote();
+    }
+    if (audioStream) {
+        // No pan and only left volume is taken into account from the IPlayer interface
+        audioStream->setDuckAndMuteVolume(mVolumeMultiplierL  /* * mPanMultiplierL */);
+    }
+    return android::NO_ERROR;
+}
+
 void AudioStream::MyPlayerBase::destroy() {
     unregisterWithAudioManager();
 }
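
MyPlayerBase::applyVolumeShaper() and MyPlayerBase::playerSetVolume() above both follow the same idiom: copy the weak pointer out under mParentLock, promote() it, and only call into the stream if the promotion succeeded, so a dying AudioStream is never touched once its destruction has begun. A condensed sketch of that idiom with placeholder names (Parent and Child are not AAudio classes):

#include <mutex>
#include <utils/RefBase.h>
#include <utils/StrongPointer.h>

class Parent : public android::RefBase {
public:
    void doWork() { /* ... */ }
};

class Child {
public:
    void setParent(const android::sp<Parent>& parent) {
        std::lock_guard<std::mutex> lock(mLock);
        mParent = parent;                // store only a weak reference
    }

    void callParentIfAlive() {
        android::sp<Parent> parent;
        {
            std::lock_guard<std::mutex> lock(mLock);
            parent = mParent.promote();  // nullptr if the Parent was destroyed
        }
        if (parent != nullptr) {
            parent->doWork();            // call outside the lock
        }
    }

private:
    std::mutex mLock;
    android::wp<Parent> mParent;
};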
diff --git a/media/libaaudio/src/core/AudioStream.h b/media/libaaudio/src/core/AudioStream.h
index fb71c36..510ead8 100644
--- a/media/libaaudio/src/core/AudioStream.h
+++ b/media/libaaudio/src/core/AudioStream.h
@@ -20,13 +20,17 @@
 #include <atomic>
 #include <mutex>
 #include <stdint.h>
-#include <aaudio/AAudio.h>
+
+#include <android-base/thread_annotations.h>
 #include <binder/IServiceManager.h>
 #include <binder/Status.h>
 #include <utils/StrongPointer.h>
 
-#include "media/VolumeShaper.h"
-#include "media/PlayerBase.h"
+#include <aaudio/AAudio.h>
+#include <media/AudioSystem.h>
+#include <media/PlayerBase.h>
+#include <media/VolumeShaper.h>
+
 #include "utility/AAudioUtilities.h"
 #include "utility/MonotonicCounter.h"
 
@@ -45,7 +49,8 @@
 /**
  * AAudio audio stream.
  */
-class AudioStream {
+// By extending AudioDeviceCallback, we also inherit from RefBase.
+class AudioStream : public android::AudioSystem::AudioDeviceCallback {
 public:
 
     AudioStream();
@@ -54,11 +59,6 @@
 
 protected:
 
-    /* Asynchronous requests.
-     * Use waitForStateChange() to wait for completion.
-     */
-    virtual aaudio_result_t requestStart() = 0;
-
     /**
      * Check the state to see if Pause is currently legal.
      *
@@ -77,18 +77,22 @@
         return false;
     }
 
-    virtual aaudio_result_t requestPause()
-    {
+    /* Asynchronous requests.
+     * Use waitForStateChange() to wait for completion.
+     */
+    virtual aaudio_result_t requestStart_l() REQUIRES(mStreamLock) = 0;
+
+    virtual aaudio_result_t requestPause_l() REQUIRES(mStreamLock) {
         // Only implement this for OUTPUT streams.
         return AAUDIO_ERROR_UNIMPLEMENTED;
     }
 
-    virtual aaudio_result_t requestFlush() {
+    virtual aaudio_result_t requestFlush_l() REQUIRES(mStreamLock) {
         // Only implement this for OUTPUT streams.
         return AAUDIO_ERROR_UNIMPLEMENTED;
     }
 
-    virtual aaudio_result_t requestStop() = 0;
+    virtual aaudio_result_t requestStop_l() REQUIRES(mStreamLock) = 0;
 
 public:
     virtual aaudio_result_t getTimestamp(clockid_t clockId,
@@ -117,33 +121,40 @@
     virtual void logOpen();
     void logReleaseBufferState();
 
+    /* Note about naming for "release" and "close" related methods.
+     *
+     * These names are intended to match the public AAudio API.
+     * The original AAudio API had an AAudioStream_close() function that
+     * both released the hardware and deleted the stream. That was awkward
+     * because apps want to release the HW ASAP but are not in a rush to delete
+     * the stream object. So in R we added an AAudioStream_release() function
+     * that just releases the hardware.
+     * The AAudioStream_close() method releases if needed and then closes.
+     */
+
+protected:
     /**
      * Free any hardware or system resources from the open() call.
      * It is safe to call release_l() multiple times.
      */
-    virtual aaudio_result_t release_l() {
+    virtual aaudio_result_t release_l() REQUIRES(mStreamLock) {
         setState(AAUDIO_STREAM_STATE_CLOSING);
         return AAUDIO_OK;
     }
 
-    aaudio_result_t closeFinal() {
+    /**
+     * Free any resources not already freed by release_l().
+     * Assume release_l() already called.
+     */
+    virtual void close_l() REQUIRES(mStreamLock) {
+        // Releasing the stream will set the state to CLOSING.
+        assert(getState() == AAUDIO_STREAM_STATE_CLOSING);
+        // setState() prevents a transition from CLOSING to any state other than CLOSED.
         // State is checked by destructor.
         setState(AAUDIO_STREAM_STATE_CLOSED);
-        return AAUDIO_OK;
     }
 
-    /**
-     * Release then close the stream.
-     * @return AAUDIO_OK or negative error.
-     */
-    aaudio_result_t releaseCloseFinal() {
-        aaudio_result_t result = release_l(); // TODO review locking
-        if (result == AAUDIO_OK) {
-          result = closeFinal();
-        }
-        return result;
-    }
-
+public:
     // This is only used to identify a stream in the logs without
     // revealing any pointers.
     aaudio_stream_id_t getId() {
@@ -152,11 +163,11 @@
 
     virtual aaudio_result_t setBufferSize(int32_t requestedFrames) = 0;
 
-    virtual aaudio_result_t createThread(int64_t periodNanoseconds,
-                                       aaudio_audio_thread_proc_t threadProc,
-                                       void *threadArg);
+    virtual aaudio_result_t createThread_l(int64_t periodNanoseconds,
+                                           aaudio_audio_thread_proc_t threadProc,
+                                           void *threadArg);
 
-    aaudio_result_t joinThread(void **returnArg, int64_t timeoutNanoseconds);
+    aaudio_result_t joinThread(void **returnArg);
 
     virtual aaudio_result_t registerThread() {
         return AAUDIO_OK;
@@ -183,11 +194,11 @@
     }
 
     virtual int32_t getBufferCapacity() const {
-        return AAUDIO_ERROR_UNIMPLEMENTED;
+        return mBufferCapacity;
     }
 
     virtual int32_t getFramesPerBurst() const {
-        return AAUDIO_ERROR_UNIMPLEMENTED;
+        return mFramesPerBurst;
     }
 
     virtual int32_t getXRunCount() const {
@@ -328,6 +339,10 @@
      */
     bool collidesWithCallback() const;
 
+    // Implement AudioDeviceCallback
+    void onAudioDeviceUpdate(audio_io_handle_t audioIo,
+            audio_port_handle_t deviceId) override {};
+
     // ============== I/O ===========================
     // A Stream will only implement read() or write() depending on its direction.
     virtual aaudio_result_t write(const void *buffer __unused,
@@ -366,7 +381,7 @@
      */
     void registerPlayerBase() {
         if (getDirection() == AAUDIO_DIRECTION_OUTPUT) {
-            mPlayerBase->registerWithAudioManager();
+            mPlayerBase->registerWithAudioManager(this);
         }
     }
 
@@ -395,21 +410,35 @@
      */
     aaudio_result_t systemStopFromCallback();
 
+    /**
+     * Safely RELEASE a stream after taking mStreamLock and checking
+     * to make sure we are not being called from a callback.
+     * @return AAUDIO_OK or a negative error
+     */
     aaudio_result_t safeRelease();
 
+    /**
+     * Safely RELEASE and CLOSE a stream after taking mStreamLock and checking
+     * to make sure we are not being called from a callback.
+     * @return AAUDIO_OK or a negative error
+     */
+    aaudio_result_t safeReleaseClose();
+
+    aaudio_result_t safeReleaseCloseFromCallback();
+
 protected:
 
     // PlayerBase allows the system to control the stream volume.
     class MyPlayerBase : public android::PlayerBase {
     public:
-        explicit MyPlayerBase(AudioStream *parent);
+        MyPlayerBase() {};
 
-        virtual ~MyPlayerBase();
+        virtual ~MyPlayerBase() = default;
 
         /**
          * Register for volume changes and remote control.
          */
-        void registerWithAudioManager();
+        void registerWithAudioManager(const android::sp<AudioStream>& parent);
 
         /**
          * UnRegister.
@@ -421,8 +450,6 @@
          */
         void destroy() override;
 
-        void clearParentReference() { mParent = nullptr; }
-
         // Just a stub. The ability to start audio through PlayerBase is being deprecated.
         android::status_t playerStart() override {
             return android::NO_ERROR;
@@ -438,18 +465,10 @@
             return android::NO_ERROR;
         }
 
-        android::status_t playerSetVolume() override {
-            // No pan and only left volume is taken into account from IPLayer interface
-            mParent->setDuckAndMuteVolume(mVolumeMultiplierL  /* * mPanMultiplierL */);
-            return android::NO_ERROR;
-        }
+        android::status_t playerSetVolume() override;
 
 #if AAUDIO_USE_VOLUME_SHAPER
-        ::android::binder::Status applyVolumeShaper(
-                const ::android::media::VolumeShaper::Configuration& configuration,
-                const ::android::media::VolumeShaper::Operation& operation) {
-            return mParent->applyVolumeShaper(configuration, operation);
-        }
+        ::android::binder::Status applyVolumeShaper(
+                const ::android::media::VolumeShaper::Configuration& configuration,
+                const ::android::media::VolumeShaper::Operation& operation);
 #endif
 
         aaudio_result_t getResult() {
@@ -457,9 +476,11 @@
         }
 
     private:
-        AudioStream          *mParent;
-        aaudio_result_t       mResult = AAUDIO_OK;
-        bool                  mRegistered = false;
+        // Use a weak pointer so the AudioStream can be deleted.
+        std::mutex               mParentLock;
+        android::wp<AudioStream> mParent GUARDED_BY(mParentLock);
+        aaudio_result_t          mResult = AAUDIO_OK;
+        bool                     mRegistered = false;
     };
 
     /**
@@ -470,30 +491,32 @@
         mSampleRate = sampleRate;
     }
 
-    /**
-     * This should not be called after the open() call.
-     */
+    // This should not be called after the open() call.
     void setSamplesPerFrame(int32_t samplesPerFrame) {
         mSamplesPerFrame = samplesPerFrame;
     }
 
-    /**
-     * This should not be called after the open() call.
-     */
+    // This should not be called after the open() call.
+    void setFramesPerBurst(int32_t framesPerBurst) {
+        mFramesPerBurst = framesPerBurst;
+    }
+
+    // This should not be called after the open() call.
+    void setBufferCapacity(int32_t bufferCapacity) {
+        mBufferCapacity = bufferCapacity;
+    }
+
+    // This should not be called after the open() call.
     void setSharingMode(aaudio_sharing_mode_t sharingMode) {
         mSharingMode = sharingMode;
     }
 
-    /**
-     * This should not be called after the open() call.
-     */
+    // This should not be called after the open() call.
     void setFormat(audio_format_t format) {
         mFormat = format;
     }
 
-    /**
-     * This should not be called after the open() call.
-     */
+    // This should not be called after the open() call.
     void setDeviceFormat(audio_format_t format) {
         mDeviceFormat = format;
     }
@@ -508,10 +531,13 @@
         mDeviceId = deviceId;
     }
 
+    // This should not be called after the open() call.
     void setSessionId(int32_t sessionId) {
         mSessionId = sessionId;
     }
 
+    aaudio_result_t joinThread_l(void **returnArg) REQUIRES(mStreamLock);
+
     std::atomic<bool>    mCallbackEnabled{false};
 
     float                mDuckAndMuteVolume = 1.0f;
@@ -578,11 +604,22 @@
 
     std::string mMetricsId; // set once during open()
 
+    std::mutex                 mStreamLock;
+
 private:
 
-    aaudio_result_t safeStop();
+    aaudio_result_t safeStop_l() REQUIRES(mStreamLock);
 
-    std::mutex                 mStreamLock;
+    /**
+     * Release then close the stream.
+     */
+    void releaseCloseFinal_l() REQUIRES(mStreamLock) {
+        if (getState() != AAUDIO_STREAM_STATE_CLOSING) { // not already released?
+            // Ignore result and keep closing.
+            (void) release_l();
+        }
+        close_l();
+    }
 
     const android::sp<MyPlayerBase>   mPlayerBase;
 
@@ -595,6 +632,8 @@
     audio_format_t              mFormat = AUDIO_FORMAT_DEFAULT;
     aaudio_stream_state_t       mState = AAUDIO_STREAM_STATE_UNINITIALIZED;
     aaudio_performance_mode_t   mPerformanceMode = AAUDIO_PERFORMANCE_MODE_NONE;
+    int32_t                     mFramesPerBurst = 0;
+    int32_t                     mBufferCapacity = 0;
 
     aaudio_usage_t              mUsage           = AAUDIO_UNSPECIFIED;
     aaudio_content_type_t       mContentType     = AAUDIO_UNSPECIFIED;
@@ -620,8 +659,8 @@
     std::atomic<pid_t>          mErrorCallbackThread{CALLBACK_THREAD_NONE};
 
     // background thread ----------------------------------
-    bool                        mHasThread = false;
-    pthread_t                   mThread; // initialized in constructor
+    bool                        mHasThread GUARDED_BY(mStreamLock) = false;
+    pthread_t                   mThread  GUARDED_BY(mStreamLock) = {};
 
     // These are set by the application thread and then read by the audio pthread.
     std::atomic<int64_t>        mPeriodNanoseconds; // for tuning SCHED_FIFO threads
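
The naming note added to AudioStream.h above mirrors the public NDK entry points: AAudioStream_release() gives the hardware back, and AAudioStream_close() deletes the stream, releasing first if the app never did. A hedged usage sketch from the application side, with error handling elided:

#include <aaudio/AAudio.h>

// Assumes 'stream' was returned by AAudioStreamBuilder_openStream().
void shutDownStream(AAudioStream* stream) {
    // Give the audio device back as soon as playback is finished...
    AAudioStream_requestStop(stream);
    AAudioStream_release(stream);   // hardware and system resources freed here

    // ...and delete the stream object whenever it is convenient.
    AAudioStream_close(stream);     // releases first if release() was skipped
}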
diff --git a/media/libaaudio/src/core/AudioStreamBuilder.cpp b/media/libaaudio/src/core/AudioStreamBuilder.cpp
index 60dad84..630b289 100644
--- a/media/libaaudio/src/core/AudioStreamBuilder.cpp
+++ b/media/libaaudio/src/core/AudioStreamBuilder.cpp
@@ -63,27 +63,26 @@
 static aaudio_result_t builder_createStream(aaudio_direction_t direction,
                                          aaudio_sharing_mode_t sharingMode,
                                          bool tryMMap,
-                                         AudioStream **audioStreamPtr) {
-    *audioStreamPtr = nullptr;
+                                         android::sp<AudioStream> &stream) {
     aaudio_result_t result = AAUDIO_OK;
 
     switch (direction) {
 
         case AAUDIO_DIRECTION_INPUT:
             if (tryMMap) {
-                *audioStreamPtr = new AudioStreamInternalCapture(AAudioBinderClient::getInstance(),
+                stream = new AudioStreamInternalCapture(AAudioBinderClient::getInstance(),
                                                                  false);
             } else {
-                *audioStreamPtr = new AudioStreamRecord();
+                stream = new AudioStreamRecord();
             }
             break;
 
         case AAUDIO_DIRECTION_OUTPUT:
             if (tryMMap) {
-                *audioStreamPtr = new AudioStreamInternalPlay(AAudioBinderClient::getInstance(),
+                stream = new AudioStreamInternalPlay(AAudioBinderClient::getInstance(),
                                                               false);
             } else {
-                *audioStreamPtr = new AudioStreamTrack();
+                stream = new AudioStreamTrack();
             }
             break;
 
@@ -98,7 +97,7 @@
 // Fall back to Legacy path if MMAP not available.
 // Exact behavior is controlled by MMapPolicy.
 aaudio_result_t AudioStreamBuilder::build(AudioStream** streamPtr) {
-    AudioStream *audioStream = nullptr;
+
     if (streamPtr == nullptr) {
         ALOGE("%s() streamPtr is null", __func__);
         return AAUDIO_ERROR_NULL;
@@ -171,41 +170,48 @@
         setPrivacySensitive(true);
     }
 
-    result = builder_createStream(getDirection(), sharingMode, allowMMap, &audioStream);
+    android::sp<AudioStream> audioStream;
+    result = builder_createStream(getDirection(), sharingMode, allowMMap, audioStream);
     if (result == AAUDIO_OK) {
         // Open the stream using the parameters from the builder.
         result = audioStream->open(*this);
-        if (result == AAUDIO_OK) {
-            *streamPtr = audioStream;
-        } else {
+        if (result != AAUDIO_OK) {
             bool isMMap = audioStream->isMMap();
-            delete audioStream;
-            audioStream = nullptr;
-
             if (isMMap && allowLegacy) {
                 ALOGV("%s() MMAP stream did not open so try Legacy path", __func__);
                 // If MMAP stream failed to open then TRY using a legacy stream.
                 result = builder_createStream(getDirection(), sharingMode,
-                                              false, &audioStream);
+                                              false, audioStream);
                 if (result == AAUDIO_OK) {
                     result = audioStream->open(*this);
-                    if (result == AAUDIO_OK) {
-                        *streamPtr = audioStream;
-                    } else {
-                        delete audioStream;
-                        audioStream = nullptr;
-                    }
                 }
             }
         }
-        if (audioStream != nullptr) {
+        if (result == AAUDIO_OK) {
             audioStream->logOpen();
-        }
+            *streamPtr = startUsingStream(audioStream);
+        } // else audioStream will go out of scope and be deleted
     }
 
     return result;
 }
 
+AudioStream *AudioStreamBuilder::startUsingStream(android::sp<AudioStream> &audioStream) {
+    // Increment the smart pointer so it will not get deleted when
+    // we pass it to the C caller and it goes out of scope.
+    // The C code cannot hold a smart pointer so we increment the reference
+    // count to indicate that the C app owns a reference.
+    audioStream->incStrong(nullptr);
+    return audioStream.get();
+}
+
+void AudioStreamBuilder::stopUsingStream(AudioStream *stream) {
+    // Undo the effect of startUsingStream()
+    android::sp<AudioStream> spAudioStream(stream);
+    ALOGV("%s() strongCount = %d", __func__, spAudioStream->getStrongCount());
+    spAudioStream->decStrong(nullptr);
+}
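
startUsingStream() and stopUsingStream() above let the C ABI hold what is effectively a strong reference without ever seeing an sp<>. A small sketch of the same hand-out/hand-back pattern for an arbitrary RefBase type (Thing, acquireForC() and releaseFromC() are illustrative names):

#include <utils/RefBase.h>
#include <utils/StrongPointer.h>

struct Thing : public android::RefBase { /* ... */ };

// Hand a raw pointer to 'C' code while keeping the object alive.
Thing* acquireForC(const android::sp<Thing>& thing) {
    thing->incStrong(nullptr);      // the C caller now owns one reference
    return thing.get();
}

// Called when the 'C' code is done with the pointer.
void releaseFromC(Thing* raw) {
    android::sp<Thing> thing(raw);  // temporarily re-adopt it (adds a reference)
    thing->decStrong(nullptr);      // drop the reference handed out above
}   // 'thing' goes out of scope; the object is deleted if nothing else holds it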
+
 aaudio_result_t AudioStreamBuilder::validate() const {
 
     // Check for values that are ridiculously out of range to prevent math overflow exploits.
diff --git a/media/libaaudio/src/core/AudioStreamBuilder.h b/media/libaaudio/src/core/AudioStreamBuilder.h
index d5fb80d..9f93341 100644
--- a/media/libaaudio/src/core/AudioStreamBuilder.h
+++ b/media/libaaudio/src/core/AudioStreamBuilder.h
@@ -108,9 +108,16 @@
 
     virtual aaudio_result_t validate() const override;
 
+
     void logParameters() const;
 
+    // Release the reference held for the 'C' app so the stream can be deleted.
+    static void stopUsingStream(AudioStream *stream);
+
 private:
+    // Extract a raw pointer that we can pass to a 'C' app.
+    static AudioStream *startUsingStream(android::sp<AudioStream> &spAudioStream);
+
     bool                       mSharingModeMatchRequired = false; // must match sharing mode requested
     aaudio_performance_mode_t  mPerformanceMode = AAUDIO_PERFORMANCE_MODE_NONE;
 
diff --git a/media/libaaudio/src/fifo/FifoBuffer.cpp b/media/libaaudio/src/fifo/FifoBuffer.cpp
index f5113f2..5c11882 100644
--- a/media/libaaudio/src/fifo/FifoBuffer.cpp
+++ b/media/libaaudio/src/fifo/FifoBuffer.cpp
@@ -31,40 +31,37 @@
 #include "FifoBuffer.h"
 
 using android::FifoBuffer;
+using android::FifoBufferAllocated;
+using android::FifoBufferIndirect;
 using android::fifo_frames_t;
 
-FifoBuffer::FifoBuffer(int32_t bytesPerFrame, fifo_frames_t capacityInFrames)
-        : mBytesPerFrame(bytesPerFrame)
+FifoBuffer::FifoBuffer(int32_t bytesPerFrame)
+        : mBytesPerFrame(bytesPerFrame) {}
+
+FifoBufferAllocated::FifoBufferAllocated(int32_t bytesPerFrame, fifo_frames_t capacityInFrames)
+        : FifoBuffer(bytesPerFrame)
 {
     mFifo = std::make_unique<FifoController>(capacityInFrames, capacityInFrames);
     // allocate buffer
     int32_t bytesPerBuffer = bytesPerFrame * capacityInFrames;
-    mStorage = new uint8_t[bytesPerBuffer];
-    mStorageOwned = true;
+    mInternalStorage = std::make_unique<uint8_t[]>(bytesPerBuffer);
     ALOGV("%s() capacityInFrames = %d, bytesPerFrame = %d",
           __func__, capacityInFrames, bytesPerFrame);
 }
 
-FifoBuffer::FifoBuffer( int32_t   bytesPerFrame,
+FifoBufferIndirect::FifoBufferIndirect( int32_t   bytesPerFrame,
                         fifo_frames_t   capacityInFrames,
-                        fifo_counter_t *  readIndexAddress,
-                        fifo_counter_t *  writeIndexAddress,
+                        fifo_counter_t *readIndexAddress,
+                        fifo_counter_t *writeIndexAddress,
                         void *  dataStorageAddress
                         )
-        : mBytesPerFrame(bytesPerFrame)
-        , mStorage(static_cast<uint8_t *>(dataStorageAddress))
+        : FifoBuffer(bytesPerFrame)
+        , mExternalStorage(static_cast<uint8_t *>(dataStorageAddress))
 {
     mFifo = std::make_unique<FifoControllerIndirect>(capacityInFrames,
                                        capacityInFrames,
                                        readIndexAddress,
                                        writeIndexAddress);
-    mStorageOwned = false;
-}
-
-FifoBuffer::~FifoBuffer() {
-    if (mStorageOwned) {
-        delete[] mStorage;
-    }
 }
 
 int32_t FifoBuffer::convertFramesToBytes(fifo_frames_t frames) {
@@ -76,15 +73,16 @@
                                     int32_t startIndex) {
     wrappingBuffer->data[1] = nullptr;
     wrappingBuffer->numFrames[1] = 0;
+    uint8_t *storage = getStorage();
     if (framesAvailable > 0) {
         fifo_frames_t capacity = mFifo->getCapacity();
-        uint8_t *source = &mStorage[convertFramesToBytes(startIndex)];
+        uint8_t *source = &storage[convertFramesToBytes(startIndex)];
         // Does the available data cross the end of the FIFO?
         if ((startIndex + framesAvailable) > capacity) {
             wrappingBuffer->data[0] = source;
             fifo_frames_t firstFrames = capacity - startIndex;
             wrappingBuffer->numFrames[0] = firstFrames;
-            wrappingBuffer->data[1] = &mStorage[0];
+            wrappingBuffer->data[1] = &storage[0];
             wrappingBuffer->numFrames[1] = framesAvailable - firstFrames;
         } else {
             wrappingBuffer->data[0] = source;
@@ -191,6 +189,6 @@
 void FifoBuffer::eraseMemory() {
     int32_t numBytes = convertFramesToBytes(getBufferCapacityInFrames());
     if (numBytes > 0) {
-        memset(mStorage, 0, (size_t) numBytes);
+        memset(getStorage(), 0, (size_t) numBytes);
     }
 }
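
fillWrappingBuffer() splits a read or write request into at most two regions when it crosses the end of the circular buffer. A small worked example of that arithmetic with made-up numbers (capacity 8 frames, startIndex 6, framesAvailable 4):

#include <cstdio>

int main() {
    const int capacity = 8;
    const int startIndex = 6;
    const int framesAvailable = 4;

    if (startIndex + framesAvailable > capacity) {        // 10 > 8, so it wraps
        int firstFrames = capacity - startIndex;          // 2 frames at indices 6..7
        int secondFrames = framesAvailable - firstFrames; // 2 frames at indices 0..1
        printf("part0 = %d frames, part1 = %d frames\n", firstFrames, secondFrames);
    }
    return 0;
}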
diff --git a/media/libaaudio/src/fifo/FifoBuffer.h b/media/libaaudio/src/fifo/FifoBuffer.h
index 0d188c4..37548f0 100644
--- a/media/libaaudio/src/fifo/FifoBuffer.h
+++ b/media/libaaudio/src/fifo/FifoBuffer.h
@@ -38,15 +38,9 @@
 
 class FifoBuffer {
 public:
-    FifoBuffer(int32_t bytesPerFrame, fifo_frames_t capacityInFrames);
+    FifoBuffer(int32_t bytesPerFrame);
 
-    FifoBuffer(int32_t bytesPerFrame,
-               fifo_frames_t capacityInFrames,
-               fifo_counter_t *readCounterAddress,
-               fifo_counter_t *writeCounterAddress,
-               void *dataStorageAddress);
-
-    ~FifoBuffer();
+    virtual ~FifoBuffer() = default;
 
     int32_t convertFramesToBytes(fifo_frames_t frames);
 
@@ -121,19 +115,53 @@
      */
     void eraseMemory();
 
-private:
+protected:
+
+    virtual uint8_t *getStorage() const = 0;
 
     void fillWrappingBuffer(WrappingBuffer *wrappingBuffer,
                             int32_t framesAvailable, int32_t startIndex);
 
     const int32_t             mBytesPerFrame;
-    // We do not use a std::unique_ptr for mStorage because it is often a pointer to
-    // memory shared between processes and cannot be deleted trivially.
-    uint8_t                  *mStorage = nullptr;
-    bool                      mStorageOwned = false; // did this object allocate the storage?
     std::unique_ptr<FifoControllerBase> mFifo{};
 };
 
+// Define two subclasses to handle the two ways that storage is allocated.
+
+// Allocate storage internally.
+class FifoBufferAllocated : public FifoBuffer {
+public:
+    FifoBufferAllocated(int32_t bytesPerFrame, fifo_frames_t capacityInFrames);
+
+private:
+
+    uint8_t *getStorage() const override {
+        return mInternalStorage.get();
+    };
+
+    std::unique_ptr<uint8_t[]> mInternalStorage;
+};
+
+// Allocate storage externally and pass it in.
+class FifoBufferIndirect : public FifoBuffer {
+public:
+    // We use raw pointers because the memory may be
+    // in the middle of an allocated block and cannot be deleted directly.
+    FifoBufferIndirect(int32_t bytesPerFrame,
+                       fifo_frames_t capacityInFrames,
+                       fifo_counter_t* readCounterAddress,
+                       fifo_counter_t* writeCounterAddress,
+                       void* dataStorageAddress);
+
+private:
+
+    uint8_t *getStorage() const override {
+        return mExternalStorage;
+    };
+
+    uint8_t *mExternalStorage = nullptr;
+};
+
 }  // android
 
 #endif //FIFO_FIFO_BUFFER_H
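
Splitting FifoBuffer into FifoBufferAllocated and FifoBufferIndirect keeps all of the read/write logic in the base class and isolates only the storage policy in the subclasses. A construction sketch under stated assumptions (the include path is illustrative, and the counter/data pointers are assumed to point into an already-mapped shared-memory region):

#include <memory>

#include "fifo/FifoBuffer.h"   // assumed include path for the classes above

using android::fifo_counter_t;

// Storage owned by the FIFO itself: it allocates
// bytesPerFrame * capacityInFrames bytes internally.
std::unique_ptr<android::FifoBuffer> makeOwnedFifo() {
    return std::make_unique<android::FifoBufferAllocated>(
            4 /* bytesPerFrame */, 256 /* capacityInFrames */);
}

// Storage and counters supplied by the caller, e.g. pointers into a
// shared-memory mapping that the FIFO must never free.
std::unique_ptr<android::FifoBuffer> makeSharedFifo(fifo_counter_t* readCounter,
                                                    fifo_counter_t* writeCounter,
                                                    void* dataStorage) {
    return std::make_unique<android::FifoBufferIndirect>(
            4 /* bytesPerFrame */, 256 /* capacityInFrames */,
            readCounter, writeCounter, dataStorage);
}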
diff --git a/media/libaaudio/src/fifo/FifoControllerIndirect.h b/media/libaaudio/src/fifo/FifoControllerIndirect.h
index 5832d9c..ec48e57 100644
--- a/media/libaaudio/src/fifo/FifoControllerIndirect.h
+++ b/media/libaaudio/src/fifo/FifoControllerIndirect.h
@@ -27,7 +27,7 @@
 /**
  * A FifoControllerBase with counters external to the class.
  *
- * The actual copunters may be stored in separate regions of shared memory
+ * The actual counters may be stored in separate regions of shared memory
  * with different access rights.
  */
 class FifoControllerIndirect : public FifoControllerBase {
diff --git a/media/libaaudio/src/flowgraph/AudioProcessorBase.h b/media/libaaudio/src/flowgraph/AudioProcessorBase.h
index eda46ae..972932f 100644
--- a/media/libaaudio/src/flowgraph/AudioProcessorBase.h
+++ b/media/libaaudio/src/flowgraph/AudioProcessorBase.h
@@ -267,7 +267,7 @@
     AudioFloatInputPort input;
 
     /**
-     * Dummy processor. The work happens in the read() method.
+     * Do nothing. The work happens in the read() method.
      *
      * @param framePosition index of first frame to be processed
      * @param numFrames
diff --git a/media/libaaudio/src/legacy/AudioStreamLegacy.cpp b/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
index c062882..fdaa2ab 100644
--- a/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
@@ -34,8 +34,7 @@
 using namespace aaudio;
 
 AudioStreamLegacy::AudioStreamLegacy()
-        : AudioStream()
-        , mDeviceCallback(new StreamDeviceCallback(this)) {
+        : AudioStream() {
 }
 
 AudioStreamLegacy::~AudioStreamLegacy() {
@@ -163,7 +162,11 @@
 }
 
 void AudioStreamLegacy::forceDisconnect(bool errorCallbackEnabled) {
-    if (getState() != AAUDIO_STREAM_STATE_DISCONNECTED) {
+    // There is no need to disconnect if already in these states.
+    if (getState() != AAUDIO_STREAM_STATE_DISCONNECTED
+            && getState() != AAUDIO_STREAM_STATE_CLOSING
+            && getState() != AAUDIO_STREAM_STATE_CLOSED
+            ) {
         setState(AAUDIO_STREAM_STATE_DISCONNECTED);
         if (errorCallbackEnabled) {
             maybeCallErrorCallback(AAUDIO_ERROR_DISCONNECTED);
@@ -205,24 +208,30 @@
     return AAudioConvert_androidToAAudioResult(status);
 }
 
-void AudioStreamLegacy::onAudioDeviceUpdate(audio_port_handle_t deviceId)
-{
+void AudioStreamLegacy::onAudioDeviceUpdate(audio_io_handle_t /* audioIo */,
+            audio_port_handle_t deviceId) {
     // Device routing is a common source of errors and DISCONNECTS.
-    // Please leave this log in place.
-    ALOGD("%s() devId %d => %d", __func__, (int) getDeviceId(), (int)deviceId);
-    if (getDeviceId() != AAUDIO_UNSPECIFIED && getDeviceId() != deviceId &&
-            getState() != AAUDIO_STREAM_STATE_DISCONNECTED) {
+    // Please leave this log in place. If there is a bug then this might
+    // get called after the stream has been deleted so log before we
+    // touch the stream object.
+    ALOGD("%s(deviceId = %d)", __func__, (int)deviceId);
+    if (getDeviceId() != AAUDIO_UNSPECIFIED
+            && getDeviceId() != deviceId
+            && getState() != AAUDIO_STREAM_STATE_DISCONNECTED
+            ) {
         // Note that isDataCallbackActive() is affected by state so call it before DISCONNECTING.
         // If we have a data callback and the stream is active, then ask the data callback
         // to DISCONNECT and call the error callback.
         if (isDataCallbackActive()) {
-            ALOGD("onAudioDeviceUpdate() request DISCONNECT in data callback due to device change");
+            ALOGD("%s() request DISCONNECT in data callback, device %d => %d",
+                  __func__, (int) getDeviceId(), (int) deviceId);
             // If the stream is stopped before the data callback has a chance to handle the
-            // request then the requestStop() and requestPause() methods will handle it after
+            // request then the requestStop_l() and requestPause_l() methods will handle it after
             // the callback has stopped.
             mRequestDisconnect.request();
         } else {
-            ALOGD("onAudioDeviceUpdate() DISCONNECT the stream now");
+            ALOGD("%s() DISCONNECT the stream now, device %d => %d",
+                  __func__, (int) getDeviceId(), (int) deviceId);
             forceDisconnect();
         }
     }
diff --git a/media/libaaudio/src/legacy/AudioStreamLegacy.h b/media/libaaudio/src/legacy/AudioStreamLegacy.h
index 9c24b2b..88ef270 100644
--- a/media/libaaudio/src/legacy/AudioStreamLegacy.h
+++ b/media/libaaudio/src/legacy/AudioStreamLegacy.h
@@ -87,29 +87,13 @@
 
 protected:
 
-    class StreamDeviceCallback : public android::AudioSystem::AudioDeviceCallback
-    {
-    public:
-
-        StreamDeviceCallback(AudioStreamLegacy *parent) : mParent(parent) {}
-        virtual ~StreamDeviceCallback() {}
-
-        virtual void onAudioDeviceUpdate(audio_io_handle_t audioIo __unused,
-                                         audio_port_handle_t deviceId) {
-            if (mParent != nullptr) {
-                mParent->onAudioDeviceUpdate(deviceId);
-            }
-        }
-
-        AudioStreamLegacy *mParent;
-    };
-
     aaudio_result_t getBestTimestamp(clockid_t clockId,
                                      int64_t *framePosition,
                                      int64_t *timeNanoseconds,
                                      android::ExtendedTimestamp *extendedTimestamp);
 
-    void onAudioDeviceUpdate(audio_port_handle_t deviceId);
+    void onAudioDeviceUpdate(audio_io_handle_t audioIo,
+            audio_port_handle_t deviceId) override;
 
     /*
      * Check to see whether a callback thread has requested a disconnect.
@@ -128,6 +112,18 @@
         return mFramesRead.increment(frames);
     }
 
+    /**
+     * Get the framesPerBurst from the underlying API.
+     * @return framesPerBurst
+     */
+    virtual int32_t getFramesPerBurstFromDevice() const = 0;
+
+    /**
+     * Get the bufferCapacity from the underlying API.
+     * @return bufferCapacity in frames
+     */
+    virtual int32_t getBufferCapacityFromDevice() const = 0;
+
     // This is used for exact matching by MediaMetrics. So do not change it.
     // MediaMetricsConstants.h: AMEDIAMETRICS_PROP_CALLERNAME_VALUE_AAUDIO
     static constexpr char     kCallerName[] = "aaudio";
@@ -140,7 +136,6 @@
     int32_t                    mBlockAdapterBytesPerFrame = 0;
     aaudio_wrapping_frames_t   mPositionWhenStarting = 0;
     int32_t                    mCallbackBufferSize = 0;
-    const android::sp<StreamDeviceCallback>   mDeviceCallback;
 
     AtomicRequestor            mRequestDisconnect;
 
diff --git a/media/libaaudio/src/legacy/AudioStreamRecord.cpp b/media/libaaudio/src/legacy/AudioStreamRecord.cpp
index b0dc59e..45b2258 100644
--- a/media/libaaudio/src/legacy/AudioStreamRecord.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamRecord.cpp
@@ -118,6 +118,7 @@
         setDeviceFormat(getFormat());
     }
 
+    // To avoid glitching, let AudioFlinger pick the optimal burst size.
     uint32_t notificationFrames = 0;
 
     // Setup the callback if there is one.
@@ -128,7 +129,6 @@
         streamTransferType = AudioRecord::transfer_type::TRANSFER_CALLBACK;
         callback = getLegacyCallback();
         callbackData = this;
-        notificationFrames = builder.getFramesPerDataCallback();
     }
     mCallbackBufferSize = builder.getFramesPerDataCallback();
 
@@ -185,7 +185,7 @@
         // Did we get a valid track?
         status_t status = mAudioRecord->initCheck();
         if (status != OK) {
-            releaseCloseFinal();
+            safeReleaseClose();
             ALOGE("open(), initCheck() returned %d", status);
             return AAudioConvert_androidToAAudioResult(status);
         }
@@ -210,12 +210,9 @@
 
     // Get the actual values from the AudioRecord.
     setSamplesPerFrame(mAudioRecord->channelCount());
-
-    int32_t actualSampleRate = mAudioRecord->getSampleRate();
-    ALOGW_IF(actualSampleRate != getSampleRate(),
-             "open() sampleRate changed from %d to %d",
-             getSampleRate(), actualSampleRate);
-    setSampleRate(actualSampleRate);
+    setSampleRate(mAudioRecord->getSampleRate());
+    setBufferCapacity(getBufferCapacityFromDevice());
+    setFramesPerBurst(getFramesPerBurstFromDevice());
 
     // We may need to pass the data through a block size adapter to guarantee constant size.
     if (mCallbackBufferSize != AAUDIO_UNSPECIFIED) {
@@ -282,7 +279,7 @@
             : (aaudio_session_id_t) mAudioRecord->getSessionId();
     setSessionId(actualSessionId);
 
-    mAudioRecord->addAudioDeviceCallback(mDeviceCallback);
+    mAudioRecord->addAudioDeviceCallback(this);
 
     return AAUDIO_OK;
 }
@@ -291,16 +288,24 @@
     // TODO add close() or release() to AudioFlinger's AudioRecord API.
     //  Then call it from here
     if (getState() != AAUDIO_STREAM_STATE_CLOSING) {
-        mAudioRecord->removeAudioDeviceCallback(mDeviceCallback);
+        mAudioRecord->removeAudioDeviceCallback(this);
         logReleaseBufferState();
-        mAudioRecord.clear();
-        mFixedBlockWriter.close();
+        // Data callbacks may still be running!
         return AudioStream::release_l();
     } else {
         return AAUDIO_OK; // already released
     }
 }
 
+void AudioStreamRecord::close_l() {
+    mAudioRecord.clear();
+    // Do not close mFixedBlockWriter because a data callback
+    // thread might still be running if someone else has a reference
+    // to mAudioRecord.
+    // It has a unique_ptr to its buffer so it will clean up by itself.
+    AudioStream::close_l();
+}
+
 const void * AudioStreamRecord::maybeConvertDeviceData(const void *audioData, int32_t numFrames) {
     if (mFormatConversionBufferFloat.get() != nullptr) {
         LOG_ALWAYS_FATAL_IF(numFrames > mFormatConversionBufferSizeInFrames,
@@ -336,7 +341,7 @@
     return;
 }
 
-aaudio_result_t AudioStreamRecord::requestStart()
+aaudio_result_t AudioStreamRecord::requestStart_l()
 {
     if (mAudioRecord.get() == nullptr) {
         return AAUDIO_ERROR_INVALID_STATE;
@@ -360,7 +365,7 @@
     return AAUDIO_OK;
 }
 
-aaudio_result_t AudioStreamRecord::requestStop() {
+aaudio_result_t AudioStreamRecord::requestStop_l() {
     if (mAudioRecord.get() == nullptr) {
         return AAUDIO_ERROR_INVALID_STATE;
     }
@@ -483,7 +488,7 @@
     return getBufferCapacity(); // TODO implement in AudioRecord?
 }
 
-int32_t AudioStreamRecord::getBufferCapacity() const
+int32_t AudioStreamRecord::getBufferCapacityFromDevice() const
 {
     return static_cast<int32_t>(mAudioRecord->frameCount());
 }
@@ -493,8 +498,7 @@
     return 0; // TODO implement when AudioRecord supports it
 }
 
-int32_t AudioStreamRecord::getFramesPerBurst() const
-{
+int32_t AudioStreamRecord::getFramesPerBurstFromDevice() const {
     return static_cast<int32_t>(mAudioRecord->getNotificationPeriodInFrames());
 }
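
With this change the legacy streams cache framesPerBurst and bufferCapacity once at open() time via the *FromDevice() getters, and re-query the device only when checking whether routing changed the parameters. A tiny sketch of that cache-and-compare idea (StreamSketch and its constant are made up):

#include <cstdint>

class StreamSketch {
public:
    void open() {
        // Cache the device value once; callers get a stable answer afterwards.
        mFramesPerBurst = getFramesPerBurstFromDevice();
    }

    int32_t getFramesPerBurst() const { return mFramesPerBurst; }

    bool deviceParametersChanged() const {
        // Re-query only when checking for a routing or parameter change.
        return getFramesPerBurstFromDevice() != mFramesPerBurst;
    }

private:
    int32_t getFramesPerBurstFromDevice() const { return 192; /* pretend query */ }
    int32_t mFramesPerBurst = 0;
};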
 
diff --git a/media/libaaudio/src/legacy/AudioStreamRecord.h b/media/libaaudio/src/legacy/AudioStreamRecord.h
index c5944c7..b2f8ba5 100644
--- a/media/libaaudio/src/legacy/AudioStreamRecord.h
+++ b/media/libaaudio/src/legacy/AudioStreamRecord.h
@@ -39,9 +39,7 @@
 
     aaudio_result_t open(const AudioStreamBuilder & builder) override;
     aaudio_result_t release_l() override;
-
-    aaudio_result_t requestStart() override;
-    aaudio_result_t requestStop() override;
+    void close_l() override;
 
     virtual aaudio_result_t getTimestamp(clockid_t clockId,
                                          int64_t *framePosition,
@@ -55,14 +53,10 @@
 
     int32_t getBufferSize() const override;
 
-    int32_t getBufferCapacity() const override;
-
     int32_t getXRunCount() const override;
 
     int64_t getFramesWritten() override;
 
-    int32_t getFramesPerBurst() const override;
-
     aaudio_result_t updateStateMachine() override;
 
     aaudio_direction_t getDirection() const override {
@@ -78,6 +72,14 @@
 
     const void * maybeConvertDeviceData(const void *audioData, int32_t numFrames) override;
 
+protected:
+
+    aaudio_result_t requestStart_l() REQUIRES(mStreamLock) override;
+    aaudio_result_t requestStop_l() REQUIRES(mStreamLock) override;
+
+    int32_t getFramesPerBurstFromDevice() const override;
+    int32_t getBufferCapacityFromDevice() const override;
+
 private:
     android::sp<android::AudioRecord> mAudioRecord;
     // adapts between variable sized blocks and fixed size blocks
diff --git a/media/libaaudio/src/legacy/AudioStreamTrack.cpp b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
index 4869480..1d036d0 100644
--- a/media/libaaudio/src/legacy/AudioStreamTrack.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
@@ -96,6 +96,7 @@
 
     size_t frameCount = (size_t)builder.getBufferCapacity();
 
+    // To avoid glitching, let AudioFlinger pick the optimal burst size.
     int32_t notificationFrames = 0;
 
     const audio_format_t format = (getFormat() == AUDIO_FORMAT_DEFAULT)
@@ -118,8 +119,6 @@
             // Take advantage of a special trick that allows us to create a buffer
             // that is some multiple of the burst size.
             notificationFrames = 0 - DEFAULT_BURSTS_PER_BUFFER_CAPACITY;
-        } else {
-            notificationFrames = builder.getFramesPerDataCallback();
         }
     }
     mCallbackBufferSize = builder.getFramesPerDataCallback();
@@ -179,7 +178,7 @@
     // Did we get a valid track?
     status_t status = mAudioTrack->initCheck();
     if (status != NO_ERROR) {
-        releaseCloseFinal();
+        safeReleaseClose();
         ALOGE("open(), initCheck() returned %d", status);
         return AAudioConvert_androidToAAudioResult(status);
     }
@@ -193,12 +192,9 @@
     setSamplesPerFrame(mAudioTrack->channelCount());
     setFormat(mAudioTrack->format());
     setDeviceFormat(mAudioTrack->format());
-
-    int32_t actualSampleRate = mAudioTrack->getSampleRate();
-    ALOGW_IF(actualSampleRate != getSampleRate(),
-             "open() sampleRate changed from %d to %d",
-             getSampleRate(), actualSampleRate);
-    setSampleRate(actualSampleRate);
+    setSampleRate(mAudioTrack->getSampleRate());
+    setBufferCapacity(getBufferCapacityFromDevice());
+    setFramesPerBurst(getFramesPerBurstFromDevice());
 
     // We may need to pass the data through a block size adapter to guarantee constant size.
     if (mCallbackBufferSize != AAUDIO_UNSPECIFIED) {
@@ -221,10 +217,7 @@
             : (aaudio_session_id_t) mAudioTrack->getSessionId();
     setSessionId(actualSessionId);
 
-    mInitialBufferCapacity = getBufferCapacity();
-    mInitialFramesPerBurst = getFramesPerBurst();
-
-    mAudioTrack->addAudioDeviceCallback(mDeviceCallback);
+    mAudioTrack->addAudioDeviceCallback(this);
 
     // Update performance mode based on the actual stream flags.
     // For example, if the sample rate is not allowed then you won't get a FAST track.
@@ -240,11 +233,11 @@
 
     setSharingMode(AAUDIO_SHARING_MODE_SHARED); // EXCLUSIVE mode not supported in legacy
 
-    // Log warning if we did not get what we asked for.
-    ALOGW_IF(actualFlags != flags,
+    // Log if we did not get what we asked for.
+    ALOGD_IF(actualFlags != flags,
              "open() flags changed from 0x%08X to 0x%08X",
              flags, actualFlags);
-    ALOGW_IF(actualPerformanceMode != perfMode,
+    ALOGD_IF(actualPerformanceMode != perfMode,
              "open() perfMode changed from %d to %d",
              perfMode, actualPerformanceMode);
 
@@ -253,19 +246,26 @@
 
 aaudio_result_t AudioStreamTrack::release_l() {
     if (getState() != AAUDIO_STREAM_STATE_CLOSING) {
-        mAudioTrack->removeAudioDeviceCallback(mDeviceCallback);
+        status_t err = mAudioTrack->removeAudioDeviceCallback(this);
+        ALOGE_IF(err, "%s() removeAudioDeviceCallback returned %d", __func__, err);
         logReleaseBufferState();
-        // TODO Investigate why clear() causes a hang in test_various.cpp
-        // if I call close() from a data callback.
-        // But the same thing in AudioRecord is OK!
-        // mAudioTrack.clear();
-        mFixedBlockReader.close();
+        // Data callbacks may still be running!
         return AudioStream::release_l();
     } else {
         return AAUDIO_OK; // already released
     }
 }
 
+void AudioStreamTrack::close_l() {
+    // Stop callbacks before deleting mFixedBlockReader memory.
+    mAudioTrack.clear();
+    // Do not close mFixedBlockReader because a data callback
+    // thread might still be running if someone else has a reference
+    // to mAudioTrack.
+    // It has a unique_ptr to its buffer so it will clean up by itself.
+    AudioStream::close_l();
+}
+
 void AudioStreamTrack::processCallback(int event, void *info) {
 
     switch (event) {
@@ -281,8 +281,8 @@
                     || mAudioTrack->format() != getFormat()
                     || mAudioTrack->getSampleRate() != getSampleRate()
                     || mAudioTrack->getRoutedDeviceId() != getDeviceId()
-                    || getBufferCapacity() != mInitialBufferCapacity
-                    || getFramesPerBurst() != mInitialFramesPerBurst) {
+                    || getBufferCapacityFromDevice() != getBufferCapacity()
+                    || getFramesPerBurstFromDevice() != getFramesPerBurst()) {
                 processCallbackCommon(AAUDIO_CALLBACK_OPERATION_DISCONNECTED, info);
             }
             break;
@@ -293,7 +293,7 @@
     return;
 }
 
-aaudio_result_t AudioStreamTrack::requestStart() {
+aaudio_result_t AudioStreamTrack::requestStart_l() {
     if (mAudioTrack.get() == nullptr) {
         ALOGE("requestStart() no AudioTrack");
         return AAUDIO_ERROR_INVALID_STATE;
@@ -320,7 +320,7 @@
     return AAUDIO_OK;
 }
 
-aaudio_result_t AudioStreamTrack::requestPause() {
+aaudio_result_t AudioStreamTrack::requestPause_l() {
     if (mAudioTrack.get() == nullptr) {
         ALOGE("%s() no AudioTrack", __func__);
         return AAUDIO_ERROR_INVALID_STATE;
@@ -336,7 +336,7 @@
     return checkForDisconnectRequest(false);
 }
 
-aaudio_result_t AudioStreamTrack::requestFlush() {
+aaudio_result_t AudioStreamTrack::requestFlush_l() {
     if (mAudioTrack.get() == nullptr) {
         ALOGE("%s() no AudioTrack", __func__);
         return AAUDIO_ERROR_INVALID_STATE;
@@ -350,7 +350,7 @@
     return AAUDIO_OK;
 }
 
-aaudio_result_t AudioStreamTrack::requestStop() {
+aaudio_result_t AudioStreamTrack::requestStop_l() {
     if (mAudioTrack.get() == nullptr) {
         ALOGE("%s() no AudioTrack", __func__);
         return AAUDIO_ERROR_INVALID_STATE;
@@ -471,7 +471,7 @@
     return static_cast<int32_t>(mAudioTrack->getBufferSizeInFrames());
 }
 
-int32_t AudioStreamTrack::getBufferCapacity() const
+int32_t AudioStreamTrack::getBufferCapacityFromDevice() const
 {
     return static_cast<int32_t>(mAudioTrack->frameCount());
 }
@@ -481,8 +481,7 @@
     return static_cast<int32_t>(mAudioTrack->getUnderrunCount());
 }
 
-int32_t AudioStreamTrack::getFramesPerBurst() const
-{
+int32_t AudioStreamTrack::getFramesPerBurstFromDevice() const {
     return static_cast<int32_t>(mAudioTrack->getNotificationPeriodInFrames());
 }
 
diff --git a/media/libaaudio/src/legacy/AudioStreamTrack.h b/media/libaaudio/src/legacy/AudioStreamTrack.h
index 93a1ff4..f604871 100644
--- a/media/libaaudio/src/legacy/AudioStreamTrack.h
+++ b/media/libaaudio/src/legacy/AudioStreamTrack.h
@@ -42,12 +42,15 @@
 
     aaudio_result_t open(const AudioStreamBuilder & builder) override;
     aaudio_result_t release_l() override;
+    void close_l() override;
 
-    aaudio_result_t requestStart() override;
-    aaudio_result_t requestPause() override;
-    aaudio_result_t requestFlush() override;
-    aaudio_result_t requestStop() override;
+protected:
+    aaudio_result_t requestStart_l() REQUIRES(mStreamLock) override;
+    aaudio_result_t requestPause_l() REQUIRES(mStreamLock) override;
+    aaudio_result_t requestFlush_l() REQUIRES(mStreamLock) override;
+    aaudio_result_t requestStop_l() REQUIRES(mStreamLock) override;
 
+public:
     bool isFlushSupported() const override {
         // Only implement FLUSH for OUTPUT streams.
         return true;
@@ -68,8 +71,6 @@
 
     aaudio_result_t setBufferSize(int32_t requestedFrames) override;
     int32_t getBufferSize() const override;
-    int32_t getBufferCapacity() const override;
-    int32_t getFramesPerBurst()const  override;
     int32_t getXRunCount() const override;
 
     int64_t getFramesRead() override;
@@ -95,6 +96,11 @@
             const android::media::VolumeShaper::Operation& operation) override;
 #endif
 
+protected:
+
+    int32_t getFramesPerBurstFromDevice() const override;
+    int32_t getBufferCapacityFromDevice() const override;
+
 private:
 
     android::sp<android::AudioTrack> mAudioTrack;
@@ -104,10 +110,6 @@
 
     // TODO add 64-bit position reporting to AudioTrack and use it.
     aaudio_wrapping_frames_t         mPositionWhenPausing = 0;
-
-    // initial AudioTrack frame count and notification period
-    int32_t mInitialBufferCapacity = 0;
-    int32_t mInitialFramesPerBurst = 0;
 };
 
 } /* namespace aaudio */
diff --git a/media/libaaudio/src/utility/AAudioUtilities.cpp b/media/libaaudio/src/utility/AAudioUtilities.cpp
index 9007b10..3dfb801 100644
--- a/media/libaaudio/src/utility/AAudioUtilities.cpp
+++ b/media/libaaudio/src/utility/AAudioUtilities.cpp
@@ -27,7 +27,7 @@
 #include "core/AudioGlobal.h"
 #include <aaudio/AAudioTesting.h>
 #include <math.h>
-#include <system/audio-base.h>
+#include <system/audio.h>
 #include <assert.h>
 
 #include "utility/AAudioUtilities.h"
@@ -231,7 +231,8 @@
         case AAUDIO_ALLOW_CAPTURE_BY_SYSTEM:
             return AUDIO_FLAG_NO_MEDIA_PROJECTION;
         case AAUDIO_ALLOW_CAPTURE_BY_NONE:
-            return AUDIO_FLAG_NO_MEDIA_PROJECTION | AUDIO_FLAG_NO_SYSTEM_CAPTURE;
+            return static_cast<audio_flags_mask_t>(
+                    AUDIO_FLAG_NO_MEDIA_PROJECTION | AUDIO_FLAG_NO_SYSTEM_CAPTURE);
         default:
             ALOGE("%s() 0x%08X unrecognized", __func__, policy);
             return AUDIO_FLAG_NONE; //
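
The static_cast added above is needed because, in C++, bitwise-OR on two enumerators of a plain C enum such as audio_flags_mask_t yields an int, which does not convert back to the enum type implicitly. A generic illustration with a made-up enum:

enum my_flags_t {            // unscoped C-style enum, like audio_flags_mask_t
    MY_FLAG_NONE = 0x0,
    MY_FLAG_A    = 0x1,
    MY_FLAG_B    = 0x2,
};

my_flags_t combineFlags() {
    // MY_FLAG_A | MY_FLAG_B has type int in C++, so cast it back explicitly.
    return static_cast<my_flags_t>(MY_FLAG_A | MY_FLAG_B);
}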
diff --git a/media/libaaudio/src/utility/AAudioUtilities.h b/media/libaaudio/src/utility/AAudioUtilities.h
index d2e4805..82eb77d 100644
--- a/media/libaaudio/src/utility/AAudioUtilities.h
+++ b/media/libaaudio/src/utility/AAudioUtilities.h
@@ -21,6 +21,7 @@
 #include <functional>
 #include <stdint.h>
 #include <sys/types.h>
+#include <unistd.h>
 
 #include <utils/Errors.h>
 #include <system/audio.h>
diff --git a/media/libaaudio/tests/Android.bp b/media/libaaudio/tests/Android.bp
index 8935d57..95d6543 100644
--- a/media/libaaudio/tests/Android.bp
+++ b/media/libaaudio/tests/Android.bp
@@ -11,10 +11,12 @@
     defaults: ["libaaudio_tests_defaults"],
     srcs: ["test_marshalling.cpp"],
     shared_libs: [
+        "aaudio-aidl-cpp",
         "libaaudio_internal",
         "libbinder",
         "libcutils",
         "libutils",
+        "shared-file-region-aidl-unstable-cpp",
     ],
 }
 
diff --git a/media/libaaudio/tests/test_aaudio_monkey.cpp b/media/libaaudio/tests/test_aaudio_monkey.cpp
index be54835..cc29678 100644
--- a/media/libaaudio/tests/test_aaudio_monkey.cpp
+++ b/media/libaaudio/tests/test_aaudio_monkey.cpp
@@ -46,11 +46,10 @@
         int32_t numFrames);
 
 void AAudioMonkeyErrorCallbackProc(
-        AAudioStream *stream __unused,
-        void *userData __unused,
-        aaudio_result_t error) {
-    printf("Error Callback, error: %d\n",(int)error);
-}
+        AAudioStream * /* stream */,
+        void *userData,
+        aaudio_result_t error);
+
 
 // This function is not thread safe. Only use this from a single thread.
 double nextRandomDouble() {
@@ -99,6 +98,10 @@
         aaudio_stream_state_t state = AAUDIO_STREAM_STATE_UNKNOWN;
         aaudio_result_t result = AAudioStream_waitForStateChange(getStream(),
             AAUDIO_STREAM_STATE_UNKNOWN, &state, 0);
+        if (result == AAUDIO_ERROR_DISCONNECTED) {
+            printf("WARNING - AAudioStream_waitForStateChange returned DISCONNECTED\n");
+            return true; // OK
+        }
         if (result != AAUDIO_OK) {
             printf("ERROR - AAudioStream_waitForStateChange returned %d\n", result);
             return false;
@@ -114,7 +117,7 @@
                (unsigned long long) framesRead,
                xRuns);
 
-        if (framesWritten < framesRead) {
+        if (state != AAUDIO_STREAM_STATE_STARTING && framesWritten < framesRead) {
             printf("WARNING - UNDERFLOW - diff = %d !!!!!!!!!!!!\n",
                    (int) (framesWritten - framesRead));
         }
@@ -132,8 +135,23 @@
             return -1;
         }
 
+        // update and query stream state
+        aaudio_stream_state_t state = AAUDIO_STREAM_STATE_UNKNOWN;
+        state = AAudioStream_getState(getStream());
+        if (state < 0) {
+            printf("ERROR - AAudioStream_getState returned %d\n", state);
+            return state;
+        }
+
+        if (state == AAUDIO_STREAM_STATE_DISCONNECTED) {
+            printf("#%d, Closing disconnected stream.\n", getIndex());
+            result = close();
+            return result;
+        }
+
         double dice = nextRandomDouble();
         // Select an action based on a weighted probability.
+        printf("    "); // indent action
         if (dice < PROB_START) {
             printf("start\n");
             result = AAudioStream_requestStart(getStream());
@@ -200,6 +218,10 @@
         return AAUDIO_CALLBACK_RESULT_CONTINUE;
     }
 
+    int getIndex() const {
+        return mIndex;
+    }
+
 private:
     const AAudioArgsParser  *mArgParser;
     const int                mIndex;
@@ -223,6 +245,13 @@
     return monkey->renderAudio(stream, audioData, numFrames);
 }
 
+void AAudioMonkeyErrorCallbackProc(
+        AAudioStream * /* stream */,
+        void *userData,
+        aaudio_result_t error) {
+    AAudioMonkey *monkey = (AAudioMonkey *) userData;
+    printf("#%d, Error Callback, error: %d\n", monkey->getIndex(), (int)error);
+}
 
 static void usage() {
     AAudioArgsParser::usage();
diff --git a/media/libaaudio/tests/test_atomic_fifo.cpp b/media/libaaudio/tests/test_atomic_fifo.cpp
index 130ef43..4dbb219 100644
--- a/media/libaaudio/tests/test_atomic_fifo.cpp
+++ b/media/libaaudio/tests/test_atomic_fifo.cpp
@@ -26,6 +26,7 @@
 using android::fifo_counter_t;
 using android::FifoController;
 using android::FifoBuffer;
+using android::FifoBufferIndirect;
 using android::WrappingBuffer;
 
 TEST(test_fifo_controller, fifo_indices) {
@@ -325,7 +326,7 @@
         verifyStorageIntegrity();
     }
 
-    FifoBuffer     mFifoBuffer;
+    FifoBufferIndirect     mFifoBuffer;
     fifo_frames_t  mNextWriteIndex = 0;
     fifo_frames_t  mNextVerifyIndex = 0;
     fifo_frames_t  mThreshold;
diff --git a/media/libaaudio/tests/test_marshalling.cpp b/media/libaaudio/tests/test_marshalling.cpp
index c51fbce..49213dc 100644
--- a/media/libaaudio/tests/test_marshalling.cpp
+++ b/media/libaaudio/tests/test_marshalling.cpp
@@ -33,6 +33,29 @@
 using namespace android;
 using namespace aaudio;
 
+template<typename T>
+T copy(const T& object) {
+    return T(object);
+}
+
+template<>
+SharedMemoryParcelable copy<SharedMemoryParcelable>(const SharedMemoryParcelable& object) {
+    return object.dup();
+}
+
+template<typename T>
+void writeToParcel(const T& object, Parcel* parcel) {
+    copy(object).parcelable().writeToParcel(parcel);
+}
+
+template<typename T>
+T readFromParcel(const Parcel& parcel) {
+    using ParcelType = std::decay_t<decltype(std::declval<T>().parcelable())>;
+    ParcelType parcelable;
+    parcelable.readFromParcel(&parcel);
+    return T(std::move(parcelable));
+}
+
 // Test adding one value.
 TEST(test_marshalling, aaudio_one_read_write) {
     Parcel parcel;
@@ -48,7 +71,6 @@
 // Test SharedMemoryParcel.
 TEST(test_marshalling, aaudio_shared_memory) {
     SharedMemoryParcelable sharedMemoryA;
-    SharedMemoryParcelable sharedMemoryB;
     const size_t memSizeBytes = 840;
     unique_fd fd(ashmem_create_region("TestMarshalling", memSizeBytes));
     ASSERT_LE(0, fd);
@@ -63,10 +85,10 @@
 
     Parcel parcel;
     size_t pos = parcel.dataPosition();
-    sharedMemoryA.writeToParcel(&parcel);
+    writeToParcel(sharedMemoryA, &parcel);
 
     parcel.setDataPosition(pos);
-    sharedMemoryB.readFromParcel(&parcel);
+    SharedMemoryParcelable sharedMemoryB = readFromParcel<SharedMemoryParcelable>(parcel);
     EXPECT_EQ(sharedMemoryA.getSizeInBytes(), sharedMemoryB.getSizeInBytes());
 
     // should see same value at two different addresses
@@ -81,7 +103,6 @@
 TEST(test_marshalling, aaudio_shared_region) {
     SharedMemoryParcelable sharedMemories[2];
     SharedRegionParcelable sharedRegionA;
-    SharedRegionParcelable sharedRegionB;
     const size_t memSizeBytes = 840;
     unique_fd fd(ashmem_create_region("TestMarshalling", memSizeBytes));
     ASSERT_LE(0, fd);
@@ -97,10 +118,10 @@
 
     Parcel parcel;
     size_t pos = parcel.dataPosition();
-    sharedRegionA.writeToParcel(&parcel);
+    writeToParcel(sharedRegionA, &parcel);
 
     parcel.setDataPosition(pos);
-    sharedRegionB.readFromParcel(&parcel);
+    SharedRegionParcelable sharedRegionB = readFromParcel<SharedRegionParcelable>(parcel);
 
     // should see same value
     void *region2;
@@ -113,7 +134,6 @@
 TEST(test_marshalling, aaudio_ring_buffer_parcelable) {
     SharedMemoryParcelable sharedMemories[2];
     RingBufferParcelable ringBufferA;
-    RingBufferParcelable ringBufferB;
 
     const size_t bytesPerFrame = 8;
     const size_t framesPerBurst = 32;
@@ -147,11 +167,11 @@
     // write A to parcel
     Parcel parcel;
     size_t pos = parcel.dataPosition();
-    ringBufferA.writeToParcel(&parcel);
+    writeToParcel(ringBufferA, &parcel);
 
     // read B from parcel
     parcel.setDataPosition(pos);
-    ringBufferB.readFromParcel(&parcel);
+    RingBufferParcelable ringBufferB = readFromParcel<RingBufferParcelable>(parcel);
 
     RingBufferDescriptor descriptorB;
     EXPECT_EQ(AAUDIO_OK, ringBufferB.resolve(sharedMemories, &descriptorB));
diff --git a/media/libaaudio/tests/test_stop_hang.cpp b/media/libaaudio/tests/test_stop_hang.cpp
index 2397b6c..982ff4a 100644
--- a/media/libaaudio/tests/test_stop_hang.cpp
+++ b/media/libaaudio/tests/test_stop_hang.cpp
@@ -45,7 +45,7 @@
                 {
                     // Will block if the thread is running.
                     // This mutex is used to close() immediately after the callback returns
-                    // and before the requestStop() is called.
+                    // and before the requestStop_l() is called.
                     std::lock_guard<std::mutex> lock(doneLock);
                     if (done) break;
                 }
diff --git a/media/libaaudio/tests/test_various.cpp b/media/libaaudio/tests/test_various.cpp
index a20c799..cbf863f 100644
--- a/media/libaaudio/tests/test_various.cpp
+++ b/media/libaaudio/tests/test_various.cpp
@@ -33,6 +33,11 @@
         void *audioData,
         int32_t numFrames
 ) {
+    aaudio_direction_t direction = AAudioStream_getDirection(stream);
+    if (direction == AAUDIO_DIRECTION_INPUT) {
+        return AAUDIO_CALLBACK_RESULT_CONTINUE;
+    }
+    // Check to make sure the buffer is initialized to all zeros.
     int channels = AAudioStream_getChannelCount(stream);
     int numSamples = channels * numFrames;
     bool allZeros = true;
@@ -48,7 +53,8 @@
 constexpr int64_t NANOS_PER_MILLISECOND = 1000 * 1000;
 
 void checkReleaseThenClose(aaudio_performance_mode_t perfMode,
-        aaudio_sharing_mode_t sharingMode) {
+        aaudio_sharing_mode_t sharingMode,
+        aaudio_direction_t direction = AAUDIO_DIRECTION_OUTPUT) {
     AAudioStreamBuilder* aaudioBuilder = nullptr;
     AAudioStream* aaudioStream = nullptr;
 
@@ -61,6 +67,7 @@
                                         nullptr);
     AAudioStreamBuilder_setPerformanceMode(aaudioBuilder, perfMode);
     AAudioStreamBuilder_setSharingMode(aaudioBuilder, sharingMode);
+    AAudioStreamBuilder_setDirection(aaudioBuilder, direction);
     AAudioStreamBuilder_setFormat(aaudioBuilder, AAUDIO_FORMAT_PCM_FLOAT);
 
     // Create an AAudioStream using the Builder.
@@ -88,14 +95,28 @@
     // We should NOT be able to start or change a stream after it has been released.
     EXPECT_EQ(AAUDIO_ERROR_INVALID_STATE, AAudioStream_requestStart(aaudioStream));
     EXPECT_EQ(AAUDIO_STREAM_STATE_CLOSING, AAudioStream_getState(aaudioStream));
-    EXPECT_EQ(AAUDIO_ERROR_INVALID_STATE, AAudioStream_requestPause(aaudioStream));
+    // Pause is only implemented for OUTPUT.
+    if (direction == AAUDIO_DIRECTION_OUTPUT) {
+        EXPECT_EQ(AAUDIO_ERROR_INVALID_STATE,
+                  AAudioStream_requestPause(aaudioStream));
+    }
     EXPECT_EQ(AAUDIO_STREAM_STATE_CLOSING, AAudioStream_getState(aaudioStream));
     EXPECT_EQ(AAUDIO_ERROR_INVALID_STATE, AAudioStream_requestStop(aaudioStream));
     EXPECT_EQ(AAUDIO_STREAM_STATE_CLOSING, AAudioStream_getState(aaudioStream));
 
     // Does this crash?
-    EXPECT_LT(0, AAudioStream_getFramesRead(aaudioStream));
-    EXPECT_LT(0, AAudioStream_getFramesWritten(aaudioStream));
+    EXPECT_GT(AAudioStream_getFramesRead(aaudioStream), 0);
+    EXPECT_GT(AAudioStream_getFramesWritten(aaudioStream), 0);
+    EXPECT_GT(AAudioStream_getFramesPerBurst(aaudioStream), 0);
+    EXPECT_GE(AAudioStream_getXRunCount(aaudioStream), 0);
+    EXPECT_GT(AAudioStream_getBufferCapacityInFrames(aaudioStream), 0);
+    EXPECT_GT(AAudioStream_getBufferSizeInFrames(aaudioStream), 0);
+
+    int64_t timestampFrames = 0;
+    int64_t timestampNanos = 0;
+    aaudio_result_t result = AAudioStream_getTimestamp(aaudioStream, CLOCK_MONOTONIC,
+            &timestampFrames, &timestampNanos);
+    EXPECT_TRUE(result == AAUDIO_ERROR_INVALID_STATE || result == AAUDIO_ERROR_UNIMPLEMENTED);
 
     // Verify Closing State. Does this crash?
     aaudio_stream_state_t state = AAUDIO_STREAM_STATE_UNKNOWN;
@@ -107,20 +128,42 @@
     EXPECT_EQ(AAUDIO_OK, AAudioStream_close(aaudioStream));
 }
 
-TEST(test_various, aaudio_release_close_none) {
+TEST(test_various, aaudio_release_close_none_output) {
     checkReleaseThenClose(AAUDIO_PERFORMANCE_MODE_NONE,
-            AAUDIO_SHARING_MODE_SHARED);
+            AAUDIO_SHARING_MODE_SHARED,
+            AAUDIO_DIRECTION_OUTPUT);
     // No EXCLUSIVE streams with MODE_NONE.
 }
 
-TEST(test_various, aaudio_release_close_low_shared) {
-    checkReleaseThenClose(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY,
-            AAUDIO_SHARING_MODE_SHARED);
+TEST(test_various, aaudio_release_close_none_input) {
+    checkReleaseThenClose(AAUDIO_PERFORMANCE_MODE_NONE,
+            AAUDIO_SHARING_MODE_SHARED,
+            AAUDIO_DIRECTION_INPUT);
+    // No EXCLUSIVE streams with MODE_NONE.
 }
 
-TEST(test_various, aaudio_release_close_low_exclusive) {
+TEST(test_various, aaudio_release_close_low_shared_output) {
     checkReleaseThenClose(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY,
-            AAUDIO_SHARING_MODE_EXCLUSIVE);
+            AAUDIO_SHARING_MODE_SHARED,
+            AAUDIO_DIRECTION_OUTPUT);
+}
+
+TEST(test_various, aaudio_release_close_low_shared_input) {
+    checkReleaseThenClose(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY,
+            AAUDIO_SHARING_MODE_SHARED,
+            AAUDIO_DIRECTION_INPUT);
+}
+
+TEST(test_various, aaudio_release_close_low_exclusive_output) {
+    checkReleaseThenClose(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY,
+            AAUDIO_SHARING_MODE_EXCLUSIVE,
+            AAUDIO_DIRECTION_OUTPUT);
+}
+
+TEST(test_various, aaudio_release_close_low_exclusive_input) {
+    checkReleaseThenClose(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY,
+            AAUDIO_SHARING_MODE_EXCLUSIVE,
+            AAUDIO_DIRECTION_INPUT);
 }
 
 enum FunctionToCall {
diff --git a/media/libaudioclient/AidlConversion.cpp b/media/libaudioclient/AidlConversion.cpp
new file mode 100644
index 0000000..d362d8f
--- /dev/null
+++ b/media/libaudioclient/AidlConversion.cpp
@@ -0,0 +1,1684 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "AidlConversion"
+//#define LOG_NDEBUG 0
+#include <system/audio.h>
+#include <utils/Log.h>
+
+#include "media/AidlConversion.h"
+
+#include <media/ShmemCompat.h>
+
+////////////////////////////////////////////////////////////////////////////////////////////////////
+// Utilities
+
+namespace android {
+
+using base::unexpected;
+
+namespace {
+
+////////////////////////////////////////////////////////////////////////////////////////////////////
+// The code below establishes:
+// IntegralTypeOf<T>, which works for either integral types (in which case it evaluates to T), or
+// enum types (in which case it evaluates to std::underlying_type_t<T>).
+
+template<typename T, typename = std::enable_if_t<std::is_integral_v<T> || std::is_enum_v<T>>>
+struct IntegralTypeOfStruct {
+    using Type = T;
+};
+
+template<typename T>
+struct IntegralTypeOfStruct<T, std::enable_if_t<std::is_enum_v<T>>> {
+    using Type = std::underlying_type_t<T>;
+};
+
+template<typename T>
+using IntegralTypeOf = typename IntegralTypeOfStruct<T>::Type;
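// Illustrative sketch, not part of this change: what IntegralTypeOf<T> resolves to for
// an integral type versus an enum. The enum `Color` is hypothetical, used only for the
// checks below; <type_traits> and <cstdint> are assumed reachable via existing includes.
enum class Color : uint8_t { RED, GREEN };

static_assert(std::is_same_v<IntegralTypeOf<int32_t>, int32_t>,
              "an integral type maps to itself");
static_assert(std::is_same_v<IntegralTypeOf<Color>, uint8_t>,
              "an enum maps to its underlying type");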
+
+////////////////////////////////////////////////////////////////////////////////////////////////////
+// Utilities for handling bitmasks.
+
+template<typename Enum>
+Enum index2enum_index(int index) {
+    static_assert(std::is_enum_v<Enum> || std::is_integral_v<Enum>);
+    return static_cast<Enum>(index);
+}
+
+template<typename Enum>
+Enum index2enum_bitmask(int index) {
+    static_assert(std::is_enum_v<Enum> || std::is_integral_v<Enum>);
+    return static_cast<Enum>(1 << index);
+}
+
+template<typename Mask, typename Enum>
+Mask enumToMask_bitmask(Enum e) {
+    static_assert(std::is_enum_v<Enum> || std::is_integral_v<Enum>);
+    static_assert(std::is_enum_v<Mask> || std::is_integral_v<Mask>);
+    return static_cast<Mask>(e);
+}
+
+template<typename Mask, typename Enum>
+Mask enumToMask_index(Enum e) {
+    static_assert(std::is_enum_v<Enum> || std::is_integral_v<Enum>);
+    static_assert(std::is_enum_v<Mask> || std::is_integral_v<Mask>);
+    return static_cast<Mask>(static_cast<std::make_unsigned_t<IntegralTypeOf<Mask>>>(1)
+            << static_cast<int>(e));
+}
+
+template<typename DestMask, typename SrcMask, typename DestEnum, typename SrcEnum>
+ConversionResult<DestMask> convertBitmask(
+        SrcMask src, const std::function<ConversionResult<DestEnum>(SrcEnum)>& enumConversion,
+        const std::function<SrcEnum(int)>& srcIndexToEnum,
+        const std::function<DestMask(DestEnum)>& destEnumToMask) {
+    using UnsignedDestMask = std::make_unsigned_t<IntegralTypeOf<DestMask>>;
+    using UnsignedSrcMask = std::make_unsigned_t<IntegralTypeOf<SrcMask>>;
+
+    UnsignedDestMask dest = static_cast<UnsignedDestMask>(0);
+    UnsignedSrcMask usrc = static_cast<UnsignedSrcMask>(src);
+
+    int srcBitIndex = 0;
+    while (usrc != 0) {
+        if (usrc & 1) {
+            SrcEnum srcEnum = srcIndexToEnum(srcBitIndex);
+            DestEnum destEnum = VALUE_OR_RETURN(enumConversion(srcEnum));
+            DestMask destMask = destEnumToMask(destEnum);
+            dest |= destMask;
+        }
+        ++srcBitIndex;
+        usrc >>= 1;
+    }
+    return static_cast<DestMask>(dest);
+}
+
+template<typename Mask, typename Enum>
+bool bitmaskIsSet(Mask mask, Enum index) {
+    return (mask & enumToMask_index<Mask, Enum>(index)) != 0;
+}
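// Illustrative sketch, not part of this change: the bit-walk that convertBitmask
// performs, written out by hand without the ConversionResult error handling.
// AidlFlag (index-based) and LegacyFlag (mask-based) are hypothetical enums; the
// fixed-width integer types come from the existing includes.
enum class AidlFlag { FAST = 0, RAW = 1 };                           // bit *indices*
enum LegacyFlag : uint32_t { LEGACY_FAST = 0x1, LEGACY_RAW = 0x4 };  // bit *masks*

uint32_t exampleAidlMaskToLegacyMask(int32_t aidlMask) {
    uint32_t dest = 0;
    uint32_t src = static_cast<uint32_t>(aidlMask);
    for (int index = 0; src != 0; ++index, src >>= 1) {
        if ((src & 1) == 0) continue;
        switch (static_cast<AidlFlag>(index)) {                  // cf. index2enum_index
            case AidlFlag::FAST: dest |= LEGACY_FAST; break;     // cf. enumConversion + destEnumToMask
            case AidlFlag::RAW:  dest |= LEGACY_RAW;  break;
        }
    }
    return dest;
}
// e.g. exampleAidlMaskToLegacyMask((1 << 0) | (1 << 1)) == (LEGACY_FAST | LEGACY_RAW) == 0x5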
+
+////////////////////////////////////////////////////////////////////////////////////////////////////
+// Utilities for working with AIDL unions.
+// UNION_GET(obj, fieldname) returns a ConversionResult<T> containing either the strongly-typed
+//   value of the respective field, or BAD_VALUE if the union is not set to the requested field.
+// UNION_SET(obj, fieldname, value) sets the requested field to the given value.
+
+template<typename T, typename T::Tag tag>
+using UnionFieldType = std::decay_t<decltype(std::declval<T>().template get<tag>())>;
+
+template<typename T, typename T::Tag tag>
+ConversionResult<UnionFieldType<T, tag>> unionGetField(const T& u) {
+    if (u.getTag() != tag) {
+        return unexpected(BAD_VALUE);
+    }
+    return u.template get<tag>();
+}
+
+#define UNION_GET(u, field) \
+    unionGetField<std::decay_t<decltype(u)>, std::decay_t<decltype(u)>::Tag::field>(u)
+
+#define UNION_SET(u, field, value) \
+    (u).set<std::decay_t<decltype(u)>::Tag::field>(value)
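// Usage sketch, not part of this change: reading one side of the media::AudioIoFlags
// union with the macro above. UNION_GET yields a ConversionResult, so asking for the
// field that is not currently set surfaces as BAD_VALUE rather than aborting.
// exampleInputFlagsOf is hypothetical; aidl2legacy_audio_input_flags_mask is defined
// further down in this file.
ConversionResult<audio_input_flags_t> exampleInputFlagsOf(const media::AudioIoFlags& aidl) {
    int32_t rawInput = VALUE_OR_RETURN(UNION_GET(aidl, input));  // BAD_VALUE if tag is `output`
    return aidl2legacy_audio_input_flags_mask(rawInput);
}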
+
+////////////////////////////////////////////////////////////////////////////////////////////////////
+
+template<typename To, typename From>
+ConversionResult<To> convertReinterpret(From from) {
+    static_assert(sizeof(From) == sizeof(To));
+    return static_cast<To>(from);
+}
+
+enum class Direction {
+    INPUT, OUTPUT
+};
+
+ConversionResult<Direction> direction(media::AudioPortRole role, media::AudioPortType type) {
+    switch (type) {
+        case media::AudioPortType::DEVICE:
+            switch (role) {
+                case media::AudioPortRole::SOURCE:
+                    return Direction::INPUT;
+                case media::AudioPortRole::SINK:
+                    return Direction::OUTPUT;
+                default:
+                    break;
+            }
+            break;
+        case media::AudioPortType::MIX:
+            switch (role) {
+                case media::AudioPortRole::SOURCE:
+                    return Direction::OUTPUT;
+                case media::AudioPortRole::SINK:
+                    return Direction::INPUT;
+                default:
+                    break;
+            }
+            break;
+        default:
+            break;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<Direction> direction(audio_port_role_t role, audio_port_type_t type) {
+    switch (type) {
+        case AUDIO_PORT_TYPE_DEVICE:
+            switch (role) {
+                case AUDIO_PORT_ROLE_SOURCE:
+                    return Direction::INPUT;
+                case AUDIO_PORT_ROLE_SINK:
+                    return Direction::OUTPUT;
+                default:
+                    break;
+            }
+            break;
+        case AUDIO_PORT_TYPE_MIX:
+            switch (role) {
+                case AUDIO_PORT_ROLE_SOURCE:
+                    return Direction::OUTPUT;
+                case AUDIO_PORT_ROLE_SINK:
+                    return Direction::INPUT;
+                default:
+                    break;
+            }
+            break;
+        default:
+            break;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+}  // namespace
+
+////////////////////////////////////////////////////////////////////////////////////////////////////
+// Converters
+
+status_t aidl2legacy_string(std::string_view aidl, char* dest, size_t maxSize) {
+    if (aidl.size() > maxSize - 1) {
+        return BAD_VALUE;
+    }
+    aidl.copy(dest, aidl.size());
+    dest[aidl.size()] = '\0';
+    return OK;
+}
+
+ConversionResult<std::string> legacy2aidl_string(const char* legacy, size_t maxSize) {
+    if (legacy == nullptr) {
+        return unexpected(BAD_VALUE);
+    }
+    if (strnlen(legacy, maxSize) == maxSize) {
+        // No null-terminator.
+        return unexpected(BAD_VALUE);
+    }
+    return std::string(legacy);
+}
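// Usage sketch, not part of this change, for the string converters above. The function
// and buffer size are hypothetical; 32 stands in for limits such as
// AUDIO_DEVICE_MAX_ADDRESS_LEN, and <string> is assumed available via existing includes.
void exampleStringConversion() {
    char legacyAddress[32];
    // Fits (value plus terminating NUL): copied and null-terminated, returns OK.
    status_t ok = aidl2legacy_string("card=2;device=0", legacyAddress, sizeof(legacyAddress));
    // Too long for the buffer: rejected with BAD_VALUE before anything is copied.
    status_t tooLong = aidl2legacy_string(std::string(64, 'x'), legacyAddress, sizeof(legacyAddress));
    // And back: legacy2aidl_string also rejects a buffer with no NUL within maxSize.
    ConversionResult<std::string> roundTrip = legacy2aidl_string(legacyAddress, sizeof(legacyAddress));
    (void) ok; (void) tooLong; (void) roundTrip;
}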
+
+ConversionResult<audio_module_handle_t> aidl2legacy_int32_t_audio_module_handle_t(int32_t aidl) {
+    return convertReinterpret<audio_module_handle_t>(aidl);
+}
+
+ConversionResult<int32_t> legacy2aidl_audio_module_handle_t_int32_t(audio_module_handle_t legacy) {
+    return convertReinterpret<int32_t>(legacy);
+}
+
+ConversionResult<audio_io_handle_t> aidl2legacy_int32_t_audio_io_handle_t(int32_t aidl) {
+    return convertReinterpret<audio_io_handle_t>(aidl);
+}
+
+ConversionResult<int32_t> legacy2aidl_audio_io_handle_t_int32_t(audio_io_handle_t legacy) {
+    return convertReinterpret<int32_t>(legacy);
+}
+
+ConversionResult<audio_port_handle_t> aidl2legacy_int32_t_audio_port_handle_t(int32_t aidl) {
+    return convertReinterpret<audio_port_handle_t>(aidl);
+}
+
+ConversionResult<int32_t> legacy2aidl_audio_port_handle_t_int32_t(audio_port_handle_t legacy) {
+    return convertReinterpret<int32_t>(legacy);
+}
+
+ConversionResult<audio_patch_handle_t> aidl2legacy_int32_t_audio_patch_handle_t(int32_t aidl) {
+    return convertReinterpret<audio_patch_handle_t>(aidl);
+}
+
+ConversionResult<int32_t> legacy2aidl_audio_patch_handle_t_int32_t(audio_patch_handle_t legacy) {
+    return convertReinterpret<int32_t>(legacy);
+}
+
+ConversionResult<audio_unique_id_t> aidl2legacy_int32_t_audio_unique_id_t(int32_t aidl) {
+    return convertReinterpret<audio_unique_id_t>(aidl);
+}
+
+ConversionResult<int32_t> legacy2aidl_audio_unique_id_t_int32_t(audio_unique_id_t legacy) {
+    return convertReinterpret<int32_t>(legacy);
+}
+
+ConversionResult<pid_t> aidl2legacy_int32_t_pid_t(int32_t aidl) {
+    return convertReinterpret<pid_t>(aidl);
+}
+
+ConversionResult<int32_t> legacy2aidl_pid_t_int32_t(pid_t legacy) {
+    return convertReinterpret<int32_t>(legacy);
+}
+
+ConversionResult<uid_t> aidl2legacy_int32_t_uid_t(int32_t aidl) {
+    return convertReinterpret<uid_t>(aidl);
+}
+
+ConversionResult<int32_t> legacy2aidl_uid_t_int32_t(uid_t legacy) {
+    return convertReinterpret<int32_t>(legacy);
+}
+
+ConversionResult<String16> aidl2legacy_string_view_String16(std::string_view aidl) {
+    return String16(aidl.data(), aidl.size());
+}
+
+ConversionResult<std::string> legacy2aidl_String16_string(const String16& legacy) {
+    return std::string(String8(legacy).c_str());
+}
+
+// The legacy enum is unnamed. Thus, we use int.
+ConversionResult<int> aidl2legacy_AudioPortConfigType(media::AudioPortConfigType aidl) {
+    switch (aidl) {
+        case media::AudioPortConfigType::SAMPLE_RATE:
+            return AUDIO_PORT_CONFIG_SAMPLE_RATE;
+        case media::AudioPortConfigType::CHANNEL_MASK:
+            return AUDIO_PORT_CONFIG_CHANNEL_MASK;
+        case media::AudioPortConfigType::FORMAT:
+            return AUDIO_PORT_CONFIG_FORMAT;
+        case media::AudioPortConfigType::FLAGS:
+            return AUDIO_PORT_CONFIG_FLAGS;
+        default:
+            return unexpected(BAD_VALUE);
+    }
+}
+
+// The legacy enum is unnamed. Thus, we use int.
+ConversionResult<media::AudioPortConfigType> legacy2aidl_AudioPortConfigType(int legacy) {
+    switch (legacy) {
+        case AUDIO_PORT_CONFIG_SAMPLE_RATE:
+            return media::AudioPortConfigType::SAMPLE_RATE;
+        case AUDIO_PORT_CONFIG_CHANNEL_MASK:
+            return media::AudioPortConfigType::CHANNEL_MASK;
+        case AUDIO_PORT_CONFIG_FORMAT:
+            return media::AudioPortConfigType::FORMAT;
+        case AUDIO_PORT_CONFIG_FLAGS:
+            return media::AudioPortConfigType::FLAGS;
+        default:
+            return unexpected(BAD_VALUE);
+    }
+}
+
+ConversionResult<unsigned int> aidl2legacy_int32_t_config_mask(int32_t aidl) {
+    return convertBitmask<unsigned int, int32_t, int, media::AudioPortConfigType>(
+            aidl, aidl2legacy_AudioPortConfigType,
+            // AudioPortConfigType enum is index-based.
+            index2enum_index<media::AudioPortConfigType>,
+            // AUDIO_PORT_CONFIG_* flags are mask-based.
+            enumToMask_bitmask<unsigned int, int>);
+}
+
+ConversionResult<int32_t> legacy2aidl_config_mask_int32_t(unsigned int legacy) {
+    return convertBitmask<int32_t, unsigned int, media::AudioPortConfigType, int>(
+            legacy, legacy2aidl_AudioPortConfigType,
+            // AUDIO_PORT_CONFIG_* flags are mask-based.
+            index2enum_bitmask<unsigned>,
+            // AudioPortConfigType enum is index-based.
+            enumToMask_index<int32_t, media::AudioPortConfigType>);
+}
+
+ConversionResult<audio_channel_mask_t> aidl2legacy_int32_t_audio_channel_mask_t(int32_t aidl) {
+    // TODO(ytai): should we convert bit-by-bit?
+    // One problem here is that the representation is both opaque and is different based on the
+    // context (input vs. output). Can determine based on type and role, as per useInChannelMask().
+    return convertReinterpret<audio_channel_mask_t>(aidl);
+}
+
+ConversionResult<int32_t> legacy2aidl_audio_channel_mask_t_int32_t(audio_channel_mask_t legacy) {
+    // TODO(ytai): should we convert bit-by-bit?
+    // One problem here is that the representation is both opaque and is different based on the
+    // context (input vs. output). Can determine based on type and role, as per useInChannelMask().
+    return convertReinterpret<int32_t>(legacy);
+}
+
+ConversionResult<audio_io_config_event> aidl2legacy_AudioIoConfigEvent_audio_io_config_event(
+        media::AudioIoConfigEvent aidl) {
+    switch (aidl) {
+        case media::AudioIoConfigEvent::OUTPUT_REGISTERED:
+            return AUDIO_OUTPUT_REGISTERED;
+        case media::AudioIoConfigEvent::OUTPUT_OPENED:
+            return AUDIO_OUTPUT_OPENED;
+        case media::AudioIoConfigEvent::OUTPUT_CLOSED:
+            return AUDIO_OUTPUT_CLOSED;
+        case media::AudioIoConfigEvent::OUTPUT_CONFIG_CHANGED:
+            return AUDIO_OUTPUT_CONFIG_CHANGED;
+        case media::AudioIoConfigEvent::INPUT_REGISTERED:
+            return AUDIO_INPUT_REGISTERED;
+        case media::AudioIoConfigEvent::INPUT_OPENED:
+            return AUDIO_INPUT_OPENED;
+        case media::AudioIoConfigEvent::INPUT_CLOSED:
+            return AUDIO_INPUT_CLOSED;
+        case media::AudioIoConfigEvent::INPUT_CONFIG_CHANGED:
+            return AUDIO_INPUT_CONFIG_CHANGED;
+        case media::AudioIoConfigEvent::CLIENT_STARTED:
+            return AUDIO_CLIENT_STARTED;
+        default:
+            return unexpected(BAD_VALUE);
+    }
+}
+
+ConversionResult<media::AudioIoConfigEvent> legacy2aidl_audio_io_config_event_AudioIoConfigEvent(
+        audio_io_config_event legacy) {
+    switch (legacy) {
+        case AUDIO_OUTPUT_REGISTERED:
+            return media::AudioIoConfigEvent::OUTPUT_REGISTERED;
+        case AUDIO_OUTPUT_OPENED:
+            return media::AudioIoConfigEvent::OUTPUT_OPENED;
+        case AUDIO_OUTPUT_CLOSED:
+            return media::AudioIoConfigEvent::OUTPUT_CLOSED;
+        case AUDIO_OUTPUT_CONFIG_CHANGED:
+            return media::AudioIoConfigEvent::OUTPUT_CONFIG_CHANGED;
+        case AUDIO_INPUT_REGISTERED:
+            return media::AudioIoConfigEvent::INPUT_REGISTERED;
+        case AUDIO_INPUT_OPENED:
+            return media::AudioIoConfigEvent::INPUT_OPENED;
+        case AUDIO_INPUT_CLOSED:
+            return media::AudioIoConfigEvent::INPUT_CLOSED;
+        case AUDIO_INPUT_CONFIG_CHANGED:
+            return media::AudioIoConfigEvent::INPUT_CONFIG_CHANGED;
+        case AUDIO_CLIENT_STARTED:
+            return media::AudioIoConfigEvent::CLIENT_STARTED;
+        default:
+            return unexpected(BAD_VALUE);
+    }
+}
+
+ConversionResult<audio_port_role_t> aidl2legacy_AudioPortRole_audio_port_role_t(
+        media::AudioPortRole aidl) {
+    switch (aidl) {
+        case media::AudioPortRole::NONE:
+            return AUDIO_PORT_ROLE_NONE;
+        case media::AudioPortRole::SOURCE:
+            return AUDIO_PORT_ROLE_SOURCE;
+        case media::AudioPortRole::SINK:
+            return AUDIO_PORT_ROLE_SINK;
+        default:
+            return unexpected(BAD_VALUE);
+    }
+}
+
+ConversionResult<media::AudioPortRole> legacy2aidl_audio_port_role_t_AudioPortRole(
+        audio_port_role_t legacy) {
+    switch (legacy) {
+        case AUDIO_PORT_ROLE_NONE:
+            return media::AudioPortRole::NONE;
+        case AUDIO_PORT_ROLE_SOURCE:
+            return media::AudioPortRole::SOURCE;
+        case AUDIO_PORT_ROLE_SINK:
+            return media::AudioPortRole::SINK;
+        default:
+            return unexpected(BAD_VALUE);
+    }
+}
+
+ConversionResult<audio_port_type_t> aidl2legacy_AudioPortType_audio_port_type_t(
+        media::AudioPortType aidl) {
+    switch (aidl) {
+        case media::AudioPortType::NONE:
+            return AUDIO_PORT_TYPE_NONE;
+        case media::AudioPortType::DEVICE:
+            return AUDIO_PORT_TYPE_DEVICE;
+        case media::AudioPortType::MIX:
+            return AUDIO_PORT_TYPE_MIX;
+        case media::AudioPortType::SESSION:
+            return AUDIO_PORT_TYPE_SESSION;
+        default:
+            return unexpected(BAD_VALUE);
+    }
+}
+
+ConversionResult<media::AudioPortType> legacy2aidl_audio_port_type_t_AudioPortType(
+        audio_port_type_t legacy) {
+    switch (legacy) {
+        case AUDIO_PORT_TYPE_NONE:
+            return media::AudioPortType::NONE;
+        case AUDIO_PORT_TYPE_DEVICE:
+            return media::AudioPortType::DEVICE;
+        case AUDIO_PORT_TYPE_MIX:
+            return media::AudioPortType::MIX;
+        case AUDIO_PORT_TYPE_SESSION:
+            return media::AudioPortType::SESSION;
+        default:
+            return unexpected(BAD_VALUE);
+    }
+}
+
+ConversionResult<audio_format_t> aidl2legacy_AudioFormat_audio_format_t(
+        media::audio::common::AudioFormat aidl) {
+    // This relies on AudioFormat being kept in sync with audio_format_t.
+    static_assert(sizeof(media::audio::common::AudioFormat) == sizeof(audio_format_t));
+    return static_cast<audio_format_t>(aidl);
+}
+
+ConversionResult<media::audio::common::AudioFormat> legacy2aidl_audio_format_t_AudioFormat(
+        audio_format_t legacy) {
+    // This relies on AudioFormat being kept in sync with audio_format_t.
+    static_assert(sizeof(media::audio::common::AudioFormat) == sizeof(audio_format_t));
+    return static_cast<media::audio::common::AudioFormat>(legacy);
+}
+
+ConversionResult<int> aidl2legacy_AudioGainMode_int(media::AudioGainMode aidl) {
+    switch (aidl) {
+        case media::AudioGainMode::JOINT:
+            return AUDIO_GAIN_MODE_JOINT;
+        case media::AudioGainMode::CHANNELS:
+            return AUDIO_GAIN_MODE_CHANNELS;
+        case media::AudioGainMode::RAMP:
+            return AUDIO_GAIN_MODE_RAMP;
+        default:
+            return unexpected(BAD_VALUE);
+    }
+}
+
+ConversionResult<media::AudioGainMode> legacy2aidl_int_AudioGainMode(int legacy) {
+    switch (legacy) {
+        case AUDIO_GAIN_MODE_JOINT:
+            return media::AudioGainMode::JOINT;
+        case AUDIO_GAIN_MODE_CHANNELS:
+            return media::AudioGainMode::CHANNELS;
+        case AUDIO_GAIN_MODE_RAMP:
+            return media::AudioGainMode::RAMP;
+        default:
+            return unexpected(BAD_VALUE);
+    }
+}
+
+ConversionResult<audio_gain_mode_t> aidl2legacy_int32_t_audio_gain_mode_t(int32_t aidl) {
+    return convertBitmask<audio_gain_mode_t, int32_t, int, media::AudioGainMode>(
+            aidl, aidl2legacy_AudioGainMode_int,
+            // AudioGainMode is index-based.
+            index2enum_index<media::AudioGainMode>,
+            // AUDIO_GAIN_MODE_* constants are mask-based.
+            enumToMask_bitmask<audio_gain_mode_t, int>);
+}
+
+ConversionResult<int32_t> legacy2aidl_audio_gain_mode_t_int32_t(audio_gain_mode_t legacy) {
+    return convertBitmask<int32_t, audio_gain_mode_t, media::AudioGainMode, int>(
+            legacy, legacy2aidl_int_AudioGainMode,
+            // AUDIO_GAIN_MODE_* constants are mask-based.
+            index2enum_bitmask<int>,
+            // AudioGainMode is index-based.
+            enumToMask_index<int32_t, media::AudioGainMode>);
+}
+
+ConversionResult<audio_devices_t> aidl2legacy_int32_t_audio_devices_t(int32_t aidl) {
+    // TODO(ytai): bitfield?
+    return convertReinterpret<audio_devices_t>(aidl);
+}
+
+ConversionResult<int32_t> legacy2aidl_audio_devices_t_int32_t(audio_devices_t legacy) {
+    // TODO(ytai): bitfield?
+    return convertReinterpret<int32_t>(legacy);
+}
+
+ConversionResult<audio_gain_config> aidl2legacy_AudioGainConfig_audio_gain_config(
+        const media::AudioGainConfig& aidl, media::AudioPortRole role, media::AudioPortType type) {
+    audio_gain_config legacy;
+    legacy.index = VALUE_OR_RETURN(convertIntegral<int>(aidl.index));
+    legacy.mode = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_gain_mode_t(aidl.mode));
+    legacy.channel_mask =
+            VALUE_OR_RETURN(aidl2legacy_int32_t_audio_channel_mask_t(aidl.channelMask));
+    const bool isInput = VALUE_OR_RETURN(direction(role, type)) == Direction::INPUT;
+    const bool isJoint = bitmaskIsSet(aidl.mode, media::AudioGainMode::JOINT);
+    size_t numValues = isJoint ? 1
+                               : isInput ? audio_channel_count_from_in_mask(legacy.channel_mask)
+                                         : audio_channel_count_from_out_mask(legacy.channel_mask);
+    if (aidl.values.size() != numValues || aidl.values.size() > std::size(legacy.values)) {
+        return unexpected(BAD_VALUE);
+    }
+    for (size_t i = 0; i < numValues; ++i) {
+        legacy.values[i] = VALUE_OR_RETURN(convertIntegral<int>(aidl.values[i]));
+    }
+    legacy.ramp_duration_ms = VALUE_OR_RETURN(convertIntegral<unsigned int>(aidl.rampDurationMs));
+    return legacy;
+}
+
+ConversionResult<media::AudioGainConfig> legacy2aidl_audio_gain_config_AudioGainConfig(
+        const audio_gain_config& legacy, audio_port_role_t role, audio_port_type_t type) {
+    media::AudioGainConfig aidl;
+    aidl.index = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.index));
+    aidl.mode = VALUE_OR_RETURN(legacy2aidl_audio_gain_mode_t_int32_t(legacy.mode));
+    aidl.channelMask =
+            VALUE_OR_RETURN(legacy2aidl_audio_channel_mask_t_int32_t(legacy.channel_mask));
+    const bool isInput = VALUE_OR_RETURN(direction(role, type)) == Direction::INPUT;
+    const bool isJoint = (legacy.mode & AUDIO_GAIN_MODE_JOINT) != 0;
+    size_t numValues = isJoint ? 1
+                               : isInput ? audio_channel_count_from_in_mask(legacy.channel_mask)
+                                         : audio_channel_count_from_out_mask(legacy.channel_mask);
+    aidl.values.resize(numValues);
+    for (size_t i = 0; i < numValues; ++i) {
+        aidl.values[i] = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.values[i]));
+    }
+    aidl.rampDurationMs = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.ramp_duration_ms));
+    return aidl;
+}
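// Illustrative sketch, not part of this change: the value-count rule shared by the two
// gain converters above, factored into a hypothetical helper for readability.
size_t exampleGainValueCount(audio_gain_mode_t mode, audio_channel_mask_t mask, bool isInput) {
    if ((mode & AUDIO_GAIN_MODE_JOINT) != 0) {
        return 1;  // JOINT: a single gain value applies to all channels
    }
    // Otherwise one value per channel; the channel count depends on whether the mask
    // is interpreted as an input or an output mask.
    return isInput ? audio_channel_count_from_in_mask(mask)
                   : audio_channel_count_from_out_mask(mask);
}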
+
+ConversionResult<audio_input_flags_t> aidl2legacy_AudioInputFlags_audio_input_flags_t(
+        media::AudioInputFlags aidl) {
+    switch (aidl) {
+        case media::AudioInputFlags::FAST:
+            return AUDIO_INPUT_FLAG_FAST;
+        case media::AudioInputFlags::HW_HOTWORD:
+            return AUDIO_INPUT_FLAG_HW_HOTWORD;
+        case media::AudioInputFlags::RAW:
+            return AUDIO_INPUT_FLAG_RAW;
+        case media::AudioInputFlags::SYNC:
+            return AUDIO_INPUT_FLAG_SYNC;
+        case media::AudioInputFlags::MMAP_NOIRQ:
+            return AUDIO_INPUT_FLAG_MMAP_NOIRQ;
+        case media::AudioInputFlags::VOIP_TX:
+            return AUDIO_INPUT_FLAG_VOIP_TX;
+        case media::AudioInputFlags::HW_AV_SYNC:
+            return AUDIO_INPUT_FLAG_HW_AV_SYNC;
+        case media::AudioInputFlags::DIRECT:
+            return AUDIO_INPUT_FLAG_DIRECT;
+        default:
+            return unexpected(BAD_VALUE);
+    }
+}
+
+ConversionResult<media::AudioInputFlags> legacy2aidl_audio_input_flags_t_AudioInputFlags(
+        audio_input_flags_t legacy) {
+    switch (legacy) {
+        case AUDIO_INPUT_FLAG_FAST:
+            return media::AudioInputFlags::FAST;
+        case AUDIO_INPUT_FLAG_HW_HOTWORD:
+            return media::AudioInputFlags::HW_HOTWORD;
+        case AUDIO_INPUT_FLAG_RAW:
+            return media::AudioInputFlags::RAW;
+        case AUDIO_INPUT_FLAG_SYNC:
+            return media::AudioInputFlags::SYNC;
+        case AUDIO_INPUT_FLAG_MMAP_NOIRQ:
+            return media::AudioInputFlags::MMAP_NOIRQ;
+        case AUDIO_INPUT_FLAG_VOIP_TX:
+            return media::AudioInputFlags::VOIP_TX;
+        case AUDIO_INPUT_FLAG_HW_AV_SYNC:
+            return media::AudioInputFlags::HW_AV_SYNC;
+        case AUDIO_INPUT_FLAG_DIRECT:
+            return media::AudioInputFlags::DIRECT;
+        default:
+            return unexpected(BAD_VALUE);
+    }
+}
+
+ConversionResult<audio_output_flags_t> aidl2legacy_AudioOutputFlags_audio_output_flags_t(
+        media::AudioOutputFlags aidl) {
+    switch (aidl) {
+        case media::AudioOutputFlags::DIRECT:
+            return AUDIO_OUTPUT_FLAG_DIRECT;
+        case media::AudioOutputFlags::PRIMARY:
+            return AUDIO_OUTPUT_FLAG_PRIMARY;
+        case media::AudioOutputFlags::FAST:
+            return AUDIO_OUTPUT_FLAG_FAST;
+        case media::AudioOutputFlags::DEEP_BUFFER:
+            return AUDIO_OUTPUT_FLAG_DEEP_BUFFER;
+        case media::AudioOutputFlags::COMPRESS_OFFLOAD:
+            return AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD;
+        case media::AudioOutputFlags::NON_BLOCKING:
+            return AUDIO_OUTPUT_FLAG_NON_BLOCKING;
+        case media::AudioOutputFlags::HW_AV_SYNC:
+            return AUDIO_OUTPUT_FLAG_HW_AV_SYNC;
+        case media::AudioOutputFlags::TTS:
+            return AUDIO_OUTPUT_FLAG_TTS;
+        case media::AudioOutputFlags::RAW:
+            return AUDIO_OUTPUT_FLAG_RAW;
+        case media::AudioOutputFlags::SYNC:
+            return AUDIO_OUTPUT_FLAG_SYNC;
+        case media::AudioOutputFlags::IEC958_NONAUDIO:
+            return AUDIO_OUTPUT_FLAG_IEC958_NONAUDIO;
+        case media::AudioOutputFlags::DIRECT_PCM:
+            return AUDIO_OUTPUT_FLAG_DIRECT_PCM;
+        case media::AudioOutputFlags::MMAP_NOIRQ:
+            return AUDIO_OUTPUT_FLAG_MMAP_NOIRQ;
+        case media::AudioOutputFlags::VOIP_RX:
+            return AUDIO_OUTPUT_FLAG_VOIP_RX;
+        case media::AudioOutputFlags::INCALL_MUSIC:
+            return AUDIO_OUTPUT_FLAG_INCALL_MUSIC;
+        default:
+            return unexpected(BAD_VALUE);
+    }
+}
+
+ConversionResult<media::AudioOutputFlags> legacy2aidl_audio_output_flags_t_AudioOutputFlags(
+        audio_output_flags_t legacy) {
+    switch (legacy) {
+        case AUDIO_OUTPUT_FLAG_DIRECT:
+            return media::AudioOutputFlags::DIRECT;
+        case AUDIO_OUTPUT_FLAG_PRIMARY:
+            return media::AudioOutputFlags::PRIMARY;
+        case AUDIO_OUTPUT_FLAG_FAST:
+            return media::AudioOutputFlags::FAST;
+        case AUDIO_OUTPUT_FLAG_DEEP_BUFFER:
+            return media::AudioOutputFlags::DEEP_BUFFER;
+        case AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD:
+            return media::AudioOutputFlags::COMPRESS_OFFLOAD;
+        case AUDIO_OUTPUT_FLAG_NON_BLOCKING:
+            return media::AudioOutputFlags::NON_BLOCKING;
+        case AUDIO_OUTPUT_FLAG_HW_AV_SYNC:
+            return media::AudioOutputFlags::HW_AV_SYNC;
+        case AUDIO_OUTPUT_FLAG_TTS:
+            return media::AudioOutputFlags::TTS;
+        case AUDIO_OUTPUT_FLAG_RAW:
+            return media::AudioOutputFlags::RAW;
+        case AUDIO_OUTPUT_FLAG_SYNC:
+            return media::AudioOutputFlags::SYNC;
+        case AUDIO_OUTPUT_FLAG_IEC958_NONAUDIO:
+            return media::AudioOutputFlags::IEC958_NONAUDIO;
+        case AUDIO_OUTPUT_FLAG_DIRECT_PCM:
+            return media::AudioOutputFlags::DIRECT_PCM;
+        case AUDIO_OUTPUT_FLAG_MMAP_NOIRQ:
+            return media::AudioOutputFlags::MMAP_NOIRQ;
+        case AUDIO_OUTPUT_FLAG_VOIP_RX:
+            return media::AudioOutputFlags::VOIP_RX;
+        case AUDIO_OUTPUT_FLAG_INCALL_MUSIC:
+            return media::AudioOutputFlags::INCALL_MUSIC;
+        default:
+            return unexpected(BAD_VALUE);
+    }
+}
+
+ConversionResult<audio_input_flags_t> aidl2legacy_audio_input_flags_mask(int32_t aidl) {
+    using LegacyMask = std::underlying_type_t<audio_input_flags_t>;
+
+    LegacyMask converted = VALUE_OR_RETURN(
+            (convertBitmask<LegacyMask, int32_t, audio_input_flags_t, media::AudioInputFlags>(
+                    aidl, aidl2legacy_AudioInputFlags_audio_input_flags_t,
+                    index2enum_index<media::AudioInputFlags>,
+                    enumToMask_bitmask<LegacyMask, audio_input_flags_t>)));
+    return static_cast<audio_input_flags_t>(converted);
+}
+
+ConversionResult<int32_t> legacy2aidl_audio_input_flags_mask(audio_input_flags_t legacy) {
+    using LegacyMask = std::underlying_type_t<audio_input_flags_t>;
+
+    LegacyMask legacyMask = static_cast<LegacyMask>(legacy);
+    return convertBitmask<int32_t, LegacyMask, media::AudioInputFlags, audio_input_flags_t>(
+            legacyMask, legacy2aidl_audio_input_flags_t_AudioInputFlags,
+            index2enum_bitmask<audio_input_flags_t>,
+            enumToMask_index<int32_t, media::AudioInputFlags>);
+}
+
+ConversionResult<audio_output_flags_t> aidl2legacy_audio_output_flags_mask(int32_t aidl) {
+    return convertBitmask<audio_output_flags_t,
+                          int32_t,
+                          audio_output_flags_t,
+                          media::AudioOutputFlags>(
+            aidl, aidl2legacy_AudioOutputFlags_audio_output_flags_t,
+            index2enum_index<media::AudioOutputFlags>,
+            enumToMask_bitmask<audio_output_flags_t, audio_output_flags_t>);
+}
+
+ConversionResult<int32_t> legacy2aidl_audio_output_flags_mask(audio_output_flags_t legacy) {
+    using LegacyMask = std::underlying_type_t<audio_output_flags_t>;
+
+    LegacyMask legacyMask = static_cast<LegacyMask>(legacy);
+    return convertBitmask<int32_t, LegacyMask, media::AudioOutputFlags, audio_output_flags_t>(
+            legacyMask, legacy2aidl_audio_output_flags_t_AudioOutputFlags,
+            index2enum_bitmask<audio_output_flags_t>,
+            enumToMask_index<int32_t, media::AudioOutputFlags>);
+}
+
+ConversionResult<audio_io_flags> aidl2legacy_AudioIoFlags_audio_io_flags(
+        const media::AudioIoFlags& aidl, media::AudioPortRole role, media::AudioPortType type) {
+    audio_io_flags legacy;
+    Direction dir = VALUE_OR_RETURN(direction(role, type));
+    switch (dir) {
+        case Direction::INPUT: {
+            legacy.input = VALUE_OR_RETURN(
+                    aidl2legacy_audio_input_flags_mask(VALUE_OR_RETURN(UNION_GET(aidl, input))));
+        }
+            break;
+
+        case Direction::OUTPUT: {
+            legacy.output = VALUE_OR_RETURN(
+                    aidl2legacy_audio_output_flags_mask(VALUE_OR_RETURN(UNION_GET(aidl, output))));
+        }
+            break;
+    }
+
+    return legacy;
+}
+
+ConversionResult<media::AudioIoFlags> legacy2aidl_audio_io_flags_AudioIoFlags(
+        const audio_io_flags& legacy, audio_port_role_t role, audio_port_type_t type) {
+    media::AudioIoFlags aidl;
+
+    Direction dir = VALUE_OR_RETURN(direction(role, type));
+    switch (dir) {
+        case Direction::INPUT:
+            UNION_SET(aidl, input,
+                      VALUE_OR_RETURN(legacy2aidl_audio_input_flags_mask(legacy.input)));
+            break;
+        case Direction::OUTPUT:
+            UNION_SET(aidl, output,
+                      VALUE_OR_RETURN(legacy2aidl_audio_output_flags_mask(legacy.output)));
+            break;
+    }
+    return aidl;
+}
+
+ConversionResult<audio_port_config_device_ext> aidl2legacy_AudioPortConfigDeviceExt(
+        const media::AudioPortConfigDeviceExt& aidl) {
+    audio_port_config_device_ext legacy;
+    legacy.hw_module = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_module_handle_t(aidl.hwModule));
+    legacy.type = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_devices_t(aidl.type));
+    RETURN_IF_ERROR(aidl2legacy_string(aidl.address, legacy.address, AUDIO_DEVICE_MAX_ADDRESS_LEN));
+    return legacy;
+}
+
+ConversionResult<media::AudioPortConfigDeviceExt> legacy2aidl_AudioPortConfigDeviceExt(
+        const audio_port_config_device_ext& legacy) {
+    media::AudioPortConfigDeviceExt aidl;
+    aidl.hwModule = VALUE_OR_RETURN(legacy2aidl_audio_module_handle_t_int32_t(legacy.hw_module));
+    aidl.type = VALUE_OR_RETURN(legacy2aidl_audio_devices_t_int32_t(legacy.type));
+    aidl.address = VALUE_OR_RETURN(
+            legacy2aidl_string(legacy.address, AUDIO_DEVICE_MAX_ADDRESS_LEN));
+    return aidl;
+}
+
+ConversionResult<audio_stream_type_t> aidl2legacy_AudioStreamType_audio_stream_type_t(
+        media::AudioStreamType aidl) {
+    switch (aidl) {
+        case media::AudioStreamType::DEFAULT:
+            return AUDIO_STREAM_DEFAULT;
+        case media::AudioStreamType::VOICE_CALL:
+            return AUDIO_STREAM_VOICE_CALL;
+        case media::AudioStreamType::SYSTEM:
+            return AUDIO_STREAM_SYSTEM;
+        case media::AudioStreamType::RING:
+            return AUDIO_STREAM_RING;
+        case media::AudioStreamType::MUSIC:
+            return AUDIO_STREAM_MUSIC;
+        case media::AudioStreamType::ALARM:
+            return AUDIO_STREAM_ALARM;
+        case media::AudioStreamType::NOTIFICATION:
+            return AUDIO_STREAM_NOTIFICATION;
+        case media::AudioStreamType::BLUETOOTH_SCO:
+            return AUDIO_STREAM_BLUETOOTH_SCO;
+        case media::AudioStreamType::ENFORCED_AUDIBLE:
+            return AUDIO_STREAM_ENFORCED_AUDIBLE;
+        case media::AudioStreamType::DTMF:
+            return AUDIO_STREAM_DTMF;
+        case media::AudioStreamType::TTS:
+            return AUDIO_STREAM_TTS;
+        case media::AudioStreamType::ACCESSIBILITY:
+            return AUDIO_STREAM_ACCESSIBILITY;
+        case media::AudioStreamType::ASSISTANT:
+            return AUDIO_STREAM_ASSISTANT;
+        case media::AudioStreamType::REROUTING:
+            return AUDIO_STREAM_REROUTING;
+        case media::AudioStreamType::PATCH:
+            return AUDIO_STREAM_PATCH;
+        case media::AudioStreamType::CALL_ASSISTANT:
+            return AUDIO_STREAM_CALL_ASSISTANT;
+        default:
+            return unexpected(BAD_VALUE);
+    }
+}
+
+ConversionResult<media::AudioStreamType> legacy2aidl_audio_stream_type_t_AudioStreamType(
+        audio_stream_type_t legacy) {
+    switch (legacy) {
+        case AUDIO_STREAM_DEFAULT:
+            return media::AudioStreamType::DEFAULT;
+        case AUDIO_STREAM_VOICE_CALL:
+            return media::AudioStreamType::VOICE_CALL;
+        case AUDIO_STREAM_SYSTEM:
+            return media::AudioStreamType::SYSTEM;
+        case AUDIO_STREAM_RING:
+            return media::AudioStreamType::RING;
+        case AUDIO_STREAM_MUSIC:
+            return media::AudioStreamType::MUSIC;
+        case AUDIO_STREAM_ALARM:
+            return media::AudioStreamType::ALARM;
+        case AUDIO_STREAM_NOTIFICATION:
+            return media::AudioStreamType::NOTIFICATION;
+        case AUDIO_STREAM_BLUETOOTH_SCO:
+            return media::AudioStreamType::BLUETOOTH_SCO;
+        case AUDIO_STREAM_ENFORCED_AUDIBLE:
+            return media::AudioStreamType::ENFORCED_AUDIBLE;
+        case AUDIO_STREAM_DTMF:
+            return media::AudioStreamType::DTMF;
+        case AUDIO_STREAM_TTS:
+            return media::AudioStreamType::TTS;
+        case AUDIO_STREAM_ACCESSIBILITY:
+            return media::AudioStreamType::ACCESSIBILITY;
+        case AUDIO_STREAM_ASSISTANT:
+            return media::AudioStreamType::ASSISTANT;
+        case AUDIO_STREAM_REROUTING:
+            return media::AudioStreamType::REROUTING;
+        case AUDIO_STREAM_PATCH:
+            return media::AudioStreamType::PATCH;
+        case AUDIO_STREAM_CALL_ASSISTANT:
+            return media::AudioStreamType::CALL_ASSISTANT;
+        default:
+            return unexpected(BAD_VALUE);
+    }
+}
+
+ConversionResult<audio_source_t> aidl2legacy_AudioSourceType_audio_source_t(
+        media::AudioSourceType aidl) {
+    switch (aidl) {
+        case media::AudioSourceType::INVALID:
+            // This value does not have an enum
+            return AUDIO_SOURCE_INVALID;
+        case media::AudioSourceType::DEFAULT:
+            return AUDIO_SOURCE_DEFAULT;
+        case media::AudioSourceType::MIC:
+            return AUDIO_SOURCE_MIC;
+        case media::AudioSourceType::VOICE_UPLINK:
+            return AUDIO_SOURCE_VOICE_UPLINK;
+        case media::AudioSourceType::VOICE_DOWNLINK:
+            return AUDIO_SOURCE_VOICE_DOWNLINK;
+        case media::AudioSourceType::VOICE_CALL:
+            return AUDIO_SOURCE_VOICE_CALL;
+        case media::AudioSourceType::CAMCORDER:
+            return AUDIO_SOURCE_CAMCORDER;
+        case media::AudioSourceType::VOICE_RECOGNITION:
+            return AUDIO_SOURCE_VOICE_RECOGNITION;
+        case media::AudioSourceType::VOICE_COMMUNICATION:
+            return AUDIO_SOURCE_VOICE_COMMUNICATION;
+        case media::AudioSourceType::REMOTE_SUBMIX:
+            return AUDIO_SOURCE_REMOTE_SUBMIX;
+        case media::AudioSourceType::UNPROCESSED:
+            return AUDIO_SOURCE_UNPROCESSED;
+        case media::AudioSourceType::VOICE_PERFORMANCE:
+            return AUDIO_SOURCE_VOICE_PERFORMANCE;
+        case media::AudioSourceType::ECHO_REFERENCE:
+            return AUDIO_SOURCE_ECHO_REFERENCE;
+        case media::AudioSourceType::FM_TUNER:
+            return AUDIO_SOURCE_FM_TUNER;
+        case media::AudioSourceType::HOTWORD:
+            return AUDIO_SOURCE_HOTWORD;
+        default:
+            return unexpected(BAD_VALUE);
+    }
+}
+
+ConversionResult<media::AudioSourceType> legacy2aidl_audio_source_t_AudioSourceType(
+        audio_source_t legacy) {
+    switch (legacy) {
+        case AUDIO_SOURCE_INVALID:
+            return media::AudioSourceType::INVALID;
+        case AUDIO_SOURCE_DEFAULT:
+            return media::AudioSourceType::DEFAULT;
+        case AUDIO_SOURCE_MIC:
+            return media::AudioSourceType::MIC;
+        case AUDIO_SOURCE_VOICE_UPLINK:
+            return media::AudioSourceType::VOICE_UPLINK;
+        case AUDIO_SOURCE_VOICE_DOWNLINK:
+            return media::AudioSourceType::VOICE_DOWNLINK;
+        case AUDIO_SOURCE_VOICE_CALL:
+            return media::AudioSourceType::VOICE_CALL;
+        case AUDIO_SOURCE_CAMCORDER:
+            return media::AudioSourceType::CAMCORDER;
+        case AUDIO_SOURCE_VOICE_RECOGNITION:
+            return media::AudioSourceType::VOICE_RECOGNITION;
+        case AUDIO_SOURCE_VOICE_COMMUNICATION:
+            return media::AudioSourceType::VOICE_COMMUNICATION;
+        case AUDIO_SOURCE_REMOTE_SUBMIX:
+            return media::AudioSourceType::REMOTE_SUBMIX;
+        case AUDIO_SOURCE_UNPROCESSED:
+            return media::AudioSourceType::UNPROCESSED;
+        case AUDIO_SOURCE_VOICE_PERFORMANCE:
+            return media::AudioSourceType::VOICE_PERFORMANCE;
+        case AUDIO_SOURCE_ECHO_REFERENCE:
+            return media::AudioSourceType::ECHO_REFERENCE;
+        case AUDIO_SOURCE_FM_TUNER:
+            return media::AudioSourceType::FM_TUNER;
+        case AUDIO_SOURCE_HOTWORD:
+            return media::AudioSourceType::HOTWORD;
+        default:
+            return unexpected(BAD_VALUE);
+    }
+}
+
+ConversionResult<audio_session_t> aidl2legacy_int32_t_audio_session_t(int32_t aidl) {
+    return convertReinterpret<audio_session_t>(aidl);
+}
+
+ConversionResult<int32_t> legacy2aidl_audio_session_t_int32_t(audio_session_t legacy) {
+    return convertReinterpret<int32_t>(legacy);
+}
+
+// This type is unnamed in the original definition, thus we name it here.
+using audio_port_config_mix_ext_usecase = decltype(audio_port_config_mix_ext::usecase);
+
+ConversionResult<audio_port_config_mix_ext_usecase> aidl2legacy_AudioPortConfigMixExtUseCase(
+        const media::AudioPortConfigMixExtUseCase& aidl, media::AudioPortRole role) {
+    audio_port_config_mix_ext_usecase legacy;
+
+    switch (role) {
+        case media::AudioPortRole::NONE:
+            // Just verify that the union is empty.
+            VALUE_OR_RETURN(UNION_GET(aidl, nothing));
+            break;
+
+        case media::AudioPortRole::SOURCE:
+            // This is not a bug. A SOURCE role corresponds to the stream field.
+            legacy.stream = VALUE_OR_RETURN(aidl2legacy_AudioStreamType_audio_stream_type_t(
+                    VALUE_OR_RETURN(UNION_GET(aidl, stream))));
+            break;
+
+        case media::AudioPortRole::SINK:
+            // This is not a bug. A SINK role corresponds to the source field.
+            legacy.source = VALUE_OR_RETURN(aidl2legacy_AudioSourceType_audio_source_t(
+                    VALUE_OR_RETURN(UNION_GET(aidl, source))));
+            break;
+
+        default:
+            LOG_ALWAYS_FATAL("Shouldn't get here");
+    }
+    return legacy;
+}
+
+ConversionResult<media::AudioPortConfigMixExtUseCase> legacy2aidl_AudioPortConfigMixExtUseCase(
+        const audio_port_config_mix_ext_usecase& legacy, audio_port_role_t role) {
+    media::AudioPortConfigMixExtUseCase aidl;
+
+    switch (role) {
+        case AUDIO_PORT_ROLE_NONE:
+            UNION_SET(aidl, nothing, false);
+            break;
+        case AUDIO_PORT_ROLE_SOURCE:
+            // This is not a bug. A SOURCE role corresponds to the stream field.
+            UNION_SET(aidl, stream, VALUE_OR_RETURN(
+                    legacy2aidl_audio_stream_type_t_AudioStreamType(legacy.stream)));
+            break;
+        case AUDIO_PORT_ROLE_SINK:
+            // This is not a bug. A SINK role corresponds to the source field.
+            UNION_SET(aidl, source,
+                      VALUE_OR_RETURN(legacy2aidl_audio_source_t_AudioSourceType(legacy.source)));
+            break;
+        default:
+            LOG_ALWAYS_FATAL("Shouldn't get here");
+    }
+    return aidl;
+}
+
+ConversionResult<audio_port_config_mix_ext> aidl2legacy_AudioPortConfigMixExt(
+        const media::AudioPortConfigMixExt& aidl, media::AudioPortRole role) {
+    audio_port_config_mix_ext legacy;
+    legacy.hw_module = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_module_handle_t(aidl.hwModule));
+    legacy.handle = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_io_handle_t(aidl.handle));
+    legacy.usecase = VALUE_OR_RETURN(aidl2legacy_AudioPortConfigMixExtUseCase(aidl.usecase, role));
+    return legacy;
+}
+
+ConversionResult<media::AudioPortConfigMixExt> legacy2aidl_AudioPortConfigMixExt(
+        const audio_port_config_mix_ext& legacy, audio_port_role_t role) {
+    media::AudioPortConfigMixExt aidl;
+    aidl.hwModule = VALUE_OR_RETURN(legacy2aidl_audio_module_handle_t_int32_t(legacy.hw_module));
+    aidl.handle = VALUE_OR_RETURN(legacy2aidl_audio_io_handle_t_int32_t(legacy.handle));
+    aidl.usecase = VALUE_OR_RETURN(legacy2aidl_AudioPortConfigMixExtUseCase(legacy.usecase, role));
+    return aidl;
+}
+
+ConversionResult<audio_port_config_session_ext> aidl2legacy_AudioPortConfigSessionExt(
+        const media::AudioPortConfigSessionExt& aidl) {
+    audio_port_config_session_ext legacy;
+    legacy.session = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_session_t(aidl.session));
+    return legacy;
+}
+
+ConversionResult<media::AudioPortConfigSessionExt> legacy2aidl_AudioPortConfigSessionExt(
+        const audio_port_config_session_ext& legacy) {
+    media::AudioPortConfigSessionExt aidl;
+    aidl.session = VALUE_OR_RETURN(legacy2aidl_audio_session_t_int32_t(legacy.session));
+    return aidl;
+}
+
+// This type is unnamed in the original definition, thus we name it here.
+using audio_port_config_ext = decltype(audio_port_config::ext);
+
+ConversionResult<audio_port_config_ext> aidl2legacy_AudioPortConfigExt(
+        const media::AudioPortConfigExt& aidl, media::AudioPortType type,
+        media::AudioPortRole role) {
+    audio_port_config_ext legacy;
+    // Our way of representing a union in AIDL is to have multiple vectors and require that at most
+    // one of them has size 1 and the rest are empty.
+    switch (type) {
+        case media::AudioPortType::NONE:
+            // Just verify that the union is empty.
+            VALUE_OR_RETURN(UNION_GET(aidl, nothing));
+            break;
+        case media::AudioPortType::DEVICE:
+            legacy.device = VALUE_OR_RETURN(
+                    aidl2legacy_AudioPortConfigDeviceExt(VALUE_OR_RETURN(UNION_GET(aidl, device))));
+            break;
+        case media::AudioPortType::MIX:
+            legacy.mix = VALUE_OR_RETURN(
+                    aidl2legacy_AudioPortConfigMixExt(VALUE_OR_RETURN(UNION_GET(aidl, mix)), role));
+            break;
+        case media::AudioPortType::SESSION:
+            legacy.session = VALUE_OR_RETURN(aidl2legacy_AudioPortConfigSessionExt(
+                    VALUE_OR_RETURN(UNION_GET(aidl, session))));
+            break;
+        default:
+            LOG_ALWAYS_FATAL("Shouldn't get here");
+    }
+    return legacy;
+}
+
+ConversionResult<media::AudioPortConfigExt> legacy2aidl_AudioPortConfigExt(
+        const audio_port_config_ext& legacy, audio_port_type_t type, audio_port_role_t role) {
+    media::AudioPortConfigExt aidl;
+
+    switch (type) {
+        case AUDIO_PORT_TYPE_NONE:
+            UNION_SET(aidl, nothing, false);
+            break;
+        case AUDIO_PORT_TYPE_DEVICE:
+            UNION_SET(aidl, device,
+                      VALUE_OR_RETURN(legacy2aidl_AudioPortConfigDeviceExt(legacy.device)));
+            break;
+        case AUDIO_PORT_TYPE_MIX:
+            UNION_SET(aidl, mix,
+                      VALUE_OR_RETURN(legacy2aidl_AudioPortConfigMixExt(legacy.mix, role)));
+            break;
+        case AUDIO_PORT_TYPE_SESSION:
+            UNION_SET(aidl, session,
+                      VALUE_OR_RETURN(legacy2aidl_AudioPortConfigSessionExt(legacy.session)));
+            break;
+        default:
+            LOG_ALWAYS_FATAL("Shouldn't get here");
+    }
+    return aidl;
+}
+
+ConversionResult<audio_port_config> aidl2legacy_AudioPortConfig_audio_port_config(
+        const media::AudioPortConfig& aidl) {
+    audio_port_config legacy;
+    legacy.id = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_port_handle_t(aidl.id));
+    legacy.role = VALUE_OR_RETURN(aidl2legacy_AudioPortRole_audio_port_role_t(aidl.role));
+    legacy.type = VALUE_OR_RETURN(aidl2legacy_AudioPortType_audio_port_type_t(aidl.type));
+    legacy.config_mask = VALUE_OR_RETURN(aidl2legacy_int32_t_config_mask(aidl.configMask));
+    if (bitmaskIsSet(aidl.configMask, media::AudioPortConfigType::SAMPLE_RATE)) {
+        legacy.sample_rate = VALUE_OR_RETURN(convertIntegral<unsigned int>(aidl.sampleRate));
+    }
+    if (bitmaskIsSet(aidl.configMask, media::AudioPortConfigType::CHANNEL_MASK)) {
+        legacy.channel_mask =
+                VALUE_OR_RETURN(aidl2legacy_int32_t_audio_channel_mask_t(aidl.channelMask));
+    }
+    if (bitmaskIsSet(aidl.configMask, media::AudioPortConfigType::FORMAT)) {
+        legacy.format = VALUE_OR_RETURN(aidl2legacy_AudioFormat_audio_format_t(aidl.format));
+    }
+    if (bitmaskIsSet(aidl.configMask, media::AudioPortConfigType::GAIN)) {
+        legacy.gain = VALUE_OR_RETURN(
+                aidl2legacy_AudioGainConfig_audio_gain_config(aidl.gain, aidl.role, aidl.type));
+    }
+    if (bitmaskIsSet(aidl.configMask, media::AudioPortConfigType::FLAGS)) {
+        legacy.flags = VALUE_OR_RETURN(
+                aidl2legacy_AudioIoFlags_audio_io_flags(aidl.flags, aidl.role, aidl.type));
+    }
+    legacy.ext = VALUE_OR_RETURN(aidl2legacy_AudioPortConfigExt(aidl.ext, aidl.type, aidl.role));
+    return legacy;
+}
+
+ConversionResult<media::AudioPortConfig> legacy2aidl_audio_port_config_AudioPortConfig(
+        const audio_port_config& legacy) {
+    media::AudioPortConfig aidl;
+    aidl.id = VALUE_OR_RETURN(legacy2aidl_audio_port_handle_t_int32_t(legacy.id));
+    aidl.role = VALUE_OR_RETURN(legacy2aidl_audio_port_role_t_AudioPortRole(legacy.role));
+    aidl.type = VALUE_OR_RETURN(legacy2aidl_audio_port_type_t_AudioPortType(legacy.type));
+    aidl.configMask = VALUE_OR_RETURN(legacy2aidl_config_mask_int32_t(legacy.config_mask));
+    if (legacy.config_mask & AUDIO_PORT_CONFIG_SAMPLE_RATE) {
+        aidl.sampleRate = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.sample_rate));
+    }
+    if (legacy.config_mask & AUDIO_PORT_CONFIG_CHANNEL_MASK) {
+        aidl.channelMask =
+                VALUE_OR_RETURN(legacy2aidl_audio_channel_mask_t_int32_t(legacy.channel_mask));
+    }
+    if (legacy.config_mask & AUDIO_PORT_CONFIG_FORMAT) {
+        aidl.format = VALUE_OR_RETURN(legacy2aidl_audio_format_t_AudioFormat(legacy.format));
+    }
+    if (legacy.config_mask & AUDIO_PORT_CONFIG_GAIN) {
+        aidl.gain = VALUE_OR_RETURN(legacy2aidl_audio_gain_config_AudioGainConfig(
+                legacy.gain, legacy.role, legacy.type));
+    }
+    if (legacy.config_mask & AUDIO_PORT_CONFIG_FLAGS) {
+        aidl.flags = VALUE_OR_RETURN(
+                legacy2aidl_audio_io_flags_AudioIoFlags(legacy.flags, legacy.role, legacy.type));
+    }
+    aidl.ext =
+            VALUE_OR_RETURN(legacy2aidl_AudioPortConfigExt(legacy.ext, legacy.type, legacy.role));
+    return aidl;
+}
+
+ConversionResult<struct audio_patch> aidl2legacy_AudioPatch_audio_patch(
+        const media::AudioPatch& aidl) {
+    struct audio_patch legacy;
+    legacy.id = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_patch_handle_t(aidl.id));
+    legacy.num_sinks = VALUE_OR_RETURN(convertIntegral<unsigned int>(aidl.sinks.size()));
+    if (legacy.num_sinks > AUDIO_PATCH_PORTS_MAX) {
+        return unexpected(BAD_VALUE);
+    }
+    for (size_t i = 0; i < legacy.num_sinks; ++i) {
+        legacy.sinks[i] =
+                VALUE_OR_RETURN(aidl2legacy_AudioPortConfig_audio_port_config(aidl.sinks[i]));
+    }
+    legacy.num_sources = VALUE_OR_RETURN(convertIntegral<unsigned int>(aidl.sources.size()));
+    if (legacy.num_sources > AUDIO_PATCH_PORTS_MAX) {
+        return unexpected(BAD_VALUE);
+    }
+    for (size_t i = 0; i < legacy.num_sources; ++i) {
+        legacy.sources[i] =
+                VALUE_OR_RETURN(aidl2legacy_AudioPortConfig_audio_port_config(aidl.sources[i]));
+    }
+    return legacy;
+}
+
+ConversionResult<media::AudioPatch> legacy2aidl_audio_patch_AudioPatch(
+        const struct audio_patch& legacy) {
+    media::AudioPatch aidl;
+    aidl.id = VALUE_OR_RETURN(legacy2aidl_audio_patch_handle_t_int32_t(legacy.id));
+
+    if (legacy.num_sinks > AUDIO_PATCH_PORTS_MAX) {
+        return unexpected(BAD_VALUE);
+    }
+    for (unsigned int i = 0; i < legacy.num_sinks; ++i) {
+        aidl.sinks.push_back(
+                VALUE_OR_RETURN(legacy2aidl_audio_port_config_AudioPortConfig(legacy.sinks[i])));
+    }
+    if (legacy.num_sources > AUDIO_PATCH_PORTS_MAX) {
+        return unexpected(BAD_VALUE);
+    }
+    for (unsigned int i = 0; i < legacy.num_sources; ++i) {
+        aidl.sources.push_back(
+                VALUE_OR_RETURN(legacy2aidl_audio_port_config_AudioPortConfig(legacy.sources[i])));
+    }
+    return aidl;
+}
+
+ConversionResult<sp<AudioIoDescriptor>> aidl2legacy_AudioIoDescriptor_AudioIoDescriptor(
+        const media::AudioIoDescriptor& aidl) {
+    sp<AudioIoDescriptor> legacy(new AudioIoDescriptor());
+    legacy->mIoHandle = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_io_handle_t(aidl.ioHandle));
+    legacy->mPatch = VALUE_OR_RETURN(aidl2legacy_AudioPatch_audio_patch(aidl.patch));
+    legacy->mSamplingRate = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.samplingRate));
+    legacy->mFormat = VALUE_OR_RETURN(aidl2legacy_AudioFormat_audio_format_t(aidl.format));
+    legacy->mChannelMask =
+            VALUE_OR_RETURN(aidl2legacy_int32_t_audio_channel_mask_t(aidl.channelMask));
+    legacy->mFrameCount = VALUE_OR_RETURN(convertIntegral<size_t>(aidl.frameCount));
+    legacy->mFrameCountHAL = VALUE_OR_RETURN(convertIntegral<size_t>(aidl.frameCountHAL));
+    legacy->mLatency = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.latency));
+    legacy->mPortId = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_port_handle_t(aidl.portId));
+    return legacy;
+}
+
+ConversionResult<media::AudioIoDescriptor> legacy2aidl_AudioIoDescriptor_AudioIoDescriptor(
+        const sp<AudioIoDescriptor>& legacy) {
+    media::AudioIoDescriptor aidl;
+    aidl.ioHandle = VALUE_OR_RETURN(legacy2aidl_audio_io_handle_t_int32_t(legacy->mIoHandle));
+    aidl.patch = VALUE_OR_RETURN(legacy2aidl_audio_patch_AudioPatch(legacy->mPatch));
+    aidl.samplingRate = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy->mSamplingRate));
+    aidl.format = VALUE_OR_RETURN(legacy2aidl_audio_format_t_AudioFormat(legacy->mFormat));
+    aidl.channelMask = VALUE_OR_RETURN(
+            legacy2aidl_audio_channel_mask_t_int32_t(legacy->mChannelMask));
+    aidl.frameCount = VALUE_OR_RETURN(convertIntegral<int64_t>(legacy->mFrameCount));
+    aidl.frameCountHAL = VALUE_OR_RETURN(convertIntegral<int64_t>(legacy->mFrameCountHAL));
+    aidl.latency = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy->mLatency));
+    aidl.portId = VALUE_OR_RETURN(legacy2aidl_audio_port_handle_t_int32_t(legacy->mPortId));
+    return aidl;
+}
+
+ConversionResult<AudioClient> aidl2legacy_AudioClient(const media::AudioClient& aidl) {
+    AudioClient legacy;
+    legacy.clientUid = VALUE_OR_RETURN(aidl2legacy_int32_t_uid_t(aidl.clientUid));
+    legacy.clientPid = VALUE_OR_RETURN(aidl2legacy_int32_t_pid_t(aidl.clientPid));
+    legacy.clientTid = VALUE_OR_RETURN(aidl2legacy_int32_t_pid_t(aidl.clientTid));
+    legacy.packageName = VALUE_OR_RETURN(aidl2legacy_string_view_String16(aidl.packageName));
+    return legacy;
+}
+
+ConversionResult<media::AudioClient> legacy2aidl_AudioClient(const AudioClient& legacy) {
+    media::AudioClient aidl;
+    aidl.clientUid = VALUE_OR_RETURN(legacy2aidl_uid_t_int32_t(legacy.clientUid));
+    aidl.clientPid = VALUE_OR_RETURN(legacy2aidl_pid_t_int32_t(legacy.clientPid));
+    aidl.clientTid = VALUE_OR_RETURN(legacy2aidl_pid_t_int32_t(legacy.clientTid));
+    aidl.packageName = VALUE_OR_RETURN(legacy2aidl_String16_string(legacy.packageName));
+    return aidl;
+}
+
+ConversionResult<audio_content_type_t>
+aidl2legacy_AudioContentType_audio_content_type_t(media::AudioContentType aidl) {
+    switch (aidl) {
+        case media::AudioContentType::UNKNOWN:
+            return AUDIO_CONTENT_TYPE_UNKNOWN;
+        case media::AudioContentType::SPEECH:
+            return AUDIO_CONTENT_TYPE_SPEECH;
+        case media::AudioContentType::MUSIC:
+            return AUDIO_CONTENT_TYPE_MUSIC;
+        case media::AudioContentType::MOVIE:
+            return AUDIO_CONTENT_TYPE_MOVIE;
+        case media::AudioContentType::SONIFICATION:
+            return AUDIO_CONTENT_TYPE_SONIFICATION;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<media::AudioContentType>
+legacy2aidl_audio_content_type_t_AudioContentType(audio_content_type_t legacy) {
+    switch (legacy) {
+        case AUDIO_CONTENT_TYPE_UNKNOWN:
+            return media::AudioContentType::UNKNOWN;
+        case AUDIO_CONTENT_TYPE_SPEECH:
+            return media::AudioContentType::SPEECH;
+        case AUDIO_CONTENT_TYPE_MUSIC:
+            return media::AudioContentType::MUSIC;
+        case AUDIO_CONTENT_TYPE_MOVIE:
+            return media::AudioContentType::MOVIE;
+        case AUDIO_CONTENT_TYPE_SONIFICATION:
+            return media::AudioContentType::SONIFICATION;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<audio_usage_t>
+aidl2legacy_AudioUsage_audio_usage_t(media::AudioUsage aidl) {
+    switch (aidl) {
+        case media::AudioUsage::UNKNOWN:
+            return AUDIO_USAGE_UNKNOWN;
+        case media::AudioUsage::MEDIA:
+            return AUDIO_USAGE_MEDIA;
+        case media::AudioUsage::VOICE_COMMUNICATION:
+            return AUDIO_USAGE_VOICE_COMMUNICATION;
+        case media::AudioUsage::VOICE_COMMUNICATION_SIGNALLING:
+            return AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING;
+        case media::AudioUsage::ALARM:
+            return AUDIO_USAGE_ALARM;
+        case media::AudioUsage::NOTIFICATION:
+            return AUDIO_USAGE_NOTIFICATION;
+        case media::AudioUsage::NOTIFICATION_TELEPHONY_RINGTONE:
+            return AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE;
+        case media::AudioUsage::NOTIFICATION_COMMUNICATION_REQUEST:
+            return AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST;
+        case media::AudioUsage::NOTIFICATION_COMMUNICATION_INSTANT:
+            return AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT;
+        case media::AudioUsage::NOTIFICATION_COMMUNICATION_DELAYED:
+            return AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED;
+        case media::AudioUsage::NOTIFICATION_EVENT:
+            return AUDIO_USAGE_NOTIFICATION_EVENT;
+        case media::AudioUsage::ASSISTANCE_ACCESSIBILITY:
+            return AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY;
+        case media::AudioUsage::ASSISTANCE_NAVIGATION_GUIDANCE:
+            return AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE;
+        case media::AudioUsage::ASSISTANCE_SONIFICATION:
+            return AUDIO_USAGE_ASSISTANCE_SONIFICATION;
+        case media::AudioUsage::GAME:
+            return AUDIO_USAGE_GAME;
+        case media::AudioUsage::VIRTUAL_SOURCE:
+            return AUDIO_USAGE_VIRTUAL_SOURCE;
+        case media::AudioUsage::ASSISTANT:
+            return AUDIO_USAGE_ASSISTANT;
+        case media::AudioUsage::CALL_ASSISTANT:
+            return AUDIO_USAGE_CALL_ASSISTANT;
+        case media::AudioUsage::EMERGENCY:
+            return AUDIO_USAGE_EMERGENCY;
+        case media::AudioUsage::SAFETY:
+            return AUDIO_USAGE_SAFETY;
+        case media::AudioUsage::VEHICLE_STATUS:
+            return AUDIO_USAGE_VEHICLE_STATUS;
+        case media::AudioUsage::ANNOUNCEMENT:
+            return AUDIO_USAGE_ANNOUNCEMENT;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<media::AudioUsage>
+legacy2aidl_audio_usage_t_AudioUsage(audio_usage_t legacy) {
+    switch (legacy) {
+        case AUDIO_USAGE_UNKNOWN:
+            return media::AudioUsage::UNKNOWN;
+        case AUDIO_USAGE_MEDIA:
+            return media::AudioUsage::MEDIA;
+        case AUDIO_USAGE_VOICE_COMMUNICATION:
+            return media::AudioUsage::VOICE_COMMUNICATION;
+        case AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING:
+            return media::AudioUsage::VOICE_COMMUNICATION_SIGNALLING;
+        case AUDIO_USAGE_ALARM:
+            return media::AudioUsage::ALARM;
+        case AUDIO_USAGE_NOTIFICATION:
+            return media::AudioUsage::NOTIFICATION;
+        case AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE:
+            return media::AudioUsage::NOTIFICATION_TELEPHONY_RINGTONE;
+        case AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST:
+            return media::AudioUsage::NOTIFICATION_COMMUNICATION_REQUEST;
+        case AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT:
+            return media::AudioUsage::NOTIFICATION_COMMUNICATION_INSTANT;
+        case AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED:
+            return media::AudioUsage::NOTIFICATION_COMMUNICATION_DELAYED;
+        case AUDIO_USAGE_NOTIFICATION_EVENT:
+            return media::AudioUsage::NOTIFICATION_EVENT;
+        case AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY:
+            return media::AudioUsage::ASSISTANCE_ACCESSIBILITY;
+        case AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE:
+            return media::AudioUsage::ASSISTANCE_NAVIGATION_GUIDANCE;
+        case AUDIO_USAGE_ASSISTANCE_SONIFICATION:
+            return media::AudioUsage::ASSISTANCE_SONIFICATION;
+        case AUDIO_USAGE_GAME:
+            return media::AudioUsage::GAME;
+        case AUDIO_USAGE_VIRTUAL_SOURCE:
+            return media::AudioUsage::VIRTUAL_SOURCE;
+        case AUDIO_USAGE_ASSISTANT:
+            return media::AudioUsage::ASSISTANT;
+        case AUDIO_USAGE_CALL_ASSISTANT:
+            return media::AudioUsage::CALL_ASSISTANT;
+        case AUDIO_USAGE_EMERGENCY:
+            return media::AudioUsage::EMERGENCY;
+        case AUDIO_USAGE_SAFETY:
+            return media::AudioUsage::SAFETY;
+        case AUDIO_USAGE_VEHICLE_STATUS:
+            return media::AudioUsage::VEHICLE_STATUS;
+        case AUDIO_USAGE_ANNOUNCEMENT:
+            return media::AudioUsage::ANNOUNCEMENT;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<audio_flags_mask_t>
+aidl2legacy_AudioFlag_audio_flags_mask_t(media::AudioFlag aidl) {
+    switch (aidl) {
+        case media::AudioFlag::AUDIBILITY_ENFORCED:
+            return AUDIO_FLAG_AUDIBILITY_ENFORCED;
+        case media::AudioFlag::SECURE:
+            return AUDIO_FLAG_SECURE;
+        case media::AudioFlag::SCO:
+            return AUDIO_FLAG_SCO;
+        case media::AudioFlag::BEACON:
+            return AUDIO_FLAG_BEACON;
+        case media::AudioFlag::HW_AV_SYNC:
+            return AUDIO_FLAG_HW_AV_SYNC;
+        case media::AudioFlag::HW_HOTWORD:
+            return AUDIO_FLAG_HW_HOTWORD;
+        case media::AudioFlag::BYPASS_INTERRUPTION_POLICY:
+            return AUDIO_FLAG_BYPASS_INTERRUPTION_POLICY;
+        case media::AudioFlag::BYPASS_MUTE:
+            return AUDIO_FLAG_BYPASS_MUTE;
+        case media::AudioFlag::LOW_LATENCY:
+            return AUDIO_FLAG_LOW_LATENCY;
+        case media::AudioFlag::DEEP_BUFFER:
+            return AUDIO_FLAG_DEEP_BUFFER;
+        case media::AudioFlag::NO_MEDIA_PROJECTION:
+            return AUDIO_FLAG_NO_MEDIA_PROJECTION;
+        case media::AudioFlag::MUTE_HAPTIC:
+            return AUDIO_FLAG_MUTE_HAPTIC;
+        case media::AudioFlag::NO_SYSTEM_CAPTURE:
+            return AUDIO_FLAG_NO_SYSTEM_CAPTURE;
+        case media::AudioFlag::CAPTURE_PRIVATE:
+            return AUDIO_FLAG_CAPTURE_PRIVATE;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<media::AudioFlag>
+legacy2aidl_audio_flags_mask_t_AudioFlag(audio_flags_mask_t legacy) {
+    switch (legacy) {
+        case AUDIO_FLAG_NONE:
+            return unexpected(BAD_VALUE);
+        case AUDIO_FLAG_AUDIBILITY_ENFORCED:
+            return media::AudioFlag::AUDIBILITY_ENFORCED;
+        case AUDIO_FLAG_SECURE:
+            return media::AudioFlag::SECURE;
+        case AUDIO_FLAG_SCO:
+            return media::AudioFlag::SCO;
+        case AUDIO_FLAG_BEACON:
+            return media::AudioFlag::BEACON;
+        case AUDIO_FLAG_HW_AV_SYNC:
+            return media::AudioFlag::HW_AV_SYNC;
+        case AUDIO_FLAG_HW_HOTWORD:
+            return media::AudioFlag::HW_HOTWORD;
+        case AUDIO_FLAG_BYPASS_INTERRUPTION_POLICY:
+            return media::AudioFlag::BYPASS_INTERRUPTION_POLICY;
+        case AUDIO_FLAG_BYPASS_MUTE:
+            return media::AudioFlag::BYPASS_MUTE;
+        case AUDIO_FLAG_LOW_LATENCY:
+            return media::AudioFlag::LOW_LATENCY;
+        case AUDIO_FLAG_DEEP_BUFFER:
+            return media::AudioFlag::DEEP_BUFFER;
+        case AUDIO_FLAG_NO_MEDIA_PROJECTION:
+            return media::AudioFlag::NO_MEDIA_PROJECTION;
+        case AUDIO_FLAG_MUTE_HAPTIC:
+            return media::AudioFlag::MUTE_HAPTIC;
+        case AUDIO_FLAG_NO_SYSTEM_CAPTURE:
+            return media::AudioFlag::NO_SYSTEM_CAPTURE;
+        case AUDIO_FLAG_CAPTURE_PRIVATE:
+            return media::AudioFlag::CAPTURE_PRIVATE;
+    }
+    return unexpected(BAD_VALUE);
+}
+
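+// Note: the AIDL representation packs one bit per media::AudioFlag enum index, while the legacy
+// mask uses the AUDIO_FLAG_* bit values directly (hence index2enum_index / enumToMask_bitmask
+// here and their mirrored counterparts in the reverse direction).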
+ConversionResult<audio_flags_mask_t>
+aidl2legacy_int32_t_audio_flags_mask_t_mask(int32_t aidl) {
+    return convertBitmask<audio_flags_mask_t, int32_t, audio_flags_mask_t, media::AudioFlag>(
+            aidl, aidl2legacy_AudioFlag_audio_flags_mask_t, index2enum_index<media::AudioFlag>,
+            enumToMask_bitmask<audio_flags_mask_t, audio_flags_mask_t>);
+}
+
+ConversionResult<int32_t>
+legacy2aidl_audio_flags_mask_t_int32_t_mask(audio_flags_mask_t legacy) {
+    return convertBitmask<int32_t, audio_flags_mask_t, media::AudioFlag, audio_flags_mask_t>(
+            legacy, legacy2aidl_audio_flags_mask_t_AudioFlag,
+            index2enum_bitmask<audio_flags_mask_t>,
+            enumToMask_index<int32_t, media::AudioFlag>);
+}
+
+ConversionResult<audio_attributes_t>
+aidl2legacy_AudioAttributesInternal_audio_attributes_t(const media::AudioAttributesInternal& aidl) {
+    audio_attributes_t legacy;
+    legacy.content_type = VALUE_OR_RETURN(
+            aidl2legacy_AudioContentType_audio_content_type_t(aidl.contentType));
+    legacy.usage = VALUE_OR_RETURN(aidl2legacy_AudioUsage_audio_usage_t(aidl.usage));
+    legacy.source = VALUE_OR_RETURN(aidl2legacy_AudioSourceType_audio_source_t(aidl.source));
+    legacy.flags = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_flags_mask_t_mask(aidl.flags));
+    RETURN_IF_ERROR(aidl2legacy_string(aidl.tags, legacy.tags, sizeof(legacy.tags)));
+    return legacy;
+}
+
+ConversionResult<media::AudioAttributesInternal>
+legacy2aidl_audio_attributes_t_AudioAttributesInternal(const audio_attributes_t& legacy) {
+    media::AudioAttributesInternal aidl;
+    aidl.contentType = VALUE_OR_RETURN(
+            legacy2aidl_audio_content_type_t_AudioContentType(legacy.content_type));
+    aidl.usage = VALUE_OR_RETURN(legacy2aidl_audio_usage_t_AudioUsage(legacy.usage));
+    aidl.source = VALUE_OR_RETURN(legacy2aidl_audio_source_t_AudioSourceType(legacy.source));
+    aidl.flags = VALUE_OR_RETURN(legacy2aidl_audio_flags_mask_t_int32_t_mask(legacy.flags));
+    aidl.tags = VALUE_OR_RETURN(legacy2aidl_string(legacy.tags, sizeof(legacy.tags)));
+    return aidl;
+}
+
+ConversionResult<audio_encapsulation_mode_t>
+aidl2legacy_audio_encapsulation_mode_t_AudioEncapsulationMode(media::AudioEncapsulationMode aidl) {
+    switch (aidl) {
+        case media::AudioEncapsulationMode::NONE:
+            return AUDIO_ENCAPSULATION_MODE_NONE;
+        case media::AudioEncapsulationMode::ELEMENTARY_STREAM:
+            return AUDIO_ENCAPSULATION_MODE_ELEMENTARY_STREAM;
+        case media::AudioEncapsulationMode::HANDLE:
+            return AUDIO_ENCAPSULATION_MODE_HANDLE;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<media::AudioEncapsulationMode>
+legacy2aidl_AudioEncapsulationMode_audio_encapsulation_mode_t(audio_encapsulation_mode_t legacy) {
+    switch (legacy) {
+        case AUDIO_ENCAPSULATION_MODE_NONE:
+            return media::AudioEncapsulationMode::NONE;
+        case AUDIO_ENCAPSULATION_MODE_ELEMENTARY_STREAM:
+            return media::AudioEncapsulationMode::ELEMENTARY_STREAM;
+        case AUDIO_ENCAPSULATION_MODE_HANDLE:
+            return media::AudioEncapsulationMode::HANDLE;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<audio_offload_info_t>
+aidl2legacy_AudioOffloadInfo_audio_offload_info_t(const media::AudioOffloadInfo& aidl) {
+    audio_offload_info_t legacy;
+    legacy.version = VALUE_OR_RETURN(convertIntegral<uint16_t>(aidl.version));
+    legacy.size = sizeof(audio_offload_info_t);
+    audio_config_base_t config = VALUE_OR_RETURN(
+            aidl2legacy_AudioConfigBase_audio_config_base_t(aidl.config));
+    legacy.sample_rate = config.sample_rate;
+    legacy.channel_mask = config.channel_mask;
+    legacy.format = config.format;
+    legacy.stream_type = VALUE_OR_RETURN(
+            aidl2legacy_AudioStreamType_audio_stream_type_t(aidl.streamType));
+    legacy.bit_rate = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.bitRate));
+    legacy.duration_us = VALUE_OR_RETURN(convertIntegral<int64_t>(aidl.durationUs));
+    legacy.has_video = aidl.hasVideo;
+    legacy.is_streaming = aidl.isStreaming;
+    legacy.bit_width = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.bitWidth));
+    legacy.offload_buffer_size = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.offloadBufferSize));
+    legacy.usage = VALUE_OR_RETURN(aidl2legacy_AudioUsage_audio_usage_t(aidl.usage));
+    legacy.encapsulation_mode = VALUE_OR_RETURN(
+            aidl2legacy_audio_encapsulation_mode_t_AudioEncapsulationMode(aidl.encapsulationMode));
+    legacy.content_id = VALUE_OR_RETURN(convertReinterpret<int32_t>(aidl.contentId));
+    legacy.sync_id = VALUE_OR_RETURN(convertReinterpret<int32_t>(aidl.syncId));
+    return legacy;
+}
+
+ConversionResult<media::AudioOffloadInfo>
+legacy2aidl_audio_offload_info_t_AudioOffloadInfo(const audio_offload_info_t& legacy) {
+    media::AudioOffloadInfo aidl;
+    // Version 0.1 fields.
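+    // The legacy struct is versioned via its size field; bail out if it is too small to
+    // contain the version 0.1 fields read below.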
+    if (legacy.size < offsetof(audio_offload_info_t, usage) + sizeof(audio_offload_info_t::usage)) {
+        return unexpected(BAD_VALUE);
+    }
+    aidl.version = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.version));
+    aidl.config.sampleRate = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.sample_rate));
+    aidl.config.channelMask = VALUE_OR_RETURN(
+            legacy2aidl_audio_channel_mask_t_int32_t(legacy.channel_mask));
+    aidl.config.format = VALUE_OR_RETURN(legacy2aidl_audio_format_t_AudioFormat(legacy.format));
+    aidl.streamType = VALUE_OR_RETURN(
+            legacy2aidl_audio_stream_type_t_AudioStreamType(legacy.stream_type));
+    aidl.bitRate = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.bit_rate));
+    aidl.durationUs = VALUE_OR_RETURN(convertIntegral<int64_t>(legacy.duration_us));
+    aidl.hasVideo = legacy.has_video;
+    aidl.isStreaming = legacy.is_streaming;
+    aidl.bitWidth = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.bit_width));
+    aidl.offloadBufferSize = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.offload_buffer_size));
+    aidl.usage = VALUE_OR_RETURN(legacy2aidl_audio_usage_t_AudioUsage(legacy.usage));
+
+    // Version 0.2 fields.
+    if (legacy.version >= AUDIO_OFFLOAD_INFO_VERSION_0_2) {
+        if (legacy.size <
+            offsetof(audio_offload_info_t, sync_id) + sizeof(audio_offload_info_t::sync_id)) {
+            return unexpected(BAD_VALUE);
+        }
+        aidl.encapsulationMode = VALUE_OR_RETURN(
+                legacy2aidl_AudioEncapsulationMode_audio_encapsulation_mode_t(
+                        legacy.encapsulation_mode));
+        aidl.contentId = VALUE_OR_RETURN(convertReinterpret<int32_t>(legacy.content_id));
+        aidl.syncId = VALUE_OR_RETURN(convertReinterpret<int32_t>(legacy.sync_id));
+    }
+    return aidl;
+}
+
+ConversionResult<audio_config_t>
+aidl2legacy_AudioConfig_audio_config_t(const media::AudioConfig& aidl) {
+    audio_config_t legacy;
+    legacy.sample_rate = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.sampleRate));
+    legacy.channel_mask = VALUE_OR_RETURN(
+            aidl2legacy_int32_t_audio_channel_mask_t(aidl.channelMask));
+    legacy.format = VALUE_OR_RETURN(aidl2legacy_AudioFormat_audio_format_t(aidl.format));
+    legacy.offload_info = VALUE_OR_RETURN(
+            aidl2legacy_AudioOffloadInfo_audio_offload_info_t(aidl.offloadInfo));
+    legacy.frame_count = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.frameCount));
+    return legacy;
+}
+
+ConversionResult<media::AudioConfig>
+legacy2aidl_audio_config_t_AudioConfig(const audio_config_t& legacy) {
+    media::AudioConfig aidl;
+    aidl.sampleRate = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.sample_rate));
+    aidl.channelMask = VALUE_OR_RETURN(
+            legacy2aidl_audio_channel_mask_t_int32_t(legacy.channel_mask));
+    aidl.format = VALUE_OR_RETURN(legacy2aidl_audio_format_t_AudioFormat(legacy.format));
+    aidl.offloadInfo = VALUE_OR_RETURN(
+            legacy2aidl_audio_offload_info_t_AudioOffloadInfo(legacy.offload_info));
+    aidl.frameCount = VALUE_OR_RETURN(convertIntegral<int64_t>(legacy.frame_count));
+    return aidl;
+}
+
+ConversionResult<audio_config_base_t>
+aidl2legacy_AudioConfigBase_audio_config_base_t(const media::AudioConfigBase& aidl) {
+    audio_config_base_t legacy;
+    legacy.sample_rate = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.sampleRate));
+    legacy.channel_mask = VALUE_OR_RETURN(
+            aidl2legacy_int32_t_audio_channel_mask_t(aidl.channelMask));
+    legacy.format = VALUE_OR_RETURN(aidl2legacy_AudioFormat_audio_format_t(aidl.format));
+    return legacy;
+}
+
+ConversionResult<media::AudioConfigBase>
+legacy2aidl_audio_config_base_t_AudioConfigBase(const audio_config_base_t& legacy) {
+    media::AudioConfigBase aidl;
+    aidl.sampleRate = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.sample_rate));
+    aidl.channelMask = VALUE_OR_RETURN(
+            legacy2aidl_audio_channel_mask_t_int32_t(legacy.channel_mask));
+    aidl.format = VALUE_OR_RETURN(legacy2aidl_audio_format_t_AudioFormat(legacy.format));
+    return aidl;
+}
+
+ConversionResult<sp<IMemory>>
+aidl2legacy_SharedFileRegion_IMemory(const media::SharedFileRegion& aidl) {
+    sp<IMemory> legacy;
+    if (!convertSharedFileRegionToIMemory(aidl, &legacy)) {
+        return unexpected(BAD_VALUE);
+    }
+    return legacy;
+}
+
+ConversionResult<media::SharedFileRegion>
+legacy2aidl_IMemory_SharedFileRegion(const sp<IMemory>& legacy) {
+    media::SharedFileRegion aidl;
+    if (!convertIMemoryToSharedFileRegion(legacy, &aidl)) {
+        return unexpected(BAD_VALUE);
+    }
+    return aidl;
+}
+
+ConversionResult<sp<IMemory>>
+aidl2legacy_NullableSharedFileRegion_IMemory(const std::optional<media::SharedFileRegion>& aidl) {
+    sp<IMemory> legacy;
+    if (!convertNullableSharedFileRegionToIMemory(aidl, &legacy)) {
+        return unexpected(BAD_VALUE);
+    }
+    return legacy;
+}
+
+ConversionResult<std::optional<media::SharedFileRegion>>
+legacy2aidl_NullableIMemory_SharedFileRegion(const sp<IMemory>& legacy) {
+    std::optional<media::SharedFileRegion> aidl;
+    if (!convertNullableIMemoryToSharedFileRegion(legacy, &aidl)) {
+        return unexpected(BAD_VALUE);
+    }
+    return aidl;
+}
+
+}  // namespace android
diff --git a/media/libaudioclient/Android.bp b/media/libaudioclient/Android.bp
index 0c40cbb..c23c38c 100644
--- a/media/libaudioclient/Android.bp
+++ b/media/libaudioclient/Android.bp
@@ -2,6 +2,7 @@
     name: "libaudioclient_headers",
     vendor_available: true,
     min_sdk_version: "29",
+    host_supported: true,
 
     header_libs: [
         "libaudiofoundation_headers",
@@ -12,6 +13,19 @@
     export_header_lib_headers: [
         "libaudiofoundation_headers",
     ],
+    static_libs: [
+        "audioflinger-aidl-unstable-cpp",
+        "av-types-aidl-unstable-cpp",
+    ],
+    export_static_lib_headers: [
+        "audioflinger-aidl-unstable-cpp",
+        "av-types-aidl-unstable-cpp",
+    ],
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
 }
 
 cc_library_shared {
@@ -23,6 +37,7 @@
         "AudioVolumeGroup.cpp",
     ],
     shared_libs: [
+        "audioflinger-aidl-unstable-cpp",
         "capture_state_listener-aidl-cpp",
         "libaudiofoundation",
         "libaudioutils",
@@ -38,6 +53,7 @@
     include_dirs: ["system/media/audio_utils/include"],
     export_include_dirs: ["include"],
     export_shared_lib_headers: [
+        "audioflinger-aidl-unstable-cpp",
         "capture_state_listener-aidl-cpp",
     ],
 }
@@ -49,7 +65,7 @@
         export_aidl_headers: true,
         local_include_dirs: ["aidl"],
         include_dirs: [
-            "frameworks/av/media/libaudioclient/aidl",
+            "frameworks/av/aidl",
         ],
     },
 
@@ -57,7 +73,6 @@
         // AIDL files for audioclient interfaces
         // The headers for these interfaces will be available to any modules that
         // include libaudioclient, at the path "aidl/package/path/BnFoo.h"
-        ":libaudioclient_aidl_callback",
         ":libaudioclient_aidl_private",
         ":libaudioclient_aidl",
 
@@ -67,19 +82,20 @@
         "AudioTrack.cpp",
         "AudioTrackShared.cpp",
         "IAudioFlinger.cpp",
-        "IAudioFlingerClient.cpp",
         "IAudioPolicyService.cpp",
         "IAudioPolicyServiceClient.cpp",
         "IAudioTrack.cpp",
-        "IEffect.cpp",
-        "IEffectClient.cpp",
         "ToneGenerator.cpp",
         "PlayerBase.cpp",
         "RecordingActivityTracker.cpp",
         "TrackPlayerBase.cpp",
     ],
     shared_libs: [
+        "audioclient-types-aidl-unstable-cpp",
+        "audioflinger-aidl-unstable-cpp",
+        "av-types-aidl-unstable-cpp",
         "capture_state_listener-aidl-cpp",
+        "libaudioclient_aidl_conversion",
         "libaudiofoundation",
         "libaudioutils",
         "libaudiopolicy",
@@ -93,16 +109,21 @@
         "libmediautils",
         "libnblog",
         "libprocessgroup",
+        "libshmemcompat",
         "libutils",
         "libvibrator",
     ],
-    export_shared_lib_headers: ["libbinder"],
+    export_shared_lib_headers: [
+        "audioflinger-aidl-unstable-cpp",
+        "libbinder",
+    ],
 
     include_dirs: [
         "frameworks/av/media/libnbaio/include_mono/",
     ],
     local_include_dirs: [
-        "include/media", "aidl"
+        "include/media",
+        "aidl",
     ],
     header_libs: [
         "libaudioclient_headers",
@@ -110,10 +131,16 @@
         "libmedia_headers",
     ],
     export_header_lib_headers: ["libaudioclient_headers"],
+    export_static_lib_headers: [
+        "effect-aidl-cpp",
+        "shared-file-region-aidl-unstable-cpp",
+    ],
 
-    // for memory heap analysis
     static_libs: [
+        "effect-aidl-cpp",
+        // for memory heap analysis
         "libc_malloc_debug_backtrace",
+        "shared-file-region-aidl-unstable-cpp",
     ],
     cflags: [
         "-Wall",
@@ -121,7 +148,38 @@
         "-Wno-error=deprecated-declarations",
     ],
     sanitize: {
-        misc_undefined : [
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+    },
+}
+
+cc_library_shared {
+    name: "libaudioclient_aidl_conversion",
+    srcs: ["AidlConversion.cpp"],
+    local_include_dirs: ["include"],
+    shared_libs: [
+        "audioclient-types-aidl-unstable-cpp",
+        "libbase",
+        "libbinder",
+        "liblog",
+        "libshmemcompat",
+        "libutils",
+        "shared-file-region-aidl-unstable-cpp",
+    ],
+    export_shared_lib_headers: [
+        "audioclient-types-aidl-unstable-cpp",
+        "libbase",
+        "shared-file-region-aidl-unstable-cpp",
+    ],
+    cflags: [
+        "-Wall",
+        "-Werror",
+        "-Wno-error=deprecated-declarations",
+    ],
+    sanitize: {
+        misc_undefined: [
             "unsigned-integer-overflow",
             "signed-integer-overflow",
         ],
@@ -147,15 +205,6 @@
     path: "aidl",
 }
 
-// AIDL interface for audio track callback
-filegroup {
-    name: "libaudioclient_aidl_callback",
-    srcs: [
-        "aidl/android/media/IAudioTrackCallback.aidl",
-    ],
-    path: "aidl",
-}
-
 aidl_interface {
     name: "capture_state_listener-aidl",
     unstable: true,
@@ -164,3 +213,98 @@
         "aidl/android/media/ICaptureStateListener.aidl",
     ],
 }
+
+aidl_interface {
+    name: "effect-aidl",
+    unstable: true,
+    local_include_dir: "aidl",
+    srcs: [
+        "aidl/android/media/IEffect.aidl",
+        "aidl/android/media/IEffectClient.aidl",
+    ],
+    imports: [
+        "shared-file-region-aidl",
+    ],
+}
+
+aidl_interface {
+    name: "audioclient-types-aidl",
+    unstable: true,
+    host_supported: true,
+    vendor_available: true,
+    double_loadable: true,
+    local_include_dir: "aidl",
+    srcs: [
+        "aidl/android/media/AudioAttributesInternal.aidl",
+        "aidl/android/media/AudioClient.aidl",
+        "aidl/android/media/AudioConfig.aidl",
+        "aidl/android/media/AudioConfigBase.aidl",
+        "aidl/android/media/AudioContentType.aidl",
+        "aidl/android/media/AudioEncapsulationMode.aidl",
+        "aidl/android/media/AudioFlag.aidl",
+        "aidl/android/media/AudioGainConfig.aidl",
+        "aidl/android/media/AudioGainMode.aidl",
+        "aidl/android/media/AudioInputFlags.aidl",
+        "aidl/android/media/AudioIoConfigEvent.aidl",
+        "aidl/android/media/AudioIoDescriptor.aidl",
+        "aidl/android/media/AudioIoFlags.aidl",
+        "aidl/android/media/AudioOffloadInfo.aidl",
+        "aidl/android/media/AudioOutputFlags.aidl",
+        "aidl/android/media/AudioPatch.aidl",
+        "aidl/android/media/AudioPortConfig.aidl",
+        "aidl/android/media/AudioPortConfigType.aidl",
+        "aidl/android/media/AudioPortConfigDeviceExt.aidl",
+        "aidl/android/media/AudioPortConfigExt.aidl",
+        "aidl/android/media/AudioPortConfigMixExt.aidl",
+        "aidl/android/media/AudioPortConfigMixExtUseCase.aidl",
+        "aidl/android/media/AudioPortConfigSessionExt.aidl",
+        "aidl/android/media/AudioPortRole.aidl",
+        "aidl/android/media/AudioPortType.aidl",
+        "aidl/android/media/AudioSourceType.aidl",
+        "aidl/android/media/AudioStreamType.aidl",
+        "aidl/android/media/AudioUsage.aidl",
+    ],
+    imports: [
+        "audio_common-aidl",
+    ],
+    backend: {
+        cpp: {
+            min_sdk_version: "29",
+            apex_available: [
+                "//apex_available:platform",
+                "com.android.media",
+            ],
+        },
+    },
+}
+
+aidl_interface {
+    name: "audioflinger-aidl",
+    unstable: true,
+    local_include_dir: "aidl",
+    host_supported: true,
+    vendor_available: true,
+    srcs: [
+        "aidl/android/media/CreateRecordRequest.aidl",
+        "aidl/android/media/CreateRecordResponse.aidl",
+        "aidl/android/media/CreateTrackRequest.aidl",
+        "aidl/android/media/CreateTrackResponse.aidl",
+
+        "aidl/android/media/IAudioFlingerClient.aidl",
+        "aidl/android/media/IAudioTrackCallback.aidl",
+    ],
+    imports: [
+        "audioclient-types-aidl",
+        "shared-file-region-aidl",
+    ],
+    double_loadable: true,
+    backend: {
+        cpp: {
+            min_sdk_version: "29",
+            apex_available: [
+                "//apex_available:platform",
+                "com.android.media",
+            ],
+        },
+    },
+}
diff --git a/media/libaudioclient/AudioAttributes.cpp b/media/libaudioclient/AudioAttributes.cpp
index 1ee6930..ff4ba06 100644
--- a/media/libaudioclient/AudioAttributes.cpp
+++ b/media/libaudioclient/AudioAttributes.cpp
@@ -57,7 +57,7 @@
         parcel->writeInt32(0);
     } else {
         parcel->writeInt32(1);
-        parcel->writeUtf8AsUtf16(mAttributes.tags);
+        parcel->writeUtf8AsUtf16(std::string(mAttributes.tags));
     }
     parcel->writeInt32(static_cast<int32_t>(mStreamType));
     parcel->writeUint32(static_cast<uint32_t>(mGroupId));
diff --git a/media/libaudioclient/AudioEffect.cpp b/media/libaudioclient/AudioEffect.cpp
index 3ead6cb..1282474 100644
--- a/media/libaudioclient/AudioEffect.cpp
+++ b/media/libaudioclient/AudioEffect.cpp
@@ -23,77 +23,35 @@
 #include <sys/types.h>
 #include <limits.h>
 
-#include <private/media/AudioEffectShared.h>
-#include <media/AudioEffect.h>
-
-#include <utils/Log.h>
 #include <binder/IPCThreadState.h>
-
-
+#include <media/AudioEffect.h>
+#include <media/ShmemCompat.h>
+#include <private/media/AudioEffectShared.h>
+#include <utils/Log.h>
 
 namespace android {
 
+using binder::Status;
+
+namespace {
+
+// Copy from a raw pointer + size into a vector of bytes.
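+// Used below to marshal raw command buffers into the byte vectors expected by
+// media::IEffect::command(), e.g. appendToBuffer(cmdData, cmdSize, &data).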
+void appendToBuffer(const void* data,
+                    size_t size,
+                    std::vector<uint8_t>* buffer) {
+    const uint8_t* p = reinterpret_cast<const uint8_t*>(data);
+    buffer->insert(buffer->end(), p, p + size);
+}
+
+}  // namespace
+
 // ---------------------------------------------------------------------------
 
 AudioEffect::AudioEffect(const String16& opPackageName)
-    : mStatus(NO_INIT), mProbe(false), mOpPackageName(opPackageName)
+    : mOpPackageName(opPackageName)
 {
 }
 
-
-AudioEffect::AudioEffect(const effect_uuid_t *type,
-                const String16& opPackageName,
-                const effect_uuid_t *uuid,
-                int32_t priority,
-                effect_callback_t cbf,
-                void* user,
-                audio_session_t sessionId,
-                audio_io_handle_t io,
-                const AudioDeviceTypeAddr& device,
-                bool probe
-                )
-    : mStatus(NO_INIT), mProbe(false), mOpPackageName(opPackageName)
-{
-    AutoMutex lock(mConstructLock);
-    mStatus = set(type, uuid, priority, cbf, user, sessionId, io, device, probe);
-}
-
-AudioEffect::AudioEffect(const char *typeStr,
-                const String16& opPackageName,
-                const char *uuidStr,
-                int32_t priority,
-                effect_callback_t cbf,
-                void* user,
-                audio_session_t sessionId,
-                audio_io_handle_t io,
-                const AudioDeviceTypeAddr& device,
-                bool probe
-                )
-    : mStatus(NO_INIT), mProbe(false), mOpPackageName(opPackageName)
-{
-    effect_uuid_t type;
-    effect_uuid_t *pType = NULL;
-    effect_uuid_t uuid;
-    effect_uuid_t *pUuid = NULL;
-
-    ALOGV("Constructor string\n - type: %s\n - uuid: %s", typeStr, uuidStr);
-
-    if (typeStr != NULL) {
-        if (stringToGuid(typeStr, &type) == NO_ERROR) {
-            pType = &type;
-        }
-    }
-
-    if (uuidStr != NULL) {
-        if (stringToGuid(uuidStr, &uuid) == NO_ERROR) {
-            pUuid = &uuid;
-        }
-    }
-
-    AutoMutex lock(mConstructLock);
-    mStatus = set(pType, pUuid, priority, cbf, user, sessionId, io, device, probe);
-}
-
 status_t AudioEffect::set(const effect_uuid_t *type,
                 const effect_uuid_t *uuid,
                 int32_t priority,
@@ -104,7 +62,7 @@
                 const AudioDeviceTypeAddr& device,
                 bool probe)
 {
-    sp<IEffect> iEffect;
+    sp<media::IEffect> iEffect;
     sp<IMemory> cblk;
     int enabled;
 
@@ -166,8 +124,10 @@
 
     mEnabled = (volatile int32_t)enabled;
 
-    cblk = iEffect->getCblk();
-    if (cblk == 0) {
+    if (media::SharedFileRegion shmem;
+            !iEffect->getCblk(&shmem).isOk()
+            || !convertSharedFileRegionToIMemory(shmem, &cblk)
+            || cblk == 0) {
         mStatus = NO_INIT;
         ALOGE("Could not get control block");
         return mStatus;
@@ -194,6 +154,34 @@
     return mStatus;
 }
 
+status_t AudioEffect::set(const char *typeStr,
+                const char *uuidStr,
+                int32_t priority,
+                effect_callback_t cbf,
+                void* user,
+                audio_session_t sessionId,
+                audio_io_handle_t io,
+                const AudioDeviceTypeAddr& device,
+                bool probe)
+{
+    effect_uuid_t type;
+    effect_uuid_t *pType = nullptr;
+    effect_uuid_t uuid;
+    effect_uuid_t *pUuid = nullptr;
+
+    ALOGV("AudioEffect::set string\n - type: %s\n - uuid: %s",
+            typeStr ? typeStr : "nullptr", uuidStr ? uuidStr : "nullptr");
+
+    if (stringToGuid(typeStr, &type) == NO_ERROR) {
+        pType = &type;
+    }
+    if (stringToGuid(uuidStr, &uuid) == NO_ERROR) {
+        pUuid = &uuid;
+    }
+
+    return set(pType, pUuid, priority, cbf, user, sessionId, io, device, probe);
+}
+
 
 AudioEffect::~AudioEffect()
 {
@@ -242,15 +230,19 @@
     }
 
     status_t status = NO_ERROR;
-
     AutoMutex lock(mLock);
     if (enabled != mEnabled) {
+        Status bs;
+
         if (enabled) {
             ALOGV("enable %p", this);
-            status = mIEffect->enable();
+            bs = mIEffect->enable(&status);
         } else {
             ALOGV("disable %p", this);
-            status = mIEffect->disable();
+            bs = mIEffect->disable(&status);
+        }
+        if (!bs.isOk()) {
+            status = bs.transactionError();
         }
         if (status == NO_ERROR) {
             mEnabled = enabled;
@@ -283,7 +275,20 @@
         mLock.lock();
     }
 
-    status_t status = mIEffect->command(cmdCode, cmdSize, cmdData, replySize, replyData);
+    std::vector<uint8_t> data;
+    appendToBuffer(cmdData, cmdSize, &data);
+
+    status_t status;
+    std::vector<uint8_t> response;
+
+    Status bs = mIEffect->command(cmdCode, data, *replySize, &response, &status);
+    if (!bs.isOk()) {
+        status = bs.transactionError();
+    }
+    if (status == NO_ERROR) {
+        memcpy(replyData, response.data(), response.size());
+        *replySize = response.size();
+    }
 
     if (cmdCode == EFFECT_CMD_ENABLE || cmdCode == EFFECT_CMD_DISABLE) {
         if (status == NO_ERROR) {
@@ -298,7 +303,6 @@
     return status;
 }
 
-
 status_t AudioEffect::setParameter(effect_param_t *param)
 {
     if (mProbe) {
@@ -312,14 +316,27 @@
         return BAD_VALUE;
     }
 
-    uint32_t size = sizeof(int);
     uint32_t psize = ((param->psize - 1) / sizeof(int) + 1) * sizeof(int) + param->vsize;
 
     ALOGV("setParameter: param: %d, param2: %d", *(int *)param->data,
             (param->psize == 8) ? *((int *)param->data + 1): -1);
 
-    return mIEffect->command(EFFECT_CMD_SET_PARAM, sizeof (effect_param_t) + psize, param, &size,
-            &param->status);
+    std::vector<uint8_t> cmd;
+    appendToBuffer(param, sizeof(effect_param_t) + psize, &cmd);
+    std::vector<uint8_t> response;
+    status_t status;
+    Status bs = mIEffect->command(EFFECT_CMD_SET_PARAM,
+                                  cmd,
+                                  sizeof(int),
+                                  &response,
+                                  &status);
+    if (!bs.isOk()) {
+        status = bs.transactionError();
+        return status;
+    }
+    assert(response.size() == sizeof(int));
+    memcpy(&param->status, response.data(), response.size());
+    return status;
 }
 
 status_t AudioEffect::setParameterDeferred(effect_param_t *param)
@@ -364,8 +381,18 @@
     if (mCblk->clientIndex == 0) {
         return INVALID_OPERATION;
     }
-    uint32_t size = 0;
-    return mIEffect->command(EFFECT_CMD_SET_PARAM_COMMIT, 0, NULL, &size, NULL);
+    std::vector<uint8_t> cmd;
+    std::vector<uint8_t> response;
+    status_t status;
+    Status bs = mIEffect->command(EFFECT_CMD_SET_PARAM_COMMIT,
+                                  cmd,
+                                  0,
+                                  &response,
+                                  &status);
+    if (!bs.isOk()) {
+        status = bs.transactionError();
+    }
+    return status;
 }
 
 status_t AudioEffect::getParameter(effect_param_t *param)
@@ -387,8 +414,18 @@
     uint32_t psize = sizeof(effect_param_t) + ((param->psize - 1) / sizeof(int) + 1) * sizeof(int) +
             param->vsize;
 
-    return mIEffect->command(EFFECT_CMD_GET_PARAM, sizeof(effect_param_t) + param->psize, param,
-            &psize, param);
+    status_t status;
+    std::vector<uint8_t> cmd;
+    std::vector<uint8_t> response;
+    appendToBuffer(param, sizeof(effect_param_t) + param->psize, &cmd);
+
+    Status bs = mIEffect->command(EFFECT_CMD_GET_PARAM, cmd, psize, &response, &status);
+    if (!bs.isOk()) {
+        status = bs.transactionError();
+        return status;
+    }
+    memcpy(param, response.data(), response.size());
+    return status;
 }
 
 
@@ -436,19 +473,18 @@
     }
 }
 
-void AudioEffect::commandExecuted(uint32_t cmdCode,
-                                  uint32_t cmdSize __unused,
-                                  void *cmdData,
-                                  uint32_t replySize __unused,
-                                  void *replyData)
+void AudioEffect::commandExecuted(int32_t cmdCode,
+                                  const std::vector<uint8_t>& cmdData,
+                                  const std::vector<uint8_t>& replyData)
 {
-    if (cmdData == NULL || replyData == NULL) {
+    if (cmdData.empty() || replyData.empty()) {
         return;
     }
 
     if (mCbf != NULL && cmdCode == EFFECT_CMD_SET_PARAM) {
-        effect_param_t *cmd = (effect_param_t *)cmdData;
-        cmd->status = *(int32_t *)replyData;
+        std::vector<uint8_t> cmdDataCopy(cmdData);
+        effect_param_t* cmd = reinterpret_cast<effect_param_t *>(cmdDataCopy.data());
+        cmd->status = *reinterpret_cast<const int32_t *>(replyData.data());
         mCbf(EVENT_PARAMETER_CHANGED, mUserData, cmd);
     }
 }
diff --git a/media/libaudioclient/AudioRecord.cpp b/media/libaudioclient/AudioRecord.cpp
index df47def..4d9fbb0 100644
--- a/media/libaudioclient/AudioRecord.cpp
+++ b/media/libaudioclient/AudioRecord.cpp
@@ -35,6 +35,15 @@
 #include <media/MediaMetricsItem.h>
 #include <media/TypeConverter.h>
 
+#define VALUE_OR_FATAL(result)                   \
+    ({                                           \
+       auto _tmp = (result);                     \
+       LOG_ALWAYS_FATAL_IF(!_tmp.ok(),           \
+                           "Failed result (%d)", \
+                           _tmp.error());        \
+       std::move(_tmp.value());                  \
+     })
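+// Unwraps a successful conversion result or aborts the process, e.g.
+// VALUE_OR_FATAL(input.toAidl()) below.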
+
 #define WAIT_PERIOD_MS          10
 
 namespace android {
@@ -279,7 +288,8 @@
         mAttributes.source = inputSource;
         if (inputSource == AUDIO_SOURCE_VOICE_COMMUNICATION
                 || inputSource == AUDIO_SOURCE_CAMCORDER) {
-            mAttributes.flags |= AUDIO_FLAG_CAPTURE_PRIVATE;
+            mAttributes.flags = static_cast<audio_flags_mask_t>(
+                    mAttributes.flags | AUDIO_FLAG_CAPTURE_PRIVATE);
         }
     } else {
         // stream type shouldn't be looked at, this track has audio attributes
@@ -742,6 +752,8 @@
     void *iMemPointer;
     audio_track_cblk_t* cblk;
     status_t status;
+    static const int32_t kMaxCreateAttempts = 3;
+    int32_t remainingAttempts = kMaxCreateAttempts;
 
     if (audioFlinger == 0) {
         ALOGE("%s(%d): Could not get audioflinger", __func__, mPortId);
@@ -803,15 +815,26 @@
     input.sessionId = mSessionId;
     originalSessionId = mSessionId;
 
-    record = audioFlinger->createRecord(input,
-                                                              output,
-                                                              &status);
+    do {
+        media::CreateRecordResponse response;
+        record = audioFlinger->createRecord(VALUE_OR_FATAL(input.toAidl()), response, &status);
+        output = VALUE_OR_FATAL(IAudioFlinger::CreateRecordOutput::fromAidl(response));
+        if (status == NO_ERROR) {
+            break;
+        }
+        if (status != FAILED_TRANSACTION || --remainingAttempts <= 0) {
+            ALOGE("%s(%d): AudioFlinger could not create record track, status: %d",
+                  __func__, mPortId, status);
+            goto exit;
+        }
+        // FAILED_TRANSACTION happens under very specific conditions causing a state mismatch
+        // between the audio policy manager and audio flinger during the input stream open
+        // sequence, and can be recovered by retrying.
+        // Leave time for the race condition to clear before retrying, and randomize the delay
+        // to reduce the probability of concurrent retries in lockstep.
+        usleep((20 + rand() % 30) * 10000);
+    } while (1);
 
-    if (status != NO_ERROR) {
-        ALOGE("%s(%d): AudioFlinger could not create record track, status: %d",
-              __func__, mPortId, status);
-        goto exit;
-    }
     ALOG_ASSERT(record != 0);
 
     // AudioFlinger now owns the reference to the I/O handle,
@@ -1092,7 +1115,7 @@
     }
 
     if (ssize_t(userSize) < 0 || (buffer == NULL && userSize != 0)) {
-        // sanity-check. user is most-likely passing an error code, and it would
+        // Validation: the user is most likely passing an error code, and it would
         // make the return value ambiguous (actualSize vs error).
         ALOGE("%s(%d) (buffer=%p, size=%zu (%zu)",
                 __func__, mPortId, buffer, userSize, userSize);
@@ -1319,7 +1342,7 @@
         mCbf(EVENT_MORE_DATA, mUserData, &audioBuffer);
         size_t readSize = audioBuffer.size;
 
-        // Sanity check on returned size
+        // Validate the returned size
         if (ssize_t(readSize) < 0 || readSize > reqSize) {
             ALOGE("%s(%d):  EVENT_MORE_DATA requested %zu bytes but callback returned %zd bytes",
                     __func__, mPortId, reqSize, ssize_t(readSize));
diff --git a/media/libaudioclient/AudioSystem.cpp b/media/libaudioclient/AudioSystem.cpp
index 6357da4..cfe5f3a 100644
--- a/media/libaudioclient/AudioSystem.cpp
+++ b/media/libaudioclient/AudioSystem.cpp
@@ -23,6 +23,7 @@
 #include <binder/IServiceManager.h>
 #include <binder/ProcessState.h>
 #include <binder/IPCThreadState.h>
+#include <media/AidlConversion.h>
 #include <media/AudioResamplerPublic.h>
 #include <media/AudioSystem.h>
 #include <media/IAudioFlinger.h>
@@ -32,10 +33,17 @@
 
 #include <system/audio.h>
 
+#define VALUE_OR_RETURN_STATUS(x) \
+    ({ auto _tmp = (x); \
+       if (!_tmp.ok()) return Status::fromStatusT(_tmp.error()); \
+       std::move(_tmp.value()); })
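+// Unwraps a successful conversion result or returns a binder Status carrying the error, e.g.
+// VALUE_OR_RETURN_STATUS(aidl2legacy_AudioIoConfigEvent_audio_io_config_event(_event)) below.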
+
 // ----------------------------------------------------------------------------
 
 namespace android {
 
+using binder::Status;
+
 // client singleton for AudioFlinger binder interface
 Mutex AudioSystem::gLock;
 Mutex AudioSystem::gLockErrorCallbacks;
@@ -47,8 +55,9 @@
 record_config_callback AudioSystem::gRecordConfigCallback = NULL;
 
 // Required to be held while calling into gSoundTriggerCaptureStateListener.
+class CaptureStateListenerImpl;
 Mutex gSoundTriggerCaptureStateListenerLock;
-sp<AudioSystem::CaptureStateListener> gSoundTriggerCaptureStateListener = nullptr;
+sp<CaptureStateListenerImpl> gSoundTriggerCaptureStateListener = nullptr;
 
 // establish binder interface to AudioFlinger service
 const sp<IAudioFlinger> AudioSystem::get_audio_flinger()
@@ -520,11 +529,17 @@
     ALOGW("AudioFlinger server died!");
 }
 
-void AudioSystem::AudioFlingerClient::ioConfigChanged(audio_io_config_event event,
-                                                      const sp<AudioIoDescriptor>& ioDesc) {
+Status AudioSystem::AudioFlingerClient::ioConfigChanged(
+        media::AudioIoConfigEvent _event,
+        const media::AudioIoDescriptor& _ioDesc) {
+    audio_io_config_event event = VALUE_OR_RETURN_STATUS(
+            aidl2legacy_AudioIoConfigEvent_audio_io_config_event(_event));
+    sp<AudioIoDescriptor> ioDesc(
+            VALUE_OR_RETURN_STATUS(aidl2legacy_AudioIoDescriptor_AudioIoDescriptor(_ioDesc)));
+
     ALOGV("ioConfigChanged() event %d", event);
 
-    if (ioDesc == 0 || ioDesc->mIoHandle == AUDIO_IO_HANDLE_NONE) return;
+    if (ioDesc->mIoHandle == AUDIO_IO_HANDLE_NONE) return Status::ok();
 
     audio_port_handle_t deviceId = AUDIO_PORT_HANDLE_NONE;
     std::vector<sp<AudioDeviceCallback>> callbacksToCall;
@@ -581,7 +596,8 @@
         case AUDIO_INPUT_CONFIG_CHANGED: {
             sp<AudioIoDescriptor> oldDesc = getIoDescriptor_l(ioDesc->mIoHandle);
             if (oldDesc == 0) {
-                ALOGW("ioConfigChanged() modifying unknown output! %d", ioDesc->mIoHandle);
+                ALOGW("ioConfigChanged() modifying unknown %s! %d",
+                    event == AUDIO_OUTPUT_CONFIG_CHANGED ? "output" : "input", ioDesc->mIoHandle);
                 break;
             }
 
@@ -638,6 +654,8 @@
         // If callbacksToCall is not empty, it implies ioDesc->mIoHandle and deviceId are valid
         cb->onAudioDeviceUpdate(ioDesc->mIoHandle, deviceId);
     }
+
+    return Status::ok();
 }
 
 status_t AudioSystem::AudioFlingerClient::getInputBufferSize(
@@ -1361,7 +1379,7 @@
     return aps->registerPolicyMixes(mixes, registration);
 }
 
-status_t AudioSystem::setUidDeviceAffinities(uid_t uid, const Vector<AudioDeviceTypeAddr>& devices)
+status_t AudioSystem::setUidDeviceAffinities(uid_t uid, const AudioDeviceTypeAddrVector& devices)
 {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
@@ -1375,7 +1393,7 @@
 }
 
 status_t AudioSystem::setUserIdDeviceAffinities(int userId,
-                                                const Vector<AudioDeviceTypeAddr>& devices)
+                                                const AudioDeviceTypeAddrVector& devices)
 {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
@@ -1602,74 +1620,141 @@
     return aps->isCallScreenModeSupported();
 }
 
-status_t AudioSystem::setPreferredDeviceForStrategy(product_strategy_t strategy,
-                                                    const AudioDeviceTypeAddr &device)
+status_t AudioSystem::setDevicesRoleForStrategy(product_strategy_t strategy,
+                                                device_role_t role,
+                                                const AudioDeviceTypeAddrVector &devices)
 {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) {
         return PERMISSION_DENIED;
     }
-    return aps->setPreferredDeviceForStrategy(strategy, device);
+    return aps->setDevicesRoleForStrategy(strategy, role, devices);
 }
 
-status_t AudioSystem::removePreferredDeviceForStrategy(product_strategy_t strategy)
+status_t AudioSystem::removeDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role)
 {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) {
         return PERMISSION_DENIED;
     }
-    return aps->removePreferredDeviceForStrategy(strategy);
+    return aps->removeDevicesRoleForStrategy(strategy, role);
 }
 
-status_t AudioSystem::getPreferredDeviceForStrategy(product_strategy_t strategy,
-        AudioDeviceTypeAddr &device)
+status_t AudioSystem::getDevicesForRoleAndStrategy(product_strategy_t strategy,
+                                                   device_role_t role,
+                                                   AudioDeviceTypeAddrVector &devices)
 {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) {
         return PERMISSION_DENIED;
     }
-    return aps->getPreferredDeviceForStrategy(strategy, device);
+    return aps->getDevicesForRoleAndStrategy(strategy, role, devices);
+}
+
+status_t AudioSystem::setDevicesRoleForCapturePreset(audio_source_t audioSource,
+                                                     device_role_t role,
+                                                     const AudioDeviceTypeAddrVector &devices)
+{
+    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    if (aps == 0) {
+        return PERMISSION_DENIED;
+    }
+    return aps->setDevicesRoleForCapturePreset(audioSource, role, devices);
+}
+
+status_t AudioSystem::addDevicesRoleForCapturePreset(audio_source_t audioSource,
+                                                     device_role_t role,
+                                                     const AudioDeviceTypeAddrVector &devices)
+{
+    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    if (aps == 0) {
+        return PERMISSION_DENIED;
+    }
+    return aps->addDevicesRoleForCapturePreset(audioSource, role, devices);
+}
+
+status_t AudioSystem::removeDevicesRoleForCapturePreset(
+        audio_source_t audioSource, device_role_t role, const AudioDeviceTypeAddrVector& devices)
+{
+    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    if (aps == 0) {
+        return PERMISSION_DENIED;
+    }
+    return aps->removeDevicesRoleForCapturePreset(audioSource, role, devices);
+}
+
+status_t AudioSystem::clearDevicesRoleForCapturePreset(audio_source_t audioSource,
+                                                       device_role_t role)
+{
+    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    if (aps == 0) {
+        return PERMISSION_DENIED;
+    }
+    return aps->clearDevicesRoleForCapturePreset(audioSource, role);
+}
+
+status_t AudioSystem::getDevicesForRoleAndCapturePreset(audio_source_t audioSource,
+                                                        device_role_t role,
+                                                        AudioDeviceTypeAddrVector &devices)
+{
+    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    if (aps == 0) {
+        return PERMISSION_DENIED;
+    }
+    return aps->getDevicesForRoleAndCapturePreset(audioSource, role, devices);
 }
 
 class CaptureStateListenerImpl : public media::BnCaptureStateListener,
                                  public IBinder::DeathRecipient {
 public:
+    CaptureStateListenerImpl(
+            const sp<IAudioPolicyService>& aps,
+            const sp<AudioSystem::CaptureStateListener>& listener)
+            : mAps(aps), mListener(listener) {}
+
+    void init() {
+        bool active;
+        status_t status = mAps->registerSoundTriggerCaptureStateListener(this, &active);
+        if (status != NO_ERROR) {
+            mListener->onServiceDied();
+            return;
+        }
+        mListener->onStateChanged(active);
+        IInterface::asBinder(mAps)->linkToDeath(this);
+    }
+
     binder::Status setCaptureState(bool active) override {
         Mutex::Autolock _l(gSoundTriggerCaptureStateListenerLock);
-        gSoundTriggerCaptureStateListener->onStateChanged(active);
+        mListener->onStateChanged(active);
         return binder::Status::ok();
     }
 
     void binderDied(const wp<IBinder>&) override {
         Mutex::Autolock _l(gSoundTriggerCaptureStateListenerLock);
-        gSoundTriggerCaptureStateListener->onServiceDied();
+        mListener->onServiceDied();
         gSoundTriggerCaptureStateListener = nullptr;
     }
+
+private:
+    // Need this in order to keep the death recipient alive.
+    sp<IAudioPolicyService> mAps;
+    sp<AudioSystem::CaptureStateListener> mListener;
 };
 
 status_t AudioSystem::registerSoundTriggerCaptureStateListener(
     const sp<CaptureStateListener>& listener) {
+    LOG_ALWAYS_FATAL_IF(listener == nullptr);
+
     const sp<IAudioPolicyService>& aps =
             AudioSystem::get_audio_policy_service();
     if (aps == 0) {
         return PERMISSION_DENIED;
     }
 
-    sp<CaptureStateListenerImpl> wrapper = new CaptureStateListenerImpl();
-
     Mutex::Autolock _l(gSoundTriggerCaptureStateListenerLock);
+    gSoundTriggerCaptureStateListener = new CaptureStateListenerImpl(aps, listener);
+    gSoundTriggerCaptureStateListener->init();
 
-    bool active;
-    status_t status =
-        aps->registerSoundTriggerCaptureStateListener(wrapper, &active);
-    if (status != NO_ERROR) {
-        listener->onServiceDied();
-        return NO_ERROR;
-    }
-    gSoundTriggerCaptureStateListener = listener;
-    listener->onStateChanged(active);
-    sp<IBinder> binder = IInterface::asBinder(aps);
-    binder->linkToDeath(wrapper);
     return NO_ERROR;
 }
 
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index 32129f0..14950a8 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -38,6 +38,15 @@
 #include <media/MediaMetricsItem.h>
 #include <media/TypeConverter.h>
 
+#define VALUE_OR_FATAL(result)                   \
+    ({                                           \
+       auto _tmp = (result);                     \
+       LOG_ALWAYS_FATAL_IF(!_tmp.ok(),           \
+                           "Failed result (%d)", \
+                           _tmp.error());        \
+       std::move(_tmp.value());                  \
+     })
+
 #define WAIT_PERIOD_MS                  10
 #define WAIT_STREAM_END_TIMEOUT_SEC     120
 static const int kMaxLoopCountNotifications = 32;
@@ -210,7 +219,11 @@
     return NO_ERROR;
 }
 
-AudioTrack::AudioTrack()
+AudioTrack::AudioTrack() : AudioTrack("" /*opPackageName*/)
+{
+}
+
+AudioTrack::AudioTrack(const std::string& opPackageName)
     : mStatus(NO_INIT),
       mState(STATE_STOPPED),
       mPreviousPriority(ANDROID_PRIORITY_NORMAL),
@@ -218,11 +231,12 @@
       mPausedPosition(0),
       mSelectedDeviceId(AUDIO_PORT_HANDLE_NONE),
       mRoutedDeviceId(AUDIO_PORT_HANDLE_NONE),
+      mOpPackageName(opPackageName),
       mAudioTrackCallback(new AudioTrackCallback())
 {
     mAttributes.content_type = AUDIO_CONTENT_TYPE_UNKNOWN;
     mAttributes.usage = AUDIO_USAGE_UNKNOWN;
-    mAttributes.flags = 0x0;
+    mAttributes.flags = AUDIO_FLAG_NONE;
     strcpy(mAttributes.tags, "");
 }
 
@@ -244,12 +258,14 @@
         const audio_attributes_t* pAttributes,
         bool doNotReconnect,
         float maxRequiredSpeed,
-        audio_port_handle_t selectedDeviceId)
+        audio_port_handle_t selectedDeviceId,
+        const std::string& opPackageName)
     : mStatus(NO_INIT),
       mState(STATE_STOPPED),
       mPreviousPriority(ANDROID_PRIORITY_NORMAL),
       mPreviousSchedulingGroup(SP_DEFAULT),
       mPausedPosition(0),
+      mOpPackageName(opPackageName),
       mAudioTrackCallback(new AudioTrackCallback())
 {
     mAttributes = AUDIO_ATTRIBUTES_INITIALIZER;
@@ -277,13 +293,15 @@
         pid_t pid,
         const audio_attributes_t* pAttributes,
         bool doNotReconnect,
-        float maxRequiredSpeed)
+        float maxRequiredSpeed,
+        const std::string& opPackageName)
     : mStatus(NO_INIT),
       mState(STATE_STOPPED),
       mPreviousPriority(ANDROID_PRIORITY_NORMAL),
       mPreviousSchedulingGroup(SP_DEFAULT),
       mPausedPosition(0),
       mSelectedDeviceId(AUDIO_PORT_HANDLE_NONE),
+      mOpPackageName(opPackageName),
       mAudioTrackCallback(new AudioTrackCallback())
 {
     mAttributes = AUDIO_ATTRIBUTES_INITIALIZER;
@@ -458,7 +476,7 @@
     if (format == AUDIO_FORMAT_DEFAULT) {
         format = AUDIO_FORMAT_PCM_16_BIT;
     } else if (format == AUDIO_FORMAT_IEC61937) { // HDMI pass-through?
-        mAttributes.flags |= AUDIO_OUTPUT_FLAG_IEC958_NONAUDIO;
+        flags = static_cast<audio_output_flags_t>(flags | AUDIO_OUTPUT_FLAG_IEC958_NONAUDIO);
     }
 
     // validate parameters
@@ -529,6 +547,7 @@
     } else {
         mOffloadInfo = NULL;
         memset(&mOffloadInfoCopy, 0, sizeof(audio_offload_info_t));
+        mOffloadInfoCopy = AUDIO_INFO_INITIALIZER;
     }
 
     mVolume[AUDIO_INTERLEAVE_LEFT] = 1.0f;
@@ -635,6 +654,36 @@
     return status;
 }
 
+
+status_t AudioTrack::set(
+        audio_stream_type_t streamType,
+        uint32_t sampleRate,
+        audio_format_t format,
+        uint32_t channelMask,
+        size_t frameCount,
+        audio_output_flags_t flags,
+        callback_t cbf,
+        void* user,
+        int32_t notificationFrames,
+        const sp<IMemory>& sharedBuffer,
+        bool threadCanCallJava,
+        audio_session_t sessionId,
+        transfer_type transferType,
+        const audio_offload_info_t *offloadInfo,
+        uid_t uid,
+        pid_t pid,
+        const audio_attributes_t* pAttributes,
+        bool doNotReconnect,
+        float maxRequiredSpeed,
+        audio_port_handle_t selectedDeviceId)
+{
+    return set(streamType, sampleRate, format,
+            static_cast<audio_channel_mask_t>(channelMask),
+            frameCount, flags, cbf, user, notificationFrames, sharedBuffer,
+            threadCanCallJava, sessionId, transferType, offloadInfo, uid, pid,
+            pAttributes, doNotReconnect, maxRequiredSpeed, selectedDeviceId);
+}
+
 // -------------------------------------------------------------------------
 
 status_t AudioTrack::start()
@@ -1555,12 +1604,15 @@
     input.selectedDeviceId = mSelectedDeviceId;
     input.sessionId = mSessionId;
     input.audioTrackCallback = mAudioTrackCallback;
+    input.opPackageName = mOpPackageName;
 
-    IAudioFlinger::CreateTrackOutput output;
-
-    sp<IAudioTrack> track = audioFlinger->createTrack(input,
-                                                      output,
+    media::CreateTrackResponse response;
+    sp<IAudioTrack> track = audioFlinger->createTrack(VALUE_OR_FATAL(input.toAidl()),
+                                                      response,
                                                       &status);
+    IAudioFlinger::CreateTrackOutput output = VALUE_OR_FATAL(
+            IAudioFlinger::CreateTrackOutput::fromAidl(
+                    response));
 
     if (status != NO_ERROR || output.outputId == AUDIO_IO_HANDLE_NONE) {
         ALOGE("%s(%d): AudioFlinger could not create track, status: %d output %d",
@@ -1936,7 +1988,7 @@
     }
 
     if (ssize_t(userSize) < 0 || (buffer == NULL && userSize != 0)) {
-        // Sanity-check: user is most-likely passing an error code, and it would
+        // Validation: the user is most likely passing an error code, and it would
         // make the return value ambiguous (actualSize vs error).
         ALOGE("%s(%d): AudioTrack::write(buffer=%p, size=%zu (%zd)",
                 __func__, mPortId, buffer, userSize, userSize);
@@ -2326,7 +2378,7 @@
                 mUserData, &audioBuffer);
         size_t writtenSize = audioBuffer.size;
 
-        // Sanity check on returned size
+        // Validate the returned size
         if (ssize_t(writtenSize) < 0 || writtenSize > reqSize) {
             ALOGE("%s(%d): EVENT_MORE_DATA requested %zu bytes but callback returned %zd bytes",
                     __func__, mPortId, reqSize, ssize_t(writtenSize));
diff --git a/media/libaudioclient/AudioTrackShared.cpp b/media/libaudioclient/AudioTrackShared.cpp
index f1f8f9c..e2c9698 100644
--- a/media/libaudioclient/AudioTrackShared.cpp
+++ b/media/libaudioclient/AudioTrackShared.cpp
@@ -900,11 +900,8 @@
     }
     audio_track_cblk_t* cblk = mCblk;
 
-    int32_t flush = cblk->u.mStreaming.mFlush;
-    if (flush != mFlush) {
-        // FIXME should return an accurate value, but over-estimate is better than under-estimate
-        return mFrameCount;
-    }
+    flushBufferIfNeeded();
+
     const int32_t rear = getRear();
     ssize_t filled = audio_utils::safe_sub_overflow(rear, cblk->u.mStreaming.mFront);
     // pipe should not already be overfull
diff --git a/media/libaudioclient/IAudioFlinger.cpp b/media/libaudioclient/IAudioFlinger.cpp
index 16d2232..57142b0 100644
--- a/media/libaudioclient/IAudioFlinger.cpp
+++ b/media/libaudioclient/IAudioFlinger.cpp
@@ -24,6 +24,8 @@
 
 #include <binder/IPCThreadState.h>
 #include <binder/Parcel.h>
+#include <media/AudioSanitizer.h>
+#include <media/IAudioPolicyService.h>
 #include <mediautils/ServiceUtilities.h>
 #include <mediautils/TimeCheck.h>
 #include "IAudioFlinger.h"
@@ -95,6 +97,156 @@
 
 #define MAX_ITEMS_PER_LIST 1024
 
+ConversionResult<media::CreateTrackRequest> IAudioFlinger::CreateTrackInput::toAidl() const {
+    media::CreateTrackRequest aidl;
+    aidl.attr = VALUE_OR_RETURN(legacy2aidl_audio_attributes_t_AudioAttributesInternal(attr));
+    aidl.config = VALUE_OR_RETURN(legacy2aidl_audio_config_t_AudioConfig(config));
+    aidl.clientInfo = VALUE_OR_RETURN(legacy2aidl_AudioClient(clientInfo));
+    aidl.sharedBuffer = VALUE_OR_RETURN(legacy2aidl_NullableIMemory_SharedFileRegion(sharedBuffer));
+    aidl.notificationsPerBuffer = VALUE_OR_RETURN(convertIntegral<int32_t>(notificationsPerBuffer));
+    aidl.speed = speed;
+    aidl.audioTrackCallback = audioTrackCallback;
+    aidl.opPackageName = opPackageName;
+    aidl.flags = VALUE_OR_RETURN(legacy2aidl_audio_output_flags_mask(flags));
+    aidl.frameCount = VALUE_OR_RETURN(convertIntegral<int64_t>(frameCount));
+    aidl.notificationFrameCount = VALUE_OR_RETURN(convertIntegral<int64_t>(notificationFrameCount));
+    aidl.selectedDeviceId = VALUE_OR_RETURN(
+            legacy2aidl_audio_port_handle_t_int32_t(selectedDeviceId));
+    aidl.sessionId = VALUE_OR_RETURN(legacy2aidl_audio_session_t_int32_t(sessionId));
+    return aidl;
+}
+
+ConversionResult<IAudioFlinger::CreateTrackInput>
+IAudioFlinger::CreateTrackInput::fromAidl(const media::CreateTrackRequest& aidl) {
+    IAudioFlinger::CreateTrackInput legacy;
+    legacy.attr = VALUE_OR_RETURN(aidl2legacy_AudioAttributesInternal_audio_attributes_t(aidl.attr));
+    legacy.config = VALUE_OR_RETURN(aidl2legacy_AudioConfig_audio_config_t(aidl.config));
+    legacy.clientInfo = VALUE_OR_RETURN(aidl2legacy_AudioClient(aidl.clientInfo));
+    legacy.sharedBuffer = VALUE_OR_RETURN(aidl2legacy_NullableSharedFileRegion_IMemory(aidl.sharedBuffer));
+    legacy.notificationsPerBuffer = VALUE_OR_RETURN(
+            convertIntegral<uint32_t>(aidl.notificationsPerBuffer));
+    legacy.speed = aidl.speed;
+    legacy.audioTrackCallback = aidl.audioTrackCallback;
+    legacy.opPackageName = aidl.opPackageName;
+    legacy.flags = VALUE_OR_RETURN(aidl2legacy_audio_output_flags_mask(aidl.flags));
+    legacy.frameCount = VALUE_OR_RETURN(convertIntegral<size_t>(aidl.frameCount));
+    legacy.notificationFrameCount = VALUE_OR_RETURN(
+            convertIntegral<size_t>(aidl.notificationFrameCount));
+    legacy.selectedDeviceId = VALUE_OR_RETURN(
+            aidl2legacy_int32_t_audio_port_handle_t(aidl.selectedDeviceId));
+    legacy.sessionId = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_session_t(aidl.sessionId));
+    return legacy;
+}
+
+ConversionResult<media::CreateTrackResponse>
+IAudioFlinger::CreateTrackOutput::toAidl() const {
+    media::CreateTrackResponse aidl;
+    aidl.flags = VALUE_OR_RETURN(legacy2aidl_audio_output_flags_mask(flags));
+    aidl.frameCount = VALUE_OR_RETURN(convertIntegral<int64_t>(frameCount));
+    aidl.notificationFrameCount = VALUE_OR_RETURN(convertIntegral<int64_t>(notificationFrameCount));
+    aidl.selectedDeviceId = VALUE_OR_RETURN(
+            legacy2aidl_audio_port_handle_t_int32_t(selectedDeviceId));
+    aidl.sessionId = VALUE_OR_RETURN(legacy2aidl_audio_session_t_int32_t(sessionId));
+    aidl.sampleRate = VALUE_OR_RETURN(convertIntegral<int32_t>(sampleRate));
+    aidl.afFrameCount = VALUE_OR_RETURN(convertIntegral<int64_t>(afFrameCount));
+    aidl.afSampleRate = VALUE_OR_RETURN(convertIntegral<int32_t>(afSampleRate));
+    aidl.afLatencyMs = VALUE_OR_RETURN(convertIntegral<int32_t>(afLatencyMs));
+    aidl.outputId = VALUE_OR_RETURN(legacy2aidl_audio_io_handle_t_int32_t(outputId));
+    aidl.portId = VALUE_OR_RETURN(legacy2aidl_audio_port_handle_t_int32_t(portId));
+    return aidl;
+}
+
+ConversionResult<IAudioFlinger::CreateTrackOutput>
+IAudioFlinger::CreateTrackOutput::fromAidl(
+        const media::CreateTrackResponse& aidl) {
+    IAudioFlinger::CreateTrackOutput legacy;
+    legacy.flags = VALUE_OR_RETURN(aidl2legacy_audio_output_flags_mask(aidl.flags));
+    legacy.frameCount = VALUE_OR_RETURN(convertIntegral<size_t>(aidl.frameCount));
+    legacy.notificationFrameCount = VALUE_OR_RETURN(
+            convertIntegral<size_t>(aidl.notificationFrameCount));
+    legacy.selectedDeviceId = VALUE_OR_RETURN(
+            aidl2legacy_int32_t_audio_port_handle_t(aidl.selectedDeviceId));
+    legacy.sessionId = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_session_t(aidl.sessionId));
+    legacy.sampleRate = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.sampleRate));
+    legacy.afFrameCount = VALUE_OR_RETURN(convertIntegral<size_t>(aidl.afFrameCount));
+    legacy.afSampleRate = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.afSampleRate));
+    legacy.afLatencyMs = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.afLatencyMs));
+    legacy.outputId = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_io_handle_t(aidl.outputId));
+    legacy.portId = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_port_handle_t(aidl.portId));
+    return legacy;
+}
+
+ConversionResult<media::CreateRecordRequest>
+IAudioFlinger::CreateRecordInput::toAidl() const {
+    media::CreateRecordRequest aidl;
+    aidl.attr = VALUE_OR_RETURN(legacy2aidl_audio_attributes_t_AudioAttributesInternal(attr));
+    aidl.config = VALUE_OR_RETURN(legacy2aidl_audio_config_base_t_AudioConfigBase(config));
+    aidl.clientInfo = VALUE_OR_RETURN(legacy2aidl_AudioClient(clientInfo));
+    aidl.opPackageName = VALUE_OR_RETURN(legacy2aidl_String16_string(opPackageName));
+    aidl.riid = VALUE_OR_RETURN(legacy2aidl_audio_unique_id_t_int32_t(riid));
+    aidl.flags = VALUE_OR_RETURN(legacy2aidl_audio_input_flags_mask(flags));
+    aidl.frameCount = VALUE_OR_RETURN(convertIntegral<int64_t>(frameCount));
+    aidl.notificationFrameCount = VALUE_OR_RETURN(convertIntegral<int64_t>(notificationFrameCount));
+    aidl.selectedDeviceId = VALUE_OR_RETURN(
+            legacy2aidl_audio_port_handle_t_int32_t(selectedDeviceId));
+    aidl.sessionId = VALUE_OR_RETURN(legacy2aidl_audio_session_t_int32_t(sessionId));
+    return aidl;
+}
+
+ConversionResult<IAudioFlinger::CreateRecordInput>
+IAudioFlinger::CreateRecordInput::fromAidl(
+        const media::CreateRecordRequest& aidl) {
+    IAudioFlinger::CreateRecordInput legacy;
+    legacy.attr = VALUE_OR_RETURN(aidl2legacy_AudioAttributesInternal_audio_attributes_t(aidl.attr));
+    legacy.config = VALUE_OR_RETURN(aidl2legacy_AudioConfigBase_audio_config_base_t(aidl.config));
+    legacy.clientInfo = VALUE_OR_RETURN(aidl2legacy_AudioClient(aidl.clientInfo));
+    legacy.opPackageName = VALUE_OR_RETURN(aidl2legacy_string_view_String16(aidl.opPackageName));
+    legacy.riid = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_unique_id_t(aidl.riid));
+    legacy.flags = VALUE_OR_RETURN(aidl2legacy_audio_input_flags_mask(aidl.flags));
+    legacy.frameCount = VALUE_OR_RETURN(convertIntegral<size_t>(aidl.frameCount));
+    legacy.notificationFrameCount = VALUE_OR_RETURN(
+            convertIntegral<size_t>(aidl.notificationFrameCount));
+    legacy.selectedDeviceId = VALUE_OR_RETURN(
+            aidl2legacy_int32_t_audio_port_handle_t(aidl.selectedDeviceId));
+    legacy.sessionId = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_session_t(aidl.sessionId));
+    return legacy;
+}
+
+ConversionResult<media::CreateRecordResponse>
+IAudioFlinger::CreateRecordOutput::toAidl() const {
+    media::CreateRecordResponse aidl;
+    aidl.flags = VALUE_OR_RETURN(legacy2aidl_audio_input_flags_mask(flags));
+    aidl.frameCount = VALUE_OR_RETURN(convertIntegral<int64_t>(frameCount));
+    aidl.notificationFrameCount = VALUE_OR_RETURN(convertIntegral<int64_t>(notificationFrameCount));
+    aidl.selectedDeviceId = VALUE_OR_RETURN(
+            legacy2aidl_audio_port_handle_t_int32_t(selectedDeviceId));
+    aidl.sessionId = VALUE_OR_RETURN(legacy2aidl_audio_session_t_int32_t(sessionId));
+    aidl.sampleRate = VALUE_OR_RETURN(convertIntegral<int32_t>(sampleRate));
+    aidl.inputId = VALUE_OR_RETURN(legacy2aidl_audio_io_handle_t_int32_t(inputId));
+    aidl.cblk = VALUE_OR_RETURN(legacy2aidl_NullableIMemory_SharedFileRegion(cblk));
+    aidl.buffers = VALUE_OR_RETURN(legacy2aidl_NullableIMemory_SharedFileRegion(buffers));
+    aidl.portId = VALUE_OR_RETURN(legacy2aidl_audio_port_handle_t_int32_t(portId));
+    return aidl;
+}
+
+ConversionResult<IAudioFlinger::CreateRecordOutput>
+IAudioFlinger::CreateRecordOutput::fromAidl(
+        const media::CreateRecordResponse& aidl) {
+    IAudioFlinger::CreateRecordOutput legacy;
+    legacy.flags = VALUE_OR_RETURN(aidl2legacy_audio_input_flags_mask(aidl.flags));
+    legacy.frameCount = VALUE_OR_RETURN(convertIntegral<size_t>(aidl.frameCount));
+    legacy.notificationFrameCount = VALUE_OR_RETURN(
+            convertIntegral<size_t>(aidl.notificationFrameCount));
+    legacy.selectedDeviceId = VALUE_OR_RETURN(
+            aidl2legacy_int32_t_audio_port_handle_t(aidl.selectedDeviceId));
+    legacy.sessionId = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_session_t(aidl.sessionId));
+    legacy.sampleRate = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.sampleRate));
+    legacy.inputId = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_io_handle_t(aidl.inputId));
+    legacy.cblk = VALUE_OR_RETURN(aidl2legacy_NullableSharedFileRegion_IMemory(aidl.cblk));
+    legacy.buffers = VALUE_OR_RETURN(aidl2legacy_NullableSharedFileRegion_IMemory(aidl.buffers));
+    legacy.portId = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_port_handle_t(aidl.portId));
+    return legacy;
+}
 
 class BpAudioFlinger : public BpInterface<IAudioFlinger>
 {
@@ -104,9 +256,9 @@
     {
     }
 
-    virtual sp<IAudioTrack> createTrack(const CreateTrackInput& input,
-                                        CreateTrackOutput& output,
-                                        status_t *status)
+    virtual sp<IAudioTrack> createTrack(const media::CreateTrackRequest& input,
+                                        media::CreateTrackResponse& output,
+                                        status_t* status)
     {
         Parcel data, reply;
         sp<IAudioTrack> track;
@@ -116,7 +268,7 @@
             return track;
         }
 
-        input.writeToParcel(&data);
+        data.writeParcelable(input);
 
         status_t lStatus = remote()->transact(CREATE_TRACK, data, &reply);
         if (lStatus != NO_ERROR) {
@@ -139,9 +291,9 @@
         return track;
     }
 
-    virtual sp<media::IAudioRecord> createRecord(const CreateRecordInput& input,
-                                                 CreateRecordOutput& output,
-                                                 status_t *status)
+    virtual sp<media::IAudioRecord> createRecord(const media::CreateRecordRequest& input,
+                                                 media::CreateRecordResponse& output,
+                                                 status_t* status)
     {
         Parcel data, reply;
         sp<media::IAudioRecord> record;
@@ -151,7 +303,7 @@
             return record;
         }
 
-        input.writeToParcel(&data);
+        data.writeParcelable(input);
 
         status_t lStatus = remote()->transact(CREATE_RECORD, data, &reply);
         if (lStatus != NO_ERROR) {
@@ -371,7 +523,7 @@
         return reply.readString8();
     }
 
-    virtual void registerClient(const sp<IAudioFlingerClient>& client)
+    virtual void registerClient(const sp<media::IAudioFlingerClient>& client)
     {
         Parcel data, reply;
         data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
@@ -653,9 +805,9 @@
         return NO_ERROR;
     }
 
-    virtual sp<IEffect> createEffect(
+    virtual sp<media::IEffect> createEffect(
                                     effect_descriptor_t *pDesc,
-                                    const sp<IEffectClient>& client,
+                                    const sp<media::IEffectClient>& client,
                                     int32_t priority,
                                     audio_io_handle_t output,
                                     audio_session_t sessionId,
@@ -668,7 +820,7 @@
                                     int *enabled)
     {
         Parcel data, reply;
-        sp<IEffect> effect;
+        sp<media::IEffect> effect;
         if (pDesc == NULL) {
             if (status != NULL) {
                 *status = BAD_VALUE;
@@ -705,7 +857,7 @@
             if (enabled != NULL) {
                 *enabled = tmp;
             }
-            effect = interface_cast<IEffect>(reply.readStrongBinder());
+            effect = interface_cast<media::IEffect>(reply.readStrongBinder());
             reply.read(pDesc, sizeof(effect_descriptor_t));
         }
         if (status != NULL) {
@@ -1001,7 +1153,7 @@
             break;
     }
 
-    // Whitelist of relevant events to trigger log merging.
+    // List of relevant events that trigger log merging.
     // Log merging should activate during audio activity of any kind. These are considered the
     // most relevant events.
     // TODO: select the items from the list more wisely
@@ -1024,18 +1176,28 @@
     std::string tag("IAudioFlinger command " + std::to_string(code));
     TimeCheck check(tag.c_str());
 
+    // Make sure we connect to the Audio Policy Service before calling into AudioFlinger:
+    //  - AudioFlinger can call into the Audio Policy Service with its global mutex held.
+    //  - If this is the first time the Audio Policy Service is queried from inside the
+    //    audioserver process, this triggers Audio Policy Manager initialization.
+    //  - Audio Policy Manager initialization calls into AudioFlinger, which will try to lock
+    //    its global mutex, and a deadlock will occur.
+    if (IPCThreadState::self()->getCallingPid() != getpid()) {
+        AudioSystem::get_audio_policy_service();
+    }
+
     switch (code) {
         case CREATE_TRACK: {
             CHECK_INTERFACE(IAudioFlinger, data, reply);
 
-            CreateTrackInput input;
-            if (input.readFromParcel((Parcel*)&data) != NO_ERROR) {
+            media::CreateTrackRequest input;
+            if (data.readParcelable(&input) != NO_ERROR) {
                 reply->writeInt32(DEAD_OBJECT);
                 return NO_ERROR;
             }
 
             status_t status;
-            CreateTrackOutput output;
+            media::CreateTrackResponse output;
 
             sp<IAudioTrack> track = createTrack(input,
                                                output,
@@ -1053,14 +1215,14 @@
         case CREATE_RECORD: {
             CHECK_INTERFACE(IAudioFlinger, data, reply);
 
-            CreateRecordInput input;
-            if (input.readFromParcel((Parcel*)&data) != NO_ERROR) {
+            media::CreateRecordRequest input;
+            if (data.readParcelable(&input) != NO_ERROR) {
                 reply->writeInt32(DEAD_OBJECT);
                 return NO_ERROR;
             }
 
             status_t status;
-            CreateRecordOutput output;
+            media::CreateRecordResponse output;
 
             sp<media::IAudioRecord> record = createRecord(input,
                                                           output,
@@ -1201,7 +1363,7 @@
 
         case REGISTER_CLIENT: {
             CHECK_INTERFACE(IAudioFlinger, data, reply);
-            sp<IAudioFlingerClient> client = interface_cast<IAudioFlingerClient>(
+            sp<media::IAudioFlingerClient> client = interface_cast<media::IAudioFlingerClient>(
                     data.readStrongBinder());
             registerClient(client);
             return NO_ERROR;
@@ -1210,7 +1372,7 @@
             CHECK_INTERFACE(IAudioFlinger, data, reply);
             uint32_t sampleRate = data.readInt32();
             audio_format_t format = (audio_format_t) data.readInt32();
-            audio_channel_mask_t channelMask = data.readInt32();
+            audio_channel_mask_t channelMask = (audio_channel_mask_t) data.readInt32();
             reply->writeInt64( getInputBufferSize(sampleRate, format, channelMask) );
             return NO_ERROR;
         } break;
@@ -1386,7 +1548,8 @@
             if (data.read(&desc, sizeof(effect_descriptor_t)) != NO_ERROR) {
                 ALOGE("b/23905951");
             }
-            sp<IEffectClient> client = interface_cast<IEffectClient>(data.readStrongBinder());
+            sp<media::IEffectClient> client =
+                    interface_cast<media::IEffectClient>(data.readStrongBinder());
             int32_t priority = data.readInt32();
             audio_io_handle_t output = (audio_io_handle_t) data.readInt32();
             audio_session_t sessionId = (audio_session_t) data.readInt32();
@@ -1402,8 +1565,8 @@
             int id = 0;
             int enabled = 0;
 
-            sp<IEffect> effect = createEffect(&desc, client, priority, output, sessionId, device,
-                    opPackageName, pid, probe, &status, &id, &enabled);
+            sp<media::IEffect> effect = createEffect(&desc, client, priority, output, sessionId,
+                    device, opPackageName, pid, probe, &status, &id, &enabled);
             reply->writeInt32(status);
             reply->writeInt32(id);
             reply->writeInt32(enabled);
@@ -1483,10 +1646,15 @@
         case GET_AUDIO_PORT: {
             CHECK_INTERFACE(IAudioFlinger, data, reply);
             struct audio_port port = {};
-            if (data.read(&port, sizeof(struct audio_port)) != NO_ERROR) {
+            status_t status = data.read(&port, sizeof(struct audio_port));
+            if (status != NO_ERROR) {
                 ALOGE("b/23905951");
+                return status;
             }
-            status_t status = getAudioPort(&port);
+            status = AudioSanitizer::sanitizeAudioPort(&port);
+            if (status == NO_ERROR) {
+                status = getAudioPort(&port);
+            }
             reply->writeInt32(status);
             if (status == NO_ERROR) {
                 reply->write(&port, sizeof(struct audio_port));
@@ -1496,12 +1664,20 @@
         case CREATE_AUDIO_PATCH: {
             CHECK_INTERFACE(IAudioFlinger, data, reply);
             struct audio_patch patch;
-            data.read(&patch, sizeof(struct audio_patch));
-            audio_patch_handle_t handle = AUDIO_PATCH_HANDLE_NONE;
-            if (data.read(&handle, sizeof(audio_patch_handle_t)) != NO_ERROR) {
-                ALOGE("b/23905951");
+            status_t status = data.read(&patch, sizeof(struct audio_patch));
+            if (status != NO_ERROR) {
+                return status;
             }
-            status_t status = createAudioPatch(&patch, &handle);
+            audio_patch_handle_t handle = AUDIO_PATCH_HANDLE_NONE;
+            status = data.read(&handle, sizeof(audio_patch_handle_t));
+            if (status != NO_ERROR) {
+                ALOGE("b/23905951");
+                return status;
+            }
+            status = AudioSanitizer::sanitizeAudioPatch(&patch);
+            if (status == NO_ERROR) {
+                status = createAudioPatch(&patch, &handle);
+            }
             reply->writeInt32(status);
             if (status == NO_ERROR) {
                 reply->write(&handle, sizeof(audio_patch_handle_t));
@@ -1546,8 +1722,14 @@
         case SET_AUDIO_PORT_CONFIG: {
             CHECK_INTERFACE(IAudioFlinger, data, reply);
             struct audio_port_config config;
-            data.read(&config, sizeof(struct audio_port_config));
-            status_t status = setAudioPortConfig(&config);
+            status_t status = data.read(&config, sizeof(struct audio_port_config));
+            if (status != NO_ERROR) {
+                return status;
+            }
+            status = AudioSanitizer::sanitizeAudioPortConfig(&config);
+            if (status == NO_ERROR) {
+                status = setAudioPortConfig(&config);
+            }
             reply->writeInt32(status);
             return NO_ERROR;
         } break;
diff --git a/media/libaudioclient/IAudioFlingerClient.cpp b/media/libaudioclient/IAudioFlingerClient.cpp
deleted file mode 100644
index 47eb7dc..0000000
--- a/media/libaudioclient/IAudioFlingerClient.cpp
+++ /dev/null
@@ -1,91 +0,0 @@
-/*
- * Copyright (C) 2009 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#define LOG_TAG "IAudioFlingerClient"
-#include <utils/Log.h>
-
-#include <stdint.h>
-#include <sys/types.h>
-
-#include <binder/Parcel.h>
-
-#include <media/IAudioFlingerClient.h>
-#include <media/AudioSystem.h>
-
-namespace android {
-
-enum {
-    IO_CONFIG_CHANGED = IBinder::FIRST_CALL_TRANSACTION
-};
-
-class BpAudioFlingerClient : public BpInterface<IAudioFlingerClient>
-{
-public:
-    explicit BpAudioFlingerClient(const sp<IBinder>& impl)
-        : BpInterface<IAudioFlingerClient>(impl)
-    {
-    }
-
-    void ioConfigChanged(audio_io_config_event event, const sp<AudioIoDescriptor>& ioDesc)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioFlingerClient::getInterfaceDescriptor());
-        data.writeInt32(event);
-        data.writeInt32((int32_t)ioDesc->mIoHandle);
-        data.write(&ioDesc->mPatch, sizeof(struct audio_patch));
-        data.writeInt32(ioDesc->mSamplingRate);
-        data.writeInt32(ioDesc->mFormat);
-        data.writeInt32(ioDesc->mChannelMask);
-        data.writeInt64(ioDesc->mFrameCount);
-        data.writeInt64(ioDesc->mFrameCountHAL);
-        data.writeInt32(ioDesc->mLatency);
-        data.writeInt32(ioDesc->mPortId);
-        remote()->transact(IO_CONFIG_CHANGED, data, &reply, IBinder::FLAG_ONEWAY);
-    }
-};
-
-IMPLEMENT_META_INTERFACE(AudioFlingerClient, "android.media.IAudioFlingerClient");
-
-// ----------------------------------------------------------------------
-
-status_t BnAudioFlingerClient::onTransact(
-    uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
-{
-    switch (code) {
-    case IO_CONFIG_CHANGED: {
-            CHECK_INTERFACE(IAudioFlingerClient, data, reply);
-            audio_io_config_event event = (audio_io_config_event)data.readInt32();
-            sp<AudioIoDescriptor> ioDesc = new AudioIoDescriptor();
-            ioDesc->mIoHandle = (audio_io_handle_t) data.readInt32();
-            data.read(&ioDesc->mPatch, sizeof(struct audio_patch));
-            ioDesc->mSamplingRate = data.readInt32();
-            ioDesc->mFormat = (audio_format_t) data.readInt32();
-            ioDesc->mChannelMask = (audio_channel_mask_t) data.readInt32();
-            ioDesc->mFrameCount = data.readInt64();
-            ioDesc->mFrameCountHAL = data.readInt64();
-            ioDesc->mLatency = data.readInt32();
-            ioDesc->mPortId = data.readInt32();
-            ioConfigChanged(event, ioDesc);
-            return NO_ERROR;
-        } break;
-        default:
-            return BBinder::onTransact(code, data, reply, flags);
-    }
-}
-
-// ----------------------------------------------------------------------------
-
-} // namespace android
diff --git a/media/libaudioclient/IAudioPolicyService.cpp b/media/libaudioclient/IAudioPolicyService.cpp
index 60af84b..cd098b5 100644
--- a/media/libaudioclient/IAudioPolicyService.cpp
+++ b/media/libaudioclient/IAudioPolicyService.cpp
@@ -26,6 +26,7 @@
 #include <binder/IPCThreadState.h>
 #include <binder/Parcel.h>
 #include <media/AudioEffect.h>
+#include <media/AudioSanitizer.h>
 #include <media/IAudioPolicyService.h>
 #include <mediautils/ServiceUtilities.h>
 #include <mediautils/TimeCheck.h>
@@ -112,13 +113,18 @@
     MOVE_EFFECTS_TO_IO,
     SET_RTT_ENABLED,
     IS_CALL_SCREEN_MODE_SUPPORTED,
-    SET_PREFERRED_DEVICE_FOR_PRODUCT_STRATEGY,
-    REMOVE_PREFERRED_DEVICE_FOR_PRODUCT_STRATEGY,
-    GET_PREFERRED_DEVICE_FOR_PRODUCT_STRATEGY,
+    SET_DEVICES_ROLE_FOR_PRODUCT_STRATEGY,
+    REMOVE_DEVICES_ROLE_FOR_PRODUCT_STRATEGY,
+    GET_DEVICES_FOR_ROLE_AND_PRODUCT_STRATEGY,
     GET_DEVICES_FOR_ATTRIBUTES,
     AUDIO_MODULES_UPDATED,  // oneway
     SET_CURRENT_IME_UID,
     REGISTER_SOUNDTRIGGER_CAPTURE_STATE_LISTENER,
+    SET_DEVICES_ROLE_FOR_CAPTURE_PRESET,
+    ADD_DEVICES_ROLE_FOR_CAPTURE_PRESET,
+    REMOVE_DEVICES_ROLE_FOR_CAPTURE_PRESET,
+    CLEAR_DEVICES_ROLE_FOR_CAPTURE_PRESET,
+    GET_DEVICES_FOR_ROLE_AND_CAPTURE_PRESET,
 };
 
 #define MAX_ITEMS_PER_LIST 1024
@@ -1173,31 +1179,18 @@
         return reply.readBool();
     }
 
-    virtual status_t setUidDeviceAffinities(uid_t uid, const Vector<AudioDeviceTypeAddr>& devices)
+    virtual status_t setUidDeviceAffinities(uid_t uid, const AudioDeviceTypeAddrVector& devices)
     {
         Parcel data, reply;
         data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
 
         data.writeInt32((int32_t) uid);
-        size_t size = devices.size();
-        size_t sizePosition = data.dataPosition();
-        data.writeInt32((int32_t) size);
-        size_t finalSize = size;
-        for (size_t i = 0; i < size; i++) {
-            size_t position = data.dataPosition();
-            if (devices[i].writeToParcel(&data) != NO_ERROR) {
-                data.setDataPosition(position);
-                finalSize--;
-            }
-        }
-        if (size != finalSize) {
-            size_t position = data.dataPosition();
-            data.setDataPosition(sizePosition);
-            data.writeInt32(finalSize);
-            data.setDataPosition(position);
+        status_t status = data.writeParcelableVector(devices);
+        if (status != NO_ERROR) {
+            return status;
         }
 
-        status_t status = remote()->transact(SET_UID_DEVICE_AFFINITY, data, &reply);
+        status = remote()->transact(SET_UID_DEVICE_AFFINITY, data, &reply);
         if (status == NO_ERROR) {
             status = (status_t)reply.readInt32();
         }
@@ -1218,51 +1211,37 @@
         return status;
     }
 
-        virtual status_t setUserIdDeviceAffinities(int userId,
-                const Vector<AudioDeviceTypeAddr>& devices)
-        {
-            Parcel data, reply;
-            data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+    virtual status_t setUserIdDeviceAffinities(int userId, const AudioDeviceTypeAddrVector& devices)
+    {
+        Parcel data, reply;
+        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
 
-            data.writeInt32((int32_t) userId);
-            size_t size = devices.size();
-            size_t sizePosition = data.dataPosition();
-            data.writeInt32((int32_t) size);
-            size_t finalSize = size;
-            for (size_t i = 0; i < size; i++) {
-                size_t position = data.dataPosition();
-                if (devices[i].writeToParcel(&data) != NO_ERROR) {
-                    data.setDataPosition(position);
-                    finalSize--;
-                }
-            }
-            if (size != finalSize) {
-                size_t position = data.dataPosition();
-                data.setDataPosition(sizePosition);
-                data.writeInt32(finalSize);
-                data.setDataPosition(position);
-            }
-
-            status_t status = remote()->transact(SET_USERID_DEVICE_AFFINITY, data, &reply);
-            if (status == NO_ERROR) {
-                status = (status_t)reply.readInt32();
-            }
+        data.writeInt32((int32_t) userId);
+        status_t status = data.writeParcelableVector(devices);
+        if (status != NO_ERROR) {
             return status;
         }
 
-        virtual status_t removeUserIdDeviceAffinities(int userId) {
-            Parcel data, reply;
-            data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-
-            data.writeInt32((int32_t) userId);
-
-            status_t status =
-                remote()->transact(REMOVE_USERID_DEVICE_AFFINITY, data, &reply);
-            if (status == NO_ERROR) {
-                status = (status_t) reply.readInt32();
-            }
-            return status;
+        status = remote()->transact(SET_USERID_DEVICE_AFFINITY, data, &reply);
+        if (status == NO_ERROR) {
+            status = (status_t)reply.readInt32();
         }
+        return status;
+    }
+
+    virtual status_t removeUserIdDeviceAffinities(int userId) {
+        Parcel data, reply;
+        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+
+        data.writeInt32((int32_t) userId);
+
+        status_t status =
+            remote()->transact(REMOVE_USERID_DEVICE_AFFINITY, data, &reply);
+        if (status == NO_ERROR) {
+            status = (status_t) reply.readInt32();
+        }
+        return status;
+    }
 
     virtual status_t listAudioProductStrategies(AudioProductStrategyVector &strategies)
     {
@@ -1384,17 +1363,31 @@
         return reply.readBool();
     }
 
-    virtual status_t setPreferredDeviceForStrategy(product_strategy_t strategy,
-            const AudioDeviceTypeAddr &device)
+    virtual status_t setDevicesRoleForStrategy(product_strategy_t strategy,
+            device_role_t role, const AudioDeviceTypeAddrVector &devices)
     {
         Parcel data, reply;
         data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
         data.writeUint32(static_cast<uint32_t>(strategy));
-        status_t status = device.writeToParcel(&data);
+        data.writeUint32(static_cast<uint32_t>(role));
+        status_t status = data.writeParcelableVector(devices);
         if (status != NO_ERROR) {
             return BAD_VALUE;
         }
-        status = remote()->transact(SET_PREFERRED_DEVICE_FOR_PRODUCT_STRATEGY,
+        status = remote()->transact(SET_DEVICES_ROLE_FOR_PRODUCT_STRATEGY, data, &reply);
+        if (status != NO_ERROR) {
+           return status;
+        }
+        return static_cast<status_t>(reply.readInt32());
+    }
+
+    virtual status_t removeDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role)
+    {
+        Parcel data, reply;
+        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+        data.writeUint32(static_cast<uint32_t>(strategy));
+        data.writeUint32(static_cast<uint32_t>(role));
+        status_t status = remote()->transact(REMOVE_DEVICES_ROLE_FOR_PRODUCT_STRATEGY,
                 data, &reply);
         if (status != NO_ERROR) {
            return status;
@@ -1402,31 +1395,108 @@
         return static_cast<status_t>(reply.readInt32());
     }
 
-    virtual status_t removePreferredDeviceForStrategy(product_strategy_t strategy)
+    virtual status_t getDevicesForRoleAndStrategy(product_strategy_t strategy,
+            device_role_t role, AudioDeviceTypeAddrVector &devices)
     {
         Parcel data, reply;
         data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
         data.writeUint32(static_cast<uint32_t>(strategy));
-        status_t status = remote()->transact(REMOVE_PREFERRED_DEVICE_FOR_PRODUCT_STRATEGY,
-                data, &reply);
-        if (status != NO_ERROR) {
-           return status;
-        }
-        return static_cast<status_t>(reply.readInt32());
-    }
-
-    virtual status_t getPreferredDeviceForStrategy(product_strategy_t strategy,
-            AudioDeviceTypeAddr &device)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        data.writeUint32(static_cast<uint32_t>(strategy));
-        status_t status = remote()->transact(GET_PREFERRED_DEVICE_FOR_PRODUCT_STRATEGY,
+        data.writeUint32(static_cast<uint32_t>(role));
+        status_t status = remote()->transact(GET_DEVICES_FOR_ROLE_AND_PRODUCT_STRATEGY,
                 data, &reply);
         if (status != NO_ERROR) {
             return status;
         }
-        status = device.readFromParcel(&reply);
+        status = reply.readParcelableVector(&devices);
+        if (status != NO_ERROR) {
+            return status;
+        }
+        return static_cast<status_t>(reply.readInt32());
+    }
+
+    virtual status_t setDevicesRoleForCapturePreset(audio_source_t audioSource,
+            device_role_t role, const AudioDeviceTypeAddrVector &devices) {
+        Parcel data, reply;
+        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+        data.writeUint32(static_cast<uint32_t>(audioSource));
+        data.writeUint32(static_cast<uint32_t>(role));
+        status_t status = data.writeParcelableVector(devices);
+        if (status != NO_ERROR) {
+            return status;
+        }
+        status = remote()->transact(SET_DEVICES_ROLE_FOR_CAPTURE_PRESET, data, &reply);
+        if (status != NO_ERROR) {
+            return status;
+        }
+        return static_cast<status_t>(reply.readInt32());
+    }
+
+    virtual status_t addDevicesRoleForCapturePreset(audio_source_t audioSource,
+            device_role_t role, const AudioDeviceTypeAddrVector &devices)
+    {
+        Parcel data, reply;
+        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+        data.writeUint32(static_cast<uint32_t>(audioSource));
+        data.writeUint32(static_cast<uint32_t>(role));
+        status_t status = data.writeParcelableVector(devices);
+        if (status != NO_ERROR) {
+            return status;
+        }
+        status = remote()->transact(ADD_DEVICES_ROLE_FOR_CAPTURE_PRESET, data, &reply);
+        if (status != NO_ERROR) {
+           return status;
+        }
+        return static_cast<status_t>(reply.readInt32());
+    }
+
+    virtual status_t removeDevicesRoleForCapturePreset(
+            audio_source_t audioSource, device_role_t role,
+            const AudioDeviceTypeAddrVector& devices)
+    {
+        Parcel data, reply;
+        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+        data.writeUint32(static_cast<uint32_t>(audioSource));
+        data.writeUint32(static_cast<uint32_t>(role));
+        status_t status = data.writeParcelableVector(devices);
+        if (status != NO_ERROR) {
+            return status;
+        }
+        status = remote()->transact(REMOVE_DEVICES_ROLE_FOR_CAPTURE_PRESET,
+                data, &reply);
+        if (status != NO_ERROR) {
+           return status;
+        }
+        return static_cast<status_t>(reply.readInt32());
+    }
+
+    virtual status_t clearDevicesRoleForCapturePreset(
+            audio_source_t audioSource, device_role_t role)
+    {
+        Parcel data, reply;
+        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+        data.writeUint32(static_cast<uint32_t>(audioSource));
+        data.writeUint32(static_cast<uint32_t>(role));
+        status_t status = remote()->transact(CLEAR_DEVICES_ROLE_FOR_CAPTURE_PRESET,
+                data, &reply);
+        if (status != NO_ERROR) {
+           return status;
+        }
+        return static_cast<status_t>(reply.readInt32());
+    }
+
+    virtual status_t getDevicesForRoleAndCapturePreset(audio_source_t audioSource,
+            device_role_t role, AudioDeviceTypeAddrVector &devices)
+    {
+        Parcel data, reply;
+        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+        data.writeUint32(static_cast<uint32_t>(audioSource));
+        data.writeUint32(static_cast<uint32_t>(role));
+        status_t status = remote()->transact(GET_DEVICES_FOR_ROLE_AND_CAPTURE_PRESET,
+                data, &reply);
+        if (status != NO_ERROR) {
+            return status;
+        }
+        status = reply.readParcelableVector(&devices);
         if (status != NO_ERROR) {
             return status;
         }
@@ -1544,6 +1614,7 @@
 //      case SET_FORCE_USE:
         case INIT_STREAM_VOLUME:
         case SET_STREAM_VOLUME:
+        case SET_VOLUME_ATTRIBUTES:
         case REGISTER_POLICY_MIXES:
         case SET_MASTER_MONO:
         case GET_SURROUND_FORMATS:
@@ -1561,15 +1632,20 @@
         case RELEASE_SOUNDTRIGGER_SESSION:
         case SET_RTT_ENABLED:
         case IS_CALL_SCREEN_MODE_SUPPORTED:
-        case SET_PREFERRED_DEVICE_FOR_PRODUCT_STRATEGY:
+        case SET_DEVICES_ROLE_FOR_PRODUCT_STRATEGY:
         case SET_SUPPORTED_SYSTEM_USAGES:
-        case REMOVE_PREFERRED_DEVICE_FOR_PRODUCT_STRATEGY:
-        case GET_PREFERRED_DEVICE_FOR_PRODUCT_STRATEGY:
+        case REMOVE_DEVICES_ROLE_FOR_PRODUCT_STRATEGY:
+        case GET_DEVICES_FOR_ROLE_AND_PRODUCT_STRATEGY:
         case GET_DEVICES_FOR_ATTRIBUTES:
         case SET_ALLOWED_CAPTURE_POLICY:
         case AUDIO_MODULES_UPDATED:
         case SET_CURRENT_IME_UID:
-        case REGISTER_SOUNDTRIGGER_CAPTURE_STATE_LISTENER: {
+        case REGISTER_SOUNDTRIGGER_CAPTURE_STATE_LISTENER:
+        case SET_DEVICES_ROLE_FOR_CAPTURE_PRESET:
+        case ADD_DEVICES_ROLE_FOR_CAPTURE_PRESET:
+        case REMOVE_DEVICES_ROLE_FOR_CAPTURE_PRESET:
+        case CLEAR_DEVICES_ROLE_FOR_CAPTURE_PRESET:
+        case GET_DEVICES_FOR_ROLE_AND_CAPTURE_PRESET: {
             if (!isServiceUid(IPCThreadState::self()->getCallingUid())) {
                 ALOGW("%s: transaction %d received from PID %d unauthorized UID %d",
                       __func__, code, IPCThreadState::self()->getCallingPid(),
@@ -1685,7 +1761,6 @@
             if (status != NO_ERROR) {
                 return status;
             }
-            sanetizeAudioAttributes(&attr);
             audio_session_t session = (audio_session_t)data.readInt32();
             audio_stream_type_t stream = AUDIO_STREAM_DEFAULT;
             bool hasStream = data.readInt32() != 0;
@@ -1703,10 +1778,14 @@
             audio_port_handle_t portId = (audio_port_handle_t)data.readInt32();
             audio_io_handle_t output = 0;
             std::vector<audio_io_handle_t> secondaryOutputs;
-            status = getOutputForAttr(&attr,
-                    &output, session, &stream, pid, uid,
-                    &config,
-                    flags, &selectedDeviceId, &portId, &secondaryOutputs);
+
+            status = AudioSanitizer::sanitizeAudioAttributes(&attr, "68953950");
+            if (status == NO_ERROR) {
+                status = getOutputForAttr(&attr,
+                                          &output, session, &stream, pid, uid,
+                                          &config,
+                                          flags, &selectedDeviceId, &portId, &secondaryOutputs);
+            }
             reply->writeInt32(status);
             status = reply->write(&attr, sizeof(audio_attributes_t));
             if (status != NO_ERROR) {
@@ -1745,8 +1824,11 @@
         case GET_INPUT_FOR_ATTR: {
             CHECK_INTERFACE(IAudioPolicyService, data, reply);
             audio_attributes_t attr = {};
-            data.read(&attr, sizeof(audio_attributes_t));
-            sanetizeAudioAttributes(&attr);
+            status_t status = data.read(&attr, sizeof(audio_attributes_t));
+            if (status != NO_ERROR) {
+                return status;
+            }
+
             audio_io_handle_t input = (audio_io_handle_t)data.readInt32();
             audio_unique_id_t riid = (audio_unique_id_t)data.readInt32();
             audio_session_t session = (audio_session_t)data.readInt32();
@@ -1759,9 +1841,13 @@
             audio_input_flags_t flags = (audio_input_flags_t) data.readInt32();
             audio_port_handle_t selectedDeviceId = (audio_port_handle_t) data.readInt32();
             audio_port_handle_t portId = (audio_port_handle_t)data.readInt32();
-            status_t status = getInputForAttr(&attr, &input, riid, session, pid, uid,
-                                              opPackageName, &config,
-                                              flags, &selectedDeviceId, &portId);
+
+            status = AudioSanitizer::sanitizeAudioAttributes(&attr, "68953950");
+            if (status == NO_ERROR) {
+                status = getInputForAttr(&attr, &input, riid, session, pid, uid,
+                                         opPackageName, &config,
+                                         flags, &selectedDeviceId, &portId);
+            }
             reply->writeInt32(status);
             if (status == NO_ERROR) {
                 reply->writeInt32(input);
@@ -1842,11 +1928,15 @@
             if (status != NO_ERROR) {
                 return status;
             }
+
             int index = data.readInt32();
             audio_devices_t device = static_cast <audio_devices_t>(data.readInt32());
 
-            reply->writeInt32(static_cast <uint32_t>(setVolumeIndexForAttributes(attributes,
-                                                                                 index, device)));
+            status = AudioSanitizer::sanitizeAudioAttributes(&attributes, "169572641");
+            if (status == NO_ERROR) {
+                status = setVolumeIndexForAttributes(attributes, index, device);
+            }
+            reply->writeInt32(static_cast <int32_t>(status));
             return NO_ERROR;
         } break;
 
@@ -1860,8 +1950,11 @@
             audio_devices_t device = static_cast <audio_devices_t>(data.readInt32());
 
             int index = 0;
-            status = getVolumeIndexForAttributes(attributes, index, device);
-            reply->writeInt32(static_cast <uint32_t>(status));
+            status = AudioSanitizer::sanitizeAudioAttributes(&attributes, "169572641");
+            if (status == NO_ERROR) {
+                status = getVolumeIndexForAttributes(attributes, index, device);
+            }
+            reply->writeInt32(static_cast <int32_t>(status));
             if (status == NO_ERROR) {
                 reply->writeInt32(index);
             }
@@ -1877,8 +1970,11 @@
             }
 
             int index = 0;
-            status = getMinVolumeIndexForAttributes(attributes, index);
-            reply->writeInt32(static_cast <uint32_t>(status));
+            status = AudioSanitizer::sanitizeAudioAttributes(&attributes, "169572641");
+            if (status == NO_ERROR) {
+                status = getMinVolumeIndexForAttributes(attributes, index);
+            }
+            reply->writeInt32(static_cast <int32_t>(status));
             if (status == NO_ERROR) {
                 reply->writeInt32(index);
             }
@@ -1894,8 +1990,11 @@
             }
 
             int index = 0;
-            status = getMaxVolumeIndexForAttributes(attributes, index);
-            reply->writeInt32(static_cast <uint32_t>(status));
+            status = AudioSanitizer::sanitizeAudioAttributes(&attributes, "169572641");
+            if (status == NO_ERROR) {
+                status = getMaxVolumeIndexForAttributes(attributes, index);
+            }
+            reply->writeInt32(static_cast <int32_t>(status));
             if (status == NO_ERROR) {
                 reply->writeInt32(index);
             }
@@ -1913,31 +2012,37 @@
         case GET_OUTPUT_FOR_EFFECT: {
             CHECK_INTERFACE(IAudioPolicyService, data, reply);
             effect_descriptor_t desc = {};
-            if (data.read(&desc, sizeof(desc)) != NO_ERROR) {
+            status_t status = data.read(&desc, sizeof(desc));
+            if (status != NO_ERROR) {
                 android_errorWriteLog(0x534e4554, "73126106");
+                return status;
             }
-            (void)sanitizeEffectDescriptor(&desc);
-            audio_io_handle_t output = getOutputForEffect(&desc);
-            reply->writeInt32(static_cast <int>(output));
+            audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
+            status = AudioSanitizer::sanitizeEffectDescriptor(&desc, "73126106");
+            if (status == NO_ERROR) {
+                output = getOutputForEffect(&desc);
+            }
+            reply->writeInt32(static_cast <int32_t>(output));
             return NO_ERROR;
         } break;
 
         case REGISTER_EFFECT: {
             CHECK_INTERFACE(IAudioPolicyService, data, reply);
             effect_descriptor_t desc = {};
-            if (data.read(&desc, sizeof(desc)) != NO_ERROR) {
+            status_t status = data.read(&desc, sizeof(desc));
+            if (status != NO_ERROR) {
                 android_errorWriteLog(0x534e4554, "73126106");
+                return status;
             }
-            (void)sanitizeEffectDescriptor(&desc);
             audio_io_handle_t io = data.readInt32();
             uint32_t strategy = data.readInt32();
             audio_session_t session = (audio_session_t) data.readInt32();
             int id = data.readInt32();
-            reply->writeInt32(static_cast <int32_t>(registerEffect(&desc,
-                                                                   io,
-                                                                   strategy,
-                                                                   session,
-                                                                   id)));
+            status = AudioSanitizer::sanitizeEffectDescriptor(&desc, "73126106");
+            if (status == NO_ERROR) {
+                status = registerEffect(&desc, io, strategy, session, id);
+            }
+            reply->writeInt32(static_cast <int32_t>(status));
             return NO_ERROR;
         } break;
 
@@ -2046,7 +2151,11 @@
             if (status != NO_ERROR) return status;
             status = data.read(&attributes, sizeof(audio_attributes_t));
             if (status != NO_ERROR) return status;
-            reply->writeInt32(isDirectOutputSupported(config, attributes));
+            status = AudioSanitizer::sanitizeAudioAttributes(&attributes, "169572641");
+            if (status == NO_ERROR) {
+                status = isDirectOutputSupported(config, attributes);
+            }
+            reply->writeInt32(static_cast <int32_t>(status));
             return NO_ERROR;
         }
 
@@ -2085,10 +2194,15 @@
         case GET_AUDIO_PORT: {
             CHECK_INTERFACE(IAudioPolicyService, data, reply);
             struct audio_port port = {};
-            if (data.read(&port, sizeof(struct audio_port)) != NO_ERROR) {
+            status_t status = data.read(&port, sizeof(struct audio_port));
+            if (status != NO_ERROR) {
                 ALOGE("b/23912202");
+                return status;
             }
-            status_t status = getAudioPort(&port);
+            status = AudioSanitizer::sanitizeAudioPort(&port);
+            if (status == NO_ERROR) {
+                status = getAudioPort(&port);
+            }
             reply->writeInt32(status);
             if (status == NO_ERROR) {
                 reply->write(&port, sizeof(struct audio_port));
@@ -2099,12 +2213,20 @@
         case CREATE_AUDIO_PATCH: {
             CHECK_INTERFACE(IAudioPolicyService, data, reply);
             struct audio_patch patch = {};
-            data.read(&patch, sizeof(struct audio_patch));
-            audio_patch_handle_t handle = AUDIO_PATCH_HANDLE_NONE;
-            if (data.read(&handle, sizeof(audio_patch_handle_t)) != NO_ERROR) {
-                ALOGE("b/23912202");
+            status_t status = data.read(&patch, sizeof(struct audio_patch));
+            if (status != NO_ERROR) {
+                return status;
             }
-            status_t status = createAudioPatch(&patch, &handle);
+            audio_patch_handle_t handle = AUDIO_PATCH_HANDLE_NONE;
+            status = data.read(&handle, sizeof(audio_patch_handle_t));
+            if (status != NO_ERROR) {
+                ALOGE("b/23912202");
+                return status;
+            }
+            status = AudioSanitizer::sanitizeAudioPatch(&patch);
+            if (status == NO_ERROR) {
+                status = createAudioPatch(&patch, &handle);
+            }
             reply->writeInt32(status);
             if (status == NO_ERROR) {
                 reply->write(&handle, sizeof(audio_patch_handle_t));
@@ -2154,9 +2276,12 @@
         case SET_AUDIO_PORT_CONFIG: {
             CHECK_INTERFACE(IAudioPolicyService, data, reply);
             struct audio_port_config config = {};
-            data.read(&config, sizeof(struct audio_port_config));
-            (void)sanitizeAudioPortConfig(&config);
-            status_t status = setAudioPortConfig(&config);
+            status_t status = data.read(&config, sizeof(struct audio_port_config));
+            if (status != NO_ERROR) {
+                return status;
+            }
+            (void)AudioSanitizer::sanitizeAudioPortConfig(&config);
+            status = setAudioPortConfig(&config);
             reply->writeInt32(status);
             return NO_ERROR;
         }
@@ -2232,13 +2357,25 @@
         case START_AUDIO_SOURCE: {
             CHECK_INTERFACE(IAudioPolicyService, data, reply);
             struct audio_port_config source = {};
-            data.read(&source, sizeof(struct audio_port_config));
-            (void)sanitizeAudioPortConfig(&source);
+            status_t status = data.read(&source, sizeof(struct audio_port_config));
+            if (status != NO_ERROR) {
+                return status;
+            }
             audio_attributes_t attributes = {};
-            data.read(&attributes, sizeof(audio_attributes_t));
-            sanetizeAudioAttributes(&attributes);
+            status = data.read(&attributes, sizeof(audio_attributes_t));
+            if (status != NO_ERROR) {
+                return status;
+            }
+            status = AudioSanitizer::sanitizeAudioPortConfig(&source);
+            if (status == NO_ERROR) {
+                // OK to not always sanitize attributes as startAudioSource() is not called if
+                // the port config is invalid.
+                status = AudioSanitizer::sanitizeAudioAttributes(&attributes, "68953950");
+            }
             audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
-            status_t status = startAudioSource(&source, &attributes, &portId);
+            if (status == NO_ERROR) {
+                status = startAudioSource(&source, &attributes, &portId);
+            }
             reply->writeInt32(status);
             reply->writeInt32(portId);
             return NO_ERROR;
@@ -2460,15 +2597,12 @@
         case SET_UID_DEVICE_AFFINITY: {
             CHECK_INTERFACE(IAudioPolicyService, data, reply);
             const uid_t uid = (uid_t) data.readInt32();
-            Vector<AudioDeviceTypeAddr> devices;
-            size_t size = (size_t)data.readInt32();
-            for (size_t i = 0; i < size; i++) {
-                AudioDeviceTypeAddr device;
-                if (device.readFromParcel((Parcel*)&data) == NO_ERROR) {
-                    devices.add(device);
-                }
+            AudioDeviceTypeAddrVector devices;
+            status_t status = data.readParcelableVector(&devices);
+            if (status != NO_ERROR) {
+                return status;
             }
-            status_t status = setUidDeviceAffinities(uid, devices);
+            status = setUidDeviceAffinities(uid, devices);
             reply->writeInt32(status);
             return NO_ERROR;
         }
@@ -2484,15 +2618,12 @@
         case SET_USERID_DEVICE_AFFINITY: {
             CHECK_INTERFACE(IAudioPolicyService, data, reply);
             const int userId = (int) data.readInt32();
-            Vector<AudioDeviceTypeAddr> devices;
-            size_t size = (size_t)data.readInt32();
-            for (size_t i = 0; i < size; i++) {
-                AudioDeviceTypeAddr device;
-                if (device.readFromParcel((Parcel*)&data) == NO_ERROR) {
-                    devices.add(device);
-                }
+            AudioDeviceTypeAddrVector devices;
+            status_t status = data.readParcelableVector(&devices);
+            if (status != NO_ERROR) {
+                return status;
             }
-            status_t status = setUserIdDeviceAffinities(userId, devices);
+            status = setUserIdDeviceAffinities(userId, devices);
             reply->writeInt32(status);
             return NO_ERROR;
         }
@@ -2628,7 +2759,7 @@
         case SET_ALLOWED_CAPTURE_POLICY: {
             CHECK_INTERFACE(IAudioPolicyService, data, reply);
             uid_t uid = data.readInt32();
-            audio_flags_mask_t flags = data.readInt32();
+            audio_flags_mask_t flags = static_cast<audio_flags_mask_t>(data.readInt32());
             status_t status = setAllowedCapturePolicy(uid, flags);
             reply->writeInt32(status);
             return NO_ERROR;
@@ -2649,33 +2780,36 @@
             return NO_ERROR;
         }
 
-        case SET_PREFERRED_DEVICE_FOR_PRODUCT_STRATEGY: {
+        case SET_DEVICES_ROLE_FOR_PRODUCT_STRATEGY: {
             CHECK_INTERFACE(IAudioPolicyService, data, reply);
             product_strategy_t strategy = (product_strategy_t) data.readUint32();
-            AudioDeviceTypeAddr device;
-            status_t status = device.readFromParcel((Parcel*)&data);
+            device_role_t role = (device_role_t) data.readUint32();
+            AudioDeviceTypeAddrVector devices;
+            status_t status = data.readParcelableVector(&devices);
             if (status != NO_ERROR) {
                 return status;
             }
-            status = setPreferredDeviceForStrategy(strategy, device);
+            status = setDevicesRoleForStrategy(strategy, role, devices);
             reply->writeInt32(status);
             return NO_ERROR;
         }
 
-        case REMOVE_PREFERRED_DEVICE_FOR_PRODUCT_STRATEGY: {
+        case REMOVE_DEVICES_ROLE_FOR_PRODUCT_STRATEGY: {
             CHECK_INTERFACE(IAudioPolicyService, data, reply);
             product_strategy_t strategy = (product_strategy_t) data.readUint32();
-            status_t status = removePreferredDeviceForStrategy(strategy);
+            device_role_t role = (device_role_t) data.readUint32();
+            status_t status = removeDevicesRoleForStrategy(strategy, role);
             reply->writeInt32(status);
             return NO_ERROR;
         }
 
-        case GET_PREFERRED_DEVICE_FOR_PRODUCT_STRATEGY: {
+        case GET_DEVICES_FOR_ROLE_AND_PRODUCT_STRATEGY: {
             CHECK_INTERFACE(IAudioPolicyService, data, reply);
             product_strategy_t strategy = (product_strategy_t) data.readUint32();
-            AudioDeviceTypeAddr device;
-            status_t status = getPreferredDeviceForStrategy(strategy, device);
-            status_t marshall_status = device.writeToParcel(reply);
+            device_role_t role = (device_role_t) data.readUint32();
+            AudioDeviceTypeAddrVector devices;
+            status_t status = getDevicesForRoleAndStrategy(strategy, role, devices);
+            status_t marshall_status = reply->writeParcelableVector(devices);
             if (marshall_status != NO_ERROR) {
                 return marshall_status;
             }
@@ -2757,49 +2891,76 @@
             return NO_ERROR;
         } break;
 
+        case SET_DEVICES_ROLE_FOR_CAPTURE_PRESET: {
+            CHECK_INTERFACE(IAudioPolicyService, data, reply);
+            audio_source_t audioSource = (audio_source_t) data.readUint32();
+            device_role_t role = (device_role_t) data.readUint32();
+            AudioDeviceTypeAddrVector devices;
+            status_t status = data.readParcelableVector(&devices);
+            if (status != NO_ERROR) {
+                return status;
+            }
+            status = setDevicesRoleForCapturePreset(audioSource, role, devices);
+            reply->writeInt32(status);
+            return NO_ERROR;
+        }
+
+        case ADD_DEVICES_ROLE_FOR_CAPTURE_PRESET: {
+            CHECK_INTERFACE(IAudioPolicyService, data, reply);
+            audio_source_t audioSource = (audio_source_t) data.readUint32();
+            device_role_t role = (device_role_t) data.readUint32();
+            AudioDeviceTypeAddrVector devices;
+            status_t status = data.readParcelableVector(&devices);
+            if (status != NO_ERROR) {
+                return status;
+            }
+            status = addDevicesRoleForCapturePreset(audioSource, role, devices);
+            reply->writeInt32(status);
+            return NO_ERROR;
+        }
+
+        case REMOVE_DEVICES_ROLE_FOR_CAPTURE_PRESET: {
+            CHECK_INTERFACE(IAudioPolicyService, data, reply);
+            audio_source_t audioSource = (audio_source_t) data.readUint32();
+            device_role_t role = (device_role_t) data.readUint32();
+            AudioDeviceTypeAddrVector devices;
+            status_t status = data.readParcelableVector(&devices);
+            if (status != NO_ERROR) {
+                return status;
+            }
+            status = removeDevicesRoleForCapturePreset(audioSource, role, devices);
+            reply->writeInt32(status);
+            return NO_ERROR;
+        }
+
+        case CLEAR_DEVICES_ROLE_FOR_CAPTURE_PRESET: {
+            CHECK_INTERFACE(IAudioPolicyService, data, reply);
+            audio_source_t audioSource = (audio_source_t) data.readUint32();
+            device_role_t role = (device_role_t) data.readUint32();
+            status_t status = clearDevicesRoleForCapturePreset(audioSource, role);
+            reply->writeInt32(status);
+            return NO_ERROR;
+        }
+
+        case GET_DEVICES_FOR_ROLE_AND_CAPTURE_PRESET: {
+            CHECK_INTERFACE(IAudioPolicyService, data, reply);
+            audio_source_t audioSource = (audio_source_t) data.readUint32();
+            device_role_t role = (device_role_t) data.readUint32();
+            AudioDeviceTypeAddrVector devices;
+            status_t status = getDevicesForRoleAndCapturePreset(audioSource, role, devices);
+            status_t marshall_status = reply->writeParcelableVector(devices);
+            if (marshall_status != NO_ERROR) {
+                return marshall_status;
+            }
+            reply->writeInt32(status);
+            return NO_ERROR;
+        }
+
         default:
             return BBinder::onTransact(code, data, reply, flags);
     }
 }
 
-/** returns true if string overflow was prevented by zero termination */
-template <size_t size>
-static bool preventStringOverflow(char (&s)[size]) {
-    if (strnlen(s, size) < size) return false;
-    s[size - 1] = '\0';
-    return true;
-}
-
-void BnAudioPolicyService::sanetizeAudioAttributes(audio_attributes_t* attr)
-{
-    const size_t tagsMaxSize = AUDIO_ATTRIBUTES_TAGS_MAX_SIZE;
-    if (strnlen(attr->tags, tagsMaxSize) >= tagsMaxSize) {
-        android_errorWriteLog(0x534e4554, "68953950"); // SafetyNet logging
-    }
-    attr->tags[tagsMaxSize - 1] = '\0';
-}
-
-/** returns BAD_VALUE if sanitization was required. */
-status_t BnAudioPolicyService::sanitizeEffectDescriptor(effect_descriptor_t* desc)
-{
-    if (preventStringOverflow(desc->name)
-        | /* always */ preventStringOverflow(desc->implementor)) {
-        android_errorWriteLog(0x534e4554, "73126106"); // SafetyNet logging
-        return BAD_VALUE;
-    }
-    return NO_ERROR;
-}
-
-/** returns BAD_VALUE if sanitization was required. */
-status_t BnAudioPolicyService::sanitizeAudioPortConfig(struct audio_port_config* config)
-{
-    if (config->type == AUDIO_PORT_TYPE_DEVICE &&
-        preventStringOverflow(config->ext.device.address)) {
-        return BAD_VALUE;
-    }
-    return NO_ERROR;
-}
-
 // ----------------------------------------------------------------------------
 
 } // namespace android
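The rewritten handlers above all follow one dispatch shape: read the parcel payload, bail out on a read failure, run the AudioSanitizer check, call the policy method only when sanitization succeeds, and always marshal the resulting status back to the caller. A minimal sketch of that shape, assuming the AudioSanitizer signature visible in the hunks; the helper name, function-pointer callee, and header path are illustrative, not AOSP API:

    #include <binder/Parcel.h>
    #include <media/AudioSanitizer.h>   // assumed header location for AudioSanitizer
    #include <system/audio.h>
    #include <utils/Errors.h>

    namespace android {

    // handleAttributes() stands in for any policy call that takes sanitized
    // audio_attributes_t; it is hypothetical.
    static status_t sketchAttributesTransaction(
            const Parcel& data, Parcel* reply,
            status_t (*handleAttributes)(const audio_attributes_t&)) {
        audio_attributes_t attributes = {};
        status_t status = data.read(&attributes, sizeof(audio_attributes_t));
        if (status != NO_ERROR) {
            return status;                                   // malformed parcel: abort the transaction
        }
        status = AudioSanitizer::sanitizeAudioAttributes(&attributes, "169572641");
        if (status == NO_ERROR) {
            status = handleAttributes(attributes);           // only reached with sanitized input
        }
        reply->writeInt32(static_cast<int32_t>(status));     // status is reported either way
        return NO_ERROR;
    }

    } // namespace android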
diff --git a/media/libaudioclient/IEffect.cpp b/media/libaudioclient/IEffect.cpp
deleted file mode 100644
index 5d47dff..0000000
--- a/media/libaudioclient/IEffect.cpp
+++ /dev/null
@@ -1,229 +0,0 @@
-/*
-**
-** Copyright 2010, The Android Open Source Project
-**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-**     http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
-*/
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "IEffect"
-#include <utils/Log.h>
-#include <stdint.h>
-#include <sys/types.h>
-#include <binder/Parcel.h>
-#include <media/IEffect.h>
-
-namespace android {
-
-// Maximum command/reply size expected
-#define EFFECT_PARAM_SIZE_MAX       65536
-
-enum {
-    ENABLE = IBinder::FIRST_CALL_TRANSACTION,
-    DISABLE,
-    COMMAND,
-    DISCONNECT,
-    GET_CBLK
-};
-
-class BpEffect: public BpInterface<IEffect>
-{
-public:
-    explicit BpEffect(const sp<IBinder>& impl)
-        : BpInterface<IEffect>(impl)
-    {
-    }
-
-    status_t enable()
-    {
-        ALOGV("enable");
-        Parcel data, reply;
-        data.writeInterfaceToken(IEffect::getInterfaceDescriptor());
-        remote()->transact(ENABLE, data, &reply);
-        return reply.readInt32();
-    }
-
-    status_t disable()
-    {
-        ALOGV("disable");
-        Parcel data, reply;
-        data.writeInterfaceToken(IEffect::getInterfaceDescriptor());
-        remote()->transact(DISABLE, data, &reply);
-        return reply.readInt32();
-    }
-
-    status_t command(uint32_t cmdCode,
-                     uint32_t cmdSize,
-                     void *pCmdData,
-                     uint32_t *pReplySize,
-                     void *pReplyData)
-    {
-        ALOGV("command");
-        Parcel data, reply;
-        data.writeInterfaceToken(IEffect::getInterfaceDescriptor());
-        data.writeInt32(cmdCode);
-        int size = cmdSize;
-        if (pCmdData == NULL) {
-            size = 0;
-        }
-        data.writeInt32(size);
-        if (size) {
-            data.write(pCmdData, size);
-        }
-        if (pReplySize == NULL) {
-            size = 0;
-        } else {
-            size = *pReplySize;
-        }
-        data.writeInt32(size);
-
-        status_t status = remote()->transact(COMMAND, data, &reply);
-        if (status == NO_ERROR) {
-            status = reply.readInt32();
-        }
-        if (status != NO_ERROR) {
-            if (pReplySize != NULL)
-                *pReplySize = 0;
-            return status;
-        }
-
-        size = reply.readInt32();
-        if (size != 0 && pReplyData != NULL && pReplySize != NULL) {
-            reply.read(pReplyData, size);
-            *pReplySize = size;
-        }
-        return status;
-    }
-
-    void disconnect()
-    {
-        ALOGV("disconnect");
-        Parcel data, reply;
-        data.writeInterfaceToken(IEffect::getInterfaceDescriptor());
-        remote()->transact(DISCONNECT, data, &reply);
-        return;
-    }
-
-    virtual sp<IMemory> getCblk() const
-    {
-        Parcel data, reply;
-        sp<IMemory> cblk;
-        data.writeInterfaceToken(IEffect::getInterfaceDescriptor());
-        status_t status = remote()->transact(GET_CBLK, data, &reply);
-        if (status == NO_ERROR) {
-            cblk = interface_cast<IMemory>(reply.readStrongBinder());
-            if (cblk != 0 && cblk->unsecurePointer() == NULL) {
-                cblk.clear();
-            }
-        }
-        return cblk;
-    }
- };
-
-IMPLEMENT_META_INTERFACE(Effect, "android.media.IEffect");
-
-// ----------------------------------------------------------------------
-
-status_t BnEffect::onTransact(
-    uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
-{
-    switch (code) {
-        case ENABLE: {
-            ALOGV("ENABLE");
-            CHECK_INTERFACE(IEffect, data, reply);
-            reply->writeInt32(enable());
-            return NO_ERROR;
-        } break;
-
-        case DISABLE: {
-            ALOGV("DISABLE");
-            CHECK_INTERFACE(IEffect, data, reply);
-            reply->writeInt32(disable());
-            return NO_ERROR;
-        } break;
-
-        case COMMAND: {
-            ALOGV("COMMAND");
-            CHECK_INTERFACE(IEffect, data, reply);
-            uint32_t cmdCode = data.readInt32();
-            uint32_t cmdSize = data.readInt32();
-            char *cmd = NULL;
-            if (cmdSize) {
-                if (cmdSize > EFFECT_PARAM_SIZE_MAX) {
-                    reply->writeInt32(NO_MEMORY);
-                    return NO_ERROR;
-                }
-                cmd = (char *)calloc(cmdSize, 1);
-                if (cmd == NULL) {
-                    reply->writeInt32(NO_MEMORY);
-                    return NO_ERROR;
-                }
-                data.read(cmd, cmdSize);
-            }
-            uint32_t replySize = data.readInt32();
-            uint32_t replySz = replySize;
-            char *resp = NULL;
-            if (replySize) {
-                if (replySize > EFFECT_PARAM_SIZE_MAX) {
-                    free(cmd);
-                    reply->writeInt32(NO_MEMORY);
-                    return NO_ERROR;
-                }
-                resp = (char *)calloc(replySize, 1);
-                if (resp == NULL) {
-                    free(cmd);
-                    reply->writeInt32(NO_MEMORY);
-                    return NO_ERROR;
-                }
-            }
-            status_t status = command(cmdCode, cmdSize, cmd, &replySz, resp);
-            reply->writeInt32(status);
-            if (status == NO_ERROR) {
-                if (replySz < replySize) {
-                    replySize = replySz;
-                }
-                reply->writeInt32(replySize);
-                if (replySize) {
-                    reply->write(resp, replySize);
-                }
-            }
-            if (cmd) {
-                free(cmd);
-            }
-            if (resp) {
-                free(resp);
-            }
-            return NO_ERROR;
-        } break;
-
-        case DISCONNECT: {
-            ALOGV("DISCONNECT");
-            CHECK_INTERFACE(IEffect, data, reply);
-            disconnect();
-            return NO_ERROR;
-        } break;
-
-        case GET_CBLK: {
-            CHECK_INTERFACE(IEffect, data, reply);
-            reply->writeStrongBinder(IInterface::asBinder(getCblk()));
-            return NO_ERROR;
-        } break;
-
-        default:
-            return BBinder::onTransact(code, data, reply, flags);
-    }
-}
-
-// ----------------------------------------------------------------------------
-
-} // namespace android
diff --git a/media/libaudioclient/IEffectClient.cpp b/media/libaudioclient/IEffectClient.cpp
deleted file mode 100644
index 3f2c67d..0000000
--- a/media/libaudioclient/IEffectClient.cpp
+++ /dev/null
@@ -1,144 +0,0 @@
-/*
-**
-** Copyright 2010, The Android Open Source Project
-**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-**     http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
-*/
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "IEffectClient"
-#include <utils/Log.h>
-#include <stdint.h>
-#include <sys/types.h>
-#include <media/IEffectClient.h>
-
-namespace android {
-
-enum {
-    CONTROL_STATUS_CHANGED = IBinder::FIRST_CALL_TRANSACTION,
-    ENABLE_STATUS_CHANGED,
-    COMMAND_EXECUTED
-};
-
-class BpEffectClient: public BpInterface<IEffectClient>
-{
-public:
-    explicit BpEffectClient(const sp<IBinder>& impl)
-        : BpInterface<IEffectClient>(impl)
-    {
-    }
-
-    void controlStatusChanged(bool controlGranted)
-    {
-        ALOGV("controlStatusChanged");
-        Parcel data, reply;
-        data.writeInterfaceToken(IEffectClient::getInterfaceDescriptor());
-        data.writeInt32((uint32_t)controlGranted);
-        remote()->transact(CONTROL_STATUS_CHANGED, data, &reply, IBinder::FLAG_ONEWAY);
-    }
-
-    void enableStatusChanged(bool enabled)
-    {
-        ALOGV("enableStatusChanged");
-        Parcel data, reply;
-        data.writeInterfaceToken(IEffectClient::getInterfaceDescriptor());
-        data.writeInt32((uint32_t)enabled);
-        remote()->transact(ENABLE_STATUS_CHANGED, data, &reply, IBinder::FLAG_ONEWAY);
-    }
-
-    void commandExecuted(uint32_t cmdCode,
-                         uint32_t cmdSize,
-                         void *pCmdData,
-                         uint32_t replySize,
-                         void *pReplyData)
-    {
-        ALOGV("commandExecuted");
-        Parcel data, reply;
-        data.writeInterfaceToken(IEffectClient::getInterfaceDescriptor());
-        data.writeInt32(cmdCode);
-        int size = cmdSize;
-        if (pCmdData == NULL) {
-            size = 0;
-        }
-        data.writeInt32(size);
-        if (size) {
-            data.write(pCmdData, size);
-        }
-        size = replySize;
-        if (pReplyData == NULL) {
-            size = 0;
-        }
-        data.writeInt32(size);
-        if (size) {
-            data.write(pReplyData, size);
-        }
-        remote()->transact(COMMAND_EXECUTED, data, &reply, IBinder::FLAG_ONEWAY);
-    }
-
-};
-
-IMPLEMENT_META_INTERFACE(EffectClient, "android.media.IEffectClient");
-
-// ----------------------------------------------------------------------
-
-status_t BnEffectClient::onTransact(
-    uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
-{
-    switch (code) {
-        case CONTROL_STATUS_CHANGED: {
-            ALOGV("CONTROL_STATUS_CHANGED");
-            CHECK_INTERFACE(IEffectClient, data, reply);
-            bool hasControl = (bool)data.readInt32();
-            controlStatusChanged(hasControl);
-            return NO_ERROR;
-        } break;
-        case ENABLE_STATUS_CHANGED: {
-            ALOGV("ENABLE_STATUS_CHANGED");
-            CHECK_INTERFACE(IEffectClient, data, reply);
-            bool enabled = (bool)data.readInt32();
-            enableStatusChanged(enabled);
-            return NO_ERROR;
-        } break;
-        case COMMAND_EXECUTED: {
-            ALOGV("COMMAND_EXECUTED");
-            CHECK_INTERFACE(IEffectClient, data, reply);
-            uint32_t cmdCode = data.readInt32();
-            uint32_t cmdSize = data.readInt32();
-            char *cmd = NULL;
-            if (cmdSize) {
-                cmd = (char *)malloc(cmdSize);
-                data.read(cmd, cmdSize);
-            }
-            uint32_t replySize = data.readInt32();
-            char *resp = NULL;
-            if (replySize) {
-                resp = (char *)malloc(replySize);
-                data.read(resp, replySize);
-            }
-            commandExecuted(cmdCode, cmdSize, cmd, replySize, resp);
-            if (cmd) {
-                free(cmd);
-            }
-            if (resp) {
-                free(resp);
-            }
-            return NO_ERROR;
-        } break;
-        default:
-            return BBinder::onTransact(code, data, reply, flags);
-    }
-}
-
-// ----------------------------------------------------------------------------
-
-} // namespace android
diff --git a/media/libaudioclient/OWNERS b/media/libaudioclient/OWNERS
index 482b9fb..034d161 100644
--- a/media/libaudioclient/OWNERS
+++ b/media/libaudioclient/OWNERS
@@ -1,3 +1,4 @@
 gkasten@google.com
+hunga@google.com
 jmtrivi@google.com
 mnaganov@google.com
diff --git a/media/libaudioclient/PlayerBase.cpp b/media/libaudioclient/PlayerBase.cpp
index b0c68e5..c443865 100644
--- a/media/libaudioclient/PlayerBase.cpp
+++ b/media/libaudioclient/PlayerBase.cpp
@@ -22,7 +22,8 @@
 
 namespace android {
 
-using media::VolumeShaper;
+using media::VolumeShaperConfiguration;
+using media::VolumeShaperOperation;
 
 //--------------------------------------------------------------------------------------------------
 PlayerBase::PlayerBase() : BnPlayer(),
@@ -178,8 +179,8 @@
 }
 
 binder::Status PlayerBase::applyVolumeShaper(
-            const VolumeShaper::Configuration& configuration __unused,
-            const VolumeShaper::Operation& operation __unused) {
+            const VolumeShaperConfiguration& configuration __unused,
+            const VolumeShaperOperation& operation __unused) {
     ALOGW("applyVolumeShaper() is not supported");
     return binder::Status::ok();
 }
diff --git a/media/libaudioclient/ToneGenerator.cpp b/media/libaudioclient/ToneGenerator.cpp
index 050ad65..ee78a2d 100644
--- a/media/libaudioclient/ToneGenerator.cpp
+++ b/media/libaudioclient/ToneGenerator.cpp
@@ -853,6 +853,11 @@
                       { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
           .repeatCnt = ToneGenerator::TONEGEN_INF,
           .repeatSegment = 0 },                               // TONE_INDIA_RINGTONE
+        { .segments = { { .duration = 1000, .waveFreq = { 440, 480, 0 }, 0, 0 },
+                        { .duration = 2000, .waveFreq = { 0 }, 0, 0 },
+                        { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+          .repeatCnt = ToneGenerator::TONEGEN_INF,
+          .repeatSegment = 0 },                               // TONE_TW_RINGTONE
 };
 
 // Used by ToneGenerator::getToneForRegion() to convert user specified supervisory tone type
@@ -937,6 +942,16 @@
             TONE_SUP_ERROR,              // TONE_SUP_ERROR
             TONE_INDIA_CALL_WAITING,     // TONE_SUP_CALL_WAITING
             TONE_INDIA_RINGTONE          // TONE_SUP_RINGTONE
+        },
+        {   // TAIWAN
+            TONE_SUP_DIAL,               // TONE_SUP_DIAL
+            TONE_SUP_BUSY,               // TONE_SUP_BUSY
+            TONE_SUP_CONGESTION,         // TONE_SUP_CONGESTION
+            TONE_SUP_RADIO_ACK,          // TONE_SUP_RADIO_ACK
+            TONE_SUP_RADIO_NOTAVAIL,     // TONE_SUP_RADIO_NOTAVAIL
+            TONE_SUP_ERROR,              // TONE_SUP_ERROR
+            TONE_SUP_CALL_WAITING,       // TONE_SUP_CALL_WAITING
+            TONE_TW_RINGTONE             // TONE_SUP_RINGTONE
         }
 };
 
@@ -1010,6 +1025,8 @@
         mRegion = IRELAND;
     } else if (strstr(value, "in") != NULL) {
         mRegion = INDIA;
+    } else if (strstr(value, "tw") != NULL) {
+        mRegion = TAIWAN;
     } else {
         mRegion = CEPT;
     }
diff --git a/media/libaudioclient/TrackPlayerBase.cpp b/media/libaudioclient/TrackPlayerBase.cpp
index 0a914fc..e571838 100644
--- a/media/libaudioclient/TrackPlayerBase.cpp
+++ b/media/libaudioclient/TrackPlayerBase.cpp
@@ -106,11 +106,17 @@
 
 
 binder::Status TrackPlayerBase::applyVolumeShaper(
-        const VolumeShaper::Configuration& configuration,
-        const VolumeShaper::Operation& operation) {
+        const media::VolumeShaperConfiguration& configuration,
+        const media::VolumeShaperOperation& operation) {
 
-    sp<VolumeShaper::Configuration> spConfiguration = new VolumeShaper::Configuration(configuration);
-    sp<VolumeShaper::Operation> spOperation = new VolumeShaper::Operation(operation);
+    sp<VolumeShaper::Configuration> spConfiguration = new VolumeShaper::Configuration();
+    sp<VolumeShaper::Operation> spOperation = new VolumeShaper::Operation();
+
+    status_t s = spConfiguration->readFromParcelable(configuration)
+            ?: spOperation->readFromParcelable(operation);
+    if (s != OK) {
+        return binder::Status::fromStatusT(s);
+    }
 
     if (mAudioTrack != 0) {
         ALOGD("TrackPlayerBase::applyVolumeShaper() from IPlayer");
diff --git a/media/libaudioclient/aidl/android/media/AudioAttributesInternal.aidl b/media/libaudioclient/aidl/android/media/AudioAttributesInternal.aidl
new file mode 100644
index 0000000..699df0a
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioAttributesInternal.aidl
@@ -0,0 +1,35 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioContentType;
+import android.media.AudioSourceType;
+import android.media.AudioUsage;
+
+/**
+ * The "Internal" suffix of this type name is to disambiguate it from the
+ * android.media.AudioAttributes SDK type.
+ * {@hide}
+ */
+parcelable AudioAttributesInternal {
+    AudioContentType contentType;
+    AudioUsage usage;
+    AudioSourceType source;
+    // Bitmask, indexed by AudioFlag.
+    int flags;
+    @utf8InCpp String tags; /* UTF8 */
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioClient.aidl b/media/libaudioclient/aidl/android/media/AudioClient.aidl
new file mode 100644
index 0000000..7bff0d6
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioClient.aidl
@@ -0,0 +1,30 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * {@hide}
+ */
+parcelable AudioClient {
+    /** Interpreted as uid_t. */
+    int clientUid;
+    /** Interpreted as pid_t. */
+    int clientPid;
+    /** Interpreted as pid_t. */
+    int clientTid;
+    @utf8InCpp String packageName;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioConfig.aidl b/media/libaudioclient/aidl/android/media/AudioConfig.aidl
new file mode 100644
index 0000000..8dc97d3
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioConfig.aidl
@@ -0,0 +1,35 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioOffloadInfo;
+import android.media.audio.common.AudioFormat;
+
+/**
+ * {@hide}
+ */
+parcelable AudioConfig {
+    int sampleRate;
+    /**
+     * Interpreted as audio_channel_mask_t.
+     * TODO(ytai): Create a designated type.
+     */
+    int channelMask;
+    AudioFormat format;
+    AudioOffloadInfo offloadInfo;
+    long frameCount;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioConfigBase.aidl b/media/libaudioclient/aidl/android/media/AudioConfigBase.aidl
new file mode 100644
index 0000000..8353c0d
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioConfigBase.aidl
@@ -0,0 +1,29 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.audio.common.AudioFormat;
+
+/**
+ * {@hide}
+ */
+parcelable AudioConfigBase {
+    int sampleRate;
+    /** Interpreted as audio_channel_mask_t. */
+    int channelMask;
+    AudioFormat format;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioContentType.aidl b/media/libaudioclient/aidl/android/media/AudioContentType.aidl
new file mode 100644
index 0000000..f734fba
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioContentType.aidl
@@ -0,0 +1,25 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+@Backing(type="int")
+enum AudioContentType {
+    UNKNOWN = 0,
+    SPEECH = 1,
+    MUSIC = 2,
+    MOVIE = 3,
+    SONIFICATION = 4,
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioEncapsulationMode.aidl b/media/libaudioclient/aidl/android/media/AudioEncapsulationMode.aidl
new file mode 100644
index 0000000..74a6141
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioEncapsulationMode.aidl
@@ -0,0 +1,23 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+@Backing(type="int")
+enum AudioEncapsulationMode {
+     NONE = 0,
+     ELEMENTARY_STREAM = 1,
+     HANDLE = 2,
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioFlag.aidl b/media/libaudioclient/aidl/android/media/AudioFlag.aidl
new file mode 100644
index 0000000..2602fe5
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioFlag.aidl
@@ -0,0 +1,34 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+@Backing(type="int")
+enum AudioFlag {
+    AUDIBILITY_ENFORCED = 0,
+    SECURE = 1,
+    SCO = 2,
+    BEACON = 3,
+    HW_AV_SYNC = 4,
+    HW_HOTWORD = 5,
+    BYPASS_INTERRUPTION_POLICY = 6,
+    BYPASS_MUTE = 7,
+    LOW_LATENCY = 8,
+    DEEP_BUFFER = 9,
+    NO_MEDIA_PROJECTION = 10,
+    MUTE_HAPTIC = 11,
+    NO_SYSTEM_CAPTURE = 12,
+    CAPTURE_PRIVATE = 13,
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioGainConfig.aidl b/media/libaudioclient/aidl/android/media/AudioGainConfig.aidl
new file mode 100644
index 0000000..b93c2dc
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioGainConfig.aidl
@@ -0,0 +1,44 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * {@hide}
+ */
+parcelable AudioGainConfig {
+    /** Index of the corresponding audio_gain in the audio_port gains[] table. */
+    int index;
+
+    /** Mode requested for this command. Bitfield indexed by AudioGainMode. */
+    int mode;
+
+    /**
+     * Channels to which the gain values apply. N/A in joint mode.
+     * Interpreted as audio_channel_mask_t.
+     */
+    int channelMask;
+
+    /**
+     * Gain values in millibels.
+     * For each channel ordered from LSb to MSb in channel mask. The number of values is 1 in joint
+     * mode, otherwise equals the number of bits implied by channelMask.
+     */
+    int[]  values;
+
+    /** Ramp duration in ms. */
+    int rampDurationMs;
+}
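For the values[] field documented above, the expected element count follows directly from mode and channelMask: one entry in joint mode, otherwise one per channel bit. A sketch of that rule; the AUDIO_GAIN_MODE_JOINT bit value is an assumption for illustration, not quoted from the AOSP headers:

    #include <bit>        // std::popcount, C++20
    #include <cassert>
    #include <cstdint>

    constexpr uint32_t AUDIO_GAIN_MODE_JOINT = 1u << 0;   // assumed bit for AudioGainMode::JOINT

    // Expected length of AudioGainConfig.values for a given mode/channelMask.
    static size_t expectedGainValueCount(uint32_t mode, uint32_t channelMask) {
        return (mode & AUDIO_GAIN_MODE_JOINT) ? 1 : std::popcount(channelMask);
    }

    int main() {
        assert(expectedGainValueCount(AUDIO_GAIN_MODE_JOINT, 0x3) == 1);  // joint: single gain
        assert(expectedGainValueCount(0, 0x3) == 2);                      // per-channel: stereo mask
        return 0;
    }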
diff --git a/media/libaudioclient/aidl/android/media/AudioGainMode.aidl b/media/libaudioclient/aidl/android/media/AudioGainMode.aidl
new file mode 100644
index 0000000..39395e5
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioGainMode.aidl
@@ -0,0 +1,23 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+@Backing(type="int")
+enum AudioGainMode {
+    JOINT    = 0,
+    CHANNELS = 1,
+    RAMP     = 2,
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioInputFlags.aidl b/media/libaudioclient/aidl/android/media/AudioInputFlags.aidl
new file mode 100644
index 0000000..8f517e7
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioInputFlags.aidl
@@ -0,0 +1,28 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+@Backing(type="int")
+enum AudioInputFlags {
+    FAST       = 0,
+    HW_HOTWORD = 1,
+    RAW        = 2,
+    SYNC       = 3,
+    MMAP_NOIRQ = 4,
+    VOIP_TX    = 5,
+    HW_AV_SYNC = 6,
+    DIRECT     = 7,
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioIoConfigEvent.aidl b/media/libaudioclient/aidl/android/media/AudioIoConfigEvent.aidl
new file mode 100644
index 0000000..d5f23a1
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioIoConfigEvent.aidl
@@ -0,0 +1,33 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * {@hide}
+ */
+@Backing(type="int")
+enum AudioIoConfigEvent {
+    OUTPUT_REGISTERED = 0,
+    OUTPUT_OPENED = 1,
+    OUTPUT_CLOSED = 2,
+    OUTPUT_CONFIG_CHANGED = 3,
+    INPUT_REGISTERED = 4,
+    INPUT_OPENED = 5,
+    INPUT_CLOSED = 6,
+    INPUT_CONFIG_CHANGED = 7,
+    CLIENT_STARTED = 8,
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioIoDescriptor.aidl b/media/libaudioclient/aidl/android/media/AudioIoDescriptor.aidl
new file mode 100644
index 0000000..876ef9b
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioIoDescriptor.aidl
@@ -0,0 +1,42 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioPatch;
+import android.media.audio.common.AudioFormat;
+
+/**
+ * {@hide}
+ */
+parcelable AudioIoDescriptor {
+    /** Interpreted as audio_io_handle_t. */
+    int ioHandle;
+    AudioPatch patch;
+    int samplingRate;
+    AudioFormat format;
+    /** Interpreted as audio_channel_mask_t. */
+    int channelMask;
+    long frameCount;
+    long frameCountHAL;
+    /** Only valid for output. */
+    int latency;
+    /**
+     * Interpreted as audio_port_handle_t.
+     * Valid for the AUDIO_CLIENT_STARTED event.
+     */
+    int portId;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioIoFlags.aidl b/media/libaudioclient/aidl/android/media/AudioIoFlags.aidl
new file mode 100644
index 0000000..f9b25bf
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioIoFlags.aidl
@@ -0,0 +1,27 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * {@hide}
+ */
+union AudioIoFlags {
+    /** Bitmask indexed by AudioInputFlags. */
+    int input;
+    /** Bitmask indexed by AudioOutputFlags. */
+    int output;
+}
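AudioIoFlags above, like the flags field of AudioAttributesInternal earlier, carries a plain int that is a bitmask indexed by a flag enum: each enumerator is a bit position and the wire value is the OR of 1 << index. A short sketch of that convention using a stand-in enum, not the generated AIDL type:

    #include <cstdint>
    #include <initializer_list>

    // Stand-in for an AIDL flag-index enum such as AudioInputFlags.
    enum class InputFlag : int32_t { FAST = 0, HW_HOTWORD = 1, RAW = 2 };

    // Build the "bitmask indexed by enum" wire value from a list of flags.
    static int32_t makeBitmask(std::initializer_list<InputFlag> flags) {
        int32_t mask = 0;
        for (InputFlag f : flags) {
            mask |= 1 << static_cast<int32_t>(f);   // enumerator value is the bit position
        }
        return mask;
    }

    int main() {
        // FAST (bit 0) plus RAW (bit 2) yields 0b101.
        return makeBitmask({InputFlag::FAST, InputFlag::RAW}) == 0b101 ? 0 : 1;
    }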
diff --git a/media/libaudioclient/aidl/android/media/AudioOffloadInfo.aidl b/media/libaudioclient/aidl/android/media/AudioOffloadInfo.aidl
new file mode 100644
index 0000000..c86b3f0
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioOffloadInfo.aidl
@@ -0,0 +1,52 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioConfigBase;
+import android.media.AudioEncapsulationMode;
+import android.media.AudioStreamType;
+import android.media.AudioUsage;
+import android.media.audio.common.AudioFormat;
+
+/**
+ * {@hide}
+ */
+parcelable AudioOffloadInfo {
+    /** Version of the info structure. Interpreted as a uint16_t version constant. */
+    int version;
+    /** Audio configuration. */
+    AudioConfigBase config;
+    /** Stream type. */
+    AudioStreamType streamType;
+    /** Bit rate in bits per second. */
+    int bitRate;
+    /** Duration in microseconds, -1 if unknown. */
+    long durationUs;
+    /** true if stream is tied to a video stream. */
+    boolean hasVideo;
+    /** true if streaming, false if local playback. */
+    boolean isStreaming;
+    int bitWidth;
+    /** Offload fragment size. */
+    int offloadBufferSize;
+    AudioUsage usage;
+    AudioEncapsulationMode encapsulationMode;
+    /** Content id from tuner HAL (0 if none). */
+    int contentId;
+    /** Sync id from tuner HAL (0 if none). */
+    int syncId;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioOutputFlags.aidl b/media/libaudioclient/aidl/android/media/AudioOutputFlags.aidl
new file mode 100644
index 0000000..aebf871
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioOutputFlags.aidl
@@ -0,0 +1,35 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+@Backing(type="int")
+enum AudioOutputFlags {
+    DIRECT           = 0,
+    PRIMARY          = 1,
+    FAST             = 2,
+    DEEP_BUFFER      = 3,
+    COMPRESS_OFFLOAD = 4,
+    NON_BLOCKING     = 5,
+    HW_AV_SYNC       = 6,
+    TTS              = 7,
+    RAW              = 8,
+    SYNC             = 9,
+    IEC958_NONAUDIO  = 10,
+    DIRECT_PCM       = 11,
+    MMAP_NOIRQ       = 12,
+    VOIP_RX          = 13,
+    INCALL_MUSIC     = 14,
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioPatch.aidl b/media/libaudioclient/aidl/android/media/AudioPatch.aidl
new file mode 100644
index 0000000..8519faf
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioPatch.aidl
@@ -0,0 +1,32 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioPortConfig;
+
+/**
+ * {@hide}
+ */
+parcelable AudioPatch {
+    /**
+     * Patch unique ID.
+     * Interpreted as audio_patch_handle_t.
+     */
+    int id;
+    AudioPortConfig[] sources;
+    AudioPortConfig[] sinks;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioPortConfig.aidl b/media/libaudioclient/aidl/android/media/AudioPortConfig.aidl
new file mode 100644
index 0000000..2dd30a4
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioPortConfig.aidl
@@ -0,0 +1,59 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioGainConfig;
+import android.media.AudioIoFlags;
+import android.media.AudioPortConfigExt;
+import android.media.AudioPortConfigType;
+import android.media.AudioPortRole;
+import android.media.AudioPortType;
+import android.media.audio.common.AudioFormat;
+
+/**
+ * {@hide}
+ */
+parcelable AudioPortConfig {
+    /**
+     * Port unique ID.
+     * Interpreted as audio_port_handle_t.
+     */
+    int id;
+    /** Sink or source. */
+    AudioPortRole role;
+    /** Device, mix ... */
+    AudioPortType type;
+    /** Bitmask, indexed by AudioPortConfigType. */
+    int configMask;
+    /** Sampling rate in Hz. */
+    int sampleRate;
+    /**
+     * Channel mask, if applicable.
+     * Interpreted as audio_channel_mask_t.
+     * TODO: bitmask?
+     */
+    int channelMask;
+    /**
+     * Format, if applicable.
+     */
+    AudioFormat format;
+    /** Gain to apply, if applicable. */
+    AudioGainConfig gain;
+    /** Framework only: HW_AV_SYNC, DIRECT, ... */
+    AudioIoFlags flags;
+    AudioPortConfigExt ext;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioPortConfigDeviceExt.aidl b/media/libaudioclient/aidl/android/media/AudioPortConfigDeviceExt.aidl
new file mode 100644
index 0000000..a99aa9b
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioPortConfigDeviceExt.aidl
@@ -0,0 +1,36 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * {@hide}
+ */
+parcelable AudioPortConfigDeviceExt {
+    /**
+     * Module the device is attached to.
+     * Interpreted as audio_module_handle_t.
+     */
+    int hwModule;
+    /**
+     * Device type (e.g. AUDIO_DEVICE_OUT_SPEAKER).
+     * Interpreted as audio_devices_t.
+     * TODO: Convert to a standalone AIDL representation.
+     */
+    int type;
+    /** Device address. "" if N/A. */
+    @utf8InCpp String address;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioPortConfigExt.aidl b/media/libaudioclient/aidl/android/media/AudioPortConfigExt.aidl
new file mode 100644
index 0000000..38da4f5
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioPortConfigExt.aidl
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioPortConfigDeviceExt;
+import android.media.AudioPortConfigMixExt;
+import android.media.AudioPortConfigSessionExt;
+
+/**
+ * {@hide}
+ */
+union AudioPortConfigExt {
+    /**
+     * This represents an empty union. Value is ignored.
+     * TODO(ytai): replace with the canonical representation for an empty union, as soon as it is
+     *             established.
+     */
+    boolean nothing;
+    /** Device specific info. */
+    AudioPortConfigDeviceExt device;
+    /** Mix specific info. */
+    AudioPortConfigMixExt mix;
+    /** Session specific info. */
+    AudioPortConfigSessionExt session;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioPortConfigMixExt.aidl b/media/libaudioclient/aidl/android/media/AudioPortConfigMixExt.aidl
new file mode 100644
index 0000000..d3226f2
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioPortConfigMixExt.aidl
@@ -0,0 +1,36 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioPortConfigMixExtUseCase;
+
+/**
+ * {@hide}
+ */
+parcelable AudioPortConfigMixExt {
+    /**
+     * Module the stream is attached to.
+     * Interpreted as audio_module_handle_t.
+     */
+    int hwModule;
+    /**
+     * I/O handle of the input/output stream.
+     * Interpreted as audio_io_handle_t.
+     */
+    int handle;
+    AudioPortConfigMixExtUseCase usecase;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioPortConfigMixExtUseCase.aidl b/media/libaudioclient/aidl/android/media/AudioPortConfigMixExtUseCase.aidl
new file mode 100644
index 0000000..9e5e081
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioPortConfigMixExtUseCase.aidl
@@ -0,0 +1,37 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioSourceType;
+import android.media.AudioStreamType;
+
+/**
+ * {@hide}
+ */
+union AudioPortConfigMixExtUseCase {
+    /**
+     * This is to be set if the containing config has the AudioPortRole::NONE role.
+     * This represents an empty value (value is ignored).
+     * TODO(ytai): replace with the canonical representation for an empty union, as soon as it is
+     *             established.
+     */
+    boolean nothing;
+    /** This is to be set if the containing config has the AudioPortRole::SOURCE role. */
+    AudioStreamType stream;
+    /** This is to be set if the containing config has the AudioPortRole::SINK role. */
+    AudioSourceType source;
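+    // For example (illustrative): an output mix port, whose containing config has the SOURCE
+    // role, carries a stream type such as AudioStreamType.MUSIC, while an input mix port, whose
+    // config has the SINK role, carries a source type such as AudioSourceType.MIC.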
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioPortConfigSessionExt.aidl b/media/libaudioclient/aidl/android/media/AudioPortConfigSessionExt.aidl
new file mode 100644
index 0000000..a2cbf62
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioPortConfigSessionExt.aidl
@@ -0,0 +1,24 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * {@hide}
+ */
+parcelable AudioPortConfigSessionExt {
+    int session;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioPortConfigType.aidl b/media/libaudioclient/aidl/android/media/AudioPortConfigType.aidl
new file mode 100644
index 0000000..c7bb4d8
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioPortConfigType.aidl
@@ -0,0 +1,25 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+@Backing(type="int")
+enum AudioPortConfigType {
+    SAMPLE_RATE  = 0,
+    CHANNEL_MASK = 1,
+    FORMAT       = 2,
+    GAIN         = 3,
+    FLAGS        = 4,
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioPortRole.aidl b/media/libaudioclient/aidl/android/media/AudioPortRole.aidl
new file mode 100644
index 0000000..3212325
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioPortRole.aidl
@@ -0,0 +1,23 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+@Backing(type="int")
+enum AudioPortRole {
+    NONE = 0,
+    SOURCE = 1,
+    SINK = 2,
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioPortType.aidl b/media/libaudioclient/aidl/android/media/AudioPortType.aidl
new file mode 100644
index 0000000..90eea9a
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioPortType.aidl
@@ -0,0 +1,24 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+@Backing(type="int")
+enum AudioPortType {
+    NONE = 0,
+    DEVICE = 1,
+    MIX = 2,
+    SESSION = 3,
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioSourceType.aidl b/media/libaudioclient/aidl/android/media/AudioSourceType.aidl
new file mode 100644
index 0000000..35320f8
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioSourceType.aidl
@@ -0,0 +1,40 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+@Backing(type="int")
+enum AudioSourceType {
+    INVALID = -1,
+    DEFAULT = 0,
+    MIC = 1,
+    VOICE_UPLINK = 2,
+    VOICE_DOWNLINK = 3,
+    VOICE_CALL = 4,
+    CAMCORDER = 5,
+    VOICE_RECOGNITION = 6,
+    VOICE_COMMUNICATION = 7,
+    REMOTE_SUBMIX = 8,
+    UNPROCESSED = 9,
+    VOICE_PERFORMANCE = 10,
+    ECHO_REFERENCE = 1997,
+    FM_TUNER = 1998,
+    /**
+     * A low-priority, preemptible audio source for background software
+     * hotword detection. Same tuning as VOICE_RECOGNITION.
+     * Used only internally by the framework.
+     */
+    HOTWORD = 1999,
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioStreamType.aidl b/media/libaudioclient/aidl/android/media/AudioStreamType.aidl
new file mode 100644
index 0000000..803b87b
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioStreamType.aidl
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+@Backing(type="int")
+enum AudioStreamType {
+    DEFAULT = -1,
+    VOICE_CALL = 0,
+    SYSTEM = 1,
+    RING = 2,
+    MUSIC = 3,
+    ALARM = 4,
+    NOTIFICATION = 5,
+    BLUETOOTH_SCO = 6,
+    ENFORCED_AUDIBLE = 7,
+    DTMF = 8,
+    TTS = 9,
+    ACCESSIBILITY = 10,
+    ASSISTANT = 11,
+    /** For dynamic policy output mixes. Only used by the audio policy. */
+    REROUTING = 12,
+    /** For AudioFlinger track volume management. Only used by AudioFlinger. */
+    PATCH = 13,
+    /** Stream corresponding to AUDIO_USAGE_CALL_ASSISTANT. */
+    CALL_ASSISTANT = 14,
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioUsage.aidl b/media/libaudioclient/aidl/android/media/AudioUsage.aidl
new file mode 100644
index 0000000..137e7ff
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioUsage.aidl
@@ -0,0 +1,42 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+@Backing(type="int")
+enum AudioUsage {
+    UNKNOWN = 0,
+    MEDIA = 1,
+    VOICE_COMMUNICATION = 2,
+    VOICE_COMMUNICATION_SIGNALLING = 3,
+    ALARM = 4,
+    NOTIFICATION = 5,
+    NOTIFICATION_TELEPHONY_RINGTONE = 6,
+    NOTIFICATION_COMMUNICATION_REQUEST = 7,
+    NOTIFICATION_COMMUNICATION_INSTANT = 8,
+    NOTIFICATION_COMMUNICATION_DELAYED = 9,
+    NOTIFICATION_EVENT = 10,
+    ASSISTANCE_ACCESSIBILITY = 11,
+    ASSISTANCE_NAVIGATION_GUIDANCE = 12,
+    ASSISTANCE_SONIFICATION = 13,
+    GAME = 14,
+    VIRTUAL_SOURCE = 15,
+    ASSISTANT = 16,
+    CALL_ASSISTANT = 17,
+    EMERGENCY = 1000,
+    SAFETY = 1001,
+    VEHICLE_STATUS = 1002,
+    ANNOUNCEMENT = 1003,
+}
diff --git a/media/libaudioclient/aidl/android/media/CreateRecordRequest.aidl b/media/libaudioclient/aidl/android/media/CreateRecordRequest.aidl
new file mode 100644
index 0000000..6da743a
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/CreateRecordRequest.aidl
@@ -0,0 +1,44 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioAttributesInternal;
+import android.media.AudioClient;
+import android.media.AudioConfigBase;
+
+/**
+ * CreateRecordRequest contains all input arguments sent by AudioRecord to AudioFlinger
+ * when calling createRecord(), including arguments that will be updated by AudioFlinger
+ * and returned in the CreateRecordResponse object.
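+ * For example, frameCount may be sent as 0 to let AudioFlinger choose a value; the frame count
+ * actually allocated is then returned in CreateRecordResponse.frameCount.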
+ *
+ * {@hide}
+ */
+parcelable CreateRecordRequest {
+    AudioAttributesInternal attr;
+    AudioConfigBase config;
+    AudioClient clientInfo;
+    @utf8InCpp String opPackageName;
+    /** Interpreted as audio_unique_id_t. */
+    int riid;
+    /** Bitmask, indexed by AudioInputFlags. */
+    int flags;
+    long frameCount;
+    long notificationFrameCount;
+    /** Interpreted as audio_port_handle_t. */
+    int selectedDeviceId;
+    int sessionId;
+}
diff --git a/media/libaudioclient/aidl/android/media/CreateRecordResponse.aidl b/media/libaudioclient/aidl/android/media/CreateRecordResponse.aidl
new file mode 100644
index 0000000..0c9d7c3
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/CreateRecordResponse.aidl
@@ -0,0 +1,43 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.SharedFileRegion;
+
+/**
+ * CreateRecordResponse contains all output arguments returned by AudioFlinger to AudioRecord
+ * when calling createRecord(), including the in/out arguments that were passed for update in
+ * CreateRecordRequest.
+ *
+ * {@hide}
+ */
+parcelable CreateRecordResponse {
+    /** Bitmask, indexed by AudioInputFlags. */
+    int flags;
+    long frameCount;
+    long notificationFrameCount;
+    /** Interpreted as audio_port_handle_t. */
+    int selectedDeviceId;
+    int sessionId;
+    int sampleRate;
+    /** Interpreted as audio_io_handle_t. */
+    int inputId;
+    @nullable SharedFileRegion cblk;
+    @nullable SharedFileRegion buffers;
+    /** Interpreted as audio_port_handle_t. */
+    int portId;
+}
diff --git a/media/libaudioclient/aidl/android/media/CreateTrackRequest.aidl b/media/libaudioclient/aidl/android/media/CreateTrackRequest.aidl
new file mode 100644
index 0000000..014b3ca
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/CreateTrackRequest.aidl
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioAttributesInternal;
+import android.media.AudioClient;
+import android.media.AudioConfig;
+import android.media.IAudioTrackCallback;
+import android.media.SharedFileRegion;
+
+/**
+ * CreateTrackRequest contains all input arguments sent by AudioTrack to AudioFlinger
+ * when calling createTrack(), including arguments that will be updated by AudioFlinger
+ * and returned in the CreateTrackResponse object.
+ *
+ * {@hide}
+ */
+parcelable CreateTrackRequest {
+    AudioAttributesInternal attr;
+    AudioConfig config;
+    AudioClient clientInfo;
+    @nullable SharedFileRegion sharedBuffer;
+    int notificationsPerBuffer;
+    float speed;
+    IAudioTrackCallback audioTrackCallback;
+    @utf8InCpp String opPackageName;
+    /** Bitmask, indexed by AudioOutputFlags. */
+    int flags;
+    long frameCount;
+    long notificationFrameCount;
+    /** Interpreted as audio_port_handle_t. */
+    int selectedDeviceId;
+    int sessionId;
+}
diff --git a/media/libaudioclient/aidl/android/media/CreateTrackResponse.aidl b/media/libaudioclient/aidl/android/media/CreateTrackResponse.aidl
new file mode 100644
index 0000000..494e63f
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/CreateTrackResponse.aidl
@@ -0,0 +1,42 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * CreateTrackResponse contains all output arguments returned by AudioFlinger to AudioTrack
+ * when calling createTrack(), including the in/out arguments that were passed for update in
+ * CreateTrackRequest.
+ *
+ * {@hide}
+ */
+parcelable CreateTrackResponse {
+    /** Bitmask, indexed by AudioOutputFlags. */
+    int flags;
+    long frameCount;
+    long notificationFrameCount;
+    /** Interpreted as audio_port_handle_t. */
+    int selectedDeviceId;
+    int sessionId;
+    int sampleRate;
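+    /** Properties of the AudioFlinger output thread serving this track ("af" = AudioFlinger). */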
+    long afFrameCount;
+    int afSampleRate;
+    int afLatencyMs;
+    /** Interpreted as audio_io_handle_t. */
+    int outputId;
+    /** Interpreted as audio_port_handle_t. */
+    int portId;
+}
diff --git a/media/libaudioclient/aidl/android/media/IAudioFlingerClient.aidl b/media/libaudioclient/aidl/android/media/IAudioFlingerClient.aidl
new file mode 100644
index 0000000..421c31c
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/IAudioFlingerClient.aidl
@@ -0,0 +1,30 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioIoConfigEvent;
+import android.media.AudioIoDescriptor;
+
+/**
+ * A callback interface for AudioFlinger.
+ *
+ * {@hide}
+ */
+interface IAudioFlingerClient {
+    oneway void ioConfigChanged(AudioIoConfigEvent event,
+                                in AudioIoDescriptor ioDesc);
+}
diff --git a/media/libaudioclient/aidl/android/media/IEffect.aidl b/media/libaudioclient/aidl/android/media/IEffect.aidl
new file mode 100644
index 0000000..9548e46
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/IEffect.aidl
@@ -0,0 +1,65 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.SharedFileRegion;
+
+/**
+ * The IEffect interface enables control of the effect module activity and parameters.
+ *
+ * @hide
+ */
+interface IEffect {
+    /**
+     * Activates the effect module by connecting it to the audio path.
+     * @return a status_t code.
+     */
+    int enable();
+
+    /**
+     * Deactivates the effect module by disconnecting it from the audio path.
+     * @return a status_t code.
+     */
+    int disable();
+
+    /**
+     * Sends control, reads or writes parameters. Same behavior as the command() method in the
+     * effect control interface.
+     * Refer to system/audio_effect.h for a description of the valid command codes and their
+     * associated parameter and return messages. The cmdData and response parameters are expected to
+     * contain the respective types in a standard C memory layout.
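+     *
+     * For example (sketch based on system/audio_effect.h): a parameter read is typically issued
+     * as EFFECT_CMD_GET_PARAM with an effect_param_t (plus its parameter data) serialized into
+     * cmdData, and the updated effect_param_t is read back from response.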
+     *
+     * TODO(ytai): replace opaque byte arrays with strongly typed parameters.
+     */
+    int command(int cmdCode, in byte[] cmdData, int maxResponseSize, out byte[] response);
+
+    /**
+     * Disconnects the IEffect interface from the effect module.
+     * This will also delete the effect module and release the effect engine in the library if this
+     * is the last client to disconnect. To release control of the effect module, the application can
+     * disconnect or delete the IEffect interface.
+     */
+    void disconnect();
+
+    /**
+     * Returns a shared memory region used to pass multiple parameters to the effect module
+     * without multiplying binder calls.
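+     * In the libaudioclient wrapper (AudioEffect), this region is mapped as an effect_param_cblk_t
+     * and used for deferred parameter setting.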
+     *
+     * TODO(ytai): Explain how this should be used exactly.
+     */
+    SharedFileRegion getCblk();
+}
diff --git a/media/libaudioclient/aidl/android/media/IEffectClient.aidl b/media/libaudioclient/aidl/android/media/IEffectClient.aidl
new file mode 100644
index 0000000..d1e331c
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/IEffectClient.aidl
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * A callback interface for getting effect-related notifications.
+ *
+ * @hide
+ */
+interface IEffectClient {
+    /**
+     * Called whenever the status of granting control over the effect to the application
+     * has changed.
+     * @param controlGranted true iff the application has control of the effect module.
+     */
+    oneway void controlStatusChanged(boolean controlGranted);
+
+    /**
+     * Called whenever the effect has been enabled or disabled. Received only if the client is not
+     * currently controlling the effect.
+     * @param enabled true if the effect module has been activated, false if deactivated.
+     */
+    oneway void enableStatusChanged(boolean enabled);
+
+    /**
+     * A command has been sent to the effect engine. Received only if the client is not currently
+     * controlling the effect. See IEffect.command() for a description of buffer contents.
+     *
+     * TODO(ytai): replace opaque byte arrays with strongly typed parameters.
+     */
+    oneway void commandExecuted(int cmdCode, in byte[] cmdData, in byte[] replyData);
+}
diff --git a/media/libaudioclient/aidl/android/media/IPlayer.aidl b/media/libaudioclient/aidl/android/media/IPlayer.aidl
index a90fcdd..8c2c471 100644
--- a/media/libaudioclient/aidl/android/media/IPlayer.aidl
+++ b/media/libaudioclient/aidl/android/media/IPlayer.aidl
@@ -16,8 +16,8 @@
 
 package android.media;
 
-import android.media.VolumeShaper.Configuration;
-import android.media.VolumeShaper.Operation;
+import android.media.VolumeShaperConfiguration;
+import android.media.VolumeShaperOperation;
 
 /**
  * @hide
@@ -29,6 +29,6 @@
     oneway void setVolume(float vol);
     oneway void setPan(float pan);
     oneway void setStartDelayMs(int delayMs);
-    oneway void applyVolumeShaper(in Configuration configuration,
-                                  in Operation operation);
+    oneway void applyVolumeShaper(in VolumeShaperConfiguration configuration,
+                                  in VolumeShaperOperation operation);
 }
diff --git a/media/libaudioclient/aidl/android/media/VolumeShaper/Configuration.aidl b/media/libaudioclient/aidl/android/media/VolumeShaper/Configuration.aidl
deleted file mode 100644
index fd0e60f..0000000
--- a/media/libaudioclient/aidl/android/media/VolumeShaper/Configuration.aidl
+++ /dev/null
@@ -1,19 +0,0 @@
-/*
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.VolumeShaper;
-
-parcelable Configuration cpp_header "media/VolumeShaper.h";
diff --git a/media/libaudioclient/aidl/android/media/VolumeShaper/Operation.aidl b/media/libaudioclient/aidl/android/media/VolumeShaper/Operation.aidl
deleted file mode 100644
index 4290d9d..0000000
--- a/media/libaudioclient/aidl/android/media/VolumeShaper/Operation.aidl
+++ /dev/null
@@ -1,19 +0,0 @@
-/*
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.VolumeShaper;
-
-parcelable Operation cpp_header "media/VolumeShaper.h";
diff --git a/media/libaudioclient/aidl/android/media/VolumeShaper/State.aidl b/media/libaudioclient/aidl/android/media/VolumeShaper/State.aidl
deleted file mode 100644
index f6a22b8..0000000
--- a/media/libaudioclient/aidl/android/media/VolumeShaper/State.aidl
+++ /dev/null
@@ -1,19 +0,0 @@
-/*
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.VolumeShaper;
-
-parcelable State cpp_header "media/VolumeShaper.h";
diff --git a/media/libaudioclient/include/media/AidlConversion.h b/media/libaudioclient/include/media/AidlConversion.h
new file mode 100644
index 0000000..4df8083
--- /dev/null
+++ b/media/libaudioclient/include/media/AidlConversion.h
@@ -0,0 +1,283 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <limits>
+#include <type_traits>
+
+#include <system/audio.h>
+
+#include <android-base/expected.h>
+
+#include <android/media/AudioAttributesInternal.h>
+#include <android/media/AudioClient.h>
+#include <android/media/AudioConfig.h>
+#include <android/media/AudioConfigBase.h>
+#include <android/media/AudioFlag.h>
+#include <android/media/AudioGainMode.h>
+#include <android/media/AudioInputFlags.h>
+#include <android/media/AudioIoConfigEvent.h>
+#include <android/media/AudioIoDescriptor.h>
+#include <android/media/AudioOutputFlags.h>
+#include <android/media/AudioPortConfigType.h>
+
+#include <android/media/SharedFileRegion.h>
+
+#include <binder/IMemory.h>
+#include <media/AudioClient.h>
+#include <media/AudioIoDescriptor.h>
+
+namespace android {
+
+template <typename T>
+using ConversionResult = base::expected<T, status_t>;
+
+// Convenience macros for working with ConversionResult, useful for writing converters for aggregate
+// types.
+
+#define VALUE_OR_RETURN(result)                                \
+    ({                                                         \
+        auto _tmp = (result);                                  \
+        if (!_tmp.ok()) return base::unexpected(_tmp.error()); \
+        std::move(_tmp.value());                               \
+    })
+
+#define RETURN_IF_ERROR(result) \
+    if (status_t _tmp = (result); _tmp != OK) return base::unexpected(_tmp);
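+
+// A minimal sketch (not the authoritative implementation, which lives in the corresponding .cpp)
+// of how these macros are typically used when writing a converter for an aggregate type:
+//
+//   ConversionResult<audio_config_base_t>
+//   aidl2legacy_AudioConfigBase_audio_config_base_t(const media::AudioConfigBase& aidl) {
+//       audio_config_base_t legacy;
+//       legacy.sample_rate = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.sampleRate));
+//       legacy.channel_mask =
+//               VALUE_OR_RETURN(aidl2legacy_int32_t_audio_channel_mask_t(aidl.channelMask));
+//       legacy.format = VALUE_OR_RETURN(aidl2legacy_AudioFormat_audio_format_t(aidl.format));
+//       return legacy;
+//   }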
+
+/**
+ * A generic template to safely cast between integral types, respecting limits of the destination
+ * type.
+ */
+template<typename To, typename From>
+ConversionResult<To> convertIntegral(From from) {
+    // Special handling is required for signed vs. unsigned comparisons, since otherwise we may
+    // have the signed value converted to unsigned and produce wrong results.
+    if (std::is_signed_v<From> && !std::is_signed_v<To>) {
+        if (from < 0 || from > std::numeric_limits<To>::max()) {
+            return base::unexpected(BAD_VALUE);
+        }
+    } else if (std::is_signed_v<To> && !std::is_signed_v<From>) {
+        if (from > std::numeric_limits<To>::max()) {
+            return base::unexpected(BAD_VALUE);
+        }
+    } else {
+        if (from < std::numeric_limits<To>::min() || from > std::numeric_limits<To>::max()) {
+            return base::unexpected(BAD_VALUE);
+        }
+    }
+    return static_cast<To>(from);
+}
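+// For illustration (hypothetical caller; the concrete converters below may use other helpers):
+//   ConversionResult<size_t> frames = convertIntegral<size_t>(aidlFrameCount /* int64_t */);
+//   if (!frames.ok()) return base::unexpected(frames.error());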
+
+// maxSize is the size of the C-string buffer (including the 0-terminator), NOT the max length of
+// the string.
+status_t aidl2legacy_string(std::string_view aidl, char* dest, size_t maxSize);
+ConversionResult<std::string> legacy2aidl_string(const char* legacy, size_t maxSize);
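+// For example (sketch, assuming the fixed-size address buffer declared in system/audio.h):
+//   RETURN_IF_ERROR(aidl2legacy_string(aidl.address, legacy.address, AUDIO_DEVICE_MAX_ADDRESS_LEN));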
+
+ConversionResult<audio_module_handle_t> aidl2legacy_int32_t_audio_module_handle_t(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_module_handle_t_int32_t(audio_module_handle_t legacy);
+
+ConversionResult<audio_io_handle_t> aidl2legacy_int32_t_audio_io_handle_t(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_io_handle_t_int32_t(audio_io_handle_t legacy);
+
+ConversionResult<audio_port_handle_t> aidl2legacy_int32_t_audio_port_handle_t(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_port_handle_t_int32_t(audio_port_handle_t legacy);
+
+ConversionResult<audio_patch_handle_t> aidl2legacy_int32_t_audio_patch_handle_t(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_patch_handle_t_int32_t(audio_patch_handle_t legacy);
+
+ConversionResult<audio_unique_id_t> aidl2legacy_int32_t_audio_unique_id_t(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_unique_id_t_int32_t(audio_unique_id_t legacy);
+
+// The legacy enum is unnamed. Thus, we use int.
+ConversionResult<int> aidl2legacy_AudioPortConfigType(media::AudioPortConfigType aidl);
+// The legacy enum is unnamed. Thus, we use int.
+ConversionResult<media::AudioPortConfigType> legacy2aidl_AudioPortConfigType(int legacy);
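+// (The legacy values are the unnamed AUDIO_PORT_CONFIG_* flag enum from system/audio.h, e.g.
+// AUDIO_PORT_CONFIG_SAMPLE_RATE.)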
+
+ConversionResult<unsigned int> aidl2legacy_int32_t_config_mask(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_config_mask_int32_t(unsigned int legacy);
+
+ConversionResult<audio_channel_mask_t> aidl2legacy_int32_t_audio_channel_mask_t(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_channel_mask_t_int32_t(audio_channel_mask_t legacy);
+
+ConversionResult<pid_t> aidl2legacy_int32_t_pid_t(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_pid_t_int32_t(pid_t legacy);
+
+ConversionResult<uid_t> aidl2legacy_int32_t_uid_t(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_uid_t_int32_t(uid_t legacy);
+
+ConversionResult<String16> aidl2legacy_string_view_String16(std::string_view aidl);
+ConversionResult<std::string> legacy2aidl_String16_string(const String16& legacy);
+
+ConversionResult<audio_io_config_event> aidl2legacy_AudioIoConfigEvent_audio_io_config_event(
+        media::AudioIoConfigEvent aidl);
+ConversionResult<media::AudioIoConfigEvent> legacy2aidl_audio_io_config_event_AudioIoConfigEvent(
+        audio_io_config_event legacy);
+
+ConversionResult<audio_port_role_t> aidl2legacy_AudioPortRole_audio_port_role_t(
+        media::AudioPortRole aidl);
+ConversionResult<media::AudioPortRole> legacy2aidl_audio_port_role_t_AudioPortRole(
+        audio_port_role_t legacy);
+
+ConversionResult<audio_port_type_t> aidl2legacy_AudioPortType_audio_port_type_t(
+        media::AudioPortType aidl);
+ConversionResult<media::AudioPortType> legacy2aidl_audio_port_type_t_AudioPortType(
+        audio_port_type_t legacy);
+
+ConversionResult<audio_format_t> aidl2legacy_AudioFormat_audio_format_t(
+        media::audio::common::AudioFormat aidl);
+ConversionResult<media::audio::common::AudioFormat> legacy2aidl_audio_format_t_AudioFormat(
+        audio_format_t legacy);
+
+ConversionResult<int> aidl2legacy_AudioGainMode_int(media::AudioGainMode aidl);
+ConversionResult<media::AudioGainMode> legacy2aidl_int_AudioGainMode(int legacy);
+
+ConversionResult<audio_gain_mode_t> aidl2legacy_int32_t_audio_gain_mode_t(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_gain_mode_t_int32_t(audio_gain_mode_t legacy);
+
+ConversionResult<audio_devices_t> aidl2legacy_int32_t_audio_devices_t(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_devices_t_int32_t(audio_devices_t legacy);
+
+ConversionResult<audio_gain_config> aidl2legacy_AudioGainConfig_audio_gain_config(
+        const media::AudioGainConfig& aidl, media::AudioPortRole role, media::AudioPortType type);
+ConversionResult<media::AudioGainConfig> legacy2aidl_audio_gain_config_AudioGainConfig(
+        const audio_gain_config& legacy, audio_port_role_t role, audio_port_type_t type);
+
+ConversionResult<audio_input_flags_t> aidl2legacy_AudioInputFlags_audio_input_flags_t(
+        media::AudioInputFlags aidl);
+ConversionResult<media::AudioInputFlags> legacy2aidl_audio_input_flags_t_AudioInputFlags(
+        audio_input_flags_t legacy);
+
+ConversionResult<audio_output_flags_t> aidl2legacy_AudioOutputFlags_audio_output_flags_t(
+        media::AudioOutputFlags aidl);
+ConversionResult<media::AudioOutputFlags> legacy2aidl_audio_output_flags_t_AudioOutputFlags(
+        audio_output_flags_t legacy);
+
+ConversionResult<audio_input_flags_t> aidl2legacy_audio_input_flags_mask(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_input_flags_mask(audio_input_flags_t legacy);
+
+ConversionResult<audio_output_flags_t> aidl2legacy_audio_output_flags_mask(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_output_flags_mask(audio_output_flags_t legacy);
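+// Note: the int32_t values in the two conversions above are bitmasks indexed by AudioInputFlags /
+// AudioOutputFlags (see e.g. CreateTrackRequest.flags), while the audio_*_flags_t values carry the
+// legacy AUDIO_INPUT_FLAG_* / AUDIO_OUTPUT_FLAG_* bits.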
+
+ConversionResult<audio_io_flags> aidl2legacy_AudioIoFlags_audio_io_flags(
+        const media::AudioIoFlags& aidl, media::AudioPortRole role, media::AudioPortType type);
+ConversionResult<media::AudioIoFlags> legacy2aidl_audio_io_flags_AudioIoFlags(
+        const audio_io_flags& legacy, audio_port_role_t role, audio_port_type_t type);
+
+ConversionResult<audio_port_config_device_ext> aidl2legacy_AudioPortConfigDeviceExt(
+        const media::AudioPortConfigDeviceExt& aidl);
+ConversionResult<media::AudioPortConfigDeviceExt> legacy2aidl_AudioPortConfigDeviceExt(
+        const audio_port_config_device_ext& legacy);
+
+ConversionResult<audio_stream_type_t> aidl2legacy_AudioStreamType_audio_stream_type_t(
+        media::AudioStreamType aidl);
+ConversionResult<media::AudioStreamType> legacy2aidl_audio_stream_type_t_AudioStreamType(
+        audio_stream_type_t legacy);
+
+ConversionResult<audio_source_t> aidl2legacy_AudioSourceType_audio_source_t(
+        media::AudioSourceType aidl);
+ConversionResult<media::AudioSourceType> legacy2aidl_audio_source_t_AudioSourceType(
+        audio_source_t legacy);
+
+ConversionResult<audio_session_t> aidl2legacy_int32_t_audio_session_t(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_session_t_int32_t(audio_session_t legacy);
+
+ConversionResult<audio_port_config_mix_ext> aidl2legacy_AudioPortConfigMixExt(
+        const media::AudioPortConfigMixExt& aidl, media::AudioPortRole role);
+ConversionResult<media::AudioPortConfigMixExt> legacy2aidl_AudioPortConfigMixExt(
+        const audio_port_config_mix_ext& legacy, audio_port_role_t role);
+
+ConversionResult<audio_port_config_session_ext> aidl2legacy_AudioPortConfigSessionExt(
+        const media::AudioPortConfigSessionExt& aidl);
+ConversionResult<media::AudioPortConfigSessionExt> legacy2aidl_AudioPortConfigSessionExt(
+        const audio_port_config_session_ext& legacy);
+
+ConversionResult<audio_port_config> aidl2legacy_AudioPortConfig_audio_port_config(
+        const media::AudioPortConfig& aidl);
+ConversionResult<media::AudioPortConfig> legacy2aidl_audio_port_config_AudioPortConfig(
+        const audio_port_config& legacy);
+
+ConversionResult<struct audio_patch> aidl2legacy_AudioPatch_audio_patch(
+        const media::AudioPatch& aidl);
+ConversionResult<media::AudioPatch> legacy2aidl_audio_patch_AudioPatch(
+        const struct audio_patch& legacy);
+
+ConversionResult<sp<AudioIoDescriptor>> aidl2legacy_AudioIoDescriptor_AudioIoDescriptor(
+        const media::AudioIoDescriptor& aidl);
+
+ConversionResult<media::AudioIoDescriptor> legacy2aidl_AudioIoDescriptor_AudioIoDescriptor(
+        const sp<AudioIoDescriptor>& legacy);
+
+ConversionResult<AudioClient> aidl2legacy_AudioClient(const media::AudioClient& aidl);
+ConversionResult<media::AudioClient> legacy2aidl_AudioClient(const AudioClient& legacy);
+
+ConversionResult<audio_content_type_t>
+aidl2legacy_AudioContentType_audio_content_type_t(media::AudioContentType aidl);
+ConversionResult<media::AudioContentType>
+legacy2aidl_audio_content_type_t_AudioContentType(audio_content_type_t legacy);
+
+ConversionResult<audio_usage_t>
+aidl2legacy_AudioUsage_audio_usage_t(media::AudioUsage aidl);
+ConversionResult<media::AudioUsage>
+legacy2aidl_audio_usage_t_AudioUsage(audio_usage_t legacy);
+
+ConversionResult<audio_flags_mask_t>
+aidl2legacy_AudioFlag_audio_flags_mask_t(media::AudioFlag aidl);
+ConversionResult<media::AudioFlag>
+legacy2aidl_audio_flags_mask_t_AudioFlag(audio_flags_mask_t legacy);
+
+ConversionResult<audio_flags_mask_t>
+aidl2legacy_int32_t_audio_flags_mask_t_mask(int32_t aidl);
+ConversionResult<int32_t>
+legacy2aidl_audio_flags_mask_t_int32_t_mask(audio_flags_mask_t legacy);
+
+ConversionResult<audio_attributes_t>
+aidl2legacy_AudioAttributesInternal_audio_attributes_t(const media::AudioAttributesInternal& aidl);
+ConversionResult<media::AudioAttributesInternal>
+legacy2aidl_audio_attributes_t_AudioAttributesInternal(const audio_attributes_t& legacy);
+
+ConversionResult<audio_encapsulation_mode_t>
+aidl2legacy_audio_encapsulation_mode_t_AudioEncapsulationMode(media::AudioEncapsulationMode aidl);
+ConversionResult<media::AudioEncapsulationMode>
+legacy2aidl_AudioEncapsulationMode_audio_encapsulation_mode_t(audio_encapsulation_mode_t legacy);
+
+ConversionResult<audio_offload_info_t>
+aidl2legacy_AudioOffloadInfo_audio_offload_info_t(const media::AudioOffloadInfo& aidl);
+ConversionResult<media::AudioOffloadInfo>
+legacy2aidl_audio_offload_info_t_AudioOffloadInfo(const audio_offload_info_t& legacy);
+
+ConversionResult<audio_config_t>
+aidl2legacy_AudioConfig_audio_config_t(const media::AudioConfig& aidl);
+ConversionResult<media::AudioConfig>
+legacy2aidl_audio_config_t_AudioConfig(const audio_config_t& legacy);
+
+ConversionResult<audio_config_base_t>
+aidl2legacy_AudioConfigBase_audio_config_base_t(const media::AudioConfigBase& aidl);
+ConversionResult<media::AudioConfigBase>
+legacy2aidl_audio_config_base_t_AudioConfigBase(const audio_config_base_t& legacy);
+
+ConversionResult<sp<IMemory>>
+aidl2legacy_SharedFileRegion_IMemory(const media::SharedFileRegion& aidl);
+ConversionResult<media::SharedFileRegion>
+legacy2aidl_IMemory_SharedFileRegion(const sp<IMemory>& legacy);
+
+ConversionResult<sp<IMemory>>
+aidl2legacy_NullableSharedFileRegion_IMemory(const std::optional<media::SharedFileRegion>& aidl);
+ConversionResult<std::optional<media::SharedFileRegion>>
+legacy2aidl_NullableIMemory_SharedFileRegion(const sp<IMemory>& legacy);
+
+}  // namespace android
diff --git a/media/libaudioclient/include/media/AudioClient.h b/media/libaudioclient/include/media/AudioClient.h
index 247af9e..0b89d15 100644
--- a/media/libaudioclient/include/media/AudioClient.h
+++ b/media/libaudioclient/include/media/AudioClient.h
@@ -18,14 +18,12 @@
 #ifndef ANDROID_AUDIO_CLIENT_H
 #define ANDROID_AUDIO_CLIENT_H
 
-#include <binder/Parcel.h>
-#include <binder/Parcelable.h>
-#include <system/audio.h>
+#include <sys/types.h>
 #include <utils/String16.h>
 
 namespace android {
 
-class AudioClient : public Parcelable {
+class AudioClient {
  public:
     AudioClient() :
         clientUid(-1), clientPid(-1), clientTid(-1), packageName("") {}
@@ -34,22 +32,6 @@
     pid_t clientPid;
     pid_t clientTid;
     String16 packageName;
-
-    status_t readFromParcel(const Parcel *parcel) override {
-        clientUid = parcel->readInt32();
-        clientPid = parcel->readInt32();
-        clientTid = parcel->readInt32();
-        packageName = parcel->readString16();
-        return NO_ERROR;
-    }
-
-    status_t writeToParcel(Parcel *parcel) const override {
-        parcel->writeInt32(clientUid);
-        parcel->writeInt32(clientPid);
-        parcel->writeInt32(clientTid);
-        parcel->writeString16(packageName);
-        return NO_ERROR;
-    }
 };
 
 }; // namespace android
diff --git a/media/libaudioclient/include/media/AudioEffect.h b/media/libaudioclient/include/media/AudioEffect.h
index cb76252..8371711 100644
--- a/media/libaudioclient/include/media/AudioEffect.h
+++ b/media/libaudioclient/include/media/AudioEffect.h
@@ -22,8 +22,6 @@
 
 #include <media/IAudioFlinger.h>
 #include <media/IAudioPolicyService.h>
-#include <media/IEffect.h>
-#include <media/IEffectClient.h>
 #include <media/AudioSystem.h>
 #include <system/audio_effect.h>
 
@@ -31,6 +29,9 @@
 #include <utils/Errors.h>
 #include <binder/IInterface.h>
 
+#include "android/media/IEffect.h"
+#include "android/media/BnEffectClient.h"
+
 
 namespace android {
 
@@ -339,16 +340,21 @@
      *
      * opPackageName:      The package name used for app op checks.
      */
-    AudioEffect(const String16& opPackageName);
+    explicit AudioEffect(const String16& opPackageName);
 
+    /* Terminates the AudioEffect and unregisters it from AudioFlinger.
+     * The effect engine is also destroyed if this AudioEffect was the last client controlling
+     * the engine.
+     */
+                        ~AudioEffect();
 
-    /* Constructor.
+    /**
+     * Initialize an uninitialized AudioEffect.
      *
      * Parameters:
      *
      * type:  type of effect created: can be null if uuid is specified. This corresponds to
      *        the OpenSL ES interface implemented by this effect.
-     * opPackageName:  The package name used for app op checks.
      * uuid:  Uuid of effect created: can be null if type is specified. This uuid corresponds to
      *        a particular implementation of an effect type.
      * priority:    requested priority for effect control: the priority level corresponds to the
@@ -356,7 +362,7 @@
      *      higher priorities, 0 being the normal priority.
      * cbf:         optional callback function (see effect_callback_t)
      * user:        pointer to context for use by the callback receiver.
-     * sessionID:   audio session this effect is associated to.
+     * sessionId:   audio session this effect is associated with.
      *      If equal to AUDIO_SESSION_OUTPUT_MIX, the effect will be global to
      *      the output mix.  Otherwise, the effect will be applied to all players
      *      (AudioTrack or MediaPLayer) within the same audio session.
@@ -369,46 +375,13 @@
      *        In this mode, no IEffect interface to AudioFlinger is created and all actions
      *        besides getters implemented in client AudioEffect object are no ops
      *        after effect creation.
+     *
+     * Returned status (from utils/Errors.h) can be:
+     *  - NO_ERROR or ALREADY_EXISTS: successful initialization
+     *  - INVALID_OPERATION: AudioEffect is already initialized
+     *  - BAD_VALUE: invalid parameter
+     *  - NO_INIT: audio flinger or audio hardware not initialized
      */
-
-    AudioEffect(const effect_uuid_t *type,
-                const String16& opPackageName,
-                const effect_uuid_t *uuid = NULL,
-                int32_t priority = 0,
-                effect_callback_t cbf = NULL,
-                void* user = NULL,
-                audio_session_t sessionId = AUDIO_SESSION_OUTPUT_MIX,
-                audio_io_handle_t io = AUDIO_IO_HANDLE_NONE,
-                const AudioDeviceTypeAddr& device = {},
-                bool probe = false);
-
-    /* Constructor.
-     *      Same as above but with type and uuid specified by character strings
-     */
-    AudioEffect(const char *typeStr,
-                    const String16& opPackageName,
-                    const char *uuidStr = NULL,
-                    int32_t priority = 0,
-                    effect_callback_t cbf = NULL,
-                    void* user = NULL,
-                    audio_session_t sessionId = AUDIO_SESSION_OUTPUT_MIX,
-                    audio_io_handle_t io = AUDIO_IO_HANDLE_NONE,
-                    const AudioDeviceTypeAddr& device = {},
-                    bool probe = false);
-
-    /* Terminates the AudioEffect and unregisters it from AudioFlinger.
-     * The effect engine is also destroyed if this AudioEffect was the last controlling
-     * the engine.
-     */
-                        ~AudioEffect();
-
-    /* Initialize an uninitialized AudioEffect.
-    * Returned status (from utils/Errors.h) can be:
-    *  - NO_ERROR or ALREADY_EXISTS: successful initialization
-    *  - INVALID_OPERATION: AudioEffect is already initialized
-    *  - BAD_VALUE: invalid parameter
-    *  - NO_INIT: audio flinger or audio hardware not initialized
-    * */
             status_t    set(const effect_uuid_t *type,
                             const effect_uuid_t *uuid = NULL,
                             int32_t priority = 0,
@@ -418,6 +391,18 @@
                             audio_io_handle_t io = AUDIO_IO_HANDLE_NONE,
                             const AudioDeviceTypeAddr& device = {},
                             bool probe = false);
+    /*
+     * Same as above but with type and uuid specified by character strings.
+     */
+            status_t    set(const char *typeStr,
+                            const char *uuidStr = NULL,
+                            int32_t priority = 0,
+                            effect_callback_t cbf = NULL,
+                            void* user = NULL,
+                            audio_session_t sessionId = AUDIO_SESSION_OUTPUT_MIX,
+                            audio_io_handle_t io = AUDIO_IO_HANDLE_NONE,
+                            const AudioDeviceTypeAddr& device = {},
+                            bool probe = false);
 
     /* Result of constructing the AudioEffect. This must be checked
      * before using any AudioEffect API.
@@ -547,90 +532,67 @@
      static const uint32_t kMaxPreProcessing = 10;
 
 protected:
-     bool                    mEnabled;           // enable state
-     audio_session_t         mSessionId;         // audio session ID
-     int32_t                 mPriority;          // priority for effect control
-     status_t                mStatus;            // effect status
-     bool                    mProbe;             // effect created in probe mode: all commands
+     const String16          mOpPackageName;     // The package name used for app op checks.
+     bool                    mEnabled = false;   // enable state
+     audio_session_t         mSessionId = AUDIO_SESSION_OUTPUT_MIX; // audio session ID
+     int32_t                 mPriority = 0;      // priority for effect control
+     status_t                mStatus = NO_INIT;  // effect status
+     bool                    mProbe = false;     // effect created in probe mode: all commands
                                                  // are no ops because mIEffect is NULL
-     effect_callback_t       mCbf;               // callback function for status, control and
+     effect_callback_t       mCbf = nullptr;     // callback function for status, control and
                                                  // parameter changes notifications
-     void*                   mUserData;          // client context for callback function
-     effect_descriptor_t     mDescriptor;        // effect descriptor
-     int32_t                 mId;                // system wide unique effect engine instance ID
+     void*                   mUserData = nullptr;// client context for callback function
+     effect_descriptor_t     mDescriptor = {};   // effect descriptor
+     int32_t                 mId = -1;           // system wide unique effect engine instance ID
      Mutex                   mLock;              // Mutex for mEnabled access
-     Mutex                   mConstructLock;     // Mutex for integrality construction
 
-     String16                mOpPackageName;     // The package name used for app op checks.
 
      // IEffectClient
      virtual void controlStatusChanged(bool controlGranted);
      virtual void enableStatusChanged(bool enabled);
-     virtual void commandExecuted(uint32_t cmdCode,
-             uint32_t cmdSize,
-             void *pCmdData,
-             uint32_t replySize,
-             void *pReplyData);
+     virtual void commandExecuted(int32_t cmdCode,
+                                  const std::vector<uint8_t>& cmdData,
+                                  const std::vector<uint8_t>& replyData);
 
 private:
 
      // Implements the IEffectClient interface
     class EffectClient :
-        public android::BnEffectClient, public android::IBinder::DeathRecipient
+        public media::BnEffectClient, public android::IBinder::DeathRecipient
     {
     public:
 
         EffectClient(AudioEffect *effect) : mEffect(effect){}
 
         // IEffectClient
-        virtual void controlStatusChanged(bool controlGranted) {
+        binder::Status controlStatusChanged(bool controlGranted) override {
             sp<AudioEffect> effect = mEffect.promote();
             if (effect != 0) {
-                {
-                    // Got the mConstructLock means the construction of AudioEffect
-                    // has finished, we should release the mConstructLock immediately.
-                    AutoMutex lock(effect->mConstructLock);
-                }
                 effect->controlStatusChanged(controlGranted);
             }
+            return binder::Status::ok();
         }
-        virtual void enableStatusChanged(bool enabled) {
+        binder::Status enableStatusChanged(bool enabled) override {
             sp<AudioEffect> effect = mEffect.promote();
             if (effect != 0) {
-                {
-                    // Got the mConstructLock means the construction of AudioEffect
-                    // has finished, we should release the mConstructLock immediately.
-                    AutoMutex lock(effect->mConstructLock);
-                }
                 effect->enableStatusChanged(enabled);
             }
+            return binder::Status::ok();
         }
-        virtual void commandExecuted(uint32_t cmdCode,
-                                     uint32_t cmdSize,
-                                     void *pCmdData,
-                                     uint32_t replySize,
-                                     void *pReplyData) {
+        binder::Status commandExecuted(int32_t cmdCode,
+                             const std::vector<uint8_t>& cmdData,
+                             const std::vector<uint8_t>& replyData) override {
             sp<AudioEffect> effect = mEffect.promote();
             if (effect != 0) {
-                {
-                    // Got the mConstructLock means the construction of AudioEffect
-                    // has finished, we should release the mConstructLock immediately.
-                    AutoMutex lock(effect->mConstructLock);
-                }
-                effect->commandExecuted(
-                    cmdCode, cmdSize, pCmdData, replySize, pReplyData);
+                effect->commandExecuted(cmdCode, cmdData, replyData);
             }
+            return binder::Status::ok();
         }
 
         // IBinder::DeathRecipient
         virtual void binderDied(const wp<IBinder>& /*who*/) {
             sp<AudioEffect> effect = mEffect.promote();
             if (effect != 0) {
-                {
-                    // Got the mConstructLock means the construction of AudioEffect
-                    // has finished, we should release the mConstructLock immediately.
-                    AutoMutex lock(effect->mConstructLock);
-                }
                 effect->binderDied();
             }
         }
@@ -641,10 +603,10 @@
 
     void binderDied();
 
-    sp<IEffect>             mIEffect;           // IEffect binder interface
+    sp<media::IEffect>      mIEffect;           // IEffect binder interface
     sp<EffectClient>        mIEffectClient;     // IEffectClient implementation
     sp<IMemory>             mCblkMemory;        // shared memory for deferred parameter setting
-    effect_param_cblk_t*    mCblk;              // control block for deferred parameter setting
+    effect_param_cblk_t*    mCblk = nullptr;    // control block for deferred parameter setting
     pid_t                   mClientPid = (pid_t)-1;
     uid_t                   mClientUid = (uid_t)-1;
 };
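
Note: with the AIDL-based IEffectClient above, effect command payloads arrive as byte vectors
instead of raw pointer/size pairs. A minimal sketch of a handler consuming them; the handler
name is illustrative and the <vector>/<cstring> includes are elided:

    void onCommandExecuted(int32_t cmdCode,
                           const std::vector<uint8_t>& cmdData,
                           const std::vector<uint8_t>& replyData) {
        // The legacy (pCmdData, cmdSize) pair maps to cmdData.data() / cmdData.size().
        if (replyData.size() >= sizeof(int32_t)) {
            int32_t status;
            memcpy(&status, replyData.data(), sizeof(status));
            // ... act on the reply status ...
        }
    }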
diff --git a/media/libaudioclient/include/media/AudioSystem.h b/media/libaudioclient/include/media/AudioSystem.h
index 19c2cbd..dfc1982 100644
--- a/media/libaudioclient/include/media/AudioSystem.h
+++ b/media/libaudioclient/include/media/AudioSystem.h
@@ -19,12 +19,12 @@
 
 #include <sys/types.h>
 
+#include <android/media/BnAudioFlingerClient.h>
 #include <media/AudioDeviceTypeAddr.h>
 #include <media/AudioPolicy.h>
 #include <media/AudioProductStrategy.h>
 #include <media/AudioVolumeGroup.h>
 #include <media/AudioIoDescriptor.h>
-#include <media/IAudioFlingerClient.h>
 #include <media/IAudioPolicyServiceClient.h>
 #include <media/MicrophoneInfo.h>
 #include <set>
@@ -361,11 +361,11 @@
 
     static status_t registerPolicyMixes(const Vector<AudioMix>& mixes, bool registration);
 
-    static status_t setUidDeviceAffinities(uid_t uid, const Vector<AudioDeviceTypeAddr>& devices);
+    static status_t setUidDeviceAffinities(uid_t uid, const AudioDeviceTypeAddrVector& devices);
 
     static status_t removeUidDeviceAffinities(uid_t uid);
 
-    static status_t setUserIdDeviceAffinities(int userId, const Vector<AudioDeviceTypeAddr>& devices);
+    static status_t setUserIdDeviceAffinities(int userId, const AudioDeviceTypeAddrVector& devices);
 
     static status_t removeUserIdDeviceAffinities(int userId);
 
@@ -425,13 +425,29 @@
      */
     static status_t setAudioHalPids(const std::vector<pid_t>& pids);
 
-    static status_t setPreferredDeviceForStrategy(product_strategy_t strategy,
-            const AudioDeviceTypeAddr &device);
+    static status_t setDevicesRoleForStrategy(product_strategy_t strategy,
+            device_role_t role, const AudioDeviceTypeAddrVector &devices);
 
-    static status_t removePreferredDeviceForStrategy(product_strategy_t strategy);
+    static status_t removeDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role);
 
-    static status_t getPreferredDeviceForStrategy(product_strategy_t strategy,
-            AudioDeviceTypeAddr &device);
+    static status_t getDevicesForRoleAndStrategy(product_strategy_t strategy,
+            device_role_t role, AudioDeviceTypeAddrVector &devices);
+
+    static status_t setDevicesRoleForCapturePreset(audio_source_t audioSource,
+            device_role_t role, const AudioDeviceTypeAddrVector &devices);
+
+    static status_t addDevicesRoleForCapturePreset(audio_source_t audioSource,
+            device_role_t role, const AudioDeviceTypeAddrVector &devices);
+
+    static status_t removeDevicesRoleForCapturePreset(
+            audio_source_t audioSource, device_role_t role,
+            const AudioDeviceTypeAddrVector& devices);
+
+    static status_t clearDevicesRoleForCapturePreset(
+            audio_source_t audioSource, device_role_t role);
+
+    static status_t getDevicesForRoleAndCapturePreset(audio_source_t audioSource,
+            device_role_t role, AudioDeviceTypeAddrVector &devices);
 
     static status_t getDeviceForStrategy(product_strategy_t strategy,
             AudioDeviceTypeAddr &device);
@@ -515,7 +531,7 @@
 
 private:
 
-    class AudioFlingerClient: public IBinder::DeathRecipient, public BnAudioFlingerClient
+    class AudioFlingerClient: public IBinder::DeathRecipient, public media::BnAudioFlingerClient
     {
     public:
         AudioFlingerClient() :
@@ -535,9 +551,9 @@
 
         // indicate a change in the configuration of an output or input: keeps the cached
         // values for output/input parameters up-to-date in client process
-        virtual void ioConfigChanged(audio_io_config_event event,
-                                     const sp<AudioIoDescriptor>& ioDesc);
-
+        binder::Status ioConfigChanged(
+                media::AudioIoConfigEvent event,
+                const media::AudioIoDescriptor& ioDesc) override;
 
         status_t addAudioDeviceCallback(const wp<AudioDeviceCallback>& callback,
                                                audio_io_handle_t audioIo,
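
Note: a rough usage sketch for the device-role API introduced above. The role constant
DEVICE_ROLE_PREFERRED is assumed from system/audio_policy.h, the strategy handle is assumed
to be obtained elsewhere, and the device address is illustrative:

    status_t preferBluetoothForStrategy(product_strategy_t strategy) {
        AudioDeviceTypeAddrVector devices = {
                AudioDeviceTypeAddr(AUDIO_DEVICE_OUT_BLUETOOTH_A2DP, "00:11:22:33:44:55")};
        status_t status =
                AudioSystem::setDevicesRoleForStrategy(strategy, DEVICE_ROLE_PREFERRED, devices);
        if (status == NO_ERROR) {
            AudioDeviceTypeAddrVector current;   // read back what the policy now reports
            AudioSystem::getDevicesForRoleAndStrategy(strategy, DEVICE_ROLE_PREFERRED, current);
        }
        return status;
    }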
diff --git a/media/libaudioclient/include/media/AudioTrack.h b/media/libaudioclient/include/media/AudioTrack.h
index 0dbd842..de183d8 100644
--- a/media/libaudioclient/include/media/AudioTrack.h
+++ b/media/libaudioclient/include/media/AudioTrack.h
@@ -26,6 +26,8 @@
 #include <media/Modulo.h>
 #include <utils/threads.h>
 
+#include <string>
+
 #include "android/media/BnAudioTrackCallback.h"
 #include "android/media/IAudioTrackCallback.h"
 
@@ -177,6 +179,8 @@
      */
                         AudioTrack();
 
+                        AudioTrack(const std::string& opPackageName);
+
     /* Creates an AudioTrack object and registers it with AudioFlinger.
      * Once created, the track needs to be started before it can be used.
      * Unspecified values are set to appropriate default values.
@@ -258,7 +262,8 @@
                                     const audio_attributes_t* pAttributes = NULL,
                                     bool doNotReconnect = false,
                                     float maxRequiredSpeed = 1.0f,
-                                    audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE);
+                                    audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE,
+                                    const std::string& opPackageName = "");
 
     /* Creates an audio track and registers it with AudioFlinger.
      * With this constructor, the track is configured for static buffer mode.
@@ -288,7 +293,8 @@
                                     pid_t pid = -1,
                                     const audio_attributes_t* pAttributes = NULL,
                                     bool doNotReconnect = false,
-                                    float maxRequiredSpeed = 1.0f);
+                                    float maxRequiredSpeed = 1.0f,
+                                    const std::string& opPackageName = "");
 
     /* Terminates the AudioTrack and unregisters it from AudioFlinger.
      * Also destroys all resources associated with the AudioTrack.
@@ -338,6 +344,27 @@
                             bool doNotReconnect = false,
                             float maxRequiredSpeed = 1.0f,
                             audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE);
+    // FIXME(b/169889714): Vendor code depends on the old method signature at link time
+            status_t    set(audio_stream_type_t streamType,
+                            uint32_t sampleRate,
+                            audio_format_t format,
+                            uint32_t channelMask,
+                            size_t frameCount   = 0,
+                            audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE,
+                            callback_t cbf      = NULL,
+                            void* user          = NULL,
+                            int32_t notificationFrames = 0,
+                            const sp<IMemory>& sharedBuffer = 0,
+                            bool threadCanCallJava = false,
+                            audio_session_t sessionId  = AUDIO_SESSION_ALLOCATE,
+                            transfer_type transferType = TRANSFER_DEFAULT,
+                            const audio_offload_info_t *offloadInfo = NULL,
+                            uid_t uid = AUDIO_UID_INVALID,
+                            pid_t pid = -1,
+                            const audio_attributes_t* pAttributes = NULL,
+                            bool doNotReconnect = false,
+                            float maxRequiredSpeed = 1.0f,
+                            audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE);
 
     /* Result of constructing the AudioTrack. This must be checked for successful initialization
      * before using any AudioTrack API (except for set()), because using
@@ -1236,6 +1263,8 @@
 
     sp<media::VolumeHandler>       mVolumeHandler;
 
+    const std::string      mOpPackageName;
+
 private:
     class DeathNotifier : public IBinder::DeathRecipient {
     public:
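
Note: the new opPackageName parameter threads the client package through to AudioFlinger for
attribution. A minimal sketch of the constructor-plus-set() path; the package name and stream
parameters are illustrative, not part of this change:

    sp<AudioTrack> track = new AudioTrack("com.example.player");
    status_t status = track->set(AUDIO_STREAM_MUSIC,
                                 48000 /* sampleRate */,
                                 AUDIO_FORMAT_PCM_16_BIT,
                                 AUDIO_CHANNEL_OUT_STEREO);
    if (status == NO_ERROR && track->initCheck() == NO_ERROR) {
        track->start();
    }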
diff --git a/media/libaudioclient/include/media/IAudioFlinger.h b/media/libaudioclient/include/media/IAudioFlinger.h
index 612ce7a..3491fda 100644
--- a/media/libaudioclient/include/media/IAudioFlinger.h
+++ b/media/libaudioclient/include/media/IAudioFlinger.h
@@ -26,21 +26,27 @@
 #include <binder/IInterface.h>
 #include <binder/Parcel.h>
 #include <binder/Parcelable.h>
+#include <media/AidlConversion.h>
 #include <media/AudioClient.h>
 #include <media/DeviceDescriptorBase.h>
 #include <media/IAudioTrack.h>
-#include <media/IAudioFlingerClient.h>
 #include <system/audio.h>
 #include <system/audio_effect.h>
 #include <system/audio_policy.h>
-#include <media/IEffect.h>
-#include <media/IEffectClient.h>
 #include <utils/String8.h>
 #include <media/MicrophoneInfo.h>
+#include <string>
 #include <vector>
 
+#include "android/media/CreateRecordRequest.h"
+#include "android/media/CreateRecordResponse.h"
+#include "android/media/CreateTrackRequest.h"
+#include "android/media/CreateTrackResponse.h"
 #include "android/media/IAudioRecord.h"
+#include "android/media/IAudioFlingerClient.h"
 #include "android/media/IAudioTrackCallback.h"
+#include "android/media/IEffect.h"
+#include "android/media/IEffectClient.h"
 
 namespace android {
 
@@ -55,70 +61,8 @@
      * when calling createTrack() including arguments that will be updated by AudioFlinger
      * and returned in CreateTrackOutput object
      */
-    class CreateTrackInput : public Parcelable {
+    class CreateTrackInput {
     public:
-        status_t readFromParcel(const Parcel *parcel) override {
-            /* input arguments*/
-            memset(&attr, 0, sizeof(audio_attributes_t));
-            if (parcel->read(&attr, sizeof(audio_attributes_t)) != NO_ERROR) {
-                return DEAD_OBJECT;
-            }
-            attr.tags[AUDIO_ATTRIBUTES_TAGS_MAX_SIZE -1] = '\0';
-            memset(&config, 0, sizeof(audio_config_t));
-            if (parcel->read(&config, sizeof(audio_config_t)) != NO_ERROR) {
-                return DEAD_OBJECT;
-            }
-            if (clientInfo.readFromParcel(parcel) != NO_ERROR) {
-                return DEAD_OBJECT;
-            }
-            if (parcel->readInt32() != 0) {
-                // TODO: Using unsecurePointer() has some associated security
-                //       pitfalls (see declaration for details).
-                //       Either document why it is safe in this case or address
-                //       the issue (e.g. by copying).
-                sharedBuffer = interface_cast<IMemory>(parcel->readStrongBinder());
-                if (sharedBuffer == 0 || sharedBuffer->unsecurePointer() == NULL) {
-                    return BAD_VALUE;
-                }
-            }
-            notificationsPerBuffer = parcel->readInt32();
-            speed = parcel->readFloat();
-            audioTrackCallback = interface_cast<media::IAudioTrackCallback>(
-                    parcel->readStrongBinder());
-
-            /* input/output arguments*/
-            (void)parcel->read(&flags, sizeof(audio_output_flags_t));
-            frameCount = parcel->readInt64();
-            notificationFrameCount = parcel->readInt64();
-            (void)parcel->read(&selectedDeviceId, sizeof(audio_port_handle_t));
-            (void)parcel->read(&sessionId, sizeof(audio_session_t));
-            return NO_ERROR;
-        }
-
-        status_t writeToParcel(Parcel *parcel) const override {
-            /* input arguments*/
-            (void)parcel->write(&attr, sizeof(audio_attributes_t));
-            (void)parcel->write(&config, sizeof(audio_config_t));
-            (void)clientInfo.writeToParcel(parcel);
-            if (sharedBuffer != 0) {
-                (void)parcel->writeInt32(1);
-                (void)parcel->writeStrongBinder(IInterface::asBinder(sharedBuffer));
-            } else {
-                (void)parcel->writeInt32(0);
-            }
-            (void)parcel->writeInt32(notificationsPerBuffer);
-            (void)parcel->writeFloat(speed);
-            (void)parcel->writeStrongBinder(IInterface::asBinder(audioTrackCallback));
-
-            /* input/output arguments*/
-            (void)parcel->write(&flags, sizeof(audio_output_flags_t));
-            (void)parcel->writeInt64(frameCount);
-            (void)parcel->writeInt64(notificationFrameCount);
-            (void)parcel->write(&selectedDeviceId, sizeof(audio_port_handle_t));
-            (void)parcel->write(&sessionId, sizeof(audio_session_t));
-            return NO_ERROR;
-        }
-
         /* input */
         audio_attributes_t attr;
         audio_config_t config;
@@ -127,6 +71,7 @@
         uint32_t notificationsPerBuffer;
         float speed;
         sp<media::IAudioTrackCallback> audioTrackCallback;
+        std::string opPackageName;
 
         /* input/output */
         audio_output_flags_t flags;
@@ -134,50 +79,17 @@
         size_t notificationFrameCount;
         audio_port_handle_t selectedDeviceId;
         audio_session_t sessionId;
+
+        ConversionResult<media::CreateTrackRequest> toAidl() const;
+        static ConversionResult<CreateTrackInput> fromAidl(const media::CreateTrackRequest& aidl);
     };
 
     /* CreateTrackOutput contains all output arguments returned by AudioFlinger to AudioTrack
      * when calling createTrack() including arguments that were passed as I/O for update by
      * CreateTrackInput.
      */
-    class CreateTrackOutput : public Parcelable {
+    class CreateTrackOutput {
     public:
-        status_t readFromParcel(const Parcel *parcel) override {
-            /* input/output arguments*/
-            (void)parcel->read(&flags, sizeof(audio_output_flags_t));
-            frameCount = parcel->readInt64();
-            notificationFrameCount = parcel->readInt64();
-            (void)parcel->read(&selectedDeviceId, sizeof(audio_port_handle_t));
-            (void)parcel->read(&sessionId, sizeof(audio_session_t));
-
-            /* output arguments*/
-            sampleRate = parcel->readUint32();
-            afFrameCount = parcel->readInt64();
-            afSampleRate = parcel->readInt64();
-            afLatencyMs = parcel->readInt32();
-            (void)parcel->read(&outputId, sizeof(audio_io_handle_t));
-            (void)parcel->read(&portId, sizeof(audio_port_handle_t));
-            return NO_ERROR;
-        }
-
-        status_t writeToParcel(Parcel *parcel) const override {
-            /* input/output arguments*/
-            (void)parcel->write(&flags, sizeof(audio_output_flags_t));
-            (void)parcel->writeInt64(frameCount);
-            (void)parcel->writeInt64(notificationFrameCount);
-            (void)parcel->write(&selectedDeviceId, sizeof(audio_port_handle_t));
-            (void)parcel->write(&sessionId, sizeof(audio_session_t));
-
-            /* output arguments*/
-            (void)parcel->writeUint32(sampleRate);
-            (void)parcel->writeInt64(afFrameCount);
-            (void)parcel->writeInt64(afSampleRate);
-            (void)parcel->writeInt32(afLatencyMs);
-            (void)parcel->write(&outputId, sizeof(audio_io_handle_t));
-            (void)parcel->write(&portId, sizeof(audio_port_handle_t));
-            return NO_ERROR;
-        }
-
         /* input/output */
         audio_output_flags_t flags;
         size_t frameCount;
@@ -192,59 +104,17 @@
         uint32_t afLatencyMs;
         audio_io_handle_t outputId;
         audio_port_handle_t portId;
+
+        ConversionResult<media::CreateTrackResponse> toAidl() const;
+        static ConversionResult<CreateTrackOutput> fromAidl(const media::CreateTrackResponse& aidl);
     };
 
     /* CreateRecordInput contains all input arguments sent by AudioRecord to AudioFlinger
      * when calling createRecord() including arguments that will be updated by AudioFlinger
      * and returned in CreateRecordOutput object
      */
-    class CreateRecordInput : public Parcelable {
+    class CreateRecordInput {
     public:
-        status_t readFromParcel(const Parcel *parcel) override {
-            /* input arguments*/
-            memset(&attr, 0, sizeof(audio_attributes_t));
-            if (parcel->read(&attr, sizeof(audio_attributes_t)) != NO_ERROR) {
-                return DEAD_OBJECT;
-            }
-            attr.tags[AUDIO_ATTRIBUTES_TAGS_MAX_SIZE -1] = '\0';
-            memset(&config, 0, sizeof(audio_config_base_t));
-            if (parcel->read(&config, sizeof(audio_config_base_t)) != NO_ERROR) {
-                return DEAD_OBJECT;
-            }
-            if (clientInfo.readFromParcel(parcel) != NO_ERROR) {
-                return DEAD_OBJECT;
-            }
-            opPackageName = parcel->readString16();
-            if (parcel->read(&riid, sizeof(audio_unique_id_t)) != NO_ERROR) {
-                return DEAD_OBJECT;
-            }
-
-            /* input/output arguments*/
-            (void)parcel->read(&flags, sizeof(audio_input_flags_t));
-            frameCount = parcel->readInt64();
-            notificationFrameCount = parcel->readInt64();
-            (void)parcel->read(&selectedDeviceId, sizeof(audio_port_handle_t));
-            (void)parcel->read(&sessionId, sizeof(audio_session_t));
-            return NO_ERROR;
-        }
-
-        status_t writeToParcel(Parcel *parcel) const override {
-            /* input arguments*/
-            (void)parcel->write(&attr, sizeof(audio_attributes_t));
-            (void)parcel->write(&config, sizeof(audio_config_base_t));
-            (void)clientInfo.writeToParcel(parcel);
-            (void)parcel->writeString16(opPackageName);
-            (void)parcel->write(&riid, sizeof(audio_unique_id_t));
-
-            /* input/output arguments*/
-            (void)parcel->write(&flags, sizeof(audio_input_flags_t));
-            (void)parcel->writeInt64(frameCount);
-            (void)parcel->writeInt64(notificationFrameCount);
-            (void)parcel->write(&selectedDeviceId, sizeof(audio_port_handle_t));
-            (void)parcel->write(&sessionId, sizeof(audio_session_t));
-            return NO_ERROR;
-        }
-
         /* input */
         audio_attributes_t attr;
         audio_config_base_t config;
@@ -258,77 +128,17 @@
         size_t notificationFrameCount;
         audio_port_handle_t selectedDeviceId;
         audio_session_t sessionId;
+
+        ConversionResult<media::CreateRecordRequest> toAidl() const;
+        static ConversionResult<CreateRecordInput> fromAidl(const media::CreateRecordRequest& aidl);
     };
 
     /* CreateRecordOutput contains all output arguments returned by AudioFlinger to AudioRecord
      * when calling createRecord() including arguments that were passed as I/O for update by
      * CreateRecordInput.
      */
-    class CreateRecordOutput : public Parcelable {
+    class CreateRecordOutput {
     public:
-        status_t readFromParcel(const Parcel *parcel) override {
-            /* input/output arguments*/
-            (void)parcel->read(&flags, sizeof(audio_input_flags_t));
-            frameCount = parcel->readInt64();
-            notificationFrameCount = parcel->readInt64();
-            (void)parcel->read(&selectedDeviceId, sizeof(audio_port_handle_t));
-            (void)parcel->read(&sessionId, sizeof(audio_session_t));
-
-            /* output arguments*/
-            sampleRate = parcel->readUint32();
-            (void)parcel->read(&inputId, sizeof(audio_io_handle_t));
-            if (parcel->readInt32() != 0) {
-                cblk = interface_cast<IMemory>(parcel->readStrongBinder());
-                // TODO: Using unsecurePointer() has some associated security
-                //       pitfalls (see declaration for details).
-                //       Either document why it is safe in this case or address
-                //       the issue (e.g. by copying).
-                if (cblk == 0 || cblk->unsecurePointer() == NULL) {
-                    return BAD_VALUE;
-                }
-            }
-            if (parcel->readInt32() != 0) {
-                buffers = interface_cast<IMemory>(parcel->readStrongBinder());
-                // TODO: Using unsecurePointer() has some associated security
-                //       pitfalls (see declaration for details).
-                //       Either document why it is safe in this case or address
-                //       the issue (e.g. by copying).
-                if (buffers == 0 || buffers->unsecurePointer() == NULL) {
-                    return BAD_VALUE;
-                }
-            }
-            (void)parcel->read(&portId, sizeof(audio_port_handle_t));
-            return NO_ERROR;
-        }
-
-        status_t writeToParcel(Parcel *parcel) const override {
-            /* input/output arguments*/
-            (void)parcel->write(&flags, sizeof(audio_input_flags_t));
-            (void)parcel->writeInt64(frameCount);
-            (void)parcel->writeInt64(notificationFrameCount);
-            (void)parcel->write(&selectedDeviceId, sizeof(audio_port_handle_t));
-            (void)parcel->write(&sessionId, sizeof(audio_session_t));
-
-            /* output arguments*/
-            (void)parcel->writeUint32(sampleRate);
-            (void)parcel->write(&inputId, sizeof(audio_io_handle_t));
-            if (cblk != 0) {
-                (void)parcel->writeInt32(1);
-                (void)parcel->writeStrongBinder(IInterface::asBinder(cblk));
-            } else {
-                (void)parcel->writeInt32(0);
-            }
-            if (buffers != 0) {
-                (void)parcel->writeInt32(1);
-                (void)parcel->writeStrongBinder(IInterface::asBinder(buffers));
-            } else {
-                (void)parcel->writeInt32(0);
-            }
-            (void)parcel->write(&portId, sizeof(audio_port_handle_t));
-
-            return NO_ERROR;
-        }
-
         /* input/output */
         audio_input_flags_t flags;
         size_t frameCount;
@@ -342,6 +152,9 @@
         sp<IMemory> cblk;
         sp<IMemory> buffers;
         audio_port_handle_t portId;
+
+        ConversionResult<media::CreateRecordResponse> toAidl() const;
+        static ConversionResult<CreateRecordOutput> fromAidl(const media::CreateRecordResponse& aidl);
     };
 
     // invariant on exit for all APIs that return an sp<>:
@@ -350,13 +163,13 @@
     /* create an audio track and registers it with AudioFlinger.
      * return null if the track cannot be created.
      */
-    virtual sp<IAudioTrack> createTrack(const CreateTrackInput& input,
-                                        CreateTrackOutput& output,
-                                        status_t *status) = 0;
+    virtual sp<IAudioTrack> createTrack(const media::CreateTrackRequest& input,
+                                        media::CreateTrackResponse& output,
+                                        status_t* status) = 0;
 
-    virtual sp<media::IAudioRecord> createRecord(const CreateRecordInput& input,
-                                        CreateRecordOutput& output,
-                                        status_t *status) = 0;
+    virtual sp<media::IAudioRecord> createRecord(const media::CreateRecordRequest& input,
+                                                 media::CreateRecordResponse& output,
+                                                 status_t* status) = 0;
 
     // FIXME Surprisingly, format/latency don't work for input handles
 
@@ -412,7 +225,7 @@
     // Register an object to receive audio input/output change and track notifications.
     // For a given calling pid, AudioFlinger disregards any registrations after the first.
     // Thus the IAudioFlingerClient must be a singleton per process.
-    virtual void registerClient(const sp<IAudioFlingerClient>& client) = 0;
+    virtual void registerClient(const sp<media::IAudioFlingerClient>& client) = 0;
 
     // retrieve the audio recording buffer size in bytes
     // FIXME This API assumes a route, and so should be deprecated.
@@ -463,9 +276,9 @@
                                          uint32_t preferredTypeFlag,
                                          effect_descriptor_t *pDescriptor) const = 0;
 
-    virtual sp<IEffect> createEffect(
+    virtual sp<media::IEffect> createEffect(
                                     effect_descriptor_t *pDesc,
-                                    const sp<IEffectClient>& client,
+                                    const sp<media::IEffectClient>& client,
                                     int32_t priority,
                                     // AudioFlinger doesn't take over handle reference from client
                                     audio_io_handle_t output,
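
Note: createTrack()/createRecord() now take the AIDL request/response parcelables directly;
CreateTrackInput/Output survive only as plain structs with toAidl()/fromAidl() helpers. A
rough client-side sketch, assuming ConversionResult exposes has_value()/value() accessors
(as in base::expected); the wrapper function is illustrative:

    sp<IAudioTrack> createTrackViaAidl(const sp<IAudioFlinger>& af,
                                       const IAudioFlinger::CreateTrackInput& input,
                                       IAudioFlinger::CreateTrackOutput* output,
                                       status_t* status) {
        auto request = input.toAidl();        // ConversionResult<media::CreateTrackRequest>
        if (!request.has_value()) {           // conversion failed
            *status = BAD_VALUE;
            return nullptr;
        }
        media::CreateTrackResponse response;
        sp<IAudioTrack> track = af->createTrack(request.value(), response, status);
        if (track != nullptr) {
            auto converted = IAudioFlinger::CreateTrackOutput::fromAidl(response);
            if (converted.has_value()) *output = converted.value();
        }
        return track;
    }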
diff --git a/media/libaudioclient/include/media/IAudioFlingerClient.h b/media/libaudioclient/include/media/IAudioFlingerClient.h
deleted file mode 100644
index 0080bc9..0000000
--- a/media/libaudioclient/include/media/IAudioFlingerClient.h
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Copyright (C) 2009 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_IAUDIOFLINGERCLIENT_H
-#define ANDROID_IAUDIOFLINGERCLIENT_H
-
-
-#include <utils/RefBase.h>
-#include <binder/IInterface.h>
-#include <utils/KeyedVector.h>
-#include <system/audio.h>
-#include <media/AudioIoDescriptor.h>
-
-namespace android {
-
-// ----------------------------------------------------------------------------
-
-class IAudioFlingerClient : public IInterface
-{
-public:
-    DECLARE_META_INTERFACE(AudioFlingerClient);
-
-    // Notifies a change of audio input/output configuration.
-    virtual void ioConfigChanged(audio_io_config_event event,
-                                 const sp<AudioIoDescriptor>& ioDesc) = 0;
-
-};
-
-
-// ----------------------------------------------------------------------------
-
-class BnAudioFlingerClient : public BnInterface<IAudioFlingerClient>
-{
-public:
-    virtual status_t    onTransact( uint32_t code,
-                                    const Parcel& data,
-                                    Parcel* reply,
-                                    uint32_t flags = 0);
-};
-
-// ----------------------------------------------------------------------------
-
-}; // namespace android
-
-#endif // ANDROID_IAUDIOFLINGERCLIENT_H
diff --git a/media/libaudioclient/include/media/IAudioPolicyService.h b/media/libaudioclient/include/media/IAudioPolicyService.h
index bb1c07f..837375d 100644
--- a/media/libaudioclient/include/media/IAudioPolicyService.h
+++ b/media/libaudioclient/include/media/IAudioPolicyService.h
@@ -196,13 +196,13 @@
 
     virtual status_t registerPolicyMixes(const Vector<AudioMix>& mixes, bool registration) = 0;
 
-    virtual status_t setUidDeviceAffinities(uid_t uid, const Vector<AudioDeviceTypeAddr>& devices)
+    virtual status_t setUidDeviceAffinities(uid_t uid, const AudioDeviceTypeAddrVector& devices)
             = 0;
 
     virtual status_t removeUidDeviceAffinities(uid_t uid) = 0;
 
     virtual status_t setUserIdDeviceAffinities(int userId,
-            const Vector<AudioDeviceTypeAddr>& devices) = 0;
+            const AudioDeviceTypeAddrVector& devices) = 0;
 
     virtual status_t removeUserIdDeviceAffinities(int userId) = 0;
 
@@ -241,13 +241,35 @@
 
     virtual bool     isCallScreenModeSupported() = 0;
 
-    virtual status_t setPreferredDeviceForStrategy(product_strategy_t strategy,
-                                                   const AudioDeviceTypeAddr &device) = 0;
+    virtual status_t setDevicesRoleForStrategy(product_strategy_t strategy,
+                                               device_role_t role,
+                                               const AudioDeviceTypeAddrVector &devices) = 0;
 
-    virtual status_t removePreferredDeviceForStrategy(product_strategy_t strategy) = 0;
+    virtual status_t removeDevicesRoleForStrategy(product_strategy_t strategy,
+                                                  device_role_t role) = 0;
 
-    virtual status_t getPreferredDeviceForStrategy(product_strategy_t strategy,
-                                                   AudioDeviceTypeAddr &device) = 0;
+    virtual status_t getDevicesForRoleAndStrategy(product_strategy_t strategy,
+                                                  device_role_t role,
+                                                  AudioDeviceTypeAddrVector &devices) = 0;
+
+    virtual status_t setDevicesRoleForCapturePreset(audio_source_t audioSource,
+                                                    device_role_t role,
+                                                    const AudioDeviceTypeAddrVector &devices) = 0;
+
+    virtual status_t addDevicesRoleForCapturePreset(audio_source_t audioSource,
+                                                    device_role_t role,
+                                                    const AudioDeviceTypeAddrVector &devices) = 0;
+
+    virtual status_t removeDevicesRoleForCapturePreset(
+            audio_source_t audioSource, device_role_t role,
+            const AudioDeviceTypeAddrVector& devices) = 0;
+
+    virtual status_t clearDevicesRoleForCapturePreset(audio_source_t audioSource,
+                                                      device_role_t role) = 0;
+
+    virtual status_t getDevicesForRoleAndCapturePreset(audio_source_t audioSource,
+                                                       device_role_t role,
+                                                       AudioDeviceTypeAddrVector &devices) = 0;
 
     // The return code here is only intended to represent transport errors. The
     // actual server implementation should always return NO_ERROR.
@@ -266,10 +288,6 @@
                                     const Parcel& data,
                                     Parcel* reply,
                                     uint32_t flags = 0);
-private:
-    void sanetizeAudioAttributes(audio_attributes_t* attr);
-    status_t sanitizeEffectDescriptor(effect_descriptor_t* desc);
-    status_t sanitizeAudioPortConfig(struct audio_port_config* config);
 };
 
 // ----------------------------------------------------------------------------
diff --git a/media/libaudioclient/include/media/IEffect.h b/media/libaudioclient/include/media/IEffect.h
deleted file mode 100644
index ff04869..0000000
--- a/media/libaudioclient/include/media/IEffect.h
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_IEFFECT_H
-#define ANDROID_IEFFECT_H
-
-#include <utils/RefBase.h>
-#include <binder/IInterface.h>
-#include <binder/Parcel.h>
-#include <binder/IMemory.h>
-
-namespace android {
-
-class IEffect: public IInterface
-{
-public:
-    DECLARE_META_INTERFACE(Effect);
-
-    virtual status_t enable() = 0;
-
-    virtual status_t disable() = 0;
-
-    virtual status_t command(uint32_t cmdCode,
-                             uint32_t cmdSize,
-                             void *pCmdData,
-                             uint32_t *pReplySize,
-                             void *pReplyData) = 0;
-
-    virtual void disconnect() = 0;
-
-    virtual sp<IMemory> getCblk() const = 0;
-};
-
-// ----------------------------------------------------------------------------
-
-class BnEffect: public BnInterface<IEffect>
-{
-public:
-    virtual status_t    onTransact( uint32_t code,
-                                    const Parcel& data,
-                                    Parcel* reply,
-                                    uint32_t flags = 0);
-};
-
-}; // namespace android
-
-#endif // ANDROID_IEFFECT_H
diff --git a/media/libaudioclient/include/media/IEffectClient.h b/media/libaudioclient/include/media/IEffectClient.h
deleted file mode 100644
index 2f78c98..0000000
--- a/media/libaudioclient/include/media/IEffectClient.h
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_IEFFECTCLIENT_H
-#define ANDROID_IEFFECTCLIENT_H
-
-#include <utils/RefBase.h>
-#include <binder/IInterface.h>
-#include <binder/Parcel.h>
-#include <binder/IMemory.h>
-
-namespace android {
-
-class IEffectClient: public IInterface
-{
-public:
-    DECLARE_META_INTERFACE(EffectClient);
-
-    virtual void controlStatusChanged(bool controlGranted) = 0;
-    virtual void enableStatusChanged(bool enabled) = 0;
-    virtual void commandExecuted(uint32_t cmdCode,
-                                 uint32_t cmdSize,
-                                 void *pCmdData,
-                                 uint32_t replySize,
-                                 void *pReplyData) = 0;
-};
-
-// ----------------------------------------------------------------------------
-
-class BnEffectClient: public BnInterface<IEffectClient>
-{
-public:
-    virtual status_t    onTransact( uint32_t code,
-                                    const Parcel& data,
-                                    Parcel* reply,
-                                    uint32_t flags = 0);
-};
-
-}; // namespace android
-
-#endif // ANDROID_IEFFECTCLIENT_H
diff --git a/media/libaudioclient/include/media/PlayerBase.h b/media/libaudioclient/include/media/PlayerBase.h
index e7a8abc..4aad9b4 100644
--- a/media/libaudioclient/include/media/PlayerBase.h
+++ b/media/libaudioclient/include/media/PlayerBase.h
@@ -19,6 +19,7 @@
 
 #include <audiomanager/AudioManager.h>
 #include <audiomanager/IAudioManager.h>
+#include <utils/Mutex.h>
 
 #include "android/media/BnPlayer.h"
 
@@ -40,8 +41,8 @@
     virtual binder::Status setPan(float pan) override;
     virtual binder::Status setStartDelayMs(int32_t delayMs) override;
     virtual binder::Status applyVolumeShaper(
-            const media::VolumeShaper::Configuration& configuration,
-            const media::VolumeShaper::Operation& operation) override;
+            const media::VolumeShaperConfiguration& configuration,
+            const media::VolumeShaperOperation& operation) override;
 
             status_t startWithStatus();
             status_t pauseWithStatus();
diff --git a/media/libaudioclient/include/media/ToneGenerator.h b/media/libaudioclient/include/media/ToneGenerator.h
index 5b0689a..04357a8 100644
--- a/media/libaudioclient/include/media/ToneGenerator.h
+++ b/media/libaudioclient/include/media/ToneGenerator.h
@@ -218,6 +218,7 @@
         TONE_INDIA_CONGESTION,      // Congestion tone: 400 Hz, 250ms ON, 250ms OFF...
         TONE_INDIA_CALL_WAITING,    // Call waiting tone: 400 Hz, tone repeated in a 0.2s on, 0.1s off, 0.2s on, 7.5s off pattern.
         TONE_INDIA_RINGTONE,        // Ring tone: 400 Hz tone modulated with 25Hz, 0.4 on 0.2 off 0.4 on 2..0 off
+        TONE_TW_RINGTONE,           // Ring tone: 440 Hz + 480 Hz, repeated in a 1s on, 3s off pattern.
         NUM_ALTERNATE_TONES
     };
 
@@ -230,6 +231,7 @@
         HONGKONG,
         IRELAND,
         INDIA,
+        TAIWAN,
         CEPT,
         NUM_REGIONS
     };
diff --git a/media/libaudioclient/include/media/TrackPlayerBase.h b/media/libaudioclient/include/media/TrackPlayerBase.h
index 66e9b3b..6d26e63 100644
--- a/media/libaudioclient/include/media/TrackPlayerBase.h
+++ b/media/libaudioclient/include/media/TrackPlayerBase.h
@@ -33,8 +33,8 @@
 
     //IPlayer implementation
     virtual binder::Status applyVolumeShaper(
-            const media::VolumeShaper::Configuration& configuration,
-            const media::VolumeShaper::Operation& operation);
+            const media::VolumeShaperConfiguration& configuration,
+            const media::VolumeShaperOperation& operation);
 
     //FIXME move to protected field, so far made public to minimize changes to AudioTrack logic
     sp<AudioTrack> mAudioTrack;
diff --git a/media/libaudiofoundation/Android.bp b/media/libaudiofoundation/Android.bp
index 548b080..a8e6c31 100644
--- a/media/libaudiofoundation/Android.bp
+++ b/media/libaudiofoundation/Android.bp
@@ -12,6 +12,12 @@
         "libaudio_system_headers",
         "libmedia_helper_headers",
     ],
+    host_supported: true,
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
 }
 
 cc_library {
diff --git a/media/libaudiofoundation/AudioDeviceTypeAddr.cpp b/media/libaudiofoundation/AudioDeviceTypeAddr.cpp
index b44043a..a47337b 100644
--- a/media/libaudiofoundation/AudioDeviceTypeAddr.cpp
+++ b/media/libaudiofoundation/AudioDeviceTypeAddr.cpp
@@ -16,12 +16,57 @@
 
 #include <media/AudioDeviceTypeAddr.h>
 
+#include <arpa/inet.h>
+#include <iostream>
+#include <regex>
+#include <set>
+#include <sstream>
+
 namespace android {
 
+namespace {
+
+static const std::string SUPPRESSED = "SUPPRESSED";
+static const std::regex MAC_ADDRESS_REGEX("([0-9a-fA-F]{2}:){5}[0-9a-fA-F]{2}");
+
+bool isSensitiveAddress(const std::string &address) {
+    if (std::regex_match(address, MAC_ADDRESS_REGEX)) {
+        return true;
+    }
+
+    sockaddr_storage ss4;
+    if (inet_pton(AF_INET, address.c_str(), &ss4) > 0) {
+        return true;
+    }
+
+    sockaddr_storage ss6;
+    if (inet_pton(AF_INET6, address.c_str(), &ss6) > 0) {
+        return true;
+    }
+
+    return false;
+}
+
+} // namespace
+
+AudioDeviceTypeAddr::AudioDeviceTypeAddr(audio_devices_t type, const std::string &address) :
+        mType(type), mAddress(address) {
+    mIsAddressSensitive = isSensitiveAddress(mAddress);
+}
+
 const char* AudioDeviceTypeAddr::getAddress() const {
     return mAddress.c_str();
 }
 
+const std::string& AudioDeviceTypeAddr::address() const {
+    return mAddress;
+}
+
+void AudioDeviceTypeAddr::setAddress(const std::string& address) {
+    mAddress = address;
+    mIsAddressSensitive = isSensitiveAddress(mAddress);
+}
+
 bool AudioDeviceTypeAddr::equals(const AudioDeviceTypeAddr& other) const {
     return mType == other.mType && mAddress == other.mAddress;
 }
@@ -36,14 +81,34 @@
     return false;
 }
 
+bool AudioDeviceTypeAddr::operator==(const AudioDeviceTypeAddr &rhs) const {
+    return equals(rhs);
+}
+
+bool AudioDeviceTypeAddr::operator!=(const AudioDeviceTypeAddr &rhs) const {
+    return !operator==(rhs);
+}
+
 void AudioDeviceTypeAddr::reset() {
     mType = AUDIO_DEVICE_NONE;
-    mAddress = "";
+    setAddress("");
+}
+
+std::string AudioDeviceTypeAddr::toString(bool includeSensitiveInfo) const {
+    std::stringstream sstream;
+    sstream << "type:0x" << std::hex << mType;
+    // IP and MAC addresses are sensitive information. They are suppressed
+    // unless `includeSensitiveInfo` is true.
+    sstream << ",@:"
+            << (!includeSensitiveInfo && mIsAddressSensitive ? SUPPRESSED : mAddress);
+    return sstream.str();
 }
 
 status_t AudioDeviceTypeAddr::readFromParcel(const Parcel *parcel) {
     status_t status;
-    if ((status = parcel->readUint32(&mType)) != NO_ERROR) return status;
+    uint32_t rawDeviceType;
+    if ((status = parcel->readUint32(&rawDeviceType)) != NO_ERROR) return status;
+    mType = static_cast<audio_devices_t>(rawDeviceType);
     status = parcel->readUtf8FromUtf16(&mAddress);
     return status;
 }
@@ -64,4 +129,30 @@
     return deviceTypes;
 }
 
-}
\ No newline at end of file
+AudioDeviceTypeAddrVector excludeDeviceTypeAddrsFrom(
+        const AudioDeviceTypeAddrVector& devices,
+        const AudioDeviceTypeAddrVector& devicesToExclude) {
+    std::set<AudioDeviceTypeAddr> devicesToExcludeSet(
+            devicesToExclude.begin(), devicesToExclude.end());
+    AudioDeviceTypeAddrVector remainingDevices;
+    for (const auto& device : devices) {
+        if (devicesToExcludeSet.count(device) == 0) {
+            remainingDevices.push_back(device);
+        }
+    }
+    return remainingDevices;
+}
+
+std::string dumpAudioDeviceTypeAddrVector(const AudioDeviceTypeAddrVector& deviceTypeAddrs,
+                                          bool includeSensitiveInfo) {
+    std::stringstream stream;
+    for (auto it = deviceTypeAddrs.begin(); it != deviceTypeAddrs.end(); ++it) {
+        if (it != deviceTypeAddrs.begin()) {
+            stream << " ";
+        }
+        stream << it->toString(includeSensitiveInfo);
+    }
+    return stream.str();
+}
+
+} // namespace android
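
Note: a small sketch of the new address-suppression and set-difference helpers added above.
The device address and the expected strings in the comments are illustrative:

    AudioDeviceTypeAddr bt(AUDIO_DEVICE_OUT_BLUETOOTH_A2DP, "00:11:22:33:44:55");
    std::string masked = bt.toString();       // "type:0x80,@:SUPPRESSED" (MAC matches the regex)
    std::string full = bt.toString(true);     // "type:0x80,@:00:11:22:33:44:55"

    AudioDeviceTypeAddrVector all = {bt, AudioDeviceTypeAddr(AUDIO_DEVICE_OUT_SPEAKER, "")};
    AudioDeviceTypeAddrVector rest = excludeDeviceTypeAddrsFrom(all, {bt});
    // rest now holds only the speaker entry; dumpAudioDeviceTypeAddrVector(rest) formats it.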
diff --git a/media/libaudiofoundation/AudioGain.cpp b/media/libaudiofoundation/AudioGain.cpp
index 0d28335..759140e 100644
--- a/media/libaudiofoundation/AudioGain.cpp
+++ b/media/libaudiofoundation/AudioGain.cpp
@@ -152,8 +152,12 @@
     if ((status = parcel->readInt32(&mIndex)) != NO_ERROR) return status;
     if ((status = parcel->readBool(&mUseInChannelMask)) != NO_ERROR) return status;
     if ((status = parcel->readBool(&mUseForVolume)) != NO_ERROR) return status;
-    if ((status = parcel->readUint32(&mGain.mode)) != NO_ERROR) return status;
-    if ((status = parcel->readUint32(&mGain.channel_mask)) != NO_ERROR) return status;
+    uint32_t rawGainMode;
+    if ((status = parcel->readUint32(&rawGainMode)) != NO_ERROR) return status;
+    mGain.mode = static_cast<audio_gain_mode_t>(rawGainMode);
+    uint32_t rawChannelMask;
+    if ((status = parcel->readUint32(&rawChannelMask)) != NO_ERROR) return status;
+    mGain.channel_mask = static_cast<audio_channel_mask_t>(rawChannelMask);
     if ((status = parcel->readInt32(&mGain.min_value)) != NO_ERROR) return status;
     if ((status = parcel->readInt32(&mGain.max_value)) != NO_ERROR) return status;
     if ((status = parcel->readInt32(&mGain.default_value)) != NO_ERROR) return status;
diff --git a/media/libaudiofoundation/AudioPort.cpp b/media/libaudiofoundation/AudioPort.cpp
index f988690..1846a6b 100644
--- a/media/libaudiofoundation/AudioPort.cpp
+++ b/media/libaudiofoundation/AudioPort.cpp
@@ -268,12 +268,17 @@
     if ((status = parcel->readUint32(reinterpret_cast<uint32_t*>(&mFormat))) != NO_ERROR) {
         return status;
     }
-    if ((status = parcel->readUint32(&mChannelMask)) != NO_ERROR) return status;
+    uint32_t rawChannelMask;
+    if ((status = parcel->readUint32(&rawChannelMask)) != NO_ERROR) return status;
+    mChannelMask = static_cast<audio_channel_mask_t>(rawChannelMask);
     if ((status = parcel->readInt32(&mId)) != NO_ERROR) return status;
     // Read mGain from parcel.
     if ((status = parcel->readInt32(&mGain.index)) != NO_ERROR) return status;
-    if ((status = parcel->readUint32(&mGain.mode)) != NO_ERROR) return status;
-    if ((status = parcel->readUint32(&mGain.channel_mask)) != NO_ERROR) return status;
+    uint32_t rawGainMode;
+    if ((status = parcel->readUint32(&rawGainMode)) != NO_ERROR) return status;
+    mGain.mode = static_cast<audio_gain_mode_t>(rawGainMode);
+    if ((status = parcel->readUint32(&rawChannelMask)) != NO_ERROR) return status;
+    mGain.channel_mask = static_cast<audio_channel_mask_t>(rawChannelMask);
     if ((status = parcel->readUint32(&mGain.ramp_duration_ms)) != NO_ERROR) return status;
     std::vector<int> values;
     if ((status = parcel->readInt32Vector(&values)) != NO_ERROR) return status;
diff --git a/media/libaudiofoundation/AudioProfile.cpp b/media/libaudiofoundation/AudioProfile.cpp
index 91be346..67b600e 100644
--- a/media/libaudiofoundation/AudioProfile.cpp
+++ b/media/libaudiofoundation/AudioProfile.cpp
@@ -157,7 +157,9 @@
     std::vector<int> values;
     if ((status = parcel->readInt32Vector(&values)) != NO_ERROR) return status;
     mChannelMasks.clear();
-    mChannelMasks.insert(values.begin(), values.end());
+    for (auto raw : values) {
+        mChannelMasks.insert(static_cast<audio_channel_mask_t>(raw));
+    }
     values.clear();
     if ((status = parcel->readInt32Vector(&values)) != NO_ERROR) return status;
     mSamplingRates.clear();
diff --git a/media/libaudiofoundation/DeviceDescriptorBase.cpp b/media/libaudiofoundation/DeviceDescriptorBase.cpp
index 3dbe37d..16cf71a 100644
--- a/media/libaudiofoundation/DeviceDescriptorBase.cpp
+++ b/media/libaudiofoundation/DeviceDescriptorBase.cpp
@@ -40,11 +40,15 @@
                                          AUDIO_PORT_ROLE_SOURCE),
         mDeviceTypeAddr(deviceTypeAddr)
 {
-    if (mDeviceTypeAddr.mAddress.empty() && audio_is_remote_submix_device(mDeviceTypeAddr.mType)) {
-        mDeviceTypeAddr.mAddress = "0";
+    if (mDeviceTypeAddr.address().empty() && audio_is_remote_submix_device(mDeviceTypeAddr.mType)) {
+        mDeviceTypeAddr.setAddress("0");
     }
 }
 
+void DeviceDescriptorBase::setAddress(const std::string &address) {
+    mDeviceTypeAddr.setAddress(address);
+}
+
 void DeviceDescriptorBase::toAudioPortConfig(struct audio_port_config *dstConfig,
                                              const struct audio_port_config *srcConfig) const
 {
@@ -123,18 +127,16 @@
             "%*s- supported encapsulation metadata types: %u",
             spaces, "", mEncapsulationMetadataTypes));
 
-    if (mDeviceTypeAddr.mAddress.size() != 0) {
+    if (mDeviceTypeAddr.address().size() != 0) {
         dst->append(base::StringPrintf(
                 "%*s- address: %-32s\n", spaces, "", mDeviceTypeAddr.getAddress()));
     }
     AudioPort::dump(dst, spaces, verbose);
 }
 
-std::string DeviceDescriptorBase::toString() const
+std::string DeviceDescriptorBase::toString(bool includeSensitiveInfo) const
 {
-    std::stringstream sstream;
-    sstream << "type:0x" << std::hex << type() << ",@:" << mDeviceTypeAddr.mAddress;
-    return sstream.str();
+    return mDeviceTypeAddr.toString(includeSensitiveInfo);
 }
 
 void DeviceDescriptorBase::log() const
diff --git a/media/libaudiofoundation/include/media/AudioContainers.h b/media/libaudiofoundation/include/media/AudioContainers.h
index 72fda49..aa7ca69 100644
--- a/media/libaudiofoundation/include/media/AudioContainers.h
+++ b/media/libaudiofoundation/include/media/AudioContainers.h
@@ -96,7 +96,7 @@
 static inline audio_devices_t deviceTypesToBitMask(const DeviceTypeSet& deviceTypes) {
     audio_devices_t types = AUDIO_DEVICE_NONE;
     for (auto deviceType : deviceTypes) {
-        types |= deviceType;
+        types = static_cast<audio_devices_t>(types | deviceType);
     }
     return types;
 }
@@ -131,4 +131,4 @@
 std::string toString(const DeviceTypeSet& deviceTypes);
 
 
-} // namespace android
\ No newline at end of file
+} // namespace android
diff --git a/media/libaudiofoundation/include/media/AudioDeviceTypeAddr.h b/media/libaudiofoundation/include/media/AudioDeviceTypeAddr.h
index 60ea78e..7497faf 100644
--- a/media/libaudiofoundation/include/media/AudioDeviceTypeAddr.h
+++ b/media/libaudiofoundation/include/media/AudioDeviceTypeAddr.h
@@ -27,28 +27,43 @@
 
 namespace android {
 
-struct AudioDeviceTypeAddr : public Parcelable {
+class AudioDeviceTypeAddr : public Parcelable {
+public:
     AudioDeviceTypeAddr() = default;
 
-    AudioDeviceTypeAddr(audio_devices_t type, const std::string& address) :
-            mType(type), mAddress(address) {}
+    AudioDeviceTypeAddr(audio_devices_t type, const std::string& address);
 
     const char* getAddress() const;
 
+    const std::string& address() const;
+
+    void setAddress(const std::string& address);
+
+    bool isAddressSensitive();
+
     bool equals(const AudioDeviceTypeAddr& other) const;
 
     AudioDeviceTypeAddr& operator= (const AudioDeviceTypeAddr&) = default;
 
     bool operator<(const AudioDeviceTypeAddr& other) const;
 
+    bool operator==(const AudioDeviceTypeAddr& rhs) const;
+
+    bool operator!=(const AudioDeviceTypeAddr& rhs) const;
+
     void reset();
 
+    std::string toString(bool includeSensitiveInfo=false) const;
+
     status_t readFromParcel(const Parcel *parcel) override;
 
     status_t writeToParcel(Parcel *parcel) const override;
 
     audio_devices_t mType = AUDIO_DEVICE_NONE;
+
+private:
     std::string mAddress;
+    bool mIsAddressSensitive = false;
 };
 
 using AudioDeviceTypeAddrVector = std::vector<AudioDeviceTypeAddr>;
@@ -58,4 +73,15 @@
  */
 DeviceTypeSet getAudioDeviceTypes(const AudioDeviceTypeAddrVector& deviceTypeAddrs);
 
-}
+/**
+ * Return the AudioDeviceTypeAddrs that are present in `devices` but not in
+ * `devicesToExclude`.
+ */
+AudioDeviceTypeAddrVector excludeDeviceTypeAddrsFrom(
+        const AudioDeviceTypeAddrVector& devices,
+        const AudioDeviceTypeAddrVector& devicesToExclude);
+
+std::string dumpAudioDeviceTypeAddrVector(const AudioDeviceTypeAddrVector& deviceTypeAddrs,
+                                          bool includeSensitiveInfo=false);
+
+} // namespace android
diff --git a/media/libaudiofoundation/include/media/DeviceDescriptorBase.h b/media/libaudiofoundation/include/media/DeviceDescriptorBase.h
index af04721..0cbd1de 100644
--- a/media/libaudiofoundation/include/media/DeviceDescriptorBase.h
+++ b/media/libaudiofoundation/include/media/DeviceDescriptorBase.h
@@ -41,8 +41,8 @@
     virtual ~DeviceDescriptorBase() {}
 
     audio_devices_t type() const { return mDeviceTypeAddr.mType; }
-    std::string address() const { return mDeviceTypeAddr.mAddress; }
-    void setAddress(const std::string &address) { mDeviceTypeAddr.mAddress = address; }
+    const std::string& address() const { return mDeviceTypeAddr.address(); }
+    void setAddress(const std::string &address);
     const AudioDeviceTypeAddr& getDeviceTypeAddr() const { return mDeviceTypeAddr; }
 
     // AudioPortConfig
@@ -61,7 +61,14 @@
     void dump(std::string *dst, int spaces, int index,
               const char* extraInfo = nullptr, bool verbose = true) const;
     void log() const;
-    std::string toString() const;
+
+    /**
+     * Return a string to describe the DeviceDescriptor.
+     *
+     * @param includeSensitiveInfo when true, sensitive information such as MAC or IP addresses is included.
+     * @return a string that can be used to describe the DeviceDescriptor.
+     */
+    std::string toString(bool includeSensitiveInfo = false) const;
 
     bool equals(const sp<DeviceDescriptorBase>& other) const;
 
diff --git a/media/libaudiohal/Android.bp b/media/libaudiohal/Android.bp
index 1709d1e..fab0fea 100644
--- a/media/libaudiohal/Android.bp
+++ b/media/libaudiohal/Android.bp
@@ -18,6 +18,7 @@
         "libaudiohal@4.0",
         "libaudiohal@5.0",
         "libaudiohal@6.0",
+//        "libaudiohal@7.0",
     ],
 
     shared_libs: [
diff --git a/media/libaudiohal/FactoryHalHidl.cpp b/media/libaudiohal/FactoryHalHidl.cpp
index 5985ef0..7228b22 100644
--- a/media/libaudiohal/FactoryHalHidl.cpp
+++ b/media/libaudiohal/FactoryHalHidl.cpp
@@ -31,6 +31,7 @@
 /** Supported HAL versions, in order of preference.
  */
 const char* sAudioHALVersions[] = {
+    "7.0",
     "6.0",
     "5.0",
     "4.0",
diff --git a/media/libaudiohal/OWNERS b/media/libaudiohal/OWNERS
index 1456ab6..71b17e6 100644
--- a/media/libaudiohal/OWNERS
+++ b/media/libaudiohal/OWNERS
@@ -1,2 +1 @@
-krocard@google.com
 mnaganov@google.com
diff --git a/media/libaudiohal/impl/Android.bp b/media/libaudiohal/impl/Android.bp
index 967fba1..df006b5 100644
--- a/media/libaudiohal/impl/Android.bp
+++ b/media/libaudiohal/impl/Android.bp
@@ -116,3 +116,20 @@
     ]
 }
 
+cc_library_shared {
+    enabled: false,
+    name: "libaudiohal@7.0",
+    defaults: ["libaudiohal_default"],
+    shared_libs: [
+        "android.hardware.audio.common@7.0",
+        "android.hardware.audio.common@7.0-util",
+        "android.hardware.audio.effect@7.0",
+        "android.hardware.audio@7.0",
+    ],
+    cflags: [
+        "-DMAJOR_VERSION=7",
+        "-DMINOR_VERSION=0",
+        "-include common/all-versions/VersionMacro.h",
+    ]
+}
+
diff --git a/media/libaudiohal/impl/EffectsFactoryHalHidl.cpp b/media/libaudiohal/impl/EffectsFactoryHalHidl.cpp
index 9192a31..80e2b87 100644
--- a/media/libaudiohal/impl/EffectsFactoryHalHidl.cpp
+++ b/media/libaudiohal/impl/EffectsFactoryHalHidl.cpp
@@ -37,7 +37,7 @@
 
 EffectsFactoryHalHidl::EffectsFactoryHalHidl(sp<IEffectsFactory> effectsFactory)
         : ConversionHelperHidl("EffectsFactory") {
-    ALOG_ASSERT(effectsFactory != nullptr, "Provided IDevicesFactory service is NULL");
+    ALOG_ASSERT(effectsFactory != nullptr, "Provided IEffectsFactory service is NULL");
     mEffectsFactory = effectsFactory;
 }
 
diff --git a/media/libaudiohal/impl/EffectsFactoryHalHidl.h b/media/libaudiohal/impl/EffectsFactoryHalHidl.h
index dece1bb..5fa85e7 100644
--- a/media/libaudiohal/impl/EffectsFactoryHalHidl.h
+++ b/media/libaudiohal/impl/EffectsFactoryHalHidl.h
@@ -54,6 +54,8 @@
 
     virtual status_t dumpEffects(int fd);
 
+    virtual float getHalVersion() { return MAJOR_VERSION + (float)MINOR_VERSION / 10; }
+
     status_t allocateBuffer(size_t size, sp<EffectBufferHalInterface>* buffer) override;
     status_t mirrorBuffer(void* external, size_t size,
                           sp<EffectBufferHalInterface>* buffer) override;
diff --git a/media/libaudiohal/impl/StreamPowerLog.h b/media/libaudiohal/impl/StreamPowerLog.h
index 5fd3912..f6a554b 100644
--- a/media/libaudiohal/impl/StreamPowerLog.h
+++ b/media/libaudiohal/impl/StreamPowerLog.h
@@ -19,6 +19,7 @@
 
 #include <audio_utils/clock.h>
 #include <audio_utils/PowerLog.h>
+#include <cutils/bitops.h>
 #include <cutils/properties.h>
 #include <system/audio.h>
 
diff --git a/media/libaudiohal/include/media/audiohal/EffectsFactoryHalInterface.h b/media/libaudiohal/include/media/audiohal/EffectsFactoryHalInterface.h
index 3a76f9f..9fb56ae 100644
--- a/media/libaudiohal/include/media/audiohal/EffectsFactoryHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/EffectsFactoryHalInterface.h
@@ -46,6 +46,8 @@
 
     virtual status_t dumpEffects(int fd) = 0;
 
+    virtual float getHalVersion() = 0;
+
     static sp<EffectsFactoryHalInterface> create();
 
     virtual status_t allocateBuffer(size_t size, sp<EffectBufferHalInterface>* buffer) = 0;
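
Note: getHalVersion() reports the effects HAL version as major + minor/10 (e.g. 6.0f, 7.0f),
which lets callers gate version-specific behavior. A minimal sketch:

    sp<EffectsFactoryHalInterface> factory = EffectsFactoryHalInterface::create();
    if (factory != nullptr && factory->getHalVersion() >= 7.0f) {
        // take the audio HAL 7.0 code path
    }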
diff --git a/media/libaudioprocessing/AudioMixer.cpp b/media/libaudioprocessing/AudioMixer.cpp
index 1a31420..d85e2e9 100644
--- a/media/libaudioprocessing/AudioMixer.cpp
+++ b/media/libaudioprocessing/AudioMixer.cpp
@@ -79,10 +79,14 @@
             && mixerChannelMask == (track->mMixerChannelMask | track->mMixerHapticChannelMask)) {
         return false;  // no need to change
     }
-    const audio_channel_mask_t hapticChannelMask = trackChannelMask & AUDIO_CHANNEL_HAPTIC_ALL;
-    trackChannelMask &= ~AUDIO_CHANNEL_HAPTIC_ALL;
-    const audio_channel_mask_t mixerHapticChannelMask = mixerChannelMask & AUDIO_CHANNEL_HAPTIC_ALL;
-    mixerChannelMask &= ~AUDIO_CHANNEL_HAPTIC_ALL;
+    const audio_channel_mask_t hapticChannelMask =
+            static_cast<audio_channel_mask_t>(trackChannelMask & AUDIO_CHANNEL_HAPTIC_ALL);
+    trackChannelMask = static_cast<audio_channel_mask_t>(
+            trackChannelMask & ~AUDIO_CHANNEL_HAPTIC_ALL);
+    const audio_channel_mask_t mixerHapticChannelMask = static_cast<audio_channel_mask_t>(
+            mixerChannelMask & AUDIO_CHANNEL_HAPTIC_ALL);
+    mixerChannelMask = static_cast<audio_channel_mask_t>(
+            mixerChannelMask & ~AUDIO_CHANNEL_HAPTIC_ALL);
     // always recompute for both channel masks even if only one has changed.
     const uint32_t trackChannelCount = audio_channel_count_from_out_mask(trackChannelMask);
     const uint32_t mixerChannelCount = audio_channel_count_from_out_mask(mixerChannelMask);
@@ -362,7 +366,8 @@
             const audio_channel_mask_t trackChannelMask =
                 static_cast<audio_channel_mask_t>(valueInt);
             if (setChannelMasks(name, trackChannelMask,
-                    (track->mMixerChannelMask | track->mMixerHapticChannelMask))) {
+                    static_cast<audio_channel_mask_t>(
+                            track->mMixerChannelMask | track->mMixerHapticChannelMask))) {
                 ALOGV("setParameter(TRACK, CHANNEL_MASK, %x)", trackChannelMask);
                 invalidate();
             }
@@ -407,7 +412,8 @@
         case MIXER_CHANNEL_MASK: {
             const audio_channel_mask_t mixerChannelMask =
                     static_cast<audio_channel_mask_t>(valueInt);
-            if (setChannelMasks(name, track->channelMask | track->mHapticChannelMask,
+            if (setChannelMasks(name, static_cast<audio_channel_mask_t>(
+                                    track->channelMask | track->mHapticChannelMask),
                     mixerChannelMask)) {
                 ALOGV("setParameter(TRACK, MIXER_CHANNEL_MASK, %#x)", mixerChannelMask);
                 invalidate();
@@ -423,7 +429,7 @@
             }
             } break;
         case HAPTIC_INTENSITY: {
-            const haptic_intensity_t hapticIntensity = static_cast<haptic_intensity_t>(valueInt);
+            const os::HapticScale hapticIntensity = static_cast<os::HapticScale>(valueInt);
             if (track->mHapticIntensity != hapticIntensity) {
                 track->mHapticIntensity = hapticIntensity;
             }
@@ -533,9 +539,10 @@
     Track* t = static_cast<Track*>(track);
 
     audio_channel_mask_t channelMask = t->channelMask;
-    t->mHapticChannelMask = channelMask & AUDIO_CHANNEL_HAPTIC_ALL;
+    t->mHapticChannelMask = static_cast<audio_channel_mask_t>(
+            channelMask & AUDIO_CHANNEL_HAPTIC_ALL);
     t->mHapticChannelCount = audio_channel_count_from_out_mask(t->mHapticChannelMask);
-    channelMask &= ~AUDIO_CHANNEL_HAPTIC_ALL;
+    channelMask = static_cast<audio_channel_mask_t>(channelMask & ~AUDIO_CHANNEL_HAPTIC_ALL);
     t->channelCount = audio_channel_count_from_out_mask(channelMask);
     ALOGV_IF(audio_channel_mask_get_bits(channelMask) != AUDIO_CHANNEL_OUT_STEREO,
             "Non-stereo channel mask: %d\n", channelMask);
@@ -545,7 +552,7 @@
     t->mPlaybackRate = AUDIO_PLAYBACK_RATE_DEFAULT;
     // haptic
     t->mHapticPlaybackEnabled = false;
-    t->mHapticIntensity = HAPTIC_SCALE_NONE;
+    t->mHapticIntensity = os::HapticScale::NONE;
     t->mMixerHapticChannelMask = AUDIO_CHANNEL_NONE;
     t->mMixerHapticChannelCount = 0;
     t->mAdjustInChannelCount = t->channelCount + t->mHapticChannelCount;
@@ -590,19 +597,12 @@
             const std::shared_ptr<Track> &t = getTrack(name);
             if (t->mHapticPlaybackEnabled) {
                 size_t sampleCount = mFrameCount * t->mMixerHapticChannelCount;
-                float gamma = t->getHapticScaleGamma();
-                float maxAmplitudeRatio = t->getHapticMaxAmplitudeRatio();
                 uint8_t* buffer = (uint8_t*)pair.first + mFrameCount * audio_bytes_per_frame(
                         t->mMixerChannelCount, t->mMixerFormat);
                 switch (t->mMixerFormat) {
                 // Mixer format should be AUDIO_FORMAT_PCM_FLOAT.
                 case AUDIO_FORMAT_PCM_FLOAT: {
-                    float* fout = (float*) buffer;
-                    for (size_t i = 0; i < sampleCount; i++) {
-                        float mul = fout[i] >= 0 ? 1.0 : -1.0;
-                        fout[i] = powf(fabsf(fout[i] / HAPTIC_MAX_AMPLITUDE_FLOAT), gamma)
-                                * maxAmplitudeRatio * HAPTIC_MAX_AMPLITUDE_FLOAT * mul;
-                    }
+                    os::scaleHapticData((float*) buffer, sampleCount, t->mHapticIntensity);
                 } break;
                 default:
                     LOG_ALWAYS_FATAL("bad mMixerFormat: %#x", t->mMixerFormat);
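The inline per-sample haptic scaling deleted above (gamma plus max-amplitude ratio) is now delegated to os::scaleHapticData() from libvibrator. As a reference for what that utility replaces, here is a minimal standalone sketch of the old math; the gamma and ratio values come from the intensity tables removed from AudioMixer.h, and the function name is made up for illustration.

#include <cmath>
#include <cstddef>

// Sketch of the legacy scaling formerly applied to AUDIO_FORMAT_PCM_FLOAT haptic samples.
// gamma came from getHapticScaleGamma() and maxAmplitudeRatio from
// getHapticMaxAmplitudeRatio(), both deleted in this change.
void scaleHapticSamplesLegacy(float* samples, size_t count,
                              float gamma, float maxAmplitudeRatio) {
    constexpr float kMaxAmplitude = 1.0f;  // HAPTIC_MAX_AMPLITUDE_FLOAT in the old code
    for (size_t i = 0; i < count; ++i) {
        const float sign = samples[i] >= 0 ? 1.0f : -1.0f;
        samples[i] = powf(fabsf(samples[i] / kMaxAmplitude), gamma)
                * maxAmplitudeRatio * kMaxAmplitude * sign;
    }
}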
diff --git a/media/libaudioprocessing/AudioMixerOps.h b/media/libaudioprocessing/AudioMixerOps.h
index 80bd093..8d374c9 100644
--- a/media/libaudioprocessing/AudioMixerOps.h
+++ b/media/libaudioprocessing/AudioMixerOps.h
@@ -234,17 +234,20 @@
     static_assert(NCHAN > 0 && NCHAN <= 8);
     static_assert(MIXTYPE == MIXTYPE_MULTI_STEREOVOL
             || MIXTYPE == MIXTYPE_MULTI_SAVEONLY_STEREOVOL
-            || MIXTYPE == MIXTYPE_STEREOEXPAND);
+            || MIXTYPE == MIXTYPE_STEREOEXPAND
+            || MIXTYPE == MIXTYPE_MONOEXPAND);
     auto proc = [](auto& a, const auto& b) {
         if constexpr (MIXTYPE == MIXTYPE_MULTI_STEREOVOL
-                || MIXTYPE == MIXTYPE_STEREOEXPAND) {
+                || MIXTYPE == MIXTYPE_STEREOEXPAND
+                || MIXTYPE == MIXTYPE_MONOEXPAND) {
             a += b;
         } else {
             a = b;
         }
     };
     auto inp = [&in]() -> const TI& {
-        if constexpr (MIXTYPE == MIXTYPE_STEREOEXPAND) {
+        if constexpr (MIXTYPE == MIXTYPE_STEREOEXPAND
+                || MIXTYPE == MIXTYPE_MONOEXPAND) {
             return *in;
         } else {
             return *in++;
@@ -312,6 +315,8 @@
  *   TV/TAV: int32_t (U4.28) or int16_t (U4.12) or float
  *   Input channel count is 1.
  *   vol: represents volume array.
+ *   This uses stereo balanced volume vol[0] and vol[1].
+ *   Before R, this was a full volume array but was called only for channels <= 2.
  *
  *   This accumulates into the out pointer.
  *
@@ -356,17 +361,13 @@
         do {
             TA auxaccum = 0;
             if constexpr (MIXTYPE == MIXTYPE_MULTI) {
+                static_assert(NCHAN <= 2);
                 for (int i = 0; i < NCHAN; ++i) {
                     *out++ += MixMulAux<TO, TI, TV, TA>(*in++, vol[i], &auxaccum);
                     vol[i] += volinc[i];
                 }
-            } else if constexpr (MIXTYPE == MIXTYPE_MONOEXPAND) {
-                for (int i = 0; i < NCHAN; ++i) {
-                    *out++ += MixMulAux<TO, TI, TV, TA>(*in, vol[i], &auxaccum);
-                    vol[i] += volinc[i];
-                }
-                in++;
             } else if constexpr (MIXTYPE == MIXTYPE_MULTI_SAVEONLY) {
+                static_assert(NCHAN <= 2);
                 for (int i = 0; i < NCHAN; ++i) {
                     *out++ = MixMulAux<TO, TI, TV, TA>(*in++, vol[i], &auxaccum);
                     vol[i] += volinc[i];
@@ -383,11 +384,13 @@
                 vol[0] += volinc[0];
             } else if constexpr (MIXTYPE == MIXTYPE_MULTI_STEREOVOL
                     || MIXTYPE == MIXTYPE_MULTI_SAVEONLY_STEREOVOL
+                    || MIXTYPE == MIXTYPE_MONOEXPAND
                     || MIXTYPE == MIXTYPE_STEREOEXPAND) {
                 stereoVolumeHelper<MIXTYPE, NCHAN>(
                         out, in, vol, [&auxaccum] (auto &a, const auto &b) {
                     return MixMulAux<TO, TI, TV, TA>(a, b, &auxaccum);
                 });
+                if constexpr (MIXTYPE == MIXTYPE_MONOEXPAND) in += 1;
                 if constexpr (MIXTYPE == MIXTYPE_STEREOEXPAND) in += 2;
                 vol[0] += volinc[0];
                 vol[1] += volinc[1];
@@ -401,17 +404,13 @@
     } else {
         do {
             if constexpr (MIXTYPE == MIXTYPE_MULTI) {
+                static_assert(NCHAN <= 2);
                 for (int i = 0; i < NCHAN; ++i) {
                     *out++ += MixMul<TO, TI, TV>(*in++, vol[i]);
                     vol[i] += volinc[i];
                 }
-            } else if constexpr (MIXTYPE == MIXTYPE_MONOEXPAND) {
-                for (int i = 0; i < NCHAN; ++i) {
-                    *out++ += MixMul<TO, TI, TV>(*in, vol[i]);
-                    vol[i] += volinc[i];
-                }
-                in++;
             } else if constexpr (MIXTYPE == MIXTYPE_MULTI_SAVEONLY) {
+                static_assert(NCHAN <= 2);
                 for (int i = 0; i < NCHAN; ++i) {
                     *out++ = MixMul<TO, TI, TV>(*in++, vol[i]);
                     vol[i] += volinc[i];
@@ -428,10 +427,12 @@
                 vol[0] += volinc[0];
             } else if constexpr (MIXTYPE == MIXTYPE_MULTI_STEREOVOL
                     || MIXTYPE == MIXTYPE_MULTI_SAVEONLY_STEREOVOL
+                    || MIXTYPE == MIXTYPE_MONOEXPAND
                     || MIXTYPE == MIXTYPE_STEREOEXPAND) {
                 stereoVolumeHelper<MIXTYPE, NCHAN>(out, in, vol, [] (auto &a, const auto &b) {
                     return MixMul<TO, TI, TV>(a, b);
                 });
+                if constexpr (MIXTYPE == MIXTYPE_MONOEXPAND) in += 1;
                 if constexpr (MIXTYPE == MIXTYPE_STEREOEXPAND) in += 2;
                 vol[0] += volinc[0];
                 vol[1] += volinc[1];
@@ -454,15 +455,12 @@
         do {
             TA auxaccum = 0;
             if constexpr (MIXTYPE == MIXTYPE_MULTI) {
+                static_assert(NCHAN <= 2);
                 for (int i = 0; i < NCHAN; ++i) {
                     *out++ += MixMulAux<TO, TI, TV, TA>(*in++, vol[i], &auxaccum);
                 }
-            } else if constexpr (MIXTYPE == MIXTYPE_MONOEXPAND) {
-                for (int i = 0; i < NCHAN; ++i) {
-                    *out++ += MixMulAux<TO, TI, TV, TA>(*in, vol[i], &auxaccum);
-                }
-                in++;
             } else if constexpr (MIXTYPE == MIXTYPE_MULTI_SAVEONLY) {
+                static_assert(NCHAN <= 2);
                 for (int i = 0; i < NCHAN; ++i) {
                     *out++ = MixMulAux<TO, TI, TV, TA>(*in++, vol[i], &auxaccum);
                 }
@@ -476,11 +474,13 @@
                 }
             } else if constexpr (MIXTYPE == MIXTYPE_MULTI_STEREOVOL
                     || MIXTYPE == MIXTYPE_MULTI_SAVEONLY_STEREOVOL
+                    || MIXTYPE == MIXTYPE_MONOEXPAND
                     || MIXTYPE == MIXTYPE_STEREOEXPAND) {
                 stereoVolumeHelper<MIXTYPE, NCHAN>(
                         out, in, vol, [&auxaccum] (auto &a, const auto &b) {
                     return MixMulAux<TO, TI, TV, TA>(a, b, &auxaccum);
                 });
+                if constexpr (MIXTYPE == MIXTYPE_MONOEXPAND) in += 1;
                 if constexpr (MIXTYPE == MIXTYPE_STEREOEXPAND) in += 2;
             } else /* constexpr */ {
                 static_assert(dependent_false<MIXTYPE>, "invalid mixtype");
@@ -490,16 +490,14 @@
         } while (--frameCount);
     } else {
         do {
+            // ALOGD("Mixtype:%d NCHAN:%d", MIXTYPE, NCHAN);
             if constexpr (MIXTYPE == MIXTYPE_MULTI) {
+                static_assert(NCHAN <= 2);
                 for (int i = 0; i < NCHAN; ++i) {
                     *out++ += MixMul<TO, TI, TV>(*in++, vol[i]);
                 }
-            } else if constexpr (MIXTYPE == MIXTYPE_MONOEXPAND) {
-                for (int i = 0; i < NCHAN; ++i) {
-                    *out++ += MixMul<TO, TI, TV>(*in, vol[i]);
-                }
-                in++;
             } else if constexpr (MIXTYPE == MIXTYPE_MULTI_SAVEONLY) {
+                static_assert(NCHAN <= 2);
                 for (int i = 0; i < NCHAN; ++i) {
                     *out++ = MixMul<TO, TI, TV>(*in++, vol[i]);
                 }
@@ -513,10 +511,12 @@
                 }
             } else if constexpr (MIXTYPE == MIXTYPE_MULTI_STEREOVOL
                     || MIXTYPE == MIXTYPE_MULTI_SAVEONLY_STEREOVOL
+                    || MIXTYPE == MIXTYPE_MONOEXPAND
                     || MIXTYPE == MIXTYPE_STEREOEXPAND) {
                 stereoVolumeHelper<MIXTYPE, NCHAN>(out, in, vol, [] (auto &a, const auto &b) {
                     return MixMul<TO, TI, TV>(a, b);
                 });
+                if constexpr (MIXTYPE == MIXTYPE_MONOEXPAND) in += 1;
                 if constexpr (MIXTYPE == MIXTYPE_STEREOEXPAND) in += 2;
             } else /* constexpr */ {
                 static_assert(dependent_false<MIXTYPE>, "invalid mixtype");
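After this change MIXTYPE_MONOEXPAND is routed through stereoVolumeHelper just like MIXTYPE_STEREOEXPAND: the helper re-reads the single input sample (the lambda never advances the pointer) and applies the stereo-balanced vol[0]/vol[1], and the caller then steps the input pointer forward by one frame. A simplified sketch of that expansion outside the template machinery; the even/odd mapping of channels to vol[0]/vol[1] is a stand-in for the mask-driven mapping in the real helper.

// Sketch: accumulate one mono input frame into NCHAN output channels with a
// stereo-balanced volume pair, mirroring the new MONOEXPAND path.
template <int NCHAN>
void monoExpandAccumulate(float* out, const float* in, const float vol[2]) {
    static_assert(NCHAN > 0 && NCHAN <= 8, "same bound as stereoVolumeHelper");
    for (int i = 0; i < NCHAN; ++i) {
        out[i] += *in * vol[i & 1];  // stand-in for the real channel-to-volume mapping
    }
    // The caller advances the mono input by one frame afterwards (the "in += 1" above).
}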
diff --git a/media/libaudioprocessing/AudioResamplerDyn.cpp b/media/libaudioprocessing/AudioResamplerDyn.cpp
index ec56b00..1aacfd1 100644
--- a/media/libaudioprocessing/AudioResamplerDyn.cpp
+++ b/media/libaudioprocessing/AudioResamplerDyn.cpp
@@ -25,7 +25,6 @@
 
 #include <cutils/compiler.h>
 #include <cutils/properties.h>
-#include <utils/Debug.h>
 #include <utils/Log.h>
 #include <audio_utils/primitives.h>
 
@@ -636,7 +635,7 @@
     const uint32_t phaseWrapLimit = c.mL << c.mShift;
     size_t inFrameCount = (phaseIncrement * (uint64_t)outFrameCount + phaseFraction)
             / phaseWrapLimit;
-    // sanity check that inFrameCount is in signed 32 bit integer range.
+    // validate that inFrameCount is in signed 32 bit integer range.
     ALOG_ASSERT(0 <= inFrameCount && inFrameCount < (1U << 31));
 
     //ALOGV("inFrameCount:%d  outFrameCount:%d"
@@ -646,7 +645,7 @@
     // NOTE: be very careful when modifying the code here. register
     // pressure is very high and a small change might cause the compiler
     // to generate far less efficient code.
-    // Always sanity check the result with objdump or test-resample.
+    // Always validate the result with objdump or test-resample.
 
     // the following logic is a bit convoluted to keep the main processing loop
     // as tight as possible with register allocation.
diff --git a/media/libaudioprocessing/AudioResamplerFirProcess.h b/media/libaudioprocessing/AudioResamplerFirProcess.h
index 9b70a1c..1fcffcc 100644
--- a/media/libaudioprocessing/AudioResamplerFirProcess.h
+++ b/media/libaudioprocessing/AudioResamplerFirProcess.h
@@ -381,7 +381,7 @@
     // NOTE: be very careful when modifying the code here. register
     // pressure is very high and a small change might cause the compiler
     // to generate far less efficient code.
-    // Always sanity check the result with objdump or test-resample.
+    // Always validate the result with objdump or test-resample.
 
     if (LOCKED) {
         // locked polyphase (no interpolation)
diff --git a/media/libaudioprocessing/AudioResamplerSinc.cpp b/media/libaudioprocessing/AudioResamplerSinc.cpp
index 5a03a0d..f2c386d 100644
--- a/media/libaudioprocessing/AudioResamplerSinc.cpp
+++ b/media/libaudioprocessing/AudioResamplerSinc.cpp
@@ -404,7 +404,7 @@
     // NOTE: be very careful when modifying the code here. register
     // pressure is very high and a small change might cause the compiler
     // to generate far less efficient code.
-    // Always sanity check the result with objdump or test-resample.
+    // Always validate the result with objdump or test-resample.
 
     // compute the index of the coefficient on the positive side and
     // negative side
diff --git a/media/libaudioprocessing/include/media/AudioMixer.h b/media/libaudioprocessing/include/media/AudioMixer.h
index 3f7cd48..70eafe3 100644
--- a/media/libaudioprocessing/include/media/AudioMixer.h
+++ b/media/libaudioprocessing/include/media/AudioMixer.h
@@ -22,10 +22,10 @@
 #include <stdint.h>
 #include <sys/types.h>
 
-#include <android/os/IExternalVibratorService.h>
 #include <media/AudioMixerBase.h>
 #include <media/BufferProviders.h>
 #include <utils/threads.h>
+#include <vibrator/ExternalVibrationUtils.h>
 
 // FIXME This is actually unity gain, which might not be max in future, expressed in U.12
 #define MAX_GAIN_INT AudioMixerBase::UNITY_GAIN_INT
@@ -55,32 +55,6 @@
                                   // parameter 'value' is a pointer to the new playback rate.
     };
 
-    typedef enum { // Haptic intensity, should keep consistent with VibratorService
-        HAPTIC_SCALE_MUTE = os::IExternalVibratorService::SCALE_MUTE,
-        HAPTIC_SCALE_VERY_LOW = os::IExternalVibratorService::SCALE_VERY_LOW,
-        HAPTIC_SCALE_LOW = os::IExternalVibratorService::SCALE_LOW,
-        HAPTIC_SCALE_NONE = os::IExternalVibratorService::SCALE_NONE,
-        HAPTIC_SCALE_HIGH = os::IExternalVibratorService::SCALE_HIGH,
-        HAPTIC_SCALE_VERY_HIGH = os::IExternalVibratorService::SCALE_VERY_HIGH,
-    } haptic_intensity_t;
-    static constexpr float HAPTIC_SCALE_VERY_LOW_RATIO = 2.0f / 3.0f;
-    static constexpr float HAPTIC_SCALE_LOW_RATIO = 3.0f / 4.0f;
-    static const constexpr float HAPTIC_MAX_AMPLITUDE_FLOAT = 1.0f;
-
-    static inline bool isValidHapticIntensity(haptic_intensity_t hapticIntensity) {
-        switch (hapticIntensity) {
-        case HAPTIC_SCALE_MUTE:
-        case HAPTIC_SCALE_VERY_LOW:
-        case HAPTIC_SCALE_LOW:
-        case HAPTIC_SCALE_NONE:
-        case HAPTIC_SCALE_HIGH:
-        case HAPTIC_SCALE_VERY_HIGH:
-            return true;
-        default:
-            return false;
-        }
-    }
-
     AudioMixer(size_t frameCount, uint32_t sampleRate)
             : AudioMixerBase(frameCount, sampleRate) {
         pthread_once(&sOnceControl, &sInitRoutine);
@@ -170,7 +144,7 @@
 
         // Haptic
         bool                 mHapticPlaybackEnabled;
-        haptic_intensity_t   mHapticIntensity;
+        os::HapticScale      mHapticIntensity;
         audio_channel_mask_t mHapticChannelMask;
         uint32_t             mHapticChannelCount;
         audio_channel_mask_t mMixerHapticChannelMask;
@@ -180,38 +154,6 @@
         uint32_t             mAdjustNonDestructiveInChannelCount;
         uint32_t             mAdjustNonDestructiveOutChannelCount;
         bool                 mKeepContractedChannels;
-
-        float getHapticScaleGamma() const {
-        // Need to keep consistent with the value in VibratorService.
-        switch (mHapticIntensity) {
-        case HAPTIC_SCALE_VERY_LOW:
-            return 2.0f;
-        case HAPTIC_SCALE_LOW:
-            return 1.5f;
-        case HAPTIC_SCALE_HIGH:
-            return 0.5f;
-        case HAPTIC_SCALE_VERY_HIGH:
-            return 0.25f;
-        default:
-            return 1.0f;
-        }
-        }
-
-        float getHapticMaxAmplitudeRatio() const {
-        // Need to keep consistent with the value in VibratorService.
-        switch (mHapticIntensity) {
-        case HAPTIC_SCALE_VERY_LOW:
-            return HAPTIC_SCALE_VERY_LOW_RATIO;
-        case HAPTIC_SCALE_LOW:
-            return HAPTIC_SCALE_LOW_RATIO;
-        case HAPTIC_SCALE_NONE:
-        case HAPTIC_SCALE_HIGH:
-        case HAPTIC_SCALE_VERY_HIGH:
-            return 1.0f;
-        default:
-            return 0.0f;
-        }
-        }
     };
 
     inline std::shared_ptr<Track> getTrack(int name) {
diff --git a/media/libaudioprocessing/include/media/AudioResamplerPublic.h b/media/libaudioprocessing/include/media/AudioResamplerPublic.h
index 1b39067..200a4c8 100644
--- a/media/libaudioprocessing/include/media/AudioResamplerPublic.h
+++ b/media/libaudioprocessing/include/media/AudioResamplerPublic.h
@@ -59,7 +59,7 @@
 
 static inline bool isAudioPlaybackRateValid(const AudioPlaybackRate &playbackRate) {
     if (playbackRate.mFallbackMode == AUDIO_TIMESTRETCH_FALLBACK_FAIL &&
-            (playbackRate.mStretchMode == AUDIO_TIMESTRETCH_STRETCH_SPEECH ||
+            (playbackRate.mStretchMode == AUDIO_TIMESTRETCH_STRETCH_VOICE ||
                     playbackRate.mStretchMode == AUDIO_TIMESTRETCH_STRETCH_DEFAULT)) {
         //test sonic specific constraints
         return playbackRate.mSpeed >= TIMESTRETCH_SONIC_SPEED_MIN &&
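For context, a caller-side check against the renamed stretch mode; the specific field values below are arbitrary examples, and the error handling is hypothetical.

// Sketch: validate a time-stretch request before configuring the resampler.
AudioPlaybackRate rate = AUDIO_PLAYBACK_RATE_DEFAULT;
rate.mSpeed = 1.5f;
rate.mStretchMode = AUDIO_TIMESTRETCH_STRETCH_VOICE;    // formerly STRETCH_SPEECH
rate.mFallbackMode = AUDIO_TIMESTRETCH_FALLBACK_FAIL;
if (!isAudioPlaybackRateValid(rate)) {
    // Hypothetical handling: reject the request rather than letting playback fail later.
}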
diff --git a/media/libaudioprocessing/tests/fuzzer/Android.bp b/media/libaudioprocessing/tests/fuzzer/Android.bp
index 1df47b7..2a0dec4 100644
--- a/media/libaudioprocessing/tests/fuzzer/Android.bp
+++ b/media/libaudioprocessing/tests/fuzzer/Android.bp
@@ -8,3 +8,14 @@
     "libsndfile",
   ],
 }
+
+cc_fuzz {
+  name: "libaudioprocessing_record_buffer_converter_fuzzer",
+  srcs: [
+    "libaudioprocessing_record_buffer_converter_fuzzer.cpp",
+  ],
+  defaults: ["libaudioprocessing_test_defaults"],
+  static_libs: [
+    "libsndfile",
+  ],
+}
diff --git a/media/libaudioprocessing/tests/fuzzer/libaudioprocessing_fuzz_utils.h b/media/libaudioprocessing/tests/fuzzer/libaudioprocessing_fuzz_utils.h
new file mode 100644
index 0000000..5165925
--- /dev/null
+++ b/media/libaudioprocessing/tests/fuzzer/libaudioprocessing_fuzz_utils.h
@@ -0,0 +1,67 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_LIBAUDIOPROCESSING_FUZZ_UTILS_H
+#define ANDROID_LIBAUDIOPROCESSING_FUZZ_UTILS_H
+
+#include <media/AudioBufferProvider.h>
+#include <system/audio.h>
+
+namespace android {
+
+class Provider : public AudioBufferProvider {
+  const void* mAddr;        // base address
+  const size_t mNumFrames;  // total frames
+  const size_t mFrameSize;  // size of each frame in bytes
+  size_t mNextFrame;        // index of next frame to provide
+  size_t mUnrel;            // number of frames not yet released
+ public:
+  Provider(const void* addr, size_t frames, size_t frameSize)
+      : mAddr(addr),
+        mNumFrames(frames),
+        mFrameSize(frameSize),
+        mNextFrame(0),
+        mUnrel(0) {}
+  status_t getNextBuffer(Buffer* buffer) override {
+    if (buffer->frameCount > mNumFrames - mNextFrame) {
+      buffer->frameCount = mNumFrames - mNextFrame;
+    }
+    mUnrel = buffer->frameCount;
+    if (buffer->frameCount > 0) {
+      buffer->raw = (char*)mAddr + mFrameSize * mNextFrame;
+      return NO_ERROR;
+    } else {
+      buffer->raw = nullptr;
+      return NOT_ENOUGH_DATA;
+    }
+  }
+  void releaseBuffer(Buffer* buffer) override {
+    if (buffer->frameCount > mUnrel) {
+      mNextFrame += mUnrel;
+      mUnrel = 0;
+    } else {
+      mNextFrame += buffer->frameCount;
+      mUnrel -= buffer->frameCount;
+    }
+    buffer->frameCount = 0;
+    buffer->raw = nullptr;
+  }
+  void reset() { mNextFrame = 0; }
+};
+
+} // namespace android
+
+#endif // ANDROID_LIBAUDIOPROCESSING_FUZZ_UTILS_H
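A short usage sketch of the Provider helper factored out above; the buffer size and frame geometry are made up for illustration.

// Sketch: feed a fixed PCM byte buffer to a consumer through the AudioBufferProvider API.
std::vector<uint8_t> pcm(256 * 4);           // 256 frames, 4 bytes per frame
android::Provider provider(pcm.data(), /*frames=*/256, /*frameSize=*/4);

android::AudioBufferProvider::Buffer buffer;
buffer.frameCount = 64;                      // request up to 64 frames
if (provider.getNextBuffer(&buffer) == android::NO_ERROR) {
    // buffer.raw points into pcm; buffer.frameCount may have been clamped.
    provider.releaseBuffer(&buffer);         // advances the read position
}
provider.reset();                            // rewind to the first frame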
diff --git a/media/libaudioprocessing/tests/fuzzer/libaudioprocessing_record_buffer_converter_fuzzer.cpp b/media/libaudioprocessing/tests/fuzzer/libaudioprocessing_record_buffer_converter_fuzzer.cpp
new file mode 100644
index 0000000..017598c
--- /dev/null
+++ b/media/libaudioprocessing/tests/fuzzer/libaudioprocessing_record_buffer_converter_fuzzer.cpp
@@ -0,0 +1,177 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "libaudioprocessing_fuzz_utils.h"
+#include "fuzzer/FuzzedDataProvider.h"
+#include <media/AudioResampler.h>
+#include <media/RecordBufferConverter.h>
+#include <stddef.h>
+#include <stdint.h>
+
+using namespace android;
+
+constexpr int MAX_FRAMES = 1024;
+
+#define AUDIO_FORMAT_PCM_MAIN 0
+
+// Copied and simplified from audio-hal-enums.h?l=571
+constexpr uint32_t FUZZ_AUDIO_FORMATS[] = {
+  AUDIO_FORMAT_PCM_MAIN | AUDIO_FORMAT_PCM_SUB_16_BIT,
+  AUDIO_FORMAT_PCM_MAIN | AUDIO_FORMAT_PCM_SUB_8_BIT,
+  AUDIO_FORMAT_PCM_MAIN | AUDIO_FORMAT_PCM_SUB_32_BIT,
+  AUDIO_FORMAT_PCM_MAIN | AUDIO_FORMAT_PCM_SUB_8_24_BIT,
+  AUDIO_FORMAT_PCM_MAIN | AUDIO_FORMAT_PCM_SUB_FLOAT,
+  AUDIO_FORMAT_PCM_MAIN | AUDIO_FORMAT_PCM_SUB_24_BIT_PACKED,
+  0x01000000u,
+  0x02000000u,
+  0x03000000u,
+  0x04000000u,
+  AUDIO_FORMAT_AAC | AUDIO_FORMAT_AAC_SUB_MAIN,
+  AUDIO_FORMAT_AAC | AUDIO_FORMAT_AAC_SUB_LC,
+  AUDIO_FORMAT_AAC | AUDIO_FORMAT_AAC_SUB_SSR,
+  AUDIO_FORMAT_AAC | AUDIO_FORMAT_AAC_SUB_LTP,
+  AUDIO_FORMAT_AAC | AUDIO_FORMAT_AAC_SUB_HE_V1,
+  AUDIO_FORMAT_AAC | AUDIO_FORMAT_AAC_SUB_SCALABLE,
+  AUDIO_FORMAT_AAC | AUDIO_FORMAT_AAC_SUB_ERLC,
+  AUDIO_FORMAT_AAC | AUDIO_FORMAT_AAC_SUB_LD,
+  AUDIO_FORMAT_AAC | AUDIO_FORMAT_AAC_SUB_HE_V2,
+  AUDIO_FORMAT_AAC | AUDIO_FORMAT_AAC_SUB_ELD,
+  AUDIO_FORMAT_AAC | AUDIO_FORMAT_AAC_SUB_XHE,
+  0x05000000u,
+  0x06000000u,
+  0x07000000u,
+  0x08000000u,
+  0x09000000u,
+  0x0A000000u,
+  AUDIO_FORMAT_E_AC3 | AUDIO_FORMAT_E_AC3_SUB_JOC,
+  0x0B000000u,
+  0x0C000000u,
+  0x0D000000u,
+  0x0E000000u,
+  0x10000000u,
+  0x11000000u,
+  0x12000000u,
+  0x13000000u,
+  0x14000000u,
+  0x15000000u,
+  0x16000000u,
+  0x17000000u,
+  0x18000000u,
+  0x19000000u,
+  0x1A000000u,
+  0x1B000000u,
+  0x1C000000u,
+  0x1D000000u,
+  0x1E000000u,
+  AUDIO_FORMAT_AAC_ADTS | AUDIO_FORMAT_AAC_SUB_MAIN,
+  AUDIO_FORMAT_AAC_ADTS | AUDIO_FORMAT_AAC_SUB_LC,
+  AUDIO_FORMAT_AAC_ADTS | AUDIO_FORMAT_AAC_SUB_SSR,
+  AUDIO_FORMAT_AAC_ADTS | AUDIO_FORMAT_AAC_SUB_LTP,
+  AUDIO_FORMAT_AAC_ADTS | AUDIO_FORMAT_AAC_SUB_HE_V1,
+  AUDIO_FORMAT_AAC_ADTS | AUDIO_FORMAT_AAC_SUB_SCALABLE,
+  AUDIO_FORMAT_AAC_ADTS | AUDIO_FORMAT_AAC_SUB_ERLC,
+  AUDIO_FORMAT_AAC_ADTS | AUDIO_FORMAT_AAC_SUB_LD,
+  AUDIO_FORMAT_AAC_ADTS | AUDIO_FORMAT_AAC_SUB_HE_V2,
+  AUDIO_FORMAT_AAC_ADTS | AUDIO_FORMAT_AAC_SUB_ELD,
+  AUDIO_FORMAT_AAC_ADTS | AUDIO_FORMAT_AAC_SUB_XHE,
+  0x1F000000u,
+  0x20000000u,
+  0x21000000u,
+  0x22000000u,
+  0x23000000u,
+  0x24000000u,
+  AUDIO_FORMAT_MAT | AUDIO_FORMAT_MAT_SUB_1_0,
+  AUDIO_FORMAT_MAT | AUDIO_FORMAT_MAT_SUB_2_0,
+  AUDIO_FORMAT_MAT | AUDIO_FORMAT_MAT_SUB_2_1,
+  0x25000000u,
+  AUDIO_FORMAT_AAC_LATM | AUDIO_FORMAT_AAC_SUB_LC,
+  AUDIO_FORMAT_AAC_LATM | AUDIO_FORMAT_AAC_SUB_HE_V1,
+  AUDIO_FORMAT_AAC_LATM | AUDIO_FORMAT_AAC_SUB_HE_V2,
+  0x26000000u,
+  0x27000000u,
+  0x28000000u,
+  0x29000000u,
+  0x2A000000u,
+  0x2B000000u,
+  0xFFFFFFFFu,
+  AUDIO_FORMAT_PCM_MAIN,
+  AUDIO_FORMAT_PCM,
+};
+constexpr size_t NUM_AUDIO_FORMATS = std::size(FUZZ_AUDIO_FORMATS);
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+  FuzzedDataProvider fdp(data, size);
+  fdp.ConsumeIntegral<int>();
+
+  const audio_channel_mask_t srcChannelMask = (audio_channel_mask_t)fdp.ConsumeIntegral<int>();
+  const audio_format_t srcFormat =
+      (audio_format_t)FUZZ_AUDIO_FORMATS[fdp.ConsumeIntegralInRange<int>(0, NUM_AUDIO_FORMATS - 1)];
+  const uint32_t srcSampleRate = fdp.ConsumeIntegralInRange<int>(1, 0x7fffffff);
+  const audio_channel_mask_t dstChannelMask = (audio_channel_mask_t)fdp.ConsumeIntegral<int>();
+  const audio_format_t dstFormat =
+      (audio_format_t)FUZZ_AUDIO_FORMATS[fdp.ConsumeIntegralInRange<int>(0, NUM_AUDIO_FORMATS - 1)];
+  const uint32_t dstSampleRate = fdp.ConsumeIntegralInRange<int>(1, 0x7fffffff);
+
+  // Certain formats will result in LOG_ALWAYS_FATAL errors that aren't interesting crashes
+  // for fuzzing.  Don't use those ones.
+  const uint32_t dstChannelCount = audio_channel_count_from_in_mask(dstChannelMask);
+  constexpr android::AudioResampler::src_quality quality =
+      android::AudioResampler::DEFAULT_QUALITY;
+  const int maxChannels =
+      quality < android::AudioResampler::DYN_LOW_QUALITY ? 2 : 8;
+  if (dstChannelCount < 1 || dstChannelCount > maxChannels) {
+    return 0;
+  }
+
+  const uint32_t srcChannelCount = audio_channel_count_from_in_mask(srcChannelMask);
+  if (srcChannelCount < 1 || srcChannelCount > maxChannels) {
+    return 0;
+  }
+
+  RecordBufferConverter converter(srcChannelMask, srcFormat, srcSampleRate,
+                                  dstChannelMask, dstFormat, dstSampleRate);
+  if (converter.initCheck() != NO_ERROR) {
+    return 0;
+  }
+
+  const uint32_t srcFrameSize = srcChannelCount * audio_bytes_per_sample(srcFormat);
+  const int srcNumFrames = fdp.ConsumeIntegralInRange<int>(0, MAX_FRAMES);
+  constexpr size_t metadataSize = 2 + 3 * sizeof(int) + 2 * sizeof(float);
+  std::vector<uint8_t> inputData = fdp.ConsumeBytes<uint8_t>(
+      metadataSize + (srcFrameSize * srcNumFrames));
+  Provider provider(inputData.data(), srcNumFrames, srcFrameSize);
+
+  const uint32_t dstFrameSize = dstChannelCount * audio_bytes_per_sample(dstFormat);
+  const size_t frames = fdp.ConsumeIntegralInRange<size_t>(0, MAX_FRAMES + 1);
+  int8_t dst[dstFrameSize * frames];
+  memset(dst, 0, sizeof(int8_t) * dstFrameSize * frames);
+
+  // Add a small number of loops to see if repeated calls to convert cause
+  // any change in behavior.
+  const int numLoops = fdp.ConsumeIntegralInRange<int>(1, 3);
+  for (int loop = 0; loop < numLoops; ++loop) {
+    switch (fdp.ConsumeIntegralInRange<int>(0, 1)) {
+      case 0:
+        converter.reset();
+        FALLTHROUGH_INTENDED;
+      case 1:
+        converter.convert(dst, &provider, frames);
+        break;
+    }
+  }
+
+  return 0;
+}
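Outside the fuzzer, the RecordBufferConverter exercised above is driven the same way; a minimal sketch with fixed parameters (the formats, rates, and sizes below are illustrative, and the return value of convert() is ignored here just as it is in the fuzzer).

// Sketch: mono 48 kHz float capture converted to stereo 44.1 kHz 16-bit PCM.
RecordBufferConverter converter(
        AUDIO_CHANNEL_IN_MONO, AUDIO_FORMAT_PCM_FLOAT, 48000,
        AUDIO_CHANNEL_IN_STEREO, AUDIO_FORMAT_PCM_16_BIT, 44100);
if (converter.initCheck() == NO_ERROR) {
    std::vector<float> src(1024);                        // 1024 mono float frames
    Provider provider(src.data(), 1024, sizeof(float));  // frame size = 4 bytes
    std::vector<int16_t> dst(1024 * 2);                  // room for 1024 stereo frames
    converter.convert(dst.data(), &provider, 1024);
}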
diff --git a/media/libaudioprocessing/tests/fuzzer/libaudioprocessing_resampler_fuzzer.cpp b/media/libaudioprocessing/tests/fuzzer/libaudioprocessing_resampler_fuzzer.cpp
index 938c610..65c9a3c 100644
--- a/media/libaudioprocessing/tests/fuzzer/libaudioprocessing_resampler_fuzzer.cpp
+++ b/media/libaudioprocessing/tests/fuzzer/libaudioprocessing_resampler_fuzzer.cpp
@@ -34,6 +34,8 @@
 #include <unistd.h>
 #include <utils/Vector.h>
 
+#include "libaudioprocessing_fuzz_utils.h"
+
 #include <memory>
 
 using namespace android;
@@ -53,46 +55,6 @@
     AudioResampler::DYN_HIGH_QUALITY,
 };
 
-class Provider : public AudioBufferProvider {
-  const void* mAddr;        // base address
-  const size_t mNumFrames;  // total frames
-  const size_t mFrameSize;  // size of each frame in bytes
-  size_t mNextFrame;        // index of next frame to provide
-  size_t mUnrel;            // number of frames not yet released
- public:
-  Provider(const void* addr, size_t frames, size_t frameSize)
-      : mAddr(addr),
-        mNumFrames(frames),
-        mFrameSize(frameSize),
-        mNextFrame(0),
-        mUnrel(0) {}
-  status_t getNextBuffer(Buffer* buffer) override {
-    if (buffer->frameCount > mNumFrames - mNextFrame) {
-      buffer->frameCount = mNumFrames - mNextFrame;
-    }
-    mUnrel = buffer->frameCount;
-    if (buffer->frameCount > 0) {
-      buffer->raw = (char*)mAddr + mFrameSize * mNextFrame;
-      return NO_ERROR;
-    } else {
-      buffer->raw = nullptr;
-      return NOT_ENOUGH_DATA;
-    }
-  }
-  virtual void releaseBuffer(Buffer* buffer) {
-    if (buffer->frameCount > mUnrel) {
-      mNextFrame += mUnrel;
-      mUnrel = 0;
-    } else {
-      mNextFrame += buffer->frameCount;
-      mUnrel -= buffer->frameCount;
-    }
-    buffer->frameCount = 0;
-    buffer->raw = nullptr;
-  }
-  void reset() { mNextFrame = 0; }
-};
-
 audio_format_t chooseFormat(AudioResampler::src_quality quality,
                             uint8_t input_byte) {
   switch (quality) {
diff --git a/media/libaudioprocessing/tests/mixerops_benchmark.cpp b/media/libaudioprocessing/tests/mixerops_benchmark.cpp
index 86f5429..7a4c5c7 100644
--- a/media/libaudioprocessing/tests/mixerops_benchmark.cpp
+++ b/media/libaudioprocessing/tests/mixerops_benchmark.cpp
@@ -74,28 +74,32 @@
     }
 }
 
-BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI, 2);
-BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_SAVEONLY, 2);
+// MULTI mode and MULTI_SAVEONLY mode are not used by AudioMixer for channels > 2,
+// which is ensured by a static_assert (won't compile for those configurations).
+// So we benchmark MIXTYPE_MULTI_MONOVOL and MIXTYPE_MULTI_SAVEONLY_MONOVOL compared
+// with MIXTYPE_MULTI_STEREOVOL and MIXTYPE_MULTI_SAVEONLY_STEREOVOL.
+BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_MONOVOL, 2);
+BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_SAVEONLY_MONOVOL, 2);
 BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_STEREOVOL, 2);
 BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_SAVEONLY_STEREOVOL, 2);
 
-BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI, 4);
-BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_SAVEONLY, 4);
+BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_MONOVOL, 4);
+BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_SAVEONLY_MONOVOL, 4);
 BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_STEREOVOL, 4);
 BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_SAVEONLY_STEREOVOL, 4);
 
-BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI, 5);
-BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_SAVEONLY, 5);
+BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_MONOVOL, 5);
+BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_SAVEONLY_MONOVOL, 5);
 BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_STEREOVOL, 5);
 BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_SAVEONLY_STEREOVOL, 5);
 
-BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI, 8);
-BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_SAVEONLY, 8);
+BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_MONOVOL, 8);
+BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_SAVEONLY_MONOVOL, 8);
 BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_STEREOVOL, 8);
 BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_SAVEONLY_STEREOVOL, 8);
 
-BENCHMARK_TEMPLATE(BM_VolumeMulti, MIXTYPE_MULTI, 8);
-BENCHMARK_TEMPLATE(BM_VolumeMulti, MIXTYPE_MULTI_SAVEONLY, 8);
+BENCHMARK_TEMPLATE(BM_VolumeMulti, MIXTYPE_MULTI_MONOVOL, 8);
+BENCHMARK_TEMPLATE(BM_VolumeMulti, MIXTYPE_MULTI_SAVEONLY_MONOVOL, 8);
 BENCHMARK_TEMPLATE(BM_VolumeMulti, MIXTYPE_MULTI_STEREOVOL, 8);
 BENCHMARK_TEMPLATE(BM_VolumeMulti, MIXTYPE_MULTI_SAVEONLY_STEREOVOL, 8);
 
diff --git a/media/libaudioprocessing/tests/test-mixer.cpp b/media/libaudioprocessing/tests/test-mixer.cpp
index bc9d2a6..1bbb863 100644
--- a/media/libaudioprocessing/tests/test-mixer.cpp
+++ b/media/libaudioprocessing/tests/test-mixer.cpp
@@ -241,7 +241,8 @@
     // set up the tracks.
     for (size_t i = 0; i < providers.size(); ++i) {
         //printf("track %d out of %d\n", i, providers.size());
-        uint32_t channelMask = audio_channel_out_mask_from_count(providers[i].getNumChannels());
+        audio_channel_mask_t channelMask =
+                audio_channel_out_mask_from_count(providers[i].getNumChannels());
         const int name = i;
         const status_t status = mixer->create(
                 name, channelMask, formats[i], AUDIO_SESSION_OUTPUT_MIX);
diff --git a/media/libcpustats/ThreadCpuUsage.cpp b/media/libcpustats/ThreadCpuUsage.cpp
index 4b7549f..e71a7db 100644
--- a/media/libcpustats/ThreadCpuUsage.cpp
+++ b/media/libcpustats/ThreadCpuUsage.cpp
@@ -21,6 +21,7 @@
 #include <stdlib.h>
 #include <string.h>
 #include <time.h>
+#include <unistd.h>
 
 #include <utils/Log.h>
 
diff --git a/media/libeffects/OWNERS b/media/libeffects/OWNERS
index 7f9ae81..b7832ea 100644
--- a/media/libeffects/OWNERS
+++ b/media/libeffects/OWNERS
@@ -1,4 +1,3 @@
 hunga@google.com
-krocard@google.com
 mnaganov@google.com
 rago@google.com
diff --git a/media/libeffects/config/src/EffectsConfig.cpp b/media/libeffects/config/src/EffectsConfig.cpp
index 26eaaf8..1696233 100644
--- a/media/libeffects/config/src/EffectsConfig.cpp
+++ b/media/libeffects/config/src/EffectsConfig.cpp
@@ -138,7 +138,7 @@
 
 template <>
 bool stringToStreamType(const char *streamName, audio_devices_t* type) {
-    return deviceFromString(streamName, *type);
+    return DeviceConverter::fromString(streamName, *type);
 }
 
 /** Parse a library xml note and push the result in libraries or return false on failure. */
diff --git a/media/libeffects/data/audio_effects.xml b/media/libeffects/data/audio_effects.xml
index 2e5f529..93a2181 100644
--- a/media/libeffects/data/audio_effects.xml
+++ b/media/libeffects/data/audio_effects.xml
@@ -21,6 +21,7 @@
         <library name="downmix" path="libdownmix.so"/>
         <library name="loudness_enhancer" path="libldnhncr.so"/>
         <library name="dynamics_processing" path="libdynproc.so"/>
+        <library name="haptic_generator" path="libhapticgenerator.so"/>
     </libraries>
 
     <!-- list of effects to load.
@@ -58,6 +59,7 @@
         <effect name="downmix" library="downmix" uuid="93f04452-e4fe-41cc-91f9-e475b6d1d69f"/>
         <effect name="loudness_enhancer" library="loudness_enhancer" uuid="fa415329-2034-4bea-b5dc-5b381c8d1e2c"/>
         <effect name="dynamics_processing" library="dynamics_processing" uuid="e0e6539b-1781-7261-676f-6d7573696340"/>
+        <effect name="haptic_generator" library="haptic_generator" uuid="97c4acd1-8b82-4f2f-832e-c2fe5d7a9931"/>
     </effects>
 
     <!-- Audio pre processor configurations.
diff --git a/media/libeffects/factory/EffectsConfigLoader.c b/media/libeffects/factory/EffectsConfigLoader.c
index fcef36f..e23530e 100644
--- a/media/libeffects/factory/EffectsConfigLoader.c
+++ b/media/libeffects/factory/EffectsConfigLoader.c
@@ -394,7 +394,7 @@
        }
        sub_effect_entry_t *subEntry = (sub_effect_entry_t*)gSubEffectList->sub_elem->object;
        effect_descriptor_t *subEffectDesc = (effect_descriptor_t*)(subEntry->object);
-       // Since we return a dummy descriptor for the proxy during
+       // Since we return a stub descriptor for the proxy during
        // get_descriptor call, we replace it with the corresponding
        // sw effect descriptor, but with Proxy UUID
        // check for Sw desc
diff --git a/media/libeffects/factory/EffectsXmlConfigLoader.cpp b/media/libeffects/factory/EffectsXmlConfigLoader.cpp
index 505be7c..30a9007 100644
--- a/media/libeffects/factory/EffectsXmlConfigLoader.cpp
+++ b/media/libeffects/factory/EffectsXmlConfigLoader.cpp
@@ -283,7 +283,7 @@
             }
             listPush(effectLoadResult.effectDesc.get(), subEffectList);
 
-            // Since we return a dummy descriptor for the proxy during
+            // Since we return a stub descriptor for the proxy during
             // get_descriptor call, we replace it with the corresponding
             // sw effect descriptor, but keep the Proxy UUID
             *effectLoadResult.effectDesc = *swEffectLoadResult.effectDesc;
diff --git a/media/libeffects/hapticgenerator/Android.bp b/media/libeffects/hapticgenerator/Android.bp
new file mode 100644
index 0000000..f947339
--- /dev/null
+++ b/media/libeffects/hapticgenerator/Android.bp
@@ -0,0 +1,51 @@
+// Copyright (C) 2020 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// HapticGenerator library
+cc_library_shared {
+    name: "libhapticgenerator",
+
+    vendor: true,
+
+    srcs: [
+        "EffectHapticGenerator.cpp",
+        "Processors.cpp",
+    ],
+
+    cflags: [
+        "-O2", // Turning on the optimization in order to reduce effect processing time.
+               // The latency is around 1/5 less than without the optimization.
+        "-Wall",
+        "-Werror",
+        "-ffast-math", // This is needed for the non-zero coefficients optimization for
+                       // BiquadFilter. Try the biquad_filter_benchmark test in audio_utils
+                       // with/without `-ffast-math` for more context.
+        "-fvisibility=hidden",
+    ],
+
+    shared_libs: [
+        "libaudioutils",
+        "libbinder",
+        "liblog",
+        "libutils",
+        "libvibrator",
+    ],
+
+    relative_install_path: "soundfx",
+
+    header_libs: [
+        "libaudioeffects",
+    ],
+}
+
diff --git a/media/libeffects/hapticgenerator/EffectHapticGenerator.cpp b/media/libeffects/hapticgenerator/EffectHapticGenerator.cpp
new file mode 100644
index 0000000..9b93659
--- /dev/null
+++ b/media/libeffects/hapticgenerator/EffectHapticGenerator.cpp
@@ -0,0 +1,519 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "EffectHG"
+//#define LOG_NDEBUG 0
+#include <utils/Log.h>
+
+#include "EffectHapticGenerator.h"
+
+#include <algorithm>
+#include <memory>
+#include <utility>
+
+#include <errno.h>
+#include <inttypes.h>
+
+#include <audio_effects/effect_hapticgenerator.h>
+#include <audio_utils/format.h>
+#include <system/audio.h>
+
+// This is the only symbol that needs to be exported
+__attribute__ ((visibility ("default")))
+audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = {
+        .tag = AUDIO_EFFECT_LIBRARY_TAG,
+        .version = EFFECT_LIBRARY_API_VERSION,
+        .name = "HapticGenerator Library",
+        .implementor = "The Android Open Source Project",
+        .create_effect = android::audio_effect::haptic_generator::HapticGeneratorLib_Create,
+        .release_effect = android::audio_effect::haptic_generator::HapticGeneratorLib_Release,
+        .get_descriptor = android::audio_effect::haptic_generator::HapticGeneratorLib_GetDescriptor,
+};
+
+namespace android::audio_effect::haptic_generator {
+
+// effect_handle_t interface implementation for haptic generator effect
+const struct effect_interface_s gHapticGeneratorInterface = {
+        HapticGenerator_Process,
+        HapticGenerator_Command,
+        HapticGenerator_GetDescriptor,
+        nullptr /* no process_reverse function, no reference stream needed */
+};
+
+//-----------------------------------------------------------------------------
+// Effect Descriptor
+//-----------------------------------------------------------------------------
+
+// UUIDs for effect types have been generated from http://www.itu.int/ITU-T/asn1/uuid.html
+// Haptic Generator
+static const effect_descriptor_t gHgDescriptor = {
+        FX_IID_HAPTICGENERATOR_, // type
+        {0x97c4acd1, 0x8b82, 0x4f2f, 0x832e, {0xc2, 0xfe, 0x5d, 0x7a, 0x99, 0x31}}, // uuid
+        EFFECT_CONTROL_API_VERSION,
+        EFFECT_FLAG_TYPE_INSERT | EFFECT_FLAG_INSERT_FIRST,
+        0, // FIXME what value should be reported? // cpu load
+        0, // FIXME what value should be reported? // memory usage
+        "Haptic Generator",
+        "The Android Open Source Project"
+};
+
+//-----------------------------------------------------------------------------
+// Internal functions
+//-----------------------------------------------------------------------------
+
+namespace {
+
+int HapticGenerator_Init(struct HapticGeneratorContext *context) {
+    context->itfe = &gHapticGeneratorInterface;
+
+    context->config.inputCfg.accessMode = EFFECT_BUFFER_ACCESS_READ;
+    context->config.inputCfg.channels = AUDIO_CHANNEL_OUT_STEREO;
+    context->config.inputCfg.format = AUDIO_FORMAT_PCM_FLOAT;
+    context->config.inputCfg.samplingRate = 0;
+    context->config.inputCfg.bufferProvider.getBuffer = nullptr;
+    context->config.inputCfg.bufferProvider.releaseBuffer = nullptr;
+    context->config.inputCfg.bufferProvider.cookie = nullptr;
+    context->config.inputCfg.mask = EFFECT_CONFIG_ALL;
+    context->config.outputCfg.accessMode = EFFECT_BUFFER_ACCESS_ACCUMULATE;
+    context->config.outputCfg.channels = AUDIO_CHANNEL_OUT_STEREO;
+    context->config.outputCfg.format = AUDIO_FORMAT_PCM_FLOAT;
+    context->config.outputCfg.samplingRate = 0;
+    context->config.outputCfg.bufferProvider.getBuffer = nullptr;
+    context->config.outputCfg.bufferProvider.releaseBuffer = nullptr;
+    context->config.outputCfg.bufferProvider.cookie = nullptr;
+    context->config.outputCfg.mask = EFFECT_CONFIG_ALL;
+
+    memset(context->param.hapticChannelSource, 0, sizeof(context->param.hapticChannelSource));
+    context->param.hapticChannelCount = 0;
+    context->param.audioChannelCount = 0;
+    context->param.maxHapticIntensity = os::HapticScale::MUTE;
+
+    context->state = HAPTICGENERATOR_STATE_INITIALIZED;
+    return 0;
+}
+
+void addBiquadFilter(
+        std::vector<std::function<void(float *, const float *, size_t)>> &processingChain,
+        struct HapticGeneratorProcessorsRecord &processorsRecord,
+        std::shared_ptr<HapticBiquadFilter> filter) {
+    // The process chain captures the shared pointer of the filter in lambda.
+    // The process record will keep a shared pointer to the filter so that it is possible to access
+    // the filter outside of the process chain.
+    processorsRecord.filters.push_back(filter);
+    processingChain.push_back([filter](float *out, const float *in, size_t frameCount) {
+            filter->process(out, in, frameCount);
+    });
+}
+
+/**
+ * \brief build haptic generator processing chain.
+ *
+ * \param processingChain
+ * \param processorsRecord a structure to cache all the shared pointers for processors
+ * \param sampleRate the audio sampling rate. Use a float here as it may be used to create filters
+ * \param channelCount haptic channel count
+ */
+void HapticGenerator_buildProcessingChain(
+        std::vector<std::function<void(float*, const float*, size_t)>>& processingChain,
+        struct HapticGeneratorProcessorsRecord& processorsRecord,
+        float sampleRate, size_t channelCount) {
+    float highPassCornerFrequency = 100.0f;
+    auto hpf = createHPF2(highPassCornerFrequency, sampleRate, channelCount);
+    addBiquadFilter(processingChain, processorsRecord, hpf);
+    float lowPassCornerFrequency = 3000.0f;
+    auto lpf = createLPF2(lowPassCornerFrequency, sampleRate, channelCount);
+    addBiquadFilter(processingChain, processorsRecord, lpf);
+
+    auto ramp = std::make_shared<Ramp>(channelCount);
+    // The process chain captures the shared pointer of the ramp in lambda. It will be the only
+    // reference to the ramp.
+    // The process record will keep a weak pointer to the ramp so that it is possible to access
+    // the ramp outside of the process chain.
+    processorsRecord.ramps.push_back(ramp);
+    processingChain.push_back([ramp](float *out, const float *in, size_t frameCount) {
+            ramp->process(out, in, frameCount);
+    });
+
+    highPassCornerFrequency = 60.0f;
+    hpf = createHPF2(highPassCornerFrequency, sampleRate, channelCount);
+    addBiquadFilter(processingChain, processorsRecord, hpf);
+    lowPassCornerFrequency = 700.0f;
+    lpf = createLPF2(lowPassCornerFrequency, sampleRate, channelCount);
+    addBiquadFilter(processingChain, processorsRecord, lpf);
+
+    lowPassCornerFrequency = 5.0f;
+    float normalizationPower = -0.3f;
+    // The process chain captures the shared pointer of the slow envelope in lambda. It will
+    // be the only reference to the slow envelope.
+    // The process record will keep a weak pointer to the slow envelope so that it is possible
+    // to access the slow envelope outside of the process chain.
+    auto slowEnv = std::make_shared<SlowEnvelope>(
+            lowPassCornerFrequency, sampleRate, normalizationPower, channelCount);
+    processorsRecord.slowEnvs.push_back(slowEnv);
+    processingChain.push_back([slowEnv](float *out, const float *in, size_t frameCount) {
+            slowEnv->process(out, in, frameCount);
+    });
+
+    lowPassCornerFrequency = 400.0f;
+    lpf = createLPF2(lowPassCornerFrequency, sampleRate, channelCount);
+    addBiquadFilter(processingChain, processorsRecord, lpf);
+    lowPassCornerFrequency = 500.0f;
+    lpf = createLPF2(lowPassCornerFrequency, sampleRate, channelCount);
+    addBiquadFilter(processingChain, processorsRecord, lpf);
+
+    auto apf = createAPF2(400.0f, 200.0f, sampleRate, channelCount);
+    addBiquadFilter(processingChain, processorsRecord, apf);
+    apf = createAPF2(100.0f, 50.0f, sampleRate, channelCount);
+    addBiquadFilter(processingChain, processorsRecord, apf);
+    float allPassCornerFrequency = 25.0f;
+    apf = createAPF(allPassCornerFrequency, sampleRate, channelCount);
+    addBiquadFilter(processingChain, processorsRecord, apf);
+
+    float resonantFrequency = 150.0f;
+    float bandpassQ = 1.0f;
+    auto bpf = createBPF(resonantFrequency, bandpassQ, sampleRate, channelCount);
+    addBiquadFilter(processingChain, processorsRecord, bpf);
+
+    float zeroQ = 8.0f;
+    float poleQ = 4.0f;
+    auto bsf = createBSF(resonantFrequency, zeroQ, poleQ, sampleRate, channelCount);
+    addBiquadFilter(processingChain, processorsRecord, bsf);
+}
+
+int HapticGenerator_Configure(struct HapticGeneratorContext *context, effect_config_t *config) {
+    if (config->inputCfg.samplingRate != config->outputCfg.samplingRate ||
+        config->inputCfg.format != config->outputCfg.format ||
+        config->inputCfg.format != AUDIO_FORMAT_PCM_FLOAT ||
+        config->inputCfg.channels != config->outputCfg.channels ||
+        config->inputCfg.buffer.frameCount != config->outputCfg.buffer.frameCount) {
+        return -EINVAL;
+    }
+    if (&context->config != config) {
+        context->processingChain.clear();
+        context->processorsRecord.filters.clear();
+        context->processorsRecord.ramps.clear();
+        context->processorsRecord.slowEnvs.clear();
+        memcpy(&context->config, config, sizeof(effect_config_t));
+        context->param.audioChannelCount = audio_channel_count_from_out_mask(
+                ((audio_channel_mask_t) config->inputCfg.channels) & ~AUDIO_CHANNEL_HAPTIC_ALL);
+        context->param.hapticChannelCount = audio_channel_count_from_out_mask(
+                ((audio_channel_mask_t) config->outputCfg.channels) & AUDIO_CHANNEL_HAPTIC_ALL);
+        ALOG_ASSERT(context->param.hapticChannelCount <= 2,
+                    "haptic channel count(%zu) is too large",
+                    context->param.hapticChannelCount);
+        context->audioDataBytesPerFrame = audio_bytes_per_frame(
+                context->param.audioChannelCount, (audio_format_t) config->inputCfg.format);
+        for (size_t i = 0; i < context->param.hapticChannelCount; ++i) {
+            // By default, use the first audio channel to generate haptic channels.
+            context->param.hapticChannelSource[i] = 0;
+        }
+
+        HapticGenerator_buildProcessingChain(context->processingChain,
+                                             context->processorsRecord,
+                                             config->inputCfg.samplingRate,
+                                             context->param.hapticChannelCount);
+    }
+    return 0;
+}
+
+int HapticGenerator_Reset(struct HapticGeneratorContext *context) {
+    for (auto& filter : context->processorsRecord.filters) {
+        filter->clear();
+    }
+    for (auto& slowEnv : context->processorsRecord.slowEnvs) {
+        slowEnv->clear();
+    }
+    return 0;
+}
+
+int HapticGenerator_SetParameter(struct HapticGeneratorContext *context,
+                                 int32_t param,
+                                 uint32_t size,
+                                 void *value) {
+    switch (param) {
+    case HG_PARAM_HAPTIC_INTENSITY: {
+        if (value == nullptr || size != (uint32_t) (2 * sizeof(int))) {
+            return -EINVAL;
+        }
+        int id = *(int *) value;
+        os::HapticScale hapticIntensity = static_cast<os::HapticScale>(*((int *) value + 1));
+        if (hapticIntensity == os::HapticScale::MUTE) {
+            context->param.id2Intensity.erase(id);
+        } else {
+            context->param.id2Intensity.emplace(id, hapticIntensity);
+        }
+        context->param.maxHapticIntensity = hapticIntensity;
+        for (const auto&[id, intensity] : context->param.id2Intensity) {
+            context->param.maxHapticIntensity = std::max(
+                    context->param.maxHapticIntensity, intensity);
+        }
+        break;
+    }
+
+    default:
+        ALOGW("Unknown param: %d", param);
+        return -EINVAL;
+    }
+
+    return 0;
+}
+
+/**
+ * \brief run the processing chain to generate haptic data from audio data
+ *
+ * \param processingChain the processing chain for generating haptic data
+ * \param buf1 a buffer contains raw audio data
+ * \param buf2 a buffer that is large enough to keep all the data
+ * \param frameCount frame count of the data
+ * \return a pointer to the output buffer
+ */
+float* HapticGenerator_runProcessingChain(
+        const std::vector<std::function<void(float*, const float*, size_t)>>& processingChain,
+        float* buf1, float* buf2, size_t frameCount) {
+    float *in = buf1;
+    float *out = buf2;
+    for (const auto& processingFunc : processingChain) {
+        processingFunc(out, in, frameCount);
+        std::swap(in, out);
+    }
+    return in;
+}
+
+} // namespace (anonymous)
+
+//-----------------------------------------------------------------------------
+// Effect API Implementation
+//-----------------------------------------------------------------------------
+
+/*--- Effect Library Interface Implementation ---*/
+
+int32_t HapticGeneratorLib_Create(const effect_uuid_t *uuid,
+                                  int32_t sessionId __unused,
+                                  int32_t ioId __unused,
+                                  effect_handle_t *handle) {
+    if (handle == nullptr || uuid == nullptr) {
+        return -EINVAL;
+    }
+
+    if (memcmp(uuid, &gHgDescriptor.uuid, sizeof(*uuid)) != 0) {
+        return -EINVAL;
+    }
+
+    HapticGeneratorContext *context = new HapticGeneratorContext;
+    HapticGenerator_Init(context);
+
+    *handle = (effect_handle_t) context;
+    ALOGV("%s context is %p", __func__, context);
+    return 0;
+}
+
+int32_t HapticGeneratorLib_Release(effect_handle_t handle) {
+    HapticGeneratorContext *context = (HapticGeneratorContext *) handle;
+    delete context;
+    return 0;
+}
+
+int32_t HapticGeneratorLib_GetDescriptor(const effect_uuid_t *uuid,
+                                         effect_descriptor_t *descriptor) {
+
+    if (descriptor == nullptr || uuid == nullptr) {
+        ALOGE("%s() called with NULL pointer", __func__);
+        return -EINVAL;
+    }
+
+    if (memcmp(uuid, &gHgDescriptor.uuid, sizeof(*uuid)) == 0) {
+        *descriptor = gHgDescriptor;
+        return 0;
+    }
+
+    return -EINVAL;
+}
+
+/*--- Effect Control Interface Implementation ---*/
+
+int32_t HapticGenerator_Process(effect_handle_t self,
+                                audio_buffer_t *inBuffer, audio_buffer_t *outBuffer) {
+    HapticGeneratorContext *context = (HapticGeneratorContext *) self;
+
+    if (inBuffer == nullptr || inBuffer->raw == nullptr
+            || outBuffer == nullptr || outBuffer->raw == nullptr) {
+        return 0;
+    }
+
+    // The audio data must not be modified but just written to the
+    // output buffer according to the access mode.
+    size_t audioBytes = context->audioDataBytesPerFrame * inBuffer->frameCount;
+    size_t audioSampleCount = inBuffer->frameCount * context->param.audioChannelCount;
+    if (inBuffer->raw != outBuffer->raw) {
+        if (context->config.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE) {
+            for (size_t i = 0; i < audioSampleCount; ++i) {
+                outBuffer->f32[i] += inBuffer->f32[i];
+            }
+        } else {
+            memcpy(outBuffer->raw, inBuffer->raw, audioBytes);
+        }
+    }
+
+    if (context->state != HAPTICGENERATOR_STATE_ACTIVE) {
+        ALOGE("State(%d) is not HAPTICGENERATOR_STATE_ACTIVE when calling %s",
+                context->state, __func__);
+        return -ENODATA;
+    }
+
+    if (context->param.maxHapticIntensity == os::HapticScale::MUTE) {
+        // Haptic channels are muted, no need to generate haptic data.
+        return 0;
+    }
+
+    // Resize buffer if the haptic sample count is greater than buffer size.
+    size_t hapticSampleCount = inBuffer->frameCount * context->param.hapticChannelCount;
+    if (hapticSampleCount > context->inputBuffer.size()) {
+        // The context->inputBuffer and context->outputBuffer must have the same size,
+        // which must be at least the haptic sample count.
+        context->inputBuffer.resize(hapticSampleCount);
+        context->outputBuffer.resize(hapticSampleCount);
+    }
+
+    // Construct input buffer according to haptic channel source
+    for (size_t i = 0; i < inBuffer->frameCount; ++i) {
+        for (size_t j = 0; j < context->param.hapticChannelCount; ++j) {
+            context->inputBuffer[i * context->param.hapticChannelCount + j] =
+                    inBuffer->f32[i * context->param.audioChannelCount
+                            + context->param.hapticChannelSource[j]];
+        }
+    }
+
+    float* hapticOutBuffer = HapticGenerator_runProcessingChain(
+            context->processingChain, context->inputBuffer.data(),
+            context->outputBuffer.data(), inBuffer->frameCount);
+    os::scaleHapticData(hapticOutBuffer, hapticSampleCount, context->param.maxHapticIntensity);
+
+    // For haptic data, the haptic playback thread will copy the data from effect input buffer,
+    // which contains haptic data at the end of the buffer, directly to sink buffer.
+    // In that case, copy haptic data to input buffer instead of output buffer.
+    // Note: this may not work with rpc/binder calls
+    memcpy_by_audio_format(static_cast<char*>(inBuffer->raw) + audioBytes,
+                           static_cast<audio_format_t>(context->config.outputCfg.format),
+                           hapticOutBuffer,
+                           AUDIO_FORMAT_PCM_FLOAT,
+                           hapticSampleCount);
+
+    return 0;
+}
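
To make the offset arithmetic above concrete, a minimal sketch (assuming float PCM, as the conversion target in the code; not part of the change):

    #include <cstddef>
    #include <cstdint>

    // Sketch: byte offset where the generated haptic block starts in the effect
    // buffer, mirroring the arithmetic used by HapticGenerator_Process() above.
    size_t hapticBlockOffset(size_t frameCount, uint32_t audioChannelCount) {
        const size_t audioDataBytesPerFrame = audioChannelCount * sizeof(float);
        return audioDataBytesPerFrame * frameCount;  // == audioBytes in the code above
    }
    // e.g. 1024 stereo float frames -> the haptic samples are written starting
    // 8192 bytes into inBuffer->raw, converted to the configured output format.
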
+
+int32_t HapticGenerator_Command(effect_handle_t self, uint32_t cmdCode, uint32_t cmdSize,
+                                void *cmdData, uint32_t *replySize, void *replyData) {
+    HapticGeneratorContext *context = (HapticGeneratorContext *) self;
+
+    if (context == nullptr || context->state == HAPTICGENERATOR_STATE_UNINITIALIZED) {
+        return -EINVAL;
+    }
+
+    ALOGV("HapticGenerator_Command command %u cmdSize %u", cmdCode, cmdSize);
+
+    switch (cmdCode) {
+        case EFFECT_CMD_INIT:
+            if (replyData == nullptr || replySize == nullptr || *replySize != sizeof(int)) {
+                return -EINVAL;
+            }
+            *(int *) replyData = HapticGenerator_Init(context);
+            break;
+
+        case EFFECT_CMD_SET_CONFIG:
+            if (cmdData == nullptr || cmdSize != sizeof(effect_config_t)
+                || replyData == nullptr || replySize == nullptr || *replySize != sizeof(int)) {
+                return -EINVAL;
+            }
+            *(int *) replyData = HapticGenerator_Configure(
+                    context, (effect_config_t *) cmdData);
+            break;
+
+        case EFFECT_CMD_RESET:
+            HapticGenerator_Reset(context);
+            break;
+
+        case EFFECT_CMD_GET_PARAM:
+            ALOGV("HapticGenerator_Command EFFECT_CMD_GET_PARAM cmdData %p, "
+                  "*replySize %u, replyData: %p",
+                  cmdData, replySize ? *replySize : 0, replyData);
+            break;
+
+        case EFFECT_CMD_SET_PARAM: {
+            ALOGV("HapticGenerator_Command EFFECT_CMD_SET_PARAM cmdSize %u cmdData %p, "
+                  "*replySize %u, replyData %p", cmdSize, cmdData,
+                  replySize ? *replySize : 0, replyData);
+            if (cmdData == nullptr || (cmdSize < (int) (sizeof(effect_param_t) + sizeof(int32_t)))
+                || replyData == nullptr || replySize == nullptr ||
+                *replySize != (int) sizeof(int32_t)) {
+                return -EINVAL;
+            }
+            effect_param_t *cmd = (effect_param_t *) cmdData;
+            *(int *) replyData = HapticGenerator_SetParameter(
+                    context, *(int32_t *) cmd->data, cmd->vsize, cmd->data + sizeof(int32_t));
+        }
+            break;
+
+        case EFFECT_CMD_ENABLE:
+            if (replyData == nullptr || replySize == nullptr || *replySize != sizeof(int)) {
+                return -EINVAL;
+            }
+            if (context->state != HAPTICGENERATOR_STATE_INITIALIZED) {
+                return -ENOSYS;
+            }
+            context->state = HAPTICGENERATOR_STATE_ACTIVE;
+            ALOGV("EFFECT_CMD_ENABLE() OK");
+            *(int *) replyData = 0;
+            break;
+
+        case EFFECT_CMD_DISABLE:
+            if (replyData == nullptr || replySize == nullptr || *replySize != sizeof(int)) {
+                return -EINVAL;
+            }
+            if (context->state != HAPTICGENERATOR_STATE_ACTIVE) {
+                return -ENOSYS;
+            }
+            context->state = HAPTICGENERATOR_STATE_INITIALIZED;
+            ALOGV("EFFECT_CMD_DISABLE() OK");
+            *(int *) replyData = 0;
+            break;
+
+        case EFFECT_CMD_SET_VOLUME:
+        case EFFECT_CMD_SET_DEVICE:
+        case EFFECT_CMD_SET_AUDIO_MODE:
+            break;
+
+        default:
+            ALOGW("HapticGenerator_Command invalid command %u", cmdCode);
+            return -EINVAL;
+    }
+
+    return 0;
+}
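
A hedged sketch of how a caller might pack EFFECT_CMD_SET_PARAM for the handler above; the parameter id and the two-word value are placeholders for illustration (the real ids are defined elsewhere in this patch), and the handler expects the id to be a single int32_t so the value starts sizeof(int32_t) into data:

    #include <cstdint>
    #include <hardware/audio_effect.h>

    // Sketch: pack an effect_param_t the way HapticGenerator_SetParameter() unpacks it.
    int32_t setHapticParamSketch(effect_handle_t handle) {
        alignas(effect_param_t) char buf[sizeof(effect_param_t) + 3 * sizeof(int32_t)] = {};
        effect_param_t* p = reinterpret_cast<effect_param_t*>(buf);
        p->psize = sizeof(int32_t);       // size of the parameter id
        p->vsize = 2 * sizeof(int32_t);   // size of the value that follows the id
        int32_t* words = reinterpret_cast<int32_t*>(p->data);
        words[0] = 0;  // placeholder parameter id
        words[1] = 1;  // placeholder value word, e.g. a track id
        words[2] = 2;  // placeholder value word, e.g. an intensity
        int32_t reply = 0;
        uint32_t replySize = sizeof(reply);   // handler requires *replySize == sizeof(int32_t)
        (*handle)->command(handle, EFFECT_CMD_SET_PARAM, sizeof(buf), buf, &replySize, &reply);
        return reply;
    }
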
+
+int32_t HapticGenerator_GetDescriptor(effect_handle_t self, effect_descriptor_t *descriptor) {
+    HapticGeneratorContext *context = (HapticGeneratorContext *) self;
+
+    if (context == nullptr ||
+        context->state == HAPTICGENERATOR_STATE_UNINITIALIZED) {
+        return -EINVAL;
+    }
+
+    memcpy(descriptor, &gHgDescriptor, sizeof(effect_descriptor_t));
+
+    return 0;
+}
+
+} // namespace android::audio_effect::haptic_generator
diff --git a/media/libeffects/hapticgenerator/EffectHapticGenerator.h b/media/libeffects/hapticgenerator/EffectHapticGenerator.h
new file mode 100644
index 0000000..57b4338
--- /dev/null
+++ b/media/libeffects/hapticgenerator/EffectHapticGenerator.h
@@ -0,0 +1,118 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_EFFECTHAPTICGENERATOR_H_
+#define ANDROID_EFFECTHAPTICGENERATOR_H_
+
+#include <functional>
+#include <vector>
+#include <map>
+
+#include <hardware/audio_effect.h>
+#include <system/audio_effect.h>
+#include <vibrator/ExternalVibrationUtils.h>
+
+#include "Processors.h"
+
+namespace android::audio_effect::haptic_generator {
+
+//-----------------------------------------------------------------------------
+// Definition
+//-----------------------------------------------------------------------------
+
+enum hapticgenerator_state_t {
+    HAPTICGENERATOR_STATE_UNINITIALIZED,
+    HAPTICGENERATOR_STATE_INITIALIZED,
+    HAPTICGENERATOR_STATE_ACTIVE,
+};
+
+// parameters for each haptic generator
+struct HapticGeneratorParam {
+    uint32_t hapticChannelSource[2]; // The audio channels used to generate haptic channels.
+                                     // The first channel will be used to generate HAPTIC_A,
+                                     // the second channel will be used to generate HAPTIC_B.
+                                     // The value is the offset of the source audio channel.
+    uint32_t audioChannelCount;
+    uint32_t hapticChannelCount;
+
+    // A map from track id to haptic intensity.
+    std::map<int, os::HapticScale> id2Intensity;
+    os::HapticScale maxHapticIntensity; // max intensity will be used to scale haptic data.
+};
+
+// A structure to keep all shared pointers for all processors in HapticGenerator.
+struct HapticGeneratorProcessorsRecord {
+    std::vector<std::shared_ptr<HapticBiquadFilter>> filters;
+    std::vector<std::shared_ptr<Ramp>> ramps;
+    std::vector<std::shared_ptr<SlowEnvelope>> slowEnvs;
+};
+
+// A structure to keep all the context for HapticGenerator.
+struct HapticGeneratorContext {
+    const struct effect_interface_s *itfe;
+    effect_config_t config;
+    hapticgenerator_state_t state;
+    struct HapticGeneratorParam param;
+    size_t audioDataBytesPerFrame;
+
+    // A cache for all shared pointers of the HapticGenerator
+    struct HapticGeneratorProcessorsRecord processorsRecord;
+
+    // A vector of functions recording the processing chain of the haptic-generating algorithm.
+    // The three parameters of the processing functions are pointer to output buffer, pointer to
+    // input buffer and frame count.
+    std::vector<std::function<void(float*, const float*, size_t)>> processingChain;
+
+    // inputBuffer holds the input of the generating algorithm. It is constructed
+    // according to HapticGeneratorParam.hapticChannelSource.
+    std::vector<float> inputBuffer;
+
+    // outputBuffer has the same length as inputBuffer. It is used as an
+    // intermediate buffer by the generating algorithm.
+    std::vector<float> outputBuffer;
+};
+
+//-----------------------------------------------------------------------------
+// Effect API
+//-----------------------------------------------------------------------------
+
+int32_t HapticGeneratorLib_Create(const effect_uuid_t *uuid,
+                                  int32_t sessionId,
+                                  int32_t ioId,
+                                  effect_handle_t *handle);
+
+int32_t HapticGeneratorLib_Release(effect_handle_t handle);
+
+int32_t HapticGeneratorLib_GetDescriptor(const effect_uuid_t *uuid,
+                                         effect_descriptor_t *descriptor);
+
+int32_t HapticGenerator_Process(effect_handle_t self,
+                                audio_buffer_t *inBuffer,
+                                audio_buffer_t *outBuffer);
+
+int32_t HapticGenerator_Command(effect_handle_t self,
+                                uint32_t cmdCode,
+                                uint32_t cmdSize,
+                                void *cmdData,
+                                uint32_t *replySize,
+                                void *replyData);
+
+int32_t HapticGenerator_GetDescriptor(effect_handle_t self,
+                                      effect_descriptor_t *descriptor);
+
+} // namespace android::audio_effect::haptic_generator
+
+#endif // ANDROID_EFFECTHAPTICGENERATOR_H_
diff --git a/media/libeffects/hapticgenerator/MODULE_LICENSE_APACHE2 b/media/libeffects/hapticgenerator/MODULE_LICENSE_APACHE2
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/media/libeffects/hapticgenerator/MODULE_LICENSE_APACHE2
diff --git a/media/libeffects/hapticgenerator/Processors.cpp b/media/libeffects/hapticgenerator/Processors.cpp
new file mode 100644
index 0000000..3157b35
--- /dev/null
+++ b/media/libeffects/hapticgenerator/Processors.cpp
@@ -0,0 +1,234 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "EffectHG_Processors"
+//#define LOG_NDEBUG 0
+#include <utils/Log.h>
+
+#include <assert.h>
+
+#include <cmath>
+
+#include "Processors.h"
+
+#if defined(__aarch64__) || defined(__ARM_NEON__)
+#ifndef USE_NEON
+#define USE_NEON (true)
+#endif
+#else
+#define USE_NEON (false)
+#endif
+#if USE_NEON
+#include <arm_neon.h>
+#endif
+
+namespace android::audio_effect::haptic_generator {
+
+float getRealPoleZ(float cornerFrequency, float sampleRate) {
+    // This will be a pole of a first order filter.
+    float realPoleS = -2 * M_PI * cornerFrequency;
+    return exp(realPoleS / sampleRate); // zero-pole matching
+}
+
+std::pair<float, float> getComplexPoleZ(float ringingFrequency, float q, float sampleRate) {
+    // This is the pole for 1/(s^2 + s/q + 1) in normalized frequency. The other pole is
+    // the complex conjugate of this.
+    float poleImagS = 2 * M_PI * ringingFrequency;
+    float poleRealS = -poleImagS / (2 * q);
+    float poleRadius = exp(poleRealS / sampleRate);
+    float poleImagZ = poleRadius * sin(poleImagS / sampleRate);
+    float poleRealZ = poleRadius * cos(poleImagS / sampleRate);
+    return {poleRealZ, poleImagZ};
+}
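
In other words, both helpers above map an analog pole to the z-plane by pole/zero matching (z = e^{sT}, T = 1/fs). For the complex case, in terms of the quantities computed in getComplexPoleZ():

\[
    s = -\frac{\omega_0}{2Q} + j\,\omega_0, \qquad \omega_0 = 2\pi f_{\mathrm{ring}}, \qquad
    z = e^{s/f_s} = \underbrace{e^{\operatorname{Re}(s)/f_s}}_{\text{poleRadius}}
        \left(\cos\frac{\omega_0}{f_s} + j\,\sin\frac{\omega_0}{f_s}\right)
\]

and the returned pair is (Re z, Im z).
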
+
+// Implementation of Ramp
+
+Ramp::Ramp(size_t channelCount) : mChannelCount(channelCount) {}
+
+void Ramp::process(float *out, const float *in, size_t frameCount) {
+    size_t i = 0;
+#if USE_NEON
+    size_t sampleCount = frameCount * mChannelCount;
+    float32x2_t allZero = vdup_n_f32(0.0f);
+    while (i + 1 < sampleCount) {
+        vst1_f32(out, vmax_f32(vld1_f32(in), allZero));
+        in += 2;
+        out += 2;
+        i += 2;
+    }
+#endif // USE_NEON
+    for (; i < frameCount * mChannelCount; ++i) {
+        *out = *in >= 0.0f ? *in : 0.0f;
+        out++;
+        in++;
+    }
+}
+
+// Implementation of SlowEnvelope
+
+SlowEnvelope::SlowEnvelope(
+        float cornerFrequency,
+        float sampleRate,
+        float normalizationPower,
+        size_t channelCount)
+        : mLpf(createLPF(cornerFrequency, sampleRate, channelCount)),
+          mNormalizationPower(normalizationPower),
+          mChannelCount(channelCount),
+          mEnv(0.25 * (sampleRate / (2 * M_PI * cornerFrequency))) {}
+
+void SlowEnvelope::process(float* out, const float* in, size_t frameCount) {
+    size_t sampleCount = frameCount * mChannelCount;
+    if (sampleCount > mLpfInBuffer.size()) {
+        mLpfInBuffer.resize(sampleCount, mEnv);
+        mLpfOutBuffer.resize(sampleCount);
+    }
+    mLpf->process(mLpfOutBuffer.data(), mLpfInBuffer.data(), frameCount);
+    for (size_t i = 0; i < sampleCount; ++i) {
+        *out = *in * pow(mLpfOutBuffer[i], mNormalizationPower);
+        out++;
+        in++;
+    }
+}
+
+void SlowEnvelope::clear() {
+    mLpf->clear();
+}
+
+// Implementation of helper functions
+
+BiquadFilterCoefficients cascadeFirstOrderFilters(const BiquadFilterCoefficients &coefs1,
+                                                   const BiquadFilterCoefficients &coefs2) {
+    assert(coefs1[2] == 0.0f);
+    assert(coefs2[2] == 0.0f);
+    assert(coefs1[4] == 0.0f);
+    assert(coefs2[4] == 0.0f);
+    return {coefs1[0] * coefs2[0],
+            coefs1[0] * coefs2[1] + coefs1[1] * coefs2[0],
+            coefs1[1] * coefs2[1],
+            coefs1[3] + coefs2[3],
+            coefs1[3] * coefs2[3]};
+}
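
Spelled out, this is polynomial multiplication of two first-order sections in the {b0, b1, b2, a1, a2} convention that the pole placement in lpfCoefs() implies (denominator 1 + a1 z^-1 + a2 z^-2); a sketch of the algebra, not new behavior:

\[
    \frac{(b_{1,0} + b_{1,1} z^{-1})(b_{2,0} + b_{2,1} z^{-1})}
         {(1 + a_{1,1} z^{-1})(1 + a_{2,1} z^{-1})}
  = \frac{b_{1,0}b_{2,0} + (b_{1,0}b_{2,1} + b_{1,1}b_{2,0})\,z^{-1} + b_{1,1}b_{2,1}\,z^{-2}}
         {1 + (a_{1,1} + a_{2,1})\,z^{-1} + a_{1,1}a_{2,1}\,z^{-2}}
\]
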
+
+BiquadFilterCoefficients lpfCoefs(const float cornerFrequency, const float sampleRate) {
+    BiquadFilterCoefficients coefficient;
+    float realPoleZ = getRealPoleZ(cornerFrequency, sampleRate);
+    // This is a zero at Nyquist
+    coefficient[0] = 0.5f * (1 - realPoleZ);
+    coefficient[1] = coefficient[0];
+    coefficient[2] = 0.0f;
+    coefficient[3] = -realPoleZ; // This is the traditional 1/(s+1) filter
+    coefficient[4] = 0.0f;
+    return coefficient;
+}
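
For reference, the resulting first-order section (with p = getRealPoleZ(cornerFrequency, sampleRate)) is:

\[
    H(z) = \frac{0.5\,(1 - p)\,(1 + z^{-1})}{1 - p\,z^{-1}},
    \qquad H(1) = 1 \ \text{(unity gain at DC)},
    \qquad H(-1) = 0 \ \text{(the zero at Nyquist)}
\]
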
+
+std::shared_ptr<HapticBiquadFilter> createLPF(const float cornerFrequency,
+                                        const float sampleRate,
+                                        const size_t channelCount) {
+    BiquadFilterCoefficients coefficient = lpfCoefs(cornerFrequency, sampleRate);
+    return std::make_shared<HapticBiquadFilter>(channelCount, coefficient);
+}
+
+std::shared_ptr<HapticBiquadFilter> createLPF2(const float cornerFrequency,
+                                         const float sampleRate,
+                                         const size_t channelCount) {
+    BiquadFilterCoefficients coefficient = lpfCoefs(cornerFrequency, sampleRate);
+    return std::make_shared<HapticBiquadFilter>(
+            channelCount, cascadeFirstOrderFilters(coefficient, coefficient));
+}
+
+std::shared_ptr<HapticBiquadFilter> createHPF2(const float cornerFrequency,
+                                         const float sampleRate,
+                                         const size_t channelCount) {
+    BiquadFilterCoefficients coefficient;
+    // Note: this is valid only when the corner frequency is less than Nyquist / 2.
+    float realPoleZ = getRealPoleZ(cornerFrequency, sampleRate);
+
+    // Note: this is a zero at DC
+    coefficient[0] = 0.5f * (1 + realPoleZ);
+    coefficient[1] = -coefficient[0];
+    coefficient[2] = 0.0f;
+    coefficient[3] = -realPoleZ;
+    coefficient[4] = 0.0f;
+    return std::make_shared<HapticBiquadFilter>(
+            channelCount, cascadeFirstOrderFilters(coefficient, coefficient));
+}
+
+BiquadFilterCoefficients apfCoefs(const float cornerFrequency, const float sampleRate) {
+    BiquadFilterCoefficients coefficient;
+    float realPoleZ = getRealPoleZ(cornerFrequency, sampleRate);
+    float zeroZ = 1.0f / realPoleZ;
+    coefficient[0] = (1.0f - realPoleZ) / (1.0f - zeroZ);
+    coefficient[1] = -coefficient[0] * zeroZ;
+    coefficient[2] = 0.0f;
+    coefficient[3] = -realPoleZ;
+    coefficient[4] = 0.0f;
+    return coefficient;
+}
+
+std::shared_ptr<HapticBiquadFilter> createAPF(const float cornerFrequency,
+                                        const float sampleRate,
+                                        const size_t channelCount) {
+    BiquadFilterCoefficients coefficient = apfCoefs(cornerFrequency, sampleRate);
+    return std::make_shared<HapticBiquadFilter>(channelCount, coefficient);
+}
+
+std::shared_ptr<HapticBiquadFilter> createAPF2(const float cornerFrequency1,
+                                         const float cornerFrequency2,
+                                         const float sampleRate,
+                                         const size_t channelCount) {
+    BiquadFilterCoefficients coefs1 = apfCoefs(cornerFrequency1, sampleRate);
+    BiquadFilterCoefficients coefs2 = apfCoefs(cornerFrequency2, sampleRate);
+    return std::make_shared<HapticBiquadFilter>(
+            channelCount, cascadeFirstOrderFilters(coefs1, coefs2));
+}
+
+std::shared_ptr<HapticBiquadFilter> createBPF(const float ringingFrequency,
+                                        const float q,
+                                        const float sampleRate,
+                                        const size_t channelCount) {
+    BiquadFilterCoefficients coefficient;
+    const auto [real, img] = getComplexPoleZ(ringingFrequency, q, sampleRate);
+    // Note: this is not a standard cookbook BPF, but a low pass filter with zero at DC
+    coefficient[0] = 1.0f;
+    coefficient[1] = -1.0f;
+    coefficient[2] = 0.0f;
+    coefficient[3] = -2 * real;
+    coefficient[4] = real * real + img * img;
+    return std::make_shared<HapticBiquadFilter>(channelCount, coefficient);
+}
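
Written as a transfer function, with p the complex pole returned by getComplexPoleZ():

\[
    H(z) = \frac{1 - z^{-1}}{1 - 2\operatorname{Re}(p)\,z^{-1} + |p|^{2}\,z^{-2}}
\]

i.e. a resonant second-order low-pass with a single zero at DC, as the comment notes.
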
+
+std::shared_ptr<HapticBiquadFilter> createBSF(const float ringingFrequency,
+                                        const float zq,
+                                        const float pq,
+                                        const float sampleRate,
+                                        const size_t channelCount) {
+    BiquadFilterCoefficients coefficient;
+    const auto [zeroReal, zeroImg] = getComplexPoleZ(ringingFrequency, zq, sampleRate);
+    float zeroCoeff1 = -2 * zeroReal;
+    float zeroCoeff2 = zeroReal * zeroReal + zeroImg * zeroImg;
+    const auto [poleReal, poleImg] = getComplexPoleZ(ringingFrequency, pq, sampleRate);
+    float poleCoeff1 = -2 * poleReal;
+    float poleCoeff2 = poleReal * poleReal + poleImg * poleImg;
+    const float norm = (1.0f + poleCoeff1 + poleCoeff2) / (1.0f + zeroCoeff1 + zeroCoeff2);
+    coefficient[0] = 1.0f * norm;
+    coefficient[1] = zeroCoeff1 * norm;
+    coefficient[2] = zeroCoeff2 * norm;
+    coefficient[3] = poleCoeff1;
+    coefficient[4] = poleCoeff2;
+    return std::make_shared<HapticBiquadFilter>(channelCount, coefficient);
+}
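
The norm factor above simply forces unity gain at DC. Writing c_{z1}, c_{z2} for the zero coefficients and c_{p1}, c_{p2} for the pole coefficients computed above:

\[
    H(z) = g\,\frac{1 + c_{z1} z^{-1} + c_{z2} z^{-2}}{1 + c_{p1} z^{-1} + c_{p2} z^{-2}},
    \qquad g = \frac{1 + c_{p1} + c_{p2}}{1 + c_{z1} + c_{z2}}
    \ \Longrightarrow\ H(1) = 1
\]
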
+
+} // namespace android::audio_effect::haptic_generator
diff --git a/media/libeffects/hapticgenerator/Processors.h b/media/libeffects/hapticgenerator/Processors.h
new file mode 100644
index 0000000..5cf0557
--- /dev/null
+++ b/media/libeffects/hapticgenerator/Processors.h
@@ -0,0 +1,104 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _EFFECT_HAPTIC_GENERATOR_PROCESSORS_H_
+#define _EFFECT_HAPTIC_GENERATOR_PROCESSORS_H_
+
+#include <sys/types.h>
+
+#include <memory>
+#include <vector>
+
+#include <audio_utils/BiquadFilter.h>
+
+using HapticBiquadFilter = android::audio_utils::BiquadFilter<float>;
+using BiquadFilterCoefficients = std::array<float, android::audio_utils::kBiquadNumCoefs>;
+
+namespace android::audio_effect::haptic_generator {
+
+// A class providing a process function that makes input data non-negative.
+class Ramp {
+public:
+    explicit Ramp(size_t channelCount);
+
+    void process(float *out, const float *in, size_t frameCount);
+
+private:
+    const size_t mChannelCount;
+};
+
+
+class SlowEnvelope {
+public:
+    SlowEnvelope(float cornerFrequency, float sampleRate,
+                 float normalizationPower, size_t channelCount);
+
+    void process(float *out, const float *in, size_t frameCount);
+
+    void clear();
+
+private:
+    const std::shared_ptr<HapticBiquadFilter> mLpf;
+    std::vector<float> mLpfInBuffer;
+    std::vector<float> mLpfOutBuffer;
+    const float mNormalizationPower;
+    const size_t mChannelCount;
+    const float mEnv;
+};
+
+// Helper functions
+
+BiquadFilterCoefficients cascadeFirstOrderFilters(const BiquadFilterCoefficients &coefs1,
+                                                  const BiquadFilterCoefficients &coefs2);
+
+std::shared_ptr<HapticBiquadFilter> createLPF(const float cornerFrequency,
+                                        const float sampleRate,
+                                        const size_t channelCount);
+
+// Create two cascaded LPFs with the same corner frequency.
+std::shared_ptr<HapticBiquadFilter> createLPF2(const float cornerFrequency,
+                                         const float sampleRate,
+                                         const size_t channelCount);
+
+// Create two cascaded HPFs with the same corner frequency.
+std::shared_ptr<HapticBiquadFilter> createHPF2(const float cornerFrequency,
+                                         const float sampleRate,
+                                         const size_t channelCount);
+
+std::shared_ptr<HapticBiquadFilter> createAPF(const float cornerFrequency,
+                                        const float sampleRate,
+                                        const size_t channelCount);
+
+// Create two cascaded APFs with two different corner frequencies.
+std::shared_ptr<HapticBiquadFilter> createAPF2(const float cornerFrequency1,
+                                         const float cornerFrequency2,
+                                         const float sampleRate,
+                                         const size_t channelCount);
+
+std::shared_ptr<HapticBiquadFilter> createBPF(const float ringingFrequency,
+                                        const float q,
+                                        const float sampleRate,
+                                        const size_t channelCount);
+
+std::shared_ptr<HapticBiquadFilter> createBSF(const float ringingFrequency,
+                                        const float zq,
+                                        const float pq,
+                                        const float sampleRate,
+                                        const size_t channelCount);
+
+} // namespace android::audio_effect::haptic_generator
+
+#endif // _EFFECT_HAPTIC_GENERATOR_PROCESSORS_H_
diff --git a/media/libeffects/lvm/.clang-format b/media/libeffects/lvm/.clang-format
new file mode 100644
index 0000000..6f4b13e
--- /dev/null
+++ b/media/libeffects/lvm/.clang-format
@@ -0,0 +1,15 @@
+BasedOnStyle: Google
+Standard: Cpp11
+AccessModifierOffset: -2
+AllowShortFunctionsOnASingleLine: Inline
+ColumnLimit: 100
+CommentPragmas: NOLINT:.*
+DerivePointerAlignment: false
+IncludeBlocks: Preserve
+IndentWidth: 4
+ContinuationIndentWidth: 8
+PointerAlignment: Left
+TabWidth: 4
+UseTab: Never
+# Following are specific to libeffects/lvm
+SortIncludes: false
diff --git a/media/libeffects/lvm/benchmarks/Android.bp b/media/libeffects/lvm/benchmarks/Android.bp
new file mode 100644
index 0000000..420e172
--- /dev/null
+++ b/media/libeffects/lvm/benchmarks/Android.bp
@@ -0,0 +1,16 @@
+cc_benchmark {
+    name: "lvm_benchmark",
+    vendor: true,
+    srcs: ["lvm_benchmark.cpp"],
+    static_libs: [
+        "libbundlewrapper",
+        "libmusicbundle",
+    ],
+    shared_libs: [
+        "libaudioutils",
+        "liblog",
+    ],
+    header_libs: [
+        "libhardware_headers",
+    ],
+}
diff --git a/media/libeffects/lvm/benchmarks/lvm_benchmark.cpp b/media/libeffects/lvm/benchmarks/lvm_benchmark.cpp
new file mode 100644
index 0000000..ee9da3f
--- /dev/null
+++ b/media/libeffects/lvm/benchmarks/lvm_benchmark.cpp
@@ -0,0 +1,168 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <array>
+#include <climits>
+#include <cstdlib>
+#include <random>
+#include <vector>
+#include <log/log.h>
+#include <benchmark/benchmark.h>
+#include <hardware/audio_effect.h>
+#include <system/audio.h>
+
+extern audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM;
+constexpr effect_uuid_t kEffectUuids[] = {
+        // NXP SW BassBoost
+        {0x8631f300, 0x72e2, 0x11df, 0xb57e, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},
+        // NXP SW Virtualizer
+        {0x1d4033c0, 0x8557, 0x11df, 0x9f2d, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},
+        // NXP SW Equalizer
+        {0xce772f20, 0x847d, 0x11df, 0xbb17, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},
+        // NXP SW Volume
+        {0x119341a0, 0x8469, 0x11df, 0x81f9, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},
+};
+
+constexpr size_t kNumEffectUuids = std::size(kEffectUuids);
+
+constexpr size_t kFrameCount = 2048;
+
+constexpr audio_channel_mask_t kChMasks[] = {
+        AUDIO_CHANNEL_OUT_MONO,    AUDIO_CHANNEL_OUT_STEREO,  AUDIO_CHANNEL_OUT_2POINT1,
+        AUDIO_CHANNEL_OUT_QUAD,    AUDIO_CHANNEL_OUT_PENTA,   AUDIO_CHANNEL_OUT_5POINT1,
+        AUDIO_CHANNEL_OUT_6POINT1, AUDIO_CHANNEL_OUT_7POINT1,
+};
+
+constexpr size_t kNumChMasks = std::size(kChMasks);
+constexpr int kSampleRate = 44100;
+// TODO(b/131240940) Remove once effects are updated to produce mono output
+constexpr size_t kMinOutputChannelCount = 2;
+
+/*******************************************************************
+ * A test result running on Pixel 3 for comparison.
+ * The first parameter indicates the number of channels.
+ * The second parameter indicates the effect.
+ * 0: Bass Boost, 1: Virtualizer, 2: Equalizer, 3: Volume
+ * -----------------------------------------------------
+ * Benchmark           Time             CPU   Iterations
+ * -----------------------------------------------------
+ * BM_LVM/2/0     131279 ns       130855 ns         5195
+ * BM_LVM/2/1     184814 ns       184219 ns         3799
+ * BM_LVM/2/2      91935 ns        91649 ns         7647
+ * BM_LVM/2/3      26707 ns        26623 ns        26281
+ * BM_LVM/3/0     172130 ns       171562 ns         4085
+ * BM_LVM/3/1     192443 ns       191923 ns         3644
+ * BM_LVM/3/2     127444 ns       127107 ns         5483
+ * BM_LVM/3/3      26811 ns        26730 ns        26163
+ * BM_LVM/4/0     223688 ns       223076 ns         3133
+ * BM_LVM/4/1     204961 ns       204408 ns         3425
+ * BM_LVM/4/2     169162 ns       168708 ns         4143
+ * BM_LVM/4/3      37330 ns        37225 ns        18795
+ * BM_LVM/5/0     272628 ns       271668 ns         2568
+ * BM_LVM/5/1     218487 ns       217883 ns         3212
+ * BM_LVM/5/2     211049 ns       210479 ns         3324
+ * BM_LVM/5/3      46962 ns        46835 ns        15051
+ * BM_LVM/6/0     318881 ns       317734 ns         2216
+ * BM_LVM/6/1     231899 ns       231244 ns         3028
+ * BM_LVM/6/2     252655 ns       251963 ns         2771
+ * BM_LVM/6/3      54944 ns        54794 ns        12799
+ * BM_LVM/7/0     366622 ns       365262 ns         1916
+ * BM_LVM/7/1     245076 ns       244388 ns         2866
+ * BM_LVM/7/2     295105 ns       294304 ns         2379
+ * BM_LVM/7/3      63595 ns        63420 ns        11070
+ * BM_LVM/8/0     410957 ns       409387 ns         1706
+ * BM_LVM/8/1     257824 ns       257098 ns         2723
+ * BM_LVM/8/2     342546 ns       341530 ns         2059
+ * BM_LVM/8/3      72896 ns        72700 ns         9685
+ *******************************************************************/
+
+static void BM_LVM(benchmark::State& state) {
+    const size_t chMask = kChMasks[state.range(0) - 1];
+    const effect_uuid_t uuid = kEffectUuids[state.range(1)];
+    const size_t channelCount = audio_channel_count_from_out_mask(chMask);
+
+    // Initialize input buffer with deterministic pseudo-random values
+    std::minstd_rand gen(chMask);
+    std::uniform_real_distribution<> dis(-1.0f, 1.0f);
+    std::vector<float> input(kFrameCount * channelCount);
+    for (auto& in : input) {
+        in = dis(gen);
+    }
+
+    effect_handle_t effectHandle = nullptr;
+    if (int status = AUDIO_EFFECT_LIBRARY_INFO_SYM.create_effect(&uuid, 1, 1, &effectHandle);
+        status != 0) {
+        ALOGE("create_effect returned an error = %d\n", status);
+        return;
+    }
+
+    effect_config_t config{};
+    config.inputCfg.samplingRate = config.outputCfg.samplingRate = kSampleRate;
+    config.inputCfg.channels = config.outputCfg.channels = chMask;
+    config.inputCfg.format = config.outputCfg.format = AUDIO_FORMAT_PCM_FLOAT;
+
+    int reply = 0;
+    uint32_t replySize = sizeof(reply);
+    if (int status = (*effectHandle)
+                             ->command(effectHandle, EFFECT_CMD_SET_CONFIG, sizeof(effect_config_t),
+                                       &config, &replySize, &reply);
+        status != 0) {
+        ALOGE("command returned an error = %d\n", status);
+        return;
+    }
+
+    if (int status =
+                (*effectHandle)
+                        ->command(effectHandle, EFFECT_CMD_ENABLE, 0, nullptr, &replySize, &reply);
+        status != 0) {
+        ALOGE("Command enable call returned an error = %d\n", status);
+        return;
+    }
+
+    // Run the test
+    for (auto _ : state) {
+        std::vector<float> output(kFrameCount * std::max(channelCount, kMinOutputChannelCount));
+
+        benchmark::DoNotOptimize(input.data());
+        benchmark::DoNotOptimize(output.data());
+
+        audio_buffer_t inBuffer = {.frameCount = kFrameCount, .f32 = input.data()};
+        audio_buffer_t outBuffer = {.frameCount = kFrameCount, .f32 = output.data()};
+        (*effectHandle)->process(effectHandle, &inBuffer, &outBuffer);
+
+        benchmark::ClobberMemory();
+    }
+
+    state.SetComplexityN(state.range(0));
+
+    if (int status = AUDIO_EFFECT_LIBRARY_INFO_SYM.release_effect(effectHandle); status != 0) {
+        ALOGE("release_effect returned an error = %d\n", status);
+        return;
+    }
+}
+
+static void LVMArgs(benchmark::internal::Benchmark* b) {
+    // TODO(b/131240940) Test single channel once effects are updated to process mono data
+    for (int i = 2; i <= kNumChMasks; i++) {
+        for (int j = 0; j < kNumEffectUuids; ++j) {
+            b->Args({i, j});
+        }
+    }
+}
+
+BENCHMARK(BM_LVM)->Apply(LVMArgs);
+
+BENCHMARK_MAIN();
diff --git a/media/libeffects/lvm/lib/Android.bp b/media/libeffects/lvm/lib/Android.bp
index 1f2a5e1..8f2f016 100644
--- a/media/libeffects/lvm/lib/Android.bp
+++ b/media/libeffects/lvm/lib/Android.bp
@@ -30,7 +30,6 @@
         "Bundle/src/LVM_Control.cpp",
         "SpectrumAnalyzer/src/LVPSA_Control.cpp",
         "SpectrumAnalyzer/src/LVPSA_Init.cpp",
-        "SpectrumAnalyzer/src/LVPSA_Memory.cpp",
         "SpectrumAnalyzer/src/LVPSA_Process.cpp",
         "SpectrumAnalyzer/src/LVPSA_QPD_Init.cpp",
         "SpectrumAnalyzer/src/LVPSA_QPD_Process.cpp",
@@ -133,11 +132,10 @@
         "liblog",
     ],
     header_libs: [
-        "libhardware_headers"
+        "libhardware_headers",
     ],
     cppflags: [
         "-fvisibility=hidden",
-        "-DSUPPORT_MC",
 
         "-Wall",
         "-Werror",
diff --git a/media/libeffects/lvm/lib/Bass/lib/LVDBE.h b/media/libeffects/lvm/lib/Bass/lib/LVDBE.h
index 948d79c..e60ad07 100644
--- a/media/libeffects/lvm/lib/Bass/lib/LVDBE.h
+++ b/media/libeffects/lvm/lib/Bass/lib/LVDBE.h
@@ -69,15 +69,12 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-/* Memory table*/
-#define LVDBE_NR_MEMORY_REGIONS        4                            /* Number of memory regions */
-
 /* Bass Enhancement effect level */
-#define LVDBE_EFFECT_03DB            3                              /* Effect defines for backwards compatibility */
-#define LVDBE_EFFECT_06DB            6
-#define LVDBE_EFFECT_09DB            9
-#define LVDBE_EFFECT_12DB            12
-#define LVDBE_EFFECT_15DB            15
+#define LVDBE_EFFECT_03DB 3 /* Effect defines for backwards compatibility */
+#define LVDBE_EFFECT_06DB 6
+#define LVDBE_EFFECT_09DB 9
+#define LVDBE_EFFECT_12DB 12
+#define LVDBE_EFFECT_15DB 15
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -86,52 +83,31 @@
 /****************************************************************************************/
 
 /* Instance handle */
-typedef void    *LVDBE_Handle_t;
+typedef void* LVDBE_Handle_t;
 
 /* Operating modes */
-typedef enum
-{
-    LVDBE_OFF      = 0,
-    LVDBE_ON       = 1,
-    LVDBE_MODE_MAX = LVM_MAXINT_32
-} LVDBE_Mode_en;
+typedef enum { LVDBE_OFF = 0, LVDBE_ON = 1, LVDBE_MODE_MAX = LVM_MAXINT_32 } LVDBE_Mode_en;
 
 /* High pass filter */
-typedef enum
-{
+typedef enum {
     LVDBE_HPF_OFF = 0,
-    LVDBE_HPF_ON  = 1,
+    LVDBE_HPF_ON = 1,
     LVDBE_HPF_MAX = LVM_MAXINT_32
 } LVDBE_FilterSelect_en;
 
 /* Volume control */
-typedef enum
-{
+typedef enum {
     LVDBE_VOLUME_OFF = 0,
-    LVDBE_VOLUME_ON  = 1,
+    LVDBE_VOLUME_ON = 1,
     LVDBE_VOLUME_MAX = LVM_MAXINT_32
 } LVDBE_Volume_en;
 
-/* Memory Types */
-typedef enum
-{
-    LVDBE_PERSISTENT      = 0,
-    LVDBE_PERSISTENT_DATA = 1,
-    LVDBE_PERSISTENT_COEF = 2,
-    LVDBE_SCRATCH         = 3,
-    LVDBE_MEMORY_MAX      = LVM_MAXINT_32
-
-} LVDBE_MemoryTypes_en;
-
 /* Function return status */
-typedef enum
-{
-    LVDBE_SUCCESS        = 0,                        /* Successful return from a routine */
-    LVDBE_ALIGNMENTERROR = 1,                        /* Memory alignment error */
-    LVDBE_NULLADDRESS    = 2,                        /* NULL allocation address */
-    LVDBE_TOOMANYSAMPLES = 3,                        /* Maximum block size exceeded */
-    LVDBE_SIZEERROR      = 4,                        /* Incorrect structure size */
-    LVDBE_STATUS_MAX     = LVM_MAXINT_32
+typedef enum {
+    LVDBE_SUCCESS = 0,        /* Successful return from a routine */
+    LVDBE_NULLADDRESS = 1,    /* NULL allocation address */
+    LVDBE_TOOMANYSAMPLES = 2, /* Maximum block size exceeded */
+    LVDBE_STATUS_MAX = LVM_MAXINT_32
 } LVDBE_ReturnStatus_en;
 
 /****************************************************************************************/
@@ -158,40 +134,38 @@
 /*
  * Bass Enhancement centre frequency
  */
-#define LVDBE_CAP_CENTRE_55Hz       1
-#define LVDBE_CAP_CENTRE_66Hz       2
-#define LVDBE_CAP_CENTRE_78Hz       4
-#define LVDBE_CAP_CENTRE_90Hz       8
+#define LVDBE_CAP_CENTRE_55Hz 1
+#define LVDBE_CAP_CENTRE_66Hz 2
+#define LVDBE_CAP_CENTRE_78Hz 4
+#define LVDBE_CAP_CENTRE_90Hz 8
 
-typedef enum
-{
+typedef enum {
     LVDBE_CENTRE_55HZ = 0,
     LVDBE_CENTRE_66HZ = 1,
     LVDBE_CENTRE_78HZ = 2,
     LVDBE_CENTRE_90HZ = 3,
-    LVDBE_CENTRE_MAX  = LVM_MAXINT_32
+    LVDBE_CENTRE_MAX = LVM_MAXINT_32
 } LVDBE_CentreFreq_en;
 
 /*
  * Supported sample rates in samples per second
  */
-#define LVDBE_CAP_FS_8000                1
-#define LVDBE_CAP_FS_11025               2
-#define LVDBE_CAP_FS_12000               4
-#define LVDBE_CAP_FS_16000               8
-#define LVDBE_CAP_FS_22050               16
-#define LVDBE_CAP_FS_24000               32
-#define LVDBE_CAP_FS_32000               64
-#define LVDBE_CAP_FS_44100               128
-#define LVDBE_CAP_FS_48000               256
-#define LVDBE_CAP_FS_88200               512
-#define LVDBE_CAP_FS_96000               1024
-#define LVDBE_CAP_FS_176400              2048
-#define LVDBE_CAP_FS_192000              4096
+#define LVDBE_CAP_FS_8000 1
+#define LVDBE_CAP_FS_11025 2
+#define LVDBE_CAP_FS_12000 4
+#define LVDBE_CAP_FS_16000 8
+#define LVDBE_CAP_FS_22050 16
+#define LVDBE_CAP_FS_24000 32
+#define LVDBE_CAP_FS_32000 64
+#define LVDBE_CAP_FS_44100 128
+#define LVDBE_CAP_FS_48000 256
+#define LVDBE_CAP_FS_88200 512
+#define LVDBE_CAP_FS_96000 1024
+#define LVDBE_CAP_FS_176400 2048
+#define LVDBE_CAP_FS_192000 4096
 
-typedef enum
-{
-    LVDBE_FS_8000  = 0,
+typedef enum {
+    LVDBE_FS_8000 = 0,
     LVDBE_FS_11025 = 1,
     LVDBE_FS_12000 = 2,
     LVDBE_FS_16000 = 3,
@@ -204,7 +178,7 @@
     LVDBE_FS_96000 = 10,
     LVDBE_FS_176400 = 11,
     LVDBE_FS_192000 = 12,
-    LVDBE_FS_MAX   = LVM_MAXINT_32
+    LVDBE_FS_MAX = LVM_MAXINT_32
 } LVDBE_Fs_en;
 
 /****************************************************************************************/
@@ -213,44 +187,25 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-/* Memory region definition */
-typedef struct
-{
-    LVM_UINT32                Size;                        /* Region size in bytes */
-    LVM_UINT16                Alignment;                  /* Region alignment in bytes */
-    LVDBE_MemoryTypes_en      Type;                       /* Region type */
-    void                      *pBaseAddress;              /* Pointer to the region base address */
-} LVDBE_MemoryRegion_t;
-
-/* Memory table containing the region definitions */
-typedef struct
-{
-    LVDBE_MemoryRegion_t    Region[LVDBE_NR_MEMORY_REGIONS];  /* One definition for each region */
-} LVDBE_MemTab_t;
-
 /* Parameter structure */
-typedef struct
-{
-    LVDBE_Mode_en           OperatingMode;
-    LVDBE_Fs_en             SampleRate;
-    LVM_INT16               EffectLevel;
-    LVDBE_CentreFreq_en     CentreFrequency;
-    LVDBE_FilterSelect_en   HPFSelect;
-    LVDBE_Volume_en         VolumeControl;
-    LVM_INT16               VolumedB;
-    LVM_INT16               HeadroomdB;
-#ifdef SUPPORT_MC
-    LVM_INT16               NrChannels;
-#endif
+typedef struct {
+    LVDBE_Mode_en OperatingMode;
+    LVDBE_Fs_en SampleRate;
+    LVM_INT16 EffectLevel;
+    LVDBE_CentreFreq_en CentreFrequency;
+    LVDBE_FilterSelect_en HPFSelect;
+    LVDBE_Volume_en VolumeControl;
+    LVM_INT16 VolumedB;
+    LVM_INT16 HeadroomdB;
+    LVM_INT16 NrChannels;
 
 } LVDBE_Params_t;
 
 /* Capability structure */
-typedef struct
-{
-      LVM_UINT16              SampleRate;               /* Sampling rate capabilities */
-      LVM_UINT16              CentreFrequency;          /* Centre frequency capabilities */
-      LVM_UINT16              MaxBlockSize;             /* Maximum block size in sample pairs */
+typedef struct {
+    LVM_UINT16 SampleRate;      /* Sampling rate capabilities */
+    LVM_UINT16 CentreFrequency; /* Centre frequency capabilities */
+    LVM_UINT16 MaxBlockSize;    /* Maximum block size in sample pairs */
 } LVDBE_Capabilities_t;
 
 /****************************************************************************************/
@@ -261,75 +216,39 @@
 
 /****************************************************************************************/
 /*                                                                                      */
-/* FUNCTION:                 LVDBE_Memory                                               */
-/*                                                                                      */
-/* DESCRIPTION:                                                                         */
-/*    This function is used for memory allocation and free. It can be called in         */
-/*    two ways:                                                                         */
-/*                                                                                      */
-/*        hInstance = NULL                Returns the memory requirements               */
-/*        hInstance = Instance handle        Returns the memory requirements and        */
-/*                                        allocated base addresses for the instance     */
-/*                                                                                      */
-/*    When this function is called for memory allocation (hInstance=NULL) the memory    */
-/*  base address pointers are NULL on return.                                           */
-/*                                                                                      */
-/*    When the function is called for free (hInstance = Instance Handle) the memory     */
-/*  table returns the allocated memory and base addresses used during initialisation.   */
-/*                                                                                      */
-/* PARAMETERS:                                                                          */
-/*  hInstance                Instance Handle                                            */
-/*  pMemoryTable             Pointer to an empty memory definition table                */
-/*    pCapabilities            Pointer to the default capabilites                       */
-/*                                                                                      */
-/* RETURNS:                                                                             */
-/*  LVDBE_SUCCESS            Succeeded                                                  */
-/*                                                                                      */
-/* NOTES:                                                                               */
-/*    1.    This function may be interrupted by the LVDBE_Process function              */
-/*                                                                                      */
-/****************************************************************************************/
-
-LVDBE_ReturnStatus_en LVDBE_Memory(LVDBE_Handle_t           hInstance,
-                                   LVDBE_MemTab_t           *pMemoryTable,
-                                   LVDBE_Capabilities_t     *pCapabilities);
-
-/****************************************************************************************/
-/*                                                                                      */
 /* FUNCTION:                 LVDBE_Init                                                 */
 /*                                                                                      */
 /* DESCRIPTION:                                                                         */
 /*    Create and initialisation function for the Bass Enhancement module                */
 /*                                                                                      */
-/*    This function can be used to create an algorithm instance by calling with         */
-/*    hInstance set to NULL. In this case the algorithm returns the new instance        */
-/*    handle.                                                                           */
-/*                                                                                      */
-/*    This function can be used to force a full re-initialisation of the algorithm      */
-/*    by calling with hInstance = Instance Handle. In this case the memory table        */
-/*    should be correct for the instance, this can be ensured by calling the function   */
-/*    LVDBE_Memory before calling this function.                                        */
-/*                                                                                      */
 /* PARAMETERS:                                                                          */
-/*  hInstance                  Instance handle                                          */
-/*  pMemoryTable             Pointer to the memory definition table                     */
+/*  phInstance               Pointer to instance handle                                 */
 /*  pCapabilities            Pointer to the initialisation capabilities                 */
+/*  pScratch                 Pointer to the bundle scratch buffer                       */
 /*                                                                                      */
 /* RETURNS:                                                                             */
-/*  LVDBE_SUCCESS                Initialisation succeeded                               */
-/*  LVDBE_ALIGNMENTERROR        Instance or scratch memory on incorrect alignment       */
-/*    LVDBE_NULLADDRESS            One or more memory has a NULL pointer                */
+/*  LVDBE_SUCCESS            Initialisation succeeded                                   */
+/*  LVDBE_NULLADDRESS        One or more memory has a NULL pointer - malloc failure     */
 /*                                                                                      */
 /* NOTES:                                                                               */
-/*  1.     The instance handle is the pointer to the base address of the first memory   */
-/*        region.                                                                       */
-/*    2.    This function must not be interrupted by the LVDBE_Process function         */
+/*  1.    This function must not be interrupted by the LVDBE_Process function           */
 /*                                                                                      */
 /****************************************************************************************/
+LVDBE_ReturnStatus_en LVDBE_Init(LVDBE_Handle_t* phInstance, LVDBE_Capabilities_t* pCapabilities,
+                                 void* pScratch);
 
-LVDBE_ReturnStatus_en LVDBE_Init(LVDBE_Handle_t             *phInstance,
-                                   LVDBE_MemTab_t           *pMemoryTable,
-                                   LVDBE_Capabilities_t     *pCapabilities);
+/****************************************************************************************/
+/*                                                                                      */
+/* FUNCTION:                 LVDBE_DeInit                                               */
+/*                                                                                      */
+/* DESCRIPTION:                                                                         */
+/*    Free the memories created during LVDBE_Init including instance handle             */
+/*                                                                                      */
+/* PARAMETERS:                                                                          */
+/*  phInstance               Pointer to instance handle                                 */
+/*                                                                                      */
+/****************************************************************************************/
+void LVDBE_DeInit(LVDBE_Handle_t* phInstance);
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -351,8 +270,7 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-LVDBE_ReturnStatus_en LVDBE_GetParameters(LVDBE_Handle_t        hInstance,
-                                            LVDBE_Params_t      *pParams);
+LVDBE_ReturnStatus_en LVDBE_GetParameters(LVDBE_Handle_t hInstance, LVDBE_Params_t* pParams);
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -374,8 +292,8 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-LVDBE_ReturnStatus_en LVDBE_GetCapabilities(LVDBE_Handle_t            hInstance,
-                                              LVDBE_Capabilities_t    *pCapabilities);
+LVDBE_ReturnStatus_en LVDBE_GetCapabilities(LVDBE_Handle_t hInstance,
+                                            LVDBE_Capabilities_t* pCapabilities);
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -414,8 +332,7 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-LVDBE_ReturnStatus_en LVDBE_Control(LVDBE_Handle_t      hInstance,
-                                      LVDBE_Params_t    *pParams);
+LVDBE_ReturnStatus_en LVDBE_Control(LVDBE_Handle_t hInstance, LVDBE_Params_t* pParams);
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -437,9 +354,7 @@
 /* NOTES:                                                                               */
 /*                                                                                      */
 /****************************************************************************************/
-LVDBE_ReturnStatus_en LVDBE_Process(LVDBE_Handle_t          hInstance,
-                                       const LVM_FLOAT      *pInData,
-                                       LVM_FLOAT            *pOutData,
-                                       LVM_UINT16           NumSamples);
+LVDBE_ReturnStatus_en LVDBE_Process(LVDBE_Handle_t hInstance, const LVM_FLOAT* pInData,
+                                    LVM_FLOAT* pOutData, LVM_UINT16 NumSamples);
 
 #endif /* __LVDBE_H__ */
diff --git a/media/libeffects/lvm/lib/Bass/src/LVDBE_Coeffs.h b/media/libeffects/lvm/lib/Bass/src/LVDBE_Coeffs.h
index b364dae..30e1692 100644
--- a/media/libeffects/lvm/lib/Bass/src/LVDBE_Coeffs.h
+++ b/media/libeffects/lvm/lib/Bass/src/LVDBE_Coeffs.h
@@ -24,7 +24,7 @@
 /*                                                                                  */
 /************************************************************************************/
 
-#define LVDBE_SCALESHIFT                                    10         /* As a power of 2 */
+#define LVDBE_SCALESHIFT 10 /* As a power of 2 */
 
 /************************************************************************************/
 /*                                                                                  */
@@ -32,289 +32,289 @@
 /*                                                                                  */
 /************************************************************************************/
 
- /* Coefficients for centre frequency 55Hz */
-#define HPF_Fs8000_Fc55_A0                        0.958849f
-#define HPF_Fs8000_Fc55_A1                        (-1.917698f)
-#define HPF_Fs8000_Fc55_A2                        0.958849f
-#define HPF_Fs8000_Fc55_B1                        (-1.939001f)
-#define HPF_Fs8000_Fc55_B2                        0.940807f
-#define HPF_Fs11025_Fc55_A0                       0.966909f
-#define HPF_Fs11025_Fc55_A1                       (-1.933818f)
-#define HPF_Fs11025_Fc55_A2                       0.966909f
-#define HPF_Fs11025_Fc55_B1                       (-1.955732f)
-#define HPF_Fs11025_Fc55_B2                       0.956690f
-#define HPF_Fs12000_Fc55_A0                       0.968650f
-#define HPF_Fs12000_Fc55_A1                       (-1.937300f)
-#define HPF_Fs12000_Fc55_A2                       0.968650f
-#define HPF_Fs12000_Fc55_B1                       (-1.959327f)
-#define HPF_Fs12000_Fc55_B2                       0.960138f
-#define HPF_Fs16000_Fc55_A0                       0.973588f
-#define HPF_Fs16000_Fc55_A1                       (-1.947176f)
-#define HPF_Fs16000_Fc55_A2                       0.973588f
-#define HPF_Fs16000_Fc55_B1                       (-1.969494f)
-#define HPF_Fs16000_Fc55_B2                       0.969952f
-#define HPF_Fs22050_Fc55_A0                       0.977671f
-#define HPF_Fs22050_Fc55_A1                       (-1.955343f)
-#define HPF_Fs22050_Fc55_A2                       0.977671f
-#define HPF_Fs22050_Fc55_B1                       (-1.977863f)
-#define HPF_Fs22050_Fc55_B2                       0.978105f
-#define HPF_Fs24000_Fc55_A0                       0.978551f
-#define HPF_Fs24000_Fc55_A1                       (-1.957102f)
-#define HPF_Fs24000_Fc55_A2                       0.978551f
-#define HPF_Fs24000_Fc55_B1                       (-1.979662f)
-#define HPF_Fs24000_Fc55_B2                       0.979866f
-#define HPF_Fs32000_Fc55_A0                       0.981042f
-#define HPF_Fs32000_Fc55_A1                       (-1.962084f)
-#define HPF_Fs32000_Fc55_A2                       0.981042f
-#define HPF_Fs32000_Fc55_B1                       (-1.984746f)
-#define HPF_Fs32000_Fc55_B2                       0.984861f
-#define HPF_Fs44100_Fc55_A0                       0.983097f
-#define HPF_Fs44100_Fc55_A1                       (-1.966194f)
-#define HPF_Fs44100_Fc55_A2                       0.983097f
-#define HPF_Fs44100_Fc55_B1                       (-1.988931f)
-#define HPF_Fs44100_Fc55_B2                       0.988992f
-#define HPF_Fs48000_Fc55_A0                       0.983539f
-#define HPF_Fs48000_Fc55_A1                       (-1.967079f)
-#define HPF_Fs48000_Fc55_A2                       0.983539f
-#define HPF_Fs48000_Fc55_B1                       (-1.989831f)
-#define HPF_Fs48000_Fc55_B2                       0.989882f
+/* Coefficients for centre frequency 55Hz */
+#define HPF_Fs8000_Fc55_A0 0.958849f
+#define HPF_Fs8000_Fc55_A1 (-1.917698f)
+#define HPF_Fs8000_Fc55_A2 0.958849f
+#define HPF_Fs8000_Fc55_B1 (-1.939001f)
+#define HPF_Fs8000_Fc55_B2 0.940807f
+#define HPF_Fs11025_Fc55_A0 0.966909f
+#define HPF_Fs11025_Fc55_A1 (-1.933818f)
+#define HPF_Fs11025_Fc55_A2 0.966909f
+#define HPF_Fs11025_Fc55_B1 (-1.955732f)
+#define HPF_Fs11025_Fc55_B2 0.956690f
+#define HPF_Fs12000_Fc55_A0 0.968650f
+#define HPF_Fs12000_Fc55_A1 (-1.937300f)
+#define HPF_Fs12000_Fc55_A2 0.968650f
+#define HPF_Fs12000_Fc55_B1 (-1.959327f)
+#define HPF_Fs12000_Fc55_B2 0.960138f
+#define HPF_Fs16000_Fc55_A0 0.973588f
+#define HPF_Fs16000_Fc55_A1 (-1.947176f)
+#define HPF_Fs16000_Fc55_A2 0.973588f
+#define HPF_Fs16000_Fc55_B1 (-1.969494f)
+#define HPF_Fs16000_Fc55_B2 0.969952f
+#define HPF_Fs22050_Fc55_A0 0.977671f
+#define HPF_Fs22050_Fc55_A1 (-1.955343f)
+#define HPF_Fs22050_Fc55_A2 0.977671f
+#define HPF_Fs22050_Fc55_B1 (-1.977863f)
+#define HPF_Fs22050_Fc55_B2 0.978105f
+#define HPF_Fs24000_Fc55_A0 0.978551f
+#define HPF_Fs24000_Fc55_A1 (-1.957102f)
+#define HPF_Fs24000_Fc55_A2 0.978551f
+#define HPF_Fs24000_Fc55_B1 (-1.979662f)
+#define HPF_Fs24000_Fc55_B2 0.979866f
+#define HPF_Fs32000_Fc55_A0 0.981042f
+#define HPF_Fs32000_Fc55_A1 (-1.962084f)
+#define HPF_Fs32000_Fc55_A2 0.981042f
+#define HPF_Fs32000_Fc55_B1 (-1.984746f)
+#define HPF_Fs32000_Fc55_B2 0.984861f
+#define HPF_Fs44100_Fc55_A0 0.983097f
+#define HPF_Fs44100_Fc55_A1 (-1.966194f)
+#define HPF_Fs44100_Fc55_A2 0.983097f
+#define HPF_Fs44100_Fc55_B1 (-1.988931f)
+#define HPF_Fs44100_Fc55_B2 0.988992f
+#define HPF_Fs48000_Fc55_A0 0.983539f
+#define HPF_Fs48000_Fc55_A1 (-1.967079f)
+#define HPF_Fs48000_Fc55_A2 0.983539f
+#define HPF_Fs48000_Fc55_B1 (-1.989831f)
+#define HPF_Fs48000_Fc55_B2 0.989882f
 
-#define HPF_Fs88200_Fc55_A0                       0.985818f
-#define HPF_Fs88200_Fc55_A1                       (-1.971636f)
-#define HPF_Fs88200_Fc55_A2                       0.985818f
-#define HPF_Fs88200_Fc55_B1                       (-1.994466f)
-#define HPF_Fs88200_Fc55_B2                       0.994481f
+#define HPF_Fs88200_Fc55_A0 0.985818f
+#define HPF_Fs88200_Fc55_A1 (-1.971636f)
+#define HPF_Fs88200_Fc55_A2 0.985818f
+#define HPF_Fs88200_Fc55_B1 (-1.994466f)
+#define HPF_Fs88200_Fc55_B2 0.994481f
 
-#define HPF_Fs96000_Fc55_A0                       0.986040f
-#define HPF_Fs96000_Fc55_A1                       (-1.972080f)
-#define HPF_Fs96000_Fc55_A2                       0.986040f
-#define HPF_Fs96000_Fc55_B1                       (-1.994915f)
-#define HPF_Fs96000_Fc55_B2                       0.994928f
+#define HPF_Fs96000_Fc55_A0 0.986040f
+#define HPF_Fs96000_Fc55_A1 (-1.972080f)
+#define HPF_Fs96000_Fc55_A2 0.986040f
+#define HPF_Fs96000_Fc55_B1 (-1.994915f)
+#define HPF_Fs96000_Fc55_B2 0.994928f
 
-#define HPF_Fs176400_Fc55_A0                      0.987183f
-#define HPF_Fs176400_Fc55_A1                      (-1.974366f)
-#define HPF_Fs176400_Fc55_A2                      0.987183f
-#define HPF_Fs176400_Fc55_B1                      (-1.997233f)
-#define HPF_Fs176400_Fc55_B2                      0.997237f
+#define HPF_Fs176400_Fc55_A0 0.987183f
+#define HPF_Fs176400_Fc55_A1 (-1.974366f)
+#define HPF_Fs176400_Fc55_A2 0.987183f
+#define HPF_Fs176400_Fc55_B1 (-1.997233f)
+#define HPF_Fs176400_Fc55_B2 0.997237f
 
-#define HPF_Fs192000_Fc55_A0                      0.987294f
-#define HPF_Fs192000_Fc55_A1                      (-1.974588f)
-#define HPF_Fs192000_Fc55_A2                      0.987294f
-#define HPF_Fs192000_Fc55_B1                      (-1.997458f)
-#define HPF_Fs192000_Fc55_B2                      0.997461f
+#define HPF_Fs192000_Fc55_A0 0.987294f
+#define HPF_Fs192000_Fc55_A1 (-1.974588f)
+#define HPF_Fs192000_Fc55_A2 0.987294f
+#define HPF_Fs192000_Fc55_B1 (-1.997458f)
+#define HPF_Fs192000_Fc55_B2 0.997461f
 
- /* Coefficients for centre frequency 66Hz */
-#define HPF_Fs8000_Fc66_A0                        0.953016f
-#define HPF_Fs8000_Fc66_A1                        (-1.906032f)
-#define HPF_Fs8000_Fc66_A2                        0.953016f
-#define HPF_Fs8000_Fc66_B1                        (-1.926810f)
-#define HPF_Fs8000_Fc66_B2                        0.929396f
-#define HPF_Fs11025_Fc66_A0                       0.962638f
-#define HPF_Fs11025_Fc66_A1                       (-1.925275f)
-#define HPF_Fs11025_Fc66_A2                       0.962638f
-#define HPF_Fs11025_Fc66_B1                       (-1.946881f)
-#define HPF_Fs11025_Fc66_B2                       0.948256f
-#define HPF_Fs12000_Fc66_A0                       0.964718f
-#define HPF_Fs12000_Fc66_A1                       (-1.929435f)
-#define HPF_Fs12000_Fc66_A2                       0.964718f
-#define HPF_Fs12000_Fc66_B1                       (-1.951196f)
-#define HPF_Fs12000_Fc66_B2                       0.952359f
-#define HPF_Fs16000_Fc66_A0                       0.970622f
-#define HPF_Fs16000_Fc66_A1                       (-1.941244f)
-#define HPF_Fs16000_Fc66_A2                       0.970622f
-#define HPF_Fs16000_Fc66_B1                       (-1.963394f)
-#define HPF_Fs16000_Fc66_B2                       0.964052f
-#define HPF_Fs22050_Fc66_A0                       0.975509f
-#define HPF_Fs22050_Fc66_A1                       (-1.951019f)
-#define HPF_Fs22050_Fc66_A2                       0.975509f
-#define HPF_Fs22050_Fc66_B1                       (-1.973436f)
-#define HPF_Fs22050_Fc66_B2                       0.973784f
-#define HPF_Fs24000_Fc66_A0                       0.976563f
-#define HPF_Fs24000_Fc66_A1                       (-1.953125f)
-#define HPF_Fs24000_Fc66_A2                       0.976563f
-#define HPF_Fs24000_Fc66_B1                       (-1.975594f)
-#define HPF_Fs24000_Fc66_B2                       0.975889f
-#define HPF_Fs32000_Fc66_A0                       0.979547f
-#define HPF_Fs32000_Fc66_A1                       (-1.959093f)
-#define HPF_Fs32000_Fc66_A2                       0.979547f
-#define HPF_Fs32000_Fc66_B1                       (-1.981695f)
-#define HPF_Fs32000_Fc66_B2                       0.981861f
-#define HPF_Fs44100_Fc66_A0                       0.982010f
-#define HPF_Fs44100_Fc66_A1                       (-1.964019f)
-#define HPF_Fs44100_Fc66_A2                       0.982010f
-#define HPF_Fs44100_Fc66_B1                       (-1.986718f)
-#define HPF_Fs44100_Fc66_B2                       0.986805f
-#define HPF_Fs48000_Fc66_A0                       0.982540f
-#define HPF_Fs48000_Fc66_A1                       (-1.965079f)
-#define HPF_Fs48000_Fc66_A2                       0.982540f
-#define HPF_Fs48000_Fc66_B1                       (-1.987797f)
-#define HPF_Fs48000_Fc66_B2                       0.987871f
+/* Coefficients for centre frequency 66Hz */
+#define HPF_Fs8000_Fc66_A0 0.953016f
+#define HPF_Fs8000_Fc66_A1 (-1.906032f)
+#define HPF_Fs8000_Fc66_A2 0.953016f
+#define HPF_Fs8000_Fc66_B1 (-1.926810f)
+#define HPF_Fs8000_Fc66_B2 0.929396f
+#define HPF_Fs11025_Fc66_A0 0.962638f
+#define HPF_Fs11025_Fc66_A1 (-1.925275f)
+#define HPF_Fs11025_Fc66_A2 0.962638f
+#define HPF_Fs11025_Fc66_B1 (-1.946881f)
+#define HPF_Fs11025_Fc66_B2 0.948256f
+#define HPF_Fs12000_Fc66_A0 0.964718f
+#define HPF_Fs12000_Fc66_A1 (-1.929435f)
+#define HPF_Fs12000_Fc66_A2 0.964718f
+#define HPF_Fs12000_Fc66_B1 (-1.951196f)
+#define HPF_Fs12000_Fc66_B2 0.952359f
+#define HPF_Fs16000_Fc66_A0 0.970622f
+#define HPF_Fs16000_Fc66_A1 (-1.941244f)
+#define HPF_Fs16000_Fc66_A2 0.970622f
+#define HPF_Fs16000_Fc66_B1 (-1.963394f)
+#define HPF_Fs16000_Fc66_B2 0.964052f
+#define HPF_Fs22050_Fc66_A0 0.975509f
+#define HPF_Fs22050_Fc66_A1 (-1.951019f)
+#define HPF_Fs22050_Fc66_A2 0.975509f
+#define HPF_Fs22050_Fc66_B1 (-1.973436f)
+#define HPF_Fs22050_Fc66_B2 0.973784f
+#define HPF_Fs24000_Fc66_A0 0.976563f
+#define HPF_Fs24000_Fc66_A1 (-1.953125f)
+#define HPF_Fs24000_Fc66_A2 0.976563f
+#define HPF_Fs24000_Fc66_B1 (-1.975594f)
+#define HPF_Fs24000_Fc66_B2 0.975889f
+#define HPF_Fs32000_Fc66_A0 0.979547f
+#define HPF_Fs32000_Fc66_A1 (-1.959093f)
+#define HPF_Fs32000_Fc66_A2 0.979547f
+#define HPF_Fs32000_Fc66_B1 (-1.981695f)
+#define HPF_Fs32000_Fc66_B2 0.981861f
+#define HPF_Fs44100_Fc66_A0 0.982010f
+#define HPF_Fs44100_Fc66_A1 (-1.964019f)
+#define HPF_Fs44100_Fc66_A2 0.982010f
+#define HPF_Fs44100_Fc66_B1 (-1.986718f)
+#define HPF_Fs44100_Fc66_B2 0.986805f
+#define HPF_Fs48000_Fc66_A0 0.982540f
+#define HPF_Fs48000_Fc66_A1 (-1.965079f)
+#define HPF_Fs48000_Fc66_A2 0.982540f
+#define HPF_Fs48000_Fc66_B1 (-1.987797f)
+#define HPF_Fs48000_Fc66_B2 0.987871f
 
-#define HPF_Fs88200_Fc66_A0                       0.985273f
-#define HPF_Fs88200_Fc66_A1                       (-1.970546f)
-#define HPF_Fs88200_Fc66_A2                       0.985273f
-#define HPF_Fs88200_Fc66_B1                       (-1.993359f)
-#define HPF_Fs88200_Fc66_B2                       0.993381f
+#define HPF_Fs88200_Fc66_A0 0.985273f
+#define HPF_Fs88200_Fc66_A1 (-1.970546f)
+#define HPF_Fs88200_Fc66_A2 0.985273f
+#define HPF_Fs88200_Fc66_B1 (-1.993359f)
+#define HPF_Fs88200_Fc66_B2 0.993381f
 
-#define HPF_Fs96000_Fc66_A0                       0.985539f
-#define HPF_Fs96000_Fc66_A1                       (-1.971077f)
-#define HPF_Fs96000_Fc66_A2                       0.985539f
-#define HPF_Fs96000_Fc66_B1                       (-1.993898f)
-#define HPF_Fs96000_Fc66_B2                       0.993917f
+#define HPF_Fs96000_Fc66_A0 0.985539f
+#define HPF_Fs96000_Fc66_A1 (-1.971077f)
+#define HPF_Fs96000_Fc66_A2 0.985539f
+#define HPF_Fs96000_Fc66_B1 (-1.993898f)
+#define HPF_Fs96000_Fc66_B2 0.993917f
 
-#define HPF_Fs176400_Fc66_A0                      0.986910f
-#define HPF_Fs176400_Fc66_A1                      (-1.973820f)
-#define HPF_Fs176400_Fc66_A2                      0.986910f
-#define HPF_Fs176400_Fc66_B1                      (-1.996679f)
-#define HPF_Fs176400_Fc66_B2                      0.996685f
+#define HPF_Fs176400_Fc66_A0 0.986910f
+#define HPF_Fs176400_Fc66_A1 (-1.973820f)
+#define HPF_Fs176400_Fc66_A2 0.986910f
+#define HPF_Fs176400_Fc66_B1 (-1.996679f)
+#define HPF_Fs176400_Fc66_B2 0.996685f
 
-#define HPF_Fs192000_Fc66_A0                      0.987043f
-#define HPF_Fs192000_Fc66_A1                      (-1.974086f)
-#define HPF_Fs192000_Fc66_A2                      0.987043f
-#define HPF_Fs192000_Fc66_B1                      (-1.996949f)
-#define HPF_Fs192000_Fc66_B2                      0.996954f
+#define HPF_Fs192000_Fc66_A0 0.987043f
+#define HPF_Fs192000_Fc66_A1 (-1.974086f)
+#define HPF_Fs192000_Fc66_A2 0.987043f
+#define HPF_Fs192000_Fc66_B1 (-1.996949f)
+#define HPF_Fs192000_Fc66_B2 0.996954f
 
 /* Coefficients for centre frequency 78Hz */
-#define HPF_Fs8000_Fc78_A0                        0.946693f
-#define HPF_Fs8000_Fc78_A1                        (-1.893387f)
-#define HPF_Fs8000_Fc78_A2                        0.946693f
-#define HPF_Fs8000_Fc78_B1                        (-1.913517f)
-#define HPF_Fs8000_Fc78_B2                        0.917105f
-#define HPF_Fs11025_Fc78_A0                       0.957999f
-#define HPF_Fs11025_Fc78_A1                       (-1.915998f)
-#define HPF_Fs11025_Fc78_A2                       0.957999f
-#define HPF_Fs11025_Fc78_B1                       (-1.937229f)
-#define HPF_Fs11025_Fc78_B2                       0.939140f
-#define HPF_Fs12000_Fc78_A0                       0.960446f
-#define HPF_Fs12000_Fc78_A1                       (-1.920892f)
-#define HPF_Fs12000_Fc78_A2                       0.960446f
-#define HPF_Fs12000_Fc78_B1                       (-1.942326f)
-#define HPF_Fs12000_Fc78_B2                       0.943944f
-#define HPF_Fs16000_Fc78_A0                       0.967397f
-#define HPF_Fs16000_Fc78_A1                       (-1.934794f)
-#define HPF_Fs16000_Fc78_A2                       0.967397f
-#define HPF_Fs16000_Fc78_B1                       (-1.956740f)
-#define HPF_Fs16000_Fc78_B2                       0.957656f
-#define HPF_Fs22050_Fc78_A0                       0.973156f
-#define HPF_Fs22050_Fc78_A1                       (-1.946313f)
-#define HPF_Fs22050_Fc78_A2                       0.973156f
-#define HPF_Fs22050_Fc78_B1                       (-1.968607f)
-#define HPF_Fs22050_Fc78_B2                       0.969092f
-#define HPF_Fs24000_Fc78_A0                       0.974398f
-#define HPF_Fs24000_Fc78_A1                       (-1.948797f)
-#define HPF_Fs24000_Fc78_A2                       0.974398f
-#define HPF_Fs24000_Fc78_B1                       (-1.971157f)
-#define HPF_Fs24000_Fc78_B2                       0.971568f
-#define HPF_Fs32000_Fc78_A0                       0.977918f
-#define HPF_Fs32000_Fc78_A1                       (-1.955836f)
-#define HPF_Fs32000_Fc78_A2                       0.977918f
-#define HPF_Fs32000_Fc78_B1                       (-1.978367f)
-#define HPF_Fs32000_Fc78_B2                       0.978599f
-#define HPF_Fs44100_Fc78_A0                       0.980824f
-#define HPF_Fs44100_Fc78_A1                       (-1.961649f)
-#define HPF_Fs44100_Fc78_A2                       0.980824f
-#define HPF_Fs44100_Fc78_B1                       (-1.984303f)
-#define HPF_Fs44100_Fc78_B2                       0.984425f
-#define HPF_Fs48000_Fc78_A0                       0.981450f
-#define HPF_Fs48000_Fc78_A1                       (-1.962900f)
-#define HPF_Fs48000_Fc78_A2                       0.981450f
-#define HPF_Fs48000_Fc78_B1                       (-1.985578f)
-#define HPF_Fs48000_Fc78_B2                       0.985681f
+#define HPF_Fs8000_Fc78_A0 0.946693f
+#define HPF_Fs8000_Fc78_A1 (-1.893387f)
+#define HPF_Fs8000_Fc78_A2 0.946693f
+#define HPF_Fs8000_Fc78_B1 (-1.913517f)
+#define HPF_Fs8000_Fc78_B2 0.917105f
+#define HPF_Fs11025_Fc78_A0 0.957999f
+#define HPF_Fs11025_Fc78_A1 (-1.915998f)
+#define HPF_Fs11025_Fc78_A2 0.957999f
+#define HPF_Fs11025_Fc78_B1 (-1.937229f)
+#define HPF_Fs11025_Fc78_B2 0.939140f
+#define HPF_Fs12000_Fc78_A0 0.960446f
+#define HPF_Fs12000_Fc78_A1 (-1.920892f)
+#define HPF_Fs12000_Fc78_A2 0.960446f
+#define HPF_Fs12000_Fc78_B1 (-1.942326f)
+#define HPF_Fs12000_Fc78_B2 0.943944f
+#define HPF_Fs16000_Fc78_A0 0.967397f
+#define HPF_Fs16000_Fc78_A1 (-1.934794f)
+#define HPF_Fs16000_Fc78_A2 0.967397f
+#define HPF_Fs16000_Fc78_B1 (-1.956740f)
+#define HPF_Fs16000_Fc78_B2 0.957656f
+#define HPF_Fs22050_Fc78_A0 0.973156f
+#define HPF_Fs22050_Fc78_A1 (-1.946313f)
+#define HPF_Fs22050_Fc78_A2 0.973156f
+#define HPF_Fs22050_Fc78_B1 (-1.968607f)
+#define HPF_Fs22050_Fc78_B2 0.969092f
+#define HPF_Fs24000_Fc78_A0 0.974398f
+#define HPF_Fs24000_Fc78_A1 (-1.948797f)
+#define HPF_Fs24000_Fc78_A2 0.974398f
+#define HPF_Fs24000_Fc78_B1 (-1.971157f)
+#define HPF_Fs24000_Fc78_B2 0.971568f
+#define HPF_Fs32000_Fc78_A0 0.977918f
+#define HPF_Fs32000_Fc78_A1 (-1.955836f)
+#define HPF_Fs32000_Fc78_A2 0.977918f
+#define HPF_Fs32000_Fc78_B1 (-1.978367f)
+#define HPF_Fs32000_Fc78_B2 0.978599f
+#define HPF_Fs44100_Fc78_A0 0.980824f
+#define HPF_Fs44100_Fc78_A1 (-1.961649f)
+#define HPF_Fs44100_Fc78_A2 0.980824f
+#define HPF_Fs44100_Fc78_B1 (-1.984303f)
+#define HPF_Fs44100_Fc78_B2 0.984425f
+#define HPF_Fs48000_Fc78_A0 0.981450f
+#define HPF_Fs48000_Fc78_A1 (-1.962900f)
+#define HPF_Fs48000_Fc78_A2 0.981450f
+#define HPF_Fs48000_Fc78_B1 (-1.985578f)
+#define HPF_Fs48000_Fc78_B2 0.985681f
 
-#define HPF_Fs88200_Fc78_A0                       0.984678f
-#define HPF_Fs88200_Fc78_A1                       (-1.969356f)
-#define HPF_Fs88200_Fc78_A2                       0.984678f
-#define HPF_Fs88200_Fc78_B1                       (-1.992151f)
-#define HPF_Fs88200_Fc78_B2                       0.992182f
+#define HPF_Fs88200_Fc78_A0 0.984678f
+#define HPF_Fs88200_Fc78_A1 (-1.969356f)
+#define HPF_Fs88200_Fc78_A2 0.984678f
+#define HPF_Fs88200_Fc78_B1 (-1.992151f)
+#define HPF_Fs88200_Fc78_B2 0.992182f
 
-#define HPF_Fs96000_Fc78_A0                       0.984992f
-#define HPF_Fs96000_Fc78_A1                       (-1.969984f)
-#define HPF_Fs96000_Fc78_A2                       0.984992f
-#define HPF_Fs96000_Fc78_B1                       (-1.992789f)
-#define HPF_Fs96000_Fc78_B2                       0.992815f
+#define HPF_Fs96000_Fc78_A0 0.984992f
+#define HPF_Fs96000_Fc78_A1 (-1.969984f)
+#define HPF_Fs96000_Fc78_A2 0.984992f
+#define HPF_Fs96000_Fc78_B1 (-1.992789f)
+#define HPF_Fs96000_Fc78_B2 0.992815f
 
-#define HPF_Fs176400_Fc78_A0                      0.986612f
-#define HPF_Fs176400_Fc78_A1                      (-1.973224f)
-#define HPF_Fs176400_Fc78_A2                      0.986612f
-#define HPF_Fs176400_Fc78_B1                      (-1.996076f)
-#define HPF_Fs176400_Fc78_B2                      0.996083f
+#define HPF_Fs176400_Fc78_A0 0.986612f
+#define HPF_Fs176400_Fc78_A1 (-1.973224f)
+#define HPF_Fs176400_Fc78_A2 0.986612f
+#define HPF_Fs176400_Fc78_B1 (-1.996076f)
+#define HPF_Fs176400_Fc78_B2 0.996083f
 
-#define HPF_Fs192000_Fc78_A0                      0.986769f
-#define HPF_Fs192000_Fc78_A1                      (-1.973539f)
-#define HPF_Fs192000_Fc78_A2                      0.986769f
-#define HPF_Fs192000_Fc78_B1                      (-1.996394f)
-#define HPF_Fs192000_Fc78_B2                      0.996401f
+#define HPF_Fs192000_Fc78_A0 0.986769f
+#define HPF_Fs192000_Fc78_A1 (-1.973539f)
+#define HPF_Fs192000_Fc78_A2 0.986769f
+#define HPF_Fs192000_Fc78_B1 (-1.996394f)
+#define HPF_Fs192000_Fc78_B2 0.996401f
 
 /* Coefficients for centre frequency 90Hz */
-#define HPF_Fs8000_Fc90_A0                       0.940412f
-#define HPF_Fs8000_Fc90_A1                       (-1.880825f)
-#define HPF_Fs8000_Fc90_A2                       0.940412f
-#define HPF_Fs8000_Fc90_B1                       (-1.900231f)
-#define HPF_Fs8000_Fc90_B2                       0.904977f
-#define HPF_Fs11025_Fc90_A0                      0.953383f
-#define HPF_Fs11025_Fc90_A1                      (-1.906766f)
-#define HPF_Fs11025_Fc90_A2                      0.953383f
-#define HPF_Fs11025_Fc90_B1                      (-1.927579f)
-#define HPF_Fs11025_Fc90_B2                      0.930111f
-#define HPF_Fs12000_Fc90_A0                      0.956193f
-#define HPF_Fs12000_Fc90_A1                      (-1.912387f)
-#define HPF_Fs12000_Fc90_A2                      0.956193f
-#define HPF_Fs12000_Fc90_B1                      (-1.933459f)
-#define HPF_Fs12000_Fc90_B2                      0.935603f
-#define HPF_Fs16000_Fc90_A0                      0.964183f
-#define HPF_Fs16000_Fc90_A1                      (-1.928365f)
-#define HPF_Fs16000_Fc90_A2                      0.964183f
-#define HPF_Fs16000_Fc90_B1                      (-1.950087f)
-#define HPF_Fs16000_Fc90_B2                      0.951303f
-#define HPF_Fs22050_Fc90_A0                      0.970809f
-#define HPF_Fs22050_Fc90_A1                      (-1.941618f)
-#define HPF_Fs22050_Fc90_A2                      0.970809f
-#define HPF_Fs22050_Fc90_B1                      (-1.963778f)
-#define HPF_Fs22050_Fc90_B2                      0.964423f
-#define HPF_Fs24000_Fc90_A0                      0.972239f
-#define HPF_Fs24000_Fc90_A1                      (-1.944477f)
-#define HPF_Fs24000_Fc90_A2                      0.972239f
-#define HPF_Fs24000_Fc90_B1                      (-1.966721f)
-#define HPF_Fs24000_Fc90_B2                      0.967266f
-#define HPF_Fs32000_Fc90_A0                      0.976292f
-#define HPF_Fs32000_Fc90_A1                      (-1.952584f)
-#define HPF_Fs32000_Fc90_A2                      0.976292f
-#define HPF_Fs32000_Fc90_B1                      (-1.975040f)
-#define HPF_Fs32000_Fc90_B2                      0.975347f
-#define HPF_Fs44100_Fc90_A0                      0.979641f
-#define HPF_Fs44100_Fc90_A1                      (-1.959282f)
-#define HPF_Fs44100_Fc90_A2                      0.979641f
-#define HPF_Fs44100_Fc90_B1                      (-1.981888f)
-#define HPF_Fs44100_Fc90_B2                      0.982050f
-#define HPF_Fs48000_Fc90_A0                      0.980362f
-#define HPF_Fs48000_Fc90_A1                      (-1.960724f)
-#define HPF_Fs48000_Fc90_A2                      0.980362f
-#define HPF_Fs48000_Fc90_B1                      (-1.983359f)
-#define HPF_Fs48000_Fc90_B2                      0.983497f
+#define HPF_Fs8000_Fc90_A0 0.940412f
+#define HPF_Fs8000_Fc90_A1 (-1.880825f)
+#define HPF_Fs8000_Fc90_A2 0.940412f
+#define HPF_Fs8000_Fc90_B1 (-1.900231f)
+#define HPF_Fs8000_Fc90_B2 0.904977f
+#define HPF_Fs11025_Fc90_A0 0.953383f
+#define HPF_Fs11025_Fc90_A1 (-1.906766f)
+#define HPF_Fs11025_Fc90_A2 0.953383f
+#define HPF_Fs11025_Fc90_B1 (-1.927579f)
+#define HPF_Fs11025_Fc90_B2 0.930111f
+#define HPF_Fs12000_Fc90_A0 0.956193f
+#define HPF_Fs12000_Fc90_A1 (-1.912387f)
+#define HPF_Fs12000_Fc90_A2 0.956193f
+#define HPF_Fs12000_Fc90_B1 (-1.933459f)
+#define HPF_Fs12000_Fc90_B2 0.935603f
+#define HPF_Fs16000_Fc90_A0 0.964183f
+#define HPF_Fs16000_Fc90_A1 (-1.928365f)
+#define HPF_Fs16000_Fc90_A2 0.964183f
+#define HPF_Fs16000_Fc90_B1 (-1.950087f)
+#define HPF_Fs16000_Fc90_B2 0.951303f
+#define HPF_Fs22050_Fc90_A0 0.970809f
+#define HPF_Fs22050_Fc90_A1 (-1.941618f)
+#define HPF_Fs22050_Fc90_A2 0.970809f
+#define HPF_Fs22050_Fc90_B1 (-1.963778f)
+#define HPF_Fs22050_Fc90_B2 0.964423f
+#define HPF_Fs24000_Fc90_A0 0.972239f
+#define HPF_Fs24000_Fc90_A1 (-1.944477f)
+#define HPF_Fs24000_Fc90_A2 0.972239f
+#define HPF_Fs24000_Fc90_B1 (-1.966721f)
+#define HPF_Fs24000_Fc90_B2 0.967266f
+#define HPF_Fs32000_Fc90_A0 0.976292f
+#define HPF_Fs32000_Fc90_A1 (-1.952584f)
+#define HPF_Fs32000_Fc90_A2 0.976292f
+#define HPF_Fs32000_Fc90_B1 (-1.975040f)
+#define HPF_Fs32000_Fc90_B2 0.975347f
+#define HPF_Fs44100_Fc90_A0 0.979641f
+#define HPF_Fs44100_Fc90_A1 (-1.959282f)
+#define HPF_Fs44100_Fc90_A2 0.979641f
+#define HPF_Fs44100_Fc90_B1 (-1.981888f)
+#define HPF_Fs44100_Fc90_B2 0.982050f
+#define HPF_Fs48000_Fc90_A0 0.980362f
+#define HPF_Fs48000_Fc90_A1 (-1.960724f)
+#define HPF_Fs48000_Fc90_A2 0.980362f
+#define HPF_Fs48000_Fc90_B1 (-1.983359f)
+#define HPF_Fs48000_Fc90_B2 0.983497f
 
-#define HPF_Fs88200_Fc90_A0                       0.984084f
-#define HPF_Fs88200_Fc90_A1                       (-1.968168f)
-#define HPF_Fs88200_Fc90_A2                       0.984084f
-#define HPF_Fs88200_Fc90_B1                       (-1.990944f)
-#define HPF_Fs88200_Fc90_B2                       0.990985f
+#define HPF_Fs88200_Fc90_A0 0.984084f
+#define HPF_Fs88200_Fc90_A1 (-1.968168f)
+#define HPF_Fs88200_Fc90_A2 0.984084f
+#define HPF_Fs88200_Fc90_B1 (-1.990944f)
+#define HPF_Fs88200_Fc90_B2 0.990985f
 
-#define HPF_Fs96000_Fc90_A0                       0.984446f
-#define HPF_Fs96000_Fc90_A1                       (-1.968892f)
-#define HPF_Fs96000_Fc90_A2                       0.984446f
-#define HPF_Fs96000_Fc90_B1                       (-1.991680f)
-#define HPF_Fs96000_Fc90_B2                       0.991714f
+#define HPF_Fs96000_Fc90_A0 0.984446f
+#define HPF_Fs96000_Fc90_A1 (-1.968892f)
+#define HPF_Fs96000_Fc90_A2 0.984446f
+#define HPF_Fs96000_Fc90_B1 (-1.991680f)
+#define HPF_Fs96000_Fc90_B2 0.991714f
 
-#define HPF_Fs176400_Fc90_A0                      0.986314f
-#define HPF_Fs176400_Fc90_A1                      (-1.972629f)
-#define HPF_Fs176400_Fc90_A2                      0.986314f
-#define HPF_Fs176400_Fc90_B1                      (-1.995472f)
-#define HPF_Fs176400_Fc90_B2                      0.995482f
+#define HPF_Fs176400_Fc90_A0 0.986314f
+#define HPF_Fs176400_Fc90_A1 (-1.972629f)
+#define HPF_Fs176400_Fc90_A2 0.986314f
+#define HPF_Fs176400_Fc90_B1 (-1.995472f)
+#define HPF_Fs176400_Fc90_B2 0.995482f
 
-#define HPF_Fs192000_Fc90_A0                      0.986496f
-#define HPF_Fs192000_Fc90_A1                      (-1.972992f)
-#define HPF_Fs192000_Fc90_A2                      0.986496f
-#define HPF_Fs192000_Fc90_B1                      (-1.995840f)
-#define HPF_Fs192000_Fc90_B2                      0.995848f
+#define HPF_Fs192000_Fc90_A0 0.986496f
+#define HPF_Fs192000_Fc90_A1 (-1.972992f)
+#define HPF_Fs192000_Fc90_A2 0.986496f
+#define HPF_Fs192000_Fc90_B1 (-1.995840f)
+#define HPF_Fs192000_Fc90_B2 0.995848f
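
The A0/A1/A2/B1/B2 values above are the coefficients of second-order (biquad) high-pass sections, one set per supported sample rate and corner frequency; the reformatting leaves the values themselves untouched. Below is a minimal sketch of how one set could be applied, assuming the usual direct-form I convention in which the B terms are fed back with a negative sign (consistent with A0 + A1 + A2 being approximately 0 in every set, i.e. zero gain at DC). The struct and function names are placeholders, not the library's own API:

/* Illustrative direct-form I step using the 48 kHz / 55 Hz high-pass set above.    */
/* biquad_hist_t and hpf_step() are placeholder names, not part of the LVM sources. */
typedef struct {
    float x1, x2; /* previous two inputs  */
    float y1, y2; /* previous two outputs */
} biquad_hist_t;

static inline float hpf_step(biquad_hist_t* h, float x) {
    float y = HPF_Fs48000_Fc55_A0 * x
            + HPF_Fs48000_Fc55_A1 * h->x1
            + HPF_Fs48000_Fc55_A2 * h->x2
            - HPF_Fs48000_Fc55_B1 * h->y1
            - HPF_Fs48000_Fc55_B2 * h->y2;
    h->x2 = h->x1; h->x1 = x;
    h->y2 = h->y1; h->y1 = y;
    return y;
}

With the 48 kHz / 55 Hz numbers this passes content near the Nyquist rate at roughly 0.989 of unity gain and rejects DC entirely, as a 55 Hz high-pass should.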
 
 /************************************************************************************/
 /*                                                                                  */
@@ -323,288 +323,288 @@
 /************************************************************************************/
 
 /* Coefficients for centre frequency 55Hz */
-#define BPF_Fs8000_Fc55_A0                       0.009197f
-#define BPF_Fs8000_Fc55_A1                       0.000000f
-#define BPF_Fs8000_Fc55_A2                       (-0.009197f)
-#define BPF_Fs8000_Fc55_B1                       (-1.979545f)
-#define BPF_Fs8000_Fc55_B2                       0.981393f
-#define BPF_Fs11025_Fc55_A0                      0.006691f
-#define BPF_Fs11025_Fc55_A1                      0.000000f
-#define BPF_Fs11025_Fc55_A2                      (-0.006691f)
-#define BPF_Fs11025_Fc55_B1                      (-1.985488f)
-#define BPF_Fs11025_Fc55_B2                      0.986464f
-#define BPF_Fs12000_Fc55_A0                      0.006150f
-#define BPF_Fs12000_Fc55_A1                      0.000000f
-#define BPF_Fs12000_Fc55_A2                      (-0.006150f)
-#define BPF_Fs12000_Fc55_B1                      (-1.986733f)
-#define BPF_Fs12000_Fc55_B2                      0.987557f
-#define BPF_Fs16000_Fc55_A0                      0.004620f
-#define BPF_Fs16000_Fc55_A1                      0.000000f
-#define BPF_Fs16000_Fc55_A2                      (-0.004620f)
-#define BPF_Fs16000_Fc55_B1                      (-1.990189f)
-#define BPF_Fs16000_Fc55_B2                      0.990653f
-#define BPF_Fs22050_Fc55_A0                      0.003357f
-#define BPF_Fs22050_Fc55_A1                      0.000000f
-#define BPF_Fs22050_Fc55_A2                      (-0.003357f)
-#define BPF_Fs22050_Fc55_B1                      (-1.992964f)
-#define BPF_Fs22050_Fc55_B2                      0.993209f
-#define BPF_Fs24000_Fc55_A0                      0.003085f
-#define BPF_Fs24000_Fc55_A1                      0.000000f
-#define BPF_Fs24000_Fc55_A2                      (-0.003085f)
-#define BPF_Fs24000_Fc55_B1                      (-1.993552f)
-#define BPF_Fs24000_Fc55_B2                      0.993759f
-#define BPF_Fs32000_Fc55_A0                      0.002315f
-#define BPF_Fs32000_Fc55_A1                      0.000000f
-#define BPF_Fs32000_Fc55_A2                      (-0.002315f)
-#define BPF_Fs32000_Fc55_B1                      (-1.995199f)
-#define BPF_Fs32000_Fc55_B2                      0.995316f
-#define BPF_Fs44100_Fc55_A0                      0.001681f
-#define BPF_Fs44100_Fc55_A1                      0.000000f
-#define BPF_Fs44100_Fc55_A2                      (-0.001681f)
-#define BPF_Fs44100_Fc55_B1                      (-1.996537f)
-#define BPF_Fs44100_Fc55_B2                      0.996599f
-#define BPF_Fs48000_Fc55_A0                      0.001545f
-#define BPF_Fs48000_Fc55_A1                      0.000000f
-#define BPF_Fs48000_Fc55_A2                      (-0.001545f)
-#define BPF_Fs48000_Fc55_B1                      (-1.996823f)
-#define BPF_Fs48000_Fc55_B2                      0.996875f
+#define BPF_Fs8000_Fc55_A0 0.009197f
+#define BPF_Fs8000_Fc55_A1 0.000000f
+#define BPF_Fs8000_Fc55_A2 (-0.009197f)
+#define BPF_Fs8000_Fc55_B1 (-1.979545f)
+#define BPF_Fs8000_Fc55_B2 0.981393f
+#define BPF_Fs11025_Fc55_A0 0.006691f
+#define BPF_Fs11025_Fc55_A1 0.000000f
+#define BPF_Fs11025_Fc55_A2 (-0.006691f)
+#define BPF_Fs11025_Fc55_B1 (-1.985488f)
+#define BPF_Fs11025_Fc55_B2 0.986464f
+#define BPF_Fs12000_Fc55_A0 0.006150f
+#define BPF_Fs12000_Fc55_A1 0.000000f
+#define BPF_Fs12000_Fc55_A2 (-0.006150f)
+#define BPF_Fs12000_Fc55_B1 (-1.986733f)
+#define BPF_Fs12000_Fc55_B2 0.987557f
+#define BPF_Fs16000_Fc55_A0 0.004620f
+#define BPF_Fs16000_Fc55_A1 0.000000f
+#define BPF_Fs16000_Fc55_A2 (-0.004620f)
+#define BPF_Fs16000_Fc55_B1 (-1.990189f)
+#define BPF_Fs16000_Fc55_B2 0.990653f
+#define BPF_Fs22050_Fc55_A0 0.003357f
+#define BPF_Fs22050_Fc55_A1 0.000000f
+#define BPF_Fs22050_Fc55_A2 (-0.003357f)
+#define BPF_Fs22050_Fc55_B1 (-1.992964f)
+#define BPF_Fs22050_Fc55_B2 0.993209f
+#define BPF_Fs24000_Fc55_A0 0.003085f
+#define BPF_Fs24000_Fc55_A1 0.000000f
+#define BPF_Fs24000_Fc55_A2 (-0.003085f)
+#define BPF_Fs24000_Fc55_B1 (-1.993552f)
+#define BPF_Fs24000_Fc55_B2 0.993759f
+#define BPF_Fs32000_Fc55_A0 0.002315f
+#define BPF_Fs32000_Fc55_A1 0.000000f
+#define BPF_Fs32000_Fc55_A2 (-0.002315f)
+#define BPF_Fs32000_Fc55_B1 (-1.995199f)
+#define BPF_Fs32000_Fc55_B2 0.995316f
+#define BPF_Fs44100_Fc55_A0 0.001681f
+#define BPF_Fs44100_Fc55_A1 0.000000f
+#define BPF_Fs44100_Fc55_A2 (-0.001681f)
+#define BPF_Fs44100_Fc55_B1 (-1.996537f)
+#define BPF_Fs44100_Fc55_B2 0.996599f
+#define BPF_Fs48000_Fc55_A0 0.001545f
+#define BPF_Fs48000_Fc55_A1 0.000000f
+#define BPF_Fs48000_Fc55_A2 (-0.001545f)
+#define BPF_Fs48000_Fc55_B1 (-1.996823f)
+#define BPF_Fs48000_Fc55_B2 0.996875f
 
-#define BPF_Fs88200_Fc55_A0                      0.000831f
-#define BPF_Fs88200_Fc55_A1                      0.000000f
-#define BPF_Fs88200_Fc55_A2                      (-0.000831f)
-#define BPF_Fs88200_Fc55_B1                      (-1.998321f)
-#define BPF_Fs88200_Fc55_B2                      0.998338f
+#define BPF_Fs88200_Fc55_A0 0.000831f
+#define BPF_Fs88200_Fc55_A1 0.000000f
+#define BPF_Fs88200_Fc55_A2 (-0.000831f)
+#define BPF_Fs88200_Fc55_B1 (-1.998321f)
+#define BPF_Fs88200_Fc55_B2 0.998338f
 
-#define BPF_Fs96000_Fc55_A0                      0.000762f
-#define BPF_Fs96000_Fc55_A1                      0.000000f
-#define BPF_Fs96000_Fc55_A2                      (-0.000762f)
-#define BPF_Fs96000_Fc55_B1                      (-1.998461f)
-#define BPF_Fs96000_Fc55_B2                      0.998477f
+#define BPF_Fs96000_Fc55_A0 0.000762f
+#define BPF_Fs96000_Fc55_A1 0.000000f
+#define BPF_Fs96000_Fc55_A2 (-0.000762f)
+#define BPF_Fs96000_Fc55_B1 (-1.998461f)
+#define BPF_Fs96000_Fc55_B2 0.998477f
 
-#define BPF_Fs176400_Fc55_A0                     0.000416f
-#define BPF_Fs176400_Fc55_A1                     0.000000f
-#define BPF_Fs176400_Fc55_A2                     (-0.000416f)
-#define BPF_Fs176400_Fc55_B1                     (-1.999164f)
-#define BPF_Fs176400_Fc55_B2                     0.999169f
+#define BPF_Fs176400_Fc55_A0 0.000416f
+#define BPF_Fs176400_Fc55_A1 0.000000f
+#define BPF_Fs176400_Fc55_A2 (-0.000416f)
+#define BPF_Fs176400_Fc55_B1 (-1.999164f)
+#define BPF_Fs176400_Fc55_B2 0.999169f
 
-#define BPF_Fs192000_Fc55_A0                     0.000381f
-#define BPF_Fs192000_Fc55_A1                     0.000000f
-#define BPF_Fs192000_Fc55_A2                     (-0.000381f)
-#define BPF_Fs192000_Fc55_B1                     (-1.999234f)
-#define BPF_Fs192000_Fc55_B2                     0.999238f
+#define BPF_Fs192000_Fc55_A0 0.000381f
+#define BPF_Fs192000_Fc55_A1 0.000000f
+#define BPF_Fs192000_Fc55_A2 (-0.000381f)
+#define BPF_Fs192000_Fc55_B1 (-1.999234f)
+#define BPF_Fs192000_Fc55_B2 0.999238f
 
 /* Coefficients for centre frequency 66Hz */
-#define BPF_Fs8000_Fc66_A0                      0.012648f
-#define BPF_Fs8000_Fc66_A1                      0.000000f
-#define BPF_Fs8000_Fc66_A2                      (-0.012648f)
-#define BPF_Fs8000_Fc66_B1                      (-1.971760f)
-#define BPF_Fs8000_Fc66_B2                      0.974412f
-#define BPF_Fs11025_Fc66_A0                     0.009209f
-#define BPF_Fs11025_Fc66_A1                     0.000000f
-#define BPF_Fs11025_Fc66_A2                     (-0.009209f)
-#define BPF_Fs11025_Fc66_B1                     (-1.979966f)
-#define BPF_Fs11025_Fc66_B2                     0.981368f
-#define BPF_Fs12000_Fc66_A0                     0.008468f
-#define BPF_Fs12000_Fc66_A1                     0.000000f
-#define BPF_Fs12000_Fc66_A2                     (-0.008468f)
-#define BPF_Fs12000_Fc66_B1                     (-1.981685f)
-#define BPF_Fs12000_Fc66_B2                     0.982869f
-#define BPF_Fs16000_Fc66_A0                     0.006364f
-#define BPF_Fs16000_Fc66_A1                     0.000000f
-#define BPF_Fs16000_Fc66_A2                     (-0.006364f)
-#define BPF_Fs16000_Fc66_B1                     (-1.986457f)
-#define BPF_Fs16000_Fc66_B2                     0.987124f
-#define BPF_Fs22050_Fc66_A0                     0.004626f
-#define BPF_Fs22050_Fc66_A1                     0.000000f
-#define BPF_Fs22050_Fc66_A2                     (-0.004626f)
-#define BPF_Fs22050_Fc66_B1                     (-1.990288f)
-#define BPF_Fs22050_Fc66_B2                     0.990641f
-#define BPF_Fs24000_Fc66_A0                     0.004252f
-#define BPF_Fs24000_Fc66_A1                     0.000000f
-#define BPF_Fs24000_Fc66_A2                     (-0.004252f)
-#define BPF_Fs24000_Fc66_B1                     (-1.991100f)
-#define BPF_Fs24000_Fc66_B2                     0.991398f
-#define BPF_Fs32000_Fc66_A0                     0.003192f
-#define BPF_Fs32000_Fc66_A1                     0.000000f
-#define BPF_Fs32000_Fc66_A2                     (-0.003192f)
-#define BPF_Fs32000_Fc66_B1                     (-1.993374f)
-#define BPF_Fs32000_Fc66_B2                     0.993541f
-#define BPF_Fs44100_Fc66_A0                     0.002318f
-#define BPF_Fs44100_Fc66_A1                     0.000000f
-#define BPF_Fs44100_Fc66_A2                     (-0.002318f)
-#define BPF_Fs44100_Fc66_B1                     (-1.995221f)
-#define BPF_Fs44100_Fc66_B2                     0.995309f
-#define BPF_Fs48000_Fc66_A0                     0.002131f
-#define BPF_Fs48000_Fc66_A1                     0.000000f
-#define BPF_Fs48000_Fc66_A2                     (-0.002131f)
-#define BPF_Fs48000_Fc66_B1                     (-1.995615f)
-#define BPF_Fs48000_Fc66_B2                     0.995690f
+#define BPF_Fs8000_Fc66_A0 0.012648f
+#define BPF_Fs8000_Fc66_A1 0.000000f
+#define BPF_Fs8000_Fc66_A2 (-0.012648f)
+#define BPF_Fs8000_Fc66_B1 (-1.971760f)
+#define BPF_Fs8000_Fc66_B2 0.974412f
+#define BPF_Fs11025_Fc66_A0 0.009209f
+#define BPF_Fs11025_Fc66_A1 0.000000f
+#define BPF_Fs11025_Fc66_A2 (-0.009209f)
+#define BPF_Fs11025_Fc66_B1 (-1.979966f)
+#define BPF_Fs11025_Fc66_B2 0.981368f
+#define BPF_Fs12000_Fc66_A0 0.008468f
+#define BPF_Fs12000_Fc66_A1 0.000000f
+#define BPF_Fs12000_Fc66_A2 (-0.008468f)
+#define BPF_Fs12000_Fc66_B1 (-1.981685f)
+#define BPF_Fs12000_Fc66_B2 0.982869f
+#define BPF_Fs16000_Fc66_A0 0.006364f
+#define BPF_Fs16000_Fc66_A1 0.000000f
+#define BPF_Fs16000_Fc66_A2 (-0.006364f)
+#define BPF_Fs16000_Fc66_B1 (-1.986457f)
+#define BPF_Fs16000_Fc66_B2 0.987124f
+#define BPF_Fs22050_Fc66_A0 0.004626f
+#define BPF_Fs22050_Fc66_A1 0.000000f
+#define BPF_Fs22050_Fc66_A2 (-0.004626f)
+#define BPF_Fs22050_Fc66_B1 (-1.990288f)
+#define BPF_Fs22050_Fc66_B2 0.990641f
+#define BPF_Fs24000_Fc66_A0 0.004252f
+#define BPF_Fs24000_Fc66_A1 0.000000f
+#define BPF_Fs24000_Fc66_A2 (-0.004252f)
+#define BPF_Fs24000_Fc66_B1 (-1.991100f)
+#define BPF_Fs24000_Fc66_B2 0.991398f
+#define BPF_Fs32000_Fc66_A0 0.003192f
+#define BPF_Fs32000_Fc66_A1 0.000000f
+#define BPF_Fs32000_Fc66_A2 (-0.003192f)
+#define BPF_Fs32000_Fc66_B1 (-1.993374f)
+#define BPF_Fs32000_Fc66_B2 0.993541f
+#define BPF_Fs44100_Fc66_A0 0.002318f
+#define BPF_Fs44100_Fc66_A1 0.000000f
+#define BPF_Fs44100_Fc66_A2 (-0.002318f)
+#define BPF_Fs44100_Fc66_B1 (-1.995221f)
+#define BPF_Fs44100_Fc66_B2 0.995309f
+#define BPF_Fs48000_Fc66_A0 0.002131f
+#define BPF_Fs48000_Fc66_A1 0.000000f
+#define BPF_Fs48000_Fc66_A2 (-0.002131f)
+#define BPF_Fs48000_Fc66_B1 (-1.995615f)
+#define BPF_Fs48000_Fc66_B2 0.995690f
 
-#define BPF_Fs88200_Fc66_A0                     0.001146f
-#define BPF_Fs88200_Fc66_A1                     0.000000f
-#define BPF_Fs88200_Fc66_A2                     (-0.001146f)
-#define BPF_Fs88200_Fc66_B1                     (-1.997684f)
-#define BPF_Fs88200_Fc66_B2                     0.997708f
+#define BPF_Fs88200_Fc66_A0 0.001146f
+#define BPF_Fs88200_Fc66_A1 0.000000f
+#define BPF_Fs88200_Fc66_A2 (-0.001146f)
+#define BPF_Fs88200_Fc66_B1 (-1.997684f)
+#define BPF_Fs88200_Fc66_B2 0.997708f
 
-#define BPF_Fs96000_Fc66_A0                     0.001055f
-#define BPF_Fs96000_Fc66_A1                     0.000000f
-#define BPF_Fs96000_Fc66_A2                     (-0.001055f)
-#define BPF_Fs96000_Fc66_B1                     (-1.997868f)
-#define BPF_Fs96000_Fc66_B2                     0.997891f
+#define BPF_Fs96000_Fc66_A0 0.001055f
+#define BPF_Fs96000_Fc66_A1 0.000000f
+#define BPF_Fs96000_Fc66_A2 (-0.001055f)
+#define BPF_Fs96000_Fc66_B1 (-1.997868f)
+#define BPF_Fs96000_Fc66_B2 0.997891f
 
-#define BPF_Fs176400_Fc66_A0                    0.000573f
-#define BPF_Fs176400_Fc66_A1                    0.000000f
-#define BPF_Fs176400_Fc66_A2                    (-0.000573f)
-#define BPF_Fs176400_Fc66_B1                    (-1.998847f)
-#define BPF_Fs176400_Fc66_B2                    0.998853f
+#define BPF_Fs176400_Fc66_A0 0.000573f
+#define BPF_Fs176400_Fc66_A1 0.000000f
+#define BPF_Fs176400_Fc66_A2 (-0.000573f)
+#define BPF_Fs176400_Fc66_B1 (-1.998847f)
+#define BPF_Fs176400_Fc66_B2 0.998853f
 
-#define BPF_Fs192000_Fc66_A0                    0.000528f
-#define BPF_Fs192000_Fc66_A1                    0.000000f
-#define BPF_Fs192000_Fc66_A2                   (-0.000528f)
-#define BPF_Fs192000_Fc66_B1                   (-1.998939f)
-#define BPF_Fs192000_Fc66_B2                    0.998945f
+#define BPF_Fs192000_Fc66_A0 0.000528f
+#define BPF_Fs192000_Fc66_A1 0.000000f
+#define BPF_Fs192000_Fc66_A2 (-0.000528f)
+#define BPF_Fs192000_Fc66_B1 (-1.998939f)
+#define BPF_Fs192000_Fc66_B2 0.998945f
 
 /* Coefficients for centre frequency 78Hz */
-#define BPF_Fs8000_Fc78_A0                      0.018572f
-#define BPF_Fs8000_Fc78_A1                      0.000000f
-#define BPF_Fs8000_Fc78_A2                      (-0.018572f)
-#define BPF_Fs8000_Fc78_B1                      (-1.958745f)
-#define BPF_Fs8000_Fc78_B2                      0.962427f
-#define BPF_Fs11025_Fc78_A0                     0.013545f
-#define BPF_Fs11025_Fc78_A1                     0.000000f
-#define BPF_Fs11025_Fc78_A2                     (-0.013545f)
-#define BPF_Fs11025_Fc78_B1                     (-1.970647f)
-#define BPF_Fs11025_Fc78_B2                     0.972596f
-#define BPF_Fs12000_Fc78_A0                     0.012458f
-#define BPF_Fs12000_Fc78_A1                     0.000000f
-#define BPF_Fs12000_Fc78_A2                     (-0.012458f)
-#define BPF_Fs12000_Fc78_B1                     (-1.973148f)
-#define BPF_Fs12000_Fc78_B2                     0.974795f
-#define BPF_Fs16000_Fc78_A0                     0.009373f
-#define BPF_Fs16000_Fc78_A1                     0.000000f
-#define BPF_Fs16000_Fc78_A2                     (-0.009373f)
-#define BPF_Fs16000_Fc78_B1                     (-1.980108f)
-#define BPF_Fs16000_Fc78_B2                     0.981037f
-#define BPF_Fs22050_Fc78_A0                     0.006819f
-#define BPF_Fs22050_Fc78_A1                     0.000000f
-#define BPF_Fs22050_Fc78_A2                     (-0.006819f)
-#define BPF_Fs22050_Fc78_B1                     (-1.985714f)
-#define BPF_Fs22050_Fc78_B2                     0.986204f
-#define BPF_Fs24000_Fc78_A0                     0.006268f
-#define BPF_Fs24000_Fc78_A1                     0.000000f
-#define BPF_Fs24000_Fc78_A2                     (-0.006268f)
-#define BPF_Fs24000_Fc78_B1                     (-1.986904f)
-#define BPF_Fs24000_Fc78_B2                     0.987318f
-#define BPF_Fs32000_Fc78_A0                     0.004709f
-#define BPF_Fs32000_Fc78_A1                     0.000000f
-#define BPF_Fs32000_Fc78_A2                     (-0.004709f)
-#define BPF_Fs32000_Fc78_B1                     (-1.990240f)
-#define BPF_Fs32000_Fc78_B2                     0.990473f
-#define BPF_Fs44100_Fc78_A0                     0.003421f
-#define BPF_Fs44100_Fc78_A1                     0.000000f
-#define BPF_Fs44100_Fc78_A2                     (-0.003421f)
-#define BPF_Fs44100_Fc78_B1                     (-1.992955f)
-#define BPF_Fs44100_Fc78_B2                     0.993078f
-#define BPF_Fs48000_Fc78_A0                     0.003144f
-#define BPF_Fs48000_Fc78_A1                     0.000000f
-#define BPF_Fs48000_Fc78_A2                     (-0.003144f)
-#define BPF_Fs48000_Fc78_B1                     (-1.993535f)
-#define BPF_Fs48000_Fc78_B2                     0.993639f
+#define BPF_Fs8000_Fc78_A0 0.018572f
+#define BPF_Fs8000_Fc78_A1 0.000000f
+#define BPF_Fs8000_Fc78_A2 (-0.018572f)
+#define BPF_Fs8000_Fc78_B1 (-1.958745f)
+#define BPF_Fs8000_Fc78_B2 0.962427f
+#define BPF_Fs11025_Fc78_A0 0.013545f
+#define BPF_Fs11025_Fc78_A1 0.000000f
+#define BPF_Fs11025_Fc78_A2 (-0.013545f)
+#define BPF_Fs11025_Fc78_B1 (-1.970647f)
+#define BPF_Fs11025_Fc78_B2 0.972596f
+#define BPF_Fs12000_Fc78_A0 0.012458f
+#define BPF_Fs12000_Fc78_A1 0.000000f
+#define BPF_Fs12000_Fc78_A2 (-0.012458f)
+#define BPF_Fs12000_Fc78_B1 (-1.973148f)
+#define BPF_Fs12000_Fc78_B2 0.974795f
+#define BPF_Fs16000_Fc78_A0 0.009373f
+#define BPF_Fs16000_Fc78_A1 0.000000f
+#define BPF_Fs16000_Fc78_A2 (-0.009373f)
+#define BPF_Fs16000_Fc78_B1 (-1.980108f)
+#define BPF_Fs16000_Fc78_B2 0.981037f
+#define BPF_Fs22050_Fc78_A0 0.006819f
+#define BPF_Fs22050_Fc78_A1 0.000000f
+#define BPF_Fs22050_Fc78_A2 (-0.006819f)
+#define BPF_Fs22050_Fc78_B1 (-1.985714f)
+#define BPF_Fs22050_Fc78_B2 0.986204f
+#define BPF_Fs24000_Fc78_A0 0.006268f
+#define BPF_Fs24000_Fc78_A1 0.000000f
+#define BPF_Fs24000_Fc78_A2 (-0.006268f)
+#define BPF_Fs24000_Fc78_B1 (-1.986904f)
+#define BPF_Fs24000_Fc78_B2 0.987318f
+#define BPF_Fs32000_Fc78_A0 0.004709f
+#define BPF_Fs32000_Fc78_A1 0.000000f
+#define BPF_Fs32000_Fc78_A2 (-0.004709f)
+#define BPF_Fs32000_Fc78_B1 (-1.990240f)
+#define BPF_Fs32000_Fc78_B2 0.990473f
+#define BPF_Fs44100_Fc78_A0 0.003421f
+#define BPF_Fs44100_Fc78_A1 0.000000f
+#define BPF_Fs44100_Fc78_A2 (-0.003421f)
+#define BPF_Fs44100_Fc78_B1 (-1.992955f)
+#define BPF_Fs44100_Fc78_B2 0.993078f
+#define BPF_Fs48000_Fc78_A0 0.003144f
+#define BPF_Fs48000_Fc78_A1 0.000000f
+#define BPF_Fs48000_Fc78_A2 (-0.003144f)
+#define BPF_Fs48000_Fc78_B1 (-1.993535f)
+#define BPF_Fs48000_Fc78_B2 0.993639f
 
-#define BPF_Fs88200_Fc78_A0                    0.001693f
-#define BPF_Fs88200_Fc78_A1                    0.000000f
-#define BPF_Fs88200_Fc78_A2                    (-0.001693f)
-#define BPF_Fs88200_Fc78_B1                    (-1.996582f)
-#define BPF_Fs88200_Fc78_B2                    0.996615f
+#define BPF_Fs88200_Fc78_A0 0.001693f
+#define BPF_Fs88200_Fc78_A1 0.000000f
+#define BPF_Fs88200_Fc78_A2 (-0.001693f)
+#define BPF_Fs88200_Fc78_B1 (-1.996582f)
+#define BPF_Fs88200_Fc78_B2 0.996615f
 
-#define BPF_Fs96000_Fc78_A0                     0.001555f
-#define BPF_Fs96000_Fc78_A1                     0.000000f
-#define BPF_Fs96000_Fc78_A2                    (-0.0015555f)
-#define BPF_Fs96000_Fc78_B1                    (-1.996860f)
-#define BPF_Fs96000_Fc78_B2                     0.996891f
+#define BPF_Fs96000_Fc78_A0 0.001555f
+#define BPF_Fs96000_Fc78_A1 0.000000f
+#define BPF_Fs96000_Fc78_A2 (-0.0015555f)
+#define BPF_Fs96000_Fc78_B1 (-1.996860f)
+#define BPF_Fs96000_Fc78_B2 0.996891f
 
-#define BPF_Fs176400_Fc78_A0                    0.000847f
-#define BPF_Fs176400_Fc78_A1                    0.000000f
-#define BPF_Fs176400_Fc78_A2                    (-0.000847f)
-#define BPF_Fs176400_Fc78_B1                    (-1.998298f)
-#define BPF_Fs176400_Fc78_B2                    0.998306f
+#define BPF_Fs176400_Fc78_A0 0.000847f
+#define BPF_Fs176400_Fc78_A1 0.000000f
+#define BPF_Fs176400_Fc78_A2 (-0.000847f)
+#define BPF_Fs176400_Fc78_B1 (-1.998298f)
+#define BPF_Fs176400_Fc78_B2 0.998306f
 
-#define BPF_Fs192000_Fc78_A0                    0.000778f
-#define BPF_Fs192000_Fc78_A1                    0.000000f
-#define BPF_Fs192000_Fc78_A2                   (-0.000778f)
-#define BPF_Fs192000_Fc78_B1                   (-1.998437f)
-#define BPF_Fs192000_Fc78_B2                    0.998444f
+#define BPF_Fs192000_Fc78_A0 0.000778f
+#define BPF_Fs192000_Fc78_A1 0.000000f
+#define BPF_Fs192000_Fc78_A2 (-0.000778f)
+#define BPF_Fs192000_Fc78_B1 (-1.998437f)
+#define BPF_Fs192000_Fc78_B2 0.998444f
 
 /* Coefficients for centre frequency 90Hz */
-#define BPF_Fs8000_Fc90_A0                       0.022760f
-#define BPF_Fs8000_Fc90_A1                       0.000000f
-#define BPF_Fs8000_Fc90_A2                       (-0.022760f)
-#define BPF_Fs8000_Fc90_B1                       (-1.949073f)
-#define BPF_Fs8000_Fc90_B2                       0.953953f
-#define BPF_Fs11025_Fc90_A0                      0.016619f
-#define BPF_Fs11025_Fc90_A1                      0.000000f
-#define BPF_Fs11025_Fc90_A2                      (-0.016619f)
-#define BPF_Fs11025_Fc90_B1                      (-1.963791f)
-#define BPF_Fs11025_Fc90_B2                      0.966377f
-#define BPF_Fs12000_Fc90_A0                      0.015289f
-#define BPF_Fs12000_Fc90_A1                      0.000000f
-#define BPF_Fs12000_Fc90_A2                      (-0.015289f)
-#define BPF_Fs12000_Fc90_B1                      (-1.966882f)
-#define BPF_Fs12000_Fc90_B2                      0.969067f
-#define BPF_Fs16000_Fc90_A0                      0.011511f
-#define BPF_Fs16000_Fc90_A1                      0.000000f
-#define BPF_Fs16000_Fc90_A2                      (-0.011511f)
-#define BPF_Fs16000_Fc90_B1                      (-1.975477f)
-#define BPF_Fs16000_Fc90_B2                      0.976711f
-#define BPF_Fs22050_Fc90_A0                      0.008379f
-#define BPF_Fs22050_Fc90_A1                      0.000000f
-#define BPF_Fs22050_Fc90_A2                      (-0.008379f)
-#define BPF_Fs22050_Fc90_B1                      (-1.982395f)
-#define BPF_Fs22050_Fc90_B2                      0.983047f
-#define BPF_Fs24000_Fc90_A0                      0.007704f
-#define BPF_Fs24000_Fc90_A1                      0.000000f
-#define BPF_Fs24000_Fc90_A2                      (-0.007704f)
-#define BPF_Fs24000_Fc90_B1                      (-1.983863f)
-#define BPF_Fs24000_Fc90_B2                      0.984414f
-#define BPF_Fs32000_Fc90_A0                      0.005789f
-#define BPF_Fs32000_Fc90_A1                      0.000000f
-#define BPF_Fs32000_Fc90_A2                      (-0.005789f)
-#define BPF_Fs32000_Fc90_B1                      (-1.987977f)
-#define BPF_Fs32000_Fc90_B2                      0.988288f
-#define BPF_Fs44100_Fc90_A0                      0.004207f
-#define BPF_Fs44100_Fc90_A1                      0.000000f
-#define BPF_Fs44100_Fc90_A2                      (-0.004207f)
-#define BPF_Fs44100_Fc90_B1                      (-1.991324f)
-#define BPF_Fs44100_Fc90_B2                      0.991488f
-#define BPF_Fs48000_Fc90_A0                      0.003867f
-#define BPF_Fs48000_Fc90_A1                      0.000000f
-#define BPF_Fs48000_Fc90_A2                      (-0.003867f)
-#define BPF_Fs48000_Fc90_B1                      (-1.992038f)
-#define BPF_Fs48000_Fc90_B2                      0.992177f
+#define BPF_Fs8000_Fc90_A0 0.022760f
+#define BPF_Fs8000_Fc90_A1 0.000000f
+#define BPF_Fs8000_Fc90_A2 (-0.022760f)
+#define BPF_Fs8000_Fc90_B1 (-1.949073f)
+#define BPF_Fs8000_Fc90_B2 0.953953f
+#define BPF_Fs11025_Fc90_A0 0.016619f
+#define BPF_Fs11025_Fc90_A1 0.000000f
+#define BPF_Fs11025_Fc90_A2 (-0.016619f)
+#define BPF_Fs11025_Fc90_B1 (-1.963791f)
+#define BPF_Fs11025_Fc90_B2 0.966377f
+#define BPF_Fs12000_Fc90_A0 0.015289f
+#define BPF_Fs12000_Fc90_A1 0.000000f
+#define BPF_Fs12000_Fc90_A2 (-0.015289f)
+#define BPF_Fs12000_Fc90_B1 (-1.966882f)
+#define BPF_Fs12000_Fc90_B2 0.969067f
+#define BPF_Fs16000_Fc90_A0 0.011511f
+#define BPF_Fs16000_Fc90_A1 0.000000f
+#define BPF_Fs16000_Fc90_A2 (-0.011511f)
+#define BPF_Fs16000_Fc90_B1 (-1.975477f)
+#define BPF_Fs16000_Fc90_B2 0.976711f
+#define BPF_Fs22050_Fc90_A0 0.008379f
+#define BPF_Fs22050_Fc90_A1 0.000000f
+#define BPF_Fs22050_Fc90_A2 (-0.008379f)
+#define BPF_Fs22050_Fc90_B1 (-1.982395f)
+#define BPF_Fs22050_Fc90_B2 0.983047f
+#define BPF_Fs24000_Fc90_A0 0.007704f
+#define BPF_Fs24000_Fc90_A1 0.000000f
+#define BPF_Fs24000_Fc90_A2 (-0.007704f)
+#define BPF_Fs24000_Fc90_B1 (-1.983863f)
+#define BPF_Fs24000_Fc90_B2 0.984414f
+#define BPF_Fs32000_Fc90_A0 0.005789f
+#define BPF_Fs32000_Fc90_A1 0.000000f
+#define BPF_Fs32000_Fc90_A2 (-0.005789f)
+#define BPF_Fs32000_Fc90_B1 (-1.987977f)
+#define BPF_Fs32000_Fc90_B2 0.988288f
+#define BPF_Fs44100_Fc90_A0 0.004207f
+#define BPF_Fs44100_Fc90_A1 0.000000f
+#define BPF_Fs44100_Fc90_A2 (-0.004207f)
+#define BPF_Fs44100_Fc90_B1 (-1.991324f)
+#define BPF_Fs44100_Fc90_B2 0.991488f
+#define BPF_Fs48000_Fc90_A0 0.003867f
+#define BPF_Fs48000_Fc90_A1 0.000000f
+#define BPF_Fs48000_Fc90_A2 (-0.003867f)
+#define BPF_Fs48000_Fc90_B1 (-1.992038f)
+#define BPF_Fs48000_Fc90_B2 0.992177f
 
-#define BPF_Fs88200_Fc90_A0                      0.002083f
-#define BPF_Fs88200_Fc90_A1                      0.000000f
-#define BPF_Fs88200_Fc90_A2                      (-0.002083f)
-#define BPF_Fs88200_Fc90_B1                      (-1.995791f)
-#define BPF_Fs88200_Fc90_B2                      0.995835f
+#define BPF_Fs88200_Fc90_A0 0.002083f
+#define BPF_Fs88200_Fc90_A1 0.000000f
+#define BPF_Fs88200_Fc90_A2 (-0.002083f)
+#define BPF_Fs88200_Fc90_B1 (-1.995791f)
+#define BPF_Fs88200_Fc90_B2 0.995835f
 
-#define BPF_Fs96000_Fc90_A0                      0.001913f
-#define BPF_Fs96000_Fc90_A1                      0.000000f
-#define BPF_Fs96000_Fc90_A2                     (-0.001913f)
-#define BPF_Fs96000_Fc90_B1                     (-1.996134f)
-#define BPF_Fs96000_Fc90_B2                      0.996174f
+#define BPF_Fs96000_Fc90_A0 0.001913f
+#define BPF_Fs96000_Fc90_A1 0.000000f
+#define BPF_Fs96000_Fc90_A2 (-0.001913f)
+#define BPF_Fs96000_Fc90_B1 (-1.996134f)
+#define BPF_Fs96000_Fc90_B2 0.996174f
 
-#define BPF_Fs176400_Fc90_A0                     0.001042f
-#define BPF_Fs176400_Fc90_A1                     0.000000f
-#define BPF_Fs176400_Fc90_A2                     (-0.001042f)
-#define BPF_Fs176400_Fc90_B1                     (-1.997904f)
-#define BPF_Fs176400_Fc90_B2                     0.997915f
+#define BPF_Fs176400_Fc90_A0 0.001042f
+#define BPF_Fs176400_Fc90_A1 0.000000f
+#define BPF_Fs176400_Fc90_A2 (-0.001042f)
+#define BPF_Fs176400_Fc90_B1 (-1.997904f)
+#define BPF_Fs176400_Fc90_B2 0.997915f
 
-#define BPF_Fs192000_Fc90_A0                     0.000958f
-#define BPF_Fs192000_Fc90_A1                     0.000000f
-#define BPF_Fs192000_Fc90_A2                    (-0.000958f)
-#define BPF_Fs192000_Fc90_B1                    (-1.998075f)
-#define BPF_Fs192000_Fc90_B2                     0.998085f
+#define BPF_Fs192000_Fc90_A0 0.000958f
+#define BPF_Fs192000_Fc90_A1 0.000000f
+#define BPF_Fs192000_Fc90_A2 (-0.000958f)
+#define BPF_Fs192000_Fc90_B1 (-1.998075f)
+#define BPF_Fs192000_Fc90_B2 0.998085f
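
The band-pass tables above use the same biquad layout, but in every set A1 is zero and A2 is, up to rounding, the negation of A0 (the 96 kHz / 78 Hz A2 is written -0.0015555f against an A0 of 0.001555f, a pre-existing extra digit that the reformat carries over unchanged), so the numerator collapses to A0 * (x[n] - x[n-2]). A sketch under the same assumed direct-form I convention, reusing the biquad_hist_t history struct from the high-pass sketch above; bpf_step() is likewise a placeholder name:

/* Illustrative band-pass step using the 48 kHz / 90 Hz set above. */
static inline float bpf_step(biquad_hist_t* h, float x) {
    float y = BPF_Fs48000_Fc90_A0 * (x - h->x2) /* A1 == 0, A2 == -A0 */
            - BPF_Fs48000_Fc90_B1 * h->y1
            - BPF_Fs48000_Fc90_B2 * h->y2;
    h->x2 = h->x1; h->x1 = x;
    h->y2 = h->y1; h->y1 = y;
    return y;
}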
 
 /************************************************************************************/
 /*                                                                                  */
@@ -613,74 +613,74 @@
 /************************************************************************************/
 
 /* AGC Time constants */
-#define AGC_ATTACK_Fs8000                             0.841395f
-#define AGC_ATTACK_Fs11025                            0.882223f
-#define AGC_ATTACK_Fs12000                            0.891251f
-#define AGC_ATTACK_Fs16000                            0.917276f
-#define AGC_ATTACK_Fs22050                            0.939267f
-#define AGC_ATTACK_Fs24000                            0.944061f
-#define AGC_ATTACK_Fs32000                            0.957745f
-#define AGC_ATTACK_Fs44100                            0.969158f
-#define AGC_ATTACK_Fs48000                            0.971628f
+#define AGC_ATTACK_Fs8000 0.841395f
+#define AGC_ATTACK_Fs11025 0.882223f
+#define AGC_ATTACK_Fs12000 0.891251f
+#define AGC_ATTACK_Fs16000 0.917276f
+#define AGC_ATTACK_Fs22050 0.939267f
+#define AGC_ATTACK_Fs24000 0.944061f
+#define AGC_ATTACK_Fs32000 0.957745f
+#define AGC_ATTACK_Fs44100 0.969158f
+#define AGC_ATTACK_Fs48000 0.971628f
 
-#define AGC_ATTACK_Fs88200                             0.984458f
-#define AGC_ATTACK_Fs96000                             0.985712f
-#define AGC_ATTACK_Fs176400                            0.992199f
-#define AGC_ATTACK_Fs192000                            0.992830f
+#define AGC_ATTACK_Fs88200 0.984458f
+#define AGC_ATTACK_Fs96000 0.985712f
+#define AGC_ATTACK_Fs176400 0.992199f
+#define AGC_ATTACK_Fs192000 0.992830f
 
-#define DECAY_SHIFT                                   10
+#define DECAY_SHIFT 10
 
-#define AGC_DECAY_Fs8000                              0.000042f
-#define AGC_DECAY_Fs11025                             0.000030f
-#define AGC_DECAY_Fs12000                             0.000028f
-#define AGC_DECAY_Fs16000                             0.000021f
-#define AGC_DECAY_Fs22050                             0.000015f
-#define AGC_DECAY_Fs24000                             0.000014f
-#define AGC_DECAY_Fs32000                             0.000010f
-#define AGC_DECAY_Fs44100                             0.000008f
-#define AGC_DECAY_Fs48000                             0.000007f
+#define AGC_DECAY_Fs8000 0.000042f
+#define AGC_DECAY_Fs11025 0.000030f
+#define AGC_DECAY_Fs12000 0.000028f
+#define AGC_DECAY_Fs16000 0.000021f
+#define AGC_DECAY_Fs22050 0.000015f
+#define AGC_DECAY_Fs24000 0.000014f
+#define AGC_DECAY_Fs32000 0.000010f
+#define AGC_DECAY_Fs44100 0.000008f
+#define AGC_DECAY_Fs48000 0.000007f
 
-#define AGC_DECAY_Fs88200                            0.0000038f
-#define AGC_DECAY_FS96000                            0.0000035f
-#define AGC_DECAY_Fs176400                          0.00000188f
-#define AGC_DECAY_FS192000                          0.00000175f
+#define AGC_DECAY_Fs88200 0.0000038f
+#define AGC_DECAY_FS96000 0.0000035f
+#define AGC_DECAY_Fs176400 0.00000188f
+#define AGC_DECAY_FS192000 0.00000175f
 
 /* AGC Gain settings */
-#define AGC_GAIN_SCALE                                        31         /* As a power of 2 */
-#define AGC_GAIN_SHIFT                                         4         /* As a power of 2 */
-#define AGC_TARGETLEVEL                            0.988553f
-#define AGC_HPFGAIN_0dB                            0.412538f
-#define AGC_GAIN_0dB                               0.000000f
-#define AGC_HPFGAIN_1dB                            0.584893f
-#define AGC_GAIN_1dB                               0.122018f
-#define AGC_HPFGAIN_2dB                            0.778279f
-#define AGC_GAIN_2dB                               0.258925f
-#define AGC_HPFGAIN_3dB                            0.995262f
-#define AGC_GAIN_3dB                               0.412538f
-#define AGC_HPFGAIN_4dB                            1.238721f
-#define AGC_GAIN_4dB                               0.584893f
-#define AGC_HPFGAIN_5dB                            1.511886f
-#define AGC_GAIN_5dB                               0.778279f
-#define AGC_HPFGAIN_6dB                            1.818383f
-#define AGC_GAIN_6dB                               0.995262f
-#define AGC_HPFGAIN_7dB                            2.162278f
-#define AGC_GAIN_7dB                               1.238721f
-#define AGC_HPFGAIN_8dB                            2.548134f
-#define AGC_GAIN_8dB                               1.511886f
-#define AGC_HPFGAIN_9dB                            2.981072f
-#define AGC_GAIN_9dB                               1.818383f
-#define AGC_HPFGAIN_10dB                           3.466836f
-#define AGC_GAIN_10dB                              2.162278f
-#define AGC_HPFGAIN_11dB                           4.011872f
-#define AGC_GAIN_11dB                              2.548134f
-#define AGC_HPFGAIN_12dB                           4.623413f
-#define AGC_GAIN_12dB                              2.981072f
-#define AGC_HPFGAIN_13dB                           5.309573f
-#define AGC_GAIN_13dB                              3.466836f
-#define AGC_HPFGAIN_14dB                           6.079458f
-#define AGC_GAIN_14dB                              4.011872f
-#define AGC_HPFGAIN_15dB                           6.943282f
-#define AGC_GAIN_15dB                              4.623413f
+#define AGC_GAIN_SCALE 31 /* As a power of 2 */
+#define AGC_GAIN_SHIFT 4  /* As a power of 2 */
+#define AGC_TARGETLEVEL 0.988553f
+#define AGC_HPFGAIN_0dB 0.412538f
+#define AGC_GAIN_0dB 0.000000f
+#define AGC_HPFGAIN_1dB 0.584893f
+#define AGC_GAIN_1dB 0.122018f
+#define AGC_HPFGAIN_2dB 0.778279f
+#define AGC_GAIN_2dB 0.258925f
+#define AGC_HPFGAIN_3dB 0.995262f
+#define AGC_GAIN_3dB 0.412538f
+#define AGC_HPFGAIN_4dB 1.238721f
+#define AGC_GAIN_4dB 0.584893f
+#define AGC_HPFGAIN_5dB 1.511886f
+#define AGC_GAIN_5dB 0.778279f
+#define AGC_HPFGAIN_6dB 1.818383f
+#define AGC_GAIN_6dB 0.995262f
+#define AGC_HPFGAIN_7dB 2.162278f
+#define AGC_GAIN_7dB 1.238721f
+#define AGC_HPFGAIN_8dB 2.548134f
+#define AGC_GAIN_8dB 1.511886f
+#define AGC_HPFGAIN_9dB 2.981072f
+#define AGC_GAIN_9dB 1.818383f
+#define AGC_HPFGAIN_10dB 3.466836f
+#define AGC_GAIN_10dB 2.162278f
+#define AGC_HPFGAIN_11dB 4.011872f
+#define AGC_GAIN_11dB 2.548134f
+#define AGC_HPFGAIN_12dB 4.623413f
+#define AGC_GAIN_12dB 2.981072f
+#define AGC_HPFGAIN_13dB 5.309573f
+#define AGC_GAIN_13dB 3.466836f
+#define AGC_HPFGAIN_14dB 6.079458f
+#define AGC_GAIN_14dB 4.011872f
+#define AGC_HPFGAIN_15dB 6.943282f
+#define AGC_GAIN_15dB 4.623413f
 
 /************************************************************************************/
 /*                                                                                  */
@@ -689,38 +689,38 @@
 /************************************************************************************/
 
 /* Volume control gain */
-#define VOLUME_MAX                                          0         /* In dBs */
-#define VOLUME_SHIFT                                        0         /* In dBs */
+#define VOLUME_MAX 0   /* In dBs */
+#define VOLUME_SHIFT 0 /* In dBs */
 
 /* Volume control time constants */
-#define VOL_TC_SHIFT                                       21         /* As a power of 2 */
-#define VOL_TC_Fs8000                                   0.024690f
-#define VOL_TC_Fs11025                                  0.017977f
-#define VOL_TC_Fs12000                                  0.016529f
-#define VOL_TC_Fs16000                                  0.012422f
-#define VOL_TC_Fs22050                                  0.009029f
-#define VOL_TC_Fs24000                                  0.008299f
-#define VOL_TC_Fs32000                                  0.006231f
-#define VOL_TC_Fs44100                                  0.004525f
-#define VOL_TC_Fs48000                                  0.004158f
-#define VOL_TC_Fs88200                                  0.002263f
-#define VOL_TC_Fs96000                                  0.002079f
-#define VOL_TC_Fs176400                                 0.001131f
-#define VOL_TC_Fs192000                                 0.001039f
-#define MIX_TC_Fs8000                                   29365         /* Floating point value 0.896151 */
-#define MIX_TC_Fs11025                                  30230         /* Floating point value 0.922548 */
-#define MIX_TC_Fs12000                                  30422         /* Floating point value 0.928415 */
-#define MIX_TC_Fs16000                                  30978         /* Floating point value 0.945387 */
-#define MIX_TC_Fs22050                                  31451         /* Floating point value 0.959804 */
-#define MIX_TC_Fs24000                                  31554         /* Floating point value 0.962956 */
-#define MIX_TC_Fs32000                                  31850         /* Floating point value 0.971973 */
-#define MIX_TC_Fs44100                                  32097         /* Floating point value 0.979515 */
-#define MIX_TC_Fs48000                                  32150         /* Floating point value 0.981150 */
+#define VOL_TC_SHIFT 21 /* As a power of 2 */
+#define VOL_TC_Fs8000 0.024690f
+#define VOL_TC_Fs11025 0.017977f
+#define VOL_TC_Fs12000 0.016529f
+#define VOL_TC_Fs16000 0.012422f
+#define VOL_TC_Fs22050 0.009029f
+#define VOL_TC_Fs24000 0.008299f
+#define VOL_TC_Fs32000 0.006231f
+#define VOL_TC_Fs44100 0.004525f
+#define VOL_TC_Fs48000 0.004158f
+#define VOL_TC_Fs88200 0.002263f
+#define VOL_TC_Fs96000 0.002079f
+#define VOL_TC_Fs176400 0.001131f
+#define VOL_TC_Fs192000 0.001039f
+#define MIX_TC_Fs8000 29365  /* Floating point value 0.896151 */
+#define MIX_TC_Fs11025 30230 /* Floating point value 0.922548 */
+#define MIX_TC_Fs12000 30422 /* Floating point value 0.928415 */
+#define MIX_TC_Fs16000 30978 /* Floating point value 0.945387 */
+#define MIX_TC_Fs22050 31451 /* Floating point value 0.959804 */
+#define MIX_TC_Fs24000 31554 /* Floating point value 0.962956 */
+#define MIX_TC_Fs32000 31850 /* Floating point value 0.971973 */
+#define MIX_TC_Fs44100 32097 /* Floating point value 0.979515 */
+#define MIX_TC_Fs48000 32150 /* Floating point value 0.981150 */
 /* Floating point value 0.989704 */
-#define MIX_TC_Fs88200                                  32430
-#define MIX_TC_Fs96000                                  32456         /* Floating point value 0.990530 */
+#define MIX_TC_Fs88200 32430
+#define MIX_TC_Fs96000 32456 /* Floating point value 0.990530 */
 /* Floating point value 0.994838 */
-#define MIX_TC_Fs176400                                 32598
-#define MIX_TC_Fs192000                                 32611         /* Floating point value 0.992524 */
+#define MIX_TC_Fs176400 32598
+#define MIX_TC_Fs192000 32611 /* Floating point value 0.992524 */
 
 #endif
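
The AGC gain constants reformatted above appear to follow a simple dB-to-linear rule: AGC_GAIN_ndB = 10^(n/20) - 1 and AGC_HPFGAIN_ndB = 10^((n+3)/20) - 1 (the same curve offset by 3 dB). This is inferred from the values themselves, not stated in the header; the small C++ sketch below only regenerates the table so the whitespace-only reformatting of the numbers can be spot-checked.

// Illustrative only: reproduces the apparent relationship behind the
// AGC_GAIN_ndB / AGC_HPFGAIN_ndB constants; not part of the library.
#include <cmath>
#include <cstdio>

int main() {
    for (int n = 0; n <= 15; ++n) {
        const double gain = std::pow(10.0, n / 20.0) - 1.0;           // e.g. n=1 -> 0.122018
        const double hpfGain = std::pow(10.0, (n + 3) / 20.0) - 1.0;  // e.g. n=0 -> 0.412538
        std::printf("%2d dB: AGC_GAIN %.6f  AGC_HPFGAIN %.6f\n", n, gain, hpfGain);
    }
    return 0;
}
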
diff --git a/media/libeffects/lvm/lib/Bass/src/LVDBE_Control.cpp b/media/libeffects/lvm/lib/Bass/src/LVDBE_Control.cpp
index 53feae8..5b47aa6 100644
--- a/media/libeffects/lvm/lib/Bass/src/LVDBE_Control.cpp
+++ b/media/libeffects/lvm/lib/Bass/src/LVDBE_Control.cpp
@@ -47,15 +47,12 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-LVDBE_ReturnStatus_en LVDBE_GetParameters(LVDBE_Handle_t        hInstance,
-                                            LVDBE_Params_t        *pParams)
-{
-
-    LVDBE_Instance_t    *pInstance =(LVDBE_Instance_t  *)hInstance;
+LVDBE_ReturnStatus_en LVDBE_GetParameters(LVDBE_Handle_t hInstance, LVDBE_Params_t* pParams) {
+    LVDBE_Instance_t* pInstance = (LVDBE_Instance_t*)hInstance;
 
     *pParams = pInstance->Params;
 
-    return(LVDBE_SUCCESS);
+    return (LVDBE_SUCCESS);
 }
 
 /************************************************************************************/
@@ -77,15 +74,13 @@
 /*                                                                                  */
 /************************************************************************************/
 
-LVDBE_ReturnStatus_en LVDBE_GetCapabilities(LVDBE_Handle_t            hInstance,
-                                              LVDBE_Capabilities_t    *pCapabilities)
-{
-
-    LVDBE_Instance_t    *pInstance =(LVDBE_Instance_t  *)hInstance;
+LVDBE_ReturnStatus_en LVDBE_GetCapabilities(LVDBE_Handle_t hInstance,
+                                            LVDBE_Capabilities_t* pCapabilities) {
+    LVDBE_Instance_t* pInstance = (LVDBE_Instance_t*)hInstance;
 
     *pCapabilities = pInstance->Capabilities;
 
-    return(LVDBE_SUCCESS);
+    return (LVDBE_SUCCESS);
 }
 
 /************************************************************************************/
@@ -101,35 +96,33 @@
 /*                                                                                  */
 /************************************************************************************/
 
-void    LVDBE_SetFilters(LVDBE_Instance_t     *pInstance,
-                         LVDBE_Params_t       *pParams)
-{
-
+void LVDBE_SetFilters(LVDBE_Instance_t* pInstance, LVDBE_Params_t* pParams) {
     /*
      * Calculate the table offsets
      */
-    LVM_UINT16 Offset = (LVM_UINT16)((LVM_UINT16)pParams->SampleRate + \
-                                    (LVM_UINT16)(pParams->CentreFrequency * (1+LVDBE_FS_192000)));
+    LVM_UINT16 Offset =
+            (LVM_UINT16)((LVM_UINT16)pParams->SampleRate +
+                         (LVM_UINT16)(pParams->CentreFrequency * (1 + LVDBE_FS_192000)));
 
     /*
      * Setup the high pass filter
      */
-    LoadConst_Float(0,                                          /* Clear the history, value 0 */
-                   (LVM_FLOAT *)&pInstance->pData->HPFTaps,     /* Destination */
+    LoadConst_Float(0,                                      /* Clear the history, value 0 */
+                    (LVM_FLOAT*)&pInstance->pData->HPFTaps, /* Destination */
                     sizeof(pInstance->pData->HPFTaps) / sizeof(LVM_FLOAT)); /* Number of words */
-    BQ_2I_D32F32Cll_TRC_WRA_01_Init(&pInstance->pCoef->HPFInstance,    /* Initialise the filter */
+    BQ_2I_D32F32Cll_TRC_WRA_01_Init(&pInstance->pCoef->HPFInstance, /* Initialise the filter */
                                     &pInstance->pData->HPFTaps,
-                                    (BQ_FLOAT_Coefs_t *)&LVDBE_HPF_Table[Offset]);
+                                    (BQ_FLOAT_Coefs_t*)&LVDBE_HPF_Table[Offset]);
 
     /*
      * Setup the band pass filter
      */
-    LoadConst_Float(0,                                           /* Clear the history, value 0 */
-                 (LVM_FLOAT *)&pInstance->pData->BPFTaps,        /* Destination */
-                 sizeof(pInstance->pData->BPFTaps) / sizeof(LVM_FLOAT));   /* Number of words */
-    BP_1I_D32F32Cll_TRC_WRA_02_Init(&pInstance->pCoef->BPFInstance,    /* Initialise the filter */
+    LoadConst_Float(0,                                      /* Clear the history, value 0 */
+                    (LVM_FLOAT*)&pInstance->pData->BPFTaps, /* Destination */
+                    sizeof(pInstance->pData->BPFTaps) / sizeof(LVM_FLOAT)); /* Number of words */
+    BP_1I_D32F32Cll_TRC_WRA_02_Init(&pInstance->pCoef->BPFInstance, /* Initialise the filter */
                                     &pInstance->pData->BPFTaps,
-                                    (BP_FLOAT_Coefs_t *)&LVDBE_BPF_Table[Offset]);
+                                    (BP_FLOAT_Coefs_t*)&LVDBE_BPF_Table[Offset]);
 }
 
 /************************************************************************************/
@@ -145,29 +138,26 @@
 /*                                                                                  */
 /************************************************************************************/
 
-void    LVDBE_SetAGC(LVDBE_Instance_t     *pInstance,
-                     LVDBE_Params_t       *pParams)
-{
-
+void LVDBE_SetAGC(LVDBE_Instance_t* pInstance, LVDBE_Params_t* pParams) {
     /*
      * Get the attack and decay time constants
      */
-    pInstance->pData->AGCInstance.AGC_Attack = LVDBE_AGC_ATTACK_Table[(LVM_UINT16)pParams->SampleRate];  /* Attack multiplier */
-    pInstance->pData->AGCInstance.AGC_Decay  = LVDBE_AGC_DECAY_Table[(LVM_UINT16)pParams->SampleRate];   /* Decay multipler */
+    pInstance->pData->AGCInstance.AGC_Attack =
+            LVDBE_AGC_ATTACK_Table[(LVM_UINT16)pParams->SampleRate]; /* Attack multiplier */
+    pInstance->pData->AGCInstance.AGC_Decay =
+            LVDBE_AGC_DECAY_Table[(LVM_UINT16)pParams->SampleRate]; /* Decay multiplier */
 
     /*
      * Get the boost gain
      */
-    if (pParams->HPFSelect == LVDBE_HPF_ON)
-    {
-        pInstance->pData->AGCInstance.AGC_MaxGain   = LVDBE_AGC_HPFGAIN_Table[(LVM_UINT16)pParams->EffectLevel];  /* High pass filter on */
-    }
-    else
-    {
-        pInstance->pData->AGCInstance.AGC_MaxGain   = LVDBE_AGC_GAIN_Table[(LVM_UINT16)pParams->EffectLevel];     /* High pass filter off */
+    if (pParams->HPFSelect == LVDBE_HPF_ON) {
+        pInstance->pData->AGCInstance.AGC_MaxGain =
+                LVDBE_AGC_HPFGAIN_Table[(LVM_UINT16)pParams->EffectLevel]; /* High pass filter on */
+    } else {
+        pInstance->pData->AGCInstance.AGC_MaxGain =
+                LVDBE_AGC_GAIN_Table[(LVM_UINT16)pParams->EffectLevel]; /* High pass filter off */
     }
     pInstance->pData->AGCInstance.AGC_Target = AGC_TARGETLEVEL;
-
 }
 
 /************************************************************************************/
@@ -193,29 +183,22 @@
 /*                                                                                  */
 /************************************************************************************/
 
-void    LVDBE_SetVolume(LVDBE_Instance_t     *pInstance,
-                        LVDBE_Params_t       *pParams)
-{
+void LVDBE_SetVolume(LVDBE_Instance_t* pInstance, LVDBE_Params_t* pParams) {
+    LVM_UINT16 dBShifts;  /* 6dB shifts */
+    LVM_UINT16 dBOffset;  /* Table offset */
+    LVM_INT16 Volume = 0; /* Required volume in dBs */
 
-    LVM_UINT16      dBShifts;                                   /* 6dB shifts */
-    LVM_UINT16      dBOffset;                                   /* Table offset */
-    LVM_INT16       Volume = 0;                                 /* Required volume in dBs */
-
-    LVM_FLOAT        dBShifts_fac;
+    LVM_FLOAT dBShifts_fac;
     /*
      * Apply the volume if enabled
      */
-    if (pParams->VolumeControl == LVDBE_VOLUME_ON)
-    {
+    if (pParams->VolumeControl == LVDBE_VOLUME_ON) {
         /*
          * Limit the gain to the maximum allowed
          */
-        if  (pParams->VolumedB > VOLUME_MAX)
-        {
+        if (pParams->VolumedB > VOLUME_MAX) {
             Volume = VOLUME_MAX;
-        }
-        else
-        {
+        } else {
             Volume = pParams->VolumedB;
         }
     }
@@ -223,8 +206,8 @@
     /*
      * Calculate the required gain and shifts
      */
-    dBOffset = (LVM_UINT16)(6 + Volume % 6);                    /* Get the dBs 0-5 */
-    dBShifts = (LVM_UINT16)(Volume / -6);                       /* Get the 6dB shifts */
+    dBOffset = (LVM_UINT16)(6 + Volume % 6); /* Get the dBs 0-5 */
+    dBShifts = (LVM_UINT16)(Volume / -6);    /* Get the 6dB shifts */
 
     dBShifts_fac = (LVM_FLOAT)(1 << dBShifts);
     /*
@@ -232,27 +215,23 @@
      */
     pInstance->pData->AGCInstance.Target = (LVDBE_VolumeTable[dBOffset]);
     pInstance->pData->AGCInstance.Target = pInstance->pData->AGCInstance.Target / dBShifts_fac;
-    pInstance->pData->AGCInstance.VolumeTC    = LVDBE_VolumeTCTable[(LVM_UINT16)pParams->SampleRate];   /* Volume update time constant */
+    pInstance->pData->AGCInstance.VolumeTC =
+            LVDBE_VolumeTCTable[(LVM_UINT16)pParams->SampleRate]; /* Volume update time constant */
 
     /*
      * When DBE is disabled use the bypass volume control
      */
-    if(dBShifts > 0)
-    {
+    if (dBShifts > 0) {
         LVC_Mixer_SetTarget(&pInstance->pData->BypassVolume.MixerStream[0],
                             LVDBE_VolumeTable[dBOffset] / dBShifts_fac);
-    }
-    else
-    {
+    } else {
         LVC_Mixer_SetTarget(&pInstance->pData->BypassVolume.MixerStream[0],
                             LVDBE_VolumeTable[dBOffset]);
     }
 
     pInstance->pData->BypassVolume.MixerStream[0].CallbackSet = 1;
     LVC_Mixer_VarSlope_SetTimeConstant(&pInstance->pData->BypassVolume.MixerStream[0],
-                                LVDBE_MIXER_TC,
-                                (LVM_Fs_en)pInstance->Params.SampleRate,
-                                2);
+                                       LVDBE_MIXER_TC, (LVM_Fs_en)pInstance->Params.SampleRate, 2);
 }
 
 /****************************************************************************************/
@@ -292,21 +271,17 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-LVDBE_ReturnStatus_en LVDBE_Control(LVDBE_Handle_t         hInstance,
-                                      LVDBE_Params_t         *pParams)
-{
-
-    LVDBE_Instance_t    *pInstance =(LVDBE_Instance_t  *)hInstance;
-    LVMixer3_2St_FLOAT_st     *pBypassMixer_Instance = &pInstance->pData->BypassMixer;
+LVDBE_ReturnStatus_en LVDBE_Control(LVDBE_Handle_t hInstance, LVDBE_Params_t* pParams) {
+    LVDBE_Instance_t* pInstance = (LVDBE_Instance_t*)hInstance;
+    LVMixer3_2St_FLOAT_st* pBypassMixer_Instance = &pInstance->pData->BypassMixer;
 
     /*
      * Update the filters
      */
     if ((pInstance->Params.SampleRate != pParams->SampleRate) ||
-        (pInstance->Params.CentreFrequency != pParams->CentreFrequency))
-    {
-        LVDBE_SetFilters(pInstance,                     /* Instance pointer */
-                         pParams);                      /* New parameters */
+        (pInstance->Params.CentreFrequency != pParams->CentreFrequency)) {
+        LVDBE_SetFilters(pInstance, /* Instance pointer */
+                         pParams);  /* New parameters */
     }
 
     /*
@@ -314,16 +289,14 @@
      */
     if ((pInstance->Params.SampleRate != pParams->SampleRate) ||
         (pInstance->Params.EffectLevel != pParams->EffectLevel) ||
-        (pInstance->Params.HPFSelect != pParams->HPFSelect))
-    {
-        LVDBE_SetAGC(pInstance,                         /* Instance pointer */
-                     pParams);                          /* New parameters */
-        LVC_Mixer_SetTimeConstant(&pBypassMixer_Instance->MixerStream[0],
-            LVDBE_BYPASS_MIXER_TC,(LVM_Fs_en)pParams->SampleRate, 2);
+        (pInstance->Params.HPFSelect != pParams->HPFSelect)) {
+        LVDBE_SetAGC(pInstance, /* Instance pointer */
+                     pParams);  /* New parameters */
+        LVC_Mixer_SetTimeConstant(&pBypassMixer_Instance->MixerStream[0], LVDBE_BYPASS_MIXER_TC,
+                                  (LVM_Fs_en)pParams->SampleRate, 2);
 
-        LVC_Mixer_SetTimeConstant(&pBypassMixer_Instance->MixerStream[1],
-            LVDBE_BYPASS_MIXER_TC,(LVM_Fs_en)pParams->SampleRate, 2);
-
+        LVC_Mixer_SetTimeConstant(&pBypassMixer_Instance->MixerStream[1], LVDBE_BYPASS_MIXER_TC,
+                                  (LVM_Fs_en)pParams->SampleRate, 2);
     }
 
     /*
@@ -332,19 +305,16 @@
     if ((pInstance->Params.VolumedB != pParams->VolumedB) ||
         (pInstance->Params.SampleRate != pParams->SampleRate) ||
         (pInstance->Params.HeadroomdB != pParams->HeadroomdB) ||
-        (pInstance->Params.VolumeControl != pParams->VolumeControl))
-    {
-        LVDBE_SetVolume(pInstance,                      /* Instance pointer */
-                       pParams);                        /* New parameters */
+        (pInstance->Params.VolumeControl != pParams->VolumeControl)) {
+        LVDBE_SetVolume(pInstance, /* Instance pointer */
+                        pParams);  /* New parameters */
     }
 
-    if (pInstance->Params.OperatingMode==LVDBE_ON && pParams->OperatingMode==LVDBE_OFF)
-    {
+    if (pInstance->Params.OperatingMode == LVDBE_ON && pParams->OperatingMode == LVDBE_OFF) {
         LVC_Mixer_SetTarget(&pInstance->pData->BypassMixer.MixerStream[0], 0);
         LVC_Mixer_SetTarget(&pInstance->pData->BypassMixer.MixerStream[1], 1.0f);
     }
-    if (pInstance->Params.OperatingMode==LVDBE_OFF && pParams->OperatingMode==LVDBE_ON)
-    {
+    if (pInstance->Params.OperatingMode == LVDBE_OFF && pParams->OperatingMode == LVDBE_ON) {
         LVC_Mixer_SetTarget(&pInstance->pData->BypassMixer.MixerStream[0], 1.0f);
         LVC_Mixer_SetTarget(&pInstance->pData->BypassMixer.MixerStream[1], 0);
     }
@@ -354,5 +324,5 @@
      */
     pInstance->Params = *pParams;
 
-    return(LVDBE_SUCCESS);
+    return (LVDBE_SUCCESS);
 }
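
A worked example of the volume arithmetic in LVDBE_SetVolume above, derived only from the integer expressions shown in this hunk: for VolumedB = -13, C's truncating operators give Volume % 6 == -1 and Volume / -6 == 2, so dBOffset == 5 and dBShifts == 2. The target gain is then LVDBE_VolumeTable[5] (presumably the -1 dB entry) divided by dBShifts_fac == 4, i.e. attenuated by two further halvings of roughly 6 dB each, approximating the requested -13 dB. Similarly, the table offset in LVDBE_SetFilters appears to index LVDBE_HPF_Table by sample rate within a centre-frequency block of (1 + LVDBE_FS_192000) entries, matching the 55 Hz / 66 Hz coefficient groups in LVDBE_Tables.cpp further down.
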
diff --git a/media/libeffects/lvm/lib/Bass/src/LVDBE_Init.cpp b/media/libeffects/lvm/lib/Bass/src/LVDBE_Init.cpp
index ad77696..12af162 100644
--- a/media/libeffects/lvm/lib/Bass/src/LVDBE_Init.cpp
+++ b/media/libeffects/lvm/lib/Bass/src/LVDBE_Init.cpp
@@ -20,210 +20,93 @@
 /*    Includes                                                                          */
 /*                                                                                      */
 /****************************************************************************************/
+#include <stdlib.h>
 
 #include "LVDBE.h"
 #include "LVDBE_Private.h"
 
 /****************************************************************************************/
 /*                                                                                      */
-/* FUNCTION:                 LVDBE_Memory                                               */
-/*                                                                                      */
-/* DESCRIPTION:                                                                         */
-/*    This function is used for memory allocation and free. It can be called in         */
-/*    two ways:                                                                         */
-/*                                                                                      */
-/*        hInstance = NULL                Returns the memory requirements               */
-/*        hInstance = Instance handle        Returns the memory requirements and        */
-/*                                        allocated base addresses for the instance     */
-/*                                                                                      */
-/*    When this function is called for memory allocation (hInstance=NULL) the memory    */
-/*  base address pointers are NULL on return.                                           */
-/*                                                                                      */
-/*    When the function is called for free (hInstance = Instance Handle) the memory     */
-/*  table returns the allocated memory and base addresses used during initialisation.   */
-/*                                                                                      */
-/* PARAMETERS:                                                                          */
-/*  hInstance                Instance Handle                                            */
-/*  pMemoryTable             Pointer to an empty memory definition table                */
-/*    pCapabilities           Pointer to the instance capabilities                      */
-/*                                                                                      */
-/* RETURNS:                                                                             */
-/*  LVDBE_SUCCESS            Succeeded                                                  */
-/*                                                                                      */
-/* NOTES:                                                                               */
-/*    1.    This function may be interrupted by the LVDBE_Process function              */
-/*                                                                                      */
-/****************************************************************************************/
-
-LVDBE_ReturnStatus_en LVDBE_Memory(LVDBE_Handle_t            hInstance,
-                                   LVDBE_MemTab_t            *pMemoryTable,
-                                   LVDBE_Capabilities_t      *pCapabilities)
-{
-
-    LVM_UINT32          ScratchSize;
-    LVDBE_Instance_t    *pInstance = (LVDBE_Instance_t *)hInstance;
-
-    /*
-     * Fill in the memory table
-     */
-    if (hInstance == LVM_NULL)
-    {
-        /*
-         * Instance memory
-         */
-        pMemoryTable->Region[LVDBE_MEMREGION_INSTANCE].Size         = sizeof(LVDBE_Instance_t);
-        pMemoryTable->Region[LVDBE_MEMREGION_INSTANCE].Alignment    = LVDBE_INSTANCE_ALIGN;
-        pMemoryTable->Region[LVDBE_MEMREGION_INSTANCE].Type         = LVDBE_PERSISTENT;
-        pMemoryTable->Region[LVDBE_MEMREGION_INSTANCE].pBaseAddress = LVM_NULL;
-
-        /*
-         * Data memory
-         */
-        pMemoryTable->Region[LVDBE_MEMREGION_PERSISTENT_DATA].Size   = sizeof(LVDBE_Data_FLOAT_t);
-        pMemoryTable->Region[LVDBE_MEMREGION_PERSISTENT_DATA].Alignment    = LVDBE_PERSISTENT_DATA_ALIGN;
-        pMemoryTable->Region[LVDBE_MEMREGION_PERSISTENT_DATA].Type         = LVDBE_PERSISTENT_DATA;
-        pMemoryTable->Region[LVDBE_MEMREGION_PERSISTENT_DATA].pBaseAddress = LVM_NULL;
-
-        /*
-         * Coef memory
-         */
-        pMemoryTable->Region[LVDBE_MEMREGION_PERSISTENT_COEF].Size   = sizeof(LVDBE_Coef_FLOAT_t);
-        pMemoryTable->Region[LVDBE_MEMREGION_PERSISTENT_COEF].Alignment    = LVDBE_PERSISTENT_COEF_ALIGN;
-        pMemoryTable->Region[LVDBE_MEMREGION_PERSISTENT_COEF].Type         = LVDBE_PERSISTENT_COEF;
-        pMemoryTable->Region[LVDBE_MEMREGION_PERSISTENT_COEF].pBaseAddress = LVM_NULL;
-
-        /*
-         * Scratch memory
-         */
-        ScratchSize = (LVM_UINT32)(LVDBE_SCRATCHBUFFERS_INPLACE*sizeof(LVM_FLOAT) * \
-                                        pCapabilities->MaxBlockSize);
-        pMemoryTable->Region[LVDBE_MEMREGION_SCRATCH].Size         = ScratchSize;
-        pMemoryTable->Region[LVDBE_MEMREGION_SCRATCH].Alignment    = LVDBE_SCRATCH_ALIGN;
-        pMemoryTable->Region[LVDBE_MEMREGION_SCRATCH].Type         = LVDBE_SCRATCH;
-        pMemoryTable->Region[LVDBE_MEMREGION_SCRATCH].pBaseAddress = LVM_NULL;
-    }
-    else
-    {
-        /* Read back memory allocation table */
-        *pMemoryTable = pInstance->MemoryTable;
-    }
-
-    return(LVDBE_SUCCESS);
-}
-
-/****************************************************************************************/
-/*                                                                                      */
 /* FUNCTION:                 LVDBE_Init                                                 */
 /*                                                                                      */
 /* DESCRIPTION:                                                                         */
-/*    Create and initialisation function for the Dynamic Bass Enhancement module        */
-/*                                                                                      */
-/*    This function can be used to create an algorithm instance by calling with         */
-/*    hInstance set to NULL. In this case the algorithm returns the new instance        */
-/*    handle.                                                                           */
-/*                                                                                      */
-/*    This function can be used to force a full re-initialisation of the algorithm      */
-/*    by calling with hInstance = Instance Handle. In this case the memory table        */
-/*    should be correct for the instance, this can be ensured by calling the function   */
-/*    DBE_Memory before calling this function.                                          */
+/*    Creation and initialisation function for the Bass Enhancement module              */
 /*                                                                                      */
 /* PARAMETERS:                                                                          */
-/*  hInstance                  Instance handle                                          */
-/*  pMemoryTable             Pointer to the memory definition table                     */
-/*  pCapabilities              Pointer to the instance capabilities                     */
+/*  phInstance               Pointer to instance handle                                 */
+/*  pCapabilities            Pointer to the initialisation capabilities                 */
+/*  pScratch                 Pointer to the bundle scratch buffer                       */
 /*                                                                                      */
 /* RETURNS:                                                                             */
 /*  LVDBE_SUCCESS            Initialisation succeeded                                   */
-/*  LVDBE_ALIGNMENTERROR    Instance or scratch memory on incorrect alignment           */
-/*    LVDBE_NULLADDRESS        Instance or scratch memory has a NULL pointer            */
+/*  LVDBE_NULLADDRESS        A memory allocation failed, returning a NULL pointer       */
 /*                                                                                      */
 /* NOTES:                                                                               */
-/*  1.     The instance handle is the pointer to the base address of the first memory   */
-/*        region.                                                                       */
-/*    2.    This function must not be interrupted by the LVDBE_Process function         */
+/*  1.    This function must not be interrupted by the LVDBE_Process function           */
 /*                                                                                      */
 /****************************************************************************************/
-
-LVDBE_ReturnStatus_en LVDBE_Init(LVDBE_Handle_t         *phInstance,
-                                   LVDBE_MemTab_t       *pMemoryTable,
-                                   LVDBE_Capabilities_t *pCapabilities)
-{
-
-    LVDBE_Instance_t      *pInstance;
-    LVMixer3_1St_FLOAT_st       *pMixer_Instance;
-    LVMixer3_2St_FLOAT_st       *pBypassMixer_Instance;
-    LVM_FLOAT             MixGain;
-    LVM_INT16             i;
+LVDBE_ReturnStatus_en LVDBE_Init(LVDBE_Handle_t* phInstance, LVDBE_Capabilities_t* pCapabilities,
+                                 void* pScratch) {
+    LVDBE_Instance_t* pInstance;
+    LVMixer3_1St_FLOAT_st* pMixer_Instance;
+    LVMixer3_2St_FLOAT_st* pBypassMixer_Instance;
+    LVM_FLOAT MixGain;
 
     /*
-     * Set the instance handle if not already initialised
+     * Create the instance handle if not already initialised
      */
-    if (*phInstance == LVM_NULL)
-    {
-        *phInstance = (LVDBE_Handle_t)pMemoryTable->Region[LVDBE_MEMREGION_INSTANCE].pBaseAddress;
+    if (*phInstance == LVM_NULL) {
+        *phInstance = calloc(1, sizeof(*pInstance));
     }
-    pInstance =(LVDBE_Instance_t  *)*phInstance;
-
-    /*
-     * Check the memory table for NULL pointers and incorrectly aligned data
-     */
-    for (i=0; i<LVDBE_NR_MEMORY_REGIONS; i++)
-    {
-        if (pMemoryTable->Region[i].Size!=0)
-        {
-            if (pMemoryTable->Region[i].pBaseAddress==LVM_NULL)
-            {
-                return(LVDBE_NULLADDRESS);
-            }
-            if (((uintptr_t)pMemoryTable->Region[i].pBaseAddress % pMemoryTable->Region[i].Alignment)!=0){
-                return(LVDBE_ALIGNMENTERROR);
-            }
-        }
+    if (*phInstance == LVM_NULL) {
+        return LVDBE_NULLADDRESS;
     }
+    pInstance = (LVDBE_Instance_t*)*phInstance;
 
     /*
      * Save the memory table in the instance structure
      */
     pInstance->Capabilities = *pCapabilities;
 
-    /*
-     * Save the memory table in the instance structure
-     */
-    pInstance->MemoryTable = *pMemoryTable;
+    pInstance->pScratch = pScratch;
 
     /*
      * Set the default instance parameters
      */
-    pInstance->Params.CentreFrequency   =    LVDBE_CENTRE_55HZ;
-    pInstance->Params.EffectLevel       =    0;
-    pInstance->Params.HeadroomdB        =    0;
-    pInstance->Params.HPFSelect         =    LVDBE_HPF_OFF;
-    pInstance->Params.OperatingMode     =    LVDBE_OFF;
-    pInstance->Params.SampleRate        =    LVDBE_FS_8000;
-    pInstance->Params.VolumeControl     =    LVDBE_VOLUME_OFF;
-    pInstance->Params.VolumedB          =    0;
+    pInstance->Params.CentreFrequency = LVDBE_CENTRE_55HZ;
+    pInstance->Params.EffectLevel = 0;
+    pInstance->Params.HeadroomdB = 0;
+    pInstance->Params.HPFSelect = LVDBE_HPF_OFF;
+    pInstance->Params.OperatingMode = LVDBE_OFF;
+    pInstance->Params.SampleRate = LVDBE_FS_8000;
+    pInstance->Params.VolumeControl = LVDBE_VOLUME_OFF;
+    pInstance->Params.VolumedB = 0;
 
     /*
-     * Set pointer to data and coef memory
+     * Create pointer to data and coef memory
      */
-    pInstance->pData =
-         (LVDBE_Data_FLOAT_t *)pMemoryTable->Region[LVDBE_MEMREGION_PERSISTENT_DATA].pBaseAddress;
-    pInstance->pCoef =
-         (LVDBE_Coef_FLOAT_t *)pMemoryTable->Region[LVDBE_MEMREGION_PERSISTENT_COEF].pBaseAddress;
+    pInstance->pData = (LVDBE_Data_FLOAT_t*)calloc(1, sizeof(*(pInstance->pData)));
+    if (pInstance->pData == NULL) {
+        return LVDBE_NULLADDRESS;
+    }
+    pInstance->pCoef = (LVDBE_Coef_FLOAT_t*)calloc(1, sizeof(*(pInstance->pCoef)));
+    if (pInstance->pCoef == NULL) {
+        return LVDBE_NULLADDRESS;
+    }
 
     /*
      * Initialise the filters
      */
-    LVDBE_SetFilters(pInstance,                 /* Set the filter taps and coefficients */
+    LVDBE_SetFilters(pInstance, /* Set the filter taps and coefficients */
                      &pInstance->Params);
 
     /*
      * Initialise the AGC
      */
-    LVDBE_SetAGC(pInstance,                                     /* Set the AGC gain */
+    LVDBE_SetAGC(pInstance, /* Set the AGC gain */
                  &pInstance->Params);
     pInstance->pData->AGCInstance.AGC_Gain = pInstance->pData->AGCInstance.AGC_MaxGain;
-                                                /* Default to the bass boost setting */
+    /* Default to the bass boost setting */
 
     // initialize the mixer with some fixed values since otherwise LVDBE_SetVolume ends up
     // reading uninitialized data
@@ -233,11 +116,11 @@
     /*
      * Initialise the volume
      */
-    LVDBE_SetVolume(pInstance,                                         /* Set the Volume */
+    LVDBE_SetVolume(pInstance, /* Set the Volume */
                     &pInstance->Params);
 
     pInstance->pData->AGCInstance.Volume = pInstance->pData->AGCInstance.Target;
-                                                /* Initialise as the target */
+    /* Initialise as the target */
     MixGain = LVC_Mixer_GetTarget(&pMixer_Instance->MixerStream[0]);
     LVC_Mixer_Init(&pMixer_Instance->MixerStream[0], MixGain, MixGain);
 
@@ -259,11 +142,11 @@
     pBypassMixer_Instance->MixerStream[0].CallbackParam = 0;
     pBypassMixer_Instance->MixerStream[0].pCallbackHandle = LVM_NULL;
     pBypassMixer_Instance->MixerStream[0].pCallBack = LVM_NULL;
-    pBypassMixer_Instance->MixerStream[0].CallbackSet=0;
+    pBypassMixer_Instance->MixerStream[0].CallbackSet = 0;
 
-    LVC_Mixer_Init(&pBypassMixer_Instance->MixerStream[0],0,0);
-    LVC_Mixer_SetTimeConstant(&pBypassMixer_Instance->MixerStream[0],
-        LVDBE_BYPASS_MIXER_TC,(LVM_Fs_en)pInstance->Params.SampleRate,2);
+    LVC_Mixer_Init(&pBypassMixer_Instance->MixerStream[0], 0, 0);
+    LVC_Mixer_SetTimeConstant(&pBypassMixer_Instance->MixerStream[0], LVDBE_BYPASS_MIXER_TC,
+                              (LVM_Fs_en)pInstance->Params.SampleRate, 2);
 
     /*
      * Setup the mixer gain for the unprocessed path
@@ -271,10 +154,38 @@
     pBypassMixer_Instance->MixerStream[1].CallbackParam = 0;
     pBypassMixer_Instance->MixerStream[1].pCallbackHandle = LVM_NULL;
     pBypassMixer_Instance->MixerStream[1].pCallBack = LVM_NULL;
-    pBypassMixer_Instance->MixerStream[1].CallbackSet=0;
+    pBypassMixer_Instance->MixerStream[1].CallbackSet = 0;
     LVC_Mixer_Init(&pBypassMixer_Instance->MixerStream[1], 1.0, 1.0);
-    LVC_Mixer_SetTimeConstant(&pBypassMixer_Instance->MixerStream[1],
-        LVDBE_BYPASS_MIXER_TC,(LVM_Fs_en)pInstance->Params.SampleRate, 2);
+    LVC_Mixer_SetTimeConstant(&pBypassMixer_Instance->MixerStream[1], LVDBE_BYPASS_MIXER_TC,
+                              (LVM_Fs_en)pInstance->Params.SampleRate, 2);
 
-    return(LVDBE_SUCCESS);
+    return (LVDBE_SUCCESS);
+}
+
+/****************************************************************************************/
+/*                                                                                      */
+/* FUNCTION:                 LVDBE_DeInit                                               */
+/*                                                                                      */
+/* DESCRIPTION:                                                                         */
+/*    Free the memory allocated during LVDBE_Init, including the instance handle        */
+/*                                                                                      */
+/* PARAMETERS:                                                                          */
+/*  phInstance               Pointer to instance handle                                 */
+/*                                                                                      */
+/****************************************************************************************/
+void LVDBE_DeInit(LVDBE_Handle_t* phInstance) {
+    LVDBE_Instance_t* pInstance = (LVDBE_Instance_t*)*phInstance;
+    if (pInstance == LVM_NULL) {
+        return;
+    }
+    if (pInstance->pData != LVM_NULL) {
+        free(pInstance->pData);
+        pInstance->pData = LVM_NULL;
+    }
+    if (pInstance->pCoef != LVM_NULL) {
+        free(pInstance->pCoef);
+        pInstance->pCoef = LVM_NULL;
+    }
+    free(pInstance);
+    *phInstance = LVM_NULL;
 }
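
Since LVDBE_Memory is gone and LVDBE_Init now allocates the instance itself, a minimal caller-side sketch of the new lifecycle may help when reading the rest of this change. It is illustrative only: the sample rate, channel count, capability usage and scratch sizing are assumptions based on how LVDBE_Process indexes pScratch later in this patch, not values taken from LVDBE.h.

// Illustrative sketch of the calloc-based lifecycle; assumptions are marked inline.
#include <stdlib.h>

#include "LVDBE.h"

LVDBE_ReturnStatus_en runBassBoost(const LVM_FLOAT* pIn, LVM_FLOAT* pOut, LVM_UINT16 nrFrames) {
    LVDBE_Capabilities_t capabilities = {};
    capabilities.MaxBlockSize = nrFrames; /* only capability field referenced in this change */

    const int nrChannels = 2; /* stereo example */
    /* Assumed sizing: LVDBE_Process uses pScratch[0..NrSamples) plus pMono at offset NrSamples */
    void* pScratch = calloc(2 * nrChannels * nrFrames, sizeof(LVM_FLOAT));
    if (pScratch == LVM_NULL) {
        return LVDBE_NULLADDRESS;
    }

    LVDBE_Handle_t hInstance = LVM_NULL;
    LVDBE_ReturnStatus_en status = LVDBE_Init(&hInstance, &capabilities, pScratch);
    if (status != LVDBE_SUCCESS) {
        free(pScratch);
        return status;
    }

    LVDBE_Params_t params = {};
    params.OperatingMode = LVDBE_ON;
    params.CentreFrequency = LVDBE_CENTRE_55HZ;
    params.HPFSelect = LVDBE_HPF_ON;
    params.VolumeControl = LVDBE_VOLUME_OFF;
    params.SampleRate = LVDBE_FS_48000; /* assumed enum value, following the LVDBE_FS_8000 naming */
    params.NrChannels = nrChannels;
    status = LVDBE_Control(hInstance, &params);

    if (status == LVDBE_SUCCESS) {
        status = LVDBE_Process(hInstance, pIn, pOut, nrFrames);
    }

    LVDBE_DeInit(&hInstance); /* frees pData, pCoef and the instance itself */
    free(pScratch);           /* the scratch buffer stays owned by the caller */
    return status;
}
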
diff --git a/media/libeffects/lvm/lib/Bass/src/LVDBE_Private.h b/media/libeffects/lvm/lib/Bass/src/LVDBE_Private.h
index f3faaed..4fef1ef 100644
--- a/media/libeffects/lvm/lib/Bass/src/LVDBE_Private.h
+++ b/media/libeffects/lvm/lib/Bass/src/LVDBE_Private.h
@@ -33,7 +33,7 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-#include "LVDBE.h"                                /* Calling or Application layer definitions */
+#include "LVDBE.h" /* Calling or Application layer definitions */
 #include "BIQUAD.h"
 #include "LVC_Mixer.h"
 #include "AGC.h"
@@ -45,28 +45,10 @@
 /****************************************************************************************/
 
 /* General */
-#define    LVDBE_INVALID            0xFFFF        /* Invalid init parameter */
+#define LVDBE_INVALID 0xFFFF /* Invalid init parameter */
 
-/* Memory */
-#define LVDBE_MEMREGION_INSTANCE         0       /* Offset to the instance memory region */
-#define LVDBE_MEMREGION_PERSISTENT_DATA  1       /* Offset to persistent data memory region */
-#define LVDBE_MEMREGION_PERSISTENT_COEF  2       /* Offset to persistent coefficient region */
-#define LVDBE_MEMREGION_SCRATCH          3       /* Offset to data scratch memory region */
-
-#define LVDBE_INSTANCE_ALIGN             4       /* 32-bit alignment for structures */
-#define LVDBE_PERSISTENT_DATA_ALIGN      4       /* 32-bit alignment for data */
-#define LVDBE_PERSISTENT_COEF_ALIGN      4       /* 32-bit alignment for coef */
-#define LVDBE_SCRATCH_ALIGN              4       /* 32-bit alignment for long data */
-
-#ifdef SUPPORT_MC
-/* Number of buffers required for inplace processing */
-#define LVDBE_SCRATCHBUFFERS_INPLACE     (LVM_MAX_CHANNELS * 3)
-#else
-#define LVDBE_SCRATCHBUFFERS_INPLACE     6       /* Number of buffers required for inplace processing */
-#endif
-
-#define LVDBE_MIXER_TC                   5       /* Mixer time  */
-#define LVDBE_BYPASS_MIXER_TC            100     /* Bypass mixer time */
+#define LVDBE_MIXER_TC 5          /* Mixer time  */
+#define LVDBE_BYPASS_MIXER_TC 100 /* Bypass mixer time */
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -76,37 +58,34 @@
 
 /* Data structure */
 /* Data structure */
-typedef struct
-{
+typedef struct {
     /* AGC parameters */
-    AGC_MIX_VOL_2St1Mon_FLOAT_t   AGCInstance;        /* AGC instance parameters */
+    AGC_MIX_VOL_2St1Mon_FLOAT_t AGCInstance; /* AGC instance parameters */
 
     /* Process variables */
-    Biquad_2I_Order2_FLOAT_Taps_t     HPFTaps;            /* High pass filter taps */
-    Biquad_1I_Order2_FLOAT_Taps_t     BPFTaps;            /* Band pass filter taps */
-    LVMixer3_1St_FLOAT_st             BypassVolume;       /* Bypass volume scaler */
-    LVMixer3_2St_FLOAT_st             BypassMixer;        /* Bypass Mixer for Click Removal */
+    Biquad_2I_Order2_FLOAT_Taps_t HPFTaps; /* High pass filter taps */
+    Biquad_1I_Order2_FLOAT_Taps_t BPFTaps; /* Band pass filter taps */
+    LVMixer3_1St_FLOAT_st BypassVolume;    /* Bypass volume scaler */
+    LVMixer3_2St_FLOAT_st BypassMixer;     /* Bypass Mixer for Click Removal */
 
 } LVDBE_Data_FLOAT_t;
 
 /* Coefs structure */
-typedef struct
-{
+typedef struct {
     /* Process variables */
-    Biquad_FLOAT_Instance_t           HPFInstance;        /* High pass filter instance */
-    Biquad_FLOAT_Instance_t           BPFInstance;        /* Band pass filter instance */
+    Biquad_FLOAT_Instance_t HPFInstance; /* High pass filter instance */
+    Biquad_FLOAT_Instance_t BPFInstance; /* Band pass filter instance */
 } LVDBE_Coef_FLOAT_t;
 /* Instance structure */
-typedef struct
-{
+typedef struct {
     /* Public parameters */
-    LVDBE_MemTab_t                MemoryTable;        /* Instance memory allocation table */
-    LVDBE_Params_t                Params;             /* Instance parameters */
-    LVDBE_Capabilities_t        Capabilities;         /* Instance capabilities */
+    LVDBE_Params_t Params;             /* Instance parameters */
+    LVDBE_Capabilities_t Capabilities; /* Instance capabilities */
 
     /* Data and coefficient pointers */
-    LVDBE_Data_FLOAT_t                *pData;                /* Instance data */
-    LVDBE_Coef_FLOAT_t                *pCoef;                /* Instance coefficients */
+    LVDBE_Data_FLOAT_t* pData; /* Instance data */
+    LVDBE_Coef_FLOAT_t* pCoef; /* Instance coefficients */
+    void* pScratch;            /* scratch pointer */
 } LVDBE_Instance_t;
 
 /****************************************************************************************/
@@ -115,13 +94,10 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-void    LVDBE_SetAGC(LVDBE_Instance_t       *pInstance,
-                     LVDBE_Params_t         *pParams);
+void LVDBE_SetAGC(LVDBE_Instance_t* pInstance, LVDBE_Params_t* pParams);
 
-void    LVDBE_SetVolume(LVDBE_Instance_t    *pInstance,
-                        LVDBE_Params_t      *pParams);
+void LVDBE_SetVolume(LVDBE_Instance_t* pInstance, LVDBE_Params_t* pParams);
 
-void    LVDBE_SetFilters(LVDBE_Instance_t   *pInstance,
-                         LVDBE_Params_t     *pParams);
+void LVDBE_SetFilters(LVDBE_Instance_t* pInstance, LVDBE_Params_t* pParams);
 
-#endif      /* __LVDBE_PRIVATE_H__ */
+#endif /* __LVDBE_PRIVATE_H__ */
diff --git a/media/libeffects/lvm/lib/Bass/src/LVDBE_Process.cpp b/media/libeffects/lvm/lib/Bass/src/LVDBE_Process.cpp
index b4a71c7..f4a4d6f 100644
--- a/media/libeffects/lvm/lib/Bass/src/LVDBE_Process.cpp
+++ b/media/libeffects/lvm/lib/Bass/src/LVDBE_Process.cpp
@@ -21,12 +21,12 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-#include <string.h> // memset
+#include <string.h>  // memset
 #include "LVDBE.h"
 #include "LVDBE_Private.h"
 #include "VectorArithmetic.h"
 #include "AGC.h"
-#include "LVDBE_Coeffs.h"               /* Filter coefficients */
+#include "LVDBE_Coeffs.h" /* Filter coefficients */
 #include <log/log.h>
 
 /********************************************************************************************/
@@ -73,179 +73,122 @@
 /*     overall end to end gain is 0dB.                                                      */
 /*                                                                                          */
 /********************************************************************************************/
-LVDBE_ReturnStatus_en LVDBE_Process(LVDBE_Handle_t hInstance,
-    const LVM_FLOAT *pInData,
-    LVM_FLOAT *pOutData,
-    const LVM_UINT16 NrFrames) // updated to use samples = frames * channels.
+LVDBE_ReturnStatus_en LVDBE_Process(
+        LVDBE_Handle_t hInstance, const LVM_FLOAT* pInData, LVM_FLOAT* pOutData,
+        const LVM_UINT16 NrFrames)  // updated to use samples = frames * channels.
 {
-  LVDBE_Instance_t *pInstance =(LVDBE_Instance_t *)hInstance;
+    LVDBE_Instance_t* pInstance = (LVDBE_Instance_t*)hInstance;
 
-  /*Extract number of Channels info*/
-#ifdef SUPPORT_MC
-  // Mono passed in as stereo
-  const LVM_INT32 NrChannels = pInstance->Params.NrChannels == 1
-      ? 2 : pInstance->Params.NrChannels;
-#else
-  const LVM_INT32 NrChannels = 2; // FCC_2
-#endif
-  const LVM_INT32 NrSamples = NrChannels * NrFrames;
+    /*Extract number of Channels info*/
+    // Mono passed in as stereo
+    const LVM_INT32 NrChannels =
+            pInstance->Params.NrChannels == 1 ? 2 : pInstance->Params.NrChannels;
+    const LVM_INT32 NrSamples = NrChannels * NrFrames;
 
-  /* Space to store DBE path computation */
-  LVM_FLOAT * const pScratch =
-          (LVM_FLOAT *)pInstance->MemoryTable.Region[LVDBE_MEMREGION_SCRATCH].pBaseAddress;
-
-  /*
-   * Scratch for Mono path starts at offset of
-   * NrSamples float values from pScratch.
-   */
-  LVM_FLOAT * const pMono = pScratch + NrSamples;
-
-  /*
-   * TRICKY: pMono is used and discarded by the DBE path.
-   *         so it is available for use for the pScratchVol
-   *         path which is computed afterwards.
-   *
-   * Space to store Volume Control path computation.
-   * This is identical to pMono (see TRICKY comment).
-   */
-  LVM_FLOAT * const pScratchVol = pMono;
-
-  /*
-   * Check the number of frames is not too large
-   */
-  if (NrFrames > pInstance->Capabilities.MaxBlockSize)
-  {
-    return LVDBE_TOOMANYSAMPLES;
-  }
-
-  /*
-   * Check if the algorithm is enabled
-   */
-  /* DBE path is processed when DBE is ON or during On/Off transitions */
-  if ((pInstance->Params.OperatingMode == LVDBE_ON)||
-      (LVC_Mixer_GetCurrent(&pInstance->pData->BypassMixer.MixerStream[0])
-          !=LVC_Mixer_GetTarget(&pInstance->pData->BypassMixer.MixerStream[0])))
-  {
-    // make copy of input data
-    Copy_Float(pInData,
-        pScratch,
-        (LVM_INT16)NrSamples);
+    /* Space to store DBE path computation */
+    LVM_FLOAT* const pScratch = (LVM_FLOAT*)pInstance->pScratch;
 
     /*
-     * Apply the high pass filter if selected
+     * Scratch for Mono path starts at offset of
+     * NrSamples float values from pScratch.
      */
-    if (pInstance->Params.HPFSelect == LVDBE_HPF_ON)
-    {
-#ifdef SUPPORT_MC
-      BQ_MC_D32F32C30_TRC_WRA_01(&pInstance->pCoef->HPFInstance, /* Filter instance      */
-          pScratch, /* Source               */
-          pScratch, /* Destination          */
-          (LVM_INT16)NrFrames,
-          (LVM_INT16)NrChannels);
-#else
-      BQ_2I_D32F32C30_TRC_WRA_01(&pInstance->pCoef->HPFInstance,/* Filter instance      */
-          pScratch, /* Source               */
-          pScratch, /* Destination          */
-          (LVM_INT16)NrFrames);
-#endif
+    LVM_FLOAT* const pMono = pScratch + NrSamples;
+
+    /*
+     * TRICKY: pMono is used and discarded by the DBE path.
+     *         so it is available for use for the pScratchVol
+     *         path which is computed afterwards.
+     *
+     * Space to store Volume Control path computation.
+     * This is identical to pMono (see TRICKY comment).
+     */
+    LVM_FLOAT* const pScratchVol = pMono;
+
+    /*
+     * Check the number of frames is not too large
+     */
+    if (NrFrames > pInstance->Capabilities.MaxBlockSize) {
+        return LVDBE_TOOMANYSAMPLES;
     }
 
     /*
-     * Create the mono stream
+     * Check if the algorithm is enabled
      */
-#ifdef SUPPORT_MC
-    FromMcToMono_Float(pScratch, /* Source */
-        pMono, /* Mono destination */
-        (LVM_INT16)NrFrames,  /* Number of frames */
-        (LVM_INT16)NrChannels);
-#else
-    From2iToMono_Float(pScratch, /* Stereo source         */
-        pMono, /* Mono destination      */
-        (LVM_INT16)NrFrames);
-#endif
+    /* DBE path is processed when DBE is ON or during On/Off transitions */
+    if ((pInstance->Params.OperatingMode == LVDBE_ON) ||
+        (LVC_Mixer_GetCurrent(&pInstance->pData->BypassMixer.MixerStream[0]) !=
+         LVC_Mixer_GetTarget(&pInstance->pData->BypassMixer.MixerStream[0]))) {
+        // make copy of input data
+        Copy_Float(pInData, pScratch, (LVM_INT16)NrSamples);
 
-    /*
-     * Apply the band pass filter
-     */
-    BP_1I_D32F32C30_TRC_WRA_02(&pInstance->pCoef->BPFInstance, /* Filter instance       */
-        pMono, /* Source                */
-        pMono, /* Destination           */
-        (LVM_INT16)NrFrames);
+        /*
+         * Apply the high pass filter if selected
+         */
+        if (pInstance->Params.HPFSelect == LVDBE_HPF_ON) {
+            BQ_MC_D32F32C30_TRC_WRA_01(&pInstance->pCoef->HPFInstance, /* Filter instance      */
+                                       pScratch,                       /* Source               */
+                                       pScratch,                       /* Destination          */
+                                       (LVM_INT16)NrFrames, (LVM_INT16)NrChannels);
+        }
 
-    /*
-     * Apply the AGC and mix
-     */
-#ifdef SUPPORT_MC
-    AGC_MIX_VOL_Mc1Mon_D32_WRA(&pInstance->pData->AGCInstance, /* Instance pointer      */
-        pScratch, /* Source         */
-        pMono, /* Mono band pass source */
-        pScratch, /* Destination    */
-        NrFrames, /* Number of frames     */
-        NrChannels); /* Number of channels     */
-#else
-    AGC_MIX_VOL_2St1Mon_D32_WRA(&pInstance->pData->AGCInstance, /* Instance pointer      */
-        pScratch, /* Stereo source         */
-        pMono, /* Mono band pass source */
-        pScratch, /* Stereo destination    */
-        NrFrames);
-#endif
+        /*
+         * Create the mono stream
+         */
+        FromMcToMono_Float(pScratch,            /* Source */
+                           pMono,               /* Mono destination */
+                           (LVM_INT16)NrFrames, /* Number of frames */
+                           (LVM_INT16)NrChannels);
 
-    for (LVM_INT32 ii = 0; ii < NrSamples; ++ii) {
-      //TODO: replace with existing clamping function
-      if (pScratch[ii] < -1.0) {
-        pScratch[ii] = -1.0;
-      } else if (pScratch[ii] > 1.0) {
-        pScratch[ii] = 1.0;
-      }
+        /*
+         * Apply the band pass filter
+         */
+        BP_1I_D32F32C30_TRC_WRA_02(&pInstance->pCoef->BPFInstance, /* Filter instance       */
+                                   pMono,                          /* Source                */
+                                   pMono,                          /* Destination           */
+                                   (LVM_INT16)NrFrames);
+
+        /*
+         * Apply the AGC and mix
+         */
+        AGC_MIX_VOL_Mc1Mon_D32_WRA(&pInstance->pData->AGCInstance, /* Instance pointer      */
+                                   pScratch,                       /* Source         */
+                                   pMono,                          /* Mono band pass source */
+                                   pScratch,                       /* Destination    */
+                                   NrFrames,                       /* Number of frames     */
+                                   NrChannels);                    /* Number of channels     */
+
+        for (LVM_INT32 ii = 0; ii < NrSamples; ++ii) {
+            // TODO: replace with existing clamping function
+            if (pScratch[ii] < -1.0) {
+                pScratch[ii] = -1.0;
+            } else if (pScratch[ii] > 1.0) {
+                pScratch[ii] = 1.0;
+            }
+        }
+    } else {
+        // clear DBE processed path
+        memset(pScratch, 0, sizeof(*pScratch) * NrSamples);
     }
-  } else {
-    // clear DBE processed path
-    memset(pScratch, 0, sizeof(*pScratch) * NrSamples);
-  }
 
-  /* Bypass Volume path is processed when DBE is OFF or during On/Off transitions */
-  if ((pInstance->Params.OperatingMode == LVDBE_OFF)||
-      (LVC_Mixer_GetCurrent(&pInstance->pData->BypassMixer.MixerStream[1])
-          !=LVC_Mixer_GetTarget(&pInstance->pData->BypassMixer.MixerStream[1])))
-  {
+    /* Bypass Volume path is processed when DBE is OFF or during On/Off transitions */
+    if ((pInstance->Params.OperatingMode == LVDBE_OFF) ||
+        (LVC_Mixer_GetCurrent(&pInstance->pData->BypassMixer.MixerStream[1]) !=
+         LVC_Mixer_GetTarget(&pInstance->pData->BypassMixer.MixerStream[1]))) {
+        /*
+         * The algorithm is disabled but volume management is required to compensate for
+         * headroom and volume (if enabled)
+         */
+        LVC_MixSoft_Mc_D16C31_SAT(&pInstance->pData->BypassVolume, pInData, pScratchVol,
+                                  (LVM_INT16)NrFrames, (LVM_INT16)NrChannels);
+    } else {
+        // clear bypass volume path
+        memset(pScratchVol, 0, sizeof(*pScratchVol) * NrSamples);
+    }
 
     /*
-     * The algorithm is disabled but volume management is required to compensate for
-     * headroom and volume (if enabled)
+     * Mix DBE processed path and bypass volume path
      */
-#ifdef SUPPORT_MC
-    LVC_MixSoft_Mc_D16C31_SAT(&pInstance->pData->BypassVolume,
-        pInData,
-        pScratchVol,
-        (LVM_INT16)NrFrames,
-        (LVM_INT16)NrChannels);
-#else
-    LVC_MixSoft_1St_D16C31_SAT(&pInstance->pData->BypassVolume,
-        pInData,
-        pScratchVol,
-        (LVM_INT16)NrSamples); /* Left and right, really # samples */
-#endif
-  } else {
-    // clear bypass volume path
-    memset(pScratchVol, 0, sizeof(*pScratchVol) * NrSamples);
-  }
-
-  /*
-   * Mix DBE processed path and bypass volume path
-   */
-#ifdef SUPPORT_MC
-  LVC_MixSoft_2Mc_D16C31_SAT(&pInstance->pData->BypassMixer,
-      pScratch,
-      pScratchVol,
-      pOutData,
-      (LVM_INT16)NrFrames,
-      (LVM_INT16)NrChannels);
-#else
-  LVC_MixSoft_2St_D16C31_SAT(&pInstance->pData->BypassMixer,
-      pScratch,
-      pScratchVol,
-      pOutData,
-      (LVM_INT16)NrSamples);
-#endif
-  return LVDBE_SUCCESS;
+    LVC_MixSoft_2Mc_D16C31_SAT(&pInstance->pData->BypassMixer, pScratch, pScratchVol, pOutData,
+                               (LVM_INT16)NrFrames, (LVM_INT16)NrChannels);
+    return LVDBE_SUCCESS;
 }
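
A sizing consequence of the scratch handling above, inferred from the new layout rather than stated in the code: pMono starts NrSamples floats into the caller-supplied buffer and is reused as pScratchVol for another NrSamples floats, so the pScratch handed to LVDBE_Init must provide at least 2 * NrChannels * MaxBlockSize LVM_FLOAT values. With LVDBE_MEMREGION_SCRATCH removed, that buffer is now sized and owned by the caller (the effect bundle), not by this module.
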
diff --git a/media/libeffects/lvm/lib/Bass/src/LVDBE_Tables.cpp b/media/libeffects/lvm/lib/Bass/src/LVDBE_Tables.cpp
index 728575c..1b95812 100644
--- a/media/libeffects/lvm/lib/Bass/src/LVDBE_Tables.cpp
+++ b/media/libeffects/lvm/lib/Bass/src/LVDBE_Tables.cpp
@@ -22,7 +22,7 @@
 /************************************************************************************/
 
 #include "LVDBE.h"
-#include "LVDBE_Coeffs.h"               /* Filter coefficients */
+#include "LVDBE_Coeffs.h" /* Filter coefficients */
 #include "LVDBE_Tables.h"
 #include "BIQUAD.h"
 
@@ -36,275 +36,119 @@
  * High Pass Filter Coefficient table
  */
 const BQ_FLOAT_Coefs_t LVDBE_HPF_Table[] = {
-    /* Coefficients for 55Hz centre frequency */
-    {HPF_Fs8000_Fc55_A2,                /* 8kS/s coefficients */
-     HPF_Fs8000_Fc55_A1,
-     HPF_Fs8000_Fc55_A0,
-     -HPF_Fs8000_Fc55_B2,
-     -HPF_Fs8000_Fc55_B1},
-    {HPF_Fs11025_Fc55_A2,                /* 11kS/s coefficients */
-     HPF_Fs11025_Fc55_A1,
-     HPF_Fs11025_Fc55_A0,
-     -HPF_Fs11025_Fc55_B2,
-     -HPF_Fs11025_Fc55_B1},
-    {HPF_Fs12000_Fc55_A2,                /* 12kS/s coefficients */
-     HPF_Fs12000_Fc55_A1,
-     HPF_Fs12000_Fc55_A0,
-     -HPF_Fs12000_Fc55_B2,
-     -HPF_Fs12000_Fc55_B1},
-    {HPF_Fs16000_Fc55_A2,                /* 16kS/s coefficients */
-     HPF_Fs16000_Fc55_A1,
-     HPF_Fs16000_Fc55_A0,
-     -HPF_Fs16000_Fc55_B2,
-     -HPF_Fs16000_Fc55_B1},
-    {HPF_Fs22050_Fc55_A2,                /* 22kS/s coefficients */
-     HPF_Fs22050_Fc55_A1,
-     HPF_Fs22050_Fc55_A0,
-     -HPF_Fs22050_Fc55_B2,
-     -HPF_Fs22050_Fc55_B1},
-    {HPF_Fs24000_Fc55_A2,                /* 24kS/s coefficients */
-     HPF_Fs24000_Fc55_A1,
-     HPF_Fs24000_Fc55_A0,
-     -HPF_Fs24000_Fc55_B2,
-     -HPF_Fs24000_Fc55_B1},
-    {HPF_Fs32000_Fc55_A2,                /* 32kS/s coefficients */
-     HPF_Fs32000_Fc55_A1,
-     HPF_Fs32000_Fc55_A0,
-     -HPF_Fs32000_Fc55_B2,
-     -HPF_Fs32000_Fc55_B1},
-    {HPF_Fs44100_Fc55_A2,                /* 44kS/s coefficients */
-     HPF_Fs44100_Fc55_A1,
-     HPF_Fs44100_Fc55_A0,
-     -HPF_Fs44100_Fc55_B2,
-     -HPF_Fs44100_Fc55_B1},
-    {HPF_Fs48000_Fc55_A2,                /* 48kS/s coefficients */
-     HPF_Fs48000_Fc55_A1,
-     HPF_Fs48000_Fc55_A0,
-     -HPF_Fs48000_Fc55_B2,
-     -HPF_Fs48000_Fc55_B1},
-    {HPF_Fs88200_Fc55_A2,                /* 88kS/s coefficients */
-     HPF_Fs88200_Fc55_A1,
-     HPF_Fs88200_Fc55_A0,
-     -HPF_Fs88200_Fc55_B2,
-     -HPF_Fs88200_Fc55_B1},
-    {HPF_Fs96000_Fc55_A2,                /* 96kS/s coefficients */
-     HPF_Fs96000_Fc55_A1,
-     HPF_Fs96000_Fc55_A0,
-     -HPF_Fs96000_Fc55_B2,
-     -HPF_Fs96000_Fc55_B1},
-    {HPF_Fs176400_Fc55_A2,                /* 176kS/s coefficients */
-     HPF_Fs176400_Fc55_A1,
-     HPF_Fs176400_Fc55_A0,
-     -HPF_Fs176400_Fc55_B2,
-     -HPF_Fs176400_Fc55_B1},
-    {HPF_Fs192000_Fc55_A2,                /* 192kS/s coefficients */
-     HPF_Fs192000_Fc55_A1,
-     HPF_Fs192000_Fc55_A0,
-     -HPF_Fs192000_Fc55_B2,
-     -HPF_Fs192000_Fc55_B1},
+        /* Coefficients for 55Hz centre frequency */
+        {HPF_Fs8000_Fc55_A2, /* 8kS/s coefficients */
+         HPF_Fs8000_Fc55_A1, HPF_Fs8000_Fc55_A0, -HPF_Fs8000_Fc55_B2, -HPF_Fs8000_Fc55_B1},
+        {HPF_Fs11025_Fc55_A2, /* 11kS/s coefficients */
+         HPF_Fs11025_Fc55_A1, HPF_Fs11025_Fc55_A0, -HPF_Fs11025_Fc55_B2, -HPF_Fs11025_Fc55_B1},
+        {HPF_Fs12000_Fc55_A2, /* 12kS/s coefficients */
+         HPF_Fs12000_Fc55_A1, HPF_Fs12000_Fc55_A0, -HPF_Fs12000_Fc55_B2, -HPF_Fs12000_Fc55_B1},
+        {HPF_Fs16000_Fc55_A2, /* 16kS/s coefficients */
+         HPF_Fs16000_Fc55_A1, HPF_Fs16000_Fc55_A0, -HPF_Fs16000_Fc55_B2, -HPF_Fs16000_Fc55_B1},
+        {HPF_Fs22050_Fc55_A2, /* 22kS/s coefficients */
+         HPF_Fs22050_Fc55_A1, HPF_Fs22050_Fc55_A0, -HPF_Fs22050_Fc55_B2, -HPF_Fs22050_Fc55_B1},
+        {HPF_Fs24000_Fc55_A2, /* 24kS/s coefficients */
+         HPF_Fs24000_Fc55_A1, HPF_Fs24000_Fc55_A0, -HPF_Fs24000_Fc55_B2, -HPF_Fs24000_Fc55_B1},
+        {HPF_Fs32000_Fc55_A2, /* 32kS/s coefficients */
+         HPF_Fs32000_Fc55_A1, HPF_Fs32000_Fc55_A0, -HPF_Fs32000_Fc55_B2, -HPF_Fs32000_Fc55_B1},
+        {HPF_Fs44100_Fc55_A2, /* 44kS/s coefficients */
+         HPF_Fs44100_Fc55_A1, HPF_Fs44100_Fc55_A0, -HPF_Fs44100_Fc55_B2, -HPF_Fs44100_Fc55_B1},
+        {HPF_Fs48000_Fc55_A2, /* 48kS/s coefficients */
+         HPF_Fs48000_Fc55_A1, HPF_Fs48000_Fc55_A0, -HPF_Fs48000_Fc55_B2, -HPF_Fs48000_Fc55_B1},
+        {HPF_Fs88200_Fc55_A2, /* 88kS/s coefficients */
+         HPF_Fs88200_Fc55_A1, HPF_Fs88200_Fc55_A0, -HPF_Fs88200_Fc55_B2, -HPF_Fs88200_Fc55_B1},
+        {HPF_Fs96000_Fc55_A2, /* 96kS/s coefficients */
+         HPF_Fs96000_Fc55_A1, HPF_Fs96000_Fc55_A0, -HPF_Fs96000_Fc55_B2, -HPF_Fs96000_Fc55_B1},
+        {HPF_Fs176400_Fc55_A2, /* 176kS/s coefficients */
+         HPF_Fs176400_Fc55_A1, HPF_Fs176400_Fc55_A0, -HPF_Fs176400_Fc55_B2, -HPF_Fs176400_Fc55_B1},
+        {HPF_Fs192000_Fc55_A2, /* 192kS/s coefficients */
+         HPF_Fs192000_Fc55_A1, HPF_Fs192000_Fc55_A0, -HPF_Fs192000_Fc55_B2, -HPF_Fs192000_Fc55_B1},
 
-    /* Coefficients for 66Hz centre frequency */
-    {HPF_Fs8000_Fc66_A2,                /* 8kS/s coefficients */
-     HPF_Fs8000_Fc66_A1,
-     HPF_Fs8000_Fc66_A0,
-     -HPF_Fs8000_Fc66_B2,
-     -HPF_Fs8000_Fc66_B1},
-    {HPF_Fs11025_Fc66_A2,                /* 11kS/s coefficients */
-     HPF_Fs11025_Fc66_A1,
-     HPF_Fs11025_Fc66_A0,
-     -HPF_Fs11025_Fc66_B2,
-     -HPF_Fs11025_Fc66_B1},
-    {HPF_Fs12000_Fc66_A2,                /* 12kS/s coefficients */
-     HPF_Fs12000_Fc66_A1,
-     HPF_Fs12000_Fc66_A0,
-     -HPF_Fs12000_Fc66_B2,
-     -HPF_Fs12000_Fc66_B1},
-    {HPF_Fs16000_Fc66_A2,                /* 16kS/s coefficients */
-     HPF_Fs16000_Fc66_A1,
-     HPF_Fs16000_Fc66_A0,
-     -HPF_Fs16000_Fc66_B2,
-     -HPF_Fs16000_Fc66_B1},
-    {HPF_Fs22050_Fc66_A2,                /* 22kS/s coefficients */
-     HPF_Fs22050_Fc66_A1,
-     HPF_Fs22050_Fc66_A0,
-     -HPF_Fs22050_Fc66_B2,
-     -HPF_Fs22050_Fc66_B1},
-    {HPF_Fs24000_Fc66_A2,                /* 24kS/s coefficients */
-     HPF_Fs24000_Fc66_A1,
-     HPF_Fs24000_Fc66_A0,
-     -HPF_Fs24000_Fc66_B2,
-     -HPF_Fs24000_Fc66_B1},
-    {HPF_Fs32000_Fc66_A2,                /* 32kS/s coefficients */
-     HPF_Fs32000_Fc66_A1,
-     HPF_Fs32000_Fc66_A0,
-     -HPF_Fs32000_Fc66_B2,
-     -HPF_Fs32000_Fc66_B1},
-    {HPF_Fs44100_Fc66_A2,                /* 44kS/s coefficients */
-     HPF_Fs44100_Fc66_A1,
-     HPF_Fs44100_Fc66_A0,
-     -HPF_Fs44100_Fc66_B2,
-     -HPF_Fs44100_Fc66_B1},
-    {HPF_Fs48000_Fc66_A2,                /* 48kS/s coefficients */
-     HPF_Fs48000_Fc66_A1,
-     HPF_Fs48000_Fc66_A0,
-     -HPF_Fs48000_Fc66_B2,
-     -HPF_Fs48000_Fc66_B1},
-    {HPF_Fs88200_Fc66_A2,                /* 88kS/s coefficients */
-     HPF_Fs88200_Fc66_A1,
-     HPF_Fs88200_Fc66_A0,
-     -HPF_Fs88200_Fc66_B2,
-     -HPF_Fs88200_Fc66_B1},
-    {HPF_Fs96000_Fc66_A2,                /* 96kS/s coefficients */
-     HPF_Fs96000_Fc66_A1,
-     HPF_Fs96000_Fc66_A0,
-     -HPF_Fs96000_Fc66_B2,
-     -HPF_Fs96000_Fc66_B1},
-    {HPF_Fs176400_Fc66_A2,                /* 176kS/s coefficients */
-     HPF_Fs176400_Fc66_A1,
-     HPF_Fs176400_Fc66_A0,
-     -HPF_Fs176400_Fc66_B2,
-     -HPF_Fs176400_Fc66_B1},
-    {HPF_Fs192000_Fc66_A2,                /* 192kS/s coefficients */
-     HPF_Fs192000_Fc66_A1,
-     HPF_Fs192000_Fc66_A0,
-     -HPF_Fs192000_Fc66_B2,
-     -HPF_Fs192000_Fc66_B1},
+        /* Coefficients for 66Hz centre frequency */
+        {HPF_Fs8000_Fc66_A2, /* 8kS/s coefficients */
+         HPF_Fs8000_Fc66_A1, HPF_Fs8000_Fc66_A0, -HPF_Fs8000_Fc66_B2, -HPF_Fs8000_Fc66_B1},
+        {HPF_Fs11025_Fc66_A2, /* 11kS/s coefficients */
+         HPF_Fs11025_Fc66_A1, HPF_Fs11025_Fc66_A0, -HPF_Fs11025_Fc66_B2, -HPF_Fs11025_Fc66_B1},
+        {HPF_Fs12000_Fc66_A2, /* 12kS/s coefficients */
+         HPF_Fs12000_Fc66_A1, HPF_Fs12000_Fc66_A0, -HPF_Fs12000_Fc66_B2, -HPF_Fs12000_Fc66_B1},
+        {HPF_Fs16000_Fc66_A2, /* 16kS/s coefficients */
+         HPF_Fs16000_Fc66_A1, HPF_Fs16000_Fc66_A0, -HPF_Fs16000_Fc66_B2, -HPF_Fs16000_Fc66_B1},
+        {HPF_Fs22050_Fc66_A2, /* 22kS/s coefficients */
+         HPF_Fs22050_Fc66_A1, HPF_Fs22050_Fc66_A0, -HPF_Fs22050_Fc66_B2, -HPF_Fs22050_Fc66_B1},
+        {HPF_Fs24000_Fc66_A2, /* 24kS/s coefficients */
+         HPF_Fs24000_Fc66_A1, HPF_Fs24000_Fc66_A0, -HPF_Fs24000_Fc66_B2, -HPF_Fs24000_Fc66_B1},
+        {HPF_Fs32000_Fc66_A2, /* 32kS/s coefficients */
+         HPF_Fs32000_Fc66_A1, HPF_Fs32000_Fc66_A0, -HPF_Fs32000_Fc66_B2, -HPF_Fs32000_Fc66_B1},
+        {HPF_Fs44100_Fc66_A2, /* 44kS/s coefficients */
+         HPF_Fs44100_Fc66_A1, HPF_Fs44100_Fc66_A0, -HPF_Fs44100_Fc66_B2, -HPF_Fs44100_Fc66_B1},
+        {HPF_Fs48000_Fc66_A2, /* 48kS/s coefficients */
+         HPF_Fs48000_Fc66_A1, HPF_Fs48000_Fc66_A0, -HPF_Fs48000_Fc66_B2, -HPF_Fs48000_Fc66_B1},
+        {HPF_Fs88200_Fc66_A2, /* 88kS/s coefficients */
+         HPF_Fs88200_Fc66_A1, HPF_Fs88200_Fc66_A0, -HPF_Fs88200_Fc66_B2, -HPF_Fs88200_Fc66_B1},
+        {HPF_Fs96000_Fc66_A2, /* 96kS/s coefficients */
+         HPF_Fs96000_Fc66_A1, HPF_Fs96000_Fc66_A0, -HPF_Fs96000_Fc66_B2, -HPF_Fs96000_Fc66_B1},
+        {HPF_Fs176400_Fc66_A2, /* 176kS/s coefficients */
+         HPF_Fs176400_Fc66_A1, HPF_Fs176400_Fc66_A0, -HPF_Fs176400_Fc66_B2, -HPF_Fs176400_Fc66_B1},
+        {HPF_Fs192000_Fc66_A2, /* 192kS/s coefficients */
+         HPF_Fs192000_Fc66_A1, HPF_Fs192000_Fc66_A0, -HPF_Fs192000_Fc66_B2, -HPF_Fs192000_Fc66_B1},
 
-    /* Coefficients for 78Hz centre frequency */
-    {HPF_Fs8000_Fc78_A2,                /* 8kS/s coefficients */
-     HPF_Fs8000_Fc78_A1,
-     HPF_Fs8000_Fc78_A0,
-     -HPF_Fs8000_Fc78_B2,
-     -HPF_Fs8000_Fc78_B1},
-    {HPF_Fs11025_Fc78_A2,                /* 11kS/s coefficients */
-     HPF_Fs11025_Fc78_A1,
-     HPF_Fs11025_Fc78_A0,
-     -HPF_Fs11025_Fc78_B2,
-     -HPF_Fs11025_Fc78_B1},
-    {HPF_Fs12000_Fc78_A2,                /* 12kS/s coefficients */
-     HPF_Fs12000_Fc78_A1,
-     HPF_Fs12000_Fc78_A0,
-     -HPF_Fs12000_Fc78_B2,
-     -HPF_Fs12000_Fc78_B1},
-    {HPF_Fs16000_Fc78_A2,                /* 16kS/s coefficients */
-     HPF_Fs16000_Fc78_A1,
-     HPF_Fs16000_Fc78_A0,
-     -HPF_Fs16000_Fc78_B2,
-     -HPF_Fs16000_Fc78_B1},
-    {HPF_Fs22050_Fc78_A2,                /* 22kS/s coefficients */
-     HPF_Fs22050_Fc78_A1,
-     HPF_Fs22050_Fc78_A0,
-     -HPF_Fs22050_Fc78_B2,
-     -HPF_Fs22050_Fc78_B1},
-    {HPF_Fs24000_Fc78_A2,                /* 24kS/s coefficients */
-     HPF_Fs24000_Fc78_A1,
-     HPF_Fs24000_Fc78_A0,
-     -HPF_Fs24000_Fc78_B2,
-     -HPF_Fs24000_Fc78_B1},
-    {HPF_Fs32000_Fc78_A2,                /* 32kS/s coefficients */
-     HPF_Fs32000_Fc78_A1,
-     HPF_Fs32000_Fc78_A0,
-     -HPF_Fs32000_Fc78_B2,
-     -HPF_Fs32000_Fc78_B1},
-    {HPF_Fs44100_Fc78_A2,                /* 44kS/s coefficients */
-     HPF_Fs44100_Fc78_A1,
-     HPF_Fs44100_Fc78_A0,
-     -HPF_Fs44100_Fc78_B2,
-     -HPF_Fs44100_Fc78_B1},
-    {HPF_Fs48000_Fc78_A2,                /* 48kS/s coefficients */
-     HPF_Fs48000_Fc78_A1,
-     HPF_Fs48000_Fc78_A0,
-     -HPF_Fs48000_Fc78_B2,
-     -HPF_Fs48000_Fc78_B1},
-    {HPF_Fs88200_Fc78_A2,                /* 88kS/s coefficients */
-     HPF_Fs88200_Fc78_A1,
-     HPF_Fs88200_Fc78_A0,
-     -HPF_Fs88200_Fc78_B2,
-     -HPF_Fs88200_Fc78_B1},
-    {HPF_Fs96000_Fc78_A2,                /* 96kS/s coefficients */
-     HPF_Fs96000_Fc78_A1,
-     HPF_Fs96000_Fc78_A0,
-     -HPF_Fs96000_Fc78_B2,
-     -HPF_Fs96000_Fc78_B1},
-    {HPF_Fs176400_Fc78_A2,                /* 176kS/s coefficients */
-     HPF_Fs176400_Fc78_A1,
-     HPF_Fs176400_Fc78_A0,
-     -HPF_Fs176400_Fc78_B2,
-     -HPF_Fs176400_Fc78_B1},
-    {HPF_Fs192000_Fc78_A2,                /* 192kS/s coefficients */
-     HPF_Fs192000_Fc78_A1,
-     HPF_Fs192000_Fc78_A0,
-     -HPF_Fs192000_Fc78_B2,
-     -HPF_Fs192000_Fc78_B1},
+        /* Coefficients for 78Hz centre frequency */
+        {HPF_Fs8000_Fc78_A2, /* 8kS/s coefficients */
+         HPF_Fs8000_Fc78_A1, HPF_Fs8000_Fc78_A0, -HPF_Fs8000_Fc78_B2, -HPF_Fs8000_Fc78_B1},
+        {HPF_Fs11025_Fc78_A2, /* 11kS/s coefficients */
+         HPF_Fs11025_Fc78_A1, HPF_Fs11025_Fc78_A0, -HPF_Fs11025_Fc78_B2, -HPF_Fs11025_Fc78_B1},
+        {HPF_Fs12000_Fc78_A2, /* 12kS/s coefficients */
+         HPF_Fs12000_Fc78_A1, HPF_Fs12000_Fc78_A0, -HPF_Fs12000_Fc78_B2, -HPF_Fs12000_Fc78_B1},
+        {HPF_Fs16000_Fc78_A2, /* 16kS/s coefficients */
+         HPF_Fs16000_Fc78_A1, HPF_Fs16000_Fc78_A0, -HPF_Fs16000_Fc78_B2, -HPF_Fs16000_Fc78_B1},
+        {HPF_Fs22050_Fc78_A2, /* 22kS/s coefficients */
+         HPF_Fs22050_Fc78_A1, HPF_Fs22050_Fc78_A0, -HPF_Fs22050_Fc78_B2, -HPF_Fs22050_Fc78_B1},
+        {HPF_Fs24000_Fc78_A2, /* 24kS/s coefficients */
+         HPF_Fs24000_Fc78_A1, HPF_Fs24000_Fc78_A0, -HPF_Fs24000_Fc78_B2, -HPF_Fs24000_Fc78_B1},
+        {HPF_Fs32000_Fc78_A2, /* 32kS/s coefficients */
+         HPF_Fs32000_Fc78_A1, HPF_Fs32000_Fc78_A0, -HPF_Fs32000_Fc78_B2, -HPF_Fs32000_Fc78_B1},
+        {HPF_Fs44100_Fc78_A2, /* 44kS/s coefficients */
+         HPF_Fs44100_Fc78_A1, HPF_Fs44100_Fc78_A0, -HPF_Fs44100_Fc78_B2, -HPF_Fs44100_Fc78_B1},
+        {HPF_Fs48000_Fc78_A2, /* 48kS/s coefficients */
+         HPF_Fs48000_Fc78_A1, HPF_Fs48000_Fc78_A0, -HPF_Fs48000_Fc78_B2, -HPF_Fs48000_Fc78_B1},
+        {HPF_Fs88200_Fc78_A2, /* 88kS/s coefficients */
+         HPF_Fs88200_Fc78_A1, HPF_Fs88200_Fc78_A0, -HPF_Fs88200_Fc78_B2, -HPF_Fs88200_Fc78_B1},
+        {HPF_Fs96000_Fc78_A2, /* 96kS/s coefficients */
+         HPF_Fs96000_Fc78_A1, HPF_Fs96000_Fc78_A0, -HPF_Fs96000_Fc78_B2, -HPF_Fs96000_Fc78_B1},
+        {HPF_Fs176400_Fc78_A2, /* 176kS/s coefficients */
+         HPF_Fs176400_Fc78_A1, HPF_Fs176400_Fc78_A0, -HPF_Fs176400_Fc78_B2, -HPF_Fs176400_Fc78_B1},
+        {HPF_Fs192000_Fc78_A2, /* 192kS/s coefficients */
+         HPF_Fs192000_Fc78_A1, HPF_Fs192000_Fc78_A0, -HPF_Fs192000_Fc78_B2, -HPF_Fs192000_Fc78_B1},
 
-    /* Coefficients for 90Hz centre frequency */
-    {HPF_Fs8000_Fc90_A2,                /* 8kS/s coefficients */
-     HPF_Fs8000_Fc90_A1,
-     HPF_Fs8000_Fc90_A0,
-     -HPF_Fs8000_Fc90_B2,
-     -HPF_Fs8000_Fc90_B1},
-    {HPF_Fs11025_Fc90_A2,                /* 11kS/s coefficients */
-     HPF_Fs11025_Fc90_A1,
-     HPF_Fs11025_Fc90_A0,
-     -HPF_Fs11025_Fc90_B2,
-     -HPF_Fs11025_Fc90_B1},
-    {HPF_Fs12000_Fc90_A2,                /* 12kS/s coefficients */
-     HPF_Fs12000_Fc90_A1,
-     HPF_Fs12000_Fc90_A0,
-     -HPF_Fs12000_Fc90_B2,
-     -HPF_Fs12000_Fc90_B1},
-    {HPF_Fs16000_Fc90_A2,                /* 16kS/s coefficients */
-     HPF_Fs16000_Fc90_A1,
-     HPF_Fs16000_Fc90_A0,
-     -HPF_Fs16000_Fc90_B2,
-     -HPF_Fs16000_Fc90_B1},
-    {HPF_Fs22050_Fc90_A2,                /* 22kS/s coefficients */
-     HPF_Fs22050_Fc90_A1,
-     HPF_Fs22050_Fc90_A0,
-     -HPF_Fs22050_Fc90_B2,
-     -HPF_Fs22050_Fc90_B1},
-    {HPF_Fs24000_Fc90_A2,                /* 24kS/s coefficients */
-     HPF_Fs24000_Fc90_A1,
-     HPF_Fs24000_Fc90_A0,
-     -HPF_Fs24000_Fc90_B2,
-     -HPF_Fs24000_Fc90_B1},
-    {HPF_Fs32000_Fc90_A2,                /* 32kS/s coefficients */
-     HPF_Fs32000_Fc90_A1,
-     HPF_Fs32000_Fc90_A0,
-     -HPF_Fs32000_Fc90_B2,
-     -HPF_Fs32000_Fc90_B1},
-    {HPF_Fs44100_Fc90_A2,                /* 44kS/s coefficients */
-     HPF_Fs44100_Fc90_A1,
-     HPF_Fs44100_Fc90_A0,
-     -HPF_Fs44100_Fc90_B2,
-     -HPF_Fs44100_Fc90_B1},
-    {HPF_Fs48000_Fc90_A2,                /* 48kS/s coefficients */
-     HPF_Fs48000_Fc90_A1,
-     HPF_Fs48000_Fc90_A0,
-     -HPF_Fs48000_Fc90_B2,
-     -HPF_Fs48000_Fc90_B1}
+        /* Coefficients for 90Hz centre frequency */
+        {HPF_Fs8000_Fc90_A2, /* 8kS/s coefficients */
+         HPF_Fs8000_Fc90_A1, HPF_Fs8000_Fc90_A0, -HPF_Fs8000_Fc90_B2, -HPF_Fs8000_Fc90_B1},
+        {HPF_Fs11025_Fc90_A2, /* 11kS/s coefficients */
+         HPF_Fs11025_Fc90_A1, HPF_Fs11025_Fc90_A0, -HPF_Fs11025_Fc90_B2, -HPF_Fs11025_Fc90_B1},
+        {HPF_Fs12000_Fc90_A2, /* 12kS/s coefficients */
+         HPF_Fs12000_Fc90_A1, HPF_Fs12000_Fc90_A0, -HPF_Fs12000_Fc90_B2, -HPF_Fs12000_Fc90_B1},
+        {HPF_Fs16000_Fc90_A2, /* 16kS/s coefficients */
+         HPF_Fs16000_Fc90_A1, HPF_Fs16000_Fc90_A0, -HPF_Fs16000_Fc90_B2, -HPF_Fs16000_Fc90_B1},
+        {HPF_Fs22050_Fc90_A2, /* 22kS/s coefficients */
+         HPF_Fs22050_Fc90_A1, HPF_Fs22050_Fc90_A0, -HPF_Fs22050_Fc90_B2, -HPF_Fs22050_Fc90_B1},
+        {HPF_Fs24000_Fc90_A2, /* 24kS/s coefficients */
+         HPF_Fs24000_Fc90_A1, HPF_Fs24000_Fc90_A0, -HPF_Fs24000_Fc90_B2, -HPF_Fs24000_Fc90_B1},
+        {HPF_Fs32000_Fc90_A2, /* 32kS/s coefficients */
+         HPF_Fs32000_Fc90_A1, HPF_Fs32000_Fc90_A0, -HPF_Fs32000_Fc90_B2, -HPF_Fs32000_Fc90_B1},
+        {HPF_Fs44100_Fc90_A2, /* 44kS/s coefficients */
+         HPF_Fs44100_Fc90_A1, HPF_Fs44100_Fc90_A0, -HPF_Fs44100_Fc90_B2, -HPF_Fs44100_Fc90_B1},
+        {HPF_Fs48000_Fc90_A2, /* 48kS/s coefficients */
+         HPF_Fs48000_Fc90_A1, HPF_Fs48000_Fc90_A0, -HPF_Fs48000_Fc90_B2, -HPF_Fs48000_Fc90_B1}
 
-    ,
-    {HPF_Fs88200_Fc90_A2,                /* 88kS/s coefficients */
-     HPF_Fs88200_Fc90_A1,
-     HPF_Fs88200_Fc90_A0,
-     -HPF_Fs88200_Fc90_B2,
-     -HPF_Fs88200_Fc90_B1},
-    {HPF_Fs96000_Fc90_A2,                /* 96kS/s coefficients */
-     HPF_Fs96000_Fc90_A1,
-     HPF_Fs96000_Fc90_A0,
-     -HPF_Fs96000_Fc90_B2,
-     -HPF_Fs96000_Fc90_B1},
-    {HPF_Fs176400_Fc90_A2,                /* 176kS/s coefficients */
-     HPF_Fs176400_Fc90_A1,
-     HPF_Fs176400_Fc90_A0,
-     -HPF_Fs176400_Fc90_B2,
-     -HPF_Fs176400_Fc90_B1},
-    {HPF_Fs192000_Fc90_A2,                /* 192kS/s coefficients */
-     HPF_Fs192000_Fc90_A1,
-     HPF_Fs192000_Fc90_A0,
-     -HPF_Fs192000_Fc90_B2,
-     -HPF_Fs192000_Fc90_B1}
+        ,
+        {HPF_Fs88200_Fc90_A2, /* 88kS/s coefficients */
+         HPF_Fs88200_Fc90_A1, HPF_Fs88200_Fc90_A0, -HPF_Fs88200_Fc90_B2, -HPF_Fs88200_Fc90_B1},
+        {HPF_Fs96000_Fc90_A2, /* 96kS/s coefficients */
+         HPF_Fs96000_Fc90_A1, HPF_Fs96000_Fc90_A0, -HPF_Fs96000_Fc90_B2, -HPF_Fs96000_Fc90_B1},
+        {HPF_Fs176400_Fc90_A2, /* 176kS/s coefficients */
+         HPF_Fs176400_Fc90_A1, HPF_Fs176400_Fc90_A0, -HPF_Fs176400_Fc90_B2, -HPF_Fs176400_Fc90_B1},
+        {HPF_Fs192000_Fc90_A2, /* 192kS/s coefficients */
+         HPF_Fs192000_Fc90_A1, HPF_Fs192000_Fc90_A0, -HPF_Fs192000_Fc90_B2, -HPF_Fs192000_Fc90_B1}
 
 };
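The reformatted table keeps its original layout: one block of 13 rows, covering 8 kS/s through 192 kS/s, for each of the four supported centre frequencies (55, 66, 78 and 90 Hz). A minimal lookup sketch under that assumption follows; the helper name and the sample-rate count macro are illustrative, not actual LVDBE identifiers.

    #include "BIQUAD.h"       /* BQ_FLOAT_Coefs_t */
    #include "LVDBE_Tables.h" /* assumed to carry the extern declaration of LVDBE_HPF_Table */

    #define NR_SAMPLE_RATES 13 /* 8 kHz ... 192 kHz, as listed above (illustrative macro) */

    /* Row-major [centre frequency][sample rate] indexing, as implied by the table layout. */
    static inline const BQ_FLOAT_Coefs_t* hpf_coefs(int centreFreqIndex, int fsIndex) {
        return &LVDBE_HPF_Table[centreFreqIndex * NR_SAMPLE_RATES + fsIndex];
    }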
 
@@ -312,170 +156,117 @@
  * Band Pass Filter coefficient table
  */
 const BP_FLOAT_Coefs_t LVDBE_BPF_Table[] = {
-    /* Coefficients for 55Hz centre frequency */
-    {BPF_Fs8000_Fc55_A0,                /* 8kS/s coefficients */
-     -BPF_Fs8000_Fc55_B2,
-     -BPF_Fs8000_Fc55_B1},
-    {BPF_Fs11025_Fc55_A0,                /* 11kS/s coefficients */
-     -BPF_Fs11025_Fc55_B2,
-     -BPF_Fs11025_Fc55_B1},
-    {BPF_Fs12000_Fc55_A0,                /* 12kS/s coefficients */
-     -BPF_Fs12000_Fc55_B2,
-     -BPF_Fs12000_Fc55_B1},
-    {BPF_Fs16000_Fc55_A0,                /* 16kS/s coefficients */
-     -BPF_Fs16000_Fc55_B2,
-     -BPF_Fs16000_Fc55_B1},
-    {BPF_Fs22050_Fc55_A0,                /* 22kS/s coefficients */
-     -BPF_Fs22050_Fc55_B2,
-     -BPF_Fs22050_Fc55_B1},
-    {BPF_Fs24000_Fc55_A0,                /* 24kS/s coefficients */
-     -BPF_Fs24000_Fc55_B2,
-     -BPF_Fs24000_Fc55_B1},
-    {BPF_Fs32000_Fc55_A0,                /* 32kS/s coefficients */
-     -BPF_Fs32000_Fc55_B2,
-     -BPF_Fs32000_Fc55_B1},
-    {BPF_Fs44100_Fc55_A0,                /* 44kS/s coefficients */
-     -BPF_Fs44100_Fc55_B2,
-     -BPF_Fs44100_Fc55_B1},
-    {BPF_Fs48000_Fc55_A0,                /* 48kS/s coefficients */
-     -BPF_Fs48000_Fc55_B2,
-     -BPF_Fs48000_Fc55_B1},
-     {BPF_Fs88200_Fc55_A0,                /* 88kS/s coefficients */
-      -BPF_Fs88200_Fc55_B2,
-      -BPF_Fs88200_Fc55_B1},
-     {BPF_Fs96000_Fc55_A0,                /* 96kS/s coefficients */
-     -BPF_Fs96000_Fc55_B2,
-     -BPF_Fs96000_Fc55_B1},
-     {BPF_Fs176400_Fc55_A0,                /* 176kS/s coefficients */
-      -BPF_Fs176400_Fc55_B2,
-      -BPF_Fs176400_Fc55_B1},
-     {BPF_Fs192000_Fc55_A0,                /* 192kS/s coefficients */
-     -BPF_Fs192000_Fc55_B2,
-     -BPF_Fs192000_Fc55_B1},
+        /* Coefficients for 55Hz centre frequency */
+        {BPF_Fs8000_Fc55_A0, /* 8kS/s coefficients */
+         -BPF_Fs8000_Fc55_B2, -BPF_Fs8000_Fc55_B1},
+        {BPF_Fs11025_Fc55_A0, /* 11kS/s coefficients */
+         -BPF_Fs11025_Fc55_B2, -BPF_Fs11025_Fc55_B1},
+        {BPF_Fs12000_Fc55_A0, /* 12kS/s coefficients */
+         -BPF_Fs12000_Fc55_B2, -BPF_Fs12000_Fc55_B1},
+        {BPF_Fs16000_Fc55_A0, /* 16kS/s coefficients */
+         -BPF_Fs16000_Fc55_B2, -BPF_Fs16000_Fc55_B1},
+        {BPF_Fs22050_Fc55_A0, /* 22kS/s coefficients */
+         -BPF_Fs22050_Fc55_B2, -BPF_Fs22050_Fc55_B1},
+        {BPF_Fs24000_Fc55_A0, /* 24kS/s coefficients */
+         -BPF_Fs24000_Fc55_B2, -BPF_Fs24000_Fc55_B1},
+        {BPF_Fs32000_Fc55_A0, /* 32kS/s coefficients */
+         -BPF_Fs32000_Fc55_B2, -BPF_Fs32000_Fc55_B1},
+        {BPF_Fs44100_Fc55_A0, /* 44kS/s coefficients */
+         -BPF_Fs44100_Fc55_B2, -BPF_Fs44100_Fc55_B1},
+        {BPF_Fs48000_Fc55_A0, /* 48kS/s coefficients */
+         -BPF_Fs48000_Fc55_B2, -BPF_Fs48000_Fc55_B1},
+        {BPF_Fs88200_Fc55_A0, /* 88kS/s coefficients */
+         -BPF_Fs88200_Fc55_B2, -BPF_Fs88200_Fc55_B1},
+        {BPF_Fs96000_Fc55_A0, /* 96kS/s coefficients */
+         -BPF_Fs96000_Fc55_B2, -BPF_Fs96000_Fc55_B1},
+        {BPF_Fs176400_Fc55_A0, /* 176kS/s coefficients */
+         -BPF_Fs176400_Fc55_B2, -BPF_Fs176400_Fc55_B1},
+        {BPF_Fs192000_Fc55_A0, /* 192kS/s coefficients */
+         -BPF_Fs192000_Fc55_B2, -BPF_Fs192000_Fc55_B1},
 
-    /* Coefficients for 66Hz centre frequency */
-    {BPF_Fs8000_Fc66_A0,                /* 8kS/s coefficients */
-     -BPF_Fs8000_Fc66_B2,
-     -BPF_Fs8000_Fc66_B1},
-    {BPF_Fs11025_Fc66_A0,                /* 11kS/s coefficients */
-     -BPF_Fs11025_Fc66_B2,
-     -BPF_Fs11025_Fc66_B1},
-    {BPF_Fs12000_Fc66_A0,                /* 12kS/s coefficients */
-     -BPF_Fs12000_Fc66_B2,
-     -BPF_Fs12000_Fc66_B1},
-    {BPF_Fs16000_Fc66_A0,                /* 16kS/s coefficients */
-     -BPF_Fs16000_Fc66_B2,
-     -BPF_Fs16000_Fc66_B1},
-    {BPF_Fs22050_Fc66_A0,                /* 22kS/s coefficients */
-     -BPF_Fs22050_Fc66_B2,
-     -BPF_Fs22050_Fc66_B1},
-    {BPF_Fs24000_Fc66_A0,                /* 24kS/s coefficients */
-     -BPF_Fs24000_Fc66_B2,
-     -BPF_Fs24000_Fc66_B1},
-    {BPF_Fs32000_Fc66_A0,                /* 32kS/s coefficients */
-     -BPF_Fs32000_Fc66_B2,
-     -BPF_Fs32000_Fc66_B1},
-    {BPF_Fs44100_Fc66_A0,                /* 44kS/s coefficients */
-     -BPF_Fs44100_Fc66_B2,
-     -BPF_Fs44100_Fc66_B1},
-    {BPF_Fs48000_Fc66_A0,                /* 48kS/s coefficients */
-     -BPF_Fs48000_Fc66_B2,
-     -BPF_Fs48000_Fc66_B1},
-    {BPF_Fs88200_Fc66_A0,                /* 88kS/s coefficients */
-     -BPF_Fs88200_Fc66_B2,
-     -BPF_Fs88200_Fc66_B1},
-    {BPF_Fs96000_Fc66_A0,                /* 96kS/s coefficients */
-     -BPF_Fs96000_Fc66_B2,
-     -BPF_Fs96000_Fc66_B1},
-    {BPF_Fs176400_Fc66_A0,                /* 176kS/s coefficients */
-     -BPF_Fs176400_Fc66_B2,
-     -BPF_Fs176400_Fc66_B1},
-    {BPF_Fs192000_Fc66_A0,                /* 192kS/s coefficients */
-     -BPF_Fs192000_Fc66_B2,
-     -BPF_Fs192000_Fc66_B1},
+        /* Coefficients for 66Hz centre frequency */
+        {BPF_Fs8000_Fc66_A0, /* 8kS/s coefficients */
+         -BPF_Fs8000_Fc66_B2, -BPF_Fs8000_Fc66_B1},
+        {BPF_Fs11025_Fc66_A0, /* 11kS/s coefficients */
+         -BPF_Fs11025_Fc66_B2, -BPF_Fs11025_Fc66_B1},
+        {BPF_Fs12000_Fc66_A0, /* 12kS/s coefficients */
+         -BPF_Fs12000_Fc66_B2, -BPF_Fs12000_Fc66_B1},
+        {BPF_Fs16000_Fc66_A0, /* 16kS/s coefficients */
+         -BPF_Fs16000_Fc66_B2, -BPF_Fs16000_Fc66_B1},
+        {BPF_Fs22050_Fc66_A0, /* 22kS/s coefficients */
+         -BPF_Fs22050_Fc66_B2, -BPF_Fs22050_Fc66_B1},
+        {BPF_Fs24000_Fc66_A0, /* 24kS/s coefficients */
+         -BPF_Fs24000_Fc66_B2, -BPF_Fs24000_Fc66_B1},
+        {BPF_Fs32000_Fc66_A0, /* 32kS/s coefficients */
+         -BPF_Fs32000_Fc66_B2, -BPF_Fs32000_Fc66_B1},
+        {BPF_Fs44100_Fc66_A0, /* 44kS/s coefficients */
+         -BPF_Fs44100_Fc66_B2, -BPF_Fs44100_Fc66_B1},
+        {BPF_Fs48000_Fc66_A0, /* 48kS/s coefficients */
+         -BPF_Fs48000_Fc66_B2, -BPF_Fs48000_Fc66_B1},
+        {BPF_Fs88200_Fc66_A0, /* 88kS/s coefficients */
+         -BPF_Fs88200_Fc66_B2, -BPF_Fs88200_Fc66_B1},
+        {BPF_Fs96000_Fc66_A0, /* 96kS/s coefficients */
+         -BPF_Fs96000_Fc66_B2, -BPF_Fs96000_Fc66_B1},
+        {BPF_Fs176400_Fc66_A0, /* 176kS/s coefficients */
+         -BPF_Fs176400_Fc66_B2, -BPF_Fs176400_Fc66_B1},
+        {BPF_Fs192000_Fc66_A0, /* 192kS/s coefficients */
+         -BPF_Fs192000_Fc66_B2, -BPF_Fs192000_Fc66_B1},
 
-    /* Coefficients for 78Hz centre frequency */
-    {BPF_Fs8000_Fc78_A0,                /* 8kS/s coefficients */
-     -BPF_Fs8000_Fc78_B2,
-     -BPF_Fs8000_Fc78_B1},
-    {BPF_Fs11025_Fc78_A0,                /* 11kS/s coefficients */
-     -BPF_Fs11025_Fc78_B2,
-     -BPF_Fs11025_Fc78_B1},
-    {BPF_Fs12000_Fc78_A0,                /* 12kS/s coefficients */
-     -BPF_Fs12000_Fc78_B2,
-     -BPF_Fs12000_Fc78_B1},
-    {BPF_Fs16000_Fc78_A0,                /* 16kS/s coefficients */
-     -BPF_Fs16000_Fc78_B2,
-     -BPF_Fs16000_Fc78_B1},
-    {BPF_Fs22050_Fc78_A0,                /* 22kS/s coefficients */
-     -BPF_Fs22050_Fc78_B2,
-     -BPF_Fs22050_Fc78_B1},
-    {BPF_Fs24000_Fc78_A0,                /* 24kS/s coefficients */
-     -BPF_Fs24000_Fc78_B2,
-     -BPF_Fs24000_Fc78_B1},
-    {BPF_Fs32000_Fc78_A0,                /* 32kS/s coefficients */
-     -BPF_Fs32000_Fc78_B2,
-     -BPF_Fs32000_Fc78_B1},
-    {BPF_Fs44100_Fc78_A0,                /* 44kS/s coefficients */
-     -BPF_Fs44100_Fc78_B2,
-     -BPF_Fs44100_Fc78_B1},
-    {BPF_Fs48000_Fc78_A0,                /* 48kS/s coefficients */
-     -BPF_Fs48000_Fc78_B2,
-     -BPF_Fs48000_Fc78_B1},
-    {BPF_Fs88200_Fc66_A0,                /* 88kS/s coefficients */
-     -BPF_Fs88200_Fc66_B2,
-     -BPF_Fs88200_Fc66_B1},
-    {BPF_Fs96000_Fc78_A0,                /* 96kS/s coefficients */
-     -BPF_Fs96000_Fc78_B2,
-     -BPF_Fs96000_Fc78_B1},
-    {BPF_Fs176400_Fc66_A0,                /* 176kS/s coefficients */
-     -BPF_Fs176400_Fc66_B2,
-     -BPF_Fs176400_Fc66_B1},
-    {BPF_Fs192000_Fc78_A0,                /* 192kS/s coefficients */
-     -BPF_Fs192000_Fc78_B2,
-     -BPF_Fs192000_Fc78_B1},
+        /* Coefficients for 78Hz centre frequency */
+        {BPF_Fs8000_Fc78_A0, /* 8kS/s coefficients */
+         -BPF_Fs8000_Fc78_B2, -BPF_Fs8000_Fc78_B1},
+        {BPF_Fs11025_Fc78_A0, /* 11kS/s coefficients */
+         -BPF_Fs11025_Fc78_B2, -BPF_Fs11025_Fc78_B1},
+        {BPF_Fs12000_Fc78_A0, /* 12kS/s coefficients */
+         -BPF_Fs12000_Fc78_B2, -BPF_Fs12000_Fc78_B1},
+        {BPF_Fs16000_Fc78_A0, /* 16kS/s coefficients */
+         -BPF_Fs16000_Fc78_B2, -BPF_Fs16000_Fc78_B1},
+        {BPF_Fs22050_Fc78_A0, /* 22kS/s coefficients */
+         -BPF_Fs22050_Fc78_B2, -BPF_Fs22050_Fc78_B1},
+        {BPF_Fs24000_Fc78_A0, /* 24kS/s coefficients */
+         -BPF_Fs24000_Fc78_B2, -BPF_Fs24000_Fc78_B1},
+        {BPF_Fs32000_Fc78_A0, /* 32kS/s coefficients */
+         -BPF_Fs32000_Fc78_B2, -BPF_Fs32000_Fc78_B1},
+        {BPF_Fs44100_Fc78_A0, /* 44kS/s coefficients */
+         -BPF_Fs44100_Fc78_B2, -BPF_Fs44100_Fc78_B1},
+        {BPF_Fs48000_Fc78_A0, /* 48kS/s coefficients */
+         -BPF_Fs48000_Fc78_B2, -BPF_Fs48000_Fc78_B1},
+        {BPF_Fs88200_Fc66_A0, /* 88kS/s coefficients */
+         -BPF_Fs88200_Fc66_B2, -BPF_Fs88200_Fc66_B1},
+        {BPF_Fs96000_Fc78_A0, /* 96kS/s coefficients */
+         -BPF_Fs96000_Fc78_B2, -BPF_Fs96000_Fc78_B1},
+        {BPF_Fs176400_Fc66_A0, /* 176kS/s coefficients */
+         -BPF_Fs176400_Fc66_B2, -BPF_Fs176400_Fc66_B1},
+        {BPF_Fs192000_Fc78_A0, /* 192kS/s coefficients */
+         -BPF_Fs192000_Fc78_B2, -BPF_Fs192000_Fc78_B1},
 
-    /* Coefficients for 90Hz centre frequency */
-    {BPF_Fs8000_Fc90_A0,                /* 8kS/s coefficients */
-     -BPF_Fs8000_Fc90_B2,
-     -BPF_Fs8000_Fc90_B1},
-    {BPF_Fs11025_Fc90_A0,                /* 11kS/s coefficients */
-     -BPF_Fs11025_Fc90_B2,
-     -BPF_Fs11025_Fc90_B1},
-    {BPF_Fs12000_Fc90_A0,                /* 12kS/s coefficients */
-     -BPF_Fs12000_Fc90_B2,
-     -BPF_Fs12000_Fc90_B1},
-    {BPF_Fs16000_Fc90_A0,                /* 16kS/s coefficients */
-     -BPF_Fs16000_Fc90_B2,
-     -BPF_Fs16000_Fc90_B1},
-    {BPF_Fs22050_Fc90_A0,                /* 22kS/s coefficients */
-     -BPF_Fs22050_Fc90_B2,
-     -BPF_Fs22050_Fc90_B1},
-    {BPF_Fs24000_Fc90_A0,                /* 24kS/s coefficients */
-     -BPF_Fs24000_Fc90_B2,
-     -BPF_Fs24000_Fc90_B1},
-    {BPF_Fs32000_Fc90_A0,                /* 32kS/s coefficients */
-     -BPF_Fs32000_Fc90_B2,
-     -BPF_Fs32000_Fc90_B1},
-    {BPF_Fs44100_Fc90_A0,                /* 44kS/s coefficients */
-     -BPF_Fs44100_Fc90_B2,
-     -BPF_Fs44100_Fc90_B1},
-    {BPF_Fs48000_Fc90_A0,                /* 48kS/s coefficients */
-     -BPF_Fs48000_Fc90_B2,
-     -BPF_Fs48000_Fc90_B1}
-    ,
-    {BPF_Fs88200_Fc90_A0,                /* 88kS/s coefficients */
-     -BPF_Fs88200_Fc90_B2,
-     -BPF_Fs88200_Fc90_B1},
-    {BPF_Fs96000_Fc90_A0,                /* 96kS/s coefficients */
-     -BPF_Fs96000_Fc90_B2,
-     -BPF_Fs96000_Fc90_B1},
-    {BPF_Fs176400_Fc90_A0,                /* 176kS/s coefficients */
-     -BPF_Fs176400_Fc90_B2,
-     -BPF_Fs176400_Fc90_B1},
-    {BPF_Fs192000_Fc90_A0,                /* 192kS/s coefficients */
-     -BPF_Fs192000_Fc90_B2,
-     -BPF_Fs192000_Fc90_B1}
+        /* Coefficients for 90Hz centre frequency */
+        {BPF_Fs8000_Fc90_A0, /* 8kS/s coefficients */
+         -BPF_Fs8000_Fc90_B2, -BPF_Fs8000_Fc90_B1},
+        {BPF_Fs11025_Fc90_A0, /* 11kS/s coefficients */
+         -BPF_Fs11025_Fc90_B2, -BPF_Fs11025_Fc90_B1},
+        {BPF_Fs12000_Fc90_A0, /* 12kS/s coefficients */
+         -BPF_Fs12000_Fc90_B2, -BPF_Fs12000_Fc90_B1},
+        {BPF_Fs16000_Fc90_A0, /* 16kS/s coefficients */
+         -BPF_Fs16000_Fc90_B2, -BPF_Fs16000_Fc90_B1},
+        {BPF_Fs22050_Fc90_A0, /* 22kS/s coefficients */
+         -BPF_Fs22050_Fc90_B2, -BPF_Fs22050_Fc90_B1},
+        {BPF_Fs24000_Fc90_A0, /* 24kS/s coefficients */
+         -BPF_Fs24000_Fc90_B2, -BPF_Fs24000_Fc90_B1},
+        {BPF_Fs32000_Fc90_A0, /* 32kS/s coefficients */
+         -BPF_Fs32000_Fc90_B2, -BPF_Fs32000_Fc90_B1},
+        {BPF_Fs44100_Fc90_A0, /* 44kS/s coefficients */
+         -BPF_Fs44100_Fc90_B2, -BPF_Fs44100_Fc90_B1},
+        {BPF_Fs48000_Fc90_A0, /* 48kS/s coefficients */
+         -BPF_Fs48000_Fc90_B2, -BPF_Fs48000_Fc90_B1},
+        {BPF_Fs88200_Fc90_A0, /* 88kS/s coefficients */
+         -BPF_Fs88200_Fc90_B2, -BPF_Fs88200_Fc90_B1},
+        {BPF_Fs96000_Fc90_A0, /* 96kS/s coefficients */
+         -BPF_Fs96000_Fc90_B2, -BPF_Fs96000_Fc90_B1},
+        {BPF_Fs176400_Fc90_A0, /* 176kS/s coefficients */
+         -BPF_Fs176400_Fc90_B2, -BPF_Fs176400_Fc90_B1},
+        {BPF_Fs192000_Fc90_A0, /* 192kS/s coefficients */
+         -BPF_Fs192000_Fc90_B2, -BPF_Fs192000_Fc90_B1}
 
 };
 
@@ -487,77 +278,34 @@
 
 /* Attack time (signal too large) */
 const LVM_FLOAT LVDBE_AGC_ATTACK_Table[] = {
-    AGC_ATTACK_Fs8000,
-    AGC_ATTACK_Fs11025,
-    AGC_ATTACK_Fs12000,
-    AGC_ATTACK_Fs16000,
-    AGC_ATTACK_Fs22050,
-    AGC_ATTACK_Fs24000,
-    AGC_ATTACK_Fs32000,
-    AGC_ATTACK_Fs44100,
-    AGC_ATTACK_Fs48000
-    ,AGC_ATTACK_Fs88200
-    ,AGC_ATTACK_Fs96000
-    ,AGC_ATTACK_Fs176400
-    ,AGC_ATTACK_Fs192000
+        AGC_ATTACK_Fs8000,  AGC_ATTACK_Fs11025, AGC_ATTACK_Fs12000, AGC_ATTACK_Fs16000,
+        AGC_ATTACK_Fs22050, AGC_ATTACK_Fs24000, AGC_ATTACK_Fs32000, AGC_ATTACK_Fs44100,
+        AGC_ATTACK_Fs48000, AGC_ATTACK_Fs88200, AGC_ATTACK_Fs96000, AGC_ATTACK_Fs176400,
+        AGC_ATTACK_Fs192000
 
 };
 
 /* Decay time (signal too small) */
-const LVM_FLOAT LVDBE_AGC_DECAY_Table[] = {
-    AGC_DECAY_Fs8000,
-    AGC_DECAY_Fs11025,
-    AGC_DECAY_Fs12000,
-    AGC_DECAY_Fs16000,
-    AGC_DECAY_Fs22050,
-    AGC_DECAY_Fs24000,
-    AGC_DECAY_Fs32000,
-    AGC_DECAY_Fs44100,
-    AGC_DECAY_Fs48000
-    ,AGC_DECAY_Fs88200
-    ,AGC_DECAY_FS96000
-    ,AGC_DECAY_Fs176400
-    ,AGC_DECAY_FS192000
+const LVM_FLOAT LVDBE_AGC_DECAY_Table[] = {AGC_DECAY_Fs8000,  AGC_DECAY_Fs11025, AGC_DECAY_Fs12000,
+                                           AGC_DECAY_Fs16000, AGC_DECAY_Fs22050, AGC_DECAY_Fs24000,
+                                           AGC_DECAY_Fs32000, AGC_DECAY_Fs44100, AGC_DECAY_Fs48000,
+                                           AGC_DECAY_Fs88200, AGC_DECAY_FS96000, AGC_DECAY_Fs176400,
+                                           AGC_DECAY_FS192000
 
 };
 
 /* Gain for use without the high pass filter */
 const LVM_FLOAT LVDBE_AGC_GAIN_Table[] = {
-    AGC_GAIN_0dB,
-    AGC_GAIN_1dB,
-    AGC_GAIN_2dB,
-    AGC_GAIN_3dB,
-    AGC_GAIN_4dB,
-    AGC_GAIN_5dB,
-    AGC_GAIN_6dB,
-    AGC_GAIN_7dB,
-    AGC_GAIN_8dB,
-    AGC_GAIN_9dB,
-    AGC_GAIN_10dB,
-    AGC_GAIN_11dB,
-    AGC_GAIN_12dB,
-    AGC_GAIN_13dB,
-    AGC_GAIN_14dB,
-    AGC_GAIN_15dB};
+        AGC_GAIN_0dB,  AGC_GAIN_1dB,  AGC_GAIN_2dB,  AGC_GAIN_3dB, AGC_GAIN_4dB,  AGC_GAIN_5dB,
+        AGC_GAIN_6dB,  AGC_GAIN_7dB,  AGC_GAIN_8dB,  AGC_GAIN_9dB, AGC_GAIN_10dB, AGC_GAIN_11dB,
+        AGC_GAIN_12dB, AGC_GAIN_13dB, AGC_GAIN_14dB, AGC_GAIN_15dB};
 
 /* Gain for use with the high pass filter */
 const LVM_FLOAT LVDBE_AGC_HPFGAIN_Table[] = {
-    AGC_HPFGAIN_0dB,
-    AGC_HPFGAIN_1dB,
-    AGC_HPFGAIN_2dB,
-    AGC_HPFGAIN_3dB,
-    AGC_HPFGAIN_4dB,
-    AGC_HPFGAIN_5dB,
-    AGC_HPFGAIN_6dB,
-    AGC_HPFGAIN_7dB,
-    AGC_HPFGAIN_8dB,
-    AGC_HPFGAIN_9dB,
-    AGC_HPFGAIN_10dB,
-    AGC_HPFGAIN_11dB,
-    AGC_HPFGAIN_12dB,
-    AGC_HPFGAIN_13dB,
-    AGC_HPFGAIN_14dB,
-    AGC_HPFGAIN_15dB};
+        AGC_HPFGAIN_0dB,  AGC_HPFGAIN_1dB,  AGC_HPFGAIN_2dB,  AGC_HPFGAIN_3dB,
+        AGC_HPFGAIN_4dB,  AGC_HPFGAIN_5dB,  AGC_HPFGAIN_6dB,  AGC_HPFGAIN_7dB,
+        AGC_HPFGAIN_8dB,  AGC_HPFGAIN_9dB,  AGC_HPFGAIN_10dB, AGC_HPFGAIN_11dB,
+        AGC_HPFGAIN_12dB, AGC_HPFGAIN_13dB, AGC_HPFGAIN_14dB, AGC_HPFGAIN_15dB};
 
 /************************************************************************************/
 /*                                                                                  */
@@ -566,45 +314,23 @@
 /************************************************************************************/
 
 /* dB to linear conversion table */
-const LVM_FLOAT LVDBE_VolumeTable[] = {
-    0.500000f,         /* -6dB */
-    0.562341f,         /* -5dB */
-    0.630957f,         /* -4dB */
-    0.707946f,         /* -3dB */
-    0.794328f,         /* -2dB */
-    0.891251f,         /* -1dB */
-    1.000000f};        /*  0dB */
+const LVM_FLOAT LVDBE_VolumeTable[] = {0.500000f,  /* -6dB */
+                                       0.562341f,  /* -5dB */
+                                       0.630957f,  /* -4dB */
+                                       0.707946f,  /* -3dB */
+                                       0.794328f,  /* -2dB */
+                                       0.891251f,  /* -1dB */
+                                       1.000000f}; /*  0dB */
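The entries above are the usual dB-to-linear gains, i.e. approximately 10^(dB/20) for -6 dB through 0 dB in 1 dB steps, with the -6 dB entry stored as exactly 0.5. A standalone, purely illustrative check:

    #include <math.h>
    #include <stdio.h>

    /* Prints the ideal linear gains for -6 dB .. 0 dB; compare with LVDBE_VolumeTable[]. */
    int main(void) {
        for (int dB = -6; dB <= 0; ++dB) {
            printf("%+d dB -> %f\n", dB, powf(10.0f, dB / 20.0f));
        }
        return 0;
    }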
 
 const LVM_FLOAT LVDBE_VolumeTCTable[] = {
-    VOL_TC_Fs8000,
-    VOL_TC_Fs11025,
-    VOL_TC_Fs12000,
-    VOL_TC_Fs16000,
-    VOL_TC_Fs22050,
-    VOL_TC_Fs24000,
-    VOL_TC_Fs32000,
-    VOL_TC_Fs44100,
-    VOL_TC_Fs48000
-    ,VOL_TC_Fs88200
-    ,VOL_TC_Fs96000
-    ,VOL_TC_Fs176400
-    ,VOL_TC_Fs192000
-};
+        VOL_TC_Fs8000,  VOL_TC_Fs11025,  VOL_TC_Fs12000, VOL_TC_Fs16000, VOL_TC_Fs22050,
+        VOL_TC_Fs24000, VOL_TC_Fs32000,  VOL_TC_Fs44100, VOL_TC_Fs48000, VOL_TC_Fs88200,
+        VOL_TC_Fs96000, VOL_TC_Fs176400, VOL_TC_Fs192000};
 
 const LVM_INT16 LVDBE_MixerTCTable[] = {
 
-    MIX_TC_Fs8000,
-    MIX_TC_Fs11025,
-    MIX_TC_Fs12000,
-    MIX_TC_Fs16000,
-    MIX_TC_Fs22050,
-    MIX_TC_Fs24000,
-    MIX_TC_Fs32000,
-    MIX_TC_Fs44100,
-    MIX_TC_Fs48000
-    ,MIX_TC_Fs88200
-    ,MIX_TC_Fs96000
-    ,MIX_TC_Fs176400
-    ,MIX_TC_Fs192000
+        MIX_TC_Fs8000,  MIX_TC_Fs11025,  MIX_TC_Fs12000, MIX_TC_Fs16000, MIX_TC_Fs22050,
+        MIX_TC_Fs24000, MIX_TC_Fs32000,  MIX_TC_Fs44100, MIX_TC_Fs48000, MIX_TC_Fs88200,
+        MIX_TC_Fs96000, MIX_TC_Fs176400, MIX_TC_Fs192000
 
 };
diff --git a/media/libeffects/lvm/lib/Bundle/lib/LVM.h b/media/libeffects/lvm/lib/Bundle/lib/LVM.h
index e4e8450..c90c5cc 100644
--- a/media/libeffects/lvm/lib/Bundle/lib/LVM.h
+++ b/media/libeffects/lvm/lib/Bundle/lib/LVM.h
@@ -67,31 +67,28 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-/* Memory table*/
-#define LVM_NR_MEMORY_REGIONS                 4     /* Number of memory regions */
-
 /* Concert Sound effect level presets */
-#define LVM_CS_EFFECT_NONE                    0     /* 0% effect, minimum value */
-#define LVM_CS_EFFECT_LOW                 16384     /* 50% effect */
-#define LVM_CS_EFFECT_MED                 24576     /* 75% effect */
-#define LVM_CS_EFFECT_HIGH                32767     /* 100% effect, maximum value */
+#define LVM_CS_EFFECT_NONE 0     /* 0% effect, minimum value */
+#define LVM_CS_EFFECT_LOW 16384  /* 50% effect */
+#define LVM_CS_EFFECT_MED 24576  /* 75% effect */
+#define LVM_CS_EFFECT_HIGH 32767 /* 100% effect, maximum value */
 
 /* Treble enhancement */
-#define LVM_TE_LOW_MIPS                   32767
+#define LVM_TE_LOW_MIPS 32767
 
 /* Bass enhancement effect level presets */
-#define LVM_BE_0DB                            0     /* 0dB boost, no effect */
-#define LVM_BE_3DB                            3     /* +3dB boost */
-#define LVM_BE_6DB                            6     /* +6dB boost */
-#define LVM_BE_9DB                            9     /* +9dB boost */
-#define LVM_BE_12DB                          12     /* +12dB boost */
-#define LVM_BE_15DB                          15     /* +15dB boost */
+#define LVM_BE_0DB 0   /* 0dB boost, no effect */
+#define LVM_BE_3DB 3   /* +3dB boost */
+#define LVM_BE_6DB 6   /* +6dB boost */
+#define LVM_BE_9DB 9   /* +9dB boost */
+#define LVM_BE_12DB 12 /* +12dB boost */
+#define LVM_BE_15DB 15 /* +15dB boost */
 
 /* N-Band Equalizer */
-#define LVM_EQ_NBANDS                         5    /* Number of bands for equalizer */
+#define LVM_EQ_NBANDS 5 /* Number of bands for equalizer */
 
 /* Headroom management */
-#define LVM_HEADROOM_MAX_NBANDS               5
+#define LVM_HEADROOM_MAX_NBANDS 5
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -100,123 +97,89 @@
 /****************************************************************************************/
 
 /* Instance handle */
-typedef void *LVM_Handle_t;
+typedef void* LVM_Handle_t;
 
 /* Status return values */
-typedef enum
-{
-    LVM_SUCCESS            = 0,                     /* Successful return from a routine */
-    LVM_ALIGNMENTERROR     = 1,                     /* Memory alignment error */
-    LVM_NULLADDRESS        = 2,                     /* NULL allocation address */
-    LVM_OUTOFRANGE         = 3,                     /* Out of range control parameter */
-    LVM_INVALIDNUMSAMPLES  = 4,                     /* Invalid number of samples */
-    LVM_WRONGAUDIOTIME     = 5,                     /* Wrong time value for audio time*/
-    LVM_ALGORITHMDISABLED  = 6,                     /* Algorithm is disabled*/
-    LVM_ALGORITHMPSA       = 7,                     /* Algorithm PSA returns an error */
+typedef enum {
+    LVM_SUCCESS = 0,           /* Successful return from a routine */
+    LVM_ALIGNMENTERROR = 1,    /* Memory alignment error */
+    LVM_NULLADDRESS = 2,       /* NULL allocation address */
+    LVM_OUTOFRANGE = 3,        /* Out of range control parameter */
+    LVM_INVALIDNUMSAMPLES = 4, /* Invalid number of samples */
+    LVM_WRONGAUDIOTIME = 5,    /* Wrong time value for audio time*/
+    LVM_ALGORITHMDISABLED = 6, /* Algorithm is disabled*/
+    LVM_ALGORITHMPSA = 7,      /* Algorithm PSA returns an error */
     LVM_RETURNSTATUS_DUMMY = LVM_MAXENUM
 } LVM_ReturnStatus_en;
 
 /* Buffer Management mode */
-typedef enum
-{
-    LVM_MANAGED_BUFFERS   = 0,
+typedef enum {
+    LVM_MANAGED_BUFFERS = 0,
     LVM_UNMANAGED_BUFFERS = 1,
-    LVM_BUFFERS_DUMMY     = LVM_MAXENUM
+    LVM_BUFFERS_DUMMY = LVM_MAXENUM
 } LVM_BufferMode_en;
 
 /* Output device type */
-typedef enum
-{
-    LVM_HEADPHONES             = 0,
-    LVM_EX_HEADPHONES          = 1,
-    LVM_SPEAKERTYPE_MAX        = LVM_MAXENUM
+typedef enum {
+    LVM_HEADPHONES = 0,
+    LVM_EX_HEADPHONES = 1,
+    LVM_SPEAKERTYPE_MAX = LVM_MAXENUM
 } LVM_OutputDeviceType_en;
 
 /* Virtualizer mode selection*/
-typedef enum
-{
-    LVM_CONCERTSOUND       = 0,
-    LVM_VIRTUALIZERTYPE_DUMMY   = LVM_MAXENUM
+typedef enum {
+    LVM_CONCERTSOUND = 0,
+    LVM_VIRTUALIZERTYPE_DUMMY = LVM_MAXENUM
 } LVM_VirtualizerType_en;
 
 /* N-Band Equaliser operating mode */
-typedef enum
-{
-    LVM_EQNB_OFF   = 0,
-    LVM_EQNB_ON    = 1,
-    LVM_EQNB_DUMMY = LVM_MAXENUM
-} LVM_EQNB_Mode_en;
+typedef enum { LVM_EQNB_OFF = 0, LVM_EQNB_ON = 1, LVM_EQNB_DUMMY = LVM_MAXENUM } LVM_EQNB_Mode_en;
 
 /* Bass Enhancement operating mode */
-typedef enum
-{
-    LVM_BE_OFF   = 0,
-    LVM_BE_ON    = 1,
-    LVM_BE_DUMMY = LVM_MAXENUM
-} LVM_BE_Mode_en;
+typedef enum { LVM_BE_OFF = 0, LVM_BE_ON = 1, LVM_BE_DUMMY = LVM_MAXENUM } LVM_BE_Mode_en;
 
 /* Bass Enhancement centre frequency selection control */
-typedef enum
-{
-    LVM_BE_CENTRE_55Hz  = 0,
-    LVM_BE_CENTRE_66Hz  = 1,
-    LVM_BE_CENTRE_78Hz  = 2,
-    LVM_BE_CENTRE_90Hz  = 3,
+typedef enum {
+    LVM_BE_CENTRE_55Hz = 0,
+    LVM_BE_CENTRE_66Hz = 1,
+    LVM_BE_CENTRE_78Hz = 2,
+    LVM_BE_CENTRE_90Hz = 3,
     LVM_BE_CENTRE_DUMMY = LVM_MAXENUM
 } LVM_BE_CentreFreq_en;
 
 /* Bass Enhancement HPF selection control */
-typedef enum
-{
-    LVM_BE_HPF_OFF   = 0,
-    LVM_BE_HPF_ON    = 1,
+typedef enum {
+    LVM_BE_HPF_OFF = 0,
+    LVM_BE_HPF_ON = 1,
     LVM_BE_HPF_DUMMY = LVM_MAXENUM
 } LVM_BE_FilterSelect_en;
 
 /* Volume Control operating mode */
-typedef enum
-{
-    LVM_VC_OFF   = 0,
-    LVM_VC_ON    = 1,
-    LVM_VC_DUMMY = LVM_MAXENUM
-} LVM_VC_Mode_en;
+typedef enum { LVM_VC_OFF = 0, LVM_VC_ON = 1, LVM_VC_DUMMY = LVM_MAXENUM } LVM_VC_Mode_en;
 
 /* Treble Enhancement operating mode */
-typedef enum
-{
-    LVM_TE_OFF   = 0,
-    LVM_TE_ON    = 1,
-    LVM_TE_DUMMY = LVM_MAXENUM
-} LVM_TE_Mode_en;
+typedef enum { LVM_TE_OFF = 0, LVM_TE_ON = 1, LVM_TE_DUMMY = LVM_MAXENUM } LVM_TE_Mode_en;
 
 /* Headroom management operating mode */
-typedef enum
-{
-    LVM_HEADROOM_OFF   = 0,
-    LVM_HEADROOM_ON    = 1,
+typedef enum {
+    LVM_HEADROOM_OFF = 0,
+    LVM_HEADROOM_ON = 1,
     LVM_Headroom_DUMMY = LVM_MAXENUM
 } LVM_Headroom_Mode_en;
 
-typedef enum
-{
-    LVM_PSA_SPEED_SLOW,                                  /* Peak decaying at slow speed */
-    LVM_PSA_SPEED_MEDIUM,                                /* Peak decaying at medium speed */
-    LVM_PSA_SPEED_FAST,                                  /* Peak decaying at fast speed */
+typedef enum {
+    LVM_PSA_SPEED_SLOW,   /* Peak decaying at slow speed */
+    LVM_PSA_SPEED_MEDIUM, /* Peak decaying at medium speed */
+    LVM_PSA_SPEED_FAST,   /* Peak decaying at fast speed */
     LVM_PSA_SPEED_DUMMY = LVM_MAXENUM
 } LVM_PSA_DecaySpeed_en;
 
-typedef enum
-{
-    LVM_PSA_OFF   = 0,
-    LVM_PSA_ON    = 1,
-    LVM_PSA_DUMMY = LVM_MAXENUM
-} LVM_PSA_Mode_en;
+typedef enum { LVM_PSA_OFF = 0, LVM_PSA_ON = 1, LVM_PSA_DUMMY = LVM_MAXENUM } LVM_PSA_Mode_en;
 
 /* Version information */
-typedef struct
-{
-    LVM_CHAR                    *pVersionNumber;        /* Pointer to the version number in the format X.YY.ZZ */
-    LVM_CHAR                    *pPlatform;             /* Pointer to the library platform type */
+typedef struct {
+    LVM_CHAR* pVersionNumber; /* Pointer to the version number in the format X.YY.ZZ */
+    LVM_CHAR* pPlatform;      /* Pointer to the library platform type */
 } LVM_VersionInfo_st;
 
 /****************************************************************************************/
@@ -225,93 +188,80 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-/* Memory table containing the region definitions */
-typedef struct
-{
-    LVM_MemoryRegion_st         Region[LVM_NR_MEMORY_REGIONS];  /* One definition for each region */
-} LVM_MemTab_t;
-
 /* N-Band equaliser band definition */
-typedef struct
-{
-    LVM_INT16                   Gain;                   /* Band gain in dB */
-    LVM_UINT16                  Frequency;              /* Band centre frequency in Hz */
-    LVM_UINT16                  QFactor;                /* Band quality factor (x100) */
+typedef struct {
+    LVM_INT16 Gain;       /* Band gain in dB */
+    LVM_UINT16 Frequency; /* Band centre frequency in Hz */
+    LVM_UINT16 QFactor;   /* Band quality factor (x100) */
 } LVM_EQNB_BandDef_t;
 
 /* Headroom band definition */
-typedef struct
-{
-    LVM_UINT16                  Limit_Low;              /* Low frequency limit of the band in Hertz */
-    LVM_UINT16                  Limit_High;             /* High frequency limit of the band in Hertz */
-    LVM_INT16                   Headroom_Offset;        /* Headroom = biggest band gain - Headroom_Offset */
+typedef struct {
+    LVM_UINT16 Limit_Low;      /* Low frequency limit of the band in Hertz */
+    LVM_UINT16 Limit_High;     /* High frequency limit of the band in Hertz */
+    LVM_INT16 Headroom_Offset; /* Headroom = biggest band gain - Headroom_Offset */
 } LVM_HeadroomBandDef_t;
 
 /* Control Parameter structure */
-typedef struct
-{
+typedef struct {
     /* General parameters */
-    LVM_Mode_en                 OperatingMode;          /* Bundle operating mode On/Bypass */
-    LVM_Fs_en                   SampleRate;             /* Sample rate */
-    LVM_Format_en               SourceFormat;           /* Input data format */
-    LVM_OutputDeviceType_en     SpeakerType;            /* Output device type */
+    LVM_Mode_en OperatingMode;           /* Bundle operating mode On/Bypass */
+    LVM_Fs_en SampleRate;                /* Sample rate */
+    LVM_Format_en SourceFormat;          /* Input data format */
+    LVM_OutputDeviceType_en SpeakerType; /* Output device type */
 
     /* Concert Sound Virtualizer parameters*/
-    LVM_Mode_en                 VirtualizerOperatingMode; /* Virtualizer operating mode On/Off */
-    LVM_VirtualizerType_en      VirtualizerType;          /* Virtualizer type: ConcertSound */
-    LVM_UINT16                  VirtualizerReverbLevel;   /* Virtualizer reverb level in % */
-    LVM_INT16                   CS_EffectLevel;           /* Concert Sound effect level */
+    LVM_Mode_en VirtualizerOperatingMode;   /* Virtualizer operating mode On/Off */
+    LVM_VirtualizerType_en VirtualizerType; /* Virtualizer type: ConcertSound */
+    LVM_UINT16 VirtualizerReverbLevel;      /* Virtualizer reverb level in % */
+    LVM_INT16 CS_EffectLevel;               /* Concert Sound effect level */
 
     /* N-Band Equaliser parameters */
-    LVM_EQNB_Mode_en            EQNB_OperatingMode;     /* N-Band Equaliser operating mode */
-    LVM_UINT16                  EQNB_NBands;            /* Number of bands */
-    LVM_EQNB_BandDef_t          *pEQNB_BandDefinition;  /* Pointer to equaliser definitions */
+    LVM_EQNB_Mode_en EQNB_OperatingMode;      /* N-Band Equaliser operating mode */
+    LVM_UINT16 EQNB_NBands;                   /* Number of bands */
+    LVM_EQNB_BandDef_t* pEQNB_BandDefinition; /* Pointer to equaliser definitions */
 
     /* Bass Enhancement parameters */
-    LVM_BE_Mode_en              BE_OperatingMode;       /* Bass Enhancement operating mode */
-    LVM_INT16                   BE_EffectLevel;         /* Bass Enhancement effect level */
-    LVM_BE_CentreFreq_en        BE_CentreFreq;          /* Bass Enhancement centre frequency */
-    LVM_BE_FilterSelect_en      BE_HPF;                 /* Bass Enhancement high pass filter selector */
+    LVM_BE_Mode_en BE_OperatingMode;    /* Bass Enhancement operating mode */
+    LVM_INT16 BE_EffectLevel;           /* Bass Enhancement effect level */
+    LVM_BE_CentreFreq_en BE_CentreFreq; /* Bass Enhancement centre frequency */
+    LVM_BE_FilterSelect_en BE_HPF;      /* Bass Enhancement high pass filter selector */
 
     /* Volume Control parameters */
-    LVM_INT16                   VC_EffectLevel;         /* Volume Control setting in dBs */
-    LVM_INT16                   VC_Balance;             /* Left Right Balance control in dB (-96 to 96 dB), -ve values reduce
-                                                           Right channel while +ve value reduces Left channel*/
+    LVM_INT16 VC_EffectLevel; /* Volume Control setting in dBs */
+    LVM_INT16 VC_Balance;     /* Left Right Balance control in dB (-96 to 96 dB), -ve values reduce
+                                 Right channel while +ve value reduces Left channel*/
 
     /* Treble Enhancement parameters */
-    LVM_TE_Mode_en              TE_OperatingMode;       /* Treble Enhancement On/Off */
-    LVM_INT16                   TE_EffectLevel;         /* Treble Enhancement gain dBs */
+    LVM_TE_Mode_en TE_OperatingMode; /* Treble Enhancement On/Off */
+    LVM_INT16 TE_EffectLevel;        /* Treble Enhancement gain dBs */
 
     /* Spectrum Analyzer parameters Control */
-    LVM_PSA_Mode_en             PSA_Enable;
-    LVM_PSA_DecaySpeed_en       PSA_PeakDecayRate;      /* Peak value decay rate*/
-#ifdef SUPPORT_MC
-    LVM_INT32                   NrChannels;
-    LVM_INT32                   ChMask;
-#endif
+    LVM_PSA_Mode_en PSA_Enable;
+    LVM_PSA_DecaySpeed_en PSA_PeakDecayRate; /* Peak value decay rate*/
+    LVM_INT32 NrChannels;
+    LVM_INT32 ChMask;
 
 } LVM_ControlParams_t;
 
 /* Instance Parameter structure */
-typedef struct
-{
+typedef struct {
     /* General */
-    LVM_BufferMode_en           BufferMode;             /* Buffer management mode */
-    LVM_UINT16                  MaxBlockSize;           /* Maximum processing block size */
+    LVM_BufferMode_en BufferMode; /* Buffer management mode */
+    LVM_UINT16 MaxBlockSize;      /* Maximum processing block size */
 
     /* N-Band Equaliser */
-    LVM_UINT16                  EQNB_NumBands;          /* Maximum number of equaliser bands */
+    LVM_UINT16 EQNB_NumBands; /* Maximum number of equaliser bands */
 
     /* PSA */
-    LVM_PSA_Mode_en             PSA_Included;            /* Controls the instance memory allocation for PSA: ON/OFF */
+    LVM_PSA_Mode_en PSA_Included; /* Controls the instance memory allocation for PSA: ON/OFF */
 } LVM_InstParams_t;
 
 /* Headroom management parameter structure */
-typedef struct
-{
-    LVM_Headroom_Mode_en        Headroom_OperatingMode; /* Headroom Control On/Off */
-    LVM_HeadroomBandDef_t       *pHeadroomDefinition;   /* Pointer to headroom bands definition */
-    LVM_UINT16                  NHeadroomBands;         /* Number of headroom bands */
+typedef struct {
+    LVM_Headroom_Mode_en Headroom_OperatingMode; /* Headroom Control On/Off */
+    LVM_HeadroomBandDef_t* pHeadroomDefinition;  /* Pointer to headroom bands definition */
+    LVM_UINT16 NHeadroomBands;                   /* Number of headroom bands */
 
 } LVM_HeadroomParams_t;
 
@@ -339,55 +289,18 @@
 /*  1.  This function may be interrupted by the LVM_Process function                    */
 /*                                                                                      */
 /****************************************************************************************/
-LVM_ReturnStatus_en LVM_GetVersionInfo(LVM_VersionInfo_st  *pVersion);
-
-/****************************************************************************************/
-/*                                                                                      */
-/* FUNCTION:                LVM_GetMemoryTable                                          */
-/*                                                                                      */
-/* DESCRIPTION:                                                                         */
-/*  This function is used for memory allocation and free. It can be called in           */
-/*  two ways:                                                                           */
-/*                                                                                      */
-/*      hInstance = NULL                Returns the memory requirements                 */
-/*      hInstance = Instance handle     Returns the memory requirements and             */
-/*                                      allocated base addresses for the instance       */
-/*                                                                                      */
-/*  When this function is called for memory allocation (hInstance=NULL) the memory      */
-/*  base address pointers are NULL on return.                                           */
-/*                                                                                      */
-/*  When the function is called for free (hInstance = Instance Handle) the memory       */
-/*  table returns the allocated memory and base addresses used during initialisation.   */
-/*                                                                                      */
-/* PARAMETERS:                                                                          */
-/*  hInstance               Instance Handle                                             */
-/*  pMemoryTable            Pointer to an empty memory definition table                 */
-/*  pInstParams             Pointer to the instance parameters                          */
-/*                                                                                      */
-/* RETURNS:                                                                             */
-/*  LVM_SUCCESS             Succeeded                                                   */
-/*  LVM_NULLADDRESS         When one of pMemoryTable or pInstParams is NULL             */
-/*  LVM_OUTOFRANGE          When any of the Instance parameters are out of range        */
-/*                                                                                      */
-/* NOTES:                                                                               */
-/*  1.  This function may be interrupted by the LVM_Process function                    */
-/*                                                                                      */
-/****************************************************************************************/
-LVM_ReturnStatus_en LVM_GetMemoryTable(LVM_Handle_t         hInstance,
-                                       LVM_MemTab_t         *pMemoryTable,
-                                       LVM_InstParams_t     *pInstParams);
+LVM_ReturnStatus_en LVM_GetVersionInfo(LVM_VersionInfo_st* pVersion);
 
 /****************************************************************************************/
 /*                                                                                      */
 /* FUNCTION:                LVM_GetInstanceHandle                                       */
 /*                                                                                      */
 /* DESCRIPTION:                                                                         */
-/*  This function is used to create a bundle instance. It returns the created instance  */
-/*  handle through phInstance. All parameters are set to their default, inactive state. */
+/*  This function is used to create a bundle instance.                                  */
+/*  All parameters are set to their default, inactive state.                            */
 /*                                                                                      */
 /* PARAMETERS:                                                                          */
-/*  phInstance              pointer to the instance handle                              */
-/*  pMemoryTable            Pointer to the memory definition table                      */
+/*  phInstance              Pointer to the instance handle                              */
 /*  pInstParams             Pointer to the instance parameters                          */
 /*                                                                                      */
 /* RETURNS:                                                                             */
@@ -399,9 +312,24 @@
 /*  1. This function must not be interrupted by the LVM_Process function                */
 /*                                                                                      */
 /****************************************************************************************/
-LVM_ReturnStatus_en LVM_GetInstanceHandle(LVM_Handle_t        *phInstance,
-                                          LVM_MemTab_t        *pMemoryTable,
-                                          LVM_InstParams_t    *pInstParams);
+LVM_ReturnStatus_en LVM_GetInstanceHandle(LVM_Handle_t* phInstance, LVM_InstParams_t* pInstParams);
+
+/****************************************************************************************/
+/*                                                                                      */
+/* FUNCTION:                LVM_DelInstanceHandle                                       */
+/*                                                                                      */
+/* DESCRIPTION:                                                                         */
+/*  This function is used to free a bundle instance previously created with             */
+/*  LVM_GetInstanceHandle and to release the resources associated with it.              */
+/*                                                                                      */
+/* PARAMETERS:                                                                          */
+/*  phInstance              Pointer to the instance handle                              */
+/*                                                                                      */
+/* NOTES:                                                                               */
+/*  1. This function must not be interrupted by the LVM_Process function                */
+/*                                                                                      */
+/****************************************************************************************/
+void LVM_DelInstanceHandle(LVM_Handle_t* phInstance);
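With the memory-table API removed, instance setup reduces to the pair of calls declared above. A minimal lifecycle sketch, assuming LVM.h is included; the instance-parameter values are placeholders rather than recommended settings.

    #include <stddef.h> /* NULL */

    #include "LVM.h"

    LVM_ReturnStatus_en create_and_destroy(void) {
        LVM_Handle_t hInstance = NULL;
        LVM_InstParams_t instParams;

        instParams.BufferMode = LVM_MANAGED_BUFFERS; /* bundle manages its own buffers */
        instParams.MaxBlockSize = 2048;              /* illustrative maximum block size */
        instParams.EQNB_NumBands = LVM_EQ_NBANDS;    /* up to 5 equaliser bands */
        instParams.PSA_Included = LVM_PSA_ON;        /* reserve spectrum analyzer state */

        LVM_ReturnStatus_en status = LVM_GetInstanceHandle(&hInstance, &instParams);
        if (status != LVM_SUCCESS) {
            return status;
        }

        /* ... configure with LVM_SetControlParameters() and run LVM_Process() ... */

        LVM_DelInstanceHandle(&hInstance); /* releases everything the instance allocated */
        return LVM_SUCCESS;
    }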
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -421,7 +349,7 @@
 /*  1. This function must not be interrupted by the LVM_Process function                */
 /*                                                                                      */
 /****************************************************************************************/
-LVM_ReturnStatus_en LVM_ClearAudioBuffers(LVM_Handle_t  hInstance);
+LVM_ReturnStatus_en LVM_ClearAudioBuffers(LVM_Handle_t hInstance);
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -443,8 +371,7 @@
 /*  1.  This function may be interrupted by the LVM_Process function                    */
 /*                                                                                      */
 /****************************************************************************************/
-LVM_ReturnStatus_en LVM_GetControlParameters(LVM_Handle_t           hInstance,
-                                             LVM_ControlParams_t    *pParams);
+LVM_ReturnStatus_en LVM_GetControlParameters(LVM_Handle_t hInstance, LVM_ControlParams_t* pParams);
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -466,8 +393,7 @@
 /*  1.  This function may be interrupted by the LVM_Process function                    */
 /*                                                                                      */
 /****************************************************************************************/
-LVM_ReturnStatus_en LVM_SetControlParameters(LVM_Handle_t           hInstance,
-                                             LVM_ControlParams_t    *pParams);
+LVM_ReturnStatus_en LVM_SetControlParameters(LVM_Handle_t hInstance, LVM_ControlParams_t* pParams);
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -499,11 +425,8 @@
 /*      STEREO              the number of sample pairs in the block                     */
 /*                                                                                      */
 /****************************************************************************************/
-LVM_ReturnStatus_en LVM_Process(LVM_Handle_t                hInstance,
-                                const LVM_FLOAT             *pInData,
-                                LVM_FLOAT                      *pOutData,
-                                LVM_UINT16                  NumSamples,
-                                LVM_UINT32                  AudioTime);
+LVM_ReturnStatus_en LVM_Process(LVM_Handle_t hInstance, const LVM_FLOAT* pInData,
+                                LVM_FLOAT* pOutData, LVM_UINT16 NumSamples, LVM_UINT32 AudioTime);
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -524,8 +447,8 @@
 /*  1.  This function may be interrupted by the LVM_Process function                    */
 /*                                                                                      */
 /****************************************************************************************/
-LVM_ReturnStatus_en LVM_SetHeadroomParams(  LVM_Handle_t            hInstance,
-                                            LVM_HeadroomParams_t    *pHeadroomParams);
+LVM_ReturnStatus_en LVM_SetHeadroomParams(LVM_Handle_t hInstance,
+                                          LVM_HeadroomParams_t* pHeadroomParams);
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -546,8 +469,8 @@
 /*  1.  This function may be interrupted by the LVM_Process function                    */
 /*                                                                                      */
 /****************************************************************************************/
-LVM_ReturnStatus_en LVM_GetHeadroomParams(  LVM_Handle_t            hInstance,
-                                            LVM_HeadroomParams_t    *pHeadroomParams);
+LVM_ReturnStatus_en LVM_GetHeadroomParams(LVM_Handle_t hInstance,
+                                          LVM_HeadroomParams_t* pHeadroomParams);
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -574,10 +497,8 @@
 /*  1. This function may be interrupted by the LVM_Process function                     */
 /*                                                                                      */
 /****************************************************************************************/
-LVM_ReturnStatus_en LVM_GetSpectrum( LVM_Handle_t            hInstance,
-                                     LVM_UINT8               *pCurrentPeaks,
-                                     LVM_UINT8               *pPastPeaks,
-                                     LVM_INT32               AudioTime);
+LVM_ReturnStatus_en LVM_GetSpectrum(LVM_Handle_t hInstance, LVM_UINT8* pCurrentPeaks,
+                                    LVM_UINT8* pPastPeaks, LVM_INT32 AudioTime);
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -599,8 +520,6 @@
 /*  1. This function may be interrupted by the LVM_Process function                     */
 /*                                                                                      */
 /****************************************************************************************/
-LVM_ReturnStatus_en LVM_SetVolumeNoSmoothing( LVM_Handle_t           hInstance,
-                                              LVM_ControlParams_t    *pParams);
+LVM_ReturnStatus_en LVM_SetVolumeNoSmoothing(LVM_Handle_t hInstance, LVM_ControlParams_t* pParams);
 
-#endif      /* __LVM_H__ */
-
+#endif /* __LVM_H__ */
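
The declarations above give the bundle a create/process/delete lifecycle: LVM_GetInstanceHandle no longer takes an LVM_MemTab_t, which suggests the instance allocates its memory internally, and LVM_DelInstanceHandle releases it. A minimal caller sketch under that reading, assuming LVM.h is on the include path and that the instance and control parameter structures are populated elsewhere:

    #include "LVM.h"

    // Hypothetical helper: run one block of interleaved float samples through the
    // bundle. All names other than the LVM_* API are illustrative.
    LVM_ReturnStatus_en ProcessOneBlock(const LVM_FLOAT* in, LVM_FLOAT* out,
                                        LVM_UINT16 numSamples, LVM_UINT32 audioTimeMs,
                                        LVM_InstParams_t* instParams,
                                        LVM_ControlParams_t* controlParams) {
        LVM_Handle_t hInstance = LVM_NULL;

        // Create the instance; parameters start in their default, inactive state.
        LVM_ReturnStatus_en status = LVM_GetInstanceHandle(&hInstance, instParams);
        if (status != LVM_SUCCESS) return status;

        // Apply the caller's control settings, then process one block.
        status = LVM_SetControlParameters(hInstance, controlParams);
        if (status == LVM_SUCCESS) {
            status = LVM_Process(hInstance, in, out, numSamples, audioTimeMs);
        }

        // Release the instance; the handle must not be used afterwards.
        LVM_DelInstanceHandle(&hInstance);
        return status;
    }
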
diff --git a/media/libeffects/lvm/lib/Bundle/src/LVM_API_Specials.cpp b/media/libeffects/lvm/lib/Bundle/src/LVM_API_Specials.cpp
index e241cdd..cea964c 100644
--- a/media/libeffects/lvm/lib/Bundle/src/LVM_API_Specials.cpp
+++ b/media/libeffects/lvm/lib/Bundle/src/LVM_API_Specials.cpp
@@ -47,69 +47,52 @@
 /*  1. This function may be interrupted by the LVM_Process function                     */
 /*                                                                                      */
 /****************************************************************************************/
-LVM_ReturnStatus_en LVM_GetSpectrum(
-                                    LVM_Handle_t            hInstance,
-                                    LVM_UINT8               *pCurrentPeaks,
-                                    LVM_UINT8               *pPastPeaks,
-                                    LVM_INT32               AudioTime
-                                    )
-{
-    LVM_Instance_t           *pInstance   = (LVM_Instance_t  *)hInstance;
+LVM_ReturnStatus_en LVM_GetSpectrum(LVM_Handle_t hInstance, LVM_UINT8* pCurrentPeaks,
+                                    LVM_UINT8* pPastPeaks, LVM_INT32 AudioTime) {
+    LVM_Instance_t* pInstance = (LVM_Instance_t*)hInstance;
 
-    pLVPSA_Handle_t        *hPSAInstance;
-    LVPSA_RETURN           LVPSA_Status;
+    pLVPSA_Handle_t* hPSAInstance;
+    LVPSA_RETURN LVPSA_Status;
 
-    if(pInstance == LVM_NULL)
-    {
+    if (pInstance == LVM_NULL) {
         return LVM_NULLADDRESS;
     }
 
     /*If PSA is not included at the time of instance creation, return without any processing*/
-    if(pInstance->InstParams.PSA_Included!=LVM_PSA_ON)
-    {
+    if (pInstance->InstParams.PSA_Included != LVM_PSA_ON) {
         return LVM_SUCCESS;
     }
 
-    hPSAInstance = (pLVPSA_Handle_t *)pInstance->hPSAInstance;
+    hPSAInstance = (pLVPSA_Handle_t*)pInstance->hPSAInstance;
 
-    if((pCurrentPeaks == LVM_NULL) ||
-        (pPastPeaks == LVM_NULL))
-    {
+    if ((pCurrentPeaks == LVM_NULL) || (pPastPeaks == LVM_NULL)) {
         return LVM_NULLADDRESS;
     }
 
     /*
      * Update new parameters if necessary
      */
-    if (pInstance->ControlPending == LVM_TRUE)
-    {
+    if (pInstance->ControlPending == LVM_TRUE) {
         LVM_ApplyNewSettings(hInstance);
     }
 
     /* If PSA module is disabled, do nothing */
-    if(pInstance->Params.PSA_Enable==LVM_PSA_OFF)
-    {
+    if (pInstance->Params.PSA_Enable == LVM_PSA_OFF) {
         return LVM_ALGORITHMDISABLED;
     }
 
-    LVPSA_Status = LVPSA_GetSpectrum(hPSAInstance,
-                            (LVPSA_Time) (AudioTime),
-                            (LVM_UINT8*) pCurrentPeaks,
-                            (LVM_UINT8*) pPastPeaks );
+    LVPSA_Status = LVPSA_GetSpectrum(hPSAInstance, (LVPSA_Time)(AudioTime),
+                                     (LVM_UINT8*)pCurrentPeaks, (LVM_UINT8*)pPastPeaks);
 
-    if(LVPSA_Status != LVPSA_OK)
-    {
-        if(LVPSA_Status == LVPSA_ERROR_WRONGTIME)
-        {
-            return (LVM_ReturnStatus_en) LVM_WRONGAUDIOTIME;
-        }
-        else
-        {
-            return (LVM_ReturnStatus_en) LVM_NULLADDRESS;
+    if (LVPSA_Status != LVPSA_OK) {
+        if (LVPSA_Status == LVPSA_ERROR_WRONGTIME) {
+            return (LVM_ReturnStatus_en)LVM_WRONGAUDIOTIME;
+        } else {
+            return (LVM_ReturnStatus_en)LVM_NULLADDRESS;
         }
     }
 
-    return(LVM_SUCCESS);
+    return (LVM_SUCCESS);
 }
 
 /****************************************************************************************/
@@ -132,15 +115,12 @@
 /*  1. This function may be interrupted by the LVM_Process function                     */
 /*                                                                                      */
 /****************************************************************************************/
-LVM_ReturnStatus_en LVM_SetVolumeNoSmoothing( LVM_Handle_t           hInstance,
-                                              LVM_ControlParams_t    *pParams)
-{
-    LVM_Instance_t      *pInstance =(LVM_Instance_t  *)hInstance;
+LVM_ReturnStatus_en LVM_SetVolumeNoSmoothing(LVM_Handle_t hInstance, LVM_ControlParams_t* pParams) {
+    LVM_Instance_t* pInstance = (LVM_Instance_t*)hInstance;
     LVM_ReturnStatus_en Error;
 
     /*Apply new controls*/
-    Error = LVM_SetControlParameters(hInstance,pParams);
+    Error = LVM_SetControlParameters(hInstance, pParams);
     pInstance->NoSmoothVolume = LVM_TRUE;
     return Error;
 }
-
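
LVM_GetSpectrum above only produces data when PSA was included at instance creation and is currently enabled; otherwise it returns early with LVM_SUCCESS or LVM_ALGORITHMDISABLED. A small polling sketch, assuming (from the parameter names) that each peak buffer holds one byte per PSA band configured at instance creation:

    #include "LVM.h"

    // Poll the spectrum analyser for the audio time of the block just processed.
    // currentPeaks and pastPeaks must each be sized for the configured band count.
    bool ReadSpectrum(LVM_Handle_t hInstance, LVM_INT32 audioTimeMs,
                      LVM_UINT8* currentPeaks, LVM_UINT8* pastPeaks) {
        LVM_ReturnStatus_en status =
                LVM_GetSpectrum(hInstance, currentPeaks, pastPeaks, audioTimeMs);
        // LVM_ALGORITHMDISABLED (PSA switched off) and LVM_WRONGAUDIOTIME (the
        // requested time lies outside the stored history) are expected at runtime.
        return status == LVM_SUCCESS;
    }
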
diff --git a/media/libeffects/lvm/lib/Bundle/src/LVM_Buffers.cpp b/media/libeffects/lvm/lib/Bundle/src/LVM_Buffers.cpp
index 3aeddbb..fbb0fe1 100644
--- a/media/libeffects/lvm/lib/Bundle/src/LVM_Buffers.cpp
+++ b/media/libeffects/lvm/lib/Bundle/src/LVM_Buffers.cpp
@@ -49,109 +49,90 @@
 /* NOTES:                                                                               */
 /*                                                                                      */
 /****************************************************************************************/
-void LVM_BufferManagedIn(LVM_Handle_t       hInstance,
-                         const LVM_FLOAT    *pInData,
-                         LVM_FLOAT          **pToProcess,
-                         LVM_FLOAT          **pProcessed,
-                         LVM_UINT16         *pNumSamples)
-{
-
-    LVM_INT16        SampleCount;           /* Number of samples to be processed this call */
-    LVM_INT16        NumSamples;            /* Number of samples in scratch buffer */
-    LVM_FLOAT        *pStart;
-    LVM_Instance_t   *pInstance = (LVM_Instance_t  *)hInstance;
-    LVM_Buffer_t     *pBuffer;
-    LVM_FLOAT        *pDest;
-#ifdef SUPPORT_MC
-    LVM_INT16        NumChannels = pInstance->NrChannels;
-#else
-    LVM_INT16        NumChannels = 2;
-#endif
+void LVM_BufferManagedIn(LVM_Handle_t hInstance, const LVM_FLOAT* pInData, LVM_FLOAT** pToProcess,
+                         LVM_FLOAT** pProcessed, LVM_UINT16* pNumSamples) {
+    LVM_INT16 SampleCount; /* Number of samples to be processed this call */
+    LVM_INT16 NumSamples;  /* Number of samples in scratch buffer */
+    LVM_FLOAT* pStart;
+    LVM_Instance_t* pInstance = (LVM_Instance_t*)hInstance;
+    LVM_Buffer_t* pBuffer;
+    LVM_FLOAT* pDest;
+    LVM_INT16 NumChannels = pInstance->NrChannels;
 
     /*
      * Set the processing address pointers
      */
-    pBuffer     = pInstance->pBufferManagement;
-    pDest       = pBuffer->pScratch;
+    pBuffer = pInstance->pBufferManagement;
+    pDest = pBuffer->pScratch;
     *pToProcess = pBuffer->pScratch;
     *pProcessed = pBuffer->pScratch;
 
     /*
      * Check if it is the first call of a block
      */
-    if (pInstance->SamplesToProcess == 0)
-    {
+    if (pInstance->SamplesToProcess == 0) {
         /*
          * First call for a new block of samples
          */
         pInstance->SamplesToProcess = (LVM_INT16)(*pNumSamples + pBuffer->InDelaySamples);
-        pInstance->pInputSamples    = (LVM_FLOAT *)pInData;
-        pBuffer->BufferState        = LVM_FIRSTCALL;
+        pInstance->pInputSamples = (LVM_FLOAT*)pInData;
+        pBuffer->BufferState = LVM_FIRSTCALL;
     }
-    pStart = pInstance->pInputSamples;                 /* Pointer to the input samples */
-    pBuffer->SamplesToOutput  = 0;                     /* Samples to output is same as
-                                                          number read for inplace processing */
+    pStart = pInstance->pInputSamples; /* Pointer to the input samples */
+    pBuffer->SamplesToOutput = 0;      /* Samples to output is same as
+                                          number read for inplace processing */
 
     /*
      * Calculate the number of samples to process this call and update the buffer state
      */
-    if (pInstance->SamplesToProcess > pInstance->InternalBlockSize)
-    {
+    if (pInstance->SamplesToProcess > pInstance->InternalBlockSize) {
         /*
         * Process the maximum block size of samples.
          */
         SampleCount = pInstance->InternalBlockSize;
-        NumSamples  = pInstance->InternalBlockSize;
-    }
-    else
-    {
+        NumSamples = pInstance->InternalBlockSize;
+    } else {
         /*
          * Last call for the block, so calculate how many frames and samples to process
-          */
-        LVM_INT16   NumFrames;
+         */
+        LVM_INT16 NumFrames;
 
-        NumSamples  = pInstance->SamplesToProcess;
-        NumFrames    = (LVM_INT16)(NumSamples >> MIN_INTERNAL_BLOCKSHIFT);
+        NumSamples = pInstance->SamplesToProcess;
+        NumFrames = (LVM_INT16)(NumSamples >> MIN_INTERNAL_BLOCKSHIFT);
         SampleCount = (LVM_INT16)(NumFrames << MIN_INTERNAL_BLOCKSHIFT);
 
         /*
          * Update the buffer state
          */
-        if (pBuffer->BufferState == LVM_FIRSTCALL)
-        {
+        if (pBuffer->BufferState == LVM_FIRSTCALL) {
             pBuffer->BufferState = LVM_FIRSTLASTCALL;
-        }
-        else
-        {
+        } else {
             pBuffer->BufferState = LVM_LASTCALL;
         }
     }
-    *pNumSamples = (LVM_UINT16)SampleCount;  /* Set the number of samples to process this call */
+    *pNumSamples = (LVM_UINT16)SampleCount; /* Set the number of samples to process this call */
 
     /*
      * Copy samples from the delay buffer as required
      */
-    if (((pBuffer->BufferState == LVM_FIRSTCALL) ||
-        (pBuffer->BufferState == LVM_FIRSTLASTCALL)) &&
-        (pBuffer->InDelaySamples != 0))
-    {
-        Copy_Float(&pBuffer->InDelayBuffer[0],                             /* Source */
-                   pDest,                                                  /* Destination */
-                   (LVM_INT16)(NumChannels * pBuffer->InDelaySamples));    /* Number of delay \
-                                                                       samples, left and right */
+    if (((pBuffer->BufferState == LVM_FIRSTCALL) || (pBuffer->BufferState == LVM_FIRSTLASTCALL)) &&
+        (pBuffer->InDelaySamples != 0)) {
+        Copy_Float(&pBuffer->InDelayBuffer[0],                          /* Source */
+                   pDest,                                               /* Destination */
+                   (LVM_INT16)(NumChannels * pBuffer->InDelaySamples)); /* Number of delay \
+                                                                    samples, left and right */
         NumSamples = (LVM_INT16)(NumSamples - pBuffer->InDelaySamples); /* Update sample count */
-        pDest += NumChannels * pBuffer->InDelaySamples;      /* Update the destination pointer */
+        pDest += NumChannels * pBuffer->InDelaySamples; /* Update the destination pointer */
     }
 
     /*
      * Copy the rest of the samples for this call from the input buffer
      */
-    if (NumSamples > 0)
-    {
-        Copy_Float(pStart,                                      /* Source */
-                   pDest,                                       /* Destination */
-                   (LVM_INT16)(NumChannels * NumSamples));      /* Number of input samples */
-        pStart += NumChannels * NumSamples;                     /* Update the input pointer */
+    if (NumSamples > 0) {
+        Copy_Float(pStart,                                 /* Source */
+                   pDest,                                  /* Destination */
+                   (LVM_INT16)(NumChannels * NumSamples)); /* Number of input samples */
+        pStart += NumChannels * NumSamples;                /* Update the input pointer */
 
         /*
          * Update the input data pointer and samples to output
@@ -161,33 +142,30 @@
     }
 
     /*
-      * Update the sample count and input pointer
+     * Update the sample count and input pointer
      */
     /* Update the count of samples */
-    pInstance->SamplesToProcess  = (LVM_INT16)(pInstance->SamplesToProcess - SampleCount);
-    pInstance->pInputSamples     = pStart; /* Update input sample pointer */
+    pInstance->SamplesToProcess = (LVM_INT16)(pInstance->SamplesToProcess - SampleCount);
+    pInstance->pInputSamples = pStart; /* Update input sample pointer */
 
     /*
      * Save samples to the delay buffer if any left unprocessed
      */
-    if ((pBuffer->BufferState == LVM_FIRSTLASTCALL) ||
-        (pBuffer->BufferState == LVM_LASTCALL))
-    {
+    if ((pBuffer->BufferState == LVM_FIRSTLASTCALL) || (pBuffer->BufferState == LVM_LASTCALL)) {
         NumSamples = pInstance->SamplesToProcess;
-        pStart     = pBuffer->pScratch;                             /* Start of the buffer */
-        pStart    += NumChannels * SampleCount; /* Offset by the number of processed samples */
-        if (NumSamples != 0)
-        {
-            Copy_Float(pStart,                                         /* Source */
-                       &pBuffer->InDelayBuffer[0],                     /* Destination */
-                       (LVM_INT16)(NumChannels * NumSamples));   /* Number of input samples */
+        pStart = pBuffer->pScratch;          /* Start of the buffer */
+        pStart += NumChannels * SampleCount; /* Offset by the number of processed samples */
+        if (NumSamples != 0) {
+            Copy_Float(pStart,                                 /* Source */
+                       &pBuffer->InDelayBuffer[0],             /* Destination */
+                       (LVM_INT16)(NumChannels * NumSamples)); /* Number of input samples */
         }
 
         /*
          * Update the delay sample count
          */
-        pBuffer->InDelaySamples     = NumSamples;       /* Number of delay sample pairs */
-        pInstance->SamplesToProcess = 0;                            /* All Samples used */
+        pBuffer->InDelaySamples = NumSamples; /* Number of delay sample pairs */
+        pInstance->SamplesToProcess = 0;      /* All Samples used */
     }
 }
 
@@ -213,33 +191,25 @@
 /* NOTES:                                                                               */
 /*                                                                                      */
 /****************************************************************************************/
-void LVM_BufferUnmanagedIn(LVM_Handle_t     hInstance,
-                           LVM_FLOAT        **pToProcess,
-                           LVM_FLOAT        **pProcessed,
-                           LVM_UINT16       *pNumSamples)
-{
-
-    LVM_Instance_t    *pInstance = (LVM_Instance_t  *)hInstance;
+void LVM_BufferUnmanagedIn(LVM_Handle_t hInstance, LVM_FLOAT** pToProcess, LVM_FLOAT** pProcessed,
+                           LVM_UINT16* pNumSamples) {
+    LVM_Instance_t* pInstance = (LVM_Instance_t*)hInstance;
 
     /*
      * Check if this is the first call of a block
      */
-    if (pInstance->SamplesToProcess == 0)
-    {
-        pInstance->SamplesToProcess = (LVM_INT16)*pNumSamples;    /* Get the number of samples
-                                                                               on first call */
-        pInstance->pInputSamples    = *pToProcess;                /* Get the I/O pointers */
-        pInstance->pOutputSamples    = *pProcessed;
+    if (pInstance->SamplesToProcess == 0) {
+        pInstance->SamplesToProcess = (LVM_INT16)*pNumSamples; /* Get the number of samples
+                                                                            on first call */
+        pInstance->pInputSamples = *pToProcess;                /* Get the I/O pointers */
+        pInstance->pOutputSamples = *pProcessed;
 
         /*
         * Set the block size to process
          */
-        if (pInstance->SamplesToProcess > pInstance->InternalBlockSize)
-        {
+        if (pInstance->SamplesToProcess > pInstance->InternalBlockSize) {
             *pNumSamples = (LVM_UINT16)pInstance->InternalBlockSize;
-        }
-        else
-        {
+        } else {
             *pNumSamples = (LVM_UINT16)pInstance->SamplesToProcess;
         }
     }
@@ -329,32 +299,17 @@
 /* NOTES:                                                                               */
 /*                                                                                      */
 /****************************************************************************************/
-void LVM_BufferIn(LVM_Handle_t      hInstance,
-                  const LVM_FLOAT   *pInData,
-                  LVM_FLOAT         **pToProcess,
-                  LVM_FLOAT         **pProcessed,
-                  LVM_UINT16        *pNumSamples)
-{
-
-    LVM_Instance_t    *pInstance = (LVM_Instance_t  *)hInstance;
+void LVM_BufferIn(LVM_Handle_t hInstance, const LVM_FLOAT* pInData, LVM_FLOAT** pToProcess,
+                  LVM_FLOAT** pProcessed, LVM_UINT16* pNumSamples) {
+    LVM_Instance_t* pInstance = (LVM_Instance_t*)hInstance;
 
     /*
      * Check which mode, managed or unmanaged
      */
-    if (pInstance->InstParams.BufferMode == LVM_MANAGED_BUFFERS)
-    {
-        LVM_BufferManagedIn(hInstance,
-                            pInData,
-                            pToProcess,
-                            pProcessed,
-                            pNumSamples);
-    }
-    else
-    {
-        LVM_BufferUnmanagedIn(hInstance,
-                              pToProcess,
-                              pProcessed,
-                              pNumSamples);
+    if (pInstance->InstParams.BufferMode == LVM_MANAGED_BUFFERS) {
+        LVM_BufferManagedIn(hInstance, pInData, pToProcess, pProcessed, pNumSamples);
+    } else {
+        LVM_BufferUnmanagedIn(hInstance, pToProcess, pProcessed, pNumSamples);
     }
 }
 /****************************************************************************************/
@@ -377,196 +332,124 @@
 /* NOTES:                                                                               */
 /*                                                                                      */
 /****************************************************************************************/
-void LVM_BufferManagedOut(LVM_Handle_t        hInstance,
-                          LVM_FLOAT            *pOutData,
-                          LVM_UINT16        *pNumSamples)
-{
-
-    LVM_Instance_t  *pInstance  = (LVM_Instance_t  *)hInstance;
-    LVM_Buffer_t    *pBuffer    = pInstance->pBufferManagement;
-    LVM_INT16       SampleCount = (LVM_INT16)*pNumSamples;
-    LVM_INT16       NumSamples;
-    LVM_FLOAT       *pStart;
-    LVM_FLOAT       *pDest;
-#ifdef SUPPORT_MC
-    LVM_INT32       NrChannels = pInstance->NrChannels;
+void LVM_BufferManagedOut(LVM_Handle_t hInstance, LVM_FLOAT* pOutData, LVM_UINT16* pNumSamples) {
+    LVM_Instance_t* pInstance = (LVM_Instance_t*)hInstance;
+    LVM_Buffer_t* pBuffer = pInstance->pBufferManagement;
+    LVM_INT16 SampleCount = (LVM_INT16)*pNumSamples;
+    LVM_INT16 NumSamples;
+    LVM_FLOAT* pStart;
+    LVM_FLOAT* pDest;
+    LVM_INT32 NrChannels = pInstance->NrChannels;
 #define NrFrames NumSamples  // alias for clarity
 #define FrameCount SampleCount
-#endif
 
     /*
      * Set the pointers
      */
     NumSamples = pBuffer->SamplesToOutput;
-    pStart     = pBuffer->pScratch;
+    pStart = pBuffer->pScratch;
 
     /*
      * check if it is the first call of a block
-      */
-    if ((pBuffer->BufferState == LVM_FIRSTCALL) ||
-        (pBuffer->BufferState == LVM_FIRSTLASTCALL))
-    {
+     */
+    if ((pBuffer->BufferState == LVM_FIRSTCALL) || (pBuffer->BufferState == LVM_FIRSTLASTCALL)) {
         /* First call for a new block */
-        pInstance->pOutputSamples = pOutData;                 /* Initialise the destination */
+        pInstance->pOutputSamples = pOutData; /* Initialise the destination */
     }
-    pDest = pInstance->pOutputSamples;                        /* Set the output address */
+    pDest = pInstance->pOutputSamples; /* Set the output address */
 
     /*
      * If the number of samples is non-zero then there are still samples to send to
      * the output buffer
      */
-    if ((NumSamples != 0) &&
-        (pBuffer->OutDelaySamples != 0))
-    {
+    if ((NumSamples != 0) && (pBuffer->OutDelaySamples != 0)) {
         /*
          * Copy the delayed output buffer samples to the output
          */
-        if (pBuffer->OutDelaySamples <= NumSamples)
-        {
+        if (pBuffer->OutDelaySamples <= NumSamples) {
             /*
              * Copy all output delay samples to the output
              */
-#ifdef SUPPORT_MC
-            Copy_Float(&pBuffer->OutDelayBuffer[0],                /* Source */
-                       pDest,                                      /* Destination */
+            Copy_Float(&pBuffer->OutDelayBuffer[0], /* Source */
+                       pDest,                       /* Destination */
                        /* Number of delay samples */
                        (LVM_INT16)(NrChannels * pBuffer->OutDelaySamples));
-#else
-            Copy_Float(&pBuffer->OutDelayBuffer[0],                /* Source */
-                       pDest,                                      /* Destination */
-                       (LVM_INT16)(2 * pBuffer->OutDelaySamples)); /* Number of delay samples */
-#endif
 
             /*
              * Update the pointer and sample counts
              */
-#ifdef SUPPORT_MC
             pDest += NrChannels * pBuffer->OutDelaySamples; /* Output sample pointer */
-#else
-            pDest += 2 * pBuffer->OutDelaySamples; /* Output sample pointer */
-#endif
             NumSamples = (LVM_INT16)(NumSamples - pBuffer->OutDelaySamples); /* Samples left \
                                                                                 to send */
             pBuffer->OutDelaySamples = 0; /* No samples left in the buffer */
-        }
-        else
-        {
+        } else {
             /*
-             * Copy only some of the ouput delay samples to the output
+             * Copy only some of the output delay samples to the output
              */
-#ifdef SUPPORT_MC
-            Copy_Float(&pBuffer->OutDelayBuffer[0],                    /* Source */
-                       pDest,                                          /* Destination */
-                       (LVM_INT16)(NrChannels * NrFrames));       /* Number of delay samples */
-#else
-            Copy_Float(&pBuffer->OutDelayBuffer[0],                    /* Source */
-                       pDest,                                          /* Destination */
-                       (LVM_INT16)(2 * NumSamples));       /* Number of delay samples */
-#endif
+            Copy_Float(&pBuffer->OutDelayBuffer[0],         /* Source */
+                       pDest,                               /* Destination */
+                       (LVM_INT16)(NrChannels * NrFrames)); /* Number of delay samples */
 
             /*
              * Update the pointer and sample counts
              */
-#ifdef SUPPORT_MC
             pDest += NrChannels * NrFrames; /* Output sample pointer */
-#else
-            pDest += 2 * NumSamples; /* Output sample pointer */
-#endif
             /* No samples left in the buffer */
             pBuffer->OutDelaySamples = (LVM_INT16)(pBuffer->OutDelaySamples - NumSamples);
 
             /*
              * Realign the delay buffer data to avoid using circular buffer management
              */
-#ifdef SUPPORT_MC
-            Copy_Float(&pBuffer->OutDelayBuffer[NrChannels * NrFrames],         /* Source */
-                       &pBuffer->OutDelayBuffer[0],                    /* Destination */
+            Copy_Float(&pBuffer->OutDelayBuffer[NrChannels * NrFrames], /* Source */
+                       &pBuffer->OutDelayBuffer[0],                     /* Destination */
                        /* Number of samples to move */
                        (LVM_INT16)(NrChannels * pBuffer->OutDelaySamples));
-#else
-            Copy_Float(&pBuffer->OutDelayBuffer[2 * NumSamples],         /* Source */
-                       &pBuffer->OutDelayBuffer[0],                    /* Destination */
-                       (LVM_INT16)(2 * pBuffer->OutDelaySamples)); /* Number of samples to move */
-#endif
-            NumSamples = 0;                                /* Samples left to send */
+            NumSamples = 0; /* Samples left to send */
         }
     }
 
     /*
      * Copy the processed results to the output
      */
-    if ((NumSamples != 0) &&
-        (SampleCount != 0))
-    {
-        if (SampleCount <= NumSamples)
-        {
+    if ((NumSamples != 0) && (SampleCount != 0)) {
+        if (SampleCount <= NumSamples) {
             /*
              * Copy all processed samples to the output
              */
-#ifdef SUPPORT_MC
-            Copy_Float(pStart,                                      /* Source */
-                       pDest,                                       /* Destination */
+            Copy_Float(pStart,                                /* Source */
+                       pDest,                                 /* Destination */
                        (LVM_INT16)(NrChannels * FrameCount)); /* Number of processed samples */
-#else
-            Copy_Float(pStart,                                      /* Source */
-                       pDest,                                       /* Destination */
-                       (LVM_INT16)(2 * SampleCount)); /* Number of processed samples */
-#endif
             /*
              * Update the pointer and sample counts
              */
-#ifdef SUPPORT_MC
-            pDest      += NrChannels * FrameCount;                 /* Output sample pointer */
-#else
-            pDest      += 2 * SampleCount;                          /* Output sample pointer */
-#endif
-            NumSamples  = (LVM_INT16)(NumSamples - SampleCount);    /* Samples left to send */
-            SampleCount = 0; /* No samples left in the buffer */
-        }
-        else
-        {
+            pDest += NrChannels * FrameCount;                   /* Output sample pointer */
+            NumSamples = (LVM_INT16)(NumSamples - SampleCount); /* Samples left to send */
+            SampleCount = 0;                                    /* No samples left in the buffer */
+        } else {
             /*
              * Copy only some processed samples to the output
              */
-#ifdef SUPPORT_MC
-            Copy_Float(pStart,                                         /* Source */
-                       pDest,                                          /* Destination */
-                       (LVM_INT16)(NrChannels * NrFrames));  /* Number of processed samples */
-#else
-            Copy_Float(pStart,                                         /* Source */
-                       pDest,                                          /* Destination */
-                       (LVM_INT16)(2 * NumSamples));     /* Number of processed samples */
-#endif
+            Copy_Float(pStart,                              /* Source */
+                       pDest,                               /* Destination */
+                       (LVM_INT16)(NrChannels * NrFrames)); /* Number of processed samples */
             /*
              * Update the pointers and sample counts
-               */
-#ifdef SUPPORT_MC
-            pStart      += NrChannels * NrFrames;               /* Processed sample pointer */
-            pDest       += NrChannels * NrFrames;               /* Output sample pointer */
-#else
-            pStart      += 2 * NumSamples;                        /* Processed sample pointer */
-            pDest       += 2 * NumSamples;                        /* Output sample pointer */
-#endif
-            SampleCount  = (LVM_INT16)(SampleCount - NumSamples); /* Processed samples left */
-            NumSamples   = 0;                                     /* Clear the sample count */
+             */
+            pStart += NrChannels * NrFrames;                     /* Processed sample pointer */
+            pDest += NrChannels * NrFrames;                      /* Output sample pointer */
+            SampleCount = (LVM_INT16)(SampleCount - NumSamples); /* Processed samples left */
+            NumSamples = 0;                                      /* Clear the sample count */
         }
     }
 
     /*
      * Copy the remaining processed data to the output delay buffer
      */
-    if (SampleCount != 0)
-    {
-#ifdef SUPPORT_MC
-        Copy_Float(pStart,                                                 /* Source */
+    if (SampleCount != 0) {
+        Copy_Float(pStart, /* Source */
                    /* Destination */
                    &pBuffer->OutDelayBuffer[NrChannels * pBuffer->OutDelaySamples],
-                   (LVM_INT16)(NrChannels * FrameCount));      /* Number of processed samples */
-#else
-        Copy_Float(pStart,                                                 /* Source */
-                   &pBuffer->OutDelayBuffer[2 * pBuffer->OutDelaySamples], /* Destination */
-                   (LVM_INT16)(2 * SampleCount));               /* Number of processed samples */
-#endif
+                   (LVM_INT16)(NrChannels * FrameCount)); /* Number of processed samples */
         /* Update the buffer count */
         pBuffer->OutDelaySamples = (LVM_INT16)(pBuffer->OutDelaySamples + SampleCount);
     }
@@ -574,10 +457,10 @@
     /*
      * pointers, counts and set default buffer processing
      */
-    pBuffer->SamplesToOutput  = NumSamples;                         /* Samples left to send */
-    pInstance->pOutputSamples = pDest;                              /* Output sample pointer */
-    pBuffer->BufferState      = LVM_MAXBLOCKCALL;                   /* Set for the default call \
-                                                                            block size */
+    pBuffer->SamplesToOutput = NumSamples;   /* Samples left to send */
+    pInstance->pOutputSamples = pDest;       /* Output sample pointer */
+    pBuffer->BufferState = LVM_MAXBLOCKCALL; /* Set for the default call \
+                                                     block size */
     /* This will terminate the loop when all samples processed */
     *pNumSamples = (LVM_UINT16)pInstance->SamplesToProcess;
 }
@@ -601,44 +484,31 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-void LVM_BufferUnmanagedOut(LVM_Handle_t        hInstance,
-                            LVM_UINT16          *pNumSamples)
-{
-
-    LVM_Instance_t      *pInstance  = (LVM_Instance_t  *)hInstance;
-#ifdef SUPPORT_MC
-    LVM_INT16           NumChannels = pInstance->NrChannels;
-    if (NumChannels == 1)
-    {
+void LVM_BufferUnmanagedOut(LVM_Handle_t hInstance, LVM_UINT16* pNumSamples) {
+    LVM_Instance_t* pInstance = (LVM_Instance_t*)hInstance;
+    LVM_INT16 NumChannels = pInstance->NrChannels;
+    if (NumChannels == 1) {
         /* Mono input is processed as stereo by LVM module */
         NumChannels = 2;
     }
 #undef NrFrames
-#define NrFrames (*pNumSamples) // alias for clarity
-#else
-    LVM_INT16           NumChannels = 2;
-#endif
+#define NrFrames (*pNumSamples)  // alias for clarity
 
     /*
      * Update sample counts
      */
-    pInstance->pInputSamples    += (LVM_INT16)(*pNumSamples * NumChannels); /* Update the I/O pointers */
-#ifdef SUPPORT_MC
-    pInstance->pOutputSamples   += (LVM_INT16)(NrFrames * NumChannels);
-#else
-    pInstance->pOutputSamples   += (LVM_INT16)(*pNumSamples * 2);
-#endif
-    pInstance->SamplesToProcess  = (LVM_INT16)(pInstance->SamplesToProcess - *pNumSamples); /* Update the sample count */
+    pInstance->pInputSamples +=
+            (LVM_INT16)(*pNumSamples * NumChannels); /* Update the I/O pointers */
+    pInstance->pOutputSamples += (LVM_INT16)(NrFrames * NumChannels);
+    pInstance->SamplesToProcess =
+            (LVM_INT16)(pInstance->SamplesToProcess - *pNumSamples); /* Update the sample count */
 
     /*
      * Set the block size to process
      */
-    if (pInstance->SamplesToProcess > pInstance->InternalBlockSize)
-    {
+    if (pInstance->SamplesToProcess > pInstance->InternalBlockSize) {
         *pNumSamples = (LVM_UINT16)pInstance->InternalBlockSize;
-    }
-    else
-    {
+    } else {
         *pNumSamples = (LVM_UINT16)pInstance->SamplesToProcess;
     }
 }
@@ -698,25 +568,15 @@
 /* NOTES:                                                                               */
 /*                                                                                      */
 /****************************************************************************************/
-void LVM_BufferOut(LVM_Handle_t     hInstance,
-                   LVM_FLOAT        *pOutData,
-                   LVM_UINT16       *pNumSamples)
-{
-
-    LVM_Instance_t    *pInstance  = (LVM_Instance_t  *)hInstance;
+void LVM_BufferOut(LVM_Handle_t hInstance, LVM_FLOAT* pOutData, LVM_UINT16* pNumSamples) {
+    LVM_Instance_t* pInstance = (LVM_Instance_t*)hInstance;
 
     /*
      * Check which mode, managed or unmanaged
      */
-    if (pInstance->InstParams.BufferMode == LVM_MANAGED_BUFFERS)
-    {
-        LVM_BufferManagedOut(hInstance,
-                             pOutData,
-                             pNumSamples);
-    }
-    else
-    {
-        LVM_BufferUnmanagedOut(hInstance,
-                               pNumSamples);
+    if (pInstance->InstParams.BufferMode == LVM_MANAGED_BUFFERS) {
+        LVM_BufferManagedOut(hInstance, pOutData, pNumSamples);
+    } else {
+        LVM_BufferUnmanagedOut(hInstance, pNumSamples);
     }
 }
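
The managed-input path above trims the final chunk of a block down to a whole number of internal frames (NumFrames = NumSamples >> MIN_INTERNAL_BLOCKSHIFT, then SampleCount = NumFrames << MIN_INTERNAL_BLOCKSHIFT) and parks the remainder in InDelayBuffer until the next call. A standalone sketch of that rounding, using a hypothetical shift value since the real MIN_INTERNAL_BLOCKSHIFT lives in the bundle's internal headers:

    #include <cstdint>

    // Hypothetical value for illustration only; the bundle defines the real
    // MIN_INTERNAL_BLOCKSHIFT internally.
    constexpr int kMinInternalBlockShift = 4;  // internal frames of 16 samples

    // Round a remaining per-channel sample count down to whole internal frames,
    // returning the count to process now; the remainder is what the managed
    // buffer path carries over in its input delay buffer.
    int16_t RoundToInternalFrames(int16_t numSamples, int16_t* remainder) {
        const int16_t numFrames = static_cast<int16_t>(numSamples >> kMinInternalBlockShift);
        const int16_t sampleCount = static_cast<int16_t>(numFrames << kMinInternalBlockShift);
        *remainder = static_cast<int16_t>(numSamples - sampleCount);
        return sampleCount;
    }

    // Example: 100 remaining samples -> process 96 now, carry 4 to the next call.
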
diff --git a/media/libeffects/lvm/lib/Bundle/src/LVM_Coeffs.h b/media/libeffects/lvm/lib/Bundle/src/LVM_Coeffs.h
index 812f8e5..c02caa1 100644
--- a/media/libeffects/lvm/lib/Bundle/src/LVM_Coeffs.h
+++ b/media/libeffects/lvm/lib/Bundle/src/LVM_Coeffs.h
@@ -24,832 +24,832 @@
 /*                                                                                  */
 /************************************************************************************/
 
-#define TrebleBoostCorner                                  8000
-#define TrebleBoostMinRate                                    4
-#define TrebleBoostSteps                                     15
+#define TrebleBoostCorner 8000
+#define TrebleBoostMinRate 4
+#define TrebleBoostSteps 15
 
 /* Coefficients for sample rate 22050Hz */
-                                                                    /* Gain =  1.000000 dB */
-#define HPF_Fs22050_Gain1_A0                            1.038434
-#define HPF_Fs22050_Gain1_A1                            0.331599
-#define HPF_Fs22050_Gain1_A2                            0.000000
-#define HPF_Fs22050_Gain1_B1                            0.370033
-#define HPF_Fs22050_Gain1_B2                            0.000000
-                                                                    /* Gain =  2.000000 dB */
-#define HPF_Fs22050_Gain2_A0                            1.081557
-#define HPF_Fs22050_Gain2_A1                            0.288475
-#define HPF_Fs22050_Gain2_A2                            0.000000
-#define HPF_Fs22050_Gain2_B1                            0.370033
-#define HPF_Fs22050_Gain2_B2                            0.000000
-                                                                    /* Gain =  3.000000 dB */
-#define HPF_Fs22050_Gain3_A0                            1.129943
-#define HPF_Fs22050_Gain3_A1                            0.240090
-#define HPF_Fs22050_Gain3_A2                            0.000000
-#define HPF_Fs22050_Gain3_B1                            0.370033
-#define HPF_Fs22050_Gain3_B2                            0.000000
-                                                                    /* Gain =  4.000000 dB */
-#define HPF_Fs22050_Gain4_A0                            1.184232
-#define HPF_Fs22050_Gain4_A1                            0.185801
-#define HPF_Fs22050_Gain4_A2                            0.000000
-#define HPF_Fs22050_Gain4_B1                            0.370033
-#define HPF_Fs22050_Gain4_B2                            0.000000
-                                                                    /* Gain =  5.000000 dB */
-#define HPF_Fs22050_Gain5_A0                            1.245145
-#define HPF_Fs22050_Gain5_A1                            0.124887
-#define HPF_Fs22050_Gain5_A2                            0.000000
-#define HPF_Fs22050_Gain5_B1                            0.370033
-#define HPF_Fs22050_Gain5_B2                            0.000000
-                                                                    /* Gain =  6.000000 dB */
-#define HPF_Fs22050_Gain6_A0                            1.313491
-#define HPF_Fs22050_Gain6_A1                            0.056541
-#define HPF_Fs22050_Gain6_A2                            0.000000
-#define HPF_Fs22050_Gain6_B1                            0.370033
-#define HPF_Fs22050_Gain6_B2                            0.000000
-                                                                    /* Gain =  7.000000 dB */
-#define HPF_Fs22050_Gain7_A0                            1.390177
-#define HPF_Fs22050_Gain7_A1                            (-0.020144)
-#define HPF_Fs22050_Gain7_A2                            0.000000
-#define HPF_Fs22050_Gain7_B1                            0.370033
-#define HPF_Fs22050_Gain7_B2                            0.000000
-                                                                    /* Gain =  8.000000 dB */
-#define HPF_Fs22050_Gain8_A0                            1.476219
-#define HPF_Fs22050_Gain8_A1                            (-0.106187)
-#define HPF_Fs22050_Gain8_A2                            0.000000
-#define HPF_Fs22050_Gain8_B1                            0.370033
-#define HPF_Fs22050_Gain8_B2                            0.000000
-                                                                    /* Gain =  9.000000 dB */
-#define HPF_Fs22050_Gain9_A0                            1.572761
-#define HPF_Fs22050_Gain9_A1                            (-0.202728)
-#define HPF_Fs22050_Gain9_A2                            0.000000
-#define HPF_Fs22050_Gain9_B1                            0.370033
-#define HPF_Fs22050_Gain9_B2                            0.000000
-                                                                    /* Gain =  10.000000 dB */
-#define HPF_Fs22050_Gain10_A0                           1.681082
-#define HPF_Fs22050_Gain10_A1                           (-0.311049)
-#define HPF_Fs22050_Gain10_A2                           0.000000
-#define HPF_Fs22050_Gain10_B1                           0.370033
-#define HPF_Fs22050_Gain10_B2                           0.000000
-                                                                    /* Gain =  11.000000 dB */
-#define HPF_Fs22050_Gain11_A0                           1.802620
-#define HPF_Fs22050_Gain11_A1                           (-0.432588)
-#define HPF_Fs22050_Gain11_A2                           0.000000
-#define HPF_Fs22050_Gain11_B1                           0.370033
-#define HPF_Fs22050_Gain11_B2                           0.000000
-                                                                    /* Gain =  12.000000 dB */
-#define HPF_Fs22050_Gain12_A0                           1.938989
-#define HPF_Fs22050_Gain12_A1                           (-0.568956)
-#define HPF_Fs22050_Gain12_A2                           0.000000
-#define HPF_Fs22050_Gain12_B1                           0.370033
-#define HPF_Fs22050_Gain12_B2                           0.000000
-                                                                    /* Gain =  13.000000 dB */
-#define HPF_Fs22050_Gain13_A0                           2.091997
-#define HPF_Fs22050_Gain13_A1                           (-0.721964)
-#define HPF_Fs22050_Gain13_A2                           0.000000
-#define HPF_Fs22050_Gain13_B1                           0.370033
-#define HPF_Fs22050_Gain13_B2                           0.000000
-                                                                    /* Gain =  14.000000 dB */
-#define HPF_Fs22050_Gain14_A0                           2.263674
-#define HPF_Fs22050_Gain14_A1                           (-0.893641)
-#define HPF_Fs22050_Gain14_A2                           0.000000
-#define HPF_Fs22050_Gain14_B1                           0.370033
-#define HPF_Fs22050_Gain14_B2                           0.000000
-                                                                    /* Gain =  15.000000 dB */
-#define HPF_Fs22050_Gain15_A0                           2.456300
-#define HPF_Fs22050_Gain15_A1                           (-1.086267)
-#define HPF_Fs22050_Gain15_A2                           0.000000
-#define HPF_Fs22050_Gain15_B1                           0.370033
-#define HPF_Fs22050_Gain15_B2                           0.000000
+/* Gain =  1.000000 dB */
+#define HPF_Fs22050_Gain1_A0 1.038434
+#define HPF_Fs22050_Gain1_A1 0.331599
+#define HPF_Fs22050_Gain1_A2 0.000000
+#define HPF_Fs22050_Gain1_B1 0.370033
+#define HPF_Fs22050_Gain1_B2 0.000000
+/* Gain =  2.000000 dB */
+#define HPF_Fs22050_Gain2_A0 1.081557
+#define HPF_Fs22050_Gain2_A1 0.288475
+#define HPF_Fs22050_Gain2_A2 0.000000
+#define HPF_Fs22050_Gain2_B1 0.370033
+#define HPF_Fs22050_Gain2_B2 0.000000
+/* Gain =  3.000000 dB */
+#define HPF_Fs22050_Gain3_A0 1.129943
+#define HPF_Fs22050_Gain3_A1 0.240090
+#define HPF_Fs22050_Gain3_A2 0.000000
+#define HPF_Fs22050_Gain3_B1 0.370033
+#define HPF_Fs22050_Gain3_B2 0.000000
+/* Gain =  4.000000 dB */
+#define HPF_Fs22050_Gain4_A0 1.184232
+#define HPF_Fs22050_Gain4_A1 0.185801
+#define HPF_Fs22050_Gain4_A2 0.000000
+#define HPF_Fs22050_Gain4_B1 0.370033
+#define HPF_Fs22050_Gain4_B2 0.000000
+/* Gain =  5.000000 dB */
+#define HPF_Fs22050_Gain5_A0 1.245145
+#define HPF_Fs22050_Gain5_A1 0.124887
+#define HPF_Fs22050_Gain5_A2 0.000000
+#define HPF_Fs22050_Gain5_B1 0.370033
+#define HPF_Fs22050_Gain5_B2 0.000000
+/* Gain =  6.000000 dB */
+#define HPF_Fs22050_Gain6_A0 1.313491
+#define HPF_Fs22050_Gain6_A1 0.056541
+#define HPF_Fs22050_Gain6_A2 0.000000
+#define HPF_Fs22050_Gain6_B1 0.370033
+#define HPF_Fs22050_Gain6_B2 0.000000
+/* Gain =  7.000000 dB */
+#define HPF_Fs22050_Gain7_A0 1.390177
+#define HPF_Fs22050_Gain7_A1 (-0.020144)
+#define HPF_Fs22050_Gain7_A2 0.000000
+#define HPF_Fs22050_Gain7_B1 0.370033
+#define HPF_Fs22050_Gain7_B2 0.000000
+/* Gain =  8.000000 dB */
+#define HPF_Fs22050_Gain8_A0 1.476219
+#define HPF_Fs22050_Gain8_A1 (-0.106187)
+#define HPF_Fs22050_Gain8_A2 0.000000
+#define HPF_Fs22050_Gain8_B1 0.370033
+#define HPF_Fs22050_Gain8_B2 0.000000
+/* Gain =  9.000000 dB */
+#define HPF_Fs22050_Gain9_A0 1.572761
+#define HPF_Fs22050_Gain9_A1 (-0.202728)
+#define HPF_Fs22050_Gain9_A2 0.000000
+#define HPF_Fs22050_Gain9_B1 0.370033
+#define HPF_Fs22050_Gain9_B2 0.000000
+/* Gain =  10.000000 dB */
+#define HPF_Fs22050_Gain10_A0 1.681082
+#define HPF_Fs22050_Gain10_A1 (-0.311049)
+#define HPF_Fs22050_Gain10_A2 0.000000
+#define HPF_Fs22050_Gain10_B1 0.370033
+#define HPF_Fs22050_Gain10_B2 0.000000
+/* Gain =  11.000000 dB */
+#define HPF_Fs22050_Gain11_A0 1.802620
+#define HPF_Fs22050_Gain11_A1 (-0.432588)
+#define HPF_Fs22050_Gain11_A2 0.000000
+#define HPF_Fs22050_Gain11_B1 0.370033
+#define HPF_Fs22050_Gain11_B2 0.000000
+/* Gain =  12.000000 dB */
+#define HPF_Fs22050_Gain12_A0 1.938989
+#define HPF_Fs22050_Gain12_A1 (-0.568956)
+#define HPF_Fs22050_Gain12_A2 0.000000
+#define HPF_Fs22050_Gain12_B1 0.370033
+#define HPF_Fs22050_Gain12_B2 0.000000
+/* Gain =  13.000000 dB */
+#define HPF_Fs22050_Gain13_A0 2.091997
+#define HPF_Fs22050_Gain13_A1 (-0.721964)
+#define HPF_Fs22050_Gain13_A2 0.000000
+#define HPF_Fs22050_Gain13_B1 0.370033
+#define HPF_Fs22050_Gain13_B2 0.000000
+/* Gain =  14.000000 dB */
+#define HPF_Fs22050_Gain14_A0 2.263674
+#define HPF_Fs22050_Gain14_A1 (-0.893641)
+#define HPF_Fs22050_Gain14_A2 0.000000
+#define HPF_Fs22050_Gain14_B1 0.370033
+#define HPF_Fs22050_Gain14_B2 0.000000
+/* Gain =  15.000000 dB */
+#define HPF_Fs22050_Gain15_A0 2.456300
+#define HPF_Fs22050_Gain15_A1 (-1.086267)
+#define HPF_Fs22050_Gain15_A2 0.000000
+#define HPF_Fs22050_Gain15_B1 0.370033
+#define HPF_Fs22050_Gain15_B2 0.000000
 /* Coefficients for sample rate 24000Hz */
-                                                                    /* Gain =  1.000000 dB */
-#define HPF_Fs24000_Gain1_A0                            1.044662
-#define HPF_Fs24000_Gain1_A1                            0.223287
-#define HPF_Fs24000_Gain1_A2                            0.000000
-#define HPF_Fs24000_Gain1_B1                            0.267949
-#define HPF_Fs24000_Gain1_B2                            0.000000
-                                                                    /* Gain =  2.000000 dB */
-#define HPF_Fs24000_Gain2_A0                            1.094773
-#define HPF_Fs24000_Gain2_A1                            0.173176
-#define HPF_Fs24000_Gain2_A2                            0.000000
-#define HPF_Fs24000_Gain2_B1                            0.267949
-#define HPF_Fs24000_Gain2_B2                            0.000000
-                                                                    /* Gain =  3.000000 dB */
-#define HPF_Fs24000_Gain3_A0                            1.150999
-#define HPF_Fs24000_Gain3_A1                            0.116950
-#define HPF_Fs24000_Gain3_A2                            0.000000
-#define HPF_Fs24000_Gain3_B1                            0.267949
-#define HPF_Fs24000_Gain3_B2                            0.000000
-                                                                    /* Gain =  4.000000 dB */
-#define HPF_Fs24000_Gain4_A0                            1.214086
-#define HPF_Fs24000_Gain4_A1                            0.053863
-#define HPF_Fs24000_Gain4_A2                            0.000000
-#define HPF_Fs24000_Gain4_B1                            0.267949
-#define HPF_Fs24000_Gain4_B2                            0.000000
-                                                                    /* Gain =  5.000000 dB */
-#define HPF_Fs24000_Gain5_A0                            1.284870
-#define HPF_Fs24000_Gain5_A1                            (-0.016921)
-#define HPF_Fs24000_Gain5_A2                            0.000000
-#define HPF_Fs24000_Gain5_B1                            0.267949
-#define HPF_Fs24000_Gain5_B2                            0.000000
-                                                                    /* Gain =  6.000000 dB */
-#define HPF_Fs24000_Gain6_A0                           1.364291
-#define HPF_Fs24000_Gain6_A1                           (-0.096342)
-#define HPF_Fs24000_Gain6_A2                           0.000000
-#define HPF_Fs24000_Gain6_B1                           0.267949
-#define HPF_Fs24000_Gain6_B2                           0.000000
-                                                                    /* Gain =  7.000000 dB */
-#define HPF_Fs24000_Gain7_A0                            1.453403
-#define HPF_Fs24000_Gain7_A1                            (-0.185454)
-#define HPF_Fs24000_Gain7_A2                            0.000000
-#define HPF_Fs24000_Gain7_B1                            0.267949
-#define HPF_Fs24000_Gain7_B2                            0.000000
-                                                                    /* Gain =  8.000000 dB */
-#define HPF_Fs24000_Gain8_A0                            1.553389
-#define HPF_Fs24000_Gain8_A1                            (-0.285440)
-#define HPF_Fs24000_Gain8_A2                            0.000000
-#define HPF_Fs24000_Gain8_B1                            0.267949
-#define HPF_Fs24000_Gain8_B2                            0.000000
-                                                                    /* Gain =  9.000000 dB */
-#define HPF_Fs24000_Gain9_A0                            1.665574
-#define HPF_Fs24000_Gain9_A1                            (-0.397625)
-#define HPF_Fs24000_Gain9_A2                            0.000000
-#define HPF_Fs24000_Gain9_B1                            0.267949
-#define HPF_Fs24000_Gain9_B2                            0.000000
-                                                                    /* Gain =  10.000000 dB */
-#define HPF_Fs24000_Gain10_A0                           1.791449
-#define HPF_Fs24000_Gain10_A1                           (-0.523499)
-#define HPF_Fs24000_Gain10_A2                           0.000000
-#define HPF_Fs24000_Gain10_B1                           0.267949
-#define HPF_Fs24000_Gain10_B2                           0.000000
-                                                                    /* Gain =  11.000000 dB */
-#define HPF_Fs24000_Gain11_A0                           1.932682
-#define HPF_Fs24000_Gain11_A1                           (-0.664733)
-#define HPF_Fs24000_Gain11_A2                           0.000000
-#define HPF_Fs24000_Gain11_B1                           0.267949
-#define HPF_Fs24000_Gain11_B2                           0.000000
-                                                                    /* Gain =  12.000000 dB */
-#define HPF_Fs24000_Gain12_A0                           2.091148
-#define HPF_Fs24000_Gain12_A1                           (-0.823199)
-#define HPF_Fs24000_Gain12_A2                           0.000000
-#define HPF_Fs24000_Gain12_B1                           0.267949
-#define HPF_Fs24000_Gain12_B2                           0.000000
-                                                                    /* Gain =  13.000000 dB */
-#define HPF_Fs24000_Gain13_A0                           2.268950
-#define HPF_Fs24000_Gain13_A1                           (-1.001001)
-#define HPF_Fs24000_Gain13_A2                           0.000000
-#define HPF_Fs24000_Gain13_B1                           0.267949
-#define HPF_Fs24000_Gain13_B2                           0.000000
-                                                                    /* Gain =  14.000000 dB */
-#define HPF_Fs24000_Gain14_A0                           2.468447
-#define HPF_Fs24000_Gain14_A1                           (-1.200498)
-#define HPF_Fs24000_Gain14_A2                           0.000000
-#define HPF_Fs24000_Gain14_B1                           0.267949
-#define HPF_Fs24000_Gain14_B2                           0.000000
-                                                                    /* Gain =  15.000000 dB */
-#define HPF_Fs24000_Gain15_A0                           2.692287
-#define HPF_Fs24000_Gain15_A1                           (-1.424338)
-#define HPF_Fs24000_Gain15_A2                           0.000000
-#define HPF_Fs24000_Gain15_B1                           0.267949
-#define HPF_Fs24000_Gain15_B2                           0.000000
+/* Gain =  1.000000 dB */
+#define HPF_Fs24000_Gain1_A0 1.044662
+#define HPF_Fs24000_Gain1_A1 0.223287
+#define HPF_Fs24000_Gain1_A2 0.000000
+#define HPF_Fs24000_Gain1_B1 0.267949
+#define HPF_Fs24000_Gain1_B2 0.000000
+/* Gain =  2.000000 dB */
+#define HPF_Fs24000_Gain2_A0 1.094773
+#define HPF_Fs24000_Gain2_A1 0.173176
+#define HPF_Fs24000_Gain2_A2 0.000000
+#define HPF_Fs24000_Gain2_B1 0.267949
+#define HPF_Fs24000_Gain2_B2 0.000000
+/* Gain =  3.000000 dB */
+#define HPF_Fs24000_Gain3_A0 1.150999
+#define HPF_Fs24000_Gain3_A1 0.116950
+#define HPF_Fs24000_Gain3_A2 0.000000
+#define HPF_Fs24000_Gain3_B1 0.267949
+#define HPF_Fs24000_Gain3_B2 0.000000
+/* Gain =  4.000000 dB */
+#define HPF_Fs24000_Gain4_A0 1.214086
+#define HPF_Fs24000_Gain4_A1 0.053863
+#define HPF_Fs24000_Gain4_A2 0.000000
+#define HPF_Fs24000_Gain4_B1 0.267949
+#define HPF_Fs24000_Gain4_B2 0.000000
+/* Gain =  5.000000 dB */
+#define HPF_Fs24000_Gain5_A0 1.284870
+#define HPF_Fs24000_Gain5_A1 (-0.016921)
+#define HPF_Fs24000_Gain5_A2 0.000000
+#define HPF_Fs24000_Gain5_B1 0.267949
+#define HPF_Fs24000_Gain5_B2 0.000000
+/* Gain =  6.000000 dB */
+#define HPF_Fs24000_Gain6_A0 1.364291
+#define HPF_Fs24000_Gain6_A1 (-0.096342)
+#define HPF_Fs24000_Gain6_A2 0.000000
+#define HPF_Fs24000_Gain6_B1 0.267949
+#define HPF_Fs24000_Gain6_B2 0.000000
+/* Gain =  7.000000 dB */
+#define HPF_Fs24000_Gain7_A0 1.453403
+#define HPF_Fs24000_Gain7_A1 (-0.185454)
+#define HPF_Fs24000_Gain7_A2 0.000000
+#define HPF_Fs24000_Gain7_B1 0.267949
+#define HPF_Fs24000_Gain7_B2 0.000000
+/* Gain =  8.000000 dB */
+#define HPF_Fs24000_Gain8_A0 1.553389
+#define HPF_Fs24000_Gain8_A1 (-0.285440)
+#define HPF_Fs24000_Gain8_A2 0.000000
+#define HPF_Fs24000_Gain8_B1 0.267949
+#define HPF_Fs24000_Gain8_B2 0.000000
+/* Gain =  9.000000 dB */
+#define HPF_Fs24000_Gain9_A0 1.665574
+#define HPF_Fs24000_Gain9_A1 (-0.397625)
+#define HPF_Fs24000_Gain9_A2 0.000000
+#define HPF_Fs24000_Gain9_B1 0.267949
+#define HPF_Fs24000_Gain9_B2 0.000000
+/* Gain =  10.000000 dB */
+#define HPF_Fs24000_Gain10_A0 1.791449
+#define HPF_Fs24000_Gain10_A1 (-0.523499)
+#define HPF_Fs24000_Gain10_A2 0.000000
+#define HPF_Fs24000_Gain10_B1 0.267949
+#define HPF_Fs24000_Gain10_B2 0.000000
+/* Gain =  11.000000 dB */
+#define HPF_Fs24000_Gain11_A0 1.932682
+#define HPF_Fs24000_Gain11_A1 (-0.664733)
+#define HPF_Fs24000_Gain11_A2 0.000000
+#define HPF_Fs24000_Gain11_B1 0.267949
+#define HPF_Fs24000_Gain11_B2 0.000000
+/* Gain =  12.000000 dB */
+#define HPF_Fs24000_Gain12_A0 2.091148
+#define HPF_Fs24000_Gain12_A1 (-0.823199)
+#define HPF_Fs24000_Gain12_A2 0.000000
+#define HPF_Fs24000_Gain12_B1 0.267949
+#define HPF_Fs24000_Gain12_B2 0.000000
+/* Gain =  13.000000 dB */
+#define HPF_Fs24000_Gain13_A0 2.268950
+#define HPF_Fs24000_Gain13_A1 (-1.001001)
+#define HPF_Fs24000_Gain13_A2 0.000000
+#define HPF_Fs24000_Gain13_B1 0.267949
+#define HPF_Fs24000_Gain13_B2 0.000000
+/* Gain =  14.000000 dB */
+#define HPF_Fs24000_Gain14_A0 2.468447
+#define HPF_Fs24000_Gain14_A1 (-1.200498)
+#define HPF_Fs24000_Gain14_A2 0.000000
+#define HPF_Fs24000_Gain14_B1 0.267949
+#define HPF_Fs24000_Gain14_B2 0.000000
+/* Gain =  15.000000 dB */
+#define HPF_Fs24000_Gain15_A0 2.692287
+#define HPF_Fs24000_Gain15_A1 (-1.424338)
+#define HPF_Fs24000_Gain15_A2 0.000000
+#define HPF_Fs24000_Gain15_B1 0.267949
+#define HPF_Fs24000_Gain15_B2 0.000000
 /* Coefficients for sample rate 32000Hz */
-                                                                    /* Gain =  1.000000 dB */
-#define HPF_Fs32000_Gain1_A0                            1.061009
-#define HPF_Fs32000_Gain1_A1                            (-0.061009)
-#define HPF_Fs32000_Gain1_A2                            0.000000
-#define HPF_Fs32000_Gain1_B1                            (-0.000000)
-#define HPF_Fs32000_Gain1_B2                            0.000000
-                                                                    /* Gain =  2.000000 dB */
-#define HPF_Fs32000_Gain2_A0                             1.129463
-#define HPF_Fs32000_Gain2_A1                             (-0.129463)
-#define HPF_Fs32000_Gain2_A2                             0.000000
-#define HPF_Fs32000_Gain2_B1                             (-0.000000)
-#define HPF_Fs32000_Gain2_B2                             0.000000
-                                                                    /* Gain =  3.000000 dB */
-#define HPF_Fs32000_Gain3_A0                             1.206267
-#define HPF_Fs32000_Gain3_A1                             (-0.206267)
-#define HPF_Fs32000_Gain3_A2                             0.000000
-#define HPF_Fs32000_Gain3_B1                             (-0.000000)
-#define HPF_Fs32000_Gain3_B2                             0.000000
-                                                                    /* Gain =  4.000000 dB */
-#define HPF_Fs32000_Gain4_A0                            1.292447
-#define HPF_Fs32000_Gain4_A1                            (-0.292447)
-#define HPF_Fs32000_Gain4_A2                            0.000000
-#define HPF_Fs32000_Gain4_B1                            (-0.000000)
-#define HPF_Fs32000_Gain4_B2                            0.000000
-                                                                    /* Gain =  5.000000 dB */
-#define HPF_Fs32000_Gain5_A0                            1.389140
-#define HPF_Fs32000_Gain5_A1                            (-0.389140)
-#define HPF_Fs32000_Gain5_A2                            0.000000
-#define HPF_Fs32000_Gain5_B1                            (-0.000000)
-#define HPF_Fs32000_Gain5_B2                            0.000000
-                                                                    /* Gain =  6.000000 dB */
-#define HPF_Fs32000_Gain6_A0                             1.497631
-#define HPF_Fs32000_Gain6_A1                             (-0.497631)
-#define HPF_Fs32000_Gain6_A2                             0.000000
-#define HPF_Fs32000_Gain6_B1                             (-0.000000)
-#define HPF_Fs32000_Gain6_B2                             0.000000
-                                                                    /* Gain =  7.000000 dB */
-#define HPF_Fs32000_Gain7_A0                             1.619361
-#define HPF_Fs32000_Gain7_A1                             (-0.619361)
-#define HPF_Fs32000_Gain7_A2                             0.000000
-#define HPF_Fs32000_Gain7_B1                             (-0.000000)
-#define HPF_Fs32000_Gain7_B2                             0.000000
-                                                                    /* Gain =  8.000000 dB */
-#define HPF_Fs32000_Gain8_A0                             1.755943
-#define HPF_Fs32000_Gain8_A1                             (-0.755943)
-#define HPF_Fs32000_Gain8_A2                             0.000000
-#define HPF_Fs32000_Gain8_B1                             (-0.000000)
-#define HPF_Fs32000_Gain8_B2                             0.000000
-                                                                    /* Gain =  9.000000 dB */
-#define HPF_Fs32000_Gain9_A0                             1.909191
-#define HPF_Fs32000_Gain9_A1                             (-0.909191)
-#define HPF_Fs32000_Gain9_A2                             0.000000
-#define HPF_Fs32000_Gain9_B1                             (-0.000000)
-#define HPF_Fs32000_Gain9_B2                             0.000000
-                                                                    /* Gain =  10.000000 dB */
-#define HPF_Fs32000_Gain10_A0                            2.081139
-#define HPF_Fs32000_Gain10_A1                            (-1.081139)
-#define HPF_Fs32000_Gain10_A2                            0.000000
-#define HPF_Fs32000_Gain10_B1                            (-0.000000)
-#define HPF_Fs32000_Gain10_B2                            0.000000
-                                                                    /* Gain =  11.000000 dB */
-#define HPF_Fs32000_Gain11_A0                           2.274067
-#define HPF_Fs32000_Gain11_A1                           (-1.274067)
-#define HPF_Fs32000_Gain11_A2                           0.000000
-#define HPF_Fs32000_Gain11_B1                           (-0.000000)
-#define HPF_Fs32000_Gain11_B2                           0.000000
-                                                                    /* Gain =  12.000000 dB */
-#define HPF_Fs32000_Gain12_A0                          2.490536
-#define HPF_Fs32000_Gain12_A1                          (-1.490536)
-#define HPF_Fs32000_Gain12_A2                          0.000000
-#define HPF_Fs32000_Gain12_B1                          (-0.000000)
-#define HPF_Fs32000_Gain12_B2                          0.000000
-                                                                    /* Gain =  13.000000 dB */
-#define HPF_Fs32000_Gain13_A0                           2.733418
-#define HPF_Fs32000_Gain13_A1                           (-1.733418)
-#define HPF_Fs32000_Gain13_A2                           0.000000
-#define HPF_Fs32000_Gain13_B1                           (-0.000000)
-#define HPF_Fs32000_Gain13_B2                           0.000000
-                                                                    /* Gain =  14.000000 dB */
-#define HPF_Fs32000_Gain14_A0                           3.005936
-#define HPF_Fs32000_Gain14_A1                           (-2.005936)
-#define HPF_Fs32000_Gain14_A2                           0.000000
-#define HPF_Fs32000_Gain14_B1                           (-0.000000)
-#define HPF_Fs32000_Gain14_B2                           0.000000
-                                                                    /* Gain =  15.000000 dB */
-#define HPF_Fs32000_Gain15_A0                          3.311707
-#define HPF_Fs32000_Gain15_A1                          (-2.311707)
-#define HPF_Fs32000_Gain15_A2                          0.000000
-#define HPF_Fs32000_Gain15_B1                          (-0.000000)
-#define HPF_Fs32000_Gain15_B2                          0.000000
+/* Gain =  1.000000 dB */
+#define HPF_Fs32000_Gain1_A0 1.061009
+#define HPF_Fs32000_Gain1_A1 (-0.061009)
+#define HPF_Fs32000_Gain1_A2 0.000000
+#define HPF_Fs32000_Gain1_B1 (-0.000000)
+#define HPF_Fs32000_Gain1_B2 0.000000
+/* Gain =  2.000000 dB */
+#define HPF_Fs32000_Gain2_A0 1.129463
+#define HPF_Fs32000_Gain2_A1 (-0.129463)
+#define HPF_Fs32000_Gain2_A2 0.000000
+#define HPF_Fs32000_Gain2_B1 (-0.000000)
+#define HPF_Fs32000_Gain2_B2 0.000000
+/* Gain =  3.000000 dB */
+#define HPF_Fs32000_Gain3_A0 1.206267
+#define HPF_Fs32000_Gain3_A1 (-0.206267)
+#define HPF_Fs32000_Gain3_A2 0.000000
+#define HPF_Fs32000_Gain3_B1 (-0.000000)
+#define HPF_Fs32000_Gain3_B2 0.000000
+/* Gain =  4.000000 dB */
+#define HPF_Fs32000_Gain4_A0 1.292447
+#define HPF_Fs32000_Gain4_A1 (-0.292447)
+#define HPF_Fs32000_Gain4_A2 0.000000
+#define HPF_Fs32000_Gain4_B1 (-0.000000)
+#define HPF_Fs32000_Gain4_B2 0.000000
+/* Gain =  5.000000 dB */
+#define HPF_Fs32000_Gain5_A0 1.389140
+#define HPF_Fs32000_Gain5_A1 (-0.389140)
+#define HPF_Fs32000_Gain5_A2 0.000000
+#define HPF_Fs32000_Gain5_B1 (-0.000000)
+#define HPF_Fs32000_Gain5_B2 0.000000
+/* Gain =  6.000000 dB */
+#define HPF_Fs32000_Gain6_A0 1.497631
+#define HPF_Fs32000_Gain6_A1 (-0.497631)
+#define HPF_Fs32000_Gain6_A2 0.000000
+#define HPF_Fs32000_Gain6_B1 (-0.000000)
+#define HPF_Fs32000_Gain6_B2 0.000000
+/* Gain =  7.000000 dB */
+#define HPF_Fs32000_Gain7_A0 1.619361
+#define HPF_Fs32000_Gain7_A1 (-0.619361)
+#define HPF_Fs32000_Gain7_A2 0.000000
+#define HPF_Fs32000_Gain7_B1 (-0.000000)
+#define HPF_Fs32000_Gain7_B2 0.000000
+/* Gain =  8.000000 dB */
+#define HPF_Fs32000_Gain8_A0 1.755943
+#define HPF_Fs32000_Gain8_A1 (-0.755943)
+#define HPF_Fs32000_Gain8_A2 0.000000
+#define HPF_Fs32000_Gain8_B1 (-0.000000)
+#define HPF_Fs32000_Gain8_B2 0.000000
+/* Gain =  9.000000 dB */
+#define HPF_Fs32000_Gain9_A0 1.909191
+#define HPF_Fs32000_Gain9_A1 (-0.909191)
+#define HPF_Fs32000_Gain9_A2 0.000000
+#define HPF_Fs32000_Gain9_B1 (-0.000000)
+#define HPF_Fs32000_Gain9_B2 0.000000
+/* Gain =  10.000000 dB */
+#define HPF_Fs32000_Gain10_A0 2.081139
+#define HPF_Fs32000_Gain10_A1 (-1.081139)
+#define HPF_Fs32000_Gain10_A2 0.000000
+#define HPF_Fs32000_Gain10_B1 (-0.000000)
+#define HPF_Fs32000_Gain10_B2 0.000000
+/* Gain =  11.000000 dB */
+#define HPF_Fs32000_Gain11_A0 2.274067
+#define HPF_Fs32000_Gain11_A1 (-1.274067)
+#define HPF_Fs32000_Gain11_A2 0.000000
+#define HPF_Fs32000_Gain11_B1 (-0.000000)
+#define HPF_Fs32000_Gain11_B2 0.000000
+/* Gain =  12.000000 dB */
+#define HPF_Fs32000_Gain12_A0 2.490536
+#define HPF_Fs32000_Gain12_A1 (-1.490536)
+#define HPF_Fs32000_Gain12_A2 0.000000
+#define HPF_Fs32000_Gain12_B1 (-0.000000)
+#define HPF_Fs32000_Gain12_B2 0.000000
+/* Gain =  13.000000 dB */
+#define HPF_Fs32000_Gain13_A0 2.733418
+#define HPF_Fs32000_Gain13_A1 (-1.733418)
+#define HPF_Fs32000_Gain13_A2 0.000000
+#define HPF_Fs32000_Gain13_B1 (-0.000000)
+#define HPF_Fs32000_Gain13_B2 0.000000
+/* Gain =  14.000000 dB */
+#define HPF_Fs32000_Gain14_A0 3.005936
+#define HPF_Fs32000_Gain14_A1 (-2.005936)
+#define HPF_Fs32000_Gain14_A2 0.000000
+#define HPF_Fs32000_Gain14_B1 (-0.000000)
+#define HPF_Fs32000_Gain14_B2 0.000000
+/* Gain =  15.000000 dB */
+#define HPF_Fs32000_Gain15_A0 3.311707
+#define HPF_Fs32000_Gain15_A1 (-2.311707)
+#define HPF_Fs32000_Gain15_A2 0.000000
+#define HPF_Fs32000_Gain15_B1 (-0.000000)
+#define HPF_Fs32000_Gain15_B2 0.000000
 /* Coefficients for sample rate 44100Hz */
-                                                                    /* Gain =  1.000000 dB */
-#define HPF_Fs44100_Gain1_A0                            1.074364
-#define HPF_Fs44100_Gain1_A1                            (-0.293257)
-#define HPF_Fs44100_Gain1_A2                            0.000000
-#define HPF_Fs44100_Gain1_B1                            (-0.218894)
-#define HPF_Fs44100_Gain1_B2                            0.000000
-                                                                    /* Gain =  2.000000 dB */
-#define HPF_Fs44100_Gain2_A0                            1.157801
-#define HPF_Fs44100_Gain2_A1                            (-0.376695)
-#define HPF_Fs44100_Gain2_A2                            0.000000
-#define HPF_Fs44100_Gain2_B1                            (-0.218894)
-#define HPF_Fs44100_Gain2_B2                            0.000000
-                                                                    /* Gain =  3.000000 dB */
-#define HPF_Fs44100_Gain3_A0                           1.251420
-#define HPF_Fs44100_Gain3_A1                           (-0.470313)
-#define HPF_Fs44100_Gain3_A2                           0.000000
-#define HPF_Fs44100_Gain3_B1                           (-0.218894)
-#define HPF_Fs44100_Gain3_B2                           0.000000
-                                                                    /* Gain =  4.000000 dB */
-#define HPF_Fs44100_Gain4_A0                            1.356461
-#define HPF_Fs44100_Gain4_A1                            (-0.575355)
-#define HPF_Fs44100_Gain4_A2                            0.000000
-#define HPF_Fs44100_Gain4_B1                            (-0.218894)
-#define HPF_Fs44100_Gain4_B2                            0.000000
-                                                                    /* Gain =  5.000000 dB */
-#define HPF_Fs44100_Gain5_A0                            1.474320
-#define HPF_Fs44100_Gain5_A1                            (-0.693213)
-#define HPF_Fs44100_Gain5_A2                            0.000000
-#define HPF_Fs44100_Gain5_B1                            (-0.218894)
-#define HPF_Fs44100_Gain5_B2                            0.000000
-                                                                    /* Gain =  6.000000 dB */
-#define HPF_Fs44100_Gain6_A0                           1.606559
-#define HPF_Fs44100_Gain6_A1                           (-0.825453)
-#define HPF_Fs44100_Gain6_A2                           0.000000
-#define HPF_Fs44100_Gain6_B1                           (-0.218894)
-#define HPF_Fs44100_Gain6_B2                           0.000000
-                                                                    /* Gain =  7.000000 dB */
-#define HPF_Fs44100_Gain7_A0                           1.754935
-#define HPF_Fs44100_Gain7_A1                           (-0.973828)
-#define HPF_Fs44100_Gain7_A2                           0.000000
-#define HPF_Fs44100_Gain7_B1                           (-0.218894)
-#define HPF_Fs44100_Gain7_B2                           0.000000
-                                                                    /* Gain =  8.000000 dB */
-#define HPF_Fs44100_Gain8_A0                            1.921414
-#define HPF_Fs44100_Gain8_A1                            (-1.140308)
-#define HPF_Fs44100_Gain8_A2                            0.000000
-#define HPF_Fs44100_Gain8_B1                            (-0.218894)
-#define HPF_Fs44100_Gain8_B2                            0.000000
-                                                                    /* Gain =  9.000000 dB */
-#define HPF_Fs44100_Gain9_A0                            2.108208
-#define HPF_Fs44100_Gain9_A1                            (-1.327101)
-#define HPF_Fs44100_Gain9_A2                            0.000000
-#define HPF_Fs44100_Gain9_B1                            (-0.218894)
-#define HPF_Fs44100_Gain9_B2                            0.000000
-                                                                    /* Gain =  10.000000 dB */
-#define HPF_Fs44100_Gain10_A0                          2.317793
-#define HPF_Fs44100_Gain10_A1                          (-1.536687)
-#define HPF_Fs44100_Gain10_A2                          0.000000
-#define HPF_Fs44100_Gain10_B1                          (-0.218894)
-#define HPF_Fs44100_Gain10_B2                          0.000000
-                                                                    /* Gain =  11.000000 dB */
-#define HPF_Fs44100_Gain11_A0                          2.552952
-#define HPF_Fs44100_Gain11_A1                          (-1.771846)
-#define HPF_Fs44100_Gain11_A2                          0.000000
-#define HPF_Fs44100_Gain11_B1                          (-0.218894)
-#define HPF_Fs44100_Gain11_B2                          0.000000
-                                                                    /* Gain =  12.000000 dB */
-#define HPF_Fs44100_Gain12_A0                          2.816805
-#define HPF_Fs44100_Gain12_A1                          (-2.035698)
-#define HPF_Fs44100_Gain12_A2                          0.000000
-#define HPF_Fs44100_Gain12_B1                          (-0.218894)
-#define HPF_Fs44100_Gain12_B2                          0.000000
-                                                                    /* Gain =  13.000000 dB */
-#define HPF_Fs44100_Gain13_A0                           3.112852
-#define HPF_Fs44100_Gain13_A1                           (-2.331746)
-#define HPF_Fs44100_Gain13_A2                           0.000000
-#define HPF_Fs44100_Gain13_B1                           (-0.218894)
-#define HPF_Fs44100_Gain13_B2                           0.000000
-                                                                    /* Gain =  14.000000 dB */
-#define HPF_Fs44100_Gain14_A0                          3.445023
-#define HPF_Fs44100_Gain14_A1                          (-2.663916)
-#define HPF_Fs44100_Gain14_A2                          0.000000
-#define HPF_Fs44100_Gain14_B1                          (-0.218894)
-#define HPF_Fs44100_Gain14_B2                          0.000000
-                                                                    /* Gain =  15.000000 dB */
-#define HPF_Fs44100_Gain15_A0                          3.817724
-#define HPF_Fs44100_Gain15_A1                          (-3.036618)
-#define HPF_Fs44100_Gain15_A2                          0.000000
-#define HPF_Fs44100_Gain15_B1                          (-0.218894)
-#define HPF_Fs44100_Gain15_B2                          0.000000
+/* Gain =  1.000000 dB */
+#define HPF_Fs44100_Gain1_A0 1.074364
+#define HPF_Fs44100_Gain1_A1 (-0.293257)
+#define HPF_Fs44100_Gain1_A2 0.000000
+#define HPF_Fs44100_Gain1_B1 (-0.218894)
+#define HPF_Fs44100_Gain1_B2 0.000000
+/* Gain =  2.000000 dB */
+#define HPF_Fs44100_Gain2_A0 1.157801
+#define HPF_Fs44100_Gain2_A1 (-0.376695)
+#define HPF_Fs44100_Gain2_A2 0.000000
+#define HPF_Fs44100_Gain2_B1 (-0.218894)
+#define HPF_Fs44100_Gain2_B2 0.000000
+/* Gain =  3.000000 dB */
+#define HPF_Fs44100_Gain3_A0 1.251420
+#define HPF_Fs44100_Gain3_A1 (-0.470313)
+#define HPF_Fs44100_Gain3_A2 0.000000
+#define HPF_Fs44100_Gain3_B1 (-0.218894)
+#define HPF_Fs44100_Gain3_B2 0.000000
+/* Gain =  4.000000 dB */
+#define HPF_Fs44100_Gain4_A0 1.356461
+#define HPF_Fs44100_Gain4_A1 (-0.575355)
+#define HPF_Fs44100_Gain4_A2 0.000000
+#define HPF_Fs44100_Gain4_B1 (-0.218894)
+#define HPF_Fs44100_Gain4_B2 0.000000
+/* Gain =  5.000000 dB */
+#define HPF_Fs44100_Gain5_A0 1.474320
+#define HPF_Fs44100_Gain5_A1 (-0.693213)
+#define HPF_Fs44100_Gain5_A2 0.000000
+#define HPF_Fs44100_Gain5_B1 (-0.218894)
+#define HPF_Fs44100_Gain5_B2 0.000000
+/* Gain =  6.000000 dB */
+#define HPF_Fs44100_Gain6_A0 1.606559
+#define HPF_Fs44100_Gain6_A1 (-0.825453)
+#define HPF_Fs44100_Gain6_A2 0.000000
+#define HPF_Fs44100_Gain6_B1 (-0.218894)
+#define HPF_Fs44100_Gain6_B2 0.000000
+/* Gain =  7.000000 dB */
+#define HPF_Fs44100_Gain7_A0 1.754935
+#define HPF_Fs44100_Gain7_A1 (-0.973828)
+#define HPF_Fs44100_Gain7_A2 0.000000
+#define HPF_Fs44100_Gain7_B1 (-0.218894)
+#define HPF_Fs44100_Gain7_B2 0.000000
+/* Gain =  8.000000 dB */
+#define HPF_Fs44100_Gain8_A0 1.921414
+#define HPF_Fs44100_Gain8_A1 (-1.140308)
+#define HPF_Fs44100_Gain8_A2 0.000000
+#define HPF_Fs44100_Gain8_B1 (-0.218894)
+#define HPF_Fs44100_Gain8_B2 0.000000
+/* Gain =  9.000000 dB */
+#define HPF_Fs44100_Gain9_A0 2.108208
+#define HPF_Fs44100_Gain9_A1 (-1.327101)
+#define HPF_Fs44100_Gain9_A2 0.000000
+#define HPF_Fs44100_Gain9_B1 (-0.218894)
+#define HPF_Fs44100_Gain9_B2 0.000000
+/* Gain =  10.000000 dB */
+#define HPF_Fs44100_Gain10_A0 2.317793
+#define HPF_Fs44100_Gain10_A1 (-1.536687)
+#define HPF_Fs44100_Gain10_A2 0.000000
+#define HPF_Fs44100_Gain10_B1 (-0.218894)
+#define HPF_Fs44100_Gain10_B2 0.000000
+/* Gain =  11.000000 dB */
+#define HPF_Fs44100_Gain11_A0 2.552952
+#define HPF_Fs44100_Gain11_A1 (-1.771846)
+#define HPF_Fs44100_Gain11_A2 0.000000
+#define HPF_Fs44100_Gain11_B1 (-0.218894)
+#define HPF_Fs44100_Gain11_B2 0.000000
+/* Gain =  12.000000 dB */
+#define HPF_Fs44100_Gain12_A0 2.816805
+#define HPF_Fs44100_Gain12_A1 (-2.035698)
+#define HPF_Fs44100_Gain12_A2 0.000000
+#define HPF_Fs44100_Gain12_B1 (-0.218894)
+#define HPF_Fs44100_Gain12_B2 0.000000
+/* Gain =  13.000000 dB */
+#define HPF_Fs44100_Gain13_A0 3.112852
+#define HPF_Fs44100_Gain13_A1 (-2.331746)
+#define HPF_Fs44100_Gain13_A2 0.000000
+#define HPF_Fs44100_Gain13_B1 (-0.218894)
+#define HPF_Fs44100_Gain13_B2 0.000000
+/* Gain =  14.000000 dB */
+#define HPF_Fs44100_Gain14_A0 3.445023
+#define HPF_Fs44100_Gain14_A1 (-2.663916)
+#define HPF_Fs44100_Gain14_A2 0.000000
+#define HPF_Fs44100_Gain14_B1 (-0.218894)
+#define HPF_Fs44100_Gain14_B2 0.000000
+/* Gain =  15.000000 dB */
+#define HPF_Fs44100_Gain15_A0 3.817724
+#define HPF_Fs44100_Gain15_A1 (-3.036618)
+#define HPF_Fs44100_Gain15_A2 0.000000
+#define HPF_Fs44100_Gain15_B1 (-0.218894)
+#define HPF_Fs44100_Gain15_B2 0.000000
 /* Coefficients for sample rate 48000Hz */
-                                                                    /* Gain =  1.000000 dB */
-#define HPF_Fs48000_Gain1_A0                          1.077357
-#define HPF_Fs48000_Gain1_A1                          (-0.345306)
-#define HPF_Fs48000_Gain1_A2                          0.000000
-#define HPF_Fs48000_Gain1_B1                          (-0.267949)
-#define HPF_Fs48000_Gain1_B2                          0.000000
-                                                                    /* Gain =  2.000000 dB */
-#define HPF_Fs48000_Gain2_A0                          1.164152
-#define HPF_Fs48000_Gain2_A1                          (-0.432101)
-#define HPF_Fs48000_Gain2_A2                          0.000000
-#define HPF_Fs48000_Gain2_B1                          (-0.267949)
-#define HPF_Fs48000_Gain2_B2                          0.000000
-                                                                    /* Gain =  3.000000 dB */
-#define HPF_Fs48000_Gain3_A0                          1.261538
-#define HPF_Fs48000_Gain3_A1                          (-0.529488)
-#define HPF_Fs48000_Gain3_A2                          0.000000
-#define HPF_Fs48000_Gain3_B1                          (-0.267949)
-#define HPF_Fs48000_Gain3_B2                          0.000000
-                                                                    /* Gain =  4.000000 dB */
-#define HPF_Fs48000_Gain4_A0                           1.370807
-#define HPF_Fs48000_Gain4_A1                           (-0.638757)
-#define HPF_Fs48000_Gain4_A2                           0.000000
-#define HPF_Fs48000_Gain4_B1                           (-0.267949)
-#define HPF_Fs48000_Gain4_B2                           0.000000
-                                                                    /* Gain =  5.000000 dB */
-#define HPF_Fs48000_Gain5_A0                           1.493409
-#define HPF_Fs48000_Gain5_A1                           (-0.761359)
-#define HPF_Fs48000_Gain5_A2                           0.000000
-#define HPF_Fs48000_Gain5_B1                           (-0.267949)
-#define HPF_Fs48000_Gain5_B2                           0.000000
-                                                                    /* Gain =  6.000000 dB */
-#define HPF_Fs48000_Gain6_A0                            1.630971
-#define HPF_Fs48000_Gain6_A1                            (-0.898920)
-#define HPF_Fs48000_Gain6_A2                            0.000000
-#define HPF_Fs48000_Gain6_B1                            (-0.267949)
-#define HPF_Fs48000_Gain6_B2                            0.000000
-                                                                    /* Gain =  7.000000 dB */
-#define HPF_Fs48000_Gain7_A0                            1.785318
-#define HPF_Fs48000_Gain7_A1                            (-1.053267)
-#define HPF_Fs48000_Gain7_A2                            0.000000
-#define HPF_Fs48000_Gain7_B1                            (-0.267949)
-#define HPF_Fs48000_Gain7_B2                            0.000000
-                                                                    /* Gain =  8.000000 dB */
-#define HPF_Fs48000_Gain8_A0                           1.958498
-#define HPF_Fs48000_Gain8_A1                           (-1.226447)
-#define HPF_Fs48000_Gain8_A2                           0.000000
-#define HPF_Fs48000_Gain8_B1                           (-0.267949)
-#define HPF_Fs48000_Gain8_B2                           0.000000
-                                                                    /* Gain =  9.000000 dB */
-#define HPF_Fs48000_Gain9_A0                          2.152809
-#define HPF_Fs48000_Gain9_A1                          (-1.420758)
-#define HPF_Fs48000_Gain9_A2                          0.000000
-#define HPF_Fs48000_Gain9_B1                          (-0.267949)
-#define HPF_Fs48000_Gain9_B2                          0.000000
-                                                                    /* Gain =  10.000000 dB */
-#define HPF_Fs48000_Gain10_A0                         2.370829
-#define HPF_Fs48000_Gain10_A1                         (-1.638778)
-#define HPF_Fs48000_Gain10_A2                         0.000000
-#define HPF_Fs48000_Gain10_B1                         (-0.267949)
-#define HPF_Fs48000_Gain10_B2                         0.000000
-                                                                    /* Gain =  11.000000 dB */
-#define HPF_Fs48000_Gain11_A0                          2.615452
-#define HPF_Fs48000_Gain11_A1                          (-1.883401)
-#define HPF_Fs48000_Gain11_A2                          0.000000
-#define HPF_Fs48000_Gain11_B1                          (-0.267949)
-#define HPF_Fs48000_Gain11_B2                          0.000000
-                                                                    /* Gain =  12.000000 dB */
-#define HPF_Fs48000_Gain12_A0                          2.889924
-#define HPF_Fs48000_Gain12_A1                          (-2.157873)
-#define HPF_Fs48000_Gain12_A2                          0.000000
-#define HPF_Fs48000_Gain12_B1                          (-0.267949)
-#define HPF_Fs48000_Gain12_B2                          0.000000
-                                                                    /* Gain =  13.000000 dB */
-#define HPF_Fs48000_Gain13_A0                           3.197886
-#define HPF_Fs48000_Gain13_A1                           (-2.465835)
-#define HPF_Fs48000_Gain13_A2                           0.000000
-#define HPF_Fs48000_Gain13_B1                           (-0.267949)
-#define HPF_Fs48000_Gain13_B2                           0.000000
-                                                                    /* Gain =  14.000000 dB */
-#define HPF_Fs48000_Gain14_A0                          3.543425
-#define HPF_Fs48000_Gain14_A1                          (-2.811374)
-#define HPF_Fs48000_Gain14_A2                          0.000000
-#define HPF_Fs48000_Gain14_B1                          (-0.267949)
-#define HPF_Fs48000_Gain14_B2                          0.000000
-                                                                    /* Gain =  15.000000 dB */
-#define HPF_Fs48000_Gain15_A0                         3.931127
-#define HPF_Fs48000_Gain15_A1                         (-3.199076)
-#define HPF_Fs48000_Gain15_A2                         0.000000
-#define HPF_Fs48000_Gain15_B1                         (-0.267949)
-#define HPF_Fs48000_Gain15_B2                         0.000000
+/* Gain =  1.000000 dB */
+#define HPF_Fs48000_Gain1_A0 1.077357
+#define HPF_Fs48000_Gain1_A1 (-0.345306)
+#define HPF_Fs48000_Gain1_A2 0.000000
+#define HPF_Fs48000_Gain1_B1 (-0.267949)
+#define HPF_Fs48000_Gain1_B2 0.000000
+/* Gain =  2.000000 dB */
+#define HPF_Fs48000_Gain2_A0 1.164152
+#define HPF_Fs48000_Gain2_A1 (-0.432101)
+#define HPF_Fs48000_Gain2_A2 0.000000
+#define HPF_Fs48000_Gain2_B1 (-0.267949)
+#define HPF_Fs48000_Gain2_B2 0.000000
+/* Gain =  3.000000 dB */
+#define HPF_Fs48000_Gain3_A0 1.261538
+#define HPF_Fs48000_Gain3_A1 (-0.529488)
+#define HPF_Fs48000_Gain3_A2 0.000000
+#define HPF_Fs48000_Gain3_B1 (-0.267949)
+#define HPF_Fs48000_Gain3_B2 0.000000
+/* Gain =  4.000000 dB */
+#define HPF_Fs48000_Gain4_A0 1.370807
+#define HPF_Fs48000_Gain4_A1 (-0.638757)
+#define HPF_Fs48000_Gain4_A2 0.000000
+#define HPF_Fs48000_Gain4_B1 (-0.267949)
+#define HPF_Fs48000_Gain4_B2 0.000000
+/* Gain =  5.000000 dB */
+#define HPF_Fs48000_Gain5_A0 1.493409
+#define HPF_Fs48000_Gain5_A1 (-0.761359)
+#define HPF_Fs48000_Gain5_A2 0.000000
+#define HPF_Fs48000_Gain5_B1 (-0.267949)
+#define HPF_Fs48000_Gain5_B2 0.000000
+/* Gain =  6.000000 dB */
+#define HPF_Fs48000_Gain6_A0 1.630971
+#define HPF_Fs48000_Gain6_A1 (-0.898920)
+#define HPF_Fs48000_Gain6_A2 0.000000
+#define HPF_Fs48000_Gain6_B1 (-0.267949)
+#define HPF_Fs48000_Gain6_B2 0.000000
+/* Gain =  7.000000 dB */
+#define HPF_Fs48000_Gain7_A0 1.785318
+#define HPF_Fs48000_Gain7_A1 (-1.053267)
+#define HPF_Fs48000_Gain7_A2 0.000000
+#define HPF_Fs48000_Gain7_B1 (-0.267949)
+#define HPF_Fs48000_Gain7_B2 0.000000
+/* Gain =  8.000000 dB */
+#define HPF_Fs48000_Gain8_A0 1.958498
+#define HPF_Fs48000_Gain8_A1 (-1.226447)
+#define HPF_Fs48000_Gain8_A2 0.000000
+#define HPF_Fs48000_Gain8_B1 (-0.267949)
+#define HPF_Fs48000_Gain8_B2 0.000000
+/* Gain =  9.000000 dB */
+#define HPF_Fs48000_Gain9_A0 2.152809
+#define HPF_Fs48000_Gain9_A1 (-1.420758)
+#define HPF_Fs48000_Gain9_A2 0.000000
+#define HPF_Fs48000_Gain9_B1 (-0.267949)
+#define HPF_Fs48000_Gain9_B2 0.000000
+/* Gain =  10.000000 dB */
+#define HPF_Fs48000_Gain10_A0 2.370829
+#define HPF_Fs48000_Gain10_A1 (-1.638778)
+#define HPF_Fs48000_Gain10_A2 0.000000
+#define HPF_Fs48000_Gain10_B1 (-0.267949)
+#define HPF_Fs48000_Gain10_B2 0.000000
+/* Gain =  11.000000 dB */
+#define HPF_Fs48000_Gain11_A0 2.615452
+#define HPF_Fs48000_Gain11_A1 (-1.883401)
+#define HPF_Fs48000_Gain11_A2 0.000000
+#define HPF_Fs48000_Gain11_B1 (-0.267949)
+#define HPF_Fs48000_Gain11_B2 0.000000
+/* Gain =  12.000000 dB */
+#define HPF_Fs48000_Gain12_A0 2.889924
+#define HPF_Fs48000_Gain12_A1 (-2.157873)
+#define HPF_Fs48000_Gain12_A2 0.000000
+#define HPF_Fs48000_Gain12_B1 (-0.267949)
+#define HPF_Fs48000_Gain12_B2 0.000000
+/* Gain =  13.000000 dB */
+#define HPF_Fs48000_Gain13_A0 3.197886
+#define HPF_Fs48000_Gain13_A1 (-2.465835)
+#define HPF_Fs48000_Gain13_A2 0.000000
+#define HPF_Fs48000_Gain13_B1 (-0.267949)
+#define HPF_Fs48000_Gain13_B2 0.000000
+/* Gain =  14.000000 dB */
+#define HPF_Fs48000_Gain14_A0 3.543425
+#define HPF_Fs48000_Gain14_A1 (-2.811374)
+#define HPF_Fs48000_Gain14_A2 0.000000
+#define HPF_Fs48000_Gain14_B1 (-0.267949)
+#define HPF_Fs48000_Gain14_B2 0.000000
+/* Gain =  15.000000 dB */
+#define HPF_Fs48000_Gain15_A0 3.931127
+#define HPF_Fs48000_Gain15_A1 (-3.199076)
+#define HPF_Fs48000_Gain15_A2 0.000000
+#define HPF_Fs48000_Gain15_B1 (-0.267949)
+#define HPF_Fs48000_Gain15_B2 0.000000
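A note on reading these tables: A2 and B2 are zero in every entry, so each gain step reduces to a first-order section, and the values are consistent with a transfer function of the form H(z) = (A0 + A1*z^-1) / (1 + B1*z^-1), i.e. a shelf that is flat (0 dB) at DC and boosted by the labelled gain at Nyquist. That convention is an inference from the numbers themselves, not something stated in this change; the standalone sketch below checks it against the HPF_Fs48000_Gain15 entries and is illustrative only.

#include <assert.h>
#include <math.h>
#include <stdio.h>

/* Values copied from the HPF_Fs48000_Gain15_* entries above. */
static const double kA0 = 3.931127;
static const double kA1 = -3.199076;
static const double kB1 = -0.267949;

int main(void) {
    /* Assumed form: H(z) = (A0 + A1*z^-1) / (1 + B1*z^-1).
     * Evaluate the magnitude at DC (z = 1) and at Nyquist (z = -1). */
    double dc  = (kA0 + kA1) / (1.0 + kB1);
    double nyq = (kA0 - kA1) / (1.0 - kB1);

    printf("DC gain:      %f (%.2f dB)\n", dc,  20.0 * log10(dc));
    printf("Nyquist gain: %f (%.2f dB)\n", nyq, 20.0 * log10(nyq));

    /* Expect roughly 0 dB at DC and +15 dB at Nyquist: a treble shelf. */
    assert(fabs(20.0 * log10(dc)) < 0.01);
    assert(fabs(20.0 * log10(nyq) - 15.0) < 0.01);
    return 0;
}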
 
 /* Coefficients for sample rate 88200 */
 /* Gain = 1.000000 dB */
-#define HPF_Fs88200_Gain1_A0                          1.094374f
-#define HPF_Fs88200_Gain1_A1                          (-0.641256f)
-#define HPF_Fs88200_Gain1_A2                          0.000000f
-#define HPF_Fs88200_Gain1_B1                          (-0.546882f)
-#define HPF_Fs88200_Gain1_B2                          0.000000f
+#define HPF_Fs88200_Gain1_A0 1.094374f
+#define HPF_Fs88200_Gain1_A1 (-0.641256f)
+#define HPF_Fs88200_Gain1_A2 0.000000f
+#define HPF_Fs88200_Gain1_B1 (-0.546882f)
+#define HPF_Fs88200_Gain1_B2 0.000000f
 /* Gain = 2.000000 dB */
-#define HPF_Fs88200_Gain2_A0                          1.200264f
-#define HPF_Fs88200_Gain2_A1                          (-0.747146f)
-#define HPF_Fs88200_Gain2_A2                          0.000000f
-#define HPF_Fs88200_Gain2_B1                          (-0.546882f)
-#define HPF_Fs88200_Gain2_B2                          0.000000f
+#define HPF_Fs88200_Gain2_A0 1.200264f
+#define HPF_Fs88200_Gain2_A1 (-0.747146f)
+#define HPF_Fs88200_Gain2_A2 0.000000f
+#define HPF_Fs88200_Gain2_B1 (-0.546882f)
+#define HPF_Fs88200_Gain2_B2 0.000000f
 /* Gain = 3.000000 dB */
-#define HPF_Fs88200_Gain3_A0                          1.319074f
-#define HPF_Fs88200_Gain3_A1                          (-0.865956f)
-#define HPF_Fs88200_Gain3_A2                          0.000000f
-#define HPF_Fs88200_Gain3_B1                          (-0.546882f)
-#define HPF_Fs88200_Gain3_B2                          0.000000f
+#define HPF_Fs88200_Gain3_A0 1.319074f
+#define HPF_Fs88200_Gain3_A1 (-0.865956f)
+#define HPF_Fs88200_Gain3_A2 0.000000f
+#define HPF_Fs88200_Gain3_B1 (-0.546882f)
+#define HPF_Fs88200_Gain3_B2 0.000000f
 /* Gain = 4.000000 dB */
-#define HPF_Fs88200_Gain4_A0                          1.452380f
-#define HPF_Fs88200_Gain4_A1                          (-0.999263f)
-#define HPF_Fs88200_Gain4_A2                          0.000000f
-#define HPF_Fs88200_Gain4_B1                          (-0.546882f)
-#define HPF_Fs88200_Gain4_B2                          0.000000f
+#define HPF_Fs88200_Gain4_A0 1.452380f
+#define HPF_Fs88200_Gain4_A1 (-0.999263f)
+#define HPF_Fs88200_Gain4_A2 0.000000f
+#define HPF_Fs88200_Gain4_B1 (-0.546882f)
+#define HPF_Fs88200_Gain4_B2 0.000000f
 /* Gain = 5.000000 dB */
-#define HPF_Fs88200_Gain5_A0                          1.601953f
-#define HPF_Fs88200_Gain5_A1                          (-1.148836f)
-#define HPF_Fs88200_Gain5_A2                          0.000000f
-#define HPF_Fs88200_Gain5_B1                          (-0.546882f)
-#define HPF_Fs88200_Gain5_B2                          0.000000f
+#define HPF_Fs88200_Gain5_A0 1.601953f
+#define HPF_Fs88200_Gain5_A1 (-1.148836f)
+#define HPF_Fs88200_Gain5_A2 0.000000f
+#define HPF_Fs88200_Gain5_B1 (-0.546882f)
+#define HPF_Fs88200_Gain5_B2 0.000000f
 /* Gain = 6.000000 dB */
-#define HPF_Fs88200_Gain6_A0                          1.769777f
-#define HPF_Fs88200_Gain6_A1                          (-1.316659f)
-#define HPF_Fs88200_Gain6_A2                          0.000000f
-#define HPF_Fs88200_Gain6_B1                          (-0.546882f)
-#define HPF_Fs88200_Gain6_B2                          0.000000f
+#define HPF_Fs88200_Gain6_A0 1.769777f
+#define HPF_Fs88200_Gain6_A1 (-1.316659f)
+#define HPF_Fs88200_Gain6_A2 0.000000f
+#define HPF_Fs88200_Gain6_B1 (-0.546882f)
+#define HPF_Fs88200_Gain6_B2 0.000000f
 /* Gain = 7.000000 dB */
-#define HPF_Fs88200_Gain7_A0                          1.958078f
-#define HPF_Fs88200_Gain7_A1                          (-1.504960f)
-#define HPF_Fs88200_Gain7_A2                          0.000000f
-#define HPF_Fs88200_Gain7_B1                          (-0.546882f)
-#define HPF_Fs88200_Gain7_B2                          0.000000f
+#define HPF_Fs88200_Gain7_A0 1.958078f
+#define HPF_Fs88200_Gain7_A1 (-1.504960f)
+#define HPF_Fs88200_Gain7_A2 0.000000f
+#define HPF_Fs88200_Gain7_B1 (-0.546882f)
+#define HPF_Fs88200_Gain7_B2 0.000000f
 /* Gain = 8.000000 dB */
-#define HPF_Fs88200_Gain8_A0                          2.169355f
-#define HPF_Fs88200_Gain8_A1                          (-1.716238f)
-#define HPF_Fs88200_Gain8_A2                          0.000000f
-#define HPF_Fs88200_Gain8_B1                          (-0.546882f)
-#define HPF_Fs88200_Gain8_B2                          0.000000f
+#define HPF_Fs88200_Gain8_A0 2.169355f
+#define HPF_Fs88200_Gain8_A1 (-1.716238f)
+#define HPF_Fs88200_Gain8_A2 0.000000f
+#define HPF_Fs88200_Gain8_B1 (-0.546882f)
+#define HPF_Fs88200_Gain8_B2 0.000000f
 /* Gain = 9.000000 dB */
-#define HPF_Fs88200_Gain9_A0                          2.406412f
-#define HPF_Fs88200_Gain9_A1                          (-1.953295f)
-#define HPF_Fs88200_Gain9_A2                          0.000000f
-#define HPF_Fs88200_Gain9_B1                          (-0.546882f)
-#define HPF_Fs88200_Gain9_B2                          0.000000f
+#define HPF_Fs88200_Gain9_A0 2.406412f
+#define HPF_Fs88200_Gain9_A1 (-1.953295f)
+#define HPF_Fs88200_Gain9_A2 0.000000f
+#define HPF_Fs88200_Gain9_B1 (-0.546882f)
+#define HPF_Fs88200_Gain9_B2 0.000000f
 /* Gain = 10.000000 dB */
-#define HPF_Fs88200_Gain10_A0                          2.672395f
-#define HPF_Fs88200_Gain10_A1                          (-2.219277f)
-#define HPF_Fs88200_Gain10_A2                          0.000000f
-#define HPF_Fs88200_Gain10_B1                          (-0.546882f)
-#define HPF_Fs88200_Gain10_B2                          0.000000f
+#define HPF_Fs88200_Gain10_A0 2.672395f
+#define HPF_Fs88200_Gain10_A1 (-2.219277f)
+#define HPF_Fs88200_Gain10_A2 0.000000f
+#define HPF_Fs88200_Gain10_B1 (-0.546882f)
+#define HPF_Fs88200_Gain10_B2 0.000000f
 /* Gain = 11.000000 dB */
-#define HPF_Fs88200_Gain11_A0                          2.970832f
-#define HPF_Fs88200_Gain11_A1                          (-2.517714f)
-#define HPF_Fs88200_Gain11_A2                          0.000000f
-#define HPF_Fs88200_Gain11_B1                          (-0.546882f)
-#define HPF_Fs88200_Gain11_B2                          0.000000f
+#define HPF_Fs88200_Gain11_A0 2.970832f
+#define HPF_Fs88200_Gain11_A1 (-2.517714f)
+#define HPF_Fs88200_Gain11_A2 0.000000f
+#define HPF_Fs88200_Gain11_B1 (-0.546882f)
+#define HPF_Fs88200_Gain11_B2 0.000000f
 /* Gain = 12.000000 dB */
-#define HPF_Fs88200_Gain12_A0                          3.305684f
-#define HPF_Fs88200_Gain12_A1                          (-2.852566f)
-#define HPF_Fs88200_Gain12_A2                          0.000000f
-#define HPF_Fs88200_Gain12_B1                          (-0.546882f)
-#define HPF_Fs88200_Gain12_B2                          0.000000f
+#define HPF_Fs88200_Gain12_A0 3.305684f
+#define HPF_Fs88200_Gain12_A1 (-2.852566f)
+#define HPF_Fs88200_Gain12_A2 0.000000f
+#define HPF_Fs88200_Gain12_B1 (-0.546882f)
+#define HPF_Fs88200_Gain12_B2 0.000000f
 /* Gain = 13.000000 dB */
-#define HPF_Fs88200_Gain13_A0                          3.681394f
-#define HPF_Fs88200_Gain13_A1                          (-3.228276f)
-#define HPF_Fs88200_Gain13_A2                          0.000000f
-#define HPF_Fs88200_Gain13_B1                          (-0.546882f)
-#define HPF_Fs88200_Gain13_B2                          0.000000f
+#define HPF_Fs88200_Gain13_A0 3.681394f
+#define HPF_Fs88200_Gain13_A1 (-3.228276f)
+#define HPF_Fs88200_Gain13_A2 0.000000f
+#define HPF_Fs88200_Gain13_B1 (-0.546882f)
+#define HPF_Fs88200_Gain13_B2 0.000000f
 /* Gain = 14.000000 dB */
-#define HPF_Fs88200_Gain14_A0                          4.102947f
-#define HPF_Fs88200_Gain14_A1                          (-3.649830f)
-#define HPF_Fs88200_Gain14_A2                          0.000000f
-#define HPF_Fs88200_Gain14_B1                          (-0.546882f)
-#define HPF_Fs88200_Gain14_B2                          0.000000f
+#define HPF_Fs88200_Gain14_A0 4.102947f
+#define HPF_Fs88200_Gain14_A1 (-3.649830f)
+#define HPF_Fs88200_Gain14_A2 0.000000f
+#define HPF_Fs88200_Gain14_B1 (-0.546882f)
+#define HPF_Fs88200_Gain14_B2 0.000000f
 /* Gain = 15.000000 dB */
-#define HPF_Fs88200_Gain15_A0                          4.575938f
-#define HPF_Fs88200_Gain15_A1                          (-4.122820f)
-#define HPF_Fs88200_Gain15_A2                          0.000000f
-#define HPF_Fs88200_Gain15_B1                          (-0.546882f)
-#define HPF_Fs88200_Gain15_B2                          0.000000f
+#define HPF_Fs88200_Gain15_A0 4.575938f
+#define HPF_Fs88200_Gain15_A1 (-4.122820f)
+#define HPF_Fs88200_Gain15_A2 0.000000f
+#define HPF_Fs88200_Gain15_B1 (-0.546882f)
+#define HPF_Fs88200_Gain15_B2 0.000000f
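Assuming the same first-order recurrence y[n] = A0*x[n] + A1*x[n-1] - B1*y[n-1] (again an inference; the library feeds these constants into its own biquad state machinery rather than a helper like this), a minimal standalone sketch of running one coefficient set over a mono buffer looks as follows, using the HPF_Fs88200_Gain4 values purely as an example. All type and function names here are made up for the sketch.

#include <stddef.h>
#include <stdio.h>

/* Illustrative state for one first-order shelf section. */
typedef struct {
    float a0, a1, b1;   /* e.g. HPF_Fs88200_Gain4_A0 / _A1 / _B1 */
    float x1, y1;       /* previous input / output sample */
} ShelfState;

/* Assumed recurrence: y[n] = A0*x[n] + A1*x[n-1] - B1*y[n-1].
 * A2/B2 are zero in every entry above, so the second-order terms drop out. */
static void shelf_process(ShelfState* s, float* buf, size_t frames) {
    for (size_t i = 0; i < frames; ++i) {
        float x = buf[i];
        float y = s->a0 * x + s->a1 * s->x1 - s->b1 * s->y1;
        s->x1 = x;
        s->y1 = y;
        buf[i] = y;
    }
}

int main(void) {
    /* Constants copied from the HPF_Fs88200_Gain4_* entries above. */
    ShelfState s = {1.452380f, -0.999263f, -0.546882f, 0.0f, 0.0f};
    float impulse[8] = {1.0f};  /* impulse input; remaining samples are zero */
    shelf_process(&s, impulse, 8);
    for (int i = 0; i < 8; ++i) {
        printf("h[%d] = %f\n", i, impulse[i]);
    }
    return 0;
}

With an impulse in, the first output sample equals A0 and later samples decay geometrically with ratio -B1, which makes it easy to spot-check a regenerated table by eye.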
 
 /* Coefficients for sample rate 96000Hz */
-                                                                 /* Gain =  1.000000 dB */
-#define HPF_Fs96000_Gain1_A0                          1.096233
-#define HPF_Fs96000_Gain1_A1                          (-0.673583)
-#define HPF_Fs96000_Gain1_A2                          0.000000
-#define HPF_Fs96000_Gain1_B1                          (-0.577350)
-#define HPF_Fs96000_Gain1_B2                          0.000000
-                                                                 /* Gain =  2.000000 dB */
-#define HPF_Fs96000_Gain2_A0                          1.204208
-#define HPF_Fs96000_Gain2_A1                          (-0.781558)
-#define HPF_Fs96000_Gain2_A2                          0.000000
-#define HPF_Fs96000_Gain2_B1                          (-0.577350)
-#define HPF_Fs96000_Gain2_B2                          0.000000
-                                                                 /* Gain =  3.000000 dB */
-#define HPF_Fs96000_Gain3_A0                          1.325358
-#define HPF_Fs96000_Gain3_A1                          (-0.902708)
-#define HPF_Fs96000_Gain3_A2                          0.000000
-#define HPF_Fs96000_Gain3_B1                          (-0.577350)
-#define HPF_Fs96000_Gain3_B2                          0.000000
-                                                                 /* Gain =  4.000000 dB */
-#define HPF_Fs96000_Gain4_A0                           1.461291
-#define HPF_Fs96000_Gain4_A1                           (-1.038641)
-#define HPF_Fs96000_Gain4_A2                           0.000000
-#define HPF_Fs96000_Gain4_B1                           (-0.577350)
-#define HPF_Fs96000_Gain4_B2                           0.000000
-                                                                 /* Gain =  5.000000 dB */
-#define HPF_Fs96000_Gain5_A0                           1.613810
-#define HPF_Fs96000_Gain5_A1                           (-1.191160)
-#define HPF_Fs96000_Gain5_A2                           0.000000
-#define HPF_Fs96000_Gain5_B1                           (-0.577350)
-#define HPF_Fs96000_Gain5_B2                           0.000000
-                                                                 /* Gain =  6.000000 dB */
-#define HPF_Fs96000_Gain6_A0                            1.784939
-#define HPF_Fs96000_Gain6_A1                            (-1.362289)
-#define HPF_Fs96000_Gain6_A2                            0.000000
-#define HPF_Fs96000_Gain6_B1                            (-0.577350)
-#define HPF_Fs96000_Gain6_B2                            0.000000
-                                                                /* Gain =  7.000000 dB */
-#define HPF_Fs96000_Gain7_A0                            1.976949
-#define HPF_Fs96000_Gain7_A1                            (-1.554299)
-#define HPF_Fs96000_Gain7_A2                            0.000000
-#define HPF_Fs96000_Gain7_B1                            (-0.577350)
-#define HPF_Fs96000_Gain7_B2                            0.000000
-                                                                 /* Gain =  8.000000 dB */
-#define HPF_Fs96000_Gain8_A0                           2.192387
-#define HPF_Fs96000_Gain8_A1                           (-1.769738)
-#define HPF_Fs96000_Gain8_A2                           0.000000
-#define HPF_Fs96000_Gain8_B1                           (-0.577350)
-#define HPF_Fs96000_Gain8_B2                           0.000000
-                                                                /* Gain =  9.000000 dB */
-#define HPF_Fs96000_Gain9_A0                          2.434113
-#define HPF_Fs96000_Gain9_A1                          (-2.011464)
-#define HPF_Fs96000_Gain9_A2                          0.000000
-#define HPF_Fs96000_Gain9_B1                          (-0.577350)
-#define HPF_Fs96000_Gain9_B2                          0.000000
-                                                               /* Gain =  10.000000 dB */
-#define HPF_Fs96000_Gain10_A0                        2.705335
-#define HPF_Fs96000_Gain10_A1                        (-2.282685)
-#define HPF_Fs96000_Gain10_A2                         0.000000
-#define HPF_Fs96000_Gain10_B1                         (-0.577350)
-#define HPF_Fs96000_Gain10_B2                         0.000000
-                                                              /* Gain =  11.000000 dB */
-#define HPF_Fs96000_Gain11_A0                          3.009650
-#define HPF_Fs96000_Gain11_A1                          (-2.587000)
-#define HPF_Fs96000_Gain11_A2                          0.000000
-#define HPF_Fs96000_Gain11_B1                          (-0.577350)
-#define HPF_Fs96000_Gain11_B2                          0.000000
-                                                                  /* Gain =  12.000000 dB */
-#define HPF_Fs96000_Gain12_A0                          3.351097
-#define HPF_Fs96000_Gain12_A1                          (-2.928447)
-#define HPF_Fs96000_Gain12_A2                          0.000000
-#define HPF_Fs96000_Gain12_B1                          (-0.577350)
-#define HPF_Fs96000_Gain12_B2                          0.000000
-                                                                /* Gain =  13.000000 dB */
-#define HPF_Fs96000_Gain13_A0                           3.734207
-#define HPF_Fs96000_Gain13_A1                           (-3.311558)
-#define HPF_Fs96000_Gain13_A2                           0.000000
-#define HPF_Fs96000_Gain13_B1                           (-0.577350)
-#define HPF_Fs96000_Gain13_B2                           0.000000
-                                                                 /* Gain =  14.000000 dB */
-#define HPF_Fs96000_Gain14_A0                         4.164064
-#define HPF_Fs96000_Gain14_A1                         (-3.741414)
-#define HPF_Fs96000_Gain14_A2                          0.000000
-#define HPF_Fs96000_Gain14_B1                          (-0.577350)
-#define HPF_Fs96000_Gain14_B2                          0.000000
-                                                                 /* Gain =  15.000000 dB */
-#define HPF_Fs96000_Gain15_A0                         4.646371
-#define HPF_Fs96000_Gain15_A1                         (-4.223721)
-#define HPF_Fs96000_Gain15_A2                         0.000000
-#define HPF_Fs96000_Gain15_B1                         (-0.577350)
-#define HPF_Fs96000_Gain15_B2                         0.000000
+/* Gain =  1.000000 dB */
+#define HPF_Fs96000_Gain1_A0 1.096233
+#define HPF_Fs96000_Gain1_A1 (-0.673583)
+#define HPF_Fs96000_Gain1_A2 0.000000
+#define HPF_Fs96000_Gain1_B1 (-0.577350)
+#define HPF_Fs96000_Gain1_B2 0.000000
+/* Gain =  2.000000 dB */
+#define HPF_Fs96000_Gain2_A0 1.204208
+#define HPF_Fs96000_Gain2_A1 (-0.781558)
+#define HPF_Fs96000_Gain2_A2 0.000000
+#define HPF_Fs96000_Gain2_B1 (-0.577350)
+#define HPF_Fs96000_Gain2_B2 0.000000
+/* Gain =  3.000000 dB */
+#define HPF_Fs96000_Gain3_A0 1.325358
+#define HPF_Fs96000_Gain3_A1 (-0.902708)
+#define HPF_Fs96000_Gain3_A2 0.000000
+#define HPF_Fs96000_Gain3_B1 (-0.577350)
+#define HPF_Fs96000_Gain3_B2 0.000000
+/* Gain =  4.000000 dB */
+#define HPF_Fs96000_Gain4_A0 1.461291
+#define HPF_Fs96000_Gain4_A1 (-1.038641)
+#define HPF_Fs96000_Gain4_A2 0.000000
+#define HPF_Fs96000_Gain4_B1 (-0.577350)
+#define HPF_Fs96000_Gain4_B2 0.000000
+/* Gain =  5.000000 dB */
+#define HPF_Fs96000_Gain5_A0 1.613810
+#define HPF_Fs96000_Gain5_A1 (-1.191160)
+#define HPF_Fs96000_Gain5_A2 0.000000
+#define HPF_Fs96000_Gain5_B1 (-0.577350)
+#define HPF_Fs96000_Gain5_B2 0.000000
+/* Gain =  6.000000 dB */
+#define HPF_Fs96000_Gain6_A0 1.784939
+#define HPF_Fs96000_Gain6_A1 (-1.362289)
+#define HPF_Fs96000_Gain6_A2 0.000000
+#define HPF_Fs96000_Gain6_B1 (-0.577350)
+#define HPF_Fs96000_Gain6_B2 0.000000
+/* Gain =  7.000000 dB */
+#define HPF_Fs96000_Gain7_A0 1.976949
+#define HPF_Fs96000_Gain7_A1 (-1.554299)
+#define HPF_Fs96000_Gain7_A2 0.000000
+#define HPF_Fs96000_Gain7_B1 (-0.577350)
+#define HPF_Fs96000_Gain7_B2 0.000000
+/* Gain =  8.000000 dB */
+#define HPF_Fs96000_Gain8_A0 2.192387
+#define HPF_Fs96000_Gain8_A1 (-1.769738)
+#define HPF_Fs96000_Gain8_A2 0.000000
+#define HPF_Fs96000_Gain8_B1 (-0.577350)
+#define HPF_Fs96000_Gain8_B2 0.000000
+/* Gain =  9.000000 dB */
+#define HPF_Fs96000_Gain9_A0 2.434113
+#define HPF_Fs96000_Gain9_A1 (-2.011464)
+#define HPF_Fs96000_Gain9_A2 0.000000
+#define HPF_Fs96000_Gain9_B1 (-0.577350)
+#define HPF_Fs96000_Gain9_B2 0.000000
+/* Gain =  10.000000 dB */
+#define HPF_Fs96000_Gain10_A0 2.705335
+#define HPF_Fs96000_Gain10_A1 (-2.282685)
+#define HPF_Fs96000_Gain10_A2 0.000000
+#define HPF_Fs96000_Gain10_B1 (-0.577350)
+#define HPF_Fs96000_Gain10_B2 0.000000
+/* Gain =  11.000000 dB */
+#define HPF_Fs96000_Gain11_A0 3.009650
+#define HPF_Fs96000_Gain11_A1 (-2.587000)
+#define HPF_Fs96000_Gain11_A2 0.000000
+#define HPF_Fs96000_Gain11_B1 (-0.577350)
+#define HPF_Fs96000_Gain11_B2 0.000000
+/* Gain =  12.000000 dB */
+#define HPF_Fs96000_Gain12_A0 3.351097
+#define HPF_Fs96000_Gain12_A1 (-2.928447)
+#define HPF_Fs96000_Gain12_A2 0.000000
+#define HPF_Fs96000_Gain12_B1 (-0.577350)
+#define HPF_Fs96000_Gain12_B2 0.000000
+/* Gain =  13.000000 dB */
+#define HPF_Fs96000_Gain13_A0 3.734207
+#define HPF_Fs96000_Gain13_A1 (-3.311558)
+#define HPF_Fs96000_Gain13_A2 0.000000
+#define HPF_Fs96000_Gain13_B1 (-0.577350)
+#define HPF_Fs96000_Gain13_B2 0.000000
+/* Gain =  14.000000 dB */
+#define HPF_Fs96000_Gain14_A0 4.164064
+#define HPF_Fs96000_Gain14_A1 (-3.741414)
+#define HPF_Fs96000_Gain14_A2 0.000000
+#define HPF_Fs96000_Gain14_B1 (-0.577350)
+#define HPF_Fs96000_Gain14_B2 0.000000
+/* Gain =  15.000000 dB */
+#define HPF_Fs96000_Gain15_A0 4.646371
+#define HPF_Fs96000_Gain15_A1 (-4.223721)
+#define HPF_Fs96000_Gain15_A2 0.000000
+#define HPF_Fs96000_Gain15_B1 (-0.577350)
+#define HPF_Fs96000_Gain15_B2 0.000000
 
 /* Coefficients for sample rate 176400 */
 /* Gain = 1.000000 dB */
-#define HPF_Fs176400_Gain1_A0                          1.106711f
-#define HPF_Fs176400_Gain1_A1                          (-0.855807f)
-#define HPF_Fs176400_Gain1_A2                          0.000000f
-#define HPF_Fs176400_Gain1_B1                          (-0.749096f)
-#define HPF_Fs176400_Gain1_B2                          0.000000f
+#define HPF_Fs176400_Gain1_A0 1.106711f
+#define HPF_Fs176400_Gain1_A1 (-0.855807f)
+#define HPF_Fs176400_Gain1_A2 0.000000f
+#define HPF_Fs176400_Gain1_B1 (-0.749096f)
+#define HPF_Fs176400_Gain1_B2 0.000000f
 /* Gain = 2.000000 dB */
-#define HPF_Fs176400_Gain2_A0                          1.226443f
-#define HPF_Fs176400_Gain2_A1                          (-0.975539f)
-#define HPF_Fs176400_Gain2_A2                          0.000000f
-#define HPF_Fs176400_Gain2_B1                          (-0.749096f)
-#define HPF_Fs176400_Gain2_B2                          0.000000f
+#define HPF_Fs176400_Gain2_A0 1.226443f
+#define HPF_Fs176400_Gain2_A1 (-0.975539f)
+#define HPF_Fs176400_Gain2_A2 0.000000f
+#define HPF_Fs176400_Gain2_B1 (-0.749096f)
+#define HPF_Fs176400_Gain2_B2 0.000000f
 /* Gain = 3.000000 dB */
-#define HPF_Fs176400_Gain3_A0                          1.360784f
-#define HPF_Fs176400_Gain3_A1                          (-1.109880f)
-#define HPF_Fs176400_Gain3_A2                          0.000000f
-#define HPF_Fs176400_Gain3_B1                          (-0.749096f)
-#define HPF_Fs176400_Gain3_B2                          0.000000f
+#define HPF_Fs176400_Gain3_A0 1.360784f
+#define HPF_Fs176400_Gain3_A1 (-1.109880f)
+#define HPF_Fs176400_Gain3_A2 0.000000f
+#define HPF_Fs176400_Gain3_B1 (-0.749096f)
+#define HPF_Fs176400_Gain3_B2 0.000000f
 /* Gain = 4.000000 dB */
-#define HPF_Fs176400_Gain4_A0                          1.511517f
-#define HPF_Fs176400_Gain4_A1                          (-1.260613f)
-#define HPF_Fs176400_Gain4_A2                          0.000000f
-#define HPF_Fs176400_Gain4_B1                          (-0.749096f)
-#define HPF_Fs176400_Gain4_B2                          0.000000f
+#define HPF_Fs176400_Gain4_A0 1.511517f
+#define HPF_Fs176400_Gain4_A1 (-1.260613f)
+#define HPF_Fs176400_Gain4_A2 0.000000f
+#define HPF_Fs176400_Gain4_B1 (-0.749096f)
+#define HPF_Fs176400_Gain4_B2 0.000000f
 /* Gain = 5.000000 dB */
-#define HPF_Fs176400_Gain5_A0                          1.680643f
-#define HPF_Fs176400_Gain5_A1                          (-1.429739f)
-#define HPF_Fs176400_Gain5_A2                          0.000000f
-#define HPF_Fs176400_Gain5_B1                          (-0.749096f)
-#define HPF_Fs176400_Gain5_B2                          0.000000f
+#define HPF_Fs176400_Gain5_A0 1.680643f
+#define HPF_Fs176400_Gain5_A1 (-1.429739f)
+#define HPF_Fs176400_Gain5_A2 0.000000f
+#define HPF_Fs176400_Gain5_B1 (-0.749096f)
+#define HPF_Fs176400_Gain5_B2 0.000000f
 /* Gain = 6.000000 dB */
-#define HPF_Fs176400_Gain6_A0                          1.870405f
-#define HPF_Fs176400_Gain6_A1                          (-1.619501f)
-#define HPF_Fs176400_Gain6_A2                          0.000000f
-#define HPF_Fs176400_Gain6_B1                          (-0.749096f)
-#define HPF_Fs176400_Gain6_B2                          0.000000f
+#define HPF_Fs176400_Gain6_A0 1.870405f
+#define HPF_Fs176400_Gain6_A1 (-1.619501f)
+#define HPF_Fs176400_Gain6_A2 0.000000f
+#define HPF_Fs176400_Gain6_B1 (-0.749096f)
+#define HPF_Fs176400_Gain6_B2 0.000000f
 /* Gain = 7.000000 dB */
-#define HPF_Fs176400_Gain7_A0                          2.083321f
-#define HPF_Fs176400_Gain7_A1                          (-1.832417f)
-#define HPF_Fs176400_Gain7_A2                          0.000000f
-#define HPF_Fs176400_Gain7_B1                          (-0.749096f)
-#define HPF_Fs176400_Gain7_B2                          0.000000f
+#define HPF_Fs176400_Gain7_A0 2.083321f
+#define HPF_Fs176400_Gain7_A1 (-1.832417f)
+#define HPF_Fs176400_Gain7_A2 0.000000f
+#define HPF_Fs176400_Gain7_B1 (-0.749096f)
+#define HPF_Fs176400_Gain7_B2 0.000000f
 /* Gain = 8.000000 dB */
-#define HPF_Fs176400_Gain8_A0                          2.322217f
-#define HPF_Fs176400_Gain8_A1                          (-2.071313f)
-#define HPF_Fs176400_Gain8_A2                          0.000000f
-#define HPF_Fs176400_Gain8_B1                          (-0.749096f)
-#define HPF_Fs176400_Gain8_B2                          0.000000f
+#define HPF_Fs176400_Gain8_A0 2.322217f
+#define HPF_Fs176400_Gain8_A1 (-2.071313f)
+#define HPF_Fs176400_Gain8_A2 0.000000f
+#define HPF_Fs176400_Gain8_B1 (-0.749096f)
+#define HPF_Fs176400_Gain8_B2 0.000000f
 /* Gain = 9.000000 dB */
-#define HPF_Fs176400_Gain9_A0                          2.590263f
-#define HPF_Fs176400_Gain9_A1                          (-2.339359f)
-#define HPF_Fs176400_Gain9_A2                          0.000000f
-#define HPF_Fs176400_Gain9_B1                          (-0.749096f)
-#define HPF_Fs176400_Gain9_B2                          0.000000f
+#define HPF_Fs176400_Gain9_A0 2.590263f
+#define HPF_Fs176400_Gain9_A1 (-2.339359f)
+#define HPF_Fs176400_Gain9_A2 0.000000f
+#define HPF_Fs176400_Gain9_B1 (-0.749096f)
+#define HPF_Fs176400_Gain9_B2 0.000000f
 /* Gain = 10.000000 dB */
-#define HPF_Fs176400_Gain10_A0                          2.891016f
-#define HPF_Fs176400_Gain10_A1                          (-2.640112f)
-#define HPF_Fs176400_Gain10_A2                          0.000000f
-#define HPF_Fs176400_Gain10_B1                          (-0.749096f)
-#define HPF_Fs176400_Gain10_B2                          0.000000f
+#define HPF_Fs176400_Gain10_A0 2.891016f
+#define HPF_Fs176400_Gain10_A1 (-2.640112f)
+#define HPF_Fs176400_Gain10_A2 0.000000f
+#define HPF_Fs176400_Gain10_B1 (-0.749096f)
+#define HPF_Fs176400_Gain10_B2 0.000000f
 /* Gain = 11.000000 dB */
-#define HPF_Fs176400_Gain11_A0                          3.228465f
-#define HPF_Fs176400_Gain11_A1                          (-2.977561f)
-#define HPF_Fs176400_Gain11_A2                          0.000000f
-#define HPF_Fs176400_Gain11_B1                          (-0.749096f)
-#define HPF_Fs176400_Gain11_B2                          0.000000f
+#define HPF_Fs176400_Gain11_A0 3.228465f
+#define HPF_Fs176400_Gain11_A1 (-2.977561f)
+#define HPF_Fs176400_Gain11_A2 0.000000f
+#define HPF_Fs176400_Gain11_B1 (-0.749096f)
+#define HPF_Fs176400_Gain11_B2 0.000000f
 /* Gain = 12.000000 dB */
-#define HPF_Fs176400_Gain12_A0                          3.607090f
-#define HPF_Fs176400_Gain12_A1                          (-3.356186f)
-#define HPF_Fs176400_Gain12_A2                          0.000000f
-#define HPF_Fs176400_Gain12_B1                          (-0.749096f)
-#define HPF_Fs176400_Gain12_B2                          0.000000f
+#define HPF_Fs176400_Gain12_A0 3.607090f
+#define HPF_Fs176400_Gain12_A1 (-3.356186f)
+#define HPF_Fs176400_Gain12_A2 0.000000f
+#define HPF_Fs176400_Gain12_B1 (-0.749096f)
+#define HPF_Fs176400_Gain12_B2 0.000000f
 /* Gain = 13.000000 dB */
-#define HPF_Fs176400_Gain13_A0                          4.031914f
-#define HPF_Fs176400_Gain13_A1                          (-3.781010f)
-#define HPF_Fs176400_Gain13_A2                          0.000000f
-#define HPF_Fs176400_Gain13_B1                          (-0.749096f)
-#define HPF_Fs176400_Gain13_B2                          0.000000f
+#define HPF_Fs176400_Gain13_A0 4.031914f
+#define HPF_Fs176400_Gain13_A1 (-3.781010f)
+#define HPF_Fs176400_Gain13_A2 0.000000f
+#define HPF_Fs176400_Gain13_B1 (-0.749096f)
+#define HPF_Fs176400_Gain13_B2 0.000000f
 /* Gain = 14.000000 dB */
-#define HPF_Fs176400_Gain14_A0                          4.508575f
-#define HPF_Fs176400_Gain14_A1                          (-4.257671f)
-#define HPF_Fs176400_Gain14_A2                          0.000000f
-#define HPF_Fs176400_Gain14_B1                          (-0.749096f)
-#define HPF_Fs176400_Gain14_B2                          0.000000f
+#define HPF_Fs176400_Gain14_A0 4.508575f
+#define HPF_Fs176400_Gain14_A1 (-4.257671f)
+#define HPF_Fs176400_Gain14_A2 0.000000f
+#define HPF_Fs176400_Gain14_B1 (-0.749096f)
+#define HPF_Fs176400_Gain14_B2 0.000000f
 /* Gain = 15.000000 dB */
-#define HPF_Fs176400_Gain15_A0                          5.043397f
-#define HPF_Fs176400_Gain15_A1                          (-4.792493f)
-#define HPF_Fs176400_Gain15_A2                          0.000000f
-#define HPF_Fs176400_Gain15_B1                          (-0.749096f)
-#define HPF_Fs176400_Gain15_B2                          0.000000f
+#define HPF_Fs176400_Gain15_A0 5.043397f
+#define HPF_Fs176400_Gain15_A1 (-4.792493f)
+#define HPF_Fs176400_Gain15_A2 0.000000f
+#define HPF_Fs176400_Gain15_B1 (-0.749096f)
+#define HPF_Fs176400_Gain15_B2 0.000000f
 
 /* Coefficients for sample rate 192000Hz */
-                                                                  /* Gain =  1.000000 dB */
-#define HPF_Fs192000_Gain1_A0                          1.107823
-#define HPF_Fs192000_Gain1_A1                          (-0.875150)
-#define HPF_Fs192000_Gain1_A2                          0.000000
-#define HPF_Fs192000_Gain1_B1                          (-0.767327)
-#define HPF_Fs192000_Gain1_B2                          0.000000
-                                                                  /* Gain =  2.000000 dB */
-#define HPF_Fs192000_Gain2_A0                          1.228803
-#define HPF_Fs192000_Gain2_A1                          (-0.996130)
-#define HPF_Fs192000_Gain2_A2                          0.000000
-#define HPF_Fs192000_Gain2_B1                          (-0.767327)
-#define HPF_Fs192000_Gain2_B2                          0.000000
-                                                                   /* Gain =  3.000000 dB */
-#define HPF_Fs192000_Gain3_A0                          1.364544
-#define HPF_Fs192000_Gain3_A1                          (-1.131871)
-#define HPF_Fs192000_Gain3_A2                          0.000000
-#define HPF_Fs192000_Gain3_B1                          (-0.767327)
-#define HPF_Fs192000_Gain3_B2                          0.000000
-                                                                   /* Gain =  4.000000 dB */
-#define HPF_Fs192000_Gain4_A0                          1.516849
-#define HPF_Fs192000_Gain4_A1                          (-1.284176)
-#define HPF_Fs192000_Gain4_A2                           0.000000
-#define HPF_Fs192000_Gain4_B1                           (-0.767327)
-#define HPF_Fs192000_Gain4_B2                           0.000000
-                                                                   /* Gain =  5.000000 dB */
-#define HPF_Fs192000_Gain5_A0                           1.687737
-#define HPF_Fs192000_Gain5_A1                           (-1.455064)
-#define HPF_Fs192000_Gain5_A2                           0.000000
-#define HPF_Fs192000_Gain5_B1                           (-0.767327)
-#define HPF_Fs192000_Gain5_B2                           0.000000
-                                                                   /* Gain =  6.000000 dB */
-#define HPF_Fs192000_Gain6_A0                            1.879477
-#define HPF_Fs192000_Gain6_A1                            (-1.646804)
-#define HPF_Fs192000_Gain6_A2                            0.000000
-#define HPF_Fs192000_Gain6_B1                            (-0.767327)
-#define HPF_Fs192000_Gain6_B2                            0.000000
-                                                                 /* Gain =  7.000000 dB */
-#define HPF_Fs192000_Gain7_A0                            2.094613
-#define HPF_Fs192000_Gain7_A1                            (-1.861940)
-#define HPF_Fs192000_Gain7_A2                            0.000000
-#define HPF_Fs192000_Gain7_B1                            (-0.767327)
-#define HPF_Fs192000_Gain7_B2                            0.000000
-                                                                   /* Gain =  8.000000 dB */
-#define HPF_Fs192000_Gain8_A0                           2.335999
-#define HPF_Fs192000_Gain8_A1                           (-2.103326)
-#define HPF_Fs192000_Gain8_A2                           0.000000
-#define HPF_Fs192000_Gain8_B1                           (-0.767327)
-#define HPF_Fs192000_Gain8_B2                           0.000000
-                                                                   /* Gain =  9.000000 dB */
-#define HPF_Fs192000_Gain9_A0                          2.606839
-#define HPF_Fs192000_Gain9_A1                          (-2.374166)
-#define HPF_Fs192000_Gain9_A2                          0.000000
-#define HPF_Fs192000_Gain9_B1                          (-0.767327)
-#define HPF_Fs192000_Gain9_B2                          0.000000
-                                                                 /* Gain =  10.000000 dB */
-#define HPF_Fs192000_Gain10_A0                        2.910726
-#define HPF_Fs192000_Gain10_A1                        (-2.678053)
-#define HPF_Fs192000_Gain10_A2                         0.000000
-#define HPF_Fs192000_Gain10_B1                         (-0.767327)
-#define HPF_Fs192000_Gain10_B2                         0.000000
-                                                                  /* Gain =  11.000000 dB */
-#define HPF_Fs192000_Gain11_A0                          3.251693
-#define HPF_Fs192000_Gain11_A1                          (-3.019020)
-#define HPF_Fs192000_Gain11_A2                          0.000000
-#define HPF_Fs192000_Gain11_B1                          (-0.767327)
-#define HPF_Fs192000_Gain11_B2                          0.000000
-                                                                  /* Gain =  12.000000 dB */
-#define HPF_Fs192000_Gain12_A0                          3.634264
-#define HPF_Fs192000_Gain12_A1                          (-3.401591)
-#define HPF_Fs192000_Gain12_A2                          0.000000
-#define HPF_Fs192000_Gain12_B1                          (-0.767327)
-#define HPF_Fs192000_Gain12_B2                          0.000000
-                                                                /* Gain =  13.000000 dB */
-#define HPF_Fs192000_Gain13_A0                           4.063516
-#define HPF_Fs192000_Gain13_A1                           (-3.830843)
-#define HPF_Fs192000_Gain13_A2                           0.000000
-#define HPF_Fs192000_Gain13_B1                           (-0.767327)
-#define HPF_Fs192000_Gain13_B2                           0.000000
-                                                                /* Gain =  14.000000 dB */
-#define HPF_Fs192000_Gain14_A0                          4.545145
-#define HPF_Fs192000_Gain14_A1                          (-4.312472)
-#define HPF_Fs192000_Gain14_A2                          0.000000
-#define HPF_Fs192000_Gain14_B1                          (-0.767327)
-#define HPF_Fs192000_Gain14_B2                          0.000000
-                                                                  /* Gain =  15.000000 dB */
-#define HPF_Fs192000_Gain15_A0                         5.085542
-#define HPF_Fs192000_Gain15_A1                         (-4.852868)
-#define HPF_Fs192000_Gain15_A2                         0.000000
-#define HPF_Fs192000_Gain15_B1                         (-0.767327)
-#define HPF_Fs192000_Gain15_B2                         0.000000
+/* Gain =  1.000000 dB */
+#define HPF_Fs192000_Gain1_A0 1.107823
+#define HPF_Fs192000_Gain1_A1 (-0.875150)
+#define HPF_Fs192000_Gain1_A2 0.000000
+#define HPF_Fs192000_Gain1_B1 (-0.767327)
+#define HPF_Fs192000_Gain1_B2 0.000000
+/* Gain =  2.000000 dB */
+#define HPF_Fs192000_Gain2_A0 1.228803
+#define HPF_Fs192000_Gain2_A1 (-0.996130)
+#define HPF_Fs192000_Gain2_A2 0.000000
+#define HPF_Fs192000_Gain2_B1 (-0.767327)
+#define HPF_Fs192000_Gain2_B2 0.000000
+/* Gain =  3.000000 dB */
+#define HPF_Fs192000_Gain3_A0 1.364544
+#define HPF_Fs192000_Gain3_A1 (-1.131871)
+#define HPF_Fs192000_Gain3_A2 0.000000
+#define HPF_Fs192000_Gain3_B1 (-0.767327)
+#define HPF_Fs192000_Gain3_B2 0.000000
+/* Gain =  4.000000 dB */
+#define HPF_Fs192000_Gain4_A0 1.516849
+#define HPF_Fs192000_Gain4_A1 (-1.284176)
+#define HPF_Fs192000_Gain4_A2 0.000000
+#define HPF_Fs192000_Gain4_B1 (-0.767327)
+#define HPF_Fs192000_Gain4_B2 0.000000
+/* Gain =  5.000000 dB */
+#define HPF_Fs192000_Gain5_A0 1.687737
+#define HPF_Fs192000_Gain5_A1 (-1.455064)
+#define HPF_Fs192000_Gain5_A2 0.000000
+#define HPF_Fs192000_Gain5_B1 (-0.767327)
+#define HPF_Fs192000_Gain5_B2 0.000000
+/* Gain =  6.000000 dB */
+#define HPF_Fs192000_Gain6_A0 1.879477
+#define HPF_Fs192000_Gain6_A1 (-1.646804)
+#define HPF_Fs192000_Gain6_A2 0.000000
+#define HPF_Fs192000_Gain6_B1 (-0.767327)
+#define HPF_Fs192000_Gain6_B2 0.000000
+/* Gain =  7.000000 dB */
+#define HPF_Fs192000_Gain7_A0 2.094613
+#define HPF_Fs192000_Gain7_A1 (-1.861940)
+#define HPF_Fs192000_Gain7_A2 0.000000
+#define HPF_Fs192000_Gain7_B1 (-0.767327)
+#define HPF_Fs192000_Gain7_B2 0.000000
+/* Gain =  8.000000 dB */
+#define HPF_Fs192000_Gain8_A0 2.335999
+#define HPF_Fs192000_Gain8_A1 (-2.103326)
+#define HPF_Fs192000_Gain8_A2 0.000000
+#define HPF_Fs192000_Gain8_B1 (-0.767327)
+#define HPF_Fs192000_Gain8_B2 0.000000
+/* Gain =  9.000000 dB */
+#define HPF_Fs192000_Gain9_A0 2.606839
+#define HPF_Fs192000_Gain9_A1 (-2.374166)
+#define HPF_Fs192000_Gain9_A2 0.000000
+#define HPF_Fs192000_Gain9_B1 (-0.767327)
+#define HPF_Fs192000_Gain9_B2 0.000000
+/* Gain =  10.000000 dB */
+#define HPF_Fs192000_Gain10_A0 2.910726
+#define HPF_Fs192000_Gain10_A1 (-2.678053)
+#define HPF_Fs192000_Gain10_A2 0.000000
+#define HPF_Fs192000_Gain10_B1 (-0.767327)
+#define HPF_Fs192000_Gain10_B2 0.000000
+/* Gain =  11.000000 dB */
+#define HPF_Fs192000_Gain11_A0 3.251693
+#define HPF_Fs192000_Gain11_A1 (-3.019020)
+#define HPF_Fs192000_Gain11_A2 0.000000
+#define HPF_Fs192000_Gain11_B1 (-0.767327)
+#define HPF_Fs192000_Gain11_B2 0.000000
+/* Gain =  12.000000 dB */
+#define HPF_Fs192000_Gain12_A0 3.634264
+#define HPF_Fs192000_Gain12_A1 (-3.401591)
+#define HPF_Fs192000_Gain12_A2 0.000000
+#define HPF_Fs192000_Gain12_B1 (-0.767327)
+#define HPF_Fs192000_Gain12_B2 0.000000
+/* Gain =  13.000000 dB */
+#define HPF_Fs192000_Gain13_A0 4.063516
+#define HPF_Fs192000_Gain13_A1 (-3.830843)
+#define HPF_Fs192000_Gain13_A2 0.000000
+#define HPF_Fs192000_Gain13_B1 (-0.767327)
+#define HPF_Fs192000_Gain13_B2 0.000000
+/* Gain =  14.000000 dB */
+#define HPF_Fs192000_Gain14_A0 4.545145
+#define HPF_Fs192000_Gain14_A1 (-4.312472)
+#define HPF_Fs192000_Gain14_A2 0.000000
+#define HPF_Fs192000_Gain14_B1 (-0.767327)
+#define HPF_Fs192000_Gain14_B2 0.000000
+/* Gain =  15.000000 dB */
+#define HPF_Fs192000_Gain15_A0 5.085542
+#define HPF_Fs192000_Gain15_A1 (-4.852868)
+#define HPF_Fs192000_Gain15_A2 0.000000
+#define HPF_Fs192000_Gain15_B1 (-0.767327)
+#define HPF_Fs192000_Gain15_B2 0.000000
 
 #endif
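
The HPF_Fs<rate>_Gain<n>_{A0,A1,A2,B1,B2} blocks above are per-sample-rate, per-gain coefficient sets for the treble-boost high-pass shelving filter; since A2 and B2 are zero throughout, each set effectively describes a first-order section. Below is a minimal sketch of how one of these sets could be applied, assuming the common direct-form difference equation y[n] = A0*x[n] + A1*x[n-1] - B1*y[n-1]; the exact kernel and sign convention used by the library's filter routines are not shown in this diff.

#include <cstddef>

// First-order shelving section, assuming y[n] = A0*x[n] + A1*x[n-1] - B1*y[n-1]
// (A2 and B2 are zero in the tables above, so the x[n-2]/y[n-2] terms drop out).
struct FirstOrderHpf {
    float a0, a1, b1;   // e.g. HPF_Fs176400_Gain6_{A0,A1,B1}
    float xm1 = 0.0f;   // x[n-1]
    float ym1 = 0.0f;   // y[n-1]

    float process(float x) {
        float y = a0 * x + a1 * xm1 - b1 * ym1;
        xm1 = x;
        ym1 = y;
        return y;
    }
};

// Usage sketch: run one channel through the 176.4 kHz, +6 dB section.
void applyTrebleBoost(float* buf, size_t n) {
    FirstOrderHpf hpf{1.870405f, -1.619501f, -0.749096f};  // Gain6 coefficients above
    for (size_t i = 0; i < n; ++i) buf[i] = hpf.process(buf[i]);
}

Note that all 176400 Hz entries share B1 = -0.749096 and all 192000 Hz entries share B1 = -0.767327: the pole is fixed per sample rate and only the A0/A1 terms vary with the boost level.
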
diff --git a/media/libeffects/lvm/lib/Bundle/src/LVM_Control.cpp b/media/libeffects/lvm/lib/Bundle/src/LVM_Control.cpp
index ff2c90a..3118e77 100644
--- a/media/libeffects/lvm/lib/Bundle/src/LVM_Control.cpp
+++ b/media/libeffects/lvm/lib/Bundle/src/LVM_Control.cpp
@@ -50,78 +50,62 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-LVM_ReturnStatus_en LVM_SetControlParameters(LVM_Handle_t           hInstance,
-                                             LVM_ControlParams_t    *pParams)
-{
-    LVM_Instance_t    *pInstance =(LVM_Instance_t  *)hInstance;
+LVM_ReturnStatus_en LVM_SetControlParameters(LVM_Handle_t hInstance, LVM_ControlParams_t* pParams) {
+    LVM_Instance_t* pInstance = (LVM_Instance_t*)hInstance;
 
-    if ((pParams == LVM_NULL) || (hInstance == LVM_NULL))
-    {
+    if ((pParams == LVM_NULL) || (hInstance == LVM_NULL)) {
         return (LVM_NULLADDRESS);
     }
 
     pInstance->NewParams = *pParams;
 
-    if(
-        /* General parameters */
-        ((pParams->OperatingMode != LVM_MODE_OFF) && (pParams->OperatingMode != LVM_MODE_ON))                                         ||
-    ((pParams->SampleRate != LVM_FS_8000) && (pParams->SampleRate != LVM_FS_11025) && (pParams->SampleRate != LVM_FS_12000)       &&
-     (pParams->SampleRate != LVM_FS_16000) && (pParams->SampleRate != LVM_FS_22050) && (pParams->SampleRate != LVM_FS_24000)      &&
-     (pParams->SampleRate != LVM_FS_32000) && (pParams->SampleRate != LVM_FS_44100) && (pParams->SampleRate != LVM_FS_48000)      &&
-     (pParams->SampleRate != LVM_FS_88200) && (pParams->SampleRate != LVM_FS_96000) &&
-     (pParams->SampleRate != LVM_FS_176400) && (pParams->SampleRate != LVM_FS_192000))      ||
-#ifdef SUPPORT_MC
-        ((pParams->SourceFormat != LVM_STEREO) &&
-         (pParams->SourceFormat != LVM_MONOINSTEREO) &&
-         (pParams->SourceFormat != LVM_MONO) &&
-         (pParams->SourceFormat != LVM_MULTICHANNEL)) ||
-#else
-        ((pParams->SourceFormat != LVM_STEREO) && (pParams->SourceFormat != LVM_MONOINSTEREO) && (pParams->SourceFormat != LVM_MONO)) ||
-#endif
-        (pParams->SpeakerType > LVM_EX_HEADPHONES))
-    {
+    if (
+            /* General parameters */
+            ((pParams->OperatingMode != LVM_MODE_OFF) && (pParams->OperatingMode != LVM_MODE_ON)) ||
+            ((pParams->SampleRate != LVM_FS_8000) && (pParams->SampleRate != LVM_FS_11025) &&
+             (pParams->SampleRate != LVM_FS_12000) && (pParams->SampleRate != LVM_FS_16000) &&
+             (pParams->SampleRate != LVM_FS_22050) && (pParams->SampleRate != LVM_FS_24000) &&
+             (pParams->SampleRate != LVM_FS_32000) && (pParams->SampleRate != LVM_FS_44100) &&
+             (pParams->SampleRate != LVM_FS_48000) && (pParams->SampleRate != LVM_FS_88200) &&
+             (pParams->SampleRate != LVM_FS_96000) && (pParams->SampleRate != LVM_FS_176400) &&
+             (pParams->SampleRate != LVM_FS_192000)) ||
+            ((pParams->SourceFormat != LVM_STEREO) && (pParams->SourceFormat != LVM_MONOINSTEREO) &&
+             (pParams->SourceFormat != LVM_MONO) && (pParams->SourceFormat != LVM_MULTICHANNEL)) ||
+            (pParams->SpeakerType > LVM_EX_HEADPHONES)) {
         return (LVM_OUTOFRANGE);
     }
 
-#ifdef SUPPORT_MC
     pInstance->Params.NrChannels = pParams->NrChannels;
-    pInstance->Params.ChMask     = pParams->ChMask;
-#endif
+    pInstance->Params.ChMask = pParams->ChMask;
     /*
      * Cinema Sound parameters
      */
-    if((pParams->VirtualizerOperatingMode != LVM_MODE_OFF) && (pParams->VirtualizerOperatingMode != LVM_MODE_ON))
-    {
+    if ((pParams->VirtualizerOperatingMode != LVM_MODE_OFF) &&
+        (pParams->VirtualizerOperatingMode != LVM_MODE_ON)) {
         return (LVM_OUTOFRANGE);
     }
 
-    if(pParams->VirtualizerType != LVM_CONCERTSOUND)
-    {
+    if (pParams->VirtualizerType != LVM_CONCERTSOUND) {
         return (LVM_OUTOFRANGE);
     }
 
-    if(pParams->VirtualizerReverbLevel > LVM_VIRTUALIZER_MAX_REVERB_LEVEL)
-    {
+    if (pParams->VirtualizerReverbLevel > LVM_VIRTUALIZER_MAX_REVERB_LEVEL) {
         return (LVM_OUTOFRANGE);
     }
 
-    if(pParams->CS_EffectLevel < LVM_CS_MIN_EFFECT_LEVEL)
-    {
+    if (pParams->CS_EffectLevel < LVM_CS_MIN_EFFECT_LEVEL) {
         return (LVM_OUTOFRANGE);
     }
 
     /*
      * N-Band Equalizer
      */
-    if(pParams->EQNB_NBands > pInstance->InstParams.EQNB_NumBands)
-    {
+    if (pParams->EQNB_NBands > pInstance->InstParams.EQNB_NumBands) {
         return (LVM_OUTOFRANGE);
     }
 
     /* Definition pointer */
-    if ((pParams->pEQNB_BandDefinition == LVM_NULL) &&
-        (pParams->EQNB_NBands != 0))
-    {
+    if ((pParams->pEQNB_BandDefinition == LVM_NULL) && (pParams->EQNB_NBands != 0)) {
         return (LVM_NULLADDRESS);
     }
 
@@ -129,35 +113,31 @@
      * Copy the filter definitions for the Equaliser
      */
     {
-        LVM_INT16           i;
+        LVM_INT16 i;
 
-        if (pParams->EQNB_NBands != 0)
-        {
-            for (i=0; i<pParams->EQNB_NBands; i++)
-            {
+        if (pParams->EQNB_NBands != 0) {
+            for (i = 0; i < pParams->EQNB_NBands; i++) {
                 pInstance->pEQNB_BandDefs[i] = pParams->pEQNB_BandDefinition[i];
             }
             pInstance->NewParams.pEQNB_BandDefinition = pInstance->pEQNB_BandDefs;
         }
     }
-    if( /* N-Band Equaliser parameters */
-        ((pParams->EQNB_OperatingMode != LVM_EQNB_OFF) && (pParams->EQNB_OperatingMode != LVM_EQNB_ON)) ||
-        (pParams->EQNB_NBands > pInstance->InstParams.EQNB_NumBands))
-    {
+    if (/* N-Band Equaliser parameters */
+        ((pParams->EQNB_OperatingMode != LVM_EQNB_OFF) &&
+         (pParams->EQNB_OperatingMode != LVM_EQNB_ON)) ||
+        (pParams->EQNB_NBands > pInstance->InstParams.EQNB_NumBands)) {
         return (LVM_OUTOFRANGE);
     }
     /* Band parameters*/
     {
         LVM_INT16 i;
-        for(i = 0; i < pParams->EQNB_NBands; i++)
-        {
-            if(((pParams->pEQNB_BandDefinition[i].Frequency < LVM_EQNB_MIN_BAND_FREQ)  ||
-                (pParams->pEQNB_BandDefinition[i].Frequency > LVM_EQNB_MAX_BAND_FREQ)) ||
-                ((pParams->pEQNB_BandDefinition[i].Gain     < LVM_EQNB_MIN_BAND_GAIN)  ||
-                (pParams->pEQNB_BandDefinition[i].Gain      > LVM_EQNB_MAX_BAND_GAIN)) ||
-                ((pParams->pEQNB_BandDefinition[i].QFactor  < LVM_EQNB_MIN_QFACTOR)     ||
-                (pParams->pEQNB_BandDefinition[i].QFactor   > LVM_EQNB_MAX_QFACTOR)))
-            {
+        for (i = 0; i < pParams->EQNB_NBands; i++) {
+            if (((pParams->pEQNB_BandDefinition[i].Frequency < LVM_EQNB_MIN_BAND_FREQ) ||
+                 (pParams->pEQNB_BandDefinition[i].Frequency > LVM_EQNB_MAX_BAND_FREQ)) ||
+                ((pParams->pEQNB_BandDefinition[i].Gain < LVM_EQNB_MIN_BAND_GAIN) ||
+                 (pParams->pEQNB_BandDefinition[i].Gain > LVM_EQNB_MAX_BAND_GAIN)) ||
+                ((pParams->pEQNB_BandDefinition[i].QFactor < LVM_EQNB_MIN_QFACTOR) ||
+                 (pParams->pEQNB_BandDefinition[i].QFactor > LVM_EQNB_MAX_QFACTOR))) {
                 return (LVM_OUTOFRANGE);
             }
         }
@@ -166,24 +146,25 @@
     /*
      * Bass Enhancement parameters
      */
-    if(((pParams->BE_OperatingMode != LVM_BE_OFF) && (pParams->BE_OperatingMode != LVM_BE_ON))                      ||
-        ((pParams->BE_EffectLevel < LVM_BE_MIN_EFFECTLEVEL ) || (pParams->BE_EffectLevel > LVM_BE_MAX_EFFECTLEVEL ))||
-        ((pParams->BE_CentreFreq != LVM_BE_CENTRE_55Hz) && (pParams->BE_CentreFreq != LVM_BE_CENTRE_66Hz)           &&
-        (pParams->BE_CentreFreq != LVM_BE_CENTRE_78Hz) && (pParams->BE_CentreFreq != LVM_BE_CENTRE_90Hz))           ||
-        ((pParams->BE_HPF != LVM_BE_HPF_OFF) && (pParams->BE_HPF != LVM_BE_HPF_ON)))
-    {
+    if (((pParams->BE_OperatingMode != LVM_BE_OFF) && (pParams->BE_OperatingMode != LVM_BE_ON)) ||
+        ((pParams->BE_EffectLevel < LVM_BE_MIN_EFFECTLEVEL) ||
+         (pParams->BE_EffectLevel > LVM_BE_MAX_EFFECTLEVEL)) ||
+        ((pParams->BE_CentreFreq != LVM_BE_CENTRE_55Hz) &&
+         (pParams->BE_CentreFreq != LVM_BE_CENTRE_66Hz) &&
+         (pParams->BE_CentreFreq != LVM_BE_CENTRE_78Hz) &&
+         (pParams->BE_CentreFreq != LVM_BE_CENTRE_90Hz)) ||
+        ((pParams->BE_HPF != LVM_BE_HPF_OFF) && (pParams->BE_HPF != LVM_BE_HPF_ON))) {
         return (LVM_OUTOFRANGE);
     }
 
     /*
      * Volume Control parameters
      */
-    if((pParams->VC_EffectLevel < LVM_VC_MIN_EFFECTLEVEL ) || (pParams->VC_EffectLevel > LVM_VC_MAX_EFFECTLEVEL ))
-    {
+    if ((pParams->VC_EffectLevel < LVM_VC_MIN_EFFECTLEVEL) ||
+        (pParams->VC_EffectLevel > LVM_VC_MAX_EFFECTLEVEL)) {
         return (LVM_OUTOFRANGE);
     }
-    if((pParams->VC_Balance < LVM_VC_BALANCE_MIN ) || (pParams->VC_Balance > LVM_VC_BALANCE_MAX ))
-    {
+    if ((pParams->VC_Balance < LVM_VC_BALANCE_MIN) || (pParams->VC_Balance > LVM_VC_BALANCE_MAX)) {
         return (LVM_OUTOFRANGE);
     }
 
@@ -191,22 +172,21 @@
      * PSA parameters
      */
     if (((LVPSA_LevelDetectSpeed_en)pParams->PSA_PeakDecayRate > LVPSA_SPEED_HIGH) ||
-        (pParams->PSA_Enable > LVM_PSA_ON))
-    {
+        (pParams->PSA_Enable > LVM_PSA_ON)) {
         return (LVM_OUTOFRANGE);
     }
 
     /*
-    * Set the flag to indicate there are new parameters to use
-    *
-    * Protect the copy of the new parameters from interrupts to avoid possible problems
-    * with loss control parameters. This problem can occur if this control function is called more
-    * than once before a call to the process function. If the process function interrupts
-    * the copy to NewParams then one frame may have mixed parameters, some old and some new.
-    */
+     * Set the flag to indicate there are new parameters to use
+     *
+     * Protect the copy of the new parameters from interrupts to avoid possible problems
+     * with loss of control parameters. This problem can occur if this control function is called more
+     * than once before a call to the process function. If the process function interrupts
+     * the copy to NewParams then one frame may have mixed parameters, some old and some new.
+     */
     pInstance->ControlPending = LVM_TRUE;
 
-    return(LVM_SUCCESS);
+    return (LVM_SUCCESS);
 }
 
 /****************************************************************************************/
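
The ControlPending flag raised at the end of LVM_SetControlParameters pairs with the copy loop in LVM_ApplyNewSettings further down: the process side keeps re-copying NewParams until a pass completes without the control side raising the flag again (bounded to five attempts), so no frame runs on a half-copied parameter set. A minimal sketch of that handshake, using hypothetical names (setParams, applyNewSettings, pending) rather than the LVM ones:

#include <atomic>

/* Hedged sketch of the copy-until-stable handshake; names are illustrative. */
struct Params { int volume; int mode; };

std::atomic<bool> pending{false};
Params newParams;     /* written by the control path */
Params activeParams;  /* read by the process path */

void setParams(const Params& p) {  /* control path */
    newParams = p;
    pending.store(true, std::memory_order_release);
}

void applyNewSettings() {  /* process path, once per frame */
    Params local;
    int count = 5;  /* bounded retries, as in LVM_ApplyNewSettings */
    do {
        pending.store(false, std::memory_order_relaxed);
        local = newParams;  /* may race with setParams(); if so, retry */
        --count;
    } while (pending.load(std::memory_order_acquire) && count > 0);
    activeParams = local;
}

The retry bound presumably keeps the audio path from spinning if the control path is called repeatedly between frames.
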
@@ -230,16 +210,13 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-LVM_ReturnStatus_en LVM_GetControlParameters(LVM_Handle_t           hInstance,
-                                             LVM_ControlParams_t    *pParams)
-{
-    LVM_Instance_t    *pInstance =(LVM_Instance_t  *)hInstance;
+LVM_ReturnStatus_en LVM_GetControlParameters(LVM_Handle_t hInstance, LVM_ControlParams_t* pParams) {
+    LVM_Instance_t* pInstance = (LVM_Instance_t*)hInstance;
 
     /*
      * Check pointer
      */
-    if ((pParams == LVM_NULL) || (hInstance == LVM_NULL))
-    {
+    if ((pParams == LVM_NULL) || (hInstance == LVM_NULL)) {
         return (LVM_NULLADDRESS);
     }
     *pParams = pInstance->NewParams;
@@ -248,17 +225,16 @@
      * Copy the filter definitions for the Equaliser
      */
     {
-        LVM_INT16           i;
+        LVM_INT16 i;
 
         if (pInstance->NewParams.EQNB_NBands != 0)
-        for (i=0; i<pInstance->NewParams.EQNB_NBands; i++)
-        {
-            pInstance->pEQNB_UserDefs[i] = pInstance->pEQNB_BandDefs[i];
-        }
+            for (i = 0; i < pInstance->NewParams.EQNB_NBands; i++) {
+                pInstance->pEQNB_UserDefs[i] = pInstance->pEQNB_BandDefs[i];
+            }
         pParams->pEQNB_BandDefinition = pInstance->pEQNB_UserDefs;
     }
 
-    return(LVM_SUCCESS);
+    return (LVM_SUCCESS);
 }
 
 /****************************************************************************************/
@@ -274,56 +250,46 @@
 /*  pParams                 Pointer to the parameters to use                            */
 /*                                                                                      */
 /****************************************************************************************/
-void LVM_SetTrebleBoost(LVM_Instance_t         *pInstance,
-                        LVM_ControlParams_t    *pParams)
-{
-    extern FO_FLOAT_LShx_Coefs_t  LVM_TrebleBoostCoefs[];
+void LVM_SetTrebleBoost(LVM_Instance_t* pInstance, LVM_ControlParams_t* pParams) {
+    extern FO_FLOAT_LShx_Coefs_t LVM_TrebleBoostCoefs[];
 
-    LVM_INT16               Offset;
-    LVM_INT16               EffectLevel = 0;
+    LVM_INT16 Offset;
+    LVM_INT16 EffectLevel = 0;
 
     /*
      * Load the coefficients
      */
-    if ((pParams->TE_OperatingMode == LVM_TE_ON) &&
-        (pParams->SampleRate >= TrebleBoostMinRate) &&
-        (pParams->OperatingMode == LVM_MODE_ON) &&
-        (pParams->TE_EffectLevel > 0))
-    {
-        if((pParams->TE_EffectLevel == LVM_TE_LOW_MIPS) &&
-            ((pParams->SpeakerType == LVM_HEADPHONES)||
-            (pParams->SpeakerType == LVM_EX_HEADPHONES)))
-        {
+    if ((pParams->TE_OperatingMode == LVM_TE_ON) && (pParams->SampleRate >= TrebleBoostMinRate) &&
+        (pParams->OperatingMode == LVM_MODE_ON) && (pParams->TE_EffectLevel > 0)) {
+        if ((pParams->TE_EffectLevel == LVM_TE_LOW_MIPS) &&
+            ((pParams->SpeakerType == LVM_HEADPHONES) ||
+             (pParams->SpeakerType == LVM_EX_HEADPHONES))) {
             pInstance->TE_Active = LVM_FALSE;
-        }
-        else
-        {
+        } else {
             EffectLevel = pParams->TE_EffectLevel;
             pInstance->TE_Active = LVM_TRUE;
         }
 
-        if(pInstance->TE_Active == LVM_TRUE)
-        {
+        if (pInstance->TE_Active == LVM_TRUE) {
             /*
              * Load the coefficients and enabled the treble boost
              */
-            Offset = (LVM_INT16)(EffectLevel - 1 + TrebleBoostSteps * (pParams->SampleRate - TrebleBoostMinRate));
+            Offset = (LVM_INT16)(EffectLevel - 1 +
+                                 TrebleBoostSteps * (pParams->SampleRate - TrebleBoostMinRate));
             FO_2I_D16F32Css_LShx_TRC_WRA_01_Init(&pInstance->pTE_State->TrebleBoost_State,
-                                            &pInstance->pTE_Taps->TrebleBoost_Taps,
-                                            &LVM_TrebleBoostCoefs[Offset]);
+                                                 &pInstance->pTE_Taps->TrebleBoost_Taps,
+                                                 &LVM_TrebleBoostCoefs[Offset]);
 
             /*
              * Clear the taps
              */
-            LoadConst_Float((LVM_FLOAT)0,                                     /* Value */
-                            (LVM_FLOAT *)&pInstance->pTE_Taps->TrebleBoost_Taps,  /* Destination.\
-                                                     Cast to void: no dereferencing in function */
-                            (LVM_UINT16)(sizeof(pInstance->pTE_Taps->TrebleBoost_Taps) / \
-                                                        sizeof(LVM_FLOAT))); /* Number of words */
+            LoadConst_Float((LVM_FLOAT)0,                                       /* Value */
+                            (LVM_FLOAT*)&pInstance->pTE_Taps->TrebleBoost_Taps, /* Destination.\
+                                                   Cast to void: no dereferencing in function */
+                            (LVM_UINT16)(sizeof(pInstance->pTE_Taps->TrebleBoost_Taps) /
+                                         sizeof(LVM_FLOAT))); /* Number of words */
         }
-    }
-    else
-    {
+    } else {
         /*
          * Disable the treble boost
          */
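
The Offset computed above indexes the flat LVM_TrebleBoostCoefs[] table by (effect level, sample rate): levels vary fastest, with TrebleBoostSteps entries per sample rate starting at TrebleBoostMinRate. A small sketch of that row-major indexing, with illustrative constants (the real values come from the LVM headers and are not part of this diff):

// Illustrative constants; the real values live in the LVM headers.
constexpr int kTrebleBoostSteps = 15;    // effect levels 1..15 per sample rate
constexpr int kTrebleBoostMinRate = 8;   // first sample-rate enum with a table entry

// Same arithmetic as the Offset computation above: rows are sample rates,
// columns are effect levels, flattened row-major into one array.
constexpr int trebleCoefIndex(int effectLevel, int sampleRateEnum) {
    return (effectLevel - 1) + kTrebleBoostSteps * (sampleRateEnum - kTrebleBoostMinRate);
}

static_assert(trebleCoefIndex(1, kTrebleBoostMinRate) == 0, "first table entry");
static_assert(trebleCoefIndex(3, kTrebleBoostMinRate + 1) == kTrebleBoostSteps + 2,
              "row-major layout");
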
@@ -345,94 +311,76 @@
 /*  pParams             Initialisation parameters                                   */
 /*                                                                                  */
 /************************************************************************************/
-void    LVM_SetVolume(LVM_Instance_t         *pInstance,
-                      LVM_ControlParams_t    *pParams)
-{
-
-    LVM_UINT16      dBShifts;                                   /* 6dB shifts */
-    LVM_UINT16      dBOffset;                                   /* Table offset */
-    LVM_INT16       Volume = 0;                                 /* Required volume in dBs */
-    LVM_FLOAT        Temp;
+void LVM_SetVolume(LVM_Instance_t* pInstance, LVM_ControlParams_t* pParams) {
+    LVM_UINT16 dBShifts;  /* 6dB shifts */
+    LVM_UINT16 dBOffset;  /* Table offset */
+    LVM_INT16 Volume = 0; /* Required volume in dBs */
+    LVM_FLOAT Temp;
 
     /*
      * Limit the gain to the maximum allowed
      */
-     if  (pParams->VC_EffectLevel > 0)
-     {
-         Volume = 0;
-     }
-     else
-     {
-         Volume = pParams->VC_EffectLevel;
-     }
+    if (pParams->VC_EffectLevel > 0) {
+        Volume = 0;
+    } else {
+        Volume = pParams->VC_EffectLevel;
+    }
 
-     /* Compensate this volume in PSA plot */
-     if(Volume > -60)  /* Limit volume loss to PSA Limits*/
-         pInstance->PSA_GainOffset=(LVM_INT16)(-Volume);/* Loss is compensated by Gain*/
-     else
-         pInstance->PSA_GainOffset=(LVM_INT16)60;/* Loss is compensated by Gain*/
+    /* Compensate this volume in PSA plot */
+    if (Volume > -60)                                     /* Limit volume loss to PSA Limits*/
+        pInstance->PSA_GainOffset = (LVM_INT16)(-Volume); /* Loss is compensated by Gain*/
+    else
+        pInstance->PSA_GainOffset = (LVM_INT16)60; /* Loss is compensated by Gain*/
 
     pInstance->VC_AVLFixedVolume = 0;
 
     /*
      * Set volume control and AVL volumes according to headroom and volume user setting
      */
-    if(pParams->OperatingMode == LVM_MODE_ON)
-    {
+    if (pParams->OperatingMode == LVM_MODE_ON) {
         /* Default Situation with no AVL and no RS */
-        if(pParams->EQNB_OperatingMode == LVM_EQNB_ON)
-        {
-            if(Volume > -pInstance->Headroom)
-                Volume = (LVM_INT16)-pInstance->Headroom;
+        if (pParams->EQNB_OperatingMode == LVM_EQNB_ON) {
+            if (Volume > -pInstance->Headroom) Volume = (LVM_INT16)-pInstance->Headroom;
         }
     }
 
     /*
      * Activate volume control if necessary
      */
-    pInstance->VC_Active   = LVM_TRUE;
-    if (Volume != 0)
-    {
+    pInstance->VC_Active = LVM_TRUE;
+    if (Volume != 0) {
         pInstance->VC_VolumedB = Volume;
-    }
-    else
-    {
+    } else {
         pInstance->VC_VolumedB = 0;
     }
 
     /*
      * Calculate the required gain and shifts
      */
-    dBOffset = (LVM_UINT16)((-Volume) % 6);             /* Get the dBs 0-5 */
-    dBShifts = (LVM_UINT16)(Volume / -6);               /* Get the 6dB shifts */
+    dBOffset = (LVM_UINT16)((-Volume) % 6); /* Get the dBs 0-5 */
+    dBShifts = (LVM_UINT16)(Volume / -6);   /* Get the 6dB shifts */
 
     /*
      * Set the parameters
      */
-    if(dBShifts == 0)
-    {
+    if (dBShifts == 0) {
         LVC_Mixer_SetTarget(&pInstance->VC_Volume.MixerStream[0],
-                                (LVM_FLOAT)LVM_VolumeTable[dBOffset]);
-        }
-    else
-    {
+                            (LVM_FLOAT)LVM_VolumeTable[dBOffset]);
+    } else {
         Temp = LVM_VolumeTable[dBOffset];
-        while(dBShifts) {
+        while (dBShifts) {
             Temp = Temp / 2.0f;
             dBShifts--;
         }
         LVC_Mixer_SetTarget(&pInstance->VC_Volume.MixerStream[0], Temp);
     }
     pInstance->VC_Volume.MixerStream[0].CallbackSet = 1;
-    if(pInstance->NoSmoothVolume == LVM_TRUE)
-    {
+    if (pInstance->NoSmoothVolume == LVM_TRUE) {
         LVC_Mixer_SetTimeConstant(&pInstance->VC_Volume.MixerStream[0], 0,
                                   pInstance->Params.SampleRate, 2);
-    }
-    else
-    {
-        LVC_Mixer_VarSlope_SetTimeConstant(&pInstance->VC_Volume.MixerStream[0],
-                                           LVM_VC_MIXER_TIME, pInstance->Params.SampleRate, 2);
+    } else {
+        LVC_Mixer_VarSlope_SetTimeConstant(&pInstance->VC_Volume.MixerStream[0], LVM_VC_MIXER_TIME,
+                                           pInstance->Params.SampleRate, 2);
     }
 }
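
LVM_SetVolume splits the (non-positive) volume into a 0-5 dB remainder, looked up in LVM_VolumeTable, and a count of whole 6 dB steps, each applied by halving the linear gain, since 6 dB is almost exactly a factor of two in amplitude (20*log10(2) ~ 6.02 dB). A worked sketch of the same decomposition, with a hypothetical table standing in for LVM_VolumeTable (whose actual values are not in this diff):

#include <cmath>
#include <cstdio>

// Hypothetical 0..-5 dB lookup, standing in for LVM_VolumeTable.
static const float kVolumeTable[6] = {
        1.000000f, 0.891251f, 0.794328f, 0.707946f, 0.630957f, 0.562341f};

float volumeDbToLinear(int volumeDb) {   // volumeDb <= 0
    int dbOffset = (-volumeDb) % 6;      // 0..5 dB remainder
    int dbShifts = volumeDb / -6;        // whole 6 dB steps
    float gain = kVolumeTable[dbOffset];
    while (dbShifts--) gain *= 0.5f;     // each 6 dB step halves the gain
    return gain;
}

int main() {
    // -15 dB -> 3 dB remainder (0.7079), two halvings -> ~0.177; exact is 0.1778.
    std::printf("%f vs %f\n", volumeDbToLinear(-15), std::pow(10.0, -15 / 20.0));
}
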
 
@@ -453,43 +401,39 @@
 /* NOTES:                                                                           */
 /*                                                                                  */
 /************************************************************************************/
-void    LVM_SetHeadroom(LVM_Instance_t         *pInstance,
-                        LVM_ControlParams_t    *pParams)
-{
-    LVM_INT16   ii, jj;
-    LVM_INT16   Headroom = 0;
-    LVM_INT16   MaxGain = 0;
+void LVM_SetHeadroom(LVM_Instance_t* pInstance, LVM_ControlParams_t* pParams) {
+    LVM_INT16 ii, jj;
+    LVM_INT16 Headroom = 0;
+    LVM_INT16 MaxGain = 0;
 
-    if (((LVEQNB_Mode_en)pParams->EQNB_OperatingMode == LVEQNB_ON)
-           && (pInstance->HeadroomParams.Headroom_OperatingMode == LVM_HEADROOM_ON))
-    {
+    if (((LVEQNB_Mode_en)pParams->EQNB_OperatingMode == LVEQNB_ON) &&
+        (pInstance->HeadroomParams.Headroom_OperatingMode == LVM_HEADROOM_ON)) {
         /* Find typical headroom value */
-        for(jj = 0; jj < pInstance->HeadroomParams.NHeadroomBands; jj++)
-        {
+        for (jj = 0; jj < pInstance->HeadroomParams.NHeadroomBands; jj++) {
             MaxGain = 0;
-            for( ii = 0; ii < pParams->EQNB_NBands; ii++)
-            {
-                if((pParams->pEQNB_BandDefinition[ii].Frequency >= pInstance->HeadroomParams.pHeadroomDefinition[jj].Limit_Low) &&
-                   (pParams->pEQNB_BandDefinition[ii].Frequency <= pInstance->HeadroomParams.pHeadroomDefinition[jj].Limit_High))
-                {
-                    if(pParams->pEQNB_BandDefinition[ii].Gain > MaxGain)
-                    {
+            for (ii = 0; ii < pParams->EQNB_NBands; ii++) {
+                if ((pParams->pEQNB_BandDefinition[ii].Frequency >=
+                     pInstance->HeadroomParams.pHeadroomDefinition[jj].Limit_Low) &&
+                    (pParams->pEQNB_BandDefinition[ii].Frequency <=
+                     pInstance->HeadroomParams.pHeadroomDefinition[jj].Limit_High)) {
+                    if (pParams->pEQNB_BandDefinition[ii].Gain > MaxGain) {
                         MaxGain = pParams->pEQNB_BandDefinition[ii].Gain;
                     }
                 }
             }
 
-            if((MaxGain - pInstance->HeadroomParams.pHeadroomDefinition[jj].Headroom_Offset) > Headroom){
-                Headroom = (LVM_INT16)(MaxGain - pInstance->HeadroomParams.pHeadroomDefinition[jj].Headroom_Offset);
+            if ((MaxGain - pInstance->HeadroomParams.pHeadroomDefinition[jj].Headroom_Offset) >
+                Headroom) {
+                Headroom = (LVM_INT16)(
+                        MaxGain -
+                        pInstance->HeadroomParams.pHeadroomDefinition[jj].Headroom_Offset);
             }
         }
 
         /* Saturate */
-        if(Headroom < 0)
-            Headroom = 0;
+        if (Headroom < 0) Headroom = 0;
     }
-    pInstance->Headroom = (LVM_UINT16)Headroom ;
-
+    pInstance->Headroom = (LVM_UINT16)Headroom;
 }
 
 /****************************************************************************************/
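
LVM_SetHeadroom scans each headroom band, takes the largest EQ boost whose centre frequency falls inside the band, subtracts that band's Headroom_Offset, and keeps the worst case across bands; LVM_SetVolume then caps the user volume at -Headroom so the boosted bands cannot clip. A numeric sketch of the same rule, with hypothetical band structures mirroring the fields used above:

#include <algorithm>
#include <vector>

// Hypothetical band structures; field names follow the ones used above.
struct EqBand { int frequencyHz; int gainDb; };
struct HeadroomBand { int lowHz; int highHz; int offsetDb; };

int computeHeadroomDb(const std::vector<EqBand>& eq, const std::vector<HeadroomBand>& hr) {
    int headroom = 0;
    for (const auto& band : hr) {
        int maxGain = 0;
        for (const auto& b : eq) {
            if (b.frequencyHz >= band.lowHz && b.frequencyHz <= band.highHz)
                maxGain = std::max(maxGain, b.gainDb);
        }
        headroom = std::max(headroom, maxGain - band.offsetDb);
    }
    return std::max(headroom, 0);  // saturate at zero, as in LVM_SetHeadroom
}
// e.g. a +9 dB boost at 1 kHz in a band with a 3 dB offset yields 6 dB of headroom,
// so the volume control is capped at -6 dB while the equaliser is on.
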
@@ -510,32 +454,26 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-LVM_ReturnStatus_en LVM_ApplyNewSettings(LVM_Handle_t   hInstance)
-{
-    LVM_Instance_t         *pInstance =(LVM_Instance_t *)hInstance;
-    LVM_ControlParams_t    LocalParams;
-    LVM_INT16              Count = 5;
+LVM_ReturnStatus_en LVM_ApplyNewSettings(LVM_Handle_t hInstance) {
+    LVM_Instance_t* pInstance = (LVM_Instance_t*)hInstance;
+    LVM_ControlParams_t LocalParams;
+    LVM_INT16 Count = 5;
 
     /*
      * Copy the new parameters but make sure they didn't change while copying
      */
-    do
-    {
+    do {
         pInstance->ControlPending = LVM_FALSE;
         LocalParams = pInstance->NewParams;
         pInstance->HeadroomParams = pInstance->NewHeadroomParams;
         Count--;
-    } while ((pInstance->ControlPending != LVM_FALSE) &&
-             (Count > 0));
+    } while ((pInstance->ControlPending != LVM_FALSE) && (Count > 0));
 
-#ifdef SUPPORT_MC
     pInstance->NrChannels = LocalParams.NrChannels;
     pInstance->ChMask = LocalParams.ChMask;
-#endif
 
     /* Clear all internal data if format change*/
-    if(LocalParams.SourceFormat != pInstance->Params.SourceFormat)
-    {
+    if (LocalParams.SourceFormat != pInstance->Params.SourceFormat) {
         LVM_ClearAudioBuffers(pInstance);
         pInstance->ControlPending = LVM_FALSE;
     }
@@ -547,31 +485,27 @@
         (pInstance->Params.TE_EffectLevel != LocalParams.TE_EffectLevel) ||
         (pInstance->Params.TE_OperatingMode != LocalParams.TE_OperatingMode) ||
         (pInstance->Params.OperatingMode != LocalParams.OperatingMode) ||
-        (pInstance->Params.SpeakerType != LocalParams.SpeakerType))
-    {
-        LVM_SetTrebleBoost(pInstance,
-                           &LocalParams);
+        (pInstance->Params.SpeakerType != LocalParams.SpeakerType)) {
+        LVM_SetTrebleBoost(pInstance, &LocalParams);
     }
 
     /*
      * Update the headroom if required
      */
-        LVM_SetHeadroom(pInstance,                      /* Instance pointer */
-                        &LocalParams);                  /* New parameters */
+    LVM_SetHeadroom(pInstance,     /* Instance pointer */
+                    &LocalParams); /* New parameters */
 
     /*
      * Update the volume if required
      */
     {
-        LVM_SetVolume(pInstance,                      /* Instance pointer */
-                      &LocalParams);                  /* New parameters */
+        LVM_SetVolume(pInstance,     /* Instance pointer */
+                      &LocalParams); /* New parameters */
     }
     /* Apply balance changes*/
-    if(pInstance->Params.VC_Balance != LocalParams.VC_Balance)
-    {
+    if (pInstance->Params.VC_Balance != LocalParams.VC_Balance) {
         /* Configure Mixer module for gradual changes to volume*/
-        if(LocalParams.VC_Balance < 0)
-        {
+        if (LocalParams.VC_Balance < 0) {
             LVM_FLOAT Target_Float;
             /* Drop in right channel volume*/
             Target_Float = LVM_MAXFLOAT;
@@ -583,9 +517,7 @@
             LVC_Mixer_SetTarget(&pInstance->VC_BalanceMix.MixerStream[1], Target_Float);
             LVC_Mixer_VarSlope_SetTimeConstant(&pInstance->VC_BalanceMix.MixerStream[1],
                                                LVM_VC_MIXER_TIME, LocalParams.SampleRate, 1);
-        }
-        else if(LocalParams.VC_Balance >0)
-        {
+        } else if (LocalParams.VC_Balance > 0) {
             LVM_FLOAT Target_Float;
             /* Drop in left channel volume*/
             Target_Float = dB_to_LinFloat((LVM_INT16)((-LocalParams.VC_Balance) << 4));
@@ -597,63 +529,54 @@
             LVC_Mixer_SetTarget(&pInstance->VC_BalanceMix.MixerStream[1], Target_Float);
             LVC_Mixer_VarSlope_SetTimeConstant(&pInstance->VC_BalanceMix.MixerStream[1],
                                                LVM_VC_MIXER_TIME, LocalParams.SampleRate, 1);
-        }
-        else
-        {
+        } else {
             LVM_FLOAT Target_Float;
             /* No drop*/
             Target_Float = LVM_MAXFLOAT;
-            LVC_Mixer_SetTarget(&pInstance->VC_BalanceMix.MixerStream[0],Target_Float);
+            LVC_Mixer_SetTarget(&pInstance->VC_BalanceMix.MixerStream[0], Target_Float);
             LVC_Mixer_VarSlope_SetTimeConstant(&pInstance->VC_BalanceMix.MixerStream[0],
-                                               LVM_VC_MIXER_TIME,LocalParams.SampleRate, 1);
+                                               LVM_VC_MIXER_TIME, LocalParams.SampleRate, 1);
 
-            LVC_Mixer_SetTarget(&pInstance->VC_BalanceMix.MixerStream[1],Target_Float);
+            LVC_Mixer_SetTarget(&pInstance->VC_BalanceMix.MixerStream[1], Target_Float);
             LVC_Mixer_VarSlope_SetTimeConstant(&pInstance->VC_BalanceMix.MixerStream[1],
-                                               LVM_VC_MIXER_TIME,LocalParams.SampleRate, 1);
+                                               LVM_VC_MIXER_TIME, LocalParams.SampleRate, 1);
         }
     }
     /*
      * Update the bass enhancement
      */
     {
-        LVDBE_ReturnStatus_en       DBE_Status;
-        LVDBE_Params_t              DBE_Params;
-        LVDBE_Handle_t              *hDBEInstance = (LVDBE_Handle_t *)pInstance->hDBEInstance;
+        LVDBE_ReturnStatus_en DBE_Status;
+        LVDBE_Params_t DBE_Params;
+        LVDBE_Handle_t* hDBEInstance = (LVDBE_Handle_t*)pInstance->hDBEInstance;
 
         /*
          * Set the new parameters
          */
-        if(LocalParams.OperatingMode == LVM_MODE_OFF)
-        {
+        if (LocalParams.OperatingMode == LVM_MODE_OFF) {
             DBE_Params.OperatingMode = LVDBE_OFF;
+        } else {
+            DBE_Params.OperatingMode = (LVDBE_Mode_en)LocalParams.BE_OperatingMode;
         }
-        else
-        {
-            DBE_Params.OperatingMode    = (LVDBE_Mode_en)LocalParams.BE_OperatingMode;
-        }
-        DBE_Params.SampleRate       = (LVDBE_Fs_en)LocalParams.SampleRate;
-        DBE_Params.EffectLevel      = LocalParams.BE_EffectLevel;
-        DBE_Params.CentreFrequency  = (LVDBE_CentreFreq_en)LocalParams.BE_CentreFreq;
-        DBE_Params.HPFSelect        = (LVDBE_FilterSelect_en)LocalParams.BE_HPF;
-        DBE_Params.HeadroomdB       = 0;
-        DBE_Params.VolumeControl    = LVDBE_VOLUME_OFF;
-        DBE_Params.VolumedB         = 0;
-#ifdef SUPPORT_MC
-        DBE_Params.NrChannels         = LocalParams.NrChannels;
-#endif
+        DBE_Params.SampleRate = (LVDBE_Fs_en)LocalParams.SampleRate;
+        DBE_Params.EffectLevel = LocalParams.BE_EffectLevel;
+        DBE_Params.CentreFrequency = (LVDBE_CentreFreq_en)LocalParams.BE_CentreFreq;
+        DBE_Params.HPFSelect = (LVDBE_FilterSelect_en)LocalParams.BE_HPF;
+        DBE_Params.HeadroomdB = 0;
+        DBE_Params.VolumeControl = LVDBE_VOLUME_OFF;
+        DBE_Params.VolumedB = 0;
+        DBE_Params.NrChannels = LocalParams.NrChannels;
 
         /*
          * Make the changes
          */
-        DBE_Status = LVDBE_Control(hDBEInstance,
-                                   &DBE_Params);
+        DBE_Status = LVDBE_Control(hDBEInstance, &DBE_Params);
 
         /*
          * Quit if the changes were not accepted
          */
-        if (DBE_Status != LVDBE_SUCCESS)
-        {
-            return((LVM_ReturnStatus_en)DBE_Status);
+        if (DBE_Status != LVDBE_SUCCESS) {
+            return ((LVM_ReturnStatus_en)DBE_Status);
         }
 
         /*
@@ -666,168 +589,132 @@
      * Update the N-Band Equaliser
      */
     {
-        LVEQNB_ReturnStatus_en      EQNB_Status;
-        LVEQNB_Params_t             EQNB_Params;
-        LVEQNB_Handle_t             *hEQNBInstance = (LVEQNB_Handle_t *)pInstance->hEQNBInstance;
+        LVEQNB_ReturnStatus_en EQNB_Status;
+        LVEQNB_Params_t EQNB_Params;
+        LVEQNB_Handle_t* hEQNBInstance = (LVEQNB_Handle_t*)pInstance->hEQNBInstance;
 
         /*
          * Set the new parameters
          */
 
-        if(LocalParams.OperatingMode == LVM_MODE_OFF)
-        {
-            EQNB_Params.OperatingMode    = LVEQNB_BYPASS;
-        }
-        else
-        {
-            EQNB_Params.OperatingMode    = (LVEQNB_Mode_en)LocalParams.EQNB_OperatingMode;
+        if (LocalParams.OperatingMode == LVM_MODE_OFF) {
+            EQNB_Params.OperatingMode = LVEQNB_BYPASS;
+        } else {
+            EQNB_Params.OperatingMode = (LVEQNB_Mode_en)LocalParams.EQNB_OperatingMode;
         }
 
-        EQNB_Params.SampleRate       = (LVEQNB_Fs_en)LocalParams.SampleRate;
-        EQNB_Params.NBands           = LocalParams.EQNB_NBands;
-        EQNB_Params.pBandDefinition  = (LVEQNB_BandDef_t *)LocalParams.pEQNB_BandDefinition;
-        if (LocalParams.SourceFormat == LVM_STEREO)    /* Mono format not supported */
+        EQNB_Params.SampleRate = (LVEQNB_Fs_en)LocalParams.SampleRate;
+        EQNB_Params.NBands = LocalParams.EQNB_NBands;
+        EQNB_Params.pBandDefinition = (LVEQNB_BandDef_t*)LocalParams.pEQNB_BandDefinition;
+        if (LocalParams.SourceFormat == LVM_STEREO) /* Mono format not supported */
         {
             EQNB_Params.SourceFormat = LVEQNB_STEREO;
         }
-#ifdef SUPPORT_MC
        /* Note: Currently the SourceFormat field of EQNB is not being
         *       used by the module.
          */
-        else if (LocalParams.SourceFormat == LVM_MULTICHANNEL)
-        {
+        else if (LocalParams.SourceFormat == LVM_MULTICHANNEL) {
             EQNB_Params.SourceFormat = LVEQNB_MULTICHANNEL;
+        } else {
+            EQNB_Params.SourceFormat = LVEQNB_MONOINSTEREO; /* Force to Mono-in-Stereo mode */
         }
-#endif
-        else
-        {
-            EQNB_Params.SourceFormat = LVEQNB_MONOINSTEREO;     /* Force to Mono-in-Stereo mode */
-        }
-#ifdef SUPPORT_MC
-        EQNB_Params.NrChannels         = LocalParams.NrChannels;
-#endif
+        EQNB_Params.NrChannels = LocalParams.NrChannels;
 
         /*
          * Set the control flag
          */
         if ((LocalParams.OperatingMode == LVM_MODE_ON) &&
-            (LocalParams.EQNB_OperatingMode == LVM_EQNB_ON))
-        {
+            (LocalParams.EQNB_OperatingMode == LVM_EQNB_ON)) {
             pInstance->EQNB_Active = LVM_TRUE;
-        }
-        else
-        {
+        } else {
             EQNB_Params.OperatingMode = LVEQNB_BYPASS;
         }
 
         /*
          * Make the changes
          */
-        EQNB_Status = LVEQNB_Control(hEQNBInstance,
-                                     &EQNB_Params);
+        EQNB_Status = LVEQNB_Control(hEQNBInstance, &EQNB_Params);
 
         /*
          * Quit if the changes were not accepted
          */
-        if (EQNB_Status != LVEQNB_SUCCESS)
-        {
-            return((LVM_ReturnStatus_en)EQNB_Status);
+        if (EQNB_Status != LVEQNB_SUCCESS) {
+            return ((LVM_ReturnStatus_en)EQNB_Status);
         }
-
     }
 
     /*
      * Update concert sound
      */
     {
-        LVCS_ReturnStatus_en        CS_Status;
-        LVCS_Params_t               CS_Params;
-        LVCS_Handle_t               *hCSInstance = (LVCS_Handle_t *)pInstance->hCSInstance;
-        LVM_Mode_en                 CompressorMode=LVM_MODE_ON;
+        LVCS_ReturnStatus_en CS_Status;
+        LVCS_Params_t CS_Params;
+        LVCS_Handle_t* hCSInstance = (LVCS_Handle_t*)pInstance->hCSInstance;
+        LVM_Mode_en CompressorMode = LVM_MODE_ON;
 
         /*
          * Set the new parameters
          */
-        if(LocalParams.VirtualizerOperatingMode == LVM_MODE_ON)
-        {
-            CS_Params.OperatingMode    = LVCS_ON;
-        }
-        else
-        {
-            CS_Params.OperatingMode    = LVCS_OFF;
+        if (LocalParams.VirtualizerOperatingMode == LVM_MODE_ON) {
+            CS_Params.OperatingMode = LVCS_ON;
+        } else {
+            CS_Params.OperatingMode = LVCS_OFF;
         }
 
-        if((LocalParams.TE_OperatingMode == LVM_TE_ON) && (LocalParams.TE_EffectLevel == LVM_TE_LOW_MIPS))
-        {
-            CS_Params.SpeakerType  = LVCS_EX_HEADPHONES;
-        }
-        else
-        {
-            CS_Params.SpeakerType  = LVCS_HEADPHONES;
+        if ((LocalParams.TE_OperatingMode == LVM_TE_ON) &&
+            (LocalParams.TE_EffectLevel == LVM_TE_LOW_MIPS)) {
+            CS_Params.SpeakerType = LVCS_EX_HEADPHONES;
+        } else {
+            CS_Params.SpeakerType = LVCS_HEADPHONES;
         }
 
-#ifdef SUPPORT_MC
         /* Concert sound module processes only the left and right channels
          * data. So the Source Format is set to LVCS_STEREO for multichannel
          * input also.
          */
         if (LocalParams.SourceFormat == LVM_STEREO ||
-            LocalParams.SourceFormat == LVM_MULTICHANNEL)
-#else
-        if (LocalParams.SourceFormat == LVM_STEREO)    /* Mono format not supported */
-#endif
-        {
+            LocalParams.SourceFormat == LVM_MULTICHANNEL) {
             CS_Params.SourceFormat = LVCS_STEREO;
+        } else {
+            CS_Params.SourceFormat = LVCS_MONOINSTEREO; /* Force to Mono-in-Stereo mode */
         }
-        else
-        {
-            CS_Params.SourceFormat = LVCS_MONOINSTEREO;          /* Force to Mono-in-Stereo mode */
-        }
-        CS_Params.SampleRate  = LocalParams.SampleRate;
+        CS_Params.SampleRate = LocalParams.SampleRate;
         CS_Params.ReverbLevel = LocalParams.VirtualizerReverbLevel;
         CS_Params.EffectLevel = LocalParams.CS_EffectLevel;
-#ifdef SUPPORT_MC
-        CS_Params.NrChannels  = LocalParams.NrChannels;
-#endif
+        CS_Params.NrChannels = LocalParams.NrChannels;
 
         /*
          * Set the control flag
          */
         if (((LVM_Mode_en)LocalParams.OperatingMode == LVM_MODE_ON) &&
-            ((LVCS_Modes_en)LocalParams.VirtualizerOperatingMode != LVCS_OFF))
-        {
+            ((LVCS_Modes_en)LocalParams.VirtualizerOperatingMode != LVCS_OFF)) {
             pInstance->CS_Active = LVM_TRUE;
-        }
-        else
-        {
+        } else {
             CS_Params.OperatingMode = LVCS_OFF;
         }
 
-        CS_Params.CompressorMode=CompressorMode;
+        CS_Params.CompressorMode = CompressorMode;
 
         /*
          * Make the changes
          */
-        CS_Status = LVCS_Control(hCSInstance,
-                                 &CS_Params);
+        CS_Status = LVCS_Control(hCSInstance, &CS_Params);
 
         /*
          * Quit if the changes were not accepted
          */
-        if (CS_Status != LVCS_SUCCESS)
-        {
-            return((LVM_ReturnStatus_en)CS_Status);
+        if (CS_Status != LVCS_SUCCESS) {
+            return ((LVM_ReturnStatus_en)CS_Status);
         }
-
     }
 
     /*
      * Update the Power Spectrum Analyser
      */
     {
-        LVPSA_RETURN                PSA_Status;
-        LVPSA_ControlParams_t       PSA_Params;
-        pLVPSA_Handle_t             *hPSAInstance = (pLVPSA_Handle_t *)pInstance->hPSAInstance;
+        LVPSA_RETURN PSA_Status;
+        LVPSA_ControlParams_t PSA_Params;
+        pLVPSA_Handle_t* hPSAInstance = (pLVPSA_Handle_t*)pInstance->hPSAInstance;
 
         /*
          * Set the new parameters
@@ -838,23 +725,19 @@
         /*
          * Make the changes
          */
-        if(pInstance->InstParams.PSA_Included==LVM_PSA_ON)
-        {
-            PSA_Status = LVPSA_Control(hPSAInstance,
-                &PSA_Params);
+        if (pInstance->InstParams.PSA_Included == LVM_PSA_ON) {
+            PSA_Status = LVPSA_Control(hPSAInstance, &PSA_Params);
 
-            if (PSA_Status != LVPSA_OK)
-            {
-                return((LVM_ReturnStatus_en)PSA_Status);
+            if (PSA_Status != LVPSA_OK) {
+                return ((LVM_ReturnStatus_en)PSA_Status);
             }
 
             /*
              * Apply new settings
              */
-            PSA_Status = LVPSA_ApplyNewSettings ((LVPSA_InstancePr_t*)hPSAInstance);
-            if(PSA_Status != LVPSA_OK)
-            {
-                return((LVM_ReturnStatus_en)PSA_Status);
+            PSA_Status = LVPSA_ApplyNewSettings((LVPSA_InstancePr_t*)hPSAInstance);
+            if (PSA_Status != LVPSA_OK) {
+                return ((LVM_ReturnStatus_en)PSA_Status);
             }
         }
     }
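
Each block of LVM_ApplyNewSettings follows the same shape: translate the bundle parameters into the sub-module's own parameter struct (DBE, EQNB, CS and finally PSA above), call that module's Control function, and return immediately on failure with the module status cast to LVM_ReturnStatus_en. A compressed sketch of that pattern with a hypothetical module hook:

enum class ModuleStatus { kOk = 0, kOutOfRange };
enum class BundleStatus { kSuccess = 0, kOutOfRange };

/* Stand-in for LVDBE_Control / LVEQNB_Control / LVCS_Control / LVPSA_Control. */
ModuleStatus moduleControl(int /*params*/) { return ModuleStatus::kOk; }

BundleStatus applyOneModule(int params) {
    ModuleStatus status = moduleControl(params);
    if (status != ModuleStatus::kOk) {
        /* Propagate the failure, mirroring the casts to LVM_ReturnStatus_en above. */
        return static_cast<BundleStatus>(status);
    }
    return BundleStatus::kSuccess;
}
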
@@ -863,9 +746,9 @@
      * Update the parameters and clear the flag
      */
     pInstance->NoSmoothVolume = LVM_FALSE;
-    pInstance->Params =  LocalParams;
+    pInstance->Params = LocalParams;
 
-    return(LVM_SUCCESS);
+    return (LVM_SUCCESS);
 }
 
 /****************************************************************************************/
@@ -887,36 +770,30 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-LVM_ReturnStatus_en LVM_SetHeadroomParams(LVM_Handle_t              hInstance,
-                                          LVM_HeadroomParams_t      *pHeadroomParams)
-{
-    LVM_Instance_t      *pInstance =(LVM_Instance_t  *)hInstance;
-    LVM_UINT16          ii, NBands;
+LVM_ReturnStatus_en LVM_SetHeadroomParams(LVM_Handle_t hInstance,
+                                          LVM_HeadroomParams_t* pHeadroomParams) {
+    LVM_Instance_t* pInstance = (LVM_Instance_t*)hInstance;
+    LVM_UINT16 ii, NBands;
 
     /* Check for NULL pointers */
-    if ((hInstance == LVM_NULL) || (pHeadroomParams == LVM_NULL))
-    {
+    if ((hInstance == LVM_NULL) || (pHeadroomParams == LVM_NULL)) {
         return (LVM_NULLADDRESS);
     }
-    if ((pHeadroomParams->NHeadroomBands != 0) && (pHeadroomParams->pHeadroomDefinition == LVM_NULL))
-    {
+    if ((pHeadroomParams->NHeadroomBands != 0) &&
+        (pHeadroomParams->pHeadroomDefinition == LVM_NULL)) {
         return (LVM_NULLADDRESS);
     }
 
     /* Consider only the LVM_HEADROOM_MAX_NBANDS first bands*/
-    if (pHeadroomParams->NHeadroomBands > LVM_HEADROOM_MAX_NBANDS)
-    {
+    if (pHeadroomParams->NHeadroomBands > LVM_HEADROOM_MAX_NBANDS) {
         NBands = LVM_HEADROOM_MAX_NBANDS;
-    }
-    else
-    {
+    } else {
         NBands = pHeadroomParams->NHeadroomBands;
     }
     pInstance->NewHeadroomParams.NHeadroomBands = NBands;
 
     /* Copy settings in memory */
-    for(ii = 0; ii < NBands; ii++)
-    {
+    for (ii = 0; ii < NBands; ii++) {
         pInstance->pHeadroom_BandDefs[ii] = pHeadroomParams->pHeadroomDefinition[ii];
     }
 
@@ -924,7 +801,7 @@
     pInstance->NewHeadroomParams.Headroom_OperatingMode = pHeadroomParams->Headroom_OperatingMode;
     pInstance->ControlPending = LVM_TRUE;
 
-    return(LVM_SUCCESS);
+    return (LVM_SUCCESS);
 }
 
 /****************************************************************************************/
@@ -947,29 +824,26 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-LVM_ReturnStatus_en LVM_GetHeadroomParams(LVM_Handle_t          hInstance,
-                                          LVM_HeadroomParams_t  *pHeadroomParams)
-{
-    LVM_Instance_t      *pInstance =(LVM_Instance_t  *)hInstance;
-    LVM_UINT16          ii;
+LVM_ReturnStatus_en LVM_GetHeadroomParams(LVM_Handle_t hInstance,
+                                          LVM_HeadroomParams_t* pHeadroomParams) {
+    LVM_Instance_t* pInstance = (LVM_Instance_t*)hInstance;
+    LVM_UINT16 ii;
 
     /* Check for NULL pointers */
-    if ((hInstance == LVM_NULL) || (pHeadroomParams == LVM_NULL))
-    {
+    if ((hInstance == LVM_NULL) || (pHeadroomParams == LVM_NULL)) {
         return (LVM_NULLADDRESS);
     }
 
     pHeadroomParams->NHeadroomBands = pInstance->NewHeadroomParams.NHeadroomBands;
 
     /* Copy settings in memory */
-    for(ii = 0; ii < pInstance->NewHeadroomParams.NHeadroomBands; ii++)
-    {
+    for (ii = 0; ii < pInstance->NewHeadroomParams.NHeadroomBands; ii++) {
         pInstance->pHeadroom_UserDefs[ii] = pInstance->pHeadroom_BandDefs[ii];
     }
 
     pHeadroomParams->pHeadroomDefinition = pInstance->pHeadroom_UserDefs;
     pHeadroomParams->Headroom_OperatingMode = pInstance->NewHeadroomParams.Headroom_OperatingMode;
-    return(LVM_SUCCESS);
+    return (LVM_SUCCESS);
 }
 
 /****************************************************************************************/
@@ -988,18 +862,14 @@
 /*  1.  This function may be interrupted by the LVM_Process function                    */
 /*                                                                                      */
 /****************************************************************************************/
-LVM_INT32 LVM_AlgoCallBack( void          *pBundleHandle,
-                            void          *pData,
-                            LVM_INT16     callbackId)
-{
-    LVM_Instance_t      *pInstance =(LVM_Instance_t  *)pBundleHandle;
+LVM_INT32 LVM_AlgoCallBack(void* pBundleHandle, void* pData, LVM_INT16 callbackId) {
+    LVM_Instance_t* pInstance = (LVM_Instance_t*)pBundleHandle;
 
-    (void) pData;
+    (void)pData;
 
-    switch(callbackId & 0xFF00){
+    switch (callbackId & 0xFF00) {
         case ALGORITHM_CS_ID:
-            switch(callbackId & 0x00FF)
-            {
+            switch (callbackId & 0x00FF) {
                 case LVCS_EVENT_ALGOFF:
                     pInstance->CS_Active = LVM_FALSE;
                     break;
@@ -1008,8 +878,7 @@
             }
             break;
         case ALGORITHM_EQNB_ID:
-            switch(callbackId & 0x00FF)
-            {
+            switch (callbackId & 0x00FF) {
                 case LVEQNB_EVENT_ALGOFF:
                     pInstance->EQNB_Active = LVM_FALSE;
                     break;
@@ -1040,21 +909,17 @@
 /*  1.  This function may be interrupted by the LVM_Process function                    */
 /*                                                                                      */
 /****************************************************************************************/
-LVM_INT32    LVM_VCCallBack(void*   pBundleHandle,
-                            void*   pGeneralPurpose,
-                            short   CallBackParam)
-{
-    LVM_Instance_t *pInstance =(LVM_Instance_t  *)pBundleHandle;
-    LVM_FLOAT    Target;
+LVM_INT32 LVM_VCCallBack(void* pBundleHandle, void* pGeneralPurpose, short CallBackParam) {
+    LVM_Instance_t* pInstance = (LVM_Instance_t*)pBundleHandle;
+    LVM_FLOAT Target;
 
-    (void) pGeneralPurpose;
-    (void) CallBackParam;
+    (void)pGeneralPurpose;
+    (void)CallBackParam;
 
     /* When volume mixer has reached 0 dB target then stop it to avoid
        unnecessary processing. */
     Target = LVC_Mixer_GetTarget(&pInstance->VC_Volume.MixerStream[0]);
-    if(Target == 1.0f)
-    {
+    if (Target == 1.0f) {
         pInstance->VC_Active = LVM_FALSE;
     }
     return 1;
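
Both callbacks above hinge on a packed identifier: LVM_AlgoCallBack takes the owning algorithm from the high byte (callbackId & 0xFF00) and the event from the low byte (callbackId & 0x00FF). A decoding sketch, treating the constants as opaque values from the bundle headers:

    /* Sketch only: a sub-module signals that its processing has switched off.    */
    LVM_INT16 id = (LVM_INT16)(ALGORITHM_CS_ID | LVCS_EVENT_ALGOFF);
    if (((id & 0xFF00) == ALGORITHM_CS_ID) && ((id & 0x00FF) == LVCS_EVENT_ALGOFF)) {
        /* Routed to the ALGORITHM_CS_ID / LVCS_EVENT_ALGOFF branch above,        */
        /* which clears pInstance->CS_Active.                                     */
    }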
diff --git a/media/libeffects/lvm/lib/Bundle/src/LVM_Init.cpp b/media/libeffects/lvm/lib/Bundle/src/LVM_Init.cpp
index 5620529..bb962df 100644
--- a/media/libeffects/lvm/lib/Bundle/src/LVM_Init.cpp
+++ b/media/libeffects/lvm/lib/Bundle/src/LVM_Init.cpp
@@ -20,6 +20,7 @@
 /*  Includes                                                                        */
 /*                                                                                  */
 /************************************************************************************/
+#include <stdlib.h>
 
 #include "LVM_Private.h"
 #include "LVM_Tables.h"
@@ -28,570 +29,90 @@
 
 /****************************************************************************************/
 /*                                                                                      */
-/* FUNCTION:                LVM_GetMemoryTable                                          */
-/*                                                                                      */
-/* DESCRIPTION:                                                                         */
-/*  This function is used for memory allocation and free. It can be called in           */
-/*  two ways:                                                                           */
-/*                                                                                      */
-/*      hInstance = NULL                Returns the memory requirements                 */
-/*      hInstance = Instance handle     Returns the memory requirements and             */
-/*                                      allocated base addresses for the instance       */
-/*                                                                                      */
-/*  When this function is called for memory allocation (hInstance=NULL) the memory      */
-/*  base address pointers are NULL on return.                                           */
-/*                                                                                      */
-/*  When the function is called for free (hInstance = Instance Handle) the memory       */
-/*  table returns the allocated memory and base addresses used during initialisation.   */
-/*                                                                                      */
-/* PARAMETERS:                                                                          */
-/*  hInstance               Instance Handle                                             */
-/*  pMemoryTable            Pointer to an empty memory definition table                 */
-/*  pCapabilities           Pointer to the default capabilities                         */
-/*                                                                                      */
-/* RETURNS:                                                                             */
-/*  LVM_SUCCESS             Succeeded                                                   */
-/*  LVM_NULLADDRESS         When one of pMemoryTable or pInstParams is NULL             */
-/*  LVM_OUTOFRANGE          When any of the Instance parameters are out of range        */
-/*                                                                                      */
-/* NOTES:                                                                               */
-/*  1.  This function may be interrupted by the LVM_Process function                    */
-/*  2.  The scratch memory is the largest required by any of the sub-modules plus any   */
-/*      additional scratch requirements of the bundle                                   */
-/*                                                                                      */
-/****************************************************************************************/
-
-/*
- * 4 Types of Memory Regions of LVM
- * TODO: Allocate on the fly.
- * i)   LVM_MEMREGION_PERSISTENT_SLOW_DATA - For Instance Handles
- * ii)  LVM_MEMREGION_PERSISTENT_FAST_DATA - Persistent Buffers
- * iii) LVM_MEMREGION_PERSISTENT_FAST_COEF - For Holding Structure values
- * iv)  LVM_MEMREGION_TEMPORARY_FAST       - For Holding Structure values
- *
- * LVM_MEMREGION_PERSISTENT_SLOW_DATA:
- *   Total Memory size:
- *     sizeof(LVM_Instance_t) + \
- *     sizeof(LVM_Buffer_t) + \
- *     sizeof(LVPSA_InstancePr_t) + \
- *     sizeof(LVM_Buffer_t) - needed if buffer mode is LVM_MANAGED_BUFFER
- *
- * LVM_MEMREGION_PERSISTENT_FAST_DATA:
- *   Total Memory size:
- *     sizeof(LVM_TE_Data_t) + \
- *     2 * pInstParams->EQNB_NumBands * sizeof(LVM_EQNB_BandDef_t) + \
- *     sizeof(LVCS_Data_t) + \
- *     sizeof(LVDBE_Data_FLOAT_t) + \
- *     sizeof(Biquad_2I_Order2_FLOAT_Taps_t) + \
- *     sizeof(Biquad_2I_Order2_FLOAT_Taps_t) + \
- *     pInstParams->EQNB_NumBands * sizeof(Biquad_2I_Order2_FLOAT_Taps_t) + \
- *     pInstParams->EQNB_NumBands * sizeof(LVEQNB_BandDef_t) + \
- *     pInstParams->EQNB_NumBands * sizeof(LVEQNB_BiquadType_en) + \
- *     2 * LVM_HEADROOM_MAX_NBANDS * sizeof(LVM_HeadroomBandDef_t) + \
- *     PSA_InitParams.nBands * sizeof(Biquad_1I_Order2_Taps_t) + \
- *     PSA_InitParams.nBands * sizeof(QPD_Taps_t)
- *
- * LVM_MEMREGION_PERSISTENT_FAST_COEF:
- *   Total Memory size:
- *     sizeof(LVM_TE_Coefs_t) + \
- *     sizeof(LVCS_Coefficient_t) + \
- *     sizeof(LVDBE_Coef_FLOAT_t) + \
- *     sizeof(Biquad_FLOAT_Instance_t) + \
- *     sizeof(Biquad_FLOAT_Instance_t) + \
- *     pInstParams->EQNB_NumBands * sizeof(Biquad_FLOAT_Instance_t) + \
- *     PSA_InitParams.nBands * sizeof(Biquad_Instance_t) + \
- *     PSA_InitParams.nBands * sizeof(QPD_State_t)
- *
- * LVM_MEMREGION_TEMPORARY_FAST (Scratch):
- *   Total Memory Size:
- *     BundleScratchSize + \
- *     MAX_INTERNAL_BLOCKSIZE * sizeof(LVM_FLOAT) + \
- *     MaxScratchOf (CS, EQNB, DBE, PSA)
- *
- *     a)BundleScratchSize:
- *         3 * LVM_MAX_CHANNELS \
- *         * (MIN_INTERNAL_BLOCKSIZE + InternalBlockSize) * sizeof(LVM_FLOAT)
- *       This Memory is allocated only when Buffer mode is LVM_MANAGED_BUFFER.
- *     b)MaxScratchOf (CS, EQNB, DBE, PSA)
- *       This Memory is needed for scratch usage for CS, EQNB, DBE, PSA.
- *       CS   = (LVCS_SCRATCHBUFFERS * sizeof(LVM_FLOAT)
- *               * pCapabilities->MaxBlockSize)
- *       EQNB = (LVEQNB_SCRATCHBUFFERS * sizeof(LVM_FLOAT)
- *               * pCapabilities->MaxBlockSize)
- *       DBE  = (LVDBE_SCRATCHBUFFERS_INPLACE*sizeof(LVM_FLOAT)
- *               * pCapabilities->MaxBlockSize)
- *       PSA  = (2 * pInitParams->MaxInputBlockSize * sizeof(LVM_FLOAT))
- *              one MaxInputBlockSize for input and another for filter output
- *     c)MAX_INTERNAL_BLOCKSIZE
- *       This Memory is needed for PSAInput - Temp memory to store output
- *       from McToMono block and given as input to PSA block
- */
-
-LVM_ReturnStatus_en LVM_GetMemoryTable(LVM_Handle_t         hInstance,
-                                       LVM_MemTab_t         *pMemoryTable,
-                                       LVM_InstParams_t     *pInstParams)
-{
-
-    LVM_Instance_t      *pInstance = (LVM_Instance_t *)hInstance;
-    LVM_UINT32          AlgScratchSize;
-    LVM_UINT32          BundleScratchSize;
-    LVM_UINT16          InternalBlockSize;
-    INST_ALLOC          AllocMem[LVM_NR_MEMORY_REGIONS];
-    LVM_INT16           i;
-
-    /*
-     * Check parameters
-     */
-    if(pMemoryTable == LVM_NULL)
-    {
-        return LVM_NULLADDRESS;
-    }
-
-    /*
-     * Return memory table if the instance has already been created
-     */
-    if (hInstance != LVM_NULL)
-    {
-       /* Read back memory allocation table */
-        *pMemoryTable = pInstance->MemoryTable;
-        return(LVM_SUCCESS);
-    }
-
-    if(pInstParams == LVM_NULL)
-    {
-        return LVM_NULLADDRESS;
-    }
-
-    /*
-     *  Power Spectrum Analyser
-     */
-    if(pInstParams->PSA_Included > LVM_PSA_ON)
-    {
-        return (LVM_OUTOFRANGE);
-    }
-
-    /*
-     * Check the instance parameters
-     */
-    if( (pInstParams->BufferMode != LVM_MANAGED_BUFFERS) && (pInstParams->BufferMode != LVM_UNMANAGED_BUFFERS) )
-    {
-        return (LVM_OUTOFRANGE);
-    }
-
-    /* N-Band Equalizer */
-    if( pInstParams->EQNB_NumBands > 32 )
-    {
-        return (LVM_OUTOFRANGE);
-    }
-
-    if(pInstParams->BufferMode == LVM_MANAGED_BUFFERS)
-    {
-        if( (pInstParams->MaxBlockSize < LVM_MIN_MAXBLOCKSIZE ) || (pInstParams->MaxBlockSize > LVM_MANAGED_MAX_MAXBLOCKSIZE ) )
-        {
-            return (LVM_OUTOFRANGE);
-        }
-    }
-    else
-    {
-        if( (pInstParams->MaxBlockSize < LVM_MIN_MAXBLOCKSIZE ) || (pInstParams->MaxBlockSize > LVM_UNMANAGED_MAX_MAXBLOCKSIZE) )
-        {
-            return (LVM_OUTOFRANGE);
-        }
-    }
-
-    /*
-    * Initialise the AllocMem structures
-    */
-    for (i=0; i<LVM_NR_MEMORY_REGIONS; i++)
-    {
-        InstAlloc_Init(&AllocMem[i], LVM_NULL);
-    }
-    InternalBlockSize = (LVM_UINT16)((pInstParams->MaxBlockSize) & MIN_INTERNAL_BLOCKMASK); /* Force to a multiple of MIN_INTERNAL_BLOCKSIZE */
-
-    if (InternalBlockSize < MIN_INTERNAL_BLOCKSIZE)
-    {
-        InternalBlockSize = MIN_INTERNAL_BLOCKSIZE;
-    }
-
-    /* Maximum Internal Black Size should not be more than MAX_INTERNAL_BLOCKSIZE*/
-    if(InternalBlockSize > MAX_INTERNAL_BLOCKSIZE)
-    {
-        InternalBlockSize = MAX_INTERNAL_BLOCKSIZE;
-    }
-
-    /*
-    * Bundle requirements
-    */
-    InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_SLOW_DATA],
-        sizeof(LVM_Instance_t));
-
-    /*
-     * Set the algorithm and bundle scratch requirements
-     */
-    AlgScratchSize    = 0;
-    if (pInstParams->BufferMode == LVM_MANAGED_BUFFERS)
-    {
-        BundleScratchSize = 3 * LVM_MAX_CHANNELS \
-                            * (MIN_INTERNAL_BLOCKSIZE + InternalBlockSize) \
-                            * sizeof(LVM_FLOAT);
-        InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_TEMPORARY_FAST],        /* Scratch buffer */
-                            BundleScratchSize);
-        InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_SLOW_DATA],
-                            sizeof(LVM_Buffer_t));
-    }
-
-    /*
-     * Treble Enhancement requirements
-     */
-    InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_DATA],
-                        sizeof(LVM_TE_Data_t));
-    InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_COEF],
-                        sizeof(LVM_TE_Coefs_t));
-
-    /*
-     * N-Band Equalizer requirements
-     */
-    InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_DATA],      /* Local storage */
-                        (pInstParams->EQNB_NumBands * sizeof(LVM_EQNB_BandDef_t)));
-    InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_DATA],      /* User storage */
-                        (pInstParams->EQNB_NumBands * sizeof(LVM_EQNB_BandDef_t)));
-
-    /*
-     * Concert Sound requirements
-     */
-    {
-        LVCS_MemTab_t           CS_MemTab;
-        LVCS_Capabilities_t     CS_Capabilities;
-
-        /*
-         * Set the capabilities
-         */
-        CS_Capabilities.MaxBlockSize     = InternalBlockSize;
-
-        /*
-         * Get the memory requirements
-         */
-        LVCS_Memory(LVM_NULL,
-                    &CS_MemTab,
-                    &CS_Capabilities);
-
-        /*
-         * Update the memory allocation structures
-         */
-        InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_DATA],
-                            CS_MemTab.Region[LVM_MEMREGION_PERSISTENT_FAST_DATA].Size);
-        InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_COEF],
-                            CS_MemTab.Region[LVM_MEMREGION_PERSISTENT_FAST_COEF].Size);
-        if (CS_MemTab.Region[LVM_MEMREGION_TEMPORARY_FAST].Size > AlgScratchSize) AlgScratchSize = CS_MemTab.Region[LVM_MEMREGION_TEMPORARY_FAST].Size;
-
-    }
-
-    /*
-     * Dynamic Bass Enhancement requirements
-     */
-    {
-        LVDBE_MemTab_t          DBE_MemTab;
-        LVDBE_Capabilities_t    DBE_Capabilities;
-
-        /*
-         * Set the capabilities
-         */
-        DBE_Capabilities.SampleRate      = LVDBE_CAP_FS_8000 | LVDBE_CAP_FS_11025 |
-                                           LVDBE_CAP_FS_12000 | LVDBE_CAP_FS_16000 |
-                                           LVDBE_CAP_FS_22050 | LVDBE_CAP_FS_24000 |
-                                           LVDBE_CAP_FS_32000 | LVDBE_CAP_FS_44100 |
-                                           LVDBE_CAP_FS_48000 | LVDBE_CAP_FS_88200 |
-                                           LVDBE_CAP_FS_96000 | LVDBE_CAP_FS_176400 |
-                                           LVDBE_CAP_FS_192000;
-        DBE_Capabilities.CentreFrequency = LVDBE_CAP_CENTRE_55Hz | LVDBE_CAP_CENTRE_55Hz | LVDBE_CAP_CENTRE_66Hz | LVDBE_CAP_CENTRE_78Hz | LVDBE_CAP_CENTRE_90Hz;
-        DBE_Capabilities.MaxBlockSize    = InternalBlockSize;
-
-        /*
-         * Get the memory requirements
-         */
-        LVDBE_Memory(LVM_NULL,
-                    &DBE_MemTab,
-
-                    &DBE_Capabilities);
-        /*
-         * Update the bundle table
-         */
-        InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_DATA],
-                            DBE_MemTab.Region[LVM_MEMREGION_PERSISTENT_FAST_DATA].Size);
-        InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_COEF],
-                            DBE_MemTab.Region[LVM_MEMREGION_PERSISTENT_FAST_COEF].Size);
-        if (DBE_MemTab.Region[LVM_MEMREGION_TEMPORARY_FAST].Size > AlgScratchSize) AlgScratchSize = DBE_MemTab.Region[LVM_MEMREGION_TEMPORARY_FAST].Size;
-
-    }
-
-    /*
-     * N-Band equaliser requirements
-     */
-    {
-        LVEQNB_MemTab_t         EQNB_MemTab;            /* For N-Band Equaliser */
-        LVEQNB_Capabilities_t   EQNB_Capabilities;
-
-        /*
-         * Set the capabilities
-         */
-        EQNB_Capabilities.SampleRate   = LVEQNB_CAP_FS_8000 | LVEQNB_CAP_FS_11025 |
-                                         LVEQNB_CAP_FS_12000 | LVEQNB_CAP_FS_16000 |
-                                         LVEQNB_CAP_FS_22050 | LVEQNB_CAP_FS_24000 |
-                                         LVEQNB_CAP_FS_32000 | LVEQNB_CAP_FS_44100 |
-                                         LVEQNB_CAP_FS_48000 | LVEQNB_CAP_FS_88200 |
-                                         LVEQNB_CAP_FS_96000 | LVEQNB_CAP_FS_176400 |
-                                         LVEQNB_CAP_FS_192000;
-        EQNB_Capabilities.SourceFormat = LVEQNB_CAP_STEREO | LVEQNB_CAP_MONOINSTEREO;
-        EQNB_Capabilities.MaxBlockSize = InternalBlockSize;
-        EQNB_Capabilities.MaxBands     = pInstParams->EQNB_NumBands;
-
-        /*
-         * Get the memory requirements
-         */
-        LVEQNB_Memory(LVM_NULL,
-                      &EQNB_MemTab,
-                      &EQNB_Capabilities);
-
-        /*
-         * Update the bundle table
-         */
-        InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_DATA],
-                            EQNB_MemTab.Region[LVM_MEMREGION_PERSISTENT_FAST_DATA].Size);
-        InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_COEF],
-                            EQNB_MemTab.Region[LVM_MEMREGION_PERSISTENT_FAST_COEF].Size);
-        if (EQNB_MemTab.Region[LVM_MEMREGION_TEMPORARY_FAST].Size > AlgScratchSize) AlgScratchSize = EQNB_MemTab.Region[LVM_MEMREGION_TEMPORARY_FAST].Size;
-
-    }
-
-    /*
-     * Headroom management memory allocation
-     */
-    InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_DATA],
-                       (LVM_HEADROOM_MAX_NBANDS * sizeof(LVM_HeadroomBandDef_t)));
-    InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_DATA],
-                       (LVM_HEADROOM_MAX_NBANDS * sizeof(LVM_HeadroomBandDef_t)));
-
-    /*
-     * Spectrum Analyzer memory requirements
-     */
-    {
-        pLVPSA_Handle_t     hPSAInst = LVM_NULL;
-        LVPSA_MemTab_t      PSA_MemTab;
-        LVPSA_InitParams_t  PSA_InitParams;
-        LVPSA_FilterParam_t FiltersParams[9];
-        LVPSA_RETURN        PSA_Status;
-
-        if(pInstParams->PSA_Included == LVM_PSA_ON)
-        {
-            PSA_InitParams.SpectralDataBufferDuration   = (LVM_UINT16) 500;
-            PSA_InitParams.MaxInputBlockSize            = (LVM_UINT16) 1000;
-            PSA_InitParams.nBands                       = (LVM_UINT16) 9;
-
-            PSA_InitParams.pFiltersParams = &FiltersParams[0];
-            for(i = 0; i < PSA_InitParams.nBands; i++)
-            {
-                FiltersParams[i].CenterFrequency    = (LVM_UINT16) 1000;
-                FiltersParams[i].QFactor            = (LVM_UINT16) 25;
-                FiltersParams[i].PostGain           = (LVM_INT16)  0;
-            }
-
-            /*
-            * Get the memory requirements
-            */
-            PSA_Status = LVPSA_Memory (hPSAInst,
-                                        &PSA_MemTab,
-                                        &PSA_InitParams);
-
-            if (PSA_Status != LVPSA_OK)
-            {
-                return((LVM_ReturnStatus_en) LVM_ALGORITHMPSA);
-            }
-
-            /*
-            * Update the bundle table
-            */
-            /* Slow Data */
-            InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_SLOW_DATA],
-                PSA_MemTab.Region[LVM_PERSISTENT_SLOW_DATA].Size);
-
-            /* Fast Data */
-            InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_DATA],
-                PSA_MemTab.Region[LVM_PERSISTENT_FAST_DATA].Size);
-
-            /* Fast Coef */
-            InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_COEF],
-                PSA_MemTab.Region[LVM_PERSISTENT_FAST_COEF].Size);
-
-            /* Fast Temporary */
-            InstAlloc_AddMember(&AllocMem[LVM_TEMPORARY_FAST],
-                                MAX_INTERNAL_BLOCKSIZE * sizeof(LVM_FLOAT));
-
-            if (PSA_MemTab.Region[LVM_TEMPORARY_FAST].Size > AlgScratchSize)
-            {
-                AlgScratchSize = PSA_MemTab.Region[LVM_TEMPORARY_FAST].Size;
-            }
-        }
-    }
-
-    /*
-     * Return the memory table
-     */
-    pMemoryTable->Region[LVM_MEMREGION_PERSISTENT_SLOW_DATA].Size         = InstAlloc_GetTotal(&AllocMem[LVM_MEMREGION_PERSISTENT_SLOW_DATA]);
-    pMemoryTable->Region[LVM_MEMREGION_PERSISTENT_SLOW_DATA].Type         = LVM_PERSISTENT_SLOW_DATA;
-    pMemoryTable->Region[LVM_MEMREGION_PERSISTENT_SLOW_DATA].pBaseAddress = LVM_NULL;
-
-    pMemoryTable->Region[LVM_MEMREGION_PERSISTENT_FAST_DATA].Size         = InstAlloc_GetTotal(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_DATA]);
-    pMemoryTable->Region[LVM_MEMREGION_PERSISTENT_FAST_DATA].Type         = LVM_PERSISTENT_FAST_DATA;
-    pMemoryTable->Region[LVM_MEMREGION_PERSISTENT_FAST_DATA].pBaseAddress = LVM_NULL;
-    if (pMemoryTable->Region[LVM_MEMREGION_PERSISTENT_FAST_DATA].Size < 4)
-    {
-        pMemoryTable->Region[LVM_MEMREGION_PERSISTENT_FAST_DATA].Size = 0;
-    }
-
-    pMemoryTable->Region[LVM_MEMREGION_PERSISTENT_FAST_COEF].Size         = InstAlloc_GetTotal(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_COEF]);
-    pMemoryTable->Region[LVM_MEMREGION_PERSISTENT_FAST_COEF].Type         = LVM_PERSISTENT_FAST_COEF;
-    pMemoryTable->Region[LVM_MEMREGION_PERSISTENT_FAST_COEF].pBaseAddress = LVM_NULL;
-    if (pMemoryTable->Region[LVM_MEMREGION_PERSISTENT_FAST_COEF].Size < 4)
-    {
-        pMemoryTable->Region[LVM_MEMREGION_PERSISTENT_FAST_COEF].Size = 0;
-    }
-
-    InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_TEMPORARY_FAST],
-                        AlgScratchSize);
-    pMemoryTable->Region[LVM_MEMREGION_TEMPORARY_FAST].Size             = InstAlloc_GetTotal(&AllocMem[LVM_MEMREGION_TEMPORARY_FAST]);
-    pMemoryTable->Region[LVM_MEMREGION_TEMPORARY_FAST].Type             = LVM_TEMPORARY_FAST;
-    pMemoryTable->Region[LVM_MEMREGION_TEMPORARY_FAST].pBaseAddress     = LVM_NULL;
-    if (pMemoryTable->Region[LVM_MEMREGION_TEMPORARY_FAST].Size < 4)
-    {
-        pMemoryTable->Region[LVM_MEMREGION_TEMPORARY_FAST].Size = 0;
-    }
-
-    return(LVM_SUCCESS);
-
-}
-
-/****************************************************************************************/
-/*                                                                                      */
 /* FUNCTION:                LVM_GetInstanceHandle                                       */
 /*                                                                                      */
 /* DESCRIPTION:                                                                         */
-/*  This function is used to create a bundle instance. It returns the created instance  */
-/*  handle through phInstance. All parameters are set to their default, inactive state. */
+/*  This function is used to create a bundle instance.                                  */
+/*  All parameters are set to their default, inactive state.                            */
 /*                                                                                      */
 /* PARAMETERS:                                                                          */
-/*  phInstance              pointer to the instance handle                              */
-/*  pMemoryTable            Pointer to the memory definition table                      */
-/*  pInstParams             Pointer to the initialisation capabilities                  */
+/*  phInstance              Pointer to the instance handle                              */
+/*  pInstParams             Pointer to the instance parameters                          */
 /*                                                                                      */
 /* RETURNS:                                                                             */
 /*  LVM_SUCCESS             Initialisation succeeded                                    */
+/*  LVM_NULLADDRESS         A pointer argument is NULL or a memory allocation failed    */
 /*  LVM_OUTOFRANGE          When any of the Instance parameters are out of range        */
-/*  LVM_NULLADDRESS         When one of phInstance, pMemoryTable or pInstParams are NULL*/
 /*                                                                                      */
 /* NOTES:                                                                               */
 /*  1. This function must not be interrupted by the LVM_Process function                */
 /*                                                                                      */
 /****************************************************************************************/
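
With the memory-table plumbing removed by this patch, creating a bundle instance reduces to filling in LVM_InstParams_t and calling the two-argument LVM_GetInstanceHandle(). A minimal caller-side sketch; the parameter values are illustrative but lie inside the ranges checked by the function below:

    LVM_Handle_t hInstance = LVM_NULL;
    LVM_InstParams_t instParams = {};                /* only the checked fields are shown  */

    instParams.BufferMode    = LVM_MANAGED_BUFFERS;
    instParams.MaxBlockSize  = LVM_MIN_MAXBLOCKSIZE; /* any value within the checked range */
    instParams.EQNB_NumBands = 5;                    /* must not exceed 32                 */
    instParams.PSA_Included  = LVM_PSA_ON;

    LVM_ReturnStatus_en status = LVM_GetInstanceHandle(&hInstance, &instParams);
    if (status != LVM_SUCCESS) {
        /* LVM_NULLADDRESS (bad pointer or failed allocation) or LVM_OUTOFRANGE */
    }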
-
-LVM_ReturnStatus_en LVM_GetInstanceHandle(LVM_Handle_t           *phInstance,
-                                          LVM_MemTab_t           *pMemoryTable,
-                                          LVM_InstParams_t       *pInstParams)
-{
-
-    LVM_ReturnStatus_en     Status = LVM_SUCCESS;
-    LVM_Instance_t          *pInstance;
-    INST_ALLOC              AllocMem[LVM_NR_MEMORY_REGIONS];
-    LVM_INT16               i;
-    LVM_UINT16              InternalBlockSize;
-    LVM_INT32               BundleScratchSize;
+LVM_ReturnStatus_en LVM_GetInstanceHandle(LVM_Handle_t* phInstance, LVM_InstParams_t* pInstParams) {
+    LVM_ReturnStatus_en Status = LVM_SUCCESS;
+    LVM_Instance_t* pInstance;
+    LVM_INT16 i;
+    LVM_UINT16 InternalBlockSize;
+    LVM_INT32 BundleScratchSize;
 
     /*
      * Check valid points have been given
      */
-    if ((phInstance == LVM_NULL) || (pMemoryTable == LVM_NULL) || (pInstParams == LVM_NULL))
-    {
+    if ((phInstance == LVM_NULL) || (pInstParams == LVM_NULL)) {
         return (LVM_NULLADDRESS);
     }
 
     /*
-     * Check the memory table for NULL pointers
-     */
-    for (i=0; i<LVM_NR_MEMORY_REGIONS; i++)
-    {
-        if ((pMemoryTable->Region[i].Size != 0) &&
-            (pMemoryTable->Region[i].pBaseAddress==LVM_NULL))
-        {
-            return(LVM_NULLADDRESS);
-        }
-    }
-
-    /*
      * Check the instance parameters
      */
-    if( (pInstParams->BufferMode != LVM_MANAGED_BUFFERS) && (pInstParams->BufferMode != LVM_UNMANAGED_BUFFERS) )
-    {
+    if ((pInstParams->BufferMode != LVM_MANAGED_BUFFERS) &&
+        (pInstParams->BufferMode != LVM_UNMANAGED_BUFFERS)) {
         return (LVM_OUTOFRANGE);
     }
 
-    if( pInstParams->EQNB_NumBands > 32 )
-    {
+    if (pInstParams->EQNB_NumBands > 32) {
         return (LVM_OUTOFRANGE);
     }
 
-    if(pInstParams->BufferMode == LVM_MANAGED_BUFFERS)
-    {
-        if( (pInstParams->MaxBlockSize < LVM_MIN_MAXBLOCKSIZE ) || (pInstParams->MaxBlockSize > LVM_MANAGED_MAX_MAXBLOCKSIZE ) )
-        {
+    if (pInstParams->BufferMode == LVM_MANAGED_BUFFERS) {
+        if ((pInstParams->MaxBlockSize < LVM_MIN_MAXBLOCKSIZE) ||
+            (pInstParams->MaxBlockSize > LVM_MANAGED_MAX_MAXBLOCKSIZE)) {
             return (LVM_OUTOFRANGE);
         }
-    }
-    else
-    {
-        if( (pInstParams->MaxBlockSize < LVM_MIN_MAXBLOCKSIZE ) || (pInstParams->MaxBlockSize > LVM_UNMANAGED_MAX_MAXBLOCKSIZE) )
-        {
+    } else {
+        if ((pInstParams->MaxBlockSize < LVM_MIN_MAXBLOCKSIZE) ||
+            (pInstParams->MaxBlockSize > LVM_UNMANAGED_MAX_MAXBLOCKSIZE)) {
             return (LVM_OUTOFRANGE);
         }
     }
 
-    if(pInstParams->PSA_Included > LVM_PSA_ON)
-    {
+    if (pInstParams->PSA_Included > LVM_PSA_ON) {
         return (LVM_OUTOFRANGE);
     }
 
     /*
-     * Initialise the AllocMem structures
+     * Create the instance handle
      */
-    for (i=0; i<LVM_NR_MEMORY_REGIONS; i++)
-    {
-        InstAlloc_Init(&AllocMem[i],
-                       pMemoryTable->Region[i].pBaseAddress);
+    *phInstance = (LVM_Handle_t)calloc(1, sizeof(*pInstance));
+    if (*phInstance == LVM_NULL) {
+        return LVM_NULLADDRESS;
     }
+    pInstance = (LVM_Instance_t*)*phInstance;
+
+    pInstance->InstParams = *pInstParams;
 
     /*
-     * Set the instance handle
+     * Create the bundle scratch memory and initialise the buffer management
      */
-    *phInstance  = (LVM_Handle_t)InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_SLOW_DATA],
-                                                     sizeof(LVM_Instance_t));
-    pInstance =(LVM_Instance_t  *)*phInstance;
-
-    /*
-     * Save the memory table, parameters and capabilities
-     */
-    pInstance->MemoryTable    = *pMemoryTable;
-    pInstance->InstParams     = *pInstParams;
-
-    /*
-     * Set the bundle scratch memory and initialse the buffer management
-     */
-    InternalBlockSize = (LVM_UINT16)((pInstParams->MaxBlockSize) & MIN_INTERNAL_BLOCKMASK); /* Force to a multiple of MIN_INTERNAL_BLOCKSIZE */
-    if (InternalBlockSize < MIN_INTERNAL_BLOCKSIZE)
-    {
+    InternalBlockSize = (LVM_UINT16)(
+            (pInstParams->MaxBlockSize) &
+            MIN_INTERNAL_BLOCKMASK); /* Force to a multiple of MIN_INTERNAL_BLOCKSIZE */
+    if (InternalBlockSize < MIN_INTERNAL_BLOCKSIZE) {
         InternalBlockSize = MIN_INTERNAL_BLOCKSIZE;
     }
 
     /* Maximum Internal Block Size should not be more than MAX_INTERNAL_BLOCKSIZE */
-    if(InternalBlockSize > MAX_INTERNAL_BLOCKSIZE)
-    {
+    if (InternalBlockSize > MAX_INTERNAL_BLOCKSIZE) {
         InternalBlockSize = MAX_INTERNAL_BLOCKSIZE;
     }
     pInstance->InternalBlockSize = (LVM_INT16)InternalBlockSize;
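
The masking and clamping above force the caller's MaxBlockSize onto a multiple of MIN_INTERNAL_BLOCKSIZE within the [MIN_INTERNAL_BLOCKSIZE, MAX_INTERNAL_BLOCKSIZE] window. A worked example, assuming for illustration MIN_INTERNAL_BLOCKSIZE = 16 (so MIN_INTERNAL_BLOCKMASK clears the low four bits) and MAX_INTERNAL_BLOCKSIZE = 8192; the real constants come from the bundle's private headers:

    LVM_UINT16 requested = 1000;                         /* caller's MaxBlockSize   */
    LVM_UINT16 block = (LVM_UINT16)(requested & 0xFFF0); /* 992, a multiple of 16   */
    if (block < 16) block = 16;                          /* never below the minimum */
    if (block > 8192) block = 8192;                      /* never above the maximum */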
@@ -599,40 +120,46 @@
     /*
      * Common settings for managed and unmanaged buffers
      */
-    pInstance->SamplesToProcess = 0;                /* No samples left to process */
-    if (pInstParams->BufferMode == LVM_MANAGED_BUFFERS)
-    {
+    pInstance->SamplesToProcess = 0; /* No samples left to process */
+    BundleScratchSize =
+            (LVM_INT32)(3 * LVM_MAX_CHANNELS * (MIN_INTERNAL_BLOCKSIZE + InternalBlockSize) *
+                        sizeof(LVM_FLOAT));
+    pInstance->pScratch = calloc(1, BundleScratchSize);
+    if (pInstance->pScratch == LVM_NULL) {
+        return LVM_NULLADDRESS;
+    }
+
+    if (pInstParams->BufferMode == LVM_MANAGED_BUFFERS) {
         /*
          * Managed buffers required
          */
-        pInstance->pBufferManagement = (LVM_Buffer_t *)
-            InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_SLOW_DATA],
-                                                           sizeof(LVM_Buffer_t));
-        BundleScratchSize = (LVM_INT32)
-                            (3 * LVM_MAX_CHANNELS \
-                             * (MIN_INTERNAL_BLOCKSIZE + InternalBlockSize) \
-                             * sizeof(LVM_FLOAT));
-        pInstance->pBufferManagement->pScratch = (LVM_FLOAT *)
-            InstAlloc_AddMember(
-                         &AllocMem[LVM_MEMREGION_TEMPORARY_FAST], /* Scratch 1 buffer */
-                                                  (LVM_UINT32)BundleScratchSize);
-        LoadConst_Float(0,                                   /* Clear the input delay buffer */
-                        (LVM_FLOAT *)&pInstance->pBufferManagement->InDelayBuffer,
+        pInstance->pBufferManagement =
+                (LVM_Buffer_t*)calloc(1, sizeof(*(pInstance->pBufferManagement)));
+        if (pInstance->pBufferManagement == LVM_NULL) {
+            return LVM_NULLADDRESS;
+        }
+
+        pInstance->pBufferManagement->pScratch = (LVM_FLOAT*)pInstance->pScratch;
+
+        LoadConst_Float(0, /* Clear the input delay buffer */
+                        (LVM_FLOAT*)&pInstance->pBufferManagement->InDelayBuffer,
                         (LVM_INT16)(LVM_MAX_CHANNELS * MIN_INTERNAL_BLOCKSIZE));
-        pInstance->pBufferManagement->InDelaySamples = MIN_INTERNAL_BLOCKSIZE; /* Set the number of delay samples */
-        pInstance->pBufferManagement->OutDelaySamples = 0;                     /* No samples in the output buffer */
-        pInstance->pBufferManagement->BufferState = LVM_FIRSTCALL;             /* Set the state ready for the first call */
+        pInstance->pBufferManagement->InDelaySamples =
+                MIN_INTERNAL_BLOCKSIZE;                    /* Set the number of delay samples */
+        pInstance->pBufferManagement->OutDelaySamples = 0; /* No samples in the output buffer */
+        pInstance->pBufferManagement->BufferState =
+                LVM_FIRSTCALL; /* Set the state ready for the first call */
     }
 
     /*
      * Set default parameters
      */
-    pInstance->Params.OperatingMode    = LVM_MODE_OFF;
-    pInstance->Params.SampleRate       = LVM_FS_8000;
-    pInstance->Params.SourceFormat     = LVM_MONO;
-    pInstance->Params.SpeakerType      = LVM_HEADPHONES;
-    pInstance->Params.VC_EffectLevel   = 0;
-    pInstance->Params.VC_Balance       = 0;
+    pInstance->Params.OperatingMode = LVM_MODE_OFF;
+    pInstance->Params.SampleRate = LVM_FS_8000;
+    pInstance->Params.SourceFormat = LVM_MONO;
+    pInstance->Params.SpeakerType = LVM_HEADPHONES;
+    pInstance->Params.VC_EffectLevel = 0;
+    pInstance->Params.VC_Balance = 0;
 
     /*
      * Set callback
@@ -642,338 +169,265 @@
     /*
      * DC removal filter
      */
-#ifdef SUPPORT_MC
     DC_Mc_D16_TRC_WRA_01_Init(&pInstance->DC_RemovalInstance);
-#else
-    DC_2I_D16_TRC_WRA_01_Init(&pInstance->DC_RemovalInstance);
-#endif
 
     /*
      * Treble Enhancement
      */
-    pInstance->pTE_Taps  = (LVM_TE_Data_t *)InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_DATA],
-                                                                sizeof(LVM_TE_Data_t));
-
-    pInstance->pTE_State = (LVM_TE_Coefs_t *)InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_COEF],
-                                                                 sizeof(LVM_TE_Coefs_t));
+    pInstance->pTE_Taps = (LVM_TE_Data_t*)calloc(1, sizeof(*(pInstance->pTE_Taps)));
+    if (pInstance->pTE_Taps == LVM_NULL) {
+        return LVM_NULLADDRESS;
+    }
+    pInstance->pTE_State = (LVM_TE_Coefs_t*)calloc(1, sizeof(*(pInstance->pTE_State)));
+    if (pInstance->pTE_State == LVM_NULL) {
+        return LVM_NULLADDRESS;
+    }
     pInstance->Params.TE_OperatingMode = LVM_TE_OFF;
-    pInstance->Params.TE_EffectLevel   = 0;
-    pInstance->TE_Active               = LVM_FALSE;
+    pInstance->Params.TE_EffectLevel = 0;
+    pInstance->TE_Active = LVM_FALSE;
 
     /*
      * Set the volume control and initialise Current to Target
      */
-    pInstance->VC_Volume.MixerStream[0].CallbackParam      = 0;
-    pInstance->VC_Volume.MixerStream[0].CallbackSet        = 0;
-    pInstance->VC_Volume.MixerStream[0].pCallbackHandle    = pInstance;
-    pInstance->VC_Volume.MixerStream[0].pCallBack          = LVM_VCCallBack;
+    pInstance->VC_Volume.MixerStream[0].CallbackParam = 0;
+    pInstance->VC_Volume.MixerStream[0].CallbackSet = 0;
+    pInstance->VC_Volume.MixerStream[0].pCallbackHandle = pInstance;
+    pInstance->VC_Volume.MixerStream[0].pCallBack = LVM_VCCallBack;
 
-    /* In managed buffering, start with low signal level as delay in buffer management causes a click*/
-    if (pInstParams->BufferMode == LVM_MANAGED_BUFFERS)
-    {
+    /* In managed buffering, start with a low signal level because the delay in buffer
+     * management causes a click */
+    if (pInstParams->BufferMode == LVM_MANAGED_BUFFERS) {
         LVC_Mixer_Init(&pInstance->VC_Volume.MixerStream[0], 0, 0);
-    }
-    else
-    {
+    } else {
         LVC_Mixer_Init(&pInstance->VC_Volume.MixerStream[0], LVM_MAXFLOAT, LVM_MAXFLOAT);
     }
 
-    LVC_Mixer_SetTimeConstant(&pInstance->VC_Volume.MixerStream[0],0,LVM_FS_8000,2);
+    LVC_Mixer_SetTimeConstant(&pInstance->VC_Volume.MixerStream[0], 0, LVM_FS_8000, 2);
 
-    pInstance->VC_VolumedB                  = 0;
-    pInstance->VC_AVLFixedVolume            = 0;
-    pInstance->VC_Active                    = LVM_FALSE;
+    pInstance->VC_VolumedB = 0;
+    pInstance->VC_AVLFixedVolume = 0;
+    pInstance->VC_Active = LVM_FALSE;
 
-    pInstance->VC_BalanceMix.MixerStream[0].CallbackParam      = 0;
-    pInstance->VC_BalanceMix.MixerStream[0].CallbackSet        = 0;
-    pInstance->VC_BalanceMix.MixerStream[0].pCallbackHandle    = pInstance;
-    pInstance->VC_BalanceMix.MixerStream[0].pCallBack          = LVM_VCCallBack;
+    pInstance->VC_BalanceMix.MixerStream[0].CallbackParam = 0;
+    pInstance->VC_BalanceMix.MixerStream[0].CallbackSet = 0;
+    pInstance->VC_BalanceMix.MixerStream[0].pCallbackHandle = pInstance;
+    pInstance->VC_BalanceMix.MixerStream[0].pCallBack = LVM_VCCallBack;
     LVC_Mixer_Init(&pInstance->VC_BalanceMix.MixerStream[0], LVM_MAXFLOAT, LVM_MAXFLOAT);
-    LVC_Mixer_VarSlope_SetTimeConstant(&pInstance->VC_BalanceMix.MixerStream[0],LVM_VC_MIXER_TIME,LVM_FS_8000,2);
+    LVC_Mixer_VarSlope_SetTimeConstant(&pInstance->VC_BalanceMix.MixerStream[0], LVM_VC_MIXER_TIME,
+                                       LVM_FS_8000, 2);
 
-    pInstance->VC_BalanceMix.MixerStream[1].CallbackParam      = 0;
-    pInstance->VC_BalanceMix.MixerStream[1].CallbackSet        = 0;
-    pInstance->VC_BalanceMix.MixerStream[1].pCallbackHandle    = pInstance;
-    pInstance->VC_BalanceMix.MixerStream[1].pCallBack          = LVM_VCCallBack;
+    pInstance->VC_BalanceMix.MixerStream[1].CallbackParam = 0;
+    pInstance->VC_BalanceMix.MixerStream[1].CallbackSet = 0;
+    pInstance->VC_BalanceMix.MixerStream[1].pCallbackHandle = pInstance;
+    pInstance->VC_BalanceMix.MixerStream[1].pCallBack = LVM_VCCallBack;
     LVC_Mixer_Init(&pInstance->VC_BalanceMix.MixerStream[1], LVM_MAXFLOAT, LVM_MAXFLOAT);
-    LVC_Mixer_VarSlope_SetTimeConstant(&pInstance->VC_BalanceMix.MixerStream[1],LVM_VC_MIXER_TIME,LVM_FS_8000,2);
+    LVC_Mixer_VarSlope_SetTimeConstant(&pInstance->VC_BalanceMix.MixerStream[1], LVM_VC_MIXER_TIME,
+                                       LVM_FS_8000, 2);
 
     /*
-     * Set the default EQNB pre-gain and pointer to the band definitions
+     * Allocate the default EQNB band definition and user definition arrays
      */
-    pInstance->pEQNB_BandDefs =
-        (LVM_EQNB_BandDef_t *)InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_DATA],
-                                   (pInstParams->EQNB_NumBands * sizeof(LVM_EQNB_BandDef_t)));
-    pInstance->pEQNB_UserDefs =
-        (LVM_EQNB_BandDef_t *)InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_DATA],
-                                   (pInstParams->EQNB_NumBands * sizeof(LVM_EQNB_BandDef_t)));
+    pInstance->pEQNB_BandDefs = (LVM_EQNB_BandDef_t*)calloc(pInstParams->EQNB_NumBands,
+                                                            sizeof(*(pInstance->pEQNB_BandDefs)));
+    if (pInstance->pEQNB_BandDefs == LVM_NULL) {
+        return LVM_NULLADDRESS;
+    }
+    pInstance->pEQNB_UserDefs = (LVM_EQNB_BandDef_t*)calloc(pInstParams->EQNB_NumBands,
+                                                            sizeof(*(pInstance->pEQNB_UserDefs)));
+    if (pInstance->pEQNB_UserDefs == LVM_NULL) {
+        return LVM_NULLADDRESS;
+    }
 
     /*
      * Initialise the Concert Sound module
      */
     {
-        LVCS_Handle_t           hCSInstance;                /* Instance handle */
-        LVCS_MemTab_t           CS_MemTab;                  /* Memory table */
-        LVCS_Capabilities_t     CS_Capabilities;            /* Initial capabilities */
-        LVCS_ReturnStatus_en    LVCS_Status;                /* Function call status */
+        LVCS_Handle_t hCSInstance;           /* Instance handle */
+        LVCS_Capabilities_t CS_Capabilities; /* Initial capabilities */
+        LVCS_ReturnStatus_en LVCS_Status;    /* Function call status */
 
         /*
          * Set default parameters
          */
-        pInstance->Params.VirtualizerReverbLevel    = 100;
-        pInstance->Params.VirtualizerType           = LVM_CONCERTSOUND;
-        pInstance->Params.VirtualizerOperatingMode  = LVM_MODE_OFF;
-        pInstance->CS_Active                        = LVM_FALSE;
+        pInstance->Params.VirtualizerReverbLevel = 100;
+        pInstance->Params.VirtualizerType = LVM_CONCERTSOUND;
+        pInstance->Params.VirtualizerOperatingMode = LVM_MODE_OFF;
+        pInstance->CS_Active = LVM_FALSE;
 
         /*
          * Set the initialisation capabilities
          */
-        CS_Capabilities.MaxBlockSize    = (LVM_UINT16)InternalBlockSize;
+        CS_Capabilities.MaxBlockSize = (LVM_UINT16)InternalBlockSize;
         CS_Capabilities.CallBack = pInstance->CallBack;
         CS_Capabilities.pBundleInstance = (void*)pInstance;
 
         /*
-         * Get the memory requirements and then set the address pointers, forcing alignment
-         */
-        LVCS_Status = LVCS_Memory(LVM_NULL,                /* Get the memory requirements */
-                                  &CS_MemTab,
-                                  &CS_Capabilities);
-        CS_MemTab.Region[LVCS_MEMREGION_PERSISTENT_SLOW_DATA].pBaseAddress = &pInstance->CS_Instance;
-        CS_MemTab.Region[LVCS_MEMREGION_PERSISTENT_FAST_DATA].pBaseAddress = (void *)InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_DATA],
-                                                                                                         CS_MemTab.Region[LVCS_MEMREGION_PERSISTENT_FAST_DATA].Size);
-        CS_MemTab.Region[LVCS_MEMREGION_PERSISTENT_FAST_COEF].pBaseAddress = (void *)InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_COEF],
-                                                                                                         CS_MemTab.Region[LVCS_MEMREGION_PERSISTENT_FAST_COEF].Size);
-        CS_MemTab.Region[LVCS_MEMREGION_TEMPORARY_FAST].pBaseAddress       = (void *)InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_TEMPORARY_FAST],
-                                                                                                         0);
-
-        /*
          * Initialise the Concert Sound instance and save the instance handle
          */
-        hCSInstance = LVM_NULL;                            /* Set to NULL to return handle */
-        LVCS_Status = LVCS_Init(&hCSInstance,              /* Initiailse */
-                                &CS_MemTab,
-                                &CS_Capabilities);
-        if (LVCS_Status != LVCS_SUCCESS) return((LVM_ReturnStatus_en)LVCS_Status);
-        pInstance->hCSInstance = hCSInstance;              /* Save the instance handle */
-
+        hCSInstance = LVM_NULL;               /* Set to NULL to return handle */
+        LVCS_Status = LVCS_Init(&hCSInstance, /* Create and initialise */
+                                &CS_Capabilities, pInstance->pScratch);
+        if (LVCS_Status != LVCS_SUCCESS) return ((LVM_ReturnStatus_en)LVCS_Status);
+        pInstance->hCSInstance = hCSInstance; /* Save the instance handle */
     }
 
     /*
      * Initialise the Bass Enhancement module
      */
     {
-        LVDBE_Handle_t          hDBEInstance;               /* Instance handle */
-        LVDBE_MemTab_t          DBE_MemTab;                 /* Memory table */
-        LVDBE_Capabilities_t    DBE_Capabilities;           /* Initial capabilities */
-        LVDBE_ReturnStatus_en   LVDBE_Status;               /* Function call status */
+        LVDBE_Handle_t hDBEInstance;           /* Instance handle */
+        LVDBE_Capabilities_t DBE_Capabilities; /* Initial capabilities */
+        LVDBE_ReturnStatus_en LVDBE_Status;    /* Function call status */
 
         /*
          * Set the initialisation parameters
          */
         pInstance->Params.BE_OperatingMode = LVM_BE_OFF;
-        pInstance->Params.BE_CentreFreq    = LVM_BE_CENTRE_55Hz;
-        pInstance->Params.BE_EffectLevel   = 0;
-        pInstance->Params.BE_HPF           = LVM_BE_HPF_OFF;
+        pInstance->Params.BE_CentreFreq = LVM_BE_CENTRE_55Hz;
+        pInstance->Params.BE_EffectLevel = 0;
+        pInstance->Params.BE_HPF = LVM_BE_HPF_OFF;
 
-        pInstance->DBE_Active              = LVM_FALSE;
+        pInstance->DBE_Active = LVM_FALSE;
 
         /*
          * Set the initialisation capabilities
          */
-        DBE_Capabilities.SampleRate      = LVDBE_CAP_FS_8000 | LVDBE_CAP_FS_11025 |
-                                           LVDBE_CAP_FS_12000 | LVDBE_CAP_FS_16000 |
-                                           LVDBE_CAP_FS_22050 | LVDBE_CAP_FS_24000 |
-                                           LVDBE_CAP_FS_32000 | LVDBE_CAP_FS_44100 |
-                                           LVDBE_CAP_FS_48000 | LVDBE_CAP_FS_88200 |
-                                           LVDBE_CAP_FS_96000 | LVDBE_CAP_FS_176400 |
-                                           LVDBE_CAP_FS_192000;
-        DBE_Capabilities.CentreFrequency = LVDBE_CAP_CENTRE_55Hz | LVDBE_CAP_CENTRE_55Hz | LVDBE_CAP_CENTRE_66Hz | LVDBE_CAP_CENTRE_78Hz | LVDBE_CAP_CENTRE_90Hz;
-        DBE_Capabilities.MaxBlockSize    = (LVM_UINT16)InternalBlockSize;
+        DBE_Capabilities.SampleRate = LVDBE_CAP_FS_8000 | LVDBE_CAP_FS_11025 | LVDBE_CAP_FS_12000 |
+                                      LVDBE_CAP_FS_16000 | LVDBE_CAP_FS_22050 | LVDBE_CAP_FS_24000 |
+                                      LVDBE_CAP_FS_32000 | LVDBE_CAP_FS_44100 | LVDBE_CAP_FS_48000 |
+                                      LVDBE_CAP_FS_88200 | LVDBE_CAP_FS_96000 |
+                                      LVDBE_CAP_FS_176400 | LVDBE_CAP_FS_192000;
 
-        /*
-         * Get the memory requirements and then set the address pointers
-         */
-        LVDBE_Status = LVDBE_Memory(LVM_NULL,               /* Get the memory requirements */
-                                    &DBE_MemTab,
-                                    &DBE_Capabilities);
-        DBE_MemTab.Region[LVDBE_MEMREGION_INSTANCE].pBaseAddress        = &pInstance->DBE_Instance;
-        DBE_MemTab.Region[LVDBE_MEMREGION_PERSISTENT_DATA].pBaseAddress = (void *)InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_DATA],
-                                                                                                      DBE_MemTab.Region[LVDBE_MEMREGION_PERSISTENT_DATA].Size);
-        DBE_MemTab.Region[LVDBE_MEMREGION_PERSISTENT_COEF].pBaseAddress = (void *)InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_COEF],
-                                                                                                      DBE_MemTab.Region[LVDBE_MEMREGION_PERSISTENT_COEF].Size);
-        DBE_MemTab.Region[LVDBE_MEMREGION_SCRATCH].pBaseAddress         = (void *)InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_TEMPORARY_FAST],
-                                                                                                      0);
+        DBE_Capabilities.CentreFrequency = LVDBE_CAP_CENTRE_55Hz | LVDBE_CAP_CENTRE_66Hz |
+                                           LVDBE_CAP_CENTRE_78Hz | LVDBE_CAP_CENTRE_90Hz;
+        DBE_Capabilities.MaxBlockSize = (LVM_UINT16)InternalBlockSize;
 
         /*
          * Initialise the Dynamic Bass Enhancement instance and save the instance handle
          */
-        hDBEInstance = LVM_NULL;                            /* Set to NULL to return handle */
-        LVDBE_Status = LVDBE_Init(&hDBEInstance,            /* Initiailse */
-                                  &DBE_MemTab,
-                                  &DBE_Capabilities);
-        if (LVDBE_Status != LVDBE_SUCCESS) return((LVM_ReturnStatus_en)LVDBE_Status);
-        pInstance->hDBEInstance = hDBEInstance;             /* Save the instance handle */
+        hDBEInstance = LVM_NULL;                 /* Set to NULL to return handle */
+        LVDBE_Status = LVDBE_Init(&hDBEInstance, /* Create and initialise */
+                                  &DBE_Capabilities, pInstance->pScratch);
+        if (LVDBE_Status != LVDBE_SUCCESS) return ((LVM_ReturnStatus_en)LVDBE_Status);
+        pInstance->hDBEInstance = hDBEInstance; /* Save the instance handle */
     }
 
     /*
      * Initialise the N-Band Equaliser module
      */
     {
-        LVEQNB_Handle_t          hEQNBInstance;             /* Instance handle */
-        LVEQNB_MemTab_t          EQNB_MemTab;               /* Memory table */
-        LVEQNB_Capabilities_t    EQNB_Capabilities;         /* Initial capabilities */
-        LVEQNB_ReturnStatus_en   LVEQNB_Status;             /* Function call status */
+        LVEQNB_Handle_t hEQNBInstance;           /* Instance handle */
+        LVEQNB_Capabilities_t EQNB_Capabilities; /* Initial capabilities */
+        LVEQNB_ReturnStatus_en LVEQNB_Status;    /* Function call status */
 
         /*
          * Set the initialisation parameters
          */
-        pInstance->Params.EQNB_OperatingMode   = LVM_EQNB_OFF;
-        pInstance->Params.EQNB_NBands          = 0;
+        pInstance->Params.EQNB_OperatingMode = LVM_EQNB_OFF;
+        pInstance->Params.EQNB_NBands = 0;
         pInstance->Params.pEQNB_BandDefinition = LVM_NULL;
-        pInstance->EQNB_Active                 = LVM_FALSE;
+        pInstance->EQNB_Active = LVM_FALSE;
 
         /*
          * Set the initialisation capabilities
          */
-        EQNB_Capabilities.SampleRate      = LVEQNB_CAP_FS_8000 | LVEQNB_CAP_FS_11025 |
-                                            LVEQNB_CAP_FS_12000 | LVEQNB_CAP_FS_16000 |
-                                            LVEQNB_CAP_FS_22050 | LVEQNB_CAP_FS_24000 |
-                                            LVEQNB_CAP_FS_32000 | LVEQNB_CAP_FS_44100 |
-                                            LVEQNB_CAP_FS_48000 | LVEQNB_CAP_FS_88200 |
-                                            LVEQNB_CAP_FS_96000 | LVEQNB_CAP_FS_176400 |
-                                            LVEQNB_CAP_FS_192000;
-        EQNB_Capabilities.MaxBlockSize    = (LVM_UINT16)InternalBlockSize;
-        EQNB_Capabilities.MaxBands        = pInstParams->EQNB_NumBands;
-        EQNB_Capabilities.SourceFormat    = LVEQNB_CAP_STEREO | LVEQNB_CAP_MONOINSTEREO;
-        EQNB_Capabilities.CallBack        = pInstance->CallBack;
-        EQNB_Capabilities.pBundleInstance  = (void*)pInstance;
+        EQNB_Capabilities.SampleRate =
+                LVEQNB_CAP_FS_8000 | LVEQNB_CAP_FS_11025 | LVEQNB_CAP_FS_12000 |
+                LVEQNB_CAP_FS_16000 | LVEQNB_CAP_FS_22050 | LVEQNB_CAP_FS_24000 |
+                LVEQNB_CAP_FS_32000 | LVEQNB_CAP_FS_44100 | LVEQNB_CAP_FS_48000 |
+                LVEQNB_CAP_FS_88200 | LVEQNB_CAP_FS_96000 | LVEQNB_CAP_FS_176400 |
+                LVEQNB_CAP_FS_192000;
 
-        /*
-         * Get the memory requirements and then set the address pointers, forcing alignment
-         */
-        LVEQNB_Status = LVEQNB_Memory(LVM_NULL,             /* Get the memory requirements */
-                                      &EQNB_MemTab,
-                                      &EQNB_Capabilities);
-        EQNB_MemTab.Region[LVEQNB_MEMREGION_INSTANCE].pBaseAddress        = &pInstance->EQNB_Instance;
-        EQNB_MemTab.Region[LVEQNB_MEMREGION_PERSISTENT_DATA].pBaseAddress = (void *)InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_DATA],
-                                                                                                        EQNB_MemTab.Region[LVEQNB_MEMREGION_PERSISTENT_DATA].Size);
-        EQNB_MemTab.Region[LVEQNB_MEMREGION_PERSISTENT_COEF].pBaseAddress = (void *)InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_COEF],
-                                                                                                        EQNB_MemTab.Region[LVEQNB_MEMREGION_PERSISTENT_COEF].Size);
-        EQNB_MemTab.Region[LVEQNB_MEMREGION_SCRATCH].pBaseAddress         = (void *)InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_TEMPORARY_FAST],
-                                                                                                        0);
+        EQNB_Capabilities.MaxBlockSize = (LVM_UINT16)InternalBlockSize;
+        EQNB_Capabilities.MaxBands = pInstParams->EQNB_NumBands;
+        EQNB_Capabilities.SourceFormat = LVEQNB_CAP_STEREO | LVEQNB_CAP_MONOINSTEREO;
+        EQNB_Capabilities.CallBack = pInstance->CallBack;
+        EQNB_Capabilities.pBundleInstance = (void*)pInstance;
 
         /*
          * Initialise the N-Band Equaliser instance and save the instance handle
          */
-        hEQNBInstance = LVM_NULL;                           /* Set to NULL to return handle */
-        LVEQNB_Status = LVEQNB_Init(&hEQNBInstance,         /* Initiailse */
-                                    &EQNB_MemTab,
-                                    &EQNB_Capabilities);
-        if (LVEQNB_Status != LVEQNB_SUCCESS) return((LVM_ReturnStatus_en)LVEQNB_Status);
-        pInstance->hEQNBInstance = hEQNBInstance;           /* Save the instance handle */
+        hEQNBInstance = LVM_NULL;                   /* Set to NULL to return handle */
+        LVEQNB_Status = LVEQNB_Init(&hEQNBInstance, /* Create and initialise */
+                                    &EQNB_Capabilities, pInstance->pScratch);
+        if (LVEQNB_Status != LVEQNB_SUCCESS) return ((LVM_ReturnStatus_en)LVEQNB_Status);
+        pInstance->hEQNBInstance = hEQNBInstance; /* Save the instance handle */
     }
 
     /*
      * Headroom management memory allocation
      */
     {
-        pInstance->pHeadroom_BandDefs = (LVM_HeadroomBandDef_t *)
-              InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_DATA],
-                                       (LVM_HEADROOM_MAX_NBANDS * sizeof(LVM_HeadroomBandDef_t)));
-        pInstance->pHeadroom_UserDefs = (LVM_HeadroomBandDef_t *)
-              InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_DATA],
-                                       (LVM_HEADROOM_MAX_NBANDS * sizeof(LVM_HeadroomBandDef_t)));
+        pInstance->pHeadroom_BandDefs = (LVM_HeadroomBandDef_t*)calloc(
+                LVM_HEADROOM_MAX_NBANDS, sizeof(*(pInstance->pHeadroom_BandDefs)));
+        if (pInstance->pHeadroom_BandDefs == LVM_NULL) {
+            return LVM_NULLADDRESS;
+        }
+        pInstance->pHeadroom_UserDefs = (LVM_HeadroomBandDef_t*)calloc(
+                LVM_HEADROOM_MAX_NBANDS, sizeof(*(pInstance->pHeadroom_UserDefs)));
+        if (pInstance->pHeadroom_UserDefs == LVM_NULL) {
+            return LVM_NULLADDRESS;
+        }
 
         /* Headroom management parameters initialisation */
         pInstance->NewHeadroomParams.NHeadroomBands = 2;
         pInstance->NewHeadroomParams.pHeadroomDefinition = pInstance->pHeadroom_BandDefs;
-        pInstance->NewHeadroomParams.pHeadroomDefinition[0].Limit_Low          = 20;
-        pInstance->NewHeadroomParams.pHeadroomDefinition[0].Limit_High         = 4999;
-        pInstance->NewHeadroomParams.pHeadroomDefinition[0].Headroom_Offset    = 3;
-        pInstance->NewHeadroomParams.pHeadroomDefinition[1].Limit_Low          = 5000;
-        pInstance->NewHeadroomParams.pHeadroomDefinition[1].Limit_High         = 24000;
-        pInstance->NewHeadroomParams.pHeadroomDefinition[1].Headroom_Offset    = 4;
+        pInstance->NewHeadroomParams.pHeadroomDefinition[0].Limit_Low = 20;
+        pInstance->NewHeadroomParams.pHeadroomDefinition[0].Limit_High = 4999;
+        pInstance->NewHeadroomParams.pHeadroomDefinition[0].Headroom_Offset = 3;
+        pInstance->NewHeadroomParams.pHeadroomDefinition[1].Limit_Low = 5000;
+        pInstance->NewHeadroomParams.pHeadroomDefinition[1].Limit_High = 24000;
+        pInstance->NewHeadroomParams.pHeadroomDefinition[1].Headroom_Offset = 4;
         pInstance->NewHeadroomParams.Headroom_OperatingMode = LVM_HEADROOM_ON;
 
-        pInstance->Headroom =0;
+        pInstance->Headroom = 0;
     }
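
For reference, these are the values a caller observes through LVM_GetHeadroomParams() until an explicit LVM_SetHeadroomParams() call replaces them; an illustrative read-back, with hInstance being the handle returned by LVM_GetInstanceHandle():

    LVM_HeadroomParams_t defaults;
    if (LVM_GetHeadroomParams(hInstance, &defaults) == LVM_SUCCESS) {
        /* defaults.NHeadroomBands == 2, Headroom_OperatingMode == LVM_HEADROOM_ON,      */
        /* band 0: 20..4999 with Headroom_Offset 3, band 1: 5000..24000 with offset 4.   */
    }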
 
     /*
      * Initialise the PSA module
      */
     {
-        pLVPSA_Handle_t     hPSAInstance = LVM_NULL;   /* Instance handle */
-        LVPSA_MemTab_t      PSA_MemTab;
-        LVPSA_RETURN        PSA_Status;                 /* Function call status */
+        pLVPSA_Handle_t hPSAInstance = LVM_NULL; /* Instance handle */
+        LVPSA_RETURN PSA_Status;                 /* Function call status */
         LVPSA_FilterParam_t FiltersParams[9];
 
-        if(pInstParams->PSA_Included==LVM_PSA_ON)
-        {
-            pInstance->PSA_InitParams.SpectralDataBufferDuration   = (LVM_UINT16) 500;
-            pInstance->PSA_InitParams.MaxInputBlockSize            = (LVM_UINT16) 2048;
-            pInstance->PSA_InitParams.nBands                       = (LVM_UINT16) 9;
-            pInstance->PSA_InitParams.pFiltersParams               = &FiltersParams[0];
-            for(i = 0; i < pInstance->PSA_InitParams.nBands; i++)
-            {
-                FiltersParams[i].CenterFrequency    = (LVM_UINT16) 1000;
-                FiltersParams[i].QFactor            = (LVM_UINT16) 100;
-                FiltersParams[i].PostGain           = (LVM_INT16)  0;
+        if (pInstParams->PSA_Included == LVM_PSA_ON) {
+            pInstance->PSA_InitParams.SpectralDataBufferDuration = (LVM_UINT16)500;
+            pInstance->PSA_InitParams.MaxInputBlockSize = (LVM_UINT16)2048;
+            pInstance->PSA_InitParams.nBands = (LVM_UINT16)9;
+            pInstance->PSA_InitParams.pFiltersParams = &FiltersParams[0];
+            for (i = 0; i < pInstance->PSA_InitParams.nBands; i++) {
+                FiltersParams[i].CenterFrequency = (LVM_UINT16)1000;
+                FiltersParams[i].QFactor = (LVM_UINT16)100;
+                FiltersParams[i].PostGain = (LVM_INT16)0;
             }
 
-            /*Get the memory requirements and then set the address pointers*/
-            PSA_Status = LVPSA_Memory (hPSAInstance,
-                                          &PSA_MemTab,
-                                          &pInstance->PSA_InitParams);
-
-            if (PSA_Status != LVPSA_OK)
-            {
-                return((LVM_ReturnStatus_en) LVM_ALGORITHMPSA);
-            }
-
-            /* Slow Data */
-            PSA_MemTab.Region[LVM_PERSISTENT_SLOW_DATA].pBaseAddress = (void *)InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_SLOW_DATA],
-                PSA_MemTab.Region[LVM_PERSISTENT_SLOW_DATA].Size);
-
-            /* Fast Data */
-            PSA_MemTab.Region[LVM_PERSISTENT_FAST_DATA].pBaseAddress = (void *)InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_DATA],
-                PSA_MemTab.Region[LVM_PERSISTENT_FAST_DATA].Size);
-
-            /* Fast Coef */
-            PSA_MemTab.Region[LVM_PERSISTENT_FAST_COEF].pBaseAddress = (void *)InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_COEF],
-                PSA_MemTab.Region[LVM_PERSISTENT_FAST_COEF].Size);
-
-            /* Fast Temporary */
-            pInstance->pPSAInput = (LVM_FLOAT *)InstAlloc_AddMember(&AllocMem[LVM_TEMPORARY_FAST],
-                                                       (LVM_UINT32) MAX_INTERNAL_BLOCKSIZE * \
-                                                       sizeof(LVM_FLOAT));
-            PSA_MemTab.Region[LVM_TEMPORARY_FAST].pBaseAddress       = (void *)InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_TEMPORARY_FAST],0);
-
             /*Initialise PSA instance and save the instance handle*/
             pInstance->PSA_ControlParams.Fs = LVM_FS_48000;
-            pInstance->PSA_ControlParams.LevelDetectionSpeed  = LVPSA_SPEED_MEDIUM;
-            PSA_Status = LVPSA_Init (&hPSAInstance,
-                                    &pInstance->PSA_InitParams,
-                                    &pInstance->PSA_ControlParams,
-                                    &PSA_MemTab);
+            pInstance->PSA_ControlParams.LevelDetectionSpeed = LVPSA_SPEED_MEDIUM;
+            pInstance->pPSAInput = (LVM_FLOAT*)calloc(MAX_INTERNAL_BLOCKSIZE, sizeof(LVM_FLOAT));
+            if (pInstance->pPSAInput == LVM_NULL) {
+                return LVM_NULLADDRESS;
+            }
+            PSA_Status = LVPSA_Init(&hPSAInstance, &pInstance->PSA_InitParams,
+                                    &pInstance->PSA_ControlParams, pInstance->pScratch);
 
-            if (PSA_Status != LVPSA_OK)
-            {
-                return((LVM_ReturnStatus_en) LVM_ALGORITHMPSA);
+            if (PSA_Status != LVPSA_OK) {
+                return ((LVM_ReturnStatus_en)LVM_ALGORITHMPSA);
             }
 
-            pInstance->hPSAInstance = hPSAInstance;       /* Save the instance handle */
+            pInstance->hPSAInstance = hPSAInstance; /* Save the instance handle */
             pInstance->PSA_GainOffset = 0;
-        }
-        else
-        {
+        } else {
             pInstance->hPSAInstance = LVM_NULL;
         }
 
         /*
          * Set the initialisation parameters.
          */
-        pInstance->Params.PSA_PeakDecayRate   = LVM_PSA_SPEED_MEDIUM;
-        pInstance->Params.PSA_Enable          = LVM_PSA_OFF;
+        pInstance->Params.PSA_PeakDecayRate = LVM_PSA_SPEED_MEDIUM;
+        pInstance->Params.PSA_Enable = LVM_PSA_OFF;
     }
 
     /*
@@ -992,20 +446,121 @@
     pInstance->ConfigurationNumber += LVM_VC_MASK;
     pInstance->ConfigurationNumber += LVM_PSA_MASK;
 
-    if(((pInstance->ConfigurationNumber  & LVM_CS_MASK)!=0)  ||
-        ((pInstance->ConfigurationNumber & LVM_DBE_MASK)!=0) ||
-        ((pInstance->ConfigurationNumber & LVM_EQNB_MASK)!=0)||
-        ((pInstance->ConfigurationNumber & LVM_TE_MASK)!=0)  ||
-        ((pInstance->ConfigurationNumber & LVM_VC_MASK)!=0))
-    {
-        pInstance->BlickSizeMultiple    = 4;
-    }
-    else
-    {
-        pInstance->BlickSizeMultiple    = 1;
+    if (((pInstance->ConfigurationNumber & LVM_CS_MASK) != 0) ||
+        ((pInstance->ConfigurationNumber & LVM_DBE_MASK) != 0) ||
+        ((pInstance->ConfigurationNumber & LVM_EQNB_MASK) != 0) ||
+        ((pInstance->ConfigurationNumber & LVM_TE_MASK) != 0) ||
+        ((pInstance->ConfigurationNumber & LVM_VC_MASK) != 0)) {
+        pInstance->BlickSizeMultiple = 4;
+    } else {
+        pInstance->BlickSizeMultiple = 1;
     }
 
-    return(Status);
+    return (Status);
+}
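
The two default headroom bands configured above split the spectrum at 5 kHz (20 Hz to 4999 Hz with offset 3, 5000 Hz to 24000 Hz with offset 4). A caller that wants a different split can supply its own band table through LVM_SetHeadroomParams. The following is only a minimal sketch using the field names shown in this diff; the band limits are illustrative and hInstance is a hypothetical, already-created bundle handle:

    /* Sketch only: illustrative band limits, hypothetical hInstance handle. */
    LVM_HeadroomBandDef_t bands[2];
    bands[0].Limit_Low = 20;        /* low band: 20 Hz .. 7999 Hz  */
    bands[0].Limit_High = 7999;
    bands[0].Headroom_Offset = 3;
    bands[1].Limit_Low = 8000;      /* high band: 8 kHz .. 24 kHz  */
    bands[1].Limit_High = 24000;
    bands[1].Headroom_Offset = 4;

    LVM_HeadroomParams_t headroom;
    headroom.NHeadroomBands = 2;
    headroom.pHeadroomDefinition = bands;
    headroom.Headroom_OperatingMode = LVM_HEADROOM_ON;
    (void)LVM_SetHeadroomParams(hInstance, &headroom);
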
+/****************************************************************************************/
+/*                                                                                      */
+/* FUNCTION:                LVM_DelInstanceHandle                                       */
+/*                                                                                      */
+/* DESCRIPTION:                                                                         */
+/*  This function is used to free the memory and de-initialise the sub-modules of a     */
+/*  bundle instance previously created with LVM_GetInstanceHandle. The instance handle  */
+/*  referenced through phInstance must not be used after this call.                     */
+/*                                                                                      */
+/* PARAMETERS:                                                                          */
+/*  phInstance              Pointer to the instance handle                              */
+/*                                                                                      */
+/* NOTES:                                                                               */
+/*  1. This function must not be interrupted by the LVM_Process function                */
+/*                                                                                      */
+/****************************************************************************************/
+void LVM_DelInstanceHandle(LVM_Handle_t* phInstance) {
+    LVM_Instance_t* pInstance = (LVM_Instance_t*)*phInstance;
+
+    if (pInstance->pScratch != LVM_NULL) {
+        free(pInstance->pScratch);
+        pInstance->pScratch = LVM_NULL;
+    }
+
+    if (pInstance->InstParams.BufferMode == LVM_MANAGED_BUFFERS) {
+        /*
+         * Managed buffers required
+         */
+        if (pInstance->pBufferManagement != LVM_NULL) {
+            free(pInstance->pBufferManagement);
+            pInstance->pBufferManagement = LVM_NULL;
+        }
+    }
+
+    /*
+     * Treble Enhancement
+     */
+    if (pInstance->pTE_Taps != LVM_NULL) {
+        free(pInstance->pTE_Taps);
+        pInstance->pTE_Taps = LVM_NULL;
+    }
+    if (pInstance->pTE_State != LVM_NULL) {
+        free(pInstance->pTE_State);
+        pInstance->pTE_State = LVM_NULL;
+    }
+
+    /*
+     * Free the default EQNB pre-gain and pointer to the band definitions
+     */
+    if (pInstance->pEQNB_BandDefs != LVM_NULL) {
+        free(pInstance->pEQNB_BandDefs);
+        pInstance->pEQNB_BandDefs = LVM_NULL;
+    }
+    if (pInstance->pEQNB_UserDefs != LVM_NULL) {
+        free(pInstance->pEQNB_UserDefs);
+        pInstance->pEQNB_UserDefs = LVM_NULL;
+    }
+
+    /*
+     * De-initialise the Concert Sound module
+     */
+    if (pInstance->hCSInstance != LVM_NULL) {
+        LVCS_DeInit(&pInstance->hCSInstance);
+    }
+
+    /*
+     * De-initialise the Bass Enhancement module
+     */
+    if (pInstance->hDBEInstance != LVM_NULL) {
+        LVDBE_DeInit(&pInstance->hDBEInstance);
+    }
+
+    /*
+     * De-initialise the N-Band Equaliser module
+     */
+    if (pInstance->hEQNBInstance != LVM_NULL) {
+        LVEQNB_DeInit(&pInstance->hEQNBInstance);
+    }
+
+    /*
+     * Free Headroom management memory.
+     */
+    if (pInstance->pHeadroom_BandDefs != LVM_NULL) {
+        free(pInstance->pHeadroom_BandDefs);
+        pInstance->pHeadroom_BandDefs = LVM_NULL;
+    }
+    if (pInstance->pHeadroom_UserDefs != LVM_NULL) {
+        free(pInstance->pHeadroom_UserDefs);
+        pInstance->pHeadroom_UserDefs = LVM_NULL;
+    }
+
+    /*
+     * De-initialise the PSA module
+     */
+    if (pInstance->hPSAInstance != LVM_NULL) {
+        LVPSA_DeInit(&pInstance->hPSAInstance);
+    }
+    if (pInstance->pPSAInput != LVM_NULL) {
+        free(pInstance->pPSAInput);
+        pInstance->pPSAInput = LVM_NULL;
+    }
+
+    free(*phInstance);
+    return;
 }
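
Each buffer released above follows the same check, free, then reset-to-LVM_NULL pattern. Purely as a sketch (not part of this change), the pattern could be captured in a small helper; pScratch is used in the usage note because it is declared as void* in LVM_Private.h:

    /* Sketch only: hypothetical helper capturing the free-and-clear pattern above. */
    static void LVM_FreeAndClear(void** ppBuffer) {
        if (*ppBuffer != LVM_NULL) {
            free(*ppBuffer);
            *ppBuffer = LVM_NULL;
        }
    }

    /* Usage, e.g.: LVM_FreeAndClear(&pInstance->pScratch); */
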
 
 /****************************************************************************************/
@@ -1027,48 +582,36 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-LVM_ReturnStatus_en LVM_ClearAudioBuffers(LVM_Handle_t  hInstance)
-{
-    LVM_MemTab_t            MemTab;                                     /* Memory table */
-    LVM_InstParams_t        InstParams;                                 /* Instance parameters */
-    LVM_ControlParams_t     Params;                                     /* Control Parameters */
-    LVM_Instance_t          *pInstance  = (LVM_Instance_t  *)hInstance; /* Pointer to Instance */
-    LVM_HeadroomParams_t    HeadroomParams;
+LVM_ReturnStatus_en LVM_ClearAudioBuffers(LVM_Handle_t hInstance) {
+    LVM_InstParams_t InstParams;                            /* Instance parameters */
+    LVM_ControlParams_t Params;                             /* Control Parameters */
+    LVM_Instance_t* pInstance = (LVM_Instance_t*)hInstance; /* Pointer to Instance */
+    LVM_HeadroomParams_t HeadroomParams;
 
-    if(hInstance == LVM_NULL){
+    if (hInstance == LVM_NULL) {
         return LVM_NULLADDRESS;
     }
 
-    /* Save the control parameters */ /* coverity[unchecked_value] */ /* Do not check return value internal function calls */
+    /* Save the control parameters */ /* coverity[unchecked_value] */ /* Do not check return value
+                                                                         of internal function calls */
     LVM_GetControlParameters(hInstance, &Params);
 
     /*Save the headroom parameters*/
     LVM_GetHeadroomParams(hInstance, &HeadroomParams);
 
-    /*  Retrieve allocated buffers in memtab */
-    LVM_GetMemoryTable(hInstance, &MemTab,  LVM_NULL);
-
     /*  Save the instance parameters */
     InstParams = pInstance->InstParams;
 
     /*  Call  LVM_GetInstanceHandle to re-initialise the bundle */
-    LVM_GetInstanceHandle( &hInstance,
-                           &MemTab,
-                           &InstParams);
-
-    /* Restore control parameters */ /* coverity[unchecked_value] */ /* Do not check return value internal function calls */
+    /* Restore control parameters */ /* coverity[unchecked_value] */ /* Do not check return value
+                                                                        of internal function calls */
     LVM_SetControlParameters(hInstance, &Params);
 
     /*Restore the headroom parameters*/
     LVM_SetHeadroomParams(hInstance, &HeadroomParams);
 
     /* DC removal filter */
-#ifdef SUPPORT_MC
     DC_Mc_D16_TRC_WRA_01_Init(&pInstance->DC_RemovalInstance);
-#else
-    DC_2I_D16_TRC_WRA_01_Init(&pInstance->DC_RemovalInstance);
-#endif
 
     return LVM_SUCCESS;
 }
-
diff --git a/media/libeffects/lvm/lib/Bundle/src/LVM_Private.h b/media/libeffects/lvm/lib/Bundle/src/LVM_Private.h
index ddaac99..90a1f19 100644
--- a/media/libeffects/lvm/lib/Bundle/src/LVM_Private.h
+++ b/media/libeffects/lvm/lib/Bundle/src/LVM_Private.h
@@ -33,14 +33,14 @@
 /*                                                                                  */
 /************************************************************************************/
 
-#include "LVM.h"                                /* LifeVibes */
-#include "LVM_Common.h"                         /* LifeVibes common */
-#include "BIQUAD.h"                             /* Biquad library */
-#include "LVC_Mixer.h"                          /* Mixer library */
-#include "LVCS_Private.h"                       /* Concert Sound */
-#include "LVDBE_Private.h"                      /* Dynamic Bass Enhancement */
-#include "LVEQNB_Private.h"                     /* N-Band equaliser */
-#include "LVPSA_Private.h"                      /* Parametric Spectrum Analyzer */
+#include "LVM.h"            /* LifeVibes */
+#include "LVM_Common.h"     /* LifeVibes common */
+#include "BIQUAD.h"         /* Biquad library */
+#include "LVC_Mixer.h"      /* Mixer library */
+#include "LVCS_Private.h"   /* Concert Sound */
+#include "LVDBE_Private.h"  /* Dynamic Bass Enhancement */
+#include "LVEQNB_Private.h" /* N-Band equaliser */
+#include "LVPSA_Private.h"  /* Parametric Spectrum Analyzer */
 
 /************************************************************************************/
 /*                                                                                  */
@@ -49,63 +49,64 @@
 /************************************************************************************/
 
 /* General */
-#define LVM_INVALID                     0xFFFF    /* Invalid init parameter */
+#define LVM_INVALID 0xFFFF /* Invalid init parameter */
 
 /* Memory */
-#define LVM_INSTANCE_ALIGN              4         /* 32-bit for structures */
-#define LVM_FIRSTCALL                   0         /* First call to the buffer */
-#define LVM_MAXBLOCKCALL                1         /* Maximum block size calls to the buffer */
-#define LVM_LASTCALL                    2         /* Last call to the buffer */
-#define LVM_FIRSTLASTCALL               3         /* Single call for small number of samples */
+#define LVM_INSTANCE_ALIGN 4 /* 32-bit for structures */
+#define LVM_FIRSTCALL 0      /* First call to the buffer */
+#define LVM_MAXBLOCKCALL 1   /* Maximum block size calls to the buffer */
+#define LVM_LASTCALL 2       /* Last call to the buffer */
+#define LVM_FIRSTLASTCALL 3  /* Single call for small number of samples */
 
 /* Block Size */
-#define LVM_MIN_MAXBLOCKSIZE            16        /* Minimum MaxBlockSize Limit*/
-#define LVM_MANAGED_MAX_MAXBLOCKSIZE    8191      /* Maximum MaxBlockSzie Limit for Managed Buffer Mode*/
-#define LVM_UNMANAGED_MAX_MAXBLOCKSIZE  4096      /* Maximum MaxBlockSzie Limit for Unmanaged Buffer Mode */
+#define LVM_MIN_MAXBLOCKSIZE 16           /* Minimum MaxBlockSize Limit*/
+#define LVM_MANAGED_MAX_MAXBLOCKSIZE 8191 /* Maximum MaxBlockSize Limit for Managed Buffer Mode*/
+#define LVM_UNMANAGED_MAX_MAXBLOCKSIZE \
+    4096 /* Maximum MaxBlockSize Limit for Unmanaged Buffer Mode */
 
-#define MAX_INTERNAL_BLOCKSIZE          8128      /* Maximum multiple of 64  below 8191*/
+#define MAX_INTERNAL_BLOCKSIZE 8128 /* Maximum multiple of 64  below 8191*/
 
-#define MIN_INTERNAL_BLOCKSIZE          16        /* Minimum internal block size */
-#define MIN_INTERNAL_BLOCKSHIFT         4         /* Minimum internal block size as a power of 2 */
-#define MIN_INTERNAL_BLOCKMASK          0xFFF0    /* Minimum internal block size mask */
+#define MIN_INTERNAL_BLOCKSIZE 16     /* Minimum internal block size */
+#define MIN_INTERNAL_BLOCKSHIFT 4     /* Minimum internal block size as a power of 2 */
+#define MIN_INTERNAL_BLOCKMASK 0xFFF0 /* Minimum internal block size mask */
 
-#define LVM_PSA_DYNAMICRANGE            60        /* Spectral Dynamic range: used for offseting output*/
-#define LVM_PSA_BARHEIGHT               127       /* Spectral Bar Height*/
+#define LVM_PSA_DYNAMICRANGE 60 /* Spectral Dynamic range: used for offsetting output*/
+#define LVM_PSA_BARHEIGHT 127   /* Spectral Bar Height*/
 
-#define LVM_TE_MIN_EFFECTLEVEL          0         /*TE Minimum EffectLevel*/
-#define LVM_TE_MAX_EFFECTLEVEL          15        /*TE Maximum Effect level*/
+#define LVM_TE_MIN_EFFECTLEVEL 0  /*TE Minimum EffectLevel*/
+#define LVM_TE_MAX_EFFECTLEVEL 15 /*TE Maximum Effect level*/
 
-#define LVM_VC_MIN_EFFECTLEVEL          (-96)     /*VC Minimum EffectLevel*/
-#define LVM_VC_MAX_EFFECTLEVEL          0         /*VC Maximum Effect level*/
+#define LVM_VC_MIN_EFFECTLEVEL (-96) /*VC Minimum EffectLevel*/
+#define LVM_VC_MAX_EFFECTLEVEL 0     /*VC Maximum Effect level*/
 
-#define LVM_BE_MIN_EFFECTLEVEL          0         /*BE Minimum EffectLevel*/
-#define LVM_BE_MAX_EFFECTLEVEL          15        /*BE Maximum Effect level*/
+#define LVM_BE_MIN_EFFECTLEVEL 0  /*BE Minimum EffectLevel*/
+#define LVM_BE_MAX_EFFECTLEVEL 15 /*BE Maximum Effect level*/
 
-#define LVM_EQNB_MIN_BAND_FREQ          20        /*EQNB Minimum Band Frequency*/
-#define LVM_EQNB_MAX_BAND_FREQ          24000     /*EQNB Maximum Band Frequency*/
-#define LVM_EQNB_MIN_BAND_GAIN          (-15)     /*EQNB Minimum Band Frequency*/
-#define LVM_EQNB_MAX_BAND_GAIN          15        /*EQNB Maximum Band Frequency*/
-#define LVM_EQNB_MIN_QFACTOR            25        /*EQNB Minimum Q Factor*/
-#define LVM_EQNB_MAX_QFACTOR            1200      /*EQNB Maximum Q Factor*/
-#define LVM_EQNB_MIN_LPF_FREQ           1000      /*EQNB Minimum Low Pass Corner frequency*/
-#define LVM_EQNB_MIN_HPF_FREQ           20        /*EQNB Minimum High Pass Corner frequency*/
-#define LVM_EQNB_MAX_HPF_FREQ           1000      /*EQNB Maximum High Pass Corner frequency*/
+#define LVM_EQNB_MIN_BAND_FREQ 20    /*EQNB Minimum Band Frequency*/
+#define LVM_EQNB_MAX_BAND_FREQ 24000 /*EQNB Maximum Band Frequency*/
+#define LVM_EQNB_MIN_BAND_GAIN (-15) /*EQNB Minimum Band Gain*/
+#define LVM_EQNB_MAX_BAND_GAIN 15    /*EQNB Maximum Band Gain*/
+#define LVM_EQNB_MIN_QFACTOR 25      /*EQNB Minimum Q Factor*/
+#define LVM_EQNB_MAX_QFACTOR 1200    /*EQNB Maximum Q Factor*/
+#define LVM_EQNB_MIN_LPF_FREQ 1000   /*EQNB Minimum Low Pass Corner frequency*/
+#define LVM_EQNB_MIN_HPF_FREQ 20     /*EQNB Minimum High Pass Corner frequency*/
+#define LVM_EQNB_MAX_HPF_FREQ 1000   /*EQNB Maximum High Pass Corner frequency*/
 
-#define LVM_CS_MIN_EFFECT_LEVEL         0         /*CS Minimum Effect Level*/
-#define LVM_CS_MAX_REVERB_LEVEL         100       /*CS Maximum Reverb Level*/
-#define LVM_VIRTUALIZER_MAX_REVERB_LEVEL 100      /*Vitrualizer Maximum Reverb Level*/
+#define LVM_CS_MIN_EFFECT_LEVEL 0            /*CS Minimum Effect Level*/
+#define LVM_CS_MAX_REVERB_LEVEL 100          /*CS Maximum Reverb Level*/
+#define LVM_VIRTUALIZER_MAX_REVERB_LEVEL 100 /*Virtualizer Maximum Reverb Level*/
 
-#define LVM_VC_MIXER_TIME              100       /*VC mixer time*/
-#define LVM_VC_BALANCE_MAX             96        /*VC balance max value*/
-#define LVM_VC_BALANCE_MIN             (-96)     /*VC balance min value*/
+#define LVM_VC_MIXER_TIME 100    /*VC mixer time*/
+#define LVM_VC_BALANCE_MAX 96    /*VC balance max value*/
+#define LVM_VC_BALANCE_MIN (-96) /*VC balance min value*/
 
 /* Algorithm masks */
-#define LVM_CS_MASK                     1
-#define LVM_EQNB_MASK                   2
-#define LVM_DBE_MASK                    4
-#define LVM_VC_MASK                     16
-#define LVM_TE_MASK                     32
-#define LVM_PSA_MASK                    2048
+#define LVM_CS_MASK 1
+#define LVM_EQNB_MASK 2
+#define LVM_DBE_MASK 4
+#define LVM_VC_MASK 16
+#define LVM_TE_MASK 32
+#define LVM_PSA_MASK 2048
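
Each mask selects one bit (or bit group) of ConfigurationNumber, so the bundle can decide its block-size multiple with a single AND, as done in the initialisation code earlier in this diff. A minimal sketch, assuming all algorithms are included in the configuration word:

    /* Sketch only: all algorithms included in the configuration word. */
    LVM_INT32 ConfigurationNumber = LVM_CS_MASK | LVM_EQNB_MASK | LVM_DBE_MASK |
                                    LVM_TE_MASK | LVM_VC_MASK | LVM_PSA_MASK;

    /* PSA does not force the larger multiple; the other algorithms do. */
    LVM_INT32 BlockSizeMultiple =
            ((ConfigurationNumber & (LVM_CS_MASK | LVM_DBE_MASK | LVM_EQNB_MASK |
                                     LVM_TE_MASK | LVM_VC_MASK)) != 0)
                    ? 4
                    : 1;
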
 
 /************************************************************************************/
 /*                                                                                  */
@@ -113,133 +114,102 @@
 /*                                                                                  */
 /************************************************************************************/
 
-/* Memory region definition */
-typedef struct
-{
-    LVM_UINT32              Size;               /* Region size in bytes */
-    LVM_UINT16              Alignment;          /* Byte alignment */
-    LVM_MemoryTypes_en      Type;               /* Region type */
-    void                    *pBaseAddress;      /* Pointer to the region base address */
-} LVM_IntMemoryRegion_t;
-
-/* Memory table containing the region definitions */
-typedef struct
-{
-    LVM_IntMemoryRegion_t   Region[LVM_NR_MEMORY_REGIONS];  /* One definition for each region */
-} LVM_IntMemTab_t;
-
 /* Buffer Management */
-typedef struct
-{
-    LVM_FLOAT               *pScratch;          /* Bundle scratch buffer */
+typedef struct {
+    LVM_FLOAT* pScratch; /* Bundle scratch buffer */
 
-    LVM_INT16               BufferState;        /* Buffer status */
-#ifdef SUPPORT_MC
-    LVM_FLOAT               InDelayBuffer[3 * LVM_MAX_CHANNELS * MIN_INTERNAL_BLOCKSIZE];
-#else
-    LVM_FLOAT               InDelayBuffer[6 * MIN_INTERNAL_BLOCKSIZE]; /* Input buffer delay line, \
-                                                                           left and right */
-#endif
-    LVM_INT16               InDelaySamples;     /* Number of samples in the input delay buffer */
-#ifdef SUPPORT_MC
-    LVM_FLOAT               OutDelayBuffer[LVM_MAX_CHANNELS * MIN_INTERNAL_BLOCKSIZE];
-#else
-    LVM_FLOAT               OutDelayBuffer[2 * MIN_INTERNAL_BLOCKSIZE]; /* Output buffer delay \
-                                                                                      line */
-#endif
-    LVM_INT16               OutDelaySamples;    /* Number of samples in the output delay buffer, \
-                                                                             left and right */
-    LVM_INT16               SamplesToOutput;    /* Samples to write to the output */
+    LVM_INT16 BufferState; /* Buffer status */
+    LVM_FLOAT InDelayBuffer[3 * LVM_MAX_CHANNELS * MIN_INTERNAL_BLOCKSIZE];
+    LVM_INT16 InDelaySamples; /* Number of samples in the input delay buffer */
+    LVM_FLOAT OutDelayBuffer[LVM_MAX_CHANNELS * MIN_INTERNAL_BLOCKSIZE];
+    LVM_INT16 OutDelaySamples; /* Number of samples in the output delay buffer, \
+                                                            left and right */
+    LVM_INT16 SamplesToOutput; /* Samples to write to the output */
 } LVM_Buffer_t;
 
 /* Filter taps */
-typedef struct
-{
-    Biquad_2I_Order1_FLOAT_Taps_t TrebleBoost_Taps;   /* Treble boost Taps */
+typedef struct {
+    Biquad_2I_Order1_FLOAT_Taps_t TrebleBoost_Taps; /* Treble boost Taps */
 } LVM_TE_Data_t;
 
 /* Coefficients */
-typedef struct
-{
-    Biquad_FLOAT_Instance_t       TrebleBoost_State;  /* State for the treble boost filter */
+typedef struct {
+    Biquad_FLOAT_Instance_t TrebleBoost_State; /* State for the treble boost filter */
 } LVM_TE_Coefs_t;
 
-typedef struct
-{
+typedef struct {
     /* Public parameters */
-    LVM_MemTab_t            MemoryTable;        /* Instance memory allocation table */
-    LVM_ControlParams_t     Params;             /* Control parameters */
-    LVM_InstParams_t        InstParams;         /* Instance parameters */
+    LVM_ControlParams_t Params;  /* Control parameters */
+    LVM_InstParams_t InstParams; /* Instance parameters */
 
     /* Private parameters */
-    LVM_UINT16              ControlPending;     /* Control flag to indicate update pending */
-    LVM_ControlParams_t     NewParams;          /* New control parameters pending update */
+    LVM_UINT16 ControlPending;     /* Control flag to indicate update pending */
+    LVM_ControlParams_t NewParams; /* New control parameters pending update */
 
     /* Buffer control */
-    LVM_INT16               InternalBlockSize;  /* Maximum internal block size */
-    LVM_Buffer_t            *pBufferManagement; /* Buffer management variables */
-    LVM_INT16               SamplesToProcess;   /* Input samples left to process */
-    LVM_FLOAT               *pInputSamples;     /* External input sample pointer */
-    LVM_FLOAT               *pOutputSamples;    /* External output sample pointer */
+    LVM_INT16 InternalBlockSize;     /* Maximum internal block size */
+    LVM_Buffer_t* pBufferManagement; /* Buffer management variables */
+    LVM_INT16 SamplesToProcess;      /* Input samples left to process */
+    LVM_FLOAT* pInputSamples;        /* External input sample pointer */
+    LVM_FLOAT* pOutputSamples;       /* External output sample pointer */
 
     /* Configuration number */
-    LVM_INT32               ConfigurationNumber;
-    LVM_INT32               BlickSizeMultiple;
+    LVM_INT32 ConfigurationNumber;
+    LVM_INT32 BlickSizeMultiple;
 
     /* DC removal */
-    Biquad_FLOAT_Instance_t       DC_RemovalInstance; /* DC removal filter instance */
+    Biquad_FLOAT_Instance_t DC_RemovalInstance; /* DC removal filter instance */
 
     /* Concert Sound */
-    LVCS_Handle_t           hCSInstance;        /* Concert Sound instance handle */
-    LVCS_Instance_t         CS_Instance;        /* Concert Sound instance */
-    LVM_INT16               CS_Active;          /* Control flag */
+    LVCS_Handle_t hCSInstance;   /* Concert Sound instance handle */
+    LVCS_Instance_t CS_Instance; /* Concert Sound instance */
+    LVM_INT16 CS_Active;         /* Control flag */
 
     /* Equalizer */
-    LVEQNB_Handle_t         hEQNBInstance;      /* N-Band Equaliser instance handle */
-    LVEQNB_Instance_t       EQNB_Instance;      /* N-Band Equaliser instance */
-    LVM_EQNB_BandDef_t      *pEQNB_BandDefs;    /* Local storage for new definitions */
-    LVM_EQNB_BandDef_t      *pEQNB_UserDefs;    /* Local storage for the user's definitions */
-    LVM_INT16               EQNB_Active;        /* Control flag */
+    LVEQNB_Handle_t hEQNBInstance;      /* N-Band Equaliser instance handle */
+    LVEQNB_Instance_t EQNB_Instance;    /* N-Band Equaliser instance */
+    LVM_EQNB_BandDef_t* pEQNB_BandDefs; /* Local storage for new definitions */
+    LVM_EQNB_BandDef_t* pEQNB_UserDefs; /* Local storage for the user's definitions */
+    LVM_INT16 EQNB_Active;              /* Control flag */
 
     /* Dynamic Bass Enhancement */
-    LVDBE_Handle_t          hDBEInstance;       /* Dynamic Bass Enhancement instance handle */
-    LVDBE_Instance_t        DBE_Instance;       /* Dynamic Bass Enhancement instance */
-    LVM_INT16               DBE_Active;         /* Control flag */
+    LVDBE_Handle_t hDBEInstance;   /* Dynamic Bass Enhancement instance handle */
+    LVDBE_Instance_t DBE_Instance; /* Dynamic Bass Enhancement instance */
+    LVM_INT16 DBE_Active;          /* Control flag */
 
     /* Volume Control */
-    LVMixer3_1St_FLOAT_st   VC_Volume;          /* Volume scaler */
-    LVMixer3_2St_FLOAT_st         VC_BalanceMix;      /* VC balance mixer */
-    LVM_INT16               VC_VolumedB;        /* Gain in dB */
-    LVM_INT16               VC_Active;          /* Control flag */
-    LVM_INT16               VC_AVLFixedVolume;  /* AVL fixed volume */
+    LVMixer3_1St_FLOAT_st VC_Volume;     /* Volume scaler */
+    LVMixer3_2St_FLOAT_st VC_BalanceMix; /* VC balance mixer */
+    LVM_INT16 VC_VolumedB;               /* Gain in dB */
+    LVM_INT16 VC_Active;                 /* Control flag */
+    LVM_INT16 VC_AVLFixedVolume;         /* AVL fixed volume */
 
     /* Treble Enhancement */
-    LVM_TE_Data_t           *pTE_Taps;          /* Treble boost Taps */
-    LVM_TE_Coefs_t          *pTE_State;         /* State for the treble boost filter */
-    LVM_INT16               TE_Active;          /* Control flag */
+    LVM_TE_Data_t* pTE_Taps;   /* Treble boost Taps */
+    LVM_TE_Coefs_t* pTE_State; /* State for the treble boost filter */
+    LVM_INT16 TE_Active;       /* Control flag */
 
     /* Headroom */
-    LVM_HeadroomParams_t    NewHeadroomParams;   /* New headroom parameters pending update */
-    LVM_HeadroomParams_t    HeadroomParams;      /* Headroom parameters */
-    LVM_HeadroomBandDef_t   *pHeadroom_BandDefs; /* Local storage for new definitions */
-    LVM_HeadroomBandDef_t   *pHeadroom_UserDefs; /* Local storage for the user's definitions */
-    LVM_UINT16              Headroom;            /* Value of the current headroom */
+    LVM_HeadroomParams_t NewHeadroomParams;    /* New headroom parameters pending update */
+    LVM_HeadroomParams_t HeadroomParams;       /* Headroom parameters */
+    LVM_HeadroomBandDef_t* pHeadroom_BandDefs; /* Local storage for new definitions */
+    LVM_HeadroomBandDef_t* pHeadroom_UserDefs; /* Local storage for the user's definitions */
+    LVM_UINT16 Headroom;                       /* Value of the current headroom */
 
     /* Spectrum Analyzer */
-    pLVPSA_Handle_t         hPSAInstance;       /* Spectrum Analyzer instance handle */
-    LVPSA_InstancePr_t      PSA_Instance;       /* Spectrum Analyzer instance */
-    LVPSA_InitParams_t      PSA_InitParams;     /* Spectrum Analyzer initialization parameters */
-    LVPSA_ControlParams_t   PSA_ControlParams;  /* Spectrum Analyzer control parameters */
-    LVM_INT16               PSA_GainOffset;     /* Tone control flag */
-    LVM_Callback            CallBack;
-    LVM_FLOAT               *pPSAInput;         /* PSA input pointer */
+    pLVPSA_Handle_t hPSAInstance;            /* Spectrum Analyzer instance handle */
+    LVPSA_InstancePr_t PSA_Instance;         /* Spectrum Analyzer instance */
+    LVPSA_InitParams_t PSA_InitParams;       /* Spectrum Analyzer initialization parameters */
+    LVPSA_ControlParams_t PSA_ControlParams; /* Spectrum Analyzer control parameters */
+    LVM_INT16 PSA_GainOffset;                /* Tone control flag */
+    LVM_Callback CallBack;
+    LVM_FLOAT* pPSAInput; /* PSA input pointer */
 
-    LVM_INT16              NoSmoothVolume;      /* Enable or disable smooth volume changes*/
+    LVM_INT16 NoSmoothVolume; /* Enable or disable smooth volume changes*/
 
-#ifdef SUPPORT_MC
-    LVM_INT16              NrChannels;
-    LVM_INT32              ChMask;
-#endif
+    LVM_INT16 NrChannels;
+    LVM_INT32 ChMask;
+    void* pScratch; /* Pointer to bundle scratch buffer*/
 
 } LVM_Instance_t;
 
@@ -249,32 +219,19 @@
 /*                                                                                  */
 /************************************************************************************/
 
-LVM_ReturnStatus_en LVM_ApplyNewSettings(LVM_Handle_t       hInstance);
+LVM_ReturnStatus_en LVM_ApplyNewSettings(LVM_Handle_t hInstance);
 
-void    LVM_SetTrebleBoost( LVM_Instance_t         *pInstance,
-                            LVM_ControlParams_t    *pParams);
+void LVM_SetTrebleBoost(LVM_Instance_t* pInstance, LVM_ControlParams_t* pParams);
 
-void    LVM_SetVolume(  LVM_Instance_t         *pInstance,
-                        LVM_ControlParams_t    *pParams);
+void LVM_SetVolume(LVM_Instance_t* pInstance, LVM_ControlParams_t* pParams);
 
-LVM_INT32    LVM_VCCallBack(void*   pBundleHandle,
-                            void*   pGeneralPurpose,
-                            short   CallBackParam);
+LVM_INT32 LVM_VCCallBack(void* pBundleHandle, void* pGeneralPurpose, short CallBackParam);
 
-void    LVM_SetHeadroom(    LVM_Instance_t         *pInstance,
-                            LVM_ControlParams_t    *pParams);
-void    LVM_BufferIn(   LVM_Handle_t      hInstance,
-                        const LVM_FLOAT   *pInData,
-                        LVM_FLOAT         **pToProcess,
-                        LVM_FLOAT         **pProcessed,
-                        LVM_UINT16        *pNumSamples);
-void    LVM_BufferOut(  LVM_Handle_t     hInstance,
-                        LVM_FLOAT        *pOutData,
-                        LVM_UINT16       *pNumSamples);
+void LVM_SetHeadroom(LVM_Instance_t* pInstance, LVM_ControlParams_t* pParams);
+void LVM_BufferIn(LVM_Handle_t hInstance, const LVM_FLOAT* pInData, LVM_FLOAT** pToProcess,
+                  LVM_FLOAT** pProcessed, LVM_UINT16* pNumSamples);
+void LVM_BufferOut(LVM_Handle_t hInstance, LVM_FLOAT* pOutData, LVM_UINT16* pNumSamples);
 
-LVM_INT32 LVM_AlgoCallBack(     void          *pBundleHandle,
-                                void          *pData,
-                                LVM_INT16     callbackId);
+LVM_INT32 LVM_AlgoCallBack(void* pBundleHandle, void* pData, LVM_INT16 callbackId);
 
-#endif      /* __LVM_PRIVATE_H__ */
-
+#endif /* __LVM_PRIVATE_H__ */
diff --git a/media/libeffects/lvm/lib/Bundle/src/LVM_Process.cpp b/media/libeffects/lvm/lib/Bundle/src/LVM_Process.cpp
index dc86cfd..c94c469 100644
--- a/media/libeffects/lvm/lib/Bundle/src/LVM_Process.cpp
+++ b/media/libeffects/lvm/lib/Bundle/src/LVM_Process.cpp
@@ -51,77 +51,61 @@
 /* NOTES:                                                                               */
 /*                                                                                      */
 /****************************************************************************************/
-LVM_ReturnStatus_en LVM_Process(LVM_Handle_t                hInstance,
-                                const LVM_FLOAT             *pInData,
-                                LVM_FLOAT                   *pOutData,
-                                LVM_UINT16                  NumSamples,
-                                LVM_UINT32                  AudioTime)
-{
-
-    LVM_Instance_t      *pInstance  = (LVM_Instance_t  *)hInstance;
-    LVM_UINT16          SampleCount = NumSamples;
-    LVM_FLOAT           *pInput     = (LVM_FLOAT *)pInData;
-    LVM_FLOAT           *pToProcess = (LVM_FLOAT *)pInData;
-    LVM_FLOAT           *pProcessed = pOutData;
-    LVM_ReturnStatus_en  Status;
-#ifdef SUPPORT_MC
-    LVM_INT32           NrChannels  = pInstance->NrChannels;
-    LVM_INT32           ChMask      = pInstance->ChMask;
+LVM_ReturnStatus_en LVM_Process(LVM_Handle_t hInstance, const LVM_FLOAT* pInData,
+                                LVM_FLOAT* pOutData, LVM_UINT16 NumSamples, LVM_UINT32 AudioTime) {
+    LVM_Instance_t* pInstance = (LVM_Instance_t*)hInstance;
+    LVM_UINT16 SampleCount = NumSamples;
+    LVM_FLOAT* pInput = (LVM_FLOAT*)pInData;
+    LVM_FLOAT* pToProcess = (LVM_FLOAT*)pInData;
+    LVM_FLOAT* pProcessed = pOutData;
+    LVM_ReturnStatus_en Status;
+    LVM_INT32 NrChannels = pInstance->NrChannels;
+    LVM_INT32 ChMask = pInstance->ChMask;
 #define NrFrames SampleCount  // alias for clarity
-#endif
 
     /*
      * Check if the number of samples is zero
      */
-    if (NumSamples == 0)
-    {
-        return(LVM_SUCCESS);
+    if (NumSamples == 0) {
+        return (LVM_SUCCESS);
     }
 
     /*
      * Check valid pointers have been given
      */
-    if ((hInstance == LVM_NULL) || (pInData == LVM_NULL) || (pOutData == LVM_NULL))
-    {
+    if ((hInstance == LVM_NULL) || (pInData == LVM_NULL) || (pOutData == LVM_NULL)) {
         return (LVM_NULLADDRESS);
     }
 
     /*
      * For unmanaged mode only
      */
-    if(pInstance->InstParams.BufferMode == LVM_UNMANAGED_BUFFERS)
-    {
-         /*
+    if (pInstance->InstParams.BufferMode == LVM_UNMANAGED_BUFFERS) {
+        /*
          * Check if the number of samples is a good multiple (unmanaged mode only)
          */
-        if((NumSamples % pInstance->BlickSizeMultiple) != 0)
-        {
-            return(LVM_INVALIDNUMSAMPLES);
+        if ((NumSamples % pInstance->BlickSizeMultiple) != 0) {
+            return (LVM_INVALIDNUMSAMPLES);
         }
 
         /*
          * Check the buffer alignment
          */
-        if((((uintptr_t)pInData % 4) != 0) || (((uintptr_t)pOutData % 4) != 0))
-        {
-            return(LVM_ALIGNMENTERROR);
+        if ((((uintptr_t)pInData % 4) != 0) || (((uintptr_t)pOutData % 4) != 0)) {
+            return (LVM_ALIGNMENTERROR);
         }
     }
 
     /*
      * Update new parameters if necessary
      */
-    if (pInstance->ControlPending == LVM_TRUE)
-    {
+    if (pInstance->ControlPending == LVM_TRUE) {
         Status = LVM_ApplyNewSettings(hInstance);
-#ifdef SUPPORT_MC
         /* Update the local variable NrChannels from pInstance->NrChannels value */
         NrChannels = pInstance->NrChannels;
-        ChMask     = pInstance->ChMask;
-#endif
+        ChMask = pInstance->ChMask;
 
-        if(Status != LVM_SUCCESS)
-        {
+        if (Status != LVM_SUCCESS) {
             return Status;
         }
     }
@@ -129,201 +113,116 @@
     /*
      * Convert from Mono if necessary
      */
-    if (pInstance->Params.SourceFormat == LVM_MONO)
-    {
-        MonoTo2I_Float(pInData,                                /* Source */
-                       pOutData,                               /* Destination */
-                       (LVM_INT16)NumSamples);                 /* Number of input samples */
-        pInput     = pOutData;
+    if (pInstance->Params.SourceFormat == LVM_MONO) {
+        MonoTo2I_Float(pInData,                /* Source */
+                       pOutData,               /* Destination */
+                       (LVM_INT16)NumSamples); /* Number of input samples */
+        pInput = pOutData;
         pToProcess = pOutData;
-#ifdef SUPPORT_MC
         NrChannels = 2;
-        ChMask     = AUDIO_CHANNEL_OUT_STEREO;
-#endif
+        ChMask = AUDIO_CHANNEL_OUT_STEREO;
     }
 
     /*
      * Process the data with managed buffers
      */
-    while (SampleCount != 0)
-    {
+    while (SampleCount != 0) {
         /*
          * Manage the input buffer and frame processing
          */
-        LVM_BufferIn(hInstance,
-                     pInput,
-                     &pToProcess,
-                     &pProcessed,
-                     &SampleCount);
+        LVM_BufferIn(hInstance, pInput, &pToProcess, &pProcessed, &SampleCount);
 
         /*
          * Only process data when SampleCount is non-zero, a zero count can occur when
          * the BufferIn routine is working in managed mode.
          */
-        if (SampleCount != 0)
-        {
+        if (SampleCount != 0) {
             /*
              * Apply ConcertSound if required
              */
-            if (pInstance->CS_Active == LVM_TRUE)
-            {
-                (void)LVCS_Process(pInstance->hCSInstance,     /* Concert Sound instance handle */
-                                   pToProcess,
-                                   pProcessed,
-                                   SampleCount);
+            if (pInstance->CS_Active == LVM_TRUE) {
+                (void)LVCS_Process(pInstance->hCSInstance, /* Concert Sound instance handle */
+                                   pToProcess, pProcessed, SampleCount);
                 pToProcess = pProcessed;
             }
 
             /*
              * Apply volume if required
              */
-            if (pInstance->VC_Active!=0)
-            {
-#ifdef SUPPORT_MC
-                LVC_MixSoft_Mc_D16C31_SAT(&pInstance->VC_Volume,
-                                       pToProcess,
-                                       pProcessed,
-                                       (LVM_INT16)(NrFrames),
-                                       NrChannels);
-#else
-                LVC_MixSoft_1St_D16C31_SAT(&pInstance->VC_Volume,
-                                       pToProcess,
-                                       pProcessed,
-                                       (LVM_INT16)(2 * SampleCount));     /* Left and right*/
-#endif
+            if (pInstance->VC_Active != 0) {
+                LVC_MixSoft_Mc_D16C31_SAT(&pInstance->VC_Volume, pToProcess, pProcessed,
+                                          (LVM_INT16)(NrFrames), NrChannels);
                 pToProcess = pProcessed;
             }
 
             /*
              * Call N-Band equaliser if enabled
              */
-            if (pInstance->EQNB_Active == LVM_TRUE)
-            {
-                LVEQNB_Process(pInstance->hEQNBInstance,    /* N-Band equaliser instance handle */
-                               pToProcess,
-                               pProcessed,
-                               SampleCount);
+            if (pInstance->EQNB_Active == LVM_TRUE) {
+                LVEQNB_Process(pInstance->hEQNBInstance, /* N-Band equaliser instance handle */
+                               pToProcess, pProcessed, SampleCount);
                 pToProcess = pProcessed;
             }
 
             /*
              * Call bass enhancement if enabled
              */
-            if (pInstance->DBE_Active == LVM_TRUE)
-            {
-                LVDBE_Process(pInstance->hDBEInstance,       /* Dynamic Bass Enhancement \
-                                                                instance handle */
-                              pToProcess,
-                              pProcessed,
-                              SampleCount);
+            if (pInstance->DBE_Active == LVM_TRUE) {
+                LVDBE_Process(pInstance->hDBEInstance, /* Dynamic Bass Enhancement \
+                                                          instance handle */
+                              pToProcess, pProcessed, SampleCount);
                 pToProcess = pProcessed;
             }
 
             /*
              * Bypass mode or everything off, so copy the input to the output
              */
-            if (pToProcess != pProcessed)
-            {
-#ifdef SUPPORT_MC
-                Copy_Float(pToProcess,                             /* Source */
-                           pProcessed,                             /* Destination */
-                           (LVM_INT16)(NrChannels * NrFrames));    /* Copy all samples */
-#else
-                Copy_Float(pToProcess,                             /* Source */
-                           pProcessed,                             /* Destination */
-                           (LVM_INT16)(2 * SampleCount));          /* Left and right */
-#endif
+            if (pToProcess != pProcessed) {
+                Copy_Float(pToProcess,                          /* Source */
+                           pProcessed,                          /* Destination */
+                           (LVM_INT16)(NrChannels * NrFrames)); /* Copy all samples */
             }
 
             /*
              * Apply treble boost if required
              */
-            if (pInstance->TE_Active == LVM_TRUE)
-            {
+            if (pInstance->TE_Active == LVM_TRUE) {
                 /*
                  * Apply the filter
                  */
-#ifdef SUPPORT_MC
                 FO_Mc_D16F32C15_LShx_TRC_WRA_01(&pInstance->pTE_State->TrebleBoost_State,
-                                           pProcessed,
-                                           pProcessed,
-                                           (LVM_INT16)NrFrames,
-                                           (LVM_INT16)NrChannels);
-#else
-                FO_2I_D16F32C15_LShx_TRC_WRA_01(&pInstance->pTE_State->TrebleBoost_State,
-                                           pProcessed,
-                                           pProcessed,
-                                           (LVM_INT16)SampleCount);
-#endif
-
+                                                pProcessed, pProcessed, (LVM_INT16)NrFrames,
+                                                (LVM_INT16)NrChannels);
             }
-#ifdef SUPPORT_MC
             /*
              * Volume balance
              */
-            LVC_MixSoft_1St_MC_float_SAT(&pInstance->VC_BalanceMix,
-                                          pProcessed,
-                                          pProcessed,
-                                          NrFrames,
-                                          NrChannels,
-                                          ChMask);
-#else
-            /*
-             * Volume balance
-             */
-            LVC_MixSoft_1St_2i_D16C31_SAT(&pInstance->VC_BalanceMix,
-                                          pProcessed,
-                                          pProcessed,
-                                          SampleCount);
-#endif
+            LVC_MixSoft_1St_MC_float_SAT(&pInstance->VC_BalanceMix, pProcessed, pProcessed,
+                                         NrFrames, NrChannels, ChMask);
 
             /*
              * Perform Parametric Spectrum Analysis
              */
             if ((pInstance->Params.PSA_Enable == LVM_PSA_ON) &&
-                                            (pInstance->InstParams.PSA_Included == LVM_PSA_ON))
-            {
-#ifdef SUPPORT_MC
-                FromMcToMono_Float(pProcessed,
-                                   pInstance->pPSAInput,
-                                   (LVM_INT16)(NrFrames),
+                (pInstance->InstParams.PSA_Included == LVM_PSA_ON)) {
+                FromMcToMono_Float(pProcessed, pInstance->pPSAInput, (LVM_INT16)(NrFrames),
                                    NrChannels);
-#else
-                From2iToMono_Float(pProcessed,
-                                   pInstance->pPSAInput,
-                                   (LVM_INT16)(SampleCount));
-#endif
 
-                LVPSA_Process(pInstance->hPSAInstance,
-                        pInstance->pPSAInput,
-                        (LVM_UINT16)(SampleCount),
-                        AudioTime);
+                LVPSA_Process(pInstance->hPSAInstance, pInstance->pPSAInput,
+                              (LVM_UINT16)(SampleCount), AudioTime);
             }
 
             /*
              * DC removal
              */
-#ifdef SUPPORT_MC
-            DC_Mc_D16_TRC_WRA_01(&pInstance->DC_RemovalInstance,
-                                 pProcessed,
-                                 pProcessed,
-                                 (LVM_INT16)NrFrames,
-                                 NrChannels);
-#else
-            DC_2I_D16_TRC_WRA_01(&pInstance->DC_RemovalInstance,
-                                 pProcessed,
-                                 pProcessed,
-                                 (LVM_INT16)SampleCount);
-#endif
+            DC_Mc_D16_TRC_WRA_01(&pInstance->DC_RemovalInstance, pProcessed, pProcessed,
+                                 (LVM_INT16)NrFrames, NrChannels);
         }
         /*
          * Manage the output buffer
          */
-        LVM_BufferOut(hInstance,
-                      pOutData,
-                      &SampleCount);
-
+        LVM_BufferOut(hInstance, pOutData, &SampleCount);
     }
 
-    return(LVM_SUCCESS);
+    return (LVM_SUCCESS);
 }
diff --git a/media/libeffects/lvm/lib/Bundle/src/LVM_Tables.cpp b/media/libeffects/lvm/lib/Bundle/src/LVM_Tables.cpp
index 66392e2..860196b 100644
--- a/media/libeffects/lvm/lib/Bundle/src/LVM_Tables.cpp
+++ b/media/libeffects/lvm/lib/Bundle/src/LVM_Tables.cpp
@@ -30,431 +30,297 @@
 /*                                                                                  */
 /************************************************************************************/
 
-FO_FLOAT_LShx_Coefs_t    LVM_TrebleBoostCoefs[] = {
+FO_FLOAT_LShx_Coefs_t LVM_TrebleBoostCoefs[] = {
 
-                    /* 22kHz sampling rate */
-                    {HPF_Fs22050_Gain1_A1,             /* Gain setting 1 */
-                     HPF_Fs22050_Gain1_A0,
-                     -HPF_Fs22050_Gain1_B1},
-                    {HPF_Fs22050_Gain2_A1,             /* Gain setting 2 */
-                     HPF_Fs22050_Gain2_A0,
-                     -HPF_Fs22050_Gain2_B1},
-                    {HPF_Fs22050_Gain3_A1,             /* Gain setting 3 */
-                     HPF_Fs22050_Gain3_A0,
-                     -HPF_Fs22050_Gain3_B1},
-                    {HPF_Fs22050_Gain4_A1,             /* Gain setting 4 */
-                     HPF_Fs22050_Gain4_A0,
-                     -HPF_Fs22050_Gain4_B1},
-                    {HPF_Fs22050_Gain5_A1,             /* Gain setting 5 */
-                     HPF_Fs22050_Gain5_A0,
-                     -HPF_Fs22050_Gain5_B1},
-                    {HPF_Fs22050_Gain6_A1,             /* Gain setting 6 */
-                     HPF_Fs22050_Gain6_A0,
-                     -HPF_Fs22050_Gain6_B1},
-                    {HPF_Fs22050_Gain7_A1,             /* Gain setting 7 */
-                     HPF_Fs22050_Gain7_A0,
-                     -HPF_Fs22050_Gain7_B1},
-                    {HPF_Fs22050_Gain8_A1,             /* Gain setting 8 */
-                     HPF_Fs22050_Gain8_A0,
-                     -HPF_Fs22050_Gain8_B1},
-                    {HPF_Fs22050_Gain9_A1,             /* Gain setting 9 */
-                     HPF_Fs22050_Gain9_A0,
-                     -HPF_Fs22050_Gain9_B1},
-                    {HPF_Fs22050_Gain10_A1,             /* Gain setting 10 */
-                     HPF_Fs22050_Gain10_A0,
-                     -HPF_Fs22050_Gain10_B1},
-                    {HPF_Fs22050_Gain11_A1,             /* Gain setting 11 */
-                     HPF_Fs22050_Gain11_A0,
-                     -HPF_Fs22050_Gain11_B1},
-                    {HPF_Fs22050_Gain12_A1,             /* Gain setting 12 */
-                     HPF_Fs22050_Gain12_A0,
-                     -HPF_Fs22050_Gain12_B1},
-                    {HPF_Fs22050_Gain13_A1,             /* Gain setting 13 */
-                     HPF_Fs22050_Gain13_A0,
-                     -HPF_Fs22050_Gain13_B1},
-                    {HPF_Fs22050_Gain14_A1,             /* Gain setting 14 */
-                     HPF_Fs22050_Gain14_A0,
-                     -HPF_Fs22050_Gain14_B1},
-                    {HPF_Fs22050_Gain15_A1,             /* Gain setting 15 */
-                     HPF_Fs22050_Gain15_A0,
-                     -HPF_Fs22050_Gain15_B1},
+        /* 22kHz sampling rate */
+        {HPF_Fs22050_Gain1_A1, /* Gain setting 1 */
+         HPF_Fs22050_Gain1_A0, -HPF_Fs22050_Gain1_B1},
+        {HPF_Fs22050_Gain2_A1, /* Gain setting 2 */
+         HPF_Fs22050_Gain2_A0, -HPF_Fs22050_Gain2_B1},
+        {HPF_Fs22050_Gain3_A1, /* Gain setting 3 */
+         HPF_Fs22050_Gain3_A0, -HPF_Fs22050_Gain3_B1},
+        {HPF_Fs22050_Gain4_A1, /* Gain setting 4 */
+         HPF_Fs22050_Gain4_A0, -HPF_Fs22050_Gain4_B1},
+        {HPF_Fs22050_Gain5_A1, /* Gain setting 5 */
+         HPF_Fs22050_Gain5_A0, -HPF_Fs22050_Gain5_B1},
+        {HPF_Fs22050_Gain6_A1, /* Gain setting 6 */
+         HPF_Fs22050_Gain6_A0, -HPF_Fs22050_Gain6_B1},
+        {HPF_Fs22050_Gain7_A1, /* Gain setting 7 */
+         HPF_Fs22050_Gain7_A0, -HPF_Fs22050_Gain7_B1},
+        {HPF_Fs22050_Gain8_A1, /* Gain setting 8 */
+         HPF_Fs22050_Gain8_A0, -HPF_Fs22050_Gain8_B1},
+        {HPF_Fs22050_Gain9_A1, /* Gain setting 9 */
+         HPF_Fs22050_Gain9_A0, -HPF_Fs22050_Gain9_B1},
+        {HPF_Fs22050_Gain10_A1, /* Gain setting 10 */
+         HPF_Fs22050_Gain10_A0, -HPF_Fs22050_Gain10_B1},
+        {HPF_Fs22050_Gain11_A1, /* Gain setting 11 */
+         HPF_Fs22050_Gain11_A0, -HPF_Fs22050_Gain11_B1},
+        {HPF_Fs22050_Gain12_A1, /* Gain setting 12 */
+         HPF_Fs22050_Gain12_A0, -HPF_Fs22050_Gain12_B1},
+        {HPF_Fs22050_Gain13_A1, /* Gain setting 13 */
+         HPF_Fs22050_Gain13_A0, -HPF_Fs22050_Gain13_B1},
+        {HPF_Fs22050_Gain14_A1, /* Gain setting 14 */
+         HPF_Fs22050_Gain14_A0, -HPF_Fs22050_Gain14_B1},
+        {HPF_Fs22050_Gain15_A1, /* Gain setting 15 */
+         HPF_Fs22050_Gain15_A0, -HPF_Fs22050_Gain15_B1},
 
-                    /* 24kHz sampling rate */
-                    {HPF_Fs24000_Gain1_A1,             /* Gain setting 1 */
-                     HPF_Fs24000_Gain1_A0,
-                     -HPF_Fs24000_Gain1_B1},
-                    {HPF_Fs24000_Gain2_A1,             /* Gain setting 2 */
-                     HPF_Fs24000_Gain2_A0,
-                     -HPF_Fs24000_Gain2_B1},
-                    {HPF_Fs24000_Gain3_A1,             /* Gain setting 3 */
-                     HPF_Fs24000_Gain3_A0,
-                     -HPF_Fs24000_Gain3_B1},
-                    {HPF_Fs24000_Gain4_A1,             /* Gain setting 4 */
-                     HPF_Fs24000_Gain4_A0,
-                     -HPF_Fs24000_Gain4_B1},
-                    {HPF_Fs24000_Gain5_A1,             /* Gain setting 5 */
-                     HPF_Fs24000_Gain5_A0,
-                     -HPF_Fs24000_Gain5_B1},
-                    {HPF_Fs24000_Gain6_A1,             /* Gain setting 6 */
-                     HPF_Fs24000_Gain6_A0,
-                     -HPF_Fs24000_Gain6_B1},
-                    {HPF_Fs24000_Gain7_A1,             /* Gain setting 7 */
-                     HPF_Fs24000_Gain7_A0,
-                     -HPF_Fs24000_Gain7_B1},
-                    {HPF_Fs24000_Gain8_A1,             /* Gain setting 8 */
-                     HPF_Fs24000_Gain8_A0,
-                     -HPF_Fs24000_Gain8_B1},
-                    {HPF_Fs24000_Gain9_A1,             /* Gain setting 9 */
-                     HPF_Fs24000_Gain9_A0,
-                     -HPF_Fs24000_Gain9_B1},
-                    {HPF_Fs24000_Gain10_A1,             /* Gain setting 10 */
-                     HPF_Fs24000_Gain10_A0,
-                     -HPF_Fs24000_Gain10_B1},
-                    {HPF_Fs24000_Gain11_A1,             /* Gain setting 11 */
-                     HPF_Fs24000_Gain11_A0,
-                     -HPF_Fs24000_Gain11_B1},
-                    {HPF_Fs24000_Gain12_A1,             /* Gain setting 12 */
-                     HPF_Fs24000_Gain12_A0,
-                     -HPF_Fs24000_Gain12_B1},
-                    {HPF_Fs24000_Gain13_A1,             /* Gain setting 13 */
-                     HPF_Fs24000_Gain13_A0,
-                     -HPF_Fs24000_Gain13_B1},
-                    {HPF_Fs24000_Gain14_A1,             /* Gain setting 14 */
-                     HPF_Fs24000_Gain14_A0,
-                     -HPF_Fs24000_Gain14_B1},
-                    {HPF_Fs24000_Gain15_A1,             /* Gain setting 15 */
-                     HPF_Fs24000_Gain15_A0,
-                     -HPF_Fs24000_Gain15_B1},
+        /* 24kHz sampling rate */
+        {HPF_Fs24000_Gain1_A1, /* Gain setting 1 */
+         HPF_Fs24000_Gain1_A0, -HPF_Fs24000_Gain1_B1},
+        {HPF_Fs24000_Gain2_A1, /* Gain setting 2 */
+         HPF_Fs24000_Gain2_A0, -HPF_Fs24000_Gain2_B1},
+        {HPF_Fs24000_Gain3_A1, /* Gain setting 3 */
+         HPF_Fs24000_Gain3_A0, -HPF_Fs24000_Gain3_B1},
+        {HPF_Fs24000_Gain4_A1, /* Gain setting 4 */
+         HPF_Fs24000_Gain4_A0, -HPF_Fs24000_Gain4_B1},
+        {HPF_Fs24000_Gain5_A1, /* Gain setting 5 */
+         HPF_Fs24000_Gain5_A0, -HPF_Fs24000_Gain5_B1},
+        {HPF_Fs24000_Gain6_A1, /* Gain setting 6 */
+         HPF_Fs24000_Gain6_A0, -HPF_Fs24000_Gain6_B1},
+        {HPF_Fs24000_Gain7_A1, /* Gain setting 7 */
+         HPF_Fs24000_Gain7_A0, -HPF_Fs24000_Gain7_B1},
+        {HPF_Fs24000_Gain8_A1, /* Gain setting 8 */
+         HPF_Fs24000_Gain8_A0, -HPF_Fs24000_Gain8_B1},
+        {HPF_Fs24000_Gain9_A1, /* Gain setting 9 */
+         HPF_Fs24000_Gain9_A0, -HPF_Fs24000_Gain9_B1},
+        {HPF_Fs24000_Gain10_A1, /* Gain setting 10 */
+         HPF_Fs24000_Gain10_A0, -HPF_Fs24000_Gain10_B1},
+        {HPF_Fs24000_Gain11_A1, /* Gain setting 11 */
+         HPF_Fs24000_Gain11_A0, -HPF_Fs24000_Gain11_B1},
+        {HPF_Fs24000_Gain12_A1, /* Gain setting 12 */
+         HPF_Fs24000_Gain12_A0, -HPF_Fs24000_Gain12_B1},
+        {HPF_Fs24000_Gain13_A1, /* Gain setting 13 */
+         HPF_Fs24000_Gain13_A0, -HPF_Fs24000_Gain13_B1},
+        {HPF_Fs24000_Gain14_A1, /* Gain setting 14 */
+         HPF_Fs24000_Gain14_A0, -HPF_Fs24000_Gain14_B1},
+        {HPF_Fs24000_Gain15_A1, /* Gain setting 15 */
+         HPF_Fs24000_Gain15_A0, -HPF_Fs24000_Gain15_B1},
 
-                    /* 32kHz sampling rate */
-                    {HPF_Fs32000_Gain1_A1,             /* Gain setting 1 */
-                     HPF_Fs32000_Gain1_A0,
-                     -HPF_Fs32000_Gain1_B1},
-                    {HPF_Fs32000_Gain2_A1,             /* Gain setting 2 */
-                     HPF_Fs32000_Gain2_A0,
-                     -HPF_Fs32000_Gain2_B1},
-                    {HPF_Fs32000_Gain3_A1,             /* Gain setting 3 */
-                     HPF_Fs32000_Gain3_A0,
-                     -HPF_Fs32000_Gain3_B1},
-                    {HPF_Fs32000_Gain4_A1,             /* Gain setting 4 */
-                     HPF_Fs32000_Gain4_A0,
-                     -HPF_Fs32000_Gain4_B1},
-                    {HPF_Fs32000_Gain5_A1,             /* Gain setting 5 */
-                     HPF_Fs32000_Gain5_A0,
-                     -HPF_Fs32000_Gain5_B1},
-                    {HPF_Fs32000_Gain6_A1,             /* Gain setting 6 */
-                     HPF_Fs32000_Gain6_A0,
-                     -HPF_Fs32000_Gain6_B1},
-                    {HPF_Fs32000_Gain7_A1,             /* Gain setting 7 */
-                     HPF_Fs32000_Gain7_A0,
-                     -HPF_Fs32000_Gain7_B1},
-                    {HPF_Fs32000_Gain8_A1,             /* Gain setting 8 */
-                     HPF_Fs32000_Gain8_A0,
-                     -HPF_Fs32000_Gain8_B1},
-                    {HPF_Fs32000_Gain9_A1,             /* Gain setting 9 */
-                     HPF_Fs32000_Gain9_A0,
-                     -HPF_Fs32000_Gain9_B1},
-                    {HPF_Fs32000_Gain10_A1,             /* Gain setting 10 */
-                     HPF_Fs32000_Gain10_A0,
-                     -HPF_Fs32000_Gain10_B1},
-                    {HPF_Fs32000_Gain11_A1,             /* Gain setting 11 */
-                     HPF_Fs32000_Gain11_A0,
-                     -HPF_Fs32000_Gain11_B1},
-                    {HPF_Fs32000_Gain12_A1,             /* Gain setting 12 */
-                     HPF_Fs32000_Gain12_A0,
-                     -HPF_Fs32000_Gain12_B1},
-                    {HPF_Fs32000_Gain13_A1,             /* Gain setting 13 */
-                     HPF_Fs32000_Gain13_A0,
-                     -HPF_Fs32000_Gain13_B1},
-                    {HPF_Fs32000_Gain14_A1,             /* Gain setting 14 */
-                     HPF_Fs32000_Gain14_A0,
-                     -HPF_Fs32000_Gain14_B1},
-                    {HPF_Fs32000_Gain15_A1,             /* Gain setting 15 */
-                     HPF_Fs32000_Gain15_A0,
-                     -HPF_Fs32000_Gain15_B1},
+        /* 32kHz sampling rate */
+        {HPF_Fs32000_Gain1_A1, /* Gain setting 1 */
+         HPF_Fs32000_Gain1_A0, -HPF_Fs32000_Gain1_B1},
+        {HPF_Fs32000_Gain2_A1, /* Gain setting 2 */
+         HPF_Fs32000_Gain2_A0, -HPF_Fs32000_Gain2_B1},
+        {HPF_Fs32000_Gain3_A1, /* Gain setting 3 */
+         HPF_Fs32000_Gain3_A0, -HPF_Fs32000_Gain3_B1},
+        {HPF_Fs32000_Gain4_A1, /* Gain setting 4 */
+         HPF_Fs32000_Gain4_A0, -HPF_Fs32000_Gain4_B1},
+        {HPF_Fs32000_Gain5_A1, /* Gain setting 5 */
+         HPF_Fs32000_Gain5_A0, -HPF_Fs32000_Gain5_B1},
+        {HPF_Fs32000_Gain6_A1, /* Gain setting 6 */
+         HPF_Fs32000_Gain6_A0, -HPF_Fs32000_Gain6_B1},
+        {HPF_Fs32000_Gain7_A1, /* Gain setting 7 */
+         HPF_Fs32000_Gain7_A0, -HPF_Fs32000_Gain7_B1},
+        {HPF_Fs32000_Gain8_A1, /* Gain setting 8 */
+         HPF_Fs32000_Gain8_A0, -HPF_Fs32000_Gain8_B1},
+        {HPF_Fs32000_Gain9_A1, /* Gain setting 9 */
+         HPF_Fs32000_Gain9_A0, -HPF_Fs32000_Gain9_B1},
+        {HPF_Fs32000_Gain10_A1, /* Gain setting 10 */
+         HPF_Fs32000_Gain10_A0, -HPF_Fs32000_Gain10_B1},
+        {HPF_Fs32000_Gain11_A1, /* Gain setting 11 */
+         HPF_Fs32000_Gain11_A0, -HPF_Fs32000_Gain11_B1},
+        {HPF_Fs32000_Gain12_A1, /* Gain setting 12 */
+         HPF_Fs32000_Gain12_A0, -HPF_Fs32000_Gain12_B1},
+        {HPF_Fs32000_Gain13_A1, /* Gain setting 13 */
+         HPF_Fs32000_Gain13_A0, -HPF_Fs32000_Gain13_B1},
+        {HPF_Fs32000_Gain14_A1, /* Gain setting 14 */
+         HPF_Fs32000_Gain14_A0, -HPF_Fs32000_Gain14_B1},
+        {HPF_Fs32000_Gain15_A1, /* Gain setting 15 */
+         HPF_Fs32000_Gain15_A0, -HPF_Fs32000_Gain15_B1},
 
-                    /* 44kHz sampling rate */
-                    {HPF_Fs44100_Gain1_A1,             /* Gain setting 1 */
-                     HPF_Fs44100_Gain1_A0,
-                     -HPF_Fs44100_Gain1_B1,},
-                    {HPF_Fs44100_Gain2_A1,             /* Gain setting 2 */
-                     HPF_Fs44100_Gain2_A0,
-                     -HPF_Fs44100_Gain2_B1},
-                    {HPF_Fs44100_Gain3_A1,             /* Gain setting 3 */
-                     HPF_Fs44100_Gain3_A0,
-                     -HPF_Fs44100_Gain3_B1},
-                    {HPF_Fs44100_Gain4_A1,             /* Gain setting 4 */
-                     HPF_Fs44100_Gain4_A0,
-                     -HPF_Fs44100_Gain4_B1},
-                    {HPF_Fs44100_Gain5_A1,             /* Gain setting 5 */
-                     HPF_Fs44100_Gain5_A0,
-                     -HPF_Fs44100_Gain5_B1},
-                    {HPF_Fs44100_Gain6_A1,             /* Gain setting 6 */
-                     HPF_Fs44100_Gain6_A0,
-                     -HPF_Fs44100_Gain6_B1},
-                    {HPF_Fs44100_Gain7_A1,             /* Gain setting 7 */
-                     HPF_Fs44100_Gain7_A0,
-                     -HPF_Fs44100_Gain7_B1},
-                    {HPF_Fs44100_Gain8_A1,             /* Gain setting 8 */
-                     HPF_Fs44100_Gain8_A0,
-                     -HPF_Fs44100_Gain8_B1},
-                    {HPF_Fs44100_Gain9_A1,             /* Gain setting 9 */
-                     HPF_Fs44100_Gain9_A0,
-                     -HPF_Fs44100_Gain9_B1},
-                    {HPF_Fs44100_Gain10_A1,             /* Gain setting 10 */
-                     HPF_Fs44100_Gain10_A0,
-                     -HPF_Fs44100_Gain10_B1},
-                    {HPF_Fs44100_Gain11_A1,             /* Gain setting 11 */
-                     HPF_Fs44100_Gain11_A0,
-                     -HPF_Fs44100_Gain11_B1},
-                    {HPF_Fs44100_Gain12_A1,             /* Gain setting 12 */
-                     HPF_Fs44100_Gain12_A0,
-                     -HPF_Fs44100_Gain12_B1},
-                    {HPF_Fs44100_Gain13_A1,             /* Gain setting 13 */
-                     HPF_Fs44100_Gain13_A0,
-                     -HPF_Fs44100_Gain13_B1},
-                    {HPF_Fs44100_Gain14_A1,             /* Gain setting 14 */
-                     HPF_Fs44100_Gain14_A0,
-                     -HPF_Fs44100_Gain14_B1},
-                    {HPF_Fs44100_Gain15_A1,             /* Gain setting 15 */
-                     HPF_Fs44100_Gain15_A0,
-                     -HPF_Fs44100_Gain15_B1},
+        /* 44.1kHz sampling rate */
+        {HPF_Fs44100_Gain1_A1, /* Gain setting 1 */
+         HPF_Fs44100_Gain1_A0, -HPF_Fs44100_Gain1_B1},
+        {HPF_Fs44100_Gain2_A1, /* Gain setting 2 */
+         HPF_Fs44100_Gain2_A0, -HPF_Fs44100_Gain2_B1},
+        {HPF_Fs44100_Gain3_A1, /* Gain setting 3 */
+         HPF_Fs44100_Gain3_A0, -HPF_Fs44100_Gain3_B1},
+        {HPF_Fs44100_Gain4_A1, /* Gain setting 4 */
+         HPF_Fs44100_Gain4_A0, -HPF_Fs44100_Gain4_B1},
+        {HPF_Fs44100_Gain5_A1, /* Gain setting 5 */
+         HPF_Fs44100_Gain5_A0, -HPF_Fs44100_Gain5_B1},
+        {HPF_Fs44100_Gain6_A1, /* Gain setting 6 */
+         HPF_Fs44100_Gain6_A0, -HPF_Fs44100_Gain6_B1},
+        {HPF_Fs44100_Gain7_A1, /* Gain setting 7 */
+         HPF_Fs44100_Gain7_A0, -HPF_Fs44100_Gain7_B1},
+        {HPF_Fs44100_Gain8_A1, /* Gain setting 8 */
+         HPF_Fs44100_Gain8_A0, -HPF_Fs44100_Gain8_B1},
+        {HPF_Fs44100_Gain9_A1, /* Gain setting 9 */
+         HPF_Fs44100_Gain9_A0, -HPF_Fs44100_Gain9_B1},
+        {HPF_Fs44100_Gain10_A1, /* Gain setting 10 */
+         HPF_Fs44100_Gain10_A0, -HPF_Fs44100_Gain10_B1},
+        {HPF_Fs44100_Gain11_A1, /* Gain setting 11 */
+         HPF_Fs44100_Gain11_A0, -HPF_Fs44100_Gain11_B1},
+        {HPF_Fs44100_Gain12_A1, /* Gain setting 12 */
+         HPF_Fs44100_Gain12_A0, -HPF_Fs44100_Gain12_B1},
+        {HPF_Fs44100_Gain13_A1, /* Gain setting 13 */
+         HPF_Fs44100_Gain13_A0, -HPF_Fs44100_Gain13_B1},
+        {HPF_Fs44100_Gain14_A1, /* Gain setting 14 */
+         HPF_Fs44100_Gain14_A0, -HPF_Fs44100_Gain14_B1},
+        {HPF_Fs44100_Gain15_A1, /* Gain setting 15 */
+         HPF_Fs44100_Gain15_A0, -HPF_Fs44100_Gain15_B1},
 
-                    /* 48kHz sampling rate */
-                    {HPF_Fs48000_Gain1_A1,             /* Gain setting 1 */
-                     HPF_Fs48000_Gain1_A0,
-                     -HPF_Fs48000_Gain1_B1},
-                    {HPF_Fs48000_Gain2_A1,             /* Gain setting 2 */
-                     HPF_Fs48000_Gain2_A0,
-                     -HPF_Fs48000_Gain2_B1},
-                    {HPF_Fs48000_Gain3_A1,             /* Gain setting 3 */
-                     HPF_Fs48000_Gain3_A0,
-                     -HPF_Fs48000_Gain3_B1},
-                    {HPF_Fs48000_Gain4_A1,             /* Gain setting 4 */
-                     HPF_Fs48000_Gain4_A0,
-                     -HPF_Fs48000_Gain4_B1},
-                    {HPF_Fs48000_Gain5_A1,             /* Gain setting 5 */
-                     HPF_Fs48000_Gain5_A0,
-                     -HPF_Fs48000_Gain5_B1},
-                    {HPF_Fs48000_Gain6_A1,             /* Gain setting 6 */
-                     HPF_Fs48000_Gain6_A0,
-                     -HPF_Fs48000_Gain6_B1},
-                    {HPF_Fs48000_Gain7_A1,             /* Gain setting 7 */
-                     HPF_Fs48000_Gain7_A0,
-                     -HPF_Fs48000_Gain7_B1},
-                    {HPF_Fs48000_Gain8_A1,             /* Gain setting 8 */
-                     HPF_Fs48000_Gain8_A0,
-                     -HPF_Fs48000_Gain8_B1},
-                    {HPF_Fs48000_Gain9_A1,             /* Gain setting 9 */
-                     HPF_Fs48000_Gain9_A0,
-                     -HPF_Fs48000_Gain9_B1},
-                    {HPF_Fs48000_Gain10_A1,             /* Gain setting 10 */
-                     HPF_Fs48000_Gain10_A0,
-                     -HPF_Fs48000_Gain10_B1},
-                    {HPF_Fs48000_Gain11_A1,             /* Gain setting 11 */
-                     HPF_Fs48000_Gain11_A0,
-                     -HPF_Fs48000_Gain11_B1},
-                    {HPF_Fs48000_Gain12_A1,             /* Gain setting 12 */
-                     HPF_Fs48000_Gain12_A0,
-                     -HPF_Fs48000_Gain12_B1},
-                    {HPF_Fs48000_Gain13_A1,             /* Gain setting 13 */
-                     HPF_Fs48000_Gain13_A0,
-                     -HPF_Fs48000_Gain13_B1},
-                    {HPF_Fs48000_Gain14_A1,             /* Gain setting 14 */
-                     HPF_Fs48000_Gain14_A0,
-                     -HPF_Fs48000_Gain14_B1},
-                    {HPF_Fs48000_Gain15_A1,             /* Gain setting 15 */
-                     HPF_Fs48000_Gain15_A0,
-                     -HPF_Fs48000_Gain15_B1}
-                    ,
-                    /* 88kHz Sampling rate */
-                    {HPF_Fs88200_Gain1_A1,             /* Gain Setting  1 */
-                    HPF_Fs88200_Gain1_A0,
-                    -HPF_Fs88200_Gain1_B1},
-                    {HPF_Fs88200_Gain2_A1,             /* Gain Setting  2 */
-                    HPF_Fs88200_Gain2_A0,
-                    -HPF_Fs88200_Gain2_B1},
-                    {HPF_Fs88200_Gain3_A1,             /* Gain Setting  3 */
-                    HPF_Fs88200_Gain3_A0,
-                    -HPF_Fs88200_Gain3_B1},
-                    {HPF_Fs88200_Gain4_A1,             /* Gain Setting  4 */
-                    HPF_Fs88200_Gain4_A0,
-                    -HPF_Fs88200_Gain4_B1},
-                    {HPF_Fs88200_Gain5_A1,             /* Gain Setting  5 */
-                    HPF_Fs88200_Gain5_A0,
-                    -HPF_Fs88200_Gain5_B1},
-                    {HPF_Fs88200_Gain6_A1,             /* Gain Setting  6 */
-                    HPF_Fs88200_Gain6_A0,
-                    -HPF_Fs88200_Gain6_B1},
-                    {HPF_Fs88200_Gain7_A1,             /* Gain Setting  7 */
-                    HPF_Fs88200_Gain7_A0,
-                    -HPF_Fs88200_Gain7_B1},
-                    {HPF_Fs88200_Gain8_A1,             /* Gain Setting  8 */
-                    HPF_Fs88200_Gain8_A0,
-                    -HPF_Fs88200_Gain8_B1},
-                    {HPF_Fs88200_Gain9_A1,             /* Gain Setting  9 */
-                    HPF_Fs88200_Gain9_A0,
-                    -HPF_Fs88200_Gain9_B1},
-                    {HPF_Fs88200_Gain10_A1,             /* Gain Setting  10 */
-                    HPF_Fs88200_Gain10_A0,
-                    -HPF_Fs88200_Gain10_B1},
-                    {HPF_Fs88200_Gain11_A1,             /* Gain Setting  11 */
-                    HPF_Fs88200_Gain11_A0,
-                    -HPF_Fs88200_Gain11_B1},
-                    {HPF_Fs88200_Gain12_A1,             /* Gain Setting  12 */
-                    HPF_Fs88200_Gain12_A0,
-                    -HPF_Fs88200_Gain12_B1},
-                    {HPF_Fs88200_Gain13_A1,             /* Gain Setting  13 */
-                    HPF_Fs88200_Gain13_A0,
-                    -HPF_Fs88200_Gain13_B1},
-                    {HPF_Fs88200_Gain14_A1,             /* Gain Setting  14 */
-                    HPF_Fs88200_Gain14_A0,
-                    -HPF_Fs88200_Gain14_B1},
-                    {HPF_Fs88200_Gain15_A1,             /* Gain Setting  15 */
-                    HPF_Fs88200_Gain15_A0,
-                    -HPF_Fs88200_Gain15_B1},
+        /* 48kHz sampling rate */
+        {HPF_Fs48000_Gain1_A1, /* Gain setting 1 */
+         HPF_Fs48000_Gain1_A0, -HPF_Fs48000_Gain1_B1},
+        {HPF_Fs48000_Gain2_A1, /* Gain setting 2 */
+         HPF_Fs48000_Gain2_A0, -HPF_Fs48000_Gain2_B1},
+        {HPF_Fs48000_Gain3_A1, /* Gain setting 3 */
+         HPF_Fs48000_Gain3_A0, -HPF_Fs48000_Gain3_B1},
+        {HPF_Fs48000_Gain4_A1, /* Gain setting 4 */
+         HPF_Fs48000_Gain4_A0, -HPF_Fs48000_Gain4_B1},
+        {HPF_Fs48000_Gain5_A1, /* Gain setting 5 */
+         HPF_Fs48000_Gain5_A0, -HPF_Fs48000_Gain5_B1},
+        {HPF_Fs48000_Gain6_A1, /* Gain setting 6 */
+         HPF_Fs48000_Gain6_A0, -HPF_Fs48000_Gain6_B1},
+        {HPF_Fs48000_Gain7_A1, /* Gain setting 7 */
+         HPF_Fs48000_Gain7_A0, -HPF_Fs48000_Gain7_B1},
+        {HPF_Fs48000_Gain8_A1, /* Gain setting 8 */
+         HPF_Fs48000_Gain8_A0, -HPF_Fs48000_Gain8_B1},
+        {HPF_Fs48000_Gain9_A1, /* Gain setting 9 */
+         HPF_Fs48000_Gain9_A0, -HPF_Fs48000_Gain9_B1},
+        {HPF_Fs48000_Gain10_A1, /* Gain setting 10 */
+         HPF_Fs48000_Gain10_A0, -HPF_Fs48000_Gain10_B1},
+        {HPF_Fs48000_Gain11_A1, /* Gain setting 11 */
+         HPF_Fs48000_Gain11_A0, -HPF_Fs48000_Gain11_B1},
+        {HPF_Fs48000_Gain12_A1, /* Gain setting 12 */
+         HPF_Fs48000_Gain12_A0, -HPF_Fs48000_Gain12_B1},
+        {HPF_Fs48000_Gain13_A1, /* Gain setting 13 */
+         HPF_Fs48000_Gain13_A0, -HPF_Fs48000_Gain13_B1},
+        {HPF_Fs48000_Gain14_A1, /* Gain setting 14 */
+         HPF_Fs48000_Gain14_A0, -HPF_Fs48000_Gain14_B1},
+        {HPF_Fs48000_Gain15_A1, /* Gain setting 15 */
+         HPF_Fs48000_Gain15_A0, -HPF_Fs48000_Gain15_B1},
+
+        /* 88.2kHz sampling rate */
+        {HPF_Fs88200_Gain1_A1, /* Gain Setting  1 */
+         HPF_Fs88200_Gain1_A0, -HPF_Fs88200_Gain1_B1},
+        {HPF_Fs88200_Gain2_A1, /* Gain Setting  2 */
+         HPF_Fs88200_Gain2_A0, -HPF_Fs88200_Gain2_B1},
+        {HPF_Fs88200_Gain3_A1, /* Gain Setting  3 */
+         HPF_Fs88200_Gain3_A0, -HPF_Fs88200_Gain3_B1},
+        {HPF_Fs88200_Gain4_A1, /* Gain Setting  4 */
+         HPF_Fs88200_Gain4_A0, -HPF_Fs88200_Gain4_B1},
+        {HPF_Fs88200_Gain5_A1, /* Gain Setting  5 */
+         HPF_Fs88200_Gain5_A0, -HPF_Fs88200_Gain5_B1},
+        {HPF_Fs88200_Gain6_A1, /* Gain Setting  6 */
+         HPF_Fs88200_Gain6_A0, -HPF_Fs88200_Gain6_B1},
+        {HPF_Fs88200_Gain7_A1, /* Gain Setting  7 */
+         HPF_Fs88200_Gain7_A0, -HPF_Fs88200_Gain7_B1},
+        {HPF_Fs88200_Gain8_A1, /* Gain Setting  8 */
+         HPF_Fs88200_Gain8_A0, -HPF_Fs88200_Gain8_B1},
+        {HPF_Fs88200_Gain9_A1, /* Gain Setting  9 */
+         HPF_Fs88200_Gain9_A0, -HPF_Fs88200_Gain9_B1},
+        {HPF_Fs88200_Gain10_A1, /* Gain Setting  10 */
+         HPF_Fs88200_Gain10_A0, -HPF_Fs88200_Gain10_B1},
+        {HPF_Fs88200_Gain11_A1, /* Gain Setting  11 */
+         HPF_Fs88200_Gain11_A0, -HPF_Fs88200_Gain11_B1},
+        {HPF_Fs88200_Gain12_A1, /* Gain Setting  12 */
+         HPF_Fs88200_Gain12_A0, -HPF_Fs88200_Gain12_B1},
+        {HPF_Fs88200_Gain13_A1, /* Gain Setting  13 */
+         HPF_Fs88200_Gain13_A0, -HPF_Fs88200_Gain13_B1},
+        {HPF_Fs88200_Gain14_A1, /* Gain Setting  14 */
+         HPF_Fs88200_Gain14_A0, -HPF_Fs88200_Gain14_B1},
+        {HPF_Fs88200_Gain15_A1, /* Gain Setting  15 */
+         HPF_Fs88200_Gain15_A0, -HPF_Fs88200_Gain15_B1},
 
-                    /* 96kHz sampling rate */
-                    {HPF_Fs96000_Gain1_A1,             /* Gain setting 1 */
-                    HPF_Fs96000_Gain1_A0,
-                    -HPF_Fs96000_Gain1_B1},
-                    {HPF_Fs96000_Gain2_A1,             /* Gain setting 2 */
-                    HPF_Fs96000_Gain2_A0,
-                    -HPF_Fs96000_Gain2_B1},
-                    {HPF_Fs96000_Gain3_A1,             /* Gain setting 3 */
-                    HPF_Fs96000_Gain3_A0,
-                    -HPF_Fs96000_Gain3_B1},
-                    {HPF_Fs96000_Gain4_A1,             /* Gain setting 4 */
-                    HPF_Fs96000_Gain4_A0,
-                    -HPF_Fs96000_Gain4_B1},
-                    {HPF_Fs96000_Gain5_A1,             /* Gain setting 5 */
-                    HPF_Fs96000_Gain5_A0,
-                    -HPF_Fs96000_Gain5_B1},
-                    {HPF_Fs96000_Gain6_A1,             /* Gain setting 6 */
-                    HPF_Fs96000_Gain6_A0,
-                    -HPF_Fs96000_Gain6_B1},
-                    {HPF_Fs96000_Gain7_A1,             /* Gain setting 7 */
-                    HPF_Fs96000_Gain7_A0,
-                    -HPF_Fs96000_Gain7_B1},
-                    {HPF_Fs96000_Gain8_A1,             /* Gain setting 8 */
-                    HPF_Fs96000_Gain8_A0,
-                    -HPF_Fs96000_Gain8_B1},
-                    {HPF_Fs96000_Gain9_A1,             /* Gain setting 9 */
-                    HPF_Fs96000_Gain9_A0,
-                    -HPF_Fs96000_Gain9_B1},
-                    {HPF_Fs96000_Gain10_A1,             /* Gain setting 10 */
-                    HPF_Fs96000_Gain10_A0,
-                    -HPF_Fs96000_Gain10_B1},
-                    {HPF_Fs96000_Gain11_A1,             /* Gain setting 11 */
-                    HPF_Fs96000_Gain11_A0,
-                    -HPF_Fs96000_Gain11_B1},
-                    {HPF_Fs96000_Gain12_A1,             /* Gain setting 12 */
-                    HPF_Fs96000_Gain12_A0,
-                    -HPF_Fs96000_Gain12_B1},
-                    {HPF_Fs96000_Gain13_A1,             /* Gain setting 13 */
-                    HPF_Fs96000_Gain13_A0,
-                    -HPF_Fs96000_Gain13_B1},
-                    {HPF_Fs96000_Gain14_A1,             /* Gain setting 14 */
-                    HPF_Fs96000_Gain14_A0,
-                    -HPF_Fs96000_Gain14_B1},
-                    {HPF_Fs96000_Gain15_A1,             /* Gain setting 15 */
-                    HPF_Fs96000_Gain15_A0,
-                    -HPF_Fs96000_Gain15_B1},
+        /* 96kHz sampling rate */
+        {HPF_Fs96000_Gain1_A1, /* Gain setting 1 */
+         HPF_Fs96000_Gain1_A0, -HPF_Fs96000_Gain1_B1},
+        {HPF_Fs96000_Gain2_A1, /* Gain setting 2 */
+         HPF_Fs96000_Gain2_A0, -HPF_Fs96000_Gain2_B1},
+        {HPF_Fs96000_Gain3_A1, /* Gain setting 3 */
+         HPF_Fs96000_Gain3_A0, -HPF_Fs96000_Gain3_B1},
+        {HPF_Fs96000_Gain4_A1, /* Gain setting 4 */
+         HPF_Fs96000_Gain4_A0, -HPF_Fs96000_Gain4_B1},
+        {HPF_Fs96000_Gain5_A1, /* Gain setting 5 */
+         HPF_Fs96000_Gain5_A0, -HPF_Fs96000_Gain5_B1},
+        {HPF_Fs96000_Gain6_A1, /* Gain setting 6 */
+         HPF_Fs96000_Gain6_A0, -HPF_Fs96000_Gain6_B1},
+        {HPF_Fs96000_Gain7_A1, /* Gain setting 7 */
+         HPF_Fs96000_Gain7_A0, -HPF_Fs96000_Gain7_B1},
+        {HPF_Fs96000_Gain8_A1, /* Gain setting 8 */
+         HPF_Fs96000_Gain8_A0, -HPF_Fs96000_Gain8_B1},
+        {HPF_Fs96000_Gain9_A1, /* Gain setting 9 */
+         HPF_Fs96000_Gain9_A0, -HPF_Fs96000_Gain9_B1},
+        {HPF_Fs96000_Gain10_A1, /* Gain setting 10 */
+         HPF_Fs96000_Gain10_A0, -HPF_Fs96000_Gain10_B1},
+        {HPF_Fs96000_Gain11_A1, /* Gain setting 11 */
+         HPF_Fs96000_Gain11_A0, -HPF_Fs96000_Gain11_B1},
+        {HPF_Fs96000_Gain12_A1, /* Gain setting 12 */
+         HPF_Fs96000_Gain12_A0, -HPF_Fs96000_Gain12_B1},
+        {HPF_Fs96000_Gain13_A1, /* Gain setting 13 */
+         HPF_Fs96000_Gain13_A0, -HPF_Fs96000_Gain13_B1},
+        {HPF_Fs96000_Gain14_A1, /* Gain setting 14 */
+         HPF_Fs96000_Gain14_A0, -HPF_Fs96000_Gain14_B1},
+        {HPF_Fs96000_Gain15_A1, /* Gain setting 15 */
+         HPF_Fs96000_Gain15_A0, -HPF_Fs96000_Gain15_B1},
 
-                    /* 176kHz Sampling rate */
-                    {HPF_Fs176400_Gain1_A1,             /* Gain Setting  1 */
-                    HPF_Fs176400_Gain1_A0,
-                    -HPF_Fs176400_Gain1_B1},
-                    {HPF_Fs176400_Gain2_A1,             /* Gain Setting  2 */
-                    HPF_Fs176400_Gain2_A0,
-                    -HPF_Fs176400_Gain2_B1},
-                    {HPF_Fs176400_Gain3_A1,             /* Gain Setting  3 */
-                    HPF_Fs176400_Gain3_A0,
-                    -HPF_Fs176400_Gain3_B1},
-                    {HPF_Fs176400_Gain4_A1,             /* Gain Setting  4 */
-                    HPF_Fs176400_Gain4_A0,
-                    -HPF_Fs176400_Gain4_B1},
-                    {HPF_Fs176400_Gain5_A1,             /* Gain Setting  5 */
-                    HPF_Fs176400_Gain5_A0,
-                    -HPF_Fs176400_Gain5_B1},
-                    {HPF_Fs176400_Gain6_A1,             /* Gain Setting  6 */
-                    HPF_Fs176400_Gain6_A0,
-                    -HPF_Fs176400_Gain6_B1},
-                    {HPF_Fs176400_Gain7_A1,             /* Gain Setting  7 */
-                    HPF_Fs176400_Gain7_A0,
-                    -HPF_Fs176400_Gain7_B1},
-                    {HPF_Fs176400_Gain8_A1,             /* Gain Setting  8 */
-                    HPF_Fs176400_Gain8_A0,
-                    -HPF_Fs176400_Gain8_B1},
-                    {HPF_Fs176400_Gain9_A1,             /* Gain Setting  9 */
-                    HPF_Fs176400_Gain9_A0,
-                    -HPF_Fs176400_Gain9_B1},
-                    {HPF_Fs176400_Gain10_A1,             /* Gain Setting  10 */
-                    HPF_Fs176400_Gain10_A0,
-                    -HPF_Fs176400_Gain10_B1},
-                    {HPF_Fs176400_Gain11_A1,             /* Gain Setting  11 */
-                    HPF_Fs176400_Gain11_A0,
-                    -HPF_Fs176400_Gain11_B1},
-                    {HPF_Fs176400_Gain12_A1,             /* Gain Setting  12 */
-                    HPF_Fs176400_Gain12_A0,
-                    -HPF_Fs176400_Gain12_B1},
-                    {HPF_Fs176400_Gain13_A1,             /* Gain Setting  13 */
-                    HPF_Fs176400_Gain13_A0,
-                    -HPF_Fs176400_Gain13_B1},
-                    {HPF_Fs176400_Gain14_A1,             /* Gain Setting  14 */
-                    HPF_Fs176400_Gain14_A0,
-                    -HPF_Fs176400_Gain14_B1},
-                    {HPF_Fs176400_Gain15_A1,             /* Gain Setting  15 */
-                    HPF_Fs176400_Gain15_A0,
-                    -HPF_Fs176400_Gain15_B1},
+        /* 176.4kHz sampling rate */
+        {HPF_Fs176400_Gain1_A1, /* Gain Setting  1 */
+         HPF_Fs176400_Gain1_A0, -HPF_Fs176400_Gain1_B1},
+        {HPF_Fs176400_Gain2_A1, /* Gain Setting  2 */
+         HPF_Fs176400_Gain2_A0, -HPF_Fs176400_Gain2_B1},
+        {HPF_Fs176400_Gain3_A1, /* Gain Setting  3 */
+         HPF_Fs176400_Gain3_A0, -HPF_Fs176400_Gain3_B1},
+        {HPF_Fs176400_Gain4_A1, /* Gain Setting  4 */
+         HPF_Fs176400_Gain4_A0, -HPF_Fs176400_Gain4_B1},
+        {HPF_Fs176400_Gain5_A1, /* Gain Setting  5 */
+         HPF_Fs176400_Gain5_A0, -HPF_Fs176400_Gain5_B1},
+        {HPF_Fs176400_Gain6_A1, /* Gain Setting  6 */
+         HPF_Fs176400_Gain6_A0, -HPF_Fs176400_Gain6_B1},
+        {HPF_Fs176400_Gain7_A1, /* Gain Setting  7 */
+         HPF_Fs176400_Gain7_A0, -HPF_Fs176400_Gain7_B1},
+        {HPF_Fs176400_Gain8_A1, /* Gain Setting  8 */
+         HPF_Fs176400_Gain8_A0, -HPF_Fs176400_Gain8_B1},
+        {HPF_Fs176400_Gain9_A1, /* Gain Setting  9 */
+         HPF_Fs176400_Gain9_A0, -HPF_Fs176400_Gain9_B1},
+        {HPF_Fs176400_Gain10_A1, /* Gain Setting  10 */
+         HPF_Fs176400_Gain10_A0, -HPF_Fs176400_Gain10_B1},
+        {HPF_Fs176400_Gain11_A1, /* Gain Setting  11 */
+         HPF_Fs176400_Gain11_A0, -HPF_Fs176400_Gain11_B1},
+        {HPF_Fs176400_Gain12_A1, /* Gain Setting  12 */
+         HPF_Fs176400_Gain12_A0, -HPF_Fs176400_Gain12_B1},
+        {HPF_Fs176400_Gain13_A1, /* Gain Setting  13 */
+         HPF_Fs176400_Gain13_A0, -HPF_Fs176400_Gain13_B1},
+        {HPF_Fs176400_Gain14_A1, /* Gain Setting  14 */
+         HPF_Fs176400_Gain14_A0, -HPF_Fs176400_Gain14_B1},
+        {HPF_Fs176400_Gain15_A1, /* Gain Setting  15 */
+         HPF_Fs176400_Gain15_A0, -HPF_Fs176400_Gain15_B1},
 
-                    /* 192kHz sampling rate */
-                    {HPF_Fs192000_Gain1_A1,             /* Gain setting 1 */
-                    HPF_Fs192000_Gain1_A0,
-                    -HPF_Fs192000_Gain1_B1},
-                    {HPF_Fs192000_Gain2_A1,             /* Gain setting 2 */
-                    HPF_Fs192000_Gain2_A0,
-                    -HPF_Fs192000_Gain2_B1},
-                    {HPF_Fs192000_Gain3_A1,             /* Gain setting 3 */
-                    HPF_Fs192000_Gain3_A0,
-                    -HPF_Fs192000_Gain3_B1},
-                    {HPF_Fs192000_Gain4_A1,             /* Gain setting 4 */
-                    HPF_Fs192000_Gain4_A0,
-                    -HPF_Fs192000_Gain4_B1},
-                    {HPF_Fs192000_Gain5_A1,             /* Gain setting 5 */
-                    HPF_Fs192000_Gain5_A0,
-                    -HPF_Fs192000_Gain5_B1},
-                    {HPF_Fs192000_Gain6_A1,             /* Gain setting 6 */
-                    HPF_Fs192000_Gain6_A0,
-                    -HPF_Fs192000_Gain6_B1},
-                    {HPF_Fs192000_Gain7_A1,             /* Gain setting 7 */
-                    HPF_Fs192000_Gain7_A0,
-                    -HPF_Fs192000_Gain7_B1},
-                    {HPF_Fs192000_Gain8_A1,             /* Gain setting 8 */
-                    HPF_Fs192000_Gain8_A0,
-                    -HPF_Fs192000_Gain8_B1},
-                    {HPF_Fs192000_Gain9_A1,             /* Gain setting 9 */
-                    HPF_Fs192000_Gain9_A0,
-                    -HPF_Fs192000_Gain9_B1},
-                    {HPF_Fs192000_Gain10_A1,             /* Gain setting 10 */
-                    HPF_Fs192000_Gain10_A0,
-                    -HPF_Fs192000_Gain10_B1},
-                    {HPF_Fs192000_Gain11_A1,             /* Gain setting 11 */
-                    HPF_Fs192000_Gain11_A0,
-                    -HPF_Fs192000_Gain11_B1},
-                    {HPF_Fs192000_Gain12_A1,             /* Gain setting 12 */
-                    HPF_Fs192000_Gain12_A0,
-                    -HPF_Fs192000_Gain12_B1},
-                    {HPF_Fs192000_Gain13_A1,             /* Gain setting 13 */
-                    HPF_Fs192000_Gain13_A0,
-                    -HPF_Fs192000_Gain13_B1},
-                    {HPF_Fs192000_Gain14_A1,             /* Gain setting 14 */
-                    HPF_Fs192000_Gain14_A0,
-                    -HPF_Fs192000_Gain14_B1},
-                    {HPF_Fs192000_Gain15_A1,             /* Gain setting 15 */
-                    HPF_Fs192000_Gain15_A0,
-                    -HPF_Fs192000_Gain15_B1}
-                    };
+        /* 192kHz sampling rate */
+        {HPF_Fs192000_Gain1_A1, /* Gain setting 1 */
+         HPF_Fs192000_Gain1_A0, -HPF_Fs192000_Gain1_B1},
+        {HPF_Fs192000_Gain2_A1, /* Gain setting 2 */
+         HPF_Fs192000_Gain2_A0, -HPF_Fs192000_Gain2_B1},
+        {HPF_Fs192000_Gain3_A1, /* Gain setting 3 */
+         HPF_Fs192000_Gain3_A0, -HPF_Fs192000_Gain3_B1},
+        {HPF_Fs192000_Gain4_A1, /* Gain setting 4 */
+         HPF_Fs192000_Gain4_A0, -HPF_Fs192000_Gain4_B1},
+        {HPF_Fs192000_Gain5_A1, /* Gain setting 5 */
+         HPF_Fs192000_Gain5_A0, -HPF_Fs192000_Gain5_B1},
+        {HPF_Fs192000_Gain6_A1, /* Gain setting 6 */
+         HPF_Fs192000_Gain6_A0, -HPF_Fs192000_Gain6_B1},
+        {HPF_Fs192000_Gain7_A1, /* Gain setting 7 */
+         HPF_Fs192000_Gain7_A0, -HPF_Fs192000_Gain7_B1},
+        {HPF_Fs192000_Gain8_A1, /* Gain setting 8 */
+         HPF_Fs192000_Gain8_A0, -HPF_Fs192000_Gain8_B1},
+        {HPF_Fs192000_Gain9_A1, /* Gain setting 9 */
+         HPF_Fs192000_Gain9_A0, -HPF_Fs192000_Gain9_B1},
+        {HPF_Fs192000_Gain10_A1, /* Gain setting 10 */
+         HPF_Fs192000_Gain10_A0, -HPF_Fs192000_Gain10_B1},
+        {HPF_Fs192000_Gain11_A1, /* Gain setting 11 */
+         HPF_Fs192000_Gain11_A0, -HPF_Fs192000_Gain11_B1},
+        {HPF_Fs192000_Gain12_A1, /* Gain setting 12 */
+         HPF_Fs192000_Gain12_A0, -HPF_Fs192000_Gain12_B1},
+        {HPF_Fs192000_Gain13_A1, /* Gain setting 13 */
+         HPF_Fs192000_Gain13_A0, -HPF_Fs192000_Gain13_B1},
+        {HPF_Fs192000_Gain14_A1, /* Gain setting 14 */
+         HPF_Fs192000_Gain14_A0, -HPF_Fs192000_Gain14_B1},
+        {HPF_Fs192000_Gain15_A1, /* Gain setting 15 */
+         HPF_Fs192000_Gain15_A0, -HPF_Fs192000_Gain15_B1}};
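The table above packs 15 first-order treble-boost coefficient sets ({A1, A0, -B1}, matching FO_FLOAT_LShx_Coefs_t) per supported sampling rate, stored back to back. A minimal lookup sketch, assuming the caller derives a sampling-rate index in the same order as the groups above and given the declarations from this file; the helper name and macro are illustrative, not part of the library:

    /* Illustrative only: one entry per (sampling rate, gain setting 1..15) pair. */
    #define TREBLE_GAIN_STEPS 15

    static inline const FO_FLOAT_LShx_Coefs_t* GetTrebleCoefs(int fsIndex, int gainSetting) {
        return &LVM_TrebleBoostCoefs[fsIndex * TREBLE_GAIN_STEPS + (gainSetting - 1)];
    }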
 
 /************************************************************************************/
 /*                                                                                    */
@@ -463,14 +329,13 @@
 /************************************************************************************/
 
 /* dB to linear conversion table */
-const LVM_FLOAT LVM_VolumeTable[] = {
-    1.000f,             /*  0dB */
-    0.891f,             /* -1dB */
-    0.794f,             /* -2dB */
-    0.708f,             /* -3dB */
-    0.631f,             /* -4dB */
-    0.562f,             /* -5dB */
-    0.501f};            /* -6dB */
+const LVM_FLOAT LVM_VolumeTable[] = {1.000f,  /*  0dB */
+                                     0.891f,  /* -1dB */
+                                     0.794f,  /* -2dB */
+                                     0.708f,  /* -3dB */
+                                     0.631f,  /* -4dB */
+                                     0.562f,  /* -5dB */
+                                     0.501f}; /* -6dB */
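The entries follow the amplitude conversion gain = 10^(dB/20); for example 10^(-3/20) ≈ 0.708. A standalone check, illustrative only and not part of the library, reproduces the seven values:

    #include <math.h>
    #include <stdio.h>

    int main(void) {
        /* Reproduce LVM_VolumeTable[] from the dB-to-linear formula 10^(dB/20). */
        for (int dB = 0; dB >= -6; --dB) {
            printf("%3d dB -> %.3f\n", dB, pow(10.0, dB / 20.0));
        }
        return 0; /* prints 1.000, 0.891, 0.794, 0.708, 0.631, 0.562, 0.501 */
    }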
 
 /************************************************************************************/
 /*                                                                                  */
@@ -478,24 +343,16 @@
 /*                                                                                  */
 /************************************************************************************/
 
-#define LVM_MIX_TC_Fs8000      32580         /* Floating point value 0.994262695 */
-#define LVM_MIX_TC_Fs11025     32632         /* Floating point value 0.995849609 */
-#define LVM_MIX_TC_Fs12000     32643         /* Floating point value 0.996185303 */
-#define LVM_MIX_TC_Fs16000     32674         /* Floating point value 0.997131348 */
-#define LVM_MIX_TC_Fs22050     32700         /* Floating point value 0.997924805 */
-#define LVM_MIX_TC_Fs24000     32705         /* Floating point value 0.998077393 */
-#define LVM_MIX_TC_Fs32000     32721         /* Floating point value 0.998565674 */
-#define LVM_MIX_TC_Fs44100     32734         /* Floating point value 0.998962402 */
-#define LVM_MIX_TC_Fs48000     32737         /* Floating point value 0.999053955 */
+#define LVM_MIX_TC_Fs8000 32580  /* Floating point value 0.994262695 */
+#define LVM_MIX_TC_Fs11025 32632 /* Floating point value 0.995849609 */
+#define LVM_MIX_TC_Fs12000 32643 /* Floating point value 0.996185303 */
+#define LVM_MIX_TC_Fs16000 32674 /* Floating point value 0.997131348 */
+#define LVM_MIX_TC_Fs22050 32700 /* Floating point value 0.997924805 */
+#define LVM_MIX_TC_Fs24000 32705 /* Floating point value 0.998077393 */
+#define LVM_MIX_TC_Fs32000 32721 /* Floating point value 0.998565674 */
+#define LVM_MIX_TC_Fs44100 32734 /* Floating point value 0.998962402 */
+#define LVM_MIX_TC_Fs48000 32737 /* Floating point value 0.999053955 */
 
-const LVM_INT16 LVM_MixerTCTable[] = {
-    LVM_MIX_TC_Fs8000,
-    LVM_MIX_TC_Fs11025,
-    LVM_MIX_TC_Fs12000,
-    LVM_MIX_TC_Fs16000,
-    LVM_MIX_TC_Fs22050,
-    LVM_MIX_TC_Fs24000,
-    LVM_MIX_TC_Fs32000,
-    LVM_MIX_TC_Fs44100,
-    LVM_MIX_TC_Fs48000};
-
+const LVM_INT16 LVM_MixerTCTable[] = {LVM_MIX_TC_Fs8000,  LVM_MIX_TC_Fs11025, LVM_MIX_TC_Fs12000,
+                                      LVM_MIX_TC_Fs16000, LVM_MIX_TC_Fs22050, LVM_MIX_TC_Fs24000,
+                                      LVM_MIX_TC_Fs32000, LVM_MIX_TC_Fs44100, LVM_MIX_TC_Fs48000};
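The LVM_MIX_TC_Fs* constants read as Q15 fixed-point values: the commented floating-point value is the integer divided by 32768 (for example 32580 / 32768 = 0.994262695). An illustrative conversion helper, assuming the LVM type headers are included and not part of the library itself:

    /* Illustrative only: map a Q15 mixer time constant to its float equivalent. */
    static inline float MixerTCToFloat(LVM_INT16 tc) {
        return (float)tc / 32768.0f; /* e.g. LVM_MIX_TC_Fs8000 -> 0.994262695f */
    }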
diff --git a/media/libeffects/lvm/lib/Bundle/src/LVM_Tables.h b/media/libeffects/lvm/lib/Bundle/src/LVM_Tables.h
index fc82194..cf2fb5d 100644
--- a/media/libeffects/lvm/lib/Bundle/src/LVM_Tables.h
+++ b/media/libeffects/lvm/lib/Bundle/src/LVM_Tables.h
@@ -33,7 +33,7 @@
 /*                                                                                  */
 /************************************************************************************/
 
-extern FO_FLOAT_LShx_Coefs_t     LVM_TrebleBoostCoefs[];
+extern FO_FLOAT_LShx_Coefs_t LVM_TrebleBoostCoefs[];
 
 /************************************************************************************/
 /*                                                                                  */
@@ -45,4 +45,3 @@
 extern const LVM_INT16 LVM_MixerTCTable[];
 
 #endif /* __LVM_TABLES_H__ */
-
diff --git a/media/libeffects/lvm/lib/Common/lib/AGC.h b/media/libeffects/lvm/lib/Common/lib/AGC.h
index bef7fa1..c20b49a 100644
--- a/media/libeffects/lvm/lib/Common/lib/AGC.h
+++ b/media/libeffects/lvm/lib/Common/lib/AGC.h
@@ -31,16 +31,15 @@
 /*    Types                                                                       */
 /*                                                                                */
 /**********************************************************************************/
-typedef struct
-{
-    LVM_FLOAT  AGC_Gain;                        /* The current AGC gain */
-    LVM_FLOAT  AGC_MaxGain;                     /* The maximum AGC gain */
-    LVM_FLOAT  Volume;                          /* The current volume setting */
-    LVM_FLOAT  Target;                          /* The target volume setting */
-    LVM_FLOAT  AGC_Target;                      /* AGC target level */
-    LVM_FLOAT  AGC_Attack;                      /* AGC attack scaler */
-    LVM_FLOAT  AGC_Decay;                       /* AGC decay scaler */
-    LVM_FLOAT  VolumeTC;                        /* Volume update time constant */
+typedef struct {
+    LVM_FLOAT AGC_Gain;    /* The current AGC gain */
+    LVM_FLOAT AGC_MaxGain; /* The maximum AGC gain */
+    LVM_FLOAT Volume;      /* The current volume setting */
+    LVM_FLOAT Target;      /* The target volume setting */
+    LVM_FLOAT AGC_Target;  /* AGC target level */
+    LVM_FLOAT AGC_Attack;  /* AGC attack scaler */
+    LVM_FLOAT AGC_Decay;   /* AGC decay scaler */
+    LVM_FLOAT VolumeTC;    /* Volume update time constant */
 
 } AGC_MIX_VOL_2St1Mon_FLOAT_t;
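The fields suggest a gain/volume tracker driven by separate attack and decay scalers plus a volume time constant. A rough sketch of that kind of one-pole update, assuming nothing about the real AGC_MIX_VOL_* loops beyond the field names; the helper below is hypothetical:

    /* Hypothetical sketch; the actual processing lives in AGC.cpp and may differ. */
    static inline void AGC_SketchStep(AGC_MIX_VOL_2St1Mon_FLOAT_t* p, LVM_FLOAT level) {
        LVM_FLOAT error = p->AGC_Target - level * p->AGC_Gain;
        LVM_FLOAT coef = (error > 0.0f) ? p->AGC_Attack : p->AGC_Decay;
        p->AGC_Gain += coef * error; /* smooth the gain toward the target level */
        if (p->AGC_Gain > p->AGC_MaxGain) p->AGC_Gain = p->AGC_MaxGain;
        p->Volume += p->VolumeTC * (p->Target - p->Volume); /* smoothed volume */
    }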
 
@@ -49,19 +48,16 @@
 /*    Function Prototypes                                                              */
 /*                                                                                */
 /**********************************************************************************/
-void AGC_MIX_VOL_2St1Mon_D32_WRA(AGC_MIX_VOL_2St1Mon_FLOAT_t  *pInstance,     /* Instance pointer */
-                                 const LVM_FLOAT            *pStSrc,        /* Stereo source */
-                                 const LVM_FLOAT            *pMonoSrc,      /* Mono source */
-                                 LVM_FLOAT                  *pDst,          /* Stereo destination */
-                                 LVM_UINT16                 n);             /* Number of samples */
-#ifdef SUPPORT_MC
-void AGC_MIX_VOL_Mc1Mon_D32_WRA(AGC_MIX_VOL_2St1Mon_FLOAT_t  *pInstance,  /* Instance pointer */
-                                 const LVM_FLOAT            *pStSrc,      /* Source */
-                                 const LVM_FLOAT            *pMonoSrc,    /* Mono source */
-                                 LVM_FLOAT                  *pDst,        /* Destination */
-                                 LVM_UINT16                 NrFrames,     /* Number of frames */
-                                 LVM_UINT16                 NrChannels);  /* Number of channels */
-#endif
+void AGC_MIX_VOL_2St1Mon_D32_WRA(AGC_MIX_VOL_2St1Mon_FLOAT_t* pInstance, /* Instance pointer */
+                                 const LVM_FLOAT* pStSrc,                /* Stereo source */
+                                 const LVM_FLOAT* pMonoSrc,              /* Mono source */
+                                 LVM_FLOAT* pDst,                        /* Stereo destination */
+                                 LVM_UINT16 n);                          /* Number of samples */
+void AGC_MIX_VOL_Mc1Mon_D32_WRA(AGC_MIX_VOL_2St1Mon_FLOAT_t* pInstance,  /* Instance pointer */
+                                const LVM_FLOAT* pStSrc,                 /* Source */
+                                const LVM_FLOAT* pMonoSrc,               /* Mono source */
+                                LVM_FLOAT* pDst,                         /* Destination */
+                                LVM_UINT16 NrFrames,                     /* Number of frames */
+                                LVM_UINT16 NrChannels);                  /* Number of channels */
 
-#endif  /* __AGC_H__ */
-
+#endif /* __AGC_H__ */
diff --git a/media/libeffects/lvm/lib/Common/lib/BIQUAD.h b/media/libeffects/lvm/lib/Common/lib/BIQUAD.h
index c050cd0..b38e9fb 100644
--- a/media/libeffects/lvm/lib/Common/lib/BIQUAD.h
+++ b/media/libeffects/lvm/lib/Common/lib/BIQUAD.h
@@ -22,64 +22,54 @@
 /**********************************************************************************
    INSTANCE MEMORY TYPE DEFINITION
 ***********************************************************************************/
-typedef struct
-{
-#ifdef SUPPORT_MC
+typedef struct {
     /* The memory region created by this structure instance is typecast
      * into another structure containing a pointer and an array of filter
      * coefficients. In one case this memory region is used for storing
      * DC component of channels
      */
-    LVM_FLOAT *pStorage;
+    LVM_FLOAT* pStorage;
     LVM_FLOAT Storage[LVM_MAX_CHANNELS];
-#else
-    LVM_FLOAT Storage[6];
-#endif
 } Biquad_FLOAT_Instance_t;
 /**********************************************************************************
    COEFFICIENT TYPE DEFINITIONS
 ***********************************************************************************/
 
 /*** Biquad coefficients **********************************************************/
-typedef struct
-{
-    LVM_FLOAT  A2;   /*  a2  */
-    LVM_FLOAT  A1;   /*  a1  */
-    LVM_FLOAT  A0;   /*  a0  */
-    LVM_FLOAT  B2;   /* -b2! */
-    LVM_FLOAT  B1;   /* -b1! */
+typedef struct {
+    LVM_FLOAT A2; /*  a2  */
+    LVM_FLOAT A1; /*  a1  */
+    LVM_FLOAT A0; /*  a0  */
+    LVM_FLOAT B2; /* -b2! */
+    LVM_FLOAT B1; /* -b1! */
 } BQ_FLOAT_Coefs_t;
 
 /*** First order coefficients *****************************************************/
-typedef struct
-{
-    LVM_FLOAT A1;   /*  a1  */
-    LVM_FLOAT A0;   /*  a0  */
-    LVM_FLOAT B1;   /* -b1! */
+typedef struct {
+    LVM_FLOAT A1; /*  a1  */
+    LVM_FLOAT A0; /*  a0  */
+    LVM_FLOAT B1; /* -b1! */
 } FO_FLOAT_Coefs_t;
 
 /*** First order coefficients with Shift*****************************************************/
-typedef struct
-{
-    LVM_FLOAT A1;    /*  a1  */
-    LVM_FLOAT A0;    /*  a0  */
-    LVM_FLOAT B1;    /* -b1! */
+typedef struct {
+    LVM_FLOAT A1; /*  a1  */
+    LVM_FLOAT A0; /*  a0  */
+    LVM_FLOAT B1; /* -b1! */
 } FO_FLOAT_LShx_Coefs_t;
 /*** Band pass coefficients *******************************************************/
-typedef struct
-{
-    LVM_FLOAT  A0;   /*  a0  */
-    LVM_FLOAT  B2;   /* -b2! */
-    LVM_FLOAT  B1;   /* -b1! */
+typedef struct {
+    LVM_FLOAT A0; /*  a0  */
+    LVM_FLOAT B2; /* -b2! */
+    LVM_FLOAT B1; /* -b1! */
 } BP_FLOAT_Coefs_t;
 
 /*** Peaking coefficients *********************************************************/
-typedef struct
-{
-    LVM_FLOAT A0;   /*  a0  */
-    LVM_FLOAT B2;   /* -b2! */
-    LVM_FLOAT B1;   /* -b1! */
-    LVM_FLOAT  G;   /* Gain */
+typedef struct {
+    LVM_FLOAT A0; /*  a0  */
+    LVM_FLOAT B2; /* -b2! */
+    LVM_FLOAT B1; /* -b1! */
+    LVM_FLOAT G;  /* Gain */
 } PK_FLOAT_Coefs_t;
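The "!" in the comments marks that the recursive (denominator) coefficients are stored negated, so a filter step needs only multiply-adds. Under the usual direct-form convention, an assumption about notation consistent with those comments, the stored B1 and B2 equal -b1 and -b2 of the transfer function:

    /* Illustrative direct-form step using the stored, already-negated B coefficients. */
    static inline LVM_FLOAT BQ_SketchStep(const BQ_FLOAT_Coefs_t* c,
                                          LVM_FLOAT x0, LVM_FLOAT x1, LVM_FLOAT x2,
                                          LVM_FLOAT y1, LVM_FLOAT y2) {
        /* y[n] = a0*x[n] + a1*x[n-1] + a2*x[n-2] - b1*y[n-1] - b2*y[n-2] */
        return c->A0 * x0 + c->A1 * x1 + c->A2 * x2 + c->B1 * y1 + c->B2 * y2;
    }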
 
 /**********************************************************************************
@@ -87,39 +77,28 @@
 ***********************************************************************************/
 
 /*** Types used for first order and shelving filter *******************************/
-typedef struct
-{
-    LVM_FLOAT Storage[ (1 * 2) ];  /* One channel, two taps of size LVM_INT32 */
+typedef struct {
+    LVM_FLOAT Storage[(1 * 2)]; /* One channel, two taps of size LVM_FLOAT */
 } Biquad_1I_Order1_FLOAT_Taps_t;
 
-typedef struct
-{
-#ifdef SUPPORT_MC
+typedef struct {
     /* LVM_MAX_CHANNELS channels, two taps of size LVM_FLOAT */
-    LVM_FLOAT Storage[ (LVM_MAX_CHANNELS * 2) ];
-#else
-    LVM_FLOAT Storage[ (2 * 2) ];  /* Two channels, two taps of size LVM_FLOAT */
-#endif
+    LVM_FLOAT Storage[(LVM_MAX_CHANNELS * 2)];
 } Biquad_2I_Order1_FLOAT_Taps_t;
 
 /*** Types used for biquad, band pass and peaking filter **************************/
-typedef struct
-{
-    LVM_FLOAT Storage[ (1 * 4) ];  /* One channel, four taps of size LVM_FLOAT */
+typedef struct {
+    LVM_FLOAT Storage[(1 * 4)]; /* One channel, four taps of size LVM_FLOAT */
 } Biquad_1I_Order2_FLOAT_Taps_t;
 
-typedef struct
-{
-#ifdef SUPPORT_MC
+typedef struct {
     /* LVM_MAX_CHANNELS, four taps of size LVM_FLOAT */
-    LVM_FLOAT Storage[ (LVM_MAX_CHANNELS * 4) ];
-#else
-    LVM_FLOAT Storage[ (2 * 4) ];  /* Two channels, four taps of size LVM_FLOAT */
-#endif
+    LVM_FLOAT Storage[(LVM_MAX_CHANNELS * 4)];
 } Biquad_2I_Order2_FLOAT_Taps_t;
-/* The names of the functions are changed to satisfy QAC rules: Name should be Unique withing 16 characters*/
-#define BQ_2I_D32F32Cll_TRC_WRA_01_Init  Init_BQ_2I_D32F32Cll_TRC_WRA_01
-#define BP_1I_D32F32C30_TRC_WRA_02       TWO_BP_1I_D32F32C30_TRC_WRA_02
+/* The names of the functions are changed to satisfy QAC rules: names should be unique within
+ * 16 characters. */
+#define BQ_2I_D32F32Cll_TRC_WRA_01_Init Init_BQ_2I_D32F32Cll_TRC_WRA_01
+#define BP_1I_D32F32C30_TRC_WRA_02 TWO_BP_1I_D32F32C30_TRC_WRA_02
 
 /**********************************************************************************
    FUNCTION PROTOTYPES: BIQUAD FILTERS
@@ -127,176 +106,108 @@
 
 /*** 16 bit data path *************************************************************/
 
-void BQ_2I_D16F32Css_TRC_WRA_01_Init (   Biquad_FLOAT_Instance_t         *pInstance,
-                                         Biquad_2I_Order2_FLOAT_Taps_t   *pTaps,
-                                         BQ_FLOAT_Coefs_t            *pCoef);
+void BQ_2I_D16F32Css_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t* pInstance,
+                                     Biquad_2I_Order2_FLOAT_Taps_t* pTaps, BQ_FLOAT_Coefs_t* pCoef);
 
-void BQ_2I_D16F32C15_TRC_WRA_01 (           Biquad_FLOAT_Instance_t       *pInstance,
-                                            LVM_FLOAT                    *pDataIn,
-                                            LVM_FLOAT                    *pDataOut,
-                                            LVM_INT16                    NrSamples);
+void BQ_2I_D16F32C15_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+                                LVM_FLOAT* pDataOut, LVM_INT16 NrSamples);
 
-void BQ_2I_D16F32C14_TRC_WRA_01 (           Biquad_FLOAT_Instance_t       *pInstance,
-                                            LVM_FLOAT                    *pDataIn,
-                                            LVM_FLOAT                    *pDataOut,
-                                            LVM_INT16                    NrSamples);
+void BQ_2I_D16F32C14_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+                                LVM_FLOAT* pDataOut, LVM_INT16 NrSamples);
 
-void BQ_2I_D16F32C13_TRC_WRA_01 (           Biquad_FLOAT_Instance_t       *pInstance,
-                                            LVM_FLOAT                    *pDataIn,
-                                            LVM_FLOAT                    *pDataOut,
-                                            LVM_INT16                    NrSamples);
+void BQ_2I_D16F32C13_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+                                LVM_FLOAT* pDataOut, LVM_INT16 NrSamples);
 
-void BQ_2I_D16F16Css_TRC_WRA_01_Init (   Biquad_FLOAT_Instance_t         *pInstance,
-                                         Biquad_2I_Order2_FLOAT_Taps_t   *pTaps,
-                                         BQ_FLOAT_Coefs_t            *pCoef);
+void BQ_2I_D16F16Css_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t* pInstance,
+                                     Biquad_2I_Order2_FLOAT_Taps_t* pTaps, BQ_FLOAT_Coefs_t* pCoef);
 
-void BQ_2I_D16F16C15_TRC_WRA_01( Biquad_FLOAT_Instance_t       *pInstance,
-                                 LVM_FLOAT               *pDataIn,
-                                 LVM_FLOAT               *pDataOut,
-                                 LVM_INT16               NrSamples);
+void BQ_2I_D16F16C15_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+                                LVM_FLOAT* pDataOut, LVM_INT16 NrSamples);
 
-void BQ_2I_D16F16C14_TRC_WRA_01( Biquad_FLOAT_Instance_t       *pInstance,
-                                 LVM_FLOAT               *pDataIn,
-                                 LVM_FLOAT               *pDataOut,
-                                 LVM_INT16               NrSamples);
+void BQ_2I_D16F16C14_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+                                LVM_FLOAT* pDataOut, LVM_INT16 NrSamples);
 
-void BQ_1I_D16F16Css_TRC_WRA_01_Init (   Biquad_FLOAT_Instance_t         *pInstance,
-                                         Biquad_1I_Order2_FLOAT_Taps_t   *pTaps,
-                                         BQ_FLOAT_Coefs_t            *pCoef);
+void BQ_1I_D16F16Css_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t* pInstance,
+                                     Biquad_1I_Order2_FLOAT_Taps_t* pTaps, BQ_FLOAT_Coefs_t* pCoef);
 
-void BQ_1I_D16F16C15_TRC_WRA_01 ( Biquad_FLOAT_Instance_t       *pInstance,
-                                  LVM_FLOAT               *pDataIn,
-                                  LVM_FLOAT               *pDataOut,
-                                  LVM_INT16               NrSamples);
+void BQ_1I_D16F16C15_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+                                LVM_FLOAT* pDataOut, LVM_INT16 NrSamples);
 
-void BQ_1I_D16F32Css_TRC_WRA_01_Init (   Biquad_FLOAT_Instance_t         *pInstance,
-                                         Biquad_1I_Order2_FLOAT_Taps_t   *pTaps,
-                                         BQ_FLOAT_Coefs_t            *pCoef);
+void BQ_1I_D16F32Css_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t* pInstance,
+                                     Biquad_1I_Order2_FLOAT_Taps_t* pTaps, BQ_FLOAT_Coefs_t* pCoef);
 
-void BQ_1I_D16F32C14_TRC_WRA_01 ( Biquad_FLOAT_Instance_t       *pInstance,
-                                  LVM_FLOAT              *pDataIn,
-                                  LVM_FLOAT               *pDataOut,
-                                  LVM_INT16               NrSamples);
+void BQ_1I_D16F32C14_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+                                LVM_FLOAT* pDataOut, LVM_INT16 NrSamples);
 /*** 32 bit data path *************************************************************/
-void BQ_2I_D32F32Cll_TRC_WRA_01_Init (      Biquad_FLOAT_Instance_t       *pInstance,
-                                            Biquad_2I_Order2_FLOAT_Taps_t *pTaps,
-                                            BQ_FLOAT_Coefs_t          *pCoef);
-void BQ_2I_D32F32C30_TRC_WRA_01 (           Biquad_FLOAT_Instance_t  *pInstance,
-                                            LVM_FLOAT                    *pDataIn,
-                                            LVM_FLOAT                    *pDataOut,
-                                            LVM_INT16                 NrSamples);
-#ifdef SUPPORT_MC
-void BQ_MC_D32F32C30_TRC_WRA_01 (           Biquad_FLOAT_Instance_t      *pInstance,
-                                            LVM_FLOAT                    *pDataIn,
-                                            LVM_FLOAT                    *pDataOut,
-                                            LVM_INT16                    NrFrames,
-                                            LVM_INT16                    NrChannels);
-#endif
+void BQ_2I_D32F32Cll_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t* pInstance,
+                                     Biquad_2I_Order2_FLOAT_Taps_t* pTaps, BQ_FLOAT_Coefs_t* pCoef);
+void BQ_2I_D32F32C30_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+                                LVM_FLOAT* pDataOut, LVM_INT16 NrSamples);
+void BQ_MC_D32F32C30_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+                                LVM_FLOAT* pDataOut, LVM_INT16 NrFrames, LVM_INT16 NrChannels);
 
 /**********************************************************************************
    FUNCTION PROTOTYPES: FIRST ORDER FILTERS
 ***********************************************************************************/
 
 /*** 16 bit data path *************************************************************/
-void FO_1I_D16F16Css_TRC_WRA_01_Init(    Biquad_FLOAT_Instance_t         *pInstance,
-                                         Biquad_1I_Order1_FLOAT_Taps_t   *pTaps,
-                                         FO_FLOAT_Coefs_t            *pCoef);
+void FO_1I_D16F16Css_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t* pInstance,
+                                     Biquad_1I_Order1_FLOAT_Taps_t* pTaps, FO_FLOAT_Coefs_t* pCoef);
 
-void FO_1I_D16F16C15_TRC_WRA_01( Biquad_FLOAT_Instance_t       *pInstance,
-                                 LVM_FLOAT               *pDataIn,
-                                 LVM_FLOAT               *pDataOut,
-                                 LVM_INT16               NrSamples);
+void FO_1I_D16F16C15_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+                                LVM_FLOAT* pDataOut, LVM_INT16 NrSamples);
 
-void FO_2I_D16F32Css_LShx_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t       *pInstance,
-                                          Biquad_2I_Order1_FLOAT_Taps_t *pTaps,
-                                          FO_FLOAT_LShx_Coefs_t     *pCoef);
+void FO_2I_D16F32Css_LShx_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t* pInstance,
+                                          Biquad_2I_Order1_FLOAT_Taps_t* pTaps,
+                                          FO_FLOAT_LShx_Coefs_t* pCoef);
 
-void FO_2I_D16F32C15_LShx_TRC_WRA_01(Biquad_FLOAT_Instance_t       *pInstance,
-                                     LVM_FLOAT               *pDataIn,
-                                     LVM_FLOAT               *pDataOut,
-                                     LVM_INT16               NrSamples);
+void FO_2I_D16F32C15_LShx_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+                                     LVM_FLOAT* pDataOut, LVM_INT16 NrSamples);
 /*** 32 bit data path *************************************************************/
-void FO_1I_D32F32Cll_TRC_WRA_01_Init( Biquad_FLOAT_Instance_t       *pInstance,
-                                      Biquad_1I_Order1_FLOAT_Taps_t *pTaps,
-                                      FO_FLOAT_Coefs_t          *pCoef);
-void FO_1I_D32F32C31_TRC_WRA_01( Biquad_FLOAT_Instance_t       *pInstance,
-                                 LVM_FLOAT                     *pDataIn,
-                                 LVM_FLOAT                     *pDataOut,
-                                 LVM_INT16                     NrSamples);
-#ifdef SUPPORT_MC
-void FO_Mc_D16F32C15_LShx_TRC_WRA_01(Biquad_FLOAT_Instance_t  *pInstance,
-                                     LVM_FLOAT                *pDataIn,
-                                     LVM_FLOAT                *pDataOut,
-                                     LVM_INT16                NrFrames,
-                                     LVM_INT16                NrChannels);
-#endif
+void FO_1I_D32F32Cll_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t* pInstance,
+                                     Biquad_1I_Order1_FLOAT_Taps_t* pTaps, FO_FLOAT_Coefs_t* pCoef);
+void FO_1I_D32F32C31_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+                                LVM_FLOAT* pDataOut, LVM_INT16 NrSamples);
+void FO_Mc_D16F32C15_LShx_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+                                     LVM_FLOAT* pDataOut, LVM_INT16 NrFrames, LVM_INT16 NrChannels);
 /**********************************************************************************
    FUNCTION PROTOTYPES: BAND PASS FILTERS
 ***********************************************************************************/
 
 /*** 16 bit data path *************************************************************/
-void BP_1I_D16F16Css_TRC_WRA_01_Init( Biquad_FLOAT_Instance_t       *pInstance,
-                                      Biquad_1I_Order2_FLOAT_Taps_t *pTaps,
-                                      BP_FLOAT_Coefs_t              *pCoef);
-void BP_1I_D16F16C14_TRC_WRA_01 (     Biquad_FLOAT_Instance_t       *pInstance,
-                                      LVM_FLOAT                     *pDataIn,
-                                      LVM_FLOAT                     *pDataOut,
-                                      LVM_INT16                     NrSamples);
-void BP_1I_D16F32Cll_TRC_WRA_01_Init (Biquad_FLOAT_Instance_t       *pInstance,
-                                      Biquad_1I_Order2_FLOAT_Taps_t *pTaps,
-                                      BP_FLOAT_Coefs_t              *pCoef);
-void BP_1I_D16F32C30_TRC_WRA_01 (           Biquad_FLOAT_Instance_t       *pInstance,
-                                            LVM_FLOAT                    *pDataIn,
-                                            LVM_FLOAT                    *pDataOut,
-                                            LVM_INT16                    NrSamples);
+void BP_1I_D16F16Css_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t* pInstance,
+                                     Biquad_1I_Order2_FLOAT_Taps_t* pTaps, BP_FLOAT_Coefs_t* pCoef);
+void BP_1I_D16F16C14_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+                                LVM_FLOAT* pDataOut, LVM_INT16 NrSamples);
+void BP_1I_D16F32Cll_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t* pInstance,
+                                     Biquad_1I_Order2_FLOAT_Taps_t* pTaps, BP_FLOAT_Coefs_t* pCoef);
+void BP_1I_D16F32C30_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+                                LVM_FLOAT* pDataOut, LVM_INT16 NrSamples);
 /*** 32 bit data path *************************************************************/
-void BP_1I_D32F32Cll_TRC_WRA_02_Init (      Biquad_FLOAT_Instance_t       *pInstance,
-                                            Biquad_1I_Order2_FLOAT_Taps_t *pTaps,
-                                            BP_FLOAT_Coefs_t          *pCoef);
-void BP_1I_D32F32C30_TRC_WRA_02(            Biquad_FLOAT_Instance_t       *pInstance,
-                                            LVM_FLOAT                    *pDataIn,
-                                            LVM_FLOAT                    *pDataOut,
-                                            LVM_INT16                    NrSamples);
+void BP_1I_D32F32Cll_TRC_WRA_02_Init(Biquad_FLOAT_Instance_t* pInstance,
+                                     Biquad_1I_Order2_FLOAT_Taps_t* pTaps, BP_FLOAT_Coefs_t* pCoef);
+void BP_1I_D32F32C30_TRC_WRA_02(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+                                LVM_FLOAT* pDataOut, LVM_INT16 NrSamples);
 
 /*** 32 bit data path STEREO ******************************************************/
-void PK_2I_D32F32CssGss_TRC_WRA_01_Init (   Biquad_FLOAT_Instance_t       *pInstance,
-                                            Biquad_2I_Order2_FLOAT_Taps_t *pTaps,
-                                            PK_FLOAT_Coefs_t          *pCoef);
-void PK_2I_D32F32C14G11_TRC_WRA_01( Biquad_FLOAT_Instance_t       *pInstance,
-                                    LVM_FLOAT               *pDataIn,
-                                    LVM_FLOAT               *pDataOut,
-                                    LVM_INT16               NrSamples);
-#ifdef SUPPORT_MC
-void PK_Mc_D32F32C14G11_TRC_WRA_01(Biquad_FLOAT_Instance_t       *pInstance,
-                                   LVM_FLOAT               *pDataIn,
-                                   LVM_FLOAT               *pDataOut,
-                                   LVM_INT16               NrFrames,
-                                   LVM_INT16               NrChannels);
-#endif
+void PK_2I_D32F32CssGss_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t* pInstance,
+                                        Biquad_2I_Order2_FLOAT_Taps_t* pTaps,
+                                        PK_FLOAT_Coefs_t* pCoef);
+void PK_2I_D32F32C14G11_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+                                   LVM_FLOAT* pDataOut, LVM_INT16 NrSamples);
+void PK_Mc_D32F32C14G11_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+                                   LVM_FLOAT* pDataOut, LVM_INT16 NrFrames, LVM_INT16 NrChannels);
 
 /**********************************************************************************
    FUNCTION PROTOTYPES: DC REMOVAL FILTERS
 ***********************************************************************************/
 
 /*** 16 bit data path STEREO ******************************************************/
-#ifdef SUPPORT_MC
-void DC_Mc_D16_TRC_WRA_01_Init     (        Biquad_FLOAT_Instance_t       *pInstance);
+void DC_Mc_D16_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t* pInstance);
 
-void DC_Mc_D16_TRC_WRA_01          (        Biquad_FLOAT_Instance_t       *pInstance,
-                                            LVM_FLOAT               *pDataIn,
-                                            LVM_FLOAT               *pDataOut,
-                                            LVM_INT16               NrFrames,
-                                            LVM_INT16               NrChannels);
-#else
-void DC_2I_D16_TRC_WRA_01_Init     (        Biquad_FLOAT_Instance_t       *pInstance);
-
-void DC_2I_D16_TRC_WRA_01          (        Biquad_FLOAT_Instance_t       *pInstance,
-                                            LVM_FLOAT               *pDataIn,
-                                            LVM_FLOAT               *pDataOut,
-                                            LVM_INT16               NrSamples);
-#endif
+void DC_Mc_D16_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+                          LVM_FLOAT* pDataOut, LVM_INT16 NrFrames, LVM_INT16 NrChannels);
 
 /**********************************************************************************/
 
-#endif  /** _BIQUAD_H_ **/
-
+#endif /** _BIQUAD_H_ **/
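[Editor's note, not part of the change] Each *_Init prototype above loads taps and coefficients into a biquad instance, and the matching process call then filters a block of NrSamples (or NrFrames x NrChannels in the multichannel variants). As a rough orientation only, here is a minimal single-channel float biquad sketch; the struct, coefficients, and transposed direct-form II layout are invented for illustration and do not reflect the LVM tap formats.

    #include <cstdio>

    struct BiquadSketch {
        float b0, b1, b2, a1, a2;  // assumed normalized so a0 == 1
        float z1, z2;              // filter state ("taps")
        float process(float x) {   // one sample, transposed direct-form II
            float y = b0 * x + z1;
            z1 = b1 * x - a1 * y + z2;
            z2 = b2 * x - a2 * y;
            return y;
        }
    };

    int main() {
        // Made-up band-pass-like coefficients (zeros at DC and Nyquist, stable poles).
        BiquadSketch bp{0.1f, 0.0f, -0.1f, -1.6f, 0.8f, 0.0f, 0.0f};
        float in[16], out[16];
        for (int i = 0; i < 16; ++i) in[i] = (i == 0) ? 1.0f : 0.0f;  // impulse input
        for (int i = 0; i < 16; ++i) out[i] = bp.process(in[i]);      // the NrSamples loop
        std::printf("out[0]=%.3f out[1]=%.3f\n", out[0], out[1]);
        return 0;
    }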
diff --git a/media/libeffects/lvm/lib/Common/lib/CompLim.h b/media/libeffects/lvm/lib/Common/lib/CompLim.h
index 5b7cb1b..2fc78b4 100644
--- a/media/libeffects/lvm/lib/Common/lib/CompLim.h
+++ b/media/libeffects/lvm/lib/Common/lib/CompLim.h
@@ -35,21 +35,21 @@
 typedef struct /* Compressor state */
 {
     /* Normaliser */
-    LVM_INT16   Norm_Attack;        /* Attack time constant of the Normaliser integrator */
-    LVM_INT16   Norm_Decay;         /* Decay time constant of the Normaliser integrator */
-    LVM_INT32   NormInt;            /* Normaliser integrator current value */
-    LVM_INT16   Shift;              /* Shift gain */
-    LVM_INT16   Threshold;          /* Target threshold */
+    LVM_INT16 Norm_Attack; /* Attack time constant of the Normaliser integrator */
+    LVM_INT16 Norm_Decay;  /* Decay time constant of the Normaliser integrator */
+    LVM_INT32 NormInt;     /* Normaliser integrator current value */
+    LVM_INT16 Shift;       /* Shift gain */
+    LVM_INT16 Threshold;   /* Target threshold */
 
     /* Compressor */
-    LVM_INT16   Comp_Atten;         /* Attenuation applied before soft knee compressor */
-    LVM_INT16   Comp_Attack_S;      /* Attack time constant of the slow integrator */
-    LVM_INT16   Comp_Decay_S;       /* Decay time constant of slow the integrator */
-    LVM_INT16   Comp_Attack_F;      /* Attack time constant of fast the integrator */
-    LVM_INT16   Comp_Decay_F;       /* Decay time constant of fast the integrator */
-    LVM_INT16   SoftClipGain;       /* Soft clip gain control */
-    LVM_INT32   CompIntSlow;        /* Compressor slow integrator current value */
-    LVM_INT32   CompIntFast;        /* Compressor fast integrator current value */
+    LVM_INT16 Comp_Atten;    /* Attenuation applied before soft knee compressor */
+    LVM_INT16 Comp_Attack_S; /* Attack time constant of the slow integrator */
+    LVM_INT16 Comp_Decay_S;  /* Decay time constant of the slow integrator */
+    LVM_INT16 Comp_Attack_F; /* Attack time constant of the fast integrator */
+    LVM_INT16 Comp_Decay_F;  /* Decay time constant of the fast integrator */
+    LVM_INT16 SoftClipGain;  /* Soft clip gain control */
+    LVM_INT32 CompIntSlow;   /* Compressor slow integrator current value */
+    LVM_INT32 CompIntFast;   /* Compressor fast integrator current value */
 
 } CompLim_Instance_t;
 
@@ -58,10 +58,7 @@
 /*  Function Prototypes                                                             */
 /*                                                                                  */
 /************************************************************************************/
-void NonLinComp_Float(LVM_FLOAT        Gain,
-                      LVM_FLOAT        *pDataIn,
-                      LVM_FLOAT        *pDataOut,
-                      LVM_INT32        BlockLength);
+void NonLinComp_Float(LVM_FLOAT Gain, LVM_FLOAT* pDataIn, LVM_FLOAT* pDataOut,
+                      LVM_INT32 BlockLength);
 
 #endif /* #ifndef _COMP_LIM_H */
-
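[Editor's note, not part of the change] The Norm_Attack/Norm_Decay and Comp_Attack_*/Comp_Decay_* fields above are integrator time constants: the tracked envelope rises with one coefficient and falls with another. A minimal float sketch of that attack/decay pattern, with made-up coefficients and none of the Q-format arithmetic or soft-knee gain stage of the real CompLim code:

    #include <cmath>
    #include <cstdio>

    int main() {
        const float attack = 0.2f, decay = 0.01f;  // assumed smoothing coefficients
        float env = 0.0f;                          // integrator state
        const float in[] = {0.0f, 0.9f, 0.9f, 0.9f, 0.0f, 0.0f, 0.0f, 0.0f};
        for (float x : in) {
            float mag = std::fabs(x);
            float k = (mag > env) ? attack : decay;  // rise quickly, fall slowly
            env += k * (mag - env);
            std::printf("env=%.3f\n", env);
        }
        return 0;
    }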
diff --git a/media/libeffects/lvm/lib/Common/lib/Filter.h b/media/libeffects/lvm/lib/Common/lib/Filter.h
index 1eeb321..0ba5223 100644
--- a/media/libeffects/lvm/lib/Common/lib/Filter.h
+++ b/media/libeffects/lvm/lib/Common/lib/Filter.h
@@ -27,26 +27,20 @@
 /**********************************************************************************
    DEFINES
 ***********************************************************************************/
-#define FILTER_LOSS     32730       /* -0.01dB loss to avoid wrapping due to band ripple */
-#define FILTER_LOSS_FLOAT    0.998849f
+#define FILTER_LOSS 32730 /* -0.01dB loss to avoid wrapping due to band ripple */
+#define FILTER_LOSS_FLOAT 0.998849f
 /**********************************************************************************
    FUNCTION PROTOTYPES
 ***********************************************************************************/
 
-LVM_FLOAT LVM_Power10(   LVM_FLOAT  X);
+LVM_FLOAT LVM_Power10(LVM_FLOAT X);
 
-LVM_FLOAT LVM_Polynomial(LVM_UINT16 N,
-                         LVM_FLOAT  *pCoefficients,
-                         LVM_FLOAT  X);
-LVM_FLOAT   LVM_GetOmega(LVM_UINT32  Fc,
-                         LVM_Fs_en   SampleRate);
+LVM_FLOAT LVM_Polynomial(LVM_UINT16 N, LVM_FLOAT* pCoefficients, LVM_FLOAT X);
+LVM_FLOAT LVM_GetOmega(LVM_UINT32 Fc, LVM_Fs_en SampleRate);
 
-LVM_FLOAT LVM_FO_LPF(    LVM_FLOAT  w,
-                         FO_FLOAT_Coefs_t  *pCoeffs);
+LVM_FLOAT LVM_FO_LPF(LVM_FLOAT w, FO_FLOAT_Coefs_t* pCoeffs);
 
-LVM_FLOAT LVM_FO_HPF(    LVM_FLOAT  w,
-                         FO_FLOAT_Coefs_t  *pCoeffs);
+LVM_FLOAT LVM_FO_HPF(LVM_FLOAT w, FO_FLOAT_Coefs_t* pCoeffs);
 /**********************************************************************************/
 
-#endif  /** _FILTER_H_ **/
-
+#endif /** _FILTER_H_ **/
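[Editor's note, not part of the change] As context for LVM_GetOmega and FILTER_LOSS_FLOAT: a corner frequency is conventionally mapped to a normalised angular frequency w = 2*pi*Fc/Fs, and 0.998849 matches the stated -0.01 dB loss. A small stand-alone check, assuming that conventional definition of omega (the real LVM_GetOmega may scale differently):

    #include <cmath>
    #include <cstdio>

    int main() {
        const double kPi = 3.14159265358979323846;
        const double Fc = 1000.0, Fs = 48000.0;
        double w = 2.0 * kPi * Fc / Fs;                // one common definition of omega
        double loss_dB = 20.0 * std::log10(0.998849);  // FILTER_LOSS_FLOAT expressed in dB
        std::printf("w = %.6f rad/sample, loss = %.4f dB\n", w, loss_dB);  // about -0.0100 dB
        return 0;
    }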
diff --git a/media/libeffects/lvm/lib/Common/lib/InstAlloc.h b/media/libeffects/lvm/lib/Common/lib/InstAlloc.h
index bae84e7..17699ef 100644
--- a/media/libeffects/lvm/lib/Common/lib/InstAlloc.h
+++ b/media/libeffects/lvm/lib/Common/lib/InstAlloc.h
@@ -22,11 +22,10 @@
 /*######################################################################################*/
 /*  Type declarations                                                                   */
 /*######################################################################################*/
-typedef struct
-{
-    LVM_UINT32              TotalSize;      /*  Accumulative total memory size                      */
-    uintptr_t               pNextMember;    /*  Pointer to the next instance member to be allocated */
-}   INST_ALLOC;
+typedef struct {
+    LVM_UINT32 TotalSize;  /*  Accumulative total memory size                      */
+    uintptr_t pNextMember; /*  Pointer to the next instance member to be allocated */
+} INST_ALLOC;
 
 /*######################################################################################*/
 /*  Function prototypes                                                          */
@@ -41,7 +40,7 @@
  *  Remarks     :
  ****************************************************************************************/
 
-void   InstAlloc_Init( INST_ALLOC *pms, void *StartAddr );
+void InstAlloc_Init(INST_ALLOC* pms, void* StartAddr);
 
 /****************************************************************************************
  *  Name        : InstAlloc_AddMember()
@@ -54,7 +53,7 @@
  *  Remarks     :
  ****************************************************************************************/
 
-void* InstAlloc_AddMember( INST_ALLOC *pms, LVM_UINT32 Size );
+void* InstAlloc_AddMember(INST_ALLOC* pms, LVM_UINT32 Size);
 
 /****************************************************************************************
  *  Name        : InstAlloc_GetTotal()
@@ -64,19 +63,14 @@
  *  Remarks     :
  ****************************************************************************************/
 
-LVM_UINT32 InstAlloc_GetTotal( INST_ALLOC *pms);
+LVM_UINT32 InstAlloc_GetTotal(INST_ALLOC* pms);
 
-void*   InstAlloc_AddMemberAllRet(     INST_ALLOC                 *pms,
-                                     LVM_UINT32               Size[],
-                                     void                    **ptr);
+void* InstAlloc_AddMemberAllRet(INST_ALLOC* pms, LVM_UINT32 Size[], void** ptr);
 
-void*   InstAlloc_AddMemberAll( INST_ALLOC                     *pms,
-                                 LVM_UINT32                   Size[],
-                                 LVM_MemoryTable_st           *pMemoryTable);
+void* InstAlloc_AddMemberAll(INST_ALLOC* pms, LVM_UINT32 Size[], LVM_MemoryTable_st* pMemoryTable);
 
-void    InstAlloc_InitAll( INST_ALLOC                      *pms,
-                           LVM_MemoryTable_st             *pMemoryTable);
+void InstAlloc_InitAll(INST_ALLOC* pms, LVM_MemoryTable_st* pMemoryTable);
 
-void    InstAlloc_InitAll_NULL( INST_ALLOC              *pms);
+void InstAlloc_InitAll_NULL(INST_ALLOC* pms);
 
 #endif /* __JBS_INSTALLOC_H__ */
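[Editor's note, not part of the change] The InstAlloc prototypes suggest a two-pass pattern: walk the members against a null base to accumulate the total size, allocate one block, then walk them again against the real base to hand out sub-blocks. A local stand-in sketch of that pattern, with an assumed 4-byte alignment that is not taken from the LVM sources:

    #include <cstdint>
    #include <cstdio>
    #include <cstdlib>

    struct InstAllocSketch {
        uint32_t total = 0;   // accumulated total size (cf. TotalSize)
        uintptr_t next = 0;   // next member address (cf. pNextMember)
        void init(void* start) { total = 0; next = reinterpret_cast<uintptr_t>(start); }
        void* addMember(uint32_t size) {
            size = (size + 3u) & ~3u;  // assumed 4-byte alignment
            void* p = reinterpret_cast<void*>(next);
            next += size;
            total += size;
            return p;
        }
    };

    int main() {
        InstAllocSketch sizing;            // pass 1: size against a null base
        sizing.init(nullptr);
        sizing.addMember(40);
        sizing.addMember(10);
        void* block = std::malloc(sizing.total);
        InstAllocSketch alloc;             // pass 2: real placement
        alloc.init(block);
        void* a = alloc.addMember(40);
        void* b = alloc.addMember(10);
        std::printf("total=%u a=%p b=%p\n", sizing.total, a, b);
        std::free(block);
        return 0;
    }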
diff --git a/media/libeffects/lvm/lib/Common/lib/LVM_Common.h b/media/libeffects/lvm/lib/Common/lib/LVM_Common.h
index 49f16ad..d3d128a 100644
--- a/media/libeffects/lvm/lib/Common/lib/LVM_Common.h
+++ b/media/libeffects/lvm/lib/Common/lib/LVM_Common.h
@@ -39,12 +39,11 @@
 /*                                                                                      */
 /****************************************************************************************/
 /* Algorithm identification */
-#define ALGORITHM_NONE_ID      0x0000
-#define ALGORITHM_CS_ID        0x0100
-#define ALGORITHM_EQNB_ID      0x0200
-#define ALGORITHM_DBE_ID       0x0300
-#define ALGORITHM_VC_ID        0x0500
-#define ALGORITHM_TE_ID        0x0600
+#define ALGORITHM_NONE_ID 0x0000
+#define ALGORITHM_CS_ID 0x0100
+#define ALGORITHM_EQNB_ID 0x0200
+#define ALGORITHM_DBE_ID 0x0300
+#define ALGORITHM_VC_ID 0x0500
+#define ALGORITHM_TE_ID 0x0600
 
-#endif      /* __LVM_COMMON_H__ */
-
+#endif /* __LVM_COMMON_H__ */
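[Editor's note, not part of the change] These algorithm IDs occupy the upper byte of a callbackId, matching the LVM_MODULEID_MASK / LVM_EVENTID_MASK values defined in LVM_Types.h. A small sketch of composing and decomposing such an ID; the event code used here is hypothetical:

    #include <cstdint>
    #include <cstdio>

    int main() {
        const uint16_t ALGORITHM_CS_ID = 0x0100;  // value from this header
        const uint16_t kEvent = 0x0003;           // hypothetical event code
        uint16_t callbackId = ALGORITHM_CS_ID | kEvent;
        uint16_t module = callbackId & 0xFF00;    // LVM_MODULEID_MASK
        uint16_t event = callbackId & 0x00FF;     // LVM_EVENTID_MASK
        std::printf("module=0x%04X event=0x%04X\n", module, event);
        return 0;
    }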
diff --git a/media/libeffects/lvm/lib/Common/lib/LVM_Macros.h b/media/libeffects/lvm/lib/Common/lib/LVM_Macros.h
index 1a15125..b984745 100644
--- a/media/libeffects/lvm/lib/Common/lib/LVM_Macros.h
+++ b/media/libeffects/lvm/lib/Common/lib/LVM_Macros.h
@@ -28,31 +28,36 @@
         of overflow is undefined.
 
 ***********************************************************************************/
-#define MUL32x32INTO32(A,B,C,ShiftR)   \
-        {LVM_INT32 MUL32x32INTO32_temp,MUL32x32INTO32_temp2,MUL32x32INTO32_mask,MUL32x32INTO32_HH,MUL32x32INTO32_HL,MUL32x32INTO32_LH,MUL32x32INTO32_LL;\
-         LVM_INT32  shiftValue;\
-        shiftValue = (ShiftR);\
-        MUL32x32INTO32_mask=0x0000FFFF;\
-        MUL32x32INTO32_HH= ((LVM_INT32)((LVM_INT16)((A)>>16))*((LVM_INT16)((B)>>16)) );\
-        MUL32x32INTO32_HL= ((LVM_INT32)((B)&MUL32x32INTO32_mask)*((LVM_INT16)((A)>>16))) ;\
-        MUL32x32INTO32_LH= ((LVM_INT32)((A)&MUL32x32INTO32_mask)*((LVM_INT16)((B)>>16)));\
-        MUL32x32INTO32_LL= (LVM_INT32)((A)&MUL32x32INTO32_mask)*(LVM_INT32)((B)&MUL32x32INTO32_mask);\
-        MUL32x32INTO32_temp= (LVM_INT32)(MUL32x32INTO32_HL&MUL32x32INTO32_mask)+(LVM_INT32)(MUL32x32INTO32_LH&MUL32x32INTO32_mask)+(LVM_INT32)((MUL32x32INTO32_LL>>16)&MUL32x32INTO32_mask);\
-        MUL32x32INTO32_HH= MUL32x32INTO32_HH+(LVM_INT32)(MUL32x32INTO32_HL>>16)+(LVM_INT32)(MUL32x32INTO32_LH>>16)+(LVM_INT32)(MUL32x32INTO32_temp>>16);\
-        MUL32x32INTO32_LL=MUL32x32INTO32_LL+(LVM_INT32)(MUL32x32INTO32_HL<<16)+(LVM_INT32)(MUL32x32INTO32_LH<<16);\
-        if(shiftValue<32)\
-        {\
-        MUL32x32INTO32_HH=MUL32x32INTO32_HH<<(32-shiftValue);\
-        MUL32x32INTO32_mask=((LVM_INT32)1<<(32-shiftValue))-1;\
-        MUL32x32INTO32_LL=(MUL32x32INTO32_LL>>shiftValue)&MUL32x32INTO32_mask;\
-        MUL32x32INTO32_temp2=MUL32x32INTO32_HH|MUL32x32INTO32_LL;\
-        }\
-        else\
-       {\
-        MUL32x32INTO32_temp2=(LVM_INT32)MUL32x32INTO32_HH>>(shiftValue-32);\
-       }\
-       (C) = MUL32x32INTO32_temp2;\
-       }
+#define MUL32x32INTO32(A, B, C, ShiftR)                                                        \
+    {                                                                                          \
+        LVM_INT32 MUL32x32INTO32_temp, MUL32x32INTO32_temp2, MUL32x32INTO32_mask,              \
+                MUL32x32INTO32_HH, MUL32x32INTO32_HL, MUL32x32INTO32_LH, MUL32x32INTO32_LL;    \
+        LVM_INT32 shiftValue;                                                                  \
+        shiftValue = (ShiftR);                                                                 \
+        MUL32x32INTO32_mask = 0x0000FFFF;                                                      \
+        MUL32x32INTO32_HH = ((LVM_INT32)((LVM_INT16)((A) >> 16)) * ((LVM_INT16)((B) >> 16)));  \
+        MUL32x32INTO32_HL = ((LVM_INT32)((B)&MUL32x32INTO32_mask) * ((LVM_INT16)((A) >> 16))); \
+        MUL32x32INTO32_LH = ((LVM_INT32)((A)&MUL32x32INTO32_mask) * ((LVM_INT16)((B) >> 16))); \
+        MUL32x32INTO32_LL =                                                                    \
+                (LVM_INT32)((A)&MUL32x32INTO32_mask) * (LVM_INT32)((B)&MUL32x32INTO32_mask);   \
+        MUL32x32INTO32_temp = (LVM_INT32)(MUL32x32INTO32_HL & MUL32x32INTO32_mask) +           \
+                              (LVM_INT32)(MUL32x32INTO32_LH & MUL32x32INTO32_mask) +           \
+                              (LVM_INT32)((MUL32x32INTO32_LL >> 16) & MUL32x32INTO32_mask);    \
+        MUL32x32INTO32_HH = MUL32x32INTO32_HH + (LVM_INT32)(MUL32x32INTO32_HL >> 16) +         \
+                            (LVM_INT32)(MUL32x32INTO32_LH >> 16) +                             \
+                            (LVM_INT32)(MUL32x32INTO32_temp >> 16);                            \
+        MUL32x32INTO32_LL = MUL32x32INTO32_LL + (LVM_INT32)(MUL32x32INTO32_HL << 16) +         \
+                            (LVM_INT32)(MUL32x32INTO32_LH << 16);                              \
+        if (shiftValue < 32) {                                                                 \
+            MUL32x32INTO32_HH = MUL32x32INTO32_HH << (32 - shiftValue);                        \
+            MUL32x32INTO32_mask = ((LVM_INT32)1 << (32 - shiftValue)) - 1;                     \
+            MUL32x32INTO32_LL = (MUL32x32INTO32_LL >> shiftValue) & MUL32x32INTO32_mask;       \
+            MUL32x32INTO32_temp2 = MUL32x32INTO32_HH | MUL32x32INTO32_LL;                      \
+        } else {                                                                               \
+            MUL32x32INTO32_temp2 = (LVM_INT32)MUL32x32INTO32_HH >> (shiftValue - 32);          \
+        }                                                                                      \
+        (C) = MUL32x32INTO32_temp2;                                                            \
+    }
 
 /**********************************************************************************
    MUL32x16INTO32(A,B,C,ShiftR)
@@ -65,25 +70,24 @@
         of overflow is undefined.
 
 ***********************************************************************************/
-#define MUL32x16INTO32(A,B,C,ShiftR)   \
-        {LVM_INT32 MUL32x16INTO32_mask,MUL32x16INTO32_HH,MUL32x16INTO32_LL;\
-         LVM_INT32  shiftValue;\
-        shiftValue = (ShiftR);\
-        MUL32x16INTO32_mask=0x0000FFFF;\
-        MUL32x16INTO32_HH= ((LVM_INT32)(B)*((LVM_INT16)((A)>>16)));\
-        MUL32x16INTO32_LL= ((LVM_INT32)((A)&MUL32x16INTO32_mask)*(B));\
-        if(shiftValue<16)\
-        {\
-        MUL32x16INTO32_HH=(LVM_INT32)((LVM_UINT32)MUL32x16INTO32_HH<<(16-shiftValue));\
-        (C)=MUL32x16INTO32_HH+(LVM_INT32)(MUL32x16INTO32_LL>>shiftValue);\
-        }\
-        else if(shiftValue<32) {\
-        MUL32x16INTO32_HH=(LVM_INT32)(MUL32x16INTO32_HH>>(shiftValue-16));\
-        (C)=MUL32x16INTO32_HH+(LVM_INT32)(MUL32x16INTO32_LL>>shiftValue);\
-        }\
-        else {\
-        (C)=MUL32x16INTO32_HH>>(shiftValue-16);}\
-        }
+#define MUL32x16INTO32(A, B, C, ShiftR)                                                          \
+    {                                                                                            \
+        LVM_INT32 MUL32x16INTO32_mask, MUL32x16INTO32_HH, MUL32x16INTO32_LL;                     \
+        LVM_INT32 shiftValue;                                                                    \
+        shiftValue = (ShiftR);                                                                   \
+        MUL32x16INTO32_mask = 0x0000FFFF;                                                        \
+        MUL32x16INTO32_HH = ((LVM_INT32)(B) * ((LVM_INT16)((A) >> 16)));                         \
+        MUL32x16INTO32_LL = ((LVM_INT32)((A)&MUL32x16INTO32_mask) * (B));                        \
+        if (shiftValue < 16) {                                                                   \
+            MUL32x16INTO32_HH = (LVM_INT32)((LVM_UINT32)MUL32x16INTO32_HH << (16 - shiftValue)); \
+            (C) = MUL32x16INTO32_HH + (LVM_INT32)(MUL32x16INTO32_LL >> shiftValue);              \
+        } else if (shiftValue < 32) {                                                            \
+            MUL32x16INTO32_HH = (LVM_INT32)(MUL32x16INTO32_HH >> (shiftValue - 16));             \
+            (C) = MUL32x16INTO32_HH + (LVM_INT32)(MUL32x16INTO32_LL >> shiftValue);              \
+        } else {                                                                                 \
+            (C) = MUL32x16INTO32_HH >> (shiftValue - 16);                                        \
+        }                                                                                        \
+    }
 
 /**********************************************************************************
    ADD2_SAT_32x32(A,B,C)
@@ -91,16 +95,16 @@
 
         A,B and C are 32 bit SIGNED numbers.
 ***********************************************************************************/
-#define ADD2_SAT_32x32(A,B,C)   \
-        {(C)=(A)+(B);\
-         if ((((C) ^ (A)) & ((C) ^ (B))) >> 31)\
-            {\
-                if((A)<0)\
-                    (C)=0x80000000l;\
-                else\
-                    (C)=0x7FFFFFFFl;\
-            }\
-        }
+#define ADD2_SAT_32x32(A, B, C)                  \
+    {                                            \
+        (C) = (A) + (B);                         \
+        if ((((C) ^ (A)) & ((C) ^ (B))) >> 31) { \
+            if ((A) < 0)                         \
+                (C) = 0x80000000l;               \
+            else                                 \
+                (C) = 0x7FFFFFFFl;               \
+        }                                        \
+    }
 
 #endif /* _LVM_MACROS_H_ */
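[Editor's note, not part of the change] The reformatted macros above are fixed-point helpers: MUL32x32INTO32 and MUL32x16INTO32 appear to compute (A*B) >> ShiftR via 16-bit partial products, and ADD2_SAT_32x32 is a saturating 32-bit add. A reference sketch using 64-bit intermediates, useful for sanity-checking the macros but not the partial-product code itself:

    #include <cstdint>
    #include <cstdio>

    int32_t mul32x32Ref(int32_t a, int32_t b, int shiftR) {
        // Reference for MUL32x32INTO32: full 64-bit product, then the right shift.
        return static_cast<int32_t>((static_cast<int64_t>(a) * b) >> shiftR);
    }

    int32_t add2SatRef(int32_t a, int32_t b) {
        // Reference for ADD2_SAT_32x32: add in 64 bits, clamp to the 32-bit range.
        int64_t s = static_cast<int64_t>(a) + b;
        if (s > INT32_MAX) return INT32_MAX;
        if (s < INT32_MIN) return INT32_MIN;
        return static_cast<int32_t>(s);
    }

    int main() {
        std::printf("%d\n", mul32x32Ref(0x40000000, 0x40000000, 31));  // 0.5 * 0.5 in Q31 -> 0.25
        std::printf("%d\n", add2SatRef(0x7FFFFFF0, 0x100));            // saturates to INT32_MAX
        return 0;
    }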
 
diff --git a/media/libeffects/lvm/lib/Common/lib/LVM_Timer.h b/media/libeffects/lvm/lib/Common/lib/LVM_Timer.h
index dbf9e6a..75f4785 100644
--- a/media/libeffects/lvm/lib/Common/lib/LVM_Timer.h
+++ b/media/libeffects/lvm/lib/Common/lib/LVM_Timer.h
@@ -37,8 +37,7 @@
 /*  TYPE DEFINITIONS                                                                    */
 /****************************************************************************************/
 
-typedef struct
-{
+typedef struct {
     /*
      * The memory area created using this structure is internally
      * typecast to LVM_Timer_Instance_Private_t and used.
@@ -51,14 +50,13 @@
 
 } LVM_Timer_Instance_t;
 
-typedef struct
-{
-    LVM_INT32  SamplingRate;
-    LVM_INT16  TimeInMs;
-    LVM_INT32  CallBackParam;
-    void       *pCallBackParams;
-    void       *pCallbackInstance;
-    void       (*pCallBack)(void*,void*,LVM_INT32);
+typedef struct {
+    LVM_INT32 SamplingRate;
+    LVM_INT16 TimeInMs;
+    LVM_INT32 CallBackParam;
+    void* pCallBackParams;
+    void* pCallbackInstance;
+    void (*pCallBack)(void*, void*, LVM_INT32);
 
 } LVM_Timer_Params_t;
 
@@ -66,14 +64,12 @@
 /*  FUNCTION PROTOTYPES                                                                 */
 /****************************************************************************************/
 
-void LVM_Timer_Init (   LVM_Timer_Instance_t       *pInstance,
-                        LVM_Timer_Params_t         *pParams     );
+void LVM_Timer_Init(LVM_Timer_Instance_t* pInstance, LVM_Timer_Params_t* pParams);
 
-void LVM_Timer      (   LVM_Timer_Instance_t       *pInstance,
-                        LVM_INT16                       BlockSize );
+void LVM_Timer(LVM_Timer_Instance_t* pInstance, LVM_INT16 BlockSize);
 
 /****************************************************************************************/
 /*  END OF HEADER                                                                       */
 /****************************************************************************************/
 
-#endif  /* __LVM_TIMER_H__ */
+#endif /* __LVM_TIMER_H__ */
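[Editor's note, not part of the change] LVM_Timer_Params_t pairs a SamplingRate and TimeInMs with a callback, and LVM_Timer() receives a BlockSize per processing call, which implies a sample countdown that fires the callback once the requested time has elapsed. The sketch below mirrors that reading with locally declared stand-in types; the countdown logic is an assumption, not the LVM implementation.

    #include <cstdint>
    #include <cstdio>

    struct TimerSketch {
        int64_t remainingSamples = 0;
        void (*callback)(void*, void*, int32_t) = nullptr;
        void* handle = nullptr;
        void* params = nullptr;
        int32_t callbackParam = 0;
        bool armed = false;
    };

    void timerInit(TimerSketch* t, int32_t samplingRate, int16_t timeInMs) {
        t->remainingSamples = static_cast<int64_t>(samplingRate) * timeInMs / 1000;
        t->armed = true;
    }

    void timerRun(TimerSketch* t, int16_t blockSize) {
        if (!t->armed) return;
        t->remainingSamples -= blockSize;           // one processing block elapsed
        if (t->remainingSamples <= 0) {
            t->armed = false;
            if (t->callback) t->callback(t->handle, t->params, t->callbackParam);
        }
    }

    int main() {
        TimerSketch t;
        t.callback = [](void*, void*, int32_t param) { std::printf("fired: %d\n", param); };
        t.callbackParam = 7;
        timerInit(&t, 48000, 1);                        // 1 ms at 48 kHz = 48 samples
        for (int i = 0; i < 4; ++i) timerRun(&t, 16);   // fires on the third block
        return 0;
    }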
diff --git a/media/libeffects/lvm/lib/Common/lib/LVM_Types.h b/media/libeffects/lvm/lib/Common/lib/LVM_Types.h
index 8b687f6..fb797be 100644
--- a/media/libeffects/lvm/lib/Common/lib/LVM_Types.h
+++ b/media/libeffects/lvm/lib/Common/lib/LVM_Types.h
@@ -33,46 +33,27 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-#define LVM_NULL                0                   /* NULL pointer */
+#define LVM_NULL 0 /* NULL pointer */
 
-#define LVM_TRUE                1                   /* Booleans */
-#define LVM_FALSE               0
+#define LVM_TRUE 1 /* Booleans */
+#define LVM_FALSE 0
 
-#define LVM_MAXINT_8            127                 /* Maximum positive integer size */
-#define LVM_MAXINT_16           32767
-#define LVM_MAXINT_32           2147483647
-#define LVM_MAXENUM             2147483647
+#define LVM_MAXINT_8 127 /* Maximum positive integer size */
+#define LVM_MAXINT_16 32767
+#define LVM_MAXINT_32 2147483647
+#define LVM_MAXENUM 2147483647
 
-#define LVM_MODULEID_MASK       0xFF00              /* Mask to extract the calling module ID from callbackId */
-#define LVM_EVENTID_MASK        0x00FF              /* Mask to extract the callback event from callbackId */
+#define LVM_MODULEID_MASK 0xFF00 /* Mask to extract the calling module ID from callbackId */
+#define LVM_EVENTID_MASK 0x00FF  /* Mask to extract the callback event from callbackId */
 
 /* Memory table*/
-#define LVM_MEMREGION_PERSISTENT_SLOW_DATA      0   /* Offset to the instance memory region */
-#define LVM_MEMREGION_PERSISTENT_FAST_DATA      1   /* Offset to the persistent data memory region */
-#define LVM_MEMREGION_PERSISTENT_FAST_COEF      2   /* Offset to the persistent coefficient memory region */
-#define LVM_MEMREGION_TEMPORARY_FAST            3   /* Offset to temporary memory region */
+#define LVM_MEMREGION_PERSISTENT_SLOW_DATA 0 /* Offset to the instance memory region */
+#define LVM_MEMREGION_PERSISTENT_FAST_DATA 1 /* Offset to the persistent data memory region */
+#define LVM_MEMREGION_PERSISTENT_FAST_COEF \
+    2                                  /* Offset to the persistent coefficient memory region */
+#define LVM_MEMREGION_TEMPORARY_FAST 3 /* Offset to temporary memory region */
 
-#define LVM_NR_MEMORY_REGIONS                   4   /* Number of memory regions */
-
-/* Memory partition type */
-#define LVM_MEM_PARTITION0      0                   /* 1st memory partition */
-#define LVM_MEM_PARTITION1      1                   /* 2nd memory partition */
-#define LVM_MEM_PARTITION2      2                   /* 3rd memory partition */
-#define LVM_MEM_PARTITION3      3                   /* 4th memory partition */
-
-/* Use type */
-#define LVM_MEM_PERSISTENT      0                   /* Persistent memory type */
-#define LVM_MEM_SCRATCH         4                   /* Scratch  memory type */
-
-/* Access type */
-#define LVM_MEM_INTERNAL        0                   /* Internal (fast) access memory */
-#define LVM_MEM_EXTERNAL        8                   /* External (slow) access memory */
-
-/* Platform specific */
-#define LVM_PERSISTENT          (LVM_MEM_PARTITION0+LVM_MEM_PERSISTENT+LVM_MEM_INTERNAL)
-#define LVM_PERSISTENT_DATA     (LVM_MEM_PARTITION1+LVM_MEM_PERSISTENT+LVM_MEM_INTERNAL)
-#define LVM_PERSISTENT_COEF     (LVM_MEM_PARTITION2+LVM_MEM_PERSISTENT+LVM_MEM_INTERNAL)
-#define LVM_SCRATCH             (LVM_MEM_PARTITION3+LVM_MEM_SCRATCH+LVM_MEM_INTERNAL)
+#define LVM_NR_MEMORY_REGIONS 4 /* Number of memory regions */
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -80,33 +61,28 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-typedef     char                LVM_CHAR;           /* ASCII character */
+typedef char LVM_CHAR; /* ASCII character */
 
-typedef     int8_t              LVM_INT8;           /* Signed 8-bit word */
-typedef     uint8_t             LVM_UINT8;          /* Unsigned 8-bit word */
+typedef int8_t LVM_INT8;   /* Signed 8-bit word */
+typedef uint8_t LVM_UINT8; /* Unsigned 8-bit word */
 
-typedef     int16_t             LVM_INT16;          /* Signed 16-bit word */
-typedef     uint16_t            LVM_UINT16;         /* Unsigned 16-bit word */
+typedef int16_t LVM_INT16;   /* Signed 16-bit word */
+typedef uint16_t LVM_UINT16; /* Unsigned 16-bit word */
 
-typedef     int32_t             LVM_INT32;          /* Signed 32-bit word */
-typedef     uint32_t            LVM_UINT32;         /* Unsigned 32-bit word */
-typedef     int64_t             LVM_INT64;          /* Signed 64-bit word */
+typedef int32_t LVM_INT32;   /* Signed 32-bit word */
+typedef uint32_t LVM_UINT32; /* Unsigned 32-bit word */
+typedef int64_t LVM_INT64;   /* Signed 64-bit word */
 
-#define LVM_MAXFLOAT            1.f
+#define LVM_MAXFLOAT 1.f
 
-typedef     float               LVM_FLOAT;          /* single precision floating point */
+typedef float LVM_FLOAT; /* single precision floating point */
 
 // Select whether we expose int16_t or float buffers.
 
-#define    EFFECT_BUFFER_FORMAT AUDIO_FORMAT_PCM_FLOAT
-typedef     float               effect_buffer_t;
+#define EFFECT_BUFFER_FORMAT AUDIO_FORMAT_PCM_FLOAT
+typedef float effect_buffer_t;
 
-
-#ifdef SUPPORT_MC
-#define LVM_MAX_CHANNELS 8 // FCC_8
-#else
-#define LVM_MAX_CHANNELS 2 // FCC_2
-#endif
+#define LVM_MAX_CHANNELS 8  // FCC_8
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -115,29 +91,20 @@
 /****************************************************************************************/
 
 /* Operating mode */
-typedef enum
-{
-    LVM_MODE_OFF    = 0,
-    LVM_MODE_ON     = 1,
-    LVM_MODE_DUMMY  = LVM_MAXENUM
-} LVM_Mode_en;
+typedef enum { LVM_MODE_OFF = 0, LVM_MODE_ON = 1, LVM_MODE_DUMMY = LVM_MAXENUM } LVM_Mode_en;
 
 /* Format */
-typedef enum
-{
-    LVM_STEREO          = 0,
-    LVM_MONOINSTEREO    = 1,
-    LVM_MONO            = 2,
-#ifdef SUPPORT_MC
-    LVM_MULTICHANNEL    = 3,
-#endif
-    LVM_SOURCE_DUMMY    = LVM_MAXENUM
+typedef enum {
+    LVM_STEREO = 0,
+    LVM_MONOINSTEREO = 1,
+    LVM_MONO = 2,
+    LVM_MULTICHANNEL = 3,
+    LVM_SOURCE_DUMMY = LVM_MAXENUM
 } LVM_Format_en;
 
 /* LVM sampling rates */
-typedef enum
-{
-    LVM_FS_8000  = 0,
+typedef enum {
+    LVM_FS_8000 = 0,
     LVM_FS_11025 = 1,
     LVM_FS_12000 = 2,
     LVM_FS_16000 = 3,
@@ -150,32 +117,29 @@
     LVM_FS_96000 = 10,
     LVM_FS_176400 = 11,
     LVM_FS_192000 = 12,
-    LVM_FS_INVALID = LVM_MAXENUM-1,
+    LVM_FS_INVALID = LVM_MAXENUM - 1,
     LVM_FS_DUMMY = LVM_MAXENUM
 } LVM_Fs_en;
 
 /* Memory Types */
-typedef enum
-{
-    LVM_PERSISTENT_SLOW_DATA    = LVM_MEMREGION_PERSISTENT_SLOW_DATA,
-    LVM_PERSISTENT_FAST_DATA    = LVM_MEMREGION_PERSISTENT_FAST_DATA,
-    LVM_PERSISTENT_FAST_COEF    = LVM_MEMREGION_PERSISTENT_FAST_COEF,
-    LVM_TEMPORARY_FAST          = LVM_MEMREGION_TEMPORARY_FAST,
-    LVM_MEMORYTYPE_DUMMY        = LVM_MAXENUM
+typedef enum {
+    LVM_PERSISTENT_SLOW_DATA = LVM_MEMREGION_PERSISTENT_SLOW_DATA,
+    LVM_PERSISTENT_FAST_DATA = LVM_MEMREGION_PERSISTENT_FAST_DATA,
+    LVM_PERSISTENT_FAST_COEF = LVM_MEMREGION_PERSISTENT_FAST_COEF,
+    LVM_TEMPORARY_FAST = LVM_MEMREGION_TEMPORARY_FAST,
+    LVM_MEMORYTYPE_DUMMY = LVM_MAXENUM
 } LVM_MemoryTypes_en;
 
 /* Memory region definition */
-typedef struct
-{
-    LVM_UINT32                  Size;                   /* Region size in bytes */
-    LVM_MemoryTypes_en          Type;                   /* Region type */
-    void                        *pBaseAddress;          /* Pointer to the region base address */
+typedef struct {
+    LVM_UINT32 Size;         /* Region size in bytes */
+    LVM_MemoryTypes_en Type; /* Region type */
+    void* pBaseAddress;      /* Pointer to the region base address */
 } LVM_MemoryRegion_st;
 
 /* Memory table containing the region definitions */
-typedef struct
-{
-    LVM_MemoryRegion_st         Region[LVM_NR_MEMORY_REGIONS];  /* One definition for each region */
+typedef struct {
+    LVM_MemoryRegion_st Region[LVM_NR_MEMORY_REGIONS]; /* One definition for each region */
 } LVM_MemoryTable_st;
 
 /****************************************************************************************/
@@ -183,9 +147,11 @@
 /*  Standard Function Prototypes                                                        */
 /*                                                                                      */
 /****************************************************************************************/
-typedef LVM_INT32 (*LVM_Callback)(void          *pCallbackData,     /* Pointer to the callback data structure */
-                                  void          *pGeneralPurpose,   /* General purpose pointer (e.g. to a data structure needed in the callback) */
-                                  LVM_INT16     GeneralPurpose );   /* General purpose variable (e.g. to be used as callback ID) */
+typedef LVM_INT32 (*LVM_Callback)(
+        void* pCallbackData,   /* Pointer to the callback data structure */
+        void* pGeneralPurpose, /* General purpose pointer (e.g. to a data structure needed in the
+                                  callback) */
+        LVM_INT16 GeneralPurpose); /* General purpose variable (e.g. to be used as callback ID) */
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -193,4 +159,4 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-#endif  /* LVM_TYPES_H */
+#endif /* LVM_TYPES_H */
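[Editor's note, not part of the change] The memory-table types describe the region-based allocation contract: the library reports a Size for each of the LVM_NR_MEMORY_REGIONS slots and the caller fills in pBaseAddress. A stand-in sketch of that caller-side pattern, with locally declared types and invented sizes:

    #include <cstdint>
    #include <cstdio>
    #include <cstdlib>

    constexpr int kNrMemoryRegions = 4;  // LVM_NR_MEMORY_REGIONS

    struct MemoryRegionSketch {
        uint32_t size;       // region size in bytes (reported by the library)
        void* baseAddress;   // filled in by the caller
    };

    int main() {
        MemoryRegionSketch table[kNrMemoryRegions] = {
                {1024, nullptr}, {512, nullptr}, {256, nullptr}, {4096, nullptr}};
        for (auto& region : table) {  // caller owns the allocations
            region.baseAddress = std::malloc(region.size);
            std::printf("region of %u bytes at %p\n", region.size, region.baseAddress);
        }
        for (auto& region : table) std::free(region.baseAddress);
        return 0;
    }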
diff --git a/media/libeffects/lvm/lib/Common/lib/Mixer.h b/media/libeffects/lvm/lib/Common/lib/Mixer.h
index b2e0195..ba605e5 100644
--- a/media/libeffects/lvm/lib/Common/lib/Mixer.h
+++ b/media/libeffects/lvm/lib/Common/lib/Mixer.h
@@ -24,80 +24,62 @@
    INSTANCE MEMORY TYPE DEFINITION
 ***********************************************************************************/
 
-typedef struct
-{
-    LVM_FLOAT   Alpha;                   /* Time constant. Set by calling application. \
-                                            Can be changed at any time */
-    LVM_FLOAT   Target;                  /* Target value.  Set by calling application. \
-                                            Can be changed at any time */
-    LVM_FLOAT   Current;                 /* Current value.  Set by the mixer function. */
-    LVM_INT16   CallbackSet;             /* Boolean.  Should be set by calling application \
-                                            each time the target value is updated */
-    LVM_INT16   CallbackParam;           /* Parameter that will be used in the calback function */
-    void        *pCallbackHandle;        /* Pointer to the instance of the callback function */
-    void        *pGeneralPurpose;        /* Pointer for general purpose usage */
-    LVM_Callback pCallBack;              /* Pointer to the callback function */
+typedef struct {
+    LVM_FLOAT Alpha;         /* Time constant. Set by calling application. \
+                                Can be changed at any time */
+    LVM_FLOAT Target;        /* Target value.  Set by calling application. \
+                                Can be changed at any time */
+    LVM_FLOAT Current;       /* Current value.  Set by the mixer function. */
+    LVM_INT16 CallbackSet;   /* Boolean.  Should be set by calling application \
+                                each time the target value is updated */
+    LVM_INT16 CallbackParam; /* Parameter that will be used in the callback function */
+    void* pCallbackHandle;   /* Pointer to the instance of the callback function */
+    void* pGeneralPurpose;   /* Pointer for general purpose usage */
+    LVM_Callback pCallBack;  /* Pointer to the callback function */
 } Mix_1St_Cll_FLOAT_t;
-typedef struct
-{
-    LVM_FLOAT   Alpha1;
-    LVM_FLOAT   Target1;
-    LVM_FLOAT   Current1;
-    LVM_INT16   CallbackSet1;
-    LVM_INT16   CallbackParam1;
-    void        *pCallbackHandle1;
-    void        *pGeneralPurpose1;
+typedef struct {
+    LVM_FLOAT Alpha1;
+    LVM_FLOAT Target1;
+    LVM_FLOAT Current1;
+    LVM_INT16 CallbackSet1;
+    LVM_INT16 CallbackParam1;
+    void* pCallbackHandle1;
+    void* pGeneralPurpose1;
     LVM_Callback pCallBack1;
 
-    LVM_FLOAT   Alpha2;                   /* Warning the address of this location is passed as a \
-                                             pointer to Mix_1St_Cll_t in some functions */
-    LVM_FLOAT   Target2;
-    LVM_FLOAT   Current2;
-    LVM_INT16   CallbackSet2;
-    LVM_INT16   CallbackParam2;
-    void        *pCallbackHandle2;
-    void        *pGeneralPurpose2;
+    LVM_FLOAT Alpha2; /* Warning: the address of this location is passed as a \
+                         pointer to Mix_1St_Cll_t in some functions */
+    LVM_FLOAT Target2;
+    LVM_FLOAT Current2;
+    LVM_INT16 CallbackSet2;
+    LVM_INT16 CallbackParam2;
+    void* pCallbackHandle2;
+    void* pGeneralPurpose2;
     LVM_Callback pCallBack2;
 } Mix_2St_Cll_FLOAT_t;
 
 /*** General functions ************************************************************/
 
-LVM_FLOAT LVM_Mixer_TimeConstant(LVM_UINT32   tc,
-                                 LVM_UINT32   Fs,
-                                 LVM_UINT16   NumChannels);
+LVM_FLOAT LVM_Mixer_TimeConstant(LVM_UINT32 tc, LVM_UINT32 Fs, LVM_UINT16 NumChannels);
 
-void MixSoft_1St_D32C31_WRA(    Mix_1St_Cll_FLOAT_t       *pInstance,
-                                const LVM_FLOAT     *src,
-                                LVM_FLOAT     *dst,
-                                LVM_INT16     n);
+void MixSoft_1St_D32C31_WRA(Mix_1St_Cll_FLOAT_t* pInstance, const LVM_FLOAT* src, LVM_FLOAT* dst,
+                            LVM_INT16 n);
 
-void MixSoft_2St_D32C31_SAT(    Mix_2St_Cll_FLOAT_t       *pInstance,
-                                const LVM_FLOAT     *src1,
-                                const LVM_FLOAT     *src2,
-                                LVM_FLOAT     *dst,
-                                LVM_INT16     n);
+void MixSoft_2St_D32C31_SAT(Mix_2St_Cll_FLOAT_t* pInstance, const LVM_FLOAT* src1,
+                            const LVM_FLOAT* src2, LVM_FLOAT* dst, LVM_INT16 n);
 
-void MixInSoft_D32C31_SAT(      Mix_1St_Cll_FLOAT_t       *pInstance,
-                                const LVM_FLOAT     *src,
-                                LVM_FLOAT     *dst,
-                                LVM_INT16     n);
+void MixInSoft_D32C31_SAT(Mix_1St_Cll_FLOAT_t* pInstance, const LVM_FLOAT* src, LVM_FLOAT* dst,
+                          LVM_INT16 n);
 
 /**********************************************************************************
    FUNCTION PROTOTYPES (LOW LEVEL SUBFUNCTIONS)
 ***********************************************************************************/
-void Core_MixSoft_1St_D32C31_WRA(   Mix_1St_Cll_FLOAT_t       *pInstance,
-                                    const LVM_FLOAT     *src,
-                                    LVM_FLOAT     *dst,
-                                    LVM_INT16     n);
-void Core_MixHard_2St_D32C31_SAT(   Mix_2St_Cll_FLOAT_t       *pInstance,
-                                    const LVM_FLOAT     *src1,
-                                    const LVM_FLOAT     *src2,
-                                    LVM_FLOAT     *dst,
-                                    LVM_INT16     n);
-void Core_MixInSoft_D32C31_SAT(     Mix_1St_Cll_FLOAT_t       *pInstance,
-                                    const LVM_FLOAT     *src,
-                                    LVM_FLOAT     *dst,
-                                    LVM_INT16     n);
+void Core_MixSoft_1St_D32C31_WRA(Mix_1St_Cll_FLOAT_t* pInstance, const LVM_FLOAT* src,
+                                 LVM_FLOAT* dst, LVM_INT16 n);
+void Core_MixHard_2St_D32C31_SAT(Mix_2St_Cll_FLOAT_t* pInstance, const LVM_FLOAT* src1,
+                                 const LVM_FLOAT* src2, LVM_FLOAT* dst, LVM_INT16 n);
+void Core_MixInSoft_D32C31_SAT(Mix_1St_Cll_FLOAT_t* pInstance, const LVM_FLOAT* src, LVM_FLOAT* dst,
+                               LVM_INT16 n);
 
 /**********************************************************************************/
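[Editor's note, not part of the change] The Alpha/Target/Current triplet suggests that MixSoft_1St_D32C31_WRA ramps the applied gain toward Target with a one-pole step each sample before scaling the input. The update rule below is an assumption chosen to illustrate that behaviour, not the LVM code:

    #include <cstdio>

    int main() {
        float alpha = 0.1f, target = 1.0f, current = 0.0f;  // mixer state (cf. Alpha/Target/Current)
        const float src[8] = {0.5f, 0.5f, 0.5f, 0.5f, 0.5f, 0.5f, 0.5f, 0.5f};
        float dst[8];
        for (int i = 0; i < 8; ++i) {
            current += alpha * (target - current);  // ramp the gain toward the target
            dst[i] = src[i] * current;
        }
        std::printf("first=%.3f last=%.3f\n", dst[0], dst[7]);
        return 0;
    }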
 
diff --git a/media/libeffects/lvm/lib/Common/lib/ScalarArithmetic.h b/media/libeffects/lvm/lib/Common/lib/ScalarArithmetic.h
index ae54419..04b180c 100644
--- a/media/libeffects/lvm/lib/Common/lib/ScalarArithmetic.h
+++ b/media/libeffects/lvm/lib/Common/lib/ScalarArithmetic.h
@@ -30,7 +30,7 @@
 
 /* Absolute value including the corner case for the extreme negative value */
 
-LVM_FLOAT   Abs_Float(LVM_FLOAT     input);
+LVM_FLOAT Abs_Float(LVM_FLOAT input);
 
 /****************************************************************************************
  *  Name        : dB_to_Lin32()
@@ -44,7 +44,6 @@
  *                  (15->01) = decimal part
  *  Returns     : Lin value format 1.16.15
  ****************************************************************************************/
-LVM_FLOAT dB_to_LinFloat(LVM_INT16    db_fix);
+LVM_FLOAT dB_to_LinFloat(LVM_INT16 db_fix);
 
-#endif  /* __SCALARARITHMETIC_H__ */
-
+#endif /* __SCALARARITHMETIC_H__ */
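[Editor's note, not part of the change] dB_to_LinFloat implements the usual decibel-to-linear mapping, lin = 10^(dB/20), with a fixed-point dB input per the format notes above. A plain floating-point reference of the same relation:

    #include <cmath>
    #include <cstdio>

    int main() {
        for (int db = -12; db <= 12; db += 6) {
            double lin = std::pow(10.0, db / 20.0);
            std::printf("%+3d dB -> %.4f\n", db, lin);  // -6 dB ~ 0.5012, +6 dB ~ 1.9953
        }
        return 0;
    }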
diff --git a/media/libeffects/lvm/lib/Common/lib/VectorArithmetic.h b/media/libeffects/lvm/lib/Common/lib/VectorArithmetic.h
index b27bac5..66e3e79 100644
--- a/media/libeffects/lvm/lib/Common/lib/VectorArithmetic.h
+++ b/media/libeffects/lvm/lib/Common/lib/VectorArithmetic.h
@@ -24,24 +24,13 @@
     VARIOUS FUNCTIONS
 ***********************************************************************************/
 
-void LoadConst_Float(          const LVM_FLOAT val,
-                               LVM_FLOAT *dst,
-                               LVM_INT16 n );
+void LoadConst_Float(const LVM_FLOAT val, LVM_FLOAT* dst, LVM_INT16 n);
 
-void Copy_Float(                 const LVM_FLOAT *src,
-                                 LVM_FLOAT *dst,
-                                 LVM_INT16 n );
-#ifdef SUPPORT_MC
-void Copy_Float_Mc_Stereo(       const LVM_FLOAT *src,
-                                 LVM_FLOAT *dst,
-                                 LVM_INT16 NrFrames,
-                                 LVM_INT32 NrChannels);
-void Copy_Float_Stereo_Mc(       const LVM_FLOAT *src,
-                                 LVM_FLOAT *StereoOut,
-                                 LVM_FLOAT *dst,
-                                 LVM_INT16 NrFrames,
-                                 LVM_INT32 NrChannels);
-#endif
+void Copy_Float(const LVM_FLOAT* src, LVM_FLOAT* dst, LVM_INT16 n);
+void Copy_Float_Mc_Stereo(const LVM_FLOAT* src, LVM_FLOAT* dst, LVM_INT16 NrFrames,
+                          LVM_INT32 NrChannels);
+void Copy_Float_Stereo_Mc(const LVM_FLOAT* src, LVM_FLOAT* StereoOut, LVM_FLOAT* dst,
+                          LVM_INT16 NrFrames, LVM_INT32 NrChannels);
 
 /*********************************************************************************
  * note: In Mult3s_16x16() saturation of result is not taken care when           *
@@ -51,10 +40,7 @@
  *       This is the only case which will give wrong result.                     *
  *       For more information refer to Vector_Arithmetic.doc in /doc folder      *
  *********************************************************************************/
-void Mult3s_Float(            const LVM_FLOAT *src,
-                              const LVM_FLOAT val,
-                              LVM_FLOAT *dst,
-                              LVM_INT16 n);
+void Mult3s_Float(const LVM_FLOAT* src, const LVM_FLOAT val, LVM_FLOAT* dst, LVM_INT16 n);
 
 /*********************************************************************************
  * note: In Mult3s_32x16() saturation of result is not taken care when           *
@@ -64,87 +50,54 @@
  *       This is the only extreme condition which is giving unexpected result    *
  *       For more information refer to Vector_Arithmetic.doc in /doc folder      *
  *********************************************************************************/
-void Mult3s_32x16(            const LVM_INT32  *src,
-                              const LVM_INT16 val,
-                                    LVM_INT32  *dst,
-                                    LVM_INT16 n);
-void DelayMix_Float(const LVM_FLOAT *src,           /* Source 1, to be delayed */
-                    LVM_FLOAT *delay,         /* Delay buffer */
-                    LVM_INT16 size,           /* Delay size */
-                    LVM_FLOAT *dst,           /* Source/destination */
-                    LVM_INT16 *pOffset,       /* Delay offset */
-                    LVM_INT16 n)  ;            /* Number of stereo samples */
-void DelayWrite_32(           const LVM_INT32  *src,               /* Source 1, to be delayed */
-                                    LVM_INT32  *delay,             /* Delay buffer */
-                                    LVM_UINT16 size,               /* Delay size */
-                                    LVM_UINT16 *pOffset,           /* Delay offset */
-                                    LVM_INT16 n);
-void Add2_Sat_Float(          const LVM_FLOAT *src,
-                              LVM_FLOAT *dst,
-                              LVM_INT16 n );
-void Mac3s_Sat_Float(         const LVM_FLOAT *src,
-                              const LVM_FLOAT val,
-                              LVM_FLOAT *dst,
-                              LVM_INT16 n);
-void DelayAllPass_Sat_32x16To32(    LVM_INT32  *delay,              /* Delay buffer */
-                                    LVM_UINT16 size,                /* Delay size */
-                                    LVM_INT16 coeff,                /* All pass filter coefficient */
-                                    LVM_UINT16 DelayOffset,         /* Simple delay offset */
-                                    LVM_UINT16 *pAllPassOffset,     /* All pass filter delay offset */
-                                    LVM_INT32  *dst,                /* Source/destination */
-                                    LVM_INT16 n);
+void Mult3s_32x16(const LVM_INT32* src, const LVM_INT16 val, LVM_INT32* dst, LVM_INT16 n);
+void DelayMix_Float(const LVM_FLOAT* src, /* Source 1, to be delayed */
+                    LVM_FLOAT* delay,     /* Delay buffer */
+                    LVM_INT16 size,       /* Delay size */
+                    LVM_FLOAT* dst,       /* Source/destination */
+                    LVM_INT16* pOffset,   /* Delay offset */
+                    LVM_INT16 n);         /* Number of stereo samples */
+void DelayWrite_32(const LVM_INT32* src,  /* Source 1, to be delayed */
+                   LVM_INT32* delay,      /* Delay buffer */
+                   LVM_UINT16 size,       /* Delay size */
+                   LVM_UINT16* pOffset,   /* Delay offset */
+                   LVM_INT16 n);
+void Add2_Sat_Float(const LVM_FLOAT* src, LVM_FLOAT* dst, LVM_INT16 n);
+void Mac3s_Sat_Float(const LVM_FLOAT* src, const LVM_FLOAT val, LVM_FLOAT* dst, LVM_INT16 n);
+void DelayAllPass_Sat_32x16To32(LVM_INT32* delay,           /* Delay buffer */
+                                LVM_UINT16 size,            /* Delay size */
+                                LVM_INT16 coeff,            /* All pass filter coefficient */
+                                LVM_UINT16 DelayOffset,     /* Simple delay offset */
+                                LVM_UINT16* pAllPassOffset, /* All pass filter delay offset */
+                                LVM_INT32* dst,             /* Source/destination */
+                                LVM_INT16 n);
 
 /**********************************************************************************
     SHIFT FUNCTIONS
 ***********************************************************************************/
-void Shift_Sat_Float (const   LVM_INT16   val,
-                      const   LVM_FLOAT   *src,
-                      LVM_FLOAT   *dst,
-                      LVM_INT16   n);
+void Shift_Sat_Float(const LVM_INT16 val, const LVM_FLOAT* src, LVM_FLOAT* dst, LVM_INT16 n);
 /**********************************************************************************
     AUDIO FORMAT CONVERSION FUNCTIONS
 ***********************************************************************************/
-void MonoTo2I_Float( const LVM_FLOAT     *src,
-                     LVM_FLOAT     *dst,
-                     LVM_INT16 n);
-void From2iToMono_Float(         const LVM_FLOAT  *src,
-                                 LVM_FLOAT  *dst,
-                                 LVM_INT16 n);
-#ifdef SUPPORT_MC
-void FromMcToMono_Float(const LVM_FLOAT *src,
-                        LVM_FLOAT *dst,
-                        LVM_INT16 NrFrames,
+void MonoTo2I_Float(const LVM_FLOAT* src, LVM_FLOAT* dst, LVM_INT16 n);
+void From2iToMono_Float(const LVM_FLOAT* src, LVM_FLOAT* dst, LVM_INT16 n);
+void FromMcToMono_Float(const LVM_FLOAT* src, LVM_FLOAT* dst, LVM_INT16 NrFrames,
                         LVM_INT16 NrChannels);
-#endif
-void MSTo2i_Sat_Float(        const LVM_FLOAT *srcM,
-                              const LVM_FLOAT *srcS,
-                              LVM_FLOAT *dst,
-                              LVM_INT16 n );
-void From2iToMS_Float(        const LVM_FLOAT *src,
-                              LVM_FLOAT *dstM,
-                              LVM_FLOAT *dstS,
-                              LVM_INT16 n );
-void JoinTo2i_Float(          const LVM_FLOAT  *srcL,
-                              const LVM_FLOAT  *srcR,
-                              LVM_FLOAT  *dst,
-                              LVM_INT16 n );
+void MSTo2i_Sat_Float(const LVM_FLOAT* srcM, const LVM_FLOAT* srcS, LVM_FLOAT* dst, LVM_INT16 n);
+void From2iToMS_Float(const LVM_FLOAT* src, LVM_FLOAT* dstM, LVM_FLOAT* dstS, LVM_INT16 n);
+void JoinTo2i_Float(const LVM_FLOAT* srcL, const LVM_FLOAT* srcR, LVM_FLOAT* dst, LVM_INT16 n);
 
 /**********************************************************************************
     DATA TYPE CONVERSION FUNCTIONS
 ***********************************************************************************/
 
-void Int16LShiftToInt32_16x32(const LVM_INT16 *src,
-                                    LVM_INT32  *dst,
-                                    LVM_INT16 n,
-                                    LVM_INT16 shift );
+void Int16LShiftToInt32_16x32(const LVM_INT16* src, LVM_INT32* dst, LVM_INT16 n, LVM_INT16 shift);
 
-void Int32RShiftToInt16_Sat_32x16(const  LVM_INT32  *src,
-                                    LVM_INT16 *dst,
-                                    LVM_INT16 n,
-                                    LVM_INT16 shift );
+void Int32RShiftToInt16_Sat_32x16(const LVM_INT32* src, LVM_INT16* dst, LVM_INT16 n,
+                                  LVM_INT16 shift);
 
 /**********************************************************************************/
 
-#endif  /* _VECTOR_ARITHMETIC_H_ */
+#endif /* _VECTOR_ARITHMETIC_H_ */
 
 /**********************************************************************************/
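[Editor's note, not part of the change] Among the conversion helpers above, MonoTo2I_Float and From2iToMono_Float are the usual mono/interleaved-stereo conversions. A sketch of the conventional behaviour, assuming From2iToMono averages the two channels:

    #include <cstdio>

    int main() {
        const float mono[4] = {0.1f, 0.2f, 0.3f, 0.4f};
        float stereo[8];
        for (int i = 0; i < 4; ++i) {  // MonoTo2I: duplicate into interleaved L/R
            stereo[2 * i] = mono[i];
            stereo[2 * i + 1] = mono[i];
        }
        float back[4];
        for (int i = 0; i < 4; ++i)    // From2iToMono: (L + R) / 2
            back[i] = 0.5f * (stereo[2 * i] + stereo[2 * i + 1]);
        std::printf("round trip: %.2f %.2f %.2f %.2f\n", back[0], back[1], back[2], back[3]);
        return 0;
    }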
diff --git a/media/libeffects/lvm/lib/Common/src/AGC_MIX_VOL_2St1Mon_D32_WRA.cpp b/media/libeffects/lvm/lib/Common/src/AGC_MIX_VOL_2St1Mon_D32_WRA.cpp
index e18aa78..ae8cdad 100644
--- a/media/libeffects/lvm/lib/Common/src/AGC_MIX_VOL_2St1Mon_D32_WRA.cpp
+++ b/media/libeffects/lvm/lib/Common/src/AGC_MIX_VOL_2St1Mon_D32_WRA.cpp
@@ -30,10 +30,10 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-#define VOL_TC_SHIFT                                        21          /* As a power of 2 */
-#define DECAY_SHIFT                                        10           /* As a power of 2 */
-#define VOL_TC_FLOAT                                      2.0f          /* As a power of 2 */
-#define DECAY_FAC_FLOAT                                  64.0f          /* As a power of 2 */
+#define VOL_TC_SHIFT 21       /* As a power of 2 */
+#define DECAY_SHIFT 10        /* As a power of 2 */
+#define VOL_TC_FLOAT 2.0f     /* As a power of 2 */
+#define DECAY_FAC_FLOAT 64.0f /* As a power of 2 */
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -69,91 +69,83 @@
 /* NOTES:                                                                               */
 /*                                                                                      */
 /****************************************************************************************/
-void AGC_MIX_VOL_2St1Mon_D32_WRA(AGC_MIX_VOL_2St1Mon_FLOAT_t  *pInstance,     /* Instance pointer */
-                                 const LVM_FLOAT            *pStSrc,        /* Stereo source */
-                                 const LVM_FLOAT            *pMonoSrc,      /* Mono source */
-                                 LVM_FLOAT                  *pDst,          /* Stereo destination */
-                                 LVM_UINT16                 NumSamples)     /* Number of samples */
+void AGC_MIX_VOL_2St1Mon_D32_WRA(AGC_MIX_VOL_2St1Mon_FLOAT_t* pInstance, /* Instance pointer */
+                                 const LVM_FLOAT* pStSrc,                /* Stereo source */
+                                 const LVM_FLOAT* pMonoSrc,              /* Mono source */
+                                 LVM_FLOAT* pDst,                        /* Stereo destination */
+                                 LVM_UINT16 NumSamples)                  /* Number of samples */
 {
-
     /*
      * General variables
      */
-    LVM_UINT16      i;                                          /* Sample index */
-    LVM_FLOAT       Left;                                       /* Left sample */
-    LVM_FLOAT       Right;                                      /* Right sample */
-    LVM_FLOAT       Mono;                                       /* Mono sample */
-    LVM_FLOAT       AbsPeak;                                    /* Absolute peak signal */
-    LVM_FLOAT       AGC_Mult;                                   /* Short AGC gain */
-    LVM_FLOAT       Vol_Mult;                                   /* Short volume */
+    LVM_UINT16 i;       /* Sample index */
+    LVM_FLOAT Left;     /* Left sample */
+    LVM_FLOAT Right;    /* Right sample */
+    LVM_FLOAT Mono;     /* Mono sample */
+    LVM_FLOAT AbsPeak;  /* Absolute peak signal */
+    LVM_FLOAT AGC_Mult; /* Short AGC gain */
+    LVM_FLOAT Vol_Mult; /* Short volume */
 
     /*
      * Instance control variables
      */
-    LVM_FLOAT      AGC_Gain      = pInstance->AGC_Gain;         /* Get the current AGC gain */
-    LVM_FLOAT      AGC_MaxGain   = pInstance->AGC_MaxGain;      /* Get maximum AGC gain */
-    LVM_FLOAT      AGC_Attack    = pInstance->AGC_Attack;       /* Attack scaler */
-    LVM_FLOAT      AGC_Decay     = (pInstance->AGC_Decay * (1 << (DECAY_SHIFT)));/* Decay scaler */
-    LVM_FLOAT      AGC_Target    = pInstance->AGC_Target;       /* Get the target level */
-    LVM_FLOAT      Vol_Current   = pInstance->Volume;           /* Actual volume setting */
-    LVM_FLOAT      Vol_Target    = pInstance->Target;           /* Target volume setting */
-    LVM_FLOAT      Vol_TC        = pInstance->VolumeTC;         /* Time constant */
+    LVM_FLOAT AGC_Gain = pInstance->AGC_Gain;       /* Get the current AGC gain */
+    LVM_FLOAT AGC_MaxGain = pInstance->AGC_MaxGain; /* Get maximum AGC gain */
+    LVM_FLOAT AGC_Attack = pInstance->AGC_Attack;   /* Attack scaler */
+    LVM_FLOAT AGC_Decay = (pInstance->AGC_Decay * (1 << (DECAY_SHIFT))); /* Decay scaler */
+    LVM_FLOAT AGC_Target = pInstance->AGC_Target;                        /* Get the target level */
+    LVM_FLOAT Vol_Current = pInstance->Volume;                           /* Actual volume setting */
+    LVM_FLOAT Vol_Target = pInstance->Target;                            /* Target volume setting */
+    LVM_FLOAT Vol_TC = pInstance->VolumeTC;                              /* Time constant */
 
     /*
      * Process on a sample by sample basis
      */
-    for (i = 0; i < NumSamples; i++)                                  /* For each sample */
+    for (i = 0; i < NumSamples; i++) /* For each sample */
     {
-
         /*
          * Get the short scalers
          */
-        AGC_Mult    = (LVM_FLOAT)(AGC_Gain);              /* Get the short AGC gain */
-        Vol_Mult    = (LVM_FLOAT)(Vol_Current);           /* Get the short volume gain */
+        AGC_Mult = (LVM_FLOAT)(AGC_Gain);    /* Get the short AGC gain */
+        Vol_Mult = (LVM_FLOAT)(Vol_Current); /* Get the short volume gain */
 
         /*
          * Get the input samples
          */
-        Left  = *pStSrc++;                                      /* Get the left sample */
-        Right = *pStSrc++;                                      /* Get the right sample */
-        Mono  = *pMonoSrc++;                                    /* Get the mono sample */
+        Left = *pStSrc++;   /* Get the left sample */
+        Right = *pStSrc++;  /* Get the right sample */
+        Mono = *pMonoSrc++; /* Get the mono sample */
 
         /*
          * Apply the AGC gain to the mono input and mix with the stereo signal
          */
-        Left  += (Mono * AGC_Mult);                               /* Mix in the mono signal */
+        Left += (Mono * AGC_Mult); /* Mix in the mono signal */
         Right += (Mono * AGC_Mult);
 
         /*
          * Apply the volume and write to the output stream
          */
-        Left  = Left  * Vol_Mult;
+        Left = Left * Vol_Mult;
         Right = Right * Vol_Mult;
-        *pDst++ = Left;                                         /* Save the results */
+        *pDst++ = Left; /* Save the results */
         *pDst++ = Right;
 
         /*
          * Update the AGC gain
          */
         AbsPeak = Abs_Float(Left) > Abs_Float(Right) ? Abs_Float(Left) : Abs_Float(Right);
-        if (AbsPeak > AGC_Target)
-        {
+        if (AbsPeak > AGC_Target) {
             /*
              * The signal is too large so decrease the gain
              */
             AGC_Gain = AGC_Gain * AGC_Attack;
-        }
-        else
-        {
+        } else {
             /*
              * The signal is too small so increase the gain
              */
-            if (AGC_Gain > AGC_MaxGain)
-            {
+            if (AGC_Gain > AGC_MaxGain) {
                 AGC_Gain -= (AGC_Decay);
-            }
-            else
-            {
+            } else {
                 AGC_Gain += (AGC_Decay);
             }
         }
@@ -161,18 +153,17 @@
         /*
          * Update the gain
          */
-        Vol_Current +=  (Vol_Target - Vol_Current) * ((LVM_FLOAT)Vol_TC / VOL_TC_FLOAT);
+        Vol_Current += (Vol_Target - Vol_Current) * ((LVM_FLOAT)Vol_TC / VOL_TC_FLOAT);
     }
 
     /*
      * Update the parameters
      */
-    pInstance->Volume = Vol_Current;                            /* Actual volume setting */
+    pInstance->Volume = Vol_Current; /* Actual volume setting */
     pInstance->AGC_Gain = AGC_Gain;
 
     return;
 }
-#ifdef SUPPORT_MC
 /****************************************************************************************/
 /*                                                                                      */
 /* FUNCTION:                  AGC_MIX_VOL_Mc1Mon_D32_WRA                                */
@@ -209,93 +200,80 @@
 /* NOTES:                                                                               */
 /*                                                                                      */
 /****************************************************************************************/
-void AGC_MIX_VOL_Mc1Mon_D32_WRA(AGC_MIX_VOL_2St1Mon_FLOAT_t  *pInstance,
-                                 const LVM_FLOAT            *pMcSrc,
-                                 const LVM_FLOAT            *pMonoSrc,
-                                 LVM_FLOAT                  *pDst,
-                                 LVM_UINT16                 NrFrames,
-                                 LVM_UINT16                 NrChannels)
-{
-
+void AGC_MIX_VOL_Mc1Mon_D32_WRA(AGC_MIX_VOL_2St1Mon_FLOAT_t* pInstance, const LVM_FLOAT* pMcSrc,
+                                const LVM_FLOAT* pMonoSrc, LVM_FLOAT* pDst, LVM_UINT16 NrFrames,
+                                LVM_UINT16 NrChannels) {
     /*
      * General variables
      */
-    LVM_UINT16      i, jj;                                      /* Sample index */
-    LVM_FLOAT       SampleVal;                                  /* Sample value */
-    LVM_FLOAT       Mono;                                       /* Mono sample */
-    LVM_FLOAT       AbsPeak;                                    /* Absolute peak signal */
-    LVM_FLOAT       AGC_Mult;                                   /* Short AGC gain */
-    LVM_FLOAT       Vol_Mult;                                   /* Short volume */
+    LVM_UINT16 i, jj;    /* Sample index */
+    LVM_FLOAT SampleVal; /* Sample value */
+    LVM_FLOAT Mono;      /* Mono sample */
+    LVM_FLOAT AbsPeak;   /* Absolute peak signal */
+    LVM_FLOAT AGC_Mult;  /* Short AGC gain */
+    LVM_FLOAT Vol_Mult;  /* Short volume */
 
     /*
      * Instance control variables
      */
-    LVM_FLOAT      AGC_Gain      = pInstance->AGC_Gain;         /* Get the current AGC gain */
-    LVM_FLOAT      AGC_MaxGain   = pInstance->AGC_MaxGain;      /* Get maximum AGC gain */
-    LVM_FLOAT      AGC_Attack    = pInstance->AGC_Attack;       /* Attack scaler */
+    LVM_FLOAT AGC_Gain = pInstance->AGC_Gain;       /* Get the current AGC gain */
+    LVM_FLOAT AGC_MaxGain = pInstance->AGC_MaxGain; /* Get maximum AGC gain */
+    LVM_FLOAT AGC_Attack = pInstance->AGC_Attack;   /* Attack scaler */
     /* Decay scaler */
-    LVM_FLOAT      AGC_Decay     = (pInstance->AGC_Decay * (1 << (DECAY_SHIFT)));
-    LVM_FLOAT      AGC_Target    = pInstance->AGC_Target;       /* Get the target level */
-    LVM_FLOAT      Vol_Current   = pInstance->Volume;           /* Actual volume setting */
-    LVM_FLOAT      Vol_Target    = pInstance->Target;           /* Target volume setting */
-    LVM_FLOAT      Vol_TC        = pInstance->VolumeTC;         /* Time constant */
+    LVM_FLOAT AGC_Decay = (pInstance->AGC_Decay * (1 << (DECAY_SHIFT)));
+    LVM_FLOAT AGC_Target = pInstance->AGC_Target; /* Get the target level */
+    LVM_FLOAT Vol_Current = pInstance->Volume;    /* Actual volume setting */
+    LVM_FLOAT Vol_Target = pInstance->Target;     /* Target volume setting */
+    LVM_FLOAT Vol_TC = pInstance->VolumeTC;       /* Time constant */
 
     /*
      * Process on a sample by sample basis
      */
-    for (i = 0; i < NrFrames; i++)                                  /* For each frame */
+    for (i = 0; i < NrFrames; i++) /* For each frame */
     {
-
         /*
          * Get the scalers
          */
-        AGC_Mult    = (LVM_FLOAT)(AGC_Gain);              /* Get the AGC gain */
-        Vol_Mult    = (LVM_FLOAT)(Vol_Current);           /* Get the volume gain */
+        AGC_Mult = (LVM_FLOAT)(AGC_Gain);    /* Get the AGC gain */
+        Vol_Mult = (LVM_FLOAT)(Vol_Current); /* Get the volume gain */
 
         AbsPeak = 0.0f;
         /*
          * Get the input samples
          */
-        for (jj = 0; jj < NrChannels; jj++)
-        {
-            SampleVal  = *pMcSrc++;                       /* Get the sample value of jj Channel*/
-            Mono       = *pMonoSrc;                       /* Get the mono sample */
+        for (jj = 0; jj < NrChannels; jj++) {
+            SampleVal = *pMcSrc++; /* Get the sample value of channel jj */
+            Mono = *pMonoSrc;      /* Get the mono sample */
 
             /*
              * Apply the AGC gain to the mono input and mix with the input signal
              */
-            SampleVal  += (Mono * AGC_Mult);                        /* Mix in the mono signal */
+            SampleVal += (Mono * AGC_Mult); /* Mix in the mono signal */
 
             /*
              * Apply the volume and write to the output stream
              */
-            SampleVal  = SampleVal  * Vol_Mult;
+            SampleVal = SampleVal * Vol_Mult;
 
-            *pDst++ = SampleVal;                                         /* Save the results */
+            *pDst++ = SampleVal; /* Save the results */
 
             /*
              * Update the AGC gain
              */
             AbsPeak = Abs_Float(SampleVal) > AbsPeak ? Abs_Float(SampleVal) : AbsPeak;
         }
-        if (AbsPeak > AGC_Target)
-        {
+        if (AbsPeak > AGC_Target) {
             /*
              * The signal is too large so decrease the gain
              */
             AGC_Gain = AGC_Gain * AGC_Attack;
-        }
-        else
-        {
+        } else {
             /*
              * The signal is too small so increase the gain
              */
-            if (AGC_Gain > AGC_MaxGain)
-            {
+            if (AGC_Gain > AGC_MaxGain) {
                 AGC_Gain -= (AGC_Decay);
-            }
-            else
-            {
+            } else {
                 AGC_Gain += (AGC_Decay);
             }
         }
@@ -303,15 +281,14 @@
         /*
          * Update the gain
          */
-        Vol_Current +=  (Vol_Target - Vol_Current) * ((LVM_FLOAT)Vol_TC / VOL_TC_FLOAT);
+        Vol_Current += (Vol_Target - Vol_Current) * ((LVM_FLOAT)Vol_TC / VOL_TC_FLOAT);
     }
 
     /*
      * Update the parameters
      */
-    pInstance->Volume = Vol_Current;                            /* Actual volume setting */
+    pInstance->Volume = Vol_Current; /* Actual volume setting */
     pInstance->AGC_Gain = AGC_Gain;
 
     return;
 }
-#endif /*SUPPORT_MC*/
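
The two AGC kernels above share the same per-sample control logic: compare the output peak against AGC_Target, multiply the gain by AGC_Attack when the signal is too loud, and otherwise step it by AGC_Decay back toward AGC_MaxGain, while the volume follows a one-pole ramp toward Vol_Target. A minimal standalone sketch of that update, assuming float state (agc_step/volume_step and the parameter names are illustrative, not part of the library):

    // Hypothetical per-sample AGC/volume update mirroring the kernels above.
    struct AgcParams {
        float target;   // AGC_Target: peak level the loop regulates towards
        float maxGain;  // AGC_MaxGain: ceiling for the gain
        float attack;   // AGC_Attack: multiplicative factor applied when too loud
        float decay;    // AGC_Decay scaled by (1 << DECAY_SHIFT)
    };

    static float agc_step(float gain, float absPeak, const AgcParams& p) {
        if (absPeak > p.target) {
            gain *= p.attack;         // signal too large: reduce the gain
        } else if (gain > p.maxGain) {
            gain -= p.decay;          // above the ceiling: decay back down
        } else {
            gain += p.decay;          // signal too small: increase the gain
        }
        return gain;
    }

    static float volume_step(float vol, float target, float tc) {
        return vol + (target - vol) * tc;  // tc corresponds to Vol_TC / VOL_TC_FLOAT
    }
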
diff --git a/media/libeffects/lvm/lib/Common/src/Abs_32.cpp b/media/libeffects/lvm/lib/Common/src/Abs_32.cpp
index e013809..3e37d89 100644
--- a/media/libeffects/lvm/lib/Common/src/Abs_32.cpp
+++ b/media/libeffects/lvm/lib/Common/src/Abs_32.cpp
@@ -19,7 +19,7 @@
 /*  Include files                                                                       */
 /*######################################################################################*/
 
-#include    "ScalarArithmetic.h"
+#include "ScalarArithmetic.h"
 
 /****************************************************************************************
  *  Name        : Abs_32()
@@ -30,27 +30,20 @@
  *  Remarks     :
  ****************************************************************************************/
 
-LVM_INT32    Abs_32(LVM_INT32    input)
-{
-    if(input <  0)
-    {
-        if (input == (LVM_INT32)(0x80000000U))
-        {
+LVM_INT32 Abs_32(LVM_INT32 input) {
+    if (input < 0) {
+        if (input == (LVM_INT32)(0x80000000U)) {
             /* The corner case, so set to the maximum positive value */
-            input=(LVM_INT32) 0x7fffffff;
-        }
-        else
-        {
+            input = (LVM_INT32)0x7fffffff;
+        } else {
             /* Negative input, so invert */
             input = (LVM_INT32)(-input);
         }
     }
     return input;
 }
-LVM_FLOAT    Abs_Float(LVM_FLOAT    input)
-{
-    if(input <  0)
-    {
+LVM_FLOAT Abs_Float(LVM_FLOAT input) {
+    if (input < 0) {
         /* Negative input, so invert */
         input = (LVM_FLOAT)(-input);
     }
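
The integer path in Abs_32() needs the 0x80000000 special case because the most negative 32-bit value has no positive counterpart: negating it overflows, so the library clamps it to 0x7fffffff instead. An equivalent sketch using <cstdint> names (abs_clamped is a hypothetical helper, not part of the library):

    #include <cstdint>

    static int32_t abs_clamped(int32_t x) {
        if (x == INT32_MIN) {
            return INT32_MAX;       // |-2^31| does not fit in int32, so saturate
        }
        return x < 0 ? -x : x;      // ordinary absolute value otherwise
    }
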
diff --git a/media/libeffects/lvm/lib/Common/src/Add2_Sat_16x16.cpp b/media/libeffects/lvm/lib/Common/src/Add2_Sat_16x16.cpp
index 6978fe7..be20521 100644
--- a/media/libeffects/lvm/lib/Common/src/Add2_Sat_16x16.cpp
+++ b/media/libeffects/lvm/lib/Common/src/Add2_Sat_16x16.cpp
@@ -25,27 +25,18 @@
    FUNCTION ADD2_SAT_16X16
 ***********************************************************************************/
 
-void Add2_Sat_16x16( const LVM_INT16 *src,
-                           LVM_INT16 *dst,
-                           LVM_INT16  n )
-{
+void Add2_Sat_16x16(const LVM_INT16* src, LVM_INT16* dst, LVM_INT16 n) {
     LVM_INT32 Temp;
     LVM_INT16 ii;
-    for (ii = n; ii != 0; ii--)
-    {
-        Temp = ((LVM_INT32) *src) + ((LVM_INT32) *dst);
+    for (ii = n; ii != 0; ii--) {
+        Temp = ((LVM_INT32)*src) + ((LVM_INT32)*dst);
         src++;
 
-        if (Temp > 0x00007FFF)
-        {
+        if (Temp > 0x00007FFF) {
             *dst = 0x7FFF;
-        }
-        else if (Temp < -0x00008000)
-        {
-            *dst = - 0x8000;
-        }
-        else
-        {
+        } else if (Temp < -0x00008000) {
+            *dst = -0x8000;
+        } else {
             *dst = (LVM_INT16)Temp;
         }
         dst++;
diff --git a/media/libeffects/lvm/lib/Common/src/Add2_Sat_32x32.cpp b/media/libeffects/lvm/lib/Common/src/Add2_Sat_32x32.cpp
index a48e668..420f93e 100644
--- a/media/libeffects/lvm/lib/Common/src/Add2_Sat_32x32.cpp
+++ b/media/libeffects/lvm/lib/Common/src/Add2_Sat_32x32.cpp
@@ -25,28 +25,21 @@
    FUNCTION ADD2_SAT_32X32
 ***********************************************************************************/
 
-void Add2_Sat_32x32( const LVM_INT32  *src,
-                           LVM_INT32  *dst,
-                           LVM_INT16  n )
-{
-    LVM_INT32 a,b,c;
+void Add2_Sat_32x32(const LVM_INT32* src, LVM_INT32* dst, LVM_INT16 n) {
+    LVM_INT32 a, b, c;
     LVM_INT16 ii;
-    for (ii = n; ii != 0; ii--)
-    {
-        a=*src;
+    for (ii = n; ii != 0; ii--) {
+        a = *src;
         src++;
 
-        b=*dst;
-        c=a+b;
-        if ((((c ^ a) & (c ^ b)) >> 31)!=0)     /* overflow / underflow */
+        b = *dst;
+        c = a + b;
+        if ((((c ^ a) & (c ^ b)) >> 31) != 0) /* overflow / underflow */
         {
-            if(a<0)
-            {
-                c=0x80000000L;
-            }
-            else
-            {
-                c=0x7FFFFFFFL;
+            if (a < 0) {
+                c = 0x80000000L;
+            } else {
+                c = 0x7FFFFFFFL;
             }
         }
 
@@ -56,27 +49,18 @@
     return;
 }
 
-void Add2_Sat_Float( const LVM_FLOAT  *src,
-                           LVM_FLOAT  *dst,
-                           LVM_INT16  n )
-{
+void Add2_Sat_Float(const LVM_FLOAT* src, LVM_FLOAT* dst, LVM_INT16 n) {
     LVM_FLOAT Temp;
     LVM_INT16 ii;
-    for (ii = n; ii != 0; ii--)
-    {
-        Temp = ((LVM_FLOAT) *src) + ((LVM_FLOAT) *dst);
+    for (ii = n; ii != 0; ii--) {
+        Temp = ((LVM_FLOAT)*src) + ((LVM_FLOAT)*dst);
         src++;
 
-        if (Temp > 1.000000f)
-        {
+        if (Temp > 1.000000f) {
             *dst = 1.000000f;
-        }
-        else if (Temp < -1.000000f)
-        {
+        } else if (Temp < -1.000000f) {
             *dst = -1.000000f;
-        }
-        else
-        {
+        } else {
             *dst = Temp;
         }
         dst++;
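
The 32-bit variant above detects overflow with a sign-bit test: a two's-complement sum can only overflow when both operands share a sign and the result flips it, which is exactly when ((c ^ a) & (c ^ b)) is negative, and the saturation value then follows the sign of the operands. A standalone sketch of the same idea (add_sat_32 is a hypothetical helper; the unsigned cast is only there to keep the wrapping add well defined in portable C++):

    #include <cstdint>

    static int32_t add_sat_32(int32_t a, int32_t b) {
        int32_t c = (int32_t)((uint32_t)a + (uint32_t)b);  // wrapping add
        if (((c ^ a) & (c ^ b)) < 0) {                     // c disagrees in sign with both a and b
            c = (a < 0) ? INT32_MIN : INT32_MAX;           // saturate in the overflow direction
        }
        return c;
    }
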
diff --git a/media/libeffects/lvm/lib/Common/src/BP_1I_D16F16C14_TRC_WRA_01.cpp b/media/libeffects/lvm/lib/Common/src/BP_1I_D16F16C14_TRC_WRA_01.cpp
index 1a5e07f..198a6a1 100644
--- a/media/libeffects/lvm/lib/Common/src/BP_1I_D16F16C14_TRC_WRA_01.cpp
+++ b/media/libeffects/lvm/lib/Common/src/BP_1I_D16F16C14_TRC_WRA_01.cpp
@@ -32,45 +32,38 @@
  pBiquadState->pDelays[2] is y(n-1)L in Q0 format
  pBiquadState->pDelays[3] is y(n-2)L in Q0 format
 ***************************************************************************/
-void BP_1I_D16F16C14_TRC_WRA_01 ( Biquad_FLOAT_Instance_t       *pInstance,
-                                  LVM_FLOAT               *pDataIn,
-                                  LVM_FLOAT               *pDataOut,
-                                  LVM_INT16               NrSamples)
+void BP_1I_D16F16C14_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+                                LVM_FLOAT* pDataOut, LVM_INT16 NrSamples)
 
-    {
-        LVM_FLOAT ynL;
-        LVM_INT16 ii;
-        PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
+{
+    LVM_FLOAT ynL;
+    LVM_INT16 ii;
+    PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT)pInstance;
 
-         for (ii = NrSamples; ii != 0; ii--)
-         {
+    for (ii = NrSamples; ii != 0; ii--) {
+        /**************************************************************************
+                        PROCESSING OF THE LEFT CHANNEL
+        ***************************************************************************/
+        // ynL= (A0  * (x(n)L  - x(n-2)L  ) )
+        ynL = pBiquadState->coefs[0] * ((*pDataIn) - pBiquadState->pDelays[1]);
 
-            /**************************************************************************
-                            PROCESSING OF THE LEFT CHANNEL
-            ***************************************************************************/
-            // ynL= (A0  * (x(n)L  - x(n-2)L  ) )
-            ynL = pBiquadState->coefs[0] * ((*pDataIn)-pBiquadState->pDelays[1]);
+        // ynL+= ((-B2  * y(n-2)L  ) )
+        ynL += pBiquadState->coefs[1] * pBiquadState->pDelays[3];
 
-            // ynL+= ((-B2  * y(n-2)L  ) )
-            ynL += pBiquadState->coefs[1] * pBiquadState->pDelays[3];
+        // ynL+= ((-B1  * y(n-1)L  ) )
+        ynL += pBiquadState->coefs[2] * pBiquadState->pDelays[2];
 
-            // ynL+= ((-B1  * y(n-1)L  ) )
-            ynL += pBiquadState->coefs[2] * pBiquadState->pDelays[2];
+        /**************************************************************************
+                        UPDATING THE DELAYS
+        ***************************************************************************/
+        pBiquadState->pDelays[3] = pBiquadState->pDelays[2];  // y(n-2)L=y(n-1)L
+        pBiquadState->pDelays[1] = pBiquadState->pDelays[0];  // x(n-2)L=x(n-1)L
+        pBiquadState->pDelays[2] = ynL;                       // Update y(n-1)L
+        pBiquadState->pDelays[0] = (*pDataIn++);              // Update x(n-1)L
 
-            /**************************************************************************
-                            UPDATING THE DELAYS
-            ***************************************************************************/
-            pBiquadState->pDelays[3] = pBiquadState->pDelays[2]; // y(n-2)L=y(n-1)L
-            pBiquadState->pDelays[1] = pBiquadState->pDelays[0]; // x(n-2)L=x(n-1)L
-            pBiquadState->pDelays[2] = ynL; // Update y(n-1)L
-            pBiquadState->pDelays[0] = (*pDataIn++); // Update x(n-1)L
-
-            /**************************************************************************
-                            WRITING THE OUTPUT
-            ***************************************************************************/
-            *pDataOut++=ynL; // Write Left output
-
-        }
-
+        /**************************************************************************
+                        WRITING THE OUTPUT
+        ***************************************************************************/
+        *pDataOut++ = ynL;  // Write Left output
     }
-
+}
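
All of the BP_1I_* kernels in this change run the same three-coefficient band-pass recurrence that the in-line comments spell out, y(n) = A0*(x(n) - x(n-2)) + (-B2)*y(n-2) + (-B1)*y(n-1), with pDelays[] holding x(n-1), x(n-2), y(n-1), y(n-2). A free-standing single-channel sketch of that loop (the names are illustrative; whether the minus signs are folded into the stored B1/B2 values is decided where the coefficients are generated, not here):

    // Hypothetical single-channel equivalent of the band-pass kernels above.
    struct BpState {
        float a0, b2, b1;      // stored as coefs[0..2] by the *_Init routines
        float x1, x2, y1, y2;  // delay line: x(n-1), x(n-2), y(n-1), y(n-2)
    };

    static void bp_process(BpState* s, const float* in, float* out, int n) {
        for (int i = 0; i < n; ++i) {
            float y = s->a0 * (in[i] - s->x2)  // A0 * (x(n) - x(n-2))
                    + s->b2 * s->y2            // (-B2) * y(n-2)
                    + s->b1 * s->y1;           // (-B1) * y(n-1)
            s->x2 = s->x1;  s->x1 = in[i];     // shift the input delay line
            s->y2 = s->y1;  s->y1 = y;         // shift the output delay line
            out[i] = y;
        }
    }
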
diff --git a/media/libeffects/lvm/lib/Common/src/BP_1I_D16F16Css_TRC_WRA_01_Init.cpp b/media/libeffects/lvm/lib/Common/src/BP_1I_D16F16Css_TRC_WRA_01_Init.cpp
index 60b6c16..6d36302 100644
--- a/media/libeffects/lvm/lib/Common/src/BP_1I_D16F16Css_TRC_WRA_01_Init.cpp
+++ b/media/libeffects/lvm/lib/Common/src/BP_1I_D16F16Css_TRC_WRA_01_Init.cpp
@@ -37,12 +37,11 @@
 /* RETURNS:                                                                */
 /*   void return code                                                      */
 /*-------------------------------------------------------------------------*/
-void BP_1I_D16F16Css_TRC_WRA_01_Init (   Biquad_FLOAT_Instance_t          *pInstance,
-                                         Biquad_1I_Order2_FLOAT_Taps_t    *pTaps,
-                                         BP_FLOAT_Coefs_t                  *pCoef)
-{
-    PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
-    pBiquadState->pDelays      = (LVM_FLOAT *) pTaps;
+void BP_1I_D16F16Css_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t* pInstance,
+                                     Biquad_1I_Order2_FLOAT_Taps_t* pTaps,
+                                     BP_FLOAT_Coefs_t* pCoef) {
+    PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT)pInstance;
+    pBiquadState->pDelays = (LVM_FLOAT*)pTaps;
 
     pBiquadState->coefs[0] = pCoef->A0;
     pBiquadState->coefs[1] = pCoef->B2;
@@ -50,4 +49,3 @@
 }
 /*-------------------------------------------------------------------------*/
 /* End Of File: BP_1I_D16F16Css_TRC_WRA_01_Init.c                              */
-
diff --git a/media/libeffects/lvm/lib/Common/src/BP_1I_D16F16Css_TRC_WRA_01_Private.h b/media/libeffects/lvm/lib/Common/src/BP_1I_D16F16Css_TRC_WRA_01_Private.h
index 8a000b6..a41c855 100644
--- a/media/libeffects/lvm/lib/Common/src/BP_1I_D16F16Css_TRC_WRA_01_Private.h
+++ b/media/libeffects/lvm/lib/Common/src/BP_1I_D16F16Css_TRC_WRA_01_Private.h
@@ -19,19 +19,16 @@
 
 /* The internal state variables are implemented in a (for the user)  hidden structure */
 /* In this (private) file, the internal structure is declared for private use.        */
-typedef struct _Filter_State_
-{
-  LVM_INT32 *       pDelays;        /* pointer to the delayed samples (data of 32 bits)   */
-  LVM_INT32         coefs[3];       /* pointer to the filter coefficients */
-}Filter_State;
+typedef struct _Filter_State_ {
+    LVM_INT32* pDelays; /* pointer to the delayed samples (data of 32 bits)   */
+    LVM_INT32 coefs[3]; /* pointer to the filter coefficients */
+} Filter_State;
 
-typedef Filter_State * PFilter_State ;
+typedef Filter_State* PFilter_State;
 
-typedef struct _Filter_State_FLOAT
-{
-
-    LVM_FLOAT *       pDelays;        /* pointer to the delayed samples (data of 32 bits)   */
-    LVM_FLOAT         coefs[3];       /* pointer to the filter coefficients */
-}Filter_State_FLOAT;
-typedef Filter_State_FLOAT * PFilter_State_FLOAT ;
+typedef struct _Filter_State_FLOAT {
+    LVM_FLOAT* pDelays; /* pointer to the delayed samples (data of 32 bits)   */
+    LVM_FLOAT coefs[3]; /* pointer to the filter coefficients */
+} Filter_State_FLOAT;
+typedef Filter_State_FLOAT* PFilter_State_FLOAT;
 #endif /*_BP_1I_D16F16CSS_TRC_WRA_01_PRIVATE_H_*/
diff --git a/media/libeffects/lvm/lib/Common/src/BP_1I_D16F32C30_TRC_WRA_01.cpp b/media/libeffects/lvm/lib/Common/src/BP_1I_D16F32C30_TRC_WRA_01.cpp
index c844d03..d4d4eb1 100644
--- a/media/libeffects/lvm/lib/Common/src/BP_1I_D16F32C30_TRC_WRA_01.cpp
+++ b/media/libeffects/lvm/lib/Common/src/BP_1I_D16F32C30_TRC_WRA_01.cpp
@@ -32,22 +32,18 @@
  pBiquadState->pDelays[2] is y(n-1)L in Q16 format
  pBiquadState->pDelays[3] is y(n-2)L in Q16 format
 ***************************************************************************/
-void BP_1I_D16F32C30_TRC_WRA_01 ( Biquad_FLOAT_Instance_t       *pInstance,
-                                  LVM_FLOAT               *pDataIn,
-                                  LVM_FLOAT               *pDataOut,
-                                  LVM_INT16               NrSamples)
-{
-    LVM_FLOAT ynL,templ;
+void BP_1I_D16F32C30_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+                                LVM_FLOAT* pDataOut, LVM_INT16 NrSamples) {
+    LVM_FLOAT ynL, templ;
     LVM_INT16 ii;
     PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT)pInstance;
 
-    for (ii = NrSamples; ii != 0; ii--)
-    {
+    for (ii = NrSamples; ii != 0; ii--) {
         /**************************************************************************
                        PROCESSING OF THE LEFT CHANNEL
         ***************************************************************************/
         // ynL= (A0 * (x(n)L - x(n-2)L ))
-        templ = (LVM_FLOAT) *pDataIn - pBiquadState->pDelays[1];
+        templ = (LVM_FLOAT)*pDataIn - pBiquadState->pDelays[1];
         ynL = pBiquadState->coefs[0] * templ;
 
         // ynL+= ((-B2  * y(n-2)L  ) )
@@ -61,14 +57,14 @@
         /**************************************************************************
                         UPDATING THE DELAYS
         ***************************************************************************/
-        pBiquadState->pDelays[3] = pBiquadState->pDelays[2]; // y(n-2)L=y(n-1)L
-        pBiquadState->pDelays[1] = pBiquadState->pDelays[0]; // x(n-2)L=x(n-1)L
-        pBiquadState->pDelays[2] = ynL; // Update y(n-1)L in Q16
-        pBiquadState->pDelays[0] = (*pDataIn++); // Update x(n-1)L in Q0
+        pBiquadState->pDelays[3] = pBiquadState->pDelays[2];  // y(n-2)L=y(n-1)L
+        pBiquadState->pDelays[1] = pBiquadState->pDelays[0];  // x(n-2)L=x(n-1)L
+        pBiquadState->pDelays[2] = ynL;                       // Update y(n-1)L in Q16
+        pBiquadState->pDelays[0] = (*pDataIn++);              // Update x(n-1)L in Q0
 
         /**************************************************************************
                         WRITING THE OUTPUT
         ***************************************************************************/
-        *pDataOut++ = (ynL); // Write Left output
-        }
+        *pDataOut++ = (ynL);  // Write Left output
+    }
 }
diff --git a/media/libeffects/lvm/lib/Common/src/BP_1I_D16F32Cll_TRC_WRA_01_Init.cpp b/media/libeffects/lvm/lib/Common/src/BP_1I_D16F32Cll_TRC_WRA_01_Init.cpp
index eb15032..d322a8e 100644
--- a/media/libeffects/lvm/lib/Common/src/BP_1I_D16F32Cll_TRC_WRA_01_Init.cpp
+++ b/media/libeffects/lvm/lib/Common/src/BP_1I_D16F32Cll_TRC_WRA_01_Init.cpp
@@ -47,17 +47,15 @@
 /* RETURNS:                                                                */
 /*   void return code                                                      */
 /*-------------------------------------------------------------------------*/
-void BP_1I_D16F32Cll_TRC_WRA_01_Init (    Biquad_FLOAT_Instance_t         *pInstance,
-                                          Biquad_1I_Order2_FLOAT_Taps_t   *pTaps,
-                                          BP_FLOAT_Coefs_t                *pCoef)
-{
-    PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
-    pBiquadState->pDelays       =(LVM_FLOAT *) pTaps;
+void BP_1I_D16F32Cll_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t* pInstance,
+                                     Biquad_1I_Order2_FLOAT_Taps_t* pTaps,
+                                     BP_FLOAT_Coefs_t* pCoef) {
+    PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT)pInstance;
+    pBiquadState->pDelays = (LVM_FLOAT*)pTaps;
 
-    pBiquadState->coefs[0] =  pCoef->A0;
-    pBiquadState->coefs[1] =  pCoef->B2;
-    pBiquadState->coefs[2] =  pCoef->B1;
+    pBiquadState->coefs[0] = pCoef->A0;
+    pBiquadState->coefs[1] = pCoef->B2;
+    pBiquadState->coefs[2] = pCoef->B1;
 }
 /*-------------------------------------------------------------------------*/
 /* End Of File: BP_1I_D16F32Cll_TRC_WRA_01_Init.c                              */
-
diff --git a/media/libeffects/lvm/lib/Common/src/BP_1I_D16F32Cll_TRC_WRA_01_Private.h b/media/libeffects/lvm/lib/Common/src/BP_1I_D16F32Cll_TRC_WRA_01_Private.h
index 6d754e2..0603256 100644
--- a/media/libeffects/lvm/lib/Common/src/BP_1I_D16F32Cll_TRC_WRA_01_Private.h
+++ b/media/libeffects/lvm/lib/Common/src/BP_1I_D16F32Cll_TRC_WRA_01_Private.h
@@ -19,17 +19,15 @@
 
 /* The internal state variables are implemented in a (for the user)  hidden structure */
 /* In this (private) file, the internal structure is declared for private use.        */
-typedef struct _Filter_State_
-{
-  LVM_INT32 *       pDelays;        /* pointer to the delayed samples (data of 32 bits)   */
-  LVM_INT32         coefs[3];       /* pointer to the filter coefficients */
-}Filter_State;
+typedef struct _Filter_State_ {
+    LVM_INT32* pDelays; /* pointer to the delayed samples (data of 32 bits)   */
+    LVM_INT32 coefs[3]; /* pointer to the filter coefficients */
+} Filter_State;
 
-typedef Filter_State * PFilter_State ;
-typedef struct _Filter_State_FLOAT
-{
-    LVM_FLOAT *       pDelays;        /* pointer to the delayed samples (data of 32 bits)   */
-    LVM_FLOAT         coefs[3];       /* pointer to the filter coefficients */
-}Filter_State_Float;
-typedef Filter_State_Float * PFilter_State_FLOAT ;
+typedef Filter_State* PFilter_State;
+typedef struct _Filter_State_FLOAT {
+    LVM_FLOAT* pDelays; /* pointer to the delayed samples (data of 32 bits)   */
+    LVM_FLOAT coefs[3]; /* pointer to the filter coefficients */
+} Filter_State_Float;
+typedef Filter_State_Float* PFilter_State_FLOAT;
 #endif /*_BP_1I_D16F32CLL_TRC_WRA_01_PRIVATE_H_*/
diff --git a/media/libeffects/lvm/lib/Common/src/BP_1I_D32F32C30_TRC_WRA_02.cpp b/media/libeffects/lvm/lib/Common/src/BP_1I_D32F32C30_TRC_WRA_02.cpp
index d0ba206..0670334 100644
--- a/media/libeffects/lvm/lib/Common/src/BP_1I_D32F32C30_TRC_WRA_02.cpp
+++ b/media/libeffects/lvm/lib/Common/src/BP_1I_D32F32C30_TRC_WRA_02.cpp
@@ -32,46 +32,39 @@
  pBiquadState->pDelays[2] is y(n-1)L in Q0 format
  pBiquadState->pDelays[3] is y(n-2)L in Q0 format
 ***************************************************************************/
-void BP_1I_D32F32C30_TRC_WRA_02 ( Biquad_FLOAT_Instance_t       *pInstance,
-                                  LVM_FLOAT               *pDataIn,
-                                  LVM_FLOAT               *pDataOut,
-                                  LVM_INT16               NrSamples)
-    {
-        LVM_FLOAT ynL,templ;
-        LVM_INT16 ii;
-        PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
+void BP_1I_D32F32C30_TRC_WRA_02(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+                                LVM_FLOAT* pDataOut, LVM_INT16 NrSamples) {
+    LVM_FLOAT ynL, templ;
+    LVM_INT16 ii;
+    PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT)pInstance;
 
-        for (ii = NrSamples; ii != 0; ii--)
-        {
+    for (ii = NrSamples; ii != 0; ii--) {
+        /**************************************************************************
+                        PROCESSING OF THE LEFT CHANNEL
+        ***************************************************************************/
+        // ynL= (A0  * (x(n)L  - x(n-2)L  ) )
+        templ = (*pDataIn) - pBiquadState->pDelays[1];
+        ynL = pBiquadState->coefs[0] * templ;
 
-            /**************************************************************************
-                            PROCESSING OF THE LEFT CHANNEL
-            ***************************************************************************/
-            // ynL= (A0  * (x(n)L  - x(n-2)L  ) )
-            templ = (*pDataIn) - pBiquadState->pDelays[1];
-            ynL = pBiquadState->coefs[0] * templ;
+        // ynL+= ((-B2  * y(n-2)L  ) )
+        templ = pBiquadState->coefs[1] * pBiquadState->pDelays[3];
+        ynL += templ;
 
-            // ynL+= ((-B2  * y(n-2)L  ) )
-            templ = pBiquadState->coefs[1] * pBiquadState->pDelays[3];
-            ynL += templ;
+        // ynL+= ((-B1  * y(n-1)L  ) )
+        templ = pBiquadState->coefs[2] * pBiquadState->pDelays[2];
+        ynL += templ;
 
-            // ynL+= ((-B1  * y(n-1)L  ) )
-            templ = pBiquadState->coefs[2] * pBiquadState->pDelays[2];
-            ynL += templ;
+        /**************************************************************************
+                        UPDATING THE DELAYS
+        ***************************************************************************/
+        pBiquadState->pDelays[3] = pBiquadState->pDelays[2];  // y(n-2)L=y(n-1)L
+        pBiquadState->pDelays[1] = pBiquadState->pDelays[0];  // x(n-2)L=x(n-1)L
+        pBiquadState->pDelays[2] = ynL;                       // Update y(n-1)L
+        pBiquadState->pDelays[0] = (*pDataIn++);              // Update x(n-1)L
 
-            /**************************************************************************
-                            UPDATING THE DELAYS
-            ***************************************************************************/
-            pBiquadState->pDelays[3] = pBiquadState->pDelays[2]; // y(n-2)L=y(n-1)L
-            pBiquadState->pDelays[1] = pBiquadState->pDelays[0]; // x(n-2)L=x(n-1)L
-            pBiquadState->pDelays[2] = ynL; // Update y(n-1)L
-            pBiquadState->pDelays[0] = (*pDataIn++); // Update x(n-1)L
-
-            /**************************************************************************
-                            WRITING THE OUTPUT
-            ***************************************************************************/
-            *pDataOut++ = ynL; // Write Left output in Q0
-
-        }
-
+        /**************************************************************************
+                        WRITING THE OUTPUT
+        ***************************************************************************/
+        *pDataOut++ = ynL;  // Write Left output in Q0
     }
+}
diff --git a/media/libeffects/lvm/lib/Common/src/BP_1I_D32F32Cll_TRC_WRA_02_Init.cpp b/media/libeffects/lvm/lib/Common/src/BP_1I_D32F32Cll_TRC_WRA_02_Init.cpp
index 6f7d0b5..146cc63 100644
--- a/media/libeffects/lvm/lib/Common/src/BP_1I_D32F32Cll_TRC_WRA_02_Init.cpp
+++ b/media/libeffects/lvm/lib/Common/src/BP_1I_D32F32Cll_TRC_WRA_02_Init.cpp
@@ -37,12 +37,11 @@
 /* RETURNS:                                                                */
 /*   void return code                                                      */
 /*-------------------------------------------------------------------------*/
-void BP_1I_D32F32Cll_TRC_WRA_02_Init (   Biquad_FLOAT_Instance_t         *pInstance,
-                                         Biquad_1I_Order2_FLOAT_Taps_t   *pTaps,
-                                         BP_FLOAT_Coefs_t            *pCoef)
-{
-    PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
-    pBiquadState->pDelays       =(LVM_FLOAT *) pTaps;
+void BP_1I_D32F32Cll_TRC_WRA_02_Init(Biquad_FLOAT_Instance_t* pInstance,
+                                     Biquad_1I_Order2_FLOAT_Taps_t* pTaps,
+                                     BP_FLOAT_Coefs_t* pCoef) {
+    PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT)pInstance;
+    pBiquadState->pDelays = (LVM_FLOAT*)pTaps;
 
     pBiquadState->coefs[0] = pCoef->A0;
 
@@ -52,4 +51,3 @@
 }
 /*-------------------------------------------------------------------------*/
 /* End Of File: BP_1I_D32F32Cll_TRC_WRA_02_Init.c                              */
-
diff --git a/media/libeffects/lvm/lib/Common/src/BP_1I_D32F32Cll_TRC_WRA_02_Private.h b/media/libeffects/lvm/lib/Common/src/BP_1I_D32F32Cll_TRC_WRA_02_Private.h
index 9f1c66a..ea83c0b 100644
--- a/media/libeffects/lvm/lib/Common/src/BP_1I_D32F32Cll_TRC_WRA_02_Private.h
+++ b/media/libeffects/lvm/lib/Common/src/BP_1I_D32F32Cll_TRC_WRA_02_Private.h
@@ -19,18 +19,16 @@
 
 /* The internal state variables are implemented in a (for the user)  hidden structure */
 /* In this (private) file, the internal structure is declared for private use.        */
-typedef struct _Filter_State_
-{
-  LVM_INT32 *       pDelays;        /* pointer to the delayed samples (data of 32 bits)   */
-  LVM_INT32         coefs[3];       /* pointer to the filter coefficients */
-}Filter_State;
+typedef struct _Filter_State_ {
+    LVM_INT32* pDelays; /* pointer to the delayed samples (data of 32 bits)   */
+    LVM_INT32 coefs[3]; /* pointer to the filter coefficients */
+} Filter_State;
 
-typedef Filter_State * PFilter_State ;
-typedef struct _Filter_State_FLOAT
-{
-    LVM_FLOAT *       pDelays;        /* pointer to the delayed samples (data of 32 bits)   */
-    LVM_FLOAT         coefs[3];       /* pointer to the filter coefficients */
-}Filter_State_Float;
-typedef Filter_State_Float* PFilter_State_FLOAT ;
+typedef Filter_State* PFilter_State;
+typedef struct _Filter_State_FLOAT {
+    LVM_FLOAT* pDelays; /* pointer to the delayed samples (data of 32 bits)   */
+    LVM_FLOAT coefs[3]; /* pointer to the filter coefficients */
+} Filter_State_Float;
+typedef Filter_State_Float* PFilter_State_FLOAT;
 
 #endif /*_BP_1I_D32F32CLL_TRC_WRA_02_PRIVATE_H_*/
diff --git a/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F16C15_TRC_WRA_01.cpp b/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F16C15_TRC_WRA_01.cpp
index 9aecc40..a46b1ef 100644
--- a/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F16C15_TRC_WRA_01.cpp
+++ b/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F16C15_TRC_WRA_01.cpp
@@ -32,49 +32,42 @@
  pBiquadState->pDelays[2] is y(n-1)L in Q0 format
  pBiquadState->pDelays[3] is y(n-2)L in Q0 format
 ***************************************************************************/
-void BQ_1I_D16F16C15_TRC_WRA_01 ( Biquad_FLOAT_Instance_t       *pInstance,
-                                  LVM_FLOAT               *pDataIn,
-                                  LVM_FLOAT               *pDataOut,
-                                  LVM_INT16               NrSamples)
-    {
-        LVM_FLOAT  ynL;
-        LVM_INT16 ii;
-        PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
+void BQ_1I_D16F16C15_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+                                LVM_FLOAT* pDataOut, LVM_INT16 NrSamples) {
+    LVM_FLOAT ynL;
+    LVM_INT16 ii;
+    PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT)pInstance;
 
-         for (ii = NrSamples; ii != 0; ii--)
-         {
+    for (ii = NrSamples; ii != 0; ii--) {
+        /**************************************************************************
+                        PROCESSING OF THE LEFT CHANNEL
+        ***************************************************************************/
+        // ynL=A2  * x(n-2)L
+        ynL = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[1];
 
-            /**************************************************************************
-                            PROCESSING OF THE LEFT CHANNEL
-            ***************************************************************************/
-            // ynL=A2  * x(n-2)L
-            ynL = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[1];
+        // ynL+=A1 * x(n-1)L
+        ynL += (LVM_FLOAT)pBiquadState->coefs[1] * pBiquadState->pDelays[0];
 
-            // ynL+=A1 * x(n-1)L
-            ynL += (LVM_FLOAT)pBiquadState->coefs[1] * pBiquadState->pDelays[0];
+        // ynL+=A0 * x(n)L
+        ynL += (LVM_FLOAT)pBiquadState->coefs[2] * (*pDataIn);
 
-            // ynL+=A0 * x(n)L
-            ynL += (LVM_FLOAT)pBiquadState->coefs[2] * (*pDataIn);
+        // ynL+=  (-B2  * y(n-2)L )
+        ynL += (LVM_FLOAT)pBiquadState->coefs[3] * pBiquadState->pDelays[3];
 
-            // ynL+=  (-B2  * y(n-2)L )
-            ynL += (LVM_FLOAT)pBiquadState->coefs[3] * pBiquadState->pDelays[3];
+        // ynL+= (-B1  * y(n-1)L  )
+        ynL += (LVM_FLOAT)pBiquadState->coefs[4] * pBiquadState->pDelays[2];
 
-            // ynL+= (-B1  * y(n-1)L  )
-            ynL += (LVM_FLOAT)pBiquadState->coefs[4] * pBiquadState->pDelays[2];
+        /**************************************************************************
+                        UPDATING THE DELAYS
+        ***************************************************************************/
+        pBiquadState->pDelays[3] = pBiquadState->pDelays[2];  // y(n-2)L=y(n-1)L
+        pBiquadState->pDelays[1] = pBiquadState->pDelays[0];  // x(n-2)L=x(n-1)L
+        pBiquadState->pDelays[2] = ynL;                       // Update y(n-1)L
+        pBiquadState->pDelays[0] = (*pDataIn++);              // Update x(n-1)L
 
-            /**************************************************************************
-                            UPDATING THE DELAYS
-            ***************************************************************************/
-            pBiquadState->pDelays[3] = pBiquadState->pDelays[2]; // y(n-2)L=y(n-1)L
-            pBiquadState->pDelays[1] = pBiquadState->pDelays[0]; // x(n-2)L=x(n-1)L
-            pBiquadState->pDelays[2] = ynL; // Update y(n-1)L
-            pBiquadState->pDelays[0] = (*pDataIn++); // Update x(n-1)L
-
-            /**************************************************************************
-                            WRITING THE OUTPUT
-            ***************************************************************************/
-            *pDataOut++ = (LVM_FLOAT)ynL; // Write Left output in Q0
-
-        }
-
+        /**************************************************************************
+                        WRITING THE OUTPUT
+        ***************************************************************************/
+        *pDataOut++ = (LVM_FLOAT)ynL;  // Write Left output in Q0
     }
+}
diff --git a/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F16Css_TRC_WRA_01_Init.cpp b/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F16Css_TRC_WRA_01_Init.cpp
index f0b5d06..e8bfcd8 100644
--- a/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F16Css_TRC_WRA_01_Init.cpp
+++ b/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F16Css_TRC_WRA_01_Init.cpp
@@ -37,13 +37,12 @@
 /* RETURNS:                                                                */
 /*   void return code                                                      */
 /*-------------------------------------------------------------------------*/
-void BQ_1I_D16F16Css_TRC_WRA_01_Init (   Biquad_FLOAT_Instance_t         *pInstance,
-                                         Biquad_1I_Order2_FLOAT_Taps_t   *pTaps,
-                                         BQ_FLOAT_Coefs_t            *pCoef)
-{
+void BQ_1I_D16F16Css_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t* pInstance,
+                                     Biquad_1I_Order2_FLOAT_Taps_t* pTaps,
+                                     BQ_FLOAT_Coefs_t* pCoef) {
     LVM_FLOAT temp;
-    PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
-    pBiquadState->pDelays      = (LVM_FLOAT *) pTaps ;
+    PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT)pInstance;
+    pBiquadState->pDelays = (LVM_FLOAT*)pTaps;
     temp = pCoef->A2;
     pBiquadState->coefs[0] = temp;
     temp = pCoef->A1;
@@ -57,4 +56,3 @@
 }
 /*-------------------------------------------------------------------------*/
 /* End Of File: BQ_1I_D16F16Css_TRC_WRA_01_Init.c                              */
-
diff --git a/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F16Css_TRC_WRA_01_Private.h b/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F16Css_TRC_WRA_01_Private.h
index fad345d..ac2819e 100644
--- a/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F16Css_TRC_WRA_01_Private.h
+++ b/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F16Css_TRC_WRA_01_Private.h
@@ -19,19 +19,17 @@
 
 /* The internal state variables are implemented in a (for the user)  hidden structure */
 /* In this (private) file, the internal structure is declared for private use.        */
-typedef struct _Filter_State_
-{
-  LVM_INT32 *       pDelays;        /* pointer to the delayed samples (data of 32 bits)   */
-  LVM_INT16         coefs[5];       /* pointer to the filter coefficients */
-}Filter_State;
+typedef struct _Filter_State_ {
+    LVM_INT32* pDelays; /* pointer to the delayed samples (data of 32 bits)   */
+    LVM_INT16 coefs[5]; /* pointer to the filter coefficients */
+} Filter_State;
 
-typedef Filter_State * PFilter_State ;
+typedef Filter_State* PFilter_State;
 
-typedef struct _Filter_State_FLOAT
-{
-    LVM_FLOAT *       pDelays;        /* pointer to the delayed samples (data of 32 bits)   */
-    LVM_FLOAT         coefs[5];       /* pointer to the filter coefficients */
+typedef struct _Filter_State_FLOAT {
+    LVM_FLOAT* pDelays; /* pointer to the delayed samples (data of 32 bits)   */
+    LVM_FLOAT coefs[5]; /* pointer to the filter coefficients */
 
-}Filter_State_FLOAT;
-typedef Filter_State_FLOAT * PFilter_State_FLOAT ;
+} Filter_State_FLOAT;
+typedef Filter_State_FLOAT* PFilter_State_FLOAT;
 #endif /*_BQ_1I_D16F16CSS_TRC_WRA_01_PRIVATE_H_ */
diff --git a/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F32C14_TRC_WRA_01.cpp b/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F32C14_TRC_WRA_01.cpp
index 043bc5f..c60dcf8 100644
--- a/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F32C14_TRC_WRA_01.cpp
+++ b/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F32C14_TRC_WRA_01.cpp
@@ -32,48 +32,42 @@
  pBiquadState->pDelays[2] is y(n-1)L in Q16 format
  pBiquadState->pDelays[3] is y(n-2)L in Q16 format
 ***************************************************************************/
-void BQ_1I_D16F32C14_TRC_WRA_01 ( Biquad_FLOAT_Instance_t       *pInstance,
-                                  LVM_FLOAT               *pDataIn,
-                                  LVM_FLOAT               *pDataOut,
-                                  LVM_INT16               NrSamples)
-    {
-        LVM_FLOAT  ynL;
-        LVM_INT16 ii;
-        PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
+void BQ_1I_D16F32C14_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+                                LVM_FLOAT* pDataOut, LVM_INT16 NrSamples) {
+    LVM_FLOAT ynL;
+    LVM_INT16 ii;
+    PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT)pInstance;
 
-         for (ii = NrSamples; ii != 0; ii--)
-         {
+    for (ii = NrSamples; ii != 0; ii--) {
+        /**************************************************************************
+                        PROCESSING OF THE LEFT CHANNEL
+        ***************************************************************************/
+        // ynL=A2  * x(n-2)L
+        ynL = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[1];
 
-            /**************************************************************************
-                            PROCESSING OF THE LEFT CHANNEL
-            ***************************************************************************/
-            // ynL=A2  * x(n-2)L
-            ynL = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[1];
+        // ynL+=A1  * x(n-1)L
+        ynL += (LVM_FLOAT)pBiquadState->coefs[1] * pBiquadState->pDelays[0];
 
-            // ynL+=A1  * x(n-1)L
-            ynL += (LVM_FLOAT)pBiquadState->coefs[1] * pBiquadState->pDelays[0];
+        // ynL+=A0  * x(n)L
+        ynL += (LVM_FLOAT)pBiquadState->coefs[2] * (*pDataIn);
 
-            // ynL+=A0  * x(n)L
-            ynL += (LVM_FLOAT)pBiquadState->coefs[2] * (*pDataIn);
+        // ynL+= ( (-B2  * y(n-2)L )
+        ynL += pBiquadState->pDelays[3] * pBiquadState->coefs[3];
 
-            // ynL+= ( (-B2  * y(n-2)L )
-            ynL += pBiquadState->pDelays[3] * pBiquadState->coefs[3];
+        // ynL+= -B1  * y(n-1)L
+        ynL += pBiquadState->pDelays[2] * pBiquadState->coefs[4];
 
-            // ynL+= -B1  * y(n-1)L
-            ynL += pBiquadState->pDelays[2] * pBiquadState->coefs[4];
+        /**************************************************************************
+                        UPDATING THE DELAYS
+        ***************************************************************************/
+        pBiquadState->pDelays[3] = pBiquadState->pDelays[2];  // y(n-2)L=y(n-1)L
+        pBiquadState->pDelays[1] = pBiquadState->pDelays[0];  // x(n-2)L=x(n-1)L
+        pBiquadState->pDelays[2] = ynL;                       // Update y(n-1)L
+        pBiquadState->pDelays[0] = (*pDataIn++);              // Update x(n-1)L
 
-            /**************************************************************************
-                            UPDATING THE DELAYS
-            ***************************************************************************/
-            pBiquadState->pDelays[3] = pBiquadState->pDelays[2];  // y(n-2)L=y(n-1)L
-            pBiquadState->pDelays[1] = pBiquadState->pDelays[0];  // x(n-2)L=x(n-1)L
-            pBiquadState->pDelays[2] = ynL;                    // Update y(n-1)L
-            pBiquadState->pDelays[0] = (*pDataIn++);              // Update x(n-1)L
-
-            /**************************************************************************
-                            WRITING THE OUTPUT
-            ***************************************************************************/
-            *pDataOut++ = (LVM_FLOAT)(ynL); // Write Left output
-
-        }
+        /**************************************************************************
+                        WRITING THE OUTPUT
+        ***************************************************************************/
+        *pDataOut++ = (LVM_FLOAT)(ynL);  // Write Left output
     }
+}
diff --git a/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F32Css_TRC_WRA_01_Private.h b/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F32Css_TRC_WRA_01_Private.h
index 6a61d9a..af0efc8 100644
--- a/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F32Css_TRC_WRA_01_Private.h
+++ b/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F32Css_TRC_WRA_01_Private.h
@@ -19,19 +19,17 @@
 
 /* The internal state variables are implemented in a (for the user)  hidden structure */
 /* In this (private) file, the internal structure is declared for private use.        */
-typedef struct _Filter_State_
-{
-  LVM_INT32 *   pDelays;        /* pointer to the delayed samples (data of 32 bits)   */
-  LVM_INT16     coefs[5];       /* pointer to the filter coefficients */
-}Filter_State;
+typedef struct _Filter_State_ {
+    LVM_INT32* pDelays; /* pointer to the delayed samples (data of 32 bits)   */
+    LVM_INT16 coefs[5]; /* pointer to the filter coefficients */
+} Filter_State;
 
-typedef Filter_State * PFilter_State ;
+typedef Filter_State* PFilter_State;
 
-typedef struct _Filter_State_FLOAT
-{
-    LVM_FLOAT *   pDelays;        /* pointer to the delayed samples (data of 32 bits)   */
-    LVM_FLOAT     coefs[5];       /* pointer to the filter coefficients */
+typedef struct _Filter_State_FLOAT {
+    LVM_FLOAT* pDelays; /* pointer to the delayed samples (data of 32 bits)   */
+    LVM_FLOAT coefs[5]; /* pointer to the filter coefficients */
 
-}Filter_State_FLOAT;
-typedef Filter_State_FLOAT * PFilter_State_FLOAT ;
+} Filter_State_FLOAT;
+typedef Filter_State_FLOAT* PFilter_State_FLOAT;
 #endif /*_BQ_1I_D16F32CSS_TRC_WRA_01_PRIVATE_H_*/
diff --git a/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F32Css_TRC_WRA_01_init.cpp b/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F32Css_TRC_WRA_01_init.cpp
index 2b80691..ecf44ca 100644
--- a/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F32Css_TRC_WRA_01_init.cpp
+++ b/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F32Css_TRC_WRA_01_init.cpp
@@ -37,13 +37,12 @@
 /* RETURNS:                                                                */
 /*   void return code                                                      */
 /*-------------------------------------------------------------------------*/
-void BQ_1I_D16F32Css_TRC_WRA_01_Init (   Biquad_FLOAT_Instance_t         *pInstance,
-                                         Biquad_1I_Order2_FLOAT_Taps_t   *pTaps,
-                                         BQ_FLOAT_Coefs_t            *pCoef)
-{
+void BQ_1I_D16F32Css_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t* pInstance,
+                                     Biquad_1I_Order2_FLOAT_Taps_t* pTaps,
+                                     BQ_FLOAT_Coefs_t* pCoef) {
     LVM_FLOAT temp;
-    PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
-    pBiquadState->pDelays      = (LVM_FLOAT *)pTaps;
+    PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT)pInstance;
+    pBiquadState->pDelays = (LVM_FLOAT*)pTaps;
 
     temp = pCoef->A2;
     pBiquadState->coefs[0] = temp;
@@ -58,4 +57,3 @@
 }
 /*-------------------------------------------------------------------------*/
 /* End Of File: BQ_1I_D16F32Css_TRC_WRA_01_Init                              */
-
diff --git a/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F16C14_TRC_WRA_01.cpp b/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F16C14_TRC_WRA_01.cpp
index 51cd918..d047e91 100644
--- a/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F16C14_TRC_WRA_01.cpp
+++ b/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F16C14_TRC_WRA_01.cpp
@@ -36,72 +36,65 @@
  pBiquadState->pDelays[6] is y(n-2)L in Q0 format
  pBiquadState->pDelays[7] is y(n-2)R in Q0 format
 ***************************************************************************/
-void BQ_2I_D16F16C14_TRC_WRA_01 ( Biquad_FLOAT_Instance_t       *pInstance,
-                                  LVM_FLOAT               *pDataIn,
-                                  LVM_FLOAT               *pDataOut,
-                                  LVM_INT16               NrSamples)
-    {
-        LVM_FLOAT  ynL,ynR;
-        LVM_INT16 ii;
-        PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
+void BQ_2I_D16F16C14_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+                                LVM_FLOAT* pDataOut, LVM_INT16 NrSamples) {
+    LVM_FLOAT ynL, ynR;
+    LVM_INT16 ii;
+    PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT)pInstance;
 
-         for (ii = NrSamples; ii != 0; ii--)
-         {
+    for (ii = NrSamples; ii != 0; ii--) {
+        /**************************************************************************
+                        PROCESSING OF THE LEFT CHANNEL
+        ***************************************************************************/
+        // ynL=A2  * x(n-2)L
+        ynL = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[2];
 
-            /**************************************************************************
-                            PROCESSING OF THE LEFT CHANNEL
-            ***************************************************************************/
-            // ynL=A2  * x(n-2)L
-            ynL = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[2];
+        // ynL+=A1  * x(n-1)L
+        ynL += (LVM_FLOAT)pBiquadState->coefs[1] * pBiquadState->pDelays[0];
 
-            // ynL+=A1  * x(n-1)L
-            ynL += (LVM_FLOAT)pBiquadState->coefs[1] * pBiquadState->pDelays[0];
+        // ynL+=A0  * x(n)L
+        ynL += (LVM_FLOAT)pBiquadState->coefs[2] * (*pDataIn);
 
-            // ynL+=A0  * x(n)L
-            ynL += (LVM_FLOAT)pBiquadState->coefs[2] * (*pDataIn);
+        // ynL+= ( -B2  * y(n-2)L  )
+        ynL += (LVM_FLOAT)pBiquadState->coefs[3] * pBiquadState->pDelays[6];
 
-            // ynL+= ( -B2  * y(n-2)L  )
-            ynL += (LVM_FLOAT)pBiquadState->coefs[3] * pBiquadState->pDelays[6];
+        // ynL+=( -B1  * y(n-1)L )
+        ynL += (LVM_FLOAT)pBiquadState->coefs[4] * pBiquadState->pDelays[4];
 
-            // ynL+=( -B1  * y(n-1)L )
-            ynL += (LVM_FLOAT)pBiquadState->coefs[4] * pBiquadState->pDelays[4];
+        /**************************************************************************
+                        PROCESSING OF THE RIGHT CHANNEL
+        ***************************************************************************/
+        // ynR=A2  * x(n-2)R
+        ynR = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[3];
 
-            /**************************************************************************
-                            PROCESSING OF THE RIGHT CHANNEL
-            ***************************************************************************/
-            // ynR=A2  * x(n-2)R
-            ynR = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[3];
+        // ynR+=A1  * x(n-1)R
+        ynR += (LVM_FLOAT)pBiquadState->coefs[1] * pBiquadState->pDelays[1];
 
-            // ynR+=A1  * x(n-1)R
-            ynR += (LVM_FLOAT)pBiquadState->coefs[1] * pBiquadState->pDelays[1];
+        // ynR+=A0  * x(n)R
+        ynR += (LVM_FLOAT)pBiquadState->coefs[2] * (*(pDataIn + 1));
 
-            // ynR+=A0  * x(n)R
-            ynR += (LVM_FLOAT)pBiquadState->coefs[2] * (*(pDataIn+1));
+        // ynR+= ( -B2  * y(n-2)R  )
+        ynR += (LVM_FLOAT)pBiquadState->coefs[3] * pBiquadState->pDelays[7];
 
-            // ynR+= ( -B2  * y(n-2)R  )
-            ynR += (LVM_FLOAT)pBiquadState->coefs[3] * pBiquadState->pDelays[7];
+        // ynR+=( -B1  * y(n-1)R  )
+        ynR += (LVM_FLOAT)pBiquadState->coefs[4] * pBiquadState->pDelays[5];
 
-            // ynR+=( -B1  * y(n-1)R  )
-            ynR += (LVM_FLOAT)pBiquadState->coefs[4] * pBiquadState->pDelays[5];
+        /**************************************************************************
+                        UPDATING THE DELAYS
+        ***************************************************************************/
+        pBiquadState->pDelays[7] = pBiquadState->pDelays[5];  // y(n-2)R=y(n-1)R
+        pBiquadState->pDelays[6] = pBiquadState->pDelays[4];  // y(n-2)L=y(n-1)L
+        pBiquadState->pDelays[3] = pBiquadState->pDelays[1];  // x(n-2)R=x(n-1)R
+        pBiquadState->pDelays[2] = pBiquadState->pDelays[0];  // x(n-2)L=x(n-1)L
+        pBiquadState->pDelays[5] = ynR;                       // Update y(n-1)R
+        pBiquadState->pDelays[4] = ynL;                       // Update y(n-1)L
+        pBiquadState->pDelays[0] = (*pDataIn++);              // Update x(n-1)L
+        pBiquadState->pDelays[1] = (*pDataIn++);              // Update x(n-1)R
 
-            /**************************************************************************
-                            UPDATING THE DELAYS
-            ***************************************************************************/
-            pBiquadState->pDelays[7] = pBiquadState->pDelays[5];  // y(n-2)R=y(n-1)R
-            pBiquadState->pDelays[6] = pBiquadState->pDelays[4];  // y(n-2)L=y(n-1)L
-            pBiquadState->pDelays[3] = pBiquadState->pDelays[1];  // x(n-2)R=x(n-1)R
-            pBiquadState->pDelays[2] = pBiquadState->pDelays[0];  // x(n-2)L=x(n-1)L
-            pBiquadState->pDelays[5] = ynR;                       // Update y(n-1)R
-            pBiquadState->pDelays[4] = ynL;                       // Update y(n-1)L
-            pBiquadState->pDelays[0] = (*pDataIn++);              // Update x(n-1)L
-            pBiquadState->pDelays[1] = (*pDataIn++);              // Update x(n-1)R
-
-            /**************************************************************************
-                            WRITING THE OUTPUT
-            ***************************************************************************/
-            *pDataOut++ = (LVM_FLOAT)ynL; // Write Left output
-            *pDataOut++ = (LVM_FLOAT)ynR; // Write Right ouput
-
-        }
-
+        /**************************************************************************
+                        WRITING THE OUTPUT
+        ***************************************************************************/
+        *pDataOut++ = (LVM_FLOAT)ynL;  // Write Left output
+        *pDataOut++ = (LVM_FLOAT)ynR;  // Write Right output
     }
+}
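
The five-coefficient BQ_* kernels extend the same pattern to a full direct-form I biquad; per the in-line comments, each channel computes

    y(n) = A2*x(n-2) + A1*x(n-1) + A0*x(n) + (-B2)*y(n-2) + (-B1)*y(n-1)

and the stereo (2I) variants simply interleave the left and right delay lines in pDelays[], as documented in the header comment of each kernel.
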
diff --git a/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F16C15_TRC_WRA_01.cpp b/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F16C15_TRC_WRA_01.cpp
index 8f74749..399b5ec 100644
--- a/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F16C15_TRC_WRA_01.cpp
+++ b/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F16C15_TRC_WRA_01.cpp
@@ -36,72 +36,65 @@
  pBiquadState->pDelays[6] is y(n-2)L in Q0 format
  pBiquadState->pDelays[7] is y(n-2)R in Q0 format
 ***************************************************************************/
-void BQ_2I_D16F16C15_TRC_WRA_01 ( Biquad_FLOAT_Instance_t       *pInstance,
-                                  LVM_FLOAT               *pDataIn,
-                                  LVM_FLOAT               *pDataOut,
-                                  LVM_INT16               NrSamples)
-    {
-        LVM_FLOAT  ynL,ynR;
-        LVM_INT16 ii;
-        PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
+void BQ_2I_D16F16C15_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+                                LVM_FLOAT* pDataOut, LVM_INT16 NrSamples) {
+    LVM_FLOAT ynL, ynR;
+    LVM_INT16 ii;
+    PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT)pInstance;
 
-         for (ii = NrSamples; ii != 0; ii--)
-         {
+    for (ii = NrSamples; ii != 0; ii--) {
+        /**************************************************************************
+                        PROCESSING OF THE LEFT CHANNEL
+        ***************************************************************************/
+        // ynL=A2  * x(n-2)L
+        ynL = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[2];
 
-            /**************************************************************************
-                            PROCESSING OF THE LEFT CHANNEL
-            ***************************************************************************/
-            // ynL=A2  * x(n-2)L
-            ynL = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[2];
+        // ynL+=A1  * x(n-1)L
+        ynL += (LVM_FLOAT)pBiquadState->coefs[1] * pBiquadState->pDelays[0];
 
-            // ynL+=A1  * x(n-1)L
-            ynL += (LVM_FLOAT)pBiquadState->coefs[1] * pBiquadState->pDelays[0];
+        // ynL+=A0  * x(n)L
+        ynL += (LVM_FLOAT)pBiquadState->coefs[2] * (*pDataIn);
 
-            // ynL+=A0  * x(n)L
-            ynL += (LVM_FLOAT)pBiquadState->coefs[2] * (*pDataIn);
+        // ynL+= ( -B2  * y(n-2)L
+        ynL += (LVM_FLOAT)pBiquadState->coefs[3] * pBiquadState->pDelays[6];
 
-            // ynL+= ( -B2  * y(n-2)L
-            ynL += (LVM_FLOAT)pBiquadState->coefs[3] * pBiquadState->pDelays[6];
+        // ynL+=( -B1  * y(n-1)L
+        ynL += (LVM_FLOAT)pBiquadState->coefs[4] * pBiquadState->pDelays[4];
 
-            // ynL+=( -B1  * y(n-1)L
-            ynL += (LVM_FLOAT)pBiquadState->coefs[4] * pBiquadState->pDelays[4];
+        /**************************************************************************
+                        PROCESSING OF THE RIGHT CHANNEL
+        ***************************************************************************/
+        // ynR=A2  * x(n-2)R
+        ynR = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[3];
 
-            /**************************************************************************
-                            PROCESSING OF THE RIGHT CHANNEL
-            ***************************************************************************/
-            // ynR=A2  * x(n-2)R
-            ynR = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[3];
+        // ynR+=A1  * x(n-1)R
+        ynR += (LVM_FLOAT)pBiquadState->coefs[1] * pBiquadState->pDelays[1];
 
-            // ynR+=A1  * x(n-1)R
-            ynR += (LVM_FLOAT)pBiquadState->coefs[1] * pBiquadState->pDelays[1];
+        // ynR+=A0  * x(n)R
+        ynR += (LVM_FLOAT)pBiquadState->coefs[2] * (*(pDataIn + 1));
 
-            // ynR+=A0  * x(n)R
-            ynR += (LVM_FLOAT)pBiquadState->coefs[2] * (*(pDataIn+1));
+        // ynR+= ( -B2  * y(n-2)R  )
+        ynR += (LVM_FLOAT)pBiquadState->coefs[3] * pBiquadState->pDelays[7];
 
-            // ynR+= ( -B2  * y(n-2)R  )
-            ynR += (LVM_FLOAT)pBiquadState->coefs[3] * pBiquadState->pDelays[7];
+        // ynR+=( -B1  * y(n-1)R  )
+        ynR += (LVM_FLOAT)pBiquadState->coefs[4] * pBiquadState->pDelays[5];
 
-            // ynR+=( -B1  * y(n-1)R  )
-            ynR += (LVM_FLOAT)pBiquadState->coefs[4] * pBiquadState->pDelays[5];
+        /**************************************************************************
+                        UPDATING THE DELAYS
+        ***************************************************************************/
+        pBiquadState->pDelays[7] = pBiquadState->pDelays[5];  // y(n-2)R=y(n-1)R
+        pBiquadState->pDelays[6] = pBiquadState->pDelays[4];  // y(n-2)L=y(n-1)L
+        pBiquadState->pDelays[3] = pBiquadState->pDelays[1];  // x(n-2)R=x(n-1)R
+        pBiquadState->pDelays[2] = pBiquadState->pDelays[0];  // x(n-2)L=x(n-1)L
+        pBiquadState->pDelays[5] = ynR;                       // Update y(n-1)R
+        pBiquadState->pDelays[4] = ynL;                       // Update y(n-1)L
+        pBiquadState->pDelays[0] = (*pDataIn++);              // Update x(n-1)L
+        pBiquadState->pDelays[1] = (*pDataIn++);              // Update x(n-1)R
 
-            /**************************************************************************
-                            UPDATING THE DELAYS
-            ***************************************************************************/
-            pBiquadState->pDelays[7] = pBiquadState->pDelays[5];  // y(n-2)R=y(n-1)R
-            pBiquadState->pDelays[6] = pBiquadState->pDelays[4];  // y(n-2)L=y(n-1)L
-            pBiquadState->pDelays[3] = pBiquadState->pDelays[1];  // x(n-2)R=x(n-1)R
-            pBiquadState->pDelays[2] = pBiquadState->pDelays[0];  // x(n-2)L=x(n-1)L
-            pBiquadState->pDelays[5] = ynR;                       // Update y(n-1)R
-            pBiquadState->pDelays[4] = ynL;                       // Update y(n-1)L
-            pBiquadState->pDelays[0] = (*pDataIn++);              // Update x(n-1)L
-            pBiquadState->pDelays[1] = (*pDataIn++);              // Update x(n-1)R
-
-            /**************************************************************************
-                            WRITING THE OUTPUT
-            ***************************************************************************/
-            *pDataOut++ = (LVM_FLOAT)ynL; // Write Left output
-            *pDataOut++ = (LVM_FLOAT)ynR; // Write Right ouput
-
-        }
-
+        /**************************************************************************
+                        WRITING THE OUTPUT
+        ***************************************************************************/
+        *pDataOut++ = (LVM_FLOAT)ynL;  // Write Left output
+        *pDataOut++ = (LVM_FLOAT)ynR;  // Write Right output
     }
+}
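
All of the stereo BQ_2I_* kernels touched here compute the same direct-form-I biquad, y(n) = A0*x(n) + A1*x(n-1) + A2*x(n-2) - B1*y(n-1) - B2*y(n-2), with coefs[] holding {A2, A1, A0, -B2, -B1} and pDelays[] holding the interleaved L/R history described in the header comment. The following is a minimal stand-alone sketch of that inner loop, using plain float in place of LVM_FLOAT; it is illustrative only and not code from this library. The clang-format changes in this file are whitespace and comment-spacing only; the arithmetic is unchanged.

    // Sketch of the per-frame work done by the stereo BQ_2I_* kernels.
    // Delay layout follows the header comment:
    // {x(n-1)L, x(n-1)R, x(n-2)L, x(n-2)R, y(n-1)L, y(n-1)R, y(n-2)L, y(n-2)R}
    struct StereoBiquadSketch {
        float coefs[5] = {};   // {A2, A1, A0, -B2, -B1}, as loaded by the *_Init routines
        float delays[8] = {};  // interleaved L/R history, see layout above

        void processFrame(const float* in, float* out) {
            for (int ch = 0; ch < 2; ++ch) {
                float yn = coefs[0] * delays[2 + ch]     // A2  * x(n-2)
                         + coefs[1] * delays[0 + ch]     // A1  * x(n-1)
                         + coefs[2] * in[ch]             // A0  * x(n)
                         + coefs[3] * delays[6 + ch]     // -B2 * y(n-2)
                         + coefs[4] * delays[4 + ch];    // -B1 * y(n-1)
                delays[6 + ch] = delays[4 + ch];         // y(n-2) = y(n-1)
                delays[2 + ch] = delays[0 + ch];         // x(n-2) = x(n-1)
                delays[4 + ch] = yn;                     // update y(n-1)
                delays[0 + ch] = in[ch];                 // update x(n-1)
                out[ch] = yn;
            }
        }
    };
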
diff --git a/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F16Css_TRC_WRA_01_Init.cpp b/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F16Css_TRC_WRA_01_Init.cpp
index 987cbcf..e0cd934 100644
--- a/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F16Css_TRC_WRA_01_Init.cpp
+++ b/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F16Css_TRC_WRA_01_Init.cpp
@@ -37,13 +37,12 @@
 /* RETURNS:                                                                */
 /*   void return code                                                      */
 /*-------------------------------------------------------------------------*/
-void BQ_2I_D16F16Css_TRC_WRA_01_Init (   Biquad_FLOAT_Instance_t         *pInstance,
-                                         Biquad_2I_Order2_FLOAT_Taps_t   *pTaps,
-                                         BQ_FLOAT_Coefs_t            *pCoef)
-{
+void BQ_2I_D16F16Css_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t* pInstance,
+                                     Biquad_2I_Order2_FLOAT_Taps_t* pTaps,
+                                     BQ_FLOAT_Coefs_t* pCoef) {
     LVM_FLOAT temp;
-    PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
-    pBiquadState->pDelays      = (LVM_FLOAT *) pTaps            ;
+    PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT)pInstance;
+    pBiquadState->pDelays = (LVM_FLOAT*)pTaps;
 
     temp = pCoef->A2;
     pBiquadState->coefs[0] = temp;
@@ -58,4 +57,3 @@
 }
 /*-------------------------------------------------------------------------*/
 /* End Of File: BQ_2I_D16F16Css_TRC_WRA_01_Init.c                              */
-
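
The *_Init routines only wire up state: they point pDelays at the caller-provided taps block and copy the five coefficients into coefs[] in the order the processing loops index them. Below is a compilable sketch with stand-in types (the real LVM_FLOAT, BQ_FLOAT_Coefs_t, Biquad_2I_Order2_FLOAT_Taps_t and Filter_State_FLOAT live in the LVM headers); whether the feedback terms arrive already negated is decided by the callers, which are outside this diff.

    // Stand-in types so the sketch compiles on its own.
    struct CoefsSketch { float A2, A1, A0, B2, B1; };
    struct TapsSketch  { float storage[8]; };           // x(n-1/2) and y(n-1/2) for L and R
    struct StateSketch { float* pDelays; float coefs[5]; };

    // Mirrors what BQ_2I_D16F16Css_TRC_WRA_01_Init (and its D16F32/D32F32
    // siblings) do: aim the delay pointer at the caller's taps and copy the
    // coefficients in the order the processing loops expect.
    void initSketch(StateSketch* s, TapsSketch* taps, const CoefsSketch* c) {
        s->pDelays  = taps->storage;
        s->coefs[0] = c->A2;
        s->coefs[1] = c->A1;
        s->coefs[2] = c->A0;
        s->coefs[3] = c->B2;  // consumed as the -B2 feedback term by the kernels
        s->coefs[4] = c->B1;  // consumed as the -B1 feedback term by the kernels
    }
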
diff --git a/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F16Css_TRC_WRA_01_Private.h b/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F16Css_TRC_WRA_01_Private.h
index 5a9a0e9..94cc794 100644
--- a/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F16Css_TRC_WRA_01_Private.h
+++ b/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F16Css_TRC_WRA_01_Private.h
@@ -20,20 +20,18 @@
 
 /* The internal state variables are implemented in a (for the user)  hidden structure */
 /* In this (private) file, the internal structure is declared fro private use.        */
-typedef struct _Filter_State_
-{
-  LVM_INT32 *   pDelays;            /* pointer to the delayed samples (data of 32 bits) */
-  LVM_INT16     coefs[5];           /* pointer to the filter coefficients */
-}Filter_State;
+typedef struct _Filter_State_ {
+    LVM_INT32* pDelays; /* pointer to the delayed samples (data of 32 bits) */
+    LVM_INT16 coefs[5]; /* pointer to the filter coefficients */
+} Filter_State;
 
-typedef Filter_State * PFilter_State ;
+typedef Filter_State* PFilter_State;
 
-typedef struct _Filter_State_FLOAT
-{
-    LVM_FLOAT *   pDelays;            /* pointer to the delayed samples (data of 32 bits) */
-    LVM_FLOAT     coefs[5];           /* pointer to the filter coefficients */
+typedef struct _Filter_State_FLOAT {
+    LVM_FLOAT* pDelays; /* pointer to the delayed samples (data of 32 bits) */
+    LVM_FLOAT coefs[5]; /* pointer to the filter coefficients */
 
-}Filter_State_FLOAT;
-typedef Filter_State_FLOAT * PFilter_State_FLOAT ;
+} Filter_State_FLOAT;
+typedef Filter_State_FLOAT* PFilter_State_FLOAT;
 
 #endif /* _BQ_2I_D16F16CSS_TRC_WRA_01_PRIVATE_H_ */
diff --git a/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F32C13_TRC_WRA_01.cpp b/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F32C13_TRC_WRA_01.cpp
index 331c97f..3b7eb5e 100644
--- a/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F32C13_TRC_WRA_01.cpp
+++ b/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F32C13_TRC_WRA_01.cpp
@@ -36,74 +36,69 @@
  pBiquadState->pDelays[6] is y(n-2)L in Q16 format
  pBiquadState->pDelays[7] is y(n-2)R in Q16 format
 ***************************************************************************/
-void BQ_2I_D16F32C13_TRC_WRA_01 (           Biquad_FLOAT_Instance_t       *pInstance,
-                                            LVM_FLOAT                    *pDataIn,
-                                            LVM_FLOAT                    *pDataOut,
-                                            LVM_INT16                    NrSamples)
-    {
-        LVM_FLOAT  ynL,ynR;
-        LVM_INT16 ii;
-        PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
+void BQ_2I_D16F32C13_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+                                LVM_FLOAT* pDataOut, LVM_INT16 NrSamples) {
+    LVM_FLOAT ynL, ynR;
+    LVM_INT16 ii;
+    PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT)pInstance;
 
-        for (ii = NrSamples; ii != 0; ii--)
-        {
+    for (ii = NrSamples; ii != 0; ii--) {
+        /**************************************************************************
+                        PROCESSING OF THE LEFT CHANNEL
+        ***************************************************************************/
+        /* ynL=A2 * x(n-2)L */
+        ynL = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[2];
 
-            /**************************************************************************
-                            PROCESSING OF THE LEFT CHANNEL
-            ***************************************************************************/
-            /* ynL=A2 * x(n-2)L */
-            ynL = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[2];
+        /* ynL+=A1* x(n-1)L */
+        ynL += (LVM_FLOAT)pBiquadState->coefs[1] * pBiquadState->pDelays[0];
 
-            /* ynL+=A1* x(n-1)L */
-            ynL += (LVM_FLOAT)pBiquadState->coefs[1] * pBiquadState->pDelays[0];
+        /* ynL+=A0* x(n)L   */
+        ynL += (LVM_FLOAT)pBiquadState->coefs[2] * (*pDataIn);
 
-            /* ynL+=A0* x(n)L   */
-            ynL += (LVM_FLOAT)pBiquadState->coefs[2] * (*pDataIn);
+        /* ynL+=-B2*y(n-2)L */
+        ynL += pBiquadState->pDelays[6] * pBiquadState->coefs[3];
 
-            /* ynL+=-B2*y(n-2)L */
-            ynL += pBiquadState->pDelays[6] * pBiquadState->coefs[3];
+        /* ynL+=-B1*y(n-1)L */
+        ynL += pBiquadState->pDelays[4] * pBiquadState->coefs[4];
 
-            /* ynL+=-B1*y(n-1)L */
-            ynL += pBiquadState->pDelays[4] * pBiquadState->coefs[4];
+        /**************************************************************************
+                        PROCESSING OF THE RIGHT CHANNEL
+        ***************************************************************************/
+        /* ynR=A2 * x(n-2)R */
+        ynR = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[3];
 
-            /**************************************************************************
-                            PROCESSING OF THE RIGHT CHANNEL
-            ***************************************************************************/
-            /* ynR=A2 * x(n-2)R */
-            ynR = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[3];
+        /* ynR+=A1* x(n-1)R */
+        ynR += (LVM_FLOAT)pBiquadState->coefs[1] * pBiquadState->pDelays[1];
 
-            /* ynR+=A1* x(n-1)R */
-            ynR += (LVM_FLOAT)pBiquadState->coefs[1] * pBiquadState->pDelays[1];
+        /* ynR+=A0* x(n)R   */
+        ynR += (LVM_FLOAT)pBiquadState->coefs[2] * (*(pDataIn + 1));
 
-            /* ynR+=A0* x(n)R   */
-            ynR += (LVM_FLOAT)pBiquadState->coefs[2] * (*(pDataIn+1));
+        /* ynR+=-B2 * y(n-2)R */
+        ynR += pBiquadState->pDelays[7] * pBiquadState->coefs[3];
 
-            /* ynR+=-B2 * y(n-2)R */
-            ynR += pBiquadState->pDelays[7] * pBiquadState->coefs[3];
+        /* ynR+=-B1 * y(n-1)R */
+        ynR += pBiquadState->pDelays[5] * pBiquadState->coefs[4];
 
-            /* ynR+=-B1 * y(n-1)R */
-            ynR += pBiquadState->pDelays[5] * pBiquadState->coefs[4];
+        /**************************************************************************
+                        UPDATING THE DELAYS
+        ***************************************************************************/
+        pBiquadState->pDelays[7] = pBiquadState->pDelays[5]; /* y(n-2)R=y(n-1)R*/
+        pBiquadState->pDelays[6] = pBiquadState->pDelays[4]; /* y(n-2)L=y(n-1)L*/
+        pBiquadState->pDelays[3] = pBiquadState->pDelays[1]; /* x(n-2)R=x(n-1)R*/
+        pBiquadState->pDelays[2] = pBiquadState->pDelays[0]; /* x(n-2)L=x(n-1)L*/
+        pBiquadState->pDelays[5] = ynR;                      /* Update y(n-1)R */
+        pBiquadState->pDelays[4] = ynL;                      /* Update y(n-1)L */
+        pBiquadState->pDelays[0] = (*pDataIn);               /* Update x(n-1)L */
+        pDataIn++;
+        pBiquadState->pDelays[1] = (*pDataIn); /* Update x(n-1)R */
+        pDataIn++;
 
-            /**************************************************************************
-                            UPDATING THE DELAYS
-            ***************************************************************************/
-            pBiquadState->pDelays[7] = pBiquadState->pDelays[5];  /* y(n-2)R=y(n-1)R*/
-            pBiquadState->pDelays[6] = pBiquadState->pDelays[4];  /* y(n-2)L=y(n-1)L*/
-            pBiquadState->pDelays[3] = pBiquadState->pDelays[1];  /* x(n-2)R=x(n-1)R*/
-            pBiquadState->pDelays[2] = pBiquadState->pDelays[0];  /* x(n-2)L=x(n-1)L*/
-            pBiquadState->pDelays[5] = ynR;                       /* Update y(n-1)R */
-            pBiquadState->pDelays[4] = ynL;                       /* Update y(n-1)L */
-            pBiquadState->pDelays[0] = (*pDataIn);                /* Update x(n-1)L */
-            pDataIn++;
-            pBiquadState->pDelays[1] = (*pDataIn);                /* Update x(n-1)R */
-            pDataIn++;
-
-            /**************************************************************************
-                            WRITING THE OUTPUT
-            ***************************************************************************/
-            *pDataOut = (LVM_FLOAT)(ynL); /* Write Left output */
-            pDataOut++;
-            *pDataOut = (LVM_FLOAT)(ynR); /* Write Right ouput */
-            pDataOut++;
-        }
+        /**************************************************************************
+                        WRITING THE OUTPUT
+        ***************************************************************************/
+        *pDataOut = (LVM_FLOAT)(ynL); /* Write Left output */
+        pDataOut++;
+        *pDataOut = (LVM_FLOAT)(ynR); /* Write Right output */
+        pDataOut++;
     }
+}
diff --git a/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F32C14_TRC_WRA_01.cpp b/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F32C14_TRC_WRA_01.cpp
index 3a396df..8c43430 100644
--- a/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F32C14_TRC_WRA_01.cpp
+++ b/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F32C14_TRC_WRA_01.cpp
@@ -36,75 +36,69 @@
  pBiquadState->pDelays[6] is y(n-2)L in Q16 format
  pBiquadState->pDelays[7] is y(n-2)R in Q16 format
 ***************************************************************************/
-void BQ_2I_D16F32C14_TRC_WRA_01 (           Biquad_FLOAT_Instance_t       *pInstance,
-                                            LVM_FLOAT                    *pDataIn,
-                                            LVM_FLOAT                    *pDataOut,
-                                            LVM_INT16                    NrSamples)
-    {
-        LVM_FLOAT  ynL,ynR;
-        LVM_INT16 ii;
-        PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
+void BQ_2I_D16F32C14_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+                                LVM_FLOAT* pDataOut, LVM_INT16 NrSamples) {
+    LVM_FLOAT ynL, ynR;
+    LVM_INT16 ii;
+    PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT)pInstance;
 
-        for (ii = NrSamples; ii != 0; ii--)
-        {
+    for (ii = NrSamples; ii != 0; ii--) {
+        /**************************************************************************
+                        PROCESSING OF THE LEFT CHANNEL
+        ***************************************************************************/
+        /* ynL=A2  * x(n-2)L */
+        ynL = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[2];
 
-            /**************************************************************************
-                            PROCESSING OF THE LEFT CHANNEL
-            ***************************************************************************/
-            /* ynL=A2  * x(n-2)L */
-            ynL = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[2];
+        /* ynL+=A1  * x(n-1)L */
+        ynL += (LVM_FLOAT)pBiquadState->coefs[1] * pBiquadState->pDelays[0];
 
-            /* ynL+=A1  * x(n-1)L */
-            ynL += (LVM_FLOAT)pBiquadState->coefs[1] * pBiquadState->pDelays[0];
+        /* ynL+=A0  * x(n)L */
+        ynL += (LVM_FLOAT)pBiquadState->coefs[2] * (*pDataIn);
 
-            /* ynL+=A0  * x(n)L */
-            ynL += (LVM_FLOAT)pBiquadState->coefs[2] * (*pDataIn);
+        /* ynL+= ( (-B2  * y(n-2)L  ))*/
+        ynL += pBiquadState->pDelays[6] * pBiquadState->coefs[3];
 
-            /* ynL+= ( (-B2  * y(n-2)L  ))*/
-            ynL += pBiquadState->pDelays[6] * pBiquadState->coefs[3];
+        /* ynL+=( (-B1  * y(n-1)L  ))  */
+        ynL += pBiquadState->pDelays[4] * pBiquadState->coefs[4];
 
-            /* ynL+=( (-B1  * y(n-1)L  ))  */
-            ynL += pBiquadState->pDelays[4] * pBiquadState->coefs[4];
+        /**************************************************************************
+                        PROCESSING OF THE RIGHT CHANNEL
+        ***************************************************************************/
+        /* ynR=A2  * x(n-2)R */
+        ynR = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[3];
 
-            /**************************************************************************
-                            PROCESSING OF THE RIGHT CHANNEL
-            ***************************************************************************/
-            /* ynR=A2  * x(n-2)R */
-            ynR = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[3];
+        /* ynR+=A1  * x(n-1)R */
+        ynR += (LVM_FLOAT)pBiquadState->coefs[1] * pBiquadState->pDelays[1];
 
-            /* ynR+=A1  * x(n-1)R */
-            ynR += (LVM_FLOAT)pBiquadState->coefs[1] * pBiquadState->pDelays[1];
+        /* ynR+=A0  * x(n)R */
+        ynR += (LVM_FLOAT)pBiquadState->coefs[2] * (*(pDataIn + 1));
 
-            /* ynR+=A0  * x(n)R */
-            ynR += (LVM_FLOAT)pBiquadState->coefs[2] * (*(pDataIn+1));
+        /* ynR+= ( (-B2  * y(n-2)R  ))*/
+        ynR += pBiquadState->pDelays[7] * pBiquadState->coefs[3];
 
-            /* ynR+= ( (-B2  * y(n-2)R  ))*/
-            ynR += pBiquadState->pDelays[7] * pBiquadState->coefs[3];
+        /* ynR+=( (-B1  * y(n-1)R  ))  */
+        ynR += pBiquadState->pDelays[5] * pBiquadState->coefs[4];
 
-            /* ynR+=( (-B1  * y(n-1)R  ))  */
-            ynR += pBiquadState->pDelays[5] * pBiquadState->coefs[4];
+        /**************************************************************************
+                        UPDATING THE DELAYS
+        ***************************************************************************/
+        pBiquadState->pDelays[7] = pBiquadState->pDelays[5]; /* y(n-2)R=y(n-1)R*/
+        pBiquadState->pDelays[6] = pBiquadState->pDelays[4]; /* y(n-2)L=y(n-1)L*/
+        pBiquadState->pDelays[3] = pBiquadState->pDelays[1]; /* x(n-2)R=x(n-1)R*/
+        pBiquadState->pDelays[2] = pBiquadState->pDelays[0]; /* x(n-2)L=x(n-1)L*/
+        pBiquadState->pDelays[5] = ynR;                      /* Update y(n-1)R */
+        pBiquadState->pDelays[4] = ynL;                      /* Update y(n-1)L */
+        pBiquadState->pDelays[0] = (*pDataIn);               /* Update x(n-1)L */
+        pDataIn++;
+        pBiquadState->pDelays[1] = (*pDataIn); /* Update x(n-1)R */
+        pDataIn++;
 
-            /**************************************************************************
-                            UPDATING THE DELAYS
-            ***************************************************************************/
-            pBiquadState->pDelays[7] = pBiquadState->pDelays[5];  /* y(n-2)R=y(n-1)R*/
-            pBiquadState->pDelays[6] = pBiquadState->pDelays[4];  /* y(n-2)L=y(n-1)L*/
-            pBiquadState->pDelays[3] = pBiquadState->pDelays[1];  /* x(n-2)R=x(n-1)R*/
-            pBiquadState->pDelays[2] = pBiquadState->pDelays[0];  /* x(n-2)L=x(n-1)L*/
-            pBiquadState->pDelays[5] = ynR;                    /* Update y(n-1)R */
-            pBiquadState->pDelays[4] = ynL;                    /* Update y(n-1)L */
-            pBiquadState->pDelays[0] = (*pDataIn);                /* Update x(n-1)L */
-            pDataIn++;
-            pBiquadState->pDelays[1] = (*pDataIn);                /* Update x(n-1)R */
-            pDataIn++;
-
-            /**************************************************************************
-                            WRITING THE OUTPUT
-            ***************************************************************************/
-            *pDataOut = (LVM_FLOAT)(ynL); /* Write Left output */
-            pDataOut++;
-            *pDataOut = (LVM_FLOAT)(ynR); /* Write Right ouput */
-            pDataOut++;
-        }
-
+        /**************************************************************************
+                        WRITING THE OUTPUT
+        ***************************************************************************/
+        *pDataOut = (LVM_FLOAT)(ynL); /* Write Left output */
+        pDataOut++;
+        *pDataOut = (LVM_FLOAT)(ynR); /* Write Right output */
+        pDataOut++;
     }
+}
diff --git a/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F32C15_TRC_WRA_01.cpp b/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F32C15_TRC_WRA_01.cpp
index 1cbff1a..84fbadf 100644
--- a/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F32C15_TRC_WRA_01.cpp
+++ b/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F32C15_TRC_WRA_01.cpp
@@ -36,75 +36,69 @@
  pBiquadState->pDelays[6] is y(n-2)L in Q16 format
  pBiquadState->pDelays[7] is y(n-2)R in Q16 format
 ***************************************************************************/
-void BQ_2I_D16F32C15_TRC_WRA_01 (           Biquad_FLOAT_Instance_t       *pInstance,
-                                            LVM_FLOAT                    *pDataIn,
-                                            LVM_FLOAT                    *pDataOut,
-                                            LVM_INT16                    NrSamples)
-    {
-        LVM_FLOAT  ynL,ynR;
-        LVM_INT16 ii;
-        PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
+void BQ_2I_D16F32C15_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+                                LVM_FLOAT* pDataOut, LVM_INT16 NrSamples) {
+    LVM_FLOAT ynL, ynR;
+    LVM_INT16 ii;
+    PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT)pInstance;
 
-         for (ii = NrSamples; ii != 0; ii--)
-         {
+    for (ii = NrSamples; ii != 0; ii--) {
+        /**************************************************************************
+                        PROCESSING OF THE LEFT CHANNEL
+        ***************************************************************************/
+        /* ynL=A2  * x(n-2)L */
+        ynL = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[2];
 
-            /**************************************************************************
-                            PROCESSING OF THE LEFT CHANNEL
-            ***************************************************************************/
-            /* ynL=A2  * x(n-2)L */
-            ynL = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[2];
+        /* ynL+=A1  * x(n-1)L */
+        ynL += (LVM_FLOAT)pBiquadState->coefs[1] * pBiquadState->pDelays[0];
 
-            /* ynL+=A1  * x(n-1)L */
-            ynL += (LVM_FLOAT)pBiquadState->coefs[1] * pBiquadState->pDelays[0];
+        /* ynL+=A0  * x(n)L */
+        ynL += (LVM_FLOAT)pBiquadState->coefs[2] * (*pDataIn);
 
-            /* ynL+=A0  * x(n)L */
-            ynL += (LVM_FLOAT)pBiquadState->coefs[2] * (*pDataIn);
+        /* ynL+= ( (-B2  * y(n-2)L )  */
+        ynL += pBiquadState->pDelays[6] * pBiquadState->coefs[3];
 
-            /* ynL+= ( (-B2  * y(n-2)L )  */
-            ynL += pBiquadState->pDelays[6] * pBiquadState->coefs[3];
+        /* ynL+=( (-B1  * y(n-1)L  ))  */
+        ynL += pBiquadState->pDelays[4] * pBiquadState->coefs[4];
 
-            /* ynL+=( (-B1  * y(n-1)L  ))  */
-            ynL += pBiquadState->pDelays[4] * pBiquadState->coefs[4];
+        /**************************************************************************
+                        PROCESSING OF THE RIGHT CHANNEL
+        ***************************************************************************/
+        /* ynR=A2  * x(n-2)R */
+        ynR = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[3];
 
-            /**************************************************************************
-                            PROCESSING OF THE RIGHT CHANNEL
-            ***************************************************************************/
-            /* ynR=A2  * x(n-2)R */
-            ynR = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[3];
+        /* ynR+=A1  * x(n-1)R */
+        ynR += (LVM_FLOAT)pBiquadState->coefs[1] * pBiquadState->pDelays[1];
 
-            /* ynR+=A1  * x(n-1)R */
-            ynR += (LVM_FLOAT)pBiquadState->coefs[1] * pBiquadState->pDelays[1];
+        /* ynR+=A0  * x(n)R */
+        ynR += (LVM_FLOAT)pBiquadState->coefs[2] * (*(pDataIn + 1));
 
-            /* ynR+=A0  * x(n)R */
-            ynR += (LVM_FLOAT)pBiquadState->coefs[2] * (*(pDataIn+1));
+        /* ynR+= ( (-B2  * y(n-2)R ) */
+        ynR += pBiquadState->pDelays[7] * pBiquadState->coefs[3];
 
-            /* ynR+= ( (-B2  * y(n-2)R ) */
-            ynR += pBiquadState->pDelays[7] * pBiquadState->coefs[3];
+        /* ynR+=( (-B1  * y(n-1)R  )) in Q15 */
+        ynR += pBiquadState->pDelays[5] * pBiquadState->coefs[4];
 
-            /* ynR+=( (-B1  * y(n-1)R  )) in Q15 */
-            ynR += pBiquadState->pDelays[5] * pBiquadState->coefs[4];
+        /**************************************************************************
+                        UPDATING THE DELAYS
+        ***************************************************************************/
+        pBiquadState->pDelays[7] = pBiquadState->pDelays[5]; /* y(n-2)R=y(n-1)R*/
+        pBiquadState->pDelays[6] = pBiquadState->pDelays[4]; /* y(n-2)L=y(n-1)L*/
+        pBiquadState->pDelays[3] = pBiquadState->pDelays[1]; /* x(n-2)R=x(n-1)R*/
+        pBiquadState->pDelays[2] = pBiquadState->pDelays[0]; /* x(n-2)L=x(n-1)L*/
+        pBiquadState->pDelays[5] = ynR;                      /* Update y(n-1)R*/
+        pBiquadState->pDelays[4] = ynL;                      /* Update y(n-1)L*/
+        pBiquadState->pDelays[0] = (*pDataIn);               /* Update x(n-1)L*/
+        pDataIn++;
+        pBiquadState->pDelays[1] = (*pDataIn); /* Update x(n-1)R*/
+        pDataIn++;
 
-            /**************************************************************************
-                            UPDATING THE DELAYS
-            ***************************************************************************/
-            pBiquadState->pDelays[7] = pBiquadState->pDelays[5]; /* y(n-2)R=y(n-1)R*/
-            pBiquadState->pDelays[6] = pBiquadState->pDelays[4]; /* y(n-2)L=y(n-1)L*/
-            pBiquadState->pDelays[3] = pBiquadState->pDelays[1]; /* x(n-2)R=x(n-1)R*/
-            pBiquadState->pDelays[2] = pBiquadState->pDelays[0]; /* x(n-2)L=x(n-1)L*/
-            pBiquadState->pDelays[5] = ynR; /* Update y(n-1)R*/
-            pBiquadState->pDelays[4] = ynL; /* Update y(n-1)L*/
-            pBiquadState->pDelays[0] = (*pDataIn); /* Update x(n-1)L*/
-            pDataIn++;
-            pBiquadState->pDelays[1] = (*pDataIn); /* Update x(n-1)R*/
-            pDataIn++;
-
-            /**************************************************************************
-                            WRITING THE OUTPUT
-            ***************************************************************************/
-            *pDataOut = (LVM_FLOAT)(ynL); /* Write Left output*/
-            pDataOut++;
-            *pDataOut = (LVM_FLOAT)(ynR); /* Write Right ouput*/
-            pDataOut++;
-        }
-
+        /**************************************************************************
+                        WRITING THE OUTPUT
+        ***************************************************************************/
+        *pDataOut = (LVM_FLOAT)(ynL); /* Write Left output*/
+        pDataOut++;
+        *pDataOut = (LVM_FLOAT)(ynR); /* Write Right output*/
+        pDataOut++;
     }
+}
diff --git a/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F32Css_TRC_WRA_01_Private.h b/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F32Css_TRC_WRA_01_Private.h
index 314388a..1cc7618 100644
--- a/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F32Css_TRC_WRA_01_Private.h
+++ b/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F32Css_TRC_WRA_01_Private.h
@@ -20,20 +20,18 @@
 
 /* The internal state variables are implemented in a (for the user)  hidden structure */
 /* In this (private) file, the internal structure is declared fro private use.        */
-typedef struct _Filter_State_
-{
-  LVM_INT32 *                          pDelays;        /* pointer to the delayed samples (data of 32 bits)   */
-  LVM_INT16                           coefs[5];         /* pointer to the filter coefficients */
-}Filter_State;
+typedef struct _Filter_State_ {
+    LVM_INT32* pDelays; /* pointer to the delayed samples (data of 32 bits)   */
+    LVM_INT16 coefs[5]; /* pointer to the filter coefficients */
+} Filter_State;
 
-typedef Filter_State * PFilter_State ;
+typedef Filter_State* PFilter_State;
 
-typedef struct _Filter_State_FLOAT
-{
-    LVM_FLOAT *                          pDelays;        /* pointer to the delayed samples \
-                                                           (data of 32 bits)   */
-    LVM_FLOAT                           coefs[5];        /* pointer to the filter coefficients */
-}Filter_State_FLOAT;
-typedef Filter_State_FLOAT * PFilter_State_FLOAT ;
+typedef struct _Filter_State_FLOAT {
+    LVM_FLOAT* pDelays; /* pointer to the delayed samples \
+                          (data of 32 bits)   */
+    LVM_FLOAT coefs[5]; /* pointer to the filter coefficients */
+} Filter_State_FLOAT;
+typedef Filter_State_FLOAT* PFilter_State_FLOAT;
 
 #endif /* _BQ_2I_D16F32CSS_TRC_WRA_01_PRIVATE_H_ */
diff --git a/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F32Css_TRC_WRA_01_init.cpp b/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F32Css_TRC_WRA_01_init.cpp
index 058541a..6817d9f 100644
--- a/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F32Css_TRC_WRA_01_init.cpp
+++ b/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F32Css_TRC_WRA_01_init.cpp
@@ -36,13 +36,12 @@
 /* RETURNS:                                                                */
 /*   void return code                                                      */
 /*-------------------------------------------------------------------------*/
-void BQ_2I_D16F32Css_TRC_WRA_01_Init (   Biquad_FLOAT_Instance_t         *pInstance,
-                                         Biquad_2I_Order2_FLOAT_Taps_t   *pTaps,
-                                         BQ_FLOAT_Coefs_t            *pCoef)
-{
+void BQ_2I_D16F32Css_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t* pInstance,
+                                     Biquad_2I_Order2_FLOAT_Taps_t* pTaps,
+                                     BQ_FLOAT_Coefs_t* pCoef) {
     LVM_FLOAT temp;
-    PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
-    pBiquadState->pDelays      = (LVM_FLOAT *) pTaps;
+    PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT)pInstance;
+    pBiquadState->pDelays = (LVM_FLOAT*)pTaps;
     temp = pCoef->A2;
     pBiquadState->coefs[0] = temp;
     temp = pCoef->A1;
@@ -56,4 +55,3 @@
 }
 /*-------------------------------------------------------------------------*/
 /* End Of File: BQ_2I_D16F32Css_TRC_WRA_01_Init                              */
-
diff --git a/media/libeffects/lvm/lib/Common/src/BQ_2I_D32F32C30_TRC_WRA_01.cpp b/media/libeffects/lvm/lib/Common/src/BQ_2I_D32F32C30_TRC_WRA_01.cpp
index 78d1ba1..4eeaaa8 100644
--- a/media/libeffects/lvm/lib/Common/src/BQ_2I_D32F32C30_TRC_WRA_01.cpp
+++ b/media/libeffects/lvm/lib/Common/src/BQ_2I_D32F32C30_TRC_WRA_01.cpp
@@ -36,91 +36,84 @@
  pBiquadState->pDelays[6] is y(n-2)L in Q0 format
  pBiquadState->pDelays[7] is y(n-2)R in Q0 format
 ***************************************************************************/
-void BQ_2I_D32F32C30_TRC_WRA_01 (           Biquad_FLOAT_Instance_t       *pInstance,
-                                            LVM_FLOAT                    *pDataIn,
-                                            LVM_FLOAT                    *pDataOut,
-                                            LVM_INT16                    NrSamples)
+void BQ_2I_D32F32C30_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+                                LVM_FLOAT* pDataOut, LVM_INT16 NrSamples)
 
-    {
-        LVM_FLOAT ynL,ynR,templ,tempd;
-        LVM_INT16 ii;
-        PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
+{
+    LVM_FLOAT ynL, ynR, templ, tempd;
+    LVM_INT16 ii;
+    PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT)pInstance;
 
-         for (ii = NrSamples; ii != 0; ii--)
-         {
+    for (ii = NrSamples; ii != 0; ii--) {
+        /**************************************************************************
+                        PROCESSING OF THE LEFT CHANNEL
+        ***************************************************************************/
+        /* ynL= ( A2  * x(n-2)L  ) */
+        ynL = pBiquadState->coefs[0] * pBiquadState->pDelays[2];
 
-            /**************************************************************************
-                            PROCESSING OF THE LEFT CHANNEL
-            ***************************************************************************/
-            /* ynL= ( A2  * x(n-2)L  ) */
-            ynL = pBiquadState->coefs[0] * pBiquadState->pDelays[2];
+        /* ynL+= ( A1  * x(n-1)L  )*/
+        templ = pBiquadState->coefs[1] * pBiquadState->pDelays[0];
+        ynL += templ;
 
-            /* ynL+= ( A1  * x(n-1)L  )*/
-            templ = pBiquadState->coefs[1] * pBiquadState->pDelays[0];
-            ynL += templ;
+        /* ynL+= ( A0  * x(n)L  ) */
+        templ = pBiquadState->coefs[2] * (*pDataIn);
+        ynL += templ;
 
-            /* ynL+= ( A0  * x(n)L  ) */
-            templ = pBiquadState->coefs[2] * (*pDataIn);
-            ynL += templ;
+        /* ynL+= (-B2  * y(n-2)L  ) */
+        templ = pBiquadState->coefs[3] * pBiquadState->pDelays[6];
+        ynL += templ;
 
-             /* ynL+= (-B2  * y(n-2)L  ) */
-            templ = pBiquadState->coefs[3] * pBiquadState->pDelays[6];
-            ynL += templ;
+        /* ynL+= (-B1  * y(n-1)L  )*/
+        templ = pBiquadState->coefs[4] * pBiquadState->pDelays[4];
+        ynL += templ;
 
-            /* ynL+= (-B1  * y(n-1)L  )*/
-            templ = pBiquadState->coefs[4] * pBiquadState->pDelays[4];
-            ynL += templ;
+        /**************************************************************************
+                        PROCESSING OF THE RIGHT CHANNEL
+        ***************************************************************************/
+        /* ynR= ( A2  * x(n-2)R  ) */
+        ynR = pBiquadState->coefs[0] * pBiquadState->pDelays[3];
 
-            /**************************************************************************
-                            PROCESSING OF THE RIGHT CHANNEL
-            ***************************************************************************/
-            /* ynR= ( A2  * x(n-2)R  ) */
-            ynR = pBiquadState->coefs[0] * pBiquadState->pDelays[3];
+        /* ynR+= ( A1  * x(n-1)R  ) */
+        templ = pBiquadState->coefs[1] * pBiquadState->pDelays[1];
+        ynR += templ;
 
-            /* ynR+= ( A1  * x(n-1)R  ) */
-            templ = pBiquadState->coefs[1] * pBiquadState->pDelays[1];
-            ynR += templ;
+        /* ynR+= ( A0  * x(n)R  ) */
+        tempd = *(pDataIn + 1);
+        templ = pBiquadState->coefs[2] * tempd;
+        ynR += templ;
 
-            /* ynR+= ( A0  * x(n)R  ) */
-            tempd =* (pDataIn+1);
-            templ = pBiquadState->coefs[2] * tempd;
-            ynR += templ;
+        /* ynR+= (-B2  * y(n-2)R  ) */
+        templ = pBiquadState->coefs[3] * pBiquadState->pDelays[7];
+        ynR += templ;
 
-            /* ynR+= (-B2  * y(n-2)R  ) */
-            templ = pBiquadState->coefs[3] * pBiquadState->pDelays[7];
-            ynR += templ;
+        /* ynR+= (-B1  * y(n-1)R  )  */
+        templ = pBiquadState->coefs[4] * pBiquadState->pDelays[5];
+        ynR += templ;
 
-            /* ynR+= (-B1  * y(n-1)R  )  */
-            templ = pBiquadState->coefs[4] * pBiquadState->pDelays[5];
-            ynR += templ;
+        /**************************************************************************
+                        UPDATING THE DELAYS
+        ***************************************************************************/
+        pBiquadState->pDelays[7] = pBiquadState->pDelays[5]; /* y(n-2)R=y(n-1)R*/
+        pBiquadState->pDelays[6] = pBiquadState->pDelays[4]; /* y(n-2)L=y(n-1)L*/
+        pBiquadState->pDelays[3] = pBiquadState->pDelays[1]; /* x(n-2)R=x(n-1)R*/
+        pBiquadState->pDelays[2] = pBiquadState->pDelays[0]; /* x(n-2)L=x(n-1)L*/
+        pBiquadState->pDelays[5] = (LVM_FLOAT)ynR;           /* Update y(n-1)R */
+        pBiquadState->pDelays[4] = (LVM_FLOAT)ynL;           /* Update y(n-1)L */
+        pBiquadState->pDelays[0] = (*pDataIn);               /* Update x(n-1)L */
+        pDataIn++;
+        pBiquadState->pDelays[1] = (*pDataIn); /* Update x(n-1)R */
+        pDataIn++;
 
-            /**************************************************************************
-                            UPDATING THE DELAYS
-            ***************************************************************************/
-            pBiquadState->pDelays[7] = pBiquadState->pDelays[5]; /* y(n-2)R=y(n-1)R*/
-            pBiquadState->pDelays[6] = pBiquadState->pDelays[4]; /* y(n-2)L=y(n-1)L*/
-            pBiquadState->pDelays[3] = pBiquadState->pDelays[1]; /* x(n-2)R=x(n-1)R*/
-            pBiquadState->pDelays[2] = pBiquadState->pDelays[0]; /* x(n-2)L=x(n-1)L*/
-            pBiquadState->pDelays[5] = (LVM_FLOAT)ynR; /* Update y(n-1)R */
-            pBiquadState->pDelays[4] = (LVM_FLOAT)ynL; /* Update y(n-1)L */
-            pBiquadState->pDelays[0] = (*pDataIn); /* Update x(n-1)L */
-            pDataIn++;
-            pBiquadState->pDelays[1] = (*pDataIn); /* Update x(n-1)R */
-            pDataIn++;
-
-            /**************************************************************************
-                            WRITING THE OUTPUT
-            ***************************************************************************/
-            *pDataOut = (LVM_FLOAT)ynL; /* Write Left output */
-            pDataOut++;
-            *pDataOut = (LVM_FLOAT)ynR; /* Write Right ouput */
-            pDataOut++;
-
-        }
-
+        /**************************************************************************
+                        WRITING THE OUTPUT
+        ***************************************************************************/
+        *pDataOut = (LVM_FLOAT)ynL; /* Write Left output */
+        pDataOut++;
+        *pDataOut = (LVM_FLOAT)ynR; /* Write Right output */
+        pDataOut++;
     }
+}
 
-#ifdef SUPPORT_MC
 /**************************************************************************
  ASSUMPTIONS:
  COEFS-
@@ -141,61 +134,53 @@
  pBiquadState->pDelays[3*NrChannels] to
  pBiquadState->pDelays[4*NrChannels - 1] is y(n-2) for all NrChannels
 ***************************************************************************/
-void BQ_MC_D32F32C30_TRC_WRA_01 (           Biquad_FLOAT_Instance_t      *pInstance,
-                                            LVM_FLOAT                    *pDataIn,
-                                            LVM_FLOAT                    *pDataOut,
-                                            LVM_INT16                    NrFrames,
-                                            LVM_INT16                    NrChannels)
+void BQ_MC_D32F32C30_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+                                LVM_FLOAT* pDataOut, LVM_INT16 NrFrames, LVM_INT16 NrChannels)
 
-    {
-        LVM_FLOAT yn, temp;
-        LVM_INT16 ii, jj;
-        PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
+{
+    LVM_FLOAT yn, temp;
+    LVM_INT16 ii, jj;
+    PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT)pInstance;
 
-         for (ii = NrFrames; ii != 0; ii--)
-         {
+    for (ii = NrFrames; ii != 0; ii--) {
+        /**************************************************************************
+                        PROCESSING CHANNEL-WISE
+        ***************************************************************************/
+        for (jj = 0; jj < NrChannels; jj++) {
+            /* yn= (A2  * x(n-2)) */
+            yn = pBiquadState->coefs[0] * pBiquadState->pDelays[NrChannels + jj];
+
+            /* yn+= (A1  * x(n-1)) */
+            temp = pBiquadState->coefs[1] * pBiquadState->pDelays[jj];
+            yn += temp;
+
+            /* yn+= (A0  * x(n)) */
+            temp = pBiquadState->coefs[2] * (*pDataIn);
+            yn += temp;
+
+            /* yn+= (-B2  * y(n-2)) */
+            temp = pBiquadState->coefs[3] * pBiquadState->pDelays[NrChannels * 3 + jj];
+            yn += temp;
+
+            /* yn+= (-B1  * y(n-1)) */
+            temp = pBiquadState->coefs[4] * pBiquadState->pDelays[NrChannels * 2 + jj];
+            yn += temp;
+
             /**************************************************************************
-                            PROCESSING CHANNEL-WISE
+                            UPDATING THE DELAYS
             ***************************************************************************/
-            for (jj = 0; jj < NrChannels; jj++)
-            {
-                /* yn= (A2  * x(n-2)) */
-                yn = pBiquadState->coefs[0] * pBiquadState->pDelays[NrChannels + jj];
-
-                /* yn+= (A1  * x(n-1)) */
-                temp = pBiquadState->coefs[1] * pBiquadState->pDelays[jj];
-                yn += temp;
-
-                /* yn+= (A0  * x(n)) */
-                temp = pBiquadState->coefs[2] * (*pDataIn);
-                yn += temp;
-
-                 /* yn+= (-B2  * y(n-2)) */
-                temp = pBiquadState->coefs[3] * pBiquadState->pDelays[NrChannels*3 + jj];
-                yn += temp;
-
-                /* yn+= (-B1  * y(n-1)) */
-                temp = pBiquadState->coefs[4] * pBiquadState->pDelays[NrChannels*2 + jj];
-                yn += temp;
-
-                /**************************************************************************
-                                UPDATING THE DELAYS
-                ***************************************************************************/
-                pBiquadState->pDelays[NrChannels * 3 + jj] =
+            pBiquadState->pDelays[NrChannels * 3 + jj] =
                     pBiquadState->pDelays[NrChannels * 2 + jj]; /* y(n-2)=y(n-1)*/
-                pBiquadState->pDelays[NrChannels * 1 + jj] =
-                    pBiquadState->pDelays[jj]; /* x(n-2)=x(n-1)*/
-                pBiquadState->pDelays[NrChannels * 2 + jj] = (LVM_FLOAT)yn; /* Update y(n-1)*/
-                pBiquadState->pDelays[jj] = (*pDataIn); /* Update x(n-1)*/
-                pDataIn++;
-                /**************************************************************************
-                                WRITING THE OUTPUT
-                ***************************************************************************/
-                *pDataOut = (LVM_FLOAT)yn; /* Write jj Channel output */
-                pDataOut++;
-            }
+            pBiquadState->pDelays[NrChannels * 1 + jj] =
+                    pBiquadState->pDelays[jj];                          /* x(n-2)=x(n-1)*/
+            pBiquadState->pDelays[NrChannels * 2 + jj] = (LVM_FLOAT)yn; /* Update y(n-1)*/
+            pBiquadState->pDelays[jj] = (*pDataIn);                     /* Update x(n-1)*/
+            pDataIn++;
+            /**************************************************************************
+                            WRITING THE OUTPUT
+            ***************************************************************************/
+            *pDataOut = (LVM_FLOAT)yn; /* Write jj Channel output */
+            pDataOut++;
         }
-
     }
-#endif /*SUPPORT_MC*/
-
+}
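
Besides the reformatting, this hunk drops the SUPPORT_MC guards, so the multichannel kernel BQ_MC_D32F32C30_TRC_WRA_01 is now always built. Unlike the stereo kernels, it keeps its history in four channel-sized blocks (x(n-1), x(n-2), y(n-1), y(n-2)), indexed as pDelays[block * NrChannels + channel]. A stand-alone restatement of the per-frame loop, using plain float and std::vector for the delay blocks, purely for illustration:

    #include <vector>

    struct McBiquadSketch {
        float coefs[5];             // {A2, A1, A0, -B2, -B1}
        int channels;
        std::vector<float> delays;  // 4 blocks of `channels` entries: x1 | x2 | y1 | y2

        explicit McBiquadSketch(int nrChannels)
            : coefs{}, channels(nrChannels), delays(4 * nrChannels, 0.0f) {}

        void processFrame(const float* in, float* out) {
            const int N = channels;
            for (int ch = 0; ch < N; ++ch) {
                float yn = coefs[0] * delays[N + ch]        // A2  * x(n-2)
                         + coefs[1] * delays[ch]            // A1  * x(n-1)
                         + coefs[2] * in[ch]                // A0  * x(n)
                         + coefs[3] * delays[3 * N + ch]    // -B2 * y(n-2)
                         + coefs[4] * delays[2 * N + ch];   // -B1 * y(n-1)
                delays[3 * N + ch] = delays[2 * N + ch];    // y(n-2) = y(n-1)
                delays[N + ch]     = delays[ch];            // x(n-2) = x(n-1)
                delays[2 * N + ch] = yn;                    // update y(n-1)
                delays[ch]         = in[ch];                // update x(n-1)
                out[ch] = yn;
            }
        }
    };
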
diff --git a/media/libeffects/lvm/lib/Common/src/BQ_2I_D32F32Cll_TRC_WRA_01_Init.cpp b/media/libeffects/lvm/lib/Common/src/BQ_2I_D32F32Cll_TRC_WRA_01_Init.cpp
index 492a9e0..1e27391 100644
--- a/media/libeffects/lvm/lib/Common/src/BQ_2I_D32F32Cll_TRC_WRA_01_Init.cpp
+++ b/media/libeffects/lvm/lib/Common/src/BQ_2I_D32F32Cll_TRC_WRA_01_Init.cpp
@@ -37,13 +37,12 @@
 /* RETURNS:                                                                */
 /*   void return code                                                      */
 /*-------------------------------------------------------------------------*/
-void BQ_2I_D32F32Cll_TRC_WRA_01_Init (   Biquad_FLOAT_Instance_t         *pInstance,
-                                         Biquad_2I_Order2_FLOAT_Taps_t   *pTaps,
-                                         BQ_FLOAT_Coefs_t            *pCoef)
-{
+void BQ_2I_D32F32Cll_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t* pInstance,
+                                     Biquad_2I_Order2_FLOAT_Taps_t* pTaps,
+                                     BQ_FLOAT_Coefs_t* pCoef) {
     LVM_FLOAT temp;
-    PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
-    pBiquadState->pDelays      = (LVM_FLOAT *) pTaps;
+    PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT)pInstance;
+    pBiquadState->pDelays = (LVM_FLOAT*)pTaps;
     temp = pCoef->A2;
     pBiquadState->coefs[0] = temp;
     temp = pCoef->A1;
@@ -57,4 +56,3 @@
 }
 /*-------------------------------------------------------------------------*/
 /* End Of File: BQ_2I_D32F32C32_TRC_WRA_01_Init.c                              */
-
diff --git a/media/libeffects/lvm/lib/Common/src/BQ_2I_D32F32Cll_TRC_WRA_01_Private.h b/media/libeffects/lvm/lib/Common/src/BQ_2I_D32F32Cll_TRC_WRA_01_Private.h
index 7eb6474..4a2149d 100644
--- a/media/libeffects/lvm/lib/Common/src/BQ_2I_D32F32Cll_TRC_WRA_01_Private.h
+++ b/media/libeffects/lvm/lib/Common/src/BQ_2I_D32F32Cll_TRC_WRA_01_Private.h
@@ -20,20 +20,18 @@
 
 /* The internal state variables are implemented in a (for the user)  hidden structure */
 /* In this (private) file, the internal structure is declared fro private use.        */
-typedef struct _Filter_State_
-{
-  LVM_INT32 *                          pDelays;        /* pointer to the delayed samples (data of 32 bits)   */
-  LVM_INT32                            coefs[5];       /* pointer to the filter coefficients */
-}Filter_State;
+typedef struct _Filter_State_ {
+    LVM_INT32* pDelays; /* pointer to the delayed samples (data of 32 bits)   */
+    LVM_INT32 coefs[5]; /* pointer to the filter coefficients */
+} Filter_State;
 
-typedef Filter_State * PFilter_State ;
+typedef Filter_State* PFilter_State;
 
-typedef struct _Filter_State_FLOAT
-{
-    LVM_FLOAT *                          pDelays;        /* pointer to the delayed samples \
-                                                            (data of 32 bits)   */
-    LVM_FLOAT                            coefs[5];       /* pointer to the filter coefficients */
-}Filter_State_FLOAT;
-typedef Filter_State_FLOAT * PFilter_State_FLOAT ;
+typedef struct _Filter_State_FLOAT {
+    LVM_FLOAT* pDelays; /* pointer to the delayed samples \
+                           (data of 32 bits)   */
+    LVM_FLOAT coefs[5]; /* pointer to the filter coefficients */
+} Filter_State_FLOAT;
+typedef Filter_State_FLOAT* PFilter_State_FLOAT;
 
 #endif /* _BQ_2I_D32F32CLL_TRC_WRA_01_PRIVATE_H_*/
diff --git a/media/libeffects/lvm/lib/Common/src/CompLim_private.h b/media/libeffects/lvm/lib/Common/src/CompLim_private.h
index 06a21c3..9c7a96b 100644
--- a/media/libeffects/lvm/lib/Common/src/CompLim_private.h
+++ b/media/libeffects/lvm/lib/Common/src/CompLim_private.h
@@ -28,17 +28,16 @@
    DEFINITIONS
 ***********************************************************************************/
 
-#define FS_48K      48000
+#define FS_48K 48000
 
-#define INTEGER_16  0xFFFF /*   65535*/
-#define INTEGER_15  0x7FFF /*   32767*/
+#define INTEGER_16 0xFFFF /*   65535*/
+#define INTEGER_15 0x7FFF /*   32767*/
 
-#define GAIN_6DB    1
-#define GAIN_12DB   2
-#define GAIN_18DB   3
-#define GAIN_24DB   4
+#define GAIN_6DB 1
+#define GAIN_12DB 2
+#define GAIN_18DB 3
+#define GAIN_24DB 4
 
 #endif /* #ifndef _COMP_LIM_PRIVATE_ */
 
 /*** End of file ******************************************************************/
-
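
The GAIN_xDB codes count 6 dB steps (GAIN_6DB = 1 through GAIN_24DB = 4). How the limiter consumes them is outside this header, but the usual dB-to-linear mapping, shown in the hypothetical snippet below, gives roughly a doubling of amplitude per step:

    #include <cmath>
    #include <cstdio>

    int main() {
        for (int code = 1; code <= 4; ++code) {  // GAIN_6DB .. GAIN_24DB
            double linear = std::pow(10.0, (6.0 * code) / 20.0);  // dB -> linear amplitude
            std::printf("code %d -> %.0f dB -> x%.2f\n", code, 6.0 * code, linear);
        }
        return 0;
    }
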
diff --git a/media/libeffects/lvm/lib/Common/src/Copy_16.cpp b/media/libeffects/lvm/lib/Common/src/Copy_16.cpp
index 3a50554..8887890 100644
--- a/media/libeffects/lvm/lib/Common/src/Copy_16.cpp
+++ b/media/libeffects/lvm/lib/Common/src/Copy_16.cpp
@@ -25,27 +25,19 @@
    FUNCTION COPY_16
 ***********************************************************************************/
 
-void Copy_16( const LVM_INT16 *src,
-                    LVM_INT16 *dst,
-                    LVM_INT16  n )
-{
+void Copy_16(const LVM_INT16* src, LVM_INT16* dst, LVM_INT16 n) {
     LVM_INT16 ii;
 
-    if (src > dst)
-    {
-        for (ii = n; ii != 0; ii--)
-        {
+    if (src > dst) {
+        for (ii = n; ii != 0; ii--) {
             *dst = *src;
             dst++;
             src++;
         }
-    }
-    else
-    {
+    } else {
         src += n - 1;
         dst += n - 1;
-        for (ii = n; ii != 0; ii--)
-        {
+        for (ii = n; ii != 0; ii--) {
             *dst = *src;
             dst--;
             src--;
@@ -54,27 +46,19 @@
 
     return;
 }
-void Copy_Float( const LVM_FLOAT *src,
-                 LVM_FLOAT *dst,
-                 LVM_INT16  n )
-{
+void Copy_Float(const LVM_FLOAT* src, LVM_FLOAT* dst, LVM_INT16 n) {
     LVM_INT16 ii;
 
-    if (src > dst)
-    {
-        for (ii = n; ii != 0; ii--)
-        {
+    if (src > dst) {
+        for (ii = n; ii != 0; ii--) {
             *dst = *src;
             dst++;
             src++;
         }
-    }
-    else
-    {
+    } else {
         src += n - 1;
         dst += n - 1;
-        for (ii = n; ii != 0; ii--)
-        {
+        for (ii = n; ii != 0; ii--) {
             *dst = *src;
             dst--;
             src--;
@@ -83,46 +67,35 @@
 
     return;
 }
-#ifdef SUPPORT_MC
 // Extract out the stereo channel pair from multichannel source.
-void Copy_Float_Mc_Stereo(const LVM_FLOAT *src,
-                 LVM_FLOAT *dst,
-                 LVM_INT16 NrFrames, /* Number of frames */
-                 LVM_INT32 NrChannels)
-{
+void Copy_Float_Mc_Stereo(const LVM_FLOAT* src, LVM_FLOAT* dst,
+                          LVM_INT16 NrFrames, /* Number of frames */
+                          LVM_INT32 NrChannels) {
     LVM_INT16 ii;
 
-    if (NrChannels >= 2)
-    {
-        for (ii = NrFrames; ii != 0; ii--)
-        {
+    if (NrChannels >= 2) {
+        for (ii = NrFrames; ii != 0; ii--) {
             dst[0] = src[0];
             dst[1] = src[1];
             dst += 2;
             src += NrChannels;
         }
-    }
-    else if (NrChannels == 1)
-    {   // not expected to occur, provided for completeness.
+    } else if (NrChannels == 1) {  // not expected to occur, provided for completeness.
         src += (NrFrames - 1);
         dst += 2 * (NrFrames - 1);
-        for (ii = NrFrames; ii != 0; ii--)
-        {
+        for (ii = NrFrames; ii != 0; ii--) {
             dst[0] = src[0];
             dst[1] = src[0];
             dst -= 2;
-            src --;
+            src--;
         }
     }
 }
 
 // Merge a multichannel source with stereo contained in StereoOut, to dst.
-void Copy_Float_Stereo_Mc(const LVM_FLOAT *src,
-                 LVM_FLOAT *StereoOut,
-                 LVM_FLOAT *dst,
-                 LVM_INT16 NrFrames, /* Number of frames*/
-                 LVM_INT32 NrChannels)
-{
+void Copy_Float_Stereo_Mc(const LVM_FLOAT* src, LVM_FLOAT* StereoOut, LVM_FLOAT* dst,
+                          LVM_INT16 NrFrames, /* Number of frames*/
+                          LVM_INT32 NrChannels) {
     LVM_INT16 ii, jj;
 
     // pack dst with stereo information of StereoOut
@@ -130,18 +103,15 @@
     StereoOut += 2 * (NrFrames - 1);
     dst += NrChannels * (NrFrames - 1);
     src += NrChannels * (NrFrames - 1);
-    for (ii = NrFrames; ii != 0; ii--)
-    {
+    for (ii = NrFrames; ii != 0; ii--) {
         dst[1] = StereoOut[1];
-        dst[0] = StereoOut[0]; // copy 1 before 0 is required for NrChannels == 3.
-        for (jj = 2; jj < NrChannels; jj++)
-        {
+        dst[0] = StereoOut[0];  // copy 1 before 0 is required for NrChannels == 3.
+        for (jj = 2; jj < NrChannels; jj++) {
             dst[jj] = src[jj];
         }
-        dst    -= NrChannels;
-        src    -= NrChannels;
+        dst -= NrChannels;
+        src -= NrChannels;
         StereoOut -= 2;
     }
 }
-#endif
 /**********************************************************************************/
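
Copy_16 and Copy_Float pick their copy direction from the pointer order so overlapping source and destination buffers are handled safely, and the two multichannel helpers extract or re-merge the first two interleaved channels; Copy_Float_Stereo_Mc walks backwards and writes channel 1 before channel 0 so it can run in place (see the NrChannels == 3 comment). The SUPPORT_MC guards around these helpers are removed here as well. A small illustrative example of the extract/merge behaviour on a made-up 3-channel, 2-frame buffer:

    #include <cstdio>

    int main() {
        const float mcIn[6] = {/* frame 0 */ 0.1f, 0.2f, 0.9f,
                               /* frame 1 */ 0.3f, 0.4f, 0.8f};
        float stereo[4];

        // What Copy_Float_Mc_Stereo does: keep channels 0 and 1 of every frame.
        for (int frame = 0; frame < 2; ++frame) {
            stereo[2 * frame + 0] = mcIn[3 * frame + 0];
            stereo[2 * frame + 1] = mcIn[3 * frame + 1];
        }

        // What Copy_Float_Stereo_Mc does (the real code iterates backwards so the
        // destination may alias the source): stereo back into channels 0/1, the
        // remaining channels taken from the multichannel input.
        float mcOut[6];
        for (int frame = 1; frame >= 0; --frame) {
            mcOut[3 * frame + 1] = stereo[2 * frame + 1];
            mcOut[3 * frame + 0] = stereo[2 * frame + 0];
            mcOut[3 * frame + 2] = mcIn[3 * frame + 2];
        }
        for (float v : mcOut) std::printf("%.1f ", v);
        std::printf("\n");
        return 0;
    }
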
diff --git a/media/libeffects/lvm/lib/Common/src/Core_MixHard_2St_D32C31_SAT.cpp b/media/libeffects/lvm/lib/Common/src/Core_MixHard_2St_D32C31_SAT.cpp
index 5e77335..2c2061a 100644
--- a/media/libeffects/lvm/lib/Common/src/Core_MixHard_2St_D32C31_SAT.cpp
+++ b/media/libeffects/lvm/lib/Common/src/Core_MixHard_2St_D32C31_SAT.cpp
@@ -25,13 +25,9 @@
 /**********************************************************************************
    FUNCTION CORE_MIXHARD_2ST_D32C31_SAT
 ***********************************************************************************/
-void Core_MixHard_2St_D32C31_SAT(   Mix_2St_Cll_FLOAT_t       *pInstance,
-                                    const LVM_FLOAT     *src1,
-                                    const LVM_FLOAT     *src2,
-                                    LVM_FLOAT     *dst,
-                                    LVM_INT16     n)
-{
-    LVM_FLOAT  Temp1,Temp2,Temp3;
+void Core_MixHard_2St_D32C31_SAT(Mix_2St_Cll_FLOAT_t* pInstance, const LVM_FLOAT* src1,
+                                 const LVM_FLOAT* src2, LVM_FLOAT* dst, LVM_INT16 n) {
+    LVM_FLOAT Temp1, Temp2, Temp3;
     LVM_INT16 ii;
     LVM_FLOAT Current1Short;
     LVM_FLOAT Current2Short;
@@ -39,7 +35,7 @@
     Current1Short = (pInstance->Current1);
     Current2Short = (pInstance->Current2);
 
-    for (ii = n; ii != 0; ii--){
+    for (ii = n; ii != 0; ii--) {
         Temp1 = *src1++;
         Temp3 = Temp1 * Current1Short;
         Temp2 = *src2++;
@@ -47,11 +43,11 @@
         Temp2 = (Temp1 / 2.0f) + (Temp3 / 2.0f);
         if (Temp2 > 0.5f)
             Temp2 = 1.0f;
-        else if (Temp2 < -0.5f )
+        else if (Temp2 < -0.5f)
             Temp2 = -1.0f;
         else
             Temp2 = (Temp2 * 2);
-            *dst++ = Temp2;
+        *dst++ = Temp2;
     }
 }
 /**********************************************************************************/
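
The only change here that is easy to misread is the re-indentation of the *dst++ = Temp2 store: in the old layout it sat under the final else, but since that else has no braces it only governs the Temp2 = (Temp2 * 2) assignment, so the store has always run on every iteration. Behaviour is unchanged; the new indentation simply reflects it. A compact restatement of the loop body (a sketch, not the library code):

    // Mix the two gained inputs at half scale, hard-limit, then re-expand.
    static inline float mix_hard_sample(float in1, float in2, float gain1, float gain2) {
        float half = (in1 * gain1) / 2.0f + (in2 * gain2) / 2.0f;
        if (half > 0.5f) return 1.0f;    // positive clip
        if (half < -0.5f) return -1.0f;  // negative clip
        return half * 2.0f;              // no clipping: restore full scale
    }
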
diff --git a/media/libeffects/lvm/lib/Common/src/Core_MixInSoft_D32C31_SAT.cpp b/media/libeffects/lvm/lib/Common/src/Core_MixInSoft_D32C31_SAT.cpp
index 8f5c0ae..be9e49b 100644
--- a/media/libeffects/lvm/lib/Common/src/Core_MixInSoft_D32C31_SAT.cpp
+++ b/media/libeffects/lvm/lib/Common/src/Core_MixInSoft_D32C31_SAT.cpp
@@ -26,53 +26,48 @@
    FUNCTION CORE_MIXSOFT_1ST_D32C31_WRA
 ***********************************************************************************/
 
-void Core_MixInSoft_D32C31_SAT(     Mix_1St_Cll_FLOAT_t       *pInstance,
-                                    const LVM_FLOAT     *src,
-                                          LVM_FLOAT     *dst,
-                                          LVM_INT16     n)
-{
-    LVM_FLOAT    Temp1,Temp2,Temp3;
-    LVM_INT16     OutLoop;
-    LVM_INT16     InLoop;
-    LVM_FLOAT    TargetTimesOneMinAlpha;
-    LVM_FLOAT    CurrentTimesAlpha;
-    LVM_INT16     ii,jj;
+void Core_MixInSoft_D32C31_SAT(Mix_1St_Cll_FLOAT_t* pInstance, const LVM_FLOAT* src, LVM_FLOAT* dst,
+                               LVM_INT16 n) {
+    LVM_FLOAT Temp1, Temp2, Temp3;
+    LVM_INT16 OutLoop;
+    LVM_INT16 InLoop;
+    LVM_FLOAT TargetTimesOneMinAlpha;
+    LVM_FLOAT CurrentTimesAlpha;
+    LVM_INT16 ii, jj;
 
     InLoop = (LVM_INT16)(n >> 2); /* Process per 4 samples */
     OutLoop = (LVM_INT16)(n - (InLoop << 2));
 
-    TargetTimesOneMinAlpha = ((1.0f -pInstance->Alpha) * pInstance->Target);
-    if (pInstance->Target >= pInstance->Current){
-        TargetTimesOneMinAlpha +=(LVM_FLOAT)(2.0f / 2147483647.0f); /* Ceil*/
+    TargetTimesOneMinAlpha = ((1.0f - pInstance->Alpha) * pInstance->Target);
+    if (pInstance->Target >= pInstance->Current) {
+        TargetTimesOneMinAlpha += (LVM_FLOAT)(2.0f / 2147483647.0f); /* Ceil*/
     }
 
-    if (OutLoop){
-
+    if (OutLoop) {
         CurrentTimesAlpha = pInstance->Current * pInstance->Alpha;
         pInstance->Current = TargetTimesOneMinAlpha + CurrentTimesAlpha;
 
-        for (ii = OutLoop; ii != 0; ii--){
-        Temp1 = *src++;
-        Temp2 = *dst;
+        for (ii = OutLoop; ii != 0; ii--) {
+            Temp1 = *src++;
+            Temp2 = *dst;
 
-        Temp3 = Temp1 * (pInstance->Current);
-        Temp1 = Temp2 + Temp3;
+            Temp3 = Temp1 * (pInstance->Current);
+            Temp1 = Temp2 + Temp3;
 
-        if (Temp1 > 1.0f)
-            Temp1 = 1.0f;
-        else if (Temp1 < -1.0f)
-            Temp1 = -1.0f;
+            if (Temp1 > 1.0f)
+                Temp1 = 1.0f;
+            else if (Temp1 < -1.0f)
+                Temp1 = -1.0f;
 
-        *dst++ = Temp1;
+            *dst++ = Temp1;
         }
     }
 
-    for (ii = InLoop; ii != 0; ii--){
-
+    for (ii = InLoop; ii != 0; ii--) {
         CurrentTimesAlpha = pInstance->Current * pInstance->Alpha;
         pInstance->Current = TargetTimesOneMinAlpha + CurrentTimesAlpha;
 
-        for (jj = 4; jj!=0 ; jj--){
+        for (jj = 4; jj != 0; jj--) {
             Temp1 = *src++;
             Temp2 = *dst;
 
diff --git a/media/libeffects/lvm/lib/Common/src/Core_MixSoft_1St_D32C31_WRA.cpp b/media/libeffects/lvm/lib/Common/src/Core_MixSoft_1St_D32C31_WRA.cpp
index 6ff7853..61a4752 100644
--- a/media/libeffects/lvm/lib/Common/src/Core_MixSoft_1St_D32C31_WRA.cpp
+++ b/media/libeffects/lvm/lib/Common/src/Core_MixSoft_1St_D32C31_WRA.cpp
@@ -25,12 +25,9 @@
 /**********************************************************************************
    FUNCTION CORE_MIXSOFT_1ST_D32C31_WRA
 ***********************************************************************************/
-void Core_MixSoft_1St_D32C31_WRA(   Mix_1St_Cll_FLOAT_t       *pInstance,
-                                    const LVM_FLOAT     *src,
-                                    LVM_FLOAT     *dst,
-                                    LVM_INT16     n)
-{
-    LVM_FLOAT Temp1,Temp2;
+void Core_MixSoft_1St_D32C31_WRA(Mix_1St_Cll_FLOAT_t* pInstance, const LVM_FLOAT* src,
+                                 LVM_FLOAT* dst, LVM_INT16 n) {
+    LVM_FLOAT Temp1, Temp2;
     LVM_INT16 OutLoop;
     LVM_INT16 InLoop;
     LVM_FLOAT TargetTimesOneMinAlpha;
@@ -41,19 +38,17 @@
     InLoop = (LVM_INT16)(n >> 2); /* Process per 4 samples */
     OutLoop = (LVM_INT16)(n - (InLoop << 2));
 
-    TargetTimesOneMinAlpha = (1.0f - pInstance->Alpha) * pInstance->Target; /* float * float in float */
-    if (pInstance->Target >= pInstance->Current)
-    {
+    TargetTimesOneMinAlpha =
+            (1.0f - pInstance->Alpha) * pInstance->Target; /* float * float in float */
+    if (pInstance->Target >= pInstance->Current) {
         TargetTimesOneMinAlpha += (LVM_FLOAT)(2.0f / 2147483647.0f); /* Ceil*/
     }
 
-    if (OutLoop != 0)
-    {
+    if (OutLoop != 0) {
         CurrentTimesAlpha = (pInstance->Current * pInstance->Alpha);
         pInstance->Current = TargetTimesOneMinAlpha + CurrentTimesAlpha;
 
-        for (ii = OutLoop; ii != 0; ii--)
-        {
+        for (ii = OutLoop; ii != 0; ii--) {
             Temp1 = *src;
             src++;
 
@@ -63,37 +58,36 @@
         }
     }
 
-    for (ii = InLoop; ii != 0; ii--)
-    {
+    for (ii = InLoop; ii != 0; ii--) {
         CurrentTimesAlpha = pInstance->Current * pInstance->Alpha;
         pInstance->Current = TargetTimesOneMinAlpha + CurrentTimesAlpha;
 
-            Temp1 = *src;
-            src++;
+        Temp1 = *src;
+        src++;
 
-            Temp2 = Temp1 * (pInstance->Current);
-            *dst = Temp2;
-            dst++;
+        Temp2 = Temp1 * (pInstance->Current);
+        *dst = Temp2;
+        dst++;
 
-            Temp1 = *src;
-            src++;
+        Temp1 = *src;
+        src++;
 
-            Temp2 = Temp1 * (pInstance->Current);
-            *dst = Temp2;
-            dst++;
+        Temp2 = Temp1 * (pInstance->Current);
+        *dst = Temp2;
+        dst++;
 
-            Temp1 = *src;
-            src++;
+        Temp1 = *src;
+        src++;
 
-            Temp2 = Temp1 * (pInstance->Current);
-            *dst = Temp2;
-            dst++;
+        Temp2 = Temp1 * (pInstance->Current);
+        *dst = Temp2;
+        dst++;
 
-            Temp1 = *src;
-            src++;
-            Temp2 = Temp1 * (pInstance->Current);
-            *dst = Temp2;
-            dst++;
+        Temp1 = *src;
+        src++;
+        Temp2 = Temp1 * (pInstance->Current);
+        *dst = Temp2;
+        dst++;
     }
 }
 /**********************************************************************************/
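
Both soft mixers above ramp their gain with the same one-pole smoother, Current = (1 - Alpha) * Target + Alpha * Current, refreshed once for the remainder block and then once per group of four samples, with a tiny upward bias so a rising ramp actually reaches Target. A sketch of that cadence, assuming LVM_FLOAT is float and folding the remainder/group-of-four split into a single loop:

    // One-pole gain ramp as used by Core_MixSoft_1St_D32C31_WRA above (sketch).
    void mix_soft(float* dst, const float* src, int n, float target, float alpha,
                  float& current) {
        float targetTerm = (1.0f - alpha) * target;
        if (target >= current) {
            targetTerm += 2.0f / 2147483647.0f;  // same "ceil" bias as the library
        }
        for (int i = 0; i < n; ++i) {
            if ((i & 3) == 0) {                          // gain update every 4 samples
                current = targetTerm + alpha * current;
            }
            dst[i] = src[i] * current;
        }
    }
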
diff --git a/media/libeffects/lvm/lib/Common/src/DC_2I_D16_TRC_WRA_01.cpp b/media/libeffects/lvm/lib/Common/src/DC_2I_D16_TRC_WRA_01.cpp
index a7ce4d3..2861be6 100644
--- a/media/libeffects/lvm/lib/Common/src/DC_2I_D16_TRC_WRA_01.cpp
+++ b/media/libeffects/lvm/lib/Common/src/DC_2I_D16_TRC_WRA_01.cpp
@@ -18,50 +18,47 @@
 #include "BIQUAD.h"
 #include "DC_2I_D16_TRC_WRA_01_Private.h"
 #include "LVM_Macros.h"
-void DC_2I_D16_TRC_WRA_01( Biquad_FLOAT_Instance_t       *pInstance,
-                           LVM_FLOAT               *pDataIn,
-                           LVM_FLOAT               *pDataOut,
-                           LVM_INT16               NrSamples)
-    {
-        LVM_FLOAT LeftDC,RightDC;
-        LVM_FLOAT Diff;
-        LVM_INT32 j;
-        PFilter_FLOAT_State pBiquadState = (PFilter_FLOAT_State) pInstance;
+void DC_2I_D16_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+                          LVM_FLOAT* pDataOut, LVM_INT16 NrSamples) {
+    LVM_FLOAT LeftDC, RightDC;
+    LVM_FLOAT Diff;
+    LVM_INT32 j;
+    PFilter_FLOAT_State pBiquadState = (PFilter_FLOAT_State)pInstance;
 
-        LeftDC = pBiquadState->LeftDC;
-        RightDC = pBiquadState->RightDC;
-        for(j = NrSamples-1; j >= 0; j--)
-        {
-            /* Subtract DC and saturate */
-            Diff =* (pDataIn++) - (LeftDC);
-            if (Diff > 1.0f) {
-                Diff = 1.0f; }
-            else if (Diff < -1.0f) {
-                Diff = -1.0f; }
-            *(pDataOut++) = (LVM_FLOAT)Diff;
-            if (Diff < 0) {
-                LeftDC -= DC_FLOAT_STEP; }
-            else {
-                LeftDC += DC_FLOAT_STEP; }
-
-            /* Subtract DC an saturate */
-            Diff =* (pDataIn++) - (RightDC);
-            if (Diff > 1.0f) {
-                Diff = 1.0f; }
-            else if (Diff < -1.0f) {
-                Diff = -1.0f; }
-            *(pDataOut++) = (LVM_FLOAT)Diff;
-            if (Diff < 0) {
-                RightDC -= DC_FLOAT_STEP; }
-            else {
-                RightDC += DC_FLOAT_STEP; }
-
+    LeftDC = pBiquadState->LeftDC;
+    RightDC = pBiquadState->RightDC;
+    for (j = NrSamples - 1; j >= 0; j--) {
+        /* Subtract DC and saturate */
+        Diff = *(pDataIn++) - (LeftDC);
+        if (Diff > 1.0f) {
+            Diff = 1.0f;
+        } else if (Diff < -1.0f) {
+            Diff = -1.0f;
         }
-        pBiquadState->LeftDC = LeftDC;
-        pBiquadState->RightDC = RightDC;
+        *(pDataOut++) = (LVM_FLOAT)Diff;
+        if (Diff < 0) {
+            LeftDC -= DC_FLOAT_STEP;
+        } else {
+            LeftDC += DC_FLOAT_STEP;
+        }
 
+        /* Subtract DC and saturate */
+        Diff = *(pDataIn++) - (RightDC);
+        if (Diff > 1.0f) {
+            Diff = 1.0f;
+        } else if (Diff < -1.0f) {
+            Diff = -1.0f;
+        }
+        *(pDataOut++) = (LVM_FLOAT)Diff;
+        if (Diff < 0) {
+            RightDC -= DC_FLOAT_STEP;
+        } else {
+            RightDC += DC_FLOAT_STEP;
+        }
     }
-#ifdef SUPPORT_MC
+    pBiquadState->LeftDC = LeftDC;
+    pBiquadState->RightDC = RightDC;
+}
 /*
  * FUNCTION:       DC_Mc_D16_TRC_WRA_01
  *
@@ -79,37 +76,30 @@
  *  void
  *
  */
-void DC_Mc_D16_TRC_WRA_01(Biquad_FLOAT_Instance_t       *pInstance,
-                          LVM_FLOAT               *pDataIn,
-                          LVM_FLOAT               *pDataOut,
-                          LVM_INT16               NrFrames,
-                          LVM_INT16               NrChannels)
-    {
-        LVM_FLOAT *ChDC;
-        LVM_FLOAT Diff;
-        LVM_INT32 j;
-        LVM_INT32 i;
-        PFilter_FLOAT_State_Mc pBiquadState = (PFilter_FLOAT_State_Mc) pInstance;
+void DC_Mc_D16_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+                          LVM_FLOAT* pDataOut, LVM_INT16 NrFrames, LVM_INT16 NrChannels) {
+    LVM_FLOAT* ChDC;
+    LVM_FLOAT Diff;
+    LVM_INT32 j;
+    LVM_INT32 i;
+    PFilter_FLOAT_State_Mc pBiquadState = (PFilter_FLOAT_State_Mc)pInstance;
 
-        ChDC = &pBiquadState->ChDC[0];
-        for (j = NrFrames - 1; j >= 0; j--)
-        {
-            /* Subtract DC and saturate */
-            for (i = NrChannels - 1; i >= 0; i--)
-            {
-                Diff = *(pDataIn++) - (ChDC[i]);
-                if (Diff > 1.0f) {
-                    Diff = 1.0f;
-                } else if (Diff < -1.0f) {
-                    Diff = -1.0f; }
-                *(pDataOut++) = (LVM_FLOAT)Diff;
-                if (Diff < 0) {
-                    ChDC[i] -= DC_FLOAT_STEP;
-                } else {
-                    ChDC[i] += DC_FLOAT_STEP; }
+    ChDC = &pBiquadState->ChDC[0];
+    for (j = NrFrames - 1; j >= 0; j--) {
+        /* Subtract DC and saturate */
+        for (i = NrChannels - 1; i >= 0; i--) {
+            Diff = *(pDataIn++) - (ChDC[i]);
+            if (Diff > 1.0f) {
+                Diff = 1.0f;
+            } else if (Diff < -1.0f) {
+                Diff = -1.0f;
             }
-
+            *(pDataOut++) = (LVM_FLOAT)Diff;
+            if (Diff < 0) {
+                ChDC[i] -= DC_FLOAT_STEP;
+            } else {
+                ChDC[i] += DC_FLOAT_STEP;
+            }
         }
-
     }
-#endif
+}
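
The stereo and multichannel DC blockers above share one idea: keep a running DC estimate per channel, subtract it, clamp the result to [-1, 1], and nudge the estimate by a fixed step in whichever direction drives the output back toward zero. The same logic for a single channel, assuming LVM_FLOAT is float:

    // Per-channel core of DC_2I_D16_TRC_WRA_01 / DC_Mc_D16_TRC_WRA_01 (sketch).
    void dc_remove(float* io, int frames, float& dc) {
        const float kStep = 0.0000002384f;  // DC_FLOAT_STEP from the private header
        for (int i = 0; i < frames; ++i) {
            float diff = io[i] - dc;
            if (diff > 1.0f) diff = 1.0f;        // saturate
            else if (diff < -1.0f) diff = -1.0f;
            io[i] = diff;
            dc += (diff < 0) ? -kStep : kStep;   // track the residual offset
        }
    }
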
diff --git a/media/libeffects/lvm/lib/Common/src/DC_2I_D16_TRC_WRA_01_Init.cpp b/media/libeffects/lvm/lib/Common/src/DC_2I_D16_TRC_WRA_01_Init.cpp
index beee112..2828cb3 100644
--- a/media/libeffects/lvm/lib/Common/src/DC_2I_D16_TRC_WRA_01_Init.cpp
+++ b/media/libeffects/lvm/lib/Common/src/DC_2I_D16_TRC_WRA_01_Init.cpp
@@ -17,20 +17,15 @@
 
 #include "BIQUAD.h"
 #include "DC_2I_D16_TRC_WRA_01_Private.h"
-void  DC_2I_D16_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t   *pInstance)
-{
-    PFilter_FLOAT_State pBiquadState  = (PFilter_FLOAT_State) pInstance;
-    pBiquadState->LeftDC        = 0.0f;
-    pBiquadState->RightDC       = 0.0f;
+void DC_2I_D16_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t* pInstance) {
+    PFilter_FLOAT_State pBiquadState = (PFilter_FLOAT_State)pInstance;
+    pBiquadState->LeftDC = 0.0f;
+    pBiquadState->RightDC = 0.0f;
 }
-#ifdef SUPPORT_MC
-void  DC_Mc_D16_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t   *pInstance)
-{
-    PFilter_FLOAT_State_Mc pBiquadState  = (PFilter_FLOAT_State_Mc) pInstance;
+void DC_Mc_D16_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t* pInstance) {
+    PFilter_FLOAT_State_Mc pBiquadState = (PFilter_FLOAT_State_Mc)pInstance;
     LVM_INT32 i;
-    for (i = 0; i < LVM_MAX_CHANNELS; i++)
-    {
+    for (i = 0; i < LVM_MAX_CHANNELS; i++) {
         pBiquadState->ChDC[i] = 0.0f;
     }
 }
-#endif
diff --git a/media/libeffects/lvm/lib/Common/src/DC_2I_D16_TRC_WRA_01_Private.h b/media/libeffects/lvm/lib/Common/src/DC_2I_D16_TRC_WRA_01_Private.h
index 6508b73..8f459d2 100644
--- a/media/libeffects/lvm/lib/Common/src/DC_2I_D16_TRC_WRA_01_Private.h
+++ b/media/libeffects/lvm/lib/Common/src/DC_2I_D16_TRC_WRA_01_Private.h
@@ -18,21 +18,17 @@
 #ifndef _DC_2I_D16_TRC_WRA_01_PRIVATE_H_
 #define _DC_2I_D16_TRC_WRA_01_PRIVATE_H_
 
-#define DC_FLOAT_STEP   0.0000002384f;
+#define DC_FLOAT_STEP 0.0000002384f
 
 /* The internal state variables are implemented in a (for the user)  hidden structure */
 /* In this (private) file, the internal structure is declared for private use.*/
-typedef struct _Filter_FLOAT_State_
-{
-    LVM_FLOAT  LeftDC;     /* LeftDC  */
-    LVM_FLOAT  RightDC;    /* RightDC  */
-}Filter_FLOAT_State;
-typedef Filter_FLOAT_State * PFilter_FLOAT_State ;
-#ifdef SUPPORT_MC
-typedef struct _Filter_FLOAT_State_Mc_
-{
-    LVM_FLOAT  ChDC[LVM_MAX_CHANNELS];     /* ChannelDC  */
+typedef struct _Filter_FLOAT_State_ {
+    LVM_FLOAT LeftDC;  /* LeftDC  */
+    LVM_FLOAT RightDC; /* RightDC  */
+} Filter_FLOAT_State;
+typedef Filter_FLOAT_State* PFilter_FLOAT_State;
+typedef struct _Filter_FLOAT_State_Mc_ {
+    LVM_FLOAT ChDC[LVM_MAX_CHANNELS]; /* ChannelDC  */
 } Filter_FLOAT_State_Mc;
-typedef Filter_FLOAT_State_Mc * PFilter_FLOAT_State_Mc ;
-#endif
+typedef Filter_FLOAT_State_Mc* PFilter_FLOAT_State_Mc;
 #endif /* _DC_2I_D16_TRC_WRA_01_PRIVATE_H_ */
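
One detail in this header goes beyond whitespace: the trailing semicolon is dropped from DC_FLOAT_STEP. The old definition baked a semicolon into every expansion, which happened to compile in the statement contexts used above (it just produced an extra empty statement) but would break the moment the macro appeared inside an expression. A hypothetical illustration, not taken from this patch:

    #define DC_FLOAT_STEP_OLD 0.0000002384f;  // old style: semicolon inside the macro
    #define DC_FLOAT_STEP_NEW 0.0000002384f   // new style

    float nudge(float dc, bool up) {
        // return up ? DC_FLOAT_STEP_OLD : 0.0f;  // would not compile: expands to "...f; : 0.0f"
        return up ? DC_FLOAT_STEP_NEW : 0.0f;     // fine with the corrected macro
    }
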
diff --git a/media/libeffects/lvm/lib/Common/src/DelayAllPass_Sat_32x16To32.cpp b/media/libeffects/lvm/lib/Common/src/DelayAllPass_Sat_32x16To32.cpp
index 771fae2..5daef59 100644
--- a/media/libeffects/lvm/lib/Common/src/DelayAllPass_Sat_32x16To32.cpp
+++ b/media/libeffects/lvm/lib/Common/src/DelayAllPass_Sat_32x16To32.cpp
@@ -27,54 +27,44 @@
    FUNCTION DelayAllPass_32x32
 ***********************************************************************************/
 
-void DelayAllPass_Sat_32x16To32(  LVM_INT32  *delay,                    /* Delay buffer */
-                                  LVM_UINT16 size,                      /* Delay size */
-                                  LVM_INT16 coeff,                      /* All pass filter coefficient */
-                                  LVM_UINT16 DelayOffset,               /* Simple delay offset */
-                                  LVM_UINT16 *pAllPassOffset,           /* All pass filter delay offset */
-                                  LVM_INT32  *dst,                      /* Source/destination */
-                                  LVM_INT16 n)                          /* Number of  samples */
+void DelayAllPass_Sat_32x16To32(LVM_INT32* delay,           /* Delay buffer */
+                                LVM_UINT16 size,            /* Delay size */
+                                LVM_INT16 coeff,            /* All pass filter coefficient */
+                                LVM_UINT16 DelayOffset,     /* Simple delay offset */
+                                LVM_UINT16* pAllPassOffset, /* All pass filter delay offset */
+                                LVM_INT32* dst,             /* Source/destination */
+                                LVM_INT16 n)                /* Number of  samples */
 {
-    LVM_INT16   i;
-    LVM_UINT16   AllPassOffset = *pAllPassOffset;
-    LVM_INT32    temp;
-    LVM_INT32    a,b,c;
+    LVM_INT16 i;
+    LVM_UINT16 AllPassOffset = *pAllPassOffset;
+    LVM_INT32 temp;
+    LVM_INT32 a, b, c;
 
-    for (i = 0; i < n; i++)
-    {
-
-        MUL32x16INTO32(delay[AllPassOffset], coeff, temp, 15)
-        a = temp;
+    for (i = 0; i < n; i++) {
+        MUL32x16INTO32(delay[AllPassOffset], coeff, temp, 15) a = temp;
         b = delay[DelayOffset];
         DelayOffset++;
 
         c = a + b;
-        if ((((c ^ a) & (c ^ b)) >> 31) != 0)  /* overflow / underflow */
+        if ((((c ^ a) & (c ^ b)) >> 31) != 0) /* overflow / underflow */
         {
-            if(a < 0)
-            {
+            if (a < 0) {
                 c = 0x80000000L;
-            }
-            else
-            {
+            } else {
                 c = 0x7FFFFFFFL;
             }
         }
         *dst = c;
         dst++;
 
-        MUL32x16INTO32(c, -coeff, temp, 15)
-        a = temp;
+        MUL32x16INTO32(c, -coeff, temp, 15) a = temp;
         b = delay[AllPassOffset];
         c = a + b;
-        if ((((c ^ a) & (c ^ b)) >> 31)!=0)  /* overflow / underflow */
+        if ((((c ^ a) & (c ^ b)) >> 31) != 0) /* overflow / underflow */
         {
-            if(a < 0)
-            {
+            if (a < 0) {
                 c = 0x80000000L;
-            }
-            else
-            {
+            } else {
                 c = 0x7FFFFFFFL;
             }
         }
@@ -82,13 +72,11 @@
         AllPassOffset++;
 
         /* Make the delay buffer a circular buffer */
-        if (DelayOffset >= size)
-        {
+        if (DelayOffset >= size) {
             DelayOffset = 0;
         }
 
-        if (AllPassOffset >= size)
-        {
+        if (AllPassOffset >= size) {
             AllPassOffset = 0;
         }
     }
@@ -100,4 +88,3 @@
 }
 
 /**********************************************************************************/
-
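
Two notes on this file. First, clang-format folds each MUL32x16INTO32(...) invocation onto the same line as the statement that follows it, apparently because the macro is invoked without a trailing semicolon; the generated code is unchanged. Second, the overflow check that appears twice is the standard two's-complement test: a sum overflows exactly when its sign differs from the sign of both operands. Isolated as a helper (a sketch, using an unsigned add so the wraparound is well defined):

    #include <stdint.h>

    static inline int32_t add_sat32(int32_t a, int32_t b) {
        int32_t c = (int32_t)((uint32_t)a + (uint32_t)b);  // wrapping add
        if (((c ^ a) & (c ^ b)) < 0) {                     // same test as ">> 31 != 0" above
            c = (a < 0) ? INT32_MIN : INT32_MAX;           // 0x80000000L / 0x7FFFFFFFL
        }
        return c;
    }
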
diff --git a/media/libeffects/lvm/lib/Common/src/DelayMix_16x16.cpp b/media/libeffects/lvm/lib/Common/src/DelayMix_16x16.cpp
index 52d263f..da75982 100644
--- a/media/libeffects/lvm/lib/Common/src/DelayMix_16x16.cpp
+++ b/media/libeffects/lvm/lib/Common/src/DelayMix_16x16.cpp
@@ -25,19 +25,18 @@
    FUNCTION DelayMix_16x16
 ***********************************************************************************/
 
-void DelayMix_16x16(const LVM_INT16 *src,           /* Source 1, to be delayed */
-                          LVM_INT16 *delay,         /* Delay buffer */
-                          LVM_INT16 size,           /* Delay size */
-                          LVM_INT16 *dst,           /* Source/destination */
-                          LVM_INT16 *pOffset,       /* Delay offset */
-                          LVM_INT16 n)              /* Number of stereo samples */
+void DelayMix_16x16(const LVM_INT16* src, /* Source 1, to be delayed */
+                    LVM_INT16* delay,     /* Delay buffer */
+                    LVM_INT16 size,       /* Delay size */
+                    LVM_INT16* dst,       /* Source/destination */
+                    LVM_INT16* pOffset,   /* Delay offset */
+                    LVM_INT16 n)          /* Number of stereo samples */
 {
-    LVM_INT16   i;
-    LVM_INT16   Offset  = *pOffset;
-    LVM_INT16   temp;
+    LVM_INT16 i;
+    LVM_INT16 Offset = *pOffset;
+    LVM_INT16 temp;
 
-    for (i = 0; i < n; i++)
-    {
+    for (i = 0; i < n; i++) {
         /* Left channel */
         temp = (LVM_INT16)((LVM_UINT32)((LVM_INT32)(*dst) + (LVM_INT32)delay[Offset]) >> 1);
         *dst = temp;
@@ -57,8 +56,7 @@
         src++;
 
         /* Make the reverb delay buffer a circular buffer */
-        if (Offset >= size)
-        {
+        if (Offset >= size) {
             Offset = 0;
         }
     }
@@ -68,22 +66,21 @@
 
     return;
 }
-void DelayMix_Float(const LVM_FLOAT *src,           /* Source 1, to be delayed */
-                          LVM_FLOAT *delay,         /* Delay buffer */
-                          LVM_INT16 size,           /* Delay size */
-                          LVM_FLOAT *dst,           /* Source/destination */
-                          LVM_INT16 *pOffset,       /* Delay offset */
-                          LVM_INT16 n)              /* Number of stereo samples */
+void DelayMix_Float(const LVM_FLOAT* src, /* Source 1, to be delayed */
+                    LVM_FLOAT* delay,     /* Delay buffer */
+                    LVM_INT16 size,       /* Delay size */
+                    LVM_FLOAT* dst,       /* Source/destination */
+                    LVM_INT16* pOffset,   /* Delay offset */
+                    LVM_INT16 n)          /* Number of stereo samples */
 {
-    LVM_INT16   i;
-    LVM_INT16   Offset  = *pOffset;
-    LVM_FLOAT   temp;
+    LVM_INT16 i;
+    LVM_INT16 Offset = *pOffset;
+    LVM_FLOAT temp;
 
-    for (i=0; i<n; i++)
-    {
+    for (i = 0; i < n; i++) {
         /* Left channel */
-        temp            = (LVM_FLOAT)((LVM_FLOAT)(*dst + (LVM_FLOAT)delay[Offset]) / 2.0f);
-        *dst            = temp;
+        temp = (LVM_FLOAT)((LVM_FLOAT)(*dst + (LVM_FLOAT)delay[Offset]) / 2.0f);
+        *dst = temp;
         dst++;
 
         delay[Offset] = *src;
@@ -91,8 +88,8 @@
         src++;
 
         /* Right channel */
-        temp            = (LVM_FLOAT)((LVM_FLOAT)(*dst - (LVM_FLOAT)delay[Offset]) / 2.0f);
-        *dst            = temp;
+        temp = (LVM_FLOAT)((LVM_FLOAT)(*dst - (LVM_FLOAT)delay[Offset]) / 2.0f);
+        *dst = temp;
         dst++;
 
         delay[Offset] = *src;
@@ -100,8 +97,7 @@
         src++;
 
         /* Make the reverb delay buffer a circular buffer */
-        if (Offset >= size)
-        {
+        if (Offset >= size) {
             Offset = 0;
         }
     }
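
Both delay mixers above drive the same circular-buffer bookkeeping: read the oldest sample at Offset, store the newest input in its place, advance, and wrap at size. Reduced to a mono helper (the library versions interleave left/right and also fold the delayed signal into dst at half gain):

    #include <stdint.h>

    // Circular delay read/write as in DelayMix_16x16 / DelayMix_Float (sketch).
    static inline float delay_read_write(float* delay, int16_t size, int16_t& offset,
                                         float in) {
        float delayed = delay[offset];      // oldest stored sample
        delay[offset] = in;                 // overwrite with the newest input
        if (++offset >= size) offset = 0;   // wrap: the buffer is circular
        return delayed;
    }
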
diff --git a/media/libeffects/lvm/lib/Common/src/DelayWrite_32.cpp b/media/libeffects/lvm/lib/Common/src/DelayWrite_32.cpp
index 809cddc..47cffbf 100644
--- a/media/libeffects/lvm/lib/Common/src/DelayWrite_32.cpp
+++ b/media/libeffects/lvm/lib/Common/src/DelayWrite_32.cpp
@@ -25,24 +25,22 @@
    FUNCTION DelayMix_16x16
 ***********************************************************************************/
 
-void DelayWrite_32(const LVM_INT32  *src,               /* Source 1, to be delayed */
-                         LVM_INT32  *delay,             /* Delay buffer */
-                         LVM_UINT16 size,               /* Delay size */
-                         LVM_UINT16 *pOffset,           /* Delay offset */
-                         LVM_INT16  n)                  /* Number of samples */
+void DelayWrite_32(const LVM_INT32* src, /* Source 1, to be delayed */
+                   LVM_INT32* delay,     /* Delay buffer */
+                   LVM_UINT16 size,      /* Delay size */
+                   LVM_UINT16* pOffset,  /* Delay offset */
+                   LVM_INT16 n)          /* Number of samples */
 {
-    LVM_INT16   i;
-    LVM_INT16   Offset  = (LVM_INT16)*pOffset;
+    LVM_INT16 i;
+    LVM_INT16 Offset = (LVM_INT16)*pOffset;
 
-    for (i=0; i<n; i++)
-    {
+    for (i = 0; i < n; i++) {
         delay[Offset] = *src;
         Offset++;
         src++;
 
         /* Make the delay buffer a circular buffer */
-        if (Offset >= size)
-        {
+        if (Offset >= size) {
             Offset = 0;
         }
     }
@@ -54,4 +52,3 @@
 }
 
 /**********************************************************************************/
-
diff --git a/media/libeffects/lvm/lib/Common/src/FO_1I_D16F16C15_TRC_WRA_01.cpp b/media/libeffects/lvm/lib/Common/src/FO_1I_D16F16C15_TRC_WRA_01.cpp
index bef0d62..df8fadc 100644
--- a/media/libeffects/lvm/lib/Common/src/FO_1I_D16F16C15_TRC_WRA_01.cpp
+++ b/media/libeffects/lvm/lib/Common/src/FO_1I_D16F16C15_TRC_WRA_01.cpp
@@ -31,41 +31,34 @@
  pBiquadState->pDelays[1] is y(n-1)L in Q0 format
 ***************************************************************************/
 
-void FO_1I_D16F16C15_TRC_WRA_01( Biquad_FLOAT_Instance_t       *pInstance,
-                                 LVM_FLOAT               *pDataIn,
-                                 LVM_FLOAT               *pDataOut,
-                                 LVM_INT16               NrSamples)
-    {
-        LVM_FLOAT  ynL;
-        LVM_INT16 ii;
-        PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
+void FO_1I_D16F16C15_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+                                LVM_FLOAT* pDataOut, LVM_INT16 NrSamples) {
+    LVM_FLOAT ynL;
+    LVM_INT16 ii;
+    PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT)pInstance;
 
-         for (ii = NrSamples; ii != 0; ii--)
-         {
+    for (ii = NrSamples; ii != 0; ii--) {
+        /**************************************************************************
+                        PROCESSING OF THE LEFT CHANNEL
+        ***************************************************************************/
+        // ynL=A1  * x(n-1)L
+        ynL = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[0];
 
-            /**************************************************************************
-                            PROCESSING OF THE LEFT CHANNEL
-            ***************************************************************************/
-            // ynL=A1  * x(n-1)L
-            ynL = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[0];
+        // ynL+=A0  * x(n)L
+        ynL += (LVM_FLOAT)pBiquadState->coefs[1] * (*pDataIn);
 
-            // ynL+=A0  * x(n)L
-            ynL += (LVM_FLOAT)pBiquadState->coefs[1] * (*pDataIn);
+        // ynL+=  (-B1  * y(n-1)L
+        ynL += (LVM_FLOAT)pBiquadState->coefs[2] * pBiquadState->pDelays[1];
 
-            // ynL+=  (-B1  * y(n-1)L
-            ynL += (LVM_FLOAT)pBiquadState->coefs[2] * pBiquadState->pDelays[1];
+        /**************************************************************************
+                        UPDATING THE DELAYS
+        ***************************************************************************/
+        pBiquadState->pDelays[1] = ynL;           // Update y(n-1)L
+        pBiquadState->pDelays[0] = (*pDataIn++);  // Update x(n-1)L
 
-            /**************************************************************************
-                            UPDATING THE DELAYS
-            ***************************************************************************/
-            pBiquadState->pDelays[1] = ynL; // Update y(n-1)L
-            pBiquadState->pDelays[0] = (*pDataIn++); // Update x(n-1)L
-
-            /**************************************************************************
-                            WRITING THE OUTPUT
-            ***************************************************************************/
-            *pDataOut++ = (LVM_FLOAT)ynL; // Write Left output
-
-        }
-
+        /**************************************************************************
+                        WRITING THE OUTPUT
+        ***************************************************************************/
+        *pDataOut++ = (LVM_FLOAT)ynL;  // Write Left output
     }
+}
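
The loop comments above spell out a plain first-order recurrence, with pDelays[0] holding x(n-1) and pDelays[1] holding y(n-1), and with coefs[0]/coefs[1]/coefs[2] loaded by the matching *_Init routine (the loop comment labels the coefs[2] term "-B1 * y(n-1)"). Restated compactly, assuming LVM_FLOAT is float:

    // y[n] = coefs[1]*x[n] + coefs[0]*x[n-1] + coefs[2]*y[n-1]   (sketch)
    void fo_filter(const float* in, float* out, int n, const float coefs[3],
                   float delays[2]) {
        for (int i = 0; i < n; ++i) {
            float y = coefs[0] * delays[0]    // A1 * x[n-1]
                    + coefs[1] * in[i]        // A0 * x[n]
                    + coefs[2] * delays[1];   // feedback term
            delays[1] = y;                    // becomes y[n-1] for the next sample
            delays[0] = in[i];                // becomes x[n-1] for the next sample
            out[i] = y;
        }
    }
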
diff --git a/media/libeffects/lvm/lib/Common/src/FO_1I_D16F16Css_TRC_WRA_01_Init.cpp b/media/libeffects/lvm/lib/Common/src/FO_1I_D16F16Css_TRC_WRA_01_Init.cpp
index 161225e..10604bf 100644
--- a/media/libeffects/lvm/lib/Common/src/FO_1I_D16F16Css_TRC_WRA_01_Init.cpp
+++ b/media/libeffects/lvm/lib/Common/src/FO_1I_D16F16Css_TRC_WRA_01_Init.cpp
@@ -37,13 +37,12 @@
 /* RETURNS:                                                                */
 /*   void return code                                                      */
 /*-------------------------------------------------------------------------*/
-void FO_1I_D16F16Css_TRC_WRA_01_Init(    Biquad_FLOAT_Instance_t         *pInstance,
-                                         Biquad_1I_Order1_FLOAT_Taps_t   *pTaps,
-                                         FO_FLOAT_Coefs_t            *pCoef)
-{
+void FO_1I_D16F16Css_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t* pInstance,
+                                     Biquad_1I_Order1_FLOAT_Taps_t* pTaps,
+                                     FO_FLOAT_Coefs_t* pCoef) {
     LVM_FLOAT temp;
-    PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT)  pInstance;
-    pBiquadState->pDelays      = (LVM_FLOAT *)pTaps;
+    PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT)pInstance;
+    pBiquadState->pDelays = (LVM_FLOAT*)pTaps;
     temp = pCoef->A1;
     pBiquadState->coefs[0] = temp;
     temp = pCoef->A0;
@@ -53,4 +52,3 @@
 }
 /*------------------------------------------------*/
 /* End Of File: FO_1I_D16F16Css_TRC_WRA_01_Init.c */
-
diff --git a/media/libeffects/lvm/lib/Common/src/FO_1I_D16F16Css_TRC_WRA_01_Private.h b/media/libeffects/lvm/lib/Common/src/FO_1I_D16F16Css_TRC_WRA_01_Private.h
index 34f3df9..d1819fc 100644
--- a/media/libeffects/lvm/lib/Common/src/FO_1I_D16F16Css_TRC_WRA_01_Private.h
+++ b/media/libeffects/lvm/lib/Common/src/FO_1I_D16F16Css_TRC_WRA_01_Private.h
@@ -20,20 +20,18 @@
 
 /* The internal state variables are implemented in a (for the user)  hidden structure */
 /* In this (private) file, the internal structure is declared for private use.        */
-typedef struct _Filter_State_
-{
-  LVM_INT32*        pDelays;        /* pointer to the delayed samples (data of 32 bits)   */
-  LVM_INT16         coefs[3];       /* pointer to the filter coefficients */
-}Filter_State;
+typedef struct _Filter_State_ {
+    LVM_INT32* pDelays; /* pointer to the delayed samples (data of 32 bits)   */
+    LVM_INT16 coefs[3]; /* pointer to the filter coefficients */
+} Filter_State;
 
-typedef Filter_State * PFilter_State ;
+typedef Filter_State* PFilter_State;
 
-typedef struct _Filter_State_FLOAT
-{
-    LVM_FLOAT *                          pDelays;        /* pointer to the delayed samples \
-                                                            (data of 32 bits)   */
-    LVM_FLOAT                            coefs[3];       /* pointer to the filter coefficients */
-}Filter_State_FLOAT;
+typedef struct _Filter_State_FLOAT {
+    LVM_FLOAT* pDelays; /* pointer to the delayed samples \
+                           (data of 32 bits)   */
+    LVM_FLOAT coefs[3]; /* pointer to the filter coefficients */
+} Filter_State_FLOAT;
 
-typedef Filter_State_FLOAT * PFilter_State_FLOAT ;
+typedef Filter_State_FLOAT* PFilter_State_FLOAT;
 #endif /* _FO_1I_D16F16CSS_TRC_WRA_01_PRIVATE_H_ */
diff --git a/media/libeffects/lvm/lib/Common/src/FO_1I_D32F32C31_TRC_WRA_01.cpp b/media/libeffects/lvm/lib/Common/src/FO_1I_D32F32C31_TRC_WRA_01.cpp
index e3efad7..4c75e04 100644
--- a/media/libeffects/lvm/lib/Common/src/FO_1I_D32F32C31_TRC_WRA_01.cpp
+++ b/media/libeffects/lvm/lib/Common/src/FO_1I_D32F32C31_TRC_WRA_01.cpp
@@ -30,42 +30,36 @@
  pBiquadState->pDelays[0] is x(n-1)L in Q0 format
  pBiquadState->pDelays[1] is y(n-1)L in Q0 format
 ***************************************************************************/
-void FO_1I_D32F32C31_TRC_WRA_01( Biquad_FLOAT_Instance_t       *pInstance,
-                                 LVM_FLOAT               *pDataIn,
-                                 LVM_FLOAT               *pDataOut,
-                                 LVM_INT16               NrSamples)
-    {
-        LVM_FLOAT  ynL,templ;
-        LVM_INT16  ii;
-        PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
+void FO_1I_D32F32C31_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+                                LVM_FLOAT* pDataOut, LVM_INT16 NrSamples) {
+    LVM_FLOAT ynL, templ;
+    LVM_INT16 ii;
+    PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT)pInstance;
 
-        for (ii = NrSamples; ii != 0; ii--)
-        {
+    for (ii = NrSamples; ii != 0; ii--) {
+        /**************************************************************************
+                        PROCESSING OF THE LEFT CHANNEL
+        ***************************************************************************/
+        // ynL=A1  * x(n-1)L
+        ynL = pBiquadState->coefs[0] * pBiquadState->pDelays[0];
 
-            /**************************************************************************
-                            PROCESSING OF THE LEFT CHANNEL
-            ***************************************************************************/
-            // ynL=A1  * x(n-1)L
-            ynL = pBiquadState->coefs[0] * pBiquadState->pDelays[0];
+        // ynL+=A0  * x(n)L
+        templ = pBiquadState->coefs[1] * (*pDataIn);
+        ynL += templ;
 
-            // ynL+=A0  * x(n)L
-            templ = pBiquadState->coefs[1] * (*pDataIn);
-            ynL += templ;
+        // ynL+=  (-B1  * y(n-1)L
+        templ = pBiquadState->coefs[2] * pBiquadState->pDelays[1];
+        ynL += templ;
 
-            // ynL+=  (-B1  * y(n-1)L
-            templ = pBiquadState->coefs[2] * pBiquadState->pDelays[1];
-            ynL += templ;
+        /**************************************************************************
+                        UPDATING THE DELAYS
+        ***************************************************************************/
+        pBiquadState->pDelays[1] = ynL;           // Update y(n-1)L
+        pBiquadState->pDelays[0] = (*pDataIn++);  // Update x(n-1)L
 
-            /**************************************************************************
-                            UPDATING THE DELAYS
-            ***************************************************************************/
-            pBiquadState->pDelays[1] = ynL; // Update y(n-1)L
-            pBiquadState->pDelays[0] = (*pDataIn++); // Update x(n-1)L
-
-            /**************************************************************************
-                            WRITING THE OUTPUT
-            ***************************************************************************/
-            *pDataOut++ = (LVM_FLOAT)ynL; // Write Left output in Q0
-        }
-
+        /**************************************************************************
+                        WRITING THE OUTPUT
+        ***************************************************************************/
+        *pDataOut++ = (LVM_FLOAT)ynL;  // Write Left output in Q0
     }
+}
diff --git a/media/libeffects/lvm/lib/Common/src/FO_1I_D32F32Cll_TRC_WRA_01_Init.cpp b/media/libeffects/lvm/lib/Common/src/FO_1I_D32F32Cll_TRC_WRA_01_Init.cpp
index bb5295c..bf2e5e1 100644
--- a/media/libeffects/lvm/lib/Common/src/FO_1I_D32F32Cll_TRC_WRA_01_Init.cpp
+++ b/media/libeffects/lvm/lib/Common/src/FO_1I_D32F32Cll_TRC_WRA_01_Init.cpp
@@ -36,13 +36,12 @@
 /* RETURNS:                                                                */
 /*   void return code                                                      */
 /*-------------------------------------------------------------------------*/
-void FO_1I_D32F32Cll_TRC_WRA_01_Init( Biquad_FLOAT_Instance_t         *pInstance,
-                                      Biquad_1I_Order1_FLOAT_Taps_t   *pTaps,
-                                      FO_FLOAT_Coefs_t            *pCoef)
-{
+void FO_1I_D32F32Cll_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t* pInstance,
+                                     Biquad_1I_Order1_FLOAT_Taps_t* pTaps,
+                                     FO_FLOAT_Coefs_t* pCoef) {
     LVM_FLOAT temp;
-    PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT)  pInstance;
-    pBiquadState->pDelays      = (LVM_FLOAT *)    pTaps;
+    PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT)pInstance;
+    pBiquadState->pDelays = (LVM_FLOAT*)pTaps;
 
     temp = pCoef->A1;
     pBiquadState->coefs[0] = temp;
@@ -53,4 +52,3 @@
 }
 /*------------------------------------------------*/
 /* End Of File: FO_1I_D32F32Cll_TRC_WRA_01_Init.c */
-
diff --git a/media/libeffects/lvm/lib/Common/src/FO_1I_D32F32Cll_TRC_WRA_01_Private.h b/media/libeffects/lvm/lib/Common/src/FO_1I_D32F32Cll_TRC_WRA_01_Private.h
index 67d1384..8645593 100644
--- a/media/libeffects/lvm/lib/Common/src/FO_1I_D32F32Cll_TRC_WRA_01_Private.h
+++ b/media/libeffects/lvm/lib/Common/src/FO_1I_D32F32Cll_TRC_WRA_01_Private.h
@@ -20,19 +20,17 @@
 
 /* The internal state variables are implemented in a (for the user)  hidden structure */
 /* In this (private) file, the internal structure is declared for private use.        */
-typedef struct _Filter_State_
-{
-  LVM_INT32 *       pDelays;        /* pointer to the delayed samples (data of 32 bits)   */
-  LVM_INT32         coefs[3];       /* pointer to the filter coefficients */
-}Filter_State;
+typedef struct _Filter_State_ {
+    LVM_INT32* pDelays; /* pointer to the delayed samples (data of 32 bits)   */
+    LVM_INT32 coefs[3]; /* pointer to the filter coefficients */
+} Filter_State;
 
-typedef Filter_State * PFilter_State ;
+typedef Filter_State* PFilter_State;
 
-typedef struct _Filter_State_FLOAT_
-{
-    LVM_FLOAT *       pDelays;        /* pointer to the delayed samples (data of 32 bits)   */
-    LVM_FLOAT         coefs[3];       /* pointer to the filter coefficients */
-}Filter_State_FLOAT;
+typedef struct _Filter_State_FLOAT_ {
+    LVM_FLOAT* pDelays; /* pointer to the delayed samples (data of 32 bits)   */
+    LVM_FLOAT coefs[3]; /* pointer to the filter coefficients */
+} Filter_State_FLOAT;
 
-typedef Filter_State_FLOAT * PFilter_State_FLOAT ;
+typedef Filter_State_FLOAT* PFilter_State_FLOAT;
 #endif /* _FO_1I_D32F32CLL_TRC_WRA_01_PRIVATE_H_ */
diff --git a/media/libeffects/lvm/lib/Common/src/FO_2I_D16F32C15_LShx_TRC_WRA_01.cpp b/media/libeffects/lvm/lib/Common/src/FO_2I_D16F32C15_LShx_TRC_WRA_01.cpp
index 6ca819a..dad070b 100644
--- a/media/libeffects/lvm/lib/Common/src/FO_2I_D16F32C15_LShx_TRC_WRA_01.cpp
+++ b/media/libeffects/lvm/lib/Common/src/FO_2I_D16F32C15_LShx_TRC_WRA_01.cpp
@@ -32,88 +32,73 @@
 pBiquadState->pDelays[2] is x(n-1)R in Q15 format
 pBiquadState->pDelays[3] is y(n-1)R in Q30 format
 ***************************************************************************/
-void FO_2I_D16F32C15_LShx_TRC_WRA_01(Biquad_FLOAT_Instance_t       *pInstance,
-                                     LVM_FLOAT               *pDataIn,
-                                     LVM_FLOAT               *pDataOut,
-                                     LVM_INT16               NrSamples)
-    {
-        LVM_FLOAT   ynL,ynR;
-        LVM_FLOAT   Temp;
-        LVM_FLOAT   NegSatValue;
-        LVM_INT16   ii;
+void FO_2I_D16F32C15_LShx_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+                                     LVM_FLOAT* pDataOut, LVM_INT16 NrSamples) {
+    LVM_FLOAT ynL, ynR;
+    LVM_FLOAT Temp;
+    LVM_FLOAT NegSatValue;
+    LVM_INT16 ii;
 
-        PFilter_Float_State pBiquadState = (PFilter_Float_State) pInstance;
+    PFilter_Float_State pBiquadState = (PFilter_Float_State)pInstance;
 
-        NegSatValue = -1.0f;
+    NegSatValue = -1.0f;
 
-        for (ii = NrSamples; ii != 0; ii--)
-        {
+    for (ii = NrSamples; ii != 0; ii--) {
+        /**************************************************************************
+                        PROCESSING OF THE LEFT CHANNEL
+        ***************************************************************************/
 
-            /**************************************************************************
-                            PROCESSING OF THE LEFT CHANNEL
-            ***************************************************************************/
+        // ynL =A1  * x(n-1)L
+        ynL = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[0];
+        // ynR =A1  * x(n-1)R
+        ynR = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[2];
 
-            // ynL =A1  * x(n-1)L
-            ynL = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[0];
-            // ynR =A1  * x(n-1)R
-            ynR = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[2];
+        // ynL+=A0  * x(n)L
+        ynL += (LVM_FLOAT)pBiquadState->coefs[1] * (*pDataIn);
+        // ynR+=A0  * x(n)L
+        ynR += (LVM_FLOAT)pBiquadState->coefs[1] * (*(pDataIn + 1));
 
-            // ynL+=A0  * x(n)L
-            ynL += (LVM_FLOAT)pBiquadState->coefs[1] * (*pDataIn);
-            // ynR+=A0  * x(n)L
-            ynR += (LVM_FLOAT)pBiquadState->coefs[1] * (*(pDataIn+1));
+        // ynL +=  (-B1  * y(n-1)L  )
+        Temp = pBiquadState->pDelays[1] * pBiquadState->coefs[2];
+        ynL += Temp;
+        // ynR +=  (-B1  * y(n-1)R ) )
+        Temp = pBiquadState->pDelays[3] * pBiquadState->coefs[2];
+        ynR += Temp;
 
-            // ynL +=  (-B1  * y(n-1)L  )
-            Temp = pBiquadState->pDelays[1] * pBiquadState->coefs[2];
-            ynL += Temp;
-            // ynR +=  (-B1  * y(n-1)R ) )
-            Temp = pBiquadState->pDelays[3] * pBiquadState->coefs[2];
-            ynR += Temp;
+        /**************************************************************************
+                        UPDATING THE DELAYS
+        ***************************************************************************/
+        pBiquadState->pDelays[1] = ynL;           // Update y(n-1)L
+        pBiquadState->pDelays[0] = (*pDataIn++);  // Update x(n-1)L
 
-            /**************************************************************************
-                            UPDATING THE DELAYS
-            ***************************************************************************/
-            pBiquadState->pDelays[1] = ynL; // Update y(n-1)L
-            pBiquadState->pDelays[0] = (*pDataIn++); // Update x(n-1)L
+        pBiquadState->pDelays[3] = ynR;           // Update y(n-1)R
+        pBiquadState->pDelays[2] = (*pDataIn++);  // Update x(n-1)R
 
-            pBiquadState->pDelays[3] = ynR; // Update y(n-1)R
-            pBiquadState->pDelays[2] = (*pDataIn++); // Update x(n-1)R
+        /**************************************************************************
+                        WRITING THE OUTPUT
+        ***************************************************************************/
 
-            /**************************************************************************
-                            WRITING THE OUTPUT
-            ***************************************************************************/
-
-            /*Saturate results*/
-            if(ynL > 1.0f)
-            {
-                ynL = 1.0f;
+        /*Saturate results*/
+        if (ynL > 1.0f) {
+            ynL = 1.0f;
+        } else {
+            if (ynL < NegSatValue) {
+                ynL = NegSatValue;
             }
-            else
-            {
-                if(ynL < NegSatValue)
-                {
-                    ynL = NegSatValue;
-                }
-            }
-
-            if(ynR > 1.0f)
-            {
-                ynR = 1.0f;
-            }
-            else
-            {
-                if(ynR < NegSatValue)
-                {
-                    ynR = NegSatValue;
-                }
-            }
-
-            *pDataOut++ = (LVM_FLOAT)ynL;
-            *pDataOut++ = (LVM_FLOAT)ynR;
         }
 
+        if (ynR > 1.0f) {
+            ynR = 1.0f;
+        } else {
+            if (ynR < NegSatValue) {
+                ynR = NegSatValue;
+            }
+        }
+
+        *pDataOut++ = (LVM_FLOAT)ynL;
+        *pDataOut++ = (LVM_FLOAT)ynR;
     }
-#ifdef SUPPORT_MC
+}
 /**************************************************************************
 ASSUMPTIONS:
 COEFS-
@@ -135,64 +120,56 @@
 RETURNS:
  void
 ***************************************************************************/
-void FO_Mc_D16F32C15_LShx_TRC_WRA_01(Biquad_FLOAT_Instance_t *pInstance,
-                                     LVM_FLOAT               *pDataIn,
-                                     LVM_FLOAT               *pDataOut,
-                                     LVM_INT16               NrFrames,
-                                     LVM_INT16               NrChannels)
-    {
-        LVM_FLOAT   yn;
-        LVM_FLOAT   Temp;
-        LVM_INT16   ii;
-        LVM_INT16   ch;
-        PFilter_Float_State pBiquadState = (PFilter_Float_State) pInstance;
+void FO_Mc_D16F32C15_LShx_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+                                     LVM_FLOAT* pDataOut, LVM_INT16 NrFrames,
+                                     LVM_INT16 NrChannels) {
+    LVM_FLOAT yn;
+    LVM_FLOAT Temp;
+    LVM_INT16 ii;
+    LVM_INT16 ch;
+    PFilter_Float_State pBiquadState = (PFilter_Float_State)pInstance;
 
-        LVM_FLOAT   *pDelays = pBiquadState->pDelays;
-        LVM_FLOAT   *pCoefs  = &pBiquadState->coefs[0];
-        LVM_FLOAT   A0 = pCoefs[1];
-        LVM_FLOAT   A1 = pCoefs[0];
-        LVM_FLOAT   B1 = pCoefs[2];
+    LVM_FLOAT* pDelays = pBiquadState->pDelays;
+    LVM_FLOAT* pCoefs = &pBiquadState->coefs[0];
+    LVM_FLOAT A0 = pCoefs[1];
+    LVM_FLOAT A1 = pCoefs[0];
+    LVM_FLOAT B1 = pCoefs[2];
 
-        for (ii = NrFrames; ii != 0; ii--)
-        {
+    for (ii = NrFrames; ii != 0; ii--) {
+        /**************************************************************************
+                        PROCESSING OF THE CHANNELS
+        ***************************************************************************/
+        for (ch = 0; ch < NrChannels; ch++) {
+            // yn =A1  * x(n-1)
+            yn = (LVM_FLOAT)A1 * pDelays[0];
+
+            // yn+=A0  * x(n)
+            yn += (LVM_FLOAT)A0 * (*pDataIn);
+
+            // yn +=  (-B1  * y(n-1))
+            Temp = B1 * pDelays[1];
+            yn += Temp;
 
             /**************************************************************************
-                            PROCESSING OF THE CHANNELS
+                            UPDATING THE DELAYS
             ***************************************************************************/
-            for (ch = 0; ch < NrChannels; ch++)
-            {
-                // yn =A1  * x(n-1)
-                yn = (LVM_FLOAT)A1 * pDelays[0];
+            pDelays[1] = yn;            // Update y(n-1)
+            pDelays[0] = (*pDataIn++);  // Update x(n-1)
 
-                // yn+=A0  * x(n)
-                yn += (LVM_FLOAT)A0 * (*pDataIn);
+            /**************************************************************************
+                            WRITING THE OUTPUT
+            ***************************************************************************/
 
-                // yn +=  (-B1  * y(n-1))
-                Temp = B1 * pDelays[1];
-                yn += Temp;
-
-                /**************************************************************************
-                                UPDATING THE DELAYS
-                ***************************************************************************/
-                pDelays[1] = yn; // Update y(n-1)
-                pDelays[0] = (*pDataIn++); // Update x(n-1)
-
-                /**************************************************************************
-                                WRITING THE OUTPUT
-                ***************************************************************************/
-
-                /*Saturate results*/
-                if (yn > 1.0f)
-                {
-                    yn = 1.0f;
-                } else if (yn < -1.0f) {
-                    yn = -1.0f;
-                }
-
-                *pDataOut++ = (LVM_FLOAT)yn;
-                pDelays += 2;
+            /*Saturate results*/
+            if (yn > 1.0f) {
+                yn = 1.0f;
+            } else if (yn < -1.0f) {
+                yn = -1.0f;
             }
-            pDelays -= NrChannels * 2;
+
+            *pDataOut++ = (LVM_FLOAT)yn;
+            pDelays += 2;
         }
+        pDelays -= NrChannels * 2;
     }
-#endif
+}
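
The multichannel variant above keeps two delay taps per channel, stored channel-interleaved as { x(n-1), y(n-1) } pairs; the pointer advances by two inside the channel loop and is rewound by 2 * NrChannels at the end of each frame. The same walk over that layout, with the filter math elided:

    // Traversal of the per-channel delay state in FO_Mc_D16F32C15_LShx_TRC_WRA_01 (sketch).
    void visit_delays(float* delays, int nrFrames, int nrChannels) {
        for (int frame = 0; frame < nrFrames; ++frame) {
            float* p = delays;
            for (int ch = 0; ch < nrChannels; ++ch, p += 2) {
                // p[0] is x[n-1] and p[1] is y[n-1] for channel 'ch'; the real
                // routine computes yn here, then stores p[1] = yn and p[0] = input.
            }
            // A per-frame local pointer plays the role of the library's
            // "pDelays -= NrChannels * 2" rewind.
        }
    }
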
diff --git a/media/libeffects/lvm/lib/Common/src/FO_2I_D16F32Css_LShx_TRC_WRA_01_Init.cpp b/media/libeffects/lvm/lib/Common/src/FO_2I_D16F32Css_LShx_TRC_WRA_01_Init.cpp
index b81b976..552aeda 100644
--- a/media/libeffects/lvm/lib/Common/src/FO_2I_D16F32Css_LShx_TRC_WRA_01_Init.cpp
+++ b/media/libeffects/lvm/lib/Common/src/FO_2I_D16F32Css_LShx_TRC_WRA_01_Init.cpp
@@ -37,13 +37,12 @@
 /* RETURNS:                                                                */
 /*   void return code                                                      */
 /*-------------------------------------------------------------------------*/
-void FO_2I_D16F32Css_LShx_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t         *pInstance,
-                                          Biquad_2I_Order1_FLOAT_Taps_t   *pTaps,
-                                          FO_FLOAT_LShx_Coefs_t        *pCoef)
-{
+void FO_2I_D16F32Css_LShx_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t* pInstance,
+                                          Biquad_2I_Order1_FLOAT_Taps_t* pTaps,
+                                          FO_FLOAT_LShx_Coefs_t* pCoef) {
     LVM_FLOAT temp;
-    PFilter_Float_State pBiquadState = (PFilter_Float_State) pInstance;
-    pBiquadState->pDelays      = (LVM_FLOAT *) pTaps            ;
+    PFilter_Float_State pBiquadState = (PFilter_Float_State)pInstance;
+    pBiquadState->pDelays = (LVM_FLOAT*)pTaps;
 
     temp = pCoef->A1;
     pBiquadState->coefs[0] = temp;
@@ -54,4 +53,3 @@
 }
 /*-------------------------------------------------------------------------*/
 /* End Of File: FO_2I_D16F32Css_LShx_TRC_WRA_01_Init.c                     */
-
diff --git a/media/libeffects/lvm/lib/Common/src/FO_2I_D16F32Css_LShx_TRC_WRA_01_Private.h b/media/libeffects/lvm/lib/Common/src/FO_2I_D16F32Css_LShx_TRC_WRA_01_Private.h
index 5022500..0103328 100644
--- a/media/libeffects/lvm/lib/Common/src/FO_2I_D16F32Css_LShx_TRC_WRA_01_Private.h
+++ b/media/libeffects/lvm/lib/Common/src/FO_2I_D16F32Css_LShx_TRC_WRA_01_Private.h
@@ -20,11 +20,10 @@
 
 /* The internal state variables are implemented in a (for the user)  hidden structure */
 /* In this (private) file, the internal structure is declared for private use.        */
-typedef struct _Filter_State_
-{
-    LVM_FLOAT     *pDelays;       /* pointer to the delayed samples (data of 32 bits)   */
-    LVM_FLOAT     coefs[3];       /* pointer to the filter coefficients */
-}Filter_Float_State;
+typedef struct _Filter_State_ {
+    LVM_FLOAT* pDelays; /* pointer to the delayed samples (data of 32 bits)   */
+    LVM_FLOAT coefs[3]; /* pointer to the filter coefficients */
+} Filter_Float_State;
 
-typedef Filter_Float_State * PFilter_Float_State ;
+typedef Filter_Float_State* PFilter_Float_State;
 #endif /* _FO_2I_D16F32CSS_LSHX_TRC_WRA_01_PRIVATE_H_ */
diff --git a/media/libeffects/lvm/lib/Common/src/Filters.h b/media/libeffects/lvm/lib/Common/src/Filters.h
index b5db8f4..8eb3e76 100644
--- a/media/libeffects/lvm/lib/Common/src/Filters.h
+++ b/media/libeffects/lvm/lib/Common/src/Filters.h
@@ -30,26 +30,23 @@
  * Biquad with coefficients A0, A1, A2, B1 and B2 coefficients
  */
 /* Single precision (16-bit) Biquad section coefficients */
-typedef struct
-{
-    LVM_FLOAT   A0;
-    LVM_FLOAT   A1;
-    LVM_FLOAT   A2;
-    LVM_FLOAT   B1;
-    LVM_FLOAT   B2;
-    LVM_UINT16  Scale;
+typedef struct {
+    LVM_FLOAT A0;
+    LVM_FLOAT A1;
+    LVM_FLOAT A2;
+    LVM_FLOAT B1;
+    LVM_FLOAT B2;
+    LVM_UINT16 Scale;
 } BiquadA012B12CoefsSP_t;
 /*
  * Biquad with coefficients A0, A1 and B1 coefficients
  */
 /* Single precision (16-bit) Biquad section coefficients */
-typedef struct
-{
-    LVM_FLOAT   A0;
-    LVM_FLOAT   A1;
-    LVM_FLOAT   B1;
-    LVM_UINT16  Scale;
+typedef struct {
+    LVM_FLOAT A0;
+    LVM_FLOAT A1;
+    LVM_FLOAT B1;
+    LVM_UINT16 Scale;
 } BiquadA01B1CoefsSP_t;
 
-#endif      /* FILTERS_H */
-
+#endif /* FILTERS_H */
diff --git a/media/libeffects/lvm/lib/Common/src/From2iToMS_16x16.cpp b/media/libeffects/lvm/lib/Common/src/From2iToMS_16x16.cpp
index c3f6648..b050267 100644
--- a/media/libeffects/lvm/lib/Common/src/From2iToMS_16x16.cpp
+++ b/media/libeffects/lvm/lib/Common/src/From2iToMS_16x16.cpp
@@ -25,15 +25,10 @@
    FUNCTION  From2iToMS_16x16
 ***********************************************************************************/
 
-void From2iToMS_16x16( const LVM_INT16  *src,
-                             LVM_INT16  *dstM,
-                             LVM_INT16  *dstS,
-                             LVM_INT16  n )
-{
-    LVM_INT32 temp1,left,right;
+void From2iToMS_16x16(const LVM_INT16* src, LVM_INT16* dstM, LVM_INT16* dstS, LVM_INT16 n) {
+    LVM_INT32 temp1, left, right;
     LVM_INT16 ii;
-    for (ii = n; ii != 0; ii--)
-    {
+    for (ii = n; ii != 0; ii--) {
         left = (LVM_INT32)*src;
         src++;
 
@@ -41,27 +36,22 @@
         src++;
 
         /* Compute M signal*/
-        temp1 =  (left+right)>>1;
+        temp1 = (left + right) >> 1;
         *dstM = (LVM_INT16)temp1;
         dstM++;
 
         /* Compute S signal*/
-        temp1 =  (left-right)>>1;
+        temp1 = (left - right) >> 1;
         *dstS = (LVM_INT16)temp1;
         dstS++;
     }
 
     return;
 }
-void From2iToMS_Float( const LVM_FLOAT  *src,
-                             LVM_FLOAT  *dstM,
-                             LVM_FLOAT  *dstS,
-                             LVM_INT16  n )
-{
-    LVM_FLOAT temp1,left,right;
+void From2iToMS_Float(const LVM_FLOAT* src, LVM_FLOAT* dstM, LVM_FLOAT* dstS, LVM_INT16 n) {
+    LVM_FLOAT temp1, left, right;
     LVM_INT16 ii;
-    for (ii = n; ii != 0; ii--)
-    {
+    for (ii = n; ii != 0; ii--) {
         left = (LVM_FLOAT)*src;
         src++;
 
@@ -69,12 +59,12 @@
         src++;
 
         /* Compute M signal*/
-        temp1 =  (left + right) / 2.0f;
+        temp1 = (left + right) / 2.0f;
         *dstM = (LVM_FLOAT)temp1;
         dstM++;
 
         /* Compute S signal*/
-        temp1 =  (left - right) / 2.0f;
+        temp1 = (left - right) / 2.0f;
         *dstS = (LVM_FLOAT)temp1;
         dstS++;
     }
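
From2iToMS_Float above is the forward half of the usual mid/side pair: M = (L + R) / 2, S = (L - R) / 2; the inverse is simply L = M + S, R = M - S, and the round trip recovers the original pair up to floating-point rounding. For reference (the decode direction is not part of this file):

    #include <utility>

    static inline std::pair<float, float> toMS(float left, float right) {
        return { (left + right) / 2.0f, (left - right) / 2.0f };
    }

    static inline std::pair<float, float> fromMS(float mid, float side) {
        return { mid + side, mid - side };   // inverse of toMS
    }
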
diff --git a/media/libeffects/lvm/lib/Common/src/From2iToMono_16.cpp b/media/libeffects/lvm/lib/Common/src/From2iToMono_16.cpp
index b758ee7..9a54ee4 100644
--- a/media/libeffects/lvm/lib/Common/src/From2iToMono_16.cpp
+++ b/media/libeffects/lvm/lib/Common/src/From2iToMono_16.cpp
@@ -25,21 +25,17 @@
    FUNCTION From2iToMono_16
 ***********************************************************************************/
 
-void From2iToMono_16( const LVM_INT16 *src,
-                            LVM_INT16 *dst,
-                            LVM_INT16 n)
-{
+void From2iToMono_16(const LVM_INT16* src, LVM_INT16* dst, LVM_INT16 n) {
     LVM_INT16 ii;
     LVM_INT32 Temp;
-    for (ii = n; ii != 0; ii--)
-    {
+    for (ii = n; ii != 0; ii--) {
         Temp = (LVM_INT32)*src;
         src++;
 
         Temp += (LVM_INT32)*src;
         src++;
 
-        *dst  = (LVM_INT16)(Temp >>1);
+        *dst = (LVM_INT16)(Temp >> 1);
         dst++;
     }
 
diff --git a/media/libeffects/lvm/lib/Common/src/From2iToMono_32.cpp b/media/libeffects/lvm/lib/Common/src/From2iToMono_32.cpp
index a8688b4..6ede958 100644
--- a/media/libeffects/lvm/lib/Common/src/From2iToMono_32.cpp
+++ b/media/libeffects/lvm/lib/Common/src/From2iToMono_32.cpp
@@ -25,19 +25,15 @@
    FUNCTION From2iToMono_32
 ***********************************************************************************/
 
-void From2iToMono_32( const LVM_INT32 *src,
-                            LVM_INT32 *dst,
-                            LVM_INT16 n)
-{
+void From2iToMono_32(const LVM_INT32* src, LVM_INT32* dst, LVM_INT16 n) {
     LVM_INT16 ii;
     LVM_INT32 Temp;
 
-    for (ii = n; ii != 0; ii--)
-    {
-        Temp = (*src>>1);
+    for (ii = n; ii != 0; ii--) {
+        Temp = (*src >> 1);
         src++;
 
-        Temp +=(*src>>1);
+        Temp += (*src >> 1);
         src++;
 
         *dst = Temp;
@@ -46,15 +42,11 @@
 
     return;
 }
-void From2iToMono_Float( const LVM_FLOAT *src,
-                         LVM_FLOAT *dst,
-                         LVM_INT16 n)
-{
+void From2iToMono_Float(const LVM_FLOAT* src, LVM_FLOAT* dst, LVM_INT16 n) {
     LVM_INT16 ii;
     LVM_FLOAT Temp;
 
-    for (ii = n; ii != 0; ii--)
-    {
+    for (ii = n; ii != 0; ii--) {
         Temp = (*src);
         src++;
 
@@ -67,7 +59,6 @@
 
     return;
 }
-#ifdef SUPPORT_MC
 /*
  * FUNCTION:       FromMcToMono_Float
  *
@@ -85,19 +76,14 @@
  *  void
  *
  */
-void FromMcToMono_Float(const LVM_FLOAT *src,
-                        LVM_FLOAT *dst,
-                        LVM_INT16 NrFrames,
-                        LVM_INT16 NrChannels)
-{
+void FromMcToMono_Float(const LVM_FLOAT* src, LVM_FLOAT* dst, LVM_INT16 NrFrames,
+                        LVM_INT16 NrChannels) {
     LVM_INT16 ii, jj;
     LVM_FLOAT Temp;
 
-    for (ii = NrFrames; ii != 0; ii--)
-    {
+    for (ii = NrFrames; ii != 0; ii--) {
         Temp = 0.0f;
-        for (jj = NrChannels; jj !=0; jj--)
-        {
+        for (jj = NrChannels; jj != 0; jj--) {
             Temp += (*src);
             src++;
         }
@@ -107,6 +93,5 @@
 
     return;
 }
-#endif
 
 /**********************************************************************************/
diff --git a/media/libeffects/lvm/lib/Common/src/InstAlloc.cpp b/media/libeffects/lvm/lib/Common/src/InstAlloc.cpp
index a039bf5..2cfe056 100644
--- a/media/libeffects/lvm/lib/Common/src/InstAlloc.cpp
+++ b/media/libeffects/lvm/lib/Common/src/InstAlloc.cpp
@@ -26,9 +26,7 @@
  *  Remarks     :
  ****************************************************************************************/
 
-void    InstAlloc_Init( INST_ALLOC      *pms,
-                        void            *StartAddr )
-{
+void InstAlloc_Init(INST_ALLOC* pms, void* StartAddr) {
     pms->TotalSize = 3;
     pms->pNextMember = (((uintptr_t)StartAddr + 3) & (uintptr_t)~3);
 }
@@ -44,10 +42,8 @@
  *  Remarks     :
  ****************************************************************************************/
 
-void*   InstAlloc_AddMember( INST_ALLOC         *pms,
-                             LVM_UINT32           Size )
-{
-    void *NewMemberAddress; /* Variable to temporarily store the return value */
+void* InstAlloc_AddMember(INST_ALLOC* pms, LVM_UINT32 Size) {
+    void* NewMemberAddress; /* Variable to temporarily store the return value */
     NewMemberAddress = (void*)pms->pNextMember;
 
     Size = ((Size + 3) & (LVM_UINT32)~3); /* Ceil the size to a multiple of four */
@@ -55,7 +51,7 @@
     pms->TotalSize += Size;
     pms->pNextMember += Size;
 
-    return(NewMemberAddress);
+    return (NewMemberAddress);
 }
 
 /****************************************************************************************
@@ -66,21 +62,15 @@
  *  Remarks     :
  ****************************************************************************************/
 
-LVM_UINT32 InstAlloc_GetTotal( INST_ALLOC *pms)
-{
-    if (pms->TotalSize > 3)
-    {
-        return(pms->TotalSize);
-    }
-    else
-    {
-        return 0;           /* No memory added */
+LVM_UINT32 InstAlloc_GetTotal(INST_ALLOC* pms) {
+    if (pms->TotalSize > 3) {
+        return (pms->TotalSize);
+    } else {
+        return 0; /* No memory added */
     }
 }
 
-void    InstAlloc_InitAll( INST_ALLOC                      *pms,
-                           LVM_MemoryTable_st             *pMemoryTable)
-{
+void InstAlloc_InitAll(INST_ALLOC* pms, LVM_MemoryTable_st* pMemoryTable) {
     uintptr_t StartAddr;
 
     StartAddr = (uintptr_t)pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].pBaseAddress;
@@ -102,7 +92,6 @@
 
     pms[3].TotalSize = 3;
     pms[3].pNextMember = ((StartAddr + 3) & (uintptr_t)~3);
-
 }
 
 /****************************************************************************************
@@ -114,8 +103,7 @@
  *  Remarks     :
  ****************************************************************************************/
 
-void    InstAlloc_InitAll_NULL( INST_ALLOC  *pms)
-{
+void InstAlloc_InitAll_NULL(INST_ALLOC* pms) {
     pms[0].TotalSize = 3;
     pms[0].pNextMember = 0;
 
@@ -127,47 +115,46 @@
 
     pms[3].TotalSize = 3;
     pms[3].pNextMember = 0;
-
 }
 
-void*   InstAlloc_AddMemberAll( INST_ALLOC                     *pms,
-                                 LVM_UINT32                   Size[],
-                                 LVM_MemoryTable_st           *pMemoryTable)
-{
-    void *NewMemberAddress; /* Variable to temporarily store the return value */
+void* InstAlloc_AddMemberAll(INST_ALLOC* pms, LVM_UINT32 Size[], LVM_MemoryTable_st* pMemoryTable) {
+    void* NewMemberAddress; /* Variable to temporarily store the return value */
 
     /* coverity[returned_pointer] Ignore coverity warning that ptr is not used */
-    NewMemberAddress = InstAlloc_AddMember(&pms[LVM_PERSISTENT_SLOW_DATA], Size[LVM_PERSISTENT_SLOW_DATA]);
+    NewMemberAddress =
+            InstAlloc_AddMember(&pms[LVM_PERSISTENT_SLOW_DATA], Size[LVM_PERSISTENT_SLOW_DATA]);
 
-    pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].Size         = InstAlloc_GetTotal(&pms[LVM_PERSISTENT_SLOW_DATA]);
-    pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].Type         = LVM_PERSISTENT_SLOW_DATA;
+    pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].Size =
+            InstAlloc_GetTotal(&pms[LVM_PERSISTENT_SLOW_DATA]);
+    pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].Type = LVM_PERSISTENT_SLOW_DATA;
     pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].pBaseAddress = LVM_NULL;
 
-    NewMemberAddress = InstAlloc_AddMember(&pms[LVM_PERSISTENT_FAST_DATA], Size[LVM_PERSISTENT_FAST_DATA]);
+    NewMemberAddress =
+            InstAlloc_AddMember(&pms[LVM_PERSISTENT_FAST_DATA], Size[LVM_PERSISTENT_FAST_DATA]);
 
-    pMemoryTable->Region[LVM_PERSISTENT_FAST_DATA].Size         = InstAlloc_GetTotal(&pms[LVM_PERSISTENT_FAST_DATA]);
-    pMemoryTable->Region[LVM_PERSISTENT_FAST_DATA].Type         = LVM_PERSISTENT_FAST_DATA;
+    pMemoryTable->Region[LVM_PERSISTENT_FAST_DATA].Size =
+            InstAlloc_GetTotal(&pms[LVM_PERSISTENT_FAST_DATA]);
+    pMemoryTable->Region[LVM_PERSISTENT_FAST_DATA].Type = LVM_PERSISTENT_FAST_DATA;
     pMemoryTable->Region[LVM_PERSISTENT_FAST_DATA].pBaseAddress = LVM_NULL;
 
-    NewMemberAddress = InstAlloc_AddMember(&pms[LVM_PERSISTENT_FAST_COEF], Size[LVM_PERSISTENT_FAST_COEF]);
+    NewMemberAddress =
+            InstAlloc_AddMember(&pms[LVM_PERSISTENT_FAST_COEF], Size[LVM_PERSISTENT_FAST_COEF]);
 
-    pMemoryTable->Region[LVM_PERSISTENT_FAST_COEF].Size         = InstAlloc_GetTotal(&pms[LVM_PERSISTENT_FAST_COEF]);
-    pMemoryTable->Region[LVM_PERSISTENT_FAST_COEF].Type         = LVM_PERSISTENT_FAST_COEF;
+    pMemoryTable->Region[LVM_PERSISTENT_FAST_COEF].Size =
+            InstAlloc_GetTotal(&pms[LVM_PERSISTENT_FAST_COEF]);
+    pMemoryTable->Region[LVM_PERSISTENT_FAST_COEF].Type = LVM_PERSISTENT_FAST_COEF;
     pMemoryTable->Region[LVM_PERSISTENT_FAST_COEF].pBaseAddress = LVM_NULL;
 
     NewMemberAddress = InstAlloc_AddMember(&pms[LVM_TEMPORARY_FAST], Size[LVM_TEMPORARY_FAST]);
 
-    pMemoryTable->Region[LVM_TEMPORARY_FAST].Size                 = InstAlloc_GetTotal(&pms[LVM_TEMPORARY_FAST]);
-    pMemoryTable->Region[LVM_TEMPORARY_FAST].Type                 = LVM_TEMPORARY_FAST;
-    pMemoryTable->Region[LVM_TEMPORARY_FAST].pBaseAddress         = LVM_NULL;
+    pMemoryTable->Region[LVM_TEMPORARY_FAST].Size = InstAlloc_GetTotal(&pms[LVM_TEMPORARY_FAST]);
+    pMemoryTable->Region[LVM_TEMPORARY_FAST].Type = LVM_TEMPORARY_FAST;
+    pMemoryTable->Region[LVM_TEMPORARY_FAST].pBaseAddress = LVM_NULL;
 
-    return(NewMemberAddress);
+    return (NewMemberAddress);
 }
 
-void*   InstAlloc_AddMemberAllRet(     INST_ALLOC                 *pms,
-                                     LVM_UINT32               Size[],
-                                     void                    **ptr)
-{
+void* InstAlloc_AddMemberAllRet(INST_ALLOC* pms, LVM_UINT32 Size[], void** ptr) {
     ptr[0] = InstAlloc_AddMember(&pms[LVM_PERSISTENT_SLOW_DATA], Size[LVM_PERSISTENT_SLOW_DATA]);
     ptr[1] = InstAlloc_AddMember(&pms[LVM_PERSISTENT_FAST_DATA], Size[LVM_PERSISTENT_FAST_DATA]);
     ptr[2] = InstAlloc_AddMember(&pms[LVM_PERSISTENT_FAST_COEF], Size[LVM_PERSISTENT_FAST_COEF]);
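InstAlloc is a simple bump allocator over a caller-supplied region: InstAlloc_Init records a 4-byte-aligned start address, each InstAlloc_AddMember rounds its request up to a multiple of four and returns the current cursor before advancing it, and InstAlloc_GetTotal reports the bytes consumed. A usage sketch, assuming the InstAlloc.h declarations are in scope and the caller owns the backing buffer — the two-pass "measure against a null base, then allocate" idiom is an assumption about typical use, suggested by InstAlloc_InitAll_NULL above but not spelled out in this patch:

    /* Sketch only: measure first, then carve two members out of one buffer. */
    #include <stdlib.h>

    static void* AllocTwoMembersSketch(INST_ALLOC* alloc, void** outA, void** outB) {
        INST_ALLOC measure;
        InstAlloc_Init(&measure, (void*)0);       /* measuring pass, null base            */
        (void)InstAlloc_AddMember(&measure, 100); /* stays 100 (already a multiple of 4)  */
        (void)InstAlloc_AddMember(&measure, 50);  /* rounded up to 52                     */
        LVM_UINT32 total = InstAlloc_GetTotal(&measure);

        void* region = malloc(total);             /* caller-owned backing store           */
        if (region == NULL) return NULL;
        InstAlloc_Init(alloc, region);
        *outA = InstAlloc_AddMember(alloc, 100);
        *outB = InstAlloc_AddMember(alloc, 50);
        return region;                            /* free() when finished                 */
    }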
diff --git a/media/libeffects/lvm/lib/Common/src/Int16LShiftToInt32_16x32.cpp b/media/libeffects/lvm/lib/Common/src/Int16LShiftToInt32_16x32.cpp
index 9f09e4d..9ddcbe4 100644
--- a/media/libeffects/lvm/lib/Common/src/Int16LShiftToInt32_16x32.cpp
+++ b/media/libeffects/lvm/lib/Common/src/Int16LShiftToInt32_16x32.cpp
@@ -25,19 +25,14 @@
    FUNCTION INT16LSHIFTTOINT32_16X32
 ***********************************************************************************/
 
-void Int16LShiftToInt32_16x32(const LVM_INT16   *src,
-                              LVM_INT32         *dst,
-                              LVM_INT16         n,
-                              LVM_INT16         shift )
-{
+void Int16LShiftToInt32_16x32(const LVM_INT16* src, LVM_INT32* dst, LVM_INT16 n, LVM_INT16 shift) {
     LVM_INT16 ii;
 
-    src += n-1;
-    dst += n-1;
+    src += n - 1;
+    dst += n - 1;
 
-    for (ii = n; ii != 0; ii--)
-    {
-        *dst = ( ((LVM_INT32)*src) << shift);
+    for (ii = n; ii != 0; ii--) {
+        *dst = (((LVM_INT32)*src) << shift);
         src--;
         dst--;
     }
diff --git a/media/libeffects/lvm/lib/Common/src/Int32RShiftToInt16_Sat_32x16.cpp b/media/libeffects/lvm/lib/Common/src/Int32RShiftToInt16_Sat_32x16.cpp
index 8c9980d..2584117 100644
--- a/media/libeffects/lvm/lib/Common/src/Int32RShiftToInt16_Sat_32x16.cpp
+++ b/media/libeffects/lvm/lib/Common/src/Int32RShiftToInt16_Sat_32x16.cpp
@@ -25,29 +25,20 @@
    FUNCTION INT32RSHIFTTOINT16_SAT_32X16
 ***********************************************************************************/
 
-void Int32RShiftToInt16_Sat_32x16(const LVM_INT32  *src,
-                                  LVM_INT16 *dst,
-                                  LVM_INT16 n,
-                                  LVM_INT16 shift )
-{
+void Int32RShiftToInt16_Sat_32x16(const LVM_INT32* src, LVM_INT16* dst, LVM_INT16 n,
+                                  LVM_INT16 shift) {
     LVM_INT32 temp;
     LVM_INT16 ii;
 
-    for (ii = n; ii != 0; ii--)
-    {
+    for (ii = n; ii != 0; ii--) {
         temp = *src >> shift;
         src++;
 
-        if (temp > 0x00007FFF)
-        {
+        if (temp > 0x00007FFF) {
             *dst = 0x7FFF;
-        }
-        else if (temp < -0x00008000)
-        {
-            *dst = - 0x8000;
-        }
-        else
-        {
+        } else if (temp < -0x00008000) {
+            *dst = -0x8000;
+        } else {
             *dst = (LVM_INT16)temp;
         }
 
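A quick worked example of the saturation above, with shift = 8: an input of 0x00123400 shifts down to 0x1234, which fits in 16 bits and is stored unchanged, while 0x00800000 shifts down to 0x8000 = 32768, exceeds 0x7FFF and is clamped to 0x7FFF; symmetrically, sufficiently large negative inputs are clamped to -0x8000.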
diff --git a/media/libeffects/lvm/lib/Common/src/JoinTo2i_32x32.cpp b/media/libeffects/lvm/lib/Common/src/JoinTo2i_32x32.cpp
index 05df656..0721b76 100644
--- a/media/libeffects/lvm/lib/Common/src/JoinTo2i_32x32.cpp
+++ b/media/libeffects/lvm/lib/Common/src/JoinTo2i_32x32.cpp
@@ -25,19 +25,14 @@
    FUNCTION JoinTo2i_32x32
 ***********************************************************************************/
 
-void JoinTo2i_32x32( const LVM_INT32    *srcL,
-                     const LVM_INT32    *srcR,
-                           LVM_INT32    *dst,
-                           LVM_INT16    n )
-{
+void JoinTo2i_32x32(const LVM_INT32* srcL, const LVM_INT32* srcR, LVM_INT32* dst, LVM_INT16 n) {
     LVM_INT16 ii;
 
-    srcL += n-1;
-    srcR += n-1;
-    dst  += ((2*n)-1);
+    srcL += n - 1;
+    srcR += n - 1;
+    dst += ((2 * n) - 1);
 
-    for (ii = n; ii != 0; ii--)
-    {
+    for (ii = n; ii != 0; ii--) {
         *dst = *srcR;
         dst--;
         srcR--;
@@ -49,19 +44,14 @@
 
     return;
 }
-void JoinTo2i_Float( const LVM_FLOAT    *srcL,
-                     const LVM_FLOAT    *srcR,
-                           LVM_FLOAT    *dst,
-                           LVM_INT16    n )
-{
+void JoinTo2i_Float(const LVM_FLOAT* srcL, const LVM_FLOAT* srcR, LVM_FLOAT* dst, LVM_INT16 n) {
     LVM_INT16 ii;
 
     srcL += n - 1;
     srcR += n - 1;
-    dst  += ((2 * n) - 1);
+    dst += ((2 * n) - 1);
 
-    for (ii = n; ii != 0; ii--)
-    {
+    for (ii = n; ii != 0; ii--) {
         *dst = *srcR;
         dst--;
         srcR--;
@@ -74,4 +64,3 @@
     return;
 }
 /**********************************************************************************/
-
diff --git a/media/libeffects/lvm/lib/Common/src/LVC_Core_MixHard_1St_2i_D16C31_SAT.cpp b/media/libeffects/lvm/lib/Common/src/LVC_Core_MixHard_1St_2i_D16C31_SAT.cpp
index 14d61bd..8b00925 100644
--- a/media/libeffects/lvm/lib/Common/src/LVC_Core_MixHard_1St_2i_D16C31_SAT.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVC_Core_MixHard_1St_2i_D16C31_SAT.cpp
@@ -26,19 +26,15 @@
 /**********************************************************************************
    FUNCTION LVC_Core_MixHard_1St_2i_D16C31_SAT
 ***********************************************************************************/
-void LVC_Core_MixHard_1St_2i_D16C31_SAT( LVMixer3_FLOAT_st        *ptrInstance1,
-                                         LVMixer3_FLOAT_st        *ptrInstance2,
-                                         const LVM_FLOAT    *src,
-                                         LVM_FLOAT          *dst,
-                                         LVM_INT16          n)
-{
-    LVM_FLOAT  Temp;
+void LVC_Core_MixHard_1St_2i_D16C31_SAT(LVMixer3_FLOAT_st* ptrInstance1,
+                                        LVMixer3_FLOAT_st* ptrInstance2, const LVM_FLOAT* src,
+                                        LVM_FLOAT* dst, LVM_INT16 n) {
+    LVM_FLOAT Temp;
     LVM_INT16 ii;
-    Mix_Private_FLOAT_st  *pInstance1 = (Mix_Private_FLOAT_st *)(ptrInstance1->PrivateParams);
-    Mix_Private_FLOAT_st  *pInstance2 = (Mix_Private_FLOAT_st *)(ptrInstance2->PrivateParams);
-    for (ii = n; ii != 0; ii--)
-    {
-        Temp = ((LVM_FLOAT)*(src++) * (LVM_FLOAT)pInstance1->Current);
+    Mix_Private_FLOAT_st* pInstance1 = (Mix_Private_FLOAT_st*)(ptrInstance1->PrivateParams);
+    Mix_Private_FLOAT_st* pInstance2 = (Mix_Private_FLOAT_st*)(ptrInstance2->PrivateParams);
+    for (ii = n; ii != 0; ii--) {
+        Temp = ((LVM_FLOAT) * (src++) * (LVM_FLOAT)pInstance1->Current);
         if (Temp > 1.0f)
             *dst++ = 1.0f;
         else if (Temp < -1.0f)
@@ -46,7 +42,7 @@
         else
             *dst++ = (LVM_FLOAT)Temp;
 
-        Temp = ((LVM_FLOAT)*(src++) * (LVM_FLOAT)pInstance2->Current);
+        Temp = ((LVM_FLOAT) * (src++) * (LVM_FLOAT)pInstance2->Current);
         if (Temp > 1.0f)
             *dst++ = 1.0f;
         else if (Temp < -1.0f)
@@ -54,23 +50,15 @@
         else
             *dst++ = (LVM_FLOAT)Temp;
     }
-
 }
-#ifdef SUPPORT_MC
-void LVC_Core_MixHard_1St_MC_float_SAT (Mix_Private_FLOAT_st **ptrInstance,
-                                         const LVM_FLOAT      *src,
-                                         LVM_FLOAT            *dst,
-                                         LVM_INT16            NrFrames,
-                                         LVM_INT16            NrChannels)
-{
-    LVM_FLOAT  Temp;
+void LVC_Core_MixHard_1St_MC_float_SAT(Mix_Private_FLOAT_st** ptrInstance, const LVM_FLOAT* src,
+                                       LVM_FLOAT* dst, LVM_INT16 NrFrames, LVM_INT16 NrChannels) {
+    LVM_FLOAT Temp;
     LVM_INT16 ii, jj;
-    for (ii = NrFrames; ii != 0; ii--)
-    {
-        for (jj = 0; jj < NrChannels; jj++)
-        {
-            Mix_Private_FLOAT_st  *pInstance1 = (Mix_Private_FLOAT_st *)(ptrInstance[jj]);
-            Temp = ((LVM_FLOAT)*(src++) * (LVM_FLOAT)pInstance1->Current);
+    for (ii = NrFrames; ii != 0; ii--) {
+        for (jj = 0; jj < NrChannels; jj++) {
+            Mix_Private_FLOAT_st* pInstance1 = (Mix_Private_FLOAT_st*)(ptrInstance[jj]);
+            Temp = ((LVM_FLOAT) * (src++) * (LVM_FLOAT)pInstance1->Current);
             if (Temp > 1.0f)
                 *dst++ = 1.0f;
             else if (Temp < -1.0f)
@@ -80,5 +68,4 @@
         }
     }
 }
-#endif
 /**********************************************************************************/
diff --git a/media/libeffects/lvm/lib/Common/src/LVC_Core_MixHard_2St_D16C31_SAT.cpp b/media/libeffects/lvm/lib/Common/src/LVC_Core_MixHard_2St_D16C31_SAT.cpp
index 841fa1e..31cd805 100644
--- a/media/libeffects/lvm/lib/Common/src/LVC_Core_MixHard_2St_D16C31_SAT.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVC_Core_MixHard_2St_D16C31_SAT.cpp
@@ -24,26 +24,22 @@
 /**********************************************************************************
    FUNCTION LVCore_MIXHARD_2ST_D16C31_SAT
 ***********************************************************************************/
-void LVC_Core_MixHard_2St_D16C31_SAT( LVMixer3_FLOAT_st *ptrInstance1,
-                                    LVMixer3_FLOAT_st         *ptrInstance2,
-                                    const LVM_FLOAT     *src1,
-                                    const LVM_FLOAT     *src2,
-                                          LVM_FLOAT     *dst,
-                                          LVM_INT16     n)
-{
-    LVM_FLOAT  Temp;
+void LVC_Core_MixHard_2St_D16C31_SAT(LVMixer3_FLOAT_st* ptrInstance1,
+                                     LVMixer3_FLOAT_st* ptrInstance2, const LVM_FLOAT* src1,
+                                     const LVM_FLOAT* src2, LVM_FLOAT* dst, LVM_INT16 n) {
+    LVM_FLOAT Temp;
     LVM_INT16 ii;
     LVM_FLOAT Current1;
     LVM_FLOAT Current2;
-    Mix_Private_FLOAT_st  *pInstance1 = (Mix_Private_FLOAT_st *)(ptrInstance1->PrivateParams);
-    Mix_Private_FLOAT_st  *pInstance2 = (Mix_Private_FLOAT_st *)(ptrInstance2->PrivateParams);
+    Mix_Private_FLOAT_st* pInstance1 = (Mix_Private_FLOAT_st*)(ptrInstance1->PrivateParams);
+    Mix_Private_FLOAT_st* pInstance2 = (Mix_Private_FLOAT_st*)(ptrInstance2->PrivateParams);
 
     Current1 = (pInstance1->Current);
     Current2 = (pInstance2->Current);
 
-    for (ii = n; ii != 0; ii--){
-        Temp = (((LVM_FLOAT)*(src1++) * (LVM_FLOAT)Current1)) +
-               (((LVM_FLOAT)*(src2++) * (LVM_FLOAT)Current2));
+    for (ii = n; ii != 0; ii--) {
+        Temp = (((LVM_FLOAT) * (src1++) * (LVM_FLOAT)Current1)) +
+               (((LVM_FLOAT) * (src2++) * (LVM_FLOAT)Current2));
         if (Temp > 1.0f)
             *dst++ = 1.0f;
         else if (Temp < -1.0f)
diff --git a/media/libeffects/lvm/lib/Common/src/LVC_Core_MixInSoft_D16C31_SAT.cpp b/media/libeffects/lvm/lib/Common/src/LVC_Core_MixInSoft_D16C31_SAT.cpp
index 318138d..b7865d9 100644
--- a/media/libeffects/lvm/lib/Common/src/LVC_Core_MixInSoft_D16C31_SAT.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVC_Core_MixInSoft_D16C31_SAT.cpp
@@ -25,33 +25,28 @@
 /**********************************************************************************
    FUNCTION LVCore_MIXSOFT_1ST_D16C31_WRA
 ***********************************************************************************/
-void LVC_Core_MixInSoft_D16C31_SAT(LVMixer3_FLOAT_st *ptrInstance,
-                                   const LVM_FLOAT   *src,
-                                         LVM_FLOAT   *dst,
-                                         LVM_INT16   n)
-{
-
-    LVM_INT16   OutLoop;
-    LVM_INT16   InLoop;
-    LVM_INT32   ii,jj;
-    Mix_Private_FLOAT_st  *pInstance = (Mix_Private_FLOAT_st *)(ptrInstance->PrivateParams);
-    LVM_FLOAT   Delta = pInstance->Delta;
-    LVM_FLOAT   Current = pInstance->Current;
-    LVM_FLOAT   Target = pInstance->Target;
-    LVM_FLOAT   Temp;
+void LVC_Core_MixInSoft_D16C31_SAT(LVMixer3_FLOAT_st* ptrInstance, const LVM_FLOAT* src,
+                                   LVM_FLOAT* dst, LVM_INT16 n) {
+    LVM_INT16 OutLoop;
+    LVM_INT16 InLoop;
+    LVM_INT32 ii, jj;
+    Mix_Private_FLOAT_st* pInstance = (Mix_Private_FLOAT_st*)(ptrInstance->PrivateParams);
+    LVM_FLOAT Delta = pInstance->Delta;
+    LVM_FLOAT Current = pInstance->Current;
+    LVM_FLOAT Target = pInstance->Target;
+    LVM_FLOAT Temp;
 
     InLoop = (LVM_INT16)(n >> 2); /* Process per 4 samples */
     OutLoop = (LVM_INT16)(n - (InLoop << 2));
 
-    if(Current < Target){
-        if (OutLoop){
+    if (Current < Target) {
+        if (OutLoop) {
             Temp = Current + Delta;
             Current = Temp;
-            if (Current > Target)
-                Current = Target;
+            if (Current > Target) Current = Target;
 
-           for (ii = OutLoop; ii != 0; ii--){
-                Temp = ((LVM_FLOAT)*dst) + (((LVM_FLOAT)*(src++) * Current));
+            for (ii = OutLoop; ii != 0; ii--) {
+                Temp = ((LVM_FLOAT)*dst) + (((LVM_FLOAT) * (src++) * Current));
                 if (Temp > 1.0f)
                     *dst++ = 1.0f;
                 else if (Temp < -1.0f)
@@ -61,14 +56,13 @@
             }
         }
 
-        for (ii = InLoop; ii != 0; ii--){
+        for (ii = InLoop; ii != 0; ii--) {
             Temp = Current + Delta;
             Current = Temp;
-            if (Current > Target)
-                Current = Target;
+            if (Current > Target) Current = Target;
 
-            for (jj = 4; jj != 0 ; jj--){
-                Temp = ((LVM_FLOAT)*dst) + (((LVM_FLOAT)*(src++) * Current));
+            for (jj = 4; jj != 0; jj--) {
+                Temp = ((LVM_FLOAT)*dst) + (((LVM_FLOAT) * (src++) * Current));
                 if (Temp > 1.0f)
                     *dst++ = 1.0f;
                 else if (Temp < -1.0f)
@@ -77,15 +71,13 @@
                     *dst++ = (LVM_FLOAT)Temp;
             }
         }
-    }
-    else{
-        if (OutLoop){
+    } else {
+        if (OutLoop) {
             Current -= Delta;
-            if (Current < Target)
-                Current = Target;
+            if (Current < Target) Current = Target;
 
-            for (ii = OutLoop; ii != 0; ii--){
-                Temp = ((LVM_FLOAT)*dst) + (((LVM_FLOAT)*(src++) * Current));
+            for (ii = OutLoop; ii != 0; ii--) {
+                Temp = ((LVM_FLOAT)*dst) + (((LVM_FLOAT) * (src++) * Current));
                 if (Temp > 1.0f)
                     *dst++ = 1.0f;
                 else if (Temp < -1.0f)
@@ -95,13 +87,12 @@
             }
         }
 
-        for (ii = InLoop; ii != 0; ii--){
+        for (ii = InLoop; ii != 0; ii--) {
             Current -= Delta;
-            if (Current < Target)
-                Current = Target;
+            if (Current < Target) Current = Target;
 
-            for (jj = 4; jj != 0 ; jj--){
-                Temp = ((LVM_FLOAT)*dst) + (((LVM_FLOAT)*(src++) * Current));
+            for (jj = 4; jj != 0; jj--) {
+                Temp = ((LVM_FLOAT)*dst) + (((LVM_FLOAT) * (src++) * Current));
                 if (Temp > 1.0f)
                     *dst++ = 1.0f;
                 else if (Temp < -1.0f)
@@ -113,7 +104,6 @@
     }
     pInstance->Current = Current;
 }
-#ifdef SUPPORT_MC
 /*
  * FUNCTION:       LVC_Core_MixInSoft_Mc_D16C31_SAT
  *
@@ -131,21 +121,16 @@
  *  void
  *
  */
-void LVC_Core_MixInSoft_Mc_D16C31_SAT(LVMixer3_FLOAT_st *ptrInstance,
-                                      const LVM_FLOAT   *src,
-                                            LVM_FLOAT   *dst,
-                                            LVM_INT16   NrFrames,
-                                            LVM_INT16   NrChannels)
-{
-
-    LVM_INT16   OutLoop;
-    LVM_INT16   InLoop;
-    LVM_INT32   ii, jj;
-    Mix_Private_FLOAT_st  *pInstance = (Mix_Private_FLOAT_st *)(ptrInstance->PrivateParams);
-    LVM_FLOAT   Delta = pInstance->Delta;
-    LVM_FLOAT   Current = pInstance->Current;
-    LVM_FLOAT   Target = pInstance->Target;
-    LVM_FLOAT   Temp;
+void LVC_Core_MixInSoft_Mc_D16C31_SAT(LVMixer3_FLOAT_st* ptrInstance, const LVM_FLOAT* src,
+                                      LVM_FLOAT* dst, LVM_INT16 NrFrames, LVM_INT16 NrChannels) {
+    LVM_INT16 OutLoop;
+    LVM_INT16 InLoop;
+    LVM_INT32 ii, jj;
+    Mix_Private_FLOAT_st* pInstance = (Mix_Private_FLOAT_st*)(ptrInstance->PrivateParams);
+    LVM_FLOAT Delta = pInstance->Delta;
+    LVM_FLOAT Current = pInstance->Current;
+    LVM_FLOAT Target = pInstance->Target;
+    LVM_FLOAT Temp;
 
     /*
      * Same operation is performed on consecutive frames.
@@ -160,10 +145,9 @@
         if (OutLoop) {
             Temp = Current + Delta;
             Current = Temp;
-            if (Current > Target)
-                Current = Target;
+            if (Current > Target) Current = Target;
 
-           for (ii = OutLoop*NrChannels; ii != 0; ii--) {
+            for (ii = OutLoop * NrChannels; ii != 0; ii--) {
                 Temp = (*dst) + (*(src++) * Current);
                 if (Temp > 1.0f)
                     *dst++ = 1.0f;
@@ -177,10 +161,9 @@
         for (ii = InLoop; ii != 0; ii--) {
             Temp = Current + Delta;
             Current = Temp;
-            if (Current > Target)
-                Current = Target;
+            if (Current > Target) Current = Target;
 
-            for (jj = NrChannels; jj != 0 ; jj--) {
+            for (jj = NrChannels; jj != 0; jj--) {
                 Temp = (*dst) + (*(src++) * Current);
                 if (Temp > 1.0f)
                     *dst++ = 1.0f;
@@ -196,17 +179,14 @@
                     *dst++ = -1.0f;
                 else
                     *dst++ = Temp;
-
             }
         }
-    }
-    else{
+    } else {
         if (OutLoop) {
             Current -= Delta;
-            if (Current < Target)
-                Current = Target;
+            if (Current < Target) Current = Target;
 
-            for (ii = OutLoop*NrChannels; ii != 0; ii--) {
+            for (ii = OutLoop * NrChannels; ii != 0; ii--) {
                 Temp = (*dst) + (*(src++) * Current);
                 if (Temp > 1.0f)
                     *dst++ = 1.0f;
@@ -219,10 +199,9 @@
 
         for (ii = InLoop; ii != 0; ii--) {
             Current -= Delta;
-            if (Current < Target)
-                Current = Target;
+            if (Current < Target) Current = Target;
 
-            for (jj = NrChannels; jj != 0 ; jj--) {
+            for (jj = NrChannels; jj != 0; jj--) {
                 Temp = (*dst) + (*(src++) * Current);
                 if (Temp > 1.0f)
                     *dst++ = 1.0f;
@@ -238,12 +217,10 @@
                     *dst++ = -1.0f;
                 else
                     *dst++ = Temp;
-
             }
         }
     }
     pInstance->Current = Current;
 }
 
-#endif
 /**********************************************************************************/
diff --git a/media/libeffects/lvm/lib/Common/src/LVC_Core_MixSoft_1St_2i_D16C31_WRA.cpp b/media/libeffects/lvm/lib/Common/src/LVC_Core_MixSoft_1St_2i_D16C31_WRA.cpp
index 1f4b08a..d45845a 100644
--- a/media/libeffects/lvm/lib/Common/src/LVC_Core_MixSoft_1St_2i_D16C31_WRA.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVC_Core_MixSoft_1St_2i_D16C31_WRA.cpp
@@ -26,12 +26,9 @@
 /**********************************************************************************
    FUNCTION LVC_Core_MixSoft_1St_2i_D16C31_WRA
 ***********************************************************************************/
-static LVM_FLOAT ADD2_SAT_FLOAT(LVM_FLOAT a,
-                                LVM_FLOAT b,
-                                LVM_FLOAT c)
-{
+static LVM_FLOAT ADD2_SAT_FLOAT(LVM_FLOAT a, LVM_FLOAT b, LVM_FLOAT c) {
     LVM_FLOAT temp;
-    temp = a + b ;
+    temp = a + b;
     if (temp < -1.0f)
         c = -1.0f;
     else if (temp > 1.0f)
@@ -40,154 +37,112 @@
         c = temp;
     return c;
 }
-void LVC_Core_MixSoft_1St_2i_D16C31_WRA( LVMixer3_FLOAT_st        *ptrInstance1,
-                                         LVMixer3_FLOAT_st        *ptrInstance2,
-                                         const LVM_FLOAT    *src,
-                                         LVM_FLOAT          *dst,
-                                         LVM_INT16          n)
-{
-    LVM_INT16   OutLoop;
-    LVM_INT16   InLoop;
-    LVM_INT32   ii;
-    Mix_Private_FLOAT_st  *pInstanceL = (Mix_Private_FLOAT_st *)(ptrInstance1->PrivateParams);
-    Mix_Private_FLOAT_st  *pInstanceR = (Mix_Private_FLOAT_st *)(ptrInstance2->PrivateParams);
+void LVC_Core_MixSoft_1St_2i_D16C31_WRA(LVMixer3_FLOAT_st* ptrInstance1,
+                                        LVMixer3_FLOAT_st* ptrInstance2, const LVM_FLOAT* src,
+                                        LVM_FLOAT* dst, LVM_INT16 n) {
+    LVM_INT16 OutLoop;
+    LVM_INT16 InLoop;
+    LVM_INT32 ii;
+    Mix_Private_FLOAT_st* pInstanceL = (Mix_Private_FLOAT_st*)(ptrInstance1->PrivateParams);
+    Mix_Private_FLOAT_st* pInstanceR = (Mix_Private_FLOAT_st*)(ptrInstance2->PrivateParams);
 
-    LVM_FLOAT   DeltaL = pInstanceL->Delta;
-    LVM_FLOAT   CurrentL = pInstanceL->Current;
-    LVM_FLOAT   TargetL = pInstanceL->Target;
+    LVM_FLOAT DeltaL = pInstanceL->Delta;
+    LVM_FLOAT CurrentL = pInstanceL->Current;
+    LVM_FLOAT TargetL = pInstanceL->Target;
 
-    LVM_FLOAT   DeltaR = pInstanceR->Delta;
-    LVM_FLOAT   CurrentR = pInstanceR->Current;
-    LVM_FLOAT   TargetR = pInstanceR->Target;
+    LVM_FLOAT DeltaR = pInstanceR->Delta;
+    LVM_FLOAT CurrentR = pInstanceR->Current;
+    LVM_FLOAT TargetR = pInstanceR->Target;
 
-    LVM_FLOAT   Temp = 0;
+    LVM_FLOAT Temp = 0;
 
     InLoop = (LVM_INT16)(n >> 2); /* Process per 4 samples */
     OutLoop = (LVM_INT16)(n - (InLoop << 2));
 
-    if (OutLoop)
-    {
-        if(CurrentL < TargetL)
-        {
+    if (OutLoop) {
+        if (CurrentL < TargetL) {
             ADD2_SAT_FLOAT(CurrentL, DeltaL, Temp);
             CurrentL = Temp;
-            if (CurrentL > TargetL)
-                CurrentL = TargetL;
-        }
-        else
-        {
+            if (CurrentL > TargetL) CurrentL = TargetL;
+        } else {
             CurrentL -= DeltaL;
-            if (CurrentL < TargetL)
-                CurrentL = TargetL;
+            if (CurrentL < TargetL) CurrentL = TargetL;
         }
 
-        if(CurrentR < TargetR)
-        {
+        if (CurrentR < TargetR) {
             ADD2_SAT_FLOAT(CurrentR, DeltaR, Temp);
             CurrentR = Temp;
-            if (CurrentR > TargetR)
-                CurrentR = TargetR;
-        }
-        else
-        {
+            if (CurrentR > TargetR) CurrentR = TargetR;
+        } else {
             CurrentR -= DeltaR;
-            if (CurrentR < TargetR)
-                CurrentR = TargetR;
+            if (CurrentR < TargetR) CurrentR = TargetR;
         }
 
-        for (ii = OutLoop * 2; ii != 0; ii -= 2)
-        {
-            *(dst++) = (LVM_FLOAT)(((LVM_FLOAT)*(src++) * (LVM_FLOAT)CurrentL));
-            *(dst++) = (LVM_FLOAT)(((LVM_FLOAT)*(src++) * (LVM_FLOAT)CurrentR));
+        for (ii = OutLoop * 2; ii != 0; ii -= 2) {
+            *(dst++) = (LVM_FLOAT)(((LVM_FLOAT) * (src++) * (LVM_FLOAT)CurrentL));
+            *(dst++) = (LVM_FLOAT)(((LVM_FLOAT) * (src++) * (LVM_FLOAT)CurrentR));
         }
     }
 
-    for (ii = InLoop * 2; ii != 0; ii-=2)
-    {
-        if(CurrentL < TargetL)
-        {
+    for (ii = InLoop * 2; ii != 0; ii -= 2) {
+        if (CurrentL < TargetL) {
             ADD2_SAT_FLOAT(CurrentL, DeltaL, Temp);
             CurrentL = Temp;
-            if (CurrentL > TargetL)
-                CurrentL = TargetL;
-        }
-        else
-        {
+            if (CurrentL > TargetL) CurrentL = TargetL;
+        } else {
             CurrentL -= DeltaL;
-            if (CurrentL < TargetL)
-                CurrentL = TargetL;
+            if (CurrentL < TargetL) CurrentL = TargetL;
         }
 
-        if(CurrentR < TargetR)
-        {
+        if (CurrentR < TargetR) {
             ADD2_SAT_FLOAT(CurrentR, DeltaR, Temp);
             CurrentR = Temp;
-            if (CurrentR > TargetR)
-                CurrentR = TargetR;
-        }
-        else
-        {
+            if (CurrentR > TargetR) CurrentR = TargetR;
+        } else {
             CurrentR -= DeltaR;
-            if (CurrentR < TargetR)
-                CurrentR = TargetR;
+            if (CurrentR < TargetR) CurrentR = TargetR;
         }
 
-        *(dst++) = (LVM_FLOAT)(((LVM_FLOAT)*(src++) * (LVM_FLOAT)CurrentL));
-        *(dst++) = (LVM_FLOAT)(((LVM_FLOAT)*(src++) * (LVM_FLOAT)CurrentR));
-        *(dst++) = (LVM_FLOAT)(((LVM_FLOAT)*(src++) * (LVM_FLOAT)CurrentL));
-        *(dst++) = (LVM_FLOAT)(((LVM_FLOAT)*(src++) * (LVM_FLOAT)CurrentR));
-        *(dst++) = (LVM_FLOAT)(((LVM_FLOAT)*(src++) * (LVM_FLOAT)CurrentL));
-        *(dst++) = (LVM_FLOAT)(((LVM_FLOAT)*(src++) * (LVM_FLOAT)CurrentR));
-        *(dst++) = (LVM_FLOAT)(((LVM_FLOAT)*(src++) * (LVM_FLOAT)CurrentL));
-        *(dst++) = (LVM_FLOAT)(((LVM_FLOAT)*(src++) * (LVM_FLOAT)CurrentR));
+        *(dst++) = (LVM_FLOAT)(((LVM_FLOAT) * (src++) * (LVM_FLOAT)CurrentL));
+        *(dst++) = (LVM_FLOAT)(((LVM_FLOAT) * (src++) * (LVM_FLOAT)CurrentR));
+        *(dst++) = (LVM_FLOAT)(((LVM_FLOAT) * (src++) * (LVM_FLOAT)CurrentL));
+        *(dst++) = (LVM_FLOAT)(((LVM_FLOAT) * (src++) * (LVM_FLOAT)CurrentR));
+        *(dst++) = (LVM_FLOAT)(((LVM_FLOAT) * (src++) * (LVM_FLOAT)CurrentL));
+        *(dst++) = (LVM_FLOAT)(((LVM_FLOAT) * (src++) * (LVM_FLOAT)CurrentR));
+        *(dst++) = (LVM_FLOAT)(((LVM_FLOAT) * (src++) * (LVM_FLOAT)CurrentL));
+        *(dst++) = (LVM_FLOAT)(((LVM_FLOAT) * (src++) * (LVM_FLOAT)CurrentR));
     }
     pInstanceL->Current = CurrentL;
     pInstanceR->Current = CurrentR;
-
 }
-#ifdef SUPPORT_MC
-void LVC_Core_MixSoft_1St_MC_float_WRA (Mix_Private_FLOAT_st **ptrInstance,
-                                         const LVM_FLOAT      *src,
-                                         LVM_FLOAT            *dst,
-                                         LVM_INT16            NrFrames,
-                                         LVM_INT16            NrChannels)
-{
-    LVM_INT32   ii, ch;
-    LVM_FLOAT   Temp =0.0f;
-    LVM_FLOAT   tempCurrent[NrChannels];
-    for (ch = 0; ch < NrChannels; ch++)
-    {
+void LVC_Core_MixSoft_1St_MC_float_WRA(Mix_Private_FLOAT_st** ptrInstance, const LVM_FLOAT* src,
+                                       LVM_FLOAT* dst, LVM_INT16 NrFrames, LVM_INT16 NrChannels) {
+    LVM_INT32 ii, ch;
+    LVM_FLOAT Temp = 0.0f;
+    LVM_FLOAT tempCurrent[NrChannels];
+    for (ch = 0; ch < NrChannels; ch++) {
         tempCurrent[ch] = ptrInstance[ch]->Current;
     }
-    for (ii = NrFrames; ii > 0; ii--)
-    {
-        for (ch = 0; ch < NrChannels; ch++)
-        {
-            Mix_Private_FLOAT_st *pInstance = ptrInstance[ch];
-            const LVM_FLOAT   Delta = pInstance->Delta;
-            LVM_FLOAT         Current = tempCurrent[ch];
-            const LVM_FLOAT   Target = pInstance->Target;
-            if (Current < Target)
-            {
+    for (ii = NrFrames; ii > 0; ii--) {
+        for (ch = 0; ch < NrChannels; ch++) {
+            Mix_Private_FLOAT_st* pInstance = ptrInstance[ch];
+            const LVM_FLOAT Delta = pInstance->Delta;
+            LVM_FLOAT Current = tempCurrent[ch];
+            const LVM_FLOAT Target = pInstance->Target;
+            if (Current < Target) {
                 ADD2_SAT_FLOAT(Current, Delta, Temp);
                 Current = Temp;
-                if (Current > Target)
-                    Current = Target;
-            }
-            else
-            {
+                if (Current > Target) Current = Target;
+            } else {
                 Current -= Delta;
-                if (Current < Target)
-                    Current = Target;
+                if (Current < Target) Current = Target;
             }
             *dst++ = *src++ * Current;
             tempCurrent[ch] = Current;
         }
     }
-    for (ch = 0; ch < NrChannels; ch++)
-    {
+    for (ch = 0; ch < NrChannels; ch++) {
         ptrInstance[ch]->Current = tempCurrent[ch];
     }
 }
-#endif
 /**********************************************************************************/
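One observation on the code reformatted in this file, separate from the formatting itself: ADD2_SAT_FLOAT takes its third parameter by value and every call site above discards its return value, so the saturated sum never reaches Temp, and the following CurrentL = Temp; / Current = Temp; assignments pick up whatever Temp already held rather than the incremented gain. If the obvious intent holds, using the return value instead, e.g. CurrentL = ADD2_SAT_FLOAT(CurrentL, DeltaL, CurrentL);, would ramp the gain as presumably intended; this is an aside only, since the change here is purely a clang-format pass.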
diff --git a/media/libeffects/lvm/lib/Common/src/LVC_Core_MixSoft_1St_D16C31_WRA.cpp b/media/libeffects/lvm/lib/Common/src/LVC_Core_MixSoft_1St_D16C31_WRA.cpp
index 5d8aadc..f8c0a9d 100644
--- a/media/libeffects/lvm/lib/Common/src/LVC_Core_MixSoft_1St_D16C31_WRA.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVC_Core_MixSoft_1St_D16C31_WRA.cpp
@@ -26,43 +26,37 @@
 /**********************************************************************************
    FUNCTION LVCore_MIXSOFT_1ST_D16C31_WRA
 ***********************************************************************************/
-void LVC_Core_MixSoft_1St_D16C31_WRA(LVMixer3_FLOAT_st *ptrInstance,
-                                     const LVM_FLOAT   *src,
-                                           LVM_FLOAT   *dst,
-                                           LVM_INT16   n)
-{
-    LVM_INT16   OutLoop;
-    LVM_INT16   InLoop;
-    LVM_INT32   ii;
-    Mix_Private_FLOAT_st  *pInstance=(Mix_Private_FLOAT_st *)(ptrInstance->PrivateParams);
-    LVM_FLOAT   Delta= (LVM_FLOAT)pInstance->Delta;
-    LVM_FLOAT   Current = (LVM_FLOAT)pInstance->Current;
-    LVM_FLOAT   Target= (LVM_FLOAT)pInstance->Target;
-    LVM_FLOAT   Temp;
+void LVC_Core_MixSoft_1St_D16C31_WRA(LVMixer3_FLOAT_st* ptrInstance, const LVM_FLOAT* src,
+                                     LVM_FLOAT* dst, LVM_INT16 n) {
+    LVM_INT16 OutLoop;
+    LVM_INT16 InLoop;
+    LVM_INT32 ii;
+    Mix_Private_FLOAT_st* pInstance = (Mix_Private_FLOAT_st*)(ptrInstance->PrivateParams);
+    LVM_FLOAT Delta = (LVM_FLOAT)pInstance->Delta;
+    LVM_FLOAT Current = (LVM_FLOAT)pInstance->Current;
+    LVM_FLOAT Target = (LVM_FLOAT)pInstance->Target;
+    LVM_FLOAT Temp;
 
     InLoop = (LVM_INT16)(n >> 2); /* Process per 4 samples */
     OutLoop = (LVM_INT16)(n - (InLoop << 2));
 
-    if(Current<Target){
-        if (OutLoop){
-
+    if (Current < Target) {
+        if (OutLoop) {
             Temp = Current + Delta;
             if (Temp > 1.0f)
                 Temp = 1.0f;
             else if (Temp < -1.0f)
                 Temp = -1.0f;
 
-            Current=Temp;
-            if (Current > Target)
-                Current = Target;
+            Current = Temp;
+            if (Current > Target) Current = Target;
 
-            for (ii = OutLoop; ii != 0; ii--){
-                *(dst++) = (((LVM_FLOAT)*(src++) * (LVM_FLOAT)Current));
+            for (ii = OutLoop; ii != 0; ii--) {
+                *(dst++) = (((LVM_FLOAT) * (src++) * (LVM_FLOAT)Current));
             }
         }
 
-        for (ii = InLoop; ii != 0; ii--){
-
+        for (ii = InLoop; ii != 0; ii--) {
             Temp = Current + Delta;
 
             if (Temp > 1.0f)
@@ -70,42 +64,37 @@
             else if (Temp < -1.0f)
                 Temp = -1.0f;
 
-            Current=Temp;
-            if (Current > Target)
-                Current = Target;
+            Current = Temp;
+            if (Current > Target) Current = Target;
 
-            *(dst++) = (((LVM_FLOAT)*(src++) * Current) );
-            *(dst++) = (((LVM_FLOAT)*(src++) * Current) );
-            *(dst++) = (((LVM_FLOAT)*(src++) * Current) );
-            *(dst++) = (((LVM_FLOAT)*(src++) * Current) );
+            *(dst++) = (((LVM_FLOAT) * (src++) * Current));
+            *(dst++) = (((LVM_FLOAT) * (src++) * Current));
+            *(dst++) = (((LVM_FLOAT) * (src++) * Current));
+            *(dst++) = (((LVM_FLOAT) * (src++) * Current));
         }
-    }
-    else{
-        if (OutLoop){
+    } else {
+        if (OutLoop) {
             Current -= Delta;
-            if (Current < Target)
-                Current = Target;
+            if (Current < Target) Current = Target;
 
-            for (ii = OutLoop; ii != 0; ii--){
-                *(dst++) = (((LVM_FLOAT)*(src++) * Current));
+            for (ii = OutLoop; ii != 0; ii--) {
+                *(dst++) = (((LVM_FLOAT) * (src++) * Current));
             }
         }
 
-        for (ii = InLoop; ii != 0; ii--){
+        for (ii = InLoop; ii != 0; ii--) {
             Current -= Delta;
-            if (Current < Target)
-                Current = Target;
+            if (Current < Target) Current = Target;
 
-            *(dst++) = (((LVM_FLOAT)*(src++) * Current));
-            *(dst++) = (((LVM_FLOAT)*(src++) * Current));
-            *(dst++) = (((LVM_FLOAT)*(src++) * Current));
-            *(dst++) = (((LVM_FLOAT)*(src++) * Current));
+            *(dst++) = (((LVM_FLOAT) * (src++) * Current));
+            *(dst++) = (((LVM_FLOAT) * (src++) * Current));
+            *(dst++) = (((LVM_FLOAT) * (src++) * Current));
+            *(dst++) = (((LVM_FLOAT) * (src++) * Current));
         }
     }
-    pInstance->Current=Current;
+    pInstance->Current = Current;
 }
 
-#ifdef SUPPORT_MC
 /*
  * FUNCTION:       LVC_Core_MixSoft_Mc_D16C31_WRA
  *
@@ -123,20 +112,16 @@
  *  void
  *
  */
-void LVC_Core_MixSoft_Mc_D16C31_WRA(LVMixer3_FLOAT_st *ptrInstance,
-                                    const LVM_FLOAT   *src,
-                                          LVM_FLOAT   *dst,
-                                          LVM_INT16   NrFrames,
-                                          LVM_INT16   NrChannels)
-{
-    LVM_INT16   OutLoop;
-    LVM_INT16   InLoop;
-    LVM_INT32   ii, jj;
-    Mix_Private_FLOAT_st  *pInstance=(Mix_Private_FLOAT_st *)(ptrInstance->PrivateParams);
-    LVM_FLOAT   Delta= (LVM_FLOAT)pInstance->Delta;
-    LVM_FLOAT   Current = (LVM_FLOAT)pInstance->Current;
-    LVM_FLOAT   Target= (LVM_FLOAT)pInstance->Target;
-    LVM_FLOAT   Temp;
+void LVC_Core_MixSoft_Mc_D16C31_WRA(LVMixer3_FLOAT_st* ptrInstance, const LVM_FLOAT* src,
+                                    LVM_FLOAT* dst, LVM_INT16 NrFrames, LVM_INT16 NrChannels) {
+    LVM_INT16 OutLoop;
+    LVM_INT16 InLoop;
+    LVM_INT32 ii, jj;
+    Mix_Private_FLOAT_st* pInstance = (Mix_Private_FLOAT_st*)(ptrInstance->PrivateParams);
+    LVM_FLOAT Delta = (LVM_FLOAT)pInstance->Delta;
+    LVM_FLOAT Current = (LVM_FLOAT)pInstance->Current;
+    LVM_FLOAT Target = (LVM_FLOAT)pInstance->Target;
+    LVM_FLOAT Temp;
 
     /*
      * Same operation is performed on consecutive frames.
@@ -147,28 +132,25 @@
     /* OutLoop is calculated to handle cases where NrFrames value can be odd.*/
     OutLoop = (LVM_INT16)(NrFrames - (InLoop << 1));
 
-    if (Current<Target) {
+    if (Current < Target) {
         if (OutLoop) {
-
             Temp = Current + Delta;
             if (Temp > 1.0f)
                 Temp = 1.0f;
             else if (Temp < -1.0f)
                 Temp = -1.0f;
 
-            Current=Temp;
-            if (Current > Target)
-                Current = Target;
+            Current = Temp;
+            if (Current > Target) Current = Target;
 
             for (ii = OutLoop; ii != 0; ii--) {
-                for (jj = NrChannels; jj !=0; jj--) {
-                    *(dst++) = (((LVM_FLOAT)*(src++) * (LVM_FLOAT)Current));
+                for (jj = NrChannels; jj != 0; jj--) {
+                    *(dst++) = (((LVM_FLOAT) * (src++) * (LVM_FLOAT)Current));
                 }
             }
         }
 
         for (ii = InLoop; ii != 0; ii--) {
-
             Temp = Current + Delta;
 
             if (Temp > 1.0f)
@@ -176,44 +158,37 @@
             else if (Temp < -1.0f)
                 Temp = -1.0f;
 
-            Current=Temp;
-            if (Current > Target)
-                Current = Target;
+            Current = Temp;
+            if (Current > Target) Current = Target;
 
-            for (jj = NrChannels; jj != 0 ; jj--)
-            {
-                *(dst++) = (((LVM_FLOAT)*(src++) * Current));
-                *(dst++) = (((LVM_FLOAT)*(src++) * Current));
+            for (jj = NrChannels; jj != 0; jj--) {
+                *(dst++) = (((LVM_FLOAT) * (src++) * Current));
+                *(dst++) = (((LVM_FLOAT) * (src++) * Current));
             }
         }
-    }
-    else{
+    } else {
         if (OutLoop) {
             Current -= Delta;
-            if (Current < Target)
-                Current = Target;
+            if (Current < Target) Current = Target;
 
             for (ii = OutLoop; ii != 0; ii--) {
-                for (jj = NrChannels; jj !=0; jj--) {
-                    *(dst++) = (((LVM_FLOAT)*(src++) * (LVM_FLOAT)Current));
+                for (jj = NrChannels; jj != 0; jj--) {
+                    *(dst++) = (((LVM_FLOAT) * (src++) * (LVM_FLOAT)Current));
                 }
             }
         }
 
         for (ii = InLoop; ii != 0; ii--) {
             Current -= Delta;
-            if (Current < Target)
-                Current = Target;
+            if (Current < Target) Current = Target;
 
-            for (jj = NrChannels; jj != 0 ; jj--)
-            {
-                *(dst++) = (((LVM_FLOAT)*(src++) * Current));
-                *(dst++) = (((LVM_FLOAT)*(src++) * Current));
+            for (jj = NrChannels; jj != 0; jj--) {
+                *(dst++) = (((LVM_FLOAT) * (src++) * Current));
+                *(dst++) = (((LVM_FLOAT) * (src++) * Current));
             }
         }
     }
-    pInstance->Current=Current;
+    pInstance->Current = Current;
 }
-#endif
 
 /**********************************************************************************/
diff --git a/media/libeffects/lvm/lib/Common/src/LVC_MixInSoft_D16C31_SAT.cpp b/media/libeffects/lvm/lib/Common/src/LVC_MixInSoft_D16C31_SAT.cpp
index 2bec3be..270c7e0 100644
--- a/media/libeffects/lvm/lib/Common/src/LVC_MixInSoft_D16C31_SAT.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVC_MixInSoft_D16C31_SAT.cpp
@@ -27,39 +27,35 @@
    DEFINITIONS
 ***********************************************************************************/
 
-#define TRUE          1
-#define FALSE         0
+#define TRUE 1
+#define FALSE 0
 
 /**********************************************************************************
    FUNCTION MIXINSOFT_D16C31_SAT
 ***********************************************************************************/
-void LVC_MixInSoft_D16C31_SAT(LVMixer3_1St_FLOAT_st *ptrInstance,
-                              const LVM_FLOAT       *src,
-                                    LVM_FLOAT       *dst,
-                                    LVM_INT16       n)
-{
-    char        HardMixing = TRUE;
-    LVM_FLOAT   TargetGain;
-    Mix_Private_FLOAT_st  *pInstance = \
-                             (Mix_Private_FLOAT_st *)(ptrInstance->MixerStream[0].PrivateParams);
+void LVC_MixInSoft_D16C31_SAT(LVMixer3_1St_FLOAT_st* ptrInstance, const LVM_FLOAT* src,
+                              LVM_FLOAT* dst, LVM_INT16 n) {
+    char HardMixing = TRUE;
+    LVM_FLOAT TargetGain;
+    Mix_Private_FLOAT_st* pInstance =
+            (Mix_Private_FLOAT_st*)(ptrInstance->MixerStream[0].PrivateParams);
 
-    if(n <= 0)    return;
+    if (n <= 0) return;
 
     /******************************************************************************
        SOFT MIXING
     *******************************************************************************/
-    if (pInstance->Current != pInstance->Target)
-    {
-        if(pInstance->Delta == 1.0f){
+    if (pInstance->Current != pInstance->Target) {
+        if (pInstance->Delta == 1.0f) {
             pInstance->Current = pInstance->Target;
             TargetGain = pInstance->Target;
             LVC_Mixer_SetTarget(&(ptrInstance->MixerStream[0]), TargetGain);
-        }else if (Abs_Float(pInstance->Current - pInstance->Target) < pInstance->Delta){
+        } else if (Abs_Float(pInstance->Current - pInstance->Target) < pInstance->Delta) {
             pInstance->Current = pInstance->Target; /* Difference is not significant anymore. \
                                                        Make them equal. */
             TargetGain = pInstance->Target;
             LVC_Mixer_SetTarget(&(ptrInstance->MixerStream[0]), TargetGain);
-        }else{
+        } else {
             /* Soft mixing has to be applied */
             HardMixing = FALSE;
             LVC_Core_MixInSoft_D16C31_SAT(&(ptrInstance->MixerStream[0]), src, dst, n);
@@ -70,111 +66,12 @@
        HARD MIXING
     *******************************************************************************/
 
-    if (HardMixing){
-        if (pInstance->Target != 0){ /* Nothing to do in case Target = 0 */
-            if ((pInstance->Target) == 1.0f){
-                Add2_Sat_Float(src, dst, n);
-            }
-            else{
-                Mac3s_Sat_Float(src, (pInstance->Target), dst, n);
-                /* In case the LVCore function would have changed the Current value */
-                pInstance->Current = pInstance->Target;
-            }
-        }
-    }
-
-    /******************************************************************************
-       CALL BACK
-    *******************************************************************************/
-
-    if (ptrInstance->MixerStream[0].CallbackSet){
-        if (Abs_Float(pInstance->Current - pInstance->Target) < pInstance->Delta){
-            pInstance->Current = pInstance->Target; /* Difference is not significant anymore. \
-                                                       Make them equal. */
-            TargetGain = pInstance->Target;
-            LVC_Mixer_SetTarget(ptrInstance->MixerStream, TargetGain);
-            ptrInstance->MixerStream[0].CallbackSet = FALSE;
-            if (ptrInstance->MixerStream[0].pCallBack != 0){
-                (*ptrInstance->MixerStream[0].pCallBack) ( \
-                                                ptrInstance->MixerStream[0].pCallbackHandle,
-                                                ptrInstance->MixerStream[0].pGeneralPurpose,
-                                                ptrInstance->MixerStream[0].CallbackParam );
-            }
-        }
-    }
-
-}
-
-#ifdef SUPPORT_MC
-/*
- * FUNCTION:       LVC_MixInSoft_Mc_D16C31_SAT
- *
- * DESCRIPTION:
- *  Mixer function with support for processing multichannel input
- *
- * PARAMETERS:
- *  ptrInstance    Instance pointer
- *  src            Source
- *  dst            Destination
- *  NrFrames       Number of frames
- *  NrChannels     Number of channels
- *
- * RETURNS:
- *  void
- *
- */
-void LVC_MixInSoft_Mc_D16C31_SAT(LVMixer3_1St_FLOAT_st *ptrInstance,
-                                 const LVM_FLOAT       *src,
-                                       LVM_FLOAT       *dst,
-                                       LVM_INT16       NrFrames,
-                                       LVM_INT16       NrChannels)
-{
-    char        HardMixing = TRUE;
-    LVM_FLOAT   TargetGain;
-    Mix_Private_FLOAT_st  *pInstance = \
-                             (Mix_Private_FLOAT_st *)(ptrInstance->MixerStream[0].PrivateParams);
-
-    if (NrFrames <= 0)    return;
-
-    /******************************************************************************
-       SOFT MIXING
-    *******************************************************************************/
-    if (pInstance->Current != pInstance->Target)
-    {
-        if (pInstance->Delta == 1.0f) {
-            pInstance->Current = pInstance->Target;
-            TargetGain = pInstance->Target;
-            LVC_Mixer_SetTarget(&(ptrInstance->MixerStream[0]), TargetGain);
-        }else if (Abs_Float(pInstance->Current - pInstance->Target) < pInstance->Delta) {
-            pInstance->Current = pInstance->Target; /* Difference is not significant anymore. \
-                                                       Make them equal. */
-            TargetGain = pInstance->Target;
-            LVC_Mixer_SetTarget(&(ptrInstance->MixerStream[0]), TargetGain);
-        }else{
-            /* Soft mixing has to be applied */
-            HardMixing = FALSE;
-            LVC_Core_MixInSoft_Mc_D16C31_SAT(&(ptrInstance->MixerStream[0]),
-                                             src,
-                                             dst,
-                                             NrFrames,
-                                             NrChannels);
-        }
-    }
-
-    /******************************************************************************
-       HARD MIXING
-    *******************************************************************************/
-
     if (HardMixing) {
         if (pInstance->Target != 0) { /* Nothing to do in case Target = 0 */
             if ((pInstance->Target) == 1.0f) {
-                Add2_Sat_Float(src, dst, NrFrames*NrChannels);
-            }
-            else{
-                Mac3s_Sat_Float(src,
-                                (pInstance->Target),
-                                dst,
-                                NrFrames * NrChannels);
+                Add2_Sat_Float(src, dst, n);
+            } else {
+                Mac3s_Sat_Float(src, (pInstance->Target), dst, n);
                 /* In case the LVCore function would have changed the Current value */
                 pInstance->Current = pInstance->Target;
             }
@@ -193,15 +90,97 @@
             LVC_Mixer_SetTarget(ptrInstance->MixerStream, TargetGain);
             ptrInstance->MixerStream[0].CallbackSet = FALSE;
             if (ptrInstance->MixerStream[0].pCallBack != 0) {
-                (*ptrInstance->MixerStream[0].pCallBack) (\
-                                                ptrInstance->MixerStream[0].pCallbackHandle,
-                                                ptrInstance->MixerStream[0].pGeneralPurpose,
-                                                ptrInstance->MixerStream[0].CallbackParam);
+                (*ptrInstance->MixerStream[0].pCallBack)(
+                        ptrInstance->MixerStream[0].pCallbackHandle,
+                        ptrInstance->MixerStream[0].pGeneralPurpose,
+                        ptrInstance->MixerStream[0].CallbackParam);
+            }
+        }
+    }
+}
+
+/*
+ * FUNCTION:       LVC_MixInSoft_Mc_D16C31_SAT
+ *
+ * DESCRIPTION:
+ *  Mixer function with support for processing multichannel input
+ *
+ * PARAMETERS:
+ *  ptrInstance    Instance pointer
+ *  src            Source
+ *  dst            Destination
+ *  NrFrames       Number of frames
+ *  NrChannels     Number of channels
+ *
+ * RETURNS:
+ *  void
+ *
+ */
+void LVC_MixInSoft_Mc_D16C31_SAT(LVMixer3_1St_FLOAT_st* ptrInstance, const LVM_FLOAT* src,
+                                 LVM_FLOAT* dst, LVM_INT16 NrFrames, LVM_INT16 NrChannels) {
+    char HardMixing = TRUE;
+    LVM_FLOAT TargetGain;
+    Mix_Private_FLOAT_st* pInstance =
+            (Mix_Private_FLOAT_st*)(ptrInstance->MixerStream[0].PrivateParams);
+
+    if (NrFrames <= 0) return;
+
+    /******************************************************************************
+       SOFT MIXING
+    *******************************************************************************/
+    if (pInstance->Current != pInstance->Target) {
+        if (pInstance->Delta == 1.0f) {
+            pInstance->Current = pInstance->Target;
+            TargetGain = pInstance->Target;
+            LVC_Mixer_SetTarget(&(ptrInstance->MixerStream[0]), TargetGain);
+        } else if (Abs_Float(pInstance->Current - pInstance->Target) < pInstance->Delta) {
+            pInstance->Current = pInstance->Target; /* Difference is not significant anymore. \
+                                                       Make them equal. */
+            TargetGain = pInstance->Target;
+            LVC_Mixer_SetTarget(&(ptrInstance->MixerStream[0]), TargetGain);
+        } else {
+            /* Soft mixing has to be applied */
+            HardMixing = FALSE;
+            LVC_Core_MixInSoft_Mc_D16C31_SAT(&(ptrInstance->MixerStream[0]), src, dst, NrFrames,
+                                             NrChannels);
+        }
+    }
+
+    /******************************************************************************
+       HARD MIXING
+    *******************************************************************************/
+
+    if (HardMixing) {
+        if (pInstance->Target != 0) { /* Nothing to do in case Target = 0 */
+            if ((pInstance->Target) == 1.0f) {
+                Add2_Sat_Float(src, dst, NrFrames * NrChannels);
+            } else {
+                Mac3s_Sat_Float(src, (pInstance->Target), dst, NrFrames * NrChannels);
+                /* In case the LVCore function would have changed the Current value */
+                pInstance->Current = pInstance->Target;
             }
         }
     }
 
+    /******************************************************************************
+       CALL BACK
+    *******************************************************************************/
+
+    if (ptrInstance->MixerStream[0].CallbackSet) {
+        if (Abs_Float(pInstance->Current - pInstance->Target) < pInstance->Delta) {
+            pInstance->Current = pInstance->Target; /* Difference is not significant anymore. \
+                                                       Make them equal. */
+            TargetGain = pInstance->Target;
+            LVC_Mixer_SetTarget(ptrInstance->MixerStream, TargetGain);
+            ptrInstance->MixerStream[0].CallbackSet = FALSE;
+            if (ptrInstance->MixerStream[0].pCallBack != 0) {
+                (*ptrInstance->MixerStream[0].pCallBack)(
+                        ptrInstance->MixerStream[0].pCallbackHandle,
+                        ptrInstance->MixerStream[0].pGeneralPurpose,
+                        ptrInstance->MixerStream[0].CallbackParam);
+            }
+        }
+    }
 }
-#endif
 
 /**********************************************************************************/
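For reference, a minimal caller-side sketch of the multichannel mix-in entry point un-gated above. The include set, the LVM_FS_48000 sample-rate enumerator, and the helper name MixInExample are assumptions drawn from the wider LVM headers, not part of this change:

    #include <cstring>

    #include "LVC_Mixer.h"  // prototypes shown in this change; include path assumed

    // Ramp an interleaved multichannel contribution into an existing output buffer.
    static void MixInExample(const LVM_FLOAT* src, LVM_FLOAT* dst, LVM_INT16 nrFrames,
                             LVM_INT16 nrChannels) {
        LVMixer3_1St_FLOAT_st mixer;
        std::memset(&mixer, 0, sizeof(mixer));  // clears CallbackSet / pCallBack
        LVC_Mixer_Init(&mixer.MixerStream[0], /*TargetGain=*/1.0f, /*CurrentGain=*/0.0f);
        LVC_Mixer_SetTimeConstant(&mixer.MixerStream[0], /*Tc_millisec=*/20, LVM_FS_48000,
                                  nrChannels);
        // Soft mixing while Current ramps toward Target, then the saturating hard-mix
        // path (Add2_Sat_Float / Mac3s_Sat_Float) once the gain has settled.
        LVC_MixInSoft_Mc_D16C31_SAT(&mixer, src, dst, nrFrames, nrChannels);
    }
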
diff --git a/media/libeffects/lvm/lib/Common/src/LVC_MixSoft_1St_2i_D16C31_SAT.cpp b/media/libeffects/lvm/lib/Common/src/LVC_MixSoft_1St_2i_D16C31_SAT.cpp
index 3153ada..c74c8c6 100644
--- a/media/libeffects/lvm/lib/Common/src/LVC_MixSoft_1St_2i_D16C31_SAT.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVC_MixSoft_1St_2i_D16C31_SAT.cpp
@@ -29,144 +29,117 @@
    DEFINITIONS
 ***********************************************************************************/
 
-#define TRUE          1
-#define FALSE         0
+#define TRUE 1
+#define FALSE 0
 
 #define ARRAY_SIZE(a) ((sizeof(a)) / (sizeof(*(a))))
 
 /**********************************************************************************
    FUNCTION LVC_MixSoft_1St_2i_D16C31_SAT
 ***********************************************************************************/
-#ifdef SUPPORT_MC
 /* This threshold is used to decide on the processing to be applied on
  * front center and back center channels
  */
 #define LVM_VOL_BAL_THR (0.000016f)
-void LVC_MixSoft_1St_MC_float_SAT (LVMixer3_2St_FLOAT_st *ptrInstance,
-                                    const LVM_FLOAT       *src,
-                                    LVM_FLOAT             *dst,
-                                    LVM_INT16             NrFrames,
-                                    LVM_INT32             NrChannels,
-                                    LVM_INT32             ChMask)
-{
-    char        HardMixing = TRUE;
-    LVM_FLOAT   TargetGain;
-    Mix_Private_FLOAT_st  Target_lfe = {LVM_MAXFLOAT, LVM_MAXFLOAT, LVM_MAXFLOAT};
-    Mix_Private_FLOAT_st  Target_ctr = {LVM_MAXFLOAT, LVM_MAXFLOAT, LVM_MAXFLOAT};
-    Mix_Private_FLOAT_st  *pInstance1 = \
-                              (Mix_Private_FLOAT_st *)(ptrInstance->MixerStream[0].PrivateParams);
-    Mix_Private_FLOAT_st  *pInstance2 = \
-                              (Mix_Private_FLOAT_st *)(ptrInstance->MixerStream[1].PrivateParams);
-    Mix_Private_FLOAT_st  *pMixPrivInst[4] = {pInstance1, pInstance2, &Target_ctr, &Target_lfe};
-    Mix_Private_FLOAT_st  *pInstance[NrChannels];
+void LVC_MixSoft_1St_MC_float_SAT(LVMixer3_2St_FLOAT_st* ptrInstance, const LVM_FLOAT* src,
+                                  LVM_FLOAT* dst, LVM_INT16 NrFrames, LVM_INT32 NrChannels,
+                                  LVM_INT32 ChMask) {
+    char HardMixing = TRUE;
+    LVM_FLOAT TargetGain;
+    Mix_Private_FLOAT_st Target_lfe = {LVM_MAXFLOAT, LVM_MAXFLOAT, LVM_MAXFLOAT};
+    Mix_Private_FLOAT_st Target_ctr = {LVM_MAXFLOAT, LVM_MAXFLOAT, LVM_MAXFLOAT};
+    Mix_Private_FLOAT_st* pInstance1 =
+            (Mix_Private_FLOAT_st*)(ptrInstance->MixerStream[0].PrivateParams);
+    Mix_Private_FLOAT_st* pInstance2 =
+            (Mix_Private_FLOAT_st*)(ptrInstance->MixerStream[1].PrivateParams);
+    Mix_Private_FLOAT_st* pMixPrivInst[4] = {pInstance1, pInstance2, &Target_ctr, &Target_lfe};
+    Mix_Private_FLOAT_st* pInstance[NrChannels];
 
-    if (audio_channel_mask_get_representation(ChMask)
-            == AUDIO_CHANNEL_REPRESENTATION_INDEX)
-    {
-        for (int i = 0; i < 2; i++)
-        {
+    if (audio_channel_mask_get_representation(ChMask) == AUDIO_CHANNEL_REPRESENTATION_INDEX) {
+        for (int i = 0; i < 2; i++) {
             pInstance[i] = pMixPrivInst[i];
         }
-        for (int i = 2; i < NrChannels; i++)
-        {
+        for (int i = 2; i < NrChannels; i++) {
             pInstance[i] = pMixPrivInst[2];
         }
-    }
-    else
-    {
+    } else {
         // TODO: Combine with system/media/audio_utils/Balance.cpp
         // Constants in system/media/audio/include/system/audio-base.h
         // 'mixInstIdx' is used to map the appropriate mixer instance for each channel.
         const int mixInstIdx[] = {
-            0, // AUDIO_CHANNEL_OUT_FRONT_LEFT            = 0x1u,
-            1, // AUDIO_CHANNEL_OUT_FRONT_RIGHT           = 0x2u,
-            2, // AUDIO_CHANNEL_OUT_FRONT_CENTER          = 0x4u,
-            3, // AUDIO_CHANNEL_OUT_LOW_FREQUENCY         = 0x8u,
-            0, // AUDIO_CHANNEL_OUT_BACK_LEFT             = 0x10u,
-            1, // AUDIO_CHANNEL_OUT_BACK_RIGHT            = 0x20u,
-            0, // AUDIO_CHANNEL_OUT_FRONT_LEFT_OF_CENTER  = 0x40u,
-            1, // AUDIO_CHANNEL_OUT_FRONT_RIGHT_OF_CENTER = 0x80u,
-            2, // AUDIO_CHANNEL_OUT_BACK_CENTER           = 0x100u,
-            0, // AUDIO_CHANNEL_OUT_SIDE_LEFT             = 0x200u,
-            1, // AUDIO_CHANNEL_OUT_SIDE_RIGHT            = 0x400u,
-            2, // AUDIO_CHANNEL_OUT_TOP_CENTER            = 0x800u,
-            0, // AUDIO_CHANNEL_OUT_TOP_FRONT_LEFT        = 0x1000u,
-            2, // AUDIO_CHANNEL_OUT_TOP_FRONT_CENTER      = 0x2000u,
-            1, // AUDIO_CHANNEL_OUT_TOP_FRONT_RIGHT       = 0x4000u,
-            0, // AUDIO_CHANNEL_OUT_TOP_BACK_LEFT         = 0x8000u,
-            2, // AUDIO_CHANNEL_OUT_TOP_BACK_CENTER       = 0x10000u,
-            1, // AUDIO_CHANNEL_OUT_TOP_BACK_RIGHT        = 0x20000u,
-            0, // AUDIO_CHANNEL_OUT_TOP_SIDE_LEFT         = 0x40000u,
-            1, // AUDIO_CHANNEL_OUT_TOP_SIDE_RIGHT        = 0x80000u
+                0,  // AUDIO_CHANNEL_OUT_FRONT_LEFT            = 0x1u,
+                1,  // AUDIO_CHANNEL_OUT_FRONT_RIGHT           = 0x2u,
+                2,  // AUDIO_CHANNEL_OUT_FRONT_CENTER          = 0x4u,
+                3,  // AUDIO_CHANNEL_OUT_LOW_FREQUENCY         = 0x8u,
+                0,  // AUDIO_CHANNEL_OUT_BACK_LEFT             = 0x10u,
+                1,  // AUDIO_CHANNEL_OUT_BACK_RIGHT            = 0x20u,
+                0,  // AUDIO_CHANNEL_OUT_FRONT_LEFT_OF_CENTER  = 0x40u,
+                1,  // AUDIO_CHANNEL_OUT_FRONT_RIGHT_OF_CENTER = 0x80u,
+                2,  // AUDIO_CHANNEL_OUT_BACK_CENTER           = 0x100u,
+                0,  // AUDIO_CHANNEL_OUT_SIDE_LEFT             = 0x200u,
+                1,  // AUDIO_CHANNEL_OUT_SIDE_RIGHT            = 0x400u,
+                2,  // AUDIO_CHANNEL_OUT_TOP_CENTER            = 0x800u,
+                0,  // AUDIO_CHANNEL_OUT_TOP_FRONT_LEFT        = 0x1000u,
+                2,  // AUDIO_CHANNEL_OUT_TOP_FRONT_CENTER      = 0x2000u,
+                1,  // AUDIO_CHANNEL_OUT_TOP_FRONT_RIGHT       = 0x4000u,
+                0,  // AUDIO_CHANNEL_OUT_TOP_BACK_LEFT         = 0x8000u,
+                2,  // AUDIO_CHANNEL_OUT_TOP_BACK_CENTER       = 0x10000u,
+                1,  // AUDIO_CHANNEL_OUT_TOP_BACK_RIGHT        = 0x20000u,
+                0,  // AUDIO_CHANNEL_OUT_TOP_SIDE_LEFT         = 0x40000u,
+                1,  // AUDIO_CHANNEL_OUT_TOP_SIDE_RIGHT        = 0x80000u
         };
-        if (pInstance1->Target <= LVM_VOL_BAL_THR ||
-            pInstance2->Target <= LVM_VOL_BAL_THR)
-        {
-            Target_ctr.Target  = 0.0f;
+        if (pInstance1->Target <= LVM_VOL_BAL_THR || pInstance2->Target <= LVM_VOL_BAL_THR) {
+            Target_ctr.Target = 0.0f;
             Target_ctr.Current = 0.0f;
-            Target_ctr.Delta   = 0.0f;
+            Target_ctr.Delta = 0.0f;
         }
         const unsigned int idxArrSize = ARRAY_SIZE(mixInstIdx);
-        for (unsigned int i = 0, channel = ChMask; channel !=0 ; ++i)
-        {
+        for (unsigned int i = 0, channel = ChMask; channel != 0; ++i) {
             const unsigned int idx = __builtin_ctz(channel);
-            if (idx < idxArrSize)
-            {
+            if (idx < idxArrSize) {
                 pInstance[i] = pMixPrivInst[mixInstIdx[idx]];
-            }
-            else
-            {
+            } else {
                 pInstance[i] = pMixPrivInst[2];
             }
             channel &= ~(1 << idx);
         }
     }
 
-    if (NrFrames <= 0)    return;
+    if (NrFrames <= 0) return;
 
     /******************************************************************************
        SOFT MIXING
     *******************************************************************************/
 
     if ((pInstance1->Current != pInstance1->Target) ||
-        (pInstance2->Current != pInstance2->Target))
-    {
+        (pInstance2->Current != pInstance2->Target)) {
         // TODO: combine similar checks below.
-        if (pInstance1->Delta == LVM_MAXFLOAT
-                || Abs_Float(pInstance1->Current - pInstance1->Target) < pInstance1->Delta)
-        {
+        if (pInstance1->Delta == LVM_MAXFLOAT ||
+            Abs_Float(pInstance1->Current - pInstance1->Target) < pInstance1->Delta) {
             /* Difference is not significant anymore. Make them equal. */
             pInstance1->Current = pInstance1->Target;
             TargetGain = pInstance1->Target;
             LVC_Mixer_SetTarget(&(ptrInstance->MixerStream[0]), TargetGain);
-        }
-        else
-        {
+        } else {
             /* Soft mixing has to be applied */
             HardMixing = FALSE;
         }
 
-        if (HardMixing == TRUE)
-        {
-            if (pInstance2->Delta == LVM_MAXFLOAT
-                    || Abs_Float(pInstance2->Current - pInstance2->Target) < pInstance2->Delta)
-            {
+        if (HardMixing == TRUE) {
+            if (pInstance2->Delta == LVM_MAXFLOAT ||
+                Abs_Float(pInstance2->Current - pInstance2->Target) < pInstance2->Delta) {
                 /* Difference is not significant anymore. Make them equal. */
                 pInstance2->Current = pInstance2->Target;
                 TargetGain = pInstance2->Target;
                 LVC_Mixer_SetTarget(&(ptrInstance->MixerStream[1]), TargetGain);
-            }
-            else
-            {
+            } else {
                 /* Soft mixing has to be applied */
                 HardMixing = FALSE;
             }
         }
 
-        if (HardMixing == FALSE)
-        {
-             LVC_Core_MixSoft_1St_MC_float_WRA (&pInstance[0],
-                                                 src, dst, NrFrames, NrChannels);
+        if (HardMixing == FALSE) {
+            LVC_Core_MixSoft_1St_MC_float_WRA(&pInstance[0], src, dst, NrFrames, NrChannels);
         }
     }
 
@@ -174,19 +147,13 @@
        HARD MIXING
     *******************************************************************************/
 
-    if (HardMixing == TRUE)
-    {
-        if ((pInstance1->Target == LVM_MAXFLOAT) && (pInstance2->Target == LVM_MAXFLOAT))
-        {
-            if (src != dst)
-            {
-                Copy_Float(src, dst, NrFrames*NrChannels);
+    if (HardMixing == TRUE) {
+        if ((pInstance1->Target == LVM_MAXFLOAT) && (pInstance2->Target == LVM_MAXFLOAT)) {
+            if (src != dst) {
+                Copy_Float(src, dst, NrFrames * NrChannels);
             }
-        }
-        else
-        {
-            LVC_Core_MixHard_1St_MC_float_SAT(&(pInstance[0]),
-                                               src, dst, NrFrames, NrChannels);
+        } else {
+            LVC_Core_MixHard_1St_MC_float_SAT(&(pInstance[0]), src, dst, NrFrames, NrChannels);
         }
     }
 
@@ -194,109 +161,86 @@
        CALL BACK
     *******************************************************************************/
 
-    if (ptrInstance->MixerStream[0].CallbackSet)
-    {
-        if (Abs_Float(pInstance1->Current - pInstance1->Target) < pInstance1->Delta)
-        {
+    if (ptrInstance->MixerStream[0].CallbackSet) {
+        if (Abs_Float(pInstance1->Current - pInstance1->Target) < pInstance1->Delta) {
             pInstance1->Current = pInstance1->Target; /* Difference is not significant anymore. \
                                                          Make them equal. */
             TargetGain = pInstance1->Target;
             LVC_Mixer_SetTarget(&ptrInstance->MixerStream[0], TargetGain);
             ptrInstance->MixerStream[0].CallbackSet = FALSE;
-            if (ptrInstance->MixerStream[0].pCallBack != 0)
-            {
-                (*ptrInstance->MixerStream[0].pCallBack) (\
-                    ptrInstance->MixerStream[0].pCallbackHandle,
-                    ptrInstance->MixerStream[0].pGeneralPurpose,
-                    ptrInstance->MixerStream[0].CallbackParam);
+            if (ptrInstance->MixerStream[0].pCallBack != 0) {
+                (*ptrInstance->MixerStream[0].pCallBack)(
+                        ptrInstance->MixerStream[0].pCallbackHandle,
+                        ptrInstance->MixerStream[0].pGeneralPurpose,
+                        ptrInstance->MixerStream[0].CallbackParam);
             }
         }
     }
-    if (ptrInstance->MixerStream[1].CallbackSet)
-    {
-        if (Abs_Float(pInstance2->Current - pInstance2->Target) < pInstance2->Delta)
-        {
+    if (ptrInstance->MixerStream[1].CallbackSet) {
+        if (Abs_Float(pInstance2->Current - pInstance2->Target) < pInstance2->Delta) {
             pInstance2->Current = pInstance2->Target; /* Difference is not significant anymore.
                                                          Make them equal. */
             TargetGain = pInstance2->Target;
             LVC_Mixer_SetTarget(&ptrInstance->MixerStream[1], TargetGain);
             ptrInstance->MixerStream[1].CallbackSet = FALSE;
-            if (ptrInstance->MixerStream[1].pCallBack != 0)
-            {
-                (*ptrInstance->MixerStream[1].pCallBack) (\
-                    ptrInstance->MixerStream[1].pCallbackHandle,
-                    ptrInstance->MixerStream[1].pGeneralPurpose,
-                    ptrInstance->MixerStream[1].CallbackParam);
+            if (ptrInstance->MixerStream[1].pCallBack != 0) {
+                (*ptrInstance->MixerStream[1].pCallBack)(
+                        ptrInstance->MixerStream[1].pCallbackHandle,
+                        ptrInstance->MixerStream[1].pGeneralPurpose,
+                        ptrInstance->MixerStream[1].CallbackParam);
             }
         }
     }
 }
-#endif
-void LVC_MixSoft_1St_2i_D16C31_SAT( LVMixer3_2St_FLOAT_st *ptrInstance,
-                                    const LVM_FLOAT             *src,
-                                    LVM_FLOAT             *dst,
-                                    LVM_INT16             n)
-{
-    char        HardMixing = TRUE;
-    LVM_FLOAT   TargetGain;
-    Mix_Private_FLOAT_st  *pInstance1 = \
-                              (Mix_Private_FLOAT_st *)(ptrInstance->MixerStream[0].PrivateParams);
-    Mix_Private_FLOAT_st  *pInstance2 = \
-                              (Mix_Private_FLOAT_st *)(ptrInstance->MixerStream[1].PrivateParams);
+void LVC_MixSoft_1St_2i_D16C31_SAT(LVMixer3_2St_FLOAT_st* ptrInstance, const LVM_FLOAT* src,
+                                   LVM_FLOAT* dst, LVM_INT16 n) {
+    char HardMixing = TRUE;
+    LVM_FLOAT TargetGain;
+    Mix_Private_FLOAT_st* pInstance1 =
+            (Mix_Private_FLOAT_st*)(ptrInstance->MixerStream[0].PrivateParams);
+    Mix_Private_FLOAT_st* pInstance2 =
+            (Mix_Private_FLOAT_st*)(ptrInstance->MixerStream[1].PrivateParams);
 
-    if(n <= 0)    return;
+    if (n <= 0) return;
 
     /******************************************************************************
        SOFT MIXING
     *******************************************************************************/
-    if ((pInstance1->Current != pInstance1->Target) || (pInstance2->Current != pInstance2->Target))
-    {
-        if(pInstance1->Delta == 1.0f)
-        {
+    if ((pInstance1->Current != pInstance1->Target) ||
+        (pInstance2->Current != pInstance2->Target)) {
+        if (pInstance1->Delta == 1.0f) {
             pInstance1->Current = pInstance1->Target;
             TargetGain = pInstance1->Target;
             LVC_Mixer_SetTarget(&(ptrInstance->MixerStream[0]), TargetGain);
-        }
-        else if (Abs_Float(pInstance1->Current - pInstance1->Target) < pInstance1->Delta)
-        {
+        } else if (Abs_Float(pInstance1->Current - pInstance1->Target) < pInstance1->Delta) {
             pInstance1->Current = pInstance1->Target; /* Difference is not significant anymore. \
                                                          Make them equal. */
             TargetGain = pInstance1->Target;
             LVC_Mixer_SetTarget(&(ptrInstance->MixerStream[0]), TargetGain);
-        }
-        else
-        {
+        } else {
             /* Soft mixing has to be applied */
             HardMixing = FALSE;
         }
 
-        if(HardMixing == TRUE)
-        {
-            if(pInstance2->Delta == 1.0f)
-            {
+        if (HardMixing == TRUE) {
+            if (pInstance2->Delta == 1.0f) {
                 pInstance2->Current = pInstance2->Target;
                 TargetGain = pInstance2->Target;
                 LVC_Mixer_SetTarget(&(ptrInstance->MixerStream[1]), TargetGain);
-            }
-            else if (Abs_Float(pInstance2->Current - pInstance2->Target) < pInstance2->Delta)
-            {
-                pInstance2->Current = pInstance2->Target; /* Difference is not significant anymore. \
-                                                             Make them equal. */
+            } else if (Abs_Float(pInstance2->Current - pInstance2->Target) < pInstance2->Delta) {
+                pInstance2->Current = pInstance2->Target; /* Difference is not significant anymore.
+                                                             Make them equal. */
                 TargetGain = pInstance2->Target;
                 LVC_Mixer_SetTarget(&(ptrInstance->MixerStream[1]), TargetGain);
-            }
-            else
-            {
+            } else {
                 /* Soft mixing has to be applied */
                 HardMixing = FALSE;
             }
         }
 
-        if(HardMixing == FALSE)
-        {
-             LVC_Core_MixSoft_1St_2i_D16C31_WRA( &(ptrInstance->MixerStream[0]),
-                                                 &(ptrInstance->MixerStream[1]),
-                                                 src, dst, n);
+        if (HardMixing == FALSE) {
+            LVC_Core_MixSoft_1St_2i_D16C31_WRA(&(ptrInstance->MixerStream[0]),
+                                               &(ptrInstance->MixerStream[1]), src, dst, n);
         }
     }
 
@@ -304,20 +248,14 @@
        HARD MIXING
     *******************************************************************************/
 
-    if (HardMixing)
-    {
-        if ((pInstance1->Target == 1.0f) && (pInstance2->Target == 1.0f))
-        {
-            if(src != dst)
-            {
+    if (HardMixing) {
+        if ((pInstance1->Target == 1.0f) && (pInstance2->Target == 1.0f)) {
+            if (src != dst) {
                 Copy_Float(src, dst, n);
             }
-        }
-        else
-        {
+        } else {
             LVC_Core_MixHard_1St_2i_D16C31_SAT(&(ptrInstance->MixerStream[0]),
-                                               &(ptrInstance->MixerStream[1]),
-                                               src, dst, n);
+                                               &(ptrInstance->MixerStream[1]), src, dst, n);
         }
     }
 
@@ -325,39 +263,33 @@
        CALL BACK
     *******************************************************************************/
 
-    if (ptrInstance->MixerStream[0].CallbackSet)
-    {
-        if (Abs_Float(pInstance1->Current - pInstance1->Target) < pInstance1->Delta)
-        {
+    if (ptrInstance->MixerStream[0].CallbackSet) {
+        if (Abs_Float(pInstance1->Current - pInstance1->Target) < pInstance1->Delta) {
             pInstance1->Current = pInstance1->Target; /* Difference is not significant anymore. \
                                                          Make them equal. */
             TargetGain = pInstance1->Target;
             LVC_Mixer_SetTarget(&ptrInstance->MixerStream[0], TargetGain);
             ptrInstance->MixerStream[0].CallbackSet = FALSE;
-            if (ptrInstance->MixerStream[0].pCallBack != 0)
-            {
-                (*ptrInstance->MixerStream[0].pCallBack) ( \
-                                                ptrInstance->MixerStream[0].pCallbackHandle,
-                                                ptrInstance->MixerStream[0].pGeneralPurpose,
-                                                ptrInstance->MixerStream[0].CallbackParam );
+            if (ptrInstance->MixerStream[0].pCallBack != 0) {
+                (*ptrInstance->MixerStream[0].pCallBack)(
+                        ptrInstance->MixerStream[0].pCallbackHandle,
+                        ptrInstance->MixerStream[0].pGeneralPurpose,
+                        ptrInstance->MixerStream[0].CallbackParam);
             }
         }
     }
-    if (ptrInstance->MixerStream[1].CallbackSet)
-    {
-        if (Abs_Float(pInstance2->Current - pInstance2->Target) < pInstance2->Delta)
-        {
+    if (ptrInstance->MixerStream[1].CallbackSet) {
+        if (Abs_Float(pInstance2->Current - pInstance2->Target) < pInstance2->Delta) {
             pInstance2->Current = pInstance2->Target; /* Difference is not significant anymore.
                                                          Make them equal. */
             TargetGain = pInstance2->Target;
             LVC_Mixer_SetTarget(&ptrInstance->MixerStream[1], TargetGain);
             ptrInstance->MixerStream[1].CallbackSet = FALSE;
-            if (ptrInstance->MixerStream[1].pCallBack != 0)
-            {
-                (*ptrInstance->MixerStream[1].pCallBack) (
-                                                ptrInstance->MixerStream[1].pCallbackHandle,
-                                                ptrInstance->MixerStream[1].pGeneralPurpose,
-                                                ptrInstance->MixerStream[1].CallbackParam );
+            if (ptrInstance->MixerStream[1].pCallBack != 0) {
+                (*ptrInstance->MixerStream[1].pCallBack)(
+                        ptrInstance->MixerStream[1].pCallbackHandle,
+                        ptrInstance->MixerStream[1].pGeneralPurpose,
+                        ptrInstance->MixerStream[1].CallbackParam);
             }
         }
     }
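
A short sketch of how the stereo per-channel variant retained above is typically driven, assuming the same LVC_Mixer_Init / LVC_Mixer_SetTimeConstant setup used elsewhere; stereoBuf, nrFrames, and the 10 ms ramp are illustrative:

    // Apply independent left/right gains to an interleaved stereo buffer, in place
    // (the header notes that dst may equal src for this function).
    LVMixer3_2St_FLOAT_st balance;
    std::memset(&balance, 0, sizeof(balance));
    LVC_Mixer_Init(&balance.MixerStream[0], /*left Target=*/1.0f, /*Current=*/1.0f);
    LVC_Mixer_Init(&balance.MixerStream[1], /*right Target=*/0.5f, /*Current=*/1.0f);
    LVC_Mixer_SetTimeConstant(&balance.MixerStream[0], 10, LVM_FS_48000, 2);
    LVC_Mixer_SetTimeConstant(&balance.MixerStream[1], 10, LVM_FS_48000, 2);
    LVC_MixSoft_1St_2i_D16C31_SAT(&balance, stereoBuf, stereoBuf, /*n=*/nrFrames);
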
diff --git a/media/libeffects/lvm/lib/Common/src/LVC_MixSoft_1St_D16C31_SAT.cpp b/media/libeffects/lvm/lib/Common/src/LVC_MixSoft_1St_D16C31_SAT.cpp
index 4d229da..be19fa0 100644
--- a/media/libeffects/lvm/lib/Common/src/LVC_MixSoft_1St_D16C31_SAT.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVC_MixSoft_1St_D16C31_SAT.cpp
@@ -27,39 +27,35 @@
    DEFINITIONS
 ***********************************************************************************/
 
-#define TRUE          1
-#define FALSE         0
+#define TRUE 1
+#define FALSE 0
 
 /**********************************************************************************
    FUNCTION LVMixer3_MIXSOFT_1ST_D16C31_SAT
 ***********************************************************************************/
-void LVC_MixSoft_1St_D16C31_SAT( LVMixer3_1St_FLOAT_st *ptrInstance,
-                                  const LVM_FLOAT             *src,
-                                        LVM_FLOAT             *dst,
-                                        LVM_INT16             n)
-{
-    char        HardMixing = TRUE;
-    LVM_FLOAT   TargetGain;
-    Mix_Private_FLOAT_st  *pInstance = \
-                          (Mix_Private_FLOAT_st *)(ptrInstance->MixerStream[0].PrivateParams);
+void LVC_MixSoft_1St_D16C31_SAT(LVMixer3_1St_FLOAT_st* ptrInstance, const LVM_FLOAT* src,
+                                LVM_FLOAT* dst, LVM_INT16 n) {
+    char HardMixing = TRUE;
+    LVM_FLOAT TargetGain;
+    Mix_Private_FLOAT_st* pInstance =
+            (Mix_Private_FLOAT_st*)(ptrInstance->MixerStream[0].PrivateParams);
 
-    if(n <= 0)    return;
+    if (n <= 0) return;
 
     /******************************************************************************
        SOFT MIXING
     *******************************************************************************/
-    if (pInstance->Current != pInstance->Target)
-    {
-        if(pInstance->Delta == 1.0f){
+    if (pInstance->Current != pInstance->Target) {
+        if (pInstance->Delta == 1.0f) {
             pInstance->Current = pInstance->Target;
             TargetGain = pInstance->Target;
             LVC_Mixer_SetTarget(&(ptrInstance->MixerStream[0]), TargetGain);
-        }else if (Abs_Float(pInstance->Current - pInstance->Target) < pInstance->Delta){
+        } else if (Abs_Float(pInstance->Current - pInstance->Target) < pInstance->Delta) {
             pInstance->Current = pInstance->Target; /* Difference is not significant anymore. \
                                                        Make them equal. */
             TargetGain = pInstance->Target;
             LVC_Mixer_SetTarget(&(ptrInstance->MixerStream[0]), TargetGain);
-        }else{
+        } else {
             /* Soft mixing has to be applied */
             HardMixing = FALSE;
             LVC_Core_MixSoft_1St_D16C31_WRA(&(ptrInstance->MixerStream[0]), src, dst, n);
@@ -70,39 +66,37 @@
        HARD MIXING
     *******************************************************************************/
 
-    if (HardMixing){
+    if (HardMixing) {
         if (pInstance->Target == 0)
             LoadConst_Float(0.0, dst, n);
         else {
             if ((pInstance->Target) != 1.0f)
                 Mult3s_Float(src, (pInstance->Target), dst, n);
-            else if(src != dst)
+            else if (src != dst)
                 Copy_Float(src, dst, n);
         }
-
     }
 
     /******************************************************************************
        CALL BACK
     *******************************************************************************/
 
-    if (ptrInstance->MixerStream[0].CallbackSet){
-        if (Abs_Float(pInstance->Current - pInstance->Target) < pInstance->Delta){
+    if (ptrInstance->MixerStream[0].CallbackSet) {
+        if (Abs_Float(pInstance->Current - pInstance->Target) < pInstance->Delta) {
             pInstance->Current = pInstance->Target; /* Difference is not significant anymore. \
                                                        Make them equal. */
             TargetGain = pInstance->Target;
             LVC_Mixer_SetTarget(ptrInstance->MixerStream, TargetGain);
             ptrInstance->MixerStream[0].CallbackSet = FALSE;
-            if (ptrInstance->MixerStream[0].pCallBack != 0){
-                (*ptrInstance->MixerStream[0].pCallBack) ( \
-                                                ptrInstance->MixerStream[0].pCallbackHandle,
-                                                ptrInstance->MixerStream[0].pGeneralPurpose,
-                                                ptrInstance->MixerStream[0].CallbackParam );
+            if (ptrInstance->MixerStream[0].pCallBack != 0) {
+                (*ptrInstance->MixerStream[0].pCallBack)(
+                        ptrInstance->MixerStream[0].pCallbackHandle,
+                        ptrInstance->MixerStream[0].pGeneralPurpose,
+                        ptrInstance->MixerStream[0].CallbackParam);
             }
         }
     }
 }
-#ifdef SUPPORT_MC
 /*
  * FUNCTION:       LVC_MixSoft_Mc_D16C31_SAT
  *
@@ -120,40 +114,32 @@
  *  void
  *
  */
-void LVC_MixSoft_Mc_D16C31_SAT(LVMixer3_1St_FLOAT_st *ptrInstance,
-                                  const LVM_FLOAT      *src,
-                                        LVM_FLOAT      *dst,
-                                        LVM_INT16      NrFrames,
-                                        LVM_INT16      NrChannels)
-{
-    char        HardMixing = TRUE;
-    LVM_FLOAT   TargetGain;
-    Mix_Private_FLOAT_st  *pInstance = \
-                          (Mix_Private_FLOAT_st *)(ptrInstance->MixerStream[0].PrivateParams);
+void LVC_MixSoft_Mc_D16C31_SAT(LVMixer3_1St_FLOAT_st* ptrInstance, const LVM_FLOAT* src,
+                               LVM_FLOAT* dst, LVM_INT16 NrFrames, LVM_INT16 NrChannels) {
+    char HardMixing = TRUE;
+    LVM_FLOAT TargetGain;
+    Mix_Private_FLOAT_st* pInstance =
+            (Mix_Private_FLOAT_st*)(ptrInstance->MixerStream[0].PrivateParams);
 
-    if (NrFrames <= 0)    return;
+    if (NrFrames <= 0) return;
 
     /******************************************************************************
        SOFT MIXING
     *******************************************************************************/
-    if (pInstance->Current != pInstance->Target)
-    {
+    if (pInstance->Current != pInstance->Target) {
         if (pInstance->Delta == 1.0f) {
             pInstance->Current = pInstance->Target;
             TargetGain = pInstance->Target;
             LVC_Mixer_SetTarget(&(ptrInstance->MixerStream[0]), TargetGain);
-        }else if (Abs_Float(pInstance->Current - pInstance->Target) < pInstance->Delta) {
+        } else if (Abs_Float(pInstance->Current - pInstance->Target) < pInstance->Delta) {
             pInstance->Current = pInstance->Target; /* Difference is not significant anymore. \
                                                        Make them equal. */
             TargetGain = pInstance->Target;
             LVC_Mixer_SetTarget(&(ptrInstance->MixerStream[0]), TargetGain);
-        }else{
+        } else {
             /* Soft mixing has to be applied */
             HardMixing = FALSE;
-            LVC_Core_MixSoft_Mc_D16C31_WRA(&(ptrInstance->MixerStream[0]),
-                                           src,
-                                           dst,
-                                           NrFrames,
+            LVC_Core_MixSoft_Mc_D16C31_WRA(&(ptrInstance->MixerStream[0]), src, dst, NrFrames,
                                            NrChannels);
         }
     }
@@ -171,7 +157,6 @@
             else if (src != dst)
                 Copy_Float(src, dst, NrFrames * NrChannels);
         }
-
     }
 
     /******************************************************************************
@@ -186,15 +171,13 @@
             LVC_Mixer_SetTarget(ptrInstance->MixerStream, TargetGain);
             ptrInstance->MixerStream[0].CallbackSet = FALSE;
             if (ptrInstance->MixerStream[0].pCallBack != 0) {
-                (*ptrInstance->MixerStream[0].pCallBack) (\
-                                                ptrInstance->MixerStream[0].pCallbackHandle,
-                                                ptrInstance->MixerStream[0].pGeneralPurpose,
-                                                ptrInstance->MixerStream[0].CallbackParam);
+                (*ptrInstance->MixerStream[0].pCallBack)(
+                        ptrInstance->MixerStream[0].pCallbackHandle,
+                        ptrInstance->MixerStream[0].pGeneralPurpose,
+                        ptrInstance->MixerStream[0].CallbackParam);
             }
         }
     }
 }
 
-#endif
-
 /**********************************************************************************/
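As with the mix-in case, a hedged sketch of fading a buffer out with the multichannel single-stream mixer; the buffer names and the 10 ms time constant are placeholders:

    // Fade an interleaved multichannel buffer toward silence.
    LVMixer3_1St_FLOAT_st fade;
    std::memset(&fade, 0, sizeof(fade));
    LVC_Mixer_Init(&fade.MixerStream[0], /*TargetGain=*/0.0f, /*CurrentGain=*/1.0f);
    LVC_Mixer_SetTimeConstant(&fade.MixerStream[0], /*Tc_millisec=*/10, LVM_FS_48000, nrChannels);
    // Soft mixing ramps the gain; once Current == Target == 0 the hard-mixing branch
    // writes zeros outright instead of scaling sample by sample.
    LVC_MixSoft_Mc_D16C31_SAT(&fade, src, dst, nrFrames, nrChannels);
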
diff --git a/media/libeffects/lvm/lib/Common/src/LVC_MixSoft_2St_D16C31_SAT.cpp b/media/libeffects/lvm/lib/Common/src/LVC_MixSoft_2St_D16C31_SAT.cpp
index 54ab79d..882a8ce 100644
--- a/media/libeffects/lvm/lib/Common/src/LVC_MixSoft_2St_D16C31_SAT.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVC_MixSoft_2St_D16C31_SAT.cpp
@@ -25,49 +25,39 @@
 /**********************************************************************************
    FUNCTION LVC_MixSoft_2St_D16C31_SAT.c
 ***********************************************************************************/
-void LVC_MixSoft_2St_D16C31_SAT(LVMixer3_2St_FLOAT_st *ptrInstance,
-                                const LVM_FLOAT       *src1,
-                                const LVM_FLOAT       *src2,
-                                      LVM_FLOAT       *dst,
-                                      LVM_INT16       n)
-{
-    Mix_Private_FLOAT_st  *pInstance1 = \
-                             (Mix_Private_FLOAT_st *)(ptrInstance->MixerStream[0].PrivateParams);
-    Mix_Private_FLOAT_st  *pInstance2 = \
-                             (Mix_Private_FLOAT_st *)(ptrInstance->MixerStream[1].PrivateParams);
+void LVC_MixSoft_2St_D16C31_SAT(LVMixer3_2St_FLOAT_st* ptrInstance, const LVM_FLOAT* src1,
+                                const LVM_FLOAT* src2, LVM_FLOAT* dst, LVM_INT16 n) {
+    Mix_Private_FLOAT_st* pInstance1 =
+            (Mix_Private_FLOAT_st*)(ptrInstance->MixerStream[0].PrivateParams);
+    Mix_Private_FLOAT_st* pInstance2 =
+            (Mix_Private_FLOAT_st*)(ptrInstance->MixerStream[1].PrivateParams);
 
-    if(n <= 0)    return;
+    if (n <= 0) return;
 
     /******************************************************************************
        SOFT MIXING
     *******************************************************************************/
-    if ((pInstance1->Current == pInstance1->Target) && (pInstance1->Current == 0)){
-        LVC_MixSoft_1St_D16C31_SAT((LVMixer3_1St_FLOAT_st *)(&ptrInstance->MixerStream[1]),
-                                    src2, dst, n);
-    }
-    else if ((pInstance2->Current == pInstance2->Target) && (pInstance2->Current == 0)){
-        LVC_MixSoft_1St_D16C31_SAT((LVMixer3_1St_FLOAT_st *)(&ptrInstance->MixerStream[0]),
-                                    src1, dst, n);
-    }
-    else if ((pInstance1->Current != pInstance1->Target) || \
-                                    (pInstance2->Current != pInstance2->Target))
-    {
-        LVC_MixSoft_1St_D16C31_SAT((LVMixer3_1St_FLOAT_st *)(&ptrInstance->MixerStream[0]),
-                                   src1, dst, n);
-        LVC_MixInSoft_D16C31_SAT((LVMixer3_1St_FLOAT_st *)(&ptrInstance->MixerStream[1]),
-                                  src2, dst, n);
-    }
-    else{
+    if ((pInstance1->Current == pInstance1->Target) && (pInstance1->Current == 0)) {
+        LVC_MixSoft_1St_D16C31_SAT((LVMixer3_1St_FLOAT_st*)(&ptrInstance->MixerStream[1]), src2,
+                                   dst, n);
+    } else if ((pInstance2->Current == pInstance2->Target) && (pInstance2->Current == 0)) {
+        LVC_MixSoft_1St_D16C31_SAT((LVMixer3_1St_FLOAT_st*)(&ptrInstance->MixerStream[0]), src1,
+                                   dst, n);
+    } else if ((pInstance1->Current != pInstance1->Target) ||
+               (pInstance2->Current != pInstance2->Target)) {
+        LVC_MixSoft_1St_D16C31_SAT((LVMixer3_1St_FLOAT_st*)(&ptrInstance->MixerStream[0]), src1,
+                                   dst, n);
+        LVC_MixInSoft_D16C31_SAT((LVMixer3_1St_FLOAT_st*)(&ptrInstance->MixerStream[1]), src2, dst,
+                                 n);
+    } else {
         /******************************************************************************
            HARD MIXING
         *******************************************************************************/
-        LVC_Core_MixHard_2St_D16C31_SAT( &ptrInstance->MixerStream[0],
-                                         &ptrInstance->MixerStream[1],
-                                         src1, src2, dst, n);
+        LVC_Core_MixHard_2St_D16C31_SAT(&ptrInstance->MixerStream[0], &ptrInstance->MixerStream[1],
+                                        src1, src2, dst, n);
     }
 }
 
-#ifdef SUPPORT_MC
 /*
  * FUNCTION:       LVC_MixSoft_2Mc_D16C31_SAT
  *
@@ -86,48 +76,38 @@
  *  void
  *
  */
-void LVC_MixSoft_2Mc_D16C31_SAT(LVMixer3_2St_FLOAT_st *ptrInstance,
-                                const LVM_FLOAT       *src1,
-                                const LVM_FLOAT       *src2,
-                                      LVM_FLOAT       *dst,
-                                      LVM_INT16       NrFrames,
-                                      LVM_INT16       NrChannels)
-{
-    Mix_Private_FLOAT_st  *pInstance1 = \
-                             (Mix_Private_FLOAT_st *)(ptrInstance->MixerStream[0].PrivateParams);
-    Mix_Private_FLOAT_st  *pInstance2 = \
-                             (Mix_Private_FLOAT_st *)(ptrInstance->MixerStream[1].PrivateParams);
+void LVC_MixSoft_2Mc_D16C31_SAT(LVMixer3_2St_FLOAT_st* ptrInstance, const LVM_FLOAT* src1,
+                                const LVM_FLOAT* src2, LVM_FLOAT* dst, LVM_INT16 NrFrames,
+                                LVM_INT16 NrChannels) {
+    Mix_Private_FLOAT_st* pInstance1 =
+            (Mix_Private_FLOAT_st*)(ptrInstance->MixerStream[0].PrivateParams);
+    Mix_Private_FLOAT_st* pInstance2 =
+            (Mix_Private_FLOAT_st*)(ptrInstance->MixerStream[1].PrivateParams);
 
-    if (NrFrames <= 0)    return;
+    if (NrFrames <= 0) return;
 
     /******************************************************************************
        SOFT MIXING
     *******************************************************************************/
     if ((pInstance1->Current == pInstance1->Target) && (pInstance1->Current == 0)) {
-        LVC_MixSoft_Mc_D16C31_SAT((LVMixer3_1St_FLOAT_st *)(&ptrInstance->MixerStream[1]),
-                                    src2, dst, NrFrames, NrChannels);
-    }
-    else if ((pInstance2->Current == pInstance2->Target) && (pInstance2->Current == 0)) {
-        LVC_MixSoft_Mc_D16C31_SAT((LVMixer3_1St_FLOAT_st *)(&ptrInstance->MixerStream[0]),
-                                    src1, dst, NrFrames, NrChannels);
-    }
-    else if ((pInstance1->Current != pInstance1->Target) || \
-                                    (pInstance2->Current != pInstance2->Target))
-    {
-        LVC_MixSoft_Mc_D16C31_SAT((LVMixer3_1St_FLOAT_st *)(&ptrInstance->MixerStream[0]),
-                                   src1, dst, NrFrames, NrChannels);
-        LVC_MixInSoft_Mc_D16C31_SAT((LVMixer3_1St_FLOAT_st *)(&ptrInstance->MixerStream[1]),
-                                   src2, dst, NrFrames, NrChannels);
-    }
-    else{
+        LVC_MixSoft_Mc_D16C31_SAT((LVMixer3_1St_FLOAT_st*)(&ptrInstance->MixerStream[1]), src2, dst,
+                                  NrFrames, NrChannels);
+    } else if ((pInstance2->Current == pInstance2->Target) && (pInstance2->Current == 0)) {
+        LVC_MixSoft_Mc_D16C31_SAT((LVMixer3_1St_FLOAT_st*)(&ptrInstance->MixerStream[0]), src1, dst,
+                                  NrFrames, NrChannels);
+    } else if ((pInstance1->Current != pInstance1->Target) ||
+               (pInstance2->Current != pInstance2->Target)) {
+        LVC_MixSoft_Mc_D16C31_SAT((LVMixer3_1St_FLOAT_st*)(&ptrInstance->MixerStream[0]), src1, dst,
+                                  NrFrames, NrChannels);
+        LVC_MixInSoft_Mc_D16C31_SAT((LVMixer3_1St_FLOAT_st*)(&ptrInstance->MixerStream[1]), src2,
+                                    dst, NrFrames, NrChannels);
+    } else {
         /******************************************************************************
            HARD MIXING
         *******************************************************************************/
-        LVC_Core_MixHard_2St_D16C31_SAT(&ptrInstance->MixerStream[0],
-                                        &ptrInstance->MixerStream[1],
+        LVC_Core_MixHard_2St_D16C31_SAT(&ptrInstance->MixerStream[0], &ptrInstance->MixerStream[1],
                                         src1, src2, dst, NrFrames * NrChannels);
     }
 }
-#endif
 
 /**********************************************************************************/
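A sketch of the two-stream multichannel variant used as a linear cross-fade; srcA, srcB, dst, and the 50 ms ramp are illustrative, and dst must not alias src2 per the comment in LVC_Mixer.h:

    // Cross-fade from srcA to srcB into dst.
    LVMixer3_2St_FLOAT_st xfade;
    std::memset(&xfade, 0, sizeof(xfade));
    LVC_Mixer_Init(&xfade.MixerStream[0], /*TargetGain=*/0.0f, /*CurrentGain=*/1.0f);  // srcA out
    LVC_Mixer_Init(&xfade.MixerStream[1], /*TargetGain=*/1.0f, /*CurrentGain=*/0.0f);  // srcB in
    LVC_Mixer_SetTimeConstant(&xfade.MixerStream[0], 50, LVM_FS_48000, nrChannels);
    LVC_Mixer_SetTimeConstant(&xfade.MixerStream[1], 50, LVM_FS_48000, nrChannels);
    LVC_MixSoft_2Mc_D16C31_SAT(&xfade, srcA, srcB, dst, nrFrames, nrChannels);
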
diff --git a/media/libeffects/lvm/lib/Common/src/LVC_Mixer.h b/media/libeffects/lvm/lib/Common/src/LVC_Mixer.h
index ce42d2e..55255a6 100644
--- a/media/libeffects/lvm/lib/Common/src/LVC_Mixer.h
+++ b/media/libeffects/lvm/lib/Common/src/LVC_Mixer.h
@@ -25,34 +25,31 @@
 ***********************************************************************************/
 
 /* LVMixer3_st structure stores Instance parameters for one audio stream */
-typedef struct
-{
-    LVM_FLOAT       PrivateParams[3];   /* Private Instance params for \
-                                           Audio Stream shift parameter */
-    LVM_INT16       CallbackSet;        /* Boolean.  Should be set by calling application \
-                                           each time the target value is updated */
-    LVM_INT16       CallbackParam;      /* Parameter that will be used in the calback function */
-    void            *pCallbackHandle;   /* Pointer to the instance of the callback function */
-    void            *pGeneralPurpose;   /* Pointer for general purpose usage */
-    LVM_Callback    pCallBack;          /* Pointer to the callback function */
+typedef struct {
+    LVM_FLOAT PrivateParams[3]; /* Private Instance params for \
+                                   Audio Stream shift parameter */
+    LVM_INT16 CallbackSet;      /* Boolean.  Should be set by calling application \
+                                   each time the target value is updated */
+    LVM_INT16 CallbackParam;    /* Parameter that will be used in the callback function */
+    void* pCallbackHandle;      /* Pointer to the instance of the callback function */
+    void* pGeneralPurpose;      /* Pointer for general purpose usage */
+    LVM_Callback pCallBack;     /* Pointer to the callback function */
 } LVMixer3_FLOAT_st;
-typedef struct
-{
-    LVMixer3_FLOAT_st     MixerStream[1];    /* Instance Params for one Audio Stream */
+typedef struct {
+    LVMixer3_FLOAT_st MixerStream[1]; /* Instance Params for one Audio Stream */
 } LVMixer3_1St_FLOAT_st;
-typedef struct
-{
-    LVMixer3_FLOAT_st     MixerStream[2];    /* Instance Params for two Audio Streams */
+typedef struct {
+    LVMixer3_FLOAT_st MixerStream[2]; /* Instance Params for two Audio Streams */
 } LVMixer3_2St_FLOAT_st;
 /**********************************************************************************
    FUNCTION PROTOTYPES (HIGH LEVEL FUNCTIONS)
 ***********************************************************************************/
 
 /* Function names should be unique within first 16 characters  */
-#define    LVMixer3_MixSoft_1St_D16C31_SAT   LVMixer3_1St_D16C31_SAT_MixSoft
-#define    LVMixer3_MixInSoft_D16C31_SAT     LVMixer3_D16C31_SAT_MixInSoft
-#define    LVMixer3_MixSoft_2St_D16C31_SAT   LVMixer3_2St_D16C31_SAT_MixSoft
-#define    LVMixer3_MixSoft_3St_D16C31_SAT   LVMixer3_3St_D16C31_SAT_MixSoft
+#define LVMixer3_MixSoft_1St_D16C31_SAT LVMixer3_1St_D16C31_SAT_MixSoft
+#define LVMixer3_MixInSoft_D16C31_SAT LVMixer3_D16C31_SAT_MixInSoft
+#define LVMixer3_MixSoft_2St_D16C31_SAT LVMixer3_2St_D16C31_SAT_MixSoft
+#define LVMixer3_MixSoft_3St_D16C31_SAT LVMixer3_3St_D16C31_SAT_MixSoft
 
 /*** General functions ************************************************************/
 
@@ -62,85 +59,52 @@
 /* then the calculation will give an incorrect value for alpha, see the mixer     */
 /* documentation for further details.                                             */
 /* ********************************************************************************/
-void LVC_Mixer_SetTarget( LVMixer3_FLOAT_st *pStream,
-                          LVM_FLOAT        TargetGain);
-LVM_FLOAT LVC_Mixer_GetTarget( LVMixer3_FLOAT_st *pStream);
+void LVC_Mixer_SetTarget(LVMixer3_FLOAT_st* pStream, LVM_FLOAT TargetGain);
+LVM_FLOAT LVC_Mixer_GetTarget(LVMixer3_FLOAT_st* pStream);
 
-LVM_FLOAT LVC_Mixer_GetCurrent( LVMixer3_FLOAT_st *pStream);
+LVM_FLOAT LVC_Mixer_GetCurrent(LVMixer3_FLOAT_st* pStream);
 
-void LVC_Mixer_Init( LVMixer3_FLOAT_st *pStream,
-                     LVM_FLOAT           TargetGain,
-                     LVM_FLOAT           CurrentGain);
+void LVC_Mixer_Init(LVMixer3_FLOAT_st* pStream, LVM_FLOAT TargetGain, LVM_FLOAT CurrentGain);
 
-void LVC_Mixer_SetTimeConstant( LVMixer3_FLOAT_st *pStream,
-                                LVM_INT32           Tc_millisec,
-                                LVM_Fs_en           Fs,
-                                LVM_INT16           NumChannels);
+void LVC_Mixer_SetTimeConstant(LVMixer3_FLOAT_st* pStream, LVM_INT32 Tc_millisec, LVM_Fs_en Fs,
+                               LVM_INT16 NumChannels);
 
-void LVC_Mixer_VarSlope_SetTimeConstant( LVMixer3_FLOAT_st *pStream,
-                                         LVM_INT32           Tc_millisec,
-                                         LVM_Fs_en           Fs,
-                                         LVM_INT16           NumChannels);
+void LVC_Mixer_VarSlope_SetTimeConstant(LVMixer3_FLOAT_st* pStream, LVM_INT32 Tc_millisec,
+                                        LVM_Fs_en Fs, LVM_INT16 NumChannels);
 
 /*** 16 bit functions *************************************************************/
 
-void LVC_MixSoft_1St_D16C31_SAT(LVMixer3_1St_FLOAT_st *pInstance,
-                                const LVM_FLOAT       *src,
-                                      LVM_FLOAT       *dst,
-                                      LVM_INT16       n);
-#ifdef SUPPORT_MC
-void LVC_MixSoft_Mc_D16C31_SAT(LVMixer3_1St_FLOAT_st *pInstance,
-                               const LVM_FLOAT       *src,
-                                     LVM_FLOAT       *dst,
-                                     LVM_INT16       NrFrames,
-                                     LVM_INT16       NrChannels);
-#endif
+void LVC_MixSoft_1St_D16C31_SAT(LVMixer3_1St_FLOAT_st* pInstance, const LVM_FLOAT* src,
+                                LVM_FLOAT* dst, LVM_INT16 n);
+void LVC_MixSoft_Mc_D16C31_SAT(LVMixer3_1St_FLOAT_st* pInstance, const LVM_FLOAT* src,
+                               LVM_FLOAT* dst, LVM_INT16 NrFrames, LVM_INT16 NrChannels);
 
-void LVC_MixInSoft_D16C31_SAT(LVMixer3_1St_FLOAT_st *pInstance,
-                              const LVM_FLOAT       *src,
-                                    LVM_FLOAT       *dst,
-                                    LVM_INT16       n);
-#ifdef SUPPORT_MC
-void LVC_MixInSoft_Mc_D16C31_SAT(LVMixer3_1St_FLOAT_st *pInstance,
-                                 const LVM_FLOAT       *src,
-                                       LVM_FLOAT       *dst,
-                                       LVM_INT16       NrFrames,
-                                       LVM_INT16       NrChannels);
-#endif
+void LVC_MixInSoft_D16C31_SAT(LVMixer3_1St_FLOAT_st* pInstance, const LVM_FLOAT* src,
+                              LVM_FLOAT* dst, LVM_INT16 n);
+void LVC_MixInSoft_Mc_D16C31_SAT(LVMixer3_1St_FLOAT_st* pInstance, const LVM_FLOAT* src,
+                                 LVM_FLOAT* dst, LVM_INT16 NrFrames, LVM_INT16 NrChannels);
 
-void LVC_MixSoft_2St_D16C31_SAT(LVMixer3_2St_FLOAT_st *pInstance,
-                                const LVM_FLOAT       *src1,
-                                const LVM_FLOAT       *src2,
-                                LVM_FLOAT             *dst,  /* dst cannot be equal to src2 */
-                                LVM_INT16             n);
-#ifdef SUPPORT_MC
-void LVC_MixSoft_2Mc_D16C31_SAT(LVMixer3_2St_FLOAT_st *pInstance,
-                                const LVM_FLOAT       *src1,
-                                const LVM_FLOAT       *src2,
-                                LVM_FLOAT             *dst,  /* dst cannot be equal to src2 */
-                                LVM_INT16             NrFrames,
-                                LVM_INT16             NrChannels);
-#endif
+void LVC_MixSoft_2St_D16C31_SAT(LVMixer3_2St_FLOAT_st* pInstance, const LVM_FLOAT* src1,
+                                const LVM_FLOAT* src2,
+                                LVM_FLOAT* dst, /* dst cannot be equal to src2 */
+                                LVM_INT16 n);
+void LVC_MixSoft_2Mc_D16C31_SAT(LVMixer3_2St_FLOAT_st* pInstance, const LVM_FLOAT* src1,
+                                const LVM_FLOAT* src2,
+                                LVM_FLOAT* dst, /* dst cannot be equal to src2 */
+                                LVM_INT16 NrFrames, LVM_INT16 NrChannels);
 /**********************************************************************************/
 /* For applying different gains to Left and right chennals                        */
 /* MixerStream[0] applies to Left channel                                         */
 /* MixerStream[1] applies to Right channel                                        */
 /* Gain values should not be more that 1.0                                        */
 /**********************************************************************************/
-#ifdef SUPPORT_MC
-void LVC_MixSoft_1St_MC_float_SAT(LVMixer3_2St_FLOAT_st *pInstance,
-                                   const   LVM_FLOAT     *src,
-                                   LVM_FLOAT             *dst,   /* dst can be equal to src */
-                                   LVM_INT16             NrFrames,
-                                   LVM_INT32             NrChannels,
-                                   LVM_INT32             ChMask);
-#endif
-void LVC_MixSoft_1St_2i_D16C31_SAT(LVMixer3_2St_FLOAT_st *pInstance,
-                                   const   LVM_FLOAT     *src,
-                                   LVM_FLOAT             *dst,   /* dst can be equal to src */
-                                   LVM_INT16             n);     /* Number of stereo samples */
+void LVC_MixSoft_1St_MC_float_SAT(LVMixer3_2St_FLOAT_st* pInstance, const LVM_FLOAT* src,
+                                  LVM_FLOAT* dst, /* dst can be equal to src */
+                                  LVM_INT16 NrFrames, LVM_INT32 NrChannels, LVM_INT32 ChMask);
+void LVC_MixSoft_1St_2i_D16C31_SAT(LVMixer3_2St_FLOAT_st* pInstance, const LVM_FLOAT* src,
+                                   LVM_FLOAT* dst, /* dst can be equal to src */
+                                   LVM_INT16 n);   /* Number of stereo samples */
 
 /**********************************************************************************/
 
-#endif //#ifndef __LVC_MIXER_H__
-
+#endif  //#ifndef __LVC_MIXER_H__
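
The header above is the whole public surface of these mixers. A small sketch of the typical control flow around it, with OnVolumeChanged and RampStillRunning as hypothetical helper names:

    // Retarget the gain whenever the application volume changes; each subsequent
    // process call ramps Current toward the new Target over the configured time constant.
    void OnVolumeChanged(LVMixer3_1St_FLOAT_st* mixer, LVM_FLOAT newGain) {
        LVC_Mixer_SetTarget(&mixer->MixerStream[0], newGain);
        mixer->MixerStream[0].CallbackSet = 1;  // optional: fire pCallBack once the ramp completes
    }

    bool RampStillRunning(LVMixer3_1St_FLOAT_st* mixer) {
        return LVC_Mixer_GetCurrent(&mixer->MixerStream[0]) !=
               LVC_Mixer_GetTarget(&mixer->MixerStream[0]);
    }
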
diff --git a/media/libeffects/lvm/lib/Common/src/LVC_Mixer_GetCurrent.cpp b/media/libeffects/lvm/lib/Common/src/LVC_Mixer_GetCurrent.cpp
index d0b50e6..03de8b0 100644
--- a/media/libeffects/lvm/lib/Common/src/LVC_Mixer_GetCurrent.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVC_Mixer_GetCurrent.cpp
@@ -30,10 +30,9 @@
 /*  CurrentGain      - CurrentGain value in Q 16.15 format              */
 /*                                                                      */
 /************************************************************************/
-LVM_FLOAT LVC_Mixer_GetCurrent( LVMixer3_FLOAT_st *pStream)
-{
-    LVM_FLOAT       CurrentGain;
-    Mix_Private_FLOAT_st  *pInstance = (Mix_Private_FLOAT_st *)pStream->PrivateParams;
+LVM_FLOAT LVC_Mixer_GetCurrent(LVMixer3_FLOAT_st* pStream) {
+    LVM_FLOAT CurrentGain;
+    Mix_Private_FLOAT_st* pInstance = (Mix_Private_FLOAT_st*)pStream->PrivateParams;
     CurrentGain = pInstance->Current;  // CurrentGain
     return CurrentGain;
 }
diff --git a/media/libeffects/lvm/lib/Common/src/LVC_Mixer_GetTarget.cpp b/media/libeffects/lvm/lib/Common/src/LVC_Mixer_GetTarget.cpp
index 3ae5ba4..21ebac1f 100644
--- a/media/libeffects/lvm/lib/Common/src/LVC_Mixer_GetTarget.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVC_Mixer_GetTarget.cpp
@@ -30,10 +30,9 @@
 /*  TargetGain      - TargetGain value in Q 16.15 format                */
 /*                                                                      */
 /************************************************************************/
-LVM_FLOAT LVC_Mixer_GetTarget( LVMixer3_FLOAT_st *pStream)
-{
-    LVM_FLOAT       TargetGain;
-    Mix_Private_FLOAT_st  *pInstance = (Mix_Private_FLOAT_st *)pStream->PrivateParams;
+LVM_FLOAT LVC_Mixer_GetTarget(LVMixer3_FLOAT_st* pStream) {
+    LVM_FLOAT TargetGain;
+    Mix_Private_FLOAT_st* pInstance = (Mix_Private_FLOAT_st*)pStream->PrivateParams;
 
     TargetGain = pInstance->Target;  // TargetGain
     return TargetGain;
diff --git a/media/libeffects/lvm/lib/Common/src/LVC_Mixer_Init.cpp b/media/libeffects/lvm/lib/Common/src/LVC_Mixer_Init.cpp
index c9fd344..e37f635 100644
--- a/media/libeffects/lvm/lib/Common/src/LVC_Mixer_Init.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVC_Mixer_Init.cpp
@@ -44,14 +44,10 @@
 /*  void                                                                */
 /*                                                                      */
 /************************************************************************/
-void LVC_Mixer_Init( LVMixer3_FLOAT_st *pStream,
-                     LVM_FLOAT           TargetGain,
-                     LVM_FLOAT           CurrentGain)
-{
+void LVC_Mixer_Init(LVMixer3_FLOAT_st* pStream, LVM_FLOAT TargetGain, LVM_FLOAT CurrentGain) {
     LVM_FLOAT MaxGain = TargetGain;
-    Mix_Private_FLOAT_st *pInstance = (Mix_Private_FLOAT_st *)pStream->PrivateParams;
-    if(CurrentGain > MaxGain)
-        MaxGain = CurrentGain;
-    pInstance->Target = TargetGain;   // Update fractional gain Target
-    pInstance->Current = CurrentGain; // Update fractional gain Current
+    Mix_Private_FLOAT_st* pInstance = (Mix_Private_FLOAT_st*)pStream->PrivateParams;
+    if (CurrentGain > MaxGain) MaxGain = CurrentGain;
+    pInstance->Target = TargetGain;    // Update fractional gain Target
+    pInstance->Current = CurrentGain;  // Update fractional gain Current
 }
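
LVC_Mixer_Init, LVC_Mixer_SetTarget, and the per-block mixers all cooperate through the Target/Current/Delta triplet stored in PrivateParams. The following standalone restatement of that decision (names illustrative; the real per-sample ramp lives in the LVC_Core_* helpers) may help when reading the hunks above:

    #include <cmath>

    struct Ramp {
        float Target;   // gain requested via LVC_Mixer_SetTarget / LVC_Mixer_Init
        float Current;  // gain applied on the most recent sample
        float Delta;    // step size derived from the time constant
    };

    // Mirrors the branch each LVC_MixSoft_* / LVC_MixInSoft_* function takes before
    // choosing between soft (ramped) and hard (constant-gain) mixing.
    static bool NeedsSoftMixing(Ramp& r) {
        if (r.Current == r.Target) return false;  // already settled: hard mixing
        if (r.Delta == 1.0f || std::fabs(r.Current - r.Target) < r.Delta) {
            r.Current = r.Target;  // difference no longer significant: snap to Target
            return false;          // hard mixing with the new gain
        }
        return true;  // keep ramping Current toward Target
    }
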
diff --git a/media/libeffects/lvm/lib/Common/src/LVC_Mixer_Private.h b/media/libeffects/lvm/lib/Common/src/LVC_Mixer_Private.h
index 123d22b..5f22d77 100644
--- a/media/libeffects/lvm/lib/Common/src/LVC_Mixer_Private.h
+++ b/media/libeffects/lvm/lib/Common/src/LVC_Mixer_Private.h
@@ -26,54 +26,36 @@
 #include "VectorArithmetic.h"
 
 /* Instance parameter structure */
-typedef struct
-{
+typedef struct {
     /* General */
-    LVM_FLOAT                       Target;           /*number specifying value of Target Gain */
-    LVM_FLOAT                       Current;          /*number specifying value of Current Gain */
-    LVM_FLOAT                       Delta;            /*number specifying value of Delta Gain */
+    LVM_FLOAT Target;  /*number specifying value of Target Gain */
+    LVM_FLOAT Current; /*number specifying value of Current Gain */
+    LVM_FLOAT Delta;   /*number specifying value of Delta Gain */
 } Mix_Private_FLOAT_st;
 
 /**********************************************************************************
    DEFINITIONS
 ***********************************************************************************/
-#define LVCore_MixInSoft_D32C31_SAT    LVCore_InSoft_D32C31_SAT
-#define LVCore_MixSoft_1St_D32C31_WRA  LVCore_Soft_1St_D32C31_WRA
-#define LVCore_MixHard_2St_D32C31_SAT  LVCore_Hard_2St_D32C31_SAT
+#define LVCore_MixInSoft_D32C31_SAT LVCore_InSoft_D32C31_SAT
+#define LVCore_MixSoft_1St_D32C31_WRA LVCore_Soft_1St_D32C31_WRA
+#define LVCore_MixHard_2St_D32C31_SAT LVCore_Hard_2St_D32C31_SAT
 
 /**********************************************************************************
    FUNCTION PROTOTYPES (LOW LEVEL SUBFUNCTIONS)
 ***********************************************************************************/
 
 /*** 16 bit functions *************************************************************/
-void LVC_Core_MixInSoft_D16C31_SAT( LVMixer3_FLOAT_st *ptrInstance,
-                                    const LVM_FLOAT     *src,
-                                    LVM_FLOAT     *dst,
-                                    LVM_INT16     n);
-#ifdef SUPPORT_MC
-void LVC_Core_MixInSoft_Mc_D16C31_SAT(LVMixer3_FLOAT_st *ptrInstance,
-                                    const LVM_FLOAT     *src,
-                                          LVM_FLOAT     *dst,
-                                          LVM_INT16     NrFrames,
-                                          LVM_INT16     NrChannels);
-#endif
-void LVC_Core_MixSoft_1St_D16C31_WRA( LVMixer3_FLOAT_st *ptrInstance,
-                                      const LVM_FLOAT     *src,
-                                      LVM_FLOAT     *dst,
-                                      LVM_INT16     n);
-#ifdef SUPPORT_MC
-void LVC_Core_MixSoft_Mc_D16C31_WRA(LVMixer3_FLOAT_st *ptrInstance,
-                                    const LVM_FLOAT     *src,
-                                          LVM_FLOAT     *dst,
-                                          LVM_INT16     NrFrames,
-                                          LVM_INT16     NrChannels);
-#endif
-void LVC_Core_MixHard_2St_D16C31_SAT( LVMixer3_FLOAT_st *pInstance1,
-                                      LVMixer3_FLOAT_st         *pInstance2,
-                                      const LVM_FLOAT     *src1,
-                                      const LVM_FLOAT     *src2,
-                                      LVM_FLOAT     *dst,
-                                      LVM_INT16     n);
+void LVC_Core_MixInSoft_D16C31_SAT(LVMixer3_FLOAT_st* ptrInstance, const LVM_FLOAT* src,
+                                   LVM_FLOAT* dst, LVM_INT16 n);
+void LVC_Core_MixInSoft_Mc_D16C31_SAT(LVMixer3_FLOAT_st* ptrInstance, const LVM_FLOAT* src,
+                                      LVM_FLOAT* dst, LVM_INT16 NrFrames, LVM_INT16 NrChannels);
+void LVC_Core_MixSoft_1St_D16C31_WRA(LVMixer3_FLOAT_st* ptrInstance, const LVM_FLOAT* src,
+                                     LVM_FLOAT* dst, LVM_INT16 n);
+void LVC_Core_MixSoft_Mc_D16C31_WRA(LVMixer3_FLOAT_st* ptrInstance, const LVM_FLOAT* src,
+                                    LVM_FLOAT* dst, LVM_INT16 NrFrames, LVM_INT16 NrChannels);
+void LVC_Core_MixHard_2St_D16C31_SAT(LVMixer3_FLOAT_st* pInstance1, LVMixer3_FLOAT_st* pInstance2,
+                                     const LVM_FLOAT* src1, const LVM_FLOAT* src2, LVM_FLOAT* dst,
+                                     LVM_INT16 n);
 
 /**********************************************************************************/
 /* For applying different gains to Left and right channels                        */
@@ -81,18 +63,11 @@
 /* ptrInstance2 applies to Right channel                                          */
 /* Gain values should not be more than 1.0                                        */
 /**********************************************************************************/
-#ifdef SUPPORT_MC
-void LVC_Core_MixSoft_1St_MC_float_WRA(Mix_Private_FLOAT_st **ptrInstance,
-                                         const LVM_FLOAT      *src,
-                                         LVM_FLOAT            *dst,
-                                         LVM_INT16            NrFrames,
-                                         LVM_INT16            NrChannels);
-#endif
-void LVC_Core_MixSoft_1St_2i_D16C31_WRA( LVMixer3_FLOAT_st        *ptrInstance1,
-                                         LVMixer3_FLOAT_st        *ptrInstance2,
-                                         const LVM_FLOAT    *src,
-                                         LVM_FLOAT          *dst,
-                                         LVM_INT16          n);
+void LVC_Core_MixSoft_1St_MC_float_WRA(Mix_Private_FLOAT_st** ptrInstance, const LVM_FLOAT* src,
+                                       LVM_FLOAT* dst, LVM_INT16 NrFrames, LVM_INT16 NrChannels);
+void LVC_Core_MixSoft_1St_2i_D16C31_WRA(LVMixer3_FLOAT_st* ptrInstance1,
+                                        LVMixer3_FLOAT_st* ptrInstance2, const LVM_FLOAT* src,
+                                        LVM_FLOAT* dst, LVM_INT16 n);
 
 /**********************************************************************************/
 /* For applying different gains to Left and right channels                        */
@@ -100,21 +75,13 @@
 /* ptrInstance2 applies to Right channel                                          */
 /* Gain values should not be more than 1.0                                        */
 /**********************************************************************************/
-#ifdef SUPPORT_MC
-void LVC_Core_MixHard_1St_MC_float_SAT(Mix_Private_FLOAT_st **ptrInstance,
-                                         const LVM_FLOAT      *src,
-                                         LVM_FLOAT            *dst,
-                                         LVM_INT16            NrFrames,
-                                         LVM_INT16            NrChannels);
-#endif
-void LVC_Core_MixHard_1St_2i_D16C31_SAT( LVMixer3_FLOAT_st        *ptrInstance1,
-                                         LVMixer3_FLOAT_st        *ptrInstance2,
-                                         const LVM_FLOAT    *src,
-                                         LVM_FLOAT          *dst,
-                                         LVM_INT16          n);
+void LVC_Core_MixHard_1St_MC_float_SAT(Mix_Private_FLOAT_st** ptrInstance, const LVM_FLOAT* src,
+                                       LVM_FLOAT* dst, LVM_INT16 NrFrames, LVM_INT16 NrChannels);
+void LVC_Core_MixHard_1St_2i_D16C31_SAT(LVMixer3_FLOAT_st* ptrInstance1,
+                                        LVMixer3_FLOAT_st* ptrInstance2, const LVM_FLOAT* src,
+                                        LVM_FLOAT* dst, LVM_INT16 n);
 
 /*** 32 bit functions *************************************************************/
 /**********************************************************************************/
 
-#endif //#ifndef __LVC_MIXER_PRIVATE_H__
-
+#endif  //#ifndef __LVC_MIXER_PRIVATE_H__
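For orientation, a schematic stand-in (not part of this patch) for the gain ramp that the Target/Current/Delta fields above drive: each update moves Current one Delta step toward Target and stops exactly on Target. The real stepping lives in the LVC_Core_MixSoft_* routines, which this hunk only declares, so treat this as an illustration of the data, not the actual code.

static float StepGain(float current, float target, float delta) {
    // Move one Delta step toward Target, clamping so the ramp ends exactly on it.
    if (current < target) {
        current += delta;
        if (current > target) current = target;
    } else if (current > target) {
        current -= delta;
        if (current < target) current = target;
    }
    return current;
}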
diff --git a/media/libeffects/lvm/lib/Common/src/LVC_Mixer_SetTarget.cpp b/media/libeffects/lvm/lib/Common/src/LVC_Mixer_SetTarget.cpp
index 47b0cec..d8015c4 100644
--- a/media/libeffects/lvm/lib/Common/src/LVC_Mixer_SetTarget.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVC_Mixer_SetTarget.cpp
@@ -43,9 +43,7 @@
 /*  void                                                                */
 /*                                                                      */
 /************************************************************************/
-void LVC_Mixer_SetTarget(LVMixer3_FLOAT_st *pStream,
-                         LVM_FLOAT         TargetGain)
-{
-    Mix_Private_FLOAT_st *pInstance = (Mix_Private_FLOAT_st *)pStream->PrivateParams;
-    pInstance->Target = TargetGain;               // Update gain Target
+void LVC_Mixer_SetTarget(LVMixer3_FLOAT_st* pStream, LVM_FLOAT TargetGain) {
+    Mix_Private_FLOAT_st* pInstance = (Mix_Private_FLOAT_st*)pStream->PrivateParams;
+    pInstance->Target = TargetGain;  // Update gain Target
 }
diff --git a/media/libeffects/lvm/lib/Common/src/LVC_Mixer_SetTimeConstant.cpp b/media/libeffects/lvm/lib/Common/src/LVC_Mixer_SetTimeConstant.cpp
index 1a8da7a..715b908 100644
--- a/media/libeffects/lvm/lib/Common/src/LVC_Mixer_SetTimeConstant.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVC_Mixer_SetTimeConstant.cpp
@@ -44,36 +44,33 @@
 /* RETURNS:                                                             */
 /*  void                                                                */
 /************************************************************************/
-void LVC_Mixer_SetTimeConstant(LVMixer3_FLOAT_st *pStream,
-                               LVM_INT32           Tc_millisec,
-                               LVM_Fs_en           Fs,
-                               LVM_INT16           NumChannels)
-{
-    LVM_FLOAT   DeltaTable[13] = {0.500000f,/*8000*/
-                                  0.362812f,/*11025*/
-                                  0.333333f,/*12000*/
-                                  0.250000f,/*16000*/
-                                  0.181406f,/*22050*/
-                                  0.166666f,/*24000*/
-                                  0.125000f,/*32000*/
-                                  0.090703f,/*44100*/
-                                  0.083333f,/*48000*/
-                                  0.045352f,/*88200*/
-                                  0.041667f,/*96000*/
-                                  0.022676f,/*176400*/
-                                  0.020833f};/*192000*/
+void LVC_Mixer_SetTimeConstant(LVMixer3_FLOAT_st* pStream, LVM_INT32 Tc_millisec, LVM_Fs_en Fs,
+                               LVM_INT16 NumChannels) {
+    LVM_FLOAT DeltaTable[13] = {0.500000f,  /*8000*/
+                                0.362812f,  /*11025*/
+                                0.333333f,  /*12000*/
+                                0.250000f,  /*16000*/
+                                0.181406f,  /*22050*/
+                                0.166666f,  /*24000*/
+                                0.125000f,  /*32000*/
+                                0.090703f,  /*44100*/
+                                0.083333f,  /*48000*/
+                                0.045352f,  /*88200*/
+                                0.041667f,  /*96000*/
+                                0.022676f,  /*176400*/
+                                0.020833f}; /*192000*/
 
-    Mix_Private_FLOAT_st *pInstance = (Mix_Private_FLOAT_st *)pStream->PrivateParams;
+    Mix_Private_FLOAT_st* pInstance = (Mix_Private_FLOAT_st*)pStream->PrivateParams;
     LVM_FLOAT Delta = DeltaTable[Fs];
     Delta = Delta / (NumChannels);
 
-    if(Tc_millisec == 0)
+    if (Tc_millisec == 0)
         Delta = 1.000000f;
     else
         Delta = Delta / Tc_millisec;
 
-    if(Delta == 0)
-        Delta = 0.0000000005f;  /* If Time Constant is so large that Delta is 0, \
-                                  assign minimum value to Delta */
+    if (Delta == 0)
+        Delta = 0.0000000005f; /* If Time Constant is so large that Delta is 0, \
+                                 assign minimum value to Delta */
     pInstance->Delta = Delta;  // Delta=(2147483647*4*1000)/(NumChannels*SampleRate*Tc_millisec)
 }
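Each DeltaTable entry above is approximately 4000/Fs, so the stored step works out to roughly 4000 / (Fs * NumChannels * Tc_millisec) per update. A standalone sketch of that arithmetic (the sample-rate index, channel count, and time constant below are illustrative, and the ramp-length estimate assumes one Delta step per update, as in the schematic after the private header above):

#include <cstdio>

// Mirrors the DeltaTable in LVC_Mixer_SetTimeConstant above (entries ~= 4000/Fs).
static const float kDeltaTable[13] = {
        0.500000f /*8000*/,  0.362812f /*11025*/, 0.333333f /*12000*/,
        0.250000f /*16000*/, 0.181406f /*22050*/, 0.166666f /*24000*/,
        0.125000f /*32000*/, 0.090703f /*44100*/, 0.083333f /*48000*/,
        0.045352f /*88200*/, 0.041667f /*96000*/, 0.022676f /*176400*/,
        0.020833f /*192000*/};

int main() {
    const int fsIndex = 8;       // 48 kHz entry (illustrative)
    const int numChannels = 2;   // stereo (illustrative)
    const int tcMillisec = 100;  // 100 ms time constant (illustrative)
    float delta = kDeltaTable[fsIndex] / numChannels;
    delta = (tcMillisec == 0) ? 1.0f : delta / tcMillisec;
    if (delta == 0) delta = 0.0000000005f;  // same floor as the code above
    // ~0.000417 here, i.e. roughly 2400 updates to ramp a full-scale gain change.
    std::printf("Delta = %g (~%.0f updates per unit gain change)\n", delta, 1.0f / delta);
    return 0;
}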
diff --git a/media/libeffects/lvm/lib/Common/src/LVC_Mixer_VarSlope_SetTimeConstant.cpp b/media/libeffects/lvm/lib/Common/src/LVC_Mixer_VarSlope_SetTimeConstant.cpp
index f335a1e..cf84613 100644
--- a/media/libeffects/lvm/lib/Common/src/LVC_Mixer_VarSlope_SetTimeConstant.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVC_Mixer_VarSlope_SetTimeConstant.cpp
@@ -44,57 +44,50 @@
 /* RETURNS:                                                             */
 /*  void                                                                */
 /************************************************************************/
-void LVC_Mixer_VarSlope_SetTimeConstant( LVMixer3_FLOAT_st *pStream,
-                                         LVM_INT32           Tc_millisec,
-                                         LVM_Fs_en           Fs,
-                                         LVM_INT16           NumChannels)
-{
-     LVM_FLOAT   DeltaTable[13] = {0.500000f,/*8000*/
-                                   0.362812f,/*11025*/
-                                   0.333333f,/*12000*/
-                                   0.250000f,/*16000*/
-                                   0.181406f,/*22050*/
-                                   0.166666f,/*24000*/
-                                   0.125000f,/*32000*/
-                                   0.090703f,/*44100*/
-                                   0.083333f,/*48000*/
-                                   0.045352f,/*88200*/
-                                   0.041666f,/*96000*/
-                                   0.022676f,/*176400*/
-                                   0.020833f};/*192000*/
+void LVC_Mixer_VarSlope_SetTimeConstant(LVMixer3_FLOAT_st* pStream, LVM_INT32 Tc_millisec,
+                                        LVM_Fs_en Fs, LVM_INT16 NumChannels) {
+    LVM_FLOAT DeltaTable[13] = {0.500000f,  /*8000*/
+                                0.362812f,  /*11025*/
+                                0.333333f,  /*12000*/
+                                0.250000f,  /*16000*/
+                                0.181406f,  /*22050*/
+                                0.166666f,  /*24000*/
+                                0.125000f,  /*32000*/
+                                0.090703f,  /*44100*/
+                                0.083333f,  /*48000*/
+                                0.045352f,  /*88200*/
+                                0.041666f,  /*96000*/
+                                0.022676f,  /*176400*/
+                                0.020833f}; /*192000*/
     LVM_FLOAT Tc_millisec_float;
-    Mix_Private_FLOAT_st *pInstance = (Mix_Private_FLOAT_st *)pStream->PrivateParams;
+    Mix_Private_FLOAT_st* pInstance = (Mix_Private_FLOAT_st*)pStream->PrivateParams;
     LVM_FLOAT Delta = DeltaTable[Fs];
 
-    LVM_FLOAT   Current;
-    LVM_FLOAT   Target;
+    LVM_FLOAT Current;
+    LVM_FLOAT Target;
 
-    Delta=Delta / (NumChannels);
+    Delta = Delta / (NumChannels);
 
     /*  Get gain values  */
     Current = pInstance->Current;
     Target = pInstance->Target;
 
-    if (Current != Target)
-    {
+    if (Current != Target) {
         Tc_millisec_float = (LVM_FLOAT)(Tc_millisec) / (Current - Target);
-        if (Tc_millisec_float < 0)
-            Tc_millisec_float = -Tc_millisec_float;
+        if (Tc_millisec_float < 0) Tc_millisec_float = -Tc_millisec_float;
 
-        if(Tc_millisec == 0)
+        if (Tc_millisec == 0)
             Delta = 1.000000f;
         else
             Delta = Delta / Tc_millisec_float;
 
-        if(Delta == 0)
+        if (Delta == 0)
             Delta = 0.0000000005f; /* If Time Constant is so large that Delta is 0, \
                                       assign minimum value to Delta */
-    }
-    else
-    {
-        Delta = 0.0000000005f;  /* Minimum value for proper call-backs \
-                             (setting it to zero has some problems, to be corrected) */
+    } else {
+        Delta = 0.0000000005f; /* Minimum value for proper call-backs \
+                            (setting it to zero has some problems, to be corrected) */
     }
 
-    pInstance->Delta = Delta;     // Delta=(2147483647*4*1000)/(NumChannels*SampleRate*Tc_millisec)
+    pInstance->Delta = Delta;  // Delta=(2147483647*4*1000)/(NumChannels*SampleRate*Tc_millisec)
 }
diff --git a/media/libeffects/lvm/lib/Common/src/LVM_FO_HPF.cpp b/media/libeffects/lvm/lib/Common/src/LVM_FO_HPF.cpp
index 2497d29..59095df 100644
--- a/media/libeffects/lvm/lib/Common/src/LVM_FO_HPF.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVM_FO_HPF.cpp
@@ -67,31 +67,19 @@
 /* RETURNS:                                                                */
 /*                                                                         */
 /*-------------------------------------------------------------------------*/
-LVM_FLOAT LVM_FO_HPF(   LVM_FLOAT       w,
-                        FO_FLOAT_Coefs_t  *pCoeffs)
-{
-    LVM_FLOAT Y,Coefficients[13] = {-0.999996f,
-                                    0.999801f,
-                                    -0.497824f,
-                                    0.322937f,
-                                    -0.180880f,
-                                    0.087658f,
-                                    -0.032102f,
-                                    0.008163f,
-                                    -0.001252f,
-                                    0.000089f,
-                                    0,
-                                    0,
-                                    0};
-    Y=LVM_Polynomial((LVM_UINT16)9, Coefficients, w);
+LVM_FLOAT LVM_FO_HPF(LVM_FLOAT w, FO_FLOAT_Coefs_t* pCoeffs) {
+    LVM_FLOAT Y, Coefficients[13] = {-0.999996f, 0.999801f,  -0.497824f, 0.322937f,  -0.180880f,
+                                     0.087658f,  -0.032102f, 0.008163f,  -0.001252f, 0.000089f,
+                                     0,          0,          0};
+    Y = LVM_Polynomial((LVM_UINT16)9, Coefficients, w);
 
-    pCoeffs->B1 = -Y;         /* Store -B1 in filter structure instead of B1!*/
-                            /* A0=(1-B1)/2= B1/2 - 0.5*/
-    Y = Y / 2.0f;                 /* A0=Y=B1/2*/
-    Y = Y - 0.5f;         /* A0=Y=(B1/2 - 0.5)*/
+    pCoeffs->B1 = -Y; /* Store -B1 in filter structure instead of B1!*/
+                      /* A0=(1-B1)/2= B1/2 - 0.5*/
+    Y = Y / 2.0f;     /* A0=Y=B1/2*/
+    Y = Y - 0.5f;     /* A0=Y=(B1/2 - 0.5)*/
 
-    pCoeffs->A0 = Y * FILTER_LOSS_FLOAT;                  /* Apply loss to avoid overflow*/
-    pCoeffs->A1 = -pCoeffs->A0;                           /* Store A1=-A0*/
+    pCoeffs->A0 = Y * FILTER_LOSS_FLOAT; /* Apply loss to avoid overflow*/
+    pCoeffs->A1 = -pCoeffs->A0;          /* Store A1=-A0*/
 
     return 1;
 }
diff --git a/media/libeffects/lvm/lib/Common/src/LVM_FO_LPF.cpp b/media/libeffects/lvm/lib/Common/src/LVM_FO_LPF.cpp
index 7bc6046..91964fb 100644
--- a/media/libeffects/lvm/lib/Common/src/LVM_FO_LPF.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVM_FO_LPF.cpp
@@ -67,25 +67,15 @@
 /* RETURNS:                                                                */
 /*                                                                         */
 /*-------------------------------------------------------------------------*/
-LVM_FLOAT LVM_FO_LPF(   LVM_FLOAT       w,
-                        FO_FLOAT_Coefs_t  *pCoeffs)
-{
-    LVM_FLOAT Y,Coefficients[13] = {-0.999996f,
-                                    0.999801f,
-                                    -0.497824f,
-                                    0.322937f,
-                                    -0.180880f,
-                                    0.087658f,
-                                    -0.032102f,
-                                    0.008163f,
-                                    -0.001252f,
-                                    0.000089f,
-                                    0};
-    Y=LVM_Polynomial((LVM_UINT16)9, Coefficients, w);
-    pCoeffs->B1 = -Y;     // Store -B1 in filter structure instead of B1!
-                        // A0=(1+B1)/2= B1/2 + 0.5
-    Y = Y / 2.0f;             // A0=Y=B1/2
-    Y = Y + 0.5f;     // A0=Y=(B1/2 + 0.5)
+LVM_FLOAT LVM_FO_LPF(LVM_FLOAT w, FO_FLOAT_Coefs_t* pCoeffs) {
+    LVM_FLOAT Y,
+            Coefficients[13] = {-0.999996f, 0.999801f, -0.497824f, 0.322937f, -0.180880f, 0.087658f,
+                                -0.032102f, 0.008163f, -0.001252f, 0.000089f, 0};
+    Y = LVM_Polynomial((LVM_UINT16)9, Coefficients, w);
+    pCoeffs->B1 = -Y;  // Store -B1 in filter structure instead of B1!
+                       // A0=(1+B1)/2= B1/2 + 0.5
+    Y = Y / 2.0f;      // A0=Y=B1/2
+    Y = Y + 0.5f;      // A0=Y=(B1/2 + 0.5)
 
     pCoeffs->A0 = Y * FILTER_LOSS_FLOAT;
     pCoeffs->A1 = pCoeffs->A0;
diff --git a/media/libeffects/lvm/lib/Common/src/LVM_GetOmega.cpp b/media/libeffects/lvm/lib/Common/src/LVM_GetOmega.cpp
index 2a7cca2..5f25677 100644
--- a/media/libeffects/lvm/lib/Common/src/LVM_GetOmega.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVM_GetOmega.cpp
@@ -25,41 +25,32 @@
 /*                                                                                  */
 /************************************************************************************/
 
-#define LVVDL_2PiBy_8000        1727108826  /* In Q41 format */
-#define LVVDL_2PiBy_11025       1253230894  /* In Q41 format */
-#define LVVDL_2PiBy_12000       1151405884  /* In Q41 format */
+#define LVVDL_2PiBy_8000 1727108826  /* In Q41 format */
+#define LVVDL_2PiBy_11025 1253230894 /* In Q41 format */
+#define LVVDL_2PiBy_12000 1151405884 /* In Q41 format */
 
-#define LVVDL_2PiByFs_SHIFT1    12          /* Qformat shift for 8kHz, 11.025kHz and 12kHz i.e. 12=41-29 */
-#define LVVDL_2PiByFs_SHIFT2    13          /* Qformat shift for 16kHz, 22.050kHz and 24kHz i.e. 13=42-29 */
-#define LVVDL_2PiByFs_SHIFT3    14          /* Qformat shift for 32kHz, 44.1kHz and 48kHz i.e. 14=43-29 */
-#define LVVDL_2PiBy_8000_f        0.000785398f
-#define LVVDL_2PiBy_11025_f       0.000569903f
-#define LVVDL_2PiBy_12000_f       0.000523599f
-#define LVVDL_2PiBy_16000_f       0.000392700f
-#define LVVDL_2PiBy_22050_f       0.000284952f
-#define LVVDL_2PiBy_24000_f       0.000261800f
-#define LVVDL_2PiBy_32000_f       0.000196350f
-#define LVVDL_2PiBy_44100_f       0.000142476f
-#define LVVDL_2PiBy_48000_f       0.000130900f
+#define LVVDL_2PiByFs_SHIFT1 12 /* Qformat shift for 8kHz, 11.025kHz and 12kHz i.e. 12=41-29 */
+#define LVVDL_2PiByFs_SHIFT2 13 /* Qformat shift for 16kHz, 22.050kHz and 24kHz i.e. 13=42-29 */
+#define LVVDL_2PiByFs_SHIFT3 14 /* Qformat shift for 32kHz, 44.1kHz and 48kHz i.e. 14=43-29 */
+#define LVVDL_2PiBy_8000_f 0.000785398f
+#define LVVDL_2PiBy_11025_f 0.000569903f
+#define LVVDL_2PiBy_12000_f 0.000523599f
+#define LVVDL_2PiBy_16000_f 0.000392700f
+#define LVVDL_2PiBy_22050_f 0.000284952f
+#define LVVDL_2PiBy_24000_f 0.000261800f
+#define LVVDL_2PiBy_32000_f 0.000196350f
+#define LVVDL_2PiBy_44100_f 0.000142476f
+#define LVVDL_2PiBy_48000_f 0.000130900f
 
-#define LVVDL_2PiBy_88200_f       0.000071238f
-#define LVVDL_2PiBy_96000_f       0.000065450f
-#define LVVDL_2PiBy_176400_f      0.000035619f
-#define LVVDL_2PiBy_192000_f      0.000032725f
-const LVM_FLOAT     LVVDL_2PiOnFsTable[] =  {LVVDL_2PiBy_8000_f,
-                                             LVVDL_2PiBy_11025_f,
-                                             LVVDL_2PiBy_12000_f,
-                                             LVVDL_2PiBy_16000_f,
-                                             LVVDL_2PiBy_22050_f,
-                                             LVVDL_2PiBy_24000_f,
-                                             LVVDL_2PiBy_32000_f,
-                                             LVVDL_2PiBy_44100_f,
-                                             LVVDL_2PiBy_48000_f
-                                            ,LVVDL_2PiBy_88200_f
-                                            ,LVVDL_2PiBy_96000_f
-                                            ,LVVDL_2PiBy_176400_f
-                                            ,LVVDL_2PiBy_192000_f
-                                           };
+#define LVVDL_2PiBy_88200_f 0.000071238f
+#define LVVDL_2PiBy_96000_f 0.000065450f
+#define LVVDL_2PiBy_176400_f 0.000035619f
+#define LVVDL_2PiBy_192000_f 0.000032725f
+const LVM_FLOAT LVVDL_2PiOnFsTable[] = {
+        LVVDL_2PiBy_8000_f,  LVVDL_2PiBy_11025_f, LVVDL_2PiBy_12000_f, LVVDL_2PiBy_16000_f,
+        LVVDL_2PiBy_22050_f, LVVDL_2PiBy_24000_f, LVVDL_2PiBy_32000_f, LVVDL_2PiBy_44100_f,
+        LVVDL_2PiBy_48000_f, LVVDL_2PiBy_88200_f, LVVDL_2PiBy_96000_f, LVVDL_2PiBy_176400_f,
+        LVVDL_2PiBy_192000_f};
 /*-------------------------------------------------------------------------*/
 /* FUNCTION:                                                               */
 /*   LVM_GetOmega                                                          */
@@ -77,10 +68,8 @@
 /* RETURNS:                                                                */
 /*   w=2*pi*Fc/Fs in Q2.29 format                                          */
 /*-------------------------------------------------------------------------*/
-LVM_FLOAT LVM_GetOmega(LVM_UINT32                  Fc,
-                       LVM_Fs_en                   Fs)
-{
-    LVM_FLOAT   w;
+LVM_FLOAT LVM_GetOmega(LVM_UINT32 Fc, LVM_Fs_en Fs) {
+    LVM_FLOAT w;
     w = (LVM_FLOAT)Fc * LVVDL_2PiOnFsTable[Fs];
     return w;
 }
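Taken together with the first-order coefficient generators earlier in this patch, LVM_GetOmega is the front half of a simple filter-design flow: convert a corner frequency to w = 2*pi*Fc/Fs, then hand w to LVM_FO_LPF or LVM_FO_HPF. A hedged call-pattern sketch (the include and the 1 kHz / 48 kHz values are assumptions; only the prototypes, the struct fields, and the table come from this patch):

// Assumes the library header that declares LVM_GetOmega, LVM_FO_LPF, LVM_FO_HPF
// and FO_FLOAT_Coefs_t is on the include path; the exact name is not in this hunk.
#include "Filter.h"  // assumed header name

void ExampleFirstOrderCoefs() {
    // Index 8 of LVVDL_2PiOnFsTable above is the 48 kHz entry, so this computes
    // w = 2*pi*1000/48000 for an illustrative 1 kHz corner frequency.
    LVM_FLOAT w = LVM_GetOmega((LVM_UINT32)1000, (LVM_Fs_en)8);

    FO_FLOAT_Coefs_t lpf, hpf;
    LVM_FO_LPF(w, &lpf);  // fills A0, A1, B1 for the first-order low-pass
    LVM_FO_HPF(w, &hpf);  // fills A0, A1, B1; B1 is stored negated, per the comment above
    (void)lpf;
    (void)hpf;
}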
diff --git a/media/libeffects/lvm/lib/Common/src/LVM_Mixer_FilterCoeffs.h b/media/libeffects/lvm/lib/Common/src/LVM_Mixer_FilterCoeffs.h
index 244f09d..31dcaa4 100644
--- a/media/libeffects/lvm/lib/Common/src/LVM_Mixer_FilterCoeffs.h
+++ b/media/libeffects/lvm/lib/Common/src/LVM_Mixer_FilterCoeffs.h
@@ -33,109 +33,109 @@
 /*                                                                                  */
 /************************************************************************************/
 
-#define Alpha_TableSize                        50      /* The number of table entires */
-#define ALPHA_0                        2147480769      /* Floating point Alpha = 0.999999 */
-#define ALPHA_1                        2147479577      /* Floating point Alpha = 0.999998 */
-#define ALPHA_2                        2147477892      /* Floating point Alpha = 0.999997 */
-#define ALPHA_3                        2147475510      /* Floating point Alpha = 0.999996 */
-#define ALPHA_4                        2147472141      /* Floating point Alpha = 0.999995 */
-#define ALPHA_5                        2147467377      /* Floating point Alpha = 0.999992 */
-#define ALPHA_6                        2147460642      /* Floating point Alpha = 0.999989 */
-#define ALPHA_7                        2147451118      /* Floating point Alpha = 0.999985 */
-#define ALPHA_8                        2147437651      /* Floating point Alpha = 0.999979 */
-#define ALPHA_9                        2147418608      /* Floating point Alpha = 0.999970 */
-#define ALPHA_10                       2147391683      /* Floating point Alpha = 0.999957 */
-#define ALPHA_11                       2147353611      /* Floating point Alpha = 0.999939 */
-#define ALPHA_12                       2147299779      /* Floating point Alpha = 0.999914 */
-#define ALPHA_13                       2147223662      /* Floating point Alpha = 0.999879 */
-#define ALPHA_14                       2147116037      /* Floating point Alpha = 0.999829 */
-#define ALPHA_15                       2146963865      /* Floating point Alpha = 0.999758 */
-#define ALPHA_16                       2146748712      /* Floating point Alpha = 0.999658 */
-#define ALPHA_17                       2146444522      /* Floating point Alpha = 0.999516 */
-#define ALPHA_18                       2146014472      /* Floating point Alpha = 0.999316 */
-#define ALPHA_19                       2145406527      /* Floating point Alpha = 0.999033 */
-#define ALPHA_20                       2144547188      /* Floating point Alpha = 0.998633 */
-#define ALPHA_21                       2143332669      /* Floating point Alpha = 0.998067 */
-#define ALPHA_22                       2141616514      /* Floating point Alpha = 0.997268 */
-#define ALPHA_23                       2139192215      /* Floating point Alpha = 0.996139 */
-#define ALPHA_24                       2135768939      /* Floating point Alpha = 0.994545 */
-#define ALPHA_25                       2130937774      /* Floating point Alpha = 0.992295 */
-#define ALPHA_26                       2124125153      /* Floating point Alpha = 0.989123 */
-#define ALPHA_27                       2114529263      /* Floating point Alpha = 0.984654 */
-#define ALPHA_28                       2101034612      /* Floating point Alpha = 0.978370 */
-#define ALPHA_29                       2082100030      /* Floating point Alpha = 0.969553 */
-#define ALPHA_30                       2055617398      /* Floating point Alpha = 0.957221 */
-#define ALPHA_31                       2018744824      /* Floating point Alpha = 0.940051 */
-#define ALPHA_32                       1967733015      /* Floating point Alpha = 0.916297 */
-#define ALPHA_33                       1897794587      /* Floating point Alpha = 0.883729 */
-#define ALPHA_34                       1803123234      /* Floating point Alpha = 0.839645 */
-#define ALPHA_35                       1677262220      /* Floating point Alpha = 0.781036 */
-#define ALPHA_36                       1514142675      /* Floating point Alpha = 0.705078 */
-#define ALPHA_37                       1310197875      /* Floating point Alpha = 0.610108 */
-#define ALPHA_38                       1067813480      /* Floating point Alpha = 0.497239 */
-#define ALPHA_39                        799601371      /* Floating point Alpha = 0.372343 */
-#define ALPHA_40                        531183049      /* Floating point Alpha = 0.247351 */
-#define ALPHA_41                        297904007      /* Floating point Alpha = 0.138722 */
-#define ALPHA_42                        131499768      /* Floating point Alpha = 0.061234 */
-#define ALPHA_43                         41375282      /* Floating point Alpha = 0.019267 */
-#define ALPHA_44                          8065899      /* Floating point Alpha = 0.003756 */
-#define ALPHA_45                           799076      /* Floating point Alpha = 0.000372 */
-#define ALPHA_46                            30398      /* Floating point Alpha = 0.000014 */
-#define ALPHA_47                              299      /* Floating point Alpha = 0.000000 */
-#define ALPHA_48                                0      /* Floating point Alpha = 0.000000 */
-#define ALPHA_49                                0      /* Floating point Alpha = 0.000000 */
-#define ALPHA_50                                0      /* Floating point Alpha = 0.000000 */
+#define Alpha_TableSize 50  /* The number of table entries */
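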
+#define ALPHA_0 2147480769  /* Floating point Alpha = 0.999999 */
+#define ALPHA_1 2147479577  /* Floating point Alpha = 0.999998 */
+#define ALPHA_2 2147477892  /* Floating point Alpha = 0.999997 */
+#define ALPHA_3 2147475510  /* Floating point Alpha = 0.999996 */
+#define ALPHA_4 2147472141  /* Floating point Alpha = 0.999995 */
+#define ALPHA_5 2147467377  /* Floating point Alpha = 0.999992 */
+#define ALPHA_6 2147460642  /* Floating point Alpha = 0.999989 */
+#define ALPHA_7 2147451118  /* Floating point Alpha = 0.999985 */
+#define ALPHA_8 2147437651  /* Floating point Alpha = 0.999979 */
+#define ALPHA_9 2147418608  /* Floating point Alpha = 0.999970 */
+#define ALPHA_10 2147391683 /* Floating point Alpha = 0.999957 */
+#define ALPHA_11 2147353611 /* Floating point Alpha = 0.999939 */
+#define ALPHA_12 2147299779 /* Floating point Alpha = 0.999914 */
+#define ALPHA_13 2147223662 /* Floating point Alpha = 0.999879 */
+#define ALPHA_14 2147116037 /* Floating point Alpha = 0.999829 */
+#define ALPHA_15 2146963865 /* Floating point Alpha = 0.999758 */
+#define ALPHA_16 2146748712 /* Floating point Alpha = 0.999658 */
+#define ALPHA_17 2146444522 /* Floating point Alpha = 0.999516 */
+#define ALPHA_18 2146014472 /* Floating point Alpha = 0.999316 */
+#define ALPHA_19 2145406527 /* Floating point Alpha = 0.999033 */
+#define ALPHA_20 2144547188 /* Floating point Alpha = 0.998633 */
+#define ALPHA_21 2143332669 /* Floating point Alpha = 0.998067 */
+#define ALPHA_22 2141616514 /* Floating point Alpha = 0.997268 */
+#define ALPHA_23 2139192215 /* Floating point Alpha = 0.996139 */
+#define ALPHA_24 2135768939 /* Floating point Alpha = 0.994545 */
+#define ALPHA_25 2130937774 /* Floating point Alpha = 0.992295 */
+#define ALPHA_26 2124125153 /* Floating point Alpha = 0.989123 */
+#define ALPHA_27 2114529263 /* Floating point Alpha = 0.984654 */
+#define ALPHA_28 2101034612 /* Floating point Alpha = 0.978370 */
+#define ALPHA_29 2082100030 /* Floating point Alpha = 0.969553 */
+#define ALPHA_30 2055617398 /* Floating point Alpha = 0.957221 */
+#define ALPHA_31 2018744824 /* Floating point Alpha = 0.940051 */
+#define ALPHA_32 1967733015 /* Floating point Alpha = 0.916297 */
+#define ALPHA_33 1897794587 /* Floating point Alpha = 0.883729 */
+#define ALPHA_34 1803123234 /* Floating point Alpha = 0.839645 */
+#define ALPHA_35 1677262220 /* Floating point Alpha = 0.781036 */
+#define ALPHA_36 1514142675 /* Floating point Alpha = 0.705078 */
+#define ALPHA_37 1310197875 /* Floating point Alpha = 0.610108 */
+#define ALPHA_38 1067813480 /* Floating point Alpha = 0.497239 */
+#define ALPHA_39 799601371  /* Floating point Alpha = 0.372343 */
+#define ALPHA_40 531183049  /* Floating point Alpha = 0.247351 */
+#define ALPHA_41 297904007  /* Floating point Alpha = 0.138722 */
+#define ALPHA_42 131499768  /* Floating point Alpha = 0.061234 */
+#define ALPHA_43 41375282   /* Floating point Alpha = 0.019267 */
+#define ALPHA_44 8065899    /* Floating point Alpha = 0.003756 */
+#define ALPHA_45 799076     /* Floating point Alpha = 0.000372 */
+#define ALPHA_46 30398      /* Floating point Alpha = 0.000014 */
+#define ALPHA_47 299        /* Floating point Alpha = 0.000000 */
+#define ALPHA_48 0          /* Floating point Alpha = 0.000000 */
+#define ALPHA_49 0          /* Floating point Alpha = 0.000000 */
+#define ALPHA_50 0          /* Floating point Alpha = 0.000000 */
 
-#define ALPHA_Float_0                        0.999999f
-#define ALPHA_Float_1                        0.999998f
-#define ALPHA_Float_2                        0.999997f
-#define ALPHA_Float_3                        0.999996f
-#define ALPHA_Float_4                        0.999995f
-#define ALPHA_Float_5                        0.999992f
-#define ALPHA_Float_6                        0.999989f
-#define ALPHA_Float_7                        0.999985f
-#define ALPHA_Float_8                        0.999979f
-#define ALPHA_Float_9                        0.999970f
-#define ALPHA_Float_10                       0.999957f
-#define ALPHA_Float_11                       0.999939f
-#define ALPHA_Float_12                       0.999914f
-#define ALPHA_Float_13                       0.999879f
-#define ALPHA_Float_14                       0.999829f
-#define ALPHA_Float_15                       0.999758f
-#define ALPHA_Float_16                       0.999658f
-#define ALPHA_Float_17                       0.999516f
-#define ALPHA_Float_18                       0.999316f
-#define ALPHA_Float_19                       0.999033f
-#define ALPHA_Float_20                       0.998633f
-#define ALPHA_Float_21                       0.998067f
-#define ALPHA_Float_22                       0.997268f
-#define ALPHA_Float_23                       0.996139f
-#define ALPHA_Float_24                       0.994545f
-#define ALPHA_Float_25                       0.992295f
-#define ALPHA_Float_26                       0.989123f
-#define ALPHA_Float_27                       0.984654f
-#define ALPHA_Float_28                       0.978370f
-#define ALPHA_Float_29                       0.969553f
-#define ALPHA_Float_30                       0.957221f
-#define ALPHA_Float_31                       0.940051f
-#define ALPHA_Float_32                       0.916297f
-#define ALPHA_Float_33                       0.883729f
-#define ALPHA_Float_34                       0.839645f
-#define ALPHA_Float_35                       0.781036f
-#define ALPHA_Float_36                       0.705078f
-#define ALPHA_Float_37                       0.610108f
-#define ALPHA_Float_38                       0.497239f
-#define ALPHA_Float_39                       0.372343f
-#define ALPHA_Float_40                       0.247351f
-#define ALPHA_Float_41                       0.138722f
-#define ALPHA_Float_42                       0.061234f
-#define ALPHA_Float_43                       0.019267f
-#define ALPHA_Float_44                       0.003756f
-#define ALPHA_Float_45                       0.000372f
-#define ALPHA_Float_46                       0.000014f
-#define ALPHA_Float_47                       0.000000f
-#define ALPHA_Float_48                       0.000000f
-#define ALPHA_Float_49                       0.000000f
-#define ALPHA_Float_50                       0.000000f
+#define ALPHA_Float_0 0.999999f
+#define ALPHA_Float_1 0.999998f
+#define ALPHA_Float_2 0.999997f
+#define ALPHA_Float_3 0.999996f
+#define ALPHA_Float_4 0.999995f
+#define ALPHA_Float_5 0.999992f
+#define ALPHA_Float_6 0.999989f
+#define ALPHA_Float_7 0.999985f
+#define ALPHA_Float_8 0.999979f
+#define ALPHA_Float_9 0.999970f
+#define ALPHA_Float_10 0.999957f
+#define ALPHA_Float_11 0.999939f
+#define ALPHA_Float_12 0.999914f
+#define ALPHA_Float_13 0.999879f
+#define ALPHA_Float_14 0.999829f
+#define ALPHA_Float_15 0.999758f
+#define ALPHA_Float_16 0.999658f
+#define ALPHA_Float_17 0.999516f
+#define ALPHA_Float_18 0.999316f
+#define ALPHA_Float_19 0.999033f
+#define ALPHA_Float_20 0.998633f
+#define ALPHA_Float_21 0.998067f
+#define ALPHA_Float_22 0.997268f
+#define ALPHA_Float_23 0.996139f
+#define ALPHA_Float_24 0.994545f
+#define ALPHA_Float_25 0.992295f
+#define ALPHA_Float_26 0.989123f
+#define ALPHA_Float_27 0.984654f
+#define ALPHA_Float_28 0.978370f
+#define ALPHA_Float_29 0.969553f
+#define ALPHA_Float_30 0.957221f
+#define ALPHA_Float_31 0.940051f
+#define ALPHA_Float_32 0.916297f
+#define ALPHA_Float_33 0.883729f
+#define ALPHA_Float_34 0.839645f
+#define ALPHA_Float_35 0.781036f
+#define ALPHA_Float_36 0.705078f
+#define ALPHA_Float_37 0.610108f
+#define ALPHA_Float_38 0.497239f
+#define ALPHA_Float_39 0.372343f
+#define ALPHA_Float_40 0.247351f
+#define ALPHA_Float_41 0.138722f
+#define ALPHA_Float_42 0.061234f
+#define ALPHA_Float_43 0.019267f
+#define ALPHA_Float_44 0.003756f
+#define ALPHA_Float_45 0.000372f
+#define ALPHA_Float_46 0.000014f
+#define ALPHA_Float_47 0.000000f
+#define ALPHA_Float_48 0.000000f
+#define ALPHA_Float_49 0.000000f
+#define ALPHA_Float_50 0.000000f
 
 #endif
diff --git a/media/libeffects/lvm/lib/Common/src/LVM_Mixer_TimeConstant.cpp b/media/libeffects/lvm/lib/Common/src/LVM_Mixer_TimeConstant.cpp
index 73da2cf..3ec103a 100644
--- a/media/libeffects/lvm/lib/Common/src/LVM_Mixer_TimeConstant.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVM_Mixer_TimeConstant.cpp
@@ -56,83 +56,37 @@
 /*  Alpha   - the filter coefficient Q31 format                         */
 /*                                                                      */
 /************************************************************************/
-LVM_FLOAT LVM_Mixer_TimeConstant(LVM_UINT32   tc,
-                                  LVM_UINT32   Fs,
-                                  LVM_UINT16   NumChannels)
-{
-
-    LVM_UINT32  Product;
-    LVM_FLOAT  ProductFloat;
-    LVM_INT16   InterpolateShort;
-    LVM_FLOAT   Interpolate;
-    LVM_UINT16  Shift;
-    LVM_FLOAT   Diff;
-    LVM_FLOAT  Table[] = {ALPHA_Float_0,             /* Log spaced look-up table */
-                          ALPHA_Float_1,
-                          ALPHA_Float_2,
-                          ALPHA_Float_3,
-                          ALPHA_Float_4,
-                          ALPHA_Float_5,
-                          ALPHA_Float_6,
-                          ALPHA_Float_7,
-                          ALPHA_Float_8,
-                          ALPHA_Float_9,
-                          ALPHA_Float_10,
-                          ALPHA_Float_11,
-                          ALPHA_Float_12,
-                          ALPHA_Float_13,
-                          ALPHA_Float_14,
-                          ALPHA_Float_15,
-                          ALPHA_Float_16,
-                          ALPHA_Float_17,
-                          ALPHA_Float_18,
-                          ALPHA_Float_19,
-                          ALPHA_Float_20,
-                          ALPHA_Float_21,
-                          ALPHA_Float_22,
-                          ALPHA_Float_23,
-                          ALPHA_Float_24,
-                          ALPHA_Float_25,
-                          ALPHA_Float_26,
-                          ALPHA_Float_27,
-                          ALPHA_Float_28,
-                          ALPHA_Float_29,
-                          ALPHA_Float_30,
-                          ALPHA_Float_31,
-                          ALPHA_Float_32,
-                          ALPHA_Float_33,
-                          ALPHA_Float_34,
-                          ALPHA_Float_35,
-                          ALPHA_Float_36,
-                          ALPHA_Float_37,
-                          ALPHA_Float_38,
-                          ALPHA_Float_39,
-                          ALPHA_Float_40,
-                          ALPHA_Float_41,
-                          ALPHA_Float_42,
-                          ALPHA_Float_43,
-                          ALPHA_Float_44,
-                          ALPHA_Float_45,
-                          ALPHA_Float_46,
-                          ALPHA_Float_47,
-                          ALPHA_Float_48,
-                          ALPHA_Float_49,
-                          ALPHA_Float_50};
+LVM_FLOAT LVM_Mixer_TimeConstant(LVM_UINT32 tc, LVM_UINT32 Fs, LVM_UINT16 NumChannels) {
+    LVM_UINT32 Product;
+    LVM_FLOAT ProductFloat;
+    LVM_INT16 InterpolateShort;
+    LVM_FLOAT Interpolate;
+    LVM_UINT16 Shift;
+    LVM_FLOAT Diff;
+    LVM_FLOAT Table[] = {
+            ALPHA_Float_0, /* Log spaced look-up table */
+            ALPHA_Float_1,  ALPHA_Float_2,  ALPHA_Float_3,  ALPHA_Float_4,  ALPHA_Float_5,
+            ALPHA_Float_6,  ALPHA_Float_7,  ALPHA_Float_8,  ALPHA_Float_9,  ALPHA_Float_10,
+            ALPHA_Float_11, ALPHA_Float_12, ALPHA_Float_13, ALPHA_Float_14, ALPHA_Float_15,
+            ALPHA_Float_16, ALPHA_Float_17, ALPHA_Float_18, ALPHA_Float_19, ALPHA_Float_20,
+            ALPHA_Float_21, ALPHA_Float_22, ALPHA_Float_23, ALPHA_Float_24, ALPHA_Float_25,
+            ALPHA_Float_26, ALPHA_Float_27, ALPHA_Float_28, ALPHA_Float_29, ALPHA_Float_30,
+            ALPHA_Float_31, ALPHA_Float_32, ALPHA_Float_33, ALPHA_Float_34, ALPHA_Float_35,
+            ALPHA_Float_36, ALPHA_Float_37, ALPHA_Float_38, ALPHA_Float_39, ALPHA_Float_40,
+            ALPHA_Float_41, ALPHA_Float_42, ALPHA_Float_43, ALPHA_Float_44, ALPHA_Float_45,
+            ALPHA_Float_46, ALPHA_Float_47, ALPHA_Float_48, ALPHA_Float_49, ALPHA_Float_50};
 
     /* Calculate the product of the time constant and the sample rate */
-    Product = ((tc >> 16) * (LVM_UINT32)Fs) << 13;  /* Stereo value */
+    Product = ((tc >> 16) * (LVM_UINT32)Fs) << 13; /* Stereo value */
     Product = Product + (((tc & 0x0000FFFF) * (LVM_UINT32)Fs) >> 3);
 
-    if (NumChannels == 1)
-    {
-        Product = Product >> 1;   /* Mono value */
+    if (NumChannels == 1) {
+        Product = Product >> 1; /* Mono value */
     }
 
     /* Normalize to get the table index and interpolation factor */
-    for (Shift = 0; Shift < ((Alpha_TableSize - 1) / 2); Shift++)
-    {
-        if ((Product & 0x80000000) != 0)
-        {
+    for (Shift = 0; Shift < ((Alpha_TableSize - 1) / 2); Shift++) {
+        if ((Product & 0x80000000) != 0) {
             break;
         }
 
@@ -140,8 +94,7 @@
     }
     Shift = (LVM_UINT16)((Shift << 1));
 
-    if ((Product & 0x40000000)==0)
-    {
+    if ((Product & 0x40000000) == 0) {
         Shift++;
     }
 
diff --git a/media/libeffects/lvm/lib/Common/src/LVM_Polynomial.cpp b/media/libeffects/lvm/lib/Common/src/LVM_Polynomial.cpp
index 2c3e9ec..8382529 100644
--- a/media/libeffects/lvm/lib/Common/src/LVM_Polynomial.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVM_Polynomial.cpp
@@ -40,33 +40,25 @@
 /* RETURNS:                                                                */
 /*   The result of the polynomial expansion in Q1.31 format                */
 /*-------------------------------------------------------------------------*/
-LVM_FLOAT LVM_Polynomial(LVM_UINT16    N,
-                         LVM_FLOAT    *pCoefficients,
-                         LVM_FLOAT    X)
-{
+LVM_FLOAT LVM_Polynomial(LVM_UINT16 N, LVM_FLOAT* pCoefficients, LVM_FLOAT X) {
     LVM_INT32 i;
-    LVM_FLOAT Y,A,XTemp,Temp,sign;
+    LVM_FLOAT Y, A, XTemp, Temp, sign;
 
     Y = *pCoefficients; /* Y=A0*/
     pCoefficients++;
 
-    if(X == -1.0f)
-    {
+    if (X == -1.0f) {
         Temp = -1;
         sign = Temp;
-        for(i = 1; i <= N; i++)
-        {
+        for (i = 1; i <= N; i++) {
             Y += ((*pCoefficients) * sign);
             pCoefficients++;
             sign *= Temp;
         }
 
-    }
-    else
-    {
+    } else {
         XTemp = X;
-        for(i = N-1; i >= 0; i--)
-        {
+        for (i = N - 1; i >= 0; i--) {
             A = *pCoefficients;
             pCoefficients++;
 
diff --git a/media/libeffects/lvm/lib/Common/src/LVM_Power10.cpp b/media/libeffects/lvm/lib/Common/src/LVM_Power10.cpp
index ae8e9d1..85596aa 100644
--- a/media/libeffects/lvm/lib/Common/src/LVM_Power10.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVM_Power10.cpp
@@ -53,23 +53,10 @@
 /* RETURNS:                                                                */
 /*   The result of the 10x expansion in Q8.24 format                       */
 /*-------------------------------------------------------------------------*/
-LVM_FLOAT LVM_Power10(LVM_FLOAT     X)
-{
-    LVM_FLOAT Y,Coefficients[13]={0.999906f,
-                                  2.302475f,
-                                  2.652765f,
-                                  2.035494f,
-                                  1.165667f,
-                                  0.537676f,
-                                  0.213192f,
-                                  0.069603f,
-                                  0.016553f,
-                                  0.004373f,
-                                  0.001817f,
-                                  0.000367f,
-                                  0};
-    Y=LVM_Polynomial((LVM_UINT16)11,
-                     Coefficients,
-                     X);
+LVM_FLOAT LVM_Power10(LVM_FLOAT X) {
+    LVM_FLOAT Y, Coefficients[13] = {0.999906f, 2.302475f, 2.652765f, 2.035494f, 1.165667f,
+                                     0.537676f, 0.213192f, 0.069603f, 0.016553f, 0.004373f,
+                                     0.001817f, 0.000367f, 0};
+    Y = LVM_Polynomial((LVM_UINT16)11, Coefficients, X);
     return Y;
 }
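A quick numeric check of the 10^X approximation above, using only the coefficients shown: evaluating the series at X = 1 sums the coefficients to about 9.9999 (≈ 10^1), at X = -1 the alternating sum comes to about 0.0999 (≈ 10^-1), and at X = 0 it returns the leading 0.999906 (≈ 10^0). A standalone sketch of that check (the Horner loop here is only for the check; it is not the LVM_Polynomial implementation):

#include <cstdio>

// The coefficient list used by LVM_Power10 above (degree 11 plus padding).
static const float kCoeffs[13] = {0.999906f, 2.302475f, 2.652765f, 2.035494f, 1.165667f,
                                  0.537676f, 0.213192f, 0.069603f, 0.016553f, 0.004373f,
                                  0.001817f, 0.000367f, 0};

// Evaluates sum(kCoeffs[i] * x^i) with Horner's rule.
static float EvalPoly(float x) {
    float y = 0.0f;
    for (int i = 12; i >= 0; --i) y = y * x + kCoeffs[i];
    return y;
}

int main() {
    std::printf("10^1  ~ %f\n", EvalPoly(1.0f));   // ~9.9999
    std::printf("10^-1 ~ %f\n", EvalPoly(-1.0f));  // ~0.0999
    std::printf("10^0  ~ %f\n", EvalPoly(0.0f));   // 0.999906
    return 0;
}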
diff --git a/media/libeffects/lvm/lib/Common/src/LVM_Timer.cpp b/media/libeffects/lvm/lib/Common/src/LVM_Timer.cpp
index 5995f54..be7c8e4 100644
--- a/media/libeffects/lvm/lib/Common/src/LVM_Timer.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVM_Timer.cpp
@@ -26,19 +26,16 @@
 /*  TIMER FUNCTION                                                                      */
 /****************************************************************************************/
 
-void LVM_Timer      (   LVM_Timer_Instance_t       *pInstance,
-                        LVM_INT16                  BlockSize ){
+void LVM_Timer(LVM_Timer_Instance_t* pInstance, LVM_INT16 BlockSize) {
+    LVM_Timer_Instance_Private_t* pInstancePr;
+    pInstancePr = (LVM_Timer_Instance_Private_t*)pInstance;
 
-    LVM_Timer_Instance_Private_t *pInstancePr;
-    pInstancePr = (LVM_Timer_Instance_Private_t *)pInstance;
-
-    if (pInstancePr->TimerArmed){
+    if (pInstancePr->TimerArmed) {
         pInstancePr->RemainingTimeInSamples -= BlockSize;
-        if (pInstancePr->RemainingTimeInSamples <= 0){
+        if (pInstancePr->RemainingTimeInSamples <= 0) {
             pInstancePr->TimerArmed = 0;
-            (*pInstancePr->pCallBack) ( pInstancePr->pCallbackInstance,
-                                        pInstancePr->pCallBackParams,
-                                        pInstancePr->CallBackParam );
+            (*pInstancePr->pCallBack)(pInstancePr->pCallbackInstance, pInstancePr->pCallBackParams,
+                                      pInstancePr->CallBackParam);
         }
     }
 }
diff --git a/media/libeffects/lvm/lib/Common/src/LVM_Timer_Init.cpp b/media/libeffects/lvm/lib/Common/src/LVM_Timer_Init.cpp
index 3015057..bfd6bcf 100644
--- a/media/libeffects/lvm/lib/Common/src/LVM_Timer_Init.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVM_Timer_Init.cpp
@@ -33,20 +33,20 @@
 /*  INIT FUNCTION                                                                       */
 /****************************************************************************************/
 
-void LVM_Timer_Init (   LVM_Timer_Instance_t       *pInstance,
-                        LVM_Timer_Params_t         *pParams     ){
+void LVM_Timer_Init(LVM_Timer_Instance_t* pInstance, LVM_Timer_Params_t* pParams) {
+    LVM_Timer_Instance_Private_t* pInstancePr;
+    pInstancePr = (LVM_Timer_Instance_Private_t*)pInstance;
 
-    LVM_Timer_Instance_Private_t *pInstancePr;
-    pInstancePr = (LVM_Timer_Instance_Private_t *)pInstance;
-
-    pInstancePr->CallBackParam     = pParams->CallBackParam;
-    pInstancePr->pCallBackParams   = (LVM_INT32 *)pParams->pCallBackParams;
+    pInstancePr->CallBackParam = pParams->CallBackParam;
+    pInstancePr->pCallBackParams = (LVM_INT32*)pParams->pCallBackParams;
     pInstancePr->pCallbackInstance = pParams->pCallbackInstance;
-    pInstancePr->pCallBack         = pParams->pCallBack;
-    pInstancePr->TimerArmed        = 1;
+    pInstancePr->pCallBack = pParams->pCallBack;
+    pInstancePr->TimerArmed = 1;
 
-    MUL32x16INTO32(pParams->SamplingRate,OneOverThousandInQ24,pInstancePr->RemainingTimeInSamples,16);  /* (Q0 * Q24) >>16 into Q8*/
-    MUL32x16INTO32(pInstancePr->RemainingTimeInSamples,pParams->TimeInMs,pInstancePr->RemainingTimeInSamples,8);  /* (Q8 * Q0) >>8 into Q0*/
+    MUL32x16INTO32(pParams->SamplingRate, OneOverThousandInQ24, pInstancePr->RemainingTimeInSamples,
+                   16); /* (Q0 * Q24) >>16 into Q8*/
+    MUL32x16INTO32(pInstancePr->RemainingTimeInSamples, pParams->TimeInMs,
+                   pInstancePr->RemainingTimeInSamples, 8); /* (Q8 * Q0) >>8 into Q0*/
 }
 
 /****************************************************************************************/
diff --git a/media/libeffects/lvm/lib/Common/src/LVM_Timer_Private.h b/media/libeffects/lvm/lib/Common/src/LVM_Timer_Private.h
index a372b82..3e8aba8 100644
--- a/media/libeffects/lvm/lib/Common/src/LVM_Timer_Private.h
+++ b/media/libeffects/lvm/lib/Common/src/LVM_Timer_Private.h
@@ -24,14 +24,13 @@
 /*  TYPE DEFINITIONS                                                                    */
 /****************************************************************************************/
 
-typedef struct
-{
-    LVM_INT32  RemainingTimeInSamples;
-    LVM_INT32  CallBackParam;
-    LVM_INT32  *pCallBackParams;
-    void  *pCallbackInstance;
-    void  (*pCallBack)(void*,void*,LVM_INT32);
-    LVM_INT16 TimerArmed;                        /* Boolean, true between init and callback */
+typedef struct {
+    LVM_INT32 RemainingTimeInSamples;
+    LVM_INT32 CallBackParam;
+    LVM_INT32* pCallBackParams;
+    void* pCallbackInstance;
+    void (*pCallBack)(void*, void*, LVM_INT32);
+    LVM_INT16 TimerArmed; /* Boolean, true between init and callback */
 
 } LVM_Timer_Instance_Private_t;
 
@@ -39,4 +38,4 @@
 /*  END OF HEADER                                                                       */
 /****************************************************************************************/
 
-#endif  /* LVM_TIMER_PRIVATE_H */
+#endif /* LVM_TIMER_PRIVATE_H */
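A hedged usage sketch for the timer pair above: arm it with LVM_Timer_Init, then tick it once per processed block; when RemainingTimeInSamples reaches zero the callback fires once and TimerArmed is cleared. The field names and callback signature come from the code in this patch; the include, the sampling rate, and the block size are assumptions.

// Assumes the library header that declares LVM_Timer_Instance_t and
// LVM_Timer_Params_t; the exact header name is not part of this hunk.
#include "LVM_Timer.h"  // assumed header name

static void MyCallback(void* callbackInstance, void* callbackParams, LVM_INT32 param) {
    // Invoked once, after roughly TimeInMs worth of samples has been processed.
    (void)callbackInstance;
    (void)callbackParams;
    (void)param;
}

void ExampleArmTimer() {
    LVM_Timer_Instance_t timer;
    LVM_Timer_Params_t params;
    params.SamplingRate = 48000;  // illustrative
    params.TimeInMs = 100;        // fire after ~100 ms of audio
    params.CallBackParam = 0;
    params.pCallBackParams = nullptr;
    params.pCallbackInstance = nullptr;
    params.pCallBack = MyCallback;
    LVM_Timer_Init(&timer, &params);

    // In the processing loop: advance by each block's sample count.
    LVM_Timer(&timer, 480);  // illustrative block size
}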
diff --git a/media/libeffects/lvm/lib/Common/src/LoadConst_16.cpp b/media/libeffects/lvm/lib/Common/src/LoadConst_16.cpp
index f88ca0e..a39fa2f 100644
--- a/media/libeffects/lvm/lib/Common/src/LoadConst_16.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LoadConst_16.cpp
@@ -25,14 +25,10 @@
    FUNCTION LoadConst_16
 ***********************************************************************************/
 
-void LoadConst_16(const LVM_INT16 val,
-                  LVM_INT16 *dst,
-                  LVM_INT16 n )
-{
+void LoadConst_16(const LVM_INT16 val, LVM_INT16* dst, LVM_INT16 n) {
     LVM_INT16 ii;
 
-    for (ii = n; ii != 0; ii--)
-    {
+    for (ii = n; ii != 0; ii--) {
         *dst = val;
         dst++;
     }
diff --git a/media/libeffects/lvm/lib/Common/src/LoadConst_32.cpp b/media/libeffects/lvm/lib/Common/src/LoadConst_32.cpp
index c789756..df7a558 100644
--- a/media/libeffects/lvm/lib/Common/src/LoadConst_32.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LoadConst_32.cpp
@@ -24,14 +24,10 @@
 /**********************************************************************************
    FUNCTION LoadConst_32
 ***********************************************************************************/
-void LoadConst_Float(const LVM_FLOAT   val,
-                     LVM_FLOAT  *dst,
-                     LVM_INT16 n )
-{
+void LoadConst_Float(const LVM_FLOAT val, LVM_FLOAT* dst, LVM_INT16 n) {
     LVM_INT16 ii;
 
-    for (ii = n; ii != 0; ii--)
-    {
+    for (ii = n; ii != 0; ii--) {
         *dst = val;
         dst++;
     }
diff --git a/media/libeffects/lvm/lib/Common/src/MSTo2i_Sat_16x16.cpp b/media/libeffects/lvm/lib/Common/src/MSTo2i_Sat_16x16.cpp
index 1ea765a..a19e66f 100644
--- a/media/libeffects/lvm/lib/Common/src/MSTo2i_Sat_16x16.cpp
+++ b/media/libeffects/lvm/lib/Common/src/MSTo2i_Sat_16x16.cpp
@@ -25,50 +25,35 @@
    FUNCTION  MSTO2I_SAT_16X16
 ***********************************************************************************/
 
-void MSTo2i_Sat_16x16(const LVM_INT16  *srcM,
-                      const LVM_INT16  *srcS,
-                      LVM_INT16  *dst,
-                      LVM_INT16  n )
-{
-    LVM_INT32 temp,mVal,sVal;
+void MSTo2i_Sat_16x16(const LVM_INT16* srcM, const LVM_INT16* srcS, LVM_INT16* dst, LVM_INT16 n) {
+    LVM_INT32 temp, mVal, sVal;
     LVM_INT16 ii;
 
-    for (ii = n; ii != 0; ii--)
-    {
-        mVal=(LVM_INT32)*srcM;
+    for (ii = n; ii != 0; ii--) {
+        mVal = (LVM_INT32)*srcM;
         srcM++;
 
-        sVal=(LVM_INT32)*srcS;
+        sVal = (LVM_INT32)*srcS;
         srcS++;
 
         temp = mVal + sVal;
 
-        if (temp > 0x00007FFF)
-        {
+        if (temp > 0x00007FFF) {
             *dst = 0x7FFF;
-        }
-        else if (temp < -0x00008000)
-        {
-            *dst = - 0x8000;
-        }
-        else
-        {
+        } else if (temp < -0x00008000) {
+            *dst = -0x8000;
+        } else {
             *dst = (LVM_INT16)temp;
         }
         dst++;
 
         temp = mVal - sVal;
 
-        if (temp > 0x00007FFF)
-        {
+        if (temp > 0x00007FFF) {
             *dst = 0x7FFF;
-        }
-        else if (temp < -0x00008000)
-        {
-            *dst = - 0x8000;
-        }
-        else
-        {
+        } else if (temp < -0x00008000) {
+            *dst = -0x8000;
+        } else {
             *dst = (LVM_INT16)temp;
         }
         dst++;
@@ -76,16 +61,11 @@
 
     return;
 }
-void MSTo2i_Sat_Float(const LVM_FLOAT  *srcM,
-                      const LVM_FLOAT  *srcS,
-                      LVM_FLOAT  *dst,
-                      LVM_INT16  n )
-{
-    LVM_FLOAT temp,mVal,sVal;
+void MSTo2i_Sat_Float(const LVM_FLOAT* srcM, const LVM_FLOAT* srcS, LVM_FLOAT* dst, LVM_INT16 n) {
+    LVM_FLOAT temp, mVal, sVal;
     LVM_INT16 ii;
 
-    for (ii = n; ii != 0; ii--)
-    {
+    for (ii = n; ii != 0; ii--) {
         mVal = (LVM_FLOAT)*srcM;
         srcM++;
 
@@ -94,32 +74,22 @@
 
         temp = mVal + sVal;
 
-        if (temp > 1.0f)
-        {
+        if (temp > 1.0f) {
             *dst = 1.0f;
-        }
-        else if (temp < -1.0f)
-        {
+        } else if (temp < -1.0f) {
             *dst = -1.0f;
-        }
-        else
-        {
+        } else {
             *dst = (LVM_FLOAT)temp;
         }
         dst++;
 
         temp = mVal - sVal;
 
-        if (temp > 1.0f)
-        {
+        if (temp > 1.0f) {
             *dst = 1.0f;
-        }
-        else if (temp < -1.0f)
-        {
-            *dst = - 1.0f;
-        }
-        else
-        {
+        } else if (temp < -1.0f) {
+            *dst = -1.0f;
+        } else {
             *dst = (LVM_FLOAT)temp;
         }
         dst++;
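The routine above is plain mid/side decoding with saturation: the first output is M + S and the second is M - S, each clamped to the output range (±1.0 in the float path, 16-bit full scale in the integer path). A standalone worked example of the 16-bit clamp, with illustrative inputs:

#include <cstdint>
#include <cstdio>

// Same saturating mid/side-to-stereo mapping as MSTo2i_Sat_16x16, for one sample pair.
static int16_t Sat16(int32_t v) {
    if (v > 0x7FFF) return 0x7FFF;
    if (v < -0x8000) return -0x8000;
    return (int16_t)v;
}

int main() {
    int16_t m = 30000, s = 10000;           // illustrative mid/side inputs
    int16_t left = Sat16((int32_t)m + s);   // 40000 saturates to 32767
    int16_t right = Sat16((int32_t)m - s);  // 20000 passes through unchanged
    std::printf("L=%d R=%d\n", left, right);
    return 0;
}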
diff --git a/media/libeffects/lvm/lib/Common/src/Mac3s_Sat_16x16.cpp b/media/libeffects/lvm/lib/Common/src/Mac3s_Sat_16x16.cpp
index 6584251..1d450b0 100644
--- a/media/libeffects/lvm/lib/Common/src/Mac3s_Sat_16x16.cpp
+++ b/media/libeffects/lvm/lib/Common/src/Mac3s_Sat_16x16.cpp
@@ -35,36 +35,26 @@
    FUNCTION Mac3S_16X16
 ***********************************************************************************/
 
-void Mac3s_Sat_16x16( const LVM_INT16 *src,
-                     const LVM_INT16 val,
-                     LVM_INT16 *dst,
-                     LVM_INT16 n)
-{
+void Mac3s_Sat_16x16(const LVM_INT16* src, const LVM_INT16 val, LVM_INT16* dst, LVM_INT16 n) {
     LVM_INT16 ii;
     LVM_INT16 srcval;
-    LVM_INT32 Temp,dInVal;
+    LVM_INT32 Temp, dInVal;
 
-    for (ii = n; ii != 0; ii--)
-    {
-        srcval=*src;
+    for (ii = n; ii != 0; ii--) {
+        srcval = *src;
         src++;
 
-        Temp = (srcval *val)>>15;
+        Temp = (srcval * val) >> 15;
 
-        dInVal  = (LVM_INT32)*dst;
+        dInVal = (LVM_INT32)*dst;
 
         Temp = Temp + dInVal;
 
-        if (Temp > 0x00007FFF)
-        {
+        if (Temp > 0x00007FFF) {
             *dst = 0x7FFF;
-        }
-        else if (Temp < -0x00008000)
-        {
-            *dst = - 0x8000;
-        }
-        else
-        {
+        } else if (Temp < -0x00008000) {
+            *dst = -0x8000;
+        } else {
             *dst = (LVM_INT16)Temp;
         }
 
@@ -75,4 +65,3 @@
 }
 
 /**********************************************************************************/
-
diff --git a/media/libeffects/lvm/lib/Common/src/Mac3s_Sat_32x16.cpp b/media/libeffects/lvm/lib/Common/src/Mac3s_Sat_32x16.cpp
index 5d5564f..0fe9fef 100644
--- a/media/libeffects/lvm/lib/Common/src/Mac3s_Sat_32x16.cpp
+++ b/media/libeffects/lvm/lib/Common/src/Mac3s_Sat_32x16.cpp
@@ -26,33 +26,25 @@
    FUNCTION MAC3S_16X16
 ***********************************************************************************/
 
-void Mac3s_Sat_32x16(  const LVM_INT32 *src,
-                     const LVM_INT16 val,
-                     LVM_INT32 *dst,
-                     LVM_INT16 n)
-{
+void Mac3s_Sat_32x16(const LVM_INT32* src, const LVM_INT16 val, LVM_INT32* dst, LVM_INT16 n) {
     LVM_INT16 ii;
-    LVM_INT32 srcval,temp, dInVal, dOutVal;
+    LVM_INT32 srcval, temp, dInVal, dOutVal;
 
-    for (ii = n; ii != 0; ii--)
-    {
-        srcval=*src;
+    for (ii = n; ii != 0; ii--) {
+        srcval = *src;
         src++;
 
-        MUL32x16INTO32(srcval,val,temp,15)
+        MUL32x16INTO32(srcval, val, temp, 15)
 
-            dInVal  = *dst;
+        dInVal = *dst;
         dOutVal = temp + dInVal;
 
-        if ((((dOutVal ^ temp) & (dOutVal ^ dInVal)) >> 31)!=0)     /* overflow / underflow */
+        if ((((dOutVal ^ temp) & (dOutVal ^ dInVal)) >> 31) != 0) /* overflow / underflow */
         {
-            if(temp<0)
-            {
-                dOutVal=0x80000000L;
-            }
-            else
-            {
-                dOutVal=0x7FFFFFFFL;
+            if (temp < 0) {
+                dOutVal = 0x80000000L;
+            } else {
+                dOutVal = 0x7FFFFFFFL;
             }
         }
 
@@ -62,35 +54,25 @@
 
     return;
 }
-void Mac3s_Sat_Float(const LVM_FLOAT *src,
-                     const LVM_FLOAT val,
-                     LVM_FLOAT *dst,
-                     LVM_INT16 n)
-{
+void Mac3s_Sat_Float(const LVM_FLOAT* src, const LVM_FLOAT val, LVM_FLOAT* dst, LVM_INT16 n) {
     LVM_INT16 ii;
     LVM_FLOAT srcval;
-    LVM_FLOAT Temp,dInVal;
+    LVM_FLOAT Temp, dInVal;
 
-    for (ii = n; ii != 0; ii--)
-    {
+    for (ii = n; ii != 0; ii--) {
         srcval = *src;
         src++;
 
         Temp = srcval * val;
 
-        dInVal  = (LVM_FLOAT)*dst;
+        dInVal = (LVM_FLOAT)*dst;
         Temp = Temp + dInVal;
 
-        if (Temp > 1.000000f)
-        {
+        if (Temp > 1.000000f) {
             *dst = 1.000000f;
-        }
-        else if (Temp < -1.000000f)
-        {
+        } else if (Temp < -1.000000f) {
             *dst = -1.000000f;
-        }
-        else
-        {
+        } else {
             *dst = Temp;
         }
         dst++;
@@ -99,4 +81,3 @@
     return;
 }
 /**********************************************************************************/
-
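
The saturation test in Mac3s_Sat_32x16 relies on a sign-bit identity: a two's-complement addition overflowed exactly when the result's sign differs from the sign of both addends, which is what ((sum ^ a) & (sum ^ b)) >> 31 checks. A self-contained sketch of the same test follows; add_sat32 is an illustrative name, the wrapped add is done through unsigned arithmetic so the sketch itself avoids signed-overflow undefined behaviour, and (like the library) it assumes an arithmetic right shift on negative values.

    #include <cstdint>
    #include <cstdio>

    // Saturating 32-bit add using the same overflow test as Mac3s_Sat_32x16:
    // the sum overflowed iff its sign differs from the sign of BOTH addends.
    static int32_t add_sat32(int32_t a, int32_t b) {
        // Wrapping add via unsigned arithmetic (two's-complement result).
        int32_t sum = (int32_t)((uint32_t)a + (uint32_t)b);
        if ((((sum ^ a) & (sum ^ b)) >> 31) != 0) {  // overflow / underflow
            return (a < 0) ? INT32_MIN : INT32_MAX;  // saturate toward the addends' sign
        }
        return sum;
    }

    int main() {
        printf("%d\n", (int)add_sat32(0x7FFFFFF0, 0x200));         // 2147483647
        printf("%d\n", (int)add_sat32(-0x60000000, -0x60000000));  // -2147483648
        return 0;
    }
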
diff --git a/media/libeffects/lvm/lib/Common/src/MixInSoft_D32C31_SAT.cpp b/media/libeffects/lvm/lib/Common/src/MixInSoft_D32C31_SAT.cpp
index 7c7b36f..9663998 100644
--- a/media/libeffects/lvm/lib/Common/src/MixInSoft_D32C31_SAT.cpp
+++ b/media/libeffects/lvm/lib/Common/src/MixInSoft_D32C31_SAT.cpp
@@ -26,33 +26,29 @@
    DEFINITIONS
 ***********************************************************************************/
 
-#define TRUE          1
-#define FALSE         0
+#define TRUE 1
+#define FALSE 0
 
 /**********************************************************************************
    FUNCTION MIXINSOFT_D32C31_SAT
 ***********************************************************************************/
-void MixInSoft_D32C31_SAT( Mix_1St_Cll_FLOAT_t        *pInstance,
-                           const LVM_FLOAT      *src,
-                           LVM_FLOAT      *dst,
-                           LVM_INT16      n)
-{
+void MixInSoft_D32C31_SAT(Mix_1St_Cll_FLOAT_t* pInstance, const LVM_FLOAT* src, LVM_FLOAT* dst,
+                          LVM_INT16 n) {
     char HardMixing = TRUE;
 
-    if(n <= 0)    return;
+    if (n <= 0) return;
 
     /******************************************************************************
        SOFT MIXING
     *******************************************************************************/
-    if (pInstance->Current != pInstance->Target)
-    {
-        if(pInstance->Alpha == 0){
+    if (pInstance->Current != pInstance->Target) {
+        if (pInstance->Alpha == 0) {
             pInstance->Current = pInstance->Target;
-        }else if ((pInstance->Current-pInstance->Target < POINT_ZERO_ONE_DB_FLOAT) &&
-                 (pInstance->Current-pInstance->Target > -POINT_ZERO_ONE_DB_FLOAT)){
+        } else if ((pInstance->Current - pInstance->Target < POINT_ZERO_ONE_DB_FLOAT) &&
+                   (pInstance->Current - pInstance->Target > -POINT_ZERO_ONE_DB_FLOAT)) {
             pInstance->Current = pInstance->Target; /* Difference is not significant anymore. \
                                                        Make them equal. */
-        }else{
+        } else {
             /* Soft mixing has to be applied */
             HardMixing = FALSE;
             Core_MixInSoft_D32C31_SAT(pInstance, src, dst, n);
@@ -63,11 +59,11 @@
        HARD MIXING
     *******************************************************************************/
 
-    if (HardMixing){
-        if (pInstance->Target != 0){ /* Nothing to do in case Target = 0 */
+    if (HardMixing) {
+        if (pInstance->Target != 0) { /* Nothing to do in case Target = 0 */
             if ((pInstance->Target) == 1.0f)
                 Add2_Sat_Float(src, dst, n);
-            else{
+            else {
                 Core_MixInSoft_D32C31_SAT(pInstance, src, dst, n);
                 pInstance->Current = pInstance->Target; /* In case the core function would \
                                                            have changed the Current value */
@@ -81,16 +77,15 @@
     /* Call back before the hard mixing, because in this case, hard mixing makes
        use of the core soft mix function which can change the Current value!      */
 
-    if (pInstance->CallbackSet){
+    if (pInstance->CallbackSet) {
         if ((pInstance->Current - pInstance->Target < POINT_ZERO_ONE_DB_FLOAT) &&
-            (pInstance->Current - pInstance->Target > -POINT_ZERO_ONE_DB_FLOAT)){
+            (pInstance->Current - pInstance->Target > -POINT_ZERO_ONE_DB_FLOAT)) {
             pInstance->Current = pInstance->Target; /* Difference is not significant anymore. \
                                                        Make them equal. */
             pInstance->CallbackSet = FALSE;
-            if (pInstance->pCallBack != 0){
-                (*pInstance->pCallBack) ( pInstance->pCallbackHandle,
-                                          pInstance->pGeneralPurpose,
-                                          pInstance->CallbackParam );
+            if (pInstance->pCallBack != 0) {
+                (*pInstance->pCallBack)(pInstance->pCallbackHandle, pInstance->pGeneralPurpose,
+                                        pInstance->CallbackParam);
             }
         }
     }
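
MixInSoft_D32C31_SAT and the two MixSoft_* functions that follow share one control flow: while Current is still ramping toward Target they run the Core_* soft-mix routine; once the two values are within POINT_ZERO_ONE_DB_FLOAT of each other they snap Current to Target and switch to cheap hard paths (plain add, copy or constant fill), and a one-shot callback fires when the ramp has settled. The sketch below is a deliberately simplified, self-contained illustration of that flow only: SimpleMixer, mix_in_block and the linear Step ramp are invented for the example, whereas the real code ramps Current with an exponential Alpha smoother inside the Core_* routines.

    #include <cmath>
    #include <cstdio>

    // Simplified model of the soft/hard mixing control flow used above.
    struct SimpleMixer {
        float Current = 0.0f;
        float Target = 1.0f;
        float Step = 0.1f;                 // invented linear ramp increment
        bool CallbackSet = true;
        void (*Callback)() = nullptr;      // fired once when the ramp completes
    };

    static const float kPointZeroOneDb = 0.001152f;  // same threshold as Mixer_private.h

    static void mix_in_block(SimpleMixer* m, const float* src, float* dst, int n) {
        bool hard = true;
        if (m->Current != m->Target) {
            if (std::fabs(m->Current - m->Target) < kPointZeroOneDb) {
                m->Current = m->Target;    // difference no longer audible: snap
            } else {
                hard = false;              // soft path: ramp while mixing in
                for (int i = 0; i < n; i++) {
                    float d = m->Target - m->Current;
                    m->Current += (std::fabs(d) < m->Step) ? d : (d > 0 ? m->Step : -m->Step);
                    dst[i] += src[i] * m->Current;
                }
            }
        }
        if (hard && m->Target != 0.0f) {   // hard path: gain has settled
            for (int i = 0; i < n; i++) dst[i] += src[i] * m->Target;
        }
        if (m->CallbackSet && m->Current == m->Target) {
            m->CallbackSet = false;        // one-shot notification
            if (m->Callback) m->Callback();
        }
    }

    int main() {
        SimpleMixer m;
        float src[4] = {1.0f, 1.0f, 1.0f, 1.0f};
        float dst[4] = {0.0f, 0.0f, 0.0f, 0.0f};
        mix_in_block(&m, src, dst, 4);
        printf("dst[3]=%g Current=%g\n", dst[3], m.Current);  // ~0.4 for both
        return 0;
    }
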
diff --git a/media/libeffects/lvm/lib/Common/src/MixSoft_1St_D32C31_WRA.cpp b/media/libeffects/lvm/lib/Common/src/MixSoft_1St_D32C31_WRA.cpp
index d3325ec..8408962 100644
--- a/media/libeffects/lvm/lib/Common/src/MixSoft_1St_D32C31_WRA.cpp
+++ b/media/libeffects/lvm/lib/Common/src/MixSoft_1St_D32C31_WRA.cpp
@@ -26,33 +26,29 @@
    DEFINITIONS
 ***********************************************************************************/
 
-#define TRUE          1
-#define FALSE         0
+#define TRUE 1
+#define FALSE 0
 
 /**********************************************************************************
    FUNCTION MIXSOFT_1ST_D32C31_WRA
 ***********************************************************************************/
-void MixSoft_1St_D32C31_WRA(    Mix_1St_Cll_FLOAT_t       *pInstance,
-                                const LVM_FLOAT     *src,
-                                      LVM_FLOAT     *dst,
-                                      LVM_INT16     n)
-{
+void MixSoft_1St_D32C31_WRA(Mix_1St_Cll_FLOAT_t* pInstance, const LVM_FLOAT* src, LVM_FLOAT* dst,
+                            LVM_INT16 n) {
     char HardMixing = TRUE;
 
-    if(n <= 0)    return;
+    if (n <= 0) return;
 
     /******************************************************************************
        SOFT MIXING
     *******************************************************************************/
-    if (pInstance->Current != pInstance->Target)
-    {
-        if(pInstance->Alpha == 0){
+    if (pInstance->Current != pInstance->Target) {
+        if (pInstance->Alpha == 0) {
             pInstance->Current = pInstance->Target;
-        }else if ((pInstance->Current - pInstance->Target < POINT_ZERO_ONE_DB_FLOAT) &&
-                 (pInstance->Current - pInstance->Target > -POINT_ZERO_ONE_DB_FLOAT)){
+        } else if ((pInstance->Current - pInstance->Target < POINT_ZERO_ONE_DB_FLOAT) &&
+                   (pInstance->Current - pInstance->Target > -POINT_ZERO_ONE_DB_FLOAT)) {
             pInstance->Current = pInstance->Target; /* Difference is not significant anymore. \
                                                        Make them equal. */
-        }else{
+        } else {
             /* Soft mixing has to be applied */
             HardMixing = FALSE;
             Core_MixSoft_1St_D32C31_WRA(pInstance, src, dst, n);
@@ -63,14 +59,12 @@
        HARD MIXING
     *******************************************************************************/
 
-    if (HardMixing){
+    if (HardMixing) {
         if (pInstance->Target == 0)
             LoadConst_Float(0, dst, n);
-        else if ((pInstance->Target) == 1.0f){
-            if (src != dst)
-                Copy_Float((LVM_FLOAT*)src, (LVM_FLOAT*)dst, (LVM_INT16)(n));
-        }
-        else
+        else if ((pInstance->Target) == 1.0f) {
+            if (src != dst) Copy_Float((LVM_FLOAT*)src, (LVM_FLOAT*)dst, (LVM_INT16)(n));
+        } else
             Mult3s_Float(src, pInstance->Current, dst, n);
     }
 
@@ -78,16 +72,15 @@
        CALL BACK
     *******************************************************************************/
 
-    if (pInstance->CallbackSet){
+    if (pInstance->CallbackSet) {
         if ((pInstance->Current - pInstance->Target < POINT_ZERO_ONE_DB_FLOAT) &&
-            (pInstance->Current - pInstance->Target > -POINT_ZERO_ONE_DB_FLOAT)){
+            (pInstance->Current - pInstance->Target > -POINT_ZERO_ONE_DB_FLOAT)) {
             pInstance->Current = pInstance->Target; /* Difference is not significant anymore. \
                                                        Make them equal. */
             pInstance->CallbackSet = FALSE;
-            if (pInstance->pCallBack != 0){
-                (*pInstance->pCallBack) ( pInstance->pCallbackHandle,
-                                          pInstance->pGeneralPurpose,
-                                          pInstance->CallbackParam );
+            if (pInstance->pCallBack != 0) {
+                (*pInstance->pCallBack)(pInstance->pCallbackHandle, pInstance->pGeneralPurpose,
+                                        pInstance->CallbackParam);
             }
         }
     }
diff --git a/media/libeffects/lvm/lib/Common/src/MixSoft_2St_D32C31_SAT.cpp b/media/libeffects/lvm/lib/Common/src/MixSoft_2St_D32C31_SAT.cpp
index b002738..aba8537 100644
--- a/media/libeffects/lvm/lib/Common/src/MixSoft_2St_D32C31_SAT.cpp
+++ b/media/libeffects/lvm/lib/Common/src/MixSoft_2St_D32C31_SAT.cpp
@@ -25,42 +25,35 @@
 /**********************************************************************************
    FUNCTION MIXSOFT_2ST_D32C31_SAT
 ***********************************************************************************/
-void MixSoft_2St_D32C31_SAT(    Mix_2St_Cll_FLOAT_t       *pInstance,
-                                const LVM_FLOAT     *src1,
-                                const LVM_FLOAT     *src2,
-                                      LVM_FLOAT     *dst,
-                                      LVM_INT16     n)
-{
-
-    if(n <= 0)    return;
+void MixSoft_2St_D32C31_SAT(Mix_2St_Cll_FLOAT_t* pInstance, const LVM_FLOAT* src1,
+                            const LVM_FLOAT* src2, LVM_FLOAT* dst, LVM_INT16 n) {
+    if (n <= 0) return;
 
     /******************************************************************************
        SOFT MIXING
     *******************************************************************************/
-    if ((pInstance->Current1 != pInstance->Target1) || (pInstance->Current2 != pInstance->Target2))
-    {
+    if ((pInstance->Current1 != pInstance->Target1) ||
+        (pInstance->Current2 != pInstance->Target2)) {
         MixSoft_1St_D32C31_WRA((Mix_1St_Cll_FLOAT_t*)pInstance, src1, dst, n);
-        MixInSoft_D32C31_SAT((Mix_1St_Cll_FLOAT_t *)&pInstance->Alpha2, /* Cast to void: \
-                                                              no dereferencing in function*/
-                              src2, dst, n);
+        MixInSoft_D32C31_SAT((Mix_1St_Cll_FLOAT_t*)&pInstance->Alpha2, /* Cast to void: \
+                                                             no dereferencing in function*/
+                             src2, dst, n);
     }
 
     /******************************************************************************
        HARD MIXING
     *******************************************************************************/
 
-    else
-    {
+    else {
         if (pInstance->Current1 == 0)
             MixSoft_1St_D32C31_WRA(
-                    (Mix_1St_Cll_FLOAT_t *) &pInstance->Alpha2, /* Cast to void: no \
-                                                             dereferencing in function*/
-                                    src2, dst, n);
+                    (Mix_1St_Cll_FLOAT_t*)&pInstance->Alpha2, /* Cast to void: no \
+                                                           dereferencing in function*/
+                    src2, dst, n);
         else if (pInstance->Current2 == 0)
-            MixSoft_1St_D32C31_WRA((Mix_1St_Cll_FLOAT_t*) pInstance, src1, dst, n);
+            MixSoft_1St_D32C31_WRA((Mix_1St_Cll_FLOAT_t*)pInstance, src1, dst, n);
         else
             Core_MixHard_2St_D32C31_SAT(pInstance, src1, src2, dst, n);
     }
 }
 /**********************************************************************************/
-
diff --git a/media/libeffects/lvm/lib/Common/src/Mixer_private.h b/media/libeffects/lvm/lib/Common/src/Mixer_private.h
index 1d653bb..e1e62c5 100644
--- a/media/libeffects/lvm/lib/Common/src/Mixer_private.h
+++ b/media/libeffects/lvm/lib/Common/src/Mixer_private.h
@@ -26,13 +26,13 @@
 
 #define POINT_ZERO_ONE_DB 2473805 /* 0.01 dB on a full scale signal = (10^(0.01/20) -1) * 2^31 */
 
-#define POINT_ZERO_ONE_DB_FLOAT 0.001152 /* 0.01 dB on a full scale \
-                                            signal = (10^(0.01/20) -1) * 2^31 */
+#define POINT_ZERO_ONE_DB_FLOAT         \
+    0.001152 /* 0.01 dB on a full scale \
+                signal = (10^(0.01/20) -1) * 2^31 */
 /**********************************************************************************
    DEFINITIONS
 ***********************************************************************************/
 
 /**********************************************************************************/
 
-#endif //#ifndef __MIXER_PRIVATE_H__
-
+#endif  //#ifndef __MIXER_PRIVATE_H__
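
On the constants above: both encode the same 0.01 dB threshold, POINT_ZERO_ONE_DB as a Q31 value and POINT_ZERO_ONE_DB_FLOAT as the raw gain delta 10^(0.01/20) - 1 ~= 0.001152 (the "* 2^31" wording in the float constant's comment really only applies to the fixed-point one). A quick standalone check, using nothing beyond the standard math library:

    #include <cmath>
    #include <cstdio>

    // Reproduce the 0.01 dB thresholds defined in Mixer_private.h.
    int main() {
        double delta = std::pow(10.0, 0.01 / 20.0) - 1.0;  // gain change for 0.01 dB
        printf("float threshold: %.6f\n", delta);                // ~0.001152
        printf("Q31 threshold  : %.1f\n", delta * 2147483648.0); // ~2473805.6, cf. 2473805 above
        return 0;
    }
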
diff --git a/media/libeffects/lvm/lib/Common/src/MonoTo2I_16.cpp b/media/libeffects/lvm/lib/Common/src/MonoTo2I_16.cpp
index ead798d..7ab5d49 100644
--- a/media/libeffects/lvm/lib/Common/src/MonoTo2I_16.cpp
+++ b/media/libeffects/lvm/lib/Common/src/MonoTo2I_16.cpp
@@ -25,16 +25,12 @@
    FUNCTION MonoTo2I_16
 ***********************************************************************************/
 
-void MonoTo2I_16( const LVM_INT16 *src,
-                 LVM_INT16 *dst,
-                 LVM_INT16 n)
-{
+void MonoTo2I_16(const LVM_INT16* src, LVM_INT16* dst, LVM_INT16 n) {
     LVM_INT16 ii;
-    src += (n-1);
-    dst += ((n*2)-1);
+    src += (n - 1);
+    dst += ((n * 2) - 1);
 
-    for (ii = n; ii != 0; ii--)
-    {
+    for (ii = n; ii != 0; ii--) {
         *dst = *src;
         dst--;
 
diff --git a/media/libeffects/lvm/lib/Common/src/MonoTo2I_32.cpp b/media/libeffects/lvm/lib/Common/src/MonoTo2I_32.cpp
index 603d1fc..1ba669f 100644
--- a/media/libeffects/lvm/lib/Common/src/MonoTo2I_32.cpp
+++ b/media/libeffects/lvm/lib/Common/src/MonoTo2I_32.cpp
@@ -25,16 +25,12 @@
    FUNCTION MonoTo2I_32
 ***********************************************************************************/
 
-void MonoTo2I_32( const LVM_INT32  *src,
-                 LVM_INT32  *dst,
-                 LVM_INT16 n)
-{
+void MonoTo2I_32(const LVM_INT32* src, LVM_INT32* dst, LVM_INT16 n) {
     LVM_INT16 ii;
-    src += (n-1);
-    dst += ((n*2)-1);
+    src += (n - 1);
+    dst += ((n * 2) - 1);
 
-    for (ii = n; ii != 0; ii--)
-    {
+    for (ii = n; ii != 0; ii--) {
         *dst = *src;
         dst--;
 
@@ -45,16 +41,12 @@
 
     return;
 }
-void MonoTo2I_Float( const LVM_FLOAT  *src,
-                     LVM_FLOAT  *dst,
-                     LVM_INT16 n)
-{
+void MonoTo2I_Float(const LVM_FLOAT* src, LVM_FLOAT* dst, LVM_INT16 n) {
     LVM_INT16 ii;
     src += (n - 1);
     dst += ((n * 2) - 1);
 
-    for (ii = n; ii != 0; ii--)
-    {
+    for (ii = n; ii != 0; ii--) {
         *dst = *src;
         dst--;
 
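
The MonoTo2I_* routines above duplicate a mono buffer into interleaved stereo and deliberately iterate backwards, from the last input sample to the first, so they stay correct when src and dst alias the same buffer (the usual in-place case, with the buffer sized for the stereo result). A standalone float sketch of the same idea; mono_to_stereo_inplace is an illustrative name, not a library function.

    #include <cstdio>

    // Mono -> interleaved stereo, walking backwards so dst may alias src
    // (dst must have room for 2*n samples).  Mirrors MonoTo2I_Float above.
    static void mono_to_stereo_inplace(const float* src, float* dst, int n) {
        src += n - 1;
        dst += 2 * n - 1;
        for (int i = n; i != 0; i--) {
            *dst-- = *src;  // right channel
            *dst-- = *src;  // left channel
            src--;
        }
    }

    int main() {
        float buf[6] = {1, 2, 3};             // mono data in the front half
        mono_to_stereo_inplace(buf, buf, 3);  // expand in place
        for (float v : buf) printf("%g ", v); // 1 1 2 2 3 3
        printf("\n");
        return 0;
    }
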
diff --git a/media/libeffects/lvm/lib/Common/src/Mult3s_32x16.cpp b/media/libeffects/lvm/lib/Common/src/Mult3s_32x16.cpp
index 370c39a..4589703 100644
--- a/media/libeffects/lvm/lib/Common/src/Mult3s_32x16.cpp
+++ b/media/libeffects/lvm/lib/Common/src/Mult3s_32x16.cpp
@@ -26,37 +26,27 @@
 FUNCTION MULT3S_16X16
 ***********************************************************************************/
 
-void Mult3s_32x16( const LVM_INT32 *src,
-                  const LVM_INT16 val,
-                  LVM_INT32 *dst,
-                  LVM_INT16 n)
-{
+void Mult3s_32x16(const LVM_INT32* src, const LVM_INT16 val, LVM_INT32* dst, LVM_INT16 n) {
     LVM_INT16 ii;
-    LVM_INT32 srcval,temp;
+    LVM_INT32 srcval, temp;
 
-    for (ii = n; ii != 0; ii--)
-    {
-        srcval=*src;
+    for (ii = n; ii != 0; ii--) {
+        srcval = *src;
         src++;
 
-        MUL32x16INTO32(srcval,val,temp,15)
+        MUL32x16INTO32(srcval, val, temp, 15)
 
-        *dst = temp;
+        *dst = temp;
         dst++;
     }
 
     return;
 }
-void Mult3s_Float( const LVM_FLOAT *src,
-                   const LVM_FLOAT val,
-                   LVM_FLOAT *dst,
-                   LVM_INT16 n)
-{
+void Mult3s_Float(const LVM_FLOAT* src, const LVM_FLOAT val, LVM_FLOAT* dst, LVM_INT16 n) {
     LVM_INT16 ii;
     LVM_FLOAT temp;
 
-    for (ii = n; ii != 0; ii--)
-    {
+    for (ii = n; ii != 0; ii--) {
         temp = (*src) * val;
         src++;
         *dst = temp;
diff --git a/media/libeffects/lvm/lib/Common/src/NonLinComp_D16.cpp b/media/libeffects/lvm/lib/Common/src/NonLinComp_D16.cpp
index 36d1149..fba0666 100644
--- a/media/libeffects/lvm/lib/Common/src/NonLinComp_D16.cpp
+++ b/media/libeffects/lvm/lib/Common/src/NonLinComp_D16.cpp
@@ -61,22 +61,16 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-void NonLinComp_D16(LVM_INT16        Gain,
-                      LVM_INT16        *pDataIn,
-                    LVM_INT16        *pDataOut,
-                    LVM_INT32        BlockLength)
-{
-
-    LVM_INT16            Sample;                    /* Input samples */
-    LVM_INT32            SampleNo;                /* Sample index */
-    LVM_INT16            Temp;
+void NonLinComp_D16(LVM_INT16 Gain, LVM_INT16* pDataIn, LVM_INT16* pDataOut,
+                    LVM_INT32 BlockLength) {
+    LVM_INT16 Sample;   /* Input samples */
+    LVM_INT32 SampleNo; /* Sample index */
+    LVM_INT16 Temp;
 
     /*
      * Process a block of samples
      */
-    for(SampleNo = 0; SampleNo<BlockLength; SampleNo++)
-    {
-
+    for (SampleNo = 0; SampleNo < BlockLength; SampleNo++) {
         /*
          * Read the input
          */
@@ -88,15 +82,11 @@
          * harmonic distortion. The amount of compression is control by the
          * gain factor
          */
-        if ((LVM_INT32)Sample != -32768)
-        {
+        if ((LVM_INT32)Sample != -32768) {
             Temp = (LVM_INT16)((Sample * Sample) >> 15);
-            if(Sample >0)
-            {
+            if (Sample > 0) {
                 Sample = (LVM_INT16)(Sample + ((Gain * (Sample - Temp)) >> 15));
-            }
-            else
-            {
+            } else {
                 Sample = (LVM_INT16)(Sample + ((Gain * (Sample + Temp)) >> 15));
             }
         }
@@ -106,25 +96,18 @@
          */
         *pDataOut = Sample;
         pDataOut++;
-
     }
-
 }
-void NonLinComp_Float(LVM_FLOAT        Gain,
-                      LVM_FLOAT        *pDataIn,
-                      LVM_FLOAT        *pDataOut,
-                      LVM_INT32        BlockLength)
-{
-
-    LVM_FLOAT            Sample;                    /* Input samples */
-    LVM_INT32            SampleNo;                /* Sample index */
-    LVM_FLOAT            Temp;
+void NonLinComp_Float(LVM_FLOAT Gain, LVM_FLOAT* pDataIn, LVM_FLOAT* pDataOut,
+                      LVM_INT32 BlockLength) {
+    LVM_FLOAT Sample;   /* Input samples */
+    LVM_INT32 SampleNo; /* Sample index */
+    LVM_FLOAT Temp;
 
     /*
      * Process a block of samples
      */
-    for(SampleNo = 0; SampleNo < BlockLength; SampleNo++)
-    {
+    for (SampleNo = 0; SampleNo < BlockLength; SampleNo++) {
         /*
          * Read the input
          */
@@ -136,16 +119,12 @@
          * harmonic distortion. The amount of compression is control by the
          * gain factor
          */
-        if (Sample != -1.0f)
-        {
+        if (Sample != -1.0f) {
             Temp = ((Sample * Sample));
-            if(Sample > 0)
-            {
-                Sample = (Sample + ((Gain * (Sample - Temp)) ));
-            }
-            else
-            {
-                Sample = (Sample + ((Gain * (Sample + Temp)) ));
+            if (Sample > 0) {
+                Sample = (Sample + ((Gain * (Sample - Temp))));
+            } else {
+                Sample = (Sample + ((Gain * (Sample + Temp))));
             }
         }
 
diff --git a/media/libeffects/lvm/lib/Common/src/PK_2I_D32F32C14G11_TRC_WRA_01.cpp b/media/libeffects/lvm/lib/Common/src/PK_2I_D32F32C14G11_TRC_WRA_01.cpp
index 3f62f99..0afaad2 100644
--- a/media/libeffects/lvm/lib/Common/src/PK_2I_D32F32C14G11_TRC_WRA_01.cpp
+++ b/media/libeffects/lvm/lib/Common/src/PK_2I_D32F32C14G11_TRC_WRA_01.cpp
@@ -37,87 +37,79 @@
  pBiquadState->pDelays[6] is y(n-2)L in Q0 format
  pBiquadState->pDelays[7] is y(n-2)R in Q0 format
 ***************************************************************************/
-void PK_2I_D32F32C14G11_TRC_WRA_01 ( Biquad_FLOAT_Instance_t       *pInstance,
-                                     LVM_FLOAT               *pDataIn,
-                                     LVM_FLOAT               *pDataOut,
-                                     LVM_INT16               NrSamples)
-    {
-        LVM_FLOAT ynL,ynR,ynLO,ynRO,templ;
-        LVM_INT16 ii;
-        PFilter_State_Float pBiquadState = (PFilter_State_Float) pInstance;
+void PK_2I_D32F32C14G11_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+                                   LVM_FLOAT* pDataOut, LVM_INT16 NrSamples) {
+    LVM_FLOAT ynL, ynR, ynLO, ynRO, templ;
+    LVM_INT16 ii;
+    PFilter_State_Float pBiquadState = (PFilter_State_Float)pInstance;
 
-         for (ii = NrSamples; ii != 0; ii--)
-         {
+    for (ii = NrSamples; ii != 0; ii--) {
+        /**************************************************************************
+                        PROCESSING OF THE LEFT CHANNEL
+        ***************************************************************************/
+        /* ynL= (A0  * (x(n)L - x(n-2)L  ) )*/
+        templ = (*pDataIn) - pBiquadState->pDelays[2];
+        ynL = templ * pBiquadState->coefs[0];
 
-            /**************************************************************************
-                            PROCESSING OF THE LEFT CHANNEL
-            ***************************************************************************/
-            /* ynL= (A0  * (x(n)L - x(n-2)L  ) )*/
-            templ = (*pDataIn) - pBiquadState->pDelays[2];
-            ynL = templ * pBiquadState->coefs[0];
+        /* ynL+= ((-B2  * y(n-2)L  )) */
+        templ = pBiquadState->pDelays[6] * pBiquadState->coefs[1];
+        ynL += templ;
 
-            /* ynL+= ((-B2  * y(n-2)L  )) */
-            templ = pBiquadState->pDelays[6] * pBiquadState->coefs[1];
-            ynL += templ;
+        /* ynL+= ((-B1 * y(n-1)L  ) ) */
+        templ = pBiquadState->pDelays[4] * pBiquadState->coefs[2];
+        ynL += templ;
 
-            /* ynL+= ((-B1 * y(n-1)L  ) ) */
-            templ = pBiquadState->pDelays[4] * pBiquadState->coefs[2];
-            ynL += templ;
+        /* ynLO= ((Gain * ynL )) */
+        ynLO = ynL * pBiquadState->coefs[3];
 
-            /* ynLO= ((Gain * ynL )) */
-            ynLO = ynL * pBiquadState->coefs[3];
+        /* ynLO=( ynLO + x(n)L  )*/
+        ynLO += (*pDataIn);
 
-            /* ynLO=( ynLO + x(n)L  )*/
-            ynLO += (*pDataIn);
+        /**************************************************************************
+                        PROCESSING OF THE RIGHT CHANNEL
+        ***************************************************************************/
+        /* ynR= (A0  * (x(n)R  - x(n-2)R  ) ) */
+        templ = (*(pDataIn + 1)) - pBiquadState->pDelays[3];
+        ynR = templ * pBiquadState->coefs[0];
 
-            /**************************************************************************
-                            PROCESSING OF THE RIGHT CHANNEL
-            ***************************************************************************/
-            /* ynR= (A0  * (x(n)R  - x(n-2)R  ) ) */
-            templ = (*(pDataIn + 1)) - pBiquadState->pDelays[3];
-            ynR = templ * pBiquadState->coefs[0];
+        /* ynR+= ((-B2  * y(n-2)R  ) )  */
+        templ = pBiquadState->pDelays[7] * pBiquadState->coefs[1];
+        ynR += templ;
 
-            /* ynR+= ((-B2  * y(n-2)R  ) )  */
-            templ = pBiquadState->pDelays[7] * pBiquadState->coefs[1];
-            ynR += templ;
+        /* ynR+= ((-B1  * y(n-1)R  ) )   */
+        templ = pBiquadState->pDelays[5] * pBiquadState->coefs[2];
+        ynR += templ;
 
-            /* ynR+= ((-B1  * y(n-1)R  ) )   */
-            templ = pBiquadState->pDelays[5] * pBiquadState->coefs[2];
-            ynR += templ;
+        /* ynRO= ((Gain  * ynR )) */
+        ynRO = ynR * pBiquadState->coefs[3];
 
-            /* ynRO= ((Gain  * ynR )) */
-            ynRO = ynR * pBiquadState->coefs[3];
+        /* ynRO=( ynRO + x(n)R  )*/
+        ynRO += (*(pDataIn + 1));
 
-            /* ynRO=( ynRO + x(n)R  )*/
-            ynRO += (*(pDataIn+1));
+        /**************************************************************************
+                        UPDATING THE DELAYS
+        ***************************************************************************/
+        pBiquadState->pDelays[7] = pBiquadState->pDelays[5]; /* y(n-2)R=y(n-1)R*/
+        pBiquadState->pDelays[6] = pBiquadState->pDelays[4]; /* y(n-2)L=y(n-1)L*/
+        pBiquadState->pDelays[3] = pBiquadState->pDelays[1]; /* x(n-2)R=x(n-1)R*/
+        pBiquadState->pDelays[2] = pBiquadState->pDelays[0]; /* x(n-2)L=x(n-1)L*/
+        pBiquadState->pDelays[5] = ynR;                      /* Update y(n-1)R */
+        pBiquadState->pDelays[4] = ynL;                      /* Update y(n-1)L */
+        pBiquadState->pDelays[0] = (*pDataIn);               /* Update x(n-1)L */
+        pDataIn++;
+        pBiquadState->pDelays[1] = (*pDataIn); /* Update x(n-1)R */
+        pDataIn++;
 
-            /**************************************************************************
-                            UPDATING THE DELAYS
-            ***************************************************************************/
-            pBiquadState->pDelays[7] = pBiquadState->pDelays[5]; /* y(n-2)R=y(n-1)R*/
-            pBiquadState->pDelays[6] = pBiquadState->pDelays[4]; /* y(n-2)L=y(n-1)L*/
-            pBiquadState->pDelays[3] = pBiquadState->pDelays[1]; /* x(n-2)R=x(n-1)R*/
-            pBiquadState->pDelays[2] = pBiquadState->pDelays[0]; /* x(n-2)L=x(n-1)L*/
-            pBiquadState->pDelays[5] = ynR; /* Update y(n-1)R */
-            pBiquadState->pDelays[4] = ynL; /* Update y(n-1)L */
-            pBiquadState->pDelays[0] = (*pDataIn); /* Update x(n-1)L */
-            pDataIn++;
-            pBiquadState->pDelays[1] = (*pDataIn); /* Update x(n-1)R */
-            pDataIn++;
-
-            /**************************************************************************
-                            WRITING THE OUTPUT
-            ***************************************************************************/
-            *pDataOut = ynLO; /* Write Left output*/
-            pDataOut++;
-            *pDataOut = ynRO; /* Write Right ouput*/
-            pDataOut++;
-
-        }
-
+        /**************************************************************************
+                        WRITING THE OUTPUT
+        ***************************************************************************/
+        *pDataOut = ynLO; /* Write Left output*/
+        pDataOut++;
+        *pDataOut = ynRO; /* Write Right output*/
+        pDataOut++;
     }
+}
 
-#ifdef SUPPORT_MC
 /**************************************************************************
 DELAYS-
 pBiquadState->pDelays[0] to
@@ -133,60 +125,51 @@
 pBiquadState->pDelays[4*NrChannels - 1] is y(n-2) for all NrChannels
 ***************************************************************************/
 
-void PK_Mc_D32F32C14G11_TRC_WRA_01 (Biquad_FLOAT_Instance_t       *pInstance,
-                                    LVM_FLOAT               *pDataIn,
-                                    LVM_FLOAT               *pDataOut,
-                                    LVM_INT16               NrFrames,
-                                    LVM_INT16               NrChannels)
-    {
-        LVM_FLOAT yn, ynO, temp;
-        LVM_INT16 ii, jj;
-        PFilter_State_Float pBiquadState = (PFilter_State_Float) pInstance;
+void PK_Mc_D32F32C14G11_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+                                   LVM_FLOAT* pDataOut, LVM_INT16 NrFrames, LVM_INT16 NrChannels) {
+    LVM_FLOAT yn, ynO, temp;
+    LVM_INT16 ii, jj;
+    PFilter_State_Float pBiquadState = (PFilter_State_Float)pInstance;
 
-         for (ii = NrFrames; ii != 0; ii--)
-         {
+    for (ii = NrFrames; ii != 0; ii--) {
+        for (jj = 0; jj < NrChannels; jj++) {
+            /**************************************************************************
+                            PROCESSING OF THE jj CHANNEL
+            ***************************************************************************/
+            /* yn= (A0  * (x(n) - x(n-2)))*/
+            temp = (*pDataIn) - pBiquadState->pDelays[NrChannels + jj];
+            yn = temp * pBiquadState->coefs[0];
 
-            for (jj = 0; jj < NrChannels; jj++)
-            {
-                /**************************************************************************
-                                PROCESSING OF THE jj CHANNEL
-                ***************************************************************************/
-                /* yn= (A0  * (x(n) - x(n-2)))*/
-                temp = (*pDataIn) - pBiquadState->pDelays[NrChannels + jj];
-                yn = temp * pBiquadState->coefs[0];
+            /* yn+= ((-B2  * y(n-2))) */
+            temp = pBiquadState->pDelays[NrChannels * 3 + jj] * pBiquadState->coefs[1];
+            yn += temp;
 
-                /* yn+= ((-B2  * y(n-2))) */
-                temp = pBiquadState->pDelays[NrChannels*3 + jj] * pBiquadState->coefs[1];
-                yn += temp;
+            /* yn+= ((-B1 * y(n-1))) */
+            temp = pBiquadState->pDelays[NrChannels * 2 + jj] * pBiquadState->coefs[2];
+            yn += temp;
 
-                /* yn+= ((-B1 * y(n-1))) */
-                temp = pBiquadState->pDelays[NrChannels*2 + jj] * pBiquadState->coefs[2];
-                yn += temp;
+            /* ynO= ((Gain * yn)) */
+            ynO = yn * pBiquadState->coefs[3];
 
-                /* ynO= ((Gain * yn)) */
-                ynO = yn * pBiquadState->coefs[3];
+            /* ynO=(ynO + x(n))*/
+            ynO += (*pDataIn);
 
-                /* ynO=(ynO + x(n))*/
-                ynO += (*pDataIn);
-
-                /**************************************************************************
-                                UPDATING THE DELAYS
-                ***************************************************************************/
-                pBiquadState->pDelays[NrChannels * 3 + jj] =
+            /**************************************************************************
+                            UPDATING THE DELAYS
+            ***************************************************************************/
+            pBiquadState->pDelays[NrChannels * 3 + jj] =
                     pBiquadState->pDelays[NrChannels * 2 + jj]; /* y(n-2)=y(n-1)*/
-                pBiquadState->pDelays[NrChannels * 1 + jj] =
-                    pBiquadState->pDelays[jj]; /* x(n-2)=x(n-1)*/
-                pBiquadState->pDelays[NrChannels * 2 + jj] = yn; /* Update y(n-1) */
-                pBiquadState->pDelays[jj] = (*pDataIn); /* Update x(n-1)*/
-                pDataIn++;
+            pBiquadState->pDelays[NrChannels * 1 + jj] =
+                    pBiquadState->pDelays[jj];               /* x(n-2)=x(n-1)*/
+            pBiquadState->pDelays[NrChannels * 2 + jj] = yn; /* Update y(n-1) */
+            pBiquadState->pDelays[jj] = (*pDataIn);          /* Update x(n-1)*/
+            pDataIn++;
 
-                /**************************************************************************
-                                WRITING THE OUTPUT
-                ***************************************************************************/
-                *pDataOut = ynO; /* Write output*/
-                pDataOut++;
-            }
+            /**************************************************************************
+                            WRITING THE OUTPUT
+            ***************************************************************************/
+            *pDataOut = ynO; /* Write output*/
+            pDataOut++;
         }
-
     }
-#endif
+}
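
PK_Mc_D32F32C14G11_TRC_WRA_01 keeps the state of all channels in one flat pDelays array grouped by age, exactly as the comment block above the function describes: x(n-1) for every channel first, then x(n-2), y(n-1) and y(n-2), so channel jj's entries live at jj, NrChannels + jj, 2*NrChannels + jj and 3*NrChannels + jj. The helper below only names that indexing and is illustrative, not part of the library.

    #include <cstdio>

    // Illustrative index helper for the flat multichannel delay line:
    // the delay of a given age for channel `ch` sits at age*nrChannels + ch.
    enum DelayAge { X_N1 = 0, X_N2 = 1, Y_N1 = 2, Y_N2 = 3 };  // x(n-1), x(n-2), y(n-1), y(n-2)

    static inline int delay_index(DelayAge age, int nrChannels, int ch) {
        return static_cast<int>(age) * nrChannels + ch;
    }

    int main() {
        const int nrChannels = 6;  // e.g. a 5.1 stream
        printf("x(n-2) of channel 4 -> pDelays[%d]\n", delay_index(X_N2, nrChannels, 4));  // 10
        printf("y(n-1) of channel 0 -> pDelays[%d]\n", delay_index(Y_N1, nrChannels, 0));  // 12
        return 0;
    }
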
diff --git a/media/libeffects/lvm/lib/Common/src/PK_2I_D32F32CllGss_TRC_WRA_01_Init.cpp b/media/libeffects/lvm/lib/Common/src/PK_2I_D32F32CllGss_TRC_WRA_01_Init.cpp
index 714aa52..1e08a55 100644
--- a/media/libeffects/lvm/lib/Common/src/PK_2I_D32F32CllGss_TRC_WRA_01_Init.cpp
+++ b/media/libeffects/lvm/lib/Common/src/PK_2I_D32F32CllGss_TRC_WRA_01_Init.cpp
@@ -17,4 +17,3 @@
 
 #include "BIQUAD.h"
 #include "PK_2I_D32F32CllGss_TRC_WRA_01_Private.h"
-
diff --git a/media/libeffects/lvm/lib/Common/src/PK_2I_D32F32CllGss_TRC_WRA_01_Private.h b/media/libeffects/lvm/lib/Common/src/PK_2I_D32F32CllGss_TRC_WRA_01_Private.h
index c5f9c7c..3f5d332 100644
--- a/media/libeffects/lvm/lib/Common/src/PK_2I_D32F32CllGss_TRC_WRA_01_Private.h
+++ b/media/libeffects/lvm/lib/Common/src/PK_2I_D32F32CllGss_TRC_WRA_01_Private.h
@@ -20,12 +20,11 @@
 
 /* The internal state variables are implemented in a (for the user)  hidden structure */
 /* In this (private) file, the internal structure is declared for private use.        */
-typedef struct _Filter_State_
-{
-  LVM_INT32 *       pDelays;        /* pointer to the delayed samples (data of 32 bits)   */
-  LVM_INT32         coefs[5];       /* pointer to the filter coefficients */
-}Filter_State;
+typedef struct _Filter_State_ {
+    LVM_INT32* pDelays; /* pointer to the delayed samples (data of 32 bits)   */
+    LVM_INT32 coefs[5]; /* pointer to the filter coefficients */
+} Filter_State;
 
-typedef Filter_State * PFilter_State ;
+typedef Filter_State* PFilter_State;
 
 #endif /* _PK_2I_D32F32CLLGSS_TRC_WRA_01_PRIVATE_H_ */
diff --git a/media/libeffects/lvm/lib/Common/src/PK_2I_D32F32CssGss_TRC_WRA_01_Init.cpp b/media/libeffects/lvm/lib/Common/src/PK_2I_D32F32CssGss_TRC_WRA_01_Init.cpp
index f6c05da..178d766 100644
--- a/media/libeffects/lvm/lib/Common/src/PK_2I_D32F32CssGss_TRC_WRA_01_Init.cpp
+++ b/media/libeffects/lvm/lib/Common/src/PK_2I_D32F32CssGss_TRC_WRA_01_Init.cpp
@@ -17,12 +17,11 @@
 
 #include "BIQUAD.h"
 #include "PK_2I_D32F32CssGss_TRC_WRA_01_Private.h"
-void  PK_2I_D32F32CssGss_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t         *pInstance,
-                                         Biquad_2I_Order2_FLOAT_Taps_t   *pTaps,
-                                         PK_FLOAT_Coefs_t            *pCoef)
-{
-    PFilter_State_Float pBiquadState = (PFilter_State_Float) pInstance;
-    pBiquadState->pDelays       = (LVM_FLOAT *) pTaps;
+void PK_2I_D32F32CssGss_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t* pInstance,
+                                        Biquad_2I_Order2_FLOAT_Taps_t* pTaps,
+                                        PK_FLOAT_Coefs_t* pCoef) {
+    PFilter_State_Float pBiquadState = (PFilter_State_Float)pInstance;
+    pBiquadState->pDelays = (LVM_FLOAT*)pTaps;
 
     pBiquadState->coefs[0] = pCoef->A0;
 
diff --git a/media/libeffects/lvm/lib/Common/src/PK_2I_D32F32CssGss_TRC_WRA_01_Private.h b/media/libeffects/lvm/lib/Common/src/PK_2I_D32F32CssGss_TRC_WRA_01_Private.h
index cc924c4..57a1c16 100644
--- a/media/libeffects/lvm/lib/Common/src/PK_2I_D32F32CssGss_TRC_WRA_01_Private.h
+++ b/media/libeffects/lvm/lib/Common/src/PK_2I_D32F32CssGss_TRC_WRA_01_Private.h
@@ -21,19 +21,17 @@
 /* The internal state variables are implemented in a (for the user)  hidden structure */
 /* In this (private) file, the internal structure is declared for private use.        */
 
-typedef struct _Filter_State_Float_
-{
-    LVM_FLOAT *       pDelays;        /* pointer to the delayed samples (data of 32 bits)   */
-    LVM_FLOAT         coefs[5];       /* pointer to the filter coefficients */
-}Filter_State_Float;
+typedef struct _Filter_State_Float_ {
+    LVM_FLOAT* pDelays; /* pointer to the delayed samples (data of 32 bits)   */
+    LVM_FLOAT coefs[5]; /* pointer to the filter coefficients */
+} Filter_State_Float;
 
-typedef Filter_State_Float * PFilter_State_Float ;
-typedef struct _Filter_State_
-{
-  LVM_INT32 *       pDelays;        /* pointer to the delayed samples (data of 32 bits)   */
-  LVM_INT32         coefs[5];       /* pointer to the filter coefficients */
-}Filter_State;
+typedef Filter_State_Float* PFilter_State_Float;
+typedef struct _Filter_State_ {
+    LVM_INT32* pDelays; /* pointer to the delayed samples (data of 32 bits)   */
+    LVM_INT32 coefs[5]; /* pointer to the filter coefficients */
+} Filter_State;
 
-typedef Filter_State * PFilter_State ;
+typedef Filter_State* PFilter_State;
 
 #endif /* _PK_2I_D32F32CSSGSS_TRC_WRA_01_PRIVATE_H_ */
diff --git a/media/libeffects/lvm/lib/Common/src/Shift_Sat_v32xv32.cpp b/media/libeffects/lvm/lib/Common/src/Shift_Sat_v32xv32.cpp
index 97a04c1..f54ba90 100644
--- a/media/libeffects/lvm/lib/Common/src/Shift_Sat_v32xv32.cpp
+++ b/media/libeffects/lvm/lib/Common/src/Shift_Sat_v32xv32.cpp
@@ -24,55 +24,39 @@
 /**********************************************************************************
    FUNCTION Shift_Sat_v32xv32
 ***********************************************************************************/
-void Shift_Sat_Float (const   LVM_INT16   val,
-                      const   LVM_FLOAT   *src,
-                      LVM_FLOAT   *dst,
-                      LVM_INT16   n)
-{
-    LVM_FLOAT   temp;
-    LVM_INT32   ii,ij;
-    LVM_INT16   RShift;
+void Shift_Sat_Float(const LVM_INT16 val, const LVM_FLOAT* src, LVM_FLOAT* dst, LVM_INT16 n) {
+    LVM_FLOAT temp;
+    LVM_INT32 ii, ij;
+    LVM_INT16 RShift;
 
-    if(val > 0)
-    {
-        for (ii = n; ii != 0; ii--)
-        {
+    if (val > 0) {
+        for (ii = n; ii != 0; ii--) {
             temp = (LVM_FLOAT)*src;
             src++;
-            for(ij = 0; ij < val; ij++)
-            {
+            for (ij = 0; ij < val; ij++) {
                 temp = temp * 2;
             }
 
-            if(temp > 1.0)
-                temp = 1.0;
-            if(temp < -1.0)
-                temp = -1.0;
+            if (temp > 1.0) temp = 1.0;
+            if (temp < -1.0) temp = -1.0;
 
             *dst = (LVM_FLOAT)temp;
             dst++;
         }
-    }
-    else if(val < 0)
-    {
-        RShift=(LVM_INT16)(-val);
+    } else if (val < 0) {
+        RShift = (LVM_INT16)(-val);
 
-        for (ii = n; ii != 0; ii--)
-        {
+        for (ii = n; ii != 0; ii--) {
             temp = (LVM_FLOAT)*src;
             src++;
-            for(ij = 0; ij < RShift; ij++)
-            {
+            for (ij = 0; ij < RShift; ij++) {
                 temp = temp / 2;
             }
             *dst = (LVM_FLOAT)temp;
             dst++;
         }
-    }
-    else
-    {
-        if(src != dst)
-        {
+    } else {
+        if (src != dst) {
             Copy_Float(src, dst, n);
         }
     }
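
Shift_Sat_Float applies a gain of 2^val, doubling for positive val (with saturation to +/-1.0) and halving for negative val, and degenerates to a copy when val is 0. The sketch below expresses the same behaviour with ldexpf instead of the doubling/halving loop (the loop form in the original mirrors the fixed-point shift variants); shift_sat_float here is an illustrative standalone function, and the original's "copy only if src != dst" shortcut for val == 0 is omitted.

    #include <cmath>
    #include <cstdio>

    // Scale by 2^val; for upward shifts, saturate to the [-1.0, 1.0] range.
    // ldexpf(x, e) computes x * 2^e.
    static void shift_sat_float(int val, const float* src, float* dst, int n) {
        for (int i = 0; i < n; i++) {
            float t = ldexpf(src[i], val);
            if (val > 0) {                  // saturation only applies when amplifying
                if (t > 1.0f) t = 1.0f;
                if (t < -1.0f) t = -1.0f;
            }
            dst[i] = t;
        }
    }

    int main() {
        const float in[3] = {0.2f, -0.6f, 0.9f};
        float out[3];
        shift_sat_float(2, in, out, 3);                // gain of 4x, saturated
        printf("%g %g %g\n", out[0], out[1], out[2]);  // 0.8 -1 1
        return 0;
    }
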
diff --git a/media/libeffects/lvm/lib/Common/src/dB_to_Lin32.cpp b/media/libeffects/lvm/lib/Common/src/dB_to_Lin32.cpp
index 4da2013..2143465 100644
--- a/media/libeffects/lvm/lib/Common/src/dB_to_Lin32.cpp
+++ b/media/libeffects/lvm/lib/Common/src/dB_to_Lin32.cpp
@@ -57,17 +57,16 @@
  *
  ****************************************************************************************/
 
-#define FOUR_OVER_SIX    21846                  /* (4 / 6) * 2^15 */
-#define SIX_DB           96                     /* 6 * 16 or 6dB in Q11.4 format */
-#define FIRST_COEF_NEG   14884305
-#define FIRST_COEF_POS   7442152                /* FIRST_COEF_NEG / 2 */
-#define SECOND_COEF      38836
-#define MAX_VALUE        1536                   /* 96 * 16 */
+#define FOUR_OVER_SIX 21846 /* (4 / 6) * 2^15 */
+#define SIX_DB 96           /* 6 * 16 or 6dB in Q11.4 format */
+#define FIRST_COEF_NEG 14884305
+#define FIRST_COEF_POS 7442152 /* FIRST_COEF_NEG / 2 */
+#define SECOND_COEF 38836
+#define MAX_VALUE 1536 /* 96 * 16 */
 
-LVM_FLOAT   dB_to_LinFloat(LVM_INT16    db_fix)
-{
-    LVM_FLOAT    dB_Float;
-    LVM_FLOAT    LinFloat;
+LVM_FLOAT dB_to_LinFloat(LVM_INT16 db_fix) {
+    LVM_FLOAT dB_Float;
+    LVM_FLOAT LinFloat;
 
     dB_Float = (LVM_FLOAT)((LVM_FLOAT)db_fix / 16.0f);
     LinFloat = pow(10, dB_Float / 20.0);
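
dB_to_LinFloat takes its argument in Q11.4, i.e. sixteenths of a dB (hence the divide by 16), before the usual 10^(dB/20) conversion; a db_fix of -96 therefore means -6 dB and yields a linear gain of about 0.501. A minimal check (db_q11_4_to_lin is an illustrative name):

    #include <cmath>
    #include <cstdio>

    // Same conversion as dB_to_LinFloat: db_fix is in Q11.4 (sixteenths of a dB).
    static float db_q11_4_to_lin(short db_fix) {
        return powf(10.0f, (db_fix / 16.0f) / 20.0f);
    }

    int main() {
        printf("%f\n", db_q11_4_to_lin(-96));  // -6 dB -> ~0.501
        printf("%f\n", db_q11_4_to_lin(96));   // +6 dB -> ~1.995
        return 0;
    }
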
diff --git a/media/libeffects/lvm/lib/Common/src/mult3s_16x16.cpp b/media/libeffects/lvm/lib/Common/src/mult3s_16x16.cpp
index 4092560..66f9132 100644
--- a/media/libeffects/lvm/lib/Common/src/mult3s_16x16.cpp
+++ b/media/libeffects/lvm/lib/Common/src/mult3s_16x16.cpp
@@ -25,16 +25,11 @@
    FUNCTION MULT3S_16X16
 ***********************************************************************************/
 
-void Mult3s_16x16( const LVM_INT16 *src,
-                  const LVM_INT16 val,
-                  LVM_INT16 *dst,
-                  LVM_INT16 n)
-{
+void Mult3s_16x16(const LVM_INT16* src, const LVM_INT16 val, LVM_INT16* dst, LVM_INT16 n) {
     LVM_INT16 ii;
     LVM_INT32 temp;
 
-    for (ii = n; ii != 0; ii--)
-    {
+    for (ii = n; ii != 0; ii--) {
         temp = (LVM_INT32)(*src) * (LVM_INT32)val;
         src++;
 
diff --git a/media/libeffects/lvm/lib/Eq/lib/LVEQNB.h b/media/libeffects/lvm/lib/Eq/lib/LVEQNB.h
index c5ddf77..f1afcd6 100644
--- a/media/libeffects/lvm/lib/Eq/lib/LVEQNB.h
+++ b/media/libeffects/lvm/lib/Eq/lib/LVEQNB.h
@@ -86,16 +86,9 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-/* Memory table */
-#define LVEQNB_MEMREGION_INSTANCE          0   /* Offset to the instance memory region */
-#define LVEQNB_MEMREGION_PERSISTENT_DATA   1   /* Offset to persistent data memory region */
-#define LVEQNB_MEMREGION_PERSISTENT_COEF   2   /* Offset to persistent coefficient region */
-#define LVEQNB_MEMREGION_SCRATCH           3   /* Offset to data scratch memory region */
-#define LVEQNB_NR_MEMORY_REGIONS           4   /* Number of memory regions */
-
 /* Callback events */
-#define LVEQNB_EVENT_NONE                   0x0000    /* Not a valid event */
-#define LVEQNB_EVENT_ALGOFF                 0x0001    /* EQNB has completed switch off */
+#define LVEQNB_EVENT_NONE 0x0000   /* Not a valid event */
+#define LVEQNB_EVENT_ALGOFF 0x0001 /* EQNB has completed switch off */
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -104,42 +97,25 @@
 /****************************************************************************************/
 
 /* Instance handle */
-typedef void *LVEQNB_Handle_t;
+typedef void* LVEQNB_Handle_t;
 
 /* Operating modes */
-typedef enum
-{
-    LVEQNB_BYPASS   = 0,
-    LVEQNB_ON       = 1,
-    LVEQNB_MODE_MAX = LVM_MAXINT_32
-} LVEQNB_Mode_en;
+typedef enum { LVEQNB_BYPASS = 0, LVEQNB_ON = 1, LVEQNB_MODE_MAX = LVM_MAXINT_32 } LVEQNB_Mode_en;
 
 /* Filter mode control */
-typedef enum
-{
-    LVEQNB_FILTER_OFF   = 0,
-    LVEQNB_FILTER_ON    = 1,
+typedef enum {
+    LVEQNB_FILTER_OFF = 0,
+    LVEQNB_FILTER_ON = 1,
     LVEQNB_FILTER_DUMMY = LVM_MAXINT_32
 } LVEQNB_FilterMode_en;
 
-/* Memory Types */
-typedef enum
-{
-    LVEQNB_PERSISTENT      = 0,
-    LVEQNB_PERSISTENT_DATA = 1,
-    LVEQNB_PERSISTENT_COEF = 2,
-    LVEQNB_SCRATCH         = 3,
-    LVEQNB_MEMORY_MAX      = LVM_MAXINT_32
-} LVEQNB_MemoryTypes_en;
-
 /* Function return status */
-typedef enum
-{
-    LVEQNB_SUCCESS        = 0,                          /* Successful return from a routine */
-    LVEQNB_ALIGNMENTERROR = 1,                          /* Memory alignment error */
-    LVEQNB_NULLADDRESS    = 2,                          /* NULL allocation address */
-    LVEQNB_TOOMANYSAMPLES = 3,                          /* Maximum block size exceeded */
-    LVEQNB_STATUS_MAX     = LVM_MAXINT_32
+typedef enum {
+    LVEQNB_SUCCESS = 0,        /* Successful return from a routine */
+    LVEQNB_ALIGNMENTERROR = 1, /* Memory alignment error */
+    LVEQNB_NULLADDRESS = 2,    /* NULL allocation address */
+    LVEQNB_TOOMANYSAMPLES = 3, /* Maximum block size exceeded */
+    LVEQNB_STATUS_MAX = LVM_MAXINT_32
 } LVEQNB_ReturnStatus_en;
 
 /****************************************************************************************/
@@ -166,39 +142,35 @@
 /*
  * Supported source data formats
  */
-#define LVEQNB_CAP_STEREO                  1
-#define LVEQNB_CAP_MONOINSTEREO            2
+#define LVEQNB_CAP_STEREO 1
+#define LVEQNB_CAP_MONOINSTEREO 2
 
-typedef enum
-{
-    LVEQNB_STEREO       = 0,
+typedef enum {
+    LVEQNB_STEREO = 0,
     LVEQNB_MONOINSTEREO = 1,
-#ifdef SUPPORT_MC
     LVEQNB_MULTICHANNEL = 2,
-#endif
-    LVEQNB_SOURCE_MAX   = LVM_MAXINT_32
+    LVEQNB_SOURCE_MAX = LVM_MAXINT_32
 } LVEQNB_SourceFormat_en;
 
 /*
  * Supported sample rates in samples per second
  */
-#define LVEQNB_CAP_FS_8000                 1
-#define LVEQNB_CAP_FS_11025                2
-#define LVEQNB_CAP_FS_12000                4
-#define LVEQNB_CAP_FS_16000                8
-#define LVEQNB_CAP_FS_22050                16
-#define LVEQNB_CAP_FS_24000                32
-#define LVEQNB_CAP_FS_32000                64
-#define LVEQNB_CAP_FS_44100                128
-#define LVEQNB_CAP_FS_48000                256
-#define LVEQNB_CAP_FS_88200                512
-#define LVEQNB_CAP_FS_96000                1024
-#define LVEQNB_CAP_FS_176400               2048
-#define LVEQNB_CAP_FS_192000               4096
+#define LVEQNB_CAP_FS_8000 1
+#define LVEQNB_CAP_FS_11025 2
+#define LVEQNB_CAP_FS_12000 4
+#define LVEQNB_CAP_FS_16000 8
+#define LVEQNB_CAP_FS_22050 16
+#define LVEQNB_CAP_FS_24000 32
+#define LVEQNB_CAP_FS_32000 64
+#define LVEQNB_CAP_FS_44100 128
+#define LVEQNB_CAP_FS_48000 256
+#define LVEQNB_CAP_FS_88200 512
+#define LVEQNB_CAP_FS_96000 1024
+#define LVEQNB_CAP_FS_176400 2048
+#define LVEQNB_CAP_FS_192000 4096
 
-typedef enum
-{
-    LVEQNB_FS_8000  = 0,
+typedef enum {
+    LVEQNB_FS_8000 = 0,
     LVEQNB_FS_11025 = 1,
     LVEQNB_FS_12000 = 2,
     LVEQNB_FS_16000 = 3,
@@ -211,7 +183,7 @@
     LVEQNB_FS_96000 = 10,
     LVEQNB_FS_176400 = 11,
     LVEQNB_FS_192000 = 12,
-    LVEQNB_FS_MAX   = LVM_MAXINT_32
+    LVEQNB_FS_MAX = LVM_MAXINT_32
 } LVEQNB_Fs_en;
 
 /****************************************************************************************/
@@ -220,58 +192,38 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-/* Memory region definition */
-typedef struct
-{
-    LVM_UINT32                  Size;                   /* Region size in bytes */
-    LVM_UINT16                  Alignment;              /* Region alignment in bytes */
-    LVEQNB_MemoryTypes_en       Type;                   /* Region type */
-    void                        *pBaseAddress;          /* Pointer to the region base address */
-} LVEQNB_MemoryRegion_t;
-
-/* Memory table containing the region definitions */
-typedef struct
-{
-    LVEQNB_MemoryRegion_t       Region[LVEQNB_NR_MEMORY_REGIONS];  /* One definition for each region */
-} LVEQNB_MemTab_t;
-
 /* Equaliser band definition */
-typedef struct
-{
-    LVM_INT16                   Gain;                   /* Band gain in dB */
-    LVM_UINT16                  Frequency;              /* Band centre frequency in Hz */
-    LVM_UINT16                  QFactor;                /* Band quality factor */
+typedef struct {
+    LVM_INT16 Gain;       /* Band gain in dB */
+    LVM_UINT16 Frequency; /* Band centre frequency in Hz */
+    LVM_UINT16 QFactor;   /* Band quality factor */
 } LVEQNB_BandDef_t;
 
 /* Parameter structure */
-typedef struct
-{
+typedef struct {
     /* General parameters */
-    LVEQNB_Mode_en              OperatingMode;
-    LVEQNB_Fs_en                SampleRate;
-    LVEQNB_SourceFormat_en      SourceFormat;
+    LVEQNB_Mode_en OperatingMode;
+    LVEQNB_Fs_en SampleRate;
+    LVEQNB_SourceFormat_en SourceFormat;
 
     /* Equaliser parameters */
-    LVM_UINT16                  NBands;                 /* Number of bands */
-    LVEQNB_BandDef_t            *pBandDefinition;       /* Pointer to equaliser definitions */
-#ifdef SUPPORT_MC
-    LVM_INT16                   NrChannels;
-#endif
+    LVM_UINT16 NBands;                 /* Number of bands */
+    LVEQNB_BandDef_t* pBandDefinition; /* Pointer to equaliser definitions */
+    LVM_INT16 NrChannels;
 } LVEQNB_Params_t;
 
 /* Capability structure */
-typedef struct
-{
+typedef struct {
     /* General parameters */
-    LVM_UINT16                  SampleRate;
+    LVM_UINT16 SampleRate;
 
-    LVM_UINT16                  SourceFormat;
-    LVM_UINT16                  MaxBlockSize;
-    LVM_UINT16                  MaxBands;
+    LVM_UINT16 SourceFormat;
+    LVM_UINT16 MaxBlockSize;
+    LVM_UINT16 MaxBands;
 
     /* Callback parameters */
-    LVM_Callback                CallBack;               /* Bundle callback */
-    void                        *pBundleInstance;       /* Bundle instance handle */
+    LVM_Callback CallBack; /* Bundle callback */
+    void* pBundleInstance; /* Bundle instance handle */
 
 } LVEQNB_Capabilities_t;
 
@@ -283,78 +235,43 @@
 
 /****************************************************************************************/
 /*                                                                                      */
-/* FUNCTION:                LVEQNB_Memory                                               */
-/*                                                                                      */
-/* DESCRIPTION:                                                                         */
-/*  This function is used for memory allocation and free. It can be called in           */
-/*  two ways:                                                                           */
-/*                                                                                      */
-/*      hInstance = NULL                Returns the memory requirements                 */
-/*      hInstance = Instance handle     Returns the memory requirements and             */
-/*                                      allocated base addresses for the instance       */
-/*                                                                                      */
-/*  When this function is called for memory allocation (hInstance=NULL) the memory      */
-/*  base address pointers are NULL on return.                                           */
-/*                                                                                      */
-/*  When the function is called for free (hInstance = Instance Handle) the memory       */
-/*  table returns the allocated memory and base addresses used during initialisation.   */
-/*                                                                                      */
-/* PARAMETERS:                                                                          */
-/*  hInstance               Instance Handle                                             */
-/*  pMemoryTable            Pointer to an empty memory definition table                 */
-/*  pCapabilities           Pointer to the default capabilities                         */
-/*                                                                                      */
-/* RETURNS:                                                                             */
-/*  LVEQNB_SUCCESS          Succeeded                                                   */
-/*  LVEQNB_NULLADDRESS      When any of pMemoryTable and pCapabilities is NULL address  */
-/*                                                                                      */
-/* NOTES:                                                                               */
-/*  1.  This function may be interrupted by the LVEQNB_Process function                 */
-/*                                                                                      */
-/****************************************************************************************/
-
-LVEQNB_ReturnStatus_en LVEQNB_Memory(LVEQNB_Handle_t            hInstance,
-                                     LVEQNB_MemTab_t            *pMemoryTable,
-                                     LVEQNB_Capabilities_t      *pCapabilities);
-
-/****************************************************************************************/
-/*                                                                                      */
 /* FUNCTION:                LVEQNB_Init                                                 */
 /*                                                                                      */
 /* DESCRIPTION:                                                                         */
-/*  Create and initialisation function for the N-Band equalliser module                 */
-/*                                                                                      */
-/*  This function can be used to create an algorithm instance by calling with           */
-/*  hInstance set to NULL. In this case the algorithm returns the new instance          */
-/*  handle.                                                                             */
-/*                                                                                      */
-/*  This function can be used to force a full re-initialisation of the algorithm        */
-/*  by calling with hInstance = Instance Handle. In this case the memory table          */
-/*  should be correct for the instance, this can be ensured by calling the function     */
-/*  LVEQNB_Memory before calling this function.                                         */
+/*  Creation and initialisation function for the N-Band equaliser module.              */
 /*                                                                                      */
 /* PARAMETERS:                                                                          */
-/*  hInstance               Instance handle                                             */
-/*  pMemoryTable            Pointer to the memory definition table                      */
+/*  phInstance              Pointer to instance handle                                  */
 /*  pCapabilities           Pointer to the initialisation capabilities                  */
+/*  pScratch                Pointer to bundle scratch buffer                            */
 /*                                                                                      */
 /* RETURNS:                                                                             */
 /*  LVEQNB_SUCCESS          Initialisation succeeded                                    */
-/*  LVEQNB_NULLADDRESS        When pCapabilities or pMemoryTableis or phInstance are NULL */
-/*  LVEQNB_NULLADDRESS        One or more of the memory regions has a NULL base address   */
-/*                          pointer for a memory region with a non-zero size.           */
-/*                                                                                      */
+/*  LVEQNB_NULLADDRESS      When pCapabilities or phInstance are NULL                   */
+/*  LVEQNB_NULLADDRESS      When allocated memory has a NULL base address               */
 /*                                                                                      */
 /* NOTES:                                                                               */
-/*  1.  The instance handle is the pointer to the base address of the first memory      */
-/*      region.                                                                         */
-/*  2.  This function must not be interrupted by the LVEQNB_Process function            */
+/*  1.  This function must not be interrupted by the LVEQNB_Process function            */
 /*                                                                                      */
 /****************************************************************************************/
+LVEQNB_ReturnStatus_en LVEQNB_Init(LVEQNB_Handle_t* phInstance,
+                                   LVEQNB_Capabilities_t* pCapabilities, void* pScratch);
 
-LVEQNB_ReturnStatus_en LVEQNB_Init(LVEQNB_Handle_t          *phInstance,
-                                   LVEQNB_MemTab_t          *pMemoryTable,
-                                   LVEQNB_Capabilities_t    *pCapabilities);
+/****************************************************************************************/
+/*                                                                                      */
+/* FUNCTION:                LVEQNB_DeInit                                               */
+/*                                                                                      */
+/* DESCRIPTION:                                                                         */
+/*  Free the memory allocated by LVEQNB_Init, including the instance handle              */
+/*                                                                                      */
+/* PARAMETERS:                                                                          */
+/*  phInstance              Pointer to instance handle                                  */
+/*                                                                                      */
+/* NOTES:                                                                               */
+/*  1.  This function must not be interrupted by the LVEQNB_Process function            */
+/*                                                                                      */
+/****************************************************************************************/
+void LVEQNB_DeInit(LVEQNB_Handle_t* phInstance);
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -377,8 +294,7 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-LVEQNB_ReturnStatus_en LVEQNB_GetParameters(LVEQNB_Handle_t     hInstance,
-                                            LVEQNB_Params_t     *pParams);
+LVEQNB_ReturnStatus_en LVEQNB_GetParameters(LVEQNB_Handle_t hInstance, LVEQNB_Params_t* pParams);
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -401,8 +317,8 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-LVEQNB_ReturnStatus_en LVEQNB_GetCapabilities(LVEQNB_Handle_t           hInstance,
-                                              LVEQNB_Capabilities_t     *pCapabilities);
+LVEQNB_ReturnStatus_en LVEQNB_GetCapabilities(LVEQNB_Handle_t hInstance,
+                                              LVEQNB_Capabilities_t* pCapabilities);
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -426,8 +342,7 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-LVEQNB_ReturnStatus_en LVEQNB_Control(LVEQNB_Handle_t       hInstance,
-                                      LVEQNB_Params_t       *pParams);
+LVEQNB_ReturnStatus_en LVEQNB_Control(LVEQNB_Handle_t hInstance, LVEQNB_Params_t* pParams);
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -451,10 +366,7 @@
 /* NOTES:                                                                               */
 /*                                                                                      */
 /****************************************************************************************/
-LVEQNB_ReturnStatus_en LVEQNB_Process(LVEQNB_Handle_t       hInstance,
-                                      const LVM_FLOAT       *pInData,
-                                      LVM_FLOAT             *pOutData,
-                                      LVM_UINT16            NumSamples);
+LVEQNB_ReturnStatus_en LVEQNB_Process(LVEQNB_Handle_t hInstance, const LVM_FLOAT* pInData,
+                                      LVM_FLOAT* pOutData, LVM_UINT16 NumSamples);
 
-#endif      /* __LVEQNB__ */
-
+#endif /* __LVEQNB__ */
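The reworked public interface above replaces the old LVEQNB_Memory/memory-table handshake with a single LVEQNB_Init that takes the capabilities plus a bundle-provided scratch buffer, paired with LVEQNB_DeInit. A minimal caller-side sketch of the new lifecycle follows; the helper name, the capability values and the band settings are illustrative assumptions, and only the LVEQNB_* identifiers come from the header above.

// Sketch only: assumes the LVEQNB.h declared above; capability values, band
// settings and scratch handling are illustrative, not part of this change.
#include "LVEQNB.h"

static LVEQNB_ReturnStatus_en runEqualiserOnce(const LVM_FLOAT* pIn, LVM_FLOAT* pOut,
                                               LVM_UINT16 numSamples, void* pScratch) {
    LVEQNB_Handle_t handle = LVM_NULL;

    LVEQNB_Capabilities_t caps = {};
    caps.MaxBands = 5;              /* assumed bundle configuration */
    caps.MaxBlockSize = numSamples;

    if (LVEQNB_Init(&handle, &caps, pScratch) != LVEQNB_SUCCESS) {
        if (handle != LVM_NULL) {
            LVEQNB_DeInit(&handle); /* release any partially allocated buffers */
        }
        return LVEQNB_NULLADDRESS;
    }

    LVEQNB_BandDef_t band = {};
    band.Frequency = 1000;  /* 1 kHz peaking band */
    band.Gain = 6;          /* +6 dB */
    band.QFactor = 96;      /* Q of 0.96, stored as Q * 100 */

    LVEQNB_Params_t params = {};
    params.OperatingMode = LVEQNB_ON;
    params.NBands = 1;
    params.pBandDefinition = &band;
    params.SampleRate = LVEQNB_FS_8000;
    params.SourceFormat = LVEQNB_STEREO;

    LVEQNB_ReturnStatus_en status = LVEQNB_Control(handle, &params);
    if (status == LVEQNB_SUCCESS) {
        status = LVEQNB_Process(handle, pIn, pOut, numSamples);
    }

    LVEQNB_DeInit(&handle);         /* frees the instance and its buffers */
    return status;
}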
diff --git a/media/libeffects/lvm/lib/Eq/src/LVEQNB_CalcCoef.cpp b/media/libeffects/lvm/lib/Eq/src/LVEQNB_CalcCoef.cpp
index c3c0fad..f8a5f2a 100644
--- a/media/libeffects/lvm/lib/Eq/src/LVEQNB_CalcCoef.cpp
+++ b/media/libeffects/lvm/lib/Eq/src/LVEQNB_CalcCoef.cpp
@@ -111,43 +111,37 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-LVEQNB_ReturnStatus_en LVEQNB_SinglePrecCoefs(LVM_UINT16        Fs,
-                                              LVEQNB_BandDef_t  *pFilterDefinition,
-                                              PK_FLOAT_Coefs_t  *pCoefficients)
-{
-
-    extern LVM_FLOAT    LVEQNB_GainTable[];
-    extern LVM_FLOAT    LVEQNB_TwoPiOnFsTable[];
-    extern LVM_FLOAT    LVEQNB_DTable[];
+LVEQNB_ReturnStatus_en LVEQNB_SinglePrecCoefs(LVM_UINT16 Fs, LVEQNB_BandDef_t* pFilterDefinition,
+                                              PK_FLOAT_Coefs_t* pCoefficients) {
+    extern LVM_FLOAT LVEQNB_GainTable[];
+    extern LVM_FLOAT LVEQNB_TwoPiOnFsTable[];
+    extern LVM_FLOAT LVEQNB_DTable[];
 
     /*
      * Get the filter definition
      */
-    LVM_INT16           Gain        = pFilterDefinition->Gain;
-    LVM_UINT16          Frequency   = pFilterDefinition->Frequency;
+    LVM_INT16 Gain = pFilterDefinition->Gain;
+    LVM_UINT16 Frequency = pFilterDefinition->Frequency;
     /* As mentioned in effectbundle.h */
-    LVM_FLOAT           QFactor     = (LVM_FLOAT)pFilterDefinition->QFactor / 100.0f;
+    LVM_FLOAT QFactor = (LVM_FLOAT)pFilterDefinition->QFactor / 100.0f;
 
     /*
      * Intermediate variables and temporary values
      */
-    LVM_FLOAT           T0;
-    LVM_FLOAT           D;
-    LVM_FLOAT           A0;
-    LVM_FLOAT           B1;
-    LVM_FLOAT           B2;
+    LVM_FLOAT T0;
+    LVM_FLOAT D;
+    LVM_FLOAT A0;
+    LVM_FLOAT B1;
+    LVM_FLOAT B2;
 
     /*
      * Calculating the intermediate values
      */
-    T0 = Frequency * LVEQNB_TwoPiOnFsTable[Fs];        /* T0 = 2 * Pi * Fc / Fs */
-    if (Gain >= 0)
-    {
-        D = LVEQNB_DTable[15];                         /* D = 1            if GaindB >= 0 */
-    }
-    else
-    {
-        D = LVEQNB_DTable[Gain + 15];                    /* D = 1 / (1 + G)  if GaindB <  0 */
+    T0 = Frequency * LVEQNB_TwoPiOnFsTable[Fs]; /* T0 = 2 * Pi * Fc / Fs */
+    if (Gain >= 0) {
+        D = LVEQNB_DTable[15]; /* D = 1            if GaindB >= 0 */
+    } else {
+        D = LVEQNB_DTable[Gain + 15]; /* D = 1 / (1 + G)  if GaindB <  0 */
     }
 
     /*
@@ -164,7 +158,7 @@
     pCoefficients->A0 = 2 * A0;
     pCoefficients->B1 = 2 * B1;
     pCoefficients->B2 = 2 * B2;
-    pCoefficients->G  = LVEQNB_GainTable[Gain + 15];
+    pCoefficients->G = LVEQNB_GainTable[Gain + 15];
 
-    return(LVEQNB_SUCCESS);
+    return (LVEQNB_SUCCESS);
 }
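LVEQNB_SinglePrecCoefs maps the signed band gain (-15 to +15 dB) onto table indices by adding 15, and selects D = 1 for boosts and D = 1/(1 + G) for cuts. A small standalone sketch of that mapping, recomputing G and D from the formulas documented in LVEQNB_Coeffs.h rather than reading the library tables (not library code):

// Standalone sketch of the gain/D lookup used by LVEQNB_SinglePrecCoefs.
#include <cmath>
#include <cstdio>

int main() {
    for (int gainDb = -15; gainDb <= 15; ++gainDb) {
        // G = 10^(Gain/20) - 1, as documented above the gain table.
        float G = std::pow(10.0f, gainDb / 20.0f) - 1.0f;
        // D = 1 if GaindB >= 0, else 1 / (1 + G), matching the DTable comment.
        float D = (gainDb >= 0) ? 1.0f : 1.0f / (1.0f + G);
        int index = gainDb + 15;  // table index used by the implementation
        std::printf("%+3d dB -> index %2d, G = %+.6f, D = %.6f\n",
                    gainDb, index, G, D);
    }
    return 0;
}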
diff --git a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Coeffs.h b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Coeffs.h
index 6329181..c44a9be 100644
--- a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Coeffs.h
+++ b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Coeffs.h
@@ -23,78 +23,78 @@
 /* Gain table for (10^(Gain/20) - 1)                                                */
 /*                                                                                  */
 /************************************************************************************/
-#define LVEQNB_Gain_Neg15_dB                             (-0.822172f)
-#define LVEQNB_Gain_Neg14_dB                             (-0.800474f)
-#define LVEQNB_Gain_Neg13_dB                             (-0.776128f)
-#define LVEQNB_Gain_Neg12_dB                             (-0.748811f)
-#define LVEQNB_Gain_Neg11_dB                             (-0.718162f)
-#define LVEQNB_Gain_Neg10_dB                             (-0.683772f)
-#define LVEQNB_Gain_Neg9_dB                              (-0.645187f)
-#define LVEQNB_Gain_Neg8_dB                              (-0.601893f)
-#define LVEQNB_Gain_Neg7_dB                              (-0.553316f)
-#define LVEQNB_Gain_Neg6_dB                              (-0.498813f)
-#define LVEQNB_Gain_Neg5_dB                              (-0.437659f)
-#define LVEQNB_Gain_Neg4_dB                              (-0.369043f)
-#define LVEQNB_Gain_Neg3_dB                              (-0.292054f)
-#define LVEQNB_Gain_Neg2_dB                              (-0.205672f)
-#define LVEQNB_Gain_Neg1_dB                              (-0.108749f)
-#define LVEQNB_Gain_0_dB                                  0.000000f
-#define LVEQNB_Gain_1_dB                                  0.122018f
-#define LVEQNB_Gain_2_dB                                  0.258925f
-#define LVEQNB_Gain_3_dB                                  0.412538f
-#define LVEQNB_Gain_4_dB                                  0.584893f
-#define LVEQNB_Gain_5_dB                                  0.778279f
-#define LVEQNB_Gain_6_dB                                  0.995262f
-#define LVEQNB_Gain_7_dB                                  1.238721f
-#define LVEQNB_Gain_8_dB                                  1.511886f
-#define LVEQNB_Gain_9_dB                                  1.818383f
-#define LVEQNB_Gain_10_dB                                 2.162278f
-#define LVEQNB_Gain_11_dB                                 2.548134f
-#define LVEQNB_Gain_12_dB                                 2.981072f
-#define LVEQNB_Gain_13_dB                                 3.466836f
-#define LVEQNB_Gain_14_dB                                 4.011872f
-#define LVEQNB_Gain_15_dB                                 4.623413f
+#define LVEQNB_Gain_Neg15_dB (-0.822172f)
+#define LVEQNB_Gain_Neg14_dB (-0.800474f)
+#define LVEQNB_Gain_Neg13_dB (-0.776128f)
+#define LVEQNB_Gain_Neg12_dB (-0.748811f)
+#define LVEQNB_Gain_Neg11_dB (-0.718162f)
+#define LVEQNB_Gain_Neg10_dB (-0.683772f)
+#define LVEQNB_Gain_Neg9_dB (-0.645187f)
+#define LVEQNB_Gain_Neg8_dB (-0.601893f)
+#define LVEQNB_Gain_Neg7_dB (-0.553316f)
+#define LVEQNB_Gain_Neg6_dB (-0.498813f)
+#define LVEQNB_Gain_Neg5_dB (-0.437659f)
+#define LVEQNB_Gain_Neg4_dB (-0.369043f)
+#define LVEQNB_Gain_Neg3_dB (-0.292054f)
+#define LVEQNB_Gain_Neg2_dB (-0.205672f)
+#define LVEQNB_Gain_Neg1_dB (-0.108749f)
+#define LVEQNB_Gain_0_dB 0.000000f
+#define LVEQNB_Gain_1_dB 0.122018f
+#define LVEQNB_Gain_2_dB 0.258925f
+#define LVEQNB_Gain_3_dB 0.412538f
+#define LVEQNB_Gain_4_dB 0.584893f
+#define LVEQNB_Gain_5_dB 0.778279f
+#define LVEQNB_Gain_6_dB 0.995262f
+#define LVEQNB_Gain_7_dB 1.238721f
+#define LVEQNB_Gain_8_dB 1.511886f
+#define LVEQNB_Gain_9_dB 1.818383f
+#define LVEQNB_Gain_10_dB 2.162278f
+#define LVEQNB_Gain_11_dB 2.548134f
+#define LVEQNB_Gain_12_dB 2.981072f
+#define LVEQNB_Gain_13_dB 3.466836f
+#define LVEQNB_Gain_14_dB 4.011872f
+#define LVEQNB_Gain_15_dB 4.623413f
 
 /************************************************************************************/
 /*                                                                                  */
 /* Frequency table for 2*Pi/Fs                                                      */
 /*                                                                                  */
 /************************************************************************************/
-#define LVEQNB_2PiOn_8000                                0.000785f
-#define LVEQNB_2PiOn_11025                               0.000570f
-#define LVEQNB_2PiOn_12000                               0.000524f
-#define LVEQNB_2PiOn_16000                               0.000393f
-#define LVEQNB_2PiOn_22050                               0.000285f
-#define LVEQNB_2PiOn_24000                               0.000262f
-#define LVEQNB_2PiOn_32000                               0.000196f
-#define LVEQNB_2PiOn_44100                               0.000142f
-#define LVEQNB_2PiOn_48000                               0.000131f
+#define LVEQNB_2PiOn_8000 0.000785f
+#define LVEQNB_2PiOn_11025 0.000570f
+#define LVEQNB_2PiOn_12000 0.000524f
+#define LVEQNB_2PiOn_16000 0.000393f
+#define LVEQNB_2PiOn_22050 0.000285f
+#define LVEQNB_2PiOn_24000 0.000262f
+#define LVEQNB_2PiOn_32000 0.000196f
+#define LVEQNB_2PiOn_44100 0.000142f
+#define LVEQNB_2PiOn_48000 0.000131f
 
-#define LVEQNB_2PiOn_88200                               0.000071f
-#define LVEQNB_2PiOn_96000                               0.000065f
-#define LVEQNB_2PiOn_176400                              0.000036f
-#define LVEQNB_2PiOn_192000                              0.000033f
+#define LVEQNB_2PiOn_88200 0.000071f
+#define LVEQNB_2PiOn_96000 0.000065f
+#define LVEQNB_2PiOn_176400 0.000036f
+#define LVEQNB_2PiOn_192000 0.000033f
 
 /************************************************************************************/
 /*                                                                                  */
 /* 50D table for 50 / ( 1 + Gain )                                                  */
 /*                                                                                  */
 /************************************************************************************/
-#define LVEQNB_100D_Neg15_dB                             5.623413f
-#define LVEQNB_100D_Neg14_dB                             5.011872f
-#define LVEQNB_100D_Neg13_dB                             4.466836f
-#define LVEQNB_100D_Neg12_dB                             3.981072f
-#define LVEQNB_100D_Neg11_dB                             3.548134f
-#define LVEQNB_100D_Neg10_dB                             3.162278f
-#define LVEQNB_100D_Neg9_dB                              2.818383f
-#define LVEQNB_100D_Neg8_dB                              2.511886f
-#define LVEQNB_100D_Neg7_dB                              2.238721f
-#define LVEQNB_100D_Neg6_dB                              1.995262f
-#define LVEQNB_100D_Neg5_dB                              1.778279f
-#define LVEQNB_100D_Neg4_dB                              1.584893f
-#define LVEQNB_100D_Neg3_dB                              1.412538f
-#define LVEQNB_100D_Neg2_dB                              1.258925f
-#define LVEQNB_100D_Neg1_dB                              1.122018f
-#define LVEQNB_100D_0_dB                                 1.000000f
+#define LVEQNB_100D_Neg15_dB 5.623413f
+#define LVEQNB_100D_Neg14_dB 5.011872f
+#define LVEQNB_100D_Neg13_dB 4.466836f
+#define LVEQNB_100D_Neg12_dB 3.981072f
+#define LVEQNB_100D_Neg11_dB 3.548134f
+#define LVEQNB_100D_Neg10_dB 3.162278f
+#define LVEQNB_100D_Neg9_dB 2.818383f
+#define LVEQNB_100D_Neg8_dB 2.511886f
+#define LVEQNB_100D_Neg7_dB 2.238721f
+#define LVEQNB_100D_Neg6_dB 1.995262f
+#define LVEQNB_100D_Neg5_dB 1.778279f
+#define LVEQNB_100D_Neg4_dB 1.584893f
+#define LVEQNB_100D_Neg3_dB 1.412538f
+#define LVEQNB_100D_Neg2_dB 1.258925f
+#define LVEQNB_100D_Neg1_dB 1.122018f
+#define LVEQNB_100D_0_dB 1.000000f
 
 #endif
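The constants above are precomputed closed forms: the gain table stores 10^(Gain/20) - 1, the frequency table stores 2*Pi/Fs, and (despite the "50D" heading) the LVEQNB_100D_* entries are simply 1/(1 + G). A quick standalone check of a few representative values:

// Standalone check of representative table entries (not library code).
#include <cmath>
#include <cstdio>

int main() {
    const double pi = std::acos(-1.0);
    // LVEQNB_Gain_6_dB: 10^(6/20) - 1 ~= 0.995262
    std::printf("10^(6/20) - 1   = %.6f\n", std::pow(10.0, 6.0 / 20.0) - 1.0);
    // LVEQNB_2PiOn_48000: 2*Pi/48000 ~= 0.000131
    std::printf("2*Pi/48000      = %.6f\n", 2.0 * pi / 48000.0);
    // LVEQNB_100D_Neg6_dB: 1/(1 + G) at -6 dB = 10^(6/20) ~= 1.995262
    std::printf("1/(10^(-6/20))  = %.6f\n", 1.0 / std::pow(10.0, -6.0 / 20.0));
    return 0;
}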
diff --git a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Control.cpp b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Control.cpp
index 6bb4a7e..bccbe86 100644
--- a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Control.cpp
+++ b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Control.cpp
@@ -32,8 +32,8 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-#define     LOW_FREQ            298             /* 32768/110 for low test frequency */
-#define     HIGH_FREQ           386             /* 32768/85 for high test frequency */
+#define LOW_FREQ 298  /* 32768/110 for low test frequency */
+#define HIGH_FREQ 386 /* 32768/85 for high test frequency */
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -56,23 +56,19 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-LVEQNB_ReturnStatus_en LVEQNB_GetParameters(LVEQNB_Handle_t     hInstance,
-                                            LVEQNB_Params_t     *pParams)
-{
+LVEQNB_ReturnStatus_en LVEQNB_GetParameters(LVEQNB_Handle_t hInstance, LVEQNB_Params_t* pParams) {
+    LVEQNB_Instance_t* pInstance = (LVEQNB_Instance_t*)hInstance;
 
-    LVEQNB_Instance_t    *pInstance =(LVEQNB_Instance_t  *)hInstance;
-
-   /*
+    /*
      * Check for error conditions
      */
-    if((hInstance == LVM_NULL) || (pParams == LVM_NULL))
-    {
+    if ((hInstance == LVM_NULL) || (pParams == LVM_NULL)) {
         return LVEQNB_NULLADDRESS;
     }
 
     *pParams = pInstance->Params;
 
-    return(LVEQNB_SUCCESS);
+    return (LVEQNB_SUCCESS);
 }
 
 /************************************************************************************/
@@ -96,20 +92,17 @@
 /*                                                                                  */
 /************************************************************************************/
 
-LVEQNB_ReturnStatus_en LVEQNB_GetCapabilities(LVEQNB_Handle_t           hInstance,
-                                              LVEQNB_Capabilities_t     *pCapabilities)
-{
+LVEQNB_ReturnStatus_en LVEQNB_GetCapabilities(LVEQNB_Handle_t hInstance,
+                                              LVEQNB_Capabilities_t* pCapabilities) {
+    LVEQNB_Instance_t* pInstance = (LVEQNB_Instance_t*)hInstance;
 
-    LVEQNB_Instance_t    *pInstance =(LVEQNB_Instance_t  *)hInstance;
-
-    if((hInstance == LVM_NULL) || (pCapabilities == LVM_NULL))
-    {
+    if ((hInstance == LVM_NULL) || (pCapabilities == LVM_NULL)) {
         return LVEQNB_NULLADDRESS;
     }
 
     *pCapabilities = pInstance->Capabilities;
 
-    return(LVEQNB_SUCCESS);
+    return (LVEQNB_SUCCESS);
 }
 
 /************************************************************************************/
@@ -134,33 +127,30 @@
 /*                                                                                  */
 /************************************************************************************/
 
-void    LVEQNB_SetFilters(LVEQNB_Instance_t     *pInstance,
-                          LVEQNB_Params_t       *pParams)
-{
-    extern const LVM_UINT32   LVEQNB_SampleRateTab[];           /* Sample rate table */
+void LVEQNB_SetFilters(LVEQNB_Instance_t* pInstance, LVEQNB_Params_t* pParams) {
+    extern const LVM_UINT32 LVEQNB_SampleRateTab[]; /* Sample rate table */
 
-    LVM_UINT16          i;                                      /* Filter band index */
-    LVM_UINT32          fs = (LVM_UINT32)LVEQNB_SampleRateTab[(LVM_UINT16)pParams->SampleRate];  /* Sample rate */
-    LVM_UINT32          fc;                                     /* Filter centre frequency */
-    LVM_INT16           QFactor;                                /* Filter Q factor */
+    LVM_UINT16 i; /* Filter band index */
+    LVM_UINT32 fs =
+            (LVM_UINT32)LVEQNB_SampleRateTab[(LVM_UINT16)pParams->SampleRate]; /* Sample rate */
+    LVM_UINT32 fc;     /* Filter centre frequency */
+    LVM_INT16 QFactor; /* Filter Q factor */
 
     pInstance->NBands = pParams->NBands;
 
-    for (i=0; i<pParams->NBands; i++)
-    {
+    for (i = 0; i < pParams->NBands; i++) {
         /*
          * Get the filter settings
          */
-        fc = (LVM_UINT32)pParams->pBandDefinition[i].Frequency;     /* Get the band centre frequency */
-        QFactor = (LVM_INT16)pParams->pBandDefinition[i].QFactor;   /* Get the band Q factor */
+        fc = (LVM_UINT32)pParams->pBandDefinition[i].Frequency; /* Get the band centre frequency */
+        QFactor = (LVM_INT16)pParams->pBandDefinition[i].QFactor; /* Get the band Q factor */
 
         pInstance->pBiquadType[i] = LVEQNB_SinglePrecision_Float; /* Default to single precision */
 
         /*
          * Check for out of range frequencies
          */
-        if (fc > (fs >> 1))
-        {
+        if (fc > (fs >> 1)) {
             pInstance->pBiquadType[i] = LVEQNB_OutOfRange;
         }
 
@@ -168,7 +158,6 @@
          * Copy the filter definition to persistant memory
          */
         pInstance->pBandDefinitions[i] = pParams->pBandDefinition[i];
-
     }
 }
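LVEQNB_SetFilters defaults every band to the single-precision float biquad and only overrides that when the centre frequency cannot be realised at the current sample rate (fc above fs/2); LVEQNB_SetCoefficients then skips such bands. A standalone sketch of that guard with simplified types (not the library code):

// Minimal illustration of the per-band Nyquist guard in LVEQNB_SetFilters.
#include <cstdint>
#include <cstdio>

enum class BiquadType { SinglePrecisionFloat, OutOfRange };

static BiquadType classifyBand(uint32_t fc, uint32_t fs) {
    // A centre frequency above fs/2 cannot be realised, so the band is
    // marked out of range and later skipped by the coefficient update.
    return (fc > (fs >> 1)) ? BiquadType::OutOfRange
                            : BiquadType::SinglePrecisionFloat;
}

int main() {
    std::printf("16 kHz band at 48 kHz:    %s\n",
                classifyBand(16000, 48000) == BiquadType::OutOfRange ? "out of range" : "ok");
    std::printf("16 kHz band at 22.05 kHz: %s\n",
                classifyBand(16000, 22050) == BiquadType::OutOfRange ? "out of range" : "ok");
    return 0;
}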
 
@@ -186,46 +175,37 @@
 /*                                                                                  */
 /************************************************************************************/
 
-void    LVEQNB_SetCoefficients(LVEQNB_Instance_t     *pInstance)
-{
-
-    LVM_UINT16              i;                          /* Filter band index */
-    LVEQNB_BiquadType_en    BiquadType;                 /* Filter biquad type */
+void LVEQNB_SetCoefficients(LVEQNB_Instance_t* pInstance) {
+    LVM_UINT16 i;                    /* Filter band index */
+    LVEQNB_BiquadType_en BiquadType; /* Filter biquad type */
 
     /*
      * Set the coefficients for each band by the init function
      */
-    for (i=0; i<pInstance->Params.NBands; i++)
-    {
-
+    for (i = 0; i < pInstance->Params.NBands; i++) {
         /*
          * Check band type for correct initialisation method and recalculate the coefficients
          */
         BiquadType = pInstance->pBiquadType[i];
-        switch  (BiquadType)
-        {
-            case    LVEQNB_SinglePrecision_Float:
-            {
-                PK_FLOAT_Coefs_t      Coefficients;
+        switch (BiquadType) {
+            case LVEQNB_SinglePrecision_Float: {
+                PK_FLOAT_Coefs_t Coefficients;
                 /*
                  * Calculate the single precision coefficients
                  */
                 LVEQNB_SinglePrecCoefs((LVM_UINT16)pInstance->Params.SampleRate,
-                                       &pInstance->pBandDefinitions[i],
-                                       &Coefficients);
+                                       &pInstance->pBandDefinitions[i], &Coefficients);
                 /*
                  * Set the coefficients
                  */
                 PK_2I_D32F32CssGss_TRC_WRA_01_Init(&pInstance->pEQNB_FilterState_Float[i],
-                                                   &pInstance->pEQNB_Taps_Float[i],
-                                                   &Coefficients);
+                                                   &pInstance->pEQNB_Taps_Float[i], &Coefficients);
                 break;
             }
             default:
                 break;
         }
     }
-
 }
 
 /************************************************************************************/
@@ -239,20 +219,19 @@
 /*  pInstance           Pointer to the instance                                     */
 /*                                                                                  */
 /************************************************************************************/
-void    LVEQNB_ClearFilterHistory(LVEQNB_Instance_t     *pInstance)
-{
-    LVM_FLOAT       *pTapAddress;
-    LVM_INT16       NumTaps;
+void LVEQNB_ClearFilterHistory(LVEQNB_Instance_t* pInstance) {
+    LVM_FLOAT* pTapAddress;
+    LVM_INT16 NumTaps;
 
-    pTapAddress = (LVM_FLOAT *)pInstance->pEQNB_Taps_Float;
-    NumTaps     = (LVM_INT16)((pInstance->Capabilities.MaxBands * \
-                                    sizeof(Biquad_2I_Order2_FLOAT_Taps_t)) / sizeof(LVM_FLOAT));
+    pTapAddress = (LVM_FLOAT*)pInstance->pEQNB_Taps_Float;
+    NumTaps =
+            (LVM_INT16)((pInstance->Capabilities.MaxBands * sizeof(Biquad_2I_Order2_FLOAT_Taps_t)) /
+                        sizeof(LVM_FLOAT));
 
-    if (NumTaps != 0)
-    {
-        LoadConst_Float(0,                                 /* Clear the history, value 0 */
-                        pTapAddress,                       /* Destination */
-                        NumTaps);                          /* Number of words */
+    if (NumTaps != 0) {
+        LoadConst_Float(0,           /* Clear the history, value 0 */
+                        pTapAddress, /* Destination */
+                        NumTaps);    /* Number of words */
     }
 }
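LVEQNB_ClearFilterHistory can wipe all band history with one constant fill because the per-band taps sit in a single contiguous allocation; the word count is MaxBands * sizeof(taps) / sizeof(LVM_FLOAT). A tiny sketch of that arithmetic with a stand-in taps struct (the real Biquad_2I_Order2_FLOAT_Taps_t layout is not shown in this change, and the sizes below are assumptions):

// Sketch of the word count cleared in one pass; types and sizes are stand-ins.
#include <cstddef>
#include <cstdio>

struct TapsStandIn {
    float storage[4];  // assumed per-band history size
};

int main() {
    const std::size_t maxBands = 5;  // assumed capability
    const std::size_t numWords = maxBands * sizeof(TapsStandIn) / sizeof(float);
    std::printf("float words to clear for %zu bands: %zu\n", maxBands, numWords);
    return 0;
}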
 /****************************************************************************************/
@@ -277,56 +256,47 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-LVEQNB_ReturnStatus_en LVEQNB_Control(LVEQNB_Handle_t        hInstance,
-                                      LVEQNB_Params_t        *pParams)
-{
-
-    LVEQNB_Instance_t    *pInstance = (LVEQNB_Instance_t  *)hInstance;
-    LVM_INT16            bChange    = LVM_FALSE;
-    LVM_INT16            i = 0;
-    LVEQNB_Mode_en       OperatingModeSave ;
+LVEQNB_ReturnStatus_en LVEQNB_Control(LVEQNB_Handle_t hInstance, LVEQNB_Params_t* pParams) {
+    LVEQNB_Instance_t* pInstance = (LVEQNB_Instance_t*)hInstance;
+    LVM_INT16 bChange = LVM_FALSE;
+    LVM_INT16 i = 0;
+    LVEQNB_Mode_en OperatingModeSave;
 
     /*
      * Check for error conditions
      */
-    if((hInstance == LVM_NULL) || (pParams == LVM_NULL))
-    {
+    if ((hInstance == LVM_NULL) || (pParams == LVM_NULL)) {
         return LVEQNB_NULLADDRESS;
     }
 
-    if((pParams->NBands !=0) && (pParams->pBandDefinition==LVM_NULL))
-    {
+    if ((pParams->NBands != 0) && (pParams->pBandDefinition == LVM_NULL)) {
         return LVEQNB_NULLADDRESS;
     }
 
     OperatingModeSave = pInstance->Params.OperatingMode;
 
     /* Set the alpha factor of the mixer */
-    if (pParams->SampleRate != pInstance->Params.SampleRate)
-    {
-        LVC_Mixer_VarSlope_SetTimeConstant(&pInstance->BypassMixer.MixerStream[0],LVEQNB_BYPASS_MIXER_TC,(LVM_Fs_en)pParams->SampleRate,2);
-        LVC_Mixer_VarSlope_SetTimeConstant(&pInstance->BypassMixer.MixerStream[1],LVEQNB_BYPASS_MIXER_TC,(LVM_Fs_en)pParams->SampleRate,2);
+    if (pParams->SampleRate != pInstance->Params.SampleRate) {
+        LVC_Mixer_VarSlope_SetTimeConstant(&pInstance->BypassMixer.MixerStream[0],
+                                           LVEQNB_BYPASS_MIXER_TC, (LVM_Fs_en)pParams->SampleRate,
+                                           2);
+        LVC_Mixer_VarSlope_SetTimeConstant(&pInstance->BypassMixer.MixerStream[1],
+                                           LVEQNB_BYPASS_MIXER_TC, (LVM_Fs_en)pParams->SampleRate,
+                                           2);
     }
 
-    if( (pInstance->Params.NBands            !=  pParams->NBands          ) ||
-        (pInstance->Params.OperatingMode     !=  pParams->OperatingMode   ) ||
-        (pInstance->Params.pBandDefinition   !=  pParams->pBandDefinition ) ||
-        (pInstance->Params.SampleRate        !=  pParams->SampleRate      ) ||
-        (pInstance->Params.SourceFormat      !=  pParams->SourceFormat    ))
-    {
-
+    if ((pInstance->Params.NBands != pParams->NBands) ||
+        (pInstance->Params.OperatingMode != pParams->OperatingMode) ||
+        (pInstance->Params.pBandDefinition != pParams->pBandDefinition) ||
+        (pInstance->Params.SampleRate != pParams->SampleRate) ||
+        (pInstance->Params.SourceFormat != pParams->SourceFormat)) {
         bChange = LVM_TRUE;
-    }
-    else
-    {
-        for(i = 0; i < pParams->NBands; i++)
-        {
-
-            if((pInstance->pBandDefinitions[i].Frequency  != pParams->pBandDefinition[i].Frequency )||
-                (pInstance->pBandDefinitions[i].Gain       != pParams->pBandDefinition[i].Gain      )||
-                (pInstance->pBandDefinitions[i].QFactor    != pParams->pBandDefinition[i].QFactor   ))
-            {
-
+    } else {
+        for (i = 0; i < pParams->NBands; i++) {
+            if ((pInstance->pBandDefinitions[i].Frequency !=
+                 pParams->pBandDefinition[i].Frequency) ||
+                (pInstance->pBandDefinitions[i].Gain != pParams->pBandDefinition[i].Gain) ||
+                (pInstance->pBandDefinitions[i].QFactor != pParams->pBandDefinition[i].QFactor)) {
                 bChange = LVM_TRUE;
             }
         }
@@ -335,19 +305,17 @@
     // During operating mode transition, there is a race condition where the mode
     // is still LVEQNB_ON, but the effect is considered disabled in the upper layers.
     // modeChange handles this special race condition.
-    const int /* bool */ modeChange = pParams->OperatingMode != OperatingModeSave
-            || (OperatingModeSave == LVEQNB_ON
-                    && pInstance->bInOperatingModeTransition
-                    && LVC_Mixer_GetTarget(&pInstance->BypassMixer.MixerStream[0]) == 0);
+    const int /* bool */ modeChange =
+            pParams->OperatingMode != OperatingModeSave ||
+            (OperatingModeSave == LVEQNB_ON && pInstance->bInOperatingModeTransition &&
+             LVC_Mixer_GetTarget(&pInstance->BypassMixer.MixerStream[0]) == 0);
 
     if (bChange || modeChange) {
-
         /*
          * If the sample rate has changed clear the history
          */
-        if (pInstance->Params.SampleRate != pParams->SampleRate)
-        {
-            LVEQNB_ClearFilterHistory(pInstance);           /* Clear the history */
+        if (pInstance->Params.SampleRate != pParams->SampleRate) {
+            LVEQNB_ClearFilterHistory(pInstance); /* Clear the history */
         }
 
         /*
@@ -358,45 +326,45 @@
         /*
          * Reset the filters except if the algo is switched off
          */
-        if(pParams->OperatingMode != LVEQNB_BYPASS){
+        if (pParams->OperatingMode != LVEQNB_BYPASS) {
             /*
              * Reset the filters as all parameters could have changed
              */
-            LVEQNB_SetFilters(pInstance,                        /* Instance pointer */
-                              pParams);                         /* New parameters */
+            LVEQNB_SetFilters(pInstance, /* Instance pointer */
+                              pParams);  /* New parameters */
 
             /*
              * Update the filters
              */
-            LVEQNB_SetCoefficients(pInstance);                  /* Instance pointer */
+            LVEQNB_SetCoefficients(pInstance); /* Instance pointer */
         }
 
         if (modeChange) {
-            if(pParams->OperatingMode == LVEQNB_ON)
-            {
+            if (pParams->OperatingMode == LVEQNB_ON) {
                 LVC_Mixer_SetTarget(&pInstance->BypassMixer.MixerStream[0], 1.0f);
                 LVC_Mixer_SetTarget(&pInstance->BypassMixer.MixerStream[1], 0.0f);
-                pInstance->BypassMixer.MixerStream[0].CallbackSet        = 1;
-                pInstance->BypassMixer.MixerStream[1].CallbackSet        = 1;
-            }
-            else
-            {
+                pInstance->BypassMixer.MixerStream[0].CallbackSet = 1;
+                pInstance->BypassMixer.MixerStream[1].CallbackSet = 1;
+            } else {
                 /* Stay on the ON operating mode until the transition is done */
                 // This may introduce a state race condition if the effect is enabled again
                 // while in transition.  This is fixed in the modeChange logic.
                 pInstance->Params.OperatingMode = LVEQNB_ON;
                 LVC_Mixer_SetTarget(&pInstance->BypassMixer.MixerStream[0], 0.0f);
                 LVC_Mixer_SetTarget(&pInstance->BypassMixer.MixerStream[1], 1.0f);
-                pInstance->BypassMixer.MixerStream[0].CallbackSet        = 1;
-                pInstance->BypassMixer.MixerStream[1].CallbackSet        = 1;
+                pInstance->BypassMixer.MixerStream[0].CallbackSet = 1;
+                pInstance->BypassMixer.MixerStream[1].CallbackSet = 1;
             }
-            LVC_Mixer_VarSlope_SetTimeConstant(&pInstance->BypassMixer.MixerStream[0],LVEQNB_BYPASS_MIXER_TC,(LVM_Fs_en)pParams->SampleRate,2);
-            LVC_Mixer_VarSlope_SetTimeConstant(&pInstance->BypassMixer.MixerStream[1],LVEQNB_BYPASS_MIXER_TC,(LVM_Fs_en)pParams->SampleRate,2);
+            LVC_Mixer_VarSlope_SetTimeConstant(&pInstance->BypassMixer.MixerStream[0],
+                                               LVEQNB_BYPASS_MIXER_TC,
+                                               (LVM_Fs_en)pParams->SampleRate, 2);
+            LVC_Mixer_VarSlope_SetTimeConstant(&pInstance->BypassMixer.MixerStream[1],
+                                               LVEQNB_BYPASS_MIXER_TC,
+                                               (LVM_Fs_en)pParams->SampleRate, 2);
             pInstance->bInOperatingModeTransition = LVM_TRUE;
         }
-
     }
-    return(LVEQNB_SUCCESS);
+    return (LVEQNB_SUCCESS);
 }
 
 /****************************************************************************************/
@@ -408,23 +376,22 @@
 /*  transition                                                                          */
 /*                                                                                      */
 /****************************************************************************************/
-LVM_INT32 LVEQNB_BypassMixerCallBack (void* hInstance,
-                                      void *pGeneralPurpose,
-                                      LVM_INT16 CallbackParam)
-{
-    LVEQNB_Instance_t      *pInstance =(LVEQNB_Instance_t  *)hInstance;
-    LVM_Callback            CallBack  = pInstance->Capabilities.CallBack;
+LVM_INT32 LVEQNB_BypassMixerCallBack(void* hInstance, void* pGeneralPurpose,
+                                     LVM_INT16 CallbackParam) {
+    LVEQNB_Instance_t* pInstance = (LVEQNB_Instance_t*)hInstance;
+    LVM_Callback CallBack = pInstance->Capabilities.CallBack;
 
-    (void) pGeneralPurpose;
+    (void)pGeneralPurpose;
 
-     /*
-      * Send an ALGOFF event if the ON->OFF switch transition is finished
-      */
-    if((LVC_Mixer_GetTarget(&pInstance->BypassMixer.MixerStream[0]) == 0) &&
-       (CallbackParam == 0)){
+    /*
+     * Send an ALGOFF event if the ON->OFF switch transition is finished
+     */
+    if ((LVC_Mixer_GetTarget(&pInstance->BypassMixer.MixerStream[0]) == 0) &&
+        (CallbackParam == 0)) {
         pInstance->Params.OperatingMode = LVEQNB_BYPASS;
-        if (CallBack != LVM_NULL){
-            CallBack(pInstance->Capabilities.pBundleInstance, LVM_NULL, ALGORITHM_EQNB_ID|LVEQNB_EVENT_ALGOFF);
+        if (CallBack != LVM_NULL) {
+            CallBack(pInstance->Capabilities.pBundleInstance, LVM_NULL,
+                     ALGORITHM_EQNB_ID | LVEQNB_EVENT_ALGOFF);
         }
     }
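The bypass handling in LVEQNB_Control and LVEQNB_BypassMixerCallBack is a wet/dry crossfade: mixer stream 0 ramps the processed path, stream 1 ramps the unprocessed path, and the ALGOFF event is only raised once the ON-to-OFF ramp has actually reached zero. A standalone sketch of that idea in plain C++ (not the LVC_Mixer API; the ramp step is an assumed value):

// Wet/dry crossfade sketch mirroring the two bypass mixer streams.
#include <algorithm>
#include <cstdio>

struct CrossFade {
    float wet = 1.0f;        // processed-path gain (mixer stream 0)
    float wetTarget = 1.0f;  // 1.0 for ON, 0.0 for OFF
    float step = 0.25f;      // assumed ramp increment per block

    void setMode(bool on) { wetTarget = on ? 1.0f : 0.0f; }

    // Ramp one block towards the target; report when the OFF ramp completes,
    // which is the point where the real code raises LVEQNB_EVENT_ALGOFF.
    bool advanceAndCheckAlgOff() {
        float before = wet;
        if (wet < wetTarget) wet = std::min(wet + step, wetTarget);
        if (wet > wetTarget) wet = std::max(wet - step, wetTarget);
        return before > 0.0f && wet == 0.0f && wetTarget == 0.0f;
    }

    float mix(float processed, float dry) const {
        return wet * processed + (1.0f - wet) * dry;  // dry gain mirrors stream 1
    }
};

int main() {
    CrossFade xf;
    xf.setMode(false);  // request BYPASS: ramp wet 1.0 -> 0.0
    for (int block = 1; block <= 5; ++block) {
        bool algOff = xf.advanceAndCheckAlgOff();
        std::printf("block %d: wet = %.2f%s\n", block, xf.wet,
                    algOff ? "  -> ALGOFF event" : "");
    }
    return 0;
}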
 
diff --git a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Init.cpp b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Init.cpp
index 271a914..1d2a5f5 100644
--- a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Init.cpp
+++ b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Init.cpp
@@ -21,6 +21,7 @@
 /*                                                                                      */
 /****************************************************************************************/
 
+#include <stdlib.h>
 #include "LVEQNB.h"
 #include "LVEQNB_Private.h"
 #include "InstAlloc.h"
@@ -28,295 +29,148 @@
 
 /****************************************************************************************/
 /*                                                                                      */
-/* FUNCTION:                LVEQNB_Memory                                               */
-/*                                                                                      */
-/* DESCRIPTION:                                                                         */
-/*  This function is used for memory allocation and free. It can be called in           */
-/*  two ways:                                                                           */
-/*                                                                                      */
-/*      hInstance = NULL                Returns the memory requirements                 */
-/*      hInstance = Instance handle     Returns the memory requirements and             */
-/*                                      allocated base addresses for the instance       */
-/*                                                                                      */
-/*  When this function is called for memory allocation (hInstance=NULL) the memory      */
-/*  base address pointers are NULL on return.                                           */
-/*                                                                                      */
-/*  When the function is called for free (hInstance = Instance Handle) the memory       */
-/*  table returns the allocated memory and base addresses used during initialisation.   */
-/*                                                                                      */
-/* PARAMETERS:                                                                          */
-/*  hInstance               Instance Handle                                             */
-/*  pMemoryTable            Pointer to an empty memory definition table                 */
-/*  pCapabilities           Pointer to the instance capabilities                        */
-/*                                                                                      */
-/* RETURNS:                                                                             */
-/*  LVEQNB_SUCCESS          Succeeded                                                   */
-/*  LVEQNB_NULLADDRESS      When any of pMemoryTable and pCapabilities is NULL address  */
-/*                                                                                      */
-/* NOTES:                                                                               */
-/*  1.  This function may be interrupted by the LVEQNB_Process function                 */
-/*                                                                                      */
-/****************************************************************************************/
-
-LVEQNB_ReturnStatus_en LVEQNB_Memory(LVEQNB_Handle_t            hInstance,
-                                     LVEQNB_MemTab_t            *pMemoryTable,
-                                     LVEQNB_Capabilities_t      *pCapabilities)
-{
-
-    INST_ALLOC          AllocMem;
-    LVEQNB_Instance_t   *pInstance = (LVEQNB_Instance_t *)hInstance;
-
-    if((pMemoryTable == LVM_NULL)|| (pCapabilities == LVM_NULL))
-    {
-        return LVEQNB_NULLADDRESS;
-    }
-
-    /*
-     * Fill in the memory table
-     */
-    if (hInstance == LVM_NULL)
-    {
-        /*
-         * Instance memory
-         */
-        InstAlloc_Init(&AllocMem,
-                       LVM_NULL);
-        InstAlloc_AddMember(&AllocMem,                              /* Low pass filter */
-                            sizeof(LVEQNB_Instance_t));
-        pMemoryTable->Region[LVEQNB_MEMREGION_INSTANCE].Size         = InstAlloc_GetTotal(&AllocMem);
-        pMemoryTable->Region[LVEQNB_MEMREGION_INSTANCE].Alignment    = LVEQNB_INSTANCE_ALIGN;
-        pMemoryTable->Region[LVEQNB_MEMREGION_INSTANCE].Type         = LVEQNB_PERSISTENT;
-        pMemoryTable->Region[LVEQNB_MEMREGION_INSTANCE].pBaseAddress = LVM_NULL;
-
-        /*
-         * Persistant data memory
-         */
-        InstAlloc_Init(&AllocMem,
-                       LVM_NULL);
-        InstAlloc_AddMember(&AllocMem,                              /* Low pass filter */
-                            sizeof(Biquad_2I_Order2_FLOAT_Taps_t));
-        InstAlloc_AddMember(&AllocMem,                              /* High pass filter */
-                            sizeof(Biquad_2I_Order2_FLOAT_Taps_t));
-        /* Equaliser Biquad Taps */
-        InstAlloc_AddMember(&AllocMem,
-                            (pCapabilities->MaxBands * sizeof(Biquad_2I_Order2_FLOAT_Taps_t)));
-        /* Filter definitions */
-        InstAlloc_AddMember(&AllocMem,
-                            (pCapabilities->MaxBands * sizeof(LVEQNB_BandDef_t)));
-        /* Biquad types */
-        InstAlloc_AddMember(&AllocMem,
-                            (pCapabilities->MaxBands * sizeof(LVEQNB_BiquadType_en)));
-        pMemoryTable->Region[LVEQNB_MEMREGION_PERSISTENT_DATA].Size         = InstAlloc_GetTotal(&AllocMem);
-        pMemoryTable->Region[LVEQNB_MEMREGION_PERSISTENT_DATA].Alignment    = LVEQNB_DATA_ALIGN;
-        pMemoryTable->Region[LVEQNB_MEMREGION_PERSISTENT_DATA].Type         = LVEQNB_PERSISTENT_DATA;
-        pMemoryTable->Region[LVEQNB_MEMREGION_PERSISTENT_DATA].pBaseAddress = LVM_NULL;
-
-        /*
-         * Persistant coefficient memory
-         */
-        InstAlloc_Init(&AllocMem,
-                       LVM_NULL);
-        InstAlloc_AddMember(&AllocMem,                              /* Low pass filter */
-                            sizeof(Biquad_FLOAT_Instance_t));
-        InstAlloc_AddMember(&AllocMem,                              /* High pass filter */
-                            sizeof(Biquad_FLOAT_Instance_t));
-        /* Equaliser Biquad Instance */
-        InstAlloc_AddMember(&AllocMem,
-                            pCapabilities->MaxBands * sizeof(Biquad_FLOAT_Instance_t));
-        pMemoryTable->Region[LVEQNB_MEMREGION_PERSISTENT_COEF].Size         = InstAlloc_GetTotal(&AllocMem);
-        pMemoryTable->Region[LVEQNB_MEMREGION_PERSISTENT_COEF].Alignment    = LVEQNB_COEF_ALIGN;
-        pMemoryTable->Region[LVEQNB_MEMREGION_PERSISTENT_COEF].Type         = LVEQNB_PERSISTENT_COEF;
-        pMemoryTable->Region[LVEQNB_MEMREGION_PERSISTENT_COEF].pBaseAddress = LVM_NULL;
-
-        /*
-         * Scratch memory
-         */
-        InstAlloc_Init(&AllocMem,
-                       LVM_NULL);
-        InstAlloc_AddMember(&AllocMem,                              /* Low pass filter */
-                            LVEQNB_SCRATCHBUFFERS * sizeof(LVM_FLOAT) * \
-                                             pCapabilities->MaxBlockSize);
-        pMemoryTable->Region[LVEQNB_MEMREGION_SCRATCH].Size              = InstAlloc_GetTotal(&AllocMem);
-        pMemoryTable->Region[LVEQNB_MEMREGION_SCRATCH].Alignment         = LVEQNB_SCRATCH_ALIGN;
-        pMemoryTable->Region[LVEQNB_MEMREGION_SCRATCH].Type              = LVEQNB_SCRATCH;
-        pMemoryTable->Region[LVEQNB_MEMREGION_SCRATCH].pBaseAddress      = LVM_NULL;
-    }
-    else
-    {
-        /* Read back memory allocation table */
-        *pMemoryTable = pInstance->MemoryTable;
-    }
-
-    return(LVEQNB_SUCCESS);
-}
-
-/****************************************************************************************/
-/*                                                                                      */
 /* FUNCTION:                LVEQNB_Init                                                 */
 /*                                                                                      */
 /* DESCRIPTION:                                                                         */
-/*  Create and initialisation function for the N-Band equaliser module                  */
-/*                                                                                      */
-/*  This function can be used to create an algorithm instance by calling with           */
-/*  hInstance set to NULL. In this case the algorithm returns the new instance          */
-/*  handle.                                                                             */
-/*                                                                                      */
-/*  This function can be used to force a full re-initialisation of the algorithm        */
-/*  by calling with hInstance = Instance Handle. In this case the memory table          */
-/*  should be correct for the instance, this can be ensured by calling the function     */
-/*  DBE_Memory before calling this function.                                            */
+/*  Create and initialise the N-Band equaliser module.                                  */
 /*                                                                                      */
 /* PARAMETERS:                                                                          */
-/*  hInstance               Instance handle                                             */
-/*  pMemoryTable            Pointer to the memory definition table                      */
-/*  pCapabilities           Pointer to the instance capabilities                        */
+/*  phInstance              Pointer to instance handle                                  */
+/*  pCapabilities           Pointer to the initialisation capabilities                  */
+/*  pScratch                Pointer to bundle scratch buffer                            */
 /*                                                                                      */
 /* RETURNS:                                                                             */
 /*  LVEQNB_SUCCESS          Initialisation succeeded                                    */
-/*  LVEQNB_NULLADDRESS        When pCapabilities or pMemoryTableis or phInstance are NULL */
-/*  LVEQNB_NULLADDRESS        One or more of the memory regions has a NULL base address   */
-/*                          pointer for a memory region with a non-zero size.           */
+/*  LVEQNB_NULLADDRESS      When a memory allocation fails (returns a NULL pointer)     */
 /*                                                                                      */
 /* NOTES:                                                                               */
-/*  1.  The instance handle is the pointer to the base address of the first memory      */
-/*      region.                                                                         */
-/*  2.  This function must not be interrupted by the LVEQNB_Process function            */
+/*  1.  This function must not be interrupted by the LVEQNB_Process function            */
 /*                                                                                      */
 /****************************************************************************************/
 
-LVEQNB_ReturnStatus_en LVEQNB_Init(LVEQNB_Handle_t          *phInstance,
-                                   LVEQNB_MemTab_t          *pMemoryTable,
-                                   LVEQNB_Capabilities_t    *pCapabilities)
-{
+LVEQNB_ReturnStatus_en LVEQNB_Init(LVEQNB_Handle_t* phInstance,
+                                   LVEQNB_Capabilities_t* pCapabilities, void* pScratch) {
+    LVEQNB_Instance_t* pInstance;
 
-    LVEQNB_Instance_t   *pInstance;
-    LVM_UINT32          MemSize;
-    INST_ALLOC          AllocMem;
-    LVM_INT32           i;
+    if ((phInstance == LVM_NULL) || (pCapabilities == LVM_NULL)) {
+        return LVEQNB_NULLADDRESS;
+    }
+
+    *phInstance = calloc(1, sizeof(*pInstance));
+    if (*phInstance == LVM_NULL) {
+        return LVEQNB_NULLADDRESS;
+    }
+    pInstance = (LVEQNB_Instance_t*)*phInstance;
 
-    /*
-     * Check for NULL pointers
-     */
-    if((phInstance == LVM_NULL) || (pMemoryTable == LVM_NULL) || (pCapabilities == LVM_NULL))
-    {
+    pInstance->Capabilities = *pCapabilities;
+    pInstance->pScratch = pScratch;
+
+    /* Equaliser Biquad Instance */
+    LVM_UINT32 MemSize = pCapabilities->MaxBands * sizeof(*(pInstance->pEQNB_FilterState_Float));
+    pInstance->pEQNB_FilterState_Float = (Biquad_FLOAT_Instance_t*)calloc(1, MemSize);
+    if (pInstance->pEQNB_FilterState_Float == LVM_NULL) {
         return LVEQNB_NULLADDRESS;
     }
 
-    /*
-     * Check the memory table for NULL pointers
-     */
-    for (i = 0; i < LVEQNB_NR_MEMORY_REGIONS; i++)
-    {
-        if (pMemoryTable->Region[i].Size!=0)
-        {
-            if (pMemoryTable->Region[i].pBaseAddress==LVM_NULL)
-            {
-                return(LVEQNB_NULLADDRESS);
-            }
-        }
+    MemSize = (pCapabilities->MaxBands * sizeof(*(pInstance->pEQNB_Taps_Float)));
+    pInstance->pEQNB_Taps_Float = (Biquad_2I_Order2_FLOAT_Taps_t*)calloc(1, MemSize);
+    if (pInstance->pEQNB_Taps_Float == LVM_NULL) {
+        return LVEQNB_NULLADDRESS;
     }
 
-    /*
-     * Set the instance handle if not already initialised
-     */
-
-    InstAlloc_Init(&AllocMem,  pMemoryTable->Region[LVEQNB_MEMREGION_INSTANCE].pBaseAddress);
-
-    if (*phInstance == LVM_NULL)
-    {
-        *phInstance = InstAlloc_AddMember(&AllocMem, sizeof(LVEQNB_Instance_t));
+    MemSize = (pCapabilities->MaxBands * sizeof(*(pInstance->pBandDefinitions)));
+    pInstance->pBandDefinitions = (LVEQNB_BandDef_t*)calloc(1, MemSize);
+    if (pInstance->pBandDefinitions == LVM_NULL) {
+        return LVEQNB_NULLADDRESS;
     }
-    pInstance =(LVEQNB_Instance_t  *)*phInstance;
-
-    /*
-     * Save the memory table in the instance structure
-     */
-    pInstance->Capabilities = *pCapabilities;
-
-    /*
-     * Save the memory table in the instance structure and
-     * set the structure pointers
-     */
-    pInstance->MemoryTable       = *pMemoryTable;
-
-    /*
-     * Allocate coefficient memory
-     */
-    InstAlloc_Init(&AllocMem,
-                   pMemoryTable->Region[LVEQNB_MEMREGION_PERSISTENT_COEF].pBaseAddress);
-
-    /* Equaliser Biquad Instance */
-    pInstance->pEQNB_FilterState_Float = (Biquad_FLOAT_Instance_t *)
-        InstAlloc_AddMember(&AllocMem, pCapabilities->MaxBands * \
-                                                       sizeof(Biquad_FLOAT_Instance_t));
-
-    /*
-     * Allocate data memory
-     */
-    InstAlloc_Init(&AllocMem,
-                   pMemoryTable->Region[LVEQNB_MEMREGION_PERSISTENT_DATA].pBaseAddress);
-
-    MemSize = (pCapabilities->MaxBands * sizeof(Biquad_2I_Order2_FLOAT_Taps_t));
-    pInstance->pEQNB_Taps_Float = (Biquad_2I_Order2_FLOAT_Taps_t *)InstAlloc_AddMember(&AllocMem,
-                                                                                       MemSize);
-    MemSize = (pCapabilities->MaxBands * sizeof(LVEQNB_BandDef_t));
-    pInstance->pBandDefinitions  = (LVEQNB_BandDef_t *)InstAlloc_AddMember(&AllocMem,
-                                                                           MemSize);
     // clear all the bands, setting their gain to 0, otherwise when applying new params,
     // it will compare against uninitialized values
     memset(pInstance->pBandDefinitions, 0, MemSize);
-    MemSize = (pCapabilities->MaxBands * sizeof(LVEQNB_BiquadType_en));
-    pInstance->pBiquadType = (LVEQNB_BiquadType_en *)InstAlloc_AddMember(&AllocMem,
-                                                                         MemSize);
 
-    /*
-     * Internally map, structure and allign scratch memory
-     */
-    InstAlloc_Init(&AllocMem,
-                   pMemoryTable->Region[LVEQNB_MEMREGION_SCRATCH].pBaseAddress);
+    MemSize = (pCapabilities->MaxBands * sizeof(*(pInstance->pBiquadType)));
+    pInstance->pBiquadType = (LVEQNB_BiquadType_en*)calloc(1, MemSize);
+    if (pInstance->pBiquadType == LVM_NULL) {
+        return LVEQNB_NULLADDRESS;
+    }
 
-    pInstance->pFastTemporary = (LVM_FLOAT *)InstAlloc_AddMember(&AllocMem,
-                                                                 sizeof(LVM_FLOAT));
+    pInstance->pFastTemporary = (LVM_FLOAT*)pScratch;
 
     /*
      * Update the instance parameters
      */
-    pInstance->Params.NBands          = 0;
-    pInstance->Params.OperatingMode   = LVEQNB_BYPASS;
+    pInstance->Params.NBands = 0;
+    pInstance->Params.OperatingMode = LVEQNB_BYPASS;
     pInstance->Params.pBandDefinition = LVM_NULL;
-    pInstance->Params.SampleRate      = LVEQNB_FS_8000;
-    pInstance->Params.SourceFormat    = LVEQNB_STEREO;
+    pInstance->Params.SampleRate = LVEQNB_FS_8000;
+    pInstance->Params.SourceFormat = LVEQNB_STEREO;
 
     /*
      * Initialise the filters
      */
-    LVEQNB_SetFilters(pInstance,                        /* Set the filter types */
+    LVEQNB_SetFilters(pInstance, /* Set the filter types */
                       &pInstance->Params);
 
-    LVEQNB_SetCoefficients(pInstance);                  /* Set the filter coefficients */
+    LVEQNB_SetCoefficients(pInstance); /* Set the filter coefficients */
 
-    LVEQNB_ClearFilterHistory(pInstance);               /* Clear the filter history */
+    LVEQNB_ClearFilterHistory(pInstance); /* Clear the filter history */
 
     /*
      * Initialise the bypass variables
      */
-    pInstance->BypassMixer.MixerStream[0].CallbackSet        = 0;
-    pInstance->BypassMixer.MixerStream[0].CallbackParam      = 0;
-    pInstance->BypassMixer.MixerStream[0].pCallbackHandle    = (void*)pInstance;
-    pInstance->BypassMixer.MixerStream[0].pCallBack          = LVEQNB_BypassMixerCallBack;
+    pInstance->BypassMixer.MixerStream[0].CallbackSet = 0;
+    pInstance->BypassMixer.MixerStream[0].CallbackParam = 0;
+    pInstance->BypassMixer.MixerStream[0].pCallbackHandle = (void*)pInstance;
+    pInstance->BypassMixer.MixerStream[0].pCallBack = LVEQNB_BypassMixerCallBack;
 
-    LVC_Mixer_Init(&pInstance->BypassMixer.MixerStream[0],0,0);
-    LVC_Mixer_SetTimeConstant(&pInstance->BypassMixer.MixerStream[0],0,LVM_FS_8000,2);
+    LVC_Mixer_Init(&pInstance->BypassMixer.MixerStream[0], 0, 0);
+    LVC_Mixer_SetTimeConstant(&pInstance->BypassMixer.MixerStream[0], 0, LVM_FS_8000, 2);
 
-    pInstance->BypassMixer.MixerStream[1].CallbackSet        = 1;
-    pInstance->BypassMixer.MixerStream[1].CallbackParam      = 0;
-    pInstance->BypassMixer.MixerStream[1].pCallbackHandle    = LVM_NULL;
-    pInstance->BypassMixer.MixerStream[1].pCallBack          = LVM_NULL;
+    pInstance->BypassMixer.MixerStream[1].CallbackSet = 1;
+    pInstance->BypassMixer.MixerStream[1].CallbackParam = 0;
+    pInstance->BypassMixer.MixerStream[1].pCallbackHandle = LVM_NULL;
+    pInstance->BypassMixer.MixerStream[1].pCallBack = LVM_NULL;
     LVC_Mixer_Init(&pInstance->BypassMixer.MixerStream[1], 0, 1.0f);
     LVC_Mixer_SetTimeConstant(&pInstance->BypassMixer.MixerStream[1], 0, LVM_FS_8000, 2);
 
-    pInstance->bInOperatingModeTransition      = LVM_FALSE;
+    pInstance->bInOperatingModeTransition = LVM_FALSE;
 
-    return(LVEQNB_SUCCESS);
+    return (LVEQNB_SUCCESS);
 }
+/****************************************************************************************/
+/*                                                                                      */
+/* FUNCTION:                LVEQNB_DeInit                                               */
+/*                                                                                      */
+/* DESCRIPTION:                                                                         */
+/*  Free the memory allocated by LVEQNB_Init, including the instance handle              */
+/*                                                                                      */
+/* PARAMETERS:                                                                          */
+/*  phInstance              Pointer to instance handle                                  */
+/*                                                                                      */
+/* NOTES:                                                                               */
+/*  1.  This function must not be interrupted by the LVEQNB_Process function            */
+/*                                                                                      */
+/****************************************************************************************/
 
+void LVEQNB_DeInit(LVEQNB_Handle_t* phInstance) {
+    LVEQNB_Instance_t* pInstance;
+    if (phInstance == LVM_NULL) {
+        return;
+    }
+    pInstance = (LVEQNB_Instance_t*)*phInstance;
+
+    /* Equaliser Biquad Instance */
+    if (pInstance->pEQNB_FilterState_Float != LVM_NULL) {
+        free(pInstance->pEQNB_FilterState_Float);
+        pInstance->pEQNB_FilterState_Float = LVM_NULL;
+    }
+    if (pInstance->pEQNB_Taps_Float != LVM_NULL) {
+        free(pInstance->pEQNB_Taps_Float);
+        pInstance->pEQNB_Taps_Float = LVM_NULL;
+    }
+    if (pInstance->pBandDefinitions != LVM_NULL) {
+        free(pInstance->pBandDefinitions);
+        pInstance->pBandDefinitions = LVM_NULL;
+    }
+    if (pInstance->pBiquadType != LVM_NULL) {
+        free(pInstance->pBiquadType);
+        pInstance->pBiquadType = LVM_NULL;
+    }
+    free(pInstance);
+    *phInstance = LVM_NULL;
+}
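
For readers following the allocation change above, here is a minimal RAII-style teardown sketch in C++. Only LVEQNB_DeInit, LVEQNB_Handle_t and LVM_NULL come from this hunk; the wrapper type and its name are illustrative, and the handle is assumed to have been produced by the library's corresponding Init routine (not shown in this hunk).

    #include "LVEQNB.h"  // LVEQNB_Handle_t, LVEQNB_DeInit, LVM_NULL

    // Illustrative owner type; not part of the library.
    struct EqualiserOwner {
        LVEQNB_Handle_t handle = LVM_NULL;

        ~EqualiserOwner() {
            if (handle != LVM_NULL) {
                // Frees the per-band filter state, taps, band definitions and
                // biquad-type arrays, then the instance itself, and writes
                // LVM_NULL back through the handle pointer.
                LVEQNB_DeInit(&handle);
            }
        }
    };
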
diff --git a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Private.h b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Private.h
index 40facfb..83a3449 100644
--- a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Private.h
+++ b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Private.h
@@ -24,7 +24,7 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-#include "LVEQNB.h"                                     /* Calling or Application layer definitions */
+#include "LVEQNB.h" /* Calling or Application layer definitions */
 #include "BIQUAD.h"
 #include "LVC_Mixer.h"
 
@@ -35,21 +35,8 @@
 /****************************************************************************************/
 
 /* General */
-#define LVEQNB_INVALID              0xFFFF              /* Invalid init parameter */
-
-/* Memory */
-#define LVEQNB_INSTANCE_ALIGN       4                   /* 32-bit alignment for instance structures */
-#define LVEQNB_DATA_ALIGN           4                   /* 32-bit alignment for structures */
-#define LVEQNB_COEF_ALIGN           4                   /* 32-bit alignment for long words */
-#ifdef SUPPORT_MC
-/* Number of buffers required for inplace processing */
-#define LVEQNB_SCRATCHBUFFERS       (LVM_MAX_CHANNELS * 2)
-#else
-#define LVEQNB_SCRATCHBUFFERS       4                   /* Number of buffers required for inplace processing */
-#endif
-#define LVEQNB_SCRATCH_ALIGN        4                   /* 32-bit alignment for long data */
-
-#define LVEQNB_BYPASS_MIXER_TC      100                 /* Bypass Mixer TC */
+#define LVEQNB_INVALID 0xFFFF      /* Invalid init parameter */
+#define LVEQNB_BYPASS_MIXER_TC 100 /* Bypass Mixer TC */
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -58,13 +45,12 @@
 /****************************************************************************************/
 
 /* Filter biquad types */
-typedef enum
-{
+typedef enum {
     LVEQNB_SinglePrecision_Float = -1,
     LVEQNB_SinglePrecision = 0,
     LVEQNB_DoublePrecision = 1,
-    LVEQNB_OutOfRange      = 2,
-    LVEQNB_BIQUADTYPE_MAX  = LVM_MAXINT_32
+    LVEQNB_OutOfRange = 2,
+    LVEQNB_BIQUADTYPE_MAX = LVM_MAXINT_32
 } LVEQNB_BiquadType_en;
 
 /****************************************************************************************/
@@ -74,28 +60,27 @@
 /****************************************************************************************/
 
 /* Instance structure */
-typedef struct
-{
+typedef struct {
     /* Public parameters */
-    LVEQNB_MemTab_t                 MemoryTable;        /* Instance memory allocation table */
-    LVEQNB_Params_t                 Params;             /* Instance parameters */
-    LVEQNB_Capabilities_t           Capabilities;       /* Instance capabilities */
+    void* pScratch;                     /* Pointer to bundle scratch buffer */
+    LVEQNB_Params_t Params;             /* Instance parameters */
+    LVEQNB_Capabilities_t Capabilities; /* Instance capabilities */
 
     /* Aligned memory pointers */
-    LVM_FLOAT                      *pFastTemporary;        /* Fast temporary data base address */
+    LVM_FLOAT* pFastTemporary; /* Fast temporary data base address */
 
-    Biquad_2I_Order2_FLOAT_Taps_t   *pEQNB_Taps_Float;        /* Equaliser Taps */
-    Biquad_FLOAT_Instance_t         *pEQNB_FilterState_Float; /* State for each filter band */
+    Biquad_2I_Order2_FLOAT_Taps_t* pEQNB_Taps_Float;  /* Equaliser Taps */
+    Biquad_FLOAT_Instance_t* pEQNB_FilterState_Float; /* State for each filter band */
 
     /* Filter definitions and call back */
-    LVM_UINT16                      NBands;             /* Number of bands */
-    LVEQNB_BandDef_t                *pBandDefinitions;  /* Filter band definitions */
-    LVEQNB_BiquadType_en            *pBiquadType;       /* Filter biquad types */
+    LVM_UINT16 NBands;                  /* Number of bands */
+    LVEQNB_BandDef_t* pBandDefinitions; /* Filter band definitions */
+    LVEQNB_BiquadType_en* pBiquadType;  /* Filter biquad types */
 
     /* Bypass variable */
-    LVMixer3_2St_FLOAT_st     BypassMixer;
+    LVMixer3_2St_FLOAT_st BypassMixer;
 
-    LVM_INT16               bInOperatingModeTransition; /* Operating mode transition flag */
+    LVM_INT16 bInOperatingModeTransition; /* Operating mode transition flag */
 
 } LVEQNB_Instance_t;
 
@@ -105,17 +90,15 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-void    LVEQNB_SetFilters(LVEQNB_Instance_t   *pInstance,
-                          LVEQNB_Params_t     *pParams);
+void LVEQNB_SetFilters(LVEQNB_Instance_t* pInstance, LVEQNB_Params_t* pParams);
 
-void    LVEQNB_SetCoefficients(LVEQNB_Instance_t    *pInstance);
+void LVEQNB_SetCoefficients(LVEQNB_Instance_t* pInstance);
 
-void    LVEQNB_ClearFilterHistory(LVEQNB_Instance_t *pInstance);
-LVEQNB_ReturnStatus_en LVEQNB_SinglePrecCoefs(LVM_UINT16        Fs,
-                                              LVEQNB_BandDef_t  *pFilterDefinition,
-                                              PK_FLOAT_Coefs_t    *pCoefficients);
+void LVEQNB_ClearFilterHistory(LVEQNB_Instance_t* pInstance);
+LVEQNB_ReturnStatus_en LVEQNB_SinglePrecCoefs(LVM_UINT16 Fs, LVEQNB_BandDef_t* pFilterDefinition,
+                                              PK_FLOAT_Coefs_t* pCoefficients);
 
-LVM_INT32 LVEQNB_BypassMixerCallBack (void* hInstance, void *pGeneralPurpose, LVM_INT16 CallbackParam);
+LVM_INT32 LVEQNB_BypassMixerCallBack(void* hInstance, void* pGeneralPurpose,
+                                     LVM_INT16 CallbackParam);
 
 #endif /* __LVEQNB_PRIVATE_H__ */
-
diff --git a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Process.cpp b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Process.cpp
index 65eff53..d2a26db 100644
--- a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Process.cpp
+++ b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Process.cpp
@@ -34,7 +34,7 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-#define SHIFT       13
+#define SHIFT 13
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -58,89 +58,65 @@
 /* NOTES:                                                                               */
 /*                                                                                      */
 /****************************************************************************************/
-LVEQNB_ReturnStatus_en LVEQNB_Process(LVEQNB_Handle_t       hInstance,
-                                      const LVM_FLOAT       *pInData,
-                                      LVM_FLOAT             *pOutData,
-                                      const LVM_UINT16      NrFrames)
-{                                     // updated to use samples = frames * channels.
-    LVEQNB_Instance_t   *pInstance = (LVEQNB_Instance_t  *)hInstance;
+LVEQNB_ReturnStatus_en LVEQNB_Process(
+        LVEQNB_Handle_t hInstance, const LVM_FLOAT* pInData, LVM_FLOAT* pOutData,
+        const LVM_UINT16 NrFrames) {  // updated to use samples = frames * channels.
+    LVEQNB_Instance_t* pInstance = (LVEQNB_Instance_t*)hInstance;
 
-#ifdef SUPPORT_MC
     // Mono passed in as stereo
-    const LVM_INT32 NrChannels = pInstance->Params.NrChannels == 1
-        ? 2 : pInstance->Params.NrChannels;
-#else
-    const LVM_INT32 NrChannels = 2; // FCC_2
-#endif
+    const LVM_INT32 NrChannels =
+            pInstance->Params.NrChannels == 1 ? 2 : pInstance->Params.NrChannels;
     const LVM_INT32 NrSamples = NrChannels * NrFrames;
 
-     /* Check for NULL pointers */
-    if((hInstance == LVM_NULL) || (pInData == LVM_NULL) || (pOutData == LVM_NULL))
-    {
+    /* Check for NULL pointers */
+    if ((hInstance == LVM_NULL) || (pInData == LVM_NULL) || (pOutData == LVM_NULL)) {
         return LVEQNB_NULLADDRESS;
     }
 
     /* Check if the input and output data buffers are 32-bit aligned */
-    if ((((uintptr_t)pInData % 4) != 0) || (((uintptr_t)pOutData % 4) != 0))
-    {
+    if ((((uintptr_t)pInData % 4) != 0) || (((uintptr_t)pOutData % 4) != 0)) {
         return LVEQNB_ALIGNMENTERROR;
     }
 
-    LVM_FLOAT * const pScratch = (LVM_FLOAT *)pInstance->pFastTemporary;
+    LVM_FLOAT* const pScratch = (LVM_FLOAT*)pInstance->pFastTemporary;
 
     /*
-    * Check the number of frames is not too large
-    */
-    if (NrFrames > pInstance->Capabilities.MaxBlockSize)
-    {
+     * Check the number of frames is not too large
+     */
+    if (NrFrames > pInstance->Capabilities.MaxBlockSize) {
         return LVEQNB_TOOMANYSAMPLES;
     }
 
-    if (pInstance->Params.OperatingMode == LVEQNB_ON)
-    {
+    if (pInstance->Params.OperatingMode == LVEQNB_ON) {
         /*
         * Copy input data into the scratch buffer
          */
-        Copy_Float(pInData,     /* Source */
-                   pScratch,    /* Destination */
+        Copy_Float(pInData,  /* Source */
+                   pScratch, /* Destination */
                    (LVM_INT16)NrSamples);
 
         /*
         * For each section execute the filter unless the gain is 0dB
          */
-        if (pInstance->NBands != 0)
-        {
-            for (LVM_UINT16 i = 0; i < pInstance->NBands; i++)
-            {
+        if (pInstance->NBands != 0) {
+            for (LVM_UINT16 i = 0; i < pInstance->NBands; i++) {
                 /*
                  * Check if band is non-zero dB gain
                  */
-                if (pInstance->pBandDefinitions[i].Gain != 0)
-                {
+                if (pInstance->pBandDefinitions[i].Gain != 0) {
                     /*
                      * Get the address of the biquad instance
                      */
-                    Biquad_FLOAT_Instance_t *pBiquad = &pInstance->pEQNB_FilterState_Float[i];
+                    Biquad_FLOAT_Instance_t* pBiquad = &pInstance->pEQNB_FilterState_Float[i];
 
                     /*
                      * Select single or double precision as required
                      */
-                    switch (pInstance->pBiquadType[i])
-                    {
-                        case LVEQNB_SinglePrecision_Float:
-                        {
-#ifdef SUPPORT_MC
-                            PK_Mc_D32F32C14G11_TRC_WRA_01(pBiquad,
-                                                          pScratch,
-                                                          pScratch,
+                    switch (pInstance->pBiquadType[i]) {
+                        case LVEQNB_SinglePrecision_Float: {
+                            PK_Mc_D32F32C14G11_TRC_WRA_01(pBiquad, pScratch, pScratch,
                                                           (LVM_INT16)NrFrames,
                                                           (LVM_INT16)NrChannels);
-#else
-                            PK_2I_D32F32C14G11_TRC_WRA_01(pBiquad,
-                                                          pScratch,
-                                                          pScratch,
-                                                          (LVM_INT16)NrFrames);
-#endif
                             break;
                         }
                         default:
@@ -150,44 +126,27 @@
             }
         }
 
-        if(pInstance->bInOperatingModeTransition == LVM_TRUE){
-#ifdef SUPPORT_MC
-            LVC_MixSoft_2Mc_D16C31_SAT(&pInstance->BypassMixer,
-                                       pScratch,
-                                       pInData,
-                                       pScratch,
-                                       (LVM_INT16)NrFrames,
-                                       (LVM_INT16)NrChannels);
-#else
-            LVC_MixSoft_2St_D16C31_SAT(&pInstance->BypassMixer,
-                                       pScratch,
-                                       pInData,
-                                       pScratch,
-                                       (LVM_INT16)NrSamples);
-#endif
+        if (pInstance->bInOperatingModeTransition == LVM_TRUE) {
+            LVC_MixSoft_2Mc_D16C31_SAT(&pInstance->BypassMixer, pScratch, pInData, pScratch,
+                                       (LVM_INT16)NrFrames, (LVM_INT16)NrChannels);
             // duplicates the Copy_Float call in the else clause below
-            Copy_Float(pScratch,                         /* Source */
-                       pOutData,                         /* Destination */
-                       (LVM_INT16)NrSamples);            /* All channel samples */
-        }
-        else{
+            Copy_Float(pScratch,              /* Source */
+                       pOutData,              /* Destination */
+                       (LVM_INT16)NrSamples); /* All channel samples */
+        } else {
             Copy_Float(pScratch,              /* Source */
                        pOutData,              /* Destination */
                        (LVM_INT16)NrSamples); /* All channel samples */
         }
-    }
-    else
-    {
+    } else {
         /*
          * Mode is OFF so copy the data if necessary
          */
-        if (pInData != pOutData)
-        {
-            Copy_Float(pInData,                          /* Source */
-                       pOutData,                         /* Destination */
-                       (LVM_INT16)NrSamples);            /* All channel samples */
+        if (pInData != pOutData) {
+            Copy_Float(pInData,               /* Source */
+                       pOutData,              /* Destination */
+                       (LVM_INT16)NrSamples); /* All channel samples */
         }
     }
     return LVEQNB_SUCCESS;
-
 }
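
A caller-side sketch of the preconditions LVEQNB_Process enforces in the hunk above (NULL checks, 32-bit buffer alignment, a block-size limit, and interleaved data with samples = frames * channels, mono presented as two channels). The wrapper name is illustrative; the types and return codes are the ones declared in LVEQNB.h.

    #include <cstdint>

    #include "LVEQNB.h"  // LVEQNB_Process, LVM_FLOAT, return codes

    // Illustrative helper: forwards one interleaved block to the equaliser.
    LVEQNB_ReturnStatus_en ProcessBlock(LVEQNB_Handle_t hEq,
                                        const LVM_FLOAT* pIn,  // interleaved input
                                        LVM_FLOAT* pOut,       // interleaved output
                                        LVM_UINT16 nrFrames) {
        // LVEQNB_Process itself returns LVEQNB_NULLADDRESS for NULL pointers,
        // LVEQNB_ALIGNMENTERROR for unaligned buffers and LVEQNB_TOOMANYSAMPLES
        // when nrFrames exceeds Capabilities.MaxBlockSize; checking alignment
        // up front only makes the failure mode explicit at the call site.
        if (((uintptr_t)pIn % 4) != 0 || ((uintptr_t)pOut % 4) != 0) {
            return LVEQNB_ALIGNMENTERROR;
        }
        return LVEQNB_Process(hEq, pIn, pOut, nrFrames);
    }
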
diff --git a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Tables.cpp b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Tables.cpp
index 0628114..d79d7c9 100644
--- a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Tables.cpp
+++ b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Tables.cpp
@@ -35,20 +35,9 @@
  * Sample rate table for converting between the enumerated type and the actual
  * frequency
  */
-const LVM_UINT32    LVEQNB_SampleRateTab[] = {8000,                    /* 8kS/s  */
-                                              11025,
-                                              12000,
-                                              16000,
-                                              22050,
-                                              24000,
-                                              32000,
-                                              44100,
-                                              48000,
-                                              88200,
-                                              96000,
-                                              176400,
-                                              192000
-};
+const LVM_UINT32 LVEQNB_SampleRateTab[] = {8000, /* 8kS/s  */
+                                           11025, 12000, 16000, 22050, 24000,  32000,
+                                           44100, 48000, 88200, 96000, 176400, 192000};
 
 /************************************************************************************/
 /*                                                                                  */
@@ -59,74 +48,34 @@
 /*
  * Table for 2 * Pi / Fs
  */
-const LVM_FLOAT     LVEQNB_TwoPiOnFsTable[] = {LVEQNB_2PiOn_8000,      /* 8kS/s */
-                                               LVEQNB_2PiOn_11025,
-                                               LVEQNB_2PiOn_12000,
-                                               LVEQNB_2PiOn_16000,
-                                               LVEQNB_2PiOn_22050,
-                                               LVEQNB_2PiOn_24000,
-                                               LVEQNB_2PiOn_32000,
-                                               LVEQNB_2PiOn_44100,
-                                               LVEQNB_2PiOn_48000
-                                              ,LVEQNB_2PiOn_88200
-                                              ,LVEQNB_2PiOn_96000
-                                              ,LVEQNB_2PiOn_176400
-                                              ,LVEQNB_2PiOn_192000
-                                               };
+const LVM_FLOAT LVEQNB_TwoPiOnFsTable[] = {
+        LVEQNB_2PiOn_8000, /* 8kS/s */
+        LVEQNB_2PiOn_11025, LVEQNB_2PiOn_12000, LVEQNB_2PiOn_16000,  LVEQNB_2PiOn_22050,
+        LVEQNB_2PiOn_24000, LVEQNB_2PiOn_32000, LVEQNB_2PiOn_44100,  LVEQNB_2PiOn_48000,
+        LVEQNB_2PiOn_88200, LVEQNB_2PiOn_96000, LVEQNB_2PiOn_176400, LVEQNB_2PiOn_192000};
 
 /*
  * Gain table
  */
-const LVM_FLOAT     LVEQNB_GainTable[] = {LVEQNB_Gain_Neg15_dB,        /* -15dB gain */
-                                          LVEQNB_Gain_Neg14_dB,
-                                          LVEQNB_Gain_Neg13_dB,
-                                          LVEQNB_Gain_Neg12_dB,
-                                          LVEQNB_Gain_Neg11_dB,
-                                          LVEQNB_Gain_Neg10_dB,
-                                          LVEQNB_Gain_Neg9_dB,
-                                          LVEQNB_Gain_Neg8_dB,
-                                          LVEQNB_Gain_Neg7_dB,
-                                          LVEQNB_Gain_Neg6_dB,
-                                          LVEQNB_Gain_Neg5_dB,
-                                          LVEQNB_Gain_Neg4_dB,
-                                          LVEQNB_Gain_Neg3_dB,
-                                          LVEQNB_Gain_Neg2_dB,
-                                          LVEQNB_Gain_Neg1_dB,
-                                          LVEQNB_Gain_0_dB,            /* 0dB gain */
-                                          LVEQNB_Gain_1_dB,
-                                          LVEQNB_Gain_2_dB,
-                                          LVEQNB_Gain_3_dB,
-                                          LVEQNB_Gain_4_dB,
-                                          LVEQNB_Gain_5_dB,
-                                          LVEQNB_Gain_6_dB,
-                                          LVEQNB_Gain_7_dB,
-                                          LVEQNB_Gain_8_dB,
-                                          LVEQNB_Gain_9_dB,
-                                          LVEQNB_Gain_10_dB,
-                                          LVEQNB_Gain_11_dB,
-                                          LVEQNB_Gain_12_dB,
-                                          LVEQNB_Gain_13_dB,
-                                          LVEQNB_Gain_14_dB,
-                                          LVEQNB_Gain_15_dB};          /* +15dB gain */
+const LVM_FLOAT LVEQNB_GainTable[] = {
+        LVEQNB_Gain_Neg15_dB, /* -15dB gain */
+        LVEQNB_Gain_Neg14_dB, LVEQNB_Gain_Neg13_dB, LVEQNB_Gain_Neg12_dB, LVEQNB_Gain_Neg11_dB,
+        LVEQNB_Gain_Neg10_dB, LVEQNB_Gain_Neg9_dB,  LVEQNB_Gain_Neg8_dB,  LVEQNB_Gain_Neg7_dB,
+        LVEQNB_Gain_Neg6_dB,  LVEQNB_Gain_Neg5_dB,  LVEQNB_Gain_Neg4_dB,  LVEQNB_Gain_Neg3_dB,
+        LVEQNB_Gain_Neg2_dB,  LVEQNB_Gain_Neg1_dB,  LVEQNB_Gain_0_dB, /* 0dB gain */
+        LVEQNB_Gain_1_dB,     LVEQNB_Gain_2_dB,     LVEQNB_Gain_3_dB,     LVEQNB_Gain_4_dB,
+        LVEQNB_Gain_5_dB,     LVEQNB_Gain_6_dB,     LVEQNB_Gain_7_dB,     LVEQNB_Gain_8_dB,
+        LVEQNB_Gain_9_dB,     LVEQNB_Gain_10_dB,    LVEQNB_Gain_11_dB,    LVEQNB_Gain_12_dB,
+        LVEQNB_Gain_13_dB,    LVEQNB_Gain_14_dB,    LVEQNB_Gain_15_dB}; /* +15dB gain */
 /*
  * D table for 100 / (Gain + 1)
  */
-const LVM_FLOAT    LVEQNB_DTable[] = {LVEQNB_100D_Neg15_dB,            /* -15dB gain */
-                                      LVEQNB_100D_Neg14_dB,
-                                      LVEQNB_100D_Neg13_dB,
-                                      LVEQNB_100D_Neg12_dB,
-                                      LVEQNB_100D_Neg11_dB,
-                                      LVEQNB_100D_Neg10_dB,
-                                      LVEQNB_100D_Neg9_dB,
-                                      LVEQNB_100D_Neg8_dB,
-                                      LVEQNB_100D_Neg7_dB,
-                                      LVEQNB_100D_Neg6_dB,
-                                      LVEQNB_100D_Neg5_dB,
-                                      LVEQNB_100D_Neg4_dB,
-                                      LVEQNB_100D_Neg3_dB,
-                                      LVEQNB_100D_Neg2_dB,
-                                      LVEQNB_100D_Neg1_dB,
-                                      LVEQNB_100D_0_dB};               /* 0dB gain */
+const LVM_FLOAT LVEQNB_DTable[] = {
+        LVEQNB_100D_Neg15_dB, /* -15dB gain */
+        LVEQNB_100D_Neg14_dB, LVEQNB_100D_Neg13_dB, LVEQNB_100D_Neg12_dB, LVEQNB_100D_Neg11_dB,
+        LVEQNB_100D_Neg10_dB, LVEQNB_100D_Neg9_dB,  LVEQNB_100D_Neg8_dB,  LVEQNB_100D_Neg7_dB,
+        LVEQNB_100D_Neg6_dB,  LVEQNB_100D_Neg5_dB,  LVEQNB_100D_Neg4_dB,  LVEQNB_100D_Neg3_dB,
+        LVEQNB_100D_Neg2_dB,  LVEQNB_100D_Neg1_dB,  LVEQNB_100D_0_dB}; /* 0dB gain */
 /************************************************************************************/
 /*                                                                                  */
 /*    Filter polynomial coefficients                                                */
@@ -142,13 +91,13 @@
  * a range of 0 to Pi. The output is in the range 32767 to -32768 representing the range
  * +1.0 to -1.0
  */
-const LVM_INT16     LVEQNB_CosCoef[] = {3,                             /* Shifts */
-                                        4096,                          /* a0 */
-                                        -36,                           /* a1 */
-                                        -19725,                        /* a2 */
-                                        -2671,                         /* a3 */
-                                        23730,                         /* a4 */
-                                        -9490};                        /* a5 */
+const LVM_INT16 LVEQNB_CosCoef[] = {3,      /* Shifts */
+                                    4096,   /* a0 */
+                                    -36,    /* a1 */
+                                    -19725, /* a2 */
+                                    -2671,  /* a3 */
+                                    23730,  /* a4 */
+                                    -9490}; /* a5 */
 
 /*
  * Coefficients for calculating the cosine error with the equation:
@@ -164,9 +113,8 @@
  *
  * Cos(x) = 1.0 - CosErr(x)
  */
-const LVM_INT16     LVEQNB_DPCosCoef[] = {1,                           /* Shifts */
-                                          0,                           /* a0 */
-                                          -6,                          /* a1 */
-                                          16586,                       /* a2 */
-                                          -44};                        /* a3 */
-
+const LVM_INT16 LVEQNB_DPCosCoef[] = {1,     /* Shifts */
+                                      0,     /* a0 */
+                                      -6,    /* a1 */
+                                      16586, /* a2 */
+                                      -44};  /* a3 */
diff --git a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Tables.h b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Tables.h
index a71eeb9..ab51196 100644
--- a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Tables.h
+++ b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Tables.h
@@ -27,7 +27,7 @@
  * Sample rate table for converting between the enumerated type and the actual
  * frequency
  */
-extern const LVM_UINT32    LVEQNB_SampleRateTab[];
+extern const LVM_UINT32 LVEQNB_SampleRateTab[];
 
 /************************************************************************************/
 /*                                                                                  */
@@ -38,17 +38,17 @@
 /*
  * Table for 2 * Pi / Fs
  */
-extern const LVM_FLOAT     LVEQNB_TwoPiOnFsTable[];
+extern const LVM_FLOAT LVEQNB_TwoPiOnFsTable[];
 
 /*
  * Gain table
  */
-extern const LVM_FLOAT     LVEQNB_GainTable[];
+extern const LVM_FLOAT LVEQNB_GainTable[];
 
 /*
  * D table for 100 / (Gain + 1)
  */
-extern const LVM_FLOAT     LVEQNB_DTable[];
+extern const LVM_FLOAT LVEQNB_DTable[];
 
 /************************************************************************************/
 /*                                                                                  */
@@ -65,7 +65,7 @@
  * a range of 0 to Pi. The output is in the range 32767 to -32768 representing the range
  * +1.0 to -1.0
  */
-extern const LVM_INT16     LVEQNB_CosCoef[];
+extern const LVM_INT16 LVEQNB_CosCoef[];
 
 /*
  * Coefficients for calculating the cosine error with the equation:
@@ -81,6 +81,6 @@
  *
  * Cos(x) = 1.0 - CosErr(x)
  */
-extern const LVM_INT16     LVEQNB_DPCosCoef[];
+extern const LVM_INT16 LVEQNB_DPCosCoef[];
 
 #endif /* __LVEQNB_TABLES_H__ */
diff --git a/media/libeffects/lvm/lib/Reverb/lib/LVREV.h b/media/libeffects/lvm/lib/Reverb/lib/LVREV.h
index 8c91ea9..484787a 100644
--- a/media/libeffects/lvm/lib/Reverb/lib/LVREV.h
+++ b/media/libeffects/lvm/lib/Reverb/lib/LVREV.h
@@ -41,11 +41,11 @@
 /*                                                                                      */
 /****************************************************************************************/
 /* General */
-#define LVREV_BLOCKSIZE_MULTIPLE                1       /* Processing block size multiple */
-#define LVREV_MAX_T60                        7000       /* Maximum decay time is 7000ms */
+#define LVREV_BLOCKSIZE_MULTIPLE 1 /* Processing block size multiple */
+#define LVREV_MAX_T60 7000         /* Maximum decay time is 7000ms */
 
 /* Memory table*/
-#define LVREV_NR_MEMORY_REGIONS                 4       /* Number of memory regions */
+#define LVREV_NR_MEMORY_REGIONS 4 /* Number of memory regions */
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -53,24 +53,22 @@
 /*                                                                                      */
 /****************************************************************************************/
 /* Instance handle */
-typedef void *LVREV_Handle_t;
+typedef void* LVREV_Handle_t;
 
 /* Status return values */
-typedef enum
-{
-    LVREV_SUCCESS            = 0,                       /* Successful return from a routine */
-    LVREV_NULLADDRESS        = 1,                       /* NULL allocation address */
-    LVREV_OUTOFRANGE         = 2,                       /* Out of range control parameter */
-    LVREV_INVALIDNUMSAMPLES  = 3,                       /* Invalid number of samples */
+typedef enum {
+    LVREV_SUCCESS = 0,           /* Successful return from a routine */
+    LVREV_NULLADDRESS = 1,       /* NULL allocation address */
+    LVREV_OUTOFRANGE = 2,        /* Out of range control parameter */
+    LVREV_INVALIDNUMSAMPLES = 3, /* Invalid number of samples */
     LVREV_RETURNSTATUS_DUMMY = LVM_MAXENUM
 } LVREV_ReturnStatus_en;
 
 /* Reverb delay lines */
-typedef enum
-{
-    LVREV_DELAYLINES_1     = 1,                         /* One delay line */
-    LVREV_DELAYLINES_2     = 2,                         /* Two delay lines */
-    LVREV_DELAYLINES_4     = 4,                         /* Four delay lines */
+typedef enum {
+    LVREV_DELAYLINES_1 = 1, /* One delay line */
+    LVREV_DELAYLINES_2 = 2, /* Two delay lines */
+    LVREV_DELAYLINES_4 = 4, /* Four delay lines */
     LVREV_DELAYLINES_DUMMY = LVM_MAXENUM
 } LVREV_NumDelayLines_en;
 
@@ -81,40 +79,37 @@
 /****************************************************************************************/
 
 /* Memory table containing the region definitions */
-typedef struct
-{
-    LVM_MemoryRegion_st        Region[LVREV_NR_MEMORY_REGIONS];  /* One definition for each region */
+typedef struct {
+    LVM_MemoryRegion_st Region[LVREV_NR_MEMORY_REGIONS]; /* One definition for each region */
 } LVREV_MemoryTable_st;
 
 /* Control Parameter structure */
-typedef struct
-{
+typedef struct {
     /* General parameters */
-    LVM_Mode_en                 OperatingMode;          /* Operating mode */
-    LVM_Fs_en                   SampleRate;             /* Sample rate */
-    LVM_Format_en               SourceFormat;           /* Source data format */
+    LVM_Mode_en OperatingMode;  /* Operating mode */
+    LVM_Fs_en SampleRate;       /* Sample rate */
+    LVM_Format_en SourceFormat; /* Source data format */
 
     /* Parameters for REV */
-    LVM_UINT16                  Level;                  /* Level, 0 to 100 representing percentage of reverb */
-    LVM_UINT32                  LPF;                    /* Low pass filter, in Hz */
-    LVM_UINT32                  HPF;                    /* High pass filter, in Hz */
+    LVM_UINT16 Level; /* Level, 0 to 100 representing percentage of reverb */
+    LVM_UINT32 LPF;   /* Low pass filter, in Hz */
+    LVM_UINT32 HPF;   /* High pass filter, in Hz */
 
-    LVM_UINT16                  T60;                    /* Decay time constant, in ms */
-    LVM_UINT16                  Density;                /* Echo density, 0 to 100 for minimum to maximum density */
-    LVM_UINT16                  Damping;                /* Damping */
-    LVM_UINT16                  RoomSize;               /* Simulated room size, 1 to 100 for minimum to maximum size */
+    LVM_UINT16 T60;      /* Decay time constant, in ms */
+    LVM_UINT16 Density;  /* Echo density, 0 to 100 for minimum to maximum density */
+    LVM_UINT16 Damping;  /* Damping */
+    LVM_UINT16 RoomSize; /* Simulated room size, 1 to 100 for minimum to maximum size */
 
 } LVREV_ControlParams_st;
 
 /* Instance Parameter structure */
-typedef struct
-{
+typedef struct {
     /* General */
-    LVM_UINT16                  MaxBlockSize;           /* Maximum processing block size */
+    LVM_UINT16 MaxBlockSize; /* Maximum processing block size */
 
     /* Reverb */
-    LVM_Format_en               SourceFormat;           /* Source data formats to support */
-    LVREV_NumDelayLines_en      NumDelays;              /* The number of delay lines, 1, 2 or 4 */
+    LVM_Format_en SourceFormat;       /* Source data formats to support */
+    LVREV_NumDelayLines_en NumDelays; /* The number of delay lines, 1, 2 or 4 */
 
 } LVREV_InstanceParams_st;
 
@@ -160,9 +155,9 @@
 /*  1.  This function may be interrupted by the LVREV_Process function                  */
 /*                                                                                      */
 /****************************************************************************************/
-LVREV_ReturnStatus_en LVREV_GetMemoryTable(LVREV_Handle_t           hInstance,
-                                           LVREV_MemoryTable_st     *pMemoryTable,
-                                           LVREV_InstanceParams_st  *pInstanceParams);
+LVREV_ReturnStatus_en LVREV_GetMemoryTable(LVREV_Handle_t hInstance,
+                                           LVREV_MemoryTable_st* pMemoryTable,
+                                           LVREV_InstanceParams_st* pInstanceParams);
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -190,9 +185,9 @@
 /* NOTES:                                                                               */
 /*                                                                                      */
 /****************************************************************************************/
-LVREV_ReturnStatus_en LVREV_GetInstanceHandle(LVREV_Handle_t            *phInstance,
-                                              LVREV_MemoryTable_st      *pMemoryTable,
-                                              LVREV_InstanceParams_st   *pInstanceParams);
+LVREV_ReturnStatus_en LVREV_GetInstanceHandle(LVREV_Handle_t* phInstance,
+                                              LVREV_MemoryTable_st* pMemoryTable,
+                                              LVREV_InstanceParams_st* pInstanceParams);
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -214,8 +209,8 @@
 /*  1.  This function may be interrupted by the LVREV_Process function                  */
 /*                                                                                      */
 /****************************************************************************************/
-LVREV_ReturnStatus_en LVREV_GetControlParameters(LVREV_Handle_t           hInstance,
-                                                 LVREV_ControlParams_st   *pControlParams);
+LVREV_ReturnStatus_en LVREV_GetControlParameters(LVREV_Handle_t hInstance,
+                                                 LVREV_ControlParams_st* pControlParams);
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -236,8 +231,8 @@
 /*  1.  This function may be interrupted by the LVREV_Process function                  */
 /*                                                                                      */
 /****************************************************************************************/
-LVREV_ReturnStatus_en LVREV_SetControlParameters(LVREV_Handle_t           hInstance,
-                                                 LVREV_ControlParams_st   *pNewParams);
+LVREV_ReturnStatus_en LVREV_SetControlParameters(LVREV_Handle_t hInstance,
+                                                 LVREV_ControlParams_st* pNewParams);
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -257,7 +252,7 @@
 /*  1. This function must not be interrupted by the LVREV_Process function              */
 /*                                                                                      */
 /****************************************************************************************/
-LVREV_ReturnStatus_en LVREV_ClearAudioBuffers(LVREV_Handle_t  hInstance);
+LVREV_ReturnStatus_en LVREV_ClearAudioBuffers(LVREV_Handle_t hInstance);
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -280,11 +275,9 @@
 /*  1. The input and output buffers must be 32-bit aligned                              */
 /*                                                                                      */
 /****************************************************************************************/
-LVREV_ReturnStatus_en LVREV_Process(LVREV_Handle_t      hInstance,
-                                    const LVM_FLOAT     *pInData,
-                                    LVM_FLOAT           *pOutData,
-                                    const LVM_UINT16          NumSamples);
+LVREV_ReturnStatus_en LVREV_Process(LVREV_Handle_t hInstance, const LVM_FLOAT* pInData,
+                                    LVM_FLOAT* pOutData, const LVM_UINT16 NumSamples);
 
-#endif      /* __LVREV_H__ */
+#endif /* __LVREV_H__ */
 
 /* End of file */
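
A minimal control-update sketch using only the calls declared in this header; the helper name and the particular parameter values are illustrative, and the handle is assumed to come from LVREV_GetInstanceHandle.

    #include "LVREV.h"

    // Illustrative helper: read-modify-write of the reverb control block so
    // fields that are not touched (mode, sample rate, filters) stay intact.
    LVREV_ReturnStatus_en SetMediumRoom(LVREV_Handle_t hRev) {
        LVREV_ControlParams_st params;

        LVREV_ReturnStatus_en status = LVREV_GetControlParameters(hRev, &params);
        if (status != LVREV_SUCCESS) {
            return status;
        }

        params.RoomSize = 50; /* 1 to 100, mid-sized room */
        params.T60 = 1500;    /* decay time in ms, bounded by LVREV_MAX_T60 (7000) */
        params.Level = 40;    /* wet level, 0 to 100 percent */

        return LVREV_SetControlParameters(hRev, &params);
    }
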
diff --git a/media/libeffects/lvm/lib/Reverb/src/LVREV_ApplyNewSettings.cpp b/media/libeffects/lvm/lib/Reverb/src/LVREV_ApplyNewSettings.cpp
index 1f0d39b..737ef01 100644
--- a/media/libeffects/lvm/lib/Reverb/src/LVREV_ApplyNewSettings.cpp
+++ b/media/libeffects/lvm/lib/Reverb/src/LVREV_ApplyNewSettings.cpp
@@ -41,160 +41,134 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-LVREV_ReturnStatus_en LVREV_ApplyNewSettings (LVREV_Instance_st     *pPrivate)
-{
-
-    LVM_Mode_en  OperatingMode;
-    LVM_INT32    NumberOfDelayLines;
+LVREV_ReturnStatus_en LVREV_ApplyNewSettings(LVREV_Instance_st* pPrivate) {
+    LVM_Mode_en OperatingMode;
+    LVM_INT32 NumberOfDelayLines;
 
     /* Check for NULL pointer */
-    if(pPrivate == LVM_NULL)
-    {
+    if (pPrivate == LVM_NULL) {
         return LVREV_NULLADDRESS;
     }
 
     OperatingMode = pPrivate->NewParams.OperatingMode;
 
-    if(pPrivate->InstanceParams.NumDelays == LVREV_DELAYLINES_4)
-    {
+    if (pPrivate->InstanceParams.NumDelays == LVREV_DELAYLINES_4) {
         NumberOfDelayLines = 4;
-    }
-    else if(pPrivate->InstanceParams.NumDelays == LVREV_DELAYLINES_2)
-    {
+    } else if (pPrivate->InstanceParams.NumDelays == LVREV_DELAYLINES_2) {
         NumberOfDelayLines = 2;
-    }
-    else
-    {
+    } else {
         NumberOfDelayLines = 1;
     }
 
     /*
      * Update the high pass filter coefficients
      */
-    if((pPrivate->NewParams.HPF        != pPrivate->CurrentParams.HPF)        ||
-       (pPrivate->NewParams.SampleRate != pPrivate->CurrentParams.SampleRate) ||
-       (pPrivate->bFirstControl        == LVM_TRUE))
-    {
-        LVM_FLOAT       Omega;
-        FO_FLOAT_Coefs_t  Coeffs;
+    if ((pPrivate->NewParams.HPF != pPrivate->CurrentParams.HPF) ||
+        (pPrivate->NewParams.SampleRate != pPrivate->CurrentParams.SampleRate) ||
+        (pPrivate->bFirstControl == LVM_TRUE)) {
+        LVM_FLOAT Omega;
+        FO_FLOAT_Coefs_t Coeffs;
 
         Omega = LVM_GetOmega(pPrivate->NewParams.HPF, pPrivate->NewParams.SampleRate);
         LVM_FO_HPF(Omega, &Coeffs);
-        FO_1I_D32F32Cll_TRC_WRA_01_Init( &pPrivate->pFastCoef->HPCoefs,
-                                         &pPrivate->pFastData->HPTaps, &Coeffs);
-        LoadConst_Float(0,
-                (LVM_FLOAT *)&pPrivate->pFastData->HPTaps,
+        FO_1I_D32F32Cll_TRC_WRA_01_Init(&pPrivate->pFastCoef->HPCoefs, &pPrivate->pFastData->HPTaps,
+                                        &Coeffs);
+        LoadConst_Float(0, (LVM_FLOAT*)&pPrivate->pFastData->HPTaps,
                         sizeof(Biquad_1I_Order1_FLOAT_Taps_t) / sizeof(LVM_FLOAT));
     }
 
     /*
      * Update the low pass filter coefficients
      */
-    if((pPrivate->NewParams.LPF        != pPrivate->CurrentParams.LPF)        ||
-       (pPrivate->NewParams.SampleRate != pPrivate->CurrentParams.SampleRate) ||
-       (pPrivate->bFirstControl        == LVM_TRUE))
-    {
-        LVM_FLOAT       Omega;
-        FO_FLOAT_Coefs_t  Coeffs;
+    if ((pPrivate->NewParams.LPF != pPrivate->CurrentParams.LPF) ||
+        (pPrivate->NewParams.SampleRate != pPrivate->CurrentParams.SampleRate) ||
+        (pPrivate->bFirstControl == LVM_TRUE)) {
+        LVM_FLOAT Omega;
+        FO_FLOAT_Coefs_t Coeffs;
 
         Coeffs.A0 = 1;
         Coeffs.A1 = 0;
         Coeffs.B1 = 0;
-        if(pPrivate->NewParams.LPF <= (LVM_FsTable[pPrivate->NewParams.SampleRate] >> 1))
-        {
+        if (pPrivate->NewParams.LPF <= (LVM_FsTable[pPrivate->NewParams.SampleRate] >> 1)) {
             Omega = LVM_GetOmega(pPrivate->NewParams.LPF, pPrivate->NewParams.SampleRate);
 
             /*
             * Do not apply filter if w = 2*pi*fc/fs >= 2.9
              */
-            if(Omega <= (LVM_FLOAT)LVREV_2_9_INQ29)
-            {
+            if (Omega <= (LVM_FLOAT)LVREV_2_9_INQ29) {
                 LVM_FO_LPF(Omega, &Coeffs);
             }
         }
-        FO_1I_D32F32Cll_TRC_WRA_01_Init( &pPrivate->pFastCoef->LPCoefs,
-                                         &pPrivate->pFastData->LPTaps, &Coeffs);
-        LoadConst_Float(0,
-                (LVM_FLOAT *)&pPrivate->pFastData->LPTaps,
+        FO_1I_D32F32Cll_TRC_WRA_01_Init(&pPrivate->pFastCoef->LPCoefs, &pPrivate->pFastData->LPTaps,
+                                        &Coeffs);
+        LoadConst_Float(0, (LVM_FLOAT*)&pPrivate->pFastData->LPTaps,
                         sizeof(Biquad_1I_Order1_FLOAT_Taps_t) / sizeof(LVM_FLOAT));
     }
 
     /*
      * Calculate the room size parameter
      */
-    if( pPrivate->NewParams.RoomSize != pPrivate->CurrentParams.RoomSize)
-    {
+    if (pPrivate->NewParams.RoomSize != pPrivate->CurrentParams.RoomSize) {
         /* Room size range is 10ms to 200ms
          * 0%   -- 10ms
          * 50%  -- 65ms
          * 100% -- 120ms
          */
-        pPrivate->RoomSizeInms = 10 + (((pPrivate->NewParams.RoomSize*11) + 5) / 10);
+        pPrivate->RoomSizeInms = 10 + (((pPrivate->NewParams.RoomSize * 11) + 5) / 10);
     }
 
     /*
      * Update the T delay number of samples and the all pass delay number of samples
      */
-    if( (pPrivate->NewParams.RoomSize   != pPrivate->CurrentParams.RoomSize)   ||
+    if ((pPrivate->NewParams.RoomSize != pPrivate->CurrentParams.RoomSize) ||
         (pPrivate->NewParams.SampleRate != pPrivate->CurrentParams.SampleRate) ||
-        (pPrivate->bFirstControl        == LVM_TRUE))
-    {
-
-        LVM_UINT32  Temp;
-        LVM_INT32   APDelaySize;
-        LVM_INT32   Fs = LVM_GetFsFromTable(pPrivate->NewParams.SampleRate);
-        LVM_UINT32  DelayLengthSamples = (LVM_UINT32)(Fs * pPrivate->RoomSizeInms);
-        LVM_INT16   i;
-        LVM_FLOAT   ScaleTable[]  = {LVREV_T_3_Power_minus0_on_4, LVREV_T_3_Power_minus1_on_4, \
-                                     LVREV_T_3_Power_minus2_on_4, LVREV_T_3_Power_minus3_on_4};
-        LVM_INT16   MaxT_Delay[]  = {LVREV_MAX_T0_DELAY, LVREV_MAX_T1_DELAY, \
-                                     LVREV_MAX_T2_DELAY, LVREV_MAX_T3_DELAY};
-        LVM_INT16   MaxAP_Delay[] = {LVREV_MAX_AP0_DELAY, LVREV_MAX_AP1_DELAY, \
-                                     LVREV_MAX_AP2_DELAY, LVREV_MAX_AP3_DELAY};
+        (pPrivate->bFirstControl == LVM_TRUE)) {
+        LVM_UINT32 Temp;
+        LVM_INT32 APDelaySize;
+        LVM_INT32 Fs = LVM_GetFsFromTable(pPrivate->NewParams.SampleRate);
+        LVM_UINT32 DelayLengthSamples = (LVM_UINT32)(Fs * pPrivate->RoomSizeInms);
+        LVM_INT16 i;
+        LVM_FLOAT ScaleTable[] = {LVREV_T_3_Power_minus0_on_4, LVREV_T_3_Power_minus1_on_4,
+                                  LVREV_T_3_Power_minus2_on_4, LVREV_T_3_Power_minus3_on_4};
+        LVM_INT16 MaxT_Delay[] = {LVREV_MAX_T0_DELAY, LVREV_MAX_T1_DELAY, LVREV_MAX_T2_DELAY,
+                                  LVREV_MAX_T3_DELAY};
+        LVM_INT16 MaxAP_Delay[] = {LVREV_MAX_AP0_DELAY, LVREV_MAX_AP1_DELAY, LVREV_MAX_AP2_DELAY,
+                                   LVREV_MAX_AP3_DELAY};
 
         /*
          * For each delay line
          */
-        for (i = 0; i < NumberOfDelayLines; i++)
-        {
-            if (i != 0)
-            {
-                LVM_FLOAT Temp1;  /* to avoid QAC warning on type conversion */
+        for (i = 0; i < NumberOfDelayLines; i++) {
+            if (i != 0) {
+                LVM_FLOAT Temp1; /* to avoid QAC warning on type conversion */
 
-                Temp1=(LVM_FLOAT)DelayLengthSamples;
+                Temp1 = (LVM_FLOAT)DelayLengthSamples;
                 Temp = (LVM_UINT32)(Temp1 * ScaleTable[i]);
+            } else {
+                Temp = DelayLengthSamples;
             }
-            else
-            {
-               Temp = DelayLengthSamples;
-            }
-            APDelaySize = Temp  / 1500;
+            APDelaySize = Temp / 1500;
 
             /*
              * Set the fixed delay
              */
 
-            Temp  = (MaxT_Delay[i] - MaxAP_Delay[i]) * Fs / 192000;
+            Temp = (MaxT_Delay[i] - MaxAP_Delay[i]) * Fs / 192000;
             pPrivate->Delay_AP[i] = pPrivate->T[i] - Temp;
 
             /*
              * Set the tap selection
              */
-            if (pPrivate->AB_Selection)
-            {
+            if (pPrivate->AB_Selection) {
                 /* Smooth from tap A to tap B */
-                pPrivate->pOffsetB[i]             = &pPrivate->pDelay_T[i][pPrivate->T[i] - \
-                                                                           Temp - APDelaySize];
-                pPrivate->B_DelaySize[i]          = APDelaySize;
+                pPrivate->pOffsetB[i] = &pPrivate->pDelay_T[i][pPrivate->T[i] - Temp - APDelaySize];
+                pPrivate->B_DelaySize[i] = APDelaySize;
                 pPrivate->Mixer_APTaps[i].Target1 = 0;
                 pPrivate->Mixer_APTaps[i].Target2 = 1.0f;
-            }
-            else
-            {
+            } else {
                 /* Smooth from tap B to tap A */
-                pPrivate->pOffsetA[i]             = &pPrivate->pDelay_T[i][pPrivate->T[i] - \
-                                                                           Temp - APDelaySize];
-                pPrivate->A_DelaySize[i]          = APDelaySize;
+                pPrivate->pOffsetA[i] = &pPrivate->pDelay_T[i][pPrivate->T[i] - Temp - APDelaySize];
+                pPrivate->A_DelaySize[i] = APDelaySize;
                 pPrivate->Mixer_APTaps[i].Target2 = 0;
                 pPrivate->Mixer_APTaps[i].Target1 = 1.0f;
             }
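
A quick sanity check of the room-size mapping introduced earlier in this hunk (RoomSizeInms = 10 + ((RoomSize * 11) + 5) / 10, integer arithmetic). The helper below is standalone and only mirrors that expression to confirm the 0% / 50% / 100% values quoted in the comment.

    #include <cassert>

    // Mirrors the integer expression from the hunk above.
    static int RoomSizeToMs(int roomSizePercent) {
        return 10 + (((roomSizePercent * 11) + 5) / 10);
    }

    int main() {
        assert(RoomSizeToMs(0) == 10);     // 10 + 5/10    = 10
        assert(RoomSizeToMs(50) == 65);    // 10 + 555/10  = 10 + 55
        assert(RoomSizeToMs(100) == 120);  // 10 + 1105/10 = 10 + 110
        return 0;
    }
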
@@ -202,22 +176,17 @@
             /*
              * Set the maximum block size to the smallest delay size
              */
-            pPrivate->MaxBlkLen   = Temp;
-            if (pPrivate->MaxBlkLen > pPrivate->A_DelaySize[i])
-            {
+            pPrivate->MaxBlkLen = Temp;
+            if (pPrivate->MaxBlkLen > pPrivate->A_DelaySize[i]) {
                 pPrivate->MaxBlkLen = pPrivate->A_DelaySize[i];
             }
-            if (pPrivate->MaxBlkLen > pPrivate->B_DelaySize[i])
-            {
+            if (pPrivate->MaxBlkLen > pPrivate->B_DelaySize[i]) {
                 pPrivate->MaxBlkLen = pPrivate->B_DelaySize[i];
             }
         }
-        if (pPrivate->AB_Selection)
-        {
+        if (pPrivate->AB_Selection) {
             pPrivate->AB_Selection = 0;
-        }
-        else
-        {
+        } else {
             pPrivate->AB_Selection = 1;
         }
 
@@ -226,8 +195,7 @@
          */
         /* Just as a precaution, but no problem if we remove this line      */
         pPrivate->MaxBlkLen = pPrivate->MaxBlkLen - 2;
-        if(pPrivate->MaxBlkLen > pPrivate->InstanceParams.MaxBlockSize)
-        {
+        if (pPrivate->MaxBlkLen > pPrivate->InstanceParams.MaxBlockSize) {
             pPrivate->MaxBlkLen = (LVM_INT32)pPrivate->InstanceParams.MaxBlockSize;
         }
     }
@@ -235,39 +203,30 @@
     /*
      * Update the low pass filter coefficient
      */
-    if( (pPrivate->NewParams.Damping    != pPrivate->CurrentParams.Damping)    ||
+    if ((pPrivate->NewParams.Damping != pPrivate->CurrentParams.Damping) ||
         (pPrivate->NewParams.SampleRate != pPrivate->CurrentParams.SampleRate) ||
-        (pPrivate->bFirstControl        == LVM_TRUE))
-    {
-
-        LVM_INT32       Temp;
-        LVM_FLOAT       Omega;
-        FO_FLOAT_Coefs_t  Coeffs;
-        LVM_INT16       i;
-        LVM_INT16       Damping      = (LVM_INT16)((pPrivate->NewParams.Damping * 100) + 1000);
-        LVM_FLOAT       ScaleTable[] = {LVREV_T_3_Power_0_on_4, LVREV_T_3_Power_1_on_4,
-                                        LVREV_T_3_Power_2_on_4, LVREV_T_3_Power_3_on_4};
+        (pPrivate->bFirstControl == LVM_TRUE)) {
+        LVM_INT32 Temp;
+        LVM_FLOAT Omega;
+        FO_FLOAT_Coefs_t Coeffs;
+        LVM_INT16 i;
+        LVM_INT16 Damping = (LVM_INT16)((pPrivate->NewParams.Damping * 100) + 1000);
+        LVM_FLOAT ScaleTable[] = {LVREV_T_3_Power_0_on_4, LVREV_T_3_Power_1_on_4,
+                                  LVREV_T_3_Power_2_on_4, LVREV_T_3_Power_3_on_4};
 
         /*
          * For each filter
          */
-        for (i = 0; i < NumberOfDelayLines; i++)
-        {
-            if (i != 0)
-            {
+        for (i = 0; i < NumberOfDelayLines; i++) {
+            if (i != 0) {
                 Temp = (LVM_INT32)(ScaleTable[i] * Damping);
-            }
-            else
-            {
+            } else {
                 Temp = Damping;
             }
-            if(Temp <= (LVM_INT32)(LVM_FsTable[pPrivate->NewParams.SampleRate] >> 1))
-            {
+            if (Temp <= (LVM_INT32)(LVM_FsTable[pPrivate->NewParams.SampleRate] >> 1)) {
                 Omega = LVM_GetOmega(Temp, pPrivate->NewParams.SampleRate);
                 LVM_FO_LPF(Omega, &Coeffs);
-            }
-            else
-            {
+            } else {
                 Coeffs.A0 = 1;
                 Coeffs.A1 = 0;
                 Coeffs.B1 = 0;
@@ -280,27 +239,23 @@
     /*
      * Update All-pass filter mixer time constants
      */
-    if( (pPrivate->NewParams.RoomSize   != pPrivate->CurrentParams.RoomSize)   ||
+    if ((pPrivate->NewParams.RoomSize != pPrivate->CurrentParams.RoomSize) ||
         (pPrivate->NewParams.SampleRate != pPrivate->CurrentParams.SampleRate) ||
-        (pPrivate->NewParams.Density    != pPrivate->CurrentParams.Density))
-    {
-        LVM_INT16   i;
-        LVM_FLOAT   Alpha;
-        LVM_FLOAT   AlphaTap;
+        (pPrivate->NewParams.Density != pPrivate->CurrentParams.Density)) {
+        LVM_INT16 i;
+        LVM_FLOAT Alpha;
+        LVM_FLOAT AlphaTap;
 
         Alpha = LVM_Mixer_TimeConstant(LVREV_ALLPASS_TC,
-                                       LVM_GetFsFromTable(pPrivate->NewParams.SampleRate),
-                                       1);
+                                       LVM_GetFsFromTable(pPrivate->NewParams.SampleRate), 1);
 
         AlphaTap = LVM_Mixer_TimeConstant(LVREV_ALLPASS_TAP_TC,
-                                          LVM_GetFsFromTable(pPrivate->NewParams.SampleRate),
-                                          1);
+                                          LVM_GetFsFromTable(pPrivate->NewParams.SampleRate), 1);
 
-        for (i = 0; i < 4; i++)
-        {
-            pPrivate->Mixer_APTaps[i].Alpha1       = AlphaTap;
-            pPrivate->Mixer_APTaps[i].Alpha2       = AlphaTap;
-            pPrivate->Mixer_SGFeedback[i].Alpha    = Alpha;
+        for (i = 0; i < 4; i++) {
+            pPrivate->Mixer_APTaps[i].Alpha1 = AlphaTap;
+            pPrivate->Mixer_APTaps[i].Alpha2 = AlphaTap;
+            pPrivate->Mixer_SGFeedback[i].Alpha = Alpha;
             pPrivate->Mixer_SGFeedforward[i].Alpha = Alpha;
         }
     }
@@ -308,150 +263,121 @@
     /*
      * Update the feed back gain
      */
-    if( (pPrivate->NewParams.RoomSize   != pPrivate->CurrentParams.RoomSize)   ||
+    if ((pPrivate->NewParams.RoomSize != pPrivate->CurrentParams.RoomSize) ||
         (pPrivate->NewParams.SampleRate != pPrivate->CurrentParams.SampleRate) ||
-        (pPrivate->NewParams.T60        != pPrivate->CurrentParams.T60)        ||
-        (pPrivate->bFirstControl        == LVM_TRUE))
-    {
+        (pPrivate->NewParams.T60 != pPrivate->CurrentParams.T60) ||
+        (pPrivate->bFirstControl == LVM_TRUE)) {
+        LVM_FLOAT G[4]; /* Feedback gain (Q7.24) */
 
-        LVM_FLOAT               G[4];                       /* Feedback gain (Q7.24) */
-
-        if(pPrivate->NewParams.T60 == 0)
-        {
+        if (pPrivate->NewParams.T60 == 0) {
             G[3] = 0;
             G[2] = 0;
             G[1] = 0;
             G[0] = 0;
-        }
-        else
-        {
-            LVM_FLOAT   Temp1;
-            LVM_FLOAT   Temp2;
-            LVM_INT16   i;
-            LVM_FLOAT   ScaleTable[] = {LVREV_T_3_Power_minus0_on_4, LVREV_T_3_Power_minus1_on_4,
-                                        LVREV_T_3_Power_minus2_on_4, LVREV_T_3_Power_minus3_on_4};
+        } else {
+            LVM_FLOAT Temp1;
+            LVM_FLOAT Temp2;
+            LVM_INT16 i;
+            LVM_FLOAT ScaleTable[] = {LVREV_T_3_Power_minus0_on_4, LVREV_T_3_Power_minus1_on_4,
+                                      LVREV_T_3_Power_minus2_on_4, LVREV_T_3_Power_minus3_on_4};
 
             /*
              * For each delay line
              */
-            for (i = 0; i < NumberOfDelayLines; i++)
-            {
+            for (i = 0; i < NumberOfDelayLines; i++) {
                 Temp1 = (3 * pPrivate->RoomSizeInms * ScaleTable[i]) / pPrivate->NewParams.T60;
-                if(Temp1 >= (4))
-                {
+                if (Temp1 >= (4)) {
                     G[i] = 0;
-                }
-                else if((Temp1 >= (2)))
-                {
+                } else if ((Temp1 >= (2))) {
                     Temp2 = LVM_Power10(-(Temp1 / 2.0f));
                     Temp1 = LVM_Power10(-(Temp1 / 2.0f));
                     Temp1 = Temp1 * Temp2;
-                }
-                else
-                {
+                } else {
                     Temp1 = LVM_Power10(-(Temp1));
                 }
-                if (NumberOfDelayLines == 1)
-                {
+                if (NumberOfDelayLines == 1) {
                     G[i] = Temp1;
-                }
-                else
-                {
-                    LVM_FLOAT   TempG;
+                } else {
+                    LVM_FLOAT TempG;
                     TempG = Temp1 * ONE_OVER_SQRT_TWO;
-                    G[i]=TempG;
+                    G[i] = TempG;
                 }
             }
         }
 
         /* Set up the feedback mixers for four delay lines */
-        pPrivate->FeedbackMixer[0].Target=G[0];
-        pPrivate->FeedbackMixer[1].Target=G[1];
-        pPrivate->FeedbackMixer[2].Target=G[2];
-        pPrivate->FeedbackMixer[3].Target=G[3];
+        pPrivate->FeedbackMixer[0].Target = G[0];
+        pPrivate->FeedbackMixer[1].Target = G[1];
+        pPrivate->FeedbackMixer[2].Target = G[2];
+        pPrivate->FeedbackMixer[3].Target = G[3];
     }
 
     /*
      * Calculate the gain correction
      */
-    if((pPrivate->NewParams.RoomSize != pPrivate->CurrentParams.RoomSize) ||
-       (pPrivate->NewParams.Level    != pPrivate->CurrentParams.Level)    ||
-       (pPrivate->NewParams.T60      != pPrivate->CurrentParams.T60) )
-    {
-        LVM_INT32 Index=0;
+    if ((pPrivate->NewParams.RoomSize != pPrivate->CurrentParams.RoomSize) ||
+        (pPrivate->NewParams.Level != pPrivate->CurrentParams.Level) ||
+        (pPrivate->NewParams.T60 != pPrivate->CurrentParams.T60)) {
+        LVM_INT32 Index = 0;
         LVM_FLOAT Index_FLOAT;
-        LVM_INT32 i=0;
-        LVM_FLOAT Gain=0;
-        LVM_INT32 RoomSize=0;
+        LVM_INT32 i = 0;
+        LVM_FLOAT Gain = 0;
+        LVM_INT32 RoomSize = 0;
         LVM_FLOAT T60;
         LVM_FLOAT Coefs[5];
 
-        if(pPrivate->NewParams.RoomSize == 0)
-        {
+        if (pPrivate->NewParams.RoomSize == 0) {
             RoomSize = 1;
-        }
-        else
-        {
+        } else {
             RoomSize = (LVM_INT32)pPrivate->NewParams.RoomSize;
         }
 
-        if(pPrivate->NewParams.T60 < 100)
-        {
+        if (pPrivate->NewParams.T60 < 100) {
             T60 = 100 * LVREV_T60_SCALE;
-        }
-        else
-        {
+        } else {
             T60 = pPrivate->NewParams.T60 * LVREV_T60_SCALE;
         }
 
         /* Find the nearest room size in table */
-        for(i = 0; i < 24; i++)
-        {
-            if(RoomSize <= LVREV_GainPolyTable[i][0])
-            {
+        for (i = 0; i < 24; i++) {
+            if (RoomSize <= LVREV_GainPolyTable[i][0]) {
                 Index = i;
                 break;
             }
         }
 
-        if(RoomSize == LVREV_GainPolyTable[Index][0])
-        {
+        if (RoomSize == LVREV_GainPolyTable[Index][0]) {
             /* Take table values if the room size is in table */
-            for(i = 1; i < 5; i++)
-            {
-                Coefs[i-1] = LVREV_GainPolyTable[Index][i];
+            for (i = 1; i < 5; i++) {
+                Coefs[i - 1] = LVREV_GainPolyTable[Index][i];
             }
             Coefs[4] = 0;
-            Gain = LVM_Polynomial(3, Coefs, T60);       /* Q.24 result */
-        }
-        else
-        {
+            Gain = LVM_Polynomial(3, Coefs, T60); /* Q.24 result */
+        } else {
             /* Interpolate the gain between nearest room sizes */
 
-            LVM_FLOAT Gain1,Gain2;
-            LVM_INT32 Tot_Dist,Dist;
+            LVM_FLOAT Gain1, Gain2;
+            LVM_INT32 Tot_Dist, Dist;
 
-            Tot_Dist = (LVM_UINT32)LVREV_GainPolyTable[Index][0] - \
-                                            (LVM_UINT32)LVREV_GainPolyTable[Index-1][0];
+            Tot_Dist = (LVM_UINT32)LVREV_GainPolyTable[Index][0] -
+                       (LVM_UINT32)LVREV_GainPolyTable[Index - 1][0];
             Dist = RoomSize - (LVM_UINT32)LVREV_GainPolyTable[Index - 1][0];
 
             /* Get gain for first */
-            for(i = 1; i < 5; i++)
-            {
-                Coefs[i-1] = LVREV_GainPolyTable[Index-1][i];
+            for (i = 1; i < 5; i++) {
+                Coefs[i - 1] = LVREV_GainPolyTable[Index - 1][i];
             }
             Coefs[4] = 0;
 
-            Gain1 = LVM_Polynomial(3, Coefs, T60);      /* Q.24 result */
+            Gain1 = LVM_Polynomial(3, Coefs, T60); /* Q.24 result */
 
             /* Get gain for second */
-            for(i = 1; i < 5; i++)
-            {
-                Coefs[i-1] = LVREV_GainPolyTable[Index][i];
+            for (i = 1; i < 5; i++) {
+                Coefs[i - 1] = LVREV_GainPolyTable[Index][i];
             }
             Coefs[4] = 0;
 
-            Gain2 = LVM_Polynomial(3, Coefs, T60);      /* Q.24 result */
+            Gain2 = LVM_Polynomial(3, Coefs, T60); /* Q.24 result */
 
             /* Linear Interpolate the gain */
             Gain = Gain1 + (((Gain2 - Gain1) * Dist) / (Tot_Dist));
@@ -461,32 +387,27 @@
          * Get the inverse of gain: Q.15
          * Gain is mostly above one except few cases, take only gains above 1
          */
-        if(Gain < 1)
-        {
+        if (Gain < 1) {
             pPrivate->Gain = 1;
-        }
-        else
-        {
+        } else {
             pPrivate->Gain = 1 / Gain;
         }
 
         Index_FLOAT = 100.0f / (LVM_FLOAT)(100 + pPrivate->NewParams.Level);
         pPrivate->Gain = pPrivate->Gain * Index_FLOAT;
-        pPrivate->GainMixer.Target = (pPrivate->Gain*Index_FLOAT) / 2;
+        pPrivate->GainMixer.Target = (pPrivate->Gain * Index_FLOAT) / 2;
     }
 
     /*
      * Update the all pass comb filter coefficient
      */
-    if( (pPrivate->NewParams.Density != pPrivate->CurrentParams.Density) ||
-        (pPrivate->bFirstControl     == LVM_TRUE))
-    {
-        LVM_INT16   i;
-        LVM_FLOAT   b = (LVM_FLOAT)pPrivate->NewParams.Density * LVREV_B_8_on_1000;
+    if ((pPrivate->NewParams.Density != pPrivate->CurrentParams.Density) ||
+        (pPrivate->bFirstControl == LVM_TRUE)) {
+        LVM_INT16 i;
+        LVM_FLOAT b = (LVM_FLOAT)pPrivate->NewParams.Density * LVREV_B_8_on_1000;
 
-        for (i = 0; i < 4; i++)
-        {
-            pPrivate->Mixer_SGFeedback[i].Target    = b;
+        for (i = 0; i < 4; i++) {
+            pPrivate->Mixer_SGFeedback[i].Target = b;
             pPrivate->Mixer_SGFeedforward[i].Target = b;
         }
     }
@@ -494,11 +415,10 @@
     /*
      * Update the bypass mixer time constant
      */
-    if((pPrivate->NewParams.SampleRate   != pPrivate->CurrentParams.SampleRate)   ||
-       (pPrivate->bFirstControl          == LVM_TRUE))
-    {
-        LVM_UINT16   NumChannels = 1;                       /* Assume MONO format */
-        LVM_FLOAT    Alpha;
+    if ((pPrivate->NewParams.SampleRate != pPrivate->CurrentParams.SampleRate) ||
+        (pPrivate->bFirstControl == LVM_TRUE)) {
+        LVM_UINT16 NumChannels = 1; /* Assume MONO format */
+        LVM_FLOAT Alpha;
 
         Alpha = LVM_Mixer_TimeConstant(LVREV_FEEDBACKMIXER_TC,
                                        LVM_GetFsFromTable(pPrivate->NewParams.SampleRate),
@@ -508,67 +428,55 @@
         pPrivate->FeedbackMixer[2].Alpha = Alpha;
         pPrivate->FeedbackMixer[3].Alpha = Alpha;
 
-        NumChannels = 2;                                    /* Always stereo output */
-        pPrivate->BypassMixer.Alpha1 = LVM_Mixer_TimeConstant(LVREV_BYPASSMIXER_TC,
-                             LVM_GetFsFromTable(pPrivate->NewParams.SampleRate), NumChannels);
+        NumChannels = 2; /* Always stereo output */
+        pPrivate->BypassMixer.Alpha1 = LVM_Mixer_TimeConstant(
+                LVREV_BYPASSMIXER_TC, LVM_GetFsFromTable(pPrivate->NewParams.SampleRate),
+                NumChannels);
         pPrivate->BypassMixer.Alpha2 = pPrivate->BypassMixer.Alpha1;
-        pPrivate->GainMixer.Alpha    = pPrivate->BypassMixer.Alpha1;
+        pPrivate->GainMixer.Alpha = pPrivate->BypassMixer.Alpha1;
     }
 
     /*
      * Update the bypass mixer targets
      */
-    if( (pPrivate->NewParams.Level != pPrivate->CurrentParams.Level) &&
-        (pPrivate->NewParams.OperatingMode == LVM_MODE_ON))
-    {
-        pPrivate->BypassMixer.Target2 = (LVM_FLOAT)(pPrivate->NewParams.Level ) / 100.0f;
+    if ((pPrivate->NewParams.Level != pPrivate->CurrentParams.Level) &&
+        (pPrivate->NewParams.OperatingMode == LVM_MODE_ON)) {
+        pPrivate->BypassMixer.Target2 = (LVM_FLOAT)(pPrivate->NewParams.Level) / 100.0f;
         pPrivate->BypassMixer.Target1 = 0x00000000;
-        if ((pPrivate->NewParams.Level == 0) && (pPrivate->bFirstControl == LVM_FALSE))
-        {
+        if ((pPrivate->NewParams.Level == 0) && (pPrivate->bFirstControl == LVM_FALSE)) {
             pPrivate->BypassMixer.CallbackSet2 = LVM_TRUE;
         }
-        if (pPrivate->NewParams.Level != 0)
-        {
+        if (pPrivate->NewParams.Level != 0) {
             pPrivate->bDisableReverb = LVM_FALSE;
         }
     }
 
-    if(pPrivate->NewParams.OperatingMode != pPrivate->CurrentParams.OperatingMode)
-    {
-        if(pPrivate->NewParams.OperatingMode == LVM_MODE_ON)
-        {
-            pPrivate->BypassMixer.Target2 = (LVM_FLOAT)(pPrivate->NewParams.Level ) / 100.0f;
+    if (pPrivate->NewParams.OperatingMode != pPrivate->CurrentParams.OperatingMode) {
+        if (pPrivate->NewParams.OperatingMode == LVM_MODE_ON) {
+            pPrivate->BypassMixer.Target2 = (LVM_FLOAT)(pPrivate->NewParams.Level) / 100.0f;
             pPrivate->BypassMixer.Target1 = 0x00000000;
 
             pPrivate->BypassMixer.CallbackSet2 = LVM_FALSE;
-            OperatingMode                      = LVM_MODE_ON;
-            if (pPrivate->NewParams.Level == 0)
-            {
+            OperatingMode = LVM_MODE_ON;
+            if (pPrivate->NewParams.Level == 0) {
                 pPrivate->bDisableReverb = LVM_TRUE;
-            }
-            else
-            {
+            } else {
                 pPrivate->bDisableReverb = LVM_FALSE;
             }
-        }
-        else if (pPrivate->bFirstControl == LVM_FALSE)
-        {
+        } else if (pPrivate->bFirstControl == LVM_FALSE) {
             pPrivate->BypassMixer.Target2 = 0x00000000;
             pPrivate->BypassMixer.Target1 = 0x00000000;
             pPrivate->BypassMixer.CallbackSet2 = LVM_TRUE;
-            pPrivate->GainMixer.Target    = 0.03125f;
+            pPrivate->GainMixer.Target = 0.03125f;
             OperatingMode = LVM_MODE_ON;
-        }
-        else
-        {
+        } else {
             OperatingMode = LVM_MODE_OFF;
         }
     }
 
     /* If it is the first call to ApplyNewSettings, force the current to the target
        to begin immediate playback of the effect */
-    if(pPrivate->bFirstControl == LVM_TRUE)
-    {
+    if (pPrivate->bFirstControl == LVM_TRUE) {
         pPrivate->BypassMixer.Current1 = pPrivate->BypassMixer.Target1;
         pPrivate->BypassMixer.Current2 = pPrivate->BypassMixer.Target2;
     }
@@ -582,8 +490,7 @@
     /*
      * Update flag
      */
-    if(pPrivate->bFirstControl == LVM_TRUE)
-    {
+    if (pPrivate->bFirstControl == LVM_TRUE) {
         pPrivate->bFirstControl = LVM_FALSE;
     }
 
@@ -606,12 +513,9 @@
 /* NOTES:                                                                               */
 /*                                                                                      */
 /****************************************************************************************/
-LVM_INT32 BypassMixer_Callback (void *pCallbackData,
-                                void *pGeneralPurpose,
-                                LVM_INT16 GeneralPurpose )
-{
-
-    LVREV_Instance_st     *pLVREV_Private = (LVREV_Instance_st *)pCallbackData;
+LVM_INT32 BypassMixer_Callback(void* pCallbackData, void* pGeneralPurpose,
+                               LVM_INT16 GeneralPurpose) {
+    LVREV_Instance_st* pLVREV_Private = (LVREV_Instance_st*)pCallbackData;
 
     /*
      * Avoid build warnings
@@ -623,11 +527,10 @@
      * Turn off
      */
     pLVREV_Private->CurrentParams.OperatingMode = LVM_MODE_OFF;
-    pLVREV_Private->bDisableReverb              = LVM_TRUE;
+    pLVREV_Private->bDisableReverb = LVM_TRUE;
     LVREV_ClearAudioBuffers((LVREV_Handle_t)pCallbackData);
 
     return 0;
 }
 
 /* End of file */
-
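
A note for reviewers of the gain-correction block above: the logic finds the nearest room-size row in LVREV_GainPolyTable, evaluates a cubic in T60 for that row (and, when the size falls between rows, for the previous row as well), then linearly interpolates the two results by room-size distance. The sketch below is a self-contained illustration of that lookup-and-interpolate step only; the table values and the Horner-style PolyEval stand-in for LVM_Polynomial are invented for the example and are not the library's coefficients.

#include <cstddef>
#include <cstdio>

/* Assumed behaviour of LVM_Polynomial(3, Coefs, x): a cubic evaluated in Horner form. */
float PolyEval(const float c[4], float x) {
    return ((c[3] * x + c[2]) * x + c[1]) * x + c[0];
}

/* Rows: { room size, c0, c1, c2, c3 } -- illustrative numbers only. */
const float kGainTable[][5] = {
        {10.0f, 1.00f, 0.002f, 0.0f, 0.0f},
        {25.0f, 1.10f, 0.003f, 0.0f, 0.0f},
        {50.0f, 1.25f, 0.004f, 0.0f, 0.0f},
};

float GainForRoom(float roomSize, float t60) {
    const size_t rows = sizeof(kGainTable) / sizeof(kGainTable[0]);
    size_t index = 0;
    while (index < rows - 1 && roomSize > kGainTable[index][0]) {
        index++; /* first row whose room size is >= roomSize */
    }
    if (index == 0 || roomSize == kGainTable[index][0]) {
        return PolyEval(&kGainTable[index][1], t60); /* room size found in the table */
    }
    /* Linearly interpolate between the two nearest room sizes, as in the code above. */
    const float gain1 = PolyEval(&kGainTable[index - 1][1], t60);
    const float gain2 = PolyEval(&kGainTable[index][1], t60);
    const float dist = roomSize - kGainTable[index - 1][0];
    const float totDist = kGainTable[index][0] - kGainTable[index - 1][0];
    return gain1 + ((gain2 - gain1) * dist) / totDist;
}

int main() {
    printf("gain(roomSize=30, t60=0.5) = %f\n", GainForRoom(30.0f, 0.5f));
    return 0;
}
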
diff --git a/media/libeffects/lvm/lib/Reverb/src/LVREV_ClearAudioBuffers.cpp b/media/libeffects/lvm/lib/Reverb/src/LVREV_ClearAudioBuffers.cpp
index 586539f..5c83ce5 100644
--- a/media/libeffects/lvm/lib/Reverb/src/LVREV_ClearAudioBuffers.cpp
+++ b/media/libeffects/lvm/lib/Reverb/src/LVREV_ClearAudioBuffers.cpp
@@ -41,17 +41,14 @@
 /*  1. This function must not be interrupted by the LVM_Process function                */
 /*                                                                                      */
 /****************************************************************************************/
-LVREV_ReturnStatus_en LVREV_ClearAudioBuffers(LVREV_Handle_t  hInstance)
-{
-
-   LVREV_Instance_st     *pLVREV_Private = (LVREV_Instance_st *)hInstance;
+LVREV_ReturnStatus_en LVREV_ClearAudioBuffers(LVREV_Handle_t hInstance) {
+    LVREV_Instance_st* pLVREV_Private = (LVREV_Instance_st*)hInstance;
 
     /*
      * Check for error conditions
      */
     /* Check for NULL pointers */
-    if(hInstance == LVM_NULL)
-    {
+    if (hInstance == LVM_NULL) {
         return LVREV_NULLADDRESS;
     }
 
@@ -59,18 +56,13 @@
      * Clear all filter tap data, delay-lines and other signal related data
      */
 
-    LoadConst_Float(0,
-        (LVM_FLOAT *)&pLVREV_Private->pFastData->HPTaps,
-        2);
-    LoadConst_Float(0,
-        (LVM_FLOAT *)&pLVREV_Private->pFastData->LPTaps,
-        2);
-    if((LVM_UINT16)pLVREV_Private->InstanceParams.NumDelays == LVREV_DELAYLINES_4)
-    {
-        LoadConst_Float(0, (LVM_FLOAT *)&pLVREV_Private->pFastData->RevLPTaps[3], 2);
-        LoadConst_Float(0, (LVM_FLOAT *)&pLVREV_Private->pFastData->RevLPTaps[2], 2);
-        LoadConst_Float(0, (LVM_FLOAT *)&pLVREV_Private->pFastData->RevLPTaps[1], 2);
-        LoadConst_Float(0, (LVM_FLOAT *)&pLVREV_Private->pFastData->RevLPTaps[0], 2);
+    LoadConst_Float(0, (LVM_FLOAT*)&pLVREV_Private->pFastData->HPTaps, 2);
+    LoadConst_Float(0, (LVM_FLOAT*)&pLVREV_Private->pFastData->LPTaps, 2);
+    if ((LVM_UINT16)pLVREV_Private->InstanceParams.NumDelays == LVREV_DELAYLINES_4) {
+        LoadConst_Float(0, (LVM_FLOAT*)&pLVREV_Private->pFastData->RevLPTaps[3], 2);
+        LoadConst_Float(0, (LVM_FLOAT*)&pLVREV_Private->pFastData->RevLPTaps[2], 2);
+        LoadConst_Float(0, (LVM_FLOAT*)&pLVREV_Private->pFastData->RevLPTaps[1], 2);
+        LoadConst_Float(0, (LVM_FLOAT*)&pLVREV_Private->pFastData->RevLPTaps[0], 2);
 
         LoadConst_Float(0, pLVREV_Private->pDelay_T[3], LVREV_MAX_T3_DELAY);
         LoadConst_Float(0, pLVREV_Private->pDelay_T[2], LVREV_MAX_T2_DELAY);
@@ -78,18 +70,16 @@
         LoadConst_Float(0, pLVREV_Private->pDelay_T[0], LVREV_MAX_T0_DELAY);
     }
 
-    if((LVM_UINT16)pLVREV_Private->InstanceParams.NumDelays >= LVREV_DELAYLINES_2)
-    {
-        LoadConst_Float(0, (LVM_FLOAT *)&pLVREV_Private->pFastData->RevLPTaps[1], 2);
-        LoadConst_Float(0, (LVM_FLOAT *)&pLVREV_Private->pFastData->RevLPTaps[0], 2);
+    if ((LVM_UINT16)pLVREV_Private->InstanceParams.NumDelays >= LVREV_DELAYLINES_2) {
+        LoadConst_Float(0, (LVM_FLOAT*)&pLVREV_Private->pFastData->RevLPTaps[1], 2);
+        LoadConst_Float(0, (LVM_FLOAT*)&pLVREV_Private->pFastData->RevLPTaps[0], 2);
 
         LoadConst_Float(0, pLVREV_Private->pDelay_T[1], LVREV_MAX_T1_DELAY);
         LoadConst_Float(0, pLVREV_Private->pDelay_T[0], LVREV_MAX_T0_DELAY);
     }
 
-    if((LVM_UINT16)pLVREV_Private->InstanceParams.NumDelays >= LVREV_DELAYLINES_1)
-    {
-        LoadConst_Float(0, (LVM_FLOAT *)&pLVREV_Private->pFastData->RevLPTaps[0], 2);
+    if ((LVM_UINT16)pLVREV_Private->InstanceParams.NumDelays >= LVREV_DELAYLINES_1) {
+        LoadConst_Float(0, (LVM_FLOAT*)&pLVREV_Private->pFastData->RevLPTaps[0], 2);
         LoadConst_Float(0, pLVREV_Private->pDelay_T[0], LVREV_MAX_T0_DELAY);
     }
     return LVREV_SUCCESS;
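
A side note on the block above: judging from its use here, LoadConst_Float(value, dst, n) fills n floats with a constant, and each of the cascaded NumDelays branches clears one low-pass tap pair plus one delay buffer per configured delay line. The toy sketch below only illustrates that shape; the buffer lengths, names, and the loop over one/two/four delay lines are my own illustration, not the library's code.

#include <cstdio>

/* Stand-in for the assumed LoadConst_Float semantics: fill n floats with value. */
void FillFloat(float value, float* dst, int n) {
    for (int i = 0; i < n; i++) {
        dst[i] = value;
    }
}

int main() {
    /* Toy state: up to 4 delay lines, each with a 2-tap LP state and a delay buffer. */
    const int kMaxLines = 4;
    const int kDelayLen[kMaxLines] = {8, 6, 4, 3}; /* illustrative lengths only */
    float lpTaps[kMaxLines][2];
    float delay[kMaxLines][8];

    int numDelays = 2; /* e.g. a two-delay-line configuration */
    for (int line = 0; line < numDelays; line++) {
        FillFloat(0.0f, lpTaps[line], 2);              /* analogous to RevLPTaps[line] */
        FillFloat(0.0f, delay[line], kDelayLen[line]); /* analogous to pDelay_T[line] */
    }
    printf("cleared %d delay lines\n", numDelays);
    return 0;
}
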
diff --git a/media/libeffects/lvm/lib/Reverb/src/LVREV_GetControlParameters.cpp b/media/libeffects/lvm/lib/Reverb/src/LVREV_GetControlParameters.cpp
index e0b0142..f858b74 100644
--- a/media/libeffects/lvm/lib/Reverb/src/LVREV_GetControlParameters.cpp
+++ b/media/libeffects/lvm/lib/Reverb/src/LVREV_GetControlParameters.cpp
@@ -42,17 +42,14 @@
 /*  1.  This function may be interrupted by the LVREV_Process function                  */
 /*                                                                                      */
 /****************************************************************************************/
-LVREV_ReturnStatus_en LVREV_GetControlParameters(LVREV_Handle_t           hInstance,
-                                                 LVREV_ControlParams_st   *pControlParams)
-{
-
-    LVREV_Instance_st  *pLVREV_Private = (LVREV_Instance_st *)hInstance;
+LVREV_ReturnStatus_en LVREV_GetControlParameters(LVREV_Handle_t hInstance,
+                                                 LVREV_ControlParams_st* pControlParams) {
+    LVREV_Instance_st* pLVREV_Private = (LVREV_Instance_st*)hInstance;
 
     /*
      * Check for error conditions
      */
-    if((hInstance == LVM_NULL) || (pControlParams == LVM_NULL))
-    {
+    if ((hInstance == LVM_NULL) || (pControlParams == LVM_NULL)) {
         return LVREV_NULLADDRESS;
     }
 
diff --git a/media/libeffects/lvm/lib/Reverb/src/LVREV_GetInstanceHandle.cpp b/media/libeffects/lvm/lib/Reverb/src/LVREV_GetInstanceHandle.cpp
index 68f883a..b5db23b 100644
--- a/media/libeffects/lvm/lib/Reverb/src/LVREV_GetInstanceHandle.cpp
+++ b/media/libeffects/lvm/lib/Reverb/src/LVREV_GetInstanceHandle.cpp
@@ -45,35 +45,29 @@
 /* NOTES:                                                                               */
 /*                                                                                      */
 /****************************************************************************************/
-LVREV_ReturnStatus_en LVREV_GetInstanceHandle(LVREV_Handle_t            *phInstance,
-                                              LVREV_MemoryTable_st      *pMemoryTable,
-                                              LVREV_InstanceParams_st   *pInstanceParams)
-{
-
-    INST_ALLOC              SlowData;
-    INST_ALLOC              FastData;
-    INST_ALLOC              FastCoef;
-    INST_ALLOC              Temporary;
-    LVREV_Instance_st       *pLVREV_Private;
-    LVM_INT16               i;
-    LVM_UINT16              MaxBlockSize;
+LVREV_ReturnStatus_en LVREV_GetInstanceHandle(LVREV_Handle_t* phInstance,
+                                              LVREV_MemoryTable_st* pMemoryTable,
+                                              LVREV_InstanceParams_st* pInstanceParams) {
+    INST_ALLOC SlowData;
+    INST_ALLOC FastData;
+    INST_ALLOC FastCoef;
+    INST_ALLOC Temporary;
+    LVREV_Instance_st* pLVREV_Private;
+    LVM_INT16 i;
+    LVM_UINT16 MaxBlockSize;
 
     /*
      * Check for error conditions
      */
     /* Check for NULL pointers */
-    if((phInstance == LVM_NULL) || (pMemoryTable == LVM_NULL) || (pInstanceParams == LVM_NULL))
-    {
+    if ((phInstance == LVM_NULL) || (pMemoryTable == LVM_NULL) || (pInstanceParams == LVM_NULL)) {
         return LVREV_NULLADDRESS;
     }
     /* Check the memory table for NULL pointers */
-    for (i = 0; i < LVREV_NR_MEMORY_REGIONS; i++)
-    {
-        if (pMemoryTable->Region[i].Size!=0)
-        {
-            if (pMemoryTable->Region[i].pBaseAddress==LVM_NULL)
-            {
-                return(LVREV_NULLADDRESS);
+    for (i = 0; i < LVREV_NR_MEMORY_REGIONS; i++) {
+        if (pMemoryTable->Region[i].Size != 0) {
+            if (pMemoryTable->Region[i].pBaseAddress == LVM_NULL) {
+                return (LVREV_NULLADDRESS);
             }
         }
     }
@@ -82,101 +76,81 @@
      * Check all instance parameters are in range
      */
     /* Check for a non-zero block size */
-    if (pInstanceParams->MaxBlockSize == 0)
-    {
+    if (pInstanceParams->MaxBlockSize == 0) {
         return LVREV_OUTOFRANGE;
     }
 
     /* Check for a valid number of delay lines */
-    if ((pInstanceParams->NumDelays != LVREV_DELAYLINES_1)&&
-        (pInstanceParams->NumDelays != LVREV_DELAYLINES_2)&&
-        (pInstanceParams->NumDelays != LVREV_DELAYLINES_4))
-    {
+    if ((pInstanceParams->NumDelays != LVREV_DELAYLINES_1) &&
+        (pInstanceParams->NumDelays != LVREV_DELAYLINES_2) &&
+        (pInstanceParams->NumDelays != LVREV_DELAYLINES_4)) {
         return LVREV_OUTOFRANGE;
     }
 
     /*
      * Initialise the InstAlloc instances
      */
-    InstAlloc_Init(&SlowData,  pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].pBaseAddress);
-    InstAlloc_Init(&FastData,  pMemoryTable->Region[LVM_PERSISTENT_FAST_DATA].pBaseAddress);
-    InstAlloc_Init(&FastCoef,  pMemoryTable->Region[LVM_PERSISTENT_FAST_COEF].pBaseAddress);
+    InstAlloc_Init(&SlowData, pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].pBaseAddress);
+    InstAlloc_Init(&FastData, pMemoryTable->Region[LVM_PERSISTENT_FAST_DATA].pBaseAddress);
+    InstAlloc_Init(&FastCoef, pMemoryTable->Region[LVM_PERSISTENT_FAST_COEF].pBaseAddress);
     InstAlloc_Init(&Temporary, pMemoryTable->Region[LVM_TEMPORARY_FAST].pBaseAddress);
 
     /*
      * Zero all memory regions
      */
-    LoadConst_Float(0,
-                    (LVM_FLOAT *)pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].pBaseAddress,
-                    (LVM_INT16)((pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].Size) / \
-                                                    sizeof(LVM_FLOAT)));
-    LoadConst_Float(0,
-                    (LVM_FLOAT *)pMemoryTable->Region[LVM_PERSISTENT_FAST_DATA].pBaseAddress,
-                    (LVM_INT16)((pMemoryTable->Region[LVM_PERSISTENT_FAST_DATA].Size) / \
-                                                    sizeof(LVM_FLOAT)));
-    LoadConst_Float(0,
-                    (LVM_FLOAT *)pMemoryTable->Region[LVM_PERSISTENT_FAST_COEF].pBaseAddress,
-                    (LVM_INT16)((pMemoryTable->Region[LVM_PERSISTENT_FAST_COEF].Size) / \
-                                                    sizeof(LVM_FLOAT)));
-    LoadConst_Float(0,
-                    (LVM_FLOAT *)pMemoryTable->Region[LVM_TEMPORARY_FAST].pBaseAddress,
-                    (LVM_INT16)((pMemoryTable->Region[LVM_TEMPORARY_FAST].Size) / \
-                                                    sizeof(LVM_FLOAT)));
+    LoadConst_Float(
+            0, (LVM_FLOAT*)pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].pBaseAddress,
+            (LVM_INT16)((pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].Size) / sizeof(LVM_FLOAT)));
+    LoadConst_Float(
+            0, (LVM_FLOAT*)pMemoryTable->Region[LVM_PERSISTENT_FAST_DATA].pBaseAddress,
+            (LVM_INT16)((pMemoryTable->Region[LVM_PERSISTENT_FAST_DATA].Size) / sizeof(LVM_FLOAT)));
+    LoadConst_Float(
+            0, (LVM_FLOAT*)pMemoryTable->Region[LVM_PERSISTENT_FAST_COEF].pBaseAddress,
+            (LVM_INT16)((pMemoryTable->Region[LVM_PERSISTENT_FAST_COEF].Size) / sizeof(LVM_FLOAT)));
+    LoadConst_Float(
+            0, (LVM_FLOAT*)pMemoryTable->Region[LVM_TEMPORARY_FAST].pBaseAddress,
+            (LVM_INT16)((pMemoryTable->Region[LVM_TEMPORARY_FAST].Size) / sizeof(LVM_FLOAT)));
     /*
      * Set the instance handle if not already initialised
      */
-    if (*phInstance == LVM_NULL)
-    {
+    if (*phInstance == LVM_NULL) {
         *phInstance = InstAlloc_AddMember(&SlowData, sizeof(LVREV_Instance_st));
     }
-    pLVREV_Private              =(LVREV_Instance_st *)*phInstance;
+    pLVREV_Private = (LVREV_Instance_st*)*phInstance;
     pLVREV_Private->MemoryTable = *pMemoryTable;
 
-    if(pInstanceParams->NumDelays ==LVREV_DELAYLINES_4)
-    {
+    if (pInstanceParams->NumDelays == LVREV_DELAYLINES_4) {
         MaxBlockSize = LVREV_MAX_AP3_DELAY;
-    }
-    else if(pInstanceParams->NumDelays ==LVREV_DELAYLINES_2)
-    {
+    } else if (pInstanceParams->NumDelays == LVREV_DELAYLINES_2) {
         MaxBlockSize = LVREV_MAX_AP1_DELAY;
-    }
-    else
-    {
+    } else {
         MaxBlockSize = LVREV_MAX_AP0_DELAY;
     }
 
-    if(MaxBlockSize>pInstanceParams->MaxBlockSize)
-    {
-        MaxBlockSize=pInstanceParams->MaxBlockSize;
+    if (MaxBlockSize > pInstanceParams->MaxBlockSize) {
+        MaxBlockSize = pInstanceParams->MaxBlockSize;
     }
 
     /*
      * Set the data, coefficient and temporary memory pointers
      */
     /* Fast data memory base address */
-    pLVREV_Private->pFastData = (LVREV_FastData_st *)
-        InstAlloc_AddMember(&FastData, sizeof(LVREV_FastData_st));
-    if(pInstanceParams->NumDelays == LVREV_DELAYLINES_4)
-    {
-        pLVREV_Private->pDelay_T[3]     =
-            (LVM_FLOAT *)InstAlloc_AddMember(&FastData, LVREV_MAX_T3_DELAY * \
-                                                              sizeof(LVM_FLOAT));
-        pLVREV_Private->pDelay_T[2]     =
-            (LVM_FLOAT *)InstAlloc_AddMember(&FastData, LVREV_MAX_T2_DELAY * \
-                                                              sizeof(LVM_FLOAT));
-        pLVREV_Private->pDelay_T[1]     =
-            (LVM_FLOAT *)InstAlloc_AddMember(&FastData, LVREV_MAX_T1_DELAY * \
-                                                              sizeof(LVM_FLOAT));
-        pLVREV_Private->pDelay_T[0]     =
-            (LVM_FLOAT *)InstAlloc_AddMember(&FastData, LVREV_MAX_T0_DELAY * \
-                                                              sizeof(LVM_FLOAT));
+    pLVREV_Private->pFastData =
+            (LVREV_FastData_st*)InstAlloc_AddMember(&FastData, sizeof(LVREV_FastData_st));
+    if (pInstanceParams->NumDelays == LVREV_DELAYLINES_4) {
+        pLVREV_Private->pDelay_T[3] =
+                (LVM_FLOAT*)InstAlloc_AddMember(&FastData, LVREV_MAX_T3_DELAY * sizeof(LVM_FLOAT));
+        pLVREV_Private->pDelay_T[2] =
+                (LVM_FLOAT*)InstAlloc_AddMember(&FastData, LVREV_MAX_T2_DELAY * sizeof(LVM_FLOAT));
+        pLVREV_Private->pDelay_T[1] =
+                (LVM_FLOAT*)InstAlloc_AddMember(&FastData, LVREV_MAX_T1_DELAY * sizeof(LVM_FLOAT));
+        pLVREV_Private->pDelay_T[0] =
+                (LVM_FLOAT*)InstAlloc_AddMember(&FastData, LVREV_MAX_T0_DELAY * sizeof(LVM_FLOAT));
 
-        for(i = 0; i < 4; i++)
-        {
+        for (i = 0; i < 4; i++) {
             /* Scratch for each delay line output */
-            pLVREV_Private->pScratchDelayLine[i] = (LVM_FLOAT *)InstAlloc_AddMember(&Temporary,
-                                                                       sizeof(LVM_FLOAT) * \
-                                                                       MaxBlockSize);
+            pLVREV_Private->pScratchDelayLine[i] =
+                    (LVM_FLOAT*)InstAlloc_AddMember(&Temporary, sizeof(LVM_FLOAT) * MaxBlockSize);
         }
 
         LoadConst_Float(0, pLVREV_Private->pDelay_T[3], LVREV_MAX_T3_DELAY);
@@ -185,60 +159,50 @@
         LoadConst_Float(0, pLVREV_Private->pDelay_T[0], LVREV_MAX_T0_DELAY);
     }
 
-    if(pInstanceParams->NumDelays == LVREV_DELAYLINES_2)
-    {
-        pLVREV_Private->pDelay_T[1]  = (LVM_FLOAT *)
-                InstAlloc_AddMember(&FastData, LVREV_MAX_T1_DELAY * \
-                                                           sizeof(LVM_FLOAT));
-        pLVREV_Private->pDelay_T[0]  = (LVM_FLOAT *)
-                InstAlloc_AddMember(&FastData, LVREV_MAX_T0_DELAY * \
-                                                           sizeof(LVM_FLOAT));
+    if (pInstanceParams->NumDelays == LVREV_DELAYLINES_2) {
+        pLVREV_Private->pDelay_T[1] =
+                (LVM_FLOAT*)InstAlloc_AddMember(&FastData, LVREV_MAX_T1_DELAY * sizeof(LVM_FLOAT));
+        pLVREV_Private->pDelay_T[0] =
+                (LVM_FLOAT*)InstAlloc_AddMember(&FastData, LVREV_MAX_T0_DELAY * sizeof(LVM_FLOAT));
 
-        for(i = 0; i < 2; i++)
-        {
+        for (i = 0; i < 2; i++) {
             /* Scratch for each delay line output */
-            pLVREV_Private->pScratchDelayLine[i] = (LVM_FLOAT *)InstAlloc_AddMember(&Temporary,
-                                                                       sizeof(LVM_FLOAT) * \
-                                                                       MaxBlockSize);
+            pLVREV_Private->pScratchDelayLine[i] =
+                    (LVM_FLOAT*)InstAlloc_AddMember(&Temporary, sizeof(LVM_FLOAT) * MaxBlockSize);
         }
 
         LoadConst_Float(0, pLVREV_Private->pDelay_T[1], (LVM_INT16)LVREV_MAX_T1_DELAY);
         LoadConst_Float(0, pLVREV_Private->pDelay_T[0], (LVM_INT16)LVREV_MAX_T0_DELAY);
     }
 
-    if(pInstanceParams->NumDelays == LVREV_DELAYLINES_1)
-    {
-        pLVREV_Private->pDelay_T[0]  = (LVM_FLOAT *)InstAlloc_AddMember(&FastData,
-                                                           LVREV_MAX_T0_DELAY * sizeof(LVM_FLOAT));
+    if (pInstanceParams->NumDelays == LVREV_DELAYLINES_1) {
+        pLVREV_Private->pDelay_T[0] =
+                (LVM_FLOAT*)InstAlloc_AddMember(&FastData, LVREV_MAX_T0_DELAY * sizeof(LVM_FLOAT));
 
-        for(i = 0; i < 1; i++)
-        {
+        for (i = 0; i < 1; i++) {
             /* Scratch for each delay line output */
-            pLVREV_Private->pScratchDelayLine[i] = (LVM_FLOAT *)InstAlloc_AddMember(&Temporary,
-                                                                       sizeof(LVM_FLOAT) * \
-                                                                       MaxBlockSize);
+            pLVREV_Private->pScratchDelayLine[i] =
+                    (LVM_FLOAT*)InstAlloc_AddMember(&Temporary, sizeof(LVM_FLOAT) * MaxBlockSize);
         }
 
         LoadConst_Float(0, pLVREV_Private->pDelay_T[0], (LVM_INT16)LVREV_MAX_T0_DELAY);
     }
     /* All-pass delay buffer addresses and sizes */
-    pLVREV_Private->T[0]         = LVREV_MAX_T0_DELAY;
-    pLVREV_Private->T[1]         = LVREV_MAX_T1_DELAY;
-    pLVREV_Private->T[2]         = LVREV_MAX_T2_DELAY;
-    pLVREV_Private->T[3]         = LVREV_MAX_T3_DELAY;
-    pLVREV_Private->AB_Selection = 1;       /* Select smoothing A to B */
+    pLVREV_Private->T[0] = LVREV_MAX_T0_DELAY;
+    pLVREV_Private->T[1] = LVREV_MAX_T1_DELAY;
+    pLVREV_Private->T[2] = LVREV_MAX_T2_DELAY;
+    pLVREV_Private->T[3] = LVREV_MAX_T3_DELAY;
+    pLVREV_Private->AB_Selection = 1; /* Select smoothing A to B */
 
     /* Fast coefficient memory base address */
-    pLVREV_Private->pFastCoef       =
-        (LVREV_FastCoef_st *)InstAlloc_AddMember(&FastCoef, sizeof(LVREV_FastCoef_st));
+    pLVREV_Private->pFastCoef =
+            (LVREV_FastCoef_st*)InstAlloc_AddMember(&FastCoef, sizeof(LVREV_FastCoef_st));
     /* General purpose scratch */
-    pLVREV_Private->pScratch        =
-            (LVM_FLOAT *)InstAlloc_AddMember(&Temporary, sizeof(LVM_FLOAT) * \
-                                                          MaxBlockSize);
+    pLVREV_Private->pScratch =
+            (LVM_FLOAT*)InstAlloc_AddMember(&Temporary, sizeof(LVM_FLOAT) * MaxBlockSize);
     /* Mono->stereo input save for end mix */
-    pLVREV_Private->pInputSave      =
-            (LVM_FLOAT *)InstAlloc_AddMember(&Temporary, 2 * sizeof(LVM_FLOAT) * \
-                                                          MaxBlockSize);
+    pLVREV_Private->pInputSave =
+            (LVM_FLOAT*)InstAlloc_AddMember(&Temporary, 2 * sizeof(LVM_FLOAT) * MaxBlockSize);
     LoadConst_Float(0, pLVREV_Private->pInputSave, (LVM_INT16)(MaxBlockSize * 2));
 
     /*
@@ -249,91 +213,90 @@
     /*
      * Set the parameters to invalid
      */
-    pLVREV_Private->CurrentParams.SampleRate    = LVM_FS_INVALID;
+    pLVREV_Private->CurrentParams.SampleRate = LVM_FS_INVALID;
     pLVREV_Private->CurrentParams.OperatingMode = LVM_MODE_DUMMY;
-    pLVREV_Private->CurrentParams.SourceFormat  = LVM_SOURCE_DUMMY;
+    pLVREV_Private->CurrentParams.SourceFormat = LVM_SOURCE_DUMMY;
 
-    pLVREV_Private->bControlPending             = LVM_FALSE;
-    pLVREV_Private->bFirstControl               = LVM_TRUE;
-    pLVREV_Private->bDisableReverb              = LVM_FALSE;
+    pLVREV_Private->bControlPending = LVM_FALSE;
+    pLVREV_Private->bFirstControl = LVM_TRUE;
+    pLVREV_Private->bDisableReverb = LVM_FALSE;
 
     /*
      * Set mixer parameters
      */
-    pLVREV_Private->BypassMixer.CallbackParam2      = 0;
-    pLVREV_Private->BypassMixer.pCallbackHandle2    = pLVREV_Private;
-    pLVREV_Private->BypassMixer.pGeneralPurpose2    = LVM_NULL;
-    pLVREV_Private->BypassMixer.pCallBack2          = BypassMixer_Callback;
-    pLVREV_Private->BypassMixer.CallbackSet2        = LVM_FALSE;
-    pLVREV_Private->BypassMixer.Current2            = 0;
-    pLVREV_Private->BypassMixer.Target2             = 0;
-    pLVREV_Private->BypassMixer.CallbackParam1      = 0;
-    pLVREV_Private->BypassMixer.pCallbackHandle1    = LVM_NULL;
-    pLVREV_Private->BypassMixer.pGeneralPurpose1    = LVM_NULL;
-    pLVREV_Private->BypassMixer.pCallBack1          = LVM_NULL;
-    pLVREV_Private->BypassMixer.CallbackSet1        = LVM_FALSE;
-    pLVREV_Private->BypassMixer.Current1            = 0x00000000;
-    pLVREV_Private->BypassMixer.Target1             = 0x00000000;
+    pLVREV_Private->BypassMixer.CallbackParam2 = 0;
+    pLVREV_Private->BypassMixer.pCallbackHandle2 = pLVREV_Private;
+    pLVREV_Private->BypassMixer.pGeneralPurpose2 = LVM_NULL;
+    pLVREV_Private->BypassMixer.pCallBack2 = BypassMixer_Callback;
+    pLVREV_Private->BypassMixer.CallbackSet2 = LVM_FALSE;
+    pLVREV_Private->BypassMixer.Current2 = 0;
+    pLVREV_Private->BypassMixer.Target2 = 0;
+    pLVREV_Private->BypassMixer.CallbackParam1 = 0;
+    pLVREV_Private->BypassMixer.pCallbackHandle1 = LVM_NULL;
+    pLVREV_Private->BypassMixer.pGeneralPurpose1 = LVM_NULL;
+    pLVREV_Private->BypassMixer.pCallBack1 = LVM_NULL;
+    pLVREV_Private->BypassMixer.CallbackSet1 = LVM_FALSE;
+    pLVREV_Private->BypassMixer.Current1 = 0x00000000;
+    pLVREV_Private->BypassMixer.Target1 = 0x00000000;
 
-    pLVREV_Private->RoomSizeInms                    = 100;  // 100 msec
+    pLVREV_Private->RoomSizeInms = 100;  // 100 msec
 
     /*
      *  Set the output gain mixer parameters
      */
-    pLVREV_Private->GainMixer.CallbackParam      = 0;
-    pLVREV_Private->GainMixer.pCallbackHandle    = LVM_NULL;
-    pLVREV_Private->GainMixer.pGeneralPurpose    = LVM_NULL;
-    pLVREV_Private->GainMixer.pCallBack          = LVM_NULL;
-    pLVREV_Private->GainMixer.CallbackSet        = LVM_FALSE;
-    pLVREV_Private->GainMixer.Current            = 0.03125f;//0x03ffffff;
-    pLVREV_Private->GainMixer.Target             = 0.03125f;//0x03ffffff;
+    pLVREV_Private->GainMixer.CallbackParam = 0;
+    pLVREV_Private->GainMixer.pCallbackHandle = LVM_NULL;
+    pLVREV_Private->GainMixer.pGeneralPurpose = LVM_NULL;
+    pLVREV_Private->GainMixer.pCallBack = LVM_NULL;
+    pLVREV_Private->GainMixer.CallbackSet = LVM_FALSE;
+    pLVREV_Private->GainMixer.Current = 0.03125f;  // 0x03ffffff;
+    pLVREV_Private->GainMixer.Target = 0.03125f;   // 0x03ffffff;
 
     /*
      * Set the All-Pass Filter mixers
      */
-    for (i=0; i<4; i++)
-    {
+    for (i = 0; i < 4; i++) {
         pLVREV_Private->pOffsetA[i] = pLVREV_Private->pDelay_T[i];
         pLVREV_Private->pOffsetB[i] = pLVREV_Private->pDelay_T[i];
         /* Delay tap selection mixer */
-        pLVREV_Private->Mixer_APTaps[i].CallbackParam2   = 0;
+        pLVREV_Private->Mixer_APTaps[i].CallbackParam2 = 0;
         pLVREV_Private->Mixer_APTaps[i].pCallbackHandle2 = LVM_NULL;
         pLVREV_Private->Mixer_APTaps[i].pGeneralPurpose2 = LVM_NULL;
-        pLVREV_Private->Mixer_APTaps[i].pCallBack2       = LVM_NULL;
-        pLVREV_Private->Mixer_APTaps[i].CallbackSet2     = LVM_FALSE;
-        pLVREV_Private->Mixer_APTaps[i].Current2         = 0;
-        pLVREV_Private->Mixer_APTaps[i].Target2          = 0;
-        pLVREV_Private->Mixer_APTaps[i].CallbackParam1   = 0;
+        pLVREV_Private->Mixer_APTaps[i].pCallBack2 = LVM_NULL;
+        pLVREV_Private->Mixer_APTaps[i].CallbackSet2 = LVM_FALSE;
+        pLVREV_Private->Mixer_APTaps[i].Current2 = 0;
+        pLVREV_Private->Mixer_APTaps[i].Target2 = 0;
+        pLVREV_Private->Mixer_APTaps[i].CallbackParam1 = 0;
         pLVREV_Private->Mixer_APTaps[i].pCallbackHandle1 = LVM_NULL;
         pLVREV_Private->Mixer_APTaps[i].pGeneralPurpose1 = LVM_NULL;
-        pLVREV_Private->Mixer_APTaps[i].pCallBack1       = LVM_NULL;
-        pLVREV_Private->Mixer_APTaps[i].CallbackSet1     = LVM_FALSE;
-        pLVREV_Private->Mixer_APTaps[i].Current1         = 0;
-        pLVREV_Private->Mixer_APTaps[i].Target1          = 1;
+        pLVREV_Private->Mixer_APTaps[i].pCallBack1 = LVM_NULL;
+        pLVREV_Private->Mixer_APTaps[i].CallbackSet1 = LVM_FALSE;
+        pLVREV_Private->Mixer_APTaps[i].Current1 = 0;
+        pLVREV_Private->Mixer_APTaps[i].Target1 = 1;
         /* Feedforward mixer */
-        pLVREV_Private->Mixer_SGFeedforward[i].CallbackParam   = 0;
+        pLVREV_Private->Mixer_SGFeedforward[i].CallbackParam = 0;
         pLVREV_Private->Mixer_SGFeedforward[i].pCallbackHandle = LVM_NULL;
         pLVREV_Private->Mixer_SGFeedforward[i].pGeneralPurpose = LVM_NULL;
-        pLVREV_Private->Mixer_SGFeedforward[i].pCallBack       = LVM_NULL;
-        pLVREV_Private->Mixer_SGFeedforward[i].CallbackSet     = LVM_FALSE;
-        pLVREV_Private->Mixer_SGFeedforward[i].Current         = 0;
-        pLVREV_Private->Mixer_SGFeedforward[i].Target          = 0;
+        pLVREV_Private->Mixer_SGFeedforward[i].pCallBack = LVM_NULL;
+        pLVREV_Private->Mixer_SGFeedforward[i].CallbackSet = LVM_FALSE;
+        pLVREV_Private->Mixer_SGFeedforward[i].Current = 0;
+        pLVREV_Private->Mixer_SGFeedforward[i].Target = 0;
         /* Feedback mixer */
-        pLVREV_Private->Mixer_SGFeedback[i].CallbackParam   = 0;
+        pLVREV_Private->Mixer_SGFeedback[i].CallbackParam = 0;
         pLVREV_Private->Mixer_SGFeedback[i].pCallbackHandle = LVM_NULL;
         pLVREV_Private->Mixer_SGFeedback[i].pGeneralPurpose = LVM_NULL;
-        pLVREV_Private->Mixer_SGFeedback[i].pCallBack       = LVM_NULL;
-        pLVREV_Private->Mixer_SGFeedback[i].CallbackSet     = LVM_FALSE;
-        pLVREV_Private->Mixer_SGFeedback[i].Current         = 0;
-        pLVREV_Private->Mixer_SGFeedback[i].Target          = 0;
+        pLVREV_Private->Mixer_SGFeedback[i].pCallBack = LVM_NULL;
+        pLVREV_Private->Mixer_SGFeedback[i].CallbackSet = LVM_FALSE;
+        pLVREV_Private->Mixer_SGFeedback[i].Current = 0;
+        pLVREV_Private->Mixer_SGFeedback[i].Target = 0;
         /* Feedback gain mixer */
-        pLVREV_Private->FeedbackMixer[i].CallbackParam    = 0;
-        pLVREV_Private->FeedbackMixer[i].pCallbackHandle  = LVM_NULL;
-        pLVREV_Private->FeedbackMixer[i].pGeneralPurpose  = LVM_NULL;
-        pLVREV_Private->FeedbackMixer[i].pCallBack        = LVM_NULL;
-        pLVREV_Private->FeedbackMixer[i].CallbackSet      = LVM_FALSE;
-        pLVREV_Private->FeedbackMixer[i].Current          = 0;
-        pLVREV_Private->FeedbackMixer[i].Target           = 0;
+        pLVREV_Private->FeedbackMixer[i].CallbackParam = 0;
+        pLVREV_Private->FeedbackMixer[i].pCallbackHandle = LVM_NULL;
+        pLVREV_Private->FeedbackMixer[i].pGeneralPurpose = LVM_NULL;
+        pLVREV_Private->FeedbackMixer[i].pCallBack = LVM_NULL;
+        pLVREV_Private->FeedbackMixer[i].CallbackSet = LVM_FALSE;
+        pLVREV_Private->FeedbackMixer[i].Current = 0;
+        pLVREV_Private->FeedbackMixer[i].Target = 0;
     }
     /* Delay tap index */
     pLVREV_Private->A_DelaySize[0] = LVREV_MAX_AP0_DELAY;
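
For readers unfamiliar with the InstAlloc helpers used throughout this function: judging from their use, InstAlloc_Init() records a base address, InstAlloc_AddMember() hands out consecutive sub-blocks of that region, and (in LVREV_GetMemoryTable below) InstAlloc_GetTotal() reports how many bytes were consumed. The stand-in below only illustrates those assumed bump-allocator semantics; it is not the library's implementation, and real code would also handle alignment.

#include <cstddef>
#include <cstdint>
#include <cstdio>

struct BumpAlloc {
    uint8_t* base; /* start of the caller-supplied region, or nullptr when only measuring */
    size_t used;   /* bytes handed out so far */
};

void Bump_Init(BumpAlloc* a, void* baseAddress) {
    a->base = static_cast<uint8_t*>(baseAddress);
    a->used = 0;
}

void* Bump_AddMember(BumpAlloc* a, size_t size) {
    void* p = (a->base != nullptr) ? a->base + a->used : nullptr;
    a->used += size; /* with a null base this pass simply measures the total */
    return p;
}

size_t Bump_GetTotal(const BumpAlloc* a) {
    return a->used;
}

int main() {
    /* Measuring pass, as LVREV_GetMemoryTable does with a NULL base address. */
    BumpAlloc measure;
    Bump_Init(&measure, nullptr);
    Bump_AddMember(&measure, 128); /* e.g. instance structure */
    Bump_AddMember(&measure, 64);  /* e.g. a delay buffer */
    printf("bytes needed: %zu\n", Bump_GetTotal(&measure));
    return 0;
}
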
diff --git a/media/libeffects/lvm/lib/Reverb/src/LVREV_GetMemoryTable.cpp b/media/libeffects/lvm/lib/Reverb/src/LVREV_GetMemoryTable.cpp
index f59933c..2c1e04d 100644
--- a/media/libeffects/lvm/lib/Reverb/src/LVREV_GetMemoryTable.cpp
+++ b/media/libeffects/lvm/lib/Reverb/src/LVREV_GetMemoryTable.cpp
@@ -56,46 +56,40 @@
 /*  1.  This function may be interrupted by the LVREV_Process function                  */
 /*                                                                                      */
 /****************************************************************************************/
-LVREV_ReturnStatus_en LVREV_GetMemoryTable(LVREV_Handle_t           hInstance,
-                                           LVREV_MemoryTable_st     *pMemoryTable,
-                                           LVREV_InstanceParams_st  *pInstanceParams)
-{
-
-    INST_ALLOC              SlowData;
-    INST_ALLOC              FastData;
-    INST_ALLOC              FastCoef;
-    INST_ALLOC              Temporary;
-    LVM_INT16               i;
-    LVM_UINT16              MaxBlockSize;
+LVREV_ReturnStatus_en LVREV_GetMemoryTable(LVREV_Handle_t hInstance,
+                                           LVREV_MemoryTable_st* pMemoryTable,
+                                           LVREV_InstanceParams_st* pInstanceParams) {
+    INST_ALLOC SlowData;
+    INST_ALLOC FastData;
+    INST_ALLOC FastCoef;
+    INST_ALLOC Temporary;
+    LVM_INT16 i;
+    LVM_UINT16 MaxBlockSize;
 
     /*
      * Check for error conditions
      */
     /* Check for NULL pointer */
-    if (pMemoryTable == LVM_NULL)
-    {
-        return(LVREV_NULLADDRESS);
+    if (pMemoryTable == LVM_NULL) {
+        return (LVREV_NULLADDRESS);
     }
 
     /*
      * Check all instance parameters are in range
      */
-    if (pInstanceParams != LVM_NULL)
-    {
+    if (pInstanceParams != LVM_NULL) {
         /*
          * Call for memory allocation, so check the parameters
          */
         /* Check for a non-zero block size */
-        if (pInstanceParams->MaxBlockSize == 0)
-        {
+        if (pInstanceParams->MaxBlockSize == 0) {
             return LVREV_OUTOFRANGE;
         }
 
         /* Check for a valid number of delay lines */
         if ((pInstanceParams->NumDelays != LVREV_DELAYLINES_1) &&
             (pInstanceParams->NumDelays != LVREV_DELAYLINES_2) &&
-            (pInstanceParams->NumDelays != LVREV_DELAYLINES_4))
-        {
+            (pInstanceParams->NumDelays != LVREV_DELAYLINES_4)) {
             return LVREV_OUTOFRANGE;
         }
     }
@@ -103,86 +97,75 @@
     /*
      * Initialise the InstAlloc instances
      */
-    InstAlloc_Init(&SlowData,  (void *)LVM_NULL);
-    InstAlloc_Init(&FastData,  (void *)LVM_NULL);
-    InstAlloc_Init(&FastCoef,  (void *)LVM_NULL);
-    InstAlloc_Init(&Temporary, (void *)LVM_NULL);
+    InstAlloc_Init(&SlowData, (void*)LVM_NULL);
+    InstAlloc_Init(&FastData, (void*)LVM_NULL);
+    InstAlloc_Init(&FastCoef, (void*)LVM_NULL);
+    InstAlloc_Init(&Temporary, (void*)LVM_NULL);
 
     /*
      * Fill in the memory table
      */
-    if (hInstance == LVM_NULL)
-    {
+    if (hInstance == LVM_NULL) {
         /*
          * Check for null pointers
          */
-        if (pInstanceParams == LVM_NULL)
-        {
-            return(LVREV_NULLADDRESS);
+        if (pInstanceParams == LVM_NULL) {
+            return (LVREV_NULLADDRESS);
         }
 
         /*
          * Select the maximum internal block size
          */
-        if(pInstanceParams->NumDelays ==LVREV_DELAYLINES_4)
-        {
+        if (pInstanceParams->NumDelays == LVREV_DELAYLINES_4) {
             MaxBlockSize = LVREV_MAX_AP3_DELAY;
-        }
-        else if(pInstanceParams->NumDelays ==LVREV_DELAYLINES_2)
-        {
+        } else if (pInstanceParams->NumDelays == LVREV_DELAYLINES_2) {
             MaxBlockSize = LVREV_MAX_AP1_DELAY;
-        }
-        else
-        {
+        } else {
             MaxBlockSize = LVREV_MAX_AP0_DELAY;
         }
 
-        if(MaxBlockSize>pInstanceParams->MaxBlockSize)
-        {
-            MaxBlockSize=pInstanceParams->MaxBlockSize;
+        if (MaxBlockSize > pInstanceParams->MaxBlockSize) {
+            MaxBlockSize = pInstanceParams->MaxBlockSize;
         }
 
         /*
          * Slow data memory
          */
         InstAlloc_AddMember(&SlowData, sizeof(LVREV_Instance_st));
-        pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].Size         = InstAlloc_GetTotal(&SlowData);
-        pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].Type         = LVM_PERSISTENT_SLOW_DATA;
+        pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].Size = InstAlloc_GetTotal(&SlowData);
+        pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].Type = LVM_PERSISTENT_SLOW_DATA;
         pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].pBaseAddress = LVM_NULL;
 
         /*
          * Persistent fast data memory
          */
         InstAlloc_AddMember(&FastData, sizeof(LVREV_FastData_st));
-        if(pInstanceParams->NumDelays == LVREV_DELAYLINES_4)
-        {
+        if (pInstanceParams->NumDelays == LVREV_DELAYLINES_4) {
             InstAlloc_AddMember(&FastData, LVREV_MAX_T3_DELAY * sizeof(LVM_FLOAT));
             InstAlloc_AddMember(&FastData, LVREV_MAX_T2_DELAY * sizeof(LVM_FLOAT));
             InstAlloc_AddMember(&FastData, LVREV_MAX_T1_DELAY * sizeof(LVM_FLOAT));
             InstAlloc_AddMember(&FastData, LVREV_MAX_T0_DELAY * sizeof(LVM_FLOAT));
         }
 
-        if(pInstanceParams->NumDelays == LVREV_DELAYLINES_2)
-        {
+        if (pInstanceParams->NumDelays == LVREV_DELAYLINES_2) {
             InstAlloc_AddMember(&FastData, LVREV_MAX_T1_DELAY * sizeof(LVM_FLOAT));
             InstAlloc_AddMember(&FastData, LVREV_MAX_T0_DELAY * sizeof(LVM_FLOAT));
         }
 
-        if(pInstanceParams->NumDelays == LVREV_DELAYLINES_1)
-        {
+        if (pInstanceParams->NumDelays == LVREV_DELAYLINES_1) {
             InstAlloc_AddMember(&FastData, LVREV_MAX_T0_DELAY * sizeof(LVM_FLOAT));
         }
 
-        pMemoryTable->Region[LVM_PERSISTENT_FAST_DATA].Size         = InstAlloc_GetTotal(&FastData);
-        pMemoryTable->Region[LVM_PERSISTENT_FAST_DATA].Type         = LVM_PERSISTENT_FAST_DATA;
+        pMemoryTable->Region[LVM_PERSISTENT_FAST_DATA].Size = InstAlloc_GetTotal(&FastData);
+        pMemoryTable->Region[LVM_PERSISTENT_FAST_DATA].Type = LVM_PERSISTENT_FAST_DATA;
         pMemoryTable->Region[LVM_PERSISTENT_FAST_DATA].pBaseAddress = LVM_NULL;
 
         /*
          * Persistent fast coefficient memory
          */
         InstAlloc_AddMember(&FastCoef, sizeof(LVREV_FastCoef_st));
-        pMemoryTable->Region[LVM_PERSISTENT_FAST_COEF].Size         = InstAlloc_GetTotal(&FastCoef);
-        pMemoryTable->Region[LVM_PERSISTENT_FAST_COEF].Type         = LVM_PERSISTENT_FAST_COEF;
+        pMemoryTable->Region[LVM_PERSISTENT_FAST_COEF].Size = InstAlloc_GetTotal(&FastCoef);
+        pMemoryTable->Region[LVM_PERSISTENT_FAST_COEF].Type = LVM_PERSISTENT_FAST_COEF;
         pMemoryTable->Region[LVM_PERSISTENT_FAST_COEF].pBaseAddress = LVM_NULL;
 
         /*
@@ -192,41 +175,33 @@
         InstAlloc_AddMember(&Temporary, sizeof(LVM_FLOAT) * MaxBlockSize);
         /* Mono->stereo input saved for end mix */
         InstAlloc_AddMember(&Temporary, 2 * sizeof(LVM_FLOAT) * MaxBlockSize);
-        if(pInstanceParams->NumDelays == LVREV_DELAYLINES_4)
-        {
-            for(i=0; i<4; i++)
-            {
+        if (pInstanceParams->NumDelays == LVREV_DELAYLINES_4) {
+            for (i = 0; i < 4; i++) {
                 /* A Scratch buffer for each delay line */
                 InstAlloc_AddMember(&Temporary, sizeof(LVM_FLOAT) * MaxBlockSize);
             }
         }
 
-        if(pInstanceParams->NumDelays == LVREV_DELAYLINES_2)
-        {
-            for(i=0; i<2; i++)
-            {
+        if (pInstanceParams->NumDelays == LVREV_DELAYLINES_2) {
+            for (i = 0; i < 2; i++) {
                 /* A Scratch buffer for each delay line */
                 InstAlloc_AddMember(&Temporary, sizeof(LVM_FLOAT) * MaxBlockSize);
             }
         }
 
-        if(pInstanceParams->NumDelays == LVREV_DELAYLINES_1)
-        {
-            for(i=0; i<1; i++)
-            {
+        if (pInstanceParams->NumDelays == LVREV_DELAYLINES_1) {
+            for (i = 0; i < 1; i++) {
                 /* A Scratch buffer for each delay line */
                 InstAlloc_AddMember(&Temporary, sizeof(LVM_FLOAT) * MaxBlockSize);
             }
         }
 
-        pMemoryTable->Region[LVM_TEMPORARY_FAST].Size         = InstAlloc_GetTotal(&Temporary);
-        pMemoryTable->Region[LVM_TEMPORARY_FAST].Type         = LVM_TEMPORARY_FAST;
+        pMemoryTable->Region[LVM_TEMPORARY_FAST].Size = InstAlloc_GetTotal(&Temporary);
+        pMemoryTable->Region[LVM_TEMPORARY_FAST].Type = LVM_TEMPORARY_FAST;
         pMemoryTable->Region[LVM_TEMPORARY_FAST].pBaseAddress = LVM_NULL;
 
-    }
-    else
-    {
-        LVREV_Instance_st   *pLVREV_Private = (LVREV_Instance_st *)hInstance;
+    } else {
+        LVREV_Instance_st* pLVREV_Private = (LVREV_Instance_st*)hInstance;
 
         /*
          * Read back memory allocation table
@@ -234,7 +209,7 @@
         *pMemoryTable = pLVREV_Private->MemoryTable;
     }
 
-    return(LVREV_SUCCESS);
+    return (LVREV_SUCCESS);
 }
 
 /* End of file */
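
Taken together, the two functions above imply a two-step set-up: query the memory table with a NULL handle to learn the region sizes, allocate each region, then create the instance. The caller-side sketch below assumes the public LVREV.h header and uses plain malloc; error handling is abbreviated and a real caller would free already-allocated regions on failure.

#include <cstdlib>

#include "LVREV.h"

LVREV_Handle_t CreateReverb(LVM_UINT16 maxBlockSize) {
    LVREV_InstanceParams_st params = {}; /* remaining fields left at defaults for brevity */
    params.MaxBlockSize = maxBlockSize;
    params.NumDelays = LVREV_DELAYLINES_4;

    /* Pass 1: hInstance == NULL fills in the required size of each region. */
    LVREV_MemoryTable_st table = {};
    if (LVREV_GetMemoryTable(LVM_NULL, &table, &params) != LVREV_SUCCESS) {
        return LVM_NULL;
    }

    /* The caller provides backing memory for every non-empty region. */
    for (int i = 0; i < LVREV_NR_MEMORY_REGIONS; i++) {
        if (table.Region[i].Size != 0) {
            table.Region[i].pBaseAddress = malloc(table.Region[i].Size);
            if (table.Region[i].pBaseAddress == LVM_NULL) {
                return LVM_NULL; /* simplified: earlier regions are leaked here */
            }
        }
    }

    /* Pass 2: hand the populated table back to create the instance. */
    LVREV_Handle_t handle = LVM_NULL;
    if (LVREV_GetInstanceHandle(&handle, &table, &params) != LVREV_SUCCESS) {
        return LVM_NULL;
    }
    return handle;
}
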
diff --git a/media/libeffects/lvm/lib/Reverb/src/LVREV_Private.h b/media/libeffects/lvm/lib/Reverb/src/LVREV_Private.h
index 2c27c6e..b6edb03 100644
--- a/media/libeffects/lvm/lib/Reverb/src/LVREV_Private.h
+++ b/media/libeffects/lvm/lib/Reverb/src/LVREV_Private.h
@@ -37,63 +37,63 @@
 /*                                                                                      */
 /****************************************************************************************/
 /* General */
-#define ONE_OVER_SQRT_TWO            0.707107f           /* 1/sqrt(2) * 2^15 */
-#define LVREV_B_8_on_1000               0.008f           /* 0.8 * 2^31 */
-#define LVREV_HEADROOM                   0.25f           /* -12dB * 2^15 */
-#define LVREV_2_9_INQ29                   2.9f           /* 2.9 in Q29 format */
-#define LVREV_MIN3DB                0.7079457f           /* -3dB in Q15 format */
+#define ONE_OVER_SQRT_TWO 0.707107f /* 1/sqrt(2) * 2^15 */
+#define LVREV_B_8_on_1000 0.008f    /* 0.8 * 2^31 */
+#define LVREV_HEADROOM 0.25f        /* -12dB * 2^15 */
+#define LVREV_2_9_INQ29 2.9f        /* 2.9 in Q29 format */
+#define LVREV_MIN3DB 0.7079457f     /* -3dB in Q15 format */
 
 /* Internal constants */
-#define LVREV_LP_Poly_Order                 4
-#define LVREV_LP_Poly_Shift                 5
+#define LVREV_LP_Poly_Order 4
+#define LVREV_LP_Poly_Shift 5
 
-#define LVREV_T60_SCALE                0.000142f           /*(1/7000) */
+#define LVREV_T60_SCALE 0.000142f /* (1/7000) */
 
-#define LVREV_T_3_Power_0_on_4              1.0f
-#define LVREV_T_3_Power_1_on_4         1.316074f
-#define LVREV_T_3_Power_2_on_4         1.732051f
-#define LVREV_T_3_Power_3_on_4         2.279507f
-#define LVREV_T_3_Power_minus0_on_4         1.0f        /* 3^(-0/4) * 2^15 */
-#define LVREV_T_3_Power_minus1_on_4    0.759836f        /* 3^(-1/4) * 2^15 */
-#define LVREV_T_3_Power_minus2_on_4    0.577350f        /* 3^(-2/4) * 2^15 */
-#define LVREV_T_3_Power_minus3_on_4    0.438691f        /* 3^(-3/4) * 2^15 */
+#define LVREV_T_3_Power_0_on_4 1.0f
+#define LVREV_T_3_Power_1_on_4 1.316074f
+#define LVREV_T_3_Power_2_on_4 1.732051f
+#define LVREV_T_3_Power_3_on_4 2.279507f
+#define LVREV_T_3_Power_minus0_on_4 1.0f      /* 3^(-0/4) * 2^15 */
+#define LVREV_T_3_Power_minus1_on_4 0.759836f /* 3^(-1/4) * 2^15 */
+#define LVREV_T_3_Power_minus2_on_4 0.577350f /* 3^(-2/4) * 2^15 */
+#define LVREV_T_3_Power_minus3_on_4 0.438691f /* 3^(-3/4) * 2^15 */
 
-    /* ((192000 * 120 * LVREV_T_3_Power_minus3_on_4) >> 15) / 1000 */
-#define LVREV_MAX_T3_DELAY               10108
-    /* ((192000 * 120 * LVREV_T_3_Power_minus2_on_4) >> 15) / 1000 */
-#define LVREV_MAX_T2_DELAY               13304
-    /* ((192000 * 120 * LVREV_T_3_Power_minus1_on_4) >> 15) / 1000 */
-#define LVREV_MAX_T1_DELAY               17508
-    /* ((192000 * 120 * LVREV_T_3_Power_minus0_on_4) >> 15) / 1000 */
-#define LVREV_MAX_T0_DELAY               23040
-    /* ((192000 * 120 * LVREV_T_3_Power_minus3_on_4) >> 15) / 1500 */
-#define LVREV_MAX_AP3_DELAY               6740
-    /* ((192000 * 120 * LVREV_T_3_Power_minus2_on_4) >> 15) / 1500 */
-#define LVREV_MAX_AP2_DELAY               8872
-    /* ((192000 * 120 * LVREV_T_3_Power_minus1_on_4) >> 15) / 1500 */
-#define LVREV_MAX_AP1_DELAY              11672
-    /* ((192000 * 120 * LVREV_T_3_Power_minus0_on_4) >> 15) / 1500 */
-#define LVREV_MAX_AP0_DELAY              15360
+/* ((192000 * 120 * LVREV_T_3_Power_minus3_on_4) >> 15) / 1000 */
+#define LVREV_MAX_T3_DELAY 10108
+/* ((192000 * 120 * LVREV_T_3_Power_minus2_on_4) >> 15) / 1000 */
+#define LVREV_MAX_T2_DELAY 13304
+/* ((192000 * 120 * LVREV_T_3_Power_minus1_on_4) >> 15) / 1000 */
+#define LVREV_MAX_T1_DELAY 17508
+/* ((192000 * 120 * LVREV_T_3_Power_minus0_on_4) >> 15) / 1000 */
+#define LVREV_MAX_T0_DELAY 23040
+/* ((192000 * 120 * LVREV_T_3_Power_minus3_on_4) >> 15) / 1500 */
+#define LVREV_MAX_AP3_DELAY 6740
+/* ((192000 * 120 * LVREV_T_3_Power_minus2_on_4) >> 15) / 1500 */
+#define LVREV_MAX_AP2_DELAY 8872
+/* ((192000 * 120 * LVREV_T_3_Power_minus1_on_4) >> 15) / 1500 */
+#define LVREV_MAX_AP1_DELAY 11672
+/* ((192000 * 120 * LVREV_T_3_Power_minus0_on_4) >> 15) / 1500 */
+#define LVREV_MAX_AP0_DELAY 15360
 
-#define LVREV_BYPASSMIXER_TC             1000           /* Bypass mixer time constant*/
-#define LVREV_ALLPASS_TC                 1000           /* All-pass filter time constant */
-#define LVREV_ALLPASS_TAP_TC             10000           /* All-pass filter dely tap change */
-#define LVREV_FEEDBACKMIXER_TC            100           /* Feedback mixer time constant*/
-#define LVREV_OUTPUTGAIN_SHIFT              5           /* Bits shift for output gain correction */
+#define LVREV_BYPASSMIXER_TC 1000  /* Bypass mixer time constant */
+#define LVREV_ALLPASS_TC 1000      /* All-pass filter time constant */
+#define LVREV_ALLPASS_TAP_TC 10000 /* All-pass filter delay tap change */
+#define LVREV_FEEDBACKMIXER_TC 100 /* Feedback mixer time constant */
+#define LVREV_OUTPUTGAIN_SHIFT 5   /* Bits shift for output gain correction */
 
 /* Parameter limits */
-#define LVREV_NUM_FS                       13           /* Number of supported sample rates */
+#define LVREV_NUM_FS 13 /* Number of supported sample rates */
 
-#define LVREV_MAXBLKSIZE_LIMIT             64           /* Maximum block size low limit */
-#define LVREV_MAX_LEVEL                   100           /* Maximum level, 100% */
-#define LVREV_MIN_LPF_CORNER               50           /* Low pass filter limits */
-#define LVREV_MAX_LPF_CORNER            23999
-#define LVREV_MIN_HPF_CORNER               20           /* High pass filrer limits */
-#define LVREV_MAX_HPF_CORNER             1000
-#define LVREV_MAX_T60                    7000           /* Maximum T60 time in ms */
-#define LVREV_MAX_DENSITY                 100           /* Maximum density, 100% */
-#define LVREV_MAX_DAMPING                 100           /* Maximum damping, 100% */
-#define LVREV_MAX_ROOMSIZE                100           /* Maximum room size, 100% */
+#define LVREV_MAXBLKSIZE_LIMIT 64 /* Maximum block size low limit */
+#define LVREV_MAX_LEVEL 100       /* Maximum level, 100% */
+#define LVREV_MIN_LPF_CORNER 50   /* Low pass filter limits */
+#define LVREV_MAX_LPF_CORNER 23999
+#define LVREV_MIN_HPF_CORNER 20 /* High pass filter limits */
+#define LVREV_MAX_HPF_CORNER 1000
+#define LVREV_MAX_T60 7000     /* Maximum T60 time in ms */
+#define LVREV_MAX_DENSITY 100  /* Maximum density, 100% */
+#define LVREV_MAX_DAMPING 100  /* Maximum damping, 100% */
+#define LVREV_MAX_ROOMSIZE 100 /* Maximum room size, 100% */
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -102,72 +102,68 @@
 /****************************************************************************************/
 
 /* Fast data structure */
-typedef struct
-{
-    Biquad_1I_Order1_FLOAT_Taps_t HPTaps;                     /* High pass filter taps */
-    Biquad_1I_Order1_FLOAT_Taps_t LPTaps;                     /* Low pass filter taps */
-    Biquad_1I_Order1_FLOAT_Taps_t RevLPTaps[4];               /* Reverb low pass filters taps */
+typedef struct {
+    Biquad_1I_Order1_FLOAT_Taps_t HPTaps;       /* High pass filter taps */
+    Biquad_1I_Order1_FLOAT_Taps_t LPTaps;       /* Low pass filter taps */
+    Biquad_1I_Order1_FLOAT_Taps_t RevLPTaps[4]; /* Reverb low pass filters taps */
 
 } LVREV_FastData_st;
 
 /* Fast coefficient structure */
-typedef struct
-{
-
-    Biquad_FLOAT_Instance_t       HPCoefs;              /* High pass filter coefficients */
-    Biquad_FLOAT_Instance_t       LPCoefs;              /* Low pass filter coefficients */
-    Biquad_FLOAT_Instance_t       RevLPCoefs[4];        /* Reverb low pass filters coefficients */
+typedef struct {
+    Biquad_FLOAT_Instance_t HPCoefs;       /* High pass filter coefficients */
+    Biquad_FLOAT_Instance_t LPCoefs;       /* Low pass filter coefficients */
+    Biquad_FLOAT_Instance_t RevLPCoefs[4]; /* Reverb low pass filters coefficients */
 
 } LVREV_FastCoef_st;
-typedef struct
-{
+typedef struct {
     /* General */
-    LVREV_InstanceParams_st InstanceParams;           /* Initialisation time instance parameters */
-    LVREV_MemoryTable_st    MemoryTable;              /* Memory table */
-    LVREV_ControlParams_st  CurrentParams;            /* Parameters being used */
-    LVREV_ControlParams_st  NewParams;                /* New parameters from the \
-                                                         calling application */
-    LVM_CHAR                bControlPending;          /* Flag to indicate new parameters \
-                                                         are available */
-    LVM_CHAR                bFirstControl;            /* Flag to indicate that the control \
-                                                         function is called for the first time */
-    LVM_CHAR                bDisableReverb;           /* Flag to indicate that the mix level is
-                                                         0% and the reverb can be disabled */
-    LVM_INT32               RoomSizeInms;             /* Room size in msec */
-    LVM_INT32               MaxBlkLen;                /* Maximum block size for internal
-                                                         processing */
+    LVREV_InstanceParams_st InstanceParams; /* Initialisation time instance parameters */
+    LVREV_MemoryTable_st MemoryTable;       /* Memory table */
+    LVREV_ControlParams_st CurrentParams;   /* Parameters being used */
+    LVREV_ControlParams_st NewParams;       /* New parameters from the \
+                                               calling application */
+    LVM_CHAR bControlPending;               /* Flag to indicate new parameters \
+                                               are available */
+    LVM_CHAR bFirstControl;                 /* Flag to indicate that the control \
+                                               function is called for the first time */
+    LVM_CHAR bDisableReverb;                /* Flag to indicate that the mix level is
+                                               0% and the reverb can be disabled */
+    LVM_INT32 RoomSizeInms;                 /* Room size in msec */
+    LVM_INT32 MaxBlkLen;                    /* Maximum block size for internal
+                                               processing */
 
     /* Aligned memory pointers */
-    LVREV_FastData_st       *pFastData;               /* Fast data memory base address */
-    LVREV_FastCoef_st       *pFastCoef;               /* Fast coefficient memory base address */
-    LVM_FLOAT               *pScratchDelayLine[4];    /* Delay line scratch memory */
-    LVM_FLOAT               *pScratch;                /* Multi ussge scratch */
-    LVM_FLOAT               *pInputSave;              /* Reverb block input save for dry/wet
-                                                         mixing*/
+    LVREV_FastData_st* pFastData;    /* Fast data memory base address */
+    LVREV_FastCoef_st* pFastCoef;    /* Fast coefficient memory base address */
+    LVM_FLOAT* pScratchDelayLine[4]; /* Delay line scratch memory */
+    LVM_FLOAT* pScratch;             /* Multi usage scratch */
+    LVM_FLOAT* pInputSave;           /* Reverb block input save for dry/wet
+                                        mixing*/
 
     /* Feedback matrix */
-    Mix_1St_Cll_FLOAT_t     FeedbackMixer[4];         /* Mixer for Pop and Click Supression \
-                                                         caused by feedback Gain */
+    Mix_1St_Cll_FLOAT_t FeedbackMixer[4]; /* Mixer for Pop and Click Suppression \
+                                             caused by feedback Gain */
 
     /* All-Pass Filter */
-    LVM_INT32               T[4];                     /* Maximum delay size of buffer */
-    LVM_FLOAT               *pDelay_T[4];             /* Pointer to delay buffers */
-    LVM_INT32               Delay_AP[4];              /* Offset to AP delay buffer start */
-    LVM_INT16               AB_Selection;             /* Smooth from tap A to B when 1 \
-                                                         otherwise B to A */
-    LVM_INT32               A_DelaySize[4];           /* A delay length in samples */
-    LVM_INT32               B_DelaySize[4];           /* B delay length in samples */
-    LVM_FLOAT               *pOffsetA[4];             /* Offset for the A delay tap */
-    LVM_FLOAT               *pOffsetB[4];             /* Offset for the B delay tap */
-    Mix_2St_Cll_FLOAT_t     Mixer_APTaps[4];          /* Smoothed AP delay mixer */
-    Mix_1St_Cll_FLOAT_t     Mixer_SGFeedback[4];      /* Smoothed SAfeedback gain */
-    Mix_1St_Cll_FLOAT_t     Mixer_SGFeedforward[4];   /* Smoothed AP feedforward gain */
+    LVM_INT32 T[4];                             /* Maximum delay size of buffer */
+    LVM_FLOAT* pDelay_T[4];                     /* Pointer to delay buffers */
+    LVM_INT32 Delay_AP[4];                      /* Offset to AP delay buffer start */
+    LVM_INT16 AB_Selection;                     /* Smooth from tap A to B when 1 \
+                                                   otherwise B to A */
+    LVM_INT32 A_DelaySize[4];                   /* A delay length in samples */
+    LVM_INT32 B_DelaySize[4];                   /* B delay length in samples */
+    LVM_FLOAT* pOffsetA[4];                     /* Offset for the A delay tap */
+    LVM_FLOAT* pOffsetB[4];                     /* Offset for the B delay tap */
+    Mix_2St_Cll_FLOAT_t Mixer_APTaps[4];        /* Smoothed AP delay mixer */
+    Mix_1St_Cll_FLOAT_t Mixer_SGFeedback[4];    /* Smoothed AP feedback gain */
+    Mix_1St_Cll_FLOAT_t Mixer_SGFeedforward[4]; /* Smoothed AP feedforward gain */
 
     /* Output gain */
-    Mix_2St_Cll_FLOAT_t     BypassMixer;              /* Dry/wet mixer */
-    LVM_FLOAT               Gain;                     /* Gain applied to output to maintain
-                                                         average signal power */
-    Mix_1St_Cll_FLOAT_t     GainMixer;                /* Gain smoothing */
+    Mix_2St_Cll_FLOAT_t BypassMixer; /* Dry/wet mixer */
+    LVM_FLOAT Gain;                  /* Gain applied to output to maintain
+                                        average signal power */
+    Mix_1St_Cll_FLOAT_t GainMixer;   /* Gain smoothing */
 
 } LVREV_Instance_st;
 
@@ -177,15 +173,12 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-LVREV_ReturnStatus_en   LVREV_ApplyNewSettings(LVREV_Instance_st     *pPrivate);
-void                    ReverbBlock(LVM_FLOAT           *pInput,
-                                    LVM_FLOAT           *pOutput,
-                                    LVREV_Instance_st   *pPrivate,
-                                    LVM_UINT16          NumSamples);
-LVM_INT32               BypassMixer_Callback(void       *pCallbackData,
-                                             void       *pGeneralPurpose,
-                                             LVM_INT16  GeneralPurpose );
+LVREV_ReturnStatus_en LVREV_ApplyNewSettings(LVREV_Instance_st* pPrivate);
+void ReverbBlock(LVM_FLOAT* pInput, LVM_FLOAT* pOutput, LVREV_Instance_st* pPrivate,
+                 LVM_UINT16 NumSamples);
+LVM_INT32 BypassMixer_Callback(void* pCallbackData, void* pGeneralPurpose,
+                               LVM_INT16 GeneralPurpose);
 
-#endif  /** __LVREV_PRIVATE_H__ **/
+#endif /** __LVREV_PRIVATE_H__ **/
 
 /* End of file */
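
Illustration only, not part of the patch: the LVREV_Instance_st structure above supports a deferred parameter update (the caller writes NewParams and raises bControlPending; the next processing call folds the request into CurrentParams). A minimal sketch of that pattern in isolation, reusing the field names above; ToyParams and the two functions are hypothetical stand-ins, not library API.

/* Sketch of the deferred-update pattern used by LVREV_Instance_st. */
struct ToyParams {
    int Level; /* placeholder for the real control parameters */
};

struct ToyInstance {
    ToyParams CurrentParams; /* Parameters being used                 */
    ToyParams NewParams;     /* New parameters from the calling app   */
    char bControlPending;    /* Flag to indicate new parameters exist */
};

static void ToySetControlParameters(ToyInstance* p, const ToyParams* pNew) {
    p->NewParams = *pNew;   /* record the request ...                    */
    p->bControlPending = 1; /* ... and flag it for the next process call */
}

static void ToyProcess(ToyInstance* p /* , audio buffers ... */) {
    if (p->bControlPending) { /* applied once, at a block boundary */
        p->bControlPending = 0;
        p->CurrentParams = p->NewParams;
    }
    /* ... run the effect using p->CurrentParams ... */
}
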
diff --git a/media/libeffects/lvm/lib/Reverb/src/LVREV_Process.cpp b/media/libeffects/lvm/lib/Reverb/src/LVREV_Process.cpp
index 35f9ad8..ed3b89c 100644
--- a/media/libeffects/lvm/lib/Reverb/src/LVREV_Process.cpp
+++ b/media/libeffects/lvm/lib/Reverb/src/LVREV_Process.cpp
@@ -45,43 +45,37 @@
 /*  1. The input and output buffers must be 32-bit aligned                              */
 /*                                                                                      */
 /****************************************************************************************/
-LVREV_ReturnStatus_en LVREV_Process(LVREV_Handle_t      hInstance,
-                                    const LVM_FLOAT     *pInData,
-                                    LVM_FLOAT           *pOutData,
-                                    const LVM_UINT16    NumSamples)
-{
-   LVREV_Instance_st     *pLVREV_Private = (LVREV_Instance_st *)hInstance;
-   LVM_FLOAT             *pInput  = (LVM_FLOAT *)pInData;
-   LVM_FLOAT             *pOutput = pOutData;
-   LVM_INT32             SamplesToProcess, RemainingSamples;
-   LVM_INT32             format = 1;
+LVREV_ReturnStatus_en LVREV_Process(LVREV_Handle_t hInstance, const LVM_FLOAT* pInData,
+                                    LVM_FLOAT* pOutData, const LVM_UINT16 NumSamples) {
+    LVREV_Instance_st* pLVREV_Private = (LVREV_Instance_st*)hInstance;
+    LVM_FLOAT* pInput = (LVM_FLOAT*)pInData;
+    LVM_FLOAT* pOutput = pOutData;
+    LVM_INT32 SamplesToProcess, RemainingSamples;
+    LVM_INT32 format = 1;
 
     /*
      * Check for error conditions
      */
 
     /* Check for NULL pointers */
-    if((hInstance == LVM_NULL) || (pInData == LVM_NULL) || (pOutData == LVM_NULL))
-    {
+    if ((hInstance == LVM_NULL) || (pInData == LVM_NULL) || (pOutData == LVM_NULL)) {
         return LVREV_NULLADDRESS;
     }
 
     /*
      * Apply the new controls settings if required
      */
-    if(pLVREV_Private->bControlPending == LVM_TRUE)
-    {
-        LVREV_ReturnStatus_en   errorCode;
+    if (pLVREV_Private->bControlPending == LVM_TRUE) {
+        LVREV_ReturnStatus_en errorCode;
 
         /*
          * Clear the pending flag and update the control settings
          */
         pLVREV_Private->bControlPending = LVM_FALSE;
 
-        errorCode = LVREV_ApplyNewSettings (pLVREV_Private);
+        errorCode = LVREV_ApplyNewSettings(pLVREV_Private);
 
-        if(errorCode != LVREV_SUCCESS)
-        {
+        if (errorCode != LVREV_SUCCESS) {
             return errorCode;
         }
     }
@@ -89,27 +83,23 @@
     /*
      * Trap the case where the number of samples is zero.
      */
-    if (NumSamples == 0)
-    {
+    if (NumSamples == 0) {
         return LVREV_SUCCESS;
     }
 
     /*
      * If OFF copy and reformat the data as necessary
      */
-    if (pLVREV_Private->CurrentParams.OperatingMode == LVM_MODE_OFF)
-    {
-        if(pInput != pOutput)
-        {
+    if (pLVREV_Private->CurrentParams.OperatingMode == LVM_MODE_OFF) {
+        if (pInput != pOutput) {
             /*
              * Copy the data to the output buffer, convert to stereo if required
              */
-            if(pLVREV_Private->CurrentParams.SourceFormat == LVM_MONO){
+            if (pLVREV_Private->CurrentParams.SourceFormat == LVM_MONO) {
                 MonoTo2I_Float(pInput, pOutput, NumSamples);
             } else {
-                Copy_Float(pInput,
-                           pOutput,
-                           (LVM_INT16)(NumSamples << 1)); // 32 bit data, stereo
+                Copy_Float(pInput, pOutput,
+                           (LVM_INT16)(NumSamples << 1));  // 32 bit data, stereo
             }
         }
 
@@ -118,31 +108,26 @@
 
     RemainingSamples = (LVM_INT32)NumSamples;
 
-    if (pLVREV_Private->CurrentParams.SourceFormat != LVM_MONO)
-    {
+    if (pLVREV_Private->CurrentParams.SourceFormat != LVM_MONO) {
         format = 2;
     }
 
-    while (RemainingSamples!=0)
-    {
+    while (RemainingSamples != 0) {
         /*
          * Process the data
          */
 
-        if(RemainingSamples >  pLVREV_Private->MaxBlkLen)
-        {
-            SamplesToProcess =  pLVREV_Private->MaxBlkLen;
+        if (RemainingSamples > pLVREV_Private->MaxBlkLen) {
+            SamplesToProcess = pLVREV_Private->MaxBlkLen;
             RemainingSamples = (LVM_INT16)(RemainingSamples - SamplesToProcess);
-        }
-        else
-        {
+        } else {
             SamplesToProcess = RemainingSamples;
             RemainingSamples = 0;
         }
 
         ReverbBlock(pInput, pOutput, pLVREV_Private, (LVM_UINT16)SamplesToProcess);
-        pInput  = (LVM_FLOAT *)(pInput + (SamplesToProcess * format));
-        pOutput = (LVM_FLOAT *)(pOutput + (SamplesToProcess * 2));      // Always stereo output
+        pInput = (LVM_FLOAT*)(pInput + (SamplesToProcess * format));
+        pOutput = (LVM_FLOAT*)(pOutput + (SamplesToProcess * 2));  // Always stereo output
     }
 
     return LVREV_SUCCESS;
@@ -170,16 +155,15 @@
 /*  1. The input and output buffers must be 32-bit aligned                              */
 /*                                                                                      */
 /****************************************************************************************/
-void ReverbBlock(LVM_FLOAT *pInput, LVM_FLOAT *pOutput,
-                 LVREV_Instance_st *pPrivate, LVM_UINT16 NumSamples)
-{
-    LVM_INT16   j, size;
-    LVM_FLOAT   *pDelayLine;
-    LVM_FLOAT   *pDelayLineInput = pPrivate->pScratch;
-    LVM_FLOAT   *pScratch = pPrivate->pScratch;
-    LVM_FLOAT   *pIn;
-    LVM_FLOAT   *pTemp = pPrivate->pInputSave;
-    LVM_INT32   NumberOfDelayLines;
+void ReverbBlock(LVM_FLOAT* pInput, LVM_FLOAT* pOutput, LVREV_Instance_st* pPrivate,
+                 LVM_UINT16 NumSamples) {
+    LVM_INT16 j, size;
+    LVM_FLOAT* pDelayLine;
+    LVM_FLOAT* pDelayLineInput = pPrivate->pScratch;
+    LVM_FLOAT* pScratch = pPrivate->pScratch;
+    LVM_FLOAT* pIn;
+    LVM_FLOAT* pTemp = pPrivate->pInputSave;
+    LVM_INT32 NumberOfDelayLines;
 
     /******************************************************************************
      * All calculations will go into the buffer pointed to by pTemp, this will    *
@@ -196,85 +180,60 @@
      * and the final output is converted to STEREO after the mixer                *
      ******************************************************************************/
 
-    if(pPrivate->InstanceParams.NumDelays == LVREV_DELAYLINES_4)
-    {
+    if (pPrivate->InstanceParams.NumDelays == LVREV_DELAYLINES_4) {
         NumberOfDelayLines = 4;
-    }
-    else if(pPrivate->InstanceParams.NumDelays == LVREV_DELAYLINES_2)
-    {
+    } else if (pPrivate->InstanceParams.NumDelays == LVREV_DELAYLINES_2) {
         NumberOfDelayLines = 2;
-    }
-    else
-    {
+    } else {
         NumberOfDelayLines = 1;
     }
 
-    if(pPrivate->CurrentParams.SourceFormat == LVM_MONO)
-    {
+    if (pPrivate->CurrentParams.SourceFormat == LVM_MONO) {
         pIn = pInput;
-    }
-    else
-    {
+    } else {
         /*
          *  Stereo to mono conversion
          */
 
-        From2iToMono_Float(pInput,
-                           pTemp,
-                           (LVM_INT16)NumSamples);
+        From2iToMono_Float(pInput, pTemp, (LVM_INT16)NumSamples);
         pIn = pTemp;
     }
 
-    Mult3s_Float(pIn,
-                 (LVM_FLOAT)LVREV_HEADROOM,
-                 pTemp,
-                 (LVM_INT16)NumSamples);
+    Mult3s_Float(pIn, (LVM_FLOAT)LVREV_HEADROOM, pTemp, (LVM_INT16)NumSamples);
 
     /*
      *  High pass filter
      */
-    FO_1I_D32F32C31_TRC_WRA_01(&pPrivate->pFastCoef->HPCoefs,
-                               pTemp,
-                               pTemp,
-                               (LVM_INT16)NumSamples);
+    FO_1I_D32F32C31_TRC_WRA_01(&pPrivate->pFastCoef->HPCoefs, pTemp, pTemp, (LVM_INT16)NumSamples);
     /*
      *  Low pass filter
      */
-    FO_1I_D32F32C31_TRC_WRA_01(&pPrivate->pFastCoef->LPCoefs,
-                               pTemp,
-                               pTemp,
-                               (LVM_INT16)NumSamples);
+    FO_1I_D32F32C31_TRC_WRA_01(&pPrivate->pFastCoef->LPCoefs, pTemp, pTemp, (LVM_INT16)NumSamples);
 
     /*
      *  Process all delay lines
      */
 
-    for(j = 0; j < NumberOfDelayLines; j++)
-    {
+    for (j = 0; j < NumberOfDelayLines; j++) {
         pDelayLine = pPrivate->pScratchDelayLine[j];
 
         /*
          * All-pass filter with pop and click suppression
          */
         /* Get the smoothed, delayed output. Put it in the output buffer */
-        MixSoft_2St_D32C31_SAT(&pPrivate->Mixer_APTaps[j],
-                               pPrivate->pOffsetA[j],
-                               pPrivate->pOffsetB[j],
-                               pDelayLine,
-                               (LVM_INT16)NumSamples);
+        MixSoft_2St_D32C31_SAT(&pPrivate->Mixer_APTaps[j], pPrivate->pOffsetA[j],
+                               pPrivate->pOffsetB[j], pDelayLine, (LVM_INT16)NumSamples);
         /* Re-align the all pass filter delay buffer, copying the fixed delay data \
            to the AP delay in the process */
-        Copy_Float(&pPrivate->pDelay_T[j][NumSamples],
-                   pPrivate->pDelay_T[j],
-                   (LVM_INT16)(pPrivate->T[j] - NumSamples));         /* 32-bit data */
+        Copy_Float(&pPrivate->pDelay_T[j][NumSamples], pPrivate->pDelay_T[j],
+                   (LVM_INT16)(pPrivate->T[j] - NumSamples)); /* 32-bit data */
         /* Apply the smoothed feedback and save to fixed delay input (currently empty) */
-        MixSoft_1St_D32C31_WRA(&pPrivate->Mixer_SGFeedback[j],
-                               pDelayLine,
+        MixSoft_1St_D32C31_WRA(&pPrivate->Mixer_SGFeedback[j], pDelayLine,
                                &pPrivate->pDelay_T[j][pPrivate->T[j] - NumSamples],
                                (LVM_INT16)NumSamples);
         /* Sum into the AP delay line */
         Mac3s_Sat_Float(&pPrivate->pDelay_T[j][pPrivate->T[j] - NumSamples],
-                        -1.0f,    /* Invert since the feedback coefficient is negative */
+                        -1.0f, /* Invert since the feedback coefficient is negative */
                         &pPrivate->pDelay_T[j][pPrivate->Delay_AP[j] - NumSamples],
                         (LVM_INT16)NumSamples);
         /* Apply smoothed feedforward and save to fixed delay input (currently empty) */
@@ -283,9 +242,7 @@
                                &pPrivate->pDelay_T[j][pPrivate->T[j] - NumSamples],
                                (LVM_INT16)NumSamples);
         /* Sum into the AP output */
-        Mac3s_Sat_Float(&pPrivate->pDelay_T[j][pPrivate->T[j] - NumSamples],
-                        1.0f,
-                        pDelayLine,
+        Mac3s_Sat_Float(&pPrivate->pDelay_T[j][pPrivate->T[j] - NumSamples], 1.0f, pDelayLine,
                         (LVM_INT16)NumSamples);
 
         /*
@@ -296,34 +253,27 @@
         /*
          *  Low pass filter
          */
-        FO_1I_D32F32C31_TRC_WRA_01(&pPrivate->pFastCoef->RevLPCoefs[j],
-                                   pDelayLine,
-                                   pDelayLine,
+        FO_1I_D32F32C31_TRC_WRA_01(&pPrivate->pFastCoef->RevLPCoefs[j], pDelayLine, pDelayLine,
                                    (LVM_INT16)NumSamples);
     }
 
     /*
      *  Apply rotation matrix and delay samples
      */
-    for(j = 0; j < NumberOfDelayLines; j++)
-    {
-
-        Copy_Float(pTemp,
-                   pDelayLineInput,
-                   (LVM_INT16)(NumSamples));
+    for (j = 0; j < NumberOfDelayLines; j++) {
+        Copy_Float(pTemp, pDelayLineInput, (LVM_INT16)(NumSamples));
         /*
          *  Rotation matrix mix
          */
-        switch(j)
-        {
+        switch (j) {
             case 3:
                 /*
                  *  Add delay line 1 and 2 contribution
                  */
-                 Mac3s_Sat_Float(pPrivate->pScratchDelayLine[1], -1.0f,
-                                 pDelayLineInput, (LVM_INT16)NumSamples);
-                 Mac3s_Sat_Float(pPrivate->pScratchDelayLine[2], -1.0f,
-                                 pDelayLineInput, (LVM_INT16)NumSamples);
+                Mac3s_Sat_Float(pPrivate->pScratchDelayLine[1], -1.0f, pDelayLineInput,
+                                (LVM_INT16)NumSamples);
+                Mac3s_Sat_Float(pPrivate->pScratchDelayLine[2], -1.0f, pDelayLineInput,
+                                (LVM_INT16)NumSamples);
 
                 break;
             case 2:
@@ -331,61 +281,52 @@
                 /*
                  *  Add delay line 0 and 3 contribution
                  */
-                 Mac3s_Sat_Float(pPrivate->pScratchDelayLine[0], -1.0f,
-                                 pDelayLineInput, (LVM_INT16)NumSamples);
-                 Mac3s_Sat_Float(pPrivate->pScratchDelayLine[3], -1.0f,
-                                 pDelayLineInput, (LVM_INT16)NumSamples);
+                Mac3s_Sat_Float(pPrivate->pScratchDelayLine[0], -1.0f, pDelayLineInput,
+                                (LVM_INT16)NumSamples);
+                Mac3s_Sat_Float(pPrivate->pScratchDelayLine[3], -1.0f, pDelayLineInput,
+                                (LVM_INT16)NumSamples);
 
                 break;
             case 1:
-                if(pPrivate->InstanceParams.NumDelays == LVREV_DELAYLINES_4)
-                {
+                if (pPrivate->InstanceParams.NumDelays == LVREV_DELAYLINES_4) {
                     /*
                      *  Add delay line 0 and 3 contribution
                      */
-                    Mac3s_Sat_Float(pPrivate->pScratchDelayLine[0], -1.0f,
-                                    pDelayLineInput, (LVM_INT16)NumSamples);
+                    Mac3s_Sat_Float(pPrivate->pScratchDelayLine[0], -1.0f, pDelayLineInput,
+                                    (LVM_INT16)NumSamples);
                     Add2_Sat_Float(pPrivate->pScratchDelayLine[3], pDelayLineInput,
                                    (LVM_INT16)NumSamples);
 
-                }
-                else
-                {
+                } else {
                     /*
                      *  Add delay line 0 and 1 contribution
                      */
-                     Mac3s_Sat_Float(pPrivate->pScratchDelayLine[0], -1.0f,
-                                     pDelayLineInput, (LVM_INT16)NumSamples);
-                     Mac3s_Sat_Float(pPrivate->pScratchDelayLine[1], -1.0f,
-                                     pDelayLineInput, (LVM_INT16)NumSamples);
-
+                    Mac3s_Sat_Float(pPrivate->pScratchDelayLine[0], -1.0f, pDelayLineInput,
+                                    (LVM_INT16)NumSamples);
+                    Mac3s_Sat_Float(pPrivate->pScratchDelayLine[1], -1.0f, pDelayLineInput,
+                                    (LVM_INT16)NumSamples);
                 }
                 break;
             case 0:
-                if(pPrivate->InstanceParams.NumDelays == LVREV_DELAYLINES_4)
-                {
+                if (pPrivate->InstanceParams.NumDelays == LVREV_DELAYLINES_4) {
                     /*
                      *  Add delay line 1 and 2 contribution
                      */
-                    Mac3s_Sat_Float(pPrivate->pScratchDelayLine[1], -1.0f,
-                                    pDelayLineInput, (LVM_INT16)NumSamples);
+                    Mac3s_Sat_Float(pPrivate->pScratchDelayLine[1], -1.0f, pDelayLineInput,
+                                    (LVM_INT16)NumSamples);
                     Add2_Sat_Float(pPrivate->pScratchDelayLine[2], pDelayLineInput,
                                    (LVM_INT16)NumSamples);
 
-                }
-                else if(pPrivate->InstanceParams.NumDelays == LVREV_DELAYLINES_2)
-                {
+                } else if (pPrivate->InstanceParams.NumDelays == LVREV_DELAYLINES_2) {
                     /*
                      *  Add delay line 0 and 1 contribution
                      */
                     Add2_Sat_Float(pPrivate->pScratchDelayLine[0], pDelayLineInput,
                                    (LVM_INT16)NumSamples);
-                    Mac3s_Sat_Float(pPrivate->pScratchDelayLine[1], -1.0f,
-                                    pDelayLineInput, (LVM_INT16)NumSamples);
+                    Mac3s_Sat_Float(pPrivate->pScratchDelayLine[1], -1.0f, pDelayLineInput,
+                                    (LVM_INT16)NumSamples);
 
-                }
-                else
-                {
+                } else {
                     /*
                      *  Add delay line 0 contribution
                      */
@@ -402,54 +343,37 @@
         /*
          *  Delay samples
          */
-        Copy_Float(pDelayLineInput,
-                   &pPrivate->pDelay_T[j][pPrivate->T[j] - NumSamples],
-                   (LVM_INT16)(NumSamples));              /* 32-bit data */
+        Copy_Float(pDelayLineInput, &pPrivate->pDelay_T[j][pPrivate->T[j] - NumSamples],
+                   (LVM_INT16)(NumSamples)); /* 32-bit data */
     }
 
     /*
      *  Create stereo output
      */
-    switch(pPrivate->InstanceParams.NumDelays)
-    {
+    switch (pPrivate->InstanceParams.NumDelays) {
         case LVREV_DELAYLINES_4:
-             Add2_Sat_Float(pPrivate->pScratchDelayLine[3],
-                            pPrivate->pScratchDelayLine[0],
-                            (LVM_INT16)NumSamples);
-             Add2_Sat_Float(pPrivate->pScratchDelayLine[2],
-                            pPrivate->pScratchDelayLine[1],
-                            (LVM_INT16)NumSamples);
+            Add2_Sat_Float(pPrivate->pScratchDelayLine[3], pPrivate->pScratchDelayLine[0],
+                           (LVM_INT16)NumSamples);
+            Add2_Sat_Float(pPrivate->pScratchDelayLine[2], pPrivate->pScratchDelayLine[1],
+                           (LVM_INT16)NumSamples);
 
-            JoinTo2i_Float(pPrivate->pScratchDelayLine[0],
-                           pPrivate->pScratchDelayLine[1],
-                           pTemp,
+            JoinTo2i_Float(pPrivate->pScratchDelayLine[0], pPrivate->pScratchDelayLine[1], pTemp,
                            (LVM_INT16)NumSamples);
 
             break;
         case LVREV_DELAYLINES_2:
 
-             Copy_Float(pPrivate->pScratchDelayLine[1],
-                        pScratch,
-                        (LVM_INT16)(NumSamples));
+            Copy_Float(pPrivate->pScratchDelayLine[1], pScratch, (LVM_INT16)(NumSamples));
 
-             Mac3s_Sat_Float(pPrivate->pScratchDelayLine[0],
-                            -1.0f,
-                            pScratch,
-                            (LVM_INT16)NumSamples);
+            Mac3s_Sat_Float(pPrivate->pScratchDelayLine[0], -1.0f, pScratch, (LVM_INT16)NumSamples);
 
-             Add2_Sat_Float(pPrivate->pScratchDelayLine[1],
-                            pPrivate->pScratchDelayLine[0],
-                            (LVM_INT16)NumSamples);
+            Add2_Sat_Float(pPrivate->pScratchDelayLine[1], pPrivate->pScratchDelayLine[0],
+                           (LVM_INT16)NumSamples);
 
-             JoinTo2i_Float(pPrivate->pScratchDelayLine[0],
-                            pScratch,
-                            pTemp,
-                            (LVM_INT16)NumSamples);
+            JoinTo2i_Float(pPrivate->pScratchDelayLine[0], pScratch, pTemp, (LVM_INT16)NumSamples);
             break;
         case LVREV_DELAYLINES_1:
-            MonoTo2I_Float(pPrivate->pScratchDelayLine[0],
-                           pTemp,
-                           (LVM_INT16)NumSamples);
+            MonoTo2I_Float(pPrivate->pScratchDelayLine[0], pTemp, (LVM_INT16)NumSamples);
             break;
         default:
             break;
@@ -460,25 +384,14 @@
      */
 
     size = (LVM_INT16)(NumSamples << 1);
-    MixSoft_2St_D32C31_SAT(&pPrivate->BypassMixer,
-                           pTemp,
-                           pTemp,
-                           pOutput,
-                           size);
+    MixSoft_2St_D32C31_SAT(&pPrivate->BypassMixer, pTemp, pTemp, pOutput, size);
 
     /* Apply Gain*/
 
-    Shift_Sat_Float(LVREV_OUTPUTGAIN_SHIFT,
-                    pOutput,
-                    pOutput,
-                    size);
+    Shift_Sat_Float(LVREV_OUTPUTGAIN_SHIFT, pOutput, pOutput, size);
 
-    MixSoft_1St_D32C31_WRA(&pPrivate->GainMixer,
-                           pOutput,
-                           pOutput,
-                           size);
+    MixSoft_1St_D32C31_WRA(&pPrivate->GainMixer, pOutput, pOutput, size);
 
     return;
 }
 /* End of file */
-
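
Illustration only, not part of the patch: the loop above splits each LVREV_Process call into chunks of at most MaxBlkLen frames, advancing the input pointer by frames * channels (format is 1 for mono, 2 for stereo) and the output pointer by frames * 2, since the output is always stereo. A minimal sketch of that chunking arithmetic in isolation; processChunk, runInBlocks and maxBlkLen are hypothetical stand-ins for ReverbBlock and the instance state.

#include <algorithm>
#include <cstdint>

/* Stand-in for ReverbBlock(): processes one chunk of at most maxBlkLen frames. */
static void processChunk(const float* /*in*/, float* /*out*/, int32_t /*frames*/) {}

static void runInBlocks(const float* in, float* out, int32_t frames,
                        int32_t inChannels /* 1 = mono, 2 = stereo */, int32_t maxBlkLen) {
    int32_t remaining = frames;
    while (remaining != 0) {
        const int32_t todo = std::min(remaining, maxBlkLen);
        processChunk(in, out, todo);
        in += todo * inChannels; /* mono or stereo input    */
        out += todo * 2;         /* output is always stereo */
        remaining -= todo;
    }
}
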
diff --git a/media/libeffects/lvm/lib/Reverb/src/LVREV_SetControlParameters.cpp b/media/libeffects/lvm/lib/Reverb/src/LVREV_SetControlParameters.cpp
index 2a75559..e5a0bc8 100644
--- a/media/libeffects/lvm/lib/Reverb/src/LVREV_SetControlParameters.cpp
+++ b/media/libeffects/lvm/lib/Reverb/src/LVREV_SetControlParameters.cpp
@@ -42,84 +42,67 @@
 /*  1.  This function may be interrupted by the LVREV_Process function                  */
 /*                                                                                      */
 /****************************************************************************************/
-LVREV_ReturnStatus_en LVREV_SetControlParameters(LVREV_Handle_t           hInstance,
-                                                 LVREV_ControlParams_st   *pNewParams)
-{
-
-    LVREV_Instance_st     *pLVREV_Private = (LVREV_Instance_st *)hInstance;
+LVREV_ReturnStatus_en LVREV_SetControlParameters(LVREV_Handle_t hInstance,
+                                                 LVREV_ControlParams_st* pNewParams) {
+    LVREV_Instance_st* pLVREV_Private = (LVREV_Instance_st*)hInstance;
 
     /*
      * Check for error conditions
      */
-    if((hInstance == LVM_NULL) || (pNewParams == LVM_NULL))
-    {
+    if ((hInstance == LVM_NULL) || (pNewParams == LVM_NULL)) {
         return LVREV_NULLADDRESS;
     }
 
     /*
      * Check all new control parameters are in range
      */
-    if(    ((pNewParams->OperatingMode != LVM_MODE_OFF) && (pNewParams->OperatingMode != LVM_MODE_ON))                                         ||
-        (
-        (pNewParams->SampleRate != LVM_FS_8000) && (pNewParams->SampleRate != LVM_FS_11025) && (pNewParams->SampleRate != LVM_FS_12000)       &&
-        (pNewParams->SampleRate != LVM_FS_16000) && (pNewParams->SampleRate != LVM_FS_22050) && (pNewParams->SampleRate != LVM_FS_24000)       &&
-        (pNewParams->SampleRate != LVM_FS_32000) &&
-        (pNewParams->SampleRate != LVM_FS_44100) &&
-        (pNewParams->SampleRate != LVM_FS_48000)
-        && (pNewParams->SampleRate != LVM_FS_88200) && (pNewParams->SampleRate != LVM_FS_96000)
-        && (pNewParams->SampleRate != LVM_FS_176400) && (pNewParams->SampleRate != LVM_FS_192000)
-        )
-#ifdef SUPPORT_MC
-        || ((pNewParams->SourceFormat != LVM_STEREO)       &&
-            (pNewParams->SourceFormat != LVM_MONOINSTEREO) &&
-            (pNewParams->SourceFormat != LVM_MONO)         &&
-            (pNewParams->SourceFormat != LVM_MULTICHANNEL)))
-#else
-        || ((pNewParams->SourceFormat != LVM_STEREO) && (pNewParams->SourceFormat != LVM_MONOINSTEREO) && (pNewParams->SourceFormat != LVM_MONO)) )
-#endif
-    {
+    if (((pNewParams->OperatingMode != LVM_MODE_OFF) &&
+         (pNewParams->OperatingMode != LVM_MODE_ON)) ||
+        ((pNewParams->SampleRate != LVM_FS_8000) && (pNewParams->SampleRate != LVM_FS_11025) &&
+         (pNewParams->SampleRate != LVM_FS_12000) && (pNewParams->SampleRate != LVM_FS_16000) &&
+         (pNewParams->SampleRate != LVM_FS_22050) && (pNewParams->SampleRate != LVM_FS_24000) &&
+         (pNewParams->SampleRate != LVM_FS_32000) && (pNewParams->SampleRate != LVM_FS_44100) &&
+         (pNewParams->SampleRate != LVM_FS_48000) && (pNewParams->SampleRate != LVM_FS_88200) &&
+         (pNewParams->SampleRate != LVM_FS_96000) && (pNewParams->SampleRate != LVM_FS_176400) &&
+         (pNewParams->SampleRate != LVM_FS_192000)) ||
+        ((pNewParams->SourceFormat != LVM_STEREO) &&
+         (pNewParams->SourceFormat != LVM_MONOINSTEREO) && (pNewParams->SourceFormat != LVM_MONO) &&
+         (pNewParams->SourceFormat != LVM_MULTICHANNEL))) {
         return (LVREV_OUTOFRANGE);
     }
 
-    if (pNewParams->Level > LVREV_MAX_LEVEL)
-    {
+    if (pNewParams->Level > LVREV_MAX_LEVEL) {
         return LVREV_OUTOFRANGE;
     }
 
-    if ((pNewParams->LPF < LVREV_MIN_LPF_CORNER) || (pNewParams->LPF > LVREV_MAX_LPF_CORNER))
-    {
+    if ((pNewParams->LPF < LVREV_MIN_LPF_CORNER) || (pNewParams->LPF > LVREV_MAX_LPF_CORNER)) {
         return LVREV_OUTOFRANGE;
     }
 
-    if ((pNewParams->HPF < LVREV_MIN_HPF_CORNER) || (pNewParams->HPF > LVREV_MAX_HPF_CORNER))
-    {
+    if ((pNewParams->HPF < LVREV_MIN_HPF_CORNER) || (pNewParams->HPF > LVREV_MAX_HPF_CORNER)) {
         return LVREV_OUTOFRANGE;
     }
 
-    if (pNewParams->T60 > LVREV_MAX_T60)
-    {
+    if (pNewParams->T60 > LVREV_MAX_T60) {
         return LVREV_OUTOFRANGE;
     }
 
-    if (pNewParams->Density > LVREV_MAX_DENSITY)
-    {
+    if (pNewParams->Density > LVREV_MAX_DENSITY) {
         return LVREV_OUTOFRANGE;
     }
 
-    if (pNewParams->Damping > LVREV_MAX_DAMPING)
-    {
+    if (pNewParams->Damping > LVREV_MAX_DAMPING) {
         return LVREV_OUTOFRANGE;
     }
 
-    if (pNewParams->RoomSize > LVREV_MAX_ROOMSIZE)
-    {
+    if (pNewParams->RoomSize > LVREV_MAX_ROOMSIZE) {
         return LVREV_OUTOFRANGE;
     }
 
     /*
      * Copy the new parameters and set the flag to indicate they are available
      */
-    pLVREV_Private->NewParams       = *pNewParams;
+    pLVREV_Private->NewParams = *pNewParams;
     pLVREV_Private->bControlPending = LVM_TRUE;
 
     return LVREV_SUCCESS;
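
Illustration only, not part of the patch: the range checks above list every field a caller must populate before LVREV_SetControlParameters will accept the structure. A minimal usage sketch, assuming an already initialised handle and that LVREV.h is the public header declaring these types; the numeric values are illustrative and must sit within the LVREV_MIN_ / LVREV_MAX_ limits checked above.

#include "LVREV.h" /* assumed public header for the types used below */

static LVREV_ReturnStatus_en configureReverb(LVREV_Handle_t hInstance) {
    LVREV_ControlParams_st params;

    params.OperatingMode = LVM_MODE_ON;
    params.SampleRate = LVM_FS_48000;
    params.SourceFormat = LVM_STEREO;
    params.Level = 50;  /* illustrative values only; each must stay  */
    params.LPF = 20000; /* within the LVREV_MIN_ / LVREV_MAX_ limits */
    params.HPF = 50;    /* validated by LVREV_SetControlParameters   */
    params.T60 = 1000;
    params.Density = 50;
    params.Damping = 50;
    params.RoomSize = 50;

    /* Returns LVREV_OUTOFRANGE if any field fails the checks above; on success
     * the new settings are applied on the next LVREV_Process call. */
    return LVREV_SetControlParameters(hInstance, &params);
}
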
diff --git a/media/libeffects/lvm/lib/Reverb/src/LVREV_Tables.cpp b/media/libeffects/lvm/lib/Reverb/src/LVREV_Tables.cpp
index 5cd623e..35a6522 100644
--- a/media/libeffects/lvm/lib/Reverb/src/LVREV_Tables.cpp
+++ b/media/libeffects/lvm/lib/Reverb/src/LVREV_Tables.cpp
@@ -30,25 +30,11 @@
 /****************************************************************************************/
 
 /* Table with supported sampling rates.  The table can be indexed using LVM_Fs_en       */
-const LVM_UINT32 LVM_FsTable[] = {
-    8000 ,
-    11025,
-    12000,
-    16000,
-    22050,
-    24000,
-    32000,
-    44100,
-    48000,
-    88200,
-    96000,
-    176400,
-    192000
-};
+const LVM_UINT32 LVM_FsTable[] = {8000,  11025, 12000, 16000, 22050,  24000, 32000,
+                                  44100, 48000, 88200, 96000, 176400, 192000};
 /* Table with supported sampling rates.  The table can be indexed using LVM_Fs_en       */
-LVM_UINT32 LVM_GetFsFromTable(LVM_Fs_en FsIndex){
-    if (FsIndex > LVM_FS_192000)
-        return 0;
+LVM_UINT32 LVM_GetFsFromTable(LVM_Fs_en FsIndex) {
+    if (FsIndex > LVM_FS_192000) return 0;
 
     return (LVM_FsTable[FsIndex]);
 }
@@ -73,30 +59,174 @@
   */
 
 /* Normalizing output including Reverb Level part (only shift up)*/
-const LVM_FLOAT LVREV_GainPolyTable[24][5]={{1,1.045909f,7.681098f,-7.211500f,3.025605f,},
-                                            {2,1.088194f,10.291749f,-11.513787f,5.265817f,},
-                                            {3,0.988919f,8.299956f,-8.920862f,3.979806f,},
-                                            {4,1.035927f,10.182567f,-10.346134f,4.546533f,},
-                                            {5,1.130313f,12.538727f,-13.627023f,6.165208f,},
-                                            {6,1.060743f,8.091713f,-8.588079f,3.834230f,},
-                                            {7,1.040381f,10.406566f,-11.176650f,5.075132f,},
-                                            {8,1.026944f,8.387302f,-8.689796f,3.895863f,},
-                                            {9,1.013312f,9.727236f,-10.534165f,4.742272f,},
-                                            {10,0.996095f,8.492249f,-7.947677f,3.478917f,},
-                                            {13,1.079346f,8.894425f,-9.641768f,4.434442f,},
-                                            {15,0.994327f,7.441335f,-8.003979f,3.581177f,},
-                                            {17,0.991067f,7.208373f,-7.257859f,3.167774f,},
-                                            {20,1.033445f,7.476371f,-7.546960f,3.369703f,},
-                                            {25,0.982830f,5.913867f,-5.638448f,2.420932f,},
-                                            {30,0.928782f,5.035343f,-4.492104f,1.844904f,},
-                                            {40,0.953714f,5.060232f,-4.472204f,1.829642f,},
-                                            {50,0.899258f,4.273357f,-3.537492f,1.387576f,},
-                                            {60,0.943584f,4.093228f,-3.469658f,1.410911f,},
-                                            {70,0.926021f,3.973125f,-3.331985f,1.344690f,},
-                                            {75,0.894853f,2.871747f,-1.438758f,0.311856f,},
-                                            {80,0.935122f,2.991857f,-2.038882f,0.686395f,},
-                                            {90,0.953872f,2.880315f,-2.122365f,0.784032f,},
-                                            {100,0.951005f,2.894294f,-2.009086f,0.698316f,},
+const LVM_FLOAT LVREV_GainPolyTable[24][5] = {
+        {
+                1,
+                1.045909f,
+                7.681098f,
+                -7.211500f,
+                3.025605f,
+        },
+        {
+                2,
+                1.088194f,
+                10.291749f,
+                -11.513787f,
+                5.265817f,
+        },
+        {
+                3,
+                0.988919f,
+                8.299956f,
+                -8.920862f,
+                3.979806f,
+        },
+        {
+                4,
+                1.035927f,
+                10.182567f,
+                -10.346134f,
+                4.546533f,
+        },
+        {
+                5,
+                1.130313f,
+                12.538727f,
+                -13.627023f,
+                6.165208f,
+        },
+        {
+                6,
+                1.060743f,
+                8.091713f,
+                -8.588079f,
+                3.834230f,
+        },
+        {
+                7,
+                1.040381f,
+                10.406566f,
+                -11.176650f,
+                5.075132f,
+        },
+        {
+                8,
+                1.026944f,
+                8.387302f,
+                -8.689796f,
+                3.895863f,
+        },
+        {
+                9,
+                1.013312f,
+                9.727236f,
+                -10.534165f,
+                4.742272f,
+        },
+        {
+                10,
+                0.996095f,
+                8.492249f,
+                -7.947677f,
+                3.478917f,
+        },
+        {
+                13,
+                1.079346f,
+                8.894425f,
+                -9.641768f,
+                4.434442f,
+        },
+        {
+                15,
+                0.994327f,
+                7.441335f,
+                -8.003979f,
+                3.581177f,
+        },
+        {
+                17,
+                0.991067f,
+                7.208373f,
+                -7.257859f,
+                3.167774f,
+        },
+        {
+                20,
+                1.033445f,
+                7.476371f,
+                -7.546960f,
+                3.369703f,
+        },
+        {
+                25,
+                0.982830f,
+                5.913867f,
+                -5.638448f,
+                2.420932f,
+        },
+        {
+                30,
+                0.928782f,
+                5.035343f,
+                -4.492104f,
+                1.844904f,
+        },
+        {
+                40,
+                0.953714f,
+                5.060232f,
+                -4.472204f,
+                1.829642f,
+        },
+        {
+                50,
+                0.899258f,
+                4.273357f,
+                -3.537492f,
+                1.387576f,
+        },
+        {
+                60,
+                0.943584f,
+                4.093228f,
+                -3.469658f,
+                1.410911f,
+        },
+        {
+                70,
+                0.926021f,
+                3.973125f,
+                -3.331985f,
+                1.344690f,
+        },
+        {
+                75,
+                0.894853f,
+                2.871747f,
+                -1.438758f,
+                0.311856f,
+        },
+        {
+                80,
+                0.935122f,
+                2.991857f,
+                -2.038882f,
+                0.686395f,
+        },
+        {
+                90,
+                0.953872f,
+                2.880315f,
+                -2.122365f,
+                0.784032f,
+        },
+        {
+                100,
+                0.951005f,
+                2.894294f,
+                -2.009086f,
+                0.698316f,
+        },
 };
 /* End of file */
-
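
Illustration only, not part of the patch: LVM_GetFsFromTable above is a bounds-checked lookup into LVM_FsTable, and each LVREV_GainPolyTable row packs an index column followed by four coefficients. A minimal sketch of both, with local copies so it is self-contained; treating the coefficients as a cubic evaluated in Horner form, and the meaning of its variable x, are assumptions here (the real evaluation lives in LVREV_ApplyNewSettings, outside this hunk).

#include <cstdint>

static const uint32_t kFsTable[] = {8000,  11025, 12000, 16000, 22050,  24000, 32000,
                                    44100, 48000, 88200, 96000, 176400, 192000};

/* Same idea as LVM_GetFsFromTable(): reject out-of-range indices, else look up. */
static uint32_t fsFromIndex(unsigned idx) {
    if (idx >= sizeof(kFsTable) / sizeof(kFsTable[0])) return 0;
    return kFsTable[idx];
}

/* row = {index, a0, a1, a2, a3}; returns a0 + a1*x + a2*x^2 + a3*x^3 (assumed form). */
static float evalGainRow(const float row[5], float x) {
    return row[1] + x * (row[2] + x * (row[3] + x * row[4]));
}
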
diff --git a/media/libeffects/lvm/lib/Reverb/src/LVREV_Tables.h b/media/libeffects/lvm/lib/Reverb/src/LVREV_Tables.h
index e100d8a..4b0dcca 100644
--- a/media/libeffects/lvm/lib/Reverb/src/LVREV_Tables.h
+++ b/media/libeffects/lvm/lib/Reverb/src/LVREV_Tables.h
@@ -31,11 +31,11 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-extern const    LVM_UINT32  LVM_FsTable[];
-extern          LVM_UINT32  LVM_GetFsFromTable(LVM_Fs_en FsIndex);
+extern const LVM_UINT32 LVM_FsTable[];
+extern LVM_UINT32 LVM_GetFsFromTable(LVM_Fs_en FsIndex);
 
-extern const    LVM_FLOAT   LVREV_GainPolyTable[24][5];
+extern const LVM_FLOAT LVREV_GainPolyTable[24][5];
 
-#endif  /** _LVREV_TABLES_H_ **/
+#endif /** _LVREV_TABLES_H_ **/
 
 /* End of file */
diff --git a/media/libeffects/lvm/lib/SpectrumAnalyzer/lib/LVPSA.h b/media/libeffects/lvm/lib/SpectrumAnalyzer/lib/LVPSA.h
index c9fa7ad..85e3ab9 100644
--- a/media/libeffects/lvm/lib/SpectrumAnalyzer/lib/LVPSA.h
+++ b/media/libeffects/lvm/lib/SpectrumAnalyzer/lib/LVPSA.h
@@ -22,255 +22,191 @@
 
 /****************************************************************************************/
 /*                                                                                      */
-/*  CONSTANTS DEFINITIONS                                                               */
-/*                                                                                      */
-/****************************************************************************************/
-
-/* Memory table*/
-#define     LVPSA_NR_MEMORY_REGIONS                  4      /* Number of memory regions                                          */
-
-/****************************************************************************************/
-/*                                                                                      */
 /*  TYPES DEFINITIONS                                                                   */
 /*                                                                                      */
 /****************************************************************************************/
-/* Memory Types */
-typedef enum
-{
-    LVPSA_PERSISTENT      = LVM_PERSISTENT,
-    LVPSA_PERSISTENT_DATA = LVM_PERSISTENT_DATA,
-    LVPSA_PERSISTENT_COEF = LVM_PERSISTENT_COEF,
-    LVPSA_SCRATCH         = LVM_SCRATCH,
-    LVPSA_MEMORY_DUMMY = LVM_MAXINT_32                      /* Force 32 bits enum, don't use it!                                 */
-} LVPSA_MemoryTypes_en;
-
 /* Level detection speed control parameters */
-typedef enum
-{
-    LVPSA_SPEED_LOW,                                        /* Low speed level   detection                                       */
-    LVPSA_SPEED_MEDIUM,                                     /* Medium speed level   detection                                    */
-    LVPSA_SPEED_HIGH,                                       /* High speed level   detection                                      */
-    LVPSA_SPEED_DUMMY = LVM_MAXINT_32                       /* Force 32 bits enum, don't use it!                                 */
+typedef enum {
+    LVPSA_SPEED_LOW,    /* Low speed level   detection                                       */
+    LVPSA_SPEED_MEDIUM, /* Medium speed level   detection                                    */
+    LVPSA_SPEED_HIGH,   /* High speed level   detection                                      */
+    LVPSA_SPEED_DUMMY = LVM_MAXINT_32 /* Force 32 bits enum, don't use it! */
 } LVPSA_LevelDetectSpeed_en;
 
 /* Filter control parameters */
-typedef struct
-{
-    LVM_UINT16                 CenterFrequency;             /* Center frequency of the band-pass filter (in Hz)                  */
-    LVM_UINT16                 QFactor;                     /* Quality factor of the filter             (in 1/100)               */
-    LVM_INT16                  PostGain;                    /* Postgain to apply after the filtering    (in dB Q16.0)            */
+typedef struct {
+    LVM_UINT16 CenterFrequency; /* Center frequency of the band-pass filter (in Hz) */
+    LVM_UINT16 QFactor; /* Quality factor of the filter             (in 1/100)               */
+    LVM_INT16 PostGain; /* Postgain to apply after the filtering    (in dB Q16.0)            */
 
 } LVPSA_FilterParam_t;
 
 /* LVPSA initialization parameters */
-typedef struct
-{
-    LVM_UINT16                 SpectralDataBufferDuration;  /* Spectral data buffer duration in time (ms in Q16.0)               */
-    LVM_UINT16                 MaxInputBlockSize;           /* Maximum expected input block size (in samples)                    */
-    LVM_UINT16                 nBands;                      /* Number of bands of the SA                                         */
-    LVPSA_FilterParam_t       *pFiltersParams;              /* Points to nBands filter param structures for filters settings     */
+typedef struct {
+    LVM_UINT16
+            SpectralDataBufferDuration; /* Spectral data buffer duration in time (ms in Q16.0) */
+    LVM_UINT16 MaxInputBlockSize;       /* Maximum expected input block size (in samples)       */
+    LVM_UINT16 nBands; /* Number of bands of the SA                                         */
+    LVPSA_FilterParam_t*
+            pFiltersParams; /* Points to nBands filter param structures for filters settings     */
 
 } LVPSA_InitParams_t, *pLVPSA_InitParams_t;
 
 /* LVPSA control parameters */
-typedef struct
-{
-    LVM_Fs_en                  Fs;                          /* Input sampling rate                                               */
-    LVPSA_LevelDetectSpeed_en  LevelDetectionSpeed;         /* Level detection speed                                             */
+typedef struct {
+    LVM_Fs_en Fs; /* Input sampling rate                                               */
+    LVPSA_LevelDetectSpeed_en LevelDetectionSpeed; /* Level detection speed */
 
 } LVPSA_ControlParams_t, *pLVPSA_ControlParams_t;
 
-/* Memory region definition */
-typedef struct
-{
-    LVM_UINT32                 Size;                        /* Region size in bytes                                              */
-    LVPSA_MemoryTypes_en       Type;                        /* Region type                                                       */
-    void                       *pBaseAddress;               /* Pointer to the region base address                                */
-} LVPSA_MemoryRegion_t;
-
-/* Memory table containing the region definitions */
-typedef struct
-{
-    LVPSA_MemoryRegion_t       Region[LVPSA_NR_MEMORY_REGIONS];/* One definition for each region                                 */
-} LVPSA_MemTab_t;
-
 /* Audio time type */
 typedef LVM_INT32 LVPSA_Time;
 
 /* Module instance Handle */
-typedef void *pLVPSA_Handle_t;
+typedef void* pLVPSA_Handle_t;
 
 /* LVPSA return codes */
-typedef enum
-{
-    LVPSA_OK,                                               /* The function ran without any problem                              */
-    LVPSA_ERROR_INVALIDPARAM,                               /* A parameter is incorrect                                          */
-    LVPSA_ERROR_WRONGTIME,                                  /* An incorrect AudioTime is used                                    */
-    LVPSA_ERROR_NULLADDRESS,                                /* A pointer has a NULL value                                        */
-    LVPSA_RETURN_DUMMY = LVM_MAXINT_32                      /* Force 32 bits enum, don't use it!                                 */
+typedef enum {
+    LVPSA_OK, /* The function ran without any problem                              */
+    LVPSA_ERROR_INVALIDPARAM, /* A parameter is incorrect */
+    LVPSA_ERROR_WRONGTIME,   /* An incorrect AudioTime is used                                    */
+    LVPSA_ERROR_NULLADDRESS, /* A pointer has a NULL value                                        */
+    LVPSA_RETURN_DUMMY = LVM_MAXINT_32 /* Force 32 bits enum, don't use it! */
 } LVPSA_RETURN;
 
 /*********************************************************************************************************************************
    FUNCTIONS PROTOTYPE
 **********************************************************************************************************************************/
-/*********************************************************************************************************************************/
-/*                                                                                                                               */
-/* FUNCTION:            LVPSA_Memory                                                                                         */
-/*                                                                                                                               */
-/* DESCRIPTION:                                                                                                                  */
-/*  This function is used for memory allocation and free. It can be called in                                                    */
-/*  two ways:                                                                                                                    */
-/*                                                                                                                               */
-/*      hInstance = NULL                Returns the memory requirements                                                          */
-/*      hInstance = Instance handle     Returns the memory requirements and                                                      */
-/*                                      allocated base addresses for the instance                                                */
-/*                                                                                                                               */
-/*  When this function is called for memory allocation (hInstance=NULL) the memory                                               */
-/*  base address pointers are NULL on return.                                                                                    */
-/*                                                                                                                               */
-/*  When the function is called for free (hInstance = Instance Handle) the memory                                                */
-/*  table returns the allocated memory and base addresses used during initialisation.                                            */
-/*                                                                                                                               */
-/* PARAMETERS:                                                                                                                   */
-/*  hInstance           Instance Handle                                                                                          */
-/*  pMemoryTable        Pointer to an empty memory definition table                                                              */
-/*  pInitParams         Pointer to the instance init parameters                                                                  */
-/*                                                                                                                               */
-/* RETURNS:                                                                                                                      */
-/*  LVPSA_OK            Succeeds                                                                                                 */
-/*  otherwise           Error due to bad parameters                                                                              */
-/*                                                                                                                               */
-/*********************************************************************************************************************************/
-LVPSA_RETURN LVPSA_Memory            ( pLVPSA_Handle_t             hInstance,
-                                       LVPSA_MemTab_t             *pMemoryTable,
-                                       LVPSA_InitParams_t         *pInitParams    );
+/************************************************************************************/
+/*                                                                                  */
+/* FUNCTION:            LVPSA_Init                                                  */
+/*                                                                                  */
+/* DESCRIPTION:                                                                     */
+/*  Create and Initialize the LVPSA module including instance handle                */
+/*                                                                                  */
+/*                                                                                  */
+/* PARAMETERS:                                                                      */
+/*  phInstance          Pointer to the instance handle                              */
+/*  InitParams          Init parameters structure                                   */
+/*  ControlParams       Control parameters structure                                */
+/*  pScratch            Pointer to bundle scratch memory area                       */
+/*                                                                                  */
+/*                                                                                  */
+/* RETURNS:                                                                         */
+/*  LVPSA_OK            Succeeds                                                    */
+/*  otherwise           Error due to bad parameters                                 */
+/*                                                                                  */
+/************************************************************************************/
+LVPSA_RETURN LVPSA_Init(pLVPSA_Handle_t* phInstance, LVPSA_InitParams_t* pInitParams,
+                        LVPSA_ControlParams_t* pControlParams, void* pScratch);
+
+/************************************************************************************/
+/*                                                                                  */
+/* FUNCTION:            LVPSA_DeInit                                                */
+/*                                                                                  */
+/* DESCRIPTION:                                                                     */
+/*    Free the memories created in LVPSA_Init call including instance handle        */
+/*                                                                                  */
+/* PARAMETERS:                                                                      */
+/*  phInstance          Pointer to the instance handle                              */
+/*                                                                                  */
+/************************************************************************************/
+void LVPSA_DeInit(pLVPSA_Handle_t* phInstance);
 
 /*********************************************************************************************************************************/
 /*                                                                                                                               */
-/* FUNCTION:            LVPSA_Init                                                                                               */
+/* FUNCTION:            LVPSA_Control */
 /*                                                                                                                               */
-/* DESCRIPTION:                                                                                                                  */
-/*  Initializes the LVPSA module.                                                                                                */
+/* DESCRIPTION: */
+/*  Controls the LVPSA module. */
 /*                                                                                                                               */
+/* PARAMETERS: */
+/*  hInstance           Instance Handle */
+/*  pNewParams          Pointer to the instance new control parameters */
 /*                                                                                                                               */
-/* PARAMETERS:                                                                                                                   */
-/*  phInstance          Pointer to the instance Handle                                                                           */
-/*  pInitParams         Pointer to the instance init parameters                                                                  */
-/*  pControlParams      Pointer to the instance control parameters                                                               */
-/*  pMemoryTable        Pointer to the memory definition table                                                                   */
-/*                                                                                                                               */
-/*                                                                                                                               */
-/* RETURNS:                                                                                                                      */
-/*  LVPSA_OK            Succeeds                                                                                                 */
-/*  otherwise           Error due to bad parameters                                                                              */
+/* RETURNS: */
+/*  LVPSA_OK            Succeeds */
+/*  otherwise           Error due to bad parameters */
 /*                                                                                                                               */
 /*********************************************************************************************************************************/
-LVPSA_RETURN LVPSA_Init              ( pLVPSA_Handle_t             *phInstance,
-                                       LVPSA_InitParams_t          *pInitParams,
-                                       LVPSA_ControlParams_t       *pControlParams,
-                                       LVPSA_MemTab_t              *pMemoryTable  );
+LVPSA_RETURN LVPSA_Control(pLVPSA_Handle_t hInstance, LVPSA_ControlParams_t* pNewParams);
 
 /*********************************************************************************************************************************/
 /*                                                                                                                               */
-/* FUNCTION:            LVPSA_Control                                                                                            */
+/* FUNCTION:            LVPSA_Process */
 /*                                                                                                                               */
-/* DESCRIPTION:                                                                                                                  */
-/*  Controls the LVPSA module.                                                                                                   */
+/* DESCRIPTION: */
+/*  The process calculates the levels of the frequency bands. */
 /*                                                                                                                               */
-/* PARAMETERS:                                                                                                                   */
-/*  hInstance           Instance Handle                                                                                          */
-/*  pNewParams          Pointer to the instance new control parameters                                                           */
+/* PARAMETERS: */
+/*  hInstance           Instance Handle */
+/*  pLVPSA_InputSamples Pointer to the input samples buffer */
+/*  InputBlockSize      Number of mono samples to process */
+/*  AudioTime           Playback time of the first input sample */
 /*                                                                                                                               */
-/* RETURNS:                                                                                                                      */
-/*  LVPSA_OK            Succeeds                                                                                                 */
-/*  otherwise           Error due to bad parameters                                                                              */
+/*                                                                                                                               */
+/* RETURNS: */
+/*  LVPSA_OK            Succeeds */
+/*  otherwise           Error due to bad parameters */
 /*                                                                                                                               */
 /*********************************************************************************************************************************/
-LVPSA_RETURN LVPSA_Control           ( pLVPSA_Handle_t             hInstance,
-                                       LVPSA_ControlParams_t      *pNewParams     );
+LVPSA_RETURN LVPSA_Process(pLVPSA_Handle_t hInstance, LVM_FLOAT* pLVPSA_InputSamples,
+                           LVM_UINT16 InputBlockSize, LVPSA_Time AudioTime);
+/*********************************************************************************************************************************/
+/*                                                                                                                               */
+/* FUNCTION:            LVPSA_GetSpectrum */
+/*                                                                                                                               */
+/* DESCRIPTION: */
+/*  Gets the current and peak level values of each band at a given audio time. */
+/*                                                                                                                               */
+/*                                                                                                                               */
+/* PARAMETERS: */
+/*  hInstance            Instance Handle */
+/*  GetSpectrumAudioTime Time to retrieve the values at */
+/*  pCurrentValues       Pointer to an empty buffer : Current level values output */
+/*  pPeakValues          Pointer to an empty buffer : Peak level values output */
+/*                                                                                                                               */
+/*                                                                                                                               */
+/* RETURNS: */
+/*  LVPSA_OK            Succeeds */
+/*  otherwise           Error due to bad parameters */
+/*                                                                                                                               */
+/*********************************************************************************************************************************/
+LVPSA_RETURN LVPSA_GetSpectrum(pLVPSA_Handle_t hInstance, LVPSA_Time GetSpectrumAudioTime,
+                               LVM_UINT8* pCurrentValues, LVM_UINT8* pPeakValues);
 
 /*********************************************************************************************************************************/
 /*                                                                                                                               */
-/* FUNCTION:            LVPSA_Process                                                                                            */
+/* FUNCTION:            LVPSA_GetControlParams */
 /*                                                                                                                               */
-/* DESCRIPTION:                                                                                                                  */
-/*  The process calculates the levels of the frequency bands.                                                                    */
+/* DESCRIPTION: */
+/*  Get the current control parameters of the LVPSA module. */
 /*                                                                                                                               */
-/* PARAMETERS:                                                                                                                   */
-/*  hInstance           Instance Handle                                                                                          */
-/*  pLVPSA_InputSamples Pointer to the input samples buffer                                                                      */
-/*  InputBlockSize      Number of mono samples to process                                                                        */
-/*  AudioTime           Playback time of the first input sample                                                                  */
-/*                                                                                                                               */
-/*                                                                                                                               */
-/* RETURNS:                                                                                                                      */
-/*  LVPSA_OK            Succeeds                                                                                                 */
-/*  otherwise           Error due to bad parameters                                                                              */
+/* PARAMETERS: */
+/*  hInstance           Instance Handle */
+/*  pParams             Pointer to an empty control parameters structure */
+/* RETURNS: */
+/*  LVPSA_OK            Succeeds */
+/*  otherwise           Error due to bad parameters */
 /*                                                                                                                               */
 /*********************************************************************************************************************************/
-LVPSA_RETURN LVPSA_Process           ( pLVPSA_Handle_t      hInstance,
-                                       LVM_FLOAT           *pLVPSA_InputSamples,
-                                       LVM_UINT16           InputBlockSize,
-                                       LVPSA_Time           AudioTime             );
-/*********************************************************************************************************************************/
-/*                                                                                                                               */
-/* FUNCTION:            LVPSA_GetSpectrum                                                                                        */
-/*                                                                                                                               */
-/* DESCRIPTION:                                                                                                                  */
-/*  This function is used for memory allocation and free.                                                                        */
-/*                                                                                                                               */
-/*                                                                                                                               */
-/* PARAMETERS:                                                                                                                   */
-/*  hInstance            Instance Handle                                                                                         */
-/*  GetSpectrumAudioTime Time to retrieve the values at                                                                          */
-/*  pCurrentValues       Pointer to an empty buffer : Current level values output                                                */
-/*  pPeakValues          Pointer to an empty buffer : Peak level values output                                                   */
-/*                                                                                                                               */
-/*                                                                                                                               */
-/* RETURNS:                                                                                                                      */
-/*  LVPSA_OK            Succeeds                                                                                                 */
-/*  otherwise           Error due to bad parameters                                                                              */
-/*                                                                                                                               */
-/*********************************************************************************************************************************/
-LVPSA_RETURN LVPSA_GetSpectrum       ( pLVPSA_Handle_t      hInstance,
-                                       LVPSA_Time           GetSpectrumAudioTime,
-                                       LVM_UINT8           *pCurrentValues,
-                                       LVM_UINT8           *pPeakValues           );
+LVPSA_RETURN LVPSA_GetControlParams(pLVPSA_Handle_t hInstance, LVPSA_ControlParams_t* pParams);
 
 /*********************************************************************************************************************************/
 /*                                                                                                                               */
-/* FUNCTION:            LVPSA_GetControlParams                                                                                   */
+/* FUNCTION:            LVPSA_GetInitParams */
 /*                                                                                                                               */
-/* DESCRIPTION:                                                                                                                  */
-/*  Get the current control parameters of the LVPSA module.                                                                      */
+/* DESCRIPTION: */
+/*  Get the initialization parameters of the LVPSA module. */
 /*                                                                                                                               */
-/* PARAMETERS:                                                                                                                   */
-/*  hInstance           Instance Handle                                                                                          */
-/*  pParams             Pointer to an empty control parameters structure                                                         */
-/* RETURNS:                                                                                                                      */
-/*  LVPSA_OK            Succeeds                                                                                                 */
-/*  otherwise           Error due to bad parameters                                                                              */
+/* PARAMETERS: */
+/*  hInstance           Instance Handle */
+/*  pParams             Pointer to an empty init parameters structure */
+/* RETURNS: */
+/*  LVPSA_OK            Succeeds */
+/*  otherwise           Error due to bad parameters */
 /*                                                                                                                               */
 /*********************************************************************************************************************************/
-LVPSA_RETURN LVPSA_GetControlParams  (    pLVPSA_Handle_t            hInstance,
-                                          LVPSA_ControlParams_t     *pParams      );
-
-/*********************************************************************************************************************************/
-/*                                                                                                                               */
-/* FUNCTION:            LVPSA_GetInitParams                                                                                      */
-/*                                                                                                                               */
-/* DESCRIPTION:                                                                                                                  */
-/*  Get the initialization parameters of the LVPSA module.                                                                       */
-/*                                                                                                                               */
-/* PARAMETERS:                                                                                                                   */
-/*  hInstance           Instance Handle                                                                                          */
-/*  pParams             Pointer to an empty init parameters structure                                                            */
-/* RETURNS:                                                                                                                      */
-/*  LVPSA_OK            Succeeds                                                                                                 */
-/*  otherwise           Error due to bad parameters                                                                              */
-/*                                                                                                                               */
-/*********************************************************************************************************************************/
-LVPSA_RETURN LVPSA_GetInitParams     (    pLVPSA_Handle_t            hInstance,
-                                          LVPSA_InitParams_t        *pParams      );
+LVPSA_RETURN LVPSA_GetInitParams(pLVPSA_Handle_t hInstance, LVPSA_InitParams_t* pParams);
 
 #endif /* _LVPSA_H */
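For orientation, a minimal caller-side sketch of the LVPSA API declared above, using the reworked LVPSA_Init/LVPSA_DeInit from LVPSA_Init.cpp further down in this change. The band layout, the 500 ms buffer duration, the LVM_FS_44100 and LVPSA_SPEED_MEDIUM constants, and the LVM_NULL scratch pointer are illustrative assumptions rather than values taken from this patch.

#include "LVPSA.h"

static void ExampleSpectrumAnalysis(LVM_FLOAT* pSamples, LVM_UINT16 blockSize) {
    /* Describe a small bank of band-pass filters (QFactor is Q x 100, PostGain in dB). */
    LVPSA_FilterParam_t filters[8];
    for (LVM_UINT16 band = 0; band < 8; band++) {
        filters[band].CenterFrequency = (LVM_UINT16)(500 * (band + 1)); /* Hz */
        filters[band].QFactor = 100;                                    /* Q = 1.00 */
        filters[band].PostGain = 0;                                     /* dB */
    }

    LVPSA_InitParams_t initParams;
    initParams.SpectralDataBufferDuration = 500; /* ms of spectral history (assumed valid) */
    initParams.MaxInputBlockSize = blockSize;
    initParams.nBands = 8;
    initParams.pFiltersParams = filters;

    LVPSA_ControlParams_t controlParams;
    controlParams.Fs = LVM_FS_44100;                        /* assumed enum value */
    controlParams.LevelDetectionSpeed = LVPSA_SPEED_MEDIUM; /* assumed enum value */

    /* The new LVPSA_Init allocates the instance itself and stores the scratch pointer. */
    pLVPSA_Handle_t hInstance = LVM_NULL;
    if (LVPSA_Init(&hInstance, &initParams, &controlParams, LVM_NULL /* pScratch */) != LVPSA_OK) {
        return;
    }

    /* Feed one block of mono samples, then read back the band levels. */
    LVPSA_Process(hInstance, pSamples, blockSize, 0 /* AudioTime */);

    LVM_UINT8 currentLevels[8];
    LVM_UINT8 peakLevels[8];
    LVPSA_GetSpectrum(hInstance, 0 /* GetSpectrumAudioTime */, currentLevels, peakLevels);

    /* Releases everything allocated by LVPSA_Init, including the instance handle. */
    LVPSA_DeInit(&hInstance);
}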
diff --git a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Control.cpp b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Control.cpp
index deafaa7..4e90a42 100644
--- a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Control.cpp
+++ b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Control.cpp
@@ -15,30 +15,25 @@
  * limitations under the License.
  */
 
-#include    "LVPSA.h"
-#include    "LVPSA_Private.h"
-#include    "VectorArithmetic.h"
+#include "LVPSA.h"
+#include "LVPSA_Private.h"
+#include "VectorArithmetic.h"
 
-#define     LOW_FREQ            298             /* 32768/110 for low test frequency */
-#define     HIGH_FREQ           386             /* 32768/85 for high test frequency */
+#define LOW_FREQ 298  /* 32768/110 for low test frequency */
+#define HIGH_FREQ 386 /* 32768/85 for high test frequency */
 
-LVPSA_RETURN LVPSA_SetBPFiltersType (  LVPSA_InstancePr_t        *pInst,
-                                       LVPSA_ControlParams_t      *pParams  );
+LVPSA_RETURN LVPSA_SetBPFiltersType(LVPSA_InstancePr_t* pInst, LVPSA_ControlParams_t* pParams);
 
-LVPSA_RETURN LVPSA_SetQPFCoefficients( LVPSA_InstancePr_t        *pInst,
-                                       LVPSA_ControlParams_t      *pParams  );
+LVPSA_RETURN LVPSA_SetQPFCoefficients(LVPSA_InstancePr_t* pInst, LVPSA_ControlParams_t* pParams);
 
-LVPSA_RETURN LVPSA_BPSinglePrecCoefs(  LVM_UINT16             Fs,
-                                       LVPSA_FilterParam_t   *pFilterParams,
-                                       BP_FLOAT_Coefs_t        *pCoefficients);
+LVPSA_RETURN LVPSA_BPSinglePrecCoefs(LVM_UINT16 Fs, LVPSA_FilterParam_t* pFilterParams,
+                                     BP_FLOAT_Coefs_t* pCoefficients);
 
-LVPSA_RETURN LVPSA_BPDoublePrecCoefs(  LVM_UINT16            Fs,
-                                       LVPSA_FilterParam_t  *pFilterParams,
-                                       BP_FLOAT_Coefs_t       *pCoefficients);
-LVPSA_RETURN LVPSA_SetBPFCoefficients( LVPSA_InstancePr_t        *pInst,
-                                       LVPSA_ControlParams_t      *pParams  );
+LVPSA_RETURN LVPSA_BPDoublePrecCoefs(LVM_UINT16 Fs, LVPSA_FilterParam_t* pFilterParams,
+                                     BP_FLOAT_Coefs_t* pCoefficients);
+LVPSA_RETURN LVPSA_SetBPFCoefficients(LVPSA_InstancePr_t* pInst, LVPSA_ControlParams_t* pParams);
 
-LVPSA_RETURN LVPSA_ClearFilterHistory( LVPSA_InstancePr_t        *pInst);
+LVPSA_RETURN LVPSA_ClearFilterHistory(LVPSA_InstancePr_t* pInst);
 
 /************************************************************************************/
 /*                                                                                  */
@@ -56,29 +51,23 @@
 /*  otherwise           Error due to bad parameters                                 */
 /*                                                                                  */
 /************************************************************************************/
-LVPSA_RETURN LVPSA_Control           ( pLVPSA_Handle_t             hInstance,
-                                       LVPSA_ControlParams_t      *pNewParams     )
-{
+LVPSA_RETURN LVPSA_Control(pLVPSA_Handle_t hInstance, LVPSA_ControlParams_t* pNewParams) {
+    LVPSA_InstancePr_t* pLVPSA_Inst = (LVPSA_InstancePr_t*)hInstance;
 
-    LVPSA_InstancePr_t     *pLVPSA_Inst    = (LVPSA_InstancePr_t*)hInstance;
-
-    if((hInstance == LVM_NULL) || (pNewParams == LVM_NULL))
-    {
-        return(LVPSA_ERROR_NULLADDRESS);
+    if ((hInstance == LVM_NULL) || (pNewParams == LVM_NULL)) {
+        return (LVPSA_ERROR_NULLADDRESS);
     }
-    if(pNewParams->Fs >= LVPSA_NR_SUPPORTED_RATE)
-    {
-        return(LVPSA_ERROR_INVALIDPARAM);
+    if (pNewParams->Fs >= LVPSA_NR_SUPPORTED_RATE) {
+        return (LVPSA_ERROR_INVALIDPARAM);
     }
-    if(pNewParams->LevelDetectionSpeed >= LVPSA_NR_SUPPORTED_SPEED)
-    {
-        return(LVPSA_ERROR_INVALIDPARAM);
+    if (pNewParams->LevelDetectionSpeed >= LVPSA_NR_SUPPORTED_SPEED) {
+        return (LVPSA_ERROR_INVALIDPARAM);
     }
 
     pLVPSA_Inst->NewParams = *pNewParams;
     pLVPSA_Inst->bControlPending = LVM_TRUE;
 
-    return(LVPSA_OK);
+    return (LVPSA_OK);
 }
 
 /************************************************************************************/
@@ -96,20 +85,17 @@
 /*  otherwise           Error due to bad parameters                                 */
 /*                                                                                  */
 /************************************************************************************/
-LVPSA_RETURN LVPSA_GetControlParams         (    pLVPSA_Handle_t            hInstance,
-                                                 LVPSA_ControlParams_t     *pParams )
-{
-    LVPSA_InstancePr_t     *pLVPSA_Inst    = (LVPSA_InstancePr_t*)hInstance;
+LVPSA_RETURN LVPSA_GetControlParams(pLVPSA_Handle_t hInstance, LVPSA_ControlParams_t* pParams) {
+    LVPSA_InstancePr_t* pLVPSA_Inst = (LVPSA_InstancePr_t*)hInstance;
 
-    if((hInstance == LVM_NULL) || (pParams == LVM_NULL))
-    {
-        return(LVPSA_ERROR_NULLADDRESS);
+    if ((hInstance == LVM_NULL) || (pParams == LVM_NULL)) {
+        return (LVPSA_ERROR_NULLADDRESS);
     }
 
-    pParams->Fs                     = pLVPSA_Inst->CurrentParams.Fs;
-    pParams->LevelDetectionSpeed    = pLVPSA_Inst->CurrentParams.LevelDetectionSpeed;
+    pParams->Fs = pLVPSA_Inst->CurrentParams.Fs;
+    pParams->LevelDetectionSpeed = pLVPSA_Inst->CurrentParams.LevelDetectionSpeed;
 
-    return(LVPSA_OK);
+    return (LVPSA_OK);
 }
 
 /************************************************************************************/
@@ -127,22 +113,19 @@
 /*  otherwise           Error due to bad parameters                                 */
 /*                                                                                  */
 /************************************************************************************/
-LVPSA_RETURN LVPSA_GetInitParams         (    pLVPSA_Handle_t            hInstance,
-                                              LVPSA_InitParams_t        *pParams )
-{
-    LVPSA_InstancePr_t     *pLVPSA_Inst    = (LVPSA_InstancePr_t*)hInstance;
+LVPSA_RETURN LVPSA_GetInitParams(pLVPSA_Handle_t hInstance, LVPSA_InitParams_t* pParams) {
+    LVPSA_InstancePr_t* pLVPSA_Inst = (LVPSA_InstancePr_t*)hInstance;
 
-    if((hInstance == LVM_NULL) || (pParams == LVM_NULL))
-    {
-        return(LVPSA_ERROR_NULLADDRESS);
+    if ((hInstance == LVM_NULL) || (pParams == LVM_NULL)) {
+        return (LVPSA_ERROR_NULLADDRESS);
     }
 
-    pParams->SpectralDataBufferDuration   = pLVPSA_Inst->SpectralDataBufferDuration;
-    pParams->MaxInputBlockSize            = pLVPSA_Inst->MaxInputBlockSize;
-    pParams->nBands                       = pLVPSA_Inst->nBands;
-    pParams->pFiltersParams               = pLVPSA_Inst->pFiltersParams;
+    pParams->SpectralDataBufferDuration = pLVPSA_Inst->SpectralDataBufferDuration;
+    pParams->MaxInputBlockSize = pLVPSA_Inst->MaxInputBlockSize;
+    pParams->nBands = pLVPSA_Inst->nBands;
+    pParams->pFiltersParams = pLVPSA_Inst->pFiltersParams;
 
-    return(LVPSA_OK);
+    return (LVPSA_OK);
 }
 
 /************************************************************************************/
@@ -163,42 +146,38 @@
 /* NOTES:                                                                           */
 /*                                                                                  */
 /************************************************************************************/
-LVPSA_RETURN LVPSA_ApplyNewSettings (LVPSA_InstancePr_t     *pInst)
-{
+LVPSA_RETURN LVPSA_ApplyNewSettings(LVPSA_InstancePr_t* pInst) {
     LVM_UINT16 ii;
     LVM_UINT16 Freq;
-    LVPSA_ControlParams_t   Params;
-    extern LVM_INT16        LVPSA_nSamplesBufferUpdate[];
-    extern LVM_UINT32       LVPSA_SampleRateTab[];
-    extern LVM_UINT16       LVPSA_DownSamplingFactor[];
+    LVPSA_ControlParams_t Params;
+    extern LVM_INT16 LVPSA_nSamplesBufferUpdate[];
+    extern LVM_UINT32 LVPSA_SampleRateTab[];
+    extern LVM_UINT16 LVPSA_DownSamplingFactor[];
 
-    if(pInst == 0)
-    {
-        return(LVPSA_ERROR_NULLADDRESS);
+    if (pInst == 0) {
+        return (LVPSA_ERROR_NULLADDRESS);
     }
 
     Params = pInst->NewParams;
 
     /* Modifies filters types and coefficients, clear the taps and
        re-initializes parameters if sample frequency has changed    */
-    if(Params.Fs != pInst->CurrentParams.Fs)
-    {
+    if (Params.Fs != pInst->CurrentParams.Fs) {
         pInst->CurrentParams.Fs = Params.Fs;
 
         /* Initialize the center frequencies as a function of the sample rate */
-        Freq = (LVM_UINT16) ((LVPSA_SampleRateTab[pInst->CurrentParams.Fs]>>1) / (pInst->nBands + 1));
-        for(ii = pInst->nBands; ii > 0; ii--)
-        {
-            pInst->pFiltersParams[ii-1].CenterFrequency = (LVM_UINT16) (Freq * ii);
+        Freq = (LVM_UINT16)((LVPSA_SampleRateTab[pInst->CurrentParams.Fs] >> 1) /
+                            (pInst->nBands + 1));
+        for (ii = pInst->nBands; ii > 0; ii--) {
+            pInst->pFiltersParams[ii - 1].CenterFrequency = (LVM_UINT16)(Freq * ii);
         }
 
         /* Count the number of relevant filters. If the center frequency of the filter is
            bigger than the Nyquist frequency, then the filter is not relevant and doesn't
            need to be used */
-        for(ii = pInst->nBands; ii > 0; ii--)
-        {
-            if(pInst->pFiltersParams[ii-1].CenterFrequency < (LVPSA_SampleRateTab[pInst->CurrentParams.Fs]>>1))
-            {
+        for (ii = pInst->nBands; ii > 0; ii--) {
+            if (pInst->pFiltersParams[ii - 1].CenterFrequency <
+                (LVPSA_SampleRateTab[pInst->CurrentParams.Fs] >> 1)) {
                 pInst->nRelevantFilters = ii;
                 break;
             }
@@ -211,19 +190,14 @@
         pInst->BufferUpdateSamplesCount = 0;
         pInst->DownSamplingFactor = LVPSA_DownSamplingFactor[Params.Fs];
         pInst->DownSamplingCount = 0;
-        for(ii = 0; ii < (pInst->nBands * pInst->SpectralDataBufferLength); ii++)
-        {
+        for (ii = 0; ii < (pInst->nBands * pInst->SpectralDataBufferLength); ii++) {
             pInst->pSpectralDataBufferStart[ii] = 0;
         }
-        for(ii = 0; ii < pInst->nBands; ii++)
-        {
+        for (ii = 0; ii < pInst->nBands; ii++) {
             pInst->pPreviousPeaks[ii] = 0;
         }
-    }
-    else
-    {
-        if(Params.LevelDetectionSpeed != pInst->CurrentParams.LevelDetectionSpeed)
-        {
+    } else {
+        if (Params.LevelDetectionSpeed != pInst->CurrentParams.LevelDetectionSpeed) {
             LVPSA_SetQPFCoefficients(pInst, &Params);
         }
     }
@@ -253,47 +227,43 @@
 /*          Single precision    otherwise                                           */
 /*                                                                                  */
 /************************************************************************************/
-LVPSA_RETURN LVPSA_SetBPFiltersType (   LVPSA_InstancePr_t        *pInst,
-                                        LVPSA_ControlParams_t      *pParams  )
-{
-    extern LVM_UINT32   LVPSA_SampleRateTab[];                 /* Sample rate table */
-    LVM_UINT16          ii;                                                         /* Filter band index */
-    LVM_UINT32          fs = (LVM_UINT32)LVPSA_SampleRateTab[(LVM_UINT16)pParams->Fs];      /* Sample rate */
-    LVM_UINT32          fc;                                                         /* Filter centre frequency */
-    LVM_INT16           QFactor;                                                    /* Filter Q factor */
+LVPSA_RETURN LVPSA_SetBPFiltersType(LVPSA_InstancePr_t* pInst, LVPSA_ControlParams_t* pParams) {
+    extern LVM_UINT32 LVPSA_SampleRateTab[]; /* Sample rate table */
+    LVM_UINT16 ii;                           /* Filter band index */
+    LVM_UINT32 fs = (LVM_UINT32)LVPSA_SampleRateTab[(LVM_UINT16)pParams->Fs]; /* Sample rate */
+    LVM_UINT32 fc;     /* Filter centre frequency */
+    LVM_INT16 QFactor; /* Filter Q factor */
 
-    for (ii = 0; ii < pInst->nRelevantFilters; ii++)
-    {
+    for (ii = 0; ii < pInst->nRelevantFilters; ii++) {
         /*
          * Get the filter settings
          */
-        fc = (LVM_UINT32)pInst->pFiltersParams[ii].CenterFrequency;     /* Get the band centre frequency */
-        QFactor =(LVM_INT16) pInst->pFiltersParams[ii].QFactor;                    /* Get the band Q factor */
+        fc = (LVM_UINT32)pInst->pFiltersParams[ii]
+                     .CenterFrequency;                          /* Get the band centre frequency */
+        QFactor = (LVM_INT16)pInst->pFiltersParams[ii].QFactor; /* Get the band Q factor */
 
         /*
          * For each filter set the type of biquad required
          */
-        pInst->pBPFiltersPrecision[ii] = LVPSA_SimplePrecisionFilter;     /* Default to single precision */
-        if ((LOW_FREQ * fs) >= (fc << 15))
-        {
+        pInst->pBPFiltersPrecision[ii] =
+                LVPSA_SimplePrecisionFilter; /* Default to single precision */
+        if ((LOW_FREQ * fs) >= (fc << 15)) {
             /*
              * fc <= fs/110
              */
             pInst->pBPFiltersPrecision[ii] = LVPSA_DoublePrecisionFilter;
-        }
-        else
-        {
-            if (((LOW_FREQ * fs) < (fc << 15)) && ((fc << 15) < (HIGH_FREQ * fs)) && (QFactor > 300))
-            {
+        } else {
+            if (((LOW_FREQ * fs) < (fc << 15)) && ((fc << 15) < (HIGH_FREQ * fs)) &&
+                (QFactor > 300)) {
                 /*
-                * (fs/110 < fc < fs/85) & (Q>3)
-                */
+                 * (fs/110 < fc < fs/85) & (Q>3)
+                 */
                 pInst->pBPFiltersPrecision[ii] = LVPSA_DoublePrecisionFilter;
             }
         }
     }
 
-    return(LVPSA_OK);
+    return (LVPSA_OK);
 }
 
 /************************************************************************************/
@@ -314,60 +284,49 @@
 /* NOTES:                                                                           */
 /*                                                                                  */
 /************************************************************************************/
-LVPSA_RETURN LVPSA_SetBPFCoefficients(  LVPSA_InstancePr_t        *pInst,
-                                        LVPSA_ControlParams_t      *pParams)
-{
-
-    LVM_UINT16                      ii;
+LVPSA_RETURN LVPSA_SetBPFCoefficients(LVPSA_InstancePr_t* pInst, LVPSA_ControlParams_t* pParams) {
+    LVM_UINT16 ii;
 
     /*
      * Set the coefficients for each band by the init function
      */
-    for (ii = 0; ii < pInst->nRelevantFilters; ii++)
-    {
-        switch  (pInst->pBPFiltersPrecision[ii])
-        {
-            case    LVPSA_DoublePrecisionFilter:
-            {
-                BP_FLOAT_Coefs_t      Coefficients;
+    for (ii = 0; ii < pInst->nRelevantFilters; ii++) {
+        switch (pInst->pBPFiltersPrecision[ii]) {
+            case LVPSA_DoublePrecisionFilter: {
+                BP_FLOAT_Coefs_t Coefficients;
                 /*
                  * Calculate the double precision coefficients
                  */
-                LVPSA_BPDoublePrecCoefs((LVM_UINT16)pParams->Fs,
-                                        &pInst->pFiltersParams[ii],
+                LVPSA_BPDoublePrecCoefs((LVM_UINT16)pParams->Fs, &pInst->pFiltersParams[ii],
                                         &Coefficients);
                 /*
                  * Set the coefficients
                  */
-                BP_1I_D16F32Cll_TRC_WRA_01_Init ( &pInst->pBP_Instances[ii],
-                                                  &pInst->pBP_Taps[ii],
-                                                  &Coefficients);
+                BP_1I_D16F32Cll_TRC_WRA_01_Init(&pInst->pBP_Instances[ii], &pInst->pBP_Taps[ii],
+                                                &Coefficients);
                 break;
             }
 
-            case    LVPSA_SimplePrecisionFilter:
-            {
-                BP_FLOAT_Coefs_t      Coefficients;
+            case LVPSA_SimplePrecisionFilter: {
+                BP_FLOAT_Coefs_t Coefficients;
 
                 /*
                  * Calculate the single precision coefficients
                  */
-                LVPSA_BPSinglePrecCoefs((LVM_UINT16)pParams->Fs,
-                                        &pInst->pFiltersParams[ii],
+                LVPSA_BPSinglePrecCoefs((LVM_UINT16)pParams->Fs, &pInst->pFiltersParams[ii],
                                         &Coefficients);
 
                 /*
                  * Set the coefficients
                  */
-                BP_1I_D16F16Css_TRC_WRA_01_Init (&pInst->pBP_Instances[ii],
-                                                  &pInst->pBP_Taps[ii],
-                                                  &Coefficients);
+                BP_1I_D16F16Css_TRC_WRA_01_Init(&pInst->pBP_Instances[ii], &pInst->pBP_Taps[ii],
+                                                &Coefficients);
                 break;
             }
         }
     }
 
-    return(LVPSA_OK);
+    return (LVPSA_OK);
 }
 
 /************************************************************************************/
@@ -388,26 +347,20 @@
 /* NOTES:                                                                           */
 /*                                                                                  */
 /************************************************************************************/
-LVPSA_RETURN LVPSA_SetQPFCoefficients(   LVPSA_InstancePr_t        *pInst,
-                                         LVPSA_ControlParams_t      *pParams  )
-{
-    LVM_UINT16     ii;
-    LVM_Fs_en      Fs = pParams->Fs;
-    QPD_FLOAT_Coefs  *pCoefficients;
-    extern         QPD_FLOAT_Coefs     LVPSA_QPD_Float_Coefs[];
+LVPSA_RETURN LVPSA_SetQPFCoefficients(LVPSA_InstancePr_t* pInst, LVPSA_ControlParams_t* pParams) {
+    LVM_UINT16 ii;
+    LVM_Fs_en Fs = pParams->Fs;
+    QPD_FLOAT_Coefs* pCoefficients;
+    extern QPD_FLOAT_Coefs LVPSA_QPD_Float_Coefs[];
 
-    pCoefficients = &LVPSA_QPD_Float_Coefs[(pParams->LevelDetectionSpeed * \
-                                    LVPSA_NR_SUPPORTED_RATE) + Fs];
+    pCoefficients =
+            &LVPSA_QPD_Float_Coefs[(pParams->LevelDetectionSpeed * LVPSA_NR_SUPPORTED_RATE) + Fs];
 
-    for (ii = 0; ii < pInst->nRelevantFilters; ii++)
-    {
-        LVPSA_QPD_Init_Float (&pInst->pQPD_States[ii],
-                              &pInst->pQPD_Taps[ii],
-                              pCoefficients );
+    for (ii = 0; ii < pInst->nRelevantFilters; ii++) {
+        LVPSA_QPD_Init_Float(&pInst->pQPD_States[ii], &pInst->pQPD_Taps[ii], pCoefficients);
     }
 
-    return(LVPSA_OK);
-
+    return (LVPSA_OK);
 }
 
 /****************************************************************************************/
@@ -443,49 +396,46 @@
 /*     of the n bands equalizer (LVEQNB                                                 */
 /*                                                                                      */
 /****************************************************************************************/
-LVPSA_RETURN LVPSA_BPSinglePrecCoefs(    LVM_UINT16              Fs,
-                                         LVPSA_FilterParam_t     *pFilterParams,
-                                         BP_FLOAT_Coefs_t        *pCoefficients)
-{
-
-    extern LVM_FLOAT    LVPSA_Float_TwoPiOnFsTable[];
-    extern LVM_FLOAT    LVPSA_Float_CosCoef[];
+LVPSA_RETURN LVPSA_BPSinglePrecCoefs(LVM_UINT16 Fs, LVPSA_FilterParam_t* pFilterParams,
+                                     BP_FLOAT_Coefs_t* pCoefficients) {
+    extern LVM_FLOAT LVPSA_Float_TwoPiOnFsTable[];
+    extern LVM_FLOAT LVPSA_Float_CosCoef[];
 
     /*
      * Intermediate variables and temporary values
      */
-    LVM_FLOAT           T0;
-    LVM_FLOAT           D;
-    LVM_FLOAT           A0;
-    LVM_FLOAT           B1;
-    LVM_FLOAT           B2;
-    LVM_FLOAT           Dt0;
-    LVM_FLOAT           B2_Den;
-    LVM_FLOAT           B2_Num;
-    LVM_FLOAT           COS_T0;
-    LVM_FLOAT           coef;
-    LVM_FLOAT           factor;
-    LVM_FLOAT           t0;
-    LVM_INT16           i;
+    LVM_FLOAT T0;
+    LVM_FLOAT D;
+    LVM_FLOAT A0;
+    LVM_FLOAT B1;
+    LVM_FLOAT B2;
+    LVM_FLOAT Dt0;
+    LVM_FLOAT B2_Den;
+    LVM_FLOAT B2_Num;
+    LVM_FLOAT COS_T0;
+    LVM_FLOAT coef;
+    LVM_FLOAT factor;
+    LVM_FLOAT t0;
+    LVM_INT16 i;
 
     /*
      * Get the filter definition
      */
-    LVM_FLOAT          Frequency   = (LVM_FLOAT)(pFilterParams->CenterFrequency);
-    LVM_FLOAT          QFactor     = ((LVM_FLOAT)(pFilterParams->QFactor)) / 100;
+    LVM_FLOAT Frequency = (LVM_FLOAT)(pFilterParams->CenterFrequency);
+    LVM_FLOAT QFactor = ((LVM_FLOAT)(pFilterParams->QFactor)) / 100;
 
     /*
      * Calculating the intermediate values
      */
-    T0 = Frequency * LVPSA_Float_TwoPiOnFsTable[Fs];   /* T0 = 2 * Pi * Fc / Fs */
-    D = 3200;                 /* Floating point value 1.000000 (1*100*2^5) */
-                    /* Force D = 1 : the function was originally used for a peaking filter.
-                       The D parameter do not exist for a BandPass filter coefficients */
+    T0 = Frequency * LVPSA_Float_TwoPiOnFsTable[Fs]; /* T0 = 2 * Pi * Fc / Fs */
+    D = 3200;                                        /* Floating point value 1.000000 (1*100*2^5) */
+    /* Force D = 1: the function was originally used for a peaking filter.
+       The D parameter does not exist for band-pass filter coefficients. */
 
     /*
      * Calculate the B2 coefficient
      */
-    Dt0 =  T0 / 2048 ;
+    Dt0 = T0 / 2048;
     B2_Den = QFactor + Dt0;
     B2_Num = Dt0 - QFactor;
     B2 = B2_Num / (2 * B2_Den);
@@ -495,20 +445,19 @@
      *
      *  Cos += coef(n) * t0^n                   For n = 0 to 6
      */
-    T0 = (T0 / 2048) * 0.63658558f;              /* Scale to 1.0 in 16-bit for range 0 to fs/2 */
-    t0 = T0 ;
-    factor = 1.0f;                            /* Initialise to 1.0 for the a0 coefficient */
-    COS_T0 = 0.0f;                                 /* Initialise the error to zero */
-    for (i = 1; i < 7; i++)
-    {
-        coef    = LVPSA_Float_CosCoef[i];                /* Get the nth coefficient */
-        COS_T0 += (factor * coef);         /* The nth partial sum */
-        factor  = (factor * t0) ;           /* Calculate t0^n */
+    T0 = (T0 / 2048) * 0.63658558f; /* Scale to 1.0 in 16-bit for range 0 to fs/2 */
+    t0 = T0;
+    factor = 1.0f; /* Initialise to 1.0 for the a0 coefficient */
+    COS_T0 = 0.0f; /* Initialise the error to zero */
+    for (i = 1; i < 7; i++) {
+        coef = LVPSA_Float_CosCoef[i]; /* Get the nth coefficient */
+        COS_T0 += (factor * coef);     /* The nth partial sum */
+        factor = (factor * t0);        /* Calculate t0^n */
     }
-    COS_T0 = COS_T0 * 8;    /*LVPSA_CosCoef_float[0]*/      /* Correct the scaling */
+    COS_T0 = COS_T0 * 8; /*LVPSA_CosCoef_float[0]*/ /* Correct the scaling */
 
-    B1 = ((LVM_FLOAT)0.5 - B2) * (COS_T0);    /* B1 = (0.5 - b2) * cos(t0) */
-    A0 = ((LVM_FLOAT)0.5 + B2) / 2;                        /* A0 = (0.5 + b2) / 2 */
+    B1 = ((LVM_FLOAT)0.5 - B2) * (COS_T0); /* B1 = (0.5 - b2) * cos(t0) */
+    A0 = ((LVM_FLOAT)0.5 + B2) / 2;        /* A0 = (0.5 + b2) / 2 */
 
     /*
      * Write coeff into the data structure
@@ -517,7 +466,7 @@
     pCoefficients->B1 = B1 * 2;
     pCoefficients->B2 = B2 * 2;
 
-    return(LVPSA_OK);
+    return (LVPSA_OK);
 }
 /****************************************************************************************/
 /*                                                                                      */
@@ -561,49 +510,46 @@
 /*     of the n bands equalizer (LVEQNB                                                 */
 /*                                                                                      */
 /****************************************************************************************/
-LVPSA_RETURN LVPSA_BPDoublePrecCoefs(   LVM_UINT16            Fs,
-                                        LVPSA_FilterParam_t   *pFilterParams,
-                                        BP_FLOAT_Coefs_t      *pCoefficients)
-{
-
-    extern LVM_FLOAT    LVPSA_Float_TwoPiOnFsTable[];
-    extern LVM_FLOAT    LVPSA_Float_DPCosCoef[];
+LVPSA_RETURN LVPSA_BPDoublePrecCoefs(LVM_UINT16 Fs, LVPSA_FilterParam_t* pFilterParams,
+                                     BP_FLOAT_Coefs_t* pCoefficients) {
+    extern LVM_FLOAT LVPSA_Float_TwoPiOnFsTable[];
+    extern LVM_FLOAT LVPSA_Float_DPCosCoef[];
 
     /*
      * Intermediate variables and temporary values
      */
-    LVM_FLOAT           T0;
-    LVM_FLOAT           D;
-    LVM_FLOAT           A0;
-    LVM_FLOAT           B1;
-    LVM_FLOAT           B2;
-    LVM_FLOAT           Dt0;
-    LVM_FLOAT           B2_Den;
-    LVM_FLOAT           B2_Num;
-    LVM_FLOAT           CosErr;
-    LVM_FLOAT           coef;
-    LVM_FLOAT           factor;
-    LVM_FLOAT           t0;
-    LVM_INT16           i;
+    LVM_FLOAT T0;
+    LVM_FLOAT D;
+    LVM_FLOAT A0;
+    LVM_FLOAT B1;
+    LVM_FLOAT B2;
+    LVM_FLOAT Dt0;
+    LVM_FLOAT B2_Den;
+    LVM_FLOAT B2_Num;
+    LVM_FLOAT CosErr;
+    LVM_FLOAT coef;
+    LVM_FLOAT factor;
+    LVM_FLOAT t0;
+    LVM_INT16 i;
 
     /*
      * Get the filter definition
      */
-    LVM_FLOAT          Frequency   = (LVM_FLOAT)(pFilterParams->CenterFrequency);
-    LVM_FLOAT          QFactor     = ((LVM_FLOAT)(pFilterParams->QFactor)) / 100;
+    LVM_FLOAT Frequency = (LVM_FLOAT)(pFilterParams->CenterFrequency);
+    LVM_FLOAT QFactor = ((LVM_FLOAT)(pFilterParams->QFactor)) / 100;
 
     /*
      * Calculating the intermediate values
      */
-    T0 = Frequency * LVPSA_Float_TwoPiOnFsTable[Fs];   /* T0 = 2 * Pi * Fc / Fs */
-    D = 3200;    /* Floating point value 1.000000 (1*100*2^5) */
-                 /* Force D = 1 : the function was originally used for a peaking filter.
-                    The D parameter do not exist for a BandPass filter coefficients */
+    T0 = Frequency * LVPSA_Float_TwoPiOnFsTable[Fs]; /* T0 = 2 * Pi * Fc / Fs */
+    D = 3200;                                        /* Floating point value 1.000000 (1*100*2^5) */
+    /* Force D = 1: the function was originally used for a peaking filter.
+       The D parameter does not exist for band-pass filter coefficients. */
 
     /*
      * Calculate the B2 coefficient
      */
-    Dt0 =  T0 / 2048 ;
+    Dt0 = T0 / 2048;
     B2_Den = QFactor + Dt0;
     B2_Num = Dt0 - QFactor;
     B2 = B2_Num / (2 * B2_Den);
@@ -613,25 +559,24 @@
      *
      *  CosErr += coef(n) * t0^n                For n = 0 to 4
      */
-    T0 = T0 * 0.994750f;                    /* Scale to 1.0 in 16-bit for range 0 to fs/50 */
+    T0 = T0 * 0.994750f; /* Scale to 1.0 in 16-bit for range 0 to fs/50 */
     t0 = T0;
-    factor = 1.0f;                            /* Initialise to 1.0 for the a0 coefficient */
-    CosErr = 0.0f;                                 /* Initialise the error to zero */
-    for (i = 1; i < 5; i++)
-    {
-        coef = LVPSA_Float_DPCosCoef[i];              /* Get the nth coefficient */
+    factor = 1.0f; /* Initialise to 1.0 for the a0 coefficient */
+    CosErr = 0.0f; /* Initialise the error to zero */
+    for (i = 1; i < 5; i++) {
+        coef = LVPSA_Float_DPCosCoef[i]; /* Get the nth coefficient */
         CosErr += factor * coef;         /* The nth partial sum */
-        factor = factor * t0;           /* Calculate t0^n */
+        factor = factor * t0;            /* Calculate t0^n */
     }
-    CosErr = CosErr * 2;          /* Correct the scaling */
+    CosErr = CosErr * 2; /* Correct the scaling */
 
     /*
      * Calculate the B1 and A0 coefficients
      */
-    B1 = ((LVM_FLOAT)0.5 - B2);                     /* B1 = (0.5 - b2) */
-    A0 = B1 * CosErr ;    /* Temporary storage for (0.5 - b2) * coserr(t0) */
-    B1 -= A0;                                   /* B1 = (0.5 - b2) * (1 - coserr(t0))  */
-    A0 = ((LVM_FLOAT)0.5  + B2) / 2;                /* A0 = (0.5 + b2) / 2 */
+    B1 = ((LVM_FLOAT)0.5 - B2);     /* B1 = (0.5 - b2) */
+    A0 = B1 * CosErr;               /* Temporary storage for (0.5 - b2) * coserr(t0) */
+    B1 -= A0;                       /* B1 = (0.5 - b2) * (1 - coserr(t0))  */
+    A0 = ((LVM_FLOAT)0.5 + B2) / 2; /* A0 = (0.5 + b2) / 2 */
 
     /*
      * Write coeff into the data structure
@@ -640,7 +585,7 @@
     pCoefficients->B1 = B1;
     pCoefficients->B2 = B2;
 
-    return(LVPSA_OK);
+    return (LVPSA_OK);
 }
 /************************************************************************************/
 /*                                                                                  */
@@ -658,24 +603,20 @@
 /* NOTES:                                                                           */
 /*                                                                                  */
 /************************************************************************************/
-LVPSA_RETURN LVPSA_ClearFilterHistory(LVPSA_InstancePr_t        *pInst)
-{
-    LVM_INT8       *pTapAddress;
-    LVM_UINT32       i;
+LVPSA_RETURN LVPSA_ClearFilterHistory(LVPSA_InstancePr_t* pInst) {
+    LVM_INT8* pTapAddress;
+    LVM_UINT32 i;
 
     /* Band Pass filters taps */
-    pTapAddress = (LVM_INT8 *)pInst->pBP_Taps;
-    for(i = 0; i < pInst->nBands * sizeof(Biquad_1I_Order2_FLOAT_Taps_t); i++)
-    {
+    pTapAddress = (LVM_INT8*)pInst->pBP_Taps;
+    for (i = 0; i < pInst->nBands * sizeof(Biquad_1I_Order2_FLOAT_Taps_t); i++) {
         pTapAddress[i] = 0;
     }
     /* Quasi-peak filters taps */
-    pTapAddress = (LVM_INT8 *)pInst->pQPD_Taps;
-    for(i = 0; i < pInst->nBands * sizeof(QPD_Taps_t); i++)
-    {
+    pTapAddress = (LVM_INT8*)pInst->pQPD_Taps;
+    for (i = 0; i < pInst->nBands * sizeof(QPD_Taps_t); i++) {
         pTapAddress[i] = 0;
     }
 
-    return(LVPSA_OK);
+    return (LVPSA_OK);
 }
-
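A side note on the precision selection in LVPSA_SetBPFiltersType above: LOW_FREQ and HIGH_FREQ are 1/110 and 1/85 scaled by 2^15, so comparing them against fc << 15 implements the tests fc <= fs/110 and fs/110 < fc < fs/85 without a division. A small standalone sketch, with purely illustrative 48 kHz figures:

#include <cstdint>
#include <cstdio>

int main() {
    const uint32_t LOW_FREQ = 298;  /* ~32768/110, as defined in LVPSA_Control.cpp */
    const uint32_t HIGH_FREQ = 386; /* ~32768/85 */
    const uint32_t fs = 48000;      /* sample rate in Hz (illustrative) */
    const uint32_t fc = 400;        /* band centre frequency in Hz (illustrative) */
    const int16_t QFactor = 350;    /* Q = 3.50, stored as Q x 100 */

    /* (LOW_FREQ * fs) >= (fc << 15) is the fixed-point form of fc <= fs/110:
     * here 298 * 48000 = 14,304,000 and 400 << 15 = 13,107,200, so this band
     * would be assigned a double-precision biquad. */
    bool doublePrecision = (LOW_FREQ * fs) >= (fc << 15);

    /* Bands with fs/110 < fc < fs/85 also use double precision when Q > 3. */
    if (!doublePrecision && (fc << 15) < (HIGH_FREQ * fs) && QFactor > 300) {
        doublePrecision = true;
    }

    printf("double precision: %s\n", doublePrecision ? "yes" : "no");
    return 0;
}

For the 48 kHz case above fs/110 is roughly 436 Hz, so every band centred at or below that frequency is promoted to the double-precision filter regardless of Q.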
diff --git a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Init.cpp b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Init.cpp
index 9fcd82f..9a2b29f 100644
--- a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Init.cpp
+++ b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Init.cpp
@@ -15,23 +15,24 @@
  * limitations under the License.
  */
 
-#include    "LVPSA.h"
-#include    "LVPSA_Private.h"
-#include    "InstAlloc.h"
+#include <stdlib.h>
+#include "LVPSA.h"
+#include "LVPSA_Private.h"
+#include "InstAlloc.h"
 
 /************************************************************************************/
 /*                                                                                  */
 /* FUNCTION:            LVPSA_Init                                                  */
 /*                                                                                  */
 /* DESCRIPTION:                                                                     */
-/*  Initialize the LVPSA module                                                     */
+/*  Create and initialize the LVPSA module, including the instance handle            */
 /*                                                                                  */
 /*                                                                                  */
 /* PARAMETERS:                                                                      */
-/*  phInstance          Pointer to pointer to the instance                          */
+/*  phInstance          Pointer to the instance handle                              */
 /*  InitParams          Init parameters structure                                   */
 /*  ControlParams       Control parameters structure                                */
-/*  pMemoryTable        Memory table that contains memory areas definition          */
+/*  pScratch            Pointer to bundle scratch memory area                       */
 /*                                                                                  */
 /*                                                                                  */
 /* RETURNS:                                                                         */
@@ -39,75 +40,22 @@
 /*  otherwise           Error due to bad parameters                                 */
 /*                                                                                  */
 /************************************************************************************/
-LVPSA_RETURN LVPSA_Init              ( pLVPSA_Handle_t             *phInstance,
-                                       LVPSA_InitParams_t          *pInitParams,
-                                       LVPSA_ControlParams_t       *pControlParams,
-                                       LVPSA_MemTab_t              *pMemoryTable )
-{
-    LVPSA_InstancePr_t          *pLVPSA_Inst;
-    LVPSA_RETURN                errorCode       = LVPSA_OK;
-    LVM_UINT32                  ii;
-    extern LVM_FLOAT            LVPSA_Float_GainTable[];
-    LVM_UINT32                  BufferLength = 0;
-
-    /* Ints_Alloc instances, needed for memory alignment management */
-    INST_ALLOC          Instance;
-    INST_ALLOC          Scratch;
-    INST_ALLOC          Data;
-    INST_ALLOC          Coef;
-
-    /* Check parameters */
-    if((phInstance == LVM_NULL) || (pInitParams == LVM_NULL) || (pControlParams == LVM_NULL) || (pMemoryTable == LVM_NULL))
-    {
-        return(LVPSA_ERROR_NULLADDRESS);
-    }
-    if( (pInitParams->SpectralDataBufferDuration > LVPSA_MAXBUFFERDURATION)   ||
-        (pInitParams->SpectralDataBufferDuration == 0)                        ||
-        (pInitParams->MaxInputBlockSize > LVPSA_MAXINPUTBLOCKSIZE)      ||
-        (pInitParams->MaxInputBlockSize == 0)                           ||
-        (pInitParams->nBands < LVPSA_NBANDSMIN)                         ||
-        (pInitParams->nBands > LVPSA_NBANDSMAX)                         ||
-        (pInitParams->pFiltersParams == 0))
-    {
-        return(LVPSA_ERROR_INVALIDPARAM);
-    }
-    for(ii = 0; ii < pInitParams->nBands; ii++)
-    {
-        if((pInitParams->pFiltersParams[ii].CenterFrequency > LVPSA_MAXCENTERFREQ) ||
-           (pInitParams->pFiltersParams[ii].PostGain        > LVPSA_MAXPOSTGAIN)   ||
-           (pInitParams->pFiltersParams[ii].PostGain        < LVPSA_MINPOSTGAIN)   ||
-           (pInitParams->pFiltersParams[ii].QFactor < LVPSA_MINQFACTOR)            ||
-           (pInitParams->pFiltersParams[ii].QFactor > LVPSA_MAXQFACTOR))
-           {
-                return(LVPSA_ERROR_INVALIDPARAM);
-           }
-    }
-
-    /*Inst_Alloc instances initialization */
-    InstAlloc_Init( &Instance   , pMemoryTable->Region[LVPSA_MEMREGION_INSTANCE].pBaseAddress);
-    InstAlloc_Init( &Scratch    , pMemoryTable->Region[LVPSA_MEMREGION_SCRATCH].pBaseAddress);
-    InstAlloc_Init( &Data       , pMemoryTable->Region[LVPSA_MEMREGION_PERSISTENT_DATA].pBaseAddress);
-    InstAlloc_Init( &Coef       , pMemoryTable->Region[LVPSA_MEMREGION_PERSISTENT_COEF].pBaseAddress);
+LVPSA_RETURN LVPSA_Init(pLVPSA_Handle_t* phInstance, LVPSA_InitParams_t* pInitParams,
+                        LVPSA_ControlParams_t* pControlParams, void* pScratch) {
+    LVPSA_InstancePr_t* pLVPSA_Inst;
+    LVPSA_RETURN errorCode = LVPSA_OK;
+    LVM_UINT32 ii;
+    extern LVM_FLOAT LVPSA_Float_GainTable[];
+    LVM_UINT32 BufferLength = 0;
 
     /* Create the instance handle */
-    if (*phInstance == LVM_NULL)
-    {
-        *phInstance = InstAlloc_AddMember( &Instance, sizeof(LVPSA_InstancePr_t) );
+    *phInstance = calloc(1, sizeof(*pLVPSA_Inst));
+    if (*phInstance == LVM_NULL) {
+        return LVPSA_ERROR_NULLADDRESS;
     }
-    pLVPSA_Inst =(LVPSA_InstancePr_t*)*phInstance;
+    pLVPSA_Inst = (LVPSA_InstancePr_t*)*phInstance;
 
-    /* Check the memory table for NULL pointers */
-    for (ii = 0; ii < LVPSA_NR_MEMORY_REGIONS; ii++)
-    {
-        if (pMemoryTable->Region[ii].Size!=0)
-        {
-            if (pMemoryTable->Region[ii].pBaseAddress==LVM_NULL)
-            {
-                return(LVPSA_ERROR_NULLADDRESS);
-            }
-            pLVPSA_Inst->MemoryTable.Region[ii] = pMemoryTable->Region[ii];
-        }
-    }
+    pLVPSA_Inst->pScratch = pScratch;
 
     /* Initialize module's internal parameters */
     pLVPSA_Inst->bControlPending = LVM_FALSE;
@@ -117,81 +65,150 @@
     pLVPSA_Inst->CurrentParams.Fs = LVM_FS_DUMMY;
     pLVPSA_Inst->CurrentParams.LevelDetectionSpeed = LVPSA_SPEED_DUMMY;
 
-    {   /* for avoiding QAC warnings */
-        LVM_INT32 SDBD=(LVM_INT32)pLVPSA_Inst->SpectralDataBufferDuration;
-        LVM_INT32 IRTI=(LVM_INT32)LVPSA_InternalRefreshTimeInv;
+    { /* for avoiding QAC warnings */
+        LVM_INT32 SDBD = (LVM_INT32)pLVPSA_Inst->SpectralDataBufferDuration;
+        LVM_INT32 IRTI = (LVM_INT32)LVPSA_InternalRefreshTimeInv;
         LVM_INT32 BL;
 
-        MUL32x32INTO32(SDBD,IRTI,BL,LVPSA_InternalRefreshTimeShift)
+        MUL32x32INTO32(SDBD, IRTI, BL, LVPSA_InternalRefreshTimeShift)
 
-        BufferLength=(LVM_UINT32)BL;
+        BufferLength = (LVM_UINT32)BL;
     }
 
-    if((BufferLength * LVPSA_InternalRefreshTime) != pLVPSA_Inst->SpectralDataBufferDuration)
-    {
+    if ((BufferLength * LVPSA_InternalRefreshTime) != pLVPSA_Inst->SpectralDataBufferDuration) {
         pLVPSA_Inst->SpectralDataBufferLength = BufferLength + 1;
-    }
-    else
-    {
+    } else {
         pLVPSA_Inst->SpectralDataBufferLength = BufferLength;
     }
 
     /* Assign the pointers */
-    pLVPSA_Inst->pPostGains             =
-        (LVM_FLOAT *)InstAlloc_AddMember(&Instance, pInitParams->nBands * sizeof(LVM_FLOAT));
-    pLVPSA_Inst->pFiltersParams             = (LVPSA_FilterParam_t *)
-        InstAlloc_AddMember(&Instance, pInitParams->nBands * sizeof(LVPSA_FilterParam_t));
-    pLVPSA_Inst->pSpectralDataBufferStart   = (LVM_UINT8 *)
-        InstAlloc_AddMember(&Instance, pInitParams->nBands * \
-                                pLVPSA_Inst->SpectralDataBufferLength * sizeof(LVM_UINT8));
-    pLVPSA_Inst->pPreviousPeaks             = (LVM_UINT8 *)
-                  InstAlloc_AddMember(&Instance, pInitParams->nBands * sizeof(LVM_UINT8));
-    pLVPSA_Inst->pBPFiltersPrecision        = (LVPSA_BPFilterPrecision_en *)
-                  InstAlloc_AddMember(&Instance, pInitParams->nBands * \
-                                                       sizeof(LVPSA_BPFilterPrecision_en));
-    pLVPSA_Inst->pBP_Instances          = (Biquad_FLOAT_Instance_t *)
-                  InstAlloc_AddMember(&Coef, pInitParams->nBands * \
-                                                          sizeof(Biquad_FLOAT_Instance_t));
-    pLVPSA_Inst->pQPD_States            = (QPD_FLOAT_State_t *)
-                  InstAlloc_AddMember(&Coef, pInitParams->nBands * \
-                                                                sizeof(QPD_FLOAT_State_t));
-
-    pLVPSA_Inst->pBP_Taps               = (Biquad_1I_Order2_FLOAT_Taps_t *)
-        InstAlloc_AddMember(&Data, pInitParams->nBands * \
-                                                     sizeof(Biquad_1I_Order2_FLOAT_Taps_t));
-    pLVPSA_Inst->pQPD_Taps              = (QPD_FLOAT_Taps_t *)
-        InstAlloc_AddMember(&Data, pInitParams->nBands * \
-                                                    sizeof(QPD_FLOAT_Taps_t));
+    pLVPSA_Inst->pPostGains =
+            (LVM_FLOAT*)calloc(pInitParams->nBands, sizeof(*(pLVPSA_Inst->pPostGains)));
+    if (pLVPSA_Inst->pPostGains == LVM_NULL) {
+        return LVPSA_ERROR_NULLADDRESS;
+    }
+    pLVPSA_Inst->pFiltersParams = (LVPSA_FilterParam_t*)calloc(
+            pInitParams->nBands, sizeof(*(pLVPSA_Inst->pFiltersParams)));
+    if (pLVPSA_Inst->pFiltersParams == LVM_NULL) {
+        return LVPSA_ERROR_NULLADDRESS;
+    }
+    pLVPSA_Inst->pSpectralDataBufferStart = (LVM_UINT8*)calloc(
+            pInitParams->nBands, pLVPSA_Inst->SpectralDataBufferLength *
+                                         sizeof(*(pLVPSA_Inst->pSpectralDataBufferStart)));
+    if (pLVPSA_Inst->pSpectralDataBufferStart == LVM_NULL) {
+        return LVPSA_ERROR_NULLADDRESS;
+    }
+    pLVPSA_Inst->pPreviousPeaks =
+            (LVM_UINT8*)calloc(pInitParams->nBands, sizeof(*(pLVPSA_Inst->pPreviousPeaks)));
+    if (pLVPSA_Inst->pPreviousPeaks == LVM_NULL) {
+        return LVPSA_ERROR_NULLADDRESS;
+    }
+    pLVPSA_Inst->pBPFiltersPrecision = (LVPSA_BPFilterPrecision_en*)calloc(
+            pInitParams->nBands, sizeof(*(pLVPSA_Inst->pBPFiltersPrecision)));
+    if (pLVPSA_Inst->pBPFiltersPrecision == LVM_NULL) {
+        return LVPSA_ERROR_NULLADDRESS;
+    }
+    pLVPSA_Inst->pBP_Instances = (Biquad_FLOAT_Instance_t*)calloc(
+            pInitParams->nBands, sizeof(*(pLVPSA_Inst->pBP_Instances)));
+    if (pLVPSA_Inst->pBP_Instances == LVM_NULL) {
+        return LVPSA_ERROR_NULLADDRESS;
+    }
+    pLVPSA_Inst->pQPD_States =
+            (QPD_FLOAT_State_t*)calloc(pInitParams->nBands, sizeof(*(pLVPSA_Inst->pQPD_States)));
+    if (pLVPSA_Inst->pQPD_States == LVM_NULL) {
+        return LVPSA_ERROR_NULLADDRESS;
+    }
+    pLVPSA_Inst->pBP_Taps = (Biquad_1I_Order2_FLOAT_Taps_t*)calloc(
+            pInitParams->nBands, sizeof(*(pLVPSA_Inst->pBP_Taps)));
+    if (pLVPSA_Inst->pBP_Taps == LVM_NULL) {
+        return LVPSA_ERROR_NULLADDRESS;
+    }
+    pLVPSA_Inst->pQPD_Taps =
+            (QPD_FLOAT_Taps_t*)calloc(pInitParams->nBands, sizeof(*(pLVPSA_Inst->pQPD_Taps)));
+    if (pLVPSA_Inst->pQPD_Taps == LVM_NULL) {
+        return LVPSA_ERROR_NULLADDRESS;
+    }
 
     /* Copy filters parameters in the private instance */
-    for(ii = 0; ii < pLVPSA_Inst->nBands; ii++)
-    {
+    for (ii = 0; ii < pLVPSA_Inst->nBands; ii++) {
         pLVPSA_Inst->pFiltersParams[ii] = pInitParams->pFiltersParams[ii];
     }
 
     /* Set Post filters gains*/
-    for(ii = 0; ii < pLVPSA_Inst->nBands; ii++)
-    {
-        pLVPSA_Inst->pPostGains[ii] = LVPSA_Float_GainTable[15 + \
-                                                        pInitParams->pFiltersParams[ii].PostGain];
+    for (ii = 0; ii < pLVPSA_Inst->nBands; ii++) {
+        pLVPSA_Inst->pPostGains[ii] =
+                LVPSA_Float_GainTable[15 + pInitParams->pFiltersParams[ii].PostGain];
     }
     pLVPSA_Inst->pSpectralDataBufferWritePointer = pLVPSA_Inst->pSpectralDataBufferStart;
 
     /* Initialize control dependant internal parameters */
-    errorCode = LVPSA_Control (*phInstance, pControlParams);
+    errorCode = LVPSA_Control(*phInstance, pControlParams);
 
-    if(errorCode!=0)
-    {
+    if (errorCode != 0) {
         return errorCode;
     }
 
-    errorCode = LVPSA_ApplyNewSettings (pLVPSA_Inst);
+    errorCode = LVPSA_ApplyNewSettings(pLVPSA_Inst);
 
-    if(errorCode!=0)
-    {
+    if (errorCode != 0) {
         return errorCode;
     }
 
-    return(errorCode);
+    return (errorCode);
 }
 
+/************************************************************************************/
+/*                                                                                  */
+/* FUNCTION:            LVPSA_DeInit                                                */
+/*                                                                                  */
+/* DESCRIPTION:                                                                     */
+/*    Free the memories created in LVPSA_Init call including instance handle        */
+/*                                                                                  */
+/* PARAMETERS:                                                                      */
+/*  phInstance          Pointer to the instance handle                              */
+/*                                                                                  */
+/************************************************************************************/
+void LVPSA_DeInit(pLVPSA_Handle_t* phInstance) {
+    LVPSA_InstancePr_t* pLVPSA_Inst = (LVPSA_InstancePr_t*)*phInstance;
+    if (pLVPSA_Inst == LVM_NULL) {
+        return;
+    }
+    if (pLVPSA_Inst->pPostGains != LVM_NULL) {
+        free(pLVPSA_Inst->pPostGains);
+        pLVPSA_Inst->pPostGains = LVM_NULL;
+    }
+    if (pLVPSA_Inst->pFiltersParams != LVM_NULL) {
+        free(pLVPSA_Inst->pFiltersParams);
+        pLVPSA_Inst->pFiltersParams = LVM_NULL;
+    }
+    if (pLVPSA_Inst->pSpectralDataBufferStart != LVM_NULL) {
+        free(pLVPSA_Inst->pSpectralDataBufferStart);
+        pLVPSA_Inst->pSpectralDataBufferStart = LVM_NULL;
+    }
+    if (pLVPSA_Inst->pPreviousPeaks != LVM_NULL) {
+        free(pLVPSA_Inst->pPreviousPeaks);
+        pLVPSA_Inst->pPreviousPeaks = LVM_NULL;
+    }
+    if (pLVPSA_Inst->pBPFiltersPrecision != LVM_NULL) {
+        free(pLVPSA_Inst->pBPFiltersPrecision);
+        pLVPSA_Inst->pBPFiltersPrecision = LVM_NULL;
+    }
+    if (pLVPSA_Inst->pBP_Instances != LVM_NULL) {
+        free(pLVPSA_Inst->pBP_Instances);
+        pLVPSA_Inst->pBP_Instances = LVM_NULL;
+    }
+    if (pLVPSA_Inst->pQPD_States != LVM_NULL) {
+        free(pLVPSA_Inst->pQPD_States);
+        pLVPSA_Inst->pQPD_States = LVM_NULL;
+    }
+    if (pLVPSA_Inst->pBP_Taps != LVM_NULL) {
+        free(pLVPSA_Inst->pBP_Taps);
+        pLVPSA_Inst->pBP_Taps = LVM_NULL;
+    }
+    if (pLVPSA_Inst->pQPD_Taps != LVM_NULL) {
+        free(pLVPSA_Inst->pQPD_Taps);
+        pLVPSA_Inst->pQPD_Taps = LVM_NULL;
+    }
+    free(pLVPSA_Inst);
+    *phInstance = LVM_NULL;
+}
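A minimal caller-side sketch (not part of this patch) of the lifecycle implied by the code above: the instance comes from LVPSA_Init earlier in this change, is driven with LVPSA_Process/LVPSA_GetSpectrum, and is torn down with the new LVPSA_DeInit. The buffer sizes and error handling here are illustrative assumptions.

    // Sketch only - hypothetical caller, assuming hInstance was obtained from
    // LVPSA_Init (see the earlier hunk in this change).
    void RunAnalyzerOnce(pLVPSA_Handle_t hInstance, LVM_FLOAT* pSamples, LVM_UINT16 blockSize,
                         LVPSA_Time audioTime) {
        LVM_UINT8 current[LVPSA_NBANDSMAX]; /* per-band current levels */
        LVM_UINT8 peaks[LVPSA_NBANDSMAX];   /* per-band decayed peak levels */

        if (LVPSA_Process(hInstance, pSamples, blockSize, audioTime) != LVPSA_OK) {
            return;
        }
        if (LVPSA_GetSpectrum(hInstance, audioTime, current, peaks) != LVPSA_OK) {
            return;
        }
        /* ... consume current[] / peaks[] ... */
    }
    /* When the owner is finished with the analyzer:
     *     LVPSA_DeInit(&hInstance);  // frees every per-band array plus the instance, nulls handle
     */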
diff --git a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Memory.cpp b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Memory.cpp
deleted file mode 100644
index eafcbe6..0000000
--- a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Memory.cpp
+++ /dev/null
@@ -1,165 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include    "LVPSA.h"
-#include    "LVPSA_Private.h"
-#include    "InstAlloc.h"
-
-/****************************************************************************************/
-/*                                                                                      */
-/* FUNCTION:                LVEQNB_Memory                                               */
-/*                                                                                      */
-/* DESCRIPTION:                                                                         */
-/*  This function is used for memory allocation and free. It can be called in           */
-/*  two ways:                                                                           */
-/*                                                                                      */
-/*      hInstance = NULL         Returns the memory requirements                        */
-/*      hInstance = Instance handle     Returns the memory requirements and             */
-/*                                      allocated base addresses for the instance       */
-/*                                                                                      */
-/*  When this function is called for memory allocation (hInstance=NULL) the memory      */
-/*  base address pointers are NULL on return.                                           */
-/*                                                                                      */
-/*  When the function is called for free (hInstance = Instance Handle) the memory       */
-/*  table returns the allocated memory and base addresses used during initialisation.   */
-/*                                                                                      */
-/* PARAMETERS:                                                                          */
-/*  hInstance               Instance Handle                                             */
-/*  pMemoryTable            Pointer to an empty memory definition table                 */
-/*  InitParams              Pointer to the instance init parameters                     */
-/*                                                                                      */
-/* RETURNS:                                                                             */
-/*  LVPSA_OK            Succeeds                                                        */
-/*  otherwise           Error due to bad parameters                                     */
-/*                                                                                      */
-/****************************************************************************************/
-LVPSA_RETURN LVPSA_Memory            ( pLVPSA_Handle_t             hInstance,
-                                       LVPSA_MemTab_t             *pMemoryTable,
-                                       LVPSA_InitParams_t         *pInitParams    )
-{
-    LVM_UINT32          ii;
-    LVM_UINT32          BufferLength;
-    INST_ALLOC          Instance;
-    INST_ALLOC          Scratch;
-    INST_ALLOC          Data;
-    INST_ALLOC          Coef;
-    LVPSA_InstancePr_t *pLVPSA_Inst = (LVPSA_InstancePr_t*)hInstance;
-
-    InstAlloc_Init( &Instance   , LVM_NULL);
-    InstAlloc_Init( &Scratch    , LVM_NULL);
-    InstAlloc_Init( &Data       , LVM_NULL);
-    InstAlloc_Init( &Coef       , LVM_NULL);
-
-    if((pMemoryTable == LVM_NULL) || (pInitParams == LVM_NULL))
-    {
-        return(LVPSA_ERROR_NULLADDRESS);
-    }
-
-    /*
-     * Fill in the memory table
-     */
-    if (hInstance == LVM_NULL)
-    {
-
-        /* Check init parameter */
-        if( (pInitParams->SpectralDataBufferDuration > LVPSA_MAXBUFFERDURATION)   ||
-            (pInitParams->SpectralDataBufferDuration == 0)                        ||
-            (pInitParams->MaxInputBlockSize > LVPSA_MAXINPUTBLOCKSIZE)      ||
-            (pInitParams->MaxInputBlockSize == 0)                           ||
-            (pInitParams->nBands < LVPSA_NBANDSMIN)                         ||
-            (pInitParams->nBands > LVPSA_NBANDSMAX)                         ||
-            (pInitParams->pFiltersParams == 0))
-        {
-            return(LVPSA_ERROR_INVALIDPARAM);
-        }
-        for(ii = 0; ii < pInitParams->nBands; ii++)
-        {
-            if((pInitParams->pFiltersParams[ii].CenterFrequency > LVPSA_MAXCENTERFREQ) ||
-               (pInitParams->pFiltersParams[ii].PostGain        > LVPSA_MAXPOSTGAIN)   ||
-               (pInitParams->pFiltersParams[ii].PostGain        < LVPSA_MINPOSTGAIN)   ||
-               (pInitParams->pFiltersParams[ii].QFactor < LVPSA_MINQFACTOR)            ||
-               (pInitParams->pFiltersParams[ii].QFactor > LVPSA_MAXQFACTOR))
-               {
-                    return(LVPSA_ERROR_INVALIDPARAM);
-               }
-        }
-
-        /*
-         * Instance memory
-         */
-
-        InstAlloc_AddMember( &Instance, sizeof(LVPSA_InstancePr_t) );
-        InstAlloc_AddMember( &Instance, pInitParams->nBands * sizeof(LVM_FLOAT) );
-        InstAlloc_AddMember( &Instance, pInitParams->nBands * sizeof(LVPSA_FilterParam_t) );
-
-        {
-            /* for avoiding QAC warnings as MUL32x32INTO32 works on LVM_INT32 only*/
-            LVM_INT32 SDBD=(LVM_INT32)pInitParams->SpectralDataBufferDuration;
-            LVM_INT32 IRTI=(LVM_INT32)LVPSA_InternalRefreshTimeInv;
-            LVM_INT32 BL;
-
-            MUL32x32INTO32(SDBD,IRTI,BL,LVPSA_InternalRefreshTimeShift)
-            BufferLength=(LVM_UINT32)BL;
-        }
-
-        if((BufferLength * LVPSA_InternalRefreshTime) != pInitParams->SpectralDataBufferDuration)
-        {
-            BufferLength++;
-        }
-        InstAlloc_AddMember( &Instance, pInitParams->nBands * BufferLength * sizeof(LVM_UINT8) );
-        InstAlloc_AddMember( &Instance, pInitParams->nBands * sizeof(LVM_UINT8) );
-        InstAlloc_AddMember( &Instance, pInitParams->nBands * sizeof(LVPSA_BPFilterPrecision_en) );
-        pMemoryTable->Region[LVPSA_MEMREGION_INSTANCE].Size         = InstAlloc_GetTotal(&Instance);
-        pMemoryTable->Region[LVPSA_MEMREGION_INSTANCE].Type         = LVPSA_PERSISTENT;
-        pMemoryTable->Region[LVPSA_MEMREGION_INSTANCE].pBaseAddress = LVM_NULL;
-
-        /*
-         * Scratch memory
-         */
-        InstAlloc_AddMember( &Scratch, 2 * pInitParams->MaxInputBlockSize * sizeof(LVM_FLOAT) );
-        pMemoryTable->Region[LVPSA_MEMREGION_SCRATCH].Size         = InstAlloc_GetTotal(&Scratch);
-        pMemoryTable->Region[LVPSA_MEMREGION_SCRATCH].Type         = LVPSA_SCRATCH;
-        pMemoryTable->Region[LVPSA_MEMREGION_SCRATCH].pBaseAddress = LVM_NULL;
-
-        /*
-         * Persistent coefficients memory
-         */
-        InstAlloc_AddMember( &Coef, pInitParams->nBands * sizeof(Biquad_FLOAT_Instance_t) );
-        InstAlloc_AddMember( &Coef, pInitParams->nBands * sizeof(QPD_FLOAT_State_t) );
-        pMemoryTable->Region[LVPSA_MEMREGION_PERSISTENT_COEF].Size         = InstAlloc_GetTotal(&Coef);
-        pMemoryTable->Region[LVPSA_MEMREGION_PERSISTENT_COEF].Type         = LVPSA_PERSISTENT_COEF;
-        pMemoryTable->Region[LVPSA_MEMREGION_PERSISTENT_COEF].pBaseAddress = LVM_NULL;
-
-        /*
-         * Persistent data memory
-         */
-        InstAlloc_AddMember( &Data, pInitParams->nBands * sizeof(Biquad_1I_Order2_FLOAT_Taps_t) );
-        InstAlloc_AddMember( &Data, pInitParams->nBands * sizeof(QPD_FLOAT_Taps_t) );
-        pMemoryTable->Region[LVPSA_MEMREGION_PERSISTENT_DATA].Size         = InstAlloc_GetTotal(&Data);
-        pMemoryTable->Region[LVPSA_MEMREGION_PERSISTENT_DATA].Type         = LVPSA_PERSISTENT_DATA;
-        pMemoryTable->Region[LVPSA_MEMREGION_PERSISTENT_DATA].pBaseAddress = LVM_NULL;
-
-    }
-    else
-    {
-        /* Read back memory allocation table */
-        *pMemoryTable = pLVPSA_Inst->MemoryTable;
-    }
-
-    return(LVPSA_OK);
-}
-
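With LVPSA_Memory gone, the four regions it used to report (instance, scratch, persistent coefficients, persistent data) are no longer caller-allocated: the persistent arrays are now calloc'd in LVPSA_Init and released by LVPSA_DeInit, and scratch appears to be supplied by the owning bundle through the new pScratch field. A hedged sketch of the scratch size an owner would have to provide, mirroring the deleted sizing line (2 * MaxInputBlockSize samples); the helper name is illustrative.

    // Sketch only - assumed owner-side scratch sizing, mirroring the removed
    // InstAlloc_AddMember(&Scratch, 2 * MaxInputBlockSize * sizeof(LVM_FLOAT)).
    // LVPSA_Process() uses the first half for the shifted copy of the input and
    // the second half (pScratch + InputBlockSize) for the band-pass output.
    LVM_UINT32 ScratchBytesForAnalyzer(const LVPSA_InitParams_t* pInitParams) {
        return 2u * pInitParams->MaxInputBlockSize * sizeof(LVM_FLOAT);
    }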
diff --git a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Private.h b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Private.h
index 61987b5..e00c11c 100644
--- a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Private.h
+++ b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Private.h
@@ -27,95 +27,100 @@
    CONSTANT DEFINITIONS
 ***********************************************************************************/
 
-/* Memory */
-#define LVPSA_INSTANCE_ALIGN             4      /* 32-bit alignment for structures                                  */
-#define LVPSA_SCRATCH_ALIGN              4      /* 32-bit alignment for long data                                   */
-#define LVPSA_COEF_ALIGN                 4      /* 32-bit alignment for long words                                  */
-#define LVPSA_DATA_ALIGN                 4      /* 32-bit alignment for long data                                   */
+#define LVPSA_NR_SUPPORTED_RATE 13 /* From 8000Hz to 192000Hz*/
+#define LVPSA_NR_SUPPORTED_SPEED \
+    3 /* LOW, MEDIUM, HIGH                                                */
 
-#define LVPSA_MEMREGION_INSTANCE         0      /* Offset to instance memory region in memory table                 */
-#define LVPSA_MEMREGION_PERSISTENT_COEF  1      /* Offset to persistent coefficients  memory region in memory table */
-#define LVPSA_MEMREGION_PERSISTENT_DATA  2      /* Offset to persistent taps  memory region in memory table         */
-#define LVPSA_MEMREGION_SCRATCH          3      /* Offset to scratch  memory region in memory table                 */
-#define LVPSA_NR_SUPPORTED_RATE          13      /* From 8000Hz to 192000Hz*/
-#define LVPSA_NR_SUPPORTED_SPEED         3      /* LOW, MEDIUM, HIGH                                                */
+#define LVPSA_MAXBUFFERDURATION \
+    4000 /* Maximum length in ms of the levels buffer                        */
+#define LVPSA_MAXINPUTBLOCKSIZE \
+    5000                   /* Maximum length in mono samples of the block to process           */
+#define LVPSA_NBANDSMIN 1  /* Minimum number of frequency band                                 */
+#define LVPSA_NBANDSMAX 30 /* Maximum number of frequency band                                 */
+#define LVPSA_MAXCENTERFREQ \
+    20000 /* Maximum possible center frequency                                */
+#define LVPSA_MINPOSTGAIN \
+    (-15)                     /* Minimum possible post gain                                       */
+#define LVPSA_MAXPOSTGAIN 15  /* Maximum possible post gain  */
+#define LVPSA_MINQFACTOR 25   /* Minimum possible Q factor                                        */
+#define LVPSA_MAXQFACTOR 1200 /* Maximum possible Q factor */
 
-#define LVPSA_MAXBUFFERDURATION          4000   /* Maximum length in ms of the levels buffer                        */
-#define LVPSA_MAXINPUTBLOCKSIZE          5000   /* Maximum length in mono samples of the block to process           */
-#define LVPSA_NBANDSMIN                  1      /* Minimum number of frequency band                                 */
-#define LVPSA_NBANDSMAX                  30     /* Maximum number of frequency band                                 */
-#define LVPSA_MAXCENTERFREQ              20000  /* Maximum possible center frequency                                */
-#define LVPSA_MINPOSTGAIN              (-15)    /* Minimum possible post gain                                       */
-#define LVPSA_MAXPOSTGAIN                15     /* Maximum possible post gain                                       */
-#define LVPSA_MINQFACTOR                 25     /* Minimum possible Q factor                                        */
-#define LVPSA_MAXQFACTOR                 1200   /* Maximum possible Q factor                                        */
+#define LVPSA_MAXLEVELDECAYFACTOR \
+    0x4111 /* Decay factor for the maximum values calculation                  */
+#define LVPSA_MAXLEVELDECAYSHIFT \
+    14 /* Decay shift for the maximum values calculation                   */
 
-#define LVPSA_MAXLEVELDECAYFACTOR        0x4111 /* Decay factor for the maximum values calculation                  */
-#define LVPSA_MAXLEVELDECAYSHIFT         14     /* Decay shift for the maximum values calculation                   */
+#define LVPSA_MAXUNSIGNEDCHAR 0xFF
 
-#define LVPSA_MAXUNSIGNEDCHAR            0xFF
-
-#define LVPSA_FsInvertShift              31
-#define LVPSA_GAINSHIFT                  11
-#define LVPSA_FREQSHIFT                  25
+#define LVPSA_FsInvertShift 31
+#define LVPSA_GAINSHIFT 11
+#define LVPSA_FREQSHIFT 25
 
 /**********************************************************************************
    TYPES DEFINITIONS
 ***********************************************************************************/
 
-#define LVPSA_InternalRefreshTime       0x0014    /* 20 ms (50Hz) in Q16.0      */
-#define LVPSA_InternalRefreshTimeInv    0x0666    /* 1/20ms left shifted by 15  */
-#define LVPSA_InternalRefreshTimeShift  15
+#define LVPSA_InternalRefreshTime 0x0014    /* 20 ms (50Hz) in Q16.0      */
+#define LVPSA_InternalRefreshTimeInv 0x0666 /* 1/20ms left shifted by 15  */
+#define LVPSA_InternalRefreshTimeShift 15
 
 /* Precision of the filter */
-typedef enum
-{
-    LVPSA_SimplePrecisionFilter,    /* Simple precision */
-    LVPSA_DoublePrecisionFilter     /* Double precision */
+typedef enum {
+    LVPSA_SimplePrecisionFilter, /* Simple precision */
+    LVPSA_DoublePrecisionFilter  /* Double precision */
 } LVPSA_BPFilterPrecision_en;
 
-typedef struct
-{
-    LVM_CHAR                    bControlPending;                    /* Flag incating a change of the control parameters                                             */
-    LVM_UINT16                  nBands;                             /* Number of bands of the spectrum analyzer                                                     */
-    LVM_UINT16                  MaxInputBlockSize;                  /* Maximum input data buffer size                                                               */
+typedef struct {
+    LVM_CHAR bControlPending;     /* Flag indicating a change of the control parameters   */
+    LVM_UINT16 nBands;            /* Number of bands of the spectrum analyzer            */
+    LVM_UINT16 MaxInputBlockSize; /* Maximum input data buffer size */
 
-    LVPSA_ControlParams_t       CurrentParams;                      /* Current control parameters of the module                                                     */
-    LVPSA_ControlParams_t       NewParams;                          /* New control parameters given by the user                                                     */
-    LVPSA_MemTab_t              MemoryTable;
+    LVPSA_ControlParams_t CurrentParams; /* Current control parameters of the module */
+    LVPSA_ControlParams_t NewParams;     /* New control parameters given by the user     */
+    void* pScratch;                      /* Pointer to bundle scratch buffer */
 
-    LVPSA_BPFilterPrecision_en *pBPFiltersPrecision;                /* Points a nBands elements array that contains the filter precision for each band              */
-    Biquad_FLOAT_Instance_t          *pBP_Instances;
+    LVPSA_BPFilterPrecision_en* pBPFiltersPrecision; /* Points to a nBands-element array that
+                                                        contains the filter precision per band */
+    Biquad_FLOAT_Instance_t* pBP_Instances;
     /* Points a nBands elements array that contains the band pass filter taps for each band */
-    Biquad_1I_Order2_FLOAT_Taps_t    *pBP_Taps;
+    Biquad_1I_Order2_FLOAT_Taps_t* pBP_Taps;
     /* Points a nBands elements array that contains the QPD filter instance for each band */
-    QPD_FLOAT_State_t                *pQPD_States;
+    QPD_FLOAT_State_t* pQPD_States;
     /* Points a nBands elements array that contains the QPD filter taps for each band */
-    QPD_FLOAT_Taps_t                 *pQPD_Taps;
+    QPD_FLOAT_Taps_t* pQPD_Taps;
 
     /* Points a nBands elements array that contains the post-filter gains for each band */
-    LVM_FLOAT                  *pPostGains;
-    LVPSA_FilterParam_t        *pFiltersParams;                     /* Copy of the filters parameters from the input parameters                                     */
+    LVM_FLOAT* pPostGains;
+    LVPSA_FilterParam_t*
+            pFiltersParams; /* Copy of the filters parameters from the input parameters */
 
-    LVM_UINT16                  nSamplesBufferUpdate;               /* Number of samples to make 20ms                                                               */
-    LVM_INT32                   BufferUpdateSamplesCount;           /* Counter used to know when to put a new value in the buffer                                   */
-    LVM_UINT16                  nRelevantFilters;                   /* Number of relevent filters depending on sampling frequency and bands center frequency        */
-    LVM_UINT16                  LocalSamplesCount;                  /* Counter used to update the SpectralDataBufferAudioTime                                       */
+    LVM_UINT16 nSamplesBufferUpdate;    /* Number of samples to make 20ms    */
+    LVM_INT32 BufferUpdateSamplesCount; /* Counter used to know when to put a new value in the
+                                           buffer                                   */
+    LVM_UINT16 nRelevantFilters;  /* Number of relevant filters depending on sampling frequency and
+                                     bands center frequency        */
+    LVM_UINT16 LocalSamplesCount; /* Counter used to update the SpectralDataBufferAudioTime */
 
-    LVM_UINT16                  DownSamplingFactor;                 /* Down sampling factor depending on the sampling frequency                                     */
-    LVM_UINT16                  DownSamplingCount;                  /* Counter used for the downsampling handling                                                   */
+    LVM_UINT16 DownSamplingFactor; /* Down sampling factor depending on the sampling frequency */
+    LVM_UINT16 DownSamplingCount;  /* Counter used for the downsampling handling  */
 
-    LVM_UINT16                  SpectralDataBufferDuration;         /* Length of the buffer in time (ms) defined by the application                                 */
-    LVM_UINT8                  *pSpectralDataBufferStart;           /* Starting address of the buffer                                                               */
-    LVM_UINT8                  *pSpectralDataBufferWritePointer;    /* Current position of the writting pointer of the buffer                                       */
-    LVPSA_Time                  SpectralDataBufferAudioTime;        /* AudioTime at which the last value save occured in the buffer                                 */
-    LVM_UINT32                  SpectralDataBufferLength;           /* Number of spectrum data value that the buffer can contain (per band)
-                                                                       = SpectralDataBufferDuration/20ms                                                            */
+    LVM_UINT16 SpectralDataBufferDuration;      /* Length of the buffer in time (ms) defined by the
+                                                   application                                 */
+    LVM_UINT8* pSpectralDataBufferStart;        /* Starting address of the buffer        */
+    LVM_UINT8* pSpectralDataBufferWritePointer; /* Current position of the writing pointer of the
+                                                   buffer                                       */
+    LVPSA_Time SpectralDataBufferAudioTime; /* AudioTime at which the last value save occurred in
+                                               the buffer                                 */
+    LVM_UINT32
+            SpectralDataBufferLength; /* Number of spectrum data values that the buffer can contain
+                                         (per band) = SpectralDataBufferDuration/20ms */
 
-    LVM_UINT8                  *pPreviousPeaks;                     /* Points to a nBands elements array that contains the previous peak value of the level
-                                                                     detection. Those values are decremented after each call to the GetSpectrum function          */
+    LVM_UINT8* pPreviousPeaks; /* Points to a nBands-element array that contains the previous peak
+                                value of the level detection. Those values are decremented after
+                                each call to the GetSpectrum function          */
 
-}LVPSA_InstancePr_t, *pLVPSA_InstancePr_t;
+} LVPSA_InstancePr_t, *pLVPSA_InstancePr_t;
 
 /**********************************************************************************
    FUNCTIONS PROTOTYPE
@@ -135,6 +140,6 @@
 /*  LVPSA_OK            Always succeeds                                             */
 /*                                                                                  */
 /************************************************************************************/
-LVPSA_RETURN LVPSA_ApplyNewSettings (LVPSA_InstancePr_t     *pInst);
+LVPSA_RETURN LVPSA_ApplyNewSettings(LVPSA_InstancePr_t* pInst);
 
 #endif /* _LVPSA_PRIVATE_H */
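The refresh-time constants above drive the spectral-data buffer sizing in LVPSA_Init: one entry per band per 20 ms refresh period, rounded up when SpectralDataBufferDuration is not an exact multiple of 20 ms. A plain-integer sketch of that computation (the real code does the division in fixed point with MUL32x32INTO32, LVPSA_InternalRefreshTimeInv, and a shift of 15); the function name is illustrative.

    // Sketch only - equivalent of the fixed-point sizing done in LVPSA_Init:
    // SpectralDataBufferLength = ceil(SpectralDataBufferDuration / 20 ms), per band.
    LVM_UINT32 SpectralBufferEntriesPerBand(LVM_UINT16 durationMs) {
        LVM_UINT32 length = ((LVM_UINT32)durationMs * LVPSA_InternalRefreshTimeInv) >>
                            LVPSA_InternalRefreshTimeShift; /* ~ duration / 20, truncated */
        if ((length * LVPSA_InternalRefreshTime) != durationMs) {
            length++; /* round up a partial refresh period */
        }
        return length;
    }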
diff --git a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Process.cpp b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Process.cpp
index 81a88c5..299dfd2 100644
--- a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Process.cpp
+++ b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Process.cpp
@@ -15,23 +15,23 @@
  * limitations under the License.
  */
 
-#include    "LVPSA.h"
-#include    "LVPSA_Private.h"
-#include    "LVM_Macros.h"
-#include    "VectorArithmetic.h"
+#include "LVPSA.h"
+#include "LVPSA_Private.h"
+#include "LVM_Macros.h"
+#include "VectorArithmetic.h"
 
-#define LVM_MININT_32   0x80000000
+#define LVM_MININT_32 0x80000000
 
 static LVM_INT32 mult32x32in32_shiftr(LVM_INT32 a, LVM_INT32 b, LVM_INT32 c) {
-  LVM_INT64 result = ((LVM_INT64)a * b) >> c;
+    LVM_INT64 result = ((LVM_INT64)a * b) >> c;
 
-  if (result >= INT32_MAX) {
-    return INT32_MAX;
-  } else if (result <= INT32_MIN) {
-    return INT32_MIN;
-  } else {
-    return (LVM_INT32)result;
-  }
+    if (result >= INT32_MAX) {
+        return INT32_MAX;
+    } else if (result <= INT32_MIN) {
+        return INT32_MIN;
+    } else {
+        return (LVM_INT32)result;
+    }
 }
 
 /************************************************************************************/
@@ -54,42 +54,36 @@
 /*  otherwise           Error due to bad parameters                                 */
 /*                                                                                  */
 /************************************************************************************/
-LVPSA_RETURN LVPSA_Process           ( pLVPSA_Handle_t      hInstance,
-                                       LVM_FLOAT           *pLVPSA_InputSamples,
-                                       LVM_UINT16           InputBlockSize,
-                                       LVPSA_Time           AudioTime            )
+LVPSA_RETURN LVPSA_Process(pLVPSA_Handle_t hInstance, LVM_FLOAT* pLVPSA_InputSamples,
+                           LVM_UINT16 InputBlockSize, LVPSA_Time AudioTime)
 
 {
-    LVPSA_InstancePr_t     *pLVPSA_Inst = (LVPSA_InstancePr_t*)hInstance;
-    LVM_FLOAT               *pScratch;
-    LVM_INT16               ii;
-    LVM_INT32               AudioTimeInc;
-    extern LVM_UINT32       LVPSA_SampleRateInvTab[];
-    LVM_UINT8               *pWrite_Save;         /* Position of the write pointer
-                                                     at the beginning of the process  */
+    LVPSA_InstancePr_t* pLVPSA_Inst = (LVPSA_InstancePr_t*)hInstance;
+    LVM_FLOAT* pScratch;
+    LVM_INT16 ii;
+    LVM_INT32 AudioTimeInc;
+    extern LVM_UINT32 LVPSA_SampleRateInvTab[];
+    LVM_UINT8* pWrite_Save; /* Position of the write pointer
+                               at the beginning of the process  */
 
     /******************************************************************************
        CHECK PARAMETERS
     *******************************************************************************/
-    if(hInstance == LVM_NULL || pLVPSA_InputSamples == LVM_NULL)
-    {
-        return(LVPSA_ERROR_NULLADDRESS);
+    if (hInstance == LVM_NULL || pLVPSA_InputSamples == LVM_NULL) {
+        return (LVPSA_ERROR_NULLADDRESS);
     }
-    if(InputBlockSize == 0 || InputBlockSize > pLVPSA_Inst->MaxInputBlockSize)
-    {
-        return(LVPSA_ERROR_INVALIDPARAM);
+    if (InputBlockSize == 0 || InputBlockSize > pLVPSA_Inst->MaxInputBlockSize) {
+        return (LVPSA_ERROR_INVALIDPARAM);
     }
-
-    pScratch = (LVM_FLOAT*)pLVPSA_Inst->MemoryTable.Region[LVPSA_MEMREGION_SCRATCH].pBaseAddress;
+    pScratch = (LVM_FLOAT*)pLVPSA_Inst->pScratch;
     pWrite_Save = pLVPSA_Inst->pSpectralDataBufferWritePointer;
 
     /******************************************************************************
        APPLY NEW SETTINGS IF NEEDED
     *******************************************************************************/
-    if (pLVPSA_Inst->bControlPending == LVM_TRUE)
-    {
+    if (pLVPSA_Inst->bControlPending == LVM_TRUE) {
         pLVPSA_Inst->bControlPending = 0;
-        LVPSA_ApplyNewSettings( pLVPSA_Inst);
+        LVPSA_ApplyNewSettings(pLVPSA_Inst);
     }
 
     /******************************************************************************
@@ -99,39 +93,30 @@
     Copy_Float(pLVPSA_InputSamples, pScratch, (LVM_INT16)InputBlockSize);
     Shift_Sat_Float(-1, pScratch, pScratch, (LVM_INT16)InputBlockSize);
 
-    for (ii = 0; ii < pLVPSA_Inst->nRelevantFilters; ii++)
-    {
-        switch(pLVPSA_Inst->pBPFiltersPrecision[ii])
-        {
+    for (ii = 0; ii < pLVPSA_Inst->nRelevantFilters; ii++) {
+        switch (pLVPSA_Inst->pBPFiltersPrecision[ii]) {
             case LVPSA_SimplePrecisionFilter:
-                BP_1I_D16F16C14_TRC_WRA_01  ( &pLVPSA_Inst->pBP_Instances[ii],
-                                              pScratch,
-                                              pScratch + InputBlockSize,
-                                              (LVM_INT16)InputBlockSize);
+                BP_1I_D16F16C14_TRC_WRA_01(&pLVPSA_Inst->pBP_Instances[ii], pScratch,
+                                           pScratch + InputBlockSize, (LVM_INT16)InputBlockSize);
                 break;
 
             case LVPSA_DoublePrecisionFilter:
-                BP_1I_D16F32C30_TRC_WRA_01  ( &pLVPSA_Inst->pBP_Instances[ii],
-                                              pScratch,
-                                              pScratch + InputBlockSize,
-                                              (LVM_INT16)InputBlockSize);
+                BP_1I_D16F32C30_TRC_WRA_01(&pLVPSA_Inst->pBP_Instances[ii], pScratch,
+                                           pScratch + InputBlockSize, (LVM_INT16)InputBlockSize);
                 break;
             default:
                 break;
         }
 
-        LVPSA_QPD_Process_Float   ( pLVPSA_Inst,
-                                    pScratch + InputBlockSize,
-                                    (LVM_INT16)InputBlockSize,
-                                    ii);
+        LVPSA_QPD_Process_Float(pLVPSA_Inst, pScratch + InputBlockSize, (LVM_INT16)InputBlockSize,
+                                ii);
     }
 
     /******************************************************************************
        UPDATE SpectralDataBufferAudioTime
     *******************************************************************************/
 
-    if(pLVPSA_Inst->pSpectralDataBufferWritePointer != pWrite_Save)
-    {
+    if (pLVPSA_Inst->pSpectralDataBufferWritePointer != pWrite_Save) {
         AudioTimeInc = mult32x32in32_shiftr(
                 (AudioTime + ((LVM_INT32)pLVPSA_Inst->LocalSamplesCount * 1000)),
                 (LVM_INT32)LVPSA_SampleRateInvTab[pLVPSA_Inst->CurrentParams.Fs],
@@ -139,7 +124,7 @@
         pLVPSA_Inst->SpectralDataBufferAudioTime = AudioTime + AudioTimeInc;
     }
 
-    return(LVPSA_OK);
+    return (LVPSA_OK);
 }
 
 /************************************************************************************/
@@ -162,99 +147,95 @@
 /*  otherwise           Error due to bad parameters                                 */
 /*                                                                                  */
 /************************************************************************************/
-LVPSA_RETURN LVPSA_GetSpectrum       ( pLVPSA_Handle_t      hInstance,
-                                       LVPSA_Time           GetSpectrumAudioTime,
-                                       LVM_UINT8           *pCurrentValues,
-                                       LVM_UINT8           *pPeakValues           )
+LVPSA_RETURN LVPSA_GetSpectrum(pLVPSA_Handle_t hInstance, LVPSA_Time GetSpectrumAudioTime,
+                               LVM_UINT8* pCurrentValues, LVM_UINT8* pPeakValues)
 
 {
+    LVPSA_InstancePr_t* pLVPSA_Inst = (LVPSA_InstancePr_t*)hInstance;
+    LVM_INT32 StatusDelta, ii;
+    LVM_UINT8* pRead;
 
-    LVPSA_InstancePr_t      *pLVPSA_Inst = (LVPSA_InstancePr_t*)hInstance;
-    LVM_INT32               StatusDelta, ii;
-    LVM_UINT8               *pRead;
-
-    if(hInstance == LVM_NULL || pCurrentValues == LVM_NULL || pPeakValues == LVM_NULL)
-    {
-        return(LVPSA_ERROR_NULLADDRESS);
+    if (hInstance == LVM_NULL || pCurrentValues == LVM_NULL || pPeakValues == LVM_NULL) {
+        return (LVPSA_ERROR_NULLADDRESS);
     }
 
     /* First find the place where to look in the status buffer */
-    if(GetSpectrumAudioTime <= pLVPSA_Inst->SpectralDataBufferAudioTime)
-    {
-        MUL32x32INTO32((pLVPSA_Inst->SpectralDataBufferAudioTime - GetSpectrumAudioTime),LVPSA_InternalRefreshTimeInv,StatusDelta,LVPSA_InternalRefreshTimeShift);
-        if((StatusDelta * LVPSA_InternalRefreshTime) != (pLVPSA_Inst->SpectralDataBufferAudioTime - GetSpectrumAudioTime))
-        {
+    if (GetSpectrumAudioTime <= pLVPSA_Inst->SpectralDataBufferAudioTime) {
+        MUL32x32INTO32((pLVPSA_Inst->SpectralDataBufferAudioTime - GetSpectrumAudioTime),
+                       LVPSA_InternalRefreshTimeInv, StatusDelta, LVPSA_InternalRefreshTimeShift);
+        if ((StatusDelta * LVPSA_InternalRefreshTime) !=
+            (pLVPSA_Inst->SpectralDataBufferAudioTime - GetSpectrumAudioTime)) {
             StatusDelta += 1;
         }
-    }
-    else
-    {
+    } else {
         /* This part handles the wrap around */
-        MUL32x32INTO32(((pLVPSA_Inst->SpectralDataBufferAudioTime - (LVM_INT32)LVM_MININT_32) + ((LVM_INT32)LVM_MAXINT_32 - GetSpectrumAudioTime)),LVPSA_InternalRefreshTimeInv,StatusDelta,LVPSA_InternalRefreshTimeShift)
-        if(((LVM_INT32)(StatusDelta * LVPSA_InternalRefreshTime)) != ((LVM_INT32)((pLVPSA_Inst->SpectralDataBufferAudioTime - (LVM_INT32)LVM_MININT_32) + ((LVM_INT32)LVM_MAXINT_32 - GetSpectrumAudioTime))))
-        {
+        MUL32x32INTO32(((pLVPSA_Inst->SpectralDataBufferAudioTime - (LVM_INT32)LVM_MININT_32) +
+                        ((LVM_INT32)LVM_MAXINT_32 - GetSpectrumAudioTime)),
+                       LVPSA_InternalRefreshTimeInv, StatusDelta, LVPSA_InternalRefreshTimeShift);
+        if (((LVM_INT32)(StatusDelta * LVPSA_InternalRefreshTime)) !=
+            ((LVM_INT32)((pLVPSA_Inst->SpectralDataBufferAudioTime - (LVM_INT32)LVM_MININT_32) +
+                         ((LVM_INT32)LVM_MAXINT_32 - GetSpectrumAudioTime)))) {
             StatusDelta += 1;
         }
     }
     /* Check whether the desired level is not too "old" (see 2.10 in LVPSA_DesignNotes.doc)*/
-    if(
-        ((GetSpectrumAudioTime < pLVPSA_Inst->SpectralDataBufferAudioTime)&&
-         ((GetSpectrumAudioTime<0)&&(pLVPSA_Inst->SpectralDataBufferAudioTime>0))&&
-         (((LVM_INT32)(-GetSpectrumAudioTime + pLVPSA_Inst->SpectralDataBufferAudioTime))>LVM_MAXINT_32))||
+    if (((GetSpectrumAudioTime < pLVPSA_Inst->SpectralDataBufferAudioTime) &&
+         ((GetSpectrumAudioTime < 0) && (pLVPSA_Inst->SpectralDataBufferAudioTime > 0)) &&
+         (((LVM_INT32)(-GetSpectrumAudioTime + pLVPSA_Inst->SpectralDataBufferAudioTime)) >
+          LVM_MAXINT_32)) ||
 
-         ((GetSpectrumAudioTime > pLVPSA_Inst->SpectralDataBufferAudioTime)&&
-         (((GetSpectrumAudioTime>=0)&&(pLVPSA_Inst->SpectralDataBufferAudioTime>=0))||
-          ((GetSpectrumAudioTime<=0)&&(pLVPSA_Inst->SpectralDataBufferAudioTime<=0))||
-         (((GetSpectrumAudioTime>=0)&&(pLVPSA_Inst->SpectralDataBufferAudioTime<=0))&&
-         (((LVM_INT32)(GetSpectrumAudioTime - pLVPSA_Inst->SpectralDataBufferAudioTime))<LVM_MAXINT_32))))||
+        ((GetSpectrumAudioTime > pLVPSA_Inst->SpectralDataBufferAudioTime) &&
+         (((GetSpectrumAudioTime >= 0) && (pLVPSA_Inst->SpectralDataBufferAudioTime >= 0)) ||
+          ((GetSpectrumAudioTime <= 0) && (pLVPSA_Inst->SpectralDataBufferAudioTime <= 0)) ||
+          (((GetSpectrumAudioTime >= 0) && (pLVPSA_Inst->SpectralDataBufferAudioTime <= 0)) &&
+           (((LVM_INT32)(GetSpectrumAudioTime - pLVPSA_Inst->SpectralDataBufferAudioTime)) <
+            LVM_MAXINT_32)))) ||
 
-        (StatusDelta > (LVM_INT32)pLVPSA_Inst->SpectralDataBufferLength) ||
-        (!StatusDelta))
-    {
-        for(ii = 0; ii < pLVPSA_Inst->nBands; ii++)
-        {
-            pCurrentValues[ii]  = 0;
-            pPeakValues[ii]      = 0;
+        (StatusDelta > (LVM_INT32)pLVPSA_Inst->SpectralDataBufferLength) || (!StatusDelta)) {
+        for (ii = 0; ii < pLVPSA_Inst->nBands; ii++) {
+            pCurrentValues[ii] = 0;
+            pPeakValues[ii] = 0;
         }
-        return(LVPSA_OK);
+        return (LVPSA_OK);
     }
     /* Set the reading pointer */
-    if((LVM_INT32)(StatusDelta * pLVPSA_Inst->nBands) > (pLVPSA_Inst->pSpectralDataBufferWritePointer - pLVPSA_Inst->pSpectralDataBufferStart))
-    {
-        pRead = pLVPSA_Inst->pSpectralDataBufferWritePointer + (pLVPSA_Inst->SpectralDataBufferLength - (LVM_UINT32)StatusDelta) * pLVPSA_Inst->nBands;
-    }
-    else
-    {
-        pRead = pLVPSA_Inst->pSpectralDataBufferWritePointer  - StatusDelta * pLVPSA_Inst->nBands;
+    if ((LVM_INT32)(StatusDelta * pLVPSA_Inst->nBands) >
+        (pLVPSA_Inst->pSpectralDataBufferWritePointer - pLVPSA_Inst->pSpectralDataBufferStart)) {
+        pRead = pLVPSA_Inst->pSpectralDataBufferWritePointer +
+                (pLVPSA_Inst->SpectralDataBufferLength - (LVM_UINT32)StatusDelta) *
+                        pLVPSA_Inst->nBands;
+    } else {
+        pRead = pLVPSA_Inst->pSpectralDataBufferWritePointer - StatusDelta * pLVPSA_Inst->nBands;
     }
 
     /* Read the status buffer and fill the output buffers */
-    for(ii = 0; ii < pLVPSA_Inst->nBands; ii++)
-    {
+    for (ii = 0; ii < pLVPSA_Inst->nBands; ii++) {
         pCurrentValues[ii] = pRead[ii];
-        if(pLVPSA_Inst->pPreviousPeaks[ii] <= pRead[ii])
-        {
+        if (pLVPSA_Inst->pPreviousPeaks[ii] <= pRead[ii]) {
             pLVPSA_Inst->pPreviousPeaks[ii] = pRead[ii];
-        }
-        else if(pLVPSA_Inst->pPreviousPeaks[ii] != 0)
-        {
+        } else if (pLVPSA_Inst->pPreviousPeaks[ii] != 0) {
             LVM_INT32 temp;
             /*Re-compute max values for decay */
             temp = (LVM_INT32)(LVPSA_MAXUNSIGNEDCHAR - pLVPSA_Inst->pPreviousPeaks[ii]);
-            temp = ((temp * LVPSA_MAXLEVELDECAYFACTOR)>>LVPSA_MAXLEVELDECAYSHIFT);
+            temp = ((temp * LVPSA_MAXLEVELDECAYFACTOR) >> LVPSA_MAXLEVELDECAYSHIFT);
             /* If the gain has no effect, "help" the value to increase */
-            if(temp == (LVPSA_MAXUNSIGNEDCHAR - pLVPSA_Inst->pPreviousPeaks[ii]))
-            {
+            if (temp == (LVPSA_MAXUNSIGNEDCHAR - pLVPSA_Inst->pPreviousPeaks[ii])) {
                 temp += 1;
             }
             /* Saturate */
             temp = (temp > LVPSA_MAXUNSIGNEDCHAR) ? LVPSA_MAXUNSIGNEDCHAR : temp;
             /* Store new max level */
-            pLVPSA_Inst->pPreviousPeaks[ii] =  (LVM_UINT8)(LVPSA_MAXUNSIGNEDCHAR - temp);
+            pLVPSA_Inst->pPreviousPeaks[ii] = (LVM_UINT8)(LVPSA_MAXUNSIGNEDCHAR - temp);
         }
 
         pPeakValues[ii] = pLVPSA_Inst->pPreviousPeaks[ii];
     }
 
-    return(LVPSA_OK);
+    return (LVPSA_OK);
 }
diff --git a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_QPD.h b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_QPD.h
index 609a485..2f752bf 100644
--- a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_QPD.h
+++ b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_QPD.h
@@ -20,41 +20,35 @@
 
 #include "LVM_Types.h"
 
-typedef struct
-{
-  LVM_INT32                            *pDelay;        /* pointer to the delayed samples (data of 32 bits)   */
-  LVM_INT32                            Coefs[2];       /* pointer to the filter coefficients */
-}QPD_State_t, *pQPD_State_t;
+typedef struct {
+    LVM_INT32* pDelay;  /* pointer to the delayed samples (data of 32 bits)   */
+    LVM_INT32 Coefs[2]; /* pointer to the filter coefficients */
+} QPD_State_t, *pQPD_State_t;
 
-typedef struct
-{
+typedef struct {
     /* pointer to the delayed samples (data of 32 bits)   */
-    LVM_FLOAT                            *pDelay;
-    LVM_FLOAT                            Coefs[2];       /* pointer to the filter coefficients */
-}QPD_FLOAT_State_t, *pQPD_FLOAT_State_t;
+    LVM_FLOAT* pDelay;
+    LVM_FLOAT Coefs[2]; /* pointer to the filter coefficients */
+} QPD_FLOAT_State_t, *pQPD_FLOAT_State_t;
 
-typedef struct
-{
-    LVM_INT32 KP;    /*should store a0*/
-    LVM_INT32 KM;    /*should store b2*/
+typedef struct {
+    LVM_INT32 KP; /*should store a0*/
+    LVM_INT32 KM; /*should store b2*/
 
 } QPD_C32_Coefs, *PQPD_C32_Coefs;
 
-typedef struct
-{
-    LVM_FLOAT KP;    /*should store a0*/
-    LVM_FLOAT KM;    /*should store b2*/
+typedef struct {
+    LVM_FLOAT KP; /*should store a0*/
+    LVM_FLOAT KM; /*should store b2*/
 
 } QPD_FLOAT_Coefs, *PQPD_FLOAT_Coefs;
 
-typedef struct
-{
+typedef struct {
     LVM_INT32 Storage[1];
 
 } QPD_Taps_t, *pQPD_Taps_t;
 
-typedef struct
-{
+typedef struct {
     LVM_FLOAT Storage[1];
 
 } QPD_FLOAT_Taps_t, *pQPD_FLOAT_Taps_t;
@@ -72,15 +66,11 @@
 /* RETURNS:             void                                                        */
 /*                                                                                  */
 /************************************************************************************/
-void LVPSA_QPD_Process (            void                               *hInstance,
-                                    LVM_INT16                          *pInSamps,
-                                    LVM_INT16                           numSamples,
-                                    LVM_INT16                           BandIndex);
+void LVPSA_QPD_Process(void* hInstance, LVM_INT16* pInSamps, LVM_INT16 numSamples,
+                       LVM_INT16 BandIndex);
 
-void LVPSA_QPD_Process_Float (      void                               *hInstance,
-                                    LVM_FLOAT                          *pInSamps,
-                                    LVM_INT16                           numSamples,
-                                    LVM_INT16                           BandIndex);
+void LVPSA_QPD_Process_Float(void* hInstance, LVM_FLOAT* pInSamps, LVM_INT16 numSamples,
+                             LVM_INT16 BandIndex);
 /************************************************************************************/
 /*                                                                                  */
 /* FUNCTION:            LVPSA_QPD_Init                                              */
@@ -96,13 +86,9 @@
 /* RETURNS:     void                                                                */
 /*                                                                                  */
 /************************************************************************************/
-void LVPSA_QPD_Init (   QPD_State_t       *pInstance,
-                        QPD_Taps_t        *pTaps,
-                        QPD_C32_Coefs     *pCoef     );
+void LVPSA_QPD_Init(QPD_State_t* pInstance, QPD_Taps_t* pTaps, QPD_C32_Coefs* pCoef);
 
-void LVPSA_QPD_Init_Float (   QPD_FLOAT_State_t       *pInstance,
-                              QPD_FLOAT_Taps_t        *pTaps,
-                              QPD_FLOAT_Coefs         *pCoef     );
+void LVPSA_QPD_Init_Float(QPD_FLOAT_State_t* pInstance, QPD_FLOAT_Taps_t* pTaps,
+                          QPD_FLOAT_Coefs* pCoef);
 
 #endif
-
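The float QPD state above bundles one delayed sample with the two filter coefficients KP and KM (noted in the header as a0 and b2). A minimal sketch of wiring a per-band state to its taps with LVPSA_QPD_Init_Float; the coefficient values and the helper name are placeholders, the real values come from the library's QPD coefficient tables.

    // Sketch only - hooking one band's quasi-peak detector state to its taps.
    // The coefficient values below are placeholders, not real table entries.
    void SetUpOneBandQpd(QPD_FLOAT_State_t* pState, QPD_FLOAT_Taps_t* pTaps) {
        QPD_FLOAT_Coefs coefs;
        coefs.KP = 0.01f;  /* placeholder a0 */
        coefs.KM = -0.99f; /* placeholder b2 */
        LVPSA_QPD_Init_Float(pState, pTaps, &coefs);
    }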
diff --git a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_QPD_Init.cpp b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_QPD_Init.cpp
index 2dbf694..c5023c3 100644
--- a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_QPD_Init.cpp
+++ b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_QPD_Init.cpp
@@ -32,20 +32,15 @@
 /* RETURNS:     void                                                                */
 /*                                                                                  */
 /************************************************************************************/
-void LVPSA_QPD_Init (   pQPD_State_t       pQPD_State,
-                        QPD_Taps_t        *pTaps,
-                        QPD_C32_Coefs     *pCoef     )
-{
-    pQPD_State->pDelay  = pTaps->Storage;
-    pQPD_State->Coefs[0]  = pCoef->KP;
-    pQPD_State->Coefs[1]  = pCoef->KM;
+void LVPSA_QPD_Init(pQPD_State_t pQPD_State, QPD_Taps_t* pTaps, QPD_C32_Coefs* pCoef) {
+    pQPD_State->pDelay = pTaps->Storage;
+    pQPD_State->Coefs[0] = pCoef->KP;
+    pQPD_State->Coefs[1] = pCoef->KM;
 }
 
-void LVPSA_QPD_Init_Float (   pQPD_FLOAT_State_t       pQPD_State,
-                              QPD_FLOAT_Taps_t         *pTaps,
-                              QPD_FLOAT_Coefs          *pCoef     )
-{
-    pQPD_State->pDelay  = pTaps->Storage;
-    pQPD_State->Coefs[0]  = ((LVM_FLOAT)pCoef->KP);
-    pQPD_State->Coefs[1]  = ((LVM_FLOAT)pCoef->KM);
+void LVPSA_QPD_Init_Float(pQPD_FLOAT_State_t pQPD_State, QPD_FLOAT_Taps_t* pTaps,
+                          QPD_FLOAT_Coefs* pCoef) {
+    pQPD_State->pDelay = pTaps->Storage;
+    pQPD_State->Coefs[0] = ((LVM_FLOAT)pCoef->KP);
+    pQPD_State->Coefs[1] = ((LVM_FLOAT)pCoef->KM);
 }
diff --git a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_QPD_Process.cpp b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_QPD_Process.cpp
index 8805420..e301cf9 100644
--- a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_QPD_Process.cpp
+++ b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_QPD_Process.cpp
@@ -34,15 +34,11 @@
 /* RETURNS:             void                                                        */
 /*                                                                                  */
 /************************************************************************************/
-void LVPSA_QPD_WritePeak(   pLVPSA_InstancePr_t       pLVPSA_Inst,
-                            LVM_UINT8                 **ppWrite,
-                            LVM_INT16                 BandIndex,
-                            LVM_INT16                 Value   );
+void LVPSA_QPD_WritePeak(pLVPSA_InstancePr_t pLVPSA_Inst, LVM_UINT8** ppWrite, LVM_INT16 BandIndex,
+                         LVM_INT16 Value);
 
-void LVPSA_QPD_WritePeak_Float(   pLVPSA_InstancePr_t       pLVPSA_Inst,
-                                  LVM_UINT8             **ppWrite,
-                                  LVM_INT16               BandIndex,
-                                  LVM_FLOAT               Value   );
+void LVPSA_QPD_WritePeak_Float(pLVPSA_InstancePr_t pLVPSA_Inst, LVM_UINT8** ppWrite,
+                               LVM_INT16 BandIndex, LVM_FLOAT Value);
 /************************************************************************************/
 /*                                                                                  */
 /* FUNCTION:            LVPSA_QPD_Process                                           */
@@ -56,38 +52,34 @@
 /* RETURNS:             void                                                        */
 /*                                                                                  */
 /************************************************************************************/
-void LVPSA_QPD_Process_Float (      void                               *hInstance,
-                                    LVM_FLOAT                          *pInSamps,
-                                    LVM_INT16                           numSamples,
-                                    LVM_INT16                           BandIndex)
-{
-
+void LVPSA_QPD_Process_Float(void* hInstance, LVM_FLOAT* pInSamps, LVM_INT16 numSamples,
+                             LVM_INT16 BandIndex) {
     /******************************************************************************
        PARAMETERS
     *******************************************************************************/
-    LVPSA_InstancePr_t     *pLVPSA_Inst = (LVPSA_InstancePr_t*)hInstance;
-    QPD_FLOAT_State_t *pQPDState =  (QPD_FLOAT_State_t*)&pLVPSA_Inst->pQPD_States[BandIndex];
+    LVPSA_InstancePr_t* pLVPSA_Inst = (LVPSA_InstancePr_t*)hInstance;
+    QPD_FLOAT_State_t* pQPDState = (QPD_FLOAT_State_t*)&pLVPSA_Inst->pQPD_States[BandIndex];
 
     /* Pointer to taps */
-    LVM_FLOAT* pDelay  = pQPDState->pDelay;
+    LVM_FLOAT* pDelay = pQPDState->pDelay;
 
     /* Parameters needed during quasi peak calculations */
-    LVM_FLOAT   X0;
-    LVM_FLOAT   temp,temp2;
-    LVM_FLOAT   accu;
-    LVM_FLOAT   Xg0;
-    LVM_FLOAT   D0;
-    LVM_FLOAT   V0 = (LVM_FLOAT)(*pDelay);
+    LVM_FLOAT X0;
+    LVM_FLOAT temp, temp2;
+    LVM_FLOAT accu;
+    LVM_FLOAT Xg0;
+    LVM_FLOAT D0;
+    LVM_FLOAT V0 = (LVM_FLOAT)(*pDelay);
 
     /* Filter's coef */
-    LVM_FLOAT   Kp = ((LVM_FLOAT)(pQPDState->Coefs[0]));
-    LVM_FLOAT   Km = ((LVM_FLOAT)(pQPDState->Coefs[1]));
+    LVM_FLOAT Kp = ((LVM_FLOAT)(pQPDState->Coefs[0]));
+    LVM_FLOAT Km = ((LVM_FLOAT)(pQPDState->Coefs[1]));
 
-    LVM_INT16   ii = numSamples;
+    LVM_INT16 ii = numSamples;
 
-    LVM_UINT8  *pWrite = pLVPSA_Inst->pSpectralDataBufferWritePointer;
-    LVM_INT32   BufferUpdateSamplesCount = pLVPSA_Inst->BufferUpdateSamplesCount;
-    LVM_UINT16  DownSamplingFactor = pLVPSA_Inst->DownSamplingFactor;
+    LVM_UINT8* pWrite = pLVPSA_Inst->pSpectralDataBufferWritePointer;
+    LVM_INT32 BufferUpdateSamplesCount = pLVPSA_Inst->BufferUpdateSamplesCount;
+    LVM_UINT16 DownSamplingFactor = pLVPSA_Inst->DownSamplingFactor;
 
     /******************************************************************************
        INITIALIZATION
@@ -97,29 +89,27 @@
     /* Correct also the number of samples */
     ii = (LVM_INT16)(ii - (LVM_INT16)pLVPSA_Inst->DownSamplingCount);
 
-    while (ii > 0)
-    {
+    while (ii > 0) {
         /* Apply post gain */
         /* - 1 to compensate scaling in process function*/
         X0 = (*pInSamps) * pLVPSA_Inst->pPostGains[BandIndex];
         pInSamps = pInSamps + DownSamplingFactor;
 
         /* Saturate and take absolute value */
-        if(X0 < 0.0f)
-            X0 = -X0;
+        if (X0 < 0.0f) X0 = -X0;
         if (X0 > 1.0f)
             Xg0 = 1.0f;
         else
-            Xg0 =X0;
+            Xg0 = X0;
 
         /* Quasi peak filter calculation */
-        D0  = Xg0 - V0;
+        D0 = Xg0 - V0;
 
         temp2 = D0;
 
         accu = temp2 * Kp;
-        D0    = D0 / 2.0f;
-        if (D0 < 0.0f){
+        D0 = D0 / 2.0f;
+        if (D0 < 0.0f) {
             D0 = -D0;
         }
 
@@ -130,17 +120,13 @@
 
         if (accu > 1.0f)
             accu = 1.0f;
-        else if(accu < 0.0f)
+        else if (accu < 0.0f)
             accu = 0.0f;
 
         V0 = accu;
 
-        if(((pLVPSA_Inst->nSamplesBufferUpdate - BufferUpdateSamplesCount) < DownSamplingFactor))
-        {
-            LVPSA_QPD_WritePeak_Float( pLVPSA_Inst,
-                                       &pWrite,
-                                       BandIndex,
-                                       V0);
+        if (((pLVPSA_Inst->nSamplesBufferUpdate - BufferUpdateSamplesCount) < DownSamplingFactor)) {
+            LVPSA_QPD_WritePeak_Float(pLVPSA_Inst, &pWrite, BandIndex, V0);
 
             BufferUpdateSamplesCount -= pLVPSA_Inst->nSamplesBufferUpdate;
             pLVPSA_Inst->LocalSamplesCount = (LVM_UINT16)(numSamples - ii);
@@ -148,7 +134,6 @@
         BufferUpdateSamplesCount += DownSamplingFactor;
 
         ii = (LVM_INT16)(ii - DownSamplingFactor);
-
     }
 
     /* Store last taps in memory */
@@ -156,20 +141,15 @@
 
     /* If this is the last call to the function after last band processing,
        update the parameters. */
-    if(BandIndex == (pLVPSA_Inst->nRelevantFilters - 1))
-    {
+    if (BandIndex == (pLVPSA_Inst->nRelevantFilters - 1)) {
         pLVPSA_Inst->pSpectralDataBufferWritePointer = pWrite;
        /* Adjustment for 11025Hz input, 220.5 is normally
            the exact number of samples for 20ms.*/
-        if((pLVPSA_Inst->pSpectralDataBufferWritePointer != pWrite)&&
-                                        (pLVPSA_Inst->CurrentParams.Fs == LVM_FS_11025))
-        {
-            if(pLVPSA_Inst->nSamplesBufferUpdate == 220)
-            {
+        if ((pLVPSA_Inst->pSpectralDataBufferWritePointer != pWrite) &&
+            (pLVPSA_Inst->CurrentParams.Fs == LVM_FS_11025)) {
+            if (pLVPSA_Inst->nSamplesBufferUpdate == 220) {
                 pLVPSA_Inst->nSamplesBufferUpdate = 221;
-            }
-            else
-            {
+            } else {
                 pLVPSA_Inst->nSamplesBufferUpdate = 220;
             }
         }
@@ -194,37 +174,29 @@
 /* RETURNS:             void                                                        */
 /*                                                                                  */
 /************************************************************************************/
-void LVPSA_QPD_WritePeak(   pLVPSA_InstancePr_t       pLVPSA_Inst,
-                            LVM_UINT8             **ppWrite,
-                            LVM_INT16               BandIndex,
-                            LVM_INT16               Value   )
-{
-    LVM_UINT8 *pWrite = *ppWrite;
+void LVPSA_QPD_WritePeak(pLVPSA_InstancePr_t pLVPSA_Inst, LVM_UINT8** ppWrite, LVM_INT16 BandIndex,
+                         LVM_INT16 Value) {
+    LVM_UINT8* pWrite = *ppWrite;
 
     /* Write the value and update the write pointer */
-    *(pWrite + BandIndex) = (LVM_UINT8)(Value>>7);
+    *(pWrite + BandIndex) = (LVM_UINT8)(Value >> 7);
     pWrite += pLVPSA_Inst->nBands;
-    if (pWrite == (pLVPSA_Inst->pSpectralDataBufferStart + pLVPSA_Inst->nBands * pLVPSA_Inst->SpectralDataBufferLength))
-    {
+    if (pWrite == (pLVPSA_Inst->pSpectralDataBufferStart +
+                   pLVPSA_Inst->nBands * pLVPSA_Inst->SpectralDataBufferLength)) {
         pWrite = pLVPSA_Inst->pSpectralDataBufferStart;
     }
 
     *ppWrite = pWrite;
-
 }
-void LVPSA_QPD_WritePeak_Float(   pLVPSA_InstancePr_t     pLVPSA_Inst,
-                                  LVM_UINT8               **ppWrite,
-                                  LVM_INT16               BandIndex,
-                                  LVM_FLOAT               Value   )
-{
-    LVM_UINT8 *pWrite = *ppWrite;
+void LVPSA_QPD_WritePeak_Float(pLVPSA_InstancePr_t pLVPSA_Inst, LVM_UINT8** ppWrite,
+                               LVM_INT16 BandIndex, LVM_FLOAT Value) {
+    LVM_UINT8* pWrite = *ppWrite;
 
     /* Write the value and update the write pointer */
     *(pWrite + BandIndex) = (LVM_UINT8)(Value * 256);
     pWrite += pLVPSA_Inst->nBands;
-    if (pWrite == (pLVPSA_Inst->pSpectralDataBufferStart + pLVPSA_Inst->nBands * \
-                                    pLVPSA_Inst->SpectralDataBufferLength))
-    {
+    if (pWrite == (pLVPSA_Inst->pSpectralDataBufferStart +
+                   pLVPSA_Inst->nBands * pLVPSA_Inst->SpectralDataBufferLength)) {
         pWrite = pLVPSA_Inst->pSpectralDataBufferStart;
     }
 
diff --git a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Tables.cpp b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Tables.cpp
index 9f0aa02..4fbff6f 100644
--- a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Tables.cpp
+++ b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Tables.cpp
@@ -34,19 +34,9 @@
  * Sample rate table for converting between the enumerated type and the actual
  * frequency
  */
-const LVM_UINT32    LVPSA_SampleRateTab[] = {   8000,                    /* 8kS/s  */
-                                                11025,
-                                                12000,
-                                                16000,
-                                                22050,
-                                                24000,
-                                                32000,
-                                                44100,
-                                                48000,
-                                                88200,
-                                                96000,
-                                               176400,
-                                               192000};                  /* 192kS/s */
+const LVM_UINT32 LVPSA_SampleRateTab[] = {8000, /* 8kS/s  */
+                                          11025, 12000, 16000, 22050, 24000,  32000,
+                                          44100, 48000, 88200, 96000, 176400, 192000}; /* 192kS/s */
 
 /************************************************************************************/
 /*                                                                                  */
@@ -58,20 +48,11 @@
  * Sample rate table for converting between the enumerated type and the actual
  * frequency
  */
-const LVM_UINT32    LVPSA_SampleRateInvTab[] = {    268435,                    /* 8kS/s  */
-                                                    194783,
-                                                    178957,
-                                                    134218,
-                                                    97391,
-                                                    89478,
-                                                    67109,
-                                                    48696,
-                                                    44739
-                                                    ,24348
-                                                    ,22369
-                                                    ,12174
-                                                    ,11185                  /* 192kS/s */
-                                               };
+const LVM_UINT32 LVPSA_SampleRateInvTab[] = {
+        268435, /* 8kS/s  */
+        194783, 178957, 134218, 97391, 89478, 67109,
+        48696,  44739,  24348,  22369, 12174, 11185 /* 192kS/s */
+};
 
 /************************************************************************************/
 /*                                                                                  */
@@ -83,20 +64,10 @@
  * Table for converting between the enumerated type and the number of samples
  * during 20ms
  */
-const LVM_UINT16    LVPSA_nSamplesBufferUpdate[]  = {   160,                   /* 8kS/s  */
-                                                        220,
-                                                        240,
-                                                        320,
-                                                        441,
-                                                        480,
-                                                        640,
-                                                        882,
-                                                        960
-                                                        ,1764
-                                                        ,1920
-                                                        ,3528
-                                                        ,3840                  /* 192kS/s */
-                                                    };
+const LVM_UINT16 LVPSA_nSamplesBufferUpdate[] = {
+        160,                                                           /* 8kS/s  */
+        220, 240, 320, 441, 480, 640, 882, 960, 1764, 1920, 3528, 3840 /* 192kS/s */
+};
 /************************************************************************************/
 /*                                                                                  */
 /*  Down sampling factors                                                           */
@@ -106,20 +77,25 @@
 /*
  * Table for converting between the enumerated type and the down sampling factor
  */
-const LVM_UINT16    LVPSA_DownSamplingFactor[]  = {     5,                    /* 8000  S/s  */
-                                                        7,                    /* 11025 S/s  */
-                                                        8,                    /* 12000 S/s  */
-                                                        10,                   /* 16000 S/s  */
-                                                        15,                   /* 22050 S/s  */
-                                                        16,                   /* 24000 S/s  */
-                                                        21,                   /* 32000 S/s  */
-                                                        30,                   /* 44100 S/s  */
-                                                        32                    /* 48000 S/s  */
-                                                       ,60                   /* 88200 S/s  */
-                                                       ,64                   /* 96000 S/s  */
-                                                       ,120                  /* 176400 S/s  */
-                                                       ,128                  /*192000 S/s  */
-                                                  };
+const LVM_UINT16 LVPSA_DownSamplingFactor[] = {
+        5,  /* 8000  S/s  */
+        7,  /* 11025 S/s  */
+        8,  /* 12000 S/s  */
+        10, /* 16000 S/s  */
+        15, /* 22050 S/s  */
+        16, /* 24000 S/s  */
+        21, /* 32000 S/s  */
+        30, /* 44100 S/s  */
+        32  /* 48000 S/s  */
+        ,
+        60 /* 88200 S/s  */
+        ,
+        64 /* 96000 S/s  */
+        ,
+        120 /* 176400 S/s  */
+        ,
+        128 /* 192000 S/s */
+};
 
 /************************************************************************************/
 /*                                                                                  */
@@ -130,102 +106,34 @@
 /*
  * Table for 2 * Pi / Fs
  */
-const LVM_INT16     LVPSA_TwoPiOnFsTable[] = {  26354,      /* 8kS/s */
-                                                19123,
-                                                17569,
-                                                13177,
-                                                 9561,
-                                                 8785,
-                                                 6588,
-                                                 4781,
-                                                 4392
-                                                ,2390
-                                                ,2196
-                                                ,1195
-                                                ,1098    /* 192kS/s */
-                                             };
+const LVM_INT16 LVPSA_TwoPiOnFsTable[] = {
+        26354,                                                                    /* 8kS/s */
+        19123, 17569, 13177, 9561, 8785, 6588, 4781, 4392, 2390, 2196, 1195, 1098 /* 192kS/s */
+};
 
-const LVM_FLOAT     LVPSA_Float_TwoPiOnFsTable[] = {  0.8042847f,      /* 8kS/s */
-                                                      0.5836054f,
-                                                      0.5361796f,
-                                                      0.4021423f,
-                                                      0.2917874f,
-                                                      0.2681051f,
-                                                      0.2010559f,
-                                                      0.1459089f,
-                                                      0.1340372f
-                                                     ,0.0729476f
-                                                     ,0.0670186f
-                                                     ,0.0364738f
-                                                     ,0.0335093f    /* 192kS/s */
-                                                   };
+const LVM_FLOAT LVPSA_Float_TwoPiOnFsTable[] = {
+        0.8042847f, /* 8kS/s */
+        0.5836054f, 0.5361796f, 0.4021423f, 0.2917874f, 0.2681051f, 0.2010559f,
+        0.1459089f, 0.1340372f, 0.0729476f, 0.0670186f, 0.0364738f, 0.0335093f /* 192kS/s */
+};
 
 /*
  * Gain table
  */
-const LVM_INT16     LVPSA_GainTable[] = {   364,          /* -15dB gain */
-                                            408,
-                                            458,
-                                            514,
-                                            577,
-                                            647,
-                                            726,
-                                            815,
-                                            914,
-                                            1026,
-                                            1151,
-                                            1292,
-                                            1449,
-                                            1626,
-                                            1825,
-                                            2048,         /* 0dB gain */
-                                            2297,
-                                            2578,
-                                            2892,
-                                            3245,
-                                            3641,
-                                            4096,
-                                            4584,
-                                            5144,
-                                            5772,
-                                            6476,
-                                            7266,
-                                            8153,
-                                            9148,
-                                            10264,
-                                            11576};        /* +15dB gain */
+const LVM_INT16 LVPSA_GainTable[] = {364, /* -15dB gain */
+                                     408,  458,  514,  577,  647,  726,   815,  914,
+                                     1026, 1151, 1292, 1449, 1626, 1825,  2048, /* 0dB gain */
+                                     2297, 2578, 2892, 3245, 3641, 4096,  4584, 5144,
+                                     5772, 6476, 7266, 8153, 9148, 10264, 11576}; /* +15dB gain */
 
-const LVM_FLOAT  LVPSA_Float_GainTable[]={  0.177734375f,          /* -15dB gain */
-                                            0.199218750f,
-                                            0.223632812f,
-                                            0.250976562f,
-                                            0.281738281f,
-                                            0.315917968f,
-                                            0.354492187f,
-                                            0.397949218f,
-                                            0.446289062f,
-                                            0.500976562f,
-                                            0.562011718f,
-                                            0.630859375f,
-                                            0.707519531f,
-                                            0.793945312f,
-                                            0.891113281f,
-                                            1.000000000f,         /* 0dB gain */
-                                            1.121582031f,
-                                            1.258789062f,
-                                            1.412109375f,
-                                            1.584472656f,
-                                            1.777832031f,
-                                            2.000000000f,
-                                            2.238281250f,
-                                            2.511718750f,
-                                            2.818359375f,
-                                            3.162109375f,
-                                            3.547851562f,
-                                            3.980957031f,
-                                            4.466796875f,
-                                            5.011718750f,
-                                            5.652343750f};        /* +15dB gain */
+const LVM_FLOAT LVPSA_Float_GainTable[] = {
+        0.177734375f, /* -15dB gain */
+        0.199218750f, 0.223632812f, 0.250976562f, 0.281738281f, 0.315917968f,
+        0.354492187f, 0.397949218f, 0.446289062f, 0.500976562f, 0.562011718f,
+        0.630859375f, 0.707519531f, 0.793945312f, 0.891113281f, 1.000000000f, /* 0dB gain */
+        1.121582031f, 1.258789062f, 1.412109375f, 1.584472656f, 1.777832031f,
+        2.000000000f, 2.238281250f, 2.511718750f, 2.818359375f, 3.162109375f,
+        3.547851562f, 3.980957031f, 4.466796875f, 5.011718750f, 5.652343750f}; /* +15dB gain */
 /************************************************************************************/
 /*                                                                                  */
 /*  Cosone polynomial coefficients                                                  */
@@ -241,20 +149,20 @@
  * a range of 0 to Pi. The output is in the range 32767 to -32768 representing the range
  * +1.0 to -1.0
  */
-const LVM_INT16     LVPSA_CosCoef[] = { 3,                             /* Shifts */
-                                        4096,                          /* a0 */
-                                        -36,                           /* a1 */
-                                        -19725,                        /* a2 */
-                                        -2671,                         /* a3 */
-                                        23730,                         /* a4 */
-                                        -9490};                        /* a5 */
-const LVM_FLOAT     LVPSA_Float_CosCoef[] = { 3,                             /* Shifts */
-                                              0.1250038f,                          /* a0 */
-                                              -0.0010986f,                           /* a1 */
-                                              -0.6019775f,                        /* a2 */
-                                              -0.0815149f,                         /* a3 */
-                                              0.7242042f,                         /* a4 */
-                                              -0.2896206f};                        /* a5 */
+const LVM_INT16 LVPSA_CosCoef[] = {3,                  /* Shifts */
+                                   4096,               /* a0 */
+                                   -36,                /* a1 */
+                                   -19725,             /* a2 */
+                                   -2671,              /* a3 */
+                                   23730,              /* a4 */
+                                   -9490};             /* a5 */
+const LVM_FLOAT LVPSA_Float_CosCoef[] = {3,            /* Shifts */
+                                         0.1250038f,   /* a0 */
+                                         -0.0010986f,  /* a1 */
+                                         -0.6019775f,  /* a2 */
+                                         -0.0815149f,  /* a3 */
+                                         0.7242042f,   /* a4 */
+                                         -0.2896206f}; /* a5 */
 /*
  * Coefficients for calculating the cosine error with the equation:
  *
@@ -269,101 +177,100 @@
  *
  * Cos(x) = 1.0 - CosErr(x)
  */
-const LVM_INT16     LVPSA_DPCosCoef[] = {   1,                           /* Shifts */
-                                            0,                           /* a0 */
-                                            -6,                          /* a1 */
-                                            16586,                       /* a2 */
-                                            -44};                        /* a3 */
-const LVM_FLOAT    LVPSA_Float_DPCosCoef[] = {1.0f,                        /* Shifts */
-                                              0.0f,                        /* a0 */
-                                              -0.00008311f,                 /* a1 */
-                                              0.50617999f,                 /* a2 */
-                                              -0.00134281f};                /* a3 */
+const LVM_INT16 LVPSA_DPCosCoef[] = {1,                   /* Shifts */
+                                     0,                   /* a0 */
+                                     -6,                  /* a1 */
+                                     16586,               /* a2 */
+                                     -44};                /* a3 */
+const LVM_FLOAT LVPSA_Float_DPCosCoef[] = {1.0f,          /* Shifts */
+                                           0.0f,          /* a0 */
+                                           -0.00008311f,  /* a1 */
+                                           0.50617999f,   /* a2 */
+                                           -0.00134281f}; /* a3 */
 /************************************************************************************/
 /*                                                                                  */
 /*  Quasi peak filter coefficients table                                            */
 /*                                                                                  */
 /************************************************************************************/
-const QPD_C32_Coefs     LVPSA_QPD_Coefs[] = {
-                                         /* 8kS/s  */    /* LVPSA_SPEED_LOW   */
-                                         {(LVM_INT32)0x80CEFD2B,0x00CB9B17},
-                                         {(LVM_INT32)0x80D242E7,0x00CED11D},
-                                         {(LVM_INT32)0x80DCBAF5,0x00D91679},
-                                         {(LVM_INT32)0x80CEFD2B,0x00CB9B17},
-                                         {(LVM_INT32)0x80E13739,0x00DD7CD3},
-                                         {(LVM_INT32)0x80DCBAF5,0x00D91679},
-                                         {(LVM_INT32)0x80D94BAF,0x00D5B7E7},
-                                         {(LVM_INT32)0x80E13739,0x00DD7CD3},
-                                         {(LVM_INT32)0x80DCBAF5,0x00D91679},  /* 48kS/s */
+const QPD_C32_Coefs LVPSA_QPD_Coefs[] = {
+        /* 8kS/s  */ /* LVPSA_SPEED_LOW   */
+        {(LVM_INT32)0x80CEFD2B, 0x00CB9B17},
+        {(LVM_INT32)0x80D242E7, 0x00CED11D},
+        {(LVM_INT32)0x80DCBAF5, 0x00D91679},
+        {(LVM_INT32)0x80CEFD2B, 0x00CB9B17},
+        {(LVM_INT32)0x80E13739, 0x00DD7CD3},
+        {(LVM_INT32)0x80DCBAF5, 0x00D91679},
+        {(LVM_INT32)0x80D94BAF, 0x00D5B7E7},
+        {(LVM_INT32)0x80E13739, 0x00DD7CD3},
+        {(LVM_INT32)0x80DCBAF5, 0x00D91679}, /* 48kS/s */
 
-                                         /* 8kS/s  */    /* LVPSA_SPEED_MEDIUM */
-                                         {(LVM_INT32)0x8587513D,0x055C22CF},
-                                         {(LVM_INT32)0x859D2967,0x0570F007},
-                                         {(LVM_INT32)0x85E2EFAC,0x05B34D79},
-                                         {(LVM_INT32)0x8587513D,0x055C22CF},
-                                         {(LVM_INT32)0x8600C7B9,0x05CFA6CF},
-                                         {(LVM_INT32)0x85E2EFAC,0x05B34D79},
-                                         {(LVM_INT32)0x85CC1018,0x059D8F69},
-                                         {(LVM_INT32)0x8600C7B9,0x05CFA6CF},
-                                         {(LVM_INT32)0x85E2EFAC,0x05B34D79},  /* 48kS/s */
+        /* 8kS/s  */ /* LVPSA_SPEED_MEDIUM */
+        {(LVM_INT32)0x8587513D, 0x055C22CF},
+        {(LVM_INT32)0x859D2967, 0x0570F007},
+        {(LVM_INT32)0x85E2EFAC, 0x05B34D79},
+        {(LVM_INT32)0x8587513D, 0x055C22CF},
+        {(LVM_INT32)0x8600C7B9, 0x05CFA6CF},
+        {(LVM_INT32)0x85E2EFAC, 0x05B34D79},
+        {(LVM_INT32)0x85CC1018, 0x059D8F69},
+        {(LVM_INT32)0x8600C7B9, 0x05CFA6CF},
+        {(LVM_INT32)0x85E2EFAC, 0x05B34D79}, /* 48kS/s */
 
-                                         /* 8kS/s  */   /* LVPSA_SPEED_HIGH    */
-                                         {(LVM_INT32)0xA115EA7A,0x1CDB3F5C},
-                                         {(LVM_INT32)0xA18475F0,0x1D2C83A2},
-                                         {(LVM_INT32)0xA2E1E950,0x1E2A532E},
-                                         {(LVM_INT32)0xA115EA7A,0x1CDB3F5C},
-                                         {(LVM_INT32)0xA375B2C6,0x1E943BBC},
-                                         {(LVM_INT32)0xA2E1E950,0x1E2A532E},
-                                         {(LVM_INT32)0xA26FF6BD,0x1DD81530},
-                                         {(LVM_INT32)0xA375B2C6,0x1E943BBC},
-                                         {(LVM_INT32)0xA2E1E950,0x1E2A532E}}; /* 48kS/s */
+        /* 8kS/s  */ /* LVPSA_SPEED_HIGH    */
+        {(LVM_INT32)0xA115EA7A, 0x1CDB3F5C},
+        {(LVM_INT32)0xA18475F0, 0x1D2C83A2},
+        {(LVM_INT32)0xA2E1E950, 0x1E2A532E},
+        {(LVM_INT32)0xA115EA7A, 0x1CDB3F5C},
+        {(LVM_INT32)0xA375B2C6, 0x1E943BBC},
+        {(LVM_INT32)0xA2E1E950, 0x1E2A532E},
+        {(LVM_INT32)0xA26FF6BD, 0x1DD81530},
+        {(LVM_INT32)0xA375B2C6, 0x1E943BBC},
+        {(LVM_INT32)0xA2E1E950, 0x1E2A532E}}; /* 48kS/s */
 
-const QPD_FLOAT_Coefs     LVPSA_QPD_Float_Coefs[] = {
+const QPD_FLOAT_Coefs LVPSA_QPD_Float_Coefs[] = {
 
-                                         /* 8kS/s  */    /* LVPSA_SPEED_LOW   */
-                                         {-0.9936831989325583f,0.0062135565094650f},
-                                         {-0.9935833332128823f,0.0063115493394434f},
-                                         {-0.9932638457976282f,0.0066249934025109f},
-                                         {-0.9936831989325583f,0.0062135565094650f},
-                                         {-0.9931269618682563f,0.0067592649720609f},
-                                         {-0.9932638457976282f,0.0066249934025109f},
-                                         {-0.9933686633594334f,0.0065221670083702f},
-                                         {-0.9931269618682563f,0.0067592649720609f},
-                                          /* 48kS/s */
-                                         {-0.9932638457976282f,0.0066249934025109f},
-                                         {-0.9931269618682563f,0.0067592649720609f},
-                                         {-0.9932638457976282f,0.0066249934025109f},
-                                         {-0.9931269618682563f,0.0067592649720609f},
-                                         {-0.9932638457976282f,0.0066249934025109f},
-                                         /* 8kS/s  */    /* LVPSA_SPEED_MEDIUM      */
-                                         {-0.9568079425953329f,0.0418742666952312f},
-                                         {-0.9561413046903908f,0.0425090822391212f},
-                                         {-0.9540119562298059f,0.0445343819446862f},
-                                         {-0.9568079425953329f,0.0418742666952312f},
-                                         {-0.9531011912040412f,0.0453995238058269f},
-                                         {-0.9540119562298059f,0.0445343819446862f},
-                                         {-0.9547099955379963f,0.0438708555884659f},
-                                          //{0x8600C7B9,0x05CFA6CF},
-                                         {-0.9531011912040412f,0.0453995238058269f},
-                                          /* 48kS/s */
-                                         {-0.9540119562298059f,0.0445343819446862f},
-                                         {-0.9531011912040412f,0.0453995238058269f},
-                                         {-0.9540119562298059f,0.0445343819446862f},
-                                         {-0.9531011912040412f,0.0453995238058269f},
-                                         {-0.9540119562298059f,0.0445343819446862f},
-                                          /* 8kS/s  */   /* LVPSA_SPEED_HIGH      */
-                                         {-0.7415186790749431f,0.2254409026354551f},
-                                         {-0.7381451204419136f,0.2279209652915597f},
-                                         {-0.7274807319045067f,0.2356666540727019f},
-                                         {-0.7415186790749431f,0.2254409026354551f},
-                                         {-0.7229706319049001f,0.2388987224549055f},
-                                         {-0.7274807319045067f,0.2356666540727019f},
-                                         {-0.7309581353329122f,0.2331568226218224f},
-                                         {-0.7229706319049001f,0.2388987224549055f},
-                                           /* 48kS/s */
-                                         {-0.7274807319045067f,0.2356666540727019f}
-                                        ,{-0.7229706319049001f,0.2388987224549055f}
-                                        ,{-0.7274807319045067f,0.2356666540727019f}
-                                        ,{-0.7229706319049001f,0.2388987224549055f}
-                                        ,{-0.7274807319045067f,0.2356666540727019f}
-                                        };
+        /* 8kS/s  */ /* LVPSA_SPEED_LOW   */
+        {-0.9936831989325583f, 0.0062135565094650f},
+        {-0.9935833332128823f, 0.0063115493394434f},
+        {-0.9932638457976282f, 0.0066249934025109f},
+        {-0.9936831989325583f, 0.0062135565094650f},
+        {-0.9931269618682563f, 0.0067592649720609f},
+        {-0.9932638457976282f, 0.0066249934025109f},
+        {-0.9933686633594334f, 0.0065221670083702f},
+        {-0.9931269618682563f, 0.0067592649720609f},
+        /* 48kS/s */
+        {-0.9932638457976282f, 0.0066249934025109f},
+        {-0.9931269618682563f, 0.0067592649720609f},
+        {-0.9932638457976282f, 0.0066249934025109f},
+        {-0.9931269618682563f, 0.0067592649720609f},
+        {-0.9932638457976282f, 0.0066249934025109f},
+        /* 8kS/s  */ /* LVPSA_SPEED_MEDIUM      */
+        {-0.9568079425953329f, 0.0418742666952312f},
+        {-0.9561413046903908f, 0.0425090822391212f},
+        {-0.9540119562298059f, 0.0445343819446862f},
+        {-0.9568079425953329f, 0.0418742666952312f},
+        {-0.9531011912040412f, 0.0453995238058269f},
+        {-0.9540119562298059f, 0.0445343819446862f},
+        {-0.9547099955379963f, 0.0438708555884659f},
+        //{0x8600C7B9,0x05CFA6CF},
+        {-0.9531011912040412f, 0.0453995238058269f},
+        /* 48kS/s */
+        {-0.9540119562298059f, 0.0445343819446862f},
+        {-0.9531011912040412f, 0.0453995238058269f},
+        {-0.9540119562298059f, 0.0445343819446862f},
+        {-0.9531011912040412f, 0.0453995238058269f},
+        {-0.9540119562298059f, 0.0445343819446862f},
+        /* 8kS/s  */ /* LVPSA_SPEED_HIGH      */
+        {-0.7415186790749431f, 0.2254409026354551f},
+        {-0.7381451204419136f, 0.2279209652915597f},
+        {-0.7274807319045067f, 0.2356666540727019f},
+        {-0.7415186790749431f, 0.2254409026354551f},
+        {-0.7229706319049001f, 0.2388987224549055f},
+        {-0.7274807319045067f, 0.2356666540727019f},
+        {-0.7309581353329122f, 0.2331568226218224f},
+        {-0.7229706319049001f, 0.2388987224549055f},
+        /* 48kS/s */
+        {-0.7274807319045067f, 0.2356666540727019f},
+        {-0.7229706319049001f, 0.2388987224549055f},
+        {-0.7274807319045067f, 0.2356666540727019f},
+        {-0.7229706319049001f, 0.2388987224549055f},
+        {-0.7274807319045067f, 0.2356666540727019f}};
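As a quick cross-check of the reformatted tables, the ~20 ms relationship between LVPSA_SampleRateTab and LVPSA_nSamplesBufferUpdate (and why 11025 Hz needs the 220/221 toggle seen in LVPSA_QPD_Process.cpp) can be reproduced with a small standalone program. The table values below are copied from the hunks above; the file scaffolding and output format are illustrative only and not part of the patch:

#include <stdio.h>
#include <stdint.h>

/* Standalone sanity check (illustrative scaffolding, values copied from the
 * tables above): each nSamplesBufferUpdate entry should be ~20 ms of audio at
 * the matching sample rate; 11025 Hz lands on 220.5, hence the 220/221 toggle
 * in LVPSA_QPD_Process.cpp. */
static const uint32_t kRates[] = {8000,  11025, 12000, 16000, 22050,  24000, 32000,
                                  44100, 48000, 88200, 96000, 176400, 192000};
static const uint16_t kUpdates[] = {160, 220, 240,  320,  441,  480, 640,
                                    882, 960, 1764, 1920, 3528, 3840};

int main(void) {
    for (size_t i = 0; i < sizeof(kRates) / sizeof(kRates[0]); i++) {
        printf("%6u Hz -> %4u samples = %.2f ms\n", (unsigned)kRates[i], (unsigned)kUpdates[i],
               1000.0 * kUpdates[i] / kRates[i]);
    }
    return 0;
}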
diff --git a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Tables.h b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Tables.h
index 65872fe..c771dad 100644
--- a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Tables.h
+++ b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Tables.h
@@ -27,7 +27,7 @@
  * Sample rate table for converting between the enumerated type and the actual
  * frequency
  */
-extern const LVM_UINT32    LVPSA_SampleRateTab[];
+extern const LVM_UINT32 LVPSA_SampleRateTab[];
 
 /************************************************************************************/
 /*                                                                                  */
@@ -39,7 +39,7 @@
  * Sample rate table for converting between the enumerated type and the actual
  * frequency
  */
-extern const LVM_UINT32    LVPSA_SampleRateInvTab[];
+extern const LVM_UINT32 LVPSA_SampleRateInvTab[];
 
 /************************************************************************************/
 /*                                                                                  */
@@ -51,7 +51,7 @@
  * Table for converting between the enumerated type and the number of samples
  * during 20ms
  */
-extern const LVM_UINT16    LVPSA_nSamplesBufferUpdate[];
+extern const LVM_UINT16 LVPSA_nSamplesBufferUpdate[];
 
 /************************************************************************************/
 /*                                                                                  */
@@ -62,7 +62,7 @@
 /*
  * Table for converting between the enumerated type and the down sampling factor
  */
-extern const LVM_UINT16    LVPSA_DownSamplingFactor[];
+extern const LVM_UINT16 LVPSA_DownSamplingFactor[];
 
 /************************************************************************************/
 /*                                                                                  */
@@ -73,14 +73,14 @@
 /*
  * Table for 2 * Pi / Fs
  */
-extern const LVM_INT16     LVPSA_TwoPiOnFsTable[];
-extern const LVM_FLOAT     LVPSA_Float_TwoPiOnFsTable[];
+extern const LVM_INT16 LVPSA_TwoPiOnFsTable[];
+extern const LVM_FLOAT LVPSA_Float_TwoPiOnFsTable[];
 
 /*
  * Gain table
  */
-extern const LVM_INT16     LVPSA_GainTable[];
-extern const LVM_FLOAT     LVPSA_Float_GainTable[];
+extern const LVM_INT16 LVPSA_GainTable[];
+extern const LVM_FLOAT LVPSA_Float_GainTable[];
 
 /************************************************************************************/
 /*                                                                                  */
@@ -97,8 +97,8 @@
  * a range of 0 to Pi. The output is in the range 32767 to -32768 representing the range
  * +1.0 to -1.0
  */
-extern const LVM_INT16     LVPSA_CosCoef[];
-extern const LVM_FLOAT     LVPSA_Float_CosCoef[];
+extern const LVM_INT16 LVPSA_CosCoef[];
+extern const LVM_FLOAT LVPSA_Float_CosCoef[];
 
 /*
  * Coefficients for calculating the cosine error with the equation:
@@ -114,15 +114,15 @@
  *
  * Cos(x) = 1.0 - CosErr(x)
  */
-extern const LVM_INT16     LVPSA_DPCosCoef[];
-extern const LVM_FLOAT    LVPSA_Float_DPCosCoef[];
+extern const LVM_INT16 LVPSA_DPCosCoef[];
+extern const LVM_FLOAT LVPSA_Float_DPCosCoef[];
 
 /************************************************************************************/
 /*                                                                                  */
 /*  Quasi peak filter coefficients table                                            */
 /*                                                                                  */
 /************************************************************************************/
-extern const QPD_C32_Coefs     LVPSA_QPD_Coefs[];
-extern const QPD_FLOAT_Coefs     LVPSA_QPD_Float_Coefs[];
+extern const QPD_C32_Coefs LVPSA_QPD_Coefs[];
+extern const QPD_FLOAT_Coefs LVPSA_QPD_Float_Coefs[];
 
 #endif /* __LVPSA_TABLES_H__ */
diff --git a/media/libeffects/lvm/lib/StereoWidening/lib/LVCS.h b/media/libeffects/lvm/lib/StereoWidening/lib/LVCS.h
index 0adfd1b..ffe7902 100644
--- a/media/libeffects/lvm/lib/StereoWidening/lib/LVCS.h
+++ b/media/libeffects/lvm/lib/StereoWidening/lib/LVCS.h
@@ -71,21 +71,14 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-/* Memory table */
-#define LVCS_MEMREGION_PERSISTENT_SLOW_DATA    0    /* Offset to the instance memory region */
-#define LVCS_MEMREGION_PERSISTENT_FAST_DATA    1    /* Offset to the persistent data memory region */
-#define LVCS_MEMREGION_PERSISTENT_FAST_COEF    2    /* Offset to the persistent coefficient memory region */
-#define LVCS_MEMREGION_TEMPORARY_FAST          3    /* Offset to temporary memory region */
-#define LVCS_NR_MEMORY_REGIONS                 4    /* Number of memory regions */
-
 /* Effect Level */
-#define LVCS_EFFECT_LOW                    16384    /* Effect scaling 50% */
-#define LVCS_EFFECT_MEDIUM                 24576    /* Effect scaling 75% */
-#define LVCS_EFFECT_HIGH                   32767    /* Effect Scaling 100% */
+#define LVCS_EFFECT_LOW 16384    /* Effect scaling 50% */
+#define LVCS_EFFECT_MEDIUM 24576 /* Effect scaling 75% */
+#define LVCS_EFFECT_HIGH 32767   /* Effect scaling 100% */
 
 /* Callback events */
-#define LVCS_EVENT_NONE                   0x0000    /* Not a valid event */
-#define LVCS_EVENT_ALGOFF                 0x0001    /* CS has completed switch off */
+#define LVCS_EVENT_NONE 0x0000   /* Not a valid event */
+#define LVCS_EVENT_ALGOFF 0x0001 /* CS has completed switch off */
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -94,70 +87,49 @@
 /****************************************************************************************/
 
 /* Instance handle */
-typedef void *LVCS_Handle_t;
+typedef void* LVCS_Handle_t;
 
 /* Operating modes */
-typedef enum
-{
-    LVCS_OFF = 0,
-    LVCS_ON  = 15,
-    LVCS_MAX = LVM_MAXENUM
-} LVCS_Modes_en;
-
-/* Memory Types */
-typedef enum
-{
-    LVCS_SCRATCH        = 0,
-    LVCS_DATA           = 1,
-    LVCS_COEFFICIENT    = 2,
-    LVCS_PERSISTENT     = 3,
-    LVCS_MEMORYTYPE_MAX = LVM_MAXENUM
-} LVCS_MemoryTypes_en;
+typedef enum { LVCS_OFF = 0, LVCS_ON = 15, LVCS_MAX = LVM_MAXENUM } LVCS_Modes_en;
 
 /* Function return status */
-typedef enum
-{
-    LVCS_SUCCESS        = 0,                        /* Successful return from a routine */
-    LVCS_ALIGNMENTERROR = 1,                        /* Memory alignment error */
-    LVCS_NULLADDRESS    = 2,                        /* NULL allocation address */
-    LVCS_TOOMANYSAMPLES = 3,                        /* Maximum block size exceeded */
-    LVCS_INVALIDBUFFER  = 4,                        /* Invalid buffer processing request */
-    LVCS_STATUSMAX      = LVM_MAXENUM
+typedef enum {
+    LVCS_SUCCESS = 0,        /* Successful return from a routine */
+    LVCS_NULLADDRESS = 1,    /* NULL allocation address */
+    LVCS_TOOMANYSAMPLES = 2, /* Maximum block size exceeded */
+    LVCS_STATUSMAX = LVM_MAXENUM
 } LVCS_ReturnStatus_en;
 
 /*
  * Source data formats
  */
-typedef enum
-{
-    LVCS_STEREO       = 0,
+typedef enum {
+    LVCS_STEREO = 0,
     LVCS_MONOINSTEREO = 1,
-    LVCS_SOURCEMAX    = LVM_MAXENUM
+    LVCS_SOURCEMAX = LVM_MAXENUM
 } LVCS_SourceFormat_en;
 
 /*
  * Supported output devices
  */
-typedef enum
-{
-    LVCS_HEADPHONES             = 0,
-    LVCS_EX_HEADPHONES          = 1,
-    LVCS_SPEAKERTYPE_MAX        = LVM_MAXENUM
+typedef enum {
+    LVCS_HEADPHONES = 0,
+    LVCS_EX_HEADPHONES = 1,
+    LVCS_SPEAKERTYPE_MAX = LVM_MAXENUM
 } LVCS_SpeakerType_en;
 
 /*
  * Speaker Coefficients Table
  */
-typedef struct
-{
-    void    *pTable1;
-    void    *pTable2;
-    void    *pTable3;
-    void    *pTable4;
-    void    *pTable5;
-    void    *pTable6;
-    void    *pTable7;
-    void    *pTable8;
+typedef struct {
+    void* pTable1;
+    void* pTable2;
+    void* pTable3;
+    void* pTable4;
+    void* pTable5;
+    void* pTable6;
+    void* pTable7;
+    void* pTable8;
 } LVCS_CSMS_Coef_Tables_t;
 
 /****************************************************************************************/
@@ -166,44 +138,26 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-/* Memory region definition */
-typedef struct
-{
-    LVM_UINT32              Size;                   /* Region size in bytes */
-    LVCS_MemoryTypes_en     Type;                   /* Region type */
-    void                    *pBaseAddress;          /* Pointer to the region base address */
-} LVCS_MemoryRegion_t;
-
-/* Memory table containing the region definitions */
-typedef struct
-{
-    LVCS_MemoryRegion_t Region[LVCS_NR_MEMORY_REGIONS]; /* One definition for each region */
-} LVCS_MemTab_t;
-
 /* Concert Sound parameter structure */
-typedef struct
-{
-    LVCS_Modes_en           OperatingMode;          /* Algorithm mode */
-    LVCS_SpeakerType_en     SpeakerType;            /* Output device type */
-    LVCS_SourceFormat_en    SourceFormat;           /* Source data format */
-    LVM_Mode_en             CompressorMode;         /* Non-Linear Compressor Mode */
-    LVM_Fs_en               SampleRate;             /* Sampling rate */
-    LVM_INT16               EffectLevel;            /* Effect level */
-    LVM_UINT16              ReverbLevel;            /* Reverb level in % */
-#ifdef SUPPORT_MC
-    LVM_INT32               NrChannels;
-#endif
+typedef struct {
+    LVCS_Modes_en OperatingMode;       /* Algorithm mode */
+    LVCS_SpeakerType_en SpeakerType;   /* Output device type */
+    LVCS_SourceFormat_en SourceFormat; /* Source data format */
+    LVM_Mode_en CompressorMode;        /* Non-Linear Compressor Mode */
+    LVM_Fs_en SampleRate;              /* Sampling rate */
+    LVM_INT16 EffectLevel;             /* Effect level */
+    LVM_UINT16 ReverbLevel;            /* Reverb level in % */
+    LVM_INT32 NrChannels;
 } LVCS_Params_t;
 
 /* Concert Sound Capability structure */
-typedef struct
-{
+typedef struct {
     /* General parameters */
-    LVM_UINT16              MaxBlockSize;           /* Maximum block size in sample pairs */
+    LVM_UINT16 MaxBlockSize; /* Maximum block size in sample pairs */
 
     /* Callback parameters */
-    LVM_Callback            CallBack;               /* Bundle callback */
-    void                    *pBundleInstance;       /* Bundle instance handle */
+    LVM_Callback CallBack; /* Bundle callback */
+    void* pBundleInstance; /* Bundle instance handle */
 
 } LVCS_Capabilities_t;
 
@@ -213,82 +167,44 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-/****************************************************************************************/
-/*                                                                                      */
-/* FUNCTION:                LVCS_Memory                                                 */
-/*                                                                                      */
-/* DESCRIPTION:                                                                         */
-/*  This function is used for memory allocation and free. It can be called in           */
-/*  two ways:                                                                           */
-/*                                                                                      */
-/*      hInstance = NULL                Returns the memory requirements                 */
-/*      hInstance = Instance handle     Returns the memory requirements and             */
-/*                                      allocated base addresses for the instance       */
-/*                                                                                      */
-/*  When this function is called for memory allocation (hInstance=NULL) it is           */
-/*  passed the default capabilities, of these only the buffer processing setting is     */
-/*  used.                                                                               */
-/*                                                                                      */
-/*  When called for memory allocation the memory base address pointers are NULL on      */
-/*  return.                                                                             */
-/*                                                                                      */
-/*  When the function is called for free (hInstance = Instance Handle) the              */
-/*  capabilities are ignored and the memory table returns the allocated memory and      */
-/*  base addresses used during initialisation.                                          */
-/*                                                                                      */
-/* PARAMETERS:                                                                          */
-/*  hInstance               Instance Handle                                             */
-/*  pMemoryTable            Pointer to an empty memory definition table                 */
-/*  pCapabilities           Pointer to the default capabilites                          */
-/*                                                                                      */
-/* RETURNS:                                                                             */
-/*  LVCS_Success            Succeeded                                                   */
-/*                                                                                      */
-/* NOTES:                                                                               */
-/*  1.  This function may be interrupted by the LVCS_Process function                   */
-/*                                                                                      */
-/****************************************************************************************/
+/************************************************************************************/
+/*                                                                                  */
+/* FUNCTION:                LVCS_Init                                               */
+/*                                                                                  */
+/* DESCRIPTION:                                                                     */
+/*  Creation and initialisation function for the Concert Sound module               */
+/*                                                                                  */
+/* PARAMETERS:                                                                      */
+/*  phInstance              Pointer to instance handle                              */
+/*  pCapabilities           Pointer to the capabilities structure                   */
+/*  pScratch                Pointer to the scratch buffer                           */
+/*                                                                                  */
+/* RETURNS:                                                                         */
+/*  LVCS_Success            Initialisation succeeded                                */
+/*  LVCS_NULLADDRESS        Instance or scratch memory has a NULL pointer           */
+/*                                                                                  */
+/* NOTES:                                                                           */
+/*  1.  This function must not be interrupted by the LVCS_Process function          */
+/*                                                                                  */
+/************************************************************************************/
+LVCS_ReturnStatus_en LVCS_Init(LVCS_Handle_t* phInstance, LVCS_Capabilities_t* pCapabilities,
+                               void* pScratch);
 
-LVCS_ReturnStatus_en LVCS_Memory(LVCS_Handle_t          hInstance,
-                                 LVCS_MemTab_t          *pMemoryTable,
-                                 LVCS_Capabilities_t    *pCapabilities);
-
-/****************************************************************************************/
-/*                                                                                      */
-/* FUNCTION:                LVCS_Init                                                   */
-/*                                                                                      */
-/* DESCRIPTION:                                                                         */
-/*  Create and initialisation function for the Concert Sound module                     */
-/*                                                                                      */
-/*  This function can be used to create an algorithm instance by calling with           */
-/*  hInstance set to NULL. In this case the algorithm returns the new instance          */
-/*  handle.                                                                             */
-/*                                                                                      */
-/*  This function can be used to force a full re-initialisation of the algorithm        */
-/*  by calling with hInstance = Instance Handle. In this case the memory table          */
-/*  should be correct for the instance, this can be ensured by calling the function     */
-/*  LVCS_Memory before calling this function.                                           */
-/*                                                                                      */
-/* PARAMETERS:                                                                          */
-/*  hInstance               Instance handle                                             */
-/*  pMemoryTable            Pointer to the memory definition table                      */
-/*  pCapabilities           Pointer to the initialisation capabilities                  */
-/*                                                                                      */
-/* RETURNS:                                                                             */
-/*  LVCS_Success            Initialisation succeeded                                    */
-/*  LVCS_AlignmentError     Instance or scratch memory on incorrect alignment           */
-/*  LVCS_NullAddress        Instance or scratch memory has a NULL pointer               */
-/*                                                                                      */
-/* NOTES:                                                                               */
-/*  1.  The instance handle is the pointer to the base address of the first memory      */
-/*      region.                                                                         */
-/*  2.  This function must not be interrupted by the LVCS_Process function              */
-/*                                                                                      */
-/****************************************************************************************/
-
-LVCS_ReturnStatus_en LVCS_Init(LVCS_Handle_t            *phInstance,
-                               LVCS_MemTab_t            *pMemoryTable,
-                               LVCS_Capabilities_t      *pCapabilities);
+/************************************************************************************/
+/*                                                                                  */
+/* FUNCTION:                LVCS_DeInit                                             */
+/*                                                                                  */
+/* DESCRIPTION:                                                                     */
+/*  Frees the memory allocated by LVCS_Init, including the instance handle          */
+/*                                                                                  */
+/* PARAMETERS:                                                                      */
+/*  phInstance              Pointer to instance handle                              */
+/*                                                                                  */
+/* NOTES:                                                                           */
+/*  1.  This function must not be interrupted by the LVCS_Process function          */
+/*                                                                                  */
+/************************************************************************************/
+void LVCS_DeInit(LVCS_Handle_t* phInstance);
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -310,8 +226,7 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-LVCS_ReturnStatus_en LVCS_GetParameters(LVCS_Handle_t   hInstance,
-                                        LVCS_Params_t   *pParams);
+LVCS_ReturnStatus_en LVCS_GetParameters(LVCS_Handle_t hInstance, LVCS_Params_t* pParams);
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -332,8 +247,7 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-LVCS_ReturnStatus_en LVCS_Control(LVCS_Handle_t     hInstance,
-                                  LVCS_Params_t     *pParams);
+LVCS_ReturnStatus_en LVCS_Control(LVCS_Handle_t hInstance, LVCS_Params_t* pParams);
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -356,9 +270,7 @@
 /* NOTES:                                                                               */
 /*                                                                                      */
 /****************************************************************************************/
-LVCS_ReturnStatus_en LVCS_Process(LVCS_Handle_t             hInstance,
-                                  const LVM_FLOAT           *pInData,
-                                  LVM_FLOAT                 *pOutData,
-                                  LVM_UINT16                NumSamples);
+LVCS_ReturnStatus_en LVCS_Process(LVCS_Handle_t hInstance, const LVM_FLOAT* pInData,
+                                  LVM_FLOAT* pOutData, LVM_UINT16 NumSamples);
 
-#endif  /* LVCS_H */
+#endif /* LVCS_H */
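For reviewers tracking the removal of LVCS_Memory and the memory-table scheme, a minimal sketch of the new Init/Control/Process/DeInit lifecycle follows. The scratch sizing, the LVM_FS_44100 constant, and the wrapper function are illustrative assumptions; the real scratch size is determined by the owning effect bundle, which is outside this header:

#include <stdlib.h>
#include <string.h>
#include "LVCS.h"

/* Sketch of the refactored lifecycle; scratchBytes and the parameter values
 * below are illustrative assumptions, not taken from this patch. */
static void ConcertSoundExample(const LVM_FLOAT* pIn, LVM_FLOAT* pOut, LVM_UINT16 numSamples,
                                size_t scratchBytes) {
    LVCS_Handle_t hInstance = LVM_NULL;
    LVCS_Capabilities_t caps;
    memset(&caps, 0, sizeof(caps));
    caps.MaxBlockSize = 4096; /* sample pairs, illustrative */

    void* pScratch = malloc(scratchBytes); /* sized by the owning bundle */
    if (pScratch == NULL || LVCS_Init(&hInstance, &caps, pScratch) != LVCS_SUCCESS) {
        free(pScratch);
        return;
    }

    LVCS_Params_t params;
    memset(&params, 0, sizeof(params));
    params.OperatingMode = LVCS_ON;
    params.SpeakerType = LVCS_HEADPHONES;
    params.SourceFormat = LVCS_STEREO;
    params.CompressorMode = LVM_MODE_ON;
    params.SampleRate = LVM_FS_44100; /* assumed enum value */
    params.EffectLevel = LVCS_EFFECT_MEDIUM;
    params.ReverbLevel = 100;
    params.NrChannels = 2;
    LVCS_Control(hInstance, &params);

    LVCS_Process(hInstance, pIn, pOut, numSamples); /* one block per call */

    LVCS_DeInit(&hInstance); /* frees everything allocated by LVCS_Init */
    free(pScratch);
}

Compared with the removed LVCS_Memory flow, the caller no longer fills in an LVCS_MemTab_t; only the scratch buffer remains caller-owned, while instance memory is created by LVCS_Init and released by LVCS_DeInit.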
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_BypassMix.cpp b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_BypassMix.cpp
index ba152c0..efca27d 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_BypassMix.cpp
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_BypassMix.cpp
@@ -32,9 +32,8 @@
 /*  Function Prototypes                                                                 */
 /*                                                                                      */
 /****************************************************************************************/
-LVM_INT32 LVCS_MixerCallback(   LVCS_Handle_t   hInstance,
-                                void            *pGeneralPurpose,
-                                LVM_INT16       CallbackParam);
+LVM_INT32 LVCS_MixerCallback(LVCS_Handle_t hInstance, void* pGeneralPurpose,
+                             LVM_INT16 CallbackParam);
 
 /************************************************************************************/
 /*                                                                                  */
@@ -65,29 +64,22 @@
 /*                                                                                  */
 /************************************************************************************/
 
-LVCS_ReturnStatus_en LVCS_BypassMixInit(LVCS_Handle_t       hInstance,
-                                        LVCS_Params_t       *pParams)
-{
-
-    LVM_UINT16          Offset;
-    LVM_FLOAT           Gain;
-    LVM_FLOAT           Current;
-    LVCS_Instance_t     *pInstance = (LVCS_Instance_t  *)hInstance;
-    LVCS_BypassMix_t    *pConfig   = (LVCS_BypassMix_t *)&pInstance->BypassMix;
-    const Gain_t        *pOutputGainTable;
+LVCS_ReturnStatus_en LVCS_BypassMixInit(LVCS_Handle_t hInstance, LVCS_Params_t* pParams) {
+    LVM_UINT16 Offset;
+    LVM_FLOAT Gain;
+    LVM_FLOAT Current;
+    LVCS_Instance_t* pInstance = (LVCS_Instance_t*)hInstance;
+    LVCS_BypassMix_t* pConfig = (LVCS_BypassMix_t*)&pInstance->BypassMix;
+    const Gain_t* pOutputGainTable;
 
     /*
      * Set the transition gain
      */
-    if ((pParams->OperatingMode == LVCS_ON) &&
-        (pInstance->bTimerDone == LVM_TRUE)
-        && (pInstance->MSTarget1 != 0x7FFF) /* this indicates an off->on transtion */
-        )
-    {
+    if ((pParams->OperatingMode == LVCS_ON) && (pInstance->bTimerDone == LVM_TRUE) &&
+        (pInstance->MSTarget1 != 0x7FFF) /* this indicates an off->on transition */
+    ) {
         pInstance->TransitionGain = ((LVM_FLOAT)pParams->EffectLevel / 32767);
-    }
-    else
-    {
+    } else {
         /* Select no effect level */
         pInstance->TransitionGain = 0;
     }
@@ -95,18 +87,19 @@
     /*
      * Calculate the output gain table offset
      */
-    Offset = (LVM_UINT16)(pParams->SpeakerType + (pParams->SourceFormat*(1+LVCS_EX_HEADPHONES)));
+    Offset =
+            (LVM_UINT16)(pParams->SpeakerType + (pParams->SourceFormat * (1 + LVCS_EX_HEADPHONES)));
     pOutputGainTable = (Gain_t*)&LVCS_OutputGainTable[0];
 
     /*
      * Setup the mixer gain for the processed path
      */
-    Gain =  (LVM_FLOAT)(pOutputGainTable[Offset].Loss * pInstance->TransitionGain);
+    Gain = (LVM_FLOAT)(pOutputGainTable[Offset].Loss * pInstance->TransitionGain);
 
     pConfig->Mixer_Instance.MixerStream[0].CallbackParam = 0;
     pConfig->Mixer_Instance.MixerStream[0].pCallbackHandle = LVM_NULL;
     pConfig->Mixer_Instance.MixerStream[0].pCallBack = LVM_NULL;
-    pConfig->Mixer_Instance.MixerStream[0].CallbackSet=1;
+    pConfig->Mixer_Instance.MixerStream[0].CallbackSet = 1;
 
     Current = LVC_Mixer_GetCurrent(&pConfig->Mixer_Instance.MixerStream[0]);
     LVC_Mixer_Init(&pConfig->Mixer_Instance.MixerStream[0], (LVM_FLOAT)(Gain), Current);
@@ -116,8 +109,8 @@
     /*
      * Setup the mixer gain for the unprocessed path
      */
-    Gain = (LVM_FLOAT)(pOutputGainTable[Offset].Loss * (1.0 - \
-                                    (LVM_FLOAT)pInstance->TransitionGain));
+    Gain = (LVM_FLOAT)(pOutputGainTable[Offset].Loss *
+                       (1.0 - (LVM_FLOAT)pInstance->TransitionGain));
     Gain = (LVM_FLOAT)pOutputGainTable[Offset].UnprocLoss * Gain;
     Current = LVC_Mixer_GetCurrent(&pConfig->Mixer_Instance.MixerStream[1]);
     LVC_Mixer_Init(&pConfig->Mixer_Instance.MixerStream[1], (LVM_FLOAT)(Gain), Current);
@@ -125,7 +118,7 @@
                                        LVCS_BYPASS_MIXER_TC, pParams->SampleRate, 2);
     pConfig->Mixer_Instance.MixerStream[1].CallbackParam = 0;
     pConfig->Mixer_Instance.MixerStream[1].pCallbackHandle = hInstance;
-    pConfig->Mixer_Instance.MixerStream[1].CallbackSet=1;
+    pConfig->Mixer_Instance.MixerStream[1].CallbackSet = 1;
     pConfig->Mixer_Instance.MixerStream[1].pCallBack = LVCS_MixerCallback;
 
     /*
@@ -137,45 +130,42 @@
      * Correct gain for the effect level
      */
     {
-        LVM_FLOAT           GainCorrect;
-        LVM_FLOAT           Gain1;
-        LVM_FLOAT           Gain2;
+        LVM_FLOAT GainCorrect;
+        LVM_FLOAT Gain1;
+        LVM_FLOAT Gain2;
 
         Gain1 = LVC_Mixer_GetTarget(&pConfig->Mixer_Instance.MixerStream[0]);
         Gain2 = LVC_Mixer_GetTarget(&pConfig->Mixer_Instance.MixerStream[1]);
         /*
          * Calculate the gain correction
          */
-        if (pInstance->Params.CompressorMode == LVM_MODE_ON)
-        {
-        GainCorrect = (LVM_FLOAT)(  pInstance->VolCorrect.GainMin
-                                    - (((LVM_FLOAT)pInstance->VolCorrect.GainMin * \
-                                                         ((LVM_FLOAT)pInstance->TransitionGain)))
-                                    + (((LVM_FLOAT)pInstance->VolCorrect.GainFull * \
-                                                        ((LVM_FLOAT)pInstance->TransitionGain))));
+        if (pInstance->Params.CompressorMode == LVM_MODE_ON) {
+            GainCorrect = (LVM_FLOAT)(pInstance->VolCorrect.GainMin -
+                                      (((LVM_FLOAT)pInstance->VolCorrect.GainMin *
+                                        ((LVM_FLOAT)pInstance->TransitionGain))) +
+                                      (((LVM_FLOAT)pInstance->VolCorrect.GainFull *
+                                        ((LVM_FLOAT)pInstance->TransitionGain))));
 
-        /*
-         * Apply the gain correction
-         */
-        Gain1 = (Gain1 * GainCorrect);
-        Gain2 = (Gain2 * GainCorrect);
-
+            /*
+             * Apply the gain correction
+             */
+            Gain1 = (Gain1 * GainCorrect);
+            Gain2 = (Gain2 * GainCorrect);
         }
 
         /*
          * Set the gain values
          */
         pConfig->Output_Shift = pConfig->Output_Shift;
-        LVC_Mixer_SetTarget(&pConfig->Mixer_Instance.MixerStream[0],Gain1);
+        LVC_Mixer_SetTarget(&pConfig->Mixer_Instance.MixerStream[0], Gain1);
         LVC_Mixer_VarSlope_SetTimeConstant(&pConfig->Mixer_Instance.MixerStream[0],
                                            LVCS_BYPASS_MIXER_TC, pParams->SampleRate, 2);
-        LVC_Mixer_SetTarget(&pConfig->Mixer_Instance.MixerStream[1],Gain2);
+        LVC_Mixer_SetTarget(&pConfig->Mixer_Instance.MixerStream[1], Gain2);
         LVC_Mixer_VarSlope_SetTimeConstant(&pConfig->Mixer_Instance.MixerStream[1],
                                            LVCS_BYPASS_MIXER_TC, pParams->SampleRate, 2);
     }
 
-    return(LVCS_SUCCESS);
-
+    return (LVCS_SUCCESS);
 }
 
 /************************************************************************************/
@@ -205,39 +195,29 @@
 /*                                                                                  */
 /************************************************************************************/
 
-LVCS_ReturnStatus_en LVCS_BypassMixer(LVCS_Handle_t         hInstance,
-                                      const LVM_FLOAT       *pProcessed,
-                                      const LVM_FLOAT       *pUnprocessed,
-                                      LVM_FLOAT             *pOutData,
-                                      LVM_UINT16            NumSamples)
-{
-
-    LVCS_Instance_t     *pInstance      = (LVCS_Instance_t  *)hInstance;
-    LVCS_BypassMix_t    *pConfig        = (LVCS_BypassMix_t *)&pInstance->BypassMix;
+LVCS_ReturnStatus_en LVCS_BypassMixer(LVCS_Handle_t hInstance, const LVM_FLOAT* pProcessed,
+                                      const LVM_FLOAT* pUnprocessed, LVM_FLOAT* pOutData,
+                                      LVM_UINT16 NumSamples) {
+    LVCS_Instance_t* pInstance = (LVCS_Instance_t*)hInstance;
+    LVCS_BypassMix_t* pConfig = (LVCS_BypassMix_t*)&pInstance->BypassMix;
 
     /*
      * Check if the bypass mixer is enabled
      */
-    if ((pInstance->Params.OperatingMode & LVCS_BYPASSMIXSWITCH) != 0)
-    {
+    if ((pInstance->Params.OperatingMode & LVCS_BYPASSMIXSWITCH) != 0) {
         /*
          * Apply the bypass mix
          */
-        LVC_MixSoft_2St_D16C31_SAT(&pConfig->Mixer_Instance,
-                                   pProcessed,
-                                   (LVM_FLOAT *) pUnprocessed,
-                                   pOutData,
-                                   (LVM_INT16)(2 * NumSamples));
+        LVC_MixSoft_2St_D16C31_SAT(&pConfig->Mixer_Instance, pProcessed, (LVM_FLOAT*)pUnprocessed,
+                                   pOutData, (LVM_INT16)(2 * NumSamples));
         /*
          * Apply output gain correction shift
          */
-        Shift_Sat_Float((LVM_INT16)pConfig->Output_Shift,
-                        (LVM_FLOAT*)pOutData,
-                        (LVM_FLOAT*)pOutData,
-                        (LVM_INT16)(2 * NumSamples));          /* Left and right*/
+        Shift_Sat_Float((LVM_INT16)pConfig->Output_Shift, (LVM_FLOAT*)pOutData,
+                        (LVM_FLOAT*)pOutData, (LVM_INT16)(2 * NumSamples)); /* Left and right*/
     }
 
-    return(LVCS_SUCCESS);
+    return (LVCS_SUCCESS);
 }
 
 /************************************************************************************/
@@ -245,22 +225,18 @@
 /* FUNCTION:                LVCS_MixerCallback                                      */
 /*                                                                                  */
 /************************************************************************************/
-LVM_INT32 LVCS_MixerCallback(LVCS_Handle_t      hInstance,
-                            void                *pGeneralPurpose,
-                            LVM_INT16           CallbackParam)
-{
-    LVCS_Instance_t     *pInstance = (LVCS_Instance_t  *)hInstance;
+LVM_INT32 LVCS_MixerCallback(LVCS_Handle_t hInstance, void* pGeneralPurpose,
+                             LVM_INT16 CallbackParam) {
+    LVCS_Instance_t* pInstance = (LVCS_Instance_t*)hInstance;
 
-   (void)pGeneralPurpose;
+    (void)pGeneralPurpose;
 
     /*
      * Off transition has completed in Headphone mode
      */
-    if ((pInstance->OutputDevice == LVCS_HEADPHONE) &&
-        (pInstance->bInOperatingModeTransition)     &&
-        (pInstance->MSTarget0 == 0x0000)&&  /* this indicates an on->off transition */
-        (CallbackParam == 0))
-    {
+    if ((pInstance->OutputDevice == LVCS_HEADPHONE) && (pInstance->bInOperatingModeTransition) &&
+        (pInstance->MSTarget0 == 0x0000) && /* this indicates an on->off transition */
+        (CallbackParam == 0)) {
         /* Set operating mode to OFF */
         pInstance->Params.OperatingMode = LVCS_OFF;
 
@@ -268,21 +244,17 @@
         pInstance->bInOperatingModeTransition = LVM_FALSE;
 
         /* Signal to the bundle */
-        if((*pInstance->Capabilities.CallBack) != LVM_NULL){
-            (*pInstance->Capabilities.CallBack)(pInstance->Capabilities.pBundleInstance,
-                                                LVM_NULL,
+        if ((*pInstance->Capabilities.CallBack) != LVM_NULL) {
+            (*pInstance->Capabilities.CallBack)(pInstance->Capabilities.pBundleInstance, LVM_NULL,
                                                 (ALGORITHM_CS_ID | LVCS_EVENT_ALGOFF));
         }
     }
 
-    if ((pInstance->OutputDevice == LVCS_HEADPHONE)  &&
-        (pInstance->MSTarget0 == 1) &&
-        (pInstance->bTimerDone == LVM_TRUE)){
-
+    if ((pInstance->OutputDevice == LVCS_HEADPHONE) && (pInstance->MSTarget0 == 1) &&
+        (pInstance->bTimerDone == LVM_TRUE)) {
         /* Exit transition state */
         pInstance->bInOperatingModeTransition = LVM_FALSE;
     }
 
     return 1;
 }
-
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_BypassMix.h b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_BypassMix.h
index fcd8ee3..69afcbb 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_BypassMix.h
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_BypassMix.h
@@ -33,20 +33,18 @@
 /************************************************************************************/
 
 /* Bypass mixer structure */
-typedef struct
-{
+typedef struct {
     /* Mixer settings */
-    LVMixer3_2St_FLOAT_st   Mixer_Instance;             /* Mixer instance */
-    LVM_UINT16              Output_Shift;               /* Correcting gain output shift */
+    LVMixer3_2St_FLOAT_st Mixer_Instance; /* Mixer instance */
+    LVM_UINT16 Output_Shift;              /* Correcting gain output shift */
 
 } LVCS_BypassMix_t;
 
-typedef struct
-{
+typedef struct {
     /* Output gain settings, Gain = (Loss/32768) * 2^Shift */
-    LVM_UINT16             Shift;                      /* Left shifts required */
-    LVM_FLOAT              Loss;                       /* Loss required */
-    LVM_FLOAT              UnprocLoss;                 /* Unprocessed path loss */
+    LVM_UINT16 Shift;     /* Left shifts required */
+    LVM_FLOAT Loss;       /* Loss required */
+    LVM_FLOAT UnprocLoss; /* Unprocessed path loss */
 } Gain_t;
 /************************************************************************************/
 /*                                                                                    */
@@ -54,13 +52,10 @@
 /*                                                                                    */
 /************************************************************************************/
 
-LVCS_ReturnStatus_en LVCS_BypassMixInit(LVCS_Handle_t       hInstance,
-                                           LVCS_Params_t    *pParams);
+LVCS_ReturnStatus_en LVCS_BypassMixInit(LVCS_Handle_t hInstance, LVCS_Params_t* pParams);
 
-LVCS_ReturnStatus_en LVCS_BypassMixer(LVCS_Handle_t         hInstance,
-                                      const LVM_FLOAT       *pProcessed,
-                                      const LVM_FLOAT       *unProcessed,
-                                      LVM_FLOAT       *pOutData,
-                                      LVM_UINT16      NumSamples);
+LVCS_ReturnStatus_en LVCS_BypassMixer(LVCS_Handle_t hInstance, const LVM_FLOAT* pProcessed,
+                                      const LVM_FLOAT* unProcessed, LVM_FLOAT* pOutData,
+                                      LVM_UINT16 NumSamples);
 
-#endif  /* BYPASSMIX_H */
+#endif /* BYPASSMIX_H */
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Control.cpp b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Control.cpp
index 50db03d..8f88986 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Control.cpp
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Control.cpp
@@ -45,15 +45,12 @@
 /*                                                                                  */
 /************************************************************************************/
 
-LVCS_ReturnStatus_en LVCS_GetParameters(LVCS_Handle_t   hInstance,
-                                        LVCS_Params_t   *pParams)
-{
-
-    LVCS_Instance_t     *pInstance =(LVCS_Instance_t  *)hInstance;
+LVCS_ReturnStatus_en LVCS_GetParameters(LVCS_Handle_t hInstance, LVCS_Params_t* pParams) {
+    LVCS_Instance_t* pInstance = (LVCS_Instance_t*)hInstance;
 
     *pParams = pInstance->Params;
 
-    return(LVCS_SUCCESS);
+    return (LVCS_SUCCESS);
 }
 
 /************************************************************************************/
@@ -75,34 +72,29 @@
 /*                                                                                  */
 /************************************************************************************/
 
-LVCS_ReturnStatus_en LVCS_Control(LVCS_Handle_t      hInstance,
-                                  LVCS_Params_t      *pParams)
-{
-    LVM_INT16                   Offset;
-    LVCS_Instance_t             *pInstance =(LVCS_Instance_t  *)hInstance;
-    LVCS_ReturnStatus_en        err;
-    LVCS_Modes_en               OperatingModeSave = pInstance->Params.OperatingMode;
+LVCS_ReturnStatus_en LVCS_Control(LVCS_Handle_t hInstance, LVCS_Params_t* pParams) {
+    LVM_INT16 Offset;
+    LVCS_Instance_t* pInstance = (LVCS_Instance_t*)hInstance;
+    LVCS_ReturnStatus_en err;
+    LVCS_Modes_en OperatingModeSave = pInstance->Params.OperatingMode;
 
-    if (pParams->SampleRate != pInstance->Params.SampleRate)
-    {
+    if (pParams->SampleRate != pInstance->Params.SampleRate) {
         pInstance->TimerParams.SamplingRate = LVCS_SampleRateTable[pParams->SampleRate];
     }
 
     /*
      * If the reverb level has changed
      */
-    if(pInstance->Params.ReverbLevel != pParams->ReverbLevel)
-    {
-        err=LVCS_ReverbGeneratorInit(hInstance,pParams);
+    if (pInstance->Params.ReverbLevel != pParams->ReverbLevel) {
+        err = LVCS_ReverbGeneratorInit(hInstance, pParams);
     }
 
     /*
      * If the sample rate or speaker has changed then perform a full re-initialisation
      */
     if ((pInstance->Params.SampleRate != pParams->SampleRate) ||
-       (pInstance->Params.SpeakerType != pParams->SpeakerType))
-    {
-        const LVCS_VolCorrect_t *pLVCS_VolCorrectTable;
+        (pInstance->Params.SpeakerType != pParams->SpeakerType)) {
+        const LVCS_VolCorrect_t* pLVCS_VolCorrectTable;
 
         /*
          * Output device
@@ -114,15 +106,16 @@
          */
         /* Use internal coefficient table */
         pLVCS_VolCorrectTable = (LVCS_VolCorrect_t*)&LVCS_VolCorrectTable[0];
-        Offset = (LVM_INT16)(pParams->SpeakerType + pParams->SourceFormat*(1+LVCS_EX_HEADPHONES));
+        Offset = (LVM_INT16)(pParams->SpeakerType +
+                             pParams->SourceFormat * (1 + LVCS_EX_HEADPHONES));
 
         pInstance->VolCorrect = pLVCS_VolCorrectTable[Offset];
 
         pInstance->CompressGain = pInstance->VolCorrect.CompMin;
         LVC_Mixer_Init(&pInstance->BypassMix.Mixer_Instance.MixerStream[0], 0, 0);
         {
-            LVM_FLOAT          Gain;
-            const Gain_t        *pOutputGainTable = (Gain_t*)&LVCS_OutputGainTable[0];
+            LVM_FLOAT Gain;
+            const Gain_t* pOutputGainTable = (Gain_t*)&LVCS_OutputGainTable[0];
             Gain = (LVM_FLOAT)(pOutputGainTable[Offset].Loss);
             Gain = (LVM_FLOAT)pOutputGainTable[Offset].UnprocLoss * (Gain);
 
@@ -133,22 +126,18 @@
 
             LVC_Mixer_Init(&pInstance->BypassMix.Mixer_Instance.MixerStream[1], 0, Gain);
             LVC_Mixer_VarSlope_SetTimeConstant(&pInstance->BypassMix.Mixer_Instance.MixerStream[0],
-                    LVCS_BYPASS_MIXER_TC, pParams->SampleRate, 2);
+                                               LVCS_BYPASS_MIXER_TC, pParams->SampleRate, 2);
             LVC_Mixer_VarSlope_SetTimeConstant(&pInstance->BypassMix.Mixer_Instance.MixerStream[1],
-                    LVCS_BYPASS_MIXER_TC, pParams->SampleRate, 2);
+                                               LVCS_BYPASS_MIXER_TC, pParams->SampleRate, 2);
         }
 
-        err=LVCS_SEnhancerInit(hInstance,
-                           pParams);
+        err = LVCS_SEnhancerInit(hInstance, pParams);
 
-        err=LVCS_ReverbGeneratorInit(hInstance,
-                                 pParams);
+        err = LVCS_ReverbGeneratorInit(hInstance, pParams);
 
-        err=LVCS_EqualiserInit(hInstance,
-                           pParams);
+        err = LVCS_EqualiserInit(hInstance, pParams);
 
-        err=LVCS_BypassMixInit(hInstance,
-                           pParams);
+        err = LVCS_BypassMixInit(hInstance, pParams);
 
     }
 
@@ -156,30 +145,26 @@
      * Check if the effect level or source format has changed
      */
     else if ((pInstance->Params.EffectLevel != pParams->EffectLevel) ||
-            (pInstance->Params.SourceFormat != pParams->SourceFormat))
-    {
-        const LVCS_VolCorrect_t *pLVCS_VolCorrectTable;
+             (pInstance->Params.SourceFormat != pParams->SourceFormat)) {
+        const LVCS_VolCorrect_t* pLVCS_VolCorrectTable;
 
         /*
          * Get the volume correction parameters
          */
         /* Use internal coefficient table */
         pLVCS_VolCorrectTable = (LVCS_VolCorrect_t*)&LVCS_VolCorrectTable[0];
-        Offset = (LVM_INT16)(pParams->SpeakerType + pParams->SourceFormat*(1+LVCS_EX_HEADPHONES));
+        Offset = (LVM_INT16)(pParams->SpeakerType +
+                             pParams->SourceFormat * (1 + LVCS_EX_HEADPHONES));
 
         pInstance->VolCorrect = pLVCS_VolCorrectTable[Offset];
 
         /* Update the effect level and alpha-mixer gains */
-        err=LVCS_BypassMixInit(hInstance,
-                           pParams);
+        err = LVCS_BypassMixInit(hInstance, pParams);
 
-        if(err != LVCS_SUCCESS)
-        {
+        if (err != LVCS_SUCCESS) {
             return err;
         }
-    }
-    else
-    {
+    } else {
         pInstance->Params = *pParams;
     }
 
@@ -189,40 +174,36 @@
     pInstance->Params = *pParams;
 
     /* Stay on the current operating mode until the transition is done */
-    if((pParams->OperatingMode != OperatingModeSave) ||
-       (pInstance->bInOperatingModeTransition == LVM_TRUE)){
-
+    if ((pParams->OperatingMode != OperatingModeSave) ||
+        (pInstance->bInOperatingModeTransition == LVM_TRUE)) {
         /* Set the reverb delay timeout */
-        if(pInstance->bInOperatingModeTransition != LVM_TRUE){
+        if (pInstance->bInOperatingModeTransition != LVM_TRUE) {
             pInstance->bTimerDone = LVM_FALSE;
             pInstance->TimerParams.TimeInMs =
-            (LVM_INT16)(((pInstance->Reverberation.DelaySize << 2)
-            /pInstance->TimerParams.SamplingRate) + 1);
-            LVM_Timer_Init ( &pInstance->TimerInstance,
-                             &pInstance->TimerParams);
+                    (LVM_INT16)(((pInstance->Reverberation.DelaySize << 2) /
+                                 pInstance->TimerParams.SamplingRate) +
+                                1);
+            LVM_Timer_Init(&pInstance->TimerInstance, &pInstance->TimerParams);
         }
 
         /* Update the effect level and alpha-mixer gains */
-        err=LVCS_BypassMixInit(hInstance,
-                           pParams);
+        err = LVCS_BypassMixInit(hInstance, pParams);
 
         /* Change transition bypass mixer settings if needed depending on transition type */
-        if(pParams->OperatingMode != LVCS_OFF){
-            pInstance->MSTarget0=LVM_MAXINT_16;
-            pInstance->MSTarget1=0;
-        }
-        else
-        {
+        if (pParams->OperatingMode != LVCS_OFF) {
+            pInstance->MSTarget0 = LVM_MAXINT_16;
+            pInstance->MSTarget1 = 0;
+        } else {
             pInstance->Params.OperatingMode = OperatingModeSave;
-            pInstance->MSTarget1=LVM_MAXINT_16;
-            pInstance->MSTarget0=0;
+            pInstance->MSTarget1 = LVM_MAXINT_16;
+            pInstance->MSTarget0 = 0;
         }
 
         /* Set transition flag */
         pInstance->bInOperatingModeTransition = LVM_TRUE;
     }
 
-    return(LVCS_SUCCESS);
+    return (LVCS_SUCCESS);
 }
 
 /****************************************************************************************/
@@ -233,12 +214,11 @@
 /*  CallBack function of the Timer.                                                     */
 /*                                                                                      */
 /****************************************************************************************/
-void LVCS_TimerCallBack (void* hInstance, void* pCallBackParams, LVM_INT32 CallbackParam)
-{
-    LVCS_Instance_t     *pInstance  = (LVCS_Instance_t  *)hInstance;
+void LVCS_TimerCallBack(void* hInstance, void* pCallBackParams, LVM_INT32 CallbackParam) {
+    LVCS_Instance_t* pInstance = (LVCS_Instance_t*)hInstance;
 
     /* Avoid warnings because pCallBackParams and CallbackParam are not used*/
-    if((pCallBackParams != LVM_NULL) || (CallbackParam != 0)){
+    if ((pCallBackParams != LVM_NULL) || (CallbackParam != 0)) {
         pCallBackParams = hInstance;
         CallbackParam = 0;
         return;
@@ -248,4 +228,3 @@
 
     return;
 }
-
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Equaliser.cpp b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Equaliser.cpp
index 431b7e3..bad9aef 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Equaliser.cpp
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Equaliser.cpp
@@ -53,29 +53,22 @@
 /* NOTES:                                                                           */
 /*                                                                                  */
 /************************************************************************************/
-LVCS_ReturnStatus_en LVCS_EqualiserInit(LVCS_Handle_t       hInstance,
-                                        LVCS_Params_t       *pParams)
-{
+LVCS_ReturnStatus_en LVCS_EqualiserInit(LVCS_Handle_t hInstance, LVCS_Params_t* pParams) {
+    LVM_UINT16 Offset;
+    LVCS_Instance_t* pInstance = (LVCS_Instance_t*)hInstance;
+    LVCS_Equaliser_t* pConfig = (LVCS_Equaliser_t*)&pInstance->Equaliser;
+    LVCS_Data_t* pData;
+    LVCS_Coefficient_t* pCoefficients;
+    BQ_FLOAT_Coefs_t Coeffs;
+    const BiquadA012B12CoefsSP_t* pEqualiserCoefTable;
 
-    LVM_UINT16          Offset;
-    LVCS_Instance_t     *pInstance = (LVCS_Instance_t  *)hInstance;
-    LVCS_Equaliser_t    *pConfig   = (LVCS_Equaliser_t *)&pInstance->Equaliser;
-    LVCS_Data_t         *pData;
-    LVCS_Coefficient_t  *pCoefficients;
-    BQ_FLOAT_Coefs_t      Coeffs;
-    const BiquadA012B12CoefsSP_t *pEqualiserCoefTable;
-
-    pData = (LVCS_Data_t *) \
-                pInstance->MemoryTable.Region[LVCS_MEMREGION_PERSISTENT_FAST_DATA].pBaseAddress;
-
-    pCoefficients = (LVCS_Coefficient_t *) \
-                pInstance->MemoryTable.Region[LVCS_MEMREGION_PERSISTENT_FAST_COEF].pBaseAddress;
+    pData = (LVCS_Data_t*)pInstance->pData;
+    pCoefficients = (LVCS_Coefficient_t*)pInstance->pCoeff;
     /*
      * If the sample rate changes re-initialise the filters
      */
     if ((pInstance->Params.SampleRate != pParams->SampleRate) ||
-        (pInstance->Params.SpeakerType != pParams->SpeakerType))
-    {
+        (pInstance->Params.SpeakerType != pParams->SpeakerType)) {
         /*
          * Setup the filter coefficients and clear the history
          */
@@ -84,37 +77,35 @@
 
         /* Left and right filters */
         /* Convert incoming coefficients to the required format/ordering */
-        Coeffs.A0 = (LVM_FLOAT) pEqualiserCoefTable[Offset].A0;
-        Coeffs.A1 = (LVM_FLOAT) pEqualiserCoefTable[Offset].A1;
-        Coeffs.A2 = (LVM_FLOAT) pEqualiserCoefTable[Offset].A2;
+        Coeffs.A0 = (LVM_FLOAT)pEqualiserCoefTable[Offset].A0;
+        Coeffs.A1 = (LVM_FLOAT)pEqualiserCoefTable[Offset].A1;
+        Coeffs.A2 = (LVM_FLOAT)pEqualiserCoefTable[Offset].A2;
         Coeffs.B1 = (LVM_FLOAT)-pEqualiserCoefTable[Offset].B1;
         Coeffs.B2 = (LVM_FLOAT)-pEqualiserCoefTable[Offset].B2;
 
-        LoadConst_Float((LVM_INT16)0,                                         /* Value */
-                        (LVM_FLOAT *)&pData->EqualiserBiquadTaps, /* Destination */
+        LoadConst_Float((LVM_INT16)0,                            /* Value */
+                        (LVM_FLOAT*)&pData->EqualiserBiquadTaps, /* Destination */
                         /* Number of words */
                         (LVM_UINT16)(sizeof(pData->EqualiserBiquadTaps) / sizeof(LVM_FLOAT)));
 
         BQ_2I_D16F32Css_TRC_WRA_01_Init(&pCoefficients->EqualiserBiquadInstance,
-                                        &pData->EqualiserBiquadTaps,
-                                        &Coeffs);
+                                        &pData->EqualiserBiquadTaps, &Coeffs);
 
         /* Callbacks */
-        switch(pEqualiserCoefTable[Offset].Scale)
-        {
+        switch (pEqualiserCoefTable[Offset].Scale) {
             case 13:
-                pConfig->pBiquadCallBack  = BQ_2I_D16F32C13_TRC_WRA_01;
+                pConfig->pBiquadCallBack = BQ_2I_D16F32C13_TRC_WRA_01;
                 break;
             case 14:
-                pConfig->pBiquadCallBack  = BQ_2I_D16F32C14_TRC_WRA_01;
+                pConfig->pBiquadCallBack = BQ_2I_D16F32C14_TRC_WRA_01;
                 break;
             case 15:
-                pConfig->pBiquadCallBack  = BQ_2I_D16F32C15_TRC_WRA_01;
+                pConfig->pBiquadCallBack = BQ_2I_D16F32C15_TRC_WRA_01;
                 break;
         }
     }
 
-    return(LVCS_SUCCESS);
+    return (LVCS_SUCCESS);
 }
 /************************************************************************************/
 /*                                                                                  */
@@ -135,30 +126,23 @@
 /*  1.  Always processes in place.                                                  */
 /*                                                                                  */
 /************************************************************************************/
-LVCS_ReturnStatus_en LVCS_Equaliser(LVCS_Handle_t       hInstance,
-                                    LVM_FLOAT           *pInputOutput,
-                                    LVM_UINT16          NumSamples)
-{
+LVCS_ReturnStatus_en LVCS_Equaliser(LVCS_Handle_t hInstance, LVM_FLOAT* pInputOutput,
+                                    LVM_UINT16 NumSamples) {
+    LVCS_Instance_t* pInstance = (LVCS_Instance_t*)hInstance;
+    LVCS_Equaliser_t* pConfig = (LVCS_Equaliser_t*)&pInstance->Equaliser;
+    LVCS_Coefficient_t* pCoefficients;
 
-    LVCS_Instance_t     *pInstance = (LVCS_Instance_t  *)hInstance;
-    LVCS_Equaliser_t    *pConfig   = (LVCS_Equaliser_t  *)&pInstance->Equaliser;
-    LVCS_Coefficient_t  *pCoefficients;
-
-    pCoefficients = (LVCS_Coefficient_t *) \
-                  pInstance->MemoryTable.Region[LVCS_MEMREGION_PERSISTENT_FAST_COEF].pBaseAddress;
+    pCoefficients = (LVCS_Coefficient_t*)pInstance->pCoeff;
 
     /*
      * Check if the equaliser is required
      */
-    if ((pInstance->Params.OperatingMode & LVCS_EQUALISERSWITCH) != 0)
-    {
+    if ((pInstance->Params.OperatingMode & LVCS_EQUALISERSWITCH) != 0) {
         /* Apply filter to the left and right channels */
-        (pConfig->pBiquadCallBack)((Biquad_FLOAT_Instance_t*) \
-                                        &pCoefficients->EqualiserBiquadInstance,
-                                        (LVM_FLOAT *)pInputOutput,
-                                        (LVM_FLOAT *)pInputOutput,
-                                        (LVM_INT16)NumSamples);
+        (pConfig->pBiquadCallBack)(
+                (Biquad_FLOAT_Instance_t*)&pCoefficients->EqualiserBiquadInstance,
+                (LVM_FLOAT*)pInputOutput, (LVM_FLOAT*)pInputOutput, (LVM_INT16)NumSamples);
     }
 
-    return(LVCS_SUCCESS);
+    return (LVCS_SUCCESS);
 }
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Equaliser.h b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Equaliser.h
index 918d931..c0d0950 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Equaliser.h
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Equaliser.h
@@ -25,9 +25,8 @@
 /************************************************************************************/
 
 /* Equaliser structure */
-typedef struct
-{
-    void (*pBiquadCallBack) (Biquad_FLOAT_Instance_t*, LVM_FLOAT*, LVM_FLOAT*, LVM_INT16);
+typedef struct {
+    void (*pBiquadCallBack)(Biquad_FLOAT_Instance_t*, LVM_FLOAT*, LVM_FLOAT*, LVM_INT16);
 } LVCS_Equaliser_t;
 
 /************************************************************************************/
@@ -36,10 +35,8 @@
 /*                                                                                  */
 /************************************************************************************/
 
-LVCS_ReturnStatus_en LVCS_EqualiserInit(LVCS_Handle_t       hInstance,
-                                        LVCS_Params_t       *pParams);
-LVCS_ReturnStatus_en LVCS_Equaliser(LVCS_Handle_t            hInstance,
-                                    LVM_FLOAT                *pInputOutput,
-                                    LVM_UINT16                NumSamples);
+LVCS_ReturnStatus_en LVCS_EqualiserInit(LVCS_Handle_t hInstance, LVCS_Params_t* pParams);
+LVCS_ReturnStatus_en LVCS_Equaliser(LVCS_Handle_t hInstance, LVM_FLOAT* pInputOutput,
+                                    LVM_UINT16 NumSamples);
 
-#endif  /* EQUALISER_H */
+#endif /* EQUALISER_H */
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Headphone_Coeffs.h b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Headphone_Coeffs.h
index c7ee232..69c46c6 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Headphone_Coeffs.h
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Headphone_Coeffs.h
@@ -24,201 +24,201 @@
 /*                                                                                  */
 /************************************************************************************/
 /* Stereo Enhancer coefficients for 8000 Hz sample rate, scaled with 0.161258 */
-#define CS_MIDDLE_8000_A0                           0.227720
-#define CS_MIDDLE_8000_A1                          (-0.215125)
-#define CS_MIDDLE_8000_A2                           0.000000
-#define CS_MIDDLE_8000_B1                          (-0.921899)
-#define CS_MIDDLE_8000_B2                           0.000000
-#define CS_MIDDLE_8000_SCALE                        15
-#define CS_SIDE_8000_A0                             0.611441
-#define CS_SIDE_8000_A1                            (-0.380344)
-#define CS_SIDE_8000_A2                            (-0.231097)
-#define CS_SIDE_8000_B1                            (-0.622470)
-#define CS_SIDE_8000_B2                            (-0.130759)
-#define CS_SIDE_8000_SCALE                         15
+#define CS_MIDDLE_8000_A0 0.227720
+#define CS_MIDDLE_8000_A1 (-0.215125)
+#define CS_MIDDLE_8000_A2 0.000000
+#define CS_MIDDLE_8000_B1 (-0.921899)
+#define CS_MIDDLE_8000_B2 0.000000
+#define CS_MIDDLE_8000_SCALE 15
+#define CS_SIDE_8000_A0 0.611441
+#define CS_SIDE_8000_A1 (-0.380344)
+#define CS_SIDE_8000_A2 (-0.231097)
+#define CS_SIDE_8000_B1 (-0.622470)
+#define CS_SIDE_8000_B2 (-0.130759)
+#define CS_SIDE_8000_SCALE 15
 
 /* Stereo Enhancer coefficients for 11025Hz sample rate, scaled with 0.162943 */
-#define CS_MIDDLE_11025_A0                       0.230838
-#define CS_MIDDLE_11025_A1                      (-0.221559)
-#define CS_MIDDLE_11025_A2                       0.000000
-#define CS_MIDDLE_11025_B1                      (-0.943056)
-#define CS_MIDDLE_11025_B2                       0.000000
-#define CS_MIDDLE_11025_SCALE                    15
-#define CS_SIDE_11025_A0                         0.557372
-#define CS_SIDE_11025_A1                        (-0.391490)
-#define CS_SIDE_11025_A2                        (-0.165881)
-#define CS_SIDE_11025_B1                        (-0.880608)
-#define CS_SIDE_11025_B2                         0.032397
-#define CS_SIDE_11025_SCALE                      15
+#define CS_MIDDLE_11025_A0 0.230838
+#define CS_MIDDLE_11025_A1 (-0.221559)
+#define CS_MIDDLE_11025_A2 0.000000
+#define CS_MIDDLE_11025_B1 (-0.943056)
+#define CS_MIDDLE_11025_B2 0.000000
+#define CS_MIDDLE_11025_SCALE 15
+#define CS_SIDE_11025_A0 0.557372
+#define CS_SIDE_11025_A1 (-0.391490)
+#define CS_SIDE_11025_A2 (-0.165881)
+#define CS_SIDE_11025_B1 (-0.880608)
+#define CS_SIDE_11025_B2 0.032397
+#define CS_SIDE_11025_SCALE 15
 
 /* Stereo Enhancer coefficients for 12000Hz sample rate, scaled with 0.162191 */
-#define CS_MIDDLE_12000_A0                        0.229932
-#define CS_MIDDLE_12000_A1                       (-0.221436)
-#define CS_MIDDLE_12000_A2                        0.000000
-#define CS_MIDDLE_12000_B1                       (-0.947616)
-#define CS_MIDDLE_12000_B2                        0.000000
-#define CS_MIDDLE_12000_SCALE                        15
-#define CS_SIDE_12000_A0                         0.558398
-#define CS_SIDE_12000_A1                        (-0.392211)
-#define CS_SIDE_12000_A2                        (-0.166187)
-#define CS_SIDE_12000_B1                        (-0.892550)
-#define CS_SIDE_12000_B2                         0.032856
-#define CS_SIDE_12000_SCALE                          15
+#define CS_MIDDLE_12000_A0 0.229932
+#define CS_MIDDLE_12000_A1 (-0.221436)
+#define CS_MIDDLE_12000_A2 0.000000
+#define CS_MIDDLE_12000_B1 (-0.947616)
+#define CS_MIDDLE_12000_B2 0.000000
+#define CS_MIDDLE_12000_SCALE 15
+#define CS_SIDE_12000_A0 0.558398
+#define CS_SIDE_12000_A1 (-0.392211)
+#define CS_SIDE_12000_A2 (-0.166187)
+#define CS_SIDE_12000_B1 (-0.892550)
+#define CS_SIDE_12000_B2 0.032856
+#define CS_SIDE_12000_SCALE 15
 
 /* Stereo Enhancer coefficients for 16000Hz sample rate, scaled with 0.162371 */
-#define CS_MIDDLE_16000_A0                       0.230638
-#define CS_MIDDLE_16000_A1                      (-0.224232)
-#define CS_MIDDLE_16000_A2                       0.000000
-#define CS_MIDDLE_16000_B1                      (-0.960550)
-#define CS_MIDDLE_16000_B2                       0.000000
-#define CS_MIDDLE_16000_SCALE                        15
-#define CS_SIDE_16000_A0                         0.499695
-#define CS_SIDE_16000_A1                        (-0.355543)
-#define CS_SIDE_16000_A2                        (-0.144152)
-#define CS_SIDE_16000_B1                        (-1.050788)
-#define CS_SIDE_16000_B2                         0.144104
-#define CS_SIDE_16000_SCALE                          14
+#define CS_MIDDLE_16000_A0 0.230638
+#define CS_MIDDLE_16000_A1 (-0.224232)
+#define CS_MIDDLE_16000_A2 0.000000
+#define CS_MIDDLE_16000_B1 (-0.960550)
+#define CS_MIDDLE_16000_B2 0.000000
+#define CS_MIDDLE_16000_SCALE 15
+#define CS_SIDE_16000_A0 0.499695
+#define CS_SIDE_16000_A1 (-0.355543)
+#define CS_SIDE_16000_A2 (-0.144152)
+#define CS_SIDE_16000_B1 (-1.050788)
+#define CS_SIDE_16000_B2 0.144104
+#define CS_SIDE_16000_SCALE 14
 
 /* Stereo Enhancer coefficients for 22050Hz sample rate, scaled with 0.160781 */
-#define CS_MIDDLE_22050_A0                       0.228749
-#define CS_MIDDLE_22050_A1                      (-0.224128)
-#define CS_MIDDLE_22050_A2                       0.000000
-#define CS_MIDDLE_22050_B1                      (-0.971262)
-#define CS_MIDDLE_22050_B2                       0.000000
-#define CS_MIDDLE_22050_SCALE                        15
-#define CS_SIDE_22050_A0                          0.440112
-#define CS_SIDE_22050_A1                         (-0.261096)
-#define CS_SIDE_22050_A2                         (-0.179016)
-#define CS_SIDE_22050_B1                         (-1.116786)
-#define CS_SIDE_22050_B2                          0.182507
-#define CS_SIDE_22050_SCALE                          14
+#define CS_MIDDLE_22050_A0 0.228749
+#define CS_MIDDLE_22050_A1 (-0.224128)
+#define CS_MIDDLE_22050_A2 0.000000
+#define CS_MIDDLE_22050_B1 (-0.971262)
+#define CS_MIDDLE_22050_B2 0.000000
+#define CS_MIDDLE_22050_SCALE 15
+#define CS_SIDE_22050_A0 0.440112
+#define CS_SIDE_22050_A1 (-0.261096)
+#define CS_SIDE_22050_A2 (-0.179016)
+#define CS_SIDE_22050_B1 (-1.116786)
+#define CS_SIDE_22050_B2 0.182507
+#define CS_SIDE_22050_SCALE 14
 
 /* Stereo Enhancer coefficients for 24000Hz sample rate, scaled with 0.161882 */
-#define CS_MIDDLE_24000_A0                         0.230395
-#define CS_MIDDLE_24000_A1                        (-0.226117)
-#define CS_MIDDLE_24000_A2                         0.000000
-#define CS_MIDDLE_24000_B1                        (-0.973573)
-#define CS_MIDDLE_24000_B2                         0.000000
-#define CS_MIDDLE_24000_SCALE                        15
-#define CS_SIDE_24000_A0                           0.414770
-#define CS_SIDE_24000_A1                          (-0.287182)
-#define CS_SIDE_24000_A2                          (-0.127588)
-#define CS_SIDE_24000_B1                          (-1.229648)
-#define CS_SIDE_24000_B2                           0.282177
-#define CS_SIDE_24000_SCALE                          14
+#define CS_MIDDLE_24000_A0 0.230395
+#define CS_MIDDLE_24000_A1 (-0.226117)
+#define CS_MIDDLE_24000_A2 0.000000
+#define CS_MIDDLE_24000_B1 (-0.973573)
+#define CS_MIDDLE_24000_B2 0.000000
+#define CS_MIDDLE_24000_SCALE 15
+#define CS_SIDE_24000_A0 0.414770
+#define CS_SIDE_24000_A1 (-0.287182)
+#define CS_SIDE_24000_A2 (-0.127588)
+#define CS_SIDE_24000_B1 (-1.229648)
+#define CS_SIDE_24000_B2 0.282177
+#define CS_SIDE_24000_SCALE 14
 
 /* Stereo Enhancer coefficients for 32000Hz sample rate, scaled with 0.160322 */
-#define CS_MIDDLE_32000_A0                          0.228400
-#define CS_MIDDLE_32000_A1                         (-0.225214)
-#define CS_MIDDLE_32000_A2                          0.000000
-#define CS_MIDDLE_32000_B1                         (-0.980126)
-#define CS_MIDDLE_32000_B2                          0.000000
-#define CS_MIDDLE_32000_SCALE                        15
-#define CS_SIDE_32000_A0                            0.364579
-#define CS_SIDE_32000_A1                           (-0.207355)
-#define CS_SIDE_32000_A2                           (-0.157224)
-#define CS_SIDE_32000_B1                           (-1.274231)
-#define CS_SIDE_32000_B2                            0.312495
-#define CS_SIDE_32000_SCALE                          14
+#define CS_MIDDLE_32000_A0 0.228400
+#define CS_MIDDLE_32000_A1 (-0.225214)
+#define CS_MIDDLE_32000_A2 0.000000
+#define CS_MIDDLE_32000_B1 (-0.980126)
+#define CS_MIDDLE_32000_B2 0.000000
+#define CS_MIDDLE_32000_SCALE 15
+#define CS_SIDE_32000_A0 0.364579
+#define CS_SIDE_32000_A1 (-0.207355)
+#define CS_SIDE_32000_A2 (-0.157224)
+#define CS_SIDE_32000_B1 (-1.274231)
+#define CS_SIDE_32000_B2 0.312495
+#define CS_SIDE_32000_SCALE 14
 
 /* Stereo Enhancer coefficients for 44100Hz sample rate, scaled with 0.163834 */
-#define CS_MIDDLE_44100_A0                     0.233593
-#define CS_MIDDLE_44100_A1                    (-0.231225)
-#define CS_MIDDLE_44100_A2                     0.000000
-#define CS_MIDDLE_44100_B1                    (-0.985545)
-#define CS_MIDDLE_44100_B2                     0.000000
-#define CS_MIDDLE_44100_SCALE                        15
-#define CS_SIDE_44100_A0                       0.284573
-#define CS_SIDE_44100_A1                      (-0.258910)
-#define CS_SIDE_44100_A2                      (-0.025662)
-#define CS_SIDE_44100_B1                      (-1.572248)
-#define CS_SIDE_44100_B2                       0.588399
-#define CS_SIDE_44100_SCALE                  14
+#define CS_MIDDLE_44100_A0 0.233593
+#define CS_MIDDLE_44100_A1 (-0.231225)
+#define CS_MIDDLE_44100_A2 0.000000
+#define CS_MIDDLE_44100_B1 (-0.985545)
+#define CS_MIDDLE_44100_B2 0.000000
+#define CS_MIDDLE_44100_SCALE 15
+#define CS_SIDE_44100_A0 0.284573
+#define CS_SIDE_44100_A1 (-0.258910)
+#define CS_SIDE_44100_A2 (-0.025662)
+#define CS_SIDE_44100_B1 (-1.572248)
+#define CS_SIDE_44100_B2 0.588399
+#define CS_SIDE_44100_SCALE 14
 
 /* Stereo Enhancer coefficients for 48000Hz sample rate, scaled with 0.164402 */
-#define CS_MIDDLE_48000_A0                     0.234445
-#define CS_MIDDLE_48000_A1                    (-0.232261)
-#define CS_MIDDLE_48000_A2                     0.000000
-#define CS_MIDDLE_48000_B1                    (-0.986713)
-#define CS_MIDDLE_48000_B2                     0.000000
-#define CS_MIDDLE_48000_SCALE                        15
-#define CS_SIDE_48000_A0                     0.272606
-#define CS_SIDE_48000_A1                    (-0.266952)
-#define CS_SIDE_48000_A2                    (-0.005654)
-#define CS_SIDE_48000_B1                    (-1.617141)
-#define CS_SIDE_48000_B2                     0.630405
-#define CS_SIDE_48000_SCALE                          14
+#define CS_MIDDLE_48000_A0 0.234445
+#define CS_MIDDLE_48000_A1 (-0.232261)
+#define CS_MIDDLE_48000_A2 0.000000
+#define CS_MIDDLE_48000_B1 (-0.986713)
+#define CS_MIDDLE_48000_B2 0.000000
+#define CS_MIDDLE_48000_SCALE 15
+#define CS_SIDE_48000_A0 0.272606
+#define CS_SIDE_48000_A1 (-0.266952)
+#define CS_SIDE_48000_A2 (-0.005654)
+#define CS_SIDE_48000_B1 (-1.617141)
+#define CS_SIDE_48000_B2 0.630405
+#define CS_SIDE_48000_SCALE 14
 
 /* Coefficients for 88200Hz sample rate.
  * The filter coefficients are obtained by carrying out
  * state-space analysis using the coefficients available
  * for 44100Hz.
  */
-#define CS_MIDDLE_88200_A0                     0.233846f
-#define CS_MIDDLE_88200_A1                     (-0.232657f)
-#define CS_MIDDLE_88200_A2                     0.000000f
-#define CS_MIDDLE_88200_B1                     (-0.992747f)
-#define CS_MIDDLE_88200_B2                     0.000000f
-#define CS_MIDDLE_88200_SCALE                  15
-#define CS_SIDE_88200_A0                       0.231541f
-#define CS_SIDE_88200_A1                       (-0.289586f)
-#define CS_SIDE_88200_A2                       0.058045f
-#define CS_SIDE_88200_B1                       (-1.765300f)
-#define CS_SIDE_88200_B2                       0.769816f
-#define CS_SIDE_88200_SCALE                    14
+#define CS_MIDDLE_88200_A0 0.233846f
+#define CS_MIDDLE_88200_A1 (-0.232657f)
+#define CS_MIDDLE_88200_A2 0.000000f
+#define CS_MIDDLE_88200_B1 (-0.992747f)
+#define CS_MIDDLE_88200_B2 0.000000f
+#define CS_MIDDLE_88200_SCALE 15
+#define CS_SIDE_88200_A0 0.231541f
+#define CS_SIDE_88200_A1 (-0.289586f)
+#define CS_SIDE_88200_A2 0.058045f
+#define CS_SIDE_88200_B1 (-1.765300f)
+#define CS_SIDE_88200_B2 0.769816f
+#define CS_SIDE_88200_SCALE 14
 
 /* Stereo Enhancer coefficients for 96000Hz sample rate, scaled with  0.165*/
 /* high pass filter with cutoff frequency 102.18 Hz*/
-#define CS_MIDDLE_96000_A0                     0.235532
-#define CS_MIDDLE_96000_A1                    (-0.234432)
-#define CS_MIDDLE_96000_A2                     0.000000
-#define CS_MIDDLE_96000_B1                    (-0.993334)
-#define CS_MIDDLE_96000_B2                     0.000000
-#define CS_MIDDLE_96000_SCALE                        15
+#define CS_MIDDLE_96000_A0 0.235532
+#define CS_MIDDLE_96000_A1 (-0.234432)
+#define CS_MIDDLE_96000_A2 0.000000
+#define CS_MIDDLE_96000_B1 (-0.993334)
+#define CS_MIDDLE_96000_B2 0.000000
+#define CS_MIDDLE_96000_SCALE 15
 /* Coefficients calculated using tf2ss and ss2tf functions based on
  * coefficients available for 48000Hz sampling frequency
  */
-#define CS_SIDE_96000_A0                     0.224326f
-#define CS_SIDE_96000_A1                     (-0.294937f)
-#define CS_SIDE_96000_A2                     0.070611f
-#define CS_SIDE_96000_B1                     (-1.792166f)
-#define CS_SIDE_96000_B2                     0.795830f
-#define CS_SIDE_96000_SCALE                  14
+#define CS_SIDE_96000_A0 0.224326f
+#define CS_SIDE_96000_A1 (-0.294937f)
+#define CS_SIDE_96000_A2 0.070611f
+#define CS_SIDE_96000_B1 (-1.792166f)
+#define CS_SIDE_96000_B2 0.795830f
+#define CS_SIDE_96000_SCALE 14
 
 /* Stereo Enhancer coefficients for 176400Hz sample rate.
  * The filter coefficients are obtained by carrying out
  * state-space analysis using the coefficients available
  * for 44100Hz.
  */
-#define CS_MIDDLE_176400_A0                     0.233973f
-#define CS_MIDDLE_176400_A1                     (-0.233378f)
-#define CS_MIDDLE_176400_A2                     0.000000f
-#define CS_MIDDLE_176400_B1                     (-0.996367f)
-#define CS_MIDDLE_176400_B2                     0.000000f
-#define CS_MIDDLE_176400_SCALE                  15
-#define CS_SIDE_176400_A0                       0.199836f
-#define CS_SIDE_176400_A1                       (-0.307544f)
-#define CS_SIDE_176400_A2                       0.107708f
-#define CS_SIDE_176400_B1                       (-1.876572f)
-#define CS_SIDE_176400_B2                       0.877771f
-#define CS_SIDE_176400_SCALE                    14
+#define CS_MIDDLE_176400_A0 0.233973f
+#define CS_MIDDLE_176400_A1 (-0.233378f)
+#define CS_MIDDLE_176400_A2 0.000000f
+#define CS_MIDDLE_176400_B1 (-0.996367f)
+#define CS_MIDDLE_176400_B2 0.000000f
+#define CS_MIDDLE_176400_SCALE 15
+#define CS_SIDE_176400_A0 0.199836f
+#define CS_SIDE_176400_A1 (-0.307544f)
+#define CS_SIDE_176400_A2 0.107708f
+#define CS_SIDE_176400_B1 (-1.876572f)
+#define CS_SIDE_176400_B2 0.877771f
+#define CS_SIDE_176400_SCALE 14
 
 /* Stereo Enhancer coefficients for 192000Hz sample rate, scaled with  0.1689*/
-#define CS_MIDDLE_192000_A0                     0.241219
-#define CS_MIDDLE_192000_A1                    (-0.240656)
-#define CS_MIDDLE_192000_A2                     0.000000
-#define CS_MIDDLE_192000_B1                    (-0.996661)
-#define CS_MIDDLE_192000_B2                     0.000000
-#define CS_MIDDLE_192000_SCALE                        15
+#define CS_MIDDLE_192000_A0 0.241219
+#define CS_MIDDLE_192000_A1 (-0.240656)
+#define CS_MIDDLE_192000_A2 0.000000
+#define CS_MIDDLE_192000_B1 (-0.996661)
+#define CS_MIDDLE_192000_B2 0.000000
+#define CS_MIDDLE_192000_SCALE 15
 /* Coefficients calculated using tf2ss and ss2tf functions based on
  * coefficients available for 48000Hz sampling frequency
  */
-#define CS_SIDE_192000_A0                    0.196039f
-#define CS_SIDE_192000_A1                    (-0.311027f)
-#define CS_SIDE_192000_A2                    0.114988f
-#define CS_SIDE_192000_B1                    (-1.891380f)
-#define CS_SIDE_192000_B2                    0.8923460f
-#define CS_SIDE_192000_SCALE                 14
+#define CS_SIDE_192000_A0 0.196039f
+#define CS_SIDE_192000_A1 (-0.311027f)
+#define CS_SIDE_192000_A2 0.114988f
+#define CS_SIDE_192000_B1 (-1.891380f)
+#define CS_SIDE_192000_B2 0.8923460f
+#define CS_SIDE_192000_SCALE 14
 
 /************************************************************************************/
 /*                                                                                  */
@@ -227,133 +227,133 @@
 /************************************************************************************/
 
 /* Reverb delay settings in samples */
-#define LVCS_STEREODELAY_CS_8KHZ                     93         /* Sample rate 8kS/s */
-#define LVCS_STEREODELAY_CS_11KHZ                   128         /* Sample rate 11kS/s */
-#define LVCS_STEREODELAY_CS_12KHZ                   139         /* Sample rate 12kS/s */
-#define LVCS_STEREODELAY_CS_16KHZ                   186         /* Sample rate 16kS/s */
-#define LVCS_STEREODELAY_CS_22KHZ                   256         /* Sample rate 22kS/s */
-#define LVCS_STEREODELAY_CS_24KHZ                   279         /* Sample rate 24kS/s */
-#define LVCS_STEREODELAY_CS_32KHZ                   372         /* Sample rate 32kS/s */
-#define LVCS_STEREODELAY_CS_44KHZ                   512         /* Sample rate 44kS/s */
-#define LVCS_STEREODELAY_CS_48KHZ                   557         /* Sample rate 48kS/s */
-#define LVCS_STEREODELAY_CS_88KHZ                   1024        /* Sample rate 88.2kS/s */
-#define LVCS_STEREODELAY_CS_96KHZ                   1115        /* Sample rate 96kS/s */
-#define LVCS_STEREODELAY_CS_176KHZ                  2048        /* Sample rate 176.4kS/s */
-#define LVCS_STEREODELAY_CS_192KHZ                  2229        /* Sample rate 196kS/s */
-#define LVCS_STEREODELAY_CS_MAX_VAL                 LVCS_STEREODELAY_CS_192KHZ
+#define LVCS_STEREODELAY_CS_8KHZ 93     /* Sample rate 8kS/s */
+#define LVCS_STEREODELAY_CS_11KHZ 128   /* Sample rate 11kS/s */
+#define LVCS_STEREODELAY_CS_12KHZ 139   /* Sample rate 12kS/s */
+#define LVCS_STEREODELAY_CS_16KHZ 186   /* Sample rate 16kS/s */
+#define LVCS_STEREODELAY_CS_22KHZ 256   /* Sample rate 22kS/s */
+#define LVCS_STEREODELAY_CS_24KHZ 279   /* Sample rate 24kS/s */
+#define LVCS_STEREODELAY_CS_32KHZ 372   /* Sample rate 32kS/s */
+#define LVCS_STEREODELAY_CS_44KHZ 512   /* Sample rate 44kS/s */
+#define LVCS_STEREODELAY_CS_48KHZ 557   /* Sample rate 48kS/s */
+#define LVCS_STEREODELAY_CS_88KHZ 1024  /* Sample rate 88.2kS/s */
+#define LVCS_STEREODELAY_CS_96KHZ 1115  /* Sample rate 96kS/s */
+#define LVCS_STEREODELAY_CS_176KHZ 2048 /* Sample rate 176.4kS/s */
+#define LVCS_STEREODELAY_CS_192KHZ 2229 /* Sample rate 196kS/s */
+#define LVCS_STEREODELAY_CS_MAX_VAL LVCS_STEREODELAY_CS_192KHZ
 
 /* Reverb coefficients for 8000 Hz sample rate, scaled with 1.038030 */
-#define CS_REVERB_8000_A0                          0.667271
-#define CS_REVERB_8000_A1                         (-0.667271)
-#define CS_REVERB_8000_A2                          0.000000
-#define CS_REVERB_8000_B1                         (-0.668179)
-#define CS_REVERB_8000_B2                          0.000000
-#define CS_REVERB_8000_SCALE                         15
+#define CS_REVERB_8000_A0 0.667271
+#define CS_REVERB_8000_A1 (-0.667271)
+#define CS_REVERB_8000_A2 0.000000
+#define CS_REVERB_8000_B1 (-0.668179)
+#define CS_REVERB_8000_B2 0.000000
+#define CS_REVERB_8000_SCALE 15
 
 /* Reverb coefficients for 11025Hz sample rate, scaled with 1.038030 */
-#define CS_REVERB_11025_A0                     0.699638
-#define CS_REVERB_11025_A1                    (-0.699638)
-#define CS_REVERB_11025_A2                     0.000000
-#define CS_REVERB_11025_B1                    (-0.749096)
-#define CS_REVERB_11025_B2                     0.000000
-#define CS_REVERB_11025_SCALE                  15
+#define CS_REVERB_11025_A0 0.699638
+#define CS_REVERB_11025_A1 (-0.699638)
+#define CS_REVERB_11025_A2 0.000000
+#define CS_REVERB_11025_B1 (-0.749096)
+#define CS_REVERB_11025_B2 0.000000
+#define CS_REVERB_11025_SCALE 15
 
 /* Reverb coefficients for 12000Hz sample rate, scaled with 1.038030 */
-#define CS_REVERB_12000_A0                   0.706931
-#define CS_REVERB_12000_A1                  (-0.706931)
-#define CS_REVERB_12000_A2                   0.000000
-#define CS_REVERB_12000_B1                  (-0.767327)
-#define CS_REVERB_12000_B2                   0.000000
-#define CS_REVERB_12000_SCALE                15
+#define CS_REVERB_12000_A0 0.706931
+#define CS_REVERB_12000_A1 (-0.706931)
+#define CS_REVERB_12000_A2 0.000000
+#define CS_REVERB_12000_B1 (-0.767327)
+#define CS_REVERB_12000_B2 0.000000
+#define CS_REVERB_12000_SCALE 15
 
 /* Reverb coefficients for 16000Hz sample rate, scaled with 1.038030 */
-#define CS_REVERB_16000_A0                      0.728272
-#define CS_REVERB_16000_A1                     (-0.728272)
-#define CS_REVERB_16000_A2                      0.000000
-#define CS_REVERB_16000_B1                     (-0.820679)
-#define CS_REVERB_16000_B2                      0.000000
-#define CS_REVERB_16000_SCALE                        15
+#define CS_REVERB_16000_A0 0.728272
+#define CS_REVERB_16000_A1 (-0.728272)
+#define CS_REVERB_16000_A2 0.000000
+#define CS_REVERB_16000_B1 (-0.820679)
+#define CS_REVERB_16000_B2 0.000000
+#define CS_REVERB_16000_SCALE 15
 
 /* Reverb coefficients for 22050Hz sample rate, scaled with 1.038030 */
-#define CS_REVERB_22050_A0                     0.516396
-#define CS_REVERB_22050_A1                     0.000000
-#define CS_REVERB_22050_A2                    (-0.516396)
-#define CS_REVERB_22050_B1                    (-0.518512)
-#define CS_REVERB_22050_B2                    (-0.290990)
-#define CS_REVERB_22050_SCALE                        15
+#define CS_REVERB_22050_A0 0.516396
+#define CS_REVERB_22050_A1 0.000000
+#define CS_REVERB_22050_A2 (-0.516396)
+#define CS_REVERB_22050_B1 (-0.518512)
+#define CS_REVERB_22050_B2 (-0.290990)
+#define CS_REVERB_22050_SCALE 15
 
 /* Reverb coefficients for 24000Hz sample rate, scaled with 1.038030 */
-#define CS_REVERB_24000_A0                       0.479565
-#define CS_REVERB_24000_A1                       0.000000
-#define CS_REVERB_24000_A2                      (-0.479565)
-#define CS_REVERB_24000_B1                      (-0.637745)
-#define CS_REVERB_24000_B2                      (-0.198912)
-#define CS_REVERB_24000_SCALE                        15
+#define CS_REVERB_24000_A0 0.479565
+#define CS_REVERB_24000_A1 0.000000
+#define CS_REVERB_24000_A2 (-0.479565)
+#define CS_REVERB_24000_B1 (-0.637745)
+#define CS_REVERB_24000_B2 (-0.198912)
+#define CS_REVERB_24000_SCALE 15
 
 /* Reverb coefficients for 32000Hz sample rate, scaled with 1.038030 */
-#define CS_REVERB_32000_A0                      0.380349
-#define CS_REVERB_32000_A1                      0.000000
-#define CS_REVERB_32000_A2                     (-0.380349)
-#define CS_REVERB_32000_B1                     (-0.950873)
-#define CS_REVERB_32000_B2                      0.049127
-#define CS_REVERB_32000_SCALE                        15
+#define CS_REVERB_32000_A0 0.380349
+#define CS_REVERB_32000_A1 0.000000
+#define CS_REVERB_32000_A2 (-0.380349)
+#define CS_REVERB_32000_B1 (-0.950873)
+#define CS_REVERB_32000_B2 0.049127
+#define CS_REVERB_32000_SCALE 15
 
 /* Reverb coefficients for 44100Hz sample rate, scaled with 1.038030 */
-#define CS_REVERB_44100_A0                         0.297389
-#define CS_REVERB_44100_A1                         0.000000
-#define CS_REVERB_44100_A2                        (-0.297389)
-#define CS_REVERB_44100_B1                        (-1.200423)
-#define CS_REVERB_44100_B2                         0.256529
-#define CS_REVERB_44100_SCALE                        14
+#define CS_REVERB_44100_A0 0.297389
+#define CS_REVERB_44100_A1 0.000000
+#define CS_REVERB_44100_A2 (-0.297389)
+#define CS_REVERB_44100_B1 (-1.200423)
+#define CS_REVERB_44100_B2 0.256529
+#define CS_REVERB_44100_SCALE 14
 
 /* Reverb coefficients for 48000Hz sample rate, scaled with 1.038030 */
-#define CS_REVERB_48000_A0                       0.278661
-#define CS_REVERB_48000_A1                       0.000000
-#define CS_REVERB_48000_A2                      (-0.278661)
-#define CS_REVERB_48000_B1                      (-1.254993)
-#define CS_REVERB_48000_B2                       0.303347
-#define CS_REVERB_48000_SCALE                        14
+#define CS_REVERB_48000_A0 0.278661
+#define CS_REVERB_48000_A1 0.000000
+#define CS_REVERB_48000_A2 (-0.278661)
+#define CS_REVERB_48000_B1 (-1.254993)
+#define CS_REVERB_48000_B2 0.303347
+#define CS_REVERB_48000_SCALE 14
 
 /* Reverb coefficients for 88200Hz sample rate, scaled with 0.8 */
 /* Band pass filter with fc1=500 and fc2=8000 */
-#define CS_REVERB_88200_A0                       0.171901f
-#define CS_REVERB_88200_A1                       0.000000f
-#define CS_REVERB_88200_A2                      (-0.171901f)
-#define CS_REVERB_88200_B1                      (-1.553948f)
-#define CS_REVERB_88200_B2                      (0.570248f)
-#define CS_REVERB_88200_SCALE                      14
+#define CS_REVERB_88200_A0 0.171901f
+#define CS_REVERB_88200_A1 0.000000f
+#define CS_REVERB_88200_A2 (-0.171901f)
+#define CS_REVERB_88200_B1 (-1.553948f)
+#define CS_REVERB_88200_B2 (0.570248f)
+#define CS_REVERB_88200_SCALE 14
 /* Reverb coefficients for 96000Hz sample rate, scaled with 0.8 */
 /* Band pass filter with fc1=500 and fc2=8000*/
-#define CS_REVERB_96000_A0                       0.1602488
-#define CS_REVERB_96000_A1                       0.000000
-#define CS_REVERB_96000_A2                      (-0.1602488)
-#define CS_REVERB_96000_B1                      (-1.585413)
-#define CS_REVERB_96000_B2                       0.599377
-#define CS_REVERB_96000_SCALE                        14
+#define CS_REVERB_96000_A0 0.1602488
+#define CS_REVERB_96000_A1 0.000000
+#define CS_REVERB_96000_A2 (-0.1602488)
+#define CS_REVERB_96000_B1 (-1.585413)
+#define CS_REVERB_96000_B2 0.599377
+#define CS_REVERB_96000_SCALE 14
 
 /* Reverb coefficients for 176400Hz sample rate, scaled with 0.8 */
 /* Band pass filter with fc1=500 and fc2=8000 */
-#define CS_REVERB_176400_A0                       0.094763f
-#define CS_REVERB_176400_A1                       0.000000f
-#define CS_REVERB_176400_A2                      (-0.094763f)
-#define CS_REVERB_176400_B1                      (-1.758593f)
-#define CS_REVERB_176400_B2                      (0.763091f)
-#define CS_REVERB_176400_SCALE                      14
+#define CS_REVERB_176400_A0 0.094763f
+#define CS_REVERB_176400_A1 0.000000f
+#define CS_REVERB_176400_A2 (-0.094763f)
+#define CS_REVERB_176400_B1 (-1.758593f)
+#define CS_REVERB_176400_B2 (0.763091f)
+#define CS_REVERB_176400_SCALE 14
 /* Reverb coefficients for 192000Hz sample rate, scaled with 0.8 */
 /* Band pass filter with fc1=500 and fc2=8000*/
-#define CS_REVERB_192000_A0                       0.0878369
-#define CS_REVERB_192000_A1                       0.000000
-#define CS_REVERB_192000_A2                      (-0.0878369)
-#define CS_REVERB_192000_B1                      (-1.7765764)
-#define CS_REVERB_192000_B2                       0.7804076
-#define CS_REVERB_192000_SCALE                        14
+#define CS_REVERB_192000_A0 0.0878369
+#define CS_REVERB_192000_A1 0.000000
+#define CS_REVERB_192000_A2 (-0.0878369)
+#define CS_REVERB_192000_B1 (-1.7765764)
+#define CS_REVERB_192000_B2 0.7804076
+#define CS_REVERB_192000_SCALE 14
 
 /* Reverb Gain Settings */
-#define LVCS_HEADPHONE_DELAYGAIN               0.800000         /* Algorithm delay path gain */
-#define LVCS_HEADPHONE_OUTPUTGAIN              1.000000         /* Algorithm output gain */
-#define LVCS_HEADPHONE_PROCGAIN                   18403         /* Processed path gain */
-#define LVCS_HEADPHONE_UNPROCGAIN                 18403         /* Unprocessed path gain */
-#define LVCS_HEADPHONE_GAINCORRECT             1.009343         /* Delay mixer gain correction */
+#define LVCS_HEADPHONE_DELAYGAIN 0.800000   /* Algorithm delay path gain */
+#define LVCS_HEADPHONE_OUTPUTGAIN 1.000000  /* Algorithm output gain */
+#define LVCS_HEADPHONE_PROCGAIN 18403       /* Processed path gain */
+#define LVCS_HEADPHONE_UNPROCGAIN 18403     /* Unprocessed path gain */
+#define LVCS_HEADPHONE_GAINCORRECT 1.009343 /* Delay mixer gain correction */
 
 /************************************************************************************/
 /*                                                                                  */
@@ -363,205 +363,204 @@
 
 /* Equaliser coefficients for 8000 Hz sample rate, \
    CS scaled with 1.038497 and CSEX scaled with 0.775480 */
-#define CS_EQUALISER_8000_A0                     1.263312
-#define CS_EQUALISER_8000_A1                    (-0.601748)
-#define CS_EQUALISER_8000_A2                    (-0.280681)
-#define CS_EQUALISER_8000_B1                    (-0.475865)
-#define CS_EQUALISER_8000_B2                    (-0.408154)
-#define CS_EQUALISER_8000_SCALE                      14
-#define CSEX_EQUALISER_8000_A0                    0.943357
-#define CSEX_EQUALISER_8000_A1                   (-0.449345)
-#define CSEX_EQUALISER_8000_A2                   (-0.209594)
-#define CSEX_EQUALISER_8000_B1                   (-0.475865)
-#define CSEX_EQUALISER_8000_B2                   (-0.408154)
-#define CSEX_EQUALISER_8000_SCALE                    15
+#define CS_EQUALISER_8000_A0 1.263312
+#define CS_EQUALISER_8000_A1 (-0.601748)
+#define CS_EQUALISER_8000_A2 (-0.280681)
+#define CS_EQUALISER_8000_B1 (-0.475865)
+#define CS_EQUALISER_8000_B2 (-0.408154)
+#define CS_EQUALISER_8000_SCALE 14
+#define CSEX_EQUALISER_8000_A0 0.943357
+#define CSEX_EQUALISER_8000_A1 (-0.449345)
+#define CSEX_EQUALISER_8000_A2 (-0.209594)
+#define CSEX_EQUALISER_8000_B1 (-0.475865)
+#define CSEX_EQUALISER_8000_B2 (-0.408154)
+#define CSEX_EQUALISER_8000_SCALE 15
 
 /* Equaliser coefficients for 11025Hz sample rate, \
    CS scaled with 1.027761 and CSEX scaled with 0.767463 */
-#define CS_EQUALISER_11025_A0                    1.101145
-#define CS_EQUALISER_11025_A1                    0.139020
-#define CS_EQUALISER_11025_A2                   (-0.864423)
-#define CS_EQUALISER_11025_B1                    0.024541
-#define CS_EQUALISER_11025_B2                   (-0.908930)
-#define CS_EQUALISER_11025_SCALE                     14
-#define CSEX_EQUALISER_11025_A0                    0.976058
-#define CSEX_EQUALISER_11025_A1                   (-0.695326)
-#define CSEX_EQUALISER_11025_A2                   (-0.090809)
-#define CSEX_EQUALISER_11025_B1                   (-0.610594)
-#define CSEX_EQUALISER_11025_B2                   (-0.311149)
-#define CSEX_EQUALISER_11025_SCALE                   15
+#define CS_EQUALISER_11025_A0 1.101145
+#define CS_EQUALISER_11025_A1 0.139020
+#define CS_EQUALISER_11025_A2 (-0.864423)
+#define CS_EQUALISER_11025_B1 0.024541
+#define CS_EQUALISER_11025_B2 (-0.908930)
+#define CS_EQUALISER_11025_SCALE 14
+#define CSEX_EQUALISER_11025_A0 0.976058
+#define CSEX_EQUALISER_11025_A1 (-0.695326)
+#define CSEX_EQUALISER_11025_A2 (-0.090809)
+#define CSEX_EQUALISER_11025_B1 (-0.610594)
+#define CSEX_EQUALISER_11025_B2 (-0.311149)
+#define CSEX_EQUALISER_11025_SCALE 15
 
 /* Equaliser coefficients for 12000Hz sample rate, \
    CS scaled with 1.032521 and CSEX scaled with 0.771017 */
-#define CS_EQUALISER_12000_A0                      1.276661
-#define CS_EQUALISER_12000_A1                     (-1.017519)
-#define CS_EQUALISER_12000_A2                     (-0.044128)
-#define CS_EQUALISER_12000_B1                     (-0.729616)
-#define CS_EQUALISER_12000_B2                     (-0.204532)
-#define CS_EQUALISER_12000_SCALE                     14
-#define CSEX_EQUALISER_12000_A0                 1.007095
-#define CSEX_EQUALISER_12000_A1                (-0.871912)
-#define CSEX_EQUALISER_12000_A2                 0.023232
-#define CSEX_EQUALISER_12000_B1                (-0.745857)
-#define CSEX_EQUALISER_12000_B2                (-0.189171)
-#define CSEX_EQUALISER_12000_SCALE                   14
+#define CS_EQUALISER_12000_A0 1.276661
+#define CS_EQUALISER_12000_A1 (-1.017519)
+#define CS_EQUALISER_12000_A2 (-0.044128)
+#define CS_EQUALISER_12000_B1 (-0.729616)
+#define CS_EQUALISER_12000_B2 (-0.204532)
+#define CS_EQUALISER_12000_SCALE 14
+#define CSEX_EQUALISER_12000_A0 1.007095
+#define CSEX_EQUALISER_12000_A1 (-0.871912)
+#define CSEX_EQUALISER_12000_A2 0.023232
+#define CSEX_EQUALISER_12000_B1 (-0.745857)
+#define CSEX_EQUALISER_12000_B2 (-0.189171)
+#define CSEX_EQUALISER_12000_SCALE 14
 
 /* Equaliser coefficients for 16000Hz sample rate, \
    CS scaled with 1.031378 and CSEX scaled with 0.770164 */
-#define CS_EQUALISER_16000_A0                     1.281629
-#define CS_EQUALISER_16000_A1                    (-1.075872)
-#define CS_EQUALISER_16000_A2                    (-0.041365)
-#define CS_EQUALISER_16000_B1                    (-0.725239)
-#define CS_EQUALISER_16000_B2                    (-0.224358)
-#define CS_EQUALISER_16000_SCALE                     14
-#define CSEX_EQUALISER_16000_A0                  1.081091
-#define CSEX_EQUALISER_16000_A1                 (-0.867183)
-#define CSEX_EQUALISER_16000_A2                 (-0.070247)
-#define CSEX_EQUALISER_16000_B1                 (-0.515121)
-#define CSEX_EQUALISER_16000_B2                 (-0.425893)
-#define CSEX_EQUALISER_16000_SCALE                   14
+#define CS_EQUALISER_16000_A0 1.281629
+#define CS_EQUALISER_16000_A1 (-1.075872)
+#define CS_EQUALISER_16000_A2 (-0.041365)
+#define CS_EQUALISER_16000_B1 (-0.725239)
+#define CS_EQUALISER_16000_B2 (-0.224358)
+#define CS_EQUALISER_16000_SCALE 14
+#define CSEX_EQUALISER_16000_A0 1.081091
+#define CSEX_EQUALISER_16000_A1 (-0.867183)
+#define CSEX_EQUALISER_16000_A2 (-0.070247)
+#define CSEX_EQUALISER_16000_B1 (-0.515121)
+#define CSEX_EQUALISER_16000_B2 (-0.425893)
+#define CSEX_EQUALISER_16000_SCALE 14
 
 /* Equaliser coefficients for 22050Hz sample rate, \
    CS scaled with 1.041576 and CSEX scaled with 0.777779 */
-#define CS_EQUALISER_22050_A0                   1.388605
-#define CS_EQUALISER_22050_A1                  (-1.305799)
-#define CS_EQUALISER_22050_A2                   0.039922
-#define CS_EQUALISER_22050_B1                  (-0.719494)
-#define CS_EQUALISER_22050_B2                  (-0.243245)
-#define CS_EQUALISER_22050_SCALE                     14
-#define CSEX_EQUALISER_22050_A0                   1.272910
-#define CSEX_EQUALISER_22050_A1                  (-1.341014)
-#define CSEX_EQUALISER_22050_A2                   0.167462
-#define CSEX_EQUALISER_22050_B1                  (-0.614219)
-#define CSEX_EQUALISER_22050_B2                  (-0.345384)
-#define CSEX_EQUALISER_22050_SCALE                   14
+#define CS_EQUALISER_22050_A0 1.388605
+#define CS_EQUALISER_22050_A1 (-1.305799)
+#define CS_EQUALISER_22050_A2 0.039922
+#define CS_EQUALISER_22050_B1 (-0.719494)
+#define CS_EQUALISER_22050_B2 (-0.243245)
+#define CS_EQUALISER_22050_SCALE 14
+#define CSEX_EQUALISER_22050_A0 1.272910
+#define CSEX_EQUALISER_22050_A1 (-1.341014)
+#define CSEX_EQUALISER_22050_A2 0.167462
+#define CSEX_EQUALISER_22050_B1 (-0.614219)
+#define CSEX_EQUALISER_22050_B2 (-0.345384)
+#define CSEX_EQUALISER_22050_SCALE 14
 
 /* Equaliser coefficients for 24000Hz sample rate, \
    CS scaled with 1.034495 and CSEX scaled with 0.772491 */
-#define CS_EQUALISER_24000_A0                    1.409832
-#define CS_EQUALISER_24000_A1                   (-1.456506)
-#define CS_EQUALISER_24000_A2                    0.151410
-#define CS_EQUALISER_24000_B1                   (-0.804201)
-#define CS_EQUALISER_24000_B2                   (-0.163783)
-#define CS_EQUALISER_24000_SCALE                     14
-#define CSEX_EQUALISER_24000_A0                  1.299198
-#define CSEX_EQUALISER_24000_A1                 (-1.452447)
-#define CSEX_EQUALISER_24000_A2                  0.240489
-#define CSEX_EQUALISER_24000_B1                 (-0.669303)
-#define CSEX_EQUALISER_24000_B2                 (-0.294984)
-#define CSEX_EQUALISER_24000_SCALE                   14
+#define CS_EQUALISER_24000_A0 1.409832
+#define CS_EQUALISER_24000_A1 (-1.456506)
+#define CS_EQUALISER_24000_A2 0.151410
+#define CS_EQUALISER_24000_B1 (-0.804201)
+#define CS_EQUALISER_24000_B2 (-0.163783)
+#define CS_EQUALISER_24000_SCALE 14
+#define CSEX_EQUALISER_24000_A0 1.299198
+#define CSEX_EQUALISER_24000_A1 (-1.452447)
+#define CSEX_EQUALISER_24000_A2 0.240489
+#define CSEX_EQUALISER_24000_B1 (-0.669303)
+#define CSEX_EQUALISER_24000_B2 (-0.294984)
+#define CSEX_EQUALISER_24000_SCALE 14
 
 /* Equaliser coefficients for 32000Hz sample rate, \
    CS scaled with 1.044559 and CSEX scaled with 0.780006 */
-#define CS_EQUALISER_32000_A0                     1.560988
-#define CS_EQUALISER_32000_A1                    (-1.877724)
-#define CS_EQUALISER_32000_A2                     0.389741
-#define CS_EQUALISER_32000_B1                    (-0.907410)
-#define CS_EQUALISER_32000_B2                    (-0.070489)
-#define CS_EQUALISER_32000_SCALE                     14
-#define CSEX_EQUALISER_32000_A0                  1.785049
-#define CSEX_EQUALISER_32000_A1                 (-2.233497)
-#define CSEX_EQUALISER_32000_A2                  0.526431
-#define CSEX_EQUALISER_32000_B1                 (-0.445939)
-#define CSEX_EQUALISER_32000_B2                 (-0.522446)
-#define CSEX_EQUALISER_32000_SCALE                   13
+#define CS_EQUALISER_32000_A0 1.560988
+#define CS_EQUALISER_32000_A1 (-1.877724)
+#define CS_EQUALISER_32000_A2 0.389741
+#define CS_EQUALISER_32000_B1 (-0.907410)
+#define CS_EQUALISER_32000_B2 (-0.070489)
+#define CS_EQUALISER_32000_SCALE 14
+#define CSEX_EQUALISER_32000_A0 1.785049
+#define CSEX_EQUALISER_32000_A1 (-2.233497)
+#define CSEX_EQUALISER_32000_A2 0.526431
+#define CSEX_EQUALISER_32000_B1 (-0.445939)
+#define CSEX_EQUALISER_32000_B2 (-0.522446)
+#define CSEX_EQUALISER_32000_SCALE 13
 
 /* Equaliser coefficients for 44100Hz sample rate, \
    CS scaled with 1.022170 and CSEX scaled with 0.763288 */
-#define CS_EQUALISER_44100_A0                  1.623993
-#define CS_EQUALISER_44100_A1                 (-2.270743)
-#define CS_EQUALISER_44100_A2                  0.688829
-#define CS_EQUALISER_44100_B1                 (-1.117190)
-#define CS_EQUALISER_44100_B2                  0.130208
-#define CS_EQUALISER_44100_SCALE                     13
-#define CSEX_EQUALISER_44100_A0                   2.028315
-#define CSEX_EQUALISER_44100_A1                  (-2.882459)
-#define CSEX_EQUALISER_44100_A2                   0.904535
-#define CSEX_EQUALISER_44100_B1                  (-0.593308)
-#define CSEX_EQUALISER_44100_B2                  (-0.385816)
-#define CSEX_EQUALISER_44100_SCALE                   13
+#define CS_EQUALISER_44100_A0 1.623993
+#define CS_EQUALISER_44100_A1 (-2.270743)
+#define CS_EQUALISER_44100_A2 0.688829
+#define CS_EQUALISER_44100_B1 (-1.117190)
+#define CS_EQUALISER_44100_B2 0.130208
+#define CS_EQUALISER_44100_SCALE 13
+#define CSEX_EQUALISER_44100_A0 2.028315
+#define CSEX_EQUALISER_44100_A1 (-2.882459)
+#define CSEX_EQUALISER_44100_A2 0.904535
+#define CSEX_EQUALISER_44100_B1 (-0.593308)
+#define CSEX_EQUALISER_44100_B2 (-0.385816)
+#define CSEX_EQUALISER_44100_SCALE 13
 
 /* Equaliser coefficients for 48000Hz sample rate, \
    CS scaled with 1.018635 and CSEX scaled with 0.760648 */
-#define CS_EQUALISER_48000_A0                    1.641177
-#define CS_EQUALISER_48000_A1                   (-2.364687)
-#define CS_EQUALISER_48000_A2                    0.759910
-#define CS_EQUALISER_48000_B1                   (-1.166774)
-#define CS_EQUALISER_48000_B2                    0.178074
-#define CS_EQUALISER_48000_SCALE                     13
-#define CSEX_EQUALISER_48000_A0                  2.099655
-#define CSEX_EQUALISER_48000_A1                 (-3.065220)
-#define CSEX_EQUALISER_48000_A2                  1.010417
-#define CSEX_EQUALISER_48000_B1                 (-0.634021)
-#define CSEX_EQUALISER_48000_B2                 (-0.347332)
-#define CSEX_EQUALISER_48000_SCALE                   13
+#define CS_EQUALISER_48000_A0 1.641177
+#define CS_EQUALISER_48000_A1 (-2.364687)
+#define CS_EQUALISER_48000_A2 0.759910
+#define CS_EQUALISER_48000_B1 (-1.166774)
+#define CS_EQUALISER_48000_B2 0.178074
+#define CS_EQUALISER_48000_SCALE 13
+#define CSEX_EQUALISER_48000_A0 2.099655
+#define CSEX_EQUALISER_48000_A1 (-3.065220)
+#define CSEX_EQUALISER_48000_A2 1.010417
+#define CSEX_EQUALISER_48000_B1 (-0.634021)
+#define CSEX_EQUALISER_48000_B2 (-0.347332)
+#define CSEX_EQUALISER_48000_SCALE 13
 
 /* Equaliser coefficients for 88200Hz sample rate.
  * The filter coefficients are obtained by carrying out
  * state-space analysis using the coefficients available
  * for 44100Hz.
  */
-#define CS_EQUALISER_88200_A0                   1.771899f
-#define CS_EQUALISER_88200_A1                   (-2.930762f)
-#define CS_EQUALISER_88200_A2                   1.172175f
-#define CS_EQUALISER_88200_B1                   (-1.438349f)
-#define CS_EQUALISER_88200_B2                   0.442520f
-#define CS_EQUALISER_88200_SCALE                13
-#define CSEX_EQUALISER_88200_A0                 2.675241f
-#define CSEX_EQUALISER_88200_A1                 (-4.466154f)
-#define CSEX_EQUALISER_88200_A2                 1.810305f
-#define CSEX_EQUALISER_88200_B1                 (-0.925350f)
-#define CSEX_EQUALISER_88200_B2                 (-0.066616f)
-#define CSEX_EQUALISER_88200_SCALE              13
+#define CS_EQUALISER_88200_A0 1.771899f
+#define CS_EQUALISER_88200_A1 (-2.930762f)
+#define CS_EQUALISER_88200_A2 1.172175f
+#define CS_EQUALISER_88200_B1 (-1.438349f)
+#define CS_EQUALISER_88200_B2 0.442520f
+#define CS_EQUALISER_88200_SCALE 13
+#define CSEX_EQUALISER_88200_A0 2.675241f
+#define CSEX_EQUALISER_88200_A1 (-4.466154f)
+#define CSEX_EQUALISER_88200_A2 1.810305f
+#define CSEX_EQUALISER_88200_B1 (-0.925350f)
+#define CSEX_EQUALISER_88200_B2 (-0.066616f)
+#define CSEX_EQUALISER_88200_SCALE 13
 
-#define CS_EQUALISER_96000_A0                    1.784497
-#define CS_EQUALISER_96000_A1                   (-3.001435)
-#define CS_EQUALISER_96000_A2                    1.228422
-#define CS_EQUALISER_96000_B1                   (-1.477804)
-#define CS_EQUALISER_96000_B2                    0.481369
-#define CS_EQUALISER_96000_SCALE                     13
-#define CSEX_EQUALISER_96000_A0                  2.7573
-#define CSEX_EQUALISER_96000_A1                 (-4.6721)
-#define CSEX_EQUALISER_96000_A2                  1.9317
-#define CSEX_EQUALISER_96000_B1                 (-0.971718)
-#define CSEX_EQUALISER_96000_B2                 (-0.021216)
-#define CSEX_EQUALISER_96000_SCALE                   13
+#define CS_EQUALISER_96000_A0 1.784497
+#define CS_EQUALISER_96000_A1 (-3.001435)
+#define CS_EQUALISER_96000_A2 1.228422
+#define CS_EQUALISER_96000_B1 (-1.477804)
+#define CS_EQUALISER_96000_B2 0.481369
+#define CS_EQUALISER_96000_SCALE 13
+#define CSEX_EQUALISER_96000_A0 2.7573
+#define CSEX_EQUALISER_96000_A1 (-4.6721)
+#define CSEX_EQUALISER_96000_A2 1.9317
+#define CSEX_EQUALISER_96000_B1 (-0.971718)
+#define CSEX_EQUALISER_96000_B2 (-0.021216)
+#define CSEX_EQUALISER_96000_SCALE 13
 /* Equaliser coefficients for 176400Hz sample rate.
  * The filter coefficients are obtained by carrying out
  * state-space analysis using the coefficients available
  * for 44100Hz.
  */
-#define CS_EQUALISER_176400_A0                  1.883440f
-#define CS_EQUALISER_176400_A1                  (-3.414272f)
-#define CS_EQUALISER_176400_A2                  1.534702f
-#define CS_EQUALISER_176400_B1                  (-1.674614f)
-#define CS_EQUALISER_176400_B2                  0.675827f
-#define CS_EQUALISER_176400_SCALE               13
-#define CSEX_EQUALISER_176400_A0                3.355068f
-#define CSEX_EQUALISER_176400_A1                (-6.112578f)
-#define CSEX_EQUALISER_176400_A2                2.764135f
-#define CSEX_EQUALISER_176400_B1                (-1.268533f)
-#define CSEX_EQUALISER_176400_B2                0.271277f
-#define CSEX_EQUALISER_176400_SCALE             13
+#define CS_EQUALISER_176400_A0 1.883440f
+#define CS_EQUALISER_176400_A1 (-3.414272f)
+#define CS_EQUALISER_176400_A2 1.534702f
+#define CS_EQUALISER_176400_B1 (-1.674614f)
+#define CS_EQUALISER_176400_B2 0.675827f
+#define CS_EQUALISER_176400_SCALE 13
+#define CSEX_EQUALISER_176400_A0 3.355068f
+#define CSEX_EQUALISER_176400_A1 (-6.112578f)
+#define CSEX_EQUALISER_176400_A2 2.764135f
+#define CSEX_EQUALISER_176400_B1 (-1.268533f)
+#define CSEX_EQUALISER_176400_B2 0.271277f
+#define CSEX_EQUALISER_176400_SCALE 13
 
-#define CS_EQUALISER_192000_A0                    1.889582
-#define CS_EQUALISER_192000_A1                   (-3.456140)
-#define CS_EQUALISER_192000_A2                    1.569864
-#define CS_EQUALISER_192000_B1                   (-1.700798)
-#define CS_EQUALISER_192000_B2                    0.701824
-#define CS_EQUALISER_192000_SCALE                     13
-#define CSEX_EQUALISER_192000_A0                  3.4273
-#define CSEX_EQUALISER_192000_A1                 (-6.2936)
-#define CSEX_EQUALISER_192000_A2                  2.8720
-#define CSEX_EQUALISER_192000_B1                 (-1.31074)
-#define CSEX_EQUALISER_192000_B2                 0.31312
-#define CSEX_EQUALISER_192000_SCALE                   13
+#define CS_EQUALISER_192000_A0 1.889582
+#define CS_EQUALISER_192000_A1 (-3.456140)
+#define CS_EQUALISER_192000_A2 1.569864
+#define CS_EQUALISER_192000_B1 (-1.700798)
+#define CS_EQUALISER_192000_B2 0.701824
+#define CS_EQUALISER_192000_SCALE 13
+#define CSEX_EQUALISER_192000_A0 3.4273
+#define CSEX_EQUALISER_192000_A1 (-6.2936)
+#define CSEX_EQUALISER_192000_A2 2.8720
+#define CSEX_EQUALISER_192000_B1 (-1.31074)
+#define CSEX_EQUALISER_192000_B2 0.31312
+#define CSEX_EQUALISER_192000_SCALE 13
 
-#define LVCS_HEADPHONE_SHIFT                          2              /* Output Shift */
-#define LVCS_HEADPHONE_SHIFTLOSS                  0.8477735          /* Output Shift loss */
-#define LVCS_HEADPHONE_GAIN                       0.2087465          /* Unprocessed path gain */
-#define LVCS_EX_HEADPHONE_SHIFT                       3              /* EX Output Shift */
-#define LVCS_EX_HEADPHONE_SHIFTLOSS               0.569225           /* EX Output Shift loss */
-#define LVCS_EX_HEADPHONE_GAIN                    0.07794425         /* EX Unprocessed path gain */
+#define LVCS_HEADPHONE_SHIFT 2               /* Output Shift */
+#define LVCS_HEADPHONE_SHIFTLOSS 0.8477735   /* Output Shift loss */
+#define LVCS_HEADPHONE_GAIN 0.2087465        /* Unprocessed path gain */
+#define LVCS_EX_HEADPHONE_SHIFT 3            /* EX Output Shift */
+#define LVCS_EX_HEADPHONE_SHIFTLOSS 0.569225 /* EX Output Shift loss */
+#define LVCS_EX_HEADPHONE_GAIN 0.07794425    /* EX Unprocessed path gain */
 #endif
-
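For orientation (not part of this change): each CS_REVERB_* / CS_EQUALISER_* group above parameterises one second-order (biquad) filter section, and the *_SCALE values appear to record the coefficient scaling used by the older fixed-point builds. Assuming the direct-form convention used by the LVM biquad sources, a single 44100 Hz reverb step would look roughly like the sketch below; the helper name and state handling are illustrative only.

/* Illustrative sketch only: one direct-form biquad step using the CS_REVERB_44100_* set. */
/* Assumes the defines above are in scope and the usual LVM convention                    */
/* y[n] = A0*x[n] + A1*x[n-1] + A2*x[n-2] - B1*y[n-1] - B2*y[n-2].                        */
static inline float cs_reverb_44100_step(float x0, float x1, float x2, float y1, float y2) {
    return (float)(CS_REVERB_44100_A0 * x0 + CS_REVERB_44100_A1 * x1 + CS_REVERB_44100_A2 * x2 -
                   CS_REVERB_44100_B1 * y1 - CS_REVERB_44100_B2 * y2);
}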
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Init.cpp b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Init.cpp
index 630ecf7..5c8f1ae 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Init.cpp
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Init.cpp
@@ -20,99 +20,11 @@
 /*  Includes                                                                        */
 /*                                                                                  */
 /************************************************************************************/
-
+#include <stdlib.h>
 #include "LVCS.h"
 #include "LVCS_Private.h"
 #include "LVCS_Tables.h"
 
-/****************************************************************************************/
-/*                                                                                      */
-/* FUNCTION:                LVCS_Memory                                                 */
-/*                                                                                      */
-/* DESCRIPTION:                                                                         */
-/*  This function is used for memory allocation and free. It can be called in           */
-/*  two ways:                                                                           */
-/*                                                                                      */
-/*      hInstance = NULL                Returns the memory requirements                 */
-/*      hInstance = Instance handle     Returns the memory requirements and             */
-/*                                      allocated base addresses for the instance       */
-/*                                                                                      */
-/*  When this function is called for memory allocation (hInstance=NULL) it is           */
-/*  passed the default capabilities.                                                    */
-/*                                                                                      */
-/*  When called for memory allocation the memory base address pointers are NULL on      */
-/*  return.                                                                             */
-/*                                                                                      */
-/*  When the function is called for free (hInstance = Instance Handle) the              */
-/*  capabilities are ignored and the memory table returns the allocated memory and      */
-/*  base addresses used during initialisation.                                          */
-/*                                                                                      */
-/* PARAMETERS:                                                                          */
-/*  hInstance               Instance Handle                                             */
-/*  pMemoryTable            Pointer to an empty memory definition table                 */
-/*  pCapabilities           Pointer to the default capabilites                          */
-/*                                                                                      */
-/* RETURNS:                                                                             */
-/*  LVCS_Success            Succeeded                                                   */
-/*                                                                                      */
-/* NOTES:                                                                               */
-/*  1.  This function may be interrupted by the LVCS_Process function                   */
-/*                                                                                      */
-/****************************************************************************************/
-
-LVCS_ReturnStatus_en LVCS_Memory(LVCS_Handle_t          hInstance,
-                                 LVCS_MemTab_t          *pMemoryTable,
-                                 LVCS_Capabilities_t    *pCapabilities)
-{
-
-    LVM_UINT32          ScratchSize;
-    LVCS_Instance_t     *pInstance = (LVCS_Instance_t *)hInstance;
-
-    /*
-     * Fill in the memory table
-     */
-    if (hInstance == LVM_NULL)
-    {
-        /*
-         * Instance memory
-         */
-        pMemoryTable->Region[LVCS_MEMREGION_PERSISTENT_SLOW_DATA].Size         = (LVM_UINT32)sizeof(LVCS_Instance_t);
-        pMemoryTable->Region[LVCS_MEMREGION_PERSISTENT_SLOW_DATA].Type         = LVCS_PERSISTENT;
-        pMemoryTable->Region[LVCS_MEMREGION_PERSISTENT_SLOW_DATA].pBaseAddress = LVM_NULL;
-
-        /*
-         * Data memory
-         */
-        pMemoryTable->Region[LVCS_MEMREGION_PERSISTENT_FAST_DATA].Size         = (LVM_UINT32)sizeof(LVCS_Data_t);
-        pMemoryTable->Region[LVCS_MEMREGION_PERSISTENT_FAST_DATA].Type         = LVCS_DATA;
-        pMemoryTable->Region[LVCS_MEMREGION_PERSISTENT_FAST_DATA].pBaseAddress = LVM_NULL;
-
-        /*
-         * Coefficient memory
-         */
-        pMemoryTable->Region[LVCS_MEMREGION_PERSISTENT_FAST_COEF].Size         = (LVM_UINT32)sizeof(LVCS_Coefficient_t);
-        pMemoryTable->Region[LVCS_MEMREGION_PERSISTENT_FAST_COEF].Type         = LVCS_COEFFICIENT;
-        pMemoryTable->Region[LVCS_MEMREGION_PERSISTENT_FAST_COEF].pBaseAddress = LVM_NULL;
-
-        /*
-         * Scratch memory
-         */
-        /* Inplace processing */
-        ScratchSize = (LVM_UINT32) \
-                        (LVCS_SCRATCHBUFFERS * sizeof(LVM_FLOAT) * pCapabilities->MaxBlockSize);
-        pMemoryTable->Region[LVCS_MEMREGION_TEMPORARY_FAST].Size         = ScratchSize;
-        pMemoryTable->Region[LVCS_MEMREGION_TEMPORARY_FAST].Type         = LVCS_SCRATCH;
-        pMemoryTable->Region[LVCS_MEMREGION_TEMPORARY_FAST].pBaseAddress = LVM_NULL;
-    }
-    else
-    {
-        /* Read back memory allocation table */
-        *pMemoryTable = pInstance->MemoryTable;
-    }
-
-    return(LVCS_SUCCESS);
-}
-
 /************************************************************************************/
 /*                                                                                  */
 /* FUNCTION:                LVCS_Init                                               */
@@ -120,91 +32,106 @@
 /* DESCRIPTION:                                                                     */
 /*  Create and initialisation function for the Concert Sound module                 */
 /*                                                                                  */
-/*  This function can be used to create an algorithm instance by calling with       */
-/*  hInstance set to LVM_NULL. In this case the algorithm returns the new instance  */
-/*  handle.                                                                         */
-/*                                                                                  */
-/*  This function can be used to force a full re-initialisation of the algorithm    */
-/*  by calling with hInstance = Instance Handle. In this case the memory table      */
-/*  should be correct for the instance, this can be ensured by calling the function */
-/*  LVCS_Memory before calling this function.                                       */
-/*                                                                                  */
 /* PARAMETERS:                                                                      */
-/*  hInstance               Instance handle                                         */
-/*  pMemoryTable            Pointer to the memory definition table                  */
+/*  phInstance              Pointer to instance handle                              */
 /*  pCapabilities           Pointer to the capabilities structure                   */
+/*  pScratch                Pointer to scratch buffer                               */
 /*                                                                                  */
 /* RETURNS:                                                                         */
 /*  LVCS_Success            Initialisation succeeded                                */
+/*  LVCS_NULLADDRESS        Instance memory allocation failed (calloc returned NULL)    */
 /*                                                                                  */
 /* NOTES:                                                                           */
-/*  1.  The instance handle is the pointer to the base address of the first memory  */
-/*      region.                                                                     */
-/*  2.  This function must not be interrupted by the LVCS_Process function          */
-/*  3.  This function must be called with the same capabilities as used for the     */
-/*      call to the memory function                                                 */
+/*  1.  This function must not be interrupted by the LVCS_Process function          */
 /*                                                                                  */
 /************************************************************************************/
 
-LVCS_ReturnStatus_en LVCS_Init(LVCS_Handle_t         *phInstance,
-                               LVCS_MemTab_t         *pMemoryTable,
-                               LVCS_Capabilities_t   *pCapabilities)
-{
-
-    LVCS_Instance_t                 *pInstance;
-    LVCS_VolCorrect_t               *pLVCS_VolCorrectTable;
+LVCS_ReturnStatus_en LVCS_Init(LVCS_Handle_t* phInstance, LVCS_Capabilities_t* pCapabilities,
+                               void* pScratch) {
+    LVCS_Instance_t* pInstance;
+    LVCS_VolCorrect_t* pLVCS_VolCorrectTable;
 
     /*
-     * Set the instance handle if not already initialised
+     * Create the instance handle if not already initialised
      */
-    if (*phInstance == LVM_NULL)
-    {
-        *phInstance = (LVCS_Handle_t)pMemoryTable->Region[LVCS_MEMREGION_PERSISTENT_SLOW_DATA].pBaseAddress;
+    if (*phInstance == LVM_NULL) {
+        *phInstance = calloc(1, sizeof(*pInstance));
     }
-    pInstance =(LVCS_Instance_t  *)*phInstance;
+    if (*phInstance == LVM_NULL) {
+        return LVCS_NULLADDRESS;
+    }
+    pInstance = (LVCS_Instance_t*)*phInstance;
 
     /*
      * Save the capabilities in the instance structure
      */
     pInstance->Capabilities = *pCapabilities;
 
-    /*
-     * Save the memory table in the instance structure
-     */
-    pInstance->MemoryTable = *pMemoryTable;
+    pInstance->pScratch = pScratch;
 
     /*
      * Set all initial parameters to invalid to force a full initialisation
      */
-    pInstance->Params.OperatingMode  = LVCS_OFF;
-    pInstance->Params.SpeakerType    = LVCS_SPEAKERTYPE_MAX;
-    pInstance->OutputDevice          = LVCS_HEADPHONE;
-    pInstance->Params.SourceFormat   = LVCS_SOURCEMAX;
+    pInstance->Params.OperatingMode = LVCS_OFF;
+    pInstance->Params.SpeakerType = LVCS_SPEAKERTYPE_MAX;
+    pInstance->OutputDevice = LVCS_HEADPHONE;
+    pInstance->Params.SourceFormat = LVCS_SOURCEMAX;
     pInstance->Params.CompressorMode = LVM_MODE_OFF;
-    pInstance->Params.SampleRate     = LVM_FS_INVALID;
-    pInstance->Params.EffectLevel    = 0;
-    pInstance->Params.ReverbLevel    = (LVM_UINT16)0x8000;
-    pLVCS_VolCorrectTable            = (LVCS_VolCorrect_t*)&LVCS_VolCorrectTable[0];
-    pInstance->VolCorrect            = pLVCS_VolCorrectTable[0];
-    pInstance->TransitionGain        = 0;
+    pInstance->Params.SampleRate = LVM_FS_INVALID;
+    pInstance->Params.EffectLevel = 0;
+    pInstance->Params.ReverbLevel = (LVM_UINT16)0x8000;
+    pLVCS_VolCorrectTable = (LVCS_VolCorrect_t*)&LVCS_VolCorrectTable[0];
+    pInstance->VolCorrect = pLVCS_VolCorrectTable[0];
+    pInstance->TransitionGain = 0;
 
     /* These current and target values are initialized again in LVCS_Control.c */
-    LVC_Mixer_Init(&pInstance->BypassMix.Mixer_Instance.MixerStream[0],0,0);
+    LVC_Mixer_Init(&pInstance->BypassMix.Mixer_Instance.MixerStream[0], 0, 0);
     /* These current and target values are initialized again in LVCS_Control.c */
-    LVC_Mixer_Init(&pInstance->BypassMix.Mixer_Instance.MixerStream[1],0,0);
+    LVC_Mixer_Init(&pInstance->BypassMix.Mixer_Instance.MixerStream[1], 0, 0);
 
     /*
      * Initialise the bypass variables
      */
-    pInstance->MSTarget0=0;
-    pInstance->MSTarget1=0;
-    pInstance->bInOperatingModeTransition          = LVM_FALSE;
-    pInstance->bTimerDone                        = LVM_FALSE;
-    pInstance->TimerParams.CallBackParam         = 0;
-    pInstance->TimerParams.pCallBack             = LVCS_TimerCallBack;
-    pInstance->TimerParams.pCallbackInstance     = pInstance;
-    pInstance->TimerParams.pCallBackParams       = LVM_NULL;
+    pInstance->MSTarget0 = 0;
+    pInstance->MSTarget1 = 0;
+    pInstance->bInOperatingModeTransition = LVM_FALSE;
+    pInstance->bTimerDone = LVM_FALSE;
+    pInstance->TimerParams.CallBackParam = 0;
+    pInstance->TimerParams.pCallBack = LVCS_TimerCallBack;
+    pInstance->TimerParams.pCallbackInstance = pInstance;
+    pInstance->TimerParams.pCallBackParams = LVM_NULL;
 
-    return(LVCS_SUCCESS);
+    return (LVCS_SUCCESS);
 }
 
+/************************************************************************************/
+/*                                                                                  */
+/* FUNCTION:                LVCS_DeInit                                             */
+/*                                                                                  */
+/* DESCRIPTION:                                                                     */
+/*  Free the memory allocated during the LVCS_Init call, including the instance handle  */
+/*                                                                                  */
+/* PARAMETERS:                                                                      */
+/*  phInstance              Pointer to instance handle                              */
+/*                                                                                  */
+/* NOTES:                                                                           */
+/*  1.  This function must not be interrupted by the LVCS_Process function          */
+/*                                                                                  */
+/************************************************************************************/
+void LVCS_DeInit(LVCS_Handle_t* phInstance) {
+    LVCS_Instance_t* pInstance = (LVCS_Instance_t*)*phInstance;
+    if (pInstance == LVM_NULL) {
+        return;
+    }
+    if (pInstance->pCoeff != LVM_NULL) {
+        free(pInstance->pCoeff);
+        pInstance->pCoeff = LVM_NULL;
+    }
+    if (pInstance->pData != LVM_NULL) {
+        free(pInstance->pData);
+        pInstance->pData = LVM_NULL;
+    }
+    free(pInstance);
+    *phInstance = LVM_NULL;
+    return;
+}
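With the LVCS_Memory path removed, lifetime management is a plain create/teardown pair: LVCS_Init calloc's the instance when *phInstance is LVM_NULL, and LVCS_DeInit frees pCoeff, pData and the instance itself, while the scratch buffer remains owned by the caller. A minimal caller-side sketch follows, assuming the scratch sizing formula the deleted LVCS_Memory function used (LVCS_SCRATCHBUFFERS * sizeof(LVM_FLOAT) * MaxBlockSize); the block size value and everything outside the LVCS API are illustrative.

/* Illustrative only: create, use and destroy a Concert Sound instance with the new API. */
/* Assumes <stdlib.h>, "LVCS.h" and "LVCS_Private.h" are in scope.                        */
LVCS_Handle_t hCS = LVM_NULL;
LVCS_Capabilities_t Caps{};                 /* caller fills in the remaining capabilities */
Caps.MaxBlockSize = 4096;                   /* hypothetical maximum block size            */
void* pScratch = calloc(LVCS_SCRATCHBUFFERS * Caps.MaxBlockSize, sizeof(LVM_FLOAT));
if (pScratch != LVM_NULL && LVCS_Init(&hCS, &Caps, pScratch) == LVCS_SUCCESS) {
    /* ... LVCS_Control() / LVCS_Process() calls go here ... */
}
LVCS_DeInit(&hCS);                          /* frees pCoeff, pData and the instance       */
free(pScratch);                             /* scratch stays the caller's responsibility  */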
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Private.h b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Private.h
index 154ea55..f9c23b3 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Private.h
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Private.h
@@ -33,11 +33,11 @@
 /*                                                                                  */
 /************************************************************************************/
 
-#include "LVCS.h"                               /* Calling or Application layer definitions */
-#include "LVCS_StereoEnhancer.h"                /* Stereo enhancer module definitions */
-#include "LVCS_ReverbGenerator.h"               /* Reverberation module definitions */
-#include "LVCS_Equaliser.h"                     /* Equaliser module definitions */
-#include "LVCS_BypassMix.h"                     /* Bypass Mixer module definitions */
+#include "LVCS.h"                 /* Calling or Application layer definitions */
+#include "LVCS_StereoEnhancer.h"  /* Stereo enhancer module definitions */
+#include "LVCS_ReverbGenerator.h" /* Reverberation module definitions */
+#include "LVCS_Equaliser.h"       /* Equaliser module definitions */
+#include "LVCS_BypassMix.h"       /* Bypass Mixer module definitions */
 #include "LVM_Timer.h"
 
 /************************************************************************************/
@@ -47,35 +47,29 @@
 /************************************************************************************/
 
 /* Configuration switch controls */
-#define LVCS_STEREOENHANCESWITCH    0x0001      /* Stereo enhancement enable control */
-#define LVCS_REVERBSWITCH           0x0002      /* Reverberation enable control */
-#define LVCS_EQUALISERSWITCH        0x0004      /* Equaliser enable control */
-#define LVCS_BYPASSMIXSWITCH        0x0008      /* Bypass mixer enable control */
-#define LVCS_COMPGAINFRAME          64          /* Compressor gain update interval */
+#define LVCS_STEREOENHANCESWITCH 0x0001 /* Stereo enhancement enable control */
+#define LVCS_REVERBSWITCH 0x0002        /* Reverberation enable control */
+#define LVCS_EQUALISERSWITCH 0x0004     /* Equaliser enable control */
+#define LVCS_BYPASSMIXSWITCH 0x0008     /* Bypass mixer enable control */
+#define LVCS_COMPGAINFRAME 64           /* Compressor gain update interval */
 
 /* Memory */
-#ifdef SUPPORT_MC
-#define LVCS_SCRATCHBUFFERS              8      /* Number of buffers required for inplace processing */
-#else
-#define LVCS_SCRATCHBUFFERS              6      /* Number of buffers required for inplace processing */
-#endif
-#ifdef SUPPORT_MC
+#define LVCS_SCRATCHBUFFERS 8 /* Number of buffers required for inplace processing */
 /*
  * The Concert Surround module applies processing only on the first two
  * channels of a multichannel input. The data of first two channels is copied
  * from the multichannel input into scratch buffer. The buffers added here
  * are used for this purpose
  */
-#define LVCS_MC_SCRATCHBUFFERS           2
-#endif
+#define LVCS_MC_SCRATCHBUFFERS 2
 
 /* General */
-#define LVCS_INVALID                0xFFFF      /* Invalid init parameter */
-#define LVCS_BYPASS_MIXER_TC        100         /* Bypass mixer time */
+#define LVCS_INVALID 0xFFFF      /* Invalid init parameter */
+#define LVCS_BYPASS_MIXER_TC 100 /* Bypass mixer time */
 
 /* Access to external coefficients table */
-#define LVCS_NR_OF_FS                    9
-#define LVCS_NR_OF_CHAN_CFG              2
+#define LVCS_NR_OF_FS 9
+#define LVCS_NR_OF_CHAN_CFG 2
 
 /************************************************************************************/
 /*                                                                                  */
@@ -83,13 +77,9 @@
 /*                                                                                  */
 /************************************************************************************/
 
-typedef LVM_UINT16  LVCS_Configuration_t;       /* Internal algorithm configuration */
+typedef LVM_UINT16 LVCS_Configuration_t; /* Internal algorithm configuration */
 
-typedef enum
-{
-    LVCS_HEADPHONE  = 0,
-    LVCS_DEVICE_MAX = LVM_MAXENUM
-} LVCS_OutputDevice_en;
+typedef enum { LVCS_HEADPHONE = 0, LVCS_DEVICE_MAX = LVM_MAXENUM } LVCS_OutputDevice_en;
 
 /************************************************************************************/
 /*                                                                                  */
@@ -98,65 +88,60 @@
 /************************************************************************************/
 
 /* Volume correction structure */
-typedef struct
-{
-    LVM_FLOAT   CompFull;                       /* Post CS compression 100% effect */
-    LVM_FLOAT   CompMin;                        /* Post CS compression 0% effect */
-    LVM_FLOAT   GainFull;                       /* CS gain correct 100% effect */
-    LVM_FLOAT   GainMin;                        /* CS gain correct 0% effect */
+typedef struct {
+    LVM_FLOAT CompFull; /* Post CS compression 100% effect */
+    LVM_FLOAT CompMin;  /* Post CS compression 0% effect */
+    LVM_FLOAT GainFull; /* CS gain correct 100% effect */
+    LVM_FLOAT GainMin;  /* CS gain correct 0% effect */
 } LVCS_VolCorrect_t;
 
 /* Instance structure */
-typedef struct
-{
+typedef struct {
     /* Public parameters */
-    LVCS_MemTab_t           MemoryTable;        /* Instance memory allocation table */
-    LVCS_Params_t           Params;             /* Instance parameters */
-    LVCS_Capabilities_t     Capabilities;       /* Initialisation capabilities */
+    LVCS_Params_t Params;             /* Instance parameters */
+    LVCS_Capabilities_t Capabilities; /* Initialisation capabilities */
 
     /* Private parameters */
-    LVCS_OutputDevice_en    OutputDevice;       /* Selected output device type */
-    LVCS_VolCorrect_t       VolCorrect;         /* Volume correction settings */
-    LVM_FLOAT               TransitionGain;     /* Transition gain */
-    LVM_FLOAT               CompressGain;       /* Last used compressor gain*/
+    LVCS_OutputDevice_en OutputDevice; /* Selected output device type */
+    LVCS_VolCorrect_t VolCorrect;      /* Volume correction settings */
+    LVM_FLOAT TransitionGain;          /* Transition gain */
+    LVM_FLOAT CompressGain;            /* Last used compressor gain*/
 
     /* Sub-block configurations */
-    LVCS_StereoEnhancer_t   StereoEnhancer;     /* Stereo enhancer configuration */
-    LVCS_ReverbGenerator_t  Reverberation;      /* Reverberation configuration */
-    LVCS_Equaliser_t        Equaliser;          /* Equaliser configuration */
-    LVCS_BypassMix_t        BypassMix;          /* Bypass mixer configuration */
+    LVCS_StereoEnhancer_t StereoEnhancer; /* Stereo enhancer configuration */
+    LVCS_ReverbGenerator_t Reverberation; /* Reverberation configuration */
+    LVCS_Equaliser_t Equaliser;           /* Equaliser configuration */
+    LVCS_BypassMix_t BypassMix;           /* Bypass mixer configuration */
 
     /* Bypass variable */
-    LVM_INT16               MSTarget0;                          /* Mixer state control variable for smooth transtion */
-    LVM_INT16               MSTarget1;                          /* Mixer state control variable for smooth transtion */
-    LVM_INT16               bInOperatingModeTransition;         /* Operating mode transition flag */
-    LVM_INT16               bTimerDone;                         /* Timer completion flag */
-    LVM_Timer_Params_t      TimerParams;                        /* Timer parameters */
-    LVM_Timer_Instance_t    TimerInstance;                      /* Timer instance */
+    LVM_INT16 MSTarget0;                  /* Mixer state control variable for smooth transition */
+    LVM_INT16 MSTarget1;                  /* Mixer state control variable for smooth transition */
+    LVM_INT16 bInOperatingModeTransition; /* Operating mode transition flag */
+    LVM_INT16 bTimerDone;                 /* Timer completion flag */
+    LVM_Timer_Params_t TimerParams;       /* Timer parameters */
+    LVM_Timer_Instance_t TimerInstance;   /* Timer instance */
+    void* pCoeff;                         /* pointer to buffer for equaliser filter coeffs */
+    void* pData;                          /* pointer to buffer for equaliser filter states */
+    void* pScratch;                       /* Pointer to bundle scratch buffer */
 
 } LVCS_Instance_t;
 
 /* Coefficient Structure */
-typedef struct
-{
-    Biquad_FLOAT_Instance_t       EqualiserBiquadInstance;
-    Biquad_FLOAT_Instance_t       ReverbBiquadInstance;
-    Biquad_FLOAT_Instance_t       SEBiquadInstanceMid;
-    Biquad_FLOAT_Instance_t       SEBiquadInstanceSide;
+typedef struct {
+    Biquad_FLOAT_Instance_t EqualiserBiquadInstance;
+    Biquad_FLOAT_Instance_t ReverbBiquadInstance;
+    Biquad_FLOAT_Instance_t SEBiquadInstanceMid;
+    Biquad_FLOAT_Instance_t SEBiquadInstanceSide;
 } LVCS_Coefficient_t;
 
 /* Data Structure */
-typedef struct
-{
+typedef struct {
     Biquad_2I_Order2_FLOAT_Taps_t EqualiserBiquadTaps;
     Biquad_2I_Order2_FLOAT_Taps_t ReverbBiquadTaps;
     Biquad_1I_Order1_FLOAT_Taps_t SEBiquadTapsMid;
     Biquad_1I_Order2_FLOAT_Taps_t SEBiquadTapsSide;
 } LVCS_Data_t;
 
-void LVCS_TimerCallBack (   void* hInstance,
-                            void* pCallBackParams,
-                            LVM_INT32 CallbackParam);
+void LVCS_TimerCallBack(void* hInstance, void* pCallBackParams, LVM_INT32 CallbackParam);
 
-#endif      /* PRIVATE_H */
-
+#endif /* PRIVATE_H */
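For reference, the pScratch pointer now stored in the instance is the bundle scratch that LVCS_Process and LVCS_Process_CS below slice into 2 * NrFrames stereo chunks (NrFrames aliases NumSamples). The layout implied by the pointer arithmetic in LVCS_Process.cpp, with LVCS_SCRATCHBUFFERS == 8, is summarised here as an illustrative comment only:

/* Offsets into pInstance->pScratch, in LVM_FLOAT units:                                  */
/*   [0          , 2*NrFrames)  intermediate data for StereoEnhancer / ReverbGenerator    */
/*   [2*NrFrames , 4*NrFrames)  pInput  - unprocessed L/R copy kept for the bypass mixer  */
/*   [4*NrFrames , 6*NrFrames)  pStereoOut when input and output buffers are the same     */
/*   [6*NrFrames , 8*NrFrames)  pStIn   - stereo input handed to LVCS_StereoEnhancer      */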
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Process.cpp b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Process.cpp
index 8e09be2..d18f2c3 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Process.cpp
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Process.cpp
@@ -65,18 +65,14 @@
 /* NOTES:                                                                           */
 /*                                                                                  */
 /************************************************************************************/
-LVCS_ReturnStatus_en LVCS_Process_CS(LVCS_Handle_t              hInstance,
-                                     const LVM_FLOAT            *pInData,
-                                     LVM_FLOAT                  *pOutData,
-                                     LVM_UINT16                 NumSamples)
-{
-    const LVM_FLOAT     *pInput;
-    LVCS_Instance_t     *pInstance = (LVCS_Instance_t  *)hInstance;
-    LVM_FLOAT           *pScratch;
+LVCS_ReturnStatus_en LVCS_Process_CS(LVCS_Handle_t hInstance, const LVM_FLOAT* pInData,
+                                     LVM_FLOAT* pOutData, LVM_UINT16 NumSamples) {
+    const LVM_FLOAT* pInput;
+    LVCS_Instance_t* pInstance = (LVCS_Instance_t*)hInstance;
+    LVM_FLOAT* pScratch;
     LVCS_ReturnStatus_en err;
-#ifdef SUPPORT_MC
-    LVM_FLOAT           *pStIn;
-    LVM_INT32           channels = pInstance->Params.NrChannels;
+    LVM_FLOAT* pStIn;
+    LVM_INT32 channels = pInstance->Params.NrChannels;
 #define NrFrames NumSamples  // alias for clarity
 
     /*In case of mono processing, stereo input is created from mono
@@ -85,96 +81,64 @@
      *at this point.
      *So to treat the pInData as stereo we are setting channels to 2
      */
-    if (channels == 1)
-    {
+    if (channels == 1) {
         channels = 2;
     }
-#endif
 
-    pScratch  = (LVM_FLOAT *) \
-                  pInstance->MemoryTable.Region[LVCS_MEMREGION_TEMPORARY_FAST].pBaseAddress;
+    pScratch = (LVM_FLOAT*)pInstance->pScratch;
 
     /*
      * Check if the processing is inplace
      */
-#ifdef SUPPORT_MC
     /*
      * The pInput buffer holds the first 2 (Left, Right) channels information.
      * Hence the memory required by this buffer is 2 * NumFrames.
      * The Concert Surround module carries out processing only on L, R.
      */
     pInput = pScratch + (2 * NrFrames);
-    pStIn  = pScratch + ((LVCS_SCRATCHBUFFERS - 2) * NrFrames);
+    pStIn = pScratch + ((LVCS_SCRATCHBUFFERS - 2) * NrFrames);
     /* The first two channel data is extracted from the input data and
      * copied into pInput buffer
      */
-    Copy_Float_Mc_Stereo((LVM_FLOAT *)pInData,
-                         (LVM_FLOAT *)pInput,
-                         NrFrames,
-                         channels);
-    Copy_Float((LVM_FLOAT *)pInput,
-               (LVM_FLOAT *)pStIn,
-               (LVM_INT16)(2 * NrFrames));
-#else
-    if (pInData == pOutData)
-    {
-        /* Processing inplace */
-        pInput = pScratch + (2 * NumSamples);
-        Copy_Float((LVM_FLOAT *)pInData,           /* Source */
-                   (LVM_FLOAT *)pInput,            /* Destination */
-                   (LVM_INT16)(2 * NumSamples));     /* Left and right */
-    }
-    else
-    {
-        /* Processing outplace */
-        pInput = pInData;
-    }
-#endif
+    Copy_Float_Mc_Stereo((LVM_FLOAT*)pInData, (LVM_FLOAT*)pInput, NrFrames, channels);
+    Copy_Float((LVM_FLOAT*)pInput, (LVM_FLOAT*)pStIn, (LVM_INT16)(2 * NrFrames));
     /*
      * Call the stereo enhancer
      */
-#ifdef SUPPORT_MC
-    err = LVCS_StereoEnhancer(hInstance,              /* Instance handle */
-                              pStIn,                  /* Pointer to the input data */
-                              pOutData,               /* Pointer to the output data */
-                              NrFrames);              /* Number of frames to process */
-#else
-    err = LVCS_StereoEnhancer(hInstance,              /* Instance handle */
-                              pInData,                    /* Pointer to the input data */
-                              pOutData,                   /* Pointer to the output data */
-                              NumSamples);                /* Number of samples to process */
-#endif
+    err = LVCS_StereoEnhancer(hInstance, /* Instance handle */
+                              pStIn,     /* Pointer to the input data */
+                              pOutData,  /* Pointer to the output data */
+                              NrFrames); /* Number of frames to process */
 
     /*
      * Call the reverb generator
      */
-    err = LVCS_ReverbGenerator(hInstance,             /* Instance handle */
-                               pOutData,                  /* Pointer to the input data */
-                               pOutData,                  /* Pointer to the output data */
-                               NumSamples);               /* Number of samples to process */
+    err = LVCS_ReverbGenerator(hInstance,   /* Instance handle */
+                               pOutData,    /* Pointer to the input data */
+                               pOutData,    /* Pointer to the output data */
+                               NumSamples); /* Number of samples to process */
 
     /*
      * Call the equaliser
      */
-    err = LVCS_Equaliser(hInstance,                   /* Instance handle */
-                         pOutData,                        /* Pointer to the input data */
-                         NumSamples);                     /* Number of samples to process */
+    err = LVCS_Equaliser(hInstance,   /* Instance handle */
+                         pOutData,    /* Pointer to the input data */
+                         NumSamples); /* Number of samples to process */
 
     /*
      * Call the bypass mixer
      */
-    err = LVCS_BypassMixer(hInstance,                 /* Instance handle */
-                           pOutData,                      /* Pointer to the processed data */
-                           pInput,                        /* Pointer to the input (unprocessed) data */
-                           pOutData,                      /* Pointer to the output data */
-                           NumSamples);                   /* Number of samples to process */
+    err = LVCS_BypassMixer(hInstance,   /* Instance handle */
+                           pOutData,    /* Pointer to the processed data */
+                           pInput,      /* Pointer to the input (unprocessed) data */
+                           pOutData,    /* Pointer to the output data */
+                           NumSamples); /* Number of samples to process */
 
-    if(err != LVCS_SUCCESS)
-    {
+    if (err != LVCS_SUCCESS) {
         return err;
     }
 
-    return(LVCS_SUCCESS);
+    return (LVCS_SUCCESS);
 }
 /************************************************************************************/
 /*                                                                                  */
@@ -202,171 +166,114 @@
 /* NOTES:                                                                           */
 /*                                                                                  */
 /************************************************************************************/
-LVCS_ReturnStatus_en LVCS_Process(LVCS_Handle_t             hInstance,
-                                  const LVM_FLOAT           *pInData,
-                                  LVM_FLOAT                 *pOutData,
-                                  LVM_UINT16                NumSamples)
-{
-
-    LVCS_Instance_t *pInstance = (LVCS_Instance_t  *)hInstance;
+LVCS_ReturnStatus_en LVCS_Process(LVCS_Handle_t hInstance, const LVM_FLOAT* pInData,
+                                  LVM_FLOAT* pOutData, LVM_UINT16 NumSamples) {
+    LVCS_Instance_t* pInstance = (LVCS_Instance_t*)hInstance;
     LVCS_ReturnStatus_en err;
-#ifdef SUPPORT_MC
     /*Extract number of Channels info*/
     LVM_INT32 channels = pInstance->Params.NrChannels;
 #define NrFrames NumSamples  // alias for clarity
-    if (channels == 1)
-    {
+    if (channels == 1) {
         channels = 2;
     }
-#endif
     /*
      * Check the number of samples is not too large
      */
-    if (NumSamples > pInstance->Capabilities.MaxBlockSize)
-    {
-        return(LVCS_TOOMANYSAMPLES);
+    if (NumSamples > pInstance->Capabilities.MaxBlockSize) {
+        return (LVCS_TOOMANYSAMPLES);
     }
 
     /*
      * Check if the algorithm is enabled
      */
-    if (pInstance->Params.OperatingMode != LVCS_OFF)
-    {
-#ifdef SUPPORT_MC
-        LVM_FLOAT *pStereoOut;
+    if (pInstance->Params.OperatingMode != LVCS_OFF) {
+        LVM_FLOAT* pStereoOut;
         /*
          * LVCS_Process_CS uses output buffer to store intermediate outputs of StereoEnhancer,
          * Equalizer, ReverbGenerator and BypassMixer.
          * So, to avoid i/o data overlapping, when i/o buffers are common, use scratch buffer
          * to store intermediate outputs.
          */
-        if (pOutData == pInData)
-        {
-          /*
-           * Scratch memory is used in 4 chunks of (2 * NrFrames) size.
-           * First chunk of memory is used by LVCS_StereoEnhancer and LVCS_ReverbGenerator,
-           * second and fourth are used as input buffers by pInput and pStIn in LVCS_Process_CS.
-           * Hence, pStereoOut is pointed to use unused third portion of scratch memory.
-           */
-            pStereoOut = (LVM_FLOAT *) \
-                          pInstance->MemoryTable. \
-                          Region[LVCS_MEMREGION_TEMPORARY_FAST].pBaseAddress +
-                          ((LVCS_SCRATCHBUFFERS - 4) * NrFrames);
-        }
-        else
-        {
+        if (pOutData == pInData) {
+            /*
+             * Scratch memory is used in 4 chunks of (2 * NrFrames) size.
+             * First chunk of memory is used by LVCS_StereoEnhancer and LVCS_ReverbGenerator,
+             * second and fourth are used as input buffers by pInput and pStIn in LVCS_Process_CS.
+             * Hence, pStereoOut is pointed to use unused third portion of scratch memory.
+             */
+            pStereoOut = (LVM_FLOAT*)pInstance->pScratch + ((LVCS_SCRATCHBUFFERS - 4) * NrFrames);
+        } else {
             pStereoOut = pOutData;
         }
 
         /*
          * Call CS process function
          */
-            err = LVCS_Process_CS(hInstance,
-                                  pInData,
-                                  pStereoOut,
-                                  NrFrames);
-#else
-            err = LVCS_Process_CS(hInstance,
-                                  pInData,
-                                  pOutData,
-                                  NumSamples);
-#endif
+        err = LVCS_Process_CS(hInstance, pInData, pStereoOut, NrFrames);
 
         /*
          * Compress to reduce expansion effect of Concert Sound and correct volume
          * differences for difference settings. Not applied in test modes
          */
-        if ((pInstance->Params.OperatingMode == LVCS_ON)&& \
-                                        (pInstance->Params.CompressorMode == LVM_MODE_ON))
-        {
+        if ((pInstance->Params.OperatingMode == LVCS_ON) &&
+            (pInstance->Params.CompressorMode == LVM_MODE_ON)) {
             LVM_FLOAT Gain = pInstance->VolCorrect.CompMin;
             LVM_FLOAT Current1;
 
             Current1 = LVC_Mixer_GetCurrent(&pInstance->BypassMix.Mixer_Instance.MixerStream[0]);
-            Gain = (LVM_FLOAT)(  pInstance->VolCorrect.CompMin
-                               - (((LVM_FLOAT)pInstance->VolCorrect.CompMin  * (Current1)))
-                               + (((LVM_FLOAT)pInstance->VolCorrect.CompFull * (Current1))));
+            Gain = (LVM_FLOAT)(pInstance->VolCorrect.CompMin -
+                               (((LVM_FLOAT)pInstance->VolCorrect.CompMin * (Current1))) +
+                               (((LVM_FLOAT)pInstance->VolCorrect.CompFull * (Current1))));
 
-            if(NumSamples < LVCS_COMPGAINFRAME)
-            {
-#ifdef SUPPORT_MC
-                NonLinComp_Float(Gain,                    /* Compressor gain setting */
-                                 pStereoOut,
-                                 pStereoOut,
-                                 (LVM_INT32)(2 * NrFrames));
-#else
-                NonLinComp_Float(Gain,                    /* Compressor gain setting */
-                                 pOutData,
-                                 pOutData,
-                                 (LVM_INT32)(2 * NumSamples));
-#endif
-            }
-            else
-            {
-                LVM_FLOAT  GainStep;
-                LVM_FLOAT  FinalGain;
-                LVM_INT16  SampleToProcess = NumSamples;
-                LVM_FLOAT  *pOutPtr;
+            if (NumSamples < LVCS_COMPGAINFRAME) {
+                NonLinComp_Float(Gain, /* Compressor gain setting */
+                                 pStereoOut, pStereoOut, (LVM_INT32)(2 * NrFrames));
+            } else {
+                LVM_FLOAT GainStep;
+                LVM_FLOAT FinalGain;
+                LVM_INT16 SampleToProcess = NumSamples;
+                LVM_FLOAT* pOutPtr;
 
                 /* Large changes in Gain can cause clicks in output
                    Split data into small blocks and use interpolated gain values */
 
-                GainStep = (LVM_FLOAT)(((Gain-pInstance->CompressGain) * \
-                                                LVCS_COMPGAINFRAME) / NumSamples);
+                GainStep = (LVM_FLOAT)(((Gain - pInstance->CompressGain) * LVCS_COMPGAINFRAME) /
+                                       NumSamples);
 
-                if((GainStep == 0) && (pInstance->CompressGain < Gain))
-                {
+                if ((GainStep == 0) && (pInstance->CompressGain < Gain)) {
                     GainStep = 1;
-                }
-                else
-                {
-                    if((GainStep == 0) && (pInstance->CompressGain > Gain))
-                    {
+                } else {
+                    if ((GainStep == 0) && (pInstance->CompressGain > Gain)) {
                         GainStep = -1;
                     }
                 }
 
                 FinalGain = Gain;
                 Gain = pInstance->CompressGain;
-#ifdef SUPPORT_MC
                 pOutPtr = pStereoOut;
-#else
-                pOutPtr = pOutData;
-#endif
 
-                while(SampleToProcess > 0)
-                {
+                while (SampleToProcess > 0) {
                     Gain = (LVM_FLOAT)(Gain + GainStep);
-                    if((GainStep > 0) && (FinalGain <= Gain))
-                    {
+                    if ((GainStep > 0) && (FinalGain <= Gain)) {
                         Gain = FinalGain;
                         GainStep = 0;
                     }
 
-                    if((GainStep < 0) && (FinalGain > Gain))
-                    {
+                    if ((GainStep < 0) && (FinalGain > Gain)) {
                         Gain = FinalGain;
                         GainStep = 0;
                     }
 
-                    if(SampleToProcess > LVCS_COMPGAINFRAME)
-                    {
-                        NonLinComp_Float(Gain,                    /* Compressor gain setting */
-                                         pOutPtr,
-                                         pOutPtr,
-                                         (LVM_INT32)(2 * LVCS_COMPGAINFRAME));
+                    if (SampleToProcess > LVCS_COMPGAINFRAME) {
+                        NonLinComp_Float(Gain, /* Compressor gain setting */
+                                         pOutPtr, pOutPtr, (LVM_INT32)(2 * LVCS_COMPGAINFRAME));
                         pOutPtr += (2 * LVCS_COMPGAINFRAME);
                         SampleToProcess = (LVM_INT16)(SampleToProcess - LVCS_COMPGAINFRAME);
-                    }
-                    else
-                    {
-                        NonLinComp_Float(Gain,                    /* Compressor gain setting */
-                                         pOutPtr,
-                                         pOutPtr,
-                                         (LVM_INT32)(2 * SampleToProcess));
+                    } else {
+                        NonLinComp_Float(Gain, /* Compressor gain setting */
+                                         pOutPtr, pOutPtr, (LVM_INT32)(2 * SampleToProcess));
                         SampleToProcess = 0;
                     }
-
                 }
             }
 
@@ -374,57 +281,33 @@
             pInstance->CompressGain = Gain;
         }
 
-        if(pInstance->bInOperatingModeTransition == LVM_TRUE){
-
+        if (pInstance->bInOperatingModeTransition == LVM_TRUE) {
             /*
              * Re-init bypass mix when timer has completed
              */
             if ((pInstance->bTimerDone == LVM_TRUE) &&
-                (pInstance->BypassMix.Mixer_Instance.MixerStream[1].CallbackSet == 0))
-            {
-                err = LVCS_BypassMixInit(hInstance,
-                                         &pInstance->Params);
+                (pInstance->BypassMix.Mixer_Instance.MixerStream[1].CallbackSet == 0)) {
+                err = LVCS_BypassMixInit(hInstance, &pInstance->Params);
 
-                if(err != LVCS_SUCCESS)
-                {
+                if (err != LVCS_SUCCESS) {
                     return err;
                 }
 
-            }
-            else{
-                LVM_Timer ( &pInstance->TimerInstance,
-                            (LVM_INT16)NumSamples);
+            } else {
+                LVM_Timer(&pInstance->TimerInstance, (LVM_INT16)NumSamples);
             }
         }
-#ifdef SUPPORT_MC
-        Copy_Float_Stereo_Mc(pInData,
-                             pStereoOut,
-                             pOutData,
-                             NrFrames,
-                             channels);
-#endif
-    }
-    else
-    {
-        if (pInData != pOutData)
-        {
-#ifdef SUPPORT_MC
+        Copy_Float_Stereo_Mc(pInData, pStereoOut, pOutData, NrFrames, channels);
+    } else {
+        if (pInData != pOutData) {
             /*
              * The algorithm is disabled so just copy the data
              */
-            Copy_Float((LVM_FLOAT *)pInData,               /* Source */
-                       (LVM_FLOAT *)pOutData,                  /* Destination */
-                       (LVM_INT16)(channels * NrFrames));    /* All Channels*/
-#else
-            /*
-             * The algorithm is disabled so just copy the data
-             */
-            Copy_Float((LVM_FLOAT *)pInData,               /* Source */
-                       (LVM_FLOAT *)pOutData,                  /* Destination */
-                       (LVM_INT16)(2 * NumSamples));             /* Left and right */
-#endif
+            Copy_Float((LVM_FLOAT*)pInData,               /* Source */
+                       (LVM_FLOAT*)pOutData,              /* Destination */
+                       (LVM_INT16)(channels * NrFrames)); /* All Channels*/
         }
     }
 
-    return(LVCS_SUCCESS);
+    return (LVCS_SUCCESS);
 }
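
The block-wise compressor gain ramp in the LVCS_Process.cpp hunk above (the SampleToProcess loop) is the subtlest logic touched by this reformat, so a standalone sketch of the same idea follows for reference. It is illustrative only: applyGain stands in for NonLinComp_Float, kCompGainFrame mirrors LVCS_COMPGAINFRAME, and the step/clamp handling is simplified relative to the production code.

/*
 * Sketch: ramp the compressor gain from its previous value towards a new
 * target in fixed-size blocks so that no single block sees a large gain jump.
 */
#include <algorithm>
#include <cstdio>
#include <vector>

static constexpr int kCompGainFrame = 64;  /* samples per gain update (stand-in) */

/* Stand-in for NonLinComp_Float: apply one gain value to a block of values. */
static void applyGain(float gain, float* buf, int numValues) {
    for (int i = 0; i < numValues; ++i) buf[i] *= gain;
}

/* Ramp from previousGain towards targetGain in kCompGainFrame-frame steps. */
static float rampGain(float previousGain, float targetGain, float* stereo, int numFrames) {
    float step = (targetGain - previousGain) * kCompGainFrame / numFrames;
    float gain = previousGain;
    int remaining = numFrames;
    float* p = stereo;
    while (remaining > 0) {
        gain += step;
        /* Clamp once the target is reached so later blocks use targetGain. */
        if ((step > 0 && gain >= targetGain) || (step < 0 && gain <= targetGain)) {
            gain = targetGain;
            step = 0;
        }
        int block = std::min(remaining, kCompGainFrame);
        applyGain(gain, p, 2 * block);  /* interleaved stereo: 2 values per frame */
        p += 2 * block;
        remaining -= block;
    }
    return gain;  /* caller stores this as the new CompressGain */
}

int main() {
    std::vector<float> buf(2 * 256, 1.0f);
    float newGain = rampGain(/*previousGain=*/0.5f, /*targetGain=*/1.0f, buf.data(), 256);
    std::printf("final gain %.2f, first sample %.3f, last sample %.3f\n",
                newGain, buf.front(), buf.back());
    return 0;
}

As the comment in the hunk notes, the point of stepping in LVCS_COMPGAINFRAME-sample blocks is that a single large gain change would be audible as a click in the output.
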
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_ReverbGenerator.cpp b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_ReverbGenerator.cpp
index d0e6e09..f6d2453 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_ReverbGenerator.cpp
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_ReverbGenerator.cpp
@@ -20,7 +20,7 @@
 /*  Includes                                                                        */
 /*                                                                                  */
 /************************************************************************************/
-
+#include <stdlib.h>
 #include "LVCS.h"
 #include "LVCS_Private.h"
 #include "LVCS_ReverbGenerator.h"
@@ -57,31 +57,39 @@
 /*  2.  The numerator coefficients of the filter are negated to cause an inversion. */
 /*                                                                                  */
 /************************************************************************************/
-LVCS_ReturnStatus_en LVCS_ReverbGeneratorInit(LVCS_Handle_t     hInstance,
-                                              LVCS_Params_t     *pParams)
-{
+LVCS_ReturnStatus_en LVCS_ReverbGeneratorInit(LVCS_Handle_t hInstance, LVCS_Params_t* pParams) {
+    LVM_UINT16 Delay;
+    LVM_UINT16 Offset;
+    LVCS_Instance_t* pInstance = (LVCS_Instance_t*)hInstance;
+    LVCS_ReverbGenerator_t* pConfig = (LVCS_ReverbGenerator_t*)&pInstance->Reverberation;
+    LVCS_Data_t* pData;
+    LVCS_Coefficient_t* pCoefficients;
+    BQ_FLOAT_Coefs_t Coeffs;
+    const BiquadA012B12CoefsSP_t* pReverbCoefTable;
 
-    LVM_UINT16              Delay;
-    LVM_UINT16              Offset;
-    LVCS_Instance_t         *pInstance = (LVCS_Instance_t  *)hInstance;
-    LVCS_ReverbGenerator_t  *pConfig   = (LVCS_ReverbGenerator_t *)&pInstance->Reverberation;
-    LVCS_Data_t             *pData;
-    LVCS_Coefficient_t      *pCoefficients;
-    BQ_FLOAT_Coefs_t         Coeffs;
-    const BiquadA012B12CoefsSP_t  *pReverbCoefTable;
-
-    pData = (LVCS_Data_t *) \
-                 pInstance->MemoryTable.Region[LVCS_MEMREGION_PERSISTENT_FAST_DATA].pBaseAddress;
-
-    pCoefficients = (LVCS_Coefficient_t *) \
-                 pInstance->MemoryTable.Region[LVCS_MEMREGION_PERSISTENT_FAST_COEF].pBaseAddress;
+    if (pInstance->pData == LVM_NULL) {
+        pInstance->pData = pData = (LVCS_Data_t*)calloc(1, sizeof(*pData));
+        if (pData == LVM_NULL) {
+            return LVCS_NULLADDRESS;
+        }
+    } else {
+        pData = (LVCS_Data_t*)pInstance->pData;
+    }
+    if (pInstance->pCoeff == LVM_NULL) {
+        pInstance->pCoeff = pCoefficients = (LVCS_Coefficient_t*)calloc(1, sizeof(*pCoefficients));
+        if (pCoefficients == LVM_NULL) {
+            return LVCS_NULLADDRESS;
+        }
+    } else {
+        pCoefficients = (LVCS_Coefficient_t*)pInstance->pCoeff;
+    }
 
     /*
      * Initialise the delay and filters if:
      *  - the sample rate has changed
      *  - the speaker type has changed to or from the mobile speaker
      */
-    if(pInstance->Params.SampleRate != pParams->SampleRate )      /* Sample rate change test */
+    if (pInstance->Params.SampleRate != pParams->SampleRate) /* Sample rate change test */
 
     {
         /*
@@ -89,10 +97,10 @@
          */
         Delay = (LVM_UINT16)LVCS_StereoDelayCS[(LVM_UINT16)pParams->SampleRate];
 
-        pConfig->DelaySize      = (LVM_INT16)(2 * Delay);
-        pConfig->DelayOffset    = 0;
-        LoadConst_Float(0,                                            /* Value */
-                        (LVM_FLOAT *)&pConfig->StereoSamples[0],      /* Destination */
+        pConfig->DelaySize = (LVM_INT16)(2 * Delay);
+        pConfig->DelayOffset = 0;
+        LoadConst_Float(0,                                      /* Value */
+                        (LVM_FLOAT*)&pConfig->StereoSamples[0], /* Destination */
                         /* Number of words */
                         (LVM_UINT16)(sizeof(pConfig->StereoSamples) / sizeof(LVM_FLOAT)));
         /*
@@ -108,23 +116,21 @@
         Coeffs.B1 = (LVM_FLOAT)-pReverbCoefTable[Offset].B1;
         Coeffs.B2 = (LVM_FLOAT)-pReverbCoefTable[Offset].B2;
 
-        LoadConst_Float(0,                                 /* Value */
-                        (LVM_FLOAT *)&pData->ReverbBiquadTaps, /* Destination */
+        LoadConst_Float(0,                                    /* Value */
+                        (LVM_FLOAT*)&pData->ReverbBiquadTaps, /* Destination */
                         /* Number of words */
                         (LVM_UINT16)(sizeof(pData->ReverbBiquadTaps) / sizeof(LVM_FLOAT)));
 
         BQ_2I_D16F16Css_TRC_WRA_01_Init(&pCoefficients->ReverbBiquadInstance,
-                                        &pData->ReverbBiquadTaps,
-                                        &Coeffs);
+                                        &pData->ReverbBiquadTaps, &Coeffs);
 
         /* Callbacks */
-        switch(pReverbCoefTable[Offset].Scale)
-        {
+        switch (pReverbCoefTable[Offset].Scale) {
             case 14:
-                pConfig->pBiquadCallBack  = BQ_2I_D16F16C14_TRC_WRA_01;
+                pConfig->pBiquadCallBack = BQ_2I_D16F16C14_TRC_WRA_01;
                 break;
             case 15:
-                pConfig->pBiquadCallBack  = BQ_2I_D16F16C15_TRC_WRA_01;
+                pConfig->pBiquadCallBack = BQ_2I_D16F16C15_TRC_WRA_01;
                 break;
         }
 
@@ -132,16 +138,15 @@
          * Setup the mixer
          */
         pConfig->ProcGain = (LVM_UINT16)(HEADPHONEGAINPROC);
-        pConfig->UnprocGain  = (LVM_UINT16)(HEADPHONEGAINUNPROC);
+        pConfig->UnprocGain = (LVM_UINT16)(HEADPHONEGAINUNPROC);
     }
 
-    if(pInstance->Params.ReverbLevel != pParams->ReverbLevel)
-    {
-        LVM_INT32   ReverbPercentage = 83886;      // 1 Percent Reverb i.e 1/100 in Q 23 format
+    if (pInstance->Params.ReverbLevel != pParams->ReverbLevel) {
+        LVM_INT32 ReverbPercentage = 83886;        // 1 Percent Reverb i.e 1/100 in Q 23 format
         ReverbPercentage *= pParams->ReverbLevel;  // Actual Reverb Level in Q 23 format
-        pConfig->ReverbLevel = ((LVM_FLOAT)(ReverbPercentage>>8)) / 32767.0f;
+        pConfig->ReverbLevel = ((LVM_FLOAT)(ReverbPercentage >> 8)) / 32767.0f;
     }
-    return(LVCS_SUCCESS);
+    return (LVCS_SUCCESS);
 }
 /************************************************************************************/
 /*                                                                                  */
@@ -181,46 +186,37 @@
 /*  2.  The Gain is combined with the LPF and incorporated in to the coefficients   */
 /*                                                                                  */
 /************************************************************************************/
-LVCS_ReturnStatus_en LVCS_ReverbGenerator(LVCS_Handle_t         hInstance,
-                                          const LVM_FLOAT       *pInData,
-                                          LVM_FLOAT             *pOutData,
-                                          LVM_UINT16            NumSamples)
-{
+LVCS_ReturnStatus_en LVCS_ReverbGenerator(LVCS_Handle_t hInstance, const LVM_FLOAT* pInData,
+                                          LVM_FLOAT* pOutData, LVM_UINT16 NumSamples) {
+    LVCS_Instance_t* pInstance = (LVCS_Instance_t*)hInstance;
+    LVCS_ReverbGenerator_t* pConfig = (LVCS_ReverbGenerator_t*)&pInstance->Reverberation;
+    LVCS_Coefficient_t* pCoefficients;
+    LVM_FLOAT* pScratch;
 
-    LVCS_Instance_t         *pInstance = (LVCS_Instance_t  *)hInstance;
-    LVCS_ReverbGenerator_t  *pConfig   = (LVCS_ReverbGenerator_t *)&pInstance->Reverberation;
-    LVCS_Coefficient_t      *pCoefficients;
-    LVM_FLOAT               *pScratch;
-
-    pCoefficients = (LVCS_Coefficient_t *)\
-                   pInstance->MemoryTable.Region[LVCS_MEMREGION_PERSISTENT_FAST_COEF].pBaseAddress;
-
-    pScratch  = (LVM_FLOAT *)\
-                    pInstance->MemoryTable.Region[LVCS_MEMREGION_TEMPORARY_FAST].pBaseAddress;
+    pCoefficients = (LVCS_Coefficient_t*)pInstance->pCoeff;
+    pScratch = (LVM_FLOAT*)pInstance->pScratch;
 
     /*
      * Copy the data to the output in outplace processing
      */
-    if (pInData != pOutData)
-    {
+    if (pInData != pOutData) {
         /*
          * Reverb not required so just copy the data
          */
-        Copy_Float((LVM_FLOAT *)pInData,                                       /* Source */
-                   (LVM_FLOAT *)pOutData,                                      /* Destination */
-                   (LVM_INT16)(2 * NumSamples));                                 /* Left and right */
+        Copy_Float((LVM_FLOAT*)pInData,          /* Source */
+                   (LVM_FLOAT*)pOutData,         /* Destination */
+                   (LVM_INT16)(2 * NumSamples)); /* Left and right */
     }
 
     /*
      * Check if the reverb is required
      */
     /* Disable when CS4MS in stereo mode */
-    if ((((LVCS_OutputDevice_en)pInstance->Params.SpeakerType == LVCS_HEADPHONE) || \
+    if ((((LVCS_OutputDevice_en)pInstance->Params.SpeakerType == LVCS_HEADPHONE) ||
          (pInstance->Params.SpeakerType == LVCS_EX_HEADPHONES) ||
-         (pInstance->Params.SourceFormat != LVCS_STEREO))  &&
-                                    /* For validation testing */
-        ((pInstance->Params.OperatingMode & LVCS_REVERBSWITCH) !=0))
-    {
+         (pInstance->Params.SourceFormat != LVCS_STEREO)) &&
+        /* For validation testing */
+        ((pInstance->Params.OperatingMode & LVCS_REVERBSWITCH) != 0)) {
         /********************************************************************************/
         /*                                                                              */
         /* Copy the input data to scratch memory and filter it                          */
@@ -230,34 +226,26 @@
         /*
          * Copy the input data to the scratch memory
          */
-        Copy_Float((LVM_FLOAT *)pInData,                                     /* Source */
-                   (LVM_FLOAT *)pScratch,                                    /* Destination */
-                   (LVM_INT16)(2 * NumSamples));                               /* Left and right */
+        Copy_Float((LVM_FLOAT*)pInData,          /* Source */
+                   (LVM_FLOAT*)pScratch,         /* Destination */
+                   (LVM_INT16)(2 * NumSamples)); /* Left and right */
 
         /*
          * Filter the data
          */
         (pConfig->pBiquadCallBack)((Biquad_FLOAT_Instance_t*)&pCoefficients->ReverbBiquadInstance,
-                                   (LVM_FLOAT *)pScratch,
-                                   (LVM_FLOAT *)pScratch,
+                                   (LVM_FLOAT*)pScratch, (LVM_FLOAT*)pScratch,
                                    (LVM_INT16)NumSamples);
 
-        Mult3s_Float( (LVM_FLOAT *)pScratch,
-                      pConfig->ReverbLevel,
-                      (LVM_FLOAT *)pScratch,
-                      (LVM_INT16)(2 * NumSamples));
+        Mult3s_Float((LVM_FLOAT*)pScratch, pConfig->ReverbLevel, (LVM_FLOAT*)pScratch,
+                     (LVM_INT16)(2 * NumSamples));
 
         /*
          * Apply the delay mix
          */
-        DelayMix_Float((LVM_FLOAT *)pScratch,
-                       &pConfig->StereoSamples[0],
-                       pConfig->DelaySize,
-                       pOutData,
-                       &pConfig->DelayOffset,
-                       (LVM_INT16)NumSamples);
-
+        DelayMix_Float((LVM_FLOAT*)pScratch, &pConfig->StereoSamples[0], pConfig->DelaySize,
+                       pOutData, &pConfig->DelayOffset, (LVM_INT16)NumSamples);
     }
 
-    return(LVCS_SUCCESS);
+    return (LVCS_SUCCESS);
 }
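
A side note on the ReverbLevel update in LVCS_ReverbGeneratorInit above: the constant 83886 is approximately 2^23 / 100, i.e. one percent expressed in Q23, so multiplying by the integer percentage, shifting right by 8 (Q23 to Q15) and dividing by 32767 yields a float gain close to ReverbLevel / 100 (exactly 1.0 for ReverbLevel = 100). The snippet below is only a standalone check of that arithmetic; reverbLevelToGain is an illustrative name, not a library function.

/*
 * Check of the Q23 reverb-level conversion: level * 83886 is the level in
 * Q23, >> 8 turns it into Q15, and dividing by 32767 gives a float gain.
 */
#include <cstdint>
#include <cstdio>

static float reverbLevelToGain(int levelPercent) {   /* levelPercent in [0, 100] */
    int32_t q23 = 83886 * levelPercent;              /* level in Q23 */
    return (float)(q23 >> 8) / 32767.0f;             /* Q15 -> float */
}

int main() {
    const int levels[] = {0, 1, 50, 100};
    for (int level : levels) {
        std::printf("level %3d%% -> gain %.5f (exact %.5f)\n",
                    level, reverbLevelToGain(level), level / 100.0f);
    }
    return 0;
}
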
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_ReverbGenerator.h b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_ReverbGenerator.h
index 1bc4338..b666da3 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_ReverbGenerator.h
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_ReverbGenerator.h
@@ -32,8 +32,8 @@
 /*                                                                                  */
 /************************************************************************************/
 
-#define     HEADPHONEGAINPROC           LVCS_HEADPHONE_PROCGAIN
-#define     HEADPHONEGAINUNPROC         LVCS_HEADPHONE_UNPROCGAIN
+#define HEADPHONEGAINPROC LVCS_HEADPHONE_PROCGAIN
+#define HEADPHONEGAINUNPROC LVCS_HEADPHONE_UNPROCGAIN
 
 /************************************************************************************/
 /*                                                                                  */
@@ -42,20 +42,17 @@
 /************************************************************************************/
 
 /* Reverberation module structure */
-typedef struct
-{
-
+typedef struct {
     /* Stereo delay */
-    LVM_INT16                   DelaySize;
-    LVM_INT16                   DelayOffset;
-    LVM_INT16                   ProcGain;
-    LVM_INT16                   UnprocGain;
-    LVM_FLOAT                   StereoSamples[2 * LVCS_STEREODELAY_CS_MAX_VAL];
+    LVM_INT16 DelaySize;
+    LVM_INT16 DelayOffset;
+    LVM_INT16 ProcGain;
+    LVM_INT16 UnprocGain;
+    LVM_FLOAT StereoSamples[2 * LVCS_STEREODELAY_CS_MAX_VAL];
     /* Reverb Level */
-    LVM_FLOAT                   ReverbLevel;
+    LVM_FLOAT ReverbLevel;
     /* Filter */
-    void                        (*pBiquadCallBack) (Biquad_FLOAT_Instance_t*,
-                                                    LVM_FLOAT*, LVM_FLOAT*, LVM_INT16);
+    void (*pBiquadCallBack)(Biquad_FLOAT_Instance_t*, LVM_FLOAT*, LVM_FLOAT*, LVM_INT16);
 } LVCS_ReverbGenerator_t;
 
 /************************************************************************************/
@@ -64,11 +61,8 @@
 /*                                                                                    */
 /************************************************************************************/
 
-LVCS_ReturnStatus_en LVCS_ReverbGeneratorInit(LVCS_Handle_t     hInstance,
-                                                 LVCS_Params_t  *pParams);
-LVCS_ReturnStatus_en LVCS_ReverbGenerator(LVCS_Handle_t         hInstance,
-                                          const LVM_FLOAT       *pInput,
-                                          LVM_FLOAT             *pOutput,
-                                          LVM_UINT16            NumSamples);
+LVCS_ReturnStatus_en LVCS_ReverbGeneratorInit(LVCS_Handle_t hInstance, LVCS_Params_t* pParams);
+LVCS_ReturnStatus_en LVCS_ReverbGenerator(LVCS_Handle_t hInstance, const LVM_FLOAT* pInput,
+                                          LVM_FLOAT* pOutput, LVM_UINT16 NumSamples);
 
-#endif  /* REVERB_H */
+#endif /* REVERB_H */
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_StereoEnhancer.cpp b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_StereoEnhancer.cpp
index 7fd8444..ffa9c9b 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_StereoEnhancer.cpp
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_StereoEnhancer.cpp
@@ -49,31 +49,24 @@
 /* NOTES:                                                                           */
 /*                                                                                  */
 /************************************************************************************/
-LVCS_ReturnStatus_en LVCS_SEnhancerInit(LVCS_Handle_t       hInstance,
-                                        LVCS_Params_t       *pParams)
-{
+LVCS_ReturnStatus_en LVCS_SEnhancerInit(LVCS_Handle_t hInstance, LVCS_Params_t* pParams) {
+    LVM_UINT16 Offset;
+    LVCS_Instance_t* pInstance = (LVCS_Instance_t*)hInstance;
+    LVCS_StereoEnhancer_t* pConfig = (LVCS_StereoEnhancer_t*)&pInstance->StereoEnhancer;
+    LVCS_Data_t* pData;
+    LVCS_Coefficient_t* pCoefficient;
+    FO_FLOAT_Coefs_t CoeffsMid;
+    BQ_FLOAT_Coefs_t CoeffsSide;
+    const BiquadA012B12CoefsSP_t* pSESideCoefs;
 
-    LVM_UINT16              Offset;
-    LVCS_Instance_t         *pInstance = (LVCS_Instance_t  *)hInstance;
-    LVCS_StereoEnhancer_t   *pConfig   = (LVCS_StereoEnhancer_t *)&pInstance->StereoEnhancer;
-    LVCS_Data_t             *pData;
-    LVCS_Coefficient_t      *pCoefficient;
-    FO_FLOAT_Coefs_t          CoeffsMid;
-    BQ_FLOAT_Coefs_t          CoeffsSide;
-    const BiquadA012B12CoefsSP_t *pSESideCoefs;
-
-    pData     = (LVCS_Data_t *) \
-                  pInstance->MemoryTable.Region[LVCS_MEMREGION_PERSISTENT_FAST_DATA].pBaseAddress;
-
-    pCoefficient = (LVCS_Coefficient_t *) \
-                  pInstance->MemoryTable.Region[LVCS_MEMREGION_PERSISTENT_FAST_COEF].pBaseAddress;
+    pData = (LVCS_Data_t*)pInstance->pData;
+    pCoefficient = (LVCS_Coefficient_t*)pInstance->pCoeff;
 
     /*
      * If the sample rate or speaker type has changed update the filters
      */
     if ((pInstance->Params.SampleRate != pParams->SampleRate) ||
-        (pInstance->Params.SpeakerType != pParams->SpeakerType))
-    {
+        (pInstance->Params.SpeakerType != pParams->SpeakerType)) {
         /*
          * Set the filter coefficients based on the sample rate
          */
@@ -81,24 +74,22 @@
         Offset = (LVM_UINT16)pParams->SampleRate;
 
         /* Convert incoming coefficients to the required format/ordering */
-        CoeffsMid.A0 = (LVM_FLOAT) LVCS_SEMidCoefTable[Offset].A0;
-        CoeffsMid.A1 = (LVM_FLOAT) LVCS_SEMidCoefTable[Offset].A1;
+        CoeffsMid.A0 = (LVM_FLOAT)LVCS_SEMidCoefTable[Offset].A0;
+        CoeffsMid.A1 = (LVM_FLOAT)LVCS_SEMidCoefTable[Offset].A1;
         CoeffsMid.B1 = (LVM_FLOAT)-LVCS_SEMidCoefTable[Offset].B1;
 
         /* Clear the taps */
-        LoadConst_Float(0,                                  /* Value */
-                        (LVM_FLOAT *)&pData->SEBiquadTapsMid,    /* Destination */
+        LoadConst_Float(0,                                   /* Value */
+                        (LVM_FLOAT*)&pData->SEBiquadTapsMid, /* Destination */
                         /* Number of words */
                         (LVM_UINT16)(sizeof(pData->SEBiquadTapsMid) / sizeof(LVM_FLOAT)));
 
-        FO_1I_D16F16Css_TRC_WRA_01_Init(&pCoefficient->SEBiquadInstanceMid,
-                                        &pData->SEBiquadTapsMid,
+        FO_1I_D16F16Css_TRC_WRA_01_Init(&pCoefficient->SEBiquadInstanceMid, &pData->SEBiquadTapsMid,
                                         &CoeffsMid);
 
         /* Callbacks */
-        if(LVCS_SEMidCoefTable[Offset].Scale == 15)
-        {
-            pConfig->pBiquadCallBack_Mid  = FO_1I_D16F16C15_TRC_WRA_01;
+        if (LVCS_SEMidCoefTable[Offset].Scale == 15) {
+            pConfig->pBiquadCallBack_Mid = FO_1I_D16F16C15_TRC_WRA_01;
         }
 
         Offset = (LVM_UINT16)(pParams->SampleRate);
@@ -106,39 +97,35 @@
 
         /* Side filter */
         /* Convert incoming coefficients to the required format/ordering */
-        CoeffsSide.A0 = (LVM_FLOAT) pSESideCoefs[Offset].A0;
-        CoeffsSide.A1 = (LVM_FLOAT) pSESideCoefs[Offset].A1;
-        CoeffsSide.A2 = (LVM_FLOAT) pSESideCoefs[Offset].A2;
+        CoeffsSide.A0 = (LVM_FLOAT)pSESideCoefs[Offset].A0;
+        CoeffsSide.A1 = (LVM_FLOAT)pSESideCoefs[Offset].A1;
+        CoeffsSide.A2 = (LVM_FLOAT)pSESideCoefs[Offset].A2;
         CoeffsSide.B1 = (LVM_FLOAT)-pSESideCoefs[Offset].B1;
         CoeffsSide.B2 = (LVM_FLOAT)-pSESideCoefs[Offset].B2;
 
         /* Clear the taps */
-        LoadConst_Float(0,                                /* Value */
-                        (LVM_FLOAT *)&pData->SEBiquadTapsSide, /* Destination */
+        LoadConst_Float(0,                                    /* Value */
+                        (LVM_FLOAT*)&pData->SEBiquadTapsSide, /* Destination */
                         /* Number of words */
                         (LVM_UINT16)(sizeof(pData->SEBiquadTapsSide) / sizeof(LVM_FLOAT)));
         /* Callbacks */
-        switch(pSESideCoefs[Offset].Scale)
-        {
+        switch (pSESideCoefs[Offset].Scale) {
             case 14:
                 BQ_1I_D16F32Css_TRC_WRA_01_Init(&pCoefficient->SEBiquadInstanceSide,
-                                                &pData->SEBiquadTapsSide,
-                                                &CoeffsSide);
+                                                &pData->SEBiquadTapsSide, &CoeffsSide);
 
-                pConfig->pBiquadCallBack_Side  = BQ_1I_D16F32C14_TRC_WRA_01;
+                pConfig->pBiquadCallBack_Side = BQ_1I_D16F32C14_TRC_WRA_01;
                 break;
             case 15:
                 BQ_1I_D16F16Css_TRC_WRA_01_Init(&pCoefficient->SEBiquadInstanceSide,
-                                                &pData->SEBiquadTapsSide,
-                                                &CoeffsSide);
+                                                &pData->SEBiquadTapsSide, &CoeffsSide);
 
-                pConfig->pBiquadCallBack_Side  = BQ_1I_D16F16C15_TRC_WRA_01;
+                pConfig->pBiquadCallBack_Side = BQ_1I_D16F16C15_TRC_WRA_01;
                 break;
         }
-
     }
 
-    return(LVCS_SUCCESS);
+    return (LVCS_SUCCESS);
 }
 /************************************************************************************/
 /*                                                                                  */
@@ -178,85 +165,61 @@
 /*  1.  The side filter is not used in Mobile Speaker mode                          */
 /*                                                                                  */
 /************************************************************************************/
-LVCS_ReturnStatus_en LVCS_StereoEnhancer(LVCS_Handle_t          hInstance,
-                                         const LVM_FLOAT        *pInData,
-                                         LVM_FLOAT              *pOutData,
-                                         LVM_UINT16             NumSamples)
-{
-
-    LVCS_Instance_t         *pInstance = (LVCS_Instance_t  *)hInstance;
-    LVCS_StereoEnhancer_t   *pConfig   = (LVCS_StereoEnhancer_t *)&pInstance->StereoEnhancer;
-    LVCS_Coefficient_t      *pCoefficient;
-    LVM_FLOAT               *pScratch;
-
-    pCoefficient = (LVCS_Coefficient_t *) \
-                   pInstance->MemoryTable.Region[LVCS_MEMREGION_PERSISTENT_FAST_COEF].pBaseAddress;
-
-    pScratch  = (LVM_FLOAT *) \
-                    pInstance->MemoryTable.Region[LVCS_MEMREGION_TEMPORARY_FAST].pBaseAddress;
+LVCS_ReturnStatus_en LVCS_StereoEnhancer(LVCS_Handle_t hInstance, const LVM_FLOAT* pInData,
+                                         LVM_FLOAT* pOutData, LVM_UINT16 NumSamples) {
+    LVCS_Instance_t* pInstance = (LVCS_Instance_t*)hInstance;
+    LVCS_StereoEnhancer_t* pConfig = (LVCS_StereoEnhancer_t*)&pInstance->StereoEnhancer;
+    LVCS_Coefficient_t* pCoefficient;
+    LVM_FLOAT* pScratch;
+    pCoefficient = (LVCS_Coefficient_t*)pInstance->pCoeff;
+    pScratch = (LVM_FLOAT*)pInstance->pScratch;
     /*
      * Check if the Stereo Enhancer is enabled
      */
-    if ((pInstance->Params.OperatingMode & LVCS_STEREOENHANCESWITCH) != 0)
-        {
+    if ((pInstance->Params.OperatingMode & LVCS_STEREOENHANCESWITCH) != 0) {
         /*
          * Convert from stereo to middle and side
          */
-        From2iToMS_Float(pInData,
-                         pScratch,
-                         pScratch + NumSamples,
-                         (LVM_INT16)NumSamples);
+        From2iToMS_Float(pInData, pScratch, pScratch + NumSamples, (LVM_INT16)NumSamples);
 
         /*
          * Apply filter to the middle signal
          */
-        if (pInstance->OutputDevice == LVCS_HEADPHONE)
-        {
-            (pConfig->pBiquadCallBack_Mid)((Biquad_FLOAT_Instance_t*)\
-                                            &pCoefficient->SEBiquadInstanceMid,
-                                            (LVM_FLOAT *)pScratch,
-                                            (LVM_FLOAT *)pScratch,
-                                            (LVM_INT16)NumSamples);
-        }
-        else
-        {
-            Mult3s_Float(pScratch,              /* Source */
-                         (LVM_FLOAT)pConfig->MidGain,      /* Gain */
-                         pScratch,              /* Destination */
-                         (LVM_INT16)NumSamples);           /* Number of samples */
+        if (pInstance->OutputDevice == LVCS_HEADPHONE) {
+            (pConfig->pBiquadCallBack_Mid)(
+                    (Biquad_FLOAT_Instance_t*)&pCoefficient->SEBiquadInstanceMid,
+                    (LVM_FLOAT*)pScratch, (LVM_FLOAT*)pScratch, (LVM_INT16)NumSamples);
+        } else {
+            Mult3s_Float(pScratch,                    /* Source */
+                         (LVM_FLOAT)pConfig->MidGain, /* Gain */
+                         pScratch,                    /* Destination */
+                         (LVM_INT16)NumSamples);      /* Number of samples */
         }
 
         /*
          * Apply the filter the side signal only in stereo mode for headphones
          * and in all modes for mobile speakers
          */
-        if (pInstance->Params.SourceFormat == LVCS_STEREO)
-        {
-            (pConfig->pBiquadCallBack_Side)((Biquad_FLOAT_Instance_t*) \
-                                            &pCoefficient->SEBiquadInstanceSide,
-                                            (LVM_FLOAT *)(pScratch + NumSamples),
-                                            (LVM_FLOAT *)(pScratch + NumSamples),
-                                            (LVM_INT16)NumSamples);
+        if (pInstance->Params.SourceFormat == LVCS_STEREO) {
+            (pConfig->pBiquadCallBack_Side)(
+                    (Biquad_FLOAT_Instance_t*)&pCoefficient->SEBiquadInstanceSide,
+                    (LVM_FLOAT*)(pScratch + NumSamples), (LVM_FLOAT*)(pScratch + NumSamples),
+                    (LVM_INT16)NumSamples);
         }
 
         /*
          * Convert from middle and side to stereo
          */
-        MSTo2i_Sat_Float(pScratch,
-                         pScratch + NumSamples,
-                         pOutData,
-                         (LVM_INT16)NumSamples);
+        MSTo2i_Sat_Float(pScratch, pScratch + NumSamples, pOutData, (LVM_INT16)NumSamples);
 
-    }
-    else
-    {
+    } else {
         /*
          * The stereo enhancer is disabled so just copy the data
          */
-        Copy_Float((LVM_FLOAT *)pInData,           /* Source */
-                   (LVM_FLOAT *)pOutData,          /* Destination */
-                   (LVM_INT16)(2 * NumSamples));     /* Left and right */
+        Copy_Float((LVM_FLOAT*)pInData,          /* Source */
+                   (LVM_FLOAT*)pOutData,         /* Destination */
+                   (LVM_INT16)(2 * NumSamples)); /* Left and right */
     }
 
-    return(LVCS_SUCCESS);
+    return (LVCS_SUCCESS);
 }
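
For context on the LVCS_StereoEnhancer hunk above: the enhancer works in the mid/side domain, converting the stereo input into mid and side signals in scratch memory, filtering (or gain-scaling) the mid, filtering the side only for stereo sources, and converting back with saturation. The sketch below shows that round trip under the conventional definitions M = (L + R) / 2 and S = (L - R) / 2; the library's actual From2iToMS_Float and MSTo2i_Sat_Float helpers live in the shared LVM sources and may scale or saturate differently, so treat this as an approximation.

/*
 * Sketch of a mid/side round trip with a simple clamp standing in for the
 * saturating reconversion.
 */
#include <algorithm>
#include <cstdio>
#include <vector>

/* Interleaved stereo -> split mid/side buffers. */
static void stereoToMidSide(const float* in, float* mid, float* side, int numFrames) {
    for (int i = 0; i < numFrames; ++i) {
        mid[i] = 0.5f * (in[2 * i] + in[2 * i + 1]);
        side[i] = 0.5f * (in[2 * i] - in[2 * i + 1]);
    }
}

/* Mid/side -> interleaved stereo, clamped to [-1, 1]. */
static void midSideToStereo(const float* mid, const float* side, float* out, int numFrames) {
    for (int i = 0; i < numFrames; ++i) {
        out[2 * i] = std::clamp(mid[i] + side[i], -1.0f, 1.0f);
        out[2 * i + 1] = std::clamp(mid[i] - side[i], -1.0f, 1.0f);
    }
}

int main() {
    const int frames = 4;
    std::vector<float> in = {0.2f, 0.1f, -0.3f, 0.3f, 0.5f, 0.5f, -0.9f, 0.9f};
    std::vector<float> mid(frames), side(frames), out(2 * frames);

    stereoToMidSide(in.data(), mid.data(), side.data(), frames);
    /* A widening effect processes only the side signal here; a flat 1.5x
     * boost is purely illustrative. */
    for (float& s : side) s *= 1.5f;
    midSideToStereo(mid.data(), side.data(), out.data(), frames);

    for (int i = 0; i < frames; ++i) {
        std::printf("frame %d: L %.3f -> %.3f, R %.3f -> %.3f\n",
                    i, in[2 * i], out[2 * i], in[2 * i + 1], out[2 * i + 1]);
    }
    return 0;
}

Processing only the side signal widens the perceived stereo image while leaving centered (mid) content largely untouched, which is why the side filter is skipped for non-stereo sources in the hunk above.
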
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_StereoEnhancer.h b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_StereoEnhancer.h
index 12a5982..c92f8a5 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_StereoEnhancer.h
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_StereoEnhancer.h
@@ -24,8 +24,8 @@
 /*                                                                                  */
 /************************************************************************************/
 
-#include "Filters.h"                        /* Filter definitions */
-#include "LVCS_Headphone_Coeffs.h"          /* Headphone coefficients */
+#include "Filters.h"               /* Filter definitions */
+#include "LVCS_Headphone_Coeffs.h" /* Headphone coefficients */
 #include "BIQUAD.h"
 
 /************************************************************************************/
@@ -35,21 +35,17 @@
 /************************************************************************************/
 
 /* Stereo enhancer structure */
-typedef struct
-{
-
+typedef struct {
     /*
      * Middle filter
      */
-    void                    (*pBiquadCallBack_Mid)(Biquad_FLOAT_Instance_t*,
-                                    LVM_FLOAT*, LVM_FLOAT*, LVM_INT16);
+    void (*pBiquadCallBack_Mid)(Biquad_FLOAT_Instance_t*, LVM_FLOAT*, LVM_FLOAT*, LVM_INT16);
 
     /*
      * Side filter
      */
-    void                    (*pBiquadCallBack_Side)(Biquad_FLOAT_Instance_t*,
-                                    LVM_FLOAT*, LVM_FLOAT*, LVM_INT16);
-    LVM_FLOAT              MidGain;            /* Middle gain in mobile speaker mode */
+    void (*pBiquadCallBack_Side)(Biquad_FLOAT_Instance_t*, LVM_FLOAT*, LVM_FLOAT*, LVM_INT16);
+    LVM_FLOAT MidGain; /* Middle gain in mobile speaker mode */
 } LVCS_StereoEnhancer_t;
 
 /************************************************************************************/
@@ -58,12 +54,9 @@
 /*                                                                                  */
 /************************************************************************************/
 
-LVCS_ReturnStatus_en LVCS_SEnhancerInit(LVCS_Handle_t        hInstance,
-                                        LVCS_Params_t        *pParams);
+LVCS_ReturnStatus_en LVCS_SEnhancerInit(LVCS_Handle_t hInstance, LVCS_Params_t* pParams);
 
-LVCS_ReturnStatus_en LVCS_StereoEnhancer(LVCS_Handle_t        hInstance,
-                                         const LVM_FLOAT    *pInData,
-                                         LVM_FLOAT            *pOutData,
-                                         LVM_UINT16            NumSamples);
+LVCS_ReturnStatus_en LVCS_StereoEnhancer(LVCS_Handle_t hInstance, const LVM_FLOAT* pInData,
+                                         LVM_FLOAT* pOutData, LVM_UINT16 NumSamples);
 
-#endif  /* STEREOENHANCE_H */
+#endif /* STEREOENHANCE_H */
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Tables.cpp b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Tables.cpp
index d79db61..55b5243 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Tables.cpp
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Tables.cpp
@@ -23,9 +23,9 @@
 
 #include "LVCS_Private.h"
 #include "LVCS_Tables.h"
-#include "Filters.h"                            /* Filter definitions */
-#include "BIQUAD.h"                             /* Biquad definitions */
-#include "LVCS_Headphone_Coeffs.h"              /* Headphone coefficients */
+#include "Filters.h"               /* Filter definitions */
+#include "BIQUAD.h"                /* Biquad definitions */
+#include "LVCS_Headphone_Coeffs.h" /* Headphone coefficients */
 
 /************************************************************************************/
 /*                                                                                  */
@@ -35,144 +35,75 @@
 
 /* Coefficient table for the middle filter */
 const BiquadA01B1CoefsSP_t LVCS_SEMidCoefTable[] = {
-    {CS_MIDDLE_8000_A0,         /* 8kS/s coefficients */
-     CS_MIDDLE_8000_A1,
-     CS_MIDDLE_8000_B1,
-     (LVM_UINT16 )CS_MIDDLE_8000_SCALE},
-    {CS_MIDDLE_11025_A0,        /* 11kS/s coefficients */
-     CS_MIDDLE_11025_A1,
-     CS_MIDDLE_11025_B1,
-     (LVM_UINT16 )CS_MIDDLE_11025_SCALE},
-    {CS_MIDDLE_12000_A0,        /* 12kS/s coefficients */
-     CS_MIDDLE_12000_A1,
-     CS_MIDDLE_12000_B1,
-     (LVM_UINT16 )CS_MIDDLE_12000_SCALE},
-    {CS_MIDDLE_16000_A0,        /* 16kS/s coefficients */
-     CS_MIDDLE_16000_A1,
-     CS_MIDDLE_16000_B1,
-     (LVM_UINT16 )CS_MIDDLE_16000_SCALE},
-    {CS_MIDDLE_22050_A0,        /* 22kS/s coefficients */
-     CS_MIDDLE_22050_A1,
-     CS_MIDDLE_22050_B1,
-     (LVM_UINT16 )CS_MIDDLE_22050_SCALE},
-    {CS_MIDDLE_24000_A0,        /* 24kS/s coefficients */
-     CS_MIDDLE_24000_A1,
-     CS_MIDDLE_24000_B1,
-     (LVM_UINT16 )CS_MIDDLE_24000_SCALE},
-    {CS_MIDDLE_32000_A0,        /* 32kS/s coefficients */
-     CS_MIDDLE_32000_A1,
-     CS_MIDDLE_32000_B1,
-     (LVM_UINT16 )CS_MIDDLE_32000_SCALE},
-    {CS_MIDDLE_44100_A0,        /* 44kS/s coefficients */
-     CS_MIDDLE_44100_A1,
-     CS_MIDDLE_44100_B1,
-     (LVM_UINT16 )CS_MIDDLE_44100_SCALE},
-    {CS_MIDDLE_48000_A0,        /* 48kS/s coefficients */
-     CS_MIDDLE_48000_A1,
-     CS_MIDDLE_48000_B1,
-     (LVM_UINT16 )CS_MIDDLE_48000_SCALE}
-    ,
-    {CS_MIDDLE_88200_A0,        /* 88kS/s coefficients */
-     CS_MIDDLE_88200_A1,
-     CS_MIDDLE_88200_B1,
-     (LVM_UINT16)CS_MIDDLE_88200_SCALE},
-    {CS_MIDDLE_96000_A0,        /* 96kS/s coefficients */
-     CS_MIDDLE_96000_A1,
-     CS_MIDDLE_96000_B1,
-     (LVM_UINT16 )CS_MIDDLE_96000_SCALE},
-    {CS_MIDDLE_176400_A0,        /* 176kS/s coefficients */
-     CS_MIDDLE_176400_A1,
-     CS_MIDDLE_176400_B1,
-     (LVM_UINT16)CS_MIDDLE_176400_SCALE},
-    {CS_MIDDLE_192000_A0,        /* 192kS/s coefficients */
-     CS_MIDDLE_192000_A1,
-     CS_MIDDLE_192000_B1,
-     (LVM_UINT16 )CS_MIDDLE_192000_SCALE}
-    };
+        {CS_MIDDLE_8000_A0, /* 8kS/s coefficients */
+         CS_MIDDLE_8000_A1, CS_MIDDLE_8000_B1, (LVM_UINT16)CS_MIDDLE_8000_SCALE},
+        {CS_MIDDLE_11025_A0, /* 11kS/s coefficients */
+         CS_MIDDLE_11025_A1, CS_MIDDLE_11025_B1, (LVM_UINT16)CS_MIDDLE_11025_SCALE},
+        {CS_MIDDLE_12000_A0, /* 12kS/s coefficients */
+         CS_MIDDLE_12000_A1, CS_MIDDLE_12000_B1, (LVM_UINT16)CS_MIDDLE_12000_SCALE},
+        {CS_MIDDLE_16000_A0, /* 16kS/s coefficients */
+         CS_MIDDLE_16000_A1, CS_MIDDLE_16000_B1, (LVM_UINT16)CS_MIDDLE_16000_SCALE},
+        {CS_MIDDLE_22050_A0, /* 22kS/s coefficients */
+         CS_MIDDLE_22050_A1, CS_MIDDLE_22050_B1, (LVM_UINT16)CS_MIDDLE_22050_SCALE},
+        {CS_MIDDLE_24000_A0, /* 24kS/s coefficients */
+         CS_MIDDLE_24000_A1, CS_MIDDLE_24000_B1, (LVM_UINT16)CS_MIDDLE_24000_SCALE},
+        {CS_MIDDLE_32000_A0, /* 32kS/s coefficients */
+         CS_MIDDLE_32000_A1, CS_MIDDLE_32000_B1, (LVM_UINT16)CS_MIDDLE_32000_SCALE},
+        {CS_MIDDLE_44100_A0, /* 44kS/s coefficients */
+         CS_MIDDLE_44100_A1, CS_MIDDLE_44100_B1, (LVM_UINT16)CS_MIDDLE_44100_SCALE},
+        {CS_MIDDLE_48000_A0, /* 48kS/s coefficients */
+         CS_MIDDLE_48000_A1, CS_MIDDLE_48000_B1, (LVM_UINT16)CS_MIDDLE_48000_SCALE},
+        {CS_MIDDLE_88200_A0, /* 88kS/s coefficients */
+         CS_MIDDLE_88200_A1, CS_MIDDLE_88200_B1, (LVM_UINT16)CS_MIDDLE_88200_SCALE},
+        {CS_MIDDLE_96000_A0, /* 96kS/s coefficients */
+         CS_MIDDLE_96000_A1, CS_MIDDLE_96000_B1, (LVM_UINT16)CS_MIDDLE_96000_SCALE},
+        {CS_MIDDLE_176400_A0, /* 176kS/s coefficients */
+         CS_MIDDLE_176400_A1, CS_MIDDLE_176400_B1, (LVM_UINT16)CS_MIDDLE_176400_SCALE},
+        {CS_MIDDLE_192000_A0, /* 192kS/s coefficients */
+         CS_MIDDLE_192000_A1, CS_MIDDLE_192000_B1, (LVM_UINT16)CS_MIDDLE_192000_SCALE}};
 
 /* Coefficient table for the side filter */
 const BiquadA012B12CoefsSP_t LVCS_SESideCoefTable[] = {
-    /* Headphone Side coefficients */
-    {CS_SIDE_8000_A0,           /* 8kS/s coefficients */
-     CS_SIDE_8000_A1,
-     CS_SIDE_8000_A2,
-     CS_SIDE_8000_B1,
-     CS_SIDE_8000_B2,
-     (LVM_UINT16 )CS_SIDE_8000_SCALE},
-    {CS_SIDE_11025_A0,          /* 11kS/s coefficients */
-     CS_SIDE_11025_A1,
-     CS_SIDE_11025_A2,
-     CS_SIDE_11025_B1,
-     CS_SIDE_11025_B2,
-     (LVM_UINT16 )CS_SIDE_11025_SCALE},
-    {CS_SIDE_12000_A0,          /* 12kS/s coefficients */
-     CS_SIDE_12000_A1,
-     CS_SIDE_12000_A2,
-     CS_SIDE_12000_B1,
-     CS_SIDE_12000_B2,
-     (LVM_UINT16 )CS_SIDE_12000_SCALE},
-    {CS_SIDE_16000_A0,          /* 16kS/s coefficients */
-     CS_SIDE_16000_A1,
-     CS_SIDE_16000_A2,
-     CS_SIDE_16000_B1,
-     CS_SIDE_16000_B2,
-     (LVM_UINT16 )CS_SIDE_16000_SCALE},
-    {CS_SIDE_22050_A0,          /* 22kS/s coefficients */
-     CS_SIDE_22050_A1,
-     CS_SIDE_22050_A2,
-     CS_SIDE_22050_B1,
-     CS_SIDE_22050_B2,
-     (LVM_UINT16 )CS_SIDE_22050_SCALE},
-    {CS_SIDE_24000_A0,          /* 24kS/s coefficients */
-     CS_SIDE_24000_A1,
-     CS_SIDE_24000_A2,
-     CS_SIDE_24000_B1,
-     CS_SIDE_24000_B2,
-     (LVM_UINT16 )CS_SIDE_24000_SCALE},
-    {CS_SIDE_32000_A0,          /* 32kS/s coefficients */
-     CS_SIDE_32000_A1,
-     CS_SIDE_32000_A2,
-     CS_SIDE_32000_B1,
-     CS_SIDE_32000_B2,
-     (LVM_UINT16 )CS_SIDE_32000_SCALE},
-    {CS_SIDE_44100_A0,          /* 44kS/s coefficients */
-     CS_SIDE_44100_A1,
-     CS_SIDE_44100_A2,
-     CS_SIDE_44100_B1,
-     CS_SIDE_44100_B2,
-     (LVM_UINT16 )CS_SIDE_44100_SCALE},
-    {CS_SIDE_48000_A0,          /* 48kS/s coefficients */
-     CS_SIDE_48000_A1,
-     CS_SIDE_48000_A2,
-     CS_SIDE_48000_B1,
-     CS_SIDE_48000_B2,
-     (LVM_UINT16 )CS_SIDE_48000_SCALE}
-     ,
-    {CS_SIDE_88200_A0,          /* 88kS/s coefficients */
-     CS_SIDE_88200_A1,
-     CS_SIDE_88200_A2,
-     CS_SIDE_88200_B1,
-     CS_SIDE_88200_B2,
-     (LVM_UINT16)CS_SIDE_88200_SCALE},
-     {CS_SIDE_96000_A0,          /* 96kS/s coefficients */
-     CS_SIDE_96000_A1,
-     CS_SIDE_96000_A2,
-     CS_SIDE_96000_B1,
-     CS_SIDE_96000_B2,
-     (LVM_UINT16 )CS_SIDE_96000_SCALE},
-    {CS_SIDE_176400_A0,          /*176kS/s coefficients */
-     CS_SIDE_176400_A1,
-     CS_SIDE_176400_A2,
-     CS_SIDE_176400_B1,
-     CS_SIDE_176400_B2,
-     (LVM_UINT16)CS_SIDE_176400_SCALE},
-     {CS_SIDE_192000_A0,          /* 192kS/s coefficients */
-     CS_SIDE_192000_A1,
-     CS_SIDE_192000_A2,
-     CS_SIDE_192000_B1,
-     CS_SIDE_192000_B2,
-     (LVM_UINT16 )CS_SIDE_192000_SCALE}
-};
+        /* Headphone Side coefficients */
+        {CS_SIDE_8000_A0, /* 8kS/s coefficients */
+         CS_SIDE_8000_A1, CS_SIDE_8000_A2, CS_SIDE_8000_B1, CS_SIDE_8000_B2,
+         (LVM_UINT16)CS_SIDE_8000_SCALE},
+        {CS_SIDE_11025_A0, /* 11kS/s coefficients */
+         CS_SIDE_11025_A1, CS_SIDE_11025_A2, CS_SIDE_11025_B1, CS_SIDE_11025_B2,
+         (LVM_UINT16)CS_SIDE_11025_SCALE},
+        {CS_SIDE_12000_A0, /* 12kS/s coefficients */
+         CS_SIDE_12000_A1, CS_SIDE_12000_A2, CS_SIDE_12000_B1, CS_SIDE_12000_B2,
+         (LVM_UINT16)CS_SIDE_12000_SCALE},
+        {CS_SIDE_16000_A0, /* 16kS/s coefficients */
+         CS_SIDE_16000_A1, CS_SIDE_16000_A2, CS_SIDE_16000_B1, CS_SIDE_16000_B2,
+         (LVM_UINT16)CS_SIDE_16000_SCALE},
+        {CS_SIDE_22050_A0, /* 22kS/s coefficients */
+         CS_SIDE_22050_A1, CS_SIDE_22050_A2, CS_SIDE_22050_B1, CS_SIDE_22050_B2,
+         (LVM_UINT16)CS_SIDE_22050_SCALE},
+        {CS_SIDE_24000_A0, /* 24kS/s coefficients */
+         CS_SIDE_24000_A1, CS_SIDE_24000_A2, CS_SIDE_24000_B1, CS_SIDE_24000_B2,
+         (LVM_UINT16)CS_SIDE_24000_SCALE},
+        {CS_SIDE_32000_A0, /* 32kS/s coefficients */
+         CS_SIDE_32000_A1, CS_SIDE_32000_A2, CS_SIDE_32000_B1, CS_SIDE_32000_B2,
+         (LVM_UINT16)CS_SIDE_32000_SCALE},
+        {CS_SIDE_44100_A0, /* 44kS/s coefficients */
+         CS_SIDE_44100_A1, CS_SIDE_44100_A2, CS_SIDE_44100_B1, CS_SIDE_44100_B2,
+         (LVM_UINT16)CS_SIDE_44100_SCALE},
+        {CS_SIDE_48000_A0, /* 48kS/s coefficients */
+         CS_SIDE_48000_A1, CS_SIDE_48000_A2, CS_SIDE_48000_B1, CS_SIDE_48000_B2,
+         (LVM_UINT16)CS_SIDE_48000_SCALE},
+        {CS_SIDE_88200_A0, /* 88kS/s coefficients */
+         CS_SIDE_88200_A1, CS_SIDE_88200_A2, CS_SIDE_88200_B1, CS_SIDE_88200_B2,
+         (LVM_UINT16)CS_SIDE_88200_SCALE},
+        {CS_SIDE_96000_A0, /* 96kS/s coefficients */
+         CS_SIDE_96000_A1, CS_SIDE_96000_A2, CS_SIDE_96000_B1, CS_SIDE_96000_B2,
+         (LVM_UINT16)CS_SIDE_96000_SCALE},
+        {CS_SIDE_176400_A0, /* 176kS/s coefficients */
+         CS_SIDE_176400_A1, CS_SIDE_176400_A2, CS_SIDE_176400_B1, CS_SIDE_176400_B2,
+         (LVM_UINT16)CS_SIDE_176400_SCALE},
+        {CS_SIDE_192000_A0, /* 192kS/s coefficients */
+         CS_SIDE_192000_A1, CS_SIDE_192000_A2, CS_SIDE_192000_B1, CS_SIDE_192000_B2,
+         (LVM_UINT16)CS_SIDE_192000_SCALE}};
 
 /************************************************************************************/
 /*                                                                                  */
@@ -181,167 +112,87 @@
 /************************************************************************************/
 
 const BiquadA012B12CoefsSP_t LVCS_EqualiserCoefTable[] = {
-    /* Headphone coefficients */
-    {CS_EQUALISER_8000_A0,      /* 8kS/s coefficients */
-     CS_EQUALISER_8000_A1,
-     CS_EQUALISER_8000_A2,
-     CS_EQUALISER_8000_B1,
-     CS_EQUALISER_8000_B2,
-     (LVM_UINT16 )CS_EQUALISER_8000_SCALE},
-    {CS_EQUALISER_11025_A0,     /* 11kS/s coefficients */
-     CS_EQUALISER_11025_A1,
-     CS_EQUALISER_11025_A2,
-     CS_EQUALISER_11025_B1,
-     CS_EQUALISER_11025_B2,
-     (LVM_UINT16 )CS_EQUALISER_11025_SCALE},
-    {CS_EQUALISER_12000_A0,     /* 12kS/s coefficients */
-     CS_EQUALISER_12000_A1,
-     CS_EQUALISER_12000_A2,
-     CS_EQUALISER_12000_B1,
-     CS_EQUALISER_12000_B2,
-     (LVM_UINT16 )CS_EQUALISER_12000_SCALE},
-    {CS_EQUALISER_16000_A0,     /* 16kS/s coefficients */
-     CS_EQUALISER_16000_A1,
-     CS_EQUALISER_16000_A2,
-     CS_EQUALISER_16000_B1,
-     CS_EQUALISER_16000_B2,
-     (LVM_UINT16 )CS_EQUALISER_16000_SCALE},
-    {CS_EQUALISER_22050_A0,     /* 22kS/s coefficients */
-     CS_EQUALISER_22050_A1,
-     CS_EQUALISER_22050_A2,
-     CS_EQUALISER_22050_B1,
-     CS_EQUALISER_22050_B2,
-     (LVM_UINT16 )CS_EQUALISER_22050_SCALE},
-    {CS_EQUALISER_24000_A0,     /* 24kS/s coefficients */
-     CS_EQUALISER_24000_A1,
-     CS_EQUALISER_24000_A2,
-     CS_EQUALISER_24000_B1,
-     CS_EQUALISER_24000_B2,
-     (LVM_UINT16 )CS_EQUALISER_24000_SCALE},
-    {CS_EQUALISER_32000_A0,     /* 32kS/s coefficients */
-     CS_EQUALISER_32000_A1,
-     CS_EQUALISER_32000_A2,
-     CS_EQUALISER_32000_B1,
-     CS_EQUALISER_32000_B2,
-     (LVM_UINT16 )CS_EQUALISER_32000_SCALE},
-    {CS_EQUALISER_44100_A0,     /* 44kS/s coefficients */
-     CS_EQUALISER_44100_A1,
-     CS_EQUALISER_44100_A2,
-     CS_EQUALISER_44100_B1,
-     CS_EQUALISER_44100_B2,
-     (LVM_UINT16 )CS_EQUALISER_44100_SCALE},
-    {CS_EQUALISER_48000_A0,     /* 48kS/s coefficients */
-     CS_EQUALISER_48000_A1,
-     CS_EQUALISER_48000_A2,
-     CS_EQUALISER_48000_B1,
-     CS_EQUALISER_48000_B2,
-     (LVM_UINT16 )CS_EQUALISER_48000_SCALE},
-    {CS_EQUALISER_88200_A0,     /* 88kS/s coeffieients */
-     CS_EQUALISER_88200_A1,
-     CS_EQUALISER_88200_A2,
-     CS_EQUALISER_88200_B1,
-     CS_EQUALISER_88200_B2,
-     (LVM_UINT16)CS_EQUALISER_88200_SCALE},
-    {CS_EQUALISER_96000_A0,     /* 96kS/s coefficients */
-     CS_EQUALISER_96000_A1,
-     CS_EQUALISER_96000_A2,
-     CS_EQUALISER_96000_B1,
-     CS_EQUALISER_96000_B2,
-     (LVM_UINT16 )CS_EQUALISER_96000_SCALE},
-    {CS_EQUALISER_176400_A0,     /* 176kS/s coefficients */
-     CS_EQUALISER_176400_A1,
-     CS_EQUALISER_176400_A2,
-     CS_EQUALISER_176400_B1,
-     CS_EQUALISER_176400_B2,
-     (LVM_UINT16)CS_EQUALISER_176400_SCALE},
-    {CS_EQUALISER_192000_A0,     /* 192kS/s coefficients */
-     CS_EQUALISER_192000_A1,
-     CS_EQUALISER_192000_A2,
-     CS_EQUALISER_192000_B1,
-     CS_EQUALISER_192000_B2,
-     (LVM_UINT16 )CS_EQUALISER_192000_SCALE},
+        /* Headphone coefficients */
+        {CS_EQUALISER_8000_A0, /* 8kS/s coefficients */
+         CS_EQUALISER_8000_A1, CS_EQUALISER_8000_A2, CS_EQUALISER_8000_B1, CS_EQUALISER_8000_B2,
+         (LVM_UINT16)CS_EQUALISER_8000_SCALE},
+        {CS_EQUALISER_11025_A0, /* 11kS/s coefficients */
+         CS_EQUALISER_11025_A1, CS_EQUALISER_11025_A2, CS_EQUALISER_11025_B1, CS_EQUALISER_11025_B2,
+         (LVM_UINT16)CS_EQUALISER_11025_SCALE},
+        {CS_EQUALISER_12000_A0, /* 12kS/s coefficients */
+         CS_EQUALISER_12000_A1, CS_EQUALISER_12000_A2, CS_EQUALISER_12000_B1, CS_EQUALISER_12000_B2,
+         (LVM_UINT16)CS_EQUALISER_12000_SCALE},
+        {CS_EQUALISER_16000_A0, /* 16kS/s coefficients */
+         CS_EQUALISER_16000_A1, CS_EQUALISER_16000_A2, CS_EQUALISER_16000_B1, CS_EQUALISER_16000_B2,
+         (LVM_UINT16)CS_EQUALISER_16000_SCALE},
+        {CS_EQUALISER_22050_A0, /* 22kS/s coefficients */
+         CS_EQUALISER_22050_A1, CS_EQUALISER_22050_A2, CS_EQUALISER_22050_B1, CS_EQUALISER_22050_B2,
+         (LVM_UINT16)CS_EQUALISER_22050_SCALE},
+        {CS_EQUALISER_24000_A0, /* 24kS/s coefficients */
+         CS_EQUALISER_24000_A1, CS_EQUALISER_24000_A2, CS_EQUALISER_24000_B1, CS_EQUALISER_24000_B2,
+         (LVM_UINT16)CS_EQUALISER_24000_SCALE},
+        {CS_EQUALISER_32000_A0, /* 32kS/s coefficients */
+         CS_EQUALISER_32000_A1, CS_EQUALISER_32000_A2, CS_EQUALISER_32000_B1, CS_EQUALISER_32000_B2,
+         (LVM_UINT16)CS_EQUALISER_32000_SCALE},
+        {CS_EQUALISER_44100_A0, /* 44kS/s coefficients */
+         CS_EQUALISER_44100_A1, CS_EQUALISER_44100_A2, CS_EQUALISER_44100_B1, CS_EQUALISER_44100_B2,
+         (LVM_UINT16)CS_EQUALISER_44100_SCALE},
+        {CS_EQUALISER_48000_A0, /* 48kS/s coefficients */
+         CS_EQUALISER_48000_A1, CS_EQUALISER_48000_A2, CS_EQUALISER_48000_B1, CS_EQUALISER_48000_B2,
+         (LVM_UINT16)CS_EQUALISER_48000_SCALE},
+        {CS_EQUALISER_88200_A0, /* 88kS/s coefficients */
+         CS_EQUALISER_88200_A1, CS_EQUALISER_88200_A2, CS_EQUALISER_88200_B1, CS_EQUALISER_88200_B2,
+         (LVM_UINT16)CS_EQUALISER_88200_SCALE},
+        {CS_EQUALISER_96000_A0, /* 96kS/s coefficients */
+         CS_EQUALISER_96000_A1, CS_EQUALISER_96000_A2, CS_EQUALISER_96000_B1, CS_EQUALISER_96000_B2,
+         (LVM_UINT16)CS_EQUALISER_96000_SCALE},
+        {CS_EQUALISER_176400_A0, /* 176kS/s coefficients */
+         CS_EQUALISER_176400_A1, CS_EQUALISER_176400_A2, CS_EQUALISER_176400_B1,
+         CS_EQUALISER_176400_B2, (LVM_UINT16)CS_EQUALISER_176400_SCALE},
+        {CS_EQUALISER_192000_A0, /* 192kS/s coefficients */
+         CS_EQUALISER_192000_A1, CS_EQUALISER_192000_A2, CS_EQUALISER_192000_B1,
+         CS_EQUALISER_192000_B2, (LVM_UINT16)CS_EQUALISER_192000_SCALE},
 
-    /* Concert Sound EX Headphone coefficients */
-    {CSEX_EQUALISER_8000_A0,    /* 8kS/s coefficients */
-     CSEX_EQUALISER_8000_A1,
-     CSEX_EQUALISER_8000_A2,
-     CSEX_EQUALISER_8000_B1,
-     CSEX_EQUALISER_8000_B2,
-     (LVM_UINT16 )CSEX_EQUALISER_8000_SCALE},
-    {CSEX_EQUALISER_11025_A0,   /* 11kS/s coefficients */
-     CSEX_EQUALISER_11025_A1,
-     CSEX_EQUALISER_11025_A2,
-     CSEX_EQUALISER_11025_B1,
-     CSEX_EQUALISER_11025_B2,
-     (LVM_UINT16 )CSEX_EQUALISER_11025_SCALE},
-    {CSEX_EQUALISER_12000_A0,   /* 12kS/s coefficients */
-     CSEX_EQUALISER_12000_A1,
-     CSEX_EQUALISER_12000_A2,
-     CSEX_EQUALISER_12000_B1,
-     CSEX_EQUALISER_12000_B2,
-     (LVM_UINT16 )CSEX_EQUALISER_12000_SCALE},
-    {CSEX_EQUALISER_16000_A0,   /* 16kS/s coefficients */
-     CSEX_EQUALISER_16000_A1,
-     CSEX_EQUALISER_16000_A2,
-     CSEX_EQUALISER_16000_B1,
-     CSEX_EQUALISER_16000_B2,
-     (LVM_UINT16 )CSEX_EQUALISER_16000_SCALE},
-    {CSEX_EQUALISER_22050_A0,   /* 22kS/s coefficients */
-     CSEX_EQUALISER_22050_A1,
-     CSEX_EQUALISER_22050_A2,
-     CSEX_EQUALISER_22050_B1,
-     CSEX_EQUALISER_22050_B2,
-     (LVM_UINT16 )CSEX_EQUALISER_22050_SCALE},
-    {CSEX_EQUALISER_24000_A0,   /* 24kS/s coefficients */
-     CSEX_EQUALISER_24000_A1,
-     CSEX_EQUALISER_24000_A2,
-     CSEX_EQUALISER_24000_B1,
-     CSEX_EQUALISER_24000_B2,
-     (LVM_UINT16 )CSEX_EQUALISER_24000_SCALE},
-    {CSEX_EQUALISER_32000_A0,   /* 32kS/s coefficients */
-     CSEX_EQUALISER_32000_A1,
-     CSEX_EQUALISER_32000_A2,
-     CSEX_EQUALISER_32000_B1,
-     CSEX_EQUALISER_32000_B2,
-     (LVM_UINT16 )CSEX_EQUALISER_32000_SCALE},
-    {CSEX_EQUALISER_44100_A0,   /* 44kS/s coefficients */
-     CSEX_EQUALISER_44100_A1,
-     CSEX_EQUALISER_44100_A2,
-     CSEX_EQUALISER_44100_B1,
-     CSEX_EQUALISER_44100_B2,
-     (LVM_UINT16 )CSEX_EQUALISER_44100_SCALE},
-    {CSEX_EQUALISER_48000_A0,   /* 48kS/s coefficients */
-     CSEX_EQUALISER_48000_A1,
-     CSEX_EQUALISER_48000_A2,
-     CSEX_EQUALISER_48000_B1,
-     CSEX_EQUALISER_48000_B2,
-     (LVM_UINT16 )CSEX_EQUALISER_48000_SCALE}
-    ,
-    {CSEX_EQUALISER_88200_A0,   /* 88kS/s coefficients */
-     CSEX_EQUALISER_88200_A1,
-     CSEX_EQUALISER_88200_A2,
-     CSEX_EQUALISER_88200_B1,
-     CSEX_EQUALISER_88200_B2,
-     (LVM_UINT16)CSEX_EQUALISER_88200_SCALE},
-    {CSEX_EQUALISER_96000_A0,   /* 96kS/s coefficients */
-     CSEX_EQUALISER_96000_A1,
-     CSEX_EQUALISER_96000_A2,
-     CSEX_EQUALISER_96000_B1,
-     CSEX_EQUALISER_96000_B2,
-     (LVM_UINT16 )CSEX_EQUALISER_96000_SCALE},
-    {CSEX_EQUALISER_176400_A0,   /* 176kS/s coefficients */
-     CSEX_EQUALISER_176400_A1,
-     CSEX_EQUALISER_176400_A2,
-     CSEX_EQUALISER_176400_B1,
-     CSEX_EQUALISER_176400_B2,
-     (LVM_UINT16)CSEX_EQUALISER_176400_SCALE},
-     {CSEX_EQUALISER_192000_A0,   /* 192kS/s coefficients */
-     CSEX_EQUALISER_192000_A1,
-     CSEX_EQUALISER_192000_A2,
-     CSEX_EQUALISER_192000_B1,
-     CSEX_EQUALISER_192000_B2,
-     (LVM_UINT16 )CSEX_EQUALISER_192000_SCALE}
-};
+        /* Concert Sound EX Headphone coefficients */
+        {CSEX_EQUALISER_8000_A0, /* 8kS/s coefficients */
+         CSEX_EQUALISER_8000_A1, CSEX_EQUALISER_8000_A2, CSEX_EQUALISER_8000_B1,
+         CSEX_EQUALISER_8000_B2, (LVM_UINT16)CSEX_EQUALISER_8000_SCALE},
+        {CSEX_EQUALISER_11025_A0, /* 11kS/s coefficients */
+         CSEX_EQUALISER_11025_A1, CSEX_EQUALISER_11025_A2, CSEX_EQUALISER_11025_B1,
+         CSEX_EQUALISER_11025_B2, (LVM_UINT16)CSEX_EQUALISER_11025_SCALE},
+        {CSEX_EQUALISER_12000_A0, /* 12kS/s coefficients */
+         CSEX_EQUALISER_12000_A1, CSEX_EQUALISER_12000_A2, CSEX_EQUALISER_12000_B1,
+         CSEX_EQUALISER_12000_B2, (LVM_UINT16)CSEX_EQUALISER_12000_SCALE},
+        {CSEX_EQUALISER_16000_A0, /* 16kS/s coefficients */
+         CSEX_EQUALISER_16000_A1, CSEX_EQUALISER_16000_A2, CSEX_EQUALISER_16000_B1,
+         CSEX_EQUALISER_16000_B2, (LVM_UINT16)CSEX_EQUALISER_16000_SCALE},
+        {CSEX_EQUALISER_22050_A0, /* 22kS/s coefficients */
+         CSEX_EQUALISER_22050_A1, CSEX_EQUALISER_22050_A2, CSEX_EQUALISER_22050_B1,
+         CSEX_EQUALISER_22050_B2, (LVM_UINT16)CSEX_EQUALISER_22050_SCALE},
+        {CSEX_EQUALISER_24000_A0, /* 24kS/s coefficients */
+         CSEX_EQUALISER_24000_A1, CSEX_EQUALISER_24000_A2, CSEX_EQUALISER_24000_B1,
+         CSEX_EQUALISER_24000_B2, (LVM_UINT16)CSEX_EQUALISER_24000_SCALE},
+        {CSEX_EQUALISER_32000_A0, /* 32kS/s coefficients */
+         CSEX_EQUALISER_32000_A1, CSEX_EQUALISER_32000_A2, CSEX_EQUALISER_32000_B1,
+         CSEX_EQUALISER_32000_B2, (LVM_UINT16)CSEX_EQUALISER_32000_SCALE},
+        {CSEX_EQUALISER_44100_A0, /* 44kS/s coefficients */
+         CSEX_EQUALISER_44100_A1, CSEX_EQUALISER_44100_A2, CSEX_EQUALISER_44100_B1,
+         CSEX_EQUALISER_44100_B2, (LVM_UINT16)CSEX_EQUALISER_44100_SCALE},
+        {CSEX_EQUALISER_48000_A0, /* 48kS/s coefficients */
+         CSEX_EQUALISER_48000_A1, CSEX_EQUALISER_48000_A2, CSEX_EQUALISER_48000_B1,
+         CSEX_EQUALISER_48000_B2, (LVM_UINT16)CSEX_EQUALISER_48000_SCALE},
+        {CSEX_EQUALISER_88200_A0, /* 88kS/s coefficients */
+         CSEX_EQUALISER_88200_A1, CSEX_EQUALISER_88200_A2, CSEX_EQUALISER_88200_B1,
+         CSEX_EQUALISER_88200_B2, (LVM_UINT16)CSEX_EQUALISER_88200_SCALE},
+        {CSEX_EQUALISER_96000_A0, /* 96kS/s coefficients */
+         CSEX_EQUALISER_96000_A1, CSEX_EQUALISER_96000_A2, CSEX_EQUALISER_96000_B1,
+         CSEX_EQUALISER_96000_B2, (LVM_UINT16)CSEX_EQUALISER_96000_SCALE},
+        {CSEX_EQUALISER_176400_A0, /* 176kS/s coefficients */
+         CSEX_EQUALISER_176400_A1, CSEX_EQUALISER_176400_A2, CSEX_EQUALISER_176400_B1,
+         CSEX_EQUALISER_176400_B2, (LVM_UINT16)CSEX_EQUALISER_176400_SCALE},
+        {CSEX_EQUALISER_192000_A0, /* 192kS/s coefficients */
+         CSEX_EQUALISER_192000_A1, CSEX_EQUALISER_192000_A2, CSEX_EQUALISER_192000_B1,
+         CSEX_EQUALISER_192000_B2, (LVM_UINT16)CSEX_EQUALISER_192000_SCALE}};
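The A0/A1/A2/B1/B2 members filled in above are second-order (biquad) IIR coefficients, with Scale carrying the fixed-point scaling used by the integer build. As a rough illustration only (the LVCS processing kernel and its exact sign/scaling conventions are not part of this diff), a direct-form biquad step with stand-in types could look like:

#include <cstdint>

// Stand-in for BiquadA012B12CoefsSP_t (float build; Scale left unused here).
struct BiquadCoefs {
    float A0, A1, A2, B1, B2;
    uint16_t Scale;
};

struct BiquadState {
    float x1 = 0.f, x2 = 0.f;  // previous inputs
    float y1 = 0.f, y2 = 0.f;  // previous outputs
};

// One common direct-form convention:
//   y[n] = A0*x[n] + A1*x[n-1] + A2*x[n-2] - B1*y[n-1] - B2*y[n-2]
// The real LVCS kernel may fold in Scale and use a different B1/B2 sign convention.
static float biquadStep(const BiquadCoefs& c, BiquadState& s, float x) {
    const float y = c.A0 * x + c.A1 * s.x1 + c.A2 * s.x2 - c.B1 * s.y1 - c.B2 * s.y2;
    s.x2 = s.x1;
    s.x1 = x;
    s.y2 = s.y1;
    s.y1 = y;
    return y;
}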
 
 /************************************************************************************/
 /*                                                                                  */
@@ -350,20 +201,12 @@
 /************************************************************************************/
 
 /* Stereo delay table for Concert Sound */
-const LVM_UINT16    LVCS_StereoDelayCS[] = {
-    LVCS_STEREODELAY_CS_8KHZ,
-    LVCS_STEREODELAY_CS_11KHZ,
-    LVCS_STEREODELAY_CS_12KHZ,
-    LVCS_STEREODELAY_CS_16KHZ,
-    LVCS_STEREODELAY_CS_22KHZ,
-    LVCS_STEREODELAY_CS_24KHZ,
-    LVCS_STEREODELAY_CS_32KHZ,
-    LVCS_STEREODELAY_CS_44KHZ,
-    LVCS_STEREODELAY_CS_48KHZ,
-    LVCS_STEREODELAY_CS_88KHZ,
-    LVCS_STEREODELAY_CS_96KHZ,
-    LVCS_STEREODELAY_CS_176KHZ,
-    LVCS_STEREODELAY_CS_192KHZ,
+const LVM_UINT16 LVCS_StereoDelayCS[] = {
+        LVCS_STEREODELAY_CS_8KHZ,   LVCS_STEREODELAY_CS_11KHZ, LVCS_STEREODELAY_CS_12KHZ,
+        LVCS_STEREODELAY_CS_16KHZ,  LVCS_STEREODELAY_CS_22KHZ, LVCS_STEREODELAY_CS_24KHZ,
+        LVCS_STEREODELAY_CS_32KHZ,  LVCS_STEREODELAY_CS_44KHZ, LVCS_STEREODELAY_CS_48KHZ,
+        LVCS_STEREODELAY_CS_88KHZ,  LVCS_STEREODELAY_CS_96KHZ, LVCS_STEREODELAY_CS_176KHZ,
+        LVCS_STEREODELAY_CS_192KHZ,
 };
 
 /************************************************************************************/
@@ -373,87 +216,46 @@
 /************************************************************************************/
 
 const BiquadA012B12CoefsSP_t LVCS_ReverbCoefTable[] = {
-    /* Headphone coefficients */
-    {CS_REVERB_8000_A0,             /* 8kS/s coefficients */
-     CS_REVERB_8000_A1,
-     CS_REVERB_8000_A2,
-     CS_REVERB_8000_B1,
-     CS_REVERB_8000_B2,
-     (LVM_UINT16 )CS_REVERB_8000_SCALE},
-    {CS_REVERB_11025_A0,            /* 11kS/s coefficients */
-     CS_REVERB_11025_A1,
-     CS_REVERB_11025_A2,
-     CS_REVERB_11025_B1,
-     CS_REVERB_11025_B2,
-     (LVM_UINT16 )CS_REVERB_11025_SCALE},
-    {CS_REVERB_12000_A0,            /* 12kS/s coefficients */
-     CS_REVERB_12000_A1,
-     CS_REVERB_12000_A2,
-     CS_REVERB_12000_B1,
-     CS_REVERB_12000_B2,
-     (LVM_UINT16 )CS_REVERB_12000_SCALE},
-    {CS_REVERB_16000_A0,            /* 16kS/s coefficients */
-     CS_REVERB_16000_A1,
-     CS_REVERB_16000_A2,
-     CS_REVERB_16000_B1,
-     CS_REVERB_16000_B2,
-     (LVM_UINT16 )CS_REVERB_16000_SCALE},
-    {CS_REVERB_22050_A0,            /* 22kS/s coefficients */
-     CS_REVERB_22050_A1,
-     CS_REVERB_22050_A2,
-     CS_REVERB_22050_B1,
-     CS_REVERB_22050_B2,
-     (LVM_UINT16 )CS_REVERB_22050_SCALE},
-    {CS_REVERB_24000_A0,            /* 24kS/s coefficients */
-     CS_REVERB_24000_A1,
-     CS_REVERB_24000_A2,
-     CS_REVERB_24000_B1,
-     CS_REVERB_24000_B2,
-     (LVM_UINT16 )CS_REVERB_24000_SCALE},
-    {CS_REVERB_32000_A0,            /* 32kS/s coefficients */
-     CS_REVERB_32000_A1,
-     CS_REVERB_32000_A2,
-     CS_REVERB_32000_B1,
-     CS_REVERB_32000_B2,
-     (LVM_UINT16 )CS_REVERB_32000_SCALE},
-    {CS_REVERB_44100_A0,            /* 44kS/s coefficients */
-     CS_REVERB_44100_A1,
-     CS_REVERB_44100_A2,
-     CS_REVERB_44100_B1,
-     CS_REVERB_44100_B2,
-     (LVM_UINT16 )CS_REVERB_44100_SCALE},
-    {CS_REVERB_48000_A0,            /* 48kS/s coefficients */
-     CS_REVERB_48000_A1,
-     CS_REVERB_48000_A2,
-     CS_REVERB_48000_B1,
-     CS_REVERB_48000_B2,
-     (LVM_UINT16 )CS_REVERB_48000_SCALE}
-    ,
-    {CS_REVERB_88200_A0,            /* 88kS/s coefficients */
-     CS_REVERB_88200_A1,
-     CS_REVERB_88200_A2,
-     CS_REVERB_88200_B1,
-     CS_REVERB_88200_B2,
-     (LVM_UINT16)CS_REVERB_88200_SCALE},
-    {CS_REVERB_96000_A0,            /* 96kS/s coefficients */
-     CS_REVERB_96000_A1,
-     CS_REVERB_96000_A2,
-     CS_REVERB_96000_B1,
-     CS_REVERB_96000_B2,
-     (LVM_UINT16 )CS_REVERB_96000_SCALE},
-    {CS_REVERB_176400_A0,            /* 176kS/s coefficients */
-     CS_REVERB_176400_A1,
-     CS_REVERB_176400_A2,
-     CS_REVERB_176400_B1,
-     CS_REVERB_176400_B2,
-     (LVM_UINT16)CS_REVERB_176400_SCALE},
-     {CS_REVERB_192000_A0,            /* 192kS/s coefficients */
-     CS_REVERB_192000_A1,
-     CS_REVERB_192000_A2,
-     CS_REVERB_192000_B1,
-     CS_REVERB_192000_B2,
-     (LVM_UINT16 )CS_REVERB_192000_SCALE}
-};
+        /* Headphone coefficients */
+        {CS_REVERB_8000_A0, /* 8kS/s coefficients */
+         CS_REVERB_8000_A1, CS_REVERB_8000_A2, CS_REVERB_8000_B1, CS_REVERB_8000_B2,
+         (LVM_UINT16)CS_REVERB_8000_SCALE},
+        {CS_REVERB_11025_A0, /* 11kS/s coefficients */
+         CS_REVERB_11025_A1, CS_REVERB_11025_A2, CS_REVERB_11025_B1, CS_REVERB_11025_B2,
+         (LVM_UINT16)CS_REVERB_11025_SCALE},
+        {CS_REVERB_12000_A0, /* 12kS/s coefficients */
+         CS_REVERB_12000_A1, CS_REVERB_12000_A2, CS_REVERB_12000_B1, CS_REVERB_12000_B2,
+         (LVM_UINT16)CS_REVERB_12000_SCALE},
+        {CS_REVERB_16000_A0, /* 16kS/s coefficients */
+         CS_REVERB_16000_A1, CS_REVERB_16000_A2, CS_REVERB_16000_B1, CS_REVERB_16000_B2,
+         (LVM_UINT16)CS_REVERB_16000_SCALE},
+        {CS_REVERB_22050_A0, /* 22kS/s coefficients */
+         CS_REVERB_22050_A1, CS_REVERB_22050_A2, CS_REVERB_22050_B1, CS_REVERB_22050_B2,
+         (LVM_UINT16)CS_REVERB_22050_SCALE},
+        {CS_REVERB_24000_A0, /* 24kS/s coefficients */
+         CS_REVERB_24000_A1, CS_REVERB_24000_A2, CS_REVERB_24000_B1, CS_REVERB_24000_B2,
+         (LVM_UINT16)CS_REVERB_24000_SCALE},
+        {CS_REVERB_32000_A0, /* 32kS/s coefficients */
+         CS_REVERB_32000_A1, CS_REVERB_32000_A2, CS_REVERB_32000_B1, CS_REVERB_32000_B2,
+         (LVM_UINT16)CS_REVERB_32000_SCALE},
+        {CS_REVERB_44100_A0, /* 44kS/s coefficients */
+         CS_REVERB_44100_A1, CS_REVERB_44100_A2, CS_REVERB_44100_B1, CS_REVERB_44100_B2,
+         (LVM_UINT16)CS_REVERB_44100_SCALE},
+        {CS_REVERB_48000_A0, /* 48kS/s coefficients */
+         CS_REVERB_48000_A1, CS_REVERB_48000_A2, CS_REVERB_48000_B1, CS_REVERB_48000_B2,
+         (LVM_UINT16)CS_REVERB_48000_SCALE},
+        {CS_REVERB_88200_A0, /* 88kS/s coefficients */
+         CS_REVERB_88200_A1, CS_REVERB_88200_A2, CS_REVERB_88200_B1, CS_REVERB_88200_B2,
+         (LVM_UINT16)CS_REVERB_88200_SCALE},
+        {CS_REVERB_96000_A0, /* 96kS/s coefficients */
+         CS_REVERB_96000_A1, CS_REVERB_96000_A2, CS_REVERB_96000_B1, CS_REVERB_96000_B2,
+         (LVM_UINT16)CS_REVERB_96000_SCALE},
+        {CS_REVERB_176400_A0, /* 176kS/s coefficients */
+         CS_REVERB_176400_A1, CS_REVERB_176400_A2, CS_REVERB_176400_B1, CS_REVERB_176400_B2,
+         (LVM_UINT16)CS_REVERB_176400_SCALE},
+        {CS_REVERB_192000_A0, /* 192kS/s coefficients */
+         CS_REVERB_192000_A1, CS_REVERB_192000_A2, CS_REVERB_192000_B1, CS_REVERB_192000_B2,
+         (LVM_UINT16)CS_REVERB_192000_SCALE}};
 
 /************************************************************************************/
 /*                                                                                  */
@@ -461,20 +263,14 @@
 /*                                                                                  */
 /************************************************************************************/
 
-const Gain_t LVCS_OutputGainTable[] = {
-    {LVCS_HEADPHONE_SHIFT,         /* Headphone, stereo mode */
-     LVCS_HEADPHONE_SHIFTLOSS,
-     LVCS_HEADPHONE_GAIN},
-    {LVCS_EX_HEADPHONE_SHIFT,      /* EX Headphone, stereo mode */
-     LVCS_EX_HEADPHONE_SHIFTLOSS,
-     LVCS_EX_HEADPHONE_GAIN},
-    {LVCS_HEADPHONE_SHIFT,         /* Headphone, mono mode */
-     LVCS_HEADPHONE_SHIFTLOSS,
-     LVCS_HEADPHONE_GAIN},
-    {LVCS_EX_HEADPHONE_SHIFT,      /* EX Headphone, mono mode */
-     LVCS_EX_HEADPHONE_SHIFTLOSS,
-     LVCS_EX_HEADPHONE_GAIN}
-};
+const Gain_t LVCS_OutputGainTable[] = {{LVCS_HEADPHONE_SHIFT, /* Headphone, stereo mode */
+                                        LVCS_HEADPHONE_SHIFTLOSS, LVCS_HEADPHONE_GAIN},
+                                       {LVCS_EX_HEADPHONE_SHIFT, /* EX Headphone, stereo mode */
+                                        LVCS_EX_HEADPHONE_SHIFTLOSS, LVCS_EX_HEADPHONE_GAIN},
+                                       {LVCS_HEADPHONE_SHIFT, /* Headphone, mono mode */
+                                        LVCS_HEADPHONE_SHIFTLOSS, LVCS_HEADPHONE_GAIN},
+                                       {LVCS_EX_HEADPHONE_SHIFT, /* EX Headphone, mono mode */
+                                        LVCS_EX_HEADPHONE_SHIFTLOSS, LVCS_EX_HEADPHONE_GAIN}};
 
 /************************************************************************************/
 /*                                                                                  */
@@ -501,24 +297,14 @@
 /*          1024    is -12dB gain                                                   */
 /*                                                                                  */
 /************************************************************************************/
-const LVCS_VolCorrect_t LVCS_VolCorrectTable[] = {
-    {0.433362f,          /* Headphone, stereo mode */
-     0.000000f,
-     1.000024f,
-     1.412640f},
-    {0.433362f,          /* EX Headphone, stereo mode */
-     0.000000f,
-     1.000024f,
-     1.412640f},
-    {1.000000f,         /* Headphone, mono mode */
-     0.000000f,
-     1.000024f,
-     1.412640f},
-    {1.000000f,         /* EX Headphone, mono mode */
-     0.000000f,
-     1.000024f,
-     1.412640f}
-};
+const LVCS_VolCorrect_t LVCS_VolCorrectTable[] = {{0.433362f, /* Headphone, stereo mode */
+                                                   0.000000f, 1.000024f, 1.412640f},
+                                                  {0.433362f, /* EX Headphone, stereo mode */
+                                                   0.000000f, 1.000024f, 1.412640f},
+                                                  {1.000000f, /* Headphone, mono mode */
+                                                   0.000000f, 1.000024f, 1.412640f},
+                                                  {1.000000f, /* EX Headphone, mono mode */
+                                                   0.000000f, 1.000024f, 1.412640f}};
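The volume-correction entries above are plain linear gain factors, so converting them to dB for inspection is a one-liner; for example 1.412640 is about +3.0 dB and 0.433362 about -7.3 dB. A tiny self-contained check:

#include <cmath>
#include <cstdio>

// Linear gain factor (as stored in LVCS_VolCorrectTable) to dB.
static double linearToDb(double gain) {
    return 20.0 * std::log10(gain);
}

int main() {
    std::printf("1.412640 -> %+.1f dB\n", linearToDb(1.412640));  // about +3.0 dB
    std::printf("0.433362 -> %+.1f dB\n", linearToDb(0.433362));  // about -7.3 dB
    return 0;
}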
 
 /************************************************************************************/
 /*                                                                                  */
@@ -526,51 +312,32 @@
 /*                                                                                  */
 /************************************************************************************/
 
-#define LVCS_VOL_TC_Fs8000      32580       /* Floating point value 0.994262695 */
-#define LVCS_VOL_TC_Fs11025     32632       /* Floating point value 0.995849609 */
-#define LVCS_VOL_TC_Fs12000     32643       /* Floating point value 0.996185303 */
-#define LVCS_VOL_TC_Fs16000     32674       /* Floating point value 0.997131348 */
-#define LVCS_VOL_TC_Fs22050     32700       /* Floating point value 0.997924805 */
-#define LVCS_VOL_TC_Fs24000     32705       /* Floating point value 0.998077393 */
-#define LVCS_VOL_TC_Fs32000     32721       /* Floating point value 0.998565674 */
-#define LVCS_VOL_TC_Fs44100     32734       /* Floating point value 0.998962402 */
-#define LVCS_VOL_TC_Fs48000     32737       /* Floating point value 0.999053955 */
-#define LVCS_VOL_TC_Fs88200     32751       /* Floating point value 0.999481066 */
-#define LVCS_VOL_TC_Fs96000     32751       /* Floating point value 0.999511703 */   /* Todo @ need to re check this value*/
-#define LVCS_VOL_TC_Fs176400    32759       /* Floating point value 0.999740499 */
-#define LVCS_VOL_TC_Fs192000    32763       /* Floating point value 0.999877925 */  /* Todo @ need to re check this value*/
+#define LVCS_VOL_TC_Fs8000 32580  /* Floating point value 0.994262695 */
+#define LVCS_VOL_TC_Fs11025 32632 /* Floating point value 0.995849609 */
+#define LVCS_VOL_TC_Fs12000 32643 /* Floating point value 0.996185303 */
+#define LVCS_VOL_TC_Fs16000 32674 /* Floating point value 0.997131348 */
+#define LVCS_VOL_TC_Fs22050 32700 /* Floating point value 0.997924805 */
+#define LVCS_VOL_TC_Fs24000 32705 /* Floating point value 0.998077393 */
+#define LVCS_VOL_TC_Fs32000 32721 /* Floating point value 0.998565674 */
+#define LVCS_VOL_TC_Fs44100 32734 /* Floating point value 0.998962402 */
+#define LVCS_VOL_TC_Fs48000 32737 /* Floating point value 0.999053955 */
+#define LVCS_VOL_TC_Fs88200 32751 /* Floating point value 0.999481066 */
+#define LVCS_VOL_TC_Fs96000 \
+    32751 /* Floating point value 0.999511703 */ /* Todo @ need to re check this value*/
+#define LVCS_VOL_TC_Fs176400 32759               /* Floating point value 0.999740499 */
+#define LVCS_VOL_TC_Fs192000 \
+    32763 /* Floating point value 0.999877925 */ /* Todo @ need to re check this value*/
 
-const LVM_INT16 LVCS_VolumeTCTable[13] = {LVCS_VOL_TC_Fs8000,
-                                          LVCS_VOL_TC_Fs11025,
-                                          LVCS_VOL_TC_Fs12000,
-                                          LVCS_VOL_TC_Fs16000,
-                                          LVCS_VOL_TC_Fs22050,
-                                          LVCS_VOL_TC_Fs24000,
-                                          LVCS_VOL_TC_Fs32000,
-                                          LVCS_VOL_TC_Fs44100,
-                                          LVCS_VOL_TC_Fs48000,
-                                          LVCS_VOL_TC_Fs88200,
-                                          LVCS_VOL_TC_Fs96000,
-                                          LVCS_VOL_TC_Fs176400,
-                                          LVCS_VOL_TC_Fs192000
-};
+const LVM_INT16 LVCS_VolumeTCTable[13] = {
+        LVCS_VOL_TC_Fs8000,  LVCS_VOL_TC_Fs11025, LVCS_VOL_TC_Fs12000, LVCS_VOL_TC_Fs16000,
+        LVCS_VOL_TC_Fs22050, LVCS_VOL_TC_Fs24000, LVCS_VOL_TC_Fs32000, LVCS_VOL_TC_Fs44100,
+        LVCS_VOL_TC_Fs48000, LVCS_VOL_TC_Fs88200, LVCS_VOL_TC_Fs96000, LVCS_VOL_TC_Fs176400,
+        LVCS_VOL_TC_Fs192000};
 
 /************************************************************************************/
 /*                                                                                  */
 /*  Sample rate table                                                               */
 /*                                                                                  */
 /************************************************************************************/
-const LVM_INT32   LVCS_SampleRateTable[13] = {8000,
-                                              11025,
-                                              12000,
-                                              16000,
-                                              22050,
-                                              24000,
-                                              32000,
-                                              44100,
-                                              48000,
-                                              88200,
-                                              96000,
-                                              176400,
-                                              192000
-};
+const LVM_INT32 LVCS_SampleRateTable[13] = {8000,  11025, 12000, 16000, 22050,  24000, 32000,
+                                            44100, 48000, 88200, 96000, 176400, 192000};
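LVCS_SampleRateTable and LVCS_VolumeTCTable above share the same 13-entry ordering (8 kHz through 192 kHz), as do the per-rate coefficient tables earlier in this file, so a sample rate in Hz maps to a table index with a simple scan. A minimal sketch (not the library's own lookup, which is assumed here to index by the same ordering):

// Same 13-entry ordering as LVCS_SampleRateTable above.
static const int kLvcsRates[13] = {8000,  11025, 12000, 16000, 22050,  24000, 32000,
                                   44100, 48000, 88200, 96000, 176400, 192000};

// Returns the index of fs in kLvcsRates, or -1 if the rate is unsupported; the
// same index would then pick the matching LVCS_VolumeTCTable entry or
// per-rate coefficient set.
static int sampleRateToIndex(int fs) {
    for (int i = 0; i < 13; ++i) {
        if (kLvcsRates[i] == fs) return i;
    }
    return -1;
}

int main() {
    return sampleRateToIndex(44100) == 7 ? 0 : 1;  // 44100 Hz sits at index 7
}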
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Tables.h b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Tables.h
index 5490699..766f5f2 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Tables.h
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Tables.h
@@ -24,7 +24,7 @@
 /*                                                                                  */
 /************************************************************************************/
 
-#include "BIQUAD.h"                             /* Biquad definitions */
+#include "BIQUAD.h" /* Biquad definitions */
 
 /************************************************************************************/
 /*                                                                                  */
@@ -106,35 +106,34 @@
 /*                                                                                  */
 /************************************************************************************/
 
-extern const LVM_INT32          LVCS_SampleRateTable[];
+extern const LVM_INT32 LVCS_SampleRateTable[];
 
 /* Speaker coefficient tables */
-extern LVM_UINT16               LVCS_MS_Small_SEMiddleGainTable[];
-extern BiquadA012B12CoefsSP_t   LVCS_MS_Small_SESideCoefTable[];
-extern BiquadA012B12CoefsSP_t   LVCS_MS_Small_EqualiserCoefTable[];
-extern BiquadA012B12CoefsSP_t   LVCS_MS_Small_ReverbCoefTable[] ;
-extern LVM_UINT16               LVCS_MS_Small_StereoDelayCS4MS[];
-extern Gain_t                   LVCS_MS_Small_OutputGainTable[];
-extern LVCS_VolCorrect_t        LVCS_MS_Small_VolCorrectTable[];
-extern LVM_UINT16               LVCS_MS_Small_ReverbGainTable[];
+extern LVM_UINT16 LVCS_MS_Small_SEMiddleGainTable[];
+extern BiquadA012B12CoefsSP_t LVCS_MS_Small_SESideCoefTable[];
+extern BiquadA012B12CoefsSP_t LVCS_MS_Small_EqualiserCoefTable[];
+extern BiquadA012B12CoefsSP_t LVCS_MS_Small_ReverbCoefTable[];
+extern LVM_UINT16 LVCS_MS_Small_StereoDelayCS4MS[];
+extern Gain_t LVCS_MS_Small_OutputGainTable[];
+extern LVCS_VolCorrect_t LVCS_MS_Small_VolCorrectTable[];
+extern LVM_UINT16 LVCS_MS_Small_ReverbGainTable[];
 
-extern LVM_UINT16               LVCS_MS_Medium_SEMiddleGainTable[];
-extern BiquadA012B12CoefsSP_t   LVCS_MS_Medium_SESideCoefTable[];
-extern BiquadA012B12CoefsSP_t   LVCS_MS_Medium_EqualiserCoefTable[];
-extern BiquadA012B12CoefsSP_t   LVCS_MS_Medium_ReverbCoefTable[] ;
-extern LVM_UINT16               LVCS_MS_Medium_StereoDelayCS4MS[];
-extern Gain_t                   LVCS_MS_Medium_OutputGainTable[];
-extern LVCS_VolCorrect_t        LVCS_MS_Medium_VolCorrectTable[];
-extern LVM_UINT16               LVCS_MS_Medium_ReverbGainTable[];
+extern LVM_UINT16 LVCS_MS_Medium_SEMiddleGainTable[];
+extern BiquadA012B12CoefsSP_t LVCS_MS_Medium_SESideCoefTable[];
+extern BiquadA012B12CoefsSP_t LVCS_MS_Medium_EqualiserCoefTable[];
+extern BiquadA012B12CoefsSP_t LVCS_MS_Medium_ReverbCoefTable[];
+extern LVM_UINT16 LVCS_MS_Medium_StereoDelayCS4MS[];
+extern Gain_t LVCS_MS_Medium_OutputGainTable[];
+extern LVCS_VolCorrect_t LVCS_MS_Medium_VolCorrectTable[];
+extern LVM_UINT16 LVCS_MS_Medium_ReverbGainTable[];
 
-extern LVM_UINT16               LVCS_MS_Large_SEMiddleGainTable[];
-extern BiquadA012B12CoefsSP_t   LVCS_MS_Large_SESideCoefTable[];
-extern BiquadA012B12CoefsSP_t   LVCS_MS_Large_EqualiserCoefTable[];
-extern BiquadA012B12CoefsSP_t   LVCS_MS_Large_ReverbCoefTable[] ;
-extern LVM_UINT16               LVCS_MS_Large_StereoDelayCS4MS[];
-extern Gain_t                   LVCS_MS_Large_OutputGainTable[];
-extern LVCS_VolCorrect_t        LVCS_MS_Large_VolCorrectTable[];
-extern LVM_UINT16               LVCS_MS_Large_ReverbGainTable[];
+extern LVM_UINT16 LVCS_MS_Large_SEMiddleGainTable[];
+extern BiquadA012B12CoefsSP_t LVCS_MS_Large_SESideCoefTable[];
+extern BiquadA012B12CoefsSP_t LVCS_MS_Large_EqualiserCoefTable[];
+extern BiquadA012B12CoefsSP_t LVCS_MS_Large_ReverbCoefTable[];
+extern LVM_UINT16 LVCS_MS_Large_StereoDelayCS4MS[];
+extern Gain_t LVCS_MS_Large_OutputGainTable[];
+extern LVCS_VolCorrect_t LVCS_MS_Large_VolCorrectTable[];
+extern LVM_UINT16 LVCS_MS_Large_ReverbGainTable[];
 
 #endif /* __LVCS_TABLES_H__ */
-
diff --git a/media/libeffects/lvm/tests/Android.bp b/media/libeffects/lvm/tests/Android.bp
index 674c246..d026ab6 100644
--- a/media/libeffects/lvm/tests/Android.bp
+++ b/media/libeffects/lvm/tests/Android.bp
@@ -44,6 +44,36 @@
 }
 
 cc_test {
+    name: "reverb_test",
+    host_supported: false,
+    proprietary: true,
+
+    include_dirs: [
+        "frameworks/av/media/libeffects/lvm/wrapper/Reverb",
+    ],
+
+    header_libs: [
+        "libaudioeffects",
+    ],
+
+    shared_libs: [
+        "libaudioutils",
+        "liblog",
+        "libreverbwrapper",
+    ],
+
+    srcs: [
+        "reverb_test.cpp",
+    ],
+
+    cflags: [
+        "-Wall",
+        "-Werror",
+        "-Wextra",
+    ],
+}
+
+cc_test {
     name: "snr",
     host_supported: false,
 
diff --git a/media/libeffects/lvm/tests/build_and_run_all_unit_tests_reverb.sh b/media/libeffects/lvm/tests/build_and_run_all_unit_tests_reverb.sh
new file mode 100755
index 0000000..0c3b0b5
--- /dev/null
+++ b/media/libeffects/lvm/tests/build_and_run_all_unit_tests_reverb.sh
@@ -0,0 +1,105 @@
+#!/bin/bash
+#
+# reverb test
+#
+
+if [ -z "$ANDROID_BUILD_TOP" ]; then
+    echo "Android build environment not set"
+    exit -1
+fi
+
+# ensure we have mm
+. $ANDROID_BUILD_TOP/build/envsetup.sh
+
+mm -j
+
+echo "waiting for device"
+
+adb root && adb wait-for-device remount
+
+# location of test files
+testdir="/data/local/tmp/revTest"
+
+echo "========================================"
+echo "testing reverb"
+adb shell mkdir -p $testdir
+adb push $ANDROID_BUILD_TOP/cts/tests/tests/media/res/raw/sinesweepraw.raw $testdir
+
+E_VAL=1
+cmds="adb push $OUT/testcases/reverb_test/arm/reverb_test $testdir"
+
+fs_arr=(
+    8000
+    16000
+    22050
+    32000
+    44100
+    48000
+    88200
+    96000
+    176400
+    192000
+)
+
+flags_arr=(
+    "--M --fch 1"
+    "--fch 2"
+)
+
+# run reverb at different configs, saving only the stereo channel
+# pair.
+error_count=0
+testcase_count=0
+for cmd in "${cmds[@]}"
+do
+    $cmd
+    for flags in "${flags_arr[@]}"
+    do
+        for preset_val in {0..6}
+        do
+            for fs in ${fs_arr[*]}
+            do
+                for chMask in {0..22}
+                do
+                    adb shell LD_LIBRARY_PATH=/system/vendor/lib/soundfx $testdir/reverb_test \
+                        --input $testdir/sinesweepraw.raw \
+                        --output $testdir/sinesweep_$((chMask))_$((fs)).raw \
+                        --chMask $chMask $flags --fs $fs --preset $preset_val
+
+                    shell_ret=$?
+                    if [ $shell_ret -ne 0 ]; then
+                        echo "error: $shell_ret"
+                        ((++error_count))
+                    fi
+
+                    if [[ "$chMask" -gt 0 ]] && [[ $flags != *"--fch 2"* ]]
+                    then
+                        # single channel files should be identical to higher channel
+                        # computation (first channel).
+                        adb shell cmp $testdir/sinesweep_0_$((fs)).raw \
+                            $testdir/sinesweep_$((chMask))_$((fs)).raw
+                    elif [[ "$chMask" -gt 1 ]]
+                    then
+                        # two channel files should be identical to higher channel
+                        # computation (first 2 channels).
+                        adb shell cmp $testdir/sinesweep_1_$((fs)).raw \
+                            $testdir/sinesweep_$((chMask))_$((fs)).raw
+                    fi
+
+                    # cmp returns EXIT_FAILURE on mismatch.
+                    shell_ret=$?
+                    if [ $shell_ret -ne 0 ]; then
+                        echo "error: $shell_ret"
+                        ((++error_count))
+                    fi
+                    ((++testcase_count))
+                done
+            done
+        done
+    done
+done
+
+adb shell rm -r $testdir
+echo "$testcase_count tests performed"
+echo "$error_count errors"
+exit $error_count
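For scale: with the one pushed binary, the 2 entries of flags_arr, presets 0 through 6, the 10 rates in fs_arr and channel masks 0 through 22, the innermost loop body runs 2 * 7 * 10 * 23 = 3220 times, which is the testcase_count a clean full pass should report. A trivial cross-check:

#include <cstdio>

int main() {
    // One pushed binary; flags_arr has 2 entries, presets run 0..6,
    // fs_arr has 10 rates, chMask runs 0..22.
    const int binaries = 1, flagSets = 2, presets = 7, rates = 10, masks = 23;
    std::printf("expected testcase_count: %d\n",
                binaries * flagSets * presets * rates * masks);  // 3220
    return 0;
}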
diff --git a/media/libeffects/lvm/tests/lvmtest.cpp b/media/libeffects/lvm/tests/lvmtest.cpp
index a4ace6c..5c5f646 100644
--- a/media/libeffects/lvm/tests/lvmtest.cpp
+++ b/media/libeffects/lvm/tests/lvmtest.cpp
@@ -33,198 +33,148 @@
 #define ALOGVV ALOGV
 #else
 #define ALOGVV(a...) \
-  do {               \
-  } while (false)
+    do {             \
+    } while (false)
 #endif
 
-#define CHECK_ARG(cond)                                \
-  {                                                    \
-    if (!(cond)) {                                     \
-      ALOGE("\tLVM_ERROR : Invalid argument: " #cond); \
-      return -EINVAL;                                  \
-    }                                                  \
-  \
-}
+#define CHECK_ARG(cond)                                      \
+    {                                                        \
+        if (!(cond)) {                                       \
+            ALOGE("\tLVM_ERROR : Invalid argument: " #cond); \
+            return -EINVAL;                                  \
+        }                                                    \
+    }
 
-#define LVM_ERROR_CHECK(LvmStatus, callingFunc, calledFunc)     \
-  {                                                             \
-    if ((LvmStatus) == LVM_NULLADDRESS) {                       \
-      ALOGE(                                                    \
-          "\tLVM_ERROR : Parameter error - "                    \
-          "null pointer returned by %s in %s\n\n\n\n",          \
-          callingFunc, calledFunc);                             \
-    }                                                           \
-    if ((LvmStatus) == LVM_ALIGNMENTERROR) {                    \
-      ALOGE(                                                    \
-          "\tLVM_ERROR : Parameter error - "                    \
-          "bad alignment returned by %s in %s\n\n\n\n",         \
-          callingFunc, calledFunc);                             \
-    }                                                           \
-    if ((LvmStatus) == LVM_INVALIDNUMSAMPLES) {                 \
-      ALOGE(                                                    \
-          "\tLVM_ERROR : Parameter error - "                    \
-          "bad number of samples returned by %s in %s\n\n\n\n", \
-          callingFunc, calledFunc);                             \
-    }                                                           \
-    if ((LvmStatus) == LVM_OUTOFRANGE) {                        \
-      ALOGE(                                                    \
-          "\tLVM_ERROR : Parameter error - "                    \
-          "out of range returned by %s in %s\n",                \
-          callingFunc, calledFunc);                             \
-    }                                                           \
-  }
+#define LVM_ERROR_CHECK(LvmStatus, callingFunc, calledFunc)             \
+    {                                                                   \
+        if ((LvmStatus) == LVM_NULLADDRESS) {                           \
+            ALOGE("\tLVM_ERROR : Parameter error - "                    \
+                  "null pointer returned by %s in %s\n\n\n\n",          \
+                  callingFunc, calledFunc);                             \
+        }                                                               \
+        if ((LvmStatus) == LVM_ALIGNMENTERROR) {                        \
+            ALOGE("\tLVM_ERROR : Parameter error - "                    \
+                  "bad alignment returned by %s in %s\n\n\n\n",         \
+                  callingFunc, calledFunc);                             \
+        }                                                               \
+        if ((LvmStatus) == LVM_INVALIDNUMSAMPLES) {                     \
+            ALOGE("\tLVM_ERROR : Parameter error - "                    \
+                  "bad number of samples returned by %s in %s\n\n\n\n", \
+                  callingFunc, calledFunc);                             \
+        }                                                               \
+        if ((LvmStatus) == LVM_OUTOFRANGE) {                            \
+            ALOGE("\tLVM_ERROR : Parameter error - "                    \
+                  "out of range returned by %s in %s\n",                \
+                  callingFunc, calledFunc);                             \
+        }                                                               \
+    }
 
 struct lvmConfigParams_t {
-  int              samplingFreq    = 44100;
-  int              nrChannels      = 2;
-  int              chMask          = AUDIO_CHANNEL_OUT_STEREO;
-  int              vcBal           = 0;
-  int              fChannels       = 2;
-  bool             monoMode        = false;
-  int              bassEffectLevel = 0;
-  int              eqPresetLevel   = 0;
-  int              frameLength     = 256;
-  LVM_BE_Mode_en   bassEnable      = LVM_BE_OFF;
-  LVM_TE_Mode_en   trebleEnable    = LVM_TE_OFF;
-  LVM_EQNB_Mode_en eqEnable        = LVM_EQNB_OFF;
-  LVM_Mode_en      csEnable        = LVM_MODE_OFF;
+    int samplingFreq = 44100;
+    int nrChannels = 2;
+    int chMask = AUDIO_CHANNEL_OUT_STEREO;
+    int vcBal = 0;
+    int fChannels = 2;
+    bool monoMode = false;
+    int bassEffectLevel = 0;
+    int eqPresetLevel = 0;
+    int frameLength = 256;
+    LVM_BE_Mode_en bassEnable = LVM_BE_OFF;
+    LVM_TE_Mode_en trebleEnable = LVM_TE_OFF;
+    LVM_EQNB_Mode_en eqEnable = LVM_EQNB_OFF;
+    LVM_Mode_en csEnable = LVM_MODE_OFF;
 };
 
 constexpr audio_channel_mask_t lvmConfigChMask[] = {
-    AUDIO_CHANNEL_OUT_MONO,
-    AUDIO_CHANNEL_OUT_STEREO,
-    AUDIO_CHANNEL_OUT_2POINT1,
-    AUDIO_CHANNEL_OUT_2POINT0POINT2,
-    AUDIO_CHANNEL_OUT_QUAD,
-    AUDIO_CHANNEL_OUT_QUAD_BACK,
-    AUDIO_CHANNEL_OUT_QUAD_SIDE,
-    AUDIO_CHANNEL_OUT_SURROUND,
-    (1 << 4) - 1,
-    AUDIO_CHANNEL_OUT_2POINT1POINT2,
-    AUDIO_CHANNEL_OUT_3POINT0POINT2,
-    AUDIO_CHANNEL_OUT_PENTA,
-    (1 << 5) - 1,
-    AUDIO_CHANNEL_OUT_3POINT1POINT2,
-    AUDIO_CHANNEL_OUT_5POINT1,
-    AUDIO_CHANNEL_OUT_5POINT1_BACK,
-    AUDIO_CHANNEL_OUT_5POINT1_SIDE,
-    (1 << 6) - 1,
-    AUDIO_CHANNEL_OUT_6POINT1,
-    (1 << 7) - 1,
-    AUDIO_CHANNEL_OUT_5POINT1POINT2,
-    AUDIO_CHANNEL_OUT_7POINT1,
-    (1 << 8) - 1,
+        AUDIO_CHANNEL_OUT_MONO,
+        AUDIO_CHANNEL_OUT_STEREO,
+        AUDIO_CHANNEL_OUT_2POINT1,
+        AUDIO_CHANNEL_OUT_2POINT0POINT2,
+        AUDIO_CHANNEL_OUT_QUAD,
+        AUDIO_CHANNEL_OUT_QUAD_BACK,
+        AUDIO_CHANNEL_OUT_QUAD_SIDE,
+        AUDIO_CHANNEL_OUT_SURROUND,
+        AUDIO_CHANNEL_INDEX_MASK_4,
+        AUDIO_CHANNEL_OUT_2POINT1POINT2,
+        AUDIO_CHANNEL_OUT_3POINT0POINT2,
+        AUDIO_CHANNEL_OUT_PENTA,
+        AUDIO_CHANNEL_INDEX_MASK_5,
+        AUDIO_CHANNEL_OUT_3POINT1POINT2,
+        AUDIO_CHANNEL_OUT_5POINT1,
+        AUDIO_CHANNEL_OUT_5POINT1_BACK,
+        AUDIO_CHANNEL_OUT_5POINT1_SIDE,
+        AUDIO_CHANNEL_INDEX_MASK_6,
+        AUDIO_CHANNEL_OUT_6POINT1,
+        AUDIO_CHANNEL_INDEX_MASK_7,
+        AUDIO_CHANNEL_OUT_5POINT1POINT2,
+        AUDIO_CHANNEL_OUT_7POINT1,
+        AUDIO_CHANNEL_INDEX_MASK_8,
 };
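Each -chMask command-line index selects one entry of lvmConfigChMask above. Every entry carries one bit per channel - exact by construction for the canonical index masks (1 << n) - 1, and assumed here for the named AUDIO_CHANNEL_OUT_* masks as well - so for this sketch the channel count can simply be read off as a popcount:

#include <bitset>
#include <cstdint>
#include <cstdio>

// Number of set bits == number of channels, under the one-bit-per-channel
// assumption stated above (exact for the (1 << n) - 1 index masks).
static int channelCountFromMask(uint32_t mask) {
    return static_cast<int>(std::bitset<32>(mask).count());
}

int main() {
    const uint32_t indexMask4 = (1u << 4) - 1;  // "-chMask:8" in the usage text below
    std::printf("channels: %d\n", channelCountFromMask(indexMask4));  // 4
    return 0;
}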
 
-
 void printUsage() {
-  printf("\nUsage: ");
-  printf("\n     <executable> -i:<input_file> -o:<out_file> [options]\n");
-  printf("\nwhere, \n     <inputfile>  is the input file name");
-  printf("\n                  on which LVM effects are applied");
-  printf("\n     <outputfile> processed output file");
-  printf("\n     and options are mentioned below");
-  printf("\n");
-  printf("\n     -help (or) -h");
-  printf("\n           Prints this usage information");
-  printf("\n");
-  printf("\n     -chMask:<channel_mask>\n");
-  printf("\n         0  - AUDIO_CHANNEL_OUT_MONO");
-  printf("\n         1  - AUDIO_CHANNEL_OUT_STEREO");
-  printf("\n         2  - AUDIO_CHANNEL_OUT_2POINT1");
-  printf("\n         3  - AUDIO_CHANNEL_OUT_2POINT0POINT2");
-  printf("\n         4  - AUDIO_CHANNEL_OUT_QUAD");
-  printf("\n         5  - AUDIO_CHANNEL_OUT_QUAD_BACK");
-  printf("\n         6  - AUDIO_CHANNEL_OUT_QUAD_SIDE");
-  printf("\n         7  - AUDIO_CHANNEL_OUT_SURROUND");
-  printf("\n         8  - canonical channel index mask for 4 ch: (1 << 4) - 1");
-  printf("\n         9  - AUDIO_CHANNEL_OUT_2POINT1POINT2");
-  printf("\n         10 - AUDIO_CHANNEL_OUT_3POINT0POINT2");
-  printf("\n         11 - AUDIO_CHANNEL_OUT_PENTA");
-  printf("\n         12 - canonical channel index mask for 5 ch: (1 << 5) - 1");
-  printf("\n         13 - AUDIO_CHANNEL_OUT_3POINT1POINT2");
-  printf("\n         14 - AUDIO_CHANNEL_OUT_5POINT1");
-  printf("\n         15 - AUDIO_CHANNEL_OUT_5POINT1_BACK");
-  printf("\n         16 - AUDIO_CHANNEL_OUT_5POINT1_SIDE");
-  printf("\n         17 - canonical channel index mask for 6 ch: (1 << 6) - 1");
-  printf("\n         18 - AUDIO_CHANNEL_OUT_6POINT1");
-  printf("\n         19 - canonical channel index mask for 7 ch: (1 << 7) - 1");
-  printf("\n         20 - AUDIO_CHANNEL_OUT_5POINT1POINT2");
-  printf("\n         21 - AUDIO_CHANNEL_OUT_7POINT1");
-  printf("\n         22 - canonical channel index mask for 8 ch: (1 << 8) - 1");
-  printf("\n         default 0");
-  printf("\n     -vcBal:<Left Right Balance control in dB [-96 to 96 dB]>");
-  printf("\n            -ve values reduce Right channel while +ve value reduces Left channel");
-  printf("\n                 default 0");
-  printf("\n     -fch:<file_channels> (1 through 8)\n\n");
-  printf("\n     -M");
-  printf("\n           Mono mode (force all input audio channels to be identical)");
-  printf("\n     -basslvl:<effect_level>");
-  printf("\n           A value that ranges between %d - %d default 0", LVM_BE_MIN_EFFECTLEVEL,
-    LVM_BE_MAX_EFFECTLEVEL);
-  printf("\n");
-  printf("\n     -eqPreset:<preset Value>");
-  const size_t numPresetLvls  = std::size(gEqualizerPresets);
-  for (size_t i = 0; i < numPresetLvls; ++i) {
-    printf("\n           %zu - %s", i, gEqualizerPresets[i].name);
-  }
-  printf("\n           default - 0");
-  printf("\n     -bE ");
-  printf("\n           Enable Dynamic Bass Enhancement");
-  printf("\n");
-  printf("\n     -tE ");
-  printf("\n           Enable Treble Boost");
-  printf("\n");
-  printf("\n     -csE ");
-  printf("\n           Enable Concert Surround");
-  printf("\n");
-  printf("\n     -eqE ");
-  printf("\n           Enable Equalizer");
-}
-
-//----------------------------------------------------------------------------
-// LvmEffect_free()
-//----------------------------------------------------------------------------
-// Purpose: Free all memory associated with the Bundle.
-//
-// Inputs:
-//  pContext:   effect engine context
-//
-// Outputs:
-//
-//----------------------------------------------------------------------------
-
-void LvmEffect_free(struct EffectContext *pContext) {
-  LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
-  LVM_MemTab_t MemTab;
-
-  /* Free the algorithm memory */
-  LvmStatus = LVM_GetMemoryTable(pContext->pBundledContext->hInstance, &MemTab,
-                                 LVM_NULL);
-
-  LVM_ERROR_CHECK(LvmStatus, "LVM_GetMemoryTable", "LvmEffect_free")
-
-  for (int i = 0; i < LVM_NR_MEMORY_REGIONS; i++) {
-    if (MemTab.Region[i].Size != 0) {
-      if (MemTab.Region[i].pBaseAddress != NULL) {
-        ALOGV("\tLvmEffect_free - START freeing %" PRIu32
-              " bytes for region %u at %p\n",
-              MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
-
-        free(MemTab.Region[i].pBaseAddress);
-
-        ALOGV("\tLvmEffect_free - END   freeing %" PRIu32
-              " bytes for region %u at %p\n",
-              MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
-      } else {
-        ALOGE(
-            "\tLVM_ERROR : LvmEffect_free - trying to free with NULL pointer "
-            "%" PRIu32 " bytes for region %u at %p ERROR\n",
-            MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
-      }
+    printf("\nUsage: ");
+    printf("\n     <executable> -i:<input_file> -o:<out_file> [options]\n");
+    printf("\nwhere, \n     <inputfile>  is the input file name");
+    printf("\n                  on which LVM effects are applied");
+    printf("\n     <outputfile> processed output file");
+    printf("\n     and options are mentioned below");
+    printf("\n");
+    printf("\n     -help (or) -h");
+    printf("\n           Prints this usage information");
+    printf("\n");
+    printf("\n     -chMask:<channel_mask>\n");
+    printf("\n         0  - AUDIO_CHANNEL_OUT_MONO");
+    printf("\n         1  - AUDIO_CHANNEL_OUT_STEREO");
+    printf("\n         2  - AUDIO_CHANNEL_OUT_2POINT1");
+    printf("\n         3  - AUDIO_CHANNEL_OUT_2POINT0POINT2");
+    printf("\n         4  - AUDIO_CHANNEL_OUT_QUAD");
+    printf("\n         5  - AUDIO_CHANNEL_OUT_QUAD_BACK");
+    printf("\n         6  - AUDIO_CHANNEL_OUT_QUAD_SIDE");
+    printf("\n         7  - AUDIO_CHANNEL_OUT_SURROUND");
+    printf("\n         8  - canonical channel index mask for 4 ch: (1 << 4) - 1");
+    printf("\n         9  - AUDIO_CHANNEL_OUT_2POINT1POINT2");
+    printf("\n         10 - AUDIO_CHANNEL_OUT_3POINT0POINT2");
+    printf("\n         11 - AUDIO_CHANNEL_OUT_PENTA");
+    printf("\n         12 - canonical channel index mask for 5 ch: (1 << 5) - 1");
+    printf("\n         13 - AUDIO_CHANNEL_OUT_3POINT1POINT2");
+    printf("\n         14 - AUDIO_CHANNEL_OUT_5POINT1");
+    printf("\n         15 - AUDIO_CHANNEL_OUT_5POINT1_BACK");
+    printf("\n         16 - AUDIO_CHANNEL_OUT_5POINT1_SIDE");
+    printf("\n         17 - canonical channel index mask for 6 ch: (1 << 6) - 1");
+    printf("\n         18 - AUDIO_CHANNEL_OUT_6POINT1");
+    printf("\n         19 - canonical channel index mask for 7 ch: (1 << 7) - 1");
+    printf("\n         20 - AUDIO_CHANNEL_OUT_5POINT1POINT2");
+    printf("\n         21 - AUDIO_CHANNEL_OUT_7POINT1");
+    printf("\n         22 - canonical channel index mask for 8 ch: (1 << 8) - 1");
+    printf("\n         default 0");
+    printf("\n     -vcBal:<Left Right Balance control in dB [-96 to 96 dB]>");
+    printf("\n            -ve values reduce Right channel while +ve value reduces Left channel");
+    printf("\n                 default 0");
+    printf("\n     -fch:<file_channels> (1 through 8)\n\n");
+    printf("\n     -M");
+    printf("\n           Mono mode (force all input audio channels to be identical)");
+    printf("\n     -basslvl:<effect_level>");
+    printf("\n           A value that ranges between %d - %d default 0", LVM_BE_MIN_EFFECTLEVEL,
+           LVM_BE_MAX_EFFECTLEVEL);
+    printf("\n");
+    printf("\n     -eqPreset:<preset Value>");
+    const size_t numPresetLvls = std::size(gEqualizerPresets);
+    for (size_t i = 0; i < numPresetLvls; ++i) {
+        printf("\n           %zu - %s", i, gEqualizerPresets[i].name);
     }
-  }
-} /* end LvmEffect_free */
+    printf("\n           default - 0");
+    printf("\n     -bE ");
+    printf("\n           Enable Dynamic Bass Enhancement");
+    printf("\n");
+    printf("\n     -tE ");
+    printf("\n           Enable Treble Boost");
+    printf("\n");
+    printf("\n     -csE ");
+    printf("\n           Enable Concert Surround");
+    printf("\n");
+    printf("\n     -eqE ");
+    printf("\n           Enable Equalizer");
+}
 
 //----------------------------------------------------------------------------
 // LvmBundle_init()
@@ -239,586 +189,510 @@
 //
 //----------------------------------------------------------------------------
 
-int LvmBundle_init(struct EffectContext *pContext, LVM_ControlParams_t *params) {
-  ALOGV("\tLvmBundle_init start");
+int LvmBundle_init(struct EffectContext* pContext, LVM_ControlParams_t* params) {
+    ALOGV("\tLvmBundle_init start");
 
-  pContext->config.inputCfg.accessMode = EFFECT_BUFFER_ACCESS_READ;
-  pContext->config.inputCfg.channels = AUDIO_CHANNEL_OUT_STEREO;
-  pContext->config.inputCfg.format = EFFECT_BUFFER_FORMAT;
-  pContext->config.inputCfg.samplingRate = 44100;
-  pContext->config.inputCfg.bufferProvider.getBuffer = NULL;
-  pContext->config.inputCfg.bufferProvider.releaseBuffer = NULL;
-  pContext->config.inputCfg.bufferProvider.cookie = NULL;
-  pContext->config.inputCfg.mask = EFFECT_CONFIG_ALL;
-  pContext->config.outputCfg.accessMode = EFFECT_BUFFER_ACCESS_ACCUMULATE;
-  pContext->config.outputCfg.channels = AUDIO_CHANNEL_OUT_STEREO;
-  pContext->config.outputCfg.format = EFFECT_BUFFER_FORMAT;
-  pContext->config.outputCfg.samplingRate = 44100;
-  pContext->config.outputCfg.bufferProvider.getBuffer = NULL;
-  pContext->config.outputCfg.bufferProvider.releaseBuffer = NULL;
-  pContext->config.outputCfg.bufferProvider.cookie = NULL;
-  pContext->config.outputCfg.mask = EFFECT_CONFIG_ALL;
+    pContext->config.inputCfg.accessMode = EFFECT_BUFFER_ACCESS_READ;
+    pContext->config.inputCfg.channels = AUDIO_CHANNEL_OUT_STEREO;
+    pContext->config.inputCfg.format = EFFECT_BUFFER_FORMAT;
+    pContext->config.inputCfg.samplingRate = 44100;
+    pContext->config.inputCfg.bufferProvider.getBuffer = NULL;
+    pContext->config.inputCfg.bufferProvider.releaseBuffer = NULL;
+    pContext->config.inputCfg.bufferProvider.cookie = NULL;
+    pContext->config.inputCfg.mask = EFFECT_CONFIG_ALL;
+    pContext->config.outputCfg.accessMode = EFFECT_BUFFER_ACCESS_ACCUMULATE;
+    pContext->config.outputCfg.channels = AUDIO_CHANNEL_OUT_STEREO;
+    pContext->config.outputCfg.format = EFFECT_BUFFER_FORMAT;
+    pContext->config.outputCfg.samplingRate = 44100;
+    pContext->config.outputCfg.bufferProvider.getBuffer = NULL;
+    pContext->config.outputCfg.bufferProvider.releaseBuffer = NULL;
+    pContext->config.outputCfg.bufferProvider.cookie = NULL;
+    pContext->config.outputCfg.mask = EFFECT_CONFIG_ALL;
 
-  if (pContext->pBundledContext->hInstance != NULL) {
-    ALOGV(
-        "\tLvmBundle_init pContext->pBassBoost != NULL "
-        "-> Calling pContext->pBassBoost->free()");
+    if (pContext->pBundledContext->hInstance != NULL) {
+        ALOGV("\tLvmBundle_init pContext->pBassBoost != NULL "
+              "-> Calling pContext->pBassBoost->free()");
+        LVM_DelInstanceHandle(&pContext->pBundledContext->hInstance);
 
-    LvmEffect_free(pContext);
-
-    ALOGV(
-        "\tLvmBundle_init pContext->pBassBoost != NULL "
-        "-> Called pContext->pBassBoost->free()");
-  }
-
-  LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
-  LVM_InstParams_t InstParams;                 /* Instance parameters */
-  LVM_EQNB_BandDef_t BandDefs[MAX_NUM_BANDS];  /* Equaliser band definitions */
-  LVM_HeadroomParams_t HeadroomParams;         /* Headroom parameters */
-  LVM_HeadroomBandDef_t HeadroomBandDef[LVM_HEADROOM_MAX_NBANDS];
-  LVM_MemTab_t MemTab; /* Memory allocation table */
-  bool bMallocFailure = LVM_FALSE;
-
-  /* Set the capabilities */
-  InstParams.BufferMode = LVM_UNMANAGED_BUFFERS;
-  InstParams.MaxBlockSize = MAX_CALL_SIZE;
-  InstParams.EQNB_NumBands = MAX_NUM_BANDS;
-  InstParams.PSA_Included = LVM_PSA_ON;
-
-  /* Allocate memory, forcing alignment */
-  LvmStatus = LVM_GetMemoryTable(LVM_NULL, &MemTab, &InstParams);
-
-  LVM_ERROR_CHECK(LvmStatus, "LVM_GetMemoryTable", "LvmBundle_init");
-  if (LvmStatus != LVM_SUCCESS) return -EINVAL;
-
-  ALOGV("\tCreateInstance Succesfully called LVM_GetMemoryTable\n");
-
-  /* Allocate memory */
-  for (int i = 0; i < LVM_NR_MEMORY_REGIONS; i++) {
-    if (MemTab.Region[i].Size != 0) {
-      MemTab.Region[i].pBaseAddress = malloc(MemTab.Region[i].Size);
-
-      if (MemTab.Region[i].pBaseAddress == LVM_NULL) {
-        ALOGE(
-            "\tLVM_ERROR :LvmBundle_init CreateInstance Failed to allocate "
-            "%" PRIu32 " bytes for region %u\n",
-            MemTab.Region[i].Size, i);
-        bMallocFailure = LVM_TRUE;
-        break;
-      } else {
-        ALOGV("\tLvmBundle_init CreateInstance allocated %" PRIu32
-              " bytes for region %u at %p\n",
-              MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
-      }
+        ALOGV("\tLvmBundle_init pContext->pBassBoost != NULL "
+              "-> Called pContext->pBassBoost->free()");
     }
-  }
 
-  /* If one or more of the memory regions failed to allocate, free the regions
-   * that were
-   * succesfully allocated and return with an error
-   */
-  if (bMallocFailure == LVM_TRUE) {
-    for (int i = 0; i < LVM_NR_MEMORY_REGIONS; i++) {
-      if (MemTab.Region[i].pBaseAddress == LVM_NULL) {
-        ALOGE(
-            "\tLVM_ERROR :LvmBundle_init CreateInstance Failed to allocate "
-            "%" PRIu32 " bytes for region %u Not freeing\n",
-            MemTab.Region[i].Size, i);
-      } else {
-        ALOGE(
-            "\tLVM_ERROR :LvmBundle_init CreateInstance Failed: but allocated "
-            "%" PRIu32 " bytes for region %u at %p- free\n",
-            MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
-        free(MemTab.Region[i].pBaseAddress);
-      }
+    LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
+    LVM_InstParams_t InstParams;                 /* Instance parameters */
+    LVM_EQNB_BandDef_t BandDefs[MAX_NUM_BANDS];  /* Equaliser band definitions */
+    LVM_HeadroomParams_t HeadroomParams;         /* Headroom parameters */
+    LVM_HeadroomBandDef_t HeadroomBandDef[LVM_HEADROOM_MAX_NBANDS];
+
+    /* Set the capabilities */
+    InstParams.BufferMode = LVM_UNMANAGED_BUFFERS;
+    InstParams.MaxBlockSize = MAX_CALL_SIZE;
+    InstParams.EQNB_NumBands = MAX_NUM_BANDS;
+    InstParams.PSA_Included = LVM_PSA_ON;
+
+    LvmStatus = LVM_GetInstanceHandle(&pContext->pBundledContext->hInstance, &InstParams);
+
+    LVM_ERROR_CHECK(LvmStatus, "LVM_GetInstanceHandle", "LvmBundle_init");
+    if (LvmStatus != LVM_SUCCESS) return -EINVAL;
+
+    ALOGV("\tLvmBundle_init CreateInstance Successfully called "
+          "LVM_GetInstanceHandle\n");
+
+    /* Set the initial process parameters */
+    /* General parameters */
+    params->OperatingMode = LVM_MODE_ON;
+    params->SampleRate = LVM_FS_44100;
+    params->SourceFormat = LVM_STEREO;
+    params->ChMask = AUDIO_CHANNEL_OUT_STEREO;
+    params->SpeakerType = LVM_HEADPHONES;
+
+    pContext->pBundledContext->SampleRate = LVM_FS_44100;
+
+    /* Concert Sound parameters */
+    params->VirtualizerOperatingMode = LVM_MODE_OFF;
+    params->VirtualizerType = LVM_CONCERTSOUND;
+    params->VirtualizerReverbLevel = 100;
+    params->CS_EffectLevel = LVM_CS_EFFECT_NONE;
+
+    /* N-Band Equaliser parameters */
+    params->EQNB_OperatingMode = LVM_EQNB_ON;
+    params->EQNB_NBands = FIVEBAND_NUMBANDS;
+    params->pEQNB_BandDefinition = &BandDefs[0];
+
+    for (int i = 0; i < FIVEBAND_NUMBANDS; i++) {
+        BandDefs[i].Frequency = EQNB_5BandPresetsFrequencies[i];
+        BandDefs[i].QFactor = EQNB_5BandPresetsQFactors[i];
+        BandDefs[i].Gain = EQNB_5BandSoftPresets[i];
     }
-    return -EINVAL;
-  }
-  ALOGV("\tLvmBundle_init CreateInstance Succesfully malloc'd memory\n");
 
-  /* Initialise */
-  pContext->pBundledContext->hInstance = LVM_NULL;
+    /* Volume Control parameters */
+    params->VC_EffectLevel = 0;
+    params->VC_Balance = 0;
 
-  /* Init sets the instance handle */
-  LvmStatus = LVM_GetInstanceHandle(&pContext->pBundledContext->hInstance,
-                                    &MemTab, &InstParams);
+    /* Treble Enhancement parameters */
+    params->TE_OperatingMode = LVM_TE_OFF;
+    params->TE_EffectLevel = 0;
 
-  LVM_ERROR_CHECK(LvmStatus, "LVM_GetInstanceHandle", "LvmBundle_init");
-  if (LvmStatus != LVM_SUCCESS) return -EINVAL;
+    /* PSA Control parameters */
+    params->PSA_Enable = LVM_PSA_OFF;
+    params->PSA_PeakDecayRate = (LVM_PSA_DecaySpeed_en)0;
 
-  ALOGV(
-      "\tLvmBundle_init CreateInstance Succesfully called "
-      "LVM_GetInstanceHandle\n");
+    /* Bass Enhancement parameters */
+    params->BE_OperatingMode = LVM_BE_ON;
+    params->BE_EffectLevel = 0;
+    params->BE_CentreFreq = LVM_BE_CENTRE_90Hz;
+    params->BE_HPF = LVM_BE_HPF_ON;
 
-  /* Set the initial process parameters */
-  /* General parameters */
-  params->OperatingMode = LVM_MODE_ON;
-  params->SampleRate = LVM_FS_44100;
-  params->SourceFormat = LVM_STEREO;
-  params->ChMask       = AUDIO_CHANNEL_OUT_STEREO;
-  params->SpeakerType = LVM_HEADPHONES;
+    /* PSA Control parameters */
+    params->PSA_Enable = LVM_PSA_OFF;
+    params->PSA_PeakDecayRate = LVM_PSA_SPEED_MEDIUM;
 
-  pContext->pBundledContext->SampleRate = LVM_FS_44100;
+    /* TE Control parameters */
+    params->TE_OperatingMode = LVM_TE_OFF;
+    params->TE_EffectLevel = 0;
 
-  /* Concert Sound parameters */
-  params->VirtualizerOperatingMode = LVM_MODE_OFF;
-  params->VirtualizerType = LVM_CONCERTSOUND;
-  params->VirtualizerReverbLevel = 100;
-  params->CS_EffectLevel = LVM_CS_EFFECT_NONE;
+    /* Activate the initial settings */
+    LvmStatus = LVM_SetControlParameters(pContext->pBundledContext->hInstance, params);
 
-  /* N-Band Equaliser parameters */
-  params->EQNB_OperatingMode = LVM_EQNB_ON;
-  params->EQNB_NBands = FIVEBAND_NUMBANDS;
-  params->pEQNB_BandDefinition = &BandDefs[0];
+    LVM_ERROR_CHECK(LvmStatus, "LVM_SetControlParameters", "LvmBundle_init");
+    if (LvmStatus != LVM_SUCCESS) return -EINVAL;
 
-  for (int i = 0; i < FIVEBAND_NUMBANDS; i++) {
-    BandDefs[i].Frequency = EQNB_5BandPresetsFrequencies[i];
-    BandDefs[i].QFactor = EQNB_5BandPresetsQFactors[i];
-    BandDefs[i].Gain = EQNB_5BandSoftPresets[i];
-  }
+    ALOGV("\tLvmBundle_init CreateInstance Successfully called "
+          "LVM_SetControlParameters\n");
 
-  /* Volume Control parameters */
-  params->VC_EffectLevel = 0;
-  params->VC_Balance = 0;
+    /* Set the headroom parameters */
+    HeadroomBandDef[0].Limit_Low = 20;
+    HeadroomBandDef[0].Limit_High = 4999;
+    HeadroomBandDef[0].Headroom_Offset = 0;
+    HeadroomBandDef[1].Limit_Low = 5000;
+    HeadroomBandDef[1].Limit_High = 24000;
+    HeadroomBandDef[1].Headroom_Offset = 0;
+    HeadroomParams.pHeadroomDefinition = &HeadroomBandDef[0];
+    HeadroomParams.Headroom_OperatingMode = LVM_HEADROOM_ON;
+    HeadroomParams.NHeadroomBands = 2;
 
-  /* Treble Enhancement parameters */
-  params->TE_OperatingMode = LVM_TE_OFF;
-  params->TE_EffectLevel = 0;
+    LvmStatus = LVM_SetHeadroomParams(pContext->pBundledContext->hInstance, &HeadroomParams);
 
-  /* PSA Control parameters */
-  params->PSA_Enable = LVM_PSA_OFF;
-  params->PSA_PeakDecayRate = (LVM_PSA_DecaySpeed_en)0;
+    LVM_ERROR_CHECK(LvmStatus, "LVM_SetHeadroomParams", "LvmBundle_init");
+    if (LvmStatus != LVM_SUCCESS) return -EINVAL;
 
-  /* Bass Enhancement parameters */
-  params->BE_OperatingMode = LVM_BE_ON;
-  params->BE_EffectLevel = 0;
-  params->BE_CentreFreq = LVM_BE_CENTRE_90Hz;
-  params->BE_HPF = LVM_BE_HPF_ON;
-
-  /* PSA Control parameters */
-  params->PSA_Enable = LVM_PSA_OFF;
-  params->PSA_PeakDecayRate = LVM_PSA_SPEED_MEDIUM;
-
-  /* TE Control parameters */
-  params->TE_OperatingMode = LVM_TE_OFF;
-  params->TE_EffectLevel = 0;
-
-  /* Activate the initial settings */
-  LvmStatus =
-      LVM_SetControlParameters(pContext->pBundledContext->hInstance, params);
-
-  LVM_ERROR_CHECK(LvmStatus, "LVM_SetControlParameters", "LvmBundle_init");
-  if (LvmStatus != LVM_SUCCESS) return -EINVAL;
-
-  ALOGV(
-      "\tLvmBundle_init CreateInstance Succesfully called "
-      "LVM_SetControlParameters\n");
-
-  /* Set the headroom parameters */
-  HeadroomBandDef[0].Limit_Low = 20;
-  HeadroomBandDef[0].Limit_High = 4999;
-  HeadroomBandDef[0].Headroom_Offset = 0;
-  HeadroomBandDef[1].Limit_Low = 5000;
-  HeadroomBandDef[1].Limit_High = 24000;
-  HeadroomBandDef[1].Headroom_Offset = 0;
-  HeadroomParams.pHeadroomDefinition = &HeadroomBandDef[0];
-  HeadroomParams.Headroom_OperatingMode = LVM_HEADROOM_ON;
-  HeadroomParams.NHeadroomBands = 2;
-
-  LvmStatus = LVM_SetHeadroomParams(pContext->pBundledContext->hInstance,
-                                    &HeadroomParams);
-
-  LVM_ERROR_CHECK(LvmStatus, "LVM_SetHeadroomParams", "LvmBundle_init");
-  if (LvmStatus != LVM_SUCCESS) return -EINVAL;
-
-  ALOGV(
-      "\tLvmBundle_init CreateInstance Succesfully called "
-      "LVM_SetHeadroomParams\n");
-  ALOGV("\tLvmBundle_init End");
-  return 0;
+    ALOGV("\tLvmBundle_init CreateInstance Successfully called "
+          "LVM_SetHeadroomParams\n");
+    ALOGV("\tLvmBundle_init End");
+    return 0;
 } /* end LvmBundle_init */
 
-int lvmCreate(struct EffectContext *pContext,
-              lvmConfigParams_t    *plvmConfigParams,
-              LVM_ControlParams_t  *params) {
-  int ret = 0;
-  pContext->pBundledContext = NULL;
-  pContext->pBundledContext = (BundledEffectContext *)malloc(sizeof(struct BundledEffectContext));
-  if (NULL == pContext->pBundledContext) {
-    return -EINVAL;
-  }
-
-  pContext->pBundledContext->SessionNo = 0;
-  pContext->pBundledContext->SessionId = 0;
-  pContext->pBundledContext->hInstance = NULL;
-  pContext->pBundledContext->bVolumeEnabled = LVM_FALSE;
-  pContext->pBundledContext->bEqualizerEnabled = LVM_FALSE;
-  pContext->pBundledContext->bBassEnabled = LVM_FALSE;
-  pContext->pBundledContext->bBassTempDisabled = LVM_FALSE;
-  pContext->pBundledContext->bVirtualizerEnabled = LVM_FALSE;
-  pContext->pBundledContext->bVirtualizerTempDisabled = LVM_FALSE;
-  pContext->pBundledContext->nOutputDevice = AUDIO_DEVICE_NONE;
-  pContext->pBundledContext->nVirtualizerForcedDevice = AUDIO_DEVICE_NONE;
-  pContext->pBundledContext->NumberEffectsEnabled = 0;
-  pContext->pBundledContext->NumberEffectsCalled = 0;
-  pContext->pBundledContext->firstVolume = LVM_TRUE;
-  pContext->pBundledContext->volume = 0;
-
-  /* Saved strength is used to return the exact strength that was used in the
-   * set to the get
-   * because we map the original strength range of 0:1000 to 1:15, and this will
-   * avoid
-   * quantisation like effect when returning
-   */
-  pContext->pBundledContext->BassStrengthSaved = 0;
-  pContext->pBundledContext->VirtStrengthSaved = 0;
-  pContext->pBundledContext->CurPreset = PRESET_CUSTOM;
-  pContext->pBundledContext->levelSaved = 0;
-  pContext->pBundledContext->bMuteEnabled = LVM_FALSE;
-  pContext->pBundledContext->bStereoPositionEnabled = LVM_FALSE;
-  pContext->pBundledContext->positionSaved = 0;
-  pContext->pBundledContext->workBuffer = NULL;
-  pContext->pBundledContext->frameCount = -1;
-  pContext->pBundledContext->SamplesToExitCountVirt = 0;
-  pContext->pBundledContext->SamplesToExitCountBb = 0;
-  pContext->pBundledContext->SamplesToExitCountEq = 0;
-  for (int i = 0; i < FIVEBAND_NUMBANDS; i++) {
-    pContext->pBundledContext->bandGaindB[i] = EQNB_5BandSoftPresets[i];
-  }
-  pContext->config.inputCfg.channels = plvmConfigParams->nrChannels;
-  ALOGV("\tEffectCreate - Calling LvmBundle_init");
-  ret = LvmBundle_init(pContext, params);
-
-  if (ret < 0) {
-    ALOGE("\tLVM_ERROR : lvmCreate() Bundle init failed");
-    return ret;
-  }
-  return 0;
-}
-
-int lvmControl(struct EffectContext *pContext,
-               lvmConfigParams_t    *plvmConfigParams,
-               LVM_ControlParams_t  *params) {
-  LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
-
-  /* Set the initial process parameters */
-  /* General parameters */
-  params->OperatingMode = LVM_MODE_ON;
-  params->SpeakerType = LVM_HEADPHONES;
-
-  params->ChMask     = plvmConfigParams->chMask;
-  params->NrChannels = plvmConfigParams->nrChannels;
-  if (params->NrChannels == 1) {
-    params->SourceFormat = LVM_MONO;
-  } else if (params->NrChannels == 2) {
-    params->SourceFormat = LVM_STEREO;
-  } else if (params->NrChannels > 2 && params->NrChannels <= 8) { // FCC_2 FCC_8
-    params->SourceFormat = LVM_MULTICHANNEL;
-  } else {
-      return -EINVAL;
-  }
-
-  LVM_Fs_en sampleRate;
-  switch (plvmConfigParams->samplingFreq) {
-    case 8000:
-      sampleRate = LVM_FS_8000;
-      break;
-    case 11025:
-      sampleRate = LVM_FS_11025;
-      break;
-    case 12000:
-      sampleRate = LVM_FS_12000;
-      break;
-    case 16000:
-      sampleRate = LVM_FS_16000;
-      break;
-    case 22050:
-      sampleRate = LVM_FS_22050;
-      break;
-    case 24000:
-      sampleRate = LVM_FS_24000;
-      break;
-    case 32000:
-      sampleRate = LVM_FS_32000;
-      break;
-    case 44100:
-      sampleRate = LVM_FS_44100;
-      break;
-    case 48000:
-      sampleRate = LVM_FS_48000;
-      break;
-    case 88200:
-      sampleRate = LVM_FS_88200;
-      break;
-    case 96000:
-      sampleRate = LVM_FS_96000;
-      break;
-    case 176400:
-      sampleRate = LVM_FS_176400;
-      break;
-    case 192000:
-      sampleRate = LVM_FS_192000;
-      break;
-    default:
-      return -EINVAL;
-  }
-  params->SampleRate = sampleRate;
-
-  /* Concert Sound parameters */
-  params->VirtualizerOperatingMode = plvmConfigParams->csEnable;
-  params->VirtualizerType = LVM_CONCERTSOUND;
-  params->VirtualizerReverbLevel = 100;
-  params->CS_EffectLevel = LVM_CS_EFFECT_NONE;
-
-  /* N-Band Equaliser parameters */
-  const int eqPresetLevel = plvmConfigParams->eqPresetLevel;
-  LVM_EQNB_BandDef_t BandDefs[MAX_NUM_BANDS];  /* Equaliser band definitions */
-  for (int i = 0; i < FIVEBAND_NUMBANDS; i++) {
-    BandDefs[i].Frequency = EQNB_5BandPresetsFrequencies[i];
-    BandDefs[i].QFactor = EQNB_5BandPresetsQFactors[i];
-    BandDefs[i].Gain =
-        EQNB_5BandSoftPresets[(FIVEBAND_NUMBANDS * eqPresetLevel) + i];
-  }
-  params->EQNB_OperatingMode = plvmConfigParams->eqEnable;
- // Caution: raw pointer to stack data, stored in instance by LVM_SetControlParameters.
-  params->pEQNB_BandDefinition = &BandDefs[0];
-
-  /* Volume Control parameters */
-  params->VC_EffectLevel = 0;
-  params->VC_Balance = plvmConfigParams->vcBal;
-
-  /* Treble Enhancement parameters */
-  params->TE_OperatingMode = plvmConfigParams->trebleEnable;
-
-  /* PSA Control parameters */
-  params->PSA_Enable = LVM_PSA_ON;
-
-  /* Bass Enhancement parameters */
-  params->BE_OperatingMode = plvmConfigParams->bassEnable;
-
-  /* Activate the initial settings */
-  LvmStatus =
-      LVM_SetControlParameters(pContext->pBundledContext->hInstance, params);
-
-  LVM_ERROR_CHECK(LvmStatus, "LVM_SetControlParameters", "LvmBundle_init");
-  if (LvmStatus != LVM_SUCCESS) return -EINVAL;
-
-  LvmStatus = LVM_ApplyNewSettings(pContext->pBundledContext->hInstance);
-
-  if (LvmStatus != LVM_SUCCESS) return -EINVAL;
-
-  return 0;
-}
-
-int lvmExecute(float *floatIn, float *floatOut, struct EffectContext *pContext,
-               lvmConfigParams_t *plvmConfigParams) {
-  const int frameLength = plvmConfigParams->frameLength;
-  return
-      LVM_Process(pContext->pBundledContext->hInstance, /* Instance handle */
-                  floatIn,                              /* Input buffer */
-                  floatOut,                             /* Output buffer */
-                  (LVM_UINT16)frameLength, /* Number of samples to read */
-                  0);                      /* Audio Time */
-}
-
-int lvmMainProcess(EffectContext *pContext,
-                   LVM_ControlParams_t *pParams,
-                   lvmConfigParams_t *plvmConfigParams,
-                   FILE *finp,
-                   FILE *fout) {
-  int errCode = lvmControl(pContext, plvmConfigParams, pParams);
-  if (errCode) {
-    ALOGE("Error: lvmControl returned with %d\n", errCode);
-    return errCode;
-  }
-
-  const int channelCount = plvmConfigParams->nrChannels;
-  const int frameLength = plvmConfigParams->frameLength;
-  const int frameSize = channelCount * sizeof(float);  // processing size
-  const int ioChannelCount = plvmConfigParams->fChannels;
-  const int ioFrameSize = ioChannelCount * sizeof(short); // file load size
-  const int maxChannelCount = std::max(channelCount, ioChannelCount);
-  /*
-   * Mono input will be converted to 2 channels internally in the process call
-   * by copying the same data into the second channel.
-   * Hence when channelCount is 1, output buffer should be allocated for
-   * 2 channels. The memAllocChCount takes care of allocation of sufficient
-   * memory for the output buffer.
-   */
-  const int memAllocChCount = (channelCount == 1 ? 2 : channelCount);
-
-  std::vector<short> in(frameLength * maxChannelCount);
-  std::vector<short> out(frameLength * maxChannelCount);
-  std::vector<float> floatIn(frameLength * channelCount);
-  std::vector<float> floatOut(frameLength * memAllocChCount);
-
-  int frameCounter = 0;
-  while (fread(in.data(), ioFrameSize, frameLength, finp) == (size_t)frameLength) {
-    if (ioChannelCount != channelCount) {
-        adjust_channels(in.data(), ioChannelCount, in.data(), channelCount,
-               sizeof(short), frameLength * ioFrameSize);
+int lvmCreate(struct EffectContext* pContext, lvmConfigParams_t* plvmConfigParams,
+              LVM_ControlParams_t* params) {
+    int ret = 0;
+    pContext->pBundledContext = NULL;
+    pContext->pBundledContext = (BundledEffectContext*)malloc(sizeof(struct BundledEffectContext));
+    if (NULL == pContext->pBundledContext) {
+        return -EINVAL;
     }
-    memcpy_to_float_from_i16(floatIn.data(), in.data(), frameLength * channelCount);
 
-    // Mono mode will replicate the first channel to all other channels.
-    // This ensures all audio channels are identical. This is useful for testing
-    // Bass Boost, which extracts a mono signal for processing.
-    if (plvmConfigParams->monoMode && channelCount > 1) {
-        for (int i = 0; i < frameLength; ++i) {
-            auto *fp = &floatIn[i * channelCount];
-            std::fill(fp + 1, fp + channelCount, *fp); // replicate ch 0
+    pContext->pBundledContext->SessionNo = 0;
+    pContext->pBundledContext->SessionId = 0;
+    pContext->pBundledContext->hInstance = NULL;
+    pContext->pBundledContext->bVolumeEnabled = LVM_FALSE;
+    pContext->pBundledContext->bEqualizerEnabled = LVM_FALSE;
+    pContext->pBundledContext->bBassEnabled = LVM_FALSE;
+    pContext->pBundledContext->bBassTempDisabled = LVM_FALSE;
+    pContext->pBundledContext->bVirtualizerEnabled = LVM_FALSE;
+    pContext->pBundledContext->bVirtualizerTempDisabled = LVM_FALSE;
+    pContext->pBundledContext->nOutputDevice = AUDIO_DEVICE_NONE;
+    pContext->pBundledContext->nVirtualizerForcedDevice = AUDIO_DEVICE_NONE;
+    pContext->pBundledContext->NumberEffectsEnabled = 0;
+    pContext->pBundledContext->NumberEffectsCalled = 0;
+    pContext->pBundledContext->firstVolume = LVM_TRUE;
+    pContext->pBundledContext->volume = 0;
+
+    /* Saved strength is used so that a get returns the exact strength passed to
+     * the preceding set: we map the original strength range of 0:1000 to 1:15,
+     * and returning the saved value avoids a quantisation-like effect in the
+     * reported strength.
+     */
+    pContext->pBundledContext->BassStrengthSaved = 0;
+    pContext->pBundledContext->VirtStrengthSaved = 0;
+    pContext->pBundledContext->CurPreset = PRESET_CUSTOM;
+    pContext->pBundledContext->levelSaved = 0;
+    pContext->pBundledContext->bMuteEnabled = LVM_FALSE;
+    pContext->pBundledContext->bStereoPositionEnabled = LVM_FALSE;
+    pContext->pBundledContext->positionSaved = 0;
+    pContext->pBundledContext->workBuffer = NULL;
+    pContext->pBundledContext->frameCount = -1;
+    pContext->pBundledContext->SamplesToExitCountVirt = 0;
+    pContext->pBundledContext->SamplesToExitCountBb = 0;
+    pContext->pBundledContext->SamplesToExitCountEq = 0;
+    for (int i = 0; i < FIVEBAND_NUMBANDS; i++) {
+        pContext->pBundledContext->bandGaindB[i] = EQNB_5BandSoftPresets[i];
+    }
+    pContext->config.inputCfg.channels = plvmConfigParams->nrChannels;
+    ALOGV("\tEffectCreate - Calling LvmBundle_init");
+    ret = LvmBundle_init(pContext, params);
+
+    if (ret < 0) {
+        ALOGE("\tLVM_ERROR : lvmCreate() Bundle init failed");
+        return ret;
+    }
+    return 0;
+}
+
+int lvmControl(struct EffectContext* pContext, lvmConfigParams_t* plvmConfigParams,
+               LVM_ControlParams_t* params) {
+    LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
+
+    /* Set the initial process parameters */
+    /* General parameters */
+    params->OperatingMode = LVM_MODE_ON;
+    params->SpeakerType = LVM_HEADPHONES;
+
+    params->ChMask = plvmConfigParams->chMask;
+    params->NrChannels = plvmConfigParams->nrChannels;
+    if (params->NrChannels == 1) {
+        params->SourceFormat = LVM_MONO;
+    } else if (params->NrChannels == 2) {
+        params->SourceFormat = LVM_STEREO;
+    } else if (params->NrChannels > 2 && params->NrChannels <= 8) {  // FCC_2 FCC_8
+        params->SourceFormat = LVM_MULTICHANNEL;
+    } else {
+        return -EINVAL;
+    }
+
+    LVM_Fs_en sampleRate;
+    switch (plvmConfigParams->samplingFreq) {
+        case 8000:
+            sampleRate = LVM_FS_8000;
+            break;
+        case 11025:
+            sampleRate = LVM_FS_11025;
+            break;
+        case 12000:
+            sampleRate = LVM_FS_12000;
+            break;
+        case 16000:
+            sampleRate = LVM_FS_16000;
+            break;
+        case 22050:
+            sampleRate = LVM_FS_22050;
+            break;
+        case 24000:
+            sampleRate = LVM_FS_24000;
+            break;
+        case 32000:
+            sampleRate = LVM_FS_32000;
+            break;
+        case 44100:
+            sampleRate = LVM_FS_44100;
+            break;
+        case 48000:
+            sampleRate = LVM_FS_48000;
+            break;
+        case 88200:
+            sampleRate = LVM_FS_88200;
+            break;
+        case 96000:
+            sampleRate = LVM_FS_96000;
+            break;
+        case 176400:
+            sampleRate = LVM_FS_176400;
+            break;
+        case 192000:
+            sampleRate = LVM_FS_192000;
+            break;
+        default:
+            return -EINVAL;
+    }
+    params->SampleRate = sampleRate;
+
+    /* Concert Sound parameters */
+    params->VirtualizerOperatingMode = plvmConfigParams->csEnable;
+    params->VirtualizerType = LVM_CONCERTSOUND;
+    params->VirtualizerReverbLevel = 100;
+    params->CS_EffectLevel = LVM_CS_EFFECT_NONE;
+
+    /* N-Band Equaliser parameters */
+    const int eqPresetLevel = plvmConfigParams->eqPresetLevel;
+    LVM_EQNB_BandDef_t BandDefs[MAX_NUM_BANDS]; /* Equaliser band definitions */
+    for (int i = 0; i < FIVEBAND_NUMBANDS; i++) {
+        BandDefs[i].Frequency = EQNB_5BandPresetsFrequencies[i];
+        BandDefs[i].QFactor = EQNB_5BandPresetsQFactors[i];
+        BandDefs[i].Gain = EQNB_5BandSoftPresets[(FIVEBAND_NUMBANDS * eqPresetLevel) + i];
+    }
+    params->EQNB_OperatingMode = plvmConfigParams->eqEnable;
+    // Caution: raw pointer to stack data, stored in instance by LVM_SetControlParameters.
+    params->pEQNB_BandDefinition = &BandDefs[0];
+
+    /* Volume Control parameters */
+    params->VC_EffectLevel = 0;
+    params->VC_Balance = plvmConfigParams->vcBal;
+
+    /* Treble Enhancement parameters */
+    params->TE_OperatingMode = plvmConfigParams->trebleEnable;
+
+    /* PSA Control parameters */
+    params->PSA_Enable = LVM_PSA_ON;
+
+    /* Bass Enhancement parameters */
+    params->BE_OperatingMode = plvmConfigParams->bassEnable;
+
+    /* Activate the initial settings */
+    LvmStatus = LVM_SetControlParameters(pContext->pBundledContext->hInstance, params);
+
+    LVM_ERROR_CHECK(LvmStatus, "LVM_SetControlParameters", "LvmBundle_init");
+    if (LvmStatus != LVM_SUCCESS) return -EINVAL;
+
+    LvmStatus = LVM_ApplyNewSettings(pContext->pBundledContext->hInstance);
+
+    if (LvmStatus != LVM_SUCCESS) return -EINVAL;
+
+    return 0;
+}
+
+int lvmExecute(float* floatIn, float* floatOut, struct EffectContext* pContext,
+               lvmConfigParams_t* plvmConfigParams) {
+    const int frameLength = plvmConfigParams->frameLength;
+    return LVM_Process(pContext->pBundledContext->hInstance, /* Instance handle */
+                       floatIn,                              /* Input buffer */
+                       floatOut,                             /* Output buffer */
+                       (LVM_UINT16)frameLength,              /* Number of samples to read */
+                       0);                                   /* Audio Time */
+}
+
+int lvmMainProcess(EffectContext* pContext, LVM_ControlParams_t* pParams,
+                   lvmConfigParams_t* plvmConfigParams, FILE* finp, FILE* fout) {
+    int errCode = lvmControl(pContext, plvmConfigParams, pParams);
+    if (errCode) {
+        ALOGE("Error: lvmControl returned with %d\n", errCode);
+        return errCode;
+    }
+
+    const int channelCount = plvmConfigParams->nrChannels;
+    const int frameLength = plvmConfigParams->frameLength;
+    const int frameSize = channelCount * sizeof(float);  // processing size
+    const int ioChannelCount = plvmConfigParams->fChannels;
+    const int ioFrameSize = ioChannelCount * sizeof(short);  // file load size
+    const int maxChannelCount = std::max(channelCount, ioChannelCount);
+    /*
+     * Mono input will be converted to 2 channels internally in the process call
+     * by copying the same data into the second channel.
+     * Hence when channelCount is 1, output buffer should be allocated for
+     * 2 channels. The memAllocChCount takes care of allocation of sufficient
+     * memory for the output buffer.
+     */
+    const int memAllocChCount = (channelCount == 1 ? 2 : channelCount);
+
+    std::vector<short> in(frameLength * maxChannelCount);
+    std::vector<short> out(frameLength * maxChannelCount);
+    std::vector<float> floatIn(frameLength * channelCount);
+    std::vector<float> floatOut(frameLength * memAllocChCount);
+
+    int frameCounter = 0;
+    while (fread(in.data(), ioFrameSize, frameLength, finp) == (size_t)frameLength) {
+        if (ioChannelCount != channelCount) {
+            adjust_channels(in.data(), ioChannelCount, in.data(), channelCount, sizeof(short),
+                            frameLength * ioFrameSize);
+        }
+        memcpy_to_float_from_i16(floatIn.data(), in.data(), frameLength * channelCount);
+
+        // Mono mode will replicate the first channel to all other channels.
+        // This ensures all audio channels are identical. This is useful for testing
+        // Bass Boost, which extracts a mono signal for processing.
+        if (plvmConfigParams->monoMode && channelCount > 1) {
+            for (int i = 0; i < frameLength; ++i) {
+                auto* fp = &floatIn[i * channelCount];
+                std::fill(fp + 1, fp + channelCount, *fp);  // replicate ch 0
+            }
+        }
+#ifndef BYPASS_EXEC
+        errCode = lvmExecute(floatIn.data(), floatOut.data(), pContext, plvmConfigParams);
+        if (errCode) {
+            printf("\nError: lvmExecute returned with %d\n", errCode);
+            return errCode;
+        }
+
+        (void)frameSize;  // eliminate warning
+#else
+        memcpy(floatOut.data(), floatIn.data(), frameLength * frameSize);
+#endif
+        memcpy_to_i16_from_float(out.data(), floatOut.data(), frameLength * channelCount);
+        if (ioChannelCount != channelCount) {
+            adjust_channels(out.data(), channelCount, out.data(), ioChannelCount, sizeof(short),
+                            frameLength * channelCount * sizeof(short));
+        }
+        (void)fwrite(out.data(), ioFrameSize, frameLength, fout);
+        frameCounter += frameLength;
+    }
+    printf("frameCounter: [%d]\n", frameCounter);
+    return 0;
+}
+
+int main(int argc, const char* argv[]) {
+    if (argc == 1) {
+        printUsage();
+        return -1;
+    }
+
+    lvmConfigParams_t lvmConfigParams{};  // default initialize
+    const char* infile = nullptr;
+    const char* outfile = nullptr;
+
+    for (int i = 1; i < argc; i++) {
+        printf("%s ", argv[i]);
+        if (!strncmp(argv[i], "-i:", 3)) {
+            infile = argv[i] + 3;
+        } else if (!strncmp(argv[i], "-o:", 3)) {
+            outfile = argv[i] + 3;
+        } else if (!strncmp(argv[i], "-fs:", 4)) {
+            const int samplingFreq = atoi(argv[i] + 4);
+            if (samplingFreq != 8000 && samplingFreq != 11025 && samplingFreq != 12000 &&
+                samplingFreq != 16000 && samplingFreq != 22050 && samplingFreq != 24000 &&
+                samplingFreq != 32000 && samplingFreq != 44100 && samplingFreq != 48000 &&
+                samplingFreq != 88200 && samplingFreq != 96000 && samplingFreq != 176400 &&
+                samplingFreq != 192000) {
+                printf("Error: Unsupported Sampling Frequency : %d\n", samplingFreq);
+                return -1;
+            }
+            lvmConfigParams.samplingFreq = samplingFreq;
+        } else if (!strncmp(argv[i], "-chMask:", 8)) {
+            const int chMaskConfigIdx = atoi(argv[i] + 8);
+            if (chMaskConfigIdx < 0 || (size_t)chMaskConfigIdx >= std::size(lvmConfigChMask)) {
+                ALOGE("\nError: Unsupported Channel Mask : %d\n", chMaskConfigIdx);
+                return -1;
+            }
+            const audio_channel_mask_t chMask = lvmConfigChMask[chMaskConfigIdx];
+            lvmConfigParams.chMask = chMask;
+            lvmConfigParams.nrChannels = audio_channel_count_from_out_mask(chMask);
+        } else if (!strncmp(argv[i], "-vcBal:", 7)) {
+            const int vcBalance = atoi(argv[i] + 7);
+            if (vcBalance > 96 || vcBalance < -96) {
+                ALOGE("\nError: Unsupported volume balance value: %d\n", vcBalance);
+                return -1;
+            }
+            lvmConfigParams.vcBal = vcBalance;
+        } else if (!strncmp(argv[i], "-fch:", 5)) {
+            const int fChannels = atoi(argv[i] + 5);
+            if (fChannels > 8 || fChannels < 1) {
+                printf("Error: Unsupported number of file channels : %d\n", fChannels);
+                return -1;
+            }
+            lvmConfigParams.fChannels = fChannels;
+        } else if (!strcmp(argv[i], "-M")) {
+            lvmConfigParams.monoMode = true;
+        } else if (!strncmp(argv[i], "-basslvl:", 9)) {
+            const int bassEffectLevel = atoi(argv[i] + 9);
+            if (bassEffectLevel > LVM_BE_MAX_EFFECTLEVEL ||
+                bassEffectLevel < LVM_BE_MIN_EFFECTLEVEL) {
+                printf("Error: Unsupported Bass Effect Level : %d\n", bassEffectLevel);
+                printUsage();
+                return -1;
+            }
+            lvmConfigParams.bassEffectLevel = bassEffectLevel;
+        } else if (!strncmp(argv[i], "-eqPreset:", 10)) {
+            const int eqPresetLevel = atoi(argv[i] + 10);
+            const int numPresetLvls = std::size(gEqualizerPresets);
+            if (eqPresetLevel >= numPresetLvls || eqPresetLevel < 0) {
+                printf("Error: Unsupported Equalizer Preset : %d\n", eqPresetLevel);
+                printUsage();
+                return -1;
+            }
+            lvmConfigParams.eqPresetLevel = eqPresetLevel;
+        } else if (!strcmp(argv[i], "-bE")) {
+            lvmConfigParams.bassEnable = LVM_BE_ON;
+        } else if (!strcmp(argv[i], "-eqE")) {
+            lvmConfigParams.eqEnable = LVM_EQNB_ON;
+        } else if (!strcmp(argv[i], "-tE")) {
+            lvmConfigParams.trebleEnable = LVM_TE_ON;
+        } else if (!strcmp(argv[i], "-csE")) {
+            lvmConfigParams.csEnable = LVM_MODE_ON;
+        } else if (!strcmp(argv[i], "-h")) {
+            printUsage();
+            return 0;
         }
     }
-#ifndef BYPASS_EXEC
-    errCode = lvmExecute(floatIn.data(), floatOut.data(), pContext, plvmConfigParams);
-    if (errCode) {
-      printf("\nError: lvmExecute returned with %d\n", errCode);
-      return errCode;
-    }
 
-    (void)frameSize; // eliminate warning
-#else
-    memcpy(floatOut.data(), floatIn.data(), frameLength * frameSize);
-#endif
-    memcpy_to_i16_from_float(out.data(), floatOut.data(), frameLength * channelCount);
-    if (ioChannelCount != channelCount) {
-        adjust_channels(out.data(), channelCount, out.data(), ioChannelCount,
-               sizeof(short), frameLength * channelCount * sizeof(short));
-    }
-    (void) fwrite(out.data(), ioFrameSize, frameLength, fout);
-    frameCounter += frameLength;
-  }
-  printf("frameCounter: [%d]\n", frameCounter);
-  return 0;
-}
-
-int main(int argc, const char *argv[]) {
-  if (argc == 1) {
-    printUsage();
-    return -1;
-  }
-
-  lvmConfigParams_t lvmConfigParams{}; // default initialize
-  const char *infile = nullptr;
-  const char *outfile = nullptr;
-
-  for (int i = 1; i < argc; i++) {
-    printf("%s ", argv[i]);
-    if (!strncmp(argv[i], "-i:", 3)) {
-      infile = argv[i] + 3;
-    } else if (!strncmp(argv[i], "-o:", 3)) {
-      outfile = argv[i] + 3;
-    } else if (!strncmp(argv[i], "-fs:", 4)) {
-      const int samplingFreq = atoi(argv[i] + 4);
-      if (samplingFreq != 8000 && samplingFreq != 11025 &&
-          samplingFreq != 12000 && samplingFreq != 16000 &&
-          samplingFreq != 22050 && samplingFreq != 24000 &&
-          samplingFreq != 32000 && samplingFreq != 44100 &&
-          samplingFreq != 48000 && samplingFreq != 88200 &&
-          samplingFreq != 96000 && samplingFreq != 176400 &&
-          samplingFreq != 192000) {
-        printf("Error: Unsupported Sampling Frequency : %d\n", samplingFreq);
-        return -1;
-      }
-      lvmConfigParams.samplingFreq = samplingFreq;
-    } else if (!strncmp(argv[i], "-chMask:", 8)) {
-      const int chMaskConfigIdx = atoi(argv[i] + 8);
-      if (chMaskConfigIdx < 0 || (size_t)chMaskConfigIdx >= std::size(lvmConfigChMask)) {
-        ALOGE("\nError: Unsupported Channel Mask : %d\n", chMaskConfigIdx);
-        return -1;
-      }
-      const audio_channel_mask_t chMask = lvmConfigChMask[chMaskConfigIdx];
-      lvmConfigParams.chMask = chMask;
-      lvmConfigParams.nrChannels = audio_channel_count_from_out_mask(chMask);
-    } else if (!strncmp(argv[i], "-vcBal:", 7)) {
-      const int vcBalance = atoi(argv[i] + 7);
-      if (vcBalance > 96 || vcBalance < -96) {
-        ALOGE("\nError: Unsupported volume balance value: %d\n", vcBalance);
-      }
-      lvmConfigParams.vcBal = vcBalance;
-    } else if (!strncmp(argv[i], "-fch:", 5)) {
-      const int fChannels = atoi(argv[i] + 5);
-      if (fChannels > 8 || fChannels < 1) {
-             printf("Error: Unsupported number of file channels : %d\n", fChannels);
-             return -1;
-           }
-           lvmConfigParams.fChannels = fChannels;
-    } else if (!strcmp(argv[i],"-M")) {
-          lvmConfigParams.monoMode = true;
-    } else if (!strncmp(argv[i], "-basslvl:", 9)) {
-      const int bassEffectLevel = atoi(argv[i] + 9);
-      if (bassEffectLevel > LVM_BE_MAX_EFFECTLEVEL || bassEffectLevel < LVM_BE_MIN_EFFECTLEVEL) {
-        printf("Error: Unsupported Bass Effect Level : %d\n",
-               bassEffectLevel);
+    if (infile == nullptr || outfile == nullptr) {
+        printf("Error: missing input/output files\n");
         printUsage();
         return -1;
-      }
-      lvmConfigParams.bassEffectLevel = bassEffectLevel;
-    } else if (!strncmp(argv[i], "-eqPreset:", 10)) {
-      const int eqPresetLevel = atoi(argv[i] + 10);
-      const int numPresetLvls = std::size(gEqualizerPresets);
-      if (eqPresetLevel >= numPresetLvls || eqPresetLevel < 0) {
-        printf("Error: Unsupported Equalizer Preset : %d\n", eqPresetLevel);
-        printUsage();
-        return -1;
-      }
-      lvmConfigParams.eqPresetLevel = eqPresetLevel;
-    } else if (!strcmp(argv[i], "-bE")) {
-      lvmConfigParams.bassEnable = LVM_BE_ON;
-    } else if (!strcmp(argv[i], "-eqE")) {
-      lvmConfigParams.eqEnable = LVM_EQNB_ON;
-    } else if (!strcmp(argv[i], "-tE")) {
-      lvmConfigParams.trebleEnable = LVM_TE_ON;
-    } else if (!strcmp(argv[i], "-csE")) {
-      lvmConfigParams.csEnable = LVM_MODE_ON;
-    } else if (!strcmp(argv[i], "-h")) {
-      printUsage();
-      return 0;
     }
-  }
 
-  if (infile == nullptr || outfile == nullptr) {
-    printf("Error: missing input/output files\n");
-    printUsage();
-    return -1;
-  }
+    FILE* finp = fopen(infile, "rb");
+    if (finp == nullptr) {
+        printf("Cannot open input file %s", infile);
+        return -1;
+    }
 
-  FILE *finp = fopen(infile, "rb");
-  if (finp == nullptr) {
-    printf("Cannot open input file %s", infile);
-    return -1;
-  }
+    FILE* fout = fopen(outfile, "wb");
+    if (fout == nullptr) {
+        printf("Cannot open output file %s", outfile);
+        fclose(finp);
+        return -1;
+    }
 
-  FILE *fout = fopen(outfile, "wb");
-  if (fout == nullptr) {
-    printf("Cannot open output file %s", outfile);
+    EffectContext context;
+    LVM_ControlParams_t params;
+    int errCode = lvmCreate(&context, &lvmConfigParams, &params);
+    if (errCode == 0) {
+        errCode = lvmMainProcess(&context, &params, &lvmConfigParams, finp, fout);
+        if (errCode != 0) {
+            printf("Error: lvmMainProcess returned with the error: %d", errCode);
+        }
+    } else {
+        printf("Error: lvmCreate returned with the error: %d", errCode);
+    }
     fclose(finp);
-    return -1;
-  }
-
-  EffectContext context;
-  LVM_ControlParams_t params;
-  int errCode = lvmCreate(&context, &lvmConfigParams, &params);
-  if (errCode == 0) {
-    errCode = lvmMainProcess(&context, &params, &lvmConfigParams, finp, fout);
-    if (errCode != 0) {
-      printf("Error: lvmMainProcess returned with the error: %d",errCode);
+    fclose(fout);
+    /* Free the allocated buffers */
+    if (context.pBundledContext != nullptr) {
+        if (context.pBundledContext->hInstance != nullptr) {
+            LVM_DelInstanceHandle(&context.pBundledContext->hInstance);
+        }
+        free(context.pBundledContext);
     }
-  } else {
-    printf("Error: lvmCreate returned with the error: %d", errCode);
-  }
-  fclose(finp);
-  fclose(fout);
-  /* Free the allocated buffers */
-  if (context.pBundledContext != nullptr) {
-    if (context.pBundledContext->hInstance != nullptr) {
-      LvmEffect_free(&context);
-    }
-    free(context.pBundledContext);
-  }
 
-  if (errCode) {
-    return -1;
-  }
-  return 0;
+    if (errCode) {
+        return -1;
+    }
+    return 0;
 }
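Note: the trickiest parts of lvmMainProcess above are the output-buffer sizing (memAllocChCount) and the mono-mode replication loop. A minimal, self-contained sketch of that logic, using hypothetical frameLength/channelCount values instead of the tool's command-line parameters:

    #include <algorithm>
    #include <cstdio>
    #include <vector>

    int main() {
        const int frameLength = 256;  // hypothetical values, for illustration only
        const int channelCount = 2;   // stereo input
        // Mono is processed as stereo internally, so allocate output for at least 2 channels.
        const int memAllocChCount = (channelCount == 1) ? 2 : channelCount;

        std::vector<float> floatIn(frameLength * channelCount);
        std::vector<float> floatOut(frameLength * memAllocChCount);
        for (int i = 0; i < frameLength; ++i) {     // dummy input data
            floatIn[i * channelCount] = 0.25f;      // channel 0
            floatIn[i * channelCount + 1] = -0.5f;  // channel 1
        }

        // Mono-mode replication as in lvmMainProcess: copy channel 0 over channels 1..N-1.
        for (int i = 0; i < frameLength; ++i) {
            float* fp = &floatIn[i * channelCount];
            std::fill(fp + 1, fp + channelCount, *fp);
        }
        printf("ch1 now equals ch0: %d, out buffer: %zu samples\n",
               floatIn[1] == floatIn[0], floatOut.size());
        return 0;
    }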
diff --git a/media/libeffects/lvm/tests/reverb_test.cpp b/media/libeffects/lvm/tests/reverb_test.cpp
new file mode 100644
index 0000000..7cbca9b
--- /dev/null
+++ b/media/libeffects/lvm/tests/reverb_test.cpp
@@ -0,0 +1,396 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <assert.h>
+#include <getopt.h>
+#include <inttypes.h>
+#include <iterator>
+#include <math.h>
+#include <stdlib.h>
+#include <string.h>
+#include <vector>
+
+#include <audio_utils/channels.h>
+#include <audio_utils/primitives.h>
+#include <log/log.h>
+#include <system/audio.h>
+
+#include "EffectReverb.h"
+
+// This is the only symbol that needs to be exported
+extern audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM;
+
+// Global Variables
+enum ReverbParams {
+    ARG_HELP = 1,
+    ARG_INPUT,
+    ARG_OUTPUT,
+    ARG_FS,
+    ARG_CH_MASK,
+    ARG_PRESET,
+    ARG_AUX,
+    ARG_MONO_MODE,
+    ARG_FILE_CH,
+};
+
+const effect_uuid_t kReverbUuids[] = {
+        {0x172cdf00,
+         0xa3bc,
+         0x11df,
+         0xa72f,
+         {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},  // preset-insert mode
+        {0xf29a1400,
+         0xa3bb,
+         0x11df,
+         0x8ddc,
+         {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},  // preset-aux mode
+};
+
+// structures
+struct reverbConfigParams_t {
+    int fChannels = 2;
+    int monoMode = false;
+    int frameLength = 256;
+    int preset = 0;
+    int nrChannels = 2;
+    int sampleRate = 48000;
+    int auxiliary = 0;
+    audio_channel_mask_t chMask = AUDIO_CHANNEL_OUT_STEREO;
+};
+
+constexpr audio_channel_mask_t kReverbConfigChMask[] = {
+        AUDIO_CHANNEL_OUT_MONO,
+        AUDIO_CHANNEL_OUT_STEREO,
+        AUDIO_CHANNEL_OUT_2POINT1,
+        AUDIO_CHANNEL_OUT_2POINT0POINT2,
+        AUDIO_CHANNEL_OUT_QUAD,
+        AUDIO_CHANNEL_OUT_QUAD_BACK,
+        AUDIO_CHANNEL_OUT_QUAD_SIDE,
+        AUDIO_CHANNEL_OUT_SURROUND,
+        AUDIO_CHANNEL_INDEX_MASK_4,
+        AUDIO_CHANNEL_OUT_2POINT1POINT2,
+        AUDIO_CHANNEL_OUT_3POINT0POINT2,
+        AUDIO_CHANNEL_OUT_PENTA,
+        AUDIO_CHANNEL_INDEX_MASK_5,
+        AUDIO_CHANNEL_OUT_3POINT1POINT2,
+        AUDIO_CHANNEL_OUT_5POINT1,
+        AUDIO_CHANNEL_OUT_5POINT1_BACK,
+        AUDIO_CHANNEL_OUT_5POINT1_SIDE,
+        AUDIO_CHANNEL_INDEX_MASK_6,
+        AUDIO_CHANNEL_OUT_6POINT1,
+        AUDIO_CHANNEL_INDEX_MASK_7,
+        AUDIO_CHANNEL_OUT_5POINT1POINT2,
+        AUDIO_CHANNEL_OUT_7POINT1,
+        AUDIO_CHANNEL_INDEX_MASK_8,
+};
+
+constexpr int kReverbConfigChMaskCount = std::size(kReverbConfigChMask);
+
+int reverbCreateEffect(effect_handle_t* pEffectHandle, effect_config_t* pConfig, int sessionId,
+                       int ioId, int auxFlag) {
+    if (int status = AUDIO_EFFECT_LIBRARY_INFO_SYM.create_effect(&kReverbUuids[auxFlag], sessionId,
+                                                                 ioId, pEffectHandle);
+        status != 0) {
+        ALOGE("Reverb create returned an error = %d\n", status);
+        return EXIT_FAILURE;
+    }
+    int reply = 0;
+    uint32_t replySize = sizeof(reply);
+    (**pEffectHandle)
+            ->command(*pEffectHandle, EFFECT_CMD_SET_CONFIG, sizeof(effect_config_t), pConfig,
+                      &replySize, &reply);
+    return reply;
+}
+
+int reverbSetConfigParam(uint32_t paramType, uint32_t paramValue, effect_handle_t effectHandle) {
+    int reply = 0;
+    uint32_t replySize = sizeof(reply);
+    uint32_t paramData[2] = {paramType, paramValue};
+    effect_param_t* effectParam = (effect_param_t*)malloc(sizeof(*effectParam) + sizeof(paramData));
+    memcpy(&effectParam->data[0], &paramData[0], sizeof(paramData));
+    effectParam->psize = sizeof(paramData[0]);
+    effectParam->vsize = sizeof(paramData[1]);
+    int status = (*effectHandle)
+                         ->command(effectHandle, EFFECT_CMD_SET_PARAM,
+                                   sizeof(effect_param_t) + sizeof(paramData), effectParam,
+                                   &replySize, &reply);
+    free(effectParam);
+    if (status != 0) {
+        ALOGE("Reverb set config returned an error = %d\n", status);
+        return status;
+    }
+    return reply;
+}
+
+void printUsage() {
+    printf("\nUsage: ");
+    printf("\n     <executable> [options]\n");
+    printf("\nwhere options are, ");
+    printf("\n     --input <inputfile>");
+    printf("\n           path to the input file");
+    printf("\n     --output <outputfile>");
+    printf("\n           path to the output file");
+    printf("\n     --help");
+    printf("\n           prints this usage information");
+    printf("\n     --chMask <channel_mask>\n");
+    printf("\n           0  - AUDIO_CHANNEL_OUT_MONO");
+    printf("\n           1  - AUDIO_CHANNEL_OUT_STEREO");
+    printf("\n           2  - AUDIO_CHANNEL_OUT_2POINT1");
+    printf("\n           3  - AUDIO_CHANNEL_OUT_2POINT0POINT2");
+    printf("\n           4  - AUDIO_CHANNEL_OUT_QUAD");
+    printf("\n           5  - AUDIO_CHANNEL_OUT_QUAD_BACK");
+    printf("\n           6  - AUDIO_CHANNEL_OUT_QUAD_SIDE");
+    printf("\n           7  - AUDIO_CHANNEL_OUT_SURROUND");
+    printf("\n           8  - canonical channel index mask for 4 ch: (1 << 4) - 1");
+    printf("\n           9  - AUDIO_CHANNEL_OUT_2POINT1POINT2");
+    printf("\n           10 - AUDIO_CHANNEL_OUT_3POINT0POINT2");
+    printf("\n           11 - AUDIO_CHANNEL_OUT_PENTA");
+    printf("\n           12 - canonical channel index mask for 5 ch: (1 << 5) - 1");
+    printf("\n           13 - AUDIO_CHANNEL_OUT_3POINT1POINT2");
+    printf("\n           14 - AUDIO_CHANNEL_OUT_5POINT1");
+    printf("\n           15 - AUDIO_CHANNEL_OUT_5POINT1_BACK");
+    printf("\n           16 - AUDIO_CHANNEL_OUT_5POINT1_SIDE");
+    printf("\n           17 - canonical channel index mask for 6 ch: (1 << 6) - 1");
+    printf("\n           18 - AUDIO_CHANNEL_OUT_6POINT1");
+    printf("\n           19 - canonical channel index mask for 7 ch: (1 << 7) - 1");
+    printf("\n           20 - AUDIO_CHANNEL_OUT_5POINT1POINT2");
+    printf("\n           21 - AUDIO_CHANNEL_OUT_7POINT1");
+    printf("\n           22 - canonical channel index mask for 8 ch: (1 << 8) - 1");
+    printf("\n           default 0");
+    printf("\n     --fs <sampling_freq>");
+    printf("\n           Sampling frequency in Hz, default 48000.");
+    printf("\n     --preset <preset_value>");
+    printf("\n           0 - None");
+    printf("\n           1 - Small Room");
+    printf("\n           2 - Medium Room");
+    printf("\n           3 - Large Room");
+    printf("\n           4 - Medium Hall");
+    printf("\n           5 - Large Hall");
+    printf("\n           6 - Plate");
+    printf("\n           default 0");
+    printf("\n     --fch <file_channels>");
+    printf("\n           number of channels in input file (1 through 8), default 1");
+    printf("\n     --M");
+    printf("\n           Mono mode (force all input audio channels to be identical)");
+    printf("\n     --aux <auxiliary_flag> ");
+    printf("\n           0 - Insert Mode on");
+    printf("\n           1 - auxiliary Mode on");
+    printf("\n           default 0");
+    printf("\n");
+}
+
+int main(int argc, const char* argv[]) {
+    if (argc == 1) {
+        printUsage();
+        return EXIT_FAILURE;
+    }
+
+    reverbConfigParams_t revConfigParams{};  // default initialize
+    const char* inputFile = nullptr;
+    const char* outputFile = nullptr;
+
+    const option long_opts[] = {
+            {"help", no_argument, nullptr, ARG_HELP},
+            {"input", required_argument, nullptr, ARG_INPUT},
+            {"output", required_argument, nullptr, ARG_OUTPUT},
+            {"fs", required_argument, nullptr, ARG_FS},
+            {"chMask", required_argument, nullptr, ARG_CH_MASK},
+            {"preset", required_argument, nullptr, ARG_PRESET},
+            {"aux", required_argument, nullptr, ARG_AUX},
+            {"M", no_argument, &revConfigParams.monoMode, true},
+            {"fch", required_argument, nullptr, ARG_FILE_CH},
+            {nullptr, 0, nullptr, 0},
+    };
+
+    while (true) {
+        const int opt = getopt_long(argc, (char* const*)argv, "i:o:", long_opts, nullptr);
+        if (opt == -1) {
+            break;
+        }
+        switch (opt) {
+            case ARG_HELP:
+                printUsage();
+                return EXIT_SUCCESS;
+            case ARG_INPUT: {
+                inputFile = (char*)optarg;
+                break;
+            }
+            case ARG_OUTPUT: {
+                outputFile = (char*)optarg;
+                break;
+            }
+            case ARG_FS: {
+                revConfigParams.sampleRate = atoi(optarg);
+                break;
+            }
+            case ARG_CH_MASK: {
+                int chMaskIdx = atoi(optarg);
+                if (chMaskIdx < 0 || chMaskIdx >= kReverbConfigChMaskCount) {
+                    ALOGE("Channel Mask index not in correct range\n");
+                    printUsage();
+                    return EXIT_FAILURE;
+                }
+                revConfigParams.chMask = kReverbConfigChMask[chMaskIdx];
+                break;
+            }
+            case ARG_PRESET: {
+                revConfigParams.preset = atoi(optarg);
+                break;
+            }
+            case ARG_AUX: {
+                revConfigParams.auxiliary = atoi(optarg);
+                break;
+            }
+            case ARG_MONO_MODE: {
+                break;
+            }
+            case ARG_FILE_CH: {
+                revConfigParams.fChannels = atoi(optarg);
+                break;
+            }
+            default:
+                break;
+        }
+    }
+
+    if (inputFile == nullptr) {
+        ALOGE("Error: missing input files\n");
+        printUsage();
+        return EXIT_FAILURE;
+    }
+    std::unique_ptr<FILE, decltype(&fclose)> inputFp(fopen(inputFile, "rb"), &fclose);
+
+    if (inputFp == nullptr) {
+        ALOGE("Cannot open input file %s\n", inputFile);
+        return EXIT_FAILURE;
+    }
+
+    if (outputFile == nullptr) {
+        ALOGE("Error: missing output files\n");
+        printUsage();
+        return EXIT_FAILURE;
+    }
+    std::unique_ptr<FILE, decltype(&fclose)> outputFp(fopen(outputFile, "wb"), &fclose);
+
+    if (outputFp == nullptr) {
+        ALOGE("Cannot open output file %s\n", outputFile);
+        return EXIT_FAILURE;
+    }
+
+    int32_t sessionId = 1;
+    int32_t ioId = 1;
+    effect_handle_t effectHandle = nullptr;
+    effect_config_t config;
+    config.inputCfg.samplingRate = config.outputCfg.samplingRate = revConfigParams.sampleRate;
+    config.inputCfg.channels = config.outputCfg.channels = revConfigParams.chMask;
+    config.inputCfg.format = config.outputCfg.format = AUDIO_FORMAT_PCM_FLOAT;
+    if (AUDIO_CHANNEL_OUT_MONO == revConfigParams.chMask) {
+        config.outputCfg.channels = AUDIO_CHANNEL_OUT_STEREO;
+    }
+    if (int status = reverbCreateEffect(&effectHandle, &config, sessionId, ioId,
+                                        revConfigParams.auxiliary);
+        status != 0) {
+        ALOGE("Create effect call returned error %i", status);
+        return EXIT_FAILURE;
+    }
+
+    int reply = 0;
+    uint32_t replySize = sizeof(reply);
+    (*effectHandle)->command(effectHandle, EFFECT_CMD_ENABLE, 0, nullptr, &replySize, &reply);
+    if (reply != 0) {
+        ALOGE("Command enable call returned error %d\n", reply);
+        return EXIT_FAILURE;
+    }
+
+    if (int status = reverbSetConfigParam(REVERB_PARAM_PRESET, (uint32_t)revConfigParams.preset,
+                                          effectHandle);
+        status != 0) {
+        ALOGE("Invalid reverb preset. Error %d\n", status);
+        return EXIT_FAILURE;
+    }
+
+    revConfigParams.nrChannels = audio_channel_count_from_out_mask(revConfigParams.chMask);
+    const int channelCount = revConfigParams.nrChannels;
+    const int frameLength = revConfigParams.frameLength;
+#ifdef BYPASS_EXEC
+    const int frameSize = (int)channelCount * sizeof(float);
+#endif
+    const int ioChannelCount = revConfigParams.fChannels;
+    const int ioFrameSize = ioChannelCount * sizeof(short);
+    const int maxChannelCount = std::max(channelCount, ioChannelCount);
+    /*
+     * Mono input will be converted to 2 channels internally in the process call
+     * by copying the same data into the second channel.
+     * Hence when channelCount is 1, output buffer should be allocated for
+     * 2 channels. The outChannelCount takes care of allocation of sufficient
+     * memory for the output buffer.
+     */
+    const int outChannelCount = (channelCount == 1 ? 2 : channelCount);
+
+    std::vector<short> in(frameLength * maxChannelCount);
+    std::vector<short> out(frameLength * maxChannelCount);
+    std::vector<float> floatIn(frameLength * channelCount);
+    std::vector<float> floatOut(frameLength * outChannelCount);
+
+    int frameCounter = 0;
+
+    while (fread(in.data(), ioFrameSize, frameLength, inputFp.get()) == (size_t)frameLength) {
+        if (ioChannelCount != channelCount) {
+            adjust_channels(in.data(), ioChannelCount, in.data(), channelCount, sizeof(short),
+                            frameLength * ioFrameSize);
+        }
+        memcpy_to_float_from_i16(floatIn.data(), in.data(), frameLength * channelCount);
+
+        // Mono mode will replicate the first channel to all other channels.
+        // This ensures all audio channels are identical. This is useful for testing
+        // Bass Boost, which extracts a mono signal for processing.
+        if (revConfigParams.monoMode && channelCount > 1) {
+            for (int i = 0; i < frameLength; ++i) {
+                auto* fp = &floatIn[i * channelCount];
+                std::fill(fp + 1, fp + channelCount, *fp);  // replicate ch 0
+            }
+        }
+
+        audio_buffer_t inputBuffer, outputBuffer;
+        inputBuffer.frameCount = outputBuffer.frameCount = frameLength;
+        inputBuffer.f32 = floatIn.data();
+        outputBuffer.f32 = floatOut.data();
+#ifndef BYPASS_EXEC
+        if (int status = (*effectHandle)->process(effectHandle, &inputBuffer, &outputBuffer);
+            status != 0) {
+            ALOGE("\nError: Process returned with error %d\n", status);
+            return EXIT_FAILURE;
+        }
+#else
+        memcpy(floatOut.data(), floatIn.data(), frameLength * frameSize);
+#endif
+        memcpy_to_i16_from_float(out.data(), floatOut.data(), frameLength * outChannelCount);
+
+        if (ioChannelCount != outChannelCount) {
+            adjust_channels(out.data(), outChannelCount, out.data(), ioChannelCount, sizeof(short),
+                            frameLength * outChannelCount * sizeof(short));
+        }
+        (void)fwrite(out.data(), ioFrameSize, frameLength, outputFp.get());
+        frameCounter += frameLength;
+    }
+
+    if (int status = AUDIO_EFFECT_LIBRARY_INFO_SYM.release_effect(effectHandle); status != 0) {
+        ALOGE("Audio Preprocessing release returned an error = %d\n", status);
+        return EXIT_FAILURE;
+    }
+    printf("frameCounter: [%d]\n", frameCounter);
+
+    return EXIT_SUCCESS;
+}
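The effect_param_t packing used by reverbSetConfigParam above (psize/vsize header followed by a flat parameter-then-value blob) is easy to get subtly wrong, so here is a stripped-down sketch of the same layout; the struct and values below are stand-ins for illustration, not the real types from the effect framework headers:

    #include <cstdint>
    #include <cstdio>
    #include <cstring>

    // Stand-in for effect_param_t: size header followed by parameter bytes, then value bytes.
    struct FakeEffectParam {
        uint32_t psize;   // size of the parameter field inside data[]
        uint32_t vsize;   // size of the value field inside data[]
        uint8_t data[8];  // parameter (4 bytes) followed by value (4 bytes)
    };

    int main() {
        const uint32_t paramType = 0;   // e.g. a preset selector in the real test
        const uint32_t paramValue = 3;  // e.g. "Large Room"
        const uint32_t paramData[2] = {paramType, paramValue};

        FakeEffectParam p{};
        p.psize = sizeof(paramData[0]);
        p.vsize = sizeof(paramData[1]);
        memcpy(p.data, paramData, sizeof(paramData));  // parameter first, value right after it
        printf("psize=%u vsize=%u\n", p.psize, p.vsize);
        return 0;
    }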
diff --git a/media/libeffects/lvm/tests/snr.cpp b/media/libeffects/lvm/tests/snr.cpp
index 885994c..0fef334 100644
--- a/media/libeffects/lvm/tests/snr.cpp
+++ b/media/libeffects/lvm/tests/snr.cpp
@@ -22,84 +22,83 @@
 #include <vector>
 
 template <typename T, typename A = float>
-std::pair<A, A> getSignalNoise(FILE *finp, FILE *fref) {
-  constexpr size_t framesize = 256;
-  std::vector<T> in(framesize);
-  std::vector<T> ref(framesize);
-  A signal{};
-  A noise{};
+std::pair<A, A> getSignalNoise(FILE* finp, FILE* fref) {
+    constexpr size_t framesize = 256;
+    std::vector<T> in(framesize);
+    std::vector<T> ref(framesize);
+    A signal{};
+    A noise{};
 
-  for (;;) {
-    size_t read_samples_in = fread(&in[0], sizeof(T), framesize, finp);
-    const size_t read_samples_ref = fread(&ref[0], sizeof(T), framesize, fref);
-    if (read_samples_in != read_samples_ref) {
-      printf("file sizes do not match (last %zu %zu)", read_samples_in, read_samples_ref);
-      read_samples_in = std::min(read_samples_in, read_samples_ref);
+    for (;;) {
+        size_t read_samples_in = fread(&in[0], sizeof(T), framesize, finp);
+        const size_t read_samples_ref = fread(&ref[0], sizeof(T), framesize, fref);
+        if (read_samples_in != read_samples_ref) {
+            printf("file sizes do not match (last %zu %zu)", read_samples_in, read_samples_ref);
+            read_samples_in = std::min(read_samples_in, read_samples_ref);
+        }
+        if (read_samples_in == 0) {
+            return {signal, noise};
+        }
+        for (size_t i = 0; i < read_samples_in; ++i) {
+            const A value(ref[i]);
+            const A diff(A(in[i]) - value);
+            signal += value * value;
+            noise += diff * diff;
+        }
     }
-    if (read_samples_in == 0) {
-        return { signal, noise };
-    }
-    for (size_t i = 0; i < read_samples_in; ++i) {
-       const A value(ref[i]);
-       const A diff(A(in[i]) - value);
-       signal += value * value;
-       noise += diff * diff;
-    }
-  }
 }
 
 void printUsage() {
-  printf("\nUsage: ");
-  printf("\n     snr <ref_file> <test_file> [options]\n");
-  printf("\nwhere, \n     <ref_file>  is the reference file name");
-  printf("\n                  on which will be taken as pure signal");
-  printf("\n     <test_file> is test file for snr calculation");
-  printf("\n     and options are mentioned below");
-  printf("\n");
-  printf("\n     -pcm_format:<pcm format of input files>");
-  printf("\n           0 - 16 bit pcm");
-  printf("\n           1 - 32 bit float");
-  printf("\n           default 0");
-  printf("\n     -thr:<threshold value>");
-  printf("\n           default - negative infinity\n\n");
+    printf("\nUsage: ");
+    printf("\n     snr <ref_file> <test_file> [options]\n");
+    printf("\nwhere, \n     <ref_file>  is the reference file name");
+    printf("\n                  on which will be taken as pure signal");
+    printf("\n     <test_file> is test file for snr calculation");
+    printf("\n     and options are mentioned below");
+    printf("\n");
+    printf("\n     -pcm_format:<pcm format of input files>");
+    printf("\n           0 - 16 bit pcm");
+    printf("\n           1 - 32 bit float");
+    printf("\n           default 0");
+    printf("\n     -thr:<threshold value>");
+    printf("\n           default - negative infinity\n\n");
 }
 
-int main(int argc, const char *argv[]) {
-  if (argc < 3) {
-    printUsage();
-    return -1;
-  }
-  int pcm_format = 0;
-  float thr = - std::numeric_limits<float>::infinity();
-  FILE *fref = fopen(argv[1], "rb");
-  FILE *finp = fopen(argv[2], "rb");
-  for (int i = 3; i < argc; i++) {
-    if (!strncmp(argv[i], "-pcm_format:", 12)) {
-      pcm_format = atoi(argv[i] + 12);
-    } else if (!strncmp(argv[i], "-thr:", 5)) {
-      thr = atof(argv[i] + 5);
+int main(int argc, const char* argv[]) {
+    if (argc < 3) {
+        printUsage();
+        return -1;
     }
-  }
-  if (finp == nullptr || fref == nullptr) {
-    printf("\nError: missing input/reference files\n");
-    return -1;
-  }
-  int ret = EXIT_SUCCESS;
-  auto sn = pcm_format == 0
-      ? getSignalNoise<short>(finp, fref)
-      : getSignalNoise<float>(finp, fref);
-  if (sn.first > 0.f && sn.second > 0.f) {
-    float snr = 10.f * log(sn.first / sn.second);
-    // compare the measured snr value with threshold
-    if (snr < thr) {
-      printf("%.6f less than threshold %.6f\n", snr, thr);
-      ret = EXIT_FAILURE;
-    } else {
-      printf("%.6f\n", snr);
+    int pcm_format = 0;
+    float thr = -std::numeric_limits<float>::infinity();
+    FILE* fref = fopen(argv[1], "rb");
+    FILE* finp = fopen(argv[2], "rb");
+    for (int i = 3; i < argc; i++) {
+        if (!strncmp(argv[i], "-pcm_format:", 12)) {
+            pcm_format = atoi(argv[i] + 12);
+        } else if (!strncmp(argv[i], "-thr:", 5)) {
+            thr = atof(argv[i] + 5);
+        }
     }
-  }
-  fclose(finp);
-  fclose(fref);
+    if (finp == nullptr || fref == nullptr) {
+        printf("\nError: missing input/reference files\n");
+        return -1;
+    }
+    int ret = EXIT_SUCCESS;
+    auto sn =
+            pcm_format == 0 ? getSignalNoise<short>(finp, fref) : getSignalNoise<float>(finp, fref);
+    if (sn.first > 0.f && sn.second > 0.f) {
+        float snr = 10.f * log(sn.first / sn.second);
+        // compare the measured snr value with threshold
+        if (snr < thr) {
+            printf("%.6f less than threshold %.6f\n", snr, thr);
+            ret = EXIT_FAILURE;
+        } else {
+            printf("%.6f\n", snr);
+        }
+    }
+    fclose(finp);
+    fclose(fref);
 
-  return ret;
+    return ret;
 }
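For reference, getSignalNoise above accumulates signal = sum of ref[i]^2 and noise = sum of (test[i] - ref[i])^2 over the whole file, and main then scores the ratio with 10.f * log(signal / noise) (natural log, as written in the tool). A minimal in-memory sketch of the same computation, using made-up sample data:

    #include <cmath>
    #include <cstddef>
    #include <cstdio>

    int main() {
        // Made-up reference and test samples, for illustration only.
        const float ref[] = {0.5f, -0.25f, 0.75f, -0.5f};
        const float in[] = {0.51f, -0.26f, 0.74f, -0.49f};
        float signal = 0.f, noise = 0.f;
        for (size_t i = 0; i < sizeof(ref) / sizeof(ref[0]); ++i) {
            const float diff = in[i] - ref[i];
            signal += ref[i] * ref[i];  // energy of the reference (treated as pure signal)
            noise += diff * diff;       // energy of the difference (treated as noise)
        }
        // Same scoring expression as snr.cpp.
        const float snr = 10.f * logf(signal / noise);
        printf("snr score: %.6f\n", snr);
        return 0;
    }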
diff --git a/media/libeffects/lvm/wrapper/Android.bp b/media/libeffects/lvm/wrapper/Android.bp
index afc4220..be60aae 100644
--- a/media/libeffects/lvm/wrapper/Android.bp
+++ b/media/libeffects/lvm/wrapper/Android.bp
@@ -1,5 +1,5 @@
 // music bundle wrapper
-cc_library_shared {
+cc_library {
     name: "libbundlewrapper",
 
     arch: {
@@ -13,7 +13,6 @@
 
     cppflags: [
         "-fvisibility=hidden",
-        "-DSUPPORT_MC",
 
         "-Wall",
         "-Werror",
diff --git a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
index 6fca0e7..670b415 100644
--- a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
+++ b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
@@ -18,7 +18,7 @@
 typedef float LVM_FLOAT;
 #endif
 #define LOG_TAG "Bundle"
-#define ARRAY_SIZE(array) (sizeof (array) / sizeof (array)[0])
+#define ARRAY_SIZE(array) (sizeof(array) / sizeof(array)[0])
 //#define LOG_NDEBUG 0
 
 #include <assert.h>
@@ -42,26 +42,33 @@
 #ifdef VERY_VERY_VERBOSE_LOGGING
 #define ALOGVV ALOGV
 #else
-#define ALOGVV(a...) do { } while (false)
+#define ALOGVV(a...) \
+    do {             \
+    } while (false)
 #endif
 
-#define LVM_ERROR_CHECK(LvmStatus, callingFunc, calledFunc){\
-        if ((LvmStatus) == LVM_NULLADDRESS){\
-            ALOGV("\tLVM_ERROR : Parameter error - "\
-                    "null pointer returned by %s in %s\n\n\n\n", callingFunc, calledFunc);\
-        }\
-        if ((LvmStatus) == LVM_ALIGNMENTERROR){\
-            ALOGV("\tLVM_ERROR : Parameter error - "\
-                    "bad alignment returned by %s in %s\n\n\n\n", callingFunc, calledFunc);\
-        }\
-        if ((LvmStatus) == LVM_INVALIDNUMSAMPLES){\
-            ALOGV("\tLVM_ERROR : Parameter error - "\
-                    "bad number of samples returned by %s in %s\n\n\n\n", callingFunc, calledFunc);\
-        }\
-        if ((LvmStatus) == LVM_OUTOFRANGE){\
-            ALOGV("\tLVM_ERROR : Parameter error - "\
-                    "out of range returned by %s in %s\n", callingFunc, calledFunc);\
-        }\
+#define LVM_ERROR_CHECK(LvmStatus, callingFunc, calledFunc)             \
+    {                                                                   \
+        if ((LvmStatus) == LVM_NULLADDRESS) {                           \
+            ALOGV("\tLVM_ERROR : Parameter error - "                    \
+                  "null pointer returned by %s in %s\n\n\n\n",          \
+                  callingFunc, calledFunc);                             \
+        }                                                               \
+        if ((LvmStatus) == LVM_ALIGNMENTERROR) {                        \
+            ALOGV("\tLVM_ERROR : Parameter error - "                    \
+                  "bad alignment returned by %s in %s\n\n\n\n",         \
+                  callingFunc, calledFunc);                             \
+        }                                                               \
+        if ((LvmStatus) == LVM_INVALIDNUMSAMPLES) {                     \
+            ALOGV("\tLVM_ERROR : Parameter error - "                    \
+                  "bad number of samples returned by %s in %s\n\n\n\n", \
+                  callingFunc, calledFunc);                             \
+        }                                                               \
+        if ((LvmStatus) == LVM_OUTOFRANGE) {                            \
+            ALOGV("\tLVM_ERROR : Parameter error - "                    \
+                  "out of range returned by %s in %s\n",                \
+                  callingFunc, calledFunc);                             \
+        }                                                               \
     }
 
 // Namespaces
@@ -74,20 +81,21 @@
 int SessionIndex[LVM_MAX_SESSIONS];
 
 /* local functions */
-#define CHECK_ARG(cond) {                     \
-    if (!(cond)) {                            \
-        ALOGV("\tLVM_ERROR : Invalid argument: "#cond);      \
-        return -EINVAL;                       \
-    }                                         \
-}
+#define CHECK_ARG(cond)                                      \
+    {                                                        \
+        if (!(cond)) {                                       \
+            ALOGV("\tLVM_ERROR : Invalid argument: " #cond); \
+            return -EINVAL;                                  \
+        }                                                    \
+    }
 
 // NXP SW BassBoost UUID
 const effect_descriptor_t gBassBoostDescriptor = {
-        {0x0634f220, 0xddd4, 0x11db, 0xa0fc, { 0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b }},
-        {0x8631f300, 0x72e2, 0x11df, 0xb57e, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}}, // uuid
+        {0x0634f220, 0xddd4, 0x11db, 0xa0fc, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},
+        {0x8631f300, 0x72e2, 0x11df, 0xb57e, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},  // uuid
         EFFECT_CONTROL_API_VERSION,
-        (EFFECT_FLAG_TYPE_INSERT | EFFECT_FLAG_INSERT_FIRST | EFFECT_FLAG_DEVICE_IND
-        | EFFECT_FLAG_VOLUME_CTRL),
+        (EFFECT_FLAG_TYPE_INSERT | EFFECT_FLAG_INSERT_FIRST | EFFECT_FLAG_DEVICE_IND |
+         EFFECT_FLAG_VOLUME_CTRL),
         BASS_BOOST_CUP_LOAD_ARM9E,
         BUNDLE_MEM_USAGE,
         "Dynamic Bass Boost",
@@ -99,8 +107,8 @@
         {0x37cc2c00, 0xdddd, 0x11db, 0x8577, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},
         {0x1d4033c0, 0x8557, 0x11df, 0x9f2d, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},
         EFFECT_CONTROL_API_VERSION,
-        (EFFECT_FLAG_TYPE_INSERT | EFFECT_FLAG_INSERT_LAST | EFFECT_FLAG_DEVICE_IND
-        | EFFECT_FLAG_VOLUME_CTRL),
+        (EFFECT_FLAG_TYPE_INSERT | EFFECT_FLAG_INSERT_LAST | EFFECT_FLAG_DEVICE_IND |
+         EFFECT_FLAG_VOLUME_CTRL),
         VIRTUALIZER_CUP_LOAD_ARM9E,
         BUNDLE_MEM_USAGE,
         "Virtualizer",
@@ -109,8 +117,8 @@
 
 // NXP SW Equalizer UUID
 const effect_descriptor_t gEqualizerDescriptor = {
-        {0x0bed4300, 0xddd6, 0x11db, 0x8f34, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}}, // type
-        {0xce772f20, 0x847d, 0x11df, 0xbb17, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}}, // uuid Eq NXP
+        {0x0bed4300, 0xddd6, 0x11db, 0x8f34, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},  // type
+        {0xce772f20, 0x847d, 0x11df, 0xbb17, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},  // uuid Eq NXP
         EFFECT_CONTROL_API_VERSION,
         (EFFECT_FLAG_TYPE_INSERT | EFFECT_FLAG_INSERT_FIRST | EFFECT_FLAG_VOLUME_CTRL),
         EQUALIZER_CUP_LOAD_ARM9E,
@@ -121,8 +129,8 @@
 
 // NXP SW Volume UUID
 const effect_descriptor_t gVolumeDescriptor = {
-        {0x09e8ede0, 0xddde, 0x11db, 0xb4f6, { 0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b }},
-        {0x119341a0, 0x8469, 0x11df, 0x81f9, { 0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b }}, //uuid VOL NXP
+        {0x09e8ede0, 0xddde, 0x11db, 0xb4f6, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},
+        {0x119341a0, 0x8469, 0x11df, 0x81f9, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},  // uuid VOL NXP
         EFFECT_CONTROL_API_VERSION,
         (EFFECT_FLAG_TYPE_INSERT | EFFECT_FLAG_INSERT_LAST | EFFECT_FLAG_VOLUME_CTRL),
         VOLUME_CUP_LOAD_ARM9E,
@@ -132,77 +140,50 @@
 };
 
 //--- local function prototypes
-void LvmGlobalBundle_init      (void);
-int  LvmBundle_init            (EffectContext *pContext);
-int  LvmEffect_enable          (EffectContext *pContext);
-int  LvmEffect_disable         (EffectContext *pContext);
-void LvmEffect_free            (EffectContext *pContext);
-int  Effect_setConfig          (EffectContext *pContext, effect_config_t *pConfig);
-void Effect_getConfig          (EffectContext *pContext, effect_config_t *pConfig);
-int  BassBoost_setParameter    (EffectContext *pContext,
-                                uint32_t       paramSize,
-                                void          *pParam,
-                                uint32_t       valueSize,
-                                void          *pValue);
-int  BassBoost_getParameter    (EffectContext *pContext,
-                                uint32_t       paramSize,
-                                void          *pParam,
-                                uint32_t      *pValueSize,
-                                void          *pValue);
-int  Virtualizer_setParameter  (EffectContext *pContext,
-                                uint32_t       paramSize,
-                                void          *pParam,
-                                uint32_t       valueSize,
-                                void          *pValue);
-int  Virtualizer_getParameter  (EffectContext *pContext,
-                                uint32_t       paramSize,
-                                void          *pParam,
-                                uint32_t      *pValueSize,
-                                void          *pValue);
-int  Equalizer_setParameter    (EffectContext *pContext,
-                                uint32_t       paramSize,
-                                void          *pParam,
-                                uint32_t       valueSize,
-                                void          *pValue);
-int  Equalizer_getParameter    (EffectContext *pContext,
-                                uint32_t       paramSize,
-                                void          *pParam,
-                                uint32_t      *pValueSize,
-                                void          *pValue);
-int  Volume_setParameter       (EffectContext *pContext,
-                                uint32_t       paramSize,
-                                void          *pParam,
-                                uint32_t       valueSize,
-                                void          *pValue);
-int  Volume_getParameter       (EffectContext *pContext,
-                                uint32_t       paramSize,
-                                void          *pParam,
-                                uint32_t      *pValueSize,
-                                void          *pValue);
-int Effect_setEnabled(EffectContext *pContext, bool enabled);
+void LvmGlobalBundle_init(void);
+int LvmBundle_init(EffectContext* pContext);
+int LvmEffect_enable(EffectContext* pContext);
+int LvmEffect_disable(EffectContext* pContext);
+int Effect_setConfig(EffectContext* pContext, effect_config_t* pConfig);
+void Effect_getConfig(EffectContext* pContext, effect_config_t* pConfig);
+int BassBoost_setParameter(EffectContext* pContext, uint32_t paramSize, void* pParam,
+                           uint32_t valueSize, void* pValue);
+int BassBoost_getParameter(EffectContext* pContext, uint32_t paramSize, void* pParam,
+                           uint32_t* pValueSize, void* pValue);
+int Virtualizer_setParameter(EffectContext* pContext, uint32_t paramSize, void* pParam,
+                             uint32_t valueSize, void* pValue);
+int Virtualizer_getParameter(EffectContext* pContext, uint32_t paramSize, void* pParam,
+                             uint32_t* pValueSize, void* pValue);
+int Equalizer_setParameter(EffectContext* pContext, uint32_t paramSize, void* pParam,
+                           uint32_t valueSize, void* pValue);
+int Equalizer_getParameter(EffectContext* pContext, uint32_t paramSize, void* pParam,
+                           uint32_t* pValueSize, void* pValue);
+int Volume_setParameter(EffectContext* pContext, uint32_t paramSize, void* pParam,
+                        uint32_t valueSize, void* pValue);
+int Volume_getParameter(EffectContext* pContext, uint32_t paramSize, void* pParam,
+                        uint32_t* pValueSize, void* pValue);
+int Effect_setEnabled(EffectContext* pContext, bool enabled);
 
 /* Effect Library Interface Implementation */
 
-extern "C" int EffectCreate(const effect_uuid_t *uuid,
-                            int32_t             sessionId,
-                            int32_t             ioId __unused,
-                            effect_handle_t  *pHandle){
+extern "C" int EffectCreate(const effect_uuid_t* uuid, int32_t sessionId, int32_t ioId __unused,
+                            effect_handle_t* pHandle) {
     int ret = 0;
     int sessionNo = -1;
     int i;
-    EffectContext *pContext = NULL;
+    EffectContext* pContext = NULL;
     bool newBundle = false;
-    SessionContext *pSessionContext;
+    SessionContext* pSessionContext;
 
     ALOGV("\n\tEffectCreate start session %d", sessionId);
 
-    if (pHandle == NULL || uuid == NULL){
+    if (pHandle == NULL || uuid == NULL) {
         ALOGV("\tLVM_ERROR : EffectCreate() called with NULL pointer");
         ret = -EINVAL;
         goto exit;
     }
 
-    if(LvmInitFlag == LVM_FALSE){
+    if (LvmInitFlag == LVM_FALSE) {
         LvmInitFlag = LVM_TRUE;
         ALOGV("\tEffectCreate - Initializing all global memory");
         LvmGlobalBundle_init();
@@ -210,7 +191,7 @@
 
     // Find sessionNo: if one already exists for the sessionId use it,
     // otherwise choose the first available empty slot.
-    for(i=0; i<LVM_MAX_SESSIONS; i++){
+    for (i = 0; i < LVM_MAX_SESSIONS; i++) {
         if (SessionIndex[i] == sessionId) {
             sessionNo = i;
             break;
@@ -232,106 +213,102 @@
     pContext = new EffectContext;
 
     // If this is the first create in this session
-    if(GlobalSessionMemory[sessionNo].bBundledEffectsEnabled == LVM_FALSE){
+    if (GlobalSessionMemory[sessionNo].bBundledEffectsEnabled == LVM_FALSE) {
         ALOGV("\tEffectCreate - This is the first effect in current sessionId %d sessionNo %d",
-                sessionId, sessionNo);
+              sessionId, sessionNo);
 
         GlobalSessionMemory[sessionNo].bBundledEffectsEnabled = LVM_TRUE;
-        GlobalSessionMemory[sessionNo].pBundledContext        = new BundledEffectContext;
+        GlobalSessionMemory[sessionNo].pBundledContext = new BundledEffectContext;
         newBundle = true;
 
         pContext->pBundledContext = GlobalSessionMemory[sessionNo].pBundledContext;
-        pContext->pBundledContext->SessionNo                = sessionNo;
-        pContext->pBundledContext->SessionId                = sessionId;
-        pContext->pBundledContext->hInstance                = NULL;
-        pContext->pBundledContext->bVolumeEnabled           = LVM_FALSE;
-        pContext->pBundledContext->bEqualizerEnabled        = LVM_FALSE;
-        pContext->pBundledContext->bBassEnabled             = LVM_FALSE;
-        pContext->pBundledContext->bBassTempDisabled        = LVM_FALSE;
-        pContext->pBundledContext->bVirtualizerEnabled      = LVM_FALSE;
+        pContext->pBundledContext->SessionNo = sessionNo;
+        pContext->pBundledContext->SessionId = sessionId;
+        pContext->pBundledContext->hInstance = NULL;
+        pContext->pBundledContext->bVolumeEnabled = LVM_FALSE;
+        pContext->pBundledContext->bEqualizerEnabled = LVM_FALSE;
+        pContext->pBundledContext->bBassEnabled = LVM_FALSE;
+        pContext->pBundledContext->bBassTempDisabled = LVM_FALSE;
+        pContext->pBundledContext->bVirtualizerEnabled = LVM_FALSE;
         pContext->pBundledContext->bVirtualizerTempDisabled = LVM_FALSE;
-        pContext->pBundledContext->nOutputDevice            = AUDIO_DEVICE_NONE;
+        pContext->pBundledContext->nOutputDevice = AUDIO_DEVICE_NONE;
         pContext->pBundledContext->nVirtualizerForcedDevice = AUDIO_DEVICE_NONE;
-        pContext->pBundledContext->NumberEffectsEnabled     = 0;
-        pContext->pBundledContext->NumberEffectsCalled      = 0;
-        pContext->pBundledContext->firstVolume              = LVM_TRUE;
-        pContext->pBundledContext->volume                   = 0;
-
+        pContext->pBundledContext->NumberEffectsEnabled = 0;
+        pContext->pBundledContext->NumberEffectsCalled = 0;
+        pContext->pBundledContext->firstVolume = LVM_TRUE;
+        pContext->pBundledContext->volume = 0;
 
         /* Saved strength is used to return the exact strength that was used in the set to the get
          * because we map the original strength range of 0:1000 to 1:15, and this will avoid
          * a quantisation-like effect when returning
          */
-        pContext->pBundledContext->BassStrengthSaved        = 0;
-        pContext->pBundledContext->VirtStrengthSaved        = 0;
-        pContext->pBundledContext->CurPreset                = PRESET_CUSTOM;
-        pContext->pBundledContext->levelSaved               = 0;
-        pContext->pBundledContext->bMuteEnabled             = LVM_FALSE;
-        pContext->pBundledContext->bStereoPositionEnabled   = LVM_FALSE;
-        pContext->pBundledContext->positionSaved            = 0;
-        pContext->pBundledContext->workBuffer               = NULL;
-        pContext->pBundledContext->frameCount               = -1;
-        pContext->pBundledContext->SamplesToExitCountVirt   = 0;
-        pContext->pBundledContext->SamplesToExitCountBb     = 0;
-        pContext->pBundledContext->SamplesToExitCountEq     = 0;
+        pContext->pBundledContext->BassStrengthSaved = 0;
+        pContext->pBundledContext->VirtStrengthSaved = 0;
+        pContext->pBundledContext->CurPreset = PRESET_CUSTOM;
+        pContext->pBundledContext->levelSaved = 0;
+        pContext->pBundledContext->bMuteEnabled = LVM_FALSE;
+        pContext->pBundledContext->bStereoPositionEnabled = LVM_FALSE;
+        pContext->pBundledContext->positionSaved = 0;
+        pContext->pBundledContext->workBuffer = NULL;
+        pContext->pBundledContext->frameCount = -1;
+        pContext->pBundledContext->SamplesToExitCountVirt = 0;
+        pContext->pBundledContext->SamplesToExitCountBb = 0;
+        pContext->pBundledContext->SamplesToExitCountEq = 0;
         for (int i = 0; i < FIVEBAND_NUMBANDS; i++) {
             pContext->pBundledContext->bandGaindB[i] = EQNB_5BandSoftPresets[i];
         }
-        pContext->pBundledContext->effectProcessCalled      = 0;
-        pContext->pBundledContext->effectInDrain            = 0;
+        pContext->pBundledContext->effectProcessCalled = 0;
+        pContext->pBundledContext->effectInDrain = 0;
 
         ALOGV("\tEffectCreate - Calling LvmBundle_init");
         ret = LvmBundle_init(pContext);
 
-        if (ret < 0){
+        if (ret < 0) {
             ALOGV("\tLVM_ERROR : EffectCreate() Bundle init failed");
             goto exit;
         }
-    }
-    else{
+    } else {
         ALOGV("\tEffectCreate - Assigning memory for previously created effect on sessionNo %d",
-                sessionNo);
-        pContext->pBundledContext =
-                GlobalSessionMemory[sessionNo].pBundledContext;
+              sessionNo);
+        pContext->pBundledContext = GlobalSessionMemory[sessionNo].pBundledContext;
     }
     ALOGV("\tEffectCreate - pBundledContext is %p", pContext->pBundledContext);
 
     pSessionContext = &GlobalSessionMemory[pContext->pBundledContext->SessionNo];
 
     // Create each Effect
-    if (memcmp(uuid, &gBassBoostDescriptor.uuid, sizeof(effect_uuid_t)) == 0){
+    if (memcmp(uuid, &gBassBoostDescriptor.uuid, sizeof(effect_uuid_t)) == 0) {
         // Create Bass Boost
         ALOGV("\tEffectCreate - Effect to be created is LVM_BASS_BOOST");
         pSessionContext->bBassInstantiated = LVM_TRUE;
         pContext->pBundledContext->SamplesToExitCountBb = 0;
 
-        pContext->itfe       = &gLvmEffectInterface;
+        pContext->itfe = &gLvmEffectInterface;
         pContext->EffectType = LVM_BASS_BOOST;
-    } else if (memcmp(uuid, &gVirtualizerDescriptor.uuid, sizeof(effect_uuid_t)) == 0){
+    } else if (memcmp(uuid, &gVirtualizerDescriptor.uuid, sizeof(effect_uuid_t)) == 0) {
         // Create Virtualizer
         ALOGV("\tEffectCreate - Effect to be created is LVM_VIRTUALIZER");
-        pSessionContext->bVirtualizerInstantiated=LVM_TRUE;
+        pSessionContext->bVirtualizerInstantiated = LVM_TRUE;
         pContext->pBundledContext->SamplesToExitCountVirt = 0;
 
-        pContext->itfe       = &gLvmEffectInterface;
+        pContext->itfe = &gLvmEffectInterface;
         pContext->EffectType = LVM_VIRTUALIZER;
-    } else if (memcmp(uuid, &gEqualizerDescriptor.uuid, sizeof(effect_uuid_t)) == 0){
+    } else if (memcmp(uuid, &gEqualizerDescriptor.uuid, sizeof(effect_uuid_t)) == 0) {
         // Create Equalizer
         ALOGV("\tEffectCreate - Effect to be created is LVM_EQUALIZER");
         pSessionContext->bEqualizerInstantiated = LVM_TRUE;
         pContext->pBundledContext->SamplesToExitCountEq = 0;
 
-        pContext->itfe       = &gLvmEffectInterface;
+        pContext->itfe = &gLvmEffectInterface;
         pContext->EffectType = LVM_EQUALIZER;
-    } else if (memcmp(uuid, &gVolumeDescriptor.uuid, sizeof(effect_uuid_t)) == 0){
+    } else if (memcmp(uuid, &gVolumeDescriptor.uuid, sizeof(effect_uuid_t)) == 0) {
         // Create Volume
         ALOGV("\tEffectCreate - Effect to be created is LVM_VOLUME");
         pSessionContext->bVolumeInstantiated = LVM_TRUE;
 
-        pContext->itfe       = &gLvmEffectInterface;
+        pContext->itfe = &gLvmEffectInterface;
         pContext->EffectType = LVM_VOLUME;
-    }
-    else{
+    } else {
         ALOGV("\tLVM_ERROR : EffectCreate() invalid UUID");
         ret = -EINVAL;
         goto exit;
@@ -347,66 +324,64 @@
             }
             delete pContext;
         }
-        if (pHandle != NULL)
-          *pHandle = (effect_handle_t)NULL;
+        if (pHandle != NULL) *pHandle = (effect_handle_t)NULL;
     } else {
-      if (pHandle != NULL)
-        *pHandle = (effect_handle_t)pContext;
+        if (pHandle != NULL) *pHandle = (effect_handle_t)pContext;
     }
     ALOGV("\tEffectCreate end..\n\n");
     return ret;
 } /* end EffectCreate */
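
As an illustrative sketch (not part of this change), the reformatted EffectCreate()/EffectRelease()
entry points above are typically driven by a caller as shown below. The helper name
createAndReleaseEffect and the ioId value are hypothetical, and the sketch assumes it is built in
the same translation unit as the entry points (or against matching extern "C" declarations from the
effect library headers).

    #include <hardware/audio_effect.h>  // effect_uuid_t, effect_handle_t

    // Hypothetical helper: create one bundled effect for a session, then release it.
    int createAndReleaseEffect(const effect_uuid_t* uuid, int32_t sessionId) {
        effect_handle_t handle = nullptr;
        // EffectCreate() allocates (or reuses) the per-session BundledEffectContext
        // and hands back an effect_handle_t wrapping the new EffectContext.
        int ret = EffectCreate(uuid, sessionId, 0 /* ioId, unused */, &handle);
        if (ret != 0 || handle == nullptr) {
            return ret;  // creation failed; nothing to release
        }
        // ... configure and run the effect via (*handle)->command() / process() ...
        // EffectRelease() clears the per-effect state and, once the last effect of
        // the session is gone, frees the shared LVM instance.
        return EffectRelease(handle);
    }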
 
-extern "C" int EffectRelease(effect_handle_t handle){
+extern "C" int EffectRelease(effect_handle_t handle) {
     ALOGV("\n\tEffectRelease start %p", handle);
-    EffectContext * pContext = (EffectContext *)handle;
+    EffectContext* pContext = (EffectContext*)handle;
 
     ALOGV("\tEffectRelease start handle: %p, context %p", handle, pContext->pBundledContext);
-    if (pContext == NULL){
+    if (pContext == NULL) {
         ALOGV("\tLVM_ERROR : EffectRelease called with NULL pointer");
         return -EINVAL;
     }
 
-    SessionContext *pSessionContext = &GlobalSessionMemory[pContext->pBundledContext->SessionNo];
+    SessionContext* pSessionContext = &GlobalSessionMemory[pContext->pBundledContext->SessionNo];
 
     // Clear the instantiated flag for the effect
     // protect against the case where an effect is un-instantiated without being disabled
 
-    int &effectInDrain = pContext->pBundledContext->effectInDrain;
-    if(pContext->EffectType == LVM_BASS_BOOST) {
+    int& effectInDrain = pContext->pBundledContext->effectInDrain;
+    if (pContext->EffectType == LVM_BASS_BOOST) {
         ALOGV("\tEffectRelease LVM_BASS_BOOST Clearing global intstantiated flag");
         pSessionContext->bBassInstantiated = LVM_FALSE;
-        if(pContext->pBundledContext->SamplesToExitCountBb > 0){
+        if (pContext->pBundledContext->SamplesToExitCountBb > 0) {
             pContext->pBundledContext->NumberEffectsEnabled--;
         }
         pContext->pBundledContext->SamplesToExitCountBb = 0;
-    } else if(pContext->EffectType == LVM_VIRTUALIZER) {
+    } else if (pContext->EffectType == LVM_VIRTUALIZER) {
         ALOGV("\tEffectRelease LVM_VIRTUALIZER Clearing global intstantiated flag");
         pSessionContext->bVirtualizerInstantiated = LVM_FALSE;
-        if(pContext->pBundledContext->SamplesToExitCountVirt > 0){
+        if (pContext->pBundledContext->SamplesToExitCountVirt > 0) {
             pContext->pBundledContext->NumberEffectsEnabled--;
         }
         pContext->pBundledContext->SamplesToExitCountVirt = 0;
-    } else if(pContext->EffectType == LVM_EQUALIZER) {
+    } else if (pContext->EffectType == LVM_EQUALIZER) {
         ALOGV("\tEffectRelease LVM_EQUALIZER Clearing global intstantiated flag");
-        pSessionContext->bEqualizerInstantiated =LVM_FALSE;
-        if(pContext->pBundledContext->SamplesToExitCountEq > 0){
+        pSessionContext->bEqualizerInstantiated = LVM_FALSE;
+        if (pContext->pBundledContext->SamplesToExitCountEq > 0) {
             pContext->pBundledContext->NumberEffectsEnabled--;
         }
         pContext->pBundledContext->SamplesToExitCountEq = 0;
-    } else if(pContext->EffectType == LVM_VOLUME) {
+    } else if (pContext->EffectType == LVM_VOLUME) {
         ALOGV("\tEffectRelease LVM_VOLUME Clearing global intstantiated flag");
         pSessionContext->bVolumeInstantiated = LVM_FALSE;
         // There is no samplesToExitCount for volume so we also use the drain flag to check
         // if we should decrement the effects enabled.
-        if (pContext->pBundledContext->bVolumeEnabled == LVM_TRUE
-                || (effectInDrain & 1 << LVM_VOLUME) != 0) {
+        if (pContext->pBundledContext->bVolumeEnabled == LVM_TRUE ||
+            (effectInDrain & 1 << LVM_VOLUME) != 0) {
             pContext->pBundledContext->NumberEffectsEnabled--;
         }
     } else {
         ALOGV("\tLVM_ERROR : EffectRelease : Unsupported effect\n\n\n\n\n\n\n");
     }
-    effectInDrain &= ~(1 << pContext->EffectType); // no need to drain if released
+    effectInDrain &= ~(1 << pContext->EffectType);  // no need to drain if released
 
     // Disable effect, in this case ignore errors (return codes)
     // if an effect has already been disabled
@@ -414,17 +389,15 @@
 
     // if all effects are no longer instantiated, free the LVM memory and delete BundledEffectContext
     if ((pSessionContext->bBassInstantiated == LVM_FALSE) &&
-            (pSessionContext->bVolumeInstantiated == LVM_FALSE) &&
-            (pSessionContext->bEqualizerInstantiated ==LVM_FALSE) &&
-            (pSessionContext->bVirtualizerInstantiated==LVM_FALSE))
-    {
-
+        (pSessionContext->bVolumeInstantiated == LVM_FALSE) &&
+        (pSessionContext->bEqualizerInstantiated == LVM_FALSE) &&
+        (pSessionContext->bVirtualizerInstantiated == LVM_FALSE)) {
         // Clear the SessionIndex
-        for(int i=0; i<LVM_MAX_SESSIONS; i++){
-            if(SessionIndex[i] == pContext->pBundledContext->SessionId){
+        for (int i = 0; i < LVM_MAX_SESSIONS; i++) {
+            if (SessionIndex[i] == pContext->pBundledContext->SessionId) {
                 SessionIndex[i] = LVM_UNUSED_SESSION;
-                ALOGV("\tEffectRelease: Clearing SessionIndex SessionNo %d for SessionId %d\n",
-                        i, pContext->pBundledContext->SessionId);
+                ALOGV("\tEffectRelease: Clearing SessionIndex SessionNo %d for SessionId %d\n", i,
+                      pContext->pBundledContext->SessionId);
                 break;
             }
         }
@@ -433,7 +406,7 @@
         pSessionContext->bBundledEffectsEnabled = LVM_FALSE;
         pSessionContext->pBundledContext = LVM_NULL;
         ALOGV("\tEffectRelease: Freeing LVM Bundle memory\n");
-        LvmEffect_free(pContext);
+        LVM_DelInstanceHandle(&pContext->pBundledContext->hInstance);
         ALOGV("\tEffectRelease: Deleting LVM Bundle context %p\n", pContext->pBundledContext);
         if (pContext->pBundledContext->workBuffer != NULL) {
             free(pContext->pBundledContext->workBuffer);
@@ -449,11 +422,10 @@
 
 } /* end EffectRelease */
 
-extern "C" int EffectGetDescriptor(const effect_uuid_t *uuid,
-                                   effect_descriptor_t *pDescriptor) {
-    const effect_descriptor_t *desc = NULL;
+extern "C" int EffectGetDescriptor(const effect_uuid_t* uuid, effect_descriptor_t* pDescriptor) {
+    const effect_descriptor_t* desc = NULL;
 
-    if (pDescriptor == NULL || uuid == NULL){
+    if (pDescriptor == NULL || uuid == NULL) {
         ALOGV("EffectGetDescriptor() called with NULL pointer");
         return -EINVAL;
     }
@@ -469,7 +441,7 @@
     }
 
     if (desc == NULL) {
-        return  -EINVAL;
+        return -EINVAL;
     }
 
     *pDescriptor = *desc;
@@ -477,15 +449,15 @@
     return 0;
 } /* end EffectGetDescriptor */
 
-void LvmGlobalBundle_init(){
+void LvmGlobalBundle_init() {
     ALOGV("\tLvmGlobalBundle_init start");
-    for(int i=0; i<LVM_MAX_SESSIONS; i++){
-        GlobalSessionMemory[i].bBundledEffectsEnabled   = LVM_FALSE;
-        GlobalSessionMemory[i].bVolumeInstantiated      = LVM_FALSE;
-        GlobalSessionMemory[i].bEqualizerInstantiated   = LVM_FALSE;
-        GlobalSessionMemory[i].bBassInstantiated        = LVM_FALSE;
+    for (int i = 0; i < LVM_MAX_SESSIONS; i++) {
+        GlobalSessionMemory[i].bBundledEffectsEnabled = LVM_FALSE;
+        GlobalSessionMemory[i].bVolumeInstantiated = LVM_FALSE;
+        GlobalSessionMemory[i].bEqualizerInstantiated = LVM_FALSE;
+        GlobalSessionMemory[i].bBassInstantiated = LVM_FALSE;
         GlobalSessionMemory[i].bVirtualizerInstantiated = LVM_FALSE;
-        GlobalSessionMemory[i].pBundledContext          = LVM_NULL;
+        GlobalSessionMemory[i].pBundledContext = LVM_NULL;
 
         SessionIndex[i] = LVM_UNUSED_SESSION;
     }
@@ -504,203 +476,140 @@
 //
 //----------------------------------------------------------------------------
 
-int LvmBundle_init(EffectContext *pContext){
+int LvmBundle_init(EffectContext* pContext) {
     ALOGV("\tLvmBundle_init start");
 
-    pContext->config.inputCfg.accessMode                    = EFFECT_BUFFER_ACCESS_READ;
-    pContext->config.inputCfg.channels                      = AUDIO_CHANNEL_OUT_STEREO;
-    pContext->config.inputCfg.format                        = EFFECT_BUFFER_FORMAT;
-    pContext->config.inputCfg.samplingRate                  = 44100;
-    pContext->config.inputCfg.bufferProvider.getBuffer      = NULL;
-    pContext->config.inputCfg.bufferProvider.releaseBuffer  = NULL;
-    pContext->config.inputCfg.bufferProvider.cookie         = NULL;
-    pContext->config.inputCfg.mask                          = EFFECT_CONFIG_ALL;
-    pContext->config.outputCfg.accessMode                   = EFFECT_BUFFER_ACCESS_ACCUMULATE;
-    pContext->config.outputCfg.channels                     = AUDIO_CHANNEL_OUT_STEREO;
-    pContext->config.outputCfg.format                       = EFFECT_BUFFER_FORMAT;
-    pContext->config.outputCfg.samplingRate                 = 44100;
-    pContext->config.outputCfg.bufferProvider.getBuffer     = NULL;
+    pContext->config.inputCfg.accessMode = EFFECT_BUFFER_ACCESS_READ;
+    pContext->config.inputCfg.channels = AUDIO_CHANNEL_OUT_STEREO;
+    pContext->config.inputCfg.format = EFFECT_BUFFER_FORMAT;
+    pContext->config.inputCfg.samplingRate = 44100;
+    pContext->config.inputCfg.bufferProvider.getBuffer = NULL;
+    pContext->config.inputCfg.bufferProvider.releaseBuffer = NULL;
+    pContext->config.inputCfg.bufferProvider.cookie = NULL;
+    pContext->config.inputCfg.mask = EFFECT_CONFIG_ALL;
+    pContext->config.outputCfg.accessMode = EFFECT_BUFFER_ACCESS_ACCUMULATE;
+    pContext->config.outputCfg.channels = AUDIO_CHANNEL_OUT_STEREO;
+    pContext->config.outputCfg.format = EFFECT_BUFFER_FORMAT;
+    pContext->config.outputCfg.samplingRate = 44100;
+    pContext->config.outputCfg.bufferProvider.getBuffer = NULL;
     pContext->config.outputCfg.bufferProvider.releaseBuffer = NULL;
-    pContext->config.outputCfg.bufferProvider.cookie        = NULL;
-    pContext->config.outputCfg.mask                         = EFFECT_CONFIG_ALL;
+    pContext->config.outputCfg.bufferProvider.cookie = NULL;
+    pContext->config.outputCfg.mask = EFFECT_CONFIG_ALL;
 
     CHECK_ARG(pContext != NULL);
 
-    if (pContext->pBundledContext->hInstance != NULL){
+    if (pContext->pBundledContext->hInstance != NULL) {
         ALOGV("\tLvmBundle_init pContext->pBassBoost != NULL "
-                "-> Calling pContext->pBassBoost->free()");
-
-        LvmEffect_free(pContext);
+              "-> Calling pContext->pBassBoost->free()");
+        LVM_DelInstanceHandle(&pContext->pBundledContext->hInstance);
 
         ALOGV("\tLvmBundle_init pContext->pBassBoost != NULL "
-                "-> Called pContext->pBassBoost->free()");
+              "-> Called pContext->pBassBoost->free()");
     }
 
-    LVM_ReturnStatus_en     LvmStatus=LVM_SUCCESS;          /* Function call status */
-    LVM_ControlParams_t     params;                         /* Control Parameters */
-    LVM_InstParams_t        InstParams;                     /* Instance parameters */
-    LVM_EQNB_BandDef_t      BandDefs[MAX_NUM_BANDS];        /* Equaliser band definitions */
-    LVM_HeadroomParams_t    HeadroomParams;                 /* Headroom parameters */
-    LVM_HeadroomBandDef_t   HeadroomBandDef[LVM_HEADROOM_MAX_NBANDS];
-    LVM_MemTab_t            MemTab;                         /* Memory allocation table */
-    bool                    bMallocFailure = LVM_FALSE;
+    LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
+    LVM_ControlParams_t params;                  /* Control Parameters */
+    LVM_InstParams_t InstParams;                 /* Instance parameters */
+    LVM_EQNB_BandDef_t BandDefs[MAX_NUM_BANDS];  /* Equaliser band definitions */
+    LVM_HeadroomParams_t HeadroomParams;         /* Headroom parameters */
+    LVM_HeadroomBandDef_t HeadroomBandDef[LVM_HEADROOM_MAX_NBANDS];
 
     /* Set the capabilities */
-    InstParams.BufferMode       = LVM_UNMANAGED_BUFFERS;
-    InstParams.MaxBlockSize     = MAX_CALL_SIZE;
-    InstParams.EQNB_NumBands    = MAX_NUM_BANDS;
-    InstParams.PSA_Included     = LVM_PSA_ON;
+    InstParams.BufferMode = LVM_UNMANAGED_BUFFERS;
+    InstParams.MaxBlockSize = MAX_CALL_SIZE;
+    InstParams.EQNB_NumBands = MAX_NUM_BANDS;
+    InstParams.PSA_Included = LVM_PSA_ON;
 
-    /* Allocate memory, forcing alignment */
-    LvmStatus = LVM_GetMemoryTable(LVM_NULL,
-                                  &MemTab,
-                                  &InstParams);
-
-    LVM_ERROR_CHECK(LvmStatus, "LVM_GetMemoryTable", "LvmBundle_init")
-    if(LvmStatus != LVM_SUCCESS) return -EINVAL;
-
-    ALOGV("\tCreateInstance Succesfully called LVM_GetMemoryTable\n");
-
-    /* Allocate memory */
-    for (int i=0; i<LVM_NR_MEMORY_REGIONS; i++){
-        if (MemTab.Region[i].Size != 0){
-            MemTab.Region[i].pBaseAddress = malloc(MemTab.Region[i].Size);
-
-            if (MemTab.Region[i].pBaseAddress == LVM_NULL){
-                ALOGV("\tLVM_ERROR :LvmBundle_init CreateInstance Failed to allocate %" PRIu32
-                        " bytes for region %u\n", MemTab.Region[i].Size, i );
-                bMallocFailure = LVM_TRUE;
-            }else{
-                ALOGV("\tLvmBundle_init CreateInstance allocated %" PRIu32
-                        " bytes for region %u at %p\n",
-                        MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
-            }
-        }
-    }
-
-    /* If one or more of the memory regions failed to allocate, free the regions that were
-     * succesfully allocated and return with an error
-     */
-    if(bMallocFailure == LVM_TRUE){
-        for (int i=0; i<LVM_NR_MEMORY_REGIONS; i++){
-            if (MemTab.Region[i].pBaseAddress == LVM_NULL){
-                ALOGV("\tLVM_ERROR :LvmBundle_init CreateInstance Failed to allocate %" PRIu32
-                        " bytes for region %u Not freeing\n", MemTab.Region[i].Size, i );
-            }else{
-                ALOGV("\tLVM_ERROR :LvmBundle_init CreateInstance Failed: but allocated %" PRIu32
-                     " bytes for region %u at %p- free\n",
-                     MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
-                free(MemTab.Region[i].pBaseAddress);
-            }
-        }
-        return -EINVAL;
-    }
-    ALOGV("\tLvmBundle_init CreateInstance Succesfully malloc'd memory\n");
-
-    /* Initialise */
-    pContext->pBundledContext->hInstance = LVM_NULL;
-
-    /* Init sets the instance handle */
-    LvmStatus = LVM_GetInstanceHandle(&pContext->pBundledContext->hInstance,
-                                      &MemTab,
-                                      &InstParams);
+    LvmStatus = LVM_GetInstanceHandle(&pContext->pBundledContext->hInstance, &InstParams);
 
     LVM_ERROR_CHECK(LvmStatus, "LVM_GetInstanceHandle", "LvmBundle_init")
-    if(LvmStatus != LVM_SUCCESS) return -EINVAL;
+    if (LvmStatus != LVM_SUCCESS) return -EINVAL;
 
-    ALOGV("\tLvmBundle_init CreateInstance Succesfully called LVM_GetInstanceHandle\n");
+    ALOGV("\tLvmBundle_init CreateInstance Successfully called LVM_GetInstanceHandle\n");
 
     /* Set the initial process parameters */
     /* General parameters */
-    params.OperatingMode          = LVM_MODE_ON;
-    params.SampleRate             = LVM_FS_44100;
-    params.SourceFormat           = LVM_STEREO;
-    params.SpeakerType            = LVM_HEADPHONES;
+    params.OperatingMode = LVM_MODE_ON;
+    params.SampleRate = LVM_FS_44100;
+    params.SourceFormat = LVM_STEREO;
+    params.SpeakerType = LVM_HEADPHONES;
 
     pContext->pBundledContext->SampleRate = LVM_FS_44100;
-#ifdef SUPPORT_MC
     pContext->pBundledContext->ChMask = AUDIO_CHANNEL_OUT_STEREO;
-#endif
 
     /* Concert Sound parameters */
-    params.VirtualizerOperatingMode   = LVM_MODE_OFF;
-    params.VirtualizerType            = LVM_CONCERTSOUND;
-    params.VirtualizerReverbLevel     = 100;
-    params.CS_EffectLevel             = LVM_CS_EFFECT_NONE;
+    params.VirtualizerOperatingMode = LVM_MODE_OFF;
+    params.VirtualizerType = LVM_CONCERTSOUND;
+    params.VirtualizerReverbLevel = 100;
+    params.CS_EffectLevel = LVM_CS_EFFECT_NONE;
 
     /* N-Band Equaliser parameters */
-    params.EQNB_OperatingMode     = LVM_EQNB_OFF;
-    params.EQNB_NBands            = FIVEBAND_NUMBANDS;
-    params.pEQNB_BandDefinition   = &BandDefs[0];
+    params.EQNB_OperatingMode = LVM_EQNB_OFF;
+    params.EQNB_NBands = FIVEBAND_NUMBANDS;
+    params.pEQNB_BandDefinition = &BandDefs[0];
 
-    for (int i=0; i<FIVEBAND_NUMBANDS; i++)
-    {
+    for (int i = 0; i < FIVEBAND_NUMBANDS; i++) {
         BandDefs[i].Frequency = EQNB_5BandPresetsFrequencies[i];
-        BandDefs[i].QFactor   = EQNB_5BandPresetsQFactors[i];
-        BandDefs[i].Gain      = EQNB_5BandSoftPresets[i];
+        BandDefs[i].QFactor = EQNB_5BandPresetsQFactors[i];
+        BandDefs[i].Gain = EQNB_5BandSoftPresets[i];
     }
 
     /* Volume Control parameters */
-    params.VC_EffectLevel         = 0;
-    params.VC_Balance             = 0;
+    params.VC_EffectLevel = 0;
+    params.VC_Balance = 0;
 
     /* Treble Enhancement parameters */
-    params.TE_OperatingMode       = LVM_TE_OFF;
-    params.TE_EffectLevel         = 0;
+    params.TE_OperatingMode = LVM_TE_OFF;
+    params.TE_EffectLevel = 0;
 
     /* PSA Control parameters */
-    params.PSA_Enable             = LVM_PSA_OFF;
-    params.PSA_PeakDecayRate      = (LVM_PSA_DecaySpeed_en)0;
+    params.PSA_Enable = LVM_PSA_OFF;
+    params.PSA_PeakDecayRate = (LVM_PSA_DecaySpeed_en)0;
 
     /* Bass Enhancement parameters */
-    params.BE_OperatingMode       = LVM_BE_OFF;
-    params.BE_EffectLevel         = 0;
-    params.BE_CentreFreq          = LVM_BE_CENTRE_90Hz;
-    params.BE_HPF                 = LVM_BE_HPF_ON;
+    params.BE_OperatingMode = LVM_BE_OFF;
+    params.BE_EffectLevel = 0;
+    params.BE_CentreFreq = LVM_BE_CENTRE_90Hz;
+    params.BE_HPF = LVM_BE_HPF_ON;
 
     /* PSA Control parameters */
-    params.PSA_Enable             = LVM_PSA_OFF;
-    params.PSA_PeakDecayRate      = LVM_PSA_SPEED_MEDIUM;
+    params.PSA_Enable = LVM_PSA_OFF;
+    params.PSA_PeakDecayRate = LVM_PSA_SPEED_MEDIUM;
 
     /* TE Control parameters */
-    params.TE_OperatingMode       = LVM_TE_OFF;
-    params.TE_EffectLevel         = 0;
+    params.TE_OperatingMode = LVM_TE_OFF;
+    params.TE_EffectLevel = 0;
 
-#ifdef SUPPORT_MC
-    params.NrChannels             =
-        audio_channel_count_from_out_mask(AUDIO_CHANNEL_OUT_STEREO);
-    params.ChMask                 = AUDIO_CHANNEL_OUT_STEREO;
-#endif
+    params.NrChannels = audio_channel_count_from_out_mask(AUDIO_CHANNEL_OUT_STEREO);
+    params.ChMask = AUDIO_CHANNEL_OUT_STEREO;
     /* Activate the initial settings */
-    LvmStatus = LVM_SetControlParameters(pContext->pBundledContext->hInstance,
-                                         &params);
+    LvmStatus = LVM_SetControlParameters(pContext->pBundledContext->hInstance, &params);
 
     LVM_ERROR_CHECK(LvmStatus, "LVM_SetControlParameters", "LvmBundle_init")
-    if(LvmStatus != LVM_SUCCESS) return -EINVAL;
+    if (LvmStatus != LVM_SUCCESS) return -EINVAL;
 
-    ALOGV("\tLvmBundle_init CreateInstance Succesfully called LVM_SetControlParameters\n");
+    ALOGV("\tLvmBundle_init CreateInstance Successfully called LVM_SetControlParameters\n");
 
     /* Set the headroom parameters */
-    HeadroomBandDef[0].Limit_Low          = 20;
-    HeadroomBandDef[0].Limit_High         = 4999;
-    HeadroomBandDef[0].Headroom_Offset    = 0;
-    HeadroomBandDef[1].Limit_Low          = 5000;
-    HeadroomBandDef[1].Limit_High         = 24000;
-    HeadroomBandDef[1].Headroom_Offset    = 0;
-    HeadroomParams.pHeadroomDefinition    = &HeadroomBandDef[0];
+    HeadroomBandDef[0].Limit_Low = 20;
+    HeadroomBandDef[0].Limit_High = 4999;
+    HeadroomBandDef[0].Headroom_Offset = 0;
+    HeadroomBandDef[1].Limit_Low = 5000;
+    HeadroomBandDef[1].Limit_High = 24000;
+    HeadroomBandDef[1].Headroom_Offset = 0;
+    HeadroomParams.pHeadroomDefinition = &HeadroomBandDef[0];
     HeadroomParams.Headroom_OperatingMode = LVM_HEADROOM_ON;
-    HeadroomParams.NHeadroomBands         = 2;
+    HeadroomParams.NHeadroomBands = 2;
 
-    LvmStatus = LVM_SetHeadroomParams(pContext->pBundledContext->hInstance,
-                                      &HeadroomParams);
+    LvmStatus = LVM_SetHeadroomParams(pContext->pBundledContext->hInstance, &HeadroomParams);
 
     LVM_ERROR_CHECK(LvmStatus, "LVM_SetHeadroomParams", "LvmBundle_init")
-    if(LvmStatus != LVM_SUCCESS) return -EINVAL;
+    if (LvmStatus != LVM_SUCCESS) return -EINVAL;
 
-    ALOGV("\tLvmBundle_init CreateInstance Succesfully called LVM_SetHeadroomParams\n");
+    ALOGV("\tLvmBundle_init CreateInstance Successfully called LVM_SetHeadroomParams\n");
     ALOGV("\tLvmBundle_init End");
     return 0;
-}   /* end LvmBundle_init */
+} /* end LvmBundle_init */
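
Beyond the brace and whitespace reformatting, the hunk above also drops the old LVM_GetMemoryTable
plus per-region malloc bookkeeping: the instance is now obtained with a single
LVM_GetInstanceHandle(&hInstance, &InstParams) call, and teardown (here and in EffectRelease) goes
through LVM_DelInstanceHandle instead of the removed LvmEffect_free. As a hedged sketch of the new
pairing — helper names are hypothetical and the signatures are inferred only from the calls visible
in this diff:

    // Hypothetical helpers illustrating the simplified LVM instance lifecycle.
    static int createLvmInstance(BundledEffectContext* pBundled) {
        LVM_InstParams_t instParams;
        instParams.BufferMode = LVM_UNMANAGED_BUFFERS;
        instParams.MaxBlockSize = MAX_CALL_SIZE;
        instParams.EQNB_NumBands = MAX_NUM_BANDS;
        instParams.PSA_Included = LVM_PSA_ON;

        // Allocation now happens inside the library; no LVM_MemTab_t to populate or free.
        LVM_ReturnStatus_en status = LVM_GetInstanceHandle(&pBundled->hInstance, &instParams);
        return (status == LVM_SUCCESS) ? 0 : -EINVAL;
    }

    static void destroyLvmInstance(BundledEffectContext* pBundled) {
        // Replaces the removed LvmEffect_free(): one call frees everything the
        // library allocated for this handle.
        LVM_DelInstanceHandle(&pBundled->hInstance);
    }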
 
 //----------------------------------------------------------------------------
 // LvmBundle_process()
@@ -719,25 +628,22 @@
 //  pOut:       pointer to updated stereo 16 bit output data
 //
 //----------------------------------------------------------------------------
-int LvmBundle_process(effect_buffer_t  *pIn,
-                      effect_buffer_t  *pOut,
-                      int              frameCount,
-                      EffectContext    *pContext){
-
-    LVM_ReturnStatus_en     LvmStatus = LVM_SUCCESS;                /* Function call status */
-    effect_buffer_t         *pOutTmp;
+int LvmBundle_process(effect_buffer_t* pIn, effect_buffer_t* pOut, int frameCount,
+                      EffectContext* pContext) {
+    LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
+    effect_buffer_t* pOutTmp;
     const LVM_INT32 NrChannels =
-        audio_channel_count_from_out_mask(pContext->config.inputCfg.channels);
+            audio_channel_count_from_out_mask(pContext->config.inputCfg.channels);
 
-    if (pContext->config.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_WRITE){
+    if (pContext->config.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_WRITE) {
         pOutTmp = pOut;
-    } else if (pContext->config.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE){
+    } else if (pContext->config.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE) {
         if (pContext->pBundledContext->frameCount != frameCount) {
             if (pContext->pBundledContext->workBuffer != NULL) {
                 free(pContext->pBundledContext->workBuffer);
             }
             pContext->pBundledContext->workBuffer =
-                    (effect_buffer_t *)calloc(frameCount, sizeof(effect_buffer_t) * NrChannels);
+                    (effect_buffer_t*)calloc(frameCount, sizeof(effect_buffer_t) * NrChannels);
             if (pContext->pBundledContext->workBuffer == NULL) {
                 return -ENOMEM;
             }
@@ -749,7 +655,6 @@
         return -EINVAL;
     }
 
-
     /* Process the samples */
     LvmStatus = LVM_Process(pContext->pBundledContext->hInstance, /* Instance handle */
                             pIn,                                  /* Input buffer */
@@ -757,16 +662,15 @@
                             (LVM_UINT16)frameCount,               /* Number of samples to read */
                             0);                                   /* Audio Time */
     LVM_ERROR_CHECK(LvmStatus, "LVM_Process", "LvmBundle_process")
-    if(LvmStatus != LVM_SUCCESS) return -EINVAL;
+    if (LvmStatus != LVM_SUCCESS) return -EINVAL;
 
-
-    if (pContext->config.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE){
+    if (pContext->config.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE) {
         for (int i = 0; i < frameCount * NrChannels; i++) {
             pOut[i] = pOut[i] + pOutTmp[i];
         }
     }
     return 0;
-}    /* end LvmBundle_process */
+} /* end LvmBundle_process */
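
A note on the two output access modes handled above: with EFFECT_BUFFER_ACCESS_WRITE the bundle
renders straight into pOut, while with EFFECT_BUFFER_ACCESS_ACCUMULATE it renders into its internal
workBuffer (reallocated whenever frameCount changes) and then adds the result to whatever pOut
already holds. A minimal sketch, assuming an already-initialized pContext; buffer sizes and the
direct accessMode assignment are for illustration only, since in practice the mode arrives through
Effect_setConfig:

    constexpr int kFrames = 4;    // illustrative frame count
    constexpr int kChannels = 2;  // stereo, matching the default config
    effect_buffer_t in[kFrames * kChannels] = {};   // dry input
    effect_buffer_t out[kFrames * kChannels] = {};  // may already hold other effects' output

    // WRITE: out is overwritten with the processed signal.
    pContext->config.outputCfg.accessMode = EFFECT_BUFFER_ACCESS_WRITE;
    LvmBundle_process(in, out, kFrames, pContext);

    // ACCUMULATE: the processed signal is mixed into the existing contents of out.
    pContext->config.outputCfg.accessMode = EFFECT_BUFFER_ACCESS_ACCUMULATE;
    LvmBundle_process(in, out, kFrames, pContext);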
 
 //----------------------------------------------------------------------------
 // EqualizerUpdateActiveParams()
@@ -779,29 +683,28 @@
 // Outputs:
 //
 //----------------------------------------------------------------------------
-void EqualizerUpdateActiveParams(EffectContext *pContext) {
-    LVM_ControlParams_t     ActiveParams;              /* Current control Parameters */
-    LVM_ReturnStatus_en     LvmStatus=LVM_SUCCESS;     /* Function call status */
+void EqualizerUpdateActiveParams(EffectContext* pContext) {
+    LVM_ControlParams_t ActiveParams;            /* Current control Parameters */
+    LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
 
     /* Get the current settings */
     LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
     LVM_ERROR_CHECK(LvmStatus, "LVM_GetControlParameters", "EqualizerUpdateActiveParams")
-    //ALOGV("\tEqualizerUpdateActiveParams Succesfully returned from LVM_GetControlParameters\n");
-    //ALOGV("\tEqualizerUpdateActiveParams just Got -> %d\n",
+    // ALOGV("\tEqualizerUpdateActiveParams Successfully returned from LVM_GetControlParameters\n");
+    // ALOGV("\tEqualizerUpdateActiveParams just Got -> %d\n",
     //          ActiveParams.pEQNB_BandDefinition[band].Gain);
 
     for (int i = 0; i < FIVEBAND_NUMBANDS; i++) {
-           ActiveParams.pEQNB_BandDefinition[i].Frequency = EQNB_5BandPresetsFrequencies[i];
-           ActiveParams.pEQNB_BandDefinition[i].QFactor   = EQNB_5BandPresetsQFactors[i];
-           ActiveParams.pEQNB_BandDefinition[i].Gain = pContext->pBundledContext->bandGaindB[i];
-       }
+        ActiveParams.pEQNB_BandDefinition[i].Frequency = EQNB_5BandPresetsFrequencies[i];
+        ActiveParams.pEQNB_BandDefinition[i].QFactor = EQNB_5BandPresetsQFactors[i];
+        ActiveParams.pEQNB_BandDefinition[i].Gain = pContext->pBundledContext->bandGaindB[i];
+    }
 
     /* Activate the initial settings */
     LvmStatus = LVM_SetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
     LVM_ERROR_CHECK(LvmStatus, "LVM_SetControlParameters", "EqualizerUpdateActiveParams")
-    //ALOGV("\tEqualizerUpdateActiveParams just Set -> %d\n",
+    // ALOGV("\tEqualizerUpdateActiveParams just Set -> %d\n",
     //          ActiveParams.pEQNB_BandDefinition[band].Gain);
-
 }
 
 //----------------------------------------------------------------------------
@@ -816,19 +719,19 @@
 // Outputs:
 //
 //----------------------------------------------------------------------------
-void LvmEffect_limitLevel(EffectContext *pContext) {
-    LVM_ControlParams_t     ActiveParams;              /* Current control Parameters */
-    LVM_ReturnStatus_en     LvmStatus=LVM_SUCCESS;     /* Function call status */
+void LvmEffect_limitLevel(EffectContext* pContext) {
+    LVM_ControlParams_t ActiveParams;            /* Current control Parameters */
+    LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
 
     /* Get the current settings */
     LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
     LVM_ERROR_CHECK(LvmStatus, "LVM_GetControlParameters", "LvmEffect_limitLevel")
-    //ALOGV("\tLvmEffect_limitLevel Succesfully returned from LVM_GetControlParameters\n");
-    //ALOGV("\tLvmEffect_limitLevel just Got -> %d\n",
+    // ALOGV("\tLvmEffect_limitLevel Successfully returned from LVM_GetControlParameters\n");
+    // ALOGV("\tLvmEffect_limitLevel just Got -> %d\n",
     //          ActiveParams.pEQNB_BandDefinition[band].Gain);
 
     int gainCorrection = 0;
-    //Count the energy contribution per band for EQ and BassBoost only if they are active.
+    // Count the energy contribution per band for EQ and BassBoost only if they are active.
     float energyContribution = 0;
     float energyCross = 0;
     float energyBassBoost = 0;
@@ -838,88 +741,83 @@
     bool bbEnabled = pContext->pBundledContext->bBassEnabled == LVM_TRUE;
     bool viEnabled = pContext->pBundledContext->bVirtualizerEnabled == LVM_TRUE;
 
-    //EQ contribution
+    // EQ contribution
     if (eqEnabled) {
         for (int i = 0; i < FIVEBAND_NUMBANDS; i++) {
-            float bandFactor = pContext->pBundledContext->bandGaindB[i]/15.0;
+            float bandFactor = pContext->pBundledContext->bandGaindB[i] / 15.0;
             float bandCoefficient = LimitLevel_bandEnergyCoefficient[i];
             float bandEnergy = bandFactor * bandCoefficient * bandCoefficient;
-            if (bandEnergy > 0)
-                energyContribution += bandEnergy;
+            if (bandEnergy > 0) energyContribution += bandEnergy;
         }
 
-        //cross EQ coefficients
+        // cross EQ coefficients
         float bandFactorSum = 0;
-        for (int i = 0; i < FIVEBAND_NUMBANDS-1; i++) {
-            float bandFactor1 = pContext->pBundledContext->bandGaindB[i]/15.0;
-            float bandFactor2 = pContext->pBundledContext->bandGaindB[i+1]/15.0;
+        for (int i = 0; i < FIVEBAND_NUMBANDS - 1; i++) {
+            float bandFactor1 = pContext->pBundledContext->bandGaindB[i] / 15.0;
+            float bandFactor2 = pContext->pBundledContext->bandGaindB[i + 1] / 15.0;
 
             if (bandFactor1 > 0 && bandFactor2 > 0) {
-                float crossEnergy = bandFactor1 * bandFactor2 *
-                        LimitLevel_bandEnergyCrossCoefficient[i];
+                float crossEnergy =
+                        bandFactor1 * bandFactor2 * LimitLevel_bandEnergyCrossCoefficient[i];
                 bandFactorSum += bandFactor1 * bandFactor2;
 
-                if (crossEnergy > 0)
-                    energyCross += crossEnergy;
+                if (crossEnergy > 0) energyCross += crossEnergy;
             }
         }
         bandFactorSum -= 1.0;
-        if (bandFactorSum > 0)
-          crossCorrection = bandFactorSum * 0.7;
+        if (bandFactorSum > 0) crossCorrection = bandFactorSum * 0.7;
     }
 
-    //BassBoost contribution
+    // BassBoost contribution
     if (bbEnabled) {
-        float boostFactor = (pContext->pBundledContext->BassStrengthSaved)/1000.0;
+        float boostFactor = (pContext->pBundledContext->BassStrengthSaved) / 1000.0;
         float boostCoefficient = LimitLevel_bassBoostEnergyCoefficient;
 
         energyContribution += boostFactor * boostCoefficient * boostCoefficient;
 
         if (eqEnabled) {
             for (int i = 0; i < FIVEBAND_NUMBANDS; i++) {
-                float bandFactor = pContext->pBundledContext->bandGaindB[i]/15.0;
+                float bandFactor = pContext->pBundledContext->bandGaindB[i] / 15.0;
                 float bandCrossCoefficient = LimitLevel_bassBoostEnergyCrossCoefficient[i];
-                float bandEnergy = boostFactor * bandFactor *
-                    bandCrossCoefficient;
-                if (bandEnergy > 0)
-                  energyBassBoost += bandEnergy;
+                float bandEnergy = boostFactor * bandFactor * bandCrossCoefficient;
+                if (bandEnergy > 0) energyBassBoost += bandEnergy;
             }
         }
     }
 
-    //Virtualizer contribution
+    // Virtualizer contribution
     if (viEnabled) {
-        energyContribution += LimitLevel_virtualizerContribution *
-                LimitLevel_virtualizerContribution;
+        energyContribution +=
+                LimitLevel_virtualizerContribution * LimitLevel_virtualizerContribution;
     }
 
-    double totalEnergyEstimation = sqrt(energyContribution + energyCross + energyBassBoost) -
-            crossCorrection;
+    double totalEnergyEstimation =
+            sqrt(energyContribution + energyCross + energyBassBoost) - crossCorrection;
     ALOGV(" TOTAL energy estimation: %0.2f dB", totalEnergyEstimation);
 
-    //roundoff
+    // roundoff
     int maxLevelRound = (int)(totalEnergyEstimation + 0.99);
     if (maxLevelRound + pContext->pBundledContext->volume > 0) {
         gainCorrection = maxLevelRound + pContext->pBundledContext->volume;
     }
 
-    ActiveParams.VC_EffectLevel  = pContext->pBundledContext->volume - gainCorrection;
+    ActiveParams.VC_EffectLevel = pContext->pBundledContext->volume - gainCorrection;
     if (ActiveParams.VC_EffectLevel < -96) {
         ActiveParams.VC_EffectLevel = -96;
     }
     ALOGV("\tVol:%d, GainCorrection: %d, Actual vol: %d", pContext->pBundledContext->volume,
-            gainCorrection, ActiveParams.VC_EffectLevel);
+          gainCorrection, ActiveParams.VC_EffectLevel);
 
     /* Activate the initial settings */
     LvmStatus = LVM_SetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
     LVM_ERROR_CHECK(LvmStatus, "LVM_SetControlParameters", "LvmEffect_limitLevel")
 
     ALOGV("LVM_SetControlParameters return:%d", (int)LvmStatus);
-    //ALOGV("\tLvmEffect_limitLevel just Set -> %d\n",
+    // ALOGV("\tLvmEffect_limitLevel just Set -> %d\n",
     //          ActiveParams.pEQNB_BandDefinition[band].Gain);
 
-    //ALOGV("\tLvmEffect_limitLevel just set (-96dB -> 0dB) -> %d\n",ActiveParams.VC_EffectLevel );
-    if (pContext->pBundledContext->firstVolume == LVM_TRUE){
+    // ALOGV("\tLvmEffect_limitLevel just set (-96dB -> 0dB) -> %d\n",ActiveParams.VC_EffectLevel );
+    if (pContext->pBundledContext->firstVolume == LVM_TRUE) {
         LvmStatus = LVM_SetVolumeNoSmoothing(pContext->pBundledContext->hInstance, &ActiveParams);
         LVM_ERROR_CHECK(LvmStatus, "LVM_SetVolumeNoSmoothing", "LvmBundle_process")
         ALOGV("\tLVM_VOLUME: Disabling Smoothing for first volume change to remove spikes/clicks");
@@ -939,42 +837,41 @@
 //
 //----------------------------------------------------------------------------
 
-int LvmEffect_enable(EffectContext *pContext){
-    //ALOGV("\tLvmEffect_enable start");
+int LvmEffect_enable(EffectContext* pContext) {
+    // ALOGV("\tLvmEffect_enable start");
 
-    LVM_ControlParams_t     ActiveParams;                           /* Current control Parameters */
-    LVM_ReturnStatus_en     LvmStatus = LVM_SUCCESS;                /* Function call status */
+    LVM_ControlParams_t ActiveParams;            /* Current control Parameters */
+    LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
 
     /* Get the current settings */
-    LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance,
-                                         &ActiveParams);
+    LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
 
     LVM_ERROR_CHECK(LvmStatus, "LVM_GetControlParameters", "LvmEffect_enable")
-    if(LvmStatus != LVM_SUCCESS) return -EINVAL;
-    //ALOGV("\tLvmEffect_enable Succesfully called LVM_GetControlParameters\n");
+    if (LvmStatus != LVM_SUCCESS) return -EINVAL;
+    // ALOGV("\tLvmEffect_enable Successfully called LVM_GetControlParameters\n");
 
-    if(pContext->EffectType == LVM_BASS_BOOST) {
+    if (pContext->EffectType == LVM_BASS_BOOST) {
         ALOGV("\tLvmEffect_enable : Enabling LVM_BASS_BOOST");
-        ActiveParams.BE_OperatingMode       = LVM_BE_ON;
+        ActiveParams.BE_OperatingMode = LVM_BE_ON;
     }
-    if(pContext->EffectType == LVM_VIRTUALIZER) {
+    if (pContext->EffectType == LVM_VIRTUALIZER) {
         ALOGV("\tLvmEffect_enable : Enabling LVM_VIRTUALIZER");
-        ActiveParams.VirtualizerOperatingMode   = LVM_MODE_ON;
+        ActiveParams.VirtualizerOperatingMode = LVM_MODE_ON;
     }
-    if(pContext->EffectType == LVM_EQUALIZER) {
+    if (pContext->EffectType == LVM_EQUALIZER) {
         ALOGV("\tLvmEffect_enable : Enabling LVM_EQUALIZER");
-        ActiveParams.EQNB_OperatingMode     = LVM_EQNB_ON;
+        ActiveParams.EQNB_OperatingMode = LVM_EQNB_ON;
     }
-    if(pContext->EffectType == LVM_VOLUME) {
+    if (pContext->EffectType == LVM_VOLUME) {
         ALOGV("\tLvmEffect_enable : Enabling LVM_VOLUME");
     }
 
     LvmStatus = LVM_SetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
     LVM_ERROR_CHECK(LvmStatus, "LVM_SetControlParameters", "LvmEffect_enable")
-    if(LvmStatus != LVM_SUCCESS) return -EINVAL;
+    if (LvmStatus != LVM_SUCCESS) return -EINVAL;
 
-    //ALOGV("\tLvmEffect_enable Succesfully called LVM_SetControlParameters\n");
-    //ALOGV("\tLvmEffect_enable end");
+    // ALOGV("\tLvmEffect_enable Successfully called LVM_SetControlParameters\n");
+    // ALOGV("\tLvmEffect_enable end");
     LvmEffect_limitLevel(pContext);
     return 0;
 }
@@ -991,82 +888,45 @@
 //
 //----------------------------------------------------------------------------
 
-int LvmEffect_disable(EffectContext *pContext){
-    //ALOGV("\tLvmEffect_disable start");
+int LvmEffect_disable(EffectContext* pContext) {
+    // ALOGV("\tLvmEffect_disable start");
 
-    LVM_ControlParams_t     ActiveParams;                           /* Current control Parameters */
-    LVM_ReturnStatus_en     LvmStatus = LVM_SUCCESS;                /* Function call status */
+    LVM_ControlParams_t ActiveParams;            /* Current control Parameters */
+    LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
     /* Get the current settings */
-    LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance,
-                                         &ActiveParams);
+    LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
 
     LVM_ERROR_CHECK(LvmStatus, "LVM_GetControlParameters", "LvmEffect_disable")
-    if(LvmStatus != LVM_SUCCESS) return -EINVAL;
-    //ALOGV("\tLvmEffect_disable Succesfully called LVM_GetControlParameters\n");
+    if (LvmStatus != LVM_SUCCESS) return -EINVAL;
+    // ALOGV("\tLvmEffect_disable Successfully called LVM_GetControlParameters\n");
 
-    if(pContext->EffectType == LVM_BASS_BOOST) {
+    if (pContext->EffectType == LVM_BASS_BOOST) {
         ALOGV("\tLvmEffect_disable : Disabling LVM_BASS_BOOST");
-        ActiveParams.BE_OperatingMode       = LVM_BE_OFF;
+        ActiveParams.BE_OperatingMode = LVM_BE_OFF;
     }
-    if(pContext->EffectType == LVM_VIRTUALIZER) {
+    if (pContext->EffectType == LVM_VIRTUALIZER) {
         ALOGV("\tLvmEffect_disable : Disabling LVM_VIRTUALIZER");
-        ActiveParams.VirtualizerOperatingMode   = LVM_MODE_OFF;
+        ActiveParams.VirtualizerOperatingMode = LVM_MODE_OFF;
     }
-    if(pContext->EffectType == LVM_EQUALIZER) {
+    if (pContext->EffectType == LVM_EQUALIZER) {
         ALOGV("\tLvmEffect_disable : Disabling LVM_EQUALIZER");
-        ActiveParams.EQNB_OperatingMode     = LVM_EQNB_OFF;
+        ActiveParams.EQNB_OperatingMode = LVM_EQNB_OFF;
     }
-    if(pContext->EffectType == LVM_VOLUME) {
+    if (pContext->EffectType == LVM_VOLUME) {
         ALOGV("\tLvmEffect_disable : Disabling LVM_VOLUME");
     }
 
     LvmStatus = LVM_SetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
     LVM_ERROR_CHECK(LvmStatus, "LVM_SetControlParameters", "LvmEffect_disable")
-    if(LvmStatus != LVM_SUCCESS) return -EINVAL;
+    if (LvmStatus != LVM_SUCCESS) return -EINVAL;
 
-    //ALOGV("\tLvmEffect_disable Succesfully called LVM_SetControlParameters\n");
-    //ALOGV("\tLvmEffect_disable end");
+    // ALOGV("\tLvmEffect_disable Successfully called LVM_SetControlParameters\n");
+    // ALOGV("\tLvmEffect_disable end");
     LvmEffect_limitLevel(pContext);
     return 0;
 }
 
 //----------------------------------------------------------------------------
-// LvmEffect_free()
-//----------------------------------------------------------------------------
-// Purpose: Free all memory associated with the Bundle.
-//
-// Inputs:
-//  pContext:   effect engine context
-//
-// Outputs:
-//
-//----------------------------------------------------------------------------
-
-void LvmEffect_free(EffectContext *pContext){
-    LVM_ReturnStatus_en     LvmStatus=LVM_SUCCESS;         /* Function call status */
-    LVM_MemTab_t            MemTab;
-
-    /* Free the algorithm memory */
-    LvmStatus = LVM_GetMemoryTable(pContext->pBundledContext->hInstance,
-                                   &MemTab,
-                                   LVM_NULL);
-
-    LVM_ERROR_CHECK(LvmStatus, "LVM_GetMemoryTable", "LvmEffect_free")
-
-    for (int i=0; i<LVM_NR_MEMORY_REGIONS; i++){
-        if (MemTab.Region[i].Size != 0){
-            if (MemTab.Region[i].pBaseAddress != NULL){
-                free(MemTab.Region[i].pBaseAddress);
-            }else{
-                ALOGV("\tLVM_ERROR : LvmEffect_free - trying to free with NULL pointer %" PRIu32
-                        " bytes for region %u at %p ERROR\n",
-                        MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
-            }
-        }
-    }
-}    /* end LvmEffect_free */
-
-//----------------------------------------------------------------------------
 // Effect_setConfig()
 //----------------------------------------------------------------------------
 // Purpose: Set input and output audio configuration.
@@ -1080,9 +940,9 @@
 //
 //----------------------------------------------------------------------------
 
-int Effect_setConfig(EffectContext *pContext, effect_config_t *pConfig){
-    LVM_Fs_en   SampleRate;
-    //ALOGV("\tEffect_setConfig start");
+int Effect_setConfig(EffectContext* pContext, effect_config_t* pConfig) {
+    LVM_Fs_en SampleRate;
+    // ALOGV("\tEffect_setConfig start");
 
     CHECK_ARG(pContext != NULL);
     CHECK_ARG(pConfig != NULL);
@@ -1090,107 +950,93 @@
     CHECK_ARG(pConfig->inputCfg.samplingRate == pConfig->outputCfg.samplingRate);
     CHECK_ARG(pConfig->inputCfg.channels == pConfig->outputCfg.channels);
     CHECK_ARG(pConfig->inputCfg.format == pConfig->outputCfg.format);
-#ifdef SUPPORT_MC
     CHECK_ARG(audio_channel_count_from_out_mask(pConfig->inputCfg.channels) <= LVM_MAX_CHANNELS);
-#else
-    CHECK_ARG(pConfig->inputCfg.channels == AUDIO_CHANNEL_OUT_STEREO);
-#endif
-    CHECK_ARG(pConfig->outputCfg.accessMode == EFFECT_BUFFER_ACCESS_WRITE
-              || pConfig->outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE);
+    CHECK_ARG(pConfig->outputCfg.accessMode == EFFECT_BUFFER_ACCESS_WRITE ||
+              pConfig->outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE);
     CHECK_ARG(pConfig->inputCfg.format == EFFECT_BUFFER_FORMAT);
     pContext->config = *pConfig;
     const LVM_INT16 NrChannels = audio_channel_count_from_out_mask(pConfig->inputCfg.channels);
 
     switch (pConfig->inputCfg.samplingRate) {
-    case 8000:
-        SampleRate = LVM_FS_8000;
-        pContext->pBundledContext->SamplesPerSecond = 8000 * NrChannels;
-        break;
-    case 16000:
-        SampleRate = LVM_FS_16000;
-        pContext->pBundledContext->SamplesPerSecond = 16000 * NrChannels;
-        break;
-    case 22050:
-        SampleRate = LVM_FS_22050;
-        pContext->pBundledContext->SamplesPerSecond = 22050 * NrChannels;
-        break;
-    case 32000:
-        SampleRate = LVM_FS_32000;
-        pContext->pBundledContext->SamplesPerSecond = 32000 * NrChannels;
-        break;
-    case 44100:
-        SampleRate = LVM_FS_44100;
-        pContext->pBundledContext->SamplesPerSecond = 44100 * NrChannels;
-        break;
-    case 48000:
-        SampleRate = LVM_FS_48000;
-        pContext->pBundledContext->SamplesPerSecond = 48000 * NrChannels;
-        break;
-    case 88200:
-        SampleRate = LVM_FS_88200;
-        pContext->pBundledContext->SamplesPerSecond = 88200 * NrChannels;
-        break;
-    case 96000:
-        SampleRate = LVM_FS_96000;
-        pContext->pBundledContext->SamplesPerSecond = 96000 * NrChannels;
-        break;
-    case 176400:
-        SampleRate = LVM_FS_176400;
-        pContext->pBundledContext->SamplesPerSecond = 176400 * NrChannels;
-        break;
-    case 192000:
-        SampleRate = LVM_FS_192000;
-        pContext->pBundledContext->SamplesPerSecond = 192000 * NrChannels;
-        break;
-    default:
-        ALOGV("\tEffect_setConfig invalid sampling rate %d", pConfig->inputCfg.samplingRate);
-        return -EINVAL;
+        case 8000:
+            SampleRate = LVM_FS_8000;
+            pContext->pBundledContext->SamplesPerSecond = 8000 * NrChannels;
+            break;
+        case 16000:
+            SampleRate = LVM_FS_16000;
+            pContext->pBundledContext->SamplesPerSecond = 16000 * NrChannels;
+            break;
+        case 22050:
+            SampleRate = LVM_FS_22050;
+            pContext->pBundledContext->SamplesPerSecond = 22050 * NrChannels;
+            break;
+        case 32000:
+            SampleRate = LVM_FS_32000;
+            pContext->pBundledContext->SamplesPerSecond = 32000 * NrChannels;
+            break;
+        case 44100:
+            SampleRate = LVM_FS_44100;
+            pContext->pBundledContext->SamplesPerSecond = 44100 * NrChannels;
+            break;
+        case 48000:
+            SampleRate = LVM_FS_48000;
+            pContext->pBundledContext->SamplesPerSecond = 48000 * NrChannels;
+            break;
+        case 88200:
+            SampleRate = LVM_FS_88200;
+            pContext->pBundledContext->SamplesPerSecond = 88200 * NrChannels;
+            break;
+        case 96000:
+            SampleRate = LVM_FS_96000;
+            pContext->pBundledContext->SamplesPerSecond = 96000 * NrChannels;
+            break;
+        case 176400:
+            SampleRate = LVM_FS_176400;
+            pContext->pBundledContext->SamplesPerSecond = 176400 * NrChannels;
+            break;
+        case 192000:
+            SampleRate = LVM_FS_192000;
+            pContext->pBundledContext->SamplesPerSecond = 192000 * NrChannels;
+            break;
+        default:
+            ALOGV("\tEffect_setConfig invalid sampling rate %d", pConfig->inputCfg.samplingRate);
+            return -EINVAL;
     }
 
-#ifdef SUPPORT_MC
     if (pContext->pBundledContext->SampleRate != SampleRate ||
         pContext->pBundledContext->ChMask != pConfig->inputCfg.channels) {
-#else
-    if(pContext->pBundledContext->SampleRate != SampleRate){
-#endif
-
-        LVM_ControlParams_t     ActiveParams;
-        LVM_ReturnStatus_en     LvmStatus = LVM_SUCCESS;
+        LVM_ControlParams_t ActiveParams;
+        LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS;
 
         ALOGV("\tEffect_setConfig change sampling rate to %d", SampleRate);
 
         /* Get the current settings */
-        LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance,
-                                         &ActiveParams);
+        LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
 
         LVM_ERROR_CHECK(LvmStatus, "LVM_GetControlParameters", "Effect_setConfig")
-        if(LvmStatus != LVM_SUCCESS) return -EINVAL;
+        if (LvmStatus != LVM_SUCCESS) return -EINVAL;
 
         ActiveParams.SampleRate = SampleRate;
 
-#ifdef SUPPORT_MC
         ActiveParams.NrChannels = NrChannels;
         ActiveParams.ChMask = pConfig->inputCfg.channels;
-#endif
 
         LvmStatus = LVM_SetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
 
         LVM_ERROR_CHECK(LvmStatus, "LVM_SetControlParameters", "Effect_setConfig")
-        ALOGV("\tEffect_setConfig Succesfully called LVM_SetControlParameters\n");
+        ALOGV("\tEffect_setConfig Successfully called LVM_SetControlParameters\n");
         pContext->pBundledContext->SampleRate = SampleRate;
-#ifdef SUPPORT_MC
         pContext->pBundledContext->ChMask = pConfig->inputCfg.channels;
-#endif
 
         LvmEffect_limitLevel(pContext);
 
-    }else{
-        //ALOGV("\tEffect_setConfig keep sampling rate at %d", SampleRate);
+    } else {
+        // ALOGV("\tEffect_setConfig keep sampling rate at %d", SampleRate);
     }
 
-    //ALOGV("\tEffect_setConfig End....");
+    // ALOGV("\tEffect_setConfig End....");
     return 0;
-}   /* end Effect_setConfig */
+} /* end Effect_setConfig */
 
 //----------------------------------------------------------------------------
 // Effect_getConfig()
@@ -1206,10 +1052,9 @@
 //
 //----------------------------------------------------------------------------
 
-void Effect_getConfig(EffectContext *pContext, effect_config_t *pConfig)
-{
+void Effect_getConfig(EffectContext* pContext, effect_config_t* pConfig) {
     *pConfig = pContext->config;
-}   /* end Effect_getConfig */
+} /* end Effect_getConfig */
 
 //----------------------------------------------------------------------------
 // BassGetStrength()
@@ -1225,32 +1070,31 @@
 //
 //----------------------------------------------------------------------------
 
-uint32_t BassGetStrength(EffectContext *pContext){
-    //ALOGV("\tBassGetStrength() (0-1000) -> %d\n", pContext->pBundledContext->BassStrengthSaved);
+uint32_t BassGetStrength(EffectContext* pContext) {
+    // ALOGV("\tBassGetStrength() (0-1000) -> %d\n", pContext->pBundledContext->BassStrengthSaved);
 
-    LVM_ControlParams_t     ActiveParams;                           /* Current control Parameters */
-    LVM_ReturnStatus_en     LvmStatus = LVM_SUCCESS;                /* Function call status */
+    LVM_ControlParams_t ActiveParams;            /* Current control Parameters */
+    LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
     /* Get the current settings */
-    LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance,
-                                         &ActiveParams);
+    LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
 
     LVM_ERROR_CHECK(LvmStatus, "LVM_GetControlParameters", "BassGetStrength")
-    if(LvmStatus != LVM_SUCCESS) return -EINVAL;
+    if (LvmStatus != LVM_SUCCESS) return -EINVAL;
 
-    //ALOGV("\tBassGetStrength Succesfully returned from LVM_GetControlParameters\n");
+    // ALOGV("\tBassGetStrength Successfully returned from LVM_GetControlParameters\n");
 
     /* Check that the strength returned matches the strength that was set earlier */
-    if(ActiveParams.BE_EffectLevel !=
-       (LVM_INT16)((15*pContext->pBundledContext->BassStrengthSaved)/1000)){
+    if (ActiveParams.BE_EffectLevel !=
+        (LVM_INT16)((15 * pContext->pBundledContext->BassStrengthSaved) / 1000)) {
         ALOGV("\tLVM_ERROR : BassGetStrength module strength does not match savedStrength %d %d\n",
-                ActiveParams.BE_EffectLevel, pContext->pBundledContext->BassStrengthSaved);
+              ActiveParams.BE_EffectLevel, pContext->pBundledContext->BassStrengthSaved);
         return -EINVAL;
     }
 
-    //ALOGV("\tBassGetStrength() (0-15)   -> %d\n", ActiveParams.BE_EffectLevel );
-    //ALOGV("\tBassGetStrength() (saved)  -> %d\n", pContext->pBundledContext->BassStrengthSaved );
+    // ALOGV("\tBassGetStrength() (0-15)   -> %d\n", ActiveParams.BE_EffectLevel );
+    // ALOGV("\tBassGetStrength() (saved)  -> %d\n", pContext->pBundledContext->BassStrengthSaved );
     return pContext->pBundledContext->BassStrengthSaved;
-}    /* end BassGetStrength */
+} /* end BassGetStrength */
 
 //----------------------------------------------------------------------------
 // BassSetStrength()
@@ -1264,35 +1108,34 @@
 //
 //----------------------------------------------------------------------------
 
-void BassSetStrength(EffectContext *pContext, uint32_t strength){
-    //ALOGV("\tBassSetStrength(%d)", strength);
+void BassSetStrength(EffectContext* pContext, uint32_t strength) {
+    // ALOGV("\tBassSetStrength(%d)", strength);
 
     pContext->pBundledContext->BassStrengthSaved = (int)strength;
 
-    LVM_ControlParams_t     ActiveParams;              /* Current control Parameters */
-    LVM_ReturnStatus_en     LvmStatus=LVM_SUCCESS;     /* Function call status */
+    LVM_ControlParams_t ActiveParams;            /* Current control Parameters */
+    LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
 
     /* Get the current settings */
-    LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance,
-                                         &ActiveParams);
+    LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
 
     LVM_ERROR_CHECK(LvmStatus, "LVM_GetControlParameters", "BassSetStrength")
-    //ALOGV("\tBassSetStrength Succesfully returned from LVM_GetControlParameters\n");
+    // ALOGV("\tBassSetStrength Successfully returned from LVM_GetControlParameters\n");
 
     /* Bass Enhancement parameters */
-    ActiveParams.BE_EffectLevel    = (LVM_INT16)((15*strength)/1000);
-    ActiveParams.BE_CentreFreq     = LVM_BE_CENTRE_90Hz;
+    ActiveParams.BE_EffectLevel = (LVM_INT16)((15 * strength) / 1000);
+    ActiveParams.BE_CentreFreq = LVM_BE_CENTRE_90Hz;
 
-    //ALOGV("\tBassSetStrength() (0-15)   -> %d\n", ActiveParams.BE_EffectLevel );
+    // ALOGV("\tBassSetStrength() (0-15)   -> %d\n", ActiveParams.BE_EffectLevel );
 
     /* Activate the initial settings */
     LvmStatus = LVM_SetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
 
     LVM_ERROR_CHECK(LvmStatus, "LVM_SetControlParameters", "BassSetStrength")
-    //ALOGV("\tBassSetStrength Succesfully called LVM_SetControlParameters\n");
+    // ALOGV("\tBassSetStrength Successfully called LVM_SetControlParameters\n");
 
     LvmEffect_limitLevel(pContext);
-}    /* end BassSetStrength */
+} /* end BassSetStrength */
 
 //----------------------------------------------------------------------------
 // VirtualizerGetStrength()
@@ -1308,21 +1151,23 @@
 //
 //----------------------------------------------------------------------------
 
-uint32_t VirtualizerGetStrength(EffectContext *pContext){
-    //ALOGV("\tVirtualizerGetStrength (0-1000) -> %d\n",pContext->pBundledContext->VirtStrengthSaved);
+uint32_t VirtualizerGetStrength(EffectContext* pContext) {
+    // ALOGV("\tVirtualizerGetStrength (0-1000) ->
+    // %d\n",pContext->pBundledContext->VirtStrengthSaved);
 
-    LVM_ControlParams_t     ActiveParams;                           /* Current control Parameters */
-    LVM_ReturnStatus_en     LvmStatus = LVM_SUCCESS;                /* Function call status */
+    LVM_ControlParams_t ActiveParams;            /* Current control Parameters */
+    LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
 
     LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
 
     LVM_ERROR_CHECK(LvmStatus, "LVM_GetControlParameters", "VirtualizerGetStrength")
-    if(LvmStatus != LVM_SUCCESS) return -EINVAL;
+    if (LvmStatus != LVM_SUCCESS) return -EINVAL;
 
-    //ALOGV("\tVirtualizerGetStrength Succesfully returned from LVM_GetControlParameters\n");
-    //ALOGV("\tVirtualizerGetStrength() (0-100)   -> %d\n", ActiveParams.VirtualizerReverbLevel*10);
+    // ALOGV("\tVirtualizerGetStrength Successfully returned from LVM_GetControlParameters\n");
+    // ALOGV("\tVirtualizerGetStrength() (0-100)   -> %d\n",
+    // ActiveParams.VirtualizerReverbLevel*10);
     return pContext->pBundledContext->VirtStrengthSaved;
-}    /* end getStrength */
+} /* end getStrength */
 
 //----------------------------------------------------------------------------
 // VirtualizerSetStrength()
@@ -1336,31 +1181,31 @@
 //
 //----------------------------------------------------------------------------
 
-void VirtualizerSetStrength(EffectContext *pContext, uint32_t strength){
-    //ALOGV("\tVirtualizerSetStrength(%d)", strength);
-    LVM_ControlParams_t     ActiveParams;              /* Current control Parameters */
-    LVM_ReturnStatus_en     LvmStatus=LVM_SUCCESS;     /* Function call status */
+void VirtualizerSetStrength(EffectContext* pContext, uint32_t strength) {
+    // ALOGV("\tVirtualizerSetStrength(%d)", strength);
+    LVM_ControlParams_t ActiveParams;            /* Current control Parameters */
+    LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
 
     pContext->pBundledContext->VirtStrengthSaved = (int)strength;
 
     /* Get the current settings */
-    LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance,&ActiveParams);
+    LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
 
     LVM_ERROR_CHECK(LvmStatus, "LVM_GetControlParameters", "VirtualizerSetStrength")
-    //ALOGV("\tVirtualizerSetStrength Succesfully returned from LVM_GetControlParameters\n");
+    // ALOGV("\tVirtualizerSetStrength Successfully returned from LVM_GetControlParameters\n");
 
     /* Virtualizer parameters */
-    ActiveParams.CS_EffectLevel             = (int)((strength*32767)/1000);
+    ActiveParams.CS_EffectLevel = (int)((strength * 32767) / 1000);
 
-    ALOGV("\tVirtualizerSetStrength() (0-1000)   -> %d\n", strength );
-    ALOGV("\tVirtualizerSetStrength() (0- 100)   -> %d\n", ActiveParams.CS_EffectLevel );
+    ALOGV("\tVirtualizerSetStrength() (0-1000)   -> %d\n", strength);
+    ALOGV("\tVirtualizerSetStrength() (0- 100)   -> %d\n", ActiveParams.CS_EffectLevel);
 
     /* Activate the initial settings */
     LvmStatus = LVM_SetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
     LVM_ERROR_CHECK(LvmStatus, "LVM_SetControlParameters", "VirtualizerSetStrength")
-    //ALOGV("\tVirtualizerSetStrength Succesfully called LVM_SetControlParameters\n\n");
+    // ALOGV("\tVirtualizerSetStrength Successfully called LVM_SetControlParameters\n\n");
     LvmEffect_limitLevel(pContext);
-}    /* end setStrength */
+} /* end setStrength */
 
 //----------------------------------------------------------------------------
 // VirtualizerIsDeviceSupported()
@@ -1376,14 +1221,14 @@
 //----------------------------------------------------------------------------
 int VirtualizerIsDeviceSupported(audio_devices_t deviceType) {
     switch (deviceType) {
-    case AUDIO_DEVICE_OUT_WIRED_HEADSET:
-    case AUDIO_DEVICE_OUT_WIRED_HEADPHONE:
-    case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES:
-    case AUDIO_DEVICE_OUT_USB_HEADSET:
-    // case AUDIO_DEVICE_OUT_USB_DEVICE:  // For USB testing of the virtualizer only.
-        return 0;
-    default :
-        return -EINVAL;
+        case AUDIO_DEVICE_OUT_WIRED_HEADSET:
+        case AUDIO_DEVICE_OUT_WIRED_HEADPHONE:
+        case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES:
+        case AUDIO_DEVICE_OUT_USB_HEADSET:
+            // case AUDIO_DEVICE_OUT_USB_DEVICE:  // For USB testing of the virtualizer only.
+            return 0;
+        default:
+            return -EINVAL;
     }
 }
 
@@ -1401,9 +1246,9 @@
 //  0            if the configuration is supported
 //----------------------------------------------------------------------------
 int VirtualizerIsConfigurationSupported(audio_channel_mask_t channelMask,
-        audio_devices_t deviceType) {
+                                        audio_devices_t deviceType) {
     uint32_t channelCount = audio_channel_count_from_out_mask(channelMask);
-    if (channelCount < 1 || channelCount > FCC_2) { // TODO: update to 8 channels when supported.
+    if (channelCount < 1 || channelCount > FCC_2) {  // TODO: update to 8 channels when supported.
         return -EINVAL;
     }
     return VirtualizerIsDeviceSupported(deviceType);
@@ -1423,16 +1268,16 @@
 //  0            if the device is supported and the virtualization mode forced
 //
 //----------------------------------------------------------------------------
-int VirtualizerForceVirtualizationMode(EffectContext *pContext, audio_devices_t forcedDevice) {
+int VirtualizerForceVirtualizationMode(EffectContext* pContext, audio_devices_t forcedDevice) {
     ALOGV("VirtualizerForceVirtualizationMode: forcedDev=0x%x enabled=%d tmpDisabled=%d",
-            forcedDevice, pContext->pBundledContext->bVirtualizerEnabled,
-            pContext->pBundledContext->bVirtualizerTempDisabled);
+          forcedDevice, pContext->pBundledContext->bVirtualizerEnabled,
+          pContext->pBundledContext->bVirtualizerTempDisabled);
     int status = 0;
     bool useVirtualizer = false;
 
     if (VirtualizerIsDeviceSupported(forcedDevice) != 0) {
         if (forcedDevice != AUDIO_DEVICE_NONE) {
-            //forced device is not supported, make it behave as a reset of forced mode
+            // forced device is not supported, make it behave as a reset of forced mode
             forcedDevice = AUDIO_DEVICE_NONE;
             // but return an error
             status = -EINVAL;
@@ -1472,8 +1317,8 @@
     }
 
     ALOGV("\tafter VirtualizerForceVirtualizationMode: enabled=%d tmpDisabled=%d",
-            pContext->pBundledContext->bVirtualizerEnabled,
-            pContext->pBundledContext->bVirtualizerTempDisabled);
+          pContext->pBundledContext->bVirtualizerEnabled,
+          pContext->pBundledContext->bVirtualizerTempDisabled);
 
     return status;
 }
@@ -1499,23 +1344,23 @@
 //
 //----------------------------------------------------------------------------
 void VirtualizerGetSpeakerAngles(audio_channel_mask_t channelMask,
-        audio_devices_t deviceType __unused, int32_t *pSpeakerAngles) {
+                                 audio_devices_t deviceType __unused, int32_t* pSpeakerAngles) {
     // the channel count is guaranteed to be 1 or 2
     // the device is guaranteed to be of type headphone
     // this virtualizer is always using 2 virtual speakers at -90 and 90deg of azimuth, 0deg of
     // elevation but the return information is sized for nbChannels * 3, so we have to consider
     // the (false here) case of a single channel, and return only 3 fields.
     if (audio_channel_count_from_out_mask(channelMask) == 1) {
-        *pSpeakerAngles++ = (int32_t) AUDIO_CHANNEL_OUT_MONO; // same as FRONT_LEFT
-        *pSpeakerAngles++ = 0; // azimuth
-        *pSpeakerAngles = 0; // elevation
+        *pSpeakerAngles++ = (int32_t)AUDIO_CHANNEL_OUT_MONO;  // same as FRONT_LEFT
+        *pSpeakerAngles++ = 0;                                // azimuth
+        *pSpeakerAngles = 0;                                  // elevation
     } else {
-        *pSpeakerAngles++ = (int32_t) AUDIO_CHANNEL_OUT_FRONT_LEFT;
-        *pSpeakerAngles++ = -90; // azimuth
-        *pSpeakerAngles++ = 0;   // elevation
-        *pSpeakerAngles++ = (int32_t) AUDIO_CHANNEL_OUT_FRONT_RIGHT;
+        *pSpeakerAngles++ = (int32_t)AUDIO_CHANNEL_OUT_FRONT_LEFT;
+        *pSpeakerAngles++ = -90;  // azimuth
+        *pSpeakerAngles++ = 0;    // elevation
+        *pSpeakerAngles++ = (int32_t)AUDIO_CHANNEL_OUT_FRONT_RIGHT;
         *pSpeakerAngles++ = 90;  // azimuth
-        *pSpeakerAngles   = 0;   // elevation
+        *pSpeakerAngles = 0;     // elevation
     }
 }
 
@@ -1529,10 +1374,10 @@
 //   AUDIO_DEVICE_NONE if the effect is not virtualizing
 //   or the device type if the effect is virtualizing
 //----------------------------------------------------------------------------
-audio_devices_t VirtualizerGetVirtualizationMode(EffectContext *pContext) {
+audio_devices_t VirtualizerGetVirtualizationMode(EffectContext* pContext) {
     audio_devices_t virtDevice = AUDIO_DEVICE_NONE;
-    if ((pContext->pBundledContext->bVirtualizerEnabled == LVM_TRUE)
-            && (pContext->pBundledContext->bVirtualizerTempDisabled == LVM_FALSE)) {
+    if ((pContext->pBundledContext->bVirtualizerEnabled == LVM_TRUE) &&
+        (pContext->pBundledContext->bVirtualizerTempDisabled == LVM_FALSE)) {
         if (pContext->pBundledContext->nVirtualizerForcedDevice != AUDIO_DEVICE_NONE) {
             // virtualization mode is forced, return that device
             virtDevice = pContext->pBundledContext->nVirtualizerForcedDevice;
@@ -1557,8 +1402,8 @@
 // Outputs:
 //
 //----------------------------------------------------------------------------
-int32_t EqualizerGetBandLevel(EffectContext *pContext, int32_t band){
-    //ALOGV("\tEqualizerGetBandLevel -> %d\n", pContext->pBundledContext->bandGaindB[band] );
+int32_t EqualizerGetBandLevel(EffectContext* pContext, int32_t band) {
+    // ALOGV("\tEqualizerGetBandLevel -> %d\n", pContext->pBundledContext->bandGaindB[band] );
     return pContext->pBundledContext->bandGaindB[band] * 100;
 }
 
@@ -1576,14 +1421,14 @@
 // Outputs:
 //
 //---------------------------------------------------------------------------
-void EqualizerSetBandLevel(EffectContext *pContext, int band, short Gain){
+void EqualizerSetBandLevel(EffectContext* pContext, int band, short Gain) {
     int gainRounded;
-    if(Gain > 0){
-        gainRounded = (int)((Gain+50)/100);
-    }else{
-        gainRounded = (int)((Gain-50)/100);
+    if (Gain > 0) {
+        gainRounded = (int)((Gain + 50) / 100);
+    } else {
+        gainRounded = (int)((Gain - 50) / 100);
     }
-    //ALOGV("\tEqualizerSetBandLevel(%d)->(%d)", Gain, gainRounded);
+    // ALOGV("\tEqualizerSetBandLevel(%d)->(%d)", Gain, gainRounded);
     pContext->pBundledContext->bandGaindB[band] = gainRounded;
     pContext->pBundledContext->CurPreset = PRESET_CUSTOM;
 
@@ -1603,23 +1448,22 @@
 // Outputs:
 //
 //----------------------------------------------------------------------------
-int32_t EqualizerGetCentreFrequency(EffectContext *pContext, int32_t band){
-    int32_t Frequency =0;
+int32_t EqualizerGetCentreFrequency(EffectContext* pContext, int32_t band) {
+    int32_t Frequency = 0;
 
-    LVM_ControlParams_t     ActiveParams;                           /* Current control Parameters */
-    LVM_ReturnStatus_en     LvmStatus = LVM_SUCCESS;                /* Function call status */
-    LVM_EQNB_BandDef_t      *BandDef;
+    LVM_ControlParams_t ActiveParams;            /* Current control Parameters */
+    LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
+    LVM_EQNB_BandDef_t* BandDef;
     /* Get the current settings */
-    LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance,
-                                         &ActiveParams);
+    LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
 
     LVM_ERROR_CHECK(LvmStatus, "LVM_GetControlParameters", "EqualizerGetCentreFrequency")
 
-    BandDef   = ActiveParams.pEQNB_BandDefinition;
-    Frequency = (int32_t)BandDef[band].Frequency*1000;     // Convert to millibels
+    BandDef = ActiveParams.pEQNB_BandDefinition;
+    Frequency = (int32_t)BandDef[band].Frequency * 1000;  // Convert to millibels
 
-    //ALOGV("\tEqualizerGetCentreFrequency -> %d\n", Frequency );
-    //ALOGV("\tEqualizerGetCentreFrequency Succesfully returned from LVM_GetControlParameters\n");
+    // ALOGV("\tEqualizerGetCentreFrequency -> %d\n", Frequency );
+    // ALOGV("\tEqualizerGetCentreFrequency Successfully returned from LVM_GetControlParameters\n");
     return Frequency;
 }
 
@@ -1641,10 +1485,10 @@
 //  pLow:       lower band range
 //  pHi:        upper band range
 //----------------------------------------------------------------------------
-int32_t EqualizerGetBandFreqRange(EffectContext *pContext __unused, int32_t band, uint32_t *pLow,
-                                  uint32_t *pHi){
+int32_t EqualizerGetBandFreqRange(EffectContext* pContext __unused, int32_t band, uint32_t* pLow,
+                                  uint32_t* pHi) {
     *pLow = bandFreqRange[band][0];
-    *pHi  = bandFreqRange[band][1];
+    *pHi = bandFreqRange[band][1];
     return 0;
 }
 
@@ -1665,16 +1509,16 @@
 //  pLow:       lower band range
 //  pLow:       upper band range
 //----------------------------------------------------------------------------
-int32_t EqualizerGetBand(EffectContext *pContext __unused, uint32_t targetFreq){
+int32_t EqualizerGetBand(EffectContext* pContext __unused, uint32_t targetFreq) {
     int band = 0;
 
-    if(targetFreq < bandFreqRange[0][0]){
+    if (targetFreq < bandFreqRange[0][0]) {
         return -EINVAL;
-    }else if(targetFreq == bandFreqRange[0][0]){
+    } else if (targetFreq == bandFreqRange[0][0]) {
         return 0;
     }
-    for(int i=0; i<FIVEBAND_NUMBANDS;i++){
-        if((targetFreq > bandFreqRange[i][0])&&(targetFreq <= bandFreqRange[i][1])){
+    for (int i = 0; i < FIVEBAND_NUMBANDS; i++) {
+        if ((targetFreq > bandFreqRange[i][0]) && (targetFreq <= bandFreqRange[i][1])) {
             band = i;
         }
     }
@@ -1694,7 +1538,7 @@
 //  pContext:    effect engine context
 //
 //----------------------------------------------------------------------------
-int32_t EqualizerGetPreset(EffectContext *pContext){
+int32_t EqualizerGetPreset(EffectContext* pContext) {
     return pContext->pBundledContext->CurPreset;
 }
 
@@ -1711,14 +1555,12 @@
 //  preset       The preset ID.
 //
 //----------------------------------------------------------------------------
-void EqualizerSetPreset(EffectContext *pContext, int preset){
-
-    //ALOGV("\tEqualizerSetPreset(%d)", preset);
+void EqualizerSetPreset(EffectContext* pContext, int preset) {
+    // ALOGV("\tEqualizerSetPreset(%d)", preset);
     pContext->pBundledContext->CurPreset = preset;
 
-    //ActiveParams.pEQNB_BandDefinition = &BandDefs[0];
-    for (int i=0; i<FIVEBAND_NUMBANDS; i++)
-    {
+    // ActiveParams.pEQNB_BandDefinition = &BandDefs[0];
+    for (int i = 0; i < FIVEBAND_NUMBANDS; i++) {
         pContext->pBundledContext->bandGaindB[i] =
                 EQNB_5BandSoftPresets[i + preset * FIVEBAND_NUMBANDS];
     }
@@ -1726,11 +1568,11 @@
     EqualizerUpdateActiveParams(pContext);
     LvmEffect_limitLevel(pContext);
 
-    //ALOGV("\tEqualizerSetPreset Succesfully called LVM_SetControlParameters\n");
+    // ALOGV("\tEqualizerSetPreset Successfully called LVM_SetControlParameters\n");
     return;
 }
 
-int32_t EqualizerGetNumPresets(){
+int32_t EqualizerGetNumPresets() {
     return sizeof(gEqualizerPresets) / sizeof(PresetConfig);
 }
 
@@ -1745,14 +1587,14 @@
 // preset       The preset ID. Must be less than number of presets.
 //
 //-------------------------------------------------------------------------
-const char * EqualizerGetPresetName(int32_t preset){
-    //ALOGV("\tEqualizerGetPresetName start(%d)", preset);
+const char* EqualizerGetPresetName(int32_t preset) {
+    // ALOGV("\tEqualizerGetPresetName start(%d)", preset);
     if (preset == PRESET_CUSTOM) {
         return "Custom";
     } else {
         return gEqualizerPresets[preset].name;
     }
-    //ALOGV("\tEqualizerGetPresetName end(%d)", preset);
+    // ALOGV("\tEqualizerGetPresetName end(%d)", preset);
     return 0;
 }
 
@@ -1767,8 +1609,7 @@
 //
 //----------------------------------------------------------------------------
 
-int VolumeSetVolumeLevel(EffectContext *pContext, int16_t level){
-
+int VolumeSetVolumeLevel(EffectContext* pContext, int16_t level) {
     if (level > 0 || level < -9600) {
         return -EINVAL;
     }
@@ -1782,7 +1623,7 @@
     LvmEffect_limitLevel(pContext);
 
     return 0;
-}    /* end VolumeSetVolumeLevel */
+} /* end VolumeSetVolumeLevel */
 
 //----------------------------------------------------------------------------
 // VolumeGetVolumeLevel()
@@ -1794,15 +1635,14 @@
 //
 //----------------------------------------------------------------------------
 
-int VolumeGetVolumeLevel(EffectContext *pContext, int16_t *level){
-
+int VolumeGetVolumeLevel(EffectContext* pContext, int16_t* level) {
     if (pContext->pBundledContext->bMuteEnabled == LVM_TRUE) {
         *level = pContext->pBundledContext->levelSaved * 100;
     } else {
         *level = pContext->pBundledContext->volume * 100;
     }
     return 0;
-}    /* end VolumeGetVolumeLevel */
+} /* end VolumeGetVolumeLevel */
 
 //----------------------------------------------------------------------------
 // VolumeSetMute()
@@ -1815,23 +1655,23 @@
 //
 //----------------------------------------------------------------------------
 
-int32_t VolumeSetMute(EffectContext *pContext, uint32_t mute){
-    //ALOGV("\tVolumeSetMute start(%d)", mute);
+int32_t VolumeSetMute(EffectContext* pContext, uint32_t mute) {
+    // ALOGV("\tVolumeSetMute start(%d)", mute);
 
     pContext->pBundledContext->bMuteEnabled = mute;
 
     /* Set appropriate volume level */
-    if(pContext->pBundledContext->bMuteEnabled == LVM_TRUE){
+    if (pContext->pBundledContext->bMuteEnabled == LVM_TRUE) {
         pContext->pBundledContext->levelSaved = pContext->pBundledContext->volume;
         pContext->pBundledContext->volume = -96;
-    }else{
+    } else {
         pContext->pBundledContext->volume = pContext->pBundledContext->levelSaved;
     }
 
     LvmEffect_limitLevel(pContext);
 
     return 0;
-}    /* end setMute */
+} /* end setMute */
 
 //----------------------------------------------------------------------------
 // VolumeGetMute()
@@ -1845,26 +1685,25 @@
 //  mute:       enable/disable flag
 //----------------------------------------------------------------------------
 
-int32_t VolumeGetMute(EffectContext *pContext, uint32_t *mute){
-    //ALOGV("\tVolumeGetMute start");
-    if((pContext->pBundledContext->bMuteEnabled == LVM_FALSE)||
-       (pContext->pBundledContext->bMuteEnabled == LVM_TRUE)){
+int32_t VolumeGetMute(EffectContext* pContext, uint32_t* mute) {
+    // ALOGV("\tVolumeGetMute start");
+    if ((pContext->pBundledContext->bMuteEnabled == LVM_FALSE) ||
+        (pContext->pBundledContext->bMuteEnabled == LVM_TRUE)) {
         *mute = pContext->pBundledContext->bMuteEnabled;
         return 0;
-    }else{
+    } else {
         ALOGV("\tLVM_ERROR : VolumeGetMute read an invalid value from context %d",
               pContext->pBundledContext->bMuteEnabled);
         return -EINVAL;
     }
-    //ALOGV("\tVolumeGetMute end");
-}    /* end getMute */
+    // ALOGV("\tVolumeGetMute end");
+} /* end getMute */
 
-int16_t VolumeConvertStereoPosition(int16_t position){
+int16_t VolumeConvertStereoPosition(int16_t position) {
     int16_t convertedPosition = 0;
 
-    convertedPosition = (int16_t)(((float)position/1000)*96);
+    convertedPosition = (int16_t)(((float)position / 1000) * 96);
     return convertedPosition;
-
 }
 
 //----------------------------------------------------------------------------
@@ -1879,55 +1718,55 @@
 // Outputs:
 //----------------------------------------------------------------------------
 
-int VolumeSetStereoPosition(EffectContext *pContext, int16_t position){
-
-    LVM_ControlParams_t     ActiveParams;              /* Current control Parameters */
-    LVM_ReturnStatus_en     LvmStatus=LVM_SUCCESS;     /* Function call status */
-    LVM_INT16               Balance = 0;
+int VolumeSetStereoPosition(EffectContext* pContext, int16_t position) {
+    LVM_ControlParams_t ActiveParams;            /* Current control Parameters */
+    LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
+    LVM_INT16 Balance = 0;
 
     pContext->pBundledContext->positionSaved = position;
     Balance = VolumeConvertStereoPosition(pContext->pBundledContext->positionSaved);
 
-    //ALOGV("\tVolumeSetStereoPosition start pContext->pBundledContext->positionSaved = %d",
-    //pContext->pBundledContext->positionSaved);
+    // ALOGV("\tVolumeSetStereoPosition start pContext->pBundledContext->positionSaved = %d",
+    // pContext->pBundledContext->positionSaved);
 
-    if(pContext->pBundledContext->bStereoPositionEnabled == LVM_TRUE){
-
-        //ALOGV("\tVolumeSetStereoPosition Position to be set is %d %d\n", position, Balance);
+    if (pContext->pBundledContext->bStereoPositionEnabled == LVM_TRUE) {
+        // ALOGV("\tVolumeSetStereoPosition Position to be set is %d %d\n", position, Balance);
         pContext->pBundledContext->positionSaved = position;
         /* Get the current settings */
         LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
         LVM_ERROR_CHECK(LvmStatus, "LVM_GetControlParameters", "VolumeSetStereoPosition")
-        if(LvmStatus != LVM_SUCCESS) return -EINVAL;
-        //ALOGV("\tVolumeSetStereoPosition Succesfully returned from LVM_GetControlParameters got:"
+        if (LvmStatus != LVM_SUCCESS) return -EINVAL;
+        // ALOGV("\tVolumeSetStereoPosition Successfully returned from LVM_GetControlParameters
+        // got:"
         //     " %d\n", ActiveParams.VC_Balance);
 
         /* Volume parameters */
-        ActiveParams.VC_Balance  = Balance;
-        //ALOGV("\tVolumeSetStereoPosition() (-96dB -> +96dB)   -> %d\n", ActiveParams.VC_Balance );
+        ActiveParams.VC_Balance = Balance;
+        // ALOGV("\tVolumeSetStereoPosition() (-96dB -> +96dB)   -> %d\n", ActiveParams.VC_Balance
+        // );
 
         /* Activate the initial settings */
         LvmStatus = LVM_SetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
         LVM_ERROR_CHECK(LvmStatus, "LVM_SetControlParameters", "VolumeSetStereoPosition")
-        if(LvmStatus != LVM_SUCCESS) return -EINVAL;
+        if (LvmStatus != LVM_SUCCESS) return -EINVAL;
 
-        //ALOGV("\tVolumeSetStereoPosition Succesfully called LVM_SetControlParameters\n");
+        // ALOGV("\tVolumeSetStereoPosition Successfully called LVM_SetControlParameters\n");
 
         /* Get the current settings */
         LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
         LVM_ERROR_CHECK(LvmStatus, "LVM_GetControlParameters", "VolumeSetStereoPosition")
-        if(LvmStatus != LVM_SUCCESS) return -EINVAL;
-        //ALOGV("\tVolumeSetStereoPosition Succesfully returned from LVM_GetControlParameters got: "
+        if (LvmStatus != LVM_SUCCESS) return -EINVAL;
+        // ALOGV("\tVolumeSetStereoPosition Successfully returned from LVM_GetControlParameters got:
+        // "
         //     "%d\n", ActiveParams.VC_Balance);
+    } else {
+        // ALOGV("\tVolumeSetStereoPosition Position attempting to set, but not enabled %d %d\n",
+        // position, Balance);
     }
-    else{
-        //ALOGV("\tVolumeSetStereoPosition Position attempting to set, but not enabled %d %d\n",
-        //position, Balance);
-    }
-    //ALOGV("\tVolumeSetStereoPosition end pContext->pBundledContext->positionSaved = %d\n",
-    //pContext->pBundledContext->positionSaved);
+    // ALOGV("\tVolumeSetStereoPosition end pContext->pBundledContext->positionSaved = %d\n",
+    // pContext->pBundledContext->positionSaved);
     return 0;
-}    /* end VolumeSetStereoPosition */
+} /* end VolumeSetStereoPosition */
 
 //----------------------------------------------------------------------------
 // VolumeGetStereoPosition()
@@ -1941,35 +1780,35 @@
 //  position:       stereo position
 //----------------------------------------------------------------------------
 
-int32_t VolumeGetStereoPosition(EffectContext *pContext, int16_t *position){
-    //ALOGV("\tVolumeGetStereoPosition start");
+int32_t VolumeGetStereoPosition(EffectContext* pContext, int16_t* position) {
+    // ALOGV("\tVolumeGetStereoPosition start");
 
-    LVM_ControlParams_t     ActiveParams;                           /* Current control Parameters */
-    LVM_ReturnStatus_en     LvmStatus = LVM_SUCCESS;                /* Function call status */
-    LVM_INT16               balance;
+    LVM_ControlParams_t ActiveParams;            /* Current control Parameters */
+    LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
+    LVM_INT16 balance;
 
-    //ALOGV("\tVolumeGetStereoPosition start pContext->pBundledContext->positionSaved = %d",
-    //pContext->pBundledContext->positionSaved);
+    // ALOGV("\tVolumeGetStereoPosition start pContext->pBundledContext->positionSaved = %d",
+    // pContext->pBundledContext->positionSaved);
 
     LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
     LVM_ERROR_CHECK(LvmStatus, "LVM_GetControlParameters", "VolumeGetStereoPosition")
-    if(LvmStatus != LVM_SUCCESS) return -EINVAL;
+    if (LvmStatus != LVM_SUCCESS) return -EINVAL;
 
-    //ALOGV("\tVolumeGetStereoPosition -> %d\n", ActiveParams.VC_Balance);
-    //ALOGV("\tVolumeGetStereoPosition Succesfully returned from LVM_GetControlParameters\n");
+    // ALOGV("\tVolumeGetStereoPosition -> %d\n", ActiveParams.VC_Balance);
+    // ALOGV("\tVolumeGetStereoPosition Successfully returned from LVM_GetControlParameters\n");
 
     balance = VolumeConvertStereoPosition(pContext->pBundledContext->positionSaved);
 
-    if(pContext->pBundledContext->bStereoPositionEnabled == LVM_TRUE){
-        if(balance != ActiveParams.VC_Balance){
+    if (pContext->pBundledContext->bStereoPositionEnabled == LVM_TRUE) {
+        if (balance != ActiveParams.VC_Balance) {
             return -EINVAL;
         }
     }
-    *position = (LVM_INT16)pContext->pBundledContext->positionSaved;     // Convert dB to millibels
-    //ALOGV("\tVolumeGetStereoPosition end returning pContext->pBundledContext->positionSaved =%d\n",
-    //pContext->pBundledContext->positionSaved);
+    *position = (LVM_INT16)pContext->pBundledContext->positionSaved;  // Convert dB to millibels
+    // ALOGV("\tVolumeGetStereoPosition end returning pContext->pBundledContext->positionSaved
+    // =%d\n", pContext->pBundledContext->positionSaved);
     return 0;
-}    /* end VolumeGetStereoPosition */
+} /* end VolumeGetStereoPosition */
 
 //----------------------------------------------------------------------------
 // VolumeEnableStereoPosition()
@@ -1982,40 +1821,40 @@
 //
 //----------------------------------------------------------------------------
 
-int32_t VolumeEnableStereoPosition(EffectContext *pContext, uint32_t enabled){
-    //ALOGV("\tVolumeEnableStereoPosition start()");
+int32_t VolumeEnableStereoPosition(EffectContext* pContext, uint32_t enabled) {
+    // ALOGV("\tVolumeEnableStereoPosition start()");
 
     pContext->pBundledContext->bStereoPositionEnabled = enabled;
 
-    LVM_ControlParams_t     ActiveParams;              /* Current control Parameters */
-    LVM_ReturnStatus_en     LvmStatus=LVM_SUCCESS;     /* Function call status */
+    LVM_ControlParams_t ActiveParams;            /* Current control Parameters */
+    LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
 
     /* Get the current settings */
     LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
     LVM_ERROR_CHECK(LvmStatus, "LVM_GetControlParameters", "VolumeEnableStereoPosition")
-    if(LvmStatus != LVM_SUCCESS) return -EINVAL;
+    if (LvmStatus != LVM_SUCCESS) return -EINVAL;
 
-    //ALOGV("\tVolumeEnableStereoPosition Succesfully returned from LVM_GetControlParameters\n");
-    //ALOGV("\tVolumeEnableStereoPosition to %d, position was %d\n",
+    // ALOGV("\tVolumeEnableStereoPosition Successfully returned from LVM_GetControlParameters\n");
+    // ALOGV("\tVolumeEnableStereoPosition to %d, position was %d\n",
     //     enabled, ActiveParams.VC_Balance );
 
     /* Set appropriate stereo position */
-    if(pContext->pBundledContext->bStereoPositionEnabled == LVM_FALSE){
+    if (pContext->pBundledContext->bStereoPositionEnabled == LVM_FALSE) {
         ActiveParams.VC_Balance = 0;
-    }else{
-        ActiveParams.VC_Balance  =
-                            VolumeConvertStereoPosition(pContext->pBundledContext->positionSaved);
+    } else {
+        ActiveParams.VC_Balance =
+                VolumeConvertStereoPosition(pContext->pBundledContext->positionSaved);
     }
 
     /* Activate the initial settings */
     LvmStatus = LVM_SetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
     LVM_ERROR_CHECK(LvmStatus, "LVM_SetControlParameters", "VolumeEnableStereoPosition")
-    if(LvmStatus != LVM_SUCCESS) return -EINVAL;
+    if (LvmStatus != LVM_SUCCESS) return -EINVAL;
 
-    //ALOGV("\tVolumeEnableStereoPosition Succesfully called LVM_SetControlParameters\n");
-    //ALOGV("\tVolumeEnableStereoPosition end()\n");
+    // ALOGV("\tVolumeEnableStereoPosition Successfully called LVM_SetControlParameters\n");
+    // ALOGV("\tVolumeEnableStereoPosition end()\n");
     return 0;
-}    /* end VolumeEnableStereoPosition */
+} /* end VolumeEnableStereoPosition */
 
 //----------------------------------------------------------------------------
 // BassBoost_getParameter()
@@ -2038,13 +1877,10 @@
 //
 //----------------------------------------------------------------------------
 
-int BassBoost_getParameter(EffectContext *pContext,
-                           uint32_t       paramSize,
-                           void          *pParam,
-                           uint32_t      *pValueSize,
-                           void          *pValue) {
+int BassBoost_getParameter(EffectContext* pContext, uint32_t paramSize, void* pParam,
+                           uint32_t* pValueSize, void* pValue) {
     int status = 0;
-    int32_t *params = (int32_t *)pParam;
+    int32_t* params = (int32_t*)pParam;
 
     ALOGVV("%s start", __func__);
 
@@ -2055,28 +1891,27 @@
     switch (params[0]) {
         case BASSBOOST_PARAM_STRENGTH_SUPPORTED:
             if (*pValueSize != sizeof(uint32_t)) {  // legacy: check equality here.
-                ALOGV("%s BASSBOOST_PARAM_STRENGTH_SUPPORTED invalid *pValueSize %u",
-                        __func__, *pValueSize);
+                ALOGV("%s BASSBOOST_PARAM_STRENGTH_SUPPORTED invalid *pValueSize %u", __func__,
+                      *pValueSize);
                 status = -EINVAL;
                 break;
             }
             // no need to set *pValueSize
 
-            *(uint32_t *)pValue = 1;
-            ALOGVV("%s BASSBOOST_PARAM_STRENGTH_SUPPORTED %u", __func__, *(uint32_t *)pValue);
+            *(uint32_t*)pValue = 1;
+            ALOGVV("%s BASSBOOST_PARAM_STRENGTH_SUPPORTED %u", __func__, *(uint32_t*)pValue);
             break;
 
         case BASSBOOST_PARAM_STRENGTH:
             if (*pValueSize != sizeof(int16_t)) {  // legacy: check equality here.
-                ALOGV("%s BASSBOOST_PARAM_STRENGTH invalid *pValueSize %u",
-                        __func__, *pValueSize);
+                ALOGV("%s BASSBOOST_PARAM_STRENGTH invalid *pValueSize %u", __func__, *pValueSize);
                 status = -EINVAL;
                 break;
             }
             // no need to set *pValueSize
 
-            *(int16_t *)pValue = BassGetStrength(pContext);
-            ALOGVV("%s BASSBOOST_PARAM_STRENGTH %d", __func__, *(int16_t *)pValue);
+            *(int16_t*)pValue = BassGetStrength(pContext);
+            ALOGVV("%s BASSBOOST_PARAM_STRENGTH %d", __func__, *(int16_t*)pValue);
             break;
 
         default:
@@ -2104,13 +1939,10 @@
 //
 //----------------------------------------------------------------------------
 
-int BassBoost_setParameter(EffectContext *pContext,
-                           uint32_t       paramSize,
-                           void          *pParam,
-                           uint32_t       valueSize,
-                           void          *pValue) {
+int BassBoost_setParameter(EffectContext* pContext, uint32_t paramSize, void* pParam,
+                           uint32_t valueSize, void* pValue) {
     int status = 0;
-    int32_t *params = (int32_t *)pParam;
+    int32_t* params = (int32_t*)pParam;
 
     ALOGVV("%s start", __func__);
 
@@ -2126,7 +1958,7 @@
                 break;
             }
 
-            const int16_t strength = *(int16_t *)pValue;
+            const int16_t strength = *(int16_t*)pValue;
             ALOGVV("%s BASSBOOST_PARAM_STRENGTH %d", __func__, strength);
             ALOGVV("%s BASSBOOST_PARAM_STRENGTH Calling BassSetStrength", __func__);
             BassSetStrength(pContext, (int32_t)strength);
@@ -2164,13 +1996,10 @@
 //
 //----------------------------------------------------------------------------
 
-int Virtualizer_getParameter(EffectContext *pContext,
-                             uint32_t       paramSize,
-                             void          *pParam,
-                             uint32_t      *pValueSize,
-                             void          *pValue) {
+int Virtualizer_getParameter(EffectContext* pContext, uint32_t paramSize, void* pParam,
+                             uint32_t* pValueSize, void* pValue) {
     int status = 0;
-    int32_t *params = (int32_t *)pParam;
+    int32_t* params = (int32_t*)pParam;
 
     ALOGVV("%s start", __func__);
 
@@ -2180,47 +2009,47 @@
     }
     switch (params[0]) {
         case VIRTUALIZER_PARAM_STRENGTH_SUPPORTED:
-            if (*pValueSize != sizeof(uint32_t)) { // legacy: check equality here.
-                ALOGV("%s VIRTUALIZER_PARAM_STRENGTH_SUPPORTED invalid *pValueSize %u",
-                        __func__, *pValueSize);
+            if (*pValueSize != sizeof(uint32_t)) {  // legacy: check equality here.
+                ALOGV("%s VIRTUALIZER_PARAM_STRENGTH_SUPPORTED invalid *pValueSize %u", __func__,
+                      *pValueSize);
                 status = -EINVAL;
                 break;
             }
             // no need to set *pValueSize
 
-            *(uint32_t *)pValue = 1;
-            ALOGVV("%s VIRTUALIZER_PARAM_STRENGTH_SUPPORTED %d", __func__, *(uint32_t *)pValue);
+            *(uint32_t*)pValue = 1;
+            ALOGVV("%s VIRTUALIZER_PARAM_STRENGTH_SUPPORTED %d", __func__, *(uint32_t*)pValue);
             break;
 
         case VIRTUALIZER_PARAM_STRENGTH:
-            if (*pValueSize != sizeof(int16_t)) { // legacy: check equality here.
-                ALOGV("%s VIRTUALIZER_PARAM_STRENGTH invalid *pValueSize %u",
-                        __func__, *pValueSize);
+            if (*pValueSize != sizeof(int16_t)) {  // legacy: check equality here.
+                ALOGV("%s VIRTUALIZER_PARAM_STRENGTH invalid *pValueSize %u", __func__,
+                      *pValueSize);
                 status = -EINVAL;
                 break;
             }
             // no need to set *pValueSize
 
-            *(int16_t *)pValue = VirtualizerGetStrength(pContext);
+            *(int16_t*)pValue = VirtualizerGetStrength(pContext);
 
-            ALOGVV("%s VIRTUALIZER_PARAM_STRENGTH %d", __func__, *(int16_t *)pValue);
+            ALOGVV("%s VIRTUALIZER_PARAM_STRENGTH %d", __func__, *(int16_t*)pValue);
             break;
 
         case VIRTUALIZER_PARAM_VIRTUAL_SPEAKER_ANGLES: {
             if (paramSize < 3 * sizeof(int32_t)) {
-                ALOGV("%s VIRTUALIZER_PARAM_SPEAKER_ANGLES invalid paramSize: %u",
-                        __func__, paramSize);
+                ALOGV("%s VIRTUALIZER_PARAM_SPEAKER_ANGLES invalid paramSize: %u", __func__,
+                      paramSize);
                 status = -EINVAL;
                 break;
             }
 
-            const audio_channel_mask_t channelMask = (audio_channel_mask_t) params[1];
-            const audio_devices_t deviceType = (audio_devices_t) params[2];
+            const audio_channel_mask_t channelMask = (audio_channel_mask_t)params[1];
+            const audio_devices_t deviceType = (audio_devices_t)params[2];
             const uint32_t nbChannels = audio_channel_count_from_out_mask(channelMask);
             const uint32_t valueSizeRequired = 3 * nbChannels * sizeof(int32_t);
             if (*pValueSize < valueSizeRequired) {
-                ALOGV("%s VIRTUALIZER_PARAM_SPEAKER_ANGLES invalid *pValueSize %u",
-                        __func__, *pValueSize);
+                ALOGV("%s VIRTUALIZER_PARAM_SPEAKER_ANGLES invalid *pValueSize %u", __func__,
+                      *pValueSize);
                 status = -EINVAL;
                 break;
             }
@@ -2229,23 +2058,23 @@
             // verify the configuration is supported
             status = VirtualizerIsConfigurationSupported(channelMask, deviceType);
             if (status == 0) {
-                ALOGV("%s VIRTUALIZER_PARAM_VIRTUAL_SPEAKER_ANGLES mask=0x%x device=0x%x",
-                        __func__, channelMask, deviceType);
+                ALOGV("%s VIRTUALIZER_PARAM_VIRTUAL_SPEAKER_ANGLES mask=0x%x device=0x%x", __func__,
+                      channelMask, deviceType);
                 // configuration is supported, get the angles
-                VirtualizerGetSpeakerAngles(channelMask, deviceType, (int32_t *)pValue);
+                VirtualizerGetSpeakerAngles(channelMask, deviceType, (int32_t*)pValue);
             }
         } break;
 
         case VIRTUALIZER_PARAM_VIRTUALIZATION_MODE:
-            if (*pValueSize != sizeof(uint32_t)) { // legacy: check equality here.
-                ALOGV("%s VIRTUALIZER_PARAM_VIRTUALIZATION_MODE invalid *pValueSize %u",
-                        __func__, *pValueSize);
+            if (*pValueSize != sizeof(uint32_t)) {  // legacy: check equality here.
+                ALOGV("%s VIRTUALIZER_PARAM_VIRTUALIZATION_MODE invalid *pValueSize %u", __func__,
+                      *pValueSize);
                 status = -EINVAL;
                 break;
             }
             // no need to set *pValueSize
 
-            *(uint32_t *)pValue = (uint32_t) VirtualizerGetVirtualizationMode(pContext);
+            *(uint32_t*)pValue = (uint32_t)VirtualizerGetVirtualizationMode(pContext);
             break;
 
         default:
@@ -2273,17 +2102,14 @@
 //
 //----------------------------------------------------------------------------
 
-int Virtualizer_setParameter(EffectContext *pContext,
-                             uint32_t       paramSize,
-                             void          *pParam,
-                             uint32_t       valueSize,
-                             void          *pValue) {
+int Virtualizer_setParameter(EffectContext* pContext, uint32_t paramSize, void* pParam,
+                             uint32_t valueSize, void* pValue) {
     int status = 0;
-    int32_t *params = (int32_t *)pParam;
+    int32_t* params = (int32_t*)pParam;
 
     ALOGVV("%s start", __func__);
 
-    if (paramSize != sizeof(int32_t)) { // legacy: check equality here.
+    if (paramSize != sizeof(int32_t)) {  // legacy: check equality here.
         ALOGV("%s invalid paramSize: %u", __func__, paramSize);
         return -EINVAL;
     }
@@ -2295,7 +2121,7 @@
                 break;
             }
 
-            const int16_t strength = *(int16_t *)pValue;
+            const int16_t strength = *(int16_t*)pValue;
             ALOGVV("%s VIRTUALIZER_PARAM_STRENGTH %d", __func__, strength);
             ALOGVV("%s VIRTUALIZER_PARAM_STRENGTH Calling VirtualizerSetStrength", __func__);
             VirtualizerSetStrength(pContext, (int32_t)strength);
@@ -2305,16 +2131,16 @@
         case VIRTUALIZER_PARAM_FORCE_VIRTUALIZATION_MODE: {
             if (valueSize < sizeof(int32_t)) {
                 ALOGV("%s VIRTUALIZER_PARAM_FORCE_VIRTUALIZATION_MODE invalid valueSize: %u",
-                        __func__, valueSize);
+                      __func__, valueSize);
                 android_errorWriteLog(0x534e4554, "64478003");
                 status = -EINVAL;
                 break;
             }
 
-            const audio_devices_t deviceType = (audio_devices_t)*(int32_t *)pValue;
+            const audio_devices_t deviceType = (audio_devices_t) * (int32_t*)pValue;
             status = VirtualizerForceVirtualizationMode(pContext, deviceType);
-            ALOGVV("%s VIRTUALIZER_PARAM_FORCE_VIRTUALIZATION_MODE device=%#x result=%d",
-                    __func__, deviceType, status);
+            ALOGVV("%s VIRTUALIZER_PARAM_FORCE_VIRTUALIZATION_MODE device=%#x result=%d", __func__,
+                   deviceType, status);
         } break;
 
         default:
@@ -2347,13 +2173,10 @@
 // Side Effects:
 //
 //----------------------------------------------------------------------------
-int Equalizer_getParameter(EffectContext *pContext,
-                           uint32_t       paramSize,
-                           void          *pParam,
-                           uint32_t      *pValueSize,
-                           void          *pValue) {
+int Equalizer_getParameter(EffectContext* pContext, uint32_t paramSize, void* pParam,
+                           uint32_t* pValueSize, void* pValue) {
     int status = 0;
-    int32_t *params = (int32_t *)pParam;
+    int32_t* params = (int32_t*)pParam;
 
     ALOGVV("%s start", __func__);
 
@@ -2362,211 +2185,210 @@
         return -EINVAL;
     }
     switch (params[0]) {
-    case EQ_PARAM_NUM_BANDS:
-        if (*pValueSize < sizeof(uint16_t)) {
-            ALOGV("%s EQ_PARAM_NUM_BANDS invalid *pValueSize %u", __func__, *pValueSize);
-            status = -EINVAL;
-            break;
-        }
-        *pValueSize = sizeof(uint16_t);
-
-        *(uint16_t *)pValue = (uint16_t)FIVEBAND_NUMBANDS;
-        ALOGVV("%s EQ_PARAM_NUM_BANDS %u", __func__, *(uint16_t *)pValue);
-        break;
-
-    case EQ_PARAM_CUR_PRESET:
-        if (*pValueSize < sizeof(uint16_t)) {
-            ALOGV("%s EQ_PARAM_CUR_PRESET invalid *pValueSize %u", __func__, *pValueSize);
-            status = -EINVAL;
-            break;
-        }
-        *pValueSize = sizeof(uint16_t);
-
-        *(uint16_t *)pValue = (uint16_t)EqualizerGetPreset(pContext);
-        ALOGVV("%s EQ_PARAM_CUR_PRESET %u", __func__, *(uint16_t *)pValue);
-        break;
-
-    case EQ_PARAM_GET_NUM_OF_PRESETS:
-        if (*pValueSize < sizeof(uint16_t)) {
-            ALOGV("%s EQ_PARAM_GET_NUM_OF_PRESETS invalid *pValueSize %u", __func__, *pValueSize);
-            status = -EINVAL;
-            break;
-        }
-        *pValueSize = sizeof(uint16_t);
-
-        *(uint16_t *)pValue = (uint16_t)EqualizerGetNumPresets();
-        ALOGVV("%s EQ_PARAM_GET_NUM_OF_PRESETS %u", __func__, *(uint16_t *)pValue);
-        break;
-
-    case EQ_PARAM_GET_BAND: {
-        if (paramSize < 2 * sizeof(int32_t)) {
-            ALOGV("%s EQ_PARAM_GET_BAND invalid paramSize: %u", __func__, paramSize);
-            status = -EINVAL;
-            break;
-        }
-        if (*pValueSize < sizeof(uint16_t)) {
-            ALOGV("%s EQ_PARAM_GET_BAND invalid *pValueSize %u", __func__, *pValueSize);
-            status = -EINVAL;
-            break;
-        }
-        *pValueSize = sizeof(uint16_t);
-
-        const int32_t frequency = params[1];
-        *(uint16_t *)pValue = (uint16_t)EqualizerGetBand(pContext, frequency);
-        ALOGVV("%s EQ_PARAM_GET_BAND frequency %d, band %u",
-                __func__, frequency, *(uint16_t *)pValue);
-    } break;
-
-    case EQ_PARAM_BAND_LEVEL: {
-        if (paramSize < 2 * sizeof(int32_t)) {
-            ALOGV("%s EQ_PARAM_BAND_LEVEL invalid paramSize %u", __func__, paramSize);
-            status = -EINVAL;
-            break;
-        }
-        if (*pValueSize < sizeof(int16_t)) {
-            ALOGV("%s EQ_PARAM_BAND_LEVEL invalid *pValueSize %u", __func__, *pValueSize);
-            status = -EINVAL;
-            break;
-        }
-        *pValueSize = sizeof(int16_t);
-
-        const int32_t band = params[1];
-        if (band < 0 || band >= FIVEBAND_NUMBANDS) {
-            if (band < 0) {
-                android_errorWriteLog(0x534e4554, "32438598");
-                ALOGW("%s EQ_PARAM_BAND_LEVEL invalid band %d", __func__, band);
+        case EQ_PARAM_NUM_BANDS:
+            if (*pValueSize < sizeof(uint16_t)) {
+                ALOGV("%s EQ_PARAM_NUM_BANDS invalid *pValueSize %u", __func__, *pValueSize);
+                status = -EINVAL;
+                break;
             }
-            status = -EINVAL;
-            break;
-        }
-        *(int16_t *)pValue = (int16_t)EqualizerGetBandLevel(pContext, band);
-        ALOGVV("%s EQ_PARAM_BAND_LEVEL band %d, level %d",
-                __func__, band, *(int16_t *)pValue);
-    } break;
+            *pValueSize = sizeof(uint16_t);
 
-    case EQ_PARAM_LEVEL_RANGE:
-        if (*pValueSize < 2 * sizeof(int16_t)) {
-            ALOGV("%s EQ_PARAM_LEVEL_RANGE invalid *pValueSize %u", __func__, *pValueSize);
-            status = -EINVAL;
+            *(uint16_t*)pValue = (uint16_t)FIVEBAND_NUMBANDS;
+            ALOGVV("%s EQ_PARAM_NUM_BANDS %u", __func__, *(uint16_t*)pValue);
             break;
-        }
-        *pValueSize = 2 * sizeof(int16_t);
 
-        *(int16_t *)pValue = -1500;
-        *((int16_t *)pValue + 1) = 1500;
-        ALOGVV("%s EQ_PARAM_LEVEL_RANGE min %d, max %d",
-                __func__, *(int16_t *)pValue, *((int16_t *)pValue + 1));
-        break;
-
-    case EQ_PARAM_BAND_FREQ_RANGE: {
-        if (paramSize < 2 * sizeof(int32_t)) {
-            ALOGV("%s EQ_PARAM_BAND_FREQ_RANGE invalid paramSize: %u", __func__, paramSize);
-            status = -EINVAL;
-            break;
-        }
-        if (*pValueSize < 2 * sizeof(int32_t)) {
-            ALOGV("%s EQ_PARAM_BAND_FREQ_RANGE invalid *pValueSize %u", __func__, *pValueSize);
-            status = -EINVAL;
-            break;
-        }
-        *pValueSize = 2 * sizeof(int32_t);
-
-        const int32_t band = params[1];
-        if (band < 0 || band >= FIVEBAND_NUMBANDS) {
-            if (band < 0) {
-                android_errorWriteLog(0x534e4554, "32247948");
-                ALOGW("%s EQ_PARAM_BAND_FREQ_RANGE invalid band %d",
-                        __func__, band);
+        case EQ_PARAM_CUR_PRESET:
+            if (*pValueSize < sizeof(uint16_t)) {
+                ALOGV("%s EQ_PARAM_CUR_PRESET invalid *pValueSize %u", __func__, *pValueSize);
+                status = -EINVAL;
+                break;
             }
-            status = -EINVAL;
-            break;
-        }
-        EqualizerGetBandFreqRange(pContext, band, (uint32_t *)pValue, ((uint32_t *)pValue + 1));
-        ALOGVV("%s EQ_PARAM_BAND_FREQ_RANGE band %d, min %d, max %d",
-                __func__, band, *(int32_t *)pValue, *((int32_t *)pValue + 1));
+            *pValueSize = sizeof(uint16_t);
 
-    } break;
-
-    case EQ_PARAM_CENTER_FREQ: {
-        if (paramSize < 2 * sizeof(int32_t)) {
-            ALOGV("%s EQ_PARAM_CENTER_FREQ invalid paramSize: %u", __func__, paramSize);
-            status = -EINVAL;
+            *(uint16_t*)pValue = (uint16_t)EqualizerGetPreset(pContext);
+            ALOGVV("%s EQ_PARAM_CUR_PRESET %u", __func__, *(uint16_t*)pValue);
             break;
-        }
-        if (*pValueSize < sizeof(int32_t)) {
-            ALOGV("%s EQ_PARAM_CENTER_FREQ invalid *pValueSize %u", __func__, *pValueSize);
-            status = -EINVAL;
-            break;
-        }
-        *pValueSize = sizeof(int32_t);
 
-        const int32_t band = params[1];
-        if (band < 0 || band >= FIVEBAND_NUMBANDS) {
-            status = -EINVAL;
-            if (band < 0) {
-                android_errorWriteLog(0x534e4554, "32436341");
-                ALOGW("%s EQ_PARAM_CENTER_FREQ invalid band %d", __func__, band);
+        case EQ_PARAM_GET_NUM_OF_PRESETS:
+            if (*pValueSize < sizeof(uint16_t)) {
+                ALOGV("%s EQ_PARAM_GET_NUM_OF_PRESETS invalid *pValueSize %u", __func__,
+                      *pValueSize);
+                status = -EINVAL;
+                break;
             }
-            break;
-        }
-        *(int32_t *)pValue = EqualizerGetCentreFrequency(pContext, band);
-        ALOGVV("%s EQ_PARAM_CENTER_FREQ band %d, frequency %d",
-                __func__, band, *(int32_t *)pValue);
-    } break;
+            *pValueSize = sizeof(uint16_t);
 
-    case EQ_PARAM_GET_PRESET_NAME: {
-        if (paramSize < 2 * sizeof(int32_t)) {
-            ALOGV("%s EQ_PARAM_PRESET_NAME invalid paramSize: %u", __func__, paramSize);
-            status = -EINVAL;
+            *(uint16_t*)pValue = (uint16_t)EqualizerGetNumPresets();
+            ALOGVV("%s EQ_PARAM_GET_NUM_OF_PRESETS %u", __func__, *(uint16_t*)pValue);
             break;
-        }
-        if (*pValueSize < 1) {
-            android_errorWriteLog(0x534e4554, "37536407");
-            status = -EINVAL;
-            break;
-        }
 
-        const int32_t preset = params[1];
-        if ((preset < 0 && preset != PRESET_CUSTOM) ||  preset >= EqualizerGetNumPresets()) {
-            if (preset < 0) {
-                android_errorWriteLog(0x534e4554, "32448258");
-                ALOGE("%s EQ_PARAM_GET_PRESET_NAME preset %d", __func__, preset);
+        case EQ_PARAM_GET_BAND: {
+            if (paramSize < 2 * sizeof(int32_t)) {
+                ALOGV("%s EQ_PARAM_GET_BAND invalid paramSize: %u", __func__, paramSize);
+                status = -EINVAL;
+                break;
             }
+            if (*pValueSize < sizeof(uint16_t)) {
+                ALOGV("%s EQ_PARAM_GET_BAND invalid *pValueSize %u", __func__, *pValueSize);
+                status = -EINVAL;
+                break;
+            }
+            *pValueSize = sizeof(uint16_t);
+
+            const int32_t frequency = params[1];
+            *(uint16_t*)pValue = (uint16_t)EqualizerGetBand(pContext, frequency);
+            ALOGVV("%s EQ_PARAM_GET_BAND frequency %d, band %u", __func__, frequency,
+                   *(uint16_t*)pValue);
+        } break;
+
+        case EQ_PARAM_BAND_LEVEL: {
+            if (paramSize < 2 * sizeof(int32_t)) {
+                ALOGV("%s EQ_PARAM_BAND_LEVEL invalid paramSize %u", __func__, paramSize);
+                status = -EINVAL;
+                break;
+            }
+            if (*pValueSize < sizeof(int16_t)) {
+                ALOGV("%s EQ_PARAM_BAND_LEVEL invalid *pValueSize %u", __func__, *pValueSize);
+                status = -EINVAL;
+                break;
+            }
+            *pValueSize = sizeof(int16_t);
+
+            const int32_t band = params[1];
+            if (band < 0 || band >= FIVEBAND_NUMBANDS) {
+                if (band < 0) {
+                    android_errorWriteLog(0x534e4554, "32438598");
+                    ALOGW("%s EQ_PARAM_BAND_LEVEL invalid band %d", __func__, band);
+                }
+                status = -EINVAL;
+                break;
+            }
+            *(int16_t*)pValue = (int16_t)EqualizerGetBandLevel(pContext, band);
+            ALOGVV("%s EQ_PARAM_BAND_LEVEL band %d, level %d", __func__, band, *(int16_t*)pValue);
+        } break;
+
+        case EQ_PARAM_LEVEL_RANGE:
+            if (*pValueSize < 2 * sizeof(int16_t)) {
+                ALOGV("%s EQ_PARAM_LEVEL_RANGE invalid *pValueSize %u", __func__, *pValueSize);
+                status = -EINVAL;
+                break;
+            }
+            *pValueSize = 2 * sizeof(int16_t);
+
+            *(int16_t*)pValue = -1500;
+            *((int16_t*)pValue + 1) = 1500;
+            ALOGVV("%s EQ_PARAM_LEVEL_RANGE min %d, max %d", __func__, *(int16_t*)pValue,
+                   *((int16_t*)pValue + 1));
+            break;
+
+        case EQ_PARAM_BAND_FREQ_RANGE: {
+            if (paramSize < 2 * sizeof(int32_t)) {
+                ALOGV("%s EQ_PARAM_BAND_FREQ_RANGE invalid paramSize: %u", __func__, paramSize);
+                status = -EINVAL;
+                break;
+            }
+            if (*pValueSize < 2 * sizeof(int32_t)) {
+                ALOGV("%s EQ_PARAM_BAND_FREQ_RANGE invalid *pValueSize %u", __func__, *pValueSize);
+                status = -EINVAL;
+                break;
+            }
+            *pValueSize = 2 * sizeof(int32_t);
+
+            const int32_t band = params[1];
+            if (band < 0 || band >= FIVEBAND_NUMBANDS) {
+                if (band < 0) {
+                    android_errorWriteLog(0x534e4554, "32247948");
+                    ALOGW("%s EQ_PARAM_BAND_FREQ_RANGE invalid band %d", __func__, band);
+                }
+                status = -EINVAL;
+                break;
+            }
+            EqualizerGetBandFreqRange(pContext, band, (uint32_t*)pValue, ((uint32_t*)pValue + 1));
+            ALOGVV("%s EQ_PARAM_BAND_FREQ_RANGE band %d, min %d, max %d", __func__, band,
+                   *(int32_t*)pValue, *((int32_t*)pValue + 1));
+
+        } break;
+
+        case EQ_PARAM_CENTER_FREQ: {
+            if (paramSize < 2 * sizeof(int32_t)) {
+                ALOGV("%s EQ_PARAM_CENTER_FREQ invalid paramSize: %u", __func__, paramSize);
+                status = -EINVAL;
+                break;
+            }
+            if (*pValueSize < sizeof(int32_t)) {
+                ALOGV("%s EQ_PARAM_CENTER_FREQ invalid *pValueSize %u", __func__, *pValueSize);
+                status = -EINVAL;
+                break;
+            }
+            *pValueSize = sizeof(int32_t);
+
+            const int32_t band = params[1];
+            if (band < 0 || band >= FIVEBAND_NUMBANDS) {
+                status = -EINVAL;
+                if (band < 0) {
+                    android_errorWriteLog(0x534e4554, "32436341");
+                    ALOGW("%s EQ_PARAM_CENTER_FREQ invalid band %d", __func__, band);
+                }
+                break;
+            }
+            *(int32_t*)pValue = EqualizerGetCentreFrequency(pContext, band);
+            ALOGVV("%s EQ_PARAM_CENTER_FREQ band %d, frequency %d", __func__, band,
+                   *(int32_t*)pValue);
+        } break;
+
+        case EQ_PARAM_GET_PRESET_NAME: {
+            if (paramSize < 2 * sizeof(int32_t)) {
+                ALOGV("%s EQ_PARAM_PRESET_NAME invalid paramSize: %u", __func__, paramSize);
+                status = -EINVAL;
+                break;
+            }
+            if (*pValueSize < 1) {
+                android_errorWriteLog(0x534e4554, "37536407");
+                status = -EINVAL;
+                break;
+            }
+
+            const int32_t preset = params[1];
+            if ((preset < 0 && preset != PRESET_CUSTOM) || preset >= EqualizerGetNumPresets()) {
+                if (preset < 0) {
+                    android_errorWriteLog(0x534e4554, "32448258");
+                    ALOGE("%s EQ_PARAM_GET_PRESET_NAME preset %d", __func__, preset);
+                }
+                status = -EINVAL;
+                break;
+            }
+
+            char* const name = (char*)pValue;
+            strncpy(name, EqualizerGetPresetName(preset), *pValueSize - 1);
+            name[*pValueSize - 1] = 0;
+            *pValueSize = strlen(name) + 1;
+            ALOGVV("%s EQ_PARAM_GET_PRESET_NAME preset %d, name %s len %d", __func__, preset, name,
+                   *pValueSize);
+
+        } break;
+
+        case EQ_PARAM_PROPERTIES: {
+            constexpr uint32_t requiredValueSize = (2 + FIVEBAND_NUMBANDS) * sizeof(uint16_t);
+            if (*pValueSize < requiredValueSize) {
+                ALOGV("%s EQ_PARAM_PROPERTIES invalid *pValueSize %u", __func__, *pValueSize);
+                status = -EINVAL;
+                break;
+            }
+            *pValueSize = requiredValueSize;
+
+            int16_t* p = (int16_t*)pValue;
+            ALOGV("%s EQ_PARAM_PROPERTIES", __func__);
+            p[0] = (int16_t)EqualizerGetPreset(pContext);
+            p[1] = (int16_t)FIVEBAND_NUMBANDS;
+            for (int i = 0; i < FIVEBAND_NUMBANDS; i++) {
+                p[2 + i] = (int16_t)EqualizerGetBandLevel(pContext, i);
+            }
+        } break;
+
+        default:
+            ALOGV("%s invalid param %d", __func__, params[0]);
             status = -EINVAL;
             break;
-        }
-
-        char * const name = (char *)pValue;
-        strncpy(name, EqualizerGetPresetName(preset), *pValueSize - 1);
-        name[*pValueSize - 1] = 0;
-        *pValueSize = strlen(name) + 1;
-        ALOGVV("%s EQ_PARAM_GET_PRESET_NAME preset %d, name %s len %d",
-               __func__, preset, name, *pValueSize);
-
-    } break;
-
-    case EQ_PARAM_PROPERTIES: {
-        constexpr uint32_t requiredValueSize = (2 + FIVEBAND_NUMBANDS) * sizeof(uint16_t);
-        if (*pValueSize < requiredValueSize) {
-            ALOGV("%s EQ_PARAM_PROPERTIES invalid *pValueSize %u", __func__, *pValueSize);
-            status = -EINVAL;
-            break;
-        }
-        *pValueSize = requiredValueSize;
-
-        int16_t *p = (int16_t *)pValue;
-        ALOGV("%s EQ_PARAM_PROPERTIES", __func__);
-        p[0] = (int16_t)EqualizerGetPreset(pContext);
-        p[1] = (int16_t)FIVEBAND_NUMBANDS;
-        for (int i = 0; i < FIVEBAND_NUMBANDS; i++) {
-            p[2 + i] = (int16_t)EqualizerGetBandLevel(pContext, i);
-        }
-    } break;
-
-    default:
-        ALOGV("%s invalid param %d", __func__, params[0]);
-        status = -EINVAL;
-        break;
     }
 
     ALOGVV("%s end param: %d, status: %d", __func__, params[0], status);
@@ -2589,13 +2411,10 @@
 // Outputs:
 //
 //----------------------------------------------------------------------------
-int Equalizer_setParameter(EffectContext *pContext,
-                           uint32_t       paramSize,
-                           void          *pParam,
-                           uint32_t       valueSize,
-                           void          *pValue) {
+int Equalizer_setParameter(EffectContext* pContext, uint32_t paramSize, void* pParam,
+                           uint32_t valueSize, void* pValue) {
     int status = 0;
-    int32_t *params = (int32_t *)pParam;
+    int32_t* params = (int32_t*)pParam;
 
     ALOGVV("%s start", __func__);
 
@@ -2604,87 +2423,87 @@
         return -EINVAL;
     }
     switch (params[0]) {
-    case EQ_PARAM_CUR_PRESET: {
-        if (valueSize < sizeof(int16_t)) {
-            ALOGV("%s EQ_PARAM_CUR_PRESET invalid valueSize %u", __func__, valueSize);
-            status = -EINVAL;
-            break;
-        }
-        const int32_t preset = (int32_t)*(uint16_t *)pValue;
-
-        ALOGVV("%s EQ_PARAM_CUR_PRESET %d", __func__, preset);
-        if (preset >= EqualizerGetNumPresets() || preset < 0) {
-            ALOGV("%s EQ_PARAM_CUR_PRESET invalid preset %d", __func__, preset);
-            status = -EINVAL;
-            break;
-        }
-        EqualizerSetPreset(pContext, preset);
-    } break;
-
-    case EQ_PARAM_BAND_LEVEL: {
-        if (paramSize < 2 * sizeof(int32_t)) {
-            ALOGV("%s EQ_PARAM_BAND_LEVEL invalid paramSize: %u", __func__, paramSize);
-            status = -EINVAL;
-            break;
-        }
-        if (valueSize < sizeof(int16_t)) {
-            ALOGV("%s EQ_PARAM_BAND_LEVEL invalid valueSize %u", __func__, valueSize);
-            status = -EINVAL;
-            break;
-        }
-        const int32_t band =  params[1];
-        const int32_t level = (int32_t)*(int16_t *)pValue;
-        ALOGVV("%s EQ_PARAM_BAND_LEVEL band %d, level %d", __func__, band, level);
-        if (band < 0 || band >= FIVEBAND_NUMBANDS) {
-            if (band < 0) {
-                android_errorWriteLog(0x534e4554, "32095626");
-                ALOGE("%s EQ_PARAM_BAND_LEVEL invalid band %d", __func__, band);
-            }
-            status = -EINVAL;
-            break;
-        }
-        EqualizerSetBandLevel(pContext, band, level);
-    } break;
-
-    case EQ_PARAM_PROPERTIES: {
-        ALOGVV("%s EQ_PARAM_PROPERTIES", __func__);
-        if (valueSize < sizeof(int16_t)) {
-            ALOGV("%s EQ_PARAM_PROPERTIES invalid valueSize %u", __func__, valueSize);
-            status = -EINVAL;
-            break;
-        }
-        int16_t *p = (int16_t *)pValue;
-        if ((int)p[0] >= EqualizerGetNumPresets()) {
-            ALOGV("%s EQ_PARAM_PROPERTIES invalid preset %d", __func__, (int)p[0]);
-            status = -EINVAL;
-            break;
-        }
-        if (p[0] >= 0) {
-            EqualizerSetPreset(pContext, (int)p[0]);
-        } else {
-            constexpr uint32_t valueSizeRequired = (2 + FIVEBAND_NUMBANDS) * sizeof(int16_t);
-            if (valueSize < valueSizeRequired) {
-              android_errorWriteLog(0x534e4554, "37563371");
-              ALOGE("%s EQ_PARAM_PROPERTIES invalid valueSize %u < %u",
-                      __func__, valueSize, valueSizeRequired);
-              status = -EINVAL;
-              break;
-            }
-            if ((int)p[1] != FIVEBAND_NUMBANDS) {
-                ALOGV("%s EQ_PARAM_PROPERTIES invalid bands %d", __func__, (int)p[1]);
+        case EQ_PARAM_CUR_PRESET: {
+            if (valueSize < sizeof(int16_t)) {
+                ALOGV("%s EQ_PARAM_CUR_PRESET invalid valueSize %u", __func__, valueSize);
                 status = -EINVAL;
                 break;
             }
-            for (int i = 0; i < FIVEBAND_NUMBANDS; i++) {
-                EqualizerSetBandLevel(pContext, i, (int)p[2 + i]);
-            }
-        }
-    } break;
+            const int32_t preset = (int32_t) * (uint16_t*)pValue;
 
-    default:
-        ALOGV("%s invalid param %d", __func__, params[0]);
-        status = -EINVAL;
-        break;
+            ALOGVV("%s EQ_PARAM_CUR_PRESET %d", __func__, preset);
+            if (preset >= EqualizerGetNumPresets() || preset < 0) {
+                ALOGV("%s EQ_PARAM_CUR_PRESET invalid preset %d", __func__, preset);
+                status = -EINVAL;
+                break;
+            }
+            EqualizerSetPreset(pContext, preset);
+        } break;
+
+        case EQ_PARAM_BAND_LEVEL: {
+            if (paramSize < 2 * sizeof(int32_t)) {
+                ALOGV("%s EQ_PARAM_BAND_LEVEL invalid paramSize: %u", __func__, paramSize);
+                status = -EINVAL;
+                break;
+            }
+            if (valueSize < sizeof(int16_t)) {
+                ALOGV("%s EQ_PARAM_BAND_LEVEL invalid valueSize %u", __func__, valueSize);
+                status = -EINVAL;
+                break;
+            }
+            const int32_t band = params[1];
+            const int32_t level = (int32_t) * (int16_t*)pValue;
+            ALOGVV("%s EQ_PARAM_BAND_LEVEL band %d, level %d", __func__, band, level);
+            if (band < 0 || band >= FIVEBAND_NUMBANDS) {
+                if (band < 0) {
+                    android_errorWriteLog(0x534e4554, "32095626");
+                    ALOGE("%s EQ_PARAM_BAND_LEVEL invalid band %d", __func__, band);
+                }
+                status = -EINVAL;
+                break;
+            }
+            EqualizerSetBandLevel(pContext, band, level);
+        } break;
+
+        case EQ_PARAM_PROPERTIES: {
+            ALOGVV("%s EQ_PARAM_PROPERTIES", __func__);
+            if (valueSize < sizeof(int16_t)) {
+                ALOGV("%s EQ_PARAM_PROPERTIES invalid valueSize %u", __func__, valueSize);
+                status = -EINVAL;
+                break;
+            }
+            int16_t* p = (int16_t*)pValue;
+            if ((int)p[0] >= EqualizerGetNumPresets()) {
+                ALOGV("%s EQ_PARAM_PROPERTIES invalid preset %d", __func__, (int)p[0]);
+                status = -EINVAL;
+                break;
+            }
+            if (p[0] >= 0) {
+                EqualizerSetPreset(pContext, (int)p[0]);
+            } else {
+                constexpr uint32_t valueSizeRequired = (2 + FIVEBAND_NUMBANDS) * sizeof(int16_t);
+                if (valueSize < valueSizeRequired) {
+                    android_errorWriteLog(0x534e4554, "37563371");
+                    ALOGE("%s EQ_PARAM_PROPERTIES invalid valueSize %u < %u", __func__, valueSize,
+                          valueSizeRequired);
+                    status = -EINVAL;
+                    break;
+                }
+                if ((int)p[1] != FIVEBAND_NUMBANDS) {
+                    ALOGV("%s EQ_PARAM_PROPERTIES invalid bands %d", __func__, (int)p[1]);
+                    status = -EINVAL;
+                    break;
+                }
+                for (int i = 0; i < FIVEBAND_NUMBANDS; i++) {
+                    EqualizerSetBandLevel(pContext, i, (int)p[2 + i]);
+                }
+            }
+        } break;
+
+        default:
+            ALOGV("%s invalid param %d", __func__, params[0]);
+            status = -EINVAL;
+            break;
     }
 
     ALOGVV("%s end param: %d, status: %d", __func__, params[0], status);
@@ -2712,13 +2531,10 @@
 //
 //----------------------------------------------------------------------------
 
-int Volume_getParameter(EffectContext *pContext,
-                        uint32_t       paramSize,
-                        void          *pParam,
-                        uint32_t      *pValueSize,
-                        void          *pValue) {
+int Volume_getParameter(EffectContext* pContext, uint32_t paramSize, void* pParam,
+                        uint32_t* pValueSize, void* pValue) {
     int status = 0;
-    int32_t *params = (int32_t *)pParam;
+    int32_t* params = (int32_t*)pParam;
 
     ALOGVV("%s start", __func__);
 
@@ -2728,19 +2544,19 @@
     }
     switch (params[0]) {
         case VOLUME_PARAM_LEVEL:
-            if (*pValueSize != sizeof(int16_t)) { // legacy: check equality here.
+            if (*pValueSize != sizeof(int16_t)) {  // legacy: check equality here.
                 ALOGV("%s VOLUME_PARAM_LEVEL invalid *pValueSize %u", __func__, *pValueSize);
                 status = -EINVAL;
                 break;
             }
             // no need to set *pValueSize
 
-            status = VolumeGetVolumeLevel(pContext, (int16_t *)(pValue));
-            ALOGVV("%s VOLUME_PARAM_LEVEL %d", __func__, *(int16_t *)pValue);
+            status = VolumeGetVolumeLevel(pContext, (int16_t*)(pValue));
+            ALOGVV("%s VOLUME_PARAM_LEVEL %d", __func__, *(int16_t*)pValue);
             break;
 
         case VOLUME_PARAM_MAXLEVEL:
-            if (*pValueSize != sizeof(int16_t)) { // legacy: check equality here.
+            if (*pValueSize != sizeof(int16_t)) {  // legacy: check equality here.
                 ALOGV("%s VOLUME_PARAM_MAXLEVEL invalid *pValueSize %u", __func__, *pValueSize);
                 status = -EINVAL;
                 break;
@@ -2748,21 +2564,21 @@
             // no need to set *pValueSize
 
             // in millibel
-            *(int16_t *)pValue = 0;
-            ALOGVV("%s VOLUME_PARAM_MAXLEVEL %d", __func__, *(int16_t *)pValue);
+            *(int16_t*)pValue = 0;
+            ALOGVV("%s VOLUME_PARAM_MAXLEVEL %d", __func__, *(int16_t*)pValue);
             break;
 
         case VOLUME_PARAM_STEREOPOSITION:
-            if (*pValueSize != sizeof(int16_t)) { // legacy: check equality here.
-                ALOGV("%s VOLUME_PARAM_STEREOPOSITION invalid *pValueSize %u",
-                        __func__, *pValueSize);
+            if (*pValueSize != sizeof(int16_t)) {  // legacy: check equality here.
+                ALOGV("%s VOLUME_PARAM_STEREOPOSITION invalid *pValueSize %u", __func__,
+                      *pValueSize);
                 status = -EINVAL;
                 break;
             }
             // no need to set *pValueSize
 
-            VolumeGetStereoPosition(pContext, (int16_t *)pValue);
-            ALOGVV("%s VOLUME_PARAM_STEREOPOSITION %d", __func__, *(int16_t *)pValue);
+            VolumeGetStereoPosition(pContext, (int16_t*)pValue);
+            ALOGVV("%s VOLUME_PARAM_STEREOPOSITION %d", __func__, *(int16_t*)pValue);
             break;
 
         case VOLUME_PARAM_MUTE:
@@ -2773,21 +2589,21 @@
             }
             *pValueSize = sizeof(uint32_t);
 
-            status = VolumeGetMute(pContext, (uint32_t *)pValue);
-            ALOGV("%s VOLUME_PARAM_MUTE %u", __func__, *(uint32_t *)pValue);
+            status = VolumeGetMute(pContext, (uint32_t*)pValue);
+            ALOGV("%s VOLUME_PARAM_MUTE %u", __func__, *(uint32_t*)pValue);
             break;
 
         case VOLUME_PARAM_ENABLESTEREOPOSITION:
             if (*pValueSize < sizeof(int32_t)) {
-                ALOGV("%s VOLUME_PARAM_ENABLESTEREOPOSITION invalid *pValueSize %u",
-                        __func__, *pValueSize);
+                ALOGV("%s VOLUME_PARAM_ENABLESTEREOPOSITION invalid *pValueSize %u", __func__,
+                      *pValueSize);
                 status = -EINVAL;
                 break;
             }
             *pValueSize = sizeof(int32_t);
 
-            *(int32_t *)pValue = pContext->pBundledContext->bStereoPositionEnabled;
-            ALOGVV("%s VOLUME_PARAM_ENABLESTEREOPOSITION %d", __func__, *(int32_t *)pValue);
+            *(int32_t*)pValue = pContext->pBundledContext->bStereoPositionEnabled;
+            ALOGVV("%s VOLUME_PARAM_ENABLESTEREOPOSITION %d", __func__, *(int32_t*)pValue);
 
             break;
 
@@ -2816,13 +2632,10 @@
 //
 //----------------------------------------------------------------------------
 
-int Volume_setParameter(EffectContext *pContext,
-                        uint32_t       paramSize,
-                        void          *pParam,
-                        uint32_t       valueSize,
-                        void          *pValue) {
+int Volume_setParameter(EffectContext* pContext, uint32_t paramSize, void* pParam,
+                        uint32_t valueSize, void* pValue) {
     int status = 0;
-    int32_t *params = (int32_t *)pParam;
+    int32_t* params = (int32_t*)pParam;
 
     ALOGVV("%s start", __func__);
 
@@ -2838,7 +2651,7 @@
                 break;
             }
 
-            const int16_t level = *(int16_t *)pValue;
+            const int16_t level = *(int16_t*)pValue;
             ALOGVV("%s VOLUME_PARAM_LEVEL %d", __func__, level);
             ALOGVV("%s VOLUME_PARAM_LEVEL Calling VolumeSetVolumeLevel", __func__);
             status = VolumeSetVolumeLevel(pContext, level);
@@ -2853,7 +2666,7 @@
                 break;
             }
 
-            const uint32_t mute = *(uint32_t *)pValue;
+            const uint32_t mute = *(uint32_t*)pValue;
             ALOGVV("%s VOLUME_PARAM_MUTE %d", __func__, mute);
             ALOGVV("%s VOLUME_PARAM_MUTE Calling VolumeSetMute", __func__);
             status = VolumeSetMute(pContext, mute);
@@ -2862,15 +2675,16 @@
 
         case VOLUME_PARAM_ENABLESTEREOPOSITION: {
             if (valueSize < sizeof(uint32_t)) {
-                ALOGV("%s VOLUME_PARAM_ENABLESTEREOPOSITION invalid valueSize %u",
-                        __func__, valueSize);
+                ALOGV("%s VOLUME_PARAM_ENABLESTEREOPOSITION invalid valueSize %u", __func__,
+                      valueSize);
                 status = -EINVAL;
                 break;
             }
 
-            const uint32_t positionEnabled = *(uint32_t *)pValue;
+            const uint32_t positionEnabled = *(uint32_t*)pValue;
             status = VolumeEnableStereoPosition(pContext, positionEnabled)
-                    ?: VolumeSetStereoPosition(pContext, pContext->pBundledContext->positionSaved);
+                             ?: VolumeSetStereoPosition(pContext,
+                                                        pContext->pBundledContext->positionSaved);
             ALOGVV("%s VOLUME_PARAM_ENABLESTEREOPOSITION called", __func__);
         } break;
 
@@ -2881,13 +2695,11 @@
                 break;
             }
 
-            const int16_t position = *(int16_t *)pValue;
+            const int16_t position = *(int16_t*)pValue;
             ALOGVV("%s VOLUME_PARAM_STEREOPOSITION %d", __func__, position);
-            ALOGVV("%s VOLUME_PARAM_STEREOPOSITION Calling VolumeSetStereoPosition",
-                    __func__);
+            ALOGVV("%s VOLUME_PARAM_STEREOPOSITION Calling VolumeSetStereoPosition", __func__);
             status = VolumeSetStereoPosition(pContext, position);
-            ALOGVV("%s VOLUME_PARAM_STEREOPOSITION Called VolumeSetStereoPosition",
-                    __func__);
+            ALOGVV("%s VOLUME_PARAM_STEREOPOSITION Called VolumeSetStereoPosition", __func__);
         } break;
 
         default:
@@ -2912,18 +2724,15 @@
  *  Remarks     :
  ****************************************************************************************/
 
-LVM_INT16 LVC_ToDB_s32Tos16(LVM_INT32 Lin_fix)
-{
-    LVM_INT16   db_fix;
-    LVM_INT16   Shift;
-    LVM_INT16   SmallRemainder;
-    LVM_UINT32  Remainder = (LVM_UINT32)Lin_fix;
+LVM_INT16 LVC_ToDB_s32Tos16(LVM_INT32 Lin_fix) {
+    LVM_INT16 db_fix;
+    LVM_INT16 Shift;
+    LVM_INT16 SmallRemainder;
+    LVM_UINT32 Remainder = (LVM_UINT32)Lin_fix;
 
     /* Count leading bits, 1 cycle in assembly*/
-    for (Shift = 0; Shift<32; Shift++)
-    {
-        if ((Remainder & 0x80000000U)!=0)
-        {
+    for (Shift = 0; Shift < 32; Shift++) {
+        if ((Remainder & 0x80000000U) != 0) {
             break;
         }
         Remainder = Remainder << 1;
@@ -2934,9 +2743,9 @@
      *
      * dB = -96 * Shift + 16 * (8 * Remainder - 2 * Remainder^2)
      */
-    db_fix    = (LVM_INT16)(-96 * Shift);               /* Six dB steps in Q11.4 format*/
+    db_fix = (LVM_INT16)(-96 * Shift); /* Six dB steps in Q11.4 format*/
     SmallRemainder = (LVM_INT16)((Remainder & 0x7fffffff) >> 24);
-    db_fix = (LVM_INT16)(db_fix + SmallRemainder );
+    db_fix = (LVM_INT16)(db_fix + SmallRemainder);
     SmallRemainder = (LVM_INT16)(SmallRemainder * SmallRemainder);
     db_fix = (LVM_INT16)(db_fix - (LVM_INT16)((LVM_UINT16)SmallRemainder >> 9));
 
@@ -2960,11 +2769,10 @@
 //
 //----------------------------------------------------------------------------
 
-int Effect_setEnabled(EffectContext *pContext, bool enabled)
-{
-    ALOGV("%s effectType %d, enabled %d, currently enabled %d", __func__,
-            pContext->EffectType, enabled, pContext->pBundledContext->NumberEffectsEnabled);
-    int &effectInDrain = pContext->pBundledContext->effectInDrain;
+int Effect_setEnabled(EffectContext* pContext, bool enabled) {
+    ALOGV("%s effectType %d, enabled %d, currently enabled %d", __func__, pContext->EffectType,
+          enabled, pContext->pBundledContext->NumberEffectsEnabled);
+    int& effectInDrain = pContext->pBundledContext->effectInDrain;
     if (enabled) {
         // Bass boost or Virtualizer can be temporarily disabled if playing over device speaker due
         // to their nature.
@@ -2972,15 +2780,15 @@
         switch (pContext->EffectType) {
             case LVM_BASS_BOOST:
                 if (pContext->pBundledContext->bBassEnabled == LVM_TRUE) {
-                     ALOGV("\tEffect_setEnabled() LVM_BASS_BOOST is already enabled");
-                     return -EINVAL;
+                    ALOGV("\tEffect_setEnabled() LVM_BASS_BOOST is already enabled");
+                    return -EINVAL;
                 }
-                if(pContext->pBundledContext->SamplesToExitCountBb <= 0){
+                if (pContext->pBundledContext->SamplesToExitCountBb <= 0) {
                     pContext->pBundledContext->NumberEffectsEnabled++;
                 }
                 effectInDrain &= ~(1 << LVM_BASS_BOOST);
                 pContext->pBundledContext->SamplesToExitCountBb =
-                     (LVM_INT32)(pContext->pBundledContext->SamplesPerSecond*0.1);
+                        (LVM_INT32)(pContext->pBundledContext->SamplesPerSecond * 0.1);
                 pContext->pBundledContext->bBassEnabled = LVM_TRUE;
                 tempDisabled = pContext->pBundledContext->bBassTempDisabled;
                 break;
@@ -2989,12 +2797,12 @@
                     ALOGV("\tEffect_setEnabled() LVM_EQUALIZER is already enabled");
                     return -EINVAL;
                 }
-                if(pContext->pBundledContext->SamplesToExitCountEq <= 0){
+                if (pContext->pBundledContext->SamplesToExitCountEq <= 0) {
                     pContext->pBundledContext->NumberEffectsEnabled++;
                 }
                 effectInDrain &= ~(1 << LVM_EQUALIZER);
                 pContext->pBundledContext->SamplesToExitCountEq =
-                     (LVM_INT32)(pContext->pBundledContext->SamplesPerSecond*0.1);
+                        (LVM_INT32)(pContext->pBundledContext->SamplesPerSecond * 0.1);
                 pContext->pBundledContext->bEqualizerEnabled = LVM_TRUE;
                 break;
             case LVM_VIRTUALIZER:
@@ -3002,12 +2810,12 @@
                     ALOGV("\tEffect_setEnabled() LVM_VIRTUALIZER is already enabled");
                     return -EINVAL;
                 }
-                if(pContext->pBundledContext->SamplesToExitCountVirt <= 0){
+                if (pContext->pBundledContext->SamplesToExitCountVirt <= 0) {
                     pContext->pBundledContext->NumberEffectsEnabled++;
                 }
                 effectInDrain &= ~(1 << LVM_VIRTUALIZER);
                 pContext->pBundledContext->SamplesToExitCountVirt =
-                     (LVM_INT32)(pContext->pBundledContext->SamplesPerSecond*0.1);
+                        (LVM_INT32)(pContext->pBundledContext->SamplesPerSecond * 0.1);
                 pContext->pBundledContext->bVirtualizerEnabled = LVM_TRUE;
                 tempDisabled = pContext->pBundledContext->bVirtualizerTempDisabled;
                 break;
@@ -3084,41 +2892,39 @@
 //
 //-----------------------------------------------------------------------
 
-int16_t LVC_Convert_VolToDb(uint32_t vol){
-    int16_t  dB;
+int16_t LVC_Convert_VolToDb(uint32_t vol) {
+    int16_t dB;
 
-    dB = LVC_ToDB_s32Tos16(vol <<7);
-    dB = (dB +8)>>4;
-    dB = (dB <-96) ? -96 : dB ;
+    dB = LVC_ToDB_s32Tos16(vol << 7);
+    dB = (dB + 8) >> 4;
+    dB = (dB < -96) ? -96 : dB;
 
     return dB;
 }
 
-} // namespace
-} // namespace
+}  // namespace
+}  // namespace android
 
 extern "C" {
 /* Effect Control Interface Implementation: Process */
-int Effect_process(effect_handle_t     self,
-                              audio_buffer_t         *inBuffer,
-                              audio_buffer_t         *outBuffer){
-    EffectContext * pContext = (EffectContext *) self;
-    int    status = 0;
-    int    processStatus = 0;
+int Effect_process(effect_handle_t self, audio_buffer_t* inBuffer, audio_buffer_t* outBuffer) {
+    EffectContext* pContext = (EffectContext*)self;
+    int status = 0;
+    int processStatus = 0;
     const int NrChannels = audio_channel_count_from_out_mask(pContext->config.inputCfg.channels);
 
-//ALOGV("\tEffect_process Start : Enabled = %d     Called = %d (%8d %8d %8d)",
-//pContext->pBundledContext->NumberEffectsEnabled,pContext->pBundledContext->NumberEffectsCalled,
-//    pContext->pBundledContext->SamplesToExitCountBb,
-//    pContext->pBundledContext->SamplesToExitCountVirt,
-//    pContext->pBundledContext->SamplesToExitCountEq);
+    // ALOGV("\tEffect_process Start : Enabled = %d     Called = %d (%8d %8d %8d)",
+    // pContext->pBundledContext->NumberEffectsEnabled,pContext->pBundledContext->NumberEffectsCalled,
+    //    pContext->pBundledContext->SamplesToExitCountBb,
+    //    pContext->pBundledContext->SamplesToExitCountVirt,
+    //    pContext->pBundledContext->SamplesToExitCountEq);
 
-    if (pContext == NULL){
+    if (pContext == NULL) {
         ALOGV("\tLVM_ERROR : Effect_process() ERROR pContext == NULL");
         return -EINVAL;
     }
 
-    //if(pContext->EffectType == LVM_BASS_BOOST){
+    // if(pContext->EffectType == LVM_BASS_BOOST){
     //  ALOGV("\tEffect_process: Effect type is BASS_BOOST");
     //}else if(pContext->EffectType == LVM_EQUALIZER){
     //  ALOGV("\tEffect_process: Effect type is LVM_EQUALIZER");
@@ -3126,15 +2932,14 @@
     //  ALOGV("\tEffect_process: Effect type is LVM_VIRTUALIZER");
     //}
 
-    if (inBuffer == NULL  || inBuffer->raw == NULL  ||
-            outBuffer == NULL || outBuffer->raw == NULL ||
-            inBuffer->frameCount != outBuffer->frameCount){
+    if (inBuffer == NULL || inBuffer->raw == NULL || outBuffer == NULL || outBuffer->raw == NULL ||
+        inBuffer->frameCount != outBuffer->frameCount) {
         ALOGV("\tLVM_ERROR : Effect_process() ERROR NULL INPUT POINTER OR FRAME COUNT IS WRONG");
         return -EINVAL;
     }
 
-    int &effectProcessCalled = pContext->pBundledContext->effectProcessCalled;
-    int &effectInDrain = pContext->pBundledContext->effectInDrain;
+    int& effectProcessCalled = pContext->pBundledContext->effectProcessCalled;
+    int& effectInDrain = pContext->pBundledContext->effectInDrain;
     if ((effectProcessCalled & 1 << pContext->EffectType) != 0) {
         ALOGW("Effect %d already called", pContext->EffectType);
         const int undrainedEffects = effectInDrain & ~effectProcessCalled;
@@ -3164,12 +2969,12 @@
     }
     effectProcessCalled |= 1 << pContext->EffectType;
 
-    if ((pContext->pBundledContext->bBassEnabled == LVM_FALSE)&&
-        (pContext->EffectType == LVM_BASS_BOOST)){
-        //ALOGV("\tEffect_process() LVM_BASS_BOOST Effect is not enabled");
-        if(pContext->pBundledContext->SamplesToExitCountBb > 0){
+    if ((pContext->pBundledContext->bBassEnabled == LVM_FALSE) &&
+        (pContext->EffectType == LVM_BASS_BOOST)) {
+        // ALOGV("\tEffect_process() LVM_BASS_BOOST Effect is not enabled");
+        if (pContext->pBundledContext->SamplesToExitCountBb > 0) {
             pContext->pBundledContext->SamplesToExitCountBb -= outBuffer->frameCount * NrChannels;
-            //ALOGV("\tEffect_process: Waiting to turn off BASS_BOOST, %d samples left",
+            // ALOGV("\tEffect_process: Waiting to turn off BASS_BOOST, %d samples left",
             //    pContext->pBundledContext->SamplesToExitCountBb);
         }
         if (pContext->pBundledContext->SamplesToExitCountBb <= 0) {
@@ -3181,21 +2986,21 @@
             ALOGV("\tEffect_process() this is the last frame for LVM_BASS_BOOST");
         }
     }
-    if ((pContext->pBundledContext->bVolumeEnabled == LVM_FALSE)&&
-        (pContext->EffectType == LVM_VOLUME)){
-        //ALOGV("\tEffect_process() LVM_VOLUME Effect is not enabled");
+    if ((pContext->pBundledContext->bVolumeEnabled == LVM_FALSE) &&
+        (pContext->EffectType == LVM_VOLUME)) {
+        // ALOGV("\tEffect_process() LVM_VOLUME Effect is not enabled");
         status = -ENODATA;
         if ((effectInDrain & 1 << LVM_VOLUME) != 0) {
             pContext->pBundledContext->NumberEffectsEnabled--;
             effectInDrain &= ~(1 << LVM_VOLUME);
         }
     }
-    if ((pContext->pBundledContext->bEqualizerEnabled == LVM_FALSE)&&
-        (pContext->EffectType == LVM_EQUALIZER)){
-        //ALOGV("\tEffect_process() LVM_EQUALIZER Effect is not enabled");
-        if(pContext->pBundledContext->SamplesToExitCountEq > 0){
+    if ((pContext->pBundledContext->bEqualizerEnabled == LVM_FALSE) &&
+        (pContext->EffectType == LVM_EQUALIZER)) {
+        // ALOGV("\tEffect_process() LVM_EQUALIZER Effect is not enabled");
+        if (pContext->pBundledContext->SamplesToExitCountEq > 0) {
             pContext->pBundledContext->SamplesToExitCountEq -= outBuffer->frameCount * NrChannels;
-            //ALOGV("\tEffect_process: Waiting to turn off EQUALIZER, %d samples left",
+            // ALOGV("\tEffect_process: Waiting to turn off EQUALIZER, %d samples left",
             //    pContext->pBundledContext->SamplesToExitCountEq);
         }
         if (pContext->pBundledContext->SamplesToExitCountEq <= 0) {
@@ -3207,13 +3012,12 @@
             ALOGV("\tEffect_process() this is the last frame for LVM_EQUALIZER");
         }
     }
-    if ((pContext->pBundledContext->bVirtualizerEnabled == LVM_FALSE)&&
-        (pContext->EffectType == LVM_VIRTUALIZER)){
-        //ALOGV("\tEffect_process() LVM_VIRTUALIZER Effect is not enabled");
-        if(pContext->pBundledContext->SamplesToExitCountVirt > 0){
-            pContext->pBundledContext->SamplesToExitCountVirt -=
-                outBuffer->frameCount * NrChannels;
-            //ALOGV("\tEffect_process: Waiting for to turn off VIRTUALIZER, %d samples left",
+    if ((pContext->pBundledContext->bVirtualizerEnabled == LVM_FALSE) &&
+        (pContext->EffectType == LVM_VIRTUALIZER)) {
+        // ALOGV("\tEffect_process() LVM_VIRTUALIZER Effect is not enabled");
+        if (pContext->pBundledContext->SamplesToExitCountVirt > 0) {
+            pContext->pBundledContext->SamplesToExitCountVirt -= outBuffer->frameCount * NrChannels;
+            // ALOGV("\tEffect_process: Waiting for to turn off VIRTUALIZER, %d samples left",
             //    pContext->pBundledContext->SamplesToExitCountVirt);
         }
         if (pContext->pBundledContext->SamplesToExitCountVirt <= 0) {
@@ -3226,37 +3030,34 @@
         }
     }
 
-    if(status != -ENODATA){
+    if (status != -ENODATA) {
         pContext->pBundledContext->NumberEffectsCalled++;
     }
 
     if (pContext->pBundledContext->NumberEffectsCalled >=
-            pContext->pBundledContext->NumberEffectsEnabled) {
-
+        pContext->pBundledContext->NumberEffectsEnabled) {
         // We expect the # effects called to be equal to # effects enabled in sequence (including
         // draining effects).  Warn if this is not the case due to inconsistent calls.
         ALOGW_IF(pContext->pBundledContext->NumberEffectsCalled >
-                pContext->pBundledContext->NumberEffectsEnabled,
-                "%s Number of effects called %d is greater than number of effects enabled %d",
-                __func__, pContext->pBundledContext->NumberEffectsCalled,
-                pContext->pBundledContext->NumberEffectsEnabled);
-        effectProcessCalled = 0; // reset our consistency check.
+                         pContext->pBundledContext->NumberEffectsEnabled,
+                 "%s Number of effects called %d is greater than number of effects enabled %d",
+                 __func__, pContext->pBundledContext->NumberEffectsCalled,
+                 pContext->pBundledContext->NumberEffectsEnabled);
+        effectProcessCalled = 0;  // reset our consistency check.
 
-        //ALOGV("\tEffect_process     Calling process with %d effects enabled, %d called: Effect %d",
-        //pContext->pBundledContext->NumberEffectsEnabled,
-        //pContext->pBundledContext->NumberEffectsCalled, pContext->EffectType);
+        // ALOGV("\tEffect_process     Calling process with %d effects enabled, %d called: Effect
+        // %d", pContext->pBundledContext->NumberEffectsEnabled,
+        // pContext->pBundledContext->NumberEffectsCalled, pContext->EffectType);
 
-        if (status == -ENODATA){
+        if (status == -ENODATA) {
             ALOGV("\tEffect_process() processing last frame");
         }
         pContext->pBundledContext->NumberEffectsCalled = 0;
         /* Process all the available frames, block processing is
            handled internally by the LVM bundle */
-        processStatus = android::LvmBundle_process(inBuffer->f32,
-                                                   outBuffer->f32,
-                                                   outBuffer->frameCount,
-                                                   pContext);
-        if (processStatus != 0){
+        processStatus = android::LvmBundle_process(inBuffer->f32, outBuffer->f32,
+                                                   outBuffer->frameCount, pContext);
+        if (processStatus != 0) {
             ALOGV("\tLVM_ERROR : LvmBundle_process returned error %d", processStatus);
             if (status == 0) {
                 status = processStatus;
@@ -3264,133 +3065,126 @@
             return status;
         }
     } else {
-        //ALOGV("\tEffect_process Not Calling process with %d effects enabled, %d called: Effect %d",
-        //pContext->pBundledContext->NumberEffectsEnabled,
-        //pContext->pBundledContext->NumberEffectsCalled, pContext->EffectType);
+        // ALOGV("\tEffect_process Not Calling process with %d effects enabled, %d called: Effect
+        // %d", pContext->pBundledContext->NumberEffectsEnabled,
+        // pContext->pBundledContext->NumberEffectsCalled, pContext->EffectType);
 
         if (pContext->config.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE) {
             for (size_t i = 0; i < outBuffer->frameCount * NrChannels; ++i) {
                 outBuffer->f32[i] += inBuffer->f32[i];
             }
         } else if (outBuffer->raw != inBuffer->raw) {
-            memcpy(outBuffer->raw,
-                    inBuffer->raw,
-                    outBuffer->frameCount * sizeof(effect_buffer_t) * FCC_2);
+            memcpy(outBuffer->raw, inBuffer->raw,
+                   outBuffer->frameCount * sizeof(effect_buffer_t) * FCC_2);
         }
     }
 
     return status;
-}   /* end Effect_process */
+} /* end Effect_process */
 
 // The value offset of an effect parameter is computed by rounding up
 // the parameter size to the next 32 bit alignment.
-static inline uint32_t computeParamVOffset(const effect_param_t *p) {
-    return ((p->psize + sizeof(int32_t) - 1) / sizeof(int32_t)) *
-            sizeof(int32_t);
+static inline uint32_t computeParamVOffset(const effect_param_t* p) {
+    return ((p->psize + sizeof(int32_t) - 1) / sizeof(int32_t)) * sizeof(int32_t);
 }
 
 /* Effect Control Interface Implementation: Command */
-int Effect_command(effect_handle_t  self,
-                              uint32_t            cmdCode,
-                              uint32_t            cmdSize,
-                              void                *pCmdData,
-                              uint32_t            *replySize,
-                              void                *pReplyData){
-    EffectContext * pContext = (EffectContext *) self;
+int Effect_command(effect_handle_t self, uint32_t cmdCode, uint32_t cmdSize, void* pCmdData,
+                   uint32_t* replySize, void* pReplyData) {
+    EffectContext* pContext = (EffectContext*)self;
 
-    //ALOGV("\t\nEffect_command start");
+    // ALOGV("\t\nEffect_command start");
 
-    if(pContext->EffectType == LVM_BASS_BOOST){
-        //ALOGV("\tEffect_command setting command for LVM_BASS_BOOST");
+    if (pContext->EffectType == LVM_BASS_BOOST) {
+        // ALOGV("\tEffect_command setting command for LVM_BASS_BOOST");
     }
-    if(pContext->EffectType == LVM_VIRTUALIZER){
-        //ALOGV("\tEffect_command setting command for LVM_VIRTUALIZER");
+    if (pContext->EffectType == LVM_VIRTUALIZER) {
+        // ALOGV("\tEffect_command setting command for LVM_VIRTUALIZER");
     }
-    if(pContext->EffectType == LVM_EQUALIZER){
-        //ALOGV("\tEffect_command setting command for LVM_EQUALIZER");
+    if (pContext->EffectType == LVM_EQUALIZER) {
+        // ALOGV("\tEffect_command setting command for LVM_EQUALIZER");
     }
-    if(pContext->EffectType == LVM_VOLUME){
-        //ALOGV("\tEffect_command setting command for LVM_VOLUME");
+    if (pContext->EffectType == LVM_VOLUME) {
+        // ALOGV("\tEffect_command setting command for LVM_VOLUME");
     }
 
-    if (pContext == NULL){
+    if (pContext == NULL) {
         ALOGV("\tLVM_ERROR : Effect_command ERROR pContext == NULL");
         return -EINVAL;
     }
 
-    //ALOGV("\tEffect_command INPUTS are: command %d cmdSize %d",cmdCode, cmdSize);
+    // ALOGV("\tEffect_command INPUTS are: command %d cmdSize %d",cmdCode, cmdSize);
 
     // In case we disable an effect, the next time process is
     // called, the number of effects called could be greater
     // pContext->pBundledContext->NumberEffectsCalled = 0;
 
-    //ALOGV("\tEffect_command NumberEffectsCalled = %d, NumberEffectsEnabled = %d",
+    // ALOGV("\tEffect_command NumberEffectsCalled = %d, NumberEffectsEnabled = %d",
     //        pContext->pBundledContext->NumberEffectsCalled,
     //        pContext->pBundledContext->NumberEffectsEnabled);
 
-    switch (cmdCode){
+    switch (cmdCode) {
         case EFFECT_CMD_INIT:
-            if (pReplyData == NULL || replySize == NULL || *replySize != sizeof(int)){
+            if (pReplyData == NULL || replySize == NULL || *replySize != sizeof(int)) {
                 ALOGV("\tLVM_ERROR, EFFECT_CMD_INIT: ERROR for effect type %d",
-                        pContext->EffectType);
+                      pContext->EffectType);
                 return -EINVAL;
             }
-            *(int *) pReplyData = 0;
-            //ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_INIT start");
-            if(pContext->EffectType == LVM_BASS_BOOST){
-                //ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_INIT for LVM_BASS_BOOST");
+            *(int*)pReplyData = 0;
+            // ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_INIT start");
+            if (pContext->EffectType == LVM_BASS_BOOST) {
+                // ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_INIT for LVM_BASS_BOOST");
                 android::BassSetStrength(pContext, 0);
             }
-            if(pContext->EffectType == LVM_VIRTUALIZER){
-                //ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_INIT for LVM_VIRTUALIZER");
+            if (pContext->EffectType == LVM_VIRTUALIZER) {
+                // ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_INIT for LVM_VIRTUALIZER");
                 android::VirtualizerSetStrength(pContext, 0);
             }
-            if(pContext->EffectType == LVM_EQUALIZER){
-                //ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_INIT for LVM_EQUALIZER");
+            if (pContext->EffectType == LVM_EQUALIZER) {
+                // ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_INIT for LVM_EQUALIZER");
                 android::EqualizerSetPreset(pContext, 0);
             }
-            if(pContext->EffectType == LVM_VOLUME){
-                //ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_INIT for LVM_VOLUME");
-                *(int *) pReplyData = android::VolumeSetVolumeLevel(pContext, 0);
+            if (pContext->EffectType == LVM_VOLUME) {
+                // ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_INIT for LVM_VOLUME");
+                *(int*)pReplyData = android::VolumeSetVolumeLevel(pContext, 0);
             }
             break;
 
         case EFFECT_CMD_SET_CONFIG:
-            //ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_SET_CONFIG start");
-            if (pCmdData    == NULL || cmdSize     != sizeof(effect_config_t) ||
-                    pReplyData  == NULL || replySize == NULL || *replySize  != sizeof(int)) {
+            // ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_SET_CONFIG start");
+            if (pCmdData == NULL || cmdSize != sizeof(effect_config_t) || pReplyData == NULL ||
+                replySize == NULL || *replySize != sizeof(int)) {
                 ALOGV("\tLVM_ERROR : Effect_command cmdCode Case: "
-                        "EFFECT_CMD_SET_CONFIG: ERROR");
+                      "EFFECT_CMD_SET_CONFIG: ERROR");
                 return -EINVAL;
             }
-            *(int *) pReplyData = android::Effect_setConfig(pContext, (effect_config_t *) pCmdData);
-            //ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_SET_CONFIG end");
+            *(int*)pReplyData = android::Effect_setConfig(pContext, (effect_config_t*)pCmdData);
+            // ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_SET_CONFIG end");
             break;
 
         case EFFECT_CMD_GET_CONFIG:
             if (pReplyData == NULL || replySize == NULL || *replySize != sizeof(effect_config_t)) {
                 ALOGV("\tLVM_ERROR : Effect_command cmdCode Case: "
-                        "EFFECT_CMD_GET_CONFIG: ERROR");
+                      "EFFECT_CMD_GET_CONFIG: ERROR");
                 return -EINVAL;
             }
 
-            android::Effect_getConfig(pContext, (effect_config_t *)pReplyData);
+            android::Effect_getConfig(pContext, (effect_config_t*)pReplyData);
             break;
 
         case EFFECT_CMD_RESET:
-            //ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_RESET start");
+            // ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_RESET start");
             android::Effect_setConfig(pContext, &pContext->config);
-            //ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_RESET end");
+            // ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_RESET end");
             break;
 
-        case EFFECT_CMD_GET_PARAM:{
-            //ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_GET_PARAM start");
+        case EFFECT_CMD_GET_PARAM: {
+            // ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_GET_PARAM start");
 
-            effect_param_t *p = (effect_param_t *)pCmdData;
+            effect_param_t* p = (effect_param_t*)pCmdData;
             if (pCmdData == NULL || cmdSize < sizeof(effect_param_t) ||
-                    cmdSize < (sizeof(effect_param_t) + p->psize) ||
-                    pReplyData == NULL || replySize == NULL ||
-                    *replySize < (sizeof(effect_param_t) + p->psize)) {
+                cmdSize < (sizeof(effect_param_t) + p->psize) || pReplyData == NULL ||
+                replySize == NULL || *replySize < (sizeof(effect_param_t) + p->psize)) {
                 ALOGV("\tLVM_ERROR : EFFECT_CMD_GET_PARAM: ERROR");
                 return -EINVAL;
             }
@@ -3401,75 +3195,62 @@
             }
             const uint32_t paddedParamSize = computeParamVOffset(p);
             if ((EFFECT_PARAM_SIZE_MAX - sizeof(effect_param_t) < paddedParamSize) ||
-                (EFFECT_PARAM_SIZE_MAX - sizeof(effect_param_t) - paddedParamSize <
-                    p->vsize)) {
+                (EFFECT_PARAM_SIZE_MAX - sizeof(effect_param_t) - paddedParamSize < p->vsize)) {
                 ALOGV("\tLVM_ERROR : EFFECT_CMD_GET_PARAM: padded_psize or vsize too big");
                 return -EINVAL;
             }
             uint32_t expectedReplySize = sizeof(effect_param_t) + paddedParamSize + p->vsize;
             if (*replySize < expectedReplySize) {
                 ALOGV("\tLVM_ERROR : EFFECT_CMD_GET_PARAM: min. replySize %u, got %u bytes",
-                        expectedReplySize, *replySize);
+                      expectedReplySize, *replySize);
                 android_errorWriteLog(0x534e4554, "32705438");
                 return -EINVAL;
             }
 
             memcpy(pReplyData, pCmdData, sizeof(effect_param_t) + p->psize);
 
-            p = (effect_param_t *)pReplyData;
+            p = (effect_param_t*)pReplyData;
 
             uint32_t voffset = paddedParamSize;
-            if(pContext->EffectType == LVM_BASS_BOOST){
-                p->status = android::BassBoost_getParameter(pContext,
-                                                            p->psize,
-                                                            p->data,
-                                                            &p->vsize,
+            if (pContext->EffectType == LVM_BASS_BOOST) {
+                p->status = android::BassBoost_getParameter(pContext, p->psize, p->data, &p->vsize,
                                                             p->data + voffset);
-                //ALOGV("\tBassBoost_command EFFECT_CMD_GET_PARAM "
+                // ALOGV("\tBassBoost_command EFFECT_CMD_GET_PARAM "
                 //        "*pCmdData %d, *replySize %d, *pReplyData %d ",
                 //        *(int32_t *)((char *)pCmdData + sizeof(effect_param_t)),
                 //        *replySize,
                 //        *(int16_t *)((char *)pReplyData + sizeof(effect_param_t) + voffset));
             }
 
-            if(pContext->EffectType == LVM_VIRTUALIZER){
-                p->status = android::Virtualizer_getParameter(pContext,
-                                                              p->psize,
-                                                              (void *)p->data,
-                                                              &p->vsize,
-                                                              p->data + voffset);
+            if (pContext->EffectType == LVM_VIRTUALIZER) {
+                p->status = android::Virtualizer_getParameter(pContext, p->psize, (void*)p->data,
+                                                              &p->vsize, p->data + voffset);
 
-                //ALOGV("\tVirtualizer_command EFFECT_CMD_GET_PARAM "
+                // ALOGV("\tVirtualizer_command EFFECT_CMD_GET_PARAM "
                 //        "*pCmdData %d, *replySize %d, *pReplyData %d ",
                 //        *(int32_t *)((char *)pCmdData + sizeof(effect_param_t)),
                 //        *replySize,
                 //        *(int16_t *)((char *)pReplyData + sizeof(effect_param_t) + voffset));
             }
-            if(pContext->EffectType == LVM_EQUALIZER){
-                //ALOGV("\tEqualizer_command cmdCode Case: "
+            if (pContext->EffectType == LVM_EQUALIZER) {
+                // ALOGV("\tEqualizer_command cmdCode Case: "
                 //        "EFFECT_CMD_GET_PARAM start");
-                p->status = android::Equalizer_getParameter(pContext,
-                                                            p->psize,
-                                                            p->data,
-                                                            &p->vsize,
+                p->status = android::Equalizer_getParameter(pContext, p->psize, p->data, &p->vsize,
                                                             p->data + voffset);
 
-                //ALOGV("\tEqualizer_command EFFECT_CMD_GET_PARAM *pCmdData %d, *replySize %d, "
+                // ALOGV("\tEqualizer_command EFFECT_CMD_GET_PARAM *pCmdData %d, *replySize %d, "
                 //       "*pReplyData %08x %08x",
                 //        *(int32_t *)((char *)pCmdData + sizeof(effect_param_t)), *replySize,
                 //        *(int32_t *)((char *)pReplyData + sizeof(effect_param_t) + voffset),
                 //        *(int32_t *)((char *)pReplyData + sizeof(effect_param_t) + voffset +
                 //        sizeof(int32_t)));
             }
-            if(pContext->EffectType == LVM_VOLUME){
-                //ALOGV("\tVolume_command cmdCode Case: EFFECT_CMD_GET_PARAM start");
-                p->status = android::Volume_getParameter(pContext,
-                                                         p->psize,
-                                                         (void *)p->data,
-                                                         &p->vsize,
-                                                         p->data + voffset);
+            if (pContext->EffectType == LVM_VOLUME) {
+                // ALOGV("\tVolume_command cmdCode Case: EFFECT_CMD_GET_PARAM start");
+                p->status = android::Volume_getParameter(pContext, p->psize, (void*)p->data,
+                                                         &p->vsize, p->data + voffset);
 
-                //ALOGV("\tVolume_command EFFECT_CMD_GET_PARAM "
+                // ALOGV("\tVolume_command EFFECT_CMD_GET_PARAM "
                 //        "*pCmdData %d, *replySize %d, *pReplyData %d ",
                 //        *(int32_t *)((char *)pCmdData + sizeof(effect_param_t)),
                 //        *replySize,
@@ -3477,123 +3258,114 @@
             }
             *replySize = sizeof(effect_param_t) + voffset + p->vsize;
 
-            //ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_GET_PARAM end");
+            // ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_GET_PARAM end");
         } break;
-        case EFFECT_CMD_SET_PARAM:{
-            //ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_SET_PARAM start");
-            if(pContext->EffectType == LVM_BASS_BOOST){
-                //ALOGV("\tBassBoost_command EFFECT_CMD_SET_PARAM param %d, *replySize %d, value %d",
+        case EFFECT_CMD_SET_PARAM: {
+            // ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_SET_PARAM start");
+            if (pContext->EffectType == LVM_BASS_BOOST) {
+                // ALOGV("\tBassBoost_command EFFECT_CMD_SET_PARAM param %d, *replySize %d, value
+                // %d",
                 //       *(int32_t *)((char *)pCmdData + sizeof(effect_param_t)),
                 //       *replySize,
                 //       *(int16_t *)((char *)pCmdData + sizeof(effect_param_t) + sizeof(int32_t)));
 
-                if (pCmdData   == NULL ||
-                        cmdSize    != (sizeof(effect_param_t) + sizeof(int32_t) +sizeof(int16_t)) ||
-                        pReplyData == NULL || replySize == NULL || *replySize != sizeof(int32_t)) {
+                if (pCmdData == NULL ||
+                    cmdSize != (sizeof(effect_param_t) + sizeof(int32_t) + sizeof(int16_t)) ||
+                    pReplyData == NULL || replySize == NULL || *replySize != sizeof(int32_t)) {
                     ALOGV("\tLVM_ERROR : BassBoost_command cmdCode Case: "
-                            "EFFECT_CMD_SET_PARAM: ERROR");
+                          "EFFECT_CMD_SET_PARAM: ERROR");
                     return -EINVAL;
                 }
 
-                effect_param_t * const p = (effect_param_t *) pCmdData;
+                effect_param_t* const p = (effect_param_t*)pCmdData;
                 const uint32_t voffset = computeParamVOffset(p);
 
-                //ALOGV("\tnBassBoost_command cmdSize is %d\n"
+                // ALOGV("\tnBassBoost_command cmdSize is %d\n"
                 //        "\tsizeof(effect_param_t) is  %d\n"
                 //        "\tp->psize is %d\n"
                 //        "\tp->vsize is %d"
                 //        "\n",
                 //        cmdSize, sizeof(effect_param_t), p->psize, p->vsize );
 
-                *(int *)pReplyData = android::BassBoost_setParameter(pContext,
-                                                                     p->psize,
-                                                                     (void *)p->data,
-                                                                     p->vsize,
-                                                                     p->data + voffset);
+                *(int*)pReplyData = android::BassBoost_setParameter(
+                        pContext, p->psize, (void*)p->data, p->vsize, p->data + voffset);
             }
-            if(pContext->EffectType == LVM_VIRTUALIZER){
-              // Warning this log will fail to properly read an int32_t value, assumes int16_t
-              //ALOGV("\tVirtualizer_command EFFECT_CMD_SET_PARAM param %d, *replySize %d, value %d",
-              //        *(int32_t *)((char *)pCmdData + sizeof(effect_param_t)),
-              //        *replySize,
-              //        *(int16_t *)((char *)pCmdData + sizeof(effect_param_t) + sizeof(int32_t)));
+            if (pContext->EffectType == LVM_VIRTUALIZER) {
+                // Warning: this log will fail to properly read an int32_t value, as it assumes int16_t
+                // ALOGV("\tVirtualizer_command EFFECT_CMD_SET_PARAM param %d, *replySize %d, value
+                // %d",
+                //        *(int32_t *)((char *)pCmdData + sizeof(effect_param_t)),
+                //        *replySize,
+                //        *(int16_t *)((char *)pCmdData + sizeof(effect_param_t) +
+                //        sizeof(int32_t)));
 
-                if (pCmdData   == NULL ||
-                        // legal parameters are int16_t or int32_t
-                        cmdSize    > (sizeof(effect_param_t) + sizeof(int32_t) +sizeof(int32_t)) ||
-                        cmdSize    < (sizeof(effect_param_t) + sizeof(int32_t) +sizeof(int16_t)) ||
-                        pReplyData == NULL || replySize == NULL || *replySize != sizeof(int32_t)) {
+                if (pCmdData == NULL ||
+                    // legal parameters are int16_t or int32_t
+                    cmdSize > (sizeof(effect_param_t) + sizeof(int32_t) + sizeof(int32_t)) ||
+                    cmdSize < (sizeof(effect_param_t) + sizeof(int32_t) + sizeof(int16_t)) ||
+                    pReplyData == NULL || replySize == NULL || *replySize != sizeof(int32_t)) {
                     ALOGV("\tLVM_ERROR : Virtualizer_command cmdCode Case: "
-                            "EFFECT_CMD_SET_PARAM: ERROR");
+                          "EFFECT_CMD_SET_PARAM: ERROR");
                     return -EINVAL;
                 }
 
-                effect_param_t * const p = (effect_param_t *) pCmdData;
+                effect_param_t* const p = (effect_param_t*)pCmdData;
                 const uint32_t voffset = computeParamVOffset(p);
 
-                //ALOGV("\tnVirtualizer_command cmdSize is %d\n"
+                // ALOGV("\tnVirtualizer_command cmdSize is %d\n"
                 //        "\tsizeof(effect_param_t) is  %d\n"
                 //        "\tp->psize is %d\n"
                 //        "\tp->vsize is %d"
                 //        "\n",
                 //        cmdSize, sizeof(effect_param_t), p->psize, p->vsize );
 
-                *(int *)pReplyData = android::Virtualizer_setParameter(pContext,
-                                                                       p->psize,
-                                                                       (void *)p->data,
-                                                                       p->vsize,
-                                                                       p->data + voffset);
+                *(int*)pReplyData = android::Virtualizer_setParameter(
+                        pContext, p->psize, (void*)p->data, p->vsize, p->data + voffset);
             }
-            if(pContext->EffectType == LVM_EQUALIZER){
-               //ALOGV("\tEqualizer_command cmdCode Case: "
-               //        "EFFECT_CMD_SET_PARAM start");
-               //ALOGV("\tEqualizer_command EFFECT_CMD_SET_PARAM param %d, *replySize %d, value %d ",
-               //        *(int32_t *)((char *)pCmdData + sizeof(effect_param_t)),
-               //        *replySize,
-               //        *(int16_t *)((char *)pCmdData + sizeof(effect_param_t) + sizeof(int32_t)));
+            if (pContext->EffectType == LVM_EQUALIZER) {
+                // ALOGV("\tEqualizer_command cmdCode Case: "
+                //        "EFFECT_CMD_SET_PARAM start");
+                // ALOGV("\tEqualizer_command EFFECT_CMD_SET_PARAM param %d, *replySize %d, value %d
+                // ",
+                //        *(int32_t *)((char *)pCmdData + sizeof(effect_param_t)),
+                //        *replySize,
+                //        *(int16_t *)((char *)pCmdData + sizeof(effect_param_t) +
+                //        sizeof(int32_t)));
 
                 if (pCmdData == NULL || cmdSize < (sizeof(effect_param_t) + sizeof(int32_t)) ||
-                        pReplyData == NULL || replySize == NULL || *replySize != sizeof(int32_t)) {
+                    pReplyData == NULL || replySize == NULL || *replySize != sizeof(int32_t)) {
                     ALOGV("\tLVM_ERROR : Equalizer_command cmdCode Case: "
-                            "EFFECT_CMD_SET_PARAM: ERROR");
+                          "EFFECT_CMD_SET_PARAM: ERROR");
                     return -EINVAL;
                 }
 
-                effect_param_t * const p = (effect_param_t *) pCmdData;
+                effect_param_t* const p = (effect_param_t*)pCmdData;
                 const uint32_t voffset = computeParamVOffset(p);
 
-                *(int *)pReplyData = android::Equalizer_setParameter(pContext,
-                                                                     p->psize,
-                                                                     (void *)p->data,
-                                                                     p->vsize,
-                                                                     p->data + voffset);
+                *(int*)pReplyData = android::Equalizer_setParameter(
+                        pContext, p->psize, (void*)p->data, p->vsize, p->data + voffset);
             }
-            if(pContext->EffectType == LVM_VOLUME){
-                //ALOGV("\tVolume_command cmdCode Case: EFFECT_CMD_SET_PARAM start");
-                //ALOGV("\tVolume_command EFFECT_CMD_SET_PARAM param %d, *replySize %d, value %d ",
+            if (pContext->EffectType == LVM_VOLUME) {
+                // ALOGV("\tVolume_command cmdCode Case: EFFECT_CMD_SET_PARAM start");
+                // ALOGV("\tVolume_command EFFECT_CMD_SET_PARAM param %d, *replySize %d, value %d ",
                 //        *(int32_t *)((char *)pCmdData + sizeof(effect_param_t)),
                 //        *replySize,
                 //        *(int16_t *)((char *)pCmdData + sizeof(effect_param_t) +sizeof(int32_t)));
 
-                if (pCmdData   == NULL ||
-                        cmdSize    < (sizeof(effect_param_t) + sizeof(int32_t)) ||
-                        pReplyData == NULL || replySize == NULL ||
-                        *replySize != sizeof(int32_t)) {
+                if (pCmdData == NULL || cmdSize < (sizeof(effect_param_t) + sizeof(int32_t)) ||
+                    pReplyData == NULL || replySize == NULL || *replySize != sizeof(int32_t)) {
                     ALOGV("\tLVM_ERROR : Volume_command cmdCode Case: "
-                            "EFFECT_CMD_SET_PARAM: ERROR");
+                          "EFFECT_CMD_SET_PARAM: ERROR");
                     return -EINVAL;
                 }
 
-                effect_param_t * const p = (effect_param_t *) pCmdData;
+                effect_param_t* const p = (effect_param_t*)pCmdData;
                 const uint32_t voffset = computeParamVOffset(p);
 
-                *(int *)pReplyData = android::Volume_setParameter(pContext,
-                                                                  p->psize,
-                                                                  (void *)p->data,
-                                                                  p->vsize,
-                                                                  p->data + voffset);
+                *(int*)pReplyData = android::Volume_setParameter(pContext, p->psize, (void*)p->data,
+                                                                 p->vsize, p->data + voffset);
             }
-            //ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_SET_PARAM end");
+            // ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_SET_PARAM end");
         } break;
 
         case EFFECT_CMD_ENABLE:
@@ -3603,57 +3375,56 @@
                 return -EINVAL;
             }
 
-            *(int *)pReplyData = android::Effect_setEnabled(pContext, LVM_TRUE);
+            *(int*)pReplyData = android::Effect_setEnabled(pContext, LVM_TRUE);
             break;
 
         case EFFECT_CMD_DISABLE:
-            //ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_DISABLE start");
+            // ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_DISABLE start");
             if (pReplyData == NULL || replySize == NULL || *replySize != sizeof(int)) {
                 ALOGV("\tLVM_ERROR : Effect_command cmdCode Case: EFFECT_CMD_DISABLE: ERROR");
                 return -EINVAL;
             }
-            *(int *)pReplyData = android::Effect_setEnabled(pContext, LVM_FALSE);
+            *(int*)pReplyData = android::Effect_setEnabled(pContext, LVM_FALSE);
             break;
 
-        case EFFECT_CMD_SET_DEVICE:
-        {
+        case EFFECT_CMD_SET_DEVICE: {
             ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_SET_DEVICE start");
-            if (pCmdData   == NULL){
+            if (pCmdData == NULL) {
                 ALOGV("\tLVM_ERROR : Effect_command cmdCode Case: EFFECT_CMD_SET_DEVICE: ERROR");
                 return -EINVAL;
             }
 
-            uint32_t device = *(uint32_t *)pCmdData;
-            pContext->pBundledContext->nOutputDevice = (audio_devices_t) device;
+            audio_devices_t device = *(audio_devices_t*)pCmdData;
+            pContext->pBundledContext->nOutputDevice = device;
 
             if (pContext->EffectType == LVM_BASS_BOOST) {
-                if((device == AUDIO_DEVICE_OUT_SPEAKER) ||
-                        (device == AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT) ||
-                        (device == AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER)){
+                if ((device == AUDIO_DEVICE_OUT_SPEAKER) ||
+                    (device == AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT) ||
+                    (device == AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER)) {
                     ALOGV("\tEFFECT_CMD_SET_DEVICE device is invalid for LVM_BASS_BOOST %d",
-                          *(int32_t *)pCmdData);
+                          *(int32_t*)pCmdData);
                     ALOGV("\tEFFECT_CMD_SET_DEVICE temporary disable LVM_BAS_BOOST");
 
-                    // If a device doesnt support bassboost the effect must be temporarily disabled
+                    // If a device doesn't support bassboost, the effect must be temporarily disabled;
                     // the effect must still report its original state as this can only be changed
                     // by the ENABLE/DISABLE command
 
                     if (pContext->pBundledContext->bBassEnabled == LVM_TRUE) {
                         ALOGV("\tEFFECT_CMD_SET_DEVICE disable LVM_BASS_BOOST %d",
-                             *(int32_t *)pCmdData);
+                              *(int32_t*)pCmdData);
                         android::LvmEffect_disable(pContext);
                     }
                     pContext->pBundledContext->bBassTempDisabled = LVM_TRUE;
                 } else {
                     ALOGV("\tEFFECT_CMD_SET_DEVICE device is valid for LVM_BASS_BOOST %d",
-                         *(int32_t *)pCmdData);
+                          *(int32_t*)pCmdData);
 
                     // If a device supports bassboost and the effect has been temporarily disabled
                     // previously then re-enable it
 
                     if (pContext->pBundledContext->bBassEnabled == LVM_TRUE) {
                         ALOGV("\tEFFECT_CMD_SET_DEVICE re-enable LVM_BASS_BOOST %d",
-                             *(int32_t *)pCmdData);
+                              *(int32_t*)pCmdData);
                         android::LvmEffect_enable(pContext);
                     }
                     pContext->pBundledContext->bBassTempDisabled = LVM_FALSE;
@@ -3664,129 +3435,128 @@
                     // default case unless configuration is forced
                     if (android::VirtualizerIsDeviceSupported(device) != 0) {
                         ALOGV("\tEFFECT_CMD_SET_DEVICE device is invalid for LVM_VIRTUALIZER %d",
-                                *(int32_t *)pCmdData);
+                              *(int32_t*)pCmdData);
                         ALOGV("\tEFFECT_CMD_SET_DEVICE temporary disable LVM_VIRTUALIZER");
 
-                        //If a device doesnt support virtualizer the effect must be temporarily
+                        // If a device doesn't support virtualizer the effect must be temporarily
                         // disabled the effect must still report its original state as this can
                         // only be changed by the ENABLE/DISABLE command
 
                         if (pContext->pBundledContext->bVirtualizerEnabled == LVM_TRUE) {
                             ALOGV("\tEFFECT_CMD_SET_DEVICE disable LVM_VIRTUALIZER %d",
-                                    *(int32_t *)pCmdData);
+                                  *(int32_t*)pCmdData);
                             android::LvmEffect_disable(pContext);
                         }
                         pContext->pBundledContext->bVirtualizerTempDisabled = LVM_TRUE;
                     } else {
                         ALOGV("\tEFFECT_CMD_SET_DEVICE device is valid for LVM_VIRTUALIZER %d",
-                                *(int32_t *)pCmdData);
+                              *(int32_t*)pCmdData);
 
                         // If a device supports virtualizer and the effect has been temporarily
                         // disabled previously then re-enable it
 
-                        if(pContext->pBundledContext->bVirtualizerEnabled == LVM_TRUE){
+                        if (pContext->pBundledContext->bVirtualizerEnabled == LVM_TRUE) {
                             ALOGV("\tEFFECT_CMD_SET_DEVICE re-enable LVM_VIRTUALIZER %d",
-                                    *(int32_t *)pCmdData);
+                                  *(int32_t*)pCmdData);
                             android::LvmEffect_enable(pContext);
                         }
                         pContext->pBundledContext->bVirtualizerTempDisabled = LVM_FALSE;
                     }
-                } // else virtualization mode is forced to a certain device, nothing to do
+                }  // else virtualization mode is forced to a certain device, nothing to do
             }
             ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_SET_DEVICE end");
             break;
         }
-        case EFFECT_CMD_SET_VOLUME:
-        {
+        case EFFECT_CMD_SET_VOLUME: {
             uint32_t leftVolume, rightVolume;
-            int16_t  leftdB, rightdB;
-            int16_t  maxdB, pandB;
-            int32_t  vol_ret[2] = {1<<24,1<<24}; // Apply no volume
-            LVM_ControlParams_t     ActiveParams;           /* Current control Parameters */
-            LVM_ReturnStatus_en     LvmStatus=LVM_SUCCESS;  /* Function call status */
+            int16_t leftdB, rightdB;
+            int16_t maxdB, pandB;
+            int32_t vol_ret[2] = {1 << 24, 1 << 24};     // Apply no volume
+            LVM_ControlParams_t ActiveParams;            /* Current control Parameters */
+            LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
 
             // if pReplyData is NULL, VOL_CTRL is delegated to another effect
-            if(pReplyData == LVM_NULL){
+            if (pReplyData == LVM_NULL) {
                 break;
             }
 
             if (pCmdData == NULL || cmdSize != 2 * sizeof(uint32_t) || pReplyData == NULL ||
-                    replySize == NULL || *replySize < 2*sizeof(int32_t)) {
+                replySize == NULL || *replySize < 2 * sizeof(int32_t)) {
                 ALOGV("\tLVM_ERROR : Effect_command cmdCode Case: "
-                        "EFFECT_CMD_SET_VOLUME: ERROR");
+                      "EFFECT_CMD_SET_VOLUME: ERROR");
                 return -EINVAL;
             }
 
-            leftVolume  = ((*(uint32_t *)pCmdData));
-            rightVolume = ((*((uint32_t *)pCmdData + 1)));
+            leftVolume = ((*(uint32_t*)pCmdData));
+            rightVolume = ((*((uint32_t*)pCmdData + 1)));
 
-            if(leftVolume == 0x1000000){
+            if (leftVolume == 0x1000000) {
                 leftVolume -= 1;
             }
-            if(rightVolume == 0x1000000){
+            if (rightVolume == 0x1000000) {
                 rightVolume -= 1;
             }
 
             // Convert volume to dB
-            leftdB  = android::LVC_Convert_VolToDb(leftVolume);
+            leftdB = android::LVC_Convert_VolToDb(leftVolume);
             rightdB = android::LVC_Convert_VolToDb(rightVolume);
 
             pandB = rightdB - leftdB;
 
             // Calculate max volume in dB
             maxdB = leftdB;
-            if(rightdB > maxdB){
+            if (rightdB > maxdB) {
                 maxdB = rightdB;
             }
-            //ALOGV("\tEFFECT_CMD_SET_VOLUME Session: %d, SessionID: %d VOLUME is %d dB, "
+            // ALOGV("\tEFFECT_CMD_SET_VOLUME Session: %d, SessionID: %d VOLUME is %d dB, "
             //      "effect is %d",
-            //pContext->pBundledContext->SessionNo, pContext->pBundledContext->SessionId,
+            // pContext->pBundledContext->SessionNo, pContext->pBundledContext->SessionId,
             //(int32_t)maxdB, pContext->EffectType);
-            //ALOGV("\tEFFECT_CMD_SET_VOLUME: Left is %d, Right is %d", leftVolume, rightVolume);
-            //ALOGV("\tEFFECT_CMD_SET_VOLUME: Left %ddB, Right %ddB, Position %ddB",
+            // ALOGV("\tEFFECT_CMD_SET_VOLUME: Left is %d, Right is %d", leftVolume, rightVolume);
+            // ALOGV("\tEFFECT_CMD_SET_VOLUME: Left %ddB, Right %ddB, Position %ddB",
             //        leftdB, rightdB, pandB);
 
-            memcpy(pReplyData, vol_ret, sizeof(int32_t)*2);
-            android::VolumeSetVolumeLevel(pContext, (int16_t)(maxdB*100));
+            memcpy(pReplyData, vol_ret, sizeof(int32_t) * 2);
+            android::VolumeSetVolumeLevel(pContext, (int16_t)(maxdB * 100));
 
             /* Get the current settings */
-            LvmStatus =LVM_GetControlParameters(pContext->pBundledContext->hInstance,&ActiveParams);
+            LvmStatus =
+                    LVM_GetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
             LVM_ERROR_CHECK(LvmStatus, "LVM_GetControlParameters", "VolumeSetStereoPosition")
-            if(LvmStatus != LVM_SUCCESS) return -EINVAL;
+            if (LvmStatus != LVM_SUCCESS) return -EINVAL;
 
             /* Volume parameters */
-            ActiveParams.VC_Balance  = pandB;
-            ALOGV("\t\tVolumeSetStereoPosition() (-96dB -> +96dB)-> %d\n", ActiveParams.VC_Balance );
+            ActiveParams.VC_Balance = pandB;
+            ALOGV("\t\tVolumeSetStereoPosition() (-96dB -> +96dB)-> %d\n", ActiveParams.VC_Balance);
 
             /* Activate the initial settings */
-            LvmStatus =LVM_SetControlParameters(pContext->pBundledContext->hInstance,&ActiveParams);
+            LvmStatus =
+                    LVM_SetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
             LVM_ERROR_CHECK(LvmStatus, "LVM_SetControlParameters", "VolumeSetStereoPosition")
-            if(LvmStatus != LVM_SUCCESS) return -EINVAL;
+            if (LvmStatus != LVM_SUCCESS) return -EINVAL;
             break;
-         }
+        }
         case EFFECT_CMD_SET_AUDIO_MODE:
             break;
         default:
             return -EINVAL;
     }
 
-    //ALOGV("\tEffect_command end...\n\n");
+    // ALOGV("\tEffect_command end...\n\n");
     return 0;
-}    /* end Effect_command */
+} /* end Effect_command */
 
 /* Effect Control Interface Implementation: get_descriptor */
-int Effect_getDescriptor(effect_handle_t   self,
-                                    effect_descriptor_t *pDescriptor)
-{
-    EffectContext * pContext = (EffectContext *) self;
-    const effect_descriptor_t *desc;
+int Effect_getDescriptor(effect_handle_t self, effect_descriptor_t* pDescriptor) {
+    EffectContext* pContext = (EffectContext*)self;
+    const effect_descriptor_t* desc;
 
     if (pContext == NULL || pDescriptor == NULL) {
         ALOGV("Effect_getDescriptor() invalid param");
         return -EINVAL;
     }
 
-    switch(pContext->EffectType) {
+    switch (pContext->EffectType) {
         case LVM_BASS_BOOST:
             desc = &android::gBassBoostDescriptor;
             break;
@@ -3806,26 +3576,24 @@
     *pDescriptor = *desc;
 
     return 0;
-}   /* end Effect_getDescriptor */
+} /* end Effect_getDescriptor */
 
 // effect_handle_t interface implementation for effect
 const struct effect_interface_s gLvmEffectInterface = {
-    Effect_process,
-    Effect_command,
-    Effect_getDescriptor,
-    NULL,
-};    /* end gLvmEffectInterface */
+        Effect_process,
+        Effect_command,
+        Effect_getDescriptor,
+        NULL,
+}; /* end gLvmEffectInterface */
 
 // This is the only symbol that needs to be exported
-__attribute__ ((visibility ("default")))
-audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = {
-    .tag = AUDIO_EFFECT_LIBRARY_TAG,
-    .version = EFFECT_LIBRARY_API_VERSION,
-    .name = "Effect Bundle Library",
-    .implementor = "NXP Software Ltd.",
-    .create_effect = android::EffectCreate,
-    .release_effect = android::EffectRelease,
-    .get_descriptor = android::EffectGetDescriptor,
+__attribute__((visibility("default"))) audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = {
+        .tag = AUDIO_EFFECT_LIBRARY_TAG,
+        .version = EFFECT_LIBRARY_API_VERSION,
+        .name = "Effect Bundle Library",
+        .implementor = "NXP Software Ltd.",
+        .create_effect = android::EffectCreate,
+        .release_effect = android::EffectRelease,
+        .get_descriptor = android::EffectGetDescriptor,
 };
-
 }
diff --git a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.h b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.h
index 524e103..f3e7884 100644
--- a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.h
+++ b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.h
@@ -23,176 +23,148 @@
 #include <LVM.h>
 #include <limits.h>
 
-#define FIVEBAND_NUMBANDS          5
-#define MAX_NUM_BANDS              5
-#define MAX_CALL_SIZE              256
-#define LVM_MAX_SESSIONS           32
-#define LVM_UNUSED_SESSION         INT_MAX
-#define BASS_BOOST_CUP_LOAD_ARM9E  150    // Expressed in 0.1 MIPS
-#define VIRTUALIZER_CUP_LOAD_ARM9E 120    // Expressed in 0.1 MIPS
-#define EQUALIZER_CUP_LOAD_ARM9E   220    // Expressed in 0.1 MIPS
-#define VOLUME_CUP_LOAD_ARM9E      0      // Expressed in 0.1 MIPS
-#define BUNDLE_MEM_USAGE           25     // Expressed in kB
+#define FIVEBAND_NUMBANDS 5
+#define MAX_NUM_BANDS 5
+#define MAX_CALL_SIZE 256
+#define LVM_MAX_SESSIONS 32
+#define LVM_UNUSED_SESSION INT_MAX
+#define BASS_BOOST_CUP_LOAD_ARM9E 150   // Expressed in 0.1 MIPS
+#define VIRTUALIZER_CUP_LOAD_ARM9E 120  // Expressed in 0.1 MIPS
+#define EQUALIZER_CUP_LOAD_ARM9E 220    // Expressed in 0.1 MIPS
+#define VOLUME_CUP_LOAD_ARM9E 0         // Expressed in 0.1 MIPS
+#define BUNDLE_MEM_USAGE 25             // Expressed in kB
 
 #ifndef OPENSL_ES_H_
-static const effect_uuid_t SL_IID_VOLUME_ = { 0x09e8ede0, 0xddde, 0x11db, 0xb4f6,
-                                            { 0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b } };
-const effect_uuid_t * const SL_IID_VOLUME = &SL_IID_VOLUME_;
-#endif //OPENSL_ES_H_
+static const effect_uuid_t SL_IID_VOLUME_ = {
+        0x09e8ede0, 0xddde, 0x11db, 0xb4f6, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}};
+const effect_uuid_t* const SL_IID_VOLUME = &SL_IID_VOLUME_;
+#endif  // OPENSL_ES_H_
 
-typedef enum
-{
-    LVM_BASS_BOOST,
-    LVM_VIRTUALIZER,
-    LVM_EQUALIZER,
-    LVM_VOLUME
-} lvm_effect_en;
+typedef enum { LVM_BASS_BOOST, LVM_VIRTUALIZER, LVM_EQUALIZER, LVM_VOLUME } lvm_effect_en;
 
 // Preset configuration.
 struct PresetConfig {
     // Human-readable name.
-    const char * name;
+    const char* name;
     // An array of size nBands where each element is a configuration for the
     // corresponding band.
-    //const BandConfig * bandConfigs;
+    // const BandConfig * bandConfigs;
 };
 
 /* BundledEffectContext : One per session */
-struct BundledEffectContext{
-    LVM_Handle_t                    hInstance;                /* Instance handle */
-    int                             SessionNo;                /* Current session number */
-    int                             SessionId;                /* Current session id */
-    bool                            bVolumeEnabled;           /* Flag for Volume */
-    bool                            bEqualizerEnabled;        /* Flag for EQ */
-    bool                            bBassEnabled;             /* Flag for Bass */
-    bool                            bBassTempDisabled;        /* Flag for Bass to be re-enabled */
-    bool                            bVirtualizerEnabled;      /* Flag for Virtualizer */
-    bool                            bVirtualizerTempDisabled; /* Flag for effect to be re-enabled */
-    audio_devices_t                 nOutputDevice;            /* Output device for the effect */
-    audio_devices_t                 nVirtualizerForcedDevice; /* Forced device virtualization mode*/
-    int                             NumberEffectsEnabled;     /* Effects in this session */
-    int                             NumberEffectsCalled;      /* Effects called so far */
-    bool                            firstVolume;              /* No smoothing on first Vol change */
+struct BundledEffectContext {
+    LVM_Handle_t hInstance;                   /* Instance handle */
+    int SessionNo;                            /* Current session number */
+    int SessionId;                            /* Current session id */
+    bool bVolumeEnabled;                      /* Flag for Volume */
+    bool bEqualizerEnabled;                   /* Flag for EQ */
+    bool bBassEnabled;                        /* Flag for Bass */
+    bool bBassTempDisabled;                   /* Flag for Bass to be re-enabled */
+    bool bVirtualizerEnabled;                 /* Flag for Virtualizer */
+    bool bVirtualizerTempDisabled;            /* Flag for effect to be re-enabled */
+    audio_devices_t nOutputDevice;            /* Output device for the effect */
+    audio_devices_t nVirtualizerForcedDevice; /* Forced device virtualization mode */
+    int NumberEffectsEnabled;                 /* Effects in this session */
+    int NumberEffectsCalled;                  /* Effects called so far */
+    bool firstVolume;                         /* No smoothing on first Vol change */
     // Saved parameters for each effect */
     // Bass Boost
-    int                             BassStrengthSaved;        /* Conversion between Get/Set */
+    int BassStrengthSaved; /* Conversion between Get/Set */
     // Equalizer
-    int                             CurPreset;                /* Current preset being used */
+    int CurPreset; /* Current preset being used */
     // Virtualzer
-    int                             VirtStrengthSaved;        /* Conversion between Get/Set */
+    int VirtStrengthSaved; /* Conversion between Get/Set */
     // Volume
-    int                             levelSaved;     /* for when mute is set, level must be saved */
-    int                             positionSaved;
-    bool                            bMuteEnabled;   /* Must store as mute = -96dB level */
-    bool                            bStereoPositionEnabled;
-    LVM_Fs_en                       SampleRate;
-    int                             SamplesPerSecond;
-    int                             SamplesToExitCountEq;
-    int                             SamplesToExitCountBb;
-    int                             SamplesToExitCountVirt;
-    effect_buffer_t                 *workBuffer;
-    int                             frameCount;
-    int32_t                         bandGaindB[FIVEBAND_NUMBANDS];
-    int                             volume;
-#ifdef SUPPORT_MC
-    LVM_INT32                       ChMask;
-#endif
+    int levelSaved; /* for when mute is set, level must be saved */
+    int positionSaved;
+    bool bMuteEnabled; /* Must store as mute = -96dB level */
+    bool bStereoPositionEnabled;
+    LVM_Fs_en SampleRate;
+    int SamplesPerSecond;
+    int SamplesToExitCountEq;
+    int SamplesToExitCountBb;
+    int SamplesToExitCountVirt;
+    effect_buffer_t* workBuffer;
+    int frameCount;
+    int32_t bandGaindB[FIVEBAND_NUMBANDS];
+    int volume;
+    LVM_INT32 ChMask;
 
     /* Bitmask whether drain is in progress due to disabling the effect.
        The corresponding bit to an effect is set by 1 << lvm_effect_en. */
-    int                             effectInDrain;
+    int effectInDrain;
 
     /* Bitmask whether process() was called for a particular effect.
        The corresponding bit to an effect is set by 1 << lvm_effect_en. */
-    int                             effectProcessCalled;
+    int effectProcessCalled;
 };
 
 /* SessionContext : One session */
-struct SessionContext{
-    bool                            bBundledEffectsEnabled;
-    bool                            bVolumeInstantiated;
-    bool                            bEqualizerInstantiated;
-    bool                            bBassInstantiated;
-    bool                            bVirtualizerInstantiated;
-    BundledEffectContext            *pBundledContext;
+struct SessionContext {
+    bool bBundledEffectsEnabled;
+    bool bVolumeInstantiated;
+    bool bEqualizerInstantiated;
+    bool bBassInstantiated;
+    bool bVirtualizerInstantiated;
+    BundledEffectContext* pBundledContext;
 };
 
-struct EffectContext{
-    const struct effect_interface_s *itfe;
-    effect_config_t                 config;
-    lvm_effect_en                   EffectType;
-    BundledEffectContext            *pBundledContext;
+struct EffectContext {
+    const struct effect_interface_s* itfe;
+    effect_config_t config;
+    lvm_effect_en EffectType;
+    BundledEffectContext* pBundledContext;
 };
 
 /* enumerated parameter settings for Volume effect */
-typedef enum
-{
-    VOLUME_PARAM_LEVEL,                       // type SLmillibel = typedef SLuint16 (set & get)
-    VOLUME_PARAM_MAXLEVEL,                    // type SLmillibel = typedef SLuint16 (get)
-    VOLUME_PARAM_MUTE,                        // type SLboolean  = typedef SLuint32 (set & get)
-    VOLUME_PARAM_ENABLESTEREOPOSITION,        // type SLboolean  = typedef SLuint32 (set & get)
-    VOLUME_PARAM_STEREOPOSITION,              // type SLpermille = typedef SLuint16 (set & get)
+typedef enum {
+    VOLUME_PARAM_LEVEL,                 // type SLmillibel = typedef SLuint16 (set & get)
+    VOLUME_PARAM_MAXLEVEL,              // type SLmillibel = typedef SLuint16 (get)
+    VOLUME_PARAM_MUTE,                  // type SLboolean  = typedef SLuint32 (set & get)
+    VOLUME_PARAM_ENABLESTEREOPOSITION,  // type SLboolean  = typedef SLuint32 (set & get)
+    VOLUME_PARAM_STEREOPOSITION,        // type SLpermille = typedef SLuint16 (set & get)
 } t_volume_params;
 
 static const int PRESET_CUSTOM = -1;
 
-static const uint32_t bandFreqRange[FIVEBAND_NUMBANDS][2] = {
-                                       {30000, 120000},
-                                       {120001, 460000},
-                                       {460001, 1800000},
-                                       {1800001, 7000000},
-                                       {7000001, 20000000}};
+static const uint32_t bandFreqRange[FIVEBAND_NUMBANDS][2] = {{30000, 120000},
+                                                             {120001, 460000},
+                                                             {460001, 1800000},
+                                                             {1800001, 7000000},
+                                                             {7000001, 20000000}};
 
-//Note: If these frequencies change, please update LimitLevel values accordingly.
-static const LVM_UINT16  EQNB_5BandPresetsFrequencies[] = {
-                                       60,           /* Frequencies in Hz */
-                                       230,
-                                       910,
-                                       3600,
-                                       14000};
+// Note: If these frequencies change, please update LimitLevel values accordingly.
+static const LVM_UINT16 EQNB_5BandPresetsFrequencies[] = {60, /* Frequencies in Hz */
+                                                          230, 910, 3600, 14000};
 
-static const LVM_UINT16 EQNB_5BandPresetsQFactors[] = {
-                                       96,               /* Q factor multiplied by 100 */
-                                       96,
-                                       96,
-                                       96,
-                                       96};
+static const LVM_UINT16 EQNB_5BandPresetsQFactors[] = {96, /* Q factor multiplied by 100 */
+                                                       96, 96, 96, 96};
 
-static const LVM_INT16 EQNB_5BandNormalPresets[] = {
-                                       3, 0, 0, 0, 3,       /* Normal Preset */
-                                       8, 5, -3, 5, 6,      /* Classical Preset */
-                                       15, -6, 7, 13, 10,   /* Dance Preset */
-                                       0, 0, 0, 0, 0,       /* Flat Preset */
-                                       6, -2, -2, 6, -3,    /* Folk Preset */
-                                       8, -8, 13, -1, -4,   /* Heavy Metal Preset */
-                                       10, 6, -4, 5, 8,     /* Hip Hop Preset */
-                                       8, 5, -4, 5, 9,      /* Jazz Preset */
-                                      -6, 4, 9, 4, -5,      /* Pop Preset */
-                                       10, 6, -1, 8, 10};   /* Rock Preset */
+static const LVM_INT16 EQNB_5BandNormalPresets[] = {3,  0,  0,  0,  3,   /* Normal Preset */
+                                                    8,  5,  -3, 5,  6,   /* Classical Preset */
+                                                    15, -6, 7,  13, 10,  /* Dance Preset */
+                                                    0,  0,  0,  0,  0,   /* Flat Preset */
+                                                    6,  -2, -2, 6,  -3,  /* Folk Preset */
+                                                    8,  -8, 13, -1, -4,  /* Heavy Metal Preset */
+                                                    10, 6,  -4, 5,  8,   /* Hip Hop Preset */
+                                                    8,  5,  -4, 5,  9,   /* Jazz Preset */
+                                                    -6, 4,  9,  4,  -5,  /* Pop Preset */
+                                                    10, 6,  -1, 8,  10}; /* Rock Preset */
 
-static const LVM_INT16 EQNB_5BandSoftPresets[] = {
-                                        3, 0, 0, 0, 3,      /* Normal Preset */
-                                        5, 3, -2, 4, 4,     /* Classical Preset */
-                                        6, 0, 2, 4, 1,      /* Dance Preset */
-                                        0, 0, 0, 0, 0,      /* Flat Preset */
-                                        3, 0, 0, 2, -1,     /* Folk Preset */
-                                        4, 1, 9, 3, 0,      /* Heavy Metal Preset */
-                                        5, 3, 0, 1, 3,      /* Hip Hop Preset */
-                                        4, 2, -2, 2, 5,     /* Jazz Preset */
-                                       -1, 2, 5, 1, -2,     /* Pop Preset */
-                                        5, 3, -1, 3, 5};    /* Rock Preset */
+static const LVM_INT16 EQNB_5BandSoftPresets[] = {3,  0, 0,  0, 3,  /* Normal Preset */
+                                                  5,  3, -2, 4, 4,  /* Classical Preset */
+                                                  6,  0, 2,  4, 1,  /* Dance Preset */
+                                                  0,  0, 0,  0, 0,  /* Flat Preset */
+                                                  3,  0, 0,  2, -1, /* Folk Preset */
+                                                  4,  1, 9,  3, 0,  /* Heavy Metal Preset */
+                                                  5,  3, 0,  1, 3,  /* Hip Hop Preset */
+                                                  4,  2, -2, 2, 5,  /* Jazz Preset */
+                                                  -1, 2, 5,  1, -2, /* Pop Preset */
+                                                  5,  3, -1, 3, 5}; /* Rock Preset */
 
-static const PresetConfig gEqualizerPresets[] = {
-                                        {"Normal"},
-                                        {"Classical"},
-                                        {"Dance"},
-                                        {"Flat"},
-                                        {"Folk"},
-                                        {"Heavy Metal"},
-                                        {"Hip Hop"},
-                                        {"Jazz"},
-                                        {"Pop"},
-                                        {"Rock"}};
+static const PresetConfig gEqualizerPresets[] = {{"Normal"}, {"Classical"},   {"Dance"},   {"Flat"},
+                                                 {"Folk"},   {"Heavy Metal"}, {"Hip Hop"}, {"Jazz"},
+                                                 {"Pop"},    {"Rock"}};
 
 /* The following tables have been computed using the actual levels measured by the output of
  * white noise or pink noise (IEC268-1) for the EQ and BassBoost Effects. These are estimates of
@@ -201,14 +173,14 @@
  * updated.
  */
 
-static const float LimitLevel_bandEnergyCoefficient[FIVEBAND_NUMBANDS] = {
-        7.56, 9.69, 9.59, 7.37, 2.88};
+static const float LimitLevel_bandEnergyCoefficient[FIVEBAND_NUMBANDS] = {7.56, 9.69, 9.59, 7.37,
+                                                                          2.88};
 
-static const float LimitLevel_bandEnergyCrossCoefficient[FIVEBAND_NUMBANDS-1] = {
-        126.0, 115.0, 125.0, 104.0 };
+static const float LimitLevel_bandEnergyCrossCoefficient[FIVEBAND_NUMBANDS - 1] = {126.0, 115.0,
+                                                                                   125.0, 104.0};
 
 static const float LimitLevel_bassBoostEnergyCrossCoefficient[FIVEBAND_NUMBANDS] = {
-        221.21, 208.10, 28.16, 0.0, 0.0 };
+        221.21, 208.10, 28.16, 0.0, 0.0};
 
 static const float LimitLevel_bassBoostEnergyCoefficient = 9.00;
 
diff --git a/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp b/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp
index 39f5bb6..9ea70ce 100644
--- a/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp
+++ b/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp
@@ -18,7 +18,7 @@
 typedef float LVM_FLOAT;
 #endif
 #define LOG_TAG "Reverb"
-#define ARRAY_SIZE(array) (sizeof (array) / sizeof (array)[0])
+#define ARRAY_SIZE(array) (sizeof(array) / sizeof(array)[0])
 //#define LOG_NDEBUG 0
 
 #include <assert.h>
@@ -37,19 +37,23 @@
 // effect_handle_t interface implementation for reverb
 extern "C" const struct effect_interface_s gReverbInterface;
 
-#define LVM_ERROR_CHECK(LvmStatus, callingFunc, calledFunc){\
-        if ((LvmStatus) == LVREV_NULLADDRESS){\
-            ALOGV("\tLVREV_ERROR : Parameter error - "\
-                    "null pointer returned by %s in %s\n\n\n\n", callingFunc, calledFunc);\
-        }\
-        if ((LvmStatus) == LVREV_INVALIDNUMSAMPLES){\
-            ALOGV("\tLVREV_ERROR : Parameter error - "\
-                    "bad number of samples returned by %s in %s\n\n\n\n", callingFunc, calledFunc);\
-        }\
-        if ((LvmStatus) == LVREV_OUTOFRANGE){\
-            ALOGV("\tLVREV_ERROR : Parameter error - "\
-                    "out of range returned by %s in %s\n", callingFunc, calledFunc);\
-        }\
+#define LVM_ERROR_CHECK(LvmStatus, callingFunc, calledFunc)             \
+    {                                                                   \
+        if ((LvmStatus) == LVREV_NULLADDRESS) {                         \
+            ALOGV("\tLVREV_ERROR : Parameter error - "                  \
+                  "null pointer returned by %s in %s\n\n\n\n",          \
+                  callingFunc, calledFunc);                             \
+        }                                                               \
+        if ((LvmStatus) == LVREV_INVALIDNUMSAMPLES) {                   \
+            ALOGV("\tLVREV_ERROR : Parameter error - "                  \
+                  "bad number of samples returned by %s in %s\n\n\n\n", \
+                  callingFunc, calledFunc);                             \
+        }                                                               \
+        if ((LvmStatus) == LVREV_OUTOFRANGE) {                          \
+            ALOGV("\tLVREV_ERROR : Parameter error - "                  \
+                  "out of range returned by %s in %s\n",                \
+                  callingFunc, calledFunc);                             \
+        }                                                               \
     }
 
 // Namespaces
@@ -81,8 +85,8 @@
 
 // NXP SW auxiliary environmental reverb
 const effect_descriptor_t gAuxEnvReverbDescriptor = {
-        { 0xc2e5d5f0, 0x94bd, 0x4763, 0x9cac, { 0x4e, 0x23, 0x4d, 0x06, 0x83, 0x9e } },
-        { 0x4a387fc0, 0x8ab3, 0x11df, 0x8bad, { 0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b } },
+        {0xc2e5d5f0, 0x94bd, 0x4763, 0x9cac, {0x4e, 0x23, 0x4d, 0x06, 0x83, 0x9e}},
+        {0x4a387fc0, 0x8ab3, 0x11df, 0x8bad, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},
         EFFECT_CONTROL_API_VERSION,
         EFFECT_FLAG_TYPE_AUXILIARY,
         LVREV_CUP_LOAD_ARM9E,
@@ -128,42 +132,39 @@
 };
 
 // gDescriptors contains pointers to all defined effect descriptor in this library
-static const effect_descriptor_t * const gDescriptors[] = {
-        &gAuxEnvReverbDescriptor,
-        &gInsertEnvReverbDescriptor,
-        &gAuxPresetReverbDescriptor,
-        &gInsertPresetReverbDescriptor
-};
+static const effect_descriptor_t* const gDescriptors[] = {
+        &gAuxEnvReverbDescriptor, &gInsertEnvReverbDescriptor, &gAuxPresetReverbDescriptor,
+        &gInsertPresetReverbDescriptor};
 
-typedef     float               process_buffer_t; // process in float
+typedef float process_buffer_t;  // process in float
 
-struct ReverbContext{
-    const struct effect_interface_s *itfe;
-    effect_config_t                 config;
-    LVREV_Handle_t                  hInstance;
-    int16_t                         SavedRoomLevel;
-    int16_t                         SavedHfLevel;
-    int16_t                         SavedDecayTime;
-    int16_t                         SavedDecayHfRatio;
-    int16_t                         SavedReverbLevel;
-    int16_t                         SavedDiffusion;
-    int16_t                         SavedDensity;
-    bool                            bEnabled;
-    LVM_Fs_en                       SampleRate;
-    process_buffer_t                *InFrames;
-    process_buffer_t                *OutFrames;
-    size_t                          bufferSizeIn;
-    size_t                          bufferSizeOut;
-    bool                            auxiliary;
-    bool                            preset;
-    uint16_t                        curPreset;
-    uint16_t                        nextPreset;
-    int                             SamplesToExitCount;
-    LVM_INT16                       leftVolume;
-    LVM_INT16                       rightVolume;
-    LVM_INT16                       prevLeftVolume;
-    LVM_INT16                       prevRightVolume;
-    int                             volumeMode;
+struct ReverbContext {
+    const struct effect_interface_s* itfe;
+    effect_config_t config;
+    LVREV_Handle_t hInstance;
+    int16_t SavedRoomLevel;
+    int16_t SavedHfLevel;
+    int16_t SavedDecayTime;
+    int16_t SavedDecayHfRatio;
+    int16_t SavedReverbLevel;
+    int16_t SavedDiffusion;
+    int16_t SavedDensity;
+    bool bEnabled;
+    LVM_Fs_en SampleRate;
+    process_buffer_t* InFrames;
+    process_buffer_t* OutFrames;
+    size_t bufferSizeIn;
+    size_t bufferSizeOut;
+    bool auxiliary;
+    bool preset;
+    uint16_t curPreset;
+    uint16_t nextPreset;
+    int SamplesToExitCount;
+    LVM_INT16 leftVolume;
+    LVM_INT16 rightVolume;
+    LVM_INT16 prevLeftVolume;
+    LVM_INT16 prevRightVolume;
+    int volumeMode;
 };
 
 enum {
@@ -174,44 +175,38 @@
 
 #define REVERB_DEFAULT_PRESET REVERB_PRESET_NONE
 
-#define REVERB_SEND_LEVEL   0.75f // 0.75 in 4.12 format
-#define REVERB_UNIT_VOLUME  (0x1000) // 1.0 in 4.12 format
+#define REVERB_SEND_LEVEL 0.75f      // 0.75 in 4.12 format
+#define REVERB_UNIT_VOLUME (0x1000)  // 1.0 in 4.12 format
 
 //--- local function prototypes
-int  Reverb_init            (ReverbContext *pContext);
-void Reverb_free            (ReverbContext *pContext);
-int  Reverb_setConfig       (ReverbContext *pContext, effect_config_t *pConfig);
-void Reverb_getConfig       (ReverbContext *pContext, effect_config_t *pConfig);
-int  Reverb_setParameter    (ReverbContext *pContext, void *pParam, void *pValue, int vsize);
-int  Reverb_getParameter    (ReverbContext *pContext,
-                             void          *pParam,
-                             uint32_t      *pValueSize,
-                             void          *pValue);
-int Reverb_LoadPreset       (ReverbContext   *pContext);
-int Reverb_paramValueSize   (int32_t param);
+int Reverb_init(ReverbContext* pContext);
+void Reverb_free(ReverbContext* pContext);
+int Reverb_setConfig(ReverbContext* pContext, effect_config_t* pConfig);
+void Reverb_getConfig(ReverbContext* pContext, effect_config_t* pConfig);
+int Reverb_setParameter(ReverbContext* pContext, void* pParam, void* pValue, int vsize);
+int Reverb_getParameter(ReverbContext* pContext, void* pParam, uint32_t* pValueSize, void* pValue);
+int Reverb_LoadPreset(ReverbContext* pContext);
+int Reverb_paramValueSize(int32_t param);
 
 /* Effect Library Interface Implementation */
 
-extern "C" int EffectCreate(const effect_uuid_t *uuid,
-                            int32_t             sessionId __unused,
-                            int32_t             ioId __unused,
-                            effect_handle_t  *pHandle){
+extern "C" int EffectCreate(const effect_uuid_t* uuid, int32_t sessionId __unused,
+                            int32_t ioId __unused, effect_handle_t* pHandle) {
     int ret;
     int i;
-    int length = sizeof(gDescriptors) / sizeof(const effect_descriptor_t *);
-    const effect_descriptor_t *desc;
+    int length = sizeof(gDescriptors) / sizeof(const effect_descriptor_t*);
+    const effect_descriptor_t* desc;
 
     ALOGV("\t\nEffectCreate start");
 
-    if (pHandle == NULL || uuid == NULL){
+    if (pHandle == NULL || uuid == NULL) {
         ALOGV("\tLVM_ERROR : EffectCreate() called with NULL pointer");
         return -EINVAL;
     }
 
     for (i = 0; i < length; i++) {
         desc = gDescriptors[i];
-        if (memcmp(uuid, &desc->uuid, sizeof(effect_uuid_t))
-                == 0) {
+        if (memcmp(uuid, &desc->uuid, sizeof(effect_uuid_t)) == 0) {
             ALOGV("\tEffectCreate - UUID matched Reverb type %d, UUID = %x", i, desc->uuid.timeLow);
             break;
         }
@@ -221,16 +216,16 @@
         return -ENOENT;
     }
 
-    ReverbContext *pContext = new ReverbContext;
+    ReverbContext* pContext = new ReverbContext;
 
-    pContext->itfe      = &gReverbInterface;
+    pContext->itfe = &gReverbInterface;
     pContext->hInstance = NULL;
 
     pContext->auxiliary = false;
-    if ((desc->flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_AUXILIARY){
+    if ((desc->flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_AUXILIARY) {
         pContext->auxiliary = true;
         ALOGV("\tEffectCreate - AUX");
-    }else{
+    } else {
         ALOGV("\tEffectCreate - INS");
     }
 
@@ -241,14 +236,14 @@
         pContext->curPreset = REVERB_PRESET_LAST + 1;
         pContext->nextPreset = REVERB_DEFAULT_PRESET;
         ALOGV("\tEffectCreate - PRESET");
-    }else{
+    } else {
         ALOGV("\tEffectCreate - ENVIRONMENTAL");
     }
 
     ALOGV("\tEffectCreate - Calling Reverb_init");
     ret = Reverb_init(pContext);
 
-    if (ret < 0){
+    if (ret < 0) {
         ALOGV("\tLVM_ERROR : EffectCreate() init failed");
         delete pContext;
         return ret;
@@ -256,25 +251,25 @@
 
     *pHandle = (effect_handle_t)pContext;
 
-
     int channels = audio_channel_count_from_out_mask(pContext->config.inputCfg.channels);
 
+    channels = (pContext->auxiliary == true) ? channels : FCC_2;
     // Allocate memory for reverb process (*2 is for STEREO)
     pContext->bufferSizeIn = LVREV_MAX_FRAME_SIZE * sizeof(process_buffer_t) * channels;
     pContext->bufferSizeOut = LVREV_MAX_FRAME_SIZE * sizeof(process_buffer_t) * FCC_2;
-    pContext->InFrames  = (process_buffer_t *)calloc(pContext->bufferSizeIn, 1 /* size */);
-    pContext->OutFrames = (process_buffer_t *)calloc(pContext->bufferSizeOut, 1 /* size */);
+    pContext->InFrames = (process_buffer_t*)calloc(pContext->bufferSizeIn, 1 /* size */);
+    pContext->OutFrames = (process_buffer_t*)calloc(pContext->bufferSizeOut, 1 /* size */);
 
     ALOGV("\tEffectCreate %p, size %zu", pContext, sizeof(ReverbContext));
     ALOGV("\tEffectCreate end\n");
     return 0;
 } /* end EffectCreate */
 
-extern "C" int EffectRelease(effect_handle_t handle){
-    ReverbContext * pContext = (ReverbContext *)handle;
+extern "C" int EffectRelease(effect_handle_t handle) {
+    ReverbContext* pContext = (ReverbContext*)handle;
 
     ALOGV("\tEffectRelease %p", handle);
-    if (pContext == NULL){
+    if (pContext == NULL) {
         ALOGV("\tLVM_ERROR : EffectRelease called with NULL pointer");
         return -EINVAL;
     }
@@ -288,12 +283,11 @@
     return 0;
 } /* end EffectRelease */
 
-extern "C" int EffectGetDescriptor(const effect_uuid_t *uuid,
-                                   effect_descriptor_t *pDescriptor) {
+extern "C" int EffectGetDescriptor(const effect_uuid_t* uuid, effect_descriptor_t* pDescriptor) {
     int i;
-    int length = sizeof(gDescriptors) / sizeof(const effect_descriptor_t *);
+    int length = sizeof(gDescriptors) / sizeof(const effect_descriptor_t*);
 
-    if (pDescriptor == NULL || uuid == NULL){
+    if (pDescriptor == NULL || uuid == NULL) {
         ALOGV("EffectGetDescriptor() called with NULL pointer");
         return -EINVAL;
     }
@@ -301,8 +295,8 @@
     for (i = 0; i < length; i++) {
         if (memcmp(uuid, &gDescriptors[i]->uuid, sizeof(effect_uuid_t)) == 0) {
             *pDescriptor = *gDescriptors[i];
-            ALOGV("EffectGetDescriptor - UUID matched Reverb type %d, UUID = %x",
-                 i, gDescriptors[i]->uuid.timeLow);
+            ALOGV("EffectGetDescriptor - UUID matched Reverb type %d, UUID = %x", i,
+                  gDescriptors[i]->uuid.timeLow);
             return 0;
         }
     }
@@ -311,12 +305,13 @@
 } /* end EffectGetDescriptor */
 
 /* local functions */
-#define CHECK_ARG(cond) {                     \
-    if (!(cond)) {                            \
-        ALOGV("\tLVM_ERROR : Invalid argument: "#cond);      \
-        return -EINVAL;                       \
-    }                                         \
-}
+#define CHECK_ARG(cond)                                      \
+    {                                                        \
+        if (!(cond)) {                                       \
+            ALOGV("\tLVM_ERROR : Invalid argument: " #cond); \
+            return -EINVAL;                                  \
+        }                                                    \
+    }
 
 //----------------------------------------------------------------------------
 // process()
@@ -335,91 +330,100 @@
 //  pOut:       pointer to updated stereo 16 bit output data
 //
 //----------------------------------------------------------------------------
-int process( effect_buffer_t   *pIn,
-             effect_buffer_t   *pOut,
-             int           frameCount,
-             ReverbContext *pContext){
-
+int process(effect_buffer_t* pIn, effect_buffer_t* pOut, int frameCount, ReverbContext* pContext) {
     int channels = audio_channel_count_from_out_mask(pContext->config.inputCfg.channels);
-    LVREV_ReturnStatus_en   LvmStatus = LVREV_SUCCESS;              /* Function call status */
+    LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */
 
-    // Check that the input is either mono or stereo
-    if (!(channels == 1 || channels == FCC_2) ) {
-        ALOGE("\tLVREV_ERROR : process invalid PCM format");
+    // Reverb only affects the stereo channels in a multichannel source.
+    if (channels < 1 || channels > LVM_MAX_CHANNELS) {
+        ALOGE("\tLVREV_ERROR : process invalid PCM channels %d", channels);
         return -EINVAL;
     }
 
     size_t inSize = frameCount * sizeof(process_buffer_t) * channels;
     size_t outSize = frameCount * sizeof(process_buffer_t) * FCC_2;
-    if (pContext->InFrames == NULL ||
-            pContext->bufferSizeIn < inSize) {
+    if (pContext->InFrames == NULL || pContext->bufferSizeIn < inSize) {
         free(pContext->InFrames);
         pContext->bufferSizeIn = inSize;
-        pContext->InFrames = (process_buffer_t *)calloc(1, pContext->bufferSizeIn);
+        pContext->InFrames = (process_buffer_t*)calloc(1, pContext->bufferSizeIn);
     }
-    if (pContext->OutFrames == NULL ||
-            pContext->bufferSizeOut < outSize) {
+    if (pContext->OutFrames == NULL || pContext->bufferSizeOut < outSize) {
         free(pContext->OutFrames);
         pContext->bufferSizeOut = outSize;
-        pContext->OutFrames = (process_buffer_t *)calloc(1, pContext->bufferSizeOut);
+        pContext->OutFrames = (process_buffer_t*)calloc(1, pContext->bufferSizeOut);
     }
 
-
     // Check for NULL pointers
     if ((pContext->InFrames == NULL) || (pContext->OutFrames == NULL)) {
         ALOGE("\tLVREV_ERROR : process failed to allocate memory for temporary buffers ");
         return -EINVAL;
     }
 
-
     if (pContext->preset && pContext->nextPreset != pContext->curPreset) {
         Reverb_LoadPreset(pContext);
     }
 
     if (pContext->auxiliary) {
         static_assert(std::is_same<decltype(*pIn), decltype(*pContext->InFrames)>::value,
-                "pIn and InFrames must be same type");
+                      "pIn and InFrames must be same type");
         memcpy(pContext->InFrames, pIn, frameCount * channels * sizeof(*pIn));
+    } else {
+        // insert reverb input is stereo: take the first two channels; a mono input is duplicated
+        if (channels >= FCC_2) {
+            for (int i = 0; i < frameCount; i++) {
+                pContext->InFrames[FCC_2 * i] =
+                        (process_buffer_t)pIn[channels * i] * REVERB_SEND_LEVEL;
+                pContext->InFrames[FCC_2 * i + 1] =
+                        (process_buffer_t)pIn[channels * i + 1] * REVERB_SEND_LEVEL;
+            }
         } else {
-        // insert reverb input is always stereo
-        for (int i = 0; i < frameCount; i++) {
-            pContext->InFrames[2 * i] = (process_buffer_t)pIn[2 * i] * REVERB_SEND_LEVEL;
-            pContext->InFrames[2 * i + 1] = (process_buffer_t)pIn[2 * i + 1] * REVERB_SEND_LEVEL;
+            for (int i = 0; i < frameCount; i++) {
+                pContext->InFrames[FCC_2 * i] = pContext->InFrames[FCC_2 * i + 1] =
+                        (process_buffer_t)pIn[i] * REVERB_SEND_LEVEL;
+            }
         }
     }
 
     if (pContext->preset && pContext->curPreset == REVERB_PRESET_NONE) {
         memset(pContext->OutFrames, 0,
-                frameCount * sizeof(*pContext->OutFrames) * FCC_2); //always stereo here
+               frameCount * sizeof(*pContext->OutFrames) * FCC_2);  // always stereo here
     } else {
-        if(pContext->bEnabled == LVM_FALSE && pContext->SamplesToExitCount > 0) {
-            memset(pContext->InFrames, 0,
-                    frameCount * sizeof(*pContext->OutFrames) * channels);
+        if (pContext->bEnabled == LVM_FALSE && pContext->SamplesToExitCount > 0) {
+            memset(pContext->InFrames, 0, frameCount * sizeof(*pContext->OutFrames) * channels);
             ALOGV("\tZeroing %d samples per frame at the end of call", channels);
         }
 
         /* Process the samples, producing a stereo output */
-        LvmStatus = LVREV_Process(pContext->hInstance,      /* Instance handle */
-                                  pContext->InFrames,     /* Input buffer */
-                                  pContext->OutFrames,    /* Output buffer */
-                                  frameCount);              /* Number of samples to read */
+        LvmStatus = LVREV_Process(pContext->hInstance, /* Instance handle */
+                                  pContext->InFrames,  /* Input buffer */
+                                  pContext->OutFrames, /* Output buffer */
+                                  frameCount);         /* Number of samples to read */
     }
 
     LVM_ERROR_CHECK(LvmStatus, "LVREV_Process", "process")
-    if(LvmStatus != LVREV_SUCCESS) return -EINVAL;
+    if (LvmStatus != LVREV_SUCCESS) return -EINVAL;
 
     // Convert to 16 bits
     if (pContext->auxiliary) {
         // nothing to do here
     } else {
-        for (int i = 0; i < frameCount * FCC_2; i++) { // always stereo here
-            // Mix with dry input
-            pContext->OutFrames[i] += pIn[i];
+        if (channels >= FCC_2) {
+            for (int i = 0; i < frameCount; i++) {
+                // Mix with dry input
+                pContext->OutFrames[FCC_2 * i] += pIn[channels * i];
+                pContext->OutFrames[FCC_2 * i + 1] += pIn[channels * i + 1];
+            }
+        } else {
+            for (int i = 0; i < frameCount; i++) {
+                // Mix with dry input
+                pContext->OutFrames[FCC_2 * i] += pIn[i];
+                pContext->OutFrames[FCC_2 * i + 1] += pIn[i];
+            }
         }
         // apply volume with ramp if needed
         if ((pContext->leftVolume != pContext->prevLeftVolume ||
-                pContext->rightVolume != pContext->prevRightVolume) &&
-                pContext->volumeMode == REVERB_VOLUME_RAMP) {
+             pContext->rightVolume != pContext->prevRightVolume) &&
+            pContext->volumeMode == REVERB_VOLUME_RAMP) {
             // FIXME: still using int16 volumes.
             // For reference: REVERB_UNIT_VOLUME  (0x1000) // 1.0 in 4.12 format
             float vl = (float)pContext->prevLeftVolume / 4096;
@@ -450,20 +454,35 @@
         }
     }
 
-
-    // Accumulate if required
-    if (pContext->config.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE){
-        //ALOGV("\tBuffer access is ACCUMULATE");
-        for (int i = 0; i < frameCount * FCC_2; i++) { // always stereo here
-            pOut[i] += pContext->OutFrames[i];
+    if (channels > 2) {
+        // Accumulate if required
+        if (pContext->config.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE) {
+            for (int i = 0; i < frameCount; i++) {
+                pOut[channels * i] += pContext->OutFrames[FCC_2 * i];
+                pOut[channels * i + 1] += pContext->OutFrames[FCC_2 * i + 1];
+            }
+        } else {
+            for (int i = 0; i < frameCount; i++) {
+                pOut[channels * i] = pContext->OutFrames[FCC_2 * i];
+                pOut[channels * i + 1] = pContext->OutFrames[FCC_2 * i + 1];
+            }
         }
-    }else{
-        //ALOGV("\tBuffer access is WRITE");
-        memcpy(pOut, pContext->OutFrames, frameCount * sizeof(*pOut) * FCC_2);
+        for (int i = 0; i < frameCount; i++) {
+            for (int j = FCC_2; j < channels; j++) {
+                pOut[channels * i + j] = pIn[channels * i + j];
+            }
+        }
+    } else {
+        if (pContext->config.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE) {
+            for (int i = 0; i < frameCount * FCC_2; i++) {
+                pOut[i] += pContext->OutFrames[i];
+            }
+        } else {
+            memcpy(pOut, pContext->OutFrames, frameCount * sizeof(*pOut) * FCC_2);
+        }
     }
-
     return 0;
-}    /* end process */
+} /* end process */
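The reworked process() runs the reverb itself only on a stereo pair: the first two input channels feed the wet path (a mono input is duplicated), the wet result is mixed with the dry pair, and any channels beyond the pair are passed through untouched. A minimal standalone sketch of that routing for the non-accumulating output path, assuming float samples and at least two channels; the helper name and the plain arrays are illustrative only.

#include <cstddef>

static constexpr int FCC_2 = 2;  // the wet (reverb) path is always a stereo pair

// Collapses the dry-mix stage and the channels > 2 write stage of process()
// into one loop: channels 0..1 of each output frame get the stereo wet signal
// mixed with the corresponding dry samples, channels 2..channels-1 are copied
// through dry. Assumes channels >= 2.
void MixWetIntoMultichannel(const float* dryIn, const float* wetStereo, float* out,
                            size_t frameCount, int channels) {
    for (size_t i = 0; i < frameCount; ++i) {
        out[channels * i] = wetStereo[FCC_2 * i] + dryIn[channels * i];
        out[channels * i + 1] = wetStereo[FCC_2 * i + 1] + dryIn[channels * i + 1];
        for (int j = FCC_2; j < channels; ++j) {
            out[channels * i + j] = dryIn[channels * i + j];  // dry pass-through
        }
    }
}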
 
 //----------------------------------------------------------------------------
 // Reverb_free()
@@ -477,30 +496,28 @@
 //
 //----------------------------------------------------------------------------
 
-void Reverb_free(ReverbContext *pContext){
-
-    LVREV_ReturnStatus_en     LvmStatus=LVREV_SUCCESS;         /* Function call status */
-    LVREV_MemoryTable_st      MemTab;
+void Reverb_free(ReverbContext* pContext) {
+    LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */
+    LVREV_MemoryTable_st MemTab;
 
     /* Free the algorithm memory */
-    LvmStatus = LVREV_GetMemoryTable(pContext->hInstance,
-                                   &MemTab,
-                                   LVM_NULL);
+    LvmStatus = LVREV_GetMemoryTable(pContext->hInstance, &MemTab, LVM_NULL);
 
     LVM_ERROR_CHECK(LvmStatus, "LVM_GetMemoryTable", "Reverb_free")
 
-    for (int i=0; i<LVM_NR_MEMORY_REGIONS; i++){
-        if (MemTab.Region[i].Size != 0){
-            if (MemTab.Region[i].pBaseAddress != NULL){
+    for (int i = 0; i < LVM_NR_MEMORY_REGIONS; i++) {
+        if (MemTab.Region[i].Size != 0) {
+            if (MemTab.Region[i].pBaseAddress != NULL) {
                 free(MemTab.Region[i].pBaseAddress);
-            }else{
-                ALOGV("\tLVM_ERROR : free() - trying to free with NULL pointer %" PRIu32 " bytes "
-                        "for region %u at %p ERROR\n",
-                        MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
+            } else {
+                ALOGV("\tLVM_ERROR : free() - trying to free with NULL pointer %" PRIu32
+                      " bytes "
+                      "for region %u at %p ERROR\n",
+                      MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
             }
         }
     }
-}    /* end Reverb_free */
+} /* end Reverb_free */
 
 //----------------------------------------------------------------------------
 // Reverb_setConfig()
@@ -516,89 +533,89 @@
 //
 //----------------------------------------------------------------------------
 
-int Reverb_setConfig(ReverbContext *pContext, effect_config_t *pConfig){
-    LVM_Fs_en   SampleRate;
-    //ALOGV("\tReverb_setConfig start");
+int Reverb_setConfig(ReverbContext* pContext, effect_config_t* pConfig) {
+    LVM_Fs_en SampleRate;
+    // ALOGV("\tReverb_setConfig start");
 
     CHECK_ARG(pContext != NULL);
     CHECK_ARG(pConfig != NULL);
 
     CHECK_ARG(pConfig->inputCfg.samplingRate == pConfig->outputCfg.samplingRate);
     CHECK_ARG(pConfig->inputCfg.format == pConfig->outputCfg.format);
+    int inputChannels = audio_channel_count_from_out_mask(pConfig->inputCfg.channels);
     CHECK_ARG((pContext->auxiliary && pConfig->inputCfg.channels == AUDIO_CHANNEL_OUT_MONO) ||
-              ((!pContext->auxiliary) && pConfig->inputCfg.channels == AUDIO_CHANNEL_OUT_STEREO));
-    CHECK_ARG(pConfig->outputCfg.channels == AUDIO_CHANNEL_OUT_STEREO);
-    CHECK_ARG(pConfig->outputCfg.accessMode == EFFECT_BUFFER_ACCESS_WRITE
-              || pConfig->outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE);
+              ((!pContext->auxiliary) && (inputChannels <= LVM_MAX_CHANNELS)));
+    int outputChannels = audio_channel_count_from_out_mask(pConfig->outputCfg.channels);
+    CHECK_ARG(outputChannels >= FCC_2 && outputChannels <= LVM_MAX_CHANNELS);
+    CHECK_ARG(pConfig->outputCfg.accessMode == EFFECT_BUFFER_ACCESS_WRITE ||
+              pConfig->outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE);
     CHECK_ARG(pConfig->inputCfg.format == EFFECT_BUFFER_FORMAT);
-    //ALOGV("\tReverb_setConfig calling memcpy");
+    // ALOGV("\tReverb_setConfig calling memcpy");
     pContext->config = *pConfig;
 
     switch (pConfig->inputCfg.samplingRate) {
-    case 8000:
-        SampleRate = LVM_FS_8000;
-        break;
-    case 16000:
-        SampleRate = LVM_FS_16000;
-        break;
-    case 22050:
-        SampleRate = LVM_FS_22050;
-        break;
-    case 32000:
-        SampleRate = LVM_FS_32000;
-        break;
-    case 44100:
-        SampleRate = LVM_FS_44100;
-        break;
-    case 48000:
-        SampleRate = LVM_FS_48000;
-        break;
-    case 88200:
-        SampleRate = LVM_FS_88200;
-        break;
-    case 96000:
-        SampleRate = LVM_FS_96000;
-        break;
-    case 176400:
-        SampleRate = LVM_FS_176400;
-        break;
-    case 192000:
-        SampleRate = LVM_FS_192000;
-        break;
-    default:
-        ALOGV("\rReverb_setConfig invalid sampling rate %d", pConfig->inputCfg.samplingRate);
-        return -EINVAL;
+        case 8000:
+            SampleRate = LVM_FS_8000;
+            break;
+        case 16000:
+            SampleRate = LVM_FS_16000;
+            break;
+        case 22050:
+            SampleRate = LVM_FS_22050;
+            break;
+        case 32000:
+            SampleRate = LVM_FS_32000;
+            break;
+        case 44100:
+            SampleRate = LVM_FS_44100;
+            break;
+        case 48000:
+            SampleRate = LVM_FS_48000;
+            break;
+        case 88200:
+            SampleRate = LVM_FS_88200;
+            break;
+        case 96000:
+            SampleRate = LVM_FS_96000;
+            break;
+        case 176400:
+            SampleRate = LVM_FS_176400;
+            break;
+        case 192000:
+            SampleRate = LVM_FS_192000;
+            break;
+        default:
+            ALOGV("\rReverb_setConfig invalid sampling rate %d", pConfig->inputCfg.samplingRate);
+            return -EINVAL;
     }
 
     if (pContext->SampleRate != SampleRate) {
+        LVREV_ControlParams_st ActiveParams;
+        LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS;
 
-        LVREV_ControlParams_st    ActiveParams;
-        LVREV_ReturnStatus_en     LvmStatus = LVREV_SUCCESS;
-
-        //ALOGV("\tReverb_setConfig change sampling rate to %d", SampleRate);
+        // ALOGV("\tReverb_setConfig change sampling rate to %d", SampleRate);
 
         /* Get the current settings */
-        LvmStatus = LVREV_GetControlParameters(pContext->hInstance,
-                                         &ActiveParams);
+        LvmStatus = LVREV_GetControlParameters(pContext->hInstance, &ActiveParams);
 
         LVM_ERROR_CHECK(LvmStatus, "LVREV_GetControlParameters", "Reverb_setConfig")
-        if(LvmStatus != LVREV_SUCCESS) return -EINVAL;
+        if (LvmStatus != LVREV_SUCCESS) return -EINVAL;
 
         ActiveParams.SampleRate = SampleRate;
 
         LvmStatus = LVREV_SetControlParameters(pContext->hInstance, &ActiveParams);
 
         LVM_ERROR_CHECK(LvmStatus, "LVREV_SetControlParameters", "Reverb_setConfig")
-        if(LvmStatus != LVREV_SUCCESS) return -EINVAL;
-        //ALOGV("\tReverb_setConfig Succesfully called LVREV_SetControlParameters\n");
+        if (LvmStatus != LVREV_SUCCESS) return -EINVAL;
+        // ALOGV("\tReverb_setConfig Successfully called LVREV_SetControlParameters\n");
         pContext->SampleRate = SampleRate;
-    }else{
-        //ALOGV("\tReverb_setConfig keep sampling rate at %d", SampleRate);
+    } else {
+        // ALOGV("\tReverb_setConfig keep sampling rate at %d", SampleRate);
     }
 
-    //ALOGV("\tReverb_setConfig End");
+    // ALOGV("\tReverb_setConfig End");
     return 0;
-}   /* end Reverb_setConfig */
+} /* end Reverb_setConfig */
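The channel checks above are what open the effect up to multichannel insert configurations: an auxiliary instance still requires a mono send, an insert instance now accepts anything up to LVM_MAX_CHANNELS, and the output must carry at least the stereo pair the reverb produces. A small sketch of that validation, with a stand-in constant because the real LVM_MAX_CHANNELS value comes from the LVM headers:

static constexpr int FCC_2 = 2;
static constexpr int kMaxChannels = 8;  // stand-in for LVM_MAX_CHANNELS in this sketch

// Mirrors the CHECK_ARG channel constraints in Reverb_setConfig().
bool ChannelConfigSupported(bool auxiliary, int inputChannels, int outputChannels) {
    const bool inputOk = auxiliary ? (inputChannels == 1) : (inputChannels <= kMaxChannels);
    const bool outputOk = (outputChannels >= FCC_2) && (outputChannels <= kMaxChannels);
    return inputOk && outputOk;
}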
 
 //----------------------------------------------------------------------------
 // Reverb_getConfig()
@@ -614,10 +631,9 @@
 //
 //----------------------------------------------------------------------------
 
-void Reverb_getConfig(ReverbContext *pContext, effect_config_t *pConfig)
-{
+void Reverb_getConfig(ReverbContext* pContext, effect_config_t* pConfig) {
     *pConfig = pContext->config;
-}   /* end Reverb_getConfig */
+} /* end Reverb_getConfig */
 
 //----------------------------------------------------------------------------
 // Reverb_init()
@@ -631,35 +647,35 @@
 //
 //----------------------------------------------------------------------------
 
-int Reverb_init(ReverbContext *pContext){
+int Reverb_init(ReverbContext* pContext) {
     ALOGV("\tReverb_init start");
 
     CHECK_ARG(pContext != NULL);
 
-    if (pContext->hInstance != NULL){
+    if (pContext->hInstance != NULL) {
         Reverb_free(pContext);
     }
 
-    pContext->config.inputCfg.accessMode                    = EFFECT_BUFFER_ACCESS_READ;
+    pContext->config.inputCfg.accessMode = EFFECT_BUFFER_ACCESS_READ;
     if (pContext->auxiliary) {
-        pContext->config.inputCfg.channels                  = AUDIO_CHANNEL_OUT_MONO;
+        pContext->config.inputCfg.channels = AUDIO_CHANNEL_OUT_MONO;
     } else {
-        pContext->config.inputCfg.channels                  = AUDIO_CHANNEL_OUT_STEREO;
+        pContext->config.inputCfg.channels = AUDIO_CHANNEL_OUT_STEREO;
     }
-    pContext->config.inputCfg.format                        = EFFECT_BUFFER_FORMAT;
-    pContext->config.inputCfg.samplingRate                  = 44100;
-    pContext->config.inputCfg.bufferProvider.getBuffer      = NULL;
-    pContext->config.inputCfg.bufferProvider.releaseBuffer  = NULL;
-    pContext->config.inputCfg.bufferProvider.cookie         = NULL;
-    pContext->config.inputCfg.mask                          = EFFECT_CONFIG_ALL;
-    pContext->config.outputCfg.accessMode                   = EFFECT_BUFFER_ACCESS_ACCUMULATE;
-    pContext->config.outputCfg.channels                     = AUDIO_CHANNEL_OUT_STEREO;
-    pContext->config.outputCfg.format                       = EFFECT_BUFFER_FORMAT;
-    pContext->config.outputCfg.samplingRate                 = 44100;
-    pContext->config.outputCfg.bufferProvider.getBuffer     = NULL;
+    pContext->config.inputCfg.format = EFFECT_BUFFER_FORMAT;
+    pContext->config.inputCfg.samplingRate = 44100;
+    pContext->config.inputCfg.bufferProvider.getBuffer = NULL;
+    pContext->config.inputCfg.bufferProvider.releaseBuffer = NULL;
+    pContext->config.inputCfg.bufferProvider.cookie = NULL;
+    pContext->config.inputCfg.mask = EFFECT_CONFIG_ALL;
+    pContext->config.outputCfg.accessMode = EFFECT_BUFFER_ACCESS_ACCUMULATE;
+    pContext->config.outputCfg.channels = AUDIO_CHANNEL_OUT_STEREO;
+    pContext->config.outputCfg.format = EFFECT_BUFFER_FORMAT;
+    pContext->config.outputCfg.samplingRate = 44100;
+    pContext->config.outputCfg.bufferProvider.getBuffer = NULL;
     pContext->config.outputCfg.bufferProvider.releaseBuffer = NULL;
-    pContext->config.outputCfg.bufferProvider.cookie        = NULL;
-    pContext->config.outputCfg.mask                         = EFFECT_CONFIG_ALL;
+    pContext->config.outputCfg.bufferProvider.cookie = NULL;
+    pContext->config.outputCfg.mask = EFFECT_CONFIG_ALL;
 
     pContext->leftVolume = REVERB_UNIT_VOLUME;
     pContext->rightVolume = REVERB_UNIT_VOLUME;
@@ -667,40 +683,39 @@
     pContext->prevRightVolume = REVERB_UNIT_VOLUME;
     pContext->volumeMode = REVERB_VOLUME_FLAT;
 
-    LVREV_ReturnStatus_en     LvmStatus=LVREV_SUCCESS;        /* Function call status */
-    LVREV_ControlParams_st    params;                         /* Control Parameters */
-    LVREV_InstanceParams_st   InstParams;                     /* Instance parameters */
-    LVREV_MemoryTable_st      MemTab;                         /* Memory allocation table */
-    bool                      bMallocFailure = LVM_FALSE;
+    LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */
+    LVREV_ControlParams_st params;                   /* Control Parameters */
+    LVREV_InstanceParams_st InstParams;              /* Instance parameters */
+    LVREV_MemoryTable_st MemTab;                     /* Memory allocation table */
+    bool bMallocFailure = LVM_FALSE;
 
     /* Set the capabilities */
-    InstParams.MaxBlockSize  = MAX_CALL_SIZE;
-    InstParams.SourceFormat  = LVM_STEREO;          // Max format, could be mono during process
-    InstParams.NumDelays     = LVREV_DELAYLINES_4;
+    InstParams.MaxBlockSize = MAX_CALL_SIZE;
+    InstParams.SourceFormat = LVM_STEREO;  // Max format, could be mono during process
+    InstParams.NumDelays = LVREV_DELAYLINES_4;
 
     /* Allocate memory, forcing alignment */
-    LvmStatus = LVREV_GetMemoryTable(LVM_NULL,
-                                  &MemTab,
-                                  &InstParams);
+    LvmStatus = LVREV_GetMemoryTable(LVM_NULL, &MemTab, &InstParams);
 
     LVM_ERROR_CHECK(LvmStatus, "LVREV_GetMemoryTable", "Reverb_init")
-    if(LvmStatus != LVREV_SUCCESS) return -EINVAL;
+    if (LvmStatus != LVREV_SUCCESS) return -EINVAL;
 
-    ALOGV("\tCreateInstance Succesfully called LVM_GetMemoryTable\n");
+    ALOGV("\tCreateInstance Successfully called LVM_GetMemoryTable\n");
 
     /* Allocate memory */
-    for (int i=0; i<LVM_NR_MEMORY_REGIONS; i++){
-        if (MemTab.Region[i].Size != 0){
-            MemTab.Region[i].pBaseAddress = malloc(MemTab.Region[i].Size);
+    for (int i = 0; i < LVM_NR_MEMORY_REGIONS; i++) {
+        if (MemTab.Region[i].Size != 0) {
+            MemTab.Region[i].pBaseAddress = calloc(1, MemTab.Region[i].Size);
 
-            if (MemTab.Region[i].pBaseAddress == LVM_NULL){
+            if (MemTab.Region[i].pBaseAddress == LVM_NULL) {
                 ALOGV("\tLVREV_ERROR :Reverb_init CreateInstance Failed to allocate %" PRIu32
-                        " bytes for region %u\n", MemTab.Region[i].Size, i );
+                      " bytes for region %u\n",
+                      MemTab.Region[i].Size, i);
                 bMallocFailure = LVM_TRUE;
-            }else{
+            } else {
                 ALOGV("\tReverb_init CreateInstance allocate %" PRIu32
-                        " bytes for region %u at %p\n",
-                        MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
+                      " bytes for region %u at %p\n",
+                      MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
             }
         }
     }
@@ -708,82 +723,83 @@
     /* If one or more of the memory regions failed to allocate, free the regions that were
      * succesfully allocated and return with an error
      */
-    if(bMallocFailure == LVM_TRUE){
-        for (int i=0; i<LVM_NR_MEMORY_REGIONS; i++){
-            if (MemTab.Region[i].pBaseAddress == LVM_NULL){
+    if (bMallocFailure == LVM_TRUE) {
+        for (int i = 0; i < LVM_NR_MEMORY_REGIONS; i++) {
+            if (MemTab.Region[i].pBaseAddress == LVM_NULL) {
                 ALOGV("\tLVM_ERROR :Reverb_init CreateInstance Failed to allocate %" PRIu32
-                        " bytes for region %u - Not freeing\n", MemTab.Region[i].Size, i );
-            }else{
+                      " bytes for region %u - Not freeing\n",
+                      MemTab.Region[i].Size, i);
+            } else {
                 ALOGV("\tLVM_ERROR :Reverb_init CreateInstance Failed: but allocated %" PRIu32
-                        " bytes for region %u at %p- free\n",
-                        MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
+                      " bytes for region %u at %p- free\n",
+                      MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
                 free(MemTab.Region[i].pBaseAddress);
             }
         }
         return -EINVAL;
     }
-    ALOGV("\tReverb_init CreateInstance Succesfully malloc'd memory\n");
+    ALOGV("\tReverb_init CreateInstance Successfully malloc'd memory\n");
 
     /* Initialise */
     pContext->hInstance = LVM_NULL;
 
     /* Init sets the instance handle */
-    LvmStatus = LVREV_GetInstanceHandle(&pContext->hInstance,
-                                        &MemTab,
-                                        &InstParams);
+    LvmStatus = LVREV_GetInstanceHandle(&pContext->hInstance, &MemTab, &InstParams);
 
     LVM_ERROR_CHECK(LvmStatus, "LVM_GetInstanceHandle", "Reverb_init")
-    if(LvmStatus != LVREV_SUCCESS) return -EINVAL;
+    if (LvmStatus != LVREV_SUCCESS) return -EINVAL;
 
-    ALOGV("\tReverb_init CreateInstance Succesfully called LVM_GetInstanceHandle\n");
+    ALOGV("\tReverb_init CreateInstance Successfully called LVM_GetInstanceHandle\n");
 
     /* Set the initial process parameters */
     /* General parameters */
-    params.OperatingMode  = LVM_MODE_ON;
-    params.SampleRate     = LVM_FS_44100;
-    pContext->SampleRate  = LVM_FS_44100;
+    params.OperatingMode = LVM_MODE_ON;
+    params.SampleRate = LVM_FS_44100;
+    pContext->SampleRate = LVM_FS_44100;
 
-    if(pContext->config.inputCfg.channels == AUDIO_CHANNEL_OUT_MONO){
-        params.SourceFormat   = LVM_MONO;
+    if (pContext->config.inputCfg.channels == AUDIO_CHANNEL_OUT_MONO) {
+        params.SourceFormat = LVM_MONO;
     } else {
-        params.SourceFormat   = LVM_STEREO;
+        params.SourceFormat = LVM_STEREO;
     }
 
+    if ((pContext->auxiliary == false) && (params.SourceFormat == LVM_MONO)) {
+        params.SourceFormat = LVM_STEREO;
+    }
     /* Reverb parameters */
-    params.Level          = 0;
-    params.LPF            = 23999;
-    params.HPF            = 50;
-    params.T60            = 1490;
-    params.Density        = 100;
-    params.Damping        = 21;
-    params.RoomSize       = 100;
+    params.Level = 0;
+    params.LPF = 23999;
+    params.HPF = 50;
+    params.T60 = 1490;
+    params.Density = 100;
+    params.Damping = 21;
+    params.RoomSize = 100;
 
-    pContext->SamplesToExitCount = (params.T60 * pContext->config.inputCfg.samplingRate)/1000;
+    pContext->SamplesToExitCount = (params.T60 * pContext->config.inputCfg.samplingRate) / 1000;
 
     /* Saved strength is used to return the exact strength that was used in the set to the get
      * because we map the original strength range of 0:1000 to 1:15, and this will avoid
      * quantisation like effect when returning
      */
-    pContext->SavedRoomLevel    = -6000;
-    pContext->SavedHfLevel      = 0;
-    pContext->bEnabled          = LVM_FALSE;
-    pContext->SavedDecayTime    = params.T60;
-    pContext->SavedDecayHfRatio = params.Damping*20;
-    pContext->SavedDensity      = params.RoomSize*10;
-    pContext->SavedDiffusion    = params.Density*10;
-    pContext->SavedReverbLevel  = -6000;
+    pContext->SavedRoomLevel = -6000;
+    pContext->SavedHfLevel = 0;
+    pContext->bEnabled = LVM_FALSE;
+    pContext->SavedDecayTime = params.T60;
+    pContext->SavedDecayHfRatio = params.Damping * 20;
+    pContext->SavedDensity = params.RoomSize * 10;
+    pContext->SavedDiffusion = params.Density * 10;
+    pContext->SavedReverbLevel = -6000;
 
     /* Activate the initial settings */
-    LvmStatus = LVREV_SetControlParameters(pContext->hInstance,
-                                         &params);
+    LvmStatus = LVREV_SetControlParameters(pContext->hInstance, &params);
 
     LVM_ERROR_CHECK(LvmStatus, "LVREV_SetControlParameters", "Reverb_init")
-    if(LvmStatus != LVREV_SUCCESS) return -EINVAL;
+    if (LvmStatus != LVREV_SUCCESS) return -EINVAL;
 
-    ALOGV("\tReverb_init CreateInstance Succesfully called LVREV_SetControlParameters\n");
+    ALOGV("\tReverb_init CreateInstance Successfully called LVREV_SetControlParameters\n");
     ALOGV("\tReverb_init End");
     return 0;
-}   /* end Reverb_init */
+} /* end Reverb_init */
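Reverb_init() follows the library's usual bring-up sequence: query the memory table, allocate each non-empty region (now zero-initialized via calloc), and only then create the instance handle; if any region fails to allocate, whatever did allocate is freed before bailing out. A standalone sketch of that allocate-or-roll-back step over a hypothetical region table:

#include <cstddef>
#include <cstdlib>

struct Region {
    size_t size;
    void* base;
};

// Zero-initializes every non-empty region, as Reverb_init() now does with
// calloc. On any failure, frees whatever was already allocated (free(nullptr)
// is a no-op) and reports failure so the caller can return before calling
// LVREV_GetInstanceHandle().
bool AllocRegionsOrRollBack(Region* regions, int count) {
    bool failed = false;
    for (int i = 0; i < count; ++i) {
        regions[i].base = nullptr;
        if (regions[i].size != 0) {
            regions[i].base = calloc(1, regions[i].size);
            if (regions[i].base == nullptr) failed = true;
        }
    }
    if (!failed) return true;
    for (int i = 0; i < count; ++i) {
        free(regions[i].base);
        regions[i].base = nullptr;
    }
    return false;
}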
 
 //----------------------------------------------------------------------------
 // ReverbConvertLevel()
@@ -796,27 +812,21 @@
 //
 //----------------------------------------------------------------------------
 
-int16_t ReverbConvertLevel(int16_t level){
-    static int16_t LevelArray[101] =
-    {
-       -12000, -4000,  -3398,  -3046,  -2796,  -2603,  -2444,  -2310,  -2194,  -2092,
-       -2000,  -1918,  -1842,  -1773,  -1708,  -1648,  -1592,  -1540,  -1490,  -1443,
-       -1398,  -1356,  -1316,  -1277,  -1240,  -1205,  -1171,  -1138,  -1106,  -1076,
-       -1046,  -1018,  -990,   -963,   -938,   -912,   -888,   -864,   -841,   -818,
-       -796,   -775,   -754,   -734,   -714,   -694,   -675,   -656,   -638,   -620,
-       -603,   -585,   -568,   -552,   -536,   -520,   -504,   -489,   -474,   -459,
-       -444,   -430,   -416,   -402,   -388,   -375,   -361,   -348,   -335,   -323,
-       -310,   -298,   -286,   -274,   -262,   -250,   -239,   -228,   -216,   -205,
-       -194,   -184,   -173,   -162,   -152,   -142,   -132,   -121,   -112,   -102,
-       -92,    -82,    -73,    -64,    -54,    -45,    -36,    -27,    -18,    -9,
-       0
-    };
+int16_t ReverbConvertLevel(int16_t level) {
+    static int16_t LevelArray[101] = {
+            -12000, -4000, -3398, -3046, -2796, -2603, -2444, -2310, -2194, -2092, -2000, -1918,
+            -1842,  -1773, -1708, -1648, -1592, -1540, -1490, -1443, -1398, -1356, -1316, -1277,
+            -1240,  -1205, -1171, -1138, -1106, -1076, -1046, -1018, -990,  -963,  -938,  -912,
+            -888,   -864,  -841,  -818,  -796,  -775,  -754,  -734,  -714,  -694,  -675,  -656,
+            -638,   -620,  -603,  -585,  -568,  -552,  -536,  -520,  -504,  -489,  -474,  -459,
+            -444,   -430,  -416,  -402,  -388,  -375,  -361,  -348,  -335,  -323,  -310,  -298,
+            -286,   -274,  -262,  -250,  -239,  -228,  -216,  -205,  -194,  -184,  -173,  -162,
+            -152,   -142,  -132,  -121,  -112,  -102,  -92,   -82,   -73,   -64,   -54,   -45,
+            -36,    -27,   -18,   -9,    0};
     int16_t i;
 
-    for(i = 0; i < 101; i++)
-    {
-       if(level <= LevelArray[i])
-           break;
+    for (i = 0; i < 101; i++) {
+        if (level <= LevelArray[i]) break;
     }
     return i;
 }
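ReverbConvertLevel() is a plain table search: it returns the index of the first LevelArray entry at or above the requested millibel level, and that index is the value handed to the LVREV Level control. A generic sketch of the search, with a few sample points read off the table above:

#include <cstdint>

// Returns the index of the first table entry that is >= level; with the
// 101-entry LevelArray above, -12000 mB (or anything lower) maps to index 0,
// -2000 mB to index 10, and 0 mB to index 100.
int16_t FirstIndexAtOrAbove(const int16_t* table, int16_t count, int16_t level) {
    int16_t i;
    for (i = 0; i < count; ++i) {
        if (level <= table[i]) break;
    }
    return i;  // equals count if level is above every entry
}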
@@ -832,37 +842,31 @@
 //
 //----------------------------------------------------------------------------
 
-int16_t ReverbConvertHfLevel(int16_t Hflevel){
+int16_t ReverbConvertHfLevel(int16_t Hflevel) {
     int16_t i;
 
-    static LPFPair_t LPFArray[97] =
-    {   // Limit range to 50 for LVREV parameter range
-        {-10000, 50}, { -5000, 50 }, { -4000, 50},  { -3000, 158}, { -2000, 502},
-        {-1000, 1666},{ -900, 1897}, { -800, 2169}, { -700, 2496}, { -600, 2895},
-        {-500, 3400}, { -400, 4066}, { -300, 5011}, { -200, 6537}, { -100,  9826},
-        {-99, 9881 }, { -98, 9937 }, { -97, 9994 }, { -96, 10052}, { -95, 10111},
-        {-94, 10171}, { -93, 10231}, { -92, 10293}, { -91, 10356}, { -90, 10419},
-        {-89, 10484}, { -88, 10549}, { -87, 10616}, { -86, 10684}, { -85, 10753},
-        {-84, 10823}, { -83, 10895}, { -82, 10968}, { -81, 11042}, { -80, 11117},
-        {-79, 11194}, { -78, 11272}, { -77, 11352}, { -76, 11433}, { -75, 11516},
-        {-74, 11600}, { -73, 11686}, { -72, 11774}, { -71, 11864}, { -70, 11955},
-        {-69, 12049}, { -68, 12144}, { -67, 12242}, { -66, 12341}, { -65, 12443},
-        {-64, 12548}, { -63, 12654}, { -62, 12763}, { -61, 12875}, { -60, 12990},
-        {-59, 13107}, { -58, 13227}, { -57, 13351}, { -56, 13477}, { -55, 13607},
-        {-54, 13741}, { -53, 13878}, { -52, 14019}, { -51, 14164}, { -50, 14313},
-        {-49, 14467}, { -48, 14626}, { -47, 14789}, { -46, 14958}, { -45, 15132},
-        {-44, 15312}, { -43, 15498}, { -42, 15691}, { -41, 15890}, { -40, 16097},
-        {-39, 16311}, { -38, 16534}, { -37, 16766}, { -36, 17007}, { -35, 17259},
-        {-34, 17521}, { -33, 17795}, { -32, 18081}, { -31, 18381}, { -30, 18696},
-        {-29, 19027}, { -28, 19375}, { -27, 19742}, { -26, 20129}, { -25, 20540},
-        {-24, 20976}, { -23, 21439}, { -22, 21934}, { -21, 22463}, { -20, 23031},
-        {-19, 23643}, { -18, 23999}
-    };
+    static LPFPair_t LPFArray[97] = {
+            // Limit range to 50 for LVREV parameter range
+            {-10000, 50}, {-5000, 50},  {-4000, 50},  {-3000, 158}, {-2000, 502}, {-1000, 1666},
+            {-900, 1897}, {-800, 2169}, {-700, 2496}, {-600, 2895}, {-500, 3400}, {-400, 4066},
+            {-300, 5011}, {-200, 6537}, {-100, 9826}, {-99, 9881},  {-98, 9937},  {-97, 9994},
+            {-96, 10052}, {-95, 10111}, {-94, 10171}, {-93, 10231}, {-92, 10293}, {-91, 10356},
+            {-90, 10419}, {-89, 10484}, {-88, 10549}, {-87, 10616}, {-86, 10684}, {-85, 10753},
+            {-84, 10823}, {-83, 10895}, {-82, 10968}, {-81, 11042}, {-80, 11117}, {-79, 11194},
+            {-78, 11272}, {-77, 11352}, {-76, 11433}, {-75, 11516}, {-74, 11600}, {-73, 11686},
+            {-72, 11774}, {-71, 11864}, {-70, 11955}, {-69, 12049}, {-68, 12144}, {-67, 12242},
+            {-66, 12341}, {-65, 12443}, {-64, 12548}, {-63, 12654}, {-62, 12763}, {-61, 12875},
+            {-60, 12990}, {-59, 13107}, {-58, 13227}, {-57, 13351}, {-56, 13477}, {-55, 13607},
+            {-54, 13741}, {-53, 13878}, {-52, 14019}, {-51, 14164}, {-50, 14313}, {-49, 14467},
+            {-48, 14626}, {-47, 14789}, {-46, 14958}, {-45, 15132}, {-44, 15312}, {-43, 15498},
+            {-42, 15691}, {-41, 15890}, {-40, 16097}, {-39, 16311}, {-38, 16534}, {-37, 16766},
+            {-36, 17007}, {-35, 17259}, {-34, 17521}, {-33, 17795}, {-32, 18081}, {-31, 18381},
+            {-30, 18696}, {-29, 19027}, {-28, 19375}, {-27, 19742}, {-26, 20129}, {-25, 20540},
+            {-24, 20976}, {-23, 21439}, {-22, 21934}, {-21, 22463}, {-20, 23031}, {-19, 23643},
+            {-18, 23999}};
 
-    for(i = 0; i < 96; i++)
-    {
-        if(Hflevel <= LPFArray[i].Room_HF)
-            break;
+    for (i = 0; i < 96; i++) {
+        if (Hflevel <= LPFArray[i].Room_HF) break;
     }
     return LPFArray[i].LPF;
 }
@@ -879,26 +883,26 @@
 //
 //----------------------------------------------------------------------------
 
-void ReverbSetRoomHfLevel(ReverbContext *pContext, int16_t level){
-    //ALOGV("\tReverbSetRoomHfLevel start (%d)", level);
+void ReverbSetRoomHfLevel(ReverbContext* pContext, int16_t level) {
+    // ALOGV("\tReverbSetRoomHfLevel start (%d)", level);
 
-    LVREV_ControlParams_st    ActiveParams;              /* Current control Parameters */
-    LVREV_ReturnStatus_en     LvmStatus=LVREV_SUCCESS;     /* Function call status */
+    LVREV_ControlParams_st ActiveParams;             /* Current control Parameters */
+    LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */
 
     /* Get the current settings */
     LvmStatus = LVREV_GetControlParameters(pContext->hInstance, &ActiveParams);
     LVM_ERROR_CHECK(LvmStatus, "LVREV_GetControlParameters", "ReverbSetRoomHfLevel")
-    //ALOGV("\tReverbSetRoomHfLevel Succesfully returned from LVM_GetControlParameters\n");
-    //ALOGV("\tReverbSetRoomHfLevel() just Got -> %d\n", ActiveParams.LPF);
+    // ALOGV("\tReverbSetRoomHfLevel Successfully returned from LVM_GetControlParameters\n");
+    // ALOGV("\tReverbSetRoomHfLevel() just Got -> %d\n", ActiveParams.LPF);
 
     ActiveParams.LPF = ReverbConvertHfLevel(level);
 
     /* Activate the initial settings */
     LvmStatus = LVREV_SetControlParameters(pContext->hInstance, &ActiveParams);
     LVM_ERROR_CHECK(LvmStatus, "LVREV_SetControlParameters", "ReverbSetRoomHfLevel")
-    //ALOGV("\tReverbSetRoomhfLevel() just Set -> %d\n", ActiveParams.LPF);
+    // ALOGV("\tReverbSetRoomhfLevel() just Set -> %d\n", ActiveParams.LPF);
     pContext->SavedHfLevel = level;
-    //ALOGV("\tReverbSetHfRoomLevel end.. saving %d", pContext->SavedHfLevel);
+    // ALOGV("\tReverbSetHfRoomLevel end.. saving %d", pContext->SavedHfLevel);
     return;
 }
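Each of the parameter setters from here on has the same shape: read the current LVREV control block, patch the one field being set, write the block back, and cache the caller's raw value so the matching getter can return it without quantization from the conversion. A hedged standalone sketch of that shape, with toy types standing in for LVREV_ControlParams_st and ReverbContext:

#include <cstdint>

struct Params {
    int16_t LPF;  // toy stand-in for LVREV_ControlParams_st
};
struct Context {
    Params active;
    int16_t SavedHfLevel;
};

// Placeholder conversion so the sketch is self-contained; the real code uses
// ReverbConvertHfLevel().
static int16_t ConvertHfLevel(int16_t level) {
    return level;
}

void SetRoomHfLevel(Context* ctx, int16_t level) {
    Params p = ctx->active;         // LVREV_GetControlParameters() in the real code
    p.LPF = ConvertHfLevel(level);  // convert into the engine's units
    ctx->active = p;                // LVREV_SetControlParameters() in the real code
    ctx->SavedHfLevel = level;      // cached so ReverbGetRoomHfLevel() returns it exactly
}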
 
@@ -913,30 +917,31 @@
 //
 //----------------------------------------------------------------------------
 
-int16_t ReverbGetRoomHfLevel(ReverbContext *pContext){
+int16_t ReverbGetRoomHfLevel(ReverbContext* pContext) {
     int16_t level;
-    //ALOGV("\tReverbGetRoomHfLevel start, saved level is %d", pContext->SavedHfLevel);
+    // ALOGV("\tReverbGetRoomHfLevel start, saved level is %d", pContext->SavedHfLevel);
 
-    LVREV_ControlParams_st    ActiveParams;              /* Current control Parameters */
-    LVREV_ReturnStatus_en     LvmStatus=LVREV_SUCCESS;     /* Function call status */
+    LVREV_ControlParams_st ActiveParams;             /* Current control Parameters */
+    LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */
 
     /* Get the current settings */
     LvmStatus = LVREV_GetControlParameters(pContext->hInstance, &ActiveParams);
     LVM_ERROR_CHECK(LvmStatus, "LVREV_GetControlParameters", "ReverbGetRoomHfLevel")
-    //ALOGV("\tReverbGetRoomHfLevel Succesfully returned from LVM_GetControlParameters\n");
-    //ALOGV("\tReverbGetRoomHfLevel() just Got -> %d\n", ActiveParams.LPF);
+    // ALOGV("\tReverbGetRoomHfLevel Successfully returned from LVM_GetControlParameters\n");
+    // ALOGV("\tReverbGetRoomHfLevel() just Got -> %d\n", ActiveParams.LPF);
 
     level = ReverbConvertHfLevel(pContext->SavedHfLevel);
 
-    //ALOGV("\tReverbGetRoomHfLevel() ActiveParams.LPFL %d, pContext->SavedHfLevel: %d, "
+    // ALOGV("\tReverbGetRoomHfLevel() ActiveParams.LPFL %d, pContext->SavedHfLevel: %d, "
     //     "converted level: %d\n", ActiveParams.LPF, pContext->SavedHfLevel, level);
 
-    if((int16_t)ActiveParams.LPF != level){
-        ALOGV("\tLVM_ERROR : (ignore at start up) ReverbGetRoomHfLevel() has wrong level -> %d %d\n",
-               ActiveParams.Level, level);
+    if ((int16_t)ActiveParams.LPF != level) {
+        ALOGV("\tLVM_ERROR : (ignore at start up) ReverbGetRoomHfLevel() has wrong level -> %d "
+              "%d\n",
+              ActiveParams.LPF, level);
     }
 
-    //ALOGV("\tReverbGetRoomHfLevel end");
+    // ALOGV("\tReverbGetRoomHfLevel end");
     return pContext->SavedHfLevel;
 }
 
@@ -952,35 +957,35 @@
 //
 //----------------------------------------------------------------------------
 
-void ReverbSetReverbLevel(ReverbContext *pContext, int16_t level){
-    //ALOGV("\n\tReverbSetReverbLevel start (%d)", level);
+void ReverbSetReverbLevel(ReverbContext* pContext, int16_t level) {
+    // ALOGV("\n\tReverbSetReverbLevel start (%d)", level);
 
-    LVREV_ControlParams_st    ActiveParams;              /* Current control Parameters */
-    LVREV_ReturnStatus_en     LvmStatus=LVREV_SUCCESS;     /* Function call status */
-    LVM_INT32                 CombinedLevel;             // Sum of room and reverb level controls
+    LVREV_ControlParams_st ActiveParams;             /* Current control Parameters */
+    LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */
+    LVM_INT32 CombinedLevel;                         // Sum of room and reverb level controls
 
     /* Get the current settings */
     LvmStatus = LVREV_GetControlParameters(pContext->hInstance, &ActiveParams);
     LVM_ERROR_CHECK(LvmStatus, "LVREV_GetControlParameters", "ReverbSetReverbLevel")
-    //ALOGV("\tReverbSetReverbLevel Succesfully returned from LVM_GetControlParameters\n");
-    //ALOGV("\tReverbSetReverbLevel just Got -> %d\n", ActiveParams.Level);
+    // ALOGV("\tReverbSetReverbLevel Successfully returned from LVM_GetControlParameters\n");
+    // ALOGV("\tReverbSetReverbLevel just Got -> %d\n", ActiveParams.Level);
 
     // needs to subtract max levels for both RoomLevel and ReverbLevel
-    CombinedLevel = (level + pContext->SavedRoomLevel)-LVREV_MAX_REVERB_LEVEL;
-    //ALOGV("\tReverbSetReverbLevel() CombinedLevel is %d = %d + %d\n",
+    CombinedLevel = (level + pContext->SavedRoomLevel) - LVREV_MAX_REVERB_LEVEL;
+    // ALOGV("\tReverbSetReverbLevel() CombinedLevel is %d = %d + %d\n",
     //      CombinedLevel, level, pContext->SavedRoomLevel);
 
     ActiveParams.Level = ReverbConvertLevel(CombinedLevel);
 
-    //ALOGV("\tReverbSetReverbLevel() Trying to set -> %d\n", ActiveParams.Level);
+    // ALOGV("\tReverbSetReverbLevel() Trying to set -> %d\n", ActiveParams.Level);
 
     /* Activate the initial settings */
     LvmStatus = LVREV_SetControlParameters(pContext->hInstance, &ActiveParams);
     LVM_ERROR_CHECK(LvmStatus, "LVREV_SetControlParameters", "ReverbSetReverbLevel")
-    //ALOGV("\tReverbSetReverbLevel() just Set -> %d\n", ActiveParams.Level);
+    // ALOGV("\tReverbSetReverbLevel() just Set -> %d\n", ActiveParams.Level);
 
     pContext->SavedReverbLevel = level;
-    //ALOGV("\tReverbSetReverbLevel end pContext->SavedReverbLevel is %d\n\n",
+    // ALOGV("\tReverbSetReverbLevel end pContext->SavedReverbLevel is %d\n\n",
     //     pContext->SavedReverbLevel);
     return;
 }
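Room level and reverb level are both millibel attenuations referenced to a shared maximum, so the engine is driven by their sum re-referenced to LVREV_MAX_REVERB_LEVEL and then quantized through ReverbConvertLevel(). A small sketch of the arithmetic; the maximum is left as a parameter because its value comes from the reverb headers.

#include <cstdint>

// Same combination used by ReverbSetReverbLevel() and ReverbSetRoomLevel().
int32_t CombinedLevelMb(int16_t reverbLevelMb, int16_t roomLevelMb, int32_t maxReverbLevelMb) {
    return (reverbLevelMb + roomLevelMb) - maxReverbLevelMb;
}

// Example with the Reverb_init() defaults: reverb -6000 mB and room -6000 mB
// combine to -12000 mB minus the maximum, which for any non-negative maximum
// sits at or below the bottom entry of LevelArray and so maps to index 0.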
@@ -996,37 +1001,40 @@
 //
 //----------------------------------------------------------------------------
 
-int16_t ReverbGetReverbLevel(ReverbContext *pContext){
+int16_t ReverbGetReverbLevel(ReverbContext* pContext) {
     int16_t level;
-    //ALOGV("\tReverbGetReverbLevel start");
+    // ALOGV("\tReverbGetReverbLevel start");
 
-    LVREV_ControlParams_st    ActiveParams;              /* Current control Parameters */
-    LVREV_ReturnStatus_en     LvmStatus=LVREV_SUCCESS;     /* Function call status */
-    LVM_INT32                 CombinedLevel;             // Sum of room and reverb level controls
+    LVREV_ControlParams_st ActiveParams;             /* Current control Parameters */
+    LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */
+    LVM_INT32 CombinedLevel;                         // Sum of room and reverb level controls
 
     /* Get the current settings */
     LvmStatus = LVREV_GetControlParameters(pContext->hInstance, &ActiveParams);
     LVM_ERROR_CHECK(LvmStatus, "LVREV_GetControlParameters", "ReverbGetReverbLevel")
-    //ALOGV("\tReverbGetReverbLevel Succesfully returned from LVM_GetControlParameters\n");
-    //ALOGV("\tReverbGetReverbLevel() just Got -> %d\n", ActiveParams.Level);
+    // ALOGV("\tReverbGetReverbLevel Successfully returned from LVM_GetControlParameters\n");
+    // ALOGV("\tReverbGetReverbLevel() just Got -> %d\n", ActiveParams.Level);
 
     // needs to subtract max levels for both RoomLevel and ReverbLevel
-    CombinedLevel = (pContext->SavedReverbLevel + pContext->SavedRoomLevel)-LVREV_MAX_REVERB_LEVEL;
+    CombinedLevel =
+            (pContext->SavedReverbLevel + pContext->SavedRoomLevel) - LVREV_MAX_REVERB_LEVEL;
 
-    //ALOGV("\tReverbGetReverbLevel() CombinedLevel is %d = %d + %d\n",
-    //CombinedLevel, pContext->SavedReverbLevel, pContext->SavedRoomLevel);
+    // ALOGV("\tReverbGetReverbLevel() CombinedLevel is %d = %d + %d\n",
+    // CombinedLevel, pContext->SavedReverbLevel, pContext->SavedRoomLevel);
     level = ReverbConvertLevel(CombinedLevel);
 
-    //ALOGV("\tReverbGetReverbLevel(): ActiveParams.Level: %d, pContext->SavedReverbLevel: %d, "
+    // ALOGV("\tReverbGetReverbLevel(): ActiveParams.Level: %d, pContext->SavedReverbLevel: %d, "
     //"pContext->SavedRoomLevel: %d, CombinedLevel: %d, converted level: %d\n",
-    //ActiveParams.Level, pContext->SavedReverbLevel,pContext->SavedRoomLevel, CombinedLevel,level);
+    // ActiveParams.Level, pContext->SavedReverbLevel,pContext->SavedRoomLevel,
+    // CombinedLevel,level);
 
-    if(ActiveParams.Level != level){
-        ALOGV("\tLVM_ERROR : (ignore at start up) ReverbGetReverbLevel() has wrong level -> %d %d\n",
-                ActiveParams.Level, level);
+    if (ActiveParams.Level != level) {
+        ALOGV("\tLVM_ERROR : (ignore at start up) ReverbGetReverbLevel() has wrong level -> %d "
+              "%d\n",
+              ActiveParams.Level, level);
     }
 
-    //ALOGV("\tReverbGetReverbLevel end\n");
+    // ALOGV("\tReverbGetReverbLevel end\n");
 
     return pContext->SavedReverbLevel;
 }
@@ -1043,30 +1051,30 @@
 //
 //----------------------------------------------------------------------------
 
-void ReverbSetRoomLevel(ReverbContext *pContext, int16_t level){
-    //ALOGV("\tReverbSetRoomLevel start (%d)", level);
+void ReverbSetRoomLevel(ReverbContext* pContext, int16_t level) {
+    // ALOGV("\tReverbSetRoomLevel start (%d)", level);
 
-    LVREV_ControlParams_st    ActiveParams;              /* Current control Parameters */
-    LVREV_ReturnStatus_en     LvmStatus=LVREV_SUCCESS;     /* Function call status */
-    LVM_INT32                 CombinedLevel;             // Sum of room and reverb level controls
+    LVREV_ControlParams_st ActiveParams;             /* Current control Parameters */
+    LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */
+    LVM_INT32 CombinedLevel;                         // Sum of room and reverb level controls
 
     /* Get the current settings */
     LvmStatus = LVREV_GetControlParameters(pContext->hInstance, &ActiveParams);
     LVM_ERROR_CHECK(LvmStatus, "LVREV_GetControlParameters", "ReverbSetRoomLevel")
-    //ALOGV("\tReverbSetRoomLevel Succesfully returned from LVM_GetControlParameters\n");
-    //ALOGV("\tReverbSetRoomLevel() just Got -> %d\n", ActiveParams.Level);
+    // ALOGV("\tReverbSetRoomLevel Successfully returned from LVM_GetControlParameters\n");
+    // ALOGV("\tReverbSetRoomLevel() just Got -> %d\n", ActiveParams.Level);
 
     // needs to subtract max levels for both RoomLevel and ReverbLevel
-    CombinedLevel = (level + pContext->SavedReverbLevel)-LVREV_MAX_REVERB_LEVEL;
+    CombinedLevel = (level + pContext->SavedReverbLevel) - LVREV_MAX_REVERB_LEVEL;
     ActiveParams.Level = ReverbConvertLevel(CombinedLevel);
 
     /* Activate the initial settings */
     LvmStatus = LVREV_SetControlParameters(pContext->hInstance, &ActiveParams);
     LVM_ERROR_CHECK(LvmStatus, "LVREV_SetControlParameters", "ReverbSetRoomLevel")
-    //ALOGV("\tReverbSetRoomLevel() just Set -> %d\n", ActiveParams.Level);
+    // ALOGV("\tReverbSetRoomLevel() just Set -> %d\n", ActiveParams.Level);
 
     pContext->SavedRoomLevel = level;
-    //ALOGV("\tReverbSetRoomLevel end");
+    // ALOGV("\tReverbSetRoomLevel end");
     return;
 }
 
@@ -1081,35 +1089,36 @@
 //
 //----------------------------------------------------------------------------
 
-int16_t ReverbGetRoomLevel(ReverbContext *pContext){
+int16_t ReverbGetRoomLevel(ReverbContext* pContext) {
     int16_t level;
-    //ALOGV("\tReverbGetRoomLevel start");
+    // ALOGV("\tReverbGetRoomLevel start");
 
-    LVREV_ControlParams_st    ActiveParams;              /* Current control Parameters */
-    LVREV_ReturnStatus_en     LvmStatus=LVREV_SUCCESS;     /* Function call status */
-    LVM_INT32                 CombinedLevel;             // Sum of room and reverb level controls
+    LVREV_ControlParams_st ActiveParams;             /* Current control Parameters */
+    LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */
+    LVM_INT32 CombinedLevel;                         // Sum of room and reverb level controls
 
     /* Get the current settings */
     LvmStatus = LVREV_GetControlParameters(pContext->hInstance, &ActiveParams);
     LVM_ERROR_CHECK(LvmStatus, "LVREV_GetControlParameters", "ReverbGetRoomLevel")
-    //ALOGV("\tReverbGetRoomLevel Succesfully returned from LVM_GetControlParameters\n");
-    //ALOGV("\tReverbGetRoomLevel() just Got -> %d\n", ActiveParams.Level);
+    // ALOGV("\tReverbGetRoomLevel Successfully returned from LVM_GetControlParameters\n");
+    // ALOGV("\tReverbGetRoomLevel() just Got -> %d\n", ActiveParams.Level);
 
     // needs to subtract max levels for both RoomLevel and ReverbLevel
-    CombinedLevel = (pContext->SavedRoomLevel + pContext->SavedReverbLevel-LVREV_MAX_REVERB_LEVEL);
+    CombinedLevel =
+            (pContext->SavedRoomLevel + pContext->SavedReverbLevel - LVREV_MAX_REVERB_LEVEL);
     level = ReverbConvertLevel(CombinedLevel);
 
-    //ALOGV("\tReverbGetRoomLevel, Level = %d, pContext->SavedRoomLevel = %d, "
+    // ALOGV("\tReverbGetRoomLevel, Level = %d, pContext->SavedRoomLevel = %d, "
     //     "pContext->SavedReverbLevel = %d, CombinedLevel = %d, level = %d",
     //     ActiveParams.Level, pContext->SavedRoomLevel,
     //     pContext->SavedReverbLevel, CombinedLevel, level);
 
-    if(ActiveParams.Level != level){
+    if (ActiveParams.Level != level) {
         ALOGV("\tLVM_ERROR : (ignore at start up) ReverbGetRoomLevel() has wrong level -> %d %d\n",
               ActiveParams.Level, level);
     }
 
-    //ALOGV("\tReverbGetRoomLevel end");
+    // ALOGV("\tReverbGetRoomLevel end");
     return pContext->SavedRoomLevel;
 }
 
@@ -1125,34 +1134,35 @@
 //
 //----------------------------------------------------------------------------
 
-void ReverbSetDecayTime(ReverbContext *pContext, uint32_t time){
-    //ALOGV("\tReverbSetDecayTime start (%d)", time);
+void ReverbSetDecayTime(ReverbContext* pContext, uint32_t time) {
+    // ALOGV("\tReverbSetDecayTime start (%d)", time);
 
-    LVREV_ControlParams_st    ActiveParams;              /* Current control Parameters */
-    LVREV_ReturnStatus_en     LvmStatus=LVREV_SUCCESS;     /* Function call status */
+    LVREV_ControlParams_st ActiveParams;             /* Current control Parameters */
+    LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */
 
     /* Get the current settings */
     LvmStatus = LVREV_GetControlParameters(pContext->hInstance, &ActiveParams);
     LVM_ERROR_CHECK(LvmStatus, "LVREV_GetControlParameters", "ReverbSetDecayTime")
-    //ALOGV("\tReverbSetDecayTime Succesfully returned from LVM_GetControlParameters\n");
-    //ALOGV("\tReverbSetDecayTime() just Got -> %d\n", ActiveParams.T60);
+    // ALOGV("\tReverbSetDecayTime Successfully returned from LVM_GetControlParameters\n");
+    // ALOGV("\tReverbSetDecayTime() just Got -> %d\n", ActiveParams.T60);
 
     if (time <= LVREV_MAX_T60) {
         ActiveParams.T60 = (LVM_UINT16)time;
-    }
-    else {
+    } else {
         ActiveParams.T60 = LVREV_MAX_T60;
     }
 
     /* Activate the initial settings */
     LvmStatus = LVREV_SetControlParameters(pContext->hInstance, &ActiveParams);
     LVM_ERROR_CHECK(LvmStatus, "LVREV_SetControlParameters", "ReverbSetDecayTime")
-    //ALOGV("\tReverbSetDecayTime() just Set -> %d\n", ActiveParams.T60);
+    // ALOGV("\tReverbSetDecayTime() just Set -> %d\n", ActiveParams.T60);
 
-    pContext->SamplesToExitCount = (ActiveParams.T60 * pContext->config.inputCfg.samplingRate)/1000;
-    //ALOGV("\tReverbSetDecayTime() just Set SamplesToExitCount-> %d\n",pContext->SamplesToExitCount);
+    pContext->SamplesToExitCount =
+            (ActiveParams.T60 * pContext->config.inputCfg.samplingRate) / 1000;
+    // ALOGV("\tReverbSetDecayTime() just Set SamplesToExitCount->
+    // %d\n",pContext->SamplesToExitCount);
     pContext->SavedDecayTime = (int16_t)time;
-    //ALOGV("\tReverbSetDecayTime end");
+    // ALOGV("\tReverbSetDecayTime end");
     return;
 }
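SamplesToExitCount is just the decay time converted from milliseconds to frames at the input sampling rate; it is how the effect knows how long to keep rendering a tail after being disabled. A small sketch of the conversion with a worked example using the defaults from Reverb_init():

#include <cstdint>

// Frames of reverb tail to flush after the effect is disabled, matching the
// computation in ReverbSetDecayTime() and Reverb_init().
uint32_t SamplesToExit(uint32_t t60Ms, uint32_t samplingRateHz) {
    return (t60Ms * samplingRateHz) / 1000;
}

// Example: the default T60 of 1490 ms at 44100 Hz gives
// (1490 * 44100) / 1000 = 65709 frames.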
 
@@ -1167,25 +1177,25 @@
 //
 //----------------------------------------------------------------------------
 
-uint32_t ReverbGetDecayTime(ReverbContext *pContext){
-    //ALOGV("\tReverbGetDecayTime start");
+uint32_t ReverbGetDecayTime(ReverbContext* pContext) {
+    // ALOGV("\tReverbGetDecayTime start");
 
-    LVREV_ControlParams_st    ActiveParams;              /* Current control Parameters */
-    LVREV_ReturnStatus_en     LvmStatus=LVREV_SUCCESS;     /* Function call status */
+    LVREV_ControlParams_st ActiveParams;             /* Current control Parameters */
+    LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */
 
     /* Get the current settings */
     LvmStatus = LVREV_GetControlParameters(pContext->hInstance, &ActiveParams);
     LVM_ERROR_CHECK(LvmStatus, "LVREV_GetControlParameters", "ReverbGetDecayTime")
-    //ALOGV("\tReverbGetDecayTime Succesfully returned from LVM_GetControlParameters\n");
-    //ALOGV("\tReverbGetDecayTime() just Got -> %d\n", ActiveParams.T60);
+    // ALOGV("\tReverbGetDecayTime Successfully returned from LVM_GetControlParameters\n");
+    // ALOGV("\tReverbGetDecayTime() just Got -> %d\n", ActiveParams.T60);
 
-    if(ActiveParams.T60 != pContext->SavedDecayTime){
+    if (ActiveParams.T60 != pContext->SavedDecayTime) {
         // This will fail if the decay time is set to more than 7000
-        ALOGV("\tLVM_ERROR : ReverbGetDecayTime() has wrong level -> %d %d\n",
-         ActiveParams.T60, pContext->SavedDecayTime);
+        ALOGV("\tLVM_ERROR : ReverbGetDecayTime() has wrong level -> %d %d\n", ActiveParams.T60,
+              pContext->SavedDecayTime);
     }
 
-    //ALOGV("\tReverbGetDecayTime end");
+    // ALOGV("\tReverbGetDecayTime end");
     return (uint32_t)ActiveParams.T60;
 }
 
@@ -1201,27 +1211,27 @@
 //
 //----------------------------------------------------------------------------
 
-void ReverbSetDecayHfRatio(ReverbContext *pContext, int16_t ratio){
-    //ALOGV("\tReverbSetDecayHfRatioe start (%d)", ratio);
+void ReverbSetDecayHfRatio(ReverbContext* pContext, int16_t ratio) {
+    // ALOGV("\tReverbSetDecayHfRatioe start (%d)", ratio);
 
-    LVREV_ControlParams_st    ActiveParams;              /* Current control Parameters */
-    LVREV_ReturnStatus_en     LvmStatus=LVREV_SUCCESS;   /* Function call status */
+    LVREV_ControlParams_st ActiveParams;             /* Current control Parameters */
+    LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */
 
     /* Get the current settings */
     LvmStatus = LVREV_GetControlParameters(pContext->hInstance, &ActiveParams);
     LVM_ERROR_CHECK(LvmStatus, "LVREV_GetControlParameters", "ReverbSetDecayHfRatio")
-    //ALOGV("\tReverbSetDecayHfRatio Succesfully returned from LVM_GetControlParameters\n");
-    //ALOGV("\tReverbSetDecayHfRatio() just Got -> %d\n", ActiveParams.Damping);
+    // ALOGV("\tReverbSetDecayHfRatio Successfully returned from LVM_GetControlParameters\n");
+    // ALOGV("\tReverbSetDecayHfRatio() just Got -> %d\n", ActiveParams.Damping);
 
-    ActiveParams.Damping = (LVM_INT16)(ratio/20);
+    ActiveParams.Damping = (LVM_INT16)(ratio / 20);
 
     /* Activate the initial settings */
     LvmStatus = LVREV_SetControlParameters(pContext->hInstance, &ActiveParams);
     LVM_ERROR_CHECK(LvmStatus, "LVREV_SetControlParameters", "ReverbSetDecayHfRatio")
-    //ALOGV("\tReverbSetDecayHfRatio() just Set -> %d\n", ActiveParams.Damping);
+    // ALOGV("\tReverbSetDecayHfRatio() just Set -> %d\n", ActiveParams.Damping);
 
     pContext->SavedDecayHfRatio = ratio;
-    //ALOGV("\tReverbSetDecayHfRatio end");
+    // ALOGV("\tReverbSetDecayHfRatio end");
     return;
 }
 
@@ -1236,24 +1246,24 @@
 //
 //----------------------------------------------------------------------------
 
-int32_t ReverbGetDecayHfRatio(ReverbContext *pContext){
-    //ALOGV("\tReverbGetDecayHfRatio start");
+int32_t ReverbGetDecayHfRatio(ReverbContext* pContext) {
+    // ALOGV("\tReverbGetDecayHfRatio start");
 
-    LVREV_ControlParams_st    ActiveParams;              /* Current control Parameters */
-    LVREV_ReturnStatus_en     LvmStatus=LVREV_SUCCESS;   /* Function call status */
+    LVREV_ControlParams_st ActiveParams;             /* Current control Parameters */
+    LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */
 
     /* Get the current settings */
     LvmStatus = LVREV_GetControlParameters(pContext->hInstance, &ActiveParams);
     LVM_ERROR_CHECK(LvmStatus, "LVREV_GetControlParameters", "ReverbGetDecayHfRatio")
-    //ALOGV("\tReverbGetDecayHfRatio Succesfully returned from LVM_GetControlParameters\n");
-    //ALOGV("\tReverbGetDecayHfRatio() just Got -> %d\n", ActiveParams.Damping);
+    // ALOGV("\tReverbGetDecayHfRatio Successfully returned from LVM_GetControlParameters\n");
+    // ALOGV("\tReverbGetDecayHfRatio() just Got -> %d\n", ActiveParams.Damping);
 
-    if(ActiveParams.Damping != (LVM_INT16)(pContext->SavedDecayHfRatio / 20)){
+    if (ActiveParams.Damping != (LVM_INT16)(pContext->SavedDecayHfRatio / 20)) {
         ALOGV("\tLVM_ERROR : ReverbGetDecayHfRatio() has wrong level -> %d %d\n",
-         ActiveParams.Damping, pContext->SavedDecayHfRatio);
+              ActiveParams.Damping, pContext->SavedDecayHfRatio);
     }
 
-    //ALOGV("\tReverbGetDecayHfRatio end");
+    // ALOGV("\tReverbGetDecayHfRatio end");
     return pContext->SavedDecayHfRatio;
 }
 
@@ -1269,27 +1279,27 @@
 //
 //----------------------------------------------------------------------------
 
-void ReverbSetDiffusion(ReverbContext *pContext, int16_t level){
-    //ALOGV("\tReverbSetDiffusion start (%d)", level);
+void ReverbSetDiffusion(ReverbContext* pContext, int16_t level) {
+    // ALOGV("\tReverbSetDiffusion start (%d)", level);
 
-    LVREV_ControlParams_st    ActiveParams;              /* Current control Parameters */
-    LVREV_ReturnStatus_en     LvmStatus=LVREV_SUCCESS;     /* Function call status */
+    LVREV_ControlParams_st ActiveParams;             /* Current control Parameters */
+    LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */
 
     /* Get the current settings */
     LvmStatus = LVREV_GetControlParameters(pContext->hInstance, &ActiveParams);
     LVM_ERROR_CHECK(LvmStatus, "LVREV_GetControlParameters", "ReverbSetDiffusion")
-    //ALOGV("\tReverbSetDiffusion Succesfully returned from LVM_GetControlParameters\n");
-    //ALOGV("\tReverbSetDiffusion() just Got -> %d\n", ActiveParams.Density);
+    // ALOGV("\tReverbSetDiffusion Successfully returned from LVM_GetControlParameters\n");
+    // ALOGV("\tReverbSetDiffusion() just Got -> %d\n", ActiveParams.Density);
 
-    ActiveParams.Density = (LVM_INT16)(level/10);
+    ActiveParams.Density = (LVM_INT16)(level / 10);
 
     /* Activate the initial settings */
     LvmStatus = LVREV_SetControlParameters(pContext->hInstance, &ActiveParams);
     LVM_ERROR_CHECK(LvmStatus, "LVREV_SetControlParameters", "ReverbSetDiffusion")
-    //ALOGV("\tReverbSetDiffusion() just Set -> %d\n", ActiveParams.Density);
+    // ALOGV("\tReverbSetDiffusion() just Set -> %d\n", ActiveParams.Density);
 
     pContext->SavedDiffusion = level;
-    //ALOGV("\tReverbSetDiffusion end");
+    // ALOGV("\tReverbSetDiffusion end");
     return;
 }
 
@@ -1304,26 +1314,26 @@
 //
 //----------------------------------------------------------------------------
 
-int32_t ReverbGetDiffusion(ReverbContext *pContext){
-    //ALOGV("\tReverbGetDiffusion start");
+int32_t ReverbGetDiffusion(ReverbContext* pContext) {
+    // ALOGV("\tReverbGetDiffusion start");
 
-    LVREV_ControlParams_st    ActiveParams;              /* Current control Parameters */
-    LVREV_ReturnStatus_en     LvmStatus=LVREV_SUCCESS;     /* Function call status */
-    LVM_INT16                 Temp;
+    LVREV_ControlParams_st ActiveParams;             /* Current control Parameters */
+    LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */
+    LVM_INT16 Temp;
 
     /* Get the current settings */
     LvmStatus = LVREV_GetControlParameters(pContext->hInstance, &ActiveParams);
     LVM_ERROR_CHECK(LvmStatus, "LVREV_GetControlParameters", "ReverbGetDiffusion")
-    //ALOGV("\tReverbGetDiffusion Succesfully returned from LVM_GetControlParameters\n");
-    //ALOGV("\tReverbGetDiffusion just Got -> %d\n", ActiveParams.Density);
+    // ALOGV("\tReverbGetDiffusion Successfully returned from LVM_GetControlParameters\n");
+    // ALOGV("\tReverbGetDiffusion just Got -> %d\n", ActiveParams.Density);
 
-    Temp = (LVM_INT16)(pContext->SavedDiffusion/10);
+    Temp = (LVM_INT16)(pContext->SavedDiffusion / 10);
 
-    if(ActiveParams.Density != Temp){
+    if (ActiveParams.Density != Temp) {
         ALOGV("\tLVM_ERROR : ReverbGetDiffusion invalid value %d %d", Temp, ActiveParams.Density);
     }
 
-    //ALOGV("\tReverbGetDiffusion end");
+    // ALOGV("\tReverbGetDiffusion end");
     return pContext->SavedDiffusion;
 }
 
@@ -1339,27 +1349,27 @@
 //
 //----------------------------------------------------------------------------
 
-void ReverbSetDensity(ReverbContext *pContext, int16_t level){
-    //ALOGV("\tReverbSetDensity start (%d)", level);
+void ReverbSetDensity(ReverbContext* pContext, int16_t level) {
+    // ALOGV("\tReverbSetDensity start (%d)", level);
 
-    LVREV_ControlParams_st    ActiveParams;              /* Current control Parameters */
-    LVREV_ReturnStatus_en     LvmStatus=LVREV_SUCCESS;     /* Function call status */
+    LVREV_ControlParams_st ActiveParams;             /* Current control Parameters */
+    LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */
 
     /* Get the current settings */
     LvmStatus = LVREV_GetControlParameters(pContext->hInstance, &ActiveParams);
     LVM_ERROR_CHECK(LvmStatus, "LVREV_GetControlParameters", "ReverbSetDensity")
-    //ALOGV("\tReverbSetDensity Succesfully returned from LVM_GetControlParameters\n");
-    //ALOGV("\tReverbSetDensity just Got -> %d\n", ActiveParams.RoomSize);
+    // ALOGV("\tReverbSetDensity Successfully returned from LVM_GetControlParameters\n");
+    // ALOGV("\tReverbSetDensity just Got -> %d\n", ActiveParams.RoomSize);
 
     ActiveParams.RoomSize = (LVM_INT16)(((level * 99) / 1000) + 1);
 
     /* Activate the initial settings */
     LvmStatus = LVREV_SetControlParameters(pContext->hInstance, &ActiveParams);
     LVM_ERROR_CHECK(LvmStatus, "LVREV_SetControlParameters", "ReverbSetDensity")
-    //ALOGV("\tReverbSetDensity just Set -> %d\n", ActiveParams.RoomSize);
+    // ALOGV("\tReverbSetDensity just Set -> %d\n", ActiveParams.RoomSize);
 
     pContext->SavedDensity = level;
-    //ALOGV("\tReverbSetDensity end");
+    // ALOGV("\tReverbSetDensity end");
     return;
 }
 
@@ -1374,25 +1384,25 @@
 //
 //----------------------------------------------------------------------------
 
-int32_t ReverbGetDensity(ReverbContext *pContext){
-    //ALOGV("\tReverbGetDensity start");
+int32_t ReverbGetDensity(ReverbContext* pContext) {
+    // ALOGV("\tReverbGetDensity start");
 
-    LVREV_ControlParams_st    ActiveParams;              /* Current control Parameters */
-    LVREV_ReturnStatus_en     LvmStatus=LVREV_SUCCESS;     /* Function call status */
-    LVM_INT16                 Temp;
+    LVREV_ControlParams_st ActiveParams;             /* Current control Parameters */
+    LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */
+    LVM_INT16 Temp;
     /* Get the current settings */
     LvmStatus = LVREV_GetControlParameters(pContext->hInstance, &ActiveParams);
     LVM_ERROR_CHECK(LvmStatus, "LVREV_GetControlParameters", "ReverbGetDensity")
-    //ALOGV("\tReverbGetDensity Succesfully returned from LVM_GetControlParameters\n");
-    //ALOGV("\tReverbGetDensity() just Got -> %d\n", ActiveParams.RoomSize);
+    // ALOGV("\tReverbGetDensity Successfully returned from LVM_GetControlParameters\n");
+    // ALOGV("\tReverbGetDensity() just Got -> %d\n", ActiveParams.RoomSize);
 
     Temp = (LVM_INT16)(((pContext->SavedDensity * 99) / 1000) + 1);
 
-    if(Temp != ActiveParams.RoomSize){
+    if (Temp != ActiveParams.RoomSize) {
         ALOGV("\tLVM_ERROR : ReverbGetDensity invalid value %d %d", Temp, ActiveParams.RoomSize);
     }
 
-    //ALOGV("\tReverbGetDensity end");
+    // ALOGV("\tReverbGetDensity end");
     return pContext->SavedDensity;
 }
 
@@ -1410,20 +1420,19 @@
 // Side Effects:
 //
 //----------------------------------------------------------------------------
-int Reverb_LoadPreset(ReverbContext   *pContext)
-{
-    //TODO: add reflections delay, level and reverb delay when early reflections are
+int Reverb_LoadPreset(ReverbContext* pContext) {
+    // TODO: add reflections delay, level and reverb delay when early reflections are
     // implemented
     pContext->curPreset = pContext->nextPreset;
 
     if (pContext->curPreset != REVERB_PRESET_NONE) {
-        const t_reverb_settings *preset = &sReverbPresets[pContext->curPreset];
+        const t_reverb_settings* preset = &sReverbPresets[pContext->curPreset];
         ReverbSetRoomLevel(pContext, preset->roomLevel);
         ReverbSetRoomHfLevel(pContext, preset->roomHFLevel);
         ReverbSetDecayTime(pContext, preset->decayTime);
         ReverbSetDecayHfRatio(pContext, preset->decayHFRatio);
-        //reflectionsLevel
-        //reflectionsDelay
+        // reflectionsLevel
+        // reflectionsDelay
         ReverbSetReverbLevel(pContext, preset->reverbLevel);
         // reverbDelay
         ReverbSetDiffusion(pContext, preset->diffusion);
@@ -1454,99 +1463,96 @@
 //
 //----------------------------------------------------------------------------
 
-int Reverb_getParameter(ReverbContext *pContext,
-                        void          *pParam,
-                        uint32_t      *pValueSize,
-                        void          *pValue){
+int Reverb_getParameter(ReverbContext* pContext, void* pParam, uint32_t* pValueSize, void* pValue) {
     int status = 0;
-    int32_t *pParamTemp = (int32_t *)pParam;
+    int32_t* pParamTemp = (int32_t*)pParam;
     int32_t param = *pParamTemp++;
-    t_reverb_settings *pProperties;
+    t_reverb_settings* pProperties;
 
-    //ALOGV("\tReverb_getParameter start");
+    // ALOGV("\tReverb_getParameter start");
     if (pContext->preset) {
         if (param != REVERB_PARAM_PRESET || *pValueSize < sizeof(uint16_t)) {
             return -EINVAL;
         }
 
-        *(uint16_t *)pValue = pContext->nextPreset;
+        *(uint16_t*)pValue = pContext->nextPreset;
         ALOGV("get REVERB_PARAM_PRESET, preset %d", pContext->nextPreset);
         return 0;
     }
 
-    switch (param){
+    switch (param) {
         case REVERB_PARAM_ROOM_LEVEL:
-            if (*pValueSize != sizeof(int16_t)){
+            if (*pValueSize != sizeof(int16_t)) {
                 ALOGV("\tLVM_ERROR : Reverb_getParameter() invalid pValueSize1 %d", *pValueSize);
                 return -EINVAL;
             }
             *pValueSize = sizeof(int16_t);
             break;
         case REVERB_PARAM_ROOM_HF_LEVEL:
-            if (*pValueSize != sizeof(int16_t)){
+            if (*pValueSize != sizeof(int16_t)) {
                 ALOGV("\tLVM_ERROR : Reverb_getParameter() invalid pValueSize12 %d", *pValueSize);
                 return -EINVAL;
             }
             *pValueSize = sizeof(int16_t);
             break;
         case REVERB_PARAM_DECAY_TIME:
-            if (*pValueSize != sizeof(uint32_t)){
+            if (*pValueSize != sizeof(uint32_t)) {
                 ALOGV("\tLVM_ERROR : Reverb_getParameter() invalid pValueSize3 %d", *pValueSize);
                 return -EINVAL;
             }
             *pValueSize = sizeof(uint32_t);
             break;
         case REVERB_PARAM_DECAY_HF_RATIO:
-            if (*pValueSize != sizeof(int16_t)){
+            if (*pValueSize != sizeof(int16_t)) {
                 ALOGV("\tLVM_ERROR : Reverb_getParameter() invalid pValueSize4 %d", *pValueSize);
                 return -EINVAL;
             }
             *pValueSize = sizeof(int16_t);
             break;
         case REVERB_PARAM_REFLECTIONS_LEVEL:
-            if (*pValueSize != sizeof(int16_t)){
+            if (*pValueSize != sizeof(int16_t)) {
                 ALOGV("\tLVM_ERROR : Reverb_getParameter() invalid pValueSize5 %d", *pValueSize);
                 return -EINVAL;
             }
             *pValueSize = sizeof(int16_t);
             break;
         case REVERB_PARAM_REFLECTIONS_DELAY:
-            if (*pValueSize != sizeof(uint32_t)){
+            if (*pValueSize != sizeof(uint32_t)) {
                 ALOGV("\tLVM_ERROR : Reverb_getParameter() invalid pValueSize6 %d", *pValueSize);
                 return -EINVAL;
             }
             *pValueSize = sizeof(uint32_t);
             break;
         case REVERB_PARAM_REVERB_LEVEL:
-            if (*pValueSize != sizeof(int16_t)){
+            if (*pValueSize != sizeof(int16_t)) {
                 ALOGV("\tLVM_ERROR : Reverb_getParameter() invalid pValueSize7 %d", *pValueSize);
                 return -EINVAL;
             }
             *pValueSize = sizeof(int16_t);
             break;
         case REVERB_PARAM_REVERB_DELAY:
-            if (*pValueSize != sizeof(uint32_t)){
+            if (*pValueSize != sizeof(uint32_t)) {
                 ALOGV("\tLVM_ERROR : Reverb_getParameter() invalid pValueSize8 %d", *pValueSize);
                 return -EINVAL;
             }
             *pValueSize = sizeof(uint32_t);
             break;
         case REVERB_PARAM_DIFFUSION:
-            if (*pValueSize != sizeof(int16_t)){
+            if (*pValueSize != sizeof(int16_t)) {
                 ALOGV("\tLVM_ERROR : Reverb_getParameter() invalid pValueSize9 %d", *pValueSize);
                 return -EINVAL;
             }
             *pValueSize = sizeof(int16_t);
             break;
         case REVERB_PARAM_DENSITY:
-            if (*pValueSize != sizeof(int16_t)){
+            if (*pValueSize != sizeof(int16_t)) {
                 ALOGV("\tLVM_ERROR : Reverb_getParameter() invalid pValueSize10 %d", *pValueSize);
                 return -EINVAL;
             }
             *pValueSize = sizeof(int16_t);
             break;
         case REVERB_PARAM_PROPERTIES:
-            if (*pValueSize != sizeof(t_reverb_settings)){
+            if (*pValueSize != sizeof(t_reverb_settings)) {
                 ALOGV("\tLVM_ERROR : Reverb_getParameter() invalid pValueSize11 %d", *pValueSize);
                 return -EINVAL;
             }
@@ -1558,9 +1564,9 @@
             return -EINVAL;
     }
 
-    pProperties = (t_reverb_settings *) pValue;
+    pProperties = (t_reverb_settings*)pValue;
 
-    switch (param){
+    switch (param) {
         case REVERB_PARAM_PROPERTIES:
             pProperties->roomLevel = ReverbGetRoomLevel(pContext);
             pProperties->roomHFLevel = ReverbGetRoomHfLevel(pContext);
@@ -1574,74 +1580,74 @@
             pProperties->density = ReverbGetDensity(pContext);
 
             ALOGV("\tReverb_getParameter() REVERB_PARAM_PROPERTIES Value is roomLevel        %d",
-                pProperties->roomLevel);
+                  pProperties->roomLevel);
             ALOGV("\tReverb_getParameter() REVERB_PARAM_PROPERTIES Value is roomHFLevel      %d",
-                pProperties->roomHFLevel);
+                  pProperties->roomHFLevel);
             ALOGV("\tReverb_getParameter() REVERB_PARAM_PROPERTIES Value is decayTime        %d",
-                pProperties->decayTime);
+                  pProperties->decayTime);
             ALOGV("\tReverb_getParameter() REVERB_PARAM_PROPERTIES Value is decayHFRatio     %d",
-                pProperties->decayHFRatio);
+                  pProperties->decayHFRatio);
             ALOGV("\tReverb_getParameter() REVERB_PARAM_PROPERTIES Value is reflectionsLevel %d",
-                pProperties->reflectionsLevel);
+                  pProperties->reflectionsLevel);
             ALOGV("\tReverb_getParameter() REVERB_PARAM_PROPERTIES Value is reflectionsDelay %d",
-                pProperties->reflectionsDelay);
+                  pProperties->reflectionsDelay);
             ALOGV("\tReverb_getParameter() REVERB_PARAM_PROPERTIES Value is reverbDelay      %d",
-                pProperties->reverbDelay);
+                  pProperties->reverbDelay);
             ALOGV("\tReverb_getParameter() REVERB_PARAM_PROPERTIES Value is reverbLevel      %d",
-                pProperties->reverbLevel);
+                  pProperties->reverbLevel);
             ALOGV("\tReverb_getParameter() REVERB_PARAM_PROPERTIES Value is diffusion        %d",
-                pProperties->diffusion);
+                  pProperties->diffusion);
             ALOGV("\tReverb_getParameter() REVERB_PARAM_PROPERTIES Value is density          %d",
-                pProperties->density);
+                  pProperties->density);
             break;
 
         case REVERB_PARAM_ROOM_LEVEL:
-            *(int16_t *)pValue = ReverbGetRoomLevel(pContext);
+            *(int16_t*)pValue = ReverbGetRoomLevel(pContext);
 
-            //ALOGV("\tReverb_getParameter() REVERB_PARAM_ROOM_LEVEL Value is %d",
+            // ALOGV("\tReverb_getParameter() REVERB_PARAM_ROOM_LEVEL Value is %d",
             //        *(int16_t *)pValue);
             break;
         case REVERB_PARAM_ROOM_HF_LEVEL:
-            *(int16_t *)pValue = ReverbGetRoomHfLevel(pContext);
+            *(int16_t*)pValue = ReverbGetRoomHfLevel(pContext);
 
-            //ALOGV("\tReverb_getParameter() REVERB_PARAM_ROOM_HF_LEVEL Value is %d",
+            // ALOGV("\tReverb_getParameter() REVERB_PARAM_ROOM_HF_LEVEL Value is %d",
             //        *(int16_t *)pValue);
             break;
         case REVERB_PARAM_DECAY_TIME:
-            *(uint32_t *)pValue = ReverbGetDecayTime(pContext);
+            *(uint32_t*)pValue = ReverbGetDecayTime(pContext);
 
-            //ALOGV("\tReverb_getParameter() REVERB_PARAM_DECAY_TIME Value is %d",
+            // ALOGV("\tReverb_getParameter() REVERB_PARAM_DECAY_TIME Value is %d",
             //        *(int32_t *)pValue);
             break;
         case REVERB_PARAM_DECAY_HF_RATIO:
-            *(int16_t *)pValue = ReverbGetDecayHfRatio(pContext);
+            *(int16_t*)pValue = ReverbGetDecayHfRatio(pContext);
 
-            //ALOGV("\tReverb_getParameter() REVERB_PARAM_DECAY_HF_RATION Value is %d",
+            // ALOGV("\tReverb_getParameter() REVERB_PARAM_DECAY_HF_RATION Value is %d",
             //        *(int16_t *)pValue);
             break;
         case REVERB_PARAM_REVERB_LEVEL:
-             *(int16_t *)pValue = ReverbGetReverbLevel(pContext);
+            *(int16_t*)pValue = ReverbGetReverbLevel(pContext);
 
-            //ALOGV("\tReverb_getParameter() REVERB_PARAM_REVERB_LEVEL Value is %d",
+            // ALOGV("\tReverb_getParameter() REVERB_PARAM_REVERB_LEVEL Value is %d",
             //        *(int16_t *)pValue);
             break;
         case REVERB_PARAM_DIFFUSION:
-            *(int16_t *)pValue = ReverbGetDiffusion(pContext);
+            *(int16_t*)pValue = ReverbGetDiffusion(pContext);
 
-            //ALOGV("\tReverb_getParameter() REVERB_PARAM_DECAY_DIFFUSION Value is %d",
+            // ALOGV("\tReverb_getParameter() REVERB_PARAM_DECAY_DIFFUSION Value is %d",
             //        *(int16_t *)pValue);
             break;
         case REVERB_PARAM_DENSITY:
-            *(int16_t *)pValue = ReverbGetDensity(pContext);
-            //ALOGV("\tReverb_getParameter() REVERB_PARAM_DENSITY Value is %d",
+            *(int16_t*)pValue = ReverbGetDensity(pContext);
+            // ALOGV("\tReverb_getParameter() REVERB_PARAM_DENSITY Value is %d",
             //        *(uint32_t *)pValue);
             break;
         case REVERB_PARAM_REFLECTIONS_LEVEL:
-            *(uint16_t *)pValue = 0;
+            *(uint16_t*)pValue = 0;
             break;
         case REVERB_PARAM_REFLECTIONS_DELAY:
         case REVERB_PARAM_REVERB_DELAY:
-            *(uint32_t *)pValue = 0;
+            *(uint32_t*)pValue = 0;
             break;
 
         default:
@@ -1650,7 +1656,7 @@
             break;
     }
 
-    //ALOGV("\tReverb_getParameter end");
+    // ALOGV("\tReverb_getParameter end");
     return status;
 } /* end Reverb_getParameter */
 
@@ -1670,16 +1676,16 @@
 //
 //----------------------------------------------------------------------------
 
-int Reverb_setParameter (ReverbContext *pContext, void *pParam, void *pValue, int vsize){
+int Reverb_setParameter(ReverbContext* pContext, void* pParam, void* pValue, int vsize) {
     int status = 0;
     int16_t level;
     int16_t ratio;
     uint32_t time;
-    t_reverb_settings *pProperties;
-    int32_t *pParamTemp = (int32_t *)pParam;
+    t_reverb_settings* pProperties;
+    int32_t* pParamTemp = (int32_t*)pParam;
     int32_t param = *pParamTemp++;
 
-    //ALOGV("\tReverb_setParameter start");
+    // ALOGV("\tReverb_setParameter start");
     if (pContext->preset) {
         if (param != REVERB_PARAM_PRESET) {
             return -EINVAL;
@@ -1689,7 +1695,7 @@
             return -EINVAL;
         }
 
-        uint16_t preset = *(uint16_t *)pValue;
+        uint16_t preset = *(uint16_t*)pValue;
         ALOGV("set REVERB_PARAM_PRESET, preset %d", preset);
         if (preset > REVERB_PRESET_LAST) {
             return -EINVAL;
@@ -1703,10 +1709,10 @@
         return -EINVAL;
     }
 
-    switch (param){
+    switch (param) {
         case REVERB_PARAM_PROPERTIES:
             ALOGV("\tReverb_setParameter() REVERB_PARAM_PROPERTIES");
-            pProperties = (t_reverb_settings *) pValue;
+            pProperties = (t_reverb_settings*)pValue;
             ReverbSetRoomLevel(pContext, pProperties->roomLevel);
             ReverbSetRoomHfLevel(pContext, pProperties->roomHFLevel);
             ReverbSetDecayTime(pContext, pProperties->decayTime);
@@ -1716,55 +1722,55 @@
             ReverbSetDensity(pContext, pProperties->density);
             break;
         case REVERB_PARAM_ROOM_LEVEL:
-            level = *(int16_t *)pValue;
-            //ALOGV("\tReverb_setParameter() REVERB_PARAM_ROOM_LEVEL value is %d", level);
-            //ALOGV("\tReverb_setParameter() Calling ReverbSetRoomLevel");
+            level = *(int16_t*)pValue;
+            // ALOGV("\tReverb_setParameter() REVERB_PARAM_ROOM_LEVEL value is %d", level);
+            // ALOGV("\tReverb_setParameter() Calling ReverbSetRoomLevel");
             ReverbSetRoomLevel(pContext, level);
-            //ALOGV("\tReverb_setParameter() Called ReverbSetRoomLevel");
-           break;
-        case REVERB_PARAM_ROOM_HF_LEVEL:
-            level = *(int16_t *)pValue;
-            //ALOGV("\tReverb_setParameter() REVERB_PARAM_ROOM_HF_LEVEL value is %d", level);
-            //ALOGV("\tReverb_setParameter() Calling ReverbSetRoomHfLevel");
-            ReverbSetRoomHfLevel(pContext, level);
-            //ALOGV("\tReverb_setParameter() Called ReverbSetRoomHfLevel");
-           break;
-        case REVERB_PARAM_DECAY_TIME:
-            time = *(uint32_t *)pValue;
-            //ALOGV("\tReverb_setParameter() REVERB_PARAM_DECAY_TIME value is %d", time);
-            //ALOGV("\tReverb_setParameter() Calling ReverbSetDecayTime");
-            ReverbSetDecayTime(pContext, time);
-            //ALOGV("\tReverb_setParameter() Called ReverbSetDecayTime");
-           break;
-        case REVERB_PARAM_DECAY_HF_RATIO:
-            ratio = *(int16_t *)pValue;
-            //ALOGV("\tReverb_setParameter() REVERB_PARAM_DECAY_HF_RATIO value is %d", ratio);
-            //ALOGV("\tReverb_setParameter() Calling ReverbSetDecayHfRatio");
-            ReverbSetDecayHfRatio(pContext, ratio);
-            //ALOGV("\tReverb_setParameter() Called ReverbSetDecayHfRatio");
+            // ALOGV("\tReverb_setParameter() Called ReverbSetRoomLevel");
             break;
-         case REVERB_PARAM_REVERB_LEVEL:
-            level = *(int16_t *)pValue;
-            //ALOGV("\tReverb_setParameter() REVERB_PARAM_REVERB_LEVEL value is %d", level);
-            //ALOGV("\tReverb_setParameter() Calling ReverbSetReverbLevel");
+        case REVERB_PARAM_ROOM_HF_LEVEL:
+            level = *(int16_t*)pValue;
+            // ALOGV("\tReverb_setParameter() REVERB_PARAM_ROOM_HF_LEVEL value is %d", level);
+            // ALOGV("\tReverb_setParameter() Calling ReverbSetRoomHfLevel");
+            ReverbSetRoomHfLevel(pContext, level);
+            // ALOGV("\tReverb_setParameter() Called ReverbSetRoomHfLevel");
+            break;
+        case REVERB_PARAM_DECAY_TIME:
+            time = *(uint32_t*)pValue;
+            // ALOGV("\tReverb_setParameter() REVERB_PARAM_DECAY_TIME value is %d", time);
+            // ALOGV("\tReverb_setParameter() Calling ReverbSetDecayTime");
+            ReverbSetDecayTime(pContext, time);
+            // ALOGV("\tReverb_setParameter() Called ReverbSetDecayTime");
+            break;
+        case REVERB_PARAM_DECAY_HF_RATIO:
+            ratio = *(int16_t*)pValue;
+            // ALOGV("\tReverb_setParameter() REVERB_PARAM_DECAY_HF_RATIO value is %d", ratio);
+            // ALOGV("\tReverb_setParameter() Calling ReverbSetDecayHfRatio");
+            ReverbSetDecayHfRatio(pContext, ratio);
+            // ALOGV("\tReverb_setParameter() Called ReverbSetDecayHfRatio");
+            break;
+        case REVERB_PARAM_REVERB_LEVEL:
+            level = *(int16_t*)pValue;
+            // ALOGV("\tReverb_setParameter() REVERB_PARAM_REVERB_LEVEL value is %d", level);
+            // ALOGV("\tReverb_setParameter() Calling ReverbSetReverbLevel");
             ReverbSetReverbLevel(pContext, level);
-            //ALOGV("\tReverb_setParameter() Called ReverbSetReverbLevel");
-           break;
+            // ALOGV("\tReverb_setParameter() Called ReverbSetReverbLevel");
+            break;
         case REVERB_PARAM_DIFFUSION:
-            ratio = *(int16_t *)pValue;
-            //ALOGV("\tReverb_setParameter() REVERB_PARAM_DECAY_DIFFUSION value is %d", ratio);
-            //ALOGV("\tReverb_setParameter() Calling ReverbSetDiffusion");
+            ratio = *(int16_t*)pValue;
+            // ALOGV("\tReverb_setParameter() REVERB_PARAM_DECAY_DIFFUSION value is %d", ratio);
+            // ALOGV("\tReverb_setParameter() Calling ReverbSetDiffusion");
             ReverbSetDiffusion(pContext, ratio);
-            //ALOGV("\tReverb_setParameter() Called ReverbSetDiffusion");
+            // ALOGV("\tReverb_setParameter() Called ReverbSetDiffusion");
             break;
         case REVERB_PARAM_DENSITY:
-            ratio = *(int16_t *)pValue;
-            //ALOGV("\tReverb_setParameter() REVERB_PARAM_DECAY_DENSITY value is %d", ratio);
-            //ALOGV("\tReverb_setParameter() Calling ReverbSetDensity");
+            ratio = *(int16_t*)pValue;
+            // ALOGV("\tReverb_setParameter() REVERB_PARAM_DECAY_DENSITY value is %d", ratio);
+            // ALOGV("\tReverb_setParameter() Calling ReverbSetDensity");
             ReverbSetDensity(pContext, ratio);
-            //ALOGV("\tReverb_setParameter() Called ReverbSetDensity");
+            // ALOGV("\tReverb_setParameter() Called ReverbSetDensity");
             break;
-           break;
+            break;
         case REVERB_PARAM_REFLECTIONS_LEVEL:
         case REVERB_PARAM_REFLECTIONS_DELAY:
         case REVERB_PARAM_REVERB_DELAY:
@@ -1774,7 +1780,7 @@
             break;
     }
 
-    //ALOGV("\tReverb_setParameter end");
+    // ALOGV("\tReverb_setParameter end");
     return status;
 } /* end Reverb_setParameter */
 
@@ -1783,52 +1789,46 @@
  */
 int Reverb_paramValueSize(int32_t param) {
     switch (param) {
-    case REVERB_PARAM_ROOM_LEVEL:
-    case REVERB_PARAM_ROOM_HF_LEVEL:
-    case REVERB_PARAM_REFLECTIONS_LEVEL:
-    case REVERB_PARAM_REVERB_LEVEL:
-        return sizeof(int16_t); // millibel
-    case REVERB_PARAM_DECAY_TIME:
-    case REVERB_PARAM_REFLECTIONS_DELAY:
-    case REVERB_PARAM_REVERB_DELAY:
-        return sizeof(uint32_t); // milliseconds
-    case REVERB_PARAM_DECAY_HF_RATIO:
-    case REVERB_PARAM_DIFFUSION:
-    case REVERB_PARAM_DENSITY:
-        return sizeof(int16_t); // permille
-    case REVERB_PARAM_PROPERTIES:
-        return sizeof(s_reverb_settings); // struct of all reverb properties
+        case REVERB_PARAM_ROOM_LEVEL:
+        case REVERB_PARAM_ROOM_HF_LEVEL:
+        case REVERB_PARAM_REFLECTIONS_LEVEL:
+        case REVERB_PARAM_REVERB_LEVEL:
+            return sizeof(int16_t);  // millibel
+        case REVERB_PARAM_DECAY_TIME:
+        case REVERB_PARAM_REFLECTIONS_DELAY:
+        case REVERB_PARAM_REVERB_DELAY:
+            return sizeof(uint32_t);  // milliseconds
+        case REVERB_PARAM_DECAY_HF_RATIO:
+        case REVERB_PARAM_DIFFUSION:
+        case REVERB_PARAM_DENSITY:
+            return sizeof(int16_t);  // permille
+        case REVERB_PARAM_PROPERTIES:
+            return sizeof(s_reverb_settings);  // struct of all reverb properties
     }
     return sizeof(int32_t);
 }
 
-} // namespace
-} // namespace
+}  // namespace
+}  // namespace android
 
 extern "C" {
 /* Effect Control Interface Implementation: Process */
-int Reverb_process(effect_handle_t   self,
-                                 audio_buffer_t         *inBuffer,
-                                 audio_buffer_t         *outBuffer){
-    android::ReverbContext * pContext = (android::ReverbContext *) self;
-    int    status = 0;
+int Reverb_process(effect_handle_t self, audio_buffer_t* inBuffer, audio_buffer_t* outBuffer) {
+    android::ReverbContext* pContext = (android::ReverbContext*)self;
+    int status = 0;
 
-    if (pContext == NULL){
+    if (pContext == NULL) {
         ALOGV("\tLVM_ERROR : Reverb_process() ERROR pContext == NULL");
         return -EINVAL;
     }
-    if (inBuffer == NULL  || inBuffer->raw == NULL  ||
-            outBuffer == NULL || outBuffer->raw == NULL ||
-            inBuffer->frameCount != outBuffer->frameCount){
+    if (inBuffer == NULL || inBuffer->raw == NULL || outBuffer == NULL || outBuffer->raw == NULL ||
+        inBuffer->frameCount != outBuffer->frameCount) {
         ALOGV("\tLVM_ERROR : Reverb_process() ERROR NULL INPUT POINTER OR FRAME COUNT IS WRONG");
         return -EINVAL;
     }
-    //ALOGV("\tReverb_process() Calling process with %d frames", outBuffer->frameCount);
+    // ALOGV("\tReverb_process() Calling process with %d frames", outBuffer->frameCount);
     /* Process all the available frames, block processing is handled internalLY by the LVM bundle */
-    status = process(    inBuffer->f32,
-                         outBuffer->f32,
-                         outBuffer->frameCount,
-                         pContext);
+    status = process(inBuffer->f32, outBuffer->f32, outBuffer->frameCount, pContext);
 
     if (pContext->bEnabled == LVM_FALSE) {
         if (pContext->SamplesToExitCount > 0) {
@@ -1840,72 +1840,67 @@
     }
 
     return status;
-}   /* end Reverb_process */
+} /* end Reverb_process */
 
 /* Effect Control Interface Implementation: Command */
-int Reverb_command(effect_handle_t  self,
-                              uint32_t            cmdCode,
-                              uint32_t            cmdSize,
-                              void                *pCmdData,
-                              uint32_t            *replySize,
-                              void                *pReplyData){
-    android::ReverbContext * pContext = (android::ReverbContext *) self;
-    LVREV_ControlParams_st    ActiveParams;              /* Current control Parameters */
-    LVREV_ReturnStatus_en     LvmStatus=LVREV_SUCCESS;     /* Function call status */
+int Reverb_command(effect_handle_t self, uint32_t cmdCode, uint32_t cmdSize, void* pCmdData,
+                   uint32_t* replySize, void* pReplyData) {
+    android::ReverbContext* pContext = (android::ReverbContext*)self;
+    LVREV_ControlParams_st ActiveParams;             /* Current control Parameters */
+    LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */
 
-    if (pContext == NULL){
+    if (pContext == NULL) {
         ALOGV("\tLVM_ERROR : Reverb_command ERROR pContext == NULL");
         return -EINVAL;
     }
 
-    //ALOGV("\tReverb_command INPUTS are: command %d cmdSize %d",cmdCode, cmdSize);
+    // ALOGV("\tReverb_command INPUTS are: command %d cmdSize %d",cmdCode, cmdSize);
 
-    switch (cmdCode){
+    switch (cmdCode) {
         case EFFECT_CMD_INIT:
-            //ALOGV("\tReverb_command cmdCode Case: "
+            // ALOGV("\tReverb_command cmdCode Case: "
             //        "EFFECT_CMD_INIT start");
 
-            if (pReplyData == NULL || replySize == NULL || *replySize != sizeof(int)){
+            if (pReplyData == NULL || replySize == NULL || *replySize != sizeof(int)) {
                 ALOGV("\tLVM_ERROR : Reverb_command cmdCode Case: "
-                        "EFFECT_CMD_INIT: ERROR");
+                      "EFFECT_CMD_INIT: ERROR");
                 return -EINVAL;
             }
-            *(int *) pReplyData = 0;
+            *(int*)pReplyData = 0;
             break;
 
         case EFFECT_CMD_SET_CONFIG:
-            //ALOGV("\tReverb_command cmdCode Case: "
+            // ALOGV("\tReverb_command cmdCode Case: "
             //        "EFFECT_CMD_SET_CONFIG start");
-            if (pCmdData == NULL || cmdSize != sizeof(effect_config_t) ||
-                    pReplyData == NULL || replySize == NULL || *replySize != sizeof(int)) {
+            if (pCmdData == NULL || cmdSize != sizeof(effect_config_t) || pReplyData == NULL ||
+                replySize == NULL || *replySize != sizeof(int)) {
                 ALOGV("\tLVM_ERROR : Reverb_command cmdCode Case: "
-                        "EFFECT_CMD_SET_CONFIG: ERROR");
+                      "EFFECT_CMD_SET_CONFIG: ERROR");
                 return -EINVAL;
             }
-            *(int *) pReplyData = android::Reverb_setConfig(pContext,
-                                                            (effect_config_t *) pCmdData);
+            *(int*)pReplyData = android::Reverb_setConfig(pContext, (effect_config_t*)pCmdData);
             break;
 
         case EFFECT_CMD_GET_CONFIG:
             if (pReplyData == NULL || replySize == NULL || *replySize != sizeof(effect_config_t)) {
                 ALOGV("\tLVM_ERROR : Reverb_command cmdCode Case: "
-                        "EFFECT_CMD_GET_CONFIG: ERROR");
+                      "EFFECT_CMD_GET_CONFIG: ERROR");
                 return -EINVAL;
             }
 
-            android::Reverb_getConfig(pContext, (effect_config_t *)pReplyData);
+            android::Reverb_getConfig(pContext, (effect_config_t*)pReplyData);
             break;
 
         case EFFECT_CMD_RESET:
-            //ALOGV("\tReverb_command cmdCode Case: "
+            // ALOGV("\tReverb_command cmdCode Case: "
             //        "EFFECT_CMD_RESET start");
             Reverb_setConfig(pContext, &pContext->config);
             break;
 
-        case EFFECT_CMD_GET_PARAM:{
-            //ALOGV("\tReverb_command cmdCode Case: "
+        case EFFECT_CMD_GET_PARAM: {
+            // ALOGV("\tReverb_command cmdCode Case: "
             //        "EFFECT_CMD_GET_PARAM start");
-            effect_param_t *p = (effect_param_t *)pCmdData;
+            effect_param_t* p = (effect_param_t*)pCmdData;
             if (pCmdData == nullptr) {
                 ALOGW("\tLVM_ERROR : pCmdData is NULL");
                 return -EINVAL;
@@ -1914,163 +1909,156 @@
                 android_errorWriteLog(0x534e4554, "26347509");
                 return -EINVAL;
             }
-            if (cmdSize < sizeof(effect_param_t) ||
-                    cmdSize < (sizeof(effect_param_t) + p->psize) ||
-                    pReplyData == NULL || replySize == NULL ||
-                    *replySize < (sizeof(effect_param_t) + p->psize)) {
+            if (cmdSize < sizeof(effect_param_t) || cmdSize < (sizeof(effect_param_t) + p->psize) ||
+                pReplyData == NULL || replySize == NULL ||
+                *replySize < (sizeof(effect_param_t) + p->psize)) {
                 ALOGV("\tLVM_ERROR : Reverb_command cmdCode Case: "
-                        "EFFECT_CMD_GET_PARAM: ERROR");
+                      "EFFECT_CMD_GET_PARAM: ERROR");
                 return -EINVAL;
             }
 
             memcpy(pReplyData, pCmdData, sizeof(effect_param_t) + p->psize);
 
-            p = (effect_param_t *)pReplyData;
+            p = (effect_param_t*)pReplyData;
 
             int voffset = ((p->psize - 1) / sizeof(int32_t) + 1) * sizeof(int32_t);
 
-            p->status = android::Reverb_getParameter(pContext,
-                                                         (void *)p->data,
-                                                          &p->vsize,
-                                                          p->data + voffset);
+            p->status = android::Reverb_getParameter(pContext, (void*)p->data, &p->vsize,
+                                                     p->data + voffset);
 
             *replySize = sizeof(effect_param_t) + voffset + p->vsize;
 
-            //ALOGV("\tReverb_command EFFECT_CMD_GET_PARAM "
+            // ALOGV("\tReverb_command EFFECT_CMD_GET_PARAM "
             //        "*pCmdData %d, *replySize %d, *pReplyData %d ",
             //        *(int32_t *)((char *)pCmdData + sizeof(effect_param_t)),
             //        *replySize,
             //        *(int16_t *)((char *)pReplyData + sizeof(effect_param_t) + voffset));
 
         } break;
-        case EFFECT_CMD_SET_PARAM:{
-
-            //ALOGV("\tReverb_command cmdCode Case: "
+        case EFFECT_CMD_SET_PARAM: {
+            // ALOGV("\tReverb_command cmdCode Case: "
             //        "EFFECT_CMD_SET_PARAM start");
-            //ALOGV("\tReverb_command EFFECT_CMD_SET_PARAM param %d, *replySize %d, value %d ",
+            // ALOGV("\tReverb_command EFFECT_CMD_SET_PARAM param %d, *replySize %d, value %d ",
             //        *(int32_t *)((char *)pCmdData + sizeof(effect_param_t)),
             //        *replySize,
             //        *(int16_t *)((char *)pCmdData + sizeof(effect_param_t) + sizeof(int32_t)));
 
             if (pCmdData == NULL || (cmdSize < (sizeof(effect_param_t) + sizeof(int32_t))) ||
-                    pReplyData == NULL ||  replySize == NULL || *replySize != sizeof(int32_t)) {
+                pReplyData == NULL || replySize == NULL || *replySize != sizeof(int32_t)) {
                 ALOGV("\tLVM_ERROR : Reverb_command cmdCode Case: "
-                        "EFFECT_CMD_SET_PARAM: ERROR");
+                      "EFFECT_CMD_SET_PARAM: ERROR");
                 return -EINVAL;
             }
 
-            effect_param_t *p = (effect_param_t *) pCmdData;
+            effect_param_t* p = (effect_param_t*)pCmdData;
 
-            if (p->psize != sizeof(int32_t)){
+            if (p->psize != sizeof(int32_t)) {
                 ALOGV("\t4LVM_ERROR : Reverb_command cmdCode Case: "
-                        "EFFECT_CMD_SET_PARAM: ERROR, psize is not sizeof(int32_t)");
+                      "EFFECT_CMD_SET_PARAM: ERROR, psize is not sizeof(int32_t)");
                 return -EINVAL;
             }
 
-            //ALOGV("\tn5Reverb_command cmdSize is %d\n"
+            // ALOGV("\tn5Reverb_command cmdSize is %d\n"
             //        "\tsizeof(effect_param_t) is  %d\n"
             //        "\tp->psize is %d\n"
             //        "\tp->vsize is %d"
             //        "\n",
             //        cmdSize, sizeof(effect_param_t), p->psize, p->vsize );
 
-            *(int *)pReplyData = android::Reverb_setParameter(pContext,
-                                                             (void *)p->data,
-                                                              p->data + p->psize,
-                                                              p->vsize);
+            *(int*)pReplyData = android::Reverb_setParameter(pContext, (void*)p->data,
+                                                             p->data + p->psize, p->vsize);
         } break;
 
         case EFFECT_CMD_ENABLE:
-            //ALOGV("\tReverb_command cmdCode Case: "
+            // ALOGV("\tReverb_command cmdCode Case: "
             //        "EFFECT_CMD_ENABLE start");
 
-            if (pReplyData == NULL || *replySize != sizeof(int)){
+            if (pReplyData == NULL || *replySize != sizeof(int)) {
                 ALOGV("\tLVM_ERROR : Reverb_command cmdCode Case: "
-                        "EFFECT_CMD_ENABLE: ERROR");
+                      "EFFECT_CMD_ENABLE: ERROR");
                 return -EINVAL;
             }
-            if(pContext->bEnabled == LVM_TRUE){
-                 ALOGV("\tLVM_ERROR : Reverb_command cmdCode Case: "
-                         "EFFECT_CMD_ENABLE: ERROR-Effect is already enabled");
-                 return -EINVAL;
-             }
-            *(int *)pReplyData = 0;
+            if (pContext->bEnabled == LVM_TRUE) {
+                ALOGV("\tLVM_ERROR : Reverb_command cmdCode Case: "
+                      "EFFECT_CMD_ENABLE: ERROR-Effect is already enabled");
+                return -EINVAL;
+            }
+            *(int*)pReplyData = 0;
             pContext->bEnabled = LVM_TRUE;
             /* Get the current settings */
             LvmStatus = LVREV_GetControlParameters(pContext->hInstance, &ActiveParams);
             LVM_ERROR_CHECK(LvmStatus, "LVREV_GetControlParameters", "EFFECT_CMD_ENABLE")
             pContext->SamplesToExitCount =
-                    (ActiveParams.T60 * pContext->config.inputCfg.samplingRate)/1000;
+                    (ActiveParams.T60 * pContext->config.inputCfg.samplingRate) / 1000;
             // force no volume ramp for first buffer processed after enabling the effect
             pContext->volumeMode = android::REVERB_VOLUME_FLAT;
-            //ALOGV("\tEFFECT_CMD_ENABLE SamplesToExitCount = %d", pContext->SamplesToExitCount);
+            // ALOGV("\tEFFECT_CMD_ENABLE SamplesToExitCount = %d", pContext->SamplesToExitCount);
             break;
         case EFFECT_CMD_DISABLE:
-            //ALOGV("\tReverb_command cmdCode Case: "
+            // ALOGV("\tReverb_command cmdCode Case: "
             //        "EFFECT_CMD_DISABLE start");
 
-            if (pReplyData == NULL || *replySize != sizeof(int)){
+            if (pReplyData == NULL || *replySize != sizeof(int)) {
                 ALOGV("\tLVM_ERROR : Reverb_command cmdCode Case: "
-                        "EFFECT_CMD_DISABLE: ERROR");
+                      "EFFECT_CMD_DISABLE: ERROR");
                 return -EINVAL;
             }
-            if(pContext->bEnabled == LVM_FALSE){
-                 ALOGV("\tLVM_ERROR : Reverb_command cmdCode Case: "
-                         "EFFECT_CMD_DISABLE: ERROR-Effect is not yet enabled");
-                 return -EINVAL;
-             }
-            *(int *)pReplyData = 0;
+            if (pContext->bEnabled == LVM_FALSE) {
+                ALOGV("\tLVM_ERROR : Reverb_command cmdCode Case: "
+                      "EFFECT_CMD_DISABLE: ERROR-Effect is not yet enabled");
+                return -EINVAL;
+            }
+            *(int*)pReplyData = 0;
             pContext->bEnabled = LVM_FALSE;
             break;
 
         case EFFECT_CMD_SET_VOLUME:
-            if (pCmdData == NULL ||
-                cmdSize != 2 * sizeof(uint32_t)) {
+            if (pCmdData == NULL || cmdSize != 2 * sizeof(uint32_t)) {
                 ALOGV("\tLVM_ERROR : Reverb_command cmdCode Case: "
-                        "EFFECT_CMD_SET_VOLUME: ERROR");
+                      "EFFECT_CMD_SET_VOLUME: ERROR");
                 return -EINVAL;
             }
 
-            if (pReplyData != NULL) { // we have volume control
-                pContext->leftVolume = (LVM_INT16)((*(uint32_t *)pCmdData + (1 << 11)) >> 12);
-                pContext->rightVolume = (LVM_INT16)((*((uint32_t *)pCmdData + 1) + (1 << 11)) >> 12);
-                *(uint32_t *)pReplyData = (1 << 24);
-                *((uint32_t *)pReplyData + 1) = (1 << 24);
+            if (pReplyData != NULL) {  // we have volume control
+                pContext->leftVolume = (LVM_INT16)((*(uint32_t*)pCmdData + (1 << 11)) >> 12);
+                pContext->rightVolume = (LVM_INT16)((*((uint32_t*)pCmdData + 1) + (1 << 11)) >> 12);
+                *(uint32_t*)pReplyData = (1 << 24);
+                *((uint32_t*)pReplyData + 1) = (1 << 24);
                 if (pContext->volumeMode == android::REVERB_VOLUME_OFF) {
                     // force no volume ramp for first buffer processed after getting volume control
                     pContext->volumeMode = android::REVERB_VOLUME_FLAT;
                 }
-            } else { // we don't have volume control
+            } else {  // we don't have volume control
                 pContext->leftVolume = REVERB_UNIT_VOLUME;
                 pContext->rightVolume = REVERB_UNIT_VOLUME;
                 pContext->volumeMode = android::REVERB_VOLUME_OFF;
             }
-            ALOGV("EFFECT_CMD_SET_VOLUME left %d, right %d mode %d",
-                    pContext->leftVolume, pContext->rightVolume,  pContext->volumeMode);
+            ALOGV("EFFECT_CMD_SET_VOLUME left %d, right %d mode %d", pContext->leftVolume,
+                  pContext->rightVolume, pContext->volumeMode);
             break;
 
         case EFFECT_CMD_SET_DEVICE:
         case EFFECT_CMD_SET_AUDIO_MODE:
-        //ALOGV("\tReverb_command cmdCode Case: "
-        //        "EFFECT_CMD_SET_DEVICE/EFFECT_CMD_SET_VOLUME/EFFECT_CMD_SET_AUDIO_MODE start");
+            // ALOGV("\tReverb_command cmdCode Case: "
+            //        "EFFECT_CMD_SET_DEVICE/EFFECT_CMD_SET_VOLUME/EFFECT_CMD_SET_AUDIO_MODE
+            //        start");
             break;
 
         default:
             ALOGV("\tLVM_ERROR : Reverb_command cmdCode Case: "
-                    "DEFAULT start %d ERROR",cmdCode);
+                  "DEFAULT start %d ERROR",
+                  cmdCode);
             return -EINVAL;
     }
 
-    //ALOGV("\tReverb_command end\n\n");
+    // ALOGV("\tReverb_command end\n\n");
     return 0;
-}    /* end Reverb_command */
+} /* end Reverb_command */
 
 /* Effect Control Interface Implementation: get_descriptor */
-int Reverb_getDescriptor(effect_handle_t   self,
-                                    effect_descriptor_t *pDescriptor)
-{
-    android::ReverbContext * pContext = (android::ReverbContext *)self;
-    const effect_descriptor_t *desc;
+int Reverb_getDescriptor(effect_handle_t self, effect_descriptor_t* pDescriptor) {
+    android::ReverbContext* pContext = (android::ReverbContext*)self;
+    const effect_descriptor_t* desc;
 
     if (pContext == NULL || pDescriptor == NULL) {
         ALOGV("Reverb_getDescriptor() invalid param");
@@ -2094,26 +2082,24 @@
     *pDescriptor = *desc;
 
     return 0;
-}   /* end Reverb_getDescriptor */
+} /* end Reverb_getDescriptor */
 
 // effect_handle_t interface implementation for Reverb effect
 const struct effect_interface_s gReverbInterface = {
-    Reverb_process,
-    Reverb_command,
-    Reverb_getDescriptor,
-    NULL,
-};    /* end gReverbInterface */
+        Reverb_process,
+        Reverb_command,
+        Reverb_getDescriptor,
+        NULL,
+}; /* end gReverbInterface */
 
 // This is the only symbol that needs to be exported
-__attribute__ ((visibility ("default")))
-audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = {
-    .tag = AUDIO_EFFECT_LIBRARY_TAG,
-    .version = EFFECT_LIBRARY_API_VERSION,
-    .name = "Reverb Library",
-    .implementor = "NXP Software Ltd.",
-    .create_effect = android::EffectCreate,
-    .release_effect = android::EffectRelease,
-    .get_descriptor = android::EffectGetDescriptor,
+__attribute__((visibility("default"))) audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = {
+        .tag = AUDIO_EFFECT_LIBRARY_TAG,
+        .version = EFFECT_LIBRARY_API_VERSION,
+        .name = "Reverb Library",
+        .implementor = "NXP Software Ltd.",
+        .create_effect = android::EffectCreate,
+        .release_effect = android::EffectRelease,
+        .get_descriptor = android::EffectGetDescriptor,
 };
-
 }
diff --git a/media/libeffects/lvm/wrapper/Reverb/EffectReverb.h b/media/libeffects/lvm/wrapper/Reverb/EffectReverb.h
index 96223a8..227d953 100644
--- a/media/libeffects/lvm/wrapper/Reverb/EffectReverb.h
+++ b/media/libeffects/lvm/wrapper/Reverb/EffectReverb.h
@@ -20,16 +20,15 @@
 #include <audio_effects/effect_environmentalreverb.h>
 #include <audio_effects/effect_presetreverb.h>
 
-#define MAX_NUM_BANDS           5
-#define MAX_CALL_SIZE           256
-#define LVREV_MAX_T60           7000
-#define LVREV_MAX_REVERB_LEVEL  2000
-#define LVREV_MAX_FRAME_SIZE    2560
-#define LVREV_CUP_LOAD_ARM9E    470    // Expressed in 0.1 MIPS
-#define LVREV_MEM_USAGE         (71+(LVREV_MAX_FRAME_SIZE>>7))     // Expressed in kB
+#define MAX_NUM_BANDS 5
+#define MAX_CALL_SIZE 256
+#define LVREV_MAX_T60 7000
+#define LVREV_MAX_REVERB_LEVEL 2000
+#define LVREV_MAX_FRAME_SIZE 2560
+#define LVREV_CUP_LOAD_ARM9E 470                            // Expressed in 0.1 MIPS
+#define LVREV_MEM_USAGE (71 + (LVREV_MAX_FRAME_SIZE >> 7))  // Expressed in kB
 
-typedef struct _LPFPair_t
-{
+typedef struct _LPFPair_t {
     int16_t Room_HF;
     int16_t LPF;
 } LPFPair_t;
diff --git a/media/libeffects/preprocessing/Android.bp b/media/libeffects/preprocessing/Android.bp
index c87635f..5217cf9 100644
--- a/media/libeffects/preprocessing/Android.bp
+++ b/media/libeffects/preprocessing/Android.bp
@@ -1,6 +1,6 @@
 // audio preprocessing wrapper
 cc_library_shared {
-    name: "libaudiopreprocessing",
+    name: "libaudiopreprocessing_legacy",
 
     vendor: true,
 
@@ -8,12 +8,6 @@
 
     srcs: ["PreProcessing.cpp"],
 
-    include_dirs: [
-        "external/webrtc",
-        "external/webrtc/webrtc/modules/include",
-        "external/webrtc/webrtc/modules/audio_processing/include",
-    ],
-
     shared_libs: [
         "libwebrtc_audio_preprocessing",
         "libspeexresampler",
@@ -23,6 +17,7 @@
 
     cflags: [
         "-DWEBRTC_POSIX",
+        "-DWEBRTC_LEGACY",
         "-fvisibility=hidden",
         "-Wall",
         "-Werror",
@@ -33,3 +28,36 @@
         "libhardware_headers",
     ],
 }
+
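+// Non-legacy build of the pre-processing library, linked against the updated
+// webrtc_audio_processing static library (WEBRTC_LEGACY is not defined here).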
+cc_library_shared {
+    name: "libaudiopreprocessing",
+    vendor: true,
+    relative_install_path: "soundfx",
+    srcs: ["PreProcessing.cpp"],
+    local_include_dirs: [
+        ".",
+    ],
+    cflags: [
+        "-Wall",
+        "-Werror",
+        "-Wextra",
+        "-Wno-unused-parameter",
+    ],
+
+    shared_libs: [
+        "liblog",
+        "libutils",
+    ],
+
+    static_libs: [
+        "webrtc_audio_processing",
+    ],
+
+    header_libs: [
+        "libaudioeffects",
+        "libhardware_headers",
+        "libwebrtc_absl_headers",
+    ],
+}
diff --git a/media/libeffects/preprocessing/PreProcessing.cpp b/media/libeffects/preprocessing/PreProcessing.cpp
index 5fab5be..f2f74a5 100644
--- a/media/libeffects/preprocessing/PreProcessing.cpp
+++ b/media/libeffects/preprocessing/PreProcessing.cpp
@@ -23,10 +23,15 @@
 #include <hardware/audio_effect.h>
 #include <audio_effects/effect_aec.h>
 #include <audio_effects/effect_agc.h>
+#ifndef WEBRTC_LEGACY
+#include <audio_effects/effect_agc2.h>
+#endif
 #include <audio_effects/effect_ns.h>
 #include <module_common_types.h>
 #include <audio_processing.h>
+#ifdef WEBRTC_LEGACY
 #include "speex/speex_resampler.h"
+#endif
 
 // undefine to perform multi channels API functional tests
 //#define DUAL_MIC_TEST
@@ -42,6 +47,9 @@
 enum preproc_id
 {
     PREPROC_AGC,        // Automatic Gain Control
+#ifndef WEBRTC_LEGACY
+    PREPROC_AGC2,       // Automatic Gain Control 2
+#endif
     PREPROC_AEC,        // Acoustic Echo Canceler
     PREPROC_NS,         // Noise Suppressor
     PREPROC_NUM_EFFECTS
@@ -103,6 +111,10 @@
     int id;                             // audio session ID
     int io;                             // handle of input stream this session is on
     webrtc::AudioProcessing* apm;       // handle on webRTC audio processing module (APM)
+#ifndef WEBRTC_LEGACY
+    // Audio Processing module builder
+    webrtc::AudioProcessingBuilder ap_builder;
+#endif
     size_t apmFrameCount;               // buffer size for webRTC process (10 ms)
     uint32_t apmSamplingRate;           // webRTC APM sampling rate (8/16 or 32 kHz)
     size_t frameCount;                  // buffer size before input resampler ( <=> apmFrameCount)
@@ -113,25 +125,42 @@
     uint32_t enabledMsk;                // bit field containing IDs of enabled pre processors
     uint32_t processedMsk;              // bit field containing IDs of pre processors already
                                         // processed in current round
+#ifdef WEBRTC_LEGACY
     webrtc::AudioFrame *procFrame;      // audio frame passed to webRTC AMP ProcessStream()
+#else
+    // audio config structure
+    webrtc::AudioProcessing::Config config;
+    webrtc::StreamConfig inputConfig;   // input stream configuration
+    webrtc::StreamConfig outputConfig;  // output stream configuration
+#endif
     int16_t *inBuf;                     // input buffer used when resampling
     size_t inBufSize;                   // input buffer size in frames
     size_t framesIn;                    // number of frames in input buffer
+#ifdef WEBRTC_LEGACY
     SpeexResamplerState *inResampler;   // handle on input speex resampler
+#endif
     int16_t *outBuf;                    // output buffer used when resampling
     size_t outBufSize;                  // output buffer size in frames
     size_t framesOut;                   // number of frames in output buffer
+#ifdef WEBRTC_LEGACY
     SpeexResamplerState *outResampler;  // handle on output speex resampler
+#endif
     uint32_t revChannelCount;           // number of channels on reverse stream
     uint32_t revEnabledMsk;             // bit field containing IDs of enabled pre processors
                                         // with reverse channel
     uint32_t revProcessedMsk;           // bit field containing IDs of pre processors with reverse
                                         // channel already processed in current round
+#ifdef WEBRTC_LEGACY
     webrtc::AudioFrame *revFrame;       // audio frame passed to webRTC AMP AnalyzeReverseStream()
+#else
+    webrtc::StreamConfig revConfig;     // reverse stream configuration.
+#endif
     int16_t *revBuf;                    // reverse channel input buffer
     size_t revBufSize;                  // reverse channel input buffer size
     size_t framesRev;                   // number of frames in reverse channel input buffer
+#ifdef WEBRTC_LEGACY
     SpeexResamplerState *revResampler;  // handle on reverse channel input speex resampler
+#endif
 };
 
 #ifdef DUAL_MIC_TEST
@@ -188,6 +217,20 @@
         "The Android Open Source Project"
 };
 
+#ifndef WEBRTC_LEGACY
+// Automatic Gain Control 2
+static const effect_descriptor_t sAgc2Descriptor = {
+        { 0xae3c653b, 0xbe18, 0x4ab8, 0x8938, { 0x41, 0x8f, 0x0a, 0x7f, 0x06, 0xac } }, // type
+        { 0x89f38e65, 0xd4d2, 0x4d64, 0xad0e, { 0x2b, 0x3e, 0x79, 0x9e, 0xa8, 0x86 } }, // uuid
+        EFFECT_CONTROL_API_VERSION,
+        (EFFECT_FLAG_TYPE_PRE_PROC|EFFECT_FLAG_DEVICE_IND),
+        0, //FIXME indicate CPU load
+        0, //FIXME indicate memory usage
+        "Automatic Gain Control 2",
+        "The Android Open Source Project"
+};
+#endif
+
 // Acoustic Echo Cancellation
 static const effect_descriptor_t sAecDescriptor = {
         { 0x7b491460, 0x8d4d, 0x11e0, 0xbd61, { 0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b } }, // type
@@ -215,6 +258,9 @@
 
 static const effect_descriptor_t *sDescriptors[PREPROC_NUM_EFFECTS] = {
         &sAgcDescriptor,
+#ifndef WEBRTC_LEGACY
+        &sAgc2Descriptor,
+#endif
         &sAecDescriptor,
         &sNsDescriptor
 };
@@ -225,6 +271,9 @@
 
 const effect_uuid_t * const sUuidToPreProcTable[PREPROC_NUM_EFFECTS] = {
         FX_IID_AGC,
+#ifndef WEBRTC_LEGACY
+        FX_IID_AGC2,
+#endif
         FX_IID_AEC,
         FX_IID_NS
 };
@@ -266,19 +315,52 @@
 static const int kAgcDefaultCompGain = 9;
 static const bool kAgcDefaultLimiter = true;
 
+#ifndef WEBRTC_LEGACY
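+// Apply default AGC2 (gain_controller2) settings: zero fixed digital gain,
+// RMS-based adaptive level estimation and a 2 dB extra saturation margin.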
+int  Agc2Init (preproc_effect_t *effect)
+{
+    ALOGV("Agc2Init");
+    effect->session->config = effect->session->apm->GetConfig();
+    effect->session->config.gain_controller2.fixed_digital.gain_db = 0.f;
+    effect->session->config.gain_controller2.adaptive_digital.level_estimator =
+        effect->session->config.gain_controller2.kRms;
+    effect->session->config.gain_controller2.adaptive_digital.extra_saturation_margin_db = 2.f;
+    effect->session->apm->ApplyConfig(effect->session->config);
+    return 0;
+}
+#endif
+
 int  AgcInit (preproc_effect_t *effect)
 {
     ALOGV("AgcInit");
+#ifdef WEBRTC_LEGACY
     webrtc::GainControl *agc = static_cast<webrtc::GainControl *>(effect->engine);
     agc->set_mode(webrtc::GainControl::kFixedDigital);
     agc->set_target_level_dbfs(kAgcDefaultTargetLevel);
     agc->set_compression_gain_db(kAgcDefaultCompGain);
     agc->enable_limiter(kAgcDefaultLimiter);
+#else
+    effect->session->config = effect->session->apm->GetConfig();
+    effect->session->config.gain_controller1.target_level_dbfs = kAgcDefaultTargetLevel;
+    effect->session->config.gain_controller1.compression_gain_db = kAgcDefaultCompGain;
+    effect->session->config.gain_controller1.enable_limiter = kAgcDefaultLimiter;
+    effect->session->apm->ApplyConfig(effect->session->config);
+#endif
     return 0;
 }
 
+#ifndef WEBRTC_LEGACY
+int  Agc2Create(preproc_effect_t *effect)
+{
+    Agc2Init(effect);
+    return 0;
+}
+#endif
+
 int  AgcCreate(preproc_effect_t *effect)
 {
+#ifdef WEBRTC_LEGACY
     webrtc::GainControl *agc = effect->session->apm->gain_control();
     ALOGV("AgcCreate got agc %p", agc);
     if (agc == NULL) {
@@ -286,10 +366,93 @@
         return -ENOMEM;
     }
     effect->engine = static_cast<preproc_fx_handle_t>(agc);
+#endif
     AgcInit(effect);
     return 0;
 }
 
+#ifndef WEBRTC_LEGACY
+int Agc2GetParameter(preproc_effect_t *effect,
+                    void *pParam,
+                    uint32_t *pValueSize,
+                    void *pValue)
+{
+    int status = 0;
+    uint32_t param = *(uint32_t *)pParam;
+    agc2_settings_t *pProperties = (agc2_settings_t *)pValue;
+
+    switch (param) {
+    case AGC2_PARAM_FIXED_DIGITAL_GAIN:
+        if (*pValueSize < sizeof(float)) {
+            *pValueSize = 0;
+            return -EINVAL;
+        }
+        break;
+    case AGC2_PARAM_ADAPT_DIGI_LEVEL_ESTIMATOR:
+        if (*pValueSize < sizeof(int32_t)) {
+            *pValueSize = 0;
+            return -EINVAL;
+        }
+        break;
+    case AGC2_PARAM_ADAPT_DIGI_EXTRA_SATURATION_MARGIN:
+        if (*pValueSize < sizeof(float)) {
+            *pValueSize = 0;
+            return -EINVAL;
+        }
+        break;
+    case AGC2_PARAM_PROPERTIES:
+        if (*pValueSize < sizeof(agc2_settings_t)) {
+            *pValueSize = 0;
+            return -EINVAL;
+        }
+        break;
+
+    default:
+        ALOGW("Agc2GetParameter() unknown param %08x", param);
+        status = -EINVAL;
+        break;
+    }
+
+    effect->session->config = effect->session->apm->GetConfig();
+    switch (param) {
+    case AGC2_PARAM_FIXED_DIGITAL_GAIN:
+        *(float *) pValue =
+                (float)(effect->session->config.gain_controller2.fixed_digital.gain_db);
+        ALOGV("Agc2GetParameter() target level %f dB", *(float *) pValue);
+        break;
+    case AGC2_PARAM_ADAPT_DIGI_LEVEL_ESTIMATOR:
+        *(uint32_t *) pValue =
+                (uint32_t)(effect->session->config.gain_controller2.adaptive_digital.
+                level_estimator);
+        ALOGV("Agc2GetParameter() level estimator %d",
+                *(webrtc::AudioProcessing::Config::GainController2::LevelEstimator *) pValue);
+        break;
+    case AGC2_PARAM_ADAPT_DIGI_EXTRA_SATURATION_MARGIN:
+        *(float *) pValue =
+                (float)(effect->session->config.gain_controller2.adaptive_digital.
+                extra_saturation_margin_db);
+        ALOGV("Agc2GetParameter() extra saturation margin %f dB", *(float *) pValue);
+        break;
+    case AGC2_PARAM_PROPERTIES:
+        pProperties->fixedDigitalGain =
+                (float)(effect->session->config.gain_controller2.fixed_digital.gain_db);
+        pProperties->level_estimator =
+                (uint32_t)(effect->session->config.gain_controller2.adaptive_digital.
+                level_estimator);
+        pProperties->extraSaturationMargin =
+                (float)(effect->session->config.gain_controller2.adaptive_digital.
+                extra_saturation_margin_db);
+        break;
+    default:
+        ALOGW("Agc2GetParameter() unknown param %d", param);
+        status = -EINVAL;
+        break;
+    }
+
+    return status;
+}
+#endif
+
 int AgcGetParameter(preproc_effect_t *effect,
                     void *pParam,
                     uint32_t *pValueSize,
@@ -298,7 +461,9 @@
     int status = 0;
     uint32_t param = *(uint32_t *)pParam;
     t_agc_settings *pProperties = (t_agc_settings *)pValue;
+#ifdef WEBRTC_LEGACY
     webrtc::GainControl *agc = static_cast<webrtc::GainControl *>(effect->engine);
+#endif
 
     switch (param) {
     case AGC_PARAM_TARGET_LEVEL:
@@ -327,6 +492,7 @@
         break;
     }
 
+#ifdef WEBRTC_LEGACY
     switch (param) {
     case AGC_PARAM_TARGET_LEVEL:
         *(int16_t *) pValue = (int16_t)(agc->target_level_dbfs() * -100);
@@ -351,12 +517,98 @@
         status = -EINVAL;
         break;
     }
+#else
+    effect->session->config = effect->session->apm->GetConfig();
+    switch (param) {
+    case AGC_PARAM_TARGET_LEVEL:
+        *(int16_t *) pValue =
+                (int16_t)(effect->session->config.gain_controller1.target_level_dbfs * -100);
+        ALOGV("AgcGetParameter() target level %d milliBels", *(int16_t *) pValue);
+        break;
+    case AGC_PARAM_COMP_GAIN:
+        *(int16_t *) pValue =
+                (int16_t)(effect->session->config.gain_controller1.compression_gain_db * -100);
+        ALOGV("AgcGetParameter() comp gain %d milliBels", *(int16_t *) pValue);
+        break;
+    case AGC_PARAM_LIMITER_ENA:
+        *(bool *) pValue =
+                (bool)(effect->session->config.gain_controller1.enable_limiter);
+        ALOGV("AgcGetParameter() limiter enabled %s",
+                (*(int16_t *) pValue != 0) ? "true" : "false");
+        break;
+    case AGC_PARAM_PROPERTIES:
+        pProperties->targetLevel =
+                (int16_t)(effect->session->config.gain_controller1.target_level_dbfs * -100);
+        pProperties->compGain =
+                (int16_t)(effect->session->config.gain_controller1.compression_gain_db * -100);
+        pProperties->limiterEnabled =
+                (bool)(effect->session->config.gain_controller1.enable_limiter);
+        break;
+    default:
+        ALOGW("AgcGetParameter() unknown param %d", param);
+        status = -EINVAL;
+        break;
+    }
+#endif
     return status;
 }
 
+#ifndef WEBRTC_LEGACY
+int Agc2SetParameter (preproc_effect_t *effect, void *pParam, void *pValue)
+{
+    int status = 0;
+    uint32_t param = *(uint32_t *)pParam;
+    float valueFloat = 0.f;
+    agc2_settings_t *pProperties = (agc2_settings_t *)pValue;
+    effect->session->config = effect->session->apm->GetConfig();
+    switch (param) {
+    case AGC2_PARAM_FIXED_DIGITAL_GAIN:
+        valueFloat = (float)(*(int32_t *) pValue);
+        ALOGV("Agc2SetParameter() fixed digital gain %f dB", valueFloat);
+        effect->session->config.gain_controller2.fixed_digital.gain_db = valueFloat;
+        break;
+    case AGC2_PARAM_ADAPT_DIGI_LEVEL_ESTIMATOR:
+        ALOGV("Agc2SetParameter() level estimator %d", *(webrtc::AudioProcessing::Config::
+                GainController2::LevelEstimator *) pValue);
+        effect->session->config.gain_controller2.adaptive_digital.level_estimator =
+                (*(webrtc::AudioProcessing::Config::GainController2::LevelEstimator *) pValue);
+        break;
+    case AGC2_PARAM_ADAPT_DIGI_EXTRA_SATURATION_MARGIN:
+        valueFloat = (float)(*(int32_t *) pValue);
+        ALOGV("Agc2SetParameter() extra saturation margin %f dB", valueFloat);
+        effect->session->config.gain_controller2.adaptive_digital.extra_saturation_margin_db =
+                valueFloat;
+        break;
+    case AGC2_PARAM_PROPERTIES:
+        ALOGV("Agc2SetParameter() properties gain %f, level %d margin %f",
+                pProperties->fixedDigitalGain,
+                pProperties->level_estimator,
+                pProperties->extraSaturationMargin);
+        effect->session->config.gain_controller2.fixed_digital.gain_db =
+                pProperties->fixedDigitalGain;
+        effect->session->config.gain_controller2.adaptive_digital.level_estimator =
+                (webrtc::AudioProcessing::Config::GainController2::LevelEstimator)pProperties->
+                level_estimator;
+        effect->session->config.gain_controller2.adaptive_digital.extra_saturation_margin_db =
+                pProperties->extraSaturationMargin;
+        break;
+    default:
+        ALOGW("Agc2SetParameter() unknown param %08x value %08x", param, *(uint32_t *)pValue);
+        status = -EINVAL;
+        break;
+    }
+    effect->session->apm->ApplyConfig(effect->session->config);
+
+    ALOGV("Agc2SetParameter() done status %d", status);
+
+    return status;
+}
+#endif
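For illustration only, a direct call into the setter above could look like the sketch below; the caller and values are hypothetical, and the fixed-gain case is read back as an int32_t exactly as in the switch above:

    // Hypothetical caller of Agc2SetParameter (sketch only; effect must be a valid preproc_effect_t*).
    uint32_t param = AGC2_PARAM_FIXED_DIGITAL_GAIN;
    int32_t gainDb = 6;                                      // reinterpreted via *(int32_t *)pValue above
    int status = Agc2SetParameter(effect, &param, &gainDb);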
+
 int AgcSetParameter (preproc_effect_t *effect, void *pParam, void *pValue)
 {
     int status = 0;
+#ifdef WEBRTC_LEGACY
     uint32_t param = *(uint32_t *)pParam;
     t_agc_settings *pProperties = (t_agc_settings *)pValue;
     webrtc::GainControl *agc = static_cast<webrtc::GainControl *>(effect->engine);
@@ -390,27 +642,95 @@
         status = -EINVAL;
         break;
     }
+#else
+    uint32_t param = *(uint32_t *)pParam;
+    t_agc_settings *pProperties = (t_agc_settings *)pValue;
+    effect->session->config = effect->session->apm->GetConfig();
+    switch (param) {
+    case AGC_PARAM_TARGET_LEVEL:
+        ALOGV("AgcSetParameter() target level %d milliBels", *(int16_t *)pValue);
+        effect->session->config.gain_controller1.target_level_dbfs =
+             (-(*(int16_t *)pValue / 100));
+        break;
+    case AGC_PARAM_COMP_GAIN:
+        ALOGV("AgcSetParameter() comp gain %d milliBels", *(int16_t *)pValue);
+        effect->session->config.gain_controller1.compression_gain_db =
+             (*(int16_t *)pValue / 100);
+        break;
+    case AGC_PARAM_LIMITER_ENA:
+        ALOGV("AgcSetParameter() limiter enabled %s", *(bool *)pValue ? "true" : "false");
+        effect->session->config.gain_controller1.enable_limiter =
+             (*(bool *)pValue);
+        break;
+    case AGC_PARAM_PROPERTIES:
+        ALOGV("AgcSetParameter() properties level %d, gain %d limiter %d",
+              pProperties->targetLevel,
+              pProperties->compGain,
+              pProperties->limiterEnabled);
+        effect->session->config.gain_controller1.target_level_dbfs =
+              -(pProperties->targetLevel / 100);
+        effect->session->config.gain_controller1.compression_gain_db =
+              pProperties->compGain / 100;
+        effect->session->config.gain_controller1.enable_limiter =
+              pProperties->limiterEnabled;
+        break;
+    default:
+        ALOGW("AgcSetParameter() unknown param %08x value %08x", param, *(uint32_t *)pValue);
+        status = -EINVAL;
+        break;
+    }
+    effect->session->apm->ApplyConfig(effect->session->config);
+#endif
 
     ALOGV("AgcSetParameter() done status %d", status);
 
     return status;
 }
 
+#ifndef WEBRTC_LEGACY
+void Agc2Enable(preproc_effect_t *effect)
+{
+    effect->session->config = effect->session->apm->GetConfig();
+    effect->session->config.gain_controller2.enabled = true;
+    effect->session->apm->ApplyConfig(effect->session->config);
+}
+#endif
+
 void AgcEnable(preproc_effect_t *effect)
 {
+#ifdef WEBRTC_LEGACY
     webrtc::GainControl *agc = static_cast<webrtc::GainControl *>(effect->engine);
     ALOGV("AgcEnable agc %p", agc);
     agc->Enable(true);
+#else
+    effect->session->config = effect->session->apm->GetConfig();
+    effect->session->config.gain_controller1.enabled = true;
+    effect->session->apm->ApplyConfig(effect->session->config);
+#endif
 }
 
+#ifndef WEBRTC_LEGACY
+void Agc2Disable(preproc_effect_t *effect)
+{
+    effect->session->config = effect->session->apm->GetConfig();
+    effect->session->config.gain_controller2.enabled = false;
+    effect->session->apm->ApplyConfig(effect->session->config);
+}
+#endif
+
 void AgcDisable(preproc_effect_t *effect)
 {
+#ifdef WEBRTC_LEGACY
     ALOGV("AgcDisable");
     webrtc::GainControl *agc = static_cast<webrtc::GainControl *>(effect->engine);
     agc->Enable(false);
+#else
+    effect->session->config = effect->session->apm->GetConfig();
+    effect->session->config.gain_controller1.enabled = false;
+    effect->session->apm->ApplyConfig(effect->session->config);
+#endif
 }
 
-
 static const preproc_ops_t sAgcOps = {
         AgcCreate,
         AgcInit,
@@ -422,26 +742,48 @@
         NULL
 };
 
+#ifndef WEBRTC_LEGACY
+static const preproc_ops_t sAgc2Ops = {
+        Agc2Create,
+        Agc2Init,
+        NULL,
+        Agc2Enable,
+        Agc2Disable,
+        Agc2SetParameter,
+        Agc2GetParameter,
+        NULL
+};
+#endif
 
 //------------------------------------------------------------------------------
 // Acoustic Echo Canceler (AEC)
 //------------------------------------------------------------------------------
 
+#ifdef WEBRTC_LEGACY
 static const webrtc::EchoControlMobile::RoutingMode kAecDefaultMode =
         webrtc::EchoControlMobile::kEarpiece;
 static const bool kAecDefaultComfortNoise = true;
+#endif
 
 int  AecInit (preproc_effect_t *effect)
 {
     ALOGV("AecInit");
+#ifdef WEBRTC_LEGACY
     webrtc::EchoControlMobile *aec = static_cast<webrtc::EchoControlMobile *>(effect->engine);
     aec->set_routing_mode(kAecDefaultMode);
     aec->enable_comfort_noise(kAecDefaultComfortNoise);
+#else
+    effect->session->config =
+        effect->session->apm->GetConfig();
+    effect->session->config.echo_canceller.mobile_mode = false;
+    effect->session->apm->ApplyConfig(effect->session->config);
+#endif
     return 0;
 }
 
 int  AecCreate(preproc_effect_t *effect)
 {
+#ifdef WEBRTC_LEGACY
     webrtc::EchoControlMobile *aec = effect->session->apm->echo_control_mobile();
     ALOGV("AecCreate got aec %p", aec);
     if (aec == NULL) {
@@ -449,6 +791,7 @@
         return -ENOMEM;
     }
     effect->engine = static_cast<preproc_fx_handle_t>(aec);
+#endif
     AecInit (effect);
     return 0;
 }
@@ -470,6 +813,14 @@
         *(uint32_t *)pValue = 1000 * effect->session->apm->stream_delay_ms();
         ALOGV("AecGetParameter() echo delay %d us", *(uint32_t *)pValue);
         break;
+#ifndef WEBRTC_LEGACY
+    case AEC_PARAM_MOBILE_MODE:
+        effect->session->config =
+            effect->session->apm->GetConfig();
+        *(uint32_t *)pValue = effect->session->config.echo_canceller.mobile_mode;
+        ALOGV("AecGetParameter() mobile mode %d us", *(uint32_t *)pValue);
+        break;
+#endif
     default:
         ALOGW("AecGetParameter() unknown param %08x value %08x", param, *(uint32_t *)pValue);
         status = -EINVAL;
@@ -490,6 +841,15 @@
         status = effect->session->apm->set_stream_delay_ms(value/1000);
         ALOGV("AecSetParameter() echo delay %d us, status %d", value, status);
         break;
+#ifndef WEBRTC_LEGACY
+    case AEC_PARAM_MOBILE_MODE:
+        effect->session->config =
+            effect->session->apm->GetConfig();
+        effect->session->config.echo_canceller.mobile_mode = value;
+        ALOGV("AecSetParameter() mobile mode %d us", value);
+        effect->session->apm->ApplyConfig(effect->session->config);
+        break;
+#endif
     default:
         ALOGW("AecSetParameter() unknown param %08x value %08x", param, *(uint32_t *)pValue);
         status = -EINVAL;
@@ -500,28 +860,43 @@
 
 void AecEnable(preproc_effect_t *effect)
 {
+#ifdef WEBRTC_LEGACY
     webrtc::EchoControlMobile *aec = static_cast<webrtc::EchoControlMobile *>(effect->engine);
     ALOGV("AecEnable aec %p", aec);
     aec->Enable(true);
+#else
+    effect->session->config = effect->session->apm->GetConfig();
+    effect->session->config.echo_canceller.enabled = true;
+    effect->session->apm->ApplyConfig(effect->session->config);
+#endif
 }
 
 void AecDisable(preproc_effect_t *effect)
 {
+#ifdef WEBRTC_LEGACY
     ALOGV("AecDisable");
     webrtc::EchoControlMobile *aec = static_cast<webrtc::EchoControlMobile *>(effect->engine);
     aec->Enable(false);
+#else
+    effect->session->config = effect->session->apm->GetConfig();
+    effect->session->config.echo_canceller.enabled = false;
+    effect->session->apm->ApplyConfig(effect->session->config);
+#endif
 }
 
 int AecSetDevice(preproc_effect_t *effect, uint32_t device)
 {
     ALOGV("AecSetDevice %08x", device);
+#ifdef WEBRTC_LEGACY
     webrtc::EchoControlMobile *aec = static_cast<webrtc::EchoControlMobile *>(effect->engine);
     webrtc::EchoControlMobile::RoutingMode mode = webrtc::EchoControlMobile::kQuietEarpieceOrHeadset;
+#endif
 
     if (audio_is_input_device(device)) {
         return 0;
     }
 
+#ifdef WEBRTC_LEGACY
     switch(device) {
     case AUDIO_DEVICE_OUT_EARPIECE:
         mode = webrtc::EchoControlMobile::kEarpiece;
@@ -536,6 +911,7 @@
         break;
     }
     aec->set_routing_mode(mode);
+#endif
     return 0;
 }
 
@@ -554,11 +930,17 @@
 // Noise Suppression (NS)
 //------------------------------------------------------------------------------
 
+#ifdef WEBRTC_LEGACY
 static const webrtc::NoiseSuppression::Level kNsDefaultLevel = webrtc::NoiseSuppression::kModerate;
+#else
+static const webrtc::AudioProcessing::Config::NoiseSuppression::Level kNsDefaultLevel =
+                webrtc::AudioProcessing::Config::NoiseSuppression::kModerate;
+#endif
 
 int  NsInit (preproc_effect_t *effect)
 {
     ALOGV("NsInit");
+#ifdef WEBRTC_LEGACY
     webrtc::NoiseSuppression *ns = static_cast<webrtc::NoiseSuppression *>(effect->engine);
     ns->set_level(kNsDefaultLevel);
     webrtc::Config config;
@@ -575,12 +957,20 @@
     config.Set<webrtc::Beamforming>(
             new webrtc::Beamforming(false, geometry));
     effect->session->apm->SetExtraOptions(config);
+#else
+    effect->session->config =
+        effect->session->apm->GetConfig();
+    effect->session->config.noise_suppression.level =
+        kNsDefaultLevel;
+    effect->session->apm->ApplyConfig(effect->session->config);
+#endif
     effect->type = NS_TYPE_SINGLE_CHANNEL;
     return 0;
 }
 
 int  NsCreate(preproc_effect_t *effect)
 {
+#ifdef WEBRTC_LEGACY
     webrtc::NoiseSuppression *ns = effect->session->apm->noise_suppression();
     ALOGV("NsCreate got ns %p", ns);
     if (ns == NULL) {
@@ -588,6 +978,7 @@
         return -ENOMEM;
     }
     effect->engine = static_cast<preproc_fx_handle_t>(ns);
+#endif
     NsInit (effect);
     return 0;
 }
@@ -604,6 +995,7 @@
 int NsSetParameter (preproc_effect_t *effect, void *pParam, void *pValue)
 {
     int status = 0;
+#ifdef WEBRTC_LEGACY
     webrtc::NoiseSuppression *ns = static_cast<webrtc::NoiseSuppression *>(effect->engine);
     uint32_t param = *(uint32_t *)pParam;
     uint32_t value = *(uint32_t *)pValue;
@@ -629,12 +1021,30 @@
             ALOGW("NsSetParameter() unknown param %08x value %08x", param, value);
             status = -EINVAL;
     }
+#else
+    uint32_t param = *(uint32_t *)pParam;
+    uint32_t value = *(uint32_t *)pValue;
+    effect->session->config =
+        effect->session->apm->GetConfig();
+    switch (param) {
+        case NS_PARAM_LEVEL:
+            effect->session->config.noise_suppression.level =
+               (webrtc::AudioProcessing::Config::NoiseSuppression::Level)value;
+            ALOGV("NsSetParameter() level %d", value);
+            break;
+        default:
+            ALOGW("NsSetParameter() unknown param %08x value %08x", param, value);
+            status = -EINVAL;
+    }
+    effect->session->apm->ApplyConfig(effect->session->config);
+#endif
 
     return status;
 }
 
 void NsEnable(preproc_effect_t *effect)
 {
+#ifdef WEBRTC_LEGACY
     webrtc::NoiseSuppression *ns = static_cast<webrtc::NoiseSuppression *>(effect->engine);
     ALOGV("NsEnable ns %p", ns);
     ns->Enable(true);
@@ -644,17 +1054,30 @@
         config.Set<webrtc::Beamforming>(new webrtc::Beamforming(true, geometry));
         effect->session->apm->SetExtraOptions(config);
     }
+#else
+    effect->session->config =
+        effect->session->apm->GetConfig();
+    effect->session->config.noise_suppression.enabled = true;
+    effect->session->apm->ApplyConfig(effect->session->config);
+#endif
 }
 
 void NsDisable(preproc_effect_t *effect)
 {
     ALOGV("NsDisable");
+#ifdef WEBRTC_LEGACY
     webrtc::NoiseSuppression *ns = static_cast<webrtc::NoiseSuppression *>(effect->engine);
     ns->Enable(false);
     webrtc::Config config;
     std::vector<webrtc::Point> geometry;
     config.Set<webrtc::Beamforming>(new webrtc::Beamforming(false, geometry));
     effect->session->apm->SetExtraOptions(config);
+#else
+    effect->session->config =
+        effect->session->apm->GetConfig();
+    effect->session->config.noise_suppression.enabled = false;
+    effect->session->apm->ApplyConfig(effect->session->config);
+#endif
 }
 
 static const preproc_ops_t sNsOps = {
@@ -669,8 +1092,12 @@
 };
 
 
+
 static const preproc_ops_t *sPreProcOps[PREPROC_NUM_EFFECTS] = {
         &sAgcOps,
+#ifndef WEBRTC_LEGACY
+        &sAgc2Ops,
+#endif
         &sAecOps,
         &sNsOps
 };
@@ -812,7 +1239,9 @@
     session->id = 0;
     session->io = 0;
     session->createdMsk = 0;
+#ifdef WEBRTC_LEGACY
     session->apm = NULL;
+#endif
     for (i = 0; i < PREPROC_NUM_EFFECTS && status == 0; i++) {
         status = Effect_Init(&session->effects[i], i);
     }
@@ -829,6 +1258,7 @@
     ALOGV("Session_CreateEffect procId %d, createdMsk %08x", procId, session->createdMsk);
 
     if (session->createdMsk == 0) {
+#ifdef WEBRTC_LEGACY
         session->apm = webrtc::AudioProcessing::Create();
         if (session->apm == NULL) {
             ALOGW("Session_CreateEffect could not get apm engine");
@@ -850,28 +1280,53 @@
             ALOGW("Session_CreateEffect could not allocate reverse audio frame");
             goto error;
         }
+#else
+        session->apm = session->ap_builder.Create();
+        if (session->apm == NULL) {
+            ALOGW("Session_CreateEffect could not get apm engine");
+            goto error;
+        }
+#endif
         session->apmSamplingRate = kPreprocDefaultSr;
         session->apmFrameCount = (kPreprocDefaultSr) / 100;
         session->frameCount = session->apmFrameCount;
         session->samplingRate = kPreprocDefaultSr;
         session->inChannelCount = kPreProcDefaultCnl;
         session->outChannelCount = kPreProcDefaultCnl;
+#ifdef WEBRTC_LEGACY
         session->procFrame->sample_rate_hz_ = kPreprocDefaultSr;
         session->procFrame->num_channels_ = kPreProcDefaultCnl;
+#else
+        session->inputConfig.set_sample_rate_hz(kPreprocDefaultSr);
+        session->inputConfig.set_num_channels(kPreProcDefaultCnl);
+        session->outputConfig.set_sample_rate_hz(kPreprocDefaultSr);
+        session->outputConfig.set_num_channels(kPreProcDefaultCnl);
+#endif
         session->revChannelCount = kPreProcDefaultCnl;
+#ifdef WEBRTC_LEGACY
         session->revFrame->sample_rate_hz_ = kPreprocDefaultSr;
         session->revFrame->num_channels_ = kPreProcDefaultCnl;
+#else
+        session->revConfig.set_sample_rate_hz(kPreprocDefaultSr);
+        session->revConfig.set_num_channels(kPreProcDefaultCnl);
+#endif
         session->enabledMsk = 0;
         session->processedMsk = 0;
         session->revEnabledMsk = 0;
         session->revProcessedMsk = 0;
+#ifdef WEBRTC_LEGACY
         session->inResampler = NULL;
+#endif
         session->inBuf = NULL;
         session->inBufSize = 0;
+#ifdef WEBRTC_LEGACY
         session->outResampler = NULL;
+#endif
         session->outBuf = NULL;
         session->outBufSize = 0;
+#ifdef WEBRTC_LEGACY
         session->revResampler = NULL;
+#endif
         session->revBuf = NULL;
         session->revBufSize = 0;
     }
@@ -885,12 +1340,17 @@
 
 error:
     if (session->createdMsk == 0) {
+#ifdef WEBRTC_LEGACY
         delete session->revFrame;
         session->revFrame = NULL;
         delete session->procFrame;
         session->procFrame = NULL;
         delete session->apm;
         session->apm = NULL; // NOLINT(clang-analyzer-cplusplus.NewDelete)
+#else
+        delete session->apm;
+        session->apm = NULL;
+#endif
     }
     return status;
 }
@@ -901,6 +1361,7 @@
     ALOGW_IF(Effect_Release(fx) != 0, " Effect_Release() failed for proc ID %d", fx->procId);
     session->createdMsk &= ~(1<<fx->procId);
     if (session->createdMsk == 0) {
+#ifdef WEBRTC_LEGACY
         delete session->apm;
         session->apm = NULL;
         delete session->procFrame;
@@ -919,6 +1380,10 @@
             speex_resampler_destroy(session->revResampler);
             session->revResampler = NULL;
         }
+#else
+        delete session->apm;
+        session->apm = NULL;
+#endif
         delete session->inBuf;
         session->inBuf = NULL;
         delete session->outBuf;
@@ -946,7 +1411,9 @@
 
     ALOGV("Session_SetConfig sr %d cnl %08x",
          config->inputCfg.samplingRate, config->inputCfg.channels);
+#ifdef WEBRTC_LEGACY
     int status;
+#endif
 
     // AEC implementation is limited to 16kHz
     if (config->inputCfg.samplingRate >= 32000 && !(session->createdMsk & (1 << PREPROC_AEC))) {
@@ -958,6 +1425,7 @@
         session->apmSamplingRate = 8000;
     }
 
+#ifdef WEBRTC_LEGACY
     const webrtc::ProcessingConfig processing_config = {
       {{static_cast<int>(session->apmSamplingRate), inCnl},
        {static_cast<int>(session->apmSamplingRate), outCnl},
@@ -967,23 +1435,41 @@
     if (status < 0) {
         return -EINVAL;
     }
+#endif
 
     session->samplingRate = config->inputCfg.samplingRate;
     session->apmFrameCount = session->apmSamplingRate / 100;
     if (session->samplingRate == session->apmSamplingRate) {
         session->frameCount = session->apmFrameCount;
     } else {
+#ifdef WEBRTC_LEGACY
         session->frameCount = (session->apmFrameCount * session->samplingRate) /
                 session->apmSamplingRate  + 1;
+#else
+        session->frameCount = (session->apmFrameCount * session->samplingRate) /
+                session->apmSamplingRate;
+#endif
     }
     session->inChannelCount = inCnl;
     session->outChannelCount = outCnl;
+#ifdef WEBRTC_LEGACY
     session->procFrame->num_channels_ = inCnl;
     session->procFrame->sample_rate_hz_ = session->apmSamplingRate;
+#else
+    session->inputConfig.set_sample_rate_hz(session->samplingRate);
+    session->inputConfig.set_num_channels(inCnl);
+    session->outputConfig.set_sample_rate_hz(session->samplingRate);
+    session->outputConfig.set_num_channels(inCnl);
+#endif
 
     session->revChannelCount = inCnl;
+#ifdef WEBRTC_LEGACY
     session->revFrame->num_channels_ = inCnl;
     session->revFrame->sample_rate_hz_ = session->apmSamplingRate;
+#else
+    session->revConfig.set_sample_rate_hz(session->samplingRate);
+    session->revConfig.set_num_channels(inCnl);
+#endif
 
     // force process buffer reallocation
     session->inBufSize = 0;
@@ -992,6 +1478,7 @@
     session->framesOut = 0;
 
 
+#ifdef WEBRTC_LEGACY
     if (session->inResampler != NULL) {
         speex_resampler_destroy(session->inResampler);
         session->inResampler = NULL;
@@ -1043,6 +1530,7 @@
             return -EINVAL;
         }
     }
+#endif
 
     session->state = PREPROC_SESSION_STATE_CONFIG;
     return 0;
@@ -1079,6 +1567,7 @@
         return -EINVAL;
     }
     uint32_t inCnl = audio_channel_count_from_out_mask(config->inputCfg.channels);
+#ifdef WEBRTC_LEGACY
     const webrtc::ProcessingConfig processing_config = {
        {{static_cast<int>(session->apmSamplingRate), session->inChannelCount},
         {static_cast<int>(session->apmSamplingRate), session->outChannelCount},
@@ -1088,9 +1577,12 @@
     if (status < 0) {
         return -EINVAL;
     }
+#endif
     session->revChannelCount = inCnl;
+#ifdef WEBRTC_LEGACY
     session->revFrame->num_channels_ = inCnl;
     session->revFrame->sample_rate_hz_ = session->apmSamplingRate;
+#endif
     // force process buffer reallocation
     session->revBufSize = 0;
     session->framesRev = 0;
@@ -1114,6 +1606,7 @@
     if (enabled) {
         if(session->enabledMsk == 0) {
             session->framesIn = 0;
+#ifdef WEBRTC_LEGACY
             if (session->inResampler != NULL) {
                 speex_resampler_reset_mem(session->inResampler);
             }
@@ -1121,13 +1614,16 @@
             if (session->outResampler != NULL) {
                 speex_resampler_reset_mem(session->outResampler);
             }
+#endif
         }
         session->enabledMsk |= (1 << procId);
         if (HasReverseStream(procId)) {
             session->framesRev = 0;
+#ifdef WEBRTC_LEGACY
             if (session->revResampler != NULL) {
                 speex_resampler_reset_mem(session->revResampler);
             }
+#endif
             session->revEnabledMsk |= (1 << procId);
         }
     } else {
@@ -1240,9 +1736,9 @@
             memcpy(outBuffer->s16,
                   session->outBuf,
                   fr * session->outChannelCount * sizeof(int16_t));
-            memcpy(session->outBuf,
-                  session->outBuf + fr * session->outChannelCount,
-                  (session->framesOut - fr) * session->outChannelCount * sizeof(int16_t));
+            memmove(session->outBuf,
+                    session->outBuf + fr * session->outChannelCount,
+                    (session->framesOut - fr) * session->outChannelCount * sizeof(int16_t));
             session->framesOut -= fr;
             framesWr += fr;
         }
@@ -1252,6 +1748,7 @@
             return 0;
         }
 
+#ifdef WEBRTC_LEGACY
         if (session->inResampler != NULL) {
             size_t fr = session->frameCount - session->framesIn;
             if (inBuffer->frameCount < fr) {
@@ -1303,9 +1800,9 @@
                                                         session->procFrame->data_,
                                                         &frOut);
             }
-            memcpy(session->inBuf,
-                   session->inBuf + frIn * session->inChannelCount,
-                   (session->framesIn - frIn) * session->inChannelCount * sizeof(int16_t));
+            memmove(session->inBuf,
+                    session->inBuf + frIn * session->inChannelCount,
+                    (session->framesIn - frIn) * session->inChannelCount * sizeof(int16_t));
             session->framesIn -= frIn;
         } else {
             size_t fr = session->frameCount - session->framesIn;
@@ -1335,6 +1832,28 @@
         session->procFrame->samples_per_channel_ = session->apmFrameCount;
 
         effect->session->apm->ProcessStream(session->procFrame);
+#else
+        size_t fr = session->frameCount - session->framesIn;
+        if (inBuffer->frameCount < fr) {
+            fr = inBuffer->frameCount;
+        }
+        session->framesIn += fr;
+        inBuffer->frameCount = fr;
+        if (session->framesIn < session->frameCount) {
+            return 0;
+        }
+        session->framesIn = 0;
+        if (int status = effect->session->apm->ProcessStream(
+                                    (const int16_t* const)inBuffer->s16,
+                                    (const webrtc::StreamConfig)effect->session->inputConfig,
+                                    (const webrtc::StreamConfig)effect->session->outputConfig,
+                                    (int16_t* const)outBuffer->s16);
+             status != 0) {
+            ALOGE("Process Stream failed with error %d\n", status);
+            return status;
+        }
+        outBuffer->frameCount = inBuffer->frameCount;
+#endif
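The non-legacy capture path above hands the AudioProcessing module exactly one 10 ms frame per call, once session->framesIn has accumulated a full frame, and describes the buffers with the StreamConfig objects kept in the session. A minimal sketch of that call shape, assuming 16 kHz mono int16_t data and a valid apm pointer:

    // Sketch of the non-legacy 10 ms forward processing call.
    webrtc::StreamConfig inCfg, outCfg;
    inCfg.set_sample_rate_hz(16000);
    inCfg.set_num_channels(1);
    outCfg.set_sample_rate_hz(16000);
    outCfg.set_num_channels(1);
    int16_t inFrame[160] = {};   // 10 ms at 16 kHz, mono
    int16_t outFrame[160] = {};
    if (int err = apm->ProcessStream(inFrame, inCfg, outCfg, outFrame); err != 0) {
        ALOGE("ProcessStream failed: %d", err);
    }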
 
         if (session->outBufSize < session->framesOut + session->frameCount) {
             int16_t *buf;
@@ -1350,6 +1869,7 @@
             session->outBuf = buf;
         }
 
+#ifdef WEBRTC_LEGACY
         if (session->outResampler != NULL) {
             spx_uint32_t frIn = session->apmFrameCount;
             spx_uint32_t frOut = session->frameCount;
@@ -1375,15 +1895,18 @@
             session->framesOut += session->frameCount;
         }
         size_t fr = session->framesOut;
+#else
+        fr = session->framesOut;
+#endif
         if (framesRq - framesWr < fr) {
             fr = framesRq - framesWr;
         }
         memcpy(outBuffer->s16 + framesWr * session->outChannelCount,
               session->outBuf,
               fr * session->outChannelCount * sizeof(int16_t));
-        memcpy(session->outBuf,
-              session->outBuf + fr * session->outChannelCount,
-              (session->framesOut - fr) * session->outChannelCount * sizeof(int16_t));
+        memmove(session->outBuf,
+                session->outBuf + fr * session->outChannelCount,
+                (session->framesOut - fr) * session->outChannelCount * sizeof(int16_t));
         session->framesOut -= fr;
         outBuffer->frameCount += fr;
 
@@ -1794,6 +2317,7 @@
 
     if ((session->revProcessedMsk & session->revEnabledMsk) == session->revEnabledMsk) {
         effect->session->revProcessedMsk = 0;
+#ifdef WEBRTC_LEGACY
         if (session->revResampler != NULL) {
             size_t fr = session->frameCount - session->framesRev;
             if (inBuffer->frameCount < fr) {
@@ -1837,9 +2361,9 @@
                                                         session->revFrame->data_,
                                                         &frOut);
             }
-            memcpy(session->revBuf,
-                   session->revBuf + frIn * session->inChannelCount,
-                   (session->framesRev - frIn) * session->inChannelCount * sizeof(int16_t));
+            memmove(session->revBuf,
+                    session->revBuf + frIn * session->inChannelCount,
+                    (session->framesRev - frIn) * session->inChannelCount * sizeof(int16_t));
             session->framesRev -= frIn;
         } else {
             size_t fr = session->frameCount - session->framesRev;
@@ -1858,6 +2382,27 @@
         }
         session->revFrame->samples_per_channel_ = session->apmFrameCount;
         effect->session->apm->AnalyzeReverseStream(session->revFrame);
+#else
+        size_t fr = session->frameCount - session->framesRev;
+        if (inBuffer->frameCount < fr) {
+            fr = inBuffer->frameCount;
+        }
+        session->framesRev += fr;
+        inBuffer->frameCount = fr;
+        if (session->framesRev < session->frameCount) {
+            return 0;
+        }
+        session->framesRev = 0;
+        if (int status = effect->session->apm->ProcessReverseStream(
+                        (const int16_t* const)inBuffer->s16,
+                        (const webrtc::StreamConfig)effect->session->revConfig,
+                        (const webrtc::StreamConfig)effect->session->revConfig,
+                        (int16_t* const)outBuffer->s16);
+             status != 0) {
+            ALOGE("Process Reverse Stream failed with error %d\n", status);
+            return status;
+        }
+#endif
         return 0;
     } else {
         return -ENODATA;
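The reverse (far-end) path mirrors the forward one: after a full 10 ms far-end frame has been accumulated, it is passed to ProcessReverseStream with the session's revConfig describing both input and output, so the echo canceller can use it as its reference signal. A minimal sketch under the same assumptions as the forward-path example above:

    // Sketch of the non-legacy reverse-stream (far-end reference) call.
    webrtc::StreamConfig revCfg;
    revCfg.set_sample_rate_hz(16000);
    revCfg.set_num_channels(1);
    int16_t farFrame[160] = {};  // 10 ms of far-end audio at 16 kHz, mono
    int16_t revOut[160] = {};
    if (int err = apm->ProcessReverseStream(farFrame, revCfg, revCfg, revOut); err != 0) {
        ALOGE("ProcessReverseStream failed: %d", err);
    }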
diff --git a/media/libeffects/preprocessing/tests/Android.bp b/media/libeffects/preprocessing/tests/Android.bp
new file mode 100644
index 0000000..045b0d3
--- /dev/null
+++ b/media/libeffects/preprocessing/tests/Android.bp
@@ -0,0 +1,53 @@
+// audio preprocessing unit test
+cc_test {
+    name: "AudioPreProcessingLegacyTest",
+
+    vendor: true,
+
+    relative_install_path: "soundfx",
+
+    srcs: ["PreProcessingTest.cpp"],
+
+    shared_libs: [
+        "libaudiopreprocessing_legacy",
+        "libaudioutils",
+        "liblog",
+        "libutils",
+        "libwebrtc_audio_preprocessing",
+    ],
+
+    cflags: [
+        "-DWEBRTC_POSIX",
+        "-DWEBRTC_LEGACY",
+        "-fvisibility=default",
+        "-Wall",
+        "-Werror",
+        "-Wextra",
+    ],
+
+    header_libs: [
+        "libaudioeffects",
+        "libhardware_headers",
+    ],
+}
+
+cc_test {
+    name: "AudioPreProcessingTest",
+
+    vendor: true,
+
+    relative_install_path: "soundfx",
+
+    srcs: ["PreProcessingTest.cpp"],
+
+    shared_libs: [
+        "libaudiopreprocessing",
+        "libaudioutils",
+        "liblog",
+        "libutils",
+    ],
+    header_libs: [
+        "libaudioeffects",
+        "libhardware_headers",
+    ],
+}
diff --git a/media/libeffects/preprocessing/tests/PreProcessingTest.cpp b/media/libeffects/preprocessing/tests/PreProcessingTest.cpp
new file mode 100644
index 0000000..3244c1f
--- /dev/null
+++ b/media/libeffects/preprocessing/tests/PreProcessingTest.cpp
@@ -0,0 +1,521 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <getopt.h>
+#include <stddef.h>
+#include <stdint.h>
+#include <sys/stat.h>
+#include <vector>
+
+#include <audio_effects/effect_aec.h>
+#include <audio_effects/effect_agc.h>
+#ifndef WEBRTC_LEGACY
+#include <audio_effects/effect_agc2.h>
+#endif
+#include <audio_effects/effect_ns.h>
+#include <log/log.h>
+
+// This is the only symbol that needs to be imported
+extern audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM;
+
+//------------------------------------------------------------------------------
+// local definitions
+//------------------------------------------------------------------------------
+
+// types of pre processing modules
+enum PreProcId {
+  PREPROC_AGC,  // Automatic Gain Control
+#ifndef WEBRTC_LEGACY
+  PREPROC_AGC2,  // Automatic Gain Control 2
+#endif
+  PREPROC_AEC,  // Acoustic Echo Canceler
+  PREPROC_NS,   // Noise Suppressor
+  PREPROC_NUM_EFFECTS
+};
+
+enum PreProcParams {
+  ARG_HELP = 1,
+  ARG_INPUT,
+  ARG_OUTPUT,
+  ARG_FAR,
+  ARG_FS,
+  ARG_CH_MASK,
+  ARG_AGC_TGT_LVL,
+  ARG_AGC_COMP_LVL,
+  ARG_AEC_DELAY,
+  ARG_NS_LVL,
+#ifndef WEBRTC_LEGACY
+  ARG_AEC_MOBILE,
+  ARG_AGC2_GAIN,
+  ARG_AGC2_LVL,
+  ARG_AGC2_SAT_MGN
+#endif
+};
+
+struct preProcConfigParams_t {
+  int samplingFreq = 16000;
+  audio_channel_mask_t chMask = AUDIO_CHANNEL_IN_MONO;
+  int nsLevel = 0;         // a value between 0-3
+  int agcTargetLevel = 3;  // in dB
+  int agcCompLevel = 9;    // in dB
+#ifndef WEBRTC_LEGACY
+  float agc2Gain = 0.f;             // in dB
+  float agc2SaturationMargin = 2.f; // in dB
+  int agc2Level = 0;                // either kRms(0) or kPeak(1)
+#endif
+  int aecDelay = 0;        // in ms
+};
+
+const effect_uuid_t kPreProcUuids[PREPROC_NUM_EFFECTS] = {
+    {0xaa8130e0, 0x66fc, 0x11e0, 0xbad0, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},  // agc uuid
+#ifndef WEBRTC_LEGACY
+    {0x89f38e65, 0xd4d2, 0x4d64, 0xad0e, {0x2b, 0x3e, 0x79, 0x9e, 0xa8, 0x86}},  // agc2 uuid
+#endif
+    {0xbb392ec0, 0x8d4d, 0x11e0, 0xa896, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},  // aec uuid
+    {0xc06c8400, 0x8e06, 0x11e0, 0x9cb6, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},  // ns  uuid
+};
+
+constexpr audio_channel_mask_t kPreProcConfigChMask[] = {
+    AUDIO_CHANNEL_IN_MONO,
+    AUDIO_CHANNEL_IN_STEREO,
+    AUDIO_CHANNEL_IN_FRONT_BACK,
+    AUDIO_CHANNEL_IN_6,
+    AUDIO_CHANNEL_IN_2POINT0POINT2,
+    AUDIO_CHANNEL_IN_2POINT1POINT2,
+    AUDIO_CHANNEL_IN_3POINT0POINT2,
+    AUDIO_CHANNEL_IN_3POINT1POINT2,
+    AUDIO_CHANNEL_IN_5POINT1,
+    AUDIO_CHANNEL_IN_VOICE_UPLINK_MONO,
+    AUDIO_CHANNEL_IN_VOICE_DNLINK_MONO,
+    AUDIO_CHANNEL_IN_VOICE_CALL_MONO,
+};
+
+constexpr int kPreProcConfigChMaskCount = std::size(kPreProcConfigChMask);
+
+void printUsage() {
+  printf("\nUsage: ");
+  printf("\n     <executable> [options]\n");
+  printf("\nwhere options are, ");
+  printf("\n     --input <inputfile>");
+  printf("\n           path to the input file");
+  printf("\n     --output <outputfile>");
+  printf("\n           path to the output file");
+  printf("\n     --help");
+  printf("\n           Prints this usage information");
+  printf("\n     --fs <sampling_freq>");
+  printf("\n           Sampling frequency in Hz, default 16000.");
+  printf("\n     -ch_mask <channel_mask>\n");
+  printf("\n         0  - AUDIO_CHANNEL_IN_MONO");
+  printf("\n         1  - AUDIO_CHANNEL_IN_STEREO");
+  printf("\n         2  - AUDIO_CHANNEL_IN_FRONT_BACK");
+  printf("\n         3  - AUDIO_CHANNEL_IN_6");
+  printf("\n         4  - AUDIO_CHANNEL_IN_2POINT0POINT2");
+  printf("\n         5  - AUDIO_CHANNEL_IN_2POINT1POINT2");
+  printf("\n         6  - AUDIO_CHANNEL_IN_3POINT0POINT2");
+  printf("\n         7  - AUDIO_CHANNEL_IN_3POINT1POINT2");
+  printf("\n         8  - AUDIO_CHANNEL_IN_5POINT1");
+  printf("\n         9  - AUDIO_CHANNEL_IN_VOICE_UPLINK_MONO");
+  printf("\n         10 - AUDIO_CHANNEL_IN_VOICE_DNLINK_MONO ");
+  printf("\n         11 - AUDIO_CHANNEL_IN_VOICE_CALL_MONO ");
+  printf("\n         default 0");
+  printf("\n     --far <farend_file>");
+  printf("\n           Path to far-end file needed for echo cancellation");
+  printf("\n     --aec");
+  printf("\n           Enable Echo Cancellation, default disabled");
+  printf("\n     --ns");
+  printf("\n           Enable Noise Suppression, default disabled");
+  printf("\n     --agc");
+  printf("\n           Enable Gain Control, default disabled");
+#ifndef WEBRTC_LEGACY
+  printf("\n     --agc2");
+  printf("\n           Enable Gain Controller 2, default disabled");
+#endif
+  printf("\n     --ns_lvl <ns_level>");
+  printf("\n           Noise Suppression level in dB, default value 0dB");
+  printf("\n     --agc_tgt_lvl <target_level>");
+  printf("\n           AGC Target Level in dB, default value 3dB");
+  printf("\n     --agc_comp_lvl <comp_level>");
+  printf("\n           AGC Comp Level in dB, default value 9dB");
+#ifndef WEBRTC_LEGACY
+  printf("\n     --agc2_gain <fixed_digital_gain>");
+  printf("\n           AGC Fixed Digital Gain in dB, default value 0dB");
+  printf("\n     --agc2_lvl <level_estimator>");
+  printf("\n           AGC Adaptive Digital Level Estimator, default value kRms");
+  printf("\n     --agc2_sat_mgn <saturation_margin>");
+  printf("\n           AGC Adaptive Digital Saturation Margin in dB, default value 2dB");
+#endif
+  printf("\n     --aec_delay <delay>");
+  printf("\n           AEC delay value in ms, default value 0ms");
+#ifndef WEBRTC_LEGACY
+  printf("\n     --aec_mobile");
+  printf("\n           Enable mobile mode of echo canceller, default disabled");
+#endif
+  printf("\n");
+}
+
+constexpr float kTenMilliSecVal = 0.01;
+
+int preProcCreateEffect(effect_handle_t *pEffectHandle, uint32_t effectType,
+                        effect_config_t *pConfig, int sessionId, int ioId) {
+  if (int status = AUDIO_EFFECT_LIBRARY_INFO_SYM.create_effect(&kPreProcUuids[effectType],
+                                                               sessionId, ioId, pEffectHandle);
+      status != 0) {
+    ALOGE("Audio Preprocessing create returned an error = %d\n", status);
+    return EXIT_FAILURE;
+  }
+  int reply = 0;
+  uint32_t replySize = sizeof(reply);
+  if (effectType == PREPROC_AEC) {
+    (**pEffectHandle)
+        ->command(*pEffectHandle, EFFECT_CMD_SET_CONFIG_REVERSE, sizeof(effect_config_t), pConfig,
+                  &replySize, &reply);
+  }
+  (**pEffectHandle)
+      ->command(*pEffectHandle, EFFECT_CMD_SET_CONFIG, sizeof(effect_config_t), pConfig,
+                &replySize, &reply);
+  return reply;
+}
+
+int preProcSetConfigParam(uint32_t paramType, uint32_t paramValue, effect_handle_t effectHandle) {
+  int reply = 0;
+  uint32_t replySize = sizeof(reply);
+  uint32_t paramData[2] = {paramType, paramValue};
+  effect_param_t *effectParam =
+      (effect_param_t *)malloc(sizeof(*effectParam) + sizeof(paramData));
+  memcpy(&effectParam->data[0], &paramData[0], sizeof(paramData));
+  effectParam->psize = sizeof(paramData[0]);
+  effectParam->vsize = sizeof(paramData[1]);
+  (*effectHandle)
+      ->command(effectHandle, EFFECT_CMD_SET_PARAM, sizeof(effect_param_t), effectParam,
+                &replySize, &reply);
+  free(effectParam);
+  return reply;
+}
+
+int main(int argc, const char *argv[]) {
+  if (argc == 1) {
+    printUsage();
+    return EXIT_FAILURE;
+  }
+  const char *inputFile = nullptr;
+  const char *outputFile = nullptr;
+  const char *farFile = nullptr;
+  int effectEn[PREPROC_NUM_EFFECTS] = {0};
+#ifndef WEBRTC_LEGACY
+  int aecMobileMode = 0;
+#endif
+
+  const option long_opts[] = {
+      {"help", no_argument, nullptr, ARG_HELP},
+      {"input", required_argument, nullptr, ARG_INPUT},
+      {"output", required_argument, nullptr, ARG_OUTPUT},
+      {"far", required_argument, nullptr, ARG_FAR},
+      {"fs", required_argument, nullptr, ARG_FS},
+      {"ch_mask", required_argument, nullptr, ARG_CH_MASK},
+      {"agc_tgt_lvl", required_argument, nullptr, ARG_AGC_TGT_LVL},
+      {"agc_comp_lvl", required_argument, nullptr, ARG_AGC_COMP_LVL},
+#ifndef WEBRTC_LEGACY
+      {"agc2_gain", required_argument, nullptr, ARG_AGC2_GAIN},
+      {"agc2_lvl", required_argument, nullptr, ARG_AGC2_LVL},
+      {"agc2_sat_mgn", required_argument, nullptr, ARG_AGC2_SAT_MGN},
+#endif
+      {"aec_delay", required_argument, nullptr, ARG_AEC_DELAY},
+      {"ns_lvl", required_argument, nullptr, ARG_NS_LVL},
+      {"aec", no_argument, &effectEn[PREPROC_AEC], 1},
+      {"agc", no_argument, &effectEn[PREPROC_AGC], 1},
+#ifndef WEBRTC_LEGACY
+      {"agc2", no_argument, &effectEn[PREPROC_AGC2], 1},
+#endif
+      {"ns", no_argument, &effectEn[PREPROC_NS], 1},
+#ifndef WEBRTC_LEGACY
+      {"aec_mobile", no_argument, &aecMobileMode, 1},
+#endif
+      {nullptr, 0, nullptr, 0},
+  };
+  struct preProcConfigParams_t preProcCfgParams {};
+
+  while (true) {
+    const int opt = getopt_long(argc, (char *const *)argv, "i:o:", long_opts, nullptr);
+    if (opt == -1) {
+      break;
+    }
+    switch (opt) {
+      case ARG_HELP:
+        printUsage();
+        return 0;
+      case ARG_INPUT: {
+        inputFile = (char *)optarg;
+        break;
+      }
+      case ARG_OUTPUT: {
+        outputFile = (char *)optarg;
+        break;
+      }
+      case ARG_FAR: {
+        farFile = (char *)optarg;
+        break;
+      }
+      case ARG_FS: {
+        preProcCfgParams.samplingFreq = atoi(optarg);
+        break;
+      }
+      case ARG_CH_MASK: {
+        int chMaskIdx = atoi(optarg);
+        if (chMaskIdx < 0 or chMaskIdx >= kPreProcConfigChMaskCount) {
+          ALOGE("Channel Mask index not in correct range\n");
+          printUsage();
+          return EXIT_FAILURE;
+        }
+        preProcCfgParams.chMask = kPreProcConfigChMask[chMaskIdx];
+        break;
+      }
+      case ARG_AGC_TGT_LVL: {
+        preProcCfgParams.agcTargetLevel = atoi(optarg);
+        break;
+      }
+      case ARG_AGC_COMP_LVL: {
+        preProcCfgParams.agcCompLevel = atoi(optarg);
+        break;
+      }
+#ifndef WEBRTC_LEGACY
+      case ARG_AGC2_GAIN: {
+        preProcCfgParams.agc2Gain = atof(optarg);
+        break;
+      }
+      case ARG_AGC2_LVL: {
+        preProcCfgParams.agc2Level = atoi(optarg);
+        break;
+      }
+      case ARG_AGC2_SAT_MGN: {
+        preProcCfgParams.agc2SaturationMargin = atof(optarg);
+        break;
+      }
+#endif
+      case ARG_AEC_DELAY: {
+        preProcCfgParams.aecDelay = atoi(optarg);
+        break;
+      }
+      case ARG_NS_LVL: {
+        preProcCfgParams.nsLevel = atoi(optarg);
+        break;
+      }
+      default:
+        break;
+    }
+  }
+
+  if (inputFile == nullptr) {
+    ALOGE("Error: missing input file\n");
+    printUsage();
+    return EXIT_FAILURE;
+  }
+
+  std::unique_ptr<FILE, decltype(&fclose)> inputFp(fopen(inputFile, "rb"), &fclose);
+  if (inputFp == nullptr) {
+    ALOGE("Cannot open input file %s\n", inputFile);
+    return EXIT_FAILURE;
+  }
+
+  std::unique_ptr<FILE, decltype(&fclose)> farFp(fopen(farFile, "rb"), &fclose);
+  std::unique_ptr<FILE, decltype(&fclose)> outputFp(fopen(outputFile, "wb"), &fclose);
+  if (effectEn[PREPROC_AEC]) {
+    if (farFile == nullptr) {
+      ALOGE("Far end signal file required for echo cancellation \n");
+      return EXIT_FAILURE;
+    }
+    if (farFp == nullptr) {
+      ALOGE("Cannot open far end stream file %s\n", farFile);
+      return EXIT_FAILURE;
+    }
+    struct stat statInput, statFar;
+    (void)fstat(fileno(inputFp.get()), &statInput);
+    (void)fstat(fileno(farFp.get()), &statFar);
+    if (statInput.st_size != statFar.st_size) {
+      ALOGE("Near and far end signals are of different sizes");
+      return EXIT_FAILURE;
+    }
+  }
+  if (outputFile != nullptr && outputFp == nullptr) {
+    ALOGE("Cannot open output file %s\n", outputFile);
+    return EXIT_FAILURE;
+  }
+
+  int32_t sessionId = 1;
+  int32_t ioId = 1;
+  effect_handle_t effectHandle[PREPROC_NUM_EFFECTS] = {nullptr};
+  effect_config_t config;
+  config.inputCfg.samplingRate = config.outputCfg.samplingRate = preProcCfgParams.samplingFreq;
+  config.inputCfg.channels = config.outputCfg.channels = preProcCfgParams.chMask;
+  config.inputCfg.format = config.outputCfg.format = AUDIO_FORMAT_PCM_16_BIT;
+
+  // Create all the effect handles
+  for (int i = 0; i < PREPROC_NUM_EFFECTS; i++) {
+    if (int status = preProcCreateEffect(&effectHandle[i], i, &config, sessionId, ioId);
+        status != 0) {
+      ALOGE("Create effect call returned error %i", status);
+      return EXIT_FAILURE;
+    }
+  }
+
+  for (int i = 0; i < PREPROC_NUM_EFFECTS; i++) {
+    if (effectEn[i] == 1) {
+      int reply = 0;
+      uint32_t replySize = sizeof(reply);
+      (*effectHandle[i])
+          ->command(effectHandle[i], EFFECT_CMD_ENABLE, 0, nullptr, &replySize, &reply);
+      if (reply != 0) {
+        ALOGE("Command enable call returned error %d\n", reply);
+        return EXIT_FAILURE;
+      }
+    }
+  }
+
+  // Set Config Params of the effects
+  if (effectEn[PREPROC_AGC]) {
+    if (int status = preProcSetConfigParam(AGC_PARAM_TARGET_LEVEL,
+                                           (uint32_t)preProcCfgParams.agcTargetLevel,
+                                           effectHandle[PREPROC_AGC]);
+        status != 0) {
+      ALOGE("Invalid AGC Target Level. Error %d\n", status);
+      return EXIT_FAILURE;
+    }
+    if (int status =
+            preProcSetConfigParam(AGC_PARAM_COMP_GAIN, (uint32_t)preProcCfgParams.agcCompLevel,
+                                  effectHandle[PREPROC_AGC]);
+        status != 0) {
+      ALOGE("Invalid AGC Comp Gain. Error %d\n", status);
+      return EXIT_FAILURE;
+    }
+  }
+#ifndef WEBRTC_LEGACY
+  if (effectEn[PREPROC_AGC2]) {
+    if (int status = preProcSetConfigParam(AGC2_PARAM_FIXED_DIGITAL_GAIN,
+                                           (float)preProcCfgParams.agc2Gain,
+                                           effectHandle[PREPROC_AGC2]);
+        status != 0) {
+      ALOGE("Invalid AGC2 Fixed Digital Gain. Error %d\n", status);
+      return EXIT_FAILURE;
+    }
+    if (int status = preProcSetConfigParam(AGC2_PARAM_ADAPT_DIGI_LEVEL_ESTIMATOR,
+                                           (uint32_t)preProcCfgParams.agc2Level,
+                                           effectHandle[PREPROC_AGC2]);
+        status != 0) {
+      ALOGE("Invalid AGC2 Level Estimator. Error %d\n", status);
+      return EXIT_FAILURE;
+    }
+    if (int status = preProcSetConfigParam(AGC2_PARAM_ADAPT_DIGI_EXTRA_SATURATION_MARGIN,
+                                           (float)preProcCfgParams.agc2SaturationMargin,
+                                           effectHandle[PREPROC_AGC2]);
+        status != 0) {
+      ALOGE("Invalid AGC2 Saturation Margin. Error %d\n", status);
+      return EXIT_FAILURE;
+    }
+  }
+#endif
+  if (effectEn[PREPROC_NS]) {
+    if (int status = preProcSetConfigParam(NS_PARAM_LEVEL, (uint32_t)preProcCfgParams.nsLevel,
+                                           effectHandle[PREPROC_NS]);
+        status != 0) {
+      ALOGE("Invalid Noise Suppression level Error %d\n", status);
+      return EXIT_FAILURE;
+    }
+  }
+#ifndef WEBRTC_LEGACY
+  if (effectEn[PREPROC_AEC]) {
+    if (int status = preProcSetConfigParam(AEC_PARAM_MOBILE_MODE, (uint32_t)aecMobileMode,
+                                           effectHandle[PREPROC_AEC]);
+        status != 0) {
+      ALOGE("Invalid AEC mobile mode value %d\n", status);
+      return EXIT_FAILURE;
+    }
+  }
+#endif
+
+  // Process Call
+  const int frameLength = (int)(preProcCfgParams.samplingFreq * kTenMilliSecVal);
+  const int ioChannelCount = audio_channel_count_from_in_mask(preProcCfgParams.chMask);
+  const int ioFrameSize = ioChannelCount * sizeof(short);
+  int frameCounter = 0;
+  while (true) {
+    std::vector<short> in(frameLength * ioChannelCount);
+    std::vector<short> out(frameLength * ioChannelCount);
+    std::vector<short> farIn(frameLength * ioChannelCount);
+    size_t samplesRead = fread(in.data(), ioFrameSize, frameLength, inputFp.get());
+    if (samplesRead == 0) {
+      break;
+    }
+    audio_buffer_t inputBuffer, outputBuffer;
+    audio_buffer_t farInBuffer{};
+    inputBuffer.frameCount = samplesRead;
+    outputBuffer.frameCount = samplesRead;
+    inputBuffer.s16 = in.data();
+    outputBuffer.s16 = out.data();
+
+    if (farFp != nullptr) {
+      samplesRead = fread(farIn.data(), ioFrameSize, frameLength, farFp.get());
+      if (samplesRead == 0) {
+        break;
+      }
+      farInBuffer.frameCount = samplesRead;
+      farInBuffer.s16 = farIn.data();
+    }
+
+    for (int i = 0; i < PREPROC_NUM_EFFECTS; i++) {
+      if (effectEn[i] == 1) {
+        if (i == PREPROC_AEC) {
+          if (int status =
+                  preProcSetConfigParam(AEC_PARAM_ECHO_DELAY, (uint32_t)preProcCfgParams.aecDelay,
+                                        effectHandle[PREPROC_AEC]);
+              status != 0) {
+            ALOGE("preProcSetConfigParam returned Error %d\n", status);
+            return EXIT_FAILURE;
+          }
+        }
+        if (int status =
+                (*effectHandle[i])->process(effectHandle[i], &inputBuffer, &outputBuffer);
+            status != 0) {
+          ALOGE("\nError: Process i = %d returned with error %d\n", i, status);
+          return EXIT_FAILURE;
+        }
+        if (i == PREPROC_AEC) {
+          if (int status = (*effectHandle[i])
+                               ->process_reverse(effectHandle[i], &farInBuffer, &outputBuffer);
+              status != 0) {
+            ALOGE("\nError: Process reverse i = %d returned with error %d\n", i, status);
+            return EXIT_FAILURE;
+          }
+        }
+      }
+    }
+    if (outputFp != nullptr) {
+      size_t samplesWritten =
+          fwrite(out.data(), ioFrameSize, outputBuffer.frameCount, outputFp.get());
+      if (samplesWritten != outputBuffer.frameCount) {
+        ALOGE("\nError: Output file writing failed");
+        break;
+      }
+    }
+    frameCounter += frameLength;
+  }
+  // Release all the effect handles created
+  for (int i = 0; i < PREPROC_NUM_EFFECTS; i++) {
+    if (int status = AUDIO_EFFECT_LIBRARY_INFO_SYM.release_effect(effectHandle[i]);
+        status != 0) {
+      ALOGE("Audio Preprocessing release returned an error = %d\n", status);
+      return EXIT_FAILURE;
+    }
+  }
+  return EXIT_SUCCESS;
+}
diff --git a/media/libeffects/proxy/EffectProxy.cpp b/media/libeffects/proxy/EffectProxy.cpp
index 42e44f0..c010d68 100644
--- a/media/libeffects/proxy/EffectProxy.cpp
+++ b/media/libeffects/proxy/EffectProxy.cpp
@@ -30,7 +30,7 @@
 #include <media/EffectsFactoryApi.h>
 
 namespace android {
-// This is a dummy proxy descriptor just to return to Factory during the initial
+// This is a stub proxy descriptor just to return to Factory during the initial
 // GetDescriptor call. Later in the factory, it is replaced with the
 // SW sub effect descriptor
 // proxy UUID af8da7e0-2ca1-11e3-b71d-0002a5d5c51b
diff --git a/media/libeffects/visualizer/Android.bp b/media/libeffects/visualizer/Android.bp
new file mode 100644
index 0000000..f6c585e
--- /dev/null
+++ b/media/libeffects/visualizer/Android.bp
@@ -0,0 +1,32 @@
+// Visualizer library
+cc_library_shared {
+    name: "libvisualizer",
+
+    vendor: true,
+
+    srcs: [
+        "EffectVisualizer.cpp",
+    ],
+
+    cflags: [
+        "-O2",
+        "-fvisibility=hidden",
+
+        "-DBUILD_FLOAT",
+        "-DSUPPORT_MC",
+
+        "-Wall",
+        "-Werror",
+    ],
+
+    shared_libs: [
+        "liblog",
+    ],
+
+    relative_install_path: "soundfx",
+
+    header_libs: [
+        "libaudioeffects",
+        "libaudioutils_headers",
+    ],
+}
diff --git a/media/libeffects/visualizer/Android.mk b/media/libeffects/visualizer/Android.mk
deleted file mode 100644
index 35e2f3d..0000000
--- a/media/libeffects/visualizer/Android.mk
+++ /dev/null
@@ -1,28 +0,0 @@
-LOCAL_PATH:= $(call my-dir)
-
-# Visualizer library
-include $(CLEAR_VARS)
-
-LOCAL_VENDOR_MODULE := true
-LOCAL_SRC_FILES:= \
-	EffectVisualizer.cpp
-
-LOCAL_CFLAGS+= -O2 -fvisibility=hidden
-LOCAL_CFLAGS += -Wall -Werror
-LOCAL_CFLAGS += -DBUILD_FLOAT -DSUPPORT_MC
-
-LOCAL_SHARED_LIBRARIES := \
-	libcutils \
-	liblog \
-	libdl
-
-LOCAL_MODULE_RELATIVE_PATH := soundfx
-LOCAL_MODULE:= libvisualizer
-
-LOCAL_C_INCLUDES := \
-	$(call include-path-for, audio-effects) \
-	$(call include-path-for, audio-utils)
-
-
-LOCAL_HEADER_LIBRARIES += libhardware_headers
-include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libmedia/Android.bp b/media/libmedia/Android.bp
index 3fd3fc3..1a7eb6f 100644
--- a/media/libmedia/Android.bp
+++ b/media/libmedia/Android.bp
@@ -17,6 +17,22 @@
     ],
 }
 
+cc_library_headers {
+    name: "libmedia_datasource_headers",
+    export_include_dirs: ["include"],
+    host_supported: true,
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media",
+    ],
+    min_sdk_version: "29",
+}
+
 filegroup {
     name: "libmedia_omx_aidl",
     srcs: [
@@ -33,28 +49,6 @@
     path: "aidl",
 }
 
-filegroup {
-    name: "resourcemanager_aidl",
-    srcs: [
-        "aidl/android/media/IResourceManagerClient.aidl",
-        "aidl/android/media/IResourceManagerService.aidl",
-        "aidl/android/media/MediaResourceType.aidl",
-        "aidl/android/media/MediaResourceSubType.aidl",
-        "aidl/android/media/MediaResourceParcel.aidl",
-        "aidl/android/media/MediaResourcePolicyParcel.aidl",
-    ],
-    path: "aidl",
-}
-
-aidl_interface {
-    name: "resourcemanager_aidl_interface",
-    unstable: true,
-    local_include_dir: "aidl",
-    srcs: [
-        ":resourcemanager_aidl",
-    ],
-}
-
 cc_library_shared {
     name: "libmedia_omx",
     vendor_available: true,
@@ -203,7 +197,7 @@
     ],
 
     header_libs: [
-        "libmedia_headers",
+        "libmedia_datasource_headers",
         "media_ndk_headers",
     ],
 
@@ -220,6 +214,14 @@
         ],
         cfi: true,
     },
+
+    host_supported: true,
+
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
 }
 
 cc_library_shared {
@@ -309,15 +311,19 @@
     header_libs: [
         "libstagefright_headers",
         "media_ndk_headers",
+        "jni_headers",
     ],
 
     export_header_lib_headers: [
         "libstagefright_headers",
         "media_ndk_headers",
+        "jni_headers",
     ],
 
     shared_libs: [
         "android.hidl.token@1.0-utils",
+        "audioclient-types-aidl-unstable-cpp",
+        "av-types-aidl-unstable-cpp",
         "liblog",
         "libcutils",
         "libprocessgroup",
@@ -372,3 +378,36 @@
         cfi: true,
     },
 }
+
+cc_library_static {
+    name: "libmedia_ndkformatpriv",
+
+    host_supported: true,
+
+    srcs: [
+        "NdkMediaFormatPriv.cpp",
+        "NdkMediaErrorPriv.cpp",
+    ],
+
+    header_libs: [
+        "libstagefright_foundation_headers",
+        "libstagefright_headers",
+        "media_ndk_headers",
+    ],
+
+    cflags: [
+        "-DEXPORT=__attribute__((visibility(\"default\")))",
+        "-Werror",
+        "-Wall",
+    ],
+
+    export_include_dirs: ["include"],
+
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
+
+    apex_available: ["com.android.media"],
+}
diff --git a/media/libmedia/IMediaPlayer.cpp b/media/libmedia/IMediaPlayer.cpp
index 20bc23d..8a4b17c 100644
--- a/media/libmedia/IMediaPlayer.cpp
+++ b/media/libmedia/IMediaPlayer.cpp
@@ -40,6 +40,7 @@
     SET_DATA_SOURCE_FD,
     SET_DATA_SOURCE_STREAM,
     SET_DATA_SOURCE_CALLBACK,
+    SET_DATA_SOURCE_RTP,
     SET_BUFFERING_SETTINGS,
     GET_BUFFERING_SETTINGS,
     PREPARE_ASYNC,
@@ -161,6 +162,15 @@
         return reply.readInt32();
     }
 
+    status_t setDataSource(const String8& rtpParams) {
+        Parcel data, reply;
+        data.writeInterfaceToken(IMediaPlayer::getInterfaceDescriptor());
+        data.writeString8(rtpParams);
+        remote()->transact(SET_DATA_SOURCE_RTP, data, &reply);
+
+        return reply.readInt32();
+    }
+
     // pass the buffered IGraphicBufferProducer to the media player service
     status_t setVideoSurfaceTexture(const sp<IGraphicBufferProducer>& bufferProducer)
     {
@@ -685,6 +695,12 @@
             }
             return NO_ERROR;
         }
+        case SET_DATA_SOURCE_RTP: {
+            CHECK_INTERFACE(IMediaPlayer, data, reply);
+            String8 rtpParams = data.readString8();
+            reply->writeInt32(setDataSource(rtpParams));
+            return NO_ERROR;
+        }
         case SET_VIDEO_SURFACETEXTURE: {
             CHECK_INTERFACE(IMediaPlayer, data, reply);
             sp<IGraphicBufferProducer> bufferProducer =
@@ -958,7 +974,7 @@
         case PREPARE_DRM: {
             CHECK_INTERFACE(IMediaPlayer, data, reply);
 
-            uint8_t uuid[16];
+            uint8_t uuid[16] = {};
             data.read(uuid, sizeof(uuid));
             Vector<uint8_t> drmSessionId;
             readVector(data, drmSessionId);
diff --git a/media/libmedia/IMediaPlayerService.cpp b/media/libmedia/IMediaPlayerService.cpp
index bd18a40..11005c6 100644
--- a/media/libmedia/IMediaPlayerService.cpp
+++ b/media/libmedia/IMediaPlayerService.cpp
@@ -62,11 +62,13 @@
     }
 
     virtual sp<IMediaPlayer> create(
-            const sp<IMediaPlayerClient>& client, audio_session_t audioSessionId) {
+            const sp<IMediaPlayerClient>& client, audio_session_t audioSessionId,
+            const std::string opPackageName) {
         Parcel data, reply;
         data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor());
         data.writeStrongBinder(IInterface::asBinder(client));
         data.writeInt32(audioSessionId);
+        data.writeCString(opPackageName.c_str());
 
         remote()->transact(CREATE, data, &reply);
         return interface_cast<IMediaPlayer>(reply.readStrongBinder());
@@ -127,7 +129,12 @@
             sp<IMediaPlayerClient> client =
                 interface_cast<IMediaPlayerClient>(data.readStrongBinder());
             audio_session_t audioSessionId = (audio_session_t) data.readInt32();
-            sp<IMediaPlayer> player = create(client, audioSessionId);
+            const char* opPackageName = data.readCString();
+            if (opPackageName == nullptr) {
+                return FAILED_TRANSACTION;
+            }
+            std::string opPackageNameStr(opPackageName);
+            sp<IMediaPlayer> player = create(client, audioSessionId, opPackageNameStr);
             reply->writeStrongBinder(IInterface::asBinder(player));
             return NO_ERROR;
         } break;
diff --git a/media/libmedia/MediaProfiles.cpp b/media/libmedia/MediaProfiles.cpp
index 637322f..e8839ba 100644
--- a/media/libmedia/MediaProfiles.cpp
+++ b/media/libmedia/MediaProfiles.cpp
@@ -63,7 +63,7 @@
                      searchDirs[1] + fileName,
                      searchDirs[2] + fileName,
                      searchDirs[3] + fileName,
-                     "system/etc/media_profiles_V1_0.xml" // System fallback
+                     "system/etc/media_profiles.xml" // System fallback
                    };
         }();
     static std::array<char const*, 5> const cPaths = {
@@ -240,7 +240,10 @@
 
     const size_t nMappings = sizeof(sVideoEncoderNameMap)/sizeof(sVideoEncoderNameMap[0]);
     const int codec = findTagForName(sVideoEncoderNameMap, nMappings, atts[1]);
-    CHECK(codec != -1);
+    if (codec == -1) {
+      ALOGE("MediaProfiles::createVideoCodec failed to locate codec %s", atts[1]);
+      return nullptr;
+    }
 
     MediaProfiles::VideoCodec *videoCodec =
         new MediaProfiles::VideoCodec(static_cast<video_encoder>(codec),
@@ -262,7 +265,10 @@
           !strcmp("channels",   atts[6]));
     const size_t nMappings = sizeof(sAudioEncoderNameMap)/sizeof(sAudioEncoderNameMap[0]);
     const int codec = findTagForName(sAudioEncoderNameMap, nMappings, atts[1]);
-    CHECK(codec != -1);
+    if (codec == -1) {
+      ALOGE("MediaProfiles::createAudioCodec failed to locate codec %s", atts[1]);
+      return nullptr;
+    }
 
     MediaProfiles::AudioCodec *audioCodec =
         new MediaProfiles::AudioCodec(static_cast<audio_encoder>(codec),
@@ -282,7 +288,10 @@
 
     const size_t nMappings = sizeof(sAudioDecoderNameMap)/sizeof(sAudioDecoderNameMap[0]);
     const int codec = findTagForName(sAudioDecoderNameMap, nMappings, atts[1]);
-    CHECK(codec != -1);
+    if (codec == -1) {
+      ALOGE("MediaProfiles::createAudioDecoderCap failed to locate codec %s", atts[1]);
+      return nullptr;
+    }
 
     MediaProfiles::AudioDecoderCap *cap =
         new MediaProfiles::AudioDecoderCap(static_cast<audio_decoder>(codec));
@@ -298,7 +307,10 @@
 
     const size_t nMappings = sizeof(sVideoDecoderNameMap)/sizeof(sVideoDecoderNameMap[0]);
     const int codec = findTagForName(sVideoDecoderNameMap, nMappings, atts[1]);
-    CHECK(codec != -1);
+    if (codec == -1) {
+      ALOGE("MediaProfiles::createVideoDecoderCap failed to locate codec %s", atts[1]);
+      return nullptr;
+    }
 
     MediaProfiles::VideoDecoderCap *cap =
         new MediaProfiles::VideoDecoderCap(static_cast<video_decoder>(codec));
@@ -322,7 +334,10 @@
 
     const size_t nMappings = sizeof(sVideoEncoderNameMap)/sizeof(sVideoEncoderNameMap[0]);
     const int codec = findTagForName(sVideoEncoderNameMap, nMappings, atts[1]);
-    CHECK(codec != -1);
+    if (codec == -1) {
+      ALOGE("MediaProfiles::createVideoEncoderCap failed to locate codec %s", atts[1]);
+      return nullptr;
+    }
 
     MediaProfiles::VideoEncoderCap *cap =
         new MediaProfiles::VideoEncoderCap(static_cast<video_encoder>(codec),
@@ -346,7 +361,10 @@
 
     const size_t nMappings = sizeof(sAudioEncoderNameMap)/sizeof(sAudioEncoderNameMap[0]);
     const int codec = findTagForName(sAudioEncoderNameMap, nMappings, atts[1]);
-    CHECK(codec != -1);
+    if (codec == -1) {
+      ALOGE("MediaProfiles::createAudioEncoderCap failed to locate codec %s", atts[1]);
+      return nullptr;
+    }
 
     MediaProfiles::AudioEncoderCap *cap =
         new MediaProfiles::AudioEncoderCap(static_cast<audio_encoder>(codec), atoi(atts[5]),
@@ -386,11 +404,17 @@
     const size_t nProfileMappings = sizeof(sCamcorderQualityNameMap)/
             sizeof(sCamcorderQualityNameMap[0]);
     const int quality = findTagForName(sCamcorderQualityNameMap, nProfileMappings, atts[1]);
-    CHECK(quality != -1);
+    if (quality == -1) {
+      ALOGE("MediaProfiles::createCamcorderProfile failed to locate quality %s", atts[1]);
+      return nullptr;
+    }
 
     const size_t nFormatMappings = sizeof(sFileFormatMap)/sizeof(sFileFormatMap[0]);
     const int fileFormat = findTagForName(sFileFormatMap, nFormatMappings, atts[3]);
-    CHECK(fileFormat != -1);
+    if (fileFormat == -1) {
+      ALOGE("MediaProfiles::createCamcorderProfile failed to locate file format %s", atts[1]);
+      return nullptr;
+    }
 
     MediaProfiles::CamcorderProfile *profile = new MediaProfiles::CamcorderProfile;
     profile->mCameraId = cameraId;
@@ -462,24 +486,39 @@
         createAudioCodec(atts, profiles);
     } else if (strcmp("VideoEncoderCap", name) == 0 &&
                strcmp("true", atts[3]) == 0) {
-        profiles->mVideoEncoders.add(createVideoEncoderCap(atts));
+        MediaProfiles::VideoEncoderCap* cap = createVideoEncoderCap(atts);
+        if (cap != nullptr) {
+          profiles->mVideoEncoders.add(cap);
+        }
     } else if (strcmp("AudioEncoderCap", name) == 0 &&
                strcmp("true", atts[3]) == 0) {
-        profiles->mAudioEncoders.add(createAudioEncoderCap(atts));
+        MediaProfiles::AudioEncoderCap* cap = createAudioEncoderCap(atts);
+        if (cap != nullptr) {
+          profiles->mAudioEncoders.add(cap);
+        }
     } else if (strcmp("VideoDecoderCap", name) == 0 &&
                strcmp("true", atts[3]) == 0) {
-        profiles->mVideoDecoders.add(createVideoDecoderCap(atts));
+        MediaProfiles::VideoDecoderCap* cap = createVideoDecoderCap(atts);
+        if (cap != nullptr) {
+          profiles->mVideoDecoders.add(cap);
+        }
     } else if (strcmp("AudioDecoderCap", name) == 0 &&
                strcmp("true", atts[3]) == 0) {
-        profiles->mAudioDecoders.add(createAudioDecoderCap(atts));
+        MediaProfiles::AudioDecoderCap* cap = createAudioDecoderCap(atts);
+        if (cap != nullptr) {
+          profiles->mAudioDecoders.add(cap);
+        }
     } else if (strcmp("EncoderOutputFileFormat", name) == 0) {
         profiles->mEncoderOutputFileFormats.add(createEncoderOutputFileFormat(atts));
     } else if (strcmp("CamcorderProfiles", name) == 0) {
         profiles->mCurrentCameraId = getCameraId(atts);
         profiles->addStartTimeOffset(profiles->mCurrentCameraId, atts);
     } else if (strcmp("EncoderProfile", name) == 0) {
-        profiles->mCamcorderProfiles.add(
-            createCamcorderProfile(profiles->mCurrentCameraId, atts, profiles->mCameraIds));
+      MediaProfiles::CamcorderProfile* profile = createCamcorderProfile(
+          profiles->mCurrentCameraId, atts, profiles->mCameraIds);
+      if (profile != nullptr) {
+        profiles->mCamcorderProfiles.add(profile);
+      }
     } else if (strcmp("ImageEncoding", name) == 0) {
         profiles->addImageEncodingQualityLevel(profiles->mCurrentCameraId, atts);
     }
diff --git a/media/libmedia/MediaResource.cpp b/media/libmedia/MediaResource.cpp
index 0936a99..ec52a49 100644
--- a/media/libmedia/MediaResource.cpp
+++ b/media/libmedia/MediaResource.cpp
@@ -35,7 +35,7 @@
     this->value = value;
 }
 
-MediaResource::MediaResource(Type type, const std::vector<int8_t> &id, int64_t value) {
+MediaResource::MediaResource(Type type, const std::vector<uint8_t> &id, int64_t value) {
     this->type = type;
     this->subType = SubType::kUnspecifiedSubType;
     this->id = id;
@@ -43,11 +43,11 @@
 }
 
 //static
-MediaResource MediaResource::CodecResource(bool secure, bool video) {
+MediaResource MediaResource::CodecResource(bool secure, bool video, int64_t instanceCount) {
     return MediaResource(
             secure ? Type::kSecureCodec : Type::kNonSecureCodec,
             video ? SubType::kVideoCodec : SubType::kAudioCodec,
-            1);
+            instanceCount);
 }
 
 //static
@@ -66,11 +66,11 @@
 }
 
 //static
-MediaResource MediaResource::DrmSessionResource(const std::vector<int8_t> &id, int64_t value) {
+MediaResource MediaResource::DrmSessionResource(const std::vector<uint8_t> &id, int64_t value) {
     return MediaResource(Type::kDrmSession, id, value);
 }
 
-static String8 bytesToHexString(const std::vector<int8_t> &bytes) {
+static String8 bytesToHexString(const std::vector<uint8_t> &bytes) {
     String8 str;
     for (auto &b : bytes) {
         str.appendFormat("%02x", b);
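For context, a minimal usage sketch of the revised MediaResource factory methods above; the helper name and the instance count of 2 are illustrative, and only the signatures (the new instanceCount parameter and the uint8_t session id) come from this change.

#include <vector>

#include <media/MediaResource.h>

using android::MediaResource;

// Builds the resource list a hypothetical codec client could register with
// the resource manager, using the signatures introduced above.
static std::vector<MediaResource> makeCodecResources(
        bool secure, bool video, const std::vector<uint8_t> &drmSessionId) {
    std::vector<MediaResource> resources;
    // New third parameter: several codec instances can be claimed in one entry.
    resources.push_back(MediaResource::CodecResource(secure, video, /*instanceCount=*/2));
    // DRM session ids are now carried as unsigned bytes.
    resources.push_back(MediaResource::DrmSessionResource(drmSessionId, /*value=*/1));
    return resources;
}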
diff --git a/media/libmedia/MidiIoWrapper.cpp b/media/libmedia/MidiIoWrapper.cpp
index e71ea2c..da272e3 100644
--- a/media/libmedia/MidiIoWrapper.cpp
+++ b/media/libmedia/MidiIoWrapper.cpp
@@ -18,8 +18,9 @@
 #define LOG_TAG "MidiIoWrapper"
 #include <utils/Log.h>
 
-#include <sys/stat.h>
 #include <fcntl.h>
+#include <sys/stat.h>
+#include <unistd.h>
 
 #include <media/MidiIoWrapper.h>
 #include <media/MediaExtractorPluginApi.h>
diff --git a/media/libmedia/NdkMediaFormatPriv.cpp b/media/libmedia/NdkMediaFormatPriv.cpp
index 3a9fb8b..7983184 100644
--- a/media/libmedia/NdkMediaFormatPriv.cpp
+++ b/media/libmedia/NdkMediaFormatPriv.cpp
@@ -24,8 +24,6 @@
 #include <media/NdkMediaFormatPriv.h>
 #include <media/stagefright/foundation/AMessage.h>
 
-#include <jni.h>
-
 using namespace android;
 
 namespace android {
diff --git a/media/libmedia/TEST_MAPPING b/media/libmedia/TEST_MAPPING
new file mode 100644
index 0000000..65390ed
--- /dev/null
+++ b/media/libmedia/TEST_MAPPING
@@ -0,0 +1,6 @@
+// test_mapping for frameworks/av/media/libmedia
+{
+  "presubmit": [
+    { "name": "CodecListTest" }
+  ]
+}
diff --git a/media/libmedia/aidl/android/media/IResourceManagerService.aidl b/media/libmedia/aidl/android/media/IResourceManagerService.aidl
deleted file mode 100644
index 1b2d522..0000000
--- a/media/libmedia/aidl/android/media/IResourceManagerService.aidl
+++ /dev/null
@@ -1,105 +0,0 @@
-/**
- * Copyright (c) 2019, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media;
-
-import android.media.IResourceManagerClient;
-import android.media.MediaResourceParcel;
-import android.media.MediaResourcePolicyParcel;
-
-/**
- * ResourceManagerService interface that keeps track of media resource
- * owned by clients, and reclaims resources based on configured policies
- * when necessary.
- *
- * {@hide}
- */
-interface IResourceManagerService {
-    const @utf8InCpp String kPolicySupportsMultipleSecureCodecs
-            = "supports-multiple-secure-codecs";
-    const @utf8InCpp String kPolicySupportsSecureWithNonSecureCodec
-            = "supports-secure-with-non-secure-codec";
-
-    /**
-     * Configure the ResourceManagerService to adopted particular policies when
-     * managing the resources.
-     *
-     * @param policies an array of policies to be adopted.
-     */
-    void config(in MediaResourcePolicyParcel[] policies);
-
-    /**
-     * Add a client to a process with a list of resources.
-     *
-     * @param pid pid of the client.
-     * @param uid uid of the client.
-     * @param clientId an identifier that uniquely identifies the client within the pid.
-     * @param client interface for the ResourceManagerService to call the client.
-     * @param resources an array of resources to be added.
-     */
-    void addResource(
-            int pid,
-            int uid,
-            long clientId,
-            IResourceManagerClient client,
-            in MediaResourceParcel[] resources);
-
-    /**
-     * Remove the listed resources from a client.
-     *
-     * @param pid pid from which the list of resources will be removed.
-     * @param clientId clientId within the pid from which the list of resources will be removed.
-     * @param resources an array of resources to be removed from the client.
-     */
-    void removeResource(int pid, long clientId, in MediaResourceParcel[] resources);
-
-    /**
-     * Remove all resources from a client.
-     *
-     * @param pid pid from which the client's resources will be removed.
-     * @param clientId clientId within the pid that will be removed.
-     */
-    void removeClient(int pid, long clientId);
-
-    /**
-     * Tries to reclaim resource from processes with lower priority than the
-     * calling process according to the requested resources.
-     *
-     * @param callingPid pid of the calling process.
-     * @param resources an array of resources to be reclaimed.
-     *
-     * @return true if the reclaim was successful and false otherwise.
-     */
-    boolean reclaimResource(int callingPid, in MediaResourceParcel[] resources);
-
-    /**
-     * Override the pid of original calling process with the pid of the process
-     * who actually use the requested resources.
-     *
-     * @param originalPid pid of the original calling process.
-     * @param newPid pid of the actual process who use the resources.
-     *        remove existing override on originalPid if newPid is -1.
-     */
-    void overridePid(int originalPid, int newPid);
-
-    /**
-     * Mark a client for pending removal
-     *
-     * @param pid pid from which the client's resources will be removed.
-     * @param clientId clientId within the pid that will be removed.
-     */
-    void markClientForPendingRemoval(int pid, long clientId);
-}
diff --git a/media/libmedia/include/media/IMediaPlayer.h b/media/libmedia/include/media/IMediaPlayer.h
index a4c0ec6..3548a1e 100644
--- a/media/libmedia/include/media/IMediaPlayer.h
+++ b/media/libmedia/include/media/IMediaPlayer.h
@@ -59,6 +59,7 @@
     virtual status_t        setDataSource(int fd, int64_t offset, int64_t length) = 0;
     virtual status_t        setDataSource(const sp<IStreamSource>& source) = 0;
     virtual status_t        setDataSource(const sp<IDataSource>& source) = 0;
+    virtual status_t        setDataSource(const String8& rtpParams) = 0;
     virtual status_t        setVideoSurfaceTexture(
                                     const sp<IGraphicBufferProducer>& bufferProducer) = 0;
     virtual status_t        getBufferingSettings(
diff --git a/media/libmedia/include/media/IMediaPlayerService.h b/media/libmedia/include/media/IMediaPlayerService.h
index f2e2060..a4207eb 100644
--- a/media/libmedia/include/media/IMediaPlayerService.h
+++ b/media/libmedia/include/media/IMediaPlayerService.h
@@ -28,6 +28,8 @@
 #include <media/IMediaPlayerClient.h>
 #include <media/IMediaMetadataRetriever.h>
 
+#include <string>
+
 namespace android {
 
 class IMediaPlayer;
@@ -47,7 +49,8 @@
     virtual sp<IMediaRecorder> createMediaRecorder(const String16 &opPackageName) = 0;
     virtual sp<IMediaMetadataRetriever> createMetadataRetriever() = 0;
     virtual sp<IMediaPlayer> create(const sp<IMediaPlayerClient>& client,
-            audio_session_t audioSessionId = AUDIO_SESSION_ALLOCATE) = 0;
+            audio_session_t audioSessionId = AUDIO_SESSION_ALLOCATE,
+            const std::string opPackage = "") = 0;
     virtual sp<IMediaCodecList> getCodecList() const = 0;
 
     // Connects to a remote display.
diff --git a/media/libmedia/include/media/MediaResource.h b/media/libmedia/include/media/MediaResource.h
index e7362c1..4712528 100644
--- a/media/libmedia/include/media/MediaResource.h
+++ b/media/libmedia/include/media/MediaResource.h
@@ -35,13 +35,13 @@
     MediaResource() = delete;
     MediaResource(Type type, int64_t value);
     MediaResource(Type type, SubType subType, int64_t value);
-    MediaResource(Type type, const std::vector<int8_t> &id, int64_t value);
+    MediaResource(Type type, const std::vector<uint8_t> &id, int64_t value);
 
-    static MediaResource CodecResource(bool secure, bool video);
+    static MediaResource CodecResource(bool secure, bool video, int64_t instanceCount = 1);
     static MediaResource GraphicMemoryResource(int64_t value);
     static MediaResource CpuBoostResource();
     static MediaResource VideoBatteryResource();
-    static MediaResource DrmSessionResource(const std::vector<int8_t> &id, int64_t value);
+    static MediaResource DrmSessionResource(const std::vector<uint8_t> &id, int64_t value);
 };
 
 inline static const char *asString(MediaResource::Type i, const char *def = "??") {
diff --git a/media/libmedia/include/media/mediametadataretriever.h b/media/libmedia/include/media/mediametadataretriever.h
index 138a014..1fe6ffc 100644
--- a/media/libmedia/include/media/mediametadataretriever.h
+++ b/media/libmedia/include/media/mediametadataretriever.h
@@ -73,6 +73,7 @@
     METADATA_KEY_COLOR_RANGE     = 37,
     METADATA_KEY_SAMPLERATE      = 38,
     METADATA_KEY_BITS_PER_SAMPLE = 39,
+    METADATA_KEY_VIDEO_CODEC_MIME_TYPE = 40,
 
     // Add more here...
 };
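A brief sketch of reading the new key through the existing MediaMetadataRetriever::extractMetadata() path; the helper, the caller-provided file descriptor, and the error handling are assumptions, only the key constant comes from this change.

#include <string>

#include <media/mediametadataretriever.h>

using namespace android;

// Reads the video codec MIME type for an already-open media file descriptor.
static std::string videoCodecMimeType(int fd, int64_t offset, int64_t length) {
    MediaMetadataRetriever retriever;
    if (retriever.setDataSource(fd, offset, length) != OK) {
        return "";
    }
    const char *mime = retriever.extractMetadata(METADATA_KEY_VIDEO_CODEC_MIME_TYPE);
    return mime != nullptr ? std::string(mime) : std::string("");
}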
diff --git a/media/libmedia/include/media/mediaplayer.h b/media/libmedia/include/media/mediaplayer.h
index 2335c5a..71c0bc5 100644
--- a/media/libmedia/include/media/mediaplayer.h
+++ b/media/libmedia/include/media/mediaplayer.h
@@ -33,6 +33,8 @@
 #include <utils/KeyedVector.h>
 #include <utils/String8.h>
 
+#include <string>
+
 struct ANativeWindow;
 
 namespace android {
@@ -60,6 +62,7 @@
     MEDIA_META_DATA         = 202,
     MEDIA_DRM_INFO          = 210,
     MEDIA_TIME_DISCONTINUITY = 211,
+    MEDIA_IMS_RX_NOTICE     = 300,
     MEDIA_AUDIO_ROUTING_CHANGED = 10000,
 };
 
@@ -177,7 +180,10 @@
     KEY_PARAMETER_PLAYBACK_RATE_PERMILLE = 1300,                // set only
 
     // Set a Parcel containing the value of a parcelled Java AudioAttribute instance
-    KEY_PARAMETER_AUDIO_ATTRIBUTES = 1400                       // set only
+    KEY_PARAMETER_AUDIO_ATTRIBUTES = 1400,                       // set only
+
+    // Set a Parcel containing the values of RTP attribute
+    KEY_PARAMETER_RTP_ATTRIBUTES = 2000                       // set only
 };
 
 // Keep INVOKE_ID_* in sync with MediaPlayer.java.
@@ -205,7 +211,7 @@
                     public virtual IMediaDeathNotifier
 {
 public:
-    MediaPlayer();
+    MediaPlayer(const std::string opPackageName = "");
     ~MediaPlayer();
             void            died();
             void            disconnect();
@@ -217,6 +223,7 @@
 
             status_t        setDataSource(int fd, int64_t offset, int64_t length);
             status_t        setDataSource(const sp<IDataSource> &source);
+            status_t        setDataSource(const String8& rtpParams);
             status_t        setVideoSurfaceTexture(
                                     const sp<IGraphicBufferProducer>& bufferProducer);
             status_t        setListener(const sp<MediaPlayerListener>& listener);
@@ -308,6 +315,7 @@
     float                       mSendLevel;
     struct sockaddr_in          mRetransmitEndpoint;
     bool                        mRetransmitEndpointValid;
+    const std::string           mOpPackageName;
 };
 
 }; // namespace android
diff --git a/media/libmedia/include/media/mediarecorder.h b/media/libmedia/include/media/mediarecorder.h
index 6e2d94d..fbcdb28 100644
--- a/media/libmedia/include/media/mediarecorder.h
+++ b/media/libmedia/include/media/mediarecorder.h
@@ -291,6 +291,8 @@
     bool                        mIsOutputFileSet;
     Mutex                       mLock;
     Mutex                       mNotifyLock;
+
+    output_format               mOutputFormat;
 };
 
 };  // namespace android
diff --git a/media/libmedia/mediaplayer.cpp b/media/libmedia/mediaplayer.cpp
index 1fadc94..30c5006 100644
--- a/media/libmedia/mediaplayer.cpp
+++ b/media/libmedia/mediaplayer.cpp
@@ -41,7 +41,7 @@
 
 using media::VolumeShaper;
 
-MediaPlayer::MediaPlayer()
+MediaPlayer::MediaPlayer(const std::string opPackageName) : mOpPackageName(opPackageName)
 {
     ALOGV("constructor");
     mListener = NULL;
@@ -152,7 +152,7 @@
     if (url != NULL) {
         const sp<IMediaPlayerService> service(getMediaPlayerService());
         if (service != 0) {
-            sp<IMediaPlayer> player(service->create(this, mAudioSessionId));
+            sp<IMediaPlayer> player(service->create(this, mAudioSessionId, mOpPackageName));
             if ((NO_ERROR != doSetRetransmitEndpoint(player)) ||
                 (NO_ERROR != player->setDataSource(httpService, url, headers))) {
                 player.clear();
@@ -169,7 +169,7 @@
     status_t err = UNKNOWN_ERROR;
     const sp<IMediaPlayerService> service(getMediaPlayerService());
     if (service != 0) {
-        sp<IMediaPlayer> player(service->create(this, mAudioSessionId));
+        sp<IMediaPlayer> player(service->create(this, mAudioSessionId, mOpPackageName));
         if ((NO_ERROR != doSetRetransmitEndpoint(player)) ||
             (NO_ERROR != player->setDataSource(fd, offset, length))) {
             player.clear();
@@ -185,7 +185,7 @@
     status_t err = UNKNOWN_ERROR;
     const sp<IMediaPlayerService> service(getMediaPlayerService());
     if (service != 0) {
-        sp<IMediaPlayer> player(service->create(this, mAudioSessionId));
+        sp<IMediaPlayer> player(service->create(this, mAudioSessionId, mOpPackageName));
         if ((NO_ERROR != doSetRetransmitEndpoint(player)) ||
             (NO_ERROR != player->setDataSource(source))) {
             player.clear();
@@ -195,6 +195,22 @@
     return err;
 }
 
+status_t MediaPlayer::setDataSource(const String8& rtpParams)
+{
+    ALOGV("setDataSource(rtpParams)");
+    status_t err = UNKNOWN_ERROR;
+    const sp<IMediaPlayerService> service(getMediaPlayerService());
+    if (service != 0) {
+        sp<IMediaPlayer> player(service->create(this, mAudioSessionId, mOpPackageName));
+        if ((NO_ERROR != doSetRetransmitEndpoint(player)) ||
+            (NO_ERROR != player->setDataSource(rtpParams))) {
+            player.clear();
+        }
+        err = attachNewPlayer(player);
+    }
+    return err;
+}
+
 status_t MediaPlayer::invoke(const Parcel& request, Parcel *reply)
 {
     Mutex::Autolock _l(mLock);
@@ -943,6 +959,9 @@
     case MEDIA_META_DATA:
         ALOGV("Received timed metadata message");
         break;
+    case MEDIA_IMS_RX_NOTICE:
+        ALOGV("Received IMS Rx notice message");
+        break;
     default:
         ALOGV("unrecognized message: (%d, %d, %d)", msg, ext1, ext2);
         break;
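A minimal sketch of how a caller might exercise the pieces added above (the op-package-name constructor argument and the String8 RTP overload of setDataSource). The package name is a placeholder, and the rtpParams key/value syntax is not defined in this change.

#include <string>

#include <media/mediaplayer.h>
#include <utils/String8.h>

using namespace android;

// Creates a player tagged with an app-ops package name and points it at an
// RTP source described by rtpParams.
static sp<MediaPlayer> startRtpPlayback(const String8 &rtpParams) {
    sp<MediaPlayer> player = new MediaPlayer(std::string("com.example.rtpclient"));
    if (player->setDataSource(rtpParams) != NO_ERROR ||
        player->prepare() != NO_ERROR ||
        player->start() != NO_ERROR) {
        return nullptr;
    }
    return player;
}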
diff --git a/media/libmedia/mediarecorder.cpp b/media/libmedia/mediarecorder.cpp
index 70655d5..d9d1f25 100644
--- a/media/libmedia/mediarecorder.cpp
+++ b/media/libmedia/mediarecorder.cpp
@@ -244,6 +244,7 @@
         mCurrentState = MEDIA_RECORDER_ERROR;
         return ret;
     }
+    mOutputFormat = (output_format)of;
     mCurrentState = MEDIA_RECORDER_DATASOURCE_CONFIGURED;
     return ret;
 }
@@ -479,6 +480,13 @@
                            (MEDIA_RECORDER_PREPARED |
                             MEDIA_RECORDER_RECORDING |
                             MEDIA_RECORDER_ERROR));
+
+    // For RTP video, parameters can be set dynamically while recording.
+    if (isInvalidState) {
+        if (mCurrentState == MEDIA_RECORDER_RECORDING &&
+            mOutputFormat == OUTPUT_FORMAT_RTP_AVP)
+            isInvalidState = false;
+    }
     if (isInvalidState) {
         ALOGE("setParameters is called in an invalid state: %d", mCurrentState);
         return INVALID_OPERATION;
@@ -737,6 +745,7 @@
     mIsAudioEncoderSet = false;
     mIsVideoEncoderSet = false;
     mIsOutputFileSet   = false;
+    mOutputFormat      = OUTPUT_FORMAT_DEFAULT;
 }
 
 // Release should be OK in any state
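A sketch of what the new mOutputFormat bookkeeping enables: a recorder configured for OUTPUT_FORMAT_RTP_AVP now accepts setParameters() while recording, whereas other formats are still rejected as an invalid state. The parameter key below is purely illustrative.

#include <media/mediarecorder.h>
#include <utils/String8.h>

using namespace android;

// Assumes the recorder was configured with setOutputFormat(OUTPUT_FORMAT_RTP_AVP)
// and is currently in MEDIA_RECORDER_RECORDING.
static status_t tuneRtpStream(const sp<MediaRecorder> &recorder) {
    return recorder->setParameters(String8("hypothetical-rtp-key=1"));
}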
diff --git a/media/libmedia/tests/codeclist/Android.bp b/media/libmedia/tests/codeclist/Android.bp
new file mode 100644
index 0000000..a930d6e
--- /dev/null
+++ b/media/libmedia/tests/codeclist/Android.bp
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+cc_test {
+    name: "CodecListTest",
+    test_suites: ["device-tests"],
+    gtest: true,
+
+    srcs: [
+        "CodecListTest.cpp",
+    ],
+
+    shared_libs: [
+        "libbinder",
+        "liblog",
+        "libmedia_codeclist",
+        "libstagefright",
+        "libstagefright_foundation",
+        "libstagefright_xmlparser",
+        "libutils",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+
+    sanitize: {
+        cfi: true,
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+    },
+}
diff --git a/media/libmedia/tests/codeclist/CodecListTest.cpp b/media/libmedia/tests/codeclist/CodecListTest.cpp
new file mode 100644
index 0000000..bd2adf7
--- /dev/null
+++ b/media/libmedia/tests/codeclist/CodecListTest.cpp
@@ -0,0 +1,222 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "CodecListTest"
+#include <utils/Log.h>
+
+#include <gtest/gtest.h>
+
+#include <binder/Parcel.h>
+#include <media/stagefright/MediaCodecConstants.h>
+#include <media/stagefright/MediaCodecList.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/AString.h>
+#include <media/stagefright/xmlparser/MediaCodecsXmlParser.h>
+
+#define kSwCodecXmlPath "/apex/com.android.media.swcodec/etc/"
+
+using namespace android;
+
+struct CddReq {
+    CddReq(const char *type, bool encoder) {
+        mediaType = type;
+        isEncoder = encoder;
+    }
+
+    const char *mediaType;
+    bool isEncoder;
+};
+
+TEST(CodecListTest, CodecListSanityTest) {
+    sp<IMediaCodecList> list = MediaCodecList::getInstance();
+    ASSERT_NE(list, nullptr) << "Unable to get MediaCodecList instance.";
+    EXPECT_GT(list->countCodecs(), 0) << "No codecs in CodecList";
+    for (size_t i = 0; i < list->countCodecs(); ++i) {
+        sp<MediaCodecInfo> info = list->getCodecInfo(i);
+        ASSERT_NE(info, nullptr) << "CodecInfo is null";
+        ssize_t index = list->findCodecByName(info->getCodecName());
+        EXPECT_GE(index, 0) << "Wasn't able to find existing codec: " << info->getCodecName();
+    }
+}
+
+TEST(CodecListTest, CodecListByTypeTest) {
+    sp<IMediaCodecList> list = MediaCodecList::getInstance();
+    ASSERT_NE(list, nullptr) << "Unable to get MediaCodecList instance.";
+
+    std::vector<CddReq> cddReq{
+            // media type, isEncoder
+            CddReq(MIMETYPE_AUDIO_AAC, false),
+            CddReq(MIMETYPE_AUDIO_AAC, true),
+
+            CddReq(MIMETYPE_VIDEO_AVC, false),
+            CddReq(MIMETYPE_VIDEO_HEVC, false),
+            CddReq(MIMETYPE_VIDEO_MPEG4, false),
+            CddReq(MIMETYPE_VIDEO_VP8, false),
+            CddReq(MIMETYPE_VIDEO_VP9, false),
+
+            CddReq(MIMETYPE_VIDEO_AVC, true),
+            CddReq(MIMETYPE_VIDEO_VP8, true),
+    };
+
+    for (CddReq codecReq : cddReq) {
+        ssize_t index = list->findCodecByType(codecReq.mediaType, codecReq.isEncoder);
+        EXPECT_GE(index, 0) << "Wasn't able to find codec for media type: " << codecReq.mediaType
+                            << (codecReq.isEncoder ? " encoder" : " decoder");
+    }
+}
+
+TEST(CodecInfoTest, ListInfoTest) {
+    ALOGV("Compare CodecInfo with info in XML");
+    MediaCodecsXmlParser parser;
+    status_t status = parser.parseXmlFilesInSearchDirs();
+    ASSERT_EQ(status, OK) << "XML Parsing failed for default paths";
+
+    const std::vector<std::string> &xmlFiles = MediaCodecsXmlParser::getDefaultXmlNames();
+    const std::vector<std::string> &searchDirsApex{std::string(kSwCodecXmlPath)};
+    status = parser.parseXmlFilesInSearchDirs(xmlFiles, searchDirsApex);
+    ASSERT_EQ(status, OK) << "XML Parsing of " << kSwCodecXmlPath << " failed";
+
+    MediaCodecsXmlParser::CodecMap codecMap = parser.getCodecMap();
+
+    sp<IMediaCodecList> list = MediaCodecList::getInstance();
+    ASSERT_NE(list, nullptr) << "Unable to get MediaCodecList instance";
+
+    // Compare CodecMap from XML to CodecList
+    for (auto mapIter : codecMap) {
+        ssize_t index = list->findCodecByName(mapIter.first.c_str());
+        if (index < 0) {
+            std::cout << "[   WARN   ] " << mapIter.first << " not found in CodecList \n";
+            continue;
+        }
+
+        sp<MediaCodecInfo> info = list->getCodecInfo(index);
+        ASSERT_NE(info, nullptr) << "CodecInfo is null";
+
+        MediaCodecsXmlParser::CodecProperties codecProperties = mapIter.second;
+        ASSERT_EQ(codecProperties.isEncoder, info->isEncoder()) << "Encoder property mismatch";
+
+        ALOGV("codec name: %s", info->getCodecName());
+        ALOGV("codec rank: %d", info->getRank());
+        ALOGV("codec ownername: %s", info->getOwnerName());
+        ALOGV("codec isEncoder: %d", info->isEncoder());
+
+        ALOGV("attributeFlags: kFlagIsHardwareAccelerated, kFlagIsSoftwareOnly, kFlagIsVendor, "
+              "kFlagIsEncoder");
+        std::bitset<4> attr(info->getAttributes());
+        ALOGV("codec attributes: %s", attr.to_string().c_str());
+
+        Vector<AString> mediaTypes;
+        info->getSupportedMediaTypes(&mediaTypes);
+        ALOGV("supported media types count: %zu", mediaTypes.size());
+        ASSERT_FALSE(mediaTypes.isEmpty())
+                << "no media type supported by codec: " << info->getCodecName();
+
+        MediaCodecsXmlParser::TypeMap typeMap = codecProperties.typeMap;
+        for (auto mediaType : mediaTypes) {
+            ALOGV("codec mediaTypes: %s", mediaType.c_str());
+            auto searchTypeMap = typeMap.find(mediaType.c_str());
+            ASSERT_NE(searchTypeMap, typeMap.end())
+                    << "CodecList doesn't contain codec media type: " << mediaType.c_str();
+            MediaCodecsXmlParser::AttributeMap attributeMap = searchTypeMap->second;
+
+            const sp<MediaCodecInfo::Capabilities> &capabilities =
+                    info->getCapabilitiesFor(mediaType.c_str());
+
+            Vector<uint32_t> colorFormats;
+            capabilities->getSupportedColorFormats(&colorFormats);
+            for (auto colorFormat : colorFormats) {
+                ALOGV("supported color formats: %d", colorFormat);
+            }
+
+            Vector<MediaCodecInfo::ProfileLevel> profileLevels;
+            capabilities->getSupportedProfileLevels(&profileLevels);
+            if (!profileLevels.empty()) {
+                ALOGV("supported profilelevel for media type: %s", mediaType.c_str());
+            }
+            for (auto profileLevel : profileLevels) {
+                ALOGV("profile: %d, level: %d", profileLevel.mProfile, profileLevel.mLevel);
+            }
+
+            sp<AMessage> details = capabilities->getDetails();
+            ASSERT_NE(details, nullptr) << "Details in codec capabilities is null";
+            ALOGV("no. of entries in details: %zu", details->countEntries());
+
+            for (size_t idxDetail = 0; idxDetail < details->countEntries(); idxDetail++) {
+                AMessage::Type type;
+                const char *name = details->getEntryNameAt(idxDetail, &type);
+                ALOGV("details entry name: %s", name);
+                AMessage::ItemData itemData = details->getEntryAt(idxDetail);
+                switch (type) {
+                    case AMessage::kTypeInt32:
+                        int32_t val32;
+                        if (itemData.find(&val32)) {
+                            ALOGV("entry int val: %d", val32);
+                            auto searchAttr = attributeMap.find(name);
+                            if (searchAttr == attributeMap.end()) {
+                                ALOGW("Parser doesn't have key: %s", name);
+                            } else if (stoi(searchAttr->second) != val32) {
+                                ALOGW("Values didn't match for key: %s", name);
+                                ALOGV("Values act/exp: %d / %d", val32, stoi(searchAttr->second));
+                            }
+                        }
+                        break;
+                    case AMessage::kTypeString:
+                        if (AString valStr; itemData.find(&valStr)) {
+                            ALOGV("entry str val: %s", valStr.c_str());
+                            auto searchAttr = attributeMap.find(name);
+                            if (searchAttr == attributeMap.end()) {
+                                ALOGW("Parser doesn't have key: %s", name);
+                            } else if (searchAttr->second != valStr.c_str()) {
+                                ALOGW("Values didn't match for key: %s", name);
+                                ALOGV("Values act/exp: %s / %s", valStr.c_str(),
+                                      searchAttr->second.c_str());
+                            }
+                        }
+                        break;
+                    default:
+                        ALOGV("data type: %d shouldn't be present in details", type);
+                        break;
+                }
+            }
+        }
+
+        Parcel *codecInfoParcel = new Parcel();
+        ASSERT_NE(codecInfoParcel, nullptr) << "Unable to create parcel";
+
+        status_t status = info->writeToParcel(codecInfoParcel);
+        ASSERT_EQ(status, OK) << "Writing to parcel failed";
+
+        codecInfoParcel->setDataPosition(0);
+        sp<MediaCodecInfo> parcelCodecInfo = info->FromParcel(*codecInfoParcel);
+        ASSERT_NE(parcelCodecInfo, nullptr) << "CodecInfo from parcel is null";
+        delete codecInfoParcel;
+
+        EXPECT_STREQ(info->getCodecName(), parcelCodecInfo->getCodecName())
+                << "Returned codec name in info doesn't match";
+        EXPECT_EQ(info->getRank(), parcelCodecInfo->getRank())
+                << "Returned component rank in info doesn't match";
+    }
+}
+
+TEST(CodecListTest, CodecListGlobalSettingsTest) {
+    sp<IMediaCodecList> list = MediaCodecList::getInstance();
+    ASSERT_NE(list, nullptr) << "Unable to get MediaCodecList instance";
+
+    sp<AMessage> globalSettings = list->getGlobalSettings();
+    ASSERT_NE(globalSettings, nullptr) << "GlobalSettings AMessage is null";
+    ALOGV("global settings: %s", globalSettings->debugString(0).c_str());
+}
diff --git a/media/libmedia/xsd/vts/Android.mk b/media/libmedia/xsd/vts/Android.mk
deleted file mode 100644
index 52c3779..0000000
--- a/media/libmedia/xsd/vts/Android.mk
+++ /dev/null
@@ -1,22 +0,0 @@
-#
-# Copyright (C) 2019 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-LOCAL_PATH := $(call my-dir)
-
-include $(CLEAR_VARS)
-
-LOCAL_MODULE := VtsValidateMediaProfiles
-include test/vts/tools/build/Android.host_config.mk
diff --git a/media/libmedia/xsd/vts/AndroidTest.xml b/media/libmedia/xsd/vts/AndroidTest.xml
deleted file mode 100644
index e68721b..0000000
--- a/media/libmedia/xsd/vts/AndroidTest.xml
+++ /dev/null
@@ -1,30 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<!-- Copyright (C) 2019 The Android Open Source Project
-
-     Licensed under the Apache License, Version 2.0 (the "License");
-     you may not use this file except in compliance with the License.
-     You may obtain a copy of the License at
-
-          http://www.apache.org/licenses/LICENSE-2.0
-
-     Unless required by applicable law or agreed to in writing, software
-     distributed under the License is distributed on an "AS IS" BASIS,
-     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     See the License for the specific language governing permissions and
-     limitations under the License.
--->
-<configuration description="Config for VTS VtsValidateMediaProfiles.">
-    <option name="config-descriptor:metadata" key="plan" value="vts-treble" />
-    <target_preparer class="com.android.compatibility.common.tradefed.targetprep.VtsFilePusher">
-        <option name="abort-on-push-failure" value="false"/>
-        <option name="push-group" value="HostDrivenTest.push"/>
-        <option name="push" value="DATA/etc/media_profiles.xsd->/data/local/tmp/media_profiles.xsd"/>
-    </target_preparer>
-    <test class="com.android.tradefed.testtype.VtsMultiDeviceTest">
-        <option name="test-module-name" value="VtsValidateMediaProfiles"/>
-        <option name="binary-test-source" value="_32bit::DATA/nativetest/vts_mediaProfiles_validate_test/vts_mediaProfiles_validate_test" />
-        <option name="binary-test-source" value="_64bit::DATA/nativetest64/vts_mediaProfiles_validate_test/vts_mediaProfiles_validate_test" />
-        <option name="binary-test-type" value="gtest"/>
-        <option name="test-timeout" value="30s"/>
-    </test>
-</configuration>
diff --git a/media/libmediahelper/Android.bp b/media/libmediahelper/Android.bp
index 6fcbc7b..0779a8e 100644
--- a/media/libmediahelper/Android.bp
+++ b/media/libmediahelper/Android.bp
@@ -3,6 +3,12 @@
     vendor_available: true,
     min_sdk_version: "29",
     export_include_dirs: ["include"],
+    host_supported: true,
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
 }
 
 cc_library {
@@ -12,7 +18,11 @@
         enabled: true,
     },
     double_loadable: true,
-    srcs: ["AudioParameter.cpp", "TypeConverter.cpp"],
+    srcs: [
+        "AudioParameter.cpp",
+        "AudioSanitizer.cpp",
+        "TypeConverter.cpp",
+    ],
     cflags: [
         "-Werror",
         "-Wextra",
@@ -27,4 +37,10 @@
         "libmedia_helper_headers",
     ],
     clang: true,
+    host_supported: true,
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
 }
diff --git a/media/libmediahelper/AudioSanitizer.cpp b/media/libmediahelper/AudioSanitizer.cpp
new file mode 100644
index 0000000..44ca956
--- /dev/null
+++ b/media/libmediahelper/AudioSanitizer.cpp
@@ -0,0 +1,116 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <media/AudioSanitizer.h>
+
+namespace android {
+
+/** returns true if string overflow was prevented by zero termination */
+template <size_t size>
+bool preventStringOverflow(char (&s)[size]) {
+    if (strnlen(s, size) < size) return false;
+    s[size - 1] = '\0';
+    return true;
+}
+
+status_t safetyNetLog(status_t status, const char *bugNumber) {
+    if (status != NO_ERROR && bugNumber != nullptr) {
+        android_errorWriteLog(0x534e4554, bugNumber); // SafetyNet logging
+    }
+    return status;
+}
+
+status_t AudioSanitizer::sanitizeAudioAttributes(
+        audio_attributes_t *attr, const char *bugNumber)
+{
+    status_t status = NO_ERROR;
+    const size_t tagsMaxSize = AUDIO_ATTRIBUTES_TAGS_MAX_SIZE;
+    if (strnlen(attr->tags, tagsMaxSize) >= tagsMaxSize) {
+        status = BAD_VALUE;
+    }
+    attr->tags[tagsMaxSize - 1] = '\0';
+    return safetyNetLog(status, bugNumber);
+}
+
+/** returns BAD_VALUE if sanitization was required. */
+status_t AudioSanitizer::sanitizeEffectDescriptor(
+        effect_descriptor_t *desc, const char *bugNumber)
+{
+    status_t status = NO_ERROR;
+    if (preventStringOverflow(desc->name)
+        | /* always */ preventStringOverflow(desc->implementor)) {
+        status = BAD_VALUE;
+    }
+    return safetyNetLog(status, bugNumber);
+}
+
+/** returns BAD_VALUE if sanitization was required. */
+status_t AudioSanitizer::sanitizeAudioPortConfig(
+        struct audio_port_config *config, const char *bugNumber)
+{
+    status_t status = NO_ERROR;
+    if (config->type == AUDIO_PORT_TYPE_DEVICE &&
+        preventStringOverflow(config->ext.device.address)) {
+        status = BAD_VALUE;
+    }
+    return safetyNetLog(status, bugNumber);
+}
+
+/** returns BAD_VALUE if sanitization was required. */
+status_t AudioSanitizer::sanitizeAudioPort(
+        struct audio_port *port, const char *bugNumber)
+{
+    status_t status = NO_ERROR;
+    if (preventStringOverflow(port->name)) {
+        status = BAD_VALUE;
+    }
+    if (sanitizeAudioPortConfig(&port->active_config) != NO_ERROR) {
+        status = BAD_VALUE;
+    }
+    if (port->type == AUDIO_PORT_TYPE_DEVICE &&
+        preventStringOverflow(port->ext.device.address)) {
+        status = BAD_VALUE;
+    }
+    return safetyNetLog(status, bugNumber);
+}
+
+/** returns BAD_VALUE if sanitization was required. */
+status_t AudioSanitizer::sanitizeAudioPatch(
+        struct audio_patch *patch, const char *bugNumber)
+{
+    status_t status = NO_ERROR;
+    if (patch->num_sources > AUDIO_PATCH_PORTS_MAX) {
+        patch->num_sources = AUDIO_PATCH_PORTS_MAX;
+        status = BAD_VALUE;
+    }
+    if (patch->num_sinks > AUDIO_PATCH_PORTS_MAX) {
+        patch->num_sinks = AUDIO_PATCH_PORTS_MAX;
+        status = BAD_VALUE;
+    }
+    for (size_t i = 0; i < patch->num_sources; i++) {
+        if (sanitizeAudioPortConfig(&patch->sources[i]) != NO_ERROR) {
+            status = BAD_VALUE;
+        }
+    }
+    for (size_t i = 0; i < patch->num_sinks; i++) {
+        if (sanitizeAudioPortConfig(&patch->sinks[i]) != NO_ERROR) {
+            status = BAD_VALUE;
+        }
+    }
+    return safetyNetLog(status, bugNumber);
+}
+
+}; // namespace android
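A typical call site for the new sanitizers above, assuming AudioSanitizer.h exposes them as static helpers: clamp counts and zero-terminate embedded strings in caller-supplied Binder data before using it. The bug-number string is a placeholder for a real SafetyNet id.

#include <system/audio.h>
#include <utils/Errors.h>

#include <media/AudioSanitizer.h>

using namespace android;

// Returns BAD_VALUE (after SafetyNet logging) if the incoming patch had to be
// clamped or truncated; otherwise it is safe to forward.
static status_t checkIncomingPatch(struct audio_patch *patch) {
    status_t status = AudioSanitizer::sanitizeAudioPatch(patch, "000000000");
    if (status != NO_ERROR) {
        return status;
    }
    // safe to hand off to the audio HAL from here
    return NO_ERROR;
}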
diff --git a/media/libmediahelper/TEST_MAPPING b/media/libmediahelper/TEST_MAPPING
new file mode 100644
index 0000000..f9594bd
--- /dev/null
+++ b/media/libmediahelper/TEST_MAPPING
@@ -0,0 +1,7 @@
+{
+  "presubmit": [
+    {
+      "name": "libmedia_helper_tests"
+    }
+  ]
+}
diff --git a/media/libmediahelper/TypeConverter.cpp b/media/libmediahelper/TypeConverter.cpp
index 6382ce4..d3a517f 100644
--- a/media/libmediahelper/TypeConverter.cpp
+++ b/media/libmediahelper/TypeConverter.cpp
@@ -18,307 +18,9 @@
 
 namespace android {
 
-#define MAKE_STRING_FROM_ENUM(string) { #string, string }
+#define MAKE_STRING_FROM_ENUM(enumval) { #enumval, enumval }
 #define TERMINATOR { .literal = nullptr }
 
-template <>
-const OutputDeviceConverter::Table OutputDeviceConverter::mTable[] = {
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_NONE),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_EARPIECE),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_SPEAKER),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_SPEAKER_SAFE),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_WIRED_HEADSET),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_WIRED_HEADPHONE),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_BLUETOOTH_SCO),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_ALL_SCO),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_BLUETOOTH_A2DP),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_ALL_A2DP),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_AUX_DIGITAL),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_HDMI),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_ANLG_DOCK_HEADSET),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_USB_ACCESSORY),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_USB_DEVICE),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_ALL_USB),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_REMOTE_SUBMIX),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_TELEPHONY_TX),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_LINE),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_HDMI_ARC),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_SPDIF),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_FM),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_AUX_LINE),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_IP),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_BUS),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_PROXY),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_USB_HEADSET),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_HEARING_AID),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_ECHO_CANCELLER),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_DEFAULT),
-    // STUB must be after DEFAULT, so the latter is picked up by toString first.
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_STUB),
-    TERMINATOR
-};
-
-template <>
-const InputDeviceConverter::Table InputDeviceConverter::mTable[] = {
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_NONE),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_COMMUNICATION),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_AMBIENT),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_BUILTIN_MIC),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_ALL_SCO),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_WIRED_HEADSET),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_AUX_DIGITAL),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_HDMI),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_HDMI_ARC),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_TELEPHONY_RX),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_VOICE_CALL),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_BACK_MIC),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_REMOTE_SUBMIX),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_ANLG_DOCK_HEADSET),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_DGTL_DOCK_HEADSET),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_USB_ACCESSORY),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_USB_DEVICE),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_ALL_USB),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_FM_TUNER),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_TV_TUNER),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_LINE),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_SPDIF),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_BLUETOOTH_A2DP),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_LOOPBACK),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_IP),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_BUS),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_PROXY),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_USB_HEADSET),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_BLUETOOTH_BLE),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_ECHO_REFERENCE),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_DEFAULT),
-    // STUB must be after DEFAULT, so the latter is picked up by toString first.
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_STUB),
-    TERMINATOR
-};
-
-
-template <>
-const OutputFlagConverter::Table OutputFlagConverter::mTable[] = {
-    MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_NONE),
-    MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_DIRECT),
-    MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_PRIMARY),
-    MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_FAST),
-    MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_DEEP_BUFFER),
-    MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD),
-    MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_NON_BLOCKING),
-    MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_HW_AV_SYNC),
-    MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_TTS),
-    MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_RAW),
-    MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_SYNC),
-    MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_IEC958_NONAUDIO),
-    MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_DIRECT_PCM),
-    MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_MMAP_NOIRQ),
-    MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_VOIP_RX),
-    MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_INCALL_MUSIC),
-    TERMINATOR
-};
-
-
-template <>
-const InputFlagConverter::Table InputFlagConverter::mTable[] = {
-    MAKE_STRING_FROM_ENUM(AUDIO_INPUT_FLAG_NONE),
-    MAKE_STRING_FROM_ENUM(AUDIO_INPUT_FLAG_FAST),
-    MAKE_STRING_FROM_ENUM(AUDIO_INPUT_FLAG_HW_HOTWORD),
-    MAKE_STRING_FROM_ENUM(AUDIO_INPUT_FLAG_RAW),
-    MAKE_STRING_FROM_ENUM(AUDIO_INPUT_FLAG_SYNC),
-    MAKE_STRING_FROM_ENUM(AUDIO_INPUT_FLAG_MMAP_NOIRQ),
-    MAKE_STRING_FROM_ENUM(AUDIO_INPUT_FLAG_VOIP_TX),
-    MAKE_STRING_FROM_ENUM(AUDIO_INPUT_FLAG_HW_AV_SYNC),
-    MAKE_STRING_FROM_ENUM(AUDIO_INPUT_FLAG_DIRECT),
-    TERMINATOR
-};
-
-
-template <>
-const FormatConverter::Table FormatConverter::mTable[] = {
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_PCM_16_BIT),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_PCM_8_BIT),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_PCM_32_BIT),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_PCM_8_24_BIT),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_PCM_FLOAT),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_PCM_24_BIT_PACKED),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_MP3),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AMR_NB),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AMR_WB),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_MAIN),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_LC),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_SSR),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_LTP),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_HE_V1),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_SCALABLE),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ERLC),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_LD),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_HE_V2),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ELD),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_XHE),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ADTS_MAIN),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ADTS_LC),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ADTS_SSR),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ADTS_LTP),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ADTS_HE_V1),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ADTS_SCALABLE),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ADTS_ERLC),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ADTS_LD),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ADTS_HE_V2),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ADTS_ELD),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ADTS_XHE),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_VORBIS),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_HE_AAC_V1),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_HE_AAC_V2),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_OPUS),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AC3),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_E_AC3),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_DTS),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_DTS_HD),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_IEC61937),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_DOLBY_TRUEHD),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_EVRC),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_EVRCB),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_EVRCWB),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_EVRCNW),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ADIF),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_WMA),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_WMA_PRO),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AMR_WB_PLUS),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_MP2),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_QCELP),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_DSD),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_FLAC),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_ALAC),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_APE),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ADTS),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_SBC),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_APTX),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_APTX_HD),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AC4),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_LDAC),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_MAT),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_E_AC3_JOC),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_MAT_1_0),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_MAT_2_0),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_MAT_2_1),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_LATM),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_LATM_LC),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_LATM_HE_V1),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_LATM_HE_V2),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_CELT),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_APTX_ADAPTIVE),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_LHDC),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_LHDC_LL),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_APTX_TWSP),
-    TERMINATOR
-};
-
-
-template <>
-const OutputChannelConverter::Table OutputChannelConverter::mTable[] = {
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_MONO),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_STEREO),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_2POINT1),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_2POINT0POINT2),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_2POINT1POINT2),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_TRI),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_TRI_BACK),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_3POINT1),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_3POINT0POINT2),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_3POINT1POINT2),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_QUAD),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_QUAD_BACK),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_QUAD_SIDE),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_SURROUND),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_PENTA),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_5POINT1),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_5POINT1_BACK),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_5POINT1_SIDE),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_5POINT1POINT2),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_5POINT1POINT4),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_6POINT1),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_7POINT1),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_7POINT1POINT2),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_7POINT1POINT4),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_HAPTIC_A),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_MONO_HAPTIC_A),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_STEREO_HAPTIC_A),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_HAPTIC_AB),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_MONO_HAPTIC_AB),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_STEREO_HAPTIC_AB),
-    TERMINATOR
-};
-
-
-template <>
-const InputChannelConverter::Table InputChannelConverter::mTable[] = {
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_IN_MONO),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_IN_STEREO),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_IN_FRONT_BACK),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_IN_6),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_IN_2POINT0POINT2),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_IN_2POINT1POINT2),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_IN_3POINT0POINT2),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_IN_3POINT1POINT2),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_IN_5POINT1),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_IN_VOICE_UPLINK_MONO),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_IN_VOICE_DNLINK_MONO),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_IN_VOICE_CALL_MONO),
-    TERMINATOR
-};
-
-template <>
-const ChannelIndexConverter::Table ChannelIndexConverter::mTable[] = {
-    {"AUDIO_CHANNEL_INDEX_MASK_1", static_cast<audio_channel_mask_t>(AUDIO_CHANNEL_INDEX_MASK_1)},
-    {"AUDIO_CHANNEL_INDEX_MASK_2", static_cast<audio_channel_mask_t>(AUDIO_CHANNEL_INDEX_MASK_2)},
-    {"AUDIO_CHANNEL_INDEX_MASK_3", static_cast<audio_channel_mask_t>(AUDIO_CHANNEL_INDEX_MASK_3)},
-    {"AUDIO_CHANNEL_INDEX_MASK_4", static_cast<audio_channel_mask_t>(AUDIO_CHANNEL_INDEX_MASK_4)},
-    {"AUDIO_CHANNEL_INDEX_MASK_5", static_cast<audio_channel_mask_t>(AUDIO_CHANNEL_INDEX_MASK_5)},
-    {"AUDIO_CHANNEL_INDEX_MASK_6", static_cast<audio_channel_mask_t>(AUDIO_CHANNEL_INDEX_MASK_6)},
-    {"AUDIO_CHANNEL_INDEX_MASK_7", static_cast<audio_channel_mask_t>(AUDIO_CHANNEL_INDEX_MASK_7)},
-    {"AUDIO_CHANNEL_INDEX_MASK_8", static_cast<audio_channel_mask_t>(AUDIO_CHANNEL_INDEX_MASK_8)},
-    TERMINATOR
-};
-
-
-template <>
-const GainModeConverter::Table GainModeConverter::mTable[] = {
-    MAKE_STRING_FROM_ENUM(AUDIO_GAIN_MODE_JOINT),
-    MAKE_STRING_FROM_ENUM(AUDIO_GAIN_MODE_CHANNELS),
-    MAKE_STRING_FROM_ENUM(AUDIO_GAIN_MODE_RAMP),
-    TERMINATOR
-};
-
-
-template <>
-const StreamTypeConverter::Table StreamTypeConverter::mTable[] = {
-    MAKE_STRING_FROM_ENUM(AUDIO_STREAM_DEFAULT),
-    MAKE_STRING_FROM_ENUM(AUDIO_STREAM_VOICE_CALL),
-    MAKE_STRING_FROM_ENUM(AUDIO_STREAM_SYSTEM),
-    MAKE_STRING_FROM_ENUM(AUDIO_STREAM_RING),
-    MAKE_STRING_FROM_ENUM(AUDIO_STREAM_MUSIC),
-    MAKE_STRING_FROM_ENUM(AUDIO_STREAM_ALARM),
-    MAKE_STRING_FROM_ENUM(AUDIO_STREAM_NOTIFICATION),
-    MAKE_STRING_FROM_ENUM(AUDIO_STREAM_BLUETOOTH_SCO ),
-    MAKE_STRING_FROM_ENUM(AUDIO_STREAM_ENFORCED_AUDIBLE),
-    MAKE_STRING_FROM_ENUM(AUDIO_STREAM_DTMF),
-    MAKE_STRING_FROM_ENUM(AUDIO_STREAM_TTS),
-    MAKE_STRING_FROM_ENUM(AUDIO_STREAM_ACCESSIBILITY),
-    MAKE_STRING_FROM_ENUM(AUDIO_STREAM_ASSISTANT),
-    MAKE_STRING_FROM_ENUM(AUDIO_STREAM_REROUTING),
-    MAKE_STRING_FROM_ENUM(AUDIO_STREAM_PATCH),
-    MAKE_STRING_FROM_ENUM(AUDIO_STREAM_CALL_ASSISTANT),
-    TERMINATOR
-};
-
 template<>
 const AudioModeConverter::Table AudioModeConverter::mTable[] = {
     MAKE_STRING_FROM_ENUM(AUDIO_MODE_INVALID),
@@ -331,62 +33,6 @@
     TERMINATOR
 };
 
-template<>
-const AudioContentTypeConverter::Table AudioContentTypeConverter::mTable[] = {
-    MAKE_STRING_FROM_ENUM(AUDIO_CONTENT_TYPE_UNKNOWN),
-    MAKE_STRING_FROM_ENUM(AUDIO_CONTENT_TYPE_SPEECH),
-    MAKE_STRING_FROM_ENUM(AUDIO_CONTENT_TYPE_MUSIC),
-    MAKE_STRING_FROM_ENUM(AUDIO_CONTENT_TYPE_MOVIE),
-    MAKE_STRING_FROM_ENUM(AUDIO_CONTENT_TYPE_SONIFICATION),
-    TERMINATOR
-};
-
-template <>
-const UsageTypeConverter::Table UsageTypeConverter::mTable[] = {
-    MAKE_STRING_FROM_ENUM(AUDIO_USAGE_UNKNOWN),
-    MAKE_STRING_FROM_ENUM(AUDIO_USAGE_MEDIA),
-    MAKE_STRING_FROM_ENUM(AUDIO_USAGE_VOICE_COMMUNICATION),
-    MAKE_STRING_FROM_ENUM(AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING),
-    MAKE_STRING_FROM_ENUM(AUDIO_USAGE_ALARM),
-    MAKE_STRING_FROM_ENUM(AUDIO_USAGE_NOTIFICATION),
-    MAKE_STRING_FROM_ENUM(AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE),
-    MAKE_STRING_FROM_ENUM(AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST),
-    MAKE_STRING_FROM_ENUM(AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT),
-    MAKE_STRING_FROM_ENUM(AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED),
-    MAKE_STRING_FROM_ENUM(AUDIO_USAGE_NOTIFICATION_EVENT),
-    MAKE_STRING_FROM_ENUM(AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY),
-    MAKE_STRING_FROM_ENUM(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE),
-    MAKE_STRING_FROM_ENUM(AUDIO_USAGE_ASSISTANCE_SONIFICATION),
-    MAKE_STRING_FROM_ENUM(AUDIO_USAGE_GAME),
-    MAKE_STRING_FROM_ENUM(AUDIO_USAGE_VIRTUAL_SOURCE),
-    MAKE_STRING_FROM_ENUM(AUDIO_USAGE_ASSISTANT),
-    MAKE_STRING_FROM_ENUM(AUDIO_USAGE_CALL_ASSISTANT),
-    MAKE_STRING_FROM_ENUM(AUDIO_USAGE_EMERGENCY),
-    MAKE_STRING_FROM_ENUM(AUDIO_USAGE_SAFETY),
-    MAKE_STRING_FROM_ENUM(AUDIO_USAGE_VEHICLE_STATUS),
-    MAKE_STRING_FROM_ENUM(AUDIO_USAGE_ANNOUNCEMENT),
-    TERMINATOR
-};
-
-template <>
-const SourceTypeConverter::Table SourceTypeConverter::mTable[] = {
-    MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_DEFAULT),
-    MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_MIC),
-    MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_VOICE_UPLINK),
-    MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_VOICE_DOWNLINK),
-    MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_VOICE_CALL),
-    MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_CAMCORDER),
-    MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_VOICE_RECOGNITION),
-    MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_VOICE_COMMUNICATION),
-    MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_REMOTE_SUBMIX),
-    MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_UNPROCESSED),
-    MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_VOICE_PERFORMANCE),
-    MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_ECHO_REFERENCE),
-    MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_FM_TUNER),
-    MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_HOTWORD),
-    TERMINATOR
-};
-
 template <>
 const AudioFlagConverter::Table AudioFlagConverter::mTable[] = {
     MAKE_STRING_FROM_ENUM(AUDIO_FLAG_NONE),
@@ -409,6 +55,7 @@
 
 template class TypeConverter<OutputDeviceTraits>;
 template class TypeConverter<InputDeviceTraits>;
+template class TypeConverter<DeviceTraits>;
 template class TypeConverter<OutputFlagTraits>;
 template class TypeConverter<InputFlagTraits>;
 template class TypeConverter<FormatTraits>;
@@ -422,11 +69,6 @@
 template class TypeConverter<SourceTraits>;
 template class TypeConverter<AudioFlagTraits>;
 
-bool deviceFromString(const std::string& literalDevice, audio_devices_t& device) {
-    return InputDeviceConverter::fromString(literalDevice, device) ||
-            OutputDeviceConverter::fromString(literalDevice, device);
-}
-
 SampleRateTraits::Collection samplingRatesFromString(
         const std::string &samplingRates, const char *del)
 {
@@ -446,21 +88,20 @@
 audio_format_t formatFromString(const std::string &literalFormat, audio_format_t defaultFormat)
 {
     audio_format_t format;
-    if (literalFormat.empty()) {
-        return defaultFormat;
+    if (!literalFormat.empty() && FormatConverter::fromString(literalFormat, format)) {
+        return format;
     }
-    FormatConverter::fromString(literalFormat, format);
-    return format;
+    return defaultFormat;
 }
 
 audio_channel_mask_t channelMaskFromString(const std::string &literalChannels)
 {
     audio_channel_mask_t channels;
-    if (!OutputChannelConverter::fromString(literalChannels, channels) &&
-            !InputChannelConverter::fromString(literalChannels, channels)) {
-        return AUDIO_CHANNEL_INVALID;
+    if (!literalChannels.empty() &&
+            audio_channel_mask_from_string(literalChannels.c_str(), &channels)) {
+        return channels;
     }
-    return channels;
+    return AUDIO_CHANNEL_INVALID;
 }
 
 ChannelTraits::Collection channelMasksFromString(
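Note on the rewritten helpers above: formatFromString() now returns a parsed value only when the literal is non-empty and recognized, and channelMaskFromString() delegates to audio_channel_mask_from_string(); every other case falls back to the supplied default or AUDIO_CHANNEL_INVALID instead of returning an uninitialized value. A minimal caller sketch (the literals are illustrative, not taken from this change):

    audio_format_t fmt = formatFromString("AUDIO_FORMAT_AAC_LC", AUDIO_FORMAT_DEFAULT);
    audio_format_t bad = formatFromString("NOT_A_FORMAT", AUDIO_FORMAT_DEFAULT);   // == AUDIO_FORMAT_DEFAULT
    audio_channel_mask_t out = channelMaskFromString("AUDIO_CHANNEL_OUT_STEREO");
    audio_channel_mask_t inv = channelMaskFromString("");                          // == AUDIO_CHANNEL_INVALID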
diff --git a/media/libmediahelper/include/media/AudioSanitizer.h b/media/libmediahelper/include/media/AudioSanitizer.h
new file mode 100644
index 0000000..1475c7b
--- /dev/null
+++ b/media/libmediahelper/include/media/AudioSanitizer.h
@@ -0,0 +1,47 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_AUDIO_SANITIZER_H_
+#define ANDROID_AUDIO_SANITIZER_H_
+
+#include <system/audio.h>
+#include <system/audio_effect.h>
+#include <utils/Errors.h>
+#include <utils/Log.h>
+
+namespace android {
+
+class AudioSanitizer {
+public:
+    static status_t sanitizeAudioAttributes(
+            audio_attributes_t *attr, const char *bugNumber = nullptr);
+
+    static status_t sanitizeEffectDescriptor(
+            effect_descriptor_t *desc, const char *bugNumber = nullptr);
+
+    static status_t sanitizeAudioPortConfig(
+            struct audio_port_config *config, const char *bugNumber = nullptr);
+
+    static status_t sanitizeAudioPort(
+            struct audio_port *port, const char *bugNumber = nullptr);
+
+    static status_t sanitizeAudioPatch(
+            struct audio_patch *patch, const char *bugNumber = nullptr);
+};
+
+}; // namespace android
+
+#endif  /*ANDROID_AUDIO_SANITIZER_H_*/
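AudioSanitizer is new in this change and the header only declares the entry points, so the snippet below is a hypothetical caller (handleConfig and the bug tag are made up) showing the intended pattern of validating Binder-supplied structures before use:

    #include <media/AudioSanitizer.h>

    // Hypothetical sketch: sanitize an audio_port_config received over Binder
    // before passing it on. "b/0" is a placeholder bug tag.
    android::status_t handleConfig(struct audio_port_config *config) {
        const android::status_t st =
                android::AudioSanitizer::sanitizeAudioPortConfig(config, "b/0");
        if (st != android::NO_ERROR) {
            return st;  // reject malformed input instead of propagating it
        }
        // *config is safe to use from here on.
        return android::NO_ERROR;
    }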
diff --git a/media/libmediahelper/include/media/TypeConverter.h b/media/libmediahelper/include/media/TypeConverter.h
index 011498a..42ccb5f 100644
--- a/media/libmediahelper/include/media/TypeConverter.h
+++ b/media/libmediahelper/include/media/TypeConverter.h
@@ -24,8 +24,6 @@
 
 #include <system/audio.h>
 #include <utils/Log.h>
-#include <utils/Vector.h>
-#include <utils/SortedVector.h>
 
 #include <media/AudioParameter.h>
 #include "convert.h"
@@ -43,16 +41,6 @@
     }
 };
 template <typename T>
-struct SortedVectorTraits
-{
-    typedef T Type;
-    typedef SortedVector<Type> Collection;
-    static void add(Collection &collection, Type value)
-    {
-        collection.add(value);
-    }
-};
-template <typename T>
 struct SetTraits
 {
     typedef T Type;
@@ -108,13 +96,20 @@
                                      typename Traits::Collection &collection,
                                      const char *del = AudioParameter::valueListSeparator);
 
-    static uint32_t maskFromString(
+    static typename Traits::Type maskFromString(
             const std::string &str, const char *del = AudioParameter::valueListSeparator);
 
     static void maskToString(
-            uint32_t mask, std::string &str, const char *del = AudioParameter::valueListSeparator);
+            typename Traits::Type mask, std::string &str,
+            const char *del = AudioParameter::valueListSeparator);
 
 protected:
+    // Default implementations use mTable for to/from string conversions
+    // of each individual enum value.
+    // These functions may be specialized to use external converters instead.
+    static bool toStringImpl(const typename Traits::Type &value, std::string &str);
+    static bool fromStringImpl(const std::string &str, typename Traits::Type &result);
+
     struct Table {
         const char *literal;
         typename Traits::Type value;
@@ -124,26 +119,22 @@
 };
 
 template <class Traits>
-inline bool TypeConverter<Traits>::toString(const typename Traits::Type &value, std::string &str)
-{
+inline bool TypeConverter<Traits>::toStringImpl(
+        const typename Traits::Type &value, std::string &str) {
     for (size_t i = 0; mTable[i].literal; i++) {
         if (mTable[i].value == value) {
             str = mTable[i].literal;
             return true;
         }
     }
-    char result[64];
-    snprintf(result, sizeof(result), "Unknown enum value %d", value);
-    str = result;
     return false;
 }
 
 template <class Traits>
-inline bool TypeConverter<Traits>::fromString(const std::string &str, typename Traits::Type &result)
-{
+inline bool TypeConverter<Traits>::fromStringImpl(
+        const std::string &str, typename Traits::Type &result) {
     for (size_t i = 0; mTable[i].literal; i++) {
         if (strcmp(mTable[i].literal, str.c_str()) == 0) {
-            ALOGV("stringToEnum() found %s", mTable[i].literal);
             result = mTable[i].value;
             return true;
         }
@@ -152,6 +143,26 @@
 }
 
 template <class Traits>
+inline bool TypeConverter<Traits>::toString(const typename Traits::Type &value, std::string &str)
+{
+    const bool success = toStringImpl(value, str);
+    if (!success) {
+        char result[64];
+        snprintf(result, sizeof(result), "Unknown enum value %d", value);
+        str = result;
+    }
+    return success;
+}
+
+template <class Traits>
+inline bool TypeConverter<Traits>::fromString(const std::string &str, typename Traits::Type &result)
+{
+    const bool success = fromStringImpl(str, result);
+    ALOGV_IF(success, "stringToEnum() found %s", str.c_str());
+    return success;
+}
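The split above keeps the shared behavior in toString()/fromString() (the "Unknown enum value" fallback and the verbose log) while letting individual traits override only the *Impl functions. As a rough sketch of the pattern, a specialization for some hypothetical trait would look like this (FooTraits and foo_to_string()/foo_from_string() are invented; the real specializations further down use the audio_*_to_string helpers from system/audio.h):

    // Illustrative only -- not part of this change.
    template <>
    inline bool TypeConverter<FooTraits>::toStringImpl(
            const FooTraits::Type &value, std::string &str) {
        str = foo_to_string(value);
        return !str.empty();
    }

    template <>
    inline bool TypeConverter<FooTraits>::fromStringImpl(
            const std::string &str, FooTraits::Type &result) {
        return foo_from_string(str.c_str(), &result);
    }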
+
+template <class Traits>
 inline void TypeConverter<Traits>::collectionFromString(const std::string &str,
         typename Traits::Collection &collection,
         const char *del)
@@ -168,7 +179,8 @@
 }
 
 template <class Traits>
-inline uint32_t TypeConverter<Traits>::maskFromString(const std::string &str, const char *del)
+inline typename Traits::Type TypeConverter<Traits>::maskFromString(
+        const std::string &str, const char *del)
 {
     char *literal = strdup(str.c_str());
     uint32_t value = 0;
@@ -179,20 +191,24 @@
         }
     }
     free(literal);
-    return value;
+    return static_cast<typename Traits::Type>(value);
 }
 
 template <class Traits>
-inline void TypeConverter<Traits>::maskToString(uint32_t mask, std::string &str, const char *del)
+inline void TypeConverter<Traits>::maskToString(
+        typename Traits::Type mask, std::string &str, const char *del)
 {
     if (mask != 0) {
         bool first_flag = true;
-        for (size_t i = 0; mTable[i].literal; i++) {
-            uint32_t value = static_cast<uint32_t>(mTable[i].value);
-            if (mTable[i].value != 0 && ((mask & value) == value)) {
-                if (!first_flag) str += del;
-                first_flag = false;
-                str += mTable[i].literal;
+        for (size_t bit = 0; bit < sizeof(uint32_t) * 8; ++bit) {
+            uint32_t flag = 1u << bit;
+            if ((flag & mask) == flag) {
+                std::string flag_str;
+                if (toString(static_cast<typename Traits::Type>(flag), flag_str)) {
+                    if (!first_flag) str += del;
+                    first_flag = false;
+                    str += flag_str;
+                }
             }
         }
     } else {
@@ -200,6 +216,7 @@
     }
 }
 
+typedef TypeConverter<DeviceTraits> DeviceConverter;
 typedef TypeConverter<OutputDeviceTraits> OutputDeviceConverter;
 typedef TypeConverter<InputDeviceTraits> InputDeviceConverter;
 typedef TypeConverter<OutputFlagTraits> OutputFlagConverter;
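maskToString() above no longer walks mTable directly; it decomposes the mask one bit at a time and stringifies each set bit through toString(), so traits that specialize toStringImpl() (such as the flag converters declared here) are picked up automatically. A short worked example, assuming the usual audio-base.h values where AUDIO_OUTPUT_FLAG_FAST is 0x4 and AUDIO_OUTPUT_FLAG_DEEP_BUFFER is 0x8:

    audio_output_flags_t mask = static_cast<audio_output_flags_t>(
            AUDIO_OUTPUT_FLAG_FAST | AUDIO_OUTPUT_FLAG_DEEP_BUFFER);
    std::string s;
    OutputFlagConverter::maskToString(mask, s, "|");
    // Expected: "AUDIO_OUTPUT_FLAG_FAST|AUDIO_OUTPUT_FLAG_DEEP_BUFFER" -- the bits are
    // visited from the lowest upward and each one is converted individually.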
@@ -216,23 +233,227 @@
 typedef TypeConverter<SourceTraits> SourceTypeConverter;
 typedef TypeConverter<AudioFlagTraits> AudioFlagConverter;
 
-template<> const OutputDeviceConverter::Table OutputDeviceConverter::mTable[];
-template<> const InputDeviceConverter::Table InputDeviceConverter::mTable[];
-template<> const OutputFlagConverter::Table OutputFlagConverter::mTable[];
-template<> const InputFlagConverter::Table InputFlagConverter::mTable[];
-template<> const FormatConverter::Table FormatConverter::mTable[];
-template<> const OutputChannelConverter::Table OutputChannelConverter::mTable[];
-template<> const InputChannelConverter::Table InputChannelConverter::mTable[];
-template<> const ChannelIndexConverter::Table ChannelIndexConverter::mTable[];
-template<> const GainModeConverter::Table GainModeConverter::mTable[];
-template<> const StreamTypeConverter::Table StreamTypeConverter::mTable[];
 template<> const AudioModeConverter::Table AudioModeConverter::mTable[];
-template<> const AudioContentTypeConverter::Table AudioContentTypeConverter::mTable[];
-template<> const UsageTypeConverter::Table UsageTypeConverter::mTable[];
-template<> const SourceTypeConverter::Table SourceTypeConverter::mTable[];
 template<> const AudioFlagConverter::Table AudioFlagConverter::mTable[];
 
-bool deviceFromString(const std::string& literalDevice, audio_devices_t& device);
+template <>
+inline bool TypeConverter<DeviceTraits>::toStringImpl(
+        const DeviceTraits::Type &value, std::string &str) {
+    str = audio_device_to_string(value);
+    return !str.empty();
+}
+
+template <>
+inline bool TypeConverter<DeviceTraits>::fromStringImpl(
+        const std::string &str, DeviceTraits::Type &result) {
+    return audio_device_from_string(str.c_str(), &result);
+}
+
+template <>
+inline bool TypeConverter<OutputDeviceTraits>::toStringImpl(
+        const OutputDeviceTraits::Type &value, std::string &str) {
+    if (audio_is_output_device(value)) {
+        str = audio_device_to_string(value);
+        return !str.empty();
+    }
+    return false;
+}
+
+template <>
+inline bool TypeConverter<OutputDeviceTraits>::fromStringImpl(
+        const std::string &str, OutputDeviceTraits::Type &result) {
+    OutputDeviceTraits::Type temp;
+    if (audio_device_from_string(str.c_str(), &temp) &&
+            audio_is_output_device(temp)) {
+        result = temp;
+        return true;
+    }
+    return false;
+}
+
+template <>
+inline bool TypeConverter<InputDeviceTraits>::toStringImpl(
+        const InputDeviceTraits::Type &value, std::string &str) {
+    if (audio_is_input_device(value)) {
+        str = audio_device_to_string(value);
+        return !str.empty();
+    }
+    return false;
+}
+
+template <>
+inline bool TypeConverter<InputDeviceTraits>::fromStringImpl(
+        const std::string &str, InputDeviceTraits::Type &result) {
+    InputDeviceTraits::Type temp;
+    if (audio_device_from_string(str.c_str(), &temp) &&
+            audio_is_input_device(temp)) {
+        result = temp;
+        return true;
+    }
+    return false;
+}
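The device specializations above all route through audio_device_to_string()/audio_device_from_string(), but the direction-specific converters additionally gate on audio_is_output_device()/audio_is_input_device(), so a well-formed literal of the wrong direction is rejected rather than silently accepted. For example (device names are the standard system/audio.h ones):

    audio_devices_t dev;
    bool anyDir = DeviceConverter::fromString("AUDIO_DEVICE_OUT_SPEAKER", dev);        // true
    bool asOut  = OutputDeviceConverter::fromString("AUDIO_DEVICE_OUT_SPEAKER", dev);  // true
    bool asIn   = InputDeviceConverter::fromString("AUDIO_DEVICE_OUT_SPEAKER", dev);   // false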
+
+template <>
+inline bool TypeConverter<InputFlagTraits>::toStringImpl(
+        const audio_input_flags_t &value, std::string &str) {
+    str = audio_input_flag_to_string(value);
+    return !str.empty();
+}
+
+template <>
+inline bool TypeConverter<InputFlagTraits>::fromStringImpl(
+        const std::string &str, audio_input_flags_t &result) {
+    return audio_input_flag_from_string(str.c_str(), &result);
+}
+
+template <>
+inline bool TypeConverter<OutputFlagTraits>::toStringImpl(
+        const audio_output_flags_t &value, std::string &str) {
+    str = audio_output_flag_to_string(value);
+    return !str.empty();
+}
+
+template <>
+inline bool TypeConverter<OutputFlagTraits>::fromStringImpl(
+        const std::string &str, audio_output_flags_t &result) {
+    return audio_output_flag_from_string(str.c_str(), &result);
+}
+
+template <>
+inline bool TypeConverter<FormatTraits>::toStringImpl(
+        const audio_format_t &value, std::string &str) {
+    str = audio_format_to_string(value);
+    return !str.empty();
+}
+
+template <>
+inline bool TypeConverter<FormatTraits>::fromStringImpl(
+        const std::string &str, audio_format_t &result) {
+    return audio_format_from_string(str.c_str(), &result);
+}
+
+template <>
+inline bool TypeConverter<OutputChannelTraits>::toStringImpl(
+        const audio_channel_mask_t &value, std::string &str) {
+    str = audio_channel_out_mask_to_string(value);
+    return !str.empty();
+}
+
+template <>
+inline bool TypeConverter<OutputChannelTraits>::fromStringImpl(
+        const std::string &str, audio_channel_mask_t &result) {
+    OutputChannelTraits::Type temp;
+    if (audio_channel_mask_from_string(str.c_str(), &temp) &&
+            audio_is_output_channel(temp)) {
+        result = temp;
+        return true;
+    }
+    return false;
+}
+
+template <>
+inline bool TypeConverter<InputChannelTraits>::toStringImpl(
+        const audio_channel_mask_t &value, std::string &str) {
+    str = audio_channel_in_mask_to_string(value);
+    return !str.empty();
+}
+
+template <>
+inline bool TypeConverter<InputChannelTraits>::fromStringImpl(
+        const std::string &str, audio_channel_mask_t &result) {
+    InputChannelTraits::Type temp;
+    if (audio_channel_mask_from_string(str.c_str(), &temp) &&
+            audio_is_input_channel(temp)) {
+        result = temp;
+        return true;
+    }
+    return false;
+}
+
+template <>
+inline bool TypeConverter<ChannelIndexTraits>::toStringImpl(
+        const audio_channel_mask_t &value, std::string &str) {
+    str = audio_channel_index_mask_to_string(value);
+    return !str.empty();
+}
+
+template <>
+inline bool TypeConverter<ChannelIndexTraits>::fromStringImpl(
+        const std::string &str, audio_channel_mask_t &result) {
+    ChannelIndexTraits::Type temp;
+    if (audio_channel_mask_from_string(str.c_str(), &temp) &&
+            audio_channel_mask_get_representation(temp) == AUDIO_CHANNEL_REPRESENTATION_INDEX) {
+        result = temp;
+        return true;
+    }
+    return false;
+}
+
+template <>
+inline bool TypeConverter<StreamTraits>::toStringImpl(
+        const audio_stream_type_t &value, std::string &str) {
+    str = audio_stream_type_to_string(value);
+    return !str.empty();
+}
+
+template <>
+inline bool TypeConverter<StreamTraits>::fromStringImpl(
+        const std::string &str, audio_stream_type_t &result)
+{
+    return audio_stream_type_from_string(str.c_str(), &result);
+}
+
+template <>
+inline bool TypeConverter<GainModeTraits>::toStringImpl(
+        const audio_gain_mode_t &value, std::string &str) {
+    str = audio_gain_mode_to_string(value);
+    return !str.empty();
+}
+
+template <>
+inline bool TypeConverter<GainModeTraits>::fromStringImpl(
+        const std::string &str, audio_gain_mode_t &result) {
+    return audio_gain_mode_from_string(str.c_str(), &result);
+}
+
+template <>
+inline bool TypeConverter<AudioContentTraits>::toStringImpl(
+        const audio_content_type_t &value, std::string &str) {
+    str = audio_content_type_to_string(value);
+    return !str.empty();
+}
+
+template <>
+inline bool TypeConverter<AudioContentTraits>::fromStringImpl(
+        const std::string &str, audio_content_type_t &result) {
+    return audio_content_type_from_string(str.c_str(), &result);
+}
+
+template <>
+inline bool TypeConverter<UsageTraits>::toStringImpl(const audio_usage_t &value, std::string &str)
+{
+    str = audio_usage_to_string(value);
+    return !str.empty();
+}
+
+template <>
+inline bool TypeConverter<UsageTraits>::fromStringImpl(
+        const std::string &str, audio_usage_t &result) {
+    return audio_usage_from_string(str.c_str(), &result);
+}
+
+template <>
+inline bool TypeConverter<SourceTraits>::toStringImpl(const audio_source_t &value, std::string &str)
+{
+    str = audio_source_to_string(value);
+    return !str.empty();
+}
+
+template <>
+inline bool TypeConverter<SourceTraits>::fromStringImpl(
+        const std::string &str, audio_source_t &result) {
+    return audio_source_from_string(str.c_str(), &result);
+}
 
 SampleRateTraits::Collection samplingRatesFromString(
         const std::string &samplingRates, const char *del = AudioParameter::valueListSeparator);
@@ -256,6 +477,7 @@
 
 // counting enumerations
 template <typename T, std::enable_if_t<std::is_same<T, audio_content_type_t>::value
+                                    || std::is_same<T, audio_devices_t>::value
                                     || std::is_same<T, audio_mode_t>::value
                                     || std::is_same<T, audio_source_t>::value
                                     || std::is_same<T, audio_stream_type_t>::value
@@ -282,17 +504,6 @@
     return result;
 }
 
-static inline std::string toString(const audio_devices_t& devices)
-{
-    std::string result;
-    if ((devices & AUDIO_DEVICE_BIT_IN) != 0) {
-        InputDeviceConverter::maskToString(devices, result);
-    } else {
-        OutputDeviceConverter::maskToString(devices, result);
-    }
-    return result;
-}
-
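With audio_devices_t added to the counting-enumerations constraint above and the mask-based toString(audio_devices_t) overload removed just above, stringifying a device value appears to go through the single-value DeviceConverter path rather than bit-mask decomposition. If that reading is right, the expectation becomes (device names from system/audio.h):

    std::string s = toString(AUDIO_DEVICE_OUT_SPEAKER);  // "AUDIO_DEVICE_OUT_SPEAKER"
    // A combined mask such as (AUDIO_DEVICE_OUT_SPEAKER | AUDIO_DEVICE_OUT_WIRED_HEADPHONE)
    // is no longer decomposed here; callers needing that must split the mask themselves.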
 static inline std::string toString(const audio_attributes_t& attributes)
 {
     std::ostringstream result;
diff --git a/media/libmediahelper/tests/Android.bp b/media/libmediahelper/tests/Android.bp
new file mode 100644
index 0000000..c5ba122
--- /dev/null
+++ b/media/libmediahelper/tests/Android.bp
@@ -0,0 +1,22 @@
+cc_test {
+    name: "libmedia_helper_tests",
+
+    generated_headers: ["audio_policy_configuration_V7_0"],
+    generated_sources: ["audio_policy_configuration_V7_0"],
+    header_libs: ["libxsdc-utils"],
+    shared_libs: [
+        "libbase",
+        "liblog",
+        "libmedia_helper",
+        "libxml2",
+    ],
+
+    srcs: ["typeconverter_tests.cpp"],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+
+    test_suites: ["device-tests"],
+}
diff --git a/media/libmediahelper/tests/typeconverter_tests.cpp b/media/libmediahelper/tests/typeconverter_tests.cpp
new file mode 100644
index 0000000..0c3b913
--- /dev/null
+++ b/media/libmediahelper/tests/typeconverter_tests.cpp
@@ -0,0 +1,226 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <gtest/gtest.h>
+
+#define LOG_TAG "TypeConverter_Test"
+#include <log/log.h>
+
+#include <android_audio_policy_configuration_V7_0.h>
+#include <media/TypeConverter.h>
+#include <system/audio.h>
+#include <xsdc/XsdcSupport.h>
+
+using namespace android;
+namespace xsd {
+using namespace android::audio::policy::configuration::V7_0;
+}
+
+TEST(TypeConverter, ParseChannelMasks) {
+    for (const auto enumVal : xsdc_enum_range<xsd::AudioChannelMask>{}) {
+        const std::string stringVal = toString(enumVal);
+        audio_channel_mask_t channelMask = channelMaskFromString(stringVal);
+        EXPECT_EQ(stringVal != "AUDIO_CHANNEL_NONE", audio_channel_mask_is_valid(channelMask))
+                << "Validity of \"" << stringVal << "\" is not as expected";
+    }
+}
+
+TEST(TypeConverter, ParseInputOutputIndexChannelMask) {
+    for (const auto enumVal : xsdc_enum_range<xsd::AudioChannelMask>{}) {
+        const std::string stringVal = toString(enumVal);
+        audio_channel_mask_t channelMask, channelMaskBack;
+        std::string stringValBack;
+        if (stringVal.find("_CHANNEL_IN_") != std::string::npos) {
+            EXPECT_TRUE(InputChannelConverter::fromString(stringVal, channelMask))
+                    << "Conversion of \"" << stringVal << "\" failed (as input channel mask)";
+            EXPECT_TRUE(InputChannelConverter::toString(channelMask, stringValBack))
+                    << "Conversion of input channel mask " << channelMask << " failed";
+            // Due to aliased values, the result of 'toString' might not be the same
+            // as 'stringVal', thus we need to compare the results of parsing instead.
+            EXPECT_TRUE(InputChannelConverter::fromString(stringValBack, channelMaskBack))
+                    << "Conversion of \"" << stringValBack << "\" failed (as input channel mask)";
+            EXPECT_EQ(channelMask, channelMaskBack);
+        } else if (stringVal.find("_CHANNEL_OUT_") != std::string::npos) {
+            EXPECT_TRUE(OutputChannelConverter::fromString(stringVal, channelMask))
+                    << "Conversion of \"" << stringVal << "\" failed (as output channel mask)";
+            EXPECT_TRUE(OutputChannelConverter::toString(channelMask, stringValBack))
+                    << "Conversion of output channel mask " << channelMask << " failed";
+            EXPECT_TRUE(OutputChannelConverter::fromString(stringValBack, channelMaskBack))
+                    << "Conversion of \"" << stringValBack << "\" failed (as output channel mask)";
+            EXPECT_EQ(channelMask, channelMaskBack);
+        } else if (stringVal.find("_CHANNEL_INDEX_") != std::string::npos) {
+            EXPECT_TRUE(ChannelIndexConverter::fromString(stringVal, channelMask))
+                    << "Conversion of \"" << stringVal << "\" failed (as indexed channel mask)";
+            EXPECT_TRUE(ChannelIndexConverter::toString(channelMask, stringValBack))
+                    << "Conversion of indexed channel mask " << channelMask << " failed";
+            EXPECT_EQ(stringVal, stringValBack);
+        } else if (stringVal == "AUDIO_CHANNEL_NONE") {
+            EXPECT_FALSE(InputChannelConverter::fromString(stringVal, channelMask))
+                    << "Conversion of \"" << stringVal << "\" succeeded (as input channel mask)";
+            EXPECT_FALSE(OutputChannelConverter::fromString(stringVal, channelMask))
+                    << "Conversion of \"" << stringVal << "\" succeeded (as output channel mask)";
+            EXPECT_FALSE(ChannelIndexConverter::fromString(stringVal, channelMask))
+                    << "Conversion of \"" << stringVal << "\" succeeded (as index channel mask)";
+            // None of the converters could parse this because 'NONE' isn't a valid channel mask.
+            channelMask = AUDIO_CHANNEL_NONE;
+            // However, they all must succeed in converting it back.
+            EXPECT_TRUE(InputChannelConverter::toString(channelMask, stringValBack))
+                    << "Conversion of input channel mask " << channelMask << " failed";
+            EXPECT_EQ(stringVal, stringValBack);
+            EXPECT_TRUE(OutputChannelConverter::toString(channelMask, stringValBack))
+                    << "Conversion of output channel mask " << channelMask << " failed";
+            EXPECT_EQ(stringVal, stringValBack);
+            EXPECT_TRUE(ChannelIndexConverter::toString(channelMask, stringValBack))
+                    << "Conversion of indexed channel mask " << channelMask << " failed";
+            EXPECT_EQ(stringVal, stringValBack);
+        }
+    }
+}
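The round-trip-by-reparsing pattern above exists because several channel mask literals alias the same numeric value, so toString() may legitimately return a different literal than the one that was parsed. For instance, assuming (as in system/audio.h) that AUDIO_CHANNEL_OUT_QUAD_BACK aliases AUDIO_CHANNEL_OUT_QUAD:

    audio_channel_mask_t a, b;
    OutputChannelConverter::fromString("AUDIO_CHANNEL_OUT_QUAD", a);
    OutputChannelConverter::fromString("AUDIO_CHANNEL_OUT_QUAD_BACK", b);
    // a == b, so only the re-parsed masks -- not the strings -- can be compared reliably.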
+
+TEST(TypeConverter, ParseContentTypes) {
+    for (const auto enumVal : xsdc_enum_range<xsd::AudioContentType>{}) {
+        const std::string stringVal = toString(enumVal);
+        audio_content_type_t contentType;
+        EXPECT_TRUE(AudioContentTypeConverter::fromString(stringVal, contentType))
+                << "Conversion of \"" << stringVal << "\" failed";
+        EXPECT_EQ(stringVal, toString(contentType));
+    }
+}
+
+TEST(TypeConverter, ParseDevices) {
+    for (const auto enumVal : xsdc_enum_range<xsd::AudioDevice>{}) {
+        const std::string stringVal = toString(enumVal);
+        audio_devices_t device, deviceBack;
+        std::string stringValBack;
+        EXPECT_TRUE(DeviceConverter::fromString(stringVal, device))
+                << "Conversion of \"" << stringVal << "\" failed";
+        if (stringVal != "AUDIO_DEVICE_NONE") {
+            EXPECT_TRUE(audio_is_input_device(device) || audio_is_output_device(device))
+                    << "Device \"" << stringVal << "\" is neither input, nor output device";
+        } else {
+            EXPECT_FALSE(audio_is_input_device(device));
+            EXPECT_FALSE(audio_is_output_device(device));
+        }
+        // Due to aliased values, the result of 'toString' might not be the same
+        // as 'stringVal', thus we need to compare the results of parsing instead.
+        stringValBack = toString(device);
+        EXPECT_TRUE(DeviceConverter::fromString(stringValBack, deviceBack))
+                << "Conversion of \"" << stringValBack << "\" failed";
+        EXPECT_EQ(device, deviceBack);
+    }
+}
+
+TEST(TypeConverter, ParseInOutDevices) {
+    for (const auto enumVal : xsdc_enum_range<xsd::AudioDevice>{}) {
+        const std::string stringVal = toString(enumVal);
+        audio_devices_t device, deviceBack;
+        std::string stringValBack;
+        if (stringVal.find("_DEVICE_IN_") != std::string::npos) {
+            EXPECT_TRUE(InputDeviceConverter::fromString(stringVal, device))
+                    << "Conversion of \"" << stringVal << "\" failed (as input device)";
+            // Due to aliased values, the result of 'toString' might not be the same
+            // as 'stringVal', thus we need to compare the results of parsing instead.
+            stringValBack = toString(device);
+            EXPECT_TRUE(InputDeviceConverter::fromString(stringValBack, deviceBack))
+                    << "Conversion of \"" << stringValBack << "\" failed";
+            EXPECT_EQ(device, deviceBack);
+        } else if (stringVal.find("_DEVICE_OUT_") != std::string::npos) {
+            EXPECT_TRUE(OutputDeviceConverter::fromString(stringVal, device))
+                    << "Conversion of \"" << stringVal << "\" failed (as output device)";
+            stringValBack = toString(device);
+            EXPECT_TRUE(OutputDeviceConverter::fromString(stringValBack, deviceBack))
+                    << "Conversion of \"" << stringValBack << "\" failed";
+            EXPECT_EQ(device, deviceBack);
+        } else if (stringVal == "AUDIO_DEVICE_NONE") {
+            EXPECT_FALSE(InputDeviceConverter::fromString(stringVal, device))
+                    << "Conversion of \"" << stringVal << "\" succeeded (as input device)";
+            EXPECT_FALSE(OutputDeviceConverter::fromString(stringVal, device))
+                    << "Conversion of \"" << stringVal << "\" succeeded (as output device)";
+            EXPECT_EQ(stringVal, toString(device));
+        }
+    }
+}
+
+TEST(TypeConverter, ParseInOutFlags) {
+    for (const auto enumVal : xsdc_enum_range<xsd::AudioInOutFlag>{}) {
+        const std::string stringVal = toString(enumVal);
+        if (stringVal.find("_INPUT_FLAG_") != std::string::npos) {
+            audio_input_flags_t flag;
+            EXPECT_TRUE(InputFlagConverter::fromString(stringVal, flag))
+                    << "Conversion of \"" << stringVal << "\" failed (as input flag)";
+            EXPECT_EQ(stringVal, toString(flag));
+        } else {
+            audio_output_flags_t flag;
+            EXPECT_TRUE(OutputFlagConverter::fromString(stringVal, flag))
+                    << "Conversion of \"" << stringVal << "\" failed (as output flag)";
+            EXPECT_EQ(stringVal, toString(flag));
+        }
+    }
+}
+
+TEST(TypeConverter, ParseFormats) {
+    for (const auto enumVal : xsdc_enum_range<xsd::AudioFormat>{}) {
+        const std::string stringVal = toString(enumVal);
+        audio_format_t format;
+        EXPECT_TRUE(FormatConverter::fromString(stringVal, format))
+                << "Conversion of \"" << stringVal << "\" failed";
+        EXPECT_TRUE(audio_is_valid_format(format))
+                << "Converted format \"" << stringVal << "\" is invalid";
+        EXPECT_EQ(stringVal, toString(format));
+    }
+}
+
+TEST(TypeConverter, ParseGainModes) {
+    for (const auto enumVal : xsdc_enum_range<xsd::AudioGainMode>{}) {
+        const std::string stringVal = toString(enumVal);
+        audio_gain_mode_t gainMode;
+        EXPECT_TRUE(GainModeConverter::fromString(stringVal, gainMode))
+                << "Conversion of \"" << stringVal << "\" failed";
+        EXPECT_EQ(stringVal, toString(gainMode));
+    }
+}
+
+TEST(TypeConverter, ParseSources) {
+    for (const auto enumVal : xsdc_enum_range<xsd::AudioSource>{}) {
+        const std::string stringVal = toString(enumVal);
+        audio_source_t source;
+        EXPECT_TRUE(SourceTypeConverter::fromString(stringVal, source))
+                << "Conversion of \"" << stringVal << "\" failed";
+        EXPECT_EQ(source != AUDIO_SOURCE_DEFAULT, audio_is_valid_audio_source(source))
+                << "Validity of \"" << stringVal << "\" is not as expected";
+        EXPECT_EQ(stringVal, toString(source));
+    }
+}
+
+TEST(TypeConverter, ParseStreamTypes) {
+    for (const auto enumVal : xsdc_enum_range<xsd::AudioStreamType>{}) {
+        const std::string stringVal = toString(enumVal);
+        audio_stream_type_t streamType;
+        EXPECT_TRUE(StreamTypeConverter::fromString(stringVal, streamType))
+                << "Conversion of \"" << stringVal << "\" failed";
+        EXPECT_EQ(stringVal, toString(streamType));
+    }
+}
+
+TEST(TypeConverter, ParseUsages) {
+    for (const auto enumVal : xsdc_enum_range<xsd::AudioUsage>{}) {
+        const std::string stringVal = toString(enumVal);
+        audio_usage_t usage;
+        EXPECT_TRUE(UsageTypeConverter::fromString(stringVal, usage))
+                << "Conversion of \"" << stringVal << "\" failed";
+        EXPECT_EQ(stringVal, toString(usage));
+    }
+}
diff --git a/media/libmediaplayerservice/Android.bp b/media/libmediaplayerservice/Android.bp
index 5301f5c..b62317a 100644
--- a/media/libmediaplayerservice/Android.bp
+++ b/media/libmediaplayerservice/Android.bp
@@ -15,7 +15,9 @@
     shared_libs: [
         "android.hardware.media.c2@1.0",
         "android.hardware.media.omx@1.0",
+        "av-types-aidl-unstable-cpp",
         "libbase",
+        "libandroid_net",
         "libaudioclient",
         "libbinder",
         "libcamera_client",
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index c0da0ce..4d90d98 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -480,14 +480,14 @@
 }
 
 sp<IMediaPlayer> MediaPlayerService::create(const sp<IMediaPlayerClient>& client,
-        audio_session_t audioSessionId)
+        audio_session_t audioSessionId, std::string opPackageName)
 {
     pid_t pid = IPCThreadState::self()->getCallingPid();
     int32_t connId = android_atomic_inc(&mNextConnId);
 
     sp<Client> c = new Client(
             this, pid, connId, client, audioSessionId,
-            IPCThreadState::self()->getCallingUid());
+            IPCThreadState::self()->getCallingUid(), opPackageName);
 
     ALOGV("Create new client(%d) from pid %d, uid %d, ", connId, pid,
          IPCThreadState::self()->getCallingUid());
@@ -733,7 +733,8 @@
 MediaPlayerService::Client::Client(
         const sp<MediaPlayerService>& service, pid_t pid,
         int32_t connId, const sp<IMediaPlayerClient>& client,
-        audio_session_t audioSessionId, uid_t uid)
+        audio_session_t audioSessionId, uid_t uid, const std::string& opPackageName)
+        : mOpPackageName(opPackageName)
 {
     ALOGV("Client(%d) constructor", connId);
     mPid = pid;
@@ -922,7 +923,7 @@
 
     if (!p->hardwareOutput()) {
         mAudioOutput = new AudioOutput(mAudioSessionId, IPCThreadState::self()->getCallingUid(),
-                mPid, mAudioAttributes, mAudioDeviceUpdatedListener);
+                mPid, mAudioAttributes, mAudioDeviceUpdatedListener, mOpPackageName);
         static_cast<MediaPlayerInterface*>(p.get())->setAudioSink(mAudioOutput);
     }
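The op package name is threaded from MediaPlayerService::create() through Client into AudioOutput and, further below, into the AudioTrack construction, presumably so that audio operations can be attributed to the requesting app rather than to mediaserver. A hypothetical caller-side sketch against the service-side signature shown earlier (service, client and the package name are illustrative):

    sp<IMediaPlayer> player = service->create(
            client, AUDIO_SESSION_ALLOCATE, std::string("com.example.player"));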
 
@@ -1062,6 +1063,17 @@
     return mStatus = setDataSource_post(p, p->setDataSource(dataSource));
 }
 
+status_t MediaPlayerService::Client::setDataSource(
+        const String8& rtpParams) {
+    player_type playerType = NU_PLAYER;
+    sp<MediaPlayerBase> p = setDataSource_pre(playerType);
+    if (p == NULL) {
+        return NO_INIT;
+    }
+    // now set data source
+    return mStatus = setDataSource_post(p, p->setDataSource(rtpParams));
+}
+
 void MediaPlayerService::Client::disconnectNativeWindow_l() {
     if (mConnectedWindow != NULL) {
         status_t err = nativeWindowDisconnect(
@@ -1761,7 +1773,8 @@
 #undef LOG_TAG
 #define LOG_TAG "AudioSink"
 MediaPlayerService::AudioOutput::AudioOutput(audio_session_t sessionId, uid_t uid, int pid,
-        const audio_attributes_t* attr, const sp<AudioSystem::AudioDeviceCallback>& deviceCallback)
+        const audio_attributes_t* attr, const sp<AudioSystem::AudioDeviceCallback>& deviceCallback,
+        const std::string& opPackageName)
     : mCallback(NULL),
       mCallbackCookie(NULL),
       mCallbackData(NULL),
@@ -1782,7 +1795,8 @@
       mSelectedDeviceId(AUDIO_PORT_HANDLE_NONE),
       mRoutedDeviceId(AUDIO_PORT_HANDLE_NONE),
       mDeviceCallbackEnabled(false),
-      mDeviceCallback(deviceCallback)
+      mDeviceCallback(deviceCallback),
+      mOpPackageName(opPackageName)
 {
     ALOGV("AudioOutput(%d)", sessionId);
     if (attr != NULL) {
@@ -2176,7 +2190,8 @@
                     mAttributes,
                     doNotReconnect,
                     1.0f,  // default value for maxRequiredSpeed
-                    mSelectedDeviceId);
+                    mSelectedDeviceId,
+                    mOpPackageName);
         } else {
             // TODO: Due to buffer memory concerns, we use a max target playback speed
             // based on mPlaybackRate at the time of open (instead of kMaxRequiredSpeed),
@@ -2204,7 +2219,8 @@
                     mAttributes,
                     doNotReconnect,
                     targetSpeed,
-                    mSelectedDeviceId);
+                    mSelectedDeviceId,
+                    mOpPackageName);
         }
         // Set caller name so it can be logged in destructor.
         // MediaMetricsConstants.h: AMEDIAMETRICS_PROP_CALLERNAME_VALUE_MEDIA
diff --git a/media/libmediaplayerservice/MediaPlayerService.h b/media/libmediaplayerservice/MediaPlayerService.h
index 6431ca1..b2f1b9b 100644
--- a/media/libmediaplayerservice/MediaPlayerService.h
+++ b/media/libmediaplayerservice/MediaPlayerService.h
@@ -19,6 +19,7 @@
 #define ANDROID_MEDIAPLAYERSERVICE_H
 
 #include <arpa/inet.h>
+#include <string>
 
 #include <utils/threads.h>
 #include <utils/Errors.h>
@@ -81,7 +82,8 @@
                                         uid_t uid,
                                         int pid,
                                         const audio_attributes_t * attr,
-                                        const sp<AudioSystem::AudioDeviceCallback>& deviceCallback);
+                                        const sp<AudioSystem::AudioDeviceCallback>& deviceCallback,
+                                        const std::string& opPackageName);
         virtual                 ~AudioOutput();
 
         virtual bool            ready() const { return mTrack != 0; }
@@ -178,6 +180,7 @@
         bool                    mDeviceCallbackEnabled;
         wp<AudioSystem::AudioDeviceCallback>        mDeviceCallback;
         mutable Mutex           mLock;
+        const std::string       mOpPackageName;
 
         // static variables below not protected by mutex
         static bool             mIsOnEmulator;
@@ -235,7 +238,8 @@
     virtual sp<IMediaMetadataRetriever> createMetadataRetriever();
 
     virtual sp<IMediaPlayer>    create(const sp<IMediaPlayerClient>& client,
-                                       audio_session_t audioSessionId);
+                                       audio_session_t audioSessionId,
+                                       const std::string opPackageName);
 
     virtual sp<IMediaCodecList> getCodecList() const;
 
@@ -368,6 +372,7 @@
 
         virtual status_t        setDataSource(const sp<IStreamSource> &source);
         virtual status_t        setDataSource(const sp<IDataSource> &source);
+        virtual status_t        setDataSource(const String8& rtpParams);
 
 
         sp<MediaPlayerBase>     setDataSource_pre(player_type playerType);
@@ -410,7 +415,8 @@
                                         int32_t connId,
                                         const sp<IMediaPlayerClient>& client,
                                         audio_session_t audioSessionId,
-                                        uid_t uid);
+                                        uid_t uid,
+                                        const std::string& opPackageName);
                                 Client();
         virtual                 ~Client();
 
@@ -467,6 +473,7 @@
                     bool                          mRetransmitEndpointValid;
                     sp<Client>                    mNextClient;
                     sp<MediaPlayerBase::Listener> mListener;
+                    const std::string             mOpPackageName;
 
         // Metadata filters.
         media::Metadata::Filter mMetadataAllow;  // protected by mLock
diff --git a/media/libmediaplayerservice/MediaRecorderClient.cpp b/media/libmediaplayerservice/MediaRecorderClient.cpp
index 9b1974b..1cc255d 100644
--- a/media/libmediaplayerservice/MediaRecorderClient.cpp
+++ b/media/libmediaplayerservice/MediaRecorderClient.cpp
@@ -28,6 +28,7 @@
 #include <binder/IServiceManager.h>
 #include <binder/MemoryHeapBase.h>
 #include <binder/MemoryBase.h>
+#include <camera/CameraUtils.h>
 #include <codec2/hidl/client.h>
 #include <cutils/atomic.h>
 #include <cutils/properties.h> // for property_get
@@ -423,30 +424,35 @@
 
     sp<IServiceManager> sm = defaultServiceManager();
 
-    // WORKAROUND: We don't know if camera exists here and getService might block for 5 seconds.
-    // Use checkService for camera if we don't know it exists.
-    static std::atomic<bool> sCameraChecked(false);  // once true never becomes false.
-    static std::atomic<bool> sCameraVerified(false); // once true never becomes false.
-    sp<IBinder> binder = (sCameraVerified || !sCameraChecked)
-        ? sm->getService(String16("media.camera")) : sm->checkService(String16("media.camera"));
-    // If the device does not have a camera, do not create a death listener for it.
-    if (binder != NULL) {
-        sCameraVerified = true;
-        mDeathNotifiers.emplace_back(
-                binder, [l = wp<IMediaRecorderClient>(listener)](){
-            sp<IMediaRecorderClient> listener = l.promote();
-            if (listener) {
-                ALOGV("media.camera service died. "
-                      "Sending death notification.");
-                listener->notify(
-                        MEDIA_ERROR, MEDIA_ERROR_SERVER_DIED,
-                        MediaPlayerService::CAMERA_PROCESS_DEATH);
-            } else {
-                ALOGW("media.camera service died without a death handler.");
-            }
-        });
+    static const bool sCameraDisabled = CameraUtils::isCameraServiceDisabled();
+
+    if (!sCameraDisabled) {
+        // WORKAROUND: We don't know whether a camera exists here, and getService might block
+        // for 5 seconds. Use checkService for the camera if we don't know that it exists.
+        static std::atomic<bool> sCameraChecked(false);  // once true never becomes false.
+        static std::atomic<bool> sCameraVerified(false); // once true never becomes false.
+
+        sp<IBinder> binder = (sCameraVerified || !sCameraChecked)
+            ? sm->getService(String16("media.camera")) : sm->checkService(String16("media.camera"));
+        // If the device does not have a camera, do not create a death listener for it.
+        if (binder != NULL) {
+            sCameraVerified = true;
+            mDeathNotifiers.emplace_back(
+                    binder, [l = wp<IMediaRecorderClient>(listener)](){
+                sp<IMediaRecorderClient> listener = l.promote();
+                if (listener) {
+                    ALOGV("media.camera service died. "
+                          "Sending death notification.");
+                    listener->notify(
+                            MEDIA_ERROR, MEDIA_ERROR_SERVER_DIED,
+                            MediaPlayerService::CAMERA_PROCESS_DEATH);
+                } else {
+                    ALOGW("media.camera service died without a death handler.");
+                }
+            });
+        }
+        sCameraChecked = true;
     }
-    sCameraChecked = true;
 
     {
         using ::android::hidl::base::V1_0::IBase;
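When CameraUtils::isCameraServiceDisabled() reports a camera-less configuration, the whole death-notifier setup above is skipped, so the checkService()/getService() trade-off described in the WORKAROUND comment no longer applies on such devices. For reference, the non-blocking probe that the workaround relies on looks like this (names as in the hunk above):

    sp<IBinder> camera = sm->checkService(String16("media.camera"));  // returns immediately, possibly null
    // getService() would instead block, for up to several seconds, waiting for the service.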
diff --git a/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp b/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp
index 41b6f72..02fb6bb 100644
--- a/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp
+++ b/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp
@@ -174,9 +174,7 @@
         ALOGV("getting track %zu of %zu, meta=%s", i, n, meta->toString().c_str());
 
         const char *mime;
-        CHECK(meta->findCString(kKeyMIMEType, &mime));
-
-        if (!strncasecmp(mime, "image/", 6)) {
+        if (meta->findCString(kKeyMIMEType, &mime) && !strncasecmp(mime, "image/", 6)) {
             int32_t isPrimary;
             if ((index < 0 && meta->findInt32(
                     kKeyTrackIsDefault, &isPrimary) && isPrimary)
@@ -208,12 +206,19 @@
     }
 
     const char *mime;
-    CHECK(trackMeta->findCString(kKeyMIMEType, &mime));
+    if (!trackMeta->findCString(kKeyMIMEType, &mime)) {
+        ALOGE("image track has no mime type");
+        return NULL;
+    }
     ALOGV("extracting from %s track", mime);
     if (!strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC)) {
         mime = MEDIA_MIMETYPE_VIDEO_HEVC;
         trackMeta = new MetaData(*trackMeta);
         trackMeta->setCString(kKeyMIMEType, mime);
+    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_AVIF)) {
+        mime = MEDIA_MIMETYPE_VIDEO_AV1;
+        trackMeta = new MetaData(*trackMeta);
+        trackMeta->setCString(kKeyMIMEType, mime);
     }
 
     bool preferhw = property_get_bool(
@@ -299,9 +304,7 @@
         }
 
         const char *mime;
-        CHECK(meta->findCString(kKeyMIMEType, &mime));
-
-        if (!strncasecmp(mime, "video/", 6)) {
+        if (meta->findCString(kKeyMIMEType, &mime) && !strncasecmp(mime, "video/", 6)) {
             break;
         }
     }
@@ -337,7 +340,10 @@
     }
 
     const char *mime;
-    CHECK(trackMeta->findCString(kKeyMIMEType, &mime));
+    if (!trackMeta->findCString(kKeyMIMEType, &mime)) {
+        ALOGE("video track has no mime information.");
+        return NULL;
+    }
 
     bool preferhw = property_get_bool(
             "media.stagefright.thumbnail.prefer_hw_codecs", false);
@@ -531,14 +537,14 @@
     int32_t audioBitrate = -1;
     int32_t rotationAngle = -1;
     int32_t imageCount = 0;
-    int32_t imagePrimary = 0;
+    int32_t imagePrimary = -1;
     int32_t imageWidth = -1;
     int32_t imageHeight = -1;
     int32_t imageRotation = -1;
 
     // The overall duration is the duration of the longest track.
     int64_t maxDurationUs = 0;
-    String8 timedTextLang;
+    String8 timedTextLang, videoMime;
     for (size_t i = 0; i < numTracks; ++i) {
         sp<MetaData> trackMeta = mExtractor->getTrackMetaData(i);
         if (!trackMeta) {
@@ -574,28 +580,33 @@
                     mMetaData.add(METADATA_KEY_SAMPLERATE, String8(tmp));
                 }
             } else if (!hasVideo && !strncasecmp("video/", mime, 6)) {
-                hasVideo = true;
-
-                CHECK(trackMeta->findInt32(kKeyWidth, &videoWidth));
-                CHECK(trackMeta->findInt32(kKeyHeight, &videoHeight));
                 if (!trackMeta->findInt32(kKeyRotation, &rotationAngle)) {
                     rotationAngle = 0;
                 }
                 if (!trackMeta->findInt32(kKeyFrameCount, &videoFrameCount)) {
                     videoFrameCount = 0;
                 }
-
-                parseColorAspects(trackMeta);
+                if (trackMeta->findInt32(kKeyWidth, &videoWidth)
+                    && trackMeta->findInt32(kKeyHeight, &videoHeight)) {
+                    hasVideo = true;
+                    videoMime = String8(mime);
+                    parseColorAspects(trackMeta);
+                } else {
+                    ALOGE("video track ignored for missing dimensions");
+                }
             } else if (!strncasecmp("image/", mime, 6)) {
                 int32_t isPrimary;
                 if (trackMeta->findInt32(
                         kKeyTrackIsDefault, &isPrimary) && isPrimary) {
-                    imagePrimary = imageCount;
-                    CHECK(trackMeta->findInt32(kKeyWidth, &imageWidth));
-                    CHECK(trackMeta->findInt32(kKeyHeight, &imageHeight));
                     if (!trackMeta->findInt32(kKeyRotation, &imageRotation)) {
                         imageRotation = 0;
                     }
+                    if (trackMeta->findInt32(kKeyWidth, &imageWidth)
+                        && trackMeta->findInt32(kKeyHeight, &imageHeight)) {
+                        imagePrimary = imageCount;
+                    } else {
+                        ALOGE("primary image track ignored for missing dimensions");
+                    }
                 }
                 imageCount++;
             } else if (!strcasecmp(mime, MEDIA_MIMETYPE_TEXT_3GPP)) {
@@ -628,22 +639,27 @@
     if (hasVideo) {
         mMetaData.add(METADATA_KEY_HAS_VIDEO, String8("yes"));
 
+        CHECK(videoWidth >= 0);
         sprintf(tmp, "%d", videoWidth);
         mMetaData.add(METADATA_KEY_VIDEO_WIDTH, String8(tmp));
 
+        CHECK(videoHeight >= 0);
         sprintf(tmp, "%d", videoHeight);
         mMetaData.add(METADATA_KEY_VIDEO_HEIGHT, String8(tmp));
 
         sprintf(tmp, "%d", rotationAngle);
         mMetaData.add(METADATA_KEY_VIDEO_ROTATION, String8(tmp));
 
+        mMetaData.add(METADATA_KEY_VIDEO_CODEC_MIME_TYPE, videoMime);
+
         if (videoFrameCount > 0) {
             sprintf(tmp, "%d", videoFrameCount);
             mMetaData.add(METADATA_KEY_VIDEO_FRAME_COUNT, String8(tmp));
         }
     }
 
-    if (imageCount > 0) {
+    // only if we have a primary image
+    if (imageCount > 0 && imagePrimary >= 0) {
         mMetaData.add(METADATA_KEY_HAS_IMAGE, String8("yes"));
 
         sprintf(tmp, "%d", imageCount);
@@ -652,9 +668,11 @@
         sprintf(tmp, "%d", imagePrimary);
         mMetaData.add(METADATA_KEY_IMAGE_PRIMARY, String8(tmp));
 
+        CHECK(imageWidth >= 0);
         sprintf(tmp, "%d", imageWidth);
         mMetaData.add(METADATA_KEY_IMAGE_WIDTH, String8(tmp));
 
+        CHECK(imageHeight >= 0);
         sprintf(tmp, "%d", imageHeight);
         mMetaData.add(METADATA_KEY_IMAGE_HEIGHT, String8(tmp));
 
@@ -682,10 +700,9 @@
                 !strcasecmp(fileMIME, "video/x-matroska")) {
             sp<MetaData> trackMeta = mExtractor->getTrackMetaData(0);
             const char *trackMIME;
-            if (trackMeta != nullptr) {
-                CHECK(trackMeta->findCString(kKeyMIMEType, &trackMIME));
-            }
-            if (!strncasecmp("audio/", trackMIME, 6)) {
+            if (trackMeta != nullptr
+                && trackMeta->findCString(kKeyMIMEType, &trackMIME)
+                && !strncasecmp("audio/", trackMIME, 6)) {
                 // The matroska file only contains a single audio track,
                 // rewrite its mime type.
                 mMetaData.add(
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index 7897959..3e7ee50 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -17,6 +17,9 @@
 //#define LOG_NDEBUG 0
 #define LOG_TAG "StagefrightRecorder"
 #include <inttypes.h>
+// TODO/workaround: android-base logging conflicts with ADebug.h, so it must be
+// included first.
+#include <android-base/logging.h>
 #include <utils/Log.h>
 
 #include "WebmWriter.h"
@@ -44,6 +47,7 @@
 #include <media/stagefright/CameraSourceTimeLapse.h>
 #include <media/stagefright/MPEG2TSWriter.h>
 #include <media/stagefright/MPEG4Writer.h>
+#include <media/stagefright/MediaCodecConstants.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MetaData.h>
 #include <media/stagefright/MediaCodecSource.h>
@@ -117,6 +121,11 @@
       mAudioSource((audio_source_t)AUDIO_SOURCE_CNT), // initialize with invalid value
       mPrivacySensitive(PRIVACY_SENSITIVE_DEFAULT),
       mVideoSource(VIDEO_SOURCE_LIST_END),
+      mRTPCVOExtMap(-1),
+      mRTPCVODegrees(0),
+      mRTPSockDscp(0),
+      mRTPSockNetwork(0),
+      mLastSeqNo(0),
       mStarted(false),
       mSelectedDeviceId(AUDIO_PORT_HANDLE_NONE),
       mDeviceCallbackEnabled(false),
@@ -567,6 +576,32 @@
     // range that a specific encoder supports. The mismatch between the
     // the target and requested bit rate will NOT be treated as an error.
     mVideoBitRate = bitRate;
+
+    // A new bitrate (TMMBR) should also be applied at runtime if the output format is RTP_AVP.
+    if (mOutputFormat == OUTPUT_FORMAT_RTP_AVP) {
+        // Regular I frames may overload the network, so reduce the bitrate to leave
+        // some margin for I frame overruns.
+        // Still send the requested bitrate (TMMBR) in the reply (TMMBN).
+        const float coefficient = 0.8f;
+        mVideoBitRate = (bitRate * coefficient) / 1000 * 1000;
+    }
+    if (mOutputFormat == OUTPUT_FORMAT_RTP_AVP && mStarted && mPauseStartTimeUs == 0) {
+        mVideoEncoderSource->setEncodingBitrate(mVideoBitRate);
+        ARTPWriter* rtpWriter  = static_cast<ARTPWriter*>(mWriter.get());
+        rtpWriter->setTMMBNInfo(mOpponentID, bitRate);
+    }
+
+    return OK;
+}
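For illustration (not part of the change), a rough worked example of the RTP_AVP scaling above, assuming the 0.8f coefficient shown:

    // Sketch only: with a requested bitRate of 1000000 bps,
    //   (1000000 * 0.8f) / 1000 * 1000 evaluates in floating point to 800000.0f,
    //   so mVideoBitRate ends up at 800000 bps after the integer assignment.
    // The unscaled, requested bitRate is still what is reported via setTMMBNInfo().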
+
+status_t StagefrightRecorder::setParamVideoBitRateMode(int32_t bitRateMode) {
+    ALOGV("setParamVideoBitRateMode: %d", bitRateMode);
+    // TODO: clarify what a bitrate mode of -1 means, since valid modes start from 0.
+    if (bitRateMode < -1) {
+        ALOGE("Unsupported video bitrate mode: %d", bitRateMode);
+        return BAD_VALUE;
+    }
+    mVideoBitRateMode = bitRateMode;
     return OK;
 }
 
@@ -776,6 +811,105 @@
     return OK;
 }
 
+status_t StagefrightRecorder::setParamRtpLocalIp(const String8 &localIp) {
+    ALOGV("setParamVideoLocalIp: %s", localIp.string());
+
+    mLocalIp.setTo(localIp.string());
+    return OK;
+}
+
+status_t StagefrightRecorder::setParamRtpLocalPort(int32_t localPort) {
+    ALOGV("setParamVideoLocalPort: %d", localPort);
+
+    mLocalPort = localPort;
+    return OK;
+}
+
+status_t StagefrightRecorder::setParamRtpRemoteIp(const String8 &remoteIp) {
+    ALOGV("setParamVideoRemoteIp: %s", remoteIp.string());
+
+    mRemoteIp.setTo(remoteIp.string());
+    return OK;
+}
+
+status_t StagefrightRecorder::setParamRtpRemotePort(int32_t remotePort) {
+    ALOGV("setParamVideoRemotePort: %d", remotePort);
+
+    mRemotePort = remotePort;
+    return OK;
+}
+
+status_t StagefrightRecorder::setParamSelfID(int32_t selfID) {
+    ALOGV("setParamSelfID: %x", selfID);
+
+    mSelfID = selfID;
+    return OK;
+}
+
+status_t StagefrightRecorder::setParamVideoOpponentID(int32_t opponentID) {
+    mOpponentID = opponentID;
+    return OK;
+}
+
+status_t StagefrightRecorder::setParamPayloadType(int32_t payloadType) {
+    ALOGV("setParamPayloadType: %d", payloadType);
+
+    mPayloadType = payloadType;
+
+    if (mStarted && mOutputFormat == OUTPUT_FORMAT_RTP_AVP) {
+        mWriter->updatePayloadType(mPayloadType);
+    }
+
+    return OK;
+}
+
+status_t StagefrightRecorder::setRTPCVOExtMap(int32_t extmap) {
+    ALOGV("setRtpCvoExtMap: %d", extmap);
+
+    mRTPCVOExtMap = extmap;
+    return OK;
+}
+
+status_t StagefrightRecorder::setRTPCVODegrees(int32_t cvoDegrees) {
+    Mutex::Autolock autolock(mLock);
+    ALOGV("setRtpCvoDegrees: %d", cvoDegrees);
+
+    mRTPCVODegrees = cvoDegrees;
+
+    if (mStarted && mOutputFormat == OUTPUT_FORMAT_RTP_AVP) {
+        mWriter->updateCVODegrees(mRTPCVODegrees);
+    }
+
+    return OK;
+}
+
+status_t StagefrightRecorder::setParamRtpDscp(int32_t dscp) {
+    ALOGV("setParamRtpDscp: %d", dscp);
+
+    mRTPSockDscp = dscp;
+    return OK;
+}
+
+status_t StagefrightRecorder::setSocketNetwork(int64_t networkHandle) {
+    ALOGV("setSocketNetwork: %llu", (unsigned long long) networkHandle);
+
+    mRTPSockNetwork = networkHandle;
+    if (mStarted && mOutputFormat == OUTPUT_FORMAT_RTP_AVP) {
+        mWriter->updateSocketNetwork(mRTPSockNetwork);
+    }
+    return OK;
+}
+
+status_t StagefrightRecorder::requestIDRFrame() {
+    status_t ret = BAD_VALUE;
+    if (mVideoEncoderSource != NULL) {
+        ret = mVideoEncoderSource->requestIDRFrame();
+    } else {
+        ALOGV("requestIDRFrame: Encoder not ready");
+    }
+    return ret;
+}
+
 status_t StagefrightRecorder::setParameter(
         const String8 &key, const String8 &value) {
     ALOGV("setParameter: key (%s) => value (%s)", key.string(), value.string());
@@ -844,6 +978,11 @@
         if (safe_strtoi32(value.string(), &video_bitrate)) {
             return setParamVideoEncodingBitRate(video_bitrate);
         }
+    } else if (key == "video-param-bitrate-mode") {
+        int32_t video_bitrate_mode;
+        if (safe_strtoi32(value.string(), &video_bitrate_mode)) {
+            return setParamVideoBitRateMode(video_bitrate_mode);
+        }
     } else if (key == "video-param-rotation-angle-degrees") {
         int32_t degrees;
         if (safe_strtoi32(value.string(), &degrees)) {
@@ -884,6 +1023,61 @@
         if (safe_strtod(value.string(), &fps)) {
             return setParamCaptureFps(fps);
         }
+    } else if (key == "rtp-param-local-ip") {
+        return setParamRtpLocalIp(value);
+    } else if (key == "rtp-param-local-port") {
+        int32_t localPort;
+        if (safe_strtoi32(value.string(), &localPort)) {
+            return setParamRtpLocalPort(localPort);
+        }
+    } else if (key == "rtp-param-remote-ip") {
+        return setParamRtpRemoteIp(value);
+    } else if (key == "rtp-param-remote-port") {
+        int32_t remotePort;
+        if (safe_strtoi32(value.string(), &remotePort)) {
+            return setParamRtpRemotePort(remotePort);
+        }
+    } else if (key == "rtp-param-self-id") {
+        int32_t selfID;
+        int64_t temp;
+        if (safe_strtoi64(value.string(), &temp)) {
+            selfID = static_cast<int32_t>(temp);
+            return setParamSelfID(selfID);
+        }
+    } else if (key == "rtp-param-opponent-id") {
+        int32_t opnId;
+        int64_t temp;
+        if (safe_strtoi64(value.string(), &temp)) {
+            opnId = static_cast<int32_t>(temp);
+            return setParamVideoOpponentID(opnId);
+        }
+    } else if (key == "rtp-param-payload-type") {
+        int32_t payloadType;
+        if (safe_strtoi32(value.string(), &payloadType)) {
+            return setParamPayloadType(payloadType);
+        }
+    } else if (key == "rtp-param-ext-cvo-extmap") {
+        int32_t extmap;
+        if (safe_strtoi32(value.string(), &extmap)) {
+            return setRTPCVOExtMap(extmap);
+        }
+    } else if (key == "rtp-param-ext-cvo-degrees") {
+        int32_t degrees;
+        if (safe_strtoi32(value.string(), &degrees)) {
+            return setRTPCVODegrees(degrees);
+        }
+    } else if (key == "video-param-request-i-frame") {
+        return requestIDRFrame();
+    } else if (key == "rtp-param-set-socket-dscp") {
+        int32_t dscp;
+        if (safe_strtoi32(value.string(), &dscp)) {
+            return setParamRtpDscp(dscp);
+        }
+    } else if (key == "rtp-param-set-socket-network") {
+        int64_t networkHandle;
+        if (safe_strtoi64(value.string(), &networkHandle)) {
+            return setSocketNetwork(networkHandle);
+        }
     } else {
         ALOGE("setParameter: failed to find key %s", key.string());
     }
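For reference (illustrative only), the RTP keys handled above are typically delivered through setParameters() as a single semicolon-separated string; a hypothetical example with placeholder values:

    rtp-param-local-ip=192.0.2.1;rtp-param-local-port=50000;rtp-param-remote-ip=192.0.2.2;rtp-param-remote-port=50002;rtp-param-payload-type=96;rtp-param-ext-cvo-extmap=4;rtp-param-set-socket-dscp=40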
@@ -1050,6 +1244,17 @@
             sp<MetaData> meta = new MetaData;
             int64_t startTimeUs = systemTime() / 1000;
             meta->setInt64(kKeyTime, startTimeUs);
+            meta->setInt32(kKeySelfID, mSelfID);
+            meta->setInt32(kKeyPayloadType, mPayloadType);
+            meta->setInt64(kKeySocketNetwork, mRTPSockNetwork);
+            if (mRTPCVOExtMap > 0) {
+                meta->setInt32(kKeyRtpExtMap, mRTPCVOExtMap);
+                meta->setInt32(kKeyRtpCvoDegrees, mRTPCVODegrees);
+            }
+            if (mRTPSockDscp > 0) {
+                meta->setInt32(kKeyRtpDscp, mRTPSockDscp);
+            }
+
             status = mWriter->start(meta.get());
             break;
         }
@@ -1113,7 +1318,7 @@
     if (mPrivacySensitive == PRIVACY_SENSITIVE_DEFAULT) {
         if (attr.source == AUDIO_SOURCE_VOICE_COMMUNICATION
                 || attr.source == AUDIO_SOURCE_CAMCORDER) {
-            attr.flags |= AUDIO_FLAG_CAPTURE_PRIVATE;
+            attr.flags = static_cast<audio_flags_mask_t>(attr.flags | AUDIO_FLAG_CAPTURE_PRIVATE);
             mPrivacySensitive = PRIVACY_SENSITIVE_ENABLED;
         } else {
             mPrivacySensitive = PRIVACY_SENSITIVE_DISABLED;
@@ -1129,7 +1334,7 @@
             return NULL;
         }
         if (mPrivacySensitive == PRIVACY_SENSITIVE_ENABLED) {
-            attr.flags |= AUDIO_FLAG_CAPTURE_PRIVATE;
+            attr.flags = static_cast<audio_flags_mask_t>(attr.flags | AUDIO_FLAG_CAPTURE_PRIVATE);
         }
     }
 
@@ -1330,7 +1535,7 @@
         mVideoEncoderSource = source;
     }
 
-    mWriter = new ARTPWriter(mOutputFd);
+    mWriter = new ARTPWriter(mOutputFd, mLocalIp, mLocalPort, mRemoteIp, mRemotePort, mLastSeqNo);
     mWriter->addSource(source);
     mWriter->setListener(mListener);
 
@@ -1784,7 +1989,13 @@
         }
     }
 
+    if (mOutputFormat == OUTPUT_FORMAT_RTP_AVP) {
+        // This indicates that a raw image provided to encoder needs to be rotated.
+        format->setInt32("rotation-degrees", mRotationDegrees);
+    }
+
     format->setInt32("bitrate", mVideoBitRate);
+    format->setInt32("bitrate-mode", mVideoBitRateMode);
     format->setInt32("frame-rate", mFrameRate);
     format->setInt32("i-frame-interval", mIFramesIntervalSec);
 
@@ -2130,6 +2341,7 @@
 
     if (mWriter != NULL) {
         err = mWriter->stop();
+        mLastSeqNo = mWriter->getSequenceNum();
         mWriter.clear();
     }
 
@@ -2206,6 +2418,8 @@
     mVideoHeight   = 144;
     mFrameRate     = -1;
     mVideoBitRate  = 192000;
+    // Following MediaCodec's default
+    mVideoBitRateMode = BITRATE_MODE_VBR;
     mSampleRate    = 8000;
     mAudioChannels = 1;
     mAudioBitRate  = 12200;
diff --git a/media/libmediaplayerservice/StagefrightRecorder.h b/media/libmediaplayerservice/StagefrightRecorder.h
index a725bee..0362edd 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.h
+++ b/media/libmediaplayerservice/StagefrightRecorder.h
@@ -119,6 +119,7 @@
     int32_t mVideoWidth, mVideoHeight;
     int32_t mFrameRate;
     int32_t mVideoBitRate;
+    int32_t mVideoBitRateMode;
     int32_t mAudioBitRate;
     int32_t mAudioChannels;
     int32_t mSampleRate;
@@ -138,6 +139,18 @@
     int32_t mLongitudex10000;
     int32_t mStartTimeOffsetMs;
     int32_t mTotalBitRate;
+    String8 mLocalIp;
+    String8 mRemoteIp;
+    int32_t mLocalPort;
+    int32_t mRemotePort;
+    int32_t mSelfID;
+    int32_t mOpponentID;
+    int32_t mPayloadType;
+    int32_t mRTPCVOExtMap;
+    int32_t mRTPCVODegrees;
+    int32_t mRTPSockDscp;
+    int64_t mRTPSockNetwork;
+    uint32_t mLastSeqNo;
 
     int64_t mDurationRecordedUs;
     int64_t mStartedRecordingUs;
@@ -205,6 +218,7 @@
     status_t setParamCaptureFpsEnable(int32_t timeLapseEnable);
     status_t setParamCaptureFps(double fps);
     status_t setParamVideoEncodingBitRate(int32_t bitRate);
+    status_t setParamVideoBitRateMode(int32_t bitRateMode);
     status_t setParamVideoIFramesInterval(int32_t seconds);
     status_t setParamVideoEncoderProfile(int32_t profile);
     status_t setParamVideoEncoderLevel(int32_t level);
@@ -219,6 +233,18 @@
     status_t setParamMovieTimeScale(int32_t timeScale);
     status_t setParamGeoDataLongitude(int64_t longitudex10000);
     status_t setParamGeoDataLatitude(int64_t latitudex10000);
+    status_t setParamRtpLocalIp(const String8 &localIp);
+    status_t setParamRtpLocalPort(int32_t localPort);
+    status_t setParamRtpRemoteIp(const String8 &remoteIp);
+    status_t setParamRtpRemotePort(int32_t remotePort);
+    status_t setParamSelfID(int32_t selfID);
+    status_t setParamVideoOpponentID(int32_t opponentID);
+    status_t setParamPayloadType(int32_t payloadType);
+    status_t setRTPCVOExtMap(int32_t extmap);
+    status_t setRTPCVODegrees(int32_t cvoDegrees);
+    status_t setParamRtpDscp(int32_t dscp);
+    status_t setSocketNetwork(int64_t networkHandle);
+    status_t requestIDRFrame();
     void clipVideoBitRate();
     void clipVideoFrameRate();
     void clipVideoFrameWidth();
diff --git a/media/libmediaplayerservice/include/MediaPlayerInterface.h b/media/libmediaplayerservice/include/MediaPlayerInterface.h
index 436cb31..8d94698 100644
--- a/media/libmediaplayerservice/include/MediaPlayerInterface.h
+++ b/media/libmediaplayerservice/include/MediaPlayerInterface.h
@@ -60,7 +60,7 @@
 #define DEFAULT_AUDIOSINK_SAMPLERATE 44100
 
 // when the channel mask isn't known, use the channel count to derive a mask in AudioSink::open()
-#define CHANNEL_MASK_USE_CHANNEL_ORDER 0
+#define CHANNEL_MASK_USE_CHANNEL_ORDER AUDIO_CHANNEL_NONE
 
 // duration below which we do not allow deep audio buffering
 #define AUDIO_SINK_MIN_DEEP_BUFFER_DURATION_US 5000000
@@ -183,6 +183,10 @@
         return INVALID_OPERATION;
     }
 
+    virtual status_t    setDataSource(const String8& /* rtpParams */) {
+        return INVALID_OPERATION;
+    }
+
     // pass the buffered IGraphicBufferProducer to the media player service
     virtual status_t    setVideoSurfaceTexture(
                                 const sp<IGraphicBufferProducer>& bufferProducer) = 0;
diff --git a/media/libmediaplayerservice/nuplayer/AWakeLock.cpp b/media/libmediaplayerservice/nuplayer/AWakeLock.cpp
index 684ba2e..7bee002 100644
--- a/media/libmediaplayerservice/nuplayer/AWakeLock.cpp
+++ b/media/libmediaplayerservice/nuplayer/AWakeLock.cpp
@@ -52,18 +52,19 @@
             if (binder == NULL) {
                 ALOGW("could not get the power manager service");
             } else {
-                mPowerManager = interface_cast<IPowerManager>(binder);
+                mPowerManager = interface_cast<os::IPowerManager>(binder);
                 binder->linkToDeath(mDeathRecipient);
             }
         }
         if (mPowerManager != NULL) {
             sp<IBinder> binder = new BBinder();
             int64_t token = IPCThreadState::self()->clearCallingIdentity();
-            status_t status = mPowerManager->acquireWakeLock(
-                    POWERMANAGER_PARTIAL_WAKE_LOCK,
-                    binder, String16("AWakeLock"), String16("media"));
+            binder::Status status = mPowerManager->acquireWakeLock(
+                    binder, POWERMANAGER_PARTIAL_WAKE_LOCK,
+                    String16("AWakeLock"), String16("media"),
+                    {} /* workSource */, {} /* historyTag */);
             IPCThreadState::self()->restoreCallingIdentity(token);
-            if (status == NO_ERROR) {
+            if (status.isOk()) {
                 mWakeLockToken = binder;
                 mWakeLockCount++;
                 return true;
diff --git a/media/libmediaplayerservice/nuplayer/AWakeLock.h b/media/libmediaplayerservice/nuplayer/AWakeLock.h
index 323e7d7..8aa3b41 100644
--- a/media/libmediaplayerservice/nuplayer/AWakeLock.h
+++ b/media/libmediaplayerservice/nuplayer/AWakeLock.h
@@ -18,7 +18,7 @@
 #define A_WAKELOCK_H_
 
 #include <media/stagefright/foundation/ABase.h>
-#include <powermanager/IPowerManager.h>
+#include <android/os/IPowerManager.h>
 #include <utils/RefBase.h>
 
 namespace android {
@@ -37,7 +37,7 @@
     virtual ~AWakeLock();
 
 private:
-    sp<IPowerManager> mPowerManager;
+    sp<os::IPowerManager> mPowerManager;
     sp<IBinder>       mWakeLockToken;
     uint32_t          mWakeLockCount;
 
diff --git a/media/libmediaplayerservice/nuplayer/Android.bp b/media/libmediaplayerservice/nuplayer/Android.bp
index 32c97cf..f5e44c7 100644
--- a/media/libmediaplayerservice/nuplayer/Android.bp
+++ b/media/libmediaplayerservice/nuplayer/Android.bp
@@ -14,6 +14,7 @@
         "NuPlayerRenderer.cpp",
         "NuPlayerStreamListener.cpp",
         "RTSPSource.cpp",
+        "RTPSource.cpp",
         "StreamingSource.cpp",
     ],
 
@@ -30,6 +31,7 @@
         "frameworks/av/media/libstagefright/mpeg2ts",
         "frameworks/av/media/libstagefright/rtsp",
         "frameworks/av/media/libstagefright/timedtext",
+        "frameworks/native/include/android",
     ],
 
     cflags: [
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index c1c4b55..47362ef 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -31,6 +31,7 @@
 #include "NuPlayerDriver.h"
 #include "NuPlayerRenderer.h"
 #include "NuPlayerSource.h"
+#include "RTPSource.h"
 #include "RTSPSource.h"
 #include "StreamingSource.h"
 #include "GenericSource.h"
@@ -368,6 +369,18 @@
     return err;
 }
 
+void NuPlayer::setDataSourceAsync(const String8& rtpParams) {
+    ALOGD("setDataSourceAsync for RTP = %s", rtpParams.string());
+    sp<AMessage> msg = new AMessage(kWhatSetDataSource, this);
+
+    sp<AMessage> notify = new AMessage(kWhatSourceNotify, this);
+    sp<Source> source = new RTPSource(notify, rtpParams);
+
+    msg->setObject("source", source);
+    msg->post();
+    mDataSourceType = DATA_SOURCE_TYPE_RTP;
+}
+
 void NuPlayer::prepareAsync() {
     ALOGV("prepareAsync");
 
@@ -1689,6 +1702,12 @@
     updateRebufferingTimer(false /* stopping */, false /* exiting */);
 }
 
+void NuPlayer::setTargetBitrate(int bitrate) {
+    if (mSource != NULL) {
+        mSource->setTargetBitrate(bitrate);
+    }
+}
+
 void NuPlayer::onPause() {
 
     updatePlaybackTimer(true /* stopping */, "onPause");
@@ -1915,6 +1934,11 @@
 
     format->setInt32("priority", 0 /* realtime */);
 
+    if (mDataSourceType == DATA_SOURCE_TYPE_RTP) {
+        ALOGV("instantiateDecoder: set decoder error free on stream corrupt.");
+        format->setInt32("corrupt-free", true);
+    }
+
     if (!audio) {
         AString mime;
         CHECK(format->findString("mime", &mime));
@@ -2715,6 +2739,14 @@
             break;
         }
 
+        case Source::kWhatIMSRxNotice:
+        {
+            sp<AMessage> IMSRxNotice;
+            CHECK(msg->findMessage("message", &IMSRxNotice));
+            sendIMSRxNotice(IMSRxNotice);
+            break;
+        }
+
         default:
             TRESPASS();
     }
@@ -2817,11 +2849,74 @@
     }
 }
 
+void NuPlayer::sendIMSRxNotice(const sp<AMessage> &msg) {
+    int32_t payloadType;
+
+    CHECK(msg->findInt32("payload-type", &payloadType));
+
+    Parcel in;
+    in.writeInt32(payloadType);
+
+    switch (payloadType) {
+        case NuPlayer::RTPSource::RTCP_TSFB:   // RTCP TSFB
+        case NuPlayer::RTPSource::RTCP_PSFB:   // RTCP PSFB
+        case NuPlayer::RTPSource::RTP_AUTODOWN:
+        {
+            int32_t feedbackType, id;
+            CHECK(msg->findInt32("feedback-type", &feedbackType));
+            CHECK(msg->findInt32("sender", &id));
+            in.writeInt32(feedbackType);
+            in.writeInt32(id);
+            if (payloadType == NuPlayer::RTPSource::RTCP_TSFB) {
+                int32_t bitrate;
+                CHECK(msg->findInt32("bit-rate", &bitrate));
+                in.writeInt32(bitrate);
+            }
+            break;
+        }
+        case NuPlayer::RTPSource::RTP_QUALITY:
+        {
+            int32_t feedbackType, bitrate;
+            int32_t highestSeqNum, baseSeqNum, prevExpected;
+            int32_t numBufRecv, prevNumBufRecv;
+            CHECK(msg->findInt32("feedback-type", &feedbackType));
+            CHECK(msg->findInt32("bit-rate", &bitrate));
+            CHECK(msg->findInt32("highest-seq-num", &highestSeqNum));
+            CHECK(msg->findInt32("base-seq-num", &baseSeqNum));
+            CHECK(msg->findInt32("prev-expected", &prevExpected));
+            CHECK(msg->findInt32("num-buf-recv", &numBufRecv));
+            CHECK(msg->findInt32("prev-num-buf-recv", &prevNumBufRecv));
+            in.writeInt32(feedbackType);
+            in.writeInt32(bitrate);
+            in.writeInt32(highestSeqNum);
+            in.writeInt32(baseSeqNum);
+            in.writeInt32(prevExpected);
+            in.writeInt32(numBufRecv);
+            in.writeInt32(prevNumBufRecv);
+            break;
+        }
+        case NuPlayer::RTPSource::RTP_CVO:
+        {
+            int32_t cvo;
+            CHECK(msg->findInt32("cvo", &cvo));
+            in.writeInt32(cvo);
+            break;
+        }
+        default:
+        break;
+    }
+
+    notifyListener(MEDIA_IMS_RX_NOTICE, 0, 0, &in);
+}
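A receiving-side sketch (illustrative only; onImsRxNotice is a hypothetical helper) that unpacks the RTP_QUALITY notice written above by reading the fields back in the order they were written:

    void onImsRxNotice(const Parcel &in) {
        int32_t payloadType = in.readInt32();
        if (payloadType == NuPlayer::RTPSource::RTP_QUALITY) {
            int32_t feedbackType   = in.readInt32();
            int32_t bitrate        = in.readInt32();
            int32_t highestSeqNum  = in.readInt32();
            int32_t baseSeqNum     = in.readInt32();
            int32_t prevExpected   = in.readInt32();
            int32_t numBufRecv     = in.readInt32();
            int32_t prevNumBufRecv = in.readInt32();
            // consume the quality statistics as needed ...
        }
    }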
+
 const char *NuPlayer::getDataSourceType() {
     switch (mDataSourceType) {
         case DATA_SOURCE_TYPE_HTTP_LIVE:
             return "HTTPLive";
 
+        case DATA_SOURCE_TYPE_RTP:
+            return "RTP";
+
         case DATA_SOURCE_TYPE_RTSP:
             return "RTSP";
 
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.h b/media/libmediaplayerservice/nuplayer/NuPlayer.h
index ef4354c..adb7075 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.h
@@ -51,6 +51,8 @@
 
     void setDataSourceAsync(const sp<DataSource> &source);
 
+    void setDataSourceAsync(const String8& rtpParams);
+
     status_t getBufferingSettings(BufferingSettings* buffering /* nonnull */);
     status_t setBufferingSettings(const BufferingSettings& buffering);
 
@@ -100,6 +102,8 @@
 
     void updateInternalTimers();
 
+    void setTargetBitrate(int bitrate /* bps */);
+
 protected:
     virtual ~NuPlayer();
 
@@ -117,6 +121,7 @@
     struct GenericSource;
     struct HTTPLiveSource;
     struct Renderer;
+    struct RTPSource;
     struct RTSPSource;
     struct StreamingSource;
     struct Action;
@@ -257,6 +262,7 @@
     typedef enum {
         DATA_SOURCE_TYPE_NONE,
         DATA_SOURCE_TYPE_HTTP_LIVE,
+        DATA_SOURCE_TYPE_RTP,
         DATA_SOURCE_TYPE_RTSP,
         DATA_SOURCE_TYPE_GENERIC_URL,
         DATA_SOURCE_TYPE_GENERIC_FD,
@@ -334,6 +340,7 @@
     void sendSubtitleData(const sp<ABuffer> &buffer, int32_t baseIndex);
     void sendTimedMetaData(const sp<ABuffer> &buffer);
     void sendTimedTextData(const sp<ABuffer> &buffer);
+    void sendIMSRxNotice(const sp<AMessage> &msg);
 
     void writeTrackInfo(Parcel* reply, const sp<AMessage>& format) const;
 
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
index f734439..8628edc 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
@@ -1050,7 +1050,7 @@
         uint32_t flags = 0;
         CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
 
-        int32_t eos, csd;
+        int32_t eos, csd, cvo;
         // we do not expect SYNCFRAME for decoder
         if (buffer->meta()->findInt32("eos", &eos) && eos) {
             flags |= MediaCodec::BUFFER_FLAG_EOS;
@@ -1058,6 +1058,24 @@
             flags |= MediaCodec::BUFFER_FLAG_CODECCONFIG;
         }
 
+        if (buffer->meta()->findInt32("cvo", (int32_t*)&cvo)) {
+            ALOGV("[%s] cvo(%d) found at %lld us", mComponentName.c_str(), cvo, (long long)timeUs);
+            switch (cvo) {
+                case 0:
+                    codecBuffer->meta()->setInt32("cvo", MediaCodec::CVO_DEGREE_0);
+                    break;
+                case 1:
+                    codecBuffer->meta()->setInt32("cvo", MediaCodec::CVO_DEGREE_90);
+                    break;
+                case 2:
+                    codecBuffer->meta()->setInt32("cvo", MediaCodec::CVO_DEGREE_180);
+                    break;
+                case 3:
+                    codecBuffer->meta()->setInt32("cvo", MediaCodec::CVO_DEGREE_270);
+                    break;
+            }
+        }
+
         // Modular DRM
         MediaBufferBase *mediaBuf = NULL;
         NuPlayerDrm::CryptoInfo *cryptInfo = NULL;
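The switch above maps the compact CVO rotation values (0-3) onto MediaCodec's CVO_DEGREE_* constants; an equivalent table-driven sketch (illustrative only, same behavior) would be:

    static const int32_t kCvoDegrees[] = {
        MediaCodec::CVO_DEGREE_0,
        MediaCodec::CVO_DEGREE_90,
        MediaCodec::CVO_DEGREE_180,
        MediaCodec::CVO_DEGREE_270,
    };
    if (cvo >= 0 && cvo <= 3) {
        codecBuffer->meta()->setInt32("cvo", kCvoDegrees[cvo]);
    }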
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
index dc144b2..2a50fc2 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
@@ -218,6 +218,26 @@
     return mAsyncResult;
 }
 
+status_t NuPlayerDriver::setDataSource(const String8& rtpParams) {
+    ALOGV("setDataSource(%p) rtp source", this);
+    Mutex::Autolock autoLock(mLock);
+
+    if (mState != STATE_IDLE) {
+        return INVALID_OPERATION;
+    }
+
+    mState = STATE_SET_DATASOURCE_PENDING;
+
+    mPlayer->setDataSourceAsync(rtpParams);
+
+    while (mState == STATE_SET_DATASOURCE_PENDING) {
+        mCondition.wait(mLock);
+    }
+
+    return mAsyncResult;
+}
+
 status_t NuPlayerDriver::setVideoSurfaceTexture(
         const sp<IGraphicBufferProducer> &bufferProducer) {
     ALOGV("setVideoSurfaceTexture(%p)", this);
@@ -797,7 +817,11 @@
 }
 
 status_t NuPlayerDriver::setParameter(
-        int /* key */, const Parcel & /* request */) {
+        int key, const Parcel &request) {
+    if (key == KEY_PARAMETER_RTP_ATTRIBUTES) {
+        mPlayer->setTargetBitrate(request.readInt32());
+        return OK;
+    }
     return INVALID_OPERATION;
 }
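A caller-side sketch of the new KEY_PARAMETER_RTP_ATTRIBUTES path (illustrative only; driver is a NuPlayerDriver instance and the 1000000 bps value is a placeholder):

    Parcel request;
    request.writeInt32(1000000);   // target bitrate in bps
    request.setDataPosition(0);    // rewind so readInt32() above sees the value
    driver->setParameter(KEY_PARAMETER_RTP_ATTRIBUTES, request);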
 
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
index f4b1968..55a0fad 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
@@ -43,6 +43,8 @@
 
     virtual status_t setDataSource(const sp<DataSource>& dataSource);
 
+    virtual status_t setDataSource(const String8& rtpParams);
+
     virtual status_t setVideoSurfaceTexture(
             const sp<IGraphicBufferProducer> &bufferProducer);
 
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
index c30f048..6a8c708 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
@@ -922,6 +922,11 @@
             firstEntry = false;
             int64_t mediaTimeUs;
             CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
+            if (mediaTimeUs < 0) {
+                ALOGD("fillAudioBuffer: reset negative media time %.2f secs to zero",
+                       mediaTimeUs / 1E6);
+                mediaTimeUs = 0;
+            }
             ALOGV("fillAudioBuffer: rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
             setAudioFirstAnchorTimeIfNeeded_l(mediaTimeUs);
         }
@@ -1928,11 +1933,12 @@
     int32_t numChannels;
     CHECK(format->findInt32("channel-count", &numChannels));
 
-    int32_t channelMask;
-    if (!format->findInt32("channel-mask", &channelMask)) {
-        // signal to the AudioSink to derive the mask from count.
-        channelMask = CHANNEL_MASK_USE_CHANNEL_ORDER;
-    }
+    int32_t rawChannelMask;
+    audio_channel_mask_t channelMask =
+            format->findInt32("channel-mask", &rawChannelMask) ?
+                    static_cast<audio_channel_mask_t>(rawChannelMask)
+                    // signal to the AudioSink to derive the mask from count.
+                    : CHANNEL_MASK_USE_CHANNEL_ORDER;
 
     int32_t sampleRate;
     CHECK(format->findInt32("sample-rate", &sampleRate));
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h
index f137c52..bf6b539 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h
@@ -58,6 +58,7 @@
         kWhatInstantiateSecureDecoders,
         // Modular DRM
         kWhatDrmInfo,
+        kWhatIMSRxNotice,
     };
 
     // The provides message is used to notify the player about various
@@ -131,6 +132,8 @@
 
     virtual void setOffloadAudio(bool /* offload */) {}
 
+    virtual void setTargetBitrate(int32_t) {}
+
     // Modular DRM
     virtual status_t prepareDrm(
             const uint8_t /*uuid*/[16], const Vector<uint8_t> &/*drmSessionId*/,
diff --git a/media/libmediaplayerservice/nuplayer/RTPSource.cpp b/media/libmediaplayerservice/nuplayer/RTPSource.cpp
new file mode 100644
index 0000000..b1901e8
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/RTPSource.cpp
@@ -0,0 +1,791 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "RTPSource"
+#include <utils/Log.h>
+
+#include "RTPSource.h"
+
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MetaData.h>
+#include <string.h>
+
+namespace android {
+
+const int64_t kNearEOSTimeoutUs = 2000000ll; // 2 secs
+static int32_t kMaxAllowedStaleAccessUnits = 20;
+
+NuPlayer::RTPSource::RTPSource(
+        const sp<AMessage> &notify,
+        const String8& rtpParams)
+    : Source(notify),
+      mRTPParams(rtpParams),
+      mFlags(0),
+      mState(DISCONNECTED),
+      mFinalResult(OK),
+      mBuffering(false),
+      mInPreparationPhase(true),
+      mRTPConn(new ARTPConnection(ARTPConnection::kViLTEConnection)),
+      mEOSTimeoutAudio(0),
+      mEOSTimeoutVideo(0),
+      mLastCVOUpdated(-1) {
+      ALOGD("RTPSource initialized with rtpParams=%s", rtpParams.string());
+}
+
+NuPlayer::RTPSource::~RTPSource() {
+    if (mLooper != NULL) {
+        mLooper->unregisterHandler(id());
+        mLooper->unregisterHandler(mRTPConn->id());
+        mLooper->stop();
+    }
+}
+
+status_t NuPlayer::RTPSource::getBufferingSettings(
+            BufferingSettings* buffering /* nonnull */) {
+    Mutex::Autolock _l(mBufferingSettingsLock);
+    *buffering = mBufferingSettings;
+    return OK;
+}
+
+status_t NuPlayer::RTPSource::setBufferingSettings(const BufferingSettings& buffering) {
+    Mutex::Autolock _l(mBufferingSettingsLock);
+    mBufferingSettings = buffering;
+    return OK;
+}
+
+void NuPlayer::RTPSource::prepareAsync() {
+    if (mLooper == NULL) {
+        mLooper = new ALooper;
+        mLooper->setName("rtp");
+        mLooper->start();
+
+        mLooper->registerHandler(this);
+        mLooper->registerHandler(mRTPConn);
+    }
+
+    CHECK_EQ(mState, (int)DISCONNECTED);
+    mState = CONNECTING;
+
+    setParameters(mRTPParams);
+
+    TrackInfo *info = NULL;
+    unsigned i;
+    for (i = 0; i < mTracks.size(); i++) {
+        info = &mTracks.editItemAt(i);
+
+        if (info == NULL)
+            break;
+
+        AString sdp;
+        ASessionDescription::SDPStringFactory(sdp, info->mLocalIp,
+                info->mIsAudio, info->mLocalPort, info->mPayloadType, info->mAS, info->mCodecName,
+                NULL, info->mWidth, info->mHeight, info->mCVOExtMap);
+        ALOGD("RTPSource SDP =>\n%s", sdp.c_str());
+
+        sp<ASessionDescription> desc = new ASessionDescription;
+        bool isValidSdp = desc->setTo(sdp.c_str(), sdp.size());
+        ALOGV("RTPSource isValidSdp => %d", isValidSdp);
+
+        int sockRtp, sockRtcp;
+        ARTPConnection::MakeRTPSocketPair(&sockRtp, &sockRtcp, info->mLocalIp, info->mRemoteIp,
+                info->mLocalPort, info->mRemotePort, info->mSocketNetwork);
+
+        sp<AMessage> notify = new AMessage('accu', this);
+
+        ALOGV("RTPSource addStream. track-index=%d", i);
+        notify->setSize("trackIndex", i);
+        // index(i) should start from 1; 0 is reserved for [root].
+        mRTPConn->addStream(sockRtp, sockRtcp, desc, i + 1, notify, false);
+        mRTPConn->setSelfID(info->mSelfID);
+        mRTPConn->setJbTime(
+                (info->mJbTimeMs <= 3000 && info->mJbTimeMs >= 40) ? info->mJbTimeMs : 300);
+
+        info->mRTPSocket = sockRtp;
+        info->mRTCPSocket = sockRtcp;
+        info->mFirstSeqNumInSegment = 0;
+        info->mNewSegment = true;
+        info->mAllowedStaleAccessUnits = kMaxAllowedStaleAccessUnits;
+        info->mRTPAnchor = 0;
+        info->mNTPAnchorUs = -1;
+        info->mNormalPlayTimeRTP = 0;
+        info->mNormalPlayTimeUs = 0ll;
+
+        // index(i) should start from 1; 0 is reserved for [root].
+        info->mPacketSource = new APacketSource(desc, i + 1);
+
+        int32_t timeScale;
+        sp<MetaData> format = getTrackFormat(i, &timeScale);
+        sp<AnotherPacketSource> source = new AnotherPacketSource(format);
+
+        if (info->mIsAudio) {
+            mAudioTrack = source;
+            info->mTimeScale = 16000;
+        } else {
+            mVideoTrack = source;
+            info->mTimeScale = 90000;
+        }
+
+        info->mSource = source;
+        info->mRTPTime = 0;
+        info->mNormalPlaytimeUs = 0;
+        info->mNPTMappingValid = false;
+    }
+
+    if (mInPreparationPhase) {
+        mInPreparationPhase = false;
+        notifyPrepared();
+    }
+}
+
+void NuPlayer::RTPSource::start() {
+}
+
+void NuPlayer::RTPSource::pause() {
+    mState = PAUSED;
+}
+
+void NuPlayer::RTPSource::resume() {
+    mState = CONNECTING;
+}
+
+void NuPlayer::RTPSource::stop() {
+    if (mLooper == NULL) {
+        return;
+    }
+    sp<AMessage> msg = new AMessage(kWhatDisconnect, this);
+
+    sp<AMessage> dummy;
+    msg->postAndAwaitResponse(&dummy);
+}
+
+status_t NuPlayer::RTPSource::feedMoreTSData() {
+    Mutex::Autolock _l(mBufferingLock);
+    return mFinalResult;
+}
+
+sp<MetaData> NuPlayer::RTPSource::getFormatMeta(bool audio) {
+    sp<AnotherPacketSource> source = getSource(audio);
+
+    if (source == NULL) {
+        return NULL;
+    }
+
+    return source->getFormat();
+}
+
+bool NuPlayer::RTPSource::haveSufficientDataOnAllTracks() {
+    // We're going to buffer at least 2 secs worth of data on all tracks before
+    // starting playback (both at startup and after a seek).
+
+    static const int64_t kMinDurationUs = 2000000ll;
+
+    int64_t mediaDurationUs = 0;
+    getDuration(&mediaDurationUs);
+    if ((mAudioTrack != NULL && mAudioTrack->isFinished(mediaDurationUs))
+            || (mVideoTrack != NULL && mVideoTrack->isFinished(mediaDurationUs))) {
+        return true;
+    }
+
+    status_t err;
+    int64_t durationUs;
+    if (mAudioTrack != NULL
+            && (durationUs = mAudioTrack->getBufferedDurationUs(&err))
+                    < kMinDurationUs
+            && err == OK) {
+        ALOGV("audio track doesn't have enough data yet. (%.2f secs buffered)",
+              durationUs / 1E6);
+        return false;
+    }
+
+    if (mVideoTrack != NULL
+            && (durationUs = mVideoTrack->getBufferedDurationUs(&err))
+                    < kMinDurationUs
+            && err == OK) {
+        ALOGV("video track doesn't have enough data yet. (%.2f secs buffered)",
+              durationUs / 1E6);
+        return false;
+    }
+
+    return true;
+}
+
+status_t NuPlayer::RTPSource::dequeueAccessUnit(
+        bool audio, sp<ABuffer> *accessUnit) {
+
+    sp<AnotherPacketSource> source = getSource(audio);
+
+    if (mState == PAUSED) {
+        ALOGV("-EWOULDBLOCK");
+        return -EWOULDBLOCK;
+    }
+
+    status_t finalResult;
+    if (!source->hasBufferAvailable(&finalResult)) {
+        if (finalResult == OK) {
+            int64_t mediaDurationUs = 0;
+            getDuration(&mediaDurationUs);
+            sp<AnotherPacketSource> otherSource = getSource(!audio);
+            status_t otherFinalResult;
+
+            // If other source already signaled EOS, this source should also signal EOS
+            if (otherSource != NULL &&
+                    !otherSource->hasBufferAvailable(&otherFinalResult) &&
+                    otherFinalResult == ERROR_END_OF_STREAM) {
+                source->signalEOS(ERROR_END_OF_STREAM);
+                return ERROR_END_OF_STREAM;
+            }
+
+            // If this source has detected near end, give it some time to retrieve more
+            // data before signaling EOS
+            if (source->isFinished(mediaDurationUs)) {
+                int64_t eosTimeout = audio ? mEOSTimeoutAudio : mEOSTimeoutVideo;
+                if (eosTimeout == 0) {
+                    setEOSTimeout(audio, ALooper::GetNowUs());
+                } else if ((ALooper::GetNowUs() - eosTimeout) > kNearEOSTimeoutUs) {
+                    setEOSTimeout(audio, 0);
+                    source->signalEOS(ERROR_END_OF_STREAM);
+                    return ERROR_END_OF_STREAM;
+                }
+                return -EWOULDBLOCK;
+            }
+
+            if (!(otherSource != NULL && otherSource->isFinished(mediaDurationUs))) {
+                // We should not enter buffering mode
+                // if any of the sources has already detected EOS.
+                // TODO: check whether the line below is needed.
+                // startBufferingIfNecessary();
+            }
+
+            return -EWOULDBLOCK;
+        }
+        return finalResult;
+    }
+
+    setEOSTimeout(audio, 0);
+
+    finalResult = source->dequeueAccessUnit(accessUnit);
+    if (finalResult != OK) {
+        return finalResult;
+    }
+
+    int32_t cvo;
+    if ((*accessUnit) != NULL && (*accessUnit)->meta()->findInt32("cvo", &cvo) &&
+            cvo != mLastCVOUpdated) {
+        sp<AMessage> msg = new AMessage();
+        msg->setInt32("payload-type", NuPlayer::RTPSource::RTP_CVO);
+        msg->setInt32("cvo", cvo);
+
+        sp<AMessage> notify = dupNotify();
+        notify->setInt32("what", kWhatIMSRxNotice);
+        notify->setMessage("message", msg);
+        notify->post();
+
+        ALOGV("notify cvo updated (%d)->(%d) to upper layer", mLastCVOUpdated, cvo);
+        mLastCVOUpdated = cvo;
+    }
+
+    return finalResult;
+}
+
+sp<AnotherPacketSource> NuPlayer::RTPSource::getSource(bool audio) {
+    return audio ? mAudioTrack : mVideoTrack;
+}
+
+void NuPlayer::RTPSource::setEOSTimeout(bool audio, int64_t timeout) {
+    if (audio) {
+        mEOSTimeoutAudio = timeout;
+    } else {
+        mEOSTimeoutVideo = timeout;
+    }
+}
+
+status_t NuPlayer::RTPSource::getDuration(int64_t *durationUs) {
+    *durationUs = 0ll;
+
+    int64_t audioDurationUs;
+    if (mAudioTrack != NULL
+            && mAudioTrack->getFormat()->findInt64(
+                kKeyDuration, &audioDurationUs)
+            && audioDurationUs > *durationUs) {
+        *durationUs = audioDurationUs;
+    }
+
+    int64_t videoDurationUs;
+    if (mVideoTrack != NULL
+            && mVideoTrack->getFormat()->findInt64(
+                kKeyDuration, &videoDurationUs)
+            && videoDurationUs > *durationUs) {
+        *durationUs = videoDurationUs;
+    }
+
+    return OK;
+}
+
+status_t NuPlayer::RTPSource::seekTo(int64_t seekTimeUs, MediaPlayerSeekMode mode) {
+    ALOGV("RTPSource::seekTo=%d, mode=%d", (int)seekTimeUs, mode);
+    return OK;
+}
+
+void NuPlayer::RTPSource::schedulePollBuffering() {
+    sp<AMessage> msg = new AMessage(kWhatPollBuffering, this);
+    msg->post(kBufferingPollIntervalUs); // 1 second intervals
+}
+
+void NuPlayer::RTPSource::onPollBuffering() {
+    schedulePollBuffering();
+}
+
+bool NuPlayer::RTPSource::isRealTime() const {
+    ALOGD("RTPSource::isRealTime=%d", true);
+    return true;
+}
+
+void NuPlayer::RTPSource::onMessageReceived(const sp<AMessage> &msg) {
+    ALOGV("onMessageReceived =%d", msg->what());
+
+    switch (msg->what()) {
+        case kWhatAccessUnitComplete:
+        {
+            if (mState == CONNECTING) {
+                mState = CONNECTED;
+            }
+
+            int32_t timeUpdate;
+            //"time-update" raised from ARTPConnection::parseSR()
+            if (msg->findInt32("time-update", &timeUpdate) && timeUpdate) {
+                size_t trackIndex;
+                CHECK(msg->findSize("trackIndex", &trackIndex));
+
+                uint32_t rtpTime;
+                uint64_t ntpTime;
+                CHECK(msg->findInt32("rtp-time", (int32_t *)&rtpTime));
+                CHECK(msg->findInt64("ntp-time", (int64_t *)&ntpTime));
+
+                onTimeUpdate(trackIndex, rtpTime, ntpTime);
+                break;
+            }
+
+            int32_t firstRTCP;
+            if (msg->findInt32("first-rtcp", &firstRTCP)) {
+                // There won't be an access unit here; it's just a notification
+                // that the data communication worked, since we got the first
+                // RTCP packet.
+                ALOGV("first-rtcp");
+                break;
+            }
+
+            int32_t IMSRxNotice;
+            if (msg->findInt32("rtcp-event", &IMSRxNotice)) {
+                int32_t payloadType, feedbackType;
+                CHECK(msg->findInt32("payload-type", &payloadType));
+                CHECK(msg->findInt32("feedback-type", &feedbackType));
+
+                sp<AMessage> notify = dupNotify();
+                notify->setInt32("what", kWhatIMSRxNotice);
+                notify->setMessage("message", msg);
+                notify->post();
+
+                ALOGV("IMSRxNotice \t\t payload : %d feedback : %d",
+                      payloadType, feedbackType);
+                break;
+            }
+
+            size_t trackIndex;
+            CHECK(msg->findSize("trackIndex", &trackIndex));
+
+            sp<ABuffer> accessUnit;
+            if (msg->findBuffer("access-unit", &accessUnit) == false) {
+                break;
+            }
+
+            int32_t damaged;
+            if (accessUnit->meta()->findInt32("damaged", &damaged)
+                    && damaged) {
+                ALOGD("dropping damaged access unit.");
+                break;
+            }
+
+            // Implicitly assert on valid trackIndex here, which we ensure by
+            // never removing tracks.
+            TrackInfo *info = &mTracks.editItemAt(trackIndex);
+
+            sp<AnotherPacketSource> source = info->mSource;
+            if (source != NULL) {
+                uint32_t rtpTime;
+                CHECK(accessUnit->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
+
+                /* AnotherPacketSource asserts if no NTP time is provided;
+                   RTPSource should provide ntpUs at all times.
+                if (!info->mNPTMappingValid) {
+                    // This is a live stream, we didn't receive any normal
+                    // playtime mapping. We won't map to npt time.
+                    source->queueAccessUnit(accessUnit);
+                    break;
+                }
+
+                int64_t nptUs =
+                    ((double)rtpTime - (double)info->mRTPTime)
+                        / info->mTimeScale
+                        * 1000000ll
+                        + info->mNormalPlaytimeUs;
+
+                */
+                accessUnit->meta()->setInt64("timeUs", ALooper::GetNowUs());
+
+                source->queueAccessUnit(accessUnit);
+            }
+
+            break;
+        }
+        case kWhatDisconnect:
+        {
+            sp<AReplyToken> replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+
+            for (size_t i = 0; i < mTracks.size(); ++i) {
+                TrackInfo *info = &mTracks.editItemAt(i);
+
+                if (info->mIsAudio) {
+                    mAudioTrack->signalEOS(ERROR_END_OF_STREAM);
+                    mAudioTrack = NULL;
+                    ALOGV("mAudioTrack disconnected");
+                } else {
+                    mVideoTrack->signalEOS(ERROR_END_OF_STREAM);
+                    mVideoTrack = NULL;
+                    ALOGV("mVideoTrack disconnected");
+                }
+
+                mRTPConn->removeStream(info->mRTPSocket, info->mRTCPSocket);
+                close(info->mRTPSocket);
+                close(info->mRTCPSocket);
+            }
+
+            mTracks.clear();
+            mFirstAccessUnit = true;
+            mAllTracksHaveTime = false;
+            mNTPAnchorUs = -1;
+            mMediaAnchorUs = -1;
+            mLastMediaTimeUs = -1;
+            mNumAccessUnitsReceived = 0;
+            mReceivedFirstRTCPPacket = false;
+            mReceivedFirstRTPPacket = false;
+            mPausing = false;
+            mPauseGeneration = 0;
+
+            (new AMessage)->postReply(replyID);
+
+            break;
+        }
+        case kWhatPollBuffering:
+            break;
+        default:
+            TRESPASS();
+    }
+}
+
+void NuPlayer::RTPSource::setTargetBitrate(int32_t bitrate) {
+    mRTPConn->setTargetBitrate(bitrate);
+}
+
+void NuPlayer::RTPSource::onTimeUpdate(int32_t trackIndex, uint32_t rtpTime, uint64_t ntpTime) {
+    ALOGV("onTimeUpdate track %d, rtpTime = 0x%08x, ntpTime = %#016llx",
+         trackIndex, rtpTime, (long long)ntpTime);
+
+    // convert ntpTime in Q32 seconds to microseconds. Note: this will not lose precision
+    // because ntpTimeUs is at most 52 bits (double holds 53 bits)
+    int64_t ntpTimeUs = (int64_t)(ntpTime * 1E6 / (1ll << 32));
+
+    TrackInfo *track = &mTracks.editItemAt(trackIndex);
+
+    track->mRTPAnchor = rtpTime;
+    track->mNTPAnchorUs = ntpTimeUs;
+
+    if (mNTPAnchorUs < 0) {
+        mNTPAnchorUs = ntpTimeUs;
+        mMediaAnchorUs = mLastMediaTimeUs;
+    }
+
+    if (!mAllTracksHaveTime) {
+        bool allTracksHaveTime = (mTracks.size() > 0);
+        for (size_t i = 0; i < mTracks.size(); ++i) {
+            TrackInfo *track = &mTracks.editItemAt(i);
+            if (track->mNTPAnchorUs < 0) {
+                allTracksHaveTime = false;
+                break;
+            }
+        }
+        if (allTracksHaveTime) {
+            mAllTracksHaveTime = true;
+            ALOGI("Time now established for all tracks.");
+        }
+    }
+    if (mAllTracksHaveTime && dataReceivedOnAllChannels()) {
+            // Time is now established, let's start timestamping immediately.
+        for (size_t i = 0; i < mTracks.size(); ++i) {
+            TrackInfo *trackInfo = &mTracks.editItemAt(i);
+            while (!trackInfo->mPackets.empty()) {
+                sp<ABuffer> accessUnit = *trackInfo->mPackets.begin();
+                trackInfo->mPackets.erase(trackInfo->mPackets.begin());
+
+                if (addMediaTimestamp(i, trackInfo, accessUnit)) {
+                    postQueueAccessUnit(i, accessUnit);
+                }
+            }
+        }
+    }
+}
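A quick worked example of the Q32.32 conversion above (illustrative only):

    // ntpTime carries seconds in Q32.32 fixed point (upper 32 bits = whole seconds).
    // e.g. ntpTime = 0x0000000280000000 represents 2.5 seconds, so
    //      ntpTimeUs = 0x0000000280000000 * 1e6 / 2^32 = 2500000 us.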
+
+bool NuPlayer::RTPSource::addMediaTimestamp(
+        int32_t trackIndex, const TrackInfo *track,
+        const sp<ABuffer> &accessUnit) {
+
+    uint32_t rtpTime;
+    CHECK(accessUnit->meta()->findInt32(
+                "rtp-time", (int32_t *)&rtpTime));
+
+    int64_t relRtpTimeUs =
+        (((int64_t)rtpTime - (int64_t)track->mRTPAnchor) * 1000000ll)
+        / track->mTimeScale;
+
+    int64_t ntpTimeUs = track->mNTPAnchorUs + relRtpTimeUs;
+
+    int64_t mediaTimeUs = mMediaAnchorUs + ntpTimeUs - mNTPAnchorUs;
+
+    if (mediaTimeUs > mLastMediaTimeUs) {
+        mLastMediaTimeUs = mediaTimeUs;
+    }
+
+    if (mediaTimeUs < 0) {
+        ALOGV("dropping early accessUnit.");
+        return false;
+    }
+
+    ALOGV("track %d rtpTime=%u mediaTimeUs = %lld us (%.2f secs)",
+            trackIndex, rtpTime, (long long)mediaTimeUs, mediaTimeUs / 1E6);
+
+    accessUnit->meta()->setInt64("timeUs", mediaTimeUs);
+
+    return true;
+}
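A worked sketch of the timestamp mapping above (illustrative only), assuming a 90 kHz video track whose NTP anchor equals the session anchor and mMediaAnchorUs == 0:

    // rtpTime      = track->mRTPAnchor + 90000        // 90000 ticks at 90 kHz = 1 second
    // relRtpTimeUs = 90000 * 1000000 / 90000          // = 1000000 us
    // ntpTimeUs    = track->mNTPAnchorUs + 1000000
    // mediaTimeUs  = 0 + ntpTimeUs - mNTPAnchorUs     // = 1000000 us (1.00 secs)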
+
+bool NuPlayer::RTPSource::dataReceivedOnAllChannels() {
+    TrackInfo *track;
+    for (size_t i = 0; i < mTracks.size(); ++i) {
+        track = &mTracks.editItemAt(i);
+        if (track->mPackets.empty()) {
+            return false;
+        }
+    }
+    return true;
+}
+
+void NuPlayer::RTPSource::postQueueAccessUnit(
+        size_t trackIndex, const sp<ABuffer> &accessUnit) {
+    sp<AMessage> msg = new AMessage(kWhatAccessUnit, this);
+    msg->setInt32("what", kWhatAccessUnit);
+    msg->setSize("trackIndex", trackIndex);
+    msg->setBuffer("accessUnit", accessUnit);
+    msg->post();
+}
+
+void NuPlayer::RTPSource::postQueueEOS(size_t trackIndex, status_t finalResult) {
+    sp<AMessage> msg = new AMessage(kWhatEOS, this);
+    msg->setInt32("what", kWhatEOS);
+    msg->setSize("trackIndex", trackIndex);
+    msg->setInt32("finalResult", finalResult);
+    msg->post();
+}
+
+sp<MetaData> NuPlayer::RTPSource::getTrackFormat(size_t index, int32_t *timeScale) {
+    CHECK_GE(index, 0u);
+    CHECK_LT(index, mTracks.size());
+
+    const TrackInfo &info = mTracks.itemAt(index);
+
+    *timeScale = info.mTimeScale;
+
+    return info.mPacketSource->getFormat();
+}
+
+void NuPlayer::RTPSource::onConnected() {
+    ALOGV("onConnected");
+    mState = CONNECTED;
+}
+
+void NuPlayer::RTPSource::onDisconnected(const sp<AMessage> &msg) {
+    if (mState == DISCONNECTED) {
+        return;
+    }
+
+    status_t err;
+    CHECK(msg->findInt32("result", &err));
+    CHECK_NE(err, (status_t)OK);
+
+//    mLooper->unregisterHandler(mHandler->id());
+//    mHandler.clear();
+
+    if (mState == CONNECTING) {
+        // We're still in the preparation phase, signal that it
+        // failed.
+        notifyPrepared(err);
+    }
+
+    mState = DISCONNECTED;
+//    setError(err);
+
+}
+
+status_t NuPlayer::RTPSource::setParameter(const String8 &key, const String8 &value) {
+    ALOGV("setParameter: key (%s) => value (%s)", key.string(), value.string());
+
+    bool isAudioKey = key.contains("audio");
+    TrackInfo *info = NULL;
+    for (unsigned i = 0; i < mTracks.size(); ++i) {
+        info = &mTracks.editItemAt(i);
+        if (info != NULL && info->mIsAudio == isAudioKey) {
+            ALOGV("setParameter: %s track (%d) found", isAudioKey ? "audio" : "video" , i);
+            break;
+        }
+    }
+
+    if (info == NULL) {
+        TrackInfo newTrackInfo;
+        newTrackInfo.mIsAudio = isAudioKey;
+        mTracks.push(newTrackInfo);
+        info = &mTracks.editTop();
+        info->mJbTimeMs = 300;
+    }
+
+    if (key == "rtp-param-mime-type") {
+        info->mMimeType = value;
+
+        const char *mime = value.string();
+        const char *delimiter = strchr(mime, '/');
+        info->mCodecName = delimiter ? (delimiter + 1) : "<none>";
+
+        ALOGV("rtp-param-mime-type: mMimeType (%s) => mCodecName (%s)",
+                info->mMimeType.string(), info->mCodecName.string());
+    } else if (key == "video-param-decoder-profile") {
+        info->mCodecProfile = atoi(value);
+    } else if (key == "video-param-decoder-level") {
+        info->mCodecLevel = atoi(value);
+    } else if (key == "video-param-width") {
+        info->mWidth = atoi(value);
+    } else if (key == "video-param-height") {
+        info->mHeight = atoi(value);
+    } else if (key == "rtp-param-local-ip") {
+        info->mLocalIp = value;
+    } else if (key == "rtp-param-local-port") {
+        info->mLocalPort = atoi(value);
+    } else if (key == "rtp-param-remote-ip") {
+        info->mRemoteIp = value;
+    } else if (key == "rtp-param-remote-port") {
+        info->mRemotePort = atoi(value);
+    } else if (key == "rtp-param-payload-type") {
+        info->mPayloadType = atoi(value);
+    } else if (key == "rtp-param-as") {
+        // AS is the guaranteed bit rate negotiated from the SDP.
+        info->mAS = atoi(value);
+    } else if (key == "rtp-param-rtp-timeout") {
+    } else if (key == "rtp-param-rtcp-timeout") {
+    } else if (key == "rtp-param-time-scale") {
+    } else if (key == "rtp-param-self-id") {
+        info->mSelfID = atoi(value);
+    } else if (key == "rtp-param-ext-cvo-extmap") {
+        info->mCVOExtMap = atoi(value);
+    } else if (key == "rtp-param-set-socket-network") {
+        int64_t networkHandle = atoll(value);
+        setSocketNetwork(networkHandle);
+    } else if (key == "rtp-param-jitter-buffer-time") {
+        info->mJbTimeMs = atoi(value);
+    }
+
+    return OK;
+}
+
+status_t NuPlayer::RTPSource::setParameters(const String8 &params) {
+    ALOGV("setParameters: %s", params.string());
+    const char *cparams = params.string();
+    const char *key_start = cparams;
+    for (;;) {
+        const char *equal_pos = strchr(key_start, '=');
+        if (equal_pos == NULL) {
+            ALOGE("Parameters %s miss a value", cparams);
+            return BAD_VALUE;
+        }
+        String8 key(key_start, equal_pos - key_start);
+        TrimString(&key);
+        if (key.length() == 0) {
+            ALOGE("Parameters %s contains an empty key", cparams);
+            return BAD_VALUE;
+        }
+        const char *value_start = equal_pos + 1;
+        const char *semicolon_pos = strchr(value_start, ';');
+        String8 value;
+        if (semicolon_pos == NULL) {
+            value.setTo(value_start);
+        } else {
+            value.setTo(value_start, semicolon_pos - value_start);
+        }
+        if (setParameter(key, value) != OK) {
+            return BAD_VALUE;
+        }
+        if (semicolon_pos == NULL) {
+            break;  // Reaches the end
+        }
+        key_start = semicolon_pos + 1;
+    }
+    return OK;
+}
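For reference (illustrative only, placeholder values), setParameters() above accepts a semicolon-separated key=value list built from the keys handled by setParameter(), e.g.:

    rtp-param-mime-type=video/avc;video-param-width=640;video-param-height=480;rtp-param-local-ip=192.0.2.1;rtp-param-local-port=50000;rtp-param-remote-ip=192.0.2.2;rtp-param-remote-port=50002;rtp-param-payload-type=96;rtp-param-jitter-buffer-time=300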
+
+void NuPlayer::RTPSource::setSocketNetwork(int64_t networkHandle) {
+    ALOGV("setSocketNetwork: %llu", (unsigned long long)networkHandle);
+
+    TrackInfo *info = NULL;
+    for (size_t i = 0; i < mTracks.size(); ++i) {
+        info = &mTracks.editItemAt(i);
+
+        if (info == NULL)
+            break;
+
+        info->mSocketNetwork = networkHandle;
+    }
+}
+
+// Trim both leading and trailing whitespace from the given string.
+// static
+void NuPlayer::RTPSource::TrimString(String8 *s) {
+    size_t num_bytes = s->bytes();
+    const char *data = s->string();
+
+    size_t leading_space = 0;
+    while (leading_space < num_bytes && isspace(data[leading_space])) {
+        ++leading_space;
+    }
+
+    size_t i = num_bytes;
+    while (i > leading_space && isspace(data[i - 1])) {
+        --i;
+    }
+
+    s->setTo(String8(&data[leading_space], i - leading_space));
+}
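A small usage sketch (illustrative only; TrimString is a private helper of RTPSource):

    String8 key("  rtp-param-local-port ");
    TrimString(&key);   // key now holds "rtp-param-local-port"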
+
+}  // namespace android
diff --git a/media/libmediaplayerservice/nuplayer/RTPSource.h b/media/libmediaplayerservice/nuplayer/RTPSource.h
new file mode 100644
index 0000000..fb2d3b9
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/RTPSource.h
@@ -0,0 +1,226 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef RTP_SOURCE_H_
+
+#define RTP_SOURCE_H_
+
+#include <media/stagefright/foundation/ABase.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/Utils.h>
+#include <media/BufferingSettings.h>
+
+#include <utils/KeyedVector.h>
+#include <utils/Vector.h>
+#include <utils/RefBase.h>
+
+#include "AnotherPacketSource.h"
+#include "APacketSource.h"
+#include "ARTPConnection.h"
+#include "ASessionDescription.h"
+#include "NuPlayerSource.h"
+
+
+
+
+
+
+namespace android {
+
+struct ALooper;
+struct AnotherPacketSource;
+
+struct NuPlayer::RTPSource : public NuPlayer::Source {
+    RTPSource(
+            const sp<AMessage> &notify,
+            const String8& rtpParams);
+
+    enum {
+        RTP_FIRST_PACKET = 100,
+        RTCP_FIRST_PACKET = 101,
+        RTP_QUALITY = 102,
+        RTCP_TSFB = 205,
+        RTCP_PSFB = 206,
+        RTP_CVO = 300,
+        RTP_AUTODOWN = 400,
+    };
+
+    virtual status_t getBufferingSettings(
+            BufferingSettings* buffering /* nonnull */) override;
+    virtual status_t setBufferingSettings(const BufferingSettings& buffering) override;
+
+    virtual void prepareAsync();
+    virtual void start();
+    virtual void stop();
+    virtual void pause();
+    virtual void resume();
+
+    virtual status_t feedMoreTSData();
+
+    virtual status_t dequeueAccessUnit(bool audio, sp<ABuffer> *accessUnit);
+
+    virtual status_t getDuration(int64_t *durationUs);
+    virtual status_t seekTo(
+            int64_t seekTimeUs,
+            MediaPlayerSeekMode mode = MediaPlayerSeekMode::SEEK_PREVIOUS_SYNC) override;
+
+    virtual bool isRealTime() const;
+
+    void onMessageReceived(const sp<AMessage> &msg);
+
+    virtual void setTargetBitrate(int32_t bitrate) override;
+
+protected:
+    virtual ~RTPSource();
+
+    virtual sp<MetaData> getFormatMeta(bool audio);
+
+private:
+    enum {
+        kWhatAccessUnit = 'accU',
+        kWhatAccessUnitComplete = 'accu',
+        kWhatDisconnect = 'disc',
+        kWhatEOS = 'eos!',
+        kWhatPollBuffering = 'poll',
+        kWhatSetBufferingSettings = 'sBuS',
+    };
+
+    const int64_t kBufferingPollIntervalUs = 1000000ll;
+
+    enum State {
+        DISCONNECTED,
+        CONNECTING,
+        CONNECTED,
+        PAUSED,
+    };
+
+    struct TrackInfo {
+
+        /* SDP of track */
+        bool mIsAudio;
+        int32_t mPayloadType;
+        String8 mMimeType;
+        String8 mCodecName;
+        int32_t mCodecProfile;
+        int32_t mCodecLevel;
+        int32_t mWidth;
+        int32_t mHeight;
+        String8 mLocalIp;
+        String8 mRemoteIp;
+        int32_t mLocalPort;
+        int32_t mRemotePort;
+        int64_t mSocketNetwork;
+        int32_t mTimeScale;
+        int32_t mAS;
+
+        /* RTP jitter buffer time in milliseconds */
+        uint32_t mJbTimeMs;
+        /* Unique ID identifying this RTP source itself */
+        uint32_t mSelfID;
+        /* extmap:<value> for CVO is stored here */
+        int32_t mCVOExtMap;
+
+        /* a copy of TrackInfo in RTSPSource */
+        sp<AnotherPacketSource> mSource;
+        uint32_t mRTPTime;
+        int64_t mNormalPlaytimeUs;
+        bool mNPTMappingValid;
+
+        /* a copy of TrackInfo in MyHandler.h */
+        int mRTPSocket;
+        int mRTCPSocket;
+        uint32_t mFirstSeqNumInSegment;
+        bool mNewSegment;
+        int32_t mAllowedStaleAccessUnits;
+        uint32_t mRTPAnchor;
+        int64_t mNTPAnchorUs;
+        bool mEOSReceived;
+        uint32_t mNormalPlayTimeRTP;
+        int64_t mNormalPlayTimeUs;
+        sp<APacketSource> mPacketSource;
+        List<sp<ABuffer>> mPackets;
+    };
+
+    const String8 mRTPParams;
+    uint32_t mFlags;
+    State mState;
+    status_t mFinalResult;
+
+    // TODO: check whether the three parameters below are actually needed.
+    Mutex mBufferingLock;
+    bool mBuffering;
+    bool mInPreparationPhase;
+    Mutex mBufferingSettingsLock;
+    BufferingSettings mBufferingSettings;
+
+    sp<ALooper> mLooper;
+
+    sp<ARTPConnection> mRTPConn;
+
+    Vector<TrackInfo> mTracks;
+    sp<AnotherPacketSource> mAudioTrack;
+    sp<AnotherPacketSource> mVideoTrack;
+
+    int64_t mEOSTimeoutAudio;
+    int64_t mEOSTimeoutVideo;
+
+    /* MyHandler.h */
+    bool mFirstAccessUnit;
+    bool mAllTracksHaveTime;
+    int64_t mNTPAnchorUs;
+    int64_t mMediaAnchorUs;
+    int64_t mLastMediaTimeUs;
+    int64_t mNumAccessUnitsReceived;
+    int32_t mLastCVOUpdated;
+    bool mReceivedFirstRTCPPacket;
+    bool mReceivedFirstRTPPacket;
+    bool mPausing;
+    int32_t mPauseGeneration;
+
+    sp<AnotherPacketSource> getSource(bool audio);
+
+    /* MyHandler.h */
+    void onTimeUpdate(int32_t trackIndex, uint32_t rtpTime, uint64_t ntpTime);
+    bool addMediaTimestamp(int32_t trackIndex, const TrackInfo *track,
+            const sp<ABuffer> &accessUnit);
+    bool dataReceivedOnAllChannels();
+    void postQueueAccessUnit(size_t trackIndex, const sp<ABuffer> &accessUnit);
+    void postQueueEOS(size_t trackIndex, status_t finalResult);
+    sp<MetaData> getTrackFormat(size_t index, int32_t *timeScale);
+    void onConnected();
+    void onDisconnected(const sp<AMessage> &msg);
+
+    void schedulePollBuffering();
+    void onPollBuffering();
+
+    bool haveSufficientDataOnAllTracks();
+
+    void setEOSTimeout(bool audio, int64_t timeout);
+
+    status_t setParameters(const String8 &params);
+    status_t setParameter(const String8 &key, const String8 &value);
+    void setSocketNetwork(int64_t networkHandle);
+    static void TrimString(String8 *s);
+
+    DISALLOW_EVIL_CONSTRUCTORS(RTPSource);
+};
+
+}  // namespace android
+
+#endif  // RTP_SOURCE_H_
diff --git a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
index 83da092..9533ae5 100644
--- a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
@@ -146,7 +146,9 @@
     }
 
     // Close socket before posting message to RTSPSource message handler.
-    close(mHandler->getARTSPConnection()->getSocket());
+    if (mHandler != NULL) {
+        close(mHandler->getARTSPConnection()->getSocket());
+    }
 
     sp<AMessage> msg = new AMessage(kWhatDisconnect, this);
 
diff --git a/media/libmediaplayerservice/tests/DrmSessionManager_test.cpp b/media/libmediaplayerservice/tests/DrmSessionManager_test.cpp
index f114046..c81a659 100644
--- a/media/libmediaplayerservice/tests/DrmSessionManager_test.cpp
+++ b/media/libmediaplayerservice/tests/DrmSessionManager_test.cpp
@@ -65,6 +65,14 @@
         return true;
     }
 
+    virtual bool overrideProcessInfo(
+            int /* pid */, int /* procState */, int /* oomScore */) {
+        return true;
+    }
+
+    virtual void removeProcessInfoOverride(int /* pid */) {
+    }
+
 private:
     DISALLOW_EVIL_CONSTRUCTORS(FakeProcessInfo);
 };
diff --git a/media/libmediaplayerservice/tests/stagefrightRecorder/Android.bp b/media/libmediaplayerservice/tests/stagefrightRecorder/Android.bp
new file mode 100644
index 0000000..5a52ea5
--- /dev/null
+++ b/media/libmediaplayerservice/tests/stagefrightRecorder/Android.bp
@@ -0,0 +1,58 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+cc_test {
+    name: "StagefrightRecorderTest",
+    gtest: true,
+
+    srcs: [
+        "StagefrightRecorderTest.cpp",
+    ],
+
+    include_dirs: [
+        "system/media/audio/include",
+        "frameworks/av/include",
+        "frameworks/av/camera/include",
+        "frameworks/av/media/libmediaplayerservice",
+        "frameworks/av/media/libmediametrics/include",
+        "frameworks/av/media/ndk/include",
+    ],
+
+    shared_libs: [
+        "liblog",
+        "libmedia",
+        "libbinder",
+        "libutils",
+        "libmediaplayerservice",
+        "libstagefright",
+        "libmediandk",
+    ],
+
+    compile_multilib: "32",
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+
+    sanitize: {
+        cfi: true,
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+    },
+}
diff --git a/media/libmediaplayerservice/tests/stagefrightRecorder/StagefrightRecorderTest.cpp b/media/libmediaplayerservice/tests/stagefrightRecorder/StagefrightRecorderTest.cpp
new file mode 100644
index 0000000..5751631
--- /dev/null
+++ b/media/libmediaplayerservice/tests/stagefrightRecorder/StagefrightRecorderTest.cpp
@@ -0,0 +1,318 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "StagefrightRecorderTest"
+#include <utils/Log.h>
+
+#include <gtest/gtest.h>
+
+#include <chrono>
+#include <ctime>
+#include <iostream>
+#include <string>
+#include <thread>
+
+#include <MediaPlayerService.h>
+#include <media/NdkMediaExtractor.h>
+#include <media/stagefright/MediaCodec.h>
+#include <system/audio.h>
+
+#include "StagefrightRecorder.h"
+
+#define OUTPUT_INFO_FILE_NAME "/data/local/tmp/stfrecorder_audio.info"
+#define OUTPUT_FILE_NAME_AUDIO "/data/local/tmp/stfrecorder_audio.raw"
+
+const bool kDebug = false;
+constexpr int32_t kMaxLoopCount = 10;
+constexpr int32_t kClipDurationInSec = 4;
+constexpr int32_t kPauseTimeInSec = 2;
+// Tolerance for the extracted clip duration is at most 10% of the total clip duration
+constexpr int32_t kToleranceValueInUs = kClipDurationInSec * 100000;
+
+using namespace android;
+
+class StagefrightRecorderTest
+    : public ::testing::TestWithParam<std::pair<output_format, audio_encoder>> {
+  public:
+    StagefrightRecorderTest() : mStfRecorder(nullptr), mOutputAudioFp(nullptr) {
+        mExpectedDurationInMs = 0;
+        mExpectedPauseInMs = 0;
+    }
+
+    ~StagefrightRecorderTest() {
+        // mStfRecorder is allocated with new in SetUp(), so release it with delete.
+        if (mStfRecorder) delete mStfRecorder;
+        if (mOutputAudioFp) fclose(mOutputAudioFp);
+    }
+
+    void SetUp() override {
+        mStfRecorder = new StagefrightRecorder(String16(LOG_TAG));
+        ASSERT_NE(mStfRecorder, nullptr) << "Failed to create the instance of recorder";
+
+        mOutputAudioFp = fopen(OUTPUT_FILE_NAME_AUDIO, "wb");
+        ASSERT_NE(mOutputAudioFp, nullptr) << "Failed to open output file "
+                                           << OUTPUT_FILE_NAME_AUDIO << " for stagefright recorder";
+
+        int32_t fd = fileno(mOutputAudioFp);
+        ASSERT_GE(fd, 0) << "Failed to get the file descriptor of the output file for "
+                         << OUTPUT_FILE_NAME_AUDIO;
+
+        status_t status = mStfRecorder->setOutputFile(fd);
+        ASSERT_EQ(status, OK) << "Failed to set the output file " << OUTPUT_FILE_NAME_AUDIO
+                              << " for stagefright recorder";
+    }
+
+    void TearDown() override {
+        if (mOutputAudioFp) {
+            fclose(mOutputAudioFp);
+            mOutputAudioFp = nullptr;
+        }
+        if (!kDebug) {
+            int32_t status = remove(OUTPUT_FILE_NAME_AUDIO);
+            ASSERT_EQ(status, 0) << "Unable to delete the output file " << OUTPUT_FILE_NAME_AUDIO;
+        }
+    }
+
+    void setAudioRecorderFormat(output_format outputFormat, audio_encoder encoder,
+                                audio_source_t audioSource = AUDIO_SOURCE_DEFAULT);
+    void recordMedia(bool isPaused = false, int32_t numStart = 0, int32_t numPause = 0);
+    void dumpInfo();
+    void setupExtractor(AMediaExtractor *extractor, int32_t &trackCount);
+    void validateOutput();
+
+    MediaRecorderBase *mStfRecorder;
+    FILE *mOutputAudioFp;
+    double mExpectedDurationInMs;
+    double mExpectedPauseInMs;
+};
+
+void StagefrightRecorderTest::setAudioRecorderFormat(output_format outputFormat,
+                                                     audio_encoder encoder,
+                                                     audio_source_t audioSource) {
+    status_t status = mStfRecorder->setAudioSource(audioSource);
+    ASSERT_EQ(status, OK) << "Failed to set the audio source: " << audioSource;
+
+    status = mStfRecorder->setOutputFormat(outputFormat);
+    ASSERT_EQ(status, OK) << "Failed to set the output format: " << outputFormat;
+
+    status = mStfRecorder->setAudioEncoder(encoder);
+    ASSERT_EQ(status, OK) << "Failed to set the audio encoder: " << encoder;
+}
+
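+// Records audio for kClipDurationInSec seconds. When isPaused is set, the recording is also
+// paused for kPauseTimeInSec seconds, resumed, and continued for another kClipDurationInSec
+// seconds. The extra start()/pause() calls driven by numStart/numPause are issued without
+// asserting their status, to exercise repeated-call handling.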
+void StagefrightRecorderTest::recordMedia(bool isPaused, int32_t numStart, int32_t numPause) {
+    status_t status = mStfRecorder->init();
+    ASSERT_EQ(status, OK) << "Failed to initialize stagefright recorder";
+
+    status = mStfRecorder->prepare();
+    ASSERT_EQ(status, OK) << "Failed to preapre the reorder";
+
+    // first start should succeed.
+    status = mStfRecorder->start();
+    ASSERT_EQ(status, OK) << "Failed to start the recorder";
+
+    for (int32_t count = 0; count < numStart; count++) {
+        status = mStfRecorder->start();
+    }
+
+    auto tStart = std::chrono::high_resolution_clock::now();
+    // Recording media for 4 secs
+    std::this_thread::sleep_for(std::chrono::seconds(kClipDurationInSec));
+    auto tEnd = std::chrono::high_resolution_clock::now();
+    mExpectedDurationInMs = std::chrono::duration<double, std::milli>(tEnd - tStart).count();
+
+    if (isPaused) {
+        // first pause should succeed.
+        status = mStfRecorder->pause();
+        ASSERT_EQ(status, OK) << "Failed to pause the recorder";
+
+        tStart = std::chrono::high_resolution_clock::now();
+        // Paused recorder for 2 secs
+        std::this_thread::sleep_for(std::chrono::seconds(kPauseTimeInSec));
+
+        for (int32_t count = 0; count < numPause; count++) {
+            status = mStfRecorder->pause();
+        }
+
+        tEnd = std::chrono::high_resolution_clock::now();
+        mExpectedPauseInMs = std::chrono::duration<double, std::milli>(tEnd - tStart).count();
+
+        status = mStfRecorder->resume();
+        ASSERT_EQ(status, OK) << "Failed to resume the recorder";
+
+        auto tStart = std::chrono::high_resolution_clock::now();
+        // Recording media for 4 secs
+        std::this_thread::sleep_for(std::chrono::seconds(kClipDurationInSec));
+        auto tEnd = std::chrono::high_resolution_clock::now();
+        mExpectedDurationInMs += std::chrono::duration<double, std::milli>(tEnd - tStart).count();
+    }
+    status = mStfRecorder->stop();
+    ASSERT_EQ(status, OK) << "Failed to stop the recorder";
+}
+
+void StagefrightRecorderTest::dumpInfo() {
+    FILE *dumpOutput = fopen(OUTPUT_INFO_FILE_NAME, "wb");
+    ASSERT_NE(dumpOutput, nullptr) << "Failed to open the dump info file " << OUTPUT_INFO_FILE_NAME;
+    int32_t dumpFd = fileno(dumpOutput);
+    Vector<String16> args;
+    status_t status = mStfRecorder->dump(dumpFd, args);
+    ASSERT_EQ(status, OK) << "Failed to dump the info for the recorder";
+    fclose(dumpOutput);
+}
+
+void StagefrightRecorderTest::setupExtractor(AMediaExtractor *extractor, int32_t &trackCount) {
+    int32_t fd = open(OUTPUT_FILE_NAME_AUDIO, O_RDONLY);
+    ASSERT_GE(fd, 0) << "Failed to open recorder's output file " << OUTPUT_FILE_NAME_AUDIO
+                     << " to validate";
+
+    struct stat buf;
+    int32_t status = fstat(fd, &buf);
+    ASSERT_EQ(status, 0) << "Failed to get properties of input file " << OUTPUT_FILE_NAME_AUDIO
+                         << " for extractor";
+
+    size_t fileSize = buf.st_size;
+    ASSERT_GT(fileSize, 0) << "Size of input file " << OUTPUT_FILE_NAME_AUDIO
+                           << " to extractor cannot be zero";
+    ALOGV("Size of input file to extractor: %zu", fileSize);
+
+    status = AMediaExtractor_setDataSourceFd(extractor, fd, 0, fileSize);
+    ASSERT_EQ(status, AMEDIA_OK) << "Failed to set data source for extractor";
+
+    trackCount = AMediaExtractor_getTrackCount(extractor);
+    ALOGV("Number of tracks reported by extractor : %d", trackCount);
+}
+
+// Validate the recorder's output using the extractor
+void StagefrightRecorderTest::validateOutput() {
+    int32_t trackCount = -1;
+    AMediaExtractor *extractor = AMediaExtractor_new();
+    ASSERT_NE(extractor, nullptr) << "Failed to create extractor";
+    ASSERT_NO_FATAL_FAILURE(setupExtractor(extractor, trackCount));
+    ASSERT_EQ(trackCount, 1) << "Expected 1 track, saw " << trackCount;
+
+    for (int32_t idx = 0; idx < trackCount; idx++) {
+        AMediaExtractor_selectTrack(extractor, idx);
+        AMediaFormat *format = AMediaExtractor_getTrackFormat(extractor, idx);
+        ASSERT_NE(format, nullptr) << "Track format is NULL";
+        ALOGI("Track format = %s", AMediaFormat_toString(format));
+
+        int64_t clipDurationUs;
+        AMediaFormat_getInt64(format, AMEDIAFORMAT_KEY_DURATION, &clipDurationUs);
+        int32_t diff = abs((mExpectedDurationInMs * 1000) - clipDurationUs);
+        ASSERT_LE(diff, kToleranceValueInUs)
+                << "Expected duration: " << (mExpectedDurationInMs * 1000)
+                << " Actual duration: " << clipDurationUs << " Difference: " << diff
+                << " Difference is expected to be less than tolerance value: " << kToleranceValueInUs;
+
+        const char *mime = nullptr;
+        AMediaFormat_getString(format, AMEDIAFORMAT_KEY_MIME, &mime);
+        ASSERT_NE(mime, nullptr) << "Track mime is NULL";
+        ALOGI("Track mime = %s", mime);
+
+        int32_t sampleRate, channelCount, bitRate;
+        AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_CHANNEL_COUNT, &channelCount);
+        ALOGI("Channel count reported by extractor: %d", channelCount);
+        AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_SAMPLE_RATE, &sampleRate);
+        ALOGI("Sample Rate reported by extractor: %d", sampleRate);
+        AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_BIT_RATE, &bitRate);
+        ALOGI("Bit Rate reported by extractor: %d", bitRate);
+    }
+}
+
+TEST_F(StagefrightRecorderTest, RecordingAudioSanityTest) {
+    ASSERT_NO_FATAL_FAILURE(setAudioRecorderFormat(OUTPUT_FORMAT_DEFAULT, AUDIO_ENCODER_DEFAULT));
+
+    int32_t maxAmplitude = -1;
+    status_t status = mStfRecorder->getMaxAmplitude(&maxAmplitude);
+    ASSERT_EQ(maxAmplitude, 0) << "Invalid value of max amplitude";
+
+    ASSERT_NO_FATAL_FAILURE(recordMedia());
+
+    // Verify getMetrics() behavior
+    Parcel parcel;
+    status = mStfRecorder->getMetrics(&parcel);
+    ASSERT_EQ(status, OK) << "Failed to get the parcel from getMetrics";
+    ALOGV("Size of the Parcel returned by getMetrics: %zu", parcel.dataSize());
+    ASSERT_GT(parcel.dataSize(), 0) << "Parcel size reports empty record";
+    ASSERT_NO_FATAL_FAILURE(validateOutput());
+    if (kDebug) {
+        ASSERT_NO_FATAL_FAILURE(dumpInfo());
+    }
+}
+
+TEST_P(StagefrightRecorderTest, MultiFormatAudioRecordTest) {
+    output_format outputFormat = GetParam().first;
+    audio_encoder audioEncoder = GetParam().second;
+    ASSERT_NO_FATAL_FAILURE(setAudioRecorderFormat(outputFormat, audioEncoder));
+    ASSERT_NO_FATAL_FAILURE(recordMedia());
+    // TODO(b/161687761)
+    // Skip for AMR-NB/WB output format
+    if (!(outputFormat == OUTPUT_FORMAT_AMR_NB || outputFormat == OUTPUT_FORMAT_AMR_WB)) {
+        ASSERT_NO_FATAL_FAILURE(validateOutput());
+    }
+    if (kDebug) {
+        ASSERT_NO_FATAL_FAILURE(dumpInfo());
+    }
+}
+
+TEST_F(StagefrightRecorderTest, GetActiveMicrophonesTest) {
+    ASSERT_NO_FATAL_FAILURE(
+            setAudioRecorderFormat(OUTPUT_FORMAT_DEFAULT, AUDIO_ENCODER_DEFAULT, AUDIO_SOURCE_MIC));
+
+    status_t status = mStfRecorder->init();
+    ASSERT_EQ(status, OK) << "Init failed for stagefright recorder";
+
+    status = mStfRecorder->prepare();
+    ASSERT_EQ(status, OK) << "Failed to preapre the reorder";
+
+    status = mStfRecorder->start();
+    ASSERT_EQ(status, OK) << "Failed to start the recorder";
+
+    // Record media for 4 secs
+    std::this_thread::sleep_for(std::chrono::seconds(kClipDurationInSec));
+
+    std::vector<media::MicrophoneInfo> activeMicrophones{};
+    status = mStfRecorder->getActiveMicrophones(&activeMicrophones);
+    ASSERT_EQ(status, OK) << "Failed to get Active Microphones";
+    ASSERT_GT(activeMicrophones.size(), 0) << "No active microphones are found";
+
+    status = mStfRecorder->stop();
+    ASSERT_EQ(status, OK) << "Failed to stop the recorder";
+    if (kDebug) {
+        ASSERT_NO_FATAL_FAILURE(dumpInfo());
+    }
+}
+
+TEST_F(StagefrightRecorderTest, MultiStartPauseTest) {
+    ASSERT_NO_FATAL_FAILURE(setAudioRecorderFormat(OUTPUT_FORMAT_DEFAULT, AUDIO_ENCODER_DEFAULT));
+    ASSERT_NO_FATAL_FAILURE(recordMedia(true, kMaxLoopCount, kMaxLoopCount));
+    ASSERT_NO_FATAL_FAILURE(validateOutput());
+    if (kDebug) {
+        ASSERT_NO_FATAL_FAILURE(dumpInfo());
+    }
+}
+
+INSTANTIATE_TEST_SUITE_P(
+        StagefrightRecorderTestAll, StagefrightRecorderTest,
+        ::testing::Values(std::make_pair(OUTPUT_FORMAT_AMR_NB, AUDIO_ENCODER_AMR_NB),
+                          std::make_pair(OUTPUT_FORMAT_AMR_WB, AUDIO_ENCODER_AMR_WB),
+                          std::make_pair(OUTPUT_FORMAT_AAC_ADTS, AUDIO_ENCODER_AAC),
+                          std::make_pair(OUTPUT_FORMAT_OGG, AUDIO_ENCODER_OPUS)));
+
+int main(int argc, char **argv) {
+    ::testing::InitGoogleTest(&argc, argv);
+    int status = RUN_ALL_TESTS();
+    ALOGV("Test result = %d\n", status);
+    return status;
+}
diff --git a/media/libmediatranscoding/.clang-format b/media/libmediatranscoding/.clang-format
index 3198d00..f23b842 100644
--- a/media/libmediatranscoding/.clang-format
+++ b/media/libmediatranscoding/.clang-format
@@ -26,4 +26,26 @@
 DerivePointerAlignment: false
 IndentWidth: 4
 PointerAlignment: Left
-TabWidth: 4
\ No newline at end of file
+TabWidth: 4
+
+# Deviations from the above file:
+# "Don't indent the section label"
+AccessModifierOffset: -4
+# "Each line of text in your code should be at most 100 columns long."
+ColumnLimit: 100
+# "Constructor initializer lists can be all on one line or with subsequent
+# lines indented eight spaces.". clang-format does not support having the colon
+# on the same line as the constructor function name, so this is the best
+# approximation of that rule, which makes all entries in the list (except the
+# first one) have an eight space indentation.
+ConstructorInitializerIndentWidth: 6
+# There is nothing in go/droidcppstyle about case labels, but there seems to be
+# more code that does not indent the case labels in frameworks/base.
+IndentCaseLabels: false
+# There have been some bugs in which subsequent formatting operations introduce
+# weird comment jumps.
+ReflowComments: false
+# Android supports C++17 now, but it seems only Cpp11 works here at the moment.
+# "Cpp11 is a deprecated alias for Latest" according to
+# https://clang.llvm.org/docs/ClangFormatStyleOptions.html
+Standard: Cpp11
diff --git a/media/libmediatranscoding/Android.bp b/media/libmediatranscoding/Android.bp
index f948bd8..7329c63 100644
--- a/media/libmediatranscoding/Android.bp
+++ b/media/libmediatranscoding/Android.bp
@@ -14,29 +14,49 @@
  * limitations under the License.
  */
 
+filegroup {
+    name: "libmediatranscoding_aidl",
+    srcs: [
+        "aidl/android/media/IMediaTranscodingService.aidl",
+        "aidl/android/media/ITranscodingClient.aidl",
+        "aidl/android/media/ITranscodingClientCallback.aidl",
+        "aidl/android/media/TranscodingErrorCode.aidl",
+        "aidl/android/media/TranscodingSessionPriority.aidl",
+        "aidl/android/media/TranscodingSessionStats.aidl",
+        "aidl/android/media/TranscodingType.aidl",
+        "aidl/android/media/TranscodingVideoCodecType.aidl",
+        "aidl/android/media/TranscodingVideoTrackFormat.aidl",
+        "aidl/android/media/TranscodingSessionParcel.aidl",
+        "aidl/android/media/TranscodingRequestParcel.aidl",
+        "aidl/android/media/TranscodingResultParcel.aidl",
+        "aidl/android/media/TranscodingTestConfig.aidl",
+    ],
+    path: "aidl",
+}
+
 // AIDL interfaces of MediaTranscoding.
 aidl_interface {
     name: "mediatranscoding_aidl_interface",
     unstable: true,
     local_include_dir: "aidl",
-    srcs: [
-        "aidl/android/media/IMediaTranscodingService.aidl",
-        "aidl/android/media/ITranscodingServiceClient.aidl",
-        "aidl/android/media/TranscodingErrorCode.aidl",
-        "aidl/android/media/TranscodingJobPriority.aidl",
-        "aidl/android/media/TranscodingType.aidl",
-        "aidl/android/media/TranscodingVideoCodecType.aidl",
-        "aidl/android/media/TranscodingJobParcel.aidl",
-        "aidl/android/media/TranscodingRequestParcel.aidl",
-        "aidl/android/media/TranscodingResultParcel.aidl",
-    ],
+    srcs: [":libmediatranscoding_aidl"],
+    backend:
+    {
+        java: {
+            enabled: true,
+        },
+    },
 }
 
 cc_library_shared {
     name: "libmediatranscoding",
 
     srcs: [
-        "TranscodingClientManager.cpp"
+        "TranscodingClientManager.cpp",
+        "TranscodingSessionController.cpp",
+        "TranscodingResourcePolicy.cpp",
+        "TranscodingUidPolicy.cpp",
+        "TranscoderWrapper.cpp",
     ],
 
     shared_libs: [
@@ -44,18 +64,30 @@
         "libcutils",
         "liblog",
         "libutils",
+        "libmediatranscoder",
+        "libbinder",
+        "libmediandk",
+    ],
+    export_shared_lib_headers: [
+        "libmediandk",
     ],
 
     export_include_dirs: ["include"],
 
     static_libs: [
         "mediatranscoding_aidl_interface-ndk_platform",
+        "resourcemanager_aidl_interface-ndk_platform",
+        "resourceobserver_aidl_interface-ndk_platform",
     ],
 
     cflags: [
-        "-Werror",
-        "-Wno-error=deprecated-declarations",
         "-Wall",
+        "-Werror",
+        "-Wformat",
+        "-Wno-error=deprecated-declarations",
+        "-Wthread-safety",
+        "-Wunused",
+        "-Wunreachable-code",
     ],
 
     sanitize: {
diff --git a/media/libmediatranscoding/OWNERS b/media/libmediatranscoding/OWNERS
index 02287cb..b08d573 100644
--- a/media/libmediatranscoding/OWNERS
+++ b/media/libmediatranscoding/OWNERS
@@ -1,3 +1,5 @@
-akersten@google.com
+chz@google.com
+gokrishnan@google.com
 hkuang@google.com
 lnilsson@google.com
+pawin@google.com
diff --git a/media/libmediatranscoding/TranscoderWrapper.cpp b/media/libmediatranscoding/TranscoderWrapper.cpp
new file mode 100644
index 0000000..fffbfe9
--- /dev/null
+++ b/media/libmediatranscoding/TranscoderWrapper.cpp
@@ -0,0 +1,507 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "TranscoderWrapper"
+
+#include <aidl/android/media/TranscodingErrorCode.h>
+#include <aidl/android/media/TranscodingRequestParcel.h>
+#include <media/MediaTranscoder.h>
+#include <media/NdkCommon.h>
+#include <media/TranscoderWrapper.h>
+#include <utils/Log.h>
+
+#include <thread>
+
+namespace android {
+using Status = ::ndk::ScopedAStatus;
+using ::aidl::android::media::TranscodingErrorCode;
+using ::aidl::android::media::TranscodingVideoCodecType;
+using ::aidl::android::media::TranscodingVideoTrackFormat;
+
+static TranscodingErrorCode toTranscodingError(media_status_t status) {
+    switch (status) {
+    case AMEDIA_OK:
+        return TranscodingErrorCode::kNoError;
+    case AMEDIACODEC_ERROR_INSUFFICIENT_RESOURCE:  // FALLTHRU
+    case AMEDIACODEC_ERROR_RECLAIMED:
+        return TranscodingErrorCode::kInsufficientResources;
+    case AMEDIA_ERROR_MALFORMED:
+        return TranscodingErrorCode::kMalformed;
+    case AMEDIA_ERROR_UNSUPPORTED:
+        return TranscodingErrorCode::kUnsupported;
+    case AMEDIA_ERROR_INVALID_OBJECT:  // FALLTHRU
+    case AMEDIA_ERROR_INVALID_PARAMETER:
+        return TranscodingErrorCode::kInvalidParameter;
+    case AMEDIA_ERROR_INVALID_OPERATION:
+        return TranscodingErrorCode::kInvalidOperation;
+    case AMEDIA_ERROR_IO:
+        return TranscodingErrorCode::kErrorIO;
+    case AMEDIA_ERROR_UNKNOWN:  // FALLTHRU
+    default:
+        return TranscodingErrorCode::kUnknown;
+    }
+}
+
+static AMediaFormat* getVideoFormat(
+        const char* originalMime,
+        const std::optional<TranscodingVideoTrackFormat>& requestedFormat) {
+    if (requestedFormat == std::nullopt) {
+        return nullptr;
+    }
+
+    AMediaFormat* format = AMediaFormat_new();
+    bool changed = false;
+    if (requestedFormat->codecType == TranscodingVideoCodecType::kHevc &&
+        strcmp(originalMime, AMEDIA_MIMETYPE_VIDEO_HEVC)) {
+        AMediaFormat_setString(format, AMEDIAFORMAT_KEY_MIME, AMEDIA_MIMETYPE_VIDEO_HEVC);
+        changed = true;
+    } else if (requestedFormat->codecType == TranscodingVideoCodecType::kAvc &&
+               strcmp(originalMime, AMEDIA_MIMETYPE_VIDEO_AVC)) {
+        AMediaFormat_setString(format, AMEDIAFORMAT_KEY_MIME, AMEDIA_MIMETYPE_VIDEO_AVC);
+        changed = true;
+    }
+    if (requestedFormat->bitrateBps > 0) {
+        AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_BIT_RATE, requestedFormat->bitrateBps);
+        changed = true;
+    }
+    // TODO: translate other fields from requestedFormat to the format for MediaTranscoder.
+    // Also need to determine more settings to expose in TranscodingVideoTrackFormat.
+    if (!changed) {
+        AMediaFormat_delete(format);
+        // Use null format for passthru.
+        format = nullptr;
+    }
+    return format;
+}
+
+//static
+std::string TranscoderWrapper::toString(const Event& event) {
+    std::string typeStr;
+    switch (event.type) {
+    case Event::Start:
+        typeStr = "Start";
+        break;
+    case Event::Pause:
+        typeStr = "Pause";
+        break;
+    case Event::Resume:
+        typeStr = "Resume";
+        break;
+    case Event::Stop:
+        typeStr = "Stop";
+        break;
+    case Event::Finish:
+        typeStr = "Finish";
+        break;
+    case Event::Error:
+        typeStr = "Error";
+        break;
+    case Event::Progress:
+        typeStr = "Progress";
+        break;
+    default:
+        return "(unknown)";
+    }
+    std::string result;
+    result = "session {" + std::to_string(event.clientId) + "," + std::to_string(event.sessionId) +
+             "}: " + typeStr;
+    if (event.type == Event::Error || event.type == Event::Progress) {
+        result += " " + std::to_string(event.arg);
+    }
+    return result;
+}
+
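+// Forwards MediaTranscoder callbacks to the owning TranscoderWrapper. The owner is held via a
+// weak_ptr, so a callback that arrives after the wrapper is destroyed is simply dropped.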
+class TranscoderWrapper::CallbackImpl : public MediaTranscoder::CallbackInterface {
+public:
+    CallbackImpl(const std::shared_ptr<TranscoderWrapper>& owner, ClientIdType clientId,
+                 SessionIdType sessionId)
+          : mOwner(owner), mClientId(clientId), mSessionId(sessionId) {}
+
+    virtual void onFinished(const MediaTranscoder* transcoder __unused) override {
+        auto owner = mOwner.lock();
+        if (owner != nullptr) {
+            owner->onFinish(mClientId, mSessionId);
+        }
+    }
+
+    virtual void onError(const MediaTranscoder* transcoder __unused,
+                         media_status_t error) override {
+        auto owner = mOwner.lock();
+        if (owner != nullptr) {
+            owner->onError(mClientId, mSessionId, error);
+        }
+    }
+
+    virtual void onProgressUpdate(const MediaTranscoder* transcoder __unused,
+                                  int32_t progress) override {
+        auto owner = mOwner.lock();
+        if (owner != nullptr) {
+            owner->onProgress(mClientId, mSessionId, progress);
+        }
+    }
+
+    virtual void onCodecResourceLost(const MediaTranscoder* transcoder __unused,
+                                     const std::shared_ptr<ndk::ScopedAParcel>& pausedState
+                                             __unused) override {
+        ALOGV("%s: session {%lld, %d}", __FUNCTION__, (long long)mClientId, mSessionId);
+    }
+
+private:
+    std::weak_ptr<TranscoderWrapper> mOwner;
+    ClientIdType mClientId;
+    SessionIdType mSessionId;
+};
+
+TranscoderWrapper::TranscoderWrapper() : mCurrentClientId(0), mCurrentSessionId(-1) {
+    std::thread(&TranscoderWrapper::threadLoop, this).detach();
+}
+
+void TranscoderWrapper::setCallback(const std::shared_ptr<TranscoderCallbackInterface>& cb) {
+    mCallback = cb;
+}
+
+static bool isResourceError(media_status_t err) {
+    return err == AMEDIACODEC_ERROR_RECLAIMED || err == AMEDIACODEC_ERROR_INSUFFICIENT_RESOURCE;
+}
+
+void TranscoderWrapper::reportError(ClientIdType clientId, SessionIdType sessionId,
+                                    media_status_t err) {
+    auto callback = mCallback.lock();
+    if (callback != nullptr) {
+        if (isResourceError(err)) {
+            // Add a placeholder pause state to mPausedStateMap. This is required when resuming.
+            // TODO: remove this when transcoder pause/resume logic is ready. New logic will
+            // no longer use the pause states.
+            auto it = mPausedStateMap.find(SessionKeyType(clientId, sessionId));
+            if (it == mPausedStateMap.end()) {
+                mPausedStateMap.emplace(SessionKeyType(clientId, sessionId),
+                                        new ndk::ScopedAParcel());
+            }
+
+            callback->onResourceLost();
+        } else {
+            callback->onError(clientId, sessionId, toTranscodingError(err));
+        }
+    }
+}
+
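+// The controls below (start/pause/resume/stop) only queue an event; the actual work runs
+// serialized on the worker thread in threadLoop(), and the outcome is reported back through
+// the registered TranscoderCallbackInterface.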
+void TranscoderWrapper::start(ClientIdType clientId, SessionIdType sessionId,
+                              const TranscodingRequestParcel& request,
+                              const std::shared_ptr<ITranscodingClientCallback>& clientCb) {
+    queueEvent(Event::Start, clientId, sessionId, [=] {
+        media_status_t err = handleStart(clientId, sessionId, request, clientCb);
+
+        if (err != AMEDIA_OK) {
+            cleanup();
+            reportError(clientId, sessionId, err);
+        } else {
+            auto callback = mCallback.lock();
+            if (callback != nullptr) {
+                callback->onStarted(clientId, sessionId);
+            }
+        }
+    });
+}
+
+void TranscoderWrapper::pause(ClientIdType clientId, SessionIdType sessionId) {
+    queueEvent(Event::Pause, clientId, sessionId, [=] {
+        media_status_t err = handlePause(clientId, sessionId);
+
+        cleanup();
+
+        if (err != AMEDIA_OK) {
+            reportError(clientId, sessionId, err);
+        } else {
+            auto callback = mCallback.lock();
+            if (callback != nullptr) {
+                callback->onPaused(clientId, sessionId);
+            }
+        }
+    });
+}
+
+void TranscoderWrapper::resume(ClientIdType clientId, SessionIdType sessionId,
+                               const TranscodingRequestParcel& request,
+                               const std::shared_ptr<ITranscodingClientCallback>& clientCb) {
+    queueEvent(Event::Resume, clientId, sessionId, [=] {
+        media_status_t err = handleResume(clientId, sessionId, request, clientCb);
+
+        if (err != AMEDIA_OK) {
+            cleanup();
+            reportError(clientId, sessionId, err);
+        } else {
+            auto callback = mCallback.lock();
+            if (callback != nullptr) {
+                callback->onResumed(clientId, sessionId);
+            }
+        }
+    });
+}
+
+void TranscoderWrapper::stop(ClientIdType clientId, SessionIdType sessionId) {
+    queueEvent(Event::Stop, clientId, sessionId, [=] {
+        if (mTranscoder != nullptr && clientId == mCurrentClientId &&
+            sessionId == mCurrentSessionId) {
+            // Cancelling the currently running session.
+            media_status_t err = mTranscoder->cancel();
+            if (err != AMEDIA_OK) {
+                ALOGW("failed to stop transcoder: %d", err);
+            } else {
+                ALOGI("transcoder stopped");
+            }
+            cleanup();
+        } else {
+            // For a session that is not currently running, release any paused state for it.
+            mPausedStateMap.erase(SessionKeyType(clientId, sessionId));
+        }
+        // No callback needed for stop.
+    });
+}
+
+void TranscoderWrapper::onFinish(ClientIdType clientId, SessionIdType sessionId) {
+    queueEvent(Event::Finish, clientId, sessionId, [=] {
+        if (mTranscoder != nullptr && clientId == mCurrentClientId &&
+            sessionId == mCurrentSessionId) {
+            cleanup();
+        }
+
+        auto callback = mCallback.lock();
+        if (callback != nullptr) {
+            callback->onFinish(clientId, sessionId);
+        }
+    });
+}
+
+void TranscoderWrapper::onError(ClientIdType clientId, SessionIdType sessionId,
+                                media_status_t error) {
+    queueEvent(
+            Event::Error, clientId, sessionId,
+            [=] {
+                if (mTranscoder != nullptr && clientId == mCurrentClientId &&
+                    sessionId == mCurrentSessionId) {
+                    cleanup();
+                }
+                reportError(clientId, sessionId, error);
+            },
+            error);
+}
+
+void TranscoderWrapper::onProgress(ClientIdType clientId, SessionIdType sessionId,
+                                   int32_t progress) {
+    queueEvent(
+            Event::Progress, clientId, sessionId,
+            [=] {
+                auto callback = mCallback.lock();
+                if (callback != nullptr) {
+                    callback->onProgressUpdate(clientId, sessionId, progress);
+                }
+            },
+            progress);
+}
+
+media_status_t TranscoderWrapper::setupTranscoder(
+        ClientIdType clientId, SessionIdType sessionId, const TranscodingRequestParcel& request,
+        const std::shared_ptr<ITranscodingClientCallback>& clientCb,
+        const std::shared_ptr<ndk::ScopedAParcel>& pausedState) {
+    if (clientCb == nullptr) {
+        ALOGE("client callback is null");
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    if (mTranscoder != nullptr) {
+        ALOGE("transcoder already running");
+        return AMEDIA_ERROR_INVALID_OPERATION;
+    }
+
+    Status status;
+    ::ndk::ScopedFileDescriptor srcFd, dstFd;
+    status = clientCb->openFileDescriptor(request.sourceFilePath, "r", &srcFd);
+    if (!status.isOk() || srcFd.get() < 0) {
+        ALOGE("failed to open source");
+        return AMEDIA_ERROR_IO;
+    }
+
+    // Open the destination file with "rw", as the transcoder could potentially reuse part of it
+    // in the resume case. We might want to further differentiate and open with "w" only
+    // for start.
+    status = clientCb->openFileDescriptor(request.destinationFilePath, "rw", &dstFd);
+    if (!status.isOk() || dstFd.get() < 0) {
+        ALOGE("failed to open destination");
+        return AMEDIA_ERROR_IO;
+    }
+
+    mCurrentClientId = clientId;
+    mCurrentSessionId = sessionId;
+    mTranscoderCb = std::make_shared<CallbackImpl>(shared_from_this(), clientId, sessionId);
+    mTranscoder = MediaTranscoder::create(mTranscoderCb, pausedState);
+    if (mTranscoder == nullptr) {
+        ALOGE("failed to create transcoder");
+        return AMEDIA_ERROR_UNKNOWN;
+    }
+
+    media_status_t err = mTranscoder->configureSource(srcFd.get());
+    if (err != AMEDIA_OK) {
+        ALOGE("failed to configure source: %d", err);
+        return err;
+    }
+
+    std::vector<std::shared_ptr<AMediaFormat>> trackFormats = mTranscoder->getTrackFormats();
+    if (trackFormats.size() == 0) {
+        ALOGE("failed to get track formats!");
+        return AMEDIA_ERROR_MALFORMED;
+    }
+
+    for (int i = 0; i < trackFormats.size(); ++i) {
+        AMediaFormat* format = nullptr;
+        const char* mime = nullptr;
+        AMediaFormat_getString(trackFormats[i].get(), AMEDIAFORMAT_KEY_MIME, &mime);
+
+        if (!strncmp(mime, "video/", 6)) {
+            format = getVideoFormat(mime, request.requestedVideoTrackFormat);
+        }
+
+        err = mTranscoder->configureTrackFormat(i, format);
+        if (format != nullptr) {
+            AMediaFormat_delete(format);
+        }
+        if (err != AMEDIA_OK) {
+            ALOGE("failed to configure track format for track %d: %d", i, err);
+            return err;
+        }
+    }
+
+    err = mTranscoder->configureDestination(dstFd.get());
+    if (err != AMEDIA_OK) {
+        ALOGE("failed to configure dest: %d", err);
+        return err;
+    }
+
+    return AMEDIA_OK;
+}
+
+media_status_t TranscoderWrapper::handleStart(
+        ClientIdType clientId, SessionIdType sessionId, const TranscodingRequestParcel& request,
+        const std::shared_ptr<ITranscodingClientCallback>& clientCb) {
+    ALOGI("%s: setting up transcoder for start", __FUNCTION__);
+    media_status_t err = setupTranscoder(clientId, sessionId, request, clientCb);
+    if (err != AMEDIA_OK) {
+        ALOGI("%s: failed to setup transcoder", __FUNCTION__);
+        return err;
+    }
+
+    err = mTranscoder->start();
+    if (err != AMEDIA_OK) {
+        ALOGE("%s: failed to start transcoder: %d", __FUNCTION__, err);
+        return err;
+    }
+
+    ALOGI("%s: transcoder started", __FUNCTION__);
+    return AMEDIA_OK;
+}
+
+media_status_t TranscoderWrapper::handlePause(ClientIdType clientId, SessionIdType sessionId) {
+    if (mTranscoder == nullptr) {
+        ALOGE("%s: transcoder is not running", __FUNCTION__);
+        return AMEDIA_ERROR_INVALID_OPERATION;
+    }
+
+    if (clientId != mCurrentClientId || sessionId != mCurrentSessionId) {
+        ALOGW("%s: stopping session {%lld, %d} that's not current session {%lld, %d}", __FUNCTION__,
+              (long long)clientId, sessionId, (long long)mCurrentClientId, mCurrentSessionId);
+    }
+
+    ALOGI("%s: pausing transcoder", __FUNCTION__);
+
+    std::shared_ptr<ndk::ScopedAParcel> pauseStates;
+    media_status_t err = mTranscoder->pause(&pauseStates);
+    if (err != AMEDIA_OK) {
+        ALOGE("%s: failed to pause transcoder: %d", __FUNCTION__, err);
+        return err;
+    }
+    mPausedStateMap[SessionKeyType(clientId, sessionId)] = pauseStates;
+
+    ALOGI("%s: transcoder paused", __FUNCTION__);
+    return AMEDIA_OK;
+}
+
+media_status_t TranscoderWrapper::handleResume(
+        ClientIdType clientId, SessionIdType sessionId, const TranscodingRequestParcel& request,
+        const std::shared_ptr<ITranscodingClientCallback>& clientCb) {
+    std::shared_ptr<ndk::ScopedAParcel> pausedState;
+    auto it = mPausedStateMap.find(SessionKeyType(clientId, sessionId));
+    if (it != mPausedStateMap.end()) {
+        pausedState = it->second;
+        mPausedStateMap.erase(it);
+    } else {
+        ALOGE("%s: can't find paused state", __FUNCTION__);
+        return AMEDIA_ERROR_INVALID_OPERATION;
+    }
+
+    ALOGI("%s: setting up transcoder for resume", __FUNCTION__);
+    media_status_t err = setupTranscoder(clientId, sessionId, request, clientCb, pausedState);
+    if (err != AMEDIA_OK) {
+        ALOGE("%s: failed to setup transcoder: %d", __FUNCTION__, err);
+        return err;
+    }
+
+    err = mTranscoder->resume();
+    if (err != AMEDIA_OK) {
+        ALOGE("%s: failed to resume transcoder: %d", __FUNCTION__, err);
+        return err;
+    }
+
+    ALOGI("%s: transcoder resumed", __FUNCTION__);
+    return AMEDIA_OK;
+}
+
+void TranscoderWrapper::cleanup() {
+    mCurrentClientId = 0;
+    mCurrentSessionId = -1;
+    mTranscoderCb = nullptr;
+    mTranscoder = nullptr;
+}
+
+void TranscoderWrapper::queueEvent(Event::Type type, ClientIdType clientId, SessionIdType sessionId,
+                                   const std::function<void()> runnable, int32_t arg) {
+    std::scoped_lock lock{mLock};
+
+    mQueue.push_back({type, clientId, sessionId, runnable, arg});
+    mCondition.notify_one();
+}
+
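+// Single worker loop draining mQueue. The lock is released while each runnable executes so
+// that the runnable (or callbacks it triggers) can queue further events without deadlocking.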
+void TranscoderWrapper::threadLoop() {
+    std::unique_lock<std::mutex> lock{mLock};
+    // TranscoderWrapper currently lives in the transcoding service for as long as
+    // MediaTranscodingService itself does.
+    while (true) {
+        // Wait for the next event.
+        while (mQueue.empty()) {
+            mCondition.wait(lock);
+        }
+
+        Event event = *mQueue.begin();
+        mQueue.pop_front();
+
+        ALOGD("%s: %s", __FUNCTION__, toString(event).c_str());
+
+        lock.unlock();
+        event.runnable();
+        lock.lock();
+    }
+}
+
+}  // namespace android
diff --git a/media/libmediatranscoding/TranscodingClientManager.cpp b/media/libmediatranscoding/TranscodingClientManager.cpp
index 7252437..ae1f7a5 100644
--- a/media/libmediatranscoding/TranscodingClientManager.cpp
+++ b/media/libmediatranscoding/TranscodingClientManager.cpp
@@ -17,129 +17,386 @@
 // #define LOG_NDEBUG 0
 #define LOG_TAG "TranscodingClientManager"
 
+#include <aidl/android/media/BnTranscodingClient.h>
+#include <aidl/android/media/IMediaTranscodingService.h>
+#include <android/binder_ibinder.h>
 #include <inttypes.h>
 #include <media/TranscodingClientManager.h>
+#include <media/TranscodingRequest.h>
+#include <media/TranscodingUidPolicy.h>
+#include <private/android_filesystem_config.h>
 #include <utils/Log.h>
-
+#include <utils/String16.h>
 namespace android {
 
-using Status = ::ndk::ScopedAStatus;
+static_assert(sizeof(ClientIdType) == sizeof(void*), "ClientIdType should be pointer-sized");
 
-// static
-TranscodingClientManager& TranscodingClientManager::getInstance() {
-    static TranscodingClientManager gInstance{};
-    return gInstance;
+static constexpr const char* MEDIA_PROVIDER_PKG_NAMES[] = {
+        "com.android.providers.media.module",
+        "com.google.android.providers.media.module",
+};
+
+using ::aidl::android::media::BnTranscodingClient;
+using ::aidl::android::media::IMediaTranscodingService;  // For service error codes
+using ::aidl::android::media::TranscodingRequestParcel;
+using ::aidl::android::media::TranscodingSessionParcel;
+using Status = ::ndk::ScopedAStatus;
+using ::ndk::SpAIBinder;
+
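+// The client id is also used as the cookie for binder death notification (see
+// BinderDiedCallback); sCookie2Client maps that cookie back to the ClientImpl so the dead
+// client can be unregistered.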
+//static
+std::atomic<ClientIdType> TranscodingClientManager::sCookieCounter = 0;
+//static
+std::mutex TranscodingClientManager::sCookie2ClientLock;
+//static
+std::map<ClientIdType, std::shared_ptr<TranscodingClientManager::ClientImpl>>
+        TranscodingClientManager::sCookie2Client;
+///////////////////////////////////////////////////////////////////////////////
+
+// Convenience methods for constructing binder::Status objects for error returns
+#define STATUS_ERROR_FMT(errorCode, errorString, ...) \
+    Status::fromServiceSpecificErrorWithMessage(      \
+            errorCode,                                \
+            String8::format("%s:%d: " errorString, __FUNCTION__, __LINE__, ##__VA_ARGS__))
+
+/**
+ * ClientImpl implements a single client and contains all its information.
+ */
+struct TranscodingClientManager::ClientImpl : public BnTranscodingClient {
+    /* The remote client callback that this ClientImpl is associated with.
+     * Once the ClientImpl is created, we hold an SpAIBinder so that the binder
+     * object doesn't get created again; otherwise the binder object pointer
+     * may not be unique.
+     */
+    SpAIBinder mClientBinder;
+    std::shared_ptr<ITranscodingClientCallback> mClientCallback;
+    /* A unique id assigned to the client by the service. This number is used
+     * by the service for indexing. It is generated from the monotonically
+     * increasing sCookieCounter.
+     */
+    ClientIdType mClientId;
+    std::string mClientName;
+    std::string mClientOpPackageName;
+
+    // Next sessionId to assign.
+    std::atomic<int32_t> mNextSessionId;
+    // Whether this client has been unregistered already.
+    std::atomic<bool> mAbandoned;
+    // Weak pointer to the client manager for this client.
+    std::weak_ptr<TranscodingClientManager> mOwner;
+
+    ClientImpl(const std::shared_ptr<ITranscodingClientCallback>& callback,
+               const std::string& clientName, const std::string& opPackageName,
+               const std::weak_ptr<TranscodingClientManager>& owner);
+
+    Status submitRequest(const TranscodingRequestParcel& /*in_request*/,
+                         TranscodingSessionParcel* /*out_session*/,
+                         bool* /*_aidl_return*/) override;
+
+    Status cancelSession(int32_t /*in_sessionId*/, bool* /*_aidl_return*/) override;
+
+    Status getSessionWithId(int32_t /*in_sessionId*/, TranscodingSessionParcel* /*out_session*/,
+                            bool* /*_aidl_return*/) override;
+
+    Status unregister() override;
+};
+
+TranscodingClientManager::ClientImpl::ClientImpl(
+        const std::shared_ptr<ITranscodingClientCallback>& callback, const std::string& clientName,
+        const std::string& opPackageName, const std::weak_ptr<TranscodingClientManager>& owner)
+      : mClientBinder((callback != nullptr) ? callback->asBinder() : nullptr),
+        mClientCallback(callback),
+        mClientId(sCookieCounter.fetch_add(1, std::memory_order_relaxed)),
+        mClientName(clientName),
+        mClientOpPackageName(opPackageName),
+        mNextSessionId(0),
+        mAbandoned(false),
+        mOwner(owner) {}
+
+Status TranscodingClientManager::ClientImpl::submitRequest(
+        const TranscodingRequestParcel& in_request, TranscodingSessionParcel* out_session,
+        bool* _aidl_return) {
+    *_aidl_return = false;
+
+    std::shared_ptr<TranscodingClientManager> owner;
+    if (mAbandoned || (owner = mOwner.lock()) == nullptr) {
+        return Status::fromServiceSpecificError(IMediaTranscodingService::ERROR_DISCONNECTED);
+    }
+
+    if (in_request.sourceFilePath.empty() || in_request.destinationFilePath.empty()) {
+        return Status::ok();
+    }
+
+    int32_t callingPid = AIBinder_getCallingPid();
+    int32_t callingUid = AIBinder_getCallingUid();
+    int32_t in_clientUid = in_request.clientUid;
+    int32_t in_clientPid = in_request.clientPid;
+
+    // Check if we can trust clientUid. Only a privileged caller may forward the
+    // uid on an app client's behalf.
+    if (in_clientUid == IMediaTranscodingService::USE_CALLING_UID) {
+        in_clientUid = callingUid;
+    } else if (in_clientUid < 0) {
+        return Status::ok();
+    } else if (in_clientUid != callingUid && !owner->isTrustedCallingUid(callingUid)) {
+        ALOGE("MediaTranscodingService::registerClient rejected (clientPid %d, clientUid %d) "
+              "(don't trust callingUid %d)",
+              in_clientPid, in_clientUid, callingUid);
+        return STATUS_ERROR_FMT(
+                IMediaTranscodingService::ERROR_PERMISSION_DENIED,
+                "MediaTranscodingService::registerClient rejected (clientPid %d, clientUid %d) "
+                "(don't trust callingUid %d)",
+                in_clientPid, in_clientUid, callingUid);
+    }
+
+    // Check if we can trust clientPid. Only a privileged caller may forward the
+    // pid on an app client's behalf.
+    if (in_clientPid == IMediaTranscodingService::USE_CALLING_PID) {
+        in_clientPid = callingPid;
+    } else if (in_clientPid < 0) {
+        return Status::ok();
+    } else if (in_clientPid != callingPid && !owner->isTrustedCallingUid(callingUid)) {
+        ALOGE("MediaTranscodingService::registerClient rejected (clientPid %d, clientUid %d) "
+              "(don't trust callingUid %d)",
+              in_clientPid, in_clientUid, callingUid);
+        return STATUS_ERROR_FMT(
+                IMediaTranscodingService::ERROR_PERMISSION_DENIED,
+                "MediaTranscodingService::registerClient rejected (clientPid %d, clientUid %d) "
+                "(don't trust callingUid %d)",
+                in_clientPid, in_clientUid, callingUid);
+    }
+
+    int32_t sessionId = mNextSessionId.fetch_add(1);
+
+    *_aidl_return = owner->mSessionController->submit(mClientId, sessionId, in_clientUid,
+                                                      in_request, mClientCallback);
+
+    if (*_aidl_return) {
+        out_session->sessionId = sessionId;
+
+        // TODO(chz): is some of this coming from SessionController?
+        *(TranscodingRequest*)&out_session->request = in_request;
+        out_session->awaitNumberOfSessions = 0;
+    }
+
+    return Status::ok();
 }
 
+Status TranscodingClientManager::ClientImpl::cancelSession(int32_t in_sessionId,
+                                                           bool* _aidl_return) {
+    *_aidl_return = false;
+
+    std::shared_ptr<TranscodingClientManager> owner;
+    if (mAbandoned || (owner = mOwner.lock()) == nullptr) {
+        return Status::fromServiceSpecificError(IMediaTranscodingService::ERROR_DISCONNECTED);
+    }
+
+    if (in_sessionId < 0) {
+        return Status::ok();
+    }
+
+    *_aidl_return = owner->mSessionController->cancel(mClientId, in_sessionId);
+    return Status::ok();
+}
+
+Status TranscodingClientManager::ClientImpl::getSessionWithId(int32_t in_sessionId,
+                                                              TranscodingSessionParcel* out_session,
+                                                              bool* _aidl_return) {
+    *_aidl_return = false;
+
+    std::shared_ptr<TranscodingClientManager> owner;
+    if (mAbandoned || (owner = mOwner.lock()) == nullptr) {
+        return Status::fromServiceSpecificError(IMediaTranscodingService::ERROR_DISCONNECTED);
+    }
+
+    if (in_sessionId < 0) {
+        return Status::ok();
+    }
+
+    *_aidl_return =
+            owner->mSessionController->getSession(mClientId, in_sessionId, &out_session->request);
+
+    if (*_aidl_return) {
+        out_session->sessionId = in_sessionId;
+        out_session->awaitNumberOfSessions = 0;
+    }
+    return Status::ok();
+}
+
+Status TranscodingClientManager::ClientImpl::unregister() {
+    bool abandoned = mAbandoned.exchange(true);
+
+    std::shared_ptr<TranscodingClientManager> owner;
+    if (abandoned || (owner = mOwner.lock()) == nullptr) {
+        return Status::fromServiceSpecificError(IMediaTranscodingService::ERROR_DISCONNECTED);
+    }
+
+    // Use sessionId == -1 to cancel all realtime sessions for this client with the controller.
+    owner->mSessionController->cancel(mClientId, -1);
+    owner->removeClient(mClientId);
+
+    return Status::ok();
+}
+
+///////////////////////////////////////////////////////////////////////////////
+
 // static
 void TranscodingClientManager::BinderDiedCallback(void* cookie) {
-    int32_t clientId = static_cast<int32_t>(reinterpret_cast<intptr_t>(cookie));
-    ALOGD("Client %" PRId32 " is dead", clientId);
-    // Don't check for pid validity since we know it's already dead.
-    TranscodingClientManager& manager = TranscodingClientManager::getInstance();
-    manager.removeClient(clientId);
+    ClientIdType clientId = reinterpret_cast<ClientIdType>(cookie);
+
+    ALOGD("Client %lld is dead", (long long)clientId);
+
+    std::shared_ptr<ClientImpl> client;
+
+    {
+        std::scoped_lock lock{sCookie2ClientLock};
+
+        auto it = sCookie2Client.find(clientId);
+        if (it != sCookie2Client.end()) {
+            client = it->second;
+        }
+    }
+
+    if (client != nullptr) {
+        client->unregister();
+    }
 }
 
-TranscodingClientManager::TranscodingClientManager()
-    : mDeathRecipient(AIBinder_DeathRecipient_new(BinderDiedCallback)) {
+TranscodingClientManager::TranscodingClientManager(
+        const std::shared_ptr<ControllerClientInterface>& controller)
+      : mDeathRecipient(AIBinder_DeathRecipient_new(BinderDiedCallback)),
+        mSessionController(controller) {
     ALOGD("TranscodingClientManager started");
+    uid_t mpuid;
+    for (const char* pkgName : MEDIA_PROVIDER_PKG_NAMES) {
+        if (TranscodingUidPolicy::getUidForPackage(String16(pkgName), mpuid) == NO_ERROR) {
+            ALOGI("Found %s's uid: %d", pkgName, mpuid);
+            mMediaProviderUid.insert(mpuid);
+        } else {
+            ALOGW("Couldn't get uid for %s.", pkgName);
+        }
+    }
 }
 
 TranscodingClientManager::~TranscodingClientManager() {
     ALOGD("TranscodingClientManager exited");
 }
 
-bool TranscodingClientManager::isClientIdRegistered(int32_t clientId) const {
-    std::scoped_lock lock{mLock};
-    return mClientIdToClientInfoMap.find(clientId) != mClientIdToClientInfoMap.end();
-}
-
 void TranscodingClientManager::dumpAllClients(int fd, const Vector<String16>& args __unused) {
     String8 result;
 
     const size_t SIZE = 256;
     char buffer[SIZE];
+    std::scoped_lock lock{mLock};
 
-    snprintf(buffer, SIZE, "    Total num of Clients: %zu\n", mClientIdToClientInfoMap.size());
-    result.append(buffer);
-
-    if (mClientIdToClientInfoMap.size() > 0) {
-        snprintf(buffer, SIZE, "========== Dumping all clients =========\n");
+    if (mClientIdToClientMap.size() > 0) {
+        snprintf(buffer, SIZE, "\n========== Dumping all clients =========\n");
         result.append(buffer);
     }
 
-    for (const auto& iter : mClientIdToClientInfoMap) {
-        const std::shared_ptr<ITranscodingServiceClient> client = iter.second->mClient;
-        std::string clientName;
-        Status status = client->getName(&clientName);
-        if (!status.isOk()) {
-            ALOGE("Failed to get client: %d information", iter.first);
-            continue;
-        }
-        snprintf(buffer, SIZE, "    -- Clients: %d  name: %s\n", iter.first, clientName.c_str());
+    snprintf(buffer, SIZE, "  Total num of Clients: %zu\n", mClientIdToClientMap.size());
+    result.append(buffer);
+
+    for (const auto& iter : mClientIdToClientMap) {
+        snprintf(buffer, SIZE, "    Client %lld:  pkg: %s\n", (long long)iter.first,
+                 iter.second->mClientName.c_str());
         result.append(buffer);
     }
 
     write(fd, result.string(), result.size());
 }
 
-status_t TranscodingClientManager::addClient(std::unique_ptr<ClientInfo> client) {
-    // Validate the client.
-    if (client == nullptr || client->mClientId < 0 || client->mClientPid < 0 ||
-        client->mClientUid < 0 || client->mClientOpPackageName.empty() ||
-        client->mClientOpPackageName == "") {
-        ALOGE("Invalid client");
-        return BAD_VALUE;
+bool TranscodingClientManager::isTrustedCallingUid(uid_t uid) {
+    if (uid > 0 && mMediaProviderUid.count(uid) > 0) {
+        return true;
     }
 
+    switch (uid) {
+    case AID_ROOT:  // root user
+    case AID_SYSTEM:
+    case AID_SHELL:
+    case AID_MEDIA:  // mediaserver
+        return true;
+    default:
+        return false;
+    }
+}
+
+status_t TranscodingClientManager::addClient(
+        const std::shared_ptr<ITranscodingClientCallback>& callback, const std::string& clientName,
+        const std::string& opPackageName, std::shared_ptr<ITranscodingClient>* outClient) {
+    // Validate the client.
+    if (callback == nullptr || clientName.empty() || opPackageName.empty()) {
+        ALOGE("Invalid client");
+        return IMediaTranscodingService::ERROR_ILLEGAL_ARGUMENT;
+    }
+
+    SpAIBinder binder = callback->asBinder();
+
     std::scoped_lock lock{mLock};
 
-    // Check if the client already exists.
-    if (mClientIdToClientInfoMap.count(client->mClientId) != 0) {
-        ALOGW("Client already exists.");
-        return ALREADY_EXISTS;
+    // Check if the client is already registered.
+    if (mRegisteredCallbacks.count((uintptr_t)binder.get()) > 0) {
+        return IMediaTranscodingService::ERROR_ALREADY_EXISTS;
     }
 
-    ALOGD("Adding client id %d pid: %d uid: %d %s", client->mClientId, client->mClientPid,
-          client->mClientUid, client->mClientOpPackageName.c_str());
+    // Creates the client (with the id assigned by ClientImpl).
+    std::shared_ptr<ClientImpl> client = ::ndk::SharedRefBase::make<ClientImpl>(
+            callback, clientName, opPackageName, shared_from_this());
 
-    AIBinder_linkToDeath(client->mClient->asBinder().get(), mDeathRecipient.get(),
+    ALOGD("Adding client id %lld, name %s, package %s", (long long)client->mClientId,
+          client->mClientName.c_str(), client->mClientOpPackageName.c_str());
+
+    {
+        std::scoped_lock lock{sCookie2ClientLock};
+        sCookie2Client.emplace(std::make_pair(client->mClientId, client));
+    }
+
+    AIBinder_linkToDeath(binder.get(), mDeathRecipient.get(),
                          reinterpret_cast<void*>(client->mClientId));
 
     // Adds the new client to the map.
-    mClientIdToClientInfoMap[client->mClientId] = std::move(client);
+    mRegisteredCallbacks.insert((uintptr_t)binder.get());
+    mClientIdToClientMap[client->mClientId] = client;
+
+    *outClient = client;
 
     return OK;
 }
 
-status_t TranscodingClientManager::removeClient(int32_t clientId) {
-    ALOGD("Removing client id %d", clientId);
+status_t TranscodingClientManager::removeClient(ClientIdType clientId) {
+    ALOGD("Removing client id %lld", (long long)clientId);
     std::scoped_lock lock{mLock};
 
     // Checks if the client is valid.
-    auto it = mClientIdToClientInfoMap.find(clientId);
-    if (it == mClientIdToClientInfoMap.end()) {
-        ALOGE("Client id %d does not exist", clientId);
-        return INVALID_OPERATION;
+    auto it = mClientIdToClientMap.find(clientId);
+    if (it == mClientIdToClientMap.end()) {
+        ALOGE("Client id %lld does not exist", (long long)clientId);
+        return IMediaTranscodingService::ERROR_INVALID_OPERATION;
     }
 
-    std::shared_ptr<ITranscodingServiceClient> client = it->second->mClient;
+    SpAIBinder binder = it->second->mClientBinder;
 
     // Check if the client is still alive. If so, unlink the death recipient.
-    if (client) {
-        AIBinder_unlinkToDeath(client->asBinder().get(), mDeathRecipient.get(),
-                               reinterpret_cast<void*>(clientId));
+    if (binder.get() != nullptr) {
+        AIBinder_unlinkToDeath(binder.get(), mDeathRecipient.get(),
+                               reinterpret_cast<void*>(it->second->mClientId));
+    }
+
+    {
+        std::scoped_lock lock{sCookie2ClientLock};
+        sCookie2Client.erase(it->second->mClientId);
     }
 
     // Erase the entry.
-    mClientIdToClientInfoMap.erase(it);
+    mClientIdToClientMap.erase(it);
+    mRegisteredCallbacks.erase((uintptr_t)binder.get());
 
     return OK;
 }
 
 size_t TranscodingClientManager::getNumOfClients() const {
     std::scoped_lock lock{mLock};
-    return mClientIdToClientInfoMap.size();
+    return mClientIdToClientMap.size();
 }
 
 }  // namespace android
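
A note on the death-notification plumbing in TranscodingClientManager above: AIBinder_linkToDeath only hands the recipient an opaque cookie, so BinderDiedCallback resolves the cookie through the static sCookie2Client map (under sCookie2ClientLock) to recover a live ClientImpl and unregister it. A minimal standalone sketch of that pattern, with illustrative names that are not taken from this patch:

    #include <cstdint>
    #include <map>
    #include <memory>
    #include <mutex>

    struct Client {
        void unregister() { /* tear down this client's state */ }
    };

    // The death callback only receives an opaque cookie, so keep a
    // cookie -> strong-pointer registry to find the client again.
    static std::mutex gCookieLock;
    static std::map<uintptr_t, std::shared_ptr<Client>> gCookieToClient;

    void trackForDeath(uintptr_t cookie, std::shared_ptr<Client> client) {
        std::scoped_lock lock{gCookieLock};
        gCookieToClient.emplace(cookie, std::move(client));
    }

    // Same shape as an AIBinder_DeathRecipient callback: void(*)(void*).
    void onBinderDied(void* cookie) {
        std::shared_ptr<Client> client;
        {
            std::scoped_lock lock{gCookieLock};
            auto it = gCookieToClient.find(reinterpret_cast<uintptr_t>(cookie));
            if (it != gCookieToClient.end()) {
                client = it->second;
            }
        }
        // Invoke the client with the registry lock dropped, as BinderDiedCallback does.
        if (client != nullptr) {
            client->unregister();
        }
    }
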
diff --git a/media/libmediatranscoding/TranscodingResourcePolicy.cpp b/media/libmediatranscoding/TranscodingResourcePolicy.cpp
new file mode 100644
index 0000000..4fd8338
--- /dev/null
+++ b/media/libmediatranscoding/TranscodingResourcePolicy.cpp
@@ -0,0 +1,169 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "TranscodingResourcePolicy"
+
+#include <aidl/android/media/BnResourceObserver.h>
+#include <aidl/android/media/IResourceObserverService.h>
+#include <android/binder_manager.h>
+#include <android/binder_process.h>
+#include <binder/IServiceManager.h>
+#include <media/TranscodingResourcePolicy.h>
+#include <utils/Log.h>
+
+namespace android {
+
+using Status = ::ndk::ScopedAStatus;
+using ::aidl::android::media::BnResourceObserver;
+using ::aidl::android::media::IResourceObserverService;
+using ::aidl::android::media::MediaObservableEvent;
+using ::aidl::android::media::MediaObservableFilter;
+using ::aidl::android::media::MediaObservableParcel;
+using ::aidl::android::media::MediaObservableType;
+
+static std::string toString(const MediaObservableParcel& observable) {
+    return "{" + ::aidl::android::media::toString(observable.type) + ", " +
+           std::to_string(observable.value) + "}";
+}
+
+struct TranscodingResourcePolicy::ResourceObserver : public BnResourceObserver {
+    explicit ResourceObserver(TranscodingResourcePolicy* owner) : mOwner(owner), mPid(getpid()) {}
+
+    // IResourceObserver
+    ::ndk::ScopedAStatus onStatusChanged(
+            MediaObservableEvent event, int32_t uid, int32_t pid,
+            const std::vector<MediaObservableParcel>& observables) override {
+        ALOGD("%s: %s, uid %d, pid %d, %s", __FUNCTION__,
+              ::aidl::android::media::toString(event).c_str(), uid, pid,
+              toString(observables[0]).c_str());
+
+        // Only report kIdle event for codec resources from other processes.
+        if (((uint64_t)event & (uint64_t)MediaObservableEvent::kIdle) != 0 && (pid != mPid)) {
+            for (auto& observable : observables) {
+                if (observable.type == MediaObservableType::kVideoSecureCodec ||
+                    observable.type == MediaObservableType::kVideoNonSecureCodec) {
+                    mOwner->onResourceAvailable();
+                    break;
+                }
+            }
+        }
+        return ::ndk::ScopedAStatus::ok();
+    }
+
+    TranscodingResourcePolicy* mOwner;
+    const pid_t mPid;
+};
+
+// static
+void TranscodingResourcePolicy::BinderDiedCallback(void* cookie) {
+    TranscodingResourcePolicy* owner = reinterpret_cast<TranscodingResourcePolicy*>(cookie);
+    if (owner != nullptr) {
+        owner->unregisterSelf();
+    }
+    // TODO(chz): retry connecting to IResourceObserverService after a failure.
+    // We also need back-up logic for when IResourceObserverService is offline for a
+    // prolonged period of time. A possible alternative: while IResourceObserverService
+    // is unavailable, trigger onResourceAvailable() every time the top uid changes,
+    // in the hope that this frees up some codec instances that we could reclaim.
+}
+
+TranscodingResourcePolicy::TranscodingResourcePolicy()
+      : mRegistered(false), mDeathRecipient(AIBinder_DeathRecipient_new(BinderDiedCallback)) {
+    registerSelf();
+}
+
+TranscodingResourcePolicy::~TranscodingResourcePolicy() {
+    unregisterSelf();
+}
+
+void TranscodingResourcePolicy::registerSelf() {
+    ALOGI("TranscodingResourcePolicy: registerSelf");
+
+    ::ndk::SpAIBinder binder(AServiceManager_getService("media.resource_observer"));
+
+    std::scoped_lock lock{mRegisteredLock};
+
+    if (mRegistered) {
+        return;
+    }
+
+    // TODO(chz): retry connecting to IResourceObserverService after a failure.
+    mService = IResourceObserverService::fromBinder(binder);
+    if (mService == nullptr) {
+        ALOGE("Failed to get IResourceObserverService");
+        return;
+    }
+
+    // Only register filters for codec resources becoming available.
+    mObserver = ::ndk::SharedRefBase::make<ResourceObserver>(this);
+    std::vector<MediaObservableFilter> filters = {
+            {MediaObservableType::kVideoSecureCodec, MediaObservableEvent::kIdle},
+            {MediaObservableType::kVideoNonSecureCodec, MediaObservableEvent::kIdle}};
+
+    Status status = mService->registerObserver(mObserver, filters);
+    if (!status.isOk()) {
+        ALOGE("failed to register: error %d", status.getServiceSpecificError());
+        mService = nullptr;
+        mObserver = nullptr;
+        return;
+    }
+
+    AIBinder_linkToDeath(binder.get(), mDeathRecipient.get(), reinterpret_cast<void*>(this));
+
+    ALOGD("@@@ registered observer");
+    mRegistered = true;
+}
+
+void TranscodingResourcePolicy::unregisterSelf() {
+    ALOGI("TranscodingResourcePolicy: unregisterSelf");
+
+    std::scoped_lock lock{mRegisteredLock};
+
+    if (!mRegistered) {
+        return;
+    }
+
+    ::ndk::SpAIBinder binder = mService->asBinder();
+    if (binder.get() != nullptr) {
+        Status status = mService->unregisterObserver(mObserver);
+        AIBinder_unlinkToDeath(binder.get(), mDeathRecipient.get(), reinterpret_cast<void*>(this));
+    }
+
+    mService = nullptr;
+    mObserver = nullptr;
+    mRegistered = false;
+}
+
+void TranscodingResourcePolicy::setCallback(
+        const std::shared_ptr<ResourcePolicyCallbackInterface>& cb) {
+    std::scoped_lock lock{mCallbackLock};
+    mResourcePolicyCallback = cb;
+}
+
+void TranscodingResourcePolicy::onResourceAvailable() {
+    std::shared_ptr<ResourcePolicyCallbackInterface> cb;
+    {
+        std::scoped_lock lock{mCallbackLock};
+        cb = mResourcePolicyCallback.lock();
+    }
+
+    if (cb != nullptr) {
+        cb->onResourceAvailable();
+    }
+}
+}  // namespace android
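
TranscodingResourcePolicy::onResourceAvailable() above shows a callback hand-off pattern that recurs throughout this change: hold the callback only as a std::weak_ptr, promote it to a std::shared_ptr under the lock, and invoke it only after the lock is released so a re-entrant call back into the policy cannot deadlock. A self-contained sketch of just that pattern (illustrative names):

    #include <memory>
    #include <mutex>

    struct ResourceCallback {
        virtual ~ResourceCallback() = default;
        virtual void onResourceAvailable() = 0;
    };

    class Notifier {
    public:
        void setCallback(const std::shared_ptr<ResourceCallback>& cb) {
            std::scoped_lock lock{mLock};
            mCallback = cb;  // stored weakly: the notifier doesn't extend the callback's lifetime
        }

        void notifyAvailable() {
            std::shared_ptr<ResourceCallback> cb;
            {
                std::scoped_lock lock{mLock};
                cb = mCallback.lock();  // promote to a strong reference under the lock
            }
            if (cb != nullptr) {
                cb->onResourceAvailable();  // call with the lock released
            }
        }

    private:
        std::mutex mLock;
        std::weak_ptr<ResourceCallback> mCallback;
    };
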
diff --git a/media/libmediatranscoding/TranscodingSessionController.cpp b/media/libmediatranscoding/TranscodingSessionController.cpp
new file mode 100644
index 0000000..1c3ee7e
--- /dev/null
+++ b/media/libmediatranscoding/TranscodingSessionController.cpp
@@ -0,0 +1,579 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "TranscodingSessionController"
+
+#define VALIDATE_STATE 1
+
+#include <inttypes.h>
+#include <media/TranscodingSessionController.h>
+#include <media/TranscodingUidPolicy.h>
+#include <utils/Log.h>
+
+#include <utility>
+
+namespace android {
+
+static_assert((SessionIdType)-1 < 0, "SessionIdType should be signed");
+
+constexpr static uid_t OFFLINE_UID = -1;
+
+//static
+String8 TranscodingSessionController::sessionToString(const SessionKeyType& sessionKey) {
+    return String8::format("{client:%lld, session:%d}", (long long)sessionKey.first,
+                           sessionKey.second);
+}
+
+//static
+const char* TranscodingSessionController::sessionStateToString(const Session::State sessionState) {
+    switch (sessionState) {
+    case Session::State::NOT_STARTED:
+        return "NOT_STARTED";
+    case Session::State::RUNNING:
+        return "RUNNING";
+    case Session::State::PAUSED:
+        return "PAUSED";
+    default:
+        break;
+    }
+    return "(unknown)";
+}
+
+TranscodingSessionController::TranscodingSessionController(
+        const std::shared_ptr<TranscoderInterface>& transcoder,
+        const std::shared_ptr<UidPolicyInterface>& uidPolicy,
+        const std::shared_ptr<ResourcePolicyInterface>& resourcePolicy)
+      : mTranscoder(transcoder),
+        mUidPolicy(uidPolicy),
+        mResourcePolicy(resourcePolicy),
+        mCurrentSession(nullptr),
+        mResourceLost(false) {
+    // Only push the empty offline queue initially. Real-time queues are added as requests come in.
+    mUidSortedList.push_back(OFFLINE_UID);
+    mOfflineUidIterator = mUidSortedList.begin();
+    mSessionQueues.emplace(OFFLINE_UID, SessionQueueType());
+    mUidPackageNames[OFFLINE_UID] = "(offline)";
+}
+
+TranscodingSessionController::~TranscodingSessionController() {}
+
+void TranscodingSessionController::dumpAllSessions(int fd, const Vector<String16>& args __unused) {
+    String8 result;
+
+    const size_t SIZE = 256;
+    char buffer[SIZE];
+    std::scoped_lock lock{mLock};
+
+    snprintf(buffer, SIZE, "\n========== Dumping all sessions queues =========\n");
+    result.append(buffer);
+    snprintf(buffer, SIZE, "  Total num of Sessions: %zu\n", mSessionMap.size());
+    result.append(buffer);
+
+    std::vector<int32_t> uids(mUidSortedList.begin(), mUidSortedList.end());
+
+    for (int32_t i = 0; i < uids.size(); i++) {
+        const uid_t uid = uids[i];
+
+        if (mSessionQueues[uid].empty()) {
+            continue;
+        }
+        snprintf(buffer, SIZE, "    Uid: %d, pkg: %s\n", uid,
+                 mUidPackageNames.count(uid) > 0 ? mUidPackageNames[uid].c_str() : "(unknown)");
+        result.append(buffer);
+        snprintf(buffer, SIZE, "      Num of sessions: %zu\n", mSessionQueues[uid].size());
+        result.append(buffer);
+        for (auto& sessionKey : mSessionQueues[uid]) {
+            auto sessionIt = mSessionMap.find(sessionKey);
+            if (sessionIt == mSessionMap.end()) {
+                snprintf(buffer, SIZE, "Failed to look up Session %s  \n",
+                         sessionToString(sessionKey).c_str());
+                result.append(buffer);
+                continue;
+            }
+            Session& session = sessionIt->second;
+            TranscodingRequestParcel& request = session.request;
+            snprintf(buffer, SIZE, "      Session: %s, %s, %d%%\n",
+                     sessionToString(sessionKey).c_str(), sessionStateToString(session.state),
+                     session.lastProgress);
+            result.append(buffer);
+            snprintf(buffer, SIZE, "        Src: %s\n", request.sourceFilePath.c_str());
+            result.append(buffer);
+            snprintf(buffer, SIZE, "        Dst: %s\n", request.destinationFilePath.c_str());
+            result.append(buffer);
+            // For the offline queue, print out the original client.
+            if (uid == OFFLINE_UID) {
+                snprintf(buffer, SIZE, "        Original Client: %s\n",
+                         request.clientPackageName.c_str());
+                result.append(buffer);
+            }
+        }
+    }
+
+    write(fd, result.string(), result.size());
+}
+
+TranscodingSessionController::Session* TranscodingSessionController::getTopSession_l() {
+    if (mSessionMap.empty()) {
+        return nullptr;
+    }
+    uid_t topUid = *mUidSortedList.begin();
+    SessionKeyType topSessionKey = *mSessionQueues[topUid].begin();
+    return &mSessionMap[topSessionKey];
+}
+
+void TranscodingSessionController::updateCurrentSession_l() {
+    Session* topSession = getTopSession_l();
+    Session* curSession = mCurrentSession;
+    ALOGV("updateCurrentSession: topSession is %s, curSession is %s",
+          topSession == nullptr ? "null" : sessionToString(topSession->key).c_str(),
+          curSession == nullptr ? "null" : sessionToString(curSession->key).c_str());
+
+    // If we found a topSession that should be run, and it's not already running,
+    // take some actions to ensure it's running.
+    if (topSession != nullptr &&
+        (topSession != curSession || topSession->state != Session::RUNNING)) {
+        // If another session is currently running, pause it first.
+        if (curSession != nullptr && curSession->state == Session::RUNNING) {
+            mTranscoder->pause(curSession->key.first, curSession->key.second);
+            curSession->state = Session::PAUSED;
+        }
+        // If we are not experiencing resource loss, we can start or resume
+        // the topSession now.
+        if (!mResourceLost) {
+            if (topSession->state == Session::NOT_STARTED) {
+                mTranscoder->start(topSession->key.first, topSession->key.second,
+                                   topSession->request, topSession->callback.lock());
+            } else if (topSession->state == Session::PAUSED) {
+                mTranscoder->resume(topSession->key.first, topSession->key.second,
+                                    topSession->request, topSession->callback.lock());
+            }
+            topSession->state = Session::RUNNING;
+        }
+    }
+    mCurrentSession = topSession;
+}
+
+void TranscodingSessionController::removeSession_l(const SessionKeyType& sessionKey) {
+    ALOGV("%s: session %s", __FUNCTION__, sessionToString(sessionKey).c_str());
+
+    if (mSessionMap.count(sessionKey) == 0) {
+        ALOGE("session %s doesn't exist", sessionToString(sessionKey).c_str());
+        return;
+    }
+
+    // Remove session from uid's queue.
+    const uid_t uid = mSessionMap[sessionKey].uid;
+    SessionQueueType& sessionQueue = mSessionQueues[uid];
+    auto it = std::find(sessionQueue.begin(), sessionQueue.end(), sessionKey);
+    if (it == sessionQueue.end()) {
+        ALOGE("couldn't find session %s in queue for uid %d", sessionToString(sessionKey).c_str(),
+              uid);
+        return;
+    }
+    sessionQueue.erase(it);
+
+    // If this is the last session in a real-time queue, remove this uid's queue.
+    if (uid != OFFLINE_UID && sessionQueue.empty()) {
+        mUidSortedList.remove(uid);
+        mSessionQueues.erase(uid);
+        mUidPolicy->unregisterMonitorUid(uid);
+
+        std::unordered_set<uid_t> topUids = mUidPolicy->getTopUids();
+        moveUidsToTop_l(topUids, false /*preserveTopUid*/);
+    }
+
+    // Clear current session.
+    if (mCurrentSession == &mSessionMap[sessionKey]) {
+        mCurrentSession = nullptr;
+    }
+
+    // Remove session from session map.
+    mSessionMap.erase(sessionKey);
+}
+
+/**
+ * Moves the set of uids to the front of mUidSortedList (which is used to pick
+ * the next session to run).
+ *
+ * This is called when 1) we received a onTopUidsChanged() callback from UidPolicy,
+ * or 2) we removed the session queue for a uid because it becomes empty.
+ *
+ * In case 1), if there are multiple uids in the set and the current front
+ * uid in mUidSortedList is still in the set, we try to keep that uid at the
+ * front so that the currently running session is not interrupted. (This is not
+ * a concern for case 2), because the queue for that uid was just removed entirely.)
+ */
+void TranscodingSessionController::moveUidsToTop_l(const std::unordered_set<uid_t>& uids,
+                                                   bool preserveTopUid) {
+    // If uid set is empty, nothing to do. Do not change the queue status.
+    if (uids.empty()) {
+        return;
+    }
+
+    // Save the current top uid.
+    uid_t curTopUid = *mUidSortedList.begin();
+    bool pushCurTopToFront = false;
+    int32_t numUidsMoved = 0;
+
+    // Go through the sorted uid list once, and move the ones in top set to front.
+    for (auto it = mUidSortedList.begin(); it != mUidSortedList.end();) {
+        uid_t uid = *it;
+
+        if (uid != OFFLINE_UID && uids.count(uid) > 0) {
+            it = mUidSortedList.erase(it);
+
+            // If this is the top we're preserving, don't push it here, push
+            // it after the for-loop.
+            if (uid == curTopUid && preserveTopUid) {
+                pushCurTopToFront = true;
+            } else {
+                mUidSortedList.push_front(uid);
+            }
+
+            // If we found all uids in the set, break out.
+            if (++numUidsMoved == uids.size()) {
+                break;
+            }
+        } else {
+            ++it;
+        }
+    }
+
+    if (pushCurTopToFront) {
+        mUidSortedList.push_front(curTopUid);
+    }
+}
+
+bool TranscodingSessionController::submit(
+        ClientIdType clientId, SessionIdType sessionId, uid_t uid,
+        const TranscodingRequestParcel& request,
+        const std::weak_ptr<ITranscodingClientCallback>& callback) {
+    SessionKeyType sessionKey = std::make_pair(clientId, sessionId);
+
+    ALOGV("%s: session %s, uid %d, prioirty %d", __FUNCTION__, sessionToString(sessionKey).c_str(),
+          uid, (int32_t)request.priority);
+
+    std::scoped_lock lock{mLock};
+
+    if (mSessionMap.count(sessionKey) > 0) {
+        ALOGE("session %s already exists", sessionToString(sessionKey).c_str());
+        return false;
+    }
+
+    // Add the uid package name to the store of package names we already know.
+    if (mUidPackageNames.count(uid) == 0) {
+        mUidPackageNames.emplace(uid, request.clientPackageName);
+    }
+
+    // TODO(chz): only support offline vs real-time for now. All kUnspecified sessions
+    // go to offline queue.
+    if (request.priority == TranscodingSessionPriority::kUnspecified) {
+        uid = OFFLINE_UID;
+    }
+
+    // Add session to session map.
+    mSessionMap[sessionKey].key = sessionKey;
+    mSessionMap[sessionKey].uid = uid;
+    mSessionMap[sessionKey].state = Session::NOT_STARTED;
+    mSessionMap[sessionKey].lastProgress = 0;
+    mSessionMap[sessionKey].request = request;
+    mSessionMap[sessionKey].callback = callback;
+
+    // If it's an offline session, the queue was already added in the constructor.
+    // If it's a real-time session, check if a queue is already present for the uid,
+    // and add a new queue if needed.
+    if (uid != OFFLINE_UID) {
+        if (mSessionQueues.count(uid) == 0) {
+            mUidPolicy->registerMonitorUid(uid);
+            if (mUidPolicy->isUidOnTop(uid)) {
+                mUidSortedList.push_front(uid);
+            } else {
+                // Real-time requests shouldn't come from a non-top app;
+                // put this uid just ahead of the offline queue.
+                mUidSortedList.insert(mOfflineUidIterator, uid);
+            }
+        } else if (uid != *mUidSortedList.begin()) {
+            if (mUidPolicy->isUidOnTop(uid)) {
+                mUidSortedList.remove(uid);
+                mUidSortedList.push_front(uid);
+            }
+        }
+    }
+    // Append this session to the uid's queue.
+    mSessionQueues[uid].push_back(sessionKey);
+
+    updateCurrentSession_l();
+
+    validateState_l();
+    return true;
+}
+
+bool TranscodingSessionController::cancel(ClientIdType clientId, SessionIdType sessionId) {
+    SessionKeyType sessionKey = std::make_pair(clientId, sessionId);
+
+    ALOGV("%s: session %s", __FUNCTION__, sessionToString(sessionKey).c_str());
+
+    std::list<SessionKeyType> sessionsToRemove;
+
+    std::scoped_lock lock{mLock};
+
+    if (sessionId < 0) {
+        for (auto it = mSessionMap.begin(); it != mSessionMap.end(); ++it) {
+            if (it->first.first == clientId && it->second.uid != OFFLINE_UID) {
+                sessionsToRemove.push_back(it->first);
+            }
+        }
+    } else {
+        if (mSessionMap.count(sessionKey) == 0) {
+            ALOGE("session %s doesn't exist", sessionToString(sessionKey).c_str());
+            return false;
+        }
+        sessionsToRemove.push_back(sessionKey);
+    }
+
+    for (auto it = sessionsToRemove.begin(); it != sessionsToRemove.end(); ++it) {
+        // If the session has ever been started, stop it now.
+        // Note that stop() is needed even if the session is currently paused. This instructs
+        // the transcoder to discard any state for the session; otherwise that state may
+        // never be discarded.
+        if (mSessionMap[*it].state != Session::NOT_STARTED) {
+            mTranscoder->stop(it->first, it->second);
+        }
+
+        // Remove the session.
+        removeSession_l(*it);
+    }
+
+    // Start next session.
+    updateCurrentSession_l();
+
+    validateState_l();
+    return true;
+}
+
+bool TranscodingSessionController::getSession(ClientIdType clientId, SessionIdType sessionId,
+                                              TranscodingRequestParcel* request) {
+    SessionKeyType sessionKey = std::make_pair(clientId, sessionId);
+
+    std::scoped_lock lock{mLock};
+
+    if (mSessionMap.count(sessionKey) == 0) {
+        ALOGE("session %s doesn't exist", sessionToString(sessionKey).c_str());
+        return false;
+    }
+
+    *(TranscodingRequest*)request = mSessionMap[sessionKey].request;
+    return true;
+}
+
+void TranscodingSessionController::notifyClient(ClientIdType clientId, SessionIdType sessionId,
+                                                const char* reason,
+                                                std::function<void(const SessionKeyType&)> func) {
+    SessionKeyType sessionKey = std::make_pair(clientId, sessionId);
+
+    std::scoped_lock lock{mLock};
+
+    if (mSessionMap.count(sessionKey) == 0) {
+        ALOGW("%s: ignoring %s for session %s that doesn't exist", __FUNCTION__, reason,
+              sessionToString(sessionKey).c_str());
+        return;
+    }
+
+    // Only ignore the event if the session was never started. In particular, propagate
+    // the status to the client even if the session is paused: the transcoder could have
+    // posted a finish while we were pausing it, arriving after we changed the current session.
+    if (mSessionMap[sessionKey].state == Session::NOT_STARTED) {
+        ALOGW("%s: ignoring %s for session %s that was never started", __FUNCTION__, reason,
+              sessionToString(sessionKey).c_str());
+        return;
+    }
+
+    ALOGV("%s: session %s %s", __FUNCTION__, sessionToString(sessionKey).c_str(), reason);
+    func(sessionKey);
+}
+
+void TranscodingSessionController::onStarted(ClientIdType clientId, SessionIdType sessionId) {
+    notifyClient(clientId, sessionId, "started", [=](const SessionKeyType& sessionKey) {
+        auto callback = mSessionMap[sessionKey].callback.lock();
+        if (callback != nullptr) {
+            callback->onTranscodingStarted(sessionId);
+        }
+    });
+}
+
+void TranscodingSessionController::onPaused(ClientIdType clientId, SessionIdType sessionId) {
+    notifyClient(clientId, sessionId, "paused", [=](const SessionKeyType& sessionKey) {
+        auto callback = mSessionMap[sessionKey].callback.lock();
+        if (callback != nullptr) {
+            callback->onTranscodingPaused(sessionId);
+        }
+    });
+}
+
+void TranscodingSessionController::onResumed(ClientIdType clientId, SessionIdType sessionId) {
+    notifyClient(clientId, sessionId, "resumed", [=](const SessionKeyType& sessionKey) {
+        auto callback = mSessionMap[sessionKey].callback.lock();
+        if (callback != nullptr) {
+            callback->onTranscodingResumed(sessionId);
+        }
+    });
+}
+
+void TranscodingSessionController::onFinish(ClientIdType clientId, SessionIdType sessionId) {
+    notifyClient(clientId, sessionId, "finish", [=](const SessionKeyType& sessionKey) {
+        {
+            auto clientCallback = mSessionMap[sessionKey].callback.lock();
+            if (clientCallback != nullptr) {
+                clientCallback->onTranscodingFinished(
+                        sessionId, TranscodingResultParcel({sessionId, -1 /*actualBitrateBps*/,
+                                                            std::nullopt /*sessionStats*/}));
+            }
+        }
+
+        // Remove the session.
+        removeSession_l(sessionKey);
+
+        // Start next session.
+        updateCurrentSession_l();
+
+        validateState_l();
+    });
+}
+
+void TranscodingSessionController::onError(ClientIdType clientId, SessionIdType sessionId,
+                                           TranscodingErrorCode err) {
+    notifyClient(clientId, sessionId, "error", [=](const SessionKeyType& sessionKey) {
+        {
+            auto clientCallback = mSessionMap[sessionKey].callback.lock();
+            if (clientCallback != nullptr) {
+                clientCallback->onTranscodingFailed(sessionId, err);
+            }
+        }
+
+        // Remove the session.
+        removeSession_l(sessionKey);
+
+        // Start next session.
+        updateCurrentSession_l();
+
+        validateState_l();
+    });
+}
+
+void TranscodingSessionController::onProgressUpdate(ClientIdType clientId, SessionIdType sessionId,
+                                                    int32_t progress) {
+    notifyClient(clientId, sessionId, "progress", [=](const SessionKeyType& sessionKey) {
+        auto callback = mSessionMap[sessionKey].callback.lock();
+        if (callback != nullptr) {
+            callback->onProgressUpdate(sessionId, progress);
+        }
+        mSessionMap[sessionKey].lastProgress = progress;
+    });
+}
+
+void TranscodingSessionController::onResourceLost() {
+    ALOGI("%s", __FUNCTION__);
+
+    std::scoped_lock lock{mLock};
+
+    if (mResourceLost) {
+        return;
+    }
+
+    // If we receive a resource-loss event, the TranscoderLibrary has already paused
+    // the transcoding, so we don't need to ask it to pause again; we only need to
+    // update the session state here.
+    if (mCurrentSession != nullptr && mCurrentSession->state == Session::RUNNING) {
+        mCurrentSession->state = Session::PAUSED;
+        // Notify the client as a paused event.
+        auto clientCallback = mCurrentSession->callback.lock();
+        if (clientCallback != nullptr) {
+            clientCallback->onTranscodingPaused(mCurrentSession->key.second);
+        }
+    }
+    mResourceLost = true;
+
+    validateState_l();
+}
+
+void TranscodingSessionController::onTopUidsChanged(const std::unordered_set<uid_t>& uids) {
+    if (uids.empty()) {
+        ALOGW("%s: ignoring empty uids", __FUNCTION__);
+        return;
+    }
+
+    std::string uidStr;
+    for (auto it = uids.begin(); it != uids.end(); it++) {
+        if (!uidStr.empty()) {
+            uidStr += ", ";
+        }
+        uidStr += std::to_string(*it);
+    }
+
+    ALOGD("%s: topUids: size %zu, uids: %s", __FUNCTION__, uids.size(), uidStr.c_str());
+
+    std::scoped_lock lock{mLock};
+
+    moveUidsToTop_l(uids, true /*preserveTopUid*/);
+
+    updateCurrentSession_l();
+
+    validateState_l();
+}
+
+void TranscodingSessionController::onResourceAvailable() {
+    std::scoped_lock lock{mLock};
+
+    if (!mResourceLost) {
+        return;
+    }
+
+    ALOGI("%s", __FUNCTION__);
+
+    mResourceLost = false;
+    updateCurrentSession_l();
+
+    validateState_l();
+}
+
+void TranscodingSessionController::validateState_l() {
+#ifdef VALIDATE_STATE
+    LOG_ALWAYS_FATAL_IF(mSessionQueues.count(OFFLINE_UID) != 1,
+                        "mSessionQueues offline queue number is not 1");
+    LOG_ALWAYS_FATAL_IF(*mOfflineUidIterator != OFFLINE_UID,
+                        "mOfflineUidIterator not pointing to offline uid");
+    LOG_ALWAYS_FATAL_IF(mUidSortedList.size() != mSessionQueues.size(),
+                        "mUidList and mSessionQueues size mismatch");
+
+    int32_t totalSessions = 0;
+    for (auto uid : mUidSortedList) {
+        LOG_ALWAYS_FATAL_IF(mSessionQueues.count(uid) != 1,
+                            "mSessionQueues count for uid %d is not 1", uid);
+        for (auto& sessionKey : mSessionQueues[uid]) {
+            LOG_ALWAYS_FATAL_IF(mSessionMap.count(sessionKey) != 1,
+                                "mSessions count for session %s is not 1",
+                                sessionToString(sessionKey).c_str());
+        }
+
+        totalSessions += mSessionQueues[uid].size();
+    }
+    LOG_ALWAYS_FATAL_IF(mSessionMap.size() != totalSessions,
+                        "mSessions size doesn't match total sessions counted from uid queues");
+#endif  // VALIDATE_STATE
+}
+
+}  // namespace android
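
The uid prioritization in TranscodingSessionController::moveUidsToTop_l() above is a move-to-front pass over a std::list, with an option to keep the currently scheduled front uid in place so its running session is not preempted by a same-priority peer. A standalone, simplified sketch (no offline-uid handling) that can be compiled and run on its own:

    #include <cstdio>
    #include <list>
    #include <unordered_set>

    void moveUidsToTop(std::list<unsigned>& sorted, const std::unordered_set<unsigned>& top,
                       bool preserveTopUid) {
        if (top.empty() || sorted.empty()) return;

        const unsigned curTop = sorted.front();
        bool reinsertCurTop = false;

        for (auto it = sorted.begin(); it != sorted.end();) {
            if (top.count(*it) > 0) {
                unsigned uid = *it;
                it = sorted.erase(it);
                if (preserveTopUid && uid == curTop) {
                    reinsertCurTop = true;  // re-insert after the loop so it stays frontmost
                } else {
                    sorted.push_front(uid);
                }
            } else {
                ++it;
            }
        }
        if (reinsertCurTop) {
            sorted.push_front(curTop);
        }
    }

    int main() {
        std::list<unsigned> uids = {10001, 10002, 10003, 10004};
        moveUidsToTop(uids, {10001, 10003}, true /*preserveTopUid*/);
        for (unsigned u : uids) printf("%u ", u);  // prints: 10001 10003 10002 10004
        printf("\n");
        return 0;
    }
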
diff --git a/media/libmediatranscoding/TranscodingUidPolicy.cpp b/media/libmediatranscoding/TranscodingUidPolicy.cpp
new file mode 100644
index 0000000..084a871
--- /dev/null
+++ b/media/libmediatranscoding/TranscodingUidPolicy.cpp
@@ -0,0 +1,323 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "TranscodingUidPolicy"
+
+#include <aidl/android/media/BnResourceManagerClient.h>
+#include <aidl/android/media/IResourceManagerService.h>
+#include <android/binder_manager.h>
+#include <android/binder_process.h>
+#include <android/content/pm/IPackageManagerNative.h>
+#include <binder/ActivityManager.h>
+#include <binder/IServiceManager.h>
+#include <binder/PermissionController.h>
+#include <cutils/misc.h>  // FIRST_APPLICATION_UID
+#include <cutils/multiuser.h>
+#include <inttypes.h>
+#include <media/TranscodingUidPolicy.h>
+#include <utils/Log.h>
+
+#include <utility>
+
+namespace android {
+
+constexpr static uid_t OFFLINE_UID = -1;
+constexpr static const char* kTranscodingTag = "transcoding";
+
+/*
+ * The OOM score we're going to ask ResourceManager to use for our native transcoding
+ * service. ResourceManager issues reclaims based on these scores. It gets the scores
+ * from ActivityManagerService, which doesn't track native services. The values of the
+ * OOM scores are defined in:
+ * frameworks/base/services/core/java/com/android/server/am/ProcessList.java
+ * We use SERVICE_ADJ which is lower priority than an app possibly visible to the
+ * user, but higher priority than a cached app (which could be killed without disruption
+ * to the user).
+ */
+constexpr static int32_t SERVICE_ADJ = 500;
+
+using Status = ::ndk::ScopedAStatus;
+using aidl::android::media::BnResourceManagerClient;
+using aidl::android::media::IResourceManagerService;
+
+/*
+ * Placeholder ResourceManagerClient for registering process info override
+ * with the IResourceManagerService. This is only used as a token by the service
+ * to get notifications about binder death, not used for reclaiming resources.
+ */
+struct TranscodingUidPolicy::ResourceManagerClient : public BnResourceManagerClient {
+    explicit ResourceManagerClient() = default;
+
+    Status reclaimResource(bool* _aidl_return) override {
+        *_aidl_return = false;
+        return Status::ok();
+    }
+
+    Status getName(::std::string* _aidl_return) override {
+        _aidl_return->clear();
+        return Status::ok();
+    }
+
+    virtual ~ResourceManagerClient() = default;
+};
+
+struct TranscodingUidPolicy::UidObserver : public BnUidObserver,
+                                           public virtual IBinder::DeathRecipient {
+    explicit UidObserver(TranscodingUidPolicy* owner) : mOwner(owner) {}
+
+    // IUidObserver
+    void onUidGone(uid_t uid, bool disabled) override;
+    void onUidActive(uid_t uid) override;
+    void onUidIdle(uid_t uid, bool disabled) override;
+    void onUidStateChanged(uid_t uid, int32_t procState, int64_t procStateSeq,
+                           int32_t capability) override;
+
+    // IBinder::DeathRecipient implementation
+    void binderDied(const wp<IBinder>& who) override;
+
+    TranscodingUidPolicy* mOwner;
+};
+
+void TranscodingUidPolicy::UidObserver::onUidGone(uid_t uid __unused, bool disabled __unused) {}
+
+void TranscodingUidPolicy::UidObserver::onUidActive(uid_t uid __unused) {}
+
+void TranscodingUidPolicy::UidObserver::onUidIdle(uid_t uid __unused, bool disabled __unused) {}
+
+void TranscodingUidPolicy::UidObserver::onUidStateChanged(uid_t uid, int32_t procState,
+                                                          int64_t procStateSeq __unused,
+                                                          int32_t capability __unused) {
+    mOwner->onUidStateChanged(uid, procState);
+}
+
+void TranscodingUidPolicy::UidObserver::binderDied(const wp<IBinder>& /*who*/) {
+    ALOGW("TranscodingUidPolicy: ActivityManager has died");
+    // TODO(chz): this is a rare event (since if the AMS is dead, the system is
+    // probably dead as well). But we should try to reconnect.
+    mOwner->setUidObserverRegistered(false);
+}
+
+////////////////////////////////////////////////////////////////////////////
+
+//static
+status_t TranscodingUidPolicy::getUidForPackage(String16 packageName, /*inout*/ uid_t& uid) {
+    PermissionController pc;
+    uid = pc.getPackageUid(packageName, 0);
+    if (uid <= 0) {
+        ALOGE("Unknown package: '%s'", String8(packageName).string());
+        return BAD_VALUE;
+    }
+
+    uid = multiuser_get_uid(0 /*userId*/, uid);
+    return NO_ERROR;
+}
+
+TranscodingUidPolicy::TranscodingUidPolicy()
+      : mAm(std::make_shared<ActivityManager>()),
+        mUidObserver(new UidObserver(this)),
+        mRegistered(false),
+        mTopUidState(ActivityManager::PROCESS_STATE_UNKNOWN) {
+    registerSelf();
+    setProcessInfoOverride();
+}
+
+TranscodingUidPolicy::~TranscodingUidPolicy() {
+    unregisterSelf();
+}
+
+void TranscodingUidPolicy::registerSelf() {
+    status_t res = mAm->linkToDeath(mUidObserver.get());
+    mAm->registerUidObserver(
+            mUidObserver.get(),
+            ActivityManager::UID_OBSERVER_GONE | ActivityManager::UID_OBSERVER_IDLE |
+                    ActivityManager::UID_OBSERVER_ACTIVE | ActivityManager::UID_OBSERVER_PROCSTATE,
+            ActivityManager::PROCESS_STATE_UNKNOWN, String16(kTranscodingTag));
+
+    if (res == OK) {
+        Mutex::Autolock _l(mUidLock);
+
+        mRegistered = true;
+        ALOGI("TranscodingUidPolicy: Registered with ActivityManager");
+    } else {
+        mAm->unregisterUidObserver(mUidObserver.get());
+    }
+}
+
+void TranscodingUidPolicy::unregisterSelf() {
+    mAm->unregisterUidObserver(mUidObserver.get());
+    mAm->unlinkToDeath(mUidObserver.get());
+
+    Mutex::Autolock _l(mUidLock);
+
+    mRegistered = false;
+
+    ALOGI("TranscodingUidPolicy: Unregistered with ActivityManager");
+}
+
+void TranscodingUidPolicy::setProcessInfoOverride() {
+    ::ndk::SpAIBinder binder(AServiceManager_getService("media.resource_manager"));
+    std::shared_ptr<IResourceManagerService> service = IResourceManagerService::fromBinder(binder);
+    if (service == nullptr) {
+        ALOGE("Failed to get IResourceManagerService");
+        return;
+    }
+
+    mProcInfoOverrideClient = ::ndk::SharedRefBase::make<ResourceManagerClient>();
+    Status status = service->overrideProcessInfo(
+            mProcInfoOverrideClient, getpid(), ActivityManager::PROCESS_STATE_SERVICE, SERVICE_ADJ);
+    if (!status.isOk()) {
+        ALOGW("Failed to setProcessInfoOverride.");
+    }
+}
+
+void TranscodingUidPolicy::setUidObserverRegistered(bool registered) {
+    Mutex::Autolock _l(mUidLock);
+
+    mRegistered = registered;
+}
+
+void TranscodingUidPolicy::setCallback(const std::shared_ptr<UidPolicyCallbackInterface>& cb) {
+    mUidPolicyCallback = cb;
+}
+
+void TranscodingUidPolicy::registerMonitorUid(uid_t uid) {
+    Mutex::Autolock _l(mUidLock);
+    if (uid == OFFLINE_UID) {
+        ALOGW("Ignoring the offline uid");
+        return;
+    }
+    if (mUidStateMap.find(uid) != mUidStateMap.end()) {
+        ALOGE("%s: Trying to register uid: %d which is already monitored!", __FUNCTION__, uid);
+        return;
+    }
+
+    int32_t state = ActivityManager::PROCESS_STATE_UNKNOWN;
+    if (mRegistered && mAm->isUidActive(uid, String16(kTranscodingTag))) {
+        state = mAm->getUidProcessState(uid, String16(kTranscodingTag));
+    }
+
+    ALOGV("%s: inserting new uid: %u, procState %d", __FUNCTION__, uid, state);
+
+    mUidStateMap.emplace(std::pair<uid_t, int32_t>(uid, state));
+    mStateUidMap[state].insert(uid);
+
+    updateTopUid_l();
+}
+
+void TranscodingUidPolicy::unregisterMonitorUid(uid_t uid) {
+    Mutex::Autolock _l(mUidLock);
+
+    auto it = mUidStateMap.find(uid);
+    if (it == mUidStateMap.end()) {
+        ALOGE("%s: Trying to unregister uid: %d which is not monitored!", __FUNCTION__, uid);
+        return;
+    }
+
+    auto stateIt = mStateUidMap.find(it->second);
+    if (stateIt != mStateUidMap.end()) {
+        stateIt->second.erase(uid);
+        if (stateIt->second.empty()) {
+            mStateUidMap.erase(stateIt);
+        }
+    }
+    mUidStateMap.erase(it);
+
+    updateTopUid_l();
+}
+
+bool TranscodingUidPolicy::isUidOnTop(uid_t uid) {
+    Mutex::Autolock _l(mUidLock);
+
+    return mTopUidState != ActivityManager::PROCESS_STATE_UNKNOWN &&
+           mTopUidState == getProcState_l(uid);
+}
+
+std::unordered_set<uid_t> TranscodingUidPolicy::getTopUids() const {
+    Mutex::Autolock _l(mUidLock);
+
+    if (mTopUidState == ActivityManager::PROCESS_STATE_UNKNOWN) {
+        return std::unordered_set<uid_t>();
+    }
+
+    return mStateUidMap.at(mTopUidState);
+}
+
+void TranscodingUidPolicy::onUidStateChanged(uid_t uid, int32_t procState) {
+    ALOGV("onUidStateChanged: %u, procState %d", uid, procState);
+
+    bool topUidSetChanged = false;
+    std::unordered_set<uid_t> topUids;
+    {
+        Mutex::Autolock _l(mUidLock);
+        auto it = mUidStateMap.find(uid);
+        if (it != mUidStateMap.end() && it->second != procState) {
+            // Top set changed if 1) the uid is in the current top uid set, or 2) the
+            // new procState is at least the same priority as the current top uid state.
+            bool isUidCurrentTop = mTopUidState != ActivityManager::PROCESS_STATE_UNKNOWN &&
+                                   mStateUidMap[mTopUidState].count(uid) > 0;
+            bool isNewStateHigherThanTop = procState != ActivityManager::PROCESS_STATE_UNKNOWN &&
+                                           (procState <= mTopUidState ||
+                                            mTopUidState == ActivityManager::PROCESS_STATE_UNKNOWN);
+            topUidSetChanged = (isUidCurrentTop || isNewStateHigherThanTop);
+
+            // Move uid to the new procState.
+            mStateUidMap[it->second].erase(uid);
+            mStateUidMap[procState].insert(uid);
+            it->second = procState;
+
+            if (topUidSetChanged) {
+                updateTopUid_l();
+
+                // Make a copy of the uid set for callback.
+                topUids = mStateUidMap[mTopUidState];
+            }
+        }
+    }
+
+    ALOGV("topUidSetChanged: %d", topUidSetChanged);
+
+    if (topUidSetChanged) {
+        auto callback = mUidPolicyCallback.lock();
+        if (callback != nullptr) {
+            callback->onTopUidsChanged(topUids);
+        }
+    }
+}
+
+void TranscodingUidPolicy::updateTopUid_l() {
+    mTopUidState = ActivityManager::PROCESS_STATE_UNKNOWN;
+
+    // Find the lowest uid state (ignoring PROCESS_STATE_UNKNOWN) with some monitored uids.
+    for (auto stateIt = mStateUidMap.begin(); stateIt != mStateUidMap.end(); stateIt++) {
+        if (stateIt->first != ActivityManager::PROCESS_STATE_UNKNOWN && !stateIt->second.empty()) {
+            mTopUidState = stateIt->first;
+            break;
+        }
+    }
+
+    ALOGV("%s: top uid state is %d", __FUNCTION__, mTopUidState);
+}
+
+int32_t TranscodingUidPolicy::getProcState_l(uid_t uid) {
+    auto it = mUidStateMap.find(uid);
+    if (it != mUidStateMap.end()) {
+        return it->second;
+    }
+    return ActivityManager::PROCESS_STATE_UNKNOWN;
+}
+
+}  // namespace android
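
TranscodingUidPolicy::updateTopUid_l() above relies on the ActivityManager convention that a numerically lower proc-state means higher importance, so the "top" state is the smallest non-UNKNOWN state that still has monitored uids. If the state-to-uids map is kept ordered by state, that reduces to a scan over sorted keys; a minimal standalone sketch (local sentinel value and illustrative state numbers, not the real constants):

    #include <cstdio>
    #include <map>
    #include <unordered_set>

    constexpr int kProcStateUnknown = -1;  // local stand-in for PROCESS_STATE_UNKNOWN

    // Returns the highest-importance (numerically lowest) state that has uids.
    int findTopState(const std::map<int, std::unordered_set<unsigned>>& stateToUids) {
        for (const auto& [state, uids] : stateToUids) {  // std::map iterates keys in ascending order
            if (state != kProcStateUnknown && !uids.empty()) {
                return state;
            }
        }
        return kProcStateUnknown;
    }

    int main() {
        std::map<int, std::unordered_set<unsigned>> stateToUids = {
                {kProcStateUnknown, {10007}},
                {2, {10001}},         // e.g. a top/foreground-like state
                {5, {10002, 10003}},  // e.g. a lower-importance state
        };
        printf("top state: %d\n", findTopState(stateToUids));  // prints: top state: 2
        return 0;
    }
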
diff --git a/media/libmediatranscoding/aidl/android/media/IMediaTranscodingService.aidl b/media/libmediatranscoding/aidl/android/media/IMediaTranscodingService.aidl
index 07b6c1a..ad2358e 100644
--- a/media/libmediatranscoding/aidl/android/media/IMediaTranscodingService.aidl
+++ b/media/libmediatranscoding/aidl/android/media/IMediaTranscodingService.aidl
@@ -16,9 +16,10 @@
 
 package android.media;
 
-import android.media.TranscodingJobParcel;
+import android.media.ITranscodingClient;
+import android.media.ITranscodingClientCallback;
+import android.media.TranscodingSessionParcel;
 import android.media.TranscodingRequestParcel;
-import android.media.ITranscodingServiceClient;
 
 /**
  * Binder interface for MediaTranscodingService.
@@ -48,64 +49,25 @@
     /**
      * Register the client with the MediaTranscodingService.
      *
-     * Client must call this function to register itself with the service in order to perform
-     * transcoding. This function will return a unique positive Id assigned by the service.
-     * Client should save this Id and use it for all the transaction with the service.
+     * Client must call this function to register itself with the service in
+     * order to perform transcoding tasks. This function will return an
+     * ITranscodingClient interface object. The client should save and use it
+     * for all future transactions with the service.
      *
-     * @param client interface for the MediaTranscodingService to call the client.
+     * @param callback client interface for the MediaTranscodingService to call
+     *        the client.
+     * @param clientName name of the client.
      * @param opPackageName op package name of the client.
-     * @param clientUid user id of the client.
-     * @param clientPid process id of the client.
-     * @return a unique positive Id assigned to the client by the service, -1  means failed to
-     * register.
+     * @return an ITranscodingClient interface object, with nullptr indicating
+     *         failure to register.
      */
-    int registerClient(in ITranscodingServiceClient client,
-                       in String opPackageName,
-                       in int clientUid,
-                       in int clientPid);
-
-    /**
-    * Unregister the client with the MediaTranscodingService.
-    *
-    * Client will not be able to perform any more transcoding after unregister.
-    *
-    * @param clientId assigned Id of the client.
-    * @return true if succeeds, false otherwise.
-    */
-    boolean unregisterClient(in int clientId);
+    ITranscodingClient registerClient(
+            in ITranscodingClientCallback callback,
+            in String clientName,
+            in String opPackageName);
 
     /**
     * Returns the number of clients. This is used for debugging.
     */
     int getNumOfClients();
-
-    /**
-     * Submits a transcoding request to MediaTranscodingService.
-     *
-     * @param clientId assigned Id of the client.
-     * @param request a TranscodingRequest contains transcoding configuration.
-     * @param job(output variable) a TranscodingJob generated by the MediaTranscodingService.
-     * @return a unique positive jobId generated by the MediaTranscodingService, -1 means failure.
-     */
-    int submitRequest(in int clientId,
-                      in TranscodingRequestParcel request,
-                      out TranscodingJobParcel job);
-
-    /**
-     * Cancels a transcoding job.
-     *
-     * @param clientId assigned id of the client.
-     * @param jobId a TranscodingJob generated by the MediaTranscodingService.
-     * @return true if succeeds, false otherwise.
-     */
-    boolean cancelJob(in int clientId, in int jobId);
-
-    /**
-     * Queries the job detail associated with a jobId.
-     *
-     * @param jobId a TranscodingJob generated by the MediaTranscodingService.
-     * @param job(output variable) the TranscodingJob associated with the jobId.
-     * @return true if succeeds, false otherwise.
-     */
-    boolean getJobWithId(in int jobId, out TranscodingJobParcel job);
 }
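
For orientation, here is a rough client-side sketch of the new registration flow using the NDK backend of this interface. The interface and callback types come from the AIDL files in this change; the generated class and header names follow standard AIDL codegen conventions, and the "media.transcoding" service name plus the client/package strings are assumptions used only for illustration:

    #include <aidl/android/media/IMediaTranscodingService.h>
    #include <android/binder_manager.h>

    #include <memory>

    using ::aidl::android::media::IMediaTranscodingService;
    using ::aidl::android::media::ITranscodingClient;
    using ::aidl::android::media::ITranscodingClientCallback;

    // 'callback' would be an implementation of the generated BnTranscodingClientCallback.
    std::shared_ptr<ITranscodingClient> registerWithService(
            const std::shared_ptr<ITranscodingClientCallback>& callback) {
        // Assumed service name, for illustration only.
        ::ndk::SpAIBinder binder(AServiceManager_getService("media.transcoding"));
        std::shared_ptr<IMediaTranscodingService> service =
                IMediaTranscodingService::fromBinder(binder);
        if (service == nullptr) {
            return nullptr;  // service not available
        }

        std::shared_ptr<ITranscodingClient> client;
        ::ndk::ScopedAStatus status =
                service->registerClient(callback, "ExampleClient", "com.example.app", &client);
        return status.isOk() ? client : nullptr;
    }
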
diff --git a/media/libmediatranscoding/aidl/android/media/ITranscodingClient.aidl b/media/libmediatranscoding/aidl/android/media/ITranscodingClient.aidl
new file mode 100644
index 0000000..151e3d0
--- /dev/null
+++ b/media/libmediatranscoding/aidl/android/media/ITranscodingClient.aidl
@@ -0,0 +1,63 @@
+/**
+ * Copyright (c) 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.TranscodingSessionParcel;
+import android.media.TranscodingRequestParcel;
+
+/**
+ * ITranscodingClient
+ *
+ * Interface for a client to communicate with MediaTranscodingService.
+ *
+ * {@hide}
+ */
+interface ITranscodingClient {
+    /**
+     * Submits a transcoding request to MediaTranscodingService.
+     *
+     * @param request a TranscodingRequest containing the transcoding configuration.
+     * @param session (output variable) a TranscodingSession generated by MediaTranscodingService.
+     * @return true if the submission succeeds, false otherwise.
+     */
+    boolean submitRequest(in TranscodingRequestParcel request,
+                          out TranscodingSessionParcel session);
+
+    /**
+     * Cancels a transcoding session.
+     *
+     * @param sessionId the id of a TranscodingSession generated by the MediaTranscodingService.
+     * @return true if succeeds, false otherwise.
+     */
+    boolean cancelSession(in int sessionId);
+
+    /**
+     * Queries the session detail associated with a sessionId.
+     *
+     * @param sessionId the id of a TranscodingSession generated by the MediaTranscodingService.
+     * @param session (output variable) the TranscodingSession associated with the sessionId.
+     * @return true if succeeds, false otherwise.
+     */
+    boolean getSessionWithId(in int sessionId, out TranscodingSessionParcel session);
+
+    /**
+    * Unregister the client with the MediaTranscodingService.
+    *
+    * Client will not be able to perform any more transcoding after unregister.
+    */
+    void unregister();
+}
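
Once a client holds the ITranscodingClient returned by registerClient(), the methods above compose into a simple session lifecycle. A hedged sketch of that flow (NDK backend; the parcel field names match those referenced elsewhere in this change, while the file paths are placeholders):

    #include <aidl/android/media/ITranscodingClient.h>
    #include <aidl/android/media/TranscodingRequestParcel.h>
    #include <aidl/android/media/TranscodingSessionParcel.h>

    #include <memory>

    using ::aidl::android::media::ITranscodingClient;
    using ::aidl::android::media::TranscodingRequestParcel;
    using ::aidl::android::media::TranscodingSessionParcel;

    void runOneSession(const std::shared_ptr<ITranscodingClient>& client) {
        TranscodingRequestParcel request;
        request.sourceFilePath = "/path/to/source.mp4";  // placeholder paths
        request.destinationFilePath = "/path/to/destination.mp4";

        TranscodingSessionParcel session;
        bool accepted = false;
        if (!client->submitRequest(request, &session, &accepted).isOk() || !accepted) {
            return;  // submission rejected by the service
        }

        // Later, if the work is no longer needed:
        bool cancelled = false;
        client->cancelSession(session.sessionId, &cancelled);

        // When done with the service entirely:
        client->unregister();
    }
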
diff --git a/media/libmediatranscoding/aidl/android/media/ITranscodingClientCallback.aidl b/media/libmediatranscoding/aidl/android/media/ITranscodingClientCallback.aidl
new file mode 100644
index 0000000..d7d9b6f
--- /dev/null
+++ b/media/libmediatranscoding/aidl/android/media/ITranscodingClientCallback.aidl
@@ -0,0 +1,107 @@
+/**
+ * Copyright (c) 2019, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.TranscodingErrorCode;
+import android.media.TranscodingSessionParcel;
+import android.media.TranscodingResultParcel;
+import android.os.ParcelFileDescriptor;
+
+/**
+ * ITranscodingClientCallback
+ *
+ * Interface for the MediaTranscodingService to communicate with the client.
+ *
+ * {@hide}
+ */
+interface ITranscodingClientCallback {
+    /**
+    * Called to open a raw file descriptor to access data under a URI.
+    *
+    * @param fileUri The URI of the file to open.
+    * @param mode The file mode to use. Must be one of ("r", "w", "rw").
+    * @return a ParcelFileDescriptor if the file is opened successfully, null otherwise.
+    */
+    ParcelFileDescriptor openFileDescriptor(in @utf8InCpp String fileUri,
+                                            in @utf8InCpp String mode);
+
+    /**
+    * Called when the transcoding associated with the sessionId has started.
+    * This will only be called if the client requested status updates for the session.
+    *
+    * @param sessionId sessionId assigned by the MediaTranscodingService upon receiving request.
+    */
+    oneway void onTranscodingStarted(in int sessionId);
+
+    /**
+    * Called when the transcoding associated with the sessionId is paused.
+    * This will only be called if the client requested updates on the status of the session.
+    *
+    * @param sessionId the sessionId assigned by the MediaTranscodingService for this request.
+    */
+    oneway void onTranscodingPaused(in int sessionId);
+
+    /**
+    * Called when the transcoding associated with the sessionId is resumed.
+    * This will only be called if the client requested updates on the status of the session.
+    *
+    * @param sessionId the sessionId assigned by the MediaTranscodingService for this request.
+    */
+    oneway void onTranscodingResumed(in int sessionId);
+
+    /**
+    * Called when the transcoding associated with the sessionId finished.
+    *
+    * @param sessionId the sessionId assigned by the MediaTranscodingService for this request.
+    * @param result contains the transcoded file stats and other transcoding metrics if requested.
+    */
+    oneway void onTranscodingFinished(in int sessionId, in TranscodingResultParcel result);
+
+    /**
+    * Called when the transcoding associated with the sessionId failed.
+    *
+    * @param sessionId the sessionId assigned by the MediaTranscodingService for this request.
+    * @param errorCode error code that indicates the error.
+    */
+    oneway void onTranscodingFailed(in int sessionId, in TranscodingErrorCode errorCode);
+
+    /**
+    * Called when the await number associated with the sessionId gets updated, i.e. when the
+    * number of sessions ahead of it in the session queue changes.
+    *
+    * <p> This will only be called if the client set requestProgressUpdate to true in the
+    * TranscodingRequest submitted to the MediaTranscodingService.
+    *
+    * @param sessionId the sessionId assigned by the MediaTranscodingService for this request.
+    * @param oldAwaitNumber previous number of sessions ahead of the current session.
+    * @param newAwaitNumber updated number of sessions ahead of the current session.
+    */
+    oneway void onAwaitNumberOfSessionsChanged(in int sessionId,
+                                               in int oldAwaitNumber,
+                                               in int newAwaitNumber);
+
+    /**
+    * Called when there is an update on the progress of the TranscodingSession.
+    *
+    * <p> This will only be called if the client set requestProgressUpdate to true in the
+    * TranscodingRequest submitted to the MediaTranscodingService.
+    *
+    * @param sessionId the sessionId assigned by the MediaTranscodingService for this request.
+    * @param progress an integer ranging from 0 to 100, inclusive.
+    */
+    oneway void onProgressUpdate(in int sessionId, in int progress);
+}
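
For illustration, a minimal client-side callback could be built on the generated BnTranscodingClientCallback base, as sketched below. This assumes the NDK backend (where @utf8InCpp String maps to std::string and ParcelFileDescriptor maps to ndk::ScopedFileDescriptor); the class and its trivial behavior are hypothetical.

#include <fcntl.h>

#include <aidl/android/media/BnTranscodingClientCallback.h>

using ::aidl::android::media::BnTranscodingClientCallback;
using ::aidl::android::media::TranscodingErrorCode;
using ::aidl::android::media::TranscodingResultParcel;

class ExampleClientCallback : public BnTranscodingClientCallback {
public:
    ndk::ScopedAStatus openFileDescriptor(const std::string& in_fileUri,
                                          const std::string& in_mode,
                                          ndk::ScopedFileDescriptor* _aidl_return) override {
        // Simplified: treat the uri as a plain path and only distinguish read from read/write.
        int flags = (in_mode == "r") ? O_RDONLY : O_RDWR;
        *_aidl_return = ndk::ScopedFileDescriptor(open(in_fileUri.c_str(), flags));
        return ndk::ScopedAStatus::ok();
    }
    ndk::ScopedAStatus onTranscodingStarted(int32_t) override { return ndk::ScopedAStatus::ok(); }
    ndk::ScopedAStatus onTranscodingPaused(int32_t) override { return ndk::ScopedAStatus::ok(); }
    ndk::ScopedAStatus onTranscodingResumed(int32_t) override { return ndk::ScopedAStatus::ok(); }
    ndk::ScopedAStatus onTranscodingFinished(int32_t /*sessionId*/,
                                             const TranscodingResultParcel& /*result*/) override {
        // Pick up result.actualBitrateBps and result.sessionStats here if requested.
        return ndk::ScopedAStatus::ok();
    }
    ndk::ScopedAStatus onTranscodingFailed(int32_t /*sessionId*/,
                                           TranscodingErrorCode /*errorCode*/) override {
        return ndk::ScopedAStatus::ok();
    }
    ndk::ScopedAStatus onAwaitNumberOfSessionsChanged(int32_t, int32_t, int32_t) override {
        return ndk::ScopedAStatus::ok();
    }
    ndk::ScopedAStatus onProgressUpdate(int32_t, int32_t /*progress*/) override {
        return ndk::ScopedAStatus::ok();
    }
};
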
diff --git a/media/libmediatranscoding/aidl/android/media/ITranscodingServiceClient.aidl b/media/libmediatranscoding/aidl/android/media/ITranscodingServiceClient.aidl
deleted file mode 100644
index e23c833..0000000
--- a/media/libmediatranscoding/aidl/android/media/ITranscodingServiceClient.aidl
+++ /dev/null
@@ -1,77 +0,0 @@
-/**
- * Copyright (c) 2019, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media;
-
-import android.media.TranscodingErrorCode;
-import android.media.TranscodingJobParcel;
-import android.media.TranscodingResultParcel;
-
-/**
- * ITranscodingServiceClient interface for the MediaTranscodingervice to communicate with the
- * client.
- *
- * {@hide}
- */
-//TODO(hkuang): Implement the interface.
-interface ITranscodingServiceClient {
-    /**
-     * Retrieves the name of the client.
-     */
-    @utf8InCpp String getName();
-
-    /**
-    * Called when the transcoding associated with the jobId finished.
-    *
-    * @param jobId jobId assigned by the MediaTranscodingService upon receiving request.
-    * @param result contains the transcoded file stats and other transcoding metrics if requested.
-    */
-    oneway void onTranscodingFinished(in int jobId, in TranscodingResultParcel result);
-
-    /**
-    * Called when the transcoding associated with the jobId failed.
-    *
-    * @param jobId jobId assigned by the MediaTranscodingService upon receiving request.
-    * @param errorCode error code that indicates the error.
-    */
-    oneway void onTranscodingFailed(in int jobId, in TranscodingErrorCode errorCode);
-
-    /**
-    * Called when the transcoding configuration associated with the jobId gets updated, i.e. wait
-    * number in the job queue.
-    *
-    * <p> This will only be called if client set requestUpdate to be true in the TranscodingRequest
-    * submitted to the MediaTranscodingService.
-    *
-    * @param jobId jobId assigned by the MediaTranscodingService upon receiving request.
-    * @param oldAwaitNumber previous number of jobs ahead of current job.
-    * @param newAwaitNumber updated number of jobs ahead of current job.
-    */
-    oneway void onAwaitNumberOfJobsChanged(in int jobId,
-                                           in int oldAwaitNumber,
-                                           in int newAwaitNumber);
-
-    /**
-    * Called when there is an update on the progress of the TranscodingJob.
-    *
-    * <p> This will only be called if client set requestUpdate to be true in the TranscodingRequest
-    * submitted to the MediaTranscodingService.
-    *
-    * @param jobId jobId assigned by the MediaTranscodingService upon receiving request.
-    * @param progress an integer number ranging from 0 ~ 100 inclusive.
-    */
-    oneway void onProgressUpdate(in int jobId, in int progress);
-}
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingErrorCode.aidl b/media/libmediatranscoding/aidl/android/media/TranscodingErrorCode.aidl
index 7f47fdc..b044d41 100644
--- a/media/libmediatranscoding/aidl/android/media/TranscodingErrorCode.aidl
+++ b/media/libmediatranscoding/aidl/android/media/TranscodingErrorCode.aidl
@@ -23,11 +23,12 @@
  */
 @Backing(type = "int")
 enum TranscodingErrorCode {
-    kUnknown = 0,
-    kUnsupported = 1,
-    kDecoderError = 2,
-    kEncoderError = 3,
-    kExtractorError = 4,
-    kMuxerError = 5,
-    kInvalidBitstream = 6
+    kNoError = 0,
+    kUnknown = 1,
+    kMalformed = 2,
+    kUnsupported = 3,
+    kInvalidParameter = 4,
+    kInvalidOperation = 5,
+    kErrorIO = 6,
+    kInsufficientResources = 7,
 }
\ No newline at end of file
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingJobParcel.aidl b/media/libmediatranscoding/aidl/android/media/TranscodingJobParcel.aidl
deleted file mode 100644
index d912c38..0000000
--- a/media/libmediatranscoding/aidl/android/media/TranscodingJobParcel.aidl
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Copyright (C) 2019 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media;
-
-import android.media.TranscodingRequestParcel;
-
-/**
- * TranscodingJob is generated by the MediaTranscodingService upon receiving a TranscodingRequest.
- * It contains all the necessary configuration generated by the MediaTranscodingService for the
- * TranscodingRequest.
- *
- * {@hide}
- */
-//TODO(hkuang): Implement the parcelable.
-parcelable TranscodingJobParcel {
-    /**
-     * A unique positive Id generated by the MediaTranscodingService.
-     */
-    int jobId;
-
-    /**
-     * The request associated with the TranscodingJob.
-     */
-    TranscodingRequestParcel request;
-
-    /**
-    * Current number of jobs ahead of this job. The service schedules the job based on the priority
-    * passed from the client. Client could specify whether to receive updates when the
-    * awaitNumberOfJobs changes through setting requestProgressUpdate in the TranscodingRequest.
-    */
-    int awaitNumberOfJobs;
-}
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingJobPriority.aidl b/media/libmediatranscoding/aidl/android/media/TranscodingJobPriority.aidl
deleted file mode 100644
index 1a5d81a..0000000
--- a/media/libmediatranscoding/aidl/android/media/TranscodingJobPriority.aidl
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media;
-
-/**
- * Priority of a transcoding job.
- *
- * {@hide}
- */
-@Backing(type="int")
-enum TranscodingJobPriority {
-    // TODO(hkuang): define what each priority level actually mean.
-    kUnspecified = 0,
-    kLow = 1,
-    /**
-     * 2 ~ 20 is reserved for future use.
-     */
-    kNormal = 21,
-    /**
-     * 22 ~ 30 is reserved for future use.
-     */
-    kHigh = 31,
-}
\ No newline at end of file
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingRequestParcel.aidl b/media/libmediatranscoding/aidl/android/media/TranscodingRequestParcel.aidl
index 7b7986d..4b19f6a 100644
--- a/media/libmediatranscoding/aidl/android/media/TranscodingRequestParcel.aidl
+++ b/media/libmediatranscoding/aidl/android/media/TranscodingRequestParcel.aidl
@@ -16,8 +16,10 @@
 
 package android.media;
 
-import android.media.TranscodingJobPriority;
+import android.media.TranscodingSessionPriority;
+import android.media.TranscodingTestConfig;
 import android.media.TranscodingType;
+import android.media.TranscodingVideoTrackFormat;
 
 /**
  * TranscodingRequest contains the desired configuration for the transcoding.
@@ -27,9 +29,33 @@
 //TODO(hkuang): Implement the parcelable.
 parcelable TranscodingRequestParcel {
     /**
-     * Name of file to be transcoded.
+     * The absolute file path of the source file.
      */
-    @utf8InCpp String fileName;
+    @utf8InCpp String sourceFilePath;
+
+    /**
+     * The absolute file path of the destination file.
+     */
+    @utf8InCpp String destinationFilePath;
+
+    /**
+     * The UID of the client that this transcoding request is for. Only a privileged caller may
+     * set this uid, as only privileged callers may transcode on behalf of another client.
+     * -1 means not available.
+     */
+    int clientUid = -1;
+
+    /**
+     * The PID of the client that this transcoding request is for. Only a privileged caller may
+     * set this pid, as only privileged callers may transcode on behalf of another client.
+     * -1 means not available.
+     */
+    int clientPid = -1;
+
+    /**
+     * The package name of the client that this transcoding request is for.
+     */
+    @utf8InCpp String clientPackageName;
 
     /**
      * Type of the transcoding.
@@ -37,22 +63,44 @@
     TranscodingType transcodingType;
 
     /**
-     * Input source file descriptor.
+     * Requested video track format for the transcoding.
+     * Note that the transcoding service will try to fulfill the requested format as much as
+     * possible, subject to hardware and software limitations. The final video track format
+     * will be available in the TranscodingSessionParcel when the session is finished.
      */
-    ParcelFileDescriptor inFd;
-
-    /**
-     * Output transcoded file descriptor.
-     */
-    ParcelFileDescriptor outFd;
+    @nullable TranscodingVideoTrackFormat requestedVideoTrackFormat;
 
     /**
      * Priority of this transcoding. Service will schedule the transcoding based on the priority.
      */
-    TranscodingJobPriority priority;
+    TranscodingSessionPriority priority;
 
     /**
-     * Whether to receive update on progress and change of awaitNumJobs.
+     * Whether to receive updates on progress and changes of awaitNumberOfSessions.
+     * Defaults to false.
      */
-    boolean requestUpdate;
+    boolean requestProgressUpdate = false;
+
+    /**
+     * Whether to receive updates on the session's start/stop/pause/resume events.
+     * Defaults to false.
+     */
+    boolean requestSessionEventUpdate = false;
+
+    /**
+     * Whether this request is for testing.
+     */
+    boolean isForTesting = false;
+
+    /**
+     * Test configuration. This will be available only when isForTesting is set to true.
+     */
+    @nullable TranscodingTestConfig testConfig;
+
+    /**
+     * Whether to get the stats of the transcoding.
+     * If this is enabled, the TranscodingSessionStats will be returned in TranscodingResultParcel
+     * when the transcoding finishes.
+     */
+    boolean enableStats = false;
 }
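
For illustration, these fields map one-to-one onto the generated C++ parcelable, so a request could be populated roughly as sketched below (assumptions: the NDK backend maps @nullable fields to std::optional; the TranscodingType enumerators are not shown in this change, so transcodingType is left at its default; the helper is hypothetical).

#include <string>

#include <aidl/android/media/TranscodingRequestParcel.h>
#include <aidl/android/media/TranscodingSessionPriority.h>
#include <aidl/android/media/TranscodingVideoTrackFormat.h>

using namespace ::aidl::android::media;

TranscodingRequestParcel makeRequest(const std::string& src, const std::string& dst) {
    TranscodingRequestParcel request;
    request.sourceFilePath = src;
    request.destinationFilePath = dst;
    request.priority = TranscodingSessionPriority::kNormal;
    request.requestProgressUpdate = true;       // receive onProgressUpdate callbacks
    request.requestSessionEventUpdate = true;   // receive start/pause/resume callbacks

    // Only override the bitrate; fields left at -1 are derived from the source.
    TranscodingVideoTrackFormat format;
    format.bitrateBps = 5 * 1000 * 1000;
    request.requestedVideoTrackFormat = format;
    return request;
}
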
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingResultParcel.aidl b/media/libmediatranscoding/aidl/android/media/TranscodingResultParcel.aidl
index 65c49e7..7826e25 100644
--- a/media/libmediatranscoding/aidl/android/media/TranscodingResultParcel.aidl
+++ b/media/libmediatranscoding/aidl/android/media/TranscodingResultParcel.aidl
@@ -16,6 +16,8 @@
 
 package android.media;
 
+import android.media.TranscodingSessionStats;
+
 /**
  * Result of the transcoding.
  *
@@ -24,9 +26,9 @@
 //TODO(hkuang): Implement the parcelable.
 parcelable TranscodingResultParcel {
     /**
-     * The jobId associated with the TranscodingResult.
+     * The sessionId associated with the TranscodingResult.
      */
-    int jobId;
+    int sessionId;
 
     /**
      * Actual bitrate of the transcoded video in bits per second. This will only present for video
@@ -34,5 +36,9 @@
      */
     int actualBitrateBps;
 
-    // TODO(hkuang): Add more fields.
+    /**
+     * Stats of the transcoding session. This will only be available when client requests to get the
+     * stats in TranscodingRequestParcel.
+     */
+    @nullable TranscodingSessionStats sessionStats;
 }
\ No newline at end of file
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingSessionParcel.aidl b/media/libmediatranscoding/aidl/android/media/TranscodingSessionParcel.aidl
new file mode 100644
index 0000000..3a4a500
--- /dev/null
+++ b/media/libmediatranscoding/aidl/android/media/TranscodingSessionParcel.aidl
@@ -0,0 +1,53 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.TranscodingRequestParcel;
+import android.media.TranscodingVideoTrackFormat;
+
+/**
+ * TranscodingSession is generated by the MediaTranscodingService upon receiving a
+ * TranscodingRequest. It contains all the necessary configuration generated by the
+ * MediaTranscodingService for the TranscodingRequest.
+ *
+ * {@hide}
+ */
+//TODO(hkuang): Implement the parcelable.
+parcelable TranscodingSessionParcel {
+    /**
+     * A unique positive Id generated by the MediaTranscodingService.
+     */
+    int sessionId;
+
+    /**
+     * The request associated with the TranscodingSession.
+     */
+    TranscodingRequestParcel request;
+
+    /**
+     * Output video track's format. This will only be available for video transcoding, and only
+     * when the session is finished.
+     */
+    @nullable TranscodingVideoTrackFormat videoTrackFormat;
+
+    /**
+    * Current number of sessions ahead of this session. The service schedules the session based on
+    * the priority passed from the client. The client can specify whether to receive updates when
+    * awaitNumberOfSessions changes by setting requestProgressUpdate in the TranscodingRequest.
+    */
+    int awaitNumberOfSessions;
+}
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingSessionPriority.aidl b/media/libmediatranscoding/aidl/android/media/TranscodingSessionPriority.aidl
new file mode 100644
index 0000000..f001484
--- /dev/null
+++ b/media/libmediatranscoding/aidl/android/media/TranscodingSessionPriority.aidl
@@ -0,0 +1,37 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * Priority of a transcoding session.
+ *
+ * {@hide}
+ */
+@Backing(type="int")
+enum TranscodingSessionPriority {
+    // TODO(hkuang): define what each priority level actually means.
+    kUnspecified = 0,
+    kLow = 1,
+    /**
+     * 2 ~ 20 is reserved for future use.
+     */
+    kNormal = 21,
+    /**
+     * 22 ~ 30 is reserved for future use.
+     */
+    kHigh = 31,
+}
\ No newline at end of file
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingSessionStats.aidl b/media/libmediatranscoding/aidl/android/media/TranscodingSessionStats.aidl
new file mode 100644
index 0000000..b3e7eea
--- /dev/null
+++ b/media/libmediatranscoding/aidl/android/media/TranscodingSessionStats.aidl
@@ -0,0 +1,45 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * TranscodingSessionStats encapsulates the stats of a TranscodingSession.
+ *
+ * {@hide}
+ */
+parcelable TranscodingSessionStats {
+    /**
+     * System time of when the session is created.
+     */
+    long sessionCreatedTimeUs;
+
+    /**
+     * System time of when the session is finished.
+     */
+    long sessionFinishedTimeUs;
+
+    /**
+     * Total time spent on transcoding, excluding the time spent in pause.
+     */
+    long totalProcessingTimeUs;
+
+    /**
+     * Total time spent on handling the session, including the time spent in pause.
+     * totalTimeUs is the same as sessionFinishedTimeUs - sessionCreatedTimeUs.
+     */
+    long totalTimeUs;
+}
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingTestConfig.aidl b/media/libmediatranscoding/aidl/android/media/TranscodingTestConfig.aidl
new file mode 100644
index 0000000..12e0e94
--- /dev/null
+++ b/media/libmediatranscoding/aidl/android/media/TranscodingTestConfig.aidl
@@ -0,0 +1,44 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * TranscodingTestConfig contains the test configuration used in testing.
+ *
+ * {@hide}
+ */
+parcelable TranscodingTestConfig {
+    /**
+     * Whether to use SimulatedTranscoder for testing. Note that SimulatedTranscoder does not send
+     * transcoding sessions to the real MediaTranscoder.
+     */
+    boolean useSimulatedTranscoder = false;
+
+    /**
+     * Passthrough mode used for testing. The transcoding service will assume that the destination
+     * path already contains the transcoded source file and return it to the client directly.
+     */
+    boolean passThroughMode = false;
+
+    /**
+     * Time of processing the session in milliseconds. The service will return the session result
+     * no earlier than processingTotalTimeMs after it starts to process the session. Note that if
+     * the service uses the real MediaTranscoder for transcoding, the time spent on transcoding
+     * may exceed this value.
+     */
+    int processingTotalTimeMs = 0;
+}
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingVideoTrackFormat.aidl b/media/libmediatranscoding/aidl/android/media/TranscodingVideoTrackFormat.aidl
new file mode 100644
index 0000000..8ed241a
--- /dev/null
+++ b/media/libmediatranscoding/aidl/android/media/TranscodingVideoTrackFormat.aidl
@@ -0,0 +1,84 @@
+/**
+ * Copyright (c) 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.TranscodingVideoCodecType;
+
+/**
+ * TranscodingVideoTrackFormat contains the video track format of a video.
+ *
+ * TODO(hkuang): Switch to PersistableBundle when b/156428735 is fixed or after we remove
+ * aidl_interface
+ *
+ * Note that TranscodingVideoTrackFormat is used in TranscodingRequestParcel for the client to
+ * specify the desired transcoded video format, and is also used in TranscodingSessionParcel for the
+ * service to notify client of the final video format for transcoding.
+ * When used as input in TranscodingRequestParcel, the client only needs to specify the config that
+ * they want to change, e.g. codec or resolution, and all the missing configs will be extracted
+ * from the source video and applied to the destination video.
+ * When used as output in TranscodingSessionParcel, all the configs will be populated to indicate
+ * the final encoder configs used for transcoding.
+ *
+ * {@hide}
+ */
+parcelable TranscodingVideoTrackFormat {
+    /**
+     * Video Codec type.
+     */
+    TranscodingVideoCodecType codecType; // TranscodingVideoCodecType::kUnspecified;
+
+    /**
+     * Width of the video in pixels. -1 means unavailable.
+     */
+    int width = -1;
+
+    /**
+     * Height of the video in pixels. -1 means unavailable.
+     */
+    int height = -1;
+
+    /**
+     * Bitrate in bits per second. -1 means unavailable.
+     */
+    int bitrateBps = -1;
+
+    /**
+     * Codec profile. This must be the same constant as used in MediaCodecInfo.CodecProfileLevel.
+     * -1 means unavailable.
+     */
+    int profile = -1;
+
+    /**
+     * Codec level. This must be the same constant as used in MediaCodecInfo.CodecProfileLevel.
+     * -1 means unavailable.
+     */
+    int level = -1;
+
+    /**
+     * Decoder operating rate. This is used to work around the fact that the vendor may not boost
+     * the hardware to maximum speed for the transcoding use case. This operating rate will be
+     * applied to the decoder inside MediaTranscoder. -1 means unavailable.
+     */
+    int decoderOperatingRate = -1;
+
+    /**
+     * Encoder operating rate. This is used to work around the fact that the vendor may not boost
+     * the hardware to maximum speed for the transcoding use case. This operating rate will be
+     * applied to the encoder inside MediaTranscoder. -1 means unavailable.
+     */
+    int encoderOperatingRate = -1;
+}
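
For illustration, on the output side the final encoder configuration can be read back from the session parcel once the session is finished, roughly as sketched below (assuming the NDK backend's std::optional mapping for @nullable fields; the logging helper is hypothetical).

#include <cstdio>

#include <aidl/android/media/TranscodingSessionParcel.h>

using ::aidl::android::media::TranscodingSessionParcel;

void logFinalVideoFormat(const TranscodingSessionParcel& session) {
    if (!session.videoTrackFormat.has_value()) {
        return;  // not a video session, or the session has not finished yet
    }
    const auto& fmt = *session.videoTrackFormat;
    // Fields still at -1 were unavailable.
    printf("session %d: %dx%d @ %d bps, profile %d, level %d\n", session.sessionId, fmt.width,
           fmt.height, fmt.bitrateBps, fmt.profile, fmt.level);
}
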
diff --git a/media/libmediatranscoding/build_and_run_all_unit_tests.sh b/media/libmediatranscoding/build_and_run_all_unit_tests.sh
new file mode 100755
index 0000000..388e2ea
--- /dev/null
+++ b/media/libmediatranscoding/build_and_run_all_unit_tests.sh
@@ -0,0 +1,31 @@
+#!/bin/bash
+#
+# Script to run all transcoding related tests from subfolders.
+# Run script from this folder.
+#
+
+if [ -z "$ANDROID_BUILD_TOP" ]; then
+    echo "Android build environment not set"
+    exit 1
+fi
+
+# ensure we have mm
+. $ANDROID_BUILD_TOP/build/envsetup.sh
+
+mm
+
+echo "waiting for device"
+
+adb root && adb wait-for-device remount && adb sync
+SYNC_FINISHED=true
+
+# Run the transcoding service tests.
+pushd tests
+. build_and_run_all_unit_tests.sh
+popd
+
+# Run the transcoder tests.
+pushd transcoder/tests/
+. build_and_run_all_unit_tests.sh
+popd
+
diff --git a/media/libmediatranscoding/include/media/AdjustableMaxPriorityQueue.h b/media/libmediatranscoding/include/media/AdjustableMaxPriorityQueue.h
index 0e8dcfd..5ba1ee2 100644
--- a/media/libmediatranscoding/include/media/AdjustableMaxPriorityQueue.h
+++ b/media/libmediatranscoding/include/media/AdjustableMaxPriorityQueue.h
@@ -26,7 +26,7 @@
 namespace android {
 
 /*
- * AdjustableMaxPriorityQueue is a custom max priority queue that helps managing jobs for
+ * AdjustableMaxPriorityQueue is a custom max priority queue that helps managing sessions for
  * MediaTranscodingService.
  *
  * AdjustableMaxPriorityQueue is a wrapper template around the STL's *_heap() functions.
@@ -38,7 +38,7 @@
  */
 template <class T, class Comparator = std::less<T>>
 class AdjustableMaxPriorityQueue {
-   public:
+public:
     typedef typename std::vector<T>::iterator iterator;
     typedef typename std::vector<T>::const_iterator const_iterator;
 
@@ -104,7 +104,7 @@
     /* Return the backbone storage of this PriorityQueue. Mainly used for debugging. */
     const std::vector<T>& getStorage() const { return mHeap; };
 
-   private:
+private:
     std::vector<T> mHeap;
 
     /* Implementation shared by both public push() methods. */
diff --git a/media/libmediatranscoding/include/media/ControllerClientInterface.h b/media/libmediatranscoding/include/media/ControllerClientInterface.h
new file mode 100644
index 0000000..3fd4f0c
--- /dev/null
+++ b/media/libmediatranscoding/include/media/ControllerClientInterface.h
@@ -0,0 +1,68 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_CONTROLLER_CLIENT_INTERFACE_H
+#define ANDROID_MEDIA_CONTROLLER_CLIENT_INTERFACE_H
+
+#include <aidl/android/media/ITranscodingClientCallback.h>
+#include <aidl/android/media/TranscodingRequestParcel.h>
+#include <media/TranscodingDefs.h>
+
+namespace android {
+
+using ::aidl::android::media::ITranscodingClientCallback;
+using ::aidl::android::media::TranscodingRequestParcel;
+
+// Interface for a client to call the controller to schedule or retrieve
+// the status of a session.
+class ControllerClientInterface {
+public:
+    /**
+     * Submits one request to the controller.
+     *
+     * Returns true on success and false on failure. This call will fail if a session identified
+     * by <clientId, sessionId> already exists.
+     */
+    virtual bool submit(ClientIdType clientId, SessionIdType sessionId, uid_t uid,
+                        const TranscodingRequestParcel& request,
+                        const std::weak_ptr<ITranscodingClientCallback>& clientCallback) = 0;
+
+    /**
+     * Cancels a session identified by <clientId, sessionId>.
+     *
+     * If sessionId is negative (<0), all sessions with a specified priority (that's not
+     * TranscodingSessionPriority::kUnspecified) will be cancelled. Otherwise, only the single
+     * session <clientId, sessionId> will be cancelled.
+     *
+     * Returns false if a single session is being cancelled but it doesn't exist. Returns
+     * true otherwise.
+     */
+    virtual bool cancel(ClientIdType clientId, SessionIdType sessionId) = 0;
+
+    /**
+     * Retrieves information about a session.
+     *
+     * Returns true and the session if it exists, and false otherwise.
+     */
+    virtual bool getSession(ClientIdType clientId, SessionIdType sessionId,
+                            TranscodingRequestParcel* request) = 0;
+
+protected:
+    virtual ~ControllerClientInterface() = default;
+};
+
+}  // namespace android
+#endif  // ANDROID_MEDIA_CONTROLLER_CLIENT_INTERFACE_H
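
For illustration, a trivial in-memory fake of this interface (e.g. for unit tests) could look like the sketch below. It only records submitted requests; the negative-sessionId "cancel all" semantics and callback handling are omitted.

#include <map>
#include <utility>

#include <media/ControllerClientInterface.h>

namespace android {

class FakeSessionController : public ControllerClientInterface {
public:
    bool submit(ClientIdType clientId, SessionIdType sessionId, uid_t /*uid*/,
                const TranscodingRequestParcel& request,
                const std::weak_ptr<ITranscodingClientCallback>& /*clientCallback*/) override {
        auto key = std::make_pair(clientId, sessionId);
        if (mRequests.count(key)) return false;  // <clientId, sessionId> must be unique
        mRequests.emplace(key, request);
        return true;
    }
    bool cancel(ClientIdType clientId, SessionIdType sessionId) override {
        return mRequests.erase(std::make_pair(clientId, sessionId)) > 0;
    }
    bool getSession(ClientIdType clientId, SessionIdType sessionId,
                    TranscodingRequestParcel* request) override {
        auto it = mRequests.find(std::make_pair(clientId, sessionId));
        if (it == mRequests.end()) return false;
        *request = it->second;
        return true;
    }

private:
    std::map<std::pair<ClientIdType, SessionIdType>, TranscodingRequestParcel> mRequests;
};

}  // namespace android
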
diff --git a/media/libmediatranscoding/include/media/ResourcePolicyInterface.h b/media/libmediatranscoding/include/media/ResourcePolicyInterface.h
new file mode 100644
index 0000000..4a92af8
--- /dev/null
+++ b/media/libmediatranscoding/include/media/ResourcePolicyInterface.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_RESOURCE_POLICY_INTERFACE_H
+#define ANDROID_MEDIA_RESOURCE_POLICY_INTERFACE_H
+#include <memory>
+namespace android {
+
+class ResourcePolicyCallbackInterface;
+
+// Interface for the SessionController to control the resource status updates.
+class ResourcePolicyInterface {
+public:
+    // Set the associated callback interface to send the events when resource
+    // status changes. (Set to nullptr will stop the updates.)
+    virtual void setCallback(const std::shared_ptr<ResourcePolicyCallbackInterface>& cb) = 0;
+
+protected:
+    virtual ~ResourcePolicyInterface() = default;
+};
+
+// Interface for notifying the SessionController of a change in resource status.
+class ResourcePolicyCallbackInterface {
+public:
+    // Called when codec resources become available. The controller may use this
+    // as a signal to attempt to restart transcoding sessions that were previously
+    // paused due to temporary resource loss.
+    virtual void onResourceAvailable() = 0;
+
+protected:
+    virtual ~ResourcePolicyCallbackInterface() = default;
+};
+
+}  // namespace android
+#endif  // ANDROID_MEDIA_RESOURCE_POLICY_INTERFACE_H
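
For illustration, the controller side could wire itself to a policy implementation roughly as sketched below (the class is hypothetical; any ResourcePolicyInterface implementation, such as the TranscodingResourcePolicy declared later in this change, could be passed in). Note that the object must already be owned by a std::shared_ptr before attach() is called, because of shared_from_this().

#include <memory>

#include <media/ResourcePolicyInterface.h>

namespace android {

class ExampleResourceListener : public ResourcePolicyCallbackInterface,
                                public std::enable_shared_from_this<ExampleResourceListener> {
public:
    void attach(const std::shared_ptr<ResourcePolicyInterface>& policy) {
        mPolicy = policy;
        policy->setCallback(shared_from_this());  // start receiving resource updates
    }
    void onResourceAvailable() override {
        // Attempt to restart sessions that were paused due to temporary resource loss.
    }
    ~ExampleResourceListener() override {
        if (auto policy = mPolicy.lock()) policy->setCallback(nullptr);  // stop updates
    }

private:
    std::weak_ptr<ResourcePolicyInterface> mPolicy;
};

}  // namespace android
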
diff --git a/media/libmediatranscoding/include/media/TranscoderInterface.h b/media/libmediatranscoding/include/media/TranscoderInterface.h
new file mode 100644
index 0000000..e17cd5a
--- /dev/null
+++ b/media/libmediatranscoding/include/media/TranscoderInterface.h
@@ -0,0 +1,74 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_TRANSCODER_INTERFACE_H
+#define ANDROID_MEDIA_TRANSCODER_INTERFACE_H
+
+#include <aidl/android/media/ITranscodingClientCallback.h>
+#include <aidl/android/media/TranscodingErrorCode.h>
+#include <aidl/android/media/TranscodingRequestParcel.h>
+#include <media/TranscodingDefs.h>
+
+namespace android {
+
+using ::aidl::android::media::ITranscodingClientCallback;
+using ::aidl::android::media::TranscodingErrorCode;
+using ::aidl::android::media::TranscodingRequestParcel;
+class TranscoderCallbackInterface;
+
+// Interface for the controller to call the transcoder to take actions.
+class TranscoderInterface {
+public:
+    virtual void setCallback(const std::shared_ptr<TranscoderCallbackInterface>& cb) = 0;
+    virtual void start(ClientIdType clientId, SessionIdType sessionId,
+                       const TranscodingRequestParcel& request,
+                       const std::shared_ptr<ITranscodingClientCallback>& clientCallback) = 0;
+    virtual void pause(ClientIdType clientId, SessionIdType sessionId) = 0;
+    virtual void resume(ClientIdType clientId, SessionIdType sessionId,
+                        const TranscodingRequestParcel& request,
+                        const std::shared_ptr<ITranscodingClientCallback>& clientCallback) = 0;
+    virtual void stop(ClientIdType clientId, SessionIdType sessionId) = 0;
+
+protected:
+    virtual ~TranscoderInterface() = default;
+};
+
+// Interface for the transcoder to notify the controller of the status of
+// the currently running session, or temporary loss of transcoding resources.
+class TranscoderCallbackInterface {
+public:
+    // TODO(chz): determine what parameters are needed here.
+    virtual void onStarted(ClientIdType clientId, SessionIdType sessionId) = 0;
+    virtual void onPaused(ClientIdType clientId, SessionIdType sessionId) = 0;
+    virtual void onResumed(ClientIdType clientId, SessionIdType sessionId) = 0;
+    virtual void onFinish(ClientIdType clientId, SessionIdType sessionId) = 0;
+    virtual void onError(ClientIdType clientId, SessionIdType sessionId,
+                         TranscodingErrorCode err) = 0;
+    virtual void onProgressUpdate(ClientIdType clientId, SessionIdType sessionId,
+                                  int32_t progress) = 0;
+
+    // Called when transcoding becomes temporarily inaccessible due to loss of resources.
+    // If there is any session currently running, it will be paused. When resource contention
+    // is resolved, the controller should call TranscoderInterface's start() or resume() to
+    // either start a new session or resume a paused session.
+    virtual void onResourceLost() = 0;
+
+protected:
+    virtual ~TranscoderCallbackInterface() = default;
+};
+
+}  // namespace android
+#endif  // ANDROID_MEDIA_TRANSCODER_INTERFACE_H
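
For illustration, a no-op TranscoderCallbackInterface like the sketch below can serve as a stand-in when exercising a TranscoderInterface implementation in isolation, e.g. via transcoder->setCallback(std::make_shared<NoOpTranscoderCallback>()); the real consumer of these callbacks is the session controller.

#include <media/TranscoderInterface.h>

namespace android {

struct NoOpTranscoderCallback : public TranscoderCallbackInterface {
    void onStarted(ClientIdType, SessionIdType) override {}
    void onPaused(ClientIdType, SessionIdType) override {}
    void onResumed(ClientIdType, SessionIdType) override {}
    void onFinish(ClientIdType, SessionIdType) override {}
    void onError(ClientIdType, SessionIdType, TranscodingErrorCode) override {}
    void onProgressUpdate(ClientIdType, SessionIdType, int32_t) override {}
    void onResourceLost() override {}
};

}  // namespace android
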
diff --git a/media/libmediatranscoding/include/media/TranscoderWrapper.h b/media/libmediatranscoding/include/media/TranscoderWrapper.h
new file mode 100644
index 0000000..9ec32d7
--- /dev/null
+++ b/media/libmediatranscoding/include/media/TranscoderWrapper.h
@@ -0,0 +1,98 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_TRANSCODER_WRAPPER_H
+#define ANDROID_TRANSCODER_WRAPPER_H
+
+#include <android-base/thread_annotations.h>
+#include <media/NdkMediaError.h>
+#include <media/TranscoderInterface.h>
+
+#include <list>
+#include <map>
+#include <mutex>
+
+namespace android {
+
+class MediaTranscoder;
+class Parcelable;
+
+/*
+ * Wrapper class around MediaTranscoder.
+ * Implements TranscoderInterface for TranscodingSessionController to use.
+ */
+class TranscoderWrapper : public TranscoderInterface,
+                          public std::enable_shared_from_this<TranscoderWrapper> {
+public:
+    TranscoderWrapper();
+
+    virtual void setCallback(const std::shared_ptr<TranscoderCallbackInterface>& cb) override;
+    virtual void start(ClientIdType clientId, SessionIdType sessionId,
+                       const TranscodingRequestParcel& request,
+                       const std::shared_ptr<ITranscodingClientCallback>& clientCallback) override;
+    virtual void pause(ClientIdType clientId, SessionIdType sessionId) override;
+    virtual void resume(ClientIdType clientId, SessionIdType sessionId,
+                        const TranscodingRequestParcel& request,
+                        const std::shared_ptr<ITranscodingClientCallback>& clientCallback) override;
+    virtual void stop(ClientIdType clientId, SessionIdType sessionId) override;
+
+private:
+    class CallbackImpl;
+    struct Event {
+        enum Type { NoEvent, Start, Pause, Resume, Stop, Finish, Error, Progress } type;
+        ClientIdType clientId;
+        SessionIdType sessionId;
+        std::function<void()> runnable;
+        int32_t arg;
+    };
+    using SessionKeyType = std::pair<ClientIdType, SessionIdType>;
+
+    std::shared_ptr<CallbackImpl> mTranscoderCb;
+    std::shared_ptr<MediaTranscoder> mTranscoder;
+    std::weak_ptr<TranscoderCallbackInterface> mCallback;
+    std::mutex mLock;
+    std::condition_variable mCondition;
+    std::list<Event> mQueue;  // GUARDED_BY(mLock);
+    std::map<SessionKeyType, std::shared_ptr<ndk::ScopedAParcel>> mPausedStateMap;
+    ClientIdType mCurrentClientId;
+    SessionIdType mCurrentSessionId;
+
+    static std::string toString(const Event& event);
+    void onFinish(ClientIdType clientId, SessionIdType sessionId);
+    void onError(ClientIdType clientId, SessionIdType sessionId, media_status_t status);
+    void onProgress(ClientIdType clientId, SessionIdType sessionId, int32_t progress);
+
+    media_status_t handleStart(ClientIdType clientId, SessionIdType sessionId,
+                               const TranscodingRequestParcel& request,
+                               const std::shared_ptr<ITranscodingClientCallback>& callback);
+    media_status_t handlePause(ClientIdType clientId, SessionIdType sessionId);
+    media_status_t handleResume(ClientIdType clientId, SessionIdType sessionId,
+                                const TranscodingRequestParcel& request,
+                                const std::shared_ptr<ITranscodingClientCallback>& callback);
+    media_status_t setupTranscoder(
+            ClientIdType clientId, SessionIdType sessionId, const TranscodingRequestParcel& request,
+            const std::shared_ptr<ITranscodingClientCallback>& callback,
+            const std::shared_ptr<ndk::ScopedAParcel>& pausedState = nullptr);
+
+    void cleanup();
+    void reportError(ClientIdType clientId, SessionIdType sessionId, media_status_t err);
+    void queueEvent(Event::Type type, ClientIdType clientId, SessionIdType sessionId,
+                    const std::function<void()> runnable, int32_t arg = 0);
+    void threadLoop();
+};
+
+}  // namespace android
+#endif  // ANDROID_TRANSCODER_WRAPPER_H
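
For illustration, the private members above (mLock, mCondition, mQueue, threadLoop) suggest a single-threaded event loop; the sketch below shows that general pattern only and is not TranscoderWrapper's actual implementation.

#include <condition_variable>
#include <functional>
#include <list>
#include <mutex>
#include <thread>

class EventLoop {
public:
    EventLoop() : mThread([this] { threadLoop(); }) {}
    ~EventLoop() {
        post(nullptr);  // an empty function acts as the quit marker
        mThread.join();
    }
    void post(std::function<void()> runnable) {
        std::scoped_lock lock(mLock);
        mQueue.push_back(std::move(runnable));
        mCondition.notify_one();
    }

private:
    void threadLoop() {
        for (;;) {
            std::function<void()> runnable;
            {
                std::unique_lock lock(mLock);
                mCondition.wait(lock, [this] { return !mQueue.empty(); });
                runnable = std::move(mQueue.front());
                mQueue.pop_front();
            }
            if (!runnable) return;  // quit marker
            runnable();             // handle Start/Pause/Resume/Stop/... outside the lock
        }
    }

    std::mutex mLock;
    std::condition_variable mCondition;
    std::list<std::function<void()>> mQueue;
    std::thread mThread;
};
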
diff --git a/media/libmediatranscoding/include/media/TranscodingClientManager.h b/media/libmediatranscoding/include/media/TranscodingClientManager.h
index eec120a..451f993 100644
--- a/media/libmediatranscoding/include/media/TranscodingClientManager.h
+++ b/media/libmediatranscoding/include/media/TranscodingClientManager.h
@@ -17,73 +17,77 @@
 #ifndef ANDROID_MEDIA_TRANSCODING_CLIENT_MANAGER_H
 #define ANDROID_MEDIA_TRANSCODING_CLIENT_MANAGER_H
 
-#include <aidl/android/media/BnTranscodingServiceClient.h>
-#include <android/binder_ibinder.h>
+#include <aidl/android/media/ITranscodingClient.h>
+#include <aidl/android/media/ITranscodingClientCallback.h>
 #include <sys/types.h>
 #include <utils/Condition.h>
-#include <utils/RefBase.h>
 #include <utils/String8.h>
 #include <utils/Vector.h>
 
+#include <map>
 #include <mutex>
 #include <unordered_map>
+#include <unordered_set>
+
+#include "ControllerClientInterface.h"
 
 namespace android {
 
-using ::aidl::android::media::ITranscodingServiceClient;
-
-class MediaTranscodingService;
+using ::aidl::android::media::ITranscodingClient;
+using ::aidl::android::media::ITranscodingClientCallback;
 
 /*
  * TranscodingClientManager manages all the transcoding clients across different processes.
  *
- * TranscodingClientManager is a global singleton that could only acquired by
- * MediaTranscodingService. It manages all the clients's registration/unregistration and clients'
- * information. It also bookkeeps all the clients' information. It also monitors to the death of the
+ * TranscodingClientManager manages all the clients' registration/unregistration and bookkeeps
+ * the clients' information. It also monitors the death of the
  * clients. Upon client's death, it will remove the client from it.
  *
  * TODO(hkuang): Hook up with ResourceManager for resource management.
  * TODO(hkuang): Hook up with MediaMetrics to log all the transactions.
  */
-class TranscodingClientManager {
-   public:
+class TranscodingClientManager : public std::enable_shared_from_this<TranscodingClientManager> {
+public:
     virtual ~TranscodingClientManager();
 
     /**
-     * ClientInfo contains a single client's information.
-     */
-    struct ClientInfo {
-        /* The remote client that this ClientInfo is associated with. */
-        std::shared_ptr<ITranscodingServiceClient> mClient;
-        /* A unique positive Id assigned to the client by the service. */
-        int32_t mClientId;
-        /* Process id of the client */
-        int32_t mClientPid;
-        /* User id of the client. */
-        int32_t mClientUid;
-        /* Package name of the client. */
-        std::string mClientOpPackageName;
-
-        ClientInfo(const std::shared_ptr<ITranscodingServiceClient>& client, int64_t clientId,
-                   int32_t pid, int32_t uid, const std::string& opPackageName)
-            : mClient(client),
-              mClientId(clientId),
-              mClientPid(pid),
-              mClientUid(uid),
-              mClientOpPackageName(opPackageName) {}
-    };
-
-    /**
      * Adds a new client to the manager.
      *
-     * The client must have valid clientId, pid, uid and opPackageName, otherwise, this will return
-     * a non-zero errorcode. If the client has already been added, it will also return non-zero
-     * errorcode.
+     * The client must have valid callback, pid, uid, clientName and opPackageName.
+     * Otherwise, this will return a non-zero errorcode. If the client callback has
+     * already been added, it will also return non-zero errorcode.
      *
-     * @param client to be added to the manager.
+     * @param callback client callback for the service to call this client.
+     * @param clientName client's name.
+     * @param opPackageName client's package name.
+     * @param client output holding the ITranscodingClient interface for the client
+     *        to use for subsequent communications with the service.
      * @return 0 if client is added successfully, non-zero errorcode otherwise.
      */
-    status_t addClient(std::unique_ptr<ClientInfo> client);
+    status_t addClient(const std::shared_ptr<ITranscodingClientCallback>& callback,
+                       const std::string& clientName, const std::string& opPackageName,
+                       std::shared_ptr<ITranscodingClient>* client);
+
+    /**
+     * Gets the number of clients.
+     */
+    size_t getNumOfClients() const;
+
+    /**
+     * Dump all the client information to the fd.
+     */
+    void dumpAllClients(int fd, const Vector<String16>& args);
+
+private:
+    friend class MediaTranscodingService;
+    friend class TranscodingClientManagerTest;
+    struct ClientImpl;
+
+    // Only allow MediaTranscodingService and unit tests to instantiate.
+    TranscodingClientManager(const std::shared_ptr<ControllerClientInterface>& controller);
+
+    // Checks if a user is trusted (and allowed to submit sessions on behalf of other uids)
+    bool isTrustedCallingUid(uid_t uid);
 
     /**
      * Removes an existing client from the manager.
@@ -93,39 +97,24 @@
      * @param clientId id of the client to be removed..
      * @return 0 if client is removed successfully, non-zero errorcode otherwise.
      */
-    status_t removeClient(int32_t clientId);
-
-    /**
-     * Gets the number of clients.
-     */
-    size_t getNumOfClients() const;
-
-    /**
-     * Checks if a client with clientId is already registered.
-     */
-    bool isClientIdRegistered(int32_t clientId) const;
-
-    /**
-     * Dump all the client information to the fd.
-     */
-    void dumpAllClients(int fd, const Vector<String16>& args);
-
-   private:
-    friend class MediaTranscodingService;
-    friend class TranscodingClientManagerTest;
-
-    /** Get the singleton instance of the TranscodingClientManager. */
-    static TranscodingClientManager& getInstance();
-
-    TranscodingClientManager();
+    status_t removeClient(ClientIdType clientId);
 
     static void BinderDiedCallback(void* cookie);
 
     mutable std::mutex mLock;
-    std::unordered_map<int32_t, std::unique_ptr<ClientInfo>> mClientIdToClientInfoMap
+    std::unordered_map<ClientIdType, std::shared_ptr<ClientImpl>> mClientIdToClientMap
             GUARDED_BY(mLock);
+    std::unordered_set<uintptr_t> mRegisteredCallbacks GUARDED_BY(mLock);
 
     ::ndk::ScopedAIBinder_DeathRecipient mDeathRecipient;
+
+    std::shared_ptr<ControllerClientInterface> mSessionController;
+    std::unordered_set<uid_t> mMediaProviderUid;
+
+    static std::atomic<ClientIdType> sCookieCounter;
+    static std::mutex sCookie2ClientLock;
+    static std::map<ClientIdType, std::shared_ptr<ClientImpl>> sCookie2Client
+            GUARDED_BY(sCookie2ClientLock);
 };
 
 }  // namespace android
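
For illustration, the service-side registration flow implied by addClient() could look like the sketch below (the manager's constructor is reachable only through the MediaTranscodingService friend declaration, so an existing instance is taken as a parameter; the client name and package name are placeholder values).

#include <media/TranscodingClientManager.h>
#include <utils/Errors.h>

namespace android {

std::shared_ptr<ITranscodingClient> registerExampleClient(
        const std::shared_ptr<TranscodingClientManager>& clientManager,
        const std::shared_ptr<ITranscodingClientCallback>& callback) {
    std::shared_ptr<ITranscodingClient> client;
    status_t err =
            clientManager->addClient(callback, "example_client", "com.example.app", &client);
    if (err != OK) {
        return nullptr;  // invalid args, or this callback was registered already
    }
    // 'client' stays valid until unregister() is called or the callback binder dies,
    // at which point the manager's death recipient removes the client.
    return client;
}

}  // namespace android
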
diff --git a/media/libmediatranscoding/include/media/TranscodingDefs.h b/media/libmediatranscoding/include/media/TranscodingDefs.h
new file mode 100644
index 0000000..8e02dd2
--- /dev/null
+++ b/media/libmediatranscoding/include/media/TranscodingDefs.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_TRANSCODING_DEFS_H
+#define ANDROID_MEDIA_TRANSCODING_DEFS_H
+
+#include <aidl/android/media/ITranscodingClientCallback.h>
+#include <aidl/android/media/TranscodingRequestParcel.h>
+
+namespace android {
+
+using ClientIdType = uintptr_t;
+using SessionIdType = int32_t;
+
+}  // namespace android
+#endif  // ANDROID_MEDIA_TRANSCODING_DEFS_H
diff --git a/media/libmediatranscoding/include/media/TranscodingRequest.h b/media/libmediatranscoding/include/media/TranscodingRequest.h
new file mode 100644
index 0000000..485403f
--- /dev/null
+++ b/media/libmediatranscoding/include/media/TranscodingRequest.h
@@ -0,0 +1,54 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_TRANSCODING_REQUEST_H
+#define ANDROID_MEDIA_TRANSCODING_REQUEST_H
+
+#include <aidl/android/media/TranscodingRequestParcel.h>
+
+namespace android {
+
+using ::aidl::android::media::TranscodingRequestParcel;
+
+// Helper class for duplicating a TranscodingRequestParcel
+class TranscodingRequest : public TranscodingRequestParcel {
+public:
+    TranscodingRequest() = default;
+    TranscodingRequest(const TranscodingRequestParcel& parcel) { setTo(parcel); }
+    TranscodingRequest& operator=(const TranscodingRequest& request) {
+        setTo(request);
+        return *this;
+    }
+
+private:
+    void setTo(const TranscodingRequestParcel& parcel) {
+        sourceFilePath = parcel.sourceFilePath;
+        destinationFilePath = parcel.destinationFilePath;
+        clientUid = parcel.clientUid;
+        clientPid = parcel.clientPid;
+        clientPackageName = parcel.clientPackageName;
+        transcodingType = parcel.transcodingType;
+        requestedVideoTrackFormat = parcel.requestedVideoTrackFormat;
+        priority = parcel.priority;
+        requestProgressUpdate = parcel.requestProgressUpdate;
+        requestSessionEventUpdate = parcel.requestSessionEventUpdate;
+        isForTesting = parcel.isForTesting;
+        testConfig = parcel.testConfig;
+    }
+};
+
+}  // namespace android
+#endif  // ANDROID_MEDIA_TRANSCODING_REQUEST_H
diff --git a/media/libmediatranscoding/include/media/TranscodingResourcePolicy.h b/media/libmediatranscoding/include/media/TranscodingResourcePolicy.h
new file mode 100644
index 0000000..0836eda
--- /dev/null
+++ b/media/libmediatranscoding/include/media/TranscodingResourcePolicy.h
@@ -0,0 +1,65 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_TRANSCODING_RESOURCE_POLICY_H
+#define ANDROID_MEDIA_TRANSCODING_RESOURCE_POLICY_H
+
+#include <android/binder_auto_utils.h>
+#include <media/ResourcePolicyInterface.h>
+#include <utils/Condition.h>
+
+#include <mutex>
+namespace aidl {
+namespace android {
+namespace media {
+class IResourceObserverService;
+}
+}  // namespace android
+}  // namespace aidl
+
+namespace android {
+
+using ::aidl::android::media::IResourceObserverService;
+
+class TranscodingResourcePolicy : public ResourcePolicyInterface {
+public:
+    explicit TranscodingResourcePolicy();
+    ~TranscodingResourcePolicy();
+
+    void setCallback(const std::shared_ptr<ResourcePolicyCallbackInterface>& cb) override;
+
+private:
+    struct ResourceObserver;
+    mutable std::mutex mRegisteredLock;
+    bool mRegistered GUARDED_BY(mRegisteredLock);
+    std::shared_ptr<IResourceObserverService> mService GUARDED_BY(mRegisteredLock);
+    std::shared_ptr<ResourceObserver> mObserver;
+
+    mutable std::mutex mCallbackLock;
+    std::weak_ptr<ResourcePolicyCallbackInterface> mResourcePolicyCallback
+            GUARDED_BY(mCallbackLock);
+
+    ::ndk::ScopedAIBinder_DeathRecipient mDeathRecipient;
+
+    static void BinderDiedCallback(void* cookie);
+
+    void registerSelf();
+    void unregisterSelf();
+    void onResourceAvailable();
+};  // class TranscodingResourcePolicy
+
+}  // namespace android
+#endif  // ANDROID_MEDIA_TRANSCODING_RESOURCE_POLICY_H
diff --git a/media/libmediatranscoding/include/media/TranscodingSessionController.h b/media/libmediatranscoding/include/media/TranscodingSessionController.h
new file mode 100644
index 0000000..c082074
--- /dev/null
+++ b/media/libmediatranscoding/include/media/TranscodingSessionController.h
@@ -0,0 +1,138 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_TRANSCODING_SESSION_CONTROLLER_H
+#define ANDROID_MEDIA_TRANSCODING_SESSION_CONTROLLER_H
+
+#include <aidl/android/media/TranscodingSessionPriority.h>
+#include <media/ControllerClientInterface.h>
+#include <media/ResourcePolicyInterface.h>
+#include <media/TranscoderInterface.h>
+#include <media/TranscodingRequest.h>
+#include <media/UidPolicyInterface.h>
+#include <utils/String8.h>
+#include <utils/Vector.h>
+
+#include <list>
+#include <map>
+#include <mutex>
+
+namespace android {
+using ::aidl::android::media::TranscodingResultParcel;
+using ::aidl::android::media::TranscodingSessionPriority;
+
+class TranscodingSessionController : public UidPolicyCallbackInterface,
+                                     public ControllerClientInterface,
+                                     public TranscoderCallbackInterface,
+                                     public ResourcePolicyCallbackInterface {
+public:
+    virtual ~TranscodingSessionController();
+
+    // ControllerClientInterface
+    bool submit(ClientIdType clientId, SessionIdType sessionId, uid_t uid,
+                const TranscodingRequestParcel& request,
+                const std::weak_ptr<ITranscodingClientCallback>& clientCallback) override;
+    bool cancel(ClientIdType clientId, SessionIdType sessionId) override;
+    bool getSession(ClientIdType clientId, SessionIdType sessionId,
+                    TranscodingRequestParcel* request) override;
+    // ~ControllerClientInterface
+
+    // TranscoderCallbackInterface
+    void onStarted(ClientIdType clientId, SessionIdType sessionId) override;
+    void onPaused(ClientIdType clientId, SessionIdType sessionId) override;
+    void onResumed(ClientIdType clientId, SessionIdType sessionId) override;
+    void onFinish(ClientIdType clientId, SessionIdType sessionId) override;
+    void onError(ClientIdType clientId, SessionIdType sessionId, TranscodingErrorCode err) override;
+    void onProgressUpdate(ClientIdType clientId, SessionIdType sessionId,
+                          int32_t progress) override;
+    void onResourceLost() override;
+    // ~TranscoderCallbackInterface
+
+    // UidPolicyCallbackInterface
+    void onTopUidsChanged(const std::unordered_set<uid_t>& uids) override;
+    // ~UidPolicyCallbackInterface
+
+    // ResourcePolicyCallbackInterface
+    void onResourceAvailable() override;
+    // ~ResourcePolicyCallbackInterface
+
+    /**
+     * Dump all the session information to the fd.
+     */
+    void dumpAllSessions(int fd, const Vector<String16>& args);
+
+private:
+    friend class MediaTranscodingService;
+    friend class TranscodingSessionControllerTest;
+
+    using SessionKeyType = std::pair<ClientIdType, SessionIdType>;
+    using SessionQueueType = std::list<SessionKeyType>;
+
+    struct Session {
+        SessionKeyType key;
+        uid_t uid;
+        enum State {
+            NOT_STARTED,
+            RUNNING,
+            PAUSED,
+        } state;
+        int32_t lastProgress;
+        TranscodingRequest request;
+        std::weak_ptr<ITranscodingClientCallback> callback;
+    };
+
+    // TODO(chz): call transcoder without global lock.
+    // Use mLock for all entrypoints for now.
+    mutable std::mutex mLock;
+
+    std::map<SessionKeyType, Session> mSessionMap;
+
+    // uid->SessionQueue map (uid == -1: offline queue)
+    std::map<uid_t, SessionQueueType> mSessionQueues;
+
+    // Uids sorted by recency, with the head being the most-recently-top app, the 2nd item
+    // being the previously-top app, etc.
+    std::list<uid_t> mUidSortedList;
+    std::list<uid_t>::iterator mOfflineUidIterator;
+    std::map<uid_t, std::string> mUidPackageNames;
+
+    std::shared_ptr<TranscoderInterface> mTranscoder;
+    std::shared_ptr<UidPolicyInterface> mUidPolicy;
+    std::shared_ptr<ResourcePolicyInterface> mResourcePolicy;
+
+    Session* mCurrentSession;
+    bool mResourceLost;
+
+    // Only allow MediaTranscodingService and unit tests to instantiate.
+    TranscodingSessionController(const std::shared_ptr<TranscoderInterface>& transcoder,
+                                 const std::shared_ptr<UidPolicyInterface>& uidPolicy,
+                                 const std::shared_ptr<ResourcePolicyInterface>& resourcePolicy);
+
+    Session* getTopSession_l();
+    void updateCurrentSession_l();
+    void removeSession_l(const SessionKeyType& sessionKey);
+    void moveUidsToTop_l(const std::unordered_set<uid_t>& uids, bool preserveTopUid);
+    void notifyClient(ClientIdType clientId, SessionIdType sessionId, const char* reason,
+                      std::function<void(const SessionKeyType&)> func);
+    // Internal state verifier (debug only)
+    void validateState_l();
+
+    static String8 sessionToString(const SessionKeyType& sessionKey);
+    static const char* sessionStateToString(const Session::State sessionState);
+};
+
+}  // namespace android
+#endif  // ANDROID_MEDIA_TRANSCODING_SESSION_CONTROLLER_H
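The private members above encode the scheduling model: sessions live in mSessionMap, each uid has its own queue in mSessionQueues (with uid -1 acting as the offline queue), and mUidSortedList orders uids by how recently they were on top. As a rough illustration only (the real getTopSession_l() lives in TranscodingSessionController.cpp and also accounts for resource loss and other corner cases), picking the top session amounts to walking the sorted uid list and taking the front of the first non-empty queue:

    // Illustrative sketch, not the actual implementation.
    TranscodingSessionController::Session* TranscodingSessionController::getTopSession_l() {
        if (mSessionMap.empty()) {
            return nullptr;
        }
        // Walk uids from most-recently-top onward; the offline queue's placeholder
        // uid sits at mOfflineUidIterator.
        for (uid_t uid : mUidSortedList) {
            auto it = mSessionQueues.find(uid);
            if (it != mSessionQueues.end() && !it->second.empty()) {
                return &mSessionMap[it->second.front()];
            }
        }
        return nullptr;
    }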
diff --git a/media/libmediatranscoding/include/media/TranscodingUidPolicy.h b/media/libmediatranscoding/include/media/TranscodingUidPolicy.h
new file mode 100644
index 0000000..4c642de
--- /dev/null
+++ b/media/libmediatranscoding/include/media/TranscodingUidPolicy.h
@@ -0,0 +1,77 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_TRANSCODING_UID_POLICY_H
+#define ANDROID_MEDIA_TRANSCODING_UID_POLICY_H
+
+#include <aidl/android/media/ITranscodingClient.h>
+#include <aidl/android/media/ITranscodingClientCallback.h>
+#include <media/UidPolicyInterface.h>
+#include <sys/types.h>
+#include <utils/Condition.h>
+#include <utils/RefBase.h>
+#include <utils/String8.h>
+#include <utils/Vector.h>
+
+#include <map>
+#include <mutex>
+#include <unordered_map>
+#include <unordered_set>
+
+namespace android {
+
+class ActivityManager;
+// Observes UID lifecycle and provides information about a uid's app priority,
+// which is used by the session controller.
+class TranscodingUidPolicy : public UidPolicyInterface {
+public:
+    explicit TranscodingUidPolicy();
+    ~TranscodingUidPolicy();
+
+    // UidPolicyInterface
+    bool isUidOnTop(uid_t uid) override;
+    void registerMonitorUid(uid_t uid) override;
+    void unregisterMonitorUid(uid_t uid) override;
+    std::unordered_set<uid_t> getTopUids() const override;
+    void setCallback(const std::shared_ptr<UidPolicyCallbackInterface>& cb) override;
+    // ~UidPolicyInterface
+
+    static status_t getUidForPackage(String16 packageName, /*inout*/ uid_t& uid);
+
+private:
+    void onUidStateChanged(uid_t uid, int32_t procState);
+    void setUidObserverRegistered(bool registered);
+    void registerSelf();
+    void unregisterSelf();
+    void setProcessInfoOverride();
+    int32_t getProcState_l(uid_t uid) NO_THREAD_SAFETY_ANALYSIS;
+    void updateTopUid_l() NO_THREAD_SAFETY_ANALYSIS;
+
+    struct UidObserver;
+    struct ResourceManagerClient;
+    mutable Mutex mUidLock;
+    std::shared_ptr<ActivityManager> mAm;
+    sp<UidObserver> mUidObserver;
+    bool mRegistered GUARDED_BY(mUidLock);
+    int32_t mTopUidState GUARDED_BY(mUidLock);
+    std::unordered_map<uid_t, int32_t> mUidStateMap GUARDED_BY(mUidLock);
+    std::map<int32_t, std::unordered_set<uid_t>> mStateUidMap GUARDED_BY(mUidLock);
+    std::weak_ptr<UidPolicyCallbackInterface> mUidPolicyCallback;
+    std::shared_ptr<ResourceManagerClient> mProcInfoOverrideClient;
+};  // class TranscodingUidPolicy
+
+}  // namespace android
+#endif  // ANDROID_MEDIA_TRANSCODING_UID_POLICY_H
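A rough sketch of how the GUARDED_BY state above can answer isUidOnTop(): the uid observer feeds proc-state changes into mUidStateMap and mStateUidMap, and a uid counts as "on top" when its tracked state equals the best state currently seen. The ActivityManager constants and the exact bookkeeping are assumptions here; the real logic is in TranscodingUidPolicy.cpp, not shown in this patch excerpt.

    // Sketch, assuming ActivityManager-style proc states where a numerically
    // smaller value means higher importance and PROCESS_STATE_UNKNOWN means
    // "not tracked".
    bool TranscodingUidPolicy::isUidOnTop(uid_t uid) {
        Mutex::Autolock lock(mUidLock);
        return mTopUidState != ActivityManager::PROCESS_STATE_UNKNOWN &&
               mTopUidState == getProcState_l(uid);
    }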
diff --git a/media/libmediatranscoding/include/media/UidPolicyInterface.h b/media/libmediatranscoding/include/media/UidPolicyInterface.h
new file mode 100644
index 0000000..05d8db0
--- /dev/null
+++ b/media/libmediatranscoding/include/media/UidPolicyInterface.h
@@ -0,0 +1,56 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_UID_POLICY_INTERFACE_H
+#define ANDROID_MEDIA_UID_POLICY_INTERFACE_H
+
+#include <unordered_set>
+
+namespace android {
+
+class UidPolicyCallbackInterface;
+
+// Interface for the controller to query a uid's info.
+class UidPolicyInterface {
+public:
+    // Instruct the uid policy to start monitoring a uid.
+    virtual void registerMonitorUid(uid_t uid) = 0;
+    // Instruct the uid policy to stop monitoring a uid.
+    virtual void unregisterMonitorUid(uid_t uid) = 0;
+    // Whether a uid is among the set of uids that currently have top priority.
+    virtual bool isUidOnTop(uid_t uid) = 0;
+    // Retrieves the set of uids that currently have top priority.
+    virtual std::unordered_set<uid_t> getTopUids() const = 0;
+    // Sets the callback interface used to deliver events when uid states change.
+    virtual void setCallback(const std::shared_ptr<UidPolicyCallbackInterface>& cb) = 0;
+
+protected:
+    virtual ~UidPolicyInterface() = default;
+};
+
+// Interface for notifying the controller of a change in uid states.
+class UidPolicyCallbackInterface {
+public:
+    // Called when the set of uids that have top priority among the uids of interest
+    // has changed. The receiver of this callback should adjust accordingly.
+    virtual void onTopUidsChanged(const std::unordered_set<uid_t>& uids) = 0;
+
+protected:
+    virtual ~UidPolicyCallbackInterface() = default;
+};
+
+}  // namespace android
+#endif  // ANDROID_MEDIA_UID_POLICY_INTERFACE_H
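TestUidPolicy in TranscodingSessionController_tests.cpp later in this patch is a complete stub implementation of this interface. The fragment below only sketches how a controller-side caller is expected to use it; the helper name onClientSessionSubmitted is hypothetical.

    // Hypothetical usage sketch.
    void onClientSessionSubmitted(const std::shared_ptr<UidPolicyInterface>& policy,
                                  uid_t clientUid) {
        // Start watching the client's uid; onTopUidsChanged() fires later if its
        // foreground/background standing changes.
        policy->registerMonitorUid(clientUid);
        if (!policy->isUidOnTop(clientUid)) {
            // Queue the session instead of starting it; it can be promoted when the
            // controller's onTopUidsChanged() sees this uid among the top uids.
        }
    }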
diff --git a/media/libmediatranscoding/tests/AdjustableMaxPriorityQueue_tests.cpp b/media/libmediatranscoding/tests/AdjustableMaxPriorityQueue_tests.cpp
index d58af4e..a35ca53 100644
--- a/media/libmediatranscoding/tests/AdjustableMaxPriorityQueue_tests.cpp
+++ b/media/libmediatranscoding/tests/AdjustableMaxPriorityQueue_tests.cpp
@@ -36,7 +36,7 @@
 namespace android {
 
 class IntUniquePtrComp {
-   public:
+public:
     bool operator()(const std::unique_ptr<int>& lhs, const std::unique_ptr<int>& rhs) const {
         return *lhs < *rhs;
     }
@@ -223,19 +223,19 @@
 }
 
 // Test the heap property and make sure it is the same as std::priority_queue.
-TEST(AdjustableMaxPriorityQueueTest, TranscodingJobTest) {
-    // Test data structure that mimics the Transcoding job.
-    struct TranscodingJob {
+TEST(AdjustableMaxPriorityQueueTest, TranscodingSessionTest) {
+    // Test data structure that mimics the Transcoding session.
+    struct TranscodingSession {
         int32_t priority;
         int64_t createTimeUs;
     };
 
-    // The job is arranging according to priority with highest priority comes first.
-    // For the job with the same priority, the job with early createTime will come first.
-    class TranscodingJobComp {
-       public:
-        bool operator()(const std::unique_ptr<TranscodingJob>& lhs,
-                        const std::unique_ptr<TranscodingJob>& rhs) const {
+    // The sessions are arranged according to priority, with the highest priority coming first.
+    // For sessions with the same priority, the session with the earlier createTime comes first.
+    class TranscodingSessionComp {
+    public:
+        bool operator()(const std::unique_ptr<TranscodingSession>& lhs,
+                        const std::unique_ptr<TranscodingSession>& rhs) const {
             if (lhs->priority != rhs->priority) {
                 return lhs->priority < rhs->priority;
             }
@@ -244,46 +244,47 @@
     };
 
     // Map to save each value's position in the heap.
-    std::unordered_map<int, TranscodingJob*> jobIdToJobMap;
+    std::unordered_map<int, TranscodingSession*> sessionIdToSessionMap;
 
-    TranscodingJob testJobs[] = {
-            {1 /*priority*/, 66 /*createTimeUs*/},  // First job,
-            {2 /*priority*/, 67 /*createTimeUs*/},  // Second job,
-            {2 /*priority*/, 66 /*createTimeUs*/},  // Third job,
-            {3 /*priority*/, 68 /*createTimeUs*/},  // Fourth job.
+    TranscodingSession testSessions[] = {
+            {1 /*priority*/, 66 /*createTimeUs*/},  // First session,
+            {2 /*priority*/, 67 /*createTimeUs*/},  // Second session,
+            {2 /*priority*/, 66 /*createTimeUs*/},  // Third session,
+            {3 /*priority*/, 68 /*createTimeUs*/},  // Fourth session.
     };
 
-    AdjustableMaxPriorityQueue<std::unique_ptr<TranscodingJob>, TranscodingJobComp> jobQueue;
+    AdjustableMaxPriorityQueue<std::unique_ptr<TranscodingSession>, TranscodingSessionComp>
+            sessionQueue;
 
-    // Pushes all the jobs into the heap.
-    for (int jobId = 0; jobId < 4; ++jobId) {
-        auto newJob = std::make_unique<TranscodingJob>(testJobs[jobId]);
-        jobIdToJobMap[jobId] = newJob.get();
-        EXPECT_TRUE(jobQueue.push(std::move(newJob)));
+    // Pushes all the sessions into the heap.
+    for (int sessionId = 0; sessionId < 4; ++sessionId) {
+        auto newSession = std::make_unique<TranscodingSession>(testSessions[sessionId]);
+        sessionIdToSessionMap[sessionId] = newSession.get();
+        EXPECT_TRUE(sessionQueue.push(std::move(newSession)));
     }
 
-    // Check the job queue size.
-    EXPECT_EQ(4, jobQueue.size());
+    // Check the session queue size.
+    EXPECT_EQ(4, sessionQueue.size());
 
-    // Check the top and it should be Forth job: (3, 68)
-    const std::unique_ptr<TranscodingJob>& topJob = jobQueue.top();
-    EXPECT_EQ(3, topJob->priority);
-    EXPECT_EQ(68, topJob->createTimeUs);
+    // Check the top; it should be the Fourth session: (3, 68)
+    const std::unique_ptr<TranscodingSession>& topSession = sessionQueue.top();
+    EXPECT_EQ(3, topSession->priority);
+    EXPECT_EQ(68, topSession->createTimeUs);
 
     // Consume the top.
-    std::unique_ptr<TranscodingJob> consumeJob = jobQueue.consume_top();
+    std::unique_ptr<TranscodingSession> consumeSession = sessionQueue.consume_top();
 
-    // Check the top and it should be Third Job (2, 66)
-    const std::unique_ptr<TranscodingJob>& topJob2 = jobQueue.top();
-    EXPECT_EQ(2, topJob2->priority);
-    EXPECT_EQ(66, topJob2->createTimeUs);
+    // Check the top; it should be the Third session: (2, 66)
+    const std::unique_ptr<TranscodingSession>& topSession2 = sessionQueue.top();
+    EXPECT_EQ(2, topSession2->priority);
+    EXPECT_EQ(66, topSession2->createTimeUs);
 
-    // Change the Second job's priority to 4 from (2, 67) -> (4, 67). It should becomes top of the
-    // queue.
-    jobIdToJobMap[1]->priority = 4;
-    jobQueue.rebuild();
-    const std::unique_ptr<TranscodingJob>& topJob3 = jobQueue.top();
-    EXPECT_EQ(4, topJob3->priority);
-    EXPECT_EQ(67, topJob3->createTimeUs);
+    // Change the Second session's priority from (2, 67) to (4, 67). It should become the
+    // top of the queue.
+    sessionIdToSessionMap[1]->priority = 4;
+    sessionQueue.rebuild();
+    const std::unique_ptr<TranscodingSession>& topSession3 = sessionQueue.top();
+    EXPECT_EQ(4, topSession3->priority);
+    EXPECT_EQ(67, topSession3->createTimeUs);
 }
 }  // namespace android
\ No newline at end of file
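One subtlety behind the comparator comments above: AdjustableMaxPriorityQueue is a max-heap, so operator() is a less-than and the element that should come out first must compare greater. The equal-priority branch is elided by this hunk; under that convention it would read roughly as follows, with the later createTimeUs ranking below so the earlier session wins.

    // Sketch of the full comparator under the max-heap convention described above.
    bool operator()(const std::unique_ptr<TranscodingSession>& lhs,
                    const std::unique_ptr<TranscodingSession>& rhs) const {
        if (lhs->priority != rhs->priority) {
            return lhs->priority < rhs->priority;  // lower priority ranks below
        }
        return lhs->createTimeUs > rhs->createTimeUs;  // later creation ranks below
    }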
diff --git a/media/libmediatranscoding/tests/Android.bp b/media/libmediatranscoding/tests/Android.bp
index 8191b00..7b15b1b 100644
--- a/media/libmediatranscoding/tests/Android.bp
+++ b/media/libmediatranscoding/tests/Android.bp
@@ -38,6 +38,16 @@
 }
 
 //
+// TranscodingSessionController unit test
+//
+cc_test {
+    name: "TranscodingSessionController_tests",
+    defaults: ["libmediatranscoding_test_defaults"],
+
+    srcs: ["TranscodingSessionController_tests.cpp"],
+}
+
+//
 // AdjustableMaxPriorityQueue unit test
 //
 cc_test {
@@ -45,4 +55,4 @@
     defaults: ["libmediatranscoding_test_defaults"],
 
     srcs: ["AdjustableMaxPriorityQueue_tests.cpp"],
-}
\ No newline at end of file
+}
diff --git a/media/libmediatranscoding/tests/TranscodingClientManager_tests.cpp b/media/libmediatranscoding/tests/TranscodingClientManager_tests.cpp
index 5d2419d..1a50923 100644
--- a/media/libmediatranscoding/tests/TranscodingClientManager_tests.cpp
+++ b/media/libmediatranscoding/tests/TranscodingClientManager_tests.cpp
@@ -19,262 +19,522 @@
 // #define LOG_NDEBUG 0
 #define LOG_TAG "TranscodingClientManagerTest"
 
-#include <aidl/android/media/BnTranscodingServiceClient.h>
+#include <aidl/android/media/BnTranscodingClientCallback.h>
 #include <aidl/android/media/IMediaTranscodingService.h>
-#include <aidl/android/media/ITranscodingServiceClient.h>
 #include <android-base/logging.h>
 #include <android/binder_manager.h>
 #include <android/binder_process.h>
 #include <gtest/gtest.h>
+#include <media/ControllerClientInterface.h>
 #include <media/TranscodingClientManager.h>
+#include <media/TranscodingRequest.h>
 #include <utils/Log.h>
 
+#include <list>
+
 namespace android {
 
 using Status = ::ndk::ScopedAStatus;
-using aidl::android::media::BnTranscodingServiceClient;
-using aidl::android::media::IMediaTranscodingService;
-using aidl::android::media::ITranscodingServiceClient;
+using ::aidl::android::media::BnTranscodingClientCallback;
+using ::aidl::android::media::IMediaTranscodingService;
+using ::aidl::android::media::TranscodingErrorCode;
+using ::aidl::android::media::TranscodingRequestParcel;
+using ::aidl::android::media::TranscodingResultParcel;
+using ::aidl::android::media::TranscodingSessionParcel;
+using ::aidl::android::media::TranscodingSessionPriority;
 
-constexpr int32_t kInvalidClientId = -1;
-constexpr int32_t kInvalidClientPid = -1;
-constexpr int32_t kInvalidClientUid = -1;
-constexpr const char* kInvalidClientOpPackageName = "";
+constexpr pid_t kInvalidClientPid = -5;
+constexpr pid_t kInvalidClientUid = -10;
+constexpr const char* kInvalidClientName = "";
+constexpr const char* kInvalidClientPackage = "";
 
-constexpr int32_t kClientId = 1;
-constexpr int32_t kClientPid = 2;
-constexpr int32_t kClientUid = 3;
-constexpr const char* kClientOpPackageName = "TestClient";
+constexpr const char* kClientName = "TestClientName";
+constexpr const char* kClientPackage = "TestClientPackage";
 
-struct TestClient : public BnTranscodingServiceClient {
-    TestClient(const std::shared_ptr<IMediaTranscodingService>& service) : mService(service) {
-        ALOGD("TestClient Created");
-    }
+#define SESSION(n) (n)
 
-    Status getName(std::string* _aidl_return) override {
-        *_aidl_return = "test_client";
+struct TestClientCallback : public BnTranscodingClientCallback {
+    TestClientCallback() { ALOGI("TestClientCallback Created"); }
+
+    virtual ~TestClientCallback() { ALOGI("TestClientCallback destroyed"); };
+
+    Status openFileDescriptor(const std::string& /*in_fileUri*/, const std::string& /*in_mode*/,
+                              ::ndk::ScopedFileDescriptor* /*_aidl_return*/) override {
         return Status::ok();
     }
 
-    Status onTranscodingFinished(
-            int32_t /* in_jobId */,
-            const ::aidl::android::media::TranscodingResultParcel& /* in_result */) override {
+    Status onTranscodingStarted(int32_t /*in_sessionId*/) override { return Status::ok(); }
+
+    Status onTranscodingPaused(int32_t /*in_sessionId*/) override { return Status::ok(); }
+
+    Status onTranscodingResumed(int32_t /*in_sessionId*/) override { return Status::ok(); }
+
+    Status onTranscodingFinished(int32_t in_sessionId,
+                                 const TranscodingResultParcel& in_result) override {
+        EXPECT_EQ(in_sessionId, in_result.sessionId);
+        mEventQueue.push_back(Finished(in_sessionId));
         return Status::ok();
     }
 
-    Status onTranscodingFailed(
-            int32_t /* in_jobId */,
-            ::aidl::android::media::TranscodingErrorCode /*in_errorCode */) override {
+    Status onTranscodingFailed(int32_t in_sessionId,
+                               TranscodingErrorCode /*in_errorCode */) override {
+        mEventQueue.push_back(Failed(in_sessionId));
         return Status::ok();
     }
 
-    Status onAwaitNumberOfJobsChanged(int32_t /* in_jobId */, int32_t /* in_oldAwaitNumber */,
-                                      int32_t /* in_newAwaitNumber */) override {
+    Status onAwaitNumberOfSessionsChanged(int32_t /* in_sessionId */,
+                                          int32_t /* in_oldAwaitNumber */,
+                                          int32_t /* in_newAwaitNumber */) override {
         return Status::ok();
     }
 
-    Status onProgressUpdate(int32_t /* in_jobId */, int32_t /* in_progress */) override {
+    Status onProgressUpdate(int32_t /* in_sessionId */, int32_t /* in_progress */) override {
         return Status::ok();
     }
 
-    virtual ~TestClient() { ALOGI("TestClient destroyed"); };
+    struct Event {
+        enum {
+            NoEvent,
+            Finished,
+            Failed,
+        } type;
+        SessionIdType sessionId;
+    };
 
-   private:
-    std::shared_ptr<IMediaTranscodingService> mService;
-    TestClient(const TestClient&) = delete;
-    TestClient& operator=(const TestClient&) = delete;
+    static constexpr Event NoEvent = {Event::NoEvent, 0};
+#define DECLARE_EVENT(action) \
+    static Event action(SessionIdType sessionId) { return {Event::action, sessionId}; }
+
+    DECLARE_EVENT(Finished);
+    DECLARE_EVENT(Failed);
+
+    const Event& popEvent() {
+        if (mEventQueue.empty()) {
+            mPoppedEvent = NoEvent;
+        } else {
+            mPoppedEvent = *mEventQueue.begin();
+            mEventQueue.pop_front();
+        }
+        return mPoppedEvent;
+    }
+
+private:
+    Event mPoppedEvent;
+    std::list<Event> mEventQueue;
+
+    TestClientCallback(const TestClientCallback&) = delete;
+    TestClientCallback& operator=(const TestClientCallback&) = delete;
+};
+
+bool operator==(const TestClientCallback::Event& lhs, const TestClientCallback::Event& rhs) {
+    return lhs.type == rhs.type && lhs.sessionId == rhs.sessionId;
+}
+
+struct TestController : public ControllerClientInterface {
+    TestController() { ALOGI("TestController Created"); }
+
+    virtual ~TestController() { ALOGI("TestController Destroyed"); }
+
+    bool submit(ClientIdType clientId, SessionIdType sessionId, uid_t /*uid*/,
+                const TranscodingRequestParcel& request,
+                const std::weak_ptr<ITranscodingClientCallback>& clientCallback) override {
+        SessionKeyType sessionKey = std::make_pair(clientId, sessionId);
+        if (mSessions.count(sessionKey) > 0) {
+            return false;
+        }
+
+        // This is the special source path we check for, to test error propagation from
+        // the controller back to the client.
+        if (request.sourceFilePath == "bad_source_file") {
+            return false;
+        }
+
+        mSessions[sessionKey].request = request;
+        mSessions[sessionKey].callback = clientCallback;
+
+        mLastSession = sessionKey;
+        return true;
+    }
+
+    bool cancel(ClientIdType clientId, SessionIdType sessionId) override {
+        SessionKeyType sessionKey = std::make_pair(clientId, sessionId);
+
+        if (mSessions.count(sessionKey) == 0) {
+            return false;
+        }
+        mSessions.erase(sessionKey);
+        return true;
+    }
+
+    bool getSession(ClientIdType clientId, SessionIdType sessionId,
+                    TranscodingRequestParcel* request) override {
+        SessionKeyType sessionKey = std::make_pair(clientId, sessionId);
+        if (mSessions.count(sessionKey) == 0) {
+            return false;
+        }
+
+        *(TranscodingRequest*)request = mSessions[sessionKey].request;
+        return true;
+    }
+
+    void finishLastSession() {
+        auto it = mSessions.find(mLastSession);
+        if (it == mSessions.end()) {
+            return;
+        }
+        {
+            auto clientCallback = it->second.callback.lock();
+            if (clientCallback != nullptr) {
+                clientCallback->onTranscodingFinished(
+                        mLastSession.second,
+                        TranscodingResultParcel({mLastSession.second, 0, std::nullopt}));
+            }
+        }
+        mSessions.erase(it);
+    }
+
+    void abortLastSession() {
+        auto it = mSessions.find(mLastSession);
+        if (it == mSessions.end()) {
+            return;
+        }
+        {
+            auto clientCallback = it->second.callback.lock();
+            if (clientCallback != nullptr) {
+                clientCallback->onTranscodingFailed(mLastSession.second,
+                                                    TranscodingErrorCode::kUnknown);
+            }
+        }
+        mSessions.erase(it);
+    }
+
+    struct Session {
+        TranscodingRequest request;
+        std::weak_ptr<ITranscodingClientCallback> callback;
+    };
+
+    typedef std::pair<ClientIdType, SessionIdType> SessionKeyType;
+    std::map<SessionKeyType, Session> mSessions;
+    SessionKeyType mLastSession;
 };
 
 class TranscodingClientManagerTest : public ::testing::Test {
-   public:
-    TranscodingClientManagerTest() : mClientManager(TranscodingClientManager::getInstance()) {
+public:
+    TranscodingClientManagerTest()
+          : mController(new TestController()),
+            mClientManager(new TranscodingClientManager(mController)) {
         ALOGD("TranscodingClientManagerTest created");
     }
 
     void SetUp() override {
-        ::ndk::SpAIBinder binder(AServiceManager_getService("media.transcoding"));
-        mService = IMediaTranscodingService::fromBinder(binder);
-        if (mService == nullptr) {
-            ALOGE("Failed to connect to the media.trascoding service.");
-            return;
-        }
-
-        mTestClient = ::ndk::SharedRefBase::make<TestClient>(mService);
+        mClientCallback1 = ::ndk::SharedRefBase::make<TestClientCallback>();
+        mClientCallback2 = ::ndk::SharedRefBase::make<TestClientCallback>();
+        mClientCallback3 = ::ndk::SharedRefBase::make<TestClientCallback>();
     }
 
-    void TearDown() override {
-        ALOGI("TranscodingClientManagerTest tear down");
-        mService = nullptr;
-    }
+    void TearDown() override { ALOGI("TranscodingClientManagerTest tear down"); }
 
     ~TranscodingClientManagerTest() { ALOGD("TranscodingClientManagerTest destroyed"); }
 
-    TranscodingClientManager& mClientManager;
-    std::shared_ptr<ITranscodingServiceClient> mTestClient = nullptr;
-    std::shared_ptr<IMediaTranscodingService> mService = nullptr;
+    void addMultipleClients() {
+        EXPECT_EQ(
+                mClientManager->addClient(mClientCallback1, kClientName, kClientPackage, &mClient1),
+                OK);
+        EXPECT_NE(mClient1, nullptr);
+
+        EXPECT_EQ(
+                mClientManager->addClient(mClientCallback2, kClientName, kClientPackage, &mClient2),
+                OK);
+        EXPECT_NE(mClient2, nullptr);
+
+        EXPECT_EQ(
+                mClientManager->addClient(mClientCallback3, kClientName, kClientPackage, &mClient3),
+                OK);
+        EXPECT_NE(mClient3, nullptr);
+
+        EXPECT_EQ(mClientManager->getNumOfClients(), 3);
+    }
+
+    void unregisterMultipleClients() {
+        EXPECT_TRUE(mClient1->unregister().isOk());
+        EXPECT_TRUE(mClient2->unregister().isOk());
+        EXPECT_TRUE(mClient3->unregister().isOk());
+        EXPECT_EQ(mClientManager->getNumOfClients(), 0);
+    }
+
+    std::shared_ptr<TestController> mController;
+    std::shared_ptr<TranscodingClientManager> mClientManager;
+    std::shared_ptr<ITranscodingClient> mClient1;
+    std::shared_ptr<ITranscodingClient> mClient2;
+    std::shared_ptr<ITranscodingClient> mClient3;
+    std::shared_ptr<TestClientCallback> mClientCallback1;
+    std::shared_ptr<TestClientCallback> mClientCallback2;
+    std::shared_ptr<TestClientCallback> mClientCallback3;
 };
 
-TEST_F(TranscodingClientManagerTest, TestAddingWithInvalidClientId) {
-    std::shared_ptr<ITranscodingServiceClient> client =
-            ::ndk::SharedRefBase::make<TestClient>(mService);
-
-    // Create a client with invalid client id.
-    std::unique_ptr<TranscodingClientManager::ClientInfo> clientInfo =
-            std::make_unique<TranscodingClientManager::ClientInfo>(
-                    client, kInvalidClientId, kClientPid, kClientUid, kClientOpPackageName);
-
-    // Add the client to the manager and expect failure.
-    status_t err = mClientManager.addClient(std::move(clientInfo));
-    EXPECT_TRUE(err != OK);
+TEST_F(TranscodingClientManagerTest, TestAddingWithInvalidClientCallback) {
+    // Add a client with null callback and expect failure.
+    std::shared_ptr<ITranscodingClient> client;
+    status_t err = mClientManager->addClient(nullptr, kClientName, kClientPackage, &client);
+    EXPECT_EQ(err, IMediaTranscodingService::ERROR_ILLEGAL_ARGUMENT);
 }
+//
+//TEST_F(TranscodingClientManagerTest, TestAddingWithInvalidClientPid) {
+//    // Add a client with invalid Pid and expect failure.
+//    std::shared_ptr<ITranscodingClient> client;
+//    status_t err = mClientManager->addClient(mClientCallback1,
+//                                             kClientName, kClientPackage, &client);
+//    EXPECT_EQ(err, IMediaTranscodingService::ERROR_ILLEGAL_ARGUMENT);
+//}
 
-TEST_F(TranscodingClientManagerTest, TestAddingWithInvalidClientPid) {
-    std::shared_ptr<ITranscodingServiceClient> client =
-            ::ndk::SharedRefBase::make<TestClient>(mService);
-
-    // Create a client with invalid Pid.
-    std::unique_ptr<TranscodingClientManager::ClientInfo> clientInfo =
-            std::make_unique<TranscodingClientManager::ClientInfo>(
-                    client, kClientId, kInvalidClientPid, kClientUid, kClientOpPackageName);
-
-    // Add the client to the manager and expect failure.
-    status_t err = mClientManager.addClient(std::move(clientInfo));
-    EXPECT_TRUE(err != OK);
-}
-
-TEST_F(TranscodingClientManagerTest, TestAddingWithInvalidClientUid) {
-    std::shared_ptr<ITranscodingServiceClient> client =
-            ::ndk::SharedRefBase::make<TestClient>(mService);
-
-    // Create a client with invalid Uid.
-    std::unique_ptr<TranscodingClientManager::ClientInfo> clientInfo =
-            std::make_unique<TranscodingClientManager::ClientInfo>(
-                    client, kClientId, kClientPid, kInvalidClientUid, kClientOpPackageName);
-
-    // Add the client to the manager and expect failure.
-    status_t err = mClientManager.addClient(std::move(clientInfo));
-    EXPECT_TRUE(err != OK);
+TEST_F(TranscodingClientManagerTest, TestAddingWithInvalidClientName) {
+    // Add a client with invalid name and expect failure.
+    std::shared_ptr<ITranscodingClient> client;
+    status_t err = mClientManager->addClient(mClientCallback1, kInvalidClientName, kClientPackage,
+                                             &client);
+    EXPECT_EQ(err, IMediaTranscodingService::ERROR_ILLEGAL_ARGUMENT);
 }
 
 TEST_F(TranscodingClientManagerTest, TestAddingWithInvalidClientPackageName) {
-    std::shared_ptr<ITranscodingServiceClient> client =
-            ::ndk::SharedRefBase::make<TestClient>(mService);
-
-    // Create a client with invalid packagename.
-    std::unique_ptr<TranscodingClientManager::ClientInfo> clientInfo =
-            std::make_unique<TranscodingClientManager::ClientInfo>(
-                    client, kClientId, kClientPid, kClientUid, kInvalidClientOpPackageName);
-
-    // Add the client to the manager and expect failure.
-    status_t err = mClientManager.addClient(std::move(clientInfo));
-    EXPECT_TRUE(err != OK);
+    // Add a client with invalid packagename and expect failure.
+    std::shared_ptr<ITranscodingClient> client;
+    status_t err = mClientManager->addClient(mClientCallback1, kClientName, kInvalidClientPackage,
+                                             &client);
+    EXPECT_EQ(err, IMediaTranscodingService::ERROR_ILLEGAL_ARGUMENT);
 }
 
 TEST_F(TranscodingClientManagerTest, TestAddingValidClient) {
-    std::shared_ptr<ITranscodingServiceClient> client1 =
-            ::ndk::SharedRefBase::make<TestClient>(mService);
+    // Add a valid client, should succeed.
+    std::shared_ptr<ITranscodingClient> client;
+    status_t err =
+            mClientManager->addClient(mClientCallback1, kClientName, kClientPackage, &client);
+    EXPECT_EQ(err, OK);
+    EXPECT_NE(client.get(), nullptr);
+    EXPECT_EQ(mClientManager->getNumOfClients(), 1);
 
-    std::unique_ptr<TranscodingClientManager::ClientInfo> clientInfo =
-            std::make_unique<TranscodingClientManager::ClientInfo>(
-                    client1, kClientId, kClientPid, kClientUid, kClientOpPackageName);
-
-    status_t err = mClientManager.addClient(std::move(clientInfo));
-    EXPECT_TRUE(err == OK);
-
-    size_t numOfClients = mClientManager.getNumOfClients();
-    EXPECT_EQ(numOfClients, 1);
-
-    err = mClientManager.removeClient(kClientId);
-    EXPECT_TRUE(err == OK);
+    // Unregister client, should succeed.
+    Status status = client->unregister();
+    EXPECT_TRUE(status.isOk());
+    EXPECT_EQ(mClientManager->getNumOfClients(), 0);
 }
 
 TEST_F(TranscodingClientManagerTest, TestAddingDupliacteClient) {
-    std::shared_ptr<ITranscodingServiceClient> client1 =
-            ::ndk::SharedRefBase::make<TestClient>(mService);
+    std::shared_ptr<ITranscodingClient> client;
+    status_t err =
+            mClientManager->addClient(mClientCallback1, kClientName, kClientPackage, &client);
+    EXPECT_EQ(err, OK);
+    EXPECT_NE(client.get(), nullptr);
+    EXPECT_EQ(mClientManager->getNumOfClients(), 1);
 
-    std::unique_ptr<TranscodingClientManager::ClientInfo> clientInfo =
-            std::make_unique<TranscodingClientManager::ClientInfo>(
-                    client1, kClientId, kClientPid, kClientUid, kClientOpPackageName);
+    std::shared_ptr<ITranscodingClient> dupClient;
+    err = mClientManager->addClient(mClientCallback1, "dupClient", "dupPackage", &dupClient);
+    EXPECT_EQ(err, IMediaTranscodingService::ERROR_ALREADY_EXISTS);
+    EXPECT_EQ(dupClient.get(), nullptr);
+    EXPECT_EQ(mClientManager->getNumOfClients(), 1);
 
-    status_t err = mClientManager.addClient(std::move(clientInfo));
-    EXPECT_TRUE(err == OK);
+    Status status = client->unregister();
+    EXPECT_TRUE(status.isOk());
+    EXPECT_EQ(mClientManager->getNumOfClients(), 0);
 
-    err = mClientManager.addClient(std::move(clientInfo));
-    EXPECT_TRUE(err != OK);
+    err = mClientManager->addClient(mClientCallback1, "dupClient", "dupPackage", &dupClient);
+    EXPECT_EQ(err, OK);
+    EXPECT_NE(dupClient.get(), nullptr);
+    EXPECT_EQ(mClientManager->getNumOfClients(), 1);
 
-    err = mClientManager.removeClient(kClientId);
-    EXPECT_TRUE(err == OK);
+    status = dupClient->unregister();
+    EXPECT_TRUE(status.isOk());
+    EXPECT_EQ(mClientManager->getNumOfClients(), 0);
 }
 
 TEST_F(TranscodingClientManagerTest, TestAddingMultipleClient) {
-    std::shared_ptr<ITranscodingServiceClient> client1 =
-            ::ndk::SharedRefBase::make<TestClient>(mService);
-
-    std::unique_ptr<TranscodingClientManager::ClientInfo> clientInfo1 =
-            std::make_unique<TranscodingClientManager::ClientInfo>(
-                    client1, kClientId, kClientPid, kClientUid, kClientOpPackageName);
-
-    status_t err = mClientManager.addClient(std::move(clientInfo1));
-    EXPECT_TRUE(err == OK);
-
-    std::shared_ptr<ITranscodingServiceClient> client2 =
-            ::ndk::SharedRefBase::make<TestClient>(mService);
-
-    std::unique_ptr<TranscodingClientManager::ClientInfo> clientInfo2 =
-            std::make_unique<TranscodingClientManager::ClientInfo>(
-                    client2, kClientId + 1, kClientPid, kClientUid, kClientOpPackageName);
-
-    err = mClientManager.addClient(std::move(clientInfo2));
-    EXPECT_TRUE(err == OK);
-
-    std::shared_ptr<ITranscodingServiceClient> client3 =
-            ::ndk::SharedRefBase::make<TestClient>(mService);
-
-    // Create a client with invalid packagename.
-    std::unique_ptr<TranscodingClientManager::ClientInfo> clientInfo3 =
-            std::make_unique<TranscodingClientManager::ClientInfo>(
-                    client3, kClientId + 2, kClientPid, kClientUid, kClientOpPackageName);
-
-    err = mClientManager.addClient(std::move(clientInfo3));
-    EXPECT_TRUE(err == OK);
-
-    size_t numOfClients = mClientManager.getNumOfClients();
-    EXPECT_EQ(numOfClients, 3);
-
-    err = mClientManager.removeClient(kClientId);
-    EXPECT_TRUE(err == OK);
-
-    err = mClientManager.removeClient(kClientId + 1);
-    EXPECT_TRUE(err == OK);
-
-    err = mClientManager.removeClient(kClientId + 2);
-    EXPECT_TRUE(err == OK);
+    addMultipleClients();
+    unregisterMultipleClients();
 }
 
-TEST_F(TranscodingClientManagerTest, TestRemovingNonExistClient) {
-    status_t err = mClientManager.removeClient(kInvalidClientId);
-    EXPECT_TRUE(err != OK);
+TEST_F(TranscodingClientManagerTest, TestSubmitCancelGetSessions) {
+    addMultipleClients();
 
-    err = mClientManager.removeClient(1000 /* clientId */);
-    EXPECT_TRUE(err != OK);
+    // Test sessionId assignment.
+    TranscodingRequestParcel request;
+    request.sourceFilePath = "test_source_file_0";
+    request.destinationFilePath = "test_destination_file_0";
+    TranscodingSessionParcel session;
+    bool result;
+    EXPECT_TRUE(mClient1->submitRequest(request, &session, &result).isOk());
+    EXPECT_TRUE(result);
+    EXPECT_EQ(session.sessionId, SESSION(0));
+
+    request.sourceFilePath = "test_source_file_1";
+    request.destinationFilePath = "test_destination_file_1";
+    EXPECT_TRUE(mClient1->submitRequest(request, &session, &result).isOk());
+    EXPECT_TRUE(result);
+    EXPECT_EQ(session.sessionId, SESSION(1));
+
+    request.sourceFilePath = "test_source_file_2";
+    request.destinationFilePath = "test_destination_file_2";
+    EXPECT_TRUE(mClient1->submitRequest(request, &session, &result).isOk());
+    EXPECT_TRUE(result);
+    EXPECT_EQ(session.sessionId, SESSION(2));
+
+    // Test that submitting a bad request (a sourceFilePath the controller rejects) fails.
+    TranscodingRequestParcel badRequest;
+    badRequest.sourceFilePath = "bad_source_file";
+    badRequest.destinationFilePath = "bad_destination_file";
+    EXPECT_TRUE(mClient1->submitRequest(badRequest, &session, &result).isOk());
+    EXPECT_FALSE(result);
+
+    // Test submit with bad pid/uid.
+    badRequest.sourceFilePath = "test_source_file_3";
+    badRequest.destinationFilePath = "test_destination_file_3";
+    badRequest.clientPid = kInvalidClientPid;
+    badRequest.clientUid = kInvalidClientUid;
+    EXPECT_TRUE(mClient1->submitRequest(badRequest, &session, &result).isOk());
+    EXPECT_FALSE(result);
+
+    // Test get sessions by id.
+    EXPECT_TRUE(mClient1->getSessionWithId(SESSION(2), &session, &result).isOk());
+    EXPECT_EQ(session.sessionId, SESSION(2));
+    EXPECT_EQ(session.request.sourceFilePath, "test_source_file_2");
+    EXPECT_TRUE(result);
+
+    // Test get sessions by invalid id fails.
+    EXPECT_TRUE(mClient1->getSessionWithId(SESSION(100), &session, &result).isOk());
+    EXPECT_FALSE(result);
+
+    // Test that cancelling a non-existent session fails.
+    EXPECT_TRUE(mClient2->cancelSession(SESSION(100), &result).isOk());
+    EXPECT_FALSE(result);
+
+    // Test cancel valid sessionId in arbitrary order.
+    EXPECT_TRUE(mClient1->cancelSession(SESSION(2), &result).isOk());
+    EXPECT_TRUE(result);
+
+    EXPECT_TRUE(mClient1->cancelSession(SESSION(0), &result).isOk());
+    EXPECT_TRUE(result);
+
+    EXPECT_TRUE(mClient1->cancelSession(SESSION(1), &result).isOk());
+    EXPECT_TRUE(result);
+
+    // Test that cancelling the same session again fails.
+    EXPECT_TRUE(mClient1->cancelSession(SESSION(1), &result).isOk());
+    EXPECT_FALSE(result);
+
+    // Test get session after cancel fails.
+    EXPECT_TRUE(mClient1->getSessionWithId(SESSION(2), &session, &result).isOk());
+    EXPECT_FALSE(result);
+
+    // Test sessionId independence for each client.
+    EXPECT_TRUE(mClient2->submitRequest(request, &session, &result).isOk());
+    EXPECT_TRUE(result);
+    EXPECT_EQ(session.sessionId, SESSION(0));
+
+    EXPECT_TRUE(mClient2->submitRequest(request, &session, &result).isOk());
+    EXPECT_TRUE(result);
+    EXPECT_EQ(session.sessionId, SESSION(1));
+
+    unregisterMultipleClients();
 }
 
-TEST_F(TranscodingClientManagerTest, TestCheckClientWithClientId) {
-    std::shared_ptr<ITranscodingServiceClient> client =
-            ::ndk::SharedRefBase::make<TestClient>(mService);
+TEST_F(TranscodingClientManagerTest, TestClientCallback) {
+    addMultipleClients();
 
-    std::unique_ptr<TranscodingClientManager::ClientInfo> clientInfo =
-            std::make_unique<TranscodingClientManager::ClientInfo>(
-                    client, kClientId, kClientPid, kClientUid, kClientOpPackageName);
+    TranscodingRequestParcel request;
+    request.sourceFilePath = "test_source_file_name";
+    request.destinationFilePath = "test_destination_file_name";
+    TranscodingSessionParcel session;
+    bool result;
+    EXPECT_TRUE(mClient1->submitRequest(request, &session, &result).isOk());
+    EXPECT_TRUE(result);
+    EXPECT_EQ(session.sessionId, SESSION(0));
 
-    status_t err = mClientManager.addClient(std::move(clientInfo));
-    EXPECT_TRUE(err == OK);
+    mController->finishLastSession();
+    EXPECT_EQ(mClientCallback1->popEvent(), TestClientCallback::Finished(session.sessionId));
 
-    bool res = mClientManager.isClientIdRegistered(kClientId);
-    EXPECT_TRUE(res);
+    EXPECT_TRUE(mClient1->submitRequest(request, &session, &result).isOk());
+    EXPECT_TRUE(result);
+    EXPECT_EQ(session.sessionId, SESSION(1));
 
-    res = mClientManager.isClientIdRegistered(kInvalidClientId);
-    EXPECT_FALSE(res);
+    mController->abortLastSession();
+    EXPECT_EQ(mClientCallback1->popEvent(), TestClientCallback::Failed(session.sessionId));
+
+    EXPECT_TRUE(mClient1->submitRequest(request, &session, &result).isOk());
+    EXPECT_TRUE(result);
+    EXPECT_EQ(session.sessionId, SESSION(2));
+
+    EXPECT_TRUE(mClient2->submitRequest(request, &session, &result).isOk());
+    EXPECT_TRUE(result);
+    EXPECT_EQ(session.sessionId, SESSION(0));
+
+    mController->finishLastSession();
+    EXPECT_EQ(mClientCallback2->popEvent(), TestClientCallback::Finished(session.sessionId));
+
+    unregisterMultipleClients();
 }
 
-}  // namespace android
\ No newline at end of file
+TEST_F(TranscodingClientManagerTest, TestUseAfterUnregister) {
+    // Add a client.
+    std::shared_ptr<ITranscodingClient> client;
+    status_t err =
+            mClientManager->addClient(mClientCallback1, kClientName, kClientPackage, &client);
+    EXPECT_EQ(err, OK);
+    EXPECT_NE(client.get(), nullptr);
+
+    // Submit 2 requests, 1 offline and 1 realtime.
+    TranscodingRequestParcel request;
+    TranscodingSessionParcel session;
+    bool result;
+
+    request.sourceFilePath = "test_source_file_0";
+    request.destinationFilePath = "test_destination_file_0";
+    request.priority = TranscodingSessionPriority::kUnspecified;
+    EXPECT_TRUE(client->submitRequest(request, &session, &result).isOk() && result);
+    EXPECT_EQ(session.sessionId, SESSION(0));
+
+    request.sourceFilePath = "test_source_file_1";
+    request.destinationFilePath = "test_destination_file_1";
+    request.priority = TranscodingSessionPriority::kNormal;
+    EXPECT_TRUE(client->submitRequest(request, &session, &result).isOk() && result);
+    EXPECT_EQ(session.sessionId, SESSION(1));
+
+    // Unregister client, should succeed.
+    Status status = client->unregister();
+    EXPECT_TRUE(status.isOk());
+
+    // Test submit new request after unregister, should fail with ERROR_DISCONNECTED.
+    request.sourceFilePath = "test_source_file_2";
+    request.destinationFilePath = "test_destination_file_2";
+    request.priority = TranscodingSessionPriority::kNormal;
+    status = client->submitRequest(request, &session, &result);
+    EXPECT_FALSE(status.isOk());
+    EXPECT_EQ(status.getServiceSpecificError(), IMediaTranscodingService::ERROR_DISCONNECTED);
+
+    // Test cancel sessions after unregister, should fail with ERROR_DISCONNECTED
+    // regardless of realtime or offline session, or whether the sessionId is valid.
+    status = client->cancelSession(SESSION(0), &result);
+    EXPECT_FALSE(status.isOk());
+    EXPECT_EQ(status.getServiceSpecificError(), IMediaTranscodingService::ERROR_DISCONNECTED);
+
+    status = client->cancelSession(SESSION(1), &result);
+    EXPECT_FALSE(status.isOk());
+    EXPECT_EQ(status.getServiceSpecificError(), IMediaTranscodingService::ERROR_DISCONNECTED);
+
+    status = client->cancelSession(SESSION(2), &result);
+    EXPECT_FALSE(status.isOk());
+    EXPECT_EQ(status.getServiceSpecificError(), IMediaTranscodingService::ERROR_DISCONNECTED);
+
+    // Test get sessions, should fail with ERROR_DISCONNECTED regardless of realtime
+    // or offline session, or whether the sessionId is valid.
+    status = client->getSessionWithId(SESSION(0), &session, &result);
+    EXPECT_FALSE(status.isOk());
+    EXPECT_EQ(status.getServiceSpecificError(), IMediaTranscodingService::ERROR_DISCONNECTED);
+
+    status = client->getSessionWithId(SESSION(1), &session, &result);
+    EXPECT_FALSE(status.isOk());
+    EXPECT_EQ(status.getServiceSpecificError(), IMediaTranscodingService::ERROR_DISCONNECTED);
+
+    status = client->getSessionWithId(SESSION(2), &session, &result);
+    EXPECT_FALSE(status.isOk());
+    EXPECT_EQ(status.getServiceSpecificError(), IMediaTranscodingService::ERROR_DISCONNECTED);
+}
+
+}  // namespace android
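TestUseAfterUnregister pins down a contract: once unregister() succeeds, every ITranscodingClient entry point must fail with ERROR_DISCONNECTED before any session lookup, regardless of whether the sessionId was ever valid. A minimal way to satisfy that is an "abandoned" flag checked at the top of each call; the class and member names below are hypothetical, not the actual TranscodingClientManager implementation.

    // Hypothetical sketch of the use-after-unregister guard the test expects.
    Status ClientImpl::submitRequest(const TranscodingRequestParcel& request,
                                     TranscodingSessionParcel* session, bool* result) {
        if (mAbandoned) {  // set by unregister(); assumed std::atomic<bool>
            *result = false;
            return Status::fromServiceSpecificError(
                    IMediaTranscodingService::ERROR_DISCONNECTED);
        }
        // Normal path: assign the next per-client sessionId and hand the request to
        // the controller via ControllerClientInterface::submit().
        return Status::ok();
    }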
diff --git a/media/libmediatranscoding/tests/TranscodingSessionController_tests.cpp b/media/libmediatranscoding/tests/TranscodingSessionController_tests.cpp
new file mode 100644
index 0000000..4809d7a
--- /dev/null
+++ b/media/libmediatranscoding/tests/TranscodingSessionController_tests.cpp
@@ -0,0 +1,611 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Unit Test for TranscodingSessionController
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "TranscodingSessionControllerTest"
+
+#include <aidl/android/media/BnTranscodingClientCallback.h>
+#include <aidl/android/media/IMediaTranscodingService.h>
+#include <aidl/android/media/ITranscodingClient.h>
+#include <aidl/android/media/ITranscodingClientCallback.h>
+#include <android-base/logging.h>
+#include <android/binder_manager.h>
+#include <android/binder_process.h>
+#include <gtest/gtest.h>
+#include <media/TranscodingClientManager.h>
+#include <media/TranscodingSessionController.h>
+#include <utils/Log.h>
+
+#include <unordered_set>
+
+namespace android {
+
+using Status = ::ndk::ScopedAStatus;
+using aidl::android::media::BnTranscodingClientCallback;
+using aidl::android::media::IMediaTranscodingService;
+using aidl::android::media::ITranscodingClient;
+using aidl::android::media::TranscodingRequestParcel;
+
+constexpr ClientIdType kClientId = 1000;
+constexpr SessionIdType kClientSessionId = 0;
+constexpr uid_t kClientUid = 5000;
+constexpr uid_t kInvalidUid = (uid_t)-1;
+
+#define CLIENT(n) (kClientId + (n))
+#define SESSION(n) (kClientSessionId + (n))
+#define UID(n) (kClientUid + (n))
+
+class TestUidPolicy : public UidPolicyInterface {
+public:
+    TestUidPolicy() = default;
+    virtual ~TestUidPolicy() = default;
+
+    // UidPolicyInterface
+    void registerMonitorUid(uid_t /*uid*/) override {}
+    void unregisterMonitorUid(uid_t /*uid*/) override {}
+    bool isUidOnTop(uid_t uid) override { return mTopUids.count(uid) > 0; }
+    std::unordered_set<uid_t> getTopUids() const override { return mTopUids; }
+    void setCallback(const std::shared_ptr<UidPolicyCallbackInterface>& cb) override {
+        mUidPolicyCallback = cb;
+    }
+    void setTop(uid_t uid) {
+        std::unordered_set<uid_t> uids = {uid};
+        setTop(uids);
+    }
+    void setTop(const std::unordered_set<uid_t>& uids) {
+        mTopUids = uids;
+        auto uidPolicyCb = mUidPolicyCallback.lock();
+        if (uidPolicyCb != nullptr) {
+            uidPolicyCb->onTopUidsChanged(mTopUids);
+        }
+    }
+
+    std::unordered_set<uid_t> mTopUids;
+    std::weak_ptr<UidPolicyCallbackInterface> mUidPolicyCallback;
+};
+
+class TestTranscoder : public TranscoderInterface {
+public:
+    TestTranscoder() : mLastError(TranscodingErrorCode::kUnknown) {}
+    virtual ~TestTranscoder() {}
+
+    // TranscoderInterface
+    void setCallback(const std::shared_ptr<TranscoderCallbackInterface>& /*cb*/) override {}
+
+    void start(ClientIdType clientId, SessionIdType sessionId,
+               const TranscodingRequestParcel& /*request*/,
+               const std::shared_ptr<ITranscodingClientCallback>& /*clientCallback*/) override {
+        mEventQueue.push_back(Start(clientId, sessionId));
+    }
+    void pause(ClientIdType clientId, SessionIdType sessionId) override {
+        mEventQueue.push_back(Pause(clientId, sessionId));
+    }
+    void resume(ClientIdType clientId, SessionIdType sessionId,
+                const TranscodingRequestParcel& /*request*/,
+                const std::shared_ptr<ITranscodingClientCallback>& /*clientCallback*/) override {
+        mEventQueue.push_back(Resume(clientId, sessionId));
+    }
+    void stop(ClientIdType clientId, SessionIdType sessionId) override {
+        mEventQueue.push_back(Stop(clientId, sessionId));
+    }
+
+    void onFinished(ClientIdType clientId, SessionIdType sessionId) {
+        mEventQueue.push_back(Finished(clientId, sessionId));
+    }
+
+    void onFailed(ClientIdType clientId, SessionIdType sessionId, TranscodingErrorCode err) {
+        mLastError = err;
+        mEventQueue.push_back(Failed(clientId, sessionId));
+    }
+
+    TranscodingErrorCode getLastError() {
+        TranscodingErrorCode result = mLastError;
+        mLastError = TranscodingErrorCode::kUnknown;
+        return result;
+    }
+
+    struct Event {
+        enum { NoEvent, Start, Pause, Resume, Stop, Finished, Failed } type;
+        ClientIdType clientId;
+        SessionIdType sessionId;
+    };
+
+    static constexpr Event NoEvent = {Event::NoEvent, 0, 0};
+
+#define DECLARE_EVENT(action)                                             \
+    static Event action(ClientIdType clientId, SessionIdType sessionId) { \
+        return {Event::action, clientId, sessionId};                      \
+    }
+
+    DECLARE_EVENT(Start);
+    DECLARE_EVENT(Pause);
+    DECLARE_EVENT(Resume);
+    DECLARE_EVENT(Stop);
+    DECLARE_EVENT(Finished);
+    DECLARE_EVENT(Failed);
+
+    const Event& popEvent() {
+        if (mEventQueue.empty()) {
+            mPoppedEvent = NoEvent;
+        } else {
+            mPoppedEvent = *mEventQueue.begin();
+            mEventQueue.pop_front();
+        }
+        return mPoppedEvent;
+    }
+
+private:
+    Event mPoppedEvent;
+    std::list<Event> mEventQueue;
+    TranscodingErrorCode mLastError;
+};
+
+bool operator==(const TestTranscoder::Event& lhs, const TestTranscoder::Event& rhs) {
+    return lhs.type == rhs.type && lhs.clientId == rhs.clientId && lhs.sessionId == rhs.sessionId;
+}
+
+struct TestClientCallback : public BnTranscodingClientCallback {
+    TestClientCallback(TestTranscoder* owner, int64_t clientId)
+          : mOwner(owner), mClientId(clientId) {
+        ALOGD("TestClientCallback created");
+    }
+
+    Status openFileDescriptor(const std::string& /*in_fileUri*/, const std::string& /*in_mode*/,
+                              ::ndk::ScopedFileDescriptor* /*_aidl_return*/) override {
+        return Status::ok();
+    }
+
+    Status onTranscodingStarted(int32_t /*in_sessionId*/) override { return Status::ok(); }
+
+    Status onTranscodingPaused(int32_t /*in_sessionId*/) override { return Status::ok(); }
+
+    Status onTranscodingResumed(int32_t /*in_sessionId*/) override { return Status::ok(); }
+
+    Status onTranscodingFinished(int32_t in_sessionId,
+                                 const TranscodingResultParcel& in_result) override {
+        EXPECT_EQ(in_sessionId, in_result.sessionId);
+        ALOGD("TestClientCallback: received onTranscodingFinished");
+        mOwner->onFinished(mClientId, in_sessionId);
+        return Status::ok();
+    }
+
+    Status onTranscodingFailed(int32_t in_sessionId, TranscodingErrorCode in_errorCode) override {
+        mOwner->onFailed(mClientId, in_sessionId, in_errorCode);
+        return Status::ok();
+    }
+
+    Status onAwaitNumberOfSessionsChanged(int32_t /* in_sessionId */,
+                                          int32_t /* in_oldAwaitNumber */,
+                                          int32_t /* in_newAwaitNumber */) override {
+        return Status::ok();
+    }
+
+    Status onProgressUpdate(int32_t /* in_sessionId */, int32_t /* in_progress */) override {
+        return Status::ok();
+    }
+
+    virtual ~TestClientCallback() { ALOGI("TestClientCallback destroyed"); };
+
+private:
+    TestTranscoder* mOwner;
+    int64_t mClientId;
+    TestClientCallback(const TestClientCallback&) = delete;
+    TestClientCallback& operator=(const TestClientCallback&) = delete;
+};
+
+class TranscodingSessionControllerTest : public ::testing::Test {
+public:
+    TranscodingSessionControllerTest() { ALOGI("TranscodingSessionControllerTest created"); }
+
+    void SetUp() override {
+        ALOGI("TranscodingSessionControllerTest set up");
+        mTranscoder.reset(new TestTranscoder());
+        mUidPolicy.reset(new TestUidPolicy());
+        mController.reset(new TranscodingSessionController(mTranscoder, mUidPolicy,
+                                                           nullptr /*resourcePolicy*/));
+        mUidPolicy->setCallback(mController);
+
+        // Set priority only, ignore other fields for now.
+        mOfflineRequest.priority = TranscodingSessionPriority::kUnspecified;
+        mRealtimeRequest.priority = TranscodingSessionPriority::kHigh;
+        mClientCallback0 =
+                ::ndk::SharedRefBase::make<TestClientCallback>(mTranscoder.get(), CLIENT(0));
+        mClientCallback1 =
+                ::ndk::SharedRefBase::make<TestClientCallback>(mTranscoder.get(), CLIENT(1));
+        mClientCallback2 =
+                ::ndk::SharedRefBase::make<TestClientCallback>(mTranscoder.get(), CLIENT(2));
+        mClientCallback3 =
+                ::ndk::SharedRefBase::make<TestClientCallback>(mTranscoder.get(), CLIENT(3));
+    }
+
+    void TearDown() override { ALOGI("TranscodingSessionControllerTest tear down"); }
+
+    ~TranscodingSessionControllerTest() { ALOGD("TranscodingSessionControllerTest destroyed"); }
+
+    std::shared_ptr<TestTranscoder> mTranscoder;
+    std::shared_ptr<TestUidPolicy> mUidPolicy;
+    std::shared_ptr<TranscodingSessionController> mController;
+    TranscodingRequestParcel mOfflineRequest;
+    TranscodingRequestParcel mRealtimeRequest;
+    std::shared_ptr<TestClientCallback> mClientCallback0;
+    std::shared_ptr<TestClientCallback> mClientCallback1;
+    std::shared_ptr<TestClientCallback> mClientCallback2;
+    std::shared_ptr<TestClientCallback> mClientCallback3;
+};
+
+TEST_F(TranscodingSessionControllerTest, TestSubmitSession) {
+    ALOGD("TestSubmitSession");
+
+    // Start with UID(1) on top.
+    mUidPolicy->setTop(UID(1));
+
+    // Submit offline session to CLIENT(0) in UID(0).
+    // Should start immediately (because this is the only session).
+    mController->submit(CLIENT(0), SESSION(0), UID(0), mOfflineRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), 0));
+
+    // Submit real-time session to CLIENT(0).
+    // Should pause the offline session and start the new session, even if UID(0) is not on top.
+    mController->submit(CLIENT(0), SESSION(1), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(1)));
+
+    // Submit real-time session to CLIENT(0), should be queued after the previous session.
+    mController->submit(CLIENT(0), SESSION(2), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Submit real-time session to CLIENT(1) in the same uid; it should be queued after the
+    // previous session.
+    mController->submit(CLIENT(1), SESSION(0), UID(0), mRealtimeRequest, mClientCallback1);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Submit real-time session to CLIENT(2) in UID(1).
+    // Should pause the previous session and start the new session, because UID(1) is currently the top uid.
+    mController->submit(CLIENT(2), SESSION(0), UID(1), mRealtimeRequest, mClientCallback2);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(1)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(2), SESSION(0)));
+
+    // Submit offline session, shouldn't generate any event.
+    mController->submit(CLIENT(2), SESSION(1), UID(1), mOfflineRequest, mClientCallback2);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Bring UID(0) to top.
+    mUidPolicy->setTop(UID(0));
+    // Should pause the current session and resume the last session in UID(0).
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(2), SESSION(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(0), SESSION(1)));
+}
+
+TEST_F(TranscodingSessionControllerTest, TestCancelSession) {
+    ALOGD("TestCancelSession");
+
+    // Submit real-time session SESSION(0), should start immediately.
+    mController->submit(CLIENT(0), SESSION(0), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(0)));
+
+    // Submit real-time session SESSION(1), should not start.
+    mController->submit(CLIENT(0), SESSION(1), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Submit offline session SESSION(2), should not start.
+    mController->submit(CLIENT(0), SESSION(2), UID(0), mOfflineRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Cancel queued real-time session SESSION(1); cancel should succeed.
+    EXPECT_TRUE(mController->cancel(CLIENT(0), SESSION(1)));
+
+    // Cancel queued offline session SESSION(2); cancel should succeed.
+    EXPECT_TRUE(mController->cancel(CLIENT(0), SESSION(2)));
+
+    // Submit offline session SESSION(3), shouldn't cause any event.
+    mController->submit(CLIENT(0), SESSION(3), UID(0), mOfflineRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Cancel running real-time session SESSION(0).
+    // - Should be stopped first, then cancelled.
+    // - Should also start offline session SESSION(3) because the real-time queue is empty.
+    EXPECT_TRUE(mController->cancel(CLIENT(0), SESSION(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Stop(CLIENT(0), SESSION(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(3)));
+
+    // Submit real-time session SESSION(4), offline SESSION(3) should pause and SESSION(4)
+    // should start.
+    mController->submit(CLIENT(0), SESSION(4), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(3)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(4)));
+
+    // Cancel paused SESSION(3). SESSION(3) should be stopped.
+    EXPECT_TRUE(mController->cancel(CLIENT(0), SESSION(3)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Stop(CLIENT(0), SESSION(3)));
+}
+
+TEST_F(TranscodingSessionControllerTest, TestFinishSession) {
+    ALOGD("TestFinishSession");
+
+    // Start with unspecified top UID.
+    // Finish without any sessions submitted, should be ignored.
+    mController->onFinish(CLIENT(0), SESSION(0));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Submit offline session SESSION(0), should start immediately.
+    mController->submit(CLIENT(0), SESSION(0), UID(0), mOfflineRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(0)));
+
+    // Submit real-time session SESSION(1), should pause offline session and start immediately.
+    mController->submit(CLIENT(0), SESSION(1), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(1)));
+
+    // Submit real-time session SESSION(2), should not start.
+    mController->submit(CLIENT(0), SESSION(2), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Finish when the session never started, should be ignored.
+    mController->onFinish(CLIENT(0), SESSION(2));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // UID(1) moves to top.
+    mUidPolicy->setTop(UID(1));
+    // Submit real-time session to CLIENT(1) in UID(1), should pause previous session and start
+    // new session.
+    mController->submit(CLIENT(1), SESSION(0), UID(1), mRealtimeRequest, mClientCallback1);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(1)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(1), SESSION(0)));
+
+    // Simulate Finish that arrived late, after pause issued by controller.
+    // Should still be propagated to client, but shouldn't trigger any new start.
+    mController->onFinish(CLIENT(0), SESSION(1));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Finished(CLIENT(0), SESSION(1)));
+
+    // Finish running real-time session, should start next real-time session in queue.
+    mController->onFinish(CLIENT(1), SESSION(0));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Finished(CLIENT(1), SESSION(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(2)));
+
+    // Finish running real-time session, should resume next session (offline session) in queue.
+    mController->onFinish(CLIENT(0), SESSION(2));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Finished(CLIENT(0), SESSION(2)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(0), SESSION(0)));
+
+    // Finish running offline session.
+    mController->onFinish(CLIENT(0), SESSION(0));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Finished(CLIENT(0), SESSION(0)));
+
+    // Duplicate finish for last session, should be ignored.
+    mController->onFinish(CLIENT(0), SESSION(0));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+}
+
+TEST_F(TranscodingSessionControllerTest, TestFailSession) {
+    ALOGD("TestFailSession");
+
+    // Start with unspecified top UID.
+    // Fail without any sessions submitted, should be ignored.
+    mController->onError(CLIENT(0), SESSION(0), TranscodingErrorCode::kUnknown);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Submit offline session SESSION(0), should start immediately.
+    mController->submit(CLIENT(0), SESSION(0), UID(0), mOfflineRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(0)));
+
+    // Submit real-time session SESSION(1), should pause offline session and start immediately.
+    mController->submit(CLIENT(0), SESSION(1), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(1)));
+
+    // Submit real-time session SESSION(2), should not start.
+    mController->submit(CLIENT(0), SESSION(2), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Fail when the session never started, should be ignored.
+    mController->onError(CLIENT(0), SESSION(2), TranscodingErrorCode::kUnknown);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // UID(1) moves to top.
+    mUidPolicy->setTop(UID(1));
+    // Submit real-time session to CLIENT(1) in UID(1), should pause previous session and start
+    // new session.
+    mController->submit(CLIENT(1), SESSION(0), UID(1), mRealtimeRequest, mClientCallback1);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(1)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(1), SESSION(0)));
+
+    // Simulate Fail that arrived late, after pause issued by controller.
+    // Should still be propagated to client, but shouldn't trigger any new start.
+    mController->onError(CLIENT(0), SESSION(1), TranscodingErrorCode::kUnknown);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Failed(CLIENT(0), SESSION(1)));
+
+    // Fail running real-time session, should start next real-time session in queue.
+    mController->onError(CLIENT(1), SESSION(0), TranscodingErrorCode::kUnknown);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Failed(CLIENT(1), SESSION(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(2)));
+
+    // Fail running real-time session, should resume next session (offline session) in queue.
+    mController->onError(CLIENT(0), SESSION(2), TranscodingErrorCode::kUnknown);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Failed(CLIENT(0), SESSION(2)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(0), SESSION(0)));
+
+    // Fail running offline session, and test error code propagation.
+    mController->onError(CLIENT(0), SESSION(0), TranscodingErrorCode::kInvalidOperation);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Failed(CLIENT(0), SESSION(0)));
+    EXPECT_EQ(mTranscoder->getLastError(), TranscodingErrorCode::kInvalidOperation);
+
+    // Duplicate fail for last session, should be ignored.
+    mController->onError(CLIENT(0), SESSION(0), TranscodingErrorCode::kUnknown);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+}
+
+TEST_F(TranscodingSessionControllerTest, TestTopUidChanged) {
+    ALOGD("TestTopUidChanged");
+
+    // Start with unspecified top UID.
+    // Submit real-time session to CLIENT(0), session should start immediately.
+    mController->submit(CLIENT(0), SESSION(0), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(0)));
+
+    // Submit offline session to CLIENT(1) in UID(0), should not start.
+    mController->submit(CLIENT(1), SESSION(0), UID(0), mOfflineRequest, mClientCallback1);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Move UID(1) to top.
+    mUidPolicy->setTop(UID(1));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Submit real-time session to CLIENT(2) in a different uid, UID(1).
+    // Should pause previous session and start new session.
+    mController->submit(CLIENT(2), SESSION(0), UID(1), mRealtimeRequest, mClientCallback2);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(2), SESSION(0)));
+
+    // Bring UID(0) back to top.
+    mUidPolicy->setTop(UID(0));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(2), SESSION(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(0), SESSION(0)));
+
+    // Bring invalid uid to top.
+    mUidPolicy->setTop(kInvalidUid);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Finish session, next real-time session should resume.
+    mController->onFinish(CLIENT(0), SESSION(0));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Finished(CLIENT(0), SESSION(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(2), SESSION(0)));
+
+    // Finish session, offline session should start.
+    mController->onFinish(CLIENT(2), SESSION(0));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Finished(CLIENT(2), SESSION(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(1), SESSION(0)));
+}
+
+TEST_F(TranscodingSessionControllerTest, TestTopUidSetChanged) {
+    ALOGD("TestTopUidChanged_MultipleUids");
+
+    // Start with unspecified top UID.
+    // Submit real-time session to CLIENT(0), session should start immediately.
+    mController->submit(CLIENT(0), SESSION(0), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(0)));
+
+    // Submit offline session to CLIENT(1) in UID(0), should not start.
+    mController->submit(CLIENT(1), SESSION(0), UID(0), mOfflineRequest, mClientCallback1);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Set UID(0), UID(1) to top set.
+    // UID(0) should continue to run.
+    mUidPolicy->setTop({UID(0), UID(1)});
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Submit real-time session to CLIENT(2) in a different uid, UID(1).
+    // UID(0) should pause and UID(1) should start.
+    mController->submit(CLIENT(2), SESSION(0), UID(1), mRealtimeRequest, mClientCallback2);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(2), SESSION(0)));
+
+    // Remove UID(0) from top set, and only leave UID(1) in the set.
+    // UID(1) should continue to run.
+    mUidPolicy->setTop(UID(1));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Set UID(1), UID(2) to top set.
+    // UID(1) should continue to run.
+    mUidPolicy->setTop({UID(1), UID(2)});
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Bring UID(0) back to top.
+    mUidPolicy->setTop(UID(0));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(2), SESSION(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(0), SESSION(0)));
+
+    // Bring invalid uid to top.
+    mUidPolicy->setTop(kInvalidUid);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Finish session, next real-time session from UID(1) should resume, even though UID(1) is
+    // no longer top.
+    mController->onFinish(CLIENT(0), SESSION(0));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Finished(CLIENT(0), SESSION(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(2), SESSION(0)));
+
+    // Finish session, offline session should start.
+    mController->onFinish(CLIENT(2), SESSION(0));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Finished(CLIENT(2), SESSION(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(1), SESSION(0)));
+}
+
+TEST_F(TranscodingSessionControllerTest, TestResourceLost) {
+    ALOGD("TestResourceLost");
+
+    // Start with unspecified top UID.
+    // Submit real-time session to CLIENT(0), session should start immediately.
+    mController->submit(CLIENT(0), SESSION(0), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(0)));
+
+    // Submit offline session to CLIENT(1) in UID(0), should not start.
+    mController->submit(CLIENT(1), SESSION(0), UID(0), mOfflineRequest, mClientCallback1);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Move UID(1) to top.
+    mUidPolicy->setTop(UID(1));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Submit real-time session to CLIENT(2) in a different uid, UID(1).
+    // Should pause previous session and start new session.
+    mController->submit(CLIENT(2), SESSION(0), UID(1), mRealtimeRequest, mClientCallback2);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(2), SESSION(0)));
+
+    // Test 1: No queue change during resource loss.
+    // Signal resource lost.
+    mController->onResourceLost();
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Signal resource available, CLIENT(2) should resume.
+    mController->onResourceAvailable();
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(2), SESSION(0)));
+
+    // Test 2: Change of queue order during resource loss.
+    // Signal resource lost.
+    mController->onResourceLost();
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Move UID(0) back to top; nothing should resume since resources are still lost.
+    mUidPolicy->setTop(UID(0));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Signal resource available, CLIENT(0) should resume.
+    mController->onResourceAvailable();
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(0), SESSION(0)));
+
+    // Test 3: Adding new queue during resource loss.
+    // Signal resource lost.
+    mController->onResourceLost();
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Move UID(2) to top.
+    mUidPolicy->setTop(UID(2));
+
+    // Submit real-time session to CLIENT(3) in UID(2); it shouldn't start since resources are still lost.
+    mController->submit(CLIENT(3), SESSION(0), UID(2), mRealtimeRequest, mClientCallback3);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Signal resource available, CLIENT(3)'s session should start.
+    mController->onResourceAvailable();
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(3), SESSION(0)));
+}
+
+}  // namespace android
diff --git a/media/libmediatranscoding/tests/assets/backyard_hevc_1920x1080_20Mbps.mp4 b/media/libmediatranscoding/tests/assets/backyard_hevc_1920x1080_20Mbps.mp4
new file mode 100644
index 0000000..80d1ec3
--- /dev/null
+++ b/media/libmediatranscoding/tests/assets/backyard_hevc_1920x1080_20Mbps.mp4
Binary files differ
diff --git a/media/libmediatranscoding/tests/assets/cubicle_avc_480x240_aac_24KHz.mp4 b/media/libmediatranscoding/tests/assets/cubicle_avc_480x240_aac_24KHz.mp4
new file mode 100644
index 0000000..ef7e1b7
--- /dev/null
+++ b/media/libmediatranscoding/tests/assets/cubicle_avc_480x240_aac_24KHz.mp4
Binary files differ
diff --git a/media/libmediatranscoding/tests/assets/desk_hevc_1920x1080_aac_48KHz_rot90.mp4 b/media/libmediatranscoding/tests/assets/desk_hevc_1920x1080_aac_48KHz_rot90.mp4
new file mode 100644
index 0000000..df42a15
--- /dev/null
+++ b/media/libmediatranscoding/tests/assets/desk_hevc_1920x1080_aac_48KHz_rot90.mp4
Binary files differ
diff --git a/media/libmediatranscoding/tests/assets/jets_hevc_1280x720_20Mbps.mp4 b/media/libmediatranscoding/tests/assets/jets_hevc_1280x720_20Mbps.mp4
new file mode 100644
index 0000000..7794b99
--- /dev/null
+++ b/media/libmediatranscoding/tests/assets/jets_hevc_1280x720_20Mbps.mp4
Binary files differ
diff --git a/media/libmediatranscoding/tests/assets/longtest_15s.mp4 b/media/libmediatranscoding/tests/assets/longtest_15s.mp4
new file mode 100644
index 0000000..b50d8e4
--- /dev/null
+++ b/media/libmediatranscoding/tests/assets/longtest_15s.mp4
Binary files differ
diff --git a/media/libmediatranscoding/tests/assets/plex_hevc_3840x2160_12Mbps.mp4 b/media/libmediatranscoding/tests/assets/plex_hevc_3840x2160_12Mbps.mp4
new file mode 100644
index 0000000..92dda3b
--- /dev/null
+++ b/media/libmediatranscoding/tests/assets/plex_hevc_3840x2160_12Mbps.mp4
Binary files differ
diff --git a/media/libmediatranscoding/tests/assets/plex_hevc_3840x2160_20Mbps.mp4 b/media/libmediatranscoding/tests/assets/plex_hevc_3840x2160_20Mbps.mp4
new file mode 100644
index 0000000..2fe37bd
--- /dev/null
+++ b/media/libmediatranscoding/tests/assets/plex_hevc_3840x2160_20Mbps.mp4
Binary files differ
diff --git a/media/libmediatranscoding/tests/assets/push_assets.sh b/media/libmediatranscoding/tests/assets/push_assets.sh
new file mode 100755
index 0000000..8afc947
--- /dev/null
+++ b/media/libmediatranscoding/tests/assets/push_assets.sh
@@ -0,0 +1,32 @@
+#!/bin/bash
+#
+# Pushes the test assets to /data/local/tmp.
+#
+
+if [ "$SYNC_FINISHED" != true ]; then
+  if [ -z "$ANDROID_BUILD_TOP" ]; then
+      echo "Android build environment not set"
+      exit -1
+  fi
+
+  # ensure we have mm
+  . $ANDROID_BUILD_TOP/build/envsetup.sh
+
+  mm
+
+  echo "waiting for device"
+
+  adb root && adb wait-for-device remount
+fi
+
+echo "Copying files to device"
+
+adb shell mkdir -p /data/local/tmp/TranscodingTestAssets
+
+FILES=$ANDROID_BUILD_TOP/frameworks/av/media/libmediatranscoding/tests/assets/*
+for file in $FILES
+do
+  adb push --sync $file /data/local/tmp/TranscodingTestAssets
+done
+
+echo "Copy done"
diff --git a/media/libmediatranscoding/tests/build_and_run_all_unit_tests.sh b/media/libmediatranscoding/tests/build_and_run_all_unit_tests.sh
index d8e4830..5db9258 100644
--- a/media/libmediatranscoding/tests/build_and_run_all_unit_tests.sh
+++ b/media/libmediatranscoding/tests/build_and_run_all_unit_tests.sh
@@ -3,24 +3,32 @@
 # Run tests in this directory.
 #
 
-if [ -z "$ANDROID_BUILD_TOP" ]; then
-    echo "Android build environment not set"
-    exit -1
+if [ "$SYNC_FINISHED" != true ]; then
+  if [ -z "$ANDROID_BUILD_TOP" ]; then
+      echo "Android build environment not set"
+      exit -1
+  fi
+
+  # ensure we have mm
+  . $ANDROID_BUILD_TOP/build/envsetup.sh
+
+  mm
+
+  echo "waiting for device"
+
+  adb root && adb wait-for-device remount && adb sync
 fi
 
-# ensure we have mm
-. $ANDROID_BUILD_TOP/build/envsetup.sh
-
-mm
-
-echo "waiting for device"
-
-adb root && adb wait-for-device remount && adb sync
-
 echo "========================================"
 
 echo "testing TranscodingClientManager"
-adb shell /data/nativetest64/TranscodingClientManager_tests/TranscodingClientManager_tests
+#adb shell /data/nativetest64/TranscodingClientManager_tests/TranscodingClientManager_tests
+adb shell /data/nativetest/TranscodingClientManager_tests/TranscodingClientManager_tests
 
 echo "testing AdjustableMaxPriorityQueue"
-adb shell /data/nativetest64/AdjustableMaxPriorityQueue_tests/AdjustableMaxPriorityQueue_tests
+#adb shell /data/nativetest64/AdjustableMaxPriorityQueue_tests/AdjustableMaxPriorityQueue_tests
+adb shell /data/nativetest/AdjustableMaxPriorityQueue_tests/AdjustableMaxPriorityQueue_tests
+
+echo "testing TranscodingSessionController"
+#adb shell /data/nativetest64/TranscodingSessionController_tests/TranscodingSessionController_tests
+adb shell /data/nativetest/TranscodingSessionController_tests/TranscodingSessionController_tests
diff --git a/media/libmediatranscoding/transcoder/Android.bp b/media/libmediatranscoding/transcoder/Android.bp
new file mode 100644
index 0000000..1896412
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/Android.bp
@@ -0,0 +1,75 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+cc_defaults {
+    name: "mediatranscoder_defaults",
+
+    srcs: [
+        "MediaSampleQueue.cpp",
+        "MediaSampleReaderNDK.cpp",
+        "MediaSampleWriter.cpp",
+        "MediaTrackTranscoder.cpp",
+        "MediaTranscoder.cpp",
+        "NdkCommon.cpp",
+        "PassthroughTrackTranscoder.cpp",
+        "VideoTrackTranscoder.cpp",
+    ],
+
+    shared_libs: [
+        "libbase",
+        "libcutils",
+        "libmediandk",
+        "libnativewindow",
+        "libutils",
+        "libbinder_ndk",
+    ],
+
+    export_include_dirs: [
+        "include",
+    ],
+
+    cflags: [
+        "-Wall",
+        "-Werror",
+        "-Wformat",
+        "-Wno-error=deprecated-declarations",
+        "-Wthread-safety",
+        "-Wunused",
+        "-Wunreachable-code",
+    ],
+
+    sanitize: {
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+        cfi: true,
+    },
+}
+
+cc_library_shared {
+    name: "libmediatranscoder",
+    defaults: ["mediatranscoder_defaults"],
+}
+
+cc_library_shared {
+    name: "libmediatranscoder_asan",
+    defaults: ["mediatranscoder_defaults"],
+
+    sanitize: {
+        address: true,
+    },
+}
diff --git a/media/libmediatranscoding/transcoder/MediaSampleQueue.cpp b/media/libmediatranscoding/transcoder/MediaSampleQueue.cpp
new file mode 100644
index 0000000..b085c98
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/MediaSampleQueue.cpp
@@ -0,0 +1,63 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "MediaSampleQueue"
+
+#include <android-base/logging.h>
+#include <media/MediaSampleQueue.h>
+
+namespace android {
+
+bool MediaSampleQueue::enqueue(const std::shared_ptr<MediaSample>& sample) {
+    std::scoped_lock<std::mutex> lock(mMutex);
+    if (!mAborted) {
+        mSampleQueue.push(sample);
+        mCondition.notify_one();
+    }
+    return mAborted;
+}
+
+// Unfortunately std::unique_lock is incompatible with -Wthread-safety
+bool MediaSampleQueue::dequeue(std::shared_ptr<MediaSample>* sample) NO_THREAD_SAFETY_ANALYSIS {
+    std::unique_lock<std::mutex> lock(mMutex);
+    while (mSampleQueue.empty() && !mAborted) {
+        mCondition.wait(lock);
+    }
+
+    if (!mAborted) {
+        if (sample != nullptr) {
+            *sample = mSampleQueue.front();
+        }
+        mSampleQueue.pop();
+    }
+    return mAborted;
+}
+
+bool MediaSampleQueue::isEmpty() {
+    std::scoped_lock<std::mutex> lock(mMutex);
+    return mSampleQueue.empty();
+}
+
+void MediaSampleQueue::abort() {
+    std::scoped_lock<std::mutex> lock(mMutex);
+    // Clear the queue and notify consumers.
+    std::queue<std::shared_ptr<MediaSample>> empty = {};
+    std::swap(mSampleQueue, empty);
+    mAborted = true;
+    mCondition.notify_all();
+}
+}  // namespace android
\ No newline at end of file
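
MediaSampleQueue above is a small blocking producer/consumer primitive: enqueue() reports whether the queue has been aborted, dequeue() blocks until a sample arrives or abort() is called, and abort() drops any pending samples and wakes all waiters. A minimal usage sketch, assuming only the interface shown in this file; processSample() is a hypothetical stand-in for the caller's real work:

#include <memory>
#include <thread>

#include <media/MediaSampleQueue.h>

using android::MediaSample;
using android::MediaSampleQueue;

// Hypothetical placeholder for whatever the consumer does with each sample.
static void processSample(const std::shared_ptr<MediaSample>& /*sample*/) {}

static void consumeUntilAborted(MediaSampleQueue& queue) {
    std::shared_ptr<MediaSample> sample;
    // dequeue() returns true once abort() has been called, so this loop drains
    // samples until the producer shuts the queue down.
    while (!queue.dequeue(&sample)) {
        processSample(sample);
    }
}

static void producerConsumerSketch(const std::shared_ptr<MediaSample>& sample) {
    MediaSampleQueue queue;
    std::thread consumer([&queue] { consumeUntilAborted(queue); });

    // enqueue() also reports the aborted state; a true return means the sample was dropped.
    (void)queue.enqueue(sample);

    queue.abort();  // Clears anything still pending and unblocks the consumer.
    consumer.join();
}
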
diff --git a/media/libmediatranscoding/transcoder/MediaSampleReaderNDK.cpp b/media/libmediatranscoding/transcoder/MediaSampleReaderNDK.cpp
new file mode 100644
index 0000000..53d567e
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/MediaSampleReaderNDK.cpp
@@ -0,0 +1,437 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "MediaSampleReader"
+
+#include <android-base/logging.h>
+#include <media/MediaSampleReaderNDK.h>
+
+#include <algorithm>
+#include <cmath>
+
+namespace android {
+
+// Check that the extractor sample flags have the expected NDK meaning.
+static_assert(SAMPLE_FLAG_SYNC_SAMPLE == AMEDIAEXTRACTOR_SAMPLE_FLAG_SYNC,
+              "Sample flag mismatch: SYNC_SAMPLE");
+
+// static
+std::shared_ptr<MediaSampleReader> MediaSampleReaderNDK::createFromFd(int fd, size_t offset,
+                                                                      size_t size) {
+    AMediaExtractor* extractor = AMediaExtractor_new();
+    if (extractor == nullptr) {
+        LOG(ERROR) << "Unable to allocate AMediaExtractor";
+        return nullptr;
+    }
+
+    media_status_t status = AMediaExtractor_setDataSourceFd(extractor, fd, offset, size);
+    if (status != AMEDIA_OK) {
+        LOG(ERROR) << "AMediaExtractor_setDataSourceFd returned error: " << status;
+        AMediaExtractor_delete(extractor);
+        return nullptr;
+    }
+
+    auto sampleReader = std::shared_ptr<MediaSampleReaderNDK>(new MediaSampleReaderNDK(extractor));
+    return sampleReader;
+}
+
+MediaSampleReaderNDK::MediaSampleReaderNDK(AMediaExtractor* extractor)
+      : mExtractor(extractor), mTrackCount(AMediaExtractor_getTrackCount(mExtractor)) {
+    if (mTrackCount > 0) {
+        mTrackCursors.resize(mTrackCount);
+    }
+}
+
+MediaSampleReaderNDK::~MediaSampleReaderNDK() {
+    if (mExtractor != nullptr) {
+        AMediaExtractor_delete(mExtractor);
+    }
+}
+
+void MediaSampleReaderNDK::advanceTrack_l(int trackIndex) {
+    if (!mEnforceSequentialAccess) {
+        // Note: Positioning the extractor before advancing the track is needed for two reasons:
+        // 1. To enable multiple advances without explicitly letting the extractor catch up.
+        // 2. To prevent the extractor from being farther than "next".
+        (void)moveToTrack_l(trackIndex);
+    }
+
+    SampleCursor& cursor = mTrackCursors[trackIndex];
+    cursor.previous = cursor.current;
+    cursor.current = cursor.next;
+    cursor.next.reset();
+
+    if (mEnforceSequentialAccess && trackIndex == mExtractorTrackIndex) {
+        while (advanceExtractor_l()) {
+            SampleCursor& cursor = mTrackCursors[mExtractorTrackIndex];
+            if (cursor.current.isSet && cursor.current.index == mExtractorSampleIndex) {
+                if (mExtractorTrackIndex != trackIndex) {
+                    mTrackSignals[mExtractorTrackIndex].notify_all();
+                }
+                break;
+            }
+        }
+    }
+    return;
+}
+
+bool MediaSampleReaderNDK::advanceExtractor_l() {
+    // Reset the "next" sample time whenever the extractor advances past a sample that is current,
+    // to ensure that "next" is appropriately updated when the extractor advances over the next
+    // sample of that track.
+    if (mTrackCursors[mExtractorTrackIndex].current.isSet &&
+        mTrackCursors[mExtractorTrackIndex].current.index == mExtractorSampleIndex) {
+        mTrackCursors[mExtractorTrackIndex].next.reset();
+    }
+
+    if (!AMediaExtractor_advance(mExtractor)) {
+        mEosReached = true;
+        for (auto it = mTrackSignals.begin(); it != mTrackSignals.end(); ++it) {
+            it->second.notify_all();
+        }
+        return false;
+    }
+
+    mExtractorTrackIndex = AMediaExtractor_getSampleTrackIndex(mExtractor);
+    mExtractorSampleIndex++;
+
+    SampleCursor& cursor = mTrackCursors[mExtractorTrackIndex];
+    if (mExtractorSampleIndex > cursor.previous.index) {
+        if (!cursor.current.isSet) {
+            cursor.current.set(mExtractorSampleIndex, AMediaExtractor_getSampleTime(mExtractor));
+        } else if (!cursor.next.isSet && mExtractorSampleIndex > cursor.current.index) {
+            cursor.next.set(mExtractorSampleIndex, AMediaExtractor_getSampleTime(mExtractor));
+        }
+    }
+
+    return true;
+}
+
+media_status_t MediaSampleReaderNDK::seekExtractorBackwards_l(int64_t targetTimeUs,
+                                                              int targetTrackIndex,
+                                                              uint64_t targetSampleIndex) {
+    if (targetSampleIndex > mExtractorSampleIndex) {
+        LOG(ERROR) << "Error: Forward seek is not supported";
+        return AMEDIA_ERROR_UNSUPPORTED;
+    }
+
+    // AMediaExtractor supports reading negative timestamps but does not support seeking to them.
+    const int64_t seekToTimeUs = std::max(targetTimeUs, (int64_t)0);
+    media_status_t status =
+            AMediaExtractor_seekTo(mExtractor, seekToTimeUs, AMEDIAEXTRACTOR_SEEK_PREVIOUS_SYNC);
+    if (status != AMEDIA_OK) {
+        LOG(ERROR) << "Unable to seek to " << seekToTimeUs << ", target " << targetTimeUs;
+        return status;
+    }
+    mExtractorTrackIndex = AMediaExtractor_getSampleTrackIndex(mExtractor);
+    int64_t sampleTimeUs = AMediaExtractor_getSampleTime(mExtractor);
+
+    while (sampleTimeUs != targetTimeUs || mExtractorTrackIndex != targetTrackIndex) {
+        if (!AMediaExtractor_advance(mExtractor)) {
+            return AMEDIA_ERROR_END_OF_STREAM;
+        }
+        mExtractorTrackIndex = AMediaExtractor_getSampleTrackIndex(mExtractor);
+        sampleTimeUs = AMediaExtractor_getSampleTime(mExtractor);
+    }
+    mExtractorSampleIndex = targetSampleIndex;
+    return AMEDIA_OK;
+}
+
+media_status_t MediaSampleReaderNDK::moveToSample_l(SamplePosition& pos, int trackIndex) {
+    // Seek backwards if the extractor is ahead of the sample.
+    if (pos.isSet && mExtractorSampleIndex > pos.index) {
+        media_status_t status = seekExtractorBackwards_l(pos.timeStampUs, trackIndex, pos.index);
+        if (status != AMEDIA_OK) return status;
+    }
+
+    // Advance until extractor points to the sample.
+    while (!(pos.isSet && pos.index == mExtractorSampleIndex)) {
+        if (!advanceExtractor_l()) {
+            return AMEDIA_ERROR_END_OF_STREAM;
+        }
+    }
+
+    return AMEDIA_OK;
+}
+
+media_status_t MediaSampleReaderNDK::moveToTrack_l(int trackIndex) {
+    return moveToSample_l(mTrackCursors[trackIndex].current, trackIndex);
+}
+
+media_status_t MediaSampleReaderNDK::waitForTrack_l(int trackIndex,
+                                                    std::unique_lock<std::mutex>& lockHeld) {
+    while (trackIndex != mExtractorTrackIndex && !mEosReached && mEnforceSequentialAccess) {
+        mTrackSignals[trackIndex].wait(lockHeld);
+    }
+
+    if (mEosReached) {
+        return AMEDIA_ERROR_END_OF_STREAM;
+    }
+    return AMEDIA_OK;
+}
+
+media_status_t MediaSampleReaderNDK::primeExtractorForTrack_l(
+        int trackIndex, std::unique_lock<std::mutex>& lockHeld) {
+    if (mExtractorTrackIndex < 0) {
+        mExtractorTrackIndex = AMediaExtractor_getSampleTrackIndex(mExtractor);
+        if (mExtractorTrackIndex < 0) {
+            return AMEDIA_ERROR_END_OF_STREAM;
+        }
+        mTrackCursors[mExtractorTrackIndex].current.set(mExtractorSampleIndex,
+                                                        AMediaExtractor_getSampleTime(mExtractor));
+    }
+
+    if (mEnforceSequentialAccess) {
+        return waitForTrack_l(trackIndex, lockHeld);
+    } else {
+        return moveToTrack_l(trackIndex);
+    }
+}
+
+media_status_t MediaSampleReaderNDK::selectTrack(int trackIndex) {
+    std::scoped_lock lock(mExtractorMutex);
+
+    if (trackIndex < 0 || trackIndex >= mTrackCount) {
+        LOG(ERROR) << "Invalid trackIndex " << trackIndex << " for trackCount " << mTrackCount;
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    } else if (mTrackSignals.find(trackIndex) != mTrackSignals.end()) {
+        LOG(ERROR) << "TrackIndex " << trackIndex << " already selected";
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    } else if (mExtractorTrackIndex >= 0) {
+        LOG(ERROR) << "Tracks must be selected before sample reading begins.";
+        return AMEDIA_ERROR_UNSUPPORTED;
+    }
+
+    media_status_t status = AMediaExtractor_selectTrack(mExtractor, trackIndex);
+    if (status != AMEDIA_OK) {
+        LOG(ERROR) << "AMediaExtractor_selectTrack returned error: " << status;
+        return status;
+    }
+
+    mTrackSignals.emplace(std::piecewise_construct, std::forward_as_tuple(trackIndex),
+                          std::forward_as_tuple());
+    return AMEDIA_OK;
+}
+
+media_status_t MediaSampleReaderNDK::setEnforceSequentialAccess(bool enforce) {
+    std::scoped_lock lock(mExtractorMutex);
+
+    if (mEnforceSequentialAccess && !enforce) {
+        // If switching from enforcing to not enforcing sequential access, there may be threads
+        // waiting that need to be woken up.
+        for (auto it = mTrackSignals.begin(); it != mTrackSignals.end(); ++it) {
+            it->second.notify_all();
+        }
+    } else if (!mEnforceSequentialAccess && enforce && mExtractorTrackIndex >= 0) {
+        // If switching from not enforcing to enforcing sequential access, the extractor needs to
+        // be positioned at the track that is farthest behind so that it won't get stuck waiting.
+        struct {
+            SamplePosition* pos = nullptr;
+            int trackIndex = -1;
+        } earliestSample;
+
+        for (int trackIndex = 0; trackIndex < mTrackCount; ++trackIndex) {
+            SamplePosition& lastKnownTrackPosition = mTrackCursors[trackIndex].current.isSet
+                                                             ? mTrackCursors[trackIndex].current
+                                                             : mTrackCursors[trackIndex].previous;
+
+            if (lastKnownTrackPosition.isSet) {
+                if (earliestSample.pos == nullptr ||
+                    earliestSample.pos->index > lastKnownTrackPosition.index) {
+                    earliestSample.pos = &lastKnownTrackPosition;
+                    earliestSample.trackIndex = trackIndex;
+                }
+            }
+        }
+
+        if (earliestSample.pos == nullptr) {
+            LOG(ERROR) << "No known sample position found";
+            return AMEDIA_ERROR_UNKNOWN;
+        }
+
+        media_status_t status = moveToSample_l(*earliestSample.pos, earliestSample.trackIndex);
+        if (status != AMEDIA_OK) return status;
+
+        while (!(mTrackCursors[mExtractorTrackIndex].current.isSet &&
+                 mTrackCursors[mExtractorTrackIndex].current.index == mExtractorSampleIndex)) {
+            if (!advanceExtractor_l()) {
+                return AMEDIA_ERROR_END_OF_STREAM;
+            }
+        }
+    }
+
+    mEnforceSequentialAccess = enforce;
+    return AMEDIA_OK;
+}
+
+media_status_t MediaSampleReaderNDK::getEstimatedBitrateForTrack(int trackIndex, int32_t* bitrate) {
+    std::scoped_lock lock(mExtractorMutex);
+    media_status_t status = AMEDIA_OK;
+
+    if (mTrackSignals.find(trackIndex) == mTrackSignals.end()) {
+        LOG(ERROR) << "Track is not selected.";
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    } else if (bitrate == nullptr) {
+        LOG(ERROR) << "bitrate pointer is NULL.";
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    } else if (mExtractorTrackIndex >= 0) {
+        LOG(ERROR) << "getEstimatedBitrateForTrack must be called before sample reading begins.";
+        return AMEDIA_ERROR_UNSUPPORTED;
+    }
+
+    // Sample the track.
+    static constexpr int64_t kSamplingDurationUs = 10 * 1000 * 1000;  // 10 seconds
+    size_t lastSampleSize = 0;
+    size_t totalSampleSize = 0;
+    int64_t firstSampleTimeUs = 0;
+    int64_t lastSampleTimeUs = 0;
+
+    do {
+        if (AMediaExtractor_getSampleTrackIndex(mExtractor) == trackIndex) {
+            lastSampleTimeUs = AMediaExtractor_getSampleTime(mExtractor);
+            if (totalSampleSize == 0) {
+                firstSampleTimeUs = lastSampleTimeUs;
+            }
+
+            lastSampleSize = AMediaExtractor_getSampleSize(mExtractor);
+            totalSampleSize += lastSampleSize;
+        }
+    } while ((lastSampleTimeUs - firstSampleTimeUs) < kSamplingDurationUs &&
+             AMediaExtractor_advance(mExtractor));
+
+    // Reset the extractor to the beginning.
+    status = AMediaExtractor_seekTo(mExtractor, 0, AMEDIAEXTRACTOR_SEEK_PREVIOUS_SYNC);
+    if (status != AMEDIA_OK) {
+        LOG(ERROR) << "Unable to reset extractor: " << status;
+        return status;
+    }
+
+    int64_t durationUs = 0;
+    const int64_t sampledDurationUs = lastSampleTimeUs - firstSampleTimeUs;
+
+    if (sampledDurationUs < kSamplingDurationUs) {
+        // Track is shorter than the sampling duration so use the full track duration to get better
+        // accuracy (i.e. don't skip the last sample).
+        AMediaFormat* trackFormat = getTrackFormat(trackIndex);
+        if (!AMediaFormat_getInt64(trackFormat, AMEDIAFORMAT_KEY_DURATION, &durationUs)) {
+            durationUs = 0;
+        }
+        AMediaFormat_delete(trackFormat);
+    }
+
+    if (durationUs == 0) {
+        // The sampled duration does not account for the last sample's duration so its size should
+        // not be included either.
+        totalSampleSize -= lastSampleSize;
+        durationUs = sampledDurationUs;
+    }
+
+    if (totalSampleSize == 0 || durationUs <= 0) {
+        LOG(ERROR) << "Unable to estimate track bitrate";
+        return AMEDIA_ERROR_MALFORMED;
+    }
+
+    *bitrate = roundf((float)totalSampleSize * 8 * 1000000 / durationUs);
+    return AMEDIA_OK;
+}
+
+media_status_t MediaSampleReaderNDK::getSampleInfoForTrack(int trackIndex, MediaSampleInfo* info) {
+    std::unique_lock<std::mutex> lock(mExtractorMutex);
+
+    if (mTrackSignals.find(trackIndex) == mTrackSignals.end()) {
+        LOG(ERROR) << "Track not selected.";
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    } else if (info == nullptr) {
+        LOG(ERROR) << "MediaSampleInfo pointer is NULL.";
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    media_status_t status = primeExtractorForTrack_l(trackIndex, lock);
+    if (status == AMEDIA_OK) {
+        info->presentationTimeUs = AMediaExtractor_getSampleTime(mExtractor);
+        info->flags = AMediaExtractor_getSampleFlags(mExtractor);
+        info->size = AMediaExtractor_getSampleSize(mExtractor);
+    } else if (status == AMEDIA_ERROR_END_OF_STREAM) {
+        info->presentationTimeUs = 0;
+        info->flags = SAMPLE_FLAG_END_OF_STREAM;
+        info->size = 0;
+    }
+    return status;
+}
+
+media_status_t MediaSampleReaderNDK::readSampleDataForTrack(int trackIndex, uint8_t* buffer,
+                                                            size_t bufferSize) {
+    std::unique_lock<std::mutex> lock(mExtractorMutex);
+
+    if (mTrackSignals.find(trackIndex) == mTrackSignals.end()) {
+        LOG(ERROR) << "Track not selected.";
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    } else if (buffer == nullptr) {
+        LOG(ERROR) << "buffer pointer is NULL";
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    media_status_t status = primeExtractorForTrack_l(trackIndex, lock);
+    if (status != AMEDIA_OK) {
+        return status;
+    }
+
+    ssize_t sampleSize = AMediaExtractor_getSampleSize(mExtractor);
+    if (bufferSize < sampleSize) {
+        LOG(ERROR) << "Buffer is too small for sample, " << bufferSize << " vs " << sampleSize;
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    ssize_t bytesRead = AMediaExtractor_readSampleData(mExtractor, buffer, bufferSize);
+    if (bytesRead < sampleSize) {
+        LOG(ERROR) << "Unable to read full sample, " << bytesRead << " vs " << sampleSize;
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    advanceTrack_l(trackIndex);
+
+    return AMEDIA_OK;
+}
+
+void MediaSampleReaderNDK::advanceTrack(int trackIndex) {
+    std::scoped_lock lock(mExtractorMutex);
+
+    if (mTrackSignals.find(trackIndex) != mTrackSignals.end()) {
+        advanceTrack_l(trackIndex);
+    } else {
+        LOG(ERROR) << "Trying to advance a track that is not selected (#" << trackIndex << ")";
+    }
+}
+
+AMediaFormat* MediaSampleReaderNDK::getFileFormat() {
+    return AMediaExtractor_getFileFormat(mExtractor);
+}
+
+size_t MediaSampleReaderNDK::getTrackCount() const {
+    return mTrackCount;
+}
+
+AMediaFormat* MediaSampleReaderNDK::getTrackFormat(int trackIndex) {
+    if (trackIndex < 0 || trackIndex >= mTrackCount) {
+        LOG(ERROR) << "Invalid trackIndex " << trackIndex << " for trackCount " << mTrackCount;
+        return AMediaFormat_new();
+    }
+
+    return AMediaExtractor_getTrackFormat(mExtractor, trackIndex);
+}
+
+}  // namespace android
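
The reader above wraps AMediaExtractor behind a per-track pull API: tracks are selected up front, then the caller repeatedly queries getSampleInfoForTrack() and copies data with readSampleDataForTrack(), which also advances that track. A hedged read-loop sketch for a single track, using only the methods defined in this file; MediaSampleInfo and SAMPLE_FLAG_END_OF_STREAM come from the sample headers pulled in by MediaSampleReaderNDK.h, and the fd handling assumes the extractor keeps its own reference to the source after createFromFd():

#include <fcntl.h>
#include <unistd.h>

#include <vector>

#include <media/MediaSampleReaderNDK.h>

using namespace android;

// Drains every sample of track 0 from the given file. Error handling is minimal.
static media_status_t drainFirstTrack(const char* path) {
    int fd = open(path, O_RDONLY);
    if (fd < 0) return AMEDIA_ERROR_IO;

    const off_t fileSize = lseek(fd, 0, SEEK_END);
    auto reader = MediaSampleReaderNDK::createFromFd(fd, 0 /* offset */, fileSize);
    close(fd);  // Assumption: the extractor holds its own reference, so the fd can be closed here.
    if (reader == nullptr) return AMEDIA_ERROR_UNKNOWN;

    media_status_t status = reader->selectTrack(0);
    if (status != AMEDIA_OK) return status;

    std::vector<uint8_t> buffer;
    MediaSampleInfo info;
    while (true) {
        status = reader->getSampleInfoForTrack(0, &info);
        if (status == AMEDIA_ERROR_END_OF_STREAM) {
            return AMEDIA_OK;  // info.flags also carries SAMPLE_FLAG_END_OF_STREAM here.
        } else if (status != AMEDIA_OK) {
            return status;
        }

        buffer.resize(info.size);
        // readSampleDataForTrack() copies the sample and advances the track cursor.
        status = reader->readSampleDataForTrack(0, buffer.data(), buffer.size());
        if (status != AMEDIA_OK) return status;
    }
}
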
diff --git a/media/libmediatranscoding/transcoder/MediaSampleWriter.cpp b/media/libmediatranscoding/transcoder/MediaSampleWriter.cpp
new file mode 100644
index 0000000..afa5021
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/MediaSampleWriter.cpp
@@ -0,0 +1,310 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "MediaSampleWriter"
+
+#include <android-base/logging.h>
+#include <media/MediaSampleWriter.h>
+#include <media/NdkMediaMuxer.h>
+
+namespace android {
+
+class DefaultMuxer : public MediaSampleWriterMuxerInterface {
+public:
+    // MediaSampleWriterMuxerInterface
+    ssize_t addTrack(AMediaFormat* trackFormat) override {
+        // If the track format has rotation, need to call AMediaMuxer_setOrientationHint
+        // to set the rotation. Muxer doesn't take rotation specified on the track.
+        const char* mime;
+        if (AMediaFormat_getString(trackFormat, AMEDIAFORMAT_KEY_MIME, &mime) &&
+            strncmp(mime, "video/", 6) == 0) {
+            int32_t rotation;
+            if (AMediaFormat_getInt32(trackFormat, AMEDIAFORMAT_KEY_ROTATION, &rotation) &&
+                (rotation != 0)) {
+                AMediaMuxer_setOrientationHint(mMuxer, rotation);
+            }
+        }
+
+        return AMediaMuxer_addTrack(mMuxer, trackFormat);
+    }
+    media_status_t start() override { return AMediaMuxer_start(mMuxer); }
+    media_status_t writeSampleData(size_t trackIndex, const uint8_t* data,
+                                   const AMediaCodecBufferInfo* info) override {
+        return AMediaMuxer_writeSampleData(mMuxer, trackIndex, data, info);
+    }
+    media_status_t stop() override { return AMediaMuxer_stop(mMuxer); }
+    // ~MediaSampleWriterMuxerInterface
+
+    static std::shared_ptr<DefaultMuxer> create(int fd) {
+        AMediaMuxer* ndkMuxer = AMediaMuxer_new(fd, AMEDIAMUXER_OUTPUT_FORMAT_MPEG_4);
+        if (ndkMuxer == nullptr) {
+            LOG(ERROR) << "Unable to create AMediaMuxer";
+            return nullptr;
+        }
+
+        return std::make_shared<DefaultMuxer>(ndkMuxer);
+    }
+
+    ~DefaultMuxer() {
+        if (mMuxer != nullptr) {
+            AMediaMuxer_delete(mMuxer);
+        }
+    }
+
+    DefaultMuxer(AMediaMuxer* muxer) : mMuxer(muxer) {}
+    DefaultMuxer() = delete;
+
+private:
+    AMediaMuxer* mMuxer;
+};
+
+// static
+std::shared_ptr<MediaSampleWriter> MediaSampleWriter::Create() {
+    return std::shared_ptr<MediaSampleWriter>(new MediaSampleWriter());
+}
+
+MediaSampleWriter::~MediaSampleWriter() {
+    if (mState == STARTED) {
+        stop();  // Join thread.
+    }
+}
+
+bool MediaSampleWriter::init(int fd, const std::weak_ptr<CallbackInterface>& callbacks) {
+    return init(DefaultMuxer::create(fd), callbacks);
+}
+
+bool MediaSampleWriter::init(const std::shared_ptr<MediaSampleWriterMuxerInterface>& muxer,
+                             const std::weak_ptr<CallbackInterface>& callbacks) {
+    if (callbacks.lock() == nullptr) {
+        LOG(ERROR) << "Callback object cannot be null";
+        return false;
+    } else if (muxer == nullptr) {
+        LOG(ERROR) << "Muxer cannot be null";
+        return false;
+    }
+
+    std::scoped_lock lock(mMutex);
+    if (mState != UNINITIALIZED) {
+        LOG(ERROR) << "Sample writer is already initialized";
+        return false;
+    }
+
+    mState = INITIALIZED;
+    mMuxer = muxer;
+    mCallbacks = callbacks;
+    return true;
+}
+
+MediaSampleWriter::MediaSampleConsumerFunction MediaSampleWriter::addTrack(
+        const std::shared_ptr<AMediaFormat>& trackFormat) {
+    if (trackFormat == nullptr) {
+        LOG(ERROR) << "Track format must be non-null";
+        return nullptr;
+    }
+
+    std::scoped_lock lock(mMutex);
+    if (mState != INITIALIZED) {
+        LOG(ERROR) << "Muxer needs to be initialized when adding tracks.";
+        return nullptr;
+    }
+    ssize_t trackIndexOrError = mMuxer->addTrack(trackFormat.get());
+    if (trackIndexOrError < 0) {
+        LOG(ERROR) << "Failed to add media track to muxer: " << trackIndexOrError;
+        return nullptr;
+    }
+    const size_t trackIndex = static_cast<size_t>(trackIndexOrError);
+
+    int64_t durationUs;
+    if (!AMediaFormat_getInt64(trackFormat.get(), AMEDIAFORMAT_KEY_DURATION, &durationUs)) {
+        durationUs = 0;
+    }
+
+    mTracks.emplace(trackIndex, durationUs);
+
+    return [self = shared_from_this(), trackIndex](const std::shared_ptr<MediaSample>& sample) {
+        self->addSampleToTrack(trackIndex, sample);
+    };
+}
+
+void MediaSampleWriter::addSampleToTrack(size_t trackIndex,
+                                         const std::shared_ptr<MediaSample>& sample) {
+    if (sample == nullptr) return;
+
+    bool wasEmpty;
+    {
+        std::scoped_lock lock(mMutex);
+        wasEmpty = mSampleQueue.empty();
+        mSampleQueue.push(std::make_pair(trackIndex, sample));
+    }
+
+    if (wasEmpty) {
+        mSampleSignal.notify_one();
+    }
+}
+
+bool MediaSampleWriter::start() {
+    std::scoped_lock lock(mMutex);
+
+    if (mTracks.size() == 0) {
+        LOG(ERROR) << "No tracks to write.";
+        return false;
+    } else if (mState != INITIALIZED) {
+        LOG(ERROR) << "Sample writer is not initialized";
+        return false;
+    }
+
+    mState = STARTED;
+    mThread = std::thread([this] {
+        media_status_t status = writeSamples();
+        if (auto callbacks = mCallbacks.lock()) {
+            callbacks->onFinished(this, status);
+        }
+    });
+    return true;
+}
+
+bool MediaSampleWriter::stop() {
+    {
+        std::scoped_lock lock(mMutex);
+        if (mState != STARTED) {
+            LOG(ERROR) << "Sample writer is not started.";
+            return false;
+        }
+        mState = STOPPED;
+    }
+
+    mSampleSignal.notify_all();
+    mThread.join();
+    return true;
+}
+
+media_status_t MediaSampleWriter::writeSamples() {
+    media_status_t muxerStatus = mMuxer->start();
+    if (muxerStatus != AMEDIA_OK) {
+        LOG(ERROR) << "Error starting muxer: " << muxerStatus;
+        return muxerStatus;
+    }
+
+    media_status_t writeStatus = runWriterLoop();
+    if (writeStatus != AMEDIA_OK) {
+        LOG(ERROR) << "Error writing samples: " << writeStatus;
+    }
+
+    muxerStatus = mMuxer->stop();
+    if (muxerStatus != AMEDIA_OK) {
+        LOG(ERROR) << "Error stopping muxer: " << muxerStatus;
+    }
+
+    return writeStatus != AMEDIA_OK ? writeStatus : muxerStatus;
+}
+
+media_status_t MediaSampleWriter::runWriterLoop() NO_THREAD_SAFETY_ANALYSIS {
+    AMediaCodecBufferInfo bufferInfo;
+    int32_t lastProgressUpdate = 0;
+    int trackEosCount = 0;
+
+    // Set the "primary" track that will be used to determine progress to the track with longest
+    // duration.
+    int primaryTrackIndex = -1;
+    int64_t longestDurationUs = 0;
+    for (auto it = mTracks.begin(); it != mTracks.end(); ++it) {
+        if (it->second.mDurationUs > longestDurationUs) {
+            primaryTrackIndex = it->first;
+            longestDurationUs = it->second.mDurationUs;
+        }
+    }
+
+    while (true) {
+        if (trackEosCount >= mTracks.size()) {
+            break;
+        }
+
+        size_t trackIndex;
+        std::shared_ptr<MediaSample> sample;
+        {
+            std::unique_lock lock(mMutex);
+            while (mSampleQueue.empty() && mState == STARTED) {
+                mSampleSignal.wait(lock);
+            }
+
+            if (mState != STARTED) {
+                return AMEDIA_ERROR_UNKNOWN;  // TODO(lnilsson): Custom error code.
+            }
+
+            auto& topEntry = mSampleQueue.top();
+            trackIndex = topEntry.first;
+            sample = topEntry.second;
+            mSampleQueue.pop();
+        }
+
+        TrackRecord& track = mTracks[trackIndex];
+
+        if (sample->info.flags & SAMPLE_FLAG_END_OF_STREAM) {
+            if (track.mReachedEos) {
+                continue;
+            }
+
+            // Track reached end of stream.
+            track.mReachedEos = true;
+            trackEosCount++;
+
+            // Preserve source track duration by setting the appropriate timestamp on the
+            // empty End-Of-Stream sample.
+            if (track.mDurationUs > 0 && track.mFirstSampleTimeSet) {
+                sample->info.presentationTimeUs = track.mDurationUs + track.mFirstSampleTimeUs;
+            }
+        }
+
+        track.mPrevSampleTimeUs = sample->info.presentationTimeUs;
+        if (!track.mFirstSampleTimeSet) {
+            // Record the first sample's timestamp in order to translate duration to EOS
+            // time for tracks that do not start at 0.
+            track.mFirstSampleTimeUs = sample->info.presentationTimeUs;
+            track.mFirstSampleTimeSet = true;
+        }
+
+        bufferInfo.offset = sample->dataOffset;
+        bufferInfo.size = sample->info.size;
+        bufferInfo.flags = sample->info.flags;
+        bufferInfo.presentationTimeUs = sample->info.presentationTimeUs;
+
+        media_status_t status = mMuxer->writeSampleData(trackIndex, sample->buffer, &bufferInfo);
+        if (status != AMEDIA_OK) {
+            LOG(ERROR) << "writeSampleData returned " << status;
+            return status;
+        }
+        sample.reset();
+
+        // TODO(lnilsson): Add option to toggle progress reporting on/off.
+        if (trackIndex == primaryTrackIndex) {
+            const int64_t elapsed = track.mPrevSampleTimeUs - track.mFirstSampleTimeUs;
+            int32_t progress = (elapsed * 100) / track.mDurationUs;
+            progress = std::clamp(progress, 0, 100);
+
+            if (progress > lastProgressUpdate) {
+                if (auto callbacks = mCallbacks.lock()) {
+                    callbacks->onProgressUpdate(this, progress);
+                }
+                lastProgressUpdate = progress;
+            }
+        }
+    }
+
+    return AMEDIA_OK;
+}
+}  // namespace android
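
The writer above follows a Create()/init()/addTrack()/start()/stop() lifecycle, with each addTrack() call returning a consumer function that queues samples for that track (and keeps the writer alive via shared_from_this). A client-side wiring sketch; the nested CallbackInterface and its method signatures are inferred from how writeSamples() and runWriterLoop() invoke them above (onFinished(this, status), onProgressUpdate(this, progress)), so treat them as assumptions if the header declares more:

#include <memory>

#include <android-base/logging.h>
#include <media/MediaSampleWriter.h>
#include <media/NdkMediaFormat.h>

namespace android {

// Minimal callback implementation; method signatures assumed from the calls above.
class WriterObserver : public MediaSampleWriter::CallbackInterface {
public:
    void onFinished(const MediaSampleWriter* /*writer*/, media_status_t status) override {
        LOG(INFO) << "Writer finished, status " << status;
    }
    void onProgressUpdate(const MediaSampleWriter* /*writer*/, int32_t progress) override {
        LOG(INFO) << "Writer progress " << progress << "%";
    }
};

// Returns the per-track consumer on success, or nullptr on failure. The caller must
// keep the observer alive, since init() only stores a weak reference to it.
static MediaSampleWriter::MediaSampleConsumerFunction startWriter(
        int dstFd, const std::shared_ptr<AMediaFormat>& trackFormat,
        const std::shared_ptr<WriterObserver>& observer) {
    auto writer = MediaSampleWriter::Create();
    if (!writer->init(dstFd, observer)) {
        return nullptr;
    }

    auto consumer = writer->addTrack(trackFormat);
    if (consumer == nullptr || !writer->start()) {
        return nullptr;
    }

    // Samples produced elsewhere (e.g. by a track transcoder) are fed through
    // consumer(sample); one EOS-flagged sample per track ends the writer loop.
    return consumer;
}

}  // namespace android
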
diff --git a/media/libmediatranscoding/transcoder/MediaTrackTranscoder.cpp b/media/libmediatranscoding/transcoder/MediaTrackTranscoder.cpp
new file mode 100644
index 0000000..698594f
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/MediaTrackTranscoder.cpp
@@ -0,0 +1,135 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "MediaTrackTranscoder"
+
+#include <android-base/logging.h>
+#include <media/MediaTrackTranscoder.h>
+#include <media/MediaTrackTranscoderCallback.h>
+
+namespace android {
+
+media_status_t MediaTrackTranscoder::configure(
+        const std::shared_ptr<MediaSampleReader>& mediaSampleReader, int trackIndex,
+        const std::shared_ptr<AMediaFormat>& destinationFormat) {
+    std::scoped_lock lock{mStateMutex};
+
+    if (mState != UNINITIALIZED) {
+        LOG(ERROR) << "Configure can only be called once";
+        return AMEDIA_ERROR_UNSUPPORTED;
+    }
+
+    if (mediaSampleReader == nullptr) {
+        LOG(ERROR) << "MediaSampleReader is null";
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+    if (trackIndex < 0 || trackIndex >= mediaSampleReader->getTrackCount()) {
+        LOG(ERROR) << "TrackIndex is invalid " << trackIndex;
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    mMediaSampleReader = mediaSampleReader;
+    mTrackIndex = trackIndex;
+
+    mSourceFormat = std::shared_ptr<AMediaFormat>(mMediaSampleReader->getTrackFormat(mTrackIndex),
+                                                  &AMediaFormat_delete);
+    if (mSourceFormat == nullptr) {
+        LOG(ERROR) << "Unable to get format for track #" << mTrackIndex;
+        return AMEDIA_ERROR_MALFORMED;
+    }
+
+    media_status_t status = configureDestinationFormat(destinationFormat);
+    if (status != AMEDIA_OK) {
+        LOG(ERROR) << "configure failed with error " << status;
+        return status;
+    }
+
+    mState = CONFIGURED;
+    return AMEDIA_OK;
+}
+
+bool MediaTrackTranscoder::start() {
+    std::scoped_lock lock{mStateMutex};
+
+    if (mState != CONFIGURED) {
+        LOG(ERROR) << "TrackTranscoder must be configured before started";
+        return false;
+    }
+
+    mTranscodingThread = std::thread([this] {
+        media_status_t status = runTranscodeLoop();
+
+        // Notify the client.
+        if (auto callbacks = mTranscoderCallback.lock()) {
+            if (status != AMEDIA_OK) {
+                callbacks->onTrackError(this, status);
+            } else {
+                callbacks->onTrackFinished(this);
+            }
+        }
+    });
+
+    mState = STARTED;
+    return true;
+}
+
+bool MediaTrackTranscoder::stop() {
+    std::scoped_lock lock{mStateMutex};
+
+    if (mState == STARTED) {
+        abortTranscodeLoop();
+        mMediaSampleReader->setEnforceSequentialAccess(false);
+        mTranscodingThread.join();
+        {
+            std::scoped_lock lock{mSampleMutex};
+            mSampleQueue.abort();  // Release any buffered samples.
+        }
+        mState = STOPPED;
+        return true;
+    }
+
+    LOG(ERROR) << "TrackTranscoder must be started before stopped";
+    return false;
+}
+
+void MediaTrackTranscoder::notifyTrackFormatAvailable() {
+    if (auto callbacks = mTranscoderCallback.lock()) {
+        callbacks->onTrackFormatAvailable(this);
+    }
+}
+
+void MediaTrackTranscoder::onOutputSampleAvailable(const std::shared_ptr<MediaSample>& sample) {
+    std::scoped_lock lock{mSampleMutex};
+    if (mSampleConsumer == nullptr) {
+        mSampleQueue.enqueue(sample);
+    } else {
+        mSampleConsumer(sample);
+    }
+}
+
+void MediaTrackTranscoder::setSampleConsumer(
+        const MediaSampleWriter::MediaSampleConsumerFunction& sampleConsumer) {
+    std::scoped_lock lock{mSampleMutex};
+    mSampleConsumer = sampleConsumer;
+
+    std::shared_ptr<MediaSample> sample;
+    while (!mSampleQueue.isEmpty() && !mSampleQueue.dequeue(&sample)) {
+        mSampleConsumer(sample);
+    }
+}
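+
+// For illustration only (compiled out): the call order the owning MediaTranscoder drives on a
+// concrete subclass, e.g. one returned by VideoTrackTranscoder::create. The sample consumer is
+// normally attached from the onTrackFormatAvailable callback; until then, output samples are
+// buffered in mSampleQueue.
+#if 0
+static bool exampleTrackLifecycle(const std::shared_ptr<MediaTrackTranscoder>& track,
+                                  const std::shared_ptr<MediaSampleReader>& reader, int trackIndex,
+                                  const std::shared_ptr<AMediaFormat>& destinationFormat) {
+    if (track->configure(reader, trackIndex, destinationFormat) != AMEDIA_OK) {
+        return false;
+    }
+    if (!track->start()) {
+        return false;
+    }
+    // ... transcoding runs on the internal thread; completion or errors arrive through the
+    // MediaTrackTranscoderCallback ...
+    return track->stop();  // Aborts the loop if it is still running and joins the thread.
+}
+#endif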
+
+}  // namespace android
diff --git a/media/libmediatranscoding/transcoder/MediaTranscoder.cpp b/media/libmediatranscoding/transcoder/MediaTranscoder.cpp
new file mode 100644
index 0000000..d89b58f
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/MediaTranscoder.cpp
@@ -0,0 +1,354 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "MediaTranscoder"
+
+#include <android-base/logging.h>
+#include <fcntl.h>
+#include <media/MediaSampleReaderNDK.h>
+#include <media/MediaSampleWriter.h>
+#include <media/MediaTranscoder.h>
+#include <media/NdkCommon.h>
+#include <media/PassthroughTrackTranscoder.h>
+#include <media/VideoTrackTranscoder.h>
+#include <unistd.h>
+
+namespace android {
+
+static AMediaFormat* mergeMediaFormats(AMediaFormat* base, AMediaFormat* overlay) {
+    if (base == nullptr || overlay == nullptr) {
+        LOG(ERROR) << "Cannot merge null formats";
+        return nullptr;
+    }
+
+    AMediaFormat* format = AMediaFormat_new();
+    if (AMediaFormat_copy(format, base) != AMEDIA_OK) {
+        AMediaFormat_delete(format);
+        return nullptr;
+    }
+
+    // Note: AMediaFormat does not expose a function for appending values from another format or
+    // for iterating over all keys and values in a format. Instead we define a static list of known
+    // keys along with their value types and copy the ones that are present. A better solution
+    // would be to either implement the required functions in the NDK or to parse the overlay
+    // format's string representation and copy all existing keys.
+    static const AMediaFormatUtils::EntryCopier kSupportedFormatEntries[] = {
+            ENTRY_COPIER(AMEDIAFORMAT_KEY_MIME, String),
+            ENTRY_COPIER(AMEDIAFORMAT_KEY_DURATION, Int64),
+            ENTRY_COPIER(AMEDIAFORMAT_KEY_WIDTH, Int32),
+            ENTRY_COPIER(AMEDIAFORMAT_KEY_HEIGHT, Int32),
+            ENTRY_COPIER(AMEDIAFORMAT_KEY_BIT_RATE, Int32),
+            ENTRY_COPIER(AMEDIAFORMAT_KEY_PROFILE, Int32),
+            ENTRY_COPIER(AMEDIAFORMAT_KEY_LEVEL, Int32),
+            ENTRY_COPIER(AMEDIAFORMAT_KEY_COLOR_FORMAT, Int32),
+            ENTRY_COPIER(AMEDIAFORMAT_KEY_COLOR_RANGE, Int32),
+            ENTRY_COPIER(AMEDIAFORMAT_KEY_COLOR_STANDARD, Int32),
+            ENTRY_COPIER(AMEDIAFORMAT_KEY_COLOR_TRANSFER, Int32),
+            ENTRY_COPIER(AMEDIAFORMAT_KEY_FRAME_RATE, Int32),
+            ENTRY_COPIER(AMEDIAFORMAT_KEY_I_FRAME_INTERVAL, Int32),
+            ENTRY_COPIER(AMEDIAFORMAT_KEY_PRIORITY, Int32),
+            ENTRY_COPIER2(AMEDIAFORMAT_KEY_OPERATING_RATE, Float, Int32),
+    };
+    const size_t entryCount = sizeof(kSupportedFormatEntries) / sizeof(kSupportedFormatEntries[0]);
+
+    AMediaFormatUtils::CopyFormatEntries(overlay, format, kSupportedFormatEntries, entryCount);
+    return format;
+}
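+
+// A minimal sketch (compiled out) of how the entry-copier table above behaves: an overlay format
+// that only carries a MIME type and a bitrate inherits all other listed fields from the base
+// (source track) format, while keys not in the table are ignored.
+#if 0
+static AMediaFormat* exampleMergeUsage(AMediaFormat* sourceTrackFormat) {
+    AMediaFormat* overlay = AMediaFormat_new();
+    AMediaFormat_setString(overlay, AMEDIAFORMAT_KEY_MIME, "video/avc");
+    AMediaFormat_setInt32(overlay, AMEDIAFORMAT_KEY_BIT_RATE, 2 * 1000 * 1000);
+
+    // Width, height, frame rate, color info etc. come from sourceTrackFormat; the MIME type and
+    // bitrate above take precedence.
+    AMediaFormat* merged = mergeMediaFormats(sourceTrackFormat, overlay);
+    AMediaFormat_delete(overlay);
+    return merged;
+}
+#endif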
+
+void MediaTranscoder::sendCallback(media_status_t status) {
+    // If the transcoder was already cancelled explicitly, don't send any error callbacks.
+    // The tracks and the sample writer will report errors for the abort, but currently we
+    // can't tell those apart from real errors. Ideally we would still report real errors
+    // back to the client, since there is a small chance that an explicit abort and a real
+    // error occur at around the same time; we could do that if the abort carried a specific
+    // error code. On the other hand, if the transcoder actually finished (status is
+    // AMEDIA_OK) at around the same time as the abort, we still report the finish back to
+    // the client.
+    if (mCancelled && status != AMEDIA_OK) {
+        return;
+    }
+
+    bool expected = false;
+    if (mCallbackSent.compare_exchange_strong(expected, true)) {
+        if (status == AMEDIA_OK) {
+            mCallbacks->onFinished(this);
+        } else {
+            mCallbacks->onError(this, status);
+        }
+
+        // Transcoding is done and the callback to the client has been sent, so tear down the
+        // pipeline, but do it asynchronously to avoid deadlocks. If an error occurred, the
+        // client should clean up the file.
+        std::thread asyncCancelThread{[self = shared_from_this()] { self->cancel(); }};
+        asyncCancelThread.detach();
+    }
+}
+
+void MediaTranscoder::onTrackFormatAvailable(const MediaTrackTranscoder* transcoder) {
+    LOG(INFO) << "TrackTranscoder " << transcoder << " format available.";
+
+    std::scoped_lock lock{mTracksAddedMutex};
+
+    // Ignore duplicate format change.
+    if (mTracksAdded.count(transcoder) > 0) {
+        return;
+    }
+
+    // Add track to the writer.
+    auto consumer = mSampleWriter->addTrack(transcoder->getOutputFormat());
+    if (consumer == nullptr) {
+        LOG(ERROR) << "Unable to add track to sample writer.";
+        sendCallback(AMEDIA_ERROR_UNKNOWN);
+        return;
+    }
+
+    MediaTrackTranscoder* mutableTranscoder = const_cast<MediaTrackTranscoder*>(transcoder);
+    mutableTranscoder->setSampleConsumer(consumer);
+
+    mTracksAdded.insert(transcoder);
+    if (mTracksAdded.size() == mTrackTranscoders.size()) {
+        // Enable sequential access mode on the sample reader to achieve optimal read performance.
+        // This has to wait until all tracks have delivered their output formats and the sample
+        // writer has started. Otherwise the tracks' output sample queues would not get drained,
+        // and the transcoder could hang because one track runs out of buffers and blocks the
+        // other tracks from reading source samples before they can output their formats.
+        mSampleReader->setEnforceSequentialAccess(true);
+        LOG(INFO) << "Starting sample writer.";
+        bool started = mSampleWriter->start();
+        if (!started) {
+            LOG(ERROR) << "Unable to start sample writer.";
+            sendCallback(AMEDIA_ERROR_UNKNOWN);
+        }
+    }
+}
+
+void MediaTranscoder::onTrackFinished(const MediaTrackTranscoder* transcoder) {
+    LOG(DEBUG) << "TrackTranscoder " << transcoder << " finished";
+}
+
+void MediaTranscoder::onTrackError(const MediaTrackTranscoder* transcoder, media_status_t status) {
+    LOG(ERROR) << "TrackTranscoder " << transcoder << " returned error " << status;
+    sendCallback(status);
+}
+
+void MediaTranscoder::onFinished(const MediaSampleWriter* writer __unused, media_status_t status) {
+    LOG((status != AMEDIA_OK) ? ERROR : DEBUG) << "Sample writer finished with status " << status;
+    sendCallback(status);
+}
+
+void MediaTranscoder::onProgressUpdate(const MediaSampleWriter* writer __unused, int32_t progress) {
+    // Dispatch progress updated to the client.
+    mCallbacks->onProgressUpdate(this, progress);
+}
+
+MediaTranscoder::MediaTranscoder(const std::shared_ptr<CallbackInterface>& callbacks)
+      : mCallbacks(callbacks) {}
+
+std::shared_ptr<MediaTranscoder> MediaTranscoder::create(
+        const std::shared_ptr<CallbackInterface>& callbacks,
+        const std::shared_ptr<ndk::ScopedAParcel>& pausedState) {
+    if (pausedState != nullptr) {
+        LOG(INFO) << "Initializing from paused state.";
+    }
+    if (callbacks == nullptr) {
+        LOG(ERROR) << "Callbacks cannot be null";
+        return nullptr;
+    }
+
+    return std::shared_ptr<MediaTranscoder>(new MediaTranscoder(callbacks));
+}
+
+media_status_t MediaTranscoder::configureSource(int fd) {
+    if (fd < 0) {
+        LOG(ERROR) << "Invalid source fd: " << fd;
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    const size_t fileSize = lseek(fd, 0, SEEK_END);
+    lseek(fd, 0, SEEK_SET);
+
+    mSampleReader = MediaSampleReaderNDK::createFromFd(fd, 0 /* offset */, fileSize);
+
+    if (mSampleReader == nullptr) {
+        LOG(ERROR) << "Unable to parse source fd: " << fd;
+        return AMEDIA_ERROR_UNSUPPORTED;
+    }
+
+    const size_t trackCount = mSampleReader->getTrackCount();
+    for (size_t trackIndex = 0; trackIndex < trackCount; ++trackIndex) {
+        AMediaFormat* trackFormat = mSampleReader->getTrackFormat(static_cast<int>(trackIndex));
+        if (trackFormat == nullptr) {
+            LOG(ERROR) << "Track #" << trackIndex << " has no format";
+            return AMEDIA_ERROR_MALFORMED;
+        }
+
+        mSourceTrackFormats.emplace_back(trackFormat, &AMediaFormat_delete);
+    }
+
+    return AMEDIA_OK;
+}
+
+std::vector<std::shared_ptr<AMediaFormat>> MediaTranscoder::getTrackFormats() const {
+    // Return a deep copy of the formats to avoid the caller modifying our internal formats.
+    std::vector<std::shared_ptr<AMediaFormat>> trackFormats;
+    for (const std::shared_ptr<AMediaFormat>& sourceFormat : mSourceTrackFormats) {
+        AMediaFormat* copy = AMediaFormat_new();
+        AMediaFormat_copy(copy, sourceFormat.get());
+        trackFormats.emplace_back(copy, &AMediaFormat_delete);
+    }
+    return trackFormats;
+}
+
+media_status_t MediaTranscoder::configureTrackFormat(size_t trackIndex, AMediaFormat* trackFormat) {
+    if (mSampleReader == nullptr) {
+        LOG(ERROR) << "Source must be configured before tracks";
+        return AMEDIA_ERROR_INVALID_OPERATION;
+    } else if (trackIndex >= mSourceTrackFormats.size()) {
+        LOG(ERROR) << "Track index " << trackIndex
+                   << " is out of bounds. Track count: " << mSourceTrackFormats.size();
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    media_status_t status = mSampleReader->selectTrack(trackIndex);
+    if (status != AMEDIA_OK) {
+        LOG(ERROR) << "Unable to select track " << trackIndex;
+        return status;
+    }
+
+    std::shared_ptr<MediaTrackTranscoder> transcoder;
+    std::shared_ptr<AMediaFormat> format;
+
+    if (trackFormat == nullptr) {
+        transcoder = std::make_shared<PassthroughTrackTranscoder>(shared_from_this());
+    } else {
+        const char* srcMime = nullptr;
+        if (!AMediaFormat_getString(mSourceTrackFormats[trackIndex].get(), AMEDIAFORMAT_KEY_MIME,
+                                    &srcMime)) {
+            LOG(ERROR) << "Source track #" << trackIndex << " has no mime type";
+            return AMEDIA_ERROR_MALFORMED;
+        }
+
+        if (strncmp(srcMime, "video/", 6) != 0) {
+            LOG(ERROR) << "Only video tracks are supported for transcoding. Unable to configure "
+                          "track #"
+                       << trackIndex << " with mime " << srcMime;
+            return AMEDIA_ERROR_UNSUPPORTED;
+        }
+
+        const char* dstMime = nullptr;
+        if (AMediaFormat_getString(trackFormat, AMEDIAFORMAT_KEY_MIME, &dstMime)) {
+            if (strncmp(dstMime, "video/", 6) != 0) {
+                LOG(ERROR) << "Unable to convert media types for track #" << trackIndex << ", from "
+                           << srcMime << " to " << dstMime;
+                return AMEDIA_ERROR_UNSUPPORTED;
+            }
+        }
+
+        transcoder = VideoTrackTranscoder::create(shared_from_this());
+
+        AMediaFormat* mergedFormat =
+                mergeMediaFormats(mSourceTrackFormats[trackIndex].get(), trackFormat);
+        if (mergedFormat == nullptr) {
+            LOG(ERROR) << "Unable to merge source and destination formats";
+            return AMEDIA_ERROR_UNKNOWN;
+        }
+
+        format = std::shared_ptr<AMediaFormat>(mergedFormat, &AMediaFormat_delete);
+    }
+
+    status = transcoder->configure(mSampleReader, trackIndex, format);
+    if (status != AMEDIA_OK) {
+        LOG(ERROR) << "Configure track transcoder for track #" << trackIndex << " returned error "
+                   << status;
+        return status;
+    }
+
+    mTrackTranscoders.emplace_back(std::move(transcoder));
+    return AMEDIA_OK;
+}
+
+media_status_t MediaTranscoder::configureDestination(int fd) {
+    if (fd < 0) {
+        LOG(ERROR) << "Invalid destination fd: " << fd;
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    if (mSampleWriter != nullptr) {
+        LOG(ERROR) << "Destination is already configured.";
+        return AMEDIA_ERROR_INVALID_OPERATION;
+    }
+
+    mSampleWriter = MediaSampleWriter::Create();
+    const bool initOk = mSampleWriter->init(fd, shared_from_this());
+
+    if (!initOk) {
+        LOG(ERROR) << "Unable to initialize sample writer with destination fd: " << fd;
+        mSampleWriter.reset();
+        return AMEDIA_ERROR_UNKNOWN;
+    }
+
+    return AMEDIA_OK;
+}
+
+media_status_t MediaTranscoder::start() {
+    if (mTrackTranscoders.size() < 1) {
+        LOG(ERROR) << "Unable to start, no tracks are configured.";
+        return AMEDIA_ERROR_INVALID_OPERATION;
+    } else if (mSampleWriter == nullptr) {
+        LOG(ERROR) << "Unable to start, destination is not configured";
+        return AMEDIA_ERROR_INVALID_OPERATION;
+    }
+
+    // Start transcoders
+    for (auto& transcoder : mTrackTranscoders) {
+        bool started = transcoder->start();
+        if (!started) {
+            LOG(ERROR) << "Unable to start track transcoder.";
+            cancel();
+            return AMEDIA_ERROR_UNKNOWN;
+        }
+    }
+    return AMEDIA_OK;
+}
+
+media_status_t MediaTranscoder::pause(std::shared_ptr<ndk::ScopedAParcel>* pausedState) {
+    // TODO: write internal states to parcel.
+    *pausedState = std::shared_ptr<::ndk::ScopedAParcel>(new ::ndk::ScopedAParcel());
+    return cancel();
+}
+
+media_status_t MediaTranscoder::resume() {
+    // TODO: restore internal states from parcel.
+    return start();
+}
+
+media_status_t MediaTranscoder::cancel() {
+    bool expected = false;
+    if (!mCancelled.compare_exchange_strong(expected, true)) {
+        // Already cancelled.
+        return AMEDIA_OK;
+    }
+
+    mSampleWriter->stop();
+    mSampleReader->setEnforceSequentialAccess(false);
+    for (auto& transcoder : mTrackTranscoders) {
+        transcoder->stop();
+    }
+
+    return AMEDIA_OK;
+}
+
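+// A sketch of the intended client flow (compiled out), pieced together from the configure and
+// start methods above. The "callbacks" argument is assumed to be a client-provided implementation
+// of MediaTranscoder::CallbackInterface; error handling is trimmed for brevity.
+#if 0
+static media_status_t exampleClientUsage(
+        const std::shared_ptr<MediaTranscoder::CallbackInterface>& callbacks, int srcFd,
+        int dstFd) {
+    auto transcoder = MediaTranscoder::create(callbacks, nullptr /* pausedState */);
+    if (transcoder == nullptr) return AMEDIA_ERROR_UNKNOWN;
+
+    media_status_t status = transcoder->configureSource(srcFd);
+    if (status != AMEDIA_OK) return status;
+
+    // Re-encode video tracks as AVC at 2 Mbps; pass all other tracks through unchanged.
+    std::shared_ptr<AMediaFormat> videoOverrides(AMediaFormat_new(), &AMediaFormat_delete);
+    AMediaFormat_setString(videoOverrides.get(), AMEDIAFORMAT_KEY_MIME, "video/avc");
+    AMediaFormat_setInt32(videoOverrides.get(), AMEDIAFORMAT_KEY_BIT_RATE, 2 * 1000 * 1000);
+
+    std::vector<std::shared_ptr<AMediaFormat>> trackFormats = transcoder->getTrackFormats();
+    for (size_t i = 0; i < trackFormats.size(); ++i) {
+        const char* mime = nullptr;
+        AMediaFormat_getString(trackFormats[i].get(), AMEDIAFORMAT_KEY_MIME, &mime);
+        const bool isVideo = (mime != nullptr && strncmp(mime, "video/", 6) == 0);
+        status = transcoder->configureTrackFormat(i, isVideo ? videoOverrides.get() : nullptr);
+        if (status != AMEDIA_OK) return status;
+    }
+
+    status = transcoder->configureDestination(dstFd);
+    if (status != AMEDIA_OK) return status;
+
+    // Completion, errors and progress are reported asynchronously through the callbacks.
+    return transcoder->start();
+}
+#endif
+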
+}  // namespace android
diff --git a/media/libmediatranscoding/transcoder/NdkCommon.cpp b/media/libmediatranscoding/transcoder/NdkCommon.cpp
new file mode 100644
index 0000000..a7b79dc
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/NdkCommon.cpp
@@ -0,0 +1,90 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NdkCommon"
+
+#include <android-base/logging.h>
+#include <media/NdkCommon.h>
+
+#include <cstdio>
+#include <cstring>
+#include <utility>
+
+/* TODO(b/153592281)
+ * Note: constants used by the native media tests but not available in media ndk api
+ */
+const char* AMEDIA_MIMETYPE_VIDEO_VP8 = "video/x-vnd.on2.vp8";
+const char* AMEDIA_MIMETYPE_VIDEO_VP9 = "video/x-vnd.on2.vp9";
+const char* AMEDIA_MIMETYPE_VIDEO_AV1 = "video/av01";
+const char* AMEDIA_MIMETYPE_VIDEO_AVC = "video/avc";
+const char* AMEDIA_MIMETYPE_VIDEO_HEVC = "video/hevc";
+const char* AMEDIA_MIMETYPE_VIDEO_MPEG4 = "video/mp4v-es";
+const char* AMEDIA_MIMETYPE_VIDEO_H263 = "video/3gpp";
+
+/* TODO(b/153592281) */
+const char* TBD_AMEDIACODEC_PARAMETER_KEY_ALLOW_FRAME_DROP = "allow-frame-drop";
+const char* TBD_AMEDIACODEC_PARAMETER_KEY_REQUEST_SYNC_FRAME = "request-sync";
+const char* TBD_AMEDIACODEC_PARAMETER_KEY_VIDEO_BITRATE = "video-bitrate";
+const char* TBD_AMEDIACODEC_PARAMETER_KEY_MAX_B_FRAMES = "max-bframes";
+
+namespace AMediaFormatUtils {
+
+#define DEFINE_FORMAT_VALUE_COPY_FUNC(_type, _typeName)                                      \
+    bool CopyFormatEntry##_typeName(const char* key, AMediaFormat* from, AMediaFormat* to) { \
+        _type value;                                                                         \
+        if (AMediaFormat_get##_typeName(from, key, &value)) {                                \
+            AMediaFormat_set##_typeName(to, key, value);                                     \
+            return true;                                                                     \
+        }                                                                                    \
+        return false;                                                                        \
+    }
+
+DEFINE_FORMAT_VALUE_COPY_FUNC(const char*, String);
+DEFINE_FORMAT_VALUE_COPY_FUNC(int64_t, Int64);
+DEFINE_FORMAT_VALUE_COPY_FUNC(int32_t, Int32);
+DEFINE_FORMAT_VALUE_COPY_FUNC(float, Float);
+
+void CopyFormatEntries(AMediaFormat* from, AMediaFormat* to, const EntryCopier* entries,
+                       size_t entryCount) {
+    if (from == nullptr || to == nullptr) {
+        LOG(ERROR) << "Cannot copy null formats";
+        return;
+    } else if (entries == nullptr || entryCount < 1) {
+        LOG(WARNING) << "No entries to copy";
+        return;
+    }
+
+    for (size_t i = 0; i < entryCount; ++i) {
+        if (!entries[i].copy(entries[i].key, from, to) && entries[i].copy2 != nullptr) {
+            entries[i].copy2(entries[i].key, from, to);
+        }
+    }
+}
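+
+// For illustration only (compiled out): a caller-defined entry table. ENTRY_COPIER binds a format
+// key to one of the typed copy functions generated above, and CopyFormatEntries then copies only
+// the keys that are actually present in the source format.
+#if 0
+static void exampleCopyVideoDimensions(AMediaFormat* from, AMediaFormat* to) {
+    static const EntryCopier kEntries[] = {
+            ENTRY_COPIER(AMEDIAFORMAT_KEY_WIDTH, Int32),
+            ENTRY_COPIER(AMEDIAFORMAT_KEY_HEIGHT, Int32),
+    };
+    CopyFormatEntries(from, to, kEntries, sizeof(kEntries) / sizeof(kEntries[0]));
+}
+#endif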
+
+#define DEFINE_SET_DEFAULT_FORMAT_VALUE_FUNC(_type, _typeName)                                  \
+    bool SetDefaultFormatValue##_typeName(const char* key, AMediaFormat* format, _type value) { \
+        _type tmp;                                                                              \
+        if (!AMediaFormat_get##_typeName(format, key, &tmp)) {                                  \
+            AMediaFormat_set##_typeName(format, key, value);                                    \
+            return true;                                                                        \
+        }                                                                                       \
+        return false;                                                                           \
+    }
+
+DEFINE_SET_DEFAULT_FORMAT_VALUE_FUNC(float, Float);
+DEFINE_SET_DEFAULT_FORMAT_VALUE_FUNC(int32_t, Int32);
+
+}  // namespace AMediaFormatUtils
\ No newline at end of file
diff --git a/media/libmediatranscoding/transcoder/PassthroughTrackTranscoder.cpp b/media/libmediatranscoding/transcoder/PassthroughTrackTranscoder.cpp
new file mode 100644
index 0000000..35b1d33
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/PassthroughTrackTranscoder.cpp
@@ -0,0 +1,158 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "PassthroughTrackTranscoder"
+
+#include <android-base/logging.h>
+#include <media/PassthroughTrackTranscoder.h>
+
+namespace android {
+
+PassthroughTrackTranscoder::BufferPool::~BufferPool() {
+    for (auto it = mAddressSizeMap.begin(); it != mAddressSizeMap.end(); ++it) {
+        delete[] it->first;
+    }
+}
+
+uint8_t* PassthroughTrackTranscoder::BufferPool::getBufferWithSize(size_t minimumBufferSize)
+        NO_THREAD_SAFETY_ANALYSIS {
+    std::unique_lock lock(mMutex);
+
+    // Wait if maximum number of buffers are allocated but none are free.
+    while (mAddressSizeMap.size() >= mMaxBufferCount && mFreeBufferMap.empty() && !mAborted) {
+        mCondition.wait(lock);
+    }
+
+    if (mAborted) {
+        return nullptr;
+    }
+
+    // Check if the free list contains a large enough buffer.
+    auto it = mFreeBufferMap.lower_bound(minimumBufferSize);
+    if (it != mFreeBufferMap.end()) {
+        uint8_t* buffer = it->second;
+        mFreeBufferMap.erase(it);
+        return buffer;
+    }
+
+    // If the maximum buffer count is reached, remove an existing free buffer.
+    if (mAddressSizeMap.size() >= mMaxBufferCount) {
+        auto it = mFreeBufferMap.begin();
+        mAddressSizeMap.erase(it->second);
+        delete[] it->second;
+        mFreeBufferMap.erase(it);
+    }
+
+    // Allocate a new buffer.
+    uint8_t* buffer = new (std::nothrow) uint8_t[minimumBufferSize];
+    if (buffer == nullptr) {
+        LOG(ERROR) << "Unable to allocate new buffer of size: " << minimumBufferSize;
+        return nullptr;
+    }
+
+    // Add the buffer to the tracking set.
+    mAddressSizeMap.emplace(buffer, minimumBufferSize);
+    return buffer;
+}
+
+void PassthroughTrackTranscoder::BufferPool::returnBuffer(uint8_t* buffer) {
+    std::scoped_lock lock(mMutex);
+
+    if (buffer == nullptr || mAddressSizeMap.find(buffer) == mAddressSizeMap.end()) {
+        LOG(WARNING) << "Ignoring untracked buffer " << buffer;
+        return;
+    }
+
+    mFreeBufferMap.emplace(mAddressSizeMap[buffer], buffer);
+    mCondition.notify_one();
+}
+
+void PassthroughTrackTranscoder::BufferPool::abort() {
+    std::scoped_lock lock(mMutex);
+    mAborted = true;
+    mCondition.notify_all();
+}
+
+media_status_t PassthroughTrackTranscoder::configureDestinationFormat(
+        const std::shared_ptr<AMediaFormat>& destinationFormat __unused) {
+    // Called by MediaTrackTranscoder. Passthrough doesn't care about the destination format,
+    // so just return OK.
+    return AMEDIA_OK;
+}
+
+media_status_t PassthroughTrackTranscoder::runTranscodeLoop() {
+    MediaSampleInfo info;
+    std::shared_ptr<MediaSample> sample;
+
+    // Notify the track format as soon as we start. It's the same as the source format.
+    notifyTrackFormatAvailable();
+
+    MediaSample::OnSampleReleasedCallback bufferReleaseCallback =
+            [bufferPool = mBufferPool](MediaSample* sample) {
+                bufferPool->returnBuffer(const_cast<uint8_t*>(sample->buffer));
+            };
+
+    // Move samples until EOS is reached or transcoding is stopped.
+    while (!mStopRequested && !mEosFromSource) {
+        media_status_t status = mMediaSampleReader->getSampleInfoForTrack(mTrackIndex, &info);
+
+        if (status == AMEDIA_OK) {
+            uint8_t* buffer = mBufferPool->getBufferWithSize(info.size);
+            if (buffer == nullptr) {
+                if (mStopRequested) {
+                    break;
+                }
+
+                LOG(ERROR) << "Unable to get buffer from pool";
+                return AMEDIA_ERROR_IO;  // TODO: Custom error codes?
+            }
+
+            sample = MediaSample::createWithReleaseCallback(
+                    buffer, 0 /* offset */, 0 /* bufferId */, bufferReleaseCallback);
+
+            status = mMediaSampleReader->readSampleDataForTrack(mTrackIndex, buffer, info.size);
+            if (status != AMEDIA_OK) {
+                LOG(ERROR) << "Unable to read next sample data. Aborting transcode.";
+                return status;
+            }
+
+        } else if (status == AMEDIA_ERROR_END_OF_STREAM) {
+            sample = std::make_shared<MediaSample>();
+            mEosFromSource = true;
+        } else {
+            LOG(ERROR) << "Unable to get next sample info. Aborting transcode.";
+            return status;
+        }
+
+        sample->info = info;
+        onOutputSampleAvailable(sample);
+    }
+
+    if (mStopRequested && !mEosFromSource) {
+        return AMEDIA_ERROR_UNKNOWN;  // TODO: Custom error codes?
+    }
+    return AMEDIA_OK;
+}
+
+void PassthroughTrackTranscoder::abortTranscodeLoop() {
+    mStopRequested = true;
+    mBufferPool->abort();
+}
+
+std::shared_ptr<AMediaFormat> PassthroughTrackTranscoder::getOutputFormat() const {
+    return mSourceFormat;
+}
+}  // namespace android
diff --git a/media/libmediatranscoding/transcoder/VideoTrackTranscoder.cpp b/media/libmediatranscoding/transcoder/VideoTrackTranscoder.cpp
new file mode 100644
index 0000000..4cf54f1
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/VideoTrackTranscoder.cpp
@@ -0,0 +1,535 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "VideoTrackTranscoder"
+
+#include <android-base/logging.h>
+#include <media/NdkCommon.h>
+#include <media/VideoTrackTranscoder.h>
+#include <utils/AndroidThreads.h>
+
+using namespace AMediaFormatUtils;
+
+namespace android {
+
+// Check that the codec sample flags have the expected NDK meaning.
+static_assert(SAMPLE_FLAG_CODEC_CONFIG == AMEDIACODEC_BUFFER_FLAG_CODEC_CONFIG,
+              "Sample flag mismatch: CODEC_CONFIG");
+static_assert(SAMPLE_FLAG_END_OF_STREAM == AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM,
+              "Sample flag mismatch: END_OF_STREAM");
+static_assert(SAMPLE_FLAG_PARTIAL_FRAME == AMEDIACODEC_BUFFER_FLAG_PARTIAL_FRAME,
+              "Sample flag mismatch: PARTIAL_FRAME");
+
+// Color format defined by surface. (See MediaCodecInfo.CodecCapabilities#COLOR_FormatSurface.)
+static constexpr int32_t kColorFormatSurface = 0x7f000789;
+// Default key frame interval in seconds.
+static constexpr float kDefaultKeyFrameIntervalSeconds = 1.0f;
+// Default codec operating rate.
+static constexpr int32_t kDefaultCodecOperatingRate = 240;
+// Default codec priority.
+static constexpr int32_t kDefaultCodecPriority = 1;
+// Default bitrate, in bits per second, used if estimating the source bitrate fails.
+static constexpr int32_t kDefaultBitrateMbps = 10 * 1000 * 1000;
+
+template <typename T>
+void VideoTrackTranscoder::BlockingQueue<T>::push(T const& value, bool front) {
+    {
+        std::scoped_lock lock(mMutex);
+        if (mAborted) {
+            return;
+        }
+
+        if (front) {
+            mQueue.push_front(value);
+        } else {
+            mQueue.push_back(value);
+        }
+    }
+    mCondition.notify_one();
+}
+
+template <typename T>
+T VideoTrackTranscoder::BlockingQueue<T>::pop() {
+    std::unique_lock lock(mMutex);
+    while (mQueue.empty()) {
+        mCondition.wait(lock);
+    }
+    T value = mQueue.front();
+    mQueue.pop_front();
+    return value;
+}
+
+// Note: Do not call if another thread might be waiting in pop.
+template <typename T>
+void VideoTrackTranscoder::BlockingQueue<T>::abort() {
+    std::scoped_lock lock(mMutex);
+    mAborted = true;
+    mQueue.clear();
+}
+
+// The CodecWrapper class is used to let AMediaCodec instances outlive the transcoder object itself
+// by giving the codec a weak pointer to the transcoder. Codecs wrapped in this object are kept
+// alive by the transcoder and the codec's outstanding buffers. Once the transcoder stops and all
+// output buffers have been released by downstream components the codec will also be released.
+class VideoTrackTranscoder::CodecWrapper {
+public:
+    CodecWrapper(AMediaCodec* codec, const std::weak_ptr<VideoTrackTranscoder>& transcoder)
+          : mCodec(codec), mTranscoder(transcoder), mCodecStarted(false) {}
+    ~CodecWrapper() {
+        if (mCodecStarted) {
+            AMediaCodec_stop(mCodec);
+        }
+        AMediaCodec_delete(mCodec);
+    }
+
+    AMediaCodec* getCodec() { return mCodec; }
+    std::shared_ptr<VideoTrackTranscoder> getTranscoder() const { return mTranscoder.lock(); };
+    void setStarted() { mCodecStarted = true; }
+
+private:
+    AMediaCodec* mCodec;
+    std::weak_ptr<VideoTrackTranscoder> mTranscoder;
+    bool mCodecStarted;
+};
+
+// Dispatch responses to codec callbacks onto the message queue.
+struct AsyncCodecCallbackDispatch {
+    static void onAsyncInputAvailable(AMediaCodec* codec, void* userdata, int32_t index) {
+        VideoTrackTranscoder::CodecWrapper* wrapper =
+                static_cast<VideoTrackTranscoder::CodecWrapper*>(userdata);
+        if (auto transcoder = wrapper->getTranscoder()) {
+            if (codec == transcoder->mDecoder) {
+                transcoder->mCodecMessageQueue.push(
+                        [transcoder, index] { transcoder->enqueueInputSample(index); });
+            }
+        }
+    }
+
+    static void onAsyncOutputAvailable(AMediaCodec* codec, void* userdata, int32_t index,
+                                       AMediaCodecBufferInfo* bufferInfoPtr) {
+        VideoTrackTranscoder::CodecWrapper* wrapper =
+                static_cast<VideoTrackTranscoder::CodecWrapper*>(userdata);
+        AMediaCodecBufferInfo bufferInfo = *bufferInfoPtr;
+        if (auto transcoder = wrapper->getTranscoder()) {
+            transcoder->mCodecMessageQueue.push([transcoder, index, codec, bufferInfo] {
+                if (codec == transcoder->mDecoder) {
+                    transcoder->transferBuffer(index, bufferInfo);
+                } else if (codec == transcoder->mEncoder->getCodec()) {
+                    transcoder->dequeueOutputSample(index, bufferInfo);
+                }
+            });
+        }
+    }
+
+    static void onAsyncFormatChanged(AMediaCodec* codec, void* userdata, AMediaFormat* format) {
+        VideoTrackTranscoder::CodecWrapper* wrapper =
+                static_cast<VideoTrackTranscoder::CodecWrapper*>(userdata);
+        if (auto transcoder = wrapper->getTranscoder()) {
+            const char* kCodecName = (codec == transcoder->mDecoder ? "Decoder" : "Encoder");
+            LOG(DEBUG) << kCodecName << " format changed: " << AMediaFormat_toString(format);
+            if (codec == transcoder->mEncoder->getCodec()) {
+                transcoder->mCodecMessageQueue.push(
+                        [transcoder, format] { transcoder->updateTrackFormat(format); });
+            }
+        }
+    }
+
+    static void onAsyncError(AMediaCodec* codec, void* userdata, media_status_t error,
+                             int32_t actionCode, const char* detail) {
+        LOG(ERROR) << "Error from codec " << codec << ", userdata " << userdata << ", error "
+                   << error << ", action " << actionCode << ", detail " << detail;
+        VideoTrackTranscoder::CodecWrapper* wrapper =
+                static_cast<VideoTrackTranscoder::CodecWrapper*>(userdata);
+        if (auto transcoder = wrapper->getTranscoder()) {
+            transcoder->mCodecMessageQueue.push(
+                    [transcoder, error] {
+                        transcoder->mStatus = error;
+                        transcoder->mStopRequested = true;
+                    },
+                    true);
+        }
+    }
+};
+
+// static
+std::shared_ptr<VideoTrackTranscoder> VideoTrackTranscoder::create(
+        const std::weak_ptr<MediaTrackTranscoderCallback>& transcoderCallback) {
+    return std::shared_ptr<VideoTrackTranscoder>(new VideoTrackTranscoder(transcoderCallback));
+}
+
+VideoTrackTranscoder::~VideoTrackTranscoder() {
+    if (mDecoder != nullptr) {
+        AMediaCodec_delete(mDecoder);
+    }
+
+    if (mSurface != nullptr) {
+        ANativeWindow_release(mSurface);
+    }
+}
+
+// Creates and configures the codecs.
+media_status_t VideoTrackTranscoder::configureDestinationFormat(
+        const std::shared_ptr<AMediaFormat>& destinationFormat) {
+    media_status_t status = AMEDIA_OK;
+
+    if (destinationFormat == nullptr) {
+        LOG(ERROR) << "Destination format is null, use passthrough transcoder";
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    AMediaFormat* encoderFormat = AMediaFormat_new();
+    if (!encoderFormat || AMediaFormat_copy(encoderFormat, destinationFormat.get()) != AMEDIA_OK) {
+        LOG(ERROR) << "Unable to copy destination format";
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    int32_t bitrate;
+    if (!AMediaFormat_getInt32(encoderFormat, AMEDIAFORMAT_KEY_BIT_RATE, &bitrate)) {
+        status = mMediaSampleReader->getEstimatedBitrateForTrack(mTrackIndex, &bitrate);
+        if (status != AMEDIA_OK) {
+            LOG(ERROR) << "Unable to estimate bitrate. Using default " << kDefaultBitrateMbps;
+            bitrate = kDefaultBitrateMbps;
+        }
+
+        LOG(INFO) << "Configuring bitrate " << bitrate;
+        AMediaFormat_setInt32(encoderFormat, AMEDIAFORMAT_KEY_BIT_RATE, bitrate);
+    }
+
+    SetDefaultFormatValueFloat(AMEDIAFORMAT_KEY_I_FRAME_INTERVAL, encoderFormat,
+                               kDefaultKeyFrameIntervalSeconds);
+    SetDefaultFormatValueInt32(AMEDIAFORMAT_KEY_OPERATING_RATE, encoderFormat,
+                               kDefaultCodecOperatingRate);
+    SetDefaultFormatValueInt32(AMEDIAFORMAT_KEY_PRIORITY, encoderFormat, kDefaultCodecPriority);
+
+    AMediaFormat_setInt32(encoderFormat, AMEDIAFORMAT_KEY_COLOR_FORMAT, kColorFormatSurface);
+
+    // Always encode without rotation. The rotation degrees will be transferred directly to the
+    // MediaSampleWriter track format, and MediaSampleWriter will call AMediaMuxer_setOrientationHint.
+    AMediaFormat_setInt32(encoderFormat, AMEDIAFORMAT_KEY_ROTATION, 0);
+
+    mDestinationFormat = std::shared_ptr<AMediaFormat>(encoderFormat, &AMediaFormat_delete);
+
+    // Create and configure the encoder.
+    const char* destinationMime = nullptr;
+    bool ok = AMediaFormat_getString(mDestinationFormat.get(), AMEDIAFORMAT_KEY_MIME,
+                                     &destinationMime);
+    if (!ok) {
+        LOG(ERROR) << "Destination MIME type is required for transcoding.";
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    AMediaCodec* encoder = AMediaCodec_createEncoderByType(destinationMime);
+    if (encoder == nullptr) {
+        LOG(ERROR) << "Unable to create encoder for type " << destinationMime;
+        return AMEDIA_ERROR_UNSUPPORTED;
+    }
+    mEncoder = std::make_shared<CodecWrapper>(encoder, shared_from_this());
+
+    status = AMediaCodec_configure(mEncoder->getCodec(), mDestinationFormat.get(),
+                                   NULL /* surface */, NULL /* crypto */,
+                                   AMEDIACODEC_CONFIGURE_FLAG_ENCODE);
+    if (status != AMEDIA_OK) {
+        LOG(ERROR) << "Unable to configure video encoder: " << status;
+        return status;
+    }
+
+    status = AMediaCodec_createInputSurface(mEncoder->getCodec(), &mSurface);
+    if (status != AMEDIA_OK) {
+        LOG(ERROR) << "Unable to create an encoder input surface: %d" << status;
+        return status;
+    }
+
+    // Create and configure the decoder.
+    const char* sourceMime = nullptr;
+    ok = AMediaFormat_getString(mSourceFormat.get(), AMEDIAFORMAT_KEY_MIME, &sourceMime);
+    if (!ok) {
+        LOG(ERROR) << "Source MIME type is required for transcoding.";
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    mDecoder = AMediaCodec_createDecoderByType(sourceMime);
+    if (mDecoder == nullptr) {
+        LOG(ERROR) << "Unable to create decoder for type " << sourceMime;
+        return AMEDIA_ERROR_UNSUPPORTED;
+    }
+
+    auto decoderFormat = std::shared_ptr<AMediaFormat>(AMediaFormat_new(), &AMediaFormat_delete);
+    if (!decoderFormat ||
+        AMediaFormat_copy(decoderFormat.get(), mSourceFormat.get()) != AMEDIA_OK) {
+        LOG(ERROR) << "Unable to copy source format";
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    // Prevent decoder from overwriting frames that the encoder has not yet consumed.
+    AMediaFormat_setInt32(decoderFormat.get(), TBD_AMEDIACODEC_PARAMETER_KEY_ALLOW_FRAME_DROP, 0);
+
+    // Copy over configurations that apply to both encoder and decoder.
+    static const EntryCopier kEncoderEntriesToCopy[] = {
+            ENTRY_COPIER2(AMEDIAFORMAT_KEY_OPERATING_RATE, Float, Int32),
+            ENTRY_COPIER(AMEDIAFORMAT_KEY_PRIORITY, Int32),
+    };
+    const size_t entryCount = sizeof(kEncoderEntriesToCopy) / sizeof(kEncoderEntriesToCopy[0]);
+    CopyFormatEntries(mDestinationFormat.get(), decoderFormat.get(), kEncoderEntriesToCopy,
+                      entryCount);
+
+    status = AMediaCodec_configure(mDecoder, decoderFormat.get(), mSurface, NULL /* crypto */,
+                                   0 /* flags */);
+    if (status != AMEDIA_OK) {
+        LOG(ERROR) << "Unable to configure video decoder: " << status;
+        return status;
+    }
+
+    // Configure codecs to run in async mode.
+    AMediaCodecOnAsyncNotifyCallback asyncCodecCallbacks = {
+            .onAsyncInputAvailable = AsyncCodecCallbackDispatch::onAsyncInputAvailable,
+            .onAsyncOutputAvailable = AsyncCodecCallbackDispatch::onAsyncOutputAvailable,
+            .onAsyncFormatChanged = AsyncCodecCallbackDispatch::onAsyncFormatChanged,
+            .onAsyncError = AsyncCodecCallbackDispatch::onAsyncError};
+
+    // Note: The decoder does not need its own wrapper because its lifetime is tied to the
+    // transcoder. However, the same callbacks are reused for the decoder and the encoder, so we
+    // pass the encoder wrapper as userdata here but never read the codec from it in the callback.
+    status = AMediaCodec_setAsyncNotifyCallback(mDecoder, asyncCodecCallbacks, mEncoder.get());
+    if (status != AMEDIA_OK) {
+        LOG(ERROR) << "Unable to set decoder to async mode: " << status;
+        return status;
+    }
+
+    status = AMediaCodec_setAsyncNotifyCallback(mEncoder->getCodec(), asyncCodecCallbacks,
+                                                mEncoder.get());
+    if (status != AMEDIA_OK) {
+        LOG(ERROR) << "Unable to set encoder to async mode: " << status;
+        return status;
+    }
+
+    return AMEDIA_OK;
+}
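+
+// A minimal sketch (compiled out) of a destination format a caller might pass to configure().
+// In the MediaTranscoder flow this is merged with the source track format, which supplies the
+// width, height and frame rate. If the bitrate were omitted it would be estimated from the
+// source; the i-frame interval, operating rate and priority fall back to the defaults above.
+#if 0
+static std::shared_ptr<AMediaFormat> exampleDestinationFormat() {
+    std::shared_ptr<AMediaFormat> format(AMediaFormat_new(), &AMediaFormat_delete);
+    AMediaFormat_setString(format.get(), AMEDIAFORMAT_KEY_MIME, AMEDIA_MIMETYPE_VIDEO_AVC);
+    AMediaFormat_setInt32(format.get(), AMEDIAFORMAT_KEY_BIT_RATE, 2 * 1000 * 1000);
+    return format;
+}
+#endif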
+
+void VideoTrackTranscoder::enqueueInputSample(int32_t bufferIndex) {
+    media_status_t status = AMEDIA_OK;
+
+    if (mEosFromSource) {
+        return;
+    }
+
+    status = mMediaSampleReader->getSampleInfoForTrack(mTrackIndex, &mSampleInfo);
+    if (status != AMEDIA_OK && status != AMEDIA_ERROR_END_OF_STREAM) {
+        LOG(ERROR) << "Error getting next sample info: " << status;
+        mStatus = status;
+        return;
+    }
+    const bool endOfStream = (status == AMEDIA_ERROR_END_OF_STREAM);
+
+    if (!endOfStream) {
+        size_t bufferSize = 0;
+        uint8_t* sourceBuffer = AMediaCodec_getInputBuffer(mDecoder, bufferIndex, &bufferSize);
+        if (sourceBuffer == nullptr) {
+            LOG(ERROR) << "Decoder returned a NULL input buffer.";
+            mStatus = AMEDIA_ERROR_UNKNOWN;
+            return;
+        } else if (bufferSize < mSampleInfo.size) {
+            LOG(ERROR) << "Decoder returned an input buffer that is smaller than the sample.";
+            mStatus = AMEDIA_ERROR_UNKNOWN;
+            return;
+        }
+
+        status = mMediaSampleReader->readSampleDataForTrack(mTrackIndex, sourceBuffer,
+                                                            mSampleInfo.size);
+        if (status != AMEDIA_OK) {
+            LOG(ERROR) << "Unable to read next sample data. Aborting transcode.";
+            mStatus = status;
+            return;
+        }
+    } else {
+        LOG(DEBUG) << "EOS from source.";
+        mEosFromSource = true;
+    }
+
+    status = AMediaCodec_queueInputBuffer(mDecoder, bufferIndex, 0, mSampleInfo.size,
+                                          mSampleInfo.presentationTimeUs, mSampleInfo.flags);
+    if (status != AMEDIA_OK) {
+        LOG(ERROR) << "Unable to queue input buffer for decode: " << status;
+        mStatus = status;
+        return;
+    }
+}
+
+void VideoTrackTranscoder::transferBuffer(int32_t bufferIndex, AMediaCodecBufferInfo bufferInfo) {
+    if (bufferIndex >= 0) {
+        bool needsRender = bufferInfo.size > 0;
+        AMediaCodec_releaseOutputBuffer(mDecoder, bufferIndex, needsRender);
+    }
+
+    if (bufferInfo.flags & AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM) {
+        LOG(DEBUG) << "EOS from decoder.";
+        media_status_t status = AMediaCodec_signalEndOfInputStream(mEncoder->getCodec());
+        if (status != AMEDIA_OK) {
+            LOG(ERROR) << "SignalEOS on encoder returned error: " << status;
+            mStatus = status;
+        }
+    }
+}
+
+void VideoTrackTranscoder::dequeueOutputSample(int32_t bufferIndex,
+                                               AMediaCodecBufferInfo bufferInfo) {
+    if (bufferIndex >= 0) {
+        size_t sampleSize = 0;
+        uint8_t* buffer =
+                AMediaCodec_getOutputBuffer(mEncoder->getCodec(), bufferIndex, &sampleSize);
+
+        MediaSample::OnSampleReleasedCallback bufferReleaseCallback =
+                [encoder = mEncoder](MediaSample* sample) {
+                    AMediaCodec_releaseOutputBuffer(encoder->getCodec(), sample->bufferId,
+                                                    false /* render */);
+                };
+
+        std::shared_ptr<MediaSample> sample = MediaSample::createWithReleaseCallback(
+                buffer, bufferInfo.offset, bufferIndex, bufferReleaseCallback);
+        sample->info.size = bufferInfo.size;
+        sample->info.flags = bufferInfo.flags;
+        sample->info.presentationTimeUs = bufferInfo.presentationTimeUs;
+
+        onOutputSampleAvailable(sample);
+    } else if (bufferIndex == AMEDIACODEC_INFO_OUTPUT_FORMAT_CHANGED) {
+        AMediaFormat* newFormat = AMediaCodec_getOutputFormat(mEncoder->getCodec());
+        LOG(DEBUG) << "Encoder output format changed: " << AMediaFormat_toString(newFormat);
+    }
+
+    if (bufferInfo.flags & AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM) {
+        LOG(DEBUG) << "EOS from encoder.";
+        mEosFromEncoder = true;
+    }
+}
+
+void VideoTrackTranscoder::updateTrackFormat(AMediaFormat* outputFormat) {
+    if (mActualOutputFormat != nullptr) {
+        LOG(WARNING) << "Ignoring duplicate format change.";
+        return;
+    }
+
+    AMediaFormat* formatCopy = AMediaFormat_new();
+    if (!formatCopy || AMediaFormat_copy(formatCopy, outputFormat) != AMEDIA_OK) {
+        LOG(ERROR) << "Unable to copy outputFormat";
+        AMediaFormat_delete(formatCopy);
+        mStatus = AMEDIA_ERROR_INVALID_PARAMETER;
+        return;
+    }
+
+    // Generate the actual track format for the muxer based on the encoder output format,
+    // since much vital information (e.g. the CSD) comes in the encoder format.
+    // Transfer necessary fields from the user-configured track format (derived from the
+    // source track format and the user's transcoding request) where needed.
+
+    // Transfer SAR settings:
+    // If mDestinationFormat has SAR set, it means the original source has SAR specified
+    // at container level. This is supposed to override any SAR settings in the bitstream,
+    // thus should always be transferred to the container of the transcoded file.
+    int32_t sarWidth, sarHeight;
+    if (AMediaFormat_getInt32(mSourceFormat.get(), AMEDIAFORMAT_KEY_SAR_WIDTH, &sarWidth) &&
+        (sarWidth > 0) &&
+        AMediaFormat_getInt32(mSourceFormat.get(), AMEDIAFORMAT_KEY_SAR_HEIGHT, &sarHeight) &&
+        (sarHeight > 0)) {
+        AMediaFormat_setInt32(formatCopy, AMEDIAFORMAT_KEY_SAR_WIDTH, sarWidth);
+        AMediaFormat_setInt32(formatCopy, AMEDIAFORMAT_KEY_SAR_HEIGHT, sarHeight);
+    }
+    // Transfer DAR settings.
+    int32_t displayWidth, displayHeight;
+    if (AMediaFormat_getInt32(mSourceFormat.get(), AMEDIAFORMAT_KEY_DISPLAY_WIDTH, &displayWidth) &&
+        (displayWidth > 0) &&
+        AMediaFormat_getInt32(mSourceFormat.get(), AMEDIAFORMAT_KEY_DISPLAY_HEIGHT,
+                              &displayHeight) &&
+        (displayHeight > 0)) {
+        AMediaFormat_setInt32(formatCopy, AMEDIAFORMAT_KEY_DISPLAY_WIDTH, displayWidth);
+        AMediaFormat_setInt32(formatCopy, AMEDIAFORMAT_KEY_DISPLAY_HEIGHT, displayHeight);
+    }
+
+    // Transfer rotation settings.
+    // Note that muxer itself doesn't take rotation from the track format. It requires
+    // AMediaMuxer_setOrientationHint to set the rotation. Here we pass the rotation to
+    // MediaSampleWriter using the track format. MediaSampleWriter will then call
+    // AMediaMuxer_setOrientationHint as needed.
+    int32_t rotation;
+    if (AMediaFormat_getInt32(mSourceFormat.get(), AMEDIAFORMAT_KEY_ROTATION, &rotation) &&
+        (rotation != 0)) {
+        AMediaFormat_setInt32(formatCopy, AMEDIAFORMAT_KEY_ROTATION, rotation);
+    }
+
+    // Transfer track duration.
+    // Preserve the source track duration by sending it to MediaSampleWriter.
+    int64_t durationUs;
+    if (AMediaFormat_getInt64(mSourceFormat.get(), AMEDIAFORMAT_KEY_DURATION, &durationUs) &&
+        durationUs > 0) {
+        AMediaFormat_setInt64(formatCopy, AMEDIAFORMAT_KEY_DURATION, durationUs);
+    }
+
+    // TODO: transfer other fields as required.
+
+    mActualOutputFormat = std::shared_ptr<AMediaFormat>(formatCopy, &AMediaFormat_delete);
+
+    notifyTrackFormatAvailable();
+}
+
+media_status_t VideoTrackTranscoder::runTranscodeLoop() {
+    androidSetThreadPriority(0 /* tid (0 = current) */, ANDROID_PRIORITY_VIDEO);
+
+    // Push the decoder and encoder start calls as two separate messages, so that they are
+    // subject to the stop request as well. If the session is cancelled (or paused) immediately
+    // after start, we don't waste time starting and then stopping the codecs.
+    mCodecMessageQueue.push([this] {
+        media_status_t status = AMediaCodec_start(mDecoder);
+        if (status != AMEDIA_OK) {
+            LOG(ERROR) << "Unable to start video decoder: " << status;
+            mStatus = status;
+        }
+    });
+
+    mCodecMessageQueue.push([this] {
+        media_status_t status = AMediaCodec_start(mEncoder->getCodec());
+        if (status != AMEDIA_OK) {
+            LOG(ERROR) << "Unable to start video encoder: " << status;
+            mStatus = status;
+        }
+        mEncoder->setStarted();
+    });
+
+    // Process codec events until EOS is reached, transcoding is stopped or an error occurs.
+    while (!mStopRequested && !mEosFromEncoder && mStatus == AMEDIA_OK) {
+        std::function<void()> message = mCodecMessageQueue.pop();
+        message();
+    }
+
+    mCodecMessageQueue.abort();
+    AMediaCodec_stop(mDecoder);
+
+    // Return error if transcoding was stopped before it finished.
+    if (mStopRequested && !mEosFromEncoder && mStatus == AMEDIA_OK) {
+        mStatus = AMEDIA_ERROR_UNKNOWN;  // TODO: Define custom error codes?
+    }
+
+    return mStatus;
+}
+
+void VideoTrackTranscoder::abortTranscodeLoop() {
+    // Push abort message to the front of the codec event queue.
+    mCodecMessageQueue.push([this] { mStopRequested = true; }, true /* front */);
+}
+
+std::shared_ptr<AMediaFormat> VideoTrackTranscoder::getOutputFormat() const {
+    return mActualOutputFormat;
+}
+
+}  // namespace android
diff --git a/media/libmediatranscoding/transcoder/benchmark/Android.bp b/media/libmediatranscoding/transcoder/benchmark/Android.bp
new file mode 100644
index 0000000..ce34702
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/benchmark/Android.bp
@@ -0,0 +1,23 @@
+cc_defaults {
+    name: "benchmarkdefaults",
+    shared_libs: ["libmediatranscoder", "libmediandk", "libbase", "libbinder_ndk"],
+    static_libs: ["libgoogle-benchmark"],
+}
+
+cc_test {
+    name: "MediaTranscoderBenchmark",
+    srcs: ["MediaTranscoderBenchmark.cpp"],
+    defaults: ["benchmarkdefaults"],
+}
+
+cc_test {
+    name: "MediaSampleReaderBenchmark",
+    srcs: ["MediaSampleReaderBenchmark.cpp"],
+    defaults: ["benchmarkdefaults"],
+}
+
+cc_test {
+    name: "MediaTrackTranscoderBenchmark",
+    srcs: ["MediaTrackTranscoderBenchmark.cpp"],
+    defaults: ["benchmarkdefaults"],
+}
diff --git a/media/libmediatranscoding/transcoder/benchmark/MediaSampleReaderBenchmark.cpp b/media/libmediatranscoding/transcoder/benchmark/MediaSampleReaderBenchmark.cpp
new file mode 100644
index 0000000..f0b9304
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/benchmark/MediaSampleReaderBenchmark.cpp
@@ -0,0 +1,154 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * MediaSampleReader benchmark tests.
+ *
+ * How to run the benchmark:
+ *
+ * 1. Download the media assets from http://go/transcodingbenchmark and push the directory
+ *    ("TranscodingBenchmark") to /data/local/tmp.
+ *
+ * 2. Compile the benchmark and sync to device:
+ *      $ mm -j72 && adb sync
+ *
+ * 3. Run:
+ *      $ adb shell /data/nativetest64/MediaSampleReaderBenchmark/MediaSampleReaderBenchmark
+ */
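+
+/**
+ * To run only a subset of the benchmarks, a standard google-benchmark filter can be appended
+ * (the flag comes from the benchmark library, not from this test), for example:
+ *      $ adb shell /data/nativetest64/MediaSampleReaderBenchmark/MediaSampleReaderBenchmark \
+ *            --benchmark_filter=BM_MediaSampleReader_Video
+ */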
+
+#define LOG_TAG "MediaSampleReaderBenchmark"
+
+#include <android-base/logging.h>
+#include <benchmark/benchmark.h>
+#include <fcntl.h>
+#include <media/MediaSampleReaderNDK.h>
+#include <unistd.h>
+
+#include <thread>
+
+using namespace android;
+
+static void ReadMediaSamples(benchmark::State& state, const std::string& srcFileName,
+                             bool readAudio, bool sequentialAccess = false) {
+    // Asset directory.
+    static const std::string kAssetDirectory = "/data/local/tmp/TranscodingBenchmark/";
+
+    int srcFd = 0;
+    std::string srcPath = kAssetDirectory + srcFileName;
+
+    if ((srcFd = open(srcPath.c_str(), O_RDONLY)) < 0) {
+        state.SkipWithError("Unable to open source file");
+        return;
+    }
+
+    const size_t fileSize = lseek(srcFd, 0, SEEK_END);
+    lseek(srcFd, 0, SEEK_SET);
+
+    for (auto _ : state) {
+        auto sampleReader = MediaSampleReaderNDK::createFromFd(srcFd, 0, fileSize);
+        if (sampleReader->setEnforceSequentialAccess(sequentialAccess) != AMEDIA_OK) {
+            state.SkipWithError("setEnforceSequentialAccess failed");
+            return;
+        }
+
+        // Select tracks.
+        std::vector<int> trackIndices;
+        for (int trackIndex = 0; trackIndex < sampleReader->getTrackCount(); ++trackIndex) {
+            const char* mime = nullptr;
+
+            AMediaFormat* trackFormat = sampleReader->getTrackFormat(trackIndex);
+            AMediaFormat_getString(trackFormat, AMEDIAFORMAT_KEY_MIME, &mime);
+
+            if (strncmp(mime, "video/", 6) == 0) {
+                int32_t frameCount;
+                if (AMediaFormat_getInt32(trackFormat, AMEDIAFORMAT_KEY_FRAME_COUNT, &frameCount)) {
+                    state.counters["VideoFrameRate"] =
+                            benchmark::Counter(frameCount, benchmark::Counter::kIsRate);
+                }
+            } else if (!readAudio && strncmp(mime, "audio/", 6) == 0) {
+                continue;
+            }
+
+            trackIndices.push_back(trackIndex);
+            sampleReader->selectTrack(trackIndex);
+        }
+
+        // Start threads.
+        std::vector<std::thread> trackThreads;
+        for (auto trackIndex : trackIndices) {
+            trackThreads.emplace_back([trackIndex, sampleReader, &state] {
+                LOG(INFO) << "Track " << trackIndex << " started";
+                MediaSampleInfo info;
+
+                size_t bufferSize = 0;
+                std::unique_ptr<uint8_t[]> buffer;
+
+                while (true) {
+                    media_status_t status = sampleReader->getSampleInfoForTrack(trackIndex, &info);
+                    if (status == AMEDIA_ERROR_END_OF_STREAM) {
+                        break;
+                    }
+
+                    if (info.size > bufferSize) {
+                        bufferSize = info.size;
+                        buffer.reset(new uint8_t[bufferSize]);
+                    }
+
+                    status = sampleReader->readSampleDataForTrack(trackIndex, buffer.get(),
+                                                                  bufferSize);
+                    if (status != AMEDIA_OK) {
+                        state.SkipWithError("Error reading sample data");
+                        break;
+                    }
+                }
+
+                LOG(INFO) << "Track " << trackIndex << " finished";
+            });
+        }
+
+        // Join threads.
+        for (auto& thread : trackThreads) {
+            thread.join();
+        }
+    }
+
+    close(srcFd);
+}
+
+// Benchmark registration wrapper for transcoding.
+#define TRANSCODER_BENCHMARK(func) \
+    BENCHMARK(func)->UseRealTime()->MeasureProcessCPUTime()->Unit(benchmark::kMillisecond)
+
+static void BM_MediaSampleReader_AudioVideo_Parallel(benchmark::State& state) {
+    ReadMediaSamples(state, "video_1920x1080_3648frame_h264_22Mbps_30fps_aac.mp4",
+                     true /* readAudio */);
+}
+
+static void BM_MediaSampleReader_AudioVideo_Sequential(benchmark::State& state) {
+    ReadMediaSamples(state, "video_1920x1080_3648frame_h264_22Mbps_30fps_aac.mp4",
+                     true /* readAudio */, true /* sequentialAccess */);
+}
+
+static void BM_MediaSampleReader_Video(benchmark::State& state) {
+    ReadMediaSamples(state, "video_1920x1080_3648frame_h264_22Mbps_30fps_aac.mp4",
+                     false /* readAudio */);
+}
+
+TRANSCODER_BENCHMARK(BM_MediaSampleReader_AudioVideo_Parallel);
+TRANSCODER_BENCHMARK(BM_MediaSampleReader_AudioVideo_Sequential);
+TRANSCODER_BENCHMARK(BM_MediaSampleReader_Video);
+
+BENCHMARK_MAIN();
diff --git a/media/libmediatranscoding/transcoder/benchmark/MediaTrackTranscoderBenchmark.cpp b/media/libmediatranscoding/transcoder/benchmark/MediaTrackTranscoderBenchmark.cpp
new file mode 100644
index 0000000..aee0ed6
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/benchmark/MediaTrackTranscoderBenchmark.cpp
@@ -0,0 +1,446 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Native media track transcoder benchmark tests.
+ *
+ * How to run the benchmark:
+ *
+ * 1. Download the media assets from http://go/transcodingbenchmark and push the directory
+ *    ("TranscodingBenchmark") to /data/local/tmp.
+ *
+ * 2. Compile the benchmark and sync to device:
+ *      $ mm -j72 && adb sync
+ *
+ * 3. Run:
+ *      $ adb shell /data/nativetest64/MediaTrackTranscoderBenchmark/MediaTrackTranscoderBenchmark
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "MediaTrackTranscoderBenchmark"
+
+#include <android-base/logging.h>
+#include <android/binder_process.h>
+#include <benchmark/benchmark.h>
+#include <fcntl.h>
+#include <media/MediaSampleReader.h>
+#include <media/MediaSampleReaderNDK.h>
+#include <media/MediaTrackTranscoder.h>
+#include <media/MediaTrackTranscoderCallback.h>
+#include <media/NdkCommon.h>
+#include <media/PassthroughTrackTranscoder.h>
+#include <media/VideoTrackTranscoder.h>
+
+using namespace android;
+
+typedef enum {
+    kVideo,
+    kAudio,
+} MediaType;
+
+class TrackTranscoderCallbacks : public MediaTrackTranscoderCallback {
+public:
+    virtual void onTrackFormatAvailable(const MediaTrackTranscoder* transcoder __unused) override {}
+
+    virtual void onTrackFinished(const MediaTrackTranscoder* transcoder __unused) override {
+        std::unique_lock lock(mMutex);
+        mFinished = true;
+        mCondition.notify_all();
+    }
+
+    virtual void onTrackError(const MediaTrackTranscoder* transcoder __unused,
+                              media_status_t status) override {
+        std::unique_lock lock(mMutex);
+        mFinished = true;
+        mStatus = status;
+        mCondition.notify_all();
+    }
+
+    void waitForTranscodingFinished() {
+        std::unique_lock lock(mMutex);
+        while (!mFinished) {
+            mCondition.wait(lock);
+        }
+    }
+
+    media_status_t mStatus = AMEDIA_OK;
+
+private:
+    std::mutex mMutex;
+    std::condition_variable mCondition;
+    bool mFinished = false;
+};
+
+/**
+ * MockSampleReader holds a ringbuffer of the first samples in the provided source track. Samples
+ * are returned to the caller from the ringbuffer in a round-robin fashion with increasing
+ * timestamps. The number of samples returned before EOS matches the number of frames in the source
+ * track.
+ */
+class MockSampleReader : public MediaSampleReader {
+public:
+    static std::shared_ptr<MediaSampleReader> createFromFd(int fd, size_t offset, size_t size) {
+        AMediaExtractor* extractor = AMediaExtractor_new();
+        media_status_t status = AMediaExtractor_setDataSourceFd(extractor, fd, offset, size);
+        if (status != AMEDIA_OK) return nullptr;
+
+        auto sampleReader = std::shared_ptr<MockSampleReader>(new MockSampleReader(extractor));
+        return sampleReader;
+    }
+
+    AMediaFormat* getFileFormat() override { return AMediaExtractor_getFileFormat(mExtractor); }
+
+    size_t getTrackCount() const override { return AMediaExtractor_getTrackCount(mExtractor); }
+
+    AMediaFormat* getTrackFormat(int trackIndex) override {
+        return AMediaExtractor_getTrackFormat(mExtractor, trackIndex);
+    }
+
+    media_status_t selectTrack(int trackIndex) override {
+        if (mSelectedTrack >= 0) return AMEDIA_ERROR_UNSUPPORTED;
+        mSelectedTrack = trackIndex;
+
+        media_status_t status = AMediaExtractor_selectTrack(mExtractor, trackIndex);
+        if (status != AMEDIA_OK) return status;
+
+        // Get the sample count.
+        AMediaFormat* format = getTrackFormat(trackIndex);
+        const bool haveSampleCount =
+                AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_FRAME_COUNT, &mSampleCount);
+        AMediaFormat_delete(format);
+
+        if (!haveSampleCount) {
+            LOG(ERROR) << "No sample count in track format.";
+            return AMEDIA_ERROR_UNSUPPORTED;
+        }
+
+        // Buffer samples.
+        const int32_t targetBufferCount = 60;
+        std::unique_ptr<uint8_t[]> buffer;
+        MediaSampleInfo info;
+        while (true) {
+            info.presentationTimeUs = AMediaExtractor_getSampleTime(mExtractor);
+            info.flags = AMediaExtractor_getSampleFlags(mExtractor);
+            info.size = AMediaExtractor_getSampleSize(mExtractor);
+
+            // Finish buffering either after reading all the samples in the track or after
+            // completing the GOP that satisfies the target buffer count.
+            if (mSamples.size() == mSampleCount ||
+                (mSamples.size() >= targetBufferCount && info.flags & SAMPLE_FLAG_SYNC_SAMPLE)) {
+                break;
+            }
+
+            buffer.reset(new uint8_t[info.size]);
+
+            ssize_t bytesRead = AMediaExtractor_readSampleData(mExtractor, buffer.get(), info.size);
+            if (bytesRead != info.size) {
+                return AMEDIA_ERROR_UNKNOWN;
+            }
+
+            mSamples.emplace_back(std::move(buffer), info);
+
+            AMediaExtractor_advance(mExtractor);
+        }
+
+        mFirstPtsUs = mSamples[0].second.presentationTimeUs;
+        mPtsDiff = mSamples[1].second.presentationTimeUs - mSamples[0].second.presentationTimeUs;
+
+        return AMEDIA_OK;
+    }
+
+    media_status_t setEnforceSequentialAccess(bool enforce __unused) override { return AMEDIA_OK; }
+
+    media_status_t getEstimatedBitrateForTrack(int trackIndex __unused,
+                                               int32_t* bitrate __unused) override {
+        return AMEDIA_ERROR_UNSUPPORTED;
+    }
+
+    media_status_t getSampleInfoForTrack(int trackIndex, MediaSampleInfo* info) override {
+        if (trackIndex != mSelectedTrack) return AMEDIA_ERROR_INVALID_PARAMETER;
+
+        if (mCurrentSampleIndex >= mSampleCount) {
+            info->presentationTimeUs = 0;
+            info->size = 0;
+            info->flags = SAMPLE_FLAG_END_OF_STREAM;
+            return AMEDIA_ERROR_END_OF_STREAM;
+        }
+
+        *info = mSamples[mCurrentSampleIndex % mSamples.size()].second;
+        info->presentationTimeUs = mFirstPtsUs + mCurrentSampleIndex * mPtsDiff;
+        return AMEDIA_OK;
+    }
+
+    media_status_t readSampleDataForTrack(int trackIndex, uint8_t* buffer,
+                                          size_t bufferSize) override {
+        if (trackIndex != mSelectedTrack) return AMEDIA_ERROR_INVALID_PARAMETER;
+
+        if (mCurrentSampleIndex >= mSampleCount) return AMEDIA_ERROR_END_OF_STREAM;
+
+        auto& p = mSamples[mCurrentSampleIndex % mSamples.size()];
+
+        if (bufferSize < p.second.size) return AMEDIA_ERROR_INVALID_PARAMETER;
+        memcpy(buffer, p.first.get(), p.second.size);
+
+        advanceTrack(trackIndex);
+        return AMEDIA_OK;
+    }
+
+    void advanceTrack(int trackIndex) {
+        if (trackIndex != mSelectedTrack) return;
+        ++mCurrentSampleIndex;
+    }
+
+    virtual ~MockSampleReader() override { AMediaExtractor_delete(mExtractor); }
+
+private:
+    MockSampleReader(AMediaExtractor* extractor) : mExtractor(extractor) {}
+    AMediaExtractor* mExtractor = nullptr;
+    int32_t mSampleCount = 0;
+    std::vector<std::pair<std::unique_ptr<uint8_t[]>, MediaSampleInfo>> mSamples;
+    int mSelectedTrack = -1;
+    int32_t mCurrentSampleIndex = 0;
+    int64_t mFirstPtsUs = 0;
+    int64_t mPtsDiff = 0;
+};
+
+static std::shared_ptr<AMediaFormat> GetDefaultTrackFormat(MediaType mediaType,
+                                                           AMediaFormat* sourceFormat) {
+    // Default video config.
+    static constexpr int32_t kVideoBitRate = 20 * 1000 * 1000;  // 20 Mbps
+    static constexpr float kVideoFrameRate = 30.0f;             // 30 fps
+
+    AMediaFormat* format = nullptr;
+
+    if (mediaType == kVideo) {
+        format = AMediaFormat_new();
+        AMediaFormat_copy(format, sourceFormat);
+        AMediaFormat_setString(format, AMEDIAFORMAT_KEY_MIME, AMEDIA_MIMETYPE_VIDEO_AVC);
+        AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_BIT_RATE, kVideoBitRate);
+        AMediaFormat_setFloat(format, AMEDIAFORMAT_KEY_FRAME_RATE, kVideoFrameRate);
+    }
+    // nothing for audio.
+
+    return std::shared_ptr<AMediaFormat>(format, &AMediaFormat_delete);
+}
+
+/** Gets a MediaSampleReader for the source file */
+static std::shared_ptr<MediaSampleReader> GetSampleReader(const std::string& srcFileName,
+                                                          bool mock) {
+    // Asset directory
+    static const std::string kAssetDirectory = "/data/local/tmp/TranscodingBenchmark/";
+
+    int srcFd = 0;
+    std::string srcPath = kAssetDirectory + srcFileName;
+
+    if ((srcFd = open(srcPath.c_str(), O_RDONLY)) < 0) {
+        return nullptr;
+    }
+
+    const size_t fileSize = lseek(srcFd, 0, SEEK_END);
+    lseek(srcFd, 0, SEEK_SET);
+
+    std::shared_ptr<MediaSampleReader> sampleReader;
+
+    if (mock) {
+        sampleReader = MockSampleReader::createFromFd(srcFd, 0 /* offset */, fileSize);
+    } else {
+        sampleReader = MediaSampleReaderNDK::createFromFd(srcFd, 0 /* offset */, fileSize);
+    }
+
+    if (srcFd > 0) close(srcFd);
+    return sampleReader;
+}
+
+/**
+ * Configures a MediaTrackTranscoder with an empty sample consumer so that the samples are returned
+ * to the transcoder immediately.
+ */
+static void ConfigureEmptySampleConsumer(const std::shared_ptr<MediaTrackTranscoder>& transcoder,
+                                         uint32_t& sampleCount) {
+    transcoder->setSampleConsumer([&sampleCount](const std::shared_ptr<MediaSample>& sample) {
+        if (!(sample->info.flags & SAMPLE_FLAG_CODEC_CONFIG) && sample->info.size > 0) {
+            ++sampleCount;
+        }
+    });
+}
+
+/**
+ * Callback to edit track format for transcoding.
+ * @param dstFormat The default track format for the track type.
+ */
+using TrackFormatEditCallback = std::function<void(AMediaFormat* dstFormat)>;
+
+/**
+ * Configures a MediaTrackTranscoder with the provided MediaSampleReader, reading from the first
+ * track that matches the specified media type.
+ */
+static bool ConfigureSampleReader(const std::shared_ptr<MediaTrackTranscoder>& transcoder,
+                                  const std::shared_ptr<MediaSampleReader>& sampleReader,
+                                  MediaType mediaType,
+                                  const TrackFormatEditCallback& formatEditor) {
+    int srcTrackIndex = -1;
+    std::shared_ptr<AMediaFormat> srcTrackFormat = nullptr;
+
+    for (int trackIndex = 0; trackIndex < sampleReader->getTrackCount(); ++trackIndex) {
+        AMediaFormat* trackFormat = sampleReader->getTrackFormat(trackIndex);
+
+        const char* mime = nullptr;
+        AMediaFormat_getString(trackFormat, AMEDIAFORMAT_KEY_MIME, &mime);
+
+        if ((mediaType == kVideo && strncmp(mime, "video/", 6) == 0) ||
+            (mediaType == kAudio && strncmp(mime, "audio/", 6) == 0)) {
+            srcTrackIndex = trackIndex;
+            srcTrackFormat = std::shared_ptr<AMediaFormat>(trackFormat, &AMediaFormat_delete);
+            break;
+        }
+        AMediaFormat_delete(trackFormat);
+    }
+
+    if (srcTrackIndex == -1) {
+        LOG(ERROR) << "No matching source track found";
+        return false;
+    }
+
+    media_status_t status = sampleReader->selectTrack(srcTrackIndex);
+    if (status != AMEDIA_OK) {
+        LOG(ERROR) << "Unable to select track";
+        return false;
+    }
+
+    auto destinationFormat = GetDefaultTrackFormat(mediaType, srcTrackFormat.get());
+    if (formatEditor != nullptr) {
+        formatEditor(destinationFormat.get());
+    }
+    status = transcoder->configure(sampleReader, srcTrackIndex, destinationFormat);
+    if (status != AMEDIA_OK) {
+        LOG(ERROR) << "transcoder configure returned " << status;
+        return false;
+    }
+
+    return true;
+}
+
+static void BenchmarkTranscoder(benchmark::State& state, const std::string& srcFileName,
+                                bool mockReader, MediaType mediaType,
+                                const TrackFormatEditCallback& formatEditor = nullptr) {
+    static pthread_once_t once = PTHREAD_ONCE_INIT;
+    pthread_once(&once, ABinderProcess_startThreadPool);
+
+    for (auto _ : state) {
+        std::shared_ptr<TrackTranscoderCallbacks> callbacks =
+                std::make_shared<TrackTranscoderCallbacks>();
+        std::shared_ptr<MediaTrackTranscoder> transcoder;
+
+        if (mediaType == kVideo) {
+            transcoder = VideoTrackTranscoder::create(callbacks);
+        } else {
+            transcoder = std::make_shared<PassthroughTrackTranscoder>(callbacks);
+        }
+
+        std::shared_ptr<MediaSampleReader> sampleReader = GetSampleReader(srcFileName, mockReader);
+        if (sampleReader == nullptr) {
+            state.SkipWithError("Unable to create sample reader");
+            return;
+        }
+
+        if (!ConfigureSampleReader(transcoder, sampleReader, mediaType, formatEditor)) {
+            state.SkipWithError("Unable to configure the transcoder");
+            return;
+        }
+
+        uint32_t sampleCount = 0;
+        ConfigureEmptySampleConsumer(transcoder, sampleCount);
+
+        if (!transcoder->start()) {
+            state.SkipWithError("Unable to start the transcoder");
+            return;
+        }
+
+        callbacks->waitForTranscodingFinished();
+        transcoder->stop();
+
+        if (callbacks->mStatus != AMEDIA_OK) {
+            state.SkipWithError("Transcoder failed with error");
+            return;
+        }
+
+        LOG(DEBUG) << "Number of samples received: " << sampleCount;
+        state.counters["FrameRate"] = benchmark::Counter(sampleCount, benchmark::Counter::kIsRate);
+    }
+}
+
+static void BenchmarkTranscoderWithOperatingRate(benchmark::State& state,
+                                                 const std::string& srcFile, bool mockReader,
+                                                 MediaType mediaType) {
+    TrackFormatEditCallback editor;
+    const int32_t operatingRate = state.range(0);
+    const int32_t priority = state.range(1);
+
+    if (operatingRate >= 0 && priority >= 0) {
+        editor = [operatingRate, priority](AMediaFormat* format) {
+            AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_OPERATING_RATE, operatingRate);
+            AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_PRIORITY, priority);
+        };
+    }
+    BenchmarkTranscoder(state, srcFile, mockReader, mediaType, editor);
+}
+
+//-------------------------------- AVC to AVC Benchmarks -------------------------------------------
+
+static void BM_VideoTranscode_AVC2AVC(benchmark::State& state) {
+    const char* srcFile = "video_1920x1080_3648frame_h264_22Mbps_30fps_aac.mp4";
+    BenchmarkTranscoderWithOperatingRate(state, srcFile, false /* mockReader */, kVideo);
+}
+
+static void BM_VideoTranscode_AVC2AVC_NoExtractor(benchmark::State& state) {
+    const char* srcFile = "video_1920x1080_3648frame_h264_22Mbps_30fps_aac.mp4";
+    BenchmarkTranscoderWithOperatingRate(state, srcFile, true /* mockReader */, kVideo);
+}
+
+//-------------------------------- HEVC to AVC Benchmarks ------------------------------------------
+
+static void BM_VideoTranscode_HEVC2AVC(benchmark::State& state) {
+    const char* srcFile = "video_1920x1080_3863frame_hevc_4Mbps_30fps_aac.mp4";
+    BenchmarkTranscoderWithOperatingRate(state, srcFile, false /* mockReader */, kVideo);
+}
+
+static void BM_VideoTranscode_HEVC2AVC_NoExtractor(benchmark::State& state) {
+    const char* srcFile = "video_1920x1080_3863frame_hevc_4Mbps_30fps_aac.mp4";
+    BenchmarkTranscoderWithOperatingRate(state, srcFile, true /* mockReader */, kVideo);
+}
+
+//-------------------------------- Benchmark Registration ------------------------------------------
+
+// Benchmark registration wrapper for transcoding.
+#define TRANSCODER_BENCHMARK(func) \
+    BENCHMARK(func)->UseRealTime()->MeasureProcessCPUTime()->Unit(benchmark::kMillisecond)
+
+// Benchmark registration for testing different operating rate and priority combinations.
+#define TRANSCODER_OPERATING_RATE_BENCHMARK(func)  \
+    TRANSCODER_BENCHMARK(func)                     \
+            ->Args({-1, -1}) /* <-- Use default */ \
+            ->Args({240, 0})                       \
+            ->Args({INT32_MAX, 0})                 \
+            ->Args({240, 1})                       \
+            ->Args({INT32_MAX, 1})
+
+TRANSCODER_OPERATING_RATE_BENCHMARK(BM_VideoTranscode_AVC2AVC);
+TRANSCODER_OPERATING_RATE_BENCHMARK(BM_VideoTranscode_AVC2AVC_NoExtractor);
+
+TRANSCODER_OPERATING_RATE_BENCHMARK(BM_VideoTranscode_HEVC2AVC);
+TRANSCODER_OPERATING_RATE_BENCHMARK(BM_VideoTranscode_HEVC2AVC_NoExtractor);
+
+BENCHMARK_MAIN();
diff --git a/media/libmediatranscoding/transcoder/benchmark/MediaTranscoderBenchmark.cpp b/media/libmediatranscoding/transcoder/benchmark/MediaTranscoderBenchmark.cpp
new file mode 100644
index 0000000..465632f
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/benchmark/MediaTranscoderBenchmark.cpp
@@ -0,0 +1,335 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Native media transcoder library benchmark tests.
+ *
+ * How to run the benchmark:
+ *
+ * 1. Download the media assets from http://go/transcodingbenchmark and push the directory
+ *    ("TranscodingBenchmark") to /data/local/tmp.
+ *
+ * 2. Compile the benchmark and sync to device:
+ *      $ mm -j72 && adb sync
+ *
+ * 3. Run:
+ *      $ adb shell /data/nativetest64/MediaTranscoderBenchmark/MediaTranscoderBenchmark
+ */
+
+#include <benchmark/benchmark.h>
+#include <fcntl.h>
+#include <media/MediaTranscoder.h>
+
+using namespace android;
+
+class TranscoderCallbacks : public MediaTranscoder::CallbackInterface {
+public:
+    virtual void onFinished(const MediaTranscoder* transcoder __unused) override {
+        std::unique_lock<std::mutex> lock(mMutex);
+        mFinished = true;
+        mCondition.notify_all();
+    }
+
+    virtual void onError(const MediaTranscoder* transcoder __unused,
+                         media_status_t error) override {
+        std::unique_lock<std::mutex> lock(mMutex);
+        mFinished = true;
+        mStatus = error;
+        mCondition.notify_all();
+    }
+
+    virtual void onProgressUpdate(const MediaTranscoder* transcoder __unused,
+                                  int32_t progress __unused) override {}
+
+    virtual void onCodecResourceLost(const MediaTranscoder* transcoder __unused,
+                                     const std::shared_ptr<ndk::ScopedAParcel>& pausedState
+                                             __unused) override {}
+
+    bool waitForTranscodingFinished() {
+        std::unique_lock<std::mutex> lock(mMutex);
+        while (!mFinished) {
+            if (mCondition.wait_for(lock, std::chrono::minutes(5)) == std::cv_status::timeout) {
+                return false;
+            }
+        }
+        return true;
+    }
+
+    media_status_t mStatus = AMEDIA_OK;
+
+private:
+    std::mutex mMutex;
+    std::condition_variable mCondition;
+    bool mFinished = false;
+};
+
+static AMediaFormat* CreateDefaultVideoFormat() {
+    // Default bitrate
+    static constexpr int32_t kVideoBitRate = 20 * 1000 * 1000;  // 20 Mbps
+
+    AMediaFormat* videoFormat = AMediaFormat_new();
+    AMediaFormat_setInt32(videoFormat, AMEDIAFORMAT_KEY_BIT_RATE, kVideoBitRate);
+    return videoFormat;
+}
+
+/**
+ * Callback to configure tracks for transcoding.
+ * @param mime The source track mime type.
+ * @param dstFormat Output parameter set to the destination format if the track should be
+ * transcoded, or to nullptr if the track should be passed through.
+ * @return True if the track should be included in the output file.
+ */
+using TrackSelectionCallback = std::function<bool(const char* mime, AMediaFormat** dstFormat)>;
+
+static void TranscodeMediaFile(benchmark::State& state, const std::string& srcFileName,
+                               const std::string& dstFileName,
+                               TrackSelectionCallback trackSelectionCallback) {
+    // Write-only, create file if non-existent.
+    static constexpr int kDstOpenFlags = O_WRONLY | O_CREAT;
+    // User R+W permission.
+    static constexpr int kDstFileMode = S_IRUSR | S_IWUSR;
+    // Asset directory
+    static const std::string kAssetDirectory = "/data/local/tmp/TranscodingBenchmark/";
+
+    int srcFd = 0;
+    int dstFd = 0;
+
+    std::string srcPath = kAssetDirectory + srcFileName;
+    std::string dstPath = kAssetDirectory + dstFileName;
+
+    auto callbacks = std::make_shared<TranscoderCallbacks>();
+    media_status_t status = AMEDIA_OK;
+
+    if ((srcFd = open(srcPath.c_str(), O_RDONLY)) < 0) {
+        state.SkipWithError("Unable to open source file");
+        goto exit;
+    }
+    if ((dstFd = open(dstPath.c_str(), kDstOpenFlags, kDstFileMode)) < 0) {
+        state.SkipWithError("Unable to open destination file");
+        goto exit;
+    }
+
+    for (auto _ : state) {
+        auto transcoder = MediaTranscoder::create(callbacks, nullptr);
+
+        status = transcoder->configureSource(srcFd);
+        if (status != AMEDIA_OK) {
+            state.SkipWithError("Unable to configure transcoder source");
+            goto exit;
+        }
+
+        status = transcoder->configureDestination(dstFd);
+        if (status != AMEDIA_OK) {
+            state.SkipWithError("Unable to configure transcoder destination");
+            goto exit;
+        }
+
+        std::vector<std::shared_ptr<AMediaFormat>> trackFormats = transcoder->getTrackFormats();
+        for (int i = 0; i < trackFormats.size(); ++i) {
+            AMediaFormat* srcFormat = trackFormats[i].get();
+            AMediaFormat* dstFormat = nullptr;
+
+            const char* mime = nullptr;
+            if (!AMediaFormat_getString(srcFormat, AMEDIAFORMAT_KEY_MIME, &mime)) {
+                state.SkipWithError("Source track format does not have MIME type");
+                goto exit;
+            }
+
+            if (strncmp(mime, "video/", 6) == 0) {
+                int32_t frameCount;
+                if (AMediaFormat_getInt32(srcFormat, AMEDIAFORMAT_KEY_FRAME_COUNT, &frameCount)) {
+                    state.counters["VideoFrameRate"] =
+                            benchmark::Counter(frameCount, benchmark::Counter::kIsRate);
+                }
+            }
+
+            if (trackSelectionCallback(mime, &dstFormat)) {
+                status = transcoder->configureTrackFormat(i, dstFormat);
+            }
+
+            if (dstFormat != nullptr) {
+                AMediaFormat_delete(dstFormat);
+            }
+            if (status != AMEDIA_OK) {
+                state.SkipWithError("Unable to configure track");
+                goto exit;
+            }
+        }
+
+        status = transcoder->start();
+        if (status != AMEDIA_OK) {
+            state.SkipWithError("Unable to start transcoder");
+            goto exit;
+        }
+
+        if (!callbacks->waitForTranscodingFinished()) {
+            transcoder->cancel();
+            state.SkipWithError("Transcoder timed out");
+            goto exit;
+        }
+        if (callbacks->mStatus != AMEDIA_OK) {
+            state.SkipWithError("Transcoder error when running");
+            goto exit;
+        }
+    }
+
+exit:
+    if (srcFd > 0) close(srcFd);
+    if (dstFd > 0) close(dstFd);
+}
+
+/**
+ * Callback to edit track format for transcoding.
+ * @param dstFormat The default track format for the track type.
+ */
+using TrackFormatEditCallback = std::function<void(AMediaFormat* dstFormat)>;
+
+static void TranscodeMediaFile(benchmark::State& state, const std::string& srcFileName,
+                               const std::string& dstFileName, bool includeAudio,
+                               bool transcodeVideo,
+                               const TrackFormatEditCallback& videoFormatEditor = nullptr) {
+    TranscodeMediaFile(state, srcFileName, dstFileName,
+                       [=](const char* mime, AMediaFormat** dstFormatOut) -> bool {
+                           *dstFormatOut = nullptr;
+                           if (strncmp(mime, "video/", 6) == 0 && transcodeVideo) {
+                               *dstFormatOut = CreateDefaultVideoFormat();
+                               if (videoFormatEditor != nullptr) {
+                                   videoFormatEditor(*dstFormatOut);
+                               }
+                           } else if (strncmp(mime, "audio/", 6) == 0 && !includeAudio) {
+                               return false;
+                           }
+                           return true;
+                       });
+}
+
+static void SetMaxOperatingRate(AMediaFormat* format) {
+    AMediaFormat_setFloat(format, AMEDIAFORMAT_KEY_OPERATING_RATE, INT32_MAX);
+    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_PRIORITY, 1);
+}
+
+//-------------------------------- AVC to AVC Benchmarks -------------------------------------------
+
+static void BM_TranscodeAvc2AvcAudioVideo2AudioVideo(benchmark::State& state) {
+    TranscodeMediaFile(state, "video_1920x1080_3648frame_h264_22Mbps_30fps_aac.mp4",
+                       "video_1920x1080_3648frame_h264_22Mbps_30fps_aac_transcoded_AV.mp4",
+                       true /* includeAudio */, true /* transcodeVideo */);
+}
+
+static void BM_TranscodeAvc2AvcVideo2Video(benchmark::State& state) {
+    TranscodeMediaFile(state, "video_1920x1080_3648frame_h264_22Mbps_30fps.mp4",
+                       "video_1920x1080_3648frame_h264_22Mbps_30fps_transcoded_V.mp4",
+                       false /* includeAudio */, true /* transcodeVideo */);
+}
+
+static void BM_TranscodeAvc2AvcAV2AVMaxOperatingRate(benchmark::State& state) {
+    TranscodeMediaFile(state, "video_1920x1080_3648frame_h264_22Mbps_30fps_aac.mp4",
+                       "video_1920x1080_3648frame_h264_22Mbps_30fps_aac_transcoded_AV.mp4",
+                       true /* includeAudio */, true /* transcodeVideo */, SetMaxOperatingRate);
+}
+
+static void BM_TranscodeAvc2AvcV2VMaxOperatingRate(benchmark::State& state) {
+    TranscodeMediaFile(state, "video_1920x1080_3648frame_h264_22Mbps_30fps.mp4",
+                       "video_1920x1080_3648frame_h264_22Mbps_30fps_transcoded_V.mp4",
+                       false /* includeAudio */, true /* transcodeVideo */, SetMaxOperatingRate);
+}
+
+static void BM_TranscodeAvc2AvcAV2AV720P(benchmark::State& state) {
+    TranscodeMediaFile(state, "video_1280x720_3648frame_h264_16Mbps_30fps_aac.mp4",
+                       "video_1280x720_3648frame_h264_16Mbps_30fps_aac_transcoded_AV.mp4",
+                       true /* includeAudio */, true /* transcodeVideo */);
+}
+
+static void BM_TranscodeAvc2AvcAV2AV720PMaxOperatingRate(benchmark::State& state) {
+    TranscodeMediaFile(state, "video_1280x720_3648frame_h264_16Mbps_30fps_aac.mp4",
+                       "video_1280x720_3648frame_h264_16Mbps_30fps_aac_transcoded_AV.mp4",
+                       true /* includeAudio */, true /* transcodeVideo */, SetMaxOperatingRate);
+}
+
+//-------------------------------- HEVC to AVC Benchmarks ------------------------------------------
+
+static void BM_TranscodeHevc2AvcAudioVideo2AudioVideo(benchmark::State& state) {
+    TranscodeMediaFile(state, "video_1920x1080_3863frame_hevc_4Mbps_30fps_aac.mp4",
+                       "video_1920x1080_3863frame_hevc_4Mbps_30fps_aac_transcoded_AV.mp4",
+                       true /* includeAudio */, true /* transcodeVideo */);
+}
+
+static void BM_TranscodeHevc2AvcVideo2Video(benchmark::State& state) {
+    TranscodeMediaFile(state, "video_1920x1080_3863frame_hevc_4Mbps_30fps.mp4",
+                       "video_1920x1080_3863frame_hevc_4Mbps_30fps_transcoded_V.mp4",
+                       false /* includeAudio */, true /* transcodeVideo */);
+}
+
+static void BM_TranscodeHevc2AvcAV2AVMaxOperatingRate(benchmark::State& state) {
+    TranscodeMediaFile(state, "video_1920x1080_3863frame_hevc_4Mbps_30fps_aac.mp4",
+                       "video_1920x1080_3863frame_hevc_4Mbps_30fps_aac_transcoded_AV.mp4",
+                       true /* includeAudio */, true /* transcodeVideo */, SetMaxOperatingRate);
+}
+
+static void BM_TranscodeHevc2AvcV2VMaxOperatingRate(benchmark::State& state) {
+    TranscodeMediaFile(state, "video_1920x1080_3863frame_hevc_4Mbps_30fps.mp4",
+                       "video_1920x1080_3863frame_hevc_4Mbps_30fps_transcoded_V.mp4",
+                       false /* includeAudio */, true /* transcodeVideo */, SetMaxOperatingRate);
+}
+
+static void BM_TranscodeHevc2AvcAV2AV720P(benchmark::State& state) {
+    TranscodeMediaFile(state, "video_1280x720_3863frame_hevc_16Mbps_30fps_aac.mp4",
+                       "video_1280x720_3863frame_hevc_16Mbps_30fps_aac_transcoded_AV.mp4",
+                       true /* includeAudio */, true /* transcodeVideo */);
+}
+
+static void BM_TranscodeHevc2AvcAV2AV720PMaxOperatingRate(benchmark::State& state) {
+    TranscodeMediaFile(state, "video_1280x720_3863frame_hevc_16Mbps_30fps_aac.mp4",
+                       "video_1280x720_3863frame_hevc_16Mbps_30fps_aac_transcoded_AV.mp4",
+                       true /* includeAudio */, true /* transcodeVideo */, SetMaxOperatingRate);
+}
+
+//-------------------------------- Passthrough Benchmarks ------------------------------------------
+
+static void BM_TranscodeAudioVideoPassthrough(benchmark::State& state) {
+    TranscodeMediaFile(state, "video_1920x1080_3648frame_h264_22Mbps_30fps_aac.mp4",
+                       "video_1920x1080_3648frame_h264_22Mbps_30fps_aac_passthrough_AV.mp4",
+                       true /* includeAudio */, false /* transcodeVideo */);
+}
+
+static void BM_TranscodeVideoPassthrough(benchmark::State& state) {
+    TranscodeMediaFile(state, "video_1920x1080_3648frame_h264_22Mbps_30fps.mp4",
+                       "video_1920x1080_3648frame_h264_22Mbps_30fps_passthrough_AV.mp4",
+                       false /* includeAudio */, false /* transcodeVideo */);
+}
+
+//-------------------------------- Benchmark Registration ------------------------------------------
+
+// Benchmark registration wrapper for transcoding.
+#define TRANSCODER_BENCHMARK(func) \
+    BENCHMARK(func)->UseRealTime()->MeasureProcessCPUTime()->Unit(benchmark::kMillisecond)
+
+TRANSCODER_BENCHMARK(BM_TranscodeAvc2AvcAudioVideo2AudioVideo);
+TRANSCODER_BENCHMARK(BM_TranscodeAvc2AvcVideo2Video);
+TRANSCODER_BENCHMARK(BM_TranscodeAvc2AvcAV2AVMaxOperatingRate);
+TRANSCODER_BENCHMARK(BM_TranscodeAvc2AvcV2VMaxOperatingRate);
+TRANSCODER_BENCHMARK(BM_TranscodeAvc2AvcAV2AV720P);
+TRANSCODER_BENCHMARK(BM_TranscodeAvc2AvcAV2AV720PMaxOperatingRate);
+
+TRANSCODER_BENCHMARK(BM_TranscodeHevc2AvcAudioVideo2AudioVideo);
+TRANSCODER_BENCHMARK(BM_TranscodeHevc2AvcVideo2Video);
+TRANSCODER_BENCHMARK(BM_TranscodeHevc2AvcAV2AVMaxOperatingRate);
+TRANSCODER_BENCHMARK(BM_TranscodeHevc2AvcV2VMaxOperatingRate);
+TRANSCODER_BENCHMARK(BM_TranscodeHevc2AvcAV2AV720P);
+TRANSCODER_BENCHMARK(BM_TranscodeHevc2AvcAV2AV720PMaxOperatingRate);
+
+TRANSCODER_BENCHMARK(BM_TranscodeAudioVideoPassthrough);
+TRANSCODER_BENCHMARK(BM_TranscodeVideoPassthrough);
+
+BENCHMARK_MAIN();
diff --git a/media/libmediatranscoding/transcoder/include/media/MediaSample.h b/media/libmediatranscoding/transcoder/include/media/MediaSample.h
new file mode 100644
index 0000000..8a239a6
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/include/media/MediaSample.h
@@ -0,0 +1,133 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_SAMPLE_H
+#define ANDROID_MEDIA_SAMPLE_H
+
+#include <cstdint>
+#include <functional>
+#include <memory>
+
+namespace android {
+
+/**
+ * Media sample flags.
+ * These flags purposely match the media NDK's buffer and extractor flags with one exception. The
+ * NDK extractor's flag for encrypted samples (AMEDIAEXTRACTOR_SAMPLE_FLAG_ENCRYPTED) is equal to 2,
+ * i.e. the same as SAMPLE_FLAG_CODEC_CONFIG below and NDK's AMEDIACODEC_BUFFER_FLAG_CODEC_CONFIG.
+ * Sample producers based on the NDK's extractor are responsible for handling those values.
+ * Note that currently the media transcoder does not support encrypted samples.
+ */
+enum : uint32_t {
+    SAMPLE_FLAG_SYNC_SAMPLE = 1,
+    SAMPLE_FLAG_CODEC_CONFIG = 2,
+    SAMPLE_FLAG_END_OF_STREAM = 4,
+    SAMPLE_FLAG_PARTIAL_FRAME = 8,
+};
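+
+// As an illustration of the flag collision noted above, a sample producer built on the NDK
+// extractor might translate flags roughly like this (sketch only; `extractor` is assumed to point
+// at the current sample):
+//
+//     uint32_t extractorFlags = AMediaExtractor_getSampleFlags(extractor);
+//     uint32_t sampleFlags = 0;
+//     if (extractorFlags & AMEDIAEXTRACTOR_SAMPLE_FLAG_SYNC) {
+//         sampleFlags |= SAMPLE_FLAG_SYNC_SAMPLE;
+//     }
+//     if (extractorFlags & AMEDIAEXTRACTOR_SAMPLE_FLAG_ENCRYPTED) {
+//         // Encrypted samples are not supported; treat this as an error instead of
+//         // misreading the bit as SAMPLE_FLAG_CODEC_CONFIG.
+//     }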
+
+/**
+ * MediaSampleInfo is an object that carries information about a compressed media sample without
+ * holding any sample data.
+ */
+struct MediaSampleInfo {
+    /** The sample's presentation timestamp in microseconds. */
+    int64_t presentationTimeUs = 0;
+
+    /** The size of the compressed sample data in bytes. */
+    size_t size = 0;
+
+    /** Sample flags. */
+    uint32_t flags = 0;
+};
+
+/**
+ * MediaSample holds a compressed media sample in memory.
+ */
+struct MediaSample {
+    /**
+     * Callback to notify that a media sample is about to be released, giving the creator a chance
+     * to reclaim the data buffer backing the sample. Once this callback returns, the media sample
+     * instance *will* be released so it cannot be used outside of the callback. To enable the
+     * callback, create the media sample with {@link #createWithReleaseCallback}.
+     * @param sample The sample to be released.
+     */
+    using OnSampleReleasedCallback = std::function<void(MediaSample* sample)>;
+
+    /**
+     * Creates a new media sample instance with a registered release callback. The release callback
+     * will get called right before the media sample is released giving the creator a chance to
+     * reclaim the buffer.
+     * @param buffer Byte buffer containing the sample's compressed data.
+     * @param dataOffset Offset, in bytes, to the sample's compressed data inside the buffer.
+     * @param bufferId Buffer identifier that can be used to identify the buffer on release.
+     * @param releaseCallback The sample release callback.
+     * @return A new media sample instance.
+     */
+    static std::shared_ptr<MediaSample> createWithReleaseCallback(
+            uint8_t* buffer, size_t dataOffset, uint32_t bufferId,
+            OnSampleReleasedCallback releaseCallback) {
+        MediaSample* sample = new MediaSample(buffer, dataOffset, bufferId, releaseCallback);
+        return std::shared_ptr<MediaSample>(
+                sample, std::bind(&MediaSample::releaseSample, std::placeholders::_1));
+    }
+
+    /**
+     * Byte buffer containing the sample's compressed data. The media sample instance does not take
+     * ownership of the buffer and will not automatically release the memory, but the caller can
+     * register a release callback by creating the media sample with
+     * {@link #createWithReleaseCallback}.
+     */
+    const uint8_t* buffer = nullptr;
+
+    /** Offset, in bytes, to the sample's compressed data inside the buffer. */
+    size_t dataOffset = 0;
+
+    /**
+     * Buffer identifier. This identifier is likely only meaningful to the sample data producer and
+     * can be used for reclaiming the buffer once a consumer is done processing it.
+     */
+    uint32_t bufferId = 0xBAADF00D;
+
+    /** Media sample information. */
+    MediaSampleInfo info;
+
+    MediaSample() = default;
+
+private:
+    MediaSample(uint8_t* buffer, size_t dataOffset, uint32_t bufferId,
+                OnSampleReleasedCallback releaseCallback)
+          : buffer(buffer),
+            dataOffset(dataOffset),
+            bufferId(bufferId),
+            mReleaseCallback(releaseCallback){};
+
+    static void releaseSample(MediaSample* sample) {
+        if (sample->mReleaseCallback != nullptr) {
+            sample->mReleaseCallback(sample);
+        }
+        delete sample;
+    }
+
+    // Do not allow copying to prevent dangling pointers in the copied object after the original is
+    // released.
+    MediaSample(const MediaSample&) = delete;
+    MediaSample& operator=(const MediaSample&) = delete;
+
+    const OnSampleReleasedCallback mReleaseCallback = nullptr;
+};
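+
+// Usage sketch for the release callback. The buffer pool and its getBuffer()/returnBuffer()
+// methods below are hypothetical and only stand in for whatever buffer management the sample
+// producer uses:
+//
+//     uint32_t bufferId;
+//     uint8_t* data = bufferPool->getBuffer(&bufferId);  // hypothetical pool
+//     auto sample = MediaSample::createWithReleaseCallback(
+//             data, 0 /* dataOffset */, bufferId, [bufferPool](MediaSample* sample) {
+//                 bufferPool->returnBuffer(sample->bufferId);  // reclaim the backing buffer
+//             });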
+
+}  // namespace android
+#endif  // ANDROID_MEDIA_SAMPLE_H
diff --git a/media/libmediatranscoding/transcoder/include/media/MediaSampleQueue.h b/media/libmediatranscoding/transcoder/include/media/MediaSampleQueue.h
new file mode 100644
index 0000000..c6cf1a4
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/include/media/MediaSampleQueue.h
@@ -0,0 +1,72 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_SAMPLE_QUEUE_H
+#define ANDROID_MEDIA_SAMPLE_QUEUE_H
+
+#include <media/MediaSample.h>
+#include <utils/Mutex.h>
+
+#include <memory>
+#include <mutex>
+#include <queue>
+
+namespace android {
+
+/**
+ * MediaSampleQueue asynchronously connects a producer and a consumer of media samples.
+ * Media samples flow through the queue in FIFO order. If the queue is empty, the consumer will be
+ * blocked until a new media sample is added or until the producer aborts the queue operation.
+ */
+class MediaSampleQueue {
+public:
+    /**
+     * Enqueues a media sample at the end of the queue and notifies potentially waiting consumers.
+     * If the queue has previously been aborted this method does nothing.
+     * @param sample The media sample to enqueue.
+     * @return True if the queue has been aborted.
+     */
+    bool enqueue(const std::shared_ptr<MediaSample>& sample);
+
+    /**
+     * Removes the next media sample from the queue and returns it through the output parameter. If
+     * the queue has previously been aborted, no sample is returned. Note that this method will block
+     * while the queue is empty.
+     * @param[out] sample The next media sample in the queue.
+     * @return True if the queue has been aborted.
+     */
+    bool dequeue(std::shared_ptr<MediaSample>* sample /* nonnull */);
+
+    /**
+     * Checks if the queue currently holds any media samples.
+     * @return True if the queue is empty or has been aborted. False otherwise.
+     */
+    bool isEmpty();
+
+    /**
+     * Aborts the queue operation. This clears the queue and notifies waiting consumers. After the
+     * queue has been aborted it is not possible to enqueue more samples, and dequeue will return
+     * without providing a sample.
+     */
+    void abort();
+
+private:
+    std::queue<std::shared_ptr<MediaSample>> mSampleQueue GUARDED_BY(mMutex);
+    std::mutex mMutex;
+    std::condition_variable mCondition;
+    bool mAborted GUARDED_BY(mMutex) = false;
+};
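+
+// Usage sketch: one thread produces samples while another consumes them, and abort() unblocks the
+// consumer at shutdown (processSample() is a placeholder for the consumer's own work):
+//
+//     MediaSampleQueue queue;
+//
+//     // Producer thread:
+//     queue.enqueue(sample);
+//
+//     // Consumer thread; dequeue() blocks while the queue is empty and returns true once the
+//     // queue has been aborted:
+//     std::shared_ptr<MediaSample> sample;
+//     while (!queue.dequeue(&sample)) {
+//         processSample(sample);
+//     }
+//
+//     // Shutdown (from any thread):
+//     queue.abort();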
+
+}  // namespace android
+#endif  // ANDROID_MEDIA_SAMPLE_QUEUE_H
diff --git a/media/libmediatranscoding/transcoder/include/media/MediaSampleReader.h b/media/libmediatranscoding/transcoder/include/media/MediaSampleReader.h
new file mode 100644
index 0000000..7b6fbef
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/include/media/MediaSampleReader.h
@@ -0,0 +1,138 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_SAMPLE_READER_H
+#define ANDROID_MEDIA_SAMPLE_READER_H
+
+#include <media/MediaSample.h>
+#include <media/NdkMediaError.h>
+#include <media/NdkMediaFormat.h>
+
+namespace android {
+
+/**
+ * MediaSampleReader is an interface for reading media samples from a container. MediaSampleReader
+ * allows for reading samples from multiple tracks on individual threads independently of each other
+ * while preserving the order of samples. Due to poor non-sequential access performance of the
+ * underlying extractor, MediaSampleReader can optionally enforce sequential sample access by
+ * blocking requests for tracks that the underlying extractor does not currently point to. Waiting
+ * threads are serviced once the reader advances to a sample from the specified track. Due to this
+ * it is important to read samples and advance the reader from all selected tracks to avoid hanging
+ * other tracks. MediaSampleReader implementations are thread safe and sample access should be done
+ * on one thread per selected track.
+ */
+class MediaSampleReader {
+public:
+    /**
+     * Returns the file format of the media container as an AMediaFormat.
+     * The caller is responsible for releasing the format when finished with it using
+     * AMediaFormat_delete().
+     * @return The file media format.
+     */
+    virtual AMediaFormat* getFileFormat() = 0;
+
+    /**
+     * Returns the number of tracks in the media container.
+     * @return The number of tracks.
+     */
+    virtual size_t getTrackCount() const = 0;
+
+    /**
+     * Returns the media format of a specific track as an AMediaFormat.
+     * The caller is responsible for releasing the format when finished with it using
+     * AMediaFormat_delete().
+     * @param trackIndex The track index (zero-based).
+     * @return The track media format.
+     */
+    virtual AMediaFormat* getTrackFormat(int trackIndex) = 0;
+
+    /**
+     * Select a track for sample access. Tracks must be selected in order for sample information and
+     * sample data to be available for that track. Each selected track's samples must be accessed on
+     * its own thread to avoid blocking other tracks.
+     * @param trackIndex The track to select.
+     * @return AMEDIA_OK on success.
+     */
+    virtual media_status_t selectTrack(int trackIndex) = 0;
+
+    /**
+     * Toggles sequential access enforcement on or off. When the reader enforces sequential access,
+     * calls to read sample information will block unless the underlying extractor points to the
+     * specified track.
+     * @param enforce True to enforce sequential access.
+     * @return AMEDIA_OK on success.
+     */
+    virtual media_status_t setEnforceSequentialAccess(bool enforce) = 0;
+
+    /**
+     * Estimates the bitrate of a source track by sampling sample sizes. The bitrate is returned in
+     * megabits per second (Mbps). This method will fail if the track only contains a single sample
+     * and does not have an associated duration.
+     * @param trackIndex The source track index.
+     * @param bitrate Output param for the bitrate.
+     * @return AMEDIA_OK on success.
+     */
+    virtual media_status_t getEstimatedBitrateForTrack(int trackIndex, int32_t* bitrate);
+
+    /**
+     * Returns the sample information for the current sample in the specified track. Note that this
+     * method will block until the reader advances to a sample belonging to the requested track if
+     * the reader is in sequential access mode.
+     * @param trackIndex The track index (zero-based).
+     * @param info Pointer to a MediaSampleInfo object where the sample information is written.
+     * @return AMEDIA_OK on success, AMEDIA_ERROR_END_OF_STREAM if there are no more samples to read
+     * from the track and AMEDIA_ERROR_INVALID_PARAMETER if trackIndex is out of bounds or the
+     * info pointer is NULL. Other AMEDIA_ERROR_* return values may not be recoverable.
+     */
+    virtual media_status_t getSampleInfoForTrack(int trackIndex, MediaSampleInfo* info) = 0;
+
+    /**
+     * Returns the sample data for the current sample in the specified track into the supplied
+     * buffer. Note that this method will block until the reader advances to a sample belonging to
+     * the requested track if the reader is in sequential access mode. Upon successful return this
+     * method will also advance the specified track to the next sample.
+     * @param trackIndex The track index (zero-based).
+     * @param buffer The buffer to write the sample's data to.
+     * @param bufferSize The size of the supplied buffer.
+     * @return AMEDIA_OK on success, AMEDIA_ERROR_END_OF_STREAM if there are no more samples to read
+     * from the track and AMEDIA_ERROR_INVALID_PARAMETER if trackIndex is out of bounds, if the
+     * buffer pointer is NULL or if bufferSize is too small for the sample. Other AMEDIA_ERROR_*
+     * return values may not be recoverable.
+     */
+    virtual media_status_t readSampleDataForTrack(int trackIndex, uint8_t* buffer,
+                                                  size_t bufferSize) = 0;
+
+    /**
+     * Advance the specified track to the next sample. If the reader is in sequential access mode
+     * and the current sample belongs to the specified track, the reader will also advance to the
+     * next sample and wake up any threads waiting on the new track.
+     * @param trackIndex The track index (zero-based).
+     */
+    virtual void advanceTrack(int trackIndex) = 0;
+
+    /** Destructor. */
+    virtual ~MediaSampleReader() = default;
+
+    /** Constructor. */
+    MediaSampleReader() = default;
+
+private:
+    MediaSampleReader(const MediaSampleReader&) = delete;
+    MediaSampleReader& operator=(const MediaSampleReader&) = delete;
+};
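+
+// Usage sketch: draining one selected track on its own thread, which is the access pattern this
+// interface is designed for. `reader` and `trackIndex` are assumed to come from the caller, with
+// the track already selected via selectTrack():
+//
+//     MediaSampleInfo info;
+//     std::vector<uint8_t> buffer;
+//     while (reader->getSampleInfoForTrack(trackIndex, &info) == AMEDIA_OK) {
+//         if (buffer.size() < info.size) {
+//             buffer.resize(info.size);
+//         }
+//         // On success this also advances the track to the next sample.
+//         if (reader->readSampleDataForTrack(trackIndex, buffer.data(), buffer.size()) !=
+//             AMEDIA_OK) {
+//             break;
+//         }
+//     }
+//     // The loop ends when getSampleInfoForTrack() returns AMEDIA_ERROR_END_OF_STREAM.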
+
+}  // namespace android
+#endif  // ANDROID_MEDIA_SAMPLE_READER_H
diff --git a/media/libmediatranscoding/transcoder/include/media/MediaSampleReaderNDK.h b/media/libmediatranscoding/transcoder/include/media/MediaSampleReaderNDK.h
new file mode 100644
index 0000000..2032def
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/include/media/MediaSampleReaderNDK.h
@@ -0,0 +1,140 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_SAMPLE_READER_NDK_H
+#define ANDROID_MEDIA_SAMPLE_READER_NDK_H
+
+#include <media/MediaSampleReader.h>
+#include <media/NdkMediaExtractor.h>
+
+#include <map>
+#include <memory>
+#include <mutex>
+#include <vector>
+
+namespace android {
+
+/**
+ * MediaSampleReaderNDK is a concrete implementation of the MediaSampleReader interface based on the
+ * media NDK extractor.
+ */
+class MediaSampleReaderNDK : public MediaSampleReader {
+public:
+    /**
+     * Creates a new MediaSampleReaderNDK instance wrapped in a shared pointer.
+     * @param fd Source file descriptor. The caller is responsible for closing the fd and it is safe
+     *           to do so when this method returns.
+     * @param offset Source data offset.
+     * @param size Source data size.
+     * @return A shared pointer referencing the new MediaSampleReaderNDK instance on success, or an
+     *         empty shared pointer if an error occurred.
+     */
+    static std::shared_ptr<MediaSampleReader> createFromFd(int fd, size_t offset, size_t size);
+
+    AMediaFormat* getFileFormat() override;
+    size_t getTrackCount() const override;
+    AMediaFormat* getTrackFormat(int trackIndex) override;
+    media_status_t selectTrack(int trackIndex) override;
+    media_status_t setEnforceSequentialAccess(bool enforce) override;
+    media_status_t getEstimatedBitrateForTrack(int trackIndex, int32_t* bitrate) override;
+    media_status_t getSampleInfoForTrack(int trackIndex, MediaSampleInfo* info) override;
+    media_status_t readSampleDataForTrack(int trackIndex, uint8_t* buffer,
+                                          size_t bufferSize) override;
+    void advanceTrack(int trackIndex) override;
+
+    virtual ~MediaSampleReaderNDK() override;
+
+private:
+    /**
+     * SamplePosition describes the position of a single sample in the media file using its
+     * timestamp and index in the file.
+     */
+    struct SamplePosition {
+        uint64_t index = 0;
+        int64_t timeStampUs = 0;
+        bool isSet = false;
+
+        void set(uint64_t sampleIndex, int64_t sampleTimeUs) {
+            index = sampleIndex;
+            timeStampUs = sampleTimeUs;
+            isSet = true;
+        }
+
+        void reset() { isSet = false; }
+    };
+
+    /**
+     * SampleCursor keeps track of the sample position for a specific track. When the track is
+     * advanced, previous is set to current, current to next and next is reset. As the extractor
+     * advances over the combined timeline of tracks, it updates current and next for the track it
+     * points to if they are not already set.
+     */
+    struct SampleCursor {
+        SamplePosition previous;
+        SamplePosition current;
+        SamplePosition next;
+    };
+
+    /**
+     * Creates a new MediaSampleReaderNDK object from an AMediaExtractor. The extractor needs to be
+     * initialized with a valid data source before attempting to create a MediaSampleReaderNDK.
+     * @param extractor The initialized media extractor.
+     */
+    MediaSampleReaderNDK(AMediaExtractor* extractor);
+
+    /** Advances the track to next sample. */
+    void advanceTrack_l(int trackIndex);
+
+    /** Advances the extractor to next sample. */
+    bool advanceExtractor_l();
+
+    /** Moves the extractor backwards to the specified sample. */
+    media_status_t seekExtractorBackwards_l(int64_t targetTimeUs, int targetTrackIndex,
+                                            uint64_t targetSampleIndex);
+
+    /** Moves the extractor to the specified sample. */
+    media_status_t moveToSample_l(SamplePosition& pos, int trackIndex);
+
+    /** Moves the extractor to the next sample of the specified track. */
+    media_status_t moveToTrack_l(int trackIndex);
+
+    /** In sequential mode, waits for the extractor to reach the next sample for the track. */
+    media_status_t waitForTrack_l(int trackIndex, std::unique_lock<std::mutex>& lockHeld);
+
+    /**
+     * Ensures the extractor is ready for the next sample of the track regardless of access mode.
+     */
+    media_status_t primeExtractorForTrack_l(int trackIndex, std::unique_lock<std::mutex>& lockHeld);
+
+    AMediaExtractor* mExtractor = nullptr;
+    std::mutex mExtractorMutex;
+    const size_t mTrackCount;
+
+    int mExtractorTrackIndex = -1;
+    uint64_t mExtractorSampleIndex = 0;
+
+    bool mEosReached = false;
+    bool mEnforceSequentialAccess = false;
+
+    // Maps selected track indices to condition variables for sequential sample access control.
+    std::map<int, std::condition_variable> mTrackSignals;
+
+    // Sample cursors for each track in the file.
+    std::vector<SampleCursor> mTrackCursors;
+};
+
+}  // namespace android
+#endif  // ANDROID_MEDIA_SAMPLE_READER_NDK_H
diff --git a/media/libmediatranscoding/transcoder/include/media/MediaSampleWriter.h b/media/libmediatranscoding/transcoder/include/media/MediaSampleWriter.h
new file mode 100644
index 0000000..f762556
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/include/media/MediaSampleWriter.h
@@ -0,0 +1,208 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_SAMPLE_WRITER_H
+#define ANDROID_MEDIA_SAMPLE_WRITER_H
+
+#include <media/MediaSample.h>
+#include <media/NdkMediaCodec.h>
+#include <media/NdkMediaError.h>
+#include <media/NdkMediaFormat.h>
+#include <utils/Mutex.h>
+
+#include <condition_variable>
+#include <functional>
+#include <memory>
+#include <mutex>
+#include <queue>
+#include <thread>
+#include <unordered_map>
+
+namespace android {
+
+/**
+ * Muxer interface used by MediaSampleWriter.
+ * Methods in this interface are guaranteed to be called sequentially by MediaSampleWriter.
+ */
+class MediaSampleWriterMuxerInterface {
+public:
+    /**
+     * Adds a new track to the muxer.
+     * @param trackFormat Format of the new track.
+     * @return A non-negative track index on success, or a negative number on failure.
+     */
+    virtual ssize_t addTrack(AMediaFormat* trackFormat) = 0;
+
+    /** Starts the muxer. */
+    virtual media_status_t start() = 0;
+    /**
+     * Writes sample data to a previously added track.
+     * @param trackIndex Index of the track the sample data belongs to.
+     * @param data The sample data.
+     * @param info The sample information.
+     * @return AMEDIA_OK on success, or an error code on failure.
+     */
+    virtual media_status_t writeSampleData(size_t trackIndex, const uint8_t* data,
+                                           const AMediaCodecBufferInfo* info) = 0;
+
+    /** Stops the muxer. */
+    virtual media_status_t stop() = 0;
+    virtual ~MediaSampleWriterMuxerInterface() = default;
+};
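+
+// For reference, an adapter over the NDK's AMediaMuxer could implement this interface roughly as
+// follows (sketch only; the sample writer's built-in default used by init(fd, ...) may differ in
+// details such as error handling and ownership):
+//
+//     class NdkMuxerAdapter : public MediaSampleWriterMuxerInterface {
+//     public:
+//         NdkMuxerAdapter(AMediaMuxer* muxer) : mMuxer(muxer) {}
+//         ssize_t addTrack(AMediaFormat* trackFormat) override {
+//             return AMediaMuxer_addTrack(mMuxer, trackFormat);
+//         }
+//         media_status_t start() override { return AMediaMuxer_start(mMuxer); }
+//         media_status_t writeSampleData(size_t trackIndex, const uint8_t* data,
+//                                        const AMediaCodecBufferInfo* info) override {
+//             return AMediaMuxer_writeSampleData(mMuxer, trackIndex, data, info);
+//         }
+//         media_status_t stop() override { return AMediaMuxer_stop(mMuxer); }
+//
+//     private:
+//         AMediaMuxer* mMuxer;
+//     };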
+
+/**
+ * MediaSampleWriter is a wrapper around a muxer. The sample writer puts samples on a queue that
+ * is serviced by an internal thread to minimize blocking time for clients. MediaSampleWriter also
+ * provides progress reporting. The default muxer interface implementation is based
+ * directly on AMediaMuxer.
+ */
+class MediaSampleWriter : public std::enable_shared_from_this<MediaSampleWriter> {
+public:
+    /** Function prototype for delivering media samples to the writer. */
+    using MediaSampleConsumerFunction =
+            std::function<void(const std::shared_ptr<MediaSample>& sample)>;
+
+    /** Callback interface. */
+    class CallbackInterface {
+    public:
+        /**
+         * Sample writer finished. The finished callback is only called after the sample writer has
+         * been successfully started.
+         */
+        virtual void onFinished(const MediaSampleWriter* writer, media_status_t status) = 0;
+
+        /** Sample writer progress update in percent. */
+        virtual void onProgressUpdate(const MediaSampleWriter* writer, int32_t progress) = 0;
+
+        virtual ~CallbackInterface() = default;
+    };
+
+    static std::shared_ptr<MediaSampleWriter> Create();
+
+    /**
+     * Initializes the sample writer with its default muxer implementation. MediaSampleWriter needs
+     * to be initialized before tracks are added and can only be initialized once.
+     * @param fd An open file descriptor to write to. The caller is responsible for closing this
+     *        file descriptor and it is safe to do so once this method returns.
+     * @param callbacks Client callback object that gets called by the sample writer.
+     * @return True if the writer was successfully initialized.
+     */
+    bool init(int fd, const std::weak_ptr<CallbackInterface>& callbacks /* nonnull */);
+
+    /**
+     * Initializes the sample writer with a custom muxer interface implementation.
+     * @param muxer The custom muxer interface implementation.
+     * @param callbacks Client callback object that gets called by the sample writer.
+     * @return True if the writer was successfully initialized.
+     */
+    bool init(const std::shared_ptr<MediaSampleWriterMuxerInterface>& muxer /* nonnull */,
+              const std::weak_ptr<CallbackInterface>& callbacks /* nonnull */);
+
+    /**
+     * Adds a new track to the sample writer. Tracks must be added after the sample writer has been
+     * initialized and before it is started.
+     * @param trackFormat The format of the track to add.
+     * @return A sample consumer to add samples to if the track was successfully added, or nullptr
+     * if the track could not be added.
+     */
+    MediaSampleConsumerFunction addTrack(
+            const std::shared_ptr<AMediaFormat>& trackFormat /* nonnull */);
+
+    /**
+     * Starts the sample writer. The sample writer will start processing samples and writing them to
+     * its muxer on an internal thread. MediaSampleWriter can only be started once.
+     * @return True if the sample writer was successfully started.
+     */
+    bool start();
+
+    /**
+     * Stops the sample writer. If the sample writer has not yet finished, its operation will be
+     * aborted and an error value will be returned to the client in the callback supplied to
+     * {@link #start}. If the sample writer has already finished and the client callback has fired,
+     * the writer has already stopped automatically and there is no need to call stop manually.
+     * Once the sample writer has been stopped it cannot be restarted.
+     * @return True if the sample writer was successfully stopped on this call. False if the sample
+     *         writer was already stopped or was never started.
+     */
+    bool stop();
+
+    /** Destructor. */
+    ~MediaSampleWriter();
+
+private:
+    struct TrackRecord {
+        TrackRecord(int64_t durationUs)
+              : mDurationUs(durationUs),
+                mFirstSampleTimeUs(0),
+                mPrevSampleTimeUs(INT64_MIN),
+                mFirstSampleTimeSet(false),
+                mReachedEos(false){};
+
+        TrackRecord() : TrackRecord(0){};
+
+        int64_t mDurationUs;
+        int64_t mFirstSampleTimeUs;
+        int64_t mPrevSampleTimeUs;
+        bool mFirstSampleTimeSet;
+        bool mReachedEos;
+    };
+
+    // Track index and sample.
+    using SampleEntry = std::pair<size_t, std::shared_ptr<MediaSample>>;
+
+    struct SampleComparator {
+        // Return true if lhs should come after rhs in the sample queue.
+        bool operator()(const SampleEntry& lhs, const SampleEntry& rhs) {
+            const bool lhsEos = lhs.second->info.flags & SAMPLE_FLAG_END_OF_STREAM;
+            const bool rhsEos = rhs.second->info.flags & SAMPLE_FLAG_END_OF_STREAM;
+
+            if (lhsEos && !rhsEos) {
+                return true;
+            } else if (!lhsEos && rhsEos) {
+                return false;
+            } else if (lhsEos && rhsEos) {
+                return lhs.first > rhs.first;
+            }
+
+            return lhs.second->info.presentationTimeUs > rhs.second->info.presentationTimeUs;
+        }
+    };
+
+    std::weak_ptr<CallbackInterface> mCallbacks;
+    std::shared_ptr<MediaSampleWriterMuxerInterface> mMuxer;
+
+    std::mutex mMutex;  // Protects sample queue and state.
+    std::condition_variable mSampleSignal;
+    std::thread mThread;
+    std::unordered_map<size_t, TrackRecord> mTracks;
+    std::priority_queue<SampleEntry, std::vector<SampleEntry>, SampleComparator> mSampleQueue
+            GUARDED_BY(mMutex);
+
+    enum : int {
+        UNINITIALIZED,
+        INITIALIZED,
+        STARTED,
+        STOPPED,
+    } mState GUARDED_BY(mMutex);
+
+    MediaSampleWriter() : mState(UNINITIALIZED){};
+    void addSampleToTrack(size_t trackIndex, const std::shared_ptr<MediaSample>& sample);
+    media_status_t writeSamples();
+    media_status_t runWriterLoop();
+};
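+
+// Illustrative usage sketch (not part of this change); the callback type MyWriterCallbacks and
+// the variables destFd, trackFormat and sample are assumed to exist:
+//
+//   auto writer = MediaSampleWriter::Create();
+//   auto callbacks = std::make_shared<MyWriterCallbacks>();
+//   if (writer->init(destFd, callbacks)) {
+//       auto consumer = writer->addTrack(trackFormat);
+//       writer->start();
+//       consumer(sample);  // Deliver samples from any thread; the writer muxes them internally.
+//   }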
+
+}  // namespace android
+#endif  // ANDROID_MEDIA_SAMPLE_WRITER_H
diff --git a/media/libmediatranscoding/transcoder/include/media/MediaTrackTranscoder.h b/media/libmediatranscoding/transcoder/include/media/MediaTrackTranscoder.h
new file mode 100644
index 0000000..c5e161c
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/include/media/MediaTrackTranscoder.h
@@ -0,0 +1,139 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_TRACK_TRANSCODER_H
+#define ANDROID_MEDIA_TRACK_TRANSCODER_H
+
+#include <media/MediaSampleQueue.h>
+#include <media/MediaSampleReader.h>
+#include <media/MediaSampleWriter.h>
+#include <media/NdkMediaError.h>
+#include <media/NdkMediaFormat.h>
+#include <utils/Mutex.h>
+
+#include <functional>
+#include <memory>
+#include <mutex>
+#include <thread>
+
+namespace android {
+
+class MediaTrackTranscoderCallback;
+
+/**
+ * Base class for all track transcoders. MediaTrackTranscoder operates asynchronously on an internal
+ * thread and communicates through a MediaTrackTranscoderCallback instance. Transcoded samples are
+ * enqueued on the MediaTrackTranscoder's output queue. Samples need to be dequeued from the output
+ * queue or the transcoder will run out of buffers and stall. Once the consumer is done with a
+ * transcoded sample, it is the consumer's responsibility to release all references to that sample
+ * as soon as possible in order to return the buffer to the transcoder. MediaTrackTranscoder
+ * is an abstract class and instances are created through one of the concrete subclasses.
+ *
+ * The base class MediaTrackTranscoder is responsible for thread and state management and guarantees
+ * that operations {configure, start, stop} are sent to the derived class in correct order.
+ * MediaTrackTranscoder is also responsible for delivering callback notifications once the
+ * transcoder has been successfully started.
+ */
+class MediaTrackTranscoder {
+public:
+    /**
+     * Configures the track transcoder with an input MediaSampleReader and a destination format.
+     * A track transcoder has to be configured before it is started.
+     * @param mediaSampleReader The MediaSampleReader to read input samples from.
+     * @param trackIndex The index of the track to transcode in mediaSampleReader.
+     * @param destinationFormat The destination format.
+     * @return AMEDIA_OK if the track transcoder was successfully configured.
+     */
+    media_status_t configure(const std::shared_ptr<MediaSampleReader>& mediaSampleReader,
+                             int trackIndex,
+                             const std::shared_ptr<AMediaFormat>& destinationFormat);
+
+    /**
+     * Starts the track transcoder. Once started, the track transcoder has to be stopped by calling
+     * {@link #stop}, even after completing successfully. Start should only be called once.
+     * @return True if the track transcoder started, or false if it had already been started.
+     */
+    bool start();
+
+    /**
+     * Stops the track transcoder. Once the transcoding has been stopped it cannot be restarted
+     * again. It is safe to call stop multiple times.
+     * @return True if the track transcoder stopped, or false if it was already stopped.
+     */
+    bool stop();
+
+    /**
+     * Sets the sample consumer function. The MediaTrackTranscoder will deliver transcoded samples
+     * to this function. If the MediaTrackTranscoder is started before a consumer is set, the
+     * transcoder will buffer a limited number of samples internally before stalling. Once a
+     * consumer has been set, the internally buffered samples will be delivered to the consumer.
+     * @param sampleConsumer The sample consumer function.
+     */
+    void setSampleConsumer(const MediaSampleWriter::MediaSampleConsumerFunction& sampleConsumer);
+
+    /**
+     * Retrieves the track transcoder's final output format. The output is available after the
+     * track transcoder has been successfully configured.
+     * @return The track output format.
+     */
+    virtual std::shared_ptr<AMediaFormat> getOutputFormat() const = 0;
+
+    virtual ~MediaTrackTranscoder() = default;
+
+protected:
+    MediaTrackTranscoder(const std::weak_ptr<MediaTrackTranscoderCallback>& transcoderCallback)
+          : mTranscoderCallback(transcoderCallback){};
+
+    // Called by subclasses when the actual track format becomes available.
+    void notifyTrackFormatAvailable();
+
+    // Called by subclasses when a transcoded sample is available.
+    void onOutputSampleAvailable(const std::shared_ptr<MediaSample>& sample);
+
+    // configureDestinationFormat needs to be implemented by subclasses, and gets called on an
+    // external thread before start.
+    virtual media_status_t configureDestinationFormat(
+            const std::shared_ptr<AMediaFormat>& destinationFormat) = 0;
+
+    // runTranscodeLoop needs to be implemented by subclasses, and gets called on
+    // MediaTrackTranscoder's internal thread when the track transcoder is started.
+    virtual media_status_t runTranscodeLoop() = 0;
+
+    // abortTranscodeLoop needs to be implemented by subclasses, and should request transcoding to
+    // be aborted as soon as possible. It should be safe to call abortTranscodeLoop multiple times.
+    virtual void abortTranscodeLoop() = 0;
+
+    std::shared_ptr<MediaSampleReader> mMediaSampleReader;
+    int mTrackIndex;
+    std::shared_ptr<AMediaFormat> mSourceFormat;
+
+private:
+    std::mutex mSampleMutex;
+    MediaSampleQueue mSampleQueue GUARDED_BY(mSampleMutex);
+    MediaSampleWriter::MediaSampleConsumerFunction mSampleConsumer GUARDED_BY(mSampleMutex);
+    const std::weak_ptr<MediaTrackTranscoderCallback> mTranscoderCallback;
+    std::mutex mStateMutex;
+    std::thread mTranscodingThread GUARDED_BY(mStateMutex);
+    enum {
+        UNINITIALIZED,
+        CONFIGURED,
+        STARTED,
+        STOPPED,
+    } mState GUARDED_BY(mStateMutex) = UNINITIALIZED;
+};
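+
+// Illustrative sketch (not part of this change) of how a concrete track transcoder is typically
+// driven by a client; sampleReader, destinationFormat and writerConsumer are assumed to exist:
+//
+//   auto transcoder = VideoTrackTranscoder::create(callback);
+//   if (transcoder->configure(sampleReader, trackIndex, destinationFormat) == AMEDIA_OK) {
+//       transcoder->setSampleConsumer(writerConsumer);
+//       transcoder->start();
+//       // ... wait for MediaTrackTranscoderCallback::onTrackFinished or onTrackError ...
+//       transcoder->stop();
+//   }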
+
+}  // namespace android
+#endif  // ANDROID_MEDIA_TRACK_TRANSCODER_H
diff --git a/media/libmediatranscoding/transcoder/include/media/MediaTrackTranscoderCallback.h b/media/libmediatranscoding/transcoder/include/media/MediaTrackTranscoderCallback.h
new file mode 100644
index 0000000..654171e
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/include/media/MediaTrackTranscoderCallback.h
@@ -0,0 +1,53 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_TRACK_TRANSCODER_CALLBACK_H
+#define ANDROID_MEDIA_TRACK_TRANSCODER_CALLBACK_H
+
+#include <media/NdkMediaError.h>
+
+namespace android {
+
+class MediaTrackTranscoder;
+
+/** Callback interface for MediaTrackTranscoder. */
+class MediaTrackTranscoderCallback {
+public:
+    /**
+     * Called when the MediaTrackTranscoder's actual track format becomes available.
+     * @param transcoder The MediaTrackTranscoder whose track format becomes available.
+     */
+    virtual void onTrackFormatAvailable(const MediaTrackTranscoder* transcoder);
+    /**
+     * Called when the MediaTrackTranscoder instance has finished transcoding all media samples
+     * successfully.
+     * @param transcoder The MediaTrackTranscoder that finished the transcoding.
+     */
+    virtual void onTrackFinished(const MediaTrackTranscoder* transcoder);
+
+    /**
+     * Called when the MediaTrackTranscoder instance encountered an error it could not recover from.
+     * @param transcoder The MediaTrackTranscoder that encountered the error.
+     * @param status The non-zero error code describing the encountered error.
+     */
+    virtual void onTrackError(const MediaTrackTranscoder* transcoder, media_status_t status);
+
+protected:
+    virtual ~MediaTrackTranscoderCallback() = default;
+};
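+
+// Illustrative sketch (not part of this change): a minimal callback implementation; the class
+// name is hypothetical and the bodies only illustrate where each notification arrives.
+//
+//   class MyTrackCallback : public MediaTrackTranscoderCallback {
+//   public:
+//       void onTrackFormatAvailable(const MediaTrackTranscoder* transcoder) override {}
+//       void onTrackFinished(const MediaTrackTranscoder* transcoder) override {}
+//       void onTrackError(const MediaTrackTranscoder* transcoder,
+//                         media_status_t status) override {
+//           // Handle or propagate the error status.
+//       }
+//   };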
+
+}  // namespace android
+#endif  // ANDROID_MEDIA_TRACK_TRANSCODER_CALLBACK_H
diff --git a/media/libmediatranscoding/transcoder/include/media/MediaTranscoder.h b/media/libmediatranscoding/transcoder/include/media/MediaTranscoder.h
new file mode 100644
index 0000000..555cfce
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/include/media/MediaTranscoder.h
@@ -0,0 +1,149 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_TRANSCODER_H
+#define ANDROID_MEDIA_TRANSCODER_H
+
+#include <android/binder_auto_utils.h>
+#include <media/MediaSampleWriter.h>
+#include <media/MediaTrackTranscoderCallback.h>
+#include <media/NdkMediaError.h>
+#include <media/NdkMediaFormat.h>
+#include <utils/Mutex.h>
+
+#include <atomic>
+#include <memory>
+#include <mutex>
+#include <unordered_set>
+
+namespace android {
+
+class MediaSampleReader;
+
+class MediaTranscoder : public std::enable_shared_from_this<MediaTranscoder>,
+                        public MediaTrackTranscoderCallback,
+                        public MediaSampleWriter::CallbackInterface {
+public:
+    /** Callbacks from transcoder to client. */
+    class CallbackInterface {
+    public:
+        /** Transcoder finished successfully. */
+        virtual void onFinished(const MediaTranscoder* transcoder) = 0;
+
+        /** Transcoder encountered an unrecoverable error. */
+        virtual void onError(const MediaTranscoder* transcoder, media_status_t error) = 0;
+
+        /** Transcoder progress update reported in percent from 0 to 100. */
+        virtual void onProgressUpdate(const MediaTranscoder* transcoder, int32_t progress) = 0;
+
+        /**
+         * Transcoder lost codec resources and paused operations. The client can resume transcoding
+         * again when resources are available by either:
+         *   1) Calling resume on the same MediaTranscoder instance.
+         *   2) Creating a new MediaTranscoder instance with the paused state and then calling
+         *      resume.
+         */
+        virtual void onCodecResourceLost(
+                const MediaTranscoder* transcoder,
+                const std::shared_ptr<ndk::ScopedAParcel>& pausedState) = 0;
+
+        virtual ~CallbackInterface() = default;
+    };
+
+    /**
+     * Creates a new MediaTranscoder instance. If the supplied paused state is valid, the transcoder
+     * will be initialized with the paused state and be ready to be resumed right away. It is not
+     * possible to change any configurations on a paused transcoder.
+     */
+    static std::shared_ptr<MediaTranscoder> create(
+            const std::shared_ptr<CallbackInterface>& callbacks,
+            const std::shared_ptr<ndk::ScopedAParcel>& pausedState = nullptr);
+
+    /** Configures the source from a file descriptor. */
+    media_status_t configureSource(int fd);
+
+    /** Gets the media formats of all tracks in the file. */
+    std::vector<std::shared_ptr<AMediaFormat>> getTrackFormats() const;
+
+    /**
+     * Configures transcoding of a track. Tracks that are not configured will not be present in
+     * the final transcoded file, i.e. tracks will be dropped by default. Passing nullptr for
+     * trackFormat means the track will be copied unchanged ("passthrough") to the destination.
+     * Track configurations must be done after the source has been configured.
+     * Note: trackFormat is not modified but cannot be const.
+     */
+    media_status_t configureTrackFormat(size_t trackIndex, AMediaFormat* trackFormat);
+
+    /** Configures the destination from a file descriptor. */
+    media_status_t configureDestination(int fd);
+
+    /** Starts transcoding. No configurations can be made once the transcoder has started. */
+    media_status_t start();
+
+    /**
+     * Pauses transcoding. The transcoder's paused state is returned through pausedState. The
+     * paused state is only needed for resuming transcoding with a new MediaTranscoder instance. The
+     * caller can resume transcoding with the current MediaTranscoder instance at any time by
+     * calling resume(). It is not required to cancel a paused transcoder. The paused state is
+     * independent and the caller can always initialize a new transcoder instance with the same
+     * paused state. If the caller wishes to abandon a paused transcoder's operation they can
+     * release the transcoder instance, clear the paused state and delete the partial destination
+     * file. The caller can optionally call cancel to let the transcoder clean up the partial
+     * destination file.
+     */
+    media_status_t pause(std::shared_ptr<ndk::ScopedAParcel>* pausedState);
+
+    /** Resumes a paused transcoding. */
+    media_status_t resume();
+
+    /**
+     * Cancels the transcoding. Once canceled, the transcoding cannot be restarted. The client is
+     * responsible for cleaning up the abandoned file.
+     */
+    media_status_t cancel();
+
+    virtual ~MediaTranscoder() = default;
+
+private:
+    MediaTranscoder(const std::shared_ptr<CallbackInterface>& callbacks);
+
+    // MediaTrackTranscoderCallback
+    virtual void onTrackFormatAvailable(const MediaTrackTranscoder* transcoder) override;
+    virtual void onTrackFinished(const MediaTrackTranscoder* transcoder) override;
+    virtual void onTrackError(const MediaTrackTranscoder* transcoder,
+                              media_status_t status) override;
+    // ~MediaTrackTranscoderCallback
+
+    // MediaSampleWriter::CallbackInterface
+    virtual void onFinished(const MediaSampleWriter* writer, media_status_t status) override;
+    virtual void onProgressUpdate(const MediaSampleWriter* writer, int32_t progress) override;
+    // ~MediaSampleWriter::CallbackInterface
+
+    void onSampleWriterFinished(media_status_t status);
+    void sendCallback(media_status_t status);
+
+    std::shared_ptr<CallbackInterface> mCallbacks;
+    std::shared_ptr<MediaSampleReader> mSampleReader;
+    std::shared_ptr<MediaSampleWriter> mSampleWriter;
+    std::vector<std::shared_ptr<AMediaFormat>> mSourceTrackFormats;
+    std::vector<std::shared_ptr<MediaTrackTranscoder>> mTrackTranscoders;
+    std::mutex mTracksAddedMutex;
+    std::unordered_set<const MediaTrackTranscoder*> mTracksAdded GUARDED_BY(mTracksAddedMutex);
+
+    std::atomic_bool mCallbackSent = false;
+    std::atomic_bool mCancelled = false;
+};
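+
+// Illustrative usage sketch (not part of this change); error handling is omitted and the callback
+// type MyTranscoderCallbacks, srcFd, dstFd and videoDestFormat are assumed to exist:
+//
+//   auto transcoder = MediaTranscoder::create(std::make_shared<MyTranscoderCallbacks>());
+//   transcoder->configureSource(srcFd);
+//   auto trackFormats = transcoder->getTrackFormats();
+//   transcoder->configureTrackFormat(0 /* trackIndex */, videoDestFormat);
+//   transcoder->configureTrackFormat(1 /* trackIndex */, nullptr /* passthrough */);
+//   transcoder->configureDestination(dstFd);
+//   transcoder->start();
+//   // ... CallbackInterface::onFinished or onError fires when transcoding completes ...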
+
+}  // namespace android
+#endif  // ANDROID_MEDIA_TRANSCODER_H
diff --git a/media/libmediatranscoding/transcoder/include/media/NdkCommon.h b/media/libmediatranscoding/transcoder/include/media/NdkCommon.h
new file mode 100644
index 0000000..1a72be3
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/include/media/NdkCommon.h
@@ -0,0 +1,85 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_TRANSCODING_NDK_COMMON_H
+#define ANDROID_MEDIA_TRANSCODING_NDK_COMMON_H
+
+#include <media/NdkMediaFormat.h>
+
+extern const char* AMEDIA_MIMETYPE_VIDEO_VP8;
+extern const char* AMEDIA_MIMETYPE_VIDEO_VP9;
+extern const char* AMEDIA_MIMETYPE_VIDEO_AV1;
+extern const char* AMEDIA_MIMETYPE_VIDEO_AVC;
+extern const char* AMEDIA_MIMETYPE_VIDEO_HEVC;
+extern const char* AMEDIA_MIMETYPE_VIDEO_MPEG4;
+extern const char* AMEDIA_MIMETYPE_VIDEO_H263;
+
+// TODO(b/146420990)
+// TODO: make MediaTranscoder use the consts from this header.
+typedef enum {
+    OUTPUT_FORMAT_START = 0,
+    OUTPUT_FORMAT_MPEG_4 = OUTPUT_FORMAT_START,
+    OUTPUT_FORMAT_WEBM = OUTPUT_FORMAT_START + 1,
+    OUTPUT_FORMAT_THREE_GPP = OUTPUT_FORMAT_START + 2,
+    OUTPUT_FORMAT_HEIF = OUTPUT_FORMAT_START + 3,
+    OUTPUT_FORMAT_OGG = OUTPUT_FORMAT_START + 4,
+    OUTPUT_FORMAT_LIST_END = OUTPUT_FORMAT_START + 4,
+} MuxerFormat;
+
+// Color formats supported by the encoder - should mirror supportedColorList
+// from MediaCodecConstants.h (are these going to be deprecated?)
+static constexpr int COLOR_FormatYUV420SemiPlanar = 21;
+static constexpr int COLOR_FormatYUV420Flexible = 0x7F420888;
+static constexpr int COLOR_FormatSurface = 0x7f000789;
+
+// constants not defined in NDK
+extern const char* TBD_AMEDIACODEC_PARAMETER_KEY_ALLOW_FRAME_DROP;
+extern const char* TBD_AMEDIACODEC_PARAMETER_KEY_REQUEST_SYNC_FRAME;
+extern const char* TBD_AMEDIACODEC_PARAMETER_KEY_VIDEO_BITRATE;
+extern const char* TBD_AMEDIACODEC_PARAMETER_KEY_MAX_B_FRAMES;
+static constexpr int TBD_AMEDIACODEC_BUFFER_FLAG_KEY_FRAME = 0x1;
+
+static constexpr int kBitrateModeConstant = 2;
+
+namespace AMediaFormatUtils {
+
+typedef struct {
+    const char* key;
+    bool (*copy)(const char* key, AMediaFormat* from, AMediaFormat* to);
+    bool (*copy2)(const char* key, AMediaFormat* from, AMediaFormat* to);
+} EntryCopier;
+
+#define ENTRY_COPIER(keyName, typeName) \
+    { keyName, AMediaFormatUtils::CopyFormatEntry##typeName, nullptr }
+#define ENTRY_COPIER2(keyName, typeName, typeName2)            \
+    {                                                          \
+        keyName, AMediaFormatUtils::CopyFormatEntry##typeName, \
+                AMediaFormatUtils::CopyFormatEntry##typeName2  \
+    }
+
+bool CopyFormatEntryString(const char* key, AMediaFormat* from, AMediaFormat* to);
+bool CopyFormatEntryInt64(const char* key, AMediaFormat* from, AMediaFormat* to);
+bool CopyFormatEntryInt32(const char* key, AMediaFormat* from, AMediaFormat* to);
+bool CopyFormatEntryFloat(const char* key, AMediaFormat* from, AMediaFormat* to);
+
+void CopyFormatEntries(AMediaFormat* from, AMediaFormat* to, const EntryCopier* entries,
+                       size_t entryCount);
+
+bool SetDefaultFormatValueFloat(const char* key, AMediaFormat* format, float value);
+bool SetDefaultFormatValueInt32(const char* key, AMediaFormat* format, int32_t value);
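+
+// Illustrative sketch (not part of this change): a caller could copy a fixed set of format
+// entries with a table such as the following (the key list is an arbitrary example):
+//
+//   static const AMediaFormatUtils::EntryCopier kCopyList[] = {
+//           ENTRY_COPIER(AMEDIAFORMAT_KEY_MIME, String),
+//           ENTRY_COPIER(AMEDIAFORMAT_KEY_DURATION, Int64),
+//           ENTRY_COPIER(AMEDIAFORMAT_KEY_WIDTH, Int32),
+//           ENTRY_COPIER(AMEDIAFORMAT_KEY_HEIGHT, Int32),
+//   };
+//   AMediaFormatUtils::CopyFormatEntries(srcFormat, dstFormat, kCopyList,
+//                                        sizeof(kCopyList) / sizeof(kCopyList[0]));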
+
+}  // namespace AMediaFormatUtils
+#endif  // ANDROID_MEDIA_TRANSCODING_NDK_COMMON_H
diff --git a/media/libmediatranscoding/transcoder/include/media/PassthroughTrackTranscoder.h b/media/libmediatranscoding/transcoder/include/media/PassthroughTrackTranscoder.h
new file mode 100644
index 0000000..b9491ed
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/include/media/PassthroughTrackTranscoder.h
@@ -0,0 +1,102 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_PASSTHROUGH_TRACK_TRANSCODER_H
+#define ANDROID_PASSTHROUGH_TRACK_TRANSCODER_H
+
+#include <media/MediaTrackTranscoder.h>
+#include <media/NdkMediaFormat.h>
+
+#include <condition_variable>
+#include <map>
+#include <mutex>
+#include <unordered_map>
+
+namespace android {
+
+/**
+ * Track transcoder for passthrough mode. Passthrough mode copies a track's sample data unchanged
+ * from the source file to the destination file. This track transcoder uses an internal pool of
+ * buffers. When the maximum number of buffers are allocated and all of them are waiting on the
+ * output queue, the transcoder will stall until samples are dequeued from the output queue and
+ * released.
+ */
+class PassthroughTrackTranscoder : public MediaTrackTranscoder {
+public:
+    /** Maximum number of buffers to be allocated at a given time. */
+    static constexpr int kMaxBufferCountDefault = 16;
+
+    PassthroughTrackTranscoder(
+            const std::weak_ptr<MediaTrackTranscoderCallback>& transcoderCallback)
+          : MediaTrackTranscoder(transcoderCallback),
+            mBufferPool(std::make_shared<BufferPool>(kMaxBufferCountDefault)){};
+    virtual ~PassthroughTrackTranscoder() override = default;
+
+private:
+    friend class BufferPoolTests;
+
+    /** Class to pool and reuse buffers. */
+    class BufferPool {
+    public:
+        explicit BufferPool(int maxBufferCount) : mMaxBufferCount(maxBufferCount){};
+        ~BufferPool();
+
+        /**
+         * Retrieve a buffer from the pool. Buffers are allocated on demand. This method will block
+         * if the maximum number of buffers is reached and there are no free buffers available.
+         * @param minimumBufferSize The minimum size of the buffer.
+         * @return The buffer or nullptr if allocation failed or the pool was aborted.
+         */
+        uint8_t* getBufferWithSize(size_t minimumBufferSize);
+
+        /**
+         * Return a buffer to the pool.
+         * @param buffer The buffer to return.
+         */
+        void returnBuffer(uint8_t* buffer);
+
+        /** Wakes up threads waiting on buffers and prevents new buffers from being returned. */
+        void abort();
+
+    private:
+        // Maximum number of active buffers at a time.
+        const int mMaxBufferCount;
+
+        // Map containing all tracked buffers.
+        std::unordered_map<uint8_t*, size_t> mAddressSizeMap GUARDED_BY(mMutex);
+
+        // Map containing the currently free buffers.
+        std::multimap<size_t, uint8_t*> mFreeBufferMap GUARDED_BY(mMutex);
+
+        std::mutex mMutex;
+        std::condition_variable mCondition;
+        bool mAborted GUARDED_BY(mMutex) = false;
+    };
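+
+    // Illustrative sketch (not part of this change) of the intended BufferPool usage pattern in
+    // the transcode loop; sampleSize and the release wiring are assumptions:
+    //
+    //   uint8_t* buffer = mBufferPool->getBufferWithSize(sampleSize);
+    //   if (buffer == nullptr) return AMEDIA_ERROR_UNKNOWN;  // Pool was aborted.
+    //   // Fill the buffer, then wrap it in a MediaSample whose release callback calls
+    //   // mBufferPool->returnBuffer(buffer) so the buffer goes back into the pool.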
+
+    // MediaTrackTranscoder
+    media_status_t runTranscodeLoop() override;
+    void abortTranscodeLoop() override;
+    media_status_t configureDestinationFormat(
+            const std::shared_ptr<AMediaFormat>& destinationFormat) override;
+    std::shared_ptr<AMediaFormat> getOutputFormat() const override;
+    // ~MediaTrackTranscoder
+
+    std::shared_ptr<BufferPool> mBufferPool;
+    bool mEosFromSource = false;
+    std::atomic_bool mStopRequested = false;
+};
+
+}  // namespace android
+#endif  // ANDROID_PASSTHROUGH_TRACK_TRANSCODER_H
diff --git a/media/libmediatranscoding/transcoder/include/media/VideoTrackTranscoder.h b/media/libmediatranscoding/transcoder/include/media/VideoTrackTranscoder.h
new file mode 100644
index 0000000..d000d7f
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/include/media/VideoTrackTranscoder.h
@@ -0,0 +1,101 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_VIDEO_TRACK_TRANSCODER_H
+#define ANDROID_VIDEO_TRACK_TRANSCODER_H
+
+#include <android/native_window.h>
+#include <media/MediaTrackTranscoder.h>
+#include <media/NdkMediaCodec.h>
+#include <media/NdkMediaFormat.h>
+
+#include <condition_variable>
+#include <deque>
+#include <mutex>
+
+namespace android {
+
+/**
+ * Track transcoder for video tracks. VideoTrackTranscoder uses AMediaCodec from the Media NDK
+ * internally. The two media codecs are run in asynchronous mode and share uncompressed buffers
+ * using a native surface (ANativeWindow). Codec callback events are placed on a message queue and
+ * serviced in order on the transcoding thread managed by MediaTrackTranscoder.
+ */
+class VideoTrackTranscoder : public std::enable_shared_from_this<VideoTrackTranscoder>,
+                             public MediaTrackTranscoder {
+public:
+    static std::shared_ptr<VideoTrackTranscoder> create(
+            const std::weak_ptr<MediaTrackTranscoderCallback>& transcoderCallback);
+
+    virtual ~VideoTrackTranscoder() override;
+
+private:
+    friend struct AsyncCodecCallbackDispatch;
+
+    // Minimal blocking queue used as a message queue by VideoTrackTranscoder.
+    template <typename T>
+    class BlockingQueue {
+    public:
+        void push(T const& value, bool front = false);
+        T pop();
+        void abort();
+
+    private:
+        std::mutex mMutex;
+        std::condition_variable mCondition;
+        std::deque<T> mQueue;
+        bool mAborted = false;
+    };
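+
+    // Illustrative sketch (not part of this change): codec callbacks are expected to be
+    // marshalled onto the transcoding thread by pushing closures onto the message queue, e.g.:
+    //
+    //   mCodecMessageQueue.push([this, bufferIndex] { enqueueInputSample(bufferIndex); });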
+    class CodecWrapper;
+
+    VideoTrackTranscoder(const std::weak_ptr<MediaTrackTranscoderCallback>& transcoderCallback)
+          : MediaTrackTranscoder(transcoderCallback){};
+
+    // MediaTrackTranscoder
+    media_status_t runTranscodeLoop() override;
+    void abortTranscodeLoop() override;
+    media_status_t configureDestinationFormat(
+            const std::shared_ptr<AMediaFormat>& destinationFormat) override;
+    std::shared_ptr<AMediaFormat> getOutputFormat() const override;
+    // ~MediaTrackTranscoder
+
+    // Enqueues an input sample with the decoder.
+    void enqueueInputSample(int32_t bufferIndex);
+
+    // Moves a decoded buffer from the decoder's output to the encoder's input.
+    void transferBuffer(int32_t bufferIndex, AMediaCodecBufferInfo bufferInfo);
+
+    // Dequeues an encoded buffer from the encoder and adds it to the output queue.
+    void dequeueOutputSample(int32_t bufferIndex, AMediaCodecBufferInfo bufferInfo);
+
+    // Updates the video track's actual format based on encoder output format.
+    void updateTrackFormat(AMediaFormat* outputFormat);
+
+    AMediaCodec* mDecoder = nullptr;
+    std::shared_ptr<CodecWrapper> mEncoder;
+    ANativeWindow* mSurface = nullptr;
+    bool mEosFromSource = false;
+    bool mEosFromEncoder = false;
+    bool mStopRequested = false;
+    media_status_t mStatus = AMEDIA_OK;
+    MediaSampleInfo mSampleInfo;
+    BlockingQueue<std::function<void()>> mCodecMessageQueue;
+    std::shared_ptr<AMediaFormat> mDestinationFormat;
+    std::shared_ptr<AMediaFormat> mActualOutputFormat;
+};
+
+}  // namespace android
+#endif  // ANDROID_VIDEO_TRACK_TRANSCODER_H
diff --git a/media/libmediatranscoding/transcoder/tests/Android.bp b/media/libmediatranscoding/transcoder/tests/Android.bp
new file mode 100644
index 0000000..7ae6261
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/tests/Android.bp
@@ -0,0 +1,93 @@
+// Unit tests for libmediatranscoder.
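+//
+// Assuming a standard AOSP checkout, these cc_test modules can typically be run with atest
+// (e.g. `atest MediaSampleWriterTests`), which uses the test config template referenced below to
+// push the test assets to the device.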
+
+filegroup {
+    name: "test_assets",
+    srcs: ["assets/*"],
+}
+
+cc_defaults {
+    name: "testdefaults",
+
+    header_libs: [
+        "libbase_headers",
+        "libmedia_headers",
+    ],
+
+    shared_libs: [
+        "libbase",
+        "libcutils",
+        "libmediandk",
+        "libmediatranscoder_asan",
+        "libutils",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+
+    sanitize: {
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+        cfi: true,
+        address: true,
+    },
+
+    data: [":test_assets"],
+    test_config_template: "AndroidTestTemplate.xml",
+    test_suites: ["device-tests", "TranscoderTests"],
+}
+
+// MediaSampleReaderNDK unit test
+cc_test {
+    name: "MediaSampleReaderNDKTests",
+    defaults: ["testdefaults"],
+    srcs: ["MediaSampleReaderNDKTests.cpp"],
+}
+
+// MediaSampleQueue unit test
+cc_test {
+    name: "MediaSampleQueueTests",
+    defaults: ["testdefaults"],
+    srcs: ["MediaSampleQueueTests.cpp"],
+}
+
+// MediaTrackTranscoder unit test
+cc_test {
+    name: "MediaTrackTranscoderTests",
+    defaults: ["testdefaults"],
+    srcs: ["MediaTrackTranscoderTests.cpp"],
+    shared_libs: ["libbinder_ndk"],
+}
+
+// VideoTrackTranscoder unit test
+cc_test {
+    name: "VideoTrackTranscoderTests",
+    defaults: ["testdefaults"],
+    srcs: ["VideoTrackTranscoderTests.cpp"],
+}
+
+// PassthroughTrackTranscoder unit test
+cc_test {
+    name: "PassthroughTrackTranscoderTests",
+    defaults: ["testdefaults"],
+    srcs: ["PassthroughTrackTranscoderTests.cpp"],
+    shared_libs: ["libcrypto"],
+}
+
+// MediaSampleWriter unit test
+cc_test {
+    name: "MediaSampleWriterTests",
+    defaults: ["testdefaults"],
+    srcs: ["MediaSampleWriterTests.cpp"],
+}
+
+// MediaTranscoder unit test
+cc_test {
+    name: "MediaTranscoderTests",
+    defaults: ["testdefaults"],
+    srcs: ["MediaTranscoderTests.cpp"],
+    shared_libs: ["libbinder_ndk"],
+}
diff --git a/media/libmediatranscoding/transcoder/tests/AndroidTestTemplate.xml b/media/libmediatranscoding/transcoder/tests/AndroidTestTemplate.xml
new file mode 100644
index 0000000..a9a7e2e
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/tests/AndroidTestTemplate.xml
@@ -0,0 +1,29 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<configuration description="Unit test configuration for {MODULE}">
+    <option name="test-suite-tag" value="TranscoderTests" />
+    <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
+        <option name="cleanup" value="false" />
+        <option name="push-file"
+            key="assets"
+            value="/data/local/tmp/TranscodingTestAssets" />
+    </target_preparer>
+
+    <test class="com.android.tradefed.testtype.GTest" >
+        <option name="module-name" value="{MODULE}" />
+    </test>
+</configuration>
+
diff --git a/media/libmediatranscoding/transcoder/tests/MediaSampleQueueTests.cpp b/media/libmediatranscoding/transcoder/tests/MediaSampleQueueTests.cpp
new file mode 100644
index 0000000..6357e4d
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/tests/MediaSampleQueueTests.cpp
@@ -0,0 +1,237 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Unit Test for MediaSampleQueue
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "MediaSampleQueueTests"
+
+#include <android-base/logging.h>
+#include <gtest/gtest.h>
+#include <media/MediaSampleQueue.h>
+
+#include <thread>
+
+namespace android {
+
+/** Duration to use when delaying threads to order operations. */
+static constexpr int64_t kThreadDelayDurationMs = 100;
+
+class MediaSampleQueueTests : public ::testing::Test {
+public:
+    MediaSampleQueueTests() { LOG(DEBUG) << "MediaSampleQueueTests created"; }
+    ~MediaSampleQueueTests() { LOG(DEBUG) << "MediaSampleQueueTests destroyed"; }
+};
+
+static std::shared_ptr<MediaSample> newSample(uint32_t id) {
+    return MediaSample::createWithReleaseCallback(nullptr /* buffer */, 0 /* offset */, id,
+                                                  nullptr /* callback */);
+}
+
+TEST_F(MediaSampleQueueTests, TestSequentialDequeueOrder) {
+    LOG(DEBUG) << "TestSequentialDequeueOrder Starts";
+
+    static constexpr int kNumSamples = 4;
+    MediaSampleQueue sampleQueue;
+    EXPECT_TRUE(sampleQueue.isEmpty());
+
+    // Enqueue loop.
+    for (int i = 0; i < kNumSamples; ++i) {
+        sampleQueue.enqueue(newSample(i));
+        EXPECT_FALSE(sampleQueue.isEmpty());
+    }
+
+    // Dequeue loop.
+    for (int i = 0; i < kNumSamples; ++i) {
+        std::shared_ptr<MediaSample> sample;
+        bool aborted = sampleQueue.dequeue(&sample);
+        EXPECT_NE(sample, nullptr);
+        EXPECT_EQ(sample->bufferId, i);
+        EXPECT_FALSE(aborted);
+    }
+    EXPECT_TRUE(sampleQueue.isEmpty());
+}
+
+TEST_F(MediaSampleQueueTests, TestInterleavedDequeueOrder) {
+    LOG(DEBUG) << "TestInterleavedDequeueOrder Starts";
+
+    static constexpr int kNumSamples = 4;
+    MediaSampleQueue sampleQueue;
+
+    // Enqueue and dequeue.
+    for (int i = 0; i < kNumSamples; ++i) {
+        sampleQueue.enqueue(newSample(i));
+        EXPECT_FALSE(sampleQueue.isEmpty());
+
+        std::shared_ptr<MediaSample> sample;
+        bool aborted = sampleQueue.dequeue(&sample);
+        EXPECT_NE(sample, nullptr);
+        EXPECT_EQ(sample->bufferId, i);
+        EXPECT_FALSE(aborted);
+        EXPECT_TRUE(sampleQueue.isEmpty());
+    }
+}
+
+TEST_F(MediaSampleQueueTests, TestBlockingDequeue) {
+    LOG(DEBUG) << "TestBlockingDequeue Starts";
+
+    MediaSampleQueue sampleQueue;
+
+    std::thread enqueueThread([&sampleQueue] {
+        // Note: This implementation is a bit racy. Any amount of sleep will not guarantee that the
+        // main thread will be blocked on the sample queue by the time this thread calls enqueue.
+        // But we can say with high confidence that it will, and the test will not fail regardless.
+        std::this_thread::sleep_for(std::chrono::milliseconds(kThreadDelayDurationMs));
+        sampleQueue.enqueue(newSample(1));
+    });
+
+    std::shared_ptr<MediaSample> sample;
+    bool aborted = sampleQueue.dequeue(&sample);
+    EXPECT_NE(sample, nullptr);
+    EXPECT_EQ(sample->bufferId, 1);
+    EXPECT_FALSE(aborted);
+    EXPECT_TRUE(sampleQueue.isEmpty());
+
+    enqueueThread.join();
+}
+
+TEST_F(MediaSampleQueueTests, TestDequeueBufferRelease) {
+    LOG(DEBUG) << "TestDequeueBufferRelease Starts";
+
+    static constexpr int kNumSamples = 4;
+    std::vector<bool> bufferReleased(kNumSamples, false);
+
+    MediaSample::OnSampleReleasedCallback callback = [&bufferReleased](MediaSample* sample) {
+        bufferReleased[sample->bufferId] = true;
+    };
+
+    MediaSampleQueue sampleQueue;
+    for (int i = 0; i < kNumSamples; ++i) {
+        bool aborted = sampleQueue.enqueue(
+                MediaSample::createWithReleaseCallback(nullptr, 0, i, callback));
+        EXPECT_FALSE(aborted);
+    }
+
+    for (int i = 0; i < kNumSamples; ++i) {
+        EXPECT_FALSE(bufferReleased[i]);
+    }
+
+    for (int i = 0; i < kNumSamples; ++i) {
+        {
+            std::shared_ptr<MediaSample> sample;
+            bool aborted = sampleQueue.dequeue(&sample);
+            EXPECT_NE(sample, nullptr);
+            EXPECT_EQ(sample->bufferId, i);
+            EXPECT_FALSE(bufferReleased[i]);
+            EXPECT_FALSE(aborted);
+        }
+
+        for (int j = 0; j < kNumSamples; ++j) {
+            EXPECT_EQ(bufferReleased[j], j <= i);
+        }
+    }
+}
+
+TEST_F(MediaSampleQueueTests, TestAbortBufferRelease) {
+    LOG(DEBUG) << "TestAbortBufferRelease Starts";
+
+    static constexpr int kNumSamples = 4;
+    std::vector<bool> bufferReleased(kNumSamples, false);
+
+    MediaSample::OnSampleReleasedCallback callback = [&bufferReleased](MediaSample* sample) {
+        bufferReleased[sample->bufferId] = true;
+    };
+
+    MediaSampleQueue sampleQueue;
+    for (int i = 0; i < kNumSamples; ++i) {
+        bool aborted = sampleQueue.enqueue(
+                MediaSample::createWithReleaseCallback(nullptr, 0, i, callback));
+        EXPECT_FALSE(aborted);
+    }
+
+    for (int i = 0; i < kNumSamples; ++i) {
+        EXPECT_FALSE(bufferReleased[i]);
+    }
+
+    EXPECT_FALSE(sampleQueue.isEmpty());
+    sampleQueue.abort();
+    EXPECT_TRUE(sampleQueue.isEmpty());
+
+    for (int i = 0; i < kNumSamples; ++i) {
+        EXPECT_TRUE(bufferReleased[i]);
+    }
+}
+
+TEST_F(MediaSampleQueueTests, TestNonEmptyAbort) {
+    LOG(DEBUG) << "TestNonEmptyAbort Starts";
+
+    MediaSampleQueue sampleQueue;
+    bool aborted = sampleQueue.enqueue(newSample(1));
+    EXPECT_FALSE(aborted);
+
+    sampleQueue.abort();
+
+    std::shared_ptr<MediaSample> sample;
+    aborted = sampleQueue.dequeue(&sample);
+    EXPECT_TRUE(aborted);
+    EXPECT_EQ(sample, nullptr);
+
+    aborted = sampleQueue.enqueue(sample);
+    EXPECT_TRUE(aborted);
+}
+
+TEST_F(MediaSampleQueueTests, TestEmptyAbort) {
+    LOG(DEBUG) << "TestEmptyAbort Starts";
+
+    MediaSampleQueue sampleQueue;
+    sampleQueue.abort();
+
+    std::shared_ptr<MediaSample> sample;
+    bool aborted = sampleQueue.dequeue(&sample);
+    EXPECT_TRUE(aborted);
+    EXPECT_EQ(sample, nullptr);
+
+    aborted = sampleQueue.enqueue(sample);
+    EXPECT_TRUE(aborted);
+}
+
+TEST_F(MediaSampleQueueTests, TestBlockingAbort) {
+    LOG(DEBUG) << "TestBlockingAbort Starts";
+
+    MediaSampleQueue sampleQueue;
+
+    std::thread abortingThread([&sampleQueue] {
+        // Note: This implementation is a bit racy. Any amount of sleep will not guarantee that the
+        // main thread will be blocked on the sample queue by the time this thread calls abort.
+        // But we can say with high confidence that it will, and the test will not fail regardless.
+        std::this_thread::sleep_for(std::chrono::milliseconds(kThreadDelayDurationMs));
+        sampleQueue.abort();
+    });
+
+    std::shared_ptr<MediaSample> sample;
+    bool aborted = sampleQueue.dequeue(&sample);
+    EXPECT_TRUE(aborted);
+    EXPECT_EQ(sample, nullptr);
+
+    abortingThread.join();
+}
+
+}  // namespace android
+
+int main(int argc, char** argv) {
+    ::testing::InitGoogleTest(&argc, argv);
+    return RUN_ALL_TESTS();
+}
diff --git a/media/libmediatranscoding/transcoder/tests/MediaSampleReaderNDKTests.cpp b/media/libmediatranscoding/transcoder/tests/MediaSampleReaderNDKTests.cpp
new file mode 100644
index 0000000..9c9c8b5
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/tests/MediaSampleReaderNDKTests.cpp
@@ -0,0 +1,225 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Unit Test for MediaSampleReaderNDK
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "MediaSampleReaderNDKTests"
+
+#include <android-base/logging.h>
+#include <android/binder_manager.h>
+#include <android/binder_process.h>
+#include <fcntl.h>
+#include <gtest/gtest.h>
+#include <media/MediaSampleReaderNDK.h>
+#include <utils/Timers.h>
+
+#include <cmath>
+#include <mutex>
+#include <thread>
+
+// TODO(b/153453392): Test more asset types and validate sample data from readSampleDataForTrack.
+// TODO(b/153453392): Test for sequential and parallel (single thread and multi thread) access.
+// TODO(b/153453392): Test for switching between sequential and parallel access at different
+//  points in time.
+
+namespace android {
+
+#define SEC_TO_USEC(s) ((s) * 1000 * 1000)
+
+class MediaSampleReaderNDKTests : public ::testing::Test {
+public:
+    MediaSampleReaderNDKTests() { LOG(DEBUG) << "MediaSampleReaderNDKTests created"; }
+
+    void SetUp() override {
+        LOG(DEBUG) << "MediaSampleReaderNDKTests set up";
+        const char* sourcePath =
+                "/data/local/tmp/TranscodingTestAssets/cubicle_avc_480x240_aac_24KHz.mp4";
+
+        mExtractor = AMediaExtractor_new();
+        ASSERT_NE(mExtractor, nullptr);
+
+        mSourceFd = open(sourcePath, O_RDONLY);
+        ASSERT_GT(mSourceFd, 0);
+
+        mFileSize = lseek(mSourceFd, 0, SEEK_END);
+        lseek(mSourceFd, 0, SEEK_SET);
+
+        media_status_t status =
+                AMediaExtractor_setDataSourceFd(mExtractor, mSourceFd, 0, mFileSize);
+        ASSERT_EQ(status, AMEDIA_OK);
+
+        mTrackCount = AMediaExtractor_getTrackCount(mExtractor);
+        for (size_t trackIndex = 0; trackIndex < mTrackCount; trackIndex++) {
+            AMediaExtractor_selectTrack(mExtractor, trackIndex);
+        }
+    }
+
+    void initExtractorTimestamps() {
+        // Save all sample timestamps, per track, as reported by the extractor.
+        mExtractorTimestamps.resize(mTrackCount);
+        do {
+            const int trackIndex = AMediaExtractor_getSampleTrackIndex(mExtractor);
+            const int64_t sampleTime = AMediaExtractor_getSampleTime(mExtractor);
+
+            mExtractorTimestamps[trackIndex].push_back(sampleTime);
+        } while (AMediaExtractor_advance(mExtractor));
+
+        AMediaExtractor_seekTo(mExtractor, 0, AMEDIAEXTRACTOR_SEEK_PREVIOUS_SYNC);
+    }
+
+    std::vector<int32_t> getTrackBitrates() {
+        size_t totalSize[mTrackCount];
+        memset(totalSize, 0, sizeof(totalSize));
+
+        do {
+            const int trackIndex = AMediaExtractor_getSampleTrackIndex(mExtractor);
+            totalSize[trackIndex] += AMediaExtractor_getSampleSize(mExtractor);
+        } while (AMediaExtractor_advance(mExtractor));
+
+        AMediaExtractor_seekTo(mExtractor, 0, AMEDIAEXTRACTOR_SEEK_PREVIOUS_SYNC);
+
+        std::vector<int32_t> bitrates;
+        for (int trackIndex = 0; trackIndex < mTrackCount; trackIndex++) {
+            int64_t durationUs;
+            AMediaFormat* trackFormat = AMediaExtractor_getTrackFormat(mExtractor, trackIndex);
+            EXPECT_NE(trackFormat, nullptr);
+            EXPECT_TRUE(AMediaFormat_getInt64(trackFormat, AMEDIAFORMAT_KEY_DURATION, &durationUs));
+            bitrates.push_back(roundf((float)totalSize[trackIndex] * 8 * 1000000 / durationUs));
+        }
+
+        return bitrates;
+    }
+
+    void TearDown() override {
+        LOG(DEBUG) << "MediaSampleReaderNDKTests tear down";
+        AMediaExtractor_delete(mExtractor);
+        close(mSourceFd);
+    }
+
+    ~MediaSampleReaderNDKTests() { LOG(DEBUG) << "MediaSampleReaderNDKTests destroyed"; }
+
+    AMediaExtractor* mExtractor = nullptr;
+    size_t mTrackCount;
+    int mSourceFd;
+    size_t mFileSize;
+    std::vector<std::vector<int64_t>> mExtractorTimestamps;
+};
+
+TEST_F(MediaSampleReaderNDKTests, TestSampleTimes) {
+    LOG(DEBUG) << "TestSampleTimes Starts";
+
+    std::shared_ptr<MediaSampleReader> sampleReader =
+            MediaSampleReaderNDK::createFromFd(mSourceFd, 0, mFileSize);
+    ASSERT_TRUE(sampleReader);
+
+    for (int trackIndex = 0; trackIndex < mTrackCount; trackIndex++) {
+        EXPECT_EQ(sampleReader->selectTrack(trackIndex), AMEDIA_OK);
+    }
+
+    // Initialize the extractor timestamps.
+    initExtractorTimestamps();
+
+    std::mutex timestampMutex;
+    std::vector<std::thread> trackThreads;
+    std::vector<std::vector<int64_t>> readerTimestamps(mTrackCount);
+
+    for (int trackIndex = 0; trackIndex < mTrackCount; trackIndex++) {
+        trackThreads.emplace_back([sampleReader, trackIndex, &timestampMutex, &readerTimestamps] {
+            MediaSampleInfo info;
+            while (true) {
+                media_status_t status = sampleReader->getSampleInfoForTrack(trackIndex, &info);
+                if (status != AMEDIA_OK) {
+                    EXPECT_EQ(status, AMEDIA_ERROR_END_OF_STREAM);
+                    EXPECT_TRUE((info.flags & SAMPLE_FLAG_END_OF_STREAM) != 0);
+                    break;
+                }
+                ASSERT_TRUE((info.flags & SAMPLE_FLAG_END_OF_STREAM) == 0);
+                timestampMutex.lock();
+                readerTimestamps[trackIndex].push_back(info.presentationTimeUs);
+                timestampMutex.unlock();
+                sampleReader->advanceTrack(trackIndex);
+            }
+        });
+    }
+
+    for (auto& thread : trackThreads) {
+        thread.join();
+    }
+
+    for (int trackIndex = 0; trackIndex < mTrackCount; trackIndex++) {
+        LOG(DEBUG) << "Track " << trackIndex << ", comparing "
+                   << readerTimestamps[trackIndex].size() << " samples.";
+        EXPECT_EQ(readerTimestamps[trackIndex].size(), mExtractorTimestamps[trackIndex].size());
+        for (size_t sampleIndex = 0; sampleIndex < readerTimestamps[trackIndex].size();
+             sampleIndex++) {
+            EXPECT_EQ(readerTimestamps[trackIndex][sampleIndex],
+                      mExtractorTimestamps[trackIndex][sampleIndex]);
+        }
+    }
+}
+
+TEST_F(MediaSampleReaderNDKTests, TestEstimatedBitrateAccuracy) {
+    // Just put a somewhat reasonable upper bound on the estimated bitrate expected in our test
+    // assets. This is mostly to make sure the estimation is not way off.
+    static constexpr int32_t kMaxEstimatedBitrate = 100 * 1000 * 1000;  // 100 Mbps
+
+    auto sampleReader = MediaSampleReaderNDK::createFromFd(mSourceFd, 0, mFileSize);
+    ASSERT_TRUE(sampleReader);
+
+    std::vector<int32_t> actualTrackBitrates = getTrackBitrates();
+    for (int trackIndex = 0; trackIndex < mTrackCount; ++trackIndex) {
+        EXPECT_EQ(sampleReader->selectTrack(trackIndex), AMEDIA_OK);
+
+        int32_t bitrate;
+        EXPECT_EQ(sampleReader->getEstimatedBitrateForTrack(trackIndex, &bitrate), AMEDIA_OK);
+        EXPECT_GT(bitrate, 0);
+        EXPECT_LT(bitrate, kMaxEstimatedBitrate);
+
+        // Note: The test asset currently used in this test is shorter than the sampling duration
+        // used to estimate the bitrate in the sample reader. So for now the estimation should be
+        // exact but if/when a longer asset is used a reasonable delta needs to be defined.
+        EXPECT_EQ(bitrate, actualTrackBitrates[trackIndex]);
+    }
+}
+
+TEST_F(MediaSampleReaderNDKTests, TestInvalidFd) {
+    std::shared_ptr<MediaSampleReader> sampleReader =
+            MediaSampleReaderNDK::createFromFd(0, 0, mFileSize);
+    ASSERT_TRUE(sampleReader == nullptr);
+
+    sampleReader = MediaSampleReaderNDK::createFromFd(-1, 0, mFileSize);
+    ASSERT_TRUE(sampleReader == nullptr);
+}
+
+TEST_F(MediaSampleReaderNDKTests, TestZeroSize) {
+    std::shared_ptr<MediaSampleReader> sampleReader =
+            MediaSampleReaderNDK::createFromFd(mSourceFd, 0, 0);
+    ASSERT_TRUE(sampleReader == nullptr);
+}
+
+TEST_F(MediaSampleReaderNDKTests, TestInvalidOffset) {
+    std::shared_ptr<MediaSampleReader> sampleReader =
+            MediaSampleReaderNDK::createFromFd(mSourceFd, mFileSize, mFileSize);
+    ASSERT_TRUE(sampleReader == nullptr);
+}
+
+}  // namespace android
+
+int main(int argc, char** argv) {
+    ::testing::InitGoogleTest(&argc, argv);
+    return RUN_ALL_TESTS();
+}
diff --git a/media/libmediatranscoding/transcoder/tests/MediaSampleWriterTests.cpp b/media/libmediatranscoding/transcoder/tests/MediaSampleWriterTests.cpp
new file mode 100644
index 0000000..46f3e9b
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/tests/MediaSampleWriterTests.cpp
@@ -0,0 +1,597 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Unit Test for MediaSampleWriter
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "MediaSampleWriterTests"
+
+#include <android-base/logging.h>
+#include <fcntl.h>
+#include <gtest/gtest.h>
+#include <media/MediaSampleQueue.h>
+#include <media/MediaSampleWriter.h>
+#include <media/NdkMediaExtractor.h>
+
+#include <condition_variable>
+#include <list>
+#include <mutex>
+
+namespace android {
+
+/** Muxer interface to enable MediaSampleWriter testing. */
+class TestMuxer : public MediaSampleWriterMuxerInterface {
+public:
+    // MuxerInterface
+    ssize_t addTrack(AMediaFormat* trackFormat) override {
+        mEventQueue.push_back(AddTrack(trackFormat));
+        return mTrackCount++;
+    }
+    media_status_t start() override {
+        mEventQueue.push_back(Start());
+        return AMEDIA_OK;
+    }
+
+    media_status_t writeSampleData(size_t trackIndex, const uint8_t* data,
+                                   const AMediaCodecBufferInfo* info) override {
+        mEventQueue.push_back(WriteSample(trackIndex, data, info));
+        return AMEDIA_OK;
+    }
+    media_status_t stop() override {
+        mEventQueue.push_back(Stop());
+        return AMEDIA_OK;
+    }
+    // ~MuxerInterface
+
+    struct Event {
+        enum { NoEvent, AddTrack, Start, WriteSample, Stop } type = NoEvent;
+        const AMediaFormat* format = nullptr;
+        size_t trackIndex = 0;
+        const uint8_t* data = nullptr;
+        AMediaCodecBufferInfo info{};
+    };
+
+    static constexpr Event NoEvent = {Event::NoEvent, nullptr, 0, nullptr, {}};
+
+    static Event AddTrack(const AMediaFormat* format) {
+        return {.type = Event::AddTrack, .format = format};
+    }
+
+    static Event Start() { return {.type = Event::Start}; }
+    static Event Stop() { return {.type = Event::Stop}; }
+
+    static Event WriteSample(size_t trackIndex, const uint8_t* data,
+                             const AMediaCodecBufferInfo* info) {
+        return {.type = Event::WriteSample, .trackIndex = trackIndex, .data = data, .info = *info};
+    }
+
+    static Event WriteSampleWithPts(size_t trackIndex, int64_t pts) {
+        return {.type = Event::WriteSample, .trackIndex = trackIndex, .info = {0, 0, pts, 0}};
+    }
+
+    void pushEvent(const Event& e) {
+        std::unique_lock<std::mutex> lock(mMutex);
+        mEventQueue.push_back(e);
+        mCondition.notify_one();
+    }
+
+    const Event& popEvent(bool wait = false) {
+        std::unique_lock<std::mutex> lock(mMutex);
+        while (wait && mEventQueue.empty()) {
+            mCondition.wait_for(lock, std::chrono::milliseconds(200));
+        }
+
+        if (mEventQueue.empty()) {
+            mPoppedEvent = NoEvent;
+        } else {
+            mPoppedEvent = *mEventQueue.begin();
+            mEventQueue.pop_front();
+        }
+        return mPoppedEvent;
+    }
+
+private:
+    Event mPoppedEvent;
+    std::list<Event> mEventQueue;
+    ssize_t mTrackCount = 0;
+    std::mutex mMutex;
+    std::condition_variable mCondition;
+};
+
+bool operator==(const AMediaCodecBufferInfo& lhs, const AMediaCodecBufferInfo& rhs) {
+    return lhs.offset == rhs.offset && lhs.size == rhs.size &&
+           lhs.presentationTimeUs == rhs.presentationTimeUs && lhs.flags == rhs.flags;
+}
+
+bool operator==(const TestMuxer::Event& lhs, const TestMuxer::Event& rhs) {
+    return lhs.type == rhs.type && lhs.format == rhs.format && lhs.trackIndex == rhs.trackIndex &&
+           lhs.data == rhs.data && lhs.info == rhs.info;
+}
+
+/** Represents a media source file. */
+class TestMediaSource {
+public:
+    void init() {
+        static const char* sourcePath =
+                "/data/local/tmp/TranscodingTestAssets/cubicle_avc_480x240_aac_24KHz.mp4";
+
+        mExtractor = AMediaExtractor_new();
+        ASSERT_NE(mExtractor, nullptr);
+
+        int sourceFd = open(sourcePath, O_RDONLY);
+        ASSERT_GT(sourceFd, 0);
+
+        off_t fileSize = lseek(sourceFd, 0, SEEK_END);
+        lseek(sourceFd, 0, SEEK_SET);
+
+        media_status_t status = AMediaExtractor_setDataSourceFd(mExtractor, sourceFd, 0, fileSize);
+        ASSERT_EQ(status, AMEDIA_OK);
+        close(sourceFd);
+
+        mTrackCount = AMediaExtractor_getTrackCount(mExtractor);
+        ASSERT_GT(mTrackCount, 1);
+        for (size_t trackIndex = 0; trackIndex < mTrackCount; trackIndex++) {
+            AMediaFormat* trackFormat = AMediaExtractor_getTrackFormat(mExtractor, trackIndex);
+            ASSERT_NE(trackFormat, nullptr);
+
+            const char* mime = nullptr;
+            AMediaFormat_getString(trackFormat, AMEDIAFORMAT_KEY_MIME, &mime);
+            if (strncmp(mime, "video/", 6) == 0) {
+                mVideoTrackIndex = trackIndex;
+            } else if (strncmp(mime, "audio/", 6) == 0) {
+                mAudioTrackIndex = trackIndex;
+            }
+
+            mTrackFormats.push_back(
+                    std::shared_ptr<AMediaFormat>(trackFormat, &AMediaFormat_delete));
+
+            AMediaExtractor_selectTrack(mExtractor, trackIndex);
+        }
+        EXPECT_GE(mVideoTrackIndex, 0);
+        EXPECT_GE(mAudioTrackIndex, 0);
+    }
+
+    void reset() const {
+        media_status_t status = AMediaExtractor_seekTo(mExtractor, 0 /* seekPosUs */,
+                                                       AMEDIAEXTRACTOR_SEEK_PREVIOUS_SYNC);
+        ASSERT_EQ(status, AMEDIA_OK);
+    }
+
+    AMediaExtractor* mExtractor = nullptr;
+    size_t mTrackCount = 0;
+    std::vector<std::shared_ptr<AMediaFormat>> mTrackFormats;
+    int mVideoTrackIndex = -1;
+    int mAudioTrackIndex = -1;
+};
+
+class TestCallbacks : public MediaSampleWriter::CallbackInterface {
+public:
+    TestCallbacks(bool expectSuccess = true) : mExpectSuccess(expectSuccess) {}
+
+    bool hasFinished() {
+        std::unique_lock<std::mutex> lock(mMutex);
+        return mFinished;
+    }
+
+    // MediaSampleWriter::CallbackInterface
+    virtual void onFinished(const MediaSampleWriter* writer __unused,
+                            media_status_t status) override {
+        std::unique_lock<std::mutex> lock(mMutex);
+        EXPECT_FALSE(mFinished);
+        if (mExpectSuccess) {
+            EXPECT_EQ(status, AMEDIA_OK);
+        } else {
+            EXPECT_NE(status, AMEDIA_OK);
+        }
+        mFinished = true;
+        mCondition.notify_all();
+    }
+
+    virtual void onProgressUpdate(const MediaSampleWriter* writer __unused,
+                                  int32_t progress) override {
+        EXPECT_GT(progress, mLastProgress);
+        EXPECT_GE(progress, 0);
+        EXPECT_LE(progress, 100);
+
+        mLastProgress = progress;
+        mProgressUpdateCount++;
+    }
+    // ~MediaSampleWriter::CallbackInterface
+
+    void waitForWritingFinished() {
+        std::unique_lock<std::mutex> lock(mMutex);
+        while (!mFinished) {
+            mCondition.wait(lock);
+        }
+    }
+
+    uint32_t getProgressUpdateCount() const { return mProgressUpdateCount; }
+
+private:
+    std::mutex mMutex;
+    std::condition_variable mCondition;
+    bool mFinished = false;
+    bool mExpectSuccess;
+    int32_t mLastProgress = -1;
+    uint32_t mProgressUpdateCount = 0;
+};
+
+class MediaSampleWriterTests : public ::testing::Test {
+public:
+    MediaSampleWriterTests() { LOG(DEBUG) << "MediaSampleWriterTests created"; }
+    ~MediaSampleWriterTests() { LOG(DEBUG) << "MediaSampleWriterTests destroyed"; }
+
+    static const TestMediaSource& getMediaSource() {
+        static TestMediaSource sMediaSource;
+        static std::once_flag sOnceToken;
+
+        std::call_once(sOnceToken, [] { sMediaSource.init(); });
+
+        sMediaSource.reset();
+        return sMediaSource;
+    }
+
+    static std::shared_ptr<MediaSample> newSample(int64_t ptsUs, uint32_t flags, size_t size,
+                                                  size_t offset, const uint8_t* buffer) {
+        auto sample = std::make_shared<MediaSample>();
+        sample->info.presentationTimeUs = ptsUs;
+        sample->info.flags = flags;
+        sample->info.size = size;
+        sample->dataOffset = offset;
+        sample->buffer = buffer;
+        return sample;
+    }
+
+    static std::shared_ptr<MediaSample> newSampleEos() {
+        return newSample(0, SAMPLE_FLAG_END_OF_STREAM, 0, 0, nullptr);
+    }
+
+    static std::shared_ptr<MediaSample> newSampleWithPts(int64_t ptsUs) {
+        static uint32_t sampleCount = 0;
+
+        // Use sampleCount to get a unique mock sample.
+        uint32_t sampleId = ++sampleCount;
+        return newSample(ptsUs, 0, sampleId, sampleId, reinterpret_cast<const uint8_t*>(sampleId));
+    }
+
+    static std::shared_ptr<MediaSample> newSampleWithPtsOnly(int64_t ptsUs) {
+        return newSample(ptsUs, 0, 0, 0, nullptr);
+    }
+
+    void SetUp() override {
+        LOG(DEBUG) << "MediaSampleWriterTests set up";
+        mTestMuxer = std::make_shared<TestMuxer>();
+    }
+
+    void TearDown() override {
+        LOG(DEBUG) << "MediaSampleWriterTests tear down";
+        mTestMuxer.reset();
+    }
+
+protected:
+    std::shared_ptr<TestMuxer> mTestMuxer;
+    std::shared_ptr<TestCallbacks> mTestCallbacks = std::make_shared<TestCallbacks>();
+};
+
+TEST_F(MediaSampleWriterTests, TestAddTrackWithoutInit) {
+    const TestMediaSource& mediaSource = getMediaSource();
+
+    std::shared_ptr<MediaSampleWriter> writer = MediaSampleWriter::Create();
+    EXPECT_EQ(writer->addTrack(mediaSource.mTrackFormats[0]), nullptr);
+}
+
+TEST_F(MediaSampleWriterTests, TestStartWithoutInit) {
+    std::shared_ptr<MediaSampleWriter> writer = MediaSampleWriter::Create();
+    EXPECT_FALSE(writer->start());
+}
+
+TEST_F(MediaSampleWriterTests, TestStartWithoutTracks) {
+    std::shared_ptr<MediaSampleWriter> writer = MediaSampleWriter::Create();
+    EXPECT_TRUE(writer->init(mTestMuxer, mTestCallbacks));
+    EXPECT_FALSE(writer->start());
+    EXPECT_EQ(mTestMuxer->popEvent(), TestMuxer::NoEvent);
+}
+
+TEST_F(MediaSampleWriterTests, TestAddInvalidTrack) {
+    std::shared_ptr<MediaSampleWriter> writer = MediaSampleWriter::Create();
+    EXPECT_TRUE(writer->init(mTestMuxer, mTestCallbacks));
+
+    EXPECT_EQ(writer->addTrack(nullptr), nullptr);
+    EXPECT_EQ(mTestMuxer->popEvent(), TestMuxer::NoEvent);
+}
+
+TEST_F(MediaSampleWriterTests, TestDoubleStartStop) {
+    std::shared_ptr<MediaSampleWriter> writer = MediaSampleWriter::Create();
+
+    std::shared_ptr<TestCallbacks> callbacks =
+            std::make_shared<TestCallbacks>(false /* expectSuccess */);
+    EXPECT_TRUE(writer->init(mTestMuxer, callbacks));
+
+    const TestMediaSource& mediaSource = getMediaSource();
+    EXPECT_NE(writer->addTrack(mediaSource.mTrackFormats[0]), nullptr);
+    EXPECT_EQ(mTestMuxer->popEvent(), TestMuxer::AddTrack(mediaSource.mTrackFormats[0].get()));
+
+    ASSERT_TRUE(writer->start());
+    EXPECT_FALSE(writer->start());
+
+    EXPECT_TRUE(writer->stop());
+    EXPECT_TRUE(callbacks->hasFinished());
+    EXPECT_FALSE(writer->stop());
+}
+
+TEST_F(MediaSampleWriterTests, TestStopWithoutStart) {
+    std::shared_ptr<MediaSampleWriter> writer = MediaSampleWriter::Create();
+    EXPECT_TRUE(writer->init(mTestMuxer, mTestCallbacks));
+
+    const TestMediaSource& mediaSource = getMediaSource();
+    EXPECT_NE(writer->addTrack(mediaSource.mTrackFormats[0]), nullptr);
+    EXPECT_EQ(mTestMuxer->popEvent(), TestMuxer::AddTrack(mediaSource.mTrackFormats[0].get()));
+
+    EXPECT_FALSE(writer->stop());
+    EXPECT_EQ(mTestMuxer->popEvent(), TestMuxer::NoEvent);
+}
+
+TEST_F(MediaSampleWriterTests, TestStartWithoutCallback) {
+    std::shared_ptr<MediaSampleWriter> writer = MediaSampleWriter::Create();
+
+    std::weak_ptr<MediaSampleWriter::CallbackInterface> unassignedWp;
+    EXPECT_FALSE(writer->init(mTestMuxer, unassignedWp));
+
+    std::shared_ptr<MediaSampleWriter::CallbackInterface> unassignedSp;
+    EXPECT_FALSE(writer->init(mTestMuxer, unassignedSp));
+
+    const TestMediaSource& mediaSource = getMediaSource();
+    EXPECT_EQ(writer->addTrack(mediaSource.mTrackFormats[0]), nullptr);
+    ASSERT_FALSE(writer->start());
+}
+
+TEST_F(MediaSampleWriterTests, TestProgressUpdate) {
+    const TestMediaSource& mediaSource = getMediaSource();
+
+    std::shared_ptr<MediaSampleWriter> writer = MediaSampleWriter::Create();
+    EXPECT_TRUE(writer->init(mTestMuxer, mTestCallbacks));
+
+    std::shared_ptr<AMediaFormat> videoFormat =
+            std::shared_ptr<AMediaFormat>(AMediaFormat_new(), &AMediaFormat_delete);
+    AMediaFormat_copy(videoFormat.get(),
+                      mediaSource.mTrackFormats[mediaSource.mVideoTrackIndex].get());
+
+    AMediaFormat_setInt64(videoFormat.get(), AMEDIAFORMAT_KEY_DURATION, 100);
+    auto sampleConsumer = writer->addTrack(videoFormat);
+    EXPECT_NE(sampleConsumer, nullptr);
+    ASSERT_TRUE(writer->start());
+
+    for (int64_t pts = 0; pts < 100; ++pts) {
+        sampleConsumer(newSampleWithPts(pts));
+    }
+    sampleConsumer(newSampleEos());
+    mTestCallbacks->waitForWritingFinished();
+
+    EXPECT_EQ(mTestCallbacks->getProgressUpdateCount(), 100);
+}
+
+TEST_F(MediaSampleWriterTests, TestInterleaving) {
+    std::shared_ptr<MediaSampleWriter> writer = MediaSampleWriter::Create();
+    EXPECT_TRUE(writer->init(mTestMuxer, mTestCallbacks));
+
+    // Use two tracks for this test.
+    static constexpr int kNumTracks = 2;
+    MediaSampleWriter::MediaSampleConsumerFunction sampleConsumers[kNumTracks];
+    std::vector<std::pair<std::shared_ptr<MediaSample>, size_t>> addedSamples;
+    const TestMediaSource& mediaSource = getMediaSource();
+
+    for (int trackIdx = 0; trackIdx < kNumTracks; ++trackIdx) {
+        auto trackFormat = mediaSource.mTrackFormats[trackIdx % mediaSource.mTrackCount];
+        sampleConsumers[trackIdx] = writer->addTrack(trackFormat);
+        EXPECT_NE(sampleConsumers[trackIdx], nullptr);
+        EXPECT_EQ(mTestMuxer->popEvent(), TestMuxer::AddTrack(trackFormat.get()));
+    }
+
+    // Add samples to both tracks. The writer is expected to write them to the muxer interleaved
+    // in presentation-time order, which is verified below after sorting by timestamp.
+    auto addSampleToTrackWithPts = [&addedSamples, &sampleConsumers](int trackIndex, int64_t pts) {
+        auto sample = newSampleWithPts(pts);
+        sampleConsumers[trackIndex](sample);
+        addedSamples.emplace_back(sample, trackIndex);
+    };
+
+    addSampleToTrackWithPts(0, 0);
+    addSampleToTrackWithPts(1, 4);
+
+    addSampleToTrackWithPts(0, 1);
+    addSampleToTrackWithPts(0, 2);
+    addSampleToTrackWithPts(0, 3);
+    addSampleToTrackWithPts(0, 10);
+
+    addSampleToTrackWithPts(1, 5);
+    addSampleToTrackWithPts(1, 6);
+    addSampleToTrackWithPts(1, 11);
+
+    addSampleToTrackWithPts(0, 12);
+    addSampleToTrackWithPts(1, 13);
+
+    for (int trackIndex = 0; trackIndex < kNumTracks; ++trackIndex) {
+        sampleConsumers[trackIndex](newSampleEos());
+    }
+
+    // Start the writer.
+    ASSERT_TRUE(writer->start());
+
+    // Wait for writer to complete.
+    mTestCallbacks->waitForWritingFinished();
+    EXPECT_EQ(mTestMuxer->popEvent(), TestMuxer::Start());
+
+    std::sort(addedSamples.begin(), addedSamples.end(),
+              [](const std::pair<std::shared_ptr<MediaSample>, size_t>& left,
+                 const std::pair<std::shared_ptr<MediaSample>, size_t>& right) {
+                  return left.first->info.presentationTimeUs < right.first->info.presentationTimeUs;
+              });
+
+    // Verify sample order.
+    for (auto entry : addedSamples) {
+        auto sample = entry.first;
+        auto trackIndex = entry.second;
+
+        const TestMuxer::Event& event = mTestMuxer->popEvent();
+        EXPECT_EQ(event.type, TestMuxer::Event::WriteSample);
+        EXPECT_EQ(event.trackIndex, trackIndex);
+        EXPECT_EQ(event.data, sample->buffer);
+        EXPECT_EQ(event.info.offset, sample->dataOffset);
+        EXPECT_EQ(event.info.size, sample->info.size);
+        EXPECT_EQ(event.info.presentationTimeUs, sample->info.presentationTimeUs);
+        EXPECT_EQ(event.info.flags, sample->info.flags);
+    }
+
+    // Verify EOS samples.
+    for (int trackIndex = 0; trackIndex < kNumTracks; ++trackIndex) {
+        auto trackFormat = mediaSource.mTrackFormats[trackIndex % mediaSource.mTrackCount];
+        int64_t duration = 0;
+        AMediaFormat_getInt64(trackFormat.get(), AMEDIAFORMAT_KEY_DURATION, &duration);
+
+        // EOS timestamp = first sample timestamp + duration.
+        const int64_t endTime = duration + (trackIndex == 1 ? 4 : 0);
+        const AMediaCodecBufferInfo info = {0, 0, endTime, AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM};
+
+        EXPECT_EQ(mTestMuxer->popEvent(), TestMuxer::WriteSample(trackIndex, nullptr, &info));
+    }
+
+    EXPECT_EQ(mTestMuxer->popEvent(), TestMuxer::Stop());
+    EXPECT_TRUE(writer->stop());
+    EXPECT_TRUE(mTestCallbacks->hasFinished());
+}
+
+// Convenience function that reads the next sample from an AMediaExtractor, wraps it in a
+// MediaSample and advances the extractor.
+static std::shared_ptr<MediaSample> readSampleAndAdvance(AMediaExtractor* extractor,
+                                                         size_t* trackIndexOut) {
+    int trackIndex = AMediaExtractor_getSampleTrackIndex(extractor);
+    if (trackIndex < 0) {
+        return nullptr;
+    }
+
+    if (trackIndexOut != nullptr) {
+        *trackIndexOut = trackIndex;
+    }
+
+    ssize_t sampleSize = AMediaExtractor_getSampleSize(extractor);
+    int64_t sampleTimeUs = AMediaExtractor_getSampleTime(extractor);
+    uint32_t flags = AMediaExtractor_getSampleFlags(extractor);
+
+    size_t bufferSize = static_cast<size_t>(sampleSize);
+    uint8_t* buffer = new uint8_t[bufferSize];
+
+    ssize_t dataRead = AMediaExtractor_readSampleData(extractor, buffer, bufferSize);
+    EXPECT_EQ(dataRead, sampleSize);
+
+    auto sample = MediaSample::createWithReleaseCallback(
+            buffer, 0 /* offset */, 0 /* id */, [buffer](MediaSample*) { delete[] buffer; });
+    sample->info.size = bufferSize;
+    sample->info.presentationTimeUs = sampleTimeUs;
+    sample->info.flags = flags;
+
+    (void)AMediaExtractor_advance(extractor);
+    return sample;
+}
+
+TEST_F(MediaSampleWriterTests, TestDefaultMuxer) {
+    // Write samples straight from an extractor and validate output file.
+    static const char* destinationPath =
+            "/data/local/tmp/MediaSampleWriterTests_TestDefaultMuxer_output.MP4";
+    const int destinationFd =
+            open(destinationPath, O_WRONLY | O_CREAT, S_IRUSR | S_IWUSR | S_IROTH);
+    ASSERT_GT(destinationFd, 0);
+
+    // Initialize writer.
+    std::shared_ptr<MediaSampleWriter> writer = MediaSampleWriter::Create();
+    EXPECT_TRUE(writer->init(destinationFd, mTestCallbacks));
+    close(destinationFd);
+
+    // Add tracks.
+    const TestMediaSource& mediaSource = getMediaSource();
+    std::vector<MediaSampleWriter::MediaSampleConsumerFunction> sampleConsumers;
+
+    for (size_t trackIndex = 0; trackIndex < mediaSource.mTrackCount; trackIndex++) {
+        auto consumer = writer->addTrack(mediaSource.mTrackFormats[trackIndex]);
+        sampleConsumers.push_back(consumer);
+    }
+
+    // Start the writer.
+    ASSERT_TRUE(writer->start());
+
+    // Enqueue all samples, then an end-of-stream sample for each track.
+    std::shared_ptr<MediaSample> sample;
+    size_t trackIndex;
+    while ((sample = readSampleAndAdvance(mediaSource.mExtractor, &trackIndex)) != nullptr) {
+        sampleConsumers[trackIndex](sample);
+    }
+    for (trackIndex = 0; trackIndex < mediaSource.mTrackCount; trackIndex++) {
+        sampleConsumers[trackIndex](newSampleEos());
+    }
+
+    // Wait for writer.
+    mTestCallbacks->waitForWritingFinished();
+    EXPECT_TRUE(writer->stop());
+
+    // Compare output file with source.
+    mediaSource.reset();
+
+    AMediaExtractor* extractor = AMediaExtractor_new();
+    ASSERT_NE(extractor, nullptr);
+
+    int sourceFd = open(destinationPath, O_RDONLY);
+    ASSERT_GT(sourceFd, 0);
+
+    off_t fileSize = lseek(sourceFd, 0, SEEK_END);
+    lseek(sourceFd, 0, SEEK_SET);
+
+    media_status_t status = AMediaExtractor_setDataSourceFd(extractor, sourceFd, 0, fileSize);
+    ASSERT_EQ(status, AMEDIA_OK);
+    close(sourceFd);
+
+    size_t trackCount = AMediaExtractor_getTrackCount(extractor);
+    EXPECT_EQ(trackCount, mediaSource.mTrackCount);
+
+    for (size_t trackIndex = 0; trackIndex < trackCount; trackIndex++) {
+        AMediaFormat* trackFormat = AMediaExtractor_getTrackFormat(extractor, trackIndex);
+        ASSERT_NE(trackFormat, nullptr);
+
+        AMediaExtractor_selectTrack(extractor, trackIndex);
+    }
+
+    // Compare samples.
+    std::shared_ptr<MediaSample> sample1 = readSampleAndAdvance(mediaSource.mExtractor, nullptr);
+    std::shared_ptr<MediaSample> sample2 = readSampleAndAdvance(extractor, nullptr);
+
+    while (sample1 != nullptr && sample2 != nullptr) {
+        EXPECT_EQ(sample1->info.presentationTimeUs, sample2->info.presentationTimeUs);
+        EXPECT_EQ(sample1->info.size, sample2->info.size);
+        EXPECT_EQ(sample1->info.flags, sample2->info.flags);
+
+        EXPECT_EQ(memcmp(sample1->buffer, sample2->buffer, sample1->info.size), 0);
+
+        sample1 = readSampleAndAdvance(mediaSource.mExtractor, nullptr);
+        sample2 = readSampleAndAdvance(extractor, nullptr);
+    }
+    EXPECT_EQ(sample1, nullptr);
+    EXPECT_EQ(sample2, nullptr);
+
+    AMediaExtractor_delete(extractor);
+}
+
+}  // namespace android
+
+int main(int argc, char** argv) {
+    ::testing::InitGoogleTest(&argc, argv);
+    return RUN_ALL_TESTS();
+}
diff --git a/media/libmediatranscoding/transcoder/tests/MediaTrackTranscoderTests.cpp b/media/libmediatranscoding/transcoder/tests/MediaTrackTranscoderTests.cpp
new file mode 100644
index 0000000..83f0a4a
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/tests/MediaTrackTranscoderTests.cpp
@@ -0,0 +1,285 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Unit Test for MediaTrackTranscoder
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "MediaTrackTranscoderTests"
+
+#include <android-base/logging.h>
+#include <android/binder_process.h>
+#include <fcntl.h>
+#include <gtest/gtest.h>
+#include <media/MediaSampleReaderNDK.h>
+#include <media/MediaTrackTranscoder.h>
+#include <media/PassthroughTrackTranscoder.h>
+#include <media/VideoTrackTranscoder.h>
+
+#include <chrono>
+#include <thread>
+#include <vector>
+
+#include "TrackTranscoderTestUtils.h"
+
+namespace android {
+
+/** TrackTranscoder types to test. */
+enum TrackTranscoderType {
+    VIDEO,
+    PASSTHROUGH,
+};
+
+class MediaTrackTranscoderTests : public ::testing::TestWithParam<TrackTranscoderType> {
+public:
+    MediaTrackTranscoderTests() { LOG(DEBUG) << "MediaTrackTranscoderTests created"; }
+
+    void SetUp() override {
+        LOG(DEBUG) << "MediaTrackTranscoderTests set up";
+
+        // Need to start a thread pool to prevent AMediaExtractor binder calls from starving
+        // (b/155663561).
+        ABinderProcess_startThreadPool();
+
+        mCallback = std::make_shared<TestCallback>();
+
+        switch (GetParam()) {
+        case VIDEO:
+            mTranscoder = VideoTrackTranscoder::create(mCallback);
+            break;
+        case PASSTHROUGH:
+            mTranscoder = std::make_shared<PassthroughTrackTranscoder>(mCallback);
+            break;
+        }
+        ASSERT_NE(mTranscoder, nullptr);
+
+        initSampleReader();
+    }
+
+    void initSampleReader() {
+        const char* sourcePath =
+                "/data/local/tmp/TranscodingTestAssets/cubicle_avc_480x240_aac_24KHz.mp4";
+
+        const int sourceFd = open(sourcePath, O_RDONLY);
+        ASSERT_GT(sourceFd, 0);
+
+        const size_t fileSize = lseek(sourceFd, 0, SEEK_END);
+        lseek(sourceFd, 0, SEEK_SET);
+
+        mMediaSampleReader = MediaSampleReaderNDK::createFromFd(sourceFd, 0 /* offset */, fileSize);
+        ASSERT_NE(mMediaSampleReader, nullptr);
+        close(sourceFd);
+
+        for (size_t trackIndex = 0; trackIndex < mMediaSampleReader->getTrackCount();
+             ++trackIndex) {
+            AMediaFormat* trackFormat = mMediaSampleReader->getTrackFormat(trackIndex);
+            ASSERT_NE(trackFormat, nullptr);
+
+            const char* mime = nullptr;
+            AMediaFormat_getString(trackFormat, AMEDIAFORMAT_KEY_MIME, &mime);
+            ASSERT_NE(mime, nullptr);
+
+            if (GetParam() == VIDEO && strncmp(mime, "video/", 6) == 0) {
+                mTrackIndex = trackIndex;
+
+                mSourceFormat = std::shared_ptr<AMediaFormat>(trackFormat, &AMediaFormat_delete);
+                ASSERT_NE(mSourceFormat, nullptr);
+
+                mDestinationFormat =
+                        TrackTranscoderTestUtils::getDefaultVideoDestinationFormat(trackFormat);
+                ASSERT_NE(mDestinationFormat, nullptr);
+                break;
+            } else if (GetParam() == PASSTHROUGH && strncmp(mime, "audio/", 6) == 0) {
+                // TODO(lnilsson): Test metadata track passthrough after hkuang@ provides sample.
+                mTrackIndex = trackIndex;
+
+                mSourceFormat = std::shared_ptr<AMediaFormat>(trackFormat, &AMediaFormat_delete);
+                ASSERT_NE(mSourceFormat, nullptr);
+                break;
+            }
+
+            AMediaFormat_delete(trackFormat);
+        }
+
+        ASSERT_NE(mSourceFormat, nullptr);
+        EXPECT_EQ(mMediaSampleReader->selectTrack(mTrackIndex), AMEDIA_OK);
+    }
+
+    // Registers a sample consumer that drains the transcoder's output as samples are produced.
+    void drainOutputSamples(int numSamplesToSave = 0) {
+        mTranscoder->setSampleConsumer(
+                [this, numSamplesToSave](const std::shared_ptr<MediaSample>& sample) {
+                    ASSERT_NE(sample, nullptr);
+
+                    mGotEndOfStream = (sample->info.flags & SAMPLE_FLAG_END_OF_STREAM) != 0;
+
+                    if (mSavedSamples.size() < numSamplesToSave) {
+                        mSavedSamples.push_back(sample);
+                    }
+
+                    if (mSavedSamples.size() == numSamplesToSave || mGotEndOfStream) {
+                        mSamplesSavedSemaphore.signal();
+                    }
+                });
+    }
+
+    void TearDown() override { LOG(DEBUG) << "MediaTrackTranscoderTests tear down"; }
+
+    ~MediaTrackTranscoderTests() { LOG(DEBUG) << "MediaTrackTranscoderTests destroyed"; }
+
+protected:
+    std::shared_ptr<MediaTrackTranscoder> mTranscoder;
+    std::shared_ptr<TestCallback> mCallback;
+
+    std::shared_ptr<MediaSampleReader> mMediaSampleReader;
+    int mTrackIndex;
+
+    std::shared_ptr<AMediaFormat> mSourceFormat;
+    std::shared_ptr<AMediaFormat> mDestinationFormat;
+
+    std::vector<std::shared_ptr<MediaSample>> mSavedSamples;
+    OneShotSemaphore mSamplesSavedSemaphore;
+    bool mGotEndOfStream = false;
+};
+
+TEST_P(MediaTrackTranscoderTests, WaitNormalOperation) {
+    LOG(DEBUG) << "Testing WaitNormalOperation";
+    EXPECT_EQ(mTranscoder->configure(mMediaSampleReader, mTrackIndex, mDestinationFormat),
+              AMEDIA_OK);
+    ASSERT_TRUE(mTranscoder->start());
+    drainOutputSamples();
+    EXPECT_EQ(mCallback->waitUntilFinished(), AMEDIA_OK);
+    EXPECT_TRUE(mTranscoder->stop());
+    EXPECT_TRUE(mGotEndOfStream);
+}
+
+TEST_P(MediaTrackTranscoderTests, StopNormalOperation) {
+    LOG(DEBUG) << "Testing StopNormalOperation";
+    EXPECT_EQ(mTranscoder->configure(mMediaSampleReader, mTrackIndex, mDestinationFormat),
+              AMEDIA_OK);
+    EXPECT_TRUE(mTranscoder->start());
+    EXPECT_TRUE(mTranscoder->stop());
+}
+
+TEST_P(MediaTrackTranscoderTests, StartWithoutConfigure) {
+    LOG(DEBUG) << "Testing StartWithoutConfigure";
+    EXPECT_FALSE(mTranscoder->start());
+}
+
+TEST_P(MediaTrackTranscoderTests, StopWithoutStart) {
+    LOG(DEBUG) << "Testing StopWithoutStart";
+    EXPECT_EQ(mTranscoder->configure(mMediaSampleReader, mTrackIndex, mDestinationFormat),
+              AMEDIA_OK);
+    EXPECT_FALSE(mTranscoder->stop());
+}
+
+TEST_P(MediaTrackTranscoderTests, DoubleStartStop) {
+    LOG(DEBUG) << "Testing DoubleStartStop";
+    EXPECT_EQ(mTranscoder->configure(mMediaSampleReader, mTrackIndex, mDestinationFormat),
+              AMEDIA_OK);
+    EXPECT_TRUE(mTranscoder->start());
+    EXPECT_FALSE(mTranscoder->start());
+    EXPECT_TRUE(mTranscoder->stop());
+    EXPECT_FALSE(mTranscoder->stop());
+}
+
+TEST_P(MediaTrackTranscoderTests, DoubleConfigure) {
+    LOG(DEBUG) << "Testing DoubleConfigure";
+    EXPECT_EQ(mTranscoder->configure(mMediaSampleReader, mTrackIndex, mDestinationFormat),
+              AMEDIA_OK);
+    EXPECT_EQ(mTranscoder->configure(mMediaSampleReader, mTrackIndex, mDestinationFormat),
+              AMEDIA_ERROR_UNSUPPORTED);
+}
+
+TEST_P(MediaTrackTranscoderTests, ConfigureAfterFail) {
+    LOG(DEBUG) << "Testing ConfigureAfterFail";
+    EXPECT_EQ(mTranscoder->configure(mMediaSampleReader, -1, mDestinationFormat),
+              AMEDIA_ERROR_INVALID_PARAMETER);
+    EXPECT_EQ(mTranscoder->configure(mMediaSampleReader, mTrackIndex, mDestinationFormat),
+              AMEDIA_OK);
+}
+
+TEST_P(MediaTrackTranscoderTests, RestartAfterStop) {
+    LOG(DEBUG) << "Testing RestartAfterStop";
+    EXPECT_EQ(mTranscoder->configure(mMediaSampleReader, mTrackIndex, mDestinationFormat),
+              AMEDIA_OK);
+    EXPECT_TRUE(mTranscoder->start());
+    EXPECT_TRUE(mTranscoder->stop());
+    EXPECT_FALSE(mTranscoder->start());
+}
+
+TEST_P(MediaTrackTranscoderTests, RestartAfterFinish) {
+    LOG(DEBUG) << "Testing RestartAfterFinish";
+    EXPECT_EQ(mTranscoder->configure(mMediaSampleReader, mTrackIndex, mDestinationFormat),
+              AMEDIA_OK);
+    ASSERT_TRUE(mTranscoder->start());
+    drainOutputSamples();
+    EXPECT_EQ(mCallback->waitUntilFinished(), AMEDIA_OK);
+    EXPECT_TRUE(mTranscoder->stop());
+    EXPECT_FALSE(mTranscoder->start());
+    EXPECT_TRUE(mGotEndOfStream);
+}
+
+TEST_P(MediaTrackTranscoderTests, HoldSampleAfterTranscoderRelease) {
+    LOG(DEBUG) << "Testing HoldSampleAfterTranscoderRelease";
+    EXPECT_EQ(mTranscoder->configure(mMediaSampleReader, mTrackIndex, mDestinationFormat),
+              AMEDIA_OK);
+    ASSERT_TRUE(mTranscoder->start());
+    drainOutputSamples(1 /* numSamplesToSave */);
+    EXPECT_EQ(mCallback->waitUntilFinished(), AMEDIA_OK);
+    EXPECT_TRUE(mTranscoder->stop());
+    EXPECT_TRUE(mGotEndOfStream);
+
+    mTranscoder.reset();
+
+    std::this_thread::sleep_for(std::chrono::milliseconds(20));
+    mSavedSamples.clear();
+}
+
+TEST_P(MediaTrackTranscoderTests, HoldSampleAfterTranscoderStop) {
+    LOG(DEBUG) << "Testing HoldSampleAfterTranscoderStop";
+    EXPECT_EQ(mTranscoder->configure(mMediaSampleReader, mTrackIndex, mDestinationFormat),
+              AMEDIA_OK);
+    ASSERT_TRUE(mTranscoder->start());
+    drainOutputSamples(1 /* numSamplesToSave */);
+    mSamplesSavedSemaphore.wait();
+    EXPECT_TRUE(mTranscoder->stop());
+
+    std::this_thread::sleep_for(std::chrono::milliseconds(20));
+    mSavedSamples.clear();
+}
+
+TEST_P(MediaTrackTranscoderTests, NullSampleReader) {
+    LOG(DEBUG) << "Testing NullSampleReader";
+    std::shared_ptr<MediaSampleReader> nullSampleReader;
+    EXPECT_NE(mTranscoder->configure(nullSampleReader, mTrackIndex, mDestinationFormat), AMEDIA_OK);
+    ASSERT_FALSE(mTranscoder->start());
+}
+
+TEST_P(MediaTrackTranscoderTests, InvalidTrackIndex) {
+    LOG(DEBUG) << "Testing InvalidTrackIndex";
+    EXPECT_NE(mTranscoder->configure(mMediaSampleReader, -1, mDestinationFormat), AMEDIA_OK);
+    EXPECT_NE(mTranscoder->configure(mMediaSampleReader, mMediaSampleReader->getTrackCount(),
+                                     mDestinationFormat),
+              AMEDIA_OK);
+}
+
+}  // namespace android
+
+using namespace android;
+
+INSTANTIATE_TEST_SUITE_P(MediaTrackTranscoderTestsAll, MediaTrackTranscoderTests,
+                         ::testing::Values(VIDEO, PASSTHROUGH));
+
+int main(int argc, char** argv) {
+    ::testing::InitGoogleTest(&argc, argv);
+    return RUN_ALL_TESTS();
+}
diff --git a/media/libmediatranscoding/transcoder/tests/MediaTranscoderTests.cpp b/media/libmediatranscoding/transcoder/tests/MediaTranscoderTests.cpp
new file mode 100644
index 0000000..1bf2d8c
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/tests/MediaTranscoderTests.cpp
@@ -0,0 +1,386 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Unit Test for MediaTranscoder
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "MediaTranscoderTests"
+
+#include <android-base/logging.h>
+#include <android/binder_process.h>
+#include <fcntl.h>
+#include <gtest/gtest.h>
+#include <media/MediaSampleReaderNDK.h>
+#include <media/MediaTranscoder.h>
+#include <media/NdkCommon.h>
+#include <sys/stat.h>
+#include <unistd.h>
+
+#include <condition_variable>
+#include <functional>
+#include <mutex>
+#include <vector>
+
+namespace android {
+
+#define DEFINE_FORMAT_VALUE_EQUAL_FUNC(_type, _typeName)                                  \
+    static bool equal##_typeName(const char* key, AMediaFormat* src, AMediaFormat* dst) { \
+        _type srcVal, dstVal;                                                             \
+        bool srcPresent = AMediaFormat_get##_typeName(src, key, &srcVal);                 \
+        bool dstPresent = AMediaFormat_get##_typeName(dst, key, &dstVal);                 \
+        return (srcPresent == dstPresent) && (!srcPresent || (srcVal == dstVal));         \
+    }
+
+DEFINE_FORMAT_VALUE_EQUAL_FUNC(int64_t, Int64);
+DEFINE_FORMAT_VALUE_EQUAL_FUNC(int32_t, Int32);
+
+struct FormatVerifierEntry {
+    const char* key;
+    std::function<bool(const char*, AMediaFormat*, AMediaFormat*)> equal;
+};
+
+static const FormatVerifierEntry kFieldsToPreserve[] = {
+        {AMEDIAFORMAT_KEY_DURATION, equalInt64},       {AMEDIAFORMAT_KEY_WIDTH, equalInt32},
+        {AMEDIAFORMAT_KEY_HEIGHT, equalInt32},         {AMEDIAFORMAT_KEY_FRAME_RATE, equalInt32},
+        {AMEDIAFORMAT_KEY_FRAME_COUNT, equalInt32},    {AMEDIAFORMAT_KEY_DISPLAY_WIDTH, equalInt32},
+        {AMEDIAFORMAT_KEY_DISPLAY_HEIGHT, equalInt32}, {AMEDIAFORMAT_KEY_SAR_WIDTH, equalInt32},
+        {AMEDIAFORMAT_KEY_SAR_HEIGHT, equalInt32},     {AMEDIAFORMAT_KEY_ROTATION, equalInt32},
+};
+
+class TestCallbacks : public MediaTranscoder::CallbackInterface {
+public:
+    virtual void onFinished(const MediaTranscoder* transcoder __unused) override {
+        std::unique_lock<std::mutex> lock(mMutex);
+        EXPECT_FALSE(mFinished);
+        mFinished = true;
+        mCondition.notify_all();
+    }
+
+    virtual void onError(const MediaTranscoder* transcoder __unused,
+                         media_status_t error) override {
+        std::unique_lock<std::mutex> lock(mMutex);
+        EXPECT_NE(error, AMEDIA_OK);
+        EXPECT_FALSE(mFinished);
+        mFinished = true;
+        mStatus = error;
+        mCondition.notify_all();
+    }
+
+    virtual void onProgressUpdate(const MediaTranscoder* transcoder __unused,
+                                  int32_t progress) override {
+        std::unique_lock<std::mutex> lock(mMutex);
+        if (progress > 0 && !mProgressMade) {
+            mProgressMade = true;
+            mCondition.notify_all();
+        }
+    }
+
+    virtual void onCodecResourceLost(const MediaTranscoder* transcoder __unused,
+                                     const std::shared_ptr<ndk::ScopedAParcel>& pausedState
+                                             __unused) override {}
+
+    void waitForTranscodingFinished() {
+        std::unique_lock<std::mutex> lock(mMutex);
+        while (!mFinished) {
+            mCondition.wait(lock);
+        }
+    }
+
+    void waitForProgressMade() {
+        std::unique_lock<std::mutex> lock(mMutex);
+        while (!mProgressMade && !mFinished) {
+            mCondition.wait(lock);
+        }
+    }
+    media_status_t mStatus = AMEDIA_OK;
+
+private:
+    std::mutex mMutex;
+    std::condition_variable mCondition;
+    bool mFinished = false;
+    bool mProgressMade = false;
+};
+
+// Write-only, create file if non-existent, don't overwrite existing file.
+static constexpr int kOpenFlags = O_WRONLY | O_CREAT | O_EXCL;
+// User R+W permission.
+static constexpr int kFileMode = S_IRUSR | S_IWUSR;
+
+class MediaTranscoderTests : public ::testing::Test {
+public:
+    MediaTranscoderTests() { LOG(DEBUG) << "MediaTranscoderTests created"; }
+    ~MediaTranscoderTests() { LOG(DEBUG) << "MediaTranscoderTests destroyed"; }
+
+    void SetUp() override {
+        LOG(DEBUG) << "MediaTranscoderTests set up";
+        mCallbacks = std::make_shared<TestCallbacks>();
+        ABinderProcess_startThreadPool();
+    }
+
+    void TearDown() override {
+        LOG(DEBUG) << "MediaTranscoderTests tear down";
+        mCallbacks.reset();
+    }
+
+    void deleteFile(const char* path) { unlink(path); }
+
+    float getFileSizeDiffPercent(const char* path1, const char* path2, bool absolute = false) {
+        struct stat s1, s2;
+        EXPECT_EQ(stat(path1, &s1), 0);
+        EXPECT_EQ(stat(path2, &s2), 0);
+
+        int64_t diff = s2.st_size - s1.st_size;
+        if (absolute && diff < 0) diff = -diff;
+
+        return (float)diff * 100.0f / s1.st_size;
+    }
+
+    typedef enum {
+        kRunToCompletion,
+        kCancelAfterProgress,
+        kCancelAfterStart,
+    } TranscodeExecutionControl;
+
+    using FormatConfigurationCallback = std::function<AMediaFormat*(AMediaFormat*)>;
+    media_status_t transcodeHelper(const char* srcPath, const char* destPath,
+                                   FormatConfigurationCallback formatCallback,
+                                   TranscodeExecutionControl executionControl = kRunToCompletion) {
+        auto transcoder = MediaTranscoder::create(mCallbacks, nullptr);
+        EXPECT_NE(transcoder, nullptr);
+
+        const int srcFd = open(srcPath, O_RDONLY);
+        EXPECT_EQ(transcoder->configureSource(srcFd), AMEDIA_OK);
+
+        std::vector<std::shared_ptr<AMediaFormat>> trackFormats = transcoder->getTrackFormats();
+        EXPECT_GT(trackFormats.size(), 0);
+
+        for (int i = 0; i < trackFormats.size(); ++i) {
+            AMediaFormat* format = formatCallback(trackFormats[i].get());
+            EXPECT_EQ(transcoder->configureTrackFormat(i, format), AMEDIA_OK);
+
+            // Save original video track format for verification.
+            const char* mime = nullptr;
+            AMediaFormat_getString(trackFormats[i].get(), AMEDIAFORMAT_KEY_MIME, &mime);
+            if (strncmp(mime, "video/", 6) == 0) {
+                mSourceVideoFormat = trackFormats[i];
+            }
+
+            if (format != nullptr) {
+                AMediaFormat_delete(format);
+            }
+        }
+        deleteFile(destPath);
+        const int dstFd = open(destPath, kOpenFlags, kFileMode);
+        EXPECT_EQ(transcoder->configureDestination(dstFd), AMEDIA_OK);
+
+        media_status_t startStatus = transcoder->start();
+        EXPECT_EQ(startStatus, AMEDIA_OK);
+        if (startStatus == AMEDIA_OK) {
+            switch (executionControl) {
+            case kCancelAfterProgress:
+                mCallbacks->waitForProgressMade();
+                FALLTHROUGH_INTENDED;
+            case kCancelAfterStart:
+                transcoder->cancel();
+                break;
+            case kRunToCompletion:
+            default:
+                mCallbacks->waitForTranscodingFinished();
+                break;
+            }
+        }
+        close(srcFd);
+        close(dstFd);
+
+        return mCallbacks->mStatus;
+    }
+
+    void testTranscodeVideo(const char* srcPath, const char* destPath, const char* dstMime,
+                            int32_t bitrate = 0) {
+        EXPECT_EQ(transcodeHelper(srcPath, destPath,
+                                  [dstMime, bitrate](AMediaFormat* sourceFormat) {
+                                      AMediaFormat* format = nullptr;
+                                      const char* mime = nullptr;
+                                      AMediaFormat_getString(sourceFormat, AMEDIAFORMAT_KEY_MIME,
+                                                             &mime);
+
+                                      if (strncmp(mime, "video/", 6) == 0 &&
+                                          (bitrate > 0 || dstMime != nullptr)) {
+                                          format = AMediaFormat_new();
+
+                                          if (bitrate > 0) {
+                                              AMediaFormat_setInt32(
+                                                      format, AMEDIAFORMAT_KEY_BIT_RATE, bitrate);
+                                          }
+
+                                          if (dstMime != nullptr) {
+                                              AMediaFormat_setString(format, AMEDIAFORMAT_KEY_MIME,
+                                                                     dstMime);
+                                          }
+                                      }
+                                      return format;
+                                  }),
+                  AMEDIA_OK);
+
+        if (dstMime != nullptr) {
+            std::vector<FormatVerifierEntry> extraVerifiers = {
+                    {AMEDIAFORMAT_KEY_MIME,
+                     [dstMime](const char* key, AMediaFormat* src __unused, AMediaFormat* dst) {
+                         const char* mime = nullptr;
+                         AMediaFormat_getString(dst, key, &mime);
+                         return !strcmp(mime, dstMime);
+                     }},
+            };
+            verifyOutputFormat(destPath, &extraVerifiers);
+        } else {
+            verifyOutputFormat(destPath);
+        }
+    }
+
+    void verifyOutputFormat(const char* destPath,
+                            const std::vector<FormatVerifierEntry>* extraVerifiers = nullptr) {
+        int dstFd = open(destPath, O_RDONLY);
+        EXPECT_GT(dstFd, 0);
+        ssize_t fileSize = lseek(dstFd, 0, SEEK_END);
+        lseek(dstFd, 0, SEEK_SET);
+
+        std::shared_ptr<MediaSampleReader> sampleReader =
+                MediaSampleReaderNDK::createFromFd(dstFd, 0, fileSize);
+        ASSERT_NE(sampleReader, nullptr);
+
+        std::shared_ptr<AMediaFormat> videoFormat;
+        const size_t trackCount = sampleReader->getTrackCount();
+        for (size_t trackIndex = 0; trackIndex < trackCount; ++trackIndex) {
+            AMediaFormat* trackFormat = sampleReader->getTrackFormat(static_cast<int>(trackIndex));
+            if (trackFormat != nullptr) {
+                const char* mime = nullptr;
+                AMediaFormat_getString(trackFormat, AMEDIAFORMAT_KEY_MIME, &mime);
+
+                if (strncmp(mime, "video/", 6) == 0) {
+                    LOG(INFO) << "Track # " << trackIndex << ": "
+                              << AMediaFormat_toString(trackFormat);
+                    videoFormat = std::shared_ptr<AMediaFormat>(trackFormat, &AMediaFormat_delete);
+                    break;
+                }
+            }
+        }
+
+        EXPECT_NE(videoFormat, nullptr);
+
+        LOG(INFO) << "source video format: " << AMediaFormat_toString(mSourceVideoFormat.get());
+        LOG(INFO) << "transcoded video format: " << AMediaFormat_toString(videoFormat.get());
+
+        for (int i = 0; i < (sizeof(kFieldsToPreserve) / sizeof(kFieldsToPreserve[0])); ++i) {
+            EXPECT_TRUE(kFieldsToPreserve[i].equal(kFieldsToPreserve[i].key,
+                                                   mSourceVideoFormat.get(), videoFormat.get()))
+                    << "Failed at key " << kFieldsToPreserve[i].key;
+        }
+
+        if (extraVerifiers != nullptr) {
+            for (int i = 0; i < extraVerifiers->size(); ++i) {
+                const FormatVerifierEntry& entry = (*extraVerifiers)[i];
+                EXPECT_TRUE(entry.equal(entry.key, mSourceVideoFormat.get(), videoFormat.get()));
+            }
+        }
+
+        close(dstFd);
+    }
+
+    std::shared_ptr<TestCallbacks> mCallbacks;
+    std::shared_ptr<AMediaFormat> mSourceVideoFormat;
+};
+
+TEST_F(MediaTranscoderTests, TestPassthrough) {
+    const char* srcPath = "/data/local/tmp/TranscodingTestAssets/cubicle_avc_480x240_aac_24KHz.mp4";
+    const char* destPath = "/data/local/tmp/MediaTranscoder_Passthrough.MP4";
+    testTranscodeVideo(srcPath, destPath, nullptr);
+}
+
+TEST_F(MediaTranscoderTests, TestVideoTranscode_AvcToAvc_Basic) {
+    const char* srcPath = "/data/local/tmp/TranscodingTestAssets/cubicle_avc_480x240_aac_24KHz.mp4";
+    const char* destPath = "/data/local/tmp/MediaTranscoder_VideoTranscode_AvcToAvc_Basic.MP4";
+    testTranscodeVideo(srcPath, destPath, AMEDIA_MIMETYPE_VIDEO_AVC);
+}
+
+TEST_F(MediaTranscoderTests, TestVideoTranscode_HevcToAvc_Basic) {
+    const char* srcPath = "/data/local/tmp/TranscodingTestAssets/jets_hevc_1280x720_20Mbps.mp4";
+    const char* destPath = "/data/local/tmp/MediaTranscoder_VideoTranscode_HevcToAvc_Basic.MP4";
+    testTranscodeVideo(srcPath, destPath, AMEDIA_MIMETYPE_VIDEO_AVC);
+}
+
+TEST_F(MediaTranscoderTests, TestVideoTranscode_HevcToAvc_Rotation) {
+    const char* srcPath =
+            "/data/local/tmp/TranscodingTestAssets/desk_hevc_1920x1080_aac_48KHz_rot90.mp4";
+    const char* destPath = "/data/local/tmp/MediaTranscoder_VideoTranscode_HevcToAvc_Rotation.MP4";
+    testTranscodeVideo(srcPath, destPath, AMEDIA_MIMETYPE_VIDEO_AVC);
+}
+
+TEST_F(MediaTranscoderTests, TestPreserveBitrate) {
+    const char* srcPath = "/data/local/tmp/TranscodingTestAssets/cubicle_avc_480x240_aac_24KHz.mp4";
+    const char* destPath = "/data/local/tmp/MediaTranscoder_PreserveBitrate.MP4";
+    testTranscodeVideo(srcPath, destPath, AMEDIA_MIMETYPE_VIDEO_AVC);
+
+    // Require at most a 10% difference in file size.
+    EXPECT_LT(getFileSizeDiffPercent(srcPath, destPath, true /* absolute */), 10);
+}
+
+TEST_F(MediaTranscoderTests, TestCustomBitrate) {
+    const char* srcPath = "/data/local/tmp/TranscodingTestAssets/cubicle_avc_480x240_aac_24KHz.mp4";
+    const char* destPath1 = "/data/local/tmp/MediaTranscoder_CustomBitrate_2Mbps.MP4";
+    const char* destPath2 = "/data/local/tmp/MediaTranscoder_CustomBitrate_8Mbps.MP4";
+    testTranscodeVideo(srcPath, destPath1, AMEDIA_MIMETYPE_VIDEO_AVC, 2 * 1000 * 1000);
+    mCallbacks = std::make_shared<TestCallbacks>();
+    testTranscodeVideo(srcPath, destPath2, AMEDIA_MIMETYPE_VIDEO_AVC, 8 * 1000 * 1000);
+
+    // The source asset is short and already heavily compressed, so the requested bitrates will not
+    // be matched exactly. However, a 40% size difference between the two outputs is reasonable.
+    EXPECT_GT(getFileSizeDiffPercent(destPath1, destPath2), 40);
+}
+
+static AMediaFormat* getAVCVideoFormat(AMediaFormat* sourceFormat) {
+    AMediaFormat* format = nullptr;
+    const char* mime = nullptr;
+    AMediaFormat_getString(sourceFormat, AMEDIAFORMAT_KEY_MIME, &mime);
+
+    if (strncmp(mime, "video/", 6) == 0) {
+        format = AMediaFormat_new();
+        AMediaFormat_setString(format, AMEDIAFORMAT_KEY_MIME, AMEDIA_MIMETYPE_VIDEO_AVC);
+    }
+
+    return format;
+}
+
+TEST_F(MediaTranscoderTests, TestCancelAfterProgress) {
+    const char* srcPath = "/data/local/tmp/TranscodingTestAssets/longtest_15s.mp4";
+    const char* destPath = "/data/local/tmp/MediaTranscoder_Cancel.MP4";
+
+    for (int i = 0; i < 32; ++i) {
+        EXPECT_EQ(transcodeHelper(srcPath, destPath, getAVCVideoFormat, kCancelAfterProgress),
+                  AMEDIA_OK);
+        mCallbacks = std::make_shared<TestCallbacks>();
+    }
+}
+
+TEST_F(MediaTranscoderTests, TestCancelAfterStart) {
+    const char* srcPath = "/data/local/tmp/TranscodingTestAssets/longtest_15s.mp4";
+    const char* destPath = "/data/local/tmp/MediaTranscoder_Cancel.MP4";
+
+    for (int i = 0; i < 32; ++i) {
+        EXPECT_EQ(transcodeHelper(srcPath, destPath, getAVCVideoFormat, kCancelAfterStart),
+                  AMEDIA_OK);
+        mCallbacks = std::make_shared<TestCallbacks>();
+    }
+}
+
+}  // namespace android
+
+int main(int argc, char** argv) {
+    ::testing::InitGoogleTest(&argc, argv);
+    return RUN_ALL_TESTS();
+}
diff --git a/media/libmediatranscoding/transcoder/tests/PassthroughTrackTranscoderTests.cpp b/media/libmediatranscoding/transcoder/tests/PassthroughTrackTranscoderTests.cpp
new file mode 100644
index 0000000..9713e17
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/tests/PassthroughTrackTranscoderTests.cpp
@@ -0,0 +1,309 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Unit Test for PassthroughTrackTranscoder
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "PassthroughTrackTranscoderTests"
+
+#include <android-base/logging.h>
+#include <fcntl.h>
+#include <gtest/gtest.h>
+#include <media/MediaSampleReaderNDK.h>
+#include <media/NdkMediaExtractor.h>
+#include <media/PassthroughTrackTranscoder.h>
+#include <openssl/md5.h>
+
+#include <memory>
+#include <unordered_map>
+#include <vector>
+
+#include "TrackTranscoderTestUtils.h"
+
+namespace android {
+
+class PassthroughTrackTranscoderTests : public ::testing::Test {
+public:
+    PassthroughTrackTranscoderTests() { LOG(DEBUG) << "PassthroughTrackTranscoderTests created"; }
+
+    void SetUp() override { LOG(DEBUG) << "PassthroughTrackTranscoderTests set up"; }
+
+    void initSourceAndExtractor() {
+        const char* sourcePath =
+                "/data/local/tmp/TranscodingTestAssets/cubicle_avc_480x240_aac_24KHz.mp4";
+
+        mExtractor = AMediaExtractor_new();
+        ASSERT_NE(mExtractor, nullptr);
+
+        mSourceFd = open(sourcePath, O_RDONLY);
+        ASSERT_GT(mSourceFd, 0);
+
+        mSourceFileSize = lseek(mSourceFd, 0, SEEK_END);
+        lseek(mSourceFd, 0, SEEK_SET);
+
+        media_status_t status =
+                AMediaExtractor_setDataSourceFd(mExtractor, mSourceFd, 0, mSourceFileSize);
+        ASSERT_EQ(status, AMEDIA_OK);
+
+        const size_t trackCount = AMediaExtractor_getTrackCount(mExtractor);
+        for (size_t trackIndex = 0; trackIndex < trackCount; trackIndex++) {
+            AMediaFormat* trackFormat = AMediaExtractor_getTrackFormat(mExtractor, trackIndex);
+            ASSERT_NE(trackFormat, nullptr);
+
+            const char* mime = nullptr;
+            AMediaFormat_getString(trackFormat, AMEDIAFORMAT_KEY_MIME, &mime);
+            ASSERT_NE(mime, nullptr);
+
+            if (strncmp(mime, "audio/", 6) == 0) {
+                mTrackIndex = trackIndex;
+                AMediaExtractor_selectTrack(mExtractor, trackIndex);
+                break;
+            }
+
+            AMediaFormat_delete(trackFormat);
+        }
+    }
+
+    void TearDown() override {
+        LOG(DEBUG) << "PassthroughTrackTranscoderTests tear down";
+        if (mExtractor != nullptr) {
+            AMediaExtractor_delete(mExtractor);
+            mExtractor = nullptr;
+        }
+        if (mSourceFd > 0) {
+            close(mSourceFd);
+            mSourceFd = -1;
+        }
+    }
+
+    ~PassthroughTrackTranscoderTests() {
+        LOG(DEBUG) << "PassthroughTrackTranscoderTests destroyed";
+    }
+
+    int mSourceFd = -1;
+    size_t mSourceFileSize;
+    int mTrackIndex;
+    AMediaExtractor* mExtractor = nullptr;
+};
+
+/** Helper class for comparing sample data using checksums. */
+class SampleID {
+public:
+    SampleID(const uint8_t* sampleData, ssize_t sampleSize) : mSize{sampleSize} {
+        MD5_CTX md5Ctx;
+        MD5_Init(&md5Ctx);
+        MD5_Update(&md5Ctx, sampleData, sampleSize);
+        MD5_Final(mChecksum, &md5Ctx);
+    }
+
+    bool operator==(const SampleID& rhs) const {
+        return mSize == rhs.mSize && memcmp(mChecksum, rhs.mChecksum, MD5_DIGEST_LENGTH) == 0;
+    }
+
+    uint8_t mChecksum[MD5_DIGEST_LENGTH];
+    ssize_t mSize;
+};
+
+/**
+ * Tests that the output samples of PassthroughTrackTranscoder are identical to the source samples
+ * and in correct order.
+ */
+TEST_F(PassthroughTrackTranscoderTests, SampleEquality) {
+    LOG(DEBUG) << "Testing SampleEquality";
+
+    ssize_t bufferSize = 1024;
+    auto buffer = std::make_unique<uint8_t[]>(bufferSize);
+
+    initSourceAndExtractor();
+
+    // Loop through all samples of the selected track and record their sizes and checksums.
+    std::vector<SampleID> sampleChecksums;
+
+    int64_t sampleTime = AMediaExtractor_getSampleTime(mExtractor);
+    while (sampleTime != -1) {
+        if (AMediaExtractor_getSampleTrackIndex(mExtractor) == mTrackIndex) {
+            ssize_t sampleSize = AMediaExtractor_getSampleSize(mExtractor);
+            if (bufferSize < sampleSize) {
+                bufferSize = sampleSize;
+                buffer = std::make_unique<uint8_t[]>(bufferSize);
+            }
+
+            ssize_t bytesRead =
+                    AMediaExtractor_readSampleData(mExtractor, buffer.get(), bufferSize);
+            ASSERT_EQ(bytesRead, sampleSize);
+
+            SampleID sampleId{buffer.get(), sampleSize};
+            sampleChecksums.push_back(sampleId);
+        }
+
+        AMediaExtractor_advance(mExtractor);
+        sampleTime = AMediaExtractor_getSampleTime(mExtractor);
+    }
+
+    // Create and start the transcoder.
+    std::shared_ptr<TestCallback> callback = std::make_shared<TestCallback>();
+    PassthroughTrackTranscoder transcoder{callback};
+
+    std::shared_ptr<MediaSampleReader> mediaSampleReader =
+            MediaSampleReaderNDK::createFromFd(mSourceFd, 0, mSourceFileSize);
+    EXPECT_NE(mediaSampleReader, nullptr);
+
+    EXPECT_EQ(mediaSampleReader->selectTrack(mTrackIndex), AMEDIA_OK);
+    EXPECT_EQ(transcoder.configure(mediaSampleReader, mTrackIndex, nullptr /* destinationFormat */),
+              AMEDIA_OK);
+    ASSERT_TRUE(transcoder.start());
+
+    // Pull transcoder's output samples and compare against input checksums.
+    bool eos = false;
+    uint64_t sampleCount = 0;
+    transcoder.setSampleConsumer(
+            [&sampleCount, &sampleChecksums, &eos](const std::shared_ptr<MediaSample>& sample) {
+                ASSERT_NE(sample, nullptr);
+                EXPECT_FALSE(eos);
+
+                if (sample->info.flags & SAMPLE_FLAG_END_OF_STREAM) {
+                    eos = true;
+                } else {
+                    SampleID sampleId{sample->buffer, static_cast<ssize_t>(sample->info.size)};
+                    EXPECT_TRUE(sampleId == sampleChecksums[sampleCount]);
+                    ++sampleCount;
+                }
+            });
+
+    callback->waitUntilFinished();
+    EXPECT_EQ(sampleCount, sampleChecksums.size());
+    EXPECT_TRUE(transcoder.stop());
+}
+
+/** Class for testing PassthroughTrackTranscoder's built-in buffer pool. */
+class BufferPoolTests : public ::testing::Test {
+public:
+    static constexpr int kMaxBuffers = 5;
+
+    void SetUp() override {
+        LOG(DEBUG) << "BufferPoolTests set up";
+        mBufferPool = std::make_shared<PassthroughTrackTranscoder::BufferPool>(kMaxBuffers);
+    }
+
+    void TearDown() override {
+        LOG(DEBUG) << "BufferPoolTests tear down";
+        mBufferPool.reset();
+    }
+
+    std::shared_ptr<PassthroughTrackTranscoder::BufferPool> mBufferPool;
+};
+
+TEST_F(BufferPoolTests, BufferReuse) {
+    LOG(DEBUG) << "Testing BufferReuse";
+
+    uint8_t* buffer1 = mBufferPool->getBufferWithSize(10);
+    EXPECT_NE(buffer1, nullptr);
+
+    uint8_t* buffer2 = mBufferPool->getBufferWithSize(10);
+    EXPECT_NE(buffer2, nullptr);
+    EXPECT_NE(buffer2, buffer1);
+
+    mBufferPool->returnBuffer(buffer1);
+
+    uint8_t* buffer3 = mBufferPool->getBufferWithSize(10);
+    EXPECT_NE(buffer3, nullptr);
+    EXPECT_NE(buffer3, buffer2);
+    EXPECT_EQ(buffer3, buffer1);
+
+    mBufferPool->returnBuffer(buffer2);
+
+    uint8_t* buffer4 = mBufferPool->getBufferWithSize(10);
+    EXPECT_NE(buffer4, nullptr);
+    EXPECT_NE(buffer4, buffer1);
+    EXPECT_EQ(buffer4, buffer2);
+}
+
+TEST_F(BufferPoolTests, SmallestAvailableBuffer) {
+    LOG(DEBUG) << "Testing SmallestAvailableBuffer";
+
+    uint8_t* buffer1 = mBufferPool->getBufferWithSize(10);
+    EXPECT_NE(buffer1, nullptr);
+
+    uint8_t* buffer2 = mBufferPool->getBufferWithSize(15);
+    EXPECT_NE(buffer2, nullptr);
+    EXPECT_NE(buffer2, buffer1);
+
+    uint8_t* buffer3 = mBufferPool->getBufferWithSize(20);
+    EXPECT_NE(buffer3, nullptr);
+    EXPECT_NE(buffer3, buffer1);
+    EXPECT_NE(buffer3, buffer2);
+
+    mBufferPool->returnBuffer(buffer1);
+    mBufferPool->returnBuffer(buffer2);
+    mBufferPool->returnBuffer(buffer3);
+
+    uint8_t* buffer4 = mBufferPool->getBufferWithSize(11);
+    EXPECT_NE(buffer4, nullptr);
+    EXPECT_EQ(buffer4, buffer2);
+
+    uint8_t* buffer5 = mBufferPool->getBufferWithSize(11);
+    EXPECT_NE(buffer5, nullptr);
+    EXPECT_EQ(buffer5, buffer3);
+}
+
+TEST_F(BufferPoolTests, AddAfterAbort) {
+    LOG(DEBUG) << "Testing AddAfterAbort";
+
+    uint8_t* buffer1 = mBufferPool->getBufferWithSize(10);
+    EXPECT_NE(buffer1, nullptr);
+    mBufferPool->returnBuffer(buffer1);
+
+    mBufferPool->abort();
+    uint8_t* buffer2 = mBufferPool->getBufferWithSize(10);
+    EXPECT_EQ(buffer2, nullptr);
+}
+
+TEST_F(BufferPoolTests, MaximumBuffers) {
+    LOG(DEBUG) << "Testing MaximumBuffers";
+
+    static constexpr size_t kBufferBaseSize = 10;
+    std::unordered_map<uint8_t*, size_t> addressSizeMap;
+
+    // Get kMaxBuffers * 2 new buffers with increasing sizes.
+    // (Note: Once kMaxBuffers have been allocated, the pool deletes old buffers to accommodate new
+    // ones, freeing the deleted buffers for reuse by the system's heap allocator. Because of that
+    // reuse we cannot assert that each new pointer is unique here.)
+    for (int i = 0; i < kMaxBuffers * 2; i++) {
+        size_t size = kBufferBaseSize + i;
+        uint8_t* buffer = mBufferPool->getBufferWithSize(size);
+        EXPECT_NE(buffer, nullptr);
+        addressSizeMap[buffer] = size;
+        mBufferPool->returnBuffer(buffer);
+    }
+
+    // Verify that the pool now contains the kMaxBuffers largest buffers allocated above and that
+    // the buffer of matching size is returned.
+    for (int i = kMaxBuffers; i < kMaxBuffers * 2; i++) {
+        size_t size = kBufferBaseSize + i;
+        uint8_t* buffer = mBufferPool->getBufferWithSize(size);
+        EXPECT_NE(buffer, nullptr);
+
+        auto it = addressSizeMap.find(buffer);
+        ASSERT_NE(it, addressSizeMap.end());
+        EXPECT_EQ(it->second, size);
+        mBufferPool->returnBuffer(buffer);
+    }
+}
+
+}  // namespace android
+
+int main(int argc, char** argv) {
+    ::testing::InitGoogleTest(&argc, argv);
+    return RUN_ALL_TESTS();
+}
diff --git a/media/libmediatranscoding/transcoder/tests/README.md b/media/libmediatranscoding/transcoder/tests/README.md
new file mode 100644
index 0000000..59417b0
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/tests/README.md
@@ -0,0 +1,14 @@
+## Transcoder Testing ##
+---
+#### Transcoder unit tests:
+To run all transcoder unit tests, run the supplied script from this folder:
+
+```
+./build_and_run_all_unit_tests.sh
+```
+
+To run individual unit test modules, use atest:
+
+```
+atest MediaSampleReaderNDKTests
+```
diff --git a/media/libmediatranscoding/transcoder/tests/TrackTranscoderTestUtils.h b/media/libmediatranscoding/transcoder/tests/TrackTranscoderTestUtils.h
new file mode 100644
index 0000000..8d05353
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/tests/TrackTranscoderTestUtils.h
@@ -0,0 +1,126 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <media/MediaTrackTranscoder.h>
+#include <media/MediaTrackTranscoderCallback.h>
+
+#include <condition_variable>
+#include <memory>
+#include <mutex>
+
+namespace android {
+
+//
+// This file contains test utilities used by more than one track transcoder test.
+//
+
+class TrackTranscoderTestUtils {
+public:
+    static std::shared_ptr<AMediaFormat> getDefaultVideoDestinationFormat(
+            AMediaFormat* sourceFormat, bool includeBitrate = true) {
+        // Default video destination format setup.
+        static constexpr float kFrameRate = 30.0f;
+        static constexpr float kIFrameInterval = 30.0f;
+        static constexpr int32_t kBitRate = 2 * 1000 * 1000;
+        static constexpr int32_t kColorFormatSurface = 0x7f000789;
+
+        AMediaFormat* destinationFormat = AMediaFormat_new();
+        AMediaFormat_copy(destinationFormat, sourceFormat);
+        AMediaFormat_setFloat(destinationFormat, AMEDIAFORMAT_KEY_FRAME_RATE, kFrameRate);
+        AMediaFormat_setFloat(destinationFormat, AMEDIAFORMAT_KEY_I_FRAME_INTERVAL,
+                              kIFrameInterval);
+        if (includeBitrate) {
+            AMediaFormat_setInt32(destinationFormat, AMEDIAFORMAT_KEY_BIT_RATE, kBitRate);
+        }
+        AMediaFormat_setInt32(destinationFormat, AMEDIAFORMAT_KEY_COLOR_FORMAT,
+                              kColorFormatSurface);
+
+        return std::shared_ptr<AMediaFormat>(destinationFormat, &AMediaFormat_delete);
+    }
+};
+
+class TestCallback : public MediaTrackTranscoderCallback {
+public:
+    TestCallback() = default;
+    ~TestCallback() = default;
+
+    // MediaTrackTranscoderCallback
+    void onTrackFormatAvailable(const MediaTrackTranscoder* transcoder __unused) {
+        std::unique_lock<std::mutex> lock(mMutex);
+        mTrackFormatAvailable = true;
+        mTrackFormatAvailableCondition.notify_all();
+    }
+
+    void onTrackFinished(const MediaTrackTranscoder* transcoder __unused) {
+        std::unique_lock<std::mutex> lock(mMutex);
+        mTranscodingFinished = true;
+        mTranscodingFinishedCondition.notify_all();
+    }
+
+    void onTrackError(const MediaTrackTranscoder* transcoder __unused, media_status_t status) {
+        std::unique_lock<std::mutex> lock(mMutex);
+        mTranscodingFinished = true;
+        mStatus = status;
+        mTranscodingFinishedCondition.notify_all();
+    }
+    // ~MediaTrackTranscoderCallback
+
+    media_status_t waitUntilFinished() {
+        std::unique_lock<std::mutex> lock(mMutex);
+        while (!mTranscodingFinished) {
+            mTranscodingFinishedCondition.wait(lock);
+        }
+        return mStatus;
+    }
+
+    void waitUntilTrackFormatAvailable() {
+        std::unique_lock<std::mutex> lock(mMutex);
+        while (!mTrackFormatAvailable) {
+            mTrackFormatAvailableCondition.wait(lock);
+        }
+    }
+
+private:
+    media_status_t mStatus = AMEDIA_OK;
+    std::mutex mMutex;
+    std::condition_variable mTranscodingFinishedCondition;
+    std::condition_variable mTrackFormatAvailableCondition;
+    bool mTranscodingFinished = false;
+    bool mTrackFormatAvailable = false;
+};
+
+class OneShotSemaphore {
+public:
+    void wait() {
+        std::unique_lock<std::mutex> lock(mMutex);
+        while (!mSignaled) {
+            mCondition.wait(lock);
+        }
+    }
+
+    void signal() {
+        std::unique_lock<std::mutex> lock(mMutex);
+        mSignaled = true;
+        mCondition.notify_all();
+    }
+
+private:
+    std::mutex mMutex;
+    std::condition_variable mCondition;
+    bool mSignaled = false;
+};
+
+}  // namespace android
diff --git a/media/libmediatranscoding/transcoder/tests/VideoTrackTranscoderTests.cpp b/media/libmediatranscoding/transcoder/tests/VideoTrackTranscoderTests.cpp
new file mode 100644
index 0000000..1b5bd13
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/tests/VideoTrackTranscoderTests.cpp
@@ -0,0 +1,224 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Unit Test for VideoTrackTranscoder
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "VideoTrackTranscoderTests"
+
+#include <android-base/logging.h>
+#include <fcntl.h>
+#include <gtest/gtest.h>
+#include <media/MediaSampleReaderNDK.h>
+#include <media/NdkCommon.h>
+#include <media/VideoTrackTranscoder.h>
+#include <utils/Timers.h>
+
+#include "TrackTranscoderTestUtils.h"
+
+namespace android {
+
+// TODO(b/155304421): Implement more advanced video specific tests:
+//  - Codec conversions (HEVC -> AVC).
+//  - Bitrate validation.
+//  - Output frame validation through PSNR.
+
+class VideoTrackTranscoderTests : public ::testing::Test {
+public:
+    VideoTrackTranscoderTests() { LOG(DEBUG) << "VideoTrackTranscoderTests created"; }
+
+    void SetUp() override {
+        LOG(DEBUG) << "VideoTrackTranscoderTests set up";
+        const char* sourcePath =
+                "/data/local/tmp/TranscodingTestAssets/cubicle_avc_480x240_aac_24KHz.mp4";
+
+        const int sourceFd = open(sourcePath, O_RDONLY);
+        ASSERT_GT(sourceFd, 0);
+
+        const off_t fileSize = lseek(sourceFd, 0, SEEK_END);
+        lseek(sourceFd, 0, SEEK_SET);
+
+        mMediaSampleReader = MediaSampleReaderNDK::createFromFd(sourceFd, 0, fileSize);
+        ASSERT_NE(mMediaSampleReader, nullptr);
+        close(sourceFd);
+
+        for (size_t trackIndex = 0; trackIndex < mMediaSampleReader->getTrackCount();
+             ++trackIndex) {
+            AMediaFormat* trackFormat = mMediaSampleReader->getTrackFormat(trackIndex);
+            ASSERT_NE(trackFormat, nullptr);
+
+            const char* mime = nullptr;
+            AMediaFormat_getString(trackFormat, AMEDIAFORMAT_KEY_MIME, &mime);
+            ASSERT_NE(mime, nullptr);
+
+            if (strncmp(mime, "video/", 6) == 0) {
+                mTrackIndex = trackIndex;
+
+                mSourceFormat = std::shared_ptr<AMediaFormat>(trackFormat, &AMediaFormat_delete);
+                ASSERT_NE(mSourceFormat, nullptr);
+
+                mDestinationFormat =
+                        TrackTranscoderTestUtils::getDefaultVideoDestinationFormat(trackFormat);
+                ASSERT_NE(mDestinationFormat, nullptr);
+                break;
+            }
+
+            AMediaFormat_delete(trackFormat);
+        }
+
+        ASSERT_NE(mSourceFormat, nullptr);
+    }
+
+    void TearDown() override { LOG(DEBUG) << "VideoTrackTranscoderTests tear down"; }
+
+    ~VideoTrackTranscoderTests() { LOG(DEBUG) << "VideoTrackTranscoderTests destroyed"; }
+
+    std::shared_ptr<MediaSampleReader> mMediaSampleReader;
+    int mTrackIndex;
+    std::shared_ptr<AMediaFormat> mSourceFormat;
+    std::shared_ptr<AMediaFormat> mDestinationFormat;
+};
+
+TEST_F(VideoTrackTranscoderTests, SampleSoundness) {
+    LOG(DEBUG) << "Testing SampleSoundness";
+    std::shared_ptr<TestCallback> callback = std::make_shared<TestCallback>();
+    auto transcoder = VideoTrackTranscoder::create(callback);
+
+    EXPECT_EQ(mMediaSampleReader->selectTrack(mTrackIndex), AMEDIA_OK);
+    EXPECT_EQ(transcoder->configure(mMediaSampleReader, mTrackIndex, mDestinationFormat),
+              AMEDIA_OK);
+    ASSERT_TRUE(transcoder->start());
+
+    bool eos = false;
+    uint64_t sampleCount = 0;
+    transcoder->setSampleConsumer([&sampleCount, &eos](const std::shared_ptr<MediaSample>& sample) {
+        ASSERT_NE(sample, nullptr);
+        const uint32_t flags = sample->info.flags;
+
+        if (sampleCount == 0) {
+            // Expect first sample to be a codec config.
+            EXPECT_TRUE((flags & SAMPLE_FLAG_CODEC_CONFIG) != 0);
+            EXPECT_TRUE((flags & SAMPLE_FLAG_SYNC_SAMPLE) == 0);
+            EXPECT_TRUE((flags & SAMPLE_FLAG_END_OF_STREAM) == 0);
+            EXPECT_TRUE((flags & SAMPLE_FLAG_PARTIAL_FRAME) == 0);
+        } else if (sampleCount == 1) {
+            // Expect second sample to be a sync sample.
+            EXPECT_TRUE((flags & SAMPLE_FLAG_CODEC_CONFIG) == 0);
+            EXPECT_TRUE((flags & SAMPLE_FLAG_SYNC_SAMPLE) != 0);
+            EXPECT_TRUE((flags & SAMPLE_FLAG_END_OF_STREAM) == 0);
+        }
+
+        if (!(flags & SAMPLE_FLAG_END_OF_STREAM)) {
+            // Expect a valid buffer unless it is EOS.
+            EXPECT_NE(sample->buffer, nullptr);
+            EXPECT_NE(sample->bufferId, 0xBAADF00D);
+            EXPECT_GT(sample->info.size, 0);
+        } else {
+            EXPECT_FALSE(eos);
+            eos = true;
+        }
+
+        ++sampleCount;
+    });
+
+    EXPECT_EQ(callback->waitUntilFinished(), AMEDIA_OK);
+    EXPECT_TRUE(transcoder->stop());
+}
+
+TEST_F(VideoTrackTranscoderTests, PreserveBitrate) {
+    LOG(DEBUG) << "Testing PreserveBitrate";
+    std::shared_ptr<TestCallback> callback = std::make_shared<TestCallback>();
+    std::shared_ptr<MediaTrackTranscoder> transcoder = VideoTrackTranscoder::create(callback);
+
+    auto destFormat = TrackTranscoderTestUtils::getDefaultVideoDestinationFormat(
+            mSourceFormat.get(), false /* includeBitrate */);
+    EXPECT_NE(destFormat, nullptr);
+
+    EXPECT_EQ(mMediaSampleReader->selectTrack(mTrackIndex), AMEDIA_OK);
+
+    int32_t srcBitrate;
+    EXPECT_EQ(mMediaSampleReader->getEstimatedBitrateForTrack(mTrackIndex, &srcBitrate), AMEDIA_OK);
+
+    ASSERT_EQ(transcoder->configure(mMediaSampleReader, mTrackIndex, destFormat), AMEDIA_OK);
+    ASSERT_TRUE(transcoder->start());
+
+    callback->waitUntilTrackFormatAvailable();
+
+    auto outputFormat = transcoder->getOutputFormat();
+    ASSERT_NE(outputFormat, nullptr);
+
+    ASSERT_TRUE(transcoder->stop());
+
+    int32_t outBitrate;
+    EXPECT_TRUE(AMediaFormat_getInt32(outputFormat.get(), AMEDIAFORMAT_KEY_BIT_RATE, &outBitrate));
+
+    EXPECT_EQ(srcBitrate, outBitrate);
+}
+
+// VideoTrackTranscoder needs a valid destination format.
+TEST_F(VideoTrackTranscoderTests, NullDestinationFormat) {
+    LOG(DEBUG) << "Testing NullDestinationFormat";
+    std::shared_ptr<TestCallback> callback = std::make_shared<TestCallback>();
+    std::shared_ptr<AMediaFormat> nullFormat;
+
+    auto transcoder = VideoTrackTranscoder::create(callback);
+    EXPECT_EQ(transcoder->configure(mMediaSampleReader, 0 /* trackIndex */, nullFormat),
+              AMEDIA_ERROR_INVALID_PARAMETER);
+}
+
+TEST_F(VideoTrackTranscoderTests, LingeringEncoder) {
+    OneShotSemaphore semaphore;
+    auto callback = std::make_shared<TestCallback>();
+    auto transcoder = VideoTrackTranscoder::create(callback);
+
+    EXPECT_EQ(mMediaSampleReader->selectTrack(mTrackIndex), AMEDIA_OK);
+    EXPECT_EQ(transcoder->configure(mMediaSampleReader, mTrackIndex, mDestinationFormat),
+              AMEDIA_OK);
+    ASSERT_TRUE(transcoder->start());
+
+    std::vector<std::shared_ptr<MediaSample>> samples;
+    transcoder->setSampleConsumer(
+            [&samples, &semaphore](const std::shared_ptr<MediaSample>& sample) {
+                if (samples.size() >= 4) return;
+
+                ASSERT_NE(sample, nullptr);
+                samples.push_back(sample);
+
+                if (samples.size() == 4 || sample->info.flags & SAMPLE_FLAG_END_OF_STREAM) {
+                    semaphore.signal();
+                }
+            });
+
+    // Wait for the encoder to output samples before stopping and releasing the transcoder.
+    semaphore.wait();
+
+    EXPECT_TRUE(transcoder->stop());
+    transcoder.reset();
+
+    // Return buffers to the codec so that it can resume processing, but keep one buffer to avoid
+    // the codec being released.
+    samples.resize(1);
+
+    // Wait for async codec events.
+    std::this_thread::sleep_for(std::chrono::seconds(1));
+}
+
+}  // namespace android
+
+int main(int argc, char** argv) {
+    ::testing::InitGoogleTest(&argc, argv);
+    return RUN_ALL_TESTS();
+}
diff --git a/media/libmediatranscoding/transcoder/tests/build_and_run_all_unit_tests.sh b/media/libmediatranscoding/transcoder/tests/build_and_run_all_unit_tests.sh
new file mode 100755
index 0000000..b848b4c
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/tests/build_and_run_all_unit_tests.sh
@@ -0,0 +1,46 @@
+#!/bin/bash
+#
+# Run tests in this directory.
+#
+
+if [ "$SYNC_FINISHED" != true ]; then
+  if [ -z "$ANDROID_BUILD_TOP" ]; then
+      echo "Android build environment not set"
+      exit -1
+  fi
+
+  # ensure we have mm
+  . $ANDROID_BUILD_TOP/build/envsetup.sh
+
+  mm
+
+  echo "waiting for device"
+
+  adb root && adb wait-for-device remount && adb sync
+fi
+
+# Push the files onto the device.
+. $ANDROID_BUILD_TOP/frameworks/av/media/libmediatranscoding/tests/assets/push_assets.sh
+
+echo "========================================"
+
+echo "testing MediaSampleReaderNDK"
+adb shell ASAN_OPTIONS=detect_container_overflow=0 /data/nativetest64/MediaSampleReaderNDKTests/MediaSampleReaderNDKTests
+
+echo "testing MediaSampleQueue"
+adb shell ASAN_OPTIONS=detect_container_overflow=0 /data/nativetest64/MediaSampleQueueTests/MediaSampleQueueTests
+
+echo "testing MediaTrackTranscoder"
+adb shell ASAN_OPTIONS=detect_container_overflow=0 /data/nativetest64/MediaTrackTranscoderTests/MediaTrackTranscoderTests
+
+echo "testing VideoTrackTranscoder"
+adb shell ASAN_OPTIONS=detect_container_overflow=0 /data/nativetest64/VideoTrackTranscoderTests/VideoTrackTranscoderTests
+
+echo "testing PassthroughTrackTranscoder"
+adb shell ASAN_OPTIONS=detect_container_overflow=0 /data/nativetest64/PassthroughTrackTranscoderTests/PassthroughTrackTranscoderTests
+
+echo "testing MediaSampleWriter"
+adb shell ASAN_OPTIONS=detect_container_overflow=0 /data/nativetest64/MediaSampleWriterTests/MediaSampleWriterTests
+
+echo "testing MediaTranscoder"
+adb shell ASAN_OPTIONS=detect_container_overflow=0 /data/nativetest64/MediaTranscoderTests/MediaTranscoderTests
diff --git a/media/libnbaio/include/media/nbaio/Pipe.h b/media/libnbaio/include/media/nbaio/Pipe.h
index 0431976..54dc08f 100644
--- a/media/libnbaio/include/media/nbaio/Pipe.h
+++ b/media/libnbaio/include/media/nbaio/Pipe.h
@@ -23,7 +23,7 @@
 namespace android {
 
 // Pipe is multi-thread safe for readers (see PipeReader), but safe for only a single writer thread.
-// It cannot UNDERRUN on write, unless we allow designation of a master reader that provides the
+// It cannot UNDERRUN on write, unless we allow designation of a primary reader that provides the
 // time-base. Readers can be added and removed dynamically, and it's OK to have no readers.
 class Pipe : public NBAIO_Sink {
 
diff --git a/media/libshmem/Android.bp b/media/libshmem/Android.bp
new file mode 100644
index 0000000..b549b5d
--- /dev/null
+++ b/media/libshmem/Android.bp
@@ -0,0 +1,54 @@
+aidl_interface {
+    name: "shared-file-region-aidl",
+    unstable: true,
+    host_supported: true,
+    vendor_available: true,
+    double_loadable: true,
+    local_include_dir: "aidl",
+    srcs: [
+        "aidl/android/media/SharedFileRegion.aidl",
+    ],
+}
+
+cc_library {
+    name: "libshmemcompat",
+    export_include_dirs: ["include"],
+    srcs: ["ShmemCompat.cpp"],
+    shared_libs: [
+        "libbinder",
+        "libshmemutil",
+        "libutils",
+        "shared-file-region-aidl-unstable-cpp",
+    ],
+    export_shared_lib_headers: [
+        "libbinder",
+        "libutils",
+        "shared-file-region-aidl-unstable-cpp",
+    ],
+}
+
+cc_library {
+    name: "libshmemutil",
+    export_include_dirs: ["include"],
+    srcs: ["ShmemUtil.cpp"],
+    shared_libs: [
+        "shared-file-region-aidl-unstable-cpp",
+    ],
+    export_shared_lib_headers: [
+        "shared-file-region-aidl-unstable-cpp",
+    ],
+}
+
+cc_test {
+    name: "shmemTest",
+    srcs: ["ShmemTest.cpp"],
+    shared_libs: [
+        "libbinder",
+        "libcutils",
+        "libshmemcompat",
+        "libshmemutil",
+        "libutils",
+        "shared-file-region-aidl-unstable-cpp",
+    ],
+    test_suites: ["device-tests"],
+}
diff --git a/media/libshmem/OWNERS b/media/libshmem/OWNERS
new file mode 100644
index 0000000..29fa2f5
--- /dev/null
+++ b/media/libshmem/OWNERS
@@ -0,0 +1,3 @@
+ytai@google.com
+mnaganov@google.com
+elaurent@google.com
diff --git a/media/libshmem/README.md b/media/libshmem/README.md
new file mode 100644
index 0000000..c25fa7f
--- /dev/null
+++ b/media/libshmem/README.md
@@ -0,0 +1,6 @@
+# libshmem
+
+This library provides facilities for sharing memory across processes over (stable) AIDL. The main
+feature is the definition of the `android.media.SharedFileRegion` AIDL type, which represents a block of
+memory that can be shared between processes. In addition, a few utilities are provided to facilitate
+the use of shared memory and to integrate with legacy code that uses older facilities.
\ No newline at end of file
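For a quick orientation, here is a minimal round-trip sketch (hypothetical caller code named `roundTripExample`; it relies only on the `ShmemCompat` helpers added later in this change, and a real caller would also check and propagate errors):

```cpp
#include <binder/MemoryBase.h>
#include <binder/MemoryHeapBase.h>
#include <media/ShmemCompat.h>

using namespace android;

bool roundTripExample() {
    // Wrap 1 KiB of anonymous shared memory in an IMemory.
    sp<MemoryHeapBase> heap = new MemoryHeapBase(1024 /* size */);
    sp<IMemory> mem = sp<MemoryBase>::make(heap, 0 /* offset */, 1024 /* size */);

    // Convert to the AIDL-friendly SharedFileRegion before sending it over binder...
    media::SharedFileRegion shmem;
    if (!media::convertIMemoryToSharedFileRegion(mem, &shmem)) return false;

    // ...and convert back to an IMemory on the receiving side.
    sp<IMemory> received;
    return media::convertSharedFileRegionToIMemory(shmem, &received);
}
```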
diff --git a/media/libshmem/ShmemCompat.cpp b/media/libshmem/ShmemCompat.cpp
new file mode 100644
index 0000000..246cb24
--- /dev/null
+++ b/media/libshmem/ShmemCompat.cpp
@@ -0,0 +1,125 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "media/ShmemCompat.h"
+
+#include "binder/MemoryBase.h"
+#include "binder/MemoryHeapBase.h"
+#include "media/ShmemUtil.h"
+
+namespace android {
+namespace media {
+
+bool convertSharedFileRegionToIMemory(const SharedFileRegion& shmem,
+                                      sp<IMemory>* result) {
+    assert(result != nullptr);
+
+    if (!validateSharedFileRegion(shmem)) {
+        return false;
+    }
+
+    // Heap offset and size must be page aligned.
+    const size_t pageSize = getpagesize();
+    const size_t pageMask = ~(pageSize - 1);
+
+    // OK if this wraps.
+    const uint64_t endOffset = static_cast<uint64_t>(shmem.offset) +
+            static_cast<uint64_t>(shmem.size);
+
+    // Round down to page boundary.
+    const uint64_t heapStartOffset = shmem.offset & pageMask;
+    // Round up to page boundary.
+    const uint64_t heapEndOffset = (endOffset + pageSize - 1) & pageMask;
+    const uint64_t heapSize = heapEndOffset - heapStartOffset;
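+    // Worked example (illustrative numbers, assuming a 4096-byte page size):
+    //   shmem.offset = 5000, shmem.size = 1000
+    //   heapStartOffset = 5000 & ~4095        = 4096
+    //   endOffset       = 5000 + 1000         = 6000
+    //   heapEndOffset   = (6000 + 4095) & ~4095 = 8192
+    //   heapSize        = 8192 - 4096         = 4096
+    // The MemoryBase created below then refers to offset 5000 - 4096 = 904 within the heap.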
+
+    if (heapStartOffset > std::numeric_limits<size_t>::max() ||
+        heapSize > std::numeric_limits<size_t>::max()) {
+        return false;
+    }
+
+    uint32_t flags = !shmem.writeable ? IMemoryHeap::READ_ONLY : 0;
+
+    const sp<MemoryHeapBase> heap =
+            new MemoryHeapBase(shmem.fd.get(), heapSize, flags, heapStartOffset);
+    *result = sp<MemoryBase>::make(heap,
+                                   shmem.offset - heapStartOffset,
+                                   shmem.size);
+    return true;
+}
+
+bool convertIMemoryToSharedFileRegion(const sp<IMemory>& mem,
+                                      SharedFileRegion* result) {
+    assert(mem != nullptr);
+    assert(result != nullptr);
+
+    *result = SharedFileRegion();
+
+    ssize_t offset;
+    size_t size;
+
+    sp<IMemoryHeap> heap = mem->getMemory(&offset, &size);
+    if (size > 0) {
+        if (heap == nullptr) {
+            return false;
+        }
+        // Make sure the offset and size do not overflow from int64 boundaries.
+        if (size > std::numeric_limits<int64_t>::max() ||
+                offset > std::numeric_limits<int64_t>::max() ||
+                heap->getOffset() > std::numeric_limits<int64_t>::max() ||
+                static_cast<uint64_t>(heap->getOffset()) +
+                static_cast<uint64_t>(offset)
+                        > std::numeric_limits<int64_t>::max()) {
+            return false;
+        }
+
+        const int fd = fcntl(heap->getHeapID(), F_DUPFD_CLOEXEC, 0);
+        if (fd < 0) {
+            return false;
+        }
+        result->fd.reset(base::unique_fd(fd));
+        result->size = size;
+        result->offset = heap->getOffset() + offset;
+        result->writeable = (heap->getFlags() & IMemoryHeap::READ_ONLY) == 0;
+    }
+    return true;
+}
+
+bool convertNullableSharedFileRegionToIMemory(const std::optional<SharedFileRegion>& shmem,
+                                              sp<IMemory>* result) {
+    assert(result != nullptr);
+
+    if (!shmem.has_value()) {
+        result->clear();
+        return true;
+    }
+
+    return convertSharedFileRegionToIMemory(shmem.value(), result);
+}
+
+bool convertNullableIMemoryToSharedFileRegion(const sp<IMemory>& mem,
+                                              std::optional<SharedFileRegion>* result) {
+    assert(result != nullptr);
+
+    if (mem == nullptr) {
+        result->reset();
+        return true;
+    }
+
+    result->emplace();
+    return convertIMemoryToSharedFileRegion(mem, &result->value());
+}
+
+}  // namespace media
+}  // namespace android
diff --git a/media/libshmem/ShmemTest.cpp b/media/libshmem/ShmemTest.cpp
new file mode 100644
index 0000000..874f34c
--- /dev/null
+++ b/media/libshmem/ShmemTest.cpp
@@ -0,0 +1,119 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <gtest/gtest.h>
+
+#include "binder/MemoryBase.h"
+#include "binder/MemoryHeapBase.h"
+#include "cutils/ashmem.h"
+#include "media/ShmemCompat.h"
+#include "media/ShmemUtil.h"
+
+namespace android {
+namespace media {
+namespace {
+
+// Creates a SharedFileRegion instance.
+SharedFileRegion makeSharedFileRegion(int64_t offset, int64_t size) {
+    SharedFileRegion shmem;
+    shmem.offset = offset;
+    shmem.size = size;
+    int fd = ashmem_create_region("", size + offset);
+    assert(fd >= 0);
+    shmem.fd = os::ParcelFileDescriptor(base::unique_fd(fd));
+    return shmem;
+}
+
+// Creates a SharedFileRegion instance with an invalid FD.
+SharedFileRegion makeInvalidSharedFileRegion(int64_t offset, int64_t size) {
+    SharedFileRegion shmem;
+    shmem.offset = offset;
+    shmem.size = size;
+    return shmem;
+}
+
+sp<IMemory> makeIMemory(const std::vector<uint8_t>& content, bool writeable = true) {
+    constexpr size_t kOffset = 19;
+
+    sp<MemoryHeapBase> heap = new MemoryHeapBase(content.size(),
+                                                 !writeable ? IMemoryHeap::READ_ONLY : 0);
+    sp<IMemory> result = sp<MemoryBase>::make(heap, kOffset, content.size());
+    memcpy(result->unsecurePointer(), content.data(), content.size());
+    return result;
+}
+
+TEST(ShmemTest, Validate) {
+    EXPECT_TRUE(validateSharedFileRegion(makeSharedFileRegion(0, 0)));
+    EXPECT_TRUE(validateSharedFileRegion(makeSharedFileRegion(1, 2)));
+    EXPECT_FALSE(validateSharedFileRegion(makeSharedFileRegion(-1, 2)));
+    EXPECT_FALSE(validateSharedFileRegion(makeSharedFileRegion(2, -1)));
+    EXPECT_FALSE(validateSharedFileRegion(makeInvalidSharedFileRegion(1, 2)));
+}
+
+TEST(ShmemTest, Conversion) {
+    sp<IMemory> reconstructed;
+    {
+        SharedFileRegion shmem;
+        sp<IMemory> imem = makeIMemory({6, 5, 3});
+        ASSERT_TRUE(convertIMemoryToSharedFileRegion(imem, &shmem));
+        ASSERT_EQ(3, shmem.size);
+        ASSERT_GE(shmem.fd.get(), 0);
+        ASSERT_TRUE(shmem.writeable);
+        ASSERT_TRUE(convertSharedFileRegionToIMemory(shmem, &reconstructed));
+    }
+    ASSERT_EQ(3, reconstructed->size());
+    ASSERT_EQ(reconstructed->getMemory()->getFlags() & IMemoryHeap::READ_ONLY, 0);
+    const uint8_t* p =
+            reinterpret_cast<const uint8_t*>(reconstructed->unsecurePointer());
+    EXPECT_EQ(6, p[0]);
+    EXPECT_EQ(5, p[1]);
+    EXPECT_EQ(3, p[2]);
+}
+
+TEST(ShmemTest, ConversionReadOnly) {
+    sp<IMemory> reconstructed;
+    {
+        SharedFileRegion shmem;
+        sp<IMemory> imem = makeIMemory({6, 5, 3}, false);
+        ASSERT_TRUE(convertIMemoryToSharedFileRegion(imem, &shmem));
+        ASSERT_EQ(3, shmem.size);
+        ASSERT_GE(shmem.fd.get(), 0);
+        ASSERT_FALSE(shmem.writeable);
+        ASSERT_TRUE(convertSharedFileRegionToIMemory(shmem, &reconstructed));
+    }
+    ASSERT_EQ(3, reconstructed->size());
+    ASSERT_NE(reconstructed->getMemory()->getFlags() & IMemoryHeap::READ_ONLY, 0);
+    const uint8_t* p =
+            reinterpret_cast<const uint8_t*>(reconstructed->unsecurePointer());
+    EXPECT_EQ(6, p[0]);
+    EXPECT_EQ(5, p[1]);
+    EXPECT_EQ(3, p[2]);
+}
+
+TEST(ShmemTest, NullConversion) {
+    sp<IMemory> reconstructed;
+    {
+        std::optional<SharedFileRegion> shmem;
+        sp<IMemory> imem;
+        ASSERT_TRUE(convertNullableIMemoryToSharedFileRegion(imem, &shmem));
+        ASSERT_FALSE(shmem.has_value());
+        ASSERT_TRUE(convertNullableSharedFileRegionToIMemory(shmem, &reconstructed));
+    }
+    ASSERT_EQ(nullptr, reconstructed);
+}
+
+}  // namespace
+}  // namespace media
+}  // namespace android
diff --git a/media/libshmem/ShmemUtil.cpp b/media/libshmem/ShmemUtil.cpp
new file mode 100644
index 0000000..e075346
--- /dev/null
+++ b/media/libshmem/ShmemUtil.cpp
@@ -0,0 +1,44 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "media/ShmemUtil.h"
+
+namespace android {
+namespace media {
+
+bool validateSharedFileRegion(const SharedFileRegion& shmem) {
+    // FD must be valid.
+    if (shmem.fd.get() < 0) {
+        return false;
+    }
+
+    // Size and offset must be non-negative.
+    if (shmem.size < 0 || shmem.offset < 0) {
+        return false;
+    }
+
+    uint64_t size = shmem.size;
+    uint64_t offset = shmem.offset;
+
+    // Must not wrap.
+    if (offset > offset + size) {
+        return false;
+    }
+
+    return true;
+}
+
+}  // namespace media
+}  // namespace android
diff --git a/media/libshmem/aidl/android/media/SharedFileRegion.aidl b/media/libshmem/aidl/android/media/SharedFileRegion.aidl
new file mode 100644
index 0000000..199b647
--- /dev/null
+++ b/media/libshmem/aidl/android/media/SharedFileRegion.aidl
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * A shared file region.
+ *
+ * This type contains the required information to share a region of a file between processes over
+ * AIDL.
+ * An instance of this type always holds a valid FD. To represent a null SharedFileRegion, use
+ * @nullable SharedFileRegion.
+ * Primarily, this is intended for shared memory blocks.
+ *
+ * @hide
+ */
+parcelable SharedFileRegion {
+    /** File descriptor of the region. Must be valid. */
+    ParcelFileDescriptor fd;
+    /** Offset of the start of the region within the file, in bytes. Must be non-negative. */
+    long offset;
+    /** Size of the memory region, in bytes. Must be non-negative. */
+    long size;
+    /** Whether the region is writeable. */
+    boolean writeable;
+}
diff --git a/media/libshmem/include/media/ShmemCompat.h b/media/libshmem/include/media/ShmemCompat.h
new file mode 100644
index 0000000..ba59f25
--- /dev/null
+++ b/media/libshmem/include/media/ShmemCompat.h
@@ -0,0 +1,74 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+// This module contains utilities for interfacing between legacy code that uses IMemory and new
+// code that uses android.media.SharedFileRegion.
+
+#include <optional>
+
+#include "android/media/SharedFileRegion.h"
+#include "binder/IMemory.h"
+#include "utils/StrongPointer.h"
+
+namespace android {
+namespace media {
+
+/**
+ * Converts a SharedFileRegion parcelable to an IMemory instance.
+ * @param shmem The SharedFileRegion instance.
+ * @param result The resulting IMemory instance. May not be null.
+ * @return true if the conversion is successful (should always succeed under normal circumstances,
+ *         failure usually means corrupt data).
+ */
+bool convertSharedFileRegionToIMemory(const SharedFileRegion& shmem,
+                                      sp<IMemory>* result);
+
+/**
+ * Converts a nullable SharedFileRegion parcelable to an IMemory instance.
+ * @param shmem The SharedFileRegion instance.
+ * @param result The resulting IMemory instance. May not be null. The pointee is set to null
+ *               if the input is null.
+ * @return true if the conversion is successful (should always succeed under normal circumstances,
+ *         failure usually means corrupt data).
+ */
+bool convertNullableSharedFileRegionToIMemory(const std::optional<SharedFileRegion>& shmem,
+                                              sp<IMemory>* result);
+
+/**
+ * Converts an IMemory instance to SharedFileRegion.
+ * @param mem The IMemory instance. May not be null.
+ * @param result The resulting SharedFileRegion instance.
+ * @return true if the conversion is successful (should always succeed under normal circumstances,
+ *         failure usually means corrupt data).
+ */
+bool convertIMemoryToSharedFileRegion(const sp<IMemory>& mem,
+                                      SharedFileRegion* result);
+
+/**
+ * Converts a nullable IMemory instance to a nullable SharedFileRegion.
+ * @param mem The IMemory instance. May be null.
+ * @param result The resulting SharedFileRegion instance. May not be null. The pointee is set
+ *               to an empty optional if the input is null.
+ * @return true if the conversion is successful (should always succeed under normal circumstances,
+ *         failure usually means corrupt data).
+ */
+bool convertNullableIMemoryToSharedFileRegion(const sp<IMemory>& mem,
+                                              std::optional<SharedFileRegion>* result);
+
+}  // namespace media
+}  // namespace android
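Usage note (a hedged sketch: the `example` namespace and the `packBuffer`/`unpackBuffer` wrappers are illustrative, not part of this API). The nullable variants map a null `IMemory` to `std::nullopt` and back, which is what an AIDL method taking `@nullable SharedFileRegion` expects:

```cpp
#include <optional>

#include <media/ShmemCompat.h>

namespace example {  // hypothetical caller code

using android::IMemory;
using android::sp;
using android::media::SharedFileRegion;

// Sending side: a null IMemory becomes std::nullopt on the wire.
bool packBuffer(const sp<IMemory>& mem, std::optional<SharedFileRegion>* out) {
    return android::media::convertNullableIMemoryToSharedFileRegion(mem, out);
}

// Receiving side: std::nullopt maps back to a null sp<IMemory>.
bool unpackBuffer(const std::optional<SharedFileRegion>& in, sp<IMemory>* out) {
    return android::media::convertNullableSharedFileRegionToIMemory(in, out);
}

}  // namespace example
```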
diff --git a/media/libshmem/include/media/ShmemUtil.h b/media/libshmem/include/media/ShmemUtil.h
new file mode 100644
index 0000000..3a7a5a5
--- /dev/null
+++ b/media/libshmem/include/media/ShmemUtil.h
@@ -0,0 +1,32 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+// This module contains utilities for working with android.media.SharedFileRegion.
+
+#include "android/media/SharedFileRegion.h"
+
+namespace android {
+namespace media {
+
+/**
+ * Checks whether a SharedFileRegion instance is valid (all fields hold valid values).
+ */
+bool validateSharedFileRegion(const SharedFileRegion& shmem);
+
+}  // namespace media
+}  // namespace android
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 63ab654..44ee2ac 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -279,6 +279,13 @@
 
     void postFillThisBuffer(BufferInfo *info);
 
+    void maybePostExtraOutputMetadataBufferRequest() {
+        if (!mPendingExtraOutputMetadataBufferRequest) {
+            (new AMessage(kWhatSubmitExtraOutputMetadataBuffer, mCodec))->post();
+            mPendingExtraOutputMetadataBufferRequest = true;
+        }
+    }
+
 private:
     // Handles an OMX message. Returns true iff message was handled.
     bool onOMXMessage(const sp<AMessage> &msg);
@@ -302,6 +309,8 @@
 
     void getMoreInputDataIfPossible();
 
+    bool mPendingExtraOutputMetadataBufferRequest;
+
     DISALLOW_EVIL_CONSTRUCTORS(BaseState);
 };
 
@@ -555,6 +564,8 @@
       mShutdownInProgress(false),
       mExplicitShutdown(false),
       mIsLegacyVP9Decoder(false),
+      mIsStreamCorruptFree(false),
+      mIsLowLatency(false),
       mEncoderDelay(0),
       mEncoderPadding(0),
       mRotationDegrees(0),
@@ -887,7 +898,7 @@
 
             sp<DataConverter> converter = mConverter[portIndex];
             if (converter != NULL) {
-                // here we assume sane conversions of max 4:1, so result fits in int32
+                // here we assume conversions of max 4:1, so result fits in int32
                 if (portIndex == kPortIndexInput) {
                     conversionBufferSize = converter->sourceSize(bufSize);
                 } else {
@@ -2237,6 +2248,12 @@
             }
             err = setupG711Codec(encoder, sampleRate, numChannels);
         }
+    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_OPUS)) {
+        int32_t numChannels = 1, sampleRate = 48000;
+        if (msg->findInt32("channel-count", &numChannels) &&
+            msg->findInt32("sample-rate", &sampleRate)) {
+            err = setupOpusCodec(encoder, sampleRate, numChannels);
+        }
     } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC)) {
         // numChannels needs to be set to properly communicate PCM values.
         int32_t numChannels = 2, sampleRate = 44100, compressionLevel = -1;
@@ -2323,6 +2340,12 @@
         mChannelMaskPresent = false;
     }
 
+    int32_t isCorruptFree = 0;
+    if (msg->findInt32("corrupt-free", &isCorruptFree)) {
+        mIsStreamCorruptFree = (isCorruptFree == 1);
+        ALOGV("corrupt-free=[%d]", mIsStreamCorruptFree);
+    }
+
     int32_t maxInputSize;
     if (msg->findInt32("max-input-size", &maxInputSize)) {
         err = setMinBufferSize(kPortIndexInput, (size_t)maxInputSize);
@@ -2409,6 +2432,7 @@
     if (err != OK) {
         ALOGE("decoder can not set low-latency to %d (err %d)", lowLatency, err);
     }
+    mIsLowLatency = (lowLatency && err == OK);
     return err;
 }
 
@@ -2602,15 +2626,15 @@
     unsigned int numLayers = 0;
     unsigned int numBLayers = 0;
     int tags;
-    char dummy;
+    char tmp;
     OMX_VIDEO_ANDROID_TEMPORALLAYERINGPATTERNTYPE pattern =
         OMX_VIDEO_AndroidTemporalLayeringPatternNone;
-    if (sscanf(tsSchema.c_str(), "webrtc.vp8.%u-layer%c", &numLayers, &dummy) == 1
+    if (sscanf(tsSchema.c_str(), "webrtc.vp8.%u-layer%c", &numLayers, &tmp) == 1
             && numLayers > 0) {
         pattern = OMX_VIDEO_AndroidTemporalLayeringPatternWebRTC;
     } else if ((tags = sscanf(tsSchema.c_str(), "android.generic.%u%c%u%c",
-                    &numLayers, &dummy, &numBLayers, &dummy))
-            && (tags == 1 || (tags == 3 && dummy == '+'))
+                    &numLayers, &tmp, &numBLayers, &tmp))
+            && (tags == 1 || (tags == 3 && tmp == '+'))
             && numLayers > 0 && numLayers < UINT32_MAX - numBLayers) {
         numLayers += numBLayers;
         pattern = OMX_VIDEO_AndroidTemporalLayeringPatternAndroid;
@@ -3110,6 +3134,26 @@
             kPortIndexInput, sampleRate, numChannels);
 }
 
+status_t ACodec::setupOpusCodec(bool encoder, int32_t sampleRate, int32_t numChannels) {
+    if (encoder) {
+        return INVALID_OPERATION;
+    }
+    OMX_AUDIO_PARAM_ANDROID_OPUSTYPE def;
+    InitOMXParams(&def);
+    def.nPortIndex = kPortIndexInput;
+    status_t err = mOMXNode->getParameter(
+            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidOpus, &def, sizeof(def));
+    if (err != OK) {
+        ALOGE("setupOpusCodec(): Error %d getting OMX_IndexParamAudioAndroidOpus parameter", err);
+        return err;
+    }
+    def.nSampleRate = sampleRate;
+    def.nChannels = numChannels;
+    err = mOMXNode->setParameter(
+           (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidOpus, &def, sizeof(def));
+    return err;
+}
+
 status_t ACodec::setupFlacCodec(
         bool encoder, int32_t numChannels, int32_t sampleRate, int32_t compressionLevel,
         AudioEncoding encoding) {
@@ -4127,6 +4171,29 @@
         ALOGI("setupVideoEncoder succeeded");
     }
 
+    // Video should be encoded upright because the RTP protocol can carry rotation information
+    // only when CVO is supported. Applying the rotation at the encoder here covers the non-CVO
+    // case for the video streaming scenario.
+    int32_t rotation = 0;
+    if (msg->findInt32("rotation-degrees", &rotation)) {
+        OMX_CONFIG_ROTATIONTYPE config;
+        InitOMXParams(&config);
+        config.nPortIndex = kPortIndexOutput;
+        status_t err = mOMXNode->getConfig(
+                (OMX_INDEXTYPE)OMX_IndexConfigCommonRotate, &config, sizeof(config));
+        if (err != OK) {
+            ALOGW("Failed to getConfig of OMX_IndexConfigCommonRotate(err %d)", err);
+        }
+        config.nRotation = rotation;
+        err = mOMXNode->setConfig(
+                (OMX_INDEXTYPE)OMX_IndexConfigCommonRotate, &config, sizeof(config));
+
+        ALOGD("Applying encoder-rotation=[%d] to video encoder.", config.nRotation);
+        if (err != OK) {
+            ALOGW("Failed to setConfig of OMX_IndexConfigCommonRotate(err %d)", err);
+        }
+    }
+
     return err;
 }
 
@@ -4757,15 +4824,15 @@
         unsigned int numLayers = 0;
         unsigned int numBLayers = 0;
         int tags;
-        char dummy;
-        if (sscanf(tsSchema.c_str(), "webrtc.vp8.%u-layer%c", &numLayers, &dummy) == 1
+        char tmp;
+        if (sscanf(tsSchema.c_str(), "webrtc.vp8.%u-layer%c", &numLayers, &tmp) == 1
                 && numLayers > 0) {
             pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC;
             tsType = OMX_VIDEO_AndroidTemporalLayeringPatternWebRTC;
             tsLayers = numLayers;
         } else if ((tags = sscanf(tsSchema.c_str(), "android.generic.%u%c%u%c",
-                        &numLayers, &dummy, &numBLayers, &dummy))
-                && (tags == 1 || (tags == 3 && dummy == '+'))
+                        &numLayers, &tmp, &numBLayers, &tmp))
+                && (tags == 1 || (tags == 3 && tmp == '+'))
                 && numLayers > 0 && numLayers < UINT32_MAX - numBLayers) {
             pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC;
             // VPX does not have a concept of B-frames, so just count all layers
@@ -5722,7 +5789,8 @@
 
 ACodec::BaseState::BaseState(ACodec *codec, const sp<AState> &parentState)
     : AState(parentState),
-      mCodec(codec) {
+      mCodec(codec),
+      mPendingExtraOutputMetadataBufferRequest(false) {
 }
 
 ACodec::BaseState::PortMode ACodec::BaseState::getPortMode(
@@ -5766,17 +5834,19 @@
 
         case ACodec::kWhatSetSurface:
         {
-            sp<AReplyToken> replyID;
-            CHECK(msg->senderAwaitsResponse(&replyID));
-
             sp<RefBase> obj;
             CHECK(msg->findObject("surface", &obj));
 
             status_t err = mCodec->handleSetSurface(static_cast<Surface *>(obj.get()));
 
-            sp<AMessage> response = new AMessage;
-            response->setInt32("err", err);
-            response->postReply(replyID);
+            sp<AReplyToken> replyID;
+            if (msg->senderAwaitsResponse(&replyID)) {
+                sp<AMessage> response = new AMessage;
+                response->setInt32("err", err);
+                response->postReply(replyID);
+            } else if (err != OK) {
+                mCodec->signalError(OMX_ErrorUndefined, err);
+            }
             break;
         }
 
@@ -5823,6 +5893,21 @@
             break;
         }
 
+        case kWhatSubmitExtraOutputMetadataBuffer: {
+            mPendingExtraOutputMetadataBufferRequest = false;
+            if (getPortMode(kPortIndexOutput) == RESUBMIT_BUFFERS && mCodec->mIsLowLatency) {
+                // Decoders often need more than one output buffer to be
+                // submitted before processing a single input buffer.
+                // For low latency codecs, we don't want to wait for more input
+                // to be queued to get those output buffers submitted.
+                if (mCodec->submitOutputMetadataBuffer() == OK
+                        && mCodec->mMetadataBuffersToSubmit > 0) {
+                    maybePostExtraOutputMetadataBufferRequest();
+                }
+            }
+            break;
+        }
+
         default:
             return false;
     }
@@ -5974,6 +6059,12 @@
         return false;
     }
 
+    if (mCodec->mIsStreamCorruptFree && data1 == (OMX_U32)OMX_ErrorStreamCorrupt) {
+        ALOGV("[%s] handle OMX_ErrorStreamCorrupt as a normal operation",
+                mCodec->mComponentName.c_str());
+        return true;
+    }
+
     ALOGE("[%s] ERROR(0x%08x)", mCodec->mComponentName.c_str(), data1);
 
     // verify OMX component sends back an error we expect.
@@ -6081,6 +6172,13 @@
         return;
     }
 
+    int32_t cvo;
+    if (mCodec->mNativeWindow != NULL && buffer != NULL &&
+            buffer->meta()->findInt32("cvo", &cvo)) {
+        ALOGV("cvo(%d) found in buffer #%u", cvo, bufferID);
+        setNativeWindowRotation(mCodec->mNativeWindow.get(), cvo);
+    }
+
     info->mStatus = BufferInfo::OWNED_BY_US;
     info->mData = buffer;
 
@@ -6179,7 +6277,12 @@
                             (outputMode == FREE_BUFFERS ? "FREE" :
                              outputMode == KEEP_BUFFERS ? "KEEP" : "RESUBMIT"));
                     if (outputMode == RESUBMIT_BUFFERS) {
-                        mCodec->submitOutputMetadataBuffer();
+                        status_t err = mCodec->submitOutputMetadataBuffer();
+                        if (mCodec->mIsLowLatency
+                                && err == OK
+                                && mCodec->mMetadataBuffersToSubmit > 0) {
+                            maybePostExtraOutputMetadataBufferRequest();
+                        }
                     }
                 }
                 info->checkReadFence("onInputBufferFilled");
@@ -7325,6 +7428,9 @@
                 break;
         }
     }
+    if (mCodec->mIsLowLatency) {
+        maybePostExtraOutputMetadataBufferRequest();
+    }
 
     // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED ***
     mCodec->signalSubmitOutputMetadataBufferIfEOS_workaround();
@@ -7631,8 +7737,8 @@
         mInputFormat->setInt64("android._stop-time-offset-us", stopTimeOffsetUs);
     }
 
-    int32_t dummy;
-    if (params->findInt32("request-sync", &dummy)) {
+    int32_t tmp;
+    if (params->findInt32("request-sync", &tmp)) {
         status_t err = requestIDRFrame();
 
         if (err != OK) {
@@ -8207,17 +8313,38 @@
             FALLTHROUGH_INTENDED;
         }
         case kWhatResume:
-        case kWhatSetParameters:
         {
-            if (msg->what() == kWhatResume) {
-                ALOGV("[%s] Deferring resume", mCodec->mComponentName.c_str());
-            }
+            ALOGV("[%s] Deferring resume", mCodec->mComponentName.c_str());
 
             mCodec->deferMessage(msg);
             handled = true;
             break;
         }
 
+        case kWhatSetParameters:
+        {
+            sp<AMessage> params;
+            CHECK(msg->findMessage("params", &params));
+
+            sp<ABuffer> hdr10PlusInfo;
+            if (params->findBuffer("hdr10-plus-info", &hdr10PlusInfo)) {
+                if (hdr10PlusInfo != nullptr && hdr10PlusInfo->size() > 0) {
+                    (void)mCodec->setHdr10PlusInfo(hdr10PlusInfo);
+                }
+                params->removeEntryAt(params->findEntryByName("hdr10-plus-info"));
+
+                if (params->countEntries() == 0) {
+                    msg->removeEntryAt(msg->findEntryByName("params"));
+                }
+            }
+
+            if (msg->countEntries() > 0) {
+                mCodec->deferMessage(msg);
+            }
+            handled = true;
+            break;
+        }
+
         case kWhatForceStateTransition:
         {
             int32_t generation = 0;
@@ -8228,6 +8355,23 @@
             break;
         }
 
+        case kWhatSetSurface:
+        {
+            ALOGV("[%s] Deferring setSurface", mCodec->mComponentName.c_str());
+
+            sp<AReplyToken> replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+
+            mCodec->deferMessage(msg);
+
+            sp<AMessage> response = new AMessage;
+            response->setInt32("err", OK);
+            response->postReply(replyID);
+
+            handled = true;
+            break;
+        }
+
         case kWhatCheckIfStuck:
         {
             int32_t generation = 0;
@@ -8328,6 +8472,15 @@
             return false;
         }
 
+        case OMX_EventConfigUpdate:
+        {
+            CHECK_EQ(data1, (OMX_U32)kPortIndexOutput);
+
+            mCodec->onConfigUpdate((OMX_INDEXTYPE)data2);
+
+            return true;
+        }
+
         default:
             return BaseState::onOMXEvent(event, data1, data2);
     }
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index 3bccb7b..16977d7 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -8,6 +8,12 @@
         "com.android.media.swcodec",
     ],
     min_sdk_version: "29",
+    host_supported: true,
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
 }
 
 cc_library_static {
@@ -34,6 +40,12 @@
         "libstagefright_foundation",
         "libutils"
     ],
+    host_supported: true,
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
 }
 
 cc_library_static {
@@ -58,9 +70,18 @@
     },
 
     header_libs: [
+        "libaudioclient_headers",
         "libstagefright_foundation_headers",
+        "media_ndk_headers",
     ],
-    shared_libs: ["libmediandk"],
+
+    host_supported: true,
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
+
     export_include_dirs: ["include"],
 }
 
@@ -132,8 +153,10 @@
 
     header_libs: [
         "libaudioclient_headers",
-        "libmedia_headers",
+        "libbase_headers",
+        "libmedia_datasource_headers",
         "media_ndk_headers",
+        "media_plugin_headers",
     ],
 
     cflags: [
@@ -150,6 +173,18 @@
             "signed-integer-overflow",
         ],
     },
+
+    host_supported: true,
+    target: {
+        darwin: {
+            enabled: false,
+        },
+        linux: {
+            cflags: [
+                "-DDISABLE_AUDIO_SYSTEM_OFFLOAD",
+            ],
+        }
+    },
 }
 
 cc_library_shared {
@@ -176,7 +211,7 @@
     ],
 
     static_libs: [
-        "librenderengine",
+        "librenderfright",
     ],
 
     export_include_dirs: [
diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp
index 9b3f420..bcf418a 100644
--- a/media/libstagefright/CameraSource.cpp
+++ b/media/libstagefright/CameraSource.cpp
@@ -46,88 +46,6 @@
 
 static const int64_t CAMERA_SOURCE_TIMEOUT_NS = 3000000000LL;
 
-struct CameraSourceListener : public CameraListener {
-    explicit CameraSourceListener(const sp<CameraSource> &source);
-
-    virtual void notify(int32_t msgType, int32_t ext1, int32_t ext2);
-    virtual void postData(int32_t msgType, const sp<IMemory> &dataPtr,
-                          camera_frame_metadata_t *metadata);
-
-    virtual void postDataTimestamp(
-            nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr);
-
-    virtual void postRecordingFrameHandleTimestamp(nsecs_t timestamp, native_handle_t* handle);
-
-    virtual void postRecordingFrameHandleTimestampBatch(
-                const std::vector<nsecs_t>& timestamps,
-                const std::vector<native_handle_t*>& handles);
-
-protected:
-    virtual ~CameraSourceListener();
-
-private:
-    wp<CameraSource> mSource;
-
-    CameraSourceListener(const CameraSourceListener &);
-    CameraSourceListener &operator=(const CameraSourceListener &);
-};
-
-CameraSourceListener::CameraSourceListener(const sp<CameraSource> &source)
-    : mSource(source) {
-}
-
-CameraSourceListener::~CameraSourceListener() {
-}
-
-void CameraSourceListener::notify(int32_t msgType, int32_t ext1, int32_t ext2) {
-    UNUSED_UNLESS_VERBOSE(msgType);
-    UNUSED_UNLESS_VERBOSE(ext1);
-    UNUSED_UNLESS_VERBOSE(ext2);
-    ALOGV("notify(%d, %d, %d)", msgType, ext1, ext2);
-}
-
-void CameraSourceListener::postData(int32_t msgType, const sp<IMemory> &dataPtr,
-                                    camera_frame_metadata_t * /* metadata */) {
-    ALOGV("postData(%d, ptr:%p, size:%zu)",
-         msgType, dataPtr->unsecurePointer(), dataPtr->size());
-
-    sp<CameraSource> source = mSource.promote();
-    if (source.get() != NULL) {
-        source->dataCallback(msgType, dataPtr);
-    }
-}
-
-void CameraSourceListener::postDataTimestamp(
-        nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) {
-
-    sp<CameraSource> source = mSource.promote();
-    if (source.get() != NULL) {
-        source->dataCallbackTimestamp(timestamp/1000, msgType, dataPtr);
-    }
-}
-
-void CameraSourceListener::postRecordingFrameHandleTimestamp(nsecs_t timestamp,
-        native_handle_t* handle) {
-    sp<CameraSource> source = mSource.promote();
-    if (source.get() != nullptr) {
-        source->recordingFrameHandleCallbackTimestamp(timestamp/1000, handle);
-    }
-}
-
-void CameraSourceListener::postRecordingFrameHandleTimestampBatch(
-        const std::vector<nsecs_t>& timestamps,
-        const std::vector<native_handle_t*>& handles) {
-    sp<CameraSource> source = mSource.promote();
-    if (source.get() != nullptr) {
-        int n = timestamps.size();
-        std::vector<nsecs_t> modifiedTimestamps(n);
-        for (int i = 0; i < n; i++) {
-            modifiedTimestamps[i] = timestamps[i] / 1000;
-        }
-        source->recordingFrameHandleCallbackTimestampBatch(modifiedTimestamps, handles);
-    }
-}
-
 static int32_t getColorFormat(const char* colorFormat) {
     if (!colorFormat) {
         ALOGE("Invalid color format");
@@ -169,16 +87,6 @@
     return -1;
 }
 
-CameraSource *CameraSource::Create(const String16 &clientName) {
-    Size size;
-    size.width = -1;
-    size.height = -1;
-
-    sp<hardware::ICamera> camera;
-    return new CameraSource(camera, NULL, 0, clientName, Camera::USE_CALLING_UID,
-            Camera::USE_CALLING_PID, size, -1, NULL, false);
-}
-
 // static
 CameraSource *CameraSource::CreateFromCamera(
     const sp<hardware::ICamera>& camera,
@@ -189,12 +97,10 @@
     pid_t clientPid,
     Size videoSize,
     int32_t frameRate,
-    const sp<IGraphicBufferProducer>& surface,
-    bool storeMetaDataInVideoBuffers) {
+    const sp<IGraphicBufferProducer>& surface) {
 
     CameraSource *source = new CameraSource(camera, proxy, cameraId,
-            clientName, clientUid, clientPid, videoSize, frameRate, surface,
-            storeMetaDataInVideoBuffers);
+            clientName, clientUid, clientPid, videoSize, frameRate, surface);
     return source;
 }
 
@@ -207,8 +113,7 @@
     pid_t clientPid,
     Size videoSize,
     int32_t frameRate,
-    const sp<IGraphicBufferProducer>& surface,
-    bool storeMetaDataInVideoBuffers)
+    const sp<IGraphicBufferProducer>& surface)
     : mCameraFlags(0),
       mNumInputBuffers(0),
       mVideoFrameRate(-1),
@@ -231,8 +136,7 @@
 
     mInitCheck = init(camera, proxy, cameraId,
                     clientName, clientUid, clientPid,
-                    videoSize, frameRate,
-                    storeMetaDataInVideoBuffers);
+                    videoSize, frameRate);
     if (mInitCheck != OK) releaseCamera();
 }
 
@@ -531,15 +435,13 @@
         uid_t clientUid,
         pid_t clientPid,
         Size videoSize,
-        int32_t frameRate,
-        bool storeMetaDataInVideoBuffers) {
+        int32_t frameRate) {
 
     ALOGV("init");
     status_t err = OK;
     int64_t token = IPCThreadState::self()->clearCallingIdentity();
     err = initWithCameraAccess(camera, proxy, cameraId, clientName, clientUid, clientPid,
-                               videoSize, frameRate,
-                               storeMetaDataInVideoBuffers);
+                               videoSize, frameRate);
     IPCThreadState::self()->restoreCallingIdentity(token);
     return err;
 }
@@ -626,8 +528,7 @@
         uid_t clientUid,
         pid_t clientPid,
         Size videoSize,
-        int32_t frameRate,
-        bool storeMetaDataInVideoBuffers) {
+        int32_t frameRate) {
     ALOGV("initWithCameraAccess");
     status_t err = OK;
 
@@ -667,24 +568,12 @@
         CHECK_EQ((status_t)OK, mCamera->setPreviewTarget(mSurface));
     }
 
-    // By default, store real data in video buffers.
-    mVideoBufferMode = hardware::ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_YUV;
-    if (storeMetaDataInVideoBuffers) {
-        if (OK == mCamera->setVideoBufferMode(hardware::ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE)) {
-            mVideoBufferMode = hardware::ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE;
-        } else if (OK == mCamera->setVideoBufferMode(
-                hardware::ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA)) {
-            mVideoBufferMode = hardware::ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA;
-        }
-    }
-
-    if (mVideoBufferMode == hardware::ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_YUV) {
-        err = mCamera->setVideoBufferMode(hardware::ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_YUV);
-        if (err != OK) {
-            ALOGE("%s: Setting video buffer mode to VIDEO_BUFFER_MODE_DATA_CALLBACK_YUV failed: "
-                    "%s (err=%d)", __FUNCTION__, strerror(-err), err);
-            return err;
-        }
+    // Use buffer queue to receive video buffers from camera
+    err = mCamera->setVideoBufferMode(hardware::ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE);
+    if (err != OK) {
+        ALOGE("%s: Setting video buffer mode to VIDEO_BUFFER_MODE_BUFFER_QUEUE failed: "
+                "%s (err=%d)", __FUNCTION__, strerror(-err), err);
+        return err;
     }
 
     int64_t glitchDurationUs = (1000000LL / mVideoFrameRate);
@@ -724,54 +613,26 @@
     int64_t token = IPCThreadState::self()->clearCallingIdentity();
     status_t err;
 
-    if (mVideoBufferMode == hardware::ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE) {
-        // Initialize buffer queue.
-        err = initBufferQueue(mVideoSize.width, mVideoSize.height, mEncoderFormat,
-                (android_dataspace_t)mEncoderDataSpace,
-                mNumInputBuffers > 0 ? mNumInputBuffers : 1);
-        if (err != OK) {
-            ALOGE("%s: Failed to initialize buffer queue: %s (err=%d)", __FUNCTION__,
-                    strerror(-err), err);
-            return err;
-        }
-    } else {
-        if (mNumInputBuffers > 0) {
-            err = mCamera->sendCommand(
-                CAMERA_CMD_SET_VIDEO_BUFFER_COUNT, mNumInputBuffers, 0);
-
-            // This could happen for CameraHAL1 clients; thus the failure is
-            // not a fatal error
-            if (err != OK) {
-                ALOGW("Failed to set video buffer count to %d due to %d",
-                    mNumInputBuffers, err);
-            }
-        }
-
-        err = mCamera->sendCommand(
-            CAMERA_CMD_SET_VIDEO_FORMAT, mEncoderFormat, mEncoderDataSpace);
-
-        // This could happen for CameraHAL1 clients; thus the failure is
-        // not a fatal error
-        if (err != OK) {
-            ALOGW("Failed to set video encoder format/dataspace to %d, %d due to %d",
-                    mEncoderFormat, mEncoderDataSpace, err);
-        }
-
-        // Create memory heap to store buffers as VideoNativeMetadata.
-        createVideoBufferMemoryHeap(sizeof(VideoNativeHandleMetadata), kDefaultVideoBufferCount);
+    // Initialize buffer queue.
+    err = initBufferQueue(mVideoSize.width, mVideoSize.height, mEncoderFormat,
+            (android_dataspace_t)mEncoderDataSpace,
+            mNumInputBuffers > 0 ? mNumInputBuffers : 1);
+    if (err != OK) {
+        ALOGE("%s: Failed to initialize buffer queue: %s (err=%d)", __FUNCTION__,
+                strerror(-err), err);
+        return err;
     }
 
+    // Start data flow
     err = OK;
     if (mCameraFlags & FLAGS_HOT_CAMERA) {
         mCamera->unlock();
         mCamera.clear();
-        if ((err = mCameraRecordingProxy->startRecording(
-                new ProxyListener(this))) != OK) {
+        if ((err = mCameraRecordingProxy->startRecording()) != OK) {
             ALOGE("Failed to start recording, received error: %s (%d)",
                     strerror(-err), err);
         }
     } else {
-        mCamera->setListener(new CameraSourceListener(this));
         mCamera->startRecording();
         if (!mCamera->recordingEnabled()) {
             err = -EINVAL;
@@ -836,7 +697,6 @@
         }
     } else {
         if (mCamera != 0) {
-            mCamera->setListener(NULL);
             mCamera->stopRecording();
         }
     }
@@ -935,97 +795,31 @@
 void CameraSource::releaseRecordingFrame(const sp<IMemory>& frame) {
     ALOGV("releaseRecordingFrame");
 
-    if (mVideoBufferMode == hardware::ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE) {
-        // Return the buffer to buffer queue in VIDEO_BUFFER_MODE_BUFFER_QUEUE mode.
-        ssize_t offset;
-        size_t size;
-        sp<IMemoryHeap> heap = frame->getMemory(&offset, &size);
-        if (heap->getHeapID() != mMemoryHeapBase->getHeapID()) {
-            ALOGE("%s: Mismatched heap ID, ignoring release (got %x, expected %x)", __FUNCTION__,
-                    heap->getHeapID(), mMemoryHeapBase->getHeapID());
-            return;
-        }
-
-        VideoNativeMetadata *payload = reinterpret_cast<VideoNativeMetadata*>(
-                (uint8_t*)heap->getBase() + offset);
-
-        // Find the corresponding buffer item for the native window buffer.
-        ssize_t index = mReceivedBufferItemMap.indexOfKey(payload->pBuffer);
-        if (index == NAME_NOT_FOUND) {
-            ALOGE("%s: Couldn't find buffer item for %p", __FUNCTION__, payload->pBuffer);
-            return;
-        }
-
-        BufferItem buffer = mReceivedBufferItemMap.valueAt(index);
-        mReceivedBufferItemMap.removeItemsAt(index);
-        mVideoBufferConsumer->releaseBuffer(buffer);
-        mMemoryBases.push_back(frame);
-        mMemoryBaseAvailableCond.signal();
-    } else {
-        native_handle_t* handle = nullptr;
-
-        // Check if frame contains a VideoNativeHandleMetadata.
-        if (frame->size() == sizeof(VideoNativeHandleMetadata)) {
-          // TODO: Using unsecurePointer() has some associated security pitfalls
-          //       (see declaration for details).
-          //       Either document why it is safe in this case or address the
-          //       issue (e.g. by copying).
-           VideoNativeHandleMetadata *metadata =
-                (VideoNativeHandleMetadata*)(frame->unsecurePointer());
-            if (metadata->eType == kMetadataBufferTypeNativeHandleSource) {
-                handle = metadata->pHandle;
-            }
-        }
-
-        if (handle != nullptr) {
-            ssize_t offset;
-            size_t size;
-            sp<IMemoryHeap> heap = frame->getMemory(&offset, &size);
-            if (heap->getHeapID() != mMemoryHeapBase->getHeapID()) {
-                ALOGE("%s: Mismatched heap ID, ignoring release (got %x, expected %x)",
-		     __FUNCTION__, heap->getHeapID(), mMemoryHeapBase->getHeapID());
-                return;
-            }
-            uint32_t batchSize = 0;
-            {
-                Mutex::Autolock autoLock(mBatchLock);
-                if (mInflightBatchSizes.size() > 0) {
-                    batchSize = mInflightBatchSizes[0];
-                }
-            }
-            if (batchSize == 0) { // return buffers one by one
-                // Frame contains a VideoNativeHandleMetadata. Send the handle back to camera.
-                releaseRecordingFrameHandle(handle);
-                mMemoryBases.push_back(frame);
-                mMemoryBaseAvailableCond.signal();
-            } else { // Group buffers in batch then return
-                Mutex::Autolock autoLock(mBatchLock);
-                mInflightReturnedHandles.push_back(handle);
-                mInflightReturnedMemorys.push_back(frame);
-                if (mInflightReturnedHandles.size() == batchSize) {
-                    releaseRecordingFrameHandleBatch(mInflightReturnedHandles);
-
-                    mInflightBatchSizes.pop_front();
-                    mInflightReturnedHandles.clear();
-                    for (const auto& mem : mInflightReturnedMemorys) {
-                        mMemoryBases.push_back(mem);
-                        mMemoryBaseAvailableCond.signal();
-                    }
-                    mInflightReturnedMemorys.clear();
-                }
-            }
-
-        } else if (mCameraRecordingProxy != nullptr) {
-            // mCamera is created by application. Return the frame back to camera via camera
-            // recording proxy.
-            mCameraRecordingProxy->releaseRecordingFrame(frame);
-        } else if (mCamera != nullptr) {
-            // mCamera is created by CameraSource. Return the frame directly back to camera.
-            int64_t token = IPCThreadState::self()->clearCallingIdentity();
-            mCamera->releaseRecordingFrame(frame);
-            IPCThreadState::self()->restoreCallingIdentity(token);
-        }
+    // Return the buffer to buffer queue in VIDEO_BUFFER_MODE_BUFFER_QUEUE mode.
+    ssize_t offset;
+    size_t size;
+    sp<IMemoryHeap> heap = frame->getMemory(&offset, &size);
+    if (heap->getHeapID() != mMemoryHeapBase->getHeapID()) {
+        ALOGE("%s: Mismatched heap ID, ignoring release (got %x, expected %x)", __FUNCTION__,
+                heap->getHeapID(), mMemoryHeapBase->getHeapID());
+        return;
     }
+
+    VideoNativeMetadata *payload = reinterpret_cast<VideoNativeMetadata*>(
+        (uint8_t*)heap->getBase() + offset);
+
+    // Find the corresponding buffer item for the native window buffer.
+    ssize_t index = mReceivedBufferItemMap.indexOfKey(payload->pBuffer);
+    if (index == NAME_NOT_FOUND) {
+        ALOGE("%s: Couldn't find buffer item for %p", __FUNCTION__, payload->pBuffer);
+        return;
+    }
+
+    BufferItem buffer = mReceivedBufferItemMap.valueAt(index);
+    mReceivedBufferItemMap.removeItemsAt(index);
+    mVideoBufferConsumer->releaseBuffer(buffer);
+    mMemoryBases.push_back(frame);
+    mMemoryBaseAvailableCond.signal();
 }
 
 void CameraSource::releaseQueuedFrames() {
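Note on the hunk above: with the metadata and YUV callback paths removed, the buffer-queue-only releaseRecordingFrame() does one job: map the IMemory handed back by the encoder to the BufferItem it wraps, return that buffer to the consumer, and recycle the IMemory. A minimal sketch of that bookkeeping using plain std:: containers; FrameHandle, BufferSlot and RecordingFramePool are hypothetical stand-ins, not framework types.

    #include <cstdio>
    #include <deque>
    #include <unordered_map>

    struct BufferSlot { int id; };            // stands in for BufferItem
    struct FrameHandle { const void* key; };  // stands in for the IMemory payload's pBuffer

    class RecordingFramePool {
    public:
        // Called when a frame is handed to the encoder: remember which slot backs it.
        void onFrameQueued(const FrameHandle& frame, const BufferSlot& slot) {
            mInFlight[frame.key] = slot;
        }
        // Called when the encoder returns the frame: release the slot and recycle the handle.
        bool onFrameReleased(const FrameHandle& frame) {
            auto it = mInFlight.find(frame.key);
            if (it == mInFlight.end()) {
                std::fprintf(stderr, "no buffer slot for %p\n", frame.key);
                return false;
            }
            releaseToConsumer(it->second);        // mVideoBufferConsumer->releaseBuffer()
            mInFlight.erase(it);
            mFreeHandles.push_back(frame);        // mMemoryBases.push_back() + signal
            return true;
        }
    private:
        void releaseToConsumer(const BufferSlot& slot) {
            std::printf("released slot %d back to the buffer queue\n", slot.id);
        }
        std::unordered_map<const void*, BufferSlot> mInFlight;  // mReceivedBufferItemMap
        std::deque<FrameHandle> mFreeHandles;                   // mMemoryBases
    };

    int main() {
        RecordingFramePool pool;
        int backing = 7;
        pool.onFrameQueued({&backing}, {42});
        pool.onFrameReleased({&backing});
    }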
@@ -1181,152 +975,6 @@
     return false;
 }
 
-void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
-        int32_t msgType __unused, const sp<IMemory> &data) {
-    ALOGV("dataCallbackTimestamp: timestamp %lld us", (long long)timestampUs);
-    Mutex::Autolock autoLock(mLock);
-
-    if (shouldSkipFrameLocked(timestampUs)) {
-        releaseOneRecordingFrame(data);
-        return;
-    }
-
-    ++mNumFramesReceived;
-
-    CHECK(data != NULL && data->size() > 0);
-    mFramesReceived.push_back(data);
-    int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
-    mFrameTimes.push_back(timeUs);
-    ALOGV("initial delay: %" PRId64 ", current time stamp: %" PRId64,
-        mStartTimeUs, timeUs);
-    mFrameAvailableCondition.signal();
-}
-
-void CameraSource::releaseRecordingFrameHandle(native_handle_t* handle) {
-    if (mCameraRecordingProxy != nullptr) {
-        mCameraRecordingProxy->releaseRecordingFrameHandle(handle);
-    } else if (mCamera != nullptr) {
-        int64_t token = IPCThreadState::self()->clearCallingIdentity();
-        mCamera->releaseRecordingFrameHandle(handle);
-        IPCThreadState::self()->restoreCallingIdentity(token);
-    } else {
-        native_handle_close(handle);
-        native_handle_delete(handle);
-    }
-}
-
-void CameraSource::releaseRecordingFrameHandleBatch(const std::vector<native_handle_t*>& handles) {
-    if (mCameraRecordingProxy != nullptr) {
-        mCameraRecordingProxy->releaseRecordingFrameHandleBatch(handles);
-    } else if (mCamera != nullptr) {
-        int64_t token = IPCThreadState::self()->clearCallingIdentity();
-        mCamera->releaseRecordingFrameHandleBatch(handles);
-        IPCThreadState::self()->restoreCallingIdentity(token);
-    } else {
-        for (auto& handle : handles) {
-            native_handle_close(handle);
-            native_handle_delete(handle);
-        }
-    }
-}
-
-void CameraSource::recordingFrameHandleCallbackTimestamp(int64_t timestampUs,
-                native_handle_t* handle) {
-    ALOGV("%s: timestamp %lld us", __FUNCTION__, (long long)timestampUs);
-    Mutex::Autolock autoLock(mLock);
-    if (handle == nullptr) return;
-
-    if (shouldSkipFrameLocked(timestampUs)) {
-        releaseRecordingFrameHandle(handle);
-        return;
-    }
-
-    while (mMemoryBases.empty()) {
-        if (mMemoryBaseAvailableCond.waitRelative(mLock, kMemoryBaseAvailableTimeoutNs) ==
-                TIMED_OUT) {
-            ALOGW("Waiting on an available memory base timed out. Dropping a recording frame.");
-            releaseRecordingFrameHandle(handle);
-            return;
-        }
-    }
-
-    ++mNumFramesReceived;
-
-    sp<IMemory> data = *mMemoryBases.begin();
-    mMemoryBases.erase(mMemoryBases.begin());
-
-    // Wrap native handle in sp<IMemory> so it can be pushed to mFramesReceived.
-    VideoNativeHandleMetadata *metadata = (VideoNativeHandleMetadata*)(data->unsecurePointer());
-    metadata->eType = kMetadataBufferTypeNativeHandleSource;
-    metadata->pHandle = handle;
-
-    mFramesReceived.push_back(data);
-    int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
-    mFrameTimes.push_back(timeUs);
-    ALOGV("initial delay: %" PRId64 ", current time stamp: %" PRId64, mStartTimeUs, timeUs);
-    mFrameAvailableCondition.signal();
-}
-
-void CameraSource::recordingFrameHandleCallbackTimestampBatch(
-        const std::vector<int64_t>& timestampsUs,
-        const std::vector<native_handle_t*>& handles) {
-    size_t n = timestampsUs.size();
-    if (n != handles.size()) {
-        ALOGE("%s: timestampsUs(%zu) and handles(%zu) size mismatch!",
-                __FUNCTION__, timestampsUs.size(), handles.size());
-    }
-
-    Mutex::Autolock autoLock(mLock);
-    int batchSize = 0;
-    for (size_t i = 0; i < n; i++) {
-        int64_t timestampUs = timestampsUs[i];
-        native_handle_t* handle = handles[i];
-
-        ALOGV("%s: timestamp %lld us", __FUNCTION__, (long long)timestampUs);
-        if (handle == nullptr) continue;
-
-        if (shouldSkipFrameLocked(timestampUs)) {
-            releaseRecordingFrameHandle(handle);
-            continue;
-        }
-
-        while (mMemoryBases.empty()) {
-            if (mMemoryBaseAvailableCond.waitRelative(mLock, kMemoryBaseAvailableTimeoutNs) ==
-                    TIMED_OUT) {
-                ALOGW("Waiting on an available memory base timed out. Dropping a recording frame.");
-                releaseRecordingFrameHandle(handle);
-                continue;
-            }
-        }
-        ++batchSize;
-        ++mNumFramesReceived;
-        sp<IMemory> data = *mMemoryBases.begin();
-        mMemoryBases.erase(mMemoryBases.begin());
-
-        // Wrap native handle in sp<IMemory> so it can be pushed to mFramesReceived.
-        // TODO: Using unsecurePointer() has some associated security pitfalls
-        //       (see declaration for details).
-        //       Either document why it is safe in this case or address the
-        //       issue (e.g. by copying).
-        VideoNativeHandleMetadata *metadata = (VideoNativeHandleMetadata*)(data->unsecurePointer());
-        metadata->eType = kMetadataBufferTypeNativeHandleSource;
-        metadata->pHandle = handle;
-
-        mFramesReceived.push_back(data);
-        int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
-        mFrameTimes.push_back(timeUs);
-        ALOGV("initial delay: %" PRId64 ", current time stamp: %" PRId64, mStartTimeUs, timeUs);
-
-    }
-    if (batchSize > 0) {
-        Mutex::Autolock autoLock(mBatchLock);
-        mInflightBatchSizes.push_back(batchSize);
-    }
-    for (int i = 0; i < batchSize; i++) {
-        mFrameAvailableCondition.signal();
-    }
-}
-
 CameraSource::BufferQueueListener::BufferQueueListener(const sp<BufferItemConsumer>& consumer,
         const sp<CameraSource>& cameraSource) {
     mConsumer = consumer;
@@ -1417,41 +1065,7 @@
 MetadataBufferType CameraSource::metaDataStoredInVideoBuffers() const {
     ALOGV("metaDataStoredInVideoBuffers");
 
-    // Output buffers will contain metadata if camera sends us buffer in metadata mode or via
-    // buffer queue.
-    switch (mVideoBufferMode) {
-        case hardware::ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA:
-            return kMetadataBufferTypeNativeHandleSource;
-        case hardware::ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE:
-            return kMetadataBufferTypeANWBuffer;
-        default:
-            return kMetadataBufferTypeInvalid;
-    }
-}
-
-CameraSource::ProxyListener::ProxyListener(const sp<CameraSource>& source) {
-    mSource = source;
-}
-
-void CameraSource::ProxyListener::dataCallbackTimestamp(
-        nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) {
-    mSource->dataCallbackTimestamp(timestamp / 1000, msgType, dataPtr);
-}
-
-void CameraSource::ProxyListener::recordingFrameHandleCallbackTimestamp(nsecs_t timestamp,
-        native_handle_t* handle) {
-    mSource->recordingFrameHandleCallbackTimestamp(timestamp / 1000, handle);
-}
-
-void CameraSource::ProxyListener::recordingFrameHandleCallbackTimestampBatch(
-        const std::vector<int64_t>& timestampsUs,
-        const std::vector<native_handle_t*>& handles) {
-    int n = timestampsUs.size();
-    std::vector<nsecs_t> modifiedTimestamps(n);
-    for (int i = 0; i < n; i++) {
-        modifiedTimestamps[i] = timestampsUs[i] / 1000;
-    }
-    mSource->recordingFrameHandleCallbackTimestampBatch(modifiedTimestamps, handles);
+    return kMetadataBufferTypeANWBuffer;
 }
 
 void CameraSource::DeathNotifier::binderDied(const wp<IBinder>& who __unused) {
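The retained init() path above still brackets the camera access with IPCThreadState::self()->clearCallingIdentity() and restoreCallingIdentity(). A hypothetical RAII helper (not part of this change) makes that pairing safe across early returns; the save/restore callables are injected so the sketch compiles without libbinder.

    #include <cstdint>
    #include <functional>
    #include <utility>

    class ScopedCallingIdentity {
    public:
        ScopedCallingIdentity(std::function<int64_t()> clear,
                              std::function<void(int64_t)> restore)
            : mRestore(std::move(restore)), mToken(clear()) {}
        ~ScopedCallingIdentity() { mRestore(mToken); }
        ScopedCallingIdentity(const ScopedCallingIdentity&) = delete;
        ScopedCallingIdentity& operator=(const ScopedCallingIdentity&) = delete;
    private:
        std::function<void(int64_t)> mRestore;
        int64_t mToken;
    };

    // Usage with libbinder would look like:
    //   ScopedCallingIdentity identity(
    //       [] { return IPCThreadState::self()->clearCallingIdentity(); },
    //       [](int64_t t) { IPCThreadState::self()->restoreCallingIdentity(t); });
    //   return initWithCameraAccess(...);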
diff --git a/media/libstagefright/CameraSourceTimeLapse.cpp b/media/libstagefright/CameraSourceTimeLapse.cpp
index e0a6eb3..50a512f 100644
--- a/media/libstagefright/CameraSourceTimeLapse.cpp
+++ b/media/libstagefright/CameraSourceTimeLapse.cpp
@@ -45,15 +45,13 @@
         Size videoSize,
         int32_t videoFrameRate,
         const sp<IGraphicBufferProducer>& surface,
-        int64_t timeBetweenFrameCaptureUs,
-        bool storeMetaDataInVideoBuffers) {
+        int64_t timeBetweenFrameCaptureUs) {
 
     CameraSourceTimeLapse *source = new
             CameraSourceTimeLapse(camera, proxy, cameraId,
                 clientName, clientUid, clientPid,
                 videoSize, videoFrameRate, surface,
-                timeBetweenFrameCaptureUs,
-                storeMetaDataInVideoBuffers);
+                timeBetweenFrameCaptureUs);
 
     if (source != NULL) {
         if (source->initCheck() != OK) {
@@ -74,11 +72,9 @@
         Size videoSize,
         int32_t videoFrameRate,
         const sp<IGraphicBufferProducer>& surface,
-        int64_t timeBetweenFrameCaptureUs,
-        bool storeMetaDataInVideoBuffers)
+        int64_t timeBetweenFrameCaptureUs)
       : CameraSource(camera, proxy, cameraId, clientName, clientUid, clientPid,
-                videoSize, videoFrameRate, surface,
-                storeMetaDataInVideoBuffers),
+                videoSize, videoFrameRate, surface),
       mTimeBetweenTimeLapseVideoFramesUs(1E6/videoFrameRate),
       mLastTimeLapseFrameRealTimestampUs(0),
       mSkipCurrentFrame(false) {
@@ -173,12 +169,6 @@
     ALOGV("signalBufferReturned");
     Mutex::Autolock autoLock(mQuickStopLock);
     if (mQuickStop && (buffer == mLastReadBufferCopy)) {
-        if (metaDataStoredInVideoBuffers() == kMetadataBufferTypeNativeHandleSource) {
-            native_handle_t* handle = (
-                (VideoNativeHandleMetadata*)(mLastReadBufferCopy->data()))->pHandle;
-            native_handle_close(handle);
-            native_handle_delete(handle);
-        }
         buffer->setObserver(NULL);
         buffer->release();
         mLastReadBufferCopy = NULL;
@@ -191,8 +181,7 @@
 void createMediaBufferCopy(
         const MediaBufferBase& sourceBuffer,
         int64_t frameTime,
-        MediaBufferBase **newBuffer,
-        int32_t videoBufferMode) {
+        MediaBufferBase **newBuffer) {
 
     ALOGV("createMediaBufferCopy");
     size_t sourceSize = sourceBuffer.size();
@@ -203,19 +192,13 @@
 
     (*newBuffer)->meta_data().setInt64(kKeyTime, frameTime);
 
-    if (videoBufferMode == kMetadataBufferTypeNativeHandleSource) {
-        ((VideoNativeHandleMetadata*)((*newBuffer)->data()))->pHandle =
-            native_handle_clone(
-                ((VideoNativeHandleMetadata*)(sourceBuffer.data()))->pHandle);
-    }
 }
 
 void CameraSourceTimeLapse::fillLastReadBufferCopy(MediaBufferBase& sourceBuffer) {
     ALOGV("fillLastReadBufferCopy");
     int64_t frameTime;
     CHECK(sourceBuffer.meta_data().findInt64(kKeyTime, &frameTime));
-    createMediaBufferCopy(sourceBuffer, frameTime, &mLastReadBufferCopy,
-        metaDataStoredInVideoBuffers());
+    createMediaBufferCopy(sourceBuffer, frameTime, &mLastReadBufferCopy);
     mLastReadBufferCopy->add_ref();
     mLastReadBufferCopy->setObserver(this);
 }
@@ -240,19 +223,6 @@
     }
 }
 
-sp<IMemory> CameraSourceTimeLapse::createIMemoryCopy(
-        const sp<IMemory> &source_data) {
-
-    ALOGV("createIMemoryCopy");
-    size_t source_size = source_data->size();
-    void* source_pointer = source_data->unsecurePointer();
-
-    sp<MemoryHeapBase> newMemoryHeap = new MemoryHeapBase(source_size);
-    sp<MemoryBase> newMemory = new MemoryBase(newMemoryHeap, 0, source_size);
-    memcpy(newMemory->unsecurePointer(), source_pointer, source_size);
-    return newMemory;
-}
-
 bool CameraSourceTimeLapse::skipCurrentFrame(int64_t /* timestampUs */) {
     ALOGV("skipCurrentFrame");
     if (mSkipCurrentFrame) {
@@ -318,31 +288,6 @@
     return false;
 }
 
-void CameraSourceTimeLapse::dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
-            const sp<IMemory> &data) {
-    ALOGV("dataCallbackTimestamp");
-    mSkipCurrentFrame = skipFrameAndModifyTimeStamp(&timestampUs);
-    CameraSource::dataCallbackTimestamp(timestampUs, msgType, data);
-}
-
-void CameraSourceTimeLapse::recordingFrameHandleCallbackTimestamp(int64_t timestampUs,
-            native_handle_t* handle) {
-    ALOGV("recordingFrameHandleCallbackTimestamp");
-    mSkipCurrentFrame = skipFrameAndModifyTimeStamp(&timestampUs);
-    CameraSource::recordingFrameHandleCallbackTimestamp(timestampUs, handle);
-}
-
-void CameraSourceTimeLapse::recordingFrameHandleCallbackTimestampBatch(
-        const std::vector<int64_t>& timestampsUs,
-        const std::vector<native_handle_t*>& handles) {
-    ALOGV("recordingFrameHandleCallbackTimestampBatch");
-    int n = timestampsUs.size();
-    for (int i = 0; i < n; i++) {
-        // Don't do batching for CameraSourceTimeLapse for now
-        recordingFrameHandleCallbackTimestamp(timestampsUs[i], handles[i]);
-    }
-}
-
 void CameraSourceTimeLapse::processBufferQueueFrame(BufferItem& buffer) {
     ALOGV("processBufferQueueFrame");
     int64_t timestampUs = buffer.mTimestamp / 1000;
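After this change CameraSourceTimeLapse applies its frame dropping and timestamp compression only in processBufferQueueFrame(), via skipFrameAndModifyTimeStamp(). The sketch below illustrates that policy in isolation: keep at most one frame per capture interval and rewrite its timestamp so the encoded stream plays back at the target rate. TimeLapsePolicy and its fields are hypothetical, not the framework implementation.

    #include <cstdint>

    class TimeLapsePolicy {
    public:
        TimeLapsePolicy(int64_t captureIntervalUs, int64_t playbackIntervalUs)
            : mCaptureIntervalUs(captureIntervalUs),
              mPlaybackIntervalUs(playbackIntervalUs) {}

        // Returns true if the frame should be kept; *timestampUs is rewritten in place.
        bool admitFrame(int64_t* timestampUs) {
            if (mLastRealTimestampUs < 0) {            // first frame: always keep
                mLastRealTimestampUs = *timestampUs;
                mLastOutputTimestampUs = *timestampUs;
                return true;
            }
            if (*timestampUs - mLastRealTimestampUs < mCaptureIntervalUs) {
                return false;                          // too soon: skip this frame
            }
            mLastRealTimestampUs = *timestampUs;
            mLastOutputTimestampUs += mPlaybackIntervalUs;
            *timestampUs = mLastOutputTimestampUs;     // compress wall-clock time
            return true;
        }

    private:
        const int64_t mCaptureIntervalUs;
        const int64_t mPlaybackIntervalUs;
        int64_t mLastRealTimestampUs = -1;
        int64_t mLastOutputTimestampUs = 0;
    };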
diff --git a/media/libstagefright/FrameCaptureProcessor.cpp b/media/libstagefright/FrameCaptureProcessor.cpp
index 96c1195..8cd7f82 100644
--- a/media/libstagefright/FrameCaptureProcessor.cpp
+++ b/media/libstagefright/FrameCaptureProcessor.cpp
@@ -164,14 +164,15 @@
 
     if (err != OK) {
         ALOGE("drawLayers returned err %d", err);
-        return err;
+    } else {
+        err = fence->wait(500);
+        if (err != OK) {
+            ALOGW("wait for fence returned err %d", err);
+            err = OK;
+        }
     }
-
-    err = fence->wait(500);
-    if (err != OK) {
-        ALOGW("wait for fence returned err %d", err);
-    }
-    return OK;
+    mRE->cleanupPostRender(renderengine::RenderEngine::CleanupMode::CLEAN_ALL);
+    return err;
 }
 
 void FrameCaptureProcessor::onMessageReceived(const sp<AMessage> &msg) {
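The capture() rework above makes cleanupPostRender() run whether drawLayers() fails or the fence wait times out, and demotes a fence timeout to a warning. The same control flow can be expressed with a scope guard; the snippet below is a generic illustration with placeholder draw/wait outcomes, not the RenderEngine API.

    #include <cstdio>
    #include <utility>

    template <typename F>
    class ScopeGuard {
    public:
        explicit ScopeGuard(F f) : mF(std::move(f)) {}
        ~ScopeGuard() { mF(); }
    private:
        F mF;
    };

    int captureFrame(bool drawOk, bool fenceOk) {
        ScopeGuard cleanup([] { std::puts("cleanupPostRender()"); });  // always runs
        if (!drawOk) {
            std::fprintf(stderr, "drawLayers failed\n");
            return -1;                     // hard failure, still cleaned up
        }
        if (!fenceOk) {
            std::fprintf(stderr, "fence wait timed out (non-fatal)\n");
        }
        return 0;                          // the timeout is not propagated as an error
    }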
diff --git a/media/libstagefright/FrameDecoder.cpp b/media/libstagefright/FrameDecoder.cpp
index 734f5bb..e783578 100644
--- a/media/libstagefright/FrameDecoder.cpp
+++ b/media/libstagefright/FrameDecoder.cpp
@@ -19,6 +19,7 @@
 
 #include "include/FrameDecoder.h"
 #include "include/FrameCaptureLayer.h"
+#include "include/HevcUtils.h"
 #include <binder/MemoryBase.h>
 #include <binder/MemoryHeapBase.h>
 #include <gui/Surface.h>
@@ -120,15 +121,23 @@
             false /*allocRotated*/, true /*metaOnly*/);
 }
 
+bool isAvif(const sp<MetaData> &trackMeta) {
+    const char *mime;
+    return trackMeta->findCString(kKeyMIMEType, &mime)
+        && (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AV1)
+            || !strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_AVIF));
+}
+
 bool findThumbnailInfo(
         const sp<MetaData> &trackMeta, int32_t *width, int32_t *height,
         uint32_t *type = NULL, const void **data = NULL, size_t *size = NULL) {
     uint32_t dummyType;
     const void *dummyData;
     size_t dummySize;
+    int codecConfigKey = isAvif(trackMeta) ? kKeyThumbnailAV1C : kKeyThumbnailHVCC;
     return trackMeta->findInt32(kKeyThumbnailWidth, width)
         && trackMeta->findInt32(kKeyThumbnailHeight, height)
-        && trackMeta->findData(kKeyThumbnailHVCC,
+        && trackMeta->findData(codecConfigKey,
                 type ?: &dummyType, data ?: &dummyData, size ?: &dummySize);
 }
 
@@ -456,7 +465,8 @@
         const sp<IMediaSource> &source)
     : FrameDecoder(componentName, trackMeta, source),
       mFrame(NULL),
-      mIsAvcOrHevc(false),
+      mIsAvc(false),
+      mIsHevc(false),
       mSeekMode(MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC),
       mTargetTimeUs(-1LL),
       mDefaultSampleDurationUs(0) {
@@ -479,8 +489,8 @@
         return NULL;
     }
 
-    mIsAvcOrHevc = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC)
-            || !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC);
+    mIsAvc = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC);
+    mIsHevc = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC);
 
     if (frameTimeUs < 0) {
         int64_t thumbNailTime = -1ll;
@@ -543,8 +553,10 @@
         ALOGV("Seeking closest: targetTimeUs=%lld", (long long)mTargetTimeUs);
     }
 
-    if (mIsAvcOrHevc && !isSeekingClosest
-            && IsIDR(codecBuffer->data(), codecBuffer->size())) {
+    if (!isSeekingClosest
+            && ((mIsAvc && IsIDR(codecBuffer->data(), codecBuffer->size()))
+            || (mIsHevc && HevcParameterSets::IsHevcIDR(
+                    codecBuffer->data(), codecBuffer->size())))) {

         // Only need to decode one IDR frame, unless we're seeking with CLOSEST
         // option, in which case we need to actually decode to targetTimeUs.
         *flags |= MediaCodec::BUFFER_FLAG_EOS;
@@ -748,7 +760,10 @@
         overrideMeta->remove(kKeyDisplayHeight);
         overrideMeta->setInt32(kKeyWidth, mWidth);
         overrideMeta->setInt32(kKeyHeight, mHeight);
-        overrideMeta->setData(kKeyHVCC, type, data, size);
+        // The AV1 codec configuration data is passed via CSD0 to the AV1
+        // decoder.
+        const int codecConfigKey = isAvif(trackMeta()) ? kKeyOpaqueCSD0 : kKeyHVCC;
+        overrideMeta->setData(codecConfigKey, type, data, size);
         options->setSeekTo(-1);
     } else {
         CHECK(trackMeta()->findInt32(kKeyWidth, &mWidth));
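The FrameDecoder changes above key the thumbnail/still-image codec configuration on the track MIME: AV1/AVIF content carries it under an AV1C (or opaque CSD0) key, HEVC/HEIC content under HVCC. A standalone sketch of that selection; the key constants below are stand-ins for the real MetaData keys, and the MIME strings mirror the MEDIA_MIMETYPE_* values used above.

    #include <strings.h>   // strcasecmp (POSIX)

    // Stand-ins for the kKeyThumbnailAV1C / kKeyThumbnailHVCC metadata keys.
    constexpr int kThumbConfigKeyAv1c = 1;
    constexpr int kThumbConfigKeyHvcc = 2;

    static bool isAv1OrAvif(const char* mime) {
        return mime != nullptr
            && (!strcasecmp(mime, "video/av01") || !strcasecmp(mime, "image/avif"));
    }

    static int thumbnailConfigKey(const char* mime) {
        return isAv1OrAvif(mime) ? kThumbConfigKeyAv1c : kThumbConfigKeyHvcc;
    }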
diff --git a/media/libstagefright/HevcUtils.cpp b/media/libstagefright/HevcUtils.cpp
index b347453..5f9c20e 100644
--- a/media/libstagefright/HevcUtils.cpp
+++ b/media/libstagefright/HevcUtils.cpp
@@ -30,9 +30,14 @@
 #include <media/stagefright/MediaErrors.h>
 #include <media/stagefright/Utils.h>
 
+#define UNUSED_PARAM __attribute__((unused))
+
 namespace android {
 
-static const uint8_t kHevcNalUnitTypes[5] = {
+static const uint8_t kHevcNalUnitTypes[8] = {
+    kHevcNalUnitTypeCodedSliceIdr,
+    kHevcNalUnitTypeCodedSliceIdrNoLP,
+    kHevcNalUnitTypeCodedSliceCra,
     kHevcNalUnitTypeVps,
     kHevcNalUnitTypeSps,
     kHevcNalUnitTypePps,
@@ -375,8 +380,56 @@
     return reader.overRead() ? ERROR_MALFORMED : OK;
 }
 
+void HevcParameterSets::FindHEVCDimensions(
+        const sp<ABuffer> &spsBuffer, int32_t *width, int32_t *height) {
+    ALOGD("FindHEVCDimensions");
+    // See Rec. ITU-T H.265 v3 (04/2015) Chapter 7.3.2.2 for reference
+    ABitReader reader(spsBuffer->data() + 1, spsBuffer->size() - 1);
+    // Skip sps_video_parameter_set_id
+    reader.skipBits(4);
+    uint8_t maxSubLayersMinus1 = reader.getBitsWithFallback(3, 0);
+    // Skip sps_temporal_id_nesting_flag
+    reader.skipBits(1);
+    // Skip the general profile_tier_level fields (88 bits) and general_level_idc (8 bits)
+    reader.skipBits(96);
+    if (maxSubLayersMinus1 > 0) {
+        bool subLayerProfilePresentFlag[8];
+        bool subLayerLevelPresentFlag[8];
+        for (int i = 0; i < maxSubLayersMinus1; ++i) {
+            subLayerProfilePresentFlag[i] = reader.getBitsWithFallback(1, 0);
+            subLayerLevelPresentFlag[i] = reader.getBitsWithFallback(1, 0);
+        }
+        // Skip reserved
+        reader.skipBits(2 * (8 - maxSubLayersMinus1));
+        for (int i = 0; i < maxSubLayersMinus1; ++i) {
+            if (subLayerProfilePresentFlag[i]) {
+                // Skip profile
+                reader.skipBits(88);
+            }
+            if (subLayerLevelPresentFlag[i]) {
+                // Skip sub_layer_level_idc[i]
+                reader.skipBits(8);
+            }
+        }
+    }
+    // Skip sps_seq_parameter_set_id
+    skipUE(&reader);
+    uint8_t chromaFormatIdc = parseUEWithFallback(&reader, 0);
+    if (chromaFormatIdc == 3) {
+        // Skip separate_colour_plane_flag
+        reader.skipBits(1);
+    }
+    skipUE(&reader);
+    skipUE(&reader);
+
+    // pic_width_in_luma_samples
+    *width = parseUEWithFallback(&reader, 0);
+    // pic_height_in_luma_samples
+    *height = parseUEWithFallback(&reader, 0);
+}
+
 status_t HevcParameterSets::parsePps(
-        const uint8_t* data __unused, size_t size __unused) {
+        const uint8_t* data UNUSED_PARAM, size_t size UNUSED_PARAM) {
     return OK;
 }
 
@@ -489,4 +542,26 @@
     return OK;
 }
 
+bool HevcParameterSets::IsHevcIDR(const uint8_t *data, size_t size) {
+    bool foundIDR = false;
+    const uint8_t *nalStart;
+    size_t nalSize;
+    while (!foundIDR && getNextNALUnit(&data, &size, &nalStart, &nalSize, true) == OK) {
+        if (nalSize == 0) {
+            ALOGE("Encountered zero-length HEVC NAL");
+            return false;
+        }
+
+        uint8_t nalType = (nalStart[0] & 0x7E) >> 1;
+            switch (nalType) {
+            case kHevcNalUnitTypeCodedSliceIdr:
+            case kHevcNalUnitTypeCodedSliceIdrNoLP:
+            case kHevcNalUnitTypeCodedSliceCra:
+                foundIDR = true;
+                break;
+        }
+    }
+
+    return foundIDR;
+}
 }  // namespace android
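IsHevcIDR() above walks the NAL units and classifies a picture as a sync frame by its NAL unit type: the type sits in bits 1..6 of the first NAL header byte, and IDR_W_RADL (19), IDR_N_LP (20) and CRA (21) are the random-access types it accepts (ITU-T H.265, Table 7-1). A standalone restatement of that check:

    #include <cstddef>
    #include <cstdint>

    constexpr uint8_t kIdrWRadl = 19;
    constexpr uint8_t kIdrNLp   = 20;
    constexpr uint8_t kCra      = 21;

    inline uint8_t hevcNalType(const uint8_t* nal, size_t size) {
        // Forbidden-zero bit is bit 7; the 6-bit type occupies bits 1..6.
        return size > 0 ? static_cast<uint8_t>((nal[0] & 0x7E) >> 1) : 0xFF;
    }

    inline bool isRandomAccessNal(const uint8_t* nal, size_t size) {
        const uint8_t type = hevcNalType(nal, size);
        return type == kIdrWRadl || type == kIdrNLp || type == kCra;
    }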
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index 8f7d4bf..0af97df 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -542,6 +542,7 @@
     mNumGrids = 0;
     mNextItemId = kItemIdBase;
     mHasRefs = false;
+    mResetStatus = OK;
     mPreAllocFirstTime = true;
     mPrevAllTracksTotalMetaDataSizeEstimate = 0;
 
@@ -1027,6 +1028,11 @@
     return OK;
 }
 
+status_t MPEG4Writer::stop() {
+    // If a reset is already in progress, wait for it to complete.
+    return reset(true /* stopSource */, true /* waitForAnyPreviousCallToComplete */);
+}
+
 status_t MPEG4Writer::pause() {
     ALOGW("MPEG4Writer: pause is not supported");
     return ERROR_UNSUPPORTED;
@@ -1159,8 +1165,12 @@
     return err;
 }
 
-void MPEG4Writer::finishCurrentSession() {
-    reset(false /* stopSource */);
+status_t MPEG4Writer::finishCurrentSession() {
+    ALOGV("finishCurrentSession");
+    /* Don't wait if a reset is already in progress; waiting could deadlock,
+     * because finishCurrentSession() is called from the control looper thread.
+     */
+    return reset(false /* stopSource */, false /* waitForAnyPreviousCallToComplete */);
 }
 
 status_t MPEG4Writer::switchFd() {
@@ -1182,11 +1192,32 @@
     return err;
 }
 
-status_t MPEG4Writer::reset(bool stopSource) {
+status_t MPEG4Writer::reset(bool stopSource, bool waitForAnyPreviousCallToComplete) {
     ALOGD("reset()");
-    std::lock_guard<std::mutex> l(mResetMutex);
+    std::unique_lock<std::mutex> lk(mResetMutex, std::defer_lock);
+    if (waitForAnyPreviousCallToComplete) {
+        /* A stop()=>reset() from the client needs the return value of this reset call,
+         * so wait here if a reset is already in progress.
+         */
+        lk.lock();
+    } else if (!lk.try_lock()) {
+        /* An internal reset from the control looper thread must not wait for a reset
+         * that is already in progress.
+         */
+        return INVALID_OPERATION;
+    }
+
+    if (mResetStatus != OK) {
+        /* No need to proceed if an earlier reset finished with an error.
+         * If it finished without an error, proceeding would be harmless, as the
+         * call would simply return from the mInitCheck condition below.
+         */
+        return mResetStatus;
+    }
+
     if (mInitCheck != OK) {
-        return OK;
+        mResetStatus = OK;
+        return mResetStatus;
     } else {
         if (!mWriterThreadStarted ||
             !mStarted) {
@@ -1198,7 +1229,8 @@
             if (writerErr != OK) {
                 retErr = writerErr;
             }
-            return retErr;
+            mResetStatus = retErr;
+            return mResetStatus;
         }
     }
 
@@ -1245,7 +1277,8 @@
     if (err != OK && err != ERROR_MALFORMED) {
         // Ignoring release() return value as there was an "err" already.
         release();
-        return err;
+        mResetStatus = err;
+        return mResetStatus;
     }
 
     // Fix up the size of the 'mdat' chunk.
@@ -1303,7 +1336,8 @@
     if (err == OK) {
         err = errRelease;
     }
-    return err;
+    mResetStatus = err;
+    return mResetStatus;
 }
 
 /*
@@ -2454,31 +2488,27 @@
             int fd = mNextFd;
             mNextFd = -1;
             mLock.unlock();
-            finishCurrentSession();
-            initInternal(fd, false /*isFirstSession*/);
-            start(mStartMeta.get());
-            mSwitchPending = false;
-            notify(MEDIA_RECORDER_EVENT_INFO, MEDIA_RECORDER_INFO_NEXT_OUTPUT_FILE_STARTED, 0);
+            if (finishCurrentSession() == OK) {
+                initInternal(fd, false /*isFirstSession*/);
+                status_t status = start(mStartMeta.get());
+                mSwitchPending = false;
+                if (status == OK)  {
+                    notify(MEDIA_RECORDER_EVENT_INFO,
+                           MEDIA_RECORDER_INFO_NEXT_OUTPUT_FILE_STARTED, 0);
+                }
+            }
             break;
         }
-        // ::write() or lseek64() wasn't a success, file could be malformed
+        /* Either ::write() or lseek64() wasn't a success, so the file could be
+         * malformed, or fallocate() failed. In both cases, reset() and notify the client.
+         */
+        case kWhatFallocateError: // fallthrough
         case kWhatIOError: {
-            ALOGE("kWhatIOError");
             int32_t err;
             CHECK(msg->findInt32("err", &err));
-            // Stop tracks' threads and main writer thread.
-            stop();
-            notify(MEDIA_RECORDER_EVENT_ERROR, MEDIA_RECORDER_ERROR_UNKNOWN, err);
-            break;
-        }
-        // fallocate() failed, hence stop() and notify app.
-        case kWhatFallocateError: {
-            ALOGE("kWhatFallocateError");
-            int32_t err;
-            CHECK(msg->findInt32("err", &err));
-            // Stop tracks' threads and main writer thread.
-            stop();
-            //TODO: introduce a suitable MEDIA_RECORDER_ERROR_* instead MEDIA_RECORDER_ERROR_UNKNOWN?
+            // If a reset is already in progress, don't wait for it to complete,
+            // to avoid deadlock.
+            reset(true /* stopSource */, false /* waitForAnyPreviousCallToComplete */);
+            // TODO: a more suitable MEDIA_RECORDER_ERROR_* instead of MEDIA_RECORDER_ERROR_UNKNOWN?
             notify(MEDIA_RECORDER_EVENT_ERROR, MEDIA_RECORDER_ERROR_UNKNOWN, err);
             break;
         }
@@ -2486,7 +2516,7 @@
          * Responding with other options could be added later if required.
          */
         case kWhatNoIOErrorSoFar: {
-            ALOGD("kWhatNoIOErrorSoFar");
+            ALOGV("kWhatNoIOErrorSoFar");
             sp<AMessage> response = new AMessage;
             response->setInt32("err", OK);
             sp<AReplyToken> replyID;
@@ -2774,7 +2804,7 @@
         // even if the file is well-formed and the primary picture is correct.
 
         // Reserve item ids for samples + grid
-        size_t numItemsToReserve = mNumTiles + (mNumTiles > 1);
+        size_t numItemsToReserve = mNumTiles + (mNumTiles > 0);
         status_t err = mOwner->reserveItemId_l(numItemsToReserve, &mItemIdBase);
         if (err != OK) {
             return err;
@@ -4715,10 +4745,18 @@
 
 // This is useful if the pixel is not square
 void MPEG4Writer::Track::writePaspBox() {
-    mOwner->beginBox("pasp");
-    mOwner->writeInt32(1 << 16);  // hspacing
-    mOwner->writeInt32(1 << 16);  // vspacing
-    mOwner->endBox();  // pasp
+    // According to ISO/IEC 14496-12 (ISO base media file format), the 'pasp' box
+    // is optional. If present, it overrides the SAR from the video CSD, so only
+    // write it when the track format explicitly provides a pixel aspect ratio.
+    int32_t hSpacing, vSpacing;
+    if (mMeta->findInt32(kKeySARWidth, &hSpacing) && (hSpacing > 0)
+            && mMeta->findInt32(kKeySARHeight, &vSpacing) && (vSpacing > 0)) {
+        mOwner->beginBox("pasp");
+        mOwner->writeInt32(hSpacing);  // hspacing
+        mOwner->writeInt32(vSpacing);  // vspacing
+        mOwner->endBox();  // pasp
+    }
 }
 
 int64_t MPEG4Writer::Track::getStartTimeOffsetTimeUs() const {
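The reset() rework above gives MPEG4Writer two entry points: the client's stop() blocks on mResetMutex and receives the cached mResetStatus, while internal callers on the control looper use try_lock and bail out with INVALID_OPERATION rather than deadlock. The sketch below models that policy with a plain std::mutex; it is simplified in that any completed reset short-circuits, whereas the writer only short-circuits after a failed attempt. OneShotReset and its status codes are hypothetical.

    #include <mutex>

    class OneShotReset {
    public:
        static constexpr int kOk = 0;
        static constexpr int kInvalidOperation = -38;  // placeholder value

        // Client path (stop): wait for any in-flight reset, then run or reuse the result.
        int resetBlocking() {
            std::unique_lock<std::mutex> lock(mMutex);
            return resetLocked();
        }

        // Looper path (I/O error, session switch): never wait, to avoid self-deadlock.
        int resetNonBlocking() {
            std::unique_lock<std::mutex> lock(mMutex, std::try_to_lock);
            if (!lock.owns_lock()) {
                return kInvalidOperation;   // a reset is already in progress
            }
            return resetLocked();
        }

    private:
        int resetLocked() {
            if (mDone) {
                return mStatus;             // idempotent: report the first outcome
            }
            mStatus = doStopWriterAndTracks();
            mDone = true;
            return mStatus;
        }
        int doStopWriterAndTracks() { return kOk; }  // stands in for the real teardown

        std::mutex mMutex;
        bool mDone = false;
        int mStatus = kOk;
    };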
diff --git a/media/libstagefright/MediaAdapter.cpp b/media/libstagefright/MediaAdapter.cpp
index f1b6e8c..5a2a910 100644
--- a/media/libstagefright/MediaAdapter.cpp
+++ b/media/libstagefright/MediaAdapter.cpp
@@ -114,6 +114,13 @@
         return -EINVAL;
     }
 
+    /* Because mAdapterLock is released while waiting for signalBufferReturned(),
+     * another thread in the client process could push a new buffer for the same
+     * track. mBufferGatingMutex holds that push back until the previous buffer
+     * has been processed.
+     */
+    std::unique_lock<std::mutex> lk(mBufferGatingMutex);
+
     Mutex::Autolock autoLock(mAdapterLock);
     if (!mStarted) {
         ALOGE("pushBuffer called before start");
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 5d17f97..da8f024 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -241,6 +242,9 @@
     }
 
     AIBinder_linkToDeath(mService->asBinder().get(), mDeathRecipient.get(), this);
+
+    // Kill clients pending removal.
+    mService->reclaimResourcesFromClientsPendingRemoval(mPid);
 }
 
 //static
@@ -314,7 +318,7 @@
 
 class MediaCodec::ReleaseSurface {
 public:
-    ReleaseSurface() {
+    explicit ReleaseSurface(uint64_t usage) {
         BufferQueue::createBufferQueue(&mProducer, &mConsumer);
         mSurface = new Surface(mProducer, false /* controlledByApp */);
         struct ConsumerListener : public BnConsumerListener {
@@ -325,6 +329,7 @@
         sp<ConsumerListener> listener{new ConsumerListener};
         mConsumer->consumerConnect(listener, false);
         mConsumer->setConsumerName(String8{"MediaCodec.release"});
+        mConsumer->setConsumerUsageBits(usage);
     }
 
     const sp<Surface> &getSurface() {
@@ -614,7 +619,10 @@
     return new PersistentSurface(bufferProducer, bufferSource);
 }
 
-MediaCodec::MediaCodec(const sp<ALooper> &looper, pid_t pid, uid_t uid)
+MediaCodec::MediaCodec(
+        const sp<ALooper> &looper, pid_t pid, uid_t uid,
+        std::function<sp<CodecBase>(const AString &, const char *)> getCodecBase,
+        std::function<status_t(const AString &, sp<MediaCodecInfo> *)> getCodecInfo)
     : mState(UNINITIALIZED),
       mReleasedByResourceManager(false),
       mLooper(looper),
@@ -631,6 +639,9 @@
       mDequeueInputReplyID(0),
       mDequeueOutputTimeoutGeneration(0),
       mDequeueOutputReplyID(0),
+      mTunneledInputWidth(0),
+      mTunneledInputHeight(0),
+      mTunneled(false),
       mHaveInputSurface(false),
       mHavePendingInputBuffers(false),
       mCpuBoostRequested(false),
@@ -639,7 +650,9 @@
       mNumLowLatencyDisables(0),
       mIsLowLatencyModeOn(false),
       mIndexOfFirstFrameWhenLowLatencyOn(-1),
-      mInputBufferCounter(0) {
+      mInputBufferCounter(0),
+      mGetCodecBase(getCodecBase),
+      mGetCodecInfo(getCodecInfo) {
     if (uid == kNoUid) {
         mUid = AIBinder_getCallingUid();
     } else {
@@ -647,6 +660,33 @@
     }
     mResourceManagerProxy = new ResourceManagerServiceProxy(pid, mUid,
             ::ndk::SharedRefBase::make<ResourceManagerClient>(this));
+    if (!mGetCodecBase) {
+        mGetCodecBase = [](const AString &name, const char *owner) {
+            return GetCodecBase(name, owner);
+        };
+    }
+    if (!mGetCodecInfo) {
+        mGetCodecInfo = [](const AString &name, sp<MediaCodecInfo> *info) -> status_t {
+            *info = nullptr;
+            const sp<IMediaCodecList> mcl = MediaCodecList::getInstance();
+            if (!mcl) {
+                return NO_INIT;  // if called from Java should raise IOException
+            }
+            AString tmp = name;
+            if (tmp.endsWith(".secure")) {
+                tmp.erase(tmp.size() - 7, 7);
+            }
+            for (const AString &codecName : { name, tmp }) {
+                ssize_t codecIdx = mcl->findCodecByName(codecName.c_str());
+                if (codecIdx < 0) {
+                    continue;
+                }
+                *info = mcl->getCodecInfo(codecIdx);
+                return OK;
+            }
+            return NAME_NOT_FOUND;
+        };
+    }
 
     initMediametrics();
 }
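The constructor above turns codec lookup into injectable hooks (mGetCodecBase, mGetCodecInfo) that default to the production implementations when the caller passes none, giving tests a seam without changing call sites. A minimal sketch of that default-if-unset pattern with hypothetical Codec and factory types:

    #include <functional>
    #include <memory>
    #include <string>
    #include <utility>

    struct Codec { std::string name; };

    std::unique_ptr<Codec> createPlatformCodec(const std::string& name) {
        auto codec = std::make_unique<Codec>();    // stands in for GetCodecBase()
        codec->name = name;
        return codec;
    }

    class CodecHost {
    public:
        using Factory = std::function<std::unique_ptr<Codec>(const std::string&)>;

        explicit CodecHost(Factory factory = nullptr) : mFactory(std::move(factory)) {
            if (!mFactory) {
                // No hook supplied: fall back to the production factory.
                mFactory = [](const std::string& name) { return createPlatformCodec(name); };
            }
        }

        std::unique_ptr<Codec> makeCodec(const std::string& name) { return mFactory(name); }

    private:
        Factory mFactory;
    };

    // A test can pass its own Factory lambda to observe behavior without the real codec list.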
@@ -1008,6 +1048,12 @@
     return err;
 }
 
+void MediaCodec::PostReplyWithError(const sp<AMessage> &msg, int32_t err) {
+    sp<AReplyToken> replyID;
+    CHECK(msg->senderAwaitsResponse(&replyID));
+    PostReplyWithError(replyID, err);
+}
+
 void MediaCodec::PostReplyWithError(const sp<AReplyToken> &replyID, int32_t err) {
     int32_t finalErr = err;
     if (mReleasedByResourceManager) {
@@ -1084,40 +1130,30 @@
     bool secureCodec = false;
     const char *owner = "";
     if (!name.startsWith("android.filter.")) {
-        AString tmp = name;
-        if (tmp.endsWith(".secure")) {
-            secureCodec = true;
-            tmp.erase(tmp.size() - 7, 7);
-        }
-        const sp<IMediaCodecList> mcl = MediaCodecList::getInstance();
-        if (mcl == NULL) {
+        status_t err = mGetCodecInfo(name, &mCodecInfo);
+        if (err != OK) {
             mCodec = NULL;  // remove the codec.
-            return NO_INIT; // if called from Java should raise IOException
-        }
-        for (const AString &codecName : { name, tmp }) {
-            ssize_t codecIdx = mcl->findCodecByName(codecName.c_str());
-            if (codecIdx < 0) {
-                continue;
-            }
-            mCodecInfo = mcl->getCodecInfo(codecIdx);
-            Vector<AString> mediaTypes;
-            mCodecInfo->getSupportedMediaTypes(&mediaTypes);
-            for (size_t i = 0; i < mediaTypes.size(); i++) {
-                if (mediaTypes[i].startsWith("video/")) {
-                    mIsVideo = true;
-                    break;
-                }
-            }
-            break;
+            return err;
         }
         if (mCodecInfo == nullptr) {
+            ALOGE("Getting codec info with name '%s' failed", name.c_str());
             return NAME_NOT_FOUND;
         }
+        secureCodec = name.endsWith(".secure");
+        Vector<AString> mediaTypes;
+        mCodecInfo->getSupportedMediaTypes(&mediaTypes);
+        for (size_t i = 0; i < mediaTypes.size(); ++i) {
+            if (mediaTypes[i].startsWith("video/")) {
+                mIsVideo = true;
+                break;
+            }
+        }
         owner = mCodecInfo->getOwnerName();
     }
 
-    mCodec = GetCodecBase(name, owner);
+    mCodec = mGetCodecBase(name, owner);
     if (mCodec == NULL) {
+        ALOGE("Getting codec base with name '%s' (owner='%s') failed", name.c_str(), owner);
         return NAME_NOT_FOUND;
     }
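The init() path above now delegates name resolution to mGetCodecInfo, whose default implementation tries the requested name and, failing that, the name with a trailing ".secure" suffix stripped. A compact standalone version of that candidate-name fallback:

    #include <string>
    #include <vector>

    std::vector<std::string> codecLookupCandidates(const std::string& name) {
        std::vector<std::string> candidates{name};
        const std::string suffix = ".secure";
        if (name.size() > suffix.size()
                && name.compare(name.size() - suffix.size(), suffix.size(), suffix) == 0) {
            candidates.push_back(name.substr(0, name.size() - suffix.size()));
        }
        return candidates;
    }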
 
@@ -1511,7 +1547,6 @@
     mStickyError = OK;
 
     // reset state not reset by setState(UNINITIALIZED)
-    mReplyID = 0;
     mDequeueInputReplyID = 0;
     mDequeueOutputReplyID = 0;
     mDequeueInputTimeoutGeneration = 0;
@@ -2043,20 +2078,25 @@
     } else if (mFlags & kFlagOutputBuffersChanged) {
         PostReplyWithError(replyID, INFO_OUTPUT_BUFFERS_CHANGED);
         mFlags &= ~kFlagOutputBuffersChanged;
-    } else if (mFlags & kFlagOutputFormatChanged) {
-        PostReplyWithError(replyID, INFO_FORMAT_CHANGED);
-        mFlags &= ~kFlagOutputFormatChanged;
     } else {
         sp<AMessage> response = new AMessage;
-        ssize_t index = dequeuePortBuffer(kPortIndexOutput);
-
-        if (index < 0) {
-            CHECK_EQ(index, -EAGAIN);
+        BufferInfo *info = peekNextPortBuffer(kPortIndexOutput);
+        if (!info) {
             return false;
         }
 
-        const sp<MediaCodecBuffer> &buffer =
-            mPortBuffers[kPortIndexOutput][index].mData;
+        // In synchronous mode, an output format change has to be handled at
+        // dequeue time so that the event is delivered in the correct order.
+
+        const sp<MediaCodecBuffer> &buffer = info->mData;
+        handleOutputFormatChangeIfNeeded(buffer);
+        if (mFlags & kFlagOutputFormatChanged) {
+            PostReplyWithError(replyID, INFO_FORMAT_CHANGED);
+            mFlags &= ~kFlagOutputFormatChanged;
+            return true;
+        }
+
+        ssize_t index = dequeuePortBuffer(kPortIndexOutput);
 
         response->setSize("index", index);
         response->setSize("offset", buffer->offset());
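In synchronous mode the code above peeks at the next output buffer, reports a pending format change before handing out an index, and only then dequeues, so the client observes events in order. A generic peek-before-pop sketch with a std::deque and a hypothetical Event type:

    #include <deque>
    #include <optional>
    #include <string>
    #include <utility>

    struct Event { std::string format; int index; };

    class OutputQueue {
    public:
        void push(Event e) { mQueue.push_back(std::move(e)); }

        // Returns either a "format changed" notification or the next buffer index,
        // never a buffer that silently skips over a format change.
        std::optional<std::string> dequeue(int* index, std::string* currentFormat) {
            if (mQueue.empty()) {
                return std::nullopt;                     // nothing to report (EAGAIN)
            }
            const Event& next = mQueue.front();          // peek, don't pop yet
            if (next.format != *currentFormat) {
                *currentFormat = next.format;            // latch the new format
                return std::string("INFO_FORMAT_CHANGED");  // deliver the change first
            }
            *index = next.index;
            mQueue.pop_front();                          // now hand out the buffer
            return std::string("OK");
        }

    private:
        std::deque<Event> mQueue;
    };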
@@ -2093,14 +2133,16 @@
                     CHECK(msg->findInt32("err", &err));
                     CHECK(msg->findInt32("actionCode", &actionCode));
 
-                    ALOGE("Codec reported err %#x, actionCode %d, while in state %d",
-                            err, actionCode, mState);
+                    ALOGE("Codec reported err %#x, actionCode %d, while in state %d/%s",
+                            err, actionCode, mState, stateString(mState).c_str());
                     if (err == DEAD_OBJECT) {
                         mFlags |= kFlagSawMediaServerDie;
                         mFlags &= ~kFlagIsComponentAllocated;
                     }
 
                     bool sendErrorResponse = true;
+                    std::string origin{"kWhatError:"};
+                    origin += stateString(mState);
 
                     switch (mState) {
                         case INITIALIZING:
@@ -2152,14 +2194,14 @@
                                 // be a shutdown complete notification after
                                 // all.
 
-                                // note that we're directly going from
+                                // note that we may be directly going from
                                 // STOPPING->UNINITIALIZED, instead of the
                                 // usual STOPPING->INITIALIZED state.
                                 setState(UNINITIALIZED);
                                 if (mState == RELEASING) {
                                     mComponentName.clear();
                                 }
-                                (new AMessage)->postReply(mReplyID);
+                                postPendingRepliesAndDeferredMessages(origin + ":dead");
                                 sendErrorResponse = false;
                             }
                             break;
@@ -2185,7 +2227,7 @@
                         case FLUSHED:
                         case STARTED:
                         {
-                            sendErrorResponse = false;
+                            sendErrorResponse = (mReplyID != nullptr);
 
                             setStickyError(err);
                             postActivityNotificationIfPossible();
@@ -2215,7 +2257,7 @@
 
                         default:
                         {
-                            sendErrorResponse = false;
+                            sendErrorResponse = (mReplyID != nullptr);
 
                             setStickyError(err);
                             postActivityNotificationIfPossible();
@@ -2242,7 +2284,15 @@
                     }
 
                     if (sendErrorResponse) {
-                        PostReplyWithError(mReplyID, err);
+                        // TRICKY: replicate PostReplyWithError logic for
+                        //         err code override
+                        int32_t finalErr = err;
+                        if (mReleasedByResourceManager) {
+                            // override the err code if MediaCodec has been
+                            // released by ResourceManager.
+                            finalErr = DEAD_OBJECT;
+                        }
+                        postPendingRepliesAndDeferredMessages(origin, finalErr);
                     }
                     break;
                 }
@@ -2252,8 +2302,8 @@
                     if (mState == RELEASING || mState == UNINITIALIZED) {
                         // In case a kWhatError or kWhatRelease message came in and replied,
                         // we log a warning and ignore.
-                        ALOGW("allocate interrupted by error or release, current state %d",
-                              mState);
+                        ALOGW("allocate interrupted by error or release, current state %d/%s",
+                              mState, stateString(mState).c_str());
                         break;
                     }
                     CHECK_EQ(mState, INITIALIZING);
@@ -2290,7 +2340,7 @@
                                 MediaResource::CodecResource(mFlags & kFlagIsSecure, mIsVideo));
                     }
 
-                    (new AMessage)->postReply(mReplyID);
+                    postPendingRepliesAndDeferredMessages("kWhatComponentAllocated");
                     break;
                 }
 
@@ -2299,8 +2349,8 @@
                     if (mState == RELEASING || mState == UNINITIALIZED || mState == INITIALIZED) {
                         // In case a kWhatError or kWhatRelease message came in and replied,
                         // we log a warning and ignore.
-                        ALOGW("configure interrupted by error or release, current state %d",
-                              mState);
+                        ALOGW("configure interrupted by error or release, current state %d/%s",
+                              mState, stateString(mState).c_str());
                         break;
                     }
                     CHECK_EQ(mState, CONFIGURING);
@@ -2315,7 +2365,7 @@
                     if (mSurface != nullptr && !mAllowFrameDroppingBySurface) {
                         // signal frame dropping mode in the input format as this may also be
                         // meaningful and confusing for an encoder in a transcoder scenario
-                        mInputFormat->setInt32("allow-frame-drop", mAllowFrameDroppingBySurface);
+                        mInputFormat->setInt32(KEY_ALLOW_FRAME_DROP, mAllowFrameDroppingBySurface);
                     }
                     sp<AMessage> interestingFormat =
                             (mFlags & kFlagIsEncoder) ? mOutputFormat : mInputFormat;
@@ -2329,7 +2379,7 @@
                         mFlags |= kFlagUsesSoftwareRenderer;
                     }
                     setState(CONFIGURED);
-                    (new AMessage)->postReply(mReplyID);
+                    postPendingRepliesAndDeferredMessages("kWhatComponentConfigured");
 
                     // augment our media metrics info, now that we know more things
                     // such as what the codec extracted from any CSD passed in.
@@ -2374,6 +2424,12 @@
 
                 case kWhatInputSurfaceCreated:
                 {
+                    if (mState != CONFIGURED) {
+                        // state transitioned unexpectedly; we should have replied already.
+                        ALOGD("received kWhatInputSurfaceCreated message in state %s",
+                                stateString(mState).c_str());
+                        break;
+                    }
                     // response to initiateCreateInputSurface()
                     status_t err = NO_ERROR;
                     sp<AMessage> response = new AMessage;
@@ -2392,12 +2448,18 @@
                     } else {
                         response->setInt32("err", err);
                     }
-                    response->postReply(mReplyID);
+                    postPendingRepliesAndDeferredMessages("kWhatInputSurfaceCreated", response);
                     break;
                 }
 
                 case kWhatInputSurfaceAccepted:
                 {
+                    if (mState != CONFIGURED) {
+                        // state transitioned unexpectedly; we should have replied already.
+                        ALOGD("received kWhatInputSurfaceAccepted message in state %s",
+                                stateString(mState).c_str());
+                        break;
+                    }
                     // response to initiateSetInputSurface()
                     status_t err = NO_ERROR;
                     sp<AMessage> response = new AMessage();
@@ -2408,19 +2470,25 @@
                     } else {
                         response->setInt32("err", err);
                     }
-                    response->postReply(mReplyID);
+                    postPendingRepliesAndDeferredMessages("kWhatInputSurfaceAccepted", response);
                     break;
                 }
 
                 case kWhatSignaledInputEOS:
                 {
+                    if (!isExecuting()) {
+                        // state transitioned unexpectedly; we should have replied already.
+                        ALOGD("received kWhatSignaledInputEOS message in state %s",
+                                stateString(mState).c_str());
+                        break;
+                    }
                     // response to signalEndOfInputStream()
                     sp<AMessage> response = new AMessage;
                     status_t err;
                     if (msg->findInt32("err", &err)) {
                         response->setInt32("err", err);
                     }
-                    response->postReply(mReplyID);
+                    postPendingRepliesAndDeferredMessages("kWhatSignaledInputEOS", response);
                     break;
                 }
 
@@ -2429,7 +2497,8 @@
                     if (mState == RELEASING || mState == UNINITIALIZED) {
                         // In case a kWhatRelease message came in and replied,
                         // we log a warning and ignore.
-                        ALOGW("start interrupted by release, current state %d", mState);
+                        ALOGW("start interrupted by release, current state %d/%s",
+                              mState, stateString(mState).c_str());
                         break;
                     }
 
@@ -2439,7 +2508,7 @@
                                 MediaResource::GraphicMemoryResource(getGraphicBufferSize()));
                     }
                     setState(STARTED);
-                    (new AMessage)->postReply(mReplyID);
+                    postPendingRepliesAndDeferredMessages("kWhatStartCompleted");
                     break;
                 }
 
@@ -2540,107 +2609,13 @@
                         break;
                     }
 
-                    sp<RefBase> obj;
-                    CHECK(msg->findObject("buffer", &obj));
-                    sp<MediaCodecBuffer> buffer = static_cast<MediaCodecBuffer *>(obj.get());
-
-                    if (mOutputFormat != buffer->format()) {
-                        if (mFlags & kFlagUseBlockModel) {
-                            sp<AMessage> diff1 = mOutputFormat->changesFrom(buffer->format());
-                            sp<AMessage> diff2 = buffer->format()->changesFrom(mOutputFormat);
-                            std::set<std::string> keys;
-                            size_t numEntries = diff1->countEntries();
-                            AMessage::Type type;
-                            for (size_t i = 0; i < numEntries; ++i) {
-                                keys.emplace(diff1->getEntryNameAt(i, &type));
-                            }
-                            numEntries = diff2->countEntries();
-                            for (size_t i = 0; i < numEntries; ++i) {
-                                keys.emplace(diff2->getEntryNameAt(i, &type));
-                            }
-                            sp<WrapperObject<std::set<std::string>>> changedKeys{
-                                new WrapperObject<std::set<std::string>>{std::move(keys)}};
-                            buffer->meta()->setObject("changedKeys", changedKeys);
-                        }
-                        mOutputFormat = buffer->format();
-                        ALOGV("[%s] output format changed to: %s",
-                                mComponentName.c_str(), mOutputFormat->debugString(4).c_str());
-
-                        if (mSoftRenderer == NULL &&
-                                mSurface != NULL &&
-                                (mFlags & kFlagUsesSoftwareRenderer)) {
-                            AString mime;
-                            CHECK(mOutputFormat->findString("mime", &mime));
-
-                            // TODO: propagate color aspects to software renderer to allow better
-                            // color conversion to RGB. For now, just mark dataspace for YUV
-                            // rendering.
-                            int32_t dataSpace;
-                            if (mOutputFormat->findInt32("android._dataspace", &dataSpace)) {
-                                ALOGD("[%s] setting dataspace on output surface to #%x",
-                                        mComponentName.c_str(), dataSpace);
-                                int err = native_window_set_buffers_data_space(
-                                        mSurface.get(), (android_dataspace)dataSpace);
-                                ALOGW_IF(err != 0, "failed to set dataspace on surface (%d)", err);
-                            }
-                            if (mOutputFormat->contains("hdr-static-info")) {
-                                HDRStaticInfo info;
-                                if (ColorUtils::getHDRStaticInfoFromFormat(mOutputFormat, &info)) {
-                                    setNativeWindowHdrMetadata(mSurface.get(), &info);
-                                }
-                            }
-
-                            sp<ABuffer> hdr10PlusInfo;
-                            if (mOutputFormat->findBuffer("hdr10-plus-info", &hdr10PlusInfo)
-                                    && hdr10PlusInfo != nullptr && hdr10PlusInfo->size() > 0) {
-                                native_window_set_buffers_hdr10_plus_metadata(mSurface.get(),
-                                        hdr10PlusInfo->size(), hdr10PlusInfo->data());
-                            }
-
-                            if (mime.startsWithIgnoreCase("video/")) {
-                                mSurface->setDequeueTimeout(-1);
-                                mSoftRenderer = new SoftwareRenderer(mSurface, mRotationDegrees);
-                            }
-                        }
-
-                        requestCpuBoostIfNeeded();
-
-                        if (mFlags & kFlagIsEncoder) {
-                            // Before we announce the format change we should
-                            // collect codec specific data and amend the output
-                            // format as necessary.
-                            int32_t flags = 0;
-                            (void) buffer->meta()->findInt32("flags", &flags);
-                            if ((flags & BUFFER_FLAG_CODECCONFIG) && !(mFlags & kFlagIsSecure)) {
-                                status_t err =
-                                    amendOutputFormatWithCodecSpecificData(buffer);
-
-                                if (err != OK) {
-                                    ALOGE("Codec spit out malformed codec "
-                                          "specific data!");
-                                }
-                            }
-                        }
-                        if (mFlags & kFlagIsAsync) {
-                            onOutputFormatChanged();
-                        } else {
-                            mFlags |= kFlagOutputFormatChanged;
-                            postActivityNotificationIfPossible();
-                        }
-
-                        // Notify mCrypto of video resolution changes
-                        if (mCrypto != NULL) {
-                            int32_t left, top, right, bottom, width, height;
-                            if (mOutputFormat->findRect("crop", &left, &top, &right, &bottom)) {
-                                mCrypto->notifyResolution(right - left + 1, bottom - top + 1);
-                            } else if (mOutputFormat->findInt32("width", &width)
-                                    && mOutputFormat->findInt32("height", &height)) {
-                                mCrypto->notifyResolution(width, height);
-                            }
-                        }
-                    }
-
                     if (mFlags & kFlagIsAsync) {
+                        sp<RefBase> obj;
+                        CHECK(msg->findObject("buffer", &obj));
+                        sp<MediaCodecBuffer> buffer = static_cast<MediaCodecBuffer *>(obj.get());
+
+                        // In asynchronous mode, output format change is processed immediately.
+                        handleOutputFormatChangeIfNeeded(buffer);
                         onOutputBufferAvailable();
                     } else if (mFlags & kFlagDequeueOutputPending) {
                         CHECK(handleDequeueOutputBuffer(mDequeueOutputReplyID));
@@ -2665,18 +2640,26 @@
                 case kWhatStopCompleted:
                 {
                     if (mState != STOPPING) {
-                        ALOGW("Received kWhatStopCompleted in state %d", mState);
+                        ALOGW("Received kWhatStopCompleted in state %d/%s",
+                              mState, stateString(mState).c_str());
                         break;
                     }
                     setState(INITIALIZED);
-                    (new AMessage)->postReply(mReplyID);
+                    if (mReplyID) {
+                        postPendingRepliesAndDeferredMessages("kWhatStopCompleted");
+                    } else {
+                        ALOGW("kWhatStopCompleted: presumably an error occurred earlier, "
+                              "but the operation completed anyway. (last reply origin=%s)",
+                              mLastReplyOrigin.c_str());
+                    }
                     break;
                 }
 
                 case kWhatReleaseCompleted:
                 {
                     if (mState != RELEASING) {
-                        ALOGW("Received kWhatReleaseCompleted in state %d", mState);
+                        ALOGW("Received kWhatReleaseCompleted in state %d/%s",
+                              mState, stateString(mState).c_str());
                         break;
                     }
                     setState(UNINITIALIZED);
@@ -2693,7 +2676,7 @@
                     mReleaseSurface.reset();
 
                     if (mReplyID != nullptr) {
-                        (new AMessage)->postReply(mReplyID);
+                        postPendingRepliesAndDeferredMessages("kWhatReleaseCompleted");
                     }
                     if (mAsyncReleaseCompleteNotification != nullptr) {
                         flushMediametrics();
@@ -2706,8 +2689,8 @@
                 case kWhatFlushCompleted:
                 {
                     if (mState != FLUSHING) {
-                        ALOGW("received FlushCompleted message in state %d",
-                                mState);
+                        ALOGW("received FlushCompleted message in state %d/%s",
+                                mState, stateString(mState).c_str());
                         break;
                     }
 
@@ -2718,7 +2701,7 @@
                         mCodec->signalResume();
                     }
 
-                    (new AMessage)->postReply(mReplyID);
+                    postPendingRepliesAndDeferredMessages("kWhatFlushCompleted");
                     break;
                 }
 
@@ -2730,14 +2713,18 @@
 
         case kWhatInit:
         {
-            sp<AReplyToken> replyID;
-            CHECK(msg->senderAwaitsResponse(&replyID));
-
             if (mState != UNINITIALIZED) {
-                PostReplyWithError(replyID, INVALID_OPERATION);
+                PostReplyWithError(msg, INVALID_OPERATION);
                 break;
             }
 
+            if (mReplyID) {
+                mDeferredMessages.push_back(msg);
+                break;
+            }
+            sp<AReplyToken> replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+
             mReplyID = replyID;
             setState(INITIALIZING);
 
@@ -2799,14 +2786,18 @@
 
         case kWhatConfigure:
         {
-            sp<AReplyToken> replyID;
-            CHECK(msg->senderAwaitsResponse(&replyID));
-
             if (mState != INITIALIZED) {
-                PostReplyWithError(replyID, INVALID_OPERATION);
+                PostReplyWithError(msg, INVALID_OPERATION);
                 break;
             }
 
+            if (mReplyID) {
+                mDeferredMessages.push_back(msg);
+                break;
+            }
+            sp<AReplyToken> replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+
             sp<RefBase> obj;
             CHECK(msg->findObject("surface", &obj));
 
@@ -2819,7 +2810,7 @@
             }
 
             if (obj != NULL) {
-                if (!format->findInt32("allow-frame-drop", &mAllowFrameDroppingBySurface)) {
+                if (!format->findInt32(KEY_ALLOW_FRAME_DROP, &mAllowFrameDroppingBySurface)) {
                     // allow frame dropping by surface by default
                     mAllowFrameDroppingBySurface = true;
                 }
@@ -2879,6 +2870,14 @@
 
             extractCSD(format);
 
+            int32_t tunneled;
+            if (format->findInt32("feature-tunneled-playback", &tunneled) && tunneled != 0) {
+                ALOGI("Configuring TUNNELED video playback.");
+                mTunneled = true;
+            } else {
+                mTunneled = false;
+            }
+
             mCodec->initiateConfigureComponent(format);
             break;
         }
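Editorial note: as the block above shows, tunneled mode is selected purely by the "feature-tunneled-playback" key on the configure-time format. A hedged caller-side sketch; the surrounding setup and the commented-out configure() call are assumptions, not part of this patch:

```
// Hypothetical client-side sketch: request tunneled playback via the format key
// checked in the kWhatConfigure handler above.
sp<AMessage> format = new AMessage;
format->setString("mime", "video/avc");
format->setInt32("width", 1920);
format->setInt32("height", 1080);
format->setInt32("feature-tunneled-playback", 1);  // makes mTunneled = true above
// codec->configure(format, surface, crypto, 0 /* flags */);
```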
@@ -2944,15 +2943,19 @@
         case kWhatCreateInputSurface:
         case kWhatSetInputSurface:
         {
-            sp<AReplyToken> replyID;
-            CHECK(msg->senderAwaitsResponse(&replyID));
-
             // Must be configured, but can't have been started yet.
             if (mState != CONFIGURED) {
-                PostReplyWithError(replyID, INVALID_OPERATION);
+                PostReplyWithError(msg, INVALID_OPERATION);
                 break;
             }
 
+            if (mReplyID) {
+                mDeferredMessages.push_back(msg);
+                break;
+            }
+            sp<AReplyToken> replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+
             mReplyID = replyID;
             if (msg->what() == kWhatCreateInputSurface) {
                 mCodec->initiateCreateInputSurface();
@@ -2967,9 +2970,6 @@
         }
         case kWhatStart:
         {
-            sp<AReplyToken> replyID;
-            CHECK(msg->senderAwaitsResponse(&replyID));
-
             if (mState == FLUSHED) {
                 setState(STARTED);
                 if (mHavePendingInputBuffers) {
@@ -2977,13 +2977,20 @@
                     mHavePendingInputBuffers = false;
                 }
                 mCodec->signalResume();
-                PostReplyWithError(replyID, OK);
+                PostReplyWithError(msg, OK);
                 break;
             } else if (mState != CONFIGURED) {
-                PostReplyWithError(replyID, INVALID_OPERATION);
+                PostReplyWithError(msg, INVALID_OPERATION);
                 break;
             }
 
+            if (mReplyID) {
+                mDeferredMessages.push_back(msg);
+                break;
+            }
+            sp<AReplyToken> replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+
             mReplyID = replyID;
             setState(STARTING);
 
@@ -2991,15 +2998,42 @@
             break;
         }
 
-        case kWhatStop:
+        case kWhatStop: {
+            if (mReplyID) {
+                mDeferredMessages.push_back(msg);
+                break;
+            }
+            [[fallthrough]];
+        }
         case kWhatRelease:
         {
             State targetState =
                 (msg->what() == kWhatStop) ? INITIALIZED : UNINITIALIZED;
 
+            if ((mState == RELEASING && targetState == UNINITIALIZED)
+                    || (mState == STOPPING && targetState == INITIALIZED)) {
+                mDeferredMessages.push_back(msg);
+                break;
+            }
+
             sp<AReplyToken> replyID;
             CHECK(msg->senderAwaitsResponse(&replyID));
 
+            sp<AMessage> asyncNotify;
+            (void)msg->findMessage("async", &asyncNotify);
+            // Post asyncNotify when asyncNotifyPost goes out of scope, unless cleared.
+            struct AsyncNotifyPost {
+                AsyncNotifyPost(const sp<AMessage> &asyncNotify) : mAsyncNotify(asyncNotify) {}
+                ~AsyncNotifyPost() {
+                    if (mAsyncNotify) {
+                        mAsyncNotify->post();
+                    }
+                }
+                void clear() { mAsyncNotify.clear(); }
+            private:
+                sp<AMessage> mAsyncNotify;
+            } asyncNotifyPost{asyncNotify};
+
             // already stopped/released
             if (mState == UNINITIALIZED && mReleasedByResourceManager) {
                 sp<AMessage> response = new AMessage;
@@ -3011,7 +3045,13 @@
             int32_t reclaimed = 0;
             msg->findInt32("reclaimed", &reclaimed);
             if (reclaimed) {
-                mReleasedByResourceManager = true;
+                if (!mReleasedByResourceManager) {
+                    // notify the async client
+                    if (mFlags & kFlagIsAsync) {
+                        onError(DEAD_OBJECT, ACTION_CODE_FATAL);
+                    }
+                    mReleasedByResourceManager = true;
+                }
 
                 int32_t force = 0;
                 msg->findInt32("force", &force);
@@ -3023,10 +3063,6 @@
                     response->setInt32("err", WOULD_BLOCK);
                     response->postReply(replyID);
 
-                    // notify the async client
-                    if (mFlags & kFlagIsAsync) {
-                        onError(DEAD_OBJECT, ACTION_CODE_FATAL);
-                    }
                     break;
                 }
             }
@@ -3063,12 +3099,15 @@
             // after this, and we'll no longer be able to reply.
             if (mState == FLUSHING || mState == STOPPING
                     || mState == CONFIGURING || mState == STARTING) {
-                (new AMessage)->postReply(mReplyID);
+                // mReplyID is always set in these states.
+                postPendingRepliesAndDeferredMessages(
+                        std::string("kWhatRelease:") + stateString(mState));
             }
 
             if (mFlags & kFlagSawMediaServerDie) {
                 // It's dead, Jim. Don't expect initiateShutdown to yield
                 // any useful results now...
+                // Any pending reply would have been handled at kWhatError.
                 setState(UNINITIALIZED);
                 if (targetState == UNINITIALIZED) {
                     mComponentName.clear();
@@ -3082,15 +3121,19 @@
             // reply now with an error to unblock the client, client can
             // release after the failure (instead of ANR).
             if (msg->what() == kWhatStop && (mFlags & kFlagStickyError)) {
+                // Any pending reply would have been handled at kWhatError.
                 PostReplyWithError(replyID, getStickyError());
                 break;
             }
 
-            sp<AMessage> asyncNotify;
-            if (msg->findMessage("async", &asyncNotify) && asyncNotify != nullptr) {
+            if (asyncNotify != nullptr) {
                 if (mSurface != NULL) {
                     if (!mReleaseSurface) {
-                        mReleaseSurface.reset(new ReleaseSurface);
+                        uint64_t usage = 0;
+                        if (mSurface->getConsumerUsage(&usage) != OK) {
+                            usage = 0;
+                        }
+                        mReleaseSurface.reset(new ReleaseSurface(usage));
                     }
                     if (mSurface != mReleaseSurface->getSurface()) {
                         status_t err = connectToSurface(mReleaseSurface->getSurface());
@@ -3107,6 +3150,12 @@
                 }
             }
 
+            if (mReplyID) {
+                // State transition replies are handled above, so this reply
+                // would not be related to state transition. As we are
+                // shutting down the component, just fail the operation.
+                postPendingRepliesAndDeferredMessages("kWhatRelease:reply", UNKNOWN_ERROR);
+            }
             mReplyID = replyID;
             setState(msg->what() == kWhatStop ? STOPPING : RELEASING);
 
@@ -3121,8 +3170,8 @@
 
             if (asyncNotify != nullptr) {
                 mResourceManagerProxy->markClientForPendingRemoval();
-                (new AMessage)->postReply(mReplyID);
-                mReplyID = 0;
+                postPendingRepliesAndDeferredMessages("kWhatRelease:async");
+                asyncNotifyPost.clear();
                 mAsyncReleaseCompleteNotification = asyncNotify;
             }
 
@@ -3293,17 +3342,21 @@
 
         case kWhatSignalEndOfInputStream:
         {
-            sp<AReplyToken> replyID;
-            CHECK(msg->senderAwaitsResponse(&replyID));
-
             if (!isExecuting() || !mHaveInputSurface) {
-                PostReplyWithError(replyID, INVALID_OPERATION);
+                PostReplyWithError(msg, INVALID_OPERATION);
                 break;
             } else if (mFlags & kFlagStickyError) {
-                PostReplyWithError(replyID, getStickyError());
+                PostReplyWithError(msg, getStickyError());
                 break;
             }
 
+            if (mReplyID) {
+                mDeferredMessages.push_back(msg);
+                break;
+            }
+            sp<AReplyToken> replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+
             mReplyID = replyID;
             mCodec->signalEndOfInputStream();
             break;
@@ -3345,17 +3398,21 @@
 
         case kWhatFlush:
         {
-            sp<AReplyToken> replyID;
-            CHECK(msg->senderAwaitsResponse(&replyID));
-
             if (!isExecuting()) {
-                PostReplyWithError(replyID, INVALID_OPERATION);
+                PostReplyWithError(msg, INVALID_OPERATION);
                 break;
             } else if (mFlags & kFlagStickyError) {
-                PostReplyWithError(replyID, getStickyError());
+                PostReplyWithError(msg, getStickyError());
                 break;
             }
 
+            if (mReplyID) {
+                mDeferredMessages.push_back(msg);
+                break;
+            }
+            sp<AReplyToken> replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+
             mReplyID = replyID;
             // TODO: skip flushing if already FLUSHED
             setState(FLUSHING);
@@ -3469,6 +3526,106 @@
     }
 }
 
+void MediaCodec::handleOutputFormatChangeIfNeeded(const sp<MediaCodecBuffer> &buffer) {
+    sp<AMessage> format = buffer->format();
+    if (mOutputFormat == format) {
+        return;
+    }
+    if (mFlags & kFlagUseBlockModel) {
+        sp<AMessage> diff1 = mOutputFormat->changesFrom(format);
+        sp<AMessage> diff2 = format->changesFrom(mOutputFormat);
+        std::set<std::string> keys;
+        size_t numEntries = diff1->countEntries();
+        AMessage::Type type;
+        for (size_t i = 0; i < numEntries; ++i) {
+            keys.emplace(diff1->getEntryNameAt(i, &type));
+        }
+        numEntries = diff2->countEntries();
+        for (size_t i = 0; i < numEntries; ++i) {
+            keys.emplace(diff2->getEntryNameAt(i, &type));
+        }
+        sp<WrapperObject<std::set<std::string>>> changedKeys{
+            new WrapperObject<std::set<std::string>>{std::move(keys)}};
+        buffer->meta()->setObject("changedKeys", changedKeys);
+    }
+    mOutputFormat = format;
+    ALOGV("[%s] output format changed to: %s",
+            mComponentName.c_str(), mOutputFormat->debugString(4).c_str());
+
+    if (mSoftRenderer == NULL &&
+            mSurface != NULL &&
+            (mFlags & kFlagUsesSoftwareRenderer)) {
+        AString mime;
+        CHECK(mOutputFormat->findString("mime", &mime));
+
+        // TODO: propagate color aspects to software renderer to allow better
+        // color conversion to RGB. For now, just mark dataspace for YUV
+        // rendering.
+        int32_t dataSpace;
+        if (mOutputFormat->findInt32("android._dataspace", &dataSpace)) {
+            ALOGD("[%s] setting dataspace on output surface to #%x",
+                    mComponentName.c_str(), dataSpace);
+            int err = native_window_set_buffers_data_space(
+                    mSurface.get(), (android_dataspace)dataSpace);
+            ALOGW_IF(err != 0, "failed to set dataspace on surface (%d)", err);
+        }
+        if (mOutputFormat->contains("hdr-static-info")) {
+            HDRStaticInfo info;
+            if (ColorUtils::getHDRStaticInfoFromFormat(mOutputFormat, &info)) {
+                setNativeWindowHdrMetadata(mSurface.get(), &info);
+            }
+        }
+
+        sp<ABuffer> hdr10PlusInfo;
+        if (mOutputFormat->findBuffer("hdr10-plus-info", &hdr10PlusInfo)
+                && hdr10PlusInfo != nullptr && hdr10PlusInfo->size() > 0) {
+            native_window_set_buffers_hdr10_plus_metadata(mSurface.get(),
+                    hdr10PlusInfo->size(), hdr10PlusInfo->data());
+        }
+
+        if (mime.startsWithIgnoreCase("video/")) {
+            mSurface->setDequeueTimeout(-1);
+            mSoftRenderer = new SoftwareRenderer(mSurface, mRotationDegrees);
+        }
+    }
+
+    requestCpuBoostIfNeeded();
+
+    if (mFlags & kFlagIsEncoder) {
+        // Before we announce the format change we should
+        // collect codec specific data and amend the output
+        // format as necessary.
+        int32_t flags = 0;
+        (void) buffer->meta()->findInt32("flags", &flags);
+        if ((flags & BUFFER_FLAG_CODECCONFIG) && !(mFlags & kFlagIsSecure)) {
+            status_t err =
+                amendOutputFormatWithCodecSpecificData(buffer);
+
+            if (err != OK) {
+                ALOGE("Codec spit out malformed codec "
+                      "specific data!");
+            }
+        }
+    }
+    if (mFlags & kFlagIsAsync) {
+        onOutputFormatChanged();
+    } else {
+        mFlags |= kFlagOutputFormatChanged;
+        postActivityNotificationIfPossible();
+    }
+
+    // Notify mCrypto of video resolution changes
+    if (mCrypto != NULL) {
+        int32_t left, top, right, bottom, width, height;
+        if (mOutputFormat->findRect("crop", &left, &top, &right, &bottom)) {
+            mCrypto->notifyResolution(right - left + 1, bottom - top + 1);
+        } else if (mOutputFormat->findInt32("width", &width)
+                && mOutputFormat->findInt32("height", &height)) {
+            mCrypto->notifyResolution(width, height);
+        }
+    }
+}
+
 void MediaCodec::extractCSD(const sp<AMessage> &format) {
     mCSD.clear();
 
@@ -3806,7 +3963,18 @@
     if (hasCryptoOrDescrambler() && !c2Buffer && !memory) {
         AString *errorDetailMsg;
         CHECK(msg->findPointer("errorDetailMsg", (void **)&errorDetailMsg));
-
+        // Notify mCrypto of video resolution changes
+        if (mTunneled && mCrypto != NULL) {
+            int32_t width, height;
+            if (mInputFormat->findInt32("width", &width) &&
+                mInputFormat->findInt32("height", &height) && width > 0 && height > 0) {
+                if (width != mTunneledInputWidth || height != mTunneledInputHeight) {
+                    mTunneledInputWidth = width;
+                    mTunneledInputHeight = height;
+                    mCrypto->notifyResolution(width, height);
+                }
+            }
+        }
         err = mBufferChannel->queueSecureInputBuffer(
                 buffer,
                 (mFlags & kFlagIsSecure),
@@ -3934,19 +4102,31 @@
     return OK;
 }
 
-ssize_t MediaCodec::dequeuePortBuffer(int32_t portIndex) {
+MediaCodec::BufferInfo *MediaCodec::peekNextPortBuffer(int32_t portIndex) {
     CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
 
     List<size_t> *availBuffers = &mAvailPortBuffers[portIndex];
 
     if (availBuffers->empty()) {
+        return nullptr;
+    }
+
+    return &mPortBuffers[portIndex][*availBuffers->begin()];
+}
+
+ssize_t MediaCodec::dequeuePortBuffer(int32_t portIndex) {
+    CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
+
+    BufferInfo *info = peekNextPortBuffer(portIndex);
+    if (!info) {
         return -EAGAIN;
     }
 
+    List<size_t> *availBuffers = &mAvailPortBuffers[portIndex];
     size_t index = *availBuffers->begin();
+    CHECK_EQ(info, &mPortBuffers[portIndex][index]);
     availBuffers->erase(availBuffers->begin());
 
-    BufferInfo *info = &mPortBuffers[portIndex][index];
     CHECK(!info->mOwnedByClient);
     {
         Mutex::Autolock al(mBufferLock);
@@ -4188,6 +4368,33 @@
     return OK;
 }
 
+void MediaCodec::postPendingRepliesAndDeferredMessages(
+        std::string origin, status_t err /* = OK */) {
+    sp<AMessage> response{new AMessage};
+    if (err != OK) {
+        response->setInt32("err", err);
+    }
+    postPendingRepliesAndDeferredMessages(origin, response);
+}
+
+void MediaCodec::postPendingRepliesAndDeferredMessages(
+        std::string origin, const sp<AMessage> &response) {
+    LOG_ALWAYS_FATAL_IF(
+            !mReplyID,
+            "postPendingRepliesAndDeferredMessages: mReplyID == null, from %s following %s",
+            origin.c_str(),
+            mLastReplyOrigin.c_str());
+    mLastReplyOrigin = origin;
+    response->postReply(mReplyID);
+    mReplyID.clear();
+    ALOGV_IF(!mDeferredMessages.empty(),
+            "posting %zu deferred messages", mDeferredMessages.size());
+    for (sp<AMessage> msg : mDeferredMessages) {
+        msg->post();
+    }
+    mDeferredMessages.clear();
+}
+
 std::string MediaCodec::stateString(State state) {
     const char *rval = NULL;
     char rawbuffer[16]; // room for "%d"
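Editorial note on the mechanism introduced above: while a reply token (mReplyID) is outstanding, new requests are parked in mDeferredMessages and re-posted once postPendingRepliesAndDeferredMessages() sends the pending reply. A minimal, self-contained sketch of that defer-and-replay idea, using stand-in types rather than the real AMessage/AReplyToken machinery:

```
#include <deque>
#include <functional>
#include <optional>

struct Handler {
    std::optional<std::function<void(int)>> pendingReply;   // plays the role of mReplyID
    std::deque<std::function<void()>> deferred;             // plays the role of mDeferredMessages

    // A request arrives: defer it if another operation is still in flight.
    void onRequest(std::function<void()> work, std::function<void(int)> reply) {
        if (pendingReply) {
            deferred.push_back([this, work, reply] { onRequest(work, reply); });
            return;
        }
        pendingReply = reply;   // claim the reply slot
        work();                 // kick off the asynchronous operation
    }

    // The in-flight operation finished (cf. postPendingRepliesAndDeferredMessages).
    void onComplete(int err) {
        if (pendingReply) {
            (*pendingReply)(err);
            pendingReply.reset();
        }
        auto replay = std::move(deferred);
        deferred.clear();
        for (auto &msg : replay) {
            msg();              // re-post each deferred request
        }
    }
};
```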
diff --git a/media/libstagefright/MediaCodecSource.cpp b/media/libstagefright/MediaCodecSource.cpp
index 1395c27..bc656a2 100644
--- a/media/libstagefright/MediaCodecSource.cpp
+++ b/media/libstagefright/MediaCodecSource.cpp
@@ -435,6 +435,30 @@
     buffer->release();
 }
 
+status_t MediaCodecSource::setEncodingBitrate(int32_t bitRate) {
+    ALOGV("setEncodingBitrate (%d)", bitRate);
+
+    if (mEncoder == NULL) {
+        ALOGW("setEncodingBitrate (%d) : mEncoder is null", bitRate);
+        return BAD_VALUE;
+    }
+
+    sp<AMessage> params = new AMessage;
+    params->setInt32("video-bitrate", bitRate);
+
+    return mEncoder->setParameters(params);
+}
+
+status_t MediaCodecSource::requestIDRFrame() {
+    if (mEncoder == NULL) {
+        ALOGW("requestIDRFrame : mEncoder is null");
+        return BAD_VALUE;
+    } else {
+        mEncoder->requestIDRFrame();
+        return OK;
+    }
+}
+
 MediaCodecSource::MediaCodecSource(
         const sp<ALooper> &looper,
         const sp<AMessage> &outputFormat,
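Editorial note: the two helpers added above simply forward to the wrapped encoder, setEncodingBitrate() posting a "video-bitrate" parameter and requestIDRFrame() asking for a sync frame. A hedged caller-side sketch; how the MediaCodecSource instance is obtained is outside this patch:

```
// Hypothetical caller: 'source' is an sp<MediaCodecSource> obtained elsewhere
// (e.g. from MediaCodecSource::Create()). Lower the bitrate, then force an IDR
// frame so the change takes effect at a clean random-access point.
if (source != nullptr && source->setEncodingBitrate(2000000) == OK) {
    source->requestIDRFrame();
}
```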
diff --git a/media/libstagefright/MediaExtractorFactory.cpp b/media/libstagefright/MediaExtractorFactory.cpp
index c6e753d..7c981b3 100644
--- a/media/libstagefright/MediaExtractorFactory.cpp
+++ b/media/libstagefright/MediaExtractorFactory.cpp
@@ -59,7 +59,7 @@
             sp<IMediaExtractor> ex;
             mediaExService->makeExtractor(
                     CreateIDataSourceFromDataSource(source),
-                    mime ? std::make_unique<std::string>(mime) : nullptr,
+                    mime ? std::optional<std::string>(mime) : std::nullopt,
                     &ex);
             return ex;
         } else {
diff --git a/media/libstagefright/MediaMuxer.cpp b/media/libstagefright/MediaMuxer.cpp
index cab4ebd..c91386d 100644
--- a/media/libstagefright/MediaMuxer.cpp
+++ b/media/libstagefright/MediaMuxer.cpp
@@ -92,7 +92,9 @@
     }
 
     sp<MetaData> trackMeta = new MetaData;
-    convertMessageToMetaData(format, trackMeta);
+    if (convertMessageToMetaData(format, trackMeta) != OK) {
+        return BAD_VALUE;
+    }
 
     sp<MediaAdapter> newTrack = new MediaAdapter(trackMeta);
     status_t result = mWriter->addSource(newTrack);
@@ -175,16 +177,23 @@
 
 status_t MediaMuxer::writeSampleData(const sp<ABuffer> &buffer, size_t trackIndex,
                                      int64_t timeUs, uint32_t flags) {
-    Mutex::Autolock autoLock(mMuxerLock);
-
     if (buffer.get() == NULL) {
         ALOGE("WriteSampleData() get an NULL buffer.");
         return -EINVAL;
     }
-
-    if (mState != STARTED) {
-        ALOGE("WriteSampleData() is called in invalid state %d", mState);
-        return INVALID_OPERATION;
+    {
+        /* As MediaMuxer's writeSampleData handles input from multiple tracks,
+         * the scope of mMuxerLock is limited to this inner block so that a
+         * track's buffer does not have to wait for the previous buffer, whether
+         * from the same track or another one, to finish processing.
+         * It is the responsibility of each track's MediaAdapter object to
+         * gate its own buffers.
+         */
+        Mutex::Autolock autoLock(mMuxerLock);
+        if (mState != STARTED) {
+            ALOGE("WriteSampleData() is called in invalid state %d", mState);
+            return INVALID_OPERATION;
+        }
     }
 
     if (trackIndex >= mTrackList.size()) {
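Editorial note: the comment above narrows the mutex to the state check only. A generic illustration of that lock-scoping pattern, with std::mutex standing in for the Mutex/Autolock types used in this file (an assumption for the sketch, not the real MediaMuxer code):

```
#include <mutex>

std::mutex stateLock;   // stands in for mMuxerLock
bool started = true;    // stands in for mState == STARTED

bool writeSample() {
    {
        std::lock_guard<std::mutex> guard(stateLock);  // narrow scope, like mMuxerLock above
        if (!started) {
            return false;                              // INVALID_OPERATION in the real code
        }
    }  // lock released here; other tracks may proceed concurrently
    // ... hand the buffer to this track's own MediaAdapter-style queue ...
    return true;
}
```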
diff --git a/media/libstagefright/NuMediaExtractor.cpp b/media/libstagefright/NuMediaExtractor.cpp
index 050d7c2..6245014 100644
--- a/media/libstagefright/NuMediaExtractor.cpp
+++ b/media/libstagefright/NuMediaExtractor.cpp
@@ -312,6 +312,27 @@
         (*format)->setBuffer("pssh", buf);
     }
 
+    // Copy over the slow-motion related metadata
+    const void *slomoMarkers;
+    size_t slomoMarkersSize;
+    if (meta->findData(kKeySlowMotionMarkers, &type, &slomoMarkers, &slomoMarkersSize)
+            && slomoMarkersSize > 0) {
+        sp<ABuffer> buf = new ABuffer(slomoMarkersSize);
+        memcpy(buf->data(), slomoMarkers, slomoMarkersSize);
+        (*format)->setBuffer("slow-motion-markers", buf);
+    }
+
+    int32_t temporalLayerCount;
+    if (meta->findInt32(kKeyTemporalLayerCount, &temporalLayerCount)
+            && temporalLayerCount > 0) {
+        (*format)->setInt32("temporal-layer-count", temporalLayerCount);
+    }
+
+    float captureFps;
+    if (meta->findFloat(kKeyCaptureFramerate, &captureFps) && captureFps > 0.0f) {
+        (*format)->setFloat("capture-rate", captureFps);
+    }
+
     return OK;
 }
 
diff --git a/media/libstagefright/OWNERS b/media/libstagefright/OWNERS
new file mode 100644
index 0000000..819389d
--- /dev/null
+++ b/media/libstagefright/OWNERS
@@ -0,0 +1,7 @@
+set noparent
+chz@google.com
+essick@google.com
+lajos@google.com
+marcone@google.com
+taklee@google.com
+wonsik@google.com
\ No newline at end of file
diff --git a/media/libstagefright/SurfaceUtils.cpp b/media/libstagefright/SurfaceUtils.cpp
index 4c94baa..1f569ef 100644
--- a/media/libstagefright/SurfaceUtils.cpp
+++ b/media/libstagefright/SurfaceUtils.cpp
@@ -132,37 +132,63 @@
 }
 
 void setNativeWindowHdrMetadata(ANativeWindow *nativeWindow, HDRStaticInfo *info) {
-    struct android_smpte2086_metadata smpte2086_meta = {
-            .displayPrimaryRed = {
-                    info->sType1.mR.x * 0.00002f,
-                    info->sType1.mR.y * 0.00002f
-            },
-            .displayPrimaryGreen = {
-                    info->sType1.mG.x * 0.00002f,
-                    info->sType1.mG.y * 0.00002f
-            },
-            .displayPrimaryBlue = {
-                    info->sType1.mB.x * 0.00002f,
-                    info->sType1.mB.y * 0.00002f
-            },
-            .whitePoint = {
-                    info->sType1.mW.x * 0.00002f,
-                    info->sType1.mW.y * 0.00002f
-            },
-            .maxLuminance = (float) info->sType1.mMaxDisplayLuminance,
-            .minLuminance = info->sType1.mMinDisplayLuminance * 0.0001f
-    };
+    // If the mastering max and min luminance fields are 0, do not use them;
+    // a zero value indicates they may not be present in the stream.
+    if ((float)info->sType1.mMaxDisplayLuminance > 0.0f &&
+        (info->sType1.mMinDisplayLuminance * 0.0001f) > 0.0f) {
+        struct android_smpte2086_metadata smpte2086_meta = {
+                .displayPrimaryRed = {
+                        info->sType1.mR.x * 0.00002f,
+                        info->sType1.mR.y * 0.00002f
+                },
+                .displayPrimaryGreen = {
+                        info->sType1.mG.x * 0.00002f,
+                        info->sType1.mG.y * 0.00002f
+                },
+                .displayPrimaryBlue = {
+                        info->sType1.mB.x * 0.00002f,
+                        info->sType1.mB.y * 0.00002f
+                },
+                .whitePoint = {
+                        info->sType1.mW.x * 0.00002f,
+                        info->sType1.mW.y * 0.00002f
+                },
+                .maxLuminance = (float) info->sType1.mMaxDisplayLuminance,
+                .minLuminance = info->sType1.mMinDisplayLuminance * 0.0001f
+        };
 
-    int err = native_window_set_buffers_smpte2086_metadata(nativeWindow, &smpte2086_meta);
-    ALOGW_IF(err != 0, "failed to set smpte2086 metadata on surface (%d)", err);
+        int err = native_window_set_buffers_smpte2086_metadata(nativeWindow, &smpte2086_meta);
+        ALOGW_IF(err != 0, "failed to set smpte2086 metadata on surface (%d)", err);
+    }
 
-    struct android_cta861_3_metadata cta861_meta = {
-            .maxContentLightLevel = (float) info->sType1.mMaxContentLightLevel,
-            .maxFrameAverageLightLevel = (float) info->sType1.mMaxFrameAverageLightLevel
-    };
+    // If the content light level fields are 0, do not use them; a zero value
+    // indicates they may not be present in the stream.
+    if ((float)info->sType1.mMaxContentLightLevel > 0.0f &&
+        (float)info->sType1.mMaxFrameAverageLightLevel > 0.0f) {
+        struct android_cta861_3_metadata cta861_meta = {
+                .maxContentLightLevel = (float) info->sType1.mMaxContentLightLevel,
+                .maxFrameAverageLightLevel = (float) info->sType1.mMaxFrameAverageLightLevel
+        };
 
-    err = native_window_set_buffers_cta861_3_metadata(nativeWindow, &cta861_meta);
-    ALOGW_IF(err != 0, "failed to set cta861_3 metadata on surface (%d)", err);
+        int err = native_window_set_buffers_cta861_3_metadata(nativeWindow, &cta861_meta);
+        ALOGW_IF(err != 0, "failed to set cta861_3 metadata on surface (%d)", err);
+    }
+}
+
+status_t setNativeWindowRotation(
+        ANativeWindow *nativeWindow /* nonnull */, int rotation) {
+
+    int transform = 0;
+    if ((rotation % 90) == 0) {
+        switch ((rotation / 90) & 3) {
+            case 1:  transform = HAL_TRANSFORM_ROT_90;  break;
+            case 2:  transform = HAL_TRANSFORM_ROT_180; break;
+            case 3:  transform = HAL_TRANSFORM_ROT_270; break;
+            default: transform = 0;                     break;
+        }
+    }
+
+    return native_window_set_buffers_transform(nativeWindow, transform);
 }
 
 status_t pushBlankBuffersToNativeWindow(ANativeWindow *nativeWindow /* nonnull */) {
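Editorial note: setNativeWindowRotation() above only honours multiples of 90 degrees, and the `(rotation / 90) & 3` step folds multiples of 360 back into one of four transforms. A worked example of the mapping, followed by a hypothetical usage line ('nativeWindow' is assumed to be a valid ANativeWindow* obtained elsewhere):

```
// rotation  ->  (rotation / 90) & 3   ->  transform applied
//      0    ->          0             ->  0 (identity)
//     90    ->          1             ->  HAL_TRANSFORM_ROT_90
//    180    ->          2             ->  HAL_TRANSFORM_ROT_180
//    270    ->          3             ->  HAL_TRANSFORM_ROT_270
//    450    ->          1             ->  HAL_TRANSFORM_ROT_90 (450 is still a multiple of 90)
//     45    ->  not a multiple of 90  ->  0 (identity)

int32_t rotationDegrees = 270;  // e.g. a rotation hint read from the track format
status_t err = setNativeWindowRotation(nativeWindow, rotationDegrees);
```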
diff --git a/media/libstagefright/TEST_MAPPING b/media/libstagefright/TEST_MAPPING
index 8b36ea5..76fc74f 100644
--- a/media/libstagefright/TEST_MAPPING
+++ b/media/libstagefright/TEST_MAPPING
@@ -1,4 +1,16 @@
 {
+  // tests which require dynamic content
+  // invoke with: atest -- --enable-module-dynamic-download=true
+  // TODO(b/148094059): unit tests not allowed to download content
+  "dynamic-presubmit": [
+    // writerTest fails about 5 out of 66
+    // { "name": "writerTest" },
+
+    { "name": "HEVCUtilsUnitTest" },
+    { "name": "ExtractorFactoryTest" }
+
+  ],
+
   "presubmit": [
     {
       "name": "CtsMediaTestCases",
@@ -17,6 +29,9 @@
           "exclude-filter": "android.media.cts.AudioRecordTest"
         }
       ]
+    },
+    {
+      "name": "mediacodecTest"
     }
   ],
   "postsubmit": [
diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp
index a1e4d43..48b3255 100644
--- a/media/libstagefright/Utils.cpp
+++ b/media/libstagefright/Utils.cpp
@@ -769,6 +769,8 @@
         { "sei", kKeySEI },
         { "text-format-data", kKeyTextFormatData },
         { "thumbnail-csd-hevc", kKeyThumbnailHVCC },
+        { "slow-motion-markers", kKeySlowMotionMarkers },
+        { "thumbnail-csd-av1c", kKeyThumbnailAV1C },
     }
 };
 
@@ -1135,7 +1137,7 @@
         // assertion, let's be lenient for now...
         // CHECK((ptr[4] >> 2) == 0x3f);  // reserved
 
-        size_t lengthSize __unused = 1 + (ptr[4] & 3);
+        // the lengthSize value can be derived from 1 + (ptr[4] & 3) if needed
 
         // commented out check below as H264_QVGA_500_NO_AUDIO.3gp
         // violates it...
@@ -1663,13 +1665,16 @@
         meta->setInt32(kKeyColorMatrix, colorAspects.mMatrixCoeffs);
     }
 }
-
-void convertMessageToMetaData(const sp<AMessage> &msg, sp<MetaData> &meta) {
+/* Converts key and value pairs in AMessage format to MetaData format.
+ * Also checks for the presence of required keys.
+ */
+status_t convertMessageToMetaData(const sp<AMessage> &msg, sp<MetaData> &meta) {
     AString mime;
     if (msg->findString("mime", &mime)) {
         meta->setCString(kKeyMIMEType, mime.c_str());
     } else {
-        ALOGW("did not find mime type");
+        ALOGE("did not find mime type");
+        return BAD_VALUE;
     }
 
     convertMessageToMetaDataFromMappings(msg, meta);
@@ -1718,7 +1723,8 @@
             meta->setInt32(kKeyWidth, width);
             meta->setInt32(kKeyHeight, height);
         } else {
-            ALOGV("did not find width and/or height");
+            ALOGE("did not find width and/or height");
+            return BAD_VALUE;
         }
 
         int32_t sarWidth, sarHeight;
@@ -1803,14 +1809,14 @@
             }
         }
     } else if (mime.startsWith("audio/")) {
-        int32_t numChannels;
-        if (msg->findInt32("channel-count", &numChannels)) {
-            meta->setInt32(kKeyChannelCount, numChannels);
+        int32_t numChannels, sampleRate;
+        if (!msg->findInt32("channel-count", &numChannels) ||
+                !msg->findInt32("sample-rate", &sampleRate)) {
+            ALOGE("did not find channel-count and/or sample-rate");
+            return BAD_VALUE;
         }
-        int32_t sampleRate;
-        if (msg->findInt32("sample-rate", &sampleRate)) {
-            meta->setInt32(kKeySampleRate, sampleRate);
-        }
+        meta->setInt32(kKeyChannelCount, numChannels);
+        meta->setInt32(kKeySampleRate, sampleRate);
         int32_t bitsPerSample;
         if (msg->findInt32("bits-per-sample", &bitsPerSample)) {
             meta->setInt32(kKeyBitsPerSample, bitsPerSample);
@@ -1900,7 +1906,8 @@
             std::vector<uint8_t> hvcc(csd0size + 1024);
             size_t outsize = reassembleHVCC(csd0, hvcc.data(), hvcc.size(), 4);
             meta->setData(kKeyHVCC, kTypeHVCC, hvcc.data(), outsize);
-        } else if (mime == MEDIA_MIMETYPE_VIDEO_AV1) {
+        } else if (mime == MEDIA_MIMETYPE_VIDEO_AV1 ||
+                   mime == MEDIA_MIMETYPE_IMAGE_AVIF) {
             meta->setData(kKeyAV1C, 0, csd0->data(), csd0->size());
         } else if (mime == MEDIA_MIMETYPE_VIDEO_DOLBY_VISION) {
             if (msg->findBuffer("csd-2", &csd2)) {
@@ -1925,7 +1932,8 @@
                     }
                 }
             } else {
-                ALOGW("We need csd-2!!. %s", msg->debugString().c_str());
+                ALOGE("We need csd-2!!. %s", msg->debugString().c_str());
+                return BAD_VALUE;
             }
         } else if (mime == MEDIA_MIMETYPE_VIDEO_VP9) {
             meta->setData(kKeyVp9CodecPrivate, 0, csd0->data(), csd0->size());
@@ -1991,6 +1999,7 @@
     ALOGI("converted %s to:", msg->debugString(0).c_str());
     meta->dumpToLog();
 #endif
+    return OK;
 }
 
 status_t sendMetaDataToHal(sp<MediaPlayerBase::AudioSink>& sink,
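Editorial note: because convertMessageToMetaData() now returns a status instead of silently producing incomplete metadata, callers are expected to propagate the failure, as the MediaMuxer::addTrack() change earlier in this patch does. A minimal sketch of the calling pattern:

```
sp<MetaData> trackMeta = new MetaData;
if (convertMessageToMetaData(format, trackMeta) != OK) {
    // A required key is missing: mime, width/height for video,
    // channel-count/sample-rate for audio, or csd-2 for Dolby Vision.
    return BAD_VALUE;
}
```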
@@ -2136,8 +2145,10 @@
     }
     info->sample_rate = srate;
 
-    int32_t cmask = 0;
-    if (!meta->findInt32(kKeyChannelMask, &cmask) || cmask == CHANNEL_MASK_USE_CHANNEL_ORDER) {
+    int32_t rawChannelMask;
+    audio_channel_mask_t cmask = meta->findInt32(kKeyChannelMask, &rawChannelMask) ?
+            static_cast<audio_channel_mask_t>(rawChannelMask) : CHANNEL_MASK_USE_CHANNEL_ORDER;
+    if (cmask == CHANNEL_MASK_USE_CHANNEL_ORDER) {
         ALOGV("track of type '%s' does not publish channel mask", mime);
 
         // Try a channel count instead
@@ -2178,7 +2189,11 @@
     }
     // Check if offload is possible for given format, stream type, sample rate,
     // bit rate, duration, video and streaming
+#ifdef DISABLE_AUDIO_SYSTEM_OFFLOAD
+    return false;
+#else
     return AudioSystem::isOffloadSupported(info);
+#endif
 }
 
 HLSTime::HLSTime(const sp<AMessage>& meta) :
diff --git a/media/libstagefright/bqhelper/TEST_MAPPING b/media/libstagefright/bqhelper/TEST_MAPPING
new file mode 100644
index 0000000..c7f2fd8
--- /dev/null
+++ b/media/libstagefright/bqhelper/TEST_MAPPING
@@ -0,0 +1,6 @@
+// mappings for frameworks/av/media/libstagefright/bqhelper
+{
+  "presubmit": [
+    { "name": "FrameDropper_test"}
+  ]
+}
diff --git a/media/libstagefright/bqhelper/include/media/stagefright/bqhelper/FrameDropper.h b/media/libstagefright/bqhelper/include/media/stagefright/bqhelper/FrameDropper.h
index 4e83059..7e2909a 100644
--- a/media/libstagefright/bqhelper/include/media/stagefright/bqhelper/FrameDropper.h
+++ b/media/libstagefright/bqhelper/include/media/stagefright/bqhelper/FrameDropper.h
@@ -30,6 +30,8 @@
     FrameDropper();
 
     // maxFrameRate required to be positive.
+    // A negative maxFrameRate causes shouldDrop() to always return false.
+    // maxFrameRate == 0 is illegal.
     status_t setMaxFrameRate(float maxFrameRate);
 
     // Returns false if max frame rate has not been set via setMaxFrameRate.
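Editorial note: a hedged usage sketch of the contract documented above. The shouldDrop(timeUs) call and the sp<> ownership shown here are assumptions about the rest of the header, which this hunk does not show:

```
sp<FrameDropper> dropper = new FrameDropper();
// Negative: never drop. Zero: rejected by setMaxFrameRate. Positive: cap the frame rate.
if (dropper->setMaxFrameRate(30.0f) == OK) {
    int64_t frameTimeUs = 33333;              // timestamp of an incoming frame
    if (!dropper->shouldDrop(frameTimeUs)) {
        // keep (encode / queue) the frame
    }
}
```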
diff --git a/media/libstagefright/bqhelper/tests/Android.bp b/media/libstagefright/bqhelper/tests/Android.bp
index 2fbc3d6..3897689 100644
--- a/media/libstagefright/bqhelper/tests/Android.bp
+++ b/media/libstagefright/bqhelper/tests/Android.bp
@@ -1,5 +1,6 @@
 cc_test {
     name: "FrameDropper_test",
+    test_suites: ["device-tests"],
 
     srcs: ["FrameDropper_test.cpp"],
 
diff --git a/media/libstagefright/bqhelper/tests/FrameDropper_test.cpp b/media/libstagefright/bqhelper/tests/FrameDropper_test.cpp
index 55ae77c..b18067f 100644
--- a/media/libstagefright/bqhelper/tests/FrameDropper_test.cpp
+++ b/media/libstagefright/bqhelper/tests/FrameDropper_test.cpp
@@ -110,7 +110,7 @@
 };
 
 TEST_F(FrameDropperTest, TestInvalidMaxFrameRate) {
-    EXPECT_NE(OK, mFrameDropper->setMaxFrameRate(-1.0));
+    EXPECT_EQ(OK, mFrameDropper->setMaxFrameRate(-1.0));
     EXPECT_NE(OK, mFrameDropper->setMaxFrameRate(0));
 }
 
diff --git a/media/libstagefright/codecs/aacdec/DrcPresModeWrap.cpp b/media/libstagefright/codecs/aacdec/DrcPresModeWrap.cpp
index 168d140..157cab6 100644
--- a/media/libstagefright/codecs/aacdec/DrcPresModeWrap.cpp
+++ b/media/libstagefright/codecs/aacdec/DrcPresModeWrap.cpp
@@ -217,7 +217,7 @@
         }
         else { // handle other used encoder target levels
 
-            // Sanity check: DRC presentation mode is only specified for max. 5.1 channels
+            // Validation check: DRC presentation mode is only specified for max. 5.1 channels
             if (mStreamNrAACChan > 6) {
                 drcPresMode = 0;
             }
@@ -308,7 +308,7 @@
             } // switch()
         } // if (mEncoderTarget  == GPM_ENCODER_TARGET_LEVEL)
 
-        // sanity again
+        // validation check again
         if (newHeavy == 1) {
             newBoostFactor=127; // not really needed as the same would be done by the decoder anyway
             newAttFactor = 127;
diff --git a/media/libstagefright/codecs/amrnb/TEST_MAPPING b/media/libstagefright/codecs/amrnb/TEST_MAPPING
new file mode 100644
index 0000000..343d08a
--- /dev/null
+++ b/media/libstagefright/codecs/amrnb/TEST_MAPPING
@@ -0,0 +1,10 @@
+// mappings for frameworks/av/media/libstagefright/codecs/amrnb
+{
+  // tests which require dynamic content
+  // invoke with: atest -- --enable-module-dynamic-download=true
+  // TODO(b/148094059): unit tests not allowed to download content
+  "dynamic-presubmit": [
+    { "name": "AmrnbDecoderTest"},
+    { "name": "AmrnbEncoderTest"}
+  ]
+}
diff --git a/media/libstagefright/codecs/amrnb/dec/test/Android.bp b/media/libstagefright/codecs/amrnb/dec/test/Android.bp
index 7a95cfa..91c9f86 100644
--- a/media/libstagefright/codecs/amrnb/dec/test/Android.bp
+++ b/media/libstagefright/codecs/amrnb/dec/test/Android.bp
@@ -17,6 +17,7 @@
 cc_test {
     name: "AmrnbDecoderTest",
     gtest: true,
+    test_suites: ["device-tests"],
 
     srcs: [
         "AmrnbDecoderTest.cpp",
diff --git a/media/libstagefright/codecs/amrnb/dec/test/AndroidTest.xml b/media/libstagefright/codecs/amrnb/dec/test/AndroidTest.xml
new file mode 100644
index 0000000..1a9e678
--- /dev/null
+++ b/media/libstagefright/codecs/amrnb/dec/test/AndroidTest.xml
@@ -0,0 +1,31 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<configuration description="Test module config for Amr-nb Decoder unit test">
+    <option name="test-suite-tag" value="AmrnbDecoderTest" />
+    <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
+        <option name="cleanup" value="true" />
+        <option name="push" value="AmrnbDecoderTest->/data/local/tmp/AmrnbDecoderTest" />
+        <option name="push-file"
+            key="https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/codecs/amrnb/dec/test/AmrnbDecoderTest.zip?unzip=true"
+            value="/data/local/tmp/AmrnbDecoderTestRes/" />
+    </target_preparer>
+
+    <test class="com.android.tradefed.testtype.GTest" >
+        <option name="native-test-device-path" value="/data/local/tmp" />
+        <option name="module-name" value="AmrnbDecoderTest" />
+        <option name="native-test-flag" value="-P /data/local/tmp/AmrnbDecoderTestRes/" />
+    </test>
+</configuration>
diff --git a/media/libstagefright/codecs/amrnb/dec/test/README.md b/media/libstagefright/codecs/amrnb/dec/test/README.md
index 62e13ae..e9073e4 100644
--- a/media/libstagefright/codecs/amrnb/dec/test/README.md
+++ b/media/libstagefright/codecs/amrnb/dec/test/README.md
@@ -22,13 +22,18 @@
 adb push ${OUT}/data/nativetest/AmrnbDecoderTest/AmrnbDecoderTest /data/local/tmp/
 ```
 
-The resource file for the tests is taken from [here](https://drive.google.com/drive/folders/13cM4tAaVFrmr-zGFqaAzFBbKs75pnm9b). Push these files into device for testing.
-Download amr-nb folder and push all the files in this folder to /data/local/tmp/ on the device.
+The resource files for the tests are taken from [here](https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/codecs/amrnb/dec/test/AmrnbDecoderTest.zip). Download, unzip and push them to the device for testing.
+
 ```
-adb push amr-nb/. /data/local/tmp/
+adb push AmrnbDecoderTestRes/. /data/local/tmp/
 ```
 
 usage: AmrnbDecoderTest -P \<path_to_folder\>
 ```
-adb shell /data/local/tmp/AmrnbDecoderTest -P /data/local/tmp/
+adb shell /data/local/tmp/AmrnbDecoderTest -P /data/local/tmp/AmrnbDecoderTestRes/
+```
+Alternatively, the test can also be run using the atest command.
+
+```
+atest AmrnbDecoderTest -- --enable-module-dynamic-download=true
 ```
diff --git a/media/libstagefright/codecs/amrnb/enc/Android.bp b/media/libstagefright/codecs/amrnb/enc/Android.bp
index 73a1d4b..ff9a720 100644
--- a/media/libstagefright/codecs/amrnb/enc/Android.bp
+++ b/media/libstagefright/codecs/amrnb/enc/Android.bp
@@ -81,6 +81,13 @@
     //},
 
     shared_libs: ["libstagefright_amrnb_common"],
+
+    host_supported: true,
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
 }
 
 //###############################################################################
diff --git a/media/libstagefright/codecs/amrnb/enc/fuzzer/Android.bp b/media/libstagefright/codecs/amrnb/enc/fuzzer/Android.bp
new file mode 100644
index 0000000..e88e5eb
--- /dev/null
+++ b/media/libstagefright/codecs/amrnb/enc/fuzzer/Android.bp
@@ -0,0 +1,41 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+cc_fuzz {
+    name: "amrnb_enc_fuzzer",
+    host_supported: true,
+
+    srcs: [
+        "amrnb_enc_fuzzer.cpp",
+    ],
+
+    static_libs: [
+        "liblog",
+        "libstagefright_amrnbenc",
+        "libstagefright_amrnb_common",
+    ],
+
+    fuzz_config: {
+        cc: [
+            "android-media-fuzzing-reports@google.com",
+        ],
+        componentid: 155276,
+    },
+}
diff --git a/media/libstagefright/codecs/amrnb/enc/fuzzer/README.md b/media/libstagefright/codecs/amrnb/enc/fuzzer/README.md
new file mode 100644
index 0000000..239b4a8
--- /dev/null
+++ b/media/libstagefright/codecs/amrnb/enc/fuzzer/README.md
@@ -0,0 +1,60 @@
+# Fuzzer for libstagefright_amrnbenc encoder
+
+## Plugin Design Considerations
+The fuzzer plugin for the AMR-NB encoder is designed based on an understanding of
+the codec and tries to achieve the following:
+
+##### Maximize code coverage
+The configuration parameters are not hardcoded, but instead selected based on
+incoming data. This ensures more code paths are reached by the fuzzer.
+
+The AMR-NB encoder supports the following parameters:
+1. Output Format (parameter name: `outputFormat`)
+2. Mode (parameter name: `mode`)
+
+| Parameter| Valid Values| Configured Value|
+|------------- |-------------| ----- |
+| `outputFormat` | 0. `AMR_TX_WMF` 1. `AMR_TX_IF2` 2. `AMR_TX_ETS` | Bits 0, 1 and 2 of 1st byte of data. |
+| `mode`   | 0. `MR475` 1. `MR515` 2. `MR59` 3. `MR67`  4. `MR74 ` 5. `MR795` 6. `MR102` 7. `MR122` 8. `MRDTX` | Bits 3, 4, 5 and 6 of 1st byte of data. |
+
+This also ensures that the plugin is always deterministic for any given input.
+
+##### Maximize utilization of input data
+The plugin feeds the entire input data to the codec using a loop.
+If the encode operation is successful, the input is advanced by the frame size.
+If the encode operation is unsuccessful, the input is still advanced by the frame
+size so that the fuzzer can proceed to feed the next frame.
+
+This ensures that the plugin tolerates any kind of input (empty, huge,
+malformed, etc.) and doesn't `exit()` on any input, thereby increasing the
+chance of identifying vulnerabilities.
+
+## Build
+
+This describes the steps to build the amrnb_enc_fuzzer binary.
+
+### Android
+
+#### Steps to build
+Build the fuzzer
+```
+  $ mm -j$(nproc) amrnb_enc_fuzzer
+```
+
+#### Steps to run
+Create a directory CORPUS_DIR and copy some PCM files into it.
+Push this directory to the device.
+
+To run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/amrnb_enc_fuzzer/amrnb_enc_fuzzer CORPUS_DIR
+```
+To run on host
+```
+  $ $ANDROID_HOST_OUT/fuzz/x86_64/amrnb_enc_fuzzer/amrnb_enc_fuzzer CORPUS_DIR
+```
+
+## References:
+ * http://llvm.org/docs/LibFuzzer.html
+ * https://github.com/google/oss-fuzz
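Editorial note: to make the parameter table in the "Maximize code coverage" section above concrete, the first input byte is consumed exactly as follows in amrnb_enc_fuzzer.cpp later in this patch:

```
uint8_t startByte = data[0];
int modeIndex = (startByte >> 3) % 9;       // picks one of the 9 entries in kModes
int outputFormatIndex = startByte % 3;      // AMR_TX_WMF / AMR_TX_IF2 / AMR_TX_ETS
bool dtxEnable = (startByte >> 1) & 0x01;   // dtx_enable flag passed to AMREncodeInit
```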
diff --git a/media/libstagefright/codecs/amrnb/enc/fuzzer/amrnb_enc_fuzzer.cpp b/media/libstagefright/codecs/amrnb/enc/fuzzer/amrnb_enc_fuzzer.cpp
new file mode 100644
index 0000000..2fcbf24
--- /dev/null
+++ b/media/libstagefright/codecs/amrnb/enc/fuzzer/amrnb_enc_fuzzer.cpp
@@ -0,0 +1,105 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+#include <string.h>
+#include <utils/Log.h>
+#include <algorithm>
+#include "gsmamr_enc.h"
+
+// Constants for AMR-NB
+const int32_t kNumInputSamples = L_FRAME;  // 160 samples
+const int32_t kOutputBufferSize = 2 * kNumInputSamples * sizeof(Word16);
+const Mode kModes[9] = {MR475, /* 4.75 kbps */
+                        MR515, /* 5.15 kbps */
+                        MR59,  /* 5.90 kbps */
+                        MR67,  /* 6.70 kbps */
+                        MR74,  /* 7.40 kbps */
+                        MR795, /* 7.95 kbps */
+                        MR102, /* 10.2 kbps */
+                        MR122, /* 12.2 kbps */
+                        MRDTX, /* DTX       */};
+const Word16 kOutputFormat[3] = {AMR_TX_WMF, AMR_TX_IF2, AMR_TX_ETS};
+
+class Codec {
+   public:
+    Codec() = default;
+    ~Codec() { deInitEncoder(); }
+    Word16 initEncoder(const uint8_t *data);
+    void deInitEncoder();
+    void encodeFrames(const uint8_t *data, size_t size);
+
+   private:
+    void *mEncState = nullptr;
+    void *mSidState = nullptr;
+};
+
+Word16 Codec::initEncoder(const uint8_t *data) {
+    return AMREncodeInit(&mEncState, &mSidState, (*data >> 1) & 0x01 /* dtx_enable flag */);
+}
+
+void Codec::deInitEncoder() {
+    if (mEncState) {
+        AMREncodeExit(&mEncState, &mSidState);
+        mEncState = nullptr;
+        mSidState = nullptr;
+    }
+}
+
+void Codec::encodeFrames(const uint8_t *data, size_t size) {
+    AMREncodeReset(mEncState, mSidState);
+    uint8_t startByte = *data;
+    int modeIndex = ((startByte >> 3) % 9);
+    int outputFormatIndex = (startByte % 3);
+    Mode mode = kModes[modeIndex];
+    Word16 outputFormat = kOutputFormat[outputFormatIndex];
+
+    // Consume startByte
+    data++;
+    size--;
+
+    while (size > 0) {
+        Frame_Type_3GPP frameType = (Frame_Type_3GPP)mode;
+
+        Word16 inputBuf[kNumInputSamples] = {};
+        int32_t minSize = std::min(size, sizeof(inputBuf));
+
+        uint8_t outputBuf[kOutputBufferSize] = {};
+        memcpy(inputBuf, data, minSize);
+
+        AMREncode(mEncState, mSidState, mode, inputBuf, outputBuf, &frameType, outputFormat);
+
+        data += minSize;
+        size -= minSize;
+    }
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+    if (size < 1) {
+        return 0;
+    }
+    Codec *codec = new Codec();
+    if (!codec) {
+        return 0;
+    }
+    if (codec->initEncoder(data) == 0) {
+        codec->encodeFrames(data, size);
+    }
+    delete codec;
+    return 0;
+}
diff --git a/media/libstagefright/codecs/amrnb/enc/src/qgain475.cpp b/media/libstagefright/codecs/amrnb/enc/src/qgain475.cpp
index f8da589..08a5c15 100644
--- a/media/libstagefright/codecs/amrnb/enc/src/qgain475.cpp
+++ b/media/libstagefright/codecs/amrnb/enc/src/qgain475.cpp
@@ -1106,7 +1106,7 @@
     // the real, quantized gains)
     gc_pred(pred_st, MR475, sf1_code_nosharp,
             &sf1_exp_gcode0, &sf1_frac_gcode0,
-            &sf0_exp_gcode0, &sf0_gcode0); // last two args are dummy
+            &sf0_exp_gcode0, &sf0_gcode0); // last two args are unused
     sf1_gcode0 = extract_l(Pow2(14, sf1_frac_gcode0));
 
     tmp = add (tmp, 2);
@@ -1426,7 +1426,7 @@
        the real, quantized gains)                                   */
     gc_pred(pred_st, MR475, sf1_code_nosharp,
             &sf1_exp_gcode0, &sf1_frac_gcode0,
-            &sf0_exp_gcode0, &sf0_gcode0, /* dummy args */
+            &sf0_exp_gcode0, &sf0_gcode0, /* unused args */
             pOverflow);
 
     sf1_gcode0 = (Word16)(Pow2(14, sf1_frac_gcode0, pOverflow));
diff --git a/media/libstagefright/codecs/amrnb/enc/test/Android.bp b/media/libstagefright/codecs/amrnb/enc/test/Android.bp
index e8982fe..7e1b561 100644
--- a/media/libstagefright/codecs/amrnb/enc/test/Android.bp
+++ b/media/libstagefright/codecs/amrnb/enc/test/Android.bp
@@ -17,6 +17,7 @@
 cc_test {
     name: "AmrnbEncoderTest",
     gtest: true,
+    test_suites: ["device-tests"],
 
     srcs: [
         "AmrnbEncoderTest.cpp",
diff --git a/media/libstagefright/codecs/amrnb/fuzzer/Android.bp b/media/libstagefright/codecs/amrnb/fuzzer/Android.bp
index 54de1cc..c1eaa53 100644
--- a/media/libstagefright/codecs/amrnb/fuzzer/Android.bp
+++ b/media/libstagefright/codecs/amrnb/fuzzer/Android.bp
@@ -34,4 +34,10 @@
             enabled: false,
         },
     },
+    fuzz_config: {
+        cc: [
+            "android-media-fuzzing-reports@google.com",
+        ],
+        componentid: 155276,
+    },
 }
diff --git a/media/libstagefright/codecs/amrnb/fuzzer/amrnb_dec_fuzzer.cpp b/media/libstagefright/codecs/amrnb/fuzzer/amrnb_dec_fuzzer.cpp
index d4e7e5c..c7a7378 100644
--- a/media/libstagefright/codecs/amrnb/fuzzer/amrnb_dec_fuzzer.cpp
+++ b/media/libstagefright/codecs/amrnb/fuzzer/amrnb_dec_fuzzer.cpp
@@ -26,8 +26,10 @@
 constexpr int32_t kBitsPerSample = 16;
 constexpr int32_t kOutputBufferSize = kSamplesPerFrame * kBitsPerSample / 8;
 const bitstream_format kBitStreamFormats[2] = {MIME_IETF, IF2};
-const int32_t kLocalWmfDecBytesPerFrame[8] = {12, 13, 15, 17, 19, 20, 26, 31};
-const int32_t kLocalIf2DecBytesPerFrame[8] = {13, 14, 16, 18, 19, 21, 26, 31};
+const int32_t kLocalWmfDecBytesPerFrame[16] = {12, 13, 15, 17, 19, 20, 26, 31,
+                                               5,  6,  5,  5,  0,  0,  0,  0};
+const int32_t kLocalIf2DecBytesPerFrame[16] = {13, 14, 16, 18, 19, 21, 26, 31,
+                                               13, 14, 16, 18, 19, 21, 26, 31};
 
 class Codec {
  public:
@@ -52,7 +54,7 @@
     bitstream_format bitsreamFormat = kBitStreamFormats[bit];
     int32_t frameSize = 0;
     /* Find frame type */
-    Frame_Type_3GPP frameType = static_cast<Frame_Type_3GPP>((mode >> 3) & 0x07);
+    Frame_Type_3GPP frameType = static_cast<Frame_Type_3GPP>((mode >> 3) & 0x0f);
     ++data;
     --size;
     if (bit) {
diff --git a/media/libstagefright/codecs/amrwb/TEST_MAPPING b/media/libstagefright/codecs/amrwb/TEST_MAPPING
new file mode 100644
index 0000000..0278d26
--- /dev/null
+++ b/media/libstagefright/codecs/amrwb/TEST_MAPPING
@@ -0,0 +1,10 @@
+// mappings for frameworks/av/media/libstagefright/codecs/amrwb
+{
+  // tests which require dynamic content
+  // invoke with: atest -- --enable-module-dynamic-download=true
+  // TODO(b/148094059): unit tests not allowed to download content
+  "dynamic-presubmit": [
+    { "name": "AmrwbDecoderTest"}
+
+  ]
+}
diff --git a/media/libstagefright/codecs/amrwb/fuzzer/Android.bp b/media/libstagefright/codecs/amrwb/fuzzer/Android.bp
index 46f77e3..7106a30 100644
--- a/media/libstagefright/codecs/amrwb/fuzzer/Android.bp
+++ b/media/libstagefright/codecs/amrwb/fuzzer/Android.bp
@@ -32,4 +32,10 @@
             enabled: false,
         },
     },
+    fuzz_config: {
+        cc: [
+            "android-media-fuzzing-reports@google.com",
+        ],
+        componentid: 155276,
+    },
 }
diff --git a/media/libstagefright/codecs/amrwb/src/wb_syn_filt.cpp b/media/libstagefright/codecs/amrwb/src/wb_syn_filt.cpp
index e1af6d4..d960322 100644
--- a/media/libstagefright/codecs/amrwb/src/wb_syn_filt.cpp
+++ b/media/libstagefright/codecs/amrwb/src/wb_syn_filt.cpp
@@ -273,9 +273,10 @@
 
         L_tmp1 >>= 11;      /* -4 : sig_lo[i] << 4 */
 
-        L_tmp1 += (int32)exc[(i<<1)] << a0;
+        int64 sig_tmp;
+        sig_tmp = (int64)L_tmp1 + (int32)(exc[(i<<1)] << a0);
+        L_tmp1 = (int32)(sig_tmp - (L_tmp2 << 1));
 
-        L_tmp1 -= (L_tmp2 << 1);
         /* sig_hi = bit16 to bit31 of synthesis */
         L_tmp1 = shl_int32(L_tmp1, 3);           /* ai in Q12 */
 
@@ -290,9 +291,8 @@
         L_tmp3 = fxp_mac_16by16(sig_lo[(i<<1)], a[1], L_tmp3);
         L_tmp3 = -L_tmp3 >> 11;
 
-        L_tmp3 += (int32)exc[(i<<1)+1] << a0;
-
-        L_tmp3 -= (L_tmp4 << 1);
+        sig_tmp = (int64)L_tmp3 + (int32)(exc[(i<<1)+1] << a0);
+        L_tmp3 = (int32)(sig_tmp - (L_tmp4 << 1));
         /* sig_hi = bit16 to bit31 of synthesis */
         L_tmp3 = shl_int32(L_tmp3, 3);           /* ai in Q12 */
         sig_hi[(i<<1)+1] = (int16)(L_tmp3 >> 16);
diff --git a/media/libstagefright/codecs/amrwb/test/Android.bp b/media/libstagefright/codecs/amrwb/test/Android.bp
index 968215a..e8a2aa9 100644
--- a/media/libstagefright/codecs/amrwb/test/Android.bp
+++ b/media/libstagefright/codecs/amrwb/test/Android.bp
@@ -16,6 +16,7 @@
 
 cc_test {
     name: "AmrwbDecoderTest",
+    test_suites: ["device-tests"],
     gtest: true,
 
     srcs: [
diff --git a/media/libstagefright/codecs/amrwbenc/Android.bp b/media/libstagefright/codecs/amrwbenc/Android.bp
index 64f302c..70c672d 100644
--- a/media/libstagefright/codecs/amrwbenc/Android.bp
+++ b/media/libstagefright/codecs/amrwbenc/Android.bp
@@ -138,6 +138,12 @@
         cfi: true,
     },
 
+    host_supported: true,
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
 }
 
 //###############################################################################
diff --git a/media/libstagefright/codecs/amrwbenc/TEST_MAPPING b/media/libstagefright/codecs/amrwbenc/TEST_MAPPING
new file mode 100644
index 0000000..045e8b3
--- /dev/null
+++ b/media/libstagefright/codecs/amrwbenc/TEST_MAPPING
@@ -0,0 +1,10 @@
+// mappings for frameworks/av/media/libstagefright/codecs/amrwbenc
+{
+  // tests which require dynamic content
+  // invoke with: atest -- --enable-module-dynamic-download=true
+  // TODO(b/148094059): unit tests not allowed to download content
+  "dynamic-presubmit": [
+    { "name": "AmrwbEncoderTest"}
+
+  ]
+}
diff --git a/media/libstagefright/codecs/amrwbenc/fuzzer/Android.bp b/media/libstagefright/codecs/amrwbenc/fuzzer/Android.bp
new file mode 100644
index 0000000..e3473d6
--- /dev/null
+++ b/media/libstagefright/codecs/amrwbenc/fuzzer/Android.bp
@@ -0,0 +1,41 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+cc_fuzz {
+    name: "amrwb_enc_fuzzer",
+    host_supported: true,
+
+    srcs: [
+        "amrwb_enc_fuzzer.cpp",
+    ],
+
+    static_libs: [
+        "liblog",
+        "libstagefright_amrwbenc",
+        "libstagefright_enc_common",
+    ],
+
+    fuzz_config: {
+        cc: [
+            "android-media-fuzzing-reports@google.com",
+        ],
+        componentid: 155276,
+    },
+}
diff --git a/media/libstagefright/codecs/amrwbenc/fuzzer/README.md b/media/libstagefright/codecs/amrwbenc/fuzzer/README.md
new file mode 100644
index 0000000..447fbfa
--- /dev/null
+++ b/media/libstagefright/codecs/amrwbenc/fuzzer/README.md
@@ -0,0 +1,60 @@
+# Fuzzer for libstagefright_amrwbenc encoder
+
+## Plugin Design Considerations
+The fuzzer plugin for AMR-WB is designed based on the understanding of the
+codec and tries to achieve the following:
+
+##### Maximize code coverage
+The configuration parameters are not hardcoded, but instead selected based on
+incoming data. This ensures more code paths are reached by the fuzzer.
+
+AMR-WB supports the following parameters:
+1. Frame Type (parameter name: `frameType`)
+2. Mode (parameter name: `mode`)
+
+| Parameter| Valid Values| Configured Value|
+|------------- |-------------| ----- |
+| `frameType` | 0. `VOAMRWB_DEFAULT` 1. `VOAMRWB_ITU` 2. `VOAMRWB_RFC3267` | Bits 0, 1 and 2 of 1st byte of data. |
+| `mode`   | 0. `VOAMRWB_MD66` 1. `VOAMRWB_MD885` 2. `VOAMRWB_MD1265` 3. `VOAMRWB_MD1425`  4. `VOAMRWB_MD1585 ` 5. `VOAMRWB_MD1825` 6. `VOAMRWB_MD1985` 7. `VOAMRWB_MD2305` 8. `VOAMRWB_MD2385` 9. `VOAMRWB_N_MODES` | Bits 4, 5, 6 and 7 of 1st byte of data. |
+
+This also ensures that the plugin is always deterministic for any given input.
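+
+As a rough illustration of this mapping (a minimal sketch, not part of the
+plugin; the struct and helper names here are made up, and the index
+expressions mirror those used in `amrwb_enc_fuzzer.cpp`), the first input
+byte can be split into the two configured values like this:
+```
+#include <cstdint>
+
+struct EncoderConfig {
+    int frameTypeIndex;  // 0..2 -> VOAMRWB_DEFAULT / VOAMRWB_ITU / VOAMRWB_RFC3267
+    int modeIndex;       // 0..9 -> VOAMRWB_MD66 ... VOAMRWB_N_MODES
+};
+
+EncoderConfig configFromFirstByte(uint8_t startByte) {
+    EncoderConfig cfg;
+    cfg.frameTypeIndex = startByte % 3;     // selects the frame type
+    cfg.modeIndex = (startByte >> 4) % 10;  // high nibble selects the mode
+    return cfg;
+}
+```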
+
+##### Maximize utilization of input data
+The plugin feeds the entire input data to the codec using a loop.
+If the encode operation is successful, the input is advanced by the frame size.
+If the encode operation is unsuccessful, the input is still advanced by the frame size
+so that the fuzzer can proceed to feed the next frame.
+
+This ensures that the plugin tolerates any kind of input (empty, huge,
+malformed, etc.) and does not `exit()` on any input, thereby increasing the
+chance of identifying vulnerabilities.
+
+## Build
+
+This describes the steps to build the amrwb_enc_fuzzer binary.
+
+### Android
+
+#### Steps to build
+Build the fuzzer
+```
+  $ mm -j$(nproc) amrwb_enc_fuzzer
+```
+
+#### Steps to run
+Create a directory CORPUS_DIR and copy some PCM files into it.
+Push this directory to the device.
+
+To run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/amrwb_enc_fuzzer/amrwb_enc_fuzzer CORPUS_DIR
+```
+To run on host
+```
+  $ $ANDROID_HOST_OUT/fuzz/x86_64/amrwb_enc_fuzzer/amrwb_enc_fuzzer CORPUS_DIR
+```
+
+## References:
+ * http://llvm.org/docs/LibFuzzer.html
+ * https://github.com/google/oss-fuzz
diff --git a/media/libstagefright/codecs/amrwbenc/fuzzer/amrwb_enc_fuzzer.cpp b/media/libstagefright/codecs/amrwbenc/fuzzer/amrwb_enc_fuzzer.cpp
new file mode 100644
index 0000000..4773a1f
--- /dev/null
+++ b/media/libstagefright/codecs/amrwbenc/fuzzer/amrwb_enc_fuzzer.cpp
@@ -0,0 +1,142 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+#include <string.h>
+#include <utils/Log.h>
+#include <algorithm>
+#include "cmnMemory.h"
+#include "voAMRWB.h"
+#include "cnst.h"
+
+typedef int(VO_API *VOGETAUDIOENCAPI)(VO_AUDIO_CODECAPI *pEncHandle);
+const int32_t kInputBufferSize = L_FRAME16k * sizeof(int16_t) * 2;
+const int32_t kOutputBufferSize = 2 * kInputBufferSize;
+const int32_t kModes[] = {VOAMRWB_MD66 /* 6.60kbps */,    VOAMRWB_MD885 /* 8.85kbps */,
+                          VOAMRWB_MD1265 /* 12.65kbps */, VOAMRWB_MD1425 /* 14.25kbps */,
+                          VOAMRWB_MD1585 /* 15.85kbps */, VOAMRWB_MD1825 /* 18.25kbps */,
+                          VOAMRWB_MD1985 /* 19.85kbps */, VOAMRWB_MD2305 /* 23.05kbps */,
+                          VOAMRWB_MD2385 /* 23.85kbps */, VOAMRWB_N_MODES /* Invalid Mode */};
+const VOAMRWBFRAMETYPE kFrameTypes[] = {VOAMRWB_DEFAULT, VOAMRWB_ITU, VOAMRWB_RFC3267};
+
+class Codec {
+   public:
+    Codec() = default;
+    ~Codec() { deInitEncoder(); }
+    bool initEncoder(const uint8_t *data);
+    void deInitEncoder();
+    void encodeFrames(const uint8_t *data, size_t size);
+
+   private:
+    VO_AUDIO_CODECAPI *mApiHandle = nullptr;
+    VO_MEM_OPERATOR *mMemOperator = nullptr;
+    VO_HANDLE mEncoderHandle = nullptr;
+};
+
+bool Codec::initEncoder(const uint8_t *data) {
+    uint8_t startByte = *data;
+    int32_t mode = kModes[(startByte >> 4) % 10];
+    VOAMRWBFRAMETYPE frameType = kFrameTypes[startByte % 3];
+    mMemOperator = new VO_MEM_OPERATOR;
+    if (!mMemOperator) {
+        return false;
+    }
+
+    mMemOperator->Alloc = cmnMemAlloc;
+    mMemOperator->Copy = cmnMemCopy;
+    mMemOperator->Free = cmnMemFree;
+    mMemOperator->Set = cmnMemSet;
+    mMemOperator->Check = cmnMemCheck;
+
+    VO_CODEC_INIT_USERDATA userData;
+    memset(&userData, 0, sizeof(userData));
+    userData.memflag = VO_IMF_USERMEMOPERATOR;
+    userData.memData = (VO_PTR)mMemOperator;
+
+    mApiHandle = new VO_AUDIO_CODECAPI;
+    if (!mApiHandle) {
+        return false;
+    }
+    if (VO_ERR_NONE != voGetAMRWBEncAPI(mApiHandle)) {
+        // Failed to get api handle
+        return false;
+    }
+    if (VO_ERR_NONE != mApiHandle->Init(&mEncoderHandle, VO_AUDIO_CodingAMRWB, &userData)) {
+        // Failed to init AMRWB encoder
+        return false;
+    }
+    if (VO_ERR_NONE != mApiHandle->SetParam(mEncoderHandle, VO_PID_AMRWB_FRAMETYPE, &frameType)) {
+        // Failed to set AMRWB encoder frame type
+        return false;
+    }
+    if (VO_ERR_NONE != mApiHandle->SetParam(mEncoderHandle, VO_PID_AMRWB_MODE, &mode)) {
+        // Failed to set AMRWB encoder mode
+        return false;
+    }
+    return true;
+}
+
+void Codec::deInitEncoder() {
+    if (mEncoderHandle) {
+        mApiHandle->Uninit(mEncoderHandle);
+        mEncoderHandle = nullptr;
+    }
+    if (mApiHandle) {
+        delete mApiHandle;
+        mApiHandle = nullptr;
+    }
+    if (mMemOperator) {
+        delete mMemOperator;
+        mMemOperator = nullptr;
+    }
+}
+
+void Codec::encodeFrames(const uint8_t *data, size_t size) {
+    do {
+        int32_t minSize = std::min((int32_t)size, kInputBufferSize);
+        uint8_t outputBuf[kOutputBufferSize] = {};
+        VO_CODECBUFFER inData;
+        VO_CODECBUFFER outData;
+        VO_AUDIO_OUTPUTINFO outFormat;
+        inData.Buffer = (unsigned char *)data;
+        inData.Length = minSize;
+        outData.Buffer = outputBuf;
+        mApiHandle->SetInputData(mEncoderHandle, &inData);
+        mApiHandle->GetOutputData(mEncoderHandle, &outData, &outFormat);
+        data += minSize;
+        size -= minSize;
+    } while (size > 0);
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+    if (size < 1) {
+        return 0;
+    }
+    Codec *codec = new Codec();
+    if (!codec) {
+        return 0;
+    }
+    if (codec->initEncoder(data)) {
+        // Consume first byte
+        ++data;
+        --size;
+        codec->encodeFrames(data, size);
+    }
+    delete codec;
+    return 0;
+}
diff --git a/media/libstagefright/codecs/amrwbenc/src/preemph.c b/media/libstagefright/codecs/amrwbenc/src/preemph.c
index 70c8650..a43841a 100644
--- a/media/libstagefright/codecs/amrwbenc/src/preemph.c
+++ b/media/libstagefright/codecs/amrwbenc/src/preemph.c
@@ -24,6 +24,7 @@
 
 #include "typedef.h"
 #include "basic_op.h"
+#include <stdint.h>
 
 void Preemph(
         Word16 x[],                           /* (i/o)   : input signal overwritten by the output */
diff --git a/media/libstagefright/codecs/amrwbenc/src/q_pulse.c b/media/libstagefright/codecs/amrwbenc/src/q_pulse.c
index fe0bdda..657b6fe 100644
--- a/media/libstagefright/codecs/amrwbenc/src/q_pulse.c
+++ b/media/libstagefright/codecs/amrwbenc/src/q_pulse.c
@@ -27,6 +27,7 @@
 #include "q_pulse.h"
 
 #define NB_POS 16                          /* pos in track, mask for sign bit */
+#define UNUSED_VAR __attribute__((unused))
 
 Word32 quant_1p_N1(                        /* (o) return N+1 bits             */
         Word16 pos,                        /* (i) position of the pulse       */
@@ -188,7 +189,7 @@
         Word16 pos[],                         /* (i) position of the pulse 1..4  */
         Word16 N)                             /* (i) number of bits for position */
 {
-    Word16 nb_pos, mask __unused, n_1, tmp;
+    Word16 nb_pos, mask UNUSED_VAR, n_1, tmp;
     Word16 posA[4], posB[4];
     Word32 i, j, k, index;
 
diff --git a/media/libstagefright/codecs/amrwbenc/test/Android.bp b/media/libstagefright/codecs/amrwbenc/test/Android.bp
index 7042bc5..0872570 100644
--- a/media/libstagefright/codecs/amrwbenc/test/Android.bp
+++ b/media/libstagefright/codecs/amrwbenc/test/Android.bp
@@ -16,6 +16,7 @@
 
 cc_test {
     name: "AmrwbEncoderTest",
+    test_suites: ["device-tests"],
     gtest: true,
 
     srcs: [
diff --git a/media/libstagefright/codecs/common/Android.bp b/media/libstagefright/codecs/common/Android.bp
index 260a60a..2290722 100644
--- a/media/libstagefright/codecs/common/Android.bp
+++ b/media/libstagefright/codecs/common/Android.bp
@@ -14,4 +14,11 @@
     export_include_dirs: ["include"],
 
     cflags: ["-Werror"],
+
+    host_supported: true,
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
 }
diff --git a/media/libstagefright/codecs/gsm/dec/SoftGSM.h b/media/libstagefright/codecs/gsm/dec/SoftGSM.h
index ef86915..d5885a6 100644
--- a/media/libstagefright/codecs/gsm/dec/SoftGSM.h
+++ b/media/libstagefright/codecs/gsm/dec/SoftGSM.h
@@ -20,9 +20,7 @@
 
 #include <media/stagefright/omx/SimpleSoftOMXComponent.h>
 
-extern "C" {
 #include "gsm.h"
-}
 
 namespace android {
 
diff --git a/media/libstagefright/codecs/m4v_h263/TEST_MAPPING b/media/libstagefright/codecs/m4v_h263/TEST_MAPPING
new file mode 100644
index 0000000..ba3ff1c
--- /dev/null
+++ b/media/libstagefright/codecs/m4v_h263/TEST_MAPPING
@@ -0,0 +1,18 @@
+// mappings for frameworks/av/media/libstagefright/codecs/m4v_h263
+{
+  // tests which require dynamic content
+  // invoke with: atest -- --enable-module-dynamic-download=true
+  // TODO(b/148094059): unit tests not allowed to download content
+  "dynamic-presubmit": [
+
+    // The decoder test reports an unexpected newline in the *config file,
+    // and that config file looks like the AndroidTest.xml file that we put in there.
+    // The encoder test does not hit this, and there is no substantive difference
+    // between the decode and encode AndroidTest.xml files -- except that the encode
+    // one does NOT end with a newline. Strange.
+    { "name": "Mpeg4H263DecoderTest"},
+    { "name": "Mpeg4H263EncoderTest"}
+
+  ]
+}
diff --git a/media/libstagefright/codecs/m4v_h263/dec/Android.bp b/media/libstagefright/codecs/m4v_h263/dec/Android.bp
index 4303565..7a33c54 100644
--- a/media/libstagefright/codecs/m4v_h263/dec/Android.bp
+++ b/media/libstagefright/codecs/m4v_h263/dec/Android.bp
@@ -6,23 +6,18 @@
         "com.android.media.swcodec",
     ],
     min_sdk_version: "29",
+    host_supported: true,
     shared_libs: ["liblog"],
 
     srcs: [
-        "src/adaptive_smooth_no_mmx.cpp",
         "src/bitstream.cpp",
         "src/block_idct.cpp",
         "src/cal_dc_scaler.cpp",
-        "src/chvr_filter.cpp",
-        "src/chv_filter.cpp",
         "src/combined_decode.cpp",
         "src/conceal.cpp",
         "src/datapart_decode.cpp",
         "src/dcac_prediction.cpp",
         "src/dec_pred_intra_dc.cpp",
-        "src/deringing_chroma.cpp",
-        "src/deringing_luma.cpp",
-        "src/find_min_max.cpp",
         "src/get_pred_adv_b_add.cpp",
         "src/get_pred_outside.cpp",
         "src/idct.cpp",
@@ -31,9 +26,6 @@
         "src/mb_utils.cpp",
         "src/packet_util.cpp",
         "src/post_filter.cpp",
-        "src/post_proc_semaphore.cpp",
-        "src/pp_semaphore_chroma_inter.cpp",
-        "src/pp_semaphore_luma.cpp",
         "src/pvdec_api.cpp",
         "src/scaling_tab.cpp",
         "src/vlc_decode.cpp",
@@ -43,11 +35,6 @@
         "src/zigzag_tab.cpp",
     ],
 
-    header_libs: [
-        "media_plugin_headers",
-        "libstagefright_headers"
-    ],
-
     local_include_dirs: ["src"],
     export_include_dirs: ["include"],
 
@@ -63,6 +50,12 @@
         ],
         cfi: true,
     },
+
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
 }
 
 //###############################################################################
diff --git a/media/libstagefright/codecs/m4v_h263/dec/include/mp4dec_api.h b/media/libstagefright/codecs/m4v_h263/dec/include/mp4dec_api.h
index 1f404ce..06aee07 100644
--- a/media/libstagefright/codecs/m4v_h263/dec/include/mp4dec_api.h
+++ b/media/libstagefright/codecs/m4v_h263/dec/include/mp4dec_api.h
@@ -42,13 +42,6 @@
 #define OSCL_EXPORT_REF /* empty */
 #endif
 
-/* flag for post-processing  4/25/00 */
-
-#ifdef DEC_NOPOSTPROC
-#undef PV_POSTPROC_ON   /* enable compilation of post-processing code */
-#else
-#define PV_POSTPROC_ON
-#endif
 
 #define PV_NO_POST_PROC 0
 #define PV_DEBLOCK 1
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/adaptive_smooth_no_mmx.cpp b/media/libstagefright/codecs/m4v_h263/dec/src/adaptive_smooth_no_mmx.cpp
deleted file mode 100644
index e2761eb..0000000
--- a/media/libstagefright/codecs/m4v_h263/dec/src/adaptive_smooth_no_mmx.cpp
+++ /dev/null
@@ -1,421 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/*
-
- Description: Separated modules into one function per file and put into
-    new template.
-
- Description: Optimizing C code and adding comments.  Also changing variable
-    names to make them more meaningful.
-
- Who:                   Date:
- Description:
-
-------------------------------------------------------------------------------
- INPUT AND OUTPUT DEFINITIONS
-
- Inputs:
-
-    Rec_Y = pointer to 0th position in buffer containing luminance values
-        of type uint8.
-    y_start = value of y coordinate of type int that specifies the first
-        row of pixels to be used in the filter algorithm.
-    x_start = value of x coordinate of type int that specifies the first
-        column of pixels to be used in the filter algorithm.
-    y_blk_start = value of the y coordinate of type int that specifies the
-        row of pixels which contains the start of a block. The row
-        specified by y_blk_start+BLK_SIZE is the last row of pixels
-        that are used in the filter algorithm.
-    x_blk_start = value of the x coordinate of type int that specifies the
-        column of pixels which contains the start of a block.  The
-        column specified by x_blk_start+BLK_SIZE is the last column of
-        pixels that are used in the filter algorithm.
-    thr = value of type int that is compared to the elements in Rec_Y to
-        determine if a particular value in Rec_Y will be modified by
-        the filter or not
-    width = value of type int that specifies the width of the display
-        in pixels (or pels, equivalently).
-    max_diff = value of type int that specifies the value that may be added
-        or subtracted from the pixel in Rec_Y that is being filtered
-        if the filter algorithm decides to change that particular
-        pixel's luminance value.
-
-
- Local Stores/Buffers/Pointers Needed:
-    None
-
- Global Stores/Buffers/Pointers Needed:
-    None
-
- Outputs:
-    None
-
- Pointers and Buffers Modified:
-    Buffer pointed to by Rec_Y is modified with the filtered
-    luminance values.
-
- Local Stores Modified:
-    None
-
- Global Stores Modified:
-    None
-
-------------------------------------------------------------------------------
- FUNCTION DESCRIPTION
-
- This function implements a motion compensated noise filter using adaptive
- weighted averaging of luminance values.  *Rec_Y contains the luminance values
- that are being filtered.
-
- The picture below depicts a 3x3 group of pixel luminance values.  The "u", "c",
- and "l" stand for "upper", "center" and "lower", respectively.  The location
- of pelc0 is specified by x_start and y_start in the 1-D array "Rec_Y" as
- follows (assuming x_start=0):
-
- location of pelc0 = [(y_start+1) * width] + x_start
-
- Moving up or down 1 row (moving from pelu2 to pelc2, for example) is done by
- incrementing or decrementing "width" elements within Rec_Y.
-
- The coordinates of the upper left hand corner of a block (not the group of
- 9 pixels depicted in the figure below) is specified by
- (y_blk_start, x_blk_start).  The width and height of the block is BLKSIZE.
- (y_start,x_start) may be specified independently of (y_blk_start, x_blk_start).
-
-    (y_start,x_start)
- -----------|--------------------------
-    |   |   |   |   |
-    |   X   | pelu1 | pelu2 |
-    | pelu0 |   |   |
-    |   |   |   |
- --------------------------------------
-    |   |   |   |
-    | pelc0 | pelc1 | pelc2 |
-    |   |   |   |
-    |   |   |   |
- --------------------------------------
-    |   |   |   |
-    | pell0 | pell1 | pell2 |
-    |   |   |   |
-    |   |   |   |
- --------------------------------------
-
- The filtering of the luminance values is achieved by comparing the 9
- luminance values to a threshold value ("thr") and then changing the
- luminance value of pelc1 if all of the values are above or all of the values
- are below the threshold.  The amount that the luminance value is changed
- depends on a weighted sum of the 9 luminance values. The position of Pelc1
- is then advanced to the right by one (as well as all of the surrounding pixels)
- and the same calculation is performed again for the luminance value of the new
- Pelc1. This continues row-wise until pixels in the last row of the block are
- filtered.
-
-
-------------------------------------------------------------------------------
- REQUIREMENTS
-
- None.
-
-------------------------------------------------------------------------------
- REFERENCES
-
- ..\corelibs\decoder\common\src\post_proc.c
-
-------------------------------------------------------------------------------
- PSEUDO-CODE
-
-------------------------------------------------------------------------------
- RESOURCES USED
-   When the code is written for a specific target processor the
-     the resources used should be documented below.
-
- STACK USAGE: [stack count for this module] + [variable to represent
-          stack usage for each subroutine called]
-
-     where: [stack usage variable] = stack usage for [subroutine
-         name] (see [filename].ext)
-
- DATA MEMORY USED: x words
-
- PROGRAM MEMORY USED: x words
-
- CLOCK CYCLES: [cycle count equation for this module] + [variable
-           used to represent cycle count for each subroutine
-           called]
-
-     where: [cycle count variable] = cycle count for [subroutine
-        name] (see [filename].ext)
-
-------------------------------------------------------------------------------
-*/
-
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-#include    "mp4dec_lib.h"
-#include    "post_proc.h"
-#include    "mp4def.h"
-
-#define OSCL_DISABLE_WARNING_CONV_POSSIBLE_LOSS_OF_DATA
-
-/*----------------------------------------------------------------------------
-; MACROS
-; Define module specific macros here
-----------------------------------------------------------------------------*/
-
-
-/*----------------------------------------------------------------------------
-; DEFINES
-; Include all pre-processor statements here. Include conditional
-; compile variables also.
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; LOCAL FUNCTION DEFINITIONS
-; Function Prototype declaration
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; LOCAL STORE/BUFFER/POINTER DEFINITIONS
-; Variable declaration - defined here and used outside this module
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; EXTERNAL FUNCTION REFERENCES
-; Declare functions defined elsewhere and referenced in this module
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; EXTERNAL GLOBAL STORE/BUFFER/POINTER REFERENCES
-; Declare variables used in this module but defined elsewhere
-----------------------------------------------------------------------------*/
-#ifdef PV_POSTPROC_ON
-/*----------------------------------------------------------------------------
-; FUNCTION CODE
-----------------------------------------------------------------------------*/
-void AdaptiveSmooth_NoMMX(
-    uint8 *Rec_Y,       /* i/o  */
-    int y_start,        /* i    */
-    int x_start,        /* i    */
-    int y_blk_start,    /* i    */
-    int x_blk_start,    /* i    */
-    int thr,        /* i    */
-    int width,      /* i    */
-    int max_diff        /* i    */
-)
-{
-
-    /*----------------------------------------------------------------------------
-    ; Define all local variables
-    ----------------------------------------------------------------------------*/
-    int  sign_v[15];
-    int sum_v[15];
-    int *sum_V_ptr;
-    int *sign_V_ptr;
-    uint8 pelu;
-    uint8 pelc;
-    uint8 pell;
-    uint8 *pelp;
-    uint8 oldrow[15];
-    int  sum;
-    int sum1;
-    uint8 *Rec_Y_ptr;
-    int32  addr_v;
-    int row_cntr;
-    int col_cntr;
-
-    /*----------------------------------------------------------------------------
-    ; Function body here
-    ----------------------------------------------------------------------------*/
-    /*  first row
-    */
-    addr_v = (int32)(y_start + 1) * width;  /* y coord of 1st element in the row  /
-                     /containing pelc pixel /     */
-    Rec_Y_ptr = &Rec_Y[addr_v + x_start];  /* initializing pointer to
-                           /  pelc0 position  */
-    sum_V_ptr = &sum_v[0];  /* initializing pointer to 0th element of array
-                /   that will contain weighted sums of pixel
-                /   luminance values */
-    sign_V_ptr = &sign_v[0];  /*  initializing pointer to 0th element of
-                  /   array that will contain sums that indicate
-                  /    how many of the 9 pixels are above or below
-                  /    the threshold value (thr)    */
-    pelp = &oldrow[0];  /* initializing pointer to the 0th element of array
-                /    that will contain current values of pelc that
-                /   are saved and used as values of pelu when the
-                /   next row of pixels are filtered */
-
-    pelu = *(Rec_Y_ptr - width);  /* assigning value of pelu0 to pelu  */
-    *pelp++ = pelc = *Rec_Y_ptr; /* assigning value of pelc0 to pelc and
-                     /  storing this value in pelp which
-                     /   will be used as value of pelu0 when
-                     /  next row is filtered */
-    pell = *(Rec_Y_ptr + width);  /* assigning value of pell0 to pell */
-    Rec_Y_ptr++; /* advancing pointer from pelc0 to pelc1 */
-    *sum_V_ptr++ = pelu + (pelc << 1) + pell;  /* weighted sum of pelu0,
-                         /  pelc0 and pell0  */
-    /* sum of 0's and 1's (0 if pixel value is below thr, 1 if value
-    /is above thr)  */
-    *sign_V_ptr++ = INDEX(pelu, thr) + INDEX(pelc, thr) + INDEX(pell, thr);
-
-
-    pelu = *(Rec_Y_ptr - width);  /* assigning value of pelu1 to pelu */
-    *pelp++ = pelc = *Rec_Y_ptr; /* assigning value of pelc1 to pelc and
-                     /  storing this value in pelp which
-                     /  will be used as the value of pelu1 when
-                     /  next row is filtered */
-    pell = *(Rec_Y_ptr + width);  /* assigning value of pell1 to pell */
-    Rec_Y_ptr++;  /* advancing pointer from pelc1 to pelc2 */
-    *sum_V_ptr++ = pelu + (pelc << 1) + pell; /* weighted sum of pelu1,
-                        / pelc1 and pell1  */
-    /* sum of 0's and 1's (0 if pixel value is below thr, 1 if value
-    /is above thr)  */
-    *sign_V_ptr++ = INDEX(pelu, thr) + INDEX(pelc, thr) + INDEX(pell, thr);
-
-    /* The loop below performs the filtering for the first row of
-    /   pixels in the region.  It steps across the remaining pixels in
-    /   the row and alters the luminance value of pelc1 if necessary,
-    /   depending on the luminance values of the adjacent pixels*/
-
-    for (col_cntr = (x_blk_start + BLKSIZE - 1) - x_start; col_cntr > 0; col_cntr--)
-    {
-        pelu = *(Rec_Y_ptr - width);  /* assigning value of pelu2 to
-                        /   pelu */
-        *pelp++ = pelc = *Rec_Y_ptr; /* assigning value of pelc2 to pelc
-                         / and storing this value in pelp
-                         / which will be used   as value of pelu2
-                         / when next row is filtered */
-        pell = *(Rec_Y_ptr + width); /* assigning value of pell2 to pell */
-
-        /* weighted sum of pelu1, pelc1 and pell1  */
-        *sum_V_ptr = pelu + (pelc << 1) + pell;
-        /* sum of 0's and 1's (0 if pixel value is below thr,
-        /1 if value is above thr)  */
-        *sign_V_ptr = INDEX(pelu, thr) + INDEX(pelc, thr) +
-                      INDEX(pell, thr);
-        /* the value of sum1 indicates how many of the 9 pixels'
-        /luminance values are above or equal to thr */
-        sum1 = *(sign_V_ptr - 2) + *(sign_V_ptr - 1) + *sign_V_ptr;
-
-        /* alter the luminance value of pelc1 if all 9 luminance values
-        /are above or equal to thr or if all 9 values are below thr */
-        if (sum1 == 0 || sum1 == 9)
-        {
-            /* sum is a weighted average of the 9 pixel luminance
-            /values   */
-            sum = (*(sum_V_ptr - 2) + (*(sum_V_ptr - 1) << 1) +
-                   *sum_V_ptr + 8) >> 4;
-
-            Rec_Y_ptr--;  /* move pointer back to pelc1  */
-            /* If luminance value of pelc1 is larger than
-            / sum by more than max_diff, then subract max_diff
-            / from luminance value of pelc1*/
-            if ((int)(*Rec_Y_ptr - sum) > max_diff)
-            {
-                sum = *Rec_Y_ptr - max_diff;
-            }
-            /* If luminance value of pelc1 is smaller than
-            / sum by more than max_diff, then add max_diff
-            / to luminance value of pelc1*/
-            else if ((int)(*Rec_Y_ptr - sum) < -max_diff)
-            {
-                sum = *Rec_Y_ptr + max_diff;
-            }
-            *Rec_Y_ptr++ = sum; /* assign value of sum to pelc1
-                         and advance pointer to pelc2 */
-        }
-        Rec_Y_ptr++; /* advance pointer to new value of pelc2
-                 /   old pelc2 is now treated as pelc1*/
-        sum_V_ptr++; /* pointer is advanced so next weighted sum may
-                 /  be saved */
-        sign_V_ptr++; /* pointer is advanced so next sum of 0's and
-                  / 1's may be saved  */
-    }
-
-    /* The nested loops below perform the filtering for the remaining rows */
-
-    addr_v = (y_start + 2) * width;  /* advance addr_v to the next row
-                     /   (corresponding to pell0)*/
-    /* The outer loop steps throught the rows.   */
-    for (row_cntr = (y_blk_start + BLKSIZE) - (y_start + 2); row_cntr > 0; row_cntr--)
-    {
-        Rec_Y_ptr = &Rec_Y[addr_v + x_start]; /* advance pointer to
-            /the old pell0, which has become the new pelc0 */
-        addr_v += width;  /* move addr_v down 1 row */
-        sum_V_ptr = &sum_v[0];  /* re-initializing pointer */
-        sign_V_ptr = &sign_v[0];  /* re-initilaizing pointer */
-        pelp = &oldrow[0]; /* re-initializing pointer */
-
-        pelu = *pelp; /* setting pelu0 to old value of pelc0 */
-        *pelp++ = pelc = *Rec_Y_ptr;
-        pell = *(Rec_Y_ptr + width);
-        Rec_Y_ptr++;
-        *sum_V_ptr++ = pelu + (pelc << 1) + pell;
-        *sign_V_ptr++ = INDEX(pelu, thr) + INDEX(pelc, thr) +
-                        INDEX(pell, thr);
-
-        pelu = *pelp; /* setting pelu1 to old value of pelc1 */
-        *pelp++ = pelc = *Rec_Y_ptr;
-        pell = *(Rec_Y_ptr + width);
-        Rec_Y_ptr++;
-        *sum_V_ptr++ = pelu + (pelc << 1) + pell;
-        *sign_V_ptr++ = INDEX(pelu, thr) + INDEX(pelc, thr) +
-                        INDEX(pell, thr);
-        /* The inner loop steps through the columns */
-        for (col_cntr = (x_blk_start + BLKSIZE - 1) - x_start; col_cntr > 0; col_cntr--)
-        {
-            pelu = *pelp; /* setting pelu2 to old value of pelc2 */
-            *pelp++ = pelc = *Rec_Y_ptr;
-            pell = *(Rec_Y_ptr + width);
-
-            *sum_V_ptr = pelu + (pelc << 1) + pell;
-            *sign_V_ptr = INDEX(pelu, thr) + INDEX(pelc, thr) +
-                          INDEX(pell, thr);
-
-            sum1 = *(sign_V_ptr - 2) + *(sign_V_ptr - 1) + *sign_V_ptr;
-            /* the "if" statement below is the same as the one in
-            / the first loop */
-            if (sum1 == 0 || sum1 == 9)
-            {
-                sum = (*(sum_V_ptr - 2) + (*(sum_V_ptr - 1) << 1) +
-                       *sum_V_ptr + 8) >> 4;
-
-                Rec_Y_ptr--;
-                if ((int)(*Rec_Y_ptr - sum) > max_diff)
-                {
-                    sum = *Rec_Y_ptr - max_diff;
-                }
-                else if ((int)(*Rec_Y_ptr - sum) < -max_diff)
-                {
-                    sum = *Rec_Y_ptr + max_diff;
-                }
-                *Rec_Y_ptr++ = (uint8) sum;
-            }
-            Rec_Y_ptr++;
-            sum_V_ptr++;
-            sign_V_ptr++;
-        }
-    }
-
-    /*----------------------------------------------------------------------------
-    ; Return nothing or data or data pointer
-    ----------------------------------------------------------------------------*/
-    return;
-}
-#endif
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/block_idct.cpp b/media/libstagefright/codecs/m4v_h263/dec/src/block_idct.cpp
index 3d10086..bc708e2 100644
--- a/media/libstagefright/codecs/m4v_h263/dec/src/block_idct.cpp
+++ b/media/libstagefright/codecs/m4v_h263/dec/src/block_idct.cpp
@@ -506,6 +506,7 @@
 /*----------------------------------------------------------------------------
 ; Function Code FOR idctrow
 ----------------------------------------------------------------------------*/
+__attribute__((no_sanitize("signed-integer-overflow")))
 void idctrow(
     int16 *blk, uint8 *pred, uint8 *dst, int width
 )
@@ -828,6 +829,7 @@
 /*----------------------------------------------------------------------------
 ; Function Code FOR idctcol
 ----------------------------------------------------------------------------*/
+__attribute__((no_sanitize("signed-integer-overflow")))
 void idctcol(
     int16 *blk
 )
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/chv_filter.cpp b/media/libstagefright/codecs/m4v_h263/dec/src/chv_filter.cpp
deleted file mode 100644
index 6593b48..0000000
--- a/media/libstagefright/codecs/m4v_h263/dec/src/chv_filter.cpp
+++ /dev/null
@@ -1,654 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/*
-------------------------------------------------------------------------------
- INPUT AND OUTPUT DEFINITIONS
-
- Inputs:
-    [input_variable_name] = [description of the input to module, its type
-                 definition, and length (when applicable)]
-
- Local Stores/Buffers/Pointers Needed:
-    [local_store_name] = [description of the local store, its type
-                  definition, and length (when applicable)]
-    [local_buffer_name] = [description of the local buffer, its type
-                   definition, and length (when applicable)]
-    [local_ptr_name] = [description of the local pointer, its type
-                definition, and length (when applicable)]
-
- Global Stores/Buffers/Pointers Needed:
-    [global_store_name] = [description of the global store, its type
-                   definition, and length (when applicable)]
-    [global_buffer_name] = [description of the global buffer, its type
-                definition, and length (when applicable)]
-    [global_ptr_name] = [description of the global pointer, its type
-                 definition, and length (when applicable)]
-
- Outputs:
-    [return_variable_name] = [description of data/pointer returned
-                  by module, its type definition, and length
-                  (when applicable)]
-
- Pointers and Buffers Modified:
-    [variable_bfr_ptr] points to the [describe where the
-      variable_bfr_ptr points to, its type definition, and length
-      (when applicable)]
-    [variable_bfr] contents are [describe the new contents of
-      variable_bfr]
-
- Local Stores Modified:
-    [local_store_name] = [describe new contents, its type
-                  definition, and length (when applicable)]
-
- Global Stores Modified:
-    [global_store_name] = [describe new contents, its type
-                   definition, and length (when applicable)]
-
-------------------------------------------------------------------------------
- FUNCTION DESCRIPTION
-
-   For fast Deblock filtering
-   Newer version (macroblock based processing)
-
-------------------------------------------------------------------------------
- REQUIREMENTS
-
- [List requirements to be satisfied by this module.]
-
-------------------------------------------------------------------------------
- REFERENCES
-
- [List all references used in designing this module.]
-
-------------------------------------------------------------------------------
- PSEUDO-CODE
-
-------------------------------------------------------------------------------
- RESOURCES USED
-   When the code is written for a specific target processor the
-     the resources used should be documented below.
-
- STACK USAGE: [stack count for this module] + [variable to represent
-          stack usage for each subroutine called]
-
-     where: [stack usage variable] = stack usage for [subroutine
-         name] (see [filename].ext)
-
- DATA MEMORY USED: x words
-
- PROGRAM MEMORY USED: x words
-
- CLOCK CYCLES: [cycle count equation for this module] + [variable
-           used to represent cycle count for each subroutine
-           called]
-
-     where: [cycle count variable] = cycle count for [subroutine
-        name] (see [filename].ext)
-
-------------------------------------------------------------------------------
-*/
-
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-#include    "mp4dec_lib.h"
-#include    "post_proc.h"
-
-#define OSCL_DISABLE_WARNING_CONV_POSSIBLE_LOSS_OF_DATA
-
-/*----------------------------------------------------------------------------
-; MACROS
-; Define module specific macros here
-----------------------------------------------------------------------------*/
-//#define FILTER_LEN_8
-
-/*----------------------------------------------------------------------------
-; DEFINES
-; Include all pre-processor statements here. Include conditional
-; compile variables also.
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; LOCAL FUNCTION DEFINITIONS
-; Function Prototype declaration
-
-----------------------------------------------------------------------------
-; LOCAL STORE/BUFFER/POINTER DEFINITIONS
-; Variable declaration - defined here and used outside this module
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; EXTERNAL FUNCTION REFERENCES
-; Declare functions defined elsewhere and referenced in this module
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; EXTERNAL GLOBAL STORE/BUFFER/POINTER REFERENCES
-; Declare variables used in this module but defined elsewhere
-----------------------------------------------------------------------------*/
-#ifdef PV_POSTPROC_ON
-
-/*************************************************************************
-    Function prototype : void CombinedHorzVertFilter(   uint8 *rec,
-                                                        int width,
-                                                        int height,
-                                                        int *QP_store,
-                                                        int chr,
-                                                        uint8 *pp_mod)
-    Parameters  :
-        rec     :   pointer to the decoded frame buffer.
-        width   :   width of decoded frame.
-        height  :   height of decoded frame
-        QP_store:   pointer to the array of QP corresponding to the decoded frame.
-                    It had only one value for each MB.
-        chr     :   luma or color indication
-                    == 0 luma
-                    == 1 color
-        pp_mod  :   The semphore used for deblocking
-
-    Remark      :   The function do the deblocking on decoded frames.
-                    First based on the semaphore info., it is divided into hard and soft filtering.
-                    To differentiate real and fake edge, it then check the difference with QP to
-                    decide whether to do the filtering or not.
-
-*************************************************************************/
-
-
-/*----------------------------------------------------------------------------
-; FUNCTION CODE
-----------------------------------------------------------------------------*/
-void CombinedHorzVertFilter(
-    uint8 *rec,
-    int width,
-    int height,
-    int16 *QP_store,
-    int chr,
-    uint8 *pp_mod)
-{
-
-    /*----------------------------------------------------------------------------
-    ; Define all local variables
-    ----------------------------------------------------------------------------*/
-    int br, bc, mbr, mbc;
-    int QP = 1;
-    uint8 *ptr, *ptr_e;
-    int pp_w, pp_h;
-    int brwidth;
-
-    int jVal0, jVal1, jVal2;
-    /*----------------------------------------------------------------------------
-    ; Function body here
-    ----------------------------------------------------------------------------*/
-    pp_w = (width >> 3);
-    pp_h = (height >> 3);
-
-    for (mbr = 0; mbr < pp_h; mbr += 2)         /* row of blocks */
-    {
-        brwidth = mbr * pp_w;               /* number of blocks above current block row */
-        for (mbc = 0; mbc < pp_w; mbc += 2)     /* col of blocks */
-        {
-            if (!chr)
-                QP = QP_store[(brwidth>>2) + (mbc>>1)]; /* QP is per MB based value */
-
-            /********* for each block **************/
-            /****************** Horiz. Filtering ********************/
-            for (br = mbr + 1; br < mbr + 3; br++)  /* 2x2 blocks */
-            {
-                brwidth += pp_w;                    /* number of blocks above & left current block row */
-                /* the profile on ARM920T shows separate these two boundary check is faster than combine them */
-                if (br < pp_h)                  /* boundary : don't do it on the lowest row block */
-                    for (bc = mbc; bc < mbc + 2; bc++)
-                    {
-                        /****** check boundary for deblocking ************/
-                        if (bc < pp_w)              /* boundary : don't do it on the most right col block */
-                        {
-                            ptr = rec + (brwidth << 6) + (bc << 3);
-                            jVal0 = brwidth + bc;
-                            if (chr)    QP = QP_store[jVal0];
-
-                            ptr_e = ptr + 8;        /* pointer to where the loop ends */
-
-                            if (((pp_mod[jVal0]&0x02)) && ((pp_mod[jVal0-pp_w]&0x02)))
-                            {
-                                /* Horiz Hard filter */
-                                do
-                                {
-                                    jVal0 = *(ptr - width);     /* C */
-                                    jVal1 = *ptr;               /* D */
-                                    jVal2 = jVal1 - jVal0;
-
-                                    if (((jVal2 > 0) && (jVal2 < (QP << 1)))
-                                            || ((jVal2 < 0) && (jVal2 > -(QP << 1)))) /* (D-C) compared with 2QP */
-                                    {
-                                        /* differentiate between real and fake edge */
-                                        jVal0 = ((jVal0 + jVal1) >> 1);     /* (D+C)/2 */
-                                        *(ptr - width) = (uint8)(jVal0);    /*  C */
-                                        *ptr = (uint8)(jVal0);          /*  D */
-
-                                        jVal0 = *(ptr - (width << 1));      /* B */
-                                        jVal1 = *(ptr + width);         /* E */
-                                        jVal2 = jVal1 - jVal0;      /* E-B */
-
-                                        if (jVal2 > 0)
-                                        {
-                                            jVal0 += ((jVal2 + 3) >> 2);
-                                            jVal1 -= ((jVal2 + 3) >> 2);
-                                            *(ptr - (width << 1)) = (uint8)jVal0;       /*  store B */
-                                            *(ptr + width) = (uint8)jVal1;          /* store E */
-                                        }
-                                        else if (jVal2)
-                                        {
-                                            jVal0 -= ((3 - jVal2) >> 2);
-                                            jVal1 += ((3 - jVal2) >> 2);
-                                            *(ptr - (width << 1)) = (uint8)jVal0;       /*  store B */
-                                            *(ptr + width) = (uint8)jVal1;          /* store E */
-                                        }
-
-                                        jVal0 = *(ptr - (width << 1) - width);  /* A */
-                                        jVal1 = *(ptr + (width << 1));      /* F */
-                                        jVal2 = jVal1 - jVal0;              /* (F-A) */
-
-                                        if (jVal2 > 0)
-                                        {
-                                            jVal0 += ((jVal2 + 7) >> 3);
-                                            jVal1 -= ((jVal2 + 7) >> 3);
-                                            *(ptr - (width << 1) - width) = (uint8)(jVal0);
-                                            *(ptr + (width << 1)) = (uint8)(jVal1);
-                                        }
-                                        else if (jVal2)
-                                        {
-                                            jVal0 -= ((7 - jVal2) >> 3);
-                                            jVal1 += ((7 - jVal2) >> 3);
-                                            *(ptr - (width << 1) - width) = (uint8)(jVal0);
-                                            *(ptr + (width << 1)) = (uint8)(jVal1);
-                                        }
-                                    }/* a3_0 > 2QP */
-                                }
-                                while (++ptr < ptr_e);
-                            }
-                            else   /* Horiz soft filter*/
-                            {
-                                do
-                                {
-                                    jVal0 = *(ptr - width); /* B */
-                                    jVal1 = *ptr;           /* C */
-                                    jVal2 = jVal1 - jVal0;  /* C-B */
-
-                                    if (((jVal2 > 0) && (jVal2 < (QP)))
-                                            || ((jVal2 < 0) && (jVal2 > -(QP)))) /* (C-B) compared with QP */
-                                    {
-
-                                        jVal0 = ((jVal0 + jVal1) >> 1);     /* (B+C)/2 cannot overflow */
-                                        *(ptr - width) = (uint8)(jVal0);    /* B = (B+C)/2 */
-                                        *ptr = (uint8)jVal0;            /* C = (B+C)/2 */
-
-                                        jVal0 = *(ptr - (width << 1));      /* A */
-                                        jVal1 = *(ptr + width);         /* D */
-                                        jVal2 = jVal1 - jVal0;          /* D-A */
-
-
-                                        if (jVal2 > 0)
-                                        {
-                                            jVal1 -= ((jVal2 + 7) >> 3);
-                                            jVal0 += ((jVal2 + 7) >> 3);
-                                            *(ptr - (width << 1)) = (uint8)jVal0;       /* A */
-                                            *(ptr + width) = (uint8)jVal1;          /* D */
-                                        }
-                                        else if (jVal2)
-                                        {
-                                            jVal1 += ((7 - jVal2) >> 3);
-                                            jVal0 -= ((7 - jVal2) >> 3);
-                                            *(ptr - (width << 1)) = (uint8)jVal0;       /* A */
-                                            *(ptr + width) = (uint8)jVal1;          /* D */
-                                        }
-                                    }
-                                }
-                                while (++ptr < ptr_e);
-                            } /* Soft filter*/
-                        }/* boundary checking*/
-                    }/*bc*/
-            }/*br*/
-            brwidth -= (pp_w << 1);
-            /****************** Vert. Filtering ********************/
-            for (br = mbr; br < mbr + 2; br++)
-            {
-                if (br < pp_h)
-                    for (bc = mbc + 1; bc < mbc + 3; bc++)
-                    {
-                        /****** check boundary for deblocking ************/
-                        if (bc < pp_w)
-                        {
-                            ptr = rec + (brwidth << 6) + (bc << 3);
-                            jVal0 = brwidth + bc;
-                            if (chr)    QP = QP_store[jVal0];
-
-                            ptr_e = ptr + (width << 3);
-
-                            if (((pp_mod[jVal0-1]&0x01)) && ((pp_mod[jVal0]&0x01)))
-                            {
-                                /* Vert Hard filter */
-                                do
-                                {
-                                    jVal1 = *ptr;       /* D */
-                                    jVal0 = *(ptr - 1); /* C */
-                                    jVal2 = jVal1 - jVal0;  /* D-C */
-
-                                    if (((jVal2 > 0) && (jVal2 < (QP << 1)))
-                                            || ((jVal2 < 0) && (jVal2 > -(QP << 1))))
-                                    {
-                                        jVal1 = (jVal0 + jVal1) >> 1;   /* (C+D)/2 */
-                                        *ptr        =   jVal1;
-                                        *(ptr - 1)  =   jVal1;
-
-                                        jVal1 = *(ptr + 1);     /* E */
-                                        jVal0 = *(ptr - 2);     /* B */
-                                        jVal2 = jVal1 - jVal0;      /* E-B */
-
-                                        if (jVal2 > 0)
-                                        {
-                                            jVal1 -= ((jVal2 + 3) >> 2);        /* E = E -(E-B)/4 */
-                                            jVal0 += ((jVal2 + 3) >> 2);        /* B = B +(E-B)/4 */
-                                            *(ptr + 1) = jVal1;
-                                            *(ptr - 2) = jVal0;
-                                        }
-                                        else if (jVal2)
-                                        {
-                                            jVal1 += ((3 - jVal2) >> 2);        /* E = E -(E-B)/4 */
-                                            jVal0 -= ((3 - jVal2) >> 2);        /* B = B +(E-B)/4 */
-                                            *(ptr + 1) = jVal1;
-                                            *(ptr - 2) = jVal0;
-                                        }
-
-                                        jVal1 = *(ptr + 2);     /* F */
-                                        jVal0 = *(ptr - 3);     /* A */
-
-                                        jVal2 = jVal1 - jVal0;          /* (F-A) */
-
-                                        if (jVal2 > 0)
-                                        {
-                                            jVal1 -= ((jVal2 + 7) >> 3);    /* F -= (F-A)/8 */
-                                            jVal0 += ((jVal2 + 7) >> 3);    /* A += (F-A)/8 */
-                                            *(ptr + 2) = jVal1;
-                                            *(ptr - 3) = jVal0;
-                                        }
-                                        else if (jVal2)
-                                        {
-                                            jVal1 -= ((jVal2 - 7) >> 3);    /* F -= (F-A)/8 */
-                                            jVal0 += ((jVal2 - 7) >> 3);    /* A += (F-A)/8 */
-                                            *(ptr + 2) = jVal1;
-                                            *(ptr - 3) = jVal0;
-                                        }
-                                    }   /* end of vert hard filtering */
-                                }
-                                while ((ptr += width) < ptr_e);
-                            }
-                            else   /* Vert soft filter*/
-                            {
-                                do
-                                {
-                                    jVal1 = *ptr;               /* C */
-                                    jVal0 = *(ptr - 1);         /* B */
-                                    jVal2 = jVal1 - jVal0;
-
-                                    if (((jVal2 > 0) && (jVal2 < (QP)))
-                                            || ((jVal2 < 0) && (jVal2 > -(QP))))
-                                    {
-
-                                        jVal1 = (jVal0 + jVal1 + 1) >> 1;
-                                        *ptr = jVal1;           /* C */
-                                        *(ptr - 1) = jVal1;     /* B */
-
-                                        jVal1 = *(ptr + 1);     /* D */
-                                        jVal0 = *(ptr - 2);     /* A */
-                                        jVal2 = (jVal1 - jVal0);        /* D- A */
-
-                                        if (jVal2 > 0)
-                                        {
-                                            jVal1 -= (((jVal2) + 7) >> 3);      /* D -= (D-A)/8 */
-                                            jVal0 += (((jVal2) + 7) >> 3);      /* A += (D-A)/8 */
-                                            *(ptr + 1) = jVal1;
-                                            *(ptr - 2) = jVal0;
-
-                                        }
-                                        else if (jVal2)
-                                        {
-                                            jVal1 += ((7 - (jVal2)) >> 3);      /* D -= (D-A)/8 */
-                                            jVal0 -= ((7 - (jVal2)) >> 3);      /* A += (D-A)/8 */
-                                            *(ptr + 1) = jVal1;
-                                            *(ptr - 2) = jVal0;
-                                        }
-                                    }
-                                }
-                                while ((ptr += width) < ptr_e);
-                            } /* Soft filter*/
-                        } /* boundary*/
-                    } /*bc*/
-                brwidth += pp_w;
-            }/*br*/
-            brwidth -= (pp_w << 1);
-        }/*mbc*/
-        brwidth += (pp_w << 1);
-    }/*mbr*/
-    /*----------------------------------------------------------------------------
-    ; Return nothing or data or data pointer
-    ----------------------------------------------------------------------------*/
-    return;
-}
-void CombinedHorzVertFilter_NoSoftDeblocking(
-    uint8 *rec,
-    int width,
-    int height,
-    int16 *QP_store,
-    int chr,
-    uint8 *pp_mod)
-{
-
-    /*----------------------------------------------------------------------------
-    ; Define all local variables
-    ----------------------------------------------------------------------------*/
-    int br, bc, mbr, mbc;
-    int QP = 1;
-    uint8 *ptr, *ptr_e;
-    int pp_w, pp_h;
-    int brwidth;
-
-    int jVal0, jVal1, jVal2;
-    /*----------------------------------------------------------------------------
-    ; Function body here
-    ----------------------------------------------------------------------------*/
-    pp_w = (width >> 3);
-    pp_h = (height >> 3);
-
-    for (mbr = 0; mbr < pp_h; mbr += 2)         /* row of blocks */
-    {
-        brwidth = mbr * pp_w;               /* number of blocks above current block row */
-        for (mbc = 0; mbc < pp_w; mbc += 2)     /* col of blocks */
-        {
-            if (!chr)
-                QP = QP_store[(brwidth>>2) + (mbc>>1)]; /* QP is per MB based value */
-
-            /********* for each block **************/
-            /****************** Horiz. Filtering ********************/
-            for (br = mbr + 1; br < mbr + 3; br++)  /* 2x2 blocks */
-            {
-                brwidth += pp_w;                    /* number of blocks above & left current block row */
-                /* profiling on ARM920T shows that keeping these two boundary checks separate is faster than combining them */
-                if (br < pp_h)                  /* boundary : don't do it on the lowest row block */
-                    for (bc = mbc; bc < mbc + 2; bc++)
-                    {
-                        /****** check boundary for deblocking ************/
-                        if (bc < pp_w)              /* boundary : don't do it on the most right col block */
-                        {
-                            ptr = rec + (brwidth << 6) + (bc << 3);
-                            jVal0 = brwidth + bc;
-                            if (chr)    QP = QP_store[jVal0];
-
-                            ptr_e = ptr + 8;        /* pointer to where the loop ends */
-
-                            if (((pp_mod[jVal0]&0x02)) && ((pp_mod[jVal0-pp_w]&0x02)))
-                            {
-                                /* Horiz Hard filter */
-                                do
-                                {
-                                    jVal0 = *(ptr - width);     /* C */
-                                    jVal1 = *ptr;               /* D */
-                                    jVal2 = jVal1 - jVal0;
-
-                                    if (((jVal2 > 0) && (jVal2 < (QP << 1)))
-                                            || ((jVal2 < 0) && (jVal2 > -(QP << 1)))) /* (D-C) compared with 2QP */
-                                    {
-                                        /* differentiate between real and fake edge */
-                                        jVal0 = ((jVal0 + jVal1) >> 1);     /* (D+C)/2 */
-                                        *(ptr - width) = (uint8)(jVal0);    /*  C */
-                                        *ptr = (uint8)(jVal0);          /*  D */
-
-                                        jVal0 = *(ptr - (width << 1));      /* B */
-                                        jVal1 = *(ptr + width);         /* E */
-                                        jVal2 = jVal1 - jVal0;      /* E-B */
-
-                                        if (jVal2 > 0)
-                                        {
-                                            jVal0 += ((jVal2 + 3) >> 2);
-                                            jVal1 -= ((jVal2 + 3) >> 2);
-                                            *(ptr - (width << 1)) = (uint8)jVal0;       /*  store B */
-                                            *(ptr + width) = (uint8)jVal1;          /* store E */
-                                        }
-                                        else if (jVal2)
-                                        {
-                                            jVal0 -= ((3 - jVal2) >> 2);
-                                            jVal1 += ((3 - jVal2) >> 2);
-                                            *(ptr - (width << 1)) = (uint8)jVal0;       /*  store B */
-                                            *(ptr + width) = (uint8)jVal1;          /* store E */
-                                        }
-
-                                        jVal0 = *(ptr - (width << 1) - width);  /* A */
-                                        jVal1 = *(ptr + (width << 1));      /* F */
-                                        jVal2 = jVal1 - jVal0;              /* (F-A) */
-
-                                        if (jVal2 > 0)
-                                        {
-                                            jVal0 += ((jVal2 + 7) >> 3);
-                                            jVal1 -= ((jVal2 + 7) >> 3);
-                                            *(ptr - (width << 1) - width) = (uint8)(jVal0);
-                                            *(ptr + (width << 1)) = (uint8)(jVal1);
-                                        }
-                                        else if (jVal2)
-                                        {
-                                            jVal0 -= ((7 - jVal2) >> 3);
-                                            jVal1 += ((7 - jVal2) >> 3);
-                                            *(ptr - (width << 1) - width) = (uint8)(jVal0);
-                                            *(ptr + (width << 1)) = (uint8)(jVal1);
-                                        }
-                                    }/* a3_0 > 2QP */
-                                }
-                                while (++ptr < ptr_e);
-                            }
-
-                        }/* boundary checking*/
-                    }/*bc*/
-            }/*br*/
-            brwidth -= (pp_w << 1);
-            /****************** Vert. Filtering ********************/
-            for (br = mbr; br < mbr + 2; br++)
-            {
-                if (br < pp_h)
-                    for (bc = mbc + 1; bc < mbc + 3; bc++)
-                    {
-                        /****** check boundary for deblocking ************/
-                        if (bc < pp_w)
-                        {
-                            ptr = rec + (brwidth << 6) + (bc << 3);
-                            jVal0 = brwidth + bc;
-                            if (chr)    QP = QP_store[jVal0];
-
-                            ptr_e = ptr + (width << 3);
-
-                            if (((pp_mod[jVal0-1]&0x01)) && ((pp_mod[jVal0]&0x01)))
-                            {
-                                /* Vert Hard filter */
-                                do
-                                {
-                                    jVal1 = *ptr;       /* D */
-                                    jVal0 = *(ptr - 1); /* C */
-                                    jVal2 = jVal1 - jVal0;  /* D-C */
-
-                                    if (((jVal2 > 0) && (jVal2 < (QP << 1)))
-                                            || ((jVal2 < 0) && (jVal2 > -(QP << 1))))
-                                    {
-                                        jVal1 = (jVal0 + jVal1) >> 1;   /* (C+D)/2 */
-                                        *ptr        =   jVal1;
-                                        *(ptr - 1)  =   jVal1;
-
-                                        jVal1 = *(ptr + 1);     /* E */
-                                        jVal0 = *(ptr - 2);     /* B */
-                                        jVal2 = jVal1 - jVal0;      /* E-B */
-
-                                        if (jVal2 > 0)
-                                        {
-                                            jVal1 -= ((jVal2 + 3) >> 2);        /* E = E -(E-B)/4 */
-                                            jVal0 += ((jVal2 + 3) >> 2);        /* B = B +(E-B)/4 */
-                                            *(ptr + 1) = jVal1;
-                                            *(ptr - 2) = jVal0;
-                                        }
-                                        else if (jVal2)
-                                        {
-                                            jVal1 += ((3 - jVal2) >> 2);        /* E = E -(E-B)/4 */
-                                            jVal0 -= ((3 - jVal2) >> 2);        /* B = B +(E-B)/4 */
-                                            *(ptr + 1) = jVal1;
-                                            *(ptr - 2) = jVal0;
-                                        }
-
-                                        jVal1 = *(ptr + 2);     /* F */
-                                        jVal0 = *(ptr - 3);     /* A */
-
-                                        jVal2 = jVal1 - jVal0;          /* (F-A) */
-
-                                        if (jVal2 > 0)
-                                        {
-                                            jVal1 -= ((jVal2 + 7) >> 3);    /* F -= (F-A)/8 */
-                                            jVal0 += ((jVal2 + 7) >> 3);    /* A += (F-A)/8 */
-                                            *(ptr + 2) = jVal1;
-                                            *(ptr - 3) = jVal0;
-                                        }
-                                        else if (jVal2)
-                                        {
-                                            jVal1 -= ((jVal2 - 7) >> 3);    /* F -= (F-A)/8 */
-                                            jVal0 += ((jVal2 - 7) >> 3);    /* A += (F-A)/8 */
-                                            *(ptr + 2) = jVal1;
-                                            *(ptr - 3) = jVal0;
-                                        }
-                                    }   /* end of vert hard filtering */
-                                }
-                                while ((ptr += width) < ptr_e);
-                            }
-
-                        } /* boundary*/
-                    } /*bc*/
-                brwidth += pp_w;
-            }/*br*/
-            brwidth -= (pp_w << 1);
-        }/*mbc*/
-        brwidth += (pp_w << 1);
-    }/*mbr*/
-    /*----------------------------------------------------------------------------
-    ; Return nothing or data or data pointer
-    ----------------------------------------------------------------------------*/
-    return;
-}
-#endif
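Every hard-filter branch above applies the same one-dimensional update to the six pixels A..F that straddle a block edge: C and D are averaged, B and E are pulled together by roughly a quarter of their difference, and A and F by roughly an eighth, but only when |D - C| is non-zero and below 2*QP (a larger step is treated as a real edge and left alone). A minimal standalone sketch of that step, assuming 8-bit samples; it is not part of the original sources and the helper names are illustrative:

#include <stdint.h>

/* Quarter/eighth of a signed difference, rounded away from zero, in the
 * same (d + 3) >> 2 / (7 - d) >> 3 style as the filter code above. */
static int quarter(int d) { return (d > 0) ? ((d + 3) >> 2) : -((3 - d) >> 2); }
static int eighth(int d)  { return (d > 0) ? ((d + 7) >> 3) : -((7 - d) >> 3); }

/* p[0..5] hold the pixels A..F across a block edge between C (p[2]) and
 * D (p[3]); QP is the quantizer of the block being filtered. */
static void hard_edge_filter_1d(uint8_t p[6], int QP)
{
    int dc = p[3] - p[2];                              /* D - C */
    if (dc != 0 && dc < (QP << 1) && dc > -(QP << 1))
    {
        p[2] = p[3] = (uint8_t)((p[2] + p[3]) >> 1);   /* C = D = (C + D) / 2 */

        int eb = p[4] - p[1];                          /* E - B */
        p[1] = (uint8_t)(p[1] + quarter(eb));          /* B += (E - B) / 4 */
        p[4] = (uint8_t)(p[4] - quarter(eb));          /* E -= (E - B) / 4 */

        int fa = p[5] - p[0];                          /* F - A */
        p[0] = (uint8_t)(p[0] + eighth(fa));           /* A += (F - A) / 8 */
        p[5] = (uint8_t)(p[5] - eighth(fa));           /* F -= (F - A) / 8 */
    }
}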
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/chvr_filter.cpp b/media/libstagefright/codecs/m4v_h263/dec/src/chvr_filter.cpp
deleted file mode 100644
index 795cf71..0000000
--- a/media/libstagefright/codecs/m4v_h263/dec/src/chvr_filter.cpp
+++ /dev/null
@@ -1,565 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-#include    "mp4dec_lib.h"
-#include    "post_proc.h"
-
-#ifdef PV_POSTPROC_ON
-
-void CombinedHorzVertRingFilter(
-    uint8 *rec,
-    int width,
-    int height,
-    int16 *QP_store,
-    int chr,
-    uint8 *pp_mod)
-{
-
-    /*----------------------------------------------------------------------------
-    ; Define all local variables
-    ----------------------------------------------------------------------------*/
-    int index, counter;
-    int br, bc, incr, mbr, mbc;
-    int QP = 1;
-    int v[5];
-    uint8 *ptr, *ptr_c, *ptr_n;
-    int w1, w2, w3, w4;
-    int pp_w, pp_h, brwidth;
-    int sum, delta;
-    int a3_0, a3_1, a3_2, A3_0;
-    /* for Deringing Threshold approach (MPEG4)*/
-    int max_diff, thres, v0, h0, min_blk, max_blk;
-    int cnthflag;
-
-    /*----------------------------------------------------------------------------
-    ; Function body here
-    ----------------------------------------------------------------------------*/
-    /* Calculate the width and height of the area in blocks (divide by 8) */
-    pp_w = (width >> 3);
-    pp_h = (height >> 3);
-
-    /* Set up various values needed for updating pointers into rec */
-    w1 = width;             /* Offset to next row in pixels */
-    w2 = width << 1;        /* Offset to two rows in pixels */
-    w3 = w1 + w2;           /* Offset to three rows in pixels */
-    w4 = w2 << 1;           /* Offset to four rows in pixels */
-    incr = width - BLKSIZE; /* Offset to next row after processing block */
-
-    /* Work down the area by two block rows per step */
-    for (mbr = 0; mbr < pp_h; mbr += 2)
-    {
-        /* brwidth contains the block number of the leftmost block
-         * of the current row */
-        brwidth = mbr * pp_w;
-
-        /* Work across the area by two block columns per step */
-        for (mbc = 0; mbc < pp_w; mbc += 2)
-        {
-            /* if the data is luminance info, get the correct
-             * quantization parameter. One parameter per macroblock */
-            if (!chr)
-            {
-                /* brwidth/4 is the macroblock number and mbc/2 is the macroblock col number*/
-                QP = QP_store[(brwidth>>2) + (mbc>>1)];
-            }
-
-            /****************** Horiz. Filtering ********************/
-            /* Process four blocks for the filtering        */
-            /********************************************************/
-            /* Loop over two rows of blocks */
-            for (br = mbr + 1; br < mbr + 3; br++)    /* br is the row counter in blocks */
-            {
-                /* Set brwidth to the first (leftmost) block number of the next row */
-                /* brwidth is used as an index when counting blocks */
-                brwidth += pp_w;
-
-                /* Loop over two columns of blocks in the row */
-                for (bc = mbc; bc < mbc + 2; bc++)    /* bc is the column counter in blocks */
-                {
-                    /****** check boundary for deblocking ************/
-                    /* Execute if the row and column counters are within the area */
-                    if (br < pp_h && bc < pp_w)
-                    {
-                        /* Set the ptr to the first pixel of the first block of the second row
-                        * brwidth * 64 is the pixel row offset
-                        * bc * 8 is the pixel column offset */
-                        ptr = rec + (brwidth << 6) + (bc << 3);
-
-                        /* Set the index to the current block of the second row counting in blocks */
-                        index = brwidth + bc;
-
-                        /* if the data is chrominance info, get the correct
-                         * quantization parameter. One parameter per block. */
-                        if (chr)
-                        {
-                            QP = QP_store[index];
-                        }
-
-                        /* Execute hard horizontal filter if semaphore for horizontal deblocking
-                          * is set for the current block and block immediately above it */
-                        if (((pp_mod[index]&0x02) != 0) && ((pp_mod[index-pp_w]&0x02) != 0))
-                        {   /* Hard filter */
-
-                            /* Set HorzHflag (bit 4) in the pp_mod location */
-                            pp_mod[index-pp_w] |= 0x10; /*  4/26/00 reuse pp_mod for HorzHflag*/
-
-                            /* Filter across the 8 pixels of the block */
-                            for (index = BLKSIZE; index > 0; index--)
-                            {
-                                /* Difference between the current pixel and the pixel above it */
-                                a3_0 = *ptr - *(ptr - w1);
-
-                                /* if the magnitude of the difference is greater than the KThH threshold
-                                 * and within the quantization parameter, apply hard filter */
-                                if ((a3_0 > KThH || a3_0 < -KThH) && a3_0<QP && a3_0> -QP)
-                                {
-                                    ptr_c = ptr - w3;   /* Points to pixel three rows above */
-                                    ptr_n = ptr + w1;   /* Points to pixel one row below */
-                                    v[0] = (int)(*(ptr_c - w3));
-                                    v[1] = (int)(*(ptr_c - w2));
-                                    v[2] = (int)(*(ptr_c - w1));
-                                    v[3] = (int)(*ptr_c);
-                                    v[4] = (int)(*(ptr_c + w1));
-
-                                    sum = v[0]
-                                          + v[1]
-                                          + v[2]
-                                          + *ptr_c
-                                          + v[4]
-                                          + (*(ptr_c + w2))
-                                          + (*(ptr_c + w3));  /* Current pixel */
-
-                                    delta = (sum + *ptr_c + 4) >> 3;   /* Average pixel values with rounding */
-                                    *(ptr_c) = (uint8) delta;
-
-                                    /* Move pointer down one row of pixels (points to pixel two rows
-                                     * above current pixel) */
-                                    ptr_c += w1;
-
-                                    for (counter = 0; counter < 5; counter++)
-                                    {
-                                        /* Subtract off highest pixel and add in pixel below */
-                                        sum = sum - v[counter] + *ptr_n;
-                                        /* Average the pixel values with rounding */
-                                        delta = (sum + *ptr_c + 4) >> 3;
-                                        *ptr_c = (uint8)(delta);
-
-                                        /* Increment pointers to next pixel row */
-                                        ptr_c += w1;
-                                        ptr_n += w1;
-                                    }
-                                }
-                                /* Increment pointer to next pixel */
-                                ++ptr;
-                            } /* index*/
-                        }
-                        else
-                        { /* soft filter*/
-
-                            /* Clear HorzHflag (bit 4) in the pp_mod location */
-                            pp_mod[index-pp_w] &= 0xef; /* reset 1110,1111 */
-
-                            for (index = BLKSIZE; index > 0; index--)
-                            {
-                                /* Difference between the current pixel and the pixel above it */
-                                a3_0 = *(ptr) - *(ptr - w1);
-
-                                /* if the magnitude of the difference is greater than the KTh threshold,
-                                 * apply soft filter */
-                                if ((a3_0 > KTh || a3_0 < -KTh))
-                                {
-
-                                    /* Sum of weighted differences */
-                                    a3_0 += ((*(ptr - w2) - *(ptr + w1)) << 1) + (a3_0 << 2);
-
-                                    /* Check if sum is less than the quantization parameter */
-                                    if (PV_ABS(a3_0) < (QP << 3))
-                                    {
-                                        a3_1 = *(ptr - w2) - *(ptr - w3);
-                                        a3_1 += ((*(ptr - w4) - *(ptr - w1)) << 1) + (a3_1 << 2);
-
-                                        a3_2  = *(ptr + w2) - *(ptr + w1);
-                                        a3_2 += ((*(ptr) - *(ptr + w3)) << 1) + (a3_2 << 2);
-
-                                        A3_0 = PV_ABS(a3_0) - PV_MIN(PV_ABS(a3_1), PV_ABS(a3_2));
-
-                                        if (A3_0 > 0)
-                                        {
-                                            A3_0 += A3_0 << 2;
-                                            A3_0 = (A3_0 + 32) >> 6;
-                                            if (a3_0 > 0)
-                                            {
-                                                A3_0 = -A3_0;
-                                            }
-
-                                            delta = (*(ptr - w1) - *(ptr)) >> 1;
-                                            if (delta >= 0)
-                                            {
-                                                if (delta >= A3_0)
-                                                {
-                                                    delta = PV_MAX(A3_0, 0);
-                                                }
-                                            }
-                                            else
-                                            {
-                                                if (A3_0 > 0)
-                                                {
-                                                    delta = 0;
-                                                }
-                                                else
-                                                {
-                                                    delta = PV_MAX(A3_0, delta);
-                                                }
-                                            }
-
-                                            *(ptr - w1) = (uint8)(*(ptr - w1) - delta);
-                                            *(ptr) = (uint8)(*(ptr) + delta);
-                                        }
-                                    } /*threshold*/
-                                }
-                                /* Increment pointer to next pixel */
-                                ++ptr;
-                            } /*index*/
-                        } /* Soft filter*/
-                    }/* boundary checking*/
-                }/*bc*/
-            }/*br*/
-            brwidth -= (pp_w << 1);
-
-
-            /****************** Vert. Filtering *********************/
-            /* Process four blocks for the filtering        */
-            /********************************************************/
-            /* Loop over two rows of blocks */
-            for (br = mbr; br < mbr + 2; br++)      /* br is the row counter in blocks */
-            {
-                for (bc = mbc + 1; bc < mbc + 3; bc++)  /* bc is the column counter in blocks */
-                {
-                    /****** check boundary for deblocking ************/
-                    /* Execute if the row and column counters are within the area */
-                    if (br < pp_h && bc < pp_w)
-                    {
-                        /* Set the ptr to the first pixel of the first block of the second row
-                        * brwidth * 64 is the pixel row offset
-                        * bc * 8 is the pixel column offset */
-                        ptr = rec + (brwidth << 6) + (bc << 3);
-
-                        /* Set the index to the current block of the second row counting in blocks */
-                        index = brwidth + bc;
-
-                        /* if the data is chrominance info, get the correct
-                         * quantization parameter. One parameter per block. */
-                        if (chr)
-                        {
-                            QP = QP_store[index];
-                        }
-
-                        /* Execute hard vertical filter if semaphore for vertical deblocking
-                          * is set for the current block and block immediately left of it */
-                        if (((pp_mod[index-1]&0x01) != 0) && ((pp_mod[index]&0x01) != 0))
-                        {   /* Hard filter */
-
-                            /* Set VertHflag (bit 5) in the pp_mod location of previous block*/
-                            pp_mod[index-1] |= 0x20; /*  4/26/00 reuse pp_mod for VertHflag*/
-
-                            /* Filter across the 8 pixels of the block */
-                            for (index = BLKSIZE; index > 0; index--)
-                            {
-                                /* Difference between the current pixel
-                                * and the pixel to left of it */
-                                a3_0 = *ptr - *(ptr - 1);
-
-                                /* if the magnitude of the difference is greater than the KThH threshold
-                                 * and within the quantization parameter, apply hard filter */
-                                if ((a3_0 > KThH || a3_0 < -KThH) && a3_0<QP && a3_0> -QP)
-                                {
-                                    ptr_c = ptr - 3;
-                                    ptr_n = ptr + 1;
-                                    v[0] = (int)(*(ptr_c - 3));
-                                    v[1] = (int)(*(ptr_c - 2));
-                                    v[2] = (int)(*(ptr_c - 1));
-                                    v[3] = (int)(*ptr_c);
-                                    v[4] = (int)(*(ptr_c + 1));
-
-                                    sum = v[0]
-                                          + v[1]
-                                          + v[2]
-                                          + *ptr_c
-                                          + v[4]
-                                          + (*(ptr_c + 2))
-                                          + (*(ptr_c + 3));
-
-                                    delta = (sum + *ptr_c + 4) >> 3;
-                                    *(ptr_c) = (uint8) delta;
-
-                                    /* Move pointer one pixel to the right */
-                                    ptr_c += 1;
-                                    for (counter = 0; counter < 5; counter++)
-                                    {
-                                        /* Subtract off leftmost pixel and add in pixel to the right */
-                                        sum = sum - v[counter] + *ptr_n;
-                                        /* Average the pixel values with rounding */
-                                        delta = (sum + *ptr_c + 4) >> 3;
-                                        *ptr_c = (uint8)(delta);
-
-                                        /* Increment pointers to next pixel */
-                                        ptr_c += 1;
-                                        ptr_n += 1;
-                                    }
-                                }
-                                /* Increment pointers to next pixel row */
-                                ptr += w1;
-                            } /* index*/
-                        }
-                        else
-                        { /* soft filter*/
-
-                            /* Clear VertHflag (bit 5) in the pp_mod location */
-                            pp_mod[index-1] &= 0xdf; /* reset 1101,1111 */
-                            for (index = BLKSIZE; index > 0; index--)
-                            {
-                                /* Difference between the current pixel and the pixel above it */
-                                a3_0 = *(ptr) - *(ptr - 1);
-
-                                /* if the magnitude of the difference is greater than the KTh threshold,
-                                 * apply soft filter */
-                                if ((a3_0 > KTh || a3_0 < -KTh))
-                                {
-
-                                    /* Sum of weighted differences */
-                                    a3_0 += ((*(ptr - 2) - *(ptr + 1)) << 1) + (a3_0 << 2);
-
-                                    /* Check if sum is less than the quantization parameter */
-                                    if (PV_ABS(a3_0) < (QP << 3))
-                                    {
-                                        a3_1 = *(ptr - 2) - *(ptr - 3);
-                                        a3_1 += ((*(ptr - 4) - *(ptr - 1)) << 1) + (a3_1 << 2);
-
-                                        a3_2  = *(ptr + 2) - *(ptr + 1);
-                                        a3_2 += ((*(ptr) - *(ptr + 3)) << 1) + (a3_2 << 2);
-
-                                        A3_0 = PV_ABS(a3_0) - PV_MIN(PV_ABS(a3_1), PV_ABS(a3_2));
-
-                                        if (A3_0 > 0)
-                                        {
-                                            A3_0 += A3_0 << 2;
-                                            A3_0 = (A3_0 + 32) >> 6;
-                                            if (a3_0 > 0)
-                                            {
-                                                A3_0 = -A3_0;
-                                            }
-
-                                            delta = (*(ptr - 1) - *(ptr)) >> 1;
-                                            if (delta >= 0)
-                                            {
-                                                if (delta >= A3_0)
-                                                {
-                                                    delta = PV_MAX(A3_0, 0);
-                                                }
-                                            }
-                                            else
-                                            {
-                                                if (A3_0 > 0)
-                                                {
-                                                    delta = 0;
-                                                }
-                                                else
-                                                {
-                                                    delta = PV_MAX(A3_0, delta);
-                                                }
-                                            }
-
-                                            *(ptr - 1) = (uint8)(*(ptr - 1) - delta);
-                                            *(ptr) = (uint8)(*(ptr) + delta);
-                                        }
-                                    } /*threshold*/
-                                }
-                                ptr += w1;
-                            } /*index*/
-                        } /* Soft filter*/
-                    } /* boundary*/
-                } /*bc*/
-                /* Advance the block index to the next row of blocks */
-                brwidth += pp_w;
-            }/*br*/
-            brwidth -= (pp_w << 1);
-
-            /****************** Deringing ***************************/
-            /* Process four blocks for the filtering        */
-            /********************************************************/
-            /* Loop over two rows of blocks */
-            for (br = mbr; br < mbr + 2; br++)
-            {
-                /* Loop over two columns of blocks in the row */
-                for (bc = mbc; bc < mbc + 2; bc++)
-                {
-                    /* Execute if the row and column counters are within the area */
-                    if (br < pp_h && bc < pp_w)
-                    {
-                        /* Set the index to the current block */
-                        index = brwidth + bc;
-
-                        /* Execute deringing if semaphore for deringing (bit-3 of pp_mod)
-                         * is set for the current block */
-                        if ((pp_mod[index]&0x04) != 0)
-                        {
-                            /* Don't process deringing if on an edge block */
-                            if (br > 0 && bc > 0 && br < pp_h - 1 && bc < pp_w - 1)
-                            {
-                                /* cnthflag = number of hard-filter flags (HorzHflag/VertHflag) set
-                                 * for the current block and its top and left neighbors */
-                                cnthflag = ((pp_mod[index] & 0x10) +
-                                            (pp_mod[index-pp_w] & 0x10) +
-                                            ((pp_mod[index-1] >> 1) & 0x10) +
-                                            ((pp_mod[index] >> 1) & 0x10)) >> 4; /* 4/26/00*/
-
-                                /* Do the deringing if decision flags indicate it's necessary */
-                                if (cnthflag < 3)
-                                {
-                                    /* if the data is chrominance info, get the correct
-                                     * quantization parameter. One parameter per block. */
-                                    if (chr)
-                                    {
-                                        QP = QP_store[index];
-                                    }
-
-                                    /* Set amount to change luminance if it needs to be changed
-                                     * based on quantization parameter */
-                                    max_diff = (QP >> 2) + 4;
-
-                                    /* Set pointer to first pixel of current block */
-                                    ptr = rec + (brwidth << 6) + (bc << 3);
-
-                                    /* Find minimum and maximum value of pixel block */
-                                    FindMaxMin(ptr, &min_blk, &max_blk, incr);
-
-                                    /* threshold determination */
-                                    thres = (max_blk + min_blk + 1) >> 1;
-
-                                    /* If the pixel range is greater than or equal to DERING_THR, smooth the region */
-                                    if ((max_blk - min_blk) >= DERING_THR) /*smooth 8x8 region*/
-#ifndef NoMMX
-                                    {
-                                        /* smooth all pixels in the block*/
-                                        DeringAdaptiveSmoothMMX(ptr, width, thres, max_diff);
-                                    }
-#else
-                                    {
-                                        /* Set up the starting point of the region to smooth */
-                                        v0 = (br << 3) - 1;
-                                        h0 = (bc << 3) - 1;
-
-                                        /*smooth 8x8 region*/
-                                        AdaptiveSmooth_NoMMX(rec, v0, h0, v0 + 1, h0 + 1, thres, width, max_diff);
-                                    }
-#endif
-                                }/*cnthflag*/
-                            } /*dering br==1 or bc==1 (boundary block)*/
-                            else    /* Process the boundary blocks */
-                            {
-                                /* Decide whether to perform deringing based on the semaphore flags
-                                 * of the neighboring blocks in each case: smoothing is skipped when
-                                 * too many hard-filtering flags are set in the neighborhood */
-                                if (br > 0 && br < pp_h - 1)
-                                {
-                                    if (bc > 0)
-                                    {
-                                        cnthflag = ((pp_mod[index-pp_w] & 0x10) +
-                                                    (pp_mod[index] & 0x10) +
-                                                    ((pp_mod[index-1] >> 1) & 0x10)) >> 4;
-                                    }
-                                    else
-                                    {
-                                        cnthflag = ((pp_mod[index] & 0x10) +
-                                                    (pp_mod[index-pp_w] & 0x10) +
-                                                    ((pp_mod[index] >> 1) & 0x10)) >> 4;
-                                    }
-                                }
-                                else if (bc > 0 && bc < pp_w - 1)
-                                {
-                                    if (br > 0)
-                                    {
-                                        cnthflag = ((pp_mod[index-pp_w] & 0x10) +
-                                                    ((pp_mod[index-1] >> 1) & 0x10) +
-                                                    ((pp_mod[index] >> 1) & 0x10)) >> 4;
-                                    }
-                                    else
-                                    {
-                                        cnthflag = ((pp_mod[index] & 0x10) +
-                                                    ((pp_mod[index-1] >> 1) & 0x10) +
-                                                    ((pp_mod[index] >> 1) & 0x10)) >> 4;
-                                    }
-                                }
-                                else /* at the corner do default*/
-                                {
-                                    cnthflag = 0;
-                                }
-
-                                /* Do the deringing if decision flags indicate it's necessary */
-                                if (cnthflag < 2)
-                                {
-
-                                    /* if the data is chrominance info, get the correct
-                                     * quantization parameter. One parameter per block. */
-                                    if (chr)
-                                    {
-                                        QP = QP_store[index];
-                                    }
-
-                                    /* Set amount to change luminance if it needs to be changed
-                                     * based on quantization parameter */
-                                    max_diff = (QP >> 2) + 4;
-
-                                    /* Set pointer to first pixel of current block */
-                                    ptr = rec + (brwidth << 6) + (bc << 3);
-
-                                    /* Find minimum and maximum value of pixel block */
-                                    FindMaxMin(ptr, &min_blk, &max_blk, incr);
-
-                                    /* threshold determination */
-                                    thres = (max_blk + min_blk + 1) >> 1;
-
-                                    /* Set up the starting point of the region to smooth
-                                     * This is going to be a 4x4 region */
-                                    v0 = (br << 3) + 1;
-                                    h0 = (bc << 3) + 1;
-
-                                    /* If the pixel range is greater than or equal to DERING_THR, smooth the region */
-                                    if ((max_blk - min_blk) >= DERING_THR)
-                                    {
-                                        /* Smooth 4x4 region */
-                                        AdaptiveSmooth_NoMMX(rec, v0, h0, v0 - 3, h0 - 3, thres, width, max_diff);
-                                    }
-                                }/*cnthflag*/
-                            } /* br==0, bc==0*/
-                        }  /* dering*/
-                    } /*boundary condition*/
-                }/*bc*/
-                brwidth += pp_w;
-            }/*br*/
-            brwidth -= (pp_w << 1);
-        }/*mbc*/
-        brwidth += (pp_w << 1);
-    }/*mbr*/
-
-    /*----------------------------------------------------------------------------
-    ; Return nothing or data or data pointer
-    ----------------------------------------------------------------------------*/
-    return ;
-}
-#endif
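The deringing pass in CombinedHorzVertRingFilter above keys off three per-block quantities: the block's min/max range (via FindMaxMin), a mid-range threshold, and a QP-derived clamp on how far any pixel may be corrected. A small self-contained sketch of that per-block decision, assuming 8-bit samples; dering_decision and DERING_THR_SKETCH are illustrative names standing in for the decoder's own helpers and its DERING_THR constant:

#include <stdint.h>

#define DERING_THR_SKETCH 16   /* placeholder for the decoder's DERING_THR */

/* Returns non-zero when an 8x8 block should be dering-smoothed, and fills in
 * the smoothing threshold and the clamp on per-pixel correction.
 * blk points at the block's top-left pixel, stride is the frame width. */
static int dering_decision(const uint8_t *blk, int stride, int QP,
                           int *thres, int *max_diff)
{
    int min_blk = 255, max_blk = 0;
    int x, y;

    for (y = 0; y < 8; y++)
        for (x = 0; x < 8; x++)
        {
            int v = blk[y * stride + x];
            if (v < min_blk) min_blk = v;
            if (v > max_blk) max_blk = v;
        }

    *thres    = (max_blk + min_blk + 1) >> 1;  /* mid-point of the block's range */
    *max_diff = (QP >> 2) + 4;                 /* limit on how far a pixel may move */

    return (max_blk - min_blk) >= DERING_THR_SKETCH;   /* smooth only busy blocks */
}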
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/combined_decode.cpp b/media/libstagefright/codecs/m4v_h263/dec/src/combined_decode.cpp
index 6499233..72cbe83 100644
--- a/media/libstagefright/codecs/m4v_h263/dec/src/combined_decode.cpp
+++ b/media/libstagefright/codecs/m4v_h263/dec/src/combined_decode.cpp
@@ -544,30 +544,12 @@
     int16 DC_coeff;
     PV_STATUS status;
 
-#ifdef PV_POSTPROC_ON
-    /* post-processing */
-    uint8 *pp_mod[6];
-    int TotalMB = video->nTotalMB;
-    int MB_in_width = video->nMBPerRow;
-#endif
     int y_pos = video->mbnum_row;
     int x_pos = video->mbnum_col;
     int32 offset = (int32)(y_pos << 4) * width + (x_pos << 4);
 
     /* Decode each 8-by-8 block. comp 0 ~ 3 are luminance blocks, 4 ~ 5 */
     /*  are chrominance blocks.   04/03/2000.                          */
-#ifdef PV_POSTPROC_ON
-    if (video->postFilterType != PV_NO_POST_PROC)
-    {
-        /** post-processing ***/
-        pp_mod[0] = video->pstprcTypCur + (y_pos << 1) * (MB_in_width << 1) + (x_pos << 1);
-        pp_mod[1] = pp_mod[0] + 1;
-        pp_mod[2] = pp_mod[0] + (MB_in_width << 1);
-        pp_mod[3] = pp_mod[2] + 1;
-        pp_mod[4] = video->pstprcTypCur + (TotalMB << 2) + mbnum;
-        pp_mod[5] = pp_mod[4] + TotalMB;
-    }
-#endif
 
     /*  oscl_memset(mblock->block, 0, sizeof(typeMBStore));    Aug 9,2005 */
 
@@ -645,10 +627,6 @@
             }
             no_coeff[comp] = ncoeffs[comp];
 
-#ifdef PV_POSTPROC_ON
-            if (video->postFilterType != PV_NO_POST_PROC)
-                *pp_mod[comp] = (uint8) PostProcSemaphore(dataBlock);
-#endif
         }
         MBlockIDCT(video);
     }
@@ -677,20 +655,6 @@
                 BlockIDCT(c_comp + (comp&2)*(width << 2) + 8*(comp&1), mblock->pred_block + (comp&2)*64 + 8*(comp&1), mblock->block[comp], width, ncoeffs[comp],
                           mblock->bitmapcol[comp], mblock->bitmaprow[comp]);
 
-#ifdef PV_POSTPROC_ON
-                /* for inter just test for ringing */
-                if (video->postFilterType != PV_NO_POST_PROC)
-                    *pp_mod[comp] = (uint8)((ncoeffs[comp] > 3) ? 4 : 0);
-#endif
-            }
-            else
-            {
-                /* no IDCT for all zeros blocks  03/28/2002 */
-                /*              BlockIDCT();                */
-#ifdef PV_POSTPROC_ON
-                if (video->postFilterType != PV_NO_POST_PROC)
-                    *pp_mod[comp] = 0;
-#endif
             }
         }
 
@@ -707,20 +671,6 @@
             BlockIDCT(video->currVop->uChan + (offset >> 2) + (x_pos << 2), mblock->pred_block + 256, mblock->block[4], width >> 1, ncoeffs[4],
                       mblock->bitmapcol[4], mblock->bitmaprow[4]);
 
-#ifdef PV_POSTPROC_ON
-            /* for inter just test for ringing */
-            if (video->postFilterType != PV_NO_POST_PROC)
-                *pp_mod[4] = (uint8)((ncoeffs[4] > 3) ? 4 : 0);
-#endif
-        }
-        else
-        {
-            /* no IDCT for all zeros blocks  03/28/2002 */
-            /*              BlockIDCT();                */
-#ifdef PV_POSTPROC_ON
-            if (video->postFilterType != PV_NO_POST_PROC)
-                *pp_mod[4] = 0;
-#endif
         }
         (*DC)[5] = mid_gray;
         if (CBP & 1)
@@ -731,20 +681,6 @@
             BlockIDCT(video->currVop->vChan + (offset >> 2) + (x_pos << 2), mblock->pred_block + 264, mblock->block[5], width >> 1, ncoeffs[5],
                       mblock->bitmapcol[5], mblock->bitmaprow[5]);
 
-#ifdef PV_POSTPROC_ON
-            /* for inter just test for ringing */
-            if (video->postFilterType != PV_NO_POST_PROC)
-                *pp_mod[5] = (uint8)((ncoeffs[5] > 3) ? 4 : 0);
-#endif
-        }
-        else
-        {
-            /* no IDCT for all zeros blocks  03/28/2002 */
-            /*              BlockIDCT();                */
-#ifdef PV_POSTPROC_ON
-            if (video->postFilterType != PV_NO_POST_PROC)
-                *pp_mod[5] = 0;
-#endif
         }
         video->QPMB[mbnum] = QP;  /* restore the QP values  ANNEX_T*/
 #else
@@ -759,20 +695,6 @@
                 BlockIDCT(c_comp + (comp&2)*(width << 2) + 8*(comp&1), mblock->pred_block + (comp&2)*64 + 8*(comp&1), mblock->block[comp], width, ncoeffs[comp],
                           mblock->bitmapcol[comp], mblock->bitmaprow[comp]);
 
-#ifdef PV_POSTPROC_ON
-                /* for inter just test for ringing */
-                if (video->postFilterType != PV_NO_POST_PROC)
-                    *pp_mod[comp] = (uint8)((ncoeffs[comp] > 3) ? 4 : 0);
-#endif
-            }
-            else
-            {
-                /* no IDCT for all zeros blocks  03/28/2002 */
-                /*              BlockIDCT();                */
-#ifdef PV_POSTPROC_ON
-                if (video->postFilterType != PV_NO_POST_PROC)
-                    *pp_mod[comp] = 0;
-#endif
             }
         }
 
@@ -785,20 +707,11 @@
             BlockIDCT(video->currVop->uChan + (offset >> 2) + (x_pos << 2), mblock->pred_block + 256, mblock->block[4], width >> 1, ncoeffs[4],
                       mblock->bitmapcol[4], mblock->bitmaprow[4]);
 
-#ifdef PV_POSTPROC_ON
-            /* for inter just test for ringing */
-            if (video->postFilterType != PV_NO_POST_PROC)
-                *pp_mod[4] = (uint8)((ncoeffs[4] > 3) ? 4 : 0);
-#endif
         }
         else
         {
             /* no IDCT for all zeros blocks  03/28/2002 */
             /*              BlockIDCT();                */
-#ifdef PV_POSTPROC_ON
-            if (video->postFilterType != PV_NO_POST_PROC)
-                *pp_mod[4] = 0;
-#endif
         }
         (*DC)[5] = mid_gray;
         if (CBP & 1)
@@ -809,20 +722,11 @@
             BlockIDCT(video->currVop->vChan + (offset >> 2) + (x_pos << 2), mblock->pred_block + 264, mblock->block[5], width >> 1, ncoeffs[5],
                       mblock->bitmapcol[5], mblock->bitmaprow[5]);
 
-#ifdef PV_POSTPROC_ON
-            /* for inter just test for ringing */
-            if (video->postFilterType != PV_NO_POST_PROC)
-                *pp_mod[5] = (uint8)((ncoeffs[5] > 3) ? 4 : 0);
-#endif
         }
         else
         {
             /* no IDCT for all zeros blocks  03/28/2002 */
             /*              BlockIDCT();                */
-#ifdef PV_POSTPROC_ON
-            if (video->postFilterType != PV_NO_POST_PROC)
-                *pp_mod[5] = 0;
-#endif
 #endif  // PV_ANNEX_IJKT_SUPPORT
 
 
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/datapart_decode.cpp b/media/libstagefright/codecs/m4v_h263/dec/src/datapart_decode.cpp
index 00db04b..6071f40 100644
--- a/media/libstagefright/codecs/m4v_h263/dec/src/datapart_decode.cpp
+++ b/media/libstagefright/codecs/m4v_h263/dec/src/datapart_decode.cpp
@@ -635,29 +635,9 @@
     int QP_tmp = QP;
 
     int y_pos = video->mbnum_row;
-#ifdef PV_POSTPROC_ON
-    uint8 *pp_mod[6];
-    int TotalMB = video->nTotalMB;
-    int MB_in_width = video->nMBPerRow;
-#endif
 
 
 
-    /*****
-    *     Decoding of the 6 blocks (depending on transparent pattern)
-    *****/
-#ifdef PV_POSTPROC_ON
-    if (video->postFilterType != PV_NO_POST_PROC)
-    {
-        /** post-processing ***/
-        pp_mod[0] = video->pstprcTypCur + (y_pos << 1) * (MB_in_width << 1) + (x_pos << 1);
-        pp_mod[1] = pp_mod[0] + 1;
-        pp_mod[2] = pp_mod[0] + (MB_in_width << 1);
-        pp_mod[3] = pp_mod[2] + 1;
-        pp_mod[4] = video->pstprcTypCur + (TotalMB << 2) + mbnum;
-        pp_mod[5] = pp_mod[4] + TotalMB;
-    }
-#endif
 
     /*  oscl_memset(mblock->block, 0, sizeof(typeMBStore));    Aug 9,2005 */
 
@@ -698,10 +678,6 @@
             /*  modified to new semaphore for post-proc */
             // Future work:: can be combined in the dequant function
             // @todo Deblocking Semaphore for INTRA block
-#ifdef PV_POSTPROC_ON
-            if (video->postFilterType != PV_NO_POST_PROC)
-                *pp_mod[comp] = (uint8) PostProcSemaphore(dataBlock);
-#endif
         }
         MBlockIDCT(video);
     }
@@ -738,10 +714,6 @@
             }
 
             /*  @todo Deblocking Semaphore for INTRA block, for inter just test for ringing  */
-#ifdef PV_POSTPROC_ON
-            if (video->postFilterType != PV_NO_POST_PROC)
-                *pp_mod[comp] = (uint8)((ncoeffs[comp] > 3) ? 4 : 0);
-#endif
         }
 
         (*DC)[4] = mid_gray;
@@ -760,10 +732,6 @@
         {
             ncoeffs[4] = 0;
         }
-#ifdef PV_POSTPROC_ON
-        if (video->postFilterType != PV_NO_POST_PROC)
-            *pp_mod[4] = (uint8)((ncoeffs[4] > 3) ? 4 : 0);
-#endif
         (*DC)[5] = mid_gray;
         if (CBP & 1)
         {
@@ -780,10 +748,6 @@
         {
             ncoeffs[5] = 0;
         }
-#ifdef PV_POSTPROC_ON
-        if (video->postFilterType != PV_NO_POST_PROC)
-            *pp_mod[5] = (uint8)((ncoeffs[5] > 3) ? 4 : 0);
-#endif
 
 
 
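For orientation, the pp_mod pointers removed in the two hunks above index into a per-8x8-block semaphore array: four luma entries per macroblock laid out as a double-width grid, followed by one Cb and one Cr entry per macroblock. A minimal sketch of that layout, with illustrative helper names that are not from the tree:

    #include <cstdint>

    // One byte per 8x8 block: luma plane first (4 entries per MB, 2x-wide grid),
    // then TotalMB Cb entries, then TotalMB Cr entries.
    uint8_t *luma_semaphore(uint8_t *pstprc, int mb_x, int mb_y, int mb_per_row,
                            int blk_x, int blk_y)       // blk_x, blk_y in {0, 1}
    {
        int stride = mb_per_row << 1;                   // two 8x8 blocks per MB row
        return pstprc + ((mb_y << 1) + blk_y) * stride + (mb_x << 1) + blk_x;
    }

    uint8_t *chroma_semaphore(uint8_t *pstprc, int total_mb, int mbnum, bool is_cr)
    {
        return pstprc + (total_mb << 2) + mbnum + (is_cr ? total_mb : 0);
    }

In the deleted code, pp_mod[0..3] correspond to the four luma blocks (blk_x/blk_y of 0 or 1) and pp_mod[4]/pp_mod[5] to the Cb and Cr entries.
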
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/deringing_chroma.cpp b/media/libstagefright/codecs/m4v_h263/dec/src/deringing_chroma.cpp
deleted file mode 100644
index ce779b0..0000000
--- a/media/libstagefright/codecs/m4v_h263/dec/src/deringing_chroma.cpp
+++ /dev/null
@@ -1,215 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-#include    "mp4dec_lib.h"
-#include    "post_proc.h"
-
-#ifdef PV_POSTPROC_ON
-
-void Deringing_Chroma(
-    uint8 *Rec_C,
-    int width,
-    int height,
-    int16 *QP_store,
-    int,
-    uint8 *pp_mod
-)
-{
-    /*----------------------------------------------------------------------------
-    ; Define all local variables
-    ----------------------------------------------------------------------------*/
-    int thres;
-    int v_blk, h_blk;
-    int max_diff;
-    int v_pel, h_pel;
-    int max_blk, min_blk;
-    int v0, h0;
-    uint8 *ptr;
-    int sum, sum1, incr;
-    int32 addr_v;
-    int sign_v[10], sum_v[10];
-    int *ptr2, *ptr3;
-    uint8 pelu, pelc, pell;
-    incr = width - BLKSIZE;
-
-    /*----------------------------------------------------------------------------
-    ; Function body here
-    ----------------------------------------------------------------------------*/
-    /* chrominance */
-    /* Do the first line (7 pixels at a time => Don't use MMX)*/
-    for (h_blk = 0; h_blk < width; h_blk += BLKSIZE)
-    {
-        max_diff = (QP_store[h_blk>>3] >> 2) + 4;
-        ptr = &Rec_C[h_blk];
-        max_blk = min_blk = *ptr;
-        FindMaxMin(ptr, &min_blk, &max_blk, width);
-        h0 = ((h_blk - 1) >= 1) ? (h_blk - 1) : 1;
-
-        if (max_blk - min_blk >= 4)
-        {
-            thres = (max_blk + min_blk + 1) >> 1;
-
-
-            for (v_pel = 1; v_pel < BLKSIZE - 1; v_pel++)
-            {
-                addr_v = (int32)v_pel * width;
-                ptr = &Rec_C[addr_v + h0 - 1];
-                ptr2 = &sum_v[0];
-                ptr3 = &sign_v[0];
-
-                pelu = *(ptr - width);
-                pelc = *ptr;
-                pell = *(ptr + width);
-                ptr++;
-                *ptr2++ = pelu + (pelc << 1) + pell;
-                *ptr3++ = INDEX(pelu, thres) + INDEX(pelc, thres) + INDEX(pell, thres);
-
-                pelu = *(ptr - width);
-                pelc = *ptr;
-                pell = *(ptr + width);
-                ptr++;
-                *ptr2++ = pelu + (pelc << 1) + pell;
-                *ptr3++ = INDEX(pelu, thres) + INDEX(pelc, thres) + INDEX(pell, thres);
-
-                for (h_pel = h0; h_pel < h_blk + BLKSIZE - 1; h_pel++)
-                {
-                    pelu = *(ptr - width);
-                    pelc = *ptr;
-                    pell = *(ptr + width);
-
-                    *ptr2 = pelu + (pelc << 1) + pell;
-                    *ptr3 = INDEX(pelu, thres) + INDEX(pelc, thres) + INDEX(pell, thres);
-
-                    sum1 = *(ptr3 - 2) + *(ptr3 - 1) + *ptr3;
-                    if (sum1 == 0 || sum1 == 9)
-                    {
-                        sum = (*(ptr2 - 2) + (*(ptr2 - 1) << 1) + *ptr2 + 8) >> 4;
-
-                        ptr--;
-                        if (PV_ABS(*ptr - sum) > max_diff)
-                        {
-                            if (sum > *ptr)
-                                sum = *ptr + max_diff;
-                            else
-                                sum = *ptr - max_diff;
-                        }
-                        *ptr++ = (uint8) sum;
-                    }
-                    ptr++;
-                    ptr2++;
-                    ptr3++;
-                }
-            }
-        }
-    }
-
-    for (v_blk = BLKSIZE; v_blk < height; v_blk += BLKSIZE)
-    {
-        v0 = v_blk - 1;
-        /* Do the first block (pixels=7 => No MMX) */
-        max_diff = (QP_store[((((int32)v_blk*width)>>3))>>3] >> 2) + 4;
-        ptr = &Rec_C[(int32)v_blk * width];
-        max_blk = min_blk = *ptr;
-        FindMaxMin(ptr, &min_blk, &max_blk, incr);
-
-        if (max_blk - min_blk >= 4)
-        {
-            thres = (max_blk + min_blk + 1) >> 1;
-
-            for (v_pel = v0; v_pel < v_blk + BLKSIZE - 1; v_pel++)
-            {
-                addr_v = v_pel * width;
-                ptr = &Rec_C[addr_v];
-                ptr2 = &sum_v[0];
-                ptr3 = &sign_v[0];
-
-                pelu = *(ptr - width);
-                pelc = *ptr;
-                pell = *(ptr + width);
-                ptr++;
-                *ptr2++ = pelu + (pelc << 1) + pell;
-                *ptr3++ = INDEX(pelu, thres) + INDEX(pelc, thres) + INDEX(pell, thres);
-
-                pelu = *(ptr - width);
-                pelc = *ptr;
-                pell = *(ptr + width);
-                ptr++;
-                *ptr2++ = pelu + (pelc << 1) + pell;
-                *ptr3++ = INDEX(pelu, thres) + INDEX(pelc, thres) + INDEX(pell, thres);
-
-                for (h_pel = 1; h_pel < BLKSIZE - 1; h_pel++)
-                {
-                    pelu = *(ptr - width);
-                    pelc = *ptr;
-                    pell = *(ptr + width);
-
-                    *ptr2 = pelu + (pelc << 1) + pell;
-                    *ptr3 = INDEX(pelu, thres) + INDEX(pelc, thres) + INDEX(pell, thres);
-
-                    sum1 = *(ptr3 - 2) + *(ptr3 - 1) + *ptr3;
-                    if (sum1 == 0 || sum1 == 9)
-                    {
-                        sum = (*(ptr2 - 2) + (*(ptr2 - 1) << 1) + *ptr2 + 8) >> 4;
-
-                        ptr--;
-                        if (PV_ABS(*ptr - sum) > max_diff)
-                        {
-                            if (sum > *ptr)
-                                sum = *ptr + max_diff;
-                            else
-                                sum = *ptr - max_diff;
-                        }
-                        *ptr++ = (uint8) sum;
-                    }
-                    ptr++;
-                    ptr2++;
-                    ptr3++;
-                }
-            }
-        }
-
-
-        /* Do the rest in MMX */
-        for (h_blk = BLKSIZE; h_blk < width; h_blk += BLKSIZE)
-        {
-            if ((pp_mod[(v_blk/8)*(width/8)+h_blk/8]&0x4) != 0)
-            {
-                max_diff = (QP_store[((((int32)v_blk*width)>>3)+h_blk)>>3] >> 2) + 4;
-                ptr = &Rec_C[(int32)v_blk * width + h_blk];
-                max_blk = min_blk = *ptr;
-                FindMaxMin(ptr, &min_blk, &max_blk, incr);
-                h0 = h_blk - 1;
-
-                if (max_blk - min_blk >= 4)
-                {
-                    thres = (max_blk + min_blk + 1) >> 1;
-#ifdef NoMMX
-                    AdaptiveSmooth_NoMMX(Rec_C, v0, h0, v_blk, h_blk, thres, width, max_diff);
-#else
-                    DeringAdaptiveSmoothMMX(&Rec_C[(int32)v0*width+h0], width, thres, max_diff);
-#endif
-                }
-            }
-        }
-    } /* macroblock level */
-
-    /*----------------------------------------------------------------------------
-    ; Return nothing or data or data pointer
-    ----------------------------------------------------------------------------*/
-    return;
-}
-#endif
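
The deleted Deringing_Chroma above smooths a pixel only when its whole 3x3 neighbourhood sits on one side of the block threshold thres = (max + min + 1) >> 1, replacing it with a 1-2-1 weighted average and clamping the change to max_diff. A self-contained sketch of that per-pixel step, assuming INDEX(p, thr) is the usual (p >= thr) indicator; this is illustrative, not the exact PacketVideo code:

    #include <cstdint>

    // `pel` points at the pixel being filtered inside a frame `width` bytes wide.
    void dering_pixel(uint8_t *pel, int width, int thres, int max_diff)
    {
        int sum = 0;   // 1-2-1 by 1-2-1 weighted sum of the 3x3 neighbourhood (total weight 16)
        int cnt = 0;   // how many of the 9 pixels lie at or above the threshold
        for (int dx = -1; dx <= 1; ++dx) {
            int w = (dx == 0) ? 2 : 1;                       // horizontal 1-2-1 weights
            int u = pel[dx - width], c = pel[dx], l = pel[dx + width];
            sum += w * (u + 2 * c + l);                      // vertical 1-2-1 weights
            cnt += (u >= thres) + (c >= thres) + (l >= thres);
        }
        if (cnt == 0 || cnt == 9) {                          // neighbourhood is "flat"
            int v = (sum + 8) >> 4;                          // rounded weighted average
            if (v - *pel > max_diff)       v = *pel + max_diff;   // limit the correction
            else if (*pel - v > max_diff)  v = *pel - max_diff;
            *pel = (uint8_t)v;
        }
    }

This mirrors the pelu + 2*pelc + pell column sums and the sum1 == 0 || sum1 == 9 test in the code above.
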
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/deringing_luma.cpp b/media/libstagefright/codecs/m4v_h263/dec/src/deringing_luma.cpp
deleted file mode 100644
index b5574b4..0000000
--- a/media/libstagefright/codecs/m4v_h263/dec/src/deringing_luma.cpp
+++ /dev/null
@@ -1,231 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-#include    "mp4dec_lib.h"
-#include    "post_proc.h"
-
-#ifdef PV_POSTPROC_ON
-
-void Deringing_Luma(
-    uint8 *Rec_Y,
-    int width,
-    int height,
-    int16 *QP_store,
-    int,
-    uint8 *pp_mod)
-{
-    /*----------------------------------------------------------------------------
-    ; Define all local variables
-    ----------------------------------------------------------------------------*/
-    int thres[4], range[4], max_range_blk, max_thres_blk;
-    int MB_V, MB_H, BLK_V, BLK_H;
-    int v_blk, h_blk;
-    int max_diff;
-    int max_blk, min_blk;
-    int v0, h0;
-    uint8 *ptr;
-    int thr, blks, incr;
-    int mb_indx, blk_indx;
-
-    /*----------------------------------------------------------------------------
-    ; Function body here
-    ----------------------------------------------------------------------------*/
-    incr = width - BLKSIZE;
-
-    /* Dering the first line of macro blocks */
-    for (MB_H = 0; MB_H < width; MB_H += MBSIZE)
-    {
-        max_diff = (QP_store[(MB_H)>>4] >> 2) + 4;
-
-        /* threshold determination */
-        max_range_blk = max_thres_blk = 0;
-        blks = 0;
-
-        for (BLK_V = 0; BLK_V < MBSIZE; BLK_V += BLKSIZE)
-        {
-            for (BLK_H = 0; BLK_H < MBSIZE; BLK_H += BLKSIZE)
-            {
-                ptr = &Rec_Y[(int32)(BLK_V) * width + MB_H + BLK_H];
-                FindMaxMin(ptr, &min_blk, &max_blk, incr);
-
-                thres[blks] = (max_blk + min_blk + 1) >> 1;
-                range[blks] = max_blk - min_blk;
-
-                if (range[blks] >= max_range_blk)
-                {
-                    max_range_blk = range[blks];
-                    max_thres_blk = thres[blks];
-                }
-                blks++;
-            }
-        }
-
-        blks = 0;
-        for (v_blk = 0; v_blk < MBSIZE; v_blk += BLKSIZE)
-        {
-            v0 = ((v_blk - 1) >= 1) ? (v_blk - 1) : 1;
-            for (h_blk = MB_H; h_blk < MB_H + MBSIZE; h_blk += BLKSIZE)
-            {
-                h0 = ((h_blk - 1) >= 1) ? (h_blk - 1) : 1;
-
-                /* threshold rearrangement for flat region adjacent to non-flat region */
-                if (range[blks]<32 && max_range_blk >= 64)
-                    thres[blks] = max_thres_blk;
-
-                /* threshold rearrangement for deblocking
-                (blockiness annoying at DC dominant region) */
-                if (max_range_blk >= 16)
-                {
-                    /* adaptive smoothing */
-                    thr = thres[blks];
-
-                    AdaptiveSmooth_NoMMX(Rec_Y, v0, h0, v_blk, h_blk,
-                                         thr, width, max_diff);
-                }
-                blks++;
-            } /* block level (Luminance) */
-        }
-    } /* macroblock level */
-
-
-    /* Do the rest of the macro-block-lines */
-    for (MB_V = MBSIZE; MB_V < height; MB_V += MBSIZE)
-    {
-        /* First macro-block */
-        max_diff = (QP_store[((((int32)MB_V*width)>>4))>>4] >> 2) + 4;
-        /* threshold determination */
-        max_range_blk = max_thres_blk = 0;
-        blks = 0;
-        for (BLK_V = 0; BLK_V < MBSIZE; BLK_V += BLKSIZE)
-        {
-            for (BLK_H = 0; BLK_H < MBSIZE; BLK_H += BLKSIZE)
-            {
-                ptr = &Rec_Y[(int32)(MB_V + BLK_V) * width + BLK_H];
-                FindMaxMin(ptr, &min_blk, &max_blk, incr);
-                thres[blks] = (max_blk + min_blk + 1) >> 1;
-                range[blks] = max_blk - min_blk;
-
-                if (range[blks] >= max_range_blk)
-                {
-                    max_range_blk = range[blks];
-                    max_thres_blk = thres[blks];
-                }
-                blks++;
-            }
-        }
-
-        blks = 0;
-        for (v_blk = MB_V; v_blk < MB_V + MBSIZE; v_blk += BLKSIZE)
-        {
-            v0 = v_blk - 1;
-            for (h_blk = 0; h_blk < MBSIZE; h_blk += BLKSIZE)
-            {
-                h0 = ((h_blk - 1) >= 1) ? (h_blk - 1) : 1;
-
-                /* threshold rearrangement for flat region adjacent to non-flat region */
-                if (range[blks]<32 && max_range_blk >= 64)
-                    thres[blks] = max_thres_blk;
-
-                /* threshold rearrangement for deblocking
-                (blockiness annoying at DC dominant region) */
-                if (max_range_blk >= 16)
-                {
-                    /* adaptive smoothing */
-                    thr = thres[blks];
-
-                    AdaptiveSmooth_NoMMX(Rec_Y, v0, h0, v_blk, h_blk,
-                                         thr, width, max_diff);
-                }
-                blks++;
-            }
-        } /* block level (Luminance) */
-
-        /* Rest of the macro-blocks */
-        for (MB_H = MBSIZE; MB_H < width; MB_H += MBSIZE)
-        {
-            max_diff = (QP_store[((((int32)MB_V*width)>>4)+MB_H)>>4] >> 2) + 4;
-
-            /* threshold determination */
-            max_range_blk = max_thres_blk = 0;
-            blks = 0;
-
-            mb_indx = (MB_V / 8) * (width / 8) + MB_H / 8;
-            for (BLK_V = 0; BLK_V < MBSIZE; BLK_V += BLKSIZE)
-            {
-                for (BLK_H = 0; BLK_H < MBSIZE; BLK_H += BLKSIZE)
-                {
-                    blk_indx = mb_indx + (BLK_V / 8) * width / 8 + BLK_H / 8;
-                    /* Update based on pp_mod only */
-                    if ((pp_mod[blk_indx]&0x4) != 0)
-                    {
-                        ptr = &Rec_Y[(int32)(MB_V + BLK_V) * width + MB_H + BLK_H];
-                        FindMaxMin(ptr, &min_blk, &max_blk, incr);
-                        thres[blks] = (max_blk + min_blk + 1) >> 1;
-                        range[blks] = max_blk - min_blk;
-
-                        if (range[blks] >= max_range_blk)
-                        {
-                            max_range_blk = range[blks];
-                            max_thres_blk = thres[blks];
-                        }
-                    }
-                    blks++;
-                }
-            }
-
-            blks = 0;
-            for (v_blk = MB_V; v_blk < MB_V + MBSIZE; v_blk += BLKSIZE)
-            {
-                v0 = v_blk - 1;
-                mb_indx = (v_blk / 8) * (width / 8);
-                for (h_blk = MB_H; h_blk < MB_H + MBSIZE; h_blk += BLKSIZE)
-                {
-                    h0 = h_blk - 1;
-                    blk_indx = mb_indx + h_blk / 8;
-                    if ((pp_mod[blk_indx]&0x4) != 0)
-                    {
-                        /* threshold rearrangement for flat region adjacent to non-flat region */
-                        if (range[blks]<32 && max_range_blk >= 64)
-                            thres[blks] = max_thres_blk;
-
-                        /* threshold rearrangement for deblocking
-                        (blockiness annoying at DC dominant region) */
-                        if (max_range_blk >= 16)
-                        {
-                            /* adaptive smoothing */
-                            thr = thres[blks];
-#ifdef NoMMX
-                            AdaptiveSmooth_NoMMX(Rec_Y, v0, h0, v_blk, h_blk,
-                                                 thr, width, max_diff);
-#else
-                            DeringAdaptiveSmoothMMX(&Rec_Y[v0*width+h0],
-                                                    width, thr, max_diff);
-#endif
-                        }
-                    }
-                    blks++;
-                }
-            } /* block level (Luminance) */
-        } /* macroblock level */
-    } /* macroblock level */
-
-    /*----------------------------------------------------------------------------
-    ; Return nothing or data or data pointer
-    ----------------------------------------------------------------------------*/
-    return;
-}
-#endif
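
The luma variant deleted above adds a per-macroblock threshold pass before smoothing: each 8x8 block gets thres and range from its own min/max, and a flat block bordering a strongly textured one borrows the macroblock-wide maximum threshold. A brief sketch of that selection, with illustrative names:

    struct BlkStats { int thres; int range; };   // per 8x8 block, from FindMaxMin

    // max_range_blk / max_thres_blk are the largest range and its threshold
    // among the four blocks of the macroblock, as computed in the loops above.
    int dering_threshold(const BlkStats &blk, int max_range_blk, int max_thres_blk)
    {
        if (blk.range < 32 && max_range_blk >= 64)
            return max_thres_blk;   // flat block next to a busy one: use the MB threshold
        return blk.thres;
    }
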
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/find_min_max.cpp b/media/libstagefright/codecs/m4v_h263/dec/src/find_min_max.cpp
deleted file mode 100644
index 1ac88a1..0000000
--- a/media/libstagefright/codecs/m4v_h263/dec/src/find_min_max.cpp
+++ /dev/null
@@ -1,176 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/*
-------------------------------------------------------------------------------
- INPUT AND OUTPUT DEFINITIONS
-
- Inputs:
-    input_ptr = pointer to the buffer containing values of type UChar
-            in a 2D block of data.
-    min_ptr = pointer to the minimum value of type Int to be found in a
-          square block of size BLKSIZE contained in 2D block of data.
-    max_ptr = pointer to the maximum value of type Int to be found in a
-          square block of size BLKSIZE contained in 2D block of data.
-    incr = value of type Int representing the width of 2D block of data.
-
- Local Stores/Buffers/Pointers Needed:
-    None
-
- Global Stores/Buffers/Pointers Needed:
-    None
-
- Outputs:
-    None
-
- Pointers and Buffers Modified:
-    min_ptr points to the found minimum value in the square block of
-    size BLKSIZE contained in 2D block of data.
-
-    max_ptr points to the found maximum value in the square block of
-    size BLKSIZE contained in 2D block of data.
-
- Local Stores Modified:
-    None
-
- Global Stores Modified:
-    None
-
-------------------------------------------------------------------------------
- FUNCTION DESCRIPTION
-
- This function finds the maximum and the minimum values in a square block of
- data of size BLKSIZE * BLKSIZE. The data is contained in the buffer which
- represents a 2D block of data that is larger than BLKSIZE * BLKSIZE.
- This is illustrated below.
-
-    mem loc x + 00h -> o o o o o o o o o o o o o o o o
-    mem loc x + 10h -> o o o o o X X X X X X X X o o o
-    mem loc x + 20h -> o o o o o X X X X X X X X o o o
-    mem loc x + 30h -> o o o o o X X X X X X X X o o o
-    mem loc x + 40h -> o o o o o X X X X X X X X o o o
-    mem loc x + 50h -> o o o o o X X X X X X X X o o o
-    mem loc x + 60h -> o o o o o X X X X X X X X o o o
-    mem loc x + 70h -> o o o o o X X X X X X X X o o o
-    mem loc x + 80h -> o o o o o X X X X X X X X o o o
-    mem loc x + 90h -> o o o o o o o o o o o o o o o o
-    mem loc x + A0h -> o o o o o o o o o o o o o o o o
-    mem loc x + B0h -> o o o o o o o o o o o o o o o o
-
-For illustration purposes, the diagram assumes that BLKSIZE is equal to 8
-but this is not a requirement. In this diagram, the buffer starts at
-location x but the input pointer, input_ptr, passed into this function
-would be the first row of data to be searched which is at x + 15h. The
-value of incr passed onto this function represents the amount the input_ptr
-needs to be incremented to point to the next row of data.
-
-This function compares each value in a row to the current maximum and
-minimum. After each row, input_ptr is incremented to point to the next row.
-This is repeated until all rows have been processed. When the search is
-complete the location pointed to by min_ptr contains the minimum value
-found and the location pointed to by max_ptr contains the maximum value found.
-
-------------------------------------------------------------------------------
-*/
-
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-#include    "mp4dec_lib.h"
-#include    "post_proc.h"
-
-/*----------------------------------------------------------------------------
-; MACROS
-; Define module specific macros here
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; DEFINES
-; Include all pre-processor statements here. Include conditional
-; compile variables also.
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; LOCAL FUNCTION DEFINITIONS
-; Function Prototype declaration
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; LOCAL STORE/BUFFER/POINTER DEFINITIONS
-; Variable declaration - defined here and used outside this module
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; EXTERNAL FUNCTION REFERENCES
-; Declare functions defined elsewhere and referenced in this module
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; EXTERNAL GLOBAL STORE/BUFFER/POINTER REFERENCES
-; Declare variables used in this module but defined elsewhere
-----------------------------------------------------------------------------*/
-
-#ifdef PV_POSTPROC_ON
-/*----------------------------------------------------------------------------
-; FUNCTION CODE
-----------------------------------------------------------------------------*/
-void  FindMaxMin(
-    uint8 *input_ptr,
-    int *min_ptr,
-    int *max_ptr,
-    int incr)
-{
-    /*----------------------------------------------------------------------------
-    ; Define all local variables
-    ----------------------------------------------------------------------------*/
-    uint    i, j;
-    int min, max;
-
-    /*----------------------------------------------------------------------------
-    ; Function body here
-    ----------------------------------------------------------------------------*/
-    max = min = *input_ptr;
-    /*  incr = incr - BLKSIZE; */   /*  09/06/2001, already passed in as width - BLKSIZE */
-
-    for (i = BLKSIZE; i > 0; i--)
-    {
-        for (j = BLKSIZE; j > 0; j--)
-        {
-            if (*input_ptr > max)
-            {
-                max = *input_ptr;
-            }
-            else if (*input_ptr < min)
-            {
-                min = *input_ptr;
-            }
-            input_ptr += 1;
-        }
-
-        /* set pointer to the beginning of the next row*/
-        input_ptr += incr;
-    }
-
-    *max_ptr = max;
-    *min_ptr = min;
-    /*----------------------------------------------------------------------------
-    ; Return nothing or data or data pointer
-    ----------------------------------------------------------------------------*/
-    return;
-}
-#endif
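
As the header comment above notes, callers hand FindMaxMin a pointer to the top-left pixel of an 8x8 block inside a wider plane and pass incr = width - BLKSIZE so the scan can hop to the next row. A hypothetical caller sketch (names not from the tree):

    #include <cstdint>

    void block_stats(const uint8_t *plane, int width, int blk_x, int blk_y,
                     int *min_out, int *max_out)
    {
        const uint8_t *p = plane + blk_y * width + blk_x;  // top-left of the 8x8 block
        int mn = p[0], mx = p[0];
        for (int row = 0; row < 8; ++row) {
            for (int col = 0; col < 8; ++col) {
                int v = p[col];
                if (v > mx) mx = v;
                else if (v < mn) mn = v;
            }
            p += width;   // equivalent to the incr = width - BLKSIZE advance above
        }
        *min_out = mn;
        *max_out = mx;    // deringing then uses (mx + mn + 1) >> 1 as thres, mx - mn as range
    }
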
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/idct_vca.cpp b/media/libstagefright/codecs/m4v_h263/dec/src/idct_vca.cpp
index f35ce4f..dbaf5d1 100644
--- a/media/libstagefright/codecs/m4v_h263/dec/src/idct_vca.cpp
+++ b/media/libstagefright/codecs/m4v_h263/dec/src/idct_vca.cpp
@@ -37,6 +37,7 @@
     return ;
 }
 
+__attribute__((no_sanitize("signed-integer-overflow")))
 void idctrow1(int16 *blk, uint8 *pred, uint8 *dst, int width)
 {
     /* shortcut */
@@ -94,6 +95,7 @@
     return;
 }
 
+__attribute__((no_sanitize("signed-integer-overflow")))
 void idctrow2(int16 *blk, uint8 *pred, uint8 *dst, int width)
 {
     int32 x0, x1, x2, x4, x5;
@@ -155,6 +157,7 @@
     return ;
 }
 
+__attribute__((no_sanitize("signed-integer-overflow")))
 void idctcol2(int16 *blk)
 {
     int32 x0, x1, x3, x5, x7;//, x8;
@@ -182,6 +185,7 @@
     return ;
 }
 
+__attribute__((no_sanitize("signed-integer-overflow")))
 void idctrow3(int16 *blk, uint8 *pred, uint8 *dst, int width)
 {
     int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
@@ -254,6 +258,7 @@
     return ;
 }
 
+__attribute__((no_sanitize("signed-integer-overflow")))
 void idctcol3(int16 *blk)
 {
     int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
@@ -291,6 +296,7 @@
 }
 
 
+__attribute__((no_sanitize("signed-integer-overflow")))
 void idctrow4(int16 *blk, uint8 *pred, uint8 *dst, int width)
 {
     int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
@@ -368,6 +374,7 @@
     return ;
 }
 
+__attribute__((no_sanitize("signed-integer-overflow")))
 void idctcol4(int16 *blk)
 {
     int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
@@ -445,6 +452,7 @@
     return;
 }
 
+__attribute__((no_sanitize("signed-integer-overflow")))
 void idctrow2_intra(int16 *blk, PIXEL *comp, int width)
 {
     int32 x0, x1, x2, x4, x5, temp;
@@ -502,6 +510,7 @@
     return ;
 }
 
+__attribute__((no_sanitize("signed-integer-overflow")))
 void idctrow3_intra(int16 *blk, PIXEL *comp, int width)
 {
     int32 x0, x1, x2, x3, x4, x5, x6, x7, x8, temp;
@@ -575,6 +584,7 @@
     return ;
 }
 
+__attribute__((no_sanitize("signed-integer-overflow")))
 void idctrow4_intra(int16 *blk, PIXEL *comp, int width)
 {
     int32 x0, x1, x2, x3, x4, x5, x6, x7, x8, temp;
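
The idct_vca.cpp hunk annotates each partial-IDCT routine with __attribute__((no_sanitize("signed-integer-overflow"))), a Clang function attribute that disables UBSan's signed-overflow check for just that function, presumably because extreme coefficient values can make these fixed-point butterflies wrap. Note that the overflow itself remains undefined behaviour in C++; the attribute only silences the sanitizer. A minimal illustration on a hypothetical function:

    #include <cstdint>

    // Only this function loses the signed-integer-overflow instrumentation;
    // the rest of the translation unit stays checked when built with
    // -fsanitize=signed-integer-overflow.
    __attribute__((no_sanitize("signed-integer-overflow")))
    int32_t scaled_sum(int32_t a, int32_t b)
    {
        // Fixed-point style multiply-accumulate that may wrap for extreme inputs,
        // similar in spirit to the IDCT butterflies above.
        return a * 181 + b * 181;
    }
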
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/mb_motion_comp.cpp b/media/libstagefright/codecs/m4v_h263/dec/src/mb_motion_comp.cpp
index 877723d..79760f5 100644
--- a/media/libstagefright/codecs/m4v_h263/dec/src/mb_motion_comp.cpp
+++ b/media/libstagefright/codecs/m4v_h263/dec/src/mb_motion_comp.cpp
@@ -154,14 +154,6 @@
     int xpred, ypred;
     int xsum;
     int round1;
-#ifdef PV_POSTPROC_ON // 2/14/2001      
-    /* Total number of pixels in the VOL */
-    int32 size = (int32) video->nTotalMB << 8;
-    uint8 *pp_dec_y, *pp_dec_u;
-    int ll[4];
-    int tmp = 0;
-    uint8 msk_deblock = 0;
-#endif
     /*----------------------------------------------------------------------------
     ; Function body here
     ----------------------------------------------------------------------------*/
@@ -404,43 +396,6 @@
     /* Call function to set de-blocking and de-ringing */
     /*   semaphores for luminance                      */
 
-#ifdef PV_POSTPROC_ON
-    if (video->postFilterType != PV_NO_POST_PROC)
-    {
-        if (mode&INTER_1VMASK)
-        {
-            pp_dec_y = video->pstprcTypCur + imv;
-            ll[0] = 1;
-            ll[1] = mvwidth - 1;
-            ll[2] = 1;
-            ll[3] = -mvwidth - 1;
-            msk_deblock = pp_semaphore_luma(xpred, ypred, pp_dec_y,
-                                            video->pstprcTypPrv, ll, &tmp, px[0], py[0], mvwidth,
-                                            width, height);
-
-            pp_dec_u = video->pstprcTypCur + (size >> 6) +
-                       ((imv + (xpos >> 3)) >> 2);
-
-            pp_semaphore_chroma_inter(xpred, ypred, pp_dec_u,
-                                      video->pstprcTypPrv, dx, dy, mvwidth, height, size,
-                                      tmp, msk_deblock);
-        }
-        else
-        {
-            /* Post-processing mode (MBM_INTER8) */
-            /* deblocking and deringing) */
-            pp_dec_y = video->pstprcTypCur + imv;
-            *pp_dec_y = 4;
-            *(pp_dec_y + 1) = 4;
-            *(pp_dec_y + mvwidth) = 4;
-            *(pp_dec_y + mvwidth + 1) = 4;
-            pp_dec_u = video->pstprcTypCur + (size >> 6) +
-                       ((imv + (xpos >> 3)) >> 2);
-            *pp_dec_u = 4;
-            pp_dec_u[size>>8] = 4;
-        }
-    }
-#endif
 
 
     /* xpred and ypred calculation for Chrominance is */
@@ -566,13 +521,6 @@
     PIXEL *cv_comp, *cv_prev;
     int width, width_uv;
     int32 offset;
-#ifdef PV_POSTPROC_ON // 2/14/2001      
-    int imv;
-    int32 size = (int32) video->nTotalMB << 8;
-    uint8 *pp_dec_y, *pp_dec_u;
-    uint8 *pp_prev1;
-    int mvwidth = video->nMBPerRow << 1;
-#endif
 
     width = video->width;
     width_uv  = width >> 1;
@@ -609,28 +557,6 @@
     PutSKIPPED_B(cv_comp, cv_prev, width_uv);
 
     /*  10/24/2000 post_processing semaphore generation */
-#ifdef PV_POSTPROC_ON // 2/14/2001
-    if (video->postFilterType != PV_NO_POST_PROC)
-    {
-        imv = (offset >> 6) - (xpos >> 6) + (xpos >> 3);
-        /* Post-processing mode (copy previous MB) */
-        pp_prev1 = video->pstprcTypPrv + imv;
-        pp_dec_y = video->pstprcTypCur + imv;
-        *pp_dec_y = *pp_prev1;
-        *(pp_dec_y + 1) = *(pp_prev1 + 1);
-        *(pp_dec_y + mvwidth) = *(pp_prev1 + mvwidth);
-        *(pp_dec_y + mvwidth + 1) = *(pp_prev1 + mvwidth + 1);
-
-        /* chrominance */
-        /*4*MB_in_width*MB_in_height*/
-        pp_prev1 = video->pstprcTypPrv + (size >> 6) +
-                   ((imv + (xpos >> 3)) >> 2);
-        pp_dec_u = video->pstprcTypCur + (size >> 6) +
-                   ((imv + (xpos >> 3)) >> 2);
-        *pp_dec_u = *pp_prev1;
-        pp_dec_u[size>>8] = pp_prev1[size>>8];
-    }
-#endif
     /*----------------------------------------------------------------------------
     ; Return nothing or data or data pointer
     ----------------------------------------------------------------------------*/
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/mp4dec_lib.h b/media/libstagefright/codecs/m4v_h263/dec/src/mp4dec_lib.h
index 9cd4edc..ce6f9c3 100644
--- a/media/libstagefright/codecs/m4v_h263/dec/src/mp4dec_lib.h
+++ b/media/libstagefright/codecs/m4v_h263/dec/src/mp4dec_lib.h
@@ -170,37 +170,6 @@
 
     /*--------------------------------------------------------------------------*/
     /* defined in pp_semaphore_chroma_inter.c */
-#ifdef PV_POSTPROC_ON
-    void pp_semaphore_chroma_inter(
-        int xpred,      /* i */
-        int ypred,      /* i */
-        uint8   *pp_dec_u,  /* i/o */
-        uint8   *pstprcTypPrv,  /* i */
-        int dx,     /* i */
-        int dy,     /* i */
-        int mvwidth,    /* i */
-        int height,     /* i */
-        int32   size,       /* i */
-        int mv_loc,     /* i */
-        uint8   msk_deblock /* i */
-    );
-
-    /*--------------------------------------------------------------------------*/
-    /* defined in pp_semaphore_luma.c */
-    uint8 pp_semaphore_luma(
-        int xpred,      /* i */
-        int ypred,      /* i */
-        uint8   *pp_dec_y,  /* i/o */
-        uint8   *pstprcTypPrv,  /* i */
-        int *ll,        /* i */
-        int *mv_loc,    /* i/o */
-        int dx,     /* i */
-        int dy,     /* i */
-        int mvwidth,    /* i */
-        int width,      /* i */
-        int height      /* i */
-    );
-#endif
     /*--------------------------------------------------------------------------*/
     /* defined in get_pred_adv_mb_add.c */
     int GetPredAdvancedMB(
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/post_filter.cpp b/media/libstagefright/codecs/m4v_h263/dec/src/post_filter.cpp
index b36050c..37a03a0 100644
--- a/media/libstagefright/codecs/m4v_h263/dec/src/post_filter.cpp
+++ b/media/libstagefright/codecs/m4v_h263/dec/src/post_filter.cpp
@@ -24,152 +24,6 @@
 const static int STRENGTH_tab[] = {0, 1, 1, 2, 2, 3, 3, 4, 4, 4, 5, 5, 6, 6, 7, 7, 7, 8, 8, 8, 9, 9, 9, 10, 10, 10, 11, 11, 11, 12, 12, 12};
 #endif
 
-#ifdef PV_POSTPROC_ON
-/*----------------------------------------------------------------------------
-; FUNCTION CODE
-----------------------------------------------------------------------------*/
-void PostFilter(
-    VideoDecData *video,
-    int filter_type,
-    uint8 *output)
-{
-    /*----------------------------------------------------------------------------
-    ; Define all local variables
-    ----------------------------------------------------------------------------*/
-    uint8 *pp_mod;
-    int16 *QP_store;
-    int combined_with_deblock_filter;
-    int nTotalMB = video->nTotalMB;
-    int width, height;
-    int32 size;
-    int softDeblocking;
-    uint8 *decodedFrame = video->videoDecControls->outputFrame;
-    /*----------------------------------------------------------------------------
-    ; Function body here
-    ----------------------------------------------------------------------------*/
-    width = video->width;
-    height = video->height;
-    size = (int32)width * height;
-
-    oscl_memcpy(output, decodedFrame, size);
-    oscl_memcpy(output + size, decodedFrame + size, (size >> 2));
-    oscl_memcpy(output + size + (size >> 2), decodedFrame + size + (size >> 2), (size >> 2));
-
-    if (filter_type == 0)
-        return;
-
-    /* The softDecoding cutoff corresponds to ~93000 bps for QCIF 15fps clip  */
-    if (PVGetDecBitrate(video->videoDecControls) > (100*video->frameRate*(size >> 12)))  // MC_sofDeblock
-        softDeblocking = FALSE;
-    else
-        softDeblocking = TRUE;
-
-    combined_with_deblock_filter = filter_type & PV_DEBLOCK;
-    QP_store = video->QPMB;
-
-    /* Luma */
-    pp_mod = video->pstprcTypCur;
-
-    if ((filter_type & PV_DEBLOCK) && (filter_type & PV_DERING))
-    {
-        CombinedHorzVertRingFilter(output, width, height, QP_store, 0, pp_mod);
-    }
-    else
-    {
-        if (filter_type & PV_DEBLOCK)
-        {
-            if (softDeblocking)
-            {
-                CombinedHorzVertFilter(output, width, height,
-                                       QP_store, 0, pp_mod);
-            }
-            else
-            {
-                CombinedHorzVertFilter_NoSoftDeblocking(output, width, height,
-                                                        QP_store, 0, pp_mod);
-            }
-        }
-        if (filter_type & PV_DERING)
-        {
-            Deringing_Luma(output, width, height, QP_store,
-                           combined_with_deblock_filter, pp_mod);
-
-        }
-    }
-
-    /* Chroma */
-
-    pp_mod += (nTotalMB << 2);
-    output += size;
-
-    if ((filter_type & PV_DEBLOCK) && (filter_type & PV_DERING))
-    {
-        CombinedHorzVertRingFilter(output, (int)(width >> 1), (int)(height >> 1), QP_store, (int) 1, pp_mod);
-    }
-    else
-    {
-        if (filter_type & PV_DEBLOCK)
-        {
-            if (softDeblocking)
-            {
-                CombinedHorzVertFilter(output, (int)(width >> 1),
-                                       (int)(height >> 1), QP_store, (int) 1, pp_mod);
-            }
-            else
-            {
-                CombinedHorzVertFilter_NoSoftDeblocking(output, (int)(width >> 1),
-                                                        (int)(height >> 1), QP_store, (int) 1, pp_mod);
-            }
-        }
-        if (filter_type & PV_DERING)
-        {
-            Deringing_Chroma(output, (int)(width >> 1),
-                             (int)(height >> 1), QP_store,
-                             combined_with_deblock_filter, pp_mod);
-        }
-    }
-
-    pp_mod += nTotalMB;
-    output += (size >> 2);
-
-    if ((filter_type & PV_DEBLOCK) && (filter_type & PV_DERING))
-    {
-        CombinedHorzVertRingFilter(output, (int)(width >> 1), (int)(height >> 1), QP_store, (int) 1, pp_mod);
-    }
-    else
-    {
-        if (filter_type & PV_DEBLOCK)
-        {
-            if (softDeblocking)
-            {
-                CombinedHorzVertFilter(output, (int)(width >> 1),
-                                       (int)(height >> 1), QP_store, (int) 1, pp_mod);
-            }
-            else
-            {
-                CombinedHorzVertFilter_NoSoftDeblocking(output, (int)(width >> 1),
-                                                        (int)(height >> 1), QP_store, (int) 1, pp_mod);
-            }
-        }
-        if (filter_type & PV_DERING)
-        {
-            Deringing_Chroma(output, (int)(width >> 1),
-                             (int)(height >> 1), QP_store,
-                             combined_with_deblock_filter, pp_mod);
-        }
-    }
-
-    /*  swap current pp_mod to prev_frame pp_mod */
-    pp_mod = video->pstprcTypCur;
-    video->pstprcTypCur = video->pstprcTypPrv;
-    video->pstprcTypPrv = pp_mod;
-
-    /*----------------------------------------------------------------------------
-    ; Return nothing or data or data pointer
-    ----------------------------------------------------------------------------*/
-    return;
-}
-#endif
 
 
 #ifdef PV_ANNEX_IJKT_SUPPORT
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/post_proc_semaphore.cpp b/media/libstagefright/codecs/m4v_h263/dec/src/post_proc_semaphore.cpp
deleted file mode 100644
index 3abc6be..0000000
--- a/media/libstagefright/codecs/m4v_h263/dec/src/post_proc_semaphore.cpp
+++ /dev/null
@@ -1,247 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/*
-------------------------------------------------------------------------------
- INPUT AND OUTPUT DEFINITIONS
-
- Inputs:
-    q_block = pointer to buffer of inverse quantized DCT coefficients of type
-              int for intra-VOP mode or buffer of residual data of type int
-              for inter-VOP mode
-
- Local Stores/Buffers/Pointers Needed:
-    None
-
- Global Stores/Buffers/Pointers Needed:
-    None
-
- Outputs:
-    postmode = post processing semaphore with the vertical deblocking,
-               horizontal deblocking, and deringing bits set up accordingly
-
- Pointers and Buffers Modified:
-    None
-
- Local Stores Modified:
-    None
-
- Global Stores Modified:
-    None
-
-------------------------------------------------------------------------------
- FUNCTION DESCRIPTION
-
- This function sets up the postmode semaphore based on the contents of the
- buffer pointed to by q_block. The function starts out with the assumption
- that all entries of q_block, except for the first entry (q_block[0]), are
- zero. This case can induce horizontal and vertical blocking artifacts,
- therefore, both horizontal and vertical deblocking bits are enabled.
-
- The following conditions are tested when setting up the horizontal/vertical
- deblocking and deringing bits:
- 1. When only the elements of the top row of the B_SIZE x B_SIZE block
-    (q_block[n], n = 0,..., B_SIZE-1) are non-zero, vertical blocking artifacts
-    may result, therefore, only the vertical deblocking bit is enabled.
-    Otherwise, the vertical deblocking bit is disabled.
- 2. When only the elements of the far left column of the B_SIZE x B_SIZE block
-    (q_block[n*B_SIZE], n = 0, ..., B_SIZE-1) are non-zero, horizontal blocking
-    artifacts may result, therefore, only the horizontal deblocking bit is
-    enabled. Otherwise, the horizontal deblocking bit is disabled.
- 3. If any non-zero elements exist in positions other than q_block[0],
-    q_block[1], or q_block[B_SIZE], the deringing bit is enabled. Otherwise,
-    it is disabled.
-
- The 3 least significant bits of postmode defines vertical or horizontal
- deblocking and deringing.
-
- The valid values are shown below:
- -------------------------------------------------------
- |           Type                 | Enabled | Disabled |
- -------------------------------------------------------
- | Vertical Deblocking (Bit #0)   |    1    |     0    |
- -------------------------------------------------------
- | Horizontal Deblocking (Bit #1) |    1    |     0    |
- -------------------------------------------------------
- | Deringing (Bit #2)             |    1    |     0    |
- -------------------------------------------------------
-
-*/
-
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-#include    "mp4dec_lib.h"
-#include    "mp4def.h"
-#include    "post_proc.h"
-
-/*----------------------------------------------------------------------------
-; MACROS
-; Define module specific macros here
-----------------------------------------------------------------------------*/
-
-
-/*----------------------------------------------------------------------------
-; DEFINES
-; Include all pre-processor statements here. Include conditional
-; compile variables also.
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; LOCAL FUNCTION DEFINITIONS
-; Function Prototype declaration
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; LOCAL STORE/BUFFER/POINTER DEFINITIONS
-; Variable declaration - defined here and used outside this module
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; EXTERNAL FUNCTION REFERENCES
-; Declare functions defined elsewhere and referenced in this module
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; EXTERNAL GLOBAL STORE/BUFFER/POINTER REFERENCES
-; Declare variables used in this module but defined elsewhere
-----------------------------------------------------------------------------*/
-#ifdef PV_POSTPROC_ON
-/*----------------------------------------------------------------------------
-; FUNCTION CODE
-----------------------------------------------------------------------------*/
-int PostProcSemaphore(
-    int16 *q_block)
-{
-    /*----------------------------------------------------------------------------
-    ; Define all local variables
-    ----------------------------------------------------------------------------*/
-    int i, j;
-
-    /* Set default value to vertical and horizontal deblocking enabled */
-    /* Initial assumption is that only q_block[0] element is non-zero, */
-    /* therefore, vertical and horizontal deblocking bits are set to 1 */
-    int postmode = 0x3;
-
-    /*----------------------------------------------------------------------------
-    ; Function body here
-    ----------------------------------------------------------------------------*/
-    /* Vertical deblocking bit is enabled when only the entire top row of   */
-    /* the B_SIZE x B_SIZE block, i.e., q_block[n], n = 0,..., B_SIZE-1,    */
-    /* are non-zero. Since initial assumption is that all elements, except  */
-    /* q_block[0], is zero, we need to check the remaining elements in the  */
-    /* top row to  determine if all or some are non-zero.                   */
-    if (q_block[1] != 0)
-    {
-        /* At this point, q_block[0] and q_block[1] are non-zero, while */
-        /* q_block[n], n = 2,..., B_SIZE-1, are zero. Therefore, we     */
-        /* need to disable vertical deblocking                          */
-        postmode &= 0xE;
-    }
-
-    for (i = 2; i < B_SIZE; i++)
-    {
-        if (q_block[i])
-        {
-            /* Check if q_block[n], n = 2,..., B_SIZE-1, are non-zero.*/
-            /* If any of them turn out to be non-zero, we need to     */
-            /* disable vertical deblocking.                           */
-            postmode &= 0xE;
-
-            /* Deringing is enabled if any nonzero elements exist in */
-            /* positions other than q_block[0], q_block[1] or        */
-            /* q_block[B_SIZE].                                      */
-            postmode |= 0x4;
-
-            break;
-        }
-    }
-
-    /* Horizontal deblocking bit is enabled when only the entire far */
-    /* left column, i.e., q_block[n*B_SIZE], n = 0, ..., B_SIZE-1,   */
-    /* are non-zero. Since initial assumption is that all elements,  */
-    /* except q_block[0], is zero, we need to check the remaining    */
-    /* elements in the far left column to determine if all or some   */
-    /* are non-zero.                                                 */
-    if (q_block[B_SIZE])
-    {
-        /* At this point, only q_block[0] and q_block[B_SIZE] are non-zero, */
-        /* while q_block[n*B_SIZE], n = 2, 3,..., B_SIZE-1, are zero.       */
-        /* Therefore, we need to disable horizontal deblocking.             */
-        postmode &= 0xD;
-    }
-
-    for (i = 16; i < NCOEFF_BLOCK; i += B_SIZE)
-    {
-        if (q_block[i])
-        {
-            /* Check if q_block[n], n = 2*B_SIZE,...,(B_SIZE-1)*B_SIZE,  */
-            /* are non-zero. If any of them turn out to be non-zero,     */
-            /* we need to disable horizontal deblocking.                 */
-            postmode &= 0xD;
-
-            /* Deringing is enabled if any nonzero elements exist in */
-            /* positions other than q_block[0], q_block[1] or        */
-            /* q_block[B_SIZE].                                      */
-            postmode |= 0x4;
-
-            break;
-        }
-    }
-
-    /* At this point, only the first row and far left column elements */
-    /* have been tested. If deringing bit is still not set at this    */
-    /* point, check the rest of q_block to determine if the elements  */
-    /* are non-zero. If all elements, besides q_block[0], q_block[1], */
-    /* or q_block[B_SIZE] are non-zero, deringing bit must be set     */
-    if ((postmode & 0x4) == 0)
-    {
-        for (i = 1; i < B_SIZE; i++)
-        {
-            for (j = 1; j < B_SIZE; j++)
-            {
-                if (q_block[(i<<3)+j])
-                {
-                    /* At this point, q_block[0] and another q_block */
-                    /* element are non-zero, therefore, we need to   */
-                    /* disable vertical and horizontal deblocking    */
-                    postmode &= 0xC;
-
-                    /* Deringing is enabled if any nonzero elements exist in */
-                    /* positions other than q_block[0], q_block[1] or        */
-                    /* q_block[B_SIZE].                                      */
-                    postmode |= 0x4;
-
-                    /* Set outer FOR loop count to B_SIZE to get out of */
-                    /* outer FOR loop                                   */
-                    i = B_SIZE;
-
-                    /* Get out of inner FOR loop */
-                    break;
-                }
-            }
-        }
-    }
-
-    /*----------------------------------------------------------------------------
-    ; Return nothing or data or data pointer
-    ----------------------------------------------------------------------------*/
-    return (postmode);
-}
-
-#endif
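
The table in the deleted header above defines the 3-bit semaphore that PostProcSemaphore returns and that the deblocking/deringing code reads back (for example the pp_mod[...] & 0x4 tests). A small decoding sketch; the constant names are illustrative, only the bit positions come from the table:

    #include <cstdint>

    enum {
        PP_VERT_DEBLOCK = 0x1,   // bit 0
        PP_HORZ_DEBLOCK = 0x2,   // bit 1
        PP_DERING       = 0x4,   // bit 2
    };

    void describe_semaphore(uint8_t pp)
    {
        bool vert   = pp & PP_VERT_DEBLOCK;   // vertical edge may show blocking
        bool horz   = pp & PP_HORZ_DEBLOCK;   // horizontal edge may show blocking
        bool dering = pp & PP_DERING;         // block may show ringing artifacts
        (void)vert; (void)horz; (void)dering; // a post filter would branch on these
    }
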
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/pp_semaphore_chroma_inter.cpp b/media/libstagefright/codecs/m4v_h263/dec/src/pp_semaphore_chroma_inter.cpp
deleted file mode 100644
index 7c20222..0000000
--- a/media/libstagefright/codecs/m4v_h263/dec/src/pp_semaphore_chroma_inter.cpp
+++ /dev/null
@@ -1,262 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/*
-------------------------------------------------------------------------------
- INPUT AND OUTPUT DEFINITIONS
-
- Inputs:
-    xpred = x-axis coordinate of the block used for prediction (int)
-    ypred = y-axis coordinate of the block used for prediction (int)
-    pp_dec_u = pointer to the post processing semaphore for chrominance
-               (uint8)
-    pstprcTypPrv = pointer the previous frame's post processing type
-                   (uint8)
-    dx = horizontal component of the motion vector (int)
-    dy = vertical component of the motion vector (int)
-    mvwidth = number of blocks per row in the luminance VOP (int)
-    height = luminance VOP height in pixels (int)
-    size = total number of pixel in the current luminance VOP (int)
-    mv_loc = flag indicating location of the motion compensated
-         (x,y) position with respect to the luminance MB (int);
-         0 -> inside MB, 1 -> outside MB
-    msk_deblock = flag indicating whether to perform deblocking
-              (msk_deblock = 0) or not (msk_deblock = 1) (uint8)
-
- Local Stores/Buffers/Pointers Needed:
-    None
-
- Global Stores/Buffers/Pointers Needed:
-    None
-
- Outputs:
-    None
-
- Pointers and Buffers Modified:
-    pp_dec_u contents are the updated semaphore propagation data
-
- Local Stores Modified:
-    None
-
- Global Stores Modified:
-    None
-
-------------------------------------------------------------------------------
- FUNCTION DESCRIPTION
-
- This functions performs post processing semaphore propagation processing
- after chrominance prediction in interframe processing mode.
-
-*/
-
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-#include    "mp4dec_api.h"
-#include    "mp4def.h"
-
-/*----------------------------------------------------------------------------
-; MACROS
-; Define module specific macros here
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; DEFINES
-; Include all pre-processor statements here. Include conditional
-; compile variables also.
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; LOCAL FUNCTION DEFINITIONS
-; Function Prototype declaration
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; LOCAL STORE/BUFFER/POINTER DEFINITIONS
-; Variable declaration - defined here and used outside this module
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; EXTERNAL FUNCTION REFERENCES
-; Declare functions defined elsewhere and referenced in this module
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; EXTERNAL GLOBAL STORE/BUFFER/POINTER REFERENCES
-; Declare variables used in this module but defined elsewhere
-----------------------------------------------------------------------------*/
-#ifdef PV_POSTPROC_ON
-#ifdef __cplusplus
-extern "C"
-{
-#endif
-    /*----------------------------------------------------------------------------
-    ; FUNCTION CODE
-    ----------------------------------------------------------------------------*/
-    void pp_semaphore_chroma_inter(
-        int xpred,      /* i */
-        int ypred,      /* i */
-        uint8   *pp_dec_u,  /* i/o */
-        uint8   *pstprcTypPrv,  /* i */
-        int dx,     /* i */
-        int dy,     /* i */
-        int mvwidth,    /* i */
-        int height,     /* i */
-        int32   size,       /* i */
-        int mv_loc,     /* i */
-        uint8   msk_deblock /* i */
-    )
-    {
-        /*----------------------------------------------------------------------------
-        ; Define all local variables
-        ----------------------------------------------------------------------------*/
-        int mmvy, mmvx, nmvy, nmvx;
-        uint8 *pp_prev1, *pp_prev2, *pp_prev3, *pp_prev4;
-
-        /*----------------------------------------------------------------------------
-        ; Function body here
-        ----------------------------------------------------------------------------*/
-
-        /* 09/28/2000, modify semaphore propagation to */
-        /* accommodate smart indexing */
-        mmvx = xpred >> 4;  /* block x coor */
-        nmvx = mmvx;
-
-        mmvy = ypred >> 4;  /* block y coor */
-        nmvy = mmvy;
-
-        /* Check if MV is outside the frame */
-        if (mv_loc == 1)
-        {
-            /* Perform boundary check */
-            if (nmvx < 0)
-            {
-                nmvx = 0;
-            }
-            else if (nmvx > mvwidth - 1)
-            {
-                nmvx = mvwidth - 1;
-            }
-
-            if (nmvy < 0)
-            {
-                nmvy = 0;
-            }
-            else if (nmvy > (height >> 4) - 1)
-            {
-                nmvy = (height >> 4) - 1;
-            }
-        }
-
-        /* Calculate pointer to first chrominance b semaphores in       */
-        /* pstprcTypPrv, i.e., first chrominance b semaphore is in      */
-        /* (pstprcTypPrv + (size>>6)).                  */
-        /* Since total number of chrominance blocks per row in a VOP    */
-        /* is half of the total number of luminance blocks per row in a */
-        /* VOP, we use (mvwidth >> 1) when calculating the row offset.  */
-        pp_prev1 = pstprcTypPrv + (size >> 6) + nmvx + nmvy * (mvwidth >> 1) ;
-
-        /* Check if MV is a multiple of 16 */
-        /*  1/5/01, make sure it doesn't go out of bound */
-        if (((dy&0xF) != 0) && (mmvy + 1 < (height >> 4) - 1))
-        {   /* dy is not a multiple of 16 */
-
-            /* pp_prev3 is the block below pp_prev1 block */
-            pp_prev3 = pp_prev1 + (mvwidth >> 1);
-        }
-        else
-        {   /* dy is a multiple of 16 */
-            pp_prev3 = pp_prev1;
-        }
-
-        /*  1/5/01, make sure it doesn't go out of bound */
-        if (((dx&0xF) != 0) && (mmvx + 1 < (mvwidth >> 1) - 1))
-        {   /* dx is not a multiple of 16 */
-
-            /* pp_prev2 is the block to the right of pp_prev1 block */
-            pp_prev2 = pp_prev1 + 1;
-
-            /* pp_prev4 is the block to the right of the block */
-            /* below pp_prev1 block                */
-            pp_prev4 = pp_prev3 + 1;
-        }
-        else
-        {   /* dx is a multiple of 16 */
-
-            pp_prev2 = pp_prev1;
-            pp_prev4 = pp_prev3;
-        }
-
-        /* Advance offset to location of first Chrominance R semaphore in */
-        /* pstprcTypPrv. Since the number of pixels in a Chrominance VOP  */
-        /* is (number of pixels in Luminance VOP/4), and there are 64     */
-        /* pixels in an 8x8 Chrominance block, the offset can be      */
-        /* calculated as:                         */
-        /*  mv_loc = (number of pixels in Luminance VOP/(4*64))   */
-        /*         = size/256 = size>>8               */
-        mv_loc = (size >> 8);
-
-        /*  11/3/00, change the propagation for deblocking */
-        if (msk_deblock == 0)
-        {
-
-            /* Deblocking semaphore propagation for Chrominance */
-            /* b semaphores                     */
-            *(pp_dec_u) = 0;
-
-            /* Advance offset to point to Chrominance r semaphores */
-            pp_dec_u += mv_loc;
-
-            /* Deblocking semaphore propagation for Chrominance */
-            /* r semaphores                     */
-            *(pp_dec_u) = 0;
-        }
-        else
-        {
-            /* Deringing semaphore propagation for Chrominance B block */
-            if ((*(pp_dec_u)&4) == 0)
-            {
-                *(pp_dec_u) |= ((*(pp_prev1) | *(pp_prev2) |
-                                 *(pp_prev3) | *(pp_prev4)) & 0x4);
-            }
-
-            /* Advance offset to point to Chrominance r semaphores */
-            pp_dec_u += mv_loc;
-            pp_prev1 += mv_loc;
-            pp_prev2 += mv_loc;
-            pp_prev3 += mv_loc;
-            pp_prev4 += mv_loc;
-
-            /* Deringing semaphore propagation for Chrominance R */
-            if ((*(pp_dec_u)&4) == 0)
-            {
-                *(pp_dec_u) |= ((*(pp_prev1) | *(pp_prev2) |
-                                 *(pp_prev3) | *(pp_prev4)) & 0x4);
-            }
-        }
-
-        /*----------------------------------------------------------------------------
-        ; Return nothing or data or data pointer
-        ----------------------------------------------------------------------------*/
-        return;
-    }
-#ifdef __cplusplus
-}
-#endif
-
-#endif
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/pp_semaphore_luma.cpp b/media/libstagefright/codecs/m4v_h263/dec/src/pp_semaphore_luma.cpp
deleted file mode 100644
index b3a1ebd..0000000
--- a/media/libstagefright/codecs/m4v_h263/dec/src/pp_semaphore_luma.cpp
+++ /dev/null
@@ -1,378 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/*
-------------------------------------------------------------------------------
- INPUT AND OUTPUT DEFINITIONS
-
- Inputs:
-    xpred = x-axis coordinate of the MB used for prediction (int)
-    ypred = y-axis coordinate of the MB used for prediction (int)
-    pp_dec_y = pointer to the post processing semaphore for current
-           luminance frame (uint8)
-    pstprcTypPrv = pointer the previous frame's post processing type
-                   (uint8)
-    ll = pointer to the buffer (int)
-    mv_loc = flag indicating location of the motion compensated
-         (x,y) position with respect to the luminance MB (int);
-         0 -> inside MB, 1 -> outside MB
-    dx = horizontal component of the motion vector (int)
-    dy = vertical component of the motion vector (int)
-    mvwidth = number of blocks per row (int)
-    width = luminance VOP width in pixels (int)
-    height = luminance VOP height in pixels (int)
-
- Local Stores/Buffers/Pointers Needed:
-    None
-
- Global Stores/Buffers/Pointers Needed:
-    None
-
- Outputs:
-    msk_deblock = flag that indicates whether deblocking is to be
-              performed (msk_deblock = 0) or not (msk_deblock =
-              1) (uint8)
-
- Pointers and Buffers Modified:
-    pp_dec_y contents are the updated semapohore propagation data
-
- Local Stores Modified:
-    None
-
- Global Stores Modified:
-    None
-
-------------------------------------------------------------------------------
- FUNCTION DESCRIPTION
-
- This functions performs post processing semaphore propagation processing
- after luminance prediction.
-
-*/
-
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-#include    "mp4dec_api.h"
-#include    "mp4def.h"
-
-/*----------------------------------------------------------------------------
-; MACROS
-; Define module specific macros here
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; DEFINES
-; Include all pre-processor statements here. Include conditional
-; compile variables also.
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; LOCAL FUNCTION DEFINITIONS
-; Function Prototype declaration
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; LOCAL STORE/BUFFER/POINTER DEFINITIONS
-; Variable declaration - defined here and used outside this module
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; EXTERNAL FUNCTION REFERENCES
-; Declare functions defined elsewhere and referenced in this module
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; EXTERNAL GLOBAL STORE/BUFFER/POINTER REFERENCES
-; Declare variables used in this module but defined elsewhere
-----------------------------------------------------------------------------*/
-#ifdef PV_POSTPROC_ON
-#ifdef __cplusplus
-extern "C"
-{
-#endif
-    /*----------------------------------------------------------------------------
-    ; FUNCTION CODE
-    ----------------------------------------------------------------------------*/
-    uint8 pp_semaphore_luma(
-        int xpred,      /* i */
-        int ypred,      /* i */
-        uint8   *pp_dec_y,  /* i/o */
-        uint8   *pstprcTypPrv,  /* i */
-        int *ll,        /* i */
-        int *mv_loc,    /* i/o */
-        int dx,     /* i */
-        int dy,     /* i */
-        int mvwidth,    /* i */
-        int width,      /* i */
-        int height      /* i */
-    )
-    {
-        /*----------------------------------------------------------------------------
-        ; Define all local variables
-        ----------------------------------------------------------------------------*/
-        int kk, mmvy, mmvx, nmvx, nmvy;
-        uint8   *pp_prev1, *pp_prev2, *pp_prev3, *pp_prev4;
-        uint8   msk_deblock = 0;        /*  11/3/00 */
-
-        /*----------------------------------------------------------------------------
-        ; Function body here
-        ----------------------------------------------------------------------------*/
-        /* Interframe Processing - 1 MV per MB */
-
-        /* check whether the MV points outside the frame */
-        if (xpred >= 0 && xpred <= ((width << 1) - (2*MB_SIZE)) && ypred >= 0 &&
-                ypred <= ((height << 1) - (2*MB_SIZE)))
-        {   /*****************************/
-            /* (x,y) is inside the frame */
-            /*****************************/
-
-            /*  10/24/2000 post_processing semaphore */
-            /* generation */
-
-            /*  10/23/2000 no boundary checking*/
-            *mv_loc = 0;
-
-            /* Calculate block x coordinate. Divide by 16 is for  */
-            /* converting half-pixel resolution to block          */
-            mmvx = xpred >> 4;
-
-            /* Calculate block y coordinate. Divide by 16 is for */
-            /* converting half-pixel resolution to block         */
-            mmvy = ypred >> 4;
-
-            /* Find post processing semaphore location for block */
-            /* used for prediction, i.e.,                */
-            /* pp_prev1 = &pstprcTypPrv[mmvy*mvwidth][mmvx]      */
-            pp_prev1 = pstprcTypPrv + mmvx + mmvy * mvwidth;
-
-            /* Check if MV is a multiple of 16 */
-            if ((dx&0xF) != 0)
-            {   /* dx is not a multiple of 16 */
-
-                /* pp_prev2 is the block to the right of */
-                /* pp_prev1 block            */
-                pp_prev2 = pp_prev1 + 1;
-
-                if ((dy&0xF) != 0)
-                {   /* dy is not a multiple of 16 */
-
-                    /* pp_prev3 is the block below */
-                    /* pp_prev1 block          */
-                    pp_prev3 = pp_prev1 + mvwidth;
-                }
-                else
-                {   /* dy is a multiple of 16 */
-
-                    pp_prev3 = pp_prev1;
-                }
-
-                /* pp_prev4 is the block to the right of */
-                /* pp_prev3 block.           */
-                pp_prev4 = pp_prev3 + 1;
-            }
-            else
-            {   /* dx is a multiple of 16 */
-
-                pp_prev2 = pp_prev1;
-
-                if ((dy&0xF) != 0)
-                {   /* dy is not a multiple of 16 */
-
-                    /* pp_prev3 is the block below */
-                    /* pp_prev1 block.         */
-                    pp_prev3 = pp_prev1 + mvwidth;
-                }
-                else
-                {   /* dy is a multiple of 16 */
-
-                    pp_prev3 = pp_prev1;
-                    msk_deblock = 0x3;
-                }
-
-                pp_prev4 = pp_prev3;
-            }
-
-            /* Perform post processing semaphore propagation for each */
-            /* of the 4 blocks in a MB.               */
-            for (kk = 0; kk < 4; kk++)
-            {
-                /* Deringing semaphore propagation */
-                if ((*(pp_dec_y) & 4) == 0)
-                {
-                    *(pp_dec_y) |= ((*(pp_prev1) | *(pp_prev2) |
-                                     *(pp_prev3) | *(pp_prev4)) & 0x4);
-                }
-                /* Deblocking semaphore propagation */
-                /*  11/3/00, change the propagation for deblocking */
-                if (msk_deblock == 0)
-                {
-                    *(pp_dec_y) = 0;
-                }
-
-                pp_dec_y += ll[kk];
-                pp_prev1 += ll[kk];
-                pp_prev2 += ll[kk];
-                pp_prev3 += ll[kk];
-                pp_prev4 += ll[kk];
-            }
-
-        }
-        else
-        {   /******************************/
-            /* (x,y) is outside the frame */
-            /******************************/
-
-            /*  10/24/2000 post_processing semaphore */
-            /* generation */
-
-            /*  10/23/2000 boundary checking*/
-            *mv_loc = 1;
-
-            /* Perform post processing semaphore propagation for each */
-            /* of the 4 blocks in a MB.               */
-            for (kk = 0; kk < 4; kk++)
-            {
-                /* Calculate block x coordinate and round (?).  */
-                /* Divide by 16 is for converting half-pixel    */
-                /* resolution to block.             */
-                mmvx = (xpred + ((kk & 1) << 3)) >> 4;
-                nmvx = mmvx;
-
-                /* Calculate block y coordinate and round (?).  */
-                /* Divide by 16 is for converting half-pixel    */
-                /* resolution to block.             */
-                mmvy = (ypred + ((kk & 2) << 2)) >> 4;
-                nmvy = mmvy;
-
-                /* Perform boundary checking */
-                if (nmvx < 0)
-                {
-                    nmvx = 0;
-                }
-                else if (nmvx > mvwidth - 1)
-                {
-                    nmvx = mvwidth - 1;
-                }
-
-                if (nmvy < 0)
-                {
-                    nmvy = 0;
-                }
-                else if (nmvy > (height >> 3) - 1)
-                {
-                    nmvy = (height >> 3) - 1;
-                }
-
-                /* Find post processing semaphore location for block */
-                /* used for prediction, i.e.,                */
-                /* pp_prev1 = &pstprcTypPrv[nmvy*mvwidth][nmvx]      */
-                pp_prev1 = pstprcTypPrv + nmvx + nmvy * mvwidth;
-
-                /* Check if x component of MV is a multiple of 16    */
-                /* and check if block x coordinate is out of bounds  */
-                if (((dx&0xF) != 0) && (mmvx + 1 < mvwidth - 1))
-                {   /* dx is not a multiple of 16 and the block */
-                    /* x coordinate is within the bounds        */
-
-                    /* pp_prev2 is the block to the right of */
-                    /* pp_prev1 block            */
-                    pp_prev2 = pp_prev1 + 1;
-
-                    /* Check if y component of MV is a multiple */
-                    /* of 16 and check if block y coordinate is */
-                    /* out of bounds                */
-                    if (((dy&0xF) != 0) && (mmvy + 1 < (height >> 3) - 1))
-                    {   /* dy is not a multiple of 16 and */
-                        /* the block y coordinate is      */
-                        /* within the bounds              */
-
-                        /* pp_prev3 is the block below */
-                        /* pp_prev1 block          */
-                        pp_prev3 = pp_prev1 + mvwidth;
-
-                        /* all prediction are from different blocks */
-                        msk_deblock = 0x3;
-                    }
-                    else
-                    {   /* dy is a multiple of 16 or the block */
-                        /* y coordinate is out of bounds       */
-
-                        pp_prev3 = pp_prev1;
-                    }
-
-                    /* pp_prev4 is the block to the right of */
-                    /* pp_prev3 block.           */
-                    pp_prev4 = pp_prev3 + 1;
-                }
-                else
-                {   /* dx is a multiple of 16 or the block x */
-                    /* coordinate is out of bounds           */
-
-                    pp_prev2 = pp_prev1;
-
-                    /* Check if y component of MV is a multiple */
-                    /* of 16 and check if block y coordinate is */
-                    /* out of bounds                */
-                    if (((dy&0xF) != 0) && (mmvy + 1 < (height >> 3) - 1))
-                    {   /* dy is not a multiple of 16 and */
-                        /* the block y coordinate is      */
-                        /* within the bounds              */
-
-                        /* pp_prev3 is the block below */
-                        /* pp_prev1 block.         */
-                        pp_prev3 = pp_prev1 + mvwidth;
-                    }
-                    else
-                    {   /* dy is a multiple of 16 or the block */
-                        /* y coordinate is out of bounds       */
-
-                        pp_prev3 = pp_prev1;
-                    }
-
-                    pp_prev4 = pp_prev3;
-                }
-
-                /* Deringing semaphore propagation */
-                if ((*(pp_dec_y)&4) == 0)
-                {
-                    *(pp_dec_y) |= ((*(pp_prev1) |
-                                     *(pp_prev2) | *(pp_prev3) |
-                                     *(pp_prev4)) & 0x4);
-                }
-                /* Deblocking semaphore propagation */
-                /*  11/3/00, change the propaga= */
-                /* tion for deblocking */
-                if (msk_deblock == 0)
-                {
-                    *(pp_dec_y) = 0;
-                }
-
-                pp_dec_y += ll[kk];
-            }
-        }
-
-        /*----------------------------------------------------------------------------
-        ; Return nothing or data or data pointer
-        ----------------------------------------------------------------------------*/
-        return (msk_deblock);
-    }
-#ifdef __cplusplus
-}
-#endif
-#endif
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/pvdec_api.cpp b/media/libstagefright/codecs/m4v_h263/dec/src/pvdec_api.cpp
index 9c0fcfa..b0828e4 100644
--- a/media/libstagefright/codecs/m4v_h263/dec/src/pvdec_api.cpp
+++ b/media/libstagefright/codecs/m4v_h263/dec/src/pvdec_api.cpp
@@ -512,60 +512,6 @@
     video->memoryUsage += (sizeof(MOT) * 8 * nTotalMB);
 #endif
 
-#ifdef PV_POSTPROC_ON
-    /* Allocating space for post-processing Mode */
-#ifdef DEC_INTERNAL_MEMORY_OPT
-    video->pstprcTypCur = IMEM_pstprcTypCur;
-    video->memoryUsage += (nTotalMB * 6);
-    if (video->pstprcTypCur == NULL)
-    {
-        status = PV_FALSE;
-    }
-    else
-    {
-        oscl_memset(video->pstprcTypCur, 0, 4*nTotalMB + 2*nTotalMB);
-    }
-
-    video->pstprcTypPrv = IMEM_pstprcTypPrv;
-    video->memoryUsage += (nTotalMB * 6);
-    if (video->pstprcTypPrv == NULL)
-    {
-        status = PV_FALSE;
-    }
-    else
-    {
-        oscl_memset(video->pstprcTypPrv, 0, nTotalMB*6);
-    }
-
-#else
-    if (nTotalMB > INT32_MAX / 6) {
-        return PV_FALSE;
-    }
-    video->pstprcTypCur = (uint8 *) oscl_malloc(nTotalMB * 6);
-    video->memoryUsage += (nTotalMB * 6);
-    if (video->pstprcTypCur == NULL)
-    {
-        status = PV_FALSE;
-    }
-    else
-    {
-        oscl_memset(video->pstprcTypCur, 0, 4*nTotalMB + 2*nTotalMB);
-    }
-
-    video->pstprcTypPrv = (uint8 *) oscl_malloc(nTotalMB * 6);
-    video->memoryUsage += (nTotalMB * 6);
-    if (video->pstprcTypPrv == NULL)
-    {
-        status = PV_FALSE;
-    }
-    else
-    {
-        oscl_memset(video->pstprcTypPrv, 0, nTotalMB*6);
-    }
-
-#endif
-
-#endif
 
     /* initialize the decoder library */
     video->prevVop->predictionType = I_VOP;
@@ -631,10 +577,6 @@
 #ifdef DEC_INTERNAL_MEMORY_OPT
     if (video)
     {
-#ifdef PV_POSTPROC_ON
-        video->pstprcTypCur = NULL;
-        video->pstprcTypPrv = NULL;
-#endif
 
         video->acPredFlag       = NULL;
         video->sliceNo          = NULL;
@@ -699,10 +641,6 @@
 
     if (video)
     {
-#ifdef PV_POSTPROC_ON
-        if (video->pstprcTypCur) oscl_free(video->pstprcTypCur);
-        if (video->pstprcTypPrv) oscl_free(video->pstprcTypPrv);
-#endif
         if (video->predDC) oscl_free(video->predDC);
         video->predDCAC_row = NULL;
         if (video->predDCAC_col) oscl_free(video->predDCAC_col);
@@ -830,7 +768,10 @@
 OSCL_EXPORT_REF void PVSetPostProcType(VideoDecControls *decCtrl, int mode)
 {
     VideoDecData *video = (VideoDecData *)decCtrl->videoDecoderData;
-    video->postFilterType = mode;
+    if (mode != 0) {
+        ALOGE("Post processing filters are not supported");
+    }
+    video->postFilterType = 0;
 }
 
 
@@ -1621,43 +1562,8 @@
 void PVDecPostProcess(VideoDecControls *decCtrl, uint8 *outputYUV)
 {
     uint8 *outputBuffer;
-#ifdef PV_POSTPROC_ON
-    VideoDecData *video = (VideoDecData *) decCtrl->videoDecoderData;
-    int32 tmpvar;
-    if (outputYUV)
-    {
-        outputBuffer = outputYUV;
-    }
-    else
-    {
-        if (video->postFilterType)
-        {
-            outputBuffer = video->currVop->yChan;
-        }
-        else
-        {
-            outputBuffer = decCtrl->outputFrame;
-        }
-    }
-
-    if (video->postFilterType)
-    {
-        /* Post-processing,  */
-        PostFilter(video, video->postFilterType, outputBuffer);
-    }
-    else
-    {
-        if (outputYUV)
-        {
-            /* Copy decoded frame to the output buffer. */
-            tmpvar = (int32)video->width * video->height;
-            oscl_memcpy(outputBuffer, decCtrl->outputFrame, tmpvar*3 / 2);           /*  3/3/01 */
-        }
-    }
-#else
     outputBuffer = decCtrl->outputFrame;
     outputYUV;
-#endif
     decCtrl->outputFrame = outputBuffer;
     return;
 }
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/vop.cpp b/media/libstagefright/codecs/m4v_h263/dec/src/vop.cpp
index a11f55e..335846c 100644
--- a/media/libstagefright/codecs/m4v_h263/dec/src/vop.cpp
+++ b/media/libstagefright/codecs/m4v_h263/dec/src/vop.cpp
@@ -24,7 +24,6 @@
 
 #define OSCL_DISABLE_WARNING_CONV_POSSIBLE_LOSS_OF_DATA
 
-#ifdef PV_SUPPORT_MAIN_PROFILE
 /* INTRA */
 const static int mpeg_iqmat_def[NCOEFF_BLOCK] =
 {
@@ -50,7 +49,6 @@
     22, 23, 24, 26, 27, 28, 30, 31,
     23, 24, 25, 27, 28, 30, 31, 33
 };
-#endif
 
 /* ======================================================================== */
 /*  Function : CalcNumBits()                                                */
@@ -86,9 +84,7 @@
     BitstreamDecVideo *stream;
     uint32 tmpvar, vol_shape;
     uint32 startCode;
-#ifdef PV_SUPPORT_MAIN_PROFILE
     int *qmat, i, j;
-#endif
     int version_id = 1;
 #ifdef PV_TOLERATE_VOL_ERRORS
     uint32 profile = 0x01;
@@ -317,7 +313,8 @@
         }
         else
         {
-            if (tmpvar != 0x01) return PV_FAIL;
+            // Simple and advanced simple (for quant-type 1)
+            if (tmpvar != 0x01 && tmpvar != 0x11) return PV_FAIL;
         }
 
         /* version id specified? */
@@ -486,7 +483,6 @@
         currVol->quantType = BitstreamRead1Bits(stream);
         if (currVol->quantType)
         {
-#ifdef PV_SUPPORT_MAIN_PROFILE
             /* load quantization matrices.   5/22/2000 */
             /* load_intra_quant_mat (1 bit) */
             qmat = currVol->iqmat;
@@ -531,9 +527,6 @@
             {
                 oscl_memcpy(qmat, mpeg_nqmat_def, 64*sizeof(int));
             }
-#else
-            return PV_FAIL;
-#endif
         }
 
         if (version_id != 1)
diff --git a/media/libstagefright/codecs/m4v_h263/dec/test/Android.bp b/media/libstagefright/codecs/m4v_h263/dec/test/Android.bp
index 655491a..9c753e6 100644
--- a/media/libstagefright/codecs/m4v_h263/dec/test/Android.bp
+++ b/media/libstagefright/codecs/m4v_h263/dec/test/Android.bp
@@ -17,6 +17,7 @@
 cc_test {
     name: "Mpeg4H263DecoderTest",
     gtest: true,
+    test_suites: ["device-tests"],
 
     srcs: [
         "Mpeg4H263DecoderTest.cpp",
diff --git a/media/libstagefright/codecs/m4v_h263/dec/test/AndroidTest.xml b/media/libstagefright/codecs/m4v_h263/dec/test/AndroidTest.xml
index 47e10ca..f572b0c 100755
--- a/media/libstagefright/codecs/m4v_h263/dec/test/AndroidTest.xml
+++ b/media/libstagefright/codecs/m4v_h263/dec/test/AndroidTest.xml
@@ -19,7 +19,7 @@
         <option name="cleanup" value="true" />
         <option name="push" value="Mpeg4H263DecoderTest->/data/local/tmp/Mpeg4H263DecoderTest" />
         <option name="push-file"
-            key="https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/codecs/m4v_h263/dec/test/Mpeg4H263Decoder.zip?unzip=true"
+            key="https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/codecs/m4v_h263/dec/test/Mpeg4H263Decoder-1.1.zip?unzip=true"
             value="/data/local/tmp/Mpeg4H263DecoderTestRes/" />
     </target_preparer>
 
diff --git a/media/libstagefright/codecs/m4v_h263/dec/test/Mpeg4H263DecoderTest.cpp b/media/libstagefright/codecs/m4v_h263/dec/test/Mpeg4H263DecoderTest.cpp
index 967c1ea..53d66ea 100644
--- a/media/libstagefright/codecs/m4v_h263/dec/test/Mpeg4H263DecoderTest.cpp
+++ b/media/libstagefright/codecs/m4v_h263/dec/test/Mpeg4H263DecoderTest.cpp
@@ -404,6 +404,9 @@
                           make_tuple("swirl_352x288_h263.h263", "swirl_352x288_h263.info", false),
                           make_tuple("bbb_352x288_h263.h263", "bbb_352x288_h263.info", false),
                           make_tuple("bbb_352x288_mpeg4.m4v", "bbb_352x288_mpeg4.info", true),
+                          make_tuple("qtype0_mpeg4.m4v", "qtype0_mpeg4.info", true),
+                          make_tuple("qtype1_mpeg4.m4v", "qtype1_mpeg4.info", true),
+                          make_tuple("qtype1_qmatrix_mpeg4.m4v", "qtype1_qmatrix_mpeg4.info", true),
                           make_tuple("swirl_128x128_mpeg4.m4v", "swirl_128x128_mpeg4.info", true),
                           make_tuple("swirl_130x132_mpeg4.m4v", "swirl_130x132_mpeg4.info", true),
                           make_tuple("swirl_132x130_mpeg4.m4v", "swirl_132x130_mpeg4.info", true),
diff --git a/media/libstagefright/codecs/m4v_h263/dec/test/README.md b/media/libstagefright/codecs/m4v_h263/dec/test/README.md
index 7e4aea1..38ac567 100644
--- a/media/libstagefright/codecs/m4v_h263/dec/test/README.md
+++ b/media/libstagefright/codecs/m4v_h263/dec/test/README.md
@@ -22,7 +22,8 @@
 adb push ${OUT}/data/nativetest/Mpeg4H263DecoderTest/Mpeg4H263DecoderTest /data/local/tmp/
 ```
 
-The resource file for the tests is taken from [here](https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/codecs/m4v_h263/dec/test/Mpeg4H263Decoder.zip). Download, unzip and push these files into device for testing.
+The resource file for the tests is taken from [here](https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/codecs/m4v_h263/dec/test/Mpeg4H263Decoder-1.1.zip).
+Download, unzip and push these files to the device for testing.
 
 ```
 adb push Mpeg4H263Decoder /data/local/tmp/
diff --git a/media/libstagefright/codecs/m4v_h263/enc/Android.bp b/media/libstagefright/codecs/m4v_h263/enc/Android.bp
index b8bc24e..13d310d 100644
--- a/media/libstagefright/codecs/m4v_h263/enc/Android.bp
+++ b/media/libstagefright/codecs/m4v_h263/enc/Android.bp
@@ -6,6 +6,12 @@
         "com.android.media.swcodec",
     ],
     min_sdk_version: "29",
+    host_supported: true,
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
 
     srcs: [
         "src/bitstream_io.cpp",
diff --git a/media/libstagefright/codecs/m4v_h263/enc/test/Android.bp b/media/libstagefright/codecs/m4v_h263/enc/test/Android.bp
index b9a8117..d2982da 100644
--- a/media/libstagefright/codecs/m4v_h263/enc/test/Android.bp
+++ b/media/libstagefright/codecs/m4v_h263/enc/test/Android.bp
@@ -17,6 +17,7 @@
 cc_test {
     name: "Mpeg4H263EncoderTest",
     gtest: true,
+    test_suites: ["device-tests"],
 
     srcs : [ "Mpeg4H263EncoderTest.cpp" ],
 
diff --git a/media/libstagefright/codecs/m4v_h263/fuzzer/Android.bp b/media/libstagefright/codecs/m4v_h263/fuzzer/Android.bp
new file mode 100644
index 0000000..778dafb
--- /dev/null
+++ b/media/libstagefright/codecs/m4v_h263/fuzzer/Android.bp
@@ -0,0 +1,111 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+cc_defaults {
+    name: "mpeg4_h263_dec_fuzz_defaults",
+
+    host_supported: true,
+
+    srcs: [
+        "mpeg4_h263_dec_fuzzer.cpp",
+    ],
+
+    static_libs: [
+        "libstagefright_m4vh263dec",
+        "liblog",
+    ],
+
+    cflags: [
+        "-DOSCL_IMPORT_REF=",
+    ],
+
+    fuzz_config: {
+        cc: [
+            "android-media-fuzzing-reports@google.com",
+        ],
+        componentid: 155276,
+    },
+}
+
+cc_fuzz {
+    name: "mpeg4_dec_fuzzer",
+
+    defaults: [
+        "mpeg4_h263_dec_fuzz_defaults",
+    ],
+
+    cflags: [
+        "-DMPEG4",
+    ],
+}
+
+cc_fuzz {
+    name: "h263_dec_fuzzer",
+
+    defaults: [
+        "mpeg4_h263_dec_fuzz_defaults",
+    ],
+}
+
+cc_defaults {
+    name: "mpeg4_h263_enc_fuzz_defaults",
+
+    host_supported: true,
+
+    srcs: ["mpeg4_h263_enc_fuzzer.cpp"],
+
+    shared_libs: [
+        "libutils",
+        "liblog",
+    ],
+
+    static_libs: [
+        "libstagefright_m4vh263enc",
+    ],
+
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
+    fuzz_config: {
+        cc: [
+            "android-media-fuzzing-reports@google.com",
+        ],
+        componentid: 155276,
+    },
+}
+
+cc_fuzz {
+    name: "mpeg4_enc_fuzzer",
+
+    defaults: [
+        "mpeg4_h263_enc_fuzz_defaults",
+    ],
+
+    cflags: ["-DMPEG4"],
+}
+
+cc_fuzz {
+    name: "h263_enc_fuzzer",
+
+    defaults: [
+        "mpeg4_h263_enc_fuzz_defaults",
+    ],
+}
diff --git a/media/libstagefright/codecs/m4v_h263/fuzzer/README.md b/media/libstagefright/codecs/m4v_h263/fuzzer/README.md
new file mode 100644
index 0000000..ad4ff97
--- /dev/null
+++ b/media/libstagefright/codecs/m4v_h263/fuzzer/README.md
@@ -0,0 +1,158 @@
+# Fuzzer for libstagefright_m4vh263dec decoder
+
+## Plugin Design Considerations
+The fuzzer plugin for MPEG4/H263 is designed based on the understanding of the
+codec and tries to achieve the following:
+
+##### Maximize code coverage
+Dict files (dictionary files) are created for MPEG4 and H263 to ensure that the required start
+bytes are present in every input file that goes to the fuzzer.
+This ensures that the decoder does not reject any input file in the first check.
+
+##### Maximize utilization of input data
+The plugin feeds the entire input data to the codec using a loop.
+ * If the decode operation was successful, the input is advanced by the number of bytes consumed
+   in the decode call.
+ * If the decode operation was unsuccessful, the input is advanced by 1 byte so that the fuzzer
+   can proceed to feed the next frame (see the sketch below).
+
+This ensures that the plugin tolerates any kind of input (empty, huge, malformed, etc.)
+and does not `exit()` on any input, thereby increasing the chance of identifying vulnerabilities.
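+
+A minimal sketch of this loop follows; `decodeOneFrame` is an illustrative stand-in
+for the PVDecodeVopHeader/PVDecodeVopBody sequence used by the actual fuzzer source
+in this change:
+```
+  // Sketch only: advance by the consumed bytes on success, resync by one byte on failure.
+  static void fuzzLoop(const uint8_t *data, size_t size) {
+    while (size > 0) {
+      size_t bytesConsumed = 0;
+      bool ok = decodeOneFrame(data, size, &bytesConsumed);  // hypothetical helper
+      size_t advance = ok ? bytesConsumed : 1;
+      data += advance;
+      size -= advance;
+    }
+  }
+```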
+
+##### Other considerations
+ * Two fuzzer binaries - mpeg4_dec_fuzzer and h263_dec_fuzzer - are generated based on the
+   presence of the compile-time flag 'MPEG4'.
+ * The number of decode calls is kept to a maximum of 100 so that the fuzzer does not time out.
+
+## Build
+
+This describes the steps to build the mpeg4_dec_fuzzer and h263_dec_fuzzer binaries.
+
+### Android
+#### Steps to build
+Build the fuzzer
+```
+  $ mm -j$(nproc) mpeg4_dec_fuzzer
+  $ mm -j$(nproc) h263_dec_fuzzer
+```
+
+#### Steps to run
+Create a directory CORPUS_DIR and copy some MPEG4 or H263 files to that folder.
+Push this directory to the device.
+
+To run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/mpeg4_dec_fuzzer/mpeg4_dec_fuzzer CORPUS_DIR
+  $ adb shell /data/fuzz/arm64/h263_dec_fuzzer/h263_dec_fuzzer CORPUS_DIR
+```
+To run on host
+```
+  $ $ANDROID_HOST_OUT/fuzz/x86_64/mpeg4_dec_fuzzer/mpeg4_dec_fuzzer CORPUS_DIR
+  $ $ANDROID_HOST_OUT/fuzz/x86_64/h263_dec_fuzzer/h263_dec_fuzzer CORPUS_DIR
+```
+
+# Fuzzer for libstagefright_m4vh263enc encoder
+
+## Plugin Design Considerations
+The fuzzer plugin for MPEG4/H263 is designed based on the understanding of the
+codec and tries to achieve the following:
+
+##### Maximize code coverage
+The configuration parameters are not hardcoded, but instead selected based on
+incoming data. This ensures more code paths are reached by the fuzzer.
+
+The MPEG4/H263 encoder supports the following parameters:
+1. Frame Width (parameter name: `encWidth`)
+2. Frame Height (parameter name: `encHeight`)
+3. Rate control mode (parameter name: `rcType`)
+4. Number of bytes per packet (parameter name: `packetSize`)
+5. Qp for I-Vop (parameter name: `iQuant`)
+6. Qp for P-Vop (parameter name: `pQuant`)
+7. Enable RVLC mode (parameter name: `rvlcEnable`)
+8. Quantization mode (parameter name: `quantType`)
+9. Disable frame skipping (parameter name: `noFrameSkipped`)
+10. Enable scene change detection (parameter name: `sceneDetect`)
+11. Number of intra MBs in P-frame (parameter name: `numIntraMB`)
+12. Search range of ME (parameter name: `searchRange`)
+13. Enable 8x8 ME and MC (parameter name: `mv8x8Enable`)
+14. Enable AC prediction (parameter name: `useACPred`)
+15. Threshold for intra DC VLC (parameter name: `intraDCVlcTh`)
+16. Encoding Mode (parameter name: `encMode`)
+
+| Parameter| Valid Values| Configured Value|
+|------------- |-------------| ----- |
+| `rcType` | 0. `CONSTANT_Q` 1. `CBR_1` 2. `VBR_1` 3. `CBR_2` 4. `VBR_2` 5. `CBR_LOWDELAY` | All the bits of 6th byte of data modulus 6 |
+| `packetSize` | In the range `0 to 255` | All the bits of 7th byte of data |
+| `iQuant` | In the range `1 to 31` | All the bits of 8th byte of data |
+| `pQuant` | In the range `1 to 31` | All the bits of 9th byte of data |
+| `rvlcEnable` | 0. `PV_OFF` 1. `PV_ON` | bit 0 of 10th byte of data |
+| `quantType` | 0. `0` 1. `1` | bit 0 of 11th byte of data |
+| `noFrameSkipped` | 0. `PV_OFF` 1. `PV_ON` | bit 0 of 12th byte of data |
+| `sceneDetect` | 0. `PV_OFF` 1. `PV_ON` | bit 0 of 13th byte of data |
+| `numIntraMB` | In the range `0 to 7` | bit 0, 1 and 2 of 14th byte of data |
+| `searchRange` | In the range `0 to 31` | bit 0, 1, 2, 3 and 4 of 15th byte of data |
+| `mv8x8Enable` | 0. `PV_OFF` 1. `PV_ON` | bit 0 of 16th byte of data |
+| `useACPred` | 0. `PV_OFF` 1. `PV_ON` | bit 0 of 17th byte of data |
+| `intraDCVlcTh` | In the range `0 to 7` | bit 0, 1 and 2 of 18th byte of data |
+
+The following parameters apply only to mpeg4_enc_fuzzer:
+
+| Parameter| Valid Values| Configured Value|
+|------------- |-------------| ----- |
+| `encWidth` | In the range `0 to 10239` | All the bits of 1st and 2nd byte of data |
+| `encHeight` | In the range `0 to 10239` | All the bits of 3rd and 4th byte of data |
+| `encMode` | 0. `H263_MODE` 1. `H263_MODE_WITH_ERR_RES` 2. `DATA_PARTITIONING_MODE` 3. `COMBINE_MODE_NO_ERR_RES` 4. `COMBINE_MODE_WITH_ERR_RES` | All the bits of 19th byte of data modulus 5 |
+
+The following parameters apply only to h263_enc_fuzzer:
+
+| Parameter| Valid Values| Configured Value|
+|------------- |-------------| ----- |
+| `encWidth` | 0. `128` 1. `176` 2. `352` 3. `704` 4. `1408` | All the bits of 1st byte of data modulus 5|
+| `encHeight` | 0. `96` 1. `144` 2. `288` 3. `576` 4. `1152` | All the bits of 3rd byte of data modulus 5|
+| `encMode` | 0. `SHORT_HEADER` 1. `SHORT_HEADER_WITH_ERR_RES` | All the bits of 19th byte of data modulus 2 |
+
+This also ensures that the plugin is always deterministic for any given input.
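+
+For example, two of the mappings above look roughly like this (a sketch; `krcType`
+and the byte positions come from the fuzzer source added later in this change):
+```
+  // 6th byte selects the rate-control mode, 7th byte the packet size in bytes.
+  MP4RateControlType rcType = krcType[data[5] % 6];
+  uint32_t packetSize = data[6];
+```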
+
+##### Maximize utilization of input data
+The plugin feeds the entire input data to the codec using a loop.
+If the encode operation was successful, the input is advanced by the frame size.
+If the encode operation was unsuccessful, the input is still advanced by the frame size so
+that the fuzzer can proceed to feed the next frame (see the sketch below).
+
+This ensures that the plugin tolerates any kind of input (empty, huge,
+malformed, etc.) and does not `exit()` on any input, thereby increasing the
+chance of identifying vulnerabilities.
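+
+A sketch of this fixed-stride loop, assuming one YUV 4:2:0 input frame per encode call
+(`encodeOneFrame` is an illustrative stand-in for the PVEncodeVideoFrame call made by
+the actual fuzzer source in this change):
+```
+  size_t frameSize = (width * height * 3) / 2;        // one YUV 4:2:0 frame
+  while (size > 0) {
+    size_t bytesConsumed = std::min(size, frameSize);  // needs <algorithm>
+    encodeOneFrame(data, bytesConsumed);               // hypothetical helper; result ignored
+    data += bytesConsumed;                             // advance by a full frame either way
+    size -= bytesConsumed;
+  }
+```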
+
+## Build
+
+This describes the steps to build the mpeg4_enc_fuzzer and h263_enc_fuzzer binaries.
+
+### Android
+
+#### Steps to build
+Build the fuzzer
+```
+  $ mm -j$(nproc) mpeg4_enc_fuzzer
+  $ mm -j$(nproc) h263_enc_fuzzer
+```
+
+#### Steps to run
+Create a directory CORPUS_DIR and copy some YUV files to that folder.
+Push this directory to the device.
+
+To run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/mpeg4_enc_fuzzer/mpeg4_enc_fuzzer CORPUS_DIR
+  $ adb shell /data/fuzz/arm64/h263_enc_fuzzer/h263_enc_fuzzer CORPUS_DIR
+```
+To run on host
+```
+  $ $ANDROID_HOST_OUT/fuzz/x86_64/mpeg4_enc_fuzzer/mpeg4_enc_fuzzer CORPUS_DIR
+  $ $ANDROID_HOST_OUT/fuzz/x86_64/h263_enc_fuzzer/h263_enc_fuzzer CORPUS_DIR
+```
+
+## References:
+ * http://llvm.org/docs/LibFuzzer.html
+ * https://github.com/google/oss-fuzz
diff --git a/media/libstagefright/codecs/m4v_h263/fuzzer/h263_dec_fuzzer.dict b/media/libstagefright/codecs/m4v_h263/fuzzer/h263_dec_fuzzer.dict
new file mode 100644
index 0000000..591d37e
--- /dev/null
+++ b/media/libstagefright/codecs/m4v_h263/fuzzer/h263_dec_fuzzer.dict
@@ -0,0 +1,2 @@
+# Start code (bytes 0-3)
+kw1="\x00\x00\x80\x02"
diff --git a/media/libstagefright/codecs/m4v_h263/fuzzer/mpeg4_dec_fuzzer.dict b/media/libstagefright/codecs/m4v_h263/fuzzer/mpeg4_dec_fuzzer.dict
new file mode 100644
index 0000000..76241a6
--- /dev/null
+++ b/media/libstagefright/codecs/m4v_h263/fuzzer/mpeg4_dec_fuzzer.dict
@@ -0,0 +1,2 @@
+# Start code (bytes 0-3)
+kw1="\x00\x00\x01\xB0"
diff --git a/media/libstagefright/codecs/m4v_h263/fuzzer/mpeg4_h263_dec_fuzzer.cpp b/media/libstagefright/codecs/m4v_h263/fuzzer/mpeg4_h263_dec_fuzzer.cpp
new file mode 100644
index 0000000..912c821
--- /dev/null
+++ b/media/libstagefright/codecs/m4v_h263/fuzzer/mpeg4_h263_dec_fuzzer.cpp
@@ -0,0 +1,205 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+#include "mp4dec_api.h"
+#define MPEG4_MAX_WIDTH 1920
+#define MPEG4_MAX_HEIGHT 1080
+#define H263_MAX_WIDTH 352
+#define H263_MAX_HEIGHT 288
+#define DEFAULT_WIDTH 352
+#define DEFAULT_HEIGHT 288
+
+constexpr size_t kMaxNumDecodeCalls = 100;
+constexpr uint8_t kNumOutputBuffers = 2;
+constexpr int kLayer = 1;
+
+struct tagvideoDecControls;
+
+/* == ceil(num / den) * den. T must be integer type, alignment must be positive power of 2 */
+template <class T, class U>
+inline static const T align(const T &num, const U &den) {
+  return (num + (T)(den - 1)) & (T) ~(den - 1);
+}
+
+class Codec {
+ public:
+  Codec() = default;
+  ~Codec() { deInitDecoder(); }
+  bool initDecoder();
+  bool allocOutputBuffer(size_t outputBufferSize);
+  void freeOutputBuffer();
+  void handleResolutionChange();
+  void decodeFrames(const uint8_t *data, size_t size);
+  void deInitDecoder();
+
+ private:
+  tagvideoDecControls *mDecHandle = nullptr;
+  uint8_t *mOutputBuffer[kNumOutputBuffers];
+  bool mInitialized = false;
+  bool mFramesConfigured = false;
+#ifdef MPEG4
+  MP4DecodingMode mInputMode = MPEG4_MODE;
+  size_t mMaxWidth = MPEG4_MAX_WIDTH;
+  size_t mMaxHeight = MPEG4_MAX_HEIGHT;
+#else
+  MP4DecodingMode mInputMode = H263_MODE;
+  size_t mMaxWidth = H263_MAX_WIDTH;
+  size_t mMaxHeight = H263_MAX_HEIGHT;
+#endif
+  uint32_t mNumSamplesOutput = 0;
+  uint32_t mWidth = DEFAULT_WIDTH;
+  uint32_t mHeight = DEFAULT_HEIGHT;
+};
+
+bool Codec::initDecoder() {
+  mDecHandle = new tagvideoDecControls;
+  if (!mDecHandle) {
+    return false;
+  }
+  memset(mDecHandle, 0, sizeof(tagvideoDecControls));
+  return true;
+}
+
+bool Codec::allocOutputBuffer(size_t outputBufferSize) {
+  for (uint8_t i = 0; i < kNumOutputBuffers; ++i) {
+    if (!mOutputBuffer[i]) {
+      mOutputBuffer[i] = static_cast<uint8_t *>(malloc(outputBufferSize));
+      if (!mOutputBuffer[i]) {
+        return false;
+      }
+    }
+  }
+  return true;
+}
+
+void Codec::freeOutputBuffer() {
+  for (uint8_t i = 0; i < kNumOutputBuffers; ++i) {
+    if (mOutputBuffer[i]) {
+      free(mOutputBuffer[i]);
+      mOutputBuffer[i] = nullptr;
+    }
+  }
+}
+
+void Codec::handleResolutionChange() {
+  int32_t dispWidth, dispHeight;
+  PVGetVideoDimensions(mDecHandle, &dispWidth, &dispHeight);
+
+  int32_t bufWidth, bufHeight;
+  PVGetBufferDimensions(mDecHandle, &bufWidth, &bufHeight);
+
+  if (dispWidth != mWidth || dispHeight != mHeight) {
+    mWidth = dispWidth;
+    mHeight = dispHeight;
+  }
+}
+
+void Codec::decodeFrames(const uint8_t *data, size_t size) {
+  size_t outputBufferSize = align(mMaxWidth, 16) * align(mMaxHeight, 16) * 3 / 2;
+  uint8_t *start_code = const_cast<uint8_t *>(data);
+  static const uint8_t volInfo[] = {0x00, 0x00, 0x01, 0xB0};
+  bool volHeader = memcmp(start_code, volInfo, 4) == 0;
+  if (volHeader) {
+    PVCleanUpVideoDecoder(mDecHandle);
+    mInitialized = false;
+  }
+
+  if (!mInitialized) {
+    uint8_t *volData[1]{};
+    int32_t volSize = 0;
+
+    if (volHeader) { /* removed some codec config part */
+      volData[0] = const_cast<uint8_t *>(data);
+      volSize = size;
+    }
+
+    if (!PVInitVideoDecoder(mDecHandle, volData, &volSize, kLayer, mMaxWidth, mMaxHeight,
+                            mInputMode)) {
+      return;
+    }
+    mInitialized = true;
+    MP4DecodingMode actualMode = PVGetDecBitstreamMode(mDecHandle);
+    if (mInputMode != actualMode) {
+      return;
+    }
+
+    PVSetPostProcType(mDecHandle, 0);
+  }
+  size_t yFrameSize = sizeof(uint8) * mDecHandle->size;
+  if (outputBufferSize < yFrameSize * 3 / 2) {
+    return;
+  }
+  if (!allocOutputBuffer(outputBufferSize)) {
+    return;
+  }
+  size_t numDecodeCalls = 0;
+  while ((size > 0) && (numDecodeCalls < kMaxNumDecodeCalls)) {
+    if (!mFramesConfigured) {
+      PVSetReferenceYUV(mDecHandle, mOutputBuffer[1]);
+      mFramesConfigured = true;
+    }
+
+    // Need to check if header contains new info, e.g., width/height, etc.
+    VopHeaderInfo header_info;
+    uint32_t useExtTimestamp = (numDecodeCalls == 0);
+    int32_t tempSize = (int32_t)size;
+    uint8_t *bitstreamTmp = const_cast<uint8_t *>(data);
+    uint32_t timestamp = 0;
+    if (PVDecodeVopHeader(mDecHandle, &bitstreamTmp, &timestamp, &tempSize, &header_info,
+                          &useExtTimestamp, mOutputBuffer[mNumSamplesOutput & 1]) != PV_TRUE) {
+      return;
+    }
+
+    handleResolutionChange();
+
+    PVDecodeVopBody(mDecHandle, &tempSize);
+    uint32_t bytesConsumed = 1;
+    if (size > tempSize) {
+      bytesConsumed = size - tempSize;
+    }
+    data += bytesConsumed;
+    size -= bytesConsumed;
+    ++mNumSamplesOutput;
+    ++numDecodeCalls;
+  }
+  freeOutputBuffer();
+}
+
+void Codec::deInitDecoder() {
+  PVCleanUpVideoDecoder(mDecHandle);
+  delete mDecHandle;
+  mDecHandle = nullptr;
+  mInitialized = false;
+  freeOutputBuffer();
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+  if (size < 4) {
+    return 0;
+  }
+  Codec *codec = new Codec();
+  if (!codec) {
+    return 0;
+  }
+  if (codec->initDecoder()) {
+    codec->decodeFrames(data, size);
+  }
+  delete codec;
+  return 0;
+}
diff --git a/media/libstagefright/codecs/m4v_h263/fuzzer/mpeg4_h263_enc_fuzzer.cpp b/media/libstagefright/codecs/m4v_h263/fuzzer/mpeg4_h263_enc_fuzzer.cpp
new file mode 100644
index 0000000..f154706
--- /dev/null
+++ b/media/libstagefright/codecs/m4v_h263/fuzzer/mpeg4_h263_enc_fuzzer.cpp
@@ -0,0 +1,190 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <algorithm>
+#include "mp4enc_api.h"
+
+constexpr int8_t kIDRFrameRefreshIntervalInSec = 1;
+constexpr MP4RateControlType krcType[] = {CONSTANT_Q, CBR_1, VBR_1, CBR_2, VBR_2, CBR_LOWDELAY};
+#ifdef MPEG4
+constexpr MP4EncodingMode kEncodingMode[] = {SHORT_HEADER, SHORT_HEADER_WITH_ERR_RES,
+                                             DATA_PARTITIONING_MODE, COMBINE_MODE_NO_ERR_RES,
+                                             COMBINE_MODE_WITH_ERR_RES};
+constexpr size_t kMaxWidth = 10240;
+constexpr size_t kMaxHeight = 10240;
+#else
+constexpr MP4EncodingMode kEncodingMode[] = {H263_MODE, H263_MODE_WITH_ERR_RES};
+constexpr int kWidth[] = {128, 176, 352, 704, 1408};
+constexpr int kHeight[] = {96, 144, 288, 576, 1152};
+constexpr size_t kWidthNum = std::size(kWidth);
+constexpr size_t kHeightNum = std::size(kHeight);
+#endif
+
+constexpr size_t krcTypeNum = std::size(krcType);
+constexpr size_t kEncodingModeNum = std::size(kEncodingMode);
+constexpr size_t kMaxQP = 51;
+
+enum {
+    IDX_WD_BYTE_1,
+    IDX_WD_BYTE_2,
+    IDX_HT_BYTE_1,
+    IDX_HT_BYTE_2,
+    IDX_FRAME_RATE,
+    IDX_RC_TYPE,
+    IDX_PACKET_SIZE,
+    IDX_I_FRAME_QP,
+    IDX_P_FRAME_QP,
+    IDX_ENABLE_RVLC,
+    IDX_QUANT_TYPE,
+    IDX_NO_FRAME_SKIPPED_FLAG,
+    IDX_ENABLE_SCENE_DETECT,
+    IDX_NUM_INTRA_MB,
+    IDX_SEARCH_RANGE,
+    IDX_ENABLE_MV_8x8,
+    IDX_USE_AC_PRED,
+    IDX_INTRA_DC_VLC_THRESHOLD,
+    IDX_ENC_MODE,
+    IDX_LAST
+};
+
+class Codec {
+   public:
+    Codec() = default;
+    ~Codec() { deInitEncoder(); }
+    bool initEncoder(const uint8_t *data);
+    void encodeFrames(const uint8_t *data, size_t size);
+    void deInitEncoder();
+
+   private:
+    int32_t mFrameWidth = 352;
+    int32_t mFrameHeight = 288;
+    float mFrameRate = 25.0f;
+    VideoEncOptions *mEncodeHandle = nullptr;
+    VideoEncControls *mEncodeControl = nullptr;
+};
+
+bool Codec::initEncoder(const uint8_t *data) {
+    mEncodeHandle = new VideoEncOptions;
+    if (!mEncodeHandle) {
+        return false;
+    }
+    memset(mEncodeHandle, 0, sizeof(VideoEncOptions));
+    mEncodeControl = new VideoEncControls;
+    if (!mEncodeControl) {
+        return false;
+    }
+    memset(mEncodeControl, 0, sizeof(VideoEncControls));
+    PVGetDefaultEncOption(mEncodeHandle, 0);
+
+#ifdef MPEG4
+    mFrameWidth = ((data[IDX_WD_BYTE_1] << 8) | data[IDX_WD_BYTE_2]) % kMaxWidth;
+    mFrameHeight = ((data[IDX_HT_BYTE_1] << 8) | data[IDX_HT_BYTE_2]) % kMaxHeight;
+#else
+    mFrameWidth = kWidth[data[IDX_WD_BYTE_1] % kWidthNum];
+    mFrameHeight = kHeight[data[IDX_HT_BYTE_1] % kHeightNum];
+#endif
+    mFrameRate = data[IDX_FRAME_RATE];
+    mEncodeHandle->rcType = krcType[data[IDX_RC_TYPE] % krcTypeNum];
+    mEncodeHandle->profile_level = CORE_PROFILE_LEVEL2;
+    mEncodeHandle->packetSize = data[IDX_PACKET_SIZE];
+    mEncodeHandle->iQuant[0] = (data[IDX_I_FRAME_QP] % kMaxQP) + 1;
+    mEncodeHandle->pQuant[0] = (data[IDX_P_FRAME_QP] % kMaxQP) + 1;
+    mEncodeHandle->rvlcEnable = (data[IDX_ENABLE_RVLC] & 0x01) ? PV_OFF : PV_ON;
+    mEncodeHandle->quantType[0] = (data[IDX_QUANT_TYPE] & 0x01) ? 0 : 1;
+    mEncodeHandle->noFrameSkipped = (data[IDX_NO_FRAME_SKIPPED_FLAG] & 0x01) ? PV_OFF : PV_ON;
+    mEncodeHandle->sceneDetect = (data[IDX_ENABLE_SCENE_DETECT] & 0x01) ? PV_OFF : PV_ON;
+    mEncodeHandle->numIntraMB = data[IDX_NUM_INTRA_MB] & 0x07;
+    mEncodeHandle->searchRange = data[IDX_SEARCH_RANGE] & 0x1F;
+    mEncodeHandle->mv8x8Enable = (data[IDX_ENABLE_MV_8x8] & 0x01) ? PV_OFF : PV_ON;
+    mEncodeHandle->useACPred = (data[IDX_USE_AC_PRED] & 0x01) ? PV_OFF : PV_ON;
+    mEncodeHandle->intraDCVlcTh = data[IDX_INTRA_DC_VLC_THRESHOLD] & 0x07;
+    mEncodeHandle->encMode = kEncodingMode[data[IDX_ENC_MODE] % kEncodingModeNum];
+    mEncodeHandle->encWidth[0] = mFrameWidth;
+    mEncodeHandle->encHeight[0] = mFrameHeight;
+    mEncodeHandle->encFrameRate[0] = mFrameRate;
+    mEncodeHandle->tickPerSrc = mEncodeHandle->timeIncRes / mFrameRate;
+    mEncodeHandle->intraPeriod = (kIDRFrameRefreshIntervalInSec * mFrameRate);
+    if (!PVInitVideoEncoder(mEncodeControl, mEncodeHandle)) {
+        return false;
+    }
+    return true;
+}
+
+void Codec::deInitEncoder() {
+    if (mEncodeControl) {
+        PVCleanUpVideoEncoder(mEncodeControl);
+        delete mEncodeControl;
+        mEncodeControl = nullptr;
+    }
+    if (mEncodeHandle) {
+        delete mEncodeHandle;
+        mEncodeHandle = nullptr;
+    }
+}
+
+void Codec::encodeFrames(const uint8_t *data, size_t size) {
+    size_t inputBufferSize = (mFrameWidth * mFrameHeight * 3) / 2;
+    size_t outputBufferSize = inputBufferSize * 2;
+    uint8_t outputBuffer[outputBufferSize];
+
+    // Get VOL header.
+    int32_t sizeOutputBuffer = outputBufferSize;
+    PVGetVolHeader(mEncodeControl, outputBuffer, &sizeOutputBuffer, 0);
+
+    size_t numFrame = 0;
+    while (size > 0) {
+        size_t bytesConsumed = std::min(size, inputBufferSize);
+        uint8_t inputBuffer[inputBufferSize];
+        memcpy(inputBuffer, data, bytesConsumed);
+        if (bytesConsumed < sizeof(inputBuffer)) {
+            memset(inputBuffer + bytesConsumed, data[0], sizeof(inputBuffer) - bytesConsumed);
+        }
+        VideoEncFrameIO videoIn{}, videoOut{};
+        videoIn.height = mFrameHeight;
+        videoIn.pitch = mFrameWidth;
+        videoIn.timestamp = (numFrame * 1000) / mFrameRate;
+        videoIn.yChan = inputBuffer;
+        videoIn.uChan = videoIn.yChan + videoIn.height * videoIn.pitch;
+        videoIn.vChan = videoIn.uChan + ((videoIn.height * videoIn.pitch) >> 2);
+        uint32_t modTimeMs = 0;
+        int32_t dataLength = outputBufferSize;
+        int32_t nLayer = 0;
+        PVEncodeVideoFrame(mEncodeControl, &videoIn, &videoOut, &modTimeMs, outputBuffer,
+                           &dataLength, &nLayer);
+        MP4HintTrack hintTrack;
+        PVGetHintTrack(mEncodeControl, &hintTrack);
+        PVGetOverrunBuffer(mEncodeControl);
+        ++numFrame;
+        data += bytesConsumed;
+        size -= bytesConsumed;
+    }
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+    if (size < IDX_LAST) {
+        return 0;
+    }
+    Codec *codec = new Codec();
+    if (!codec) {
+        return 0;
+    }
+    if (codec->initEncoder(data)) {
+        data += IDX_LAST;
+        size -= IDX_LAST;
+        codec->encodeFrames(data, size);
+    }
+    delete codec;
+    return 0;
+}
diff --git a/media/libstagefright/codecs/mp3dec/Android.bp b/media/libstagefright/codecs/mp3dec/Android.bp
index 96106f1..316d63c 100644
--- a/media/libstagefright/codecs/mp3dec/Android.bp
+++ b/media/libstagefright/codecs/mp3dec/Android.bp
@@ -3,6 +3,7 @@
     vendor_available: true,
     min_sdk_version: "29",
 
+    host_supported: true,
     srcs: [
         "src/pvmp3_normalize.cpp",
         "src/pvmp3_alias_reduction.cpp",
@@ -73,6 +74,12 @@
         "-DOSCL_UNUSED_ARG(x)=(void)(x)",
         "-Werror",
     ],
+
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
 }
 
 //###############################################################################
diff --git a/media/libstagefright/codecs/mp3dec/TEST_MAPPING b/media/libstagefright/codecs/mp3dec/TEST_MAPPING
new file mode 100644
index 0000000..4ef4317
--- /dev/null
+++ b/media/libstagefright/codecs/mp3dec/TEST_MAPPING
@@ -0,0 +1,9 @@
+// mappings for frameworks/av/media/libstagefright/codecs/mp3dec
+{
+  // tests which require dynamic content
+  // invoke with: atest -- --enable-module-dynamic-download=true
+  // TODO(b/148094059): unit tests not allowed to download content
+  "dynamic-presubmit": [
+    { "name": "Mp3DecoderTest"}
+  ]
+}
diff --git a/media/libstagefright/codecs/mp3dec/fuzzer/Android.bp b/media/libstagefright/codecs/mp3dec/fuzzer/Android.bp
new file mode 100644
index 0000000..79fa1e9
--- /dev/null
+++ b/media/libstagefright/codecs/mp3dec/fuzzer/Android.bp
@@ -0,0 +1,39 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+cc_fuzz {
+    name: "mp3_dec_fuzzer",
+    host_supported: true,
+
+    static_libs: [
+        "libstagefright_mp3dec",
+    ],
+
+    srcs: [
+        "mp3_dec_fuzzer.cpp",
+    ],
+
+    fuzz_config: {
+        cc: [
+            "android-media-fuzzing-reports@google.com",
+        ],
+        componentid: 155276,
+    },
+}
diff --git a/media/libstagefright/codecs/mp3dec/fuzzer/README.md b/media/libstagefright/codecs/mp3dec/fuzzer/README.md
new file mode 100644
index 0000000..09dd5c3
--- /dev/null
+++ b/media/libstagefright/codecs/mp3dec/fuzzer/README.md
@@ -0,0 +1,56 @@
+# Fuzzer for libstagefright_mp3dec decoder
+
+## Plugin Design Considerations
+The fuzzer plugin for mp3 decoder is designed based on the understanding of the
+codec and tries to achieve the following:
+
+##### Maximize code coverage
+
+This fuzzer makes use of the following config parameters:
+1. Equalizer type (parameter name: `equalizerType`)
+2. CRC check enabled (parameter name: `crcEnabled`)
+
+| Parameter| Valid Values| Configured Value|
+|------------- |-------------| ----- |
+| `equalizerType` | 0. `flat ` 1. `bass_boost ` 2. `rock ` 3. `pop ` 4. `jazz ` 5. `classical ` 6. `talk ` 7. `flat_ ` | Bits 0, 1 and 2 of first byte of input stream |
+| `crcEnabled` | 0. `false ` 1. `true `| Bit 0 of second byte of input stream |
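+
+For instance, these two parameters can be derived from the first two input bytes roughly
+as follows (a sketch; `kEqualizerTypes` comes from the fuzzer source added later in this
+change):
+```
+  e_equalization equalizerType = kEqualizerTypes[data[0] & 0x07];  // bits 0-2 of 1st byte
+  bool crcEnabled = (data[1] & 0x01) != 0;                         // bit 0 of 2nd byte
+```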
+
+##### Maximize utilization of input data
+The plugin feeds the entire input data to the codec using a loop.
+ * If the decode operation was successful, the input is advanced by the number
+   of bytes used by the decoder.
+ * If the decode operation was unsuccessful, the input is advanced by 1 byte
+   until it reaches a valid frame or the end of the stream.
+
+This ensures that the plugin tolerates any kind of input (empty, huge,
+malformed, etc.) and does not `exit()` on any input, thereby increasing the
+chance of identifying vulnerabilities.
+
+## Build
+
+This describes the steps to build the mp3_dec_fuzzer binary.
+
+### Android
+
+#### Steps to build
+Build the fuzzer
+```
+  $ mm -j$(nproc) mp3_dec_fuzzer
+```
+
+#### Steps to run
+Create a directory CORPUS_DIR and copy some mp3 files to that folder.
+Push this directory to the device.
+
+To run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/mp3_dec_fuzzer/mp3_dec_fuzzer CORPUS_DIR
+```
+To run on host
+```
+  $ $ANDROID_HOST_OUT/fuzz/x86_64/mp3_dec_fuzzer/mp3_dec_fuzzer CORPUS_DIR
+```
+
+## References:
+ * http://llvm.org/docs/LibFuzzer.html
+ * https://github.com/google/oss-fuzz
diff --git a/media/libstagefright/codecs/mp3dec/fuzzer/mp3_dec_fuzzer.cpp b/media/libstagefright/codecs/mp3dec/fuzzer/mp3_dec_fuzzer.cpp
new file mode 100644
index 0000000..847c8c4
--- /dev/null
+++ b/media/libstagefright/codecs/mp3dec/fuzzer/mp3_dec_fuzzer.cpp
@@ -0,0 +1,237 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+#include <stdlib.h>
+#include <string.h>  // memset
+#include <algorithm>
+
+#include <pvmp3decoder_api.h>
+
+constexpr int kMaxFrameSamples = 4608;
+constexpr int kMaxChannels = 2;
+constexpr e_equalization kEqualizerTypes[] = {flat, bass_boost, rock, pop,
+                                              jazz, classical,  talk, flat_};
+
+static bool parseMp3Header(uint32_t header, size_t *frame_size,
+                           uint32_t *out_sampling_rate = nullptr, uint32_t *out_channels = nullptr,
+                           uint32_t *out_bitrate = nullptr, uint32_t *out_num_samples = nullptr) {
+  *frame_size = 0;
+  if (out_sampling_rate) *out_sampling_rate = 0;
+  if (out_channels) *out_channels = 0;
+  if (out_bitrate) *out_bitrate = 0;
+  if (out_num_samples) *out_num_samples = 0;
+
+  if ((header & 0xffe00000) != 0xffe00000) {
+    return false;
+  }
+  unsigned version = (header >> 19) & 3;
+  if (version == 0x01) {
+    return false;
+  }
+  unsigned layer = (header >> 17) & 3;
+  if (layer == 0x00) {
+    return false;
+  }
+  unsigned bitrate_index = (header >> 12) & 0x0f;
+  if (bitrate_index == 0 || bitrate_index == 0x0f) {
+    return false;
+  }
+  unsigned sampling_rate_index = (header >> 10) & 3;
+  if (sampling_rate_index == 3) {
+    return false;
+  }
+  static const int kSamplingRateV1[] = {44100, 48000, 32000};
+  int sampling_rate = kSamplingRateV1[sampling_rate_index];
+  if (version == 2 /* V2 */) {
+    sampling_rate /= 2;
+  } else if (version == 0 /* V2.5 */) {
+    sampling_rate /= 4;
+  }
+
+  unsigned padding = (header >> 9) & 1;
+
+  if (layer == 3) {  // layer I
+    static const int kBitrateV1[] = {32,  64,  96,  128, 160, 192, 224,
+                                     256, 288, 320, 352, 384, 416, 448};
+    static const int kBitrateV2[] = {32,  48,  56,  64,  80,  96,  112,
+                                     128, 144, 160, 176, 192, 224, 256};
+
+    int bitrate =
+        (version == 3 /* V1 */) ? kBitrateV1[bitrate_index - 1] : kBitrateV2[bitrate_index - 1];
+
+    if (out_bitrate) {
+      *out_bitrate = bitrate;
+    }
+    *frame_size = (12000 * bitrate / sampling_rate + padding) * 4;
+    if (out_num_samples) {
+      *out_num_samples = 384;
+    }
+  } else {  // layer II or III
+    static const int kBitrateV1L2[] = {32,  48,  56,  64,  80,  96,  112,
+                                       128, 160, 192, 224, 256, 320, 384};
+    static const int kBitrateV1L3[] = {32,  40,  48,  56,  64,  80,  96,
+                                       112, 128, 160, 192, 224, 256, 320};
+    static const int kBitrateV2[] = {8, 16, 24, 32, 40, 48, 56, 64, 80, 96, 112, 128, 144, 160};
+    int bitrate;
+    if (version == 3 /* V1 */) {
+      bitrate =
+          (layer == 2 /* L2 */) ? kBitrateV1L2[bitrate_index - 1] : kBitrateV1L3[bitrate_index - 1];
+
+      if (out_num_samples) {
+        *out_num_samples = 1152;
+      }
+    } else {  // V2 (or 2.5)
+      bitrate = kBitrateV2[bitrate_index - 1];
+      if (out_num_samples) {
+        *out_num_samples = (layer == 1 /* L3 */) ? 576 : 1152;
+      }
+    }
+
+    if (out_bitrate) {
+      *out_bitrate = bitrate;
+    }
+
+    if (version == 3 /* V1 */) {
+      *frame_size = 144000 * bitrate / sampling_rate + padding;
+    } else {  // V2 or V2.5
+      size_t tmp = (layer == 1 /* L3 */) ? 72000 : 144000;
+      *frame_size = tmp * bitrate / sampling_rate + padding;
+    }
+  }
+
+  if (out_sampling_rate) {
+    *out_sampling_rate = sampling_rate;
+  }
+
+  if (out_channels) {
+    int channel_mode = (header >> 6) & 3;
+    *out_channels = (channel_mode == 3) ? 1 : 2;
+  }
+
+  return true;
+}
+
+static uint32_t U32_AT(const uint8_t *ptr) {
+  return ptr[0] << 24 | ptr[1] << 16 | ptr[2] << 8 | ptr[3];
+}
+
+static bool checkHeader(uint8 *header, size_t inSize) {
+  size_t frameSize;
+  size_t totalInSize = 0;
+  bool isValidBuffer = false;
+
+  while (totalInSize + 4 < inSize) {
+    isValidBuffer = true;
+    uint32_t val = U32_AT(header + totalInSize);
+    if (!parseMp3Header(val, &frameSize, nullptr, nullptr, nullptr, nullptr)) {
+      return false;
+    }
+    totalInSize += frameSize;
+  }
+
+  return (isValidBuffer);
+}
+
+class Codec {
+ public:
+  Codec() = default;
+  ~Codec() { deInitDecoder(); }
+
+  bool initDecoder();
+  void decodeFrames(uint8_t *data, size_t size);
+  void deInitDecoder();
+
+ private:
+  tPVMP3DecoderExternal *mConfig = nullptr;
+  void *mDecoderBuf = nullptr;
+};
+
+bool Codec::initDecoder() {
+  mConfig = new tPVMP3DecoderExternal{};
+  if (!mConfig) {
+    return false;
+  }
+  size_t decoderBufSize = pvmp3_decoderMemRequirements();
+  mDecoderBuf = malloc(decoderBufSize);
+  if (!mDecoderBuf) {
+    return false;
+  }
+  memset(mDecoderBuf, 0x0, decoderBufSize);
+  pvmp3_InitDecoder(mConfig, mDecoderBuf);
+  return true;
+}
+
+void Codec::decodeFrames(uint8_t *data, size_t size) {
+  uint8_t equalizerTypeValue = (data[0] & 0x7);
+  mConfig->equalizerType = kEqualizerTypes[equalizerTypeValue];
+  mConfig->crcEnabled = data[1] & 0x1;
+
+  while (size > 0) {
+    bool status = checkHeader(data, size);
+    if (!status) {
+      size--;
+      data++;
+      continue;
+    }
+    size_t outBufSize = kMaxFrameSamples * kMaxChannels;
+    size_t usedBytes = 0;
+    int16_t outputBuf[outBufSize];
+    mConfig->inputBufferCurrentLength = size;
+    mConfig->inputBufferUsedLength = 0;
+    mConfig->inputBufferMaxLength = 0;
+    mConfig->pInputBuffer = data;
+    mConfig->pOutputBuffer = outputBuf;
+    mConfig->outputFrameSize = outBufSize / sizeof(int16_t);
+
+    ERROR_CODE decoderErr;
+    decoderErr = pvmp3_framedecoder(mConfig, mDecoderBuf);
+    if (decoderErr != NO_DECODING_ERROR) {
+      size--;
+      data++;
+    } else {
+      usedBytes = std::min((int32_t)size, mConfig->inputBufferUsedLength);
+      size -= usedBytes;
+      data += usedBytes;
+    }
+  }
+}
+
+void Codec::deInitDecoder() {
+  if (mDecoderBuf) {
+    free(mDecoderBuf);
+    mDecoderBuf = nullptr;
+  }
+  delete mConfig;
+  mConfig = nullptr;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+  if (size < 4) {
+    return 0;
+  }
+  Codec *codec = new Codec();
+  if (!codec) {
+    return 0;
+  }
+  if (codec->initDecoder()) {
+    codec->decodeFrames(const_cast<uint8_t *>(data), size);
+  }
+  delete codec;
+  return 0;
+}
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_framedecoder.cpp b/media/libstagefright/codecs/mp3dec/src/pvmp3_framedecoder.cpp
index a5c7f5e..15d2feb 100644
--- a/media/libstagefright/codecs/mp3dec/src/pvmp3_framedecoder.cpp
+++ b/media/libstagefright/codecs/mp3dec/src/pvmp3_framedecoder.cpp
@@ -219,6 +219,11 @@
 
     if (info->error_protection)
     {
+        if (!bitsAvailable(&pVars->inputStream, 16))
+        {
+            return SIDE_INFO_ERROR;
+        }
+
         /*
          *  Get crc content
          */
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_get_side_info.cpp b/media/libstagefright/codecs/mp3dec/src/pvmp3_get_side_info.cpp
index d644207..1a3fca5 100644
--- a/media/libstagefright/codecs/mp3dec/src/pvmp3_get_side_info.cpp
+++ b/media/libstagefright/codecs/mp3dec/src/pvmp3_get_side_info.cpp
@@ -73,6 +73,7 @@
 
 #include "pvmp3_get_side_info.h"
 #include "pvmp3_crc.h"
+#include "pvmp3_getbits.h"
 
 
 /*----------------------------------------------------------------------------
@@ -125,12 +126,22 @@
     {
         if (stereo == 1)
         {
+            if (!bitsAvailable(inputStream, 14))
+            {
+                return SIDE_INFO_ERROR;
+            }
+
             tmp = getbits_crc(inputStream, 14, crc, info->error_protection);
             si->main_data_begin = (tmp << 18) >> 23;    /* 9 */
             si->private_bits    = (tmp << 27) >> 27;    /* 5 */
         }
         else
         {
+            if (!bitsAvailable(inputStream, 12))
+            {
+                return SIDE_INFO_ERROR;
+            }
+
             tmp = getbits_crc(inputStream, 12, crc, info->error_protection);
             si->main_data_begin = (tmp << 20) >> 23;    /* 9 */
             si->private_bits    = (tmp << 29) >> 29;    /* 3 */
@@ -139,6 +150,11 @@
 
         for (ch = 0; ch < stereo; ch++)
         {
+            if (!bitsAvailable(inputStream, 4))
+            {
+                return SIDE_INFO_ERROR;
+            }
+
             tmp = getbits_crc(inputStream, 4, crc, info->error_protection);
             si->ch[ch].scfsi[0] = (tmp << 28) >> 31;    /* 1 */
             si->ch[ch].scfsi[1] = (tmp << 29) >> 31;    /* 1 */
@@ -150,6 +166,11 @@
         {
             for (ch = 0; ch < stereo; ch++)
             {
+                if (!bitsAvailable(inputStream, 34))
+                {
+                    return SIDE_INFO_ERROR;
+                }
+
                 si->ch[ch].gran[gr].part2_3_length    = getbits_crc(inputStream, 12, crc, info->error_protection);
                 tmp = getbits_crc(inputStream, 22, crc, info->error_protection);
 
@@ -160,6 +181,11 @@
 
                 if (si->ch[ch].gran[gr].window_switching_flag)
                 {
+                    if (!bitsAvailable(inputStream, 22))
+                    {
+                        return SIDE_INFO_ERROR;
+                    }
+
                     tmp = getbits_crc(inputStream, 22, crc, info->error_protection);
 
                     si->ch[ch].gran[gr].block_type       = (tmp << 10) >> 30;   /* 2 */;
@@ -192,6 +218,11 @@
                 }
                 else
                 {
+                    if (!bitsAvailable(inputStream, 22))
+                    {
+                        return SIDE_INFO_ERROR;
+                    }
+
                     tmp = getbits_crc(inputStream, 22, crc, info->error_protection);
 
                     si->ch[ch].gran[gr].table_select[0] = (tmp << 10) >> 27;   /* 5 */;
@@ -204,6 +235,11 @@
                     si->ch[ch].gran[gr].block_type      = 0;
                 }
 
+                if (!bitsAvailable(inputStream, 3))
+                {
+                    return SIDE_INFO_ERROR;
+                }
+
                 tmp = getbits_crc(inputStream, 3, crc, info->error_protection);
                 si->ch[ch].gran[gr].preflag            = (tmp << 29) >> 31;    /* 1 */
                 si->ch[ch].gran[gr].scalefac_scale     = (tmp << 30) >> 31;    /* 1 */
@@ -213,11 +249,21 @@
     }
     else /* Layer 3 LSF */
     {
+        if (!bitsAvailable(inputStream, 8 + stereo))
+        {
+            return SIDE_INFO_ERROR;
+        }
+
         si->main_data_begin = getbits_crc(inputStream,      8, crc, info->error_protection);
         si->private_bits    = getbits_crc(inputStream, stereo, crc, info->error_protection);
 
         for (ch = 0; ch < stereo; ch++)
         {
+            if (!bitsAvailable(inputStream, 39))
+            {
+                return SIDE_INFO_ERROR;
+            }
+
             tmp = getbits_crc(inputStream, 21, crc, info->error_protection);
             si->ch[ch].gran[0].part2_3_length    = (tmp << 11) >> 20;  /* 12 */
             si->ch[ch].gran[0].big_values        = (tmp << 23) >> 23;  /*  9 */
@@ -230,6 +276,11 @@
             if (si->ch[ch].gran[0].window_switching_flag)
             {
 
+                if (!bitsAvailable(inputStream, 22))
+                {
+                    return SIDE_INFO_ERROR;
+                }
+
                 tmp = getbits_crc(inputStream, 22, crc, info->error_protection);
 
                 si->ch[ch].gran[0].block_type       = (tmp << 10) >> 30;   /* 2 */;
@@ -262,6 +313,11 @@
             }
             else
             {
+                if (!bitsAvailable(inputStream, 22))
+                {
+                    return SIDE_INFO_ERROR;
+                }
+
                 tmp = getbits_crc(inputStream, 22, crc, info->error_protection);
 
                 si->ch[ch].gran[0].table_select[0] = (tmp << 10) >> 27;   /* 5 */;
@@ -274,6 +330,11 @@
                 si->ch[ch].gran[0].block_type      = 0;
             }
 
+            if (!bitsAvailable(inputStream, 2))
+            {
+                return SIDE_INFO_ERROR;
+            }
+
             tmp = getbits_crc(inputStream, 2, crc, info->error_protection);
             si->ch[ch].gran[0].scalefac_scale     =  tmp >> 1;  /* 1 */
             si->ch[ch].gran[0].count1table_select =  tmp & 1;  /* 1 */
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_getbits.cpp b/media/libstagefright/codecs/mp3dec/src/pvmp3_getbits.cpp
index 8ff7953..4d252ef 100644
--- a/media/libstagefright/codecs/mp3dec/src/pvmp3_getbits.cpp
+++ b/media/libstagefright/codecs/mp3dec/src/pvmp3_getbits.cpp
@@ -113,10 +113,11 @@
 
     uint32    offset;
     uint32    bitIndex;
-    uint8     Elem;         /* Needs to be same type as pInput->pBuffer */
-    uint8     Elem1;
-    uint8     Elem2;
-    uint8     Elem3;
+    uint32    bytesToFetch;
+    uint8     Elem  = 0;         /* Needs to be same type as pInput->pBuffer */
+    uint8     Elem1 = 0;
+    uint8     Elem2 = 0;
+    uint8     Elem3 = 0;
     uint32   returnValue = 0;
 
     if (!neededBits)
@@ -126,10 +127,25 @@
 
     offset = (ptBitStream->usedBits) >> INBUF_ARRAY_INDEX_SHIFT;
 
-    Elem  = *(ptBitStream->pBuffer + module(offset  , BUFSIZE));
-    Elem1 = *(ptBitStream->pBuffer + module(offset + 1, BUFSIZE));
-    Elem2 = *(ptBitStream->pBuffer + module(offset + 2, BUFSIZE));
-    Elem3 = *(ptBitStream->pBuffer + module(offset + 3, BUFSIZE));
+    /* Remove extra high bits by shifting up */
+    bitIndex = module(ptBitStream->usedBits, INBUF_BIT_WIDTH);
+
+    bytesToFetch = (bitIndex + neededBits + 7 ) >> 3 ;
+
+    switch (bytesToFetch)
+    {
+    case 4:
+        Elem3 = *(ptBitStream->pBuffer + module(offset + 3, BUFSIZE));
+        [[fallthrough]];
+    case 3:
+        Elem2 = *(ptBitStream->pBuffer + module(offset + 2, BUFSIZE));
+        [[fallthrough]];
+    case 2:
+        Elem1 = *(ptBitStream->pBuffer + module(offset + 1, BUFSIZE));
+        [[fallthrough]];
+    case 1:
+        Elem = *(ptBitStream->pBuffer + module(offset, BUFSIZE));
+    }
 
 
     returnValue = (((uint32)(Elem)) << 24) |
@@ -137,9 +153,6 @@
                   (((uint32)(Elem2)) << 8) |
                   ((uint32)(Elem3));
 
-    /* Remove extra high bits by shifting up */
-    bitIndex = module(ptBitStream->usedBits, INBUF_BIT_WIDTH);
-
     /* This line is faster than to mask off the high bits. */
     returnValue <<= bitIndex;
 
@@ -161,22 +174,32 @@
 
     uint32    offset;
     uint32    bitIndex;
-    uint8    Elem;         /* Needs to be same type as pInput->pBuffer */
-    uint8    Elem1;
+    uint32    bytesToFetch;
+    uint8    Elem  = 0;         /* Needs to be same type as pInput->pBuffer */
+    uint8    Elem1 = 0;
     uint16   returnValue;
 
     offset = (ptBitStream->usedBits) >> INBUF_ARRAY_INDEX_SHIFT;
 
-    Elem  = *(ptBitStream->pBuffer + module(offset  , BUFSIZE));
-    Elem1 = *(ptBitStream->pBuffer + module(offset + 1, BUFSIZE));
+    /* Remove extra high bits by shifting up */
+    bitIndex = module(ptBitStream->usedBits, INBUF_BIT_WIDTH);
+
+    bytesToFetch = (bitIndex + neededBits + 7 ) >> 3 ;
+
+    if (bytesToFetch > 1)
+    {
+        Elem = *(ptBitStream->pBuffer + module(offset, BUFSIZE));
+        Elem1 = *(ptBitStream->pBuffer + module(offset + 1, BUFSIZE));
+    }
+    else if (bytesToFetch > 0)
+    {
+        Elem = *(ptBitStream->pBuffer + module(offset, BUFSIZE));
+    }
 
 
     returnValue = (((uint16)(Elem)) << 8) |
                   ((uint16)(Elem1));
 
-    /* Remove extra high bits by shifting up */
-    bitIndex = module(ptBitStream->usedBits, INBUF_BIT_WIDTH);
-
     ptBitStream->usedBits += neededBits;
     /* This line is faster than to mask off the high bits. */
     returnValue = (returnValue << (bitIndex));
@@ -197,25 +220,40 @@
 
     uint32    offset;
     uint32    bitIndex;
-    uint8     Elem;         /* Needs to be same type as pInput->pBuffer */
-    uint8     Elem1;
-    uint8     Elem2;
+    uint32    bytesToFetch;
+    uint8     Elem  = 0;         /* Needs to be same type as pInput->pBuffer */
+    uint8     Elem1 = 0;
+    uint8     Elem2 = 0;
     uint32   returnValue;
 
     offset = (ptBitStream->usedBits) >> INBUF_ARRAY_INDEX_SHIFT;
 
-    Elem  = *(ptBitStream->pBuffer + module(offset  , BUFSIZE));
-    Elem1 = *(ptBitStream->pBuffer + module(offset + 1, BUFSIZE));
-    Elem2 = *(ptBitStream->pBuffer + module(offset + 2, BUFSIZE));
+    /* Remove extra high bits by shifting up */
+    bitIndex = module(ptBitStream->usedBits, INBUF_BIT_WIDTH);
+
+    bytesToFetch = (bitIndex + neededBits + 7 ) >> 3 ;
+
+    if (bytesToFetch > 2)
+    {
+        Elem  = *(ptBitStream->pBuffer + module(offset, BUFSIZE));
+        Elem1 = *(ptBitStream->pBuffer + module(offset + 1, BUFSIZE));
+        Elem2 = *(ptBitStream->pBuffer + module(offset + 2, BUFSIZE));
+    }
+    else if (bytesToFetch > 1)
+    {
+        Elem  = *(ptBitStream->pBuffer + module(offset, BUFSIZE));
+        Elem1 = *(ptBitStream->pBuffer + module(offset + 1, BUFSIZE));
+    }
+    else if (bytesToFetch > 0)
+    {
+        Elem = *(ptBitStream->pBuffer + module(offset, BUFSIZE));
+    }
 
 
     returnValue = (((uint32)(Elem)) << 16) |
                   (((uint32)(Elem1)) << 8) |
                   ((uint32)(Elem2));
 
-    /* Remove extra high bits by shifting up */
-    bitIndex = module(ptBitStream->usedBits, INBUF_BIT_WIDTH);
-
     ptBitStream->usedBits += neededBits;
     /* This line is faster than to mask off the high bits. */
     returnValue = 0xFFFFFF & (returnValue << (bitIndex));
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_getbits.h b/media/libstagefright/codecs/mp3dec/src/pvmp3_getbits.h
index b058b00..b04fe6d 100644
--- a/media/libstagefright/codecs/mp3dec/src/pvmp3_getbits.h
+++ b/media/libstagefright/codecs/mp3dec/src/pvmp3_getbits.h
@@ -104,6 +104,11 @@
 ; Function Prototype declaration
 ----------------------------------------------------------------------------*/
 
+static inline bool bitsAvailable(tmp3Bits *inputStream, uint32 neededBits)
+{
+    return (inputStream->inputBufferCurrentLength << 3) >= (neededBits + inputStream->usedBits);
+}
+
 /*----------------------------------------------------------------------------
 ; END
 ----------------------------------------------------------------------------*/
diff --git a/media/libstagefright/codecs/mp3dec/test/Android.bp b/media/libstagefright/codecs/mp3dec/test/Android.bp
index 0ff8b12..6b92ae9 100644
--- a/media/libstagefright/codecs/mp3dec/test/Android.bp
+++ b/media/libstagefright/codecs/mp3dec/test/Android.bp
@@ -17,6 +17,7 @@
 cc_test {
     name: "Mp3DecoderTest",
     gtest: true,
+    test_suites: ["device-tests"],
 
     srcs: [
         "mp3reader.cpp",
diff --git a/media/libstagefright/codecs/mp3dec/test/AndroidTest.xml b/media/libstagefright/codecs/mp3dec/test/AndroidTest.xml
index 233f9bb..29952eb 100644
--- a/media/libstagefright/codecs/mp3dec/test/AndroidTest.xml
+++ b/media/libstagefright/codecs/mp3dec/test/AndroidTest.xml
@@ -19,7 +19,7 @@
         <option name="cleanup" value="true" />
         <option name="push" value="Mp3DecoderTest->/data/local/tmp/Mp3DecoderTest" />
         <option name="push-file"
-            key="https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/mp3dec/test/Mp3DecoderTest-1.1.zip?unzip=true"
+            key="https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/mp3dec/test/Mp3DecoderTest-1.2.zip?unzip=true"
             value="/data/local/tmp/Mp3DecoderTestRes/" />
     </target_preparer>
 
diff --git a/media/libstagefright/codecs/mp3dec/test/Mp3DecoderTest.cpp b/media/libstagefright/codecs/mp3dec/test/Mp3DecoderTest.cpp
index 0784c0c..91326a8 100644
--- a/media/libstagefright/codecs/mp3dec/test/Mp3DecoderTest.cpp
+++ b/media/libstagefright/codecs/mp3dec/test/Mp3DecoderTest.cpp
@@ -186,6 +186,7 @@
                          ::testing::Values(("bbb_44100hz_2ch_128kbps_mp3_30sec.mp3"),
                                            ("bbb_44100hz_2ch_128kbps_mp3_5mins.mp3"),
                                            ("bug_136053885.mp3"),
+                                           ("bbb_2ch_44kHz_lame_crc.mp3"),
                                            ("bbb_mp3_stereo_192kbps_48000hz.mp3")));
 
 int main(int argc, char **argv) {
diff --git a/media/libstagefright/codecs/opus/dec/SoftOpus.cpp b/media/libstagefright/codecs/opus/dec/SoftOpus.cpp
index 4f61aa8..5bb1879 100644
--- a/media/libstagefright/codecs/opus/dec/SoftOpus.cpp
+++ b/media/libstagefright/codecs/opus/dec/SoftOpus.cpp
@@ -58,6 +58,8 @@
       mInputBufferCount(0),
       mDecoder(NULL),
       mHeader(NULL),
+      mNumChannels(1),
+      mSamplingRate(kRate),
       mCodecDelay(0),
       mSeekPreRoll(0),
       mAnchorTimeUs(0),
@@ -169,11 +171,11 @@
             }
 
             opusParams->nAudioBandWidth = 0;
-            opusParams->nSampleRate = kRate;
+            opusParams->nSampleRate = mSamplingRate;
             opusParams->nBitRate = 0;
 
             if (!isConfigured()) {
-                opusParams->nChannels = 1;
+                opusParams->nChannels = mNumChannels;
             } else {
                 opusParams->nChannels = mHeader->channels;
             }
@@ -274,7 +276,8 @@
             if (opusParams->nPortIndex != 0) {
                 return OMX_ErrorUndefined;
             }
-
+            mNumChannels = opusParams->nChannels;
+            mSamplingRate = opusParams->nSampleRate;
             return OMX_ErrorNone;
         }
 
@@ -496,6 +499,8 @@
                                    *(reinterpret_cast<int64_t*>(inHeader->pBuffer +
                                                                 inHeader->nOffset)),
                                    kRate);
+                mSamplingRate = kRate;
+                mNumChannels = mHeader->channels;
                 notify(OMX_EventPortSettingsChanged, 1, 0, NULL);
                 mOutputPortSettingsChange = AWAITING_DISABLED;
             }
diff --git a/media/libstagefright/codecs/opus/dec/SoftOpus.h b/media/libstagefright/codecs/opus/dec/SoftOpus.h
index 91cafa1..00058c8 100644
--- a/media/libstagefright/codecs/opus/dec/SoftOpus.h
+++ b/media/libstagefright/codecs/opus/dec/SoftOpus.h
@@ -70,6 +70,8 @@
     OpusMSDecoder *mDecoder;
     OpusHeader *mHeader;
 
+    int32_t mNumChannels;
+    int32_t mSamplingRate;
     int64_t mCodecDelay;
     int64_t mSeekPreRoll;
     int64_t mSamplesToDiscard;
diff --git a/media/libstagefright/flac/dec/Android.bp b/media/libstagefright/flac/dec/Android.bp
index 32b2075..b63353c 100644
--- a/media/libstagefright/flac/dec/Android.bp
+++ b/media/libstagefright/flac/dec/Android.bp
@@ -2,6 +2,7 @@
     name: "libstagefright_flacdec",
     vendor_available: true,
     min_sdk_version: "29",
+    host_supported: true,
 
     srcs: [
         "FLACDecoder.cpp",
@@ -33,6 +34,13 @@
     ],
 
     header_libs: [
-        "libmedia_headers",
+        "libstagefright_foundation_headers",
+        "libstagefright_headers",
     ],
+
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
 }
diff --git a/media/libstagefright/flac/dec/FLACDecoder.cpp b/media/libstagefright/flac/dec/FLACDecoder.cpp
index cef0bc6..f5e9532 100644
--- a/media/libstagefright/flac/dec/FLACDecoder.cpp
+++ b/media/libstagefright/flac/dec/FLACDecoder.cpp
@@ -433,7 +433,7 @@
             if (mBuffer == nullptr) {
                 mBufferDataSize = 0;
                 mBufferLen = 0;
-                ALOGE("decodeOneFrame: failed to allocate memory for input buffer");
+                ALOGE("addDataToBuffer: failed to allocate memory for input buffer");
                 return NO_MEMORY;
             }
             mBufferLen = mBufferDataSize + inBufferLen;
diff --git a/media/libstagefright/foundation/ALooperRoster.cpp b/media/libstagefright/foundation/ALooperRoster.cpp
index 8a7c3eb..0a4e598 100644
--- a/media/libstagefright/foundation/ALooperRoster.cpp
+++ b/media/libstagefright/foundation/ALooperRoster.cpp
@@ -166,7 +166,7 @@
         }
         s.append("\n");
     }
-    write(fd, s.string(), s.size());
+    (void)write(fd, s.string(), s.size());
 }
 
 }  // namespace android
diff --git a/media/libstagefright/foundation/AString.cpp b/media/libstagefright/foundation/AString.cpp
index 4bd186c..8722e14 100644
--- a/media/libstagefright/foundation/AString.cpp
+++ b/media/libstagefright/foundation/AString.cpp
@@ -387,10 +387,14 @@
     va_start(ap, format);
 
     char *buffer;
-    vasprintf(&buffer, format, ap);
+    int bufferSize = vasprintf(&buffer, format, ap);
 
     va_end(ap);
 
+    if(bufferSize < 0) {
+        return AString();
+    }
+
     AString result(buffer);
 
     free(buffer);
diff --git a/media/libstagefright/foundation/Android.bp b/media/libstagefright/foundation/Android.bp
index f440e00..ebf1035 100644
--- a/media/libstagefright/foundation/Android.bp
+++ b/media/libstagefright/foundation/Android.bp
@@ -12,6 +12,7 @@
     vndk: {
         enabled: true,
     },
+    host_supported: true,
     double_loadable: true,
     include_dirs: [
         "frameworks/av/include",
@@ -25,7 +26,6 @@
     ],
 
     header_libs: [
-        "libhardware_headers",
         "libstagefright_foundation_headers",
         "media_ndk_headers",
         "media_plugin_headers",
@@ -86,6 +86,9 @@
                 "-DNO_IMEMORY",
             ],
         },
+        darwin: {
+            enabled: false,
+        },
     },
 
     clang: true,
diff --git a/media/libstagefright/foundation/MediaDefs.cpp b/media/libstagefright/foundation/MediaDefs.cpp
index a08fed1..c216bc5 100644
--- a/media/libstagefright/foundation/MediaDefs.cpp
+++ b/media/libstagefright/foundation/MediaDefs.cpp
@@ -20,6 +20,7 @@
 
 const char *MEDIA_MIMETYPE_IMAGE_JPEG = "image/jpeg";
 const char *MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC = "image/vnd.android.heic";
+const char *MEDIA_MIMETYPE_IMAGE_AVIF = "image/avif";
 
 const char *MEDIA_MIMETYPE_VIDEO_VP8 = "video/x-vnd.on2.vp8";
 const char *MEDIA_MIMETYPE_VIDEO_VP9 = "video/x-vnd.on2.vp9";
diff --git a/media/libstagefright/foundation/TEST_MAPPING b/media/libstagefright/foundation/TEST_MAPPING
index 3301c4b..a70c352 100644
--- a/media/libstagefright/foundation/TEST_MAPPING
+++ b/media/libstagefright/foundation/TEST_MAPPING
@@ -1,5 +1,14 @@
+// mappings for frameworks/av/media/libstagefright/foundation
 {
+  // tests which require dynamic content
+  // invoke with: atest -- --enable-module-dynamic-download=true
+  // TODO(b/148094059): unit tests not allowed to download content
+  "dynamic-presubmit": [
+    { "name": "OpusHeaderTest" }
+  ],
+
   "presubmit": [
-    { "name": "sf_foundation_test" }
+    { "name": "sf_foundation_test" },
+    { "name": "MetaDataBaseUnitTest"}
   ]
 }
diff --git a/media/libstagefright/foundation/avc_utils.cpp b/media/libstagefright/foundation/avc_utils.cpp
index f53d2c9..9d6887c 100644
--- a/media/libstagefright/foundation/avc_utils.cpp
+++ b/media/libstagefright/foundation/avc_utils.cpp
@@ -559,11 +559,9 @@
     CHECK_NE(video_object_type_indication,
              0x21u /* Fine Granularity Scalable */);
 
-    unsigned video_object_layer_verid __unused;
-    unsigned video_object_layer_priority __unused;
     if (br.getBits(1)) {
-        video_object_layer_verid = br.getBits(4);
-        video_object_layer_priority = br.getBits(3);
+        br.skipBits(4); //video_object_layer_verid
+        br.skipBits(3); //video_object_layer_priority
     }
     unsigned aspect_ratio_info = br.getBits(4);
     if (aspect_ratio_info == 0x0f /* extended PAR */) {
@@ -622,7 +620,7 @@
     unsigned video_object_layer_height = br.getBits(13);
     CHECK(br.getBits(1));  // marker_bit
 
-    unsigned interlaced __unused = br.getBits(1);
+    br.skipBits(1); // interlaced
 
     *width = video_object_layer_width;
     *height = video_object_layer_height;
@@ -668,7 +666,7 @@
         return false;
     }
 
-    unsigned protection __unused = (header >> 16) & 1;
+    // we can get protection value from (header >> 16) & 1
 
     unsigned bitrate_index = (header >> 12) & 0x0f;
 
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/ADebug.h b/media/libstagefright/foundation/include/media/stagefright/foundation/ADebug.h
index ab17a02..e4b99bf 100644
--- a/media/libstagefright/foundation/include/media/stagefright/foundation/ADebug.h
+++ b/media/libstagefright/foundation/include/media/stagefright/foundation/ADebug.h
@@ -148,7 +148,8 @@
     static char *GetDebugName(const char *name);
 
     inline static bool isExperimentEnabled(
-            const char *name __unused /* nonnull */, bool allow __unused = true) {
+            const char *name __attribute__((unused)) /* nonnull */,
+            bool allow __attribute__((unused)) = true) {
 #ifdef ENABLE_STAGEFRIGHT_EXPERIMENTS
         if (!strcmp(name, "legacy-adaptive")) {
             return getExperimentFlag(allow, name, 2, 1); // every other day
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/AUtils.h b/media/libstagefright/foundation/include/media/stagefright/foundation/AUtils.h
index af6b357..3b646dc 100644
--- a/media/libstagefright/foundation/include/media/stagefright/foundation/AUtils.h
+++ b/media/libstagefright/foundation/include/media/stagefright/foundation/AUtils.h
@@ -63,7 +63,7 @@
 
 template<class T>
 void ENSURE_UNSIGNED_TYPE() {
-    T TYPE_MUST_BE_UNSIGNED[(T)-1 < 0 ? -1 : 0] __unused;
+    T TYPE_MUST_BE_UNSIGNED[(T)-1 < 0 ? -1 : 0] __attribute__((unused));
 }
 
 // needle is in range [hayStart, hayStart + haySize)
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/MediaDefs.h b/media/libstagefright/foundation/include/media/stagefright/foundation/MediaDefs.h
index 1f9e636..e96243e 100644
--- a/media/libstagefright/foundation/include/media/stagefright/foundation/MediaDefs.h
+++ b/media/libstagefright/foundation/include/media/stagefright/foundation/MediaDefs.h
@@ -22,6 +22,7 @@
 
 extern const char *MEDIA_MIMETYPE_IMAGE_JPEG;
 extern const char *MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC;
+extern const char *MEDIA_MIMETYPE_IMAGE_AVIF;
 
 extern const char *MEDIA_MIMETYPE_VIDEO_VP8;
 extern const char *MEDIA_MIMETYPE_VIDEO_VP9;
diff --git a/media/libstagefright/foundation/tests/AVCUtils/AVCUtilsTestEnvironment.h b/media/libstagefright/foundation/tests/AVCUtils/AVCUtilsTestEnvironment.h
new file mode 100644
index 0000000..b28a7bc
--- /dev/null
+++ b/media/libstagefright/foundation/tests/AVCUtils/AVCUtilsTestEnvironment.h
@@ -0,0 +1,73 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __AVC_UTILS_TEST_ENVIRONMENT_H__
+#define __AVC_UTILS_TEST_ENVIRONMENT_H__
+
+#include <gtest/gtest.h>
+
+#include <getopt.h>
+
+using namespace std;
+
+class AVCUtilsTestEnvironment : public::testing::Environment {
+  public:
+    AVCUtilsTestEnvironment() : res("/data/local/tmp/") {}
+
+    // Parses the command line arguments
+    int initFromOptions(int argc, char **argv);
+
+    void setRes(const char *_res) { res = _res; }
+
+    const string getRes() const { return res; }
+
+  private:
+    string res;
+};
+
+int AVCUtilsTestEnvironment::initFromOptions(int argc, char **argv) {
+    static struct option options[] = {{"path", required_argument, 0, 'P'}, {0, 0, 0, 0}};
+
+    while (true) {
+        int index = 0;
+        int c = getopt_long(argc, argv, "P:", options, &index);
+        if (c == -1) {
+            break;
+        }
+
+        switch (c) {
+            case 'P': {
+                setRes(optarg);
+                break;
+            }
+            default:
+                break;
+        }
+    }
+
+    if (optind < argc) {
+        fprintf(stderr,
+                "unrecognized option: %s\n\n"
+                "usage: %s <gtest options> <test options>\n\n"
+                "test options are:\n\n"
+                "-P, --path: Resource files directory location\n",
+                argv[optind ?: 1], argv[0]);
+        return 2;
+    }
+    return 0;
+}
+
+#endif  // __AVC_UTILS_TEST_ENVIRONMENT_H__
diff --git a/media/libstagefright/foundation/tests/AVCUtils/AVCUtilsUnitTest.cpp b/media/libstagefright/foundation/tests/AVCUtils/AVCUtilsUnitTest.cpp
new file mode 100644
index 0000000..77a8599
--- /dev/null
+++ b/media/libstagefright/foundation/tests/AVCUtils/AVCUtilsUnitTest.cpp
@@ -0,0 +1,411 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "AVCUtilsUnitTest"
+#include <utils/Log.h>
+
+#include <fstream>
+
+#include "media/stagefright/foundation/ABitReader.h"
+#include "media/stagefright/foundation/avc_utils.h"
+
+#include "AVCUtilsTestEnvironment.h"
+
+constexpr size_t kSmallBufferSize = 2;
+constexpr uint8_t kSPSmask = 0x1f;
+constexpr uint8_t kSPSStartCode = 0x07;
+constexpr uint8_t kConfigVersion = 0x01;
+
+using namespace android;
+
+static AVCUtilsTestEnvironment *gEnv = nullptr;
+
+class MpegAudioUnitTest
+    : public ::testing::TestWithParam<
+              tuple</*audioHeader*/ uint32_t, /*frameSize*/ int32_t, /*sampleRate*/ int32_t,
+                    /*numChannels*/ int32_t, /*bitRate*/ int32_t, /*numSamples*/ int32_t>> {};
+
+class VOLDimensionTest
+    : public ::testing::TestWithParam<
+              tuple</*fileName*/ string, /*volWidth*/ int32_t, /*volHeight*/ int32_t>> {};
+
+class AVCUtils {
+  public:
+    bool SetUpAVCUtils(string fileName, string infoFileName) {
+        mInputFile = gEnv->getRes() + fileName;
+        mInputFileStream.open(mInputFile, ifstream::in);
+        if (!mInputFileStream.is_open()) return false;
+
+        mInfoFile = gEnv->getRes() + infoFileName;
+        mInfoFileStream.open(mInfoFile, ifstream::in);
+        if (!mInputFileStream.is_open()) return false;
+        return true;
+    }
+
+    ~AVCUtils() {
+        if (mInputFileStream.is_open()) mInputFileStream.close();
+        if (mInfoFileStream.is_open()) mInfoFileStream.close();
+    }
+
+    string mInputFile;
+    string mInfoFile;
+
+    ifstream mInputFileStream;
+    ifstream mInfoFileStream;
+};
+
+class AVCDimensionTest
+    : public AVCUtils,
+      public ::testing::TestWithParam<
+              tuple</*fileName*/ string, /*infoFileName*/ string,
+                    /*avcWidth*/ size_t, /*avcHeight*/ size_t, /*numberOfNALUnits*/ int32_t>> {
+  public:
+    virtual void SetUp() override {
+        tuple<string, string, size_t, size_t, size_t> params = GetParam();
+        string fileName = get<0>(params);
+        string infoFileName = get<1>(params);
+        AVCUtils::SetUpAVCUtils(fileName, infoFileName);
+
+        mFrameWidth = get<2>(params);
+        mFrameHeight = get<3>(params);
+        mNalUnitsExpected = get<4>(params);
+    }
+
+    size_t mFrameWidth;
+    size_t mFrameHeight;
+    int32_t mNalUnitsExpected;
+};
+
+class AvccBoxTest : public AVCDimensionTest {
+  public:
+    virtual void SetUp() override { AVCDimensionTest::SetUp(); }
+};
+
+class AVCFrameTest
+    : public AVCUtils,
+      public ::testing::TestWithParam<pair</*fileName*/ string, /*infoFileName*/ string>> {
+  public:
+    virtual void SetUp() override {
+        string fileName = GetParam().first;
+        string infoFileName = GetParam().second;
+        AVCUtils::SetUpAVCUtils(fileName, infoFileName);
+    }
+};
+
+TEST_P(MpegAudioUnitTest, AudioProfileTest) {
+    tuple<uint32_t, size_t, int, int, int, int> params = GetParam();
+    uint32_t header = get<0>(params);
+
+    size_t audioFrameSize = get<1>(params);
+    int audioSampleRate = get<2>(params);
+    int audioNumChannels = get<3>(params);
+    int audioBitRate = get<4>(params);
+    int audioNumSamples = get<5>(params);
+
+    size_t frameSize = 0;
+    int sampleRate = 0;
+    int numChannels = 0;
+    int bitRate = 0;
+    int numSamples = 0;
+
+    bool status = GetMPEGAudioFrameSize(header, &frameSize, &sampleRate, &numChannels, &bitRate,
+                                        &numSamples);
+    ASSERT_TRUE(status) << "Failed to get Audio properties";
+
+    ASSERT_EQ(frameSize, audioFrameSize) << "Wrong frame size found";
+
+    ASSERT_EQ(sampleRate, audioSampleRate) << "Wrong sample rate found";
+
+    ASSERT_EQ(numChannels, audioNumChannels) << "Wrong number of channels found";
+
+    ASSERT_EQ(bitRate, audioBitRate) << "Wrong bit rate found";
+
+    ASSERT_EQ(numSamples, audioNumSamples) << "Wrong number of samples found";
+}
+
+TEST_P(VOLDimensionTest, DimensionTest) {
+    tuple<string, int32_t, int32_t> params = GetParam();
+    string inputFile = gEnv->getRes() + get<0>(params);
+    ifstream inputFileStream;
+    inputFileStream.open(inputFile, ifstream::in);
+    ASSERT_TRUE(inputFileStream.is_open()) << "Failed to open: " << inputFile;
+
+    struct stat buf;
+    int8_t err = stat(inputFile.c_str(), &buf);
+    ASSERT_EQ(err, 0) << "Failed to get information for file: " << inputFile;
+
+    size_t fileSize = buf.st_size;
+    ASSERT_NE(fileSize, 0) << "Invalid file size found";
+
+    const uint8_t *volBuffer = new uint8_t[fileSize];
+    ASSERT_NE(volBuffer, nullptr) << "Failed to allocate VOL buffer of size: " << fileSize;
+
+    inputFileStream.read((char *)(volBuffer), fileSize);
+    ASSERT_EQ(inputFileStream.gcount(), fileSize)
+            << "Failed to read complete file, bytes read: " << inputFileStream.gcount();
+
+    int32_t width = get<1>(params);
+    int32_t height = get<2>(params);
+    int32_t volWidth = -1;
+    int32_t volHeight = -1;
+
+    bool status = ExtractDimensionsFromVOLHeader(volBuffer, fileSize, &volWidth, &volHeight);
+    ASSERT_TRUE(status)
+            << "Failed to get VOL dimensions from function: ExtractDimensionsFromVOLHeader()";
+
+    ASSERT_EQ(volWidth, width) << "Expected width: " << width << "Found: " << volWidth;
+
+    ASSERT_EQ(volHeight, height) << "Expected height: " << height << "Found: " << volHeight;
+
+    delete[] volBuffer;
+}
+
+TEST_P(AVCDimensionTest, DimensionTest) {
+    int32_t numNalUnits = 0;
+    int32_t avcWidth = -1;
+    int32_t avcHeight = -1;
+    string line;
+    string type;
+    size_t chunkLength;
+    while (getline(mInfoFileStream, line)) {
+        istringstream stringLine(line);
+        stringLine >> type >> chunkLength;
+        ASSERT_GT(chunkLength, 0) << "Length of the data chunk must be greater than zero";
+
+        const uint8_t *data = new uint8_t[chunkLength];
+        ASSERT_NE(data, nullptr) << "Failed to create a data buffer of size: " << chunkLength;
+
+        const uint8_t *nalStart;
+        size_t nalSize;
+
+        mInputFileStream.read((char *)data, chunkLength);
+        ASSERT_EQ(mInputFileStream.gcount(), chunkLength)
+                << "Failed to read complete file, bytes read: " << mInputFileStream.gcount();
+
+        size_t smallBufferSize = kSmallBufferSize;
+        const uint8_t *sanityData = new uint8_t[smallBufferSize];
+        memcpy((void *)sanityData, (void *)data, smallBufferSize);
+
+        status_t result = getNextNALUnit(&sanityData, &smallBufferSize, &nalStart, &nalSize, true);
+        ASSERT_EQ(result, -EAGAIN) << "Invalid result found when wrong NAL unit passed";
+
+        while (!getNextNALUnit(&data, &chunkLength, &nalStart, &nalSize, true)) {
+            numNalUnits++;
+            // Check if it's an SPS
+            if ((nalStart[0] & kSPSmask) != kSPSStartCode) continue;
+            ASSERT_TRUE(nalSize > 0) << "NAL unit size must be greater than 0";
+
+            sp<ABuffer> spsBuffer = new ABuffer(nalSize);
+            ASSERT_NE(spsBuffer, nullptr) << "ABuffer returned null for size: " << nalSize;
+
+            memcpy(spsBuffer->data(), nalStart, nalSize);
+            FindAVCDimensions(spsBuffer, &avcWidth, &avcHeight);
+            spsBuffer.clear();
+            ASSERT_EQ(avcWidth, mFrameWidth)
+                    << "Expected width: " << mFrameWidth << "Found: " << avcWidth;
+
+            ASSERT_EQ(avcHeight, mFrameHeight)
+                    << "Expected height: " << mFrameHeight << "Found: " << avcHeight;
+        }
+        delete[] data;
+    }
+    if (mNalUnitsExpected < 0) {
+        ASSERT_GT(numNalUnits, 0) << "Failed to find an NAL Unit";
+    } else {
+        ASSERT_EQ(numNalUnits, mNalUnitsExpected)
+                << "Expected number of NAL units: " << mNalUnitsExpected
+                << " found: " << numNalUnits;
+    }
+}
+
+TEST_P(AvccBoxTest, AvccBoxValidationTest) {
+    int32_t avcWidth = -1;
+    int32_t avcHeight = -1;
+    int32_t accessUnitLength = 0;
+    int32_t profile = -1;
+    int32_t level = -1;
+    string line;
+    string type;
+    size_t chunkLength;
+    while (getline(mInfoFileStream, line)) {
+        istringstream stringLine(line);
+        stringLine >> type >> chunkLength;
+
+        if (type.compare("SPS") && type.compare("PPS")) continue;
+        ASSERT_GT(chunkLength, 0) << "Length of the data chunk must be greater than zero";
+
+        accessUnitLength += chunkLength;
+
+        if (!type.compare("SPS")) {
+            const uint8_t *data = new uint8_t[chunkLength];
+            ASSERT_NE(data, nullptr) << "Failed to create a data buffer of size: " << chunkLength;
+
+            const uint8_t *nalStart;
+            size_t nalSize;
+
+            mInputFileStream.read((char *)data, (uint32_t)chunkLength);
+            ASSERT_EQ(mInputFileStream.gcount(), chunkLength)
+                    << "Failed to read complete file, bytes read: " << mInputFileStream.gcount();
+
+            while (!getNextNALUnit(&data, &chunkLength, &nalStart, &nalSize, true)) {
+                // Check if it's an SPS
+                ASSERT_TRUE(nalSize > 0 && (nalStart[0] & kSPSmask) == kSPSStartCode)
+                        << "Failed to get SPS";
+
+                ASSERT_GE(nalSize, 4) << "SPS size must be greater than or equal to 4";
+
+                profile = nalStart[1];
+                level = nalStart[3];
+            }
+            delete[] data;
+        }
+    }
+    const uint8_t *accessUnitData = new uint8_t[accessUnitLength];
+    ASSERT_NE(accessUnitData, nullptr) << "Failed to create a buffer of size: " << accessUnitLength;
+
+    mInputFileStream.seekg(0, ios::beg);
+    mInputFileStream.read((char *)accessUnitData, accessUnitLength);
+    ASSERT_EQ(mInputFileStream.gcount(), accessUnitLength)
+            << "Failed to read complete file, bytes read: " << mInputFileStream.gcount();
+
+    sp<ABuffer> accessUnit = new ABuffer(accessUnitLength);
+    ASSERT_NE(accessUnit, nullptr)
+            << "Failed to create an android data buffer of size: " << accessUnitLength;
+
+    memcpy(accessUnit->data(), accessUnitData, accessUnitLength);
+    sp<ABuffer> csdDataBuffer = MakeAVCCodecSpecificData(accessUnit, &avcWidth, &avcHeight);
+    ASSERT_NE(csdDataBuffer, nullptr) << "No data returned from MakeAVCCodecSpecificData()";
+
+    ASSERT_EQ(avcWidth, mFrameWidth) << "Expected width: " << mFrameWidth << "Found: " << avcWidth;
+
+    ASSERT_EQ(avcHeight, mFrameHeight)
+            << "Expected height: " << mFrameHeight << "Found: " << avcHeight;
+
+    uint8_t *csdData = csdDataBuffer->data();
+    ASSERT_EQ(*csdData, kConfigVersion) << "Invalid configuration version";
+
+    ASSERT_GE(csdDataBuffer->size(), 4) << "CSD data size must be greater than or equal to 4";
+
+    ASSERT_EQ(*(csdData + 1), profile)
+            << "Expected AVC profile: " << profile << " found: " << *(csdData + 1);
+
+    ASSERT_EQ(*(csdData + 3), level)
+            << "Expected AVC level: " << level << " found: " << *(csdData + 3);
+    csdDataBuffer.clear();
+    delete[] accessUnitData;
+    accessUnit.clear();
+}
+
+TEST_P(AVCFrameTest, FrameTest) {
+    string line;
+    string type;
+    size_t chunkLength;
+    int32_t frameLayerID;
+    while (getline(mInfoFileStream, line)) {
+        uint32_t layerID = 0;
+        istringstream stringLine(line);
+        stringLine >> type >> chunkLength >> frameLayerID;
+        ASSERT_GT(chunkLength, 0) << "Length of the data chunk must be greater than zero";
+
+        char *data = new char[chunkLength];
+        ASSERT_NE(data, nullptr) << "Failed to allocation data buffer of size: " << chunkLength;
+
+        mInputFileStream.read(data, chunkLength);
+        ASSERT_EQ(mInputFileStream.gcount(), chunkLength)
+                << "Failed to read complete file, bytes read: " << mInputFileStream.gcount();
+
+        if (!type.compare("IDR")) {
+            bool isIDR = IsIDR((uint8_t *)data, chunkLength);
+            ASSERT_TRUE(isIDR);
+
+            layerID = FindAVCLayerId((uint8_t *)data, chunkLength);
+            ASSERT_EQ(layerID, frameLayerID) << "Wrong layer ID found";
+        } else if (!type.compare("P") || !type.compare("B")) {
+            sp<ABuffer> accessUnit = new ABuffer(chunkLength);
+            ASSERT_NE(accessUnit, nullptr) << "Unable to create access Unit";
+
+            memcpy(accessUnit->data(), data, chunkLength);
+            bool isReferenceFrame = IsAVCReferenceFrame(accessUnit);
+            ASSERT_TRUE(isReferenceFrame);
+
+            accessUnit.clear();
+            layerID = FindAVCLayerId((uint8_t *)data, chunkLength);
+            ASSERT_EQ(layerID, frameLayerID) << "Wrong layer ID found";
+        }
+        delete[] data;
+    }
+}
+
+INSTANTIATE_TEST_SUITE_P(AVCUtilsTestAll, MpegAudioUnitTest,
+                         ::testing::Values(make_tuple(0xFFFB9204, 418, 44100, 2, 128, 1152),
+                                           make_tuple(0xFFFB7604, 289, 48000, 2, 96, 1152),
+                                           make_tuple(0xFFFE5604, 164, 48000, 2, 160, 384)));
+
+// Info File contains the type and length for each chunk/frame
+INSTANTIATE_TEST_SUITE_P(
+        AVCUtilsTestAll, AVCDimensionTest,
+        ::testing::Values(make_tuple("crowd_8x8p50f32_200kbps_bp.h264",
+                                     "crowd_8x8p50f32_200kbps_bp.info", 8, 8, 11),
+                          make_tuple("crowd_640x360p24f300_1000kbps_bp.h264",
+                                     "crowd_640x360p24f300_1000kbps_bp.info", 640, 360, 11),
+                          make_tuple("crowd_1280x720p30f300_5000kbps_bp.h264",
+                                     "crowd_1280x720p30f300_5000kbps_bp.info", 1280, 720, 12),
+                          make_tuple("crowd_1920x1080p50f300_12000kbps_bp.h264",
+                                     "crowd_1920x1080p50f300_12000kbps_bp.info", 1920, 1080, 14),
+                          make_tuple("crowd_3840x2160p60f300_68000kbps_bp.h264",
+                                     "crowd_3840x2160p60f300_68000kbps_bp.info", 3840, 2160, 14)));
+
+// Info File contains the type and length for each chunk/frame
+INSTANTIATE_TEST_SUITE_P(
+        AVCUtilsTestAll, AvccBoxTest,
+        ::testing::Values(make_tuple("crowd_8x8p50f32_200kbps_bp.h264",
+                                     "crowd_8x8p50f32_200kbps_bp.info", 8, 8, 11),
+                          make_tuple("crowd_1280x720p30f300_5000kbps_bp.h264",
+                                     "crowd_1280x720p30f300_5000kbps_bp.info", 1280, 720, 12),
+                          make_tuple("crowd_1920x1080p50f300_12000kbps_bp.h264",
+                                     "crowd_1920x1080p50f300_12000kbps_bp.info", 1920, 1080, 14)));
+
+// Info File contains the type and length for each chunk/frame
+INSTANTIATE_TEST_SUITE_P(AVCUtilsTestAll, VOLDimensionTest,
+                         ::testing::Values(make_tuple("volData_720_480", 720, 480),
+                                           make_tuple("volData_1280_720", 1280, 720),
+                                           make_tuple("volData_1920_1080", 1920, 1080)));
+
+// Info File contains the type, length and layer ID for each chunk/frame
+INSTANTIATE_TEST_SUITE_P(AVCUtilsTestAll, AVCFrameTest,
+                         ::testing::Values(make_tuple("crowd_8x8p50f32_200kbps_bp.h264",
+                                                      "crowd_8x8p50f32_200kbps_bp.info"),
+                                           make_tuple("crowd_640x360p24f300_1000kbps_bp.h264",
+                                                      "crowd_640x360p24f300_1000kbps_bp.info"),
+                                           make_tuple("crowd_1280x720p30f300_5000kbps_bp.h264",
+                                                      "crowd_1280x720p30f300_5000kbps_bp.info"),
+                                           make_tuple("crowd_1920x1080p50f300_12000kbps_bp.h264",
+                                                      "crowd_1920x1080p50f300_12000kbps_bp.info"),
+                                           make_tuple("crowd_3840x2160p60f300_68000kbps_bp.h264",
+                                                      "crowd_3840x2160p60f300_68000kbps_bp.info")));
+
+int main(int argc, char **argv) {
+    gEnv = new AVCUtilsTestEnvironment();
+    ::testing::AddGlobalTestEnvironment(gEnv);
+    ::testing::InitGoogleTest(&argc, argv);
+    int status = gEnv->initFromOptions(argc, argv);
+    if (status == 0) {
+        status = RUN_ALL_TESTS();
+        ALOGV("Test result = %d\n", status);
+    }
+    return status;
+}
diff --git a/media/libstagefright/foundation/tests/AVCUtils/Android.bp b/media/libstagefright/foundation/tests/AVCUtils/Android.bp
new file mode 100644
index 0000000..5d0e481
--- /dev/null
+++ b/media/libstagefright/foundation/tests/AVCUtils/Android.bp
@@ -0,0 +1,51 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+cc_test {
+    name: "AVCUtilsUnitTest",
+    gtest: true,
+
+    srcs: [
+        "AVCUtilsUnitTest.cpp",
+    ],
+
+    shared_libs: [
+        "libutils",
+        "liblog",
+    ],
+
+    static_libs: [
+        "libstagefright",
+        "libstagefright_foundation",
+    ],
+
+    include_dirs: [
+        "frameworks/av/media/libstagefright/foundation",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+
+    sanitize: {
+        cfi: true,
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+    },
+}
diff --git a/media/libstagefright/foundation/tests/AVCUtils/AndroidTest.xml b/media/libstagefright/foundation/tests/AVCUtils/AndroidTest.xml
new file mode 100644
index 0000000..6a088a8
--- /dev/null
+++ b/media/libstagefright/foundation/tests/AVCUtils/AndroidTest.xml
@@ -0,0 +1,31 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<configuration description="Test module config for AVC Utils unit tests">
+    <option name="test-suite-tag" value="AVCUtilsUnitTest" />
+    <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
+        <option name="cleanup" value="false" />
+        <option name="push" value="AVCUtilsUnitTest->/data/local/tmp/AVCUtilsUnitTest" />
+        <option name="push-file"
+            key="https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/foundation/tests/AVCUtils/AVCUtilsUnitTest.zip?unzip=true"
+            value="/data/local/tmp/AVCUtilsUnitTest/" />
+    </target_preparer>
+
+    <test class="com.android.tradefed.testtype.GTest" >
+        <option name="native-test-device-path" value="/data/local/tmp" />
+        <option name="module-name" value="AVCUtilsUnitTest" />
+        <option name="native-test-flag" value="-P /data/local/tmp/AVCUtilsUnitTest/" />
+    </test>
+</configuration>
diff --git a/media/libstagefright/foundation/tests/AVCUtils/README.md b/media/libstagefright/foundation/tests/AVCUtils/README.md
new file mode 100644
index 0000000..609d72e
--- /dev/null
+++ b/media/libstagefright/foundation/tests/AVCUtils/README.md
@@ -0,0 +1,39 @@
+## Media Testing ##
+---
+#### AVCUtils Test
+The AVC Utility Unit Test Suite validates the avc_utils library available in libstagefright/foundation.
+
+Run the following steps to build the test suite:
+```
+m AVCUtilsUnitTest
+```
+
+The 32-bit binaries will be created in the following path: ${OUT}/data/nativetest/
+
+The 64-bit binaries will be created in the following path: ${OUT}/data/nativetest64/
+
+To test the 64-bit binary, push the binaries from nativetest64.
+```
+adb push ${OUT}/data/nativetest64/AVCUtilsUnitTest/AVCUtilsUnitTest /data/local/tmp/
+```
+
+To test the 32-bit binary, push the binaries from nativetest.
+```
+adb push ${OUT}/data/nativetest/AVCUtilsUnitTest/AVCUtilsUnitTest /data/local/tmp/
+```
+
+The resource files for the tests are taken from [here](https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/foundation/tests/AVCUtils/AVCUtilsUnitTest.zip). Download, unzip and push these files to the device for testing.
+
+```
+adb push AVCUtilsUnitTest /data/local/tmp/
+```
+
+usage: AVCUtilsUnitTest -P \<path_to_folder\>
+```
+adb shell /data/local/tmp/AVCUtilsUnitTest -P /data/local/tmp/AVCUtilsUnitTest/
+```
+Alternatively, the test can be run using the atest command.
+
+```
+atest AVCUtilsUnitTest -- --enable-module-dynamic-download=true
+```
diff --git a/media/libstagefright/foundation/tests/Android.bp b/media/libstagefright/foundation/tests/Android.bp
index f2157c9..9e67189 100644
--- a/media/libstagefright/foundation/tests/Android.bp
+++ b/media/libstagefright/foundation/tests/Android.bp
@@ -25,3 +25,32 @@
         "Utils_test.cpp",
     ],
 }
+
+cc_test {
+    name: "MetaDataBaseUnitTest",
+    test_suites: ["device-tests"],
+    gtest: true,
+
+    srcs: [
+        "MetaDataBaseUnitTest.cpp",
+    ],
+
+    shared_libs: [
+        "libutils",
+        "liblog",
+    ],
+
+    static_libs: [
+        "libstagefright",
+        "libstagefright_foundation",
+    ],
+
+    header_libs: [
+        "libmedia_headers",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+}
diff --git a/media/libstagefright/foundation/tests/MetaDataBaseUnitTest.cpp b/media/libstagefright/foundation/tests/MetaDataBaseUnitTest.cpp
new file mode 100644
index 0000000..0aed4d2
--- /dev/null
+++ b/media/libstagefright/foundation/tests/MetaDataBaseUnitTest.cpp
@@ -0,0 +1,288 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <gtest/gtest.h>
+
+#include <stdio.h>
+#include <string.h>
+#include <sys/stat.h>
+#include <fstream>
+
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MetaDataBase.h>
+
+constexpr int32_t kWidth1 = 1920;
+constexpr int32_t kHeight1 = 1080;
+constexpr int32_t kWidth2 = 1280;
+constexpr int32_t kHeight2 = 920;
+constexpr int32_t kWidth3 = 720;
+constexpr int32_t kHeight3 = 480;
+constexpr int32_t kProfile = 1;
+constexpr int32_t kLevel = 1;
+constexpr int32_t kPlatformValue = 1;
+
+// Rectangle margins
+constexpr int32_t kLeft = 100;
+constexpr int32_t kTop = 100;
+constexpr int32_t kRight = 100;
+constexpr int32_t kBottom = 100;
+
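+// Duration of 60 seconds, expressed in microseconds.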
+constexpr int64_t kDurationUs = 60000000;
+
+constexpr float kCaptureRate = 30.0;
+
+namespace android {
+
+class MetaDataBaseUnitTest : public ::testing::Test {};
+
+TEST_F(MetaDataBaseUnitTest, CreateMetaDataBaseTest) {
+    MetaDataBase *metaData = new MetaDataBase();
+    ASSERT_NE(metaData, nullptr) << "Failed to create meta data";
+
+    // Testing the copy constructor
+    MetaDataBase *metaDataCopy = new MetaDataBase(*metaData);
+    ASSERT_NE(metaDataCopy, nullptr) << "Failed to create meta data copy";
+
+    delete metaDataCopy;
+    delete metaData;
+}
+
+TEST_F(MetaDataBaseUnitTest, SetAndFindDataTest) {
+    MetaDataBase *metaData = new MetaDataBase();
+    ASSERT_NE(metaData, nullptr) << "Failed to create meta data";
+
+    // Setting the different key-value pair type for first time, overwrite
+    // expected to be false
+    bool status = metaData->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC);
+    ASSERT_FALSE(status) << "Initializing kKeyMIMEType, overwrite is expected to be false";
+
+    status = metaData->setInt32(kKeyWidth, kWidth1);
+    ASSERT_FALSE(status) << "Initializing kKeyWidth, overwrite is expected to be false";
+    status = metaData->setInt32(kKeyHeight, kHeight1);
+    ASSERT_FALSE(status) << "Initializing kKeyHeight, overwrite is expected to be false";
+    status = metaData->setInt32(kKeyVideoProfile, kProfile);
+    ASSERT_FALSE(status) << "Initializing kKeyVideoProfile, overwrite is expected to be false";
+    status = metaData->setInt32(kKeyVideoLevel, kLevel);
+    ASSERT_FALSE(status) << "Initializing kKeyVideoLevel, overwrite is expected to be false";
+
+    status = metaData->setInt64(kKeyDuration, kDurationUs);
+    ASSERT_FALSE(status) << "Initializing kKeyDuration, overwrite is expected to be false";
+
+    status = metaData->setFloat(kKeyCaptureFramerate, kCaptureRate);
+    ASSERT_FALSE(status) << "Initializing kKeyCaptureFramerate, overwrite is expected to be false";
+
+    const int32_t *platform = &kPlatformValue;
+    status = metaData->setPointer(kKeyPlatformPrivate, (void *)platform);
+    ASSERT_FALSE(status) << "Initializing kKeyPlatformPrivate, overwrite is expected to be false";
+
+    status = metaData->setRect(kKeyCropRect, kLeft, kTop, kRight, kBottom);
+    ASSERT_FALSE(status) << "Initializing kKeyCropRect, overwrite is expected to be false";
+
+    // Dump to log for reference
+    metaData->dumpToLog();
+
+    // Find the data which was set
+    const char *mime;
+    status = metaData->findCString(kKeyMIMEType, &mime);
+    ASSERT_TRUE(status) << "kKeyMIMEType key does not exists in metadata";
+    ASSERT_STREQ(mime, MEDIA_MIMETYPE_VIDEO_AVC) << "Incorrect mime type returned";
+
+    int32_t width, height, profile, level;
+    status = metaData->findInt32(kKeyWidth, &width);
+    ASSERT_TRUE(status) << "kKeyWidth key does not exists in metadata";
+    ASSERT_EQ(width, kWidth1) << "Incorrect value of width returned";
+
+    status = metaData->findInt32(kKeyHeight, &height);
+    ASSERT_TRUE(status) << "kKeyHeight key does not exists in metadata";
+    ASSERT_EQ(height, kHeight1) << "Incorrect value of height returned";
+
+    status = metaData->findInt32(kKeyVideoProfile, &profile);
+    ASSERT_TRUE(status) << "kKeyVideoProfile key does not exists in metadata";
+    ASSERT_EQ(profile, kProfile) << "Incorrect value of profile returned";
+
+    status = metaData->findInt32(kKeyVideoLevel, &level);
+    ASSERT_TRUE(status) << "kKeyVideoLevel key does not exists in metadata";
+    ASSERT_EQ(level, kLevel) << "Incorrect value of level returned";
+
+    int64_t duration;
+    status = metaData->findInt64(kKeyDuration, &duration);
+    ASSERT_TRUE(status) << "kKeyDuration key does not exists in metadata";
+    ASSERT_EQ(duration, kDurationUs) << "Incorrect value of duration returned";
+
+    float frameRate;
+    status = metaData->findFloat(kKeyCaptureFramerate, &frameRate);
+    ASSERT_TRUE(status) << "kKeyCaptureFramerate key does not exists in metadata";
+    ASSERT_EQ(frameRate, kCaptureRate) << "Incorrect value of captureFrameRate returned";
+
+    int32_t top, bottom, left, right;
+    status = metaData->findRect(kKeyCropRect, &left, &top, &right, &bottom);
+    ASSERT_TRUE(status) << "kKeyCropRect key does not exists in metadata";
+    ASSERT_EQ(left, kLeft) << "Incorrect value of left margin returned";
+    ASSERT_EQ(top, kTop) << "Incorrect value of top margin returned";
+    ASSERT_EQ(right, kRight) << "Incorrect value of right margin returned";
+    ASSERT_EQ(bottom, kBottom) << "Incorrect value of bottom margin returned";
+
+    void *platformValue;
+    status = metaData->findPointer(kKeyPlatformPrivate, &platformValue);
+    ASSERT_TRUE(status) << "kKeyPlatformPrivate key does not exists in metadata";
+    ASSERT_EQ(platformValue, &kPlatformValue) << "Incorrect value of pointer returned";
+
+    // Check for the key which is not added to metadata
+    int32_t angle;
+    status = metaData->findInt32(kKeyRotation, &angle);
+    ASSERT_FALSE(status) << "Value for an invalid key is returned when the key is not set";
+
+    delete (metaData);
+}
+
+TEST_F(MetaDataBaseUnitTest, OverWriteFunctionalityTest) {
+    MetaDataBase *metaData = new MetaDataBase();
+    ASSERT_NE(metaData, nullptr) << "Failed to create meta data";
+
+    // set/set/read to check first overwrite operation
+    bool status = metaData->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC);
+    ASSERT_FALSE(status) << "Initializing kKeyMIMEType, overwrite is expected to be false";
+    // Overwrite the value
+    status = metaData->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_HEVC);
+    ASSERT_TRUE(status) << "Setting kKeyMIMEType again, overwrite is expected to be true";
+    // Check the value
+    const char *mime;
+    status = metaData->findCString(kKeyMIMEType, &mime);
+    ASSERT_TRUE(status) << "kKeyMIMEType key does not exists in metadata";
+    ASSERT_STREQ(mime, MEDIA_MIMETYPE_VIDEO_HEVC) << "Mime value is not overwritten";
+
+    // set/set/set/read to check second overwrite operation
+    status = metaData->setInt32(kKeyWidth, kWidth1);
+    ASSERT_FALSE(status) << "Initializing kKeyWidth, overwrite is expected to be false";
+    status = metaData->setInt32(kKeyHeight, kHeight1);
+    ASSERT_FALSE(status) << "Initializing kKeyHeight, overwrite is expected to be false";
+    // Overwrite the value
+    status = metaData->setInt32(kKeyWidth, kWidth2);
+    ASSERT_TRUE(status) << "Setting kKeyWidth again, overwrite is expected to be true";
+    status = metaData->setInt32(kKeyHeight, kHeight2);
+    ASSERT_TRUE(status) << "Setting kKeyHeight again, overwrite is expected to be true";
+    // Overwrite the value again
+    status = metaData->setInt32(kKeyWidth, kWidth3);
+    ASSERT_TRUE(status) << "Setting kKeyWidth again, overwrite is expected to be true";
+    status = metaData->setInt32(kKeyHeight, kHeight3);
+    ASSERT_TRUE(status) << "Setting kKeyHeight again, overwrite is expected to be true";
+    // Check the value
+    int32_t width, height;
+    status = metaData->findInt32(kKeyWidth, &width);
+    ASSERT_TRUE(status) << "kKeyWidth key does not exists in metadata";
+    ASSERT_EQ(width, kWidth3) << "Value of width is not overwritten";
+
+    status = metaData->findInt32(kKeyHeight, &height);
+    ASSERT_TRUE(status) << "kKeyHeight key does not exists in metadata";
+    ASSERT_EQ(height, kHeight3) << "Value of height is not overwritten";
+
+    delete (metaData);
+}
+
+TEST_F(MetaDataBaseUnitTest, RemoveKeyTest) {
+    MetaDataBase *metaData = new MetaDataBase();
+    ASSERT_NE(metaData, nullptr) << "Failed to create meta data";
+
+    bool status = metaData->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC);
+    ASSERT_FALSE(status) << "Initializing kKeyMIMEType, overwrite is expected to be false";
+    // Query the key
+    status = metaData->hasData(kKeyMIMEType);
+    ASSERT_TRUE(status) << "MetaData does not have the mime key";
+
+    status = metaData->remove(kKeyMIMEType);
+    ASSERT_TRUE(status) << "Failed to remove the kKeyMIMEType key";
+
+    // Query the key
+    status = metaData->hasData(kKeyMIMEType);
+    ASSERT_FALSE(status) << "MetaData has mime key after removing it, expected to be false";
+
+    // Remove the non existing key
+    status = metaData->remove(kKeyMIMEType);
+    ASSERT_FALSE(status) << "Removed the non existing key";
+
+    // Check overwriting the removed key
+    status = metaData->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_HEVC);
+    ASSERT_FALSE(status) << "Overwrite should be false since the key was removed";
+
+    status = metaData->setInt32(kKeyWidth, kWidth1);
+    ASSERT_FALSE(status) << "Initializing kKeyWidth, overwrite is expected to be false";
+
+    // Clear whole metadata
+    metaData->clear();
+
+    // Check finding key after clearing the metadata
+    int32_t width;
+    status = metaData->findInt32(kKeyWidth, &width);
+    ASSERT_FALSE(status) << "MetaData found kKeyWidth key after clearing all the items in it, "
+                            "expected to be false";
+
+    // Query the key
+    status = metaData->hasData(kKeyWidth);
+    ASSERT_FALSE(status)
+            << "MetaData has width key after clearing all the items in it, expected to be false";
+
+    status = metaData->hasData(kKeyMIMEType);
+    ASSERT_FALSE(status)
+            << "MetaData has mime key after clearing all the items in it, expected to be false";
+
+    // Check removing key after clearing the metadata
+    status = metaData->remove(kKeyMIMEType);
+    ASSERT_FALSE(status) << "Removed the key, after clearing the metadata";
+
+    // Checking set after clearing the metadata
+    status = metaData->setInt32(kKeyWidth, kWidth1);
+    ASSERT_FALSE(status) << "Overwrite should be false since the metadata was cleared";
+
+    status = metaData->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_HEVC);
+    ASSERT_FALSE(status) << "Overwrite should be false since the metadata was cleared";
+
+    delete (metaData);
+}
+
+TEST_F(MetaDataBaseUnitTest, ConvertToStringTest) {
+    MetaDataBase *metaData = new MetaDataBase();
+    ASSERT_NE(metaData, nullptr) << "Failed to create meta data";
+
+    String8 info = metaData->toString();
+    ASSERT_EQ(info.length(), 0) << "Empty MetaData length is non-zero: " << info.length();
+
+    bool status = metaData->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC);
+    ASSERT_FALSE(status) << "Initializing kKeyMIMEType, overwrite is expected to be false";
+
+    status = metaData->setInt32(kKeyWidth, kWidth1);
+    ASSERT_FALSE(status) << "Initializing kKeyWidth, overwrite is expected to be false";
+    status = metaData->setInt32(kKeyHeight, kHeight1);
+    ASSERT_FALSE(status) << "Initializing kKeyHeight, overwrite is expected to be false";
+    status = metaData->setInt32(kKeyVideoProfile, kProfile);
+    ASSERT_FALSE(status) << "Initializing kKeyVideoProfile, overwrite is expected to be false";
+    status = metaData->setInt32(kKeyVideoLevel, kLevel);
+    ASSERT_FALSE(status) << "Initializing kKeyVideoLevel, overwrite is expected to be false";
+
+    info = metaData->toString();
+    ASSERT_GT(info.length(), 0) << "MetaData contains no information";
+
+    // Dump to log for reference
+    metaData->dumpToLog();
+
+    // Clear whole metadata
+    metaData->clear();
+
+    info = metaData->toString();
+    ASSERT_EQ(info.length(), 0) << "MetaData length is non-zero after clearing it: "
+                                << info.length();
+
+    delete (metaData);
+}
+
+}  // namespace android
diff --git a/media/libstagefright/foundation/tests/OpusHeader/Android.bp b/media/libstagefright/foundation/tests/OpusHeader/Android.bp
new file mode 100644
index 0000000..ed3298c
--- /dev/null
+++ b/media/libstagefright/foundation/tests/OpusHeader/Android.bp
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+cc_test {
+    name: "OpusHeaderTest",
+    test_suites: ["device-tests"],
+    gtest: true,
+
+    srcs: [
+        "OpusHeaderTest.cpp",
+    ],
+
+    shared_libs: [
+        "liblog",
+    ],
+
+    static_libs: [
+        "libstagefright_foundation",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+
+    sanitize: {
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+        cfi: true,
+    },
+}
diff --git a/media/libstagefright/foundation/tests/OpusHeader/AndroidTest.xml b/media/libstagefright/foundation/tests/OpusHeader/AndroidTest.xml
new file mode 100644
index 0000000..afee16a
--- /dev/null
+++ b/media/libstagefright/foundation/tests/OpusHeader/AndroidTest.xml
@@ -0,0 +1,31 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<configuration description="Test module config for opus header unit tests">
+    <option name="test-suite-tag" value="OpusHeaderTest" />
+    <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
+        <option name="cleanup" value="true" />
+        <option name="push" value="OpusHeaderTest->/data/local/tmp/OpusHeaderTest" />
+        <option name="push-file"
+            key="https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/foundation/tests/OpusHeader/OpusHeader.zip?unzip=true"
+            value="/data/local/tmp/OpusHeaderTestRes/" />
+    </target_preparer>
+
+    <test class="com.android.tradefed.testtype.GTest" >
+        <option name="native-test-device-path" value="/data/local/tmp" />
+        <option name="module-name" value="OpusHeaderTest" />
+        <option name="native-test-flag" value="-P /data/local/tmp/OpusHeaderTestRes/" />
+    </test>
+</configuration>
\ No newline at end of file
diff --git a/media/libstagefright/foundation/tests/OpusHeader/OpusHeaderTest.cpp b/media/libstagefright/foundation/tests/OpusHeader/OpusHeaderTest.cpp
new file mode 100644
index 0000000..e39c915
--- /dev/null
+++ b/media/libstagefright/foundation/tests/OpusHeader/OpusHeaderTest.cpp
@@ -0,0 +1,342 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "OpusHeaderTest"
+#include <utils/Log.h>
+
+#include <fstream>
+#include <stdio.h>
+#include <string.h>
+
+#include <media/stagefright/foundation/OpusHeader.h>
+
+#include "OpusHeaderTestEnvironment.h"
+
+using namespace android;
+
+#define OUTPUT_FILE_NAME "/data/local/tmp/OpusOutput"
+
+// Opus in WebM is a well-known, yet under-documented, format. The codec private data
+// of the track is an Opus Ogg header (https://tools.ietf.org/html/rfc7845#section-5.1).
+// Channel mapping offset in the Opus header.
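+// Per RFC 7845, the stream map starts at byte 21 of the OpusHead block: the 19 fixed
+// OpusHead bytes followed by the stream-count and coupled-stream-count bytes.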
+constexpr size_t kOpusHeaderStreamMapOffset = 21;
+constexpr size_t kMaxOpusHeaderSize = 100;
+// AOPUSHDR + AOPUSHDRLength +
+// (8 + 8 ) +
+// Header(csd) + num_streams + num_coupled + 1
+// (19 + 1 + 1 + 1) +
+// AOPUSDLY + AOPUSDLYLength + DELAY + AOPUSPRL + AOPUSPRLLength + PRL
+// (8 + 8 + 8 + 8 + 8 + 8)
+// = 86
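+// Mono/stereo headers (mapping family 0) omit the stream map entirely, so the
+// (86 + channels) size check in the write test below only applies when
+// channel_mapping is set.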
+constexpr size_t kOpusHeaderChannelMapOffset = 86;
+constexpr uint32_t kOpusSampleRate = 48000;
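+// 80 ms of seek preroll, expressed in nanoseconds.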
+constexpr uint64_t kOpusSeekPrerollNs = 80000000;
+constexpr int64_t kNsecPerSec = 1000000000ll;
+
+// Opus uses Vorbis channel mapping, and Vorbis channel mapping specifies
+// mappings for up to 8 channels. This information is part of the Vorbis I
+// Specification:
+// http://www.xiph.org/vorbis/doc/Vorbis_I_spec.html
+constexpr int kMaxChannels = 8;
+constexpr uint8_t kOpusChannelMap[kMaxChannels][kMaxChannels] = {
+        {0},
+        {0, 1},
+        {0, 2, 1},
+        {0, 1, 2, 3},
+        {0, 4, 1, 2, 3},
+        {0, 4, 1, 2, 3, 5},
+        {0, 4, 1, 2, 3, 5, 6},
+        {0, 6, 1, 2, 3, 4, 5, 7},
+};
+
+static OpusHeaderTestEnvironment *gEnv = nullptr;
+
+class OpusHeaderTest {
+  public:
+    OpusHeaderTest() : mInputBuffer(nullptr) {}
+
+    ~OpusHeaderTest() {
+        if (mEleStream.is_open()) mEleStream.close();
+        if (mInputBuffer) {
+            free(mInputBuffer);
+            mInputBuffer = nullptr;
+        }
+    }
+    ifstream mEleStream;
+    uint8_t *mInputBuffer;
+};
+
+class OpusHeaderParseTest : public OpusHeaderTest,
+                            public ::testing::TestWithParam<
+                                    tuple<string /* InputFileName */, int32_t /* ChannelCount */,
+                                          bool /* isHeaderValid */, bool /* isCodecDelayValid */,
+                                          bool /* isSeekPreRollValid */, bool /* isInputValid */>> {
+};
+
+class OpusHeaderWriteTest
+    : public OpusHeaderTest,
+      public ::testing::TestWithParam<tuple<int32_t /* ChannelCount */, int32_t /* skipSamples */,
+                                            string /* referenceFile */>> {};
+
+TEST_P(OpusHeaderWriteTest, WriteTest) {
+    tuple<int32_t, int32_t, string> params = GetParam();
+    OpusHeader writtenHeader;
+    memset(&writtenHeader, 0, sizeof(writtenHeader));
+    int32_t channels = get<0>(params);
+    writtenHeader.channels = channels;
+    writtenHeader.num_streams = channels;
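+    // Channel mapping family (RFC 7845): 0 for mono/stereo, 1 for multichannel
+    // (Vorbis channel order, up to 8 channels), 255 otherwise.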
+    writtenHeader.channel_mapping = ((channels > 8) ? 255 : (channels > 2));
+    int32_t skipSamples = get<1>(params);
+    string referenceFileName = gEnv->getRes() + get<2>(params);
+    writtenHeader.skip_samples = skipSamples;
+    uint64_t codecDelayNs = skipSamples * kNsecPerSec / kOpusSampleRate;
+    uint8_t headerData[kMaxOpusHeaderSize];
+    int32_t headerSize = WriteOpusHeaders(writtenHeader, kOpusSampleRate, headerData,
+                                          sizeof(headerData), codecDelayNs, kOpusSeekPrerollNs);
+    ASSERT_GT(headerSize, 0) << "failed to generate Opus header";
+    ASSERT_LE(headerSize, kMaxOpusHeaderSize)
+            << "Invalid header written. Header size can't exceed kMaxOpusHeaderSize";
+
+    ofstream ostrm;
+    ostrm.open(OUTPUT_FILE_NAME, ofstream::binary);
+    ASSERT_TRUE(ostrm.is_open()) << "Failed to open output file " << OUTPUT_FILE_NAME;
+    ostrm.write(reinterpret_cast<char *>(headerData), headerSize);
+    ostrm.close();
+
+    mEleStream.open(referenceFileName, ifstream::binary);
+    ASSERT_EQ(mEleStream.is_open(), true) << "Failed to open referenceFileName " << get<2>(params);
+
+    struct stat buf;
+    int32_t statStatus = stat(referenceFileName.c_str(), &buf);
+    ASSERT_EQ(statStatus, 0) << "Unable to get file properties";
+
+    size_t fileSize = buf.st_size;
+    mInputBuffer = (uint8_t *)malloc(fileSize);
+    ASSERT_NE(mInputBuffer, nullptr) << "Insufficient memory. Malloc failed for size " << fileSize;
+
+    mEleStream.read(reinterpret_cast<char *>(mInputBuffer), fileSize);
+    ASSERT_EQ(mEleStream.gcount(), fileSize) << "mEleStream.gcount() != fileSize";
+
+    ASSERT_EQ(fileSize, headerSize)
+            << "Mismatch in size between header generated and reference header";
+    int32_t match = memcmp(reinterpret_cast<char *>(mInputBuffer),
+                           reinterpret_cast<char *>(headerData), fileSize);
+    ASSERT_EQ(match, 0) << "Opus header does not match reference file: " << referenceFileName;
+
+    size_t opusHeadSize = 0;
+    size_t codecDelayBufSize = 0;
+    size_t seekPreRollBufSize = 0;
+    void *opusHeadBuf = nullptr;
+    void *codecDelayBuf = nullptr;
+    void *seekPreRollBuf = nullptr;
+    bool status = GetOpusHeaderBuffers(headerData, headerSize, &opusHeadBuf, &opusHeadSize,
+                                       &codecDelayBuf, &codecDelayBufSize, &seekPreRollBuf,
+                                       &seekPreRollBufSize);
+    ASSERT_TRUE(status) << "Encountered error in GetOpusHeaderBuffers";
+
+    uint64_t value = *((uint64_t *)codecDelayBuf);
+    ASSERT_EQ(value, codecDelayNs);
+
+    value = *((uint64_t *)seekPreRollBuf);
+    ASSERT_EQ(value, kOpusSeekPrerollNs);
+
+    OpusHeader parsedHeader;
+    status = ParseOpusHeader((uint8_t *)opusHeadBuf, opusHeadSize, &parsedHeader);
+    ASSERT_TRUE(status) << "Encountered error while Parsing Opus Header.";
+
+    ASSERT_EQ(writtenHeader.channels, parsedHeader.channels)
+            << "Invalid header generated. Mismatch between channel counts";
+
+    ASSERT_EQ(writtenHeader.skip_samples, parsedHeader.skip_samples)
+            << "Mismatch between the number of skipSamples written "
+               "and the number of skipSamples obtained after parsing";
+
+    ASSERT_EQ(writtenHeader.channel_mapping, parsedHeader.channel_mapping)
+            << "Mismatch between the channelMapping written "
+               "and the channelMapping obtained after parsing";
+
+    if (parsedHeader.channel_mapping) {
+        ASSERT_GT(parsedHeader.channels, 2);
+        ASSERT_EQ(writtenHeader.num_streams, parsedHeader.num_streams)
+                << "Invalid header generated. Mismatch between stream counts";
+
+        ASSERT_EQ(writtenHeader.num_coupled, parsedHeader.num_coupled)
+                << "Invalid header generated. Mismatch between coupled-stream counts";
+
+        ASSERT_EQ(parsedHeader.num_coupled + parsedHeader.num_streams, parsedHeader.channels);
+
+        ASSERT_LE(parsedHeader.num_coupled, parsedHeader.num_streams)
+                << "Invalid header generated. Number of coupled streams cannot be greater than "
+                   "number "
+                   "of streams.";
+
+        ASSERT_EQ(headerSize, kOpusHeaderChannelMapOffset + writtenHeader.channels)
+                << "Invalid header written. Header size should be equal to 86 + "
+                   "writtenHeader.channels";
+
+        uint8_t mappedChannelNumber;
+        for (int32_t channelNumber = 0; channelNumber < channels; channelNumber++) {
+            mappedChannelNumber = *(reinterpret_cast<uint8_t *>(opusHeadBuf) +
+                                    kOpusHeaderStreamMapOffset + channelNumber);
+            ASSERT_LT(mappedChannelNumber, channels) << "Invalid header generated. Channel mapping "
+                                                        "cannot be greater than channel count.";
+
+            ASSERT_EQ(mappedChannelNumber, kOpusChannelMap[channels - 1][channelNumber])
+                    << "Invalid header generated. Channel mapping is not as per specification.";
+        }
+    } else {
+        ASSERT_LE(parsedHeader.channels, 2);
+    }
+}
+
+TEST_P(OpusHeaderParseTest, ParseTest) {
+    tuple<string, int32_t, bool, bool, bool, bool> params = GetParam();
+    string inputFileName = gEnv->getRes() + get<0>(params);
+    mEleStream.open(inputFileName, ifstream::binary);
+    ASSERT_EQ(mEleStream.is_open(), true) << "Failed to open inputfile " << get<0>(params);
+    bool isHeaderValid = get<2>(params);
+    bool isCodecDelayValid = get<3>(params);
+    bool isSeekPreRollValid = get<4>(params);
+    bool isInputValid = get<5>(params);
+
+    struct stat buf;
+    stat(inputFileName.c_str(), &buf);
+    size_t fileSize = buf.st_size;
+    mInputBuffer = (uint8_t *)malloc(fileSize);
+    ASSERT_NE(mInputBuffer, nullptr) << "Insufficient memory. Malloc failed for size " << fileSize;
+
+    mEleStream.read(reinterpret_cast<char *>(mInputBuffer), fileSize);
+    ASSERT_EQ(mEleStream.gcount(), fileSize) << "mEleStream.gcount() != fileSize";
+
+    OpusHeader header;
+    size_t opusHeadSize = 0;
+    size_t codecDelayBufSize = 0;
+    size_t seekPreRollBufSize = 0;
+    void *opusHeadBuf = nullptr;
+    void *codecDelayBuf = nullptr;
+    void *seekPreRollBuf = nullptr;
+    bool status = GetOpusHeaderBuffers(mInputBuffer, fileSize, &opusHeadBuf, &opusHeadSize,
+                                       &codecDelayBuf, &codecDelayBufSize, &seekPreRollBuf,
+                                       &seekPreRollBufSize);
+    if (!isHeaderValid) {
+        ASSERT_EQ(opusHeadBuf, nullptr);
+    } else {
+        ASSERT_NE(opusHeadBuf, nullptr);
+    }
+    if (!isCodecDelayValid) {
+        ASSERT_EQ(codecDelayBuf, nullptr);
+    } else {
+        ASSERT_NE(codecDelayBuf, nullptr);
+    }
+    if (!isSeekPreRollValid) {
+        ASSERT_EQ(seekPreRollBuf, nullptr);
+    } else {
+        ASSERT_NE(seekPreRollBuf, nullptr);
+    }
+    if (!status) {
+        ASSERT_FALSE(isInputValid) << "GetOpusHeaderBuffers failed";
+        return;
+    }
+
+    status = ParseOpusHeader((uint8_t *)opusHeadBuf, opusHeadSize, &header);
+
+    if (status) {
+        ASSERT_TRUE(isInputValid) << "Parse opus header didn't fail for invalid input";
+    } else {
+        ASSERT_FALSE(isInputValid);
+        return;
+    }
+
+    int32_t channels = get<1>(params);
+    ASSERT_EQ(header.channels, channels) << "Parser returned invalid channel count";
+    ASSERT_LE(header.channels, kMaxChannels);
+
+    ASSERT_LE(header.num_coupled, header.num_streams)
+            << "Invalid header. Number of coupled streams cannot be greater than the number "
+               "of streams.";
+
+    ASSERT_EQ(header.num_coupled + header.num_streams, header.channels);
+
+    if (header.channel_mapping) {
+        uint8_t mappedChannelNumber;
+        for (int32_t channelNumber = 0; channelNumber < channels; channelNumber++) {
+            mappedChannelNumber = *(reinterpret_cast<uint8_t *>(opusHeadBuf) +
+                                    kOpusHeaderStreamMapOffset + channelNumber);
+            ASSERT_LT(mappedChannelNumber, channels)
+                    << "Invalid header. Channel mapping cannot be greater than channel count.";
+
+            ASSERT_EQ(mappedChannelNumber, kOpusChannelMap[channels - 1][channelNumber])
+                    << "Invalid header generated. Channel mapping "
+                       "is not as per specification.";
+        }
+    }
+}
+
+INSTANTIATE_TEST_SUITE_P(
+        OpusHeaderTestAll, OpusHeaderWriteTest,
+        ::testing::Values(make_tuple(1, 312, "output_channels_1skipSamples_312.opus"),
+                          make_tuple(2, 312, "output_channels_2skipSamples_312.opus"),
+                          make_tuple(5, 312, "output_channels_5skipSamples_312.opus"),
+                          make_tuple(6, 312, "output_channels_6skipSamples_312.opus"),
+                          make_tuple(1, 0, "output_channels_1skipSamples_0.opus"),
+                          make_tuple(2, 0, "output_channels_2skipSamples_0.opus"),
+                          make_tuple(5, 0, "output_channels_5skipSamples_0.opus"),
+                          make_tuple(6, 0, "output_channels_6skipSamples_0.opus"),
+                          make_tuple(1, 624, "output_channels_1skipSamples_624.opus"),
+                          make_tuple(2, 624, "output_channels_2skipSamples_624.opus"),
+                          make_tuple(5, 624, "output_channels_5skipSamples_624.opus"),
+                          make_tuple(6, 624, "output_channels_6skipSamples_624.opus")));
+
+INSTANTIATE_TEST_SUITE_P(
+        OpusHeaderTestAll, OpusHeaderParseTest,
+        ::testing::Values(
+                make_tuple("2ch_valid_size83B.opus", 2, true, true, true, true),
+                make_tuple("3ch_valid_size88B.opus", 3, true, true, true, true),
+                make_tuple("5ch_valid.opus", 5, true, false, false, true),
+                make_tuple("6ch_valid.opus", 6, true, false, false, true),
+                make_tuple("1ch_valid.opus", 1, true, false, false, true),
+                make_tuple("2ch_valid.opus", 2, true, false, false, true),
+                make_tuple("3ch_invalid_size.opus", 3, true, true, true, false),
+                make_tuple("3ch_invalid_streams.opus", 3, true, true, true, false),
+                make_tuple("5ch_invalid_channelmapping.opus", 5, true, false, false, false),
+                make_tuple("5ch_invalid_coupledstreams.opus", 5, true, false, false, false),
+                make_tuple("6ch_invalid_channelmapping.opus", 6, true, false, false, false),
+                make_tuple("9ch_invalid_channels.opus", 9, true, true, true, false),
+                make_tuple("2ch_invalid_header.opus", 2, false, false, false, false),
+                make_tuple("2ch_invalid_headerlength_16.opus", 2, false, false, false, false),
+                make_tuple("2ch_invalid_headerlength_256.opus", 2, false, false, false, false),
+                make_tuple("2ch_invalid_size.opus", 2, false, false, false, false),
+                make_tuple("3ch_invalid_channelmapping_0.opus", 3, true, true, true, false),
+                make_tuple("3ch_invalid_coupledstreams.opus", 3, true, true, true, false),
+                make_tuple("3ch_invalid_headerlength.opus", 3, true, true, true, false),
+                make_tuple("3ch_invalid_headerSize1.opus", 3, false, false, false, false),
+                make_tuple("3ch_invalid_headerSize2.opus", 3, false, false, false, false),
+                make_tuple("3ch_invalid_headerSize3.opus", 3, false, false, false, false),
+                make_tuple("3ch_invalid_nodelay.opus", 3, false, false, false, false),
+                make_tuple("3ch_invalid_nopreroll.opus", 3, false, false, false, false)));
+
+int main(int argc, char **argv) {
+    gEnv = new OpusHeaderTestEnvironment();
+    ::testing::AddGlobalTestEnvironment(gEnv);
+    ::testing::InitGoogleTest(&argc, argv);
+    int status = gEnv->initFromOptions(argc, argv);
+    if (status == 0) {
+        status = RUN_ALL_TESTS();
+        ALOGD("Opus Header Test Result = %d\n", status);
+    }
+    return status;
+}
diff --git a/media/libstagefright/foundation/tests/OpusHeader/OpusHeaderTestEnvironment.h b/media/libstagefright/foundation/tests/OpusHeader/OpusHeaderTestEnvironment.h
new file mode 100644
index 0000000..d0163c3
--- /dev/null
+++ b/media/libstagefright/foundation/tests/OpusHeader/OpusHeaderTestEnvironment.h
@@ -0,0 +1,73 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __OPUS_HEADER_TEST_ENVIRONMENT_H__
+#define __OPUS_HEADER_TEST_ENVIRONMENT_H__
+
+#include <gtest/gtest.h>
+
+#include <getopt.h>
+
+using namespace std;
+
+class OpusHeaderTestEnvironment : public ::testing::Environment {
+  public:
+    OpusHeaderTestEnvironment() : res("/data/local/tmp/") {}
+
+    // Parses the command line arguments
+    int initFromOptions(int argc, char **argv);
+
+    void setRes(const char *_res) { res = _res; }
+
+    const string getRes() const { return res; }
+
+  private:
+    string res;
+};
+
+int OpusHeaderTestEnvironment::initFromOptions(int argc, char **argv) {
+    static struct option options[] = {{"path", required_argument, 0, 'P'}, {0, 0, 0, 0}};
+
+    while (true) {
+        int index = 0;
+        int c = getopt_long(argc, argv, "P:", options, &index);
+        if (c == -1) {
+            break;
+        }
+
+        switch (c) {
+            case 'P': {
+                setRes(optarg);
+                break;
+            }
+            default:
+                break;
+        }
+    }
+
+    if (optind < argc) {
+        fprintf(stderr,
+                "unrecognized option: %s\n\n"
+                "usage: %s <gtest options> <test options>\n\n"
+                "test options are:\n\n"
+                "-P, --path: Resource files directory location\n",
+                argv[optind ?: 1], argv[0]);
+        return 2;
+    }
+    return 0;
+}
+
+#endif  // __OPUS_HEADER_TEST_ENVIRONMENT_H__
diff --git a/media/libstagefright/foundation/tests/OpusHeader/README.md b/media/libstagefright/foundation/tests/OpusHeader/README.md
new file mode 100644
index 0000000..860c827
--- /dev/null
+++ b/media/libstagefright/foundation/tests/OpusHeader/README.md
@@ -0,0 +1,39 @@
+## Media Testing ##
+---
+#### Opus Header
+The OpusHeader Test Suite validates the Opus header writer and parser available in libstagefright.
+
+Run the following steps to build the test suite:
+```
+m OpusHeaderTest
+```
+
+The 32-bit binaries will be created in the following path: ${OUT}/data/nativetest/
+
+The 64-bit binaries will be created in the following path: ${OUT}/data/nativetest64/
+
+To test the 64-bit binary, push it from nativetest64.
+```
+adb push ${OUT}/data/nativetest64/OpusHeaderTest/OpusHeaderTest /data/local/tmp/
+```
+
+To test the 32-bit binary, push it from nativetest.
+```
+adb push ${OUT}/data/nativetest/OpusHeaderTest/OpusHeaderTest /data/local/tmp/
+```
+
+The resource file for the tests is taken from [here](https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/foundation/tests/OpusHeader/OpusHeader.zip). Download, unzip and push these files to the device for testing.
+
+```
+adb push OpusHeader /data/local/tmp/
+```
+
+usage: OpusHeaderTest -P \<path_to_folder\>
+```
+adb shell /data/local/tmp/OpusHeaderTest -P /data/local/tmp/OpusHeader/
+```
+Alternatively, the test can also be run using the atest command.
+
+```
+atest OpusHeaderTest -- --enable-module-dynamic-download=true
+```
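+
+Since OpusHeaderTest is a standard gtest binary, a subset of tests can also be selected with `--gtest_filter` (a sketch, assuming the default parameterized-test naming), for example to run only the header-writing tests:
+
+```
+adb shell /data/local/tmp/OpusHeaderTest -P /data/local/tmp/OpusHeader/ --gtest_filter="OpusHeaderTestAll/OpusHeaderWriteTest.*"
+```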
diff --git a/media/libstagefright/foundation/tests/TypeTraits_test.cpp b/media/libstagefright/foundation/tests/TypeTraits_test.cpp
index 1e2049d..d5383d1 100644
--- a/media/libstagefright/foundation/tests/TypeTraits_test.cpp
+++ b/media/libstagefright/foundation/tests/TypeTraits_test.cpp
@@ -30,7 +30,7 @@
     enum IA : int32_t { };
 };
 
-// =========== basic sanity tests for type-support templates
+// =========== basic tests for type-support templates
 TEST_F(TypeTraitsTest, StaticTests) {
 
     // ============ is_integral_or_enum
diff --git a/media/libstagefright/foundation/tests/colorutils/Android.bp b/media/libstagefright/foundation/tests/colorutils/Android.bp
new file mode 100644
index 0000000..d77f405
--- /dev/null
+++ b/media/libstagefright/foundation/tests/colorutils/Android.bp
@@ -0,0 +1,47 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+cc_test {
+    name: "ColorUtilsTest",
+    gtest: true,
+
+    srcs: [
+        "ColorUtilsTest.cpp",
+    ],
+
+    shared_libs: [
+        "liblog",
+        "libutils",
+        "libmediandk",
+    ],
+
+    static_libs: [
+        "libstagefright_foundation",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+
+    sanitize: {
+        cfi: true,
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+    },
+}
diff --git a/media/libstagefright/foundation/tests/colorutils/ColorUtilsTest.cpp b/media/libstagefright/foundation/tests/colorutils/ColorUtilsTest.cpp
new file mode 100644
index 0000000..0d802b4
--- /dev/null
+++ b/media/libstagefright/foundation/tests/colorutils/ColorUtilsTest.cpp
@@ -0,0 +1,773 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ColorUtilsTest"
+#include <utils/Log.h>
+
+#include <gtest/gtest.h>
+
+#include <stdio.h>
+
+#include <media/NdkMediaFormat.h>
+#include <media/NdkMediaFormatPriv.h>
+#include <media/stagefright/MediaCodecConstants.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ColorUtils.h>
+
+const size_t kHDRBufferSize = 25;
+const uint16_t kHDRInfoTestValue1 = 420;
+const uint16_t kHDRInfoTestValue2 = 42069;
+
+using namespace android;
+
+typedef ColorAspects CA;
+
+class ColorRangeTest : public ::testing::TestWithParam</* ColorRange */ CA::Range> {
+  public:
+    ColorRangeTest() { mRange = GetParam(); };
+
+    CA::Range mRange;
+};
+
+class ColorTransferTest : public ::testing::TestWithParam</* ColorTransfer */ CA::Transfer> {
+  public:
+    ColorTransferTest() { mTransfer = GetParam(); };
+
+    CA::Transfer mTransfer;
+};
+
+class ColorStandardTest : public ::testing::TestWithParam<std::pair<
+                                  /* Primaries */ CA::Primaries,
+                                  /* MatrixCoeffs */ CA::MatrixCoeffs>> {
+  public:
+    ColorStandardTest() {
+        mPrimaries = GetParam().first;
+        mMatrixCoeffs = GetParam().second;
+    };
+
+    CA::Primaries mPrimaries;
+    CA::MatrixCoeffs mMatrixCoeffs;
+};
+
+class IsoToPlatformAspectsTest : public ::testing::TestWithParam<std::tuple<
+                                         /* Primaries */ CA::Primaries,
+                                         /* Transfer */ CA::Transfer,
+                                         /* MatrixCoeffs */ CA::MatrixCoeffs,
+                                         /* Standard */ int32_t,
+                                         /* Transfer */ int32_t>> {
+  public:
+    IsoToPlatformAspectsTest() {
+        mPrimaries = std::get<0>(GetParam());
+        mTransfer = std::get<1>(GetParam());
+        mMatrixCoeffs = std::get<2>(GetParam());
+        mPlatformStandard = std::get<3>(GetParam());
+        mPlatformTransfer = std::get<4>(GetParam());
+    };
+
+    CA::Primaries mPrimaries;
+    CA::Transfer mTransfer;
+    CA::MatrixCoeffs mMatrixCoeffs;
+    int32_t mPlatformStandard;
+    int32_t mPlatformTransfer;
+};
+
+class ColorAspectsTest : public ::testing::TestWithParam<std::tuple<
+                                 /* Primaries */ CA::Primaries,
+                                 /* ColorTransfer */ CA::Transfer,
+                                 /* MatrixCoeffs */ CA::MatrixCoeffs,
+                                 /* ColorRange */ CA::Range,
+                                 /* ColorStandard */ CA::Standard>> {
+  public:
+    ColorAspectsTest() {
+        mPrimaries = std::get<0>(GetParam());
+        mTransfer = std::get<1>(GetParam());
+        mMatrixCoeffs = std::get<2>(GetParam());
+        mRange = std::get<3>(GetParam());
+        mStandard = std::get<4>(GetParam());
+    };
+
+    CA::Primaries mPrimaries;
+    CA::Transfer mTransfer;
+    CA::MatrixCoeffs mMatrixCoeffs;
+    CA::Range mRange;
+    CA::Standard mStandard;
+};
+
+class DefaultColorAspectsTest : public ::testing::TestWithParam<std::tuple<
+                                        /* Width */ int32_t,
+                                        /* Height */ int32_t,
+                                        /* Primaries */ CA::Primaries,
+                                        /* MatrixCoeffs */ CA::MatrixCoeffs>> {
+  public:
+    DefaultColorAspectsTest() {
+        mWidth = std::get<0>(GetParam());
+        mHeight = std::get<1>(GetParam());
+        mPrimaries = std::get<2>(GetParam());
+        mMatrixCoeffs = std::get<3>(GetParam());
+    };
+
+    int32_t mWidth;
+    int32_t mHeight;
+    CA::Primaries mPrimaries;
+    CA::MatrixCoeffs mMatrixCoeffs;
+};
+
+class DataSpaceTest : public ::testing::TestWithParam<std::tuple<
+                              /* ColorRange */ CA::Range,
+                              /* Primaries */ CA::Primaries,
+                              /* ColorTransfer */ CA::Transfer,
+                              /* MatrixCoeffs */ CA::MatrixCoeffs,
+                              /* v0_android_dataspace */ android_dataspace,
+                              /* android_dataspace */ android_dataspace>> {
+  public:
+    DataSpaceTest() {
+        mRange = std::get<0>(GetParam());
+        mPrimaries = std::get<1>(GetParam());
+        mTransfer = std::get<2>(GetParam());
+        mMatrixCoeffs = std::get<3>(GetParam());
+        mDataSpaceV0 = std::get<4>(GetParam());
+        mDataSpace = std::get<5>(GetParam());
+    };
+
+    CA::Range mRange;
+    CA::Primaries mPrimaries;
+    CA::Transfer mTransfer;
+    CA::MatrixCoeffs mMatrixCoeffs;
+    android_dataspace mDataSpaceV0;
+    android_dataspace mDataSpace;
+};
+
+TEST_P(ColorRangeTest, WrapColorRangeTest) {
+    int32_t range = ColorUtils::wrapColorAspectsIntoColorRange(mRange);
+    CA::Range unwrappedRange;
+    status_t status = ColorUtils::unwrapColorAspectsFromColorRange(range, &unwrappedRange);
+    ASSERT_EQ(status, OK) << "unwrapping ColorAspects from ColorRange failed";
+    EXPECT_EQ(unwrappedRange, mRange) << "Returned ColorRange doesn't match";
+    ALOGV("toString test: Range: %s", asString(mRange, "default"));
+}
+
+TEST_P(ColorTransferTest, WrapColorTransferTest) {
+    int32_t transfer = ColorUtils::wrapColorAspectsIntoColorTransfer(mTransfer);
+    CA::Transfer unwrappedTransfer;
+    status_t status = ColorUtils::unwrapColorAspectsFromColorTransfer(transfer, &unwrappedTransfer);
+    ASSERT_EQ(status, OK) << "unwrapping ColorAspects from ColorTransfer failed";
+    EXPECT_EQ(unwrappedTransfer, mTransfer) << "Returned ColorTransfer doesn't match";
+    ALOGV("toString test: Transfer: %s", asString(mTransfer, "default"));
+}
+
+TEST_P(ColorStandardTest, WrapColorStandardTest) {
+    int32_t standard = ColorUtils::wrapColorAspectsIntoColorStandard(mPrimaries, mMatrixCoeffs);
+    CA::Primaries unwrappedPrimaries;
+    CA::MatrixCoeffs unwrappedMatrixCoeffs;
+    status_t status = ColorUtils::unwrapColorAspectsFromColorStandard(standard, &unwrappedPrimaries,
+                                                                      &unwrappedMatrixCoeffs);
+    ASSERT_EQ(status, OK) << "unwrapping ColorAspects from ColorStandard failed";
+    EXPECT_EQ(unwrappedPrimaries, mPrimaries) << "Returned primaries doesn't match";
+    EXPECT_EQ(unwrappedMatrixCoeffs, mMatrixCoeffs) << "Returned matrixCoeffs doesn't match";
+}
+
+TEST_P(ColorAspectsTest, PlatformAspectsTest) {
+    CA aspects;
+    aspects.mRange = mRange;
+    aspects.mPrimaries = mPrimaries;
+    aspects.mTransfer = mTransfer;
+    aspects.mMatrixCoeffs = mMatrixCoeffs;
+
+    int32_t range = -1;
+    int32_t standard = -1;
+    int32_t transfer = -1;
+    status_t status = ColorUtils::convertCodecColorAspectsToPlatformAspects(aspects, &range,
+                                                                            &standard, &transfer);
+    ASSERT_EQ(status, OK) << "Conversion of ColorAspects to PlatformAspects failed";
+
+    CA returnedAspects;
+    status = ColorUtils::convertPlatformColorAspectsToCodecAspects(range, standard, transfer,
+                                                                   returnedAspects);
+    ASSERT_EQ(status, OK) << "Conversion of PlatformAspects to ColorAspects failed";
+    EXPECT_EQ(returnedAspects.mRange, aspects.mRange)
+            << "range mismatch for conversion between PlatformAspects";
+    EXPECT_EQ(returnedAspects.mPrimaries, aspects.mPrimaries)
+            << "primaries mismatch for conversion between PlatformAspects";
+    EXPECT_EQ(returnedAspects.mTransfer, aspects.mTransfer)
+            << "transfer mismatch for conversion between PlatformAspects";
+    EXPECT_EQ(returnedAspects.mMatrixCoeffs, aspects.mMatrixCoeffs)
+            << "matrixCoeffs mismatch for conversion between PlatformAspects";
+}
+
+TEST_P(ColorAspectsTest, IsoAspectsTest) {
+    CA aspects;
+    aspects.mRange = mRange;
+    aspects.mPrimaries = mPrimaries;
+    aspects.mTransfer = mTransfer;
+    aspects.mMatrixCoeffs = mMatrixCoeffs;
+
+    int32_t primaries = -1;
+    int32_t colorTransfer = -1;
+    int32_t matrixCoeffs = -1;
+    bool fullRange = false;
+    ColorUtils::convertCodecColorAspectsToIsoAspects(aspects, &primaries, &colorTransfer,
+                                                     &matrixCoeffs, &fullRange);
+
+    CA returnedAspects;
+    ColorUtils::convertIsoColorAspectsToCodecAspects(primaries, colorTransfer, matrixCoeffs,
+                                                     fullRange, returnedAspects);
+    EXPECT_EQ(returnedAspects.mRange, aspects.mRange)
+            << "range mismatch for conversion between IsoAspects";
+    EXPECT_EQ(returnedAspects.mPrimaries, aspects.mPrimaries)
+            << "primaries mismatch for conversion between IsoAspects";
+    EXPECT_EQ(returnedAspects.mTransfer, aspects.mTransfer)
+            << "transfer mismatch for conversion between IsoAspects";
+    EXPECT_EQ(returnedAspects.mMatrixCoeffs, aspects.mMatrixCoeffs)
+            << "matrixCoeffs mismatch for conversion between IsoAspects";
+}
+
+TEST_P(IsoToPlatformAspectsTest, IsoAspectsToPlatformAspectsTest) {
+    CA aspects;
+    aspects.mPrimaries = mPrimaries;
+    aspects.mTransfer = mTransfer;
+    aspects.mMatrixCoeffs = mMatrixCoeffs;
+
+    int32_t isoPrimaries = -1;
+    int32_t isoTransfer = -1;
+    int32_t isoMatrixCoeffs = -1;
+    bool fullrange = false;
+    ColorUtils::convertCodecColorAspectsToIsoAspects(aspects, &isoPrimaries, &isoTransfer,
+                                                     &isoMatrixCoeffs, &fullrange);
+
+    int32_t range = -1;
+    int32_t standard = -1;
+    int32_t transfer = -1;
+    ColorUtils::convertIsoColorAspectsToPlatformAspects(isoPrimaries, isoTransfer, isoMatrixCoeffs,
+                                                        fullrange, &range, &standard, &transfer);
+    if (fullrange) {
+        EXPECT_EQ(range, ColorUtils::kColorRangeFull)
+                << "range incorrect converting to PlatformAspects";
+    }
+    EXPECT_EQ(standard, mPlatformStandard) << "standard incorrect converting to PlatformAspects";
+    EXPECT_EQ(transfer, mPlatformTransfer) << "transfer incorrect converting to PlatformAspects";
+}
+
+TEST_P(ColorAspectsTest, PackColorAspectsTest) {
+    CA aspects;
+    aspects.mRange = mRange;
+    aspects.mPrimaries = mPrimaries;
+    aspects.mTransfer = mTransfer;
+    aspects.mMatrixCoeffs = mMatrixCoeffs;
+    uint32_t packedColorAspects = ColorUtils::packToU32(aspects);
+
+    CA unpackedAspects = ColorUtils::unpackToColorAspects(packedColorAspects);
+    EXPECT_EQ(unpackedAspects.mRange, mRange) << "range mismatch after unpacking";
+    EXPECT_EQ(unpackedAspects.mPrimaries, mPrimaries) << "primaries mismatch after unpacking";
+    EXPECT_EQ(unpackedAspects.mTransfer, mTransfer) << "transfer mismatch after unpacking";
+    EXPECT_EQ(unpackedAspects.mMatrixCoeffs, mMatrixCoeffs)
+            << "matrixCoeffs mismatch after unpacking";
+    ALOGV("toString test: Standard: %s", asString(mStandard, "default"));
+}
+
+TEST_P(DefaultColorAspectsTest, DefaultColorAspectsTest) {
+    CA aspects;
+    aspects.mRange = CA::RangeUnspecified;
+    aspects.mPrimaries = CA::PrimariesUnspecified;
+    aspects.mMatrixCoeffs = CA::MatrixUnspecified;
+    aspects.mTransfer = CA::TransferUnspecified;
+
+    ColorUtils::setDefaultCodecColorAspectsIfNeeded(aspects, mWidth, mHeight);
+    EXPECT_EQ(aspects.mRange, CA::RangeLimited) << "range not set to default";
+    EXPECT_EQ(aspects.mPrimaries, mPrimaries) << "primaries not set to default";
+    EXPECT_EQ(aspects.mMatrixCoeffs, mMatrixCoeffs) << "matrixCoeffs not set to default";
+    EXPECT_EQ(aspects.mTransfer, CA::TransferSMPTE170M) << "transfer not set to default";
+}
+
+TEST_P(DataSpaceTest, DataSpaceTest) {
+    CA aspects;
+    aspects.mRange = mRange;
+    aspects.mPrimaries = mPrimaries;
+    aspects.mTransfer = mTransfer;
+    aspects.mMatrixCoeffs = mMatrixCoeffs;
+
+    android_dataspace dataSpace = ColorUtils::getDataSpaceForColorAspects(aspects, false);
+    EXPECT_EQ(dataSpace, mDataSpace) << "Returned incorrect dataspace";
+
+    bool status = ColorUtils::convertDataSpaceToV0(dataSpace);
+    ASSERT_TRUE(status) << "Returned v0 dataspace is not aspect-only";
+    EXPECT_EQ(dataSpace, mDataSpaceV0) << "Returned incorrect v0 dataspace";
+}
+
+TEST(ColorUtilsUnitTest, AspectsChangedTest) {
+    CA origAspects;
+    origAspects.mRange = CA::Range::RangeFull;
+    origAspects.mPrimaries = CA::Primaries::PrimariesBT709_5;
+    origAspects.mTransfer = CA::Transfer::TransferLinear;
+    origAspects.mMatrixCoeffs = CA::MatrixCoeffs::MatrixBT709_5;
+
+    CA aspects;
+    aspects.mRange = CA::Range::RangeFull;
+    aspects.mPrimaries = CA::Primaries::PrimariesBT709_5;
+    aspects.mTransfer = CA::Transfer::TransferLinear;
+    aspects.mMatrixCoeffs = CA::MatrixCoeffs::MatrixBT709_5;
+
+    bool status = ColorUtils::checkIfAspectsChangedAndUnspecifyThem(aspects, origAspects);
+    ASSERT_FALSE(status) << "ColorAspects comparison check failed";
+
+    aspects.mRange = CA::Range::RangeLimited;
+    status = ColorUtils::checkIfAspectsChangedAndUnspecifyThem(aspects, origAspects);
+    ASSERT_TRUE(status) << "ColorAspects comparison check failed";
+    EXPECT_EQ(aspects.mRange, CA::Range::RangeUnspecified) << "range should have been unspecified";
+    aspects.mRange = CA::Range::RangeFull;
+
+    aspects.mTransfer = CA::Transfer::TransferSRGB;
+    status = ColorUtils::checkIfAspectsChangedAndUnspecifyThem(aspects, origAspects);
+    ASSERT_TRUE(status) << "ColorAspects comparison check failed";
+    EXPECT_EQ(aspects.mTransfer, CA::Transfer::TransferUnspecified)
+            << "transfer should have been unspecified";
+    aspects.mTransfer = CA::Transfer::TransferLinear;
+
+    aspects.mPrimaries = CA::Primaries::PrimariesBT2020;
+    status = ColorUtils::checkIfAspectsChangedAndUnspecifyThem(aspects, origAspects, true);
+    ASSERT_TRUE(status) << "ColorAspects comparison check failed";
+    EXPECT_EQ(aspects.mPrimaries, CA::Primaries::PrimariesUnspecified)
+            << "primaries should have been unspecified";
+    EXPECT_EQ(aspects.mMatrixCoeffs, CA::MatrixCoeffs::MatrixUnspecified)
+            << "matrixCoeffs should have been unspecified";
+
+    aspects.mMatrixCoeffs = CA::MatrixCoeffs::MatrixSMPTE240M;
+    status = ColorUtils::checkIfAspectsChangedAndUnspecifyThem(aspects, origAspects, true);
+    ASSERT_TRUE(status) << "ColorAspects comparison check failed";
+    EXPECT_EQ(aspects.mPrimaries, CA::Primaries::PrimariesUnspecified)
+            << "primaries should have been unspecified";
+    EXPECT_EQ(aspects.mMatrixCoeffs, CA::MatrixCoeffs::MatrixUnspecified)
+            << "matrixCoeffs should have been unspecified";
+}
+
+TEST(ColorUtilsUnitTest, ColorConfigFromFormatTest) {
+    int range = -1;
+    int standard = -1;
+    int transfer = -1;
+    sp<AMessage> format = new AMessage();
+    ASSERT_NE(format, nullptr) << "failed to create AMessage";
+    ColorUtils::getColorConfigFromFormat(format, &range, &standard, &transfer);
+    EXPECT_EQ(range | standard | transfer, 0) << "color config didn't default to 0";
+
+    format->setInt32(KEY_COLOR_RANGE, CA::Range::RangeFull);
+    format->setInt32(KEY_COLOR_STANDARD, CA::Standard::StandardBT709);
+    format->setInt32(KEY_COLOR_TRANSFER, CA::Transfer::TransferLinear);
+    ColorUtils::getColorConfigFromFormat(format, &range, &standard, &transfer);
+    EXPECT_EQ(range, CA::Range::RangeFull) << "range mismatch";
+    EXPECT_EQ(standard, CA::Standard::StandardBT709) << "standard mismatch";
+    EXPECT_EQ(transfer, CA::Transfer::TransferLinear) << "transfer mismatch";
+
+    range = standard = transfer = -1;
+    sp<AMessage> copyFormat = new AMessage();
+    ASSERT_NE(copyFormat, nullptr) << "failed to create AMessage";
+    ColorUtils::copyColorConfig(format, copyFormat);
+    bool status = copyFormat->findInt32(KEY_COLOR_RANGE, &range);
+    ASSERT_TRUE(status) << "ColorConfig range entry missing";
+    status = copyFormat->findInt32(KEY_COLOR_STANDARD, &standard);
+    ASSERT_TRUE(status) << "ColorConfig standard entry missing";
+    status = copyFormat->findInt32(KEY_COLOR_TRANSFER, &transfer);
+    ASSERT_TRUE(status) << "ColorConfig transfer entry missing";
+    EXPECT_EQ(range, CA::Range::RangeFull) << "range mismatch";
+    EXPECT_EQ(standard, CA::Standard::StandardBT709) << "standard mismatch";
+    EXPECT_EQ(transfer, CA::Transfer::TransferLinear) << "transfer mismatch";
+
+    range = standard = transfer = -1;
+    ColorUtils::getColorConfigFromFormat(copyFormat, &range, &standard, &transfer);
+    EXPECT_EQ(range, CA::Range::RangeFull) << "range mismatch";
+    EXPECT_EQ(standard, CA::Standard::StandardBT709) << "standard mismatch";
+    EXPECT_EQ(transfer, CA::Transfer::TransferLinear) << "transfer mismatch";
+}
+
+TEST_P(ColorAspectsTest, FormatTest) {
+    CA aspects;
+    sp<AMessage> format = new AMessage();
+    ASSERT_NE(format, nullptr) << "failed to create AMessage";
+    ColorUtils::setColorAspectsIntoFormat(aspects, format, true);
+
+    CA returnedAspects;
+    ColorUtils::getColorAspectsFromFormat(format, returnedAspects);
+    EXPECT_EQ(returnedAspects.mRange, aspects.mRange) << "range mismatch";
+    EXPECT_EQ(returnedAspects.mPrimaries, aspects.mPrimaries) << "primaries mismatch";
+    EXPECT_EQ(returnedAspects.mTransfer, aspects.mTransfer) << "transfer mismatch";
+    EXPECT_EQ(returnedAspects.mMatrixCoeffs, aspects.mMatrixCoeffs) << "matrixCoeffs mismatch";
+
+    aspects.mRange = mRange;
+    aspects.mPrimaries = mPrimaries;
+    aspects.mTransfer = mTransfer;
+    aspects.mMatrixCoeffs = mMatrixCoeffs;
+    ColorUtils::setColorAspectsIntoFormat(aspects, format);
+
+    memset(&returnedAspects, 0, sizeof(returnedAspects));
+    ColorUtils::getColorAspectsFromFormat(format, returnedAspects);
+    EXPECT_EQ(returnedAspects.mRange, aspects.mRange) << "range mismatch";
+    EXPECT_EQ(returnedAspects.mPrimaries, aspects.mPrimaries) << "primaries mismatch";
+    EXPECT_EQ(returnedAspects.mTransfer, aspects.mTransfer) << "transfer mismatch";
+    EXPECT_EQ(returnedAspects.mMatrixCoeffs, aspects.mMatrixCoeffs) << "matrixCoeffs mismatch";
+}
+
+TEST(ColorUtilsUnitTest, HDRStaticInfoTest) {
+    sp<AMessage> format = new AMessage();
+    ASSERT_NE(format, nullptr) << "failed to create AMessage";
+
+    HDRStaticInfo returnedInfoHDR;
+    bool status = ColorUtils::getHDRStaticInfoFromFormat(format, &returnedInfoHDR);
+    ASSERT_FALSE(status) << "HDR info should be absent in empty format";
+
+    HDRStaticInfo infoHDR;
+    infoHDR.sType1.mMaxDisplayLuminance = kHDRInfoTestValue2;
+    infoHDR.sType1.mMinDisplayLuminance = kHDRInfoTestValue1;
+    infoHDR.sType1.mMaxContentLightLevel = kHDRInfoTestValue2;
+    infoHDR.sType1.mMaxFrameAverageLightLevel = kHDRInfoTestValue1;
+    infoHDR.sType1.mR.x = kHDRInfoTestValue1;
+    infoHDR.sType1.mR.y = kHDRInfoTestValue2;
+    infoHDR.sType1.mG.x = kHDRInfoTestValue1;
+    infoHDR.sType1.mG.y = kHDRInfoTestValue2;
+    infoHDR.sType1.mB.x = kHDRInfoTestValue1;
+    infoHDR.sType1.mB.y = kHDRInfoTestValue2;
+    infoHDR.sType1.mW.x = kHDRInfoTestValue1;
+    infoHDR.sType1.mW.y = kHDRInfoTestValue2;
+    ColorUtils::setHDRStaticInfoIntoFormat(infoHDR, format);
+
+    status = ColorUtils::getHDRStaticInfoFromFormat(format, &returnedInfoHDR);
+    ASSERT_TRUE(status) << "Failed to get HDR info from format";
+    ASSERT_EQ(0, memcmp(&returnedInfoHDR, &infoHDR, sizeof(infoHDR))) << " HDRStaticInfo mismatch";
+
+    AMediaFormat *mediaFormat = AMediaFormat_new();
+    ASSERT_NE(mediaFormat, nullptr) << "Unable to create AMediaFormat";
+    ColorUtils::setHDRStaticInfoIntoAMediaFormat(infoHDR, mediaFormat);
+    memset(&returnedInfoHDR, 0, sizeof(returnedInfoHDR));
+    status = ColorUtils::getHDRStaticInfoFromFormat(mediaFormat->mFormat, &returnedInfoHDR);
+    AMediaFormat_delete(mediaFormat);
+    ASSERT_TRUE(status) << "Failed to get HDR info from media format";
+    ASSERT_EQ(0, memcmp(&returnedInfoHDR, &infoHDR, sizeof(infoHDR))) << " HDRStaticInfo mismatch";
+}
+
+TEST(ColorUtilsUnitTest, SanityTest) {
+    CA::Primaries unmappedPrimaries = (CA::Primaries)(CA::Primaries::PrimariesOther + 1);
+    CA::MatrixCoeffs unmappedMatrixCoeffs = (CA::MatrixCoeffs)(CA::MatrixOther + 1);
+    int32_t colorStandard =
+            ColorUtils::wrapColorAspectsIntoColorStandard(unmappedPrimaries, CA::MatrixUnspecified);
+    EXPECT_EQ(colorStandard, ColorUtils::kColorStandardUnspecified)
+            << "Standard unspecified expected";
+    colorStandard =
+            ColorUtils::wrapColorAspectsIntoColorStandard(CA::PrimariesOther, unmappedMatrixCoeffs);
+    EXPECT_EQ(colorStandard, ColorUtils::kColorStandardUnspecified)
+            << "Standard unspecified expected";
+    colorStandard = ColorUtils::wrapColorAspectsIntoColorStandard(CA::PrimariesBT601_6_525,
+                                                                  CA::MatrixBT2020);
+    EXPECT_GE(colorStandard, ColorUtils::kColorStandardExtendedStart)
+            << "expected a standard at or above the extended range start";
+    unmappedPrimaries = (CA::Primaries)(CA::Primaries::PrimariesBT2020 + 1);
+    unmappedMatrixCoeffs = (CA::MatrixCoeffs)(CA::MatrixBT2020Constant + 1);
+    colorStandard =
+            ColorUtils::wrapColorAspectsIntoColorStandard(unmappedPrimaries, unmappedMatrixCoeffs);
+    EXPECT_GE(colorStandard, ColorUtils::kColorStandardExtendedStart)
+            << "expected a standard at or above the extended range start";
+
+    CA aspects;
+    int32_t colorRange = -1;
+    colorStandard = -1;
+    int32_t colorTransfer = -1;
+    aspects.mPrimaries = (CA::Primaries)(CA::Primaries::PrimariesOther + 1);
+    status_t status = ColorUtils::convertCodecColorAspectsToPlatformAspects(
+            aspects, &colorRange, &colorStandard, &colorTransfer);
+    EXPECT_NE(status, OK) << "invalid colorAspects value accepted";
+
+    int32_t colorPrimaries = -1;
+    colorTransfer = -1;
+    int32_t colorMatrixCoeffs = -1;
+    bool fullRange = false;
+    aspects.mPrimaries = CA::PrimariesOther;
+    aspects.mTransfer = CA::TransferOther;
+    aspects.mMatrixCoeffs = CA::MatrixOther;
+    ColorUtils::convertCodecColorAspectsToIsoAspects(aspects, &colorPrimaries, &colorTransfer,
+                                                     &colorMatrixCoeffs, &fullRange);
+    CA returnedAspects;
+    ColorUtils::convertIsoColorAspectsToCodecAspects(colorPrimaries, colorTransfer,
+                                                     colorMatrixCoeffs, fullRange, returnedAspects);
+    EXPECT_EQ(returnedAspects.mPrimaries, CA::PrimariesUnspecified)
+            << "expected unspecified Primaries";
+    EXPECT_EQ(returnedAspects.mTransfer, CA::TransferUnspecified)
+            << "expected unspecified Transfer";
+    EXPECT_EQ(returnedAspects.mMatrixCoeffs, CA::MatrixUnspecified)
+            << "expected unspecified MatrixCoeffs";
+
+    // The codec "Other" value (0xFF) is not a valid ISO code, so converting it back
+    // to codec aspects should yield Unspecified for each field.
+    colorPrimaries = CA::PrimariesOther;
+    colorTransfer = CA::TransferOther;
+    colorMatrixCoeffs = CA::MatrixOther;
+    fullRange = false;
+    memset(&returnedAspects, 0, sizeof(returnedAspects));
+    ColorUtils::convertIsoColorAspectsToCodecAspects(colorPrimaries, colorTransfer,
+                                                     colorMatrixCoeffs, fullRange, returnedAspects);
+    EXPECT_EQ(returnedAspects.mPrimaries, CA::PrimariesUnspecified)
+            << "expected unspecified Primaries";
+    EXPECT_EQ(returnedAspects.mTransfer, CA::TransferUnspecified)
+            << "expected unspecified Transfer";
+    EXPECT_EQ(returnedAspects.mMatrixCoeffs, CA::MatrixUnspecified)
+            << "expected unspecified MatrixCoeffs";
+
+    CA::Primaries primaries = CA::PrimariesUnspecified;
+    CA::MatrixCoeffs matrixCoeffs = CA::MatrixUnspecified;
+    status = ColorUtils::unwrapColorAspectsFromColorStandard(ColorUtils::kColorStandardVendorStart,
+                                                             &primaries, &matrixCoeffs);
+    EXPECT_EQ(status, OK) << "unwrapping aspects from color standard failed";
+
+    primaries = CA::PrimariesUnspecified;
+    matrixCoeffs = CA::MatrixUnspecified;
+    status = ColorUtils::unwrapColorAspectsFromColorStandard(
+            ColorUtils::kColorStandardVendorStart * 4, &primaries, &matrixCoeffs);
+    EXPECT_NE(status, OK) << "unwrapping aspects from color standard failed";
+
+    colorRange = ColorUtils::wrapColorAspectsIntoColorRange((CA::Range)(CA::RangeOther + 1));
+    EXPECT_EQ(colorRange, ColorUtils::kColorRangeUnspecified) << "expected unspecified color range";
+
+    CA::Range range;
+    status = ColorUtils::unwrapColorAspectsFromColorRange(
+            ColorUtils::kColorRangeVendorStart + CA::RangeOther + 1, &range);
+    EXPECT_NE(status, OK) << "invalid range value accepted";
+    EXPECT_EQ(range, CA::RangeOther) << "returned unexpected range value";
+
+    colorTransfer =
+            ColorUtils::wrapColorAspectsIntoColorTransfer((CA::Transfer)(CA::TransferOther + 1));
+    EXPECT_EQ(colorTransfer, ColorUtils::kColorTransferUnspecified)
+            << "expected unspecified color transfer";
+
+    CA::Transfer transfer;
+    status = ColorUtils::unwrapColorAspectsFromColorTransfer(
+            ColorUtils::kColorTransferVendorStart + CA::TransferOther + 1, &transfer);
+    EXPECT_NE(status, OK) << "invalid transfer value accepted";
+    EXPECT_EQ(transfer, CA::TransferOther) << "expected other color transfer";
+}
+
+TEST(ColorUtilsUnitTest, HDRInfoSanityTest) {
+    HDRStaticInfo hdrInfo;
+    sp<AMessage> format = new AMessage();
+    ASSERT_NE(format, nullptr) << "failed to create AMessage";
+
+    bool boolStatus = ColorUtils::getHDRStaticInfoFromFormat(format, &hdrInfo);
+    EXPECT_FALSE(boolStatus) << "HDRStaticInfo should not be present";
+
+    sp<ABuffer> invalidSizeHDRInfoBuffer = new ABuffer(kHDRBufferSize - 1);
+    ASSERT_NE(invalidSizeHDRInfoBuffer, nullptr) << "failed to create ABuffer";
+    format->setBuffer(KEY_HDR_STATIC_INFO, invalidSizeHDRInfoBuffer);
+    memset(&hdrInfo, 0, sizeof(hdrInfo));
+    boolStatus = ColorUtils::getHDRStaticInfoFromFormat(format, &hdrInfo);
+    EXPECT_FALSE(boolStatus) << "incorrect HDRStaticInfo buffer accepted";
+
+    sp<ABuffer> invalidHDRInfoBuffer = new ABuffer(kHDRBufferSize);
+    ASSERT_NE(invalidHDRInfoBuffer, nullptr) << "failed to create ABuffer";
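+    // Tag the buffer with an unsupported HDRStaticInfo type so parsing is expected to be rejected.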
+    uint8_t *data = invalidHDRInfoBuffer->data();
+    *data = HDRStaticInfo::kType1 + 1;
+    format->setBuffer(KEY_HDR_STATIC_INFO, invalidHDRInfoBuffer);
+    memset(&hdrInfo, 0, sizeof(hdrInfo));
+    boolStatus = ColorUtils::getHDRStaticInfoFromFormat(format, &hdrInfo);
+    EXPECT_FALSE(boolStatus) << "incorrect HDRStaticInfo buffer accepted";
+
+    CA aspects;
+    format->setInt32(KEY_COLOR_RANGE, ColorUtils::kColorRangeVendorStart + CA::RangeOther + 1);
+    format->setInt32(KEY_COLOR_STANDARD, CA::Standard::StandardBT709);
+    format->setInt32(KEY_COLOR_TRANSFER, CA::Transfer::TransferLinear);
+    ColorUtils::getColorAspectsFromFormat(format, aspects);
+    EXPECT_EQ(aspects.mRange, CA::RangeOther) << "unexpected range";
+}
+
+TEST(ColorUtilsUnitTest, DataSpaceSanityTest) {
+    CA aspects;
+    aspects.mRange = CA::RangeUnspecified;
+    aspects.mPrimaries = CA::PrimariesUnspecified;
+    aspects.mMatrixCoeffs = CA::MatrixUnspecified;
+    aspects.mTransfer = CA::TransferUnspecified;
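+    // Fully unspecified aspects cannot be mapped to a concrete dataspace, so 0 (unknown) is expected.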
+    android_dataspace dataSpace = ColorUtils::getDataSpaceForColorAspects(aspects, true);
+    EXPECT_EQ(dataSpace, 0) << "expected invalid dataspace";
+    aspects.mPrimaries = CA::PrimariesUnspecified;
+    aspects.mMatrixCoeffs = CA::MatrixBT2020Constant;
+    dataSpace = ColorUtils::getDataSpaceForColorAspects(aspects, true);
+    EXPECT_NE(dataSpace, 0) << "unexpected value";
+}
+
+INSTANTIATE_TEST_SUITE_P(ColorUtilsUnitTest, ColorRangeTest,
+                         ::testing::Values(
+                                 // ColorRange
+                                 CA::Range::RangeLimited, CA::Range::RangeFull,
+                                 CA::Range::RangeUnspecified, CA::Range::RangeOther));
+
+INSTANTIATE_TEST_SUITE_P(ColorUtilsUnitTest, ColorTransferTest,
+                         ::testing::Values(
+                                 // ColorTransfer
+                                 CA::Transfer::TransferUnspecified, CA::Transfer::TransferLinear,
+                                 CA::Transfer::TransferSRGB, CA::Transfer::TransferSMPTE170M,
+                                 CA::Transfer::TransferGamma22, CA::Transfer::TransferGamma28,
+                                 CA::Transfer::TransferST2084, CA::Transfer::TransferHLG,
+                                 CA::Transfer::TransferSMPTE240M, CA::Transfer::TransferXvYCC,
+                                 CA::Transfer::TransferBT1361, CA::Transfer::TransferST428,
+                                 CA::Transfer::TransferOther));
+
+INSTANTIATE_TEST_SUITE_P(
+        ColorUtilsUnitTest, ColorStandardTest,
+        ::testing::Values(
+                // Primaries, MatrixCoeffs
+                std::make_pair(CA::Primaries::PrimariesUnspecified,
+                               CA::MatrixCoeffs::MatrixUnspecified),
+                std::make_pair(CA::Primaries::PrimariesBT709_5,
+                               CA::MatrixCoeffs::MatrixBT709_5),
+                std::make_pair(CA::Primaries::PrimariesBT601_6_625,
+                               CA::MatrixCoeffs::MatrixBT601_6),
+                std::make_pair(CA::Primaries::PrimariesBT601_6_625,
+                               CA::MatrixCoeffs::MatrixBT709_5),
+                std::make_pair(CA::Primaries::PrimariesBT601_6_525,
+                               CA::MatrixCoeffs::MatrixBT601_6),
+                std::make_pair(CA::Primaries::PrimariesBT601_6_525,
+                               CA::MatrixCoeffs::MatrixSMPTE240M),
+                std::make_pair(CA::Primaries::PrimariesBT2020,
+                               CA::MatrixCoeffs::MatrixBT2020),
+                std::make_pair(CA::Primaries::PrimariesBT2020,
+                               CA::MatrixCoeffs::MatrixBT2020Constant),
+                std::make_pair(CA::Primaries::PrimariesBT470_6M,
+                               CA::MatrixCoeffs::MatrixBT470_6M),
+                std::make_pair(CA::Primaries::PrimariesGenericFilm,
+                               CA::MatrixCoeffs::MatrixBT2020)));
+
+INSTANTIATE_TEST_SUITE_P(
+        ColorUtilsUnitTest, ColorAspectsTest,
+        ::testing::Values(
+                // Primaries, ColorTransfer, MatrixCoeffs, ColorRange, ColorStandard
+                std::make_tuple(CA::Primaries::PrimariesUnspecified,
+                                CA::Transfer::TransferUnspecified,
+                                CA::MatrixCoeffs::MatrixUnspecified, CA::Range::RangeFull,
+                                CA::Standard::StandardUnspecified),
+                std::make_tuple(CA::Primaries::PrimariesBT709_5, CA::Transfer::TransferLinear,
+                                CA::MatrixCoeffs::MatrixBT709_5, CA::Range::RangeFull,
+                                CA::Standard::StandardBT709),
+                std::make_tuple(CA::Primaries::PrimariesBT601_6_625, CA::Transfer::TransferSRGB,
+                                CA::MatrixCoeffs::MatrixBT601_6, CA::Range::RangeFull,
+                                CA::Standard::StandardUnspecified),
+                std::make_tuple(CA::Primaries::PrimariesBT601_6_625,
+                                CA::Transfer::TransferSMPTE170M, CA::MatrixCoeffs::MatrixBT709_5,
+                                CA::Range::RangeFull, CA::Standard::StandardUnspecified),
+                std::make_tuple(CA::Primaries::PrimariesBT601_6_525, CA::Transfer::TransferGamma22,
+                                CA::MatrixCoeffs::MatrixBT601_6, CA::Range::RangeFull,
+                                CA::Standard::StandardUnspecified),
+                std::make_tuple(CA::Primaries::PrimariesBT601_6_525, CA::Transfer::TransferGamma28,
+                                CA::MatrixCoeffs::MatrixSMPTE240M, CA::Range::RangeFull,
+                                CA::Standard::StandardBT470M),
+                std::make_tuple(CA::Primaries::PrimariesBT2020, CA::Transfer::TransferST2084,
+                                CA::MatrixCoeffs::MatrixBT2020, CA::Range::RangeFull,
+                                CA::Standard::StandardBT601_525),
+                std::make_tuple(CA::Primaries::PrimariesBT2020, CA::Transfer::TransferHLG,
+                                CA::MatrixCoeffs::MatrixBT2020Constant, CA::Range::RangeFull,
+                                CA::Standard::StandardBT601_525),
+                std::make_tuple(CA::Primaries::PrimariesBT470_6M, CA::Transfer::TransferLinear,
+                                CA::MatrixCoeffs::MatrixBT470_6M, CA::Range::RangeFull,
+                                CA::Standard::StandardUnspecified),
+                std::make_tuple(CA::Primaries::PrimariesGenericFilm, CA::Transfer::TransferLinear,
+                                CA::MatrixCoeffs::MatrixBT2020, CA::Range::RangeFull,
+                                CA::Standard::StandardBT601_625)));
+
+INSTANTIATE_TEST_SUITE_P(
+        ColorUtilsUnitTest, IsoToPlatformAspectsTest,
+        ::testing::Values(
+                // Primaries, Transfer, MatrixCoeffs, Standard, Transfer
+                std::make_tuple(CA::PrimariesUnspecified, CA::TransferUnspecified,
+                                CA::MatrixUnspecified, ColorUtils::kColorStandardUnspecified,
+                                ColorUtils::kColorTransferUnspecified),
+                std::make_tuple(CA::PrimariesBT709_5, CA::TransferLinear, CA::MatrixBT709_5,
+                                ColorUtils::kColorStandardBT709, ColorUtils::kColorTransferLinear),
+                std::make_tuple(CA::PrimariesBT601_6_625, CA::TransferSRGB, CA::MatrixBT601_6,
+                                ColorUtils::kColorStandardBT601_625,
+                                ColorUtils::kColorTransferSRGB),
+                std::make_tuple(CA::PrimariesBT601_6_625, CA::TransferSMPTE170M, CA::MatrixBT709_5,
+                                ColorUtils::kColorStandardBT601_625_Unadjusted,
+                                ColorUtils::kColorTransferSMPTE_170M),
+                std::make_tuple(CA::PrimariesBT601_6_525, CA::TransferGamma22, CA::MatrixBT601_6,
+                                ColorUtils::kColorStandardBT601_525,
+                                ColorUtils::kColorTransferGamma22),
+                std::make_tuple(CA::PrimariesBT601_6_525, CA::TransferGamma28, CA::MatrixSMPTE240M,
+                                ColorUtils::kColorStandardBT601_525_Unadjusted,
+                                ColorUtils::kColorTransferGamma28),
+                std::make_tuple(CA::PrimariesBT2020, CA::TransferST2084, CA::MatrixBT2020,
+                                ColorUtils::kColorStandardBT2020, ColorUtils::kColorTransferST2084),
+                std::make_tuple(CA::PrimariesBT2020, CA::TransferHLG, CA::MatrixBT2020Constant,
+                                ColorUtils::kColorStandardBT2020Constant,
+                                ColorUtils::kColorTransferHLG),
+                std::make_tuple(CA::PrimariesBT470_6M, CA::TransferUnspecified, CA::MatrixBT470_6M,
+                                ColorUtils::kColorStandardBT470M,
+                                ColorUtils::kColorTransferUnspecified),
+                std::make_tuple(CA::PrimariesGenericFilm, CA::TransferLinear, CA::MatrixBT2020,
+                                ColorUtils::kColorStandardFilm, ColorUtils::kColorTransferLinear)));
+
+INSTANTIATE_TEST_SUITE_P(
+        ColorUtilsUnitTest, DefaultColorAspectsTest,
+        ::testing::Values(
+                // Width, Height, Primaries, MatrixCoeffs
+                std::make_tuple(3840, 3840, CA::PrimariesBT2020, CA::MatrixBT2020),
+                std::make_tuple(720, 576, CA::PrimariesBT601_6_625, CA::MatrixBT601_6),
+                std::make_tuple(480, 360, CA::PrimariesBT601_6_525, CA::MatrixBT601_6),
+                std::make_tuple(480, 1920, CA::PrimariesBT709_5, CA::MatrixBT709_5)));
+
+INSTANTIATE_TEST_SUITE_P(
+        ColorUtilsUnitTest, DataSpaceTest,
+        ::testing::Values(
+                // ColorRange, Primaries, ColorTransfer, MatrixCoeffs, v0_android_dataspace,
+                // android_dataspace
+                std::make_tuple(CA::Range::RangeFull, CA::Primaries::PrimariesBT709_5,
+                                CA::Transfer::TransferSRGB, CA::MatrixCoeffs::MatrixBT709_5,
+                                HAL_DATASPACE_V0_SRGB, HAL_DATASPACE_SRGB),
+                std::make_tuple(CA::Range::RangeLimited, CA::Primaries::PrimariesBT709_5,
+                                CA::Transfer::TransferSMPTE170M, CA::MatrixCoeffs::MatrixBT709_5,
+                                HAL_DATASPACE_V0_BT709, HAL_DATASPACE_BT709),
+                std::make_tuple(CA::Range::RangeFull, CA::Primaries::PrimariesBT709_5,
+                                CA::Transfer::TransferLinear, CA::MatrixCoeffs::MatrixBT709_5,
+                                HAL_DATASPACE_V0_SRGB_LINEAR, HAL_DATASPACE_SRGB_LINEAR),
+                std::make_tuple(CA::Range::RangeLimited, CA::Primaries::PrimariesBT601_6_525,
+                                CA::Transfer::TransferSMPTE170M, CA::MatrixCoeffs::MatrixBT601_6,
+                                HAL_DATASPACE_V0_BT601_525, HAL_DATASPACE_BT601_525),
+                std::make_tuple(CA::Range::RangeLimited, CA::Primaries::PrimariesBT601_6_625,
+                                CA::Transfer::TransferSMPTE170M, CA::MatrixCoeffs::MatrixBT601_6,
+                                HAL_DATASPACE_V0_BT601_625, HAL_DATASPACE_BT601_625),
+                std::make_tuple(CA::Range::RangeFull, CA::Primaries::PrimariesBT601_6_625,
+                                CA::Transfer::TransferSMPTE170M, CA::MatrixCoeffs::MatrixBT601_6,
+                                HAL_DATASPACE_V0_JFIF, HAL_DATASPACE_JFIF),
+                std::make_tuple(CA::Range::RangeLimited, CA::Primaries::PrimariesBT709_5,
+                                CA::Transfer::TransferSMPTE170M, CA::MatrixCoeffs::MatrixBT470_6M,
+                                HAL_DATASPACE_V0_BT601_625, HAL_DATASPACE_BT601_625),
+                std::make_tuple(CA::Range::RangeLimited, CA::Primaries::PrimariesBT709_5,
+                                CA::Transfer::TransferSMPTE170M, CA::MatrixCoeffs::MatrixBT601_6,
+                                HAL_DATASPACE_V0_BT601_625, HAL_DATASPACE_BT601_625),
+                std::make_tuple(CA::Range::RangeLimited, CA::Primaries::PrimariesBT709_5,
+                                CA::Transfer::TransferSMPTE170M, CA::MatrixCoeffs::MatrixSMPTE240M,
+                                HAL_DATASPACE_V0_BT709, HAL_DATASPACE_BT709),
+                std::make_tuple(CA::Range::RangeLimited, CA::Primaries::PrimariesBT709_5,
+                                CA::Transfer::TransferSMPTE170M, CA::MatrixCoeffs::MatrixBT2020,
+                                HAL_DATASPACE_V0_BT709, HAL_DATASPACE_BT709),
+                std::make_tuple(CA::Range::RangeLimited, CA::Primaries::PrimariesBT709_5,
+                                CA::Transfer::TransferSMPTE170M,
+                                CA::MatrixCoeffs::MatrixBT2020Constant, HAL_DATASPACE_V0_BT601_525,
+                                HAL_DATASPACE_BT601_525),
+                std::make_tuple(CA::Range::RangeLimited, CA::Primaries::PrimariesBT601_6_625,
+                                CA::Transfer::TransferSMPTE170M, CA::MatrixCoeffs::MatrixBT470_6M,
+                                HAL_DATASPACE_V0_BT601_625, HAL_DATASPACE_BT601_625),
+                std::make_tuple(CA::Range::RangeLimited, CA::Primaries::PrimariesBT601_6_625,
+                                CA::Transfer::TransferSMPTE170M,
+                                CA::MatrixCoeffs::MatrixBT2020Constant, HAL_DATASPACE_V0_BT601_525,
+                                HAL_DATASPACE_BT601_525),
+                std::make_tuple(CA::Range::RangeLimited, CA::Primaries::PrimariesBT601_6_525,
+                                CA::Transfer::TransferSMPTE170M, CA::MatrixCoeffs::MatrixBT470_6M,
+                                HAL_DATASPACE_V0_BT601_525, HAL_DATASPACE_BT601_525),
+                std::make_tuple(CA::Range::RangeLimited, CA::Primaries::PrimariesBT601_6_525,
+                                CA::Transfer::TransferSMPTE170M,
+                                CA::MatrixCoeffs::MatrixBT2020Constant, HAL_DATASPACE_V0_BT601_525,
+                                HAL_DATASPACE_BT601_525)));
diff --git a/media/libstagefright/id3/Android.bp b/media/libstagefright/id3/Android.bp
index db37fe9..e34504d 100644
--- a/media/libstagefright/id3/Android.bp
+++ b/media/libstagefright/id3/Android.bp
@@ -5,7 +5,9 @@
     srcs: ["ID3.cpp"],
 
     header_libs: [
-        "libmedia_headers",
+        "libmedia_datasource_headers",
+        "libstagefright_foundation_headers",
+        "libstagefright_headers",
         "media_ndk_headers",
     ],
 
@@ -19,6 +21,12 @@
         ],
         cfi: true,
     },
+    host_supported: true,
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
 }
 
 //###############################################################################
diff --git a/media/libstagefright/id3/ID3.cpp b/media/libstagefright/id3/ID3.cpp
index e97f6eb..5509512 100644
--- a/media/libstagefright/id3/ID3.cpp
+++ b/media/libstagefright/id3/ID3.cpp
@@ -813,10 +813,6 @@
                 baseSize = U32_AT(&mParent.mData[mOffset + 4]);
             }
 
-            if (baseSize == 0) {
-                return;
-            }
-
             // Prevent integer overflow when adding
             if (SIZE_MAX - 10 <= baseSize) {
                 return;
diff --git a/media/libstagefright/id3/TEST_MAPPING b/media/libstagefright/id3/TEST_MAPPING
new file mode 100644
index 0000000..d070d25
--- /dev/null
+++ b/media/libstagefright/id3/TEST_MAPPING
@@ -0,0 +1,24 @@
+// frameworks/av/media/libstagefright/id3
+{
+  // tests which require dynamic content
+  // invoke with: atest -- --enable-module-dynamic-download=true
+  // TODO(b/148094059): unit tests not allowed to download content
+  "dynamic-presubmit": [
+    { "name": "ID3Test" }
+  ],
+
+  "presubmit": [
+    // this doesn't seem to run any tests when invoked via 'atest'.
+    // but: cts-tradefed run -m CtsMediaTestCases -t android.media.cts.MediaMetadataRetrieverTest
+    // does run the 32 and 64 bit tests, though not the instant tests.
+    {
+      "name": "CtsMediaTestCases",
+      "options": [
+          {
+            "include-filter": "android.media.cts.MediaMetadataRetrieverTest"
+          }
+      ]
+    }
+  ]
+}
diff --git a/media/libstagefright/id3/test/Android.bp b/media/libstagefright/id3/test/Android.bp
index 9d26eec..acf38e2 100644
--- a/media/libstagefright/id3/test/Android.bp
+++ b/media/libstagefright/id3/test/Android.bp
@@ -16,6 +16,7 @@
 
 cc_test {
     name: "ID3Test",
+    test_suites: ["device-tests"],
     gtest: true,
 
     srcs: ["ID3Test.cpp"],
diff --git a/media/libstagefright/id3/test/AndroidTest.xml b/media/libstagefright/id3/test/AndroidTest.xml
index 6c6697d..d6ea470 100644
--- a/media/libstagefright/id3/test/AndroidTest.xml
+++ b/media/libstagefright/id3/test/AndroidTest.xml
@@ -19,7 +19,7 @@
         <option name="cleanup" value="true" />
         <option name="push" value="ID3Test->/data/local/tmp/ID3Test" />
         <option name="push-file"
-            key="https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/id3/test/ID3Test.zip?unzip=true"
+            key="https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/id3/test/ID3Test-1.1.zip?unzip=true"
             value="/data/local/tmp/ID3TestRes/" />
     </target_preparer>
 
diff --git a/media/libstagefright/id3/test/ID3Test.cpp b/media/libstagefright/id3/test/ID3Test.cpp
index cd5cd9e..8db83cb 100644
--- a/media/libstagefright/id3/test/ID3Test.cpp
+++ b/media/libstagefright/id3/test/ID3Test.cpp
@@ -51,7 +51,7 @@
     while (!it.done()) {
         String8 id;
         it.getID(&id);
-        ASSERT_GT(id.length(), 0) << "No ID tag found! \n";
+        ASSERT_GT(id.length(), 0) << "Found an ID3 tag of 0 size";
         ALOGV("Found ID tag: %s\n", String8(id).c_str());
         it.next();
     }
@@ -66,8 +66,8 @@
     DataSourceHelper helper(file->wrap());
     ID3 tag(&helper);
     ASSERT_TRUE(tag.isValid()) << "No valid ID3 tag found for " << path.c_str() << "\n";
-    ASSERT_TRUE(tag.version() >= versionNumber)
-            << "Expected version: " << tag.version() << " Found version: " << versionNumber;
+    ASSERT_EQ(tag.version(), versionNumber)
+            << "Found version: " << tag.version() << " Expected version: " << versionNumber;
 }
 
 TEST_P(ID3textTagTest, TextTagTest) {
@@ -81,17 +81,34 @@
     ASSERT_TRUE(tag.isValid()) << "No valid ID3 tag found for " << path.c_str() << "\n";
     int countTextFrames = 0;
     ID3::Iterator it(tag, nullptr);
-    while (!it.done()) {
-        String8 id;
-        it.getID(&id);
-        ASSERT_GT(id.length(), 0);
-        if (id[0] == 'T') {
-            String8 text;
-            countTextFrames++;
-            it.getString(&text);
-            ALOGV("Found text frame %s : %s \n", id.string(), text.string());
+    if (tag.version() != ID3::ID3_V1 && tag.version() != ID3::ID3_V1_1) {
+        while (!it.done()) {
+            String8 id;
+            it.getID(&id);
+            ASSERT_GT(id.length(), 0) << "Found an ID3 tag of 0 size";
+            if (id[0] == 'T') {
+                String8 text;
+                countTextFrames++;
+                it.getString(&text);
+                ALOGV("Found text frame %s : %s \n", id.string(), text.string());
+            }
+            it.next();
         }
-        it.next();
+    } else {
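+        // ID3v1/v1.1 tags carry fixed text fields rather than 'T'-prefixed frames; count only the non-empty ones.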
+        while (!it.done()) {
+            String8 id;
+            String8 text;
+            it.getID(&id);
+            ASSERT_GT(id.length(), 0) << "Found an ID3 tag of 0 size";
+            it.getString(&text);
+            // if the tag has a value
+            if (strcmp(text.string(), "")) {
+                countTextFrames++;
+                ALOGV("ID: %s\n", id.c_str());
+                ALOGV("Text string: %s\n", text.string());
+            }
+            it.next();
+        }
     }
     ASSERT_EQ(countTextFrames, numTextFrames)
             << "Expected " << numTextFrames << " text frames, found " << countTextFrames;
@@ -114,7 +131,7 @@
         if (data) {
             ALOGV("Found album art: size = %zu mime = %s \n", dataSize, mime.string());
         }
-        ASSERT_NE(data, nullptr) << "Expected album art, found none!" << path;
+        ASSERT_NE(data, nullptr) << "Expected album art, found none! " << path;
     } else {
         ASSERT_EQ(data, nullptr) << "Found album art when expected none!";
     }
@@ -137,7 +154,7 @@
     while (!it.done()) {
         String8 id;
         it.getID(&id);
-        ASSERT_GT(id.length(), 0);
+        ASSERT_GT(id.length(), 0) << "Found an ID3 tag of 0 size";
         // Check if the tag is an "APIC/PIC" tag.
         if (String8(id) == "APIC" || String8(id) == "PIC") {
             count++;
@@ -150,7 +167,7 @@
                 hexdump(data, dataSize > 128 ? 128 : dataSize);
 #endif
             }
-            ASSERT_NE(data, nullptr) << "Expected album art, found none!" << path;
+            ASSERT_NE(data, nullptr) << "Expected album art, found none! " << path;
         }
         it.next();
     }
@@ -159,56 +176,67 @@
 }
 
 INSTANTIATE_TEST_SUITE_P(id3TestAll, ID3tagTest,
-                         ::testing::Values("bbb_44100hz_2ch_128kbps_mp3_30sec.mp3",
-                                           "bbb_44100hz_2ch_128kbps_mp3_30sec_1_image.mp3",
-                                           "bbb_44100hz_2ch_128kbps_mp3_30sec_2_image.mp3",
-                                           "bbb_44100hz_2ch_128kbps_mp3_5mins.mp3",
-                                           "bbb_44100hz_2ch_128kbps_mp3_5mins_1_image.mp3",
-                                           "bbb_44100hz_2ch_128kbps_mp3_5mins_2_image.mp3",
-                                           "bbb_44100hz_2ch_128kbps_mp3_5mins_largeSize.mp3",
-                                           "bbb_44100hz_2ch_128kbps_mp3_30sec_moreTextFrames.mp3"));
+                         ::testing::Values("bbb_1sec_v23.mp3",
+                                           "bbb_1sec_1_image.mp3",
+                                           "bbb_1sec_2_image.mp3",
+                                           "bbb_2sec_v24.mp3",
+                                           "bbb_2sec_1_image.mp3",
+                                           "bbb_2sec_2_image.mp3",
+                                           "bbb_2sec_largeSize.mp3",
+                                           "bbb_1sec_v23_3tags.mp3",
+                                           "bbb_1sec_v1_5tags.mp3",
+                                           "bbb_2sec_v24_unsynchronizedOneFrame.mp3",
+                                           "bbb_2sec_v24_unsynchronizedAllFrames.mp3"));
 
 INSTANTIATE_TEST_SUITE_P(
         id3TestAll, ID3versionTest,
-        ::testing::Values(make_pair("bbb_44100hz_2ch_128kbps_mp3_30sec.mp3", 4),
-                          make_pair("bbb_44100hz_2ch_128kbps_mp3_30sec_1_image.mp3", 4),
-                          make_pair("bbb_44100hz_2ch_128kbps_mp3_30sec_2_image.mp3", 4),
-                          make_pair("bbb_44100hz_2ch_128kbps_mp3_5mins.mp3", 4),
-                          make_pair("bbb_44100hz_2ch_128kbps_mp3_5mins_1_image.mp3", 4),
-                          make_pair("bbb_44100hz_2ch_128kbps_mp3_5mins_2_image.mp3", 4),
-                          make_pair("bbb_44100hz_2ch_128kbps_mp3_5mins_largeSize.mp3", 4),
-                          make_pair("bbb_44100hz_2ch_128kbps_mp3_30sec_moreTextFrames.mp3", 4)));
+        ::testing::Values(make_pair("bbb_1sec_v23.mp3", ID3::ID3_V2_3),
+                          make_pair("bbb_1sec_1_image.mp3", ID3::ID3_V2_3),
+                          make_pair("bbb_1sec_2_image.mp3", ID3::ID3_V2_3),
+                          make_pair("bbb_2sec_v24.mp3", ID3::ID3_V2_4),
+                          make_pair("bbb_2sec_1_image.mp3", ID3::ID3_V2_4),
+                          make_pair("bbb_2sec_2_image.mp3", ID3::ID3_V2_4),
+                          make_pair("bbb_2sec_largeSize.mp3", ID3::ID3_V2_4),
+                          make_pair("bbb_1sec_v23_3tags.mp3", ID3::ID3_V2_3),
+                          make_pair("bbb_1sec_v1_5tags.mp3", ID3::ID3_V1_1),
+                          make_pair("bbb_1sec_v1_3tags.mp3", ID3::ID3_V1_1),
+                          make_pair("bbb_2sec_v24_unsynchronizedOneFrame.mp3", ID3::ID3_V2_4),
+                          make_pair("bbb_2sec_v24_unsynchronizedAllFrames.mp3", ID3::ID3_V2_4)));
 
 INSTANTIATE_TEST_SUITE_P(
         id3TestAll, ID3textTagTest,
-        ::testing::Values(make_pair("bbb_44100hz_2ch_128kbps_mp3_30sec.mp3", 1),
-                          make_pair("bbb_44100hz_2ch_128kbps_mp3_30sec_1_image.mp3", 1),
-                          make_pair("bbb_44100hz_2ch_128kbps_mp3_30sec_2_image.mp3", 1),
-                          make_pair("bbb_44100hz_2ch_128kbps_mp3_5mins.mp3", 1),
-                          make_pair("bbb_44100hz_2ch_128kbps_mp3_5mins_1_image.mp3", 1),
-                          make_pair("bbb_44100hz_2ch_128kbps_mp3_5mins_2_image.mp3", 1),
-                          make_pair("bbb_44100hz_2ch_128kbps_mp3_5mins_largeSize.mp3", 1),
-                          make_pair("bbb_44100hz_2ch_128kbps_mp3_30sec_moreTextFrames.mp3", 5)));
+        ::testing::Values(
+                make_pair("bbb_1sec_v23.mp3", 1),
+                make_pair("bbb_1sec_1_image.mp3", 1),
+                make_pair("bbb_1sec_2_image.mp3", 1),
+                make_pair("bbb_2sec_v24.mp3", 1),
+                make_pair("bbb_2sec_1_image.mp3", 1),
+                make_pair("bbb_2sec_2_image.mp3", 1),
+                make_pair("bbb_2sec_largeSize.mp3", 1),
+                make_pair("bbb_1sec_v23_3tags.mp3", 3),
+                make_pair("bbb_1sec_v1_5tags.mp3", 5),
+                make_pair("bbb_1sec_v1_3tags.mp3", 3),
+                make_pair("bbb_2sec_v24_unsynchronizedOneFrame.mp3", 3),
+                make_pair("bbb_2sec_v24_unsynchronizedAllFrames.mp3", 3)));
 
-INSTANTIATE_TEST_SUITE_P(
-        id3TestAll, ID3albumArtTest,
-        ::testing::Values(make_pair("bbb_44100hz_2ch_128kbps_mp3_30sec.mp3", false),
-                          make_pair("bbb_44100hz_2ch_128kbps_mp3_30sec_1_image.mp3", true),
-                          make_pair("bbb_44100hz_2ch_128kbps_mp3_30sec_2_image.mp3", true),
-                          make_pair("bbb_44100hz_2ch_128kbps_mp3_5mins.mp3", false),
-                          make_pair("bbb_44100hz_2ch_128kbps_mp3_5mins_1_image.mp3", true),
-                          make_pair("bbb_44100hz_2ch_128kbps_mp3_5mins_2_image.mp3", true),
-                          make_pair("bbb_44100hz_2ch_128kbps_mp3_5mins_largeSize.mp3", true)));
+INSTANTIATE_TEST_SUITE_P(id3TestAll, ID3albumArtTest,
+                         ::testing::Values(make_pair("bbb_1sec_v23.mp3", false),
+                                           make_pair("bbb_1sec_1_image.mp3", true),
+                                           make_pair("bbb_1sec_2_image.mp3", true),
+                                           make_pair("bbb_2sec_v24.mp3", false),
+                                           make_pair("bbb_2sec_1_image.mp3", true),
+                                           make_pair("bbb_2sec_2_image.mp3", true),
+                                           make_pair("bbb_2sec_largeSize.mp3", true),
+                                           make_pair("bbb_1sec_v1_5tags.mp3", false)));
 
-INSTANTIATE_TEST_SUITE_P(
-        id3TestAll, ID3multiAlbumArtTest,
-        ::testing::Values(make_pair("bbb_44100hz_2ch_128kbps_mp3_30sec.mp3", 0),
-                          make_pair("bbb_44100hz_2ch_128kbps_mp3_5mins.mp3", 0),
-                          make_pair("bbb_44100hz_2ch_128kbps_mp3_30sec_1_image.mp3", 1),
-                          make_pair("bbb_44100hz_2ch_128kbps_mp3_5mins_1_image.mp3", 1),
-                          make_pair("bbb_44100hz_2ch_128kbps_mp3_30sec_2_image.mp3", 2),
-                          make_pair("bbb_44100hz_2ch_128kbps_mp3_5mins_2_image.mp3", 2),
-                          make_pair("bbb_44100hz_2ch_128kbps_mp3_5mins_largeSize.mp3", 3)));
+INSTANTIATE_TEST_SUITE_P(id3TestAll, ID3multiAlbumArtTest,
+                         ::testing::Values(make_pair("bbb_1sec_v23.mp3", 0),
+                                           make_pair("bbb_2sec_v24.mp3", 0),
+                                           make_pair("bbb_1sec_1_image.mp3", 1),
+                                           make_pair("bbb_2sec_1_image.mp3", 1),
+                                           make_pair("bbb_1sec_2_image.mp3", 2),
+                                           make_pair("bbb_2sec_2_image.mp3", 2),
+                                           make_pair("bbb_2sec_largeSize.mp3", 3)));
 
 int main(int argc, char **argv) {
     gEnv = new ID3TestEnvironment();
diff --git a/media/libstagefright/include/FrameDecoder.h b/media/libstagefright/include/FrameDecoder.h
index 19ae0e3..bca7f01 100644
--- a/media/libstagefright/include/FrameDecoder.h
+++ b/media/libstagefright/include/FrameDecoder.h
@@ -135,7 +135,8 @@
 private:
     sp<FrameCaptureLayer> mCaptureLayer;
     VideoFrame *mFrame;
-    bool mIsAvcOrHevc;
+    bool mIsAvc;
+    bool mIsHevc;
     MediaSource::ReadOptions::SeekMode mSeekMode;
     int64_t mTargetTimeUs;
     List<int64_t> mSampleDurations;
diff --git a/media/libstagefright/include/HevcUtils.h b/media/libstagefright/include/HevcUtils.h
index 0f59631..6a4a168 100644
--- a/media/libstagefright/include/HevcUtils.h
+++ b/media/libstagefright/include/HevcUtils.h
@@ -30,6 +30,10 @@
 namespace android {
 
 enum {
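+    // NAL unit types for HEVC random access pictures (IDR and CRA slices).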
+    kHevcNalUnitTypeCodedSliceIdr = 19,
+    kHevcNalUnitTypeCodedSliceIdrNoLP = 20,
+    kHevcNalUnitTypeCodedSliceCra = 21,
+
     kHevcNalUnitTypeVps = 32,
     kHevcNalUnitTypeSps = 33,
     kHevcNalUnitTypePps = 34,
@@ -90,8 +94,11 @@
     // Note that this method does not write the start code.
     bool write(size_t index, uint8_t* dest, size_t size);
     status_t makeHvcc(uint8_t *hvcc, size_t *hvccSize, size_t nalSizeLength);
+    void FindHEVCDimensions(
+            const sp<ABuffer> &SpsBuffer, int32_t *width, int32_t *height);
 
     Info getInfo() const { return mInfo; }
+    static bool IsHevcIDR(const uint8_t *data, size_t size);
 
 private:
     status_t parseVps(const uint8_t* data, size_t size);
diff --git a/media/libstagefright/include/media/stagefright/ACodec.h b/media/libstagefright/include/media/stagefright/ACodec.h
index 83e92b9..4c97b4d 100644
--- a/media/libstagefright/include/media/stagefright/ACodec.h
+++ b/media/libstagefright/include/media/stagefright/ACodec.h
@@ -147,6 +147,7 @@
         kWhatReleaseCodecInstance    = 'relC',
         kWhatForceStateTransition    = 'fstt',
         kWhatCheckIfStuck            = 'Cstk',
+        kWhatSubmitExtraOutputMetadataBuffer = 'sbxo',
     };
 
     enum {
@@ -272,6 +273,8 @@
     bool mShutdownInProgress;
     bool mExplicitShutdown;
     bool mIsLegacyVP9Decoder;
+    bool mIsStreamCorruptFree;
+    bool mIsLowLatency;
 
     // If "mKeepComponentAllocated" we only transition back to Loaded state
     // and do not release the component instance.
@@ -499,6 +502,7 @@
     status_t setupAMRCodec(bool encoder, bool isWAMR, int32_t bitRate);
     status_t setupG711Codec(bool encoder, int32_t sampleRate, int32_t numChannels);
 
+    status_t setupOpusCodec(bool encoder, int32_t sampleRate, int32_t numChannels);
     status_t setupFlacCodec(
             bool encoder, int32_t numChannels, int32_t sampleRate, int32_t compressionLevel,
             AudioEncoding encoding);
diff --git a/media/libstagefright/include/media/stagefright/CameraSource.h b/media/libstagefright/include/media/stagefright/CameraSource.h
index 6f0d3b5..16e7d89 100644
--- a/media/libstagefright/include/media/stagefright/CameraSource.h
+++ b/media/libstagefright/include/media/stagefright/CameraSource.h
@@ -23,7 +23,6 @@
 #include <media/stagefright/MediaBuffer.h>
 #include <camera/android/hardware/ICamera.h>
 #include <camera/ICameraRecordingProxy.h>
-#include <camera/ICameraRecordingProxyListener.h>
 #include <camera/CameraParameters.h>
 #include <gui/BufferItemConsumer.h>
 #include <utils/List.h>
@@ -40,17 +39,6 @@
 class CameraSource : public MediaSource, public MediaBufferObserver {
 public:
     /**
-     * Factory method to create a new CameraSource using the current
-     * settings (such as video size, frame rate, color format, etc)
-     * from the default camera.
-     *
-     * @param clientName The package/process name of the client application.
-     *    This is used for permissions checking.
-     * @return NULL on error.
-     */
-    static CameraSource *Create(const String16 &clientName);
-
-    /**
      * Factory method to create a new CameraSource.
      *
      * @param camera the video input frame data source. If it is NULL,
@@ -89,8 +77,7 @@
                                           pid_t clientPid,
                                           Size videoSize,
                                           int32_t frameRate,
-                                          const sp<IGraphicBufferProducer>& surface,
-                                          bool storeMetaDataInVideoBuffers = true);
+                                          const sp<IGraphicBufferProducer>& surface);
 
     virtual ~CameraSource();
 
@@ -132,26 +119,6 @@
 protected:
 
     /**
-     * The class for listening to BnCameraRecordingProxyListener. This is used to receive video
-     * buffers in VIDEO_BUFFER_MODE_DATA_CALLBACK_YUV and VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA
-     * mode. When a frame is available, CameraSource::dataCallbackTimestamp() will be called.
-     */
-    class ProxyListener: public BnCameraRecordingProxyListener {
-    public:
-        ProxyListener(const sp<CameraSource>& source);
-        virtual void dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
-                const sp<IMemory> &data);
-        virtual void recordingFrameHandleCallbackTimestamp(int64_t timestampUs,
-                native_handle_t* handle);
-        virtual void recordingFrameHandleCallbackTimestampBatch(
-                const std::vector<int64_t>& timestampsUs,
-                const std::vector<native_handle_t*>& handles);
-
-    private:
-        sp<CameraSource> mSource;
-    };
-
-    /**
      * The class for listening to BufferQueue's onFrameAvailable. This is used to receive video
      * buffers in VIDEO_BUFFER_MODE_BUFFER_QUEUE mode. When a frame is available,
      * CameraSource::processBufferQueueFrame() will be called.
@@ -213,32 +180,15 @@
     CameraSource(const sp<hardware::ICamera>& camera, const sp<ICameraRecordingProxy>& proxy,
                  int32_t cameraId, const String16& clientName, uid_t clientUid, pid_t clientPid,
                  Size videoSize, int32_t frameRate,
-                 const sp<IGraphicBufferProducer>& surface,
-                 bool storeMetaDataInVideoBuffers);
+                 const sp<IGraphicBufferProducer>& surface);
 
     virtual status_t startCameraRecording();
     virtual void releaseRecordingFrame(const sp<IMemory>& frame);
-    virtual void releaseRecordingFrameHandle(native_handle_t* handle);
-    // stagefright recorder not using this for now
-    virtual void releaseRecordingFrameHandleBatch(const std::vector<native_handle_t*>& handles);
 
     // Returns true if need to skip the current frame.
     // Called from dataCallbackTimestamp.
     virtual bool skipCurrentFrame(int64_t /*timestampUs*/) {return false;}
 
-    // Callback called when still camera raw data is available.
-    virtual void dataCallback(int32_t /*msgType*/, const sp<IMemory>& /*data*/) {}
-
-    virtual void dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
-            const sp<IMemory> &data);
-
-    virtual void recordingFrameHandleCallbackTimestamp(int64_t timestampUs,
-            native_handle_t* handle);
-
-    virtual void recordingFrameHandleCallbackTimestampBatch(
-            const std::vector<int64_t>& timestampsUs,
-            const std::vector<native_handle_t*>& handles);
-
     // Process a buffer item received in BufferQueueListener.
     virtual void processBufferQueueFrame(BufferItem& buffer);
 
@@ -261,9 +211,6 @@
     int64_t mGlitchDurationThresholdUs;
     bool mCollectStats;
 
-    // The mode video buffers are received from camera. One of VIDEO_BUFFER_MODE_*.
-    int32_t mVideoBufferMode;
-
     static const uint32_t kDefaultVideoBufferCount = 32;
 
     /**
@@ -297,12 +244,12 @@
 
     status_t init(const sp<hardware::ICamera>& camera, const sp<ICameraRecordingProxy>& proxy,
                   int32_t cameraId, const String16& clientName, uid_t clientUid, pid_t clientPid,
-                  Size videoSize, int32_t frameRate, bool storeMetaDataInVideoBuffers);
+                  Size videoSize, int32_t frameRate);
 
     status_t initWithCameraAccess(
                   const sp<hardware::ICamera>& camera, const sp<ICameraRecordingProxy>& proxy,
                   int32_t cameraId, const String16& clientName, uid_t clientUid, pid_t clientPid,
-                  Size videoSize, int32_t frameRate, bool storeMetaDataInVideoBuffers);
+                  Size videoSize, int32_t frameRate);
 
     // Initialize the buffer queue used in VIDEO_BUFFER_MODE_BUFFER_QUEUE mode.
     status_t initBufferQueue(uint32_t width, uint32_t height, uint32_t format,
diff --git a/media/libstagefright/include/media/stagefright/CameraSourceTimeLapse.h b/media/libstagefright/include/media/stagefright/CameraSourceTimeLapse.h
index 533e33b..3c311cf 100644
--- a/media/libstagefright/include/media/stagefright/CameraSourceTimeLapse.h
+++ b/media/libstagefright/include/media/stagefright/CameraSourceTimeLapse.h
@@ -45,8 +45,7 @@
         Size videoSize,
         int32_t videoFrameRate,
         const sp<IGraphicBufferProducer>& surface,
-        int64_t timeBetweenTimeLapseFrameCaptureUs,
-        bool storeMetaDataInVideoBuffers = true);
+        int64_t timeBetweenTimeLapseFrameCaptureUs);
 
     virtual ~CameraSourceTimeLapse();
 
@@ -122,8 +121,7 @@
         Size videoSize,
         int32_t videoFrameRate,
         const sp<IGraphicBufferProducer>& surface,
-        int64_t timeBetweenTimeLapseFrameCaptureUs,
-        bool storeMetaDataInVideoBuffers = true);
+        int64_t timeBetweenTimeLapseFrameCaptureUs);
 
     // Wrapper over CameraSource::signalBufferReturned() to implement quick stop.
     // It only handles the case when mLastReadBufferCopy is signalled. Otherwise
@@ -137,33 +135,6 @@
     // frame needs to be skipped and this function just returns the value of mSkipCurrentFrame.
     virtual bool skipCurrentFrame(int64_t timestampUs);
 
-    // In the video camera case calls skipFrameAndModifyTimeStamp() to modify
-    // timestamp and set mSkipCurrentFrame.
-    // Then it calls the base CameraSource::dataCallbackTimestamp()
-    // This will be called in VIDEO_BUFFER_MODE_DATA_CALLBACK_YUV and
-    // VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA mode.
-    virtual void dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
-            const sp<IMemory> &data);
-
-    // In the video camera case calls skipFrameAndModifyTimeStamp() to modify
-    // timestamp and set mSkipCurrentFrame.
-    // Then it calls the base CameraSource::recordingFrameHandleCallbackTimestamp() or
-    // CameraSource::recordingFrameHandleCallbackTimestampBatch()
-    // This will be called in VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA mode when
-    // the metadata is VideoNativeHandleMetadata.
-    virtual void recordingFrameHandleCallbackTimestamp(int64_t timestampUs,
-            native_handle_t* handle);
-
-    // In the video camera case calls skipFrameAndModifyTimeStamp() to modify
-    // timestamp and set mSkipCurrentFrame.
-    // Then it calls the base CameraSource::recordingFrameHandleCallbackTimestamp() or
-    // CameraSource::recordingFrameHandleCallbackTimestampBatch()
-    // This will be called in VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA mode when
-    // the metadata is VideoNativeHandleMetadata.
-    virtual void recordingFrameHandleCallbackTimestampBatch(
-            const std::vector<int64_t>& timestampsUs,
-            const std::vector<native_handle_t*>& handles);
-
     // Process a buffer item received in CameraSource::BufferQueueListener.
     // This will be called in VIDEO_BUFFER_MODE_BUFFER_QUEUE mode.
     virtual void processBufferQueueFrame(BufferItem& buffer);
@@ -187,9 +158,6 @@
     // Wrapper to enter threadTimeLapseEntry()
     static void *ThreadTimeLapseWrapper(void *me);
 
-    // Creates a copy of source_data into a new memory of final type MemoryBase.
-    sp<IMemory> createIMemoryCopy(const sp<IMemory> &source_data);
-
     CameraSourceTimeLapse(const CameraSourceTimeLapse &);
     CameraSourceTimeLapse &operator=(const CameraSourceTimeLapse &);
 };
diff --git a/media/libstagefright/include/media/stagefright/MPEG4Writer.h b/media/libstagefright/include/media/stagefright/MPEG4Writer.h
index 501cf2c..2582ed0 100644
--- a/media/libstagefright/include/media/stagefright/MPEG4Writer.h
+++ b/media/libstagefright/include/media/stagefright/MPEG4Writer.h
@@ -46,7 +46,7 @@
 
     // Returns INVALID_OPERATION if there is no source or track.
     virtual status_t start(MetaData *param = NULL);
-    virtual status_t stop() { return reset(); }
+    virtual status_t stop();
     virtual status_t pause();
     virtual bool reachedEOS();
     virtual status_t dump(int fd, const Vector<String16>& args);
@@ -126,6 +126,7 @@
     bool mWriteSeekErr;
     bool mFallocateErr;
     bool mPreAllocationEnabled;
+    status_t mResetStatus;
     // Queue to hold top long write durations
     std::priority_queue<std::chrono::microseconds, std::vector<std::chrono::microseconds>,
                         std::greater<std::chrono::microseconds>> mWriteDurationPQ;
@@ -135,7 +136,9 @@
     sp<AHandlerReflector<MPEG4Writer> > mReflector;
 
     Mutex mLock;
+    // Serialize reset calls from client of MPEG4Writer and MP4WtrCtrlHlpLooper.
     std::mutex mResetMutex;
+    // Serialize preallocation calls from different track threads.
     std::mutex mFallocMutex;
     bool mPreAllocFirstTime; // Pre-allocate space for file and track headers only once per file.
     uint64_t mPrevAllTracksTotalMetaDataSizeEstimate;
@@ -304,7 +307,7 @@
     void writeGeoDataBox();
     void writeLatitude(int degreex10000);
     void writeLongitude(int degreex10000);
-    void finishCurrentSession();
+    status_t finishCurrentSession();
 
     void addDeviceMeta();
     void writeHdlr(const char *handlerType);
@@ -337,7 +340,7 @@
     void sendSessionSummary();
     status_t release();
     status_t switchFd();
-    status_t reset(bool stopSource = true);
+    status_t reset(bool stopSource = true, bool waitForAnyPreviousCallToComplete = true);
 
     static uint32_t getMpeg4Time();
 
diff --git a/media/libstagefright/include/media/stagefright/MediaAdapter.h b/media/libstagefright/include/media/stagefright/MediaAdapter.h
index 177a9e9..c7d7765 100644
--- a/media/libstagefright/include/media/stagefright/MediaAdapter.h
+++ b/media/libstagefright/include/media/stagefright/MediaAdapter.h
@@ -58,6 +58,7 @@
 
 private:
     Mutex mAdapterLock;
+    std::mutex mBufferGatingMutex;
     // Make sure the read() wait for the incoming buffer.
     Condition mBufferReadCond;
     // Make sure the pushBuffer() wait for the current buffer consumed.
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index f7e6c27..a28d479 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -81,6 +81,13 @@
         BUFFER_FLAG_MUXER_DATA    = 16,
     };
 
+    enum CVODegree {
+        CVO_DEGREE_0   = 0,
+        CVO_DEGREE_90  = 90,
+        CVO_DEGREE_180 = 180,
+        CVO_DEGREE_270 = 270,
+    };
+
     enum {
         CB_INPUT_AVAILABLE = 1,
         CB_OUTPUT_AVAILABLE = 2,
@@ -366,6 +373,8 @@
     AString mOwnerName;
     sp<MediaCodecInfo> mCodecInfo;
     sp<AReplyToken> mReplyID;
+    std::string mLastReplyOrigin;
+    std::vector<sp<AMessage>> mDeferredMessages;
     uint32_t mFlags;
     status_t mStickyError;
     sp<Surface> mSurface;
@@ -416,6 +425,10 @@
 
     sp<ICrypto> mCrypto;
 
+    int32_t mTunneledInputWidth;
+    int32_t mTunneledInputHeight;
+    bool mTunneled;
+
     sp<IDescrambler> mDescrambler;
 
     List<sp<ABuffer> > mCSD;
@@ -428,13 +441,17 @@
 
     std::shared_ptr<BufferChannelBase> mBufferChannel;
 
-    MediaCodec(const sp<ALooper> &looper, pid_t pid, uid_t uid);
+    MediaCodec(
+            const sp<ALooper> &looper, pid_t pid, uid_t uid,
+            std::function<sp<CodecBase>(const AString &, const char *)> getCodecBase = nullptr,
+            std::function<status_t(const AString &, sp<MediaCodecInfo> *)> getCodecInfo = nullptr);
 
     static sp<CodecBase> GetCodecBase(const AString &name, const char *owner = nullptr);
 
     static status_t PostAndAwaitResponse(
             const sp<AMessage> &msg, sp<AMessage> *response);
 
+    void PostReplyWithError(const sp<AMessage> &msg, int32_t err);
     void PostReplyWithError(const sp<AReplyToken> &replyID, int32_t err);
 
     status_t init(const AString &name);
@@ -445,6 +462,7 @@
     size_t updateBuffers(int32_t portIndex, const sp<AMessage> &msg);
     status_t onQueueInputBuffer(const sp<AMessage> &msg);
     status_t onReleaseOutputBuffer(const sp<AMessage> &msg);
+    BufferInfo *peekNextPortBuffer(int32_t portIndex);
     ssize_t dequeuePortBuffer(int32_t portIndex);
 
     status_t getBufferAndFormat(
@@ -476,6 +494,7 @@
     status_t onSetParameters(const sp<AMessage> &params);
 
     status_t amendOutputFormatWithCodecSpecificData(const sp<MediaCodecBuffer> &buffer);
+    void handleOutputFormatChangeIfNeeded(const sp<MediaCodecBuffer> &buffer);
     bool isExecuting() const;
 
     uint64_t getGraphicBufferSize();
@@ -484,6 +503,9 @@
     bool hasPendingBuffer(int portIndex);
     bool hasPendingBuffer();
 
+    void postPendingRepliesAndDeferredMessages(std::string origin, status_t err = OK);
+    void postPendingRepliesAndDeferredMessages(std::string origin, const sp<AMessage> &response);
+
     /* called to get the last codec error when the sticky flag is set.
      * if no such codec error is found, returns UNKNOWN_ERROR.
      */
@@ -569,6 +591,10 @@
 
     Histogram mLatencyHist;
 
+    std::function<sp<CodecBase>(const AString &, const char *)> mGetCodecBase;
+    std::function<status_t(const AString &, sp<MediaCodecInfo> *)> mGetCodecInfo;
+    friend class MediaTestHelper;
+
     DISALLOW_EVIL_CONSTRUCTORS(MediaCodec);
 };
 
diff --git a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
index 178d334..9c67338 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
@@ -744,6 +744,7 @@
 constexpr char KEY_AAC_MAX_OUTPUT_CHANNEL_COUNT[] = "aac-max-output-channel_count";
 constexpr char KEY_AAC_PROFILE[] = "aac-profile";
 constexpr char KEY_AAC_SBR_MODE[] = "aac-sbr-mode";
+constexpr char KEY_ALLOW_FRAME_DROP[] = "allow-frame-drop";
 constexpr char KEY_AUDIO_SESSION_ID[] = "audio-session-id";
 constexpr char KEY_BIT_RATE[] = "bitrate";
 constexpr char KEY_BITRATE_MODE[] = "bitrate-mode";
diff --git a/media/libstagefright/include/media/stagefright/MediaCodecListWriter.h b/media/libstagefright/include/media/stagefright/MediaCodecListWriter.h
index f53b23e..bf85d7e 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodecListWriter.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodecListWriter.h
@@ -19,7 +19,6 @@
 #define MEDIA_CODEC_LIST_WRITER_H_
 
 #include <media/stagefright/foundation/ABase.h>
-#include <media/stagefright/MediaCodecListWriter.h>
 #include <media/MediaCodecInfo.h>
 
 #include <utils/Errors.h>
@@ -65,6 +64,7 @@
     std::vector<sp<MediaCodecInfo>> mCodecInfos;
 
     friend struct MediaCodecList;
+    friend class MediaTestHelper;
 };
 
 /**
diff --git a/media/libstagefright/include/media/stagefright/MediaCodecSource.h b/media/libstagefright/include/media/stagefright/MediaCodecSource.h
index 2f98af1..0f7b535 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodecSource.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodecSource.h
@@ -64,12 +64,15 @@
 
     // MediaBufferObserver
     virtual void signalBufferReturned(MediaBufferBase *buffer);
+    virtual status_t setEncodingBitrate(int32_t bitRate /* bps */);
 
     // for AHandlerReflector
     void onMessageReceived(const sp<AMessage> &msg);
 
 
 
+    status_t requestIDRFrame();
+
 protected:
     virtual ~MediaCodecSource();
 
diff --git a/media/libstagefright/include/media/stagefright/MediaWriter.h b/media/libstagefright/include/media/stagefright/MediaWriter.h
index 1f4fbcb..17b1abf 100644
--- a/media/libstagefright/include/media/stagefright/MediaWriter.h
+++ b/media/libstagefright/include/media/stagefright/MediaWriter.h
@@ -54,6 +54,10 @@
     virtual void setStartTimeOffsetMs(int /*ms*/) {}
     virtual int32_t getStartTimeOffsetMs() const { return 0; }
     virtual status_t setNextFd(int /*fd*/) { return INVALID_OPERATION; }
+    virtual void updateCVODegrees(int32_t /*cvoDegrees*/) {}
+    virtual void updatePayloadType(int32_t /*payloadType*/) {}
+    virtual void updateSocketNetwork(int64_t /*socketNetwork*/) {}
+    virtual uint32_t getSequenceNum() { return 0; }
 
 protected:
     virtual ~MediaWriter() {}
diff --git a/media/libstagefright/include/media/stagefright/MetaDataBase.h b/media/libstagefright/include/media/stagefright/MetaDataBase.h
index 64eb8b4..f260510 100644
--- a/media/libstagefright/include/media/stagefright/MetaDataBase.h
+++ b/media/libstagefright/include/media/stagefright/MetaDataBase.h
@@ -62,6 +62,7 @@
     kKeyDVCC              = 'dvcc',  // raw data
     kKeyAV1C              = 'av1c',  // raw data
     kKeyThumbnailHVCC     = 'thvc',  // raw data
+    kKeyThumbnailAV1C     = 'tav1',  // raw data
     kKeyD263              = 'd263',  // raw data
     kKeyOpusHeader        = 'ohdr',  // raw data
     kKeyOpusCodecDelay    = 'ocod',  // uint64_t (codec delay in ns)
@@ -247,6 +248,20 @@
 
     // Treat empty track as malformed for MediaRecorder.
     kKeyEmptyTrackMalFormed = 'nemt', // bool (int32_t)
+
+    kKeyVps              = 'sVps', // int32_t, indicates that a buffer has vps.
+    kKeySps              = 'sSps', // int32_t, indicates that a buffer has sps.
+    kKeyPps              = 'sPps', // int32_t, indicates that a buffer has pps.
+    kKeySelfID           = 'sfid', // int32_t, source ID to identify itself on RTP protocol.
+    kKeyPayloadType      = 'pTyp', // int32_t, SDP negotiated payload type.
+    kKeyRtpExtMap        = 'extm', // int32_t, rtp extension ID for cvo on RTP protocol.
+    kKeyRtpCvoDegrees    = 'cvod', // int32_t, rtp cvo degrees as per 3GPP 26.114.
+    kKeyRtpDscp          = 'dscp', // int32_t, DSCP (Differentiated Services Code Point) as defined in RFC 2474.
+    kKeySocketNetwork    = 'sNet', // int64_t, socket will be bound to network handle.
+
+    // Slow-motion markers
+    kKeySlowMotionMarkers = 'slmo', // raw data, byte array following spec for
+                                    // MediaFormat#KEY_SLOW_MOTION_MARKERS
 };
 
 enum {
diff --git a/media/libstagefright/include/media/stagefright/ProcessInfo.h b/media/libstagefright/include/media/stagefright/ProcessInfo.h
index 0be1a52..b8a3c10 100644
--- a/media/libstagefright/include/media/stagefright/ProcessInfo.h
+++ b/media/libstagefright/include/media/stagefright/ProcessInfo.h
@@ -20,6 +20,9 @@
 
 #include <media/stagefright/foundation/ABase.h>
 #include <media/stagefright/ProcessInfoInterface.h>
+#include <map>
+#include <mutex>
+#include <utils/Condition.h>
 
 namespace android {
 
@@ -28,11 +31,20 @@
 
     virtual bool getPriority(int pid, int* priority);
     virtual bool isValidPid(int pid);
+    virtual bool overrideProcessInfo(int pid, int procState, int oomScore);
+    virtual void removeProcessInfoOverride(int pid);
 
 protected:
     virtual ~ProcessInfo();
 
 private:
+    struct ProcessInfoOverride {
+        int procState;
+        int oomScore;
+    };
+    std::mutex mOverrideLock;
+    std::map<int, ProcessInfoOverride> mOverrideMap GUARDED_BY(mOverrideLock);
+
     DISALLOW_EVIL_CONSTRUCTORS(ProcessInfo);
 };
 
diff --git a/media/libstagefright/include/media/stagefright/ProcessInfoInterface.h b/media/libstagefright/include/media/stagefright/ProcessInfoInterface.h
index b39112a..9260181 100644
--- a/media/libstagefright/include/media/stagefright/ProcessInfoInterface.h
+++ b/media/libstagefright/include/media/stagefright/ProcessInfoInterface.h
@@ -24,6 +24,8 @@
 struct ProcessInfoInterface : public RefBase {
     virtual bool getPriority(int pid, int* priority) = 0;
     virtual bool isValidPid(int pid) = 0;
+    virtual bool overrideProcessInfo(int pid, int procState, int oomScore);
+    virtual void removeProcessInfoOverride(int pid);
 
 protected:
     virtual ~ProcessInfoInterface() {}
diff --git a/media/libstagefright/include/media/stagefright/RemoteDataSource.h b/media/libstagefright/include/media/stagefright/RemoteDataSource.h
index d82be8a..d605cda 100644
--- a/media/libstagefright/include/media/stagefright/RemoteDataSource.h
+++ b/media/libstagefright/include/media/stagefright/RemoteDataSource.h
@@ -41,6 +41,11 @@
         close();
     }
     virtual sp<IMemory> getIMemory() {
+        Mutex::Autolock lock(mLock);
+        if (mMemory.get() == nullptr) {
+            ALOGE("getIMemory() failed, mMemory is nullptr");
+            return nullptr;
+        }
         return mMemory;
     }
     virtual ssize_t readAt(off64_t offset, size_t size) {
@@ -48,19 +53,35 @@
         if (size > kBufferSize) {
             size = kBufferSize;
         }
+
+        Mutex::Autolock lock(mLock);
+        if (mSource.get() == nullptr) {
+            ALOGE("readAt() failed, mSource is nullptr");
+            return 0;
+        }
         return mSource->readAt(offset, mMemory->unsecurePointer(), size);
     }
     virtual status_t getSize(off64_t *size) {
+        Mutex::Autolock lock(mLock);
+        if (mSource.get() == nullptr) {
+            ALOGE("getSize() failed, mSource is nullptr");
+            return INVALID_OPERATION;
+        }
         return mSource->getSize(size);
     }
     virtual void close() {
         // Protect strong pointer assignments. This also can be called from the binder
         // clean-up procedure which is running on a separate thread.
-        Mutex::Autolock lock(mCloseLock);
+        Mutex::Autolock lock(mLock);
         mSource = nullptr;
         mMemory = nullptr;
     }
     virtual uint32_t getFlags() {
+        Mutex::Autolock lock(mLock);
+        if (mSource.get() == nullptr) {
+            ALOGE("getSize() failed, mSource is nullptr");
+            return 0;
+        }
         return mSource->flags();
     }
     virtual String8 toString()  {
@@ -75,9 +96,10 @@
     sp<IMemory> mMemory;
     sp<DataSource> mSource;
     String8 mName;
-    Mutex mCloseLock;
+    Mutex mLock;
 
     explicit RemoteDataSource(const sp<DataSource> &source) {
+        Mutex::Autolock lock(mLock);
         mSource = source;
         sp<MemoryDealer> memoryDealer = new MemoryDealer(kBufferSize, "RemoteDataSource");
         mMemory = memoryDealer->allocate(kBufferSize);
diff --git a/media/libstagefright/include/media/stagefright/SurfaceUtils.h b/media/libstagefright/include/media/stagefright/SurfaceUtils.h
index ae55c65..35b3fa2 100644
--- a/media/libstagefright/include/media/stagefright/SurfaceUtils.h
+++ b/media/libstagefright/include/media/stagefright/SurfaceUtils.h
@@ -38,6 +38,8 @@
         int width, int height, int format, int rotation, int usage, bool reconnect);
 void setNativeWindowHdrMetadata(
         ANativeWindow *nativeWindow /* nonnull */, HDRStaticInfo *info /* nonnull */);
+status_t setNativeWindowRotation(
+        ANativeWindow *nativeWindow /* nonnull */, int rotation);
 status_t pushBlankBuffersToNativeWindow(ANativeWindow *nativeWindow /* nonnull */);
 status_t nativeWindowConnect(ANativeWindow *surface, const char *reason);
 status_t nativeWindowDisconnect(ANativeWindow *surface, const char *reason);
diff --git a/media/libstagefright/include/media/stagefright/Utils.h b/media/libstagefright/include/media/stagefright/Utils.h
index 2b9b759..1673120 100644
--- a/media/libstagefright/include/media/stagefright/Utils.h
+++ b/media/libstagefright/include/media/stagefright/Utils.h
@@ -33,7 +33,7 @@
         const MetaDataBase *meta, sp<AMessage> *format);
 status_t convertMetaDataToMessage(
         const sp<MetaData> &meta, sp<AMessage> *format);
-void convertMessageToMetaData(
+status_t convertMessageToMetaData(
         const sp<AMessage> &format, sp<MetaData> &meta);
 
 // Returns a pointer to the next NAL start code in buffer of size |length| starting at |data|, or
diff --git a/media/libstagefright/mpeg2ts/Android.bp b/media/libstagefright/mpeg2ts/Android.bp
index fbb2d0c..5d697f7 100644
--- a/media/libstagefright/mpeg2ts/Android.bp
+++ b/media/libstagefright/mpeg2ts/Android.bp
@@ -1,12 +1,11 @@
-cc_library_static {
-    name: "libstagefright_mpeg2support",
+cc_defaults {
+    name: "libstagefright_mpeg2support_defaults",
 
     srcs: [
         "AnotherPacketSource.cpp",
         "ATSParser.cpp",
         "CasManager.cpp",
         "ESQueue.cpp",
-        "HlsSampleDecryptor.cpp",
     ],
 
     include_dirs: [
@@ -28,7 +27,6 @@
     },
 
     shared_libs: [
-        "libcrypto",
         "libhidlmemory",
         "android.hardware.cas.native@1.0",
         "android.hidl.memory@1.0",
@@ -36,9 +34,10 @@
     ],
 
     header_libs: [
-        "libmedia_headers",
+        "libmedia_datasource_headers",
         "libaudioclient_headers",
         "media_ndk_headers",
+        "libstagefright_foundation_headers",
     ],
 
     export_include_dirs: ["."],
@@ -48,4 +47,39 @@
     ],
 
     min_sdk_version: "29",
+
+    host_supported: true,
+
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
+}
+
+
+cc_library_static {
+    name: "libstagefright_mpeg2support",
+    defaults: [
+        "libstagefright_mpeg2support_defaults",
+    ],
+    cflags: [
+        "-DENABLE_CRYPTO",
+    ],
+    shared_libs: [
+        "libcrypto",
+    ],
+    srcs: [
+        "HlsSampleDecryptor.cpp",
+    ],
+}
+
+cc_library_static {
+    name: "libstagefright_mpeg2support_nocrypto",
+    defaults: [
+        "libstagefright_mpeg2support_defaults",
+    ],
+    apex_available: [
+        "com.android.media",
+    ],
 }
diff --git a/media/libstagefright/mpeg2ts/ESQueue.cpp b/media/libstagefright/mpeg2ts/ESQueue.cpp
index ea5d2de..192ba77 100644
--- a/media/libstagefright/mpeg2ts/ESQueue.cpp
+++ b/media/libstagefright/mpeg2ts/ESQueue.cpp
@@ -36,7 +36,7 @@
 #include <inttypes.h>
 #include <netinet/in.h>
 
-#ifndef __ANDROID_APEX__
+#ifdef ENABLE_CRYPTO
 #include "HlsSampleDecryptor.h"
 #endif
 
@@ -55,10 +55,10 @@
     // Create the decryptor anyway since we don't know the use-case unless key is provided
     // Won't decrypt if key info not available (e.g., scanner/extractor just parsing ts files)
     mSampleDecryptor = isSampleEncrypted() ?
-#ifdef __ANDROID_APEX__
-        new SampleDecryptor
-#else
+#ifdef ENABLE_CRYPTO
         new HlsSampleDecryptor
+#else
+        new SampleDecryptor
 #endif
         : NULL;
 }
@@ -172,29 +172,26 @@
         return 0;
     }
 
-    unsigned bsmod __unused = bits.getBits(3);
+    bits.skipBits(3); // bsmod
     unsigned acmod = bits.getBits(3);
-    unsigned cmixlev __unused = 0;
-    unsigned surmixlev __unused = 0;
-    unsigned dsurmod __unused = 0;
 
     if ((acmod & 1) > 0 && acmod != 1) {
         if (bits.numBitsLeft() < 2) {
             return 0;
         }
-        cmixlev = bits.getBits(2);
+        bits.skipBits(2); // cmixlev
     }
     if ((acmod & 4) > 0) {
         if (bits.numBitsLeft() < 2) {
             return 0;
         }
-        surmixlev = bits.getBits(2);
+        bits.skipBits(2); // surmixlev
     }
     if (acmod == 2) {
         if (bits.numBitsLeft() < 2) {
             return 0;
         }
-        dsurmod = bits.getBits(2);
+        bits.skipBits(2); // dsurmod
     }
 
     if (bits.numBitsLeft() < 1) {
@@ -269,7 +266,7 @@
         samplingRate = samplingRateTable2[fscod2];
     } else {
         samplingRate = samplingRateTable[fscod];
-        unsigned numblkscod __unused = bits.getBits(2);
+        bits.skipBits(2); // numblkscod
     }
 
     unsigned acmod = bits.getBits(3);
@@ -1087,7 +1084,7 @@
     }
     unsigned numAUs = bits.getBits(8);
     bits.skipBits(8);
-    unsigned quantization_word_length __unused = bits.getBits(2);
+    bits.skipBits(2); // quantization_word_length
     unsigned audio_sampling_frequency = bits.getBits(3);
     unsigned num_channels = bits.getBits(3);
 
diff --git a/media/libstagefright/mpeg2ts/TEST_MAPPING b/media/libstagefright/mpeg2ts/TEST_MAPPING
new file mode 100644
index 0000000..9f4bbdf
--- /dev/null
+++ b/media/libstagefright/mpeg2ts/TEST_MAPPING
@@ -0,0 +1,9 @@
+// frameworks/av/media/libstagefright/mpeg2ts
+{
+  // tests which require dynamic content
+  // invoke with: atest -- --enable-module-dynamic-download=true
+  // TODO(b/148094059): unit tests not allowed to download content
+  "dynamic-presubmit": [
+    { "name": "Mpeg2tsUnitTest" }
+  ]
+}
diff --git a/media/libstagefright/mpeg2ts/test/Android.bp b/media/libstagefright/mpeg2ts/test/Android.bp
new file mode 100644
index 0000000..d8b0304
--- /dev/null
+++ b/media/libstagefright/mpeg2ts/test/Android.bp
@@ -0,0 +1,71 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+cc_test {
+    name: "Mpeg2tsUnitTest",
+    gtest: true,
+    test_suites: ["device-tests"],
+
+    srcs: [
+        "Mpeg2tsUnitTest.cpp",
+    ],
+
+    shared_libs: [
+        "android.hardware.cas@1.0",
+        "android.hardware.cas.native@1.0",
+        "android.hidl.token@1.0-utils",
+        "android.hidl.allocator@1.0",
+        "libcrypto",
+        "libhidlbase",
+        "libhidlmemory",
+        "liblog",
+        "libmedia",
+        "libbinder",
+        "libbinder_ndk",
+        "libutils",
+    ],
+
+    static_libs: [
+        "libdatasource",
+        "libstagefright",
+        "libstagefright_foundation",
+        "libstagefright_metadatautils",
+        "libstagefright_mpeg2support",
+    ],
+
+    include_dirs: [
+        "frameworks/av/media/extractors/",
+        "frameworks/av/media/libstagefright/",
+    ],
+
+    header_libs: [
+        "libmedia_headers",
+        "libaudioclient_headers",
+    ],
+
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
+
+    sanitize: {
+        cfi: true,
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+    },
+}
diff --git a/media/libstagefright/mpeg2ts/test/AndroidTest.xml b/media/libstagefright/mpeg2ts/test/AndroidTest.xml
new file mode 100644
index 0000000..ac1294d
--- /dev/null
+++ b/media/libstagefright/mpeg2ts/test/AndroidTest.xml
@@ -0,0 +1,31 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<configuration description="Test module config for Mpeg2ts unit tests">
+    <option name="test-suite-tag" value="Mpeg2tsUnitTest" />
+    <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
+        <option name="cleanup" value="true" />
+        <option name="push" value="Mpeg2tsUnitTest->/data/local/tmp/Mpeg2tsUnitTest" />
+        <option name="push-file"
+            key="https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/mpeg2ts/test/Mpeg2tsUnitTest.zip?unzip=true"
+            value="/data/local/tmp/Mpeg2tsUnitTestRes/" />
+    </target_preparer>
+
+    <test class="com.android.tradefed.testtype.GTest" >
+        <option name="native-test-device-path" value="/data/local/tmp" />
+        <option name="module-name" value="Mpeg2tsUnitTest" />
+        <option name="native-test-flag" value="-P /data/local/tmp/Mpeg2tsUnitTestRes/" />
+    </test>
+</configuration>
diff --git a/media/libstagefright/mpeg2ts/test/Mpeg2tsUnitTest.cpp b/media/libstagefright/mpeg2ts/test/Mpeg2tsUnitTest.cpp
new file mode 100644
index 0000000..79c233b
--- /dev/null
+++ b/media/libstagefright/mpeg2ts/test/Mpeg2tsUnitTest.cpp
@@ -0,0 +1,236 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "Mpeg2tsUnitTest"
+
+#include <utils/Log.h>
+
+#include <stdint.h>
+#include <sys/stat.h>
+
+#include <datasource/FileSource.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MetaDataBase.h>
+#include <media/stagefright/foundation/AUtils.h>
+
+#include "mpeg2ts/ATSParser.h"
+#include "mpeg2ts/AnotherPacketSource.h"
+
+#include "Mpeg2tsUnitTestEnvironment.h"
+
+constexpr size_t kTSPacketSize = 188;
+constexpr uint16_t kPIDMask = 0x1FFF;
+// Max value of PID which is also used for Null packets
+constexpr uint16_t kPIDMaxValue = 8191;
+constexpr uint8_t kTSSyncByte = 0x47;
+constexpr uint8_t kVideoPresent = 0x01;
+constexpr uint8_t kAudioPresent = 0x02;
+constexpr uint8_t kMetaDataPresent = 0x04;
+
+static Mpeg2tsUnitTestEnvironment *gEnv = nullptr;
+
+using namespace android;
+
+class Mpeg2tsUnitTest
+    : public ::testing::TestWithParam<
+              tuple</*fileName*/ string, /*sourceType*/ char, /*numSource*/ uint16_t>> {
+  public:
+    Mpeg2tsUnitTest()
+        : mInputBuffer(nullptr), mSource(nullptr), mFpInput(nullptr), mParser(nullptr) {}
+
+    ~Mpeg2tsUnitTest() {
+        if (mInputBuffer) free(mInputBuffer);
+        if (mFpInput) fclose(mFpInput);
+        mSource.clear();
+    }
+
+    void SetUp() override {
+        mOffset = 0;
+        mNumDataSource = 0;
+        tuple<string, char, uint16_t> params = GetParam();
+        char sourceType = get<1>(params);
+        /* mSourceType = 0b x x x x x M A V
+                                     /  |  \
+                            metaData  audio  video */
+        mMediaType = (sourceType & 0x07);
+        mNumDataSource = get<2>(params);
+        string inputFile = gEnv->getRes() + get<0>(params);
+        mFpInput = fopen(inputFile.c_str(), "rb");
+        ASSERT_NE(mFpInput, nullptr) << "Failed to open file: " << inputFile;
+
+        struct stat buf;
+        int8_t err = stat(inputFile.c_str(), &buf);
+        ASSERT_EQ(err, 0) << "Failed to get information for file: " << inputFile;
+
+        long fileSize = buf.st_size;
+        mTotalPackets = fileSize / kTSPacketSize;
+        int32_t fd = fileno(mFpInput);
+        ASSERT_GE(fd, 0) << "Failed to get the integer file descriptor";
+
+        mSource = new FileSource(dup(fd), 0, buf.st_size);
+        ASSERT_NE(mSource, nullptr) << "Failed to get the data source!";
+
+        mParser = new ATSParser();
+        ASSERT_NE(mParser, nullptr) << "Unable to create ATS parser!";
+        mInputBuffer = (uint8_t *)malloc(kTSPacketSize);
+        ASSERT_NE(mInputBuffer, nullptr) << "Failed to allocate memory for TS packet!";
+    }
+
+    uint64_t mOffset;
+    uint64_t mTotalPackets;
+    uint16_t mNumDataSource;
+
+    int8_t mMediaType;
+
+    uint8_t *mInputBuffer;
+    string mInputFile;
+    sp<DataSource> mSource;
+    FILE *mFpInput;
+    ATSParser *mParser;
+};
+
+TEST_P(Mpeg2tsUnitTest, MediaInfoTest) {
+    bool videoFound = false;
+    bool audioFound = false;
+    bool metaDataFound = false;
+    bool syncPointPresent = false;
+
+    int16_t totalDataSource = 0;
+    int32_t val32 = 0;
+    uint8_t numDataSource = 0;
+    uint8_t packet[kTSPacketSize];
+    ssize_t numBytesRead = -1;
+
+    ATSParser::SyncEvent event(mOffset);
+    static const ATSParser::SourceType mediaType[] = {ATSParser::VIDEO, ATSParser::AUDIO,
+                                                      ATSParser::META, ATSParser::NUM_SOURCE_TYPES};
+    const uint32_t nMediaTypes = sizeof(mediaType) / sizeof(mediaType[0]);
+
+    while ((numBytesRead = mSource->readAt(mOffset, packet, kTSPacketSize)) == kTSPacketSize) {
+        ASSERT_TRUE(packet[0] == kTSSyncByte) << "Sync byte error!";
+
+        // pid is 13 bits
+        uint16_t pid = (packet[1] + (packet[2] << 8)) & kPIDMask;
+        ASSERT_TRUE(pid <= kPIDMaxValue) << "Invalid PID: " << pid;
+
+        status_t err = mParser->feedTSPacket(packet, kTSPacketSize, &event);
+        ASSERT_EQ(err, (status_t)OK) << "Unable to feed TS packet!";
+
+        mOffset += numBytesRead;
+        for (int i = 0; i < nMediaTypes; i++) {
+            if (mParser->hasSource(mediaType[i])) {
+                switch (mediaType[i]) {
+                    case ATSParser::VIDEO:
+                        videoFound = true;
+                        break;
+                    case ATSParser::AUDIO:
+                        audioFound = true;
+                        break;
+                    case ATSParser::META:
+                        metaDataFound = true;
+                        break;
+                    case ATSParser::NUM_SOURCE_TYPES:
+                        numDataSource = 3;
+                        break;
+                    default:
+                        break;
+                }
+            }
+        }
+        if (videoFound && audioFound && metaDataFound && (numDataSource == 3)) break;
+    }
+
+    for (int i = 0; i < nMediaTypes; i++) {
+        ATSParser::SourceType currentMediaType = mediaType[i];
+        if (mParser->hasSource(currentMediaType)) {
+            if (event.hasReturnedData()) {
+                syncPointPresent = true;
+                sp<AnotherPacketSource> syncPacketSource = event.getMediaSource();
+                ASSERT_NE(syncPacketSource, nullptr)
+                        << "Cannot get sync source for media type: " << currentMediaType;
+
+                status_t err = syncPacketSource->start();
+                ASSERT_EQ(err, (status_t)OK) << "Error returned while starting!";
+
+                sp<MetaData> format = syncPacketSource->getFormat();
+                ASSERT_NE(format, nullptr) << "Unable to get the format of the source packet!";
+
+                MediaBufferBase *buf;
+                syncPacketSource->read(&buf, nullptr);
+                ASSERT_NE(buf, nullptr) << "Failed to read sync packet source data";
+
+                MetaDataBase &inMeta = buf->meta_data();
+                bool status = inMeta.findInt32(kKeyIsSyncFrame, &val32);
+                ASSERT_EQ(status, true) << "Sync frame key is not set";
+
+                status = inMeta.findInt32(kKeyCryptoMode, &val32);
+                ASSERT_EQ(status, false) << "Invalid packet, found scrambled packets!";
+
+                err = syncPacketSource->stop();
+                ASSERT_EQ(err, (status_t)OK) << "Error returned while stopping!";
+            }
+            sp<AnotherPacketSource> packetSource = mParser->getSource(currentMediaType);
+            ASSERT_NE(packetSource, nullptr)
+                    << "Cannot get source for media type: " << currentMediaType;
+
+            status_t err = packetSource->start();
+            ASSERT_EQ(err, (status_t)OK) << "Error returned while starting!";
+            sp<MetaData> format = packetSource->getFormat();
+            ASSERT_NE(format, nullptr) << "Unable to get the format of the packet!";
+
+            err = packetSource->stop();
+            ASSERT_EQ(err, (status_t)OK) << "Error returned while stopping!";
+        }
+    }
+
+    ASSERT_EQ(videoFound, bool(mMediaType & kVideoPresent)) << "No Video packets found!";
+    ASSERT_EQ(audioFound, bool(mMediaType & kAudioPresent)) << "No Audio packets found!";
+    ASSERT_EQ(metaDataFound, bool(mMediaType & kMetaDataPresent)) << "No meta data found!";
+
+    if (videoFound || audioFound) {
+        ASSERT_TRUE(syncPointPresent) << "No sync points found for audio/video";
+    }
+
+    if (videoFound) totalDataSource += 1;
+    if (audioFound) totalDataSource += 1;
+    if (metaDataFound) totalDataSource += 1;
+
+    ASSERT_TRUE(totalDataSource == mNumDataSource)
+            << "Expected " << mNumDataSource << " data sources, found " << totalDataSource;
+    if (numDataSource == 3) {
+        ASSERT_EQ(numDataSource, mNumDataSource)
+                << "Expected " << mNumDataSource << " data sources, found " << totalDataSource;
+    }
+}
+
+INSTANTIATE_TEST_SUITE_P(
+        infoTest, Mpeg2tsUnitTest,
+        ::testing::Values(make_tuple("crowd_1920x1080_25fps_6700kbps_h264.ts", 0x01, 1),
+                          make_tuple("segment000001.ts", 0x03, 2),
+                          make_tuple("bbb_44100hz_2ch_128kbps_mp3_5mins.ts", 0x02, 1)));
+
+int32_t main(int argc, char **argv) {
+    gEnv = new Mpeg2tsUnitTestEnvironment();
+    ::testing::AddGlobalTestEnvironment(gEnv);
+    ::testing::InitGoogleTest(&argc, argv);
+    uint8_t status = gEnv->initFromOptions(argc, argv);
+    if (status == 0) {
+        status = RUN_ALL_TESTS();
+        ALOGV("Mpeg2tsUnit Test Result = %d\n", status);
+    }
+    return status;
+}
diff --git a/media/libstagefright/mpeg2ts/test/Mpeg2tsUnitTestEnvironment.h b/media/libstagefright/mpeg2ts/test/Mpeg2tsUnitTestEnvironment.h
new file mode 100644
index 0000000..9e41db7
--- /dev/null
+++ b/media/libstagefright/mpeg2ts/test/Mpeg2tsUnitTestEnvironment.h
@@ -0,0 +1,73 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __MPEG2TS_UNIT_TEST_ENVIRONMENT_H__
+#define __MPEG2TS_UNIT_TEST_ENVIRONMENT_H__
+
+#include <gtest/gtest.h>
+
+#include <getopt.h>
+
+using namespace std;
+
+class Mpeg2tsUnitTestEnvironment : public ::testing::Environment {
+  public:
+    Mpeg2tsUnitTestEnvironment() : res("/data/local/tmp/") {}
+
+    // Parses the command line arguments
+    int initFromOptions(int argc, char **argv);
+
+    void setRes(const char *_res) { res = _res; }
+
+    const string getRes() const { return res; }
+
+  private:
+    string res;
+};
+
+int Mpeg2tsUnitTestEnvironment::initFromOptions(int argc, char **argv) {
+    static struct option options[] = {{"path", required_argument, 0, 'P'}, {0, 0, 0, 0}};
+
+    while (true) {
+        int index = 0;
+        int c = getopt_long(argc, argv, "P:", options, &index);
+        if (c == -1) {
+            break;
+        }
+
+        switch (c) {
+            case 'P': {
+                setRes(optarg);
+                break;
+            }
+            default:
+                break;
+        }
+    }
+
+    if (optind < argc) {
+        fprintf(stderr,
+                "unrecognized option: %s\n\n"
+                "usage: %s <gtest options> <test options>\n\n"
+                "test options are:\n\n"
+                "-P, --path: Resource files directory location\n",
+                argv[optind ?: 1], argv[0]);
+        return 2;
+    }
+    return 0;
+}
+
+#endif  // __MPEG2TS_UNIT_TEST_ENVIRONMENT_H__
diff --git a/media/libstagefright/mpeg2ts/test/README.md b/media/libstagefright/mpeg2ts/test/README.md
new file mode 100644
index 0000000..237ce72
--- /dev/null
+++ b/media/libstagefright/mpeg2ts/test/README.md
@@ -0,0 +1,38 @@
+## Media Testing ##
+---
+#### Mpeg2TS Unit Test :
+The Mpeg2TS Unit Test Suite validates the functionality of the mpeg2ts library (libstagefright_mpeg2support).
+
+Run the following command to build the test suite:
+```
+mmm frameworks/av/media/libstagefright/mpeg2ts/test/
+```
+
+The 32-bit binaries will be created in the following path: ${OUT}/data/nativetest/
+
+The 64-bit binaries will be created in the following path: ${OUT}/data/nativetest64/
+
+To test the 64-bit binary, push it from nativetest64:
+
+adb push ${OUT}/data/nativetest64/Mpeg2tsUnitTest/Mpeg2tsUnitTest /data/local/tmp/
+
+To test the 32-bit binary, push it from nativetest:
+
+adb push ${OUT}/data/nativetest/Mpeg2tsUnitTest/Mpeg2tsUnitTest /data/local/tmp/
+
+The resource files for the tests are taken from [here](https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/mpeg2ts/test/Mpeg2tsUnitTest.zip).
+Download, unzip and push these files onto the device for testing.
+
+```
+adb push Mpeg2tsUnitTestRes/. /data/local/tmp/
+```
+
+usage: Mpeg2tsUnitTest -P \<path_to_folder\>
+```
+adb shell /data/local/tmp/Mpeg2tsUnitTest -P /data/local/tmp/Mpeg2tsUnitTestRes/
+```
+Alternatively, the test can also be run using the atest command.
+
+```
+atest Mpeg2tsUnitTest -- --enable-module-dynamic-download=true
+```
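+
+A subset of the test cases can also be selected with the standard gtest filter option; for example, to run only the MediaInfoTest cases (the filter pattern below is just an illustration):
+
+```
+adb shell /data/local/tmp/Mpeg2tsUnitTest -P /data/local/tmp/Mpeg2tsUnitTestRes/ --gtest_filter="*MediaInfoTest*"
+```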
diff --git a/media/libstagefright/omx/1.0/Omx.cpp b/media/libstagefright/omx/1.0/Omx.cpp
index eef9ce3..eb15039 100644
--- a/media/libstagefright/omx/1.0/Omx.cpp
+++ b/media/libstagefright/omx/1.0/Omx.cpp
@@ -22,7 +22,7 @@
 #include <media/openmax/OMX_AsString.h>
 
 #include <media/stagefright/omx/OMXUtils.h>
-#include <media/stagefright/omx/OMXMaster.h>
+#include <media/stagefright/omx/OMXStore.h>
 #include <media/stagefright/omx/OmxGraphicBufferSource.h>
 
 #include <media/stagefright/omx/1.0/WOmxNode.h>
@@ -41,21 +41,21 @@
 constexpr size_t kMaxNodeInstances = (1 << 16);
 
 Omx::Omx() :
-    mMaster(new OMXMaster()),
+    mStore(new OMXStore()),
     mParser() {
     (void)mParser.parseXmlFilesInSearchDirs();
     (void)mParser.parseXmlPath(mParser.defaultProfilingResultsXmlPath);
 }
 
 Omx::~Omx() {
-    delete mMaster;
+    delete mStore;
 }
 
 Return<void> Omx::listNodes(listNodes_cb _hidl_cb) {
     std::list<::android::IOMX::ComponentInfo> list;
     char componentName[256];
     for (OMX_U32 index = 0;
-            mMaster->enumerateComponents(
+            mStore->enumerateComponents(
             componentName, sizeof(componentName), index) == OMX_ErrorNone;
             ++index) {
         list.push_back(::android::IOMX::ComponentInfo());
@@ -63,7 +63,7 @@
         info.mName = componentName;
         ::android::Vector<::android::String8> roles;
         OMX_ERRORTYPE err =
-                mMaster->getRolesOfComponent(componentName, &roles);
+                mStore->getRolesOfComponent(componentName, &roles);
         if (err == OMX_ErrorNone) {
             for (OMX_U32 i = 0; i < roles.size(); ++i) {
                 info.mRoles.push_back(roles[i]);
@@ -101,7 +101,7 @@
                 this, new LWOmxObserver(observer), name.c_str());
 
         OMX_COMPONENTTYPE *handle;
-        OMX_ERRORTYPE err = mMaster->makeComponentInstance(
+        OMX_ERRORTYPE err = mStore->makeComponentInstance(
                 name.c_str(), &OMXNodeInstance::kCallbacks,
                 instance.get(), &handle);
 
@@ -208,7 +208,7 @@
 
     OMX_ERRORTYPE err = OMX_ErrorNone;
     if (instance->handle() != NULL) {
-        err = mMaster->destroyComponentInstance(
+        err = mStore->destroyComponentInstance(
                 static_cast<OMX_COMPONENTTYPE*>(instance->handle()));
     }
     return StatusFromOMXError(err);
diff --git a/media/libstagefright/omx/1.0/OmxStore.cpp b/media/libstagefright/omx/1.0/OmxStore.cpp
index 67f478e..b5c1166 100644
--- a/media/libstagefright/omx/1.0/OmxStore.cpp
+++ b/media/libstagefright/omx/1.0/OmxStore.cpp
@@ -54,6 +54,24 @@
         });
     }
 
+    if (!nodes.empty()) {
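+        // Compute the longest (case-insensitively matched) common prefix of all node
+        // names; this replaces the value previously taken from parser.getCommonPrefix().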
+        auto anyNode = nodes.cbegin();
+        std::string::const_iterator first = anyNode->cbegin();
+        std::string::const_iterator last = anyNode->cend();
+        for (const std::string &name : nodes) {
+            std::string::const_iterator it1 = first;
+            for (std::string::const_iterator it2 = name.cbegin();
+                    it1 != last && it2 != name.cend() && tolower(*it1) == tolower(*it2);
+                    ++it1, ++it2) {
+            }
+            last = it1;
+        }
+        mPrefix = std::string(first, last);
+        LOG(INFO) << "omx common prefix: '" << mPrefix.c_str() << "'";
+    } else {
+        LOG(INFO) << "omx common prefix: no nodes";
+    }
+
     MediaCodecsXmlParser parser;
     parser.parseXmlFilesInSearchDirs(xmlNames, searchDirs);
     if (profilingResultsXmlPath != nullptr) {
@@ -112,8 +130,6 @@
         mRoleList[i] = std::move(role);
         ++i;
     }
-
-    mPrefix = parser.getCommonPrefix();
 }
 
 OmxStore::~OmxStore() {
diff --git a/media/libstagefright/omx/1.0/WGraphicBufferSource.cpp b/media/libstagefright/omx/1.0/WGraphicBufferSource.cpp
index 7d217eb..f7bf3ba 100644
--- a/media/libstagefright/omx/1.0/WGraphicBufferSource.cpp
+++ b/media/libstagefright/omx/1.0/WGraphicBufferSource.cpp
@@ -67,7 +67,7 @@
             int32_t dataSpace, int32_t aspects, int32_t pixelFormat) override {
         Message tMsg;
         tMsg.type = Message::Type::EVENT;
-        tMsg.fence = native_handle_create(0, 0);
+        tMsg.fence.setTo(native_handle_create(0, 0), /* shouldOwn = */ true);
         tMsg.data.eventData.event = uint32_t(OMX_EventDataSpaceChanged);
         tMsg.data.eventData.data1 = dataSpace;
         tMsg.data.eventData.data2 = aspects;
diff --git a/media/libstagefright/omx/Android.bp b/media/libstagefright/omx/Android.bp
index 78b4f19..7c372cd 100644
--- a/media/libstagefright/omx/Android.bp
+++ b/media/libstagefright/omx/Android.bp
@@ -7,7 +7,7 @@
     double_loadable: true,
 
     srcs: [
-        "OMXMaster.cpp",
+        "OMXStore.cpp",
         "OMXNodeInstance.cpp",
         "OMXUtils.cpp",
         "OmxGraphicBufferSource.cpp",
diff --git a/media/libstagefright/omx/OMXMaster.cpp b/media/libstagefright/omx/OMXMaster.cpp
deleted file mode 100644
index 094b1f5..0000000
--- a/media/libstagefright/omx/OMXMaster.cpp
+++ /dev/null
@@ -1,233 +0,0 @@
-/*
- * Copyright (C) 2009 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "OMXMaster"
-#include <android-base/properties.h>
-#include <utils/Log.h>
-
-#include <media/stagefright/omx/OMXMaster.h>
-#include <media/stagefright/omx/SoftOMXPlugin.h>
-#include <media/stagefright/foundation/ADebug.h>
-
-#include <vndksupport/linker.h>
-
-#include <dlfcn.h>
-#include <fcntl.h>
-
-namespace android {
-
-OMXMaster::OMXMaster() {
-
-    pid_t pid = getpid();
-    char filename[20];
-    snprintf(filename, sizeof(filename), "/proc/%d/comm", pid);
-    int fd = open(filename, O_RDONLY);
-    if (fd < 0) {
-      ALOGW("couldn't determine process name");
-      strlcpy(mProcessName, "<unknown>", sizeof(mProcessName));
-    } else {
-      ssize_t len = read(fd, mProcessName, sizeof(mProcessName));
-      if (len < 2) {
-        ALOGW("couldn't determine process name");
-        strlcpy(mProcessName, "<unknown>", sizeof(mProcessName));
-      } else {
-        // the name is newline terminated, so erase the newline
-        mProcessName[len - 1] = 0;
-      }
-      close(fd);
-    }
-
-    addVendorPlugin();
-    addPlatformPlugin();
-}
-
-OMXMaster::~OMXMaster() {
-    clearPlugins();
-}
-
-void OMXMaster::addVendorPlugin() {
-    addPlugin("libstagefrighthw.so");
-}
-
-void OMXMaster::addPlatformPlugin() {
-    addPlugin("libstagefright_softomx_plugin.so");
-}
-
-void OMXMaster::addPlugin(const char *libname) {
-    if (::android::base::GetIntProperty("vendor.media.omx", int64_t(1)) == 0) {
-        return;
-    }
-
-    void *libHandle = android_load_sphal_library(libname, RTLD_NOW);
-
-    if (libHandle == NULL) {
-        return;
-    }
-
-    typedef OMXPluginBase *(*CreateOMXPluginFunc)();
-    CreateOMXPluginFunc createOMXPlugin =
-        (CreateOMXPluginFunc)dlsym(
-                libHandle, "createOMXPlugin");
-    if (!createOMXPlugin)
-        createOMXPlugin = (CreateOMXPluginFunc)dlsym(
-                libHandle, "_ZN7android15createOMXPluginEv");
-
-    OMXPluginBase *plugin = nullptr;
-    if (createOMXPlugin) {
-        plugin = (*createOMXPlugin)();
-    }
-
-    if (plugin) {
-        mPlugins.push_back({ plugin, libHandle });
-        addPlugin(plugin);
-    } else {
-        android_unload_sphal_library(libHandle);
-    }
-}
-
-void OMXMaster::addPlugin(OMXPluginBase *plugin) {
-    Mutex::Autolock autoLock(mLock);
-
-    OMX_U32 index = 0;
-
-    char name[128];
-    OMX_ERRORTYPE err;
-    while ((err = plugin->enumerateComponents(
-                    name, sizeof(name), index++)) == OMX_ErrorNone) {
-        String8 name8(name);
-
-        if (mPluginByComponentName.indexOfKey(name8) >= 0) {
-            ALOGE("A component of name '%s' already exists, ignoring this one.",
-                 name8.string());
-
-            continue;
-        }
-
-        mPluginByComponentName.add(name8, plugin);
-    }
-
-    if (err != OMX_ErrorNoMore) {
-        ALOGE("OMX plugin failed w/ error 0x%08x after registering %zu "
-             "components", err, mPluginByComponentName.size());
-    }
-}
-
-void OMXMaster::clearPlugins() {
-    Mutex::Autolock autoLock(mLock);
-
-    mPluginByComponentName.clear();
-    mPluginByInstance.clear();
-
-    typedef void (*DestroyOMXPluginFunc)(OMXPluginBase*);
-    for (const Plugin &plugin : mPlugins) {
-        DestroyOMXPluginFunc destroyOMXPlugin =
-            (DestroyOMXPluginFunc)dlsym(
-                    plugin.mLibHandle, "destroyOMXPlugin");
-        if (destroyOMXPlugin)
-            destroyOMXPlugin(plugin.mOmx);
-        else
-            delete plugin.mOmx;
-
-        android_unload_sphal_library(plugin.mLibHandle);
-    }
-
-    mPlugins.clear();
-}
-
-OMX_ERRORTYPE OMXMaster::makeComponentInstance(
-        const char *name,
-        const OMX_CALLBACKTYPE *callbacks,
-        OMX_PTR appData,
-        OMX_COMPONENTTYPE **component) {
-    ALOGI("makeComponentInstance(%s) in %s process", name, mProcessName);
-    Mutex::Autolock autoLock(mLock);
-
-    *component = NULL;
-
-    ssize_t index = mPluginByComponentName.indexOfKey(String8(name));
-
-    if (index < 0) {
-        return OMX_ErrorInvalidComponentName;
-    }
-
-    OMXPluginBase *plugin = mPluginByComponentName.valueAt(index);
-    OMX_ERRORTYPE err =
-        plugin->makeComponentInstance(name, callbacks, appData, component);
-
-    if (err != OMX_ErrorNone) {
-        return err;
-    }
-
-    mPluginByInstance.add(*component, plugin);
-
-    return err;
-}
-
-OMX_ERRORTYPE OMXMaster::destroyComponentInstance(
-        OMX_COMPONENTTYPE *component) {
-    Mutex::Autolock autoLock(mLock);
-
-    ssize_t index = mPluginByInstance.indexOfKey(component);
-
-    if (index < 0) {
-        return OMX_ErrorBadParameter;
-    }
-
-    OMXPluginBase *plugin = mPluginByInstance.valueAt(index);
-    mPluginByInstance.removeItemsAt(index);
-
-    return plugin->destroyComponentInstance(component);
-}
-
-OMX_ERRORTYPE OMXMaster::enumerateComponents(
-        OMX_STRING name,
-        size_t size,
-        OMX_U32 index) {
-    Mutex::Autolock autoLock(mLock);
-
-    size_t numComponents = mPluginByComponentName.size();
-
-    if (index >= numComponents) {
-        return OMX_ErrorNoMore;
-    }
-
-    const String8 &name8 = mPluginByComponentName.keyAt(index);
-
-    CHECK(size >= 1 + name8.size());
-    strcpy(name, name8.string());
-
-    return OMX_ErrorNone;
-}
-
-OMX_ERRORTYPE OMXMaster::getRolesOfComponent(
-        const char *name,
-        Vector<String8> *roles) {
-    Mutex::Autolock autoLock(mLock);
-
-    roles->clear();
-
-    ssize_t index = mPluginByComponentName.indexOfKey(String8(name));
-
-    if (index < 0) {
-        return OMX_ErrorInvalidComponentName;
-    }
-
-    OMXPluginBase *plugin = mPluginByComponentName.valueAt(index);
-    return plugin->getRolesOfComponent(name, roles);
-}
-
-}  // namespace android
diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp
index ac42373..bebd516 100644
--- a/media/libstagefright/omx/OMXNodeInstance.cpp
+++ b/media/libstagefright/omx/OMXNodeInstance.cpp
@@ -22,7 +22,7 @@
 #include <inttypes.h>
 
 #include <media/stagefright/omx/OMXNodeInstance.h>
-#include <media/stagefright/omx/OMXMaster.h>
+#include <media/stagefright/omx/OMXStore.h>
 #include <media/stagefright/omx/OMXUtils.h>
 #include <android/IOMXBufferSource.h>
 
diff --git a/media/libstagefright/omx/OMXStore.cpp b/media/libstagefright/omx/OMXStore.cpp
new file mode 100644
index 0000000..e8fee42
--- /dev/null
+++ b/media/libstagefright/omx/OMXStore.cpp
@@ -0,0 +1,233 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "OMXStore"
+#include <android-base/properties.h>
+#include <utils/Log.h>
+
+#include <media/stagefright/omx/OMXStore.h>
+#include <media/stagefright/omx/SoftOMXPlugin.h>
+#include <media/stagefright/foundation/ADebug.h>
+
+#include <vndksupport/linker.h>
+
+#include <dlfcn.h>
+#include <fcntl.h>
+
+namespace android {
+
+OMXStore::OMXStore() {
+
+    pid_t pid = getpid();
+    char filename[20];
+    snprintf(filename, sizeof(filename), "/proc/%d/comm", pid);
+    int fd = open(filename, O_RDONLY);
+    if (fd < 0) {
+      ALOGW("couldn't determine process name");
+      strlcpy(mProcessName, "<unknown>", sizeof(mProcessName));
+    } else {
+      ssize_t len = read(fd, mProcessName, sizeof(mProcessName));
+      if (len < 2) {
+        ALOGW("couldn't determine process name");
+        strlcpy(mProcessName, "<unknown>", sizeof(mProcessName));
+      } else {
+        // the name is newline terminated, so erase the newline
+        mProcessName[len - 1] = 0;
+      }
+      close(fd);
+    }
+
+    addVendorPlugin();
+    addPlatformPlugin();
+}
+
+OMXStore::~OMXStore() {
+    clearPlugins();
+}
+
+void OMXStore::addVendorPlugin() {
+    addPlugin("libstagefrighthw.so");
+}
+
+void OMXStore::addPlatformPlugin() {
+    addPlugin("libstagefright_softomx_plugin.so");
+}
+
+void OMXStore::addPlugin(const char *libname) {
+    if (::android::base::GetIntProperty("vendor.media.omx", int64_t(1)) == 0) {
+        return;
+    }
+
+    void *libHandle = android_load_sphal_library(libname, RTLD_NOW);
+
+    if (libHandle == NULL) {
+        return;
+    }
+
+    typedef OMXPluginBase *(*CreateOMXPluginFunc)();
+    CreateOMXPluginFunc createOMXPlugin =
+        (CreateOMXPluginFunc)dlsym(
+                libHandle, "createOMXPlugin");
+    if (!createOMXPlugin)
+        createOMXPlugin = (CreateOMXPluginFunc)dlsym(
+                libHandle, "_ZN7android15createOMXPluginEv");
+
+    OMXPluginBase *plugin = nullptr;
+    if (createOMXPlugin) {
+        plugin = (*createOMXPlugin)();
+    }
+
+    if (plugin) {
+        mPlugins.push_back({ plugin, libHandle });
+        addPlugin(plugin);
+    } else {
+        android_unload_sphal_library(libHandle);
+    }
+}
+
+void OMXStore::addPlugin(OMXPluginBase *plugin) {
+    Mutex::Autolock autoLock(mLock);
+
+    OMX_U32 index = 0;
+
+    char name[128];
+    OMX_ERRORTYPE err;
+    while ((err = plugin->enumerateComponents(
+                    name, sizeof(name), index++)) == OMX_ErrorNone) {
+        String8 name8(name);
+
+        if (mPluginByComponentName.indexOfKey(name8) >= 0) {
+            ALOGE("A component of name '%s' already exists, ignoring this one.",
+                 name8.string());
+
+            continue;
+        }
+
+        mPluginByComponentName.add(name8, plugin);
+    }
+
+    if (err != OMX_ErrorNoMore) {
+        ALOGE("OMX plugin failed w/ error 0x%08x after registering %zu "
+             "components", err, mPluginByComponentName.size());
+    }
+}
+
+void OMXStore::clearPlugins() {
+    Mutex::Autolock autoLock(mLock);
+
+    mPluginByComponentName.clear();
+    mPluginByInstance.clear();
+
+    typedef void (*DestroyOMXPluginFunc)(OMXPluginBase*);
+    for (const Plugin &plugin : mPlugins) {
+        DestroyOMXPluginFunc destroyOMXPlugin =
+            (DestroyOMXPluginFunc)dlsym(
+                    plugin.mLibHandle, "destroyOMXPlugin");
+        if (destroyOMXPlugin)
+            destroyOMXPlugin(plugin.mOmx);
+        else
+            delete plugin.mOmx;
+
+        android_unload_sphal_library(plugin.mLibHandle);
+    }
+
+    mPlugins.clear();
+}
+
+OMX_ERRORTYPE OMXStore::makeComponentInstance(
+        const char *name,
+        const OMX_CALLBACKTYPE *callbacks,
+        OMX_PTR appData,
+        OMX_COMPONENTTYPE **component) {
+    ALOGI("makeComponentInstance(%s) in %s process", name, mProcessName);
+    Mutex::Autolock autoLock(mLock);
+
+    *component = NULL;
+
+    ssize_t index = mPluginByComponentName.indexOfKey(String8(name));
+
+    if (index < 0) {
+        return OMX_ErrorInvalidComponentName;
+    }
+
+    OMXPluginBase *plugin = mPluginByComponentName.valueAt(index);
+    OMX_ERRORTYPE err =
+        plugin->makeComponentInstance(name, callbacks, appData, component);
+
+    if (err != OMX_ErrorNone) {
+        return err;
+    }
+
+    mPluginByInstance.add(*component, plugin);
+
+    return err;
+}
+
+OMX_ERRORTYPE OMXStore::destroyComponentInstance(
+        OMX_COMPONENTTYPE *component) {
+    Mutex::Autolock autoLock(mLock);
+
+    ssize_t index = mPluginByInstance.indexOfKey(component);
+
+    if (index < 0) {
+        return OMX_ErrorBadParameter;
+    }
+
+    OMXPluginBase *plugin = mPluginByInstance.valueAt(index);
+    mPluginByInstance.removeItemsAt(index);
+
+    return plugin->destroyComponentInstance(component);
+}
+
+OMX_ERRORTYPE OMXStore::enumerateComponents(
+        OMX_STRING name,
+        size_t size,
+        OMX_U32 index) {
+    Mutex::Autolock autoLock(mLock);
+
+    size_t numComponents = mPluginByComponentName.size();
+
+    if (index >= numComponents) {
+        return OMX_ErrorNoMore;
+    }
+
+    const String8 &name8 = mPluginByComponentName.keyAt(index);
+
+    CHECK(size >= 1 + name8.size());
+    strcpy(name, name8.string());
+
+    return OMX_ErrorNone;
+}
+
+OMX_ERRORTYPE OMXStore::getRolesOfComponent(
+        const char *name,
+        Vector<String8> *roles) {
+    Mutex::Autolock autoLock(mLock);
+
+    roles->clear();
+
+    ssize_t index = mPluginByComponentName.indexOfKey(String8(name));
+
+    if (index < 0) {
+        return OMX_ErrorInvalidComponentName;
+    }
+
+    OMXPluginBase *plugin = mPluginByComponentName.valueAt(index);
+    return plugin->getRolesOfComponent(name, roles);
+}
+
+}  // namespace android
diff --git a/media/libstagefright/omx/OMXUtils.cpp b/media/libstagefright/omx/OMXUtils.cpp
index 1b8493a..49b2dec 100644
--- a/media/libstagefright/omx/OMXUtils.cpp
+++ b/media/libstagefright/omx/OMXUtils.cpp
@@ -172,6 +172,8 @@
             "audio_decoder.ac4", "audio_encoder.ac4" },
         { MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC,
             "image_decoder.heic", "image_encoder.heic" },
+        { MEDIA_MIMETYPE_IMAGE_AVIF,
+            "image_decoder.avif", "image_encoder.avif" },
     };
 
     static const size_t kNumMimeToRole =
@@ -354,7 +356,7 @@
     DescribeColorFormat2Params describeParams;
     InitOMXParams(&describeParams);
     describeParams.eColorFormat = (OMX_COLOR_FORMATTYPE)colorFormat;
-    // reasonable dummy values
+    // reasonable initial values (that will be overwritten)
     describeParams.nFrameWidth = 128;
     describeParams.nFrameHeight = 128;
     describeParams.nStride = 128;
diff --git a/media/libstagefright/omx/include/media/stagefright/omx/1.0/Omx.h b/media/libstagefright/omx/include/media/stagefright/omx/1.0/Omx.h
index 5a46b26..84ae511 100644
--- a/media/libstagefright/omx/include/media/stagefright/omx/1.0/Omx.h
+++ b/media/libstagefright/omx/include/media/stagefright/omx/1.0/Omx.h
@@ -27,7 +27,7 @@
 
 namespace android {
 
-struct OMXMaster;
+struct OMXStore;
 struct OMXNodeInstance;
 
 namespace hardware {
@@ -51,7 +51,7 @@
 using ::android::sp;
 using ::android::wp;
 
-using ::android::OMXMaster;
+using ::android::OMXStore;
 using ::android::OMXNodeInstance;
 
 struct Omx : public IOmx, public hidl_death_recipient {
@@ -73,7 +73,7 @@
     status_t freeNode(sp<OMXNodeInstance> const& instance);
 
 protected:
-    OMXMaster* mMaster;
+    OMXStore* mStore;
     Mutex mLock;
     KeyedVector<wp<IBase>, sp<OMXNodeInstance> > mLiveNodes;
     KeyedVector<OMXNodeInstance*, wp<IBase> > mNode2Observer;
diff --git a/media/libstagefright/omx/include/media/stagefright/omx/OMXMaster.h b/media/libstagefright/omx/include/media/stagefright/omx/OMXMaster.h
deleted file mode 100644
index 93eaef1..0000000
--- a/media/libstagefright/omx/include/media/stagefright/omx/OMXMaster.h
+++ /dev/null
@@ -1,75 +0,0 @@
-/*
- * Copyright (C) 2009 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef OMX_MASTER_H_
-
-#define OMX_MASTER_H_
-
-#include <media/hardware/OMXPluginBase.h>
-
-#include <utils/threads.h>
-#include <utils/KeyedVector.h>
-#include <utils/List.h>
-#include <utils/String8.h>
-
-namespace android {
-
-struct OMXMaster : public OMXPluginBase {
-    OMXMaster();
-    virtual ~OMXMaster();
-
-    virtual OMX_ERRORTYPE makeComponentInstance(
-            const char *name,
-            const OMX_CALLBACKTYPE *callbacks,
-            OMX_PTR appData,
-            OMX_COMPONENTTYPE **component);
-
-    virtual OMX_ERRORTYPE destroyComponentInstance(
-            OMX_COMPONENTTYPE *component);
-
-    virtual OMX_ERRORTYPE enumerateComponents(
-            OMX_STRING name,
-            size_t size,
-            OMX_U32 index);
-
-    virtual OMX_ERRORTYPE getRolesOfComponent(
-            const char *name,
-            Vector<String8> *roles);
-
-private:
-    char mProcessName[16];
-    Mutex mLock;
-    struct Plugin {
-        OMXPluginBase *mOmx;
-        void *mLibHandle;
-    };
-    List<Plugin> mPlugins;
-    KeyedVector<String8, OMXPluginBase *> mPluginByComponentName;
-    KeyedVector<OMX_COMPONENTTYPE *, OMXPluginBase *> mPluginByInstance;
-
-    void addVendorPlugin();
-    void addPlatformPlugin();
-    void addPlugin(const char *libname);
-    void addPlugin(OMXPluginBase *plugin);
-    void clearPlugins();
-
-    OMXMaster(const OMXMaster &);
-    OMXMaster &operator=(const OMXMaster &);
-};
-
-}  // namespace android
-
-#endif  // OMX_MASTER_H_
diff --git a/media/libstagefright/omx/include/media/stagefright/omx/OMXNodeInstance.h b/media/libstagefright/omx/include/media/stagefright/omx/OMXNodeInstance.h
index a761ef6..5f32c9e 100644
--- a/media/libstagefright/omx/include/media/stagefright/omx/OMXNodeInstance.h
+++ b/media/libstagefright/omx/include/media/stagefright/omx/OMXNodeInstance.h
@@ -33,7 +33,7 @@
 class GraphicBuffer;
 class IOMXBufferSource;
 class IOMXObserver;
-struct OMXMaster;
+struct OMXStore;
 class OMXBuffer;
 using IHidlMemory = hidl::memory::V1_0::IMemory;
 using hardware::media::omx::V1_0::implementation::Omx;
diff --git a/media/libstagefright/omx/include/media/stagefright/omx/OMXStore.h b/media/libstagefright/omx/include/media/stagefright/omx/OMXStore.h
new file mode 100644
index 0000000..5d6c3ed
--- /dev/null
+++ b/media/libstagefright/omx/include/media/stagefright/omx/OMXStore.h
@@ -0,0 +1,75 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef OMX_STORE_H_
+
+#define OMX_STORE_H_
+
+#include <media/hardware/OMXPluginBase.h>
+
+#include <utils/threads.h>
+#include <utils/KeyedVector.h>
+#include <utils/List.h>
+#include <utils/String8.h>
+
+namespace android {
+
+struct OMXStore : public OMXPluginBase {
+    OMXStore();
+    virtual ~OMXStore();
+
+    virtual OMX_ERRORTYPE makeComponentInstance(
+            const char *name,
+            const OMX_CALLBACKTYPE *callbacks,
+            OMX_PTR appData,
+            OMX_COMPONENTTYPE **component);
+
+    virtual OMX_ERRORTYPE destroyComponentInstance(
+            OMX_COMPONENTTYPE *component);
+
+    virtual OMX_ERRORTYPE enumerateComponents(
+            OMX_STRING name,
+            size_t size,
+            OMX_U32 index);
+
+    virtual OMX_ERRORTYPE getRolesOfComponent(
+            const char *name,
+            Vector<String8> *roles);
+
+private:
+    char mProcessName[16];
+    Mutex mLock;
+    struct Plugin {
+        OMXPluginBase *mOmx;
+        void *mLibHandle;
+    };
+    List<Plugin> mPlugins;
+    KeyedVector<String8, OMXPluginBase *> mPluginByComponentName;
+    KeyedVector<OMX_COMPONENTTYPE *, OMXPluginBase *> mPluginByInstance;
+
+    void addVendorPlugin();
+    void addPlatformPlugin();
+    void addPlugin(const char *libname);
+    void addPlugin(OMXPluginBase *plugin);
+    void clearPlugins();
+
+    OMXStore(const OMXStore &);
+    OMXStore &operator=(const OMXStore &);
+};
+
+}  // namespace android
+
+#endif  // OMX_STORE_H_
diff --git a/media/libstagefright/renderfright/Android.bp b/media/libstagefright/renderfright/Android.bp
new file mode 100644
index 0000000..c17f84e
--- /dev/null
+++ b/media/libstagefright/renderfright/Android.bp
@@ -0,0 +1,111 @@
+cc_defaults {
+    name: "renderfright_defaults",
+    cflags: [
+        "-DLOG_TAG=\"renderfright\"",
+        "-Wall",
+        "-Werror",
+        "-Wthread-safety",
+        "-Wunused",
+        "-Wunreachable-code",
+    ],
+}
+
+cc_defaults {
+    name: "librenderfright_defaults",
+    defaults: ["renderfright_defaults"],
+    cflags: [
+        "-DGL_GLEXT_PROTOTYPES",
+        "-DEGL_EGLEXT_PROTOTYPES",
+    ],
+    shared_libs: [
+        "libbase",
+        "libcutils",
+        "libEGL",
+        "libGLESv1_CM",
+        "libGLESv2",
+        "libgui",
+        "liblog",
+        "libnativewindow",
+        "libprocessgroup",
+        "libsync",
+        "libui",
+        "libutils",
+    ],
+    local_include_dirs: ["include"],
+    export_include_dirs: ["include"],
+}
+
+filegroup {
+    name: "librenderfright_sources",
+    srcs: [
+        "Description.cpp",
+        "Mesh.cpp",
+        "RenderEngine.cpp",
+        "Texture.cpp",
+    ],
+}
+
+filegroup {
+    name: "librenderfright_gl_sources",
+    srcs: [
+        "gl/GLESRenderEngine.cpp",
+        "gl/GLExtensions.cpp",
+        "gl/GLFramebuffer.cpp",
+        "gl/GLImage.cpp",
+        "gl/GLShadowTexture.cpp",
+        "gl/GLShadowVertexGenerator.cpp",
+        "gl/GLSkiaShadowPort.cpp",
+        "gl/GLVertexBuffer.cpp",
+        "gl/ImageManager.cpp",
+        "gl/Program.cpp",
+        "gl/ProgramCache.cpp",
+        "gl/filters/BlurFilter.cpp",
+        "gl/filters/GenericProgram.cpp",
+    ],
+}
+
+filegroup {
+    name: "librenderfright_threaded_sources",
+    srcs: [
+        "threaded/RenderEngineThreaded.cpp",
+    ],
+}
+
+cc_library_static {
+    name: "librenderfright",
+    defaults: ["librenderfright_defaults"],
+    vendor_available: true,
+    vndk: {
+        enabled: true,
+    },
+    double_loadable: true,
+    clang: true,
+    cflags: [
+        "-fvisibility=hidden",
+        "-Werror=format",
+    ],
+    srcs: [
+        ":librenderfright_sources",
+        ":librenderfright_gl_sources",
+        ":librenderfright_threaded_sources",
+    ],
+    lto: {
+        thin: true,
+    },
+}
+
+cc_library_static {
+    name: "librenderfright_mocks",
+    defaults: ["librenderfright_defaults"],
+    srcs: [
+        "mock/Framebuffer.cpp",
+        "mock/Image.cpp",
+        "mock/RenderEngine.cpp",
+    ],
+    static_libs: [
+        "libgtest",
+        "libgmock",
+    ],
+    local_include_dirs: ["include"],
+    export_include_dirs: ["include"],
+}
diff --git a/media/libstagefright/renderfright/Description.cpp b/media/libstagefright/renderfright/Description.cpp
new file mode 100644
index 0000000..b9cea10
--- /dev/null
+++ b/media/libstagefright/renderfright/Description.cpp
@@ -0,0 +1,56 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <renderengine/private/Description.h>
+
+#include <stdint.h>
+
+#include <utils/TypeHelpers.h>
+
+namespace android {
+namespace renderengine {
+
+Description::TransferFunction Description::dataSpaceToTransferFunction(ui::Dataspace dataSpace) {
+    ui::Dataspace transfer = static_cast<ui::Dataspace>(dataSpace & ui::Dataspace::TRANSFER_MASK);
+    switch (transfer) {
+        case ui::Dataspace::TRANSFER_ST2084:
+            return Description::TransferFunction::ST2084;
+        case ui::Dataspace::TRANSFER_HLG:
+            return Description::TransferFunction::HLG;
+        case ui::Dataspace::TRANSFER_LINEAR:
+            return Description::TransferFunction::LINEAR;
+        default:
+            return Description::TransferFunction::SRGB;
+    }
+}
+
+bool Description::hasInputTransformMatrix() const {
+    const mat4 identity;
+    return inputTransformMatrix != identity;
+}
+
+bool Description::hasOutputTransformMatrix() const {
+    const mat4 identity;
+    return outputTransformMatrix != identity;
+}
+
+bool Description::hasColorMatrix() const {
+    const mat4 identity;
+    return colorMatrix != identity;
+}
+
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/Mesh.cpp b/media/libstagefright/renderfright/Mesh.cpp
new file mode 100644
index 0000000..ed2f45f
--- /dev/null
+++ b/media/libstagefright/renderfright/Mesh.cpp
@@ -0,0 +1,146 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <renderengine/Mesh.h>
+
+#include <utils/Log.h>
+
+namespace android {
+namespace renderengine {
+
+Mesh::Mesh(Primitive primitive, size_t vertexCount, size_t vertexSize, size_t texCoordSize,
+           size_t cropCoordsSize, size_t shadowColorSize, size_t shadowParamsSize,
+           size_t indexCount)
+      : mVertexCount(vertexCount),
+        mVertexSize(vertexSize),
+        mTexCoordsSize(texCoordSize),
+        mCropCoordsSize(cropCoordsSize),
+        mShadowColorSize(shadowColorSize),
+        mShadowParamsSize(shadowParamsSize),
+        mPrimitive(primitive),
+        mIndexCount(indexCount) {
+    if (vertexCount == 0) {
+        mVertices.resize(1);
+        mVertices[0] = 0.0f;
+        mStride = 0;
+        return;
+    }
+    size_t stride = vertexSize + texCoordSize + cropCoordsSize + shadowColorSize + shadowParamsSize;
+    size_t remainder = (stride * vertexCount) / vertexCount;
+    // Since all of the input parameters are unsigned, if stride is less than
+    // either vertexSize or texCoordSize, it must have overflowed. remainder
+    // will be equal to stride as long as stride * vertexCount doesn't overflow.
+    if ((stride < vertexSize) || (remainder != stride)) {
+        ALOGE("Overflow in Mesh(..., %zu, %zu, %zu, %zu, %zu, %zu)", vertexCount, vertexSize,
+              texCoordSize, cropCoordsSize, shadowColorSize, shadowParamsSize);
+        mVertices.resize(1);
+        mVertices[0] = 0.0f;
+        mVertexCount = 0;
+        mVertexSize = 0;
+        mTexCoordsSize = 0;
+        mCropCoordsSize = 0;
+        mShadowColorSize = 0;
+        mShadowParamsSize = 0;
+        mStride = 0;
+        return;
+    }
+
+    mVertices.resize(stride * vertexCount);
+    mStride = stride;
+    mIndices.resize(indexCount);
+}
+
+Mesh::Primitive Mesh::getPrimitive() const {
+    return mPrimitive;
+}
+
+float const* Mesh::getPositions() const {
+    return mVertices.data();
+}
+float* Mesh::getPositions() {
+    return mVertices.data();
+}
+
+float const* Mesh::getTexCoords() const {
+    return mVertices.data() + mVertexSize;
+}
+float* Mesh::getTexCoords() {
+    return mVertices.data() + mVertexSize;
+}
+
+float const* Mesh::getCropCoords() const {
+    return mVertices.data() + mVertexSize + mTexCoordsSize;
+}
+float* Mesh::getCropCoords() {
+    return mVertices.data() + mVertexSize + mTexCoordsSize;
+}
+
+float const* Mesh::getShadowColor() const {
+    return mVertices.data() + mVertexSize + mTexCoordsSize + mCropCoordsSize;
+}
+float* Mesh::getShadowColor() {
+    return mVertices.data() + mVertexSize + mTexCoordsSize + mCropCoordsSize;
+}
+
+float const* Mesh::getShadowParams() const {
+    return mVertices.data() + mVertexSize + mTexCoordsSize + mCropCoordsSize + mShadowColorSize;
+}
+float* Mesh::getShadowParams() {
+    return mVertices.data() + mVertexSize + mTexCoordsSize + mCropCoordsSize + mShadowColorSize;
+}
+
+uint16_t const* Mesh::getIndices() const {
+    return mIndices.data();
+}
+
+uint16_t* Mesh::getIndices() {
+    return mIndices.data();
+}
+
+size_t Mesh::getVertexCount() const {
+    return mVertexCount;
+}
+
+size_t Mesh::getVertexSize() const {
+    return mVertexSize;
+}
+
+size_t Mesh::getTexCoordsSize() const {
+    return mTexCoordsSize;
+}
+
+size_t Mesh::getShadowColorSize() const {
+    return mShadowColorSize;
+}
+
+size_t Mesh::getShadowParamsSize() const {
+    return mShadowParamsSize;
+}
+
+size_t Mesh::getByteStride() const {
+    return mStride * sizeof(float);
+}
+
+size_t Mesh::getStride() const {
+    return mStride;
+}
+
+size_t Mesh::getIndexCount() const {
+    return mIndexCount;
+}
+
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/RenderEngine.cpp b/media/libstagefright/renderfright/RenderEngine.cpp
new file mode 100644
index 0000000..c3fbb60
--- /dev/null
+++ b/media/libstagefright/renderfright/RenderEngine.cpp
@@ -0,0 +1,71 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <renderengine/RenderEngine.h>
+
+#include <cutils/properties.h>
+#include <log/log.h>
+#include <private/gui/SyncFeatures.h>
+#include "gl/GLESRenderEngine.h"
+#include "threaded/RenderEngineThreaded.h"
+
+namespace android {
+namespace renderengine {
+
+std::unique_ptr<RenderEngine> RenderEngine::create(const RenderEngineCreationArgs& args) {
+    RenderEngineType renderEngineType = args.renderEngineType;
+
+    // Keep the ability to override via system properties:
+    char prop[PROPERTY_VALUE_MAX];
+    property_get(PROPERTY_DEBUG_RENDERENGINE_BACKEND, prop, "");
+    if (strcmp(prop, "gles") == 0) {
+        renderEngineType = RenderEngineType::GLES;
+    }
+    if (strcmp(prop, "threaded") == 0) {
+        renderEngineType = RenderEngineType::THREADED;
+    }
+
+    switch (renderEngineType) {
+        case RenderEngineType::THREADED:
+            ALOGD("Threaded RenderEngine with GLES Backend");
+            return renderengine::threaded::RenderEngineThreaded::create(
+                    [args]() { return android::renderengine::gl::GLESRenderEngine::create(args); });
+        case RenderEngineType::GLES:
+        default:
+            ALOGD("RenderEngine with GLES Backend");
+            return renderengine::gl::GLESRenderEngine::create(args);
+    }
+}
+
+RenderEngine::~RenderEngine() = default;
+
+namespace impl {
+
+RenderEngine::RenderEngine(const RenderEngineCreationArgs& args) : mArgs(args) {}
+
+RenderEngine::~RenderEngine() = default;
+
+bool RenderEngine::useNativeFenceSync() const {
+    return SyncFeatures::getInstance().useNativeFenceSync();
+}
+
+bool RenderEngine::useWaitSync() const {
+    return SyncFeatures::getInstance().useWaitSync();
+}
+
+} // namespace impl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/Texture.cpp b/media/libstagefright/renderfright/Texture.cpp
new file mode 100644
index 0000000..154cde8
--- /dev/null
+++ b/media/libstagefright/renderfright/Texture.cpp
@@ -0,0 +1,77 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <renderengine/Texture.h>
+
+namespace android {
+namespace renderengine {
+
+Texture::Texture()
+      : mTextureName(0), mTextureTarget(TEXTURE_2D), mWidth(0), mHeight(0), mFiltering(false) {}
+
+Texture::Texture(Target textureTarget, uint32_t textureName)
+      : mTextureName(textureName),
+        mTextureTarget(textureTarget),
+        mWidth(0),
+        mHeight(0),
+        mFiltering(false) {}
+
+void Texture::init(Target textureTarget, uint32_t textureName) {
+    mTextureName = textureName;
+    mTextureTarget = textureTarget;
+}
+
+Texture::~Texture() {}
+
+void Texture::setMatrix(float const* matrix) {
+    mTextureMatrix = mat4(matrix);
+}
+
+void Texture::setFiltering(bool enabled) {
+    mFiltering = enabled;
+}
+
+void Texture::setDimensions(size_t width, size_t height) {
+    mWidth = width;
+    mHeight = height;
+}
+
+uint32_t Texture::getTextureName() const {
+    return mTextureName;
+}
+
+uint32_t Texture::getTextureTarget() const {
+    return mTextureTarget;
+}
+
+const mat4& Texture::getMatrix() const {
+    return mTextureMatrix;
+}
+
+bool Texture::getFiltering() const {
+    return mFiltering;
+}
+
+size_t Texture::getWidth() const {
+    return mWidth;
+}
+
+size_t Texture::getHeight() const {
+    return mHeight;
+}
+
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/GLESRenderEngine.cpp b/media/libstagefright/renderfright/gl/GLESRenderEngine.cpp
new file mode 100644
index 0000000..824bdd9
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/GLESRenderEngine.cpp
@@ -0,0 +1,1772 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#undef LOG_TAG
+#define LOG_TAG "RenderEngine"
+#define ATRACE_TAG ATRACE_TAG_GRAPHICS
+
+#include <sched.h>
+#include <cmath>
+#include <fstream>
+#include <sstream>
+#include <unordered_set>
+
+#include <GLES2/gl2.h>
+#include <GLES2/gl2ext.h>
+#include <android-base/stringprintf.h>
+#include <cutils/compiler.h>
+#include <cutils/properties.h>
+#include <gui/DebugEGLImageTracker.h>
+#include <renderengine/Mesh.h>
+#include <renderengine/Texture.h>
+#include <renderengine/private/Description.h>
+#include <sync/sync.h>
+#include <ui/ColorSpace.h>
+#include <ui/DebugUtils.h>
+#include <ui/GraphicBuffer.h>
+#include <ui/Rect.h>
+#include <ui/Region.h>
+#include <utils/KeyedVector.h>
+#include <utils/Trace.h>
+#include "GLESRenderEngine.h"
+#include "GLExtensions.h"
+#include "GLFramebuffer.h"
+#include "GLImage.h"
+#include "GLShadowVertexGenerator.h"
+#include "Program.h"
+#include "ProgramCache.h"
+#include "filters/BlurFilter.h"
+
+bool checkGlError(const char* op, int lineNumber) {
+    bool errorFound = false;
+    GLint error = glGetError();
+    while (error != GL_NO_ERROR) {
+        errorFound = true;
+        ALOGV("after %s() (line # %d) glError (0x%x)\n", op, lineNumber, error);
+        error = glGetError();
+    }
+    return errorFound;
+}
+
+static constexpr bool outputDebugPPMs = false;
+
+void writePPM(const char* basename, GLuint width, GLuint height) {
+    ALOGV("writePPM #%s: %d x %d", basename, width, height);
+
+    std::vector<GLubyte> pixels(width * height * 4);
+    std::vector<GLubyte> outBuffer(width * height * 3);
+
+    // TODO(courtneygo): We can now have float formats, so we need
+    // to remove this code or update it to support them.
+    // Make returned pixels fit in uint32_t, one byte per component.
+    glReadPixels(0, 0, width, height, GL_RGBA, GL_UNSIGNED_BYTE, pixels.data());
+    if (checkGlError(__FUNCTION__, __LINE__)) {
+        return;
+    }
+
+    std::string filename(basename);
+    filename.append(".ppm");
+    std::ofstream file(filename.c_str(), std::ios::binary);
+    if (!file.is_open()) {
+        ALOGE("Unable to open file: %s", filename.c_str());
+        ALOGE("You may need to do: \"adb shell setenforce 0\" to enable "
+              "surfaceflinger to write debug images");
+        return;
+    }
+
+    file << "P6\n";
+    file << width << "\n";
+    file << height << "\n";
+    file << 255 << "\n";
+
+    auto ptr = reinterpret_cast<char*>(pixels.data());
+    auto outPtr = reinterpret_cast<char*>(outBuffer.data());
+    for (int y = height - 1; y >= 0; y--) {
+        char* data = ptr + y * width * sizeof(uint32_t);
+
+        for (GLuint x = 0; x < width; x++) {
+            // Only copy R, G and B components
+            outPtr[0] = data[0];
+            outPtr[1] = data[1];
+            outPtr[2] = data[2];
+            data += sizeof(uint32_t);
+            outPtr += 3;
+        }
+    }
+    file.write(reinterpret_cast<char*>(outBuffer.data()), outBuffer.size());
+}
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+using base::StringAppendF;
+using ui::Dataspace;
+
+static status_t selectConfigForAttribute(EGLDisplay dpy, EGLint const* attrs, EGLint attribute,
+                                         EGLint wanted, EGLConfig* outConfig) {
+    EGLint numConfigs = -1, n = 0;
+    eglGetConfigs(dpy, nullptr, 0, &numConfigs);
+    std::vector<EGLConfig> configs(numConfigs, EGL_NO_CONFIG_KHR);
+    eglChooseConfig(dpy, attrs, configs.data(), configs.size(), &n);
+    configs.resize(n);
+
+    if (!configs.empty()) {
+        if (attribute != EGL_NONE) {
+            for (EGLConfig config : configs) {
+                EGLint value = 0;
+                eglGetConfigAttrib(dpy, config, attribute, &value);
+                if (wanted == value) {
+                    *outConfig = config;
+                    return NO_ERROR;
+                }
+            }
+        } else {
+            // just pick the first one
+            *outConfig = configs[0];
+            return NO_ERROR;
+        }
+    }
+
+    return NAME_NOT_FOUND;
+}
+
+static status_t selectEGLConfig(EGLDisplay display, EGLint format, EGLint renderableType,
+                                EGLConfig* config) {
+    // Select our EGLConfig. It must support EGL_RECORDABLE_ANDROID if
+    // it is to be used with Wi-Fi displays.
+    status_t err;
+    EGLint wantedAttribute;
+    EGLint wantedAttributeValue;
+
+    std::vector<EGLint> attribs;
+    if (renderableType) {
+        const ui::PixelFormat pixelFormat = static_cast<ui::PixelFormat>(format);
+        const bool is1010102 = pixelFormat == ui::PixelFormat::RGBA_1010102;
+
+        // Default to 8 bits per channel.
+        const EGLint tmpAttribs[] = {
+                EGL_RENDERABLE_TYPE,
+                renderableType,
+                EGL_RECORDABLE_ANDROID,
+                EGL_TRUE,
+                EGL_SURFACE_TYPE,
+                EGL_WINDOW_BIT | EGL_PBUFFER_BIT,
+                EGL_FRAMEBUFFER_TARGET_ANDROID,
+                EGL_TRUE,
+                EGL_RED_SIZE,
+                is1010102 ? 10 : 8,
+                EGL_GREEN_SIZE,
+                is1010102 ? 10 : 8,
+                EGL_BLUE_SIZE,
+                is1010102 ? 10 : 8,
+                EGL_ALPHA_SIZE,
+                is1010102 ? 2 : 8,
+                EGL_NONE,
+        };
+        std::copy(tmpAttribs, tmpAttribs + (sizeof(tmpAttribs) / sizeof(EGLint)),
+                  std::back_inserter(attribs));
+        wantedAttribute = EGL_NONE;
+        wantedAttributeValue = EGL_NONE;
+    } else {
+        // if no renderable type specified, fallback to a simplified query
+        wantedAttribute = EGL_NATIVE_VISUAL_ID;
+        wantedAttributeValue = format;
+    }
+
+    err = selectConfigForAttribute(display, attribs.data(), wantedAttribute, wantedAttributeValue,
+                                   config);
+    if (err == NO_ERROR) {
+        EGLint caveat;
+        if (eglGetConfigAttrib(display, *config, EGL_CONFIG_CAVEAT, &caveat))
+            ALOGW_IF(caveat == EGL_SLOW_CONFIG, "EGL_SLOW_CONFIG selected!");
+    }
+
+    return err;
+}
+
+std::unique_ptr<GLESRenderEngine> GLESRenderEngine::create(const RenderEngineCreationArgs& args) {
+    // initialize EGL for the default display
+    EGLDisplay display = eglGetDisplay(EGL_DEFAULT_DISPLAY);
+    if (!eglInitialize(display, nullptr, nullptr)) {
+        LOG_ALWAYS_FATAL("failed to initialize EGL");
+    }
+
+    const auto eglVersion = eglQueryString(display, EGL_VERSION);
+    if (!eglVersion) {
+        checkGlError(__FUNCTION__, __LINE__);
+        LOG_ALWAYS_FATAL("eglQueryString(EGL_VERSION) failed");
+    }
+
+    const auto eglExtensions = eglQueryString(display, EGL_EXTENSIONS);
+    if (!eglExtensions) {
+        checkGlError(__FUNCTION__, __LINE__);
+        LOG_ALWAYS_FATAL("eglQueryString(EGL_EXTENSIONS) failed");
+    }
+
+    GLExtensions& extensions = GLExtensions::getInstance();
+    extensions.initWithEGLStrings(eglVersion, eglExtensions);
+
+    // The code assumes that ES2 or later is available if this extension is
+    // supported.
+    EGLConfig config = EGL_NO_CONFIG;
+    if (!extensions.hasNoConfigContext()) {
+        config = chooseEglConfig(display, args.pixelFormat, /*logConfig*/ true);
+    }
+
+    bool useContextPriority =
+            extensions.hasContextPriority() && args.contextPriority == ContextPriority::HIGH;
+    EGLContext protectedContext = EGL_NO_CONTEXT;
+    if (args.enableProtectedContext && extensions.hasProtectedContent()) {
+        protectedContext = createEglContext(display, config, nullptr, useContextPriority,
+                                            Protection::PROTECTED);
+        ALOGE_IF(protectedContext == EGL_NO_CONTEXT, "Can't create protected context");
+    }
+
+    EGLContext ctxt = createEglContext(display, config, protectedContext, useContextPriority,
+                                       Protection::UNPROTECTED);
+
+    // If we can't create a GL context, we can only abort.
+    LOG_ALWAYS_FATAL_IF(ctxt == EGL_NO_CONTEXT, "EGLContext creation failed");
+
+    EGLSurface stub = EGL_NO_SURFACE;
+    if (!extensions.hasSurfacelessContext()) {
+        stub = createStubEglPbufferSurface(display, config, args.pixelFormat,
+                                           Protection::UNPROTECTED);
+        LOG_ALWAYS_FATAL_IF(stub == EGL_NO_SURFACE, "can't create stub pbuffer");
+    }
+    EGLBoolean success = eglMakeCurrent(display, stub, stub, ctxt);
+    LOG_ALWAYS_FATAL_IF(!success, "can't make stub pbuffer current");
+    extensions.initWithGLStrings(glGetString(GL_VENDOR), glGetString(GL_RENDERER),
+                                 glGetString(GL_VERSION), glGetString(GL_EXTENSIONS));
+
+    EGLSurface protectedStub = EGL_NO_SURFACE;
+    if (protectedContext != EGL_NO_CONTEXT && !extensions.hasSurfacelessContext()) {
+        protectedStub = createStubEglPbufferSurface(display, config, args.pixelFormat,
+                                                    Protection::PROTECTED);
+        ALOGE_IF(protectedStub == EGL_NO_SURFACE, "can't create protected stub pbuffer");
+    }
+
+    // Now figure out which version of GL we actually got.
+    GlesVersion version = parseGlesVersion(extensions.getVersion());
+
+    LOG_ALWAYS_FATAL_IF(args.supportsBackgroundBlur && version < GLES_VERSION_3_0,
+        "Blurs require OpenGL ES 3.0. Please unset ro.surface_flinger.supports_background_blur");
+
+    // initialize the renderer while GL is current
+    std::unique_ptr<GLESRenderEngine> engine;
+    switch (version) {
+        case GLES_VERSION_1_0:
+        case GLES_VERSION_1_1:
+            LOG_ALWAYS_FATAL("SurfaceFlinger requires OpenGL ES 2.0 minimum to run.");
+            break;
+        case GLES_VERSION_2_0:
+        case GLES_VERSION_3_0:
+            engine = std::make_unique<GLESRenderEngine>(args, display, config, ctxt, stub,
+                                                        protectedContext, protectedStub);
+            break;
+    }
+
+    ALOGI("OpenGL ES informations:");
+    ALOGI("vendor    : %s", extensions.getVendor());
+    ALOGI("renderer  : %s", extensions.getRenderer());
+    ALOGI("version   : %s", extensions.getVersion());
+    ALOGI("extensions: %s", extensions.getExtensions());
+    ALOGI("GL_MAX_TEXTURE_SIZE = %zu", engine->getMaxTextureSize());
+    ALOGI("GL_MAX_VIEWPORT_DIMS = %zu", engine->getMaxViewportDims());
+
+    return engine;
+}
+
+EGLConfig GLESRenderEngine::chooseEglConfig(EGLDisplay display, int format, bool logConfig) {
+    status_t err;
+    EGLConfig config;
+
+    // First try to get an ES3 config
+    err = selectEGLConfig(display, format, EGL_OPENGL_ES3_BIT, &config);
+    if (err != NO_ERROR) {
+        // If ES3 fails, try to get an ES2 config
+        err = selectEGLConfig(display, format, EGL_OPENGL_ES2_BIT, &config);
+        if (err != NO_ERROR) {
+            // If ES2 still doesn't work, we're probably on the emulator;
+            // try a simplified query.
+            ALOGW("no suitable EGLConfig found, trying a simpler query");
+            err = selectEGLConfig(display, format, 0, &config);
+            if (err != NO_ERROR) {
+                // this EGL is too lame for android
+                LOG_ALWAYS_FATAL("no suitable EGLConfig found, giving up");
+            }
+        }
+    }
+
+    if (logConfig) {
+        // print some debugging info
+        EGLint r, g, b, a;
+        eglGetConfigAttrib(display, config, EGL_RED_SIZE, &r);
+        eglGetConfigAttrib(display, config, EGL_GREEN_SIZE, &g);
+        eglGetConfigAttrib(display, config, EGL_BLUE_SIZE, &b);
+        eglGetConfigAttrib(display, config, EGL_ALPHA_SIZE, &a);
+        ALOGI("EGL information:");
+        ALOGI("vendor    : %s", eglQueryString(display, EGL_VENDOR));
+        ALOGI("version   : %s", eglQueryString(display, EGL_VERSION));
+        ALOGI("extensions: %s", eglQueryString(display, EGL_EXTENSIONS));
+        ALOGI("Client API: %s", eglQueryString(display, EGL_CLIENT_APIS) ?: "Not Supported");
+        ALOGI("EGLSurface: %d-%d-%d-%d, config=%p", r, g, b, a, config);
+    }
+
+    return config;
+}
+
+GLESRenderEngine::GLESRenderEngine(const RenderEngineCreationArgs& args, EGLDisplay display,
+                                   EGLConfig config, EGLContext ctxt, EGLSurface stub,
+                                   EGLContext protectedContext, EGLSurface protectedStub)
+      : renderengine::impl::RenderEngine(args),
+        mEGLDisplay(display),
+        mEGLConfig(config),
+        mEGLContext(ctxt),
+        mStubSurface(stub),
+        mProtectedEGLContext(protectedContext),
+        mProtectedStubSurface(protectedStub),
+        mVpWidth(0),
+        mVpHeight(0),
+        mFramebufferImageCacheSize(args.imageCacheSize),
+        mUseColorManagement(args.useColorManagement) {
+    glGetIntegerv(GL_MAX_TEXTURE_SIZE, &mMaxTextureSize);
+    glGetIntegerv(GL_MAX_VIEWPORT_DIMS, mMaxViewportDims);
+
+    glPixelStorei(GL_UNPACK_ALIGNMENT, 4);
+    glPixelStorei(GL_PACK_ALIGNMENT, 4);
+
+    // Initialize protected EGL Context.
+    if (mProtectedEGLContext != EGL_NO_CONTEXT) {
+        EGLBoolean success = eglMakeCurrent(display, mProtectedStubSurface, mProtectedStubSurface,
+                                            mProtectedEGLContext);
+        ALOGE_IF(!success, "can't make protected context current");
+        glPixelStorei(GL_UNPACK_ALIGNMENT, 4);
+        glPixelStorei(GL_PACK_ALIGNMENT, 4);
+        success = eglMakeCurrent(display, mStubSurface, mStubSurface, mEGLContext);
+        LOG_ALWAYS_FATAL_IF(!success, "can't make default context current");
+    }
+
+    // mColorBlindnessCorrection = M;
+
+    if (mUseColorManagement) {
+        const ColorSpace srgb(ColorSpace::sRGB());
+        const ColorSpace displayP3(ColorSpace::DisplayP3());
+        const ColorSpace bt2020(ColorSpace::BT2020());
+
+        // no chromatic adaptation needed since all color spaces use D65 for their white points.
+        mSrgbToXyz = mat4(srgb.getRGBtoXYZ());
+        mDisplayP3ToXyz = mat4(displayP3.getRGBtoXYZ());
+        mBt2020ToXyz = mat4(bt2020.getRGBtoXYZ());
+        mXyzToSrgb = mat4(srgb.getXYZtoRGB());
+        mXyzToDisplayP3 = mat4(displayP3.getXYZtoRGB());
+        mXyzToBt2020 = mat4(bt2020.getXYZtoRGB());
+
+        // Compute sRGB to Display P3 and BT2020 transform matrix.
+        // NOTE: For now, we are limiting output wide color space support to
+        // Display-P3 and BT2020 only.
+        mSrgbToDisplayP3 = mXyzToDisplayP3 * mSrgbToXyz;
+        mSrgbToBt2020 = mXyzToBt2020 * mSrgbToXyz;
+
+        // Compute Display P3 to sRGB and BT2020 transform matrix.
+        mDisplayP3ToSrgb = mXyzToSrgb * mDisplayP3ToXyz;
+        mDisplayP3ToBt2020 = mXyzToBt2020 * mDisplayP3ToXyz;
+
+        // Compute BT2020 to sRGB and Display P3 transform matrix
+        mBt2020ToSrgb = mXyzToSrgb * mBt2020ToXyz;
+        mBt2020ToDisplayP3 = mXyzToDisplayP3 * mBt2020ToXyz;
+    }
+
+    char value[PROPERTY_VALUE_MAX];
+    property_get("debug.egl.traceGpuCompletion", value, "0");
+    if (atoi(value)) {
+        mTraceGpuCompletion = true;
+        mFlushTracer = std::make_unique<FlushTracer>(this);
+    }
+
+    if (args.supportsBackgroundBlur) {
+        mBlurFilter = new BlurFilter(*this);
+        checkErrors("BlurFilter creation");
+    }
+
+    mImageManager = std::make_unique<ImageManager>(this);
+    mImageManager->initThread();
+    mDrawingBuffer = createFramebuffer();
+    sp<GraphicBuffer> buf =
+            new GraphicBuffer(1, 1, PIXEL_FORMAT_RGBA_8888, 1,
+                              GRALLOC_USAGE_HW_RENDER | GRALLOC_USAGE_HW_TEXTURE, "placeholder");
+
+    const status_t err = buf->initCheck();
+    if (err != OK) {
+        ALOGE("Error allocating placeholder buffer: %d", err);
+        return;
+    }
+    mPlaceholderBuffer = buf.get();
+    EGLint attributes[] = {
+            EGL_NONE,
+    };
+    mPlaceholderImage = eglCreateImageKHR(mEGLDisplay, EGL_NO_CONTEXT, EGL_NATIVE_BUFFER_ANDROID,
+                                          mPlaceholderBuffer, attributes);
+    ALOGE_IF(mPlaceholderImage == EGL_NO_IMAGE_KHR, "Failed to create placeholder image: %#x",
+             eglGetError());
+}
+
+GLESRenderEngine::~GLESRenderEngine() {
+    // Destroy the image manager first.
+    mImageManager = nullptr;
+    std::lock_guard<std::mutex> lock(mRenderingMutex);
+    unbindFrameBuffer(mDrawingBuffer.get());
+    mDrawingBuffer = nullptr;
+    while (!mFramebufferImageCache.empty()) {
+        EGLImageKHR expired = mFramebufferImageCache.front().second;
+        mFramebufferImageCache.pop_front();
+        eglDestroyImageKHR(mEGLDisplay, expired);
+        DEBUG_EGL_IMAGE_TRACKER_DESTROY();
+    }
+    eglDestroyImageKHR(mEGLDisplay, mPlaceholderImage);
+    mImageCache.clear();
+    eglMakeCurrent(mEGLDisplay, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT);
+    eglTerminate(mEGLDisplay);
+}
+
+std::unique_ptr<Framebuffer> GLESRenderEngine::createFramebuffer() {
+    return std::make_unique<GLFramebuffer>(*this);
+}
+
+std::unique_ptr<Image> GLESRenderEngine::createImage() {
+    return std::make_unique<GLImage>(*this);
+}
+
+Framebuffer* GLESRenderEngine::getFramebufferForDrawing() {
+    return mDrawingBuffer.get();
+}
+
+void GLESRenderEngine::primeCache() const {
+    ProgramCache::getInstance().primeCache(mInProtectedContext ? mProtectedEGLContext : mEGLContext,
+                                           mArgs.useColorManagement,
+                                           mArgs.precacheToneMapperShaderOnly);
+}
+
+base::unique_fd GLESRenderEngine::flush() {
+    ATRACE_CALL();
+    if (!GLExtensions::getInstance().hasNativeFenceSync()) {
+        return base::unique_fd();
+    }
+
+    EGLSyncKHR sync = eglCreateSyncKHR(mEGLDisplay, EGL_SYNC_NATIVE_FENCE_ANDROID, nullptr);
+    if (sync == EGL_NO_SYNC_KHR) {
+        ALOGW("failed to create EGL native fence sync: %#x", eglGetError());
+        return base::unique_fd();
+    }
+
+    // native fence fd will not be populated until flush() is done.
+    glFlush();
+
+    // get the fence fd
+    base::unique_fd fenceFd(eglDupNativeFenceFDANDROID(mEGLDisplay, sync));
+    eglDestroySyncKHR(mEGLDisplay, sync);
+    if (fenceFd == EGL_NO_NATIVE_FENCE_FD_ANDROID) {
+        ALOGW("failed to dup EGL native fence sync: %#x", eglGetError());
+    }
+
+    // Only trace if we have a valid fence, as current usage falls back to
+    // calling finish() if the fence fd is invalid.
+    if (CC_UNLIKELY(mTraceGpuCompletion && mFlushTracer) && fenceFd.get() >= 0) {
+        mFlushTracer->queueSync(eglCreateSyncKHR(mEGLDisplay, EGL_SYNC_FENCE_KHR, nullptr));
+    }
+
+    return fenceFd;
+}
+
+bool GLESRenderEngine::finish() {
+    ATRACE_CALL();
+    if (!GLExtensions::getInstance().hasFenceSync()) {
+        ALOGW("no synchronization support");
+        return false;
+    }
+
+    EGLSyncKHR sync = eglCreateSyncKHR(mEGLDisplay, EGL_SYNC_FENCE_KHR, nullptr);
+    if (sync == EGL_NO_SYNC_KHR) {
+        ALOGW("failed to create EGL fence sync: %#x", eglGetError());
+        return false;
+    }
+
+    if (CC_UNLIKELY(mTraceGpuCompletion && mFlushTracer)) {
+        mFlushTracer->queueSync(eglCreateSyncKHR(mEGLDisplay, EGL_SYNC_FENCE_KHR, nullptr));
+    }
+
+    return waitSync(sync, EGL_SYNC_FLUSH_COMMANDS_BIT_KHR);
+}
+
+bool GLESRenderEngine::waitSync(EGLSyncKHR sync, EGLint flags) {
+    EGLint result = eglClientWaitSyncKHR(mEGLDisplay, sync, flags, 2000000000 /*2 sec*/);
+    EGLint error = eglGetError();
+    eglDestroySyncKHR(mEGLDisplay, sync);
+    if (result != EGL_CONDITION_SATISFIED_KHR) {
+        if (result == EGL_TIMEOUT_EXPIRED_KHR) {
+            ALOGW("fence wait timed out");
+        } else {
+            ALOGW("error waiting on EGL fence: %#x", error);
+        }
+        return false;
+    }
+
+    return true;
+}
+
+bool GLESRenderEngine::waitFence(base::unique_fd fenceFd) {
+    if (!GLExtensions::getInstance().hasNativeFenceSync() ||
+        !GLExtensions::getInstance().hasWaitSync()) {
+        return false;
+    }
+
+    // release the fd and transfer the ownership to EGLSync
+    EGLint attribs[] = {EGL_SYNC_NATIVE_FENCE_FD_ANDROID, fenceFd.release(), EGL_NONE};
+    EGLSyncKHR sync = eglCreateSyncKHR(mEGLDisplay, EGL_SYNC_NATIVE_FENCE_ANDROID, attribs);
+    if (sync == EGL_NO_SYNC_KHR) {
+        ALOGE("failed to create EGL native fence sync: %#x", eglGetError());
+        return false;
+    }
+
+    // XXX: The spec draft is inconsistent as to whether this should return an
+    // EGLint or void.  Ignore the return value for now, as it's not strictly
+    // needed.
+    eglWaitSyncKHR(mEGLDisplay, sync, 0);
+    EGLint error = eglGetError();
+    eglDestroySyncKHR(mEGLDisplay, sync);
+    if (error != EGL_SUCCESS) {
+        ALOGE("failed to wait for EGL native fence sync: %#x", error);
+        return false;
+    }
+
+    return true;
+}
+
+void GLESRenderEngine::clearWithColor(float red, float green, float blue, float alpha) {
+    ATRACE_CALL();
+    glDisable(GL_BLEND);
+    glClearColor(red, green, blue, alpha);
+    glClear(GL_COLOR_BUFFER_BIT);
+}
+
+void GLESRenderEngine::fillRegionWithColor(const Region& region, float red, float green, float blue,
+                                           float alpha) {
+    size_t c;
+    Rect const* r = region.getArray(&c);
+    Mesh mesh = Mesh::Builder()
+                        .setPrimitive(Mesh::TRIANGLES)
+                        .setVertices(c * 6 /* count */, 2 /* size */)
+                        .build();
+    Mesh::VertexArray<vec2> position(mesh.getPositionArray<vec2>());
+    for (size_t i = 0; i < c; i++, r++) {
+        position[i * 6 + 0].x = r->left;
+        position[i * 6 + 0].y = r->top;
+        position[i * 6 + 1].x = r->left;
+        position[i * 6 + 1].y = r->bottom;
+        position[i * 6 + 2].x = r->right;
+        position[i * 6 + 2].y = r->bottom;
+        position[i * 6 + 3].x = r->left;
+        position[i * 6 + 3].y = r->top;
+        position[i * 6 + 4].x = r->right;
+        position[i * 6 + 4].y = r->bottom;
+        position[i * 6 + 5].x = r->right;
+        position[i * 6 + 5].y = r->top;
+    }
+    setupFillWithColor(red, green, blue, alpha);
+    drawMesh(mesh);
+}
+
+void GLESRenderEngine::setScissor(const Rect& region) {
+    glScissor(region.left, region.top, region.getWidth(), region.getHeight());
+    glEnable(GL_SCISSOR_TEST);
+}
+
+void GLESRenderEngine::disableScissor() {
+    glDisable(GL_SCISSOR_TEST);
+}
+
+void GLESRenderEngine::genTextures(size_t count, uint32_t* names) {
+    glGenTextures(count, names);
+}
+
+void GLESRenderEngine::deleteTextures(size_t count, uint32_t const* names) {
+    for (int i = 0; i < count; ++i) {
+        mTextureView.erase(names[i]);
+    }
+    glDeleteTextures(count, names);
+}
+
+void GLESRenderEngine::bindExternalTextureImage(uint32_t texName, const Image& image) {
+    ATRACE_CALL();
+    const GLImage& glImage = static_cast<const GLImage&>(image);
+    const GLenum target = GL_TEXTURE_EXTERNAL_OES;
+
+    glBindTexture(target, texName);
+    if (glImage.getEGLImage() != EGL_NO_IMAGE_KHR) {
+        glEGLImageTargetTexture2DOES(target, static_cast<GLeglImageOES>(glImage.getEGLImage()));
+    }
+}
+
+status_t GLESRenderEngine::bindExternalTextureBuffer(uint32_t texName,
+                                                     const sp<GraphicBuffer>& buffer,
+                                                     const sp<Fence>& bufferFence) {
+    if (buffer == nullptr) {
+        return BAD_VALUE;
+    }
+
+    ATRACE_CALL();
+
+    bool found = false;
+    {
+        std::lock_guard<std::mutex> lock(mRenderingMutex);
+        auto cachedImage = mImageCache.find(buffer->getId());
+        found = (cachedImage != mImageCache.end());
+    }
+
+    // If we couldn't find the image in the cache at this time, then either
+    // SurfaceFlinger messed up registering the buffer ahead of time or we got
+    // backed up creating other EGLImages.
+    if (!found) {
+        status_t cacheResult = mImageManager->cache(buffer);
+        if (cacheResult != NO_ERROR) {
+            return cacheResult;
+        }
+    }
+
+    // Whether or not we needed to cache, re-check mImageCache to make sure that
+    // there's an EGLImage. The current threading model guarantees that we don't
+    // destroy a cached image until it's really not needed anymore (i.e. this
+    // function should not be called), so the only possibility is that something
+    // terrible went wrong and we should just bind something and move on.
+    {
+        std::lock_guard<std::mutex> lock(mRenderingMutex);
+        auto cachedImage = mImageCache.find(buffer->getId());
+
+        if (cachedImage == mImageCache.end()) {
+            // We failed creating the image if we got here, so bail out.
+            ALOGE("Failed to create an EGLImage when rendering");
+            bindExternalTextureImage(texName, *createImage());
+            return NO_INIT;
+        }
+
+        bindExternalTextureImage(texName, *cachedImage->second);
+        mTextureView.insert_or_assign(texName, buffer->getId());
+    }
+
+    // Wait for the new buffer to be ready.
+    if (bufferFence != nullptr && bufferFence->isValid()) {
+        if (GLExtensions::getInstance().hasWaitSync()) {
+            base::unique_fd fenceFd(bufferFence->dup());
+            if (fenceFd == -1) {
+                ALOGE("error dup'ing fence fd: %d", errno);
+                return -errno;
+            }
+            if (!waitFence(std::move(fenceFd))) {
+                ALOGE("failed to wait on fence fd");
+                return UNKNOWN_ERROR;
+            }
+        } else {
+            status_t err = bufferFence->waitForever("RenderEngine::bindExternalTextureBuffer");
+            if (err != NO_ERROR) {
+                ALOGE("error waiting for fence: %d", err);
+                return err;
+            }
+        }
+    }
+
+    return NO_ERROR;
+}
+
+void GLESRenderEngine::cacheExternalTextureBuffer(const sp<GraphicBuffer>& buffer) {
+    mImageManager->cacheAsync(buffer, nullptr);
+}
+
+std::shared_ptr<ImageManager::Barrier> GLESRenderEngine::cacheExternalTextureBufferForTesting(
+        const sp<GraphicBuffer>& buffer) {
+    auto barrier = std::make_shared<ImageManager::Barrier>();
+    mImageManager->cacheAsync(buffer, barrier);
+    return barrier;
+}
+
+status_t GLESRenderEngine::cacheExternalTextureBufferInternal(const sp<GraphicBuffer>& buffer) {
+    if (buffer == nullptr) {
+        return BAD_VALUE;
+    }
+
+    {
+        std::lock_guard<std::mutex> lock(mRenderingMutex);
+        if (mImageCache.count(buffer->getId()) > 0) {
+            // If there's already an image then fail fast here.
+            return NO_ERROR;
+        }
+    }
+    ATRACE_CALL();
+
+    // Create the image without holding a lock so that we don't block anything.
+    std::unique_ptr<Image> newImage = createImage();
+
+    bool created = newImage->setNativeWindowBuffer(buffer->getNativeBuffer(),
+                                                   buffer->getUsage() & GRALLOC_USAGE_PROTECTED);
+    if (!created) {
+        ALOGE("Failed to create image. size=%ux%u st=%u usage=%#" PRIx64 " fmt=%d",
+              buffer->getWidth(), buffer->getHeight(), buffer->getStride(), buffer->getUsage(),
+              buffer->getPixelFormat());
+        return NO_INIT;
+    }
+
+    {
+        std::lock_guard<std::mutex> lock(mRenderingMutex);
+        if (mImageCache.count(buffer->getId()) > 0) {
+            // In theory it's possible for another thread to recache the image,
+            // so bail out if another thread won.
+            return NO_ERROR;
+        }
+        mImageCache.insert(std::make_pair(buffer->getId(), std::move(newImage)));
+    }
+
+    return NO_ERROR;
+}
+
+void GLESRenderEngine::unbindExternalTextureBuffer(uint64_t bufferId) {
+    mImageManager->releaseAsync(bufferId, nullptr);
+}
+
+std::shared_ptr<ImageManager::Barrier> GLESRenderEngine::unbindExternalTextureBufferForTesting(
+        uint64_t bufferId) {
+    auto barrier = std::make_shared<ImageManager::Barrier>();
+    mImageManager->releaseAsync(bufferId, barrier);
+    return barrier;
+}
+
+void GLESRenderEngine::unbindExternalTextureBufferInternal(uint64_t bufferId) {
+    std::unique_ptr<Image> image;
+    {
+        std::lock_guard<std::mutex> lock(mRenderingMutex);
+        const auto& cachedImage = mImageCache.find(bufferId);
+
+        if (cachedImage != mImageCache.end()) {
+            ALOGV("Destroying image for buffer: %" PRIu64, bufferId);
+            // Move the buffer out of cache first, so that we can destroy
+            // without holding the cache's lock.
+            image = std::move(cachedImage->second);
+            mImageCache.erase(bufferId);
+            return;
+        }
+    }
+    ALOGV("Failed to find image for buffer: %" PRIu64, bufferId);
+}
+
+FloatRect GLESRenderEngine::setupLayerCropping(const LayerSettings& layer, Mesh& mesh) {
+    // Translate win by the rounded corners rect coordinates, to have all values in
+    // layer coordinate space.
+    FloatRect cropWin = layer.geometry.boundaries;
+    const FloatRect& roundedCornersCrop = layer.geometry.roundedCornersCrop;
+    cropWin.left -= roundedCornersCrop.left;
+    cropWin.right -= roundedCornersCrop.left;
+    cropWin.top -= roundedCornersCrop.top;
+    cropWin.bottom -= roundedCornersCrop.top;
+    Mesh::VertexArray<vec2> cropCoords(mesh.getCropCoordArray<vec2>());
+    cropCoords[0] = vec2(cropWin.left, cropWin.top);
+    cropCoords[1] = vec2(cropWin.left, cropWin.top + cropWin.getHeight());
+    cropCoords[2] = vec2(cropWin.right, cropWin.top + cropWin.getHeight());
+    cropCoords[3] = vec2(cropWin.right, cropWin.top);
+
+    setupCornerRadiusCropSize(roundedCornersCrop.getWidth(), roundedCornersCrop.getHeight());
+    return cropWin;
+}
+
+void GLESRenderEngine::handleRoundedCorners(const DisplaySettings& display,
+                                            const LayerSettings& layer, const Mesh& mesh) {
+    // We separate the layer into 3 parts essentially, such that we only turn on blending for the
+    // top rectangle and the bottom rectangle, and turn off blending for the middle rectangle.
+    FloatRect bounds = layer.geometry.roundedCornersCrop;
+
+    // Explicitly compute the transform from the clip rectangle to the physical
+    // display. Normally, this is done in glViewport but we explicitly compute
+    // it here so that we can get the scissor bounds correct.
+    const Rect& source = display.clip;
+    const Rect& destination = display.physicalDisplay;
+    // Here we compute the following transform:
+    // 1. Translate the top left corner of the source clip to (0, 0)
+    // 2. Rotate the clip rectangle about the origin in accordance with the
+    // orientation flag
+    // 3. Translate the top left corner back to the origin.
+    // 4. Scale the clip rectangle to the destination rectangle dimensions
+    // 5. Translate the top left corner to the destination rectangle's top left
+    // corner.
+    const mat4 translateSource = mat4::translate(vec4(-source.left, -source.top, 0, 1));
+    mat4 rotation;
+    int displacementX = 0;
+    int displacementY = 0;
+    float destinationWidth = static_cast<float>(destination.getWidth());
+    float destinationHeight = static_cast<float>(destination.getHeight());
+    float sourceWidth = static_cast<float>(source.getWidth());
+    float sourceHeight = static_cast<float>(source.getHeight());
+    const float rot90InRadians = 2.0f * static_cast<float>(M_PI) / 4.0f;
+    switch (display.orientation) {
+        case ui::Transform::ROT_90:
+            rotation = mat4::rotate(rot90InRadians, vec3(0, 0, 1));
+            displacementX = source.getHeight();
+            std::swap(sourceHeight, sourceWidth);
+            break;
+        case ui::Transform::ROT_180:
+            rotation = mat4::rotate(rot90InRadians * 2.0f, vec3(0, 0, 1));
+            displacementY = source.getHeight();
+            displacementX = source.getWidth();
+            break;
+        case ui::Transform::ROT_270:
+            rotation = mat4::rotate(rot90InRadians * 3.0f, vec3(0, 0, 1));
+            displacementY = source.getWidth();
+            std::swap(sourceHeight, sourceWidth);
+            break;
+        default:
+            break;
+    }
+
+    const mat4 intermediateTranslation = mat4::translate(vec4(displacementX, displacementY, 0, 1));
+    const mat4 scale = mat4::scale(
+            vec4(destinationWidth / sourceWidth, destinationHeight / sourceHeight, 1, 1));
+    const mat4 translateDestination =
+            mat4::translate(vec4(destination.left, destination.top, 0, 1));
+    const mat4 globalTransform =
+            translateDestination * scale * intermediateTranslation * rotation * translateSource;
+
+    const mat4 transformMatrix = globalTransform * layer.geometry.positionTransform;
+    const vec4 leftTopCoordinate(bounds.left, bounds.top, 1.0, 1.0);
+    const vec4 rightBottomCoordinate(bounds.right, bounds.bottom, 1.0, 1.0);
+    const vec4 leftTopCoordinateInBuffer = transformMatrix * leftTopCoordinate;
+    const vec4 rightBottomCoordinateInBuffer = transformMatrix * rightBottomCoordinate;
+    bounds = FloatRect(std::min(leftTopCoordinateInBuffer[0], rightBottomCoordinateInBuffer[0]),
+                       std::min(leftTopCoordinateInBuffer[1], rightBottomCoordinateInBuffer[1]),
+                       std::max(leftTopCoordinateInBuffer[0], rightBottomCoordinateInBuffer[0]),
+                       std::max(leftTopCoordinateInBuffer[1], rightBottomCoordinateInBuffer[1]));
+
+    // Finally, we cut the layer into 3 parts, with top and bottom parts having rounded corners
+    // and the middle part without rounded corners.
+    const int32_t radius = ceil(layer.geometry.roundedCornersRadius);
+    const Rect topRect(bounds.left, bounds.top, bounds.right, bounds.top + radius);
+    setScissor(topRect);
+    drawMesh(mesh);
+    const Rect bottomRect(bounds.left, bounds.bottom - radius, bounds.right, bounds.bottom);
+    setScissor(bottomRect);
+    drawMesh(mesh);
+
+    // The middle part of the layer can turn off blending.
+    if (topRect.bottom < bottomRect.top) {
+        const Rect middleRect(bounds.left, bounds.top + radius, bounds.right,
+                              bounds.bottom - radius);
+        setScissor(middleRect);
+        mState.cornerRadius = 0.0;
+        disableBlending();
+        drawMesh(mesh);
+    }
+    disableScissor();
+}
+
+status_t GLESRenderEngine::bindFrameBuffer(Framebuffer* framebuffer) {
+    ATRACE_CALL();
+    GLFramebuffer* glFramebuffer = static_cast<GLFramebuffer*>(framebuffer);
+    EGLImageKHR eglImage = glFramebuffer->getEGLImage();
+    uint32_t textureName = glFramebuffer->getTextureName();
+    uint32_t framebufferName = glFramebuffer->getFramebufferName();
+
+    // Bind the texture and turn our EGLImage into a texture
+    glBindTexture(GL_TEXTURE_2D, textureName);
+    glEGLImageTargetTexture2DOES(GL_TEXTURE_2D, (GLeglImageOES)eglImage);
+
+    // Bind the Framebuffer to render into
+    glBindFramebuffer(GL_FRAMEBUFFER, framebufferName);
+    glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, textureName, 0);
+
+    uint32_t glStatus = glCheckFramebufferStatus(GL_FRAMEBUFFER);
+    ALOGE_IF(glStatus != GL_FRAMEBUFFER_COMPLETE_OES, "glCheckFramebufferStatusOES error %d",
+             glStatus);
+
+    return glStatus == GL_FRAMEBUFFER_COMPLETE_OES ? NO_ERROR : BAD_VALUE;
+}
+
+void GLESRenderEngine::unbindFrameBuffer(Framebuffer* /*framebuffer*/) {
+    ATRACE_CALL();
+
+    // back to main framebuffer
+    glBindFramebuffer(GL_FRAMEBUFFER, 0);
+}
+
+bool GLESRenderEngine::cleanupPostRender(CleanupMode mode) {
+    ATRACE_CALL();
+
+    if (mPriorResourcesCleaned ||
+        (mLastDrawFence != nullptr && mLastDrawFence->getStatus() != Fence::Status::Signaled)) {
+        // If we don't have a prior frame needing cleanup, then don't do anything.
+        return false;
+    }
+
+    // This is a bit of a band-aid fix for FrameCaptureProcessor, as we should
+    // not need to keep memory around if we don't need to do so.
+    if (mode == CleanupMode::CLEAN_ALL) {
+        // TODO: SurfaceFlinger memory utilization may benefit from resetting
+        // texture bindings as well. Assess if it does and there's no performance regression
+        // when rebinding the same image data to the same texture, and if so then its mode
+        // behavior can be tweaked.
+        if (mPlaceholderImage != EGL_NO_IMAGE_KHR) {
+            for (auto [textureName, bufferId] : mTextureView) {
+                if (bufferId && mPlaceholderImage != EGL_NO_IMAGE_KHR) {
+                    glBindTexture(GL_TEXTURE_EXTERNAL_OES, textureName);
+                    glEGLImageTargetTexture2DOES(GL_TEXTURE_EXTERNAL_OES,
+                                                 static_cast<GLeglImageOES>(mPlaceholderImage));
+                    mTextureView[textureName] = std::nullopt;
+                    checkErrors();
+                }
+            }
+        }
+        {
+            std::lock_guard<std::mutex> lock(mRenderingMutex);
+            mImageCache.clear();
+        }
+    }
+
+    // Bind the texture to placeholder so that backing image data can be freed.
+    GLFramebuffer* glFramebuffer = static_cast<GLFramebuffer*>(getFramebufferForDrawing());
+    glFramebuffer->allocateBuffers(1, 1, mPlaceholderDrawBuffer);
+    // Release the cached fence here, so that we don't churn reallocations when
+    // we could no-op repeated calls of this method instead.
+    mLastDrawFence = nullptr;
+    mPriorResourcesCleaned = true;
+    return true;
+}
+
+void GLESRenderEngine::checkErrors() const {
+    checkErrors(nullptr);
+}
+
+void GLESRenderEngine::checkErrors(const char* tag) const {
+    do {
+        // there could be more than one error flag
+        GLenum error = glGetError();
+        if (error == GL_NO_ERROR) break;
+        if (tag == nullptr) {
+            ALOGE("GL error 0x%04x", int(error));
+        } else {
+            ALOGE("GL error: %s -> 0x%04x", tag, int(error));
+        }
+    } while (true);
+}
+
+bool GLESRenderEngine::supportsProtectedContent() const {
+    return mProtectedEGLContext != EGL_NO_CONTEXT;
+}
+
+bool GLESRenderEngine::useProtectedContext(bool useProtectedContext) {
+    if (useProtectedContext == mInProtectedContext) {
+        return true;
+    }
+    if (useProtectedContext && mProtectedEGLContext == EGL_NO_CONTEXT) {
+        return false;
+    }
+    const EGLSurface surface = useProtectedContext ? mProtectedStubSurface : mStubSurface;
+    const EGLContext context = useProtectedContext ? mProtectedEGLContext : mEGLContext;
+    const bool success = eglMakeCurrent(mEGLDisplay, surface, surface, context) == EGL_TRUE;
+    if (success) {
+        mInProtectedContext = useProtectedContext;
+    }
+    return success;
+}
+EGLImageKHR GLESRenderEngine::createFramebufferImageIfNeeded(ANativeWindowBuffer* nativeBuffer,
+                                                             bool isProtected,
+                                                             bool useFramebufferCache) {
+    sp<GraphicBuffer> graphicBuffer = GraphicBuffer::from(nativeBuffer);
+    if (useFramebufferCache) {
+        std::lock_guard<std::mutex> lock(mFramebufferImageCacheMutex);
+        for (const auto& image : mFramebufferImageCache) {
+            if (image.first == graphicBuffer->getId()) {
+                return image.second;
+            }
+        }
+    }
+    EGLint attributes[] = {
+            isProtected ? EGL_PROTECTED_CONTENT_EXT : EGL_NONE,
+            isProtected ? EGL_TRUE : EGL_NONE,
+            EGL_NONE,
+    };
+    EGLImageKHR image = eglCreateImageKHR(mEGLDisplay, EGL_NO_CONTEXT, EGL_NATIVE_BUFFER_ANDROID,
+                                          nativeBuffer, attributes);
+    if (useFramebufferCache) {
+        if (image != EGL_NO_IMAGE_KHR) {
+            std::lock_guard<std::mutex> lock(mFramebufferImageCacheMutex);
+            if (mFramebufferImageCache.size() >= mFramebufferImageCacheSize) {
+                EGLImageKHR expired = mFramebufferImageCache.front().second;
+                mFramebufferImageCache.pop_front();
+                eglDestroyImageKHR(mEGLDisplay, expired);
+                DEBUG_EGL_IMAGE_TRACKER_DESTROY();
+            }
+            mFramebufferImageCache.push_back({graphicBuffer->getId(), image});
+        }
+    }
+
+    if (image != EGL_NO_IMAGE_KHR) {
+        DEBUG_EGL_IMAGE_TRACKER_CREATE();
+    }
+    return image;
+}
+
+status_t GLESRenderEngine::drawLayers(const DisplaySettings& display,
+                                      const std::vector<const LayerSettings*>& layers,
+                                      const sp<GraphicBuffer>& buffer,
+                                      const bool useFramebufferCache, base::unique_fd&& bufferFence,
+                                      base::unique_fd* drawFence) {
+    ATRACE_CALL();
+    if (layers.empty()) {
+        ALOGV("Drawing empty layer stack");
+        return NO_ERROR;
+    }
+
+    if (bufferFence.get() >= 0) {
+        // Duplicate the fence for passing to waitFence.
+        base::unique_fd bufferFenceDup(dup(bufferFence.get()));
+        if (bufferFenceDup < 0 || !waitFence(std::move(bufferFenceDup))) {
+            ATRACE_NAME("Waiting before draw");
+            sync_wait(bufferFence.get(), -1);
+        }
+    }
+
+    if (buffer == nullptr) {
+        ALOGE("No output buffer provided. Aborting GPU composition.");
+        return BAD_VALUE;
+    }
+
+    std::unique_ptr<BindNativeBufferAsFramebuffer> fbo;
+    // Gather the layers that requested blur; we need them to decide when to render to an
+    // offscreen buffer and when to render to the native buffer.
+    std::deque<const LayerSettings*> blurLayers;
+    if (CC_LIKELY(mBlurFilter != nullptr)) {
+        for (auto layer : layers) {
+            if (layer->backgroundBlurRadius > 0) {
+                blurLayers.push_back(layer);
+            }
+        }
+    }
+    const auto blurLayersSize = blurLayers.size();
+
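+    // With no blur layers, bind the output buffer as the framebuffer right away. Otherwise the
+    // blur filter becomes the draw target first, and the output buffer is bound only once the
+    // last blur layer is reached in the loop below.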
+    if (blurLayersSize == 0) {
+        fbo = std::make_unique<BindNativeBufferAsFramebuffer>(*this,
+                                                              buffer.get()->getNativeBuffer(),
+                                                              useFramebufferCache);
+        if (fbo->getStatus() != NO_ERROR) {
+            ALOGE("Failed to bind framebuffer! Aborting GPU composition for buffer (%p).",
+                  buffer->handle);
+            checkErrors();
+            return fbo->getStatus();
+        }
+        setViewportAndProjection(display.physicalDisplay, display.clip);
+    } else {
+        setViewportAndProjection(display.physicalDisplay, display.clip);
+        auto status =
+                mBlurFilter->setAsDrawTarget(display, blurLayers.front()->backgroundBlurRadius);
+        if (status != NO_ERROR) {
+            ALOGE("Failed to prepare blur filter! Aborting GPU composition for buffer (%p).",
+                  buffer->handle);
+            checkErrors();
+            return status;
+        }
+    }
+
+    // Clear the entire buffer; otherwise ghost images may persist when buffers are reused.
+    // We also require a fully transparent framebuffer for overlays. This is probably not
+    // the most efficient approach on all GPUs, since we could filter out opaque layers.
+    clearWithColor(0.0, 0.0, 0.0, 0.0);
+
+    setOutputDataSpace(display.outputDataspace);
+    setDisplayMaxLuminance(display.maxLuminance);
+
+    const mat4 projectionMatrix =
+            ui::Transform(display.orientation).asMatrix4() * mState.projectionMatrix;
+    if (!display.clearRegion.isEmpty()) {
+        glDisable(GL_BLEND);
+        fillRegionWithColor(display.clearRegion, 0.0, 0.0, 0.0, 1.0);
+    }
+
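+    // A single quad mesh (triangle fan) is reused for every layer; its position, texture and
+    // crop coordinate arrays are rewritten per layer before drawing.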
+    Mesh mesh = Mesh::Builder()
+                        .setPrimitive(Mesh::TRIANGLE_FAN)
+                        .setVertices(4 /* count */, 2 /* size */)
+                        .setTexCoords(2 /* size */)
+                        .setCropCoords(2 /* size */)
+                        .build();
+    for (auto const layer : layers) {
+        if (blurLayers.size() > 0 && blurLayers.front() == layer) {
+            blurLayers.pop_front();
+
+            auto status = mBlurFilter->prepare();
+            if (status != NO_ERROR) {
+                ALOGE("Failed to render blur effect! Aborting GPU composition for buffer (%p).",
+                      buffer->handle);
+                checkErrors("Can't render first blur pass");
+                return status;
+            }
+
+            if (blurLayers.size() == 0) {
+                // Done blurring, time to bind the native FBO and render our blur onto it.
+                fbo = std::make_unique<BindNativeBufferAsFramebuffer>(*this,
+                                                                      buffer.get()
+                                                                              ->getNativeBuffer(),
+                                                                      useFramebufferCache);
+                status = fbo->getStatus();
+                setViewportAndProjection(display.physicalDisplay, display.clip);
+            } else {
+                // There's still something else to blur, so let's keep rendering to our FBO
+                // instead of to the display.
+                status = mBlurFilter->setAsDrawTarget(display,
+                                                      blurLayers.front()->backgroundBlurRadius);
+            }
+            if (status != NO_ERROR) {
+                ALOGE("Failed to bind framebuffer! Aborting GPU composition for buffer (%p).",
+                      buffer->handle);
+                checkErrors("Can't bind native framebuffer");
+                return status;
+            }
+
+            status = mBlurFilter->render(blurLayersSize > 1);
+            if (status != NO_ERROR) {
+                ALOGE("Failed to render blur effect! Aborting GPU composition for buffer (%p).",
+                      buffer->handle);
+                checkErrors("Can't render blur filter");
+                return status;
+            }
+        }
+
+        mState.maxMasteringLuminance = layer->source.buffer.maxMasteringLuminance;
+        mState.maxContentLuminance = layer->source.buffer.maxContentLuminance;
+        mState.projectionMatrix = projectionMatrix * layer->geometry.positionTransform;
+
+        const FloatRect bounds = layer->geometry.boundaries;
+        Mesh::VertexArray<vec2> position(mesh.getPositionArray<vec2>());
+        position[0] = vec2(bounds.left, bounds.top);
+        position[1] = vec2(bounds.left, bounds.bottom);
+        position[2] = vec2(bounds.right, bounds.bottom);
+        position[3] = vec2(bounds.right, bounds.top);
+
+        setupLayerCropping(*layer, mesh);
+        setColorTransform(display.colorTransform * layer->colorTransform);
+
+        bool usePremultipliedAlpha = true;
+        bool disableTexture = true;
+        bool isOpaque = false;
+        if (layer->source.buffer.buffer != nullptr) {
+            disableTexture = false;
+            isOpaque = layer->source.buffer.isOpaque;
+
+            sp<GraphicBuffer> gBuf = layer->source.buffer.buffer;
+            bindExternalTextureBuffer(layer->source.buffer.textureName, gBuf,
+                                      layer->source.buffer.fence);
+
+            usePremultipliedAlpha = layer->source.buffer.usePremultipliedAlpha;
+            Texture texture(Texture::TEXTURE_EXTERNAL, layer->source.buffer.textureName);
+            mat4 texMatrix = layer->source.buffer.textureTransform;
+
+            texture.setMatrix(texMatrix.asArray());
+            texture.setFiltering(layer->source.buffer.useTextureFiltering);
+
+            texture.setDimensions(gBuf->getWidth(), gBuf->getHeight());
+            setSourceY410BT2020(layer->source.buffer.isY410BT2020);
+
+            renderengine::Mesh::VertexArray<vec2> texCoords(mesh.getTexCoordArray<vec2>());
+            texCoords[0] = vec2(0.0, 0.0);
+            texCoords[1] = vec2(0.0, 1.0);
+            texCoords[2] = vec2(1.0, 1.0);
+            texCoords[3] = vec2(1.0, 0.0);
+            setupLayerTexturing(texture);
+        }
+
+        const half3 solidColor = layer->source.solidColor;
+        const half4 color = half4(solidColor.r, solidColor.g, solidColor.b, layer->alpha);
+        // Buffer sources will have a black solid color ignored in the shader,
+        // so in that scenario the solid color passed here is arbitrary.
+        setupLayerBlending(usePremultipliedAlpha, isOpaque, disableTexture, color,
+                           layer->geometry.roundedCornersRadius);
+        if (layer->disableBlending) {
+            glDisable(GL_BLEND);
+        }
+        setSourceDataSpace(layer->sourceDataspace);
+
+        if (layer->shadow.length > 0.0f) {
+            handleShadow(layer->geometry.boundaries, layer->geometry.roundedCornersRadius,
+                         layer->shadow);
+        }
+        // We only apply special handling for rounded corners when rounded corners are the only
+        // reason the layer needs blending; otherwise we draw it the usual way, since blending
+        // has to be enabled anyway.
+        else if (layer->geometry.roundedCornersRadius > 0.0 && color.a >= 1.0f && isOpaque) {
+            handleRoundedCorners(display, *layer, mesh);
+        } else {
+            drawMesh(mesh);
+        }
+
+        // Cleanup if there's a buffer source
+        if (layer->source.buffer.buffer != nullptr) {
+            disableBlending();
+            setSourceY410BT2020(false);
+            disableTexturing();
+        }
+    }
+
+    if (drawFence != nullptr) {
+        *drawFence = flush();
+    }
+    // If flush failed or we don't support native fences, we need to force the
+    // gl command stream to be executed.
+    if (drawFence == nullptr || drawFence->get() < 0) {
+        bool success = finish();
+        if (!success) {
+            ALOGE("Failed to flush RenderEngine commands");
+            checkErrors();
+            // Chances are, something illegal happened (either the caller passed
+            // us bad parameters, or we messed up our shader generation).
+            return INVALID_OPERATION;
+        }
+        mLastDrawFence = nullptr;
+    } else {
+        // The caller takes ownership of drawFence, so we need to duplicate the
+        // fd here.
+        mLastDrawFence = new Fence(dup(drawFence->get()));
+    }
+    mPriorResourcesCleaned = false;
+
+    checkErrors();
+    return NO_ERROR;
+}
+
+void GLESRenderEngine::setViewportAndProjection(Rect viewport, Rect clip) {
+    ATRACE_CALL();
+    mVpWidth = viewport.getWidth();
+    mVpHeight = viewport.getHeight();
+
+    // We pass the top-left corner instead of the bottom-left corner
+    // because we're rendering off-screen first.
+    glViewport(viewport.left, viewport.top, mVpWidth, mVpHeight);
+
+    mState.projectionMatrix = mat4::ortho(clip.left, clip.right, clip.top, clip.bottom, 0, 1);
+}
+
+void GLESRenderEngine::setupLayerBlending(bool premultipliedAlpha, bool opaque, bool disableTexture,
+                                          const half4& color, float cornerRadius) {
+    mState.isPremultipliedAlpha = premultipliedAlpha;
+    mState.isOpaque = opaque;
+    mState.color = color;
+    mState.cornerRadius = cornerRadius;
+
+    if (disableTexture) {
+        mState.textureEnabled = false;
+    }
+
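+    // Blending is required when the layer is translucent, not opaque, or has rounded corners.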
+    if (color.a < 1.0f || !opaque || cornerRadius > 0.0f) {
+        glEnable(GL_BLEND);
+        glBlendFunc(premultipliedAlpha ? GL_ONE : GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
+    } else {
+        glDisable(GL_BLEND);
+    }
+}
+
+void GLESRenderEngine::setSourceY410BT2020(bool enable) {
+    mState.isY410BT2020 = enable;
+}
+
+void GLESRenderEngine::setSourceDataSpace(Dataspace source) {
+    mDataSpace = source;
+}
+
+void GLESRenderEngine::setOutputDataSpace(Dataspace dataspace) {
+    mOutputDataSpace = dataspace;
+}
+
+void GLESRenderEngine::setDisplayMaxLuminance(const float maxLuminance) {
+    mState.displayMaxLuminance = maxLuminance;
+}
+
+void GLESRenderEngine::setupLayerTexturing(const Texture& texture) {
+    GLuint target = texture.getTextureTarget();
+    glBindTexture(target, texture.getTextureName());
+    GLenum filter = GL_NEAREST;
+    if (texture.getFiltering()) {
+        filter = GL_LINEAR;
+    }
+    glTexParameteri(target, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+    glTexParameteri(target, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+    glTexParameteri(target, GL_TEXTURE_MAG_FILTER, filter);
+    glTexParameteri(target, GL_TEXTURE_MIN_FILTER, filter);
+
+    mState.texture = texture;
+    mState.textureEnabled = true;
+}
+
+void GLESRenderEngine::setColorTransform(const mat4& colorTransform) {
+    mState.colorMatrix = colorTransform;
+}
+
+void GLESRenderEngine::disableTexturing() {
+    mState.textureEnabled = false;
+}
+
+void GLESRenderEngine::disableBlending() {
+    glDisable(GL_BLEND);
+}
+
+void GLESRenderEngine::setupFillWithColor(float r, float g, float b, float a) {
+    mState.isPremultipliedAlpha = true;
+    mState.isOpaque = false;
+    mState.color = half4(r, g, b, a);
+    mState.textureEnabled = false;
+    glDisable(GL_BLEND);
+}
+
+void GLESRenderEngine::setupCornerRadiusCropSize(float width, float height) {
+    mState.cropSize = half2(width, height);
+}
+
+void GLESRenderEngine::drawMesh(const Mesh& mesh) {
+    ATRACE_CALL();
+    if (mesh.getTexCoordsSize()) {
+        glEnableVertexAttribArray(Program::texCoords);
+        glVertexAttribPointer(Program::texCoords, mesh.getTexCoordsSize(), GL_FLOAT, GL_FALSE,
+                              mesh.getByteStride(), mesh.getTexCoords());
+    }
+
+    glVertexAttribPointer(Program::position, mesh.getVertexSize(), GL_FLOAT, GL_FALSE,
+                          mesh.getByteStride(), mesh.getPositions());
+
+    if (mState.cornerRadius > 0.0f) {
+        glEnableVertexAttribArray(Program::cropCoords);
+        glVertexAttribPointer(Program::cropCoords, mesh.getVertexSize(), GL_FLOAT, GL_FALSE,
+                              mesh.getByteStride(), mesh.getCropCoords());
+    }
+
+    if (mState.drawShadows) {
+        glEnableVertexAttribArray(Program::shadowColor);
+        glVertexAttribPointer(Program::shadowColor, mesh.getShadowColorSize(), GL_FLOAT, GL_FALSE,
+                              mesh.getByteStride(), mesh.getShadowColor());
+
+        glEnableVertexAttribArray(Program::shadowParams);
+        glVertexAttribPointer(Program::shadowParams, mesh.getShadowParamsSize(), GL_FLOAT, GL_FALSE,
+                              mesh.getByteStride(), mesh.getShadowParams());
+    }
+
+    Description managedState = mState;
+    // By default, DISPLAY_P3 is the only supported wide color output. However, when HDR
+    // content is present, the hardware composer may be able to handle the BT2020 data space;
+    // in that case, the output data space is set to BT2020_HLG or BT2020_PQ respectively. In
+    // the GPU fallback path we need to respect this and convert non-HDR content to HDR format.
+    if (mUseColorManagement) {
+        Dataspace inputStandard = static_cast<Dataspace>(mDataSpace & Dataspace::STANDARD_MASK);
+        Dataspace inputTransfer = static_cast<Dataspace>(mDataSpace & Dataspace::TRANSFER_MASK);
+        Dataspace outputStandard =
+                static_cast<Dataspace>(mOutputDataSpace & Dataspace::STANDARD_MASK);
+        Dataspace outputTransfer =
+                static_cast<Dataspace>(mOutputDataSpace & Dataspace::TRANSFER_MASK);
+        bool needsXYZConversion = needsXYZTransformMatrix();
+
+        // NOTE: if the standard of the input dataspace is not STANDARD_DCI_P3 or
+        // STANDARD_BT2020, it will be treated as STANDARD_BT709.
+        if (inputStandard != Dataspace::STANDARD_DCI_P3 &&
+            inputStandard != Dataspace::STANDARD_BT2020) {
+            inputStandard = Dataspace::STANDARD_BT709;
+        }
+
+        if (needsXYZConversion) {
+            // The supported input color spaces are standard RGB, Display P3 and BT2020.
+            switch (inputStandard) {
+                case Dataspace::STANDARD_DCI_P3:
+                    managedState.inputTransformMatrix = mDisplayP3ToXyz;
+                    break;
+                case Dataspace::STANDARD_BT2020:
+                    managedState.inputTransformMatrix = mBt2020ToXyz;
+                    break;
+                default:
+                    managedState.inputTransformMatrix = mSrgbToXyz;
+                    break;
+            }
+
+            // The supported output color spaces are BT2020, Display P3 and standard RGB.
+            switch (outputStandard) {
+                case Dataspace::STANDARD_BT2020:
+                    managedState.outputTransformMatrix = mXyzToBt2020;
+                    break;
+                case Dataspace::STANDARD_DCI_P3:
+                    managedState.outputTransformMatrix = mXyzToDisplayP3;
+                    break;
+                default:
+                    managedState.outputTransformMatrix = mXyzToSrgb;
+                    break;
+            }
+        } else if (inputStandard != outputStandard) {
+            // At this point, the input and output data spaces could both be HDR data spaces;
+            // when they match each other, we do nothing. In addition to that case, the input
+            // data space could be
+            // - scRGB linear
+            // - scRGB non-linear
+            // - sRGB
+            // - Display P3
+            // - BT2020
+            // The output data spaces could be
+            // - sRGB
+            // - Display P3
+            // - BT2020
+            switch (outputStandard) {
+                case Dataspace::STANDARD_BT2020:
+                    if (inputStandard == Dataspace::STANDARD_BT709) {
+                        managedState.outputTransformMatrix = mSrgbToBt2020;
+                    } else if (inputStandard == Dataspace::STANDARD_DCI_P3) {
+                        managedState.outputTransformMatrix = mDisplayP3ToBt2020;
+                    }
+                    break;
+                case Dataspace::STANDARD_DCI_P3:
+                    if (inputStandard == Dataspace::STANDARD_BT709) {
+                        managedState.outputTransformMatrix = mSrgbToDisplayP3;
+                    } else if (inputStandard == Dataspace::STANDARD_BT2020) {
+                        managedState.outputTransformMatrix = mBt2020ToDisplayP3;
+                    }
+                    break;
+                default:
+                    if (inputStandard == Dataspace::STANDARD_DCI_P3) {
+                        managedState.outputTransformMatrix = mDisplayP3ToSrgb;
+                    } else if (inputStandard == Dataspace::STANDARD_BT2020) {
+                        managedState.outputTransformMatrix = mBt2020ToSrgb;
+                    }
+                    break;
+            }
+        }
+
+        // we need to convert the RGB value to linear space and convert it back when:
+        // - there is a color matrix that is not an identity matrix, or
+        // - there is an output transform matrix that is not an identity matrix, or
+        // - the input transfer function doesn't match the output transfer function.
+        if (managedState.hasColorMatrix() || managedState.hasOutputTransformMatrix() ||
+            inputTransfer != outputTransfer) {
+            managedState.inputTransferFunction =
+                    Description::dataSpaceToTransferFunction(inputTransfer);
+            managedState.outputTransferFunction =
+                    Description::dataSpaceToTransferFunction(outputTransfer);
+        }
+    }
+
+    ProgramCache::getInstance().useProgram(mInProtectedContext ? mProtectedEGLContext : mEGLContext,
+                                           managedState);
+
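+    // Shadow meshes are indexed and drawn with glDrawElements; everything else is drawn with
+    // glDrawArrays.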
+    if (mState.drawShadows) {
+        glDrawElements(mesh.getPrimitive(), mesh.getIndexCount(), GL_UNSIGNED_SHORT,
+                       mesh.getIndices());
+    } else {
+        glDrawArrays(mesh.getPrimitive(), 0, mesh.getVertexCount());
+    }
+
+    if (mUseColorManagement && outputDebugPPMs) {
+        static uint64_t managedColorFrameCount = 0;
+        std::ostringstream out;
+        out << "/data/texture_out" << managedColorFrameCount++;
+        writePPM(out.str().c_str(), mVpWidth, mVpHeight);
+    }
+
+    if (mesh.getTexCoordsSize()) {
+        glDisableVertexAttribArray(Program::texCoords);
+    }
+
+    if (mState.cornerRadius > 0.0f) {
+        glDisableVertexAttribArray(Program::cropCoords);
+    }
+
+    if (mState.drawShadows) {
+        glDisableVertexAttribArray(Program::shadowColor);
+        glDisableVertexAttribArray(Program::shadowParams);
+    }
+}
+
+size_t GLESRenderEngine::getMaxTextureSize() const {
+    return mMaxTextureSize;
+}
+
+size_t GLESRenderEngine::getMaxViewportDims() const {
+    return mMaxViewportDims[0] < mMaxViewportDims[1] ? mMaxViewportDims[0] : mMaxViewportDims[1];
+}
+
+void GLESRenderEngine::dump(std::string& result) {
+    const GLExtensions& extensions = GLExtensions::getInstance();
+    ProgramCache& cache = ProgramCache::getInstance();
+
+    StringAppendF(&result, "EGL implementation : %s\n", extensions.getEGLVersion());
+    StringAppendF(&result, "%s\n", extensions.getEGLExtensions());
+    StringAppendF(&result, "GLES: %s, %s, %s\n", extensions.getVendor(), extensions.getRenderer(),
+                  extensions.getVersion());
+    StringAppendF(&result, "%s\n", extensions.getExtensions());
+    StringAppendF(&result, "RenderEngine supports protected context: %d\n",
+                  supportsProtectedContent());
+    StringAppendF(&result, "RenderEngine is in protected context: %d\n", mInProtectedContext);
+    StringAppendF(&result, "RenderEngine program cache size for unprotected context: %zu\n",
+                  cache.getSize(mEGLContext));
+    StringAppendF(&result, "RenderEngine program cache size for protected context: %zu\n",
+                  cache.getSize(mProtectedEGLContext));
+    StringAppendF(&result, "RenderEngine last dataspace conversion: (%s) to (%s)\n",
+                  dataspaceDetails(static_cast<android_dataspace>(mDataSpace)).c_str(),
+                  dataspaceDetails(static_cast<android_dataspace>(mOutputDataSpace)).c_str());
+    {
+        std::lock_guard<std::mutex> lock(mRenderingMutex);
+        StringAppendF(&result, "RenderEngine image cache size: %zu\n", mImageCache.size());
+        StringAppendF(&result, "Dumping buffer ids...\n");
+        for (const auto& [id, unused] : mImageCache) {
+            StringAppendF(&result, "0x%" PRIx64 "\n", id);
+        }
+    }
+    {
+        std::lock_guard<std::mutex> lock(mFramebufferImageCacheMutex);
+        StringAppendF(&result, "RenderEngine framebuffer image cache size: %zu\n",
+                      mFramebufferImageCache.size());
+        StringAppendF(&result, "Dumping buffer ids...\n");
+        for (const auto& [id, unused] : mFramebufferImageCache) {
+            StringAppendF(&result, "0x%" PRIx64 "\n", id);
+        }
+    }
+}
+
+GLESRenderEngine::GlesVersion GLESRenderEngine::parseGlesVersion(const char* str) {
+    int major, minor;
+    if (sscanf(str, "OpenGL ES-CM %d.%d", &major, &minor) != 2) {
+        if (sscanf(str, "OpenGL ES %d.%d", &major, &minor) != 2) {
+            ALOGW("Unable to parse GL_VERSION string: \"%s\"", str);
+            return GLES_VERSION_1_0;
+        }
+    }
+
+    if (major == 1 && minor == 0) return GLES_VERSION_1_0;
+    if (major == 1 && minor >= 1) return GLES_VERSION_1_1;
+    if (major == 2 && minor >= 0) return GLES_VERSION_2_0;
+    if (major == 3 && minor >= 0) return GLES_VERSION_3_0;
+
+    ALOGW("Unrecognized OpenGL ES version: %d.%d", major, minor);
+    return GLES_VERSION_1_0;
+}
+
+EGLContext GLESRenderEngine::createEglContext(EGLDisplay display, EGLConfig config,
+                                              EGLContext shareContext, bool useContextPriority,
+                                              Protection protection) {
+    EGLint renderableType = 0;
+    if (config == EGL_NO_CONFIG) {
+        renderableType = EGL_OPENGL_ES3_BIT;
+    } else if (!eglGetConfigAttrib(display, config, EGL_RENDERABLE_TYPE, &renderableType)) {
+        LOG_ALWAYS_FATAL("can't query EGLConfig RENDERABLE_TYPE");
+    }
+    EGLint contextClientVersion = 0;
+    if (renderableType & EGL_OPENGL_ES3_BIT) {
+        contextClientVersion = 3;
+    } else if (renderableType & EGL_OPENGL_ES2_BIT) {
+        contextClientVersion = 2;
+    } else if (renderableType & EGL_OPENGL_ES_BIT) {
+        contextClientVersion = 1;
+    } else {
+        LOG_ALWAYS_FATAL("no supported EGL_RENDERABLE_TYPEs");
+    }
+
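+    // Build the context attribute list: client version, optionally a high-priority hint, and
+    // optionally protected content. The list is terminated with EGL_NONE.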
+    std::vector<EGLint> contextAttributes;
+    contextAttributes.reserve(7);
+    contextAttributes.push_back(EGL_CONTEXT_CLIENT_VERSION);
+    contextAttributes.push_back(contextClientVersion);
+    if (useContextPriority) {
+        contextAttributes.push_back(EGL_CONTEXT_PRIORITY_LEVEL_IMG);
+        contextAttributes.push_back(EGL_CONTEXT_PRIORITY_HIGH_IMG);
+    }
+    if (protection == Protection::PROTECTED) {
+        contextAttributes.push_back(EGL_PROTECTED_CONTENT_EXT);
+        contextAttributes.push_back(EGL_TRUE);
+    }
+    contextAttributes.push_back(EGL_NONE);
+
+    EGLContext context = eglCreateContext(display, config, shareContext, contextAttributes.data());
+
+    if (contextClientVersion == 3 && context == EGL_NO_CONTEXT) {
+        // eglGetConfigAttrib indicated that we could create a GLES 3 context, but creation
+        // failed, so return EGL_NO_CONTEXT to let the caller abort.
+        if (config != EGL_NO_CONFIG) {
+            return context;
+        }
+        // If |config| is EGL_NO_CONFIG, we speculatively try to create GLES 3 context, so we should
+        // try to fall back to GLES 2.
+        contextAttributes[1] = 2;
+        context = eglCreateContext(display, config, shareContext, contextAttributes.data());
+    }
+
+    return context;
+}
+
+EGLSurface GLESRenderEngine::createStubEglPbufferSurface(EGLDisplay display, EGLConfig config,
+                                                         int hwcFormat, Protection protection) {
+    EGLConfig stubConfig = config;
+    if (stubConfig == EGL_NO_CONFIG) {
+        stubConfig = chooseEglConfig(display, hwcFormat, /*logConfig*/ true);
+    }
+    std::vector<EGLint> attributes;
+    attributes.reserve(7);
+    attributes.push_back(EGL_WIDTH);
+    attributes.push_back(1);
+    attributes.push_back(EGL_HEIGHT);
+    attributes.push_back(1);
+    if (protection == Protection::PROTECTED) {
+        attributes.push_back(EGL_PROTECTED_CONTENT_EXT);
+        attributes.push_back(EGL_TRUE);
+    }
+    attributes.push_back(EGL_NONE);
+
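+    // A 1x1 pbuffer is sufficient; the stub surface only exists so that the context can be made
+    // current without a real window surface.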
+    return eglCreatePbufferSurface(display, stubConfig, attributes.data());
+}
+
+bool GLESRenderEngine::isHdrDataSpace(const Dataspace dataSpace) const {
+    const Dataspace standard = static_cast<Dataspace>(dataSpace & Dataspace::STANDARD_MASK);
+    const Dataspace transfer = static_cast<Dataspace>(dataSpace & Dataspace::TRANSFER_MASK);
+    return standard == Dataspace::STANDARD_BT2020 &&
+            (transfer == Dataspace::TRANSFER_ST2084 || transfer == Dataspace::TRANSFER_HLG);
+}
+
+// For convenience, we want to convert the input color space to the XYZ color space first, and
+// then convert from XYZ to the output color space when
+// - SDR and HDR contents are mixed, and either SDR content will be converted to HDR or
+//   HDR content will be tone-mapped to SDR; or,
+// - HDR PQ and HLG contents are presented at the same time, and we want to convert HLG
+//   content to PQ content.
+// In either case above, we need to operate on the Y value in the XYZ color space. Thus, when
+// either the input or the output data space is an HDR data space and the input transfer
+// function doesn't match the output transfer function, we enable an intermediate transform to
+// the XYZ color space.
+bool GLESRenderEngine::needsXYZTransformMatrix() const {
+    const bool isInputHdrDataSpace = isHdrDataSpace(mDataSpace);
+    const bool isOutputHdrDataSpace = isHdrDataSpace(mOutputDataSpace);
+    const Dataspace inputTransfer = static_cast<Dataspace>(mDataSpace & Dataspace::TRANSFER_MASK);
+    const Dataspace outputTransfer =
+            static_cast<Dataspace>(mOutputDataSpace & Dataspace::TRANSFER_MASK);
+
+    return (isInputHdrDataSpace || isOutputHdrDataSpace) && inputTransfer != outputTransfer;
+}
+
+bool GLESRenderEngine::isImageCachedForTesting(uint64_t bufferId) {
+    std::lock_guard<std::mutex> lock(mRenderingMutex);
+    const auto& cachedImage = mImageCache.find(bufferId);
+    return cachedImage != mImageCache.end();
+}
+
+bool GLESRenderEngine::isTextureNameKnownForTesting(uint32_t texName) {
+    const auto& entry = mTextureView.find(texName);
+    return entry != mTextureView.end();
+}
+
+std::optional<uint64_t> GLESRenderEngine::getBufferIdForTextureNameForTesting(uint32_t texName) {
+    const auto& entry = mTextureView.find(texName);
+    return entry != mTextureView.end() ? entry->second : std::nullopt;
+}
+
+bool GLESRenderEngine::isFramebufferImageCachedForTesting(uint64_t bufferId) {
+    std::lock_guard<std::mutex> lock(mFramebufferImageCacheMutex);
+    return std::any_of(mFramebufferImageCache.cbegin(), mFramebufferImageCache.cend(),
+                       [=](std::pair<uint64_t, EGLImageKHR> image) {
+                           return image.first == bufferId;
+                       });
+}
+
+// FlushTracer implementation
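+// FlushTracer runs a background thread that waits on queued EGL sync objects, so that GPU
+// completion of each frame is visible in systrace.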
+GLESRenderEngine::FlushTracer::FlushTracer(GLESRenderEngine* engine) : mEngine(engine) {
+    mThread = std::thread(&GLESRenderEngine::FlushTracer::loop, this);
+}
+
+GLESRenderEngine::FlushTracer::~FlushTracer() {
+    {
+        std::lock_guard<std::mutex> lock(mMutex);
+        mRunning = false;
+    }
+    mCondition.notify_all();
+    if (mThread.joinable()) {
+        mThread.join();
+    }
+}
+
+void GLESRenderEngine::FlushTracer::queueSync(EGLSyncKHR sync) {
+    std::lock_guard<std::mutex> lock(mMutex);
+    char name[64];
+    const uint64_t frameNum = mFramesQueued++;
+    snprintf(name, sizeof(name), "Queueing sync for frame: %lu",
+             static_cast<unsigned long>(frameNum));
+    ATRACE_NAME(name);
+    mQueue.push({sync, frameNum});
+    ATRACE_INT("GPU Frames Outstanding", mQueue.size());
+    mCondition.notify_one();
+}
+
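+// Worker loop: block until a sync object is queued (or shutdown is requested), then wait for the
+// GPU to signal it.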
+void GLESRenderEngine::FlushTracer::loop() {
+    while (mRunning) {
+        QueueEntry entry;
+        {
+            std::lock_guard<std::mutex> lock(mMutex);
+
+            mCondition.wait(mMutex,
+                            [&]() REQUIRES(mMutex) { return !mQueue.empty() || !mRunning; });
+
+            if (!mRunning) {
+                // if mRunning is false, then FlushTracer is being destroyed, so
+                // bail out now.
+                break;
+            }
+            entry = mQueue.front();
+            mQueue.pop();
+        }
+        {
+            char name[64];
+            snprintf(name, sizeof(name), "waiting for frame %lu",
+                     static_cast<unsigned long>(entry.mFrameNum));
+            ATRACE_NAME(name);
+            mEngine->waitSync(entry.mSync, 0);
+        }
+    }
+}
+
+void GLESRenderEngine::handleShadow(const FloatRect& casterRect, float casterCornerRadius,
+                                    const ShadowSettings& settings) {
+    ATRACE_CALL();
+    const float casterZ = settings.length / 2.0f;
+    const GLShadowVertexGenerator shadows(casterRect, casterCornerRadius, casterZ,
+                                          settings.casterIsTranslucent, settings.ambientColor,
+                                          settings.spotColor, settings.lightPos,
+                                          settings.lightRadius);
+
+    // setup mesh for both shadows
+    Mesh mesh = Mesh::Builder()
+                        .setPrimitive(Mesh::TRIANGLES)
+                        .setVertices(shadows.getVertexCount(), 2 /* size */)
+                        .setShadowAttrs()
+                        .setIndices(shadows.getIndexCount())
+                        .build();
+
+    Mesh::VertexArray<vec2> position = mesh.getPositionArray<vec2>();
+    Mesh::VertexArray<vec4> shadowColor = mesh.getShadowColorArray<vec4>();
+    Mesh::VertexArray<vec3> shadowParams = mesh.getShadowParamsArray<vec3>();
+    shadows.fillVertices(position, shadowColor, shadowParams);
+    shadows.fillIndices(mesh.getIndicesArray());
+
+    mState.cornerRadius = 0.0f;
+    mState.drawShadows = true;
+    setupLayerTexturing(mShadowTexture.getTexture());
+    drawMesh(mesh);
+    mState.drawShadows = false;
+}
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/GLESRenderEngine.h b/media/libstagefright/renderfright/gl/GLESRenderEngine.h
new file mode 100644
index 0000000..2c6eae2
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/GLESRenderEngine.h
@@ -0,0 +1,296 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SF_GLESRENDERENGINE_H_
+#define SF_GLESRENDERENGINE_H_
+
+#include <condition_variable>
+#include <deque>
+#include <mutex>
+#include <queue>
+#include <thread>
+#include <unordered_map>
+
+#include <EGL/egl.h>
+#include <EGL/eglext.h>
+#include <GLES2/gl2.h>
+#include <android-base/thread_annotations.h>
+#include <renderengine/RenderEngine.h>
+#include <renderengine/private/Description.h>
+#include <sys/types.h>
+#include "GLShadowTexture.h"
+#include "ImageManager.h"
+
+#define EGL_NO_CONFIG ((EGLConfig)0)
+
+namespace android {
+
+namespace renderengine {
+
+class Mesh;
+class Texture;
+
+namespace gl {
+
+class GLImage;
+class BlurFilter;
+
+class GLESRenderEngine : public impl::RenderEngine {
+public:
+    static std::unique_ptr<GLESRenderEngine> create(const RenderEngineCreationArgs& args);
+
+    GLESRenderEngine(const RenderEngineCreationArgs& args, EGLDisplay display, EGLConfig config,
+                     EGLContext ctxt, EGLSurface stub, EGLContext protectedContext,
+                     EGLSurface protectedStub);
+    ~GLESRenderEngine() override EXCLUDES(mRenderingMutex);
+
+    void primeCache() const override;
+    void genTextures(size_t count, uint32_t* names) override;
+    void deleteTextures(size_t count, uint32_t const* names) override;
+    void bindExternalTextureImage(uint32_t texName, const Image& image) override;
+    status_t bindExternalTextureBuffer(uint32_t texName, const sp<GraphicBuffer>& buffer,
+                                       const sp<Fence>& fence) EXCLUDES(mRenderingMutex);
+    void cacheExternalTextureBuffer(const sp<GraphicBuffer>& buffer) EXCLUDES(mRenderingMutex);
+    void unbindExternalTextureBuffer(uint64_t bufferId) EXCLUDES(mRenderingMutex);
+    status_t bindFrameBuffer(Framebuffer* framebuffer) override;
+    void unbindFrameBuffer(Framebuffer* framebuffer) override;
+
+    bool isProtected() const override { return mInProtectedContext; }
+    bool supportsProtectedContent() const override;
+    bool useProtectedContext(bool useProtectedContext) override;
+    status_t drawLayers(const DisplaySettings& display,
+                        const std::vector<const LayerSettings*>& layers,
+                        const sp<GraphicBuffer>& buffer, const bool useFramebufferCache,
+                        base::unique_fd&& bufferFence, base::unique_fd* drawFence) override;
+    bool cleanupPostRender(CleanupMode mode) override;
+
+    EGLDisplay getEGLDisplay() const { return mEGLDisplay; }
+    // Creates an output image for rendering to
+    EGLImageKHR createFramebufferImageIfNeeded(ANativeWindowBuffer* nativeBuffer, bool isProtected,
+                                               bool useFramebufferCache)
+            EXCLUDES(mFramebufferImageCacheMutex);
+
+    // Test-only methods
+    // Returns true iff mImageCache contains an image keyed by bufferId
+    bool isImageCachedForTesting(uint64_t bufferId) EXCLUDES(mRenderingMutex);
+    // Returns true iff texName was previously generated by RenderEngine and was
+    // not destroyed.
+    bool isTextureNameKnownForTesting(uint32_t texName);
+    // Returns the buffer ID of the content bound to texName, or nullopt if no
+    // such mapping exists.
+    std::optional<uint64_t> getBufferIdForTextureNameForTesting(uint32_t texName);
+    // Returns true iff mFramebufferImageCache contains an image keyed by bufferId
+    bool isFramebufferImageCachedForTesting(uint64_t bufferId)
+            EXCLUDES(mFramebufferImageCacheMutex);
+    // These are wrappers around public methods above, but exposing Barrier
+    // objects so that tests can block.
+    std::shared_ptr<ImageManager::Barrier> cacheExternalTextureBufferForTesting(
+            const sp<GraphicBuffer>& buffer);
+    std::shared_ptr<ImageManager::Barrier> unbindExternalTextureBufferForTesting(uint64_t bufferId);
+
+protected:
+    Framebuffer* getFramebufferForDrawing() override;
+    void dump(std::string& result) override EXCLUDES(mRenderingMutex)
+            EXCLUDES(mFramebufferImageCacheMutex);
+    size_t getMaxTextureSize() const override;
+    size_t getMaxViewportDims() const override;
+
+private:
+    enum GlesVersion {
+        GLES_VERSION_1_0 = 0x10000,
+        GLES_VERSION_1_1 = 0x10001,
+        GLES_VERSION_2_0 = 0x20000,
+        GLES_VERSION_3_0 = 0x30000,
+    };
+
+    static EGLConfig chooseEglConfig(EGLDisplay display, int format, bool logConfig);
+    static GlesVersion parseGlesVersion(const char* str);
+    static EGLContext createEglContext(EGLDisplay display, EGLConfig config,
+                                       EGLContext shareContext, bool useContextPriority,
+                                       Protection protection);
+    static EGLSurface createStubEglPbufferSurface(EGLDisplay display, EGLConfig config,
+                                                  int hwcFormat, Protection protection);
+    std::unique_ptr<Framebuffer> createFramebuffer();
+    std::unique_ptr<Image> createImage();
+    void checkErrors() const;
+    void checkErrors(const char* tag) const;
+    void setScissor(const Rect& region);
+    void disableScissor();
+    bool waitSync(EGLSyncKHR sync, EGLint flags);
+    status_t cacheExternalTextureBufferInternal(const sp<GraphicBuffer>& buffer)
+            EXCLUDES(mRenderingMutex);
+    void unbindExternalTextureBufferInternal(uint64_t bufferId) EXCLUDES(mRenderingMutex);
+
+    // A data space is considered HDR data space if it has BT2020 color space
+    // with PQ or HLG transfer function.
+    bool isHdrDataSpace(const ui::Dataspace dataSpace) const;
+    bool needsXYZTransformMatrix() const;
+    // Defines the viewport, and sets the projection matrix to the projection
+    // defined by the clip.
+    void setViewportAndProjection(Rect viewport, Rect clip);
+    // Evicts stale images from the buffer cache.
+    void evictImages(const std::vector<LayerSettings>& layers);
+    // Computes the cropping window for the layer and sets up cropping
+    // coordinates for the mesh.
+    FloatRect setupLayerCropping(const LayerSettings& layer, Mesh& mesh);
+
+    // We apply special handling for rounded corners when it's possible to turn off blending
+    // for the majority of the layer. Rounded corners need blending so that the alpha value
+    // can be set correctly; however, only the corners need this, and since blending is an
+    // expensive operation, we want to turn it off when it's not necessary.
+    void handleRoundedCorners(const DisplaySettings& display, const LayerSettings& layer,
+                              const Mesh& mesh);
+    base::unique_fd flush();
+    bool finish();
+    bool waitFence(base::unique_fd fenceFd);
+    void clearWithColor(float red, float green, float blue, float alpha);
+    void fillRegionWithColor(const Region& region, float red, float green, float blue, float alpha);
+    void handleShadow(const FloatRect& casterRect, float casterCornerRadius,
+                      const ShadowSettings& shadowSettings);
+    void setupLayerBlending(bool premultipliedAlpha, bool opaque, bool disableTexture,
+                            const half4& color, float cornerRadius);
+    void setupLayerTexturing(const Texture& texture);
+    void setupFillWithColor(float r, float g, float b, float a);
+    void setColorTransform(const mat4& colorTransform);
+    void disableTexturing();
+    void disableBlending();
+    void setupCornerRadiusCropSize(float width, float height);
+
+    // HDR and color management related functions and state
+    void setSourceY410BT2020(bool enable);
+    void setSourceDataSpace(ui::Dataspace source);
+    void setOutputDataSpace(ui::Dataspace dataspace);
+    void setDisplayMaxLuminance(const float maxLuminance);
+
+    // drawing
+    void drawMesh(const Mesh& mesh);
+
+    EGLDisplay mEGLDisplay;
+    EGLConfig mEGLConfig;
+    EGLContext mEGLContext;
+    EGLSurface mStubSurface;
+    EGLContext mProtectedEGLContext;
+    EGLSurface mProtectedStubSurface;
+    GLint mMaxViewportDims[2];
+    GLint mMaxTextureSize;
+    GLuint mVpWidth;
+    GLuint mVpHeight;
+    Description mState;
+    GLShadowTexture mShadowTexture;
+
+    mat4 mSrgbToXyz;
+    mat4 mDisplayP3ToXyz;
+    mat4 mBt2020ToXyz;
+    mat4 mXyzToSrgb;
+    mat4 mXyzToDisplayP3;
+    mat4 mXyzToBt2020;
+    mat4 mSrgbToDisplayP3;
+    mat4 mSrgbToBt2020;
+    mat4 mDisplayP3ToSrgb;
+    mat4 mDisplayP3ToBt2020;
+    mat4 mBt2020ToSrgb;
+    mat4 mBt2020ToDisplayP3;
+
+    bool mInProtectedContext = false;
+    // If set to true, tracing of flush() and finish() to systrace is enabled.
+    bool mTraceGpuCompletion = false;
+    // Maximum size of mFramebufferImageCache. If more images would be cached, then
+    // (approximately) the least recently used buffer should be kicked out.
+    uint32_t mFramebufferImageCacheSize = 0;
+
+    // Cache of output images, keyed by corresponding GraphicBuffer ID.
+    std::deque<std::pair<uint64_t, EGLImageKHR>> mFramebufferImageCache
+            GUARDED_BY(mFramebufferImageCacheMutex);
+    // The only reason why we have this mutex is so that we don't segfault when
+    // dumping info.
+    std::mutex mFramebufferImageCacheMutex;
+
+    // Current dataspace of layer being rendered
+    ui::Dataspace mDataSpace = ui::Dataspace::UNKNOWN;
+
+    // Current output dataspace of the render engine
+    ui::Dataspace mOutputDataSpace = ui::Dataspace::UNKNOWN;
+
+    // Whether the device supports color management. Currently, color management supports the
+    // sRGB and Display P3 color spaces.
+    const bool mUseColorManagement = false;
+
+    // Cache of GL images that we'll store per GraphicBuffer ID
+    std::unordered_map<uint64_t, std::unique_ptr<Image>> mImageCache GUARDED_BY(mRenderingMutex);
+    std::unordered_map<uint32_t, std::optional<uint64_t>> mTextureView;
+
+    // Mutex guarding rendering operations, so that:
+    // 1. GL operations aren't interleaved, and
+    // 2. Internal state related to rendering that is potentially modified by
+    // multiple threads is guaranteed thread-safe.
+    std::mutex mRenderingMutex;
+
+    std::unique_ptr<Framebuffer> mDrawingBuffer;
+    // This is a 1x1 RGB buffer, but we over-allocate in case a driver wants more memory or
+    // needs to satisfy alignment requirements. In that case, assume that each channel requires
+    // 4 bytes, and add 3 additional bytes to ensure word alignment. Allocating 16 bytes
+    // guarantees that we don't clobber memory.
+    uint32_t mPlaceholderDrawBuffer[4];
+    // Placeholder buffer and image, similar to mPlaceholderDrawBuffer, but
+    // instead these are intended for cleaning up texture memory with the
+    // GL_TEXTURE_EXTERNAL_OES target.
+    ANativeWindowBuffer* mPlaceholderBuffer = nullptr;
+    EGLImage mPlaceholderImage = EGL_NO_IMAGE_KHR;
+    sp<Fence> mLastDrawFence;
+    // Store a separate boolean tracking whether prior resources were cleaned up, as devices
+    // that don't support native sync fences can't rely on a last draw fence that doesn't
+    // exist.
+    bool mPriorResourcesCleaned = true;
+
+    // Blur effect processor, only instantiated when a layer requests it.
+    BlurFilter* mBlurFilter = nullptr;
+
+    class FlushTracer {
+    public:
+        FlushTracer(GLESRenderEngine* engine);
+        ~FlushTracer();
+        void queueSync(EGLSyncKHR sync) EXCLUDES(mMutex);
+
+        struct QueueEntry {
+            EGLSyncKHR mSync = nullptr;
+            uint64_t mFrameNum = 0;
+        };
+
+    private:
+        void loop();
+        GLESRenderEngine* const mEngine;
+        std::thread mThread;
+        std::condition_variable_any mCondition;
+        std::mutex mMutex;
+        std::queue<QueueEntry> mQueue GUARDED_BY(mMutex);
+        uint64_t mFramesQueued GUARDED_BY(mMutex) = 0;
+        bool mRunning = true;
+    };
+    friend class FlushTracer;
+    friend class ImageManager;
+    friend class GLFramebuffer;
+    friend class BlurFilter;
+    friend class GenericProgram;
+    std::unique_ptr<FlushTracer> mFlushTracer;
+    std::unique_ptr<ImageManager> mImageManager = std::make_unique<ImageManager>(this);
+};
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
+
+#endif /* SF_GLESRENDERENGINE_H_ */
diff --git a/media/libstagefright/renderfright/gl/GLExtensions.cpp b/media/libstagefright/renderfright/gl/GLExtensions.cpp
new file mode 100644
index 0000000..2924b0e
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/GLExtensions.cpp
@@ -0,0 +1,135 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "GLExtensions.h"
+
+#include <string>
+#include <unordered_set>
+
+#include <stdint.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+ANDROID_SINGLETON_STATIC_INSTANCE(android::renderengine::gl::GLExtensions)
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+namespace {
+
+class ExtensionSet {
+public:
+    ExtensionSet(const char* extensions) {
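+        // Split the space-separated extension string into a set for O(1) membership checks.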
+        char const* curr = extensions;
+        char const* head = curr;
+        do {
+            head = strchr(curr, ' ');
+            size_t len = head ? head - curr : strlen(curr);
+            if (len > 0) {
+                mExtensions.emplace(curr, len);
+            }
+            curr = head + 1;
+        } while (head);
+    }
+
+    bool hasExtension(const char* extension) const { return mExtensions.count(extension) > 0; }
+
+private:
+    std::unordered_set<std::string> mExtensions;
+};
+
+} // anonymous namespace
+
+void GLExtensions::initWithGLStrings(GLubyte const* vendor, GLubyte const* renderer,
+                                     GLubyte const* version, GLubyte const* extensions) {
+    mVendor = (char const*)vendor;
+    mRenderer = (char const*)renderer;
+    mVersion = (char const*)version;
+    mExtensions = (char const*)extensions;
+
+    ExtensionSet extensionSet(mExtensions.c_str());
+    if (extensionSet.hasExtension("GL_EXT_protected_textures")) {
+        mHasProtectedTexture = true;
+    }
+}
+
+char const* GLExtensions::getVendor() const {
+    return mVendor.string();
+}
+
+char const* GLExtensions::getRenderer() const {
+    return mRenderer.string();
+}
+
+char const* GLExtensions::getVersion() const {
+    return mVersion.string();
+}
+
+char const* GLExtensions::getExtensions() const {
+    return mExtensions.string();
+}
+
+void GLExtensions::initWithEGLStrings(char const* eglVersion, char const* eglExtensions) {
+    mEGLVersion = eglVersion;
+    mEGLExtensions = eglExtensions;
+
+    ExtensionSet extensionSet(eglExtensions);
+
+    // EGL_ANDROIDX_no_config_context is an experimental extension with no
+    // written specification. It will be replaced by something more formal.
+    // SurfaceFlinger is using it to allow a single EGLContext to render to
+    // both a 16-bit primary display framebuffer and a 32-bit virtual display
+    // framebuffer.
+    //
+    // EGL_KHR_no_config_context is the official extension that allows creating a
+    // context that works with any surface of a display.
+    if (extensionSet.hasExtension("EGL_ANDROIDX_no_config_context") ||
+        extensionSet.hasExtension("EGL_KHR_no_config_context")) {
+        mHasNoConfigContext = true;
+    }
+
+    if (extensionSet.hasExtension("EGL_ANDROID_native_fence_sync")) {
+        mHasNativeFenceSync = true;
+    }
+    if (extensionSet.hasExtension("EGL_KHR_fence_sync")) {
+        mHasFenceSync = true;
+    }
+    if (extensionSet.hasExtension("EGL_KHR_wait_sync")) {
+        mHasWaitSync = true;
+    }
+    if (extensionSet.hasExtension("EGL_EXT_protected_content")) {
+        mHasProtectedContent = true;
+    }
+    if (extensionSet.hasExtension("EGL_IMG_context_priority")) {
+        mHasContextPriority = true;
+    }
+    if (extensionSet.hasExtension("EGL_KHR_surfaceless_context")) {
+        mHasSurfacelessContext = true;
+    }
+}
+
+char const* GLExtensions::getEGLVersion() const {
+    return mEGLVersion.string();
+}
+
+char const* GLExtensions::getEGLExtensions() const {
+    return mEGLExtensions.string();
+}
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/GLExtensions.h b/media/libstagefright/renderfright/gl/GLExtensions.h
new file mode 100644
index 0000000..ef00009
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/GLExtensions.h
@@ -0,0 +1,86 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SF_GLEXTENSION_H
+#define ANDROID_SF_GLEXTENSION_H
+
+#include <stdint.h>
+#include <sys/types.h>
+
+#include <EGL/egl.h>
+#include <EGL/eglext.h>
+#include <GLES/gl.h>
+#include <GLES/glext.h>
+#include <utils/Singleton.h>
+#include <utils/String8.h>
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+class GLExtensions : public Singleton<GLExtensions> {
+public:
+    bool hasNoConfigContext() const { return mHasNoConfigContext; }
+    bool hasNativeFenceSync() const { return mHasNativeFenceSync; }
+    bool hasFenceSync() const { return mHasFenceSync; }
+    bool hasWaitSync() const { return mHasWaitSync; }
+    bool hasProtectedContent() const { return mHasProtectedContent; }
+    bool hasContextPriority() const { return mHasContextPriority; }
+    bool hasSurfacelessContext() const { return mHasSurfacelessContext; }
+    bool hasProtectedTexture() const { return mHasProtectedTexture; }
+
+    void initWithGLStrings(GLubyte const* vendor, GLubyte const* renderer, GLubyte const* version,
+                           GLubyte const* extensions);
+    char const* getVendor() const;
+    char const* getRenderer() const;
+    char const* getVersion() const;
+    char const* getExtensions() const;
+
+    void initWithEGLStrings(char const* eglVersion, char const* eglExtensions);
+    char const* getEGLVersion() const;
+    char const* getEGLExtensions() const;
+
+protected:
+    GLExtensions() = default;
+
+private:
+    friend class Singleton<GLExtensions>;
+
+    bool mHasNoConfigContext = false;
+    bool mHasNativeFenceSync = false;
+    bool mHasFenceSync = false;
+    bool mHasWaitSync = false;
+    bool mHasProtectedContent = false;
+    bool mHasContextPriority = false;
+    bool mHasSurfacelessContext = false;
+    bool mHasProtectedTexture = false;
+
+    String8 mVendor;
+    String8 mRenderer;
+    String8 mVersion;
+    String8 mExtensions;
+    String8 mEGLVersion;
+    String8 mEGLExtensions;
+
+    GLExtensions(const GLExtensions&);
+    GLExtensions& operator=(const GLExtensions&);
+};
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
+
+#endif // ANDROID_SF_GLEXTENSION_H
diff --git a/media/libstagefright/renderfright/gl/GLFramebuffer.cpp b/media/libstagefright/renderfright/gl/GLFramebuffer.cpp
new file mode 100644
index 0000000..383486b
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/GLFramebuffer.cpp
@@ -0,0 +1,114 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define ATRACE_TAG ATRACE_TAG_GRAPHICS
+
+#include "GLFramebuffer.h"
+
+#include <GLES/gl.h>
+#include <GLES/glext.h>
+#include <GLES2/gl2ext.h>
+#include <GLES3/gl3.h>
+#include <gui/DebugEGLImageTracker.h>
+#include <nativebase/nativebase.h>
+#include <utils/Trace.h>
+#include "GLESRenderEngine.h"
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+GLFramebuffer::GLFramebuffer(GLESRenderEngine& engine)
+      : mEngine(engine), mEGLDisplay(engine.getEGLDisplay()), mEGLImage(EGL_NO_IMAGE_KHR) {
+    glGenTextures(1, &mTextureName);
+    glGenFramebuffers(1, &mFramebufferName);
+}
+
+GLFramebuffer::~GLFramebuffer() {
+    glDeleteFramebuffers(1, &mFramebufferName);
+    glDeleteTextures(1, &mTextureName);
+}
+
+bool GLFramebuffer::setNativeWindowBuffer(ANativeWindowBuffer* nativeBuffer, bool isProtected,
+                                          const bool useFramebufferCache) {
+    ATRACE_CALL();
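+    // Tear down the previous EGLImage first (unless it came from the engine's framebuffer image
+    // cache, which owns it), then wrap the new native buffer if one was provided.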
+    if (mEGLImage != EGL_NO_IMAGE_KHR) {
+        if (!usingFramebufferCache) {
+            eglDestroyImageKHR(mEGLDisplay, mEGLImage);
+            DEBUG_EGL_IMAGE_TRACKER_DESTROY();
+        }
+        mEGLImage = EGL_NO_IMAGE_KHR;
+        mBufferWidth = 0;
+        mBufferHeight = 0;
+    }
+
+    if (nativeBuffer) {
+        mEGLImage = mEngine.createFramebufferImageIfNeeded(nativeBuffer, isProtected,
+                                                           useFramebufferCache);
+        if (mEGLImage == EGL_NO_IMAGE_KHR) {
+            return false;
+        }
+        usingFramebufferCache = useFramebufferCache;
+        mBufferWidth = nativeBuffer->width;
+        mBufferHeight = nativeBuffer->height;
+    }
+    return true;
+}
+
+void GLFramebuffer::allocateBuffers(uint32_t width, uint32_t height, void* data) {
+    ATRACE_CALL();
+
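+    // Back the framebuffer with a plain GL_RGB texture and attach it as the color attachment.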
+    glBindTexture(GL_TEXTURE_2D, mTextureName);
+    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, width, height, 0, GL_RGB, GL_UNSIGNED_BYTE, data);
+    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
+    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_MIRRORED_REPEAT);
+    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_MIRRORED_REPEAT);
+
+    mBufferHeight = height;
+    mBufferWidth = width;
+    mEngine.checkErrors("Allocating Fbo texture");
+
+    bind();
+    glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, mTextureName, 0);
+    mStatus = glCheckFramebufferStatus(GL_FRAMEBUFFER);
+    unbind();
+    glBindTexture(GL_TEXTURE_2D, 0);
+
+    if (mStatus != GL_FRAMEBUFFER_COMPLETE) {
+        ALOGE("Frame buffer is not complete. Error %d", mStatus);
+    }
+}
+
+void GLFramebuffer::bind() const {
+    glBindFramebuffer(GL_FRAMEBUFFER, mFramebufferName);
+}
+
+void GLFramebuffer::bindAsReadBuffer() const {
+    glBindFramebuffer(GL_READ_FRAMEBUFFER, mFramebufferName);
+}
+
+void GLFramebuffer::bindAsDrawBuffer() const {
+    glBindFramebuffer(GL_DRAW_FRAMEBUFFER, mFramebufferName);
+}
+
+void GLFramebuffer::unbind() const {
+    glBindFramebuffer(GL_FRAMEBUFFER, 0);
+}
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/GLFramebuffer.h b/media/libstagefright/renderfright/gl/GLFramebuffer.h
new file mode 100644
index 0000000..6757695
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/GLFramebuffer.h
@@ -0,0 +1,68 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <cstdint>
+
+#include <EGL/egl.h>
+#include <EGL/eglext.h>
+#include <GLES2/gl2.h>
+#include <renderengine/Framebuffer.h>
+
+struct ANativeWindowBuffer;
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+class GLESRenderEngine;
+
+class GLFramebuffer : public renderengine::Framebuffer {
+public:
+    explicit GLFramebuffer(GLESRenderEngine& engine);
+    explicit GLFramebuffer(GLESRenderEngine& engine, bool multiTarget);
+    ~GLFramebuffer() override;
+
+    bool setNativeWindowBuffer(ANativeWindowBuffer* nativeBuffer, bool isProtected,
+                               const bool useFramebufferCache) override;
+    void allocateBuffers(uint32_t width, uint32_t height, void* data = nullptr);
+    EGLImageKHR getEGLImage() const { return mEGLImage; }
+    uint32_t getTextureName() const { return mTextureName; }
+    uint32_t getFramebufferName() const { return mFramebufferName; }
+    int32_t getBufferHeight() const { return mBufferHeight; }
+    int32_t getBufferWidth() const { return mBufferWidth; }
+    GLenum getStatus() const { return mStatus; }
+    void bind() const;
+    void bindAsReadBuffer() const;
+    void bindAsDrawBuffer() const;
+    void unbind() const;
+
+private:
+    GLESRenderEngine& mEngine;
+    EGLDisplay mEGLDisplay;
+    EGLImageKHR mEGLImage;
+    bool usingFramebufferCache = false;
+    GLenum mStatus = GL_FRAMEBUFFER_UNSUPPORTED;
+    uint32_t mTextureName, mFramebufferName;
+
+    int32_t mBufferHeight = 0;
+    int32_t mBufferWidth = 0;
+};
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/GLImage.cpp b/media/libstagefright/renderfright/gl/GLImage.cpp
new file mode 100644
index 0000000..8497721
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/GLImage.cpp
@@ -0,0 +1,83 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define ATRACE_TAG ATRACE_TAG_GRAPHICS
+
+#include "GLImage.h"
+
+#include <vector>
+
+#include <gui/DebugEGLImageTracker.h>
+#include <log/log.h>
+#include <utils/Trace.h>
+#include "GLESRenderEngine.h"
+#include "GLExtensions.h"
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+static std::vector<EGLint> buildAttributeList(bool isProtected) {
+    std::vector<EGLint> attrs;
+    attrs.reserve(16);
+
+    attrs.push_back(EGL_IMAGE_PRESERVED_KHR);
+    attrs.push_back(EGL_TRUE);
+
+    if (isProtected && GLExtensions::getInstance().hasProtectedContent()) {
+        attrs.push_back(EGL_PROTECTED_CONTENT_EXT);
+        attrs.push_back(EGL_TRUE);
+    }
+
+    attrs.push_back(EGL_NONE);
+
+    return attrs;
+}
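+
+// For reference, the list built above is either
+//     { EGL_IMAGE_PRESERVED_KHR, EGL_TRUE, EGL_NONE }
+// or, for a protected buffer on a device exposing EGL_EXT_protected_content,
+//     { EGL_IMAGE_PRESERVED_KHR, EGL_TRUE, EGL_PROTECTED_CONTENT_EXT, EGL_TRUE, EGL_NONE }.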
+
+GLImage::GLImage(const GLESRenderEngine& engine) : mEGLDisplay(engine.getEGLDisplay()) {}
+
+GLImage::~GLImage() {
+    setNativeWindowBuffer(nullptr, false);
+}
+
+bool GLImage::setNativeWindowBuffer(ANativeWindowBuffer* buffer, bool isProtected) {
+    ATRACE_CALL();
+    if (mEGLImage != EGL_NO_IMAGE_KHR) {
+        if (!eglDestroyImageKHR(mEGLDisplay, mEGLImage)) {
+            ALOGE("failed to destroy image: %#x", eglGetError());
+        }
+        DEBUG_EGL_IMAGE_TRACKER_DESTROY();
+        mEGLImage = EGL_NO_IMAGE_KHR;
+    }
+
+    if (buffer) {
+        std::vector<EGLint> attrs = buildAttributeList(isProtected);
+        mEGLImage = eglCreateImageKHR(mEGLDisplay, EGL_NO_CONTEXT, EGL_NATIVE_BUFFER_ANDROID,
+                                      static_cast<EGLClientBuffer>(buffer), attrs.data());
+        if (mEGLImage == EGL_NO_IMAGE_KHR) {
+            ALOGE("failed to create EGLImage: %#x", eglGetError());
+            return false;
+        }
+        DEBUG_EGL_IMAGE_TRACKER_CREATE();
+        mProtected = isProtected;
+    }
+
+    return true;
+}
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/GLImage.h b/media/libstagefright/renderfright/gl/GLImage.h
new file mode 100644
index 0000000..59d6ce3
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/GLImage.h
@@ -0,0 +1,54 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <cstdint>
+
+#include <EGL/egl.h>
+#include <EGL/eglext.h>
+#include <android-base/macros.h>
+#include <renderengine/Image.h>
+
+struct ANativeWindowBuffer;
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+class GLESRenderEngine;
+
+class GLImage : public renderengine::Image {
+public:
+    explicit GLImage(const GLESRenderEngine& engine);
+    ~GLImage() override;
+
+    bool setNativeWindowBuffer(ANativeWindowBuffer* buffer, bool isProtected) override;
+
+    EGLImageKHR getEGLImage() const { return mEGLImage; }
+    bool isProtected() const { return mProtected; }
+
+private:
+    EGLDisplay mEGLDisplay;
+    EGLImageKHR mEGLImage = EGL_NO_IMAGE_KHR;
+    bool mProtected = false;
+
+    DISALLOW_COPY_AND_ASSIGN(GLImage);
+};
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/GLShadowTexture.cpp b/media/libstagefright/renderfright/gl/GLShadowTexture.cpp
new file mode 100644
index 0000000..2423a34
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/GLShadowTexture.cpp
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <GLES/gl.h>
+#include <GLES/glext.h>
+#include <GLES2/gl2.h>
+#include <GLES2/gl2ext.h>
+#include <GLES3/gl3.h>
+
+#include "GLShadowTexture.h"
+#include "GLSkiaShadowPort.h"
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+GLShadowTexture::GLShadowTexture() {
+    fillShadowTextureData(mTextureData, SHADOW_TEXTURE_WIDTH);
+
+    glGenTextures(1, &mName);
+    glBindTexture(GL_TEXTURE_2D, mName);
+    glTexImage2D(GL_TEXTURE_2D, 0 /* base image level */, GL_ALPHA, SHADOW_TEXTURE_WIDTH,
+                 SHADOW_TEXTURE_HEIGHT, 0 /* border */, GL_ALPHA, GL_UNSIGNED_BYTE, mTextureData);
+    mTexture.init(Texture::TEXTURE_2D, mName);
+    mTexture.setFiltering(true);
+    mTexture.setDimensions(SHADOW_TEXTURE_WIDTH, 1);
+}
+
+GLShadowTexture::~GLShadowTexture() {
+    glDeleteTextures(1, &mName);
+}
+
+const Texture& GLShadowTexture::getTexture() {
+    return mTexture;
+}
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/GLShadowTexture.h b/media/libstagefright/renderfright/gl/GLShadowTexture.h
new file mode 100644
index 0000000..250a9d7
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/GLShadowTexture.h
@@ -0,0 +1,44 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <renderengine/Texture.h>
+#include <cstdint>
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+class GLShadowTexture {
+public:
+    GLShadowTexture();
+    ~GLShadowTexture();
+
+    const Texture& getTexture();
+
+private:
+    static constexpr int SHADOW_TEXTURE_WIDTH = 128;
+    static constexpr int SHADOW_TEXTURE_HEIGHT = 1;
+
+    GLuint mName;
+    Texture mTexture;
+    uint8_t mTextureData[SHADOW_TEXTURE_WIDTH];
+};
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/GLShadowVertexGenerator.cpp b/media/libstagefright/renderfright/gl/GLShadowVertexGenerator.cpp
new file mode 100644
index 0000000..3181f9b
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/GLShadowVertexGenerator.cpp
@@ -0,0 +1,98 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <renderengine/Mesh.h>
+
+#include <math/vec4.h>
+
+#include <ui/Rect.h>
+#include <ui/Transform.h>
+
+#include "GLShadowVertexGenerator.h"
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+GLShadowVertexGenerator::GLShadowVertexGenerator(const FloatRect& casterRect,
+                                                 float casterCornerRadius, float casterZ,
+                                                 bool casterIsTranslucent, const vec4& ambientColor,
+                                                 const vec4& spotColor, const vec3& lightPosition,
+                                                 float lightRadius) {
+    mDrawAmbientShadow = ambientColor.a > 0.f;
+    mDrawSpotShadow = spotColor.a > 0.f;
+
+    // Generate geometries and find number of vertices to generate
+    if (mDrawAmbientShadow) {
+        mAmbientShadowGeometry = getAmbientShadowGeometry(casterRect, casterCornerRadius, casterZ,
+                                                          casterIsTranslucent, ambientColor);
+        mAmbientShadowVertexCount = getVertexCountForGeometry(*mAmbientShadowGeometry.get());
+        mAmbientShadowIndexCount = getIndexCountForGeometry(*mAmbientShadowGeometry.get());
+    } else {
+        mAmbientShadowVertexCount = 0;
+        mAmbientShadowIndexCount = 0;
+    }
+
+    if (mDrawSpotShadow) {
+        mSpotShadowGeometry =
+                getSpotShadowGeometry(casterRect, casterCornerRadius, casterZ, casterIsTranslucent,
+                                      spotColor, lightPosition, lightRadius);
+        mSpotShadowVertexCount = getVertexCountForGeometry(*mSpotShadowGeometry.get());
+        mSpotShadowIndexCount = getIndexCountForGeometry(*mSpotShadowGeometry.get());
+    } else {
+        mSpotShadowVertexCount = 0;
+        mSpotShadowIndexCount = 0;
+    }
+}
+
+size_t GLShadowVertexGenerator::getVertexCount() const {
+    return mAmbientShadowVertexCount + mSpotShadowVertexCount;
+}
+
+size_t GLShadowVertexGenerator::getIndexCount() const {
+    return mAmbientShadowIndexCount + mSpotShadowIndexCount;
+}
+
+void GLShadowVertexGenerator::fillVertices(Mesh::VertexArray<vec2>& position,
+                                           Mesh::VertexArray<vec4>& color,
+                                           Mesh::VertexArray<vec3>& params) const {
+    if (mDrawAmbientShadow) {
+        fillVerticesForGeometry(*mAmbientShadowGeometry.get(), mAmbientShadowVertexCount, position,
+                                color, params);
+    }
+    if (mDrawSpotShadow) {
+        fillVerticesForGeometry(*mSpotShadowGeometry.get(), mSpotShadowVertexCount,
+                                Mesh::VertexArray<vec2>(position, mAmbientShadowVertexCount),
+                                Mesh::VertexArray<vec4>(color, mAmbientShadowVertexCount),
+                                Mesh::VertexArray<vec3>(params, mAmbientShadowVertexCount));
+    }
+}
+
+void GLShadowVertexGenerator::fillIndices(uint16_t* indices) const {
+    if (mDrawAmbientShadow) {
+        fillIndicesForGeometry(*mAmbientShadowGeometry.get(), mAmbientShadowIndexCount,
+                               0 /* starting vertex offset */, indices);
+    }
+    if (mDrawSpotShadow) {
+        fillIndicesForGeometry(*mSpotShadowGeometry.get(), mSpotShadowIndexCount,
+                               mAmbientShadowVertexCount /* starting vertex offset */,
+                               &(indices[mAmbientShadowIndexCount]));
+    }
+}
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/GLShadowVertexGenerator.h b/media/libstagefright/renderfright/gl/GLShadowVertexGenerator.h
new file mode 100644
index 0000000..112f976
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/GLShadowVertexGenerator.h
@@ -0,0 +1,65 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <math/vec4.h>
+#include <ui/Rect.h>
+
+#include "GLSkiaShadowPort.h"
+
+namespace android {
+namespace renderengine {
+
+class Mesh;
+
+namespace gl {
+
+/**
+ * Generates gl attributes required to draw shadow spot and/or ambient shadows.
+ *
+ * Each shadow can have its own color. This class generates three vertex attributes for
+ * each shadow: its position, color and shadow params (offset and distance). These can all
+ * be sent using a single glDrawElements call.
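+ *
+ * A hypothetical usage sketch (mesh plumbing is up to the caller; the names below are
+ * illustrative, not part of this class):
+ *
+ *     GLShadowVertexGenerator gen(casterRect, cornerRadius, casterZ, isTranslucent,
+ *                                 ambientColor, spotColor, lightPos, lightRadius);
+ *     // size vertex/index storage from gen.getVertexCount() / gen.getIndexCount(),
+ *     // obtain the mesh's position/color/params arrays, then:
+ *     gen.fillVertices(position, color, params);
+ *     gen.fillIndices(indices);
+ *     // and draw both shadows with one glDrawElements(GL_TRIANGLES, ...) call.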
+ */
+class GLShadowVertexGenerator {
+public:
+    GLShadowVertexGenerator(const FloatRect& casterRect, float casterCornerRadius, float casterZ,
+                            bool casterIsTranslucent, const vec4& ambientColor,
+                            const vec4& spotColor, const vec3& lightPosition, float lightRadius);
+    ~GLShadowVertexGenerator() = default;
+
+    size_t getVertexCount() const;
+    size_t getIndexCount() const;
+    void fillVertices(Mesh::VertexArray<vec2>& position, Mesh::VertexArray<vec4>& color,
+                      Mesh::VertexArray<vec3>& params) const;
+    void fillIndices(uint16_t* indices) const;
+
+private:
+    bool mDrawAmbientShadow;
+    std::unique_ptr<Geometry> mAmbientShadowGeometry;
+    int mAmbientShadowVertexCount = 0;
+    int mAmbientShadowIndexCount = 0;
+
+    bool mDrawSpotShadow;
+    std::unique_ptr<Geometry> mSpotShadowGeometry;
+    int mSpotShadowVertexCount = 0;
+    int mSpotShadowIndexCount = 0;
+};
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/GLSkiaShadowPort.cpp b/media/libstagefright/renderfright/gl/GLSkiaShadowPort.cpp
new file mode 100644
index 0000000..da8b435
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/GLSkiaShadowPort.cpp
@@ -0,0 +1,656 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <math/vec4.h>
+
+#include <renderengine/Mesh.h>
+
+#include <ui/Rect.h>
+#include <ui/Transform.h>
+
+#include <utils/Log.h>
+
+#include "GLSkiaShadowPort.h"
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+/**
+ * The shadow geometry logic and vertex generation code has been ported from skia shadow
+ * fast path OpenGL implementation to draw shadows around rects and rounded rects including
+ * circles.
+ *
+ * path: skia/src/gpu/GrRenderTargetContext.cpp GrRenderTargetContext::drawFastShadow
+ *
+ * Modifications made:
+ * - Switched to using std lib math functions
+ * - Fall off function is implemented in vertex shader rather than a shadow texture
+ * - Removed transformations applied on the caster rect since the caster will be in local
+ *   coordinate space and will be transformed by the vertex shader.
+ */
+
+static inline float divide_and_pin(float numer, float denom, float min, float max) {
+    if (denom == 0.0f) return min;
+    return std::clamp(numer / denom, min, max);
+}
+
+static constexpr auto SK_ScalarSqrt2 = 1.41421356f;
+static constexpr auto kAmbientHeightFactor = 1.0f / 128.0f;
+static constexpr auto kAmbientGeomFactor = 64.0f;
+// Assuming that we have a light height of 600 for the spot shadow,
+// the spot values will reach their maximum at a height of approximately 292.3077.
+// We'll round up to 300 to keep it simple.
+static constexpr auto kMaxAmbientRadius = 300 * kAmbientHeightFactor * kAmbientGeomFactor;
+
+inline float AmbientBlurRadius(float height) {
+    return std::min(height * kAmbientHeightFactor * kAmbientGeomFactor, kMaxAmbientRadius);
+}
+inline float AmbientRecipAlpha(float height) {
+    return 1.0f + std::max(height * kAmbientHeightFactor, 0.0f);
+}
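+
+// Worked example: with kAmbientHeightFactor = 1/128 and kAmbientGeomFactor = 64,
+// kMaxAmbientRadius = 300 / 128 * 64 = 150 and AmbientBlurRadius(h) = min(h / 2, 150),
+// so a caster at z = 100 gets an ambient blur radius of 50, and anything at z >= 300
+// clamps to 150. Similarly AmbientRecipAlpha(128) = 2.0.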
+
+//////////////////////////////////////////////////////////////////////////////
+// Circle Data
+//
+// We have two possible cases for geometry for a circle:
+
+// In the case of a normal fill, we draw geometry for the circle as an octagon.
+static const uint16_t gFillCircleIndices[] = {
+        // enter the octagon
+        // clang-format off
+         0, 1, 8, 1, 2, 8,
+         2, 3, 8, 3, 4, 8,
+         4, 5, 8, 5, 6, 8,
+         6, 7, 8, 7, 0, 8,
+        // clang-format on
+};
+
+// For stroked circles, we use two nested octagons.
+static const uint16_t gStrokeCircleIndices[] = {
+        // enter the octagon
+        // clang-format off
+         0, 1,  9, 0,  9,  8,
+         1, 2, 10, 1, 10,  9,
+         2, 3, 11, 2, 11, 10,
+         3, 4, 12, 3, 12, 11,
+         4, 5, 13, 4, 13, 12,
+         5, 6, 14, 5, 14, 13,
+         6, 7, 15, 6, 15, 14,
+         7, 0,  8, 7,  8, 15,
+        // clang-format on
+};
+
+#define SK_ARRAY_COUNT(a) (sizeof(a) / sizeof((a)[0]))
+static const int kIndicesPerFillCircle = SK_ARRAY_COUNT(gFillCircleIndices);
+static const int kIndicesPerStrokeCircle = SK_ARRAY_COUNT(gStrokeCircleIndices);
+static const int kVertsPerStrokeCircle = 16;
+static const int kVertsPerFillCircle = 9;
+
+static int circle_type_to_vert_count(bool stroked) {
+    return stroked ? kVertsPerStrokeCircle : kVertsPerFillCircle;
+}
+
+static int circle_type_to_index_count(bool stroked) {
+    return stroked ? kIndicesPerStrokeCircle : kIndicesPerFillCircle;
+}
+
+static const uint16_t* circle_type_to_indices(bool stroked) {
+    return stroked ? gStrokeCircleIndices : gFillCircleIndices;
+}
+
+///////////////////////////////////////////////////////////////////////////////
+// RoundRect Data
+//
+// The geometry for a shadow roundrect is similar to a 9-patch:
+//    ____________
+//   |_|________|_|
+//   | |        | |
+//   | |        | |
+//   | |        | |
+//   |_|________|_|
+//   |_|________|_|
+//
+// However, each corner is rendered as a fan rather than a simple quad, as below. (The diagram
+// shows the upper part of the upper left corner. The bottom triangle would similarly be split
+// into two triangles.)
+//    ________
+//   |\  \   |
+//   |  \ \  |
+//   |    \\ |
+//   |      \|
+//   --------
+//
+// The center of the fan handles the curve of the corner. For roundrects where the stroke width
+// is greater than the corner radius, the outer triangles blend from the curve to the straight
+// sides. Otherwise these triangles will be degenerate.
+//
+// In the case where the stroke width is greater than the corner radius and the
+// blur radius (overstroke), we add additional geometry to mark out the rectangle in the center.
+// This rectangle extends the coverage values of the center edges of the 9-patch.
+//    ____________
+//   |_|________|_|
+//   | |\ ____ /| |
+//   | | |    | | |
+//   | | |____| | |
+//   |_|/______\|_|
+//   |_|________|_|
+//
+// For filled rrects we reuse the stroke geometry but add an additional quad to the center.
+
+static const uint16_t gRRectIndices[] = {
+        // clang-format off
+     // overstroke quads
+     // we place this at the beginning so that we can skip these indices when rendering as filled
+     0, 6, 25, 0, 25, 24,
+     6, 18, 27, 6, 27, 25,
+     18, 12, 26, 18, 26, 27,
+     12, 0, 24, 12, 24, 26,
+
+     // corners
+     0, 1, 2, 0, 2, 3, 0, 3, 4, 0, 4, 5,
+     6, 11, 10, 6, 10, 9, 6, 9, 8, 6, 8, 7,
+     12, 17, 16, 12, 16, 15, 12, 15, 14, 12, 14, 13,
+     18, 19, 20, 18, 20, 21, 18, 21, 22, 18, 22, 23,
+
+     // edges
+     0, 5, 11, 0, 11, 6,
+     6, 7, 19, 6, 19, 18,
+     18, 23, 17, 18, 17, 12,
+     12, 13, 1, 12, 1, 0,
+
+     // fill quad
+     // we place this at the end so that we can skip these indices when rendering as stroked
+     0, 6, 18, 0, 18, 12,
+        // clang-format on
+};
+
+// overstroke count
+static const int kIndicesPerOverstrokeRRect = SK_ARRAY_COUNT(gRRectIndices) - 6;
+// simple stroke count skips overstroke indices
+static const int kIndicesPerStrokeRRect = kIndicesPerOverstrokeRRect - 6 * 4;
+// fill count adds final quad to stroke count
+static const int kIndicesPerFillRRect = kIndicesPerStrokeRRect + 6;
+static const int kVertsPerStrokeRRect = 24;
+static const int kVertsPerOverstrokeRRect = 28;
+static const int kVertsPerFillRRect = 24;
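+
+// Sanity check on the counts above: gRRectIndices holds 102 entries
+// (24 overstroke + 48 corner + 24 edge + 6 fill-quad indices), so
+// kIndicesPerOverstrokeRRect = 102 - 6 = 96, kIndicesPerStrokeRRect = 96 - 24 = 72 and
+// kIndicesPerFillRRect = 72 + 6 = 78. Fill and stroke share the same 24 corner vertices,
+// while overstroke adds the 4 extra center-quad vertices (24 -> 28).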
+
+static int rrect_type_to_vert_count(RRectType type) {
+    switch (type) {
+        case kFill_RRectType:
+            return kVertsPerFillRRect;
+        case kStroke_RRectType:
+            return kVertsPerStrokeRRect;
+        case kOverstroke_RRectType:
+            return kVertsPerOverstrokeRRect;
+    }
+    ALOGE("Invalid rect type: %d", type);
+    return -1;
+}
+
+static int rrect_type_to_index_count(RRectType type) {
+    switch (type) {
+        case kFill_RRectType:
+            return kIndicesPerFillRRect;
+        case kStroke_RRectType:
+            return kIndicesPerStrokeRRect;
+        case kOverstroke_RRectType:
+            return kIndicesPerOverstrokeRRect;
+    }
+    ALOGE("Invalid rect type: %d", type);
+    return -1;
+}
+
+static const uint16_t* rrect_type_to_indices(RRectType type) {
+    switch (type) {
+        case kFill_RRectType:
+        case kStroke_RRectType:
+            return gRRectIndices + 6 * 4;
+        case kOverstroke_RRectType:
+            return gRRectIndices;
+    }
+    ALOGE("Invalid rect type: %d", type);
+    return nullptr;
+}
+
+static void fillInCircleVerts(const Geometry& args, bool isStroked,
+                              Mesh::VertexArray<vec2>& position,
+                              Mesh::VertexArray<vec4>& shadowColor,
+                              Mesh::VertexArray<vec3>& shadowParams) {
+    vec4 color = args.fColor;
+    float outerRadius = args.fOuterRadius;
+    float innerRadius = args.fInnerRadius;
+    float blurRadius = args.fBlurRadius;
+    float distanceCorrection = outerRadius / blurRadius;
+
+    const FloatRect& bounds = args.fDevBounds;
+
+    // The inner radius in the vertex data must be specified in normalized space.
+    innerRadius = innerRadius / outerRadius;
+
+    vec2 center = vec2(bounds.getWidth() / 2.0f, bounds.getHeight() / 2.0f);
+    float halfWidth = 0.5f * bounds.getWidth();
+    float octOffset = 0.41421356237f; // sqrt(2) - 1
+    int vertexCount = 0;
+
+    position[vertexCount] = center + vec2(-octOffset * halfWidth, -halfWidth);
+    shadowColor[vertexCount] = color;
+    shadowParams[vertexCount] = vec3(-octOffset, -1, distanceCorrection);
+    vertexCount++;
+
+    position[vertexCount] = center + vec2(octOffset * halfWidth, -halfWidth);
+    shadowColor[vertexCount] = color;
+    shadowParams[vertexCount] = vec3(octOffset, -1, distanceCorrection);
+    vertexCount++;
+
+    position[vertexCount] = center + vec2(halfWidth, -octOffset * halfWidth);
+    shadowColor[vertexCount] = color;
+    shadowParams[vertexCount] = vec3(1, -octOffset, distanceCorrection);
+    vertexCount++;
+
+    position[vertexCount] = center + vec2(halfWidth, octOffset * halfWidth);
+    shadowColor[vertexCount] = color;
+    shadowParams[vertexCount] = vec3(1, octOffset, distanceCorrection);
+    vertexCount++;
+
+    position[vertexCount] = center + vec2(octOffset * halfWidth, halfWidth);
+    shadowColor[vertexCount] = color;
+    shadowParams[vertexCount] = vec3(octOffset, 1, distanceCorrection);
+    vertexCount++;
+
+    position[vertexCount] = center + vec2(-octOffset * halfWidth, halfWidth);
+    shadowColor[vertexCount] = color;
+    shadowParams[vertexCount] = vec3(-octOffset, 1, distanceCorrection);
+    vertexCount++;
+
+    position[vertexCount] = center + vec2(-halfWidth, octOffset * halfWidth);
+    shadowColor[vertexCount] = color;
+    shadowParams[vertexCount] = vec3(-1, octOffset, distanceCorrection);
+    vertexCount++;
+
+    position[vertexCount] = center + vec2(-halfWidth, -octOffset * halfWidth);
+    shadowColor[vertexCount] = color;
+    shadowParams[vertexCount] = vec3(-1, -octOffset, distanceCorrection);
+    vertexCount++;
+
+    if (isStroked) {
+        // compute the inner ring
+
+        // cosine and sine of pi/8
+        float c = 0.923579533f;
+        float s = 0.382683432f;
+        float r = args.fInnerRadius;
+
+        position[vertexCount] = center + vec2(-s * r, -c * r);
+        shadowColor[vertexCount] = color;
+        shadowParams[vertexCount] = vec3(-s * innerRadius, -c * innerRadius, distanceCorrection);
+        vertexCount++;
+
+        position[vertexCount] = center + vec2(s * r, -c * r);
+        shadowColor[vertexCount] = color;
+        shadowParams[vertexCount] = vec3(s * innerRadius, -c * innerRadius, distanceCorrection);
+        vertexCount++;
+
+        position[vertexCount] = center + vec2(c * r, -s * r);
+        shadowColor[vertexCount] = color;
+        shadowParams[vertexCount] = vec3(c * innerRadius, -s * innerRadius, distanceCorrection);
+        vertexCount++;
+
+        position[vertexCount] = center + vec2(c * r, s * r);
+        shadowColor[vertexCount] = color;
+        shadowParams[vertexCount] = vec3(c * innerRadius, s * innerRadius, distanceCorrection);
+        vertexCount++;
+
+        position[vertexCount] = center + vec2(s * r, c * r);
+        shadowColor[vertexCount] = color;
+        shadowParams[vertexCount] = vec3(s * innerRadius, c * innerRadius, distanceCorrection);
+        vertexCount++;
+
+        position[vertexCount] = center + vec2(-s * r, c * r);
+        shadowColor[vertexCount] = color;
+        shadowParams[vertexCount] = vec3(-s * innerRadius, c * innerRadius, distanceCorrection);
+        vertexCount++;
+
+        position[vertexCount] = center + vec2(-c * r, s * r);
+        shadowColor[vertexCount] = color;
+        shadowParams[vertexCount] = vec3(-c * innerRadius, s * innerRadius, distanceCorrection);
+        vertexCount++;
+
+        position[vertexCount] = center + vec2(-c * r, -s * r);
+        shadowColor[vertexCount] = color;
+        shadowParams[vertexCount] = vec3(-c * innerRadius, -s * innerRadius, distanceCorrection);
+        vertexCount++;
+    } else {
+        // filled
+        position[vertexCount] = center;
+        shadowColor[vertexCount] = color;
+        shadowParams[vertexCount] = vec3(0, 0, distanceCorrection);
+        vertexCount++;
+    }
+}
+
+static void fillInRRectVerts(const Geometry& args, Mesh::VertexArray<vec2>& position,
+                             Mesh::VertexArray<vec4>& shadowColor,
+                             Mesh::VertexArray<vec3>& shadowParams) {
+    vec4 color = args.fColor;
+    float outerRadius = args.fOuterRadius;
+
+    const FloatRect& bounds = args.fDevBounds;
+
+    float umbraInset = args.fUmbraInset;
+    float minDim = 0.5f * std::min(bounds.getWidth(), bounds.getHeight());
+    if (umbraInset > minDim) {
+        umbraInset = minDim;
+    }
+
+    float xInner[4] = {bounds.left + umbraInset, bounds.right - umbraInset,
+                       bounds.left + umbraInset, bounds.right - umbraInset};
+    float xMid[4] = {bounds.left + outerRadius, bounds.right - outerRadius,
+                     bounds.left + outerRadius, bounds.right - outerRadius};
+    float xOuter[4] = {bounds.left, bounds.right, bounds.left, bounds.right};
+    float yInner[4] = {bounds.top + umbraInset, bounds.top + umbraInset, bounds.bottom - umbraInset,
+                       bounds.bottom - umbraInset};
+    float yMid[4] = {bounds.top + outerRadius, bounds.top + outerRadius,
+                     bounds.bottom - outerRadius, bounds.bottom - outerRadius};
+    float yOuter[4] = {bounds.top, bounds.top, bounds.bottom, bounds.bottom};
+
+    float blurRadius = args.fBlurRadius;
+
+    // In the case where we have to inset more for the umbra, our two triangles in the
+    // corner get skewed to a diamond rather than a square. To correct for that,
+    // we also skew the vectors we send to the shader that help define the circle.
+    // By doing so, we end up with a quarter circle in the corner rather than the
+    // elliptical curve.
+
+    // This is a bit magical, but it gives us the correct results at extrema:
+    //   a) umbraInset == outerRadius produces an orthogonal vector
+    //   b) outerRadius == 0 produces a diagonal vector
+    // And visually the corner looks correct.
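+    //
+    // Checking those extrema against the formula below: with umbraInset == outerRadius the
+    // unnormalized vector is (0, -2 * outerRadius), which normalizes to (0, -1); with
+    // outerRadius == 0 it is (-umbraInset, -umbraInset), i.e. the diagonal.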
+    vec2 outerVec = vec2(outerRadius - umbraInset, -outerRadius - umbraInset);
+    outerVec = normalize(outerVec);
+    // We want the circle edge to fall fractionally along the diagonal at
+    //      (sqrt(2)*(umbraInset - outerRadius) + outerRadius)/sqrt(2)*umbraInset
+    //
+    // Setting the components of the diagonal offset to the following value will give us that.
+    float diagVal = umbraInset / (SK_ScalarSqrt2 * (umbraInset - outerRadius) - outerRadius);
+    vec2 diagVec = vec2(diagVal, diagVal);
+    float distanceCorrection = umbraInset / blurRadius;
+
+    int vertexCount = 0;
+    // build corner by corner
+    for (int i = 0; i < 4; ++i) {
+        // inner point
+        position[vertexCount] = vec2(xInner[i], yInner[i]);
+        shadowColor[vertexCount] = color;
+        shadowParams[vertexCount] = vec3(0, 0, distanceCorrection);
+        vertexCount++;
+
+        // outer points
+        position[vertexCount] = vec2(xOuter[i], yInner[i]);
+        shadowColor[vertexCount] = color;
+        shadowParams[vertexCount] = vec3(0, -1, distanceCorrection);
+        vertexCount++;
+
+        position[vertexCount] = vec2(xOuter[i], yMid[i]);
+        shadowColor[vertexCount] = color;
+        shadowParams[vertexCount] = vec3(outerVec.x, outerVec.y, distanceCorrection);
+        vertexCount++;
+
+        position[vertexCount] = vec2(xOuter[i], yOuter[i]);
+        shadowColor[vertexCount] = color;
+        shadowParams[vertexCount] = vec3(diagVec.x, diagVec.y, distanceCorrection);
+        vertexCount++;
+
+        position[vertexCount] = vec2(xMid[i], yOuter[i]);
+        shadowColor[vertexCount] = color;
+        shadowParams[vertexCount] = vec3(outerVec.x, outerVec.y, distanceCorrection);
+        vertexCount++;
+
+        position[vertexCount] = vec2(xInner[i], yOuter[i]);
+        shadowColor[vertexCount] = color;
+        shadowParams[vertexCount] = vec3(0, -1, distanceCorrection);
+        vertexCount++;
+    }
+
+    // Add the additional vertices for overstroked rrects.
+    // Effectively this is an additional stroked rrect, with its
+    // parameters equal to those in the center of the 9-patch. This will
+    // give constant values across this inner ring.
+    if (kOverstroke_RRectType == args.fType) {
+        float inset = umbraInset + args.fInnerRadius;
+
+        // TL
+        position[vertexCount] = vec2(bounds.left + inset, bounds.top + inset);
+        shadowColor[vertexCount] = color;
+        shadowParams[vertexCount] = vec3(0, 0, distanceCorrection);
+        vertexCount++;
+
+        // TR
+        position[vertexCount] = vec2(bounds.right - inset, bounds.top + inset);
+        shadowColor[vertexCount] = color;
+        shadowParams[vertexCount] = vec3(0, 0, distanceCorrection);
+        vertexCount++;
+
+        // BL
+        position[vertexCount] = vec2(bounds.left + inset, bounds.bottom - inset);
+        shadowColor[vertexCount] = color;
+        shadowParams[vertexCount] = vec3(0, 0, distanceCorrection);
+        vertexCount++;
+
+        // BR
+        position[vertexCount] = vec2(bounds.right - inset, bounds.bottom - inset);
+        shadowColor[vertexCount] = color;
+        shadowParams[vertexCount] = vec3(0, 0, distanceCorrection);
+        vertexCount++;
+    }
+}
+
+int getVertexCountForGeometry(const Geometry& shadowGeometry) {
+    if (shadowGeometry.fIsCircle) {
+        return circle_type_to_vert_count(shadowGeometry.fType);
+    }
+
+    return rrect_type_to_vert_count(shadowGeometry.fType);
+}
+
+int getIndexCountForGeometry(const Geometry& shadowGeometry) {
+    if (shadowGeometry.fIsCircle) {
+        return circle_type_to_index_count(kStroke_RRectType == shadowGeometry.fType);
+    }
+
+    return rrect_type_to_index_count(shadowGeometry.fType);
+}
+
+void fillVerticesForGeometry(const Geometry& shadowGeometry, int /* vertexCount */,
+                             Mesh::VertexArray<vec2> position, Mesh::VertexArray<vec4> shadowColor,
+                             Mesh::VertexArray<vec3> shadowParams) {
+    if (shadowGeometry.fIsCircle) {
+        fillInCircleVerts(shadowGeometry, shadowGeometry.fIsStroked, position, shadowColor,
+                          shadowParams);
+    } else {
+        fillInRRectVerts(shadowGeometry, position, shadowColor, shadowParams);
+    }
+}
+
+void fillIndicesForGeometry(const Geometry& shadowGeometry, int indexCount,
+                            int startingVertexOffset, uint16_t* indices) {
+    if (shadowGeometry.fIsCircle) {
+        const uint16_t* primIndices = circle_type_to_indices(shadowGeometry.fIsStroked);
+        for (int i = 0; i < indexCount; ++i) {
+            indices[i] = primIndices[i] + startingVertexOffset;
+        }
+    } else {
+        const uint16_t* primIndices = rrect_type_to_indices(shadowGeometry.fType);
+        for (int i = 0; i < indexCount; ++i) {
+            indices[i] = primIndices[i] + startingVertexOffset;
+        }
+    }
+}
+
+inline void GetSpotParams(float occluderZ, float lightX, float lightY, float lightZ,
+                          float lightRadius, float& blurRadius, float& scale, vec2& translate) {
+    float zRatio = divide_and_pin(occluderZ, lightZ - occluderZ, 0.0f, 0.95f);
+    blurRadius = lightRadius * zRatio;
+    scale = divide_and_pin(lightZ, lightZ - occluderZ, 1.0f, 1.95f);
+    translate.x = -zRatio * lightX;
+    translate.y = -zRatio * lightY;
+}
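+
+// Worked example (hypothetical numbers): for an occluder at z = 100 under a light at
+// z = 600 with radius 800, zRatio = 100 / 500 = 0.2, so blurRadius = 0.2 * 800 = 160,
+// scale = clamp(600 / 500, 1.0, 1.95) = 1.2 and translate = -0.2 * (lightX, lightY).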
+
+static std::unique_ptr<Geometry> getShadowGeometry(const vec4& color, const FloatRect& devRect,
+                                                   float devRadius, float blurRadius,
+                                                   float insetWidth) {
+    // An insetWidth > 1/2 rect width or height indicates a simple fill.
+    const bool isCircle = ((devRadius >= devRect.getWidth()) && (devRadius >= devRect.getHeight()));
+
+    FloatRect bounds = devRect;
+    float innerRadius = 0.0f;
+    float outerRadius = devRadius;
+    float umbraInset;
+
+    RRectType type = kFill_RRectType;
+    if (isCircle) {
+        umbraInset = 0;
+    } else {
+        umbraInset = std::max(outerRadius, blurRadius);
+    }
+
+    // If stroke is greater than width or height, this is still a fill,
+    // otherwise we compute stroke params.
+    if (isCircle) {
+        innerRadius = devRadius - insetWidth;
+        type = innerRadius > 0 ? kStroke_RRectType : kFill_RRectType;
+    } else {
+        if (insetWidth <= 0.5f * std::min(devRect.getWidth(), devRect.getHeight())) {
+            // We don't worry about a real inner radius; we just need to know whether we
+            // need to create overstroke vertices.
+            innerRadius = std::max(insetWidth - umbraInset, 0.0f);
+            type = innerRadius > 0 ? kOverstroke_RRectType : kStroke_RRectType;
+        }
+    }
+    const bool isStroked = (kStroke_RRectType == type);
+    return std::make_unique<Geometry>(Geometry{color, outerRadius, umbraInset, innerRadius,
+                                               blurRadius, bounds, type, isCircle, isStroked});
+}
+
+std::unique_ptr<Geometry> getAmbientShadowGeometry(const FloatRect& casterRect,
+                                                   float casterCornerRadius, float casterZ,
+                                                   bool casterIsTranslucent,
+                                                   const vec4& ambientColor) {
+    float devSpaceInsetWidth = AmbientBlurRadius(casterZ);
+    const float umbraRecipAlpha = AmbientRecipAlpha(casterZ);
+    const float devSpaceAmbientBlur = devSpaceInsetWidth * umbraRecipAlpha;
+
+    // Outset the shadow rrect to the border of the penumbra
+    float ambientPathOutset = devSpaceInsetWidth;
+    FloatRect outsetRect(casterRect);
+    outsetRect.left -= ambientPathOutset;
+    outsetRect.top -= ambientPathOutset;
+    outsetRect.right += ambientPathOutset;
+    outsetRect.bottom += ambientPathOutset;
+
+    float outsetRad = casterCornerRadius + ambientPathOutset;
+    if (casterIsTranslucent) {
+        // set a large inset to force a fill
+        devSpaceInsetWidth = outsetRect.getWidth();
+    }
+
+    return getShadowGeometry(ambientColor, outsetRect, std::abs(outsetRad), devSpaceAmbientBlur,
+                             std::abs(devSpaceInsetWidth));
+}
+
+std::unique_ptr<Geometry> getSpotShadowGeometry(const FloatRect& casterRect,
+                                                float casterCornerRadius, float casterZ,
+                                                bool casterIsTranslucent, const vec4& spotColor,
+                                                const vec3& lightPosition, float lightRadius) {
+    float devSpaceSpotBlur;
+    float spotScale;
+    vec2 spotOffset;
+    GetSpotParams(casterZ, lightPosition.x, lightPosition.y, lightPosition.z, lightRadius,
+                  devSpaceSpotBlur, spotScale, spotOffset);
+    // handle scale of radius due to CTM
+    const float srcSpaceSpotBlur = devSpaceSpotBlur;
+
+    // Adjust translate for the effect of the scale.
+    spotOffset.x *= spotScale;
+    spotOffset.y *= spotScale;
+
+    // Compute the transformed shadow rect
+    ui::Transform shadowTransform;
+    shadowTransform.set(spotOffset.x, spotOffset.y);
+    shadowTransform.set(spotScale, 0, 0, spotScale);
+    FloatRect spotShadowRect = shadowTransform.transform(casterRect);
+    float spotShadowRadius = casterCornerRadius * spotScale;
+
+    // Compute the insetWidth
+    float blurOutset = srcSpaceSpotBlur;
+    float insetWidth = blurOutset;
+    if (casterIsTranslucent) {
+        // If transparent, just do a fill
+        insetWidth += spotShadowRect.getWidth();
+    } else {
+        // For shadows, instead of using a stroke we specify an inset from the penumbra
+        // border. We want to extend this inset area so that it meets up with the caster
+        // geometry. The inset geometry will by default already be inset by the blur width.
+        //
+        // We compare the min and max corners inset by the radius between the original
+        // rrect and the shadow rrect. The distance between the two plus the difference
+        // between the scaled radius and the original radius gives the distance from the
+        // transformed shadow shape to the original shape in that corner. The max
+        // of these gives the maximum distance we need to cover.
+        //
+        // Since we are outsetting by 1/2 the blur distance, we just add the maxOffset to
+        // that to get the full insetWidth.
+        float maxOffset;
+        if (casterCornerRadius <= 0.f) {
+            // Manhattan distance works better for rects
+            maxOffset = std::max(std::max(std::abs(spotShadowRect.left - casterRect.left),
+                                          std::abs(spotShadowRect.top - casterRect.top)),
+                                 std::max(std::abs(spotShadowRect.right - casterRect.right),
+                                          std::abs(spotShadowRect.bottom - casterRect.bottom)));
+        } else {
+            float dr = spotShadowRadius - casterCornerRadius;
+            vec2 upperLeftOffset = vec2(spotShadowRect.left - casterRect.left + dr,
+                                        spotShadowRect.top - casterRect.top + dr);
+            vec2 lowerRightOffset = vec2(spotShadowRect.right - casterRect.right - dr,
+                                         spotShadowRect.bottom - casterRect.bottom - dr);
+            maxOffset = sqrt(std::max(dot(upperLeftOffset, upperLeftOffset),
+                                      dot(lowerRightOffset, lowerRightOffset))) +
+                    dr;
+        }
+        insetWidth += std::max(blurOutset, maxOffset);
+    }
+
+    // Outset the shadow rrect to the border of the penumbra
+    spotShadowRadius += blurOutset;
+    spotShadowRect.left -= blurOutset;
+    spotShadowRect.top -= blurOutset;
+    spotShadowRect.right += blurOutset;
+    spotShadowRect.bottom += blurOutset;
+
+    return getShadowGeometry(spotColor, spotShadowRect, std::abs(spotShadowRadius),
+                             2.0f * devSpaceSpotBlur, std::abs(insetWidth));
+}
+
+void fillShadowTextureData(uint8_t* data, size_t shadowTextureWidth) {
+    for (int i = 0; i < shadowTextureWidth; i++) {
+        const float d = 1 - i / ((shadowTextureWidth * 1.0f) - 1.0f);
+        data[i] = static_cast<uint8_t>((exp(-4.0f * d * d) - 0.018f) * 255);
+    }
+}
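+
+// With the falloff above, d runs from 1 at i == 0 down to 0 at the last texel, so
+// data[0] ~= (exp(-4) - 0.018) * 255 ~= 0 and data[shadowTextureWidth - 1] ~=
+// (1 - 0.018) * 255 ~= 250, i.e. the texture ramps smoothly from transparent to darkest.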
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/GLSkiaShadowPort.h b/media/libstagefright/renderfright/gl/GLSkiaShadowPort.h
new file mode 100644
index 0000000..912c8bb
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/GLSkiaShadowPort.h
@@ -0,0 +1,96 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <math/vec4.h>
+#include <renderengine/Mesh.h>
+#include <ui/Rect.h>
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+/**
+ * The shadow geometry logic and vertex generation code has been ported from skia shadow
+ * fast path OpenGL implementation to draw shadows around rects and rounded rects including
+ * circles.
+ *
+ * path: skia/src/gpu/GrRenderTargetContext.cpp GrRenderTargetContext::drawFastShadow
+ *
+ * Modifications made:
+ * - Switched to using std lib math functions
+ * - Fall off function is implemented in vertex shader rather than a shadow texture
+ * - Removed transformations applied on the caster rect since the caster will be in local
+ *   coordinate space and will be transformed by the vertex shader.
+ */
+
+enum RRectType {
+    kFill_RRectType,
+    kStroke_RRectType,
+    kOverstroke_RRectType,
+};
+
+struct Geometry {
+    vec4 fColor;
+    float fOuterRadius;
+    float fUmbraInset;
+    float fInnerRadius;
+    float fBlurRadius;
+    FloatRect fDevBounds;
+    RRectType fType;
+    bool fIsCircle;
+    bool fIsStroked;
+};
+
+std::unique_ptr<Geometry> getSpotShadowGeometry(const FloatRect& casterRect,
+                                                float casterCornerRadius, float casterZ,
+                                                bool casterIsTranslucent, const vec4& spotColor,
+                                                const vec3& lightPosition, float lightRadius);
+
+std::unique_ptr<Geometry> getAmbientShadowGeometry(const FloatRect& casterRect,
+                                                   float casterCornerRadius, float casterZ,
+                                                   bool casterIsTranslucent,
+                                                   const vec4& ambientColor);
+
+int getVertexCountForGeometry(const Geometry& shadowGeometry);
+
+int getIndexCountForGeometry(const Geometry& shadowGeometry);
+
+void fillVerticesForGeometry(const Geometry& shadowGeometry, int vertexCount,
+                             Mesh::VertexArray<vec2> position, Mesh::VertexArray<vec4> shadowColor,
+                             Mesh::VertexArray<vec3> shadowParams);
+
+void fillIndicesForGeometry(const Geometry& shadowGeometry, int indexCount,
+                            int startingVertexOffset, uint16_t* indices);
+
+/**
+ * Maps shadow geometry 'alpha' varying (1 for darkest, 0 for transparent) to
+ * darkness at that spot. Values are determined by an exponential falloff
+ * function provided by UX.
+ *
+ * The texture is used for quick lookup in the shadow shader.
+ *
+ * textureData - filled with shadow texture data that needs to be at least of
+ *               size textureWidth
+ *
+ * textureWidth - width of the texture, height is always 1
+ */
+void fillShadowTextureData(uint8_t* textureData, size_t textureWidth);
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/GLVertexBuffer.cpp b/media/libstagefright/renderfright/gl/GLVertexBuffer.cpp
new file mode 100644
index 0000000..e50c471
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/GLVertexBuffer.cpp
@@ -0,0 +1,55 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define ATRACE_TAG ATRACE_TAG_GRAPHICS
+
+#include "GLVertexBuffer.h"
+
+#include <GLES/gl.h>
+#include <GLES2/gl2.h>
+#include <nativebase/nativebase.h>
+#include <utils/Trace.h>
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+GLVertexBuffer::GLVertexBuffer() {
+    glGenBuffers(1, &mBufferName);
+}
+
+GLVertexBuffer::~GLVertexBuffer() {
+    glDeleteBuffers(1, &mBufferName);
+}
+
+void GLVertexBuffer::allocateBuffers(const GLfloat data[], const GLuint size) {
+    ATRACE_CALL();
+    bind();
+    glBufferData(GL_ARRAY_BUFFER, size * sizeof(GLfloat), data, GL_STATIC_DRAW);
+    unbind();
+}
+
+void GLVertexBuffer::bind() const {
+    glBindBuffer(GL_ARRAY_BUFFER, mBufferName);
+}
+
+void GLVertexBuffer::unbind() const {
+    glBindBuffer(GL_ARRAY_BUFFER, 0);
+}
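+
+// A hypothetical usage sketch (attribute location and vertex layout are the caller's;
+// positionLoc, vertices and vertexFloatCount are illustrative names):
+//
+//     GLVertexBuffer vbo;
+//     vbo.allocateBuffers(vertices, vertexFloatCount); // uploads GL_STATIC_DRAW data
+//     vbo.bind();
+//     glEnableVertexAttribArray(positionLoc);
+//     glVertexAttribPointer(positionLoc, 2, GL_FLOAT, GL_FALSE, 0, nullptr);
+//     glDrawArrays(GL_TRIANGLE_STRIP, 0, vertexFloatCount / 2);
+//     vbo.unbind();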
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/GLVertexBuffer.h b/media/libstagefright/renderfright/gl/GLVertexBuffer.h
new file mode 100644
index 0000000..c0fd0c1
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/GLVertexBuffer.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <cstdint>
+
+#include <EGL/egl.h>
+#include <EGL/eglext.h>
+#include <GLES2/gl2.h>
+
+struct ANativeWindowBuffer;
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+class GLESRenderEngine;
+
+class GLVertexBuffer {
+public:
+    explicit GLVertexBuffer();
+    ~GLVertexBuffer();
+
+    void allocateBuffers(const GLfloat data[], const GLuint size);
+    uint32_t getBufferName() const { return mBufferName; }
+    void bind() const;
+    void unbind() const;
+
+private:
+    uint32_t mBufferName;
+};
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/ImageManager.cpp b/media/libstagefright/renderfright/gl/ImageManager.cpp
new file mode 100644
index 0000000..6256649
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/ImageManager.cpp
@@ -0,0 +1,148 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#undef LOG_TAG
+#define LOG_TAG "RenderEngine"
+#define ATRACE_TAG ATRACE_TAG_GRAPHICS
+
+#include <pthread.h>
+
+#include <processgroup/sched_policy.h>
+#include <utils/Trace.h>
+#include "GLESRenderEngine.h"
+#include "ImageManager.h"
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+ImageManager::ImageManager(GLESRenderEngine* engine) : mEngine(engine) {}
+
+void ImageManager::initThread() {
+    mThread = std::thread([this]() { threadMain(); });
+    pthread_setname_np(mThread.native_handle(), "ImageManager");
+    // Use SCHED_FIFO to minimize jitter
+    struct sched_param param = {0};
+    param.sched_priority = 2;
+    if (pthread_setschedparam(mThread.native_handle(), SCHED_FIFO, &param) != 0) {
+        ALOGE("Couldn't set SCHED_FIFO for ImageManager");
+    }
+}
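+
+// Hypothetical call order at the owning engine (illustrative; the variable names are
+// assumed):
+//
+//     auto imageManager = std::make_unique<ImageManager>(engine);
+//     imageManager->initThread(); // only now may cacheAsync()/releaseAsync() be used
+//
+// Keeping construction and thread start separate guarantees threadMain() never observes
+// a partially constructed object.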
+
+ImageManager::~ImageManager() {
+    {
+        std::lock_guard<std::mutex> lock(mMutex);
+        mRunning = false;
+    }
+    mCondition.notify_all();
+    if (mThread.joinable()) {
+        mThread.join();
+    }
+}
+
+void ImageManager::cacheAsync(const sp<GraphicBuffer>& buffer,
+                              const std::shared_ptr<Barrier>& barrier) {
+    if (buffer == nullptr) {
+        {
+            std::lock_guard<std::mutex> lock(barrier->mutex);
+            barrier->isOpen = true;
+            barrier->result = BAD_VALUE;
+        }
+        barrier->condition.notify_one();
+        return;
+    }
+    ATRACE_CALL();
+    QueueEntry entry = {QueueEntry::Operation::Insert, buffer, buffer->getId(), barrier};
+    queueOperation(std::move(entry));
+}
+
+status_t ImageManager::cache(const sp<GraphicBuffer>& buffer) {
+    ATRACE_CALL();
+    auto barrier = std::make_shared<Barrier>();
+    cacheAsync(buffer, barrier);
+    std::lock_guard<std::mutex> lock(barrier->mutex);
+    barrier->condition.wait(barrier->mutex,
+                            [&]() REQUIRES(barrier->mutex) { return barrier->isOpen; });
+    return barrier->result;
+}
+
+void ImageManager::releaseAsync(uint64_t bufferId, const std::shared_ptr<Barrier>& barrier) {
+    ATRACE_CALL();
+    QueueEntry entry = {QueueEntry::Operation::Delete, nullptr, bufferId, barrier};
+    queueOperation(std::move(entry));
+}
+
+void ImageManager::queueOperation(const QueueEntry&& entry) {
+    {
+        std::lock_guard<std::mutex> lock(mMutex);
+        mQueue.emplace(entry);
+        ATRACE_INT("ImageManagerQueueDepth", mQueue.size());
+    }
+    mCondition.notify_one();
+}
+
+void ImageManager::threadMain() {
+    set_sched_policy(0, SP_FOREGROUND);
+    bool run;
+    {
+        std::lock_guard<std::mutex> lock(mMutex);
+        run = mRunning;
+    }
+    while (run) {
+        QueueEntry entry;
+        {
+            std::lock_guard<std::mutex> lock(mMutex);
+            mCondition.wait(mMutex,
+                            [&]() REQUIRES(mMutex) { return !mQueue.empty() || !mRunning; });
+            run = mRunning;
+
+            if (!mRunning) {
+                // if mRunning is false, then ImageManager is being destroyed, so
+                // bail out now.
+                break;
+            }
+
+            entry = mQueue.front();
+            mQueue.pop();
+            ATRACE_INT("ImageManagerQueueDepth", mQueue.size());
+        }
+
+        status_t result = NO_ERROR;
+        switch (entry.op) {
+            case QueueEntry::Operation::Delete:
+                mEngine->unbindExternalTextureBufferInternal(entry.bufferId);
+                break;
+            case QueueEntry::Operation::Insert:
+                result = mEngine->cacheExternalTextureBufferInternal(entry.buffer);
+                break;
+        }
+        if (entry.barrier != nullptr) {
+            {
+                std::lock_guard<std::mutex> entryLock(entry.barrier->mutex);
+                entry.barrier->result = result;
+                entry.barrier->isOpen = true;
+            }
+            entry.barrier->condition.notify_one();
+        }
+    }
+
+    ALOGD("Reached end of threadMain, terminating ImageManager thread!");
+}
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/ImageManager.h b/media/libstagefright/renderfright/gl/ImageManager.h
new file mode 100644
index 0000000..be67de8
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/ImageManager.h
@@ -0,0 +1,74 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <condition_variable>
+#include <mutex>
+#include <queue>
+#include <thread>
+
+#include <ui/GraphicBuffer.h>
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+class GLESRenderEngine;
+
+class ImageManager {
+public:
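+    // Synchronization token shared between a caller and the worker thread: the
+    // worker publishes `result`, sets `isOpen` under `mutex`, and notifies
+    // `condition`; the caller waits until `isOpen` becomes true.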
+    struct Barrier {
+        std::mutex mutex;
+        std::condition_variable_any condition;
+        bool isOpen GUARDED_BY(mutex) = false;
+        status_t result GUARDED_BY(mutex) = NO_ERROR;
+    };
+    ImageManager(GLESRenderEngine* engine);
+    ~ImageManager();
+    // Starts the background thread for the ImageManager.
+    // We need this to guarantee that the class is fully constructed before the
+    // thread begins running.
+    void initThread();
+    void cacheAsync(const sp<GraphicBuffer>& buffer, const std::shared_ptr<Barrier>& barrier)
+            EXCLUDES(mMutex);
+    status_t cache(const sp<GraphicBuffer>& buffer);
+    void releaseAsync(uint64_t bufferId, const std::shared_ptr<Barrier>& barrier) EXCLUDES(mMutex);
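+
+    // Minimal usage sketch for the asynchronous path (illustrative only; the
+    // `imageManager` and `buffer` names are placeholders). This mirrors what
+    // the synchronous cache() helper does internally:
+    //
+    //   auto barrier = std::make_shared<ImageManager::Barrier>();
+    //   imageManager->cacheAsync(buffer, barrier);
+    //   std::lock_guard<std::mutex> lock(barrier->mutex);
+    //   barrier->condition.wait(barrier->mutex, [&] { return barrier->isOpen; });
+    //   status_t result = barrier->result;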
+
+private:
+    struct QueueEntry {
+        enum class Operation { Delete, Insert };
+
+        Operation op = Operation::Delete;
+        sp<GraphicBuffer> buffer = nullptr;
+        uint64_t bufferId = 0;
+        std::shared_ptr<Barrier> barrier = nullptr;
+    };
+
+    void queueOperation(const QueueEntry&& entry);
+    void threadMain();
+    GLESRenderEngine* const mEngine;
+    std::thread mThread;
+    std::condition_variable_any mCondition;
+    std::mutex mMutex;
+    std::queue<QueueEntry> mQueue GUARDED_BY(mMutex);
+
+    bool mRunning GUARDED_BY(mMutex) = true;
+};
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/Program.cpp b/media/libstagefright/renderfright/gl/Program.cpp
new file mode 100644
index 0000000..f4fbf35
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/Program.cpp
@@ -0,0 +1,163 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "Program.h"
+
+#include <stdint.h>
+
+#include <log/log.h>
+#include <math/mat4.h>
+#include <utils/String8.h>
+#include "ProgramCache.h"
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+Program::Program(const ProgramCache::Key& /*needs*/, const char* vertex, const char* fragment)
+      : mInitialized(false) {
+    GLuint vertexId = buildShader(vertex, GL_VERTEX_SHADER);
+    GLuint fragmentId = buildShader(fragment, GL_FRAGMENT_SHADER);
+    GLuint programId = glCreateProgram();
+    glAttachShader(programId, vertexId);
+    glAttachShader(programId, fragmentId);
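+    // Pin the vertex attributes to the fixed slots declared in the
+    // Program::{position, texCoords, ...} enum; glBindAttribLocation() only
+    // takes effect at the glLinkProgram() call below.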
+    glBindAttribLocation(programId, position, "position");
+    glBindAttribLocation(programId, texCoords, "texCoords");
+    glBindAttribLocation(programId, cropCoords, "cropCoords");
+    glBindAttribLocation(programId, shadowColor, "shadowColor");
+    glBindAttribLocation(programId, shadowParams, "shadowParams");
+    glLinkProgram(programId);
+
+    GLint status;
+    glGetProgramiv(programId, GL_LINK_STATUS, &status);
+    if (status != GL_TRUE) {
+        ALOGE("Error while linking shaders:");
+        GLint infoLen = 0;
+        glGetProgramiv(programId, GL_INFO_LOG_LENGTH, &infoLen);
+        if (infoLen > 1) {
+            GLchar log[infoLen];
+            glGetProgramInfoLog(programId, infoLen, 0, &log[0]);
+            ALOGE("%s", log);
+        }
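+        // Linking failed: release the shaders and the program object;
+        // mInitialized stays false so isValid() reports the failure.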
+        glDetachShader(programId, vertexId);
+        glDetachShader(programId, fragmentId);
+        glDeleteShader(vertexId);
+        glDeleteShader(fragmentId);
+        glDeleteProgram(programId);
+    } else {
+        mProgram = programId;
+        mVertexShader = vertexId;
+        mFragmentShader = fragmentId;
+        mInitialized = true;
+        mProjectionMatrixLoc = glGetUniformLocation(programId, "projection");
+        mTextureMatrixLoc = glGetUniformLocation(programId, "texture");
+        mSamplerLoc = glGetUniformLocation(programId, "sampler");
+        mColorLoc = glGetUniformLocation(programId, "color");
+        mDisplayMaxLuminanceLoc = glGetUniformLocation(programId, "displayMaxLuminance");
+        mMaxMasteringLuminanceLoc = glGetUniformLocation(programId, "maxMasteringLuminance");
+        mMaxContentLuminanceLoc = glGetUniformLocation(programId, "maxContentLuminance");
+        mInputTransformMatrixLoc = glGetUniformLocation(programId, "inputTransformMatrix");
+        mOutputTransformMatrixLoc = glGetUniformLocation(programId, "outputTransformMatrix");
+        mCornerRadiusLoc = glGetUniformLocation(programId, "cornerRadius");
+        mCropCenterLoc = glGetUniformLocation(programId, "cropCenter");
+
+        // set-up the default values for our uniforms
+        glUseProgram(programId);
+        glUniformMatrix4fv(mProjectionMatrixLoc, 1, GL_FALSE, mat4().asArray());
+        glEnableVertexAttribArray(0);
+    }
+}
+
+bool Program::isValid() const {
+    return mInitialized;
+}
+
+void Program::use() {
+    glUseProgram(mProgram);
+}
+
+GLuint Program::getAttrib(const char* name) const {
+    // TODO: maybe use a local cache
+    return glGetAttribLocation(mProgram, name);
+}
+
+GLint Program::getUniform(const char* name) const {
+    // TODO: maybe use a local cache
+    return glGetUniformLocation(mProgram, name);
+}
+
+GLuint Program::buildShader(const char* source, GLenum type) {
+    GLuint shader = glCreateShader(type);
+    glShaderSource(shader, 1, &source, 0);
+    glCompileShader(shader);
+    GLint status;
+    glGetShaderiv(shader, GL_COMPILE_STATUS, &status);
+    if (status != GL_TRUE) {
+        // Some drivers return wrong values for GL_INFO_LOG_LENGTH,
+        // so use a fixed size instead.
+        GLchar log[512];
+        glGetShaderInfoLog(shader, sizeof(log), 0, log);
+        ALOGE("Error while compiling shader: \n%s\n%s", source, log);
+        glDeleteShader(shader);
+        return 0;
+    }
+    return shader;
+}
+
+void Program::setUniforms(const Description& desc) {
+    // TODO: we should have a mechanism here to not always reset uniforms that
+    // didn't change for this program.
+
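+    // glGetUniformLocation() returns -1 for uniforms that are absent from the
+    // linked program, so each location is checked before it is written.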
+    if (mSamplerLoc >= 0) {
+        glUniform1i(mSamplerLoc, 0);
+        glUniformMatrix4fv(mTextureMatrixLoc, 1, GL_FALSE, desc.texture.getMatrix().asArray());
+    }
+    if (mColorLoc >= 0) {
+        const float color[4] = {desc.color.r, desc.color.g, desc.color.b, desc.color.a};
+        glUniform4fv(mColorLoc, 1, color);
+    }
+    if (mInputTransformMatrixLoc >= 0) {
+        mat4 inputTransformMatrix = desc.inputTransformMatrix;
+        glUniformMatrix4fv(mInputTransformMatrixLoc, 1, GL_FALSE, inputTransformMatrix.asArray());
+    }
+    if (mOutputTransformMatrixLoc >= 0) {
+        // The output transform matrix and color matrix can be combined as one matrix
+        // that is applied right before applying OETF.
+        mat4 outputTransformMatrix = desc.colorMatrix * desc.outputTransformMatrix;
+        glUniformMatrix4fv(mOutputTransformMatrixLoc, 1, GL_FALSE, outputTransformMatrix.asArray());
+    }
+    if (mDisplayMaxLuminanceLoc >= 0) {
+        glUniform1f(mDisplayMaxLuminanceLoc, desc.displayMaxLuminance);
+    }
+    if (mMaxMasteringLuminanceLoc >= 0) {
+        glUniform1f(mMaxMasteringLuminanceLoc, desc.maxMasteringLuminance);
+    }
+    if (mMaxContentLuminanceLoc >= 0) {
+        glUniform1f(mMaxContentLuminanceLoc, desc.maxContentLuminance);
+    }
+    if (mCornerRadiusLoc >= 0) {
+        glUniform1f(mCornerRadiusLoc, desc.cornerRadius);
+    }
+    if (mCropCenterLoc >= 0) {
+        glUniform2f(mCropCenterLoc, desc.cropSize.x / 2.0f, desc.cropSize.y / 2.0f);
+    }
+    // these uniforms are always present
+    glUniformMatrix4fv(mProjectionMatrixLoc, 1, GL_FALSE, desc.projectionMatrix.asArray());
+}
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/Program.h b/media/libstagefright/renderfright/gl/Program.h
new file mode 100644
index 0000000..fc3755e
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/Program.h
@@ -0,0 +1,119 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SF_RENDER_ENGINE_PROGRAM_H
+#define SF_RENDER_ENGINE_PROGRAM_H
+
+#include <stdint.h>
+
+#include <GLES2/gl2.h>
+#include <renderengine/private/Description.h>
+#include "ProgramCache.h"
+
+namespace android {
+
+class String8;
+
+namespace renderengine {
+namespace gl {
+
+/*
+ * Abstracts a GLSL program comprising a vertex and fragment shader
+ */
+class Program {
+public:
+    // known locations for position and texture coordinates
+    enum {
+        /* position of each vertex for vertex shader */
+        position = 0,
+
+        /* UV coordinates for texture mapping */
+        texCoords = 1,
+
+        /* Crop coordinates, in pixels */
+        cropCoords = 2,
+
+        /* Shadow color */
+        shadowColor = 3,
+
+        /* Shadow params */
+        shadowParams = 4,
+    };
+
+    Program(const ProgramCache::Key& needs, const char* vertex, const char* fragment);
+    ~Program() = default;
+
+    /* whether this object is usable */
+    bool isValid() const;
+
+    /* Binds this program to the GLES context */
+    void use();
+
+    /* Returns the location of the specified attribute */
+    GLuint getAttrib(const char* name) const;
+
+    /* Returns the location of the specified uniform */
+    GLint getUniform(const char* name) const;
+
+    /* set-up uniforms from the description */
+    void setUniforms(const Description& desc);
+
+private:
+    GLuint buildShader(const char* source, GLenum type);
+
+    // whether the initialization succeeded
+    bool mInitialized;
+
+    // Name of the OpenGL program and shaders
+    GLuint mProgram;
+    GLuint mVertexShader;
+    GLuint mFragmentShader;
+
+    /* location of the projection matrix uniform */
+    GLint mProjectionMatrixLoc;
+
+    /* location of the texture matrix uniform */
+    GLint mTextureMatrixLoc;
+
+    /* location of the sampler uniform */
+    GLint mSamplerLoc;
+
+    /* location of the color uniform */
+    GLint mColorLoc;
+
+    /* location of display luminance uniform */
+    GLint mDisplayMaxLuminanceLoc;
+    /* location of max mastering luminance uniform */
+    GLint mMaxMasteringLuminanceLoc;
+    /* location of max content luminance uniform */
+    GLint mMaxContentLuminanceLoc;
+
+    /* location of transform matrix */
+    GLint mInputTransformMatrixLoc;
+    GLint mOutputTransformMatrixLoc;
+
+    /* location of corner radius uniform */
+    GLint mCornerRadiusLoc;
+
+    /* location of surface crop origin uniform, for rounded corner clipping */
+    GLint mCropCenterLoc;
+};
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
+
+#endif /* SF_RENDER_ENGINE_PROGRAM_H */
diff --git a/media/libstagefright/renderfright/gl/ProgramCache.cpp b/media/libstagefright/renderfright/gl/ProgramCache.cpp
new file mode 100644
index 0000000..3ae35ec
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/ProgramCache.cpp
@@ -0,0 +1,800 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define ATRACE_TAG ATRACE_TAG_GRAPHICS
+
+#include "ProgramCache.h"
+
+#include <GLES2/gl2.h>
+#include <GLES2/gl2ext.h>
+#include <log/log.h>
+#include <renderengine/private/Description.h>
+#include <utils/String8.h>
+#include <utils/Trace.h>
+#include "Program.h"
+
+ANDROID_SINGLETON_STATIC_INSTANCE(android::renderengine::gl::ProgramCache)
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+/*
+ * A simple formatter class that automatically appends a newline to each
+ * emitted line and manages the indentation level.
+ */
+
+class Formatter;
+static Formatter& indent(Formatter& f);
+static Formatter& dedent(Formatter& f);
+
+class Formatter {
+    String8 mString;
+    int mIndent;
+    typedef Formatter& (*FormaterManipFunc)(Formatter&);
+    friend Formatter& indent(Formatter& f);
+    friend Formatter& dedent(Formatter& f);
+
+public:
+    Formatter() : mIndent(0) {}
+
+    String8 getString() const { return mString; }
+
+    friend Formatter& operator<<(Formatter& out, const char* in) {
+        for (int i = 0; i < out.mIndent; i++) {
+            out.mString.append("    ");
+        }
+        out.mString.append(in);
+        out.mString.append("\n");
+        return out;
+    }
+    friend inline Formatter& operator<<(Formatter& out, const String8& in) {
+        return operator<<(out, in.string());
+    }
+    friend inline Formatter& operator<<(Formatter& to, FormaterManipFunc func) {
+        return (*func)(to);
+    }
+};
+Formatter& indent(Formatter& f) {
+    f.mIndent++;
+    return f;
+}
+Formatter& dedent(Formatter& f) {
+    f.mIndent--;
+    return f;
+}
+
+void ProgramCache::primeCache(
+        EGLContext context, bool useColorManagement, bool toneMapperShaderOnly) {
+    auto& cache = mCaches[context];
+    uint32_t shaderCount = 0;
+
+    if (toneMapperShaderOnly) {
+        Key shaderKey;
+        // base settings used by HDR->SDR tonemap only
+        shaderKey.set(Key::BLEND_MASK | Key::INPUT_TRANSFORM_MATRIX_MASK |
+                      Key::OUTPUT_TRANSFORM_MATRIX_MASK | Key::OUTPUT_TF_MASK |
+                      Key::OPACITY_MASK | Key::ALPHA_MASK |
+                      Key::ROUNDED_CORNERS_MASK | Key::TEXTURE_MASK,
+                      Key::BLEND_NORMAL | Key::INPUT_TRANSFORM_MATRIX_ON |
+                      Key::OUTPUT_TRANSFORM_MATRIX_ON | Key::OUTPUT_TF_SRGB |
+                      Key::OPACITY_OPAQUE | Key::ALPHA_EQ_ONE |
+                      Key::ROUNDED_CORNERS_OFF | Key::TEXTURE_EXT);
+        for (int i = 0; i < 4; i++) {
+            // Cache input transfer for HLG & ST2084
+            shaderKey.set(Key::INPUT_TF_MASK, (i & 1) ?
+                    Key::INPUT_TF_HLG : Key::INPUT_TF_ST2084);
+
+            // Cache Y410 input on or off
+            shaderKey.set(Key::Y410_BT2020_MASK, (i & 2) ?
+                    Key::Y410_BT2020_ON : Key::Y410_BT2020_OFF);
+            if (cache.count(shaderKey) == 0) {
+                cache.emplace(shaderKey, generateProgram(shaderKey));
+                shaderCount++;
+            }
+        }
+        return;
+    }
+
+    uint32_t keyMask = Key::BLEND_MASK | Key::OPACITY_MASK | Key::ALPHA_MASK | Key::TEXTURE_MASK
+        | Key::ROUNDED_CORNERS_MASK;
+    // Prime the cache for all combinations of the above masks,
+    // leaving off the experimental color matrix mask options.
+
+    nsecs_t timeBefore = systemTime();
+    for (uint32_t keyVal = 0; keyVal <= keyMask; keyVal++) {
+        Key shaderKey;
+        shaderKey.set(keyMask, keyVal);
+        uint32_t tex = shaderKey.getTextureTarget();
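+        // TEXTURE_MASK is two bits wide but only three encodings are defined,
+        // so skip the unused fourth combination.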
+        if (tex != Key::TEXTURE_OFF && tex != Key::TEXTURE_EXT && tex != Key::TEXTURE_2D) {
+            continue;
+        }
+        if (cache.count(shaderKey) == 0) {
+            cache.emplace(shaderKey, generateProgram(shaderKey));
+            shaderCount++;
+        }
+    }
+
+    // Prime for sRGB->P3 conversion
+    if (useColorManagement) {
+        Key shaderKey;
+        shaderKey.set(Key::BLEND_MASK | Key::OUTPUT_TRANSFORM_MATRIX_MASK | Key::INPUT_TF_MASK |
+                              Key::OUTPUT_TF_MASK,
+                      Key::BLEND_PREMULT | Key::OUTPUT_TRANSFORM_MATRIX_ON | Key::INPUT_TF_SRGB |
+                              Key::OUTPUT_TF_SRGB);
+        for (int i = 0; i < 16; i++) {
+            shaderKey.set(Key::OPACITY_MASK,
+                          (i & 1) ? Key::OPACITY_OPAQUE : Key::OPACITY_TRANSLUCENT);
+            shaderKey.set(Key::ALPHA_MASK, (i & 2) ? Key::ALPHA_LT_ONE : Key::ALPHA_EQ_ONE);
+
+            // Cache rounded corners
+            shaderKey.set(Key::ROUNDED_CORNERS_MASK,
+                          (i & 4) ? Key::ROUNDED_CORNERS_ON : Key::ROUNDED_CORNERS_OFF);
+
+            // Cache texture off option for window transition
+            shaderKey.set(Key::TEXTURE_MASK, (i & 8) ? Key::TEXTURE_EXT : Key::TEXTURE_OFF);
+            if (cache.count(shaderKey) == 0) {
+                cache.emplace(shaderKey, generateProgram(shaderKey));
+                shaderCount++;
+            }
+        }
+    }
+
+    nsecs_t timeAfter = systemTime();
+    float compileTimeMs = static_cast<float>(timeAfter - timeBefore) / 1.0E6;
+    ALOGD("shader cache generated - %u shaders in %f ms\n", shaderCount, compileTimeMs);
+}
+
+ProgramCache::Key ProgramCache::computeKey(const Description& description) {
+    Key needs;
+    needs.set(Key::TEXTURE_MASK,
+              !description.textureEnabled
+                      ? Key::TEXTURE_OFF
+                      : description.texture.getTextureTarget() == GL_TEXTURE_EXTERNAL_OES
+                              ? Key::TEXTURE_EXT
+                              : description.texture.getTextureTarget() == GL_TEXTURE_2D
+                                      ? Key::TEXTURE_2D
+                                      : Key::TEXTURE_OFF)
+            .set(Key::ALPHA_MASK, (description.color.a < 1) ? Key::ALPHA_LT_ONE : Key::ALPHA_EQ_ONE)
+            .set(Key::BLEND_MASK,
+                 description.isPremultipliedAlpha ? Key::BLEND_PREMULT : Key::BLEND_NORMAL)
+            .set(Key::OPACITY_MASK,
+                 description.isOpaque ? Key::OPACITY_OPAQUE : Key::OPACITY_TRANSLUCENT)
+            .set(Key::Key::INPUT_TRANSFORM_MATRIX_MASK,
+                 description.hasInputTransformMatrix() ? Key::INPUT_TRANSFORM_MATRIX_ON
+                                                       : Key::INPUT_TRANSFORM_MATRIX_OFF)
+            .set(Key::Key::OUTPUT_TRANSFORM_MATRIX_MASK,
+                 description.hasOutputTransformMatrix() || description.hasColorMatrix()
+                         ? Key::OUTPUT_TRANSFORM_MATRIX_ON
+                         : Key::OUTPUT_TRANSFORM_MATRIX_OFF)
+            .set(Key::ROUNDED_CORNERS_MASK,
+                 description.cornerRadius > 0 ? Key::ROUNDED_CORNERS_ON : Key::ROUNDED_CORNERS_OFF)
+            .set(Key::SHADOW_MASK, description.drawShadows ? Key::SHADOW_ON : Key::SHADOW_OFF);
+    needs.set(Key::Y410_BT2020_MASK,
+              description.isY410BT2020 ? Key::Y410_BT2020_ON : Key::Y410_BT2020_OFF);
+
+    if (needs.hasTransformMatrix() ||
+        (description.inputTransferFunction != description.outputTransferFunction)) {
+        switch (description.inputTransferFunction) {
+            case Description::TransferFunction::LINEAR:
+            default:
+                needs.set(Key::INPUT_TF_MASK, Key::INPUT_TF_LINEAR);
+                break;
+            case Description::TransferFunction::SRGB:
+                needs.set(Key::INPUT_TF_MASK, Key::INPUT_TF_SRGB);
+                break;
+            case Description::TransferFunction::ST2084:
+                needs.set(Key::INPUT_TF_MASK, Key::INPUT_TF_ST2084);
+                break;
+            case Description::TransferFunction::HLG:
+                needs.set(Key::INPUT_TF_MASK, Key::INPUT_TF_HLG);
+                break;
+        }
+
+        switch (description.outputTransferFunction) {
+            case Description::TransferFunction::LINEAR:
+            default:
+                needs.set(Key::OUTPUT_TF_MASK, Key::OUTPUT_TF_LINEAR);
+                break;
+            case Description::TransferFunction::SRGB:
+                needs.set(Key::OUTPUT_TF_MASK, Key::OUTPUT_TF_SRGB);
+                break;
+            case Description::TransferFunction::ST2084:
+                needs.set(Key::OUTPUT_TF_MASK, Key::OUTPUT_TF_ST2084);
+                break;
+            case Description::TransferFunction::HLG:
+                needs.set(Key::OUTPUT_TF_MASK, Key::OUTPUT_TF_HLG);
+                break;
+        }
+    }
+
+    return needs;
+}
+
+// Generate EOTF that converts signal values to relative display light,
+// both normalized to [0, 1].
+void ProgramCache::generateEOTF(Formatter& fs, const Key& needs) {
+    switch (needs.getInputTF()) {
+        case Key::INPUT_TF_SRGB:
+            fs << R"__SHADER__(
+                float EOTF_sRGB(float srgb) {
+                    return srgb <= 0.04045 ? srgb / 12.92 : pow((srgb + 0.055) / 1.055, 2.4);
+                }
+
+                vec3 EOTF_sRGB(const vec3 srgb) {
+                    return vec3(EOTF_sRGB(srgb.r), EOTF_sRGB(srgb.g), EOTF_sRGB(srgb.b));
+                }
+
+                vec3 EOTF(const vec3 srgb) {
+                    return sign(srgb.rgb) * EOTF_sRGB(abs(srgb.rgb));
+                }
+            )__SHADER__";
+            break;
+        case Key::INPUT_TF_ST2084:
+            fs << R"__SHADER__(
+                vec3 EOTF(const highp vec3 color) {
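+                    // Constants of the SMPTE ST 2084 (PQ) transfer function,
+                    // with both signal and output light normalized to [0, 1].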
+                    const highp float m1 = (2610.0 / 4096.0) / 4.0;
+                    const highp float m2 = (2523.0 / 4096.0) * 128.0;
+                    const highp float c1 = (3424.0 / 4096.0);
+                    const highp float c2 = (2413.0 / 4096.0) * 32.0;
+                    const highp float c3 = (2392.0 / 4096.0) * 32.0;
+
+                    highp vec3 tmp = pow(clamp(color, 0.0, 1.0), 1.0 / vec3(m2));
+                    tmp = max(tmp - c1, 0.0) / (c2 - c3 * tmp);
+                    return pow(tmp, 1.0 / vec3(m1));
+                }
+            )__SHADER__";
+            break;
+        case Key::INPUT_TF_HLG:
+            fs << R"__SHADER__(
+                highp float EOTF_channel(const highp float channel) {
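+                    // a, b and c are the BT.2100 HLG constants; the signal is split
+                    // at 0.5 between the square-law and logarithmic segments.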
+                    const highp float a = 0.17883277;
+                    const highp float b = 0.28466892;
+                    const highp float c = 0.55991073;
+                    return channel <= 0.5 ? channel * channel / 3.0 :
+                            (exp((channel - c) / a) + b) / 12.0;
+                }
+
+                vec3 EOTF(const highp vec3 color) {
+                    return vec3(EOTF_channel(color.r), EOTF_channel(color.g),
+                            EOTF_channel(color.b));
+                }
+            )__SHADER__";
+            break;
+        default:
+            fs << R"__SHADER__(
+                vec3 EOTF(const vec3 linear) {
+                    return linear;
+                }
+            )__SHADER__";
+            break;
+    }
+}
+
+void ProgramCache::generateToneMappingProcess(Formatter& fs, const Key& needs) {
+    // Convert relative light to absolute light.
+    switch (needs.getInputTF()) {
+        case Key::INPUT_TF_ST2084:
+            fs << R"__SHADER__(
+                highp vec3 ScaleLuminance(highp vec3 color) {
+                    return color * 10000.0;
+                }
+            )__SHADER__";
+            break;
+        case Key::INPUT_TF_HLG:
+            fs << R"__SHADER__(
+                highp vec3 ScaleLuminance(highp vec3 color) {
+                    // The formula is:
+                    // alpha * pow(Y, gamma - 1.0) * color + beta;
+                    // where alpha is 1000.0, gamma is 1.2, beta is 0.0.
+                    return color * 1000.0 * pow(color.y, 0.2);
+                }
+            )__SHADER__";
+            break;
+        default:
+            fs << R"__SHADER__(
+                highp vec3 ScaleLuminance(highp vec3 color) {
+                    return color * displayMaxLuminance;
+                }
+            )__SHADER__";
+            break;
+    }
+
+    // Tone map absolute light to display luminance range.
+    switch (needs.getInputTF()) {
+        case Key::INPUT_TF_ST2084:
+        case Key::INPUT_TF_HLG:
+            switch (needs.getOutputTF()) {
+                case Key::OUTPUT_TF_HLG:
+                    // Right now when mixed PQ and HLG contents are presented,
+                    // HLG content will always be converted to PQ. However, for
+                    // completeness, we simply clamp the value to [0.0, 1000.0].
+                    fs << R"__SHADER__(
+                        highp vec3 ToneMap(highp vec3 color) {
+                            return clamp(color, 0.0, 1000.0);
+                        }
+                    )__SHADER__";
+                    break;
+                case Key::OUTPUT_TF_ST2084:
+                    fs << R"__SHADER__(
+                        highp vec3 ToneMap(highp vec3 color) {
+                            return color;
+                        }
+                    )__SHADER__";
+                    break;
+                default:
+                    fs << R"__SHADER__(
+                        highp vec3 ToneMap(highp vec3 color) {
+                            float maxMasteringLumi = maxMasteringLuminance;
+                            float maxContentLumi = maxContentLuminance;
+                            float maxInLumi = min(maxMasteringLumi, maxContentLumi);
+                            float maxOutLumi = displayMaxLuminance;
+
+                            float nits = color.y;
+
+                            // clamp to max input luminance
+                            nits = clamp(nits, 0.0, maxInLumi);
+
+                            // scale [0.0, maxInLumi] to [0.0, maxOutLumi]
+                            if (maxInLumi <= maxOutLumi) {
+                                return color * (maxOutLumi / maxInLumi);
+                            } else {
+                                // three control points
+                                const float x0 = 10.0;
+                                const float y0 = 17.0;
+                                float x1 = maxOutLumi * 0.75;
+                                float y1 = x1;
+                                float x2 = x1 + (maxInLumi - x1) / 2.0;
+                                float y2 = y1 + (maxOutLumi - y1) * 0.75;
+
+                                // horizontal distances between the last three control points
+                                float h12 = x2 - x1;
+                                float h23 = maxInLumi - x2;
+                                // tangents at the last three control points
+                                float m1 = (y2 - y1) / h12;
+                                float m3 = (maxOutLumi - y2) / h23;
+                                float m2 = (m1 + m3) / 2.0;
+
+                                if (nits < x0) {
+                                    // scale [0.0, x0] to [0.0, y0] linearly
+                                    float slope = y0 / x0;
+                                    return color * slope;
+                                } else if (nits < x1) {
+                                    // scale [x0, x1] to [y0, y1] linearly
+                                    float slope = (y1 - y0) / (x1 - x0);
+                                    nits = y0 + (nits - x0) * slope;
+                                } else if (nits < x2) {
+                                    // scale [x1, x2] to [y1, y2] using Hermite interp
+                                    float t = (nits - x1) / h12;
+                                    nits = (y1 * (1.0 + 2.0 * t) + h12 * m1 * t) * (1.0 - t) * (1.0 - t) +
+                                            (y2 * (3.0 - 2.0 * t) + h12 * m2 * (t - 1.0)) * t * t;
+                                } else {
+                                    // scale [x2, maxInLumi] to [y2, maxOutLumi] using Hermite interp
+                                    float t = (nits - x2) / h23;
+                                    nits = (y2 * (1.0 + 2.0 * t) + h23 * m2 * t) * (1.0 - t) * (1.0 - t) +
+                                            (maxOutLumi * (3.0 - 2.0 * t) + h23 * m3 * (t - 1.0)) * t * t;
+                                }
+                            }
+
+                            // color.y is greater than x0 and is thus non-zero
+                            return color * (nits / color.y);
+                        }
+                    )__SHADER__";
+                    break;
+            }
+            break;
+        default:
+            // inverse tone map; the output luminance can be up to maxOutLumi.
+            fs << R"__SHADER__(
+                highp vec3 ToneMap(highp vec3 color) {
+                    const float maxOutLumi = 3000.0;
+
+                    const float x0 = 5.0;
+                    const float y0 = 2.5;
+                    float x1 = displayMaxLuminance * 0.7;
+                    float y1 = maxOutLumi * 0.15;
+                    float x2 = displayMaxLuminance * 0.9;
+                    float y2 = maxOutLumi * 0.45;
+                    float x3 = displayMaxLuminance;
+                    float y3 = maxOutLumi;
+
+                    float c1 = y1 / 3.0;
+                    float c2 = y2 / 2.0;
+                    float c3 = y3 / 1.5;
+
+                    float nits = color.y;
+
+                    float scale;
+                    if (nits <= x0) {
+                        // scale [0.0, x0] to [0.0, y0] linearly
+                        const float slope = y0 / x0;
+                        return color * slope;
+                    } else if (nits <= x1) {
+                        // scale [x0, x1] to [y0, y1] using a curve
+                        float t = (nits - x0) / (x1 - x0);
+                        nits = (1.0 - t) * (1.0 - t) * y0 + 2.0 * (1.0 - t) * t * c1 + t * t * y1;
+                    } else if (nits <= x2) {
+                        // scale [x1, x2] to [y1, y2] using a curve
+                        float t = (nits - x1) / (x2 - x1);
+                        nits = (1.0 - t) * (1.0 - t) * y1 + 2.0 * (1.0 - t) * t * c2 + t * t * y2;
+                    } else {
+                        // scale [x2, x3] to [y2, y3] using a curve
+                        float t = (nits - x2) / (x3 - x2);
+                        nits = (1.0 - t) * (1.0 - t) * y2 + 2.0 * (1.0 - t) * t * c3 + t * t * y3;
+                    }
+
+                    // color.y is greater than x0 and is thus non-zero
+                    return color * (nits / color.y);
+                }
+            )__SHADER__";
+            break;
+    }
+
+    // convert absolute light to relative light.
+    switch (needs.getOutputTF()) {
+        case Key::OUTPUT_TF_ST2084:
+            fs << R"__SHADER__(
+                highp vec3 NormalizeLuminance(highp vec3 color) {
+                    return color / 10000.0;
+                }
+            )__SHADER__";
+            break;
+        case Key::OUTPUT_TF_HLG:
+            fs << R"__SHADER__(
+                highp vec3 NormalizeLuminance(highp vec3 color) {
+                    return color / 1000.0 * pow(color.y / 1000.0, -0.2 / 1.2);
+                }
+            )__SHADER__";
+            break;
+        default:
+            fs << R"__SHADER__(
+                highp vec3 NormalizeLuminance(highp vec3 color) {
+                    return color / displayMaxLuminance;
+                }
+            )__SHADER__";
+            break;
+    }
+}
+
+// Generate OOTF that modifies the relative scene light to relative display light.
+void ProgramCache::generateOOTF(Formatter& fs, const ProgramCache::Key& needs) {
+    if (!needs.needsToneMapping()) {
+        fs << R"__SHADER__(
+            highp vec3 OOTF(const highp vec3 color) {
+                return color;
+            }
+        )__SHADER__";
+    } else {
+        generateToneMappingProcess(fs, needs);
+        fs << R"__SHADER__(
+            highp vec3 OOTF(const highp vec3 color) {
+                return NormalizeLuminance(ToneMap(ScaleLuminance(color)));
+            }
+        )__SHADER__";
+    }
+}
+
+// Generate OETF that converts relative display light to signal values,
+// both normalized to [0, 1]
+void ProgramCache::generateOETF(Formatter& fs, const Key& needs) {
+    switch (needs.getOutputTF()) {
+        case Key::OUTPUT_TF_SRGB:
+            fs << R"__SHADER__(
+                float OETF_sRGB(const float linear) {
+                    return linear <= 0.0031308 ?
+                            linear * 12.92 : (pow(linear, 1.0 / 2.4) * 1.055) - 0.055;
+                }
+
+                vec3 OETF_sRGB(const vec3 linear) {
+                    return vec3(OETF_sRGB(linear.r), OETF_sRGB(linear.g), OETF_sRGB(linear.b));
+                }
+
+                vec3 OETF(const vec3 linear) {
+                    return sign(linear.rgb) * OETF_sRGB(abs(linear.rgb));
+                }
+            )__SHADER__";
+            break;
+        case Key::OUTPUT_TF_ST2084:
+            fs << R"__SHADER__(
+                vec3 OETF(const vec3 linear) {
+                    const highp float m1 = (2610.0 / 4096.0) / 4.0;
+                    const highp float m2 = (2523.0 / 4096.0) * 128.0;
+                    const highp float c1 = (3424.0 / 4096.0);
+                    const highp float c2 = (2413.0 / 4096.0) * 32.0;
+                    const highp float c3 = (2392.0 / 4096.0) * 32.0;
+
+                    highp vec3 tmp = pow(linear, vec3(m1));
+                    tmp = (c1 + c2 * tmp) / (1.0 + c3 * tmp);
+                    return pow(tmp, vec3(m2));
+                }
+            )__SHADER__";
+            break;
+        case Key::OUTPUT_TF_HLG:
+            fs << R"__SHADER__(
+                highp float OETF_channel(const highp float channel) {
+                    const highp float a = 0.17883277;
+                    const highp float b = 0.28466892;
+                    const highp float c = 0.55991073;
+                    return channel <= 1.0 / 12.0 ? sqrt(3.0 * channel) :
+                            a * log(12.0 * channel - b) + c;
+                }
+
+                vec3 OETF(const highp vec3 color) {
+                    return vec3(OETF_channel(color.r), OETF_channel(color.g),
+                            OETF_channel(color.b));
+                }
+            )__SHADER__";
+            break;
+        default:
+            fs << R"__SHADER__(
+                vec3 OETF(const vec3 linear) {
+                    return linear;
+                }
+            )__SHADER__";
+            break;
+    }
+}
+
+String8 ProgramCache::generateVertexShader(const Key& needs) {
+    Formatter vs;
+    if (needs.hasTextureCoords()) {
+        vs << "attribute vec4 texCoords;"
+           << "varying vec2 outTexCoords;";
+    }
+    if (needs.hasRoundedCorners()) {
+        vs << "attribute lowp vec4 cropCoords;";
+        vs << "varying lowp vec2 outCropCoords;";
+    }
+    if (needs.drawShadows()) {
+        vs << "attribute lowp vec4 shadowColor;";
+        vs << "varying lowp vec4 outShadowColor;";
+        vs << "attribute lowp vec4 shadowParams;";
+        vs << "varying lowp vec3 outShadowParams;";
+    }
+    vs << "attribute vec4 position;"
+       << "uniform mat4 projection;"
+       << "uniform mat4 texture;"
+       << "void main(void) {" << indent << "gl_Position = projection * position;";
+    if (needs.hasTextureCoords()) {
+        vs << "outTexCoords = (texture * texCoords).st;";
+    }
+    if (needs.hasRoundedCorners()) {
+        vs << "outCropCoords = cropCoords.st;";
+    }
+    if (needs.drawShadows()) {
+        vs << "outShadowColor = shadowColor;";
+        vs << "outShadowParams = shadowParams.xyz;";
+    }
+    vs << dedent << "}";
+    return vs.getString();
+}
+
+String8 ProgramCache::generateFragmentShader(const Key& needs) {
+    Formatter fs;
+    if (needs.getTextureTarget() == Key::TEXTURE_EXT) {
+        fs << "#extension GL_OES_EGL_image_external : require";
+    }
+
+    // default precision is required-ish in fragment shaders
+    fs << "precision mediump float;";
+
+    if (needs.getTextureTarget() == Key::TEXTURE_EXT) {
+        fs << "uniform samplerExternalOES sampler;";
+    } else if (needs.getTextureTarget() == Key::TEXTURE_2D) {
+        fs << "uniform sampler2D sampler;";
+    }
+
+    if (needs.hasTextureCoords()) {
+        fs << "varying vec2 outTexCoords;";
+    }
+
+    if (needs.hasRoundedCorners()) {
+        // Rounded corners implementation using a signed distance function.
+        fs << R"__SHADER__(
+            uniform float cornerRadius;
+            uniform vec2 cropCenter;
+            varying vec2 outCropCoords;
+
+            /**
+             * This function takes the current crop coordinates and calculates an alpha value based
+             * on the corner radius and distance from the crop center.
+             */
+            float applyCornerRadius(vec2 cropCoords)
+            {
+                vec2 position = cropCoords - cropCenter;
+                // Scale down the dist vector here, as otherwise large corner
+                // radii can cause floating point issues when computing the norm
+                vec2 dist = (abs(position) - cropCenter + vec2(cornerRadius)) / 16.0;
+                // Once we've found the norm, then scale back up.
+                float plane = length(max(dist, vec2(0.0))) * 16.0;
+                return 1.0 - clamp(plane - cornerRadius, 0.0, 1.0);
+            }
+            )__SHADER__";
+    }
+
+    if (needs.drawShadows()) {
+        fs << R"__SHADER__(
+            varying lowp vec4 outShadowColor;
+            varying lowp vec3 outShadowParams;
+
+            /**
+             * Returns the shadow color.
+             */
+            vec4 getShadowColor()
+            {
+                lowp float d = length(outShadowParams.xy);
+                vec2 uv = vec2(outShadowParams.z * (1.0 - d), 0.5);
+                lowp float factor = texture2D(sampler, uv).a;
+                return outShadowColor * factor;
+            }
+            )__SHADER__";
+    }
+
+    if (needs.getTextureTarget() == Key::TEXTURE_OFF || needs.hasAlpha()) {
+        fs << "uniform vec4 color;";
+    }
+
+    if (needs.isY410BT2020()) {
+        fs << R"__SHADER__(
+            vec3 convertY410BT2020(const vec3 color) {
+                const vec3 offset = vec3(0.0625, 0.5, 0.5);
+                const mat3 transform = mat3(
+                    vec3(1.1678,  1.1678, 1.1678),
+                    vec3(   0.0, -0.1878, 2.1481),
+                    vec3(1.6836, -0.6523,   0.0));
+                // Y is in G, U is in R, and V is in B
+                return clamp(transform * (color.grb - offset), 0.0, 1.0);
+            }
+            )__SHADER__";
+    }
+
+    if (needs.hasTransformMatrix() || (needs.getInputTF() != needs.getOutputTF())) {
+        if (needs.needsToneMapping()) {
+            fs << "uniform float displayMaxLuminance;";
+            fs << "uniform float maxMasteringLuminance;";
+            fs << "uniform float maxContentLuminance;";
+        }
+
+        if (needs.hasInputTransformMatrix()) {
+            fs << "uniform mat4 inputTransformMatrix;";
+            fs << R"__SHADER__(
+                highp vec3 InputTransform(const highp vec3 color) {
+                    return clamp(vec3(inputTransformMatrix * vec4(color, 1.0)), 0.0, 1.0);
+                }
+            )__SHADER__";
+        } else {
+            fs << R"__SHADER__(
+                highp vec3 InputTransform(const highp vec3 color) {
+                    return color;
+                }
+            )__SHADER__";
+        }
+
+        // the transformation from a wider colorspace to a narrower one can
+        // result in >1.0 or <0.0 pixel values
+        if (needs.hasOutputTransformMatrix()) {
+            fs << "uniform mat4 outputTransformMatrix;";
+            fs << R"__SHADER__(
+                highp vec3 OutputTransform(const highp vec3 color) {
+                    return clamp(vec3(outputTransformMatrix * vec4(color, 1.0)), 0.0, 1.0);
+                }
+            )__SHADER__";
+        } else {
+            fs << R"__SHADER__(
+                highp vec3 OutputTransform(const highp vec3 color) {
+                    return clamp(color, 0.0, 1.0);
+                }
+            )__SHADER__";
+        }
+
+        generateEOTF(fs, needs);
+        generateOOTF(fs, needs);
+        generateOETF(fs, needs);
+    }
+
+    fs << "void main(void) {" << indent;
+    if (needs.drawShadows()) {
+        fs << "gl_FragColor = getShadowColor();";
+    } else {
+        if (needs.isTexturing()) {
+            fs << "gl_FragColor = texture2D(sampler, outTexCoords);";
+            if (needs.isY410BT2020()) {
+                fs << "gl_FragColor.rgb = convertY410BT2020(gl_FragColor.rgb);";
+            }
+        } else {
+            fs << "gl_FragColor.rgb = color.rgb;";
+            fs << "gl_FragColor.a = 1.0;";
+        }
+        if (needs.isOpaque()) {
+            fs << "gl_FragColor.a = 1.0;";
+        }
+        if (needs.hasAlpha()) {
+            // modulate the current alpha value with alpha set
+            if (needs.isPremultiplied()) {
+                // ... and the color too if we're premultiplied
+                fs << "gl_FragColor *= color.a;";
+            } else {
+                fs << "gl_FragColor.a *= color.a;";
+            }
+        }
+    }
+
+    if (needs.hasTransformMatrix() || (needs.getInputTF() != needs.getOutputTF())) {
+        if (!needs.isOpaque() && needs.isPremultiplied()) {
+            // un-premultiply if needed before linearization
+            // avoid divide by 0 by adding 0.5/256 to the alpha channel
+            fs << "gl_FragColor.rgb = gl_FragColor.rgb / (gl_FragColor.a + 0.0019);";
+        }
+        fs << "gl_FragColor.rgb = "
+              "OETF(OutputTransform(OOTF(InputTransform(EOTF(gl_FragColor.rgb)))));";
+        if (!needs.isOpaque() && needs.isPremultiplied()) {
+            // and re-premultiply if needed after gamma correction
+            fs << "gl_FragColor.rgb = gl_FragColor.rgb * (gl_FragColor.a + 0.0019);";
+        }
+    }
+
+    if (needs.hasRoundedCorners()) {
+        if (needs.isPremultiplied()) {
+            fs << "gl_FragColor *= vec4(applyCornerRadius(outCropCoords));";
+        } else {
+            fs << "gl_FragColor.a *= applyCornerRadius(outCropCoords);";
+        }
+    }
+
+    fs << dedent << "}";
+    return fs.getString();
+}
+
+std::unique_ptr<Program> ProgramCache::generateProgram(const Key& needs) {
+    ATRACE_CALL();
+
+    // vertex shader
+    String8 vs = generateVertexShader(needs);
+
+    // fragment shader
+    String8 fs = generateFragmentShader(needs);
+
+    return std::make_unique<Program>(needs, vs.string(), fs.string());
+}
+
+void ProgramCache::useProgram(EGLContext context, const Description& description) {
+    // generate the key for the shader based on the description
+    Key needs(computeKey(description));
+
+    // look-up the program in the cache
+    auto& cache = mCaches[context];
+    auto it = cache.find(needs);
+    if (it == cache.end()) {
+        // we didn't find our program, so generate one...
+        nsecs_t time = systemTime();
+        it = cache.emplace(needs, generateProgram(needs)).first;
+        time = systemTime() - time;
+
+        ALOGV(">>> generated new program for context %p: needs=%08X, time=%u ms (%zu programs)",
+              context, needs.mKey, uint32_t(ns2ms(time)), cache.size());
+    }
+
+    // here we have a suitable program for this description
+    std::unique_ptr<Program>& program = it->second;
+    if (program->isValid()) {
+        program->use();
+        program->setUniforms(description);
+    }
+}
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/ProgramCache.h b/media/libstagefright/renderfright/gl/ProgramCache.h
new file mode 100644
index 0000000..901e631
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/ProgramCache.h
@@ -0,0 +1,228 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SF_RENDER_ENGINE_PROGRAMCACHE_H
+#define SF_RENDER_ENGINE_PROGRAMCACHE_H
+
+#include <memory>
+#include <unordered_map>
+
+#include <EGL/egl.h>
+#include <GLES2/gl2.h>
+#include <renderengine/private/Description.h>
+#include <utils/Singleton.h>
+#include <utils/TypeHelpers.h>
+
+namespace android {
+
+class String8;
+
+namespace renderengine {
+
+struct Description;
+
+namespace gl {
+
+class Formatter;
+class Program;
+
+/*
+ * This class generates GLSL programs suitable to handle a given
+ * Description. It's responsible for figuring out what to
+ * generate from a Description.
+ * It also maintains a cache of these Programs.
+ */
+class ProgramCache : public Singleton<ProgramCache> {
+public:
+    /*
+     * Key is used to retrieve a Program in the cache.
+     * A Key is generated from a Description.
+     */
+    class Key {
+        friend class ProgramCache;
+        typedef uint32_t key_t;
+        key_t mKey;
+
+    public:
+        enum {
+            BLEND_SHIFT = 0,
+            BLEND_MASK = 1 << BLEND_SHIFT,
+            BLEND_PREMULT = 1 << BLEND_SHIFT,
+            BLEND_NORMAL = 0 << BLEND_SHIFT,
+
+            OPACITY_SHIFT = 1,
+            OPACITY_MASK = 1 << OPACITY_SHIFT,
+            OPACITY_OPAQUE = 1 << OPACITY_SHIFT,
+            OPACITY_TRANSLUCENT = 0 << OPACITY_SHIFT,
+
+            ALPHA_SHIFT = 2,
+            ALPHA_MASK = 1 << ALPHA_SHIFT,
+            ALPHA_LT_ONE = 1 << ALPHA_SHIFT,
+            ALPHA_EQ_ONE = 0 << ALPHA_SHIFT,
+
+            TEXTURE_SHIFT = 3,
+            TEXTURE_MASK = 3 << TEXTURE_SHIFT,
+            TEXTURE_OFF = 0 << TEXTURE_SHIFT,
+            TEXTURE_EXT = 1 << TEXTURE_SHIFT,
+            TEXTURE_2D = 2 << TEXTURE_SHIFT,
+
+            ROUNDED_CORNERS_SHIFT = 5,
+            ROUNDED_CORNERS_MASK = 1 << ROUNDED_CORNERS_SHIFT,
+            ROUNDED_CORNERS_OFF = 0 << ROUNDED_CORNERS_SHIFT,
+            ROUNDED_CORNERS_ON = 1 << ROUNDED_CORNERS_SHIFT,
+
+            INPUT_TRANSFORM_MATRIX_SHIFT = 6,
+            INPUT_TRANSFORM_MATRIX_MASK = 1 << INPUT_TRANSFORM_MATRIX_SHIFT,
+            INPUT_TRANSFORM_MATRIX_OFF = 0 << INPUT_TRANSFORM_MATRIX_SHIFT,
+            INPUT_TRANSFORM_MATRIX_ON = 1 << INPUT_TRANSFORM_MATRIX_SHIFT,
+
+            OUTPUT_TRANSFORM_MATRIX_SHIFT = 7,
+            OUTPUT_TRANSFORM_MATRIX_MASK = 1 << OUTPUT_TRANSFORM_MATRIX_SHIFT,
+            OUTPUT_TRANSFORM_MATRIX_OFF = 0 << OUTPUT_TRANSFORM_MATRIX_SHIFT,
+            OUTPUT_TRANSFORM_MATRIX_ON = 1 << OUTPUT_TRANSFORM_MATRIX_SHIFT,
+
+            INPUT_TF_SHIFT = 8,
+            INPUT_TF_MASK = 3 << INPUT_TF_SHIFT,
+            INPUT_TF_LINEAR = 0 << INPUT_TF_SHIFT,
+            INPUT_TF_SRGB = 1 << INPUT_TF_SHIFT,
+            INPUT_TF_ST2084 = 2 << INPUT_TF_SHIFT,
+            INPUT_TF_HLG = 3 << INPUT_TF_SHIFT,
+
+            OUTPUT_TF_SHIFT = 10,
+            OUTPUT_TF_MASK = 3 << OUTPUT_TF_SHIFT,
+            OUTPUT_TF_LINEAR = 0 << OUTPUT_TF_SHIFT,
+            OUTPUT_TF_SRGB = 1 << OUTPUT_TF_SHIFT,
+            OUTPUT_TF_ST2084 = 2 << OUTPUT_TF_SHIFT,
+            OUTPUT_TF_HLG = 3 << OUTPUT_TF_SHIFT,
+
+            Y410_BT2020_SHIFT = 12,
+            Y410_BT2020_MASK = 1 << Y410_BT2020_SHIFT,
+            Y410_BT2020_OFF = 0 << Y410_BT2020_SHIFT,
+            Y410_BT2020_ON = 1 << Y410_BT2020_SHIFT,
+
+            SHADOW_SHIFT = 13,
+            SHADOW_MASK = 1 << SHADOW_SHIFT,
+            SHADOW_OFF = 0 << SHADOW_SHIFT,
+            SHADOW_ON = 1 << SHADOW_SHIFT,
+        };
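+
+        // For illustration: an opaque, premultiplied, full-alpha layer sampled
+        // from a GL_TEXTURE_EXTERNAL_OES texture, with every other feature off,
+        // encodes as BLEND_PREMULT | OPACITY_OPAQUE | ALPHA_EQ_ONE | TEXTURE_EXT
+        // = 0x0B.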
+
+        inline Key() : mKey(0) {}
+        inline Key(const Key& rhs) : mKey(rhs.mKey) {}
+
+        inline Key& set(key_t mask, key_t value) {
+            mKey = (mKey & ~mask) | value;
+            return *this;
+        }
+
+        inline bool isTexturing() const { return (mKey & TEXTURE_MASK) != TEXTURE_OFF; }
+        inline bool hasTextureCoords() const { return isTexturing() && !drawShadows(); }
+        inline int getTextureTarget() const { return (mKey & TEXTURE_MASK); }
+        inline bool isPremultiplied() const { return (mKey & BLEND_MASK) == BLEND_PREMULT; }
+        inline bool isOpaque() const { return (mKey & OPACITY_MASK) == OPACITY_OPAQUE; }
+        inline bool hasAlpha() const { return (mKey & ALPHA_MASK) == ALPHA_LT_ONE; }
+        inline bool hasRoundedCorners() const {
+            return (mKey & ROUNDED_CORNERS_MASK) == ROUNDED_CORNERS_ON;
+        }
+        inline bool drawShadows() const { return (mKey & SHADOW_MASK) == SHADOW_ON; }
+        inline bool hasInputTransformMatrix() const {
+            return (mKey & INPUT_TRANSFORM_MATRIX_MASK) == INPUT_TRANSFORM_MATRIX_ON;
+        }
+        inline bool hasOutputTransformMatrix() const {
+            return (mKey & OUTPUT_TRANSFORM_MATRIX_MASK) == OUTPUT_TRANSFORM_MATRIX_ON;
+        }
+        inline bool hasTransformMatrix() const {
+            return hasInputTransformMatrix() || hasOutputTransformMatrix();
+        }
+        inline int getInputTF() const { return (mKey & INPUT_TF_MASK); }
+        inline int getOutputTF() const { return (mKey & OUTPUT_TF_MASK); }
+
+        // When HDR and non-HDR contents are mixed, or different types of HDR contents are
+        // mixed, we run a tone mapping process to map the input content to the output
+        // content. Currently, the following conversions are handled:
+        // * SDR -> HLG
+        // * SDR -> PQ
+        // * HLG -> PQ
+        inline bool needsToneMapping() const {
+            int inputTF = getInputTF();
+            int outputTF = getOutputTF();
+
+            // Return false when converting from SDR to SDR.
+            if (inputTF == Key::INPUT_TF_SRGB && outputTF == Key::OUTPUT_TF_LINEAR) {
+                return false;
+            }
+            if (inputTF == Key::INPUT_TF_LINEAR && outputTF == Key::OUTPUT_TF_SRGB) {
+                return false;
+            }
+
+            inputTF >>= Key::INPUT_TF_SHIFT;
+            outputTF >>= Key::OUTPUT_TF_SHIFT;
+            return inputTF != outputTF;
+        }
+        inline bool isY410BT2020() const { return (mKey & Y410_BT2020_MASK) == Y410_BT2020_ON; }
+
+        // for use by std::unordered_map
+
+        bool operator==(const Key& other) const { return mKey == other.mKey; }
+
+        struct Hash {
+            size_t operator()(const Key& key) const { return static_cast<size_t>(key.mKey); }
+        };
+    };
+
+    ProgramCache() = default;
+    ~ProgramCache() = default;
+
+    // Generate shaders to populate the cache
+    void primeCache(const EGLContext context, bool useColorManagement, bool toneMapperShaderOnly);
+
+    size_t getSize(const EGLContext context) { return mCaches[context].size(); }
+
+    // useProgram looks up a suitable program in the cache or generates one
+    // if none can be found.
+    void useProgram(const EGLContext context, const Description& description);
+
+private:
+    // compute a cache Key from a Description
+    static Key computeKey(const Description& description);
+    // Generate EOTF based on the Key.
+    static void generateEOTF(Formatter& fs, const Key& needs);
+    // Generate necessary tone mapping methods for OOTF.
+    static void generateToneMappingProcess(Formatter& fs, const Key& needs);
+    // Generate OOTF based on the Key.
+    static void generateOOTF(Formatter& fs, const Key& needs);
+    // Generate OETF based on the Key.
+    static void generateOETF(Formatter& fs, const Key& needs);
+    // generates a program from the Key
+    static std::unique_ptr<Program> generateProgram(const Key& needs);
+    // generates the vertex shader from the Key
+    static String8 generateVertexShader(const Key& needs);
+    // generates the fragment shader from the Key
+    static String8 generateFragmentShader(const Key& needs);
+
+    // Key/Value map used for caching Programs. Currently the cache
+    // is never shrunk (and the GL program objects are never deleted).
+    std::unordered_map<EGLContext, std::unordered_map<Key, std::unique_ptr<Program>, Key::Hash>>
+            mCaches;
+};
+
+} // namespace gl
+} // namespace renderengine
+
+ANDROID_BASIC_TYPES_TRAITS(renderengine::gl::ProgramCache::Key)
+
+} // namespace android
+
+#endif /* SF_RENDER_ENGINE_PROGRAMCACHE_H */
diff --git a/media/libstagefright/renderfright/gl/filters/BlurFilter.cpp b/media/libstagefright/renderfright/gl/filters/BlurFilter.cpp
new file mode 100644
index 0000000..19f18c0
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/filters/BlurFilter.cpp
@@ -0,0 +1,268 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define ATRACE_TAG ATRACE_TAG_GRAPHICS
+
+#include "BlurFilter.h"
+#include <EGL/egl.h>
+#include <EGL/eglext.h>
+#include <GLES3/gl3.h>
+#include <GLES3/gl3ext.h>
+#include <ui/GraphicTypes.h>
+#include <cstdint>
+
+#include <utils/Trace.h>
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+BlurFilter::BlurFilter(GLESRenderEngine& engine)
+      : mEngine(engine),
+        mCompositionFbo(engine),
+        mPingFbo(engine),
+        mPongFbo(engine),
+        mMixProgram(engine),
+        mBlurProgram(engine) {
+    mMixProgram.compile(getVertexShader(), getMixFragShader());
+    mMPosLoc = mMixProgram.getAttributeLocation("aPosition");
+    mMUvLoc = mMixProgram.getAttributeLocation("aUV");
+    mMTextureLoc = mMixProgram.getUniformLocation("uTexture");
+    mMCompositionTextureLoc = mMixProgram.getUniformLocation("uCompositionTexture");
+    mMMixLoc = mMixProgram.getUniformLocation("uMix");
+
+    mBlurProgram.compile(getVertexShader(), getFragmentShader());
+    mBPosLoc = mBlurProgram.getAttributeLocation("aPosition");
+    mBUvLoc = mBlurProgram.getAttributeLocation("aUV");
+    mBTextureLoc = mBlurProgram.getUniformLocation("uTexture");
+    mBOffsetLoc = mBlurProgram.getUniformLocation("uOffset");
+
+    static constexpr auto size = 2.0f;
+    static constexpr auto translation = 1.0f;
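+    // A single oversized triangle that covers the whole viewport ("full-screen
+    // triangle"): positions span NDC (-1,-3), (-1,1) and (3,1), so the visible
+    // [-1,1] square maps to UV coordinates in [0,1].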
+    const GLfloat vboData[] = {
+        // Vertex data
+        translation - size, -translation - size,
+        translation - size, -translation + size,
+        translation + size, -translation + size,
+        // UV data
+        0.0f, 0.0f - translation,
+        0.0f, size - translation,
+        size, size - translation
+    };
+    mMeshBuffer.allocateBuffers(vboData, 12 /* size */);
+}
+
+status_t BlurFilter::setAsDrawTarget(const DisplaySettings& display, uint32_t radius) {
+    ATRACE_NAME("BlurFilter::setAsDrawTarget");
+    mRadius = radius;
+    mDisplayX = display.physicalDisplay.left;
+    mDisplayY = display.physicalDisplay.top;
+
+    if (mDisplayWidth < display.physicalDisplay.width() ||
+        mDisplayHeight < display.physicalDisplay.height()) {
+        ATRACE_NAME("BlurFilter::allocatingTextures");
+
+        mDisplayWidth = display.physicalDisplay.width();
+        mDisplayHeight = display.physicalDisplay.height();
+        mCompositionFbo.allocateBuffers(mDisplayWidth, mDisplayHeight);
+
+        const uint32_t fboWidth = floorf(mDisplayWidth * kFboScale);
+        const uint32_t fboHeight = floorf(mDisplayHeight * kFboScale);
+        mPingFbo.allocateBuffers(fboWidth, fboHeight);
+        mPongFbo.allocateBuffers(fboWidth, fboHeight);
+
+        if (mPingFbo.getStatus() != GL_FRAMEBUFFER_COMPLETE) {
+            ALOGE("Invalid ping buffer");
+            return mPingFbo.getStatus();
+        }
+        if (mPongFbo.getStatus() != GL_FRAMEBUFFER_COMPLETE) {
+            ALOGE("Invalid pong buffer");
+            return mPongFbo.getStatus();
+        }
+        if (mCompositionFbo.getStatus() != GL_FRAMEBUFFER_COMPLETE) {
+            ALOGE("Invalid composition buffer");
+            return mCompositionFbo.getStatus();
+        }
+        if (!mBlurProgram.isValid()) {
+            ALOGE("Invalid shader");
+            return GL_INVALID_OPERATION;
+        }
+    }
+
+    mCompositionFbo.bind();
+    glViewport(0, 0, mCompositionFbo.getBufferWidth(), mCompositionFbo.getBufferHeight());
+    return NO_ERROR;
+}
+
+void BlurFilter::drawMesh(GLuint uv, GLuint position) {
+
+    glEnableVertexAttribArray(uv);
+    glEnableVertexAttribArray(position);
+    mMeshBuffer.bind();
+    glVertexAttribPointer(position, 2 /* size */, GL_FLOAT, GL_FALSE,
+                          2 * sizeof(GLfloat) /* stride */, 0 /* offset */);
+    glVertexAttribPointer(uv, 2 /* size */, GL_FLOAT, GL_FALSE, 0 /* stride */,
+                          (GLvoid*)(6 * sizeof(GLfloat)) /* offset */);
+    mMeshBuffer.unbind();
+
+    // draw mesh
+    glDrawArrays(GL_TRIANGLES, 0 /* first */, 3 /* count */);
+}
+
+status_t BlurFilter::prepare() {
+    ATRACE_NAME("BlurFilter::prepare");
+
+    // Kawase is an approximation of a Gaussian blur, but it behaves differently from one.
+    // A radius transformation is required to approximate the Gaussian radius, and also to
+    // introduce non-integer steps, which are necessary to smoothly interpolate large radii.
+    const auto radius = mRadius / 6.0f;
+
+    // Calculate how many passes we'll do, based on the radius.
+    // Too many passes will make the operation expensive.
+    const auto passes = min(kMaxPasses, (uint32_t)ceil(radius));
+
+    const float radiusByPasses = radius / (float)passes;
+    const float stepX = radiusByPasses / (float)mCompositionFbo.getBufferWidth();
+    const float stepY = radiusByPasses / (float)mCompositionFbo.getBufferHeight();
+
+    // Let's start by downsampling and blurring the composited frame simultaneously.
+    mBlurProgram.useProgram();
+    glActiveTexture(GL_TEXTURE0);
+    glUniform1i(mBTextureLoc, 0);
+    glBindTexture(GL_TEXTURE_2D, mCompositionFbo.getTextureName());
+    glUniform2f(mBOffsetLoc, stepX, stepY);
+    glViewport(0, 0, mPingFbo.getBufferWidth(), mPingFbo.getBufferHeight());
+    mPingFbo.bind();
+    drawMesh(mBUvLoc, mBPosLoc);
+
+    // And now we'll ping pong between our textures, to accumulate the result of various offsets.
+    GLFramebuffer* read = &mPingFbo;
+    GLFramebuffer* draw = &mPongFbo;
+    glViewport(0, 0, draw->getBufferWidth(), draw->getBufferHeight());
+    for (auto i = 1; i < passes; i++) {
+        ATRACE_NAME("BlurFilter::renderPass");
+        draw->bind();
+
+        glBindTexture(GL_TEXTURE_2D, read->getTextureName());
+        glUniform2f(mBOffsetLoc, stepX * i, stepY * i);
+
+        drawMesh(mBUvLoc, mBPosLoc);
+
+        // Swap buffers for next iteration
+        auto tmp = draw;
+        draw = read;
+        read = tmp;
+    }
+    mLastDrawTarget = read;
+
+    return NO_ERROR;
+}
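
To make the radius-to-pass arithmetic concrete, the standalone sketch below reproduces the scheduling from prepare() for a few radii; the buffer dimensions are made up purely for illustration:

#include <algorithm>
#include <cmath>
#include <cstdint>
#include <cstdio>

int main() {
    // Same constant as BlurFilter.h; a 1000x500 composition buffer is assumed here.
    const uint32_t kMaxPasses = 4;
    const float width = 1000.0f, height = 500.0f;
    for (uint32_t requested : {6u, 24u, 90u}) {
        const float radius = requested / 6.0f;
        const uint32_t passes = std::min(kMaxPasses, (uint32_t)std::ceil(radius));
        const float radiusByPasses = radius / (float)passes;
        printf("radius=%u -> passes=%u, per-pass offset=(%.4f, %.4f)\n",
               requested, passes, radiusByPasses / width, radiusByPasses / height);
    }
    return 0;
}
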
+
+status_t BlurFilter::render(bool multiPass) {
+    ATRACE_NAME("BlurFilter::render");
+
+    // Now let's scale our blur up. It will be interpolated with the larger composited
+    // texture for the first frames, to hide downscaling artifacts.
+    GLfloat mix = fmin(1.0, mRadius / kMaxCrossFadeRadius);
+
+    // When doing multiple passes, we cannot try to read mCompositionFbo, given that we'll
+    // be writing onto it. Let's disable the crossfade, otherwise we'd need 1 extra frame buffer,
+    // as large as the screen size.
+    if (mix >= 1 || multiPass) {
+        mLastDrawTarget->bindAsReadBuffer();
+        glBlitFramebuffer(0, 0, mLastDrawTarget->getBufferWidth(),
+                          mLastDrawTarget->getBufferHeight(), mDisplayX, mDisplayY, mDisplayWidth,
+                          mDisplayHeight, GL_COLOR_BUFFER_BIT, GL_LINEAR);
+        return NO_ERROR;
+    }
+
+    mMixProgram.useProgram();
+    glUniform1f(mMMixLoc, mix);
+    glActiveTexture(GL_TEXTURE0);
+    glBindTexture(GL_TEXTURE_2D, mLastDrawTarget->getTextureName());
+    glUniform1i(mMTextureLoc, 0);
+    glActiveTexture(GL_TEXTURE1);
+    glBindTexture(GL_TEXTURE_2D, mCompositionFbo.getTextureName());
+    glUniform1i(mMCompositionTextureLoc, 1);
+
+    drawMesh(mMUvLoc, mMPosLoc);
+
+    glUseProgram(0);
+    glActiveTexture(GL_TEXTURE0);
+    mEngine.checkErrors("Drawing blur mesh");
+    return NO_ERROR;
+}
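
The crossfade factor depends only on the requested radius; this small sketch shows which branch of render() is taken for a few radii (assuming multiPass is false):

#include <cmath>
#include <cstdio>

int main() {
    // Same constant as BlurFilter.h.
    const float kMaxCrossFadeRadius = 30.0f;
    for (float radius : {5.0f, 15.0f, 30.0f, 60.0f}) {
        const float mix = std::fmin(1.0f, radius / kMaxCrossFadeRadius);
        printf("radius=%.0f -> mix=%.2f (%s)\n", radius, mix,
               mix >= 1.0f ? "blit only" : "crossfade with composition");
    }
    return 0;
}
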
+
+string BlurFilter::getVertexShader() const {
+    return R"SHADER(#version 310 es
+        precision mediump float;
+
+        in vec2 aPosition;
+        in highp vec2 aUV;
+        out highp vec2 vUV;
+
+        void main() {
+            vUV = aUV;
+            gl_Position = vec4(aPosition, 0.0, 1.0);
+        }
+    )SHADER";
+}
+
+string BlurFilter::getFragmentShader() const {
+    return R"SHADER(#version 310 es
+        precision mediump float;
+
+        uniform sampler2D uTexture;
+        uniform vec2 uOffset;
+
+        in highp vec2 vUV;
+        out vec4 fragColor;
+
+        void main() {
+            fragColor  = texture(uTexture, vUV, 0.0);
+            fragColor += texture(uTexture, vUV + vec2( uOffset.x,  uOffset.y), 0.0);
+            fragColor += texture(uTexture, vUV + vec2( uOffset.x, -uOffset.y), 0.0);
+            fragColor += texture(uTexture, vUV + vec2(-uOffset.x,  uOffset.y), 0.0);
+            fragColor += texture(uTexture, vUV + vec2(-uOffset.x, -uOffset.y), 0.0);
+
+            fragColor = vec4(fragColor.rgb * 0.2, 1.0);
+        }
+    )SHADER";
+}
+
+string BlurFilter::getMixFragShader() const {
+    string shader = R"SHADER(#version 310 es
+        precision mediump float;
+
+        in highp vec2 vUV;
+        out vec4 fragColor;
+
+        uniform sampler2D uCompositionTexture;
+        uniform sampler2D uTexture;
+        uniform float uMix;
+
+        void main() {
+            vec4 blurred = texture(uTexture, vUV);
+            vec4 composition = texture(uCompositionTexture, vUV);
+            fragColor = mix(composition, blurred, uMix);
+        }
+    )SHADER";
+    return shader;
+}
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/filters/BlurFilter.h b/media/libstagefright/renderfright/gl/filters/BlurFilter.h
new file mode 100644
index 0000000..593a8fd
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/filters/BlurFilter.h
@@ -0,0 +1,95 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <ui/GraphicTypes.h>
+#include "../GLESRenderEngine.h"
+#include "../GLFramebuffer.h"
+#include "../GLVertexBuffer.h"
+#include "GenericProgram.h"
+
+using namespace std;
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+/**
+ * This is an implementation of a Kawase blur, as described here:
+ * https://community.arm.com/cfs-file/__key/communityserver-blogs-components-weblogfiles/
+ * 00-00-00-20-66/siggraph2015_2D00_mmg_2D00_marius_2D00_notes.pdf
+ */
+class BlurFilter {
+public:
+    // Downsample FBO to improve performance
+    static constexpr float kFboScale = 0.25f;
+    // Maximum number of render passes
+    static constexpr uint32_t kMaxPasses = 4;
+    // To avoid downscaling artifacts, we interpolate the blurred fbo with the full composited
+    // image, up to this radius.
+    static constexpr float kMaxCrossFadeRadius = 30.0f;
+
+    explicit BlurFilter(GLESRenderEngine& engine);
+    virtual ~BlurFilter(){};
+
+    // Set up render targets, redirecting output to offscreen texture.
+    status_t setAsDrawTarget(const DisplaySettings&, uint32_t radius);
+    // Execute blur passes, rendering to offscreen texture.
+    status_t prepare();
+    // Render blur to the bound framebuffer (screen).
+    status_t render(bool multiPass);
+
+private:
+    uint32_t mRadius;
+    void drawMesh(GLuint uv, GLuint position);
+    string getVertexShader() const;
+    string getFragmentShader() const;
+    string getMixFragShader() const;
+
+    GLESRenderEngine& mEngine;
+    // Frame buffer holding the composited background.
+    GLFramebuffer mCompositionFbo;
+    // Frame buffers holding the blur passes.
+    GLFramebuffer mPingFbo;
+    GLFramebuffer mPongFbo;
+    uint32_t mDisplayWidth = 0;
+    uint32_t mDisplayHeight = 0;
+    uint32_t mDisplayX = 0;
+    uint32_t mDisplayY = 0;
+    // Buffer holding the final blur pass.
+    GLFramebuffer* mLastDrawTarget;
+
+    // VBO containing vertex and uv data of a fullscreen triangle.
+    GLVertexBuffer mMeshBuffer;
+
+    GenericProgram mMixProgram;
+    GLuint mMPosLoc;
+    GLuint mMUvLoc;
+    GLuint mMMixLoc;
+    GLuint mMTextureLoc;
+    GLuint mMCompositionTextureLoc;
+
+    GenericProgram mBlurProgram;
+    GLuint mBPosLoc;
+    GLuint mBUvLoc;
+    GLuint mBTextureLoc;
+    GLuint mBOffsetLoc;
+};
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
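
The header does not spell out the expected call order, so here is a hedged sketch of how a GLES backend might drive the filter. It assumes the renderfright headers, a current GL context, and caller-provided engine and display objects; the function name is hypothetical:

// Sketch only: assumes a current EGL/GL context and the renderfright headers.
#include <cstdint>

#include "filters/BlurFilter.h"

using android::renderengine::DisplaySettings;
using android::renderengine::gl::BlurFilter;
using android::renderengine::gl::GLESRenderEngine;

void drawBlurredFrame(GLESRenderEngine& engine, const DisplaySettings& display,
                      uint32_t radius) {
    BlurFilter filter(engine);
    // 1. Redirect rendering into the composition FBO.
    filter.setAsDrawTarget(display, radius);
    // 2. ...the caller renders the scene here, into the bound FBO...
    // 3. Run the downscaled Kawase passes offscreen.
    filter.prepare();
    // 4. The caller rebinds the onscreen framebuffer, then composites the result.
    filter.render(false /* multiPass */);
}
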
diff --git a/media/libstagefright/renderfright/gl/filters/GenericProgram.cpp b/media/libstagefright/renderfright/gl/filters/GenericProgram.cpp
new file mode 100644
index 0000000..bb35889
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/filters/GenericProgram.cpp
@@ -0,0 +1,122 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "GenericProgram.h"
+
+#include <GLES/gl.h>
+#include <GLES/glext.h>
+#include <GLES2/gl2.h>
+#include <GLES2/gl2ext.h>
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+GenericProgram::GenericProgram(GLESRenderEngine& engine) : mEngine(engine) {}
+
+GenericProgram::~GenericProgram() {
+    if (mVertexShaderHandle != 0) {
+        if (mProgramHandle != 0) {
+            glDetachShader(mProgramHandle, mVertexShaderHandle);
+        }
+        glDeleteShader(mVertexShaderHandle);
+    }
+
+    if (mFragmentShaderHandle != 0) {
+        if (mProgramHandle != 0) {
+            glDetachShader(mProgramHandle, mFragmentShaderHandle);
+        }
+        glDeleteShader(mFragmentShaderHandle);
+    }
+
+    if (mProgramHandle != 0) {
+        glDeleteProgram(mProgramHandle);
+    }
+}
+
+void GenericProgram::compile(string vertexShader, string fragmentShader) {
+    mVertexShaderHandle = compileShader(GL_VERTEX_SHADER, vertexShader);
+    mFragmentShaderHandle = compileShader(GL_FRAGMENT_SHADER, fragmentShader);
+    if (mVertexShaderHandle == 0 || mFragmentShaderHandle == 0) {
+        ALOGE("Aborting program creation.");
+        return;
+    }
+    mProgramHandle = createAndLink(mVertexShaderHandle, mFragmentShaderHandle);
+    mEngine.checkErrors("Linking program");
+}
+
+void GenericProgram::useProgram() const {
+    glUseProgram(mProgramHandle);
+}
+
+GLuint GenericProgram::compileShader(GLuint type, string src) const {
+    const GLuint shader = glCreateShader(type);
+    if (shader == 0) {
+        mEngine.checkErrors("Creating shader");
+        return 0;
+    }
+    const GLchar* charSrc = (const GLchar*)src.c_str();
+    glShaderSource(shader, 1, &charSrc, nullptr);
+    glCompileShader(shader);
+
+    GLint isCompiled = 0;
+    glGetShaderiv(shader, GL_COMPILE_STATUS, &isCompiled);
+    if (isCompiled == GL_FALSE) {
+        GLint maxLength = 0;
+        glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &maxLength);
+        string errorLog;
+        errorLog.resize(maxLength);
+        glGetShaderInfoLog(shader, maxLength, &maxLength, errorLog.data());
+        glDeleteShader(shader);
+        ALOGE("Error compiling shader: %s", errorLog.c_str());
+        return 0;
+    }
+    return shader;
+}
+
+GLuint GenericProgram::createAndLink(GLuint vertexShader, GLuint fragmentShader) const {
+    const GLuint program = glCreateProgram();
+    mEngine.checkErrors("Creating program");
+
+    glAttachShader(program, vertexShader);
+    glAttachShader(program, fragmentShader);
+    glLinkProgram(program);
+    mEngine.checkErrors("Linking program");
+    return program;
+}
+
+GLuint GenericProgram::getUniformLocation(const string name) const {
+    if (mProgramHandle == 0) {
+        ALOGE("Can't get location of %s on an invalid program.", name.c_str());
+        return -1;
+    }
+    return glGetUniformLocation(mProgramHandle, (const GLchar*)name.c_str());
+}
+
+GLuint GenericProgram::getAttributeLocation(const string name) const {
+    if (mProgramHandle == 0) {
+        ALOGE("Can't get location of %s on an invalid program.", name.c_str());
+        return -1;
+    }
+    return glGetAttribLocation(mProgramHandle, (const GLchar*)name.c_str());
+}
+
+bool GenericProgram::isValid() const {
+    return mProgramHandle != 0;
+}
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
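
For reference, a hedged sketch of how GenericProgram is typically used: compile a pair of shaders, check validity, then query locations. It assumes a current GL context and the renderfright headers; the shader sources and function name are made up for illustration:

// Sketch only: assumes a current GL context and the renderfright headers.
#include "GenericProgram.h"

using android::renderengine::gl::GenericProgram;
using android::renderengine::gl::GLESRenderEngine;

void useSolidColorProgram(GLESRenderEngine& engine) {
    GenericProgram program(engine);
    program.compile(
            // Trivial passthrough shaders, written inline for illustration.
            "#version 310 es\nin vec2 aPosition;\n"
            "void main() { gl_Position = vec4(aPosition, 0.0, 1.0); }\n",
            "#version 310 es\nprecision mediump float;\nout vec4 fragColor;\n"
            "void main() { fragColor = vec4(1.0); }\n");
    if (!program.isValid()) return;
    program.useProgram();
    const GLuint aPosition = program.getAttributeLocation("aPosition");
    // ...bind a VBO and issue draw calls using aPosition...
    (void)aPosition;
}
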
diff --git a/media/libstagefright/renderfright/gl/filters/GenericProgram.h b/media/libstagefright/renderfright/gl/filters/GenericProgram.h
new file mode 100644
index 0000000..6da2a5a
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/filters/GenericProgram.h
@@ -0,0 +1,51 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <ui/GraphicTypes.h>
+#include "../GLESRenderEngine.h"
+#include "../GLFramebuffer.h"
+
+using namespace std;
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+class GenericProgram {
+public:
+    explicit GenericProgram(GLESRenderEngine& renderEngine);
+    ~GenericProgram();
+    void compile(string vertexShader, string fragmentShader);
+    bool isValid() const;
+    void useProgram() const;
+    GLuint getAttributeLocation(const string name) const;
+    GLuint getUniformLocation(const string name) const;
+
+private:
+    GLuint compileShader(GLuint type, const string src) const;
+    GLuint createAndLink(GLuint vertexShader, GLuint fragmentShader) const;
+
+    GLESRenderEngine& mEngine;
+    GLuint mVertexShaderHandle = 0;
+    GLuint mFragmentShaderHandle = 0;
+    GLuint mProgramHandle = 0;
+};
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/include/renderengine/DisplaySettings.h b/media/libstagefright/renderfright/include/renderengine/DisplaySettings.h
new file mode 100644
index 0000000..ca16d2c
--- /dev/null
+++ b/media/libstagefright/renderfright/include/renderengine/DisplaySettings.h
@@ -0,0 +1,90 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <iosfwd>
+
+#include <math/mat4.h>
+#include <ui/GraphicTypes.h>
+#include <ui/Rect.h>
+#include <ui/Region.h>
+#include <ui/Transform.h>
+
+namespace android {
+namespace renderengine {
+
+// DisplaySettings contains the settings that are applicable when drawing all
+// layers for a given display.
+struct DisplaySettings {
+    // Rectangle describing the physical display. We will project from the
+    // logical clip onto this rectangle.
+    Rect physicalDisplay = Rect::INVALID_RECT;
+
+    // Rectangle bounded by the x,y clipping planes in the logical display, so
+    // that the orthographic projection matrix can be computed. When
+    // constructing this matrix, the z-coordinate bounds are assumed to be at
+    // z=0 and z=1.
+    Rect clip = Rect::INVALID_RECT;
+
+    // Maximum luminance pulled from the display's HDR capabilities.
+    float maxLuminance = 1.0f;
+
+    // Output dataspace that will be populated if wide color gamut is used, or
+    // DataSpace::UNKNOWN otherwise.
+    ui::Dataspace outputDataspace = ui::Dataspace::UNKNOWN;
+
+    // Additional color transform to apply in linear space after transforming
+    // to the output dataspace.
+    mat4 colorTransform = mat4();
+
+    // Region that will be cleared to (0, 0, 0, 1) prior to rendering.
+    // This is specified in layer-stack space.
+    Region clearRegion = Region::INVALID_REGION;
+
+    // An additional orientation flag to be applied after clipping the output.
+    // By way of example, this may be used for supporting fullscreen screenshot
+    // capture of a device in landscape while the buffer is in portrait
+    // orientation.
+    uint32_t orientation = ui::Transform::ROT_0;
+};
+
+static inline bool operator==(const DisplaySettings& lhs, const DisplaySettings& rhs) {
+    return lhs.physicalDisplay == rhs.physicalDisplay && lhs.clip == rhs.clip &&
+            lhs.maxLuminance == rhs.maxLuminance && lhs.outputDataspace == rhs.outputDataspace &&
+            lhs.colorTransform == rhs.colorTransform &&
+            lhs.clearRegion.hasSameRects(rhs.clearRegion) && lhs.orientation == rhs.orientation;
+}
+
+// Defining PrintTo helps with Google Tests.
+static inline void PrintTo(const DisplaySettings& settings, ::std::ostream* os) {
+    *os << "DisplaySettings {";
+    *os << "\n    .physicalDisplay = ";
+    PrintTo(settings.physicalDisplay, os);
+    *os << "\n    .clip = ";
+    PrintTo(settings.clip, os);
+    *os << "\n    .maxLuminance = " << settings.maxLuminance;
+    *os << "\n    .outputDataspace = ";
+    PrintTo(settings.outputDataspace, os);
+    *os << "\n    .colorTransform = " << settings.colorTransform;
+    *os << "\n    .clearRegion = ";
+    PrintTo(settings.clearRegion, os);
+    *os << "\n    .orientation = " << settings.orientation;
+    *os << "\n}";
+}
+
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/include/renderengine/Framebuffer.h b/media/libstagefright/renderfright/include/renderengine/Framebuffer.h
new file mode 100644
index 0000000..6511127
--- /dev/null
+++ b/media/libstagefright/renderfright/include/renderengine/Framebuffer.h
@@ -0,0 +1,35 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <cstdint>
+
+struct ANativeWindowBuffer;
+
+namespace android {
+namespace renderengine {
+
+class Framebuffer {
+public:
+    virtual ~Framebuffer() = default;
+
+    virtual bool setNativeWindowBuffer(ANativeWindowBuffer* nativeBuffer, bool isProtected,
+                                       const bool useFramebufferCache) = 0;
+};
+
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/include/renderengine/Image.h b/media/libstagefright/renderfright/include/renderengine/Image.h
new file mode 100644
index 0000000..3bb4731
--- /dev/null
+++ b/media/libstagefright/renderfright/include/renderengine/Image.h
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+struct ANativeWindowBuffer;
+
+namespace android {
+namespace renderengine {
+
+class Image {
+public:
+    virtual ~Image() = default;
+    virtual bool setNativeWindowBuffer(ANativeWindowBuffer* buffer, bool isProtected) = 0;
+};
+
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/include/renderengine/LayerSettings.h b/media/libstagefright/renderfright/include/renderengine/LayerSettings.h
new file mode 100644
index 0000000..95e9367
--- /dev/null
+++ b/media/libstagefright/renderfright/include/renderengine/LayerSettings.h
@@ -0,0 +1,258 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <iosfwd>
+
+#include <math/mat4.h>
+#include <math/vec3.h>
+#include <renderengine/Texture.h>
+#include <ui/Fence.h>
+#include <ui/FloatRect.h>
+#include <ui/GraphicBuffer.h>
+#include <ui/GraphicTypes.h>
+#include <ui/Rect.h>
+#include <ui/Region.h>
+#include <ui/Transform.h>
+
+namespace android {
+namespace renderengine {
+
+// Metadata describing the input buffer to render from.
+struct Buffer {
+    // Buffer containing the image that we will render.
+    // If buffer == nullptr, then the rest of the fields in this struct will be
+    // ignored.
+    sp<GraphicBuffer> buffer = nullptr;
+
+    // Fence that will fire when the buffer is ready to be bound.
+    sp<Fence> fence = nullptr;
+
+    // Texture identifier to bind the external texture to.
+    // TODO(alecmouri): This is GL-specific...make the type backend-agnostic.
+    uint32_t textureName = 0;
+
+    // Whether to use filtering when rendering the texture.
+    bool useTextureFiltering = false;
+
+    // Transform matrix to apply to texture coordinates.
+    mat4 textureTransform = mat4();
+
+    // Whether to use pre-multiplied alpha.
+    bool usePremultipliedAlpha = true;
+
+    // Override flag indicating that the alpha of each pixel in the buffer *must* be 1.0.
+    // LayerSettings::alpha is still used if isOpaque==true - this flag only
+    // overrides the alpha channel of the buffer.
+    bool isOpaque = false;
+
+    // HDR color-space setting for Y410.
+    bool isY410BT2020 = false;
+    float maxMasteringLuminance = 0.0;
+    float maxContentLuminance = 0.0;
+};
+
+// Metadata describing the layer geometry.
+struct Geometry {
+    // Boundaries of the layer.
+    FloatRect boundaries = FloatRect();
+
+    // Transform matrix to apply to mesh coordinates.
+    mat4 positionTransform = mat4();
+
+    // Radius of rounded corners, if greater than 0. Otherwise, this layer's
+    // corners are not rounded.
+    // Having a corner radius will force GPU composition on the layer and its children, drawing it
+    // with a special shader. The shader will receive the radius and the crop rectangle as input,
+    // modifying the opacity of the destination texture, multiplying it by a number between 0 and 1.
+    // We query Layer#getRoundedCornerState() to retrieve the radius as well as the rounded crop
+    // rectangle to figure out how to apply the radius for this layer. The crop rectangle will be
+    // in local layer coordinate space, so we have to take the layer transform into account when
+    // walking up the tree.
+    float roundedCornersRadius = 0.0;
+
+    // Rectangle within which corners will be rounded.
+    FloatRect roundedCornersCrop = FloatRect();
+};
+
+// Descriptor of the source pixels for this layer.
+struct PixelSource {
+    // Source buffer
+    Buffer buffer = Buffer();
+
+    // The solid color with which to fill the layer.
+    // This should only be populated if we don't render from an application
+    // buffer.
+    half3 solidColor = half3(0.0f, 0.0f, 0.0f);
+};
+
+/*
+ * Contains the configuration for the shadows drawn by single layer. Shadow follows
+ * material design guidelines.
+ */
+struct ShadowSettings {
+    // Color of the ambient shadow. The alpha is premultiplied.
+    vec4 ambientColor = vec4();
+
+    // Color of the spot shadow. The alpha is premultiplied. The position of the spot shadow
+    // depends on the light position.
+    vec4 spotColor = vec4();
+
+    // Position of the light source used to cast the spot shadow.
+    vec3 lightPos = vec3();
+
+    // Radius of the spot light source. A smaller radius yields sharper edges,
+    // while a larger radius yields softer shadows.
+    float lightRadius = 0.f;
+
+    // Length of the cast shadow. If length is <= 0.f no shadows will be drawn.
+    float length = 0.f;
+
+    // If true, the casting layer is translucent and the shadow needs to fill in the bounds.
+    // Otherwise the shadow will only be drawn around the edges of the casting layer.
+    bool casterIsTranslucent = false;
+};
+
+// The settings that RenderEngine requires for correctly rendering a Layer.
+struct LayerSettings {
+    // Geometry information
+    Geometry geometry = Geometry();
+
+    // Source pixels for this layer.
+    PixelSource source = PixelSource();
+
+    // Alpha option to blend with the source pixels
+    half alpha = half(0.0);
+
+    // Color space describing how the source pixels should be interpreted.
+    ui::Dataspace sourceDataspace = ui::Dataspace::UNKNOWN;
+
+    // Additional layer-specific color transform to be applied before the global
+    // transform.
+    mat4 colorTransform = mat4();
+
+    // True if blending will be forced to be disabled.
+    bool disableBlending = false;
+
+    ShadowSettings shadow;
+
+    int backgroundBlurRadius = 0;
+};
+
+// Keep in sync with custom comparison function in
+// compositionengine/impl/ClientCompositionRequestCache.cpp
+static inline bool operator==(const Buffer& lhs, const Buffer& rhs) {
+    return lhs.buffer == rhs.buffer && lhs.fence == rhs.fence &&
+            lhs.textureName == rhs.textureName &&
+            lhs.useTextureFiltering == rhs.useTextureFiltering &&
+            lhs.textureTransform == rhs.textureTransform &&
+            lhs.usePremultipliedAlpha == rhs.usePremultipliedAlpha &&
+            lhs.isOpaque == rhs.isOpaque && lhs.isY410BT2020 == rhs.isY410BT2020 &&
+            lhs.maxMasteringLuminance == rhs.maxMasteringLuminance &&
+            lhs.maxContentLuminance == rhs.maxContentLuminance;
+}
+
+static inline bool operator==(const Geometry& lhs, const Geometry& rhs) {
+    return lhs.boundaries == rhs.boundaries && lhs.positionTransform == rhs.positionTransform &&
+            lhs.roundedCornersRadius == rhs.roundedCornersRadius &&
+            lhs.roundedCornersCrop == rhs.roundedCornersCrop;
+}
+
+static inline bool operator==(const PixelSource& lhs, const PixelSource& rhs) {
+    return lhs.buffer == rhs.buffer && lhs.solidColor == rhs.solidColor;
+}
+
+static inline bool operator==(const ShadowSettings& lhs, const ShadowSettings& rhs) {
+    return lhs.ambientColor == rhs.ambientColor && lhs.spotColor == rhs.spotColor &&
+            lhs.lightPos == rhs.lightPos && lhs.lightRadius == rhs.lightRadius &&
+            lhs.length == rhs.length && lhs.casterIsTranslucent == rhs.casterIsTranslucent;
+}
+
+static inline bool operator==(const LayerSettings& lhs, const LayerSettings& rhs) {
+    return lhs.geometry == rhs.geometry && lhs.source == rhs.source && lhs.alpha == rhs.alpha &&
+            lhs.sourceDataspace == rhs.sourceDataspace &&
+            lhs.colorTransform == rhs.colorTransform &&
+            lhs.disableBlending == rhs.disableBlending && lhs.shadow == rhs.shadow &&
+            lhs.backgroundBlurRadius == rhs.backgroundBlurRadius;
+}
+
+// Defining PrintTo helps with Google Tests.
+
+static inline void PrintTo(const Buffer& settings, ::std::ostream* os) {
+    *os << "Buffer {";
+    *os << "\n    .buffer = " << settings.buffer.get();
+    *os << "\n    .fence = " << settings.fence.get();
+    *os << "\n    .textureName = " << settings.textureName;
+    *os << "\n    .useTextureFiltering = " << settings.useTextureFiltering;
+    *os << "\n    .textureTransform = " << settings.textureTransform;
+    *os << "\n    .usePremultipliedAlpha = " << settings.usePremultipliedAlpha;
+    *os << "\n    .isOpaque = " << settings.isOpaque;
+    *os << "\n    .isY410BT2020 = " << settings.isY410BT2020;
+    *os << "\n    .maxMasteringLuminance = " << settings.maxMasteringLuminance;
+    *os << "\n    .maxContentLuminance = " << settings.maxContentLuminance;
+    *os << "\n}";
+}
+
+static inline void PrintTo(const Geometry& settings, ::std::ostream* os) {
+    *os << "Geometry {";
+    *os << "\n    .boundaries = ";
+    PrintTo(settings.boundaries, os);
+    *os << "\n    .positionTransform = " << settings.positionTransform;
+    *os << "\n    .roundedCornersRadius = " << settings.roundedCornersRadius;
+    *os << "\n    .roundedCornersCrop = ";
+    PrintTo(settings.roundedCornersCrop, os);
+    *os << "\n}";
+}
+
+static inline void PrintTo(const PixelSource& settings, ::std::ostream* os) {
+    *os << "PixelSource {";
+    *os << "\n    .buffer = ";
+    PrintTo(settings.buffer, os);
+    *os << "\n    .solidColor = " << settings.solidColor;
+    *os << "\n}";
+}
+
+static inline void PrintTo(const ShadowSettings& settings, ::std::ostream* os) {
+    *os << "ShadowSettings {";
+    *os << "\n    .ambientColor = " << settings.ambientColor;
+    *os << "\n    .spotColor = " << settings.spotColor;
+    *os << "\n    .lightPos = " << settings.lightPos;
+    *os << "\n    .lightRadius = " << settings.lightRadius;
+    *os << "\n    .length = " << settings.length;
+    *os << "\n    .casterIsTranslucent = " << settings.casterIsTranslucent;
+    *os << "\n}";
+}
+
+static inline void PrintTo(const LayerSettings& settings, ::std::ostream* os) {
+    *os << "LayerSettings {";
+    *os << "\n    .geometry = ";
+    PrintTo(settings.geometry, os);
+    *os << "\n    .source = ";
+    PrintTo(settings.source, os);
+    *os << "\n    .alpha = " << settings.alpha;
+    *os << "\n    .sourceDataspace = ";
+    PrintTo(settings.sourceDataspace, os);
+    *os << "\n    .colorTransform = " << settings.colorTransform;
+    *os << "\n    .disableBlending = " << settings.disableBlending;
+    *os << "\n    .backgroundBlurRadius = " << settings.backgroundBlurRadius;
+    *os << "\n    .shadow = ";
+    PrintTo(settings.shadow, os);
+    *os << "\n}";
+}
+
+} // namespace renderengine
+} // namespace android
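
As a usage illustration, the hedged sketch below fills in a LayerSettings for a dimmed, blurred solid-color layer; the field names match the struct above, while the dimensions, dataspace, and function name are arbitrary:

// Sketch only: assumes the renderfright headers; values are illustrative.
#include <renderengine/LayerSettings.h>

android::renderengine::LayerSettings makeDimLayer() {
    using namespace android;
    renderengine::LayerSettings layer;
    layer.geometry.boundaries = FloatRect(0.0f, 0.0f, 1080.0f, 1920.0f);
    layer.source.solidColor = half3(0.0f, 0.0f, 0.0f); // no buffer, so fill with a solid color
    layer.alpha = half(0.4f);
    layer.sourceDataspace = ui::Dataspace::SRGB;
    layer.backgroundBlurRadius = 20; // a non-zero radius requests the blur path
    return layer;
}
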
diff --git a/media/libstagefright/renderfright/include/renderengine/Mesh.h b/media/libstagefright/renderfright/include/renderengine/Mesh.h
new file mode 100644
index 0000000..167f13f
--- /dev/null
+++ b/media/libstagefright/renderfright/include/renderengine/Mesh.h
@@ -0,0 +1,205 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SF_RENDER_ENGINE_MESH_H
+#define SF_RENDER_ENGINE_MESH_H
+
+#include <vector>
+
+#include <stdint.h>
+
+namespace android {
+namespace renderengine {
+
+class Mesh {
+public:
+    class Builder;
+
+    enum Primitive {
+        TRIANGLES = 0x0004,      // GL_TRIANGLES
+        TRIANGLE_STRIP = 0x0005, // GL_TRIANGLE_STRIP
+        TRIANGLE_FAN = 0x0006    // GL_TRIANGLE_FAN
+    };
+
+    ~Mesh() = default;
+
+    /*
+     * VertexArray handles the stride automatically.
+     */
+    template <typename TYPE>
+    class VertexArray {
+        friend class Mesh;
+        float* mData;
+        size_t mStride;
+        size_t mOffset = 0;
+        VertexArray(float* data, size_t stride) : mData(data), mStride(stride) {}
+
+    public:
+        // Creates a vertex array at an offset so it's easier to append attributes from
+        // multiple sources.
+        VertexArray(VertexArray<TYPE>& other, size_t offset)
+              : mData(other.mData), mStride(other.mStride), mOffset(offset) {}
+
+        TYPE& operator[](size_t index) {
+            return *reinterpret_cast<TYPE*>(&mData[(index + mOffset) * mStride]);
+        }
+        TYPE const& operator[](size_t index) const {
+            return *reinterpret_cast<TYPE const*>(&mData[(index + mOffset) * mStride]);
+        }
+    };
+
+    template <typename TYPE>
+    VertexArray<TYPE> getPositionArray() {
+        return VertexArray<TYPE>(getPositions(), mStride);
+    }
+
+    template <typename TYPE>
+    VertexArray<TYPE> getTexCoordArray() {
+        return VertexArray<TYPE>(getTexCoords(), mStride);
+    }
+
+    template <typename TYPE>
+    VertexArray<TYPE> getCropCoordArray() {
+        return VertexArray<TYPE>(getCropCoords(), mStride);
+    }
+
+    template <typename TYPE>
+    VertexArray<TYPE> getShadowColorArray() {
+        return VertexArray<TYPE>(getShadowColor(), mStride);
+    }
+
+    template <typename TYPE>
+    VertexArray<TYPE> getShadowParamsArray() {
+        return VertexArray<TYPE>(getShadowParams(), mStride);
+    }
+
+    uint16_t* getIndicesArray() { return getIndices(); }
+
+    Primitive getPrimitive() const;
+
+    // returns a pointer to the vertices positions
+    float const* getPositions() const;
+
+    // returns a pointer to the vertices texture coordinates
+    float const* getTexCoords() const;
+
+    // returns a pointer to the vertices crop coordinates
+    float const* getCropCoords() const;
+
+    // returns a pointer to colors
+    float const* getShadowColor() const;
+
+    // returns a pointer to the shadow params
+    float const* getShadowParams() const;
+
+    // returns a pointer to indices
+    uint16_t const* getIndices() const;
+
+    // number of vertices in this mesh
+    size_t getVertexCount() const;
+
+    // dimension of vertices
+    size_t getVertexSize() const;
+
+    // dimension of texture coordinates
+    size_t getTexCoordsSize() const;
+
+    size_t getShadowParamsSize() const;
+
+    size_t getShadowColorSize() const;
+
+    size_t getIndexCount() const;
+
+    // return stride in bytes
+    size_t getByteStride() const;
+
+    // return stride in floats
+    size_t getStride() const;
+
+private:
+    Mesh(Primitive primitive, size_t vertexCount, size_t vertexSize, size_t texCoordSize,
+         size_t cropCoordsSize, size_t shadowColorSize, size_t shadowParamsSize, size_t indexCount);
+    Mesh(const Mesh&);
+    Mesh& operator=(const Mesh&);
+    Mesh const& operator=(const Mesh&) const;
+
+    float* getPositions();
+    float* getTexCoords();
+    float* getCropCoords();
+    float* getShadowColor();
+    float* getShadowParams();
+    uint16_t* getIndices();
+
+    std::vector<float> mVertices;
+    size_t mVertexCount;
+    size_t mVertexSize;
+    size_t mTexCoordsSize;
+    size_t mCropCoordsSize;
+    size_t mShadowColorSize;
+    size_t mShadowParamsSize;
+    size_t mStride;
+    Primitive mPrimitive;
+    std::vector<uint16_t> mIndices;
+    size_t mIndexCount;
+};
+
+class Mesh::Builder {
+public:
+    Builder& setPrimitive(Primitive primitive) {
+        mPrimitive = primitive;
+        return *this;
+    };
+    Builder& setVertices(size_t vertexCount, size_t vertexSize) {
+        mVertexCount = vertexCount;
+        mVertexSize = vertexSize;
+        return *this;
+    };
+    Builder& setTexCoords(size_t texCoordsSize) {
+        mTexCoordsSize = texCoordsSize;
+        return *this;
+    };
+    Builder& setCropCoords(size_t cropCoordsSize) {
+        mCropCoordsSize = cropCoordsSize;
+        return *this;
+    };
+    Builder& setShadowAttrs() {
+        mShadowParamsSize = 3;
+        mShadowColorSize = 4;
+        return *this;
+    };
+    Builder& setIndices(size_t indexCount) {
+        mIndexCount = indexCount;
+        return *this;
+    };
+    Mesh build() const {
+        return Mesh{mPrimitive,      mVertexCount,     mVertexSize,       mTexCoordsSize,
+                    mCropCoordsSize, mShadowColorSize, mShadowParamsSize, mIndexCount};
+    }
+
+private:
+    size_t mVertexCount = 0;
+    size_t mVertexSize = 0;
+    size_t mTexCoordsSize = 0;
+    size_t mCropCoordsSize = 0;
+    size_t mShadowColorSize = 0;
+    size_t mShadowParamsSize = 0;
+    size_t mIndexCount = 0;
+    Primitive mPrimitive;
+};
+
+} // namespace renderengine
+} // namespace android
+#endif /* SF_RENDER_ENGINE_MESH_H */
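
A hedged sketch of the Builder in use, filling a unit quad with 2D positions; it assumes the renderfright headers and math/vec2.h, and the function name is arbitrary:

// Sketch only: assumes the renderfright headers and C++17.
#include <math/vec2.h>
#include <renderengine/Mesh.h>

void fillUnitQuad() {
    using android::renderengine::Mesh;
    using android::vec2;
    Mesh quad = Mesh::Builder()
                        .setPrimitive(Mesh::TRIANGLE_FAN)
                        .setVertices(4 /* count */, 2 /* x, y per vertex */)
                        .setTexCoords(2 /* u, v per vertex */)
                        .build();
    Mesh::VertexArray<vec2> position = quad.getPositionArray<vec2>();
    position[0] = vec2(0.0f, 0.0f);
    position[1] = vec2(0.0f, 1.0f);
    position[2] = vec2(1.0f, 1.0f);
    position[3] = vec2(1.0f, 0.0f);
    // The mesh can now be handed to the GLES backend for drawing.
}
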
diff --git a/media/libstagefright/renderfright/include/renderengine/RenderEngine.h b/media/libstagefright/renderfright/include/renderengine/RenderEngine.h
new file mode 100644
index 0000000..09a0f65
--- /dev/null
+++ b/media/libstagefright/renderfright/include/renderengine/RenderEngine.h
@@ -0,0 +1,324 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SF_RENDERENGINE_H_
+#define SF_RENDERENGINE_H_
+
+#include <stdint.h>
+#include <sys/types.h>
+#include <memory>
+
+#include <android-base/unique_fd.h>
+#include <math/mat4.h>
+#include <renderengine/DisplaySettings.h>
+#include <renderengine/Framebuffer.h>
+#include <renderengine/Image.h>
+#include <renderengine/LayerSettings.h>
+#include <ui/GraphicTypes.h>
+#include <ui/Transform.h>
+
+/**
+ * Allows setting the RenderEngine backend to GLES (default) or Vulkan (NOT yet supported).
+ */
+#define PROPERTY_DEBUG_RENDERENGINE_BACKEND "debug.renderengine.backend"
+
+struct ANativeWindowBuffer;
+
+namespace android {
+
+class Rect;
+class Region;
+
+namespace renderengine {
+
+class BindNativeBufferAsFramebuffer;
+class Image;
+class Mesh;
+class Texture;
+struct RenderEngineCreationArgs;
+
+namespace threaded {
+class RenderEngineThreaded;
+}
+
+namespace impl {
+class RenderEngine;
+}
+
+enum class Protection {
+    UNPROTECTED = 1,
+    PROTECTED = 2,
+};
+
+class RenderEngine {
+public:
+    enum class ContextPriority {
+        LOW = 1,
+        MEDIUM = 2,
+        HIGH = 3,
+    };
+
+    enum class RenderEngineType {
+        GLES = 1,
+        THREADED = 2,
+    };
+
+    static std::unique_ptr<RenderEngine> create(const RenderEngineCreationArgs& args);
+
+    virtual ~RenderEngine() = 0;
+
+    // ----- BEGIN DEPRECATED INTERFACE -----
+    // This interface, while still in use until a suitable replacement is built,
+    // should be considered deprecated, apart from some methods which may still
+    // be used to support legacy behavior.
+    virtual void primeCache() const = 0;
+
+    // dump the extension strings. always call the base class.
+    virtual void dump(std::string& result) = 0;
+
+    virtual bool useNativeFenceSync() const = 0;
+    virtual bool useWaitSync() const = 0;
+    virtual void genTextures(size_t count, uint32_t* names) = 0;
+    virtual void deleteTextures(size_t count, uint32_t const* names) = 0;
+    virtual void bindExternalTextureImage(uint32_t texName, const Image& image) = 0;
+    // Legacy public method used by devices that don't support native fence
+    // synchronization in their GPU driver, as this method provides implicit
+    // synchronization for latching buffers.
+    virtual status_t bindExternalTextureBuffer(uint32_t texName, const sp<GraphicBuffer>& buffer,
+                                               const sp<Fence>& fence) = 0;
+    // Caches Image resources for this buffer, but does not bind the buffer to
+    // a particular texture.
+    // Note that work is deferred to an additional thread, i.e. this call
+    // is made asynchronously, but the caller can expect that cache/unbind calls
+    // are performed in a manner that's conflict serializable, i.e. unbinding
+    // a buffer should never occur before binding the buffer if the caller
+    // called {bind, cache}ExternalTextureBuffer before calling unbind.
+    virtual void cacheExternalTextureBuffer(const sp<GraphicBuffer>& buffer) = 0;
+    // Removes internal resources referenced by the bufferId. This method should be
+    // invoked when the caller will no longer hold a reference to a GraphicBuffer
+    // and needs to clean up its resources.
+    // Note that work is deferred to an additional thread, i.e. this call
+    // is made asynchronously, but the caller can expect that cache/unbind calls
+    // are performed in a manner that's conflict serializable, i.e. unbinding
+    // a buffer should never occur before binding the buffer if the caller
+    // called {bind, cache}ExternalTextureBuffer before calling unbind.
+    virtual void unbindExternalTextureBuffer(uint64_t bufferId) = 0;
+    // When binding a native buffer, it must be done before setViewportAndProjection.
+    // Returns NO_ERROR on a successful bind, or NO_MEMORY when there's no memory for allocation.
+    virtual status_t bindFrameBuffer(Framebuffer* framebuffer) = 0;
+    virtual void unbindFrameBuffer(Framebuffer* framebuffer) = 0;
+
+    enum class CleanupMode {
+        CLEAN_OUTPUT_RESOURCES,
+        CLEAN_ALL,
+    };
+    // Clean-up method that should be called on the main thread after the
+    // drawFence returned by drawLayers fires. This method will free up
+    // resources used by the most recently drawn frame. If the frame is still
+    // being drawn, then this call is silently ignored.
+    //
+    // If mode is CLEAN_OUTPUT_RESOURCES, then only resources related to the
+    // output framebuffer are cleaned up, including the sibling texture.
+    //
+    // If mode is CLEAN_ALL, then we also cleanup resources related to any input
+    // buffers.
+    //
+    // Returns true if resources were cleaned up, and false if we didn't need to
+    // do any work.
+    virtual bool cleanupPostRender(CleanupMode mode = CleanupMode::CLEAN_OUTPUT_RESOURCES) = 0;
+
+    // queries
+    virtual size_t getMaxTextureSize() const = 0;
+    virtual size_t getMaxViewportDims() const = 0;
+
+    // ----- END DEPRECATED INTERFACE -----
+
+    // ----- BEGIN NEW INTERFACE -----
+
+    virtual bool isProtected() const = 0;
+    virtual bool supportsProtectedContent() const = 0;
+    virtual bool useProtectedContext(bool useProtectedContext) = 0;
+
+    // Renders layers for a particular display via GPU composition. This method
+    // should be called for every display that needs to be rendered via the GPU.
+    // @param display The display-wide settings that should be applied prior to
+    // drawing any layers.
+    //
+    // Assumptions when calling this method:
+    // 1. There is exactly one caller - i.e. multi-threading is not supported.
+    // 2. Additional threads may be calling the {bind,cache}ExternalTexture
+    // methods above. But the main thread is responsible for holding resources
+    // such that Image destruction does not occur while this method is called.
+    //
+    // TODO(b/136806342): This behavior should ideally be fixed since
+    // the above two assumptions are brittle, as conditional thread safety
+    // may be insufficient when maximizing rendering performance in the future.
+    //
+    // @param layers The layers to draw onto the display, in Z-order.
+    // @param buffer The buffer which will be drawn to. This buffer will be
+    // ready once drawFence fires.
+    // @param useFramebufferCache True if the framebuffer cache should be used.
+    // If an implementation does not cache output framebuffers, then this
+    // parameter does nothing.
+    // @param bufferFence Fence signalling that the buffer is ready to be drawn
+    // to.
+    // @param drawFence A pointer to a fence, which will fire when the buffer
+    // has been drawn to and is ready to be examined. The fence will be
+    // initialized by this method. The caller will be responsible for owning the
+    // fence.
+    // @return An error code indicating whether drawing was successful. For
+    // now, this always returns NO_ERROR.
+    virtual status_t drawLayers(const DisplaySettings& display,
+                                const std::vector<const LayerSettings*>& layers,
+                                const sp<GraphicBuffer>& buffer, const bool useFramebufferCache,
+                                base::unique_fd&& bufferFence, base::unique_fd* drawFence) = 0;
+
+protected:
+    // Gets a framebuffer to render to. This framebuffer may or may not be
+    // cached depending on the implementation.
+    //
+    // Note that this method does not transfer ownership, so the caller must not
+    // live longer than RenderEngine.
+    virtual Framebuffer* getFramebufferForDrawing() = 0;
+    friend class BindNativeBufferAsFramebuffer;
+    friend class threaded::RenderEngineThreaded;
+};
+
+struct RenderEngineCreationArgs {
+    int pixelFormat;
+    uint32_t imageCacheSize;
+    bool useColorManagement;
+    bool enableProtectedContext;
+    bool precacheToneMapperShaderOnly;
+    bool supportsBackgroundBlur;
+    RenderEngine::ContextPriority contextPriority;
+    RenderEngine::RenderEngineType renderEngineType;
+
+    struct Builder;
+
+private:
+    // must be created by Builder via constructor with full argument list
+    RenderEngineCreationArgs(int _pixelFormat, uint32_t _imageCacheSize, bool _useColorManagement,
+                             bool _enableProtectedContext, bool _precacheToneMapperShaderOnly,
+                             bool _supportsBackgroundBlur,
+                             RenderEngine::ContextPriority _contextPriority,
+                             RenderEngine::RenderEngineType _renderEngineType)
+          : pixelFormat(_pixelFormat),
+            imageCacheSize(_imageCacheSize),
+            useColorManagement(_useColorManagement),
+            enableProtectedContext(_enableProtectedContext),
+            precacheToneMapperShaderOnly(_precacheToneMapperShaderOnly),
+            supportsBackgroundBlur(_supportsBackgroundBlur),
+            contextPriority(_contextPriority),
+            renderEngineType(_renderEngineType) {}
+    RenderEngineCreationArgs() = delete;
+};
+
+struct RenderEngineCreationArgs::Builder {
+    Builder() {}
+
+    Builder& setPixelFormat(int pixelFormat) {
+        this->pixelFormat = pixelFormat;
+        return *this;
+    }
+    Builder& setImageCacheSize(uint32_t imageCacheSize) {
+        this->imageCacheSize = imageCacheSize;
+        return *this;
+    }
+    Builder& setUseColorManagerment(bool useColorManagement) {
+        this->useColorManagement = useColorManagement;
+        return *this;
+    }
+    Builder& setEnableProtectedContext(bool enableProtectedContext) {
+        this->enableProtectedContext = enableProtectedContext;
+        return *this;
+    }
+    Builder& setPrecacheToneMapperShaderOnly(bool precacheToneMapperShaderOnly) {
+        this->precacheToneMapperShaderOnly = precacheToneMapperShaderOnly;
+        return *this;
+    }
+    Builder& setSupportsBackgroundBlur(bool supportsBackgroundBlur) {
+        this->supportsBackgroundBlur = supportsBackgroundBlur;
+        return *this;
+    }
+    Builder& setContextPriority(RenderEngine::ContextPriority contextPriority) {
+        this->contextPriority = contextPriority;
+        return *this;
+    }
+    Builder& setRenderEngineType(RenderEngine::RenderEngineType renderEngineType) {
+        this->renderEngineType = renderEngineType;
+        return *this;
+    }
+    RenderEngineCreationArgs build() const {
+        return RenderEngineCreationArgs(pixelFormat, imageCacheSize, useColorManagement,
+                                        enableProtectedContext, precacheToneMapperShaderOnly,
+                                        supportsBackgroundBlur, contextPriority, renderEngineType);
+    }
+
+private:
+    // 1 means RGBA_8888
+    int pixelFormat = 1;
+    uint32_t imageCacheSize = 0;
+    bool useColorManagement = true;
+    bool enableProtectedContext = false;
+    bool precacheToneMapperShaderOnly = false;
+    bool supportsBackgroundBlur = false;
+    RenderEngine::ContextPriority contextPriority = RenderEngine::ContextPriority::MEDIUM;
+    RenderEngine::RenderEngineType renderEngineType = RenderEngine::RenderEngineType::GLES;
+};
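
A hedged sketch of constructing the creation arguments through the Builder and handing them to RenderEngine::create(); the pixel format constant comes from ui/PixelFormat.h and the chosen values are only illustrative:

// Sketch only: assumes the renderfright headers and ui/PixelFormat.h.
#include <memory>

#include <renderengine/RenderEngine.h>
#include <ui/PixelFormat.h>

std::unique_ptr<android::renderengine::RenderEngine> createGlesEngine() {
    using android::renderengine::RenderEngine;
    using android::renderengine::RenderEngineCreationArgs;
    const RenderEngineCreationArgs args =
            RenderEngineCreationArgs::Builder()
                    .setPixelFormat(static_cast<int>(android::PIXEL_FORMAT_RGBA_8888))
                    .setImageCacheSize(2 /* illustrative */)
                    .setEnableProtectedContext(false)
                    .setSupportsBackgroundBlur(true)
                    .setContextPriority(RenderEngine::ContextPriority::HIGH)
                    .setRenderEngineType(RenderEngine::RenderEngineType::GLES)
                    .build();
    return RenderEngine::create(args);
}
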
+
+class BindNativeBufferAsFramebuffer {
+public:
+    BindNativeBufferAsFramebuffer(RenderEngine& engine, ANativeWindowBuffer* buffer,
+                                  const bool useFramebufferCache)
+          : mEngine(engine), mFramebuffer(mEngine.getFramebufferForDrawing()), mStatus(NO_ERROR) {
+        mStatus = mFramebuffer->setNativeWindowBuffer(buffer, mEngine.isProtected(),
+                                                      useFramebufferCache)
+                ? mEngine.bindFrameBuffer(mFramebuffer)
+                : NO_MEMORY;
+    }
+    ~BindNativeBufferAsFramebuffer() {
+        mFramebuffer->setNativeWindowBuffer(nullptr, false, /*arbitrary*/ true);
+        mEngine.unbindFrameBuffer(mFramebuffer);
+    }
+    status_t getStatus() const { return mStatus; }
+
+private:
+    RenderEngine& mEngine;
+    Framebuffer* mFramebuffer;
+    status_t mStatus;
+};
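
And a hedged sketch of the RAII helper in a rendering scope; engine and buffer are assumed to exist, and error handling is reduced to an early return:

// Sketch only: assumes the renderfright headers and a valid ANativeWindowBuffer.
#include <renderengine/RenderEngine.h>

void renderIntoBuffer(android::renderengine::RenderEngine& engine,
                      ANativeWindowBuffer* buffer) {
    using android::renderengine::BindNativeBufferAsFramebuffer;
    BindNativeBufferAsFramebuffer fbo(engine, buffer, true /* useFramebufferCache */);
    if (fbo.getStatus() != android::NO_ERROR) {
        return; // the buffer could not be bound as a framebuffer
    }
    // ...issue draw calls; the framebuffer is unbound when fbo goes out of scope...
}
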
+
+namespace impl {
+
+// impl::RenderEngine contains common implementation that is graphics back-end agnostic.
+class RenderEngine : public renderengine::RenderEngine {
+public:
+    virtual ~RenderEngine() = 0;
+
+    bool useNativeFenceSync() const override;
+    bool useWaitSync() const override;
+
+protected:
+    RenderEngine(const RenderEngineCreationArgs& args);
+    const RenderEngineCreationArgs mArgs;
+};
+
+} // namespace impl
+} // namespace renderengine
+} // namespace android
+
+#endif /* SF_RENDERENGINE_H_ */
diff --git a/media/libstagefright/renderfright/include/renderengine/Texture.h b/media/libstagefright/renderfright/include/renderengine/Texture.h
new file mode 100644
index 0000000..c69ace0
--- /dev/null
+++ b/media/libstagefright/renderfright/include/renderengine/Texture.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SF_RENDER_ENGINE_TEXTURE_H
+#define SF_RENDER_ENGINE_TEXTURE_H
+
+#include <stdint.h>
+
+#include <math/mat4.h>
+
+namespace android {
+namespace renderengine {
+
+class Texture {
+public:
+    enum Target { TEXTURE_2D = 0x0DE1, TEXTURE_EXTERNAL = 0x8D65 };
+
+    Texture();
+    Texture(Target textureTarget, uint32_t textureName);
+    ~Texture();
+
+    void init(Target textureTarget, uint32_t textureName);
+
+    void setMatrix(float const* matrix);
+    void setFiltering(bool enabled);
+    void setDimensions(size_t width, size_t height);
+
+    uint32_t getTextureName() const;
+    uint32_t getTextureTarget() const;
+
+    const mat4& getMatrix() const;
+    bool getFiltering() const;
+    size_t getWidth() const;
+    size_t getHeight() const;
+
+private:
+    uint32_t mTextureName;
+    uint32_t mTextureTarget;
+    size_t mWidth;
+    size_t mHeight;
+    bool mFiltering;
+    mat4 mTextureMatrix;
+};
+
+} // namespace renderengine
+} // namespace android
+#endif /* SF_RENDER_ENGINE_TEXTURE_H */
diff --git a/media/libstagefright/renderfright/include/renderengine/mock/Framebuffer.h b/media/libstagefright/renderfright/include/renderengine/mock/Framebuffer.h
new file mode 100644
index 0000000..dfb6a4e
--- /dev/null
+++ b/media/libstagefright/renderfright/include/renderengine/mock/Framebuffer.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <gmock/gmock.h>
+#include <renderengine/Framebuffer.h>
+
+namespace android {
+namespace renderengine {
+namespace mock {
+
+class Framebuffer : public renderengine::Framebuffer {
+public:
+    Framebuffer();
+    ~Framebuffer() override;
+
+    MOCK_METHOD3(setNativeWindowBuffer, bool(ANativeWindowBuffer*, bool, const bool));
+};
+
+} // namespace mock
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/include/renderengine/mock/Image.h b/media/libstagefright/renderfright/include/renderengine/mock/Image.h
new file mode 100644
index 0000000..2b0eed1
--- /dev/null
+++ b/media/libstagefright/renderfright/include/renderengine/mock/Image.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <gmock/gmock.h>
+#include <renderengine/Image.h>
+
+namespace android {
+namespace renderengine {
+namespace mock {
+
+class Image : public renderengine::Image {
+public:
+    Image();
+    ~Image() override;
+
+    MOCK_METHOD2(setNativeWindowBuffer, bool(ANativeWindowBuffer* buffer, bool isProtected));
+};
+
+} // namespace mock
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/include/renderengine/mock/RenderEngine.h b/media/libstagefright/renderfright/include/renderengine/mock/RenderEngine.h
new file mode 100644
index 0000000..e03dd58
--- /dev/null
+++ b/media/libstagefright/renderfright/include/renderengine/mock/RenderEngine.h
@@ -0,0 +1,68 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <gmock/gmock.h>
+#include <renderengine/DisplaySettings.h>
+#include <renderengine/LayerSettings.h>
+#include <renderengine/Mesh.h>
+#include <renderengine/RenderEngine.h>
+#include <renderengine/Texture.h>
+#include <ui/Fence.h>
+#include <ui/GraphicBuffer.h>
+#include <ui/Region.h>
+
+namespace android {
+namespace renderengine {
+namespace mock {
+
+class RenderEngine : public renderengine::RenderEngine {
+public:
+    RenderEngine();
+    ~RenderEngine() override;
+
+    MOCK_METHOD0(getFramebufferForDrawing, Framebuffer*());
+    MOCK_CONST_METHOD0(primeCache, void());
+    MOCK_METHOD1(dump, void(std::string&));
+    MOCK_CONST_METHOD0(useNativeFenceSync, bool());
+    MOCK_CONST_METHOD0(useWaitSync, bool());
+    MOCK_CONST_METHOD0(isCurrent, bool());
+    MOCK_METHOD2(genTextures, void(size_t, uint32_t*));
+    MOCK_METHOD2(deleteTextures, void(size_t, uint32_t const*));
+    MOCK_METHOD2(bindExternalTextureImage, void(uint32_t, const renderengine::Image&));
+    MOCK_METHOD1(cacheExternalTextureBuffer, void(const sp<GraphicBuffer>&));
+    MOCK_METHOD3(bindExternalTextureBuffer,
+                 status_t(uint32_t, const sp<GraphicBuffer>&, const sp<Fence>&));
+    MOCK_METHOD1(unbindExternalTextureBuffer, void(uint64_t));
+    MOCK_METHOD1(bindFrameBuffer, status_t(renderengine::Framebuffer*));
+    MOCK_METHOD1(unbindFrameBuffer, void(renderengine::Framebuffer*));
+    MOCK_METHOD1(drawMesh, void(const renderengine::Mesh&));
+    MOCK_CONST_METHOD0(getMaxTextureSize, size_t());
+    MOCK_CONST_METHOD0(getMaxViewportDims, size_t());
+    MOCK_CONST_METHOD0(isProtected, bool());
+    MOCK_CONST_METHOD0(supportsProtectedContent, bool());
+    MOCK_METHOD1(useProtectedContext, bool(bool));
+    MOCK_METHOD1(cleanupPostRender, bool(CleanupMode mode));
+    MOCK_METHOD6(drawLayers,
+                 status_t(const DisplaySettings&, const std::vector<const LayerSettings*>&,
+                          const sp<GraphicBuffer>&, const bool, base::unique_fd&&,
+                          base::unique_fd*));
+};
+
+} // namespace mock
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/include/renderengine/private/Description.h b/media/libstagefright/renderfright/include/renderengine/private/Description.h
new file mode 100644
index 0000000..a62161a
--- /dev/null
+++ b/media/libstagefright/renderfright/include/renderengine/private/Description.h
@@ -0,0 +1,92 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SF_RENDER_ENGINE_DESCRIPTION_H_
+#define SF_RENDER_ENGINE_DESCRIPTION_H_
+
+#include <renderengine/Texture.h>
+#include <ui/GraphicTypes.h>
+
+namespace android {
+namespace renderengine {
+
+/*
+ * This structure holds the state of the rendering engine. It is used to
+ * generate a corresponding GLSL program and set the appropriate uniforms.
+ */
+struct Description {
+    enum class TransferFunction : int {
+        LINEAR,
+        SRGB,
+        ST2084,
+        HLG, // Hybrid Log-Gamma for HDR.
+    };
+
+    static TransferFunction dataSpaceToTransferFunction(ui::Dataspace dataSpace);
+
+    Description() = default;
+    ~Description() = default;
+
+    bool hasInputTransformMatrix() const;
+    bool hasOutputTransformMatrix() const;
+    bool hasColorMatrix() const;
+
+    // whether textures are premultiplied
+    bool isPremultipliedAlpha = false;
+    // whether this layer is marked as opaque
+    bool isOpaque = true;
+
+    // corner radius of the layer
+    float cornerRadius = 0;
+
+    // Size of the rounded rectangle we are cropping to
+    half2 cropSize;
+
+    // Texture this layer uses
+    Texture texture;
+    bool textureEnabled = false;
+
+    // color used when texturing is disabled or when setting alpha.
+    half4 color;
+
+    // true if the sampled pixel values are in Y410/BT2020 rather than RGBA
+    bool isY410BT2020 = false;
+
+    // transfer functions for the input/output
+    TransferFunction inputTransferFunction = TransferFunction::LINEAR;
+    TransferFunction outputTransferFunction = TransferFunction::LINEAR;
+
+    float displayMaxLuminance;
+    float maxMasteringLuminance;
+    float maxContentLuminance;
+
+    // projection matrix
+    mat4 projectionMatrix;
+
+    // The color matrix will be applied in linear space right before OETF.
+    mat4 colorMatrix;
+    mat4 inputTransformMatrix;
+    mat4 outputTransformMatrix;
+
+    // True if this layer will draw a shadow.
+    bool drawShadows = false;
+};
+
+} // namespace renderengine
+} // namespace android
+
+#endif /* SF_RENDER_ENGINE_DESCRIPTION_H_ */
diff --git a/media/libstagefright/renderfright/mock/Framebuffer.cpp b/media/libstagefright/renderfright/mock/Framebuffer.cpp
new file mode 100644
index 0000000..fbdcaab
--- /dev/null
+++ b/media/libstagefright/renderfright/mock/Framebuffer.cpp
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <renderengine/mock/Framebuffer.h>
+
+namespace android {
+namespace renderengine {
+namespace mock {
+
+// The Google Mock documentation recommends explicit non-header instantiations
+// for better compile time performance.
+Framebuffer::Framebuffer() = default;
+Framebuffer::~Framebuffer() = default;
+
+} // namespace mock
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/mock/Image.cpp b/media/libstagefright/renderfright/mock/Image.cpp
new file mode 100644
index 0000000..57f4346
--- /dev/null
+++ b/media/libstagefright/renderfright/mock/Image.cpp
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <renderengine/mock/Image.h>
+
+namespace android {
+namespace renderengine {
+namespace mock {
+
+// The Google Mock documentation recommends explicit non-header instantiations
+// for better compile time performance.
+Image::Image() = default;
+Image::~Image() = default;
+
+} // namespace mock
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/mock/RenderEngine.cpp b/media/libstagefright/renderfright/mock/RenderEngine.cpp
new file mode 100644
index 0000000..261636d
--- /dev/null
+++ b/media/libstagefright/renderfright/mock/RenderEngine.cpp
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <renderengine/mock/RenderEngine.h>
+
+namespace android {
+namespace renderengine {
+namespace mock {
+
+// The Google Mock documentation recommends explicit non-header instantiations
+// for better compile time performance.
+RenderEngine::RenderEngine() = default;
+RenderEngine::~RenderEngine() = default;
+
+} // namespace mock
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/tests/Android.bp b/media/libstagefright/renderfright/tests/Android.bp
new file mode 100644
index 0000000..9fee646
--- /dev/null
+++ b/media/libstagefright/renderfright/tests/Android.bp
@@ -0,0 +1,41 @@
+// Copyright 2018 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+cc_test {
+    name: "librenderfright_test",
+    defaults: ["surfaceflinger_defaults"],
+    test_suites: ["device-tests"],
+    srcs: [
+        "RenderEngineTest.cpp",
+        "RenderEngineThreadedTest.cpp",
+    ],
+    static_libs: [
+        "libgmock",
+        "librenderfright",
+        "librenderfright_mocks",
+    ],
+    shared_libs: [
+        "libbase",
+        "libcutils",
+        "libEGL",
+        "libGLESv2",
+        "libgui",
+        "liblog",
+        "libnativewindow",
+        "libprocessgroup",
+        "libsync",
+        "libui",
+        "libutils",
+    ],
+}
diff --git a/media/libstagefright/renderfright/tests/RenderEngineTest.cpp b/media/libstagefright/renderfright/tests/RenderEngineTest.cpp
new file mode 100644
index 0000000..730f606
--- /dev/null
+++ b/media/libstagefright/renderfright/tests/RenderEngineTest.cpp
@@ -0,0 +1,1469 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// TODO(b/129481165): remove the #pragma below and fix conversion issues
+#pragma clang diagnostic push
+#pragma clang diagnostic ignored "-Wconversion"
+
+#include <chrono>
+#include <condition_variable>
+#include <fstream>
+
+#include <cutils/properties.h>
+#include <gtest/gtest.h>
+#include <renderengine/RenderEngine.h>
+#include <sync/sync.h>
+#include <ui/PixelFormat.h>
+#include "../gl/GLESRenderEngine.h"
+#include "../threaded/RenderEngineThreaded.h"
+
+constexpr int DEFAULT_DISPLAY_WIDTH = 128;
+constexpr int DEFAULT_DISPLAY_HEIGHT = 256;
+constexpr int DEFAULT_DISPLAY_OFFSET = 64;
+constexpr bool WRITE_BUFFER_TO_FILE_ON_FAILURE = false;
+
+namespace android {
+
+struct RenderEngineTest : public ::testing::Test {
+    static void SetUpTestSuite() {
+        sRE = renderengine::gl::GLESRenderEngine::create(
+                renderengine::RenderEngineCreationArgs::Builder()
+                        .setPixelFormat(static_cast<int>(ui::PixelFormat::RGBA_8888))
+                        .setImageCacheSize(1)
+                        .setUseColorManagerment(false)
+                        .setEnableProtectedContext(false)
+                        .setPrecacheToneMapperShaderOnly(false)
+                        .setSupportsBackgroundBlur(true)
+                        .setContextPriority(renderengine::RenderEngine::ContextPriority::MEDIUM)
+                        .setRenderEngineType(renderengine::RenderEngine::RenderEngineType::GLES)
+                        .build());
+    }
+
+    static void TearDownTestSuite() {
+        // The ordering here is important - sCurrentBuffer must live longer
+        // than RenderEngine to avoid a null reference on tear-down.
+        sRE = nullptr;
+        sCurrentBuffer = nullptr;
+    }
+
+    static sp<GraphicBuffer> allocateDefaultBuffer() {
+        return new GraphicBuffer(DEFAULT_DISPLAY_WIDTH, DEFAULT_DISPLAY_HEIGHT,
+                                 HAL_PIXEL_FORMAT_RGBA_8888, 1,
+                                 GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN |
+                                         GRALLOC_USAGE_HW_RENDER,
+                                 "output");
+    }
+
+    // Allocates a buffer of the given size (typically 1x1) to fill with a solid color
+    static sp<GraphicBuffer> allocateSourceBuffer(uint32_t width, uint32_t height) {
+        return new GraphicBuffer(width, height, HAL_PIXEL_FORMAT_RGBA_8888, 1,
+                                 GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN |
+                                         GRALLOC_USAGE_HW_TEXTURE,
+                                 "input");
+    }
+
+    RenderEngineTest() { mBuffer = allocateDefaultBuffer(); }
+
+    ~RenderEngineTest() {
+        if (WRITE_BUFFER_TO_FILE_ON_FAILURE && ::testing::Test::HasFailure()) {
+            writeBufferToFile("/data/texture_out_");
+        }
+        for (uint32_t texName : mTexNames) {
+            sRE->deleteTextures(1, &texName);
+            EXPECT_FALSE(sRE->isTextureNameKnownForTesting(texName));
+        }
+    }
+
+    void writeBufferToFile(const char* basename) {
+        std::string filename(basename);
+        filename.append(::testing::UnitTest::GetInstance()->current_test_info()->name());
+        filename.append(".ppm");
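+        // On failure this leaves e.g. /data/texture_out_<TestName>.ppm on the
+        // device, which can be fetched with adb pull for inspection.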
+        std::ofstream file(filename.c_str(), std::ios::binary);
+        if (!file.is_open()) {
+            ALOGE("Unable to open file: %s", filename.c_str());
+            ALOGE("You may need to do: \"adb shell setenforce 0\" to enable "
+                  "surfaceflinger to write debug images");
+            return;
+        }
+
+        uint8_t* pixels;
+        mBuffer->lock(GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN,
+                      reinterpret_cast<void**>(&pixels));
+
+        file << "P6\n";
+        file << mBuffer->getWidth() << "\n";
+        file << mBuffer->getHeight() << "\n";
+        file << 255 << "\n";
+
+        std::vector<uint8_t> outBuffer(mBuffer->getWidth() * mBuffer->getHeight() * 3);
+        auto outPtr = reinterpret_cast<uint8_t*>(outBuffer.data());
+
+        for (int32_t j = 0; j < mBuffer->getHeight(); j++) {
+            const uint8_t* src = pixels + (mBuffer->getStride() * j) * 4;
+            for (int32_t i = 0; i < mBuffer->getWidth(); i++) {
+                // Only copy R, G and B components
+                outPtr[0] = src[0];
+                outPtr[1] = src[1];
+                outPtr[2] = src[2];
+                outPtr += 3;
+
+                src += 4;
+            }
+        }
+        file.write(reinterpret_cast<char*>(outBuffer.data()), outBuffer.size());
+        mBuffer->unlock();
+    }
+
+    void expectBufferColor(const Region& region, uint8_t r, uint8_t g, uint8_t b, uint8_t a) {
+        size_t c;
+        Rect const* rect = region.getArray(&c);
+        for (size_t i = 0; i < c; i++, rect++) {
+            expectBufferColor(*rect, r, g, b, a);
+        }
+    }
+
+    void expectBufferColor(const Rect& rect, uint8_t r, uint8_t g, uint8_t b, uint8_t a,
+                           uint8_t tolerance = 0) {
+        auto colorCompare = [tolerance](const uint8_t* colorA, const uint8_t* colorB) {
+            auto colorBitCompare = [tolerance](uint8_t a, uint8_t b) {
+                uint8_t tmp = a >= b ? a - b : b - a;
+                return tmp <= tolerance;
+            };
+            return std::equal(colorA, colorA + 4, colorB, colorBitCompare);
+        };
+
+        expectBufferColor(rect, r, g, b, a, colorCompare);
+    }
+
+    void expectBufferColor(const Rect& region, uint8_t r, uint8_t g, uint8_t b, uint8_t a,
+                           std::function<bool(const uint8_t* a, const uint8_t* b)> colorCompare) {
+        uint8_t* pixels;
+        mBuffer->lock(GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN,
+                      reinterpret_cast<void**>(&pixels));
+        int32_t maxFails = 10;
+        int32_t fails = 0;
+        for (int32_t j = 0; j < region.getHeight(); j++) {
+            const uint8_t* src =
+                    pixels + (mBuffer->getStride() * (region.top + j) + region.left) * 4;
+            for (int32_t i = 0; i < region.getWidth(); i++) {
+                const uint8_t expected[4] = {r, g, b, a};
+                bool equal = colorCompare(src, expected);
+                EXPECT_TRUE(equal)
+                        << "pixel @ (" << region.left + i << ", " << region.top + j << "): "
+                        << "expected (" << static_cast<uint32_t>(r) << ", "
+                        << static_cast<uint32_t>(g) << ", " << static_cast<uint32_t>(b) << ", "
+                        << static_cast<uint32_t>(a) << "), "
+                        << "got (" << static_cast<uint32_t>(src[0]) << ", "
+                        << static_cast<uint32_t>(src[1]) << ", " << static_cast<uint32_t>(src[2])
+                        << ", " << static_cast<uint32_t>(src[3]) << ")";
+                src += 4;
+                if (!equal && ++fails >= maxFails) {
+                    break;
+                }
+            }
+            if (fails >= maxFails) {
+                break;
+            }
+        }
+        mBuffer->unlock();
+    }
+
+    void expectAlpha(const Rect& rect, uint8_t a) {
+        auto colorCompare = [](const uint8_t* colorA, const uint8_t* colorB) {
+            return colorA[3] == colorB[3];
+        };
+        expectBufferColor(rect, 0 /* r */, 0 /* g */, 0 /* b */, a, colorCompare);
+    }
+
+    void expectShadowColor(const renderengine::LayerSettings& castingLayer,
+                           const renderengine::ShadowSettings& shadow, const ubyte4& casterColor,
+                           const ubyte4& backgroundColor) {
+        const Rect casterRect(castingLayer.geometry.boundaries);
+        Region casterRegion = Region(casterRect);
+        const float casterCornerRadius = castingLayer.geometry.roundedCornersRadius;
+        if (casterCornerRadius > 0.0f) {
+            // ignore the corners if a corner radius is set
+            Rect cornerRect(casterCornerRadius, casterCornerRadius);
+            casterRegion.subtractSelf(cornerRect.offsetTo(casterRect.left, casterRect.top));
+            casterRegion.subtractSelf(
+                    cornerRect.offsetTo(casterRect.right - casterCornerRadius, casterRect.top));
+            casterRegion.subtractSelf(
+                    cornerRect.offsetTo(casterRect.left, casterRect.bottom - casterCornerRadius));
+            casterRegion.subtractSelf(cornerRect.offsetTo(casterRect.right - casterCornerRadius,
+                                                          casterRect.bottom - casterCornerRadius));
+        }
+
+        const float shadowInset = shadow.length * -1.0f;
+        const Rect casterWithShadow =
+                Rect(casterRect).inset(shadowInset, shadowInset, shadowInset, shadowInset);
+        const Region shadowRegion = Region(casterWithShadow).subtractSelf(casterRect);
+        const Region backgroundRegion = Region(fullscreenRect()).subtractSelf(casterWithShadow);
+
+        // verify casting layer
+        expectBufferColor(casterRegion, casterColor.r, casterColor.g, casterColor.b, casterColor.a);
+
+        // verify shadows by testing just the alpha, since it's difficult to validate the exact shadow color
+        size_t c;
+        Rect const* r = shadowRegion.getArray(&c);
+        for (size_t i = 0; i < c; i++, r++) {
+            expectAlpha(*r, 255);
+        }
+
+        // verify background
+        expectBufferColor(backgroundRegion, backgroundColor.r, backgroundColor.g, backgroundColor.b,
+                          backgroundColor.a);
+    }
+
+    static renderengine::ShadowSettings getShadowSettings(const vec2& casterPos, float shadowLength,
+                                                          bool casterIsTranslucent) {
+        renderengine::ShadowSettings shadow;
+        shadow.ambientColor = {0.0f, 0.0f, 0.0f, 0.039f};
+        shadow.spotColor = {0.0f, 0.0f, 0.0f, 0.19f};
+        shadow.lightPos = vec3(casterPos.x, casterPos.y, 0);
+        shadow.lightRadius = 0.0f;
+        shadow.length = shadowLength;
+        shadow.casterIsTranslucent = casterIsTranslucent;
+        return shadow;
+    }
+
+    static Rect fullscreenRect() { return Rect(DEFAULT_DISPLAY_WIDTH, DEFAULT_DISPLAY_HEIGHT); }
+
+    static Rect offsetRect() {
+        return Rect(DEFAULT_DISPLAY_OFFSET, DEFAULT_DISPLAY_OFFSET, DEFAULT_DISPLAY_WIDTH,
+                    DEFAULT_DISPLAY_HEIGHT);
+    }
+
+    static Rect offsetRectAtZero() {
+        return Rect(DEFAULT_DISPLAY_WIDTH - DEFAULT_DISPLAY_OFFSET,
+                    DEFAULT_DISPLAY_HEIGHT - DEFAULT_DISPLAY_OFFSET);
+    }
+
+    void invokeDraw(renderengine::DisplaySettings settings,
+                    std::vector<const renderengine::LayerSettings*> layers,
+                    sp<GraphicBuffer> buffer) {
+        base::unique_fd fence;
+        status_t status =
+                sRE->drawLayers(settings, layers, buffer, true, base::unique_fd(), &fence);
+        sCurrentBuffer = buffer;
+
+        int fd = fence.release();
+        if (fd >= 0) {
+            sync_wait(fd, -1);
+            close(fd);
+        }
+
+        ASSERT_EQ(NO_ERROR, status);
+        if (layers.size() > 0) {
+            ASSERT_TRUE(sRE->isFramebufferImageCachedForTesting(buffer->getId()));
+        }
+    }
+
+    void drawEmptyLayers() {
+        renderengine::DisplaySettings settings;
+        std::vector<const renderengine::LayerSettings*> layers;
+        // Meaningless buffer since we don't do any drawing
+        sp<GraphicBuffer> buffer = new GraphicBuffer();
+        invokeDraw(settings, layers, buffer);
+    }
+
+    template <typename SourceVariant>
+    void fillBuffer(half r, half g, half b, half a);
+
+    template <typename SourceVariant>
+    void fillRedBuffer();
+
+    template <typename SourceVariant>
+    void fillGreenBuffer();
+
+    template <typename SourceVariant>
+    void fillBlueBuffer();
+
+    template <typename SourceVariant>
+    void fillRedTransparentBuffer();
+
+    template <typename SourceVariant>
+    void fillRedOffsetBuffer();
+
+    template <typename SourceVariant>
+    void fillBufferPhysicalOffset();
+
+    template <typename SourceVariant>
+    void fillBufferCheckers(uint32_t rotation);
+
+    template <typename SourceVariant>
+    void fillBufferCheckersRotate0();
+
+    template <typename SourceVariant>
+    void fillBufferCheckersRotate90();
+
+    template <typename SourceVariant>
+    void fillBufferCheckersRotate180();
+
+    template <typename SourceVariant>
+    void fillBufferCheckersRotate270();
+
+    template <typename SourceVariant>
+    void fillBufferWithLayerTransform();
+
+    template <typename SourceVariant>
+    void fillBufferLayerTransform();
+
+    template <typename SourceVariant>
+    void fillBufferWithColorTransform();
+
+    template <typename SourceVariant>
+    void fillBufferColorTransform();
+
+    template <typename SourceVariant>
+    void fillRedBufferWithRoundedCorners();
+
+    template <typename SourceVariant>
+    void fillBufferWithRoundedCorners();
+
+    template <typename SourceVariant>
+    void fillBufferAndBlurBackground();
+
+    template <typename SourceVariant>
+    void overlayCorners();
+
+    void fillRedBufferTextureTransform();
+
+    void fillBufferTextureTransform();
+
+    void fillRedBufferWithPremultiplyAlpha();
+
+    void fillBufferWithPremultiplyAlpha();
+
+    void fillRedBufferWithoutPremultiplyAlpha();
+
+    void fillBufferWithoutPremultiplyAlpha();
+
+    void fillGreenColorBufferThenClearRegion();
+
+    void clearLeftRegion();
+
+    void clearRegion();
+
+    template <typename SourceVariant>
+    void drawShadow(const renderengine::LayerSettings& castingLayer,
+                    const renderengine::ShadowSettings& shadow, const ubyte4& casterColor,
+                    const ubyte4& backgroundColor);
+
+    // Keep around the same renderengine object to save on initialization time.
+    // For now, exercise the GL backend directly so that some caching specifics
+    // can be tested without changing the interface.
+    static std::unique_ptr<renderengine::gl::GLESRenderEngine> sRE;
+    // Hack to avoid NPE in the EGL driver: the GraphicBuffer needs to
+    // be freed *after* RenderEngine is destroyed, so that the EGL image is
+    // destroyed first.
+    static sp<GraphicBuffer> sCurrentBuffer;
+
+    sp<GraphicBuffer> mBuffer;
+
+    std::vector<uint32_t> mTexNames;
+};
+
+std::unique_ptr<renderengine::gl::GLESRenderEngine> RenderEngineTest::sRE = nullptr;
+sp<GraphicBuffer> RenderEngineTest::sCurrentBuffer = nullptr;
+
+struct ColorSourceVariant {
+    static void fillColor(renderengine::LayerSettings& layer, half r, half g, half b,
+                          RenderEngineTest* /*fixture*/) {
+        layer.source.solidColor = half3(r, g, b);
+    }
+};
+
+struct RelaxOpaqueBufferVariant {
+    static void setOpaqueBit(renderengine::LayerSettings& layer) {
+        layer.source.buffer.isOpaque = false;
+    }
+
+    static uint8_t getAlphaChannel() { return 255; }
+};
+
+struct ForceOpaqueBufferVariant {
+    static void setOpaqueBit(renderengine::LayerSettings& layer) {
+        layer.source.buffer.isOpaque = true;
+    }
+
+    static uint8_t getAlphaChannel() {
+        // The isOpaque bit overrides the alpha channel, so this value can be
+        // arbitrary.
+        return 10;
+    }
+};
+
+template <typename OpaquenessVariant>
+struct BufferSourceVariant {
+    static void fillColor(renderengine::LayerSettings& layer, half r, half g, half b,
+                          RenderEngineTest* fixture) {
+        sp<GraphicBuffer> buf = RenderEngineTest::allocateSourceBuffer(1, 1);
+        uint32_t texName;
+        fixture->sRE->genTextures(1, &texName);
+        fixture->mTexNames.push_back(texName);
+
+        uint8_t* pixels;
+        buf->lock(GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN,
+                  reinterpret_cast<void**>(&pixels));
+
+        for (int32_t j = 0; j < buf->getHeight(); j++) {
+            uint8_t* iter = pixels + (buf->getStride() * j) * 4;
+            for (int32_t i = 0; i < buf->getWidth(); i++) {
+                iter[0] = uint8_t(r * 255);
+                iter[1] = uint8_t(g * 255);
+                iter[2] = uint8_t(b * 255);
+                iter[3] = OpaquenessVariant::getAlphaChannel();
+                iter += 4;
+            }
+        }
+
+        buf->unlock();
+
+        layer.source.buffer.buffer = buf;
+        layer.source.buffer.textureName = texName;
+        OpaquenessVariant::setOpaqueBit(layer);
+    }
+};
+
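+// Each fill*<SourceVariant>() helper below renders the same scene with either a
+// solid-color source (ColorSourceVariant) or a 1x1 buffer source
+// (BufferSourceVariant<...>), so the _colorSource, _opaqueBufferSource and
+// _bufferSource tests exercise both layer input paths.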
+template <typename SourceVariant>
+void RenderEngineTest::fillBuffer(half r, half g, half b, half a) {
+    renderengine::DisplaySettings settings;
+    settings.physicalDisplay = fullscreenRect();
+    settings.clip = fullscreenRect();
+
+    std::vector<const renderengine::LayerSettings*> layers;
+
+    renderengine::LayerSettings layer;
+    layer.geometry.boundaries = fullscreenRect().toFloatRect();
+    SourceVariant::fillColor(layer, r, g, b, this);
+    layer.alpha = a;
+
+    layers.push_back(&layer);
+
+    invokeDraw(settings, layers, mBuffer);
+}
+
+template <typename SourceVariant>
+void RenderEngineTest::fillRedBuffer() {
+    fillBuffer<SourceVariant>(1.0f, 0.0f, 0.0f, 1.0f);
+    expectBufferColor(fullscreenRect(), 255, 0, 0, 255);
+}
+
+template <typename SourceVariant>
+void RenderEngineTest::fillGreenBuffer() {
+    fillBuffer<SourceVariant>(0.0f, 1.0f, 0.0f, 1.0f);
+    expectBufferColor(fullscreenRect(), 0, 255, 0, 255);
+}
+
+template <typename SourceVariant>
+void RenderEngineTest::fillBlueBuffer() {
+    fillBuffer<SourceVariant>(0.0f, 0.0f, 1.0f, 1.0f);
+    expectBufferColor(fullscreenRect(), 0, 0, 255, 255);
+}
+
+template <typename SourceVariant>
+void RenderEngineTest::fillRedTransparentBuffer() {
+    fillBuffer<SourceVariant>(1.0f, 0.0f, 0.0f, .2f);
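+    // The 0.2 layer alpha is premultiplied into the output, so solid red is
+    // expected as roughly (51, 0, 0, 51): 255 * 0.2 ≈ 51.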
+    expectBufferColor(fullscreenRect(), 51, 0, 0, 51);
+}
+
+template <typename SourceVariant>
+void RenderEngineTest::fillRedOffsetBuffer() {
+    renderengine::DisplaySettings settings;
+    settings.physicalDisplay = offsetRect();
+    settings.clip = offsetRectAtZero();
+
+    std::vector<const renderengine::LayerSettings*> layers;
+
+    renderengine::LayerSettings layer;
+    layer.geometry.boundaries = offsetRectAtZero().toFloatRect();
+    SourceVariant::fillColor(layer, 1.0f, 0.0f, 0.0f, this);
+    layer.alpha = 1.0f;
+
+    layers.push_back(&layer);
+    invokeDraw(settings, layers, mBuffer);
+}
+
+template <typename SourceVariant>
+void RenderEngineTest::fillBufferPhysicalOffset() {
+    fillRedOffsetBuffer<SourceVariant>();
+
+    expectBufferColor(Rect(DEFAULT_DISPLAY_OFFSET, DEFAULT_DISPLAY_OFFSET, DEFAULT_DISPLAY_WIDTH,
+                           DEFAULT_DISPLAY_HEIGHT),
+                      255, 0, 0, 255);
+    Rect offsetRegionLeft(DEFAULT_DISPLAY_OFFSET, DEFAULT_DISPLAY_HEIGHT);
+    Rect offsetRegionTop(DEFAULT_DISPLAY_WIDTH, DEFAULT_DISPLAY_OFFSET);
+
+    expectBufferColor(offsetRegionLeft, 0, 0, 0, 0);
+    expectBufferColor(offsetRegionTop, 0, 0, 0, 0);
+}
+
+template <typename SourceVariant>
+void RenderEngineTest::fillBufferCheckers(uint32_t orientationFlag) {
+    renderengine::DisplaySettings settings;
+    settings.physicalDisplay = fullscreenRect();
+    // Here logical space is 2x2
+    settings.clip = Rect(2, 2);
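+    // With a 2x2 clip scaled up to the full physical display, each 1x1 logical
+    // rect below covers one quadrant of the output, which is why the Rotate*
+    // expectations check half-width by half-height regions.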
+    settings.orientation = orientationFlag;
+
+    std::vector<const renderengine::LayerSettings*> layers;
+
+    renderengine::LayerSettings layerOne;
+    Rect rectOne(0, 0, 1, 1);
+    layerOne.geometry.boundaries = rectOne.toFloatRect();
+    SourceVariant::fillColor(layerOne, 1.0f, 0.0f, 0.0f, this);
+    layerOne.alpha = 1.0f;
+
+    renderengine::LayerSettings layerTwo;
+    Rect rectTwo(0, 1, 1, 2);
+    layerTwo.geometry.boundaries = rectTwo.toFloatRect();
+    SourceVariant::fillColor(layerTwo, 0.0f, 1.0f, 0.0f, this);
+    layerTwo.alpha = 1.0f;
+
+    renderengine::LayerSettings layerThree;
+    Rect rectThree(1, 0, 2, 1);
+    layerThree.geometry.boundaries = rectThree.toFloatRect();
+    SourceVariant::fillColor(layerThree, 0.0f, 0.0f, 1.0f, this);
+    layerThree.alpha = 1.0f;
+
+    layers.push_back(&layerOne);
+    layers.push_back(&layerTwo);
+    layers.push_back(&layerThree);
+
+    invokeDraw(settings, layers, mBuffer);
+}
+
+template <typename SourceVariant>
+void RenderEngineTest::fillBufferCheckersRotate0() {
+    fillBufferCheckers<SourceVariant>(ui::Transform::ROT_0);
+    expectBufferColor(Rect(0, 0, DEFAULT_DISPLAY_WIDTH / 2, DEFAULT_DISPLAY_HEIGHT / 2), 255, 0, 0,
+                      255);
+    expectBufferColor(Rect(DEFAULT_DISPLAY_WIDTH / 2, 0, DEFAULT_DISPLAY_WIDTH,
+                           DEFAULT_DISPLAY_HEIGHT / 2),
+                      0, 0, 255, 255);
+    expectBufferColor(Rect(DEFAULT_DISPLAY_WIDTH / 2, DEFAULT_DISPLAY_HEIGHT / 2,
+                           DEFAULT_DISPLAY_WIDTH, DEFAULT_DISPLAY_HEIGHT),
+                      0, 0, 0, 0);
+    expectBufferColor(Rect(0, DEFAULT_DISPLAY_HEIGHT / 2, DEFAULT_DISPLAY_WIDTH / 2,
+                           DEFAULT_DISPLAY_HEIGHT),
+                      0, 255, 0, 255);
+}
+
+template <typename SourceVariant>
+void RenderEngineTest::fillBufferCheckersRotate90() {
+    fillBufferCheckers<SourceVariant>(ui::Transform::ROT_90);
+    expectBufferColor(Rect(0, 0, DEFAULT_DISPLAY_WIDTH / 2, DEFAULT_DISPLAY_HEIGHT / 2), 0, 255, 0,
+                      255);
+    expectBufferColor(Rect(DEFAULT_DISPLAY_WIDTH / 2, 0, DEFAULT_DISPLAY_WIDTH,
+                           DEFAULT_DISPLAY_HEIGHT / 2),
+                      255, 0, 0, 255);
+    expectBufferColor(Rect(DEFAULT_DISPLAY_WIDTH / 2, DEFAULT_DISPLAY_HEIGHT / 2,
+                           DEFAULT_DISPLAY_WIDTH, DEFAULT_DISPLAY_HEIGHT),
+                      0, 0, 255, 255);
+    expectBufferColor(Rect(0, DEFAULT_DISPLAY_HEIGHT / 2, DEFAULT_DISPLAY_WIDTH / 2,
+                           DEFAULT_DISPLAY_HEIGHT),
+                      0, 0, 0, 0);
+}
+
+template <typename SourceVariant>
+void RenderEngineTest::fillBufferCheckersRotate180() {
+    fillBufferCheckers<SourceVariant>(ui::Transform::ROT_180);
+    expectBufferColor(Rect(0, 0, DEFAULT_DISPLAY_WIDTH / 2, DEFAULT_DISPLAY_HEIGHT / 2), 0, 0, 0,
+                      0);
+    expectBufferColor(Rect(DEFAULT_DISPLAY_WIDTH / 2, 0, DEFAULT_DISPLAY_WIDTH,
+                           DEFAULT_DISPLAY_HEIGHT / 2),
+                      0, 255, 0, 255);
+    expectBufferColor(Rect(DEFAULT_DISPLAY_WIDTH / 2, DEFAULT_DISPLAY_HEIGHT / 2,
+                           DEFAULT_DISPLAY_WIDTH, DEFAULT_DISPLAY_HEIGHT),
+                      255, 0, 0, 255);
+    expectBufferColor(Rect(0, DEFAULT_DISPLAY_HEIGHT / 2, DEFAULT_DISPLAY_WIDTH / 2,
+                           DEFAULT_DISPLAY_HEIGHT),
+                      0, 0, 255, 255);
+}
+
+template <typename SourceVariant>
+void RenderEngineTest::fillBufferCheckersRotate270() {
+    fillBufferCheckers<SourceVariant>(ui::Transform::ROT_270);
+    expectBufferColor(Rect(0, 0, DEFAULT_DISPLAY_WIDTH / 2, DEFAULT_DISPLAY_HEIGHT / 2), 0, 0, 255,
+                      255);
+    expectBufferColor(Rect(DEFAULT_DISPLAY_WIDTH / 2, 0, DEFAULT_DISPLAY_WIDTH,
+                           DEFAULT_DISPLAY_HEIGHT / 2),
+                      0, 0, 0, 0);
+    expectBufferColor(Rect(DEFAULT_DISPLAY_WIDTH / 2, DEFAULT_DISPLAY_HEIGHT / 2,
+                           DEFAULT_DISPLAY_WIDTH, DEFAULT_DISPLAY_HEIGHT),
+                      0, 255, 0, 255);
+    expectBufferColor(Rect(0, DEFAULT_DISPLAY_HEIGHT / 2, DEFAULT_DISPLAY_WIDTH / 2,
+                           DEFAULT_DISPLAY_HEIGHT),
+                      255, 0, 0, 255);
+}
+
+template <typename SourceVariant>
+void RenderEngineTest::fillBufferWithLayerTransform() {
+    renderengine::DisplaySettings settings;
+    settings.physicalDisplay = fullscreenRect();
+    // Here logical space is 2x2
+    settings.clip = Rect(2, 2);
+
+    std::vector<const renderengine::LayerSettings*> layers;
+
+    renderengine::LayerSettings layer;
+    layer.geometry.boundaries = Rect(1, 1).toFloatRect();
+    // Translate one pixel diagonally
+    layer.geometry.positionTransform = mat4(1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 1, 1, 0, 1);
+    SourceVariant::fillColor(layer, 1.0f, 0.0f, 0.0f, this);
+    layer.source.solidColor = half3(1.0f, 0.0f, 0.0f);
+    layer.alpha = 1.0f;
+
+    layers.push_back(&layer);
+
+    invokeDraw(settings, layers, mBuffer);
+}
+
+template <typename SourceVariant>
+void RenderEngineTest::fillBufferLayerTransform() {
+    fillBufferWithLayerTransform<SourceVariant>();
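+    // The 1x1 layer translated by (1, 1) in the 2x2 logical space lands in the
+    // bottom-right quadrant of the display; the rest of the buffer is untouched.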
+    expectBufferColor(Rect(0, 0, DEFAULT_DISPLAY_WIDTH, DEFAULT_DISPLAY_HEIGHT / 2), 0, 0, 0, 0);
+    expectBufferColor(Rect(0, 0, DEFAULT_DISPLAY_WIDTH / 2, DEFAULT_DISPLAY_HEIGHT), 0, 0, 0, 0);
+    expectBufferColor(Rect(DEFAULT_DISPLAY_WIDTH / 2, DEFAULT_DISPLAY_HEIGHT / 2,
+                           DEFAULT_DISPLAY_WIDTH, DEFAULT_DISPLAY_HEIGHT),
+                      255, 0, 0, 255);
+}
+
+template <typename SourceVariant>
+void RenderEngineTest::fillBufferWithColorTransform() {
+    renderengine::DisplaySettings settings;
+    settings.physicalDisplay = fullscreenRect();
+    settings.clip = Rect(1, 1);
+
+    std::vector<const renderengine::LayerSettings*> layers;
+
+    renderengine::LayerSettings layer;
+    layer.geometry.boundaries = Rect(1, 1).toFloatRect();
+    SourceVariant::fillColor(layer, 0.5f, 0.25f, 0.125f, this);
+    layer.alpha = 1.0f;
+
+    // construct a fake color matrix
+    // annihilate green and blue channels
+    settings.colorTransform = mat4::scale(vec4(1, 0, 0, 1));
+    // set red channel to red + green
+    layer.colorTransform = mat4(1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1);
+
+
+    layers.push_back(&layer);
+
+    invokeDraw(settings, layers, mBuffer);
+}
+
+template <typename SourceVariant>
+void RenderEngineTest::fillBufferColorTransform() {
+    fillBufferWithColorTransform<SourceVariant>();
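+    // Expected value: the layer transform folds green into red (0.5 + 0.25 = 0.75)
+    // and the display transform zeroes green and blue, so red comes out at
+    // roughly 0.75 * 255 ≈ 191.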
+    expectBufferColor(fullscreenRect(), 191, 0, 0, 255);
+}
+
+template <typename SourceVariant>
+void RenderEngineTest::fillRedBufferWithRoundedCorners() {
+    renderengine::DisplaySettings settings;
+    settings.physicalDisplay = fullscreenRect();
+    settings.clip = fullscreenRect();
+
+    std::vector<const renderengine::LayerSettings*> layers;
+
+    renderengine::LayerSettings layer;
+    layer.geometry.boundaries = fullscreenRect().toFloatRect();
+    layer.geometry.roundedCornersRadius = 5.0f;
+    layer.geometry.roundedCornersCrop = fullscreenRect().toFloatRect();
+    SourceVariant::fillColor(layer, 1.0f, 0.0f, 0.0f, this);
+    layer.alpha = 1.0f;
+
+    layers.push_back(&layer);
+
+    invokeDraw(settings, layers, mBuffer);
+}
+
+template <typename SourceVariant>
+void RenderEngineTest::fillBufferWithRoundedCorners() {
+    fillRedBufferWithRoundedCorners<SourceVariant>();
+    // Corners should be ignored...
+    expectBufferColor(Rect(0, 0, 1, 1), 0, 0, 0, 0);
+    expectBufferColor(Rect(DEFAULT_DISPLAY_WIDTH - 1, 0, DEFAULT_DISPLAY_WIDTH, 1), 0, 0, 0, 0);
+    expectBufferColor(Rect(0, DEFAULT_DISPLAY_HEIGHT - 1, 1, DEFAULT_DISPLAY_HEIGHT), 0, 0, 0, 0);
+    expectBufferColor(Rect(DEFAULT_DISPLAY_WIDTH - 1, DEFAULT_DISPLAY_HEIGHT - 1,
+                           DEFAULT_DISPLAY_WIDTH, DEFAULT_DISPLAY_HEIGHT),
+                      0, 0, 0, 0);
+    // ...And the non-rounded portion should be red.
+    // Other pixels may be anti-aliased, so let's not check those.
+    expectBufferColor(Rect(5, 5, DEFAULT_DISPLAY_WIDTH - 5, DEFAULT_DISPLAY_HEIGHT - 5), 255, 0, 0,
+                      255);
+}
+
+template <typename SourceVariant>
+void RenderEngineTest::fillBufferAndBlurBackground() {
+    char value[PROPERTY_VALUE_MAX];
+    property_get("ro.surface_flinger.supports_background_blur", value, "0");
+    if (!atoi(value)) {
+        // This device doesn't support blurs, no-op.
+        return;
+    }
+
+    auto blurRadius = 50;
+    auto center = DEFAULT_DISPLAY_WIDTH / 2;
+
+    renderengine::DisplaySettings settings;
+    settings.physicalDisplay = fullscreenRect();
+    settings.clip = fullscreenRect();
+
+    std::vector<const renderengine::LayerSettings*> layers;
+
+    renderengine::LayerSettings backgroundLayer;
+    backgroundLayer.geometry.boundaries = fullscreenRect().toFloatRect();
+    SourceVariant::fillColor(backgroundLayer, 0.0f, 1.0f, 0.0f, this);
+    backgroundLayer.alpha = 1.0f;
+    layers.push_back(&backgroundLayer);
+
+    renderengine::LayerSettings leftLayer;
+    leftLayer.geometry.boundaries =
+            Rect(DEFAULT_DISPLAY_WIDTH / 2, DEFAULT_DISPLAY_HEIGHT).toFloatRect();
+    SourceVariant::fillColor(leftLayer, 1.0f, 0.0f, 0.0f, this);
+    leftLayer.alpha = 1.0f;
+    layers.push_back(&leftLayer);
+
+    renderengine::LayerSettings blurLayer;
+    blurLayer.geometry.boundaries = fullscreenRect().toFloatRect();
+    blurLayer.backgroundBlurRadius = blurRadius;
+    blurLayer.alpha = 0;
+    layers.push_back(&blurLayer);
+
+    invokeDraw(settings, layers, mBuffer);
+
+    expectBufferColor(Rect(center - 1, center - 5, center, center + 5), 150, 150, 0, 255,
+                      50 /* tolerance */);
+    expectBufferColor(Rect(center, center - 5, center + 1, center + 5), 150, 150, 0, 255,
+                      50 /* tolerance */);
+}
+
+template <typename SourceVariant>
+void RenderEngineTest::overlayCorners() {
+    renderengine::DisplaySettings settings;
+    settings.physicalDisplay = fullscreenRect();
+    settings.clip = fullscreenRect();
+
+    std::vector<const renderengine::LayerSettings*> layersFirst;
+
+    renderengine::LayerSettings layerOne;
+    layerOne.geometry.boundaries =
+            FloatRect(0, 0, DEFAULT_DISPLAY_WIDTH / 3.0, DEFAULT_DISPLAY_HEIGHT / 3.0);
+    SourceVariant::fillColor(layerOne, 1.0f, 0.0f, 0.0f, this);
+    layerOne.alpha = 0.2;
+
+    layersFirst.push_back(&layerOne);
+    invokeDraw(settings, layersFirst, mBuffer);
+    expectBufferColor(Rect(DEFAULT_DISPLAY_WIDTH / 3, DEFAULT_DISPLAY_HEIGHT / 3), 51, 0, 0, 51);
+    expectBufferColor(Rect(DEFAULT_DISPLAY_WIDTH / 3 + 1, DEFAULT_DISPLAY_HEIGHT / 3 + 1,
+                           DEFAULT_DISPLAY_WIDTH, DEFAULT_DISPLAY_HEIGHT),
+                      0, 0, 0, 0);
+
+    std::vector<const renderengine::LayerSettings*> layersSecond;
+    renderengine::LayerSettings layerTwo;
+    layerTwo.geometry.boundaries =
+            FloatRect(DEFAULT_DISPLAY_WIDTH / 3.0, DEFAULT_DISPLAY_HEIGHT / 3.0,
+                      DEFAULT_DISPLAY_WIDTH, DEFAULT_DISPLAY_HEIGHT);
+    SourceVariant::fillColor(layerTwo, 0.0f, 1.0f, 0.0f, this);
+    layerTwo.alpha = 1.0f;
+
+    layersSecond.push_back(&layerTwo);
+    invokeDraw(settings, layersSecond, mBuffer);
+
+    expectBufferColor(Rect(DEFAULT_DISPLAY_WIDTH / 3, DEFAULT_DISPLAY_HEIGHT / 3), 0, 0, 0, 0);
+    expectBufferColor(Rect(DEFAULT_DISPLAY_WIDTH / 3 + 1, DEFAULT_DISPLAY_HEIGHT / 3 + 1,
+                           DEFAULT_DISPLAY_WIDTH, DEFAULT_DISPLAY_HEIGHT),
+                      0, 255, 0, 255);
+}
+
+void RenderEngineTest::fillRedBufferTextureTransform() {
+    renderengine::DisplaySettings settings;
+    settings.physicalDisplay = fullscreenRect();
+    settings.clip = Rect(1, 1);
+
+    std::vector<const renderengine::LayerSettings*> layers;
+
+    renderengine::LayerSettings layer;
+    // Allocate a checkerboard texture, but transform the texture coordinates
+    // so that only the upper-left quadrant is sampled.
+    sp<GraphicBuffer> buf = allocateSourceBuffer(2, 2);
+    uint32_t texName;
+    RenderEngineTest::sRE->genTextures(1, &texName);
+    this->mTexNames.push_back(texName);
+
+    uint8_t* pixels;
+    buf->lock(GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN,
+              reinterpret_cast<void**>(&pixels));
+    // Red top left, green top right, blue bottom left; the bottom-right pixel
+    // is left unwritten since only the upper-left quadrant is sampled.
+    pixels[0] = 255;
+    pixels[1] = 0;
+    pixels[2] = 0;
+    pixels[3] = 255;
+    pixels[4] = 0;
+    pixels[5] = 255;
+    pixels[6] = 0;
+    pixels[7] = 255;
+    pixels[8] = 0;
+    pixels[9] = 0;
+    pixels[10] = 255;
+    pixels[11] = 255;
+    buf->unlock();
+
+    layer.source.buffer.buffer = buf;
+    layer.source.buffer.textureName = texName;
+    // Transform coordinates to only be inside the red quadrant.
+    layer.source.buffer.textureTransform = mat4::scale(vec4(0.2, 0.2, 1, 1));
+    layer.alpha = 1.0f;
+    layer.geometry.boundaries = Rect(1, 1).toFloatRect();
+
+    layers.push_back(&layer);
+
+    invokeDraw(settings, layers, mBuffer);
+}
+
+void RenderEngineTest::fillBufferTextureTransform() {
+    fillRedBufferTextureTransform();
+    expectBufferColor(fullscreenRect(), 255, 0, 0, 255);
+}
+
+void RenderEngineTest::fillRedBufferWithPremultiplyAlpha() {
+    renderengine::DisplaySettings settings;
+    settings.physicalDisplay = fullscreenRect();
+    // Here logical space is 1x1
+    settings.clip = Rect(1, 1);
+
+    std::vector<const renderengine::LayerSettings*> layers;
+
+    renderengine::LayerSettings layer;
+    sp<GraphicBuffer> buf = allocateSourceBuffer(1, 1);
+    uint32_t texName;
+    RenderEngineTest::sRE->genTextures(1, &texName);
+    this->mTexNames.push_back(texName);
+
+    uint8_t* pixels;
+    buf->lock(GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN,
+              reinterpret_cast<void**>(&pixels));
+    pixels[0] = 255;
+    pixels[1] = 0;
+    pixels[2] = 0;
+    pixels[3] = 255;
+    buf->unlock();
+
+    layer.source.buffer.buffer = buf;
+    layer.source.buffer.textureName = texName;
+    layer.source.buffer.usePremultipliedAlpha = true;
+    layer.alpha = 0.5f;
+    layer.geometry.boundaries = Rect(1, 1).toFloatRect();
+
+    layers.push_back(&layer);
+
+    invokeDraw(settings, layers, mBuffer);
+}
+
+void RenderEngineTest::fillBufferWithPremultiplyAlpha() {
+    fillRedBufferWithPremultiplyAlpha();
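+    // The premultiplied source is scaled by the 0.5 layer alpha in every channel,
+    // so solid red (255, 0, 0, 255) is expected as roughly (128, 0, 0, 128).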
+    expectBufferColor(fullscreenRect(), 128, 0, 0, 128);
+}
+
+void RenderEngineTest::fillRedBufferWithoutPremultiplyAlpha() {
+    renderengine::DisplaySettings settings;
+    settings.physicalDisplay = fullscreenRect();
+    // Here logical space is 1x1
+    settings.clip = Rect(1, 1);
+
+    std::vector<const renderengine::LayerSettings*> layers;
+
+    renderengine::LayerSettings layer;
+    sp<GraphicBuffer> buf = allocateSourceBuffer(1, 1);
+    uint32_t texName;
+    RenderEngineTest::sRE->genTextures(1, &texName);
+    this->mTexNames.push_back(texName);
+
+    uint8_t* pixels;
+    buf->lock(GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN,
+              reinterpret_cast<void**>(&pixels));
+    pixels[0] = 255;
+    pixels[1] = 0;
+    pixels[2] = 0;
+    pixels[3] = 255;
+    buf->unlock();
+
+    layer.source.buffer.buffer = buf;
+    layer.source.buffer.textureName = texName;
+    layer.source.buffer.usePremultipliedAlpha = false;
+    layer.alpha = 0.5f;
+    layer.geometry.boundaries = Rect(1, 1).toFloatRect();
+
+    layers.push_back(&layer);
+
+    invokeDraw(settings, layers, mBuffer);
+}
+
+void RenderEngineTest::fillBufferWithoutPremultiplyAlpha() {
+    fillRedBufferWithoutPremultiplyAlpha();
+    expectBufferColor(fullscreenRect(), 128, 0, 0, 64, 1);
+}
+
+void RenderEngineTest::clearLeftRegion() {
+    renderengine::DisplaySettings settings;
+    settings.physicalDisplay = fullscreenRect();
+    // Here logical space is 4x4
+    settings.clip = Rect(4, 4);
+    settings.clearRegion = Region(Rect(2, 4));
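+    // Rect(2, 4) is the left half of the 4x4 logical space, so only the left
+    // half of the output should be cleared to opaque black.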
+    std::vector<const renderengine::LayerSettings*> layers;
+    // Fake layer; without bounds it should not render anything.
+    renderengine::LayerSettings layer;
+    layers.push_back(&layer);
+    invokeDraw(settings, layers, mBuffer);
+}
+
+void RenderEngineTest::clearRegion() {
+    // Reuse mBuffer
+    clearLeftRegion();
+    expectBufferColor(Rect(DEFAULT_DISPLAY_WIDTH / 2, DEFAULT_DISPLAY_HEIGHT), 0, 0, 0, 255);
+    expectBufferColor(Rect(DEFAULT_DISPLAY_WIDTH / 2, 0, DEFAULT_DISPLAY_WIDTH,
+                           DEFAULT_DISPLAY_HEIGHT),
+                      0, 0, 0, 0);
+}
+
+template <typename SourceVariant>
+void RenderEngineTest::drawShadow(const renderengine::LayerSettings& castingLayer,
+                                  const renderengine::ShadowSettings& shadow,
+                                  const ubyte4& casterColor, const ubyte4& backgroundColor) {
+    renderengine::DisplaySettings settings;
+    settings.physicalDisplay = fullscreenRect();
+    settings.clip = fullscreenRect();
+
+    std::vector<const renderengine::LayerSettings*> layers;
+
+    // add background layer
+    renderengine::LayerSettings bgLayer;
+    bgLayer.geometry.boundaries = fullscreenRect().toFloatRect();
+    ColorSourceVariant::fillColor(bgLayer, backgroundColor.r / 255.0f, backgroundColor.g / 255.0f,
+                                  backgroundColor.b / 255.0f, this);
+    bgLayer.alpha = backgroundColor.a / 255.0f;
+    layers.push_back(&bgLayer);
+
+    // add shadow layer
+    renderengine::LayerSettings shadowLayer;
+    shadowLayer.geometry.boundaries = castingLayer.geometry.boundaries;
+    shadowLayer.alpha = castingLayer.alpha;
+    shadowLayer.shadow = shadow;
+    layers.push_back(&shadowLayer);
+
+    // add layer casting the shadow
+    renderengine::LayerSettings layer = castingLayer;
+    SourceVariant::fillColor(layer, casterColor.r / 255.0f, casterColor.g / 255.0f,
+                             casterColor.b / 255.0f, this);
+    layers.push_back(&layer);
+
+    invokeDraw(settings, layers, mBuffer);
+}
+
+TEST_F(RenderEngineTest, drawLayers_noLayersToDraw) {
+    drawEmptyLayers();
+}
+
+TEST_F(RenderEngineTest, drawLayers_nullOutputBuffer) {
+    renderengine::DisplaySettings settings;
+    std::vector<const renderengine::LayerSettings*> layers;
+    renderengine::LayerSettings layer;
+    layer.geometry.boundaries = fullscreenRect().toFloatRect();
+    BufferSourceVariant<ForceOpaqueBufferVariant>::fillColor(layer, 1.0f, 0.0f, 0.0f, this);
+    layers.push_back(&layer);
+    base::unique_fd fence;
+    status_t status = sRE->drawLayers(settings, layers, nullptr, true, base::unique_fd(), &fence);
+
+    ASSERT_EQ(BAD_VALUE, status);
+}
+
+TEST_F(RenderEngineTest, drawLayers_nullOutputFence) {
+    renderengine::DisplaySettings settings;
+    settings.physicalDisplay = fullscreenRect();
+    settings.clip = fullscreenRect();
+
+    std::vector<const renderengine::LayerSettings*> layers;
+    renderengine::LayerSettings layer;
+    layer.geometry.boundaries = fullscreenRect().toFloatRect();
+    BufferSourceVariant<ForceOpaqueBufferVariant>::fillColor(layer, 1.0f, 0.0f, 0.0f, this);
+    layer.alpha = 1.0;
+    layers.push_back(&layer);
+
+    status_t status = sRE->drawLayers(settings, layers, mBuffer, true, base::unique_fd(), nullptr);
+    sCurrentBuffer = mBuffer;
+    ASSERT_EQ(NO_ERROR, status);
+    expectBufferColor(fullscreenRect(), 255, 0, 0, 255);
+}
+
+TEST_F(RenderEngineTest, drawLayers_doesNotCacheFramebuffer) {
+    renderengine::DisplaySettings settings;
+    settings.physicalDisplay = fullscreenRect();
+    settings.clip = fullscreenRect();
+
+    std::vector<const renderengine::LayerSettings*> layers;
+    renderengine::LayerSettings layer;
+    layer.geometry.boundaries = fullscreenRect().toFloatRect();
+    BufferSourceVariant<ForceOpaqueBufferVariant>::fillColor(layer, 1.0f, 0.0f, 0.0f, this);
+    layer.alpha = 1.0;
+    layers.push_back(&layer);
+
+    status_t status = sRE->drawLayers(settings, layers, mBuffer, false, base::unique_fd(), nullptr);
+    sCurrentBuffer = mBuffer;
+    ASSERT_EQ(NO_ERROR, status);
+    ASSERT_FALSE(sRE->isFramebufferImageCachedForTesting(mBuffer->getId()));
+    expectBufferColor(fullscreenRect(), 255, 0, 0, 255);
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillRedBuffer_colorSource) {
+    fillRedBuffer<ColorSourceVariant>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillGreenBuffer_colorSource) {
+    fillGreenBuffer<ColorSourceVariant>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBlueBuffer_colorSource) {
+    fillBlueBuffer<ColorSourceVariant>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillRedTransparentBuffer_colorSource) {
+    fillRedTransparentBuffer<ColorSourceVariant>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferPhysicalOffset_colorSource) {
+    fillBufferPhysicalOffset<ColorSourceVariant>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferCheckersRotate0_colorSource) {
+    fillBufferCheckersRotate0<ColorSourceVariant>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferCheckersRotate90_colorSource) {
+    fillBufferCheckersRotate90<ColorSourceVariant>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferCheckersRotate180_colorSource) {
+    fillBufferCheckersRotate180<ColorSourceVariant>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferCheckersRotate270_colorSource) {
+    fillBufferCheckersRotate270<ColorSourceVariant>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferLayerTransform_colorSource) {
+    fillBufferLayerTransform<ColorSourceVariant>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferColorTransform_colorSource) {
+    fillBufferColorTransform<ColorSourceVariant>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferRoundedCorners_colorSource) {
+    fillBufferWithRoundedCorners<ColorSourceVariant>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferAndBlurBackground_colorSource) {
+    fillBufferAndBlurBackground<ColorSourceVariant>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_overlayCorners_colorSource) {
+    overlayCorners<ColorSourceVariant>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillRedBuffer_opaqueBufferSource) {
+    fillRedBuffer<BufferSourceVariant<ForceOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillGreenBuffer_opaqueBufferSource) {
+    fillGreenBuffer<BufferSourceVariant<ForceOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBlueBuffer_opaqueBufferSource) {
+    fillBlueBuffer<BufferSourceVariant<ForceOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillRedTransparentBuffer_opaqueBufferSource) {
+    fillRedTransparentBuffer<BufferSourceVariant<ForceOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferPhysicalOffset_opaqueBufferSource) {
+    fillBufferPhysicalOffset<BufferSourceVariant<ForceOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferCheckersRotate0_opaqueBufferSource) {
+    fillBufferCheckersRotate0<BufferSourceVariant<ForceOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferCheckersRotate90_opaqueBufferSource) {
+    fillBufferCheckersRotate90<BufferSourceVariant<ForceOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferCheckersRotate180_opaqueBufferSource) {
+    fillBufferCheckersRotate180<BufferSourceVariant<ForceOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferCheckersRotate270_opaqueBufferSource) {
+    fillBufferCheckersRotate270<BufferSourceVariant<ForceOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferLayerTransform_opaqueBufferSource) {
+    fillBufferLayerTransform<BufferSourceVariant<ForceOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferColorTransform_opaqueBufferSource) {
+    fillBufferColorTransform<BufferSourceVariant<ForceOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferRoundedCorners_opaqueBufferSource) {
+    fillBufferWithRoundedCorners<BufferSourceVariant<ForceOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferAndBlurBackground_opaqueBufferSource) {
+    fillBufferAndBlurBackground<BufferSourceVariant<ForceOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_overlayCorners_opaqueBufferSource) {
+    overlayCorners<BufferSourceVariant<ForceOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillRedBuffer_bufferSource) {
+    fillRedBuffer<BufferSourceVariant<RelaxOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillGreenBuffer_bufferSource) {
+    fillGreenBuffer<BufferSourceVariant<RelaxOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBlueBuffer_bufferSource) {
+    fillBlueBuffer<BufferSourceVariant<RelaxOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillRedTransparentBuffer_bufferSource) {
+    fillRedTransparentBuffer<BufferSourceVariant<RelaxOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferPhysicalOffset_bufferSource) {
+    fillBufferPhysicalOffset<BufferSourceVariant<RelaxOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferCheckersRotate0_bufferSource) {
+    fillBufferCheckersRotate0<BufferSourceVariant<RelaxOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferCheckersRotate90_bufferSource) {
+    fillBufferCheckersRotate90<BufferSourceVariant<RelaxOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferCheckersRotate180_bufferSource) {
+    fillBufferCheckersRotate180<BufferSourceVariant<RelaxOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferCheckersRotate270_bufferSource) {
+    fillBufferCheckersRotate270<BufferSourceVariant<RelaxOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferLayerTransform_bufferSource) {
+    fillBufferLayerTransform<BufferSourceVariant<RelaxOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferColorTransform_bufferSource) {
+    fillBufferColorTransform<BufferSourceVariant<RelaxOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferRoundedCorners_bufferSource) {
+    fillBufferWithRoundedCorners<BufferSourceVariant<RelaxOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferAndBlurBackground_bufferSource) {
+    fillBufferAndBlurBackground<BufferSourceVariant<RelaxOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_overlayCorners_bufferSource) {
+    overlayCorners<BufferSourceVariant<RelaxOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferTextureTransform) {
+    fillBufferTextureTransform();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBuffer_premultipliesAlpha) {
+    fillBufferWithPremultiplyAlpha();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBuffer_withoutPremultiplyingAlpha) {
+    fillBufferWithoutPremultiplyAlpha();
+}
+
+TEST_F(RenderEngineTest, drawLayers_clearRegion) {
+    clearRegion();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillsBufferAndCachesImages) {
+    renderengine::DisplaySettings settings;
+    settings.physicalDisplay = fullscreenRect();
+    settings.clip = fullscreenRect();
+
+    std::vector<const renderengine::LayerSettings*> layers;
+
+    renderengine::LayerSettings layer;
+    layer.geometry.boundaries = fullscreenRect().toFloatRect();
+    BufferSourceVariant<ForceOpaqueBufferVariant>::fillColor(layer, 1.0f, 0.0f, 0.0f, this);
+
+    layers.push_back(&layer);
+    invokeDraw(settings, layers, mBuffer);
+    uint64_t bufferId = layer.source.buffer.buffer->getId();
+    EXPECT_TRUE(sRE->isImageCachedForTesting(bufferId));
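+    // unbindExternalTextureBufferForTesting() hands back a barrier that the ImageManager worker
+    // thread opens once the unbind has been processed; wait up to 5 seconds before checking
+    // that the cache entry is gone. The cache/unbind tests below reuse this same pattern.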
+    std::shared_ptr<renderengine::gl::ImageManager::Barrier> barrier =
+            sRE->unbindExternalTextureBufferForTesting(bufferId);
+    std::lock_guard<std::mutex> lock(barrier->mutex);
+    ASSERT_TRUE(barrier->condition.wait_for(barrier->mutex, std::chrono::seconds(5),
+                                            [&]() REQUIRES(barrier->mutex) {
+                                                return barrier->isOpen;
+                                            }));
+    EXPECT_FALSE(sRE->isImageCachedForTesting(bufferId));
+    EXPECT_EQ(NO_ERROR, barrier->result);
+}
+
+TEST_F(RenderEngineTest, bindExternalBuffer_withNullBuffer) {
+    status_t result = sRE->bindExternalTextureBuffer(0, nullptr, nullptr);
+    ASSERT_EQ(BAD_VALUE, result);
+}
+
+TEST_F(RenderEngineTest, bindExternalBuffer_cachesImages) {
+    sp<GraphicBuffer> buf = allocateSourceBuffer(1, 1);
+    uint32_t texName;
+    sRE->genTextures(1, &texName);
+    mTexNames.push_back(texName);
+
+    sRE->bindExternalTextureBuffer(texName, buf, nullptr);
+    uint64_t bufferId = buf->getId();
+    EXPECT_TRUE(sRE->isImageCachedForTesting(bufferId));
+    std::shared_ptr<renderengine::gl::ImageManager::Barrier> barrier =
+            sRE->unbindExternalTextureBufferForTesting(bufferId);
+    std::lock_guard<std::mutex> lock(barrier->mutex);
+    ASSERT_TRUE(barrier->condition.wait_for(barrier->mutex, std::chrono::seconds(5),
+                                            [&]() REQUIRES(barrier->mutex) {
+                                                return barrier->isOpen;
+                                            }));
+    EXPECT_EQ(NO_ERROR, barrier->result);
+    EXPECT_FALSE(sRE->isImageCachedForTesting(bufferId));
+}
+
+TEST_F(RenderEngineTest, cacheExternalBuffer_withNullBuffer) {
+    std::shared_ptr<renderengine::gl::ImageManager::Barrier> barrier =
+            sRE->cacheExternalTextureBufferForTesting(nullptr);
+    std::lock_guard<std::mutex> lock(barrier->mutex);
+    ASSERT_TRUE(barrier->condition.wait_for(barrier->mutex, std::chrono::seconds(5),
+                                            [&]() REQUIRES(barrier->mutex) {
+                                                return barrier->isOpen;
+                                            }));
+    EXPECT_TRUE(barrier->isOpen);
+    EXPECT_EQ(BAD_VALUE, barrier->result);
+}
+
+TEST_F(RenderEngineTest, cacheExternalBuffer_cachesImages) {
+    sp<GraphicBuffer> buf = allocateSourceBuffer(1, 1);
+    uint64_t bufferId = buf->getId();
+    std::shared_ptr<renderengine::gl::ImageManager::Barrier> barrier =
+            sRE->cacheExternalTextureBufferForTesting(buf);
+    {
+        std::lock_guard<std::mutex> lock(barrier->mutex);
+        ASSERT_TRUE(barrier->condition.wait_for(barrier->mutex, std::chrono::seconds(5),
+                                                [&]() REQUIRES(barrier->mutex) {
+                                                    return barrier->isOpen;
+                                                }));
+        EXPECT_EQ(NO_ERROR, barrier->result);
+    }
+    EXPECT_TRUE(sRE->isImageCachedForTesting(bufferId));
+    barrier = sRE->unbindExternalTextureBufferForTesting(bufferId);
+    {
+        std::lock_guard<std::mutex> lock(barrier->mutex);
+        ASSERT_TRUE(barrier->condition.wait_for(barrier->mutex, std::chrono::seconds(5),
+                                                [&]() REQUIRES(barrier->mutex) {
+                                                    return barrier->isOpen;
+                                                }));
+        EXPECT_EQ(NO_ERROR, barrier->result);
+    }
+    EXPECT_FALSE(sRE->isImageCachedForTesting(bufferId));
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillShadow_casterLayerMinSize) {
+    const ubyte4 casterColor(255, 0, 0, 255);
+    const ubyte4 backgroundColor(255, 255, 255, 255);
+    const float shadowLength = 5.0f;
+    Rect casterBounds(1, 1);
+    casterBounds.offsetBy(shadowLength + 1, shadowLength + 1);
+    renderengine::LayerSettings castingLayer;
+    castingLayer.geometry.boundaries = casterBounds.toFloatRect();
+    castingLayer.alpha = 1.0f;
+    renderengine::ShadowSettings settings =
+            getShadowSettings(vec2(casterBounds.left, casterBounds.top), shadowLength,
+                              false /* casterIsTranslucent */);
+
+    drawShadow<ColorSourceVariant>(castingLayer, settings, casterColor, backgroundColor);
+    expectShadowColor(castingLayer, settings, casterColor, backgroundColor);
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillShadow_casterColorLayer) {
+    const ubyte4 casterColor(255, 0, 0, 255);
+    const ubyte4 backgroundColor(255, 255, 255, 255);
+    const float shadowLength = 5.0f;
+    Rect casterBounds(DEFAULT_DISPLAY_WIDTH / 3.0f, DEFAULT_DISPLAY_HEIGHT / 3.0f);
+    casterBounds.offsetBy(shadowLength + 1, shadowLength + 1);
+    renderengine::LayerSettings castingLayer;
+    castingLayer.geometry.boundaries = casterBounds.toFloatRect();
+    castingLayer.alpha = 1.0f;
+    renderengine::ShadowSettings settings =
+            getShadowSettings(vec2(casterBounds.left, casterBounds.top), shadowLength,
+                              false /* casterIsTranslucent */);
+
+    drawShadow<ColorSourceVariant>(castingLayer, settings, casterColor, backgroundColor);
+    expectShadowColor(castingLayer, settings, casterColor, backgroundColor);
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillShadow_casterOpaqueBufferLayer) {
+    const ubyte4 casterColor(255, 0, 0, 255);
+    const ubyte4 backgroundColor(255, 255, 255, 255);
+    const float shadowLength = 5.0f;
+    Rect casterBounds(DEFAULT_DISPLAY_WIDTH / 3.0f, DEFAULT_DISPLAY_HEIGHT / 3.0f);
+    casterBounds.offsetBy(shadowLength + 1, shadowLength + 1);
+    renderengine::LayerSettings castingLayer;
+    castingLayer.geometry.boundaries = casterBounds.toFloatRect();
+    castingLayer.alpha = 1.0f;
+    renderengine::ShadowSettings settings =
+            getShadowSettings(vec2(casterBounds.left, casterBounds.top), shadowLength,
+                              false /* casterIsTranslucent */);
+
+    drawShadow<BufferSourceVariant<ForceOpaqueBufferVariant>>(castingLayer, settings, casterColor,
+                                                              backgroundColor);
+    expectShadowColor(castingLayer, settings, casterColor, backgroundColor);
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillShadow_casterWithRoundedCorner) {
+    const ubyte4 casterColor(255, 0, 0, 255);
+    const ubyte4 backgroundColor(255, 255, 255, 255);
+    const float shadowLength = 5.0f;
+    Rect casterBounds(DEFAULT_DISPLAY_WIDTH / 3.0f, DEFAULT_DISPLAY_HEIGHT / 3.0f);
+    casterBounds.offsetBy(shadowLength + 1, shadowLength + 1);
+    renderengine::LayerSettings castingLayer;
+    castingLayer.geometry.boundaries = casterBounds.toFloatRect();
+    castingLayer.geometry.roundedCornersRadius = 3.0f;
+    castingLayer.geometry.roundedCornersCrop = casterBounds.toFloatRect();
+    castingLayer.alpha = 1.0f;
+    renderengine::ShadowSettings settings =
+            getShadowSettings(vec2(casterBounds.left, casterBounds.top), shadowLength,
+                              false /* casterIsTranslucent */);
+
+    drawShadow<BufferSourceVariant<ForceOpaqueBufferVariant>>(castingLayer, settings, casterColor,
+                                                              backgroundColor);
+    expectShadowColor(castingLayer, settings, casterColor, backgroundColor);
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillShadow_translucentCasterWithAlpha) {
+    const ubyte4 casterColor(255, 0, 0, 255);
+    const ubyte4 backgroundColor(255, 255, 255, 255);
+    const float shadowLength = 5.0f;
+    Rect casterBounds(DEFAULT_DISPLAY_WIDTH / 3.0f, DEFAULT_DISPLAY_HEIGHT / 3.0f);
+    casterBounds.offsetBy(shadowLength + 1, shadowLength + 1);
+    renderengine::LayerSettings castingLayer;
+    castingLayer.geometry.boundaries = casterBounds.toFloatRect();
+    castingLayer.alpha = 0.5f;
+    renderengine::ShadowSettings settings =
+            getShadowSettings(vec2(casterBounds.left, casterBounds.top), shadowLength,
+                              true /* casterIsTranslucent */);
+
+    drawShadow<BufferSourceVariant<RelaxOpaqueBufferVariant>>(castingLayer, settings, casterColor,
+                                                              backgroundColor);
+
+    // verify only the background since the shadow will draw behind the caster
+    const float shadowInset = settings.length * -1.0f;
+    const Rect casterWithShadow =
+            Rect(casterBounds).inset(shadowInset, shadowInset, shadowInset, shadowInset);
+    const Region backgroundRegion = Region(fullscreenRect()).subtractSelf(casterWithShadow);
+    expectBufferColor(backgroundRegion, backgroundColor.r, backgroundColor.g, backgroundColor.b,
+                      backgroundColor.a);
+}
+
+TEST_F(RenderEngineTest, cleanupPostRender_cleansUpOnce) {
+    renderengine::DisplaySettings settings;
+    settings.physicalDisplay = fullscreenRect();
+    settings.clip = fullscreenRect();
+
+    std::vector<const renderengine::LayerSettings*> layers;
+    renderengine::LayerSettings layer;
+    layer.geometry.boundaries = fullscreenRect().toFloatRect();
+    BufferSourceVariant<ForceOpaqueBufferVariant>::fillColor(layer, 1.0f, 0.0f, 0.0f, this);
+    layer.alpha = 1.0;
+    layers.push_back(&layer);
+
+    base::unique_fd fenceOne;
+    sRE->drawLayers(settings, layers, mBuffer, true, base::unique_fd(), &fenceOne);
+    base::unique_fd fenceTwo;
+    sRE->drawLayers(settings, layers, mBuffer, true, std::move(fenceOne), &fenceTwo);
+
+    const int fd = fenceTwo.get();
+    if (fd >= 0) {
+        sync_wait(fd, -1);
+    }
+    // Only cleanup the first time.
+    EXPECT_TRUE(sRE->cleanupPostRender(
+            renderengine::RenderEngine::CleanupMode::CLEAN_OUTPUT_RESOURCES));
+    EXPECT_FALSE(sRE->cleanupPostRender(
+            renderengine::RenderEngine::CleanupMode::CLEAN_OUTPUT_RESOURCES));
+}
+
+TEST_F(RenderEngineTest, cleanupPostRender_whenCleaningAll_replacesTextureMemory) {
+    renderengine::DisplaySettings settings;
+    settings.physicalDisplay = fullscreenRect();
+    settings.clip = fullscreenRect();
+
+    std::vector<const renderengine::LayerSettings*> layers;
+    renderengine::LayerSettings layer;
+    layer.geometry.boundaries = fullscreenRect().toFloatRect();
+    BufferSourceVariant<ForceOpaqueBufferVariant>::fillColor(layer, 1.0f, 0.0f, 0.0f, this);
+    layer.alpha = 1.0;
+    layers.push_back(&layer);
+
+    base::unique_fd fence;
+    sRE->drawLayers(settings, layers, mBuffer, true, base::unique_fd(), &fence);
+
+    const int fd = fence.get();
+    if (fd >= 0) {
+        sync_wait(fd, -1);
+    }
+
+    uint64_t bufferId = layer.source.buffer.buffer->getId();
+    uint32_t texName = layer.source.buffer.textureName;
+    EXPECT_TRUE(sRE->isImageCachedForTesting(bufferId));
+    EXPECT_EQ(bufferId, sRE->getBufferIdForTextureNameForTesting(texName));
+
+    EXPECT_TRUE(sRE->cleanupPostRender(renderengine::RenderEngine::CleanupMode::CLEAN_ALL));
+
+    // Now check that our view of memory is good.
+    EXPECT_FALSE(sRE->isImageCachedForTesting(bufferId));
+    EXPECT_EQ(std::nullopt, sRE->getBufferIdForTextureNameForTesting(texName));
+    EXPECT_TRUE(sRE->isTextureNameKnownForTesting(texName));
+}
+
+} // namespace android
+
+// TODO(b/129481165): remove the #pragma below and fix conversion issues
+#pragma clang diagnostic pop // ignored "-Wconversion"
diff --git a/media/libstagefright/renderfright/tests/RenderEngineThreadedTest.cpp b/media/libstagefright/renderfright/tests/RenderEngineThreadedTest.cpp
new file mode 100644
index 0000000..97c7442
--- /dev/null
+++ b/media/libstagefright/renderfright/tests/RenderEngineThreadedTest.cpp
@@ -0,0 +1,216 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <cutils/properties.h>
+#include <gmock/gmock.h>
+#include <gtest/gtest.h>
+#include <renderengine/mock/RenderEngine.h>
+#include "../threaded/RenderEngineThreaded.h"
+
+namespace android {
+
+using testing::_;
+using testing::Eq;
+using testing::Mock;
+using testing::Return;
+
+struct RenderEngineThreadedTest : public ::testing::Test {
+    ~RenderEngineThreadedTest() {}
+
+    void SetUp() override {
+        mThreadedRE = renderengine::threaded::RenderEngineThreaded::create(
+                [this]() { return std::unique_ptr<renderengine::RenderEngine>(mRenderEngine); });
+    }
+
+    std::unique_ptr<renderengine::threaded::RenderEngineThreaded> mThreadedRE;
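+    // Raw pointer kept so tests can set expectations on the mock; ownership transfers to
+    // mThreadedRE once the factory in SetUp() wraps it in a unique_ptr.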
+    renderengine::mock::RenderEngine* mRenderEngine = new renderengine::mock::RenderEngine();
+};
+
+TEST_F(RenderEngineThreadedTest, dump) {
+    std::string testString = "XYZ";
+    EXPECT_CALL(*mRenderEngine, dump(_));
+    mThreadedRE->dump(testString);
+}
+
+TEST_F(RenderEngineThreadedTest, primeCache) {
+    EXPECT_CALL(*mRenderEngine, primeCache());
+    mThreadedRE->primeCache();
+}
+
+TEST_F(RenderEngineThreadedTest, genTextures) {
+    uint32_t texName;
+    EXPECT_CALL(*mRenderEngine, genTextures(1, &texName));
+    mThreadedRE->genTextures(1, &texName);
+}
+
+TEST_F(RenderEngineThreadedTest, deleteTextures) {
+    uint32_t texName;
+    EXPECT_CALL(*mRenderEngine, deleteTextures(1, &texName));
+    mThreadedRE->deleteTextures(1, &texName);
+}
+
+TEST_F(RenderEngineThreadedTest, bindExternalBuffer_nullptrBuffer) {
+    EXPECT_CALL(*mRenderEngine, bindExternalTextureBuffer(0, Eq(nullptr), Eq(nullptr)))
+            .WillOnce(Return(BAD_VALUE));
+    status_t result = mThreadedRE->bindExternalTextureBuffer(0, nullptr, nullptr);
+    ASSERT_EQ(BAD_VALUE, result);
+}
+
+TEST_F(RenderEngineThreadedTest, bindExternalBuffer_withBuffer) {
+    sp<GraphicBuffer> buf = new GraphicBuffer();
+    EXPECT_CALL(*mRenderEngine, bindExternalTextureBuffer(0, buf, Eq(nullptr)))
+            .WillOnce(Return(NO_ERROR));
+    status_t result = mThreadedRE->bindExternalTextureBuffer(0, buf, nullptr);
+    ASSERT_EQ(NO_ERROR, result);
+}
+
+TEST_F(RenderEngineThreadedTest, cacheExternalTextureBuffer_nullptr) {
+    EXPECT_CALL(*mRenderEngine, cacheExternalTextureBuffer(Eq(nullptr)));
+    mThreadedRE->cacheExternalTextureBuffer(nullptr);
+}
+
+TEST_F(RenderEngineThreadedTest, cacheExternalTextureBuffer_withBuffer) {
+    sp<GraphicBuffer> buf = new GraphicBuffer();
+    EXPECT_CALL(*mRenderEngine, cacheExternalTextureBuffer(buf));
+    mThreadedRE->cacheExternalTextureBuffer(buf);
+}
+
+TEST_F(RenderEngineThreadedTest, unbindExternalTextureBuffer) {
+    EXPECT_CALL(*mRenderEngine, unbindExternalTextureBuffer(0x0));
+    mThreadedRE->unbindExternalTextureBuffer(0x0);
+}
+
+TEST_F(RenderEngineThreadedTest, bindFrameBuffer_returnsBadValue) {
+    std::unique_ptr<renderengine::Framebuffer> framebuffer;
+    EXPECT_CALL(*mRenderEngine, bindFrameBuffer(framebuffer.get())).WillOnce(Return(BAD_VALUE));
+    status_t result = mThreadedRE->bindFrameBuffer(framebuffer.get());
+    ASSERT_EQ(BAD_VALUE, result);
+}
+
+TEST_F(RenderEngineThreadedTest, bindFrameBuffer_returnsNoError) {
+    std::unique_ptr<renderengine::Framebuffer> framebuffer;
+    EXPECT_CALL(*mRenderEngine, bindFrameBuffer(framebuffer.get())).WillOnce(Return(NO_ERROR));
+    status_t result = mThreadedRE->bindFrameBuffer(framebuffer.get());
+    ASSERT_EQ(NO_ERROR, result);
+}
+
+TEST_F(RenderEngineThreadedTest, unbindFrameBuffer) {
+    std::unique_ptr<renderengine::Framebuffer> framebuffer;
+    EXPECT_CALL(*mRenderEngine, unbindFrameBuffer(framebuffer.get()));
+    mThreadedRE->unbindFrameBuffer(framebuffer.get());
+}
+
+TEST_F(RenderEngineThreadedTest, getMaxTextureSize_returns20) {
+    size_t size = 20;
+    EXPECT_CALL(*mRenderEngine, getMaxTextureSize()).WillOnce(Return(size));
+    size_t result = mThreadedRE->getMaxTextureSize();
+    ASSERT_EQ(size, result);
+}
+
+TEST_F(RenderEngineThreadedTest, getMaxTextureSize_returns0) {
+    size_t size = 0;
+    EXPECT_CALL(*mRenderEngine, getMaxTextureSize()).WillOnce(Return(size));
+    size_t result = mThreadedRE->getMaxTextureSize();
+    ASSERT_EQ(size, result);
+}
+
+TEST_F(RenderEngineThreadedTest, getMaxViewportDims_returns20) {
+    size_t dims = 20;
+    EXPECT_CALL(*mRenderEngine, getMaxViewportDims()).WillOnce(Return(dims));
+    size_t result = mThreadedRE->getMaxViewportDims();
+    ASSERT_EQ(dims, result);
+}
+
+TEST_F(RenderEngineThreadedTest, getMaxViewportDims_returns0) {
+    size_t dims = 0;
+    EXPECT_CALL(*mRenderEngine, getMaxViewportDims()).WillOnce(Return(dims));
+    size_t result = mThreadedRE->getMaxViewportDims();
+    ASSERT_EQ(dims, result);
+}
+
+TEST_F(RenderEngineThreadedTest, isProtected_returnsFalse) {
+    EXPECT_CALL(*mRenderEngine, isProtected()).WillOnce(Return(false));
+    bool result = mThreadedRE->isProtected();
+    ASSERT_EQ(false, result);
+}
+
+TEST_F(RenderEngineThreadedTest, isProtected_returnsTrue) {
+    EXPECT_CALL(*mRenderEngine, isProtected()).WillOnce(Return(true));
+    bool result = mThreadedRE->isProtected();
+    ASSERT_EQ(true, result);
+}
+
+TEST_F(RenderEngineThreadedTest, supportsProtectedContent_returnsFalse) {
+    EXPECT_CALL(*mRenderEngine, supportsProtectedContent()).WillOnce(Return(false));
+    bool result = mThreadedRE->supportsProtectedContent();
+    ASSERT_EQ(false, result);
+}
+
+TEST_F(RenderEngineThreadedTest, supportsProtectedContent_returnsTrue) {
+    EXPECT_CALL(*mRenderEngine, supportsProtectedContent()).WillOnce(Return(true));
+    bool result = mThreadedRE->supportsProtectedContent();
+    ASSERT_EQ(true, result);
+}
+
+TEST_F(RenderEngineThreadedTest, useProtectedContext_returnsFalse) {
+    EXPECT_CALL(*mRenderEngine, useProtectedContext(false)).WillOnce(Return(false));
+    bool result = mThreadedRE->useProtectedContext(false);
+    ASSERT_EQ(false, result);
+}
+
+TEST_F(RenderEngineThreadedTest, useProtectedContext_returnsTrue) {
+    EXPECT_CALL(*mRenderEngine, useProtectedContext(false)).WillOnce(Return(true));
+    bool result = mThreadedRE->useProtectedContext(false);
+    ASSERT_EQ(true, result);
+}
+
+TEST_F(RenderEngineThreadedTest, cleanupPostRender_returnsFalse) {
+    EXPECT_CALL(*mRenderEngine,
+                cleanupPostRender(renderengine::RenderEngine::CleanupMode::CLEAN_ALL))
+            .WillOnce(Return(false));
+    bool result =
+            mThreadedRE->cleanupPostRender(renderengine::RenderEngine::CleanupMode::CLEAN_ALL);
+    ASSERT_EQ(false, result);
+}
+
+TEST_F(RenderEngineThreadedTest, cleanupPostRender_returnsTrue) {
+    EXPECT_CALL(*mRenderEngine,
+                cleanupPostRender(renderengine::RenderEngine::CleanupMode::CLEAN_ALL))
+            .WillOnce(Return(true));
+    bool result =
+            mThreadedRE->cleanupPostRender(renderengine::RenderEngine::CleanupMode::CLEAN_ALL);
+    ASSERT_EQ(true, result);
+}
+
+TEST_F(RenderEngineThreadedTest, drawLayers) {
+    renderengine::DisplaySettings settings;
+    std::vector<const renderengine::LayerSettings*> layers;
+    sp<GraphicBuffer> buffer = new GraphicBuffer();
+    base::unique_fd bufferFence;
+    base::unique_fd drawFence;
+
+    EXPECT_CALL(*mRenderEngine, drawLayers)
+            .WillOnce([](const renderengine::DisplaySettings&,
+                         const std::vector<const renderengine::LayerSettings*>&,
+                         const sp<GraphicBuffer>&, const bool, base::unique_fd&&,
+                         base::unique_fd*) -> status_t { return NO_ERROR; });
+
+    status_t result = mThreadedRE->drawLayers(settings, layers, buffer, false,
+                                              std::move(bufferFence), &drawFence);
+    ASSERT_EQ(NO_ERROR, result);
+}
+
+} // namespace android
diff --git a/media/libstagefright/renderfright/threaded/RenderEngineThreaded.cpp b/media/libstagefright/renderfright/threaded/RenderEngineThreaded.cpp
new file mode 100644
index 0000000..d4184fd
--- /dev/null
+++ b/media/libstagefright/renderfright/threaded/RenderEngineThreaded.cpp
@@ -0,0 +1,403 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define ATRACE_TAG ATRACE_TAG_GRAPHICS
+
+#include "RenderEngineThreaded.h"
+
+#include <sched.h>
+#include <chrono>
+#include <future>
+
+#include <android-base/stringprintf.h>
+#include <private/gui/SyncFeatures.h>
+#include <utils/Trace.h>
+
+#include "gl/GLESRenderEngine.h"
+
+using namespace std::chrono_literals;
+
+namespace android {
+namespace renderengine {
+namespace threaded {
+
+std::unique_ptr<RenderEngineThreaded> RenderEngineThreaded::create(CreateInstanceFactory factory) {
+    return std::make_unique<RenderEngineThreaded>(std::move(factory));
+}
+
+RenderEngineThreaded::RenderEngineThreaded(CreateInstanceFactory factory) {
+    ATRACE_CALL();
+
+    std::lock_guard lockThread(mThreadMutex);
+    mThread = std::thread(&RenderEngineThreaded::threadMain, this, factory);
+}
+
+RenderEngineThreaded::~RenderEngineThreaded() {
+    {
+        std::lock_guard lock(mThreadMutex);
+        mRunning = false;
+        mCondition.notify_one();
+    }
+
+    if (mThread.joinable()) {
+        mThread.join();
+    }
+}
+
+// NO_THREAD_SAFETY_ANALYSIS is because std::unique_lock presently lacks thread safety annotations.
+void RenderEngineThreaded::threadMain(CreateInstanceFactory factory) NO_THREAD_SAFETY_ANALYSIS {
+    ATRACE_CALL();
+
+    struct sched_param param = {0};
+    param.sched_priority = 2;
+    if (sched_setscheduler(0, SCHED_FIFO, &param) != 0) {
+        ALOGE("Couldn't set SCHED_FIFO");
+    }
+
+    mRenderEngine = factory();
+
+    std::unique_lock<std::mutex> lock(mThreadMutex);
+    pthread_setname_np(pthread_self(), mThreadName);
+
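+    // Drain queued tasks one at a time while holding mThreadMutex; the wait predicate below
+    // re-checks the queue, so a notify that arrives while a task is executing is not missed.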
+    while (mRunning) {
+        if (!mFunctionCalls.empty()) {
+            auto task = mFunctionCalls.front();
+            mFunctionCalls.pop();
+            task(*mRenderEngine);
+        }
+        mCondition.wait(lock, [this]() REQUIRES(mThreadMutex) {
+            return !mRunning || !mFunctionCalls.empty();
+        });
+    }
+}
+
+void RenderEngineThreaded::primeCache() const {
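+    // Enqueue the call for the render thread, then block on a promise/future pair until it has
+    // actually run; every proxied method in this file follows this same pattern.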
+    std::promise<void> resultPromise;
+    std::future<void> resultFuture = resultPromise.get_future();
+    {
+        std::lock_guard lock(mThreadMutex);
+        mFunctionCalls.push([&resultPromise](renderengine::RenderEngine& instance) {
+            ATRACE_NAME("REThreaded::primeCache");
+            instance.primeCache();
+            resultPromise.set_value();
+        });
+    }
+    mCondition.notify_one();
+    resultFuture.wait();
+}
+
+void RenderEngineThreaded::dump(std::string& result) {
+    std::promise<std::string> resultPromise;
+    std::future<std::string> resultFuture = resultPromise.get_future();
+    {
+        std::lock_guard lock(mThreadMutex);
+        mFunctionCalls.push([&resultPromise, &result](renderengine::RenderEngine& instance) {
+            ATRACE_NAME("REThreaded::dump");
+            std::string localResult = result;
+            instance.dump(localResult);
+            resultPromise.set_value(std::move(localResult));
+        });
+    }
+    mCondition.notify_one();
+    // resultFuture.get() returns the dumped string by value, so copy it back into the caller's result.
+    result.assign(resultFuture.get());
+}
+
+bool RenderEngineThreaded::useNativeFenceSync() const {
+    std::promise<bool> resultPromise;
+    std::future<bool> resultFuture = resultPromise.get_future();
+    {
+        std::lock_guard lock(mThreadMutex);
+        mFunctionCalls.push([&resultPromise](renderengine::RenderEngine& /*instance*/) {
+            ATRACE_NAME("REThreaded::useNativeFenceSync");
+            bool returnValue = SyncFeatures::getInstance().useNativeFenceSync();
+            resultPromise.set_value(returnValue);
+        });
+    }
+    mCondition.notify_one();
+    return resultFuture.get();
+}
+
+bool RenderEngineThreaded::useWaitSync() const {
+    std::promise<bool> resultPromise;
+    std::future<bool> resultFuture = resultPromise.get_future();
+    {
+        std::lock_guard lock(mThreadMutex);
+        mFunctionCalls.push([&resultPromise](renderengine::RenderEngine& /*instance*/) {
+            ATRACE_NAME("REThreaded::useWaitSync");
+            bool returnValue = SyncFeatures::getInstance().useWaitSync();
+            resultPromise.set_value(returnValue);
+        });
+    }
+    mCondition.notify_one();
+    return resultFuture.get();
+}
+
+void RenderEngineThreaded::genTextures(size_t count, uint32_t* names) {
+    std::promise<void> resultPromise;
+    std::future<void> resultFuture = resultPromise.get_future();
+    {
+        std::lock_guard lock(mThreadMutex);
+        mFunctionCalls.push([&resultPromise, count, names](renderengine::RenderEngine& instance) {
+            ATRACE_NAME("REThreaded::genTextures");
+            instance.genTextures(count, names);
+            resultPromise.set_value();
+        });
+    }
+    mCondition.notify_one();
+    resultFuture.wait();
+}
+
+void RenderEngineThreaded::deleteTextures(size_t count, uint32_t const* names) {
+    std::promise<void> resultPromise;
+    std::future<void> resultFuture = resultPromise.get_future();
+    {
+        std::lock_guard lock(mThreadMutex);
+        mFunctionCalls.push([&resultPromise, count, &names](renderengine::RenderEngine& instance) {
+            ATRACE_NAME("REThreaded::deleteTextures");
+            instance.deleteTextures(count, names);
+            resultPromise.set_value();
+        });
+    }
+    mCondition.notify_one();
+    resultFuture.wait();
+}
+
+void RenderEngineThreaded::bindExternalTextureImage(uint32_t texName, const Image& image) {
+    std::promise<void> resultPromise;
+    std::future<void> resultFuture = resultPromise.get_future();
+    {
+        std::lock_guard lock(mThreadMutex);
+        mFunctionCalls.push(
+                [&resultPromise, texName, &image](renderengine::RenderEngine& instance) {
+                    ATRACE_NAME("REThreaded::bindExternalTextureImage");
+                    instance.bindExternalTextureImage(texName, image);
+                    resultPromise.set_value();
+                });
+    }
+    mCondition.notify_one();
+    resultFuture.wait();
+}
+
+status_t RenderEngineThreaded::bindExternalTextureBuffer(uint32_t texName,
+                                                         const sp<GraphicBuffer>& buffer,
+                                                         const sp<Fence>& fence) {
+    std::promise<status_t> resultPromise;
+    std::future<status_t> resultFuture = resultPromise.get_future();
+    {
+        std::lock_guard lock(mThreadMutex);
+        mFunctionCalls.push(
+                [&resultPromise, texName, &buffer, &fence](renderengine::RenderEngine& instance) {
+                    ATRACE_NAME("REThreaded::bindExternalTextureBuffer");
+                    status_t status = instance.bindExternalTextureBuffer(texName, buffer, fence);
+                    resultPromise.set_value(status);
+                });
+    }
+    mCondition.notify_one();
+    return resultFuture.get();
+}
+
+void RenderEngineThreaded::cacheExternalTextureBuffer(const sp<GraphicBuffer>& buffer) {
+    std::promise<void> resultPromise;
+    std::future<void> resultFuture = resultPromise.get_future();
+    {
+        std::lock_guard lock(mThreadMutex);
+        mFunctionCalls.push([&resultPromise, &buffer](renderengine::RenderEngine& instance) {
+            ATRACE_NAME("REThreaded::cacheExternalTextureBuffer");
+            instance.cacheExternalTextureBuffer(buffer);
+            resultPromise.set_value();
+        });
+    }
+    mCondition.notify_one();
+    resultFuture.wait();
+}
+
+void RenderEngineThreaded::unbindExternalTextureBuffer(uint64_t bufferId) {
+    std::promise<void> resultPromise;
+    std::future<void> resultFuture = resultPromise.get_future();
+    {
+        std::lock_guard lock(mThreadMutex);
+        mFunctionCalls.push([&resultPromise, &bufferId](renderengine::RenderEngine& instance) {
+            ATRACE_NAME("REThreaded::unbindExternalTextureBuffer");
+            instance.unbindExternalTextureBuffer(bufferId);
+            resultPromise.set_value();
+        });
+    }
+    mCondition.notify_one();
+    resultFuture.wait();
+}
+
+status_t RenderEngineThreaded::bindFrameBuffer(Framebuffer* framebuffer) {
+    std::promise<status_t> resultPromise;
+    std::future<status_t> resultFuture = resultPromise.get_future();
+    {
+        std::lock_guard lock(mThreadMutex);
+        mFunctionCalls.push([&resultPromise, &framebuffer](renderengine::RenderEngine& instance) {
+            ATRACE_NAME("REThreaded::bindFrameBuffer");
+            status_t status = instance.bindFrameBuffer(framebuffer);
+            resultPromise.set_value(status);
+        });
+    }
+    mCondition.notify_one();
+    return resultFuture.get();
+}
+
+void RenderEngineThreaded::unbindFrameBuffer(Framebuffer* framebuffer) {
+    std::promise<void> resultPromise;
+    std::future<void> resultFuture = resultPromise.get_future();
+    {
+        std::lock_guard lock(mThreadMutex);
+        mFunctionCalls.push([&resultPromise, &framebuffer](renderengine::RenderEngine& instance) {
+            ATRACE_NAME("REThreaded::unbindFrameBuffer");
+            instance.unbindFrameBuffer(framebuffer);
+            resultPromise.set_value();
+        });
+    }
+    mCondition.notify_one();
+    resultFuture.wait();
+}
+
+size_t RenderEngineThreaded::getMaxTextureSize() const {
+    std::promise<size_t> resultPromise;
+    std::future<size_t> resultFuture = resultPromise.get_future();
+    {
+        std::lock_guard lock(mThreadMutex);
+        mFunctionCalls.push([&resultPromise](renderengine::RenderEngine& instance) {
+            ATRACE_NAME("REThreaded::getMaxTextureSize");
+            size_t size = instance.getMaxTextureSize();
+            resultPromise.set_value(size);
+        });
+    }
+    mCondition.notify_one();
+    return resultFuture.get();
+}
+
+size_t RenderEngineThreaded::getMaxViewportDims() const {
+    std::promise<size_t> resultPromise;
+    std::future<size_t> resultFuture = resultPromise.get_future();
+    {
+        std::lock_guard lock(mThreadMutex);
+        mFunctionCalls.push([&resultPromise](renderengine::RenderEngine& instance) {
+            ATRACE_NAME("REThreaded::getMaxViewportDims");
+            size_t size = instance.getMaxViewportDims();
+            resultPromise.set_value(size);
+        });
+    }
+    mCondition.notify_one();
+    return resultFuture.get();
+}
+
+bool RenderEngineThreaded::isProtected() const {
+    std::promise<bool> resultPromise;
+    std::future<bool> resultFuture = resultPromise.get_future();
+    {
+        std::lock_guard lock(mThreadMutex);
+        mFunctionCalls.push([&resultPromise](renderengine::RenderEngine& instance) {
+            ATRACE_NAME("REThreaded::isProtected");
+            bool returnValue = instance.isProtected();
+            resultPromise.set_value(returnValue);
+        });
+    }
+    mCondition.notify_one();
+    return resultFuture.get();
+}
+
+bool RenderEngineThreaded::supportsProtectedContent() const {
+    std::promise<bool> resultPromise;
+    std::future<bool> resultFuture = resultPromise.get_future();
+    {
+        std::lock_guard lock(mThreadMutex);
+        mFunctionCalls.push([&resultPromise](renderengine::RenderEngine& instance) {
+            ATRACE_NAME("REThreaded::supportsProtectedContent");
+            bool returnValue = instance.supportsProtectedContent();
+            resultPromise.set_value(returnValue);
+        });
+    }
+    mCondition.notify_one();
+    return resultFuture.get();
+}
+
+bool RenderEngineThreaded::useProtectedContext(bool useProtectedContext) {
+    std::promise<bool> resultPromise;
+    std::future<bool> resultFuture = resultPromise.get_future();
+    {
+        std::lock_guard lock(mThreadMutex);
+        mFunctionCalls.push(
+                [&resultPromise, useProtectedContext](renderengine::RenderEngine& instance) {
+                    ATRACE_NAME("REThreaded::useProtectedContext");
+                    bool returnValue = instance.useProtectedContext(useProtectedContext);
+                    resultPromise.set_value(returnValue);
+                });
+    }
+    mCondition.notify_one();
+    return resultFuture.get();
+}
+
+Framebuffer* RenderEngineThreaded::getFramebufferForDrawing() {
+    std::promise<Framebuffer*> resultPromise;
+    std::future<Framebuffer*> resultFuture = resultPromise.get_future();
+    {
+        std::lock_guard lock(mThreadMutex);
+        mFunctionCalls.push([&resultPromise](renderengine::RenderEngine& instance) {
+            ATRACE_NAME("REThreaded::getFramebufferForDrawing");
+            Framebuffer* framebuffer = instance.getFramebufferForDrawing();
+            resultPromise.set_value(framebuffer);
+        });
+    }
+    mCondition.notify_one();
+    return resultFuture.get();
+}
+
+bool RenderEngineThreaded::cleanupPostRender(CleanupMode mode) {
+    std::promise<bool> resultPromise;
+    std::future<bool> resultFuture = resultPromise.get_future();
+    {
+        std::lock_guard lock(mThreadMutex);
+        mFunctionCalls.push([&resultPromise, mode](renderengine::RenderEngine& instance) {
+            ATRACE_NAME("REThreaded::cleanupPostRender");
+            bool returnValue = instance.cleanupPostRender(mode);
+            resultPromise.set_value(returnValue);
+        });
+    }
+    mCondition.notify_one();
+    return resultFuture.get();
+}
+
+status_t RenderEngineThreaded::drawLayers(const DisplaySettings& display,
+                                          const std::vector<const LayerSettings*>& layers,
+                                          const sp<GraphicBuffer>& buffer,
+                                          const bool useFramebufferCache,
+                                          base::unique_fd&& bufferFence,
+                                          base::unique_fd* drawFence) {
+    std::promise<status_t> resultPromise;
+    std::future<status_t> resultFuture = resultPromise.get_future();
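+    // Capturing the arguments by reference is safe: this method blocks on resultFuture.get()
+    // below, so they outlive the task running on the render thread.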
+    {
+        std::lock_guard lock(mThreadMutex);
+        mFunctionCalls.push([&resultPromise, &display, &layers, &buffer, useFramebufferCache,
+                             &bufferFence, &drawFence](renderengine::RenderEngine& instance) {
+            ATRACE_NAME("REThreaded::drawLayers");
+            status_t status = instance.drawLayers(display, layers, buffer, useFramebufferCache,
+                                                  std::move(bufferFence), drawFence);
+            resultPromise.set_value(status);
+        });
+    }
+    mCondition.notify_one();
+    return resultFuture.get();
+}
+
+} // namespace threaded
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/threaded/RenderEngineThreaded.h b/media/libstagefright/renderfright/threaded/RenderEngineThreaded.h
new file mode 100644
index 0000000..86a49e9
--- /dev/null
+++ b/media/libstagefright/renderfright/threaded/RenderEngineThreaded.h
@@ -0,0 +1,97 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <android-base/thread_annotations.h>
+#include <condition_variable>
+#include <mutex>
+#include <queue>
+#include <thread>
+
+#include "renderengine/RenderEngine.h"
+
+namespace android {
+namespace renderengine {
+namespace threaded {
+
+using CreateInstanceFactory = std::function<std::unique_ptr<renderengine::RenderEngine>()>;
+
+/**
+ * This class wraps another RenderEngine implementation and owns a dedicated worker thread. Each
+ * call into this class is captured as a lambda and pushed onto a queue; the worker thread then
+ * executes the queued functions, in order, against the wrapped instance.
+ */
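+//
+// Minimal usage sketch (makeGlRenderEngine() is a hypothetical backend factory, shown only for
+// illustration):
+//
+//     auto threaded = renderengine::threaded::RenderEngineThreaded::create(
+//             []() -> std::unique_ptr<renderengine::RenderEngine> { return makeGlRenderEngine(); });
+//     threaded->primeCache();  // enqueued on the internal thread; the caller blocks on a future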
+class RenderEngineThreaded : public RenderEngine {
+public:
+    static std::unique_ptr<RenderEngineThreaded> create(CreateInstanceFactory factory);
+
+    RenderEngineThreaded(CreateInstanceFactory factory);
+    ~RenderEngineThreaded() override;
+    void primeCache() const override;
+
+    void dump(std::string& result) override;
+
+    bool useNativeFenceSync() const override;
+    bool useWaitSync() const override;
+    void genTextures(size_t count, uint32_t* names) override;
+    void deleteTextures(size_t count, uint32_t const* names) override;
+    void bindExternalTextureImage(uint32_t texName, const Image& image) override;
+    status_t bindExternalTextureBuffer(uint32_t texName, const sp<GraphicBuffer>& buffer,
+                                       const sp<Fence>& fence) override;
+    void cacheExternalTextureBuffer(const sp<GraphicBuffer>& buffer) override;
+    void unbindExternalTextureBuffer(uint64_t bufferId) override;
+    status_t bindFrameBuffer(Framebuffer* framebuffer) override;
+    void unbindFrameBuffer(Framebuffer* framebuffer) override;
+    size_t getMaxTextureSize() const override;
+    size_t getMaxViewportDims() const override;
+
+    bool isProtected() const override;
+    bool supportsProtectedContent() const override;
+    bool useProtectedContext(bool useProtectedContext) override;
+    bool cleanupPostRender(CleanupMode mode) override;
+
+    status_t drawLayers(const DisplaySettings& display,
+                        const std::vector<const LayerSettings*>& layers,
+                        const sp<GraphicBuffer>& buffer, const bool useFramebufferCache,
+                        base::unique_fd&& bufferFence, base::unique_fd* drawFence) override;
+
+protected:
+    Framebuffer* getFramebufferForDrawing() override;
+
+private:
+    void threadMain(CreateInstanceFactory factory);
+
+    /* ------------------------------------------------------------------------
+     * Threading
+     */
+    const char* const mThreadName = "RenderEngineThread";
+    // Protects the creation and destruction of mThread.
+    mutable std::mutex mThreadMutex;
+    std::thread mThread GUARDED_BY(mThreadMutex);
+    bool mRunning GUARDED_BY(mThreadMutex) = true;
+    mutable std::queue<std::function<void(renderengine::RenderEngine& instance)>> mFunctionCalls
+            GUARDED_BY(mThreadMutex);
+    mutable std::condition_variable mCondition;
+
+    /* ------------------------------------------------------------------------
+     * Render Engine
+     */
+    std::unique_ptr<renderengine::RenderEngine> mRenderEngine;
+};
+} // namespace threaded
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/rtsp/AAVCAssembler.cpp b/media/libstagefright/rtsp/AAVCAssembler.cpp
index 4bc67e8..a0b66a7 100644
--- a/media/libstagefright/rtsp/AAVCAssembler.cpp
+++ b/media/libstagefright/rtsp/AAVCAssembler.cpp
@@ -37,12 +37,73 @@
       mAccessUnitRTPTime(0),
       mNextExpectedSeqNoValid(false),
       mNextExpectedSeqNo(0),
-      mAccessUnitDamaged(false) {
+      mAccessUnitDamaged(false),
+      mFirstIFrameProvided(false),
+      mLastIFrameProvidedAtMs(0) {
 }
 
 AAVCAssembler::~AAVCAssembler() {
 }
 
+int32_t AAVCAssembler::addNack(
+        const sp<ARTPSource> &source) {
+    List<sp<ABuffer>> *queue = source->queue();
+    int32_t nackCount = 0;
+
+    List<sp<ABuffer> >::iterator it = queue->begin();
+
+    if (it == queue->end()) {
+        return nackCount /* 0 */;
+    }
+
+    uint16_t queueHeadSeqNum = (*it)->int32Data();
+
+    // move to the packet after which RTCP:NACK was sent.
+    for (; it != queue->end(); ++it) {
+        int32_t seqNum = (*it)->int32Data();
+        if (seqNum >= source->mHighestNackNumber) {
+            break;
+        }
+    }
+
+    int32_t nackStartAt = -1;
+
+    while (it != queue->end()) {
+        int32_t seqBeforeLast = (*it)->int32Data();
+        // increase iterator.
+        if ((++it) == queue->end()) {
+            break;
+        }
+        int32_t seqLast = (*it)->int32Data();
+
+        if ((seqLast - seqBeforeLast) < 0) {
+            ALOGD("addNack: found end of seqNum from(%d) to(%d)", seqBeforeLast, seqLast);
+            source->mHighestNackNumber = 0;
+        }
+
+        // missed packet found
+        if (seqLast > (seqBeforeLast + 1) &&
+                // we didn't send RTCP:NACK for this packet yet.
+                (seqLast - 1) > source->mHighestNackNumber) {
+            source->mHighestNackNumber = seqLast - 1;
+            nackStartAt = seqBeforeLast + 1;
+            break;
+        }
+
+    }
+
+    if (nackStartAt != -1) {
+        nackCount = source->mHighestNackNumber - nackStartAt + 1;
+        ALOGD("addNack: nackCount=%d, nackFrom=%d, nackTo=%d", nackCount,
+                nackStartAt, source->mHighestNackNumber);
+
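+        // Generic-NACK-style bitmask: the low (nackCount - 1) bits flag the packets missing
+        // after nackStartAt, which is reported as the first lost sequence number.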
+        uint16_t mask = (uint16_t)(0xffff) >> (16 - nackCount + 1);
+        source->setSeqNumToNACK(nackStartAt, mask, queueHeadSeqNum);
+    }
+
+    return nackCount;
+}
+
 ARTPAssembler::AssemblyStatus AAVCAssembler::addNALUnit(
         const sp<ARTPSource> &source) {
     List<sp<ABuffer> > *queue = source->queue();
@@ -51,22 +112,62 @@
         return NOT_ENOUGH_DATA;
     }
 
+    sp<ABuffer> buffer = *queue->begin();
+    uint32_t rtpTime;
+    CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
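+    // Project the elapsed wall-clock playback time onto the source's RTP clock (playedTimeRtp)
+    // and release this packet only once rtpTime + jitterTime has been reached; until then it
+    // stays in the jitter buffer.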
+    int64_t startTime = source->mFirstSysTime / 1000;
+    int64_t nowTime = ALooper::GetNowUs() / 1000;
+    int64_t playedTime = nowTime - startTime;
+    int64_t playedTimeRtp =
+        source->mFirstRtpTime + (((uint32_t)playedTime) * (source->mClockRate / 1000));
+    const uint32_t jitterTime =
+        (uint32_t)(source->mClockRate / ((float)1000 / (source->mJbTimeMs)));
+    uint32_t expiredTimeInJb = rtpTime + jitterTime;
+    bool isExpired = expiredTimeInJb <= (playedTimeRtp);
+    bool isTooLate200 = expiredTimeInJb < (playedTimeRtp - jitterTime);
+    bool isTooLate300 = expiredTimeInJb < (playedTimeRtp - (jitterTime * 3 / 2));
+
+    if (mShowQueue && mShowQueueCnt < 20) {
+        showCurrentQueue(queue);
+        printNowTimeUs(startTime, nowTime, playedTime);
+        printRTPTime(rtpTime, playedTimeRtp, expiredTimeInJb, isExpired);
+        mShowQueueCnt++;
+    }
+
+    AAVCAssembler::addNack(source);
+
+    if (!isExpired) {
+        ALOGV("buffering in jitter buffer.");
+        return NOT_ENOUGH_DATA;
+    }
+
+    if (isTooLate200) {
+        ALOGW("=== WARNING === buffer arrived 200ms late. === WARNING === ");
+    }
+
+    if (isTooLate300) {
+        ALOGW("buffer arrived after 300ms ... \t Diff in Jb=%lld \t Seq# %d",
+              ((long long)playedTimeRtp) - expiredTimeInJb, buffer->int32Data());
+        printNowTimeUs(startTime, nowTime, playedTime);
+        printRTPTime(rtpTime, playedTimeRtp, expiredTimeInJb, isExpired);
+
+        mNextExpectedSeqNo = pickProperSeq(queue, jitterTime, playedTimeRtp);
+    }
+
     if (mNextExpectedSeqNoValid) {
-        List<sp<ABuffer> >::iterator it = queue->begin();
-        while (it != queue->end()) {
-            if ((uint32_t)(*it)->int32Data() >= mNextExpectedSeqNo) {
-                break;
-            }
+        int32_t size = queue->size();
+        int32_t cntRemove = deleteUnitUnderSeq(queue, mNextExpectedSeqNo);
 
-            it = queue->erase(it);
+        if (cntRemove > 0) {
+            source->noticeAbandonBuffer(cntRemove);
+            ALOGW("delete %d of %d buffers", cntRemove, size);
         }
-
         if (queue->empty()) {
             return NOT_ENOUGH_DATA;
         }
     }
 
-    sp<ABuffer> buffer = *queue->begin();
+    buffer = *queue->begin();
 
     if (!mNextExpectedSeqNoValid) {
         mNextExpectedSeqNoValid = true;
@@ -123,12 +224,30 @@
     }
 }
 
+void AAVCAssembler::checkIFrameProvided(const sp<ABuffer> &buffer) {
+    if (buffer->size() == 0) {
+        return;
+    }
+    const uint8_t *data = buffer->data();
+    unsigned nalType = data[0] & 0x1f;
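+    // nalType 5 is an IDR (instantaneous decoder refresh) slice, i.e. a self-contained I-frame.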
+    if (nalType == 0x5) {
+        mFirstIFrameProvided = true;
+        mLastIFrameProvidedAtMs = ALooper::GetNowUs() / 1000;
+
+        uint32_t rtpTime;
+        CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
+        ALOGD("got First I-frame to be decoded. rtpTime=%u, size=%zu", rtpTime, buffer->size());
+    }
+}
+
 void AAVCAssembler::addSingleNALUnit(const sp<ABuffer> &buffer) {
     ALOGV("addSingleNALUnit of size %zu", buffer->size());
 #if !LOG_NDEBUG
     hexdump(buffer->data(), buffer->size());
 #endif
 
+    checkIFrameProvided(buffer);
+
     uint32_t rtpTime;
     CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
 
@@ -216,6 +335,11 @@
     size_t totalCount = 1;
     bool complete = false;
 
+    uint32_t rtpTimeStartAt;
+    CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTimeStartAt));
+    uint32_t startSeqNo = buffer->int32Data();
+    bool pFrame = nalType == 0x1;
+
     if (data[1] & 0x40) {
         // Huh? End bit also set on the first buffer.
 
@@ -224,6 +348,8 @@
         complete = true;
     } else {
         List<sp<ABuffer> >::iterator it = ++queue->begin();
+        int32_t connected = 1;
+        bool snapped = false;
         while (it != queue->end()) {
             ALOGV("sequence length %zu", totalCount);
 
@@ -233,26 +359,32 @@
             size_t size = buffer->size();
 
             if ((uint32_t)buffer->int32Data() != expectedSeqNo) {
-                ALOGV("sequence not complete, expected seqNo %d, got %d",
-                     expectedSeqNo, (uint32_t)buffer->int32Data());
+                ALOGD("sequence not complete, expected seqNo %u, got %u, nalType %u",
+                     expectedSeqNo, (unsigned)buffer->int32Data(), nalType);
+                snapped = true;
 
-                return WRONG_SEQUENCE_NUMBER;
+                if (!pFrame) {
+                    return WRONG_SEQUENCE_NUMBER;
+                }
             }
 
+            if (!snapped) {
+                connected++;
+            }
+
+            uint32_t rtpTime;
+            CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
             if (size < 2
                     || data[0] != indicator
                     || (data[1] & 0x1f) != nalType
-                    || (data[1] & 0x80)) {
+                    || (data[1] & 0x80)
+                    || rtpTime != rtpTimeStartAt) {
                 ALOGV("Ignoring malformed FU buffer.");
 
                 // Delete the whole start of the FU.
 
-                it = queue->begin();
-                for (size_t i = 0; i <= totalCount; ++i) {
-                    it = queue->erase(it);
-                }
-
                 mNextExpectedSeqNo = expectedSeqNo + 1;
+                deleteUnitUnderSeq(queue, mNextExpectedSeqNo);
 
                 return MALFORMED_PACKET;
             }
@@ -260,9 +392,17 @@
             totalSize += size - 2;
             ++totalCount;
 
-            expectedSeqNo = expectedSeqNo + 1;
+            expectedSeqNo = (uint32_t)buffer->int32Data() + 1;
 
             if (data[1] & 0x40) {
+                if (pFrame && !recycleUnit(startSeqNo, expectedSeqNo,
+                            connected, totalCount, 0.5f)) {
+                    mNextExpectedSeqNo = expectedSeqNo;
+                    deleteUnitUnderSeq(queue, mNextExpectedSeqNo);
+
+                    return MALFORMED_PACKET;
+                }
+
                 // This is the last fragment.
                 complete = true;
                 break;
@@ -290,6 +430,7 @@
     unit->data()[0] = (nri << 5) | nalType;
 
     size_t offset = 1;
+    int32_t cvo = -1;
     List<sp<ABuffer> >::iterator it = queue->begin();
     for (size_t i = 0; i < totalCount; ++i) {
         const sp<ABuffer> &buffer = *it;
@@ -300,6 +441,8 @@
 #endif
 
         memcpy(unit->data() + offset, buffer->data() + 2, buffer->size() - 2);
+
+        buffer->meta()->findInt32("cvo", &cvo);
         offset += buffer->size() - 2;
 
         it = queue->erase(it);
@@ -307,6 +450,10 @@
 
     unit->setRange(0, totalSize);
 
+    if (cvo >= 0) {
+        unit->meta()->setInt32("cvo", cvo);
+    }
+
     addSingleNALUnit(unit);
 
     ALOGV("successfully assembled a NAL unit from fragments.");
@@ -327,6 +474,7 @@
 
     sp<ABuffer> accessUnit = new ABuffer(totalSize);
     size_t offset = 0;
+    int32_t cvo = -1;
     for (List<sp<ABuffer> >::iterator it = mNALUnits.begin();
          it != mNALUnits.end(); ++it) {
         memcpy(accessUnit->data() + offset, "\x00\x00\x00\x01", 4);
@@ -335,6 +483,8 @@
         sp<ABuffer> nal = *it;
         memcpy(accessUnit->data() + offset, nal->data(), nal->size());
         offset += nal->size();
+
+        nal->meta()->findInt32("cvo", &cvo);
     }
 
     CopyTimes(accessUnit, *mNALUnits.begin());
@@ -343,6 +493,9 @@
     printf(mAccessUnitDamaged ? "X" : ".");
     fflush(stdout);
 #endif
+    if (cvo >= 0) {
+        accessUnit->meta()->setInt32("cvo", cvo);
+    }
 
     if (mAccessUnitDamaged) {
         accessUnit->meta()->setInt32("damaged", true);
@@ -356,22 +509,78 @@
     msg->post();
 }
 
+int32_t AAVCAssembler::pickProperSeq(const Queue *queue, uint32_t jit, int64_t play) {
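+    // Return the sequence number of the first queued packet whose RTP time still falls inside
+    // the jitter window (rtpTime + jit >= play); callers use it to re-anchor mNextExpectedSeqNo
+    // after packets have arrived too late.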
+    sp<ABuffer> buffer = *(queue->begin());
+    uint32_t rtpTime;
+    int32_t nextSeqNo = buffer->int32Data();
+
+    Queue::const_iterator it = queue->begin();
+    while (it != queue->end()) {
+        CHECK((*it)->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
+        // If a packet that is still inside the jitter window exists, it becomes the next pivot.
+        if (rtpTime + jit >= play) {
+            nextSeqNo = (*it)->int32Data();
+            break;
+        }
+        it++;
+    }
+    return nextSeqNo;
+}
+
+bool AAVCAssembler::recycleUnit(uint32_t start, uint32_t end, uint32_t connected,
+        size_t avail, float goodRatio) {
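+    // Decide whether a partially received fragmented P-frame is still worth assembling: return
+    // true when the ratio of consecutively received fragments (connected / total) meets
+    // goodRatio, so the caller only drops the unit when too much of it is missing.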
+    float total = end - start;
+    float valid = connected;
+    float exist = avail;
+    bool isRecycle = (valid / total) >= goodRatio;
+
+    ALOGV("checking p-frame losses.. recvBufs %f valid %f diff %f recycle? %d",
+            exist, valid, total, isRecycle);
+
+    return isRecycle;
+}
+
+int32_t AAVCAssembler::deleteUnitUnderSeq(Queue *queue, uint32_t seq) {
+    int32_t initSize = queue->size();
+    Queue::iterator it = queue->begin();
+    while (it != queue->end()) {
+        if ((uint32_t)(*it)->int32Data() >= seq) {
+            break;
+        }
+        it++;
+    }
+    queue->erase(queue->begin(), it);
+    return initSize - queue->size();
+}
+
+inline void AAVCAssembler::printNowTimeUs(int64_t start, int64_t now, int64_t play) {
+    ALOGD("start=%lld, now=%lld, played=%lld",
+            (long long)start, (long long)now, (long long)play);
+}
+
+inline void AAVCAssembler::printRTPTime(uint32_t rtp, int64_t play, uint32_t exp, bool isExp) {
+    ALOGD("rtp-time(JB)=%u, played-rtp-time(JB)=%lld, expired-rtp-time(JB)=%u isExpired=%d",
+            rtp, (long long)play, exp, isExp);
+}
+
 ARTPAssembler::AssemblyStatus AAVCAssembler::assembleMore(
         const sp<ARTPSource> &source) {
     AssemblyStatus status = addNALUnit(source);
     if (status == MALFORMED_PACKET) {
-        mAccessUnitDamaged = true;
+        uint64_t msecsSinceLastIFrame = (ALooper::GetNowUs() / 1000) - mLastIFrameProvidedAtMs;
+        if (msecsSinceLastIFrame > 1000) {
+            ALOGV("request FIR to get a new I-Frame, time since "
+                    "last I-Frame %llu ms", (unsigned long long)msecsSinceLastIFrame);
+            source->onIssueFIRByAssembler();
+        }
     }
     return status;
 }
 
 void AAVCAssembler::packetLost() {
     CHECK(mNextExpectedSeqNoValid);
-    ALOGV("packetLost (expected %d)", mNextExpectedSeqNo);
-
+    ALOGD("packetLost (expected %u)", mNextExpectedSeqNo);
     ++mNextExpectedSeqNo;
-
-    mAccessUnitDamaged = true;
 }
 
 void AAVCAssembler::onByeReceived() {
diff --git a/media/libstagefright/rtsp/AAVCAssembler.h b/media/libstagefright/rtsp/AAVCAssembler.h
index e19480c..913a868 100644
--- a/media/libstagefright/rtsp/AAVCAssembler.h
+++ b/media/libstagefright/rtsp/AAVCAssembler.h
@@ -31,6 +31,7 @@
 struct AAVCAssembler : public ARTPAssembler {
     explicit AAVCAssembler(const sp<AMessage> &notify);
 
+    typedef List<sp<ABuffer> > Queue;
 protected:
     virtual ~AAVCAssembler();
 
@@ -45,8 +46,12 @@
     bool mNextExpectedSeqNoValid;
     uint32_t mNextExpectedSeqNo;
     bool mAccessUnitDamaged;
+    bool mFirstIFrameProvided;
+    uint64_t mLastIFrameProvidedAtMs;
     List<sp<ABuffer> > mNALUnits;
 
+    int32_t addNack(const sp<ARTPSource> &source);
+    void checkIFrameProvided(const sp<ABuffer> &buffer);
     AssemblyStatus addNALUnit(const sp<ARTPSource> &source);
     void addSingleNALUnit(const sp<ABuffer> &buffer);
     AssemblyStatus addFragmentedNALUnit(List<sp<ABuffer> > *queue);
@@ -54,6 +59,13 @@
 
     void submitAccessUnit();
 
+    int32_t pickProperSeq(const Queue *q, uint32_t jit, int64_t play);
+    bool recycleUnit(uint32_t start, uint32_t end, uint32_t connected,
+            size_t avail, float goodRatio);
+    int32_t deleteUnitUnderSeq(Queue *q, uint32_t seq);
+    void printNowTimeUs(int64_t start, int64_t now, int64_t play);
+    void printRTPTime(uint32_t rtp, int64_t play, uint32_t exp, bool isExp);
+
     DISALLOW_EVIL_CONSTRUCTORS(AAVCAssembler);
 };
 
diff --git a/media/libstagefright/rtsp/AHEVCAssembler.cpp b/media/libstagefright/rtsp/AHEVCAssembler.cpp
new file mode 100644
index 0000000..148a0ba
--- /dev/null
+++ b/media/libstagefright/rtsp/AHEVCAssembler.cpp
@@ -0,0 +1,662 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "AHEVCAssembler"
+#include <utils/Log.h>
+
+#include "AHEVCAssembler.h"
+
+#include "ARTPSource.h"
+
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <include/HevcUtils.h>
+#include <media/stagefright/foundation/hexdump.h>
+
+#include <stdint.h>
+
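+// HEVC NAL unit types (6-bit field in bits 1..6 of the first payload header
+// byte). 0x20-0x22 are VPS/SPS/PPS; 0x30 (AP), 0x31 (FU) and 0x32 (PACI) are
+// the RTP-specific aggregation/fragmentation types from RFC 7798.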
+#define H265_NALU_MASK 0x3F
+#define H265_NALU_VPS 0x20
+#define H265_NALU_SPS 0x21
+#define H265_NALU_PPS 0x22
+#define H265_NALU_AP 0x30
+#define H265_NALU_FU 0x31
+#define H265_NALU_PACI 0x32
+
+
+namespace android {
+
+// static
+AHEVCAssembler::AHEVCAssembler(const sp<AMessage> &notify)
+    : mNotifyMsg(notify),
+      mAccessUnitRTPTime(0),
+      mNextExpectedSeqNoValid(false),
+      mNextExpectedSeqNo(0),
+      mAccessUnitDamaged(false),
+      mFirstIFrameProvided(false),
+      mLastIFrameProvidedAtMs(0),
+      mWidth(0),
+      mHeight(0) {
+
+      ALOGV("Constructor");
+}
+
+AHEVCAssembler::~AHEVCAssembler() {
+}
+
+int32_t AHEVCAssembler::addNack(
+        const sp<ARTPSource> &source) {
+    List<sp<ABuffer>> *queue = source->queue();
+    int32_t nackCount = 0;
+
+    List<sp<ABuffer> >::iterator it = queue->begin();
+
+    if (it == queue->end()) {
+        return nackCount /* 0 */;
+    }
+
+    uint16_t queueHeadSeqNum = (*it)->int32Data();
+
+    // move to the packet after which RTCP:NACK was sent.
+    for (; it != queue->end(); ++it) {
+        int32_t seqNum = (*it)->int32Data();
+        if (seqNum >= source->mHighestNackNumber) {
+            break;
+        }
+    }
+
+    int32_t nackStartAt = -1;
+
+    while (it != queue->end()) {
+        int32_t seqBeforeLast = (*it)->int32Data();
+        // increase iterator.
+        if ((++it) == queue->end()) {
+            break;
+        }
+
+        int32_t seqLast = (*it)->int32Data();
+
+        if ((seqLast - seqBeforeLast) < 0) {
+            ALOGD("addNack: found end of seqNum from(%d) to(%d)", seqBeforeLast, seqLast);
+            source->mHighestNackNumber = 0;
+        }
+
+        // missed packet found
+        if (seqLast > (seqBeforeLast + 1) &&
+            // we didn't send RTCP:NACK for this packet yet.
+            (seqLast - 1) > source->mHighestNackNumber) {
+            source->mHighestNackNumber = seqLast - 1;
+            nackStartAt = seqBeforeLast + 1;
+            break;
+        }
+
+    }
+
+    if (nackStartAt != -1) {
+        nackCount = source->mHighestNackNumber - nackStartAt + 1;
+        ALOGD("addNack: nackCount=%d, nackFrom=%d, nackTo=%d", nackCount,
+            nackStartAt, source->mHighestNackNumber);
+
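+        // Build the RFC 4585 generic NACK bitmask (BLP): one bit for each of
+        // the nackCount - 1 packets following nackStartAt.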
+        uint16_t mask = (uint16_t)(0xffff) >> (16 - nackCount + 1);
+        source->setSeqNumToNACK(nackStartAt, mask, queueHeadSeqNum);
+    }
+
+    return nackCount;
+}
+
+ARTPAssembler::AssemblyStatus AHEVCAssembler::addNALUnit(
+        const sp<ARTPSource> &source) {
+    List<sp<ABuffer> > *queue = source->queue();
+
+    if (queue->empty()) {
+        return NOT_ENOUGH_DATA;
+    }
+
+    sp<ABuffer> buffer = *queue->begin();
+    buffer->meta()->setObject("source", source);
+    uint32_t rtpTime;
+    CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
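+    // Map the elapsed wall-clock time onto the RTP timeline and derive how
+    // long this packet may still wait in the jitter buffer (mJbTimeMs
+    // expressed in RTP clock ticks).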
+    int64_t startTime = source->mFirstSysTime / 1000;
+    int64_t nowTime = ALooper::GetNowUs() / 1000;
+    int64_t playedTime = nowTime - startTime;
+    int64_t playedTimeRtp = source->mFirstRtpTime +
+        (((uint32_t)playedTime) * (source->mClockRate / 1000));
+    const uint32_t jitterTime = (uint32_t)(source->mClockRate / ((float)1000 / (source->mJbTimeMs)));
+    uint32_t expiredTimeInJb = rtpTime + jitterTime;
+    bool isExpired = expiredTimeInJb <= (playedTimeRtp);
+    bool isTooLate200 = expiredTimeInJb < (playedTimeRtp - jitterTime);
+    bool isTooLate300 = expiredTimeInJb < (playedTimeRtp - (jitterTime * 3 / 2));
+
+    if (mShowQueueCnt < 20) {
+        showCurrentQueue(queue);
+        printNowTimeUs(startTime, nowTime, playedTime);
+        printRTPTime(rtpTime, playedTimeRtp, expiredTimeInJb, isExpired);
+        mShowQueueCnt++;
+    }
+
+    AHEVCAssembler::addNack(source);
+
+    if (!isExpired) {
+        ALOGV("buffering in jitter buffer.");
+        return NOT_ENOUGH_DATA;
+    }
+
+    if (isTooLate200) {
+        ALOGW("=== WARNING === buffer arrived 200ms late. === WARNING === ");
+    }
+
+    if (isTooLate300) {
+        ALOGW("buffer arrived after 300ms ... \t Diff in Jb=%lld \t Seq# %d",
+              ((long long)playedTimeRtp) - expiredTimeInJb, buffer->int32Data());
+        printNowTimeUs(startTime, nowTime, playedTime);
+        printRTPTime(rtpTime, playedTimeRtp, expiredTimeInJb, isExpired);
+
+        mNextExpectedSeqNo = pickProperSeq(queue, jitterTime, playedTimeRtp);
+    }
+
+    if (mNextExpectedSeqNoValid) {
+        int32_t size = queue->size();
+        int32_t cntRemove = deleteUnitUnderSeq(queue, mNextExpectedSeqNo);
+
+        if (cntRemove > 0) {
+            source->noticeAbandonBuffer(cntRemove);
+            ALOGW("delete %d of %d buffers", cntRemove, size);
+        }
+
+        if (queue->empty()) {
+            return NOT_ENOUGH_DATA;
+        }
+    }
+
+    buffer = *queue->begin();
+
+    if (!mNextExpectedSeqNoValid) {
+        mNextExpectedSeqNoValid = true;
+        mNextExpectedSeqNo = (uint32_t)buffer->int32Data();
+    } else if ((uint32_t)buffer->int32Data() != mNextExpectedSeqNo) {
+        ALOGV("Not the sequence number I expected");
+
+        return WRONG_SEQUENCE_NUMBER;
+    }
+
+    const uint8_t *data = buffer->data();
+    size_t size = buffer->size();
+
+    if (size < 1 || (data[0] & 0x80)) {
+        // Corrupt.
+
+        ALOGV("Ignoring corrupt buffer.");
+        queue->erase(queue->begin());
+
+        ++mNextExpectedSeqNo;
+        return MALFORMED_PACKET;
+    }
+
+    unsigned nalType = (data[0] >> 1) & H265_NALU_MASK;
+    if (nalType > 0 && nalType < H265_NALU_AP) {
+        addSingleNALUnit(buffer);
+        queue->erase(queue->begin());
+        ++mNextExpectedSeqNo;
+        return OK;
+    } else if (nalType == H265_NALU_FU) {
+        // Fragmentation unit (FU)
+        return addFragmentedNALUnit(queue);
+    } else if (nalType == H265_NALU_AP) {
+        // Aggregation packet (AP)
+        bool success = addSingleTimeAggregationPacket(buffer);
+        queue->erase(queue->begin());
+        ++mNextExpectedSeqNo;
+
+        return success ? OK : MALFORMED_PACKET;
+    } else if (nalType == 0) {
+        ALOGV("Ignoring undefined nal type.");
+
+        queue->erase(queue->begin());
+        ++mNextExpectedSeqNo;
+
+        return OK;
+    } else {
+        ALOGV("Ignoring unsupported buffer (nalType=%d)", nalType);
+
+        queue->erase(queue->begin());
+        ++mNextExpectedSeqNo;
+
+        return MALFORMED_PACKET;
+    }
+}
+
+void AHEVCAssembler::checkSpsUpdated(const sp<ABuffer> &buffer) {
+    if (buffer->size() == 0) {
+        return;
+    }
+    const uint8_t *data = buffer->data();
+    HevcParameterSets paramSets;
+    unsigned nalType = (data[0] >> 1) & H265_NALU_MASK;
+    if (nalType == H265_NALU_SPS) {
+        int32_t width = 0, height = 0;
+        paramSets.FindHEVCDimensions(buffer, &width, &height);
+        ALOGV("existing resolution (%u x %u)", mWidth, mHeight);
+        if (width != mWidth || height != mHeight) {
+            mFirstIFrameProvided = false;
+            mWidth = width;
+            mHeight = height;
+            ALOGD("found a new resolution (%u x %u)", mWidth, mHeight);
+        }
+    }
+}
+
+void AHEVCAssembler::checkIFrameProvided(const sp<ABuffer> &buffer) {
+    if (buffer->size() == 0) {
+        return;
+    }
+    const uint8_t *data = buffer->data();
+    unsigned nalType = (data[0] >> 1) & H265_NALU_MASK;
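+    // NAL types 16..23 (0x10..0x17) are IRAP pictures in HEVC; treat them as
+    // I-frames.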
+    if (nalType > 0x0F && nalType < 0x18) {
+        mLastIFrameProvidedAtMs = ALooper::GetNowUs() / 1000;
+        if (!mFirstIFrameProvided) {
+            mFirstIFrameProvided = true;
+            uint32_t rtpTime;
+            CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
+            ALOGD("got First I-frame to be decoded. rtpTime=%d, size=%zu", rtpTime, buffer->size());
+        }
+    }
+}
+
+bool AHEVCAssembler::dropFramesUntilIframe(const sp<ABuffer> &buffer) {
+    if (buffer->size() == 0) {
+        return false;
+    }
+    const uint8_t *data = buffer->data();
+    unsigned nalType = (data[0] >> 1) & H265_NALU_MASK;
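+    // Drop non-IRAP slices (types below 0x10) until the first I-frame has
+    // been delivered.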
+    return !mFirstIFrameProvided && nalType < 0x10;
+}
+
+void AHEVCAssembler::addSingleNALUnit(const sp<ABuffer> &buffer) {
+    ALOGV("addSingleNALUnit of size %zu", buffer->size());
+#if !LOG_NDEBUG
+    hexdump(buffer->data(), buffer->size());
+#endif
+    checkSpsUpdated(buffer);
+    checkIFrameProvided(buffer);
+
+    uint32_t rtpTime;
+    CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
+
+    if (dropFramesUntilIframe(buffer)) {
+        sp<ARTPSource> source = nullptr;
+        buffer->meta()->findObject("source", (sp<android::RefBase>*)&source);
+        if (source != nullptr) {
+            ALOGD("Issued FIR to get the I-frame");
+            source->onIssueFIRByAssembler();
+        }
+        ALOGD("drop P-frames till an I-frame provided. rtpTime %u", rtpTime);
+        return;
+    }
+
+    if (!mNALUnits.empty() && rtpTime != mAccessUnitRTPTime) {
+        submitAccessUnit();
+    }
+    mAccessUnitRTPTime = rtpTime;
+
+    mNALUnits.push_back(buffer);
+}
+
+bool AHEVCAssembler::addSingleTimeAggregationPacket(const sp<ABuffer> &buffer) {
+    const uint8_t *data = buffer->data();
+    size_t size = buffer->size();
+
+    if (size < 3) {
+        ALOGV("Discarding too small STAP-A packet.");
+        return false;
+    }
+
+    ++data;
+    --size;
+    while (size >= 2) {
+        size_t nalSize = (data[0] << 8) | data[1];
+
+        if (size < nalSize + 2) {
+            ALOGV("Discarding malformed STAP-A packet.");
+            return false;
+        }
+
+        sp<ABuffer> unit = new ABuffer(nalSize);
+        memcpy(unit->data(), &data[2], nalSize);
+
+        CopyTimes(unit, buffer);
+
+        addSingleNALUnit(unit);
+
+        data += 2 + nalSize;
+        size -= 2 + nalSize;
+    }
+
+    if (size != 0) {
+        ALOGV("Unexpected padding at end of STAP-A packet.");
+    }
+
+    return true;
+}
+
+ARTPAssembler::AssemblyStatus AHEVCAssembler::addFragmentedNALUnit(
+        List<sp<ABuffer> > *queue) {
+    CHECK(!queue->empty());
+
+    sp<ABuffer> buffer = *queue->begin();
+    const uint8_t *data = buffer->data();
+    size_t size = buffer->size();
+
+    CHECK(size > 0);
+    /*  The H.265 payload header is 16 bits:
+        0 1 2 3 4 5 6 7 0 1 2 3 4 5 6 7
+       +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+       |F|     Type  |  Layer ID | TID |
+       +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+     */
+    unsigned indicator = (data[0] >> 1);
+
+    CHECK((indicator & H265_NALU_MASK) == H265_NALU_FU);
+
+    if (size < 3) {
+        ALOGV("Ignoring malformed FU buffer (size = %zu)", size);
+
+        queue->erase(queue->begin());
+        ++mNextExpectedSeqNo;
+        return MALFORMED_PACKET;
+    }
+
+    if (!(data[2] & 0x80)) {
+        // Start bit not set on the first buffer.
+
+        ALOGV("Start bit not set on first buffer");
+
+        queue->erase(queue->begin());
+        ++mNextExpectedSeqNo;
+        return MALFORMED_PACKET;
+    }
+
+    /*  FU header
+        0 1 2 3 4 5 6 7
+       +-+-+-+-+-+-+-+-+
+       |S|E|   Type    |
+       +-+-+-+-+-+-+-+-+
+     */
+    uint32_t nalType = data[2] & H265_NALU_MASK;
+    uint32_t tid = data[1] & 0x7;
+    ALOGV("nalType =%u, tid =%u", nalType, tid);
+
+    uint32_t expectedSeqNo = (uint32_t)buffer->int32Data() + 1;
+    size_t totalSize = size - 3;
+    size_t totalCount = 1;
+    bool complete = false;
+
+    uint32_t rtpTimeStartAt;
+    CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTimeStartAt));
+    uint32_t startSeqNo = buffer->int32Data();
+    bool pFrame = (nalType < 0x10);
+
+    if (data[2] & 0x40) {
+        // Huh? End bit also set on the first buffer.
+
+        ALOGV("Grrr. This isn't fragmented at all.");
+
+        complete = true;
+    } else {
+        List<sp<ABuffer> >::iterator it = ++queue->begin();
+        int32_t connected = 1;
+        bool snapped = false;
+        while (it != queue->end()) {
+            ALOGV("sequence length %zu", totalCount);
+
+            const sp<ABuffer> &buffer = *it;
+
+            const uint8_t *data = buffer->data();
+            size_t size = buffer->size();
+
+            if ((uint32_t)buffer->int32Data() != expectedSeqNo) {
+                ALOGV("sequence not complete, expected seqNo %u, got %u, nalType %u",
+                     expectedSeqNo, (uint32_t)buffer->int32Data(), nalType);
+                snapped = true;
+
+                if (!pFrame) {
+                    return WRONG_SEQUENCE_NUMBER;
+                }
+            }
+
+            if (!snapped) {
+                connected++;
+            }
+
+            uint32_t rtpTime;
+            CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
+            if (size < 3
+                    || ((data[0] >> 1) & H265_NALU_MASK) != indicator
+                    || (data[2] & H265_NALU_MASK) != nalType
+                    || (data[2] & 0x80)
+                    || rtpTime != rtpTimeStartAt) {
+                ALOGV("Ignoring malformed FU buffer.");
+
+                // Delete the whole start of the FU.
+
+                mNextExpectedSeqNo = expectedSeqNo + 1;
+                deleteUnitUnderSeq(queue, mNextExpectedSeqNo);
+
+                return MALFORMED_PACKET;
+            }
+
+            totalSize += size - 3;
+            ++totalCount;
+
+            expectedSeqNo = (uint32_t)buffer->int32Data() + 1;
+
+            if (data[2] & 0x40) {
+                if (pFrame && !recycleUnit(startSeqNo, expectedSeqNo,
+                        connected, totalCount, 0.5f)) {
+                    mNextExpectedSeqNo = expectedSeqNo;
+                    deleteUnitUnderSeq(queue, mNextExpectedSeqNo);
+
+                    return MALFORMED_PACKET;
+                }
+                // This is the last fragment.
+                complete = true;
+                break;
+            }
+
+            ++it;
+        }
+    }
+
+    if (!complete) {
+        return NOT_ENOUGH_DATA;
+    }
+
+    mNextExpectedSeqNo = expectedSeqNo;
+
+    // We found all the fragments that make up the complete NAL unit.
+
+    // Leave room for the 2-byte NAL unit header; so far totalSize did not
+    // include it.
+    totalSize += 2;
+
+    sp<ABuffer> unit = new ABuffer(totalSize);
+    CopyTimes(unit, *queue->begin());
+
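+    // Rebuild the 2-byte HEVC NAL unit header from the FU header's type and
+    // the TID of the payload header (nuh_layer_id is left as 0).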
+    unit->data()[0] = (nalType << 1);
+    unit->data()[1] = tid;
+
+    size_t offset = 2;
+    int32_t cvo = -1;
+    List<sp<ABuffer> >::iterator it = queue->begin();
+    for (size_t i = 0; i < totalCount; ++i) {
+        const sp<ABuffer> &buffer = *it;
+
+        ALOGV("piece #%zu/%zu", i + 1, totalCount);
+#if !LOG_NDEBUG
+        hexdump(buffer->data(), buffer->size());
+#endif
+
+        memcpy(unit->data() + offset, buffer->data() + 3, buffer->size() - 3);
+        buffer->meta()->findInt32("cvo", &cvo);
+        offset += buffer->size() - 3;
+
+        it = queue->erase(it);
+    }
+
+    unit->setRange(0, totalSize);
+
+    if (cvo >= 0) {
+        unit->meta()->setInt32("cvo", cvo);
+    }
+
+    addSingleNALUnit(unit);
+
+    ALOGV("successfully assembled a NAL unit from fragments.");
+
+    return OK;
+}
+
+void AHEVCAssembler::submitAccessUnit() {
+    CHECK(!mNALUnits.empty());
+
+    ALOGV("Access unit complete (%zu nal units)", mNALUnits.size());
+
+    size_t totalSize = 0;
+    for (List<sp<ABuffer> >::iterator it = mNALUnits.begin();
+         it != mNALUnits.end(); ++it) {
+        totalSize += 4 + (*it)->size();
+    }
+
+    sp<ABuffer> accessUnit = new ABuffer(totalSize);
+    size_t offset = 0;
+    int32_t cvo = -1;
+    for (List<sp<ABuffer> >::iterator it = mNALUnits.begin();
+         it != mNALUnits.end(); ++it) {
+        memcpy(accessUnit->data() + offset, "\x00\x00\x00\x01", 4);
+        offset += 4;
+
+        sp<ABuffer> nal = *it;
+        memcpy(accessUnit->data() + offset, nal->data(), nal->size());
+        offset += nal->size();
+        nal->meta()->findInt32("cvo", &cvo);
+    }
+
+    CopyTimes(accessUnit, *mNALUnits.begin());
+
+#if 0
+    printf(mAccessUnitDamaged ? "X" : ".");
+    fflush(stdout);
+#endif
+    if (cvo >= 0) {
+        accessUnit->meta()->setInt32("cvo", cvo);
+    }
+
+    if (mAccessUnitDamaged) {
+        accessUnit->meta()->setInt32("damaged", true);
+    }
+
+    mNALUnits.clear();
+    mAccessUnitDamaged = false;
+
+    sp<AMessage> msg = mNotifyMsg->dup();
+    msg->setBuffer("access-unit", accessUnit);
+    msg->post();
+}
+
+int32_t AHEVCAssembler::pickProperSeq(const Queue *queue, uint32_t jit, int64_t play) {
+    sp<ABuffer> buffer = *(queue->begin());
+    uint32_t rtpTime;
+    int32_t nextSeqNo = buffer->int32Data();
+
+    Queue::const_iterator it = queue->begin();
+    while (it != queue->end()) {
+        CHECK((*it)->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
+        // The first packet still within the jitter window becomes the next pivot.
+        if (rtpTime + jit >= play) {
+            nextSeqNo = (*it)->int32Data();
+            break;
+        }
+        it++;
+    }
+    return nextSeqNo;
+}
+
+bool AHEVCAssembler::recycleUnit(uint32_t start, uint32_t end, uint32_t connected,
+        size_t avail, float goodRatio) {
+    float total = end - start;
+    float valid = connected;
+    float exist = avail;
+    bool isRecycle = (valid / total) >= goodRatio;
+
+    ALOGV("checking p-frame losses.. recvBufs %f valid %f diff %f recycle? %d",
+            exist, valid, total, isRecycle);
+
+    return isRecycle;
+}
+
+int32_t AHEVCAssembler::deleteUnitUnderSeq(Queue *queue, uint32_t seq) {
+    int32_t initSize = queue->size();
+    Queue::iterator it = queue->begin();
+    while (it != queue->end()) {
+        if ((uint32_t)(*it)->int32Data() >= seq) {
+            break;
+        }
+        it++;
+    }
+    queue->erase(queue->begin(), it);
+    return initSize - queue->size();
+}
+
+inline void AHEVCAssembler::printNowTimeUs(int64_t start, int64_t now, int64_t play) {
+    ALOGD("start=%lld, now=%lld, played=%lld",
+            (long long)start, (long long)now, (long long)play);
+}
+
+inline void AHEVCAssembler::printRTPTime(uint32_t rtp, int64_t play, uint32_t exp, bool isExp) {
+    ALOGD("rtp-time(JB)=%u, played-rtp-time(JB)=%lld, expired-rtp-time(JB)=%u isExpired=%d",
+            rtp, (long long)play, exp, isExp);
+}
+
+ARTPAssembler::AssemblyStatus AHEVCAssembler::assembleMore(
+        const sp<ARTPSource> &source) {
+    AssemblyStatus status = addNALUnit(source);
+    if (status == MALFORMED_PACKET) {
+        uint64_t msecsSinceLastIFrame = (ALooper::GetNowUs() / 1000) - mLastIFrameProvidedAtMs;
+        if (msecsSinceLastIFrame > 1000) {
+            ALOGV("request FIR to get a new I-Frame, time after "
+                    "last I-Frame in %llu ms", (unsigned long long)msecsSinceLastIFrame);
+            source->onIssueFIRByAssembler();
+        }
+    }
+    return status;
+}
+
+void AHEVCAssembler::packetLost() {
+    CHECK(mNextExpectedSeqNoValid);
+    ALOGD("packetLost (expected %u)", mNextExpectedSeqNo);
+
+    ++mNextExpectedSeqNo;
+}
+
+void AHEVCAssembler::onByeReceived() {
+    sp<AMessage> msg = mNotifyMsg->dup();
+    msg->setInt32("eos", true);
+    msg->post();
+}
+
+}  // namespace android
diff --git a/media/libstagefright/rtsp/AHEVCAssembler.h b/media/libstagefright/rtsp/AHEVCAssembler.h
new file mode 100644
index 0000000..16fc1c8
--- /dev/null
+++ b/media/libstagefright/rtsp/AHEVCAssembler.h
@@ -0,0 +1,79 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef A_HEVC_ASSEMBLER_H_
+
+#define A_HEVC_ASSEMBLER_H_
+
+#include "ARTPAssembler.h"
+
+#include <utils/List.h>
+#include <utils/RefBase.h>
+
+namespace android {
+
+struct ABuffer;
+struct AMessage;
+
+struct AHEVCAssembler : public ARTPAssembler {
+    AHEVCAssembler(const sp<AMessage> &notify);
+
+    typedef List<sp<ABuffer> > Queue;
+
+protected:
+    virtual ~AHEVCAssembler();
+
+    virtual AssemblyStatus assembleMore(const sp<ARTPSource> &source);
+    virtual void onByeReceived();
+    virtual void packetLost();
+
+private:
+    sp<AMessage> mNotifyMsg;
+
+    uint32_t mAccessUnitRTPTime;
+    bool mNextExpectedSeqNoValid;
+    uint32_t mNextExpectedSeqNo;
+    bool mAccessUnitDamaged;
+    bool mFirstIFrameProvided;
+    uint64_t mLastIFrameProvidedAtMs;
+    int32_t mWidth;
+    int32_t mHeight;
+    List<sp<ABuffer> > mNALUnits;
+
+    int32_t addNack(const sp<ARTPSource> &source);
+    void checkSpsUpdated(const sp<ABuffer> &buffer);
+    void checkIFrameProvided(const sp<ABuffer> &buffer);
+    bool dropFramesUntilIframe(const sp<ABuffer> &buffer);
+    AssemblyStatus addNALUnit(const sp<ARTPSource> &source);
+    void addSingleNALUnit(const sp<ABuffer> &buffer);
+    AssemblyStatus addFragmentedNALUnit(List<sp<ABuffer> > *queue);
+    bool addSingleTimeAggregationPacket(const sp<ABuffer> &buffer);
+
+    void submitAccessUnit();
+
+    int32_t pickProperSeq(const Queue *queue, uint32_t jit, int64_t play);
+    bool recycleUnit(uint32_t start, uint32_t end, uint32_t connected,
+            size_t avail, float goodRatio);
+    int32_t deleteUnitUnderSeq(Queue *queue, uint32_t seq);
+    void printNowTimeUs(int64_t start, int64_t now, int64_t play);
+    void printRTPTime(uint32_t rtp, int64_t play, uint32_t exp, bool isExp);
+
+    DISALLOW_EVIL_CONSTRUCTORS(AHEVCAssembler);
+};
+
+}  // namespace android
+
+#endif  // A_HEVC_ASSEMBLER_H_
diff --git a/media/libstagefright/rtsp/APacketSource.cpp b/media/libstagefright/rtsp/APacketSource.cpp
index 574bd7a..8f4df8e 100644
--- a/media/libstagefright/rtsp/APacketSource.cpp
+++ b/media/libstagefright/rtsp/APacketSource.cpp
@@ -454,6 +454,17 @@
 
         mFormat->setInt32(kKeyWidth, width);
         mFormat->setInt32(kKeyHeight, height);
+    } else if (!strncmp(desc.c_str(), "H265/", 5)) {
+        mFormat->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_HEVC);
+
+        int32_t width, height;
+        if (!sessionDesc->getDimensions(index, PT, &width, &height)) {
+            width = -1;
+            height = -1;
+        }
+
+        mFormat->setInt32(kKeyWidth, width);
+        mFormat->setInt32(kKeyHeight, height);
     } else if (!strncmp(desc.c_str(), "H263-2000/", 10)
             || !strncmp(desc.c_str(), "H263-1998/", 10)) {
         mFormat->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_H263);
diff --git a/media/libstagefright/rtsp/ARTPAssembler.cpp b/media/libstagefright/rtsp/ARTPAssembler.cpp
index befc226..52aa3a0 100644
--- a/media/libstagefright/rtsp/ARTPAssembler.cpp
+++ b/media/libstagefright/rtsp/ARTPAssembler.cpp
@@ -14,6 +14,7 @@
  * limitations under the License.
  */
 
+#define LOG_TAG "ARTPAssembler"
 #include "ARTPAssembler.h"
 
 #include <media/stagefright/foundation/ABuffer.h>
@@ -21,12 +22,16 @@
 #include <media/stagefright/foundation/ALooper.h>
 #include <media/stagefright/foundation/AMessage.h>
 
+#include <android-base/properties.h>
+
 #include <stdint.h>
 
 namespace android {
 
 ARTPAssembler::ARTPAssembler()
-    : mFirstFailureTimeUs(-1) {
+    : mShowQueueCnt(0),
+      mFirstFailureTimeUs(-1) {
+    mShowQueue = android::base::GetBoolProperty("debug.stagefright.rtp", false);
 }
 
 void ARTPAssembler::onPacketReceived(const sp<ARTPSource> &source) {
@@ -141,4 +146,15 @@
     return accessUnit;
 }
 
+void ARTPAssembler::showCurrentQueue(List<sp<ABuffer> > *queue) {
+    AString temp("Queue elem size : ");
+    List<sp<ABuffer> >::iterator it = queue->begin();
+    while (it != queue->end()) {
+        temp.append((*it)->size());
+        temp.append("  \t");
+        it++;
+    }
+    ALOGD("%s",temp.c_str());
+};
+
 }  // namespace android
diff --git a/media/libstagefright/rtsp/ARTPAssembler.h b/media/libstagefright/rtsp/ARTPAssembler.h
index 4082d4c..191f08e 100644
--- a/media/libstagefright/rtsp/ARTPAssembler.h
+++ b/media/libstagefright/rtsp/ARTPAssembler.h
@@ -56,6 +56,11 @@
     static sp<ABuffer> MakeCompoundFromPackets(
             const List<sp<ABuffer> > &frames);
 
+    void showCurrentQueue(List<sp<ABuffer> > *queue);
+
+    bool mShowQueue;
+    int32_t mShowQueueCnt;
+
 private:
     int64_t mFirstFailureTimeUs;
 
diff --git a/media/libstagefright/rtsp/ARTPConnection.cpp b/media/libstagefright/rtsp/ARTPConnection.cpp
index 6a4706d..f57077c 100644
--- a/media/libstagefright/rtsp/ARTPConnection.cpp
+++ b/media/libstagefright/rtsp/ARTPConnection.cpp
@@ -30,6 +30,8 @@
 #include <media/stagefright/foundation/AString.h>
 #include <media/stagefright/foundation/hexdump.h>
 
+#include <android/multinetwork.h>
+
 #include <arpa/inet.h>
 #include <sys/socket.h>
 
@@ -53,6 +55,7 @@
 const int64_t ARTPConnection::kSelectTimeoutUs = 1000LL;
 
 struct ARTPConnection::StreamInfo {
+    bool isIPv6;
     int mRTPSocket;
     int mRTCPSocket;
     sp<ASessionDescription> mSessionDesc;
@@ -63,14 +66,21 @@
     int64_t mNumRTCPPacketsReceived;
     int64_t mNumRTPPacketsReceived;
     struct sockaddr_in mRemoteRTCPAddr;
+    struct sockaddr_in6 mRemoteRTCPAddr6;
 
     bool mIsInjected;
+
+    // RTP header extension for CVO (video orientation)
+    int mCVOExtMap; // will be set to 0 if CVO is not negotiated in the SDP
 };
 
 ARTPConnection::ARTPConnection(uint32_t flags)
     : mFlags(flags),
       mPollEventPending(false),
-      mLastReceiverReportTimeUs(-1) {
+      mLastReceiverReportTimeUs(-1),
+      mLastBitrateReportTimeUs(-1),
+      mTargetBitrate(-1),
+      mJbTimeMs(300) {
 }
 
 ARTPConnection::~ARTPConnection() {
@@ -145,6 +155,117 @@
     TRESPASS();
 }
 
+// static
+void ARTPConnection::MakeRTPSocketPair(
+        int *rtpSocket, int *rtcpSocket, const char *localIp, const char *remoteIp,
+        unsigned localPort, unsigned remotePort, int64_t socketNetwork) {
+    bool isIPv6 = false;
+    if (strchr(localIp, ':') != NULL)
+        isIPv6 = true;
+
+    *rtpSocket = socket(isIPv6 ? AF_INET6 : AF_INET, SOCK_DGRAM, 0);
+    CHECK_GE(*rtpSocket, 0);
+
+    bumpSocketBufferSize(*rtpSocket);
+
+    *rtcpSocket = socket(isIPv6 ? AF_INET6 : AF_INET, SOCK_DGRAM, 0);
+    CHECK_GE(*rtcpSocket, 0);
+
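+    // If a specific network handle was given (e.g. a dedicated IMS network),
+    // bind both sockets to it via android_setsocknetwork().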
+    if (socketNetwork != 0) {
+        ALOGD("trying to bind rtp socket(%d) to network(%llu).",
+                *rtpSocket, (unsigned long long)socketNetwork);
+
+        int result = android_setsocknetwork((net_handle_t)socketNetwork, *rtpSocket);
+        if (result != 0) {
+            ALOGW("failed(%d) to bind rtp socket(%d) to network(%llu)",
+                    result, *rtpSocket, (unsigned long long)socketNetwork);
+        }
+        result = android_setsocknetwork((net_handle_t)socketNetwork, *rtcpSocket);
+        if (result != 0) {
+            ALOGW("failed(%d) to bind rtcp socket(%d) to network(%llu)",
+                    result, *rtcpSocket, (unsigned long long)socketNetwork);
+        }
+    }
+
+    bumpSocketBufferSize(*rtcpSocket);
+
+    struct sockaddr *addr;
+    struct sockaddr_in addr4;
+    struct sockaddr_in6 addr6;
+
+    if (isIPv6) {
+        addr = (struct sockaddr *)&addr6;
+        memset(&addr6, 0, sizeof(addr6));
+        addr6.sin6_family = AF_INET6;
+        inet_pton(AF_INET6, localIp, &addr6.sin6_addr);
+        addr6.sin6_port = htons((uint16_t)localPort);
+    } else {
+        addr = (struct sockaddr *)&addr4;
+        memset(&addr4, 0, sizeof(addr4));
+        addr4.sin_family = AF_INET;
+        addr4.sin_addr.s_addr = inet_addr(localIp);
+        addr4.sin_port = htons((uint16_t)localPort);
+    }
+
+    int sockopt = 1;
+    setsockopt(*rtpSocket, SOL_SOCKET, SO_REUSEADDR, (int *)&sockopt, sizeof(sockopt));
+    setsockopt(*rtcpSocket, SOL_SOCKET, SO_REUSEADDR, (int *)&sockopt, sizeof(sockopt));
+
+    int sizeSockSt = isIPv6 ? sizeof(addr6) : sizeof(addr4);
+
+    if (bind(*rtpSocket, addr, sizeSockSt) == 0) {
+        ALOGI("rtp socket successfully binded. addr=%s:%d", localIp, localPort);
+    } else {
+        ALOGE("failed to bind rtp socket addr=%s:%d err=%s", localIp, localPort, strerror(errno));
+        return;
+    }
+
+    if (isIPv6)
+        addr6.sin6_port = htons(localPort + 1);
+    else
+        addr4.sin_port = htons(localPort + 1);
+
+    if (bind(*rtcpSocket, addr, sizeSockSt) == 0) {
+        ALOGI("rtcp socket successfully binded. addr=%s:%d", localIp, localPort + 1);
+    } else {
+        ALOGE("failed to bind rtcp socket addr=%s:%d err=%s", localIp,
+                localPort + 1, strerror(errno));
+    }
+
+    // Reuse the addr variable as the remote address.
+    if (isIPv6) {
+        memset(&addr6, 0, sizeof(addr6));
+        addr6.sin6_family = AF_INET6;
+        inet_pton(AF_INET6, remoteIp, &addr6.sin6_addr);
+        addr6.sin6_port = htons((uint16_t)remotePort);
+    } else {
+        memset(&addr4, 0, sizeof(addr4));
+        addr4.sin_family = AF_INET;
+        addr4.sin_addr.s_addr = inet_addr(remoteIp);
+        addr4.sin_port = htons((uint16_t)remotePort);
+    }
+    if (connect(*rtpSocket, addr, sizeSockSt) == 0) {
+        ALOGI("rtp socket successfully connected to remote=%s:%d", remoteIp, remotePort);
+    } else {
+        ALOGE("failed to connect rtp socket to remote addr=%s:%d err=%s", remoteIp,
+                remotePort, strerror(errno));
+        return;
+    }
+
+    if (isIPv6)
+        addr6.sin6_port = htons(remotePort + 1);
+    else
+        addr4.sin_port = htons(remotePort + 1);
+
+    if (connect(*rtcpSocket, addr, sizeSockSt) == 0) {
+        ALOGI("rtcp socket successfully connected to remote=%s:%d", remoteIp, remotePort + 1);
+    } else {
+        ALOGE("failed to connect rtcp socket addr=%s:%d err=%s", remoteIp,
+                remotePort + 1, strerror(errno));
+        return;
+    }
+}
+
 void ARTPConnection::onMessageReceived(const sp<AMessage> &msg) {
     switch (msg->what()) {
         case kWhatAddStream:
@@ -204,6 +325,19 @@
     info->mNumRTCPPacketsReceived = 0;
     info->mNumRTPPacketsReceived = 0;
     memset(&info->mRemoteRTCPAddr, 0, sizeof(info->mRemoteRTCPAddr));
+    memset(&info->mRemoteRTCPAddr6, 0, sizeof(info->mRemoteRTCPAddr6));
+
+    sp<ASessionDescription> sessionDesc = info->mSessionDesc;
+    info->mCVOExtMap = 0;
+    for (size_t i = 1; i < sessionDesc->countTracks(); ++i) {
+        int32_t cvoExtMap;
+        if (sessionDesc->getCvoExtMap(i, &cvoExtMap)) {
+            info->mCVOExtMap = cvoExtMap;
+            ALOGI("urn:3gpp:video-orientation(cvo) found as extmap:%d", info->mCVOExtMap);
+        } else {
+            ALOGI("urn:3gpp:video-orientation(cvo) not found :%d", info->mCVOExtMap);
+        }
+    }
 
     if (!injected) {
         postPollEvent();
@@ -295,17 +429,43 @@
 
             if (err == -ECONNRESET) {
                 // socket failure, this stream is dead, Jim.
+                sp<AMessage> notify = it->mNotifyMsg->dup();
+                notify->setInt32("rtcp-event", 1);
+                notify->setInt32("payload-type", 400);
+                notify->setInt32("feedback-type", 1);
+                notify->setInt32("sender", it->mSources.valueAt(0)->getSelfID());
+                notify->post();
 
                 ALOGW("failed to receive RTP/RTCP datagram.");
                 it = mStreams.erase(it);
                 continue;
             }
 
+            // Add NACK and FIR packets that need to be sent immediately.
+            sp<ABuffer> buffer = new ABuffer(kMaxUDPSize);
+            for (size_t i = 0; i < it->mSources.size(); ++i) {
+                buffer->setRange(0, 0);
+                int cnt = it->mSources.valueAt(i)->addNACK(buffer);
+                if (cnt > 0) {
+                    ALOGV("Send NACK for lost %d Packets", cnt);
+                    send(&*it, buffer);
+                }
+
+                buffer->setRange(0, 0);
+                it->mSources.valueAt(i)->addFIR(buffer);
+                if (buffer->size() > 0) {
+                    ALOGD("Send FIR immediately for lost Packets");
+                    send(&*it, buffer);
+                }
+            }
+
             ++it;
         }
     }
 
     int64_t nowUs = ALooper::GetNowUs();
+    checkRxBitrate(nowUs);
+
     if (mLastReceiverReportTimeUs <= 0
             || mLastReceiverReportTimeUs + 5000000LL <= nowUs) {
         sp<ABuffer> buffer = new ABuffer(kMaxUDPSize);
@@ -340,13 +500,7 @@
             if (buffer->size() > 0) {
                 ALOGV("Sending RR...");
 
-                ssize_t n;
-                do {
-                    n = sendto(
-                        s->mRTCPSocket, buffer->data(), buffer->size(), 0,
-                        (const struct sockaddr *)&s->mRemoteRTCPAddr,
-                        sizeof(s->mRemoteRTCPAddr));
-                } while (n < 0 && errno == EINTR);
+                ssize_t n = send(s, buffer);
 
                 if (n <= 0) {
                     ALOGW("failed to send RTCP receiver report (%s).",
@@ -377,9 +531,22 @@
 
     sp<ABuffer> buffer = new ABuffer(65536);
 
+    struct sockaddr *pRemoteRTCPAddr;
+    int sizeSockSt;
+    if (s->isIPv6) {
+        pRemoteRTCPAddr = (struct sockaddr *)&s->mRemoteRTCPAddr6;
+        sizeSockSt = sizeof(struct sockaddr_in6);
+    } else {
+        pRemoteRTCPAddr = (struct sockaddr *)&s->mRemoteRTCPAddr;
+        sizeSockSt = sizeof(struct sockaddr_in);
+    }
     socklen_t remoteAddrLen =
         (!receiveRTP && s->mNumRTCPPacketsReceived == 0)
-            ? sizeof(s->mRemoteRTCPAddr) : 0;
+            ? sizeSockSt : 0;
+
+    if (mFlags & kViLTEConnection) {
+        remoteAddrLen = 0;
+    }
 
     ssize_t nbytes;
     do {
@@ -388,8 +555,9 @@
             buffer->data(),
             buffer->capacity(),
             0,
-            remoteAddrLen > 0 ? (struct sockaddr *)&s->mRemoteRTCPAddr : NULL,
+            remoteAddrLen > 0 ? pRemoteRTCPAddr : NULL,
             remoteAddrLen > 0 ? &remoteAddrLen : NULL);
+        mCumulativeBytes += nbytes;
     } while (nbytes < 0 && errno == EINTR);
 
     if (nbytes <= 0) {
@@ -410,6 +578,36 @@
     return err;
 }
 
+ssize_t ARTPConnection::send(const StreamInfo *info, const sp<ABuffer> buffer) {
+    struct sockaddr *pRemoteRTCPAddr;
+    int sizeSockSt;
+
+    /* It seems this isIPv6 variable is useless.
+     * We should remove it to prevent confusion */
+    if (info->isIPv6) {
+        pRemoteRTCPAddr = (struct sockaddr *)&info->mRemoteRTCPAddr6;
+        sizeSockSt = sizeof(struct sockaddr_in6);
+    } else {
+        pRemoteRTCPAddr = (struct sockaddr *)&info->mRemoteRTCPAddr;
+        sizeSockSt = sizeof(struct sockaddr_in);
+    }
+
+    if (mFlags & kViLTEConnection) {
+        ALOGV("ViLTE RTCP");
+        pRemoteRTCPAddr = NULL;
+        sizeSockSt = 0;
+    }
+
+    ssize_t n;
+    do {
+        n = sendto(
+                info->mRTCPSocket, buffer->data(), buffer->size(), 0,
+                pRemoteRTCPAddr, sizeSockSt);
+    } while (n < 0 && errno == EINTR);
+
+    return n;
+}
+
 status_t ARTPConnection::parseRTP(StreamInfo *s, const sp<ABuffer> &buffer) {
     if (s->mNumRTPPacketsReceived++ == 0) {
         sp<AMessage> notify = s->mNotifyMsg->dup();
@@ -431,6 +629,11 @@
         return -1;
     }
 
+    if ((data[1] & 0x7f) == 20 /* decimal */) {
+        // Unassigned payload type
+        return -1;
+    }
+
     if (data[0] & 0x20) {
         // Padding present.
 
@@ -454,6 +657,7 @@
         return -1;
     }
 
+    int32_t cvoDegrees = -1;
     if (data[0] & 0x10) {
         // Header eXtension present.
 
@@ -473,6 +677,7 @@
             return -1;
         }
 
+        parseRTPExt(s, (const uint8_t *)extensionData, extensionLength, &cvoDegrees);
         payloadOffset += 4 + extensionLength;
     }
 
@@ -487,6 +692,8 @@
     meta->setInt32("rtp-time", rtpTime);
     meta->setInt32("PT", data[1] & 0x7f);
     meta->setInt32("M", data[1] >> 7);
+    if (cvoDegrees >= 0)
+        meta->setInt32("cvo", cvoDegrees);
 
     buffer->setInt32Data(u16at(&data[2]));
     buffer->setRange(payloadOffset, size - payloadOffset);
@@ -496,11 +703,65 @@
     return OK;
 }
 
+status_t ARTPConnection::parseRTPExt(StreamInfo *s,
+        const uint8_t *extHeader, size_t extLen, int32_t *cvoDegrees) {
+    if (extLen < 4)
+        return -1;
+
+    uint16_t header = (extHeader[0] << 8) | (extHeader[1]);
+    bool isOnebyteHeader = false;
+
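+    // RFC 8285 header extensions: the profile value 0xBEDE marks the
+    // one-byte-header form; the two-byte-header form is not handled here.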
+    if (header == 0xBEDE) {
+        isOnebyteHeader = true;
+    } else if (header == 0x1000) {
+        ALOGW("parseRTPExt: two-byte header is not implemented yet");
+        return -1;
+    } else {
+        ALOGW("parseRTPExt: can not recognize header");
+        return -1;
+    }
+
+    const uint8_t *extPayload = extHeader + 4;
+    extLen -= 4;
+    size_t offset = 0;  // start from the first element of the RTP extension.
+    // one-byte header parser
+    while (isOnebyteHeader && offset < extLen) {
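+        // Each element starts with a byte carrying a 4-bit extmap ID and a
+        // 4-bit length-minus-one, followed by that many data bytes; a zero
+        // byte is padding.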
+        uint8_t extmapId = extPayload[offset] >> 4;
+        uint8_t length = (extPayload[offset] & 0xF) + 1;
+        offset++;
+
+        // padding case
+        if (extmapId == 0)
+            continue;
+
+        uint8_t data[16]; // maximum length value
+        for (uint8_t j = 0; offset + j <= extLen && j < length; j++) {
+            data[j] = extPayload[offset + j];
+        }
+
+        offset += length;
+
+        if (extmapId == s->mCVOExtMap) {
+            *cvoDegrees = (int32_t)data[0];
+            return OK;
+        }
+    }
+
+    return BAD_VALUE;
+}
+
 status_t ARTPConnection::parseRTCP(StreamInfo *s, const sp<ABuffer> &buffer) {
     if (s->mNumRTCPPacketsReceived++ == 0) {
         sp<AMessage> notify = s->mNotifyMsg->dup();
         notify->setInt32("first-rtcp", true);
         notify->post();
+
+        ALOGI("send first-rtcp event to upper layer as ImsRxNotice");
+        sp<AMessage> imsNotify = s->mNotifyMsg->dup();
+        imsNotify->setInt32("rtcp-event", 1);
+        imsNotify->setInt32("payload-type", 101);
+        imsNotify->setInt32("feedback-type", 0);
+        imsNotify->post();
     }
 
     const uint8_t *data = buffer->data();
@@ -551,8 +812,12 @@
                 break;
 
             case 205:  // TSFB (transport layer specific feedback)
+                parseTSFB(s, data, headerLength);
+                break;
             case 206:  // PSFB (payload specific feedback)
                 // hexdump(data, headerLength);
+                parsePSFB(s, data, headerLength);
+                ALOGI("RTCP packet type %u of size %zu", (unsigned)data[1], headerLength);
                 break;
 
             case 203:
@@ -621,6 +886,144 @@
     return 0;
 }
 
+status_t ARTPConnection::parseTSFB(
+        StreamInfo *s, const uint8_t *data, size_t size) {
+    if (size < 12) {
+        // broken packet
+        return -1;
+    }
+
+    uint8_t msgType = data[0] & 0x1f;
+    uint32_t id = u32at(&data[4]);
+
+    const uint8_t *ptr = &data[12];
+    size -= 12;
+
+    using namespace std;
+    size_t FCISize;
+    switch(msgType) {
+        case 1:     // Generic NACK
+        {
+            FCISize = 4;
+            while (size >= FCISize) {
+                uint16_t PID = u16at(&ptr[0]);  // lost packet RTP number
+                uint16_t BLP = u16at(&ptr[2]);  // Bitmask of following Lost Packets
+
+                size -= FCISize;
+                ptr += FCISize;
+
+                AString list_of_losts;
+                list_of_losts.append(PID);
+                for (int i=0 ; i<16 ; i++) {
+                    bool is_lost = BLP & (0x1 << i);
+                    if (is_lost) {
+                        list_of_losts.append(", ");
+                        list_of_losts.append(PID + i);
+                    }
+                }
+                ALOGI("Opponent losts packet of RTP %s", list_of_losts.c_str());
+            }
+            break;
+        }
+        case 3:     // TMMBR
+        case 4:     // TMMBN
+        {
+            FCISize = 8;
+            while (size >= FCISize) {
+                uint32_t MxTBR = u32at(&ptr[4]);
+                uint32_t MxTBRExp = MxTBR >> 26;
+                uint32_t MxTBRMantissa = (MxTBR >> 9) & 0x01FFFF;
+                uint32_t overhead = MxTBR & 0x01FF;
+
+                size -= FCISize;
+                ptr += FCISize;
+
+                uint32_t bitRate = (1 << MxTBRExp) * MxTBRMantissa;
+
+                if (msgType == 3)
+                    ALOGI("Op -> UE Req Tx bitrate : %d X 2^%d = %d",
+                        MxTBRMantissa, MxTBRExp, bitRate);
+                else if (msgType == 4)
+                    ALOGI("OP -> UE Noti Rx bitrate : %d X 2^%d = %d",
+                        MxTBRMantissa, MxTBRExp, bitRate);
+
+                sp<AMessage> notify = s->mNotifyMsg->dup();
+                notify->setInt32("rtcp-event", 1);
+                notify->setInt32("payload-type", 205);
+                notify->setInt32("feedback-type", msgType);
+                notify->setInt32("sender", id);
+                notify->setInt32("bit-rate", bitRate);
+                notify->post();
+                ALOGI("overhead : %d", overhead);
+            }
+            break;
+        }
+        default:
+        {
+            ALOGI("Not supported TSFB type %d", msgType);
+            break;
+        }
+    }
+
+    return 0;
+}
+
+status_t ARTPConnection::parsePSFB(
+        StreamInfo *s, const uint8_t *data, size_t size) {
+    if (size < 12) {
+        // broken packet
+        return -1;
+    }
+
+    uint8_t msgType = data[0] & 0x1f;
+    uint32_t id = u32at(&data[4]);
+
+    const uint8_t *ptr = &data[12];
+    size -= 12;
+
+    using namespace std;
+    switch(msgType) {
+        case 1:     // Picture Loss Indication (PLI)
+        {
+            if (size > 0) {
+                // PLI does not need parameters
+                break;
+            }
+            sp<AMessage> notify = s->mNotifyMsg->dup();
+            notify->setInt32("rtcp-event", 1);
+            notify->setInt32("payload-type", 206);
+            notify->setInt32("feedback-type", msgType);
+            notify->setInt32("sender", id);
+            notify->post();
+            ALOGI("PLI detected.");
+            break;
+        }
+        case 4:     // Full Intra Request (FIR)
+        {
+            if (size < 4) {
+                break;
+            }
+            uint32_t requestedId = u32at(&ptr[0]);
+            if (requestedId == (uint32_t)mSelfID) {
+                sp<AMessage> notify = s->mNotifyMsg->dup();
+                notify->setInt32("rtcp-event", 1);
+                notify->setInt32("payload-type", 206);
+                notify->setInt32("feedback-type", msgType);
+                notify->setInt32("sender", id);
+                notify->post();
+                ALOGI("FIR detected.");
+            }
+            break;
+        }
+        default:
+        {
+            ALOGI("Not supported PSFB type %d", msgType);
+            break;
+        }
+    }
+
+    return 0;
+}
+
 sp<ARTPSource> ARTPConnection::findSource(StreamInfo *info, uint32_t srcId) {
     sp<ARTPSource> source;
     ssize_t index = info->mSources.indexOfKey(srcId);
@@ -630,6 +1033,12 @@
         source = new ARTPSource(
                 srcId, info->mSessionDesc, info->mIndex, info->mNotifyMsg);
 
+        if (mFlags & kViLTEConnection) {
+            source->setPeriodicFIR(false);
+        }
+
+        source->setSelfID(mSelfID);
+        source->setJbTime(mJbTimeMs > 0 ? mJbTimeMs : 300);
         info->mSources.add(srcId, source);
     } else {
         source = info->mSources.valueAt(index);
@@ -645,6 +1054,72 @@
     msg->post();
 }
 
+void ARTPConnection::setSelfID(const uint32_t selfID) {
+    mSelfID = selfID;
+}
+
+void ARTPConnection::setJbTime(const uint32_t jbTimeMs) {
+    mJbTimeMs = jbTimeMs;
+}
+
+void ARTPConnection::setTargetBitrate(int32_t targetBitrate) {
+    mTargetBitrate = targetBitrate;
+}
+
+void ARTPConnection::checkRxBitrate(int64_t nowUs) {
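+    // Once per second, derive the actual received bitrate from the bytes
+    // accumulated in receive(), report it to every source and append a TMMBR
+    // carrying mTargetBitrate to the outgoing RTCP.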
+    if (mLastBitrateReportTimeUs <= 0) {
+        mCumulativeBytes = 0;
+        mLastBitrateReportTimeUs = nowUs;
+    } else if (mLastBitrateReportTimeUs + 1000000LL <= nowUs) {
+        int32_t timeDiff = (nowUs - mLastBitrateReportTimeUs) / 1000000ll;
+        int32_t bitrate = mCumulativeBytes * 8 / timeDiff;
+        ALOGI("Actual Rx bitrate : %d bits/sec", bitrate);
+
+        sp<ABuffer> buffer = new ABuffer(kMaxUDPSize);
+        List<StreamInfo>::iterator it = mStreams.begin();
+        while (it != mStreams.end()) {
+            StreamInfo *s = &*it;
+            if (s->mIsInjected) {
+                ++it;
+                continue;
+            }
+
+            if (s->mNumRTCPPacketsReceived == 0) {
+                // We have never received any RTCP packets on this stream,
+                // we don't even know where to send a report.
+                ++it;
+                continue;
+            }
+
+            buffer->setRange(0, 0);
+
+            for (size_t i = 0; i < s->mSources.size(); ++i) {
+                sp<ARTPSource> source = s->mSources.valueAt(i);
+                source->notifyPktInfo(bitrate, nowUs);
+                source->addTMMBR(buffer, mTargetBitrate);
+            }
+            if (buffer->size() > 0) {
+                ALOGV("Sending TMMBR...");
+
+                ssize_t n = send(s, buffer);
+
+                if (n <= 0) {
+                    ALOGW("failed to send RTCP TMMBR (%s).",
+                         n == 0 ? "connection gone" : strerror(errno));
+
+                    it = mStreams.erase(it);
+                    continue;
+                }
+
+                CHECK_EQ(n, (ssize_t)buffer->size());
+            }
+            ++it;
+        }
+        mCumulativeBytes = 0;
+        mLastBitrateReportTimeUs = nowUs;
+    }
+}
+
 void ARTPConnection::onInjectPacket(const sp<AMessage> &msg) {
     int32_t index;
     CHECK(msg->findInt32("index", &index));
@@ -672,4 +1147,3 @@
 }
 
 }  // namespace android
-
diff --git a/media/libstagefright/rtsp/ARTPConnection.h b/media/libstagefright/rtsp/ARTPConnection.h
index d5f7c2e..7c8218f 100644
--- a/media/libstagefright/rtsp/ARTPConnection.h
+++ b/media/libstagefright/rtsp/ARTPConnection.h
@@ -30,6 +30,7 @@
 struct ARTPConnection : public AHandler {
     enum Flags {
         kRegularlyRequestFIR = 2,
+        kViLTEConnection = 4,
     };
 
     explicit ARTPConnection(uint32_t flags = 0);
@@ -44,11 +45,22 @@
 
     void injectPacket(int index, const sp<ABuffer> &buffer);
 
+    void setSelfID(const uint32_t selfID);
+    void setJbTime(const uint32_t jbTimeMs);
+    void setTargetBitrate(int32_t targetBitrate);
+
     // Creates a pair of UDP datagram sockets bound to adjacent ports
     // (the rtpSocket is bound to an even port, the rtcpSocket to the
     // next higher port).
     static void MakePortPair(
             int *rtpSocket, int *rtcpSocket, unsigned *rtpPort);
+    // Creates a pair of UDP datagram sockets bound to assigned ip and
+    // ports (the rtpSocket is bound to an even port, the rtcpSocket
+    // to the next higher port).
+    static void MakeRTPSocketPair(
+            int *rtpSocket, int *rtcpSocket,
+            const char *localIp, const char *remoteIp,
+            unsigned localPort, unsigned remotePort, int64_t socketNetwork = 0);
 
 protected:
     virtual ~ARTPConnection();
@@ -71,18 +83,31 @@
 
     bool mPollEventPending;
     int64_t mLastReceiverReportTimeUs;
+    int64_t mLastBitrateReportTimeUs;
+
+    int32_t mSelfID;
+    int32_t mTargetBitrate;
+
+    uint32_t mJbTimeMs;
+
+    int32_t mCumulativeBytes;
 
     void onAddStream(const sp<AMessage> &msg);
     void onRemoveStream(const sp<AMessage> &msg);
     void onPollStreams();
     void onInjectPacket(const sp<AMessage> &msg);
     void onSendReceiverReports();
+    void checkRxBitrate(int64_t nowUs);
 
     status_t receive(StreamInfo *info, bool receiveRTP);
+    ssize_t send(const StreamInfo *info, const sp<ABuffer> buffer);
 
     status_t parseRTP(StreamInfo *info, const sp<ABuffer> &buffer);
+    status_t parseRTPExt(StreamInfo *s, const uint8_t *extData, size_t extLen, int32_t *cvoDegrees);
     status_t parseRTCP(StreamInfo *info, const sp<ABuffer> &buffer);
     status_t parseSR(StreamInfo *info, const uint8_t *data, size_t size);
+    status_t parseTSFB(StreamInfo *info, const uint8_t *data, size_t size);
+    status_t parsePSFB(StreamInfo *info, const uint8_t *data, size_t size);
     status_t parseBYE(StreamInfo *info, const uint8_t *data, size_t size);
 
     sp<ARTPSource> findSource(StreamInfo *info, uint32_t id);
diff --git a/media/libstagefright/rtsp/ARTPSource.cpp b/media/libstagefright/rtsp/ARTPSource.cpp
index f5f8128..6303fc4 100644
--- a/media/libstagefright/rtsp/ARTPSource.cpp
+++ b/media/libstagefright/rtsp/ARTPSource.cpp
@@ -22,6 +22,7 @@
 
 #include "AAMRAssembler.h"
 #include "AAVCAssembler.h"
+#include "AHEVCAssembler.h"
 #include "AH263Assembler.h"
 #include "AMPEG2TSAssembler.h"
 #include "AMPEG4AudioAssembler.h"
@@ -35,21 +36,31 @@
 
 namespace android {
 
-static const uint32_t kSourceID = 0xdeadbeef;
+static uint32_t kSourceID = 0xdeadbeef;
 
 ARTPSource::ARTPSource(
         uint32_t id,
         const sp<ASessionDescription> &sessionDesc, size_t index,
         const sp<AMessage> &notify)
-    : mID(id),
+    : mFirstSeqNumber(0),
+      mFirstRtpTime(0),
+      mFirstSysTime(0),
+      mClockRate(0),
+      mJbTimeMs(300), // default jitter buffer time is 300ms.
+      mFirstSsrc(0),
+      mHighestNackNumber(0),
+      mID(id),
       mHighestSeqNumber(0),
       mPrevExpected(0),
       mBaseSeqNumber(0),
       mNumBuffersReceived(0),
       mPrevNumBuffersReceived(0),
+      mPrevExpectedForRR(0),
+      mPrevNumBuffersReceivedForRR(0),
       mLastNTPTime(0),
       mLastNTPTimeUpdateUs(0),
       mIssueFIRRequests(false),
+      mIssueFIRByAssembler(false),
       mLastFIRRequestUs(-1),
       mNextFIRSeqNo((rand() * 256.0) / RAND_MAX),
       mNotify(notify) {
@@ -61,6 +72,9 @@
     if (!strncmp(desc.c_str(), "H264/", 5)) {
         mAssembler = new AAVCAssembler(notify);
         mIssueFIRRequests = true;
+    } else if (!strncmp(desc.c_str(), "H265/", 5)) {
+        mAssembler = new AHEVCAssembler(notify);
+        mIssueFIRRequests = true;
     } else if (!strncmp(desc.c_str(), "MP4A-LATM/", 10)) {
         mAssembler = new AMPEG4AudioAssembler(notify, params);
     } else if (!strncmp(desc.c_str(), "H263-1998/", 10)
@@ -112,13 +126,29 @@
 bool ARTPSource::queuePacket(const sp<ABuffer> &buffer) {
     uint32_t seqNum = (uint32_t)buffer->int32Data();
 
-    if (mNumBuffersReceived++ == 0) {
+    int32_t ssrc = 0;
+    buffer->meta()->findInt32("ssrc", &ssrc);
+
+    if (mNumBuffersReceived++ == 0 && mFirstSysTime == 0) {
+        uint32_t firstRtpTime;
+        CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&firstRtpTime));
+        mFirstSysTime = ALooper::GetNowUs();
         mHighestSeqNumber = seqNum;
         mBaseSeqNumber = seqNum;
+        mFirstRtpTime = firstRtpTime;
+        mFirstSsrc = ssrc;
+        ALOGD("first-rtp arrived: first-rtp-time=%d, sys-time=%lld, seq-num=%u, ssrc=%d",
+                mFirstRtpTime, (long long)mFirstSysTime, mHighestSeqNumber, mFirstSsrc);
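+        // Assume the 90 kHz clock rate mandated for RTP video payloads.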
+        mClockRate = 90000;
         mQueue.push_back(buffer);
         return true;
     }
 
+    if (mFirstSsrc != ssrc) {
+        ALOGW("Discarding a buffer due to unexpected ssrc");
+        return false;
+    }
+
     // Only the lower 16-bit of the sequence numbers are transmitted,
     // derive the high-order bits by choosing the candidate closest
     // to the highest sequence number (extended to 32 bits) received so far.
@@ -181,20 +211,34 @@
 }
 
 void ARTPSource::addFIR(const sp<ABuffer> &buffer) {
-    if (!mIssueFIRRequests) {
+    if (!mIssueFIRRequests && !mIssueFIRByAssembler) {
         return;
     }
 
+    bool send = false;
     int64_t nowUs = ALooper::GetNowUs();
-    if (mLastFIRRequestUs >= 0 && mLastFIRRequestUs + 5000000LL > nowUs) {
-        // Send FIR requests at most every 5 secs.
+    int64_t usecsSinceLastFIR = nowUs - mLastFIRRequestUs;
+    if (mLastFIRRequestUs < 0) {
+        // A first FIR, just send it.
+        send = true;
+    } else if (mIssueFIRByAssembler && (usecsSinceLastFIR > 1000000)) {
+        // A FIR issued by the assembler.
+        // Send it if no FIR has been sent within the last second.
+        send = true;
+    } else if (mIssueFIRRequests && (usecsSinceLastFIR > 5000000)) {
+        // A FIR issued periodically regardless of packet loss.
+        // Send it if no FIR has been sent within the last 5 seconds.
+        send = true;
+    }
+
+    if (!send) {
         return;
     }
 
     mLastFIRRequestUs = nowUs;
 
     if (buffer->size() + 20 > buffer->capacity()) {
-        ALOGW("RTCP buffer too small to accomodate FIR.");
+        ALOGW("RTCP buffer too small to accommodate FIR.");
         return;
     }
 
@@ -203,7 +247,7 @@
     data[0] = 0x80 | 4;
     data[1] = 206;  // PSFB
     data[2] = 0;
-    data[3] = 4;
+    data[3] = 4;    // total (4+1) * sizeof(int32_t) = 20 bytes
     data[4] = kSourceID >> 24;
     data[5] = (kSourceID >> 16) & 0xff;
     data[6] = (kSourceID >> 8) & 0xff;
@@ -225,14 +269,16 @@
     data[18] = 0x00;
     data[19] = 0x00;
 
-    buffer->setRange(buffer->offset(), buffer->size() + 20);
+    buffer->setRange(buffer->offset(), buffer->size() + (data[3] + 1) * sizeof(int32_t));
+
+    mIssueFIRByAssembler = false;
 
     ALOGV("Added FIR request.");
 }
 
 void ARTPSource::addReceiverReport(const sp<ABuffer> &buffer) {
     if (buffer->size() + 32 > buffer->capacity()) {
-        ALOGW("RTCP buffer too small to accomodate RR.");
+        ALOGW("RTCP buffer too small to accommodate RR.");
         return;
     }
 
@@ -240,16 +286,16 @@
 
     // According to appendix A.3 in RFC 3550
     uint32_t expected = mHighestSeqNumber - mBaseSeqNumber + 1;
-    int64_t intervalExpected = expected - mPrevExpected;
-    int64_t intervalReceived = mNumBuffersReceived - mPrevNumBuffersReceived;
+    int64_t intervalExpected = expected - mPrevExpectedForRR;
+    int64_t intervalReceived = mNumBuffersReceived - mPrevNumBuffersReceivedForRR;
     int64_t intervalPacketLost = intervalExpected - intervalReceived;
 
     if (intervalExpected > 0 && intervalPacketLost > 0) {
         fraction = (intervalPacketLost << 8) / intervalExpected;
     }
 
-    mPrevExpected = expected;
-    mPrevNumBuffersReceived = mNumBuffersReceived;
+    mPrevExpectedForRR = expected;
+    mPrevNumBuffersReceivedForRR = mNumBuffersReceived;
     int32_t cumulativePacketLost = (int32_t)expected - mNumBuffersReceived;
 
     uint8_t *data = buffer->data() + buffer->size();
@@ -257,7 +303,7 @@
     data[0] = 0x80 | 1;
     data[1] = 201;  // RR
     data[2] = 0;
-    data[3] = 7;
+    data[3] = 7;    // total (7+1) * sizeof(int32_t) = 32 bytes
     data[4] = kSourceID >> 24;
     data[5] = (kSourceID >> 16) & 0xff;
     data[6] = (kSourceID >> 8) & 0xff;
@@ -303,9 +349,193 @@
     data[30] = (DLSR >> 8) & 0xff;
     data[31] = DLSR & 0xff;
 
-    buffer->setRange(buffer->offset(), buffer->size() + 32);
+    buffer->setRange(buffer->offset(), buffer->size() + (data[3] + 1) * sizeof(int32_t));
 }
 
+void ARTPSource::addTMMBR(const sp<ABuffer> &buffer, int32_t targetBitrate) {
+    if (buffer->size() + 20 > buffer->capacity()) {
+        ALOGW("RTCP buffer too small to accommodate RR.");
+        return;
+    }
+
+    if (targetBitrate <= 0) {
+        return;
+    }
+
+    uint8_t *data = buffer->data() + buffer->size();
+
+    data[0] = 0x80 | 3; // TMMBR
+    data[1] = 205;      // TSFB
+    data[2] = 0;
+    data[3] = 4;        // total (4+1) * sizeof(int32_t) = 20 bytes
+    data[4] = kSourceID >> 24;
+    data[5] = (kSourceID >> 16) & 0xff;
+    data[6] = (kSourceID >> 8) & 0xff;
+    data[7] = kSourceID & 0xff;
+
+    *(int32_t*)(&data[8]) = 0;  // 4 bytes blank
+
+    data[12] = mID >> 24;
+    data[13] = (mID >> 16) & 0xff;
+    data[14] = (mID >> 8) & 0xff;
+    data[15] = mID & 0xff;
+
+    int32_t exp, mantissa;
+
+    // Round off to the nearest 2^4th
+    ALOGI("UE -> Op Req Rx bitrate : %d ", targetBitrate & 0xfffffff0);
+    for (exp=4 ; exp < 32 ; exp++)
+        if (((targetBitrate >> exp) & 0x01) != 0)
+            break;
+    mantissa = targetBitrate >> exp;
+
+    data[16] = ((exp << 2) & 0xfc) | ((mantissa & 0x18000) >> 15);
+    data[17] =                        (mantissa & 0x07f80) >> 7;
+    data[18] =                        (mantissa & 0x0007f) << 1;
+    data[19] = 40;              // 40 bytes overhead;
+
+    buffer->setRange(buffer->offset(), buffer->size() + (data[3] + 1) * sizeof(int32_t));
+}
+
+int ARTPSource::addNACK(const sp<ABuffer> &buffer) {
+    constexpr size_t kMaxFCIs = 10; // max number of FCIs
+    if (buffer->size() + (3 + kMaxFCIs) * sizeof(int32_t) > buffer->capacity()) {
+        ALOGW("RTCP buffer too small to accommodate NACK.");
+        return -1;
+    }
+
+    uint8_t *data = buffer->data() + buffer->size();
+
+    data[0] = 0x80 | 1; // Generic NACK
+    data[1] = 205;      // TSFB
+    data[2] = 0;
+    data[3] = 0;        // will be decided later
+    data[4] = kSourceID >> 24;
+    data[5] = (kSourceID >> 16) & 0xff;
+    data[6] = (kSourceID >> 8) & 0xff;
+    data[7] = kSourceID & 0xff;
+
+    data[8] = mID >> 24;
+    data[9] = (mID >> 16) & 0xff;
+    data[10] = (mID >> 8) & 0xff;
+    data[11] = mID & 0xff;
+
+    List<int> list;
+    List<int>::iterator it;
+    getSeqNumToNACK(list, kMaxFCIs);
+    size_t cnt = 0;
+
+    int *FCI = (int *)(data + 12);
+    for (it = list.begin(); it != list.end() && cnt < kMaxFCIs; it++) {
+        *(FCI + cnt) = *it;
+        cnt++;
+    }
+
+    data[3] = (3 + cnt) - 1;  // total (3 + #ofFCI) * sizeof(int32_t) byte
+
+    buffer->setRange(buffer->offset(), buffer->size() + (data[3] + 1) * sizeof(int32_t));
+
+    return cnt;
+}
+
+int ARTPSource::getSeqNumToNACK(List<int>& list, int size) {
+    AutoMutex _l(mMapLock);
+    int cnt = 0;
+
+    std::map<uint16_t, infoNACK>::iterator it;
+    for(it = mNACKMap.begin(); it != mNACKMap.end() && cnt < size; it++) {
+        infoNACK &info_it = it->second;
+        if (info_it.needToNACK) {
+            info_it.needToNACK = false;
+            // convert to network byte order (big-endian) before sending
+            uint32_t FCI;
+            uint8_t *temp = (uint8_t *)&FCI;
+            temp[0] = (info_it.seqNum >> 8) & 0xff;
+            temp[1] = (info_it.seqNum)      & 0xff;
+            temp[2] = (info_it.mask >> 8)   & 0xff;
+            temp[3] = (info_it.mask)        & 0xff;
+
+            list.push_back(FCI);
+            cnt++;
+        }
+    }
+
+    return cnt;
+}
+
+void ARTPSource::setSeqNumToNACK(uint16_t seqNum, uint16_t mask, uint16_t nowJitterHeadSeqNum) {
+    AutoMutex _l(mMapLock);
+    infoNACK info = {seqNum, mask, nowJitterHeadSeqNum, true};
+    std::map<uint16_t, infoNACK>::iterator it;
+
+    it = mNACKMap.find(seqNum);
+    if (it != mNACKMap.end()) {
+        infoNACK &info_it = it->second;
+        // renew if (mask or head seq) is changed
+        if ((info_it.mask != mask) || (info_it.nowJitterHeadSeqNum != nowJitterHeadSeqNum)) {
+            info_it = info;
+        }
+    } else {
+        mNACKMap[seqNum] = info;
+    }
+
+    // delete all NACK entries far behind the current jitter buffer's head sequence number
+    it = mNACKMap.begin();
+    while (it != mNACKMap.end()) {
+        infoNACK &info_it = it->second;
+
+        int diff = nowJitterHeadSeqNum - info_it.nowJitterHeadSeqNum;
+        if (diff > 100) {
+            ALOGV("Delete %d pkt from NACK map ", info_it.seqNum);
+            it = mNACKMap.erase(it);
+        } else {
+            it++;
+        }
+    }
+
+}
+
+uint32_t ARTPSource::getSelfID() {
+    return kSourceID;
+}
+
+void ARTPSource::setSelfID(const uint32_t selfID) {
+    kSourceID = selfID;
+}
+
+void ARTPSource::setJbTime(const uint32_t jbTimeMs) {
+    mJbTimeMs = jbTimeMs;
+}
+
+void ARTPSource::setPeriodicFIR(bool enable) {
+    ALOGD("setPeriodicFIR %d", enable);
+    mIssueFIRRequests = enable;
+}
+
+void ARTPSource::notifyPktInfo(int32_t bitrate, int64_t /*time*/) {
+    sp<AMessage> notify = mNotify->dup();
+    notify->setInt32("rtcp-event", 1);
+    notify->setInt32("payload-type", 102);
+    notify->setInt32("feedback-type", 0);
+    // send the target bitrate up to the application to share RTP quality.
+    notify->setInt32("bit-rate", bitrate);
+    notify->setInt32("highest-seq-num", mHighestSeqNumber);
+    notify->setInt32("base-seq-num", mBaseSeqNumber);
+    notify->setInt32("prev-expected", mPrevExpected);
+    notify->setInt32("num-buf-recv", mNumBuffersReceived);
+    notify->setInt32("prev-num-buf-recv", mPrevNumBuffersReceived);
+    notify->post();
+
+    uint32_t expected = mHighestSeqNumber - mBaseSeqNumber + 1;
+    mPrevExpected = expected;
+    mPrevNumBuffersReceived = mNumBuffersReceived;
+}
+
+void ARTPSource::onIssueFIRByAssembler() {
+    mIssueFIRByAssembler = true;
+}
+
+void ARTPSource::noticeAbandonBuffer(int cnt) {
+    mNumBuffersReceived -= cnt;
+}
 }  // namespace android
-
-
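A side note on the exponent/mantissa search in addTMMBR()/addTMMBN() above: it encodes the bitrate as mantissa << exp with the low four bits dropped, which is why the log prints targetBitrate & 0xfffffff0. A standalone sketch of that packing, for illustration only (encodeTmmbrBitrate and the main() driver are assumed names, not code from this change):

    #include <cstdint>
    #include <cstdio>

    // Find the lowest set bit at position >= 4, use it as the exponent and the
    // remaining high bits as the mantissa; assumes bitrate >= 16 so the loop
    // terminates before exp reaches 32.
    static void encodeTmmbrBitrate(int32_t bitrate, int32_t *exp, int32_t *mantissa) {
        int32_t e;
        for (e = 4; e < 32; e++) {
            if ((bitrate >> e) & 0x01) {
                break;
            }
        }
        *exp = e;
        *mantissa = bitrate >> e;   // (mantissa << exp) == (bitrate & 0xfffffff0)
    }

    int main() {
        int32_t exp = 0, mantissa = 0;
        encodeTmmbrBitrate(192000, &exp, &mantissa);
        printf("exp=%d mantissa=%d reconstructed=%d\n", exp, mantissa, mantissa << exp);
        // prints: exp=9 mantissa=375 reconstructed=192000
        return 0;
    }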
diff --git a/media/libstagefright/rtsp/ARTPSource.h b/media/libstagefright/rtsp/ARTPSource.h
index f44e83f..ea683a0 100644
--- a/media/libstagefright/rtsp/ARTPSource.h
+++ b/media/libstagefright/rtsp/ARTPSource.h
@@ -23,6 +23,9 @@
 #include <media/stagefright/foundation/ABase.h>
 #include <utils/List.h>
 #include <utils/RefBase.h>
+#include <utils/Thread.h>
+
+#include <map>
 
 namespace android {
 
@@ -45,22 +48,58 @@
 
     void addReceiverReport(const sp<ABuffer> &buffer);
     void addFIR(const sp<ABuffer> &buffer);
+    void addTMMBR(const sp<ABuffer> &buffer, int32_t targetBitrate);
+    int addNACK(const sp<ABuffer> &buffer);
+    void setSeqNumToNACK(uint16_t seqNum, uint16_t mask, uint16_t nowJitterHeadSeqNum);
+    uint32_t getSelfID();
+    void setSelfID(const uint32_t selfID);
+    void setJbTime(const uint32_t jbTimeMs);
+    void setPeriodicFIR(bool enable);
+    void notifyPktInfo(int32_t bitrate, int64_t time);
+    // FIR needs to be sent by missing packet or broken video image.
+    void onIssueFIRByAssembler();
+
+    void noticeAbandonBuffer(int cnt=1);
+
+    int32_t mFirstSeqNumber;
+    uint32_t mFirstRtpTime;
+    int64_t mFirstSysTime;
+    int32_t mClockRate;
+
+    uint32_t mJbTimeMs;
+    int32_t mFirstSsrc;
+    int32_t mHighestNackNumber;
 
 private:
+
     uint32_t mID;
     uint32_t mHighestSeqNumber;
     uint32_t mPrevExpected;
     uint32_t mBaseSeqNumber;
     int32_t mNumBuffersReceived;
     int32_t mPrevNumBuffersReceived;
+    uint32_t mPrevExpectedForRR;
+    int32_t mPrevNumBuffersReceivedForRR;
 
     List<sp<ABuffer> > mQueue;
     sp<ARTPAssembler> mAssembler;
 
+    typedef struct infoNACK {
+        uint16_t seqNum;
+        uint16_t mask;
+        uint16_t nowJitterHeadSeqNum;
+        bool    needToNACK;
+    } infoNACK;
+
+    Mutex mMapLock;
+    std::map<uint16_t, infoNACK> mNACKMap;
+    int getSeqNumToNACK(List<int>& list, int size);
+
     uint64_t mLastNTPTime;
     int64_t mLastNTPTimeUpdateUs;
 
     bool mIssueFIRRequests;
+    bool mIssueFIRByAssembler;
     int64_t mLastFIRRequestUs;
     uint8_t mNextFIRSeqNo;
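A note on the infoNACK bookkeeping above: getSeqNumToNACK() flattens each entry into one 32-bit generic-NACK FCI, the lost packet ID (PID) followed by the bitmask of following lost packets (BLP), stored in network byte order. A minimal sketch of that packing, for illustration only (packNackFci is an assumed helper, not code from this change):

    #include <cstdint>

    // Pack one generic NACK FCI: PID in the first two bytes, BLP in the last two,
    // big-endian so it can be copied directly into the RTCP payload.
    static void packNackFci(uint16_t pid, uint16_t blp, uint8_t out[4]) {
        out[0] = (pid >> 8) & 0xff;
        out[1] = pid & 0xff;
        out[2] = (blp >> 8) & 0xff;
        out[3] = blp & 0xff;
    }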
 
diff --git a/media/libstagefright/rtsp/ARTPWriter.cpp b/media/libstagefright/rtsp/ARTPWriter.cpp
index 58d6086..76afb04 100644
--- a/media/libstagefright/rtsp/ARTPWriter.cpp
+++ b/media/libstagefright/rtsp/ARTPWriter.cpp
@@ -35,10 +35,32 @@
 #define PT      97
 #define PT_STR  "97"
 
+#define H264_NALU_MASK 0x1F
+#define H264_NALU_SPS 0x7
+#define H264_NALU_PPS 0x8
+#define H264_NALU_IFRAME 0x5
+#define H264_NALU_PFRAME 0x1
+
+#define H265_NALU_MASK 0x3F
+#define H265_NALU_VPS 0x20
+#define H265_NALU_SPS 0x21
+#define H265_NALU_PPS 0x22
+
+#define LINK_HEADER_SIZE 14
+#define IP_HEADER_SIZE 20
+#define UDP_HEADER_SIZE 8
+#define TCPIP_HEADER_SIZE (LINK_HEADER_SIZE + IP_HEADER_SIZE + UDP_HEADER_SIZE)
+#define RTP_HEADER_SIZE 12
+#define RTP_HEADER_EXT_SIZE 8
+#define RTP_FU_HEADER_SIZE 2
+#define RTP_PAYLOAD_ROOM_SIZE 100 // room for IPv6 header, ESP, etc.
+
+
 namespace android {
 
 // static const size_t kMaxPacketSize = 65507;  // maximum payload in UDP over IP
-static const size_t kMaxPacketSize = 1500;
+static const size_t kMaxPacketSize = 1280;
+static char kCNAME[255] = "someone@somewhere";
 
 static int UniformRand(int limit) {
     return ((double)rand() * limit) / RAND_MAX;
@@ -48,15 +70,19 @@
     : mFlags(0),
       mFd(dup(fd)),
       mLooper(new ALooper),
-      mReflector(new AHandlerReflector<ARTPWriter>(this)) {
+      mReflector(new AHandlerReflector<ARTPWriter>(this)),
+      mTrafficRec(new TrafficRecorder<uint32_t, size_t>(128)) {
     CHECK_GE(fd, 0);
+    mIsIPv6 = false;
 
     mLooper->setName("rtp writer");
     mLooper->registerHandler(mReflector);
     mLooper->start();
 
-    mSocket = socket(AF_INET, SOCK_DGRAM, 0);
-    CHECK_GE(mSocket, 0);
+    mRTPSocket = socket(AF_INET, SOCK_DGRAM, 0);
+    CHECK_GE(mRTPSocket, 0);
+    mRTCPSocket = socket(AF_INET, SOCK_DGRAM, 0);
+    CHECK_GE(mRTCPSocket, 0);
 
     memset(mRTPAddr.sin_zero, 0, sizeof(mRTPAddr.sin_zero));
     mRTPAddr.sin_family = AF_INET;
@@ -72,6 +98,44 @@
 
     mRTCPAddr = mRTPAddr;
     mRTCPAddr.sin_port = htons(ntohs(mRTPAddr.sin_port) | 1);
+    mSPSBuf = NULL;
+    mPPSBuf = NULL;
+
+#if LOG_TO_FILES
+    mRTPFd = open(
+            "/data/misc/rtpout.bin",
+            O_WRONLY | O_CREAT | O_TRUNC,
+            0644);
+    CHECK_GE(mRTPFd, 0);
+
+    mRTCPFd = open(
+            "/data/misc/rtcpout.bin",
+            O_WRONLY | O_CREAT | O_TRUNC,
+            0644);
+    CHECK_GE(mRTCPFd, 0);
+#endif
+}
+
+ARTPWriter::ARTPWriter(int fd, String8& localIp, int localPort, String8& remoteIp,
+    int remotePort, uint32_t seqNo)
+    : mFlags(0),
+      mFd(dup(fd)),
+      mLooper(new ALooper),
+      mReflector(new AHandlerReflector<ARTPWriter>(this)),
+      mTrafficRec(new TrafficRecorder<uint32_t, size_t>(128)) {
+    CHECK_GE(fd, 0);
+    mIsIPv6 = false;
+
+    mLooper->setName("rtp writer");
+    mLooper->registerHandler(mReflector);
+    mLooper->start();
+
+    makeSocketPairAndBind(localIp, localPort, remoteIp , remotePort);
+    mVPSBuf = NULL;
+    mSPSBuf = NULL;
+    mPPSBuf = NULL;
+
+    mSeqNo = seqNo;
 
 #if LOG_TO_FILES
     mRTPFd = open(
@@ -89,6 +153,21 @@
 }
 
 ARTPWriter::~ARTPWriter() {
+    if (mVPSBuf != NULL) {
+        mVPSBuf->release();
+        mVPSBuf = NULL;
+    }
+
+    if (mSPSBuf != NULL) {
+        mSPSBuf->release();
+        mSPSBuf = NULL;
+    }
+
+    if (mPPSBuf != NULL) {
+        mPPSBuf->release();
+        mPPSBuf = NULL;
+    }
+
 #if LOG_TO_FILES
     close(mRTCPFd);
     mRTCPFd = -1;
@@ -97,8 +176,11 @@
     mRTPFd = -1;
 #endif
 
-    close(mSocket);
-    mSocket = -1;
+    close(mRTPSocket);
+    mRTPSocket = -1;
+
+    close(mRTCPSocket);
+    mRTCPSocket = -1;
 
     close(mFd);
     mFd = -1;
@@ -114,28 +196,61 @@
     return (mFlags & kFlagEOS) != 0;
 }
 
-status_t ARTPWriter::start(MetaData * /* params */) {
+status_t ARTPWriter::start(MetaData * params) {
     Mutex::Autolock autoLock(mLock);
     if (mFlags & kFlagStarted) {
         return INVALID_OPERATION;
     }
 
     mFlags &= ~kFlagEOS;
-    mSourceID = rand();
-    mSeqNo = UniformRand(65536);
-    mRTPTimeBase = rand();
+    if (mSourceID == 0)
+        mSourceID = rand();
+    if (mSeqNo == 0)
+        mSeqNo = UniformRand(65536);
+    mRTPTimeBase = 0;
     mNumRTPSent = 0;
     mNumRTPOctetsSent = 0;
     mLastRTPTime = 0;
     mLastNTPTime = 0;
+    mOpponentID = 0;
+    mBitrate = 192000;
     mNumSRsSent = 0;
+    mRTPCVOExtMap = -1;
+    mRTPCVODegrees = 0;
+    mRTPSockNetwork = 0;
 
     const char *mime;
     CHECK(mSource->getFormat()->findCString(kKeyMIMEType, &mime));
 
+    int32_t selfID = 0;
+    if (params->findInt32(kKeySelfID, &selfID))
+        mSourceID = selfID;
+
+    int32_t payloadType = 0;
+    if (params->findInt32(kKeyPayloadType, &payloadType))
+        mPayloadType = payloadType;
+
+    int32_t rtpExtMap = 0;
+    if (params->findInt32(kKeyRtpExtMap, &rtpExtMap))
+        mRTPCVOExtMap = rtpExtMap;
+
+    int32_t rtpCVODegrees = 0;
+    if (params->findInt32(kKeyRtpCvoDegrees, &rtpCVODegrees))
+        mRTPCVODegrees = rtpCVODegrees;
+
+    int32_t dscp = 0;
+    if (params->findInt32(kKeyRtpDscp, &dscp))
+        updateSocketDscp(dscp);
+
+    int64_t sockNetwork = 0;
+    if (params->findInt64(kKeySocketNetwork, &sockNetwork))
+        updateSocketNetwork(sockNetwork);
+
     mMode = INVALID;
     if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC)) {
         mMode = H264;
+    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC)) {
+        mMode = H265;
     } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_H263)) {
         mMode = H263;
     } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_NB)) {
@@ -187,11 +302,137 @@
     }
 }
 
+static const uint8_t SPCSize = 4;      // Start Prefix Code Size
+static const uint8_t startPrefixCode[SPCSize] = {0, 0, 0, 1};
+static const uint8_t spcKMPidx[SPCSize] = {0, 0, 2, 0};
+static void SpsPpsParser(MediaBufferBase *buffer,
+        MediaBufferBase **spsBuffer, MediaBufferBase **ppsBuffer) {
+
+    while (buffer->range_length() > 0) {
+        const uint8_t *NALPtr = (const uint8_t *)buffer->data() + buffer->range_offset();
+
+        MediaBufferBase **targetPtr = NULL;
+        if ((*NALPtr & H264_NALU_MASK) == H264_NALU_SPS) {
+            targetPtr = spsBuffer;
+        } else if ((*NALPtr & H264_NALU_MASK) == H264_NALU_PPS) {
+            targetPtr = ppsBuffer;
+        } else {
+            return;
+        }
+        ALOGV("SPS(7) or PPS(8) found. Type %d", *NALPtr & H264_NALU_MASK);
+
+        uint32_t bufferSize = buffer->range_length();
+        MediaBufferBase *&target = *targetPtr;
+        uint32_t i = 0, j = 0;
+        bool isBoundFound = false;
+        for (i = 0; i < bufferSize; i++) {
+            while (j > 0 && NALPtr[i] != startPrefixCode[j]) {
+                j = spcKMPidx[j - 1];
+            }
+            if (NALPtr[i] == startPrefixCode[j]) {
+                j++;
+                if (j == SPCSize) {
+                    isBoundFound = true;
+                    break;
+                }
+            }
+        }
+
+        uint32_t targetSize;
+        if (target != NULL) {
+            target->release();
+        }
+        // note that targetSize is never 0 as the first byte is never part
+        // of a start prefix
+        if (isBoundFound) {
+            targetSize = i - SPCSize + 1;
+            target = MediaBufferBase::Create(targetSize);
+            memcpy(target->data(),
+                   (const uint8_t *)buffer->data() + buffer->range_offset(),
+                   targetSize);
+            buffer->set_range(buffer->range_offset() + targetSize + SPCSize,
+                              buffer->range_length() - targetSize - SPCSize);
+        } else {
+            targetSize = bufferSize;
+            target = MediaBufferBase::Create(targetSize);
+            memcpy(target->data(),
+                   (const uint8_t *)buffer->data() + buffer->range_offset(),
+                   targetSize);
+            buffer->set_range(buffer->range_offset() + bufferSize, 0);
+            return;
+        }
+    }
+}
+
+static void VpsSpsPpsParser(MediaBufferBase *buffer,
+        MediaBufferBase **vpsBuffer, MediaBufferBase **spsBuffer, MediaBufferBase **ppsBuffer) {
+
+    while (buffer->range_length() > 0) {
+        const uint8_t *NALPtr = (const uint8_t *)buffer->data() + buffer->range_offset();
+        uint8_t nalType = ((*NALPtr) >> 1) & H265_NALU_MASK;
+
+        MediaBufferBase **targetPtr = NULL;
+        if (nalType == H265_NALU_VPS) {
+            targetPtr = vpsBuffer;
+        } else if (nalType == H265_NALU_SPS) {
+            targetPtr = spsBuffer;
+        } else if (nalType == H265_NALU_PPS) {
+            targetPtr = ppsBuffer;
+        } else {
+            return;
+        }
+        ALOGV("VPS(32) SPS(33) or PPS(34) found. Type %d", nalType);
+
+        uint32_t bufferSize = buffer->range_length();
+        MediaBufferBase *&target = *targetPtr;
+        uint32_t i = 0, j = 0;
+        bool isBoundFound = false;
+        for (i = 0; i < bufferSize; i++) {
+            while (j > 0 && NALPtr[i] != startPrefixCode[j]) {
+                j = spcKMPidx[j - 1];
+            }
+            if (NALPtr[i] == startPrefixCode[j]) {
+                j++;
+                if (j == SPCSize) {
+                    isBoundFound = true;
+                    break;
+                }
+            }
+        }
+
+        if (target != NULL) {
+            target->release();
+        }
+        uint32_t targetSize;
+        // note that targetSize is never 0 as the first byte is never part
+        // of a start prefix
+        if (isBoundFound) {
+            targetSize = i - SPCSize + 1;
+            target = MediaBufferBase::Create(targetSize);
+            memcpy(target->data(),
+                   (const uint8_t *)buffer->data() + buffer->range_offset(),
+                   targetSize);
+            buffer->set_range(buffer->range_offset() + targetSize + SPCSize,
+                              buffer->range_length() - targetSize - SPCSize);
+        } else {
+            targetSize = bufferSize;
+            target = MediaBufferBase::Create(targetSize);
+            memcpy(target->data(),
+                   (const uint8_t *)buffer->data() + buffer->range_offset(),
+                   targetSize);
+            buffer->set_range(buffer->range_offset() + bufferSize, 0);
+            return;
+        }
+    }
+}
+
 void ARTPWriter::onMessageReceived(const sp<AMessage> &msg) {
     switch (msg->what()) {
         case kWhatStart:
         {
-            CHECK_EQ(mSource->start(), (status_t)OK);
+            sp<MetaData> meta = new MetaData();
+            meta->setInt64(kKeyTime, 10ll);
+            CHECK_EQ(mSource->start(meta.get()), (status_t)OK);
 
 #if 0
             if (mMode == H264) {
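The SpsPpsParser()/VpsSpsPpsParser() helpers added above split concatenated parameter-set NAL units on the 4-byte start prefix 00 00 00 01 (via the small KMP table spcKMPidx). A plain equivalent of just the boundary search, for illustration only (findStartPrefix is an assumed name, not code from this change):

    #include <cstddef>
    #include <cstdint>

    // Return the offset of the first 00 00 00 01 start prefix in [data, data + size),
    // or size if none is found; this is the boundary the parsers above cut on.
    static size_t findStartPrefix(const uint8_t *data, size_t size) {
        for (size_t i = 0; i + 4 <= size; i++) {
            if (data[i] == 0 && data[i + 1] == 0 && data[i + 2] == 0 && data[i + 3] == 1) {
                return i;
            }
        }
        return size;
    }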
@@ -264,6 +505,18 @@
     }
 }
 
+void ARTPWriter::setTMMBNInfo(uint32_t opponentID, uint32_t bitrate) {
+    mOpponentID = opponentID;
+    mBitrate = bitrate;
+
+    sp<ABuffer> buffer = new ABuffer(65536);
+    buffer->setRange(0, 0);
+
+    addTMMBN(buffer);
+
+    send(buffer, true /* isRTCP */);
+}
+
 void ARTPWriter::onRead(const sp<AMessage> &msg) {
     MediaBufferBase *mediaBuf;
     status_t err = mSource->read(&mediaBuf);
@@ -281,7 +534,16 @@
 
         if (mMode == H264) {
             StripStartcode(mediaBuf);
-            sendAVCData(mediaBuf);
+            SpsPpsParser(mediaBuf, &mSPSBuf, &mPPSBuf);
+            if (mediaBuf->range_length() > 0) {
+                sendAVCData(mediaBuf);
+            }
+        } else if (mMode == H265) {
+            StripStartcode(mediaBuf);
+            VpsSpsPpsParser(mediaBuf, &mVPSBuf, &mSPSBuf, &mPPSBuf);
+            if (mediaBuf->range_length() > 0) {
+                sendHEVCData(mediaBuf);
+            }
         } else if (mMode == H263) {
             sendH263Data(mediaBuf);
         } else if (mMode == AMR_NB || mMode == AMR_WB) {
@@ -309,12 +571,38 @@
 }
 
 void ARTPWriter::send(const sp<ABuffer> &buffer, bool isRTCP) {
-    ssize_t n = sendto(
-            mSocket, buffer->data(), buffer->size(), 0,
-            (const struct sockaddr *)(isRTCP ? &mRTCPAddr : &mRTPAddr),
-            sizeof(mRTCPAddr));
+    int sizeSockSt;
+    struct sockaddr *remAddr;
 
-    CHECK_EQ(n, (ssize_t)buffer->size());
+    if (mIsIPv6) {
+        sizeSockSt = sizeof(struct sockaddr_in6);
+        if (isRTCP)
+            remAddr = (struct sockaddr *)&mRTCPAddr6;
+        else
+            remAddr = (struct sockaddr *)&mRTPAddr6;
+    } else {
+        sizeSockSt = sizeof(struct sockaddr_in);
+        if (isRTCP)
+            remAddr = (struct sockaddr *)&mRTCPAddr;
+        else
+            remAddr = (struct sockaddr *)&mRTPAddr;
+    }
+
+    // Uncomment if the moderator is needed (prevents instantaneous bandwidth overflow).
+    // It limits the number of bits sent per period.
+    // e.g. 6 KByte/10 ms = 48 Kbit/10 ms = 4.8 Mbit/s instantaneous limit
+    // ModerateInstantTraffic(10, 6 * 1024);
+
+    ssize_t n = sendto(isRTCP ? mRTCPSocket : mRTPSocket,
+            buffer->data(), buffer->size(), 0, remAddr, sizeSockSt);
+
+    if (n != (ssize_t)buffer->size()) {
+        ALOGW("packets can not be sent. ret=%d, buf=%d", (int)n, (int)buffer->size());
+    } else {
+        // Record current traffic & Print bits while last 1sec (1000ms)
+        mTrafficRec->writeBytes(buffer->size());
+        mTrafficRec->printAccuBitsForLastPeriod(1000, 1000);
+    }
 
 #if LOG_TO_FILES
     int fd = isRTCP ? mRTCPFd : mRTPFd;
@@ -379,7 +667,6 @@
 
     data[offset++] = 1;  // CNAME
 
-    static const char *kCNAME = "someone@somewhere";
     data[offset++] = strlen(kCNAME);
 
     memcpy(&data[offset], kCNAME, strlen(kCNAME));
@@ -416,9 +703,52 @@
     buffer->setRange(buffer->offset(), buffer->size() + offset);
 }
 
+void ARTPWriter::addTMMBN(const sp<ABuffer> &buffer) {
+    if (buffer->size() + 20 > buffer->capacity()) {
+        ALOGW("RTCP buffer too small to accommodate SR.");
+        return;
+    }
+    if (mOpponentID == 0)
+        return;
+
+    uint8_t *data = buffer->data() + buffer->size();
+
+    data[0] = 0x80 | 4; // TMMBN
+    data[1] = 205;      // TSFB
+    data[2] = 0;
+    data[3] = 4;        // total (4+1) * sizeof(int32_t) = 20 bytes
+    data[4] = mSourceID >> 24;
+    data[5] = (mSourceID >> 16) & 0xff;
+    data[6] = (mSourceID >> 8) & 0xff;
+    data[7] = mSourceID & 0xff;
+
+    *(int32_t*)(&data[8]) = 0;  // 4 bytes blank
+
+    data[12] = mOpponentID >> 24;
+    data[13] = (mOpponentID >> 16) & 0xff;
+    data[14] = (mOpponentID >> 8) & 0xff;
+    data[15] = mOpponentID & 0xff;
+
+    int32_t exp, mantissa;
+
+    // Round off to the nearest 2^4th
+    ALOGI("UE -> Op Noti Tx bitrate : %d ", mBitrate & 0xfffffff0);
+    for (exp=4 ; exp < 32 ; exp++)
+        if (((mBitrate >> exp) & 0x01) != 0)
+            break;
+    mantissa = mBitrate >> exp;
+
+    data[16] = ((exp << 2) & 0xfc) | ((mantissa & 0x18000) >> 15);
+    data[17] =                        (mantissa & 0x07f80) >> 7;
+    data[18] =                        (mantissa & 0x0007f) << 1;
+    data[19] = 40;              // 40 bytes overhead;
+
+    buffer->setRange(buffer->offset(), buffer->size() + 20);
+}
+
 // static
 uint64_t ARTPWriter::GetNowNTP() {
-    uint64_t nowUs = ALooper::GetNowUs();
+    uint64_t nowUs = systemTime(SYSTEM_TIME_REALTIME) / 1000ll;
 
     nowUs += ((70LL * 365 + 17) * 24) * 60 * 60 * 1000000LL;
 
@@ -463,7 +793,7 @@
         sdp.append("m=audio ");
     }
 
-    sdp.append(AStringPrintf("%d", ntohs(mRTPAddr.sin_port)));
+    sdp.append(AStringPrintf("%d", mIsIPv6 ? ntohs(mRTPAddr6.sin6_port) : ntohs(mRTPAddr.sin_port)));
     sdp.append(
           " RTP/AVP " PT_STR "\r\n"
           "b=AS 320000\r\n"
@@ -569,24 +899,91 @@
     send(buffer, true /* isRTCP */);
 }
 
-void ARTPWriter::sendAVCData(MediaBufferBase *mediaBuf) {
+void ARTPWriter::sendSPSPPSIfIFrame(MediaBufferBase *mediaBuf, int64_t timeUs) {
+    CHECK(mediaBuf->range_length() > 0);
+    const uint8_t *mediaData =
+        (const uint8_t *)mediaBuf->data() + mediaBuf->range_offset();
+
+    if ((mediaData[0] & H264_NALU_MASK) != H264_NALU_IFRAME) {
+        return;
+    }
+
+    if (mSPSBuf != NULL) {
+        mSPSBuf->meta_data().setInt64(kKeyTime, timeUs);
+        mSPSBuf->meta_data().setInt32(kKeySps, 1);
+        sendAVCData(mSPSBuf);
+    }
+
+    if (mPPSBuf != NULL) {
+        mPPSBuf->meta_data().setInt64(kKeyTime, timeUs);
+        mPPSBuf->meta_data().setInt32(kKeyPps, 1);
+        sendAVCData(mPPSBuf);
+    }
+}
+
+void ARTPWriter::sendVPSSPSPPSIfIFrame(MediaBufferBase *mediaBuf, int64_t timeUs) {
+    CHECK(mediaBuf->range_length() > 0);
+    const uint8_t *mediaData =
+        (const uint8_t *)mediaBuf->data() + mediaBuf->range_offset();
+
+    int nalType = ((mediaData[0] >> 1) & H265_NALU_MASK);
+    if (!(nalType >= 16 && nalType <= 21) /*H265_NALU_IFRAME*/) {
+        return;
+    }
+
+    if (mVPSBuf != NULL) {
+        mVPSBuf->meta_data().setInt64(kKeyTime, timeUs);
+        mVPSBuf->meta_data().setInt32(kKeyVps, 1);
+        sendHEVCData(mVPSBuf);
+    }
+
+    if (mSPSBuf != NULL) {
+        mSPSBuf->meta_data().setInt64(kKeyTime, timeUs);
+        mSPSBuf->meta_data().setInt32(kKeySps, 1);
+        sendHEVCData(mSPSBuf);
+    }
+
+    if (mPPSBuf != NULL) {
+        mPPSBuf->meta_data().setInt64(kKeyTime, timeUs);
+        mPPSBuf->meta_data().setInt32(kKeyPps, 1);
+        sendHEVCData(mPPSBuf);
+    }
+}
+
+void ARTPWriter::sendHEVCData(MediaBufferBase *mediaBuf) {
     // 12 bytes RTP header + 2 bytes for the FU-indicator and FU-header.
     CHECK_GE(kMaxPacketSize, 12u + 2u);
 
     int64_t timeUs;
     CHECK(mediaBuf->meta_data().findInt64(kKeyTime, &timeUs));
 
-    uint32_t rtpTime = mRTPTimeBase + (timeUs * 9 / 100LL);
+    sendVPSSPSPPSIfIFrame(mediaBuf, timeUs);
 
+    uint32_t rtpTime = mRTPTimeBase + (timeUs * 9 / 100ll);
+
+    CHECK(mediaBuf->range_length() > 0);
     const uint8_t *mediaData =
         (const uint8_t *)mediaBuf->data() + mediaBuf->range_offset();
 
+    int32_t isNonVCL = 0;
+    if (mediaBuf->meta_data().findInt32(kKeyVps, &isNonVCL) ||
+            mediaBuf->meta_data().findInt32(kKeySps, &isNonVCL) ||
+            mediaBuf->meta_data().findInt32(kKeyPps, &isNonVCL)) {
+        isNonVCL = 1;
+    }
+
     sp<ABuffer> buffer = new ABuffer(kMaxPacketSize);
-    if (mediaBuf->range_length() + 12 <= buffer->capacity()) {
+
+    if (mediaBuf->range_length() + TCPIP_HEADER_SIZE + RTP_HEADER_SIZE + RTP_HEADER_EXT_SIZE
+            + RTP_PAYLOAD_ROOM_SIZE <= buffer->capacity()) {
         // The data fits into a single packet
         uint8_t *data = buffer->data();
         data[0] = 0x80;
-        data[1] = (1 << 7) | PT;  // M-bit
+        if (isNonVCL) {
+            data[1] = mPayloadType;  // Marker bit should not be set in case of Non-VCL
+        } else {
+            data[1] = (1 << 7) | mPayloadType;  // M-bit
+        }
         data[2] = (mSeqNo >> 8) & 0xff;
         data[3] = mSeqNo & 0xff;
         data[4] = rtpTime >> 24;
@@ -611,21 +1008,24 @@
     } else {
         // FU-A
 
-        unsigned nalType = mediaData[0];
-        size_t offset = 1;
+        unsigned nalType = (mediaData[0] >> 1) & H265_NALU_MASK;
+        ALOGV("H265 nalType 0x%x, data[0]=0x%x", nalType, mediaData[0]);
+        size_t offset = 2;  // H265 payload header is 16-bit.
 
         bool firstPacket = true;
         while (offset < mediaBuf->range_length()) {
             size_t size = mediaBuf->range_length() - offset;
             bool lastPacket = true;
-            if (size + 12 + 2 > buffer->capacity()) {
+            if (size + TCPIP_HEADER_SIZE + RTP_HEADER_SIZE + RTP_HEADER_EXT_SIZE +
+                    RTP_FU_HEADER_SIZE + RTP_PAYLOAD_ROOM_SIZE > buffer->capacity()) {
                 lastPacket = false;
-                size = buffer->capacity() - 12 - 2;
+                size = buffer->capacity() - TCPIP_HEADER_SIZE - RTP_HEADER_SIZE -
+                    RTP_HEADER_EXT_SIZE - RTP_FU_HEADER_SIZE - RTP_PAYLOAD_ROOM_SIZE;
             }
 
             uint8_t *data = buffer->data();
             data[0] = 0x80;
-            data[1] = (lastPacket ? (1 << 7) : 0x00) | PT;  // M-bit
+            data[1] = (lastPacket ? (1 << 7) : 0x00) | mPayloadType;  // M-bit
             data[2] = (mSeqNo >> 8) & 0xff;
             data[3] = mSeqNo & 0xff;
             data[4] = rtpTime >> 24;
@@ -637,18 +1037,39 @@
             data[10] = (mSourceID >> 8) & 0xff;
             data[11] = mSourceID & 0xff;
 
-            data[12] = 28 | (nalType & 0xe0);
+            /*  H265 payload header is 16-bit
+                 0 1 2 3 4 5 6 7 0 1 2 3 4 5 6 7
+                +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+                |F|     Type  |  Layer ID | TID |
+                +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+            */
+            ALOGV("H265 payload header 0x%x %x", mediaData[0], mediaData[1]);
+            // excludes Type from 1st byte of H265 payload header.
+            data[12] = mediaData[0] & 0x81;
+            // fills Type as FU (49 == 0x31)
+            data[12] = data[12] | (0x31 << 1);
+            data[13] = mediaData[1];
+
+            ALOGV("H265 FU header 0x%x %x", data[12], data[13]);
 
             CHECK(!firstPacket || !lastPacket);
+            /*
+                FU INDICATOR HDR
+                0 1 2 3 4 5 6 7
+                +-+-+-+-+-+-+-+
+                |S|E|   Type  |
+                +-+-+-+-+-+-+-+
+            */
 
-            data[13] =
+            data[14] =
                 (firstPacket ? 0x80 : 0x00)
                 | (lastPacket ? 0x40 : 0x00)
-                | (nalType & 0x1f);
+                | (nalType & H265_NALU_MASK);
+            ALOGV("H265 FU indicator 0x%x", data[14]);
 
-            memcpy(&data[14], &mediaData[offset], size);
+            memcpy(&data[15], &mediaData[offset], size);
 
-            buffer->setRange(0, 14 + size);
+            buffer->setRange(0, 15 + size);
 
             send(buffer, false /* isRTCP */);
 
@@ -663,6 +1084,172 @@
 
     mLastRTPTime = rtpTime;
     mLastNTPTime = GetNowNTP();
+
+}
+
+void ARTPWriter::sendAVCData(MediaBufferBase *mediaBuf) {
+    // 12 bytes RTP header + 2 bytes for the FU-indicator and FU-header.
+    CHECK_GE(kMaxPacketSize, 12u + 2u);
+
+    int64_t timeUs;
+    CHECK(mediaBuf->meta_data().findInt64(kKeyTime, &timeUs));
+
+    sendSPSPPSIfIFrame(mediaBuf, timeUs);
+
+    uint32_t rtpTime = mRTPTimeBase + (timeUs * 9 / 100LL);
+
+    CHECK(mediaBuf->range_length() > 0);
+    const uint8_t *mediaData =
+        (const uint8_t *)mediaBuf->data() + mediaBuf->range_offset();
+
+    int32_t sps, pps;
+    bool isSpsPps = false;
+    if (mediaBuf->meta_data().findInt32(kKeySps, &sps) ||
+            mediaBuf->meta_data().findInt32(kKeyPps, &pps)) {
+        isSpsPps = true;
+    }
+
+    mTrafficRec->updateClock(ALooper::GetNowUs() / 1000);
+    sp<ABuffer> buffer = new ABuffer(kMaxPacketSize);
+    if (mediaBuf->range_length() + TCPIP_HEADER_SIZE + RTP_HEADER_SIZE + RTP_HEADER_EXT_SIZE
+            + RTP_PAYLOAD_ROOM_SIZE <= buffer->capacity()) {
+        // The data fits into a single packet
+        uint8_t *data = buffer->data();
+        data[0] = 0x80;
+        if (mRTPCVOExtMap > 0)
+            data[0] |= 0x10;
+        if (isSpsPps)
+            data[1] = mPayloadType;  // Marker bit should not be set in case of sps/pps
+        else
+            data[1] = (1 << 7) | mPayloadType;
+        data[2] = (mSeqNo >> 8) & 0xff;
+        data[3] = mSeqNo & 0xff;
+        data[4] = rtpTime >> 24;
+        data[5] = (rtpTime >> 16) & 0xff;
+        data[6] = (rtpTime >> 8) & 0xff;
+        data[7] = rtpTime & 0xff;
+        data[8] = mSourceID >> 24;
+        data[9] = (mSourceID >> 16) & 0xff;
+        data[10] = (mSourceID >> 8) & 0xff;
+        data[11] = mSourceID & 0xff;
+
+        int rtpExtIndex = 0;
+        if (mRTPCVOExtMap > 0) {
+            /*
+                0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+               +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+               |       0xBE    |    0xDE       |           length=3            |
+               +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+               |  ID   | L=0   |     data      |  ID   |  L=1  |   data...
+               +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+                     ...data   |    0 (pad)    |    0 (pad)    |  ID   | L=3   |
+               +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+               |                          data                                 |
+               +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+
+              In the one-byte header form of extensions, the 16-bit value required
+              by the RTP specification for a header extension, labeled in the RTP
+              specification as "defined by profile", takes the fixed bit pattern
+              0xBEDE (the first version of this specification was written on the
+              feast day of the Venerable Bede).
+            */
+            data[12] = 0xBE;
+            data[13] = 0xDE;
+            // put a length of RTP Extension.
+            data[14] = 0x00;
+            data[15] = 0x01;
+            // put extmap of RTP assigned for CVO.
+            data[16] = (mRTPCVOExtMap << 4) | 0x0;
+            // put image degrees as per CVO specification.
+            data[17] = mRTPCVODegrees;
+            data[18] = 0x0;
+            data[19] = 0x0;
+            rtpExtIndex = 8;
+        }
+
+        memcpy(&data[12 + rtpExtIndex],
+               mediaData, mediaBuf->range_length());
+
+        buffer->setRange(0, mediaBuf->range_length() + (12 + rtpExtIndex));
+
+        send(buffer, false /* isRTCP */);
+
+        ++mSeqNo;
+        ++mNumRTPSent;
+        mNumRTPOctetsSent += buffer->size() - (12 + rtpExtIndex);
+    } else {
+        // FU-A
+
+        unsigned nalType = mediaData[0];
+        size_t offset = 1;
+
+        bool firstPacket = true;
+        while (offset < mediaBuf->range_length()) {
+            size_t size = mediaBuf->range_length() - offset;
+            bool lastPacket = true;
+            if (size + TCPIP_HEADER_SIZE + RTP_HEADER_SIZE + RTP_HEADER_EXT_SIZE +
+                    RTP_FU_HEADER_SIZE + RTP_PAYLOAD_ROOM_SIZE > buffer->capacity()) {
+                lastPacket = false;
+                size = buffer->capacity() - TCPIP_HEADER_SIZE - RTP_HEADER_SIZE -
+                    RTP_HEADER_EXT_SIZE - RTP_FU_HEADER_SIZE - RTP_PAYLOAD_ROOM_SIZE;
+            }
+
+            uint8_t *data = buffer->data();
+            data[0] = 0x80;
+            if (lastPacket && mRTPCVOExtMap > 0)
+                data[0] |= 0x10;
+            data[1] = (lastPacket ? (1 << 7) : 0x00) | mPayloadType;  // M-bit
+            data[2] = (mSeqNo >> 8) & 0xff;
+            data[3] = mSeqNo & 0xff;
+            data[4] = rtpTime >> 24;
+            data[5] = (rtpTime >> 16) & 0xff;
+            data[6] = (rtpTime >> 8) & 0xff;
+            data[7] = rtpTime & 0xff;
+            data[8] = mSourceID >> 24;
+            data[9] = (mSourceID >> 16) & 0xff;
+            data[10] = (mSourceID >> 8) & 0xff;
+            data[11] = mSourceID & 0xff;
+
+            int rtpExtIndex = 0;
+            if (lastPacket && mRTPCVOExtMap > 0) {
+                data[12] = 0xBE;
+                data[13] = 0xDE;
+                data[14] = 0x00;
+                data[15] = 0x01;
+                data[16] = (mRTPCVOExtMap << 4) | 0x0;
+                data[17] = mRTPCVODegrees;
+                data[18] = 0x0;
+                data[19] = 0x0;
+                rtpExtIndex = 8;
+            }
+
+            data[12 + rtpExtIndex] = 28 | (nalType & 0xe0);
+
+            CHECK(!firstPacket || !lastPacket);
+
+            data[13 + rtpExtIndex] =
+                (firstPacket ? 0x80 : 0x00)
+                | (lastPacket ? 0x40 : 0x00)
+                | (nalType & 0x1f);
+
+            memcpy(&data[14 + rtpExtIndex], &mediaData[offset], size);
+
+            buffer->setRange(0, 14 + rtpExtIndex + size);
+
+            send(buffer, false /* isRTCP */);
+
+            ++mSeqNo;
+            ++mNumRTPSent;
+            mNumRTPOctetsSent += buffer->size() - (12 + rtpExtIndex);
+
+            firstPacket = false;
+            offset += size;
+        }
+    }
+
+    mLastRTPTime = rtpTime;
+    mLastNTPTime = GetNowNTP();
 }
 
 void ARTPWriter::sendH263Data(MediaBufferBase *mediaBuf) {
@@ -696,7 +1283,7 @@
 
         uint8_t *data = buffer->data();
         data[0] = 0x80;
-        data[1] = (lastPacket ? 0x80 : 0x00) | PT;  // M-bit
+        data[1] = (lastPacket ? 0x80 : 0x00) | mPayloadType;  // M-bit
         data[2] = (mSeqNo >> 8) & 0xff;
         data[3] = mSeqNo & 0xff;
         data[4] = rtpTime >> 24;
@@ -727,6 +1314,54 @@
     mLastNTPTime = GetNowNTP();
 }
 
+void ARTPWriter::updateCVODegrees(int32_t cvoDegrees) {
+    Mutex::Autolock autoLock(mLock);
+    mRTPCVODegrees = cvoDegrees;
+}
+
+void ARTPWriter::updatePayloadType(int32_t payloadType) {
+    Mutex::Autolock autoLock(mLock);
+    mPayloadType = payloadType;
+}
+
+void ARTPWriter::updateSocketDscp(int32_t dscp) {
+    mRtpLayer3Dscp = dscp << 2;
+
+    /* mRtpLayer3Dscp will be mapped to WMM(Wifi) as per operator's requirement */
+    if (setsockopt(mRTPSocket, IPPROTO_IP, IP_TOS,
+                (int *)&mRtpLayer3Dscp, sizeof(mRtpLayer3Dscp)) < 0) {
+        ALOGE("failed to set dscp on rtpsock. err=%s", strerror(errno));
+    } else {
+        ALOGD("successfully set dscp on rtpsock. opt=%d", mRtpLayer3Dscp);
+        setsockopt(mRTCPSocket, IPPROTO_IP, IP_TOS,
+                (int *)&mRtpLayer3Dscp, sizeof(mRtpLayer3Dscp));
+        ALOGD("successfully set dscp on rtcpsock. opt=%d", mRtpLayer3Dscp);
+    }
+}
+
+void ARTPWriter::updateSocketNetwork(int64_t socketNetwork) {
+    mRTPSockNetwork = (net_handle_t)socketNetwork;
+    ALOGI("trying to bind rtp socket(%d) to network(%llu).",
+                mRTPSocket, (unsigned long long)mRTPSockNetwork);
+
+    int result = android_setsocknetwork(mRTPSockNetwork, mRTPSocket);
+    if (result != 0) {
+        ALOGW("failed(%d) to bind rtp socket(%d) to network(%llu)",
+                result, mRTPSocket, (unsigned long long)mRTPSockNetwork);
+    }
+    result = android_setsocknetwork(mRTPSockNetwork, mRTCPSocket);
+    if (result != 0) {
+        ALOGW("failed(%d) to bind rtcp socket(%d) to network(%llu)",
+                result, mRTCPSocket, (unsigned long long)mRTPSockNetwork);
+    }
+    ALOGI("done. bind rtp socket(%d) to network(%llu)",
+                mRTPSocket, (unsigned long long)mRTPSockNetwork);
+}
+
+uint32_t ARTPWriter::getSequenceNum() {
+    return mSeqNo;
+}
+
 static size_t getFrameSize(bool isWide, unsigned FT) {
     static const size_t kFrameSizeNB[8] = {
         95, 103, 118, 134, 148, 159, 204, 244
@@ -778,7 +1413,7 @@
     // The data fits into a single packet
     uint8_t *data = buffer->data();
     data[0] = 0x80;
-    data[1] = PT;
+    data[1] = mPayloadType;
     if (mNumRTPSent == 0) {
         // Signal start of talk-spurt.
         data[1] |= 0x80;  // M-bit
@@ -834,5 +1469,91 @@
     mLastNTPTime = GetNowNTP();
 }
 
-}  // namespace android
+void ARTPWriter::makeSocketPairAndBind(String8& localIp, int localPort,
+        String8& remoteIp, int remotePort) {
+    static char kSomeone[16] = "someone@";
+    int nameLength = strlen(kSomeone);
+    memcpy(kCNAME, kSomeone, nameLength);
+    memcpy(kCNAME + nameLength, localIp.c_str(), localIp.length() + 1);
 
+    if (localIp.contains(":"))
+        mIsIPv6 = true;
+    else
+        mIsIPv6 = false;
+
+    mRTPSocket = socket(mIsIPv6 ? AF_INET6 : AF_INET, SOCK_DGRAM, 0);
+    CHECK_GE(mRTPSocket, 0);
+    mRTCPSocket = socket(mIsIPv6 ? AF_INET6 : AF_INET, SOCK_DGRAM, 0);
+    CHECK_GE(mRTCPSocket, 0);
+
+    int sockopt = 1;
+    setsockopt(mRTPSocket, SOL_SOCKET, SO_REUSEADDR, (int *)&sockopt, sizeof(sockopt));
+    setsockopt(mRTCPSocket, SOL_SOCKET, SO_REUSEADDR, (int *)&sockopt, sizeof(sockopt));
+
+    if (mIsIPv6) {
+        memset(&mLocalAddr6, 0, sizeof(mLocalAddr6));
+        memset(&mRTPAddr6, 0, sizeof(mRTPAddr6));
+        memset(&mRTCPAddr6, 0, sizeof(mRTCPAddr6));
+
+        mLocalAddr6.sin6_family = AF_INET6;
+        inet_pton(AF_INET6, localIp.string(), &mLocalAddr6.sin6_addr);
+        mLocalAddr6.sin6_port = htons((uint16_t)localPort);
+
+        mRTPAddr6.sin6_family = AF_INET6;
+        inet_pton(AF_INET6, remoteIp.string(), &mRTPAddr6.sin6_addr);
+        mRTPAddr6.sin6_port = htons((uint16_t)remotePort);
+
+        mRTCPAddr6 = mRTPAddr6;
+        mRTCPAddr6.sin6_port = htons((uint16_t)(remotePort + 1));
+    } else {
+        memset(&mLocalAddr, 0, sizeof(mLocalAddr));
+        memset(&mRTPAddr, 0, sizeof(mRTPAddr));
+        memset(&mRTCPAddr, 0, sizeof(mRTCPAddr));
+
+        mLocalAddr.sin_family = AF_INET;
+        mLocalAddr.sin_addr.s_addr = inet_addr(localIp.string());
+        mLocalAddr.sin_port = htons((uint16_t)localPort);
+
+        mRTPAddr.sin_family = AF_INET;
+        mRTPAddr.sin_addr.s_addr = inet_addr(remoteIp.string());
+        mRTPAddr.sin_port = htons((uint16_t)remotePort);
+
+        mRTCPAddr = mRTPAddr;
+        mRTCPAddr.sin_port = htons((uint16_t)(remotePort + 1));
+    }
+
+    struct sockaddr *localAddr = mIsIPv6 ?
+        (struct sockaddr*)&mLocalAddr6 : (struct sockaddr*)&mLocalAddr;
+
+    int sizeSockSt = mIsIPv6 ? sizeof(mLocalAddr6) : sizeof(mLocalAddr);
+
+    if (bind(mRTPSocket, localAddr, sizeSockSt) == -1) {
+        ALOGE("failed to bind rtp %s:%d err=%s", localIp.string(), localPort, strerror(errno));
+    } else {
+        ALOGD("succeed to bind rtp %s:%d", localIp.string(), localPort);
+    }
+
+    if (mIsIPv6)
+        mLocalAddr6.sin6_port = htons((uint16_t)(localPort + 1));
+    else
+        mLocalAddr.sin_port = htons((uint16_t)(localPort + 1));
+
+    if (bind(mRTCPSocket, localAddr, sizeSockSt) == -1) {
+        ALOGE("failed to bind rtcp %s:%d err=%s", localIp.string(), localPort + 1, strerror(errno));
+    } else {
+        ALOGD("succeed to bind rtcp %s:%d", localIp.string(), localPort + 1);
+    }
+}
+
+// TODO : Develop more advanced moderator based on AS & TMMBR value
+void ARTPWriter::ModerateInstantTraffic(uint32_t samplePeriod, uint32_t limitBytes) {
+    unsigned int bytes =  mTrafficRec->readBytesForLastPeriod(samplePeriod);
+    if (bytes > limitBytes) {
+        ALOGI("Nuclear moderator. #seq = %d \t\t %d bits / 10ms",
+              mSeqNo, bytes * 8);
+        usleep(4000);
+        mTrafficRec->updateClock(ALooper::GetNowUs() / 1000);
+    }
+}
+
+}  // namespace android
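For the FU-A path in sendAVCData() above, the two bytes placed in front of each fragment are derived from the original NAL header as in this sketch, for illustration only (makeFuAHeader is an assumed name, not code from this change):

    #include <cstdint>

    // Build the H.264 FU indicator and FU header for one FU-A fragment, matching
    // data[12 + rtpExtIndex] and data[13 + rtpExtIndex] in sendAVCData().
    static void makeFuAHeader(uint8_t nalHeader, bool firstPacket, bool lastPacket,
                              uint8_t *fuIndicator, uint8_t *fuHeader) {
        *fuIndicator = 28 | (nalHeader & 0xe0);     // keep F/NRI bits, type 28 = FU-A
        *fuHeader = (firstPacket ? 0x80 : 0x00)     // S bit on the first fragment
                  | (lastPacket ? 0x40 : 0x00)      // E bit on the last fragment
                  | (nalHeader & 0x1f);             // original NAL unit type
    }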
diff --git a/media/libstagefright/rtsp/ARTPWriter.h b/media/libstagefright/rtsp/ARTPWriter.h
index 2f13486..6f25a66 100644
--- a/media/libstagefright/rtsp/ARTPWriter.h
+++ b/media/libstagefright/rtsp/ARTPWriter.h
@@ -27,6 +27,9 @@
 #include <arpa/inet.h>
 #include <sys/socket.h>
 
+#include <android/multinetwork.h>
+#include "TrafficRecorder.h"
+
 #define LOG_TO_FILES    0
 
 namespace android {
@@ -36,14 +39,23 @@
 
 struct ARTPWriter : public MediaWriter {
     explicit ARTPWriter(int fd);
+    explicit ARTPWriter(int fd, String8& localIp, int localPort,
+                                String8& remoteIp, int remotePort,
+                                uint32_t seqNo);
 
     virtual status_t addSource(const sp<MediaSource> &source);
     virtual bool reachedEOS();
     virtual status_t start(MetaData *params);
     virtual status_t stop();
     virtual status_t pause();
+    void updateCVODegrees(int32_t cvoDegrees);
+    void updatePayloadType(int32_t payloadType);
+    void updateSocketDscp(int32_t dscp);
+    void updateSocketNetwork(int64_t socketNetwork);
+    uint32_t getSequenceNum();
 
     virtual void onMessageReceived(const sp<AMessage> &msg);
+    virtual void setTMMBNInfo(uint32_t opponentID, uint32_t bitrate);
 
 protected:
     virtual ~ARTPWriter();
@@ -76,15 +88,27 @@
     sp<ALooper> mLooper;
     sp<AHandlerReflector<ARTPWriter> > mReflector;
 
-    int mSocket;
+    bool mIsIPv6;
+    int mRTPSocket, mRTCPSocket;
+    struct sockaddr_in mLocalAddr;
     struct sockaddr_in mRTPAddr;
     struct sockaddr_in mRTCPAddr;
+    struct sockaddr_in6 mLocalAddr6;
+    struct sockaddr_in6 mRTPAddr6;
+    struct sockaddr_in6 mRTCPAddr6;
+    int32_t mRtpLayer3Dscp;
+    net_handle_t mRTPSockNetwork;
 
     AString mProfileLevel;
     AString mSeqParamSet;
     AString mPicParamSet;
 
+    MediaBufferBase *mVPSBuf;
+    MediaBufferBase *mSPSBuf;
+    MediaBufferBase *mPPSBuf;
+
     uint32_t mSourceID;
+    uint32_t mPayloadType;
     uint32_t mSeqNo;
     uint32_t mRTPTimeBase;
     uint32_t mNumRTPSent;
@@ -92,10 +116,17 @@
     uint32_t mLastRTPTime;
     uint64_t mLastNTPTime;
 
+    uint32_t mOpponentID;
+    uint32_t mBitrate;
+    sp<TrafficRecorder<uint32_t, size_t> > mTrafficRec;
+
     int32_t mNumSRsSent;
+    int32_t mRTPCVOExtMap;
+    int32_t mRTPCVODegrees;
 
     enum {
         INVALID,
+        H265,
         H264,
         H263,
         AMR_NB,
@@ -109,17 +140,23 @@
 
     void addSR(const sp<ABuffer> &buffer);
     void addSDES(const sp<ABuffer> &buffer);
+    void addTMMBN(const sp<ABuffer> &buffer);
 
     void makeH264SPropParamSets(MediaBufferBase *buffer);
     void dumpSessionDesc();
 
     void sendBye();
+    void sendVPSSPSPPSIfIFrame(MediaBufferBase *mediaBuf, int64_t timeUs);
+    void sendSPSPPSIfIFrame(MediaBufferBase *mediaBuf, int64_t timeUs);
+    void sendHEVCData(MediaBufferBase *mediaBuf);
     void sendAVCData(MediaBufferBase *mediaBuf);
     void sendH263Data(MediaBufferBase *mediaBuf);
     void sendAMRData(MediaBufferBase *mediaBuf);
 
     void send(const sp<ABuffer> &buffer, bool isRTCP);
+    void makeSocketPairAndBind(String8& localIp, int localPort, String8& remoteIp, int remotePort);
 
+    void ModerateInstantTraffic(uint32_t samplePeriod, uint32_t limitBytes);
     DISALLOW_EVIL_CONSTRUCTORS(ARTPWriter);
 };
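Regarding mRTPCVOExtMap and mRTPCVODegrees above: when CVO is negotiated, sendAVCData() appends a one-byte-header RTP extension (profile 0xBEDE, length 1) whose single element carries the rotation. A sketch of that 8-byte block, for illustration only (writeCvoExtension is an assumed name, not code from this change):

    #include <cstdint>

    // Write the 8-byte one-byte-header RTP extension used for CVO: the fixed
    // 0xBEDE profile, a length of one 32-bit word, one element with id = extMap
    // and L = 0 (a single data byte), the rotation value, and two padding bytes.
    static void writeCvoExtension(uint8_t ext[8], uint8_t extMap, uint8_t degrees) {
        ext[0] = 0xBE;
        ext[1] = 0xDE;
        ext[2] = 0x00;
        ext[3] = 0x01;                  // extension length in 32-bit words
        ext[4] = (extMap << 4) | 0x0;   // element ID | L = 0
        ext[5] = degrees;               // CVO data byte
        ext[6] = 0x00;                  // padding
        ext[7] = 0x00;                  // padding
    }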
 
diff --git a/media/libstagefright/rtsp/ARTSPConnection.cpp b/media/libstagefright/rtsp/ARTSPConnection.cpp
index bb66f4c..c33bf3f 100644
--- a/media/libstagefright/rtsp/ARTSPConnection.cpp
+++ b/media/libstagefright/rtsp/ARTSPConnection.cpp
@@ -329,6 +329,7 @@
     mPass.clear();
     mAuthType = NONE;
     mNonce.clear();
+    mRealm.clear();
 
     mState = DISCONNECTED;
 }
@@ -911,6 +912,14 @@
         CHECK_GE(j, 0);
 
         mNonce.setTo(value, i + 7, j - i - 7);
+
+        i = value.find("realm=");
+        CHECK_GE(i, 0);
+        CHECK_EQ(value.c_str()[i + 6], '\"');
+        j = value.find("\"", i + 7);
+        CHECK_GE(j, 0);
+
+        mRealm.setTo(value, i + 7, j - i - 7);
     }
 
     return true;
@@ -993,7 +1002,7 @@
     AString A1;
     A1.append(mUser);
     A1.append(":");
-    A1.append("Streaming Server");
+    A1.append(mRealm);
     A1.append(":");
     A1.append(mPass);
 
@@ -1029,6 +1038,9 @@
     fragment.append("\", ");
     fragment.append("response=\"");
     fragment.append(digest);
+    fragment.append("\", ");
+    fragment.append("realm=\"");
+    fragment.append(mRealm);
     fragment.append("\"");
     fragment.append("\r\n");
 
diff --git a/media/libstagefright/rtsp/ARTSPConnection.h b/media/libstagefright/rtsp/ARTSPConnection.h
index 56b604d..7cdd4c0 100644
--- a/media/libstagefright/rtsp/ARTSPConnection.h
+++ b/media/libstagefright/rtsp/ARTSPConnection.h
@@ -84,6 +84,7 @@
     AString mUser, mPass;
     AuthType mAuthType;
     AString mNonce;
+    AString mRealm;
     int mSocket;
     int32_t mConnectionID;
     int32_t mNextCSeq;
diff --git a/media/libstagefright/rtsp/ASessionDescription.cpp b/media/libstagefright/rtsp/ASessionDescription.cpp
index 2b42040..5b5b4b1 100644
--- a/media/libstagefright/rtsp/ASessionDescription.cpp
+++ b/media/libstagefright/rtsp/ASessionDescription.cpp
@@ -27,6 +27,8 @@
 
 namespace android {
 
+constexpr unsigned kDefaultAs = 960; // kbps?
+
 ASessionDescription::ASessionDescription()
     : mIsValid(false) {
 }
@@ -103,7 +105,7 @@
                     key.setTo(line, 0, colonPos);
 
                     if (key == "a=fmtp" || key == "a=rtpmap"
-                            || key == "a=framesize") {
+                            || key == "a=framesize" || key == "a=extmap") {
                         ssize_t spacePos = line.find(" ", colonPos + 1);
                         if (spacePos < 0) {
                             return false;
@@ -201,6 +203,33 @@
     return true;
 }
 
+bool ASessionDescription::getCvoExtMap(
+        size_t index, int32_t *cvoExtMap) const {
+    CHECK_GE(index, 0u);
+    CHECK_LT(index, mTracks.size());
+
+    AString key, value;
+    *cvoExtMap = 0;
+
+    const Attribs &track = mTracks.itemAt(index);
+    for (size_t i = 0; i < track.size(); i++) {
+        value = track.valueAt(i);
+        if (value.size() > 0 && strcmp(value.c_str(), "urn:3gpp:video-orientation") == 0) {
+            key = track.keyAt(i);
+            break;
+        }
+    }
+
+    if (key.size() > 0) {
+        const char *colonPos = strrchr(key.c_str(), ':');
+        colonPos++;
+        *cvoExtMap = atoi(colonPos);
+        return true;
+    }
+
+    return false;
+}
+
 void ASessionDescription::getFormatType(
         size_t index, unsigned long *PT,
         AString *desc, AString *params) const {
@@ -345,5 +374,74 @@
     return *npt2 > *npt1;
 }
 
+// static
+void ASessionDescription::SDPStringFactory(AString &sdp,
+        const char *ip, bool isAudio, unsigned port, unsigned payloadType,
+        unsigned as, const char *codec, const char *fmtp,
+        int32_t width, int32_t height, int32_t cvoExtMap)
+{
+    bool isIPv4 = (AString(ip).find("::") == -1) ? true : false;
+    sdp.clear();
+    sdp.append("v=0\r\n");
+
+    sdp.append("a=range:npt=now-\r\n");
+
+    sdp.append("m=");
+    sdp.append(isAudio ? "audio " : "video ");
+    sdp.append(port);
+    sdp.append(" RTP/AVP ");
+    sdp.append(payloadType);
+    sdp.append("\r\n");
+
+    sdp.append("c= IN IP");
+    if (isIPv4) {
+        sdp.append("4 ");
+    } else {
+        sdp.append("6 ");
+    }
+    sdp.append(ip);
+    sdp.append("\r\n");
+
+    sdp.append("b=AS:");
+    sdp.append(as > 0 ? as : kDefaultAs);
+    sdp.append("\r\n");
+
+    sdp.append("a=rtpmap:");
+    sdp.append(payloadType);
+    sdp.append(" ");
+    sdp.append(codec);
+    sdp.append("/");
+    sdp.append(isAudio ? "8000" : "90000");
+    sdp.append("\r\n");
+
+    if (fmtp != NULL) {
+        sdp.append("a=fmtp:");
+        sdp.append(payloadType);
+        sdp.append(" ");
+        sdp.append(fmtp);
+        sdp.append("\r\n");
+    }
+
+    if (!isAudio && width > 0 && height > 0) {
+        sdp.append("a=framesize:");
+        sdp.append(payloadType);
+        sdp.append(" ");
+        sdp.append(width);
+        sdp.append("-");
+        sdp.append(height);
+        sdp.append("\r\n");
+    }
+
+    if (cvoExtMap > 0) {
+        sdp.append("a=extmap:");
+        sdp.append(cvoExtMap);
+        sdp.append(" ");
+        sdp.append("urn:3gpp:video-orientation");
+        sdp.append("\r\n");
+    }
+
+    ALOGV("SDPStringFactory => %s", sdp.c_str());
+}
+
 }  // namespace android
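As a concrete illustration of SDPStringFactory() above, hypothetical arguments ip="192.168.0.10", isAudio=false, port=50000, payloadType=96, as=1000, codec="H264", fmtp="profile-level-id=42c01e", width=640, height=480, cvoExtMap=1 would yield roughly the following SDP (shown with plain line breaks instead of \r\n; not output captured from this change):

    v=0
    a=range:npt=now-
    m=video 50000 RTP/AVP 96
    c= IN IP4 192.168.0.10
    b=AS:1000
    a=rtpmap:96 H264/90000
    a=fmtp:96 profile-level-id=42c01e
    a=framesize:96 640-480
    a=extmap:1 urn:3gpp:video-orientation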
 
diff --git a/media/libstagefright/rtsp/ASessionDescription.h b/media/libstagefright/rtsp/ASessionDescription.h
index b462983..91f5442 100644
--- a/media/libstagefright/rtsp/ASessionDescription.h
+++ b/media/libstagefright/rtsp/ASessionDescription.h
@@ -40,6 +40,8 @@
     size_t countTracks() const;
     void getFormat(size_t index, AString *value) const;
 
+    bool getCvoExtMap(size_t index, int32_t *cvoExtMap) const;
+
     void getFormatType(
             size_t index, unsigned long *PT,
             AString *desc, AString *params) const;
@@ -63,6 +65,9 @@
     // i.e. we have a fixed duration, otherwise this is live streaming.
     static bool parseNTPRange(const char *s, float *npt1, float *npt2);
 
+    static void SDPStringFactory(AString &sdp, const char *ip, bool isAudio, unsigned port,
+        unsigned payloadType, unsigned as, const char *codec, const char *fmtp = NULL,
+        int32_t width = 0, int32_t height = 0, int32_t cvoExtMap = 0);
 protected:
     virtual ~ASessionDescription();
 
diff --git a/media/libstagefright/rtsp/Android.bp b/media/libstagefright/rtsp/Android.bp
index a5a895e..f990ecf 100644
--- a/media/libstagefright/rtsp/Android.bp
+++ b/media/libstagefright/rtsp/Android.bp
@@ -4,6 +4,7 @@
     srcs: [
         "AAMRAssembler.cpp",
         "AAVCAssembler.cpp",
+        "AHEVCAssembler.cpp",
         "AH263Assembler.cpp",
         "AMPEG2TSAssembler.cpp",
         "AMPEG4AudioAssembler.cpp",
@@ -20,6 +21,7 @@
     ],
 
     shared_libs: [
+        "libandroid_net",
         "libcrypto",
         "libdatasource",
         "libmedia",
@@ -28,6 +30,7 @@
     include_dirs: [
         "frameworks/av/media/libstagefright",
         "frameworks/native/include/media/openmax",
+        "frameworks/native/include/android",
     ],
 
     arch: {
diff --git a/media/libstagefright/rtsp/MyHandler.h b/media/libstagefright/rtsp/MyHandler.h
index 7f025a5..0fdf431 100644
--- a/media/libstagefright/rtsp/MyHandler.h
+++ b/media/libstagefright/rtsp/MyHandler.h
@@ -1032,6 +1032,11 @@
                     break;
                 }
 
+                int32_t rtcpEvent;
+                if (msg->findInt32("rtcp-event", &rtcpEvent)) {
+                    break;
+                }
+
                 ++mNumAccessUnitsReceived;
                 postAccessUnitTimeoutCheck();
 
diff --git a/media/libstagefright/rtsp/NetworkUtils.cpp b/media/libstagefright/rtsp/NetworkUtils.cpp
index cc36b78..c053be8 100644
--- a/media/libstagefright/rtsp/NetworkUtils.cpp
+++ b/media/libstagefright/rtsp/NetworkUtils.cpp
@@ -14,6 +14,8 @@
  * limitations under the License.
  */
 
+#include <unistd.h>
+
 //#define LOG_NDEBUG 0
 #define LOG_TAG "NetworkUtils"
 #include <utils/Log.h>
diff --git a/media/libstagefright/rtsp/QualManager.cpp b/media/libstagefright/rtsp/QualManager.cpp
new file mode 100644
index 0000000..37aa326
--- /dev/null
+++ b/media/libstagefright/rtsp/QualManager.cpp
@@ -0,0 +1,174 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "QualManager"
+
+#include <algorithm>
+
+#include <sys/prctl.h>
+#include <utils/Log.h>
+
+#include "QualManager.h"
+
+namespace android {
+
+QualManager::Watcher::Watcher(int32_t timeLimit)
+    : Thread(false), mWatching(false), mSwitch(false),
+      mTimeLimit(timeLimit * 1000000LL)     // timeLimit ms
+{
+}
+
+bool QualManager::Watcher::isExpired() const
+{
+    return mSwitch;
+}
+
+void QualManager::Watcher::setup() {
+    AutoMutex _l(mMyLock);
+    if (mWatching == false) {
+        mWatching = true;
+        mMyCond.signal();
+    }
+}
+
+void QualManager::Watcher::release() {
+    AutoMutex _l(mMyLock);
+    if (mSwitch) {
+        ALOGW("%s DISARMED", name);
+        mSwitch = false;
+    }
+    if (mWatching == true) {
+        ALOGW("%s DISARMED", name);
+        mWatching = false;
+        mMyCond.signal();
+    }
+}
+
+void QualManager::Watcher::exit() {
+    AutoMutex _l(mMyLock);
+    // The order is important to avoid a deadlock.
+    Thread::requestExit();
+    mMyCond.signal();
+}
+
+QualManager::Watcher::~Watcher() {
+    ALOGI("%s thread dead", name);
+}
+
+bool QualManager::Watcher::threadLoop() {
+    AutoMutex _l(mMyLock);
+#if defined(__linux__)
+    prctl(PR_GET_NAME, name, 0, 0, 0);
+#endif
+    while (!exitPending()) {
+        ALOGW("%s Timer init", name);
+        mMyCond.wait(mMyLock);                      // waits in the non-watching state
+        if (exitPending())
+            return false;
+        ALOGW("%s timer BOOM after %d msec", name, (int)(mTimeLimit / 1000000LL));
+        mMyCond.waitRelative(mMyLock, mTimeLimit);  // waits in the watching state
+        if (mWatching == true) {
+            mSwitch = true;
+            ALOGW("%s BOOM!!!!", name);
+        }
+        mWatching = false;
+    }
+    return false;
+}
+
+
+QualManager::QualManager()
+    : mMinBitrate(-1), mMaxBitrate(-1),
+      mTargetBitrate(512000), mLastTargetBitrate(-1),
+      mLastSetBitrateTime(0), mIsNewTargetBitrate(false)
+{
+    VFPWatcher = new Watcher(3000);     // Very Few Packet Watcher
+    VFPWatcher->run("VeryFewPtk");
+    LBRWatcher = new Watcher(10000);    // Low Bit Rate Watcher
+    LBRWatcher->run("LowBitRate");
+}
+
+QualManager::~QualManager() {
+    VFPWatcher->exit();
+    LBRWatcher->exit();
+}
+
+int32_t QualManager::getTargetBitrate() {
+    if (mIsNewTargetBitrate) {
+        mIsNewTargetBitrate = false;
+        mLastTargetBitrate = clampingBitrate(mTargetBitrate);
+        mTargetBitrate = mLastTargetBitrate;
+        return mTargetBitrate;
+    } else {
+        return -1;
+    }
+}
+
+bool QualManager::isNeedToDowngrade() {
+    return LBRWatcher->isExpired();
+}
+
+void QualManager::setTargetBitrate(uint8_t fraction, int64_t nowUs, bool isTooLowPkts) {
+    /* Very few packets are arriving; the remote side may be switching cameras.
+     * If this condition persists, the bitrate should be lowered.
+     */
+    if (isTooLowPkts) {
+        VFPWatcher->setup();
+    } else {
+        VFPWatcher->release();
+    }
+
+    if ((fraction > (256 * 5 / 100) && !isTooLowPkts) || VFPWatcher->isExpired()) {
+        // Packet loss above 5%, or the VFPWatcher expired: back off by three steps.
+        mTargetBitrate -= mBitrateStep * 3;
+    } else if (fraction <= (256 * 2 / 100)) {
+        // Packet loss at or below 2%: probe upward by one step.
+        mTargetBitrate += mBitrateStep;
+    }
+
+    if (mTargetBitrate > mMaxBitrate) {
+        mTargetBitrate = mMaxBitrate + mBitrateStep;
+    } else if (mTargetBitrate < mMinBitrate) {
+        LBRWatcher->setup();
+        mTargetBitrate = mMinBitrate - mBitrateStep;
+    }
+
+    if (mLastTargetBitrate != clampingBitrate(mTargetBitrate) ||
+        nowUs - mLastSetBitrateTime > 5000000ll) {
+        mIsNewTargetBitrate = true;
+        mLastSetBitrateTime = nowUs;
+    }
+}
+
+void QualManager::setMinMaxBitrate(int32_t min, int32_t max) {
+    mMinBitrate = min;
+    mMaxBitrate = max;
+    mBitrateStep = (max - min) / 8;
+}
+
+void QualManager::setBitrateData(int32_t bitrate, int64_t /*now*/) {
+    // Release the low-bitrate watcher only when both the measured and the
+    // target bitrate are at or above the minimum.
+    if (bitrate >= mMinBitrate && mTargetBitrate >= mMinBitrate) {
+        LBRWatcher->release();
+    } else if (bitrate < mMinBitrate) {
+        LBRWatcher->setup();
+    }
+}
+
+int32_t QualManager::clampingBitrate(int32_t bitrate) {
+    return std::min(std::max(mMinBitrate, bitrate), mMaxBitrate);
+}
+} // namespace android
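
As added above, setTargetBitrate() is a small loss-driven control loop over the RTCP fraction-lost byte (in 1/256 units, so 256 * 5 / 100 is roughly 5% loss): it backs off by three steps on heavy loss or an expired very-few-packets watcher and probes upward by one step when loss stays at or below 2%. A hedged usage sketch follows; the function name, the encoder hand-off, and the assumption that the fraction comes from an RTCP receiver report are illustrative only.

```cpp
// Sketch only; not part of this patch.
#include "QualManager.h"

void exampleBitrateControl(android::QualManager &qual,
                           uint8_t fractionLost, int64_t nowUs, bool tooFewPackets) {
    // setMinMaxBitrate() must have been called first; it also derives the step size.
    qual.setTargetBitrate(fractionLost, nowUs, tooFewPackets);

    int32_t newBps = qual.getTargetBitrate();  // -1 means "no new target yet"
    if (newBps > 0) {
        // Reconfigure the video encoder with newBps (encoder API not shown here).
    }
    if (qual.isNeedToDowngrade()) {
        // The low-bitrate watcher expired: consider switching to a lower resolution.
    }
}
```

Note that setMinMaxBitrate() has to run before the first setTargetBitrate() call, since it is what derives the bitrate step used above.
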
diff --git a/media/libstagefright/rtsp/QualManager.h b/media/libstagefright/rtsp/QualManager.h
new file mode 100644
index 0000000..a7dc921
--- /dev/null
+++ b/media/libstagefright/rtsp/QualManager.h
@@ -0,0 +1,75 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef QUAL_MANAGER_H_
+
+#define QUAL_MANAGER_H_
+
+#include <stdint.h>
+#include <utils/Thread.h>
+
+namespace android {
+class QualManager {
+public:
+    QualManager();
+    ~QualManager();
+
+    int32_t getTargetBitrate();
+    bool isNeedToDowngrade();
+
+    void setTargetBitrate(uint8_t fraction, int64_t nowUs, bool isTooLowPkts);
+    void setMinMaxBitrate(int32_t min, int32_t max);
+    void setBitrateData(int32_t bitrate, int64_t now);
+private:
+    class Watcher : public Thread
+    {
+    public:
+        Watcher(int32_t timeLimit);
+
+        void setup();
+        void release();
+        void exit();
+        bool isExpired() const;
+    private:
+        virtual ~Watcher();
+        virtual bool threadLoop();
+
+        char name[32] = {0,};
+
+        Condition mMyCond;
+        Mutex mMyLock;
+
+        bool mWatching;
+        bool mSwitch;
+        const nsecs_t mTimeLimit;
+    };
+    sp<Watcher> VFPWatcher;
+    sp<Watcher> LBRWatcher;
+    int32_t mMinBitrate;
+    int32_t mMaxBitrate;
+    int32_t mBitrateStep;
+
+    int32_t mTargetBitrate;
+    int32_t mLastTargetBitrate;
+    int64_t mLastSetBitrateTime;
+
+    bool mIsNewTargetBitrate;
+
+    int32_t clampingBitrate(int32_t bitrate);
+};
+} //namespace android
+
+#endif  // QUAL_MANAGER_H_
diff --git a/media/libstagefright/rtsp/TrafficRecorder.h b/media/libstagefright/rtsp/TrafficRecorder.h
new file mode 100644
index 0000000..f8e7c03
--- /dev/null
+++ b/media/libstagefright/rtsp/TrafficRecorder.h
@@ -0,0 +1,151 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef A_TRAFFIC_RECORDER_H_
+
+#define A_TRAFFIC_RECORDER_H_
+
+#include <android-base/logging.h>
+#include <utils/RefBase.h>
+
+namespace android {
+
+// Circular buffer that records the number of bytes transferred recently
+template <class Time, class Bytes>
+class TrafficRecorder : public RefBase {
+private:
+    size_t mSize;
+    size_t mSizeMask;
+    Time *mTimeArray = NULL;
+    Bytes *mBytesArray = NULL;
+    size_t mHeadIdx = 0;
+    size_t mTailIdx = 0;
+
+    Time mClock = 0;
+    Time mLastTimeOfPrint = 0;
+    Bytes mAccuBytesOfPrint = 0;
+public:
+    TrafficRecorder();
+    TrafficRecorder(size_t size);
+    virtual ~TrafficRecorder();
+
+    void init();
+
+    void updateClock(Time now);
+
+    Bytes readBytesForLastPeriod(Time period);
+    void writeBytes(Bytes bytes);
+
+    void printAccuBitsForLastPeriod(Time period, Time unit);
+};
+
+template <class Time, class Bytes>
+TrafficRecorder<Time, Bytes>::TrafficRecorder() {
+    TrafficRecorder(128);
+}
+
+template <class Time, class Bytes>
+TrafficRecorder<Time, Bytes>::TrafficRecorder(size_t size) {
+    size_t exp;
+    for (exp = 0; exp < 32; exp++) {
+        if (size <= (1ul << exp)) {
+            break;
+        }
+    }
+    mSize = (1ul << exp);         // size = 2^exp
+    mSizeMask = mSize - 1;
+
+    LOG(VERBOSE) << "TrafficRecorder Init size " << mSize;
+    mTimeArray = new Time[mSize];
+    mBytesArray = new Bytes[mSize];
+
+    init();
+}
+
+template <class Time, class Bytes>
+TrafficRecorder<Time, Bytes>::~TrafficRecorder() {
+    delete[] mTimeArray;
+    delete[] mBytesArray;
+}
+
+template <class Time, class Bytes>
+void TrafficRecorder<Time, Bytes>::init() {
+    mHeadIdx = 0;
+    mTailIdx = 0;
+    mTimeArray[0] = 0;
+    mBytesArray[0] = 0;
+}
+
+template <class Time, class Bytes>
+void TrafficRecorder<Time, Bytes>::updateClock(Time now) {
+    mClock = now;
+}
+
+template <class Time, class Bytes>
+Bytes TrafficRecorder<Time, Bytes>::readBytesForLastPeriod(Time period) {
+    Bytes bytes = 0;
+
+    size_t i = mTailIdx;
+    while (i != mHeadIdx) {
+        LOG(VERBOSE) << "READ " << i << " time " << mTimeArray[i] << " \t EndOfPeriod " << mClock - period;
+        if (mTimeArray[i] < mClock - period) {
+            break;
+        }
+        bytes += mBytesArray[i];
+        i = (i + mSize - 1) & mSizeMask;
+    }
+    mHeadIdx = i;
+    return bytes;
+}
+
+template <class Time, class Bytes>
+void TrafficRecorder<Time, Bytes>::writeBytes(Bytes bytes) {
+    size_t writeIdx;
+    if (mClock == mTimeArray[mTailIdx]) {
+        writeIdx = mTailIdx;
+        mBytesArray[writeIdx] += bytes;
+    } else {
+        writeIdx = (mTailIdx + 1) % mSize;
+        mTimeArray[writeIdx] = mClock;
+        mBytesArray[writeIdx] = bytes;
+    }
+
+    LOG(VERBOSE) << "WRITE " << writeIdx << " time " << mClock;
+    if (writeIdx == mHeadIdx) {
+        LOG(WARNING) << "Traffic recorder size exceeded at " << mHeadIdx;
+        mHeadIdx = (mHeadIdx + 1) & mSizeMask;
+    }
+
+    mTailIdx = writeIdx;
+    mAccuBytesOfPrint += bytes;
+}
+
+template <class Time, class Bytes>
+void TrafficRecorder<Time, Bytes>::printAccuBitsForLastPeriod(Time period, Time unit) {
+    Time duration = mClock - mLastTimeOfPrint;
+    float numOfUnit = (float)duration / unit;
+    if (duration > period) {
+        ALOGD("Actual Tx period %.0f ms \t %.0f Bits/Unit",
+              numOfUnit * 1000.f, mAccuBytesOfPrint * 8.f / numOfUnit);
+        mLastTimeOfPrint = mClock;
+        mAccuBytesOfPrint = 0;
+        init();
+    }
+}
+
+}  // namespace android
+
+#endif  // A_TRAFFIC_RECORDER_H_
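
TrafficRecorder, added above, is a power-of-two ring buffer of (timestamp, byte-count) slots: writeBytes() accumulates into the slot for the current clock value, and readBytesForLastPeriod() walks backwards from the newest slot until it leaves the requested window. A hedged usage sketch follows; the template parameters, the microsecond time base, and the one-second period are assumptions.

```cpp
// Sketch only; not part of this patch.
#include "TrafficRecorder.h"

void exampleTrafficAccounting(
        const android::sp<android::TrafficRecorder<int64_t, size_t>> &recorder,
        int64_t nowUs, size_t packetBytes) {
    recorder->updateClock(nowUs);       // stamp subsequent writes with "now"
    recorder->writeBytes(packetBytes);  // accumulate into the slot for this timestamp

    // Walk backwards from the newest slot and sum everything within the last second.
    size_t bytesLastSecond = recorder->readBytesForLastPeriod(1000000 /* 1 s in us */);
    (void)bytesLastSecond;
}
```

The recorder itself would be created with, for example, `new TrafficRecorder<int64_t, size_t>(128)`; the constructor rounds the requested size up to the next power of two so that index masking works.
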
diff --git a/media/libstagefright/tests/ESDS/Android.bp b/media/libstagefright/tests/ESDS/Android.bp
new file mode 100644
index 0000000..1ad1a64
--- /dev/null
+++ b/media/libstagefright/tests/ESDS/Android.bp
@@ -0,0 +1,51 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+cc_test {
+    name: "ESDSTest",
+    gtest: true,
+
+    srcs: [
+        "ESDSTest.cpp",
+    ],
+
+    shared_libs: [
+        "libbinder",
+        "libdatasource",
+        "liblog",
+        "libmedia",
+        "libstagefright",
+        "libstagefright_foundation",
+        "libutils",
+    ],
+
+    static_libs: [
+        "libstagefright_esds",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+
+    sanitize: {
+        cfi: true,
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+    },
+}
diff --git a/media/libstagefright/tests/ESDS/AndroidTest.xml b/media/libstagefright/tests/ESDS/AndroidTest.xml
new file mode 100644
index 0000000..a4fbc7f
--- /dev/null
+++ b/media/libstagefright/tests/ESDS/AndroidTest.xml
@@ -0,0 +1,31 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<configuration description="Test module config for ESDS unit test">
+    <option name="test-suite-tag" value="ESDSTest" />
+    <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
+        <option name="cleanup" value="true" />
+        <option name="push" value="ESDSTest->/data/local/tmp/ESDSTest" />
+        <option name="push-file"
+            key="https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/tests/ESDS/ESDSTestRes-1.0.zip?unzip=true"
+            value="/data/local/tmp/ESDSTestRes/" />
+    </target_preparer>
+
+    <test class="com.android.tradefed.testtype.GTest" >
+        <option name="native-test-device-path" value="/data/local/tmp" />
+        <option name="module-name" value="ESDSTest" />
+        <option name="native-test-flag" value="-P /data/local/tmp/ESDSTestRes/" />
+    </test>
+</configuration>
diff --git a/media/libstagefright/tests/ESDS/ESDSTest.cpp b/media/libstagefright/tests/ESDS/ESDSTest.cpp
new file mode 100644
index 0000000..101e00c
--- /dev/null
+++ b/media/libstagefright/tests/ESDS/ESDSTest.cpp
@@ -0,0 +1,210 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ESDSTest"
+#include <utils/Log.h>
+
+#include <stdio.h>
+#include <string.h>
+#include <fstream>
+
+#include <ESDS.h>
+#include <binder/ProcessState.h>
+#include <datasource/FileSource.h>
+#include <media/stagefright/MediaExtractorFactory.h>
+#include <media/stagefright/MetaData.h>
+
+#include "ESDSTestEnvironment.h"
+
+using namespace android;
+
+static ESDSTestEnvironment *gEnv = nullptr;
+
+struct ESDSParams {
+    const char *inputFile;
+    int32_t objectTypeIndication;
+    const char *codecSpecificInfoData;
+    int32_t codecSpecificInfoDataSize;
+    int32_t bitrateMax;
+    int32_t bitrateAvg;
+};
+
+class ESDSUnitTest : public ::testing::TestWithParam<tuple<
+                             /* InputFile */ const char *,
+                             /* ObjectTypeIndication */ int32_t,
+                             /* CodecSpecificInfoData */ const char *,
+                             /* CodecSpecificInfoDataSize */ int32_t,
+                             /* BitrateMax */ int32_t,
+                             /* BitrateAvg */ int32_t>> {
+  public:
+    ESDSUnitTest() : mESDSData(nullptr) {
+        mESDSParams.inputFile = get<0>(GetParam());
+        mESDSParams.objectTypeIndication = get<1>(GetParam());
+        mESDSParams.codecSpecificInfoData = get<2>(GetParam());
+        mESDSParams.codecSpecificInfoDataSize = get<3>(GetParam());
+        mESDSParams.bitrateMax = get<4>(GetParam());
+        mESDSParams.bitrateAvg = get<5>(GetParam());
+    };
+
+    virtual void TearDown() override {
+        if (mDataSource) mDataSource.clear();
+        if (mInputFp) {
+            fclose(mInputFp);
+            mInputFp = nullptr;
+        }
+    }
+
+    virtual void SetUp() override { ASSERT_NO_FATAL_FAILURE(readESDSData()); }
+    const void *mESDSData;
+    size_t mESDSSize;
+    ESDSParams mESDSParams;
+
+  private:
+    void readESDSData() {
+        string inputFile = gEnv->getRes() + mESDSParams.inputFile;
+        mInputFp = fopen(inputFile.c_str(), "rb");
+        ASSERT_NE(mInputFp, nullptr) << "File open failed for file: " << inputFile;
+        int32_t fd = fileno(mInputFp);
+        ASSERT_GE(fd, 0) << "File descriptor invalid for file: " << inputFile;
+
+        struct stat buf;
+        status_t status = stat(inputFile.c_str(), &buf);
+        ASSERT_EQ(status, 0) << "Failed to get properties of input file: " << mESDSParams.inputFile;
+        size_t fileSize = buf.st_size;
+
+        mDataSource = new FileSource(dup(fd), 0, fileSize);
+        ASSERT_NE(mDataSource, nullptr) << "Unable to create data source for file: " << inputFile;
+
+        sp<IMediaExtractor> extractor = MediaExtractorFactory::Create(mDataSource);
+        if (extractor == nullptr) {
+            mDataSource.clear();
+            ASSERT_TRUE(false) << "Unable to create extractor for file: " << inputFile;
+        }
+
+        size_t numTracks = extractor->countTracks();
+        ASSERT_GT(numTracks, 0) << "No tracks in file: " << inputFile;
+        ASSERT_TRUE(esdsDataPresent(numTracks, extractor))
+                << "Unable to find esds in any track in file: " << inputFile;
+    }
+
+    bool esdsDataPresent(size_t numTracks, sp<IMediaExtractor> extractor) {
+        bool foundESDS = false;
+        uint32_t type;
+        for (size_t i = 0; i < numTracks; ++i) {
+            sp<MetaData> trackMeta = extractor->getTrackMetaData(i);
+            if (trackMeta != nullptr &&
+                trackMeta->findData(kKeyESDS, &type, &mESDSData, &mESDSSize)) {
+                trackMeta->clear();
+                foundESDS = true;
+                break;
+            }
+        }
+        return foundESDS;
+    }
+
+    FILE *mInputFp;
+    sp<DataSource> mDataSource;
+};
+
+TEST_P(ESDSUnitTest, InvalidDataTest) {
+    void *invalidData = calloc(mESDSSize, 1);
+    ASSERT_NE(invalidData, nullptr) << "Unable to allocate memory";
+    ESDS esds(invalidData, mESDSSize);
+    free(invalidData);
+    ASSERT_NE(esds.InitCheck(), OK) << "invalid ESDS data accepted";
+}
+
+TEST(ESDSSanityUnitTest, ConstructorSanityTest) {
+    void *invalidData = malloc(1);
+    ASSERT_NE(invalidData, nullptr) << "Unable to allocate memory";
+    ESDS esds_zero(invalidData, 0);
+    free(invalidData);
+    ASSERT_NE(esds_zero.InitCheck(), OK) << "invalid ESDS data accepted";
+
+    ESDS esds_null(NULL, 0);
+    ASSERT_NE(esds_null.InitCheck(), OK) << "invalid ESDS data accepted";
+}
+
+TEST_P(ESDSUnitTest, CreateAndDestroyTest) {
+    ESDS esds(mESDSData, mESDSSize);
+    ASSERT_EQ(esds.InitCheck(), OK) << "ESDS data invalid";
+}
+
+TEST_P(ESDSUnitTest, ObjectTypeIndicationTest) {
+    ESDS esds(mESDSData, mESDSSize);
+    ASSERT_EQ(esds.InitCheck(), OK) << "ESDS data invalid";
+    uint8_t objectTypeIndication;
+    status_t status = esds.getObjectTypeIndication(&objectTypeIndication);
+    ASSERT_EQ(status, OK) << "ESDS objectTypeIndication data invalid";
+    ASSERT_EQ(objectTypeIndication, mESDSParams.objectTypeIndication)
+            << "ESDS objectTypeIndication data doesn't match";
+}
+
+TEST_P(ESDSUnitTest, CodecSpecificInfoTest) {
+    ESDS esds(mESDSData, mESDSSize);
+    ASSERT_EQ(esds.InitCheck(), OK) << "ESDS data invalid";
+    status_t status;
+    const void *codecSpecificInfo;
+    size_t codecSpecificInfoSize;
+    status = esds.getCodecSpecificInfo(&codecSpecificInfo, &codecSpecificInfoSize);
+    ASSERT_EQ(status, OK) << "ESDS getCodecSpecificInfo data invalid";
+    ASSERT_EQ(mESDSParams.codecSpecificInfoDataSize, codecSpecificInfoSize)
+            << "CodecSpecificInfo data doesn't match";
+    status = memcmp(codecSpecificInfo, mESDSParams.codecSpecificInfoData, codecSpecificInfoSize);
+    ASSERT_EQ(status, 0) << "CodecSpecificInfo data doesn't match";
+}
+
+TEST_P(ESDSUnitTest, GetBitrateTest) {
+    ESDS esds(mESDSData, mESDSSize);
+    ASSERT_EQ(esds.InitCheck(), OK) << "ESDS data invalid";
+    uint32_t bitrateMax;
+    uint32_t bitrateAvg;
+    status_t status = esds.getBitRate(&bitrateMax, &bitrateAvg);
+    ASSERT_EQ(status, OK) << "ESDS bitRate data invalid";
+    ASSERT_EQ(bitrateMax, mESDSParams.bitrateMax) << "ESDS bitrateMax doesn't match";
+    ASSERT_EQ(bitrateAvg, mESDSParams.bitrateAvg) << "ESDS bitrateAvg doesn't match";
+    ASSERT_LE(bitrateAvg, bitrateMax) << "ESDS bitrateMax is less than bitrateAvg";
+}
+
+INSTANTIATE_TEST_SUITE_P(
+        ESDSUnitTestAll, ESDSUnitTest,
+        ::testing::Values(
+                // InputFile, ObjectTypeIndication, CodecSpecificInfoData,
+                // CodecSpecificInfoDataSize, BitrateMax, BitrateAvg
+                make_tuple("video_176x144_3gp_h263_56kbps_12fps_aac_stereo_128kbps_22050hz.3gp", 64,
+                           "\x13\x90", 2, 131072, 0),
+                make_tuple("video_1280x720_mp4_mpeg2_3000kbps_30fps_aac_stereo_128kbps_48000hz.mp4",
+                           97,
+                           "\x00\x00\x01\xB3\x50\x02\xD0\x35\xFF\xFF\xE1\xA0\x00\x00\x01\xB5\x15"
+                           "\x6A\x00\x01\x00\x00",
+                           22, 3415452, 3415452),
+                make_tuple("video_176x144_3gp_h263_56kbps_25fps_aac_mono_24kbps_11025hz.3gp", 64,
+                           "\x15\x08", 2, 24576, 0)));
+
+int main(int argc, char **argv) {
+    // MediaExtractor needs binder thread pool
+    ProcessState::self()->startThreadPool();
+    gEnv = new ESDSTestEnvironment();
+    ::testing::AddGlobalTestEnvironment(gEnv);
+    ::testing::InitGoogleTest(&argc, argv);
+    int status = gEnv->initFromOptions(argc, argv);
+    if (status == 0) {
+        status = RUN_ALL_TESTS();
+        ALOGV("Test result = %d\n", status);
+    }
+    return status;
+}
diff --git a/media/libstagefright/tests/ESDS/ESDSTestEnvironment.h b/media/libstagefright/tests/ESDS/ESDSTestEnvironment.h
new file mode 100644
index 0000000..4ca2303
--- /dev/null
+++ b/media/libstagefright/tests/ESDS/ESDSTestEnvironment.h
@@ -0,0 +1,72 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __ESDS_TEST_ENVIRONMENT_H__
+#define __ESDS_TEST_ENVIRONMENT_H__
+
+#include <gtest/gtest.h>
+
+#include <getopt.h>
+
+using namespace std;
+
+class ESDSTestEnvironment : public ::testing::Environment {
+  public:
+    ESDSTestEnvironment() : res("/data/local/tmp/") {}
+
+    // Parses the command line arguments
+    int initFromOptions(int argc, char **argv);
+
+    void setRes(const char *_res) { res = _res; }
+
+    const string getRes() const { return res; }
+
+  private:
+    string res;
+};
+
+int ESDSTestEnvironment::initFromOptions(int argc, char **argv) {
+    static struct option options[] = {{"res", required_argument, 0, 'P'}, {0, 0, 0, 0}};
+
+    while (true) {
+        int index = 0;
+        int c = getopt_long(argc, argv, "P:", options, &index);
+        if (c == -1) {
+            break;
+        }
+
+        switch (c) {
+            case 'P':
+                setRes(optarg);
+                break;
+            default:
+                break;
+        }
+    }
+
+    if (optind < argc) {
+        fprintf(stderr,
+                "unrecognized option: %s\n\n"
+                "usage: %s <gtest options> <test options>\n\n"
+                "test options are:\n\n"
+                "-P, --path: Resource files directory location\n",
+                argv[optind ?: 1], argv[0]);
+        return 2;
+    }
+    return 0;
+}
+
+#endif  // __ESDS_TEST_ENVIRONMENT_H__
diff --git a/media/libstagefright/tests/ESDS/README.md b/media/libstagefright/tests/ESDS/README.md
new file mode 100644
index 0000000..100fb86
--- /dev/null
+++ b/media/libstagefright/tests/ESDS/README.md
@@ -0,0 +1,40 @@
+## Media Testing ##
+---
+#### ESDS Unit Test :
+The ESDS Unit Test Suite validates the ESDS class available in libstagefright.
+
+Run the following steps to build the test suite:
+```
+m ESDSTest
+```
+
+The 32-bit binaries will be created in the following path: ${OUT}/data/nativetest/
+
+The 64-bit binaries will be created in the following path: ${OUT}/data/nativetest64/
+
+To test the 64-bit binary, push it from nativetest64.
+```
+adb push ${OUT}/data/nativetest64/ESDSTest/ESDSTest /data/local/tmp/
+```
+
+To test the 32-bit binary, push it from nativetest.
+```
+adb push ${OUT}/data/nativetest/ESDSTest/ESDSTest /data/local/tmp/
+```
+
+The resource files for the tests can be downloaded from [here](https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/tests/ESDS/ESDSTestRes-1.0.zip).
+Download, unzip and push them to the device for testing.
+
+```
+adb push ESDSTestRes /data/local/tmp/
+```
+
+usage: ESDSTest -P \<path_to_folder\>
+```
+adb shell /data/local/tmp/ESDSTest -P /data/local/tmp/ESDSTestRes/
+```
+Alternatively, the test can also be run using the atest command.
+
+```
+atest ESDSTest -- --enable-module-dynamic-download=true
+```
diff --git a/media/libstagefright/tests/HEVC/Android.bp b/media/libstagefright/tests/HEVC/Android.bp
new file mode 100644
index 0000000..3762553
--- /dev/null
+++ b/media/libstagefright/tests/HEVC/Android.bp
@@ -0,0 +1,52 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+cc_test {
+    name: "HEVCUtilsUnitTest",
+    test_suites: ["device-tests"],
+    gtest: true,
+
+    srcs: [
+        "HEVCUtilsUnitTest.cpp",
+    ],
+
+    shared_libs: [
+        "libutils",
+        "liblog",
+    ],
+
+    static_libs: [
+        "libstagefright",
+        "libstagefright_foundation",
+    ],
+
+    include_dirs: [
+        "frameworks/av/media/libstagefright",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+
+    sanitize: {
+        cfi: true,
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+    },
+}
diff --git a/media/libstagefright/tests/HEVC/AndroidTest.xml b/media/libstagefright/tests/HEVC/AndroidTest.xml
new file mode 100644
index 0000000..ff850a2
--- /dev/null
+++ b/media/libstagefright/tests/HEVC/AndroidTest.xml
@@ -0,0 +1,31 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<configuration description="Test module config for HEVC Utils unit tests">
+    <option name="test-suite-tag" value="HEVCUtilsUnitTest" />
+    <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
+        <option name="cleanup" value="false" />
+        <option name="push" value="HEVCUtilsUnitTest->/data/local/tmp/HEVCUtilsUnitTest" />
+        <option name="push-file"
+            key="https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/foundation/tests/HEVCUtils/HEVCUtilsUnitTest.zip?unzip=true"
+            value="/data/local/tmp/HEVCUtilsUnitTest/" />
+    </target_preparer>
+
+    <test class="com.android.tradefed.testtype.GTest" >
+        <option name="native-test-device-path" value="/data/local/tmp" />
+        <option name="module-name" value="HEVCUtilsUnitTest" />
+        <option name="native-test-flag" value="-P /data/local/tmp/HEVCUtilsUnitTest/" />
+    </test>
+</configuration>
diff --git a/media/libstagefright/tests/HEVC/HEVCUtilsTestEnvironment.h b/media/libstagefright/tests/HEVC/HEVCUtilsTestEnvironment.h
new file mode 100644
index 0000000..e4481e1
--- /dev/null
+++ b/media/libstagefright/tests/HEVC/HEVCUtilsTestEnvironment.h
@@ -0,0 +1,73 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __HEVC_UTILS_TEST_ENVIRONMENT_H__
+#define __HEVC_UTILS_TEST_ENVIRONMENT_H__
+
+#include <gtest/gtest.h>
+
+#include <getopt.h>
+
+using namespace std;
+
+class HEVCUtilsTestEnvironment : public ::testing::Environment {
+  public:
+    HEVCUtilsTestEnvironment() : res("/data/local/tmp/") {}
+
+    // Parses the command line arguments
+    int initFromOptions(int argc, char **argv);
+
+    void setRes(const char *_res) { res = _res; }
+
+    const string getRes() const { return res; }
+
+  private:
+    string res;
+};
+
+int HEVCUtilsTestEnvironment::initFromOptions(int argc, char **argv) {
+    static struct option options[] = {{"path", required_argument, 0, 'P'}, {0, 0, 0, 0}};
+
+    while (true) {
+        int index = 0;
+        int c = getopt_long(argc, argv, "P:", options, &index);
+        if (c == -1) {
+            break;
+        }
+
+        switch (c) {
+            case 'P': {
+                setRes(optarg);
+                break;
+            }
+            default:
+                break;
+        }
+    }
+
+    if (optind < argc) {
+        fprintf(stderr,
+                "unrecognized option: %s\n\n"
+                "usage: %s <gtest options> <test options>\n\n"
+                "test options are:\n\n"
+                "-P, --path: Resource files directory location\n",
+                argv[optind ?: 1], argv[0]);
+        return 2;
+    }
+    return 0;
+}
+
+#endif  // __HEVC_UTILS_TEST_ENVIRONMENT_H__
diff --git a/media/libstagefright/tests/HEVC/HEVCUtilsUnitTest.cpp b/media/libstagefright/tests/HEVC/HEVCUtilsUnitTest.cpp
new file mode 100644
index 0000000..324a042
--- /dev/null
+++ b/media/libstagefright/tests/HEVC/HEVCUtilsUnitTest.cpp
@@ -0,0 +1,208 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "HevcUtilityTest"
+#include <utils/Log.h>
+
+#include <fstream>
+
+#include <media/stagefright/foundation/ABitReader.h>
+#include "include/HevcUtils.h"
+
+#include "HEVCUtilsTestEnvironment.h"
+
+using namespace android;
+
+// max size of hvcc box is 2 KB
+constexpr uint32_t kHvccBoxMaxSize = 2048;
+constexpr uint32_t kHvccBoxMinSize = 20;
+constexpr uint32_t kVPSCode = 32;
+constexpr uint32_t kSPSCode = 33;
+constexpr uint32_t kPPSCode = 34;
+constexpr uint32_t kNALSizeLength = 2;
+
+static HEVCUtilsTestEnvironment *gEnv = nullptr;
+
+class HEVCUtilsUnitTest
+    : public ::testing::TestWithParam<
+              tuple</*fileName*/ string, /*infoFileName*/ string, /*numVPSNals*/ size_t,
+                    /*numSPSNals*/ size_t, /*numPPSNals*/ size_t, /*frameRate*/ int16_t,
+                    /*isHdr*/ bool>> {
+  public:
+    ~HEVCUtilsUnitTest() {
+        if (mMediaFileStream.is_open()) mMediaFileStream.close();
+        if (mInfoFileStream.is_open()) mInfoFileStream.close();
+    }
+
+    virtual void SetUp() override {
+        tuple<string, string, size_t, size_t, size_t, int16_t, bool> params = GetParam();
+        string inputMediaFile = gEnv->getRes() + get<0>(params);
+        mMediaFileStream.open(inputMediaFile, ifstream::in);
+        ASSERT_TRUE(mMediaFileStream.is_open()) << "Failed to open media file: " << inputMediaFile;
+
+        string inputInfoFile = gEnv->getRes() + get<1>(params);
+        mInfoFileStream.open(inputInfoFile, ifstream::in);
+        ASSERT_TRUE(mInfoFileStream.is_open()) << "Failed to open info file: " << inputInfoFile;
+
+        mNumVPSNals = get<2>(params);
+        mNumSPSNals = get<3>(params);
+        mNumPPSNals = get<4>(params);
+        mFrameRate = get<5>(params);
+        mIsHDR = get<6>(params);
+    }
+
+    size_t mNumVPSNals;
+    size_t mNumSPSNals;
+    size_t mNumPPSNals;
+    int16_t mFrameRate;
+    bool mIsHDR;
+    ifstream mMediaFileStream;
+    ifstream mInfoFileStream;
+};
+
+TEST_P(HEVCUtilsUnitTest, NALUnitTest) {
+    HevcParameterSets hevcParams;
+
+    string line;
+    int32_t index = 0;
+    status_t err;
+    while (getline(mInfoFileStream, line)) {
+        string type;
+        int32_t chunkLength;
+
+        istringstream stringLine(line);
+        stringLine >> type >> chunkLength;
+        ASSERT_GT(chunkLength, 0) << "Length of data chunk must be greater than 0";
+
+        char *data = (char *)malloc(chunkLength);
+        ASSERT_NE(data, nullptr) << "Failed to allocate data buffer of size: " << chunkLength;
+
+        mMediaFileStream.read(data, chunkLength);
+        ASSERT_EQ(mMediaFileStream.gcount(), chunkLength)
+                << "Failed to read complete file, bytes read: " << mMediaFileStream.gcount();
+
+        // A valid startcode consists of at least two 0x00 bytes followed by 0x01.
+        int32_t offset = 0;
+        for (; offset + 2 < chunkLength; ++offset) {
+            if (data[offset + 2] == 0x01 && data[offset + 1] == 0x00 && data[offset] == 0x00) {
+                break;
+            }
+        }
+        offset += 3;
+        ASSERT_LE(offset, chunkLength) << "NAL unit offset must not exceed the chunk length";
+
+        uint8_t *nalUnit = (uint8_t *)(data + offset);
+        size_t nalUnitLength = chunkLength - offset;
+
+        // Add NAL units only if they're of type: VPS/SPS/PPS/SEI
+        if (!((type.compare("VPS") && type.compare("SPS") && type.compare("PPS") &&
+               type.compare("SEI")))) {
+            err = hevcParams.addNalUnit(nalUnit, nalUnitLength);
+            ASSERT_EQ(err, (status_t)OK)
+                    << "Failed to add NAL Unit type: " << type << " Size: " << nalUnitLength;
+
+            size_t sizeNalUnit = hevcParams.getSize(index);
+            ASSERT_EQ(sizeNalUnit, nalUnitLength) << "Invalid size returned for NAL: " << type;
+
+            uint8_t *destination = (uint8_t *)malloc(nalUnitLength);
+            ASSERT_NE(destination, nullptr)
+                    << "Failed to allocate buffer of size: " << nalUnitLength;
+
+            bool status = hevcParams.write(index, destination, nalUnitLength);
+            ASSERT_TRUE(status) << "Unable to write NAL Unit data";
+
+            free(destination);
+            index++;
+        } else {
+            err = hevcParams.addNalUnit(nalUnit, nalUnitLength);
+            ASSERT_NE(err, (status_t)OK) << "Invalid NAL Unit added, type: " << type;
+        }
+        free(data);
+    }
+
+    size_t numNalUnits = hevcParams.getNumNalUnitsOfType(kVPSCode);
+    ASSERT_EQ(numNalUnits, mNumVPSNals) << "Wrong number of VPS NAL Units";
+
+    numNalUnits = hevcParams.getNumNalUnitsOfType(kSPSCode);
+    ASSERT_EQ(numNalUnits, mNumSPSNals) << "Wrong number of SPS NAL Units";
+
+    numNalUnits = hevcParams.getNumNalUnitsOfType(kPPSCode);
+    ASSERT_EQ(numNalUnits, mNumPPSNals) << "Wrong number of PPS NAL Units";
+
+    HevcParameterSets::Info info = hevcParams.getInfo();
+    ASSERT_EQ(info & HevcParameterSets::kInfoIsHdr,
+              (mIsHDR ? HevcParameterSets::kInfoIsHdr : HevcParameterSets::kInfoNone))
+            << "Wrong info about HDR";
+
+    ASSERT_EQ(info & HevcParameterSets::kInfoHasColorDescription,
+              (mIsHDR ? HevcParameterSets::kInfoHasColorDescription : HevcParameterSets::kInfoNone))
+            << "Wrong info about color description";
+
+    // an HEVC file starts with VPS, SPS and PPS NAL units in sequence.
+    uint8_t typeNalUnit = hevcParams.getType(0);
+    ASSERT_EQ(typeNalUnit, kHevcNalUnitTypeVps)
+            << "Expected NAL type: 32(VPS), found: " << typeNalUnit;
+
+    typeNalUnit = hevcParams.getType(1);
+    ASSERT_EQ(typeNalUnit, kHevcNalUnitTypeSps)
+            << "Expected NAL type: 33(SPS), found: " << typeNalUnit;
+
+    typeNalUnit = hevcParams.getType(2);
+    ASSERT_EQ(typeNalUnit, kHevcNalUnitTypePps)
+            << "Expected NAL type: 34(PPS), found: " << typeNalUnit;
+
+    size_t hvccBoxSize = kHvccBoxMaxSize;
+    uint8_t *hvcc = (uint8_t *)malloc(kHvccBoxMaxSize);
+    ASSERT_NE(hvcc, nullptr) << "Failed to allocate a hvcc buffer of size: " << kHvccBoxMaxSize;
+
+    err = hevcParams.makeHvcc(hvcc, &hvccBoxSize, kNALSizeLength);
+    ASSERT_EQ(err, (status_t)OK) << "Unable to create hvcc box";
+
+    ASSERT_GT(hvccBoxSize, kHvccBoxMinSize)
+            << "Hvcc box size must be greater than " << kHvccBoxMinSize;
+
+    int16_t frameRate = hvcc[kHvccBoxMinSize - 1] | (hvcc[kHvccBoxMinSize] << 8);
+    if (frameRate != mFrameRate)
+        cout << "[   WARN   ] Expected frame rate: " << mFrameRate << " Found: " << frameRate
+             << endl;
+
+    free(hvcc);
+}
+
+// Info File contains the type and length for each chunk/frame
+INSTANTIATE_TEST_SUITE_P(
+        HEVCUtilsUnitTestAll, HEVCUtilsUnitTest,
+        ::testing::Values(make_tuple("crowd_3840x2160p50f300_32500kbps.hevc",
+                                     "crowd_3840x2160p50f300_32500kbps.info", 1, 1, 1, 50, false),
+                          make_tuple("crowd_1920x1080p24f300_4500kbps.hevc",
+                                     "crowd_1920x1080p24f300_4500kbps.info", 1, 1, 1, 24, false),
+                          make_tuple("crowd_1280x720p24f300_3000kbps.hevc",
+                                     "crowd_1280x720p24f300_3000kbps.info", 1, 1, 1, 24, false),
+                          make_tuple("crowd_640x360p24f300_500kbps.hevc",
+                                     "crowd_640x360p24f300_500kbps.info", 1, 1, 1, 24, false)));
+
+int main(int argc, char **argv) {
+    gEnv = new HEVCUtilsTestEnvironment();
+    ::testing::AddGlobalTestEnvironment(gEnv);
+    ::testing::InitGoogleTest(&argc, argv);
+    int status = gEnv->initFromOptions(argc, argv);
+    if (status == 0) {
+        status = RUN_ALL_TESTS();
+        ALOGV("Test result = %d\n", status);
+    }
+    return status;
+}
diff --git a/media/libstagefright/tests/HEVC/README.md b/media/libstagefright/tests/HEVC/README.md
new file mode 100644
index 0000000..fa0e99c
--- /dev/null
+++ b/media/libstagefright/tests/HEVC/README.md
@@ -0,0 +1,39 @@
+## Media Testing ##
+---
+#### HEVC Utils Test
+The HEVC Utility Unit Test Suite validates the HevcUtils library available in libstagefright.
+
+Run the following steps to build the test suite:
+```
+m HEVCUtilsUnitTest
+```
+
+The 32-bit binaries will be created in the following path: ${OUT}/data/nativetest/
+
+The 64-bit binaries will be created in the following path: ${OUT}/data/nativetest64/
+
+To test the 64-bit binary, push it from nativetest64.
+```
+adb push ${OUT}/data/nativetest64/HEVCUtilsUnitTest/HEVCUtilsUnitTest /data/local/tmp/
+```
+
+To test the 32-bit binary, push it from nativetest.
+```
+adb push ${OUT}/data/nativetest/HEVCUtilsUnitTest/HEVCUtilsUnitTest /data/local/tmp/
+```
+
+The resource files for the tests can be downloaded from [here](https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/foundation/tests/HEVCUtils/HEVCUtilsUnitTest.zip). Download, unzip and push them to the device for testing.
+
+```
+adb push HEVCUtilsUnitTest /data/local/tmp/
+```
+
+usage: HEVCUtilsUnitTest -P \<path_to_folder\>
+```
+adb shell /data/local/tmp/HEVCUtilsUnitTest -P /data/local/tmp/HEVCUtilsUnitTest/
+```
+Alternatively, the test can also be run using the atest command.
+
+```
+atest HEVCUtilsUnitTest -- --enable-module-dynamic-download=true
+```
diff --git a/media/libstagefright/tests/extractorFactory/Android.bp b/media/libstagefright/tests/extractorFactory/Android.bp
index e3e61d7..26ec507 100644
--- a/media/libstagefright/tests/extractorFactory/Android.bp
+++ b/media/libstagefright/tests/extractorFactory/Android.bp
@@ -17,6 +17,7 @@
 cc_test {
     name: "ExtractorFactoryTest",
     gtest: true,
+    test_suites: ["device-tests"],
 
     srcs: [
         "ExtractorFactoryTest.cpp",
diff --git a/media/libstagefright/tests/fuzzers/Android.bp b/media/libstagefright/tests/fuzzers/Android.bp
new file mode 100644
index 0000000..49ff69a
--- /dev/null
+++ b/media/libstagefright/tests/fuzzers/Android.bp
@@ -0,0 +1,53 @@
+cc_defaults {
+    name: "libstagefright_fuzzer_defaults",
+    cflags: [
+        "-Wno-multichar",
+        "-Werror",
+        "-Wno-error=deprecated-declarations",
+        "-Wall",
+    ],
+    shared_libs: [
+        "libstagefright",
+	"libstagefright_codecbase",
+        "libutils",
+        "libstagefright_foundation",
+        "libmedia",
+        "libaudioclient",
+        "libmedia_omx",
+        "libgui",
+        "libbinder",
+        "libcutils",
+    ],
+}
+
+cc_fuzz {
+    name: "libstagefright_mediaclock_fuzzer",
+    srcs: [
+        "MediaClockFuzzer.cpp",
+    ],
+    defaults: ["libstagefright_fuzzer_defaults"],
+}
+
+cc_fuzz {
+    name: "libstagefright_mediascanner_fuzzer",
+    srcs: [
+        "StagefrightMediaScannerFuzzer.cpp",
+    ],
+    defaults: ["libstagefright_fuzzer_defaults"],
+}
+
+cc_fuzz {
+    name: "libstagefright_skipcutbuffer_fuzzer",
+    srcs: [
+        "SkipCutBufferFuzzer.cpp",
+    ],
+    defaults: ["libstagefright_fuzzer_defaults"],
+}
+
+cc_fuzz {
+    name: "libstagefright_mediamuxer_fuzzer",
+    srcs: [
+        "MediaMuxerFuzzer.cpp",
+    ],
+    defaults: ["libstagefright_fuzzer_defaults"],
+}
diff --git a/media/libstagefright/tests/fuzzers/MediaClockFuzzer.cpp b/media/libstagefright/tests/fuzzers/MediaClockFuzzer.cpp
new file mode 100644
index 0000000..e473541
--- /dev/null
+++ b/media/libstagefright/tests/fuzzers/MediaClockFuzzer.cpp
@@ -0,0 +1,68 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// Authors: corbin.souffrant@leviathansecurity.com
+//          dylan.katz@leviathansecurity.com
+
+#include <fuzzer/FuzzedDataProvider.h>
+#include <media/stagefright/MediaClock.h>
+#include <media/stagefright/foundation/AMessage.h>
+
+namespace android {
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+  FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+  sp<MediaClock> mClock(new MediaClock);
+
+  bool registered = false;
+  while (fdp.remaining_bytes() > 0) {
+    switch (fdp.ConsumeIntegralInRange<uint8_t>(0, 5)) {
+    case 0: {
+      if (registered == false) {
+        mClock->init();
+        registered = true;
+      }
+      break;
+    }
+    case 1: {
+      int64_t startingTimeMediaUs = fdp.ConsumeIntegral<int64_t>();
+      mClock->setStartingTimeMedia(startingTimeMediaUs);
+      break;
+    }
+    case 2: {
+      mClock->clearAnchor();
+      break;
+    }
+    case 3: {
+      int64_t anchorTimeRealUs = fdp.ConsumeIntegral<int64_t>();
+      int64_t anchorTimeMediaUs = fdp.ConsumeIntegral<int64_t>();
+      int64_t maxTimeMediaUs = fdp.ConsumeIntegral<int64_t>();
+      mClock->updateAnchor(anchorTimeMediaUs, anchorTimeRealUs, maxTimeMediaUs);
+      break;
+    }
+    case 4: {
+      int64_t maxTimeMediaUs = fdp.ConsumeIntegral<int64_t>();
+      mClock->updateMaxTimeMedia(maxTimeMediaUs);
+      break;
+    }
+    case 5: {
+      wp<AMessage> msg(new AMessage);
+      mClock->setNotificationMessage(msg.promote());
+    }
+    }
+  }
+
+  return 0;
+}
+} // namespace android
diff --git a/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.cpp b/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.cpp
new file mode 100644
index 0000000..5df3267
--- /dev/null
+++ b/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.cpp
@@ -0,0 +1,103 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// Authors: corbin.souffrant@leviathansecurity.com
+//          dylan.katz@leviathansecurity.com
+
+#include <MediaMuxerFuzzer.h>
+#include <cutils/ashmem.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <media/stagefright/MediaMuxer.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/AMessage.h>
+
+namespace android {
+
+// setBuffer and setString are not exercised here: they consistently segfault
+// on a null-pointer read or leak memory, so that functionality is omitted.
+void createMessage(AMessage *msg, FuzzedDataProvider *fdp) {
+  size_t count = fdp->ConsumeIntegralInRange<size_t>(0, 32);
+  while (fdp->remaining_bytes() > 0 && count > 0) {
+    uint8_t function_id =
+        fdp->ConsumeIntegralInRange<uint8_t>(0, amessage_setvals.size() - 1);
+    amessage_setvals[function_id](msg, fdp);
+    count--;
+  }
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+  FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+
+  size_t data_size = fdp.ConsumeIntegralInRange<size_t>(0, size);
+  int fd = ashmem_create_region("mediamuxer_fuzz_region", data_size);
+  if (fd < 0)
+    return 0;
+
+  uint8_t *sh_data = static_cast<uint8_t *>(
+      mmap(NULL, data_size, PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0));
+  if (sh_data == MAP_FAILED)
+    return 0;
+
+  MediaMuxer::OutputFormat format =
+      (MediaMuxer::OutputFormat)fdp.ConsumeIntegralInRange<int32_t>(0, 4);
+  sp<MediaMuxer> mMuxer(new MediaMuxer(fd, format));
+
+  while (fdp.remaining_bytes() > 1) {
+    switch (fdp.ConsumeIntegralInRange<uint8_t>(0, 4)) {
+    case 0: {
+      // Only MP4-family output formats accept addTrack here; skip the others.
+      if (format == 1 || format == 4)
+        break;
+
+      sp<AMessage> a_format(new AMessage);
+      createMessage(a_format.get(), &fdp);
+      mMuxer->addTrack(a_format);
+      break;
+    }
+    case 1: {
+      mMuxer->start();
+      break;
+    }
+    case 2: {
+      int degrees = fdp.ConsumeIntegral<int>();
+      mMuxer->setOrientationHint(degrees);
+      break;
+    }
+    case 3: {
+      int latitude = fdp.ConsumeIntegral<int>();
+      int longitude = fdp.ConsumeIntegral<int>();
+      mMuxer->setLocation(latitude, longitude);
+      break;
+    }
+    case 4: {
+      size_t buf_size = fdp.ConsumeIntegralInRange<size_t>(0, data_size);
+      sp<ABuffer> a_buffer(new ABuffer(buf_size));
+
+      size_t trackIndex = fdp.ConsumeIntegral<size_t>();
+      int64_t timeUs = fdp.ConsumeIntegral<int64_t>();
+      uint32_t flags = fdp.ConsumeIntegral<uint32_t>();
+      mMuxer->writeSampleData(a_buffer, trackIndex, timeUs, flags);
+    }
+    }
+  }
+
+  if (fdp.ConsumeBool())
+    mMuxer->stop();
+
+  munmap(sh_data, data_size);
+  close(fd);
+  return 0;
+}
+} // namespace android
diff --git a/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.h b/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.h
new file mode 100644
index 0000000..7d4421d
--- /dev/null
+++ b/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.h
@@ -0,0 +1,109 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// Authors: corbin.souffrant@leviathansecurity.com
+//          dylan.katz@leviathansecurity.com
+
+#pragma once
+
+#include <fuzzer/FuzzedDataProvider.h>
+#include <media/stagefright/foundation/AMessage.h>
+
+namespace android {
+
+// Mappings vectors are the list of attributes that the MediaMuxer
+// class looks for in the message.
+static std::vector<const char *> floatMappings{
+    "capture-rate",
+    "time-lapse-fps",
+    "frame-rate",
+};
+
+static std::vector<const char *> int64Mappings{
+    "exif-offset",    "exif-size", "target-time",
+    "thumbnail-time", "timeUs",    "durationUs",
+};
+
+static std::vector<const char *> int32Mappings{"loop",
+                                               "time-scale",
+                                               "crypto-mode",
+                                               "crypto-default-iv-size",
+                                               "crypto-encrypted-byte-block",
+                                               "crypto-skip-byte-block",
+                                               "frame-count",
+                                               "max-bitrate",
+                                               "pcm-big-endian",
+                                               "temporal-layer-count",
+                                               "temporal-layer-id",
+                                               "thumbnail-width",
+                                               "thumbnail-height",
+                                               "track-id",
+                                               "valid-samples",
+                                               "color-format",
+                                               "ca-system-id",
+                                               "is-sync-frame",
+                                               "bitrate",
+                                               "max-bitrate",
+                                               "width",
+                                               "height",
+                                               "sar-width",
+                                               "sar-height",
+                                               "display-width",
+                                               "display-height",
+                                               "is-default",
+                                               "tile-width",
+                                               "tile-height",
+                                               "grid-rows",
+                                               "grid-cols",
+                                               "rotation-degrees",
+                                               "channel-count",
+                                               "sample-rate",
+                                               "bits-per-sample",
+                                               "channel-mask",
+                                               "encoder-delay",
+                                               "encoder-padding",
+                                               "is-adts",
+                                               "frame-rate",
+                                               "max-height",
+                                               "max-width",
+                                               "max-input-size",
+                                               "haptic-channel-count",
+                                               "pcm-encoding",
+                                               "aac-profile"};
+
+static const std::vector<std::function<void(AMessage *, FuzzedDataProvider *)>>
+    amessage_setvals = {
+        [](AMessage *msg, FuzzedDataProvider *fdp) -> void {
+          msg->setRect("crop", fdp->ConsumeIntegral<int32_t>(),
+                       fdp->ConsumeIntegral<int32_t>(),
+                       fdp->ConsumeIntegral<int32_t>(),
+                       fdp->ConsumeIntegral<int32_t>());
+        },
+        [](AMessage *msg, FuzzedDataProvider *fdp) -> void {
+          msg->setFloat(floatMappings[fdp->ConsumeIntegralInRange<size_t>(
+                            0, floatMappings.size() - 1)],
+                        fdp->ConsumeFloatingPoint<float>());
+        },
+        [](AMessage *msg, FuzzedDataProvider *fdp) -> void {
+          msg->setInt64(int64Mappings[fdp->ConsumeIntegralInRange<size_t>(
+                            0, int64Mappings.size() - 1)],
+                        fdp->ConsumeIntegral<int64_t>());
+        },
+        [](AMessage *msg, FuzzedDataProvider *fdp) -> void {
+          msg->setInt32(int32Mappings[fdp->ConsumeIntegralInRange<size_t>(
+                            0, int32Mappings.size() - 1)],
+                        fdp->ConsumeIntegral<int32_t>());
+        }};
+} // namespace android
diff --git a/media/libstagefright/tests/fuzzers/SkipCutBufferFuzzer.cpp b/media/libstagefright/tests/fuzzers/SkipCutBufferFuzzer.cpp
new file mode 100644
index 0000000..1f78e6d
--- /dev/null
+++ b/media/libstagefright/tests/fuzzers/SkipCutBufferFuzzer.cpp
@@ -0,0 +1,73 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// Authors: corbin.souffrant@leviathansecurity.com
+//          dylan.katz@leviathansecurity.com
+
+#include <fuzzer/FuzzedDataProvider.h>
+#include <media/MediaCodecBuffer.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/SkipCutBuffer.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/AMessage.h>
+
+#include <memory>
+
+namespace android {
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+  FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+  size_t skip = fdp.ConsumeIntegral<size_t>();
+  size_t cut = fdp.ConsumeIntegral<size_t>();
+  size_t num16Channels = fdp.ConsumeIntegral<size_t>();
+  sp<SkipCutBuffer> sBuffer(new SkipCutBuffer(skip, cut, num16Channels));
+
+  while (fdp.remaining_bytes() > 0) {
+    // Cap size to 1024 to limit max amount allocated.
+    size_t buf_size = fdp.ConsumeIntegralInRange<size_t>(0, 1024);
+    size_t range = fdp.ConsumeIntegralInRange<size_t>(0, buf_size);
+    size_t length = fdp.ConsumeIntegralInRange<size_t>(0, buf_size - range);
+
+    switch (fdp.ConsumeIntegralInRange<uint8_t>(0, 4)) {
+    case 0: {
+      sp<ABuffer> a_buffer(new ABuffer(buf_size));
+      sp<AMessage> format(new AMessage);
+      sp<MediaCodecBuffer> s_buffer(new MediaCodecBuffer(format, a_buffer));
+      s_buffer->setRange(range, length);
+      sBuffer->submit(s_buffer);
+      break;
+    }
+    case 1: {
+      std::unique_ptr<MediaBufferBase> m_buffer(new MediaBuffer(buf_size));
+      m_buffer->set_range(range, length);
+      sBuffer->submit(reinterpret_cast<MediaBuffer *>(m_buffer.get()));
+      break;
+    }
+    case 2: {
+      sp<ABuffer> a_buffer(new ABuffer(buf_size));
+      sp<AMessage> format(new AMessage);
+      sp<MediaCodecBuffer> s_buffer(new MediaCodecBuffer(format, a_buffer));
+      a_buffer->setRange(range, length);
+      sBuffer->submit(a_buffer);
+      break;
+    }
+    case 3: {
+      sBuffer->clear();
+      break;
+    }
+    case 4: {
+      sBuffer->size();
+    }
+    }
+  }
+  return 0;
+}
+} // namespace android
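
As background for the cases above, here is a minimal sketch of the SkipCutBuffer flow being exercised. The skip/cut/channel values are arbitrary, and only calls the fuzzer itself already uses (submit(), size(), clear()) are assumed.

```cpp
// Minimal sketch of the flow the fuzzer drives (hypothetical values).
#include <media/stagefright/SkipCutBuffer.h>
#include <media/stagefright/foundation/ABuffer.h>

using namespace android;

void skipCutExample() {
    // Skip/cut amounts are interpreted by SkipCutBuffer itself; the values
    // here are arbitrary and only serve to show the call sequence.
    sp<SkipCutBuffer> scb(new SkipCutBuffer(/*skip=*/1024, /*cut=*/512, /*num16Channels=*/2));

    sp<ABuffer> decoded(new ABuffer(4096));
    decoded->setRange(0, 4096);
    scb->submit(decoded);          // the buffer is trimmed per the configured skip/cut
    size_t pending = scb->size();  // bytes SkipCutBuffer is still holding back
    (void)pending;
    scb->clear();                  // reset state between streams
}
```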
diff --git a/media/libstagefright/tests/fuzzers/StagefrightMediaScannerFuzzer.cpp b/media/libstagefright/tests/fuzzers/StagefrightMediaScannerFuzzer.cpp
new file mode 100644
index 0000000..a072b7c
--- /dev/null
+++ b/media/libstagefright/tests/fuzzers/StagefrightMediaScannerFuzzer.cpp
@@ -0,0 +1,82 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// Authors: corbin.souffrant@leviathansecurity.com
+//          dylan.katz@leviathansecurity.com
+
+#include <cutils/ashmem.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <media/stagefright/StagefrightMediaScanner.h>
+#include <sys/mman.h>
+#include <unistd.h>
+
+#include <algorithm>
+#include <cstdio>
+#include <memory>
+#include <string>
+#include <vector>
+
+namespace android {
+class FuzzMediaScannerClient : public MediaScannerClient {
+public:
+  virtual status_t scanFile(const char *, long long, long long, bool, bool) {
+    return 0;
+  }
+
+  virtual status_t handleStringTag(const char *, const char *) { return 0; }
+
+  virtual status_t setMimeType(const char *) { return 0; }
+};
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+  FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+  StagefrightMediaScanner mScanner = StagefrightMediaScanner();
+  // A locale must be set before scanning; without one the fuzzer crashes.
+  mScanner.setLocale("");
+
+  size_t data_size = fdp.ConsumeIntegralInRange<size_t>(0, size);
+  int fd =
+      ashmem_create_region("stagefrightmediascanner_fuzz_region", data_size);
+  if (fd < 0)
+    return 0;
+
+  uint8_t *sh_data = static_cast<uint8_t *>(
+      mmap(NULL, data_size, PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0));
+  if (sh_data == MAP_FAILED)
+    return 0;
+
+  while (fdp.remaining_bytes() > 8) {
+    switch (fdp.ConsumeIntegralInRange<uint8_t>(0, 1)) {
+    case 0: {
+      std::string path = fdp.ConsumeRandomLengthString(fdp.remaining_bytes());
+      std::string mimeType =
+          fdp.ConsumeRandomLengthString(fdp.remaining_bytes());
+      std::shared_ptr<MediaScannerClient> client(new FuzzMediaScannerClient());
+      mScanner.processFile(path.c_str(), mimeType.c_str(), *client);
+      break;
+    }
+    case 1: {
+      size_t to_copy = fdp.ConsumeIntegralInRange<size_t>(1, data_size);
+      std::vector<uint8_t> rand_buf = fdp.ConsumeBytes<uint8_t>(to_copy);
+
+      // If fdp doesn't have enough bytes left, ConsumeBytes() simply returns
+      // a shorter vector, so clamp to_copy accordingly.
+      to_copy = std::min(rand_buf.size(), data_size);
+
+      // Copy the fuzzed bytes into the shared memory region before handing
+      // the fd to the scanner.
+      std::copy(rand_buf.begin(), rand_buf.begin() + to_copy, sh_data);
+      mScanner.extractAlbumArt(fd);
+    }
+    }
+  }
+
+  munmap(sh_data, data_size);
+  close(fd);
+  return 0;
+}
+} // namespace android
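
The shared-memory path above condenses to the following sketch; the region name, helper function, and size parameter are hypothetical, and the return value of extractAlbumArt() is ignored just as it is in the fuzzer.

```cpp
// Condensed sketch of the ashmem + extractAlbumArt() path (hypothetical names).
#include <cutils/ashmem.h>
#include <fuzzer/FuzzedDataProvider.h>
#include <media/stagefright/StagefrightMediaScanner.h>
#include <sys/mman.h>
#include <unistd.h>

#include <algorithm>
#include <vector>

static void fuzzAlbumArt(android::StagefrightMediaScanner &scanner,
                         FuzzedDataProvider &fdp, size_t regionSize) {
    int fd = ashmem_create_region("example_album_art_region", regionSize);
    if (fd < 0) return;

    uint8_t *shm = static_cast<uint8_t *>(
            mmap(nullptr, regionSize, PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0));
    if (shm == MAP_FAILED) {
        close(fd);
        return;
    }

    // Fill the region with fuzzed bytes, then let the scanner parse the fd
    // as if it were a media file containing album art.
    std::vector<uint8_t> bytes = fdp.ConsumeBytes<uint8_t>(regionSize);
    std::copy(bytes.begin(), bytes.end(), shm);
    scanner.extractAlbumArt(fd);  // return value ignored, as in the fuzzer

    munmap(shm, regionSize);
    close(fd);
}
```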
diff --git a/media/libstagefright/tests/mediacodec/Android.bp b/media/libstagefright/tests/mediacodec/Android.bp
new file mode 100644
index 0000000..0bd0639
--- /dev/null
+++ b/media/libstagefright/tests/mediacodec/Android.bp
@@ -0,0 +1,62 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+cc_test {
+    name: "mediacodecTest",
+    gtest: true,
+
+    srcs: [
+        "MediaCodecTest.cpp",
+        "MediaTestHelper.cpp",
+    ],
+
+    header_libs: [
+        "libmediadrm_headers",
+    ],
+
+    shared_libs: [
+        "libgui",
+        "libmedia",
+        "libmedia_codeclist",
+        "libmediametrics",
+        "libmediandk",
+        "libstagefright",
+        "libstagefright_codecbase",
+        "libstagefright_foundation",
+        "libutils",
+    ],
+
+    static_libs: [
+        "libgmock",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+
+    sanitize: {
+        cfi: true,
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+    },
+
+    test_suites: [
+        "general-tests",
+    ],
+}
diff --git a/media/libstagefright/tests/mediacodec/MediaCodecTest.cpp b/media/libstagefright/tests/mediacodec/MediaCodecTest.cpp
new file mode 100644
index 0000000..d00a50f
--- /dev/null
+++ b/media/libstagefright/tests/mediacodec/MediaCodecTest.cpp
@@ -0,0 +1,350 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <algorithm>
+#include <condition_variable>
+#include <functional>
+#include <future>
+#include <mutex>
+#include <thread>
+
+#include <gmock/gmock.h>
+#include <gtest/gtest.h>
+
+#include <gui/Surface.h>
+#include <mediadrm/ICrypto.h>
+#include <media/stagefright/CodecBase.h>
+#include <media/stagefright/MediaCodec.h>
+#include <media/stagefright/MediaCodecListWriter.h>
+#include <media/MediaCodecInfo.h>
+
+#include "MediaTestHelper.h"
+
+namespace android {
+
+class MockBufferChannel : public BufferChannelBase {
+public:
+    ~MockBufferChannel() override = default;
+
+    MOCK_METHOD(void, setCrypto, (const sp<ICrypto> &crypto), (override));
+    MOCK_METHOD(void, setDescrambler, (const sp<IDescrambler> &descrambler), (override));
+    MOCK_METHOD(status_t, queueInputBuffer, (const sp<MediaCodecBuffer> &buffer), (override));
+    MOCK_METHOD(status_t, queueSecureInputBuffer,
+            (const sp<MediaCodecBuffer> &buffer,
+             bool secure,
+             const uint8_t *key,
+             const uint8_t *iv,
+             CryptoPlugin::Mode mode,
+             CryptoPlugin::Pattern pattern,
+             const CryptoPlugin::SubSample *subSamples,
+             size_t numSubSamples,
+             AString *errorDetailMsg),
+            (override));
+    MOCK_METHOD(status_t, attachBuffer,
+            (const std::shared_ptr<C2Buffer> &c2Buffer, const sp<MediaCodecBuffer> &buffer),
+            (override));
+    MOCK_METHOD(status_t, attachEncryptedBuffer,
+            (const sp<hardware::HidlMemory> &memory,
+             bool secure,
+             const uint8_t *key,
+             const uint8_t *iv,
+             CryptoPlugin::Mode mode,
+             CryptoPlugin::Pattern pattern,
+             size_t offset,
+             const CryptoPlugin::SubSample *subSamples,
+             size_t numSubSamples,
+             const sp<MediaCodecBuffer> &buffer),
+            (override));
+    MOCK_METHOD(status_t, renderOutputBuffer,
+            (const sp<MediaCodecBuffer> &buffer, int64_t timestampNs),
+            (override));
+    MOCK_METHOD(status_t, discardBuffer, (const sp<MediaCodecBuffer> &buffer), (override));
+    MOCK_METHOD(void, getInputBufferArray, (Vector<sp<MediaCodecBuffer>> *array), (override));
+    MOCK_METHOD(void, getOutputBufferArray, (Vector<sp<MediaCodecBuffer>> *array), (override));
+};
+
+class MockCodec : public CodecBase {
+public:
+    MockCodec(std::function<void(const std::shared_ptr<MockBufferChannel> &)> mock) {
+        mMockBufferChannel = std::make_shared<MockBufferChannel>();
+        mock(mMockBufferChannel);
+    }
+    ~MockCodec() override = default;
+
+    MOCK_METHOD(void, initiateAllocateComponent, (const sp<AMessage> &msg), (override));
+    MOCK_METHOD(void, initiateConfigureComponent, (const sp<AMessage> &msg), (override));
+    MOCK_METHOD(void, initiateCreateInputSurface, (), (override));
+    MOCK_METHOD(void, initiateSetInputSurface, (const sp<PersistentSurface> &surface), (override));
+    MOCK_METHOD(void, initiateStart, (), (override));
+    MOCK_METHOD(void, initiateShutdown, (bool keepComponentAllocated), (override));
+    MOCK_METHOD(void, onMessageReceived, (const sp<AMessage> &msg), (override));
+    MOCK_METHOD(status_t, setSurface, (const sp<Surface> &surface), (override));
+    MOCK_METHOD(void, signalFlush, (), (override));
+    MOCK_METHOD(void, signalResume, (), (override));
+    MOCK_METHOD(void, signalRequestIDRFrame, (), (override));
+    MOCK_METHOD(void, signalSetParameters, (const sp<AMessage> &msg), (override));
+    MOCK_METHOD(void, signalEndOfInputStream, (), (override));
+
+    std::shared_ptr<BufferChannelBase> getBufferChannel() override {
+        return mMockBufferChannel;
+    }
+
+    const std::unique_ptr<CodecCallback> &callback() {
+        return mCallback;
+    }
+
+    std::shared_ptr<MockBufferChannel> mMockBufferChannel;
+};
+
+class Counter {
+public:
+    Counter() = default;
+    explicit Counter(int32_t initCount) : mCount(initCount) {}
+    ~Counter() = default;
+
+    int32_t advance() {
+        std::unique_lock<std::mutex> lock(mMutex);
+        ++mCount;
+        mCondition.notify_all();
+        return mCount;
+    }
+
+    template <typename Rep, typename Period, typename ...Args>
+    int32_t waitFor(const std::chrono::duration<Rep, Period> &duration, Args... values) {
+        std::initializer_list<int32_t> list = {values...};
+        std::unique_lock<std::mutex> lock(mMutex);
+        mCondition.wait_for(
+                lock,
+                duration,
+                [&list, this]{
+                    return std::find(list.begin(), list.end(), mCount) != list.end();
+                });
+        return mCount;
+    }
+
+    template <typename ...Args>
+    int32_t wait(Args... values) {
+        std::initializer_list<int32_t> list = {values...};
+        std::unique_lock<std::mutex> lock(mMutex);
+        mCondition.wait(
+                lock,
+                [&list, this]{
+                    return std::find(list.begin(), list.end(), mCount) != list.end();
+                });
+        return mCount;
+    }
+
+private:
+    std::mutex mMutex;
+    std::condition_variable mCondition;
+    int32_t mCount = 0;
+};
+
+}  // namespace android
+
+using namespace android;
+using ::testing::_;
+
+static sp<MediaCodec> SetupMediaCodec(
+        const AString &owner,
+        const AString &codecName,
+        const AString &mediaType,
+        const sp<ALooper> &looper,
+        std::function<sp<CodecBase>(const AString &name, const char *owner)> getCodecBase) {
+    std::shared_ptr<MediaCodecListWriter> listWriter =
+        MediaTestHelper::CreateCodecListWriter();
+    std::unique_ptr<MediaCodecInfoWriter> infoWriter = listWriter->addMediaCodecInfo();
+    infoWriter->setName(codecName.c_str());
+    infoWriter->setOwner(owner.c_str());
+    infoWriter->addMediaType(mediaType.c_str());
+    std::vector<sp<MediaCodecInfo>> codecInfos;
+    MediaTestHelper::WriteCodecInfos(listWriter, &codecInfos);
+    std::function<status_t(const AString &, sp<MediaCodecInfo> *)> getCodecInfo =
+        [codecInfos](const AString &name, sp<MediaCodecInfo> *info) -> status_t {
+            auto it = std::find_if(
+                    codecInfos.begin(), codecInfos.end(),
+                    [&name](const sp<MediaCodecInfo> &info) {
+                        return name.equalsIgnoreCase(info->getCodecName());
+                    });
+
+            *info = (it == codecInfos.end()) ? nullptr : *it;
+            return (*info) ? OK : NAME_NOT_FOUND;
+        };
+
+    looper->start();
+    return MediaTestHelper::CreateCodec(
+            codecName, looper, getCodecBase, getCodecInfo);
+}
+
+TEST(MediaCodecTest, ReclaimReleaseRace) {
+    // Test scenario:
+    //
+    // 1) ResourceManager thread calls reclaim(), message posted to
+    //    MediaCodec looper thread.
+    // 2) MediaCodec looper thread calls initiateShutdown(), shutdown being
+    //    handled at the component thread.
+    // 3) Client thread calls release(); message posted to & handled at
+    //    MediaCodec looper thread.
+    // 4) MediaCodec looper thread may call initiateShutdown().
+    // 5) The shutdown initiated in 2) completes and its onReleaseCompleted()
+    //    event is posted to the MediaCodec looper thread.
+    // 6) If it was called, the shutdown initiated in 4) also completes and
+    //    posts its own onReleaseCompleted() event to the MediaCodec looper
+    //    thread.
+
+    static const AString kCodecName{"test.codec"};
+    static const AString kCodecOwner{"nobody"};
+    static const AString kMediaType{"video/x-test"};
+
+    enum {
+        kInit,
+        kShutdownFromReclaimReceived,
+        kReleaseCalled,
+    };
+    Counter counter{kInit};
+    sp<MockCodec> mockCodec;
+    std::function<sp<CodecBase>(const AString &name, const char *owner)> getCodecBase =
+        [&mockCodec, &counter](const AString &, const char *) {
+            mockCodec = new MockCodec([](const std::shared_ptr<MockBufferChannel> &) {
+                // No mock setup, as we don't expect any buffer operations
+                // in this scenario.
+            });
+            ON_CALL(*mockCodec, initiateAllocateComponent(_))
+                .WillByDefault([mockCodec](const sp<AMessage> &) {
+                    mockCodec->callback()->onComponentAllocated(kCodecName.c_str());
+                });
+            ON_CALL(*mockCodec, initiateShutdown(_))
+                .WillByDefault([mockCodec, &counter](bool) {
+                    int32_t stage = counter.wait(kInit, kReleaseCalled);
+                    if (stage == kInit) {
+                        // Mark that 2) happened, so test can proceed to 3)
+                        counter.advance();
+                    } else if (stage == kReleaseCalled) {
+                        // Handle 6)
+                        mockCodec->callback()->onReleaseCompleted();
+                    }
+                });
+            return mockCodec;
+        };
+
+    sp<ALooper> looper{new ALooper};
+    sp<MediaCodec> codec = SetupMediaCodec(
+            kCodecOwner, kCodecName, kMediaType, looper, getCodecBase);
+    ASSERT_NE(nullptr, codec) << "Codec must not be null";
+    ASSERT_NE(nullptr, mockCodec) << "MockCodec must not be null";
+    std::promise<void> reclaimCompleted;
+    std::promise<void> releaseCompleted;
+    Counter threadExitCounter;
+    std::thread([codec, &reclaimCompleted]{
+        // Simulate ResourceManager thread. Proceed with 1)
+        MediaTestHelper::Reclaim(codec, true /* force */);
+        reclaimCompleted.set_value();
+    }).detach();
+    std::thread([codec, &counter, &releaseCompleted]{
+        // Simulate client thread. Wait until 2) is complete
+        (void)counter.wait(kShutdownFromReclaimReceived);
+        // Proceed to 3), and mark that 5) is ready to happen.
+        // NOTE: it's difficult to pinpoint when 4) happens, so we will sleep
+        //       to meet the timing.
+        counter.advance();
+        codec->release();
+        releaseCompleted.set_value();
+    }).detach();
+    std::thread([mockCodec, &counter]{
+        // Simulate component thread. Wait until 3) is complete
+        (void)counter.wait(kReleaseCalled);
+        // We want 4) to complete before moving forward, but it is hard to
+        // wait for this exact event. Just sleep so that the other thread can
+        // proceed and complete 4).
+        std::this_thread::sleep_for(std::chrono::milliseconds(100));
+        // Proceed to 5).
+        mockCodec->callback()->onReleaseCompleted();
+    }).detach();
+    EXPECT_EQ(
+            std::future_status::ready,
+            reclaimCompleted.get_future().wait_for(std::chrono::seconds(5)))
+        << "reclaim timed out";
+    EXPECT_EQ(
+            std::future_status::ready,
+            releaseCompleted.get_future().wait_for(std::chrono::seconds(5)))
+        << "release timed out";
+    looper->stop();
+}
+
+TEST(MediaCodecTest, ErrorWhileStopping) {
+    // Test scenario:
+    //
+    // 1) Client thread calls stop(); MediaCodec looper thread calls
+    //    initiateShutdown(); shutdown is being handled at the component thread.
+    // 2) An error occurs while the shutdown operation is still in progress.
+    // 3) MediaCodec looper thread handles the error.
+    // 4) Component thread completes the shutdown and posts onStopCompleted().
+
+    static const AString kCodecName{"test.codec"};
+    static const AString kCodecOwner{"nobody"};
+    static const AString kMediaType{"video/x-test"};
+
+    std::promise<void> errorOccurred;
+    sp<MockCodec> mockCodec;
+    std::function<sp<CodecBase>(const AString &name, const char *owner)> getCodecBase =
+        [&mockCodec, &errorOccurred](const AString &, const char *) {
+            mockCodec = new MockCodec([](const std::shared_ptr<MockBufferChannel> &) {
+                // No mock setup, as we don't expect any buffer operations
+                // in this scenario.
+            });
+            ON_CALL(*mockCodec, initiateAllocateComponent(_))
+                .WillByDefault([mockCodec](const sp<AMessage> &) {
+                    mockCodec->callback()->onComponentAllocated(kCodecName.c_str());
+                });
+            ON_CALL(*mockCodec, initiateConfigureComponent(_))
+                .WillByDefault([mockCodec](const sp<AMessage> &msg) {
+                    mockCodec->callback()->onComponentConfigured(
+                            msg->dup(), msg->dup());
+                });
+            ON_CALL(*mockCodec, initiateStart())
+                .WillByDefault([mockCodec]() {
+                    mockCodec->callback()->onStartCompleted();
+                });
+            ON_CALL(*mockCodec, initiateShutdown(true))
+                .WillByDefault([mockCodec, &errorOccurred](bool) {
+                    mockCodec->callback()->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
+                    // Mark that 1) and 2) are complete.
+                    errorOccurred.set_value();
+                });
+            ON_CALL(*mockCodec, initiateShutdown(false))
+                .WillByDefault([mockCodec](bool) {
+                    mockCodec->callback()->onReleaseCompleted();
+                });
+            return mockCodec;
+        };
+
+    sp<ALooper> looper{new ALooper};
+    sp<MediaCodec> codec = SetupMediaCodec(
+            kCodecOwner, kCodecName, kMediaType, looper, getCodecBase);
+    ASSERT_NE(nullptr, codec) << "Codec must not be null";
+    ASSERT_NE(nullptr, mockCodec) << "MockCodec must not be null";
+
+    std::thread([mockCodec, &errorOccurred]{
+        // Simulate component thread that handles stop()
+        errorOccurred.get_future().wait();
+        // Error occurred but shutdown request still got processed.
+        mockCodec->callback()->onStopCompleted();
+    }).detach();
+
+    codec->configure(new AMessage, nullptr, nullptr, 0);
+    codec->start();
+    codec->stop();
+    // Sleep here to give time for the MediaCodec looper thread
+    // to process the messages.
+    std::this_thread::sleep_for(std::chrono::milliseconds(100));
+    codec->release();
+    looper->stop();
+}
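
The two tests above coordinate their threads through the small Counter helper defined earlier in this file. Purely as a standalone illustration of that handshake (the stage names and worker body are invented, and the snippet reuses the Counter class defined above):

```cpp
// Standalone illustration of the Counter handshake used by the tests above.
#include <chrono>
#include <cstdint>
#include <thread>

namespace {
enum { kStart, kWorkerDone };  // invented stage names
}  // namespace

void counterHandshakeExample() {
    android::Counter stage{kStart};

    std::thread worker([&stage] {
        // ... simulate some work on another thread ...
        stage.advance();  // stage moves from kStart to kWorkerDone
    });

    // Block until the worker reaches kWorkerDone, or give up after a second.
    int32_t reached = stage.waitFor(std::chrono::seconds(1), kWorkerDone);
    (void)reached;
    worker.join();
}
```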
diff --git a/media/libstagefright/tests/mediacodec/MediaTestHelper.cpp b/media/libstagefright/tests/mediacodec/MediaTestHelper.cpp
new file mode 100644
index 0000000..bbe3c05
--- /dev/null
+++ b/media/libstagefright/tests/mediacodec/MediaTestHelper.cpp
@@ -0,0 +1,55 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <media/stagefright/MediaCodec.h>
+#include <media/stagefright/MediaCodecListWriter.h>
+
+#include "MediaTestHelper.h"
+
+namespace android {
+
+// static
+sp<MediaCodec> MediaTestHelper::CreateCodec(
+        const AString &name,
+        const sp<ALooper> &looper,
+        std::function<sp<CodecBase>(const AString &, const char *)> getCodecBase,
+        std::function<status_t(const AString &, sp<MediaCodecInfo> *)> getCodecInfo) {
+    sp<MediaCodec> codec = new MediaCodec(
+            looper, MediaCodec::kNoPid, MediaCodec::kNoUid, getCodecBase, getCodecInfo);
+    if (codec->init(name) != OK) {
+        return nullptr;
+    }
+    return codec;
+}
+
+// static
+void MediaTestHelper::Reclaim(const sp<MediaCodec> &codec, bool force) {
+    codec->reclaim(force);
+}
+
+// static
+std::shared_ptr<MediaCodecListWriter> MediaTestHelper::CreateCodecListWriter() {
+    return std::shared_ptr<MediaCodecListWriter>(new MediaCodecListWriter);
+}
+
+// static
+void MediaTestHelper::WriteCodecInfos(
+        const std::shared_ptr<MediaCodecListWriter> &writer,
+        std::vector<sp<MediaCodecInfo>> *codecInfos) {
+    writer->writeCodecInfos(codecInfos);
+}
+
+}  // namespace android
diff --git a/media/libstagefright/tests/mediacodec/MediaTestHelper.h b/media/libstagefright/tests/mediacodec/MediaTestHelper.h
new file mode 100644
index 0000000..f3d6110
--- /dev/null
+++ b/media/libstagefright/tests/mediacodec/MediaTestHelper.h
@@ -0,0 +1,51 @@
+/*
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef MEDIA_TEST_HELPER_H_
+
+#define MEDIA_TEST_HELPER_H_
+
+#include <media/stagefright/foundation/AString.h>
+#include <utils/StrongPointer.h>
+
+namespace android {
+
+struct ALooper;
+struct CodecBase;
+struct MediaCodec;
+struct MediaCodecInfo;
+struct MediaCodecListWriter;
+
+class MediaTestHelper {
+public:
+    // MediaCodec
+    static sp<MediaCodec> CreateCodec(
+            const AString &name,
+            const sp<ALooper> &looper,
+            std::function<sp<CodecBase>(const AString &, const char *)> getCodecBase,
+            std::function<status_t(const AString &, sp<MediaCodecInfo> *)> getCodecInfo);
+    static void Reclaim(const sp<MediaCodec> &codec, bool force);
+
+    // MediaCodecListWriter
+    static std::shared_ptr<MediaCodecListWriter> CreateCodecListWriter();
+    static void WriteCodecInfos(
+            const std::shared_ptr<MediaCodecListWriter> &writer,
+            std::vector<sp<MediaCodecInfo>> *codecInfos);
+};
+
+}  // namespace android
+
+#endif  // MEDIA_TEST_HELPER_H_
diff --git a/media/libstagefright/tests/metadatautils/Android.bp b/media/libstagefright/tests/metadatautils/Android.bp
new file mode 100644
index 0000000..69830fc
--- /dev/null
+++ b/media/libstagefright/tests/metadatautils/Android.bp
@@ -0,0 +1,50 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+cc_test {
+    name: "MetaDataUtilsTest",
+    gtest: true,
+
+    srcs: [
+        "MetaDataUtilsTest.cpp",
+    ],
+
+    static_libs: [
+        "libstagefright_metadatautils",
+        "libstagefright_esds",
+    ],
+
+    shared_libs: [
+        "liblog",
+        "libutils",
+        "libmediandk",
+        "libstagefright",
+        "libstagefright_foundation",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+
+    sanitize: {
+        cfi: true,
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+    },
+}
diff --git a/media/libstagefright/tests/metadatautils/AndroidTest.xml b/media/libstagefright/tests/metadatautils/AndroidTest.xml
new file mode 100644
index 0000000..d6497f3
--- /dev/null
+++ b/media/libstagefright/tests/metadatautils/AndroidTest.xml
@@ -0,0 +1,30 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<configuration description="Test module config for MetaDataUtils unit test">
+    <option name="test-suite-tag" value="MetaDataUtilsTest" />
+    <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
+        <option name="cleanup" value="false" />
+        <option name="push" value="MetaDataUtilsTest->/data/local/tmp/MetaDataUtilsTest" />
+        <option name="push-file"
+            key="https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/tests/metadatautils/MetaDataUtilsTestRes-1.0.zip?unzip=true"
+            value="/data/local/tmp/MetaDataUtilsTestRes/" />
+    </target_preparer>
+    <test class="com.android.tradefed.testtype.GTest" >
+        <option name="native-test-device-path" value="/data/local/tmp" />
+        <option name="module-name" value="MetaDataUtilsTest" />
+        <option name="native-test-flag" value="-P /data/local/tmp/MetaDataUtilsTestRes/" />
+    </test>
+</configuration>
diff --git a/media/libstagefright/tests/metadatautils/MetaDataUtilsTest.cpp b/media/libstagefright/tests/metadatautils/MetaDataUtilsTest.cpp
new file mode 100644
index 0000000..9fd5fdb
--- /dev/null
+++ b/media/libstagefright/tests/metadatautils/MetaDataUtilsTest.cpp
@@ -0,0 +1,490 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MetaDataUtilsTest"
+#include <utils/Log.h>
+
+#include <string.h>
+#include <sys/stat.h>
+
+#include <fstream>
+#include <sstream>
+#include <string>
+
+#include <ESDS.h>
+#include <media/NdkMediaFormat.h>
+#include <media/stagefright/MediaCodecConstants.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MetaDataBase.h>
+#include <media/stagefright/MetaDataUtils.h>
+#include <media/stagefright/foundation/ABitReader.h>
+
+#include "MetaDataUtilsTestEnvironment.h"
+
+constexpr uint8_t kAdtsCsdSize = 7;
+// from AAC specs: https://www.iso.org/standard/43345.html
+constexpr int32_t kSamplingFreq[] = {96000, 88200, 64000, 48000, 44100, 32000,
+                                     24000, 22050, 16000, 12000, 11025, 8000};
+constexpr uint8_t kMaxSamplingFreqIndex = sizeof(kSamplingFreq) / sizeof(kSamplingFreq[0]);
+
+static MetaDataUtilsTestEnvironment *gEnv = nullptr;
+
+using namespace android;
+
+class MetaDataValidate {
+  public:
+    MetaDataValidate() : mInputBuffer(nullptr) {}
+
+    ~MetaDataValidate() {
+        if (mInputBuffer) {
+            delete[] mInputBuffer;
+            mInputBuffer = nullptr;
+        }
+    }
+
+    void SetUpMetaDataValidate(string fileName) {
+        struct stat buf;
+        int8_t err = stat(fileName.c_str(), &buf);
+        ASSERT_EQ(err, 0) << "Failed to get file information for file: " << fileName;
+
+        mInputBufferSize = buf.st_size;
+        FILE *inputFilePtr = fopen(fileName.c_str(), "rb+");
+        ASSERT_NE(inputFilePtr, nullptr) << "Failed to open file: " << fileName;
+
+        mInputBuffer = new uint8_t[mInputBufferSize];
+        ASSERT_NE(mInputBuffer, nullptr)
+                << "Failed to allocate memory of size: " << mInputBufferSize;
+
+        int32_t numBytes =
+                fread((char *)mInputBuffer, sizeof(uint8_t), mInputBufferSize, inputFilePtr);
+        ASSERT_EQ(numBytes, mInputBufferSize) << numBytes << " of " << mInputBufferSize << " read";
+
+        fclose(inputFilePtr);
+    }
+
+    size_t mInputBufferSize;
+    const uint8_t *mInputBuffer;
+};
+
+class AvcCSDTest : public ::testing::TestWithParam<
+                           tuple<string /*inputFile*/, size_t /*avcWidth*/, size_t /*avcHeight*/>> {
+  public:
+    AvcCSDTest() : mInputBuffer(nullptr) {}
+
+    ~AvcCSDTest() {
+        if (mInputBuffer) {
+            delete[] mInputBuffer;
+            mInputBuffer = nullptr;
+        }
+    }
+    virtual void SetUp() override {
+        tuple<string, size_t, size_t> params = GetParam();
+        string inputFile = gEnv->getRes() + get<0>(params);
+        mFrameWidth = get<1>(params);
+        mFrameHeight = get<2>(params);
+
+        struct stat buf;
+        int8_t err = stat(inputFile.c_str(), &buf);
+        ASSERT_EQ(err, 0) << "Failed to get information for file: " << inputFile;
+
+        mInputBufferSize = buf.st_size;
+        FILE *inputFilePtr = fopen(inputFile.c_str(), "rb+");
+        ASSERT_NE(inputFilePtr, nullptr) << "Failed to open file: " << inputFile;
+
+        mInputBuffer = new uint8_t[mInputBufferSize];
+        ASSERT_NE(mInputBuffer, nullptr)
+                << "Failed to create a buffer of size: " << mInputBufferSize;
+
+        int32_t numBytes =
+                fread((char *)mInputBuffer, sizeof(uint8_t), mInputBufferSize, inputFilePtr);
+        ASSERT_EQ(numBytes, mInputBufferSize) << numBytes << " of " << mInputBufferSize << " read";
+
+        fclose(inputFilePtr);
+    }
+
+    size_t mFrameWidth;
+    size_t mFrameHeight;
+    size_t mInputBufferSize;
+    const uint8_t *mInputBuffer;
+};
+
+class AvcCSDValidateTest : public MetaDataValidate,
+                           public ::testing::TestWithParam<string /*inputFile*/> {
+  public:
+    virtual void SetUp() override {
+        string inputFile = gEnv->getRes() + GetParam();
+
+        ASSERT_NO_FATAL_FAILURE(SetUpMetaDataValidate(inputFile));
+    }
+};
+
+class AacCSDTest
+    : public ::testing::TestWithParam<tuple<uint32_t /*profile*/, uint32_t /*samplingFreqIndex*/,
+                                            uint32_t /*channelConfig*/>> {
+  public:
+    virtual void SetUp() override {
+        tuple<uint32_t, uint32_t, uint32_t> params = GetParam();
+        mAacProfile = get<0>(params);
+        mAacSamplingFreqIndex = get<1>(params);
+        mAacChannelConfig = get<2>(params);
+    }
+
+    uint32_t mAacProfile;
+    uint32_t mAacSamplingFreqIndex;
+    uint32_t mAacChannelConfig;
+};
+
+class AacADTSTest
+    : public ::testing::TestWithParam<
+              tuple<string /*adtsFile*/, uint32_t /*channelCount*/, uint32_t /*sampleRate*/>> {
+  public:
+    AacADTSTest() : mInputBuffer(nullptr) {}
+
+    virtual void SetUp() override {
+        tuple<string, uint32_t, uint32_t> params = GetParam();
+        string fileName = gEnv->getRes() + get<0>(params);
+        mAacChannelCount = get<1>(params);
+        mAacSampleRate = get<2>(params);
+
+        FILE *filePtr = fopen(fileName.c_str(), "r");
+        ASSERT_NE(filePtr, nullptr) << "Failed to open file: " << fileName;
+
+        mInputBuffer = new uint8_t[kAdtsCsdSize];
+        ASSERT_NE(mInputBuffer, nullptr) << "Failed to allocate memory of size: " << kAdtsCsdSize;
+
+        int32_t numBytes = fread((void *)mInputBuffer, sizeof(uint8_t), kAdtsCsdSize, filePtr);
+        ASSERT_EQ(numBytes, kAdtsCsdSize)
+                << "Failed to read complete file, bytes read: " << numBytes;
+
+        fclose(filePtr);
+    }
+    int32_t mAacChannelCount;
+    int32_t mAacSampleRate;
+    const uint8_t *mInputBuffer;
+};
+
+class AacCSDValidateTest : public MetaDataValidate,
+                           public ::testing::TestWithParam<string /*inputFile*/> {
+  public:
+    virtual void SetUp() override {
+        string inputFile = gEnv->getRes() + GetParam();
+
+        ASSERT_NO_FATAL_FAILURE(SetUpMetaDataValidate(inputFile));
+    }
+};
+
+class VorbisTest : public ::testing::TestWithParam<pair<string /*fileName*/, string /*infoFile*/>> {
+  public:
+    virtual void SetUp() override {
+        pair<string, string> params = GetParam();
+        string inputMediaFile = gEnv->getRes() + params.first;
+        mInputFileStream.open(inputMediaFile, ifstream::in);
+        ASSERT_TRUE(mInputFileStream.is_open()) << "Failed to open data file: " << inputMediaFile;
+
+        string inputInfoFile = gEnv->getRes() + params.second;
+        mInfoFileStream.open(inputInfoFile, ifstream::in);
+        ASSERT_TRUE(mInfoFileStream.is_open()) << "Failed to open info file: " << inputInfoFile;
+        ASSERT_FALSE(inputInfoFile.empty()) << "Empty info file: " << inputInfoFile;
+    }
+
+    ~VorbisTest() {
+        if (mInputFileStream.is_open()) mInputFileStream.close();
+        if (mInfoFileStream.is_open()) mInfoFileStream.close();
+    }
+
+    ifstream mInputFileStream;
+    ifstream mInfoFileStream;
+};
+
+TEST_P(AvcCSDTest, AvcCSDValidationTest) {
+    AMediaFormat *csdData = AMediaFormat_new();
+    ASSERT_NE(csdData, nullptr) << "Failed to create AMedia format";
+
+    bool status = MakeAVCCodecSpecificData(csdData, mInputBuffer, mInputBufferSize);
+    ASSERT_TRUE(status) << "Failed to make AVC CSD from AMediaFormat";
+
+    int32_t avcWidth = -1;
+    status = AMediaFormat_getInt32(csdData, AMEDIAFORMAT_KEY_WIDTH, &avcWidth);
+    ASSERT_TRUE(status) << "Failed to get avc width";
+    ASSERT_EQ(avcWidth, mFrameWidth);
+
+    int32_t avcHeight = -1;
+    status = AMediaFormat_getInt32(csdData, AMEDIAFORMAT_KEY_HEIGHT, &avcHeight);
+    ASSERT_TRUE(status) << "Failed to get avc height";
+    ASSERT_EQ(avcHeight, mFrameHeight);
+
+    const char *mimeType = "";
+    status = AMediaFormat_getString(csdData, AMEDIAFORMAT_KEY_MIME, &mimeType);
+    ASSERT_TRUE(status) << "Failed to get the mime type";
+    ASSERT_STREQ(mimeType, MEDIA_MIMETYPE_VIDEO_AVC);
+
+    MetaDataBase *metaData = new MetaDataBase();
+    ASSERT_NE(metaData, nullptr) << "Failed to create MetaData Base";
+
+    status = MakeAVCCodecSpecificData(*metaData, mInputBuffer, mInputBufferSize);
+    ASSERT_TRUE(status) << "Failed to make AVC CSD from MetaDataBase";
+
+    avcWidth = -1;
+    status = metaData->findInt32(kKeyWidth, &avcWidth);
+    ASSERT_TRUE(status) << "Failed to find the width";
+    ASSERT_EQ(avcWidth, mFrameWidth);
+
+    avcHeight = -1;
+    status = metaData->findInt32(kKeyHeight, &avcHeight);
+    ASSERT_TRUE(status) << "Failed to find the height";
+    ASSERT_EQ(avcHeight, mFrameHeight);
+
+    void *csdAMediaFormatBuffer = nullptr;
+    size_t csdAMediaFormatSize;
+    status = AMediaFormat_getBuffer(csdData, AMEDIAFORMAT_KEY_CSD_AVC, &csdAMediaFormatBuffer,
+                                    &csdAMediaFormatSize);
+    ASSERT_TRUE(status) << "Failed to get the CSD from AMediaFormat";
+    ASSERT_NE(csdAMediaFormatBuffer, nullptr) << "Invalid CSD from AMediaFormat";
+
+    const void *csdMetaDataBaseBuffer = nullptr;
+    size_t csdMetaDataBaseSize = 0;
+    uint32_t mediaType;
+    status = metaData->findData(kKeyAVCC, &mediaType, &csdMetaDataBaseBuffer, &csdMetaDataBaseSize);
+    ASSERT_TRUE(status) << "Failed to get the CSD from MetaDataBase";
+    ASSERT_NE(csdMetaDataBaseBuffer, nullptr) << "Invalid CSD from MetaDataBase";
+    ASSERT_GT(csdMetaDataBaseSize, 0) << "CSD size must be greater than 0";
+    ASSERT_EQ(csdMetaDataBaseSize, csdAMediaFormatSize)
+            << "CSD size of MetaData type and AMediaFormat type must be same";
+
+    int32_t result = memcmp(csdAMediaFormatBuffer, csdMetaDataBaseBuffer, csdAMediaFormatSize);
+    ASSERT_EQ(result, 0) << "CSD from AMediaFormat and MetaDataBase do not match";
+
+    delete metaData;
+    AMediaFormat_delete(csdData);
+}
+
+TEST_P(AvcCSDValidateTest, AvcValidateTest) {
+    AMediaFormat *csdData = AMediaFormat_new();
+    ASSERT_NE(csdData, nullptr) << "Failed to create AMedia format";
+
+    bool status = MakeAVCCodecSpecificData(csdData, mInputBuffer, mInputBufferSize);
+    ASSERT_FALSE(status) << "MakeAVCCodecSpecificData with AMediaFormat succeeds with invalid data";
+
+    MetaDataBase *metaData = new MetaDataBase();
+    ASSERT_NE(metaData, nullptr) << "Failed to create MetaData Base";
+
+    status = MakeAVCCodecSpecificData(*metaData, mInputBuffer, mInputBufferSize);
+    ASSERT_FALSE(status) << "MakeAVCCodecSpecificData with MetaDataBase succeeds with invalid data";
+}
+
+TEST_P(AacCSDTest, AacCSDValidationTest) {
+    AMediaFormat *csdData = AMediaFormat_new();
+    ASSERT_NE(csdData, nullptr) << "Failed to create AMedia format";
+
+    ASSERT_GE(mAacSamplingFreqIndex, 0);
+    ASSERT_LT(mAacSamplingFreqIndex, kMaxSamplingFreqIndex);
+    bool status = MakeAACCodecSpecificData(csdData, mAacProfile, mAacSamplingFreqIndex,
+                                           mAacChannelConfig);
+    ASSERT_TRUE(status) << "Failed to make AAC CSD from AMediaFormat";
+
+    int32_t sampleRate = -1;
+    status = AMediaFormat_getInt32(csdData, AMEDIAFORMAT_KEY_SAMPLE_RATE, &sampleRate);
+    ASSERT_TRUE(status) << "Failed to get sample rate";
+    ASSERT_EQ(kSamplingFreq[mAacSamplingFreqIndex], sampleRate);
+
+    int32_t channelCount = -1;
+    status = AMediaFormat_getInt32(csdData, AMEDIAFORMAT_KEY_CHANNEL_COUNT, &channelCount);
+    ASSERT_TRUE(status) << "Failed to get channel count";
+    ASSERT_EQ(channelCount, mAacChannelConfig);
+
+    const char *mimeType = "";
+    status = AMediaFormat_getString(csdData, AMEDIAFORMAT_KEY_MIME, &mimeType);
+    ASSERT_TRUE(status) << "Failed to get the mime type";
+    ASSERT_STREQ(mimeType, MEDIA_MIMETYPE_AUDIO_AAC);
+
+    MetaDataBase *metaData = new MetaDataBase();
+    ASSERT_NE(metaData, nullptr) << "Failed to create MetaData Base";
+
+    status = MakeAACCodecSpecificData(*metaData, mAacProfile, mAacSamplingFreqIndex,
+                                      mAacChannelConfig);
+    ASSERT_TRUE(status) << "Failed to make AAC CSD from MetaDataBase";
+
+    sampleRate = -1;
+    status = metaData->findInt32(kKeySampleRate, &sampleRate);
+    ASSERT_TRUE(status) << "Failed to get sampling rate";
+    ASSERT_EQ(kSamplingFreq[mAacSamplingFreqIndex], sampleRate);
+
+    channelCount = -1;
+    status = metaData->findInt32(kKeyChannelCount, &channelCount);
+    ASSERT_TRUE(status) << "Failed to get channel count";
+    ASSERT_EQ(channelCount, mAacChannelConfig);
+
+    mimeType = "";
+    status = metaData->findCString(kKeyMIMEType, &mimeType);
+    ASSERT_TRUE(status) << "Failed to get mime type";
+    ASSERT_STREQ(mimeType, MEDIA_MIMETYPE_AUDIO_AAC);
+
+    void *csdAMediaFormatBuffer = nullptr;
+    size_t csdAMediaFormatSize = 0;
+    status = AMediaFormat_getBuffer(csdData, AMEDIAFORMAT_KEY_CSD_0, &csdAMediaFormatBuffer,
+                                    &csdAMediaFormatSize);
+    ASSERT_TRUE(status) << "Failed to get the AMediaFormat CSD";
+    ASSERT_GT(csdAMediaFormatSize, 0) << "CSD size must be greater than 0";
+    ASSERT_NE(csdAMediaFormatBuffer, nullptr) << "Invalid CSD found";
+
+    const void *csdMetaDataBaseBuffer;
+    size_t csdMetaDataBaseSize = 0;
+    uint32_t mediaType;
+    status = metaData->findData(kKeyESDS, &mediaType, &csdMetaDataBaseBuffer, &csdMetaDataBaseSize);
+    ASSERT_TRUE(status) << "Failed to get the ESDS data from MetaDataBase";
+    ASSERT_GT(csdMetaDataBaseSize, 0) << "CSD size must be greater than 0";
+
+    ESDS esds(csdMetaDataBaseBuffer, csdMetaDataBaseSize);
+    status_t result = esds.getCodecSpecificInfo(&csdMetaDataBaseBuffer, &csdMetaDataBaseSize);
+    ASSERT_EQ(result, (status_t)OK) << "Failed to get CSD from ESDS data";
+    ASSERT_NE(csdMetaDataBaseBuffer, nullptr) << "Invalid CSD found";
+    ASSERT_EQ(csdAMediaFormatSize, csdMetaDataBaseSize)
+            << "CSD size do not match between AMediaFormat type and MetaDataBase type";
+
+    int32_t memcmpResult =
+            memcmp(csdAMediaFormatBuffer, csdMetaDataBaseBuffer, csdAMediaFormatSize);
+    ASSERT_EQ(memcmpResult, 0) << "AMediaFormat and MetaDataBase CSDs do not match";
+
+    AMediaFormat_delete(csdData);
+    delete metaData;
+}
+
+TEST_P(AacADTSTest, AacADTSValidationTest) {
+    MetaDataBase *metaData = new MetaDataBase();
+    ASSERT_NE(metaData, nullptr) << "Failed to create meta data";
+
+    bool status = MakeAACCodecSpecificData(*metaData, mInputBuffer, kAdtsCsdSize);
+    ASSERT_TRUE(status) << "Failed to make AAC CSD from MetaDataBase";
+
+    int32_t sampleRate = -1;
+    status = metaData->findInt32(kKeySampleRate, &sampleRate);
+    ASSERT_TRUE(status) << "Failed to get sampling rate";
+    ASSERT_EQ(sampleRate, mAacSampleRate);
+
+    int32_t channelCount = -1;
+    status = metaData->findInt32(kKeyChannelCount, &channelCount);
+    ASSERT_TRUE(status) << "Failed to get channel count";
+    ASSERT_EQ(channelCount, mAacChannelCount);
+
+    const char *mimeType = "";
+    status = metaData->findCString(kKeyMIMEType, &mimeType);
+    ASSERT_TRUE(status) << "Failed to get mime type";
+    ASSERT_STREQ(mimeType, MEDIA_MIMETYPE_AUDIO_AAC);
+
+    delete metaData;
+}
+
+TEST_P(AacCSDValidateTest, AacInvalidInputTest) {
+    MetaDataBase *metaData = new MetaDataBase();
+    ASSERT_NE(metaData, nullptr) << "Failed to create meta data";
+
+    bool status = MakeAACCodecSpecificData(*metaData, mInputBuffer, kAdtsCsdSize);
+    ASSERT_FALSE(status) << "MakeAACCodecSpecificData succeeds with invalid data";
+}
+
+TEST_P(VorbisTest, VorbisCommentTest) {
+    string line;
+    string tag;
+    string key;
+    string value;
+    size_t commentLength;
+    bool status;
+
+    while (getline(mInfoFileStream, line)) {
+        istringstream stringLine(line);
+        stringLine >> tag >> key >> value >> commentLength;
+        ASSERT_GT(commentLength, 0) << "Vorbis comment size must be greater than 0";
+
+        string comment;
+        string dataLine;
+
+        getline(mInputFileStream, dataLine);
+        istringstream dataStringLine(dataLine);
+        dataStringLine >> comment;
+
+        char *buffer = strndup(comment.c_str(), commentLength);
+        ASSERT_NE(buffer, nullptr) << "Failed to allocate buffer of size: " << commentLength;
+
+        AMediaFormat *fileMeta = AMediaFormat_new();
+        ASSERT_NE(fileMeta, nullptr) << "Failed to create AMedia format";
+
+        parseVorbisComment(fileMeta, buffer, commentLength);
+        free(buffer);
+
+        if (!strncasecmp(tag.c_str(), "ANDROID_HAPTIC", sizeof(tag))) {
+            int32_t numChannelExpected = stoi(value);
+            int32_t numChannelFound = -1;
+            status = AMediaFormat_getInt32(fileMeta, key.c_str(), &numChannelFound);
+            ASSERT_TRUE(status) << "Failed to get the channel count";
+            ASSERT_EQ(numChannelExpected, numChannelFound);
+        } else if (!strncasecmp(tag.c_str(), "ANDROID_LOOP", sizeof(tag))) {
+            int32_t loopExpected = !value.compare("true");
+            int32_t loopFound = -1;
+
+            status = AMediaFormat_getInt32(fileMeta, "loop", &loopFound);
+            ASSERT_TRUE(status) << "Failed to get the loop count";
+            ASSERT_EQ(loopExpected, loopFound);
+        } else {
+            const char *tagValue = "";
+            status = AMediaFormat_getString(fileMeta, key.c_str(), &tagValue);
+            ASSERT_TRUE(status) << "Failed to get the tag value";
+            ASSERT_STREQ(value.c_str(), tagValue);
+        }
+        AMediaFormat_delete(fileMeta);
+    }
+}
+
+INSTANTIATE_TEST_SUITE_P(MetaDataUtilsTestAll, AvcCSDTest,
+                         ::testing::Values(make_tuple("sps_pps_userdata.h264", 8, 8),
+                                           make_tuple("sps_userdata_pps.h264", 8, 8),
+                                           make_tuple("sps_pps_sps_pps.h264", 8, 8)));
+
+// TODO(b/158067691): Add invalid test vectors with incomplete PPS or no PPS
+INSTANTIATE_TEST_SUITE_P(MetaDataUtilsTestAll, AvcCSDValidateTest,
+                         ::testing::Values("sps_pps_only_startcode.h264",
+                                           "sps_incomplete_pps.h264",
+                                           // TODO(b/158067691) "sps_pps_incomplete.h264",
+                                           "randomdata.h264",
+                                           // TODO(b/158067691) "sps.h264",
+                                           "pps.h264"));
+
+INSTANTIATE_TEST_SUITE_P(MetaDataUtilsTestAll, AacCSDTest,
+                         ::testing::Values(make_tuple(AACObjectMain, 1, 1)));
+
+INSTANTIATE_TEST_SUITE_P(MetaDataUtilsTestAll, AacADTSTest,
+                         ::testing::Values(make_tuple("loudsoftaacadts", 1, 44100)));
+
+INSTANTIATE_TEST_SUITE_P(MetaDataUtilsTestAll, AacCSDValidateTest,
+                         ::testing::Values("loudsoftaacadts_invalidheader",
+                                           "loudsoftaacadts_invalidprofile",
+                                           "loudsoftaacadts_invalidchannelconfig"));
+
+// TODO(b/157974508) Add test vector for vorbis thumbnail tag
+// Info file contains TAG, Key, Value and size of the vorbis comment
+INSTANTIATE_TEST_SUITE_P(
+        MetaDataUtilsTestAll, VorbisTest,
+        ::testing::Values(make_pair("vorbiscomment_sintel.dat", "vorbiscomment_sintel.info"),
+                          make_pair("vorbiscomment_album.dat", "vorbiscomment_album.info"),
+                          make_pair("vorbiscomment_loop.dat", "vorbiscomment_loop.info")));
+
+int main(int argc, char **argv) {
+    gEnv = new MetaDataUtilsTestEnvironment();
+    ::testing::AddGlobalTestEnvironment(gEnv);
+    ::testing::InitGoogleTest(&argc, argv);
+    int status = gEnv->initFromOptions(argc, argv);
+    if (status == 0) {
+        status = RUN_ALL_TESTS();
+        ALOGV("Test result = %d\n", status);
+    }
+    return status;
+}
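
A note on kAdtsCsdSize above: 7 bytes cover the fixed (CRC-less) ADTS header, which already carries the profile, sampling-frequency index, and channel configuration that MakeAACCodecSpecificData() needs. The field extraction below is my own illustration with hypothetical header bytes, not code from MetaDataUtils.

```cpp
// Illustration of the fields packed into a 7-byte (CRC-less) ADTS header.
#include <cstdint>

void adtsFieldsExample() {
    // Hypothetical header: syncword 0xFFF, then profile / frequency index /
    // channel configuration packed into bytes 2 and 3.
    const uint8_t adts[7] = {0xFF, 0xF1, 0x4C, 0x80, 0x01, 0x7F, 0xFC};

    uint32_t profile    = (adts[2] >> 6) & 0x03;   // 2 bits
    uint32_t freqIndex  = (adts[2] >> 2) & 0x0F;   // 4 bits, indexes kSamplingFreq
    uint32_t channelCfg = ((adts[2] & 0x01) << 2)  // 3 bits, split across bytes 2-3
                          | ((adts[3] >> 6) & 0x03);

    (void)profile; (void)freqIndex; (void)channelCfg;
}
```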
diff --git a/media/libstagefright/tests/metadatautils/MetaDataUtilsTestEnvironment.h b/media/libstagefright/tests/metadatautils/MetaDataUtilsTestEnvironment.h
new file mode 100644
index 0000000..4d642bc
--- /dev/null
+++ b/media/libstagefright/tests/metadatautils/MetaDataUtilsTestEnvironment.h
@@ -0,0 +1,73 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __METADATA_UTILS_TEST_ENVIRONMENT_H__
+#define __METADATA_UTILS_TEST_ENVIRONMENT_H__
+
+#include <gtest/gtest.h>
+
+#include <getopt.h>
+
+using namespace std;
+
+class MetaDataUtilsTestEnvironment : public ::testing::Environment {
+  public:
+    MetaDataUtilsTestEnvironment() : res("/data/local/tmp/") {}
+
+    // Parses the command line arguments
+    int initFromOptions(int argc, char **argv);
+
+    void setRes(const char *_res) { res = _res; }
+
+    const string getRes() const { return res; }
+
+  private:
+    string res;
+};
+
+int MetaDataUtilsTestEnvironment::initFromOptions(int argc, char **argv) {
+    static struct option options[] = {{"path", required_argument, 0, 'P'}, {0, 0, 0, 0}};
+
+    while (true) {
+        int index = 0;
+        int c = getopt_long(argc, argv, "P:", options, &index);
+        if (c == -1) {
+            break;
+        }
+
+        switch (c) {
+            case 'P': {
+                setRes(optarg);
+                break;
+            }
+            default:
+                break;
+        }
+    }
+
+    if (optind < argc) {
+        fprintf(stderr,
+                "unrecognized option: %s\n\n"
+                "usage: %s <gtest options> <test options>\n\n"
+                "test options are:\n\n"
+                "-P, --path: Resource files directory location\n",
+                argv[optind ?: 1], argv[0]);
+        return 2;
+    }
+    return 0;
+}
+
+#endif  // __METADATA_UTILS_TEST_ENVIRONMENT_H__
diff --git a/media/libstagefright/tests/metadatautils/README.md b/media/libstagefright/tests/metadatautils/README.md
new file mode 100644
index 0000000..0862a07
--- /dev/null
+++ b/media/libstagefright/tests/metadatautils/README.md
@@ -0,0 +1,39 @@
+## Media Testing ##
+---
+#### MetaDataUtils Test
+The MetaDataUtils Unit Test Suite validates the libstagefright_metadatautils library in libstagefright.
+
+Run the following steps to build the test suite:
+```
+m MetaDataUtilsTest
+```
+
+The 32-bit binaries will be created in the following path: ${OUT}/data/nativetest/
+
+The 64-bit binaries will be created in the following path: ${OUT}/data/nativetest64/
+
+To test the 64-bit binary, push it from nativetest64.
+```
+adb push ${OUT}/data/nativetest64/MetaDataUtilsTest/MetaDataUtilsTest /data/local/tmp/
+```
+
+To test the 32-bit binary, push it from nativetest.
+```
+adb push ${OUT}/data/nativetest/MetaDataUtilsTest/MetaDataUtilsTest /data/local/tmp/
+```
+
+The resource files for the tests are taken from [here](https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/tests/metadatautils/MetaDataUtilsTestRes-1.0.zip). Download, unzip and push them to the device for testing.
+
+```
+adb push MetaDataUtilsTestRes-1.0 /data/local/tmp/
+```
+
+usage: MetaDataUtilsTest -P \<path_to_folder\>
+```
+adb shell /data/local/tmp/MetaDataUtilsTest -P /data/local/tmp/MetaDataUtilsTestRes-1.0/
+```
+Alternatively, the test can also be run using the atest command.
+
+```
+atest MetaDataUtilsTest -- --enable-module-dynamic-download=true
+```
diff --git a/media/libstagefright/tests/writer/Android.bp b/media/libstagefright/tests/writer/Android.bp
index 7e169cb..b5d453e 100644
--- a/media/libstagefright/tests/writer/Android.bp
+++ b/media/libstagefright/tests/writer/Android.bp
@@ -28,13 +28,15 @@
         "libcutils",
         "liblog",
         "libutils",
+        "libmedia",
+        "libmediandk",
+        "libstagefright",
     ],
 
     static_libs: [
         "libstagefright_webm",
-        "libdatasource",
-        "libstagefright",
         "libstagefright_foundation",
+        "libdatasource",
         "libstagefright_esds",
         "libogg",
     ],
diff --git a/media/libstagefright/tests/writer/AndroidTest.xml b/media/libstagefright/tests/writer/AndroidTest.xml
index d831555..cc890fe 100644
--- a/media/libstagefright/tests/writer/AndroidTest.xml
+++ b/media/libstagefright/tests/writer/AndroidTest.xml
@@ -19,12 +19,13 @@
         <option name="cleanup" value="true" />
         <option name="push" value="writerTest->/data/local/tmp/writerTest" />
         <option name="push-file"
-            key="https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/tests/writer/Writer.zip?unzip=true"
-            value="/data/local/tmp/writerTestRes/" />
+            key="https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/tests/writer/WriterTestRes-1.1.zip?unzip=true"
+            value="/data/local/tmp/WriterTestRes/" />
     </target_preparer>
     <test class="com.android.tradefed.testtype.GTest" >
         <option name="native-test-device-path" value="/data/local/tmp" />
         <option name="module-name" value="writerTest" />
-        <option name="native-test-flag" value="-P /data/local/tmp/writerTestRes/" />
+        <option name="native-test-flag" value="-P /data/local/tmp/WriterTestRes/" />
+        <option name="native-test-flag" value="-C true" />
     </test>
 </configuration>
diff --git a/media/libstagefright/tests/writer/README.md b/media/libstagefright/tests/writer/README.md
index ae07917..0e54ca7 100644
--- a/media/libstagefright/tests/writer/README.md
+++ b/media/libstagefright/tests/writer/README.md
@@ -19,13 +19,18 @@
 
 adb push ${OUT}/data/nativetest/writerTest/writerTest /data/local/tmp/
 
-The resource file for the tests is taken from [here](https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/tests/writer/writerTestRes.zip).
+The resource file for the tests is taken from [here](https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/tests/writer/WriterTestRes-1.1.zip).
 Download and extract the folder. Push all the files in this folder to /data/local/tmp/ on the device.
 ```
-adb push writerTestRes /data/local/tmp/
+adb push WriterTestRes-1.1/. /data/local/tmp/WriterTestRes/
 ```
 
-usage: writerTest -P \<path_to_res_folder\>
+usage: writerTest -P \<path_to_res_folder\> -C \<remove_output_file\>
 ```
-adb shell /data/local/tmp/writerTest -P /data/local/tmp/
+adb shell /data/local/tmp/writerTest -P /data/local/tmp/WriterTestRes/ -C true
+```
+Alternatively, the test can also be run using the atest command.
+
+```
+atest writerTest -- --enable-module-dynamic-download=true
 ```
diff --git a/media/libstagefright/tests/writer/WriterListener.h b/media/libstagefright/tests/writer/WriterListener.h
new file mode 100644
index 0000000..81f0a7c
--- /dev/null
+++ b/media/libstagefright/tests/writer/WriterListener.h
@@ -0,0 +1,45 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef WRITER_LISTENER_H_
+#define WRITER_LISTENER_H_
+
+#include <mutex>
+
+#include <media/IMediaRecorderClient.h>
+#include <media/mediarecorder.h>
+
+using namespace android;
+using namespace std;
+
+class WriterListener : public BnMediaRecorderClient {
+  public:
+    WriterListener() : mSignaledSize(false), mSignaledDuration(false) {}
+
+    virtual void notify(int32_t msg, int32_t ext1, int32_t ext2) {
+        ALOGV("msg : %d, ext1 : %d, ext2 : %d", msg, ext1, ext2);
+        if (ext1 == MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED) {
+            mSignaledSize = true;
+        } else if (ext1 == MEDIA_RECORDER_INFO_MAX_DURATION_REACHED) {
+            mSignaledDuration = true;
+        }
+    }
+
+    volatile bool mSignaledSize;
+    volatile bool mSignaledDuration;
+};
+
+#endif  // WRITER_LISTENER_H_
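A minimal sketch of how a test attaches this listener to a writer (editor's illustration; the headers, the use of the generic MediaWriter interface, and the 1 MiB limit are assumptions, not part of the change):

```
#include <media/stagefright/MediaWriter.h>
#include <media/stagefright/MetaData.h>

#include "WriterListener.h"

// Latch the MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED callback via WriterListener
// and check the flag after the writer has been stopped.
static bool runWithSizeLimit(const sp<MediaWriter> &writer, const sp<MetaData> &fileMeta) {
    sp<WriterListener> listener = new WriterListener();
    writer->setMaxFileSize(1 * 1024 * 1024);  // placeholder limit: ~1 MiB
    writer->setListener(listener);
    writer->start(fileMeta.get());
    // ... feed buffers, e.g. via sendBuffersToWriter(..., listener) ...
    writer->stop();
    // notify() sets mSignaledSize when the file-size-limit callback arrives.
    return listener->mSignaledSize;
}
```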
diff --git a/media/libstagefright/tests/writer/WriterTest.cpp b/media/libstagefright/tests/writer/WriterTest.cpp
index ff063e3..d170e7c 100644
--- a/media/libstagefright/tests/writer/WriterTest.cpp
+++ b/media/libstagefright/tests/writer/WriterTest.cpp
@@ -18,9 +18,13 @@
 #define LOG_TAG "WriterTest"
 #include <utils/Log.h>
 
+#include <binder/ProcessState.h>
+
+#include <inttypes.h>
 #include <fstream>
 #include <iostream>
 
+#include <media/NdkMediaExtractor.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MetaData.h>
 #include <media/stagefright/Utils.h>
@@ -39,18 +43,40 @@
 
 #define OUTPUT_FILE_NAME "/data/local/tmp/writer.out"
 
+// Stts values within a 0.1 ms (100 us) difference are fudged by MPEG4Writer to
+// avoid generating too many stts entries.
+constexpr int32_t kMpeg4MuxToleranceTimeUs = 100;
+// Tolerance value for other writers
+constexpr int32_t kMuxToleranceTimeUs = 1;
+
 static WriterTestEnvironment *gEnv = nullptr;
 
-struct configFormat {
-    char mime[128];
-    int32_t width;
-    int32_t height;
-    int32_t sampleRate;
-    int32_t channelCount;
+enum inputId {
+    // audio streams
+    AAC_1,
+    AAC_ADTS_1,
+    AMR_NB_1,
+    AMR_WB_1,
+    FLAC_1,
+    OPUS_1,
+    VORBIS_1,
+    // video streams
+    AV1_1,
+    AVC_1,
+    H263_1,
+    HEVC_1,
+    MPEG4_1,
+    VP8_1,
+    VP9_1,
+    // heif stream
+    HEIC_1,
+    UNUSED_ID,
+    UNKNOWN_ID,
 };
 
 // LookUpTable of clips and metadata for component testing
 static const struct InputData {
+    inputId inpId;
     const char *mime;
     string inputFile;
     string info;
@@ -58,65 +84,73 @@
     int32_t secondParam;
     bool isAudio;
 } kInputData[] = {
-        {MEDIA_MIMETYPE_AUDIO_OPUS, "bbb_opus_stereo_128kbps_48000hz.opus",
-         "bbb_opus_stereo_128kbps_48000hz.info", 48000, 2, true},
-        {MEDIA_MIMETYPE_AUDIO_AAC, "bbb_aac_stereo_128kbps_48000hz.aac",
-         "bbb_aac_stereo_128kbps_48000hz.info", 48000, 2, true},
-        {MEDIA_MIMETYPE_AUDIO_AAC_ADTS, "Mps_2_c2_fr1_Sc1_Dc2_0x03_raw.adts",
+        {AAC_1, MEDIA_MIMETYPE_AUDIO_AAC, "audio_aac_stereo_8kbps_11025hz.aac",
+         "audio_aac_stereo_8kbps_11025hz.info", 11025, 2, true},
+        {AAC_ADTS_1, MEDIA_MIMETYPE_AUDIO_AAC_ADTS, "Mps_2_c2_fr1_Sc1_Dc2_0x03_raw.adts",
          "Mps_2_c2_fr1_Sc1_Dc2_0x03_raw.info", 48000, 2, true},
-        {MEDIA_MIMETYPE_AUDIO_AMR_NB, "sine_amrnb_1ch_12kbps_8000hz.amrnb",
+        {AMR_NB_1, MEDIA_MIMETYPE_AUDIO_AMR_NB, "sine_amrnb_1ch_12kbps_8000hz.amrnb",
          "sine_amrnb_1ch_12kbps_8000hz.info", 8000, 1, true},
-        {MEDIA_MIMETYPE_AUDIO_AMR_WB, "bbb_amrwb_1ch_14kbps_16000hz.amrwb",
+        {AMR_WB_1, MEDIA_MIMETYPE_AUDIO_AMR_WB, "bbb_amrwb_1ch_14kbps_16000hz.amrwb",
          "bbb_amrwb_1ch_14kbps_16000hz.info", 16000, 1, true},
-        {MEDIA_MIMETYPE_AUDIO_VORBIS, "bbb_vorbis_stereo_128kbps_48000hz.vorbis",
-         "bbb_vorbis_stereo_128kbps_48000hz.info", 48000, 2, true},
-        {MEDIA_MIMETYPE_AUDIO_FLAC, "bbb_flac_stereo_680kbps_48000hz.flac",
+        {FLAC_1, MEDIA_MIMETYPE_AUDIO_FLAC, "bbb_flac_stereo_680kbps_48000hz.flac",
          "bbb_flac_stereo_680kbps_48000hz.info", 48000, 2, true},
-        {MEDIA_MIMETYPE_VIDEO_VP9, "bbb_vp9_176x144_285kbps_60fps.vp9",
-         "bbb_vp9_176x144_285kbps_60fps.info", 176, 144, false},
-        {MEDIA_MIMETYPE_VIDEO_VP8, "bbb_vp8_176x144_240kbps_60fps.vp8",
-         "bbb_vp8_176x144_240kbps_60fps.info", 176, 144, false},
-        {MEDIA_MIMETYPE_VIDEO_AVC, "bbb_avc_176x144_300kbps_60fps.h264",
-         "bbb_avc_176x144_300kbps_60fps.info", 176, 144, false},
-        {MEDIA_MIMETYPE_VIDEO_HEVC, "bbb_hevc_176x144_176kbps_60fps.hevc",
-         "bbb_hevc_176x144_176kbps_60fps.info", 176, 144, false},
-        {MEDIA_MIMETYPE_VIDEO_AV1, "bbb_av1_176_144.av1", "bbb_av1_176_144.info", 176, 144, false},
-        {MEDIA_MIMETYPE_VIDEO_H263, "bbb_h263_352x288_300kbps_12fps.h263",
+        {OPUS_1, MEDIA_MIMETYPE_AUDIO_OPUS, "bbb_opus_stereo_128kbps_48000hz.opus",
+         "bbb_opus_stereo_128kbps_48000hz.info", 48000, 2, true},
+        {VORBIS_1, MEDIA_MIMETYPE_AUDIO_VORBIS, "bbb_vorbis_1ch_64kbps_16kHz.vorbis",
+         "bbb_vorbis_1ch_64kbps_16kHz.info", 16000, 1, true},
+
+        {AV1_1, MEDIA_MIMETYPE_VIDEO_AV1, "bbb_av1_176_144.av1", "bbb_av1_176_144.info", 176, 144,
+         false},
+        {AVC_1, MEDIA_MIMETYPE_VIDEO_AVC, "bbb_avc_352x288_768kbps_30fps.avc",
+         "bbb_avc_352x288_768kbps_30fps.info", 352, 288, false},
+        {H263_1, MEDIA_MIMETYPE_VIDEO_H263, "bbb_h263_352x288_300kbps_12fps.h263",
          "bbb_h263_352x288_300kbps_12fps.info", 352, 288, false},
-        {MEDIA_MIMETYPE_VIDEO_MPEG4, "bbb_mpeg4_352x288_512kbps_30fps.m4v",
+        {HEVC_1, MEDIA_MIMETYPE_VIDEO_HEVC, "bbb_hevc_340x280_768kbps_30fps.hevc",
+         "bbb_hevc_340x280_768kbps_30fps.info", 340, 280, false},
+        {MPEG4_1, MEDIA_MIMETYPE_VIDEO_MPEG4, "bbb_mpeg4_352x288_512kbps_30fps.m4v",
          "bbb_mpeg4_352x288_512kbps_30fps.info", 352, 288, false},
+        {VP8_1, MEDIA_MIMETYPE_VIDEO_VP8, "bbb_vp8_176x144_240kbps_60fps.vp8",
+         "bbb_vp8_176x144_240kbps_60fps.info", 176, 144, false},
+        {VP9_1, MEDIA_MIMETYPE_VIDEO_VP9, "bbb_vp9_176x144_285kbps_60fps.vp9",
+         "bbb_vp9_176x144_285kbps_60fps.info", 176, 144, false},
+
+        {HEIC_1, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC, "bbb_hevc_176x144_176kbps_60fps.hevc",
+         "bbb_heic_176x144_176kbps_60fps.info", 176, 144, false},
 };
 
-class WriterTest : public ::testing::TestWithParam<pair<string, int32_t>> {
+class WriterTest {
   public:
-    WriterTest() : mWriter(nullptr), mFileMeta(nullptr), mCurrentTrack(nullptr) {}
+    WriterTest() : mWriter(nullptr), mFileMeta(nullptr) {}
 
     ~WriterTest() {
-        if (mWriter) {
-            mWriter.clear();
-            mWriter = nullptr;
-        }
         if (mFileMeta) {
             mFileMeta.clear();
             mFileMeta = nullptr;
         }
-        if (mCurrentTrack) {
-            mCurrentTrack.clear();
-            mCurrentTrack = nullptr;
+        if (mWriter) {
+            mWriter.clear();
+            mWriter = nullptr;
+        }
+        if (gEnv->cleanUp()) remove(OUTPUT_FILE_NAME);
+
+        for (int32_t idx = 0; idx < kMaxTrackCount; idx++) {
+            mBufferInfo[idx].clear();
+            if (mCurrentTrack[idx]) {
+                mCurrentTrack[idx]->stop();
+                mCurrentTrack[idx].clear();
+                mCurrentTrack[idx] = nullptr;
+            }
+            if (mInputStream[idx].is_open()) mInputStream[idx].close();
         }
     }
 
-    virtual void SetUp() override {
-        mNumCsds = 0;
-        mInputFrameId = 0;
+    void setupWriterType(string writerFormat) {
         mWriterName = unknown_comp;
         mDisableTest = false;
-
         static const std::map<std::string, standardWriters> mapWriter = {
                 {"ogg", OGG},     {"aac", AAC},      {"aac_adts", AAC_ADTS}, {"webm", WEBM},
                 {"mpeg4", MPEG4}, {"amrnb", AMR_NB}, {"amrwb", AMR_WB},      {"mpeg2Ts", MPEG2TS}};
         // Find the component type
-        string writerFormat = GetParam().first;
         if (mapWriter.find(writerFormat) != mapWriter.end()) {
             mWriterName = mapWriter.at(writerFormat);
         }
@@ -126,16 +160,19 @@
         }
     }
 
-    virtual void TearDown() override {
-        mBufferInfo.clear();
-        if (mInputStream.is_open()) mInputStream.close();
-    }
-
-    void getInputBufferInfo(string inputFileName, string inputInfo);
+    void getInputBufferInfo(string inputFileName, string inputInfo, int32_t idx = 0);
 
     int32_t createWriter(int32_t fd);
 
-    int32_t addWriterSource(bool isAudio, configFormat params);
+    int32_t addWriterSource(bool isAudio, configFormat params, int32_t idx = 0);
+
+    void setupExtractor(AMediaExtractor *extractor, string inputFileName, int32_t &trackCount);
+
+    void extract(AMediaExtractor *extractor, configFormat &params, vector<BufferInfo> &bufferInfo,
+                 uint8_t *buffer, size_t bufSize, size_t *bytesExtracted, int32_t idx);
+
+    void compareParams(configFormat srcParam, configFormat dstParam, vector<BufferInfo> dstBufInfo,
+                       int32_t index);
 
     enum standardWriters {
         OGG,
@@ -152,32 +189,42 @@
     standardWriters mWriterName;
     sp<MediaWriter> mWriter;
     sp<MetaData> mFileMeta;
-    sp<MediaAdapter> mCurrentTrack;
+    sp<MediaAdapter> mCurrentTrack[kMaxTrackCount]{};
 
     bool mDisableTest;
-    int32_t mNumCsds;
-    int32_t mInputFrameId;
-    ifstream mInputStream;
-    vector<BufferInfo> mBufferInfo;
+    int32_t mNumCsds[kMaxTrackCount]{};
+    int32_t mInputFrameId[kMaxTrackCount]{};
+    ifstream mInputStream[kMaxTrackCount]{};
+    vector<BufferInfo> mBufferInfo[kMaxTrackCount];
 };
 
-void WriterTest::getInputBufferInfo(string inputFileName, string inputInfo) {
+class WriteFunctionalityTest
+    : public WriterTest,
+      public ::testing::TestWithParam<tuple<string /* writerFormat*/, inputId /* inputId0*/,
+                                            inputId /* inputId1*/, float /* BufferInterval*/>> {
+  public:
+    virtual void SetUp() override { setupWriterType(get<0>(GetParam())); }
+};
+
+void WriterTest::getInputBufferInfo(string inputFileName, string inputInfo, int32_t idx) {
     std::ifstream eleInfo;
     eleInfo.open(inputInfo.c_str());
     ASSERT_EQ(eleInfo.is_open(), true);
     int32_t bytesCount = 0;
     uint32_t flags = 0;
     int64_t timestamp = 0;
+    int32_t numCsds = 0;
     while (1) {
         if (!(eleInfo >> bytesCount)) break;
         eleInfo >> flags;
         eleInfo >> timestamp;
-        mBufferInfo.push_back({bytesCount, flags, timestamp});
-        if (flags == CODEC_CONFIG_FLAG) mNumCsds++;
+        mBufferInfo[idx].push_back({bytesCount, flags, timestamp});
+        if (flags == CODEC_CONFIG_FLAG) numCsds++;
     }
     eleInfo.close();
-    mInputStream.open(inputFileName.c_str(), std::ifstream::binary);
-    ASSERT_EQ(mInputStream.is_open(), true);
+    mNumCsds[idx] = numCsds;
+    mInputStream[idx].open(inputFileName.c_str(), std::ifstream::binary);
+    ASSERT_EQ(mInputStream[idx].is_open(), true);
 }
 
 int32_t WriterTest::createWriter(int32_t fd) {
@@ -223,10 +270,10 @@
     return 0;
 }
 
-int32_t WriterTest::addWriterSource(bool isAudio, configFormat params) {
-    if (mInputFrameId) return -1;
+int32_t WriterTest::addWriterSource(bool isAudio, configFormat params, int32_t idx) {
+    if (mInputFrameId[idx]) return -1;
     sp<AMessage> format = new AMessage;
-    if (mInputStream.is_open()) {
+    if (mInputStream[idx].is_open()) {
         format->setString("mime", params.mime);
         if (isAudio) {
             format->setInt32("channel-count", params.channelCount);
@@ -235,25 +282,34 @@
             format->setInt32("width", params.width);
             format->setInt32("height", params.height);
         }
-
-        int32_t status =
-                writeHeaderBuffers(mInputStream, mBufferInfo, mInputFrameId, format, mNumCsds);
-        if (status != 0) return -1;
+        if (mNumCsds[idx]) {
+            int32_t status = writeHeaderBuffers(mInputStream[idx], mBufferInfo[idx],
+                                                mInputFrameId[idx], format, mNumCsds[idx]);
+            if (status != 0) return -1;
+        }
     }
+
     sp<MetaData> trackMeta = new MetaData;
     convertMessageToMetaData(format, trackMeta);
-    mCurrentTrack = new MediaAdapter(trackMeta);
-    if (mCurrentTrack == nullptr) {
+    mCurrentTrack[idx] = new MediaAdapter(trackMeta);
+    if (mCurrentTrack[idx] == nullptr) {
         ALOGE("MediaAdapter returned nullptr");
         return -1;
     }
-    status_t result = mWriter->addSource(mCurrentTrack);
+    status_t result = mWriter->addSource(mCurrentTrack[idx]);
     return result;
 }
 
 void getFileDetails(string &inputFilePath, string &info, configFormat &params, bool &isAudio,
-                    int32_t streamIndex = 0) {
-    if (streamIndex >= sizeof(kInputData) / sizeof(kInputData[0])) {
+                    inputId inpId) {
+    int32_t inputDataSize = sizeof(kInputData) / sizeof(kInputData[0]);
+    int32_t streamIndex = 0;
+    for (; streamIndex < inputDataSize; streamIndex++) {
+        if (inpId == kInputData[streamIndex].inpId) {
+            break;
+        }
+    }
+    if (streamIndex == inputDataSize) {
         return;
     }
     inputFilePath += kInputData[streamIndex].inputFile;
@@ -270,7 +326,147 @@
     return;
 }
 
-TEST_P(WriterTest, CreateWriterTest) {
+void WriterTest::setupExtractor(AMediaExtractor *extractor, string inputFileName,
+                                int32_t &trackCount) {
+    ALOGV("Input file for extractor: %s", inputFileName.c_str());
+
+    int32_t fd = open(inputFileName.c_str(), O_RDONLY);
+    ASSERT_GE(fd, 0) << "Failed to open writer's output file to validate";
+
+    struct stat buf;
+    int32_t status = fstat(fd, &buf);
+    ASSERT_EQ(status, 0) << "Failed to get properties of input file for extractor";
+
+    size_t fileSize = buf.st_size;
+    ALOGV("Size of input file to extractor: %zu", fileSize);
+
+    status = AMediaExtractor_setDataSourceFd(extractor, fd, 0, fileSize);
+    ASSERT_EQ(status, AMEDIA_OK) << "Failed to set data source for extractor";
+
+    trackCount = AMediaExtractor_getTrackCount(extractor);
+    ASSERT_GT(trackCount, 0) << "No tracks reported by extractor";
+    ALOGV("Number of tracks reported by extractor : %d", trackCount);
+    return;
+}
+
+void WriterTest::extract(AMediaExtractor *extractor, configFormat &params,
+                         vector<BufferInfo> &bufferInfo, uint8_t *buffer, size_t bufSize,
+                         size_t *bytesExtracted, int32_t idx) {
+    AMediaExtractor_selectTrack(extractor, idx);
+    AMediaFormat *format = AMediaExtractor_getTrackFormat(extractor, idx);
+    ASSERT_NE(format, nullptr) << "Track format is NULL";
+    ALOGI("Track format = %s", AMediaFormat_toString(format));
+
+    const char *mime = nullptr;
+    AMediaFormat_getString(format, AMEDIAFORMAT_KEY_MIME, &mime);
+    ASSERT_NE(mime, nullptr) << "Track mime is NULL";
+    ALOGI("Track mime = %s", mime);
+    strlcpy(params.mime, mime, kMimeSize);
+
+    if (!strncmp(mime, "audio/", 6)) {
+        ASSERT_TRUE(
+                AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_CHANNEL_COUNT, &params.channelCount))
+                << "Extractor did not report channel count";
+        ASSERT_TRUE(AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_SAMPLE_RATE, &params.sampleRate))
+                << "Extractor did not report sample rate";
+    } else if (!strncmp(mime, "video/", 6) || !strncmp(mime, "image/", 6)) {
+        ASSERT_TRUE(AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_WIDTH, &params.width))
+                << "Extractor did not report width";
+        ASSERT_TRUE(AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_HEIGHT, &params.height))
+                << "Extractor did not report height";
+    } else {
+        ASSERT_TRUE(false) << "Invalid mime " << mime;
+    }
+
+    int32_t bufferOffset = 0;
+    // Get CSD data
+    int index = 0;
+    void *csdBuf;
+    while (1) {
+        csdBuf = nullptr;
+        char csdName[16];
+        snprintf(csdName, 16, "csd-%d", index);
+        size_t csdSize = 0;
+        bool csdFound = AMediaFormat_getBuffer(format, csdName, &csdBuf, &csdSize);
+        if (!csdFound || !csdBuf || !csdSize) break;
+
+        bufferInfo.push_back({static_cast<int32_t>(csdSize), CODEC_CONFIG_FLAG, 0});
+        memcpy(buffer + bufferOffset, csdBuf, csdSize);
+        bufferOffset += csdSize;
+        index++;
+    }
+
+    // Get frame data
+    while (1) {
+        ssize_t sampleSize = AMediaExtractor_getSampleSize(extractor);
+        if (sampleSize < 0) break;
+
+        uint8_t *sampleBuffer = (uint8_t *)malloc(sampleSize);
+        ASSERT_NE(sampleBuffer, nullptr) << "Failed to allocate the buffer of size " << sampleSize;
+
+        int bytesRead = AMediaExtractor_readSampleData(extractor, sampleBuffer, sampleSize);
+        ASSERT_EQ(bytesRead, sampleSize)
+                << "Number of bytes extracted does not match with sample size";
+        int64_t pts = AMediaExtractor_getSampleTime(extractor);
+        uint32_t flag = AMediaExtractor_getSampleFlags(extractor);
+
+        if (!strcmp(mime, MEDIA_MIMETYPE_AUDIO_VORBIS)) {
+            // Remove the 4 bytes of AMEDIAFORMAT_KEY_VALID_SAMPLES appended after the sample data
+            bytesRead = bytesRead - 4;
+        }
+
+        ASSERT_LE(bufferOffset + bytesRead, bufSize)
+                << "Size of the buffer is insufficient to store the extracted data";
+        bufferInfo.push_back({bytesRead, flag, pts});
+        memcpy(buffer + bufferOffset, sampleBuffer, bytesRead);
+        bufferOffset += bytesRead;
+
+        AMediaExtractor_advance(extractor);
+        free(sampleBuffer);
+    }
+    *bytesExtracted = bufferOffset;
+    return;
+}
+
+void WriterTest::compareParams(configFormat srcParam, configFormat dstParam,
+                               vector<BufferInfo> dstBufInfo, int32_t index) {
+    ASSERT_STREQ(srcParam.mime, dstParam.mime)
+            << "Extracted mime type does not match with input mime type";
+
+    if (!strncmp(srcParam.mime, "audio/", 6)) {
+        ASSERT_EQ(srcParam.channelCount, dstParam.channelCount)
+                << "Extracted channel count does not match with input channel count";
+        ASSERT_EQ(srcParam.sampleRate, dstParam.sampleRate)
+                << "Extracted sample rate does not match with input sample rate";
+    } else if (!strncmp(srcParam.mime, "video/", 6) || !strncmp(srcParam.mime, "image/", 6)) {
+        ASSERT_EQ(srcParam.width, dstParam.width)
+                << "Extracted width does not match with input width";
+        ASSERT_EQ(srcParam.height, dstParam.height)
+                << "Extracted height does not match with input height";
+    } else {
+        ASSERT_TRUE(false) << "Invalid mime type" << srcParam.mime;
+    }
+
+    int32_t toleranceValueUs = kMuxToleranceTimeUs;
+    if (mWriterName == MPEG4) {
+        toleranceValueUs = kMpeg4MuxToleranceTimeUs;
+    }
+    for (int32_t i = 0; i < dstBufInfo.size(); i++) {
+        ASSERT_EQ(mBufferInfo[index][i].size, dstBufInfo[i].size)
+                << "Input size " << mBufferInfo[index][i].size << " mismatched with extracted size "
+                << dstBufInfo[i].size;
+        ASSERT_EQ(mBufferInfo[index][i].flags, dstBufInfo[i].flags)
+                << "Input flag " << mBufferInfo[index][i].flags
+                << " mismatched with extracted flag " << dstBufInfo[i].flags;
+        ASSERT_LE(abs(mBufferInfo[index][i].timeUs - dstBufInfo[i].timeUs), toleranceValueUs)
+                << "Difference between original timestamp " << mBufferInfo[index][i].timeUs
+                << " and extracted timestamp " << dstBufInfo[i].timeUs
+                << " is greater than tolerance value = " << toleranceValueUs << " microseconds";
+    }
+    return;
+}
+
+TEST_P(WriteFunctionalityTest, CreateWriterTest) {
     if (mDisableTest) return;
     ALOGV("Tests the creation of writers");
 
@@ -281,14 +477,14 @@
 
     // Creating writer within a test scope. Destructor should be called when the test ends
     ASSERT_EQ((status_t)OK, createWriter(fd))
-            << "Failed to create writer for output format:" << GetParam().first;
+            << "Failed to create writer for output format:" << get<0>(GetParam());
 }
 
-TEST_P(WriterTest, WriterTest) {
+TEST_P(WriteFunctionalityTest, WriterTest) {
     if (mDisableTest) return;
     ALOGV("Checks if for a given input, a valid muxed file has been created or not");
 
-    string writerFormat = GetParam().first;
+    string writerFormat = get<0>(GetParam());
     string outputFile = OUTPUT_FILE_NAME;
     int32_t fd =
             open(outputFile.c_str(), O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
@@ -297,35 +493,110 @@
     int32_t status = createWriter(fd);
     ASSERT_EQ((status_t)OK, status) << "Failed to create writer for output format:" << writerFormat;
 
-    string inputFile = gEnv->getRes();
-    string inputInfo = gEnv->getRes();
-    configFormat param;
-    bool isAudio;
-    int32_t inputFileIdx = GetParam().second;
-    getFileDetails(inputFile, inputInfo, param, isAudio, inputFileIdx);
-    ASSERT_NE(inputFile.compare(gEnv->getRes()), 0) << "No input file specified";
+    inputId inpId[] = {get<1>(GetParam()), get<2>(GetParam())};
+    ASSERT_NE(inpId[0], UNUSED_ID) << "Test expects first inputId to be a valid id";
 
-    ASSERT_NO_FATAL_FAILURE(getInputBufferInfo(inputFile, inputInfo));
-    status = addWriterSource(isAudio, param);
-    ASSERT_EQ((status_t)OK, status) << "Failed to add source for " << writerFormat << "Writer";
+    int32_t numTracks = 1;
+    if (inpId[1] != UNUSED_ID) {
+        numTracks++;
+    }
+
+    size_t fileSize[numTracks];
+    configFormat param[numTracks];
+    for (int32_t idx = 0; idx < numTracks; idx++) {
+        string inputFile = gEnv->getRes();
+        string inputInfo = gEnv->getRes();
+        bool isAudio;
+        getFileDetails(inputFile, inputInfo, param[idx], isAudio, inpId[idx]);
+        ASSERT_NE(inputFile.compare(gEnv->getRes()), 0) << "No input file specified";
+
+        struct stat buf;
+        status = stat(inputFile.c_str(), &buf);
+        ASSERT_EQ(status, 0) << "Failed to get properties of input file:" << inputFile;
+        fileSize[idx] = buf.st_size;
+
+        ASSERT_NO_FATAL_FAILURE(getInputBufferInfo(inputFile, inputInfo, idx));
+        status = addWriterSource(isAudio, param[idx], idx);
+        ASSERT_EQ((status_t)OK, status) << "Failed to add source for " << writerFormat << "Writer";
+    }
 
     status = mWriter->start(mFileMeta.get());
     ASSERT_EQ((status_t)OK, status);
-    status = sendBuffersToWriter(mInputStream, mBufferInfo, mInputFrameId, mCurrentTrack, 0,
-                                 mBufferInfo.size());
-    ASSERT_EQ((status_t)OK, status) << writerFormat << " writer failed";
-    mCurrentTrack->stop();
+    float interval = get<3>(GetParam());
+    ASSERT_LE(interval, 1.0f) << "Buffer interval invalid. Should be less than or equal to 1.0";
 
+    size_t range = 0;
+    int32_t loopCount = 0;
+    int32_t offset[kMaxTrackCount]{};
+    while (loopCount < ceil(1.0 / interval)) {
+        for (int32_t idx = 0; idx < numTracks; idx++) {
+            range = mBufferInfo[idx].size() * interval;
+            status = sendBuffersToWriter(mInputStream[idx], mBufferInfo[idx], mInputFrameId[idx],
+                                         mCurrentTrack[idx], offset[idx], range);
+            ASSERT_EQ((status_t)OK, status) << writerFormat << " writer failed";
+            offset[idx] += range;
+        }
+        loopCount++;
+    }
+    for (int32_t idx = 0; idx < kMaxTrackCount; idx++) {
+        if (mCurrentTrack[idx]) {
+            mCurrentTrack[idx]->stop();
+        }
+    }
     status = mWriter->stop();
     ASSERT_EQ((status_t)OK, status) << "Failed to stop the writer";
     close(fd);
+
+    // Validate the output muxed file created by writer
+    // TODO(b/146423022): Skip validating output for webm writer
+    // TODO(b/146421018): Skip validating output for ogg writer
+    if (mWriterName != OGG && mWriterName != WEBM) {
+        configFormat extractorParams[numTracks];
+        vector<BufferInfo> extractorBufferInfo[numTracks];
+        int32_t trackCount = -1;
+
+        AMediaExtractor *extractor = AMediaExtractor_new();
+        ASSERT_NE(extractor, nullptr) << "Failed to create extractor";
+        ASSERT_NO_FATAL_FAILURE(setupExtractor(extractor, outputFile, trackCount));
+        ASSERT_EQ(trackCount, numTracks)
+                << "Tracks reported by extractor does not match with input number of tracks";
+
+        for (int32_t idx = 0; idx < numTracks; idx++) {
+            char *inputBuffer = (char *)malloc(fileSize[idx]);
+            ASSERT_NE(inputBuffer, nullptr)
+                    << "Failed to allocate the buffer of size " << fileSize[idx];
+            mInputStream[idx].seekg(0, mInputStream[idx].beg);
+            mInputStream[idx].read(inputBuffer, fileSize[idx]);
+            ASSERT_EQ(mInputStream[idx].gcount(), fileSize[idx]);
+
+            uint8_t *extractedBuffer = (uint8_t *)malloc(fileSize[idx]);
+            ASSERT_NE(extractedBuffer, nullptr)
+                    << "Failed to allocate the buffer of size " << fileSize[idx];
+            size_t bytesExtracted = 0;
+
+            ASSERT_NO_FATAL_FAILURE(extract(extractor, extractorParams[idx],
+                                            extractorBufferInfo[idx], extractedBuffer,
+                                            fileSize[idx], &bytesExtracted, idx));
+            ASSERT_GT(bytesExtracted, 0) << "Total bytes extracted by extractor cannot be zero";
+
+            ASSERT_NO_FATAL_FAILURE(
+                    compareParams(param[idx], extractorParams[idx], extractorBufferInfo[idx], idx));
+
+            ASSERT_EQ(memcmp(extractedBuffer, (uint8_t *)inputBuffer, bytesExtracted), 0)
+                    << "Extracted bit stream does not match with input bit stream";
+
+            free(inputBuffer);
+            free(extractedBuffer);
+        }
+        AMediaExtractor_delete(extractor);
+    }
 }
 
-TEST_P(WriterTest, PauseWriterTest) {
+TEST_P(WriteFunctionalityTest, PauseWriterTest) {
     if (mDisableTest) return;
     ALOGV("Validates the pause() api of writers");
 
-    string writerFormat = GetParam().first;
+    string writerFormat = get<0>(GetParam());
     string outputFile = OUTPUT_FILE_NAME;
     int32_t fd =
             open(outputFile.c_str(), O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
@@ -338,8 +609,10 @@
     string inputInfo = gEnv->getRes();
     configFormat param;
     bool isAudio;
-    int32_t inputFileIdx = GetParam().second;
-    getFileDetails(inputFile, inputInfo, param, isAudio, inputFileIdx);
+    inputId inpId = get<1>(GetParam());
+    ASSERT_NE(inpId, UNUSED_ID) << "Test expects first inputId to be a valid id";
+
+    getFileDetails(inputFile, inputInfo, param, isAudio, inpId);
     ASSERT_NE(inputFile.compare(gEnv->getRes()), 0) << "No input file specified";
 
     ASSERT_NO_FATAL_FAILURE(getInputBufferInfo(inputFile, inputInfo));
@@ -348,8 +621,8 @@
 
     status = mWriter->start(mFileMeta.get());
     ASSERT_EQ((status_t)OK, status);
-    status = sendBuffersToWriter(mInputStream, mBufferInfo, mInputFrameId, mCurrentTrack, 0,
-                                 mBufferInfo.size() / 4);
+    status = sendBuffersToWriter(mInputStream[0], mBufferInfo[0], mInputFrameId[0],
+                                 mCurrentTrack[0], 0, mBufferInfo[0].size() / 4);
     ASSERT_EQ((status_t)OK, status) << writerFormat << " writer failed";
 
     bool isPaused = false;
@@ -359,26 +632,26 @@
         isPaused = true;
     }
     // In the pause state, writers shouldn't write anything. Testing the writers for the same
-    int32_t numFramesPaused = mBufferInfo.size() / 4;
-    status = sendBuffersToWriter(mInputStream, mBufferInfo, mInputFrameId, mCurrentTrack,
-                                  mInputFrameId, numFramesPaused, isPaused);
+    int32_t numFramesPaused = mBufferInfo[0].size() / 4;
+    status = sendBuffersToWriter(mInputStream[0], mBufferInfo[0], mInputFrameId[0],
+                                 mCurrentTrack[0], mInputFrameId[0], numFramesPaused, isPaused);
     ASSERT_EQ((status_t)OK, status) << writerFormat << " writer failed";
 
     if (isPaused) {
         status = mWriter->start(mFileMeta.get());
         ASSERT_EQ((status_t)OK, status);
     }
-    status = sendBuffersToWriter(mInputStream, mBufferInfo, mInputFrameId, mCurrentTrack,
-                                  mInputFrameId, mBufferInfo.size());
+    status = sendBuffersToWriter(mInputStream[0], mBufferInfo[0], mInputFrameId[0],
+                                 mCurrentTrack[0], mInputFrameId[0], mBufferInfo[0].size());
     ASSERT_EQ((status_t)OK, status) << writerFormat << " writer failed";
-    mCurrentTrack->stop();
+    mCurrentTrack[0]->stop();
 
     status = mWriter->stop();
     ASSERT_EQ((status_t)OK, status) << "Failed to stop the writer";
     close(fd);
 }
 
-TEST_P(WriterTest, MultiStartStopPauseTest) {
+TEST_P(WriteFunctionalityTest, MultiStartStopPauseTest) {
     // TODO: (b/144821804)
     // Enable the test for MPE2TS writer
     if (mDisableTest || mWriterName == standardWriters::MPEG2TS) return;
@@ -389,7 +662,7 @@
             open(outputFile.c_str(), O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
     ASSERT_GE(fd, 0) << "Failed to open output file to dump writer's data";
 
-    string writerFormat = GetParam().first;
+    string writerFormat = get<0>(GetParam());
     int32_t status = createWriter(fd);
     ASSERT_EQ(status, (status_t)OK) << "Failed to create writer for output format:" << writerFormat;
 
@@ -397,8 +670,10 @@
     string inputInfo = gEnv->getRes();
     configFormat param;
     bool isAudio;
-    int32_t inputFileIdx = GetParam().second;
-    getFileDetails(inputFile, inputInfo, param, isAudio, inputFileIdx);
+    inputId inpId = get<1>(GetParam());
+    ASSERT_NE(inpId, UNUSED_ID) << "Test expects first inputId to be a valid id";
+
+    getFileDetails(inputFile, inputInfo, param, isAudio, inpId);
     ASSERT_NE(inputFile.compare(gEnv->getRes()), 0) << "No input file specified";
 
     ASSERT_NO_FATAL_FAILURE(getInputBufferInfo(inputFile, inputInfo));
@@ -415,8 +690,8 @@
         mWriter->start(mFileMeta.get());
     }
 
-    status = sendBuffersToWriter(mInputStream, mBufferInfo, mInputFrameId, mCurrentTrack, 0,
-                              mBufferInfo.size() / 4);
+    status = sendBuffersToWriter(mInputStream[0], mBufferInfo[0], mInputFrameId[0],
+                                 mCurrentTrack[0], 0, mBufferInfo[0].size() / 4);
     ASSERT_EQ((status_t)OK, status) << writerFormat << " writer failed";
 
     for (int32_t count = 0; count < kMaxCount; count++) {
@@ -425,20 +700,20 @@
     }
 
     mWriter->pause();
-    int32_t numFramesPaused = mBufferInfo.size() / 4;
-    status = sendBuffersToWriter(mInputStream, mBufferInfo, mInputFrameId, mCurrentTrack,
-                              mInputFrameId, numFramesPaused, true);
+    int32_t numFramesPaused = mBufferInfo[0].size() / 4;
+    status = sendBuffersToWriter(mInputStream[0], mBufferInfo[0], mInputFrameId[0],
+                                 mCurrentTrack[0], mInputFrameId[0], numFramesPaused, true);
     ASSERT_EQ((status_t)OK, status) << writerFormat << " writer failed";
 
     for (int32_t count = 0; count < kMaxCount; count++) {
         mWriter->start(mFileMeta.get());
     }
 
-    status = sendBuffersToWriter(mInputStream, mBufferInfo, mInputFrameId, mCurrentTrack,
-                              mInputFrameId, mBufferInfo.size());
+    status = sendBuffersToWriter(mInputStream[0], mBufferInfo[0], mInputFrameId[0],
+                                 mCurrentTrack[0], mInputFrameId[0], mBufferInfo[0].size());
     ASSERT_EQ((status_t)OK, status) << writerFormat << " writer failed";
 
-    mCurrentTrack->stop();
+    mCurrentTrack[0]->stop();
 
     // first stop should succeed.
     status = mWriter->stop();
@@ -451,19 +726,380 @@
     close(fd);
 }
 
+class WriterValidityTest
+    : public WriterTest,
+      public ::testing::TestWithParam<
+              tuple<string /* writerFormat*/, inputId /* inputId0*/, bool /* addSourceFail*/>> {
+  public:
+    virtual void SetUp() override { setupWriterType(get<0>(GetParam())); }
+};
+
+TEST_P(WriterValidityTest, InvalidInputTest) {
+    if (mDisableTest) return;
+    ALOGV("Validates writer's behavior for invalid inputs");
+
+    string writerFormat = get<0>(GetParam());
+    inputId inpId = get<1>(GetParam());
+    bool addSourceFailExpected = get<2>(GetParam());
+
+    // Test writers for invalid FD value
+    int32_t fd = -1;
+    int32_t status = createWriter(fd);
+    if (status != OK) {
+        ALOGV("createWriter failed for invalid FD, this is expected behavior");
+        return;
+    }
+
+    // If writer was created for invalid fd, test it further.
+    string inputFile = gEnv->getRes();
+    string inputInfo = gEnv->getRes();
+    configFormat param;
+    bool isAudio;
+    ASSERT_NE(inpId, UNUSED_ID) << "Test expects first inputId to be a valid id";
+
+    getFileDetails(inputFile, inputInfo, param, isAudio, inpId);
+    ASSERT_NE(inputFile.compare(gEnv->getRes()), 0) << "No input file specified";
+
+    ASSERT_NO_FATAL_FAILURE(getInputBufferInfo(inputFile, inputInfo));
+    status = addWriterSource(isAudio, param);
+    if (status != OK) {
+        ASSERT_TRUE(addSourceFailExpected)
+                << "Failed to add source for " << writerFormat << " writer";
+        ALOGV("addWriterSource failed for invalid FD, this is expected behavior");
+        return;
+    }
+
+    // start the writer with valid argument but invalid FD
+    status = mWriter->start(mFileMeta.get());
+    ASSERT_NE((status_t)OK, status) << "Writer did not fail for invalid FD";
+
+    status = sendBuffersToWriter(mInputStream[0], mBufferInfo[0], mInputFrameId[0],
+                                 mCurrentTrack[0], 0, mBufferInfo[0].size());
+    ASSERT_NE((status_t)OK, status) << "Writer did not report error for invalid FD";
+
+    status = mCurrentTrack[0]->stop();
+    ASSERT_EQ((status_t)OK, status) << "Failed to stop the track";
+
+    status = mWriter->stop();
+    ASSERT_EQ((status_t)OK, status) << "Failed to stop " << writerFormat << " writer";
+}
+
+TEST_P(WriterValidityTest, MalFormedDataTest) {
+    if (mDisableTest) return;
+    // TODO(b/160105646): Enable this test for the Ogg writer
+    ASSERT_NE(mWriterName, OGG) << "TODO(b/160105646)";
+    ALOGV("Test writer for malformed inputs");
+
+    string writerFormat = get<0>(GetParam());
+    inputId inpId = get<1>(GetParam());
+    bool addSourceFailExpected = get<2>(GetParam());
+    int32_t fd =
+            open(OUTPUT_FILE_NAME, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
+    ASSERT_GE(fd, 0) << "Failed to open output file to dump writer's data";
+
+    int32_t status = createWriter(fd);
+    ASSERT_EQ(status, (status_t)OK)
+            << "Failed to create writer for " << writerFormat << " output format";
+
+    string inputFile = gEnv->getRes();
+    string inputInfo = gEnv->getRes();
+    configFormat param;
+    bool isAudio;
+    ASSERT_NE(inpId, UNUSED_ID) << "Test expects first inputId to be a valid id";
+
+    getFileDetails(inputFile, inputInfo, param, isAudio, inpId);
+    ASSERT_NE(inputFile.compare(gEnv->getRes()), 0) << "No input file specified";
+
+    ASSERT_NO_FATAL_FAILURE(getInputBufferInfo(inputFile, inputInfo));
+    // Remove CSD data from input
+    mNumCsds[0] = 0;
+    status = addWriterSource(isAudio, param);
+    if (status != OK) {
+        ASSERT_TRUE(addSourceFailExpected)
+                << "Failed to add source for " << writerFormat << " writer";
+        ALOGV("%s writer failed to addSource after removing CSD from input", writerFormat.c_str());
+        return;
+    }
+
+    status = mWriter->start(mFileMeta.get());
+    ASSERT_EQ((status_t)OK, status) << "Could not start " << writerFormat << "writer";
+
+    // Skip first few frames. These may contain sync frames also.
+    int32_t frameID = mInputFrameId[0] + mBufferInfo[0].size() / 4;
+    status = sendBuffersToWriter(mInputStream[0], mBufferInfo[0], frameID, mCurrentTrack[0], 0,
+                                 mBufferInfo[0].size());
+    ASSERT_EQ((status_t)OK, status) << writerFormat << " writer failed";
+
+    status = mCurrentTrack[0]->stop();
+    ASSERT_EQ((status_t)OK, status) << "Failed to stop the track";
+
+    Vector<String16> args;
+    status = mWriter->dump(fd, args);
+    ASSERT_EQ((status_t)OK, status) << "Failed to dump statistics from writer";
+
+    status = mWriter->stop();
+    ASSERT_EQ((status_t)OK, status) << "Failed to stop " << writerFormat << " writer";
+    close(fd);
+}
+
+// This test is specific to MPEG4Writer to test more APIs
+TEST_P(WriteFunctionalityTest, Mpeg4WriterTest) {
+    if (mDisableTest) return;
+    if (mWriterName != standardWriters::MPEG4) return;
+    ALOGV("Test MPEG4 writer specific APIs");
+
+    inputId inpId = get<1>(GetParam());
+    int32_t fd =
+            open(OUTPUT_FILE_NAME, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
+    ASSERT_GE(fd, 0) << "Failed to open output file to dump writer's data";
+
+    int32_t status = createWriter(fd);
+    ASSERT_EQ(status, (status_t)OK) << "Failed to create writer for mpeg4 output format";
+
+    string inputFile = gEnv->getRes();
+    string inputInfo = gEnv->getRes();
+    configFormat param;
+    bool isAudio;
+    ASSERT_NE(inpId, UNUSED_ID) << "Test expects first inputId to be a valid id";
+
+    getFileDetails(inputFile, inputInfo, param, isAudio, inpId);
+    ASSERT_NE(inputFile.compare(gEnv->getRes()), 0) << "No input file specified";
+
+    ASSERT_NO_FATAL_FAILURE(getInputBufferInfo(inputFile, inputInfo));
+    status = addWriterSource(isAudio, param);
+    ASSERT_EQ((status_t)OK, status) << "Failed to add source for mpeg4 Writer";
+
+    // signal meta data for the writer
+    sp<MPEG4Writer> mp4writer = static_cast<MPEG4Writer *>(mWriter.get());
+    status = mp4writer->setInterleaveDuration(kDefaultInterleaveDuration);
+    ASSERT_EQ((status_t)OK, status) << "setInterleaveDuration failed";
+
+    status = mp4writer->setGeoData(kDefaultLatitudex10000, kDefaultLongitudex10000);
+    ASSERT_EQ((status_t)OK, status) << "setGeoData failed";
+
+    status = mp4writer->setCaptureRate(kDefaultFPS);
+    ASSERT_EQ((status_t)OK, status) << "setCaptureRate failed";
+
+    status = mWriter->start(mFileMeta.get());
+    ASSERT_EQ((status_t)OK, status) << "Could not start the writer";
+
+    status = sendBuffersToWriter(mInputStream[0], mBufferInfo[0], mInputFrameId[0],
+                                 mCurrentTrack[0], 0, mBufferInfo[0].size());
+    ASSERT_EQ((status_t)OK, status) << "mpeg4 writer failed";
+
+    status = mCurrentTrack[0]->stop();
+    ASSERT_EQ((status_t)OK, status) << "Failed to stop the track";
+
+    status = mWriter->stop();
+    ASSERT_EQ((status_t)OK, status) << "Failed to stop the writer";
+    mp4writer.clear();
+    close(fd);
+}
+
+class ListenerTest
+    : public WriterTest,
+      public ::testing::TestWithParam<tuple<
+              string /* writerFormat*/, inputId /* inputId0*/, inputId /* inputId1*/,
+              float /* FileSizeLimit*/, float /* FileDurationLimit*/, float /* BufferInterval*/>> {
+  public:
+    virtual void SetUp() override { setupWriterType(get<0>(GetParam())); }
+};
+
+TEST_P(ListenerTest, SetMaxFileLimitsTest) {
+    // TODO(b/151892414): Enable test for other writers
+    if (mDisableTest || mWriterName != MPEG4) return;
+    ALOGV("Validates writer when max file limits are set");
+
+    string writerFormat = get<0>(GetParam());
+    string outputFile = OUTPUT_FILE_NAME;
+    int32_t fd =
+            open(outputFile.c_str(), O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
+    ASSERT_GE(fd, 0) << "Failed to open output file to dump writer's data";
+
+    int32_t status = createWriter(fd);
+    ASSERT_EQ((status_t)OK, status) << "Failed to create writer for output format:" << writerFormat;
+
+    inputId inpId[] = {get<1>(GetParam()), get<2>(GetParam())};
+    ASSERT_NE(inpId[0], UNUSED_ID) << "Test expects first inputId to be a valid id";
+
+    size_t inputFileSize = 0;
+    int64_t lastFrameTimeStampUs = INT_MAX;
+    int32_t numTracks = 1;
+    if (inpId[1] != UNUSED_ID) {
+        numTracks++;
+    }
+    for (int32_t idx = 0; idx < numTracks; idx++) {
+        string inputFile = gEnv->getRes();
+        string inputInfo = gEnv->getRes();
+        configFormat param;
+        bool isAudio;
+        getFileDetails(inputFile, inputInfo, param, isAudio, inpId[idx]);
+        ASSERT_NE(inputFile.compare(gEnv->getRes()), 0) << "No input file specified";
+
+        ASSERT_NO_FATAL_FAILURE(getInputBufferInfo(inputFile, inputInfo, idx));
+        status = addWriterSource(isAudio, param, idx);
+        ASSERT_EQ((status_t)OK, status) << "Failed to add source for " << writerFormat << "Writer";
+
+        // Read file properties
+        struct stat buf;
+        status = stat(inputFile.c_str(), &buf);
+        ASSERT_EQ(0, status);
+
+        inputFileSize += buf.st_size;
+        if (lastFrameTimeStampUs > mBufferInfo[idx][mBufferInfo[idx].size() - 1].timeUs) {
+            lastFrameTimeStampUs = mBufferInfo[idx][mBufferInfo[idx].size() - 1].timeUs;
+        }
+    }
+
+    float fileSizeLimit = get<3>(GetParam());
+    float fileDurationLimit = get<4>(GetParam());
+    int64_t maxFileSize = 0;
+    int64_t maxFileDuration = 0;
+    if (fileSizeLimit > 0) {
+        maxFileSize = (int64_t)(fileSizeLimit * inputFileSize);
+        mWriter->setMaxFileSize(maxFileSize);
+    }
+    if (fileDurationLimit > 0) {
+        maxFileDuration = (int64_t)(fileDurationLimit * lastFrameTimeStampUs);
+        mWriter->setMaxFileDuration(maxFileDuration);
+    }
+
+    sp<WriterListener> listener = new WriterListener();
+    ASSERT_NE(listener, nullptr) << "unable to allocate listener";
+
+    mWriter->setListener(listener);
+    status = mWriter->start(mFileMeta.get());
+    ASSERT_EQ((status_t)OK, status);
+
+    float interval = get<5>(GetParam());
+    ASSERT_LE(interval, 1.0f) << "Buffer interval invalid. Should be less than or equal to 1.0";
+
+    size_t range = 0;
+    int32_t loopCount = 0;
+    int32_t offset[kMaxTrackCount]{};
+    while (loopCount < ceil(1.0 / interval)) {
+        for (int32_t idx = 0; idx < numTracks; idx++) {
+            range = mBufferInfo[idx].size() * interval;
+            status = sendBuffersToWriter(mInputStream[idx], mBufferInfo[idx], mInputFrameId[idx],
+                                         mCurrentTrack[idx], offset[idx], range, false, listener);
+            ASSERT_EQ((status_t)OK, status) << writerFormat << " writer failed";
+            offset[idx] += range;
+        }
+        loopCount++;
+    }
+
+    ASSERT_TRUE(mWriter->reachedEOS()) << "EOS not signalled.";
+
+    for (int32_t idx = 0; idx < kMaxTrackCount; idx++) {
+        if (mCurrentTrack[idx]) {
+            mCurrentTrack[idx]->stop();
+        }
+    }
+
+    status = mWriter->stop();
+    ASSERT_EQ((status_t)OK, status) << "Failed to stop the writer";
+    close(fd);
+
+    if (maxFileSize <= 0) {
+        ASSERT_FALSE(listener->mSignaledSize);
+    } else if (maxFileDuration <= 0) {
+        ASSERT_FALSE(listener->mSignaledDuration);
+    } else if (maxFileSize > 0 && maxFileDuration <= 0) {
+        ASSERT_TRUE(listener->mSignaledSize);
+    } else if (maxFileDuration > 0 && maxFileSize <= 0) {
+        ASSERT_TRUE(listener->mSignaledDuration);
+    } else {
+        ASSERT_TRUE(listener->mSignaledSize || listener->mSignaledDuration);
+    }
+
+    if (maxFileSize > 0) {
+        struct stat buf;
+        status = stat(outputFile.c_str(), &buf);
+        ASSERT_EQ(0, status);
+        ASSERT_LE(buf.st_size, maxFileSize);
+    }
+}
+
+// TODO: (b/150923387)
+// Add WEBM input
+INSTANTIATE_TEST_SUITE_P(ListenerTestAll, ListenerTest,
+                         ::testing::Values(make_tuple("aac", AAC_1, UNUSED_ID, 0.6, 0.7, 1),
+                                           make_tuple("amrnb", AMR_NB_1, UNUSED_ID, 0.2, 0.6, 1),
+                                           make_tuple("amrwb", AMR_WB_1, UNUSED_ID, 0.5, 0.5, 1),
+                                           make_tuple("mpeg2Ts", AAC_1, UNUSED_ID, 0.2, 1, 1),
+                                           make_tuple("mpeg4", AAC_1, UNUSED_ID, 0.4, 0.3, 0.25),
+                                           make_tuple("mpeg4", AAC_1, UNUSED_ID, 0.3, 1, 0.5),
+                                           make_tuple("ogg", OPUS_1, UNUSED_ID, 0.7, 0.3, 1)));
+
 // TODO: (b/144476164)
 // Add AAC_ADTS, FLAC, AV1 input
-INSTANTIATE_TEST_SUITE_P(WriterTestAll, WriterTest,
-                         ::testing::Values(make_pair("ogg", 0), make_pair("webm", 0),
-                                           make_pair("aac", 1), make_pair("mpeg4", 1),
-                                           make_pair("amrnb", 3), make_pair("amrwb", 4),
-                                           make_pair("webm", 5), make_pair("webm", 7),
-                                           make_pair("webm", 8), make_pair("mpeg4", 9),
-                                           make_pair("mpeg4", 10), make_pair("mpeg4", 12),
-                                           make_pair("mpeg4", 13), make_pair("mpeg2Ts", 1),
-                                           make_pair("mpeg2Ts", 9)));
+INSTANTIATE_TEST_SUITE_P(
+        WriterTestAll, WriteFunctionalityTest,
+        ::testing::Values(
+                make_tuple("aac", AAC_1, UNUSED_ID, 1),
+
+                make_tuple("amrnb", AMR_NB_1, UNUSED_ID, 1),
+                make_tuple("amrwb", AMR_WB_1, UNUSED_ID, 1),
+
+                // TODO(b/144902018): Enable test for mpeg2ts
+                // make_tuple("mpeg2Ts", AAC_1, UNUSED_ID, 1),
+                // make_tuple("mpeg2Ts", AVC_1, UNUSED_ID, 1),
+                // TODO(b/156355857): Add multitrack for mpeg2ts
+                // make_tuple("mpeg2Ts", AAC_1, AVC_1, 0.50),
+                // make_tuple("mpeg2Ts", AVC_1, AAC_1, 0.25),
+
+                make_tuple("mpeg4", AAC_1, UNUSED_ID, 1),
+                make_tuple("mpeg4", AMR_NB_1, UNUSED_ID, 1),
+                make_tuple("mpeg4", AMR_WB_1, UNUSED_ID, 1),
+                make_tuple("mpeg4", AVC_1, UNUSED_ID, 1),
+                make_tuple("mpeg4", H263_1, UNUSED_ID, 1),
+                make_tuple("mpeg4", HEIC_1, UNUSED_ID, 1),
+                make_tuple("mpeg4", HEVC_1, UNUSED_ID, 1),
+                make_tuple("mpeg4", MPEG4_1, UNUSED_ID, 1),
+                make_tuple("mpeg4", AAC_1, AVC_1, 0.25),
+                make_tuple("mpeg4", AVC_1, AAC_1, 0.75),
+                make_tuple("mpeg4", AMR_WB_1, AAC_1, 0.75),
+                make_tuple("mpeg4", HEVC_1, AMR_WB_1, 0.25),
+                make_tuple("mpeg4", H263_1, AMR_NB_1, 0.50),
+                make_tuple("mpeg4", MPEG4_1, AAC_1, 0.75),
+                make_tuple("mpeg4", AMR_NB_1, AMR_WB_1, 0.25),
+                make_tuple("mpeg4", H263_1, AMR_NB_1, 0.50),
+                make_tuple("mpeg4", MPEG4_1, HEVC_1, 0.75),
+
+                make_tuple("ogg", OPUS_1, UNUSED_ID, 1),
+
+                make_tuple("webm", OPUS_1, UNUSED_ID, 1),
+                make_tuple("webm", VORBIS_1, UNUSED_ID, 1),
+                make_tuple("webm", VP8_1, UNUSED_ID, 1),
+                make_tuple("webm", VP9_1, UNUSED_ID, 1),
+                make_tuple("webm", VP8_1, OPUS_1, 0.50),
+                make_tuple("webm", VORBIS_1, VP8_1, 0.25)));
+
+INSTANTIATE_TEST_SUITE_P(
+        WriterValidityTest, WriterValidityTest,
+        ::testing::Values(
+                make_tuple("aac", AAC_1, true),
+
+                make_tuple("amrnb", AMR_NB_1, true),
+                make_tuple("amrwb", AMR_WB_1, true),
+
+                make_tuple("mpeg4", AAC_1, false),
+                make_tuple("mpeg4", AMR_NB_1, false),
+                make_tuple("mpeg4", AVC_1, false),
+                make_tuple("mpeg4", H263_1, false),
+                make_tuple("mpeg4", HEIC_1, false),
+                make_tuple("mpeg4", HEVC_1, false),
+                make_tuple("mpeg4", MPEG4_1, false),
+
+                make_tuple("ogg", OPUS_1, true),
+
+                make_tuple("webm", OPUS_1, false),
+                make_tuple("webm", VORBIS_1, true),
+                make_tuple("webm", VP8_1, false),
+                make_tuple("webm", VP9_1, false)));
 
 int main(int argc, char **argv) {
+    ProcessState::self()->startThreadPool();
     gEnv = new WriterTestEnvironment();
     ::testing::AddGlobalTestEnvironment(gEnv);
     ::testing::InitGoogleTest(&argc, argv);
diff --git a/media/libstagefright/tests/writer/WriterTestEnvironment.h b/media/libstagefright/tests/writer/WriterTestEnvironment.h
index 99e686f..7da0a62 100644
--- a/media/libstagefright/tests/writer/WriterTestEnvironment.h
+++ b/media/libstagefright/tests/writer/WriterTestEnvironment.h
@@ -25,7 +25,7 @@
 
 class WriterTestEnvironment : public ::testing::Environment {
   public:
-    WriterTestEnvironment() : res("/data/local/tmp/") {}
+    WriterTestEnvironment() : res("/data/local/tmp/"), deleteOutput(true) {}
 
     // Parses the command line arguments
     int initFromOptions(int argc, char **argv);
@@ -34,16 +34,21 @@
 
     const string getRes() const { return res; }
 
+    bool cleanUp() const { return deleteOutput; }
+
   private:
     string res;
+    bool deleteOutput;
 };
 
 int WriterTestEnvironment::initFromOptions(int argc, char **argv) {
-    static struct option options[] = {{"res", required_argument, 0, 'P'}, {0, 0, 0, 0}};
+    static struct option options[] = {{"res", required_argument, 0, 'P'},
+                                      {"cleanUp", optional_argument, 0, 'C'},
+                                      {0, 0, 0, 0}};
 
     while (true) {
         int index = 0;
-        int c = getopt_long(argc, argv, "P:", options, &index);
+        int c = getopt_long(argc, argv, "P:C:", options, &index);
         if (c == -1) {
             break;
         }
@@ -52,6 +57,11 @@
             case 'P':
                 setRes(optarg);
                 break;
+            case 'C':
+                if (!strcmp(optarg, "false")) {
+                    deleteOutput = false;
+                }
+                break;
             default:
                 break;
         }
@@ -62,7 +72,8 @@
                 "unrecognized option: %s\n\n"
                 "usage: %s <gtest options> <test options>\n\n"
                 "test options are:\n\n"
-                "-P, --path: Resource files directory location\n",
+                "-P, --path: Resource files directory location\n"
+                "-C, default:true. Delete output file after test completes\n",
                 argv[optind ?: 1], argv[0]);
         return 2;
     }
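For reference, a usage sketch of the new `-C` option handled above (device paths as in the writer README): passing `-C false` keeps the muxed output file on the device for inspection instead of deleting it after the test completes.

```
adb shell /data/local/tmp/writerTest -P /data/local/tmp/WriterTestRes/ -C false
```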
diff --git a/media/libstagefright/tests/writer/WriterUtility.cpp b/media/libstagefright/tests/writer/WriterUtility.cpp
index f24ccb6..a3043fe 100644
--- a/media/libstagefright/tests/writer/WriterUtility.cpp
+++ b/media/libstagefright/tests/writer/WriterUtility.cpp
@@ -24,9 +24,16 @@
 
 int32_t sendBuffersToWriter(ifstream &inputStream, vector<BufferInfo> &bufferInfo,
                             int32_t &inputFrameId, sp<MediaAdapter> &currentTrack, int32_t offset,
-                            int32_t range, bool isPaused) {
+                            int32_t range, bool isPaused, sp<WriterListener> listener) {
     while (1) {
         if (inputFrameId >= (int)bufferInfo.size() || inputFrameId >= (offset + range)) break;
+        if (listener != nullptr) {
+            if (listener->mSignaledDuration || listener->mSignaledSize) {
+                ALOGV("Max File limit reached. No more buffers will be sent to the writer");
+                break;
+            }
+        }
+
         int32_t size = bufferInfo[inputFrameId].size;
         char *data = (char *)malloc(size);
         if (!data) {
diff --git a/media/libstagefright/tests/writer/WriterUtility.h b/media/libstagefright/tests/writer/WriterUtility.h
index cdd6246..6b456fb 100644
--- a/media/libstagefright/tests/writer/WriterUtility.h
+++ b/media/libstagefright/tests/writer/WriterUtility.h
@@ -27,13 +27,19 @@
 
 #include <media/stagefright/MediaAdapter.h>
 
-using namespace android;
-using namespace std;
+#include "WriterListener.h"
 
 #define CODEC_CONFIG_FLAG 32
 
+constexpr uint32_t kMaxTrackCount = 2;
 constexpr uint32_t kMaxCSDStrlen = 16;
 constexpr uint32_t kMaxCount = 20;
+constexpr int32_t kMimeSize = 128;
+constexpr int32_t kDefaultInterleaveDuration = 0;
+// Geodata is set according to ISO-6709 standard.
+constexpr int32_t kDefaultLatitudex10000 = 500000;
+constexpr int32_t kDefaultLongitudex10000 = 1000000;
+constexpr float kDefaultFPS = 30.0f;
 
 struct BufferInfo {
     int32_t size;
@@ -41,9 +47,18 @@
     int64_t timeUs;
 };
 
+struct configFormat {
+    char mime[kMimeSize];
+    int32_t width;
+    int32_t height;
+    int32_t sampleRate;
+    int32_t channelCount;
+};
+
 int32_t sendBuffersToWriter(ifstream &inputStream, vector<BufferInfo> &bufferInfo,
                             int32_t &inputFrameId, sp<MediaAdapter> &currentTrack, int32_t offset,
-                            int32_t range, bool isPaused = false);
+                            int32_t range, bool isPaused = false,
+                            sp<WriterListener> listener = nullptr);
 
 int32_t writeHeaderBuffers(ifstream &inputStream, vector<BufferInfo> &bufferInfo,
                            int32_t &inputFrameId, sp<AMessage> &format, int32_t numCsds);
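A short sketch of the extended helper in use (editor's illustration; the surrounding writer/track/stream setup is assumed to match what WriterTest.cpp builds, and the 2-second limit is a placeholder): when a listener is supplied, sendBuffersToWriter stops submitting buffers as soon as either limit flag is raised.

```
#include <media/stagefright/MediaWriter.h>

#include "WriterUtility.h"

// Feed one track while honoring a writer duration limit (sketch only).
static int32_t sendTrackWithLimit(const sp<MediaWriter> &writer, sp<MediaAdapter> &track,
                                  ifstream &stream, vector<BufferInfo> &bufferInfo,
                                  int32_t &frameId) {
    constexpr int64_t kPlaceholderDurationUs = 2 * 1000 * 1000;  // 2 seconds, illustrative
    sp<WriterListener> listener = new WriterListener();
    writer->setMaxFileDuration(kPlaceholderDurationUs);
    writer->setListener(listener);
    // The helper's loop breaks early once mSignaledSize or mSignaledDuration is set.
    return sendBuffersToWriter(stream, bufferInfo, frameId, track, 0 /* offset */,
                               bufferInfo.size() /* range */, false /* isPaused */, listener);
}
```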
diff --git a/media/libstagefright/timedtext/TEST_MAPPING b/media/libstagefright/timedtext/TEST_MAPPING
new file mode 100644
index 0000000..35a5b11
--- /dev/null
+++ b/media/libstagefright/timedtext/TEST_MAPPING
@@ -0,0 +1,9 @@
+// mappings for frameworks/av/media/libstagefright/timedtext
+{
+  // tests which require dynamic content
+  // invoke with: atest -- --enable-module-dynamic-download=true
+  // TODO(b/148094059): unit tests not allowed to download content
+  "dynamic-presubmit": [
+    { "name": "TimedTextUnitTest" }
+  ]
+}
diff --git a/media/libstagefright/timedtext/test/Android.bp b/media/libstagefright/timedtext/test/Android.bp
new file mode 100644
index 0000000..11e5077
--- /dev/null
+++ b/media/libstagefright/timedtext/test/Android.bp
@@ -0,0 +1,54 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+cc_test {
+    name: "TimedTextUnitTest",
+    test_suites: ["device-tests"],
+    gtest: true,
+
+    srcs: [
+        "TimedTextUnitTest.cpp",
+    ],
+
+    static_libs: [
+        "libstagefright_timedtext",
+        "libstagefright_foundation",
+    ],
+
+    include_dirs: [
+        "frameworks/av/media/libstagefright",
+    ],
+
+    shared_libs: [
+        "liblog",
+        "libmedia",
+        "libbinder",
+    ],
+
+    cflags: [
+        "-Wno-multichar",
+        "-Werror",
+        "-Wall",
+    ],
+
+    sanitize: {
+        cfi: true,
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+    },
+}
diff --git a/media/libstagefright/timedtext/test/AndroidTest.xml b/media/libstagefright/timedtext/test/AndroidTest.xml
new file mode 100644
index 0000000..3654e23
--- /dev/null
+++ b/media/libstagefright/timedtext/test/AndroidTest.xml
@@ -0,0 +1,31 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<configuration description="Test module config for TimedText unit test">
+    <option name="test-suite-tag" value="TimedTextUnitTest" />
+    <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
+        <option name="cleanup" value="true" />
+        <option name="push" value="TimedTextUnitTest->/data/local/tmp/TimedTextUnitTest" />
+        <option name="push-file"
+            key="https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/timedtext/test/TimedTextUnitTest.zip?unzip=true"
+            value="/data/local/tmp/TimedTextUnitTestRes/" />
+    </target_preparer>
+
+    <test class="com.android.tradefed.testtype.GTest" >
+        <option name="native-test-device-path" value="/data/local/tmp" />
+        <option name="module-name" value="TimedTextUnitTest" />
+        <option name="native-test-flag" value="-P /data/local/tmp/TimedTextUnitTestRes/" />
+    </test>
+</configuration>
diff --git a/media/libstagefright/timedtext/test/README.md b/media/libstagefright/timedtext/test/README.md
new file mode 100644
index 0000000..3a774bd
--- /dev/null
+++ b/media/libstagefright/timedtext/test/README.md
@@ -0,0 +1,40 @@
+## Media Testing ##
+---
+#### TimedText Unit Test:
+The TimedText Unit Test Suite validates the TextDescriptions class available in libstagefright.
+
+Run the following steps to build the test suite:
+```
+m TimedTextUnitTest
+```
+
+The 32-bit binaries will be created in the following path: ${OUT}/data/nativetest/
+
+The 64-bit binaries will be created in the following path: ${OUT}/data/nativetest64/
+
+To test the 64-bit binary, push it from nativetest64.
+```
+adb push ${OUT}/data/nativetest64/TimedTextUnitTest/TimedTextUnitTest /data/local/tmp/
+```
+
+To test the 32-bit binary, push it from nativetest.
+```
+adb push ${OUT}/data/nativetest/TimedTextUnitTest/TimedTextUnitTest /data/local/tmp/
+```
+
+The resource files for the tests are taken from [here](https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/timedtext/test/TimedTextUnitTest.zip).
+Download, unzip, and push these files to the device for testing.
+
+```
+adb push TimedTextUnitTestRes/. /data/local/tmp/
+```
+
+usage: TimedTextUnitTest -P \<path_to_folder\>
+```
+adb shell /data/local/tmp/TimedTextUnitTest -P /data/local/tmp/TimedTextUnitTestRes/
+```
+Alternatively, the test can be run using the atest command.
+
+```
+atest TimedTextUnitTest -- --enable-module-dynamic-download=true
+```
diff --git a/media/libstagefright/timedtext/test/TimedTextTestEnvironment.h b/media/libstagefright/timedtext/test/TimedTextTestEnvironment.h
new file mode 100644
index 0000000..52280c1
--- /dev/null
+++ b/media/libstagefright/timedtext/test/TimedTextTestEnvironment.h
@@ -0,0 +1,72 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __TIMEDTEXT_TEST_ENVIRONMENT_H__
+#define __TIMEDTEXT_TEST_ENVIRONMENT_H__
+
+#include <gtest/gtest.h>
+
+#include <getopt.h>
+
+using namespace std;
+
+class TimedTextTestEnvironment : public ::testing::Environment {
+  public:
+    TimedTextTestEnvironment() : res("/data/local/tmp/") {}
+
+    // Parses the command line arguments
+    int initFromOptions(int argc, char **argv);
+
+    void setRes(const char *_res) { res = _res; }
+
+    const string getRes() const { return res; }
+
+  private:
+    string res;
+};
+
+int TimedTextTestEnvironment::initFromOptions(int argc, char **argv) {
+    static struct option options[] = {{"res", required_argument, 0, 'P'}, {0, 0, 0, 0}};
+
+    while (true) {
+        int index = 0;
+        int c = getopt_long(argc, argv, "P:", options, &index);
+        if (c == -1) {
+            break;
+        }
+
+        switch (c) {
+            case 'P':
+                setRes(optarg);
+                break;
+            default:
+                break;
+        }
+    }
+
+    if (optind < argc) {
+        fprintf(stderr,
+                "unrecognized option: %s\n\n"
+                "usage: %s <gtest options> <test options>\n\n"
+                "test options are:\n\n"
+                "-P, --path: Resource files directory location\n",
+                argv[optind ?: 1], argv[0]);
+        return 2;
+    }
+    return 0;
+}
+
+#endif  // __TIMEDTEXT_TEST_ENVIRONMENT_H__
diff --git a/media/libstagefright/timedtext/test/TimedTextUnitTest.cpp b/media/libstagefright/timedtext/test/TimedTextUnitTest.cpp
new file mode 100644
index 0000000..d85ae39
--- /dev/null
+++ b/media/libstagefright/timedtext/test/TimedTextUnitTest.cpp
@@ -0,0 +1,379 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "TimedTextUnitTest"
+#include <utils/Log.h>
+
+#include <stdio.h>
+#include <string.h>
+#include <sys/stat.h>
+#include <fstream>
+
+#include <binder/Parcel.h>
+#include <media/stagefright/foundation/AString.h>
+#include <media/stagefright/foundation/ByteUtils.h>
+
+#include "timedtext/TextDescriptions.h"
+
+#include "TimedTextTestEnvironment.h"
+
+constexpr int32_t kStartTimeMs = 10000;
+
+enum {
+    // These keys must be in sync with the keys in
+    // frameworks/av/media/libstagefright/timedtext/TextDescriptions.h
+    KEY_DISPLAY_FLAGS = 1,
+    KEY_STYLE_FLAGS = 2,
+    KEY_BACKGROUND_COLOR_RGBA = 3,
+    KEY_HIGHLIGHT_COLOR_RGBA = 4,
+    KEY_SCROLL_DELAY = 5,
+    KEY_WRAP_TEXT = 6,
+    KEY_START_TIME = 7,
+    KEY_STRUCT_BLINKING_TEXT_LIST = 8,
+    KEY_STRUCT_FONT_LIST = 9,
+    KEY_STRUCT_HIGHLIGHT_LIST = 10,
+    KEY_STRUCT_HYPER_TEXT_LIST = 11,
+    KEY_STRUCT_KARAOKE_LIST = 12,
+    KEY_STRUCT_STYLE_LIST = 13,
+    KEY_STRUCT_TEXT_POS = 14,
+    KEY_STRUCT_JUSTIFICATION = 15,
+    KEY_STRUCT_TEXT = 16,
+
+    KEY_GLOBAL_SETTING = 101,
+    KEY_LOCAL_SETTING = 102,
+    KEY_START_CHAR = 103,
+    KEY_END_CHAR = 104,
+    KEY_FONT_ID = 105,
+    KEY_FONT_SIZE = 106,
+    KEY_TEXT_COLOR_RGBA = 107,
+};
+
+struct FontInfo {
+    int32_t displayFlag = -1;
+    int32_t horizontalJustification = -1;
+    int32_t verticalJustification = -1;
+    int32_t rgbaBackground = -1;
+    int32_t leftPos = -1;
+    int32_t topPos = -1;
+    int32_t bottomPos = -1;
+    int32_t rightPos = -1;
+    int32_t startchar = -1;
+    int32_t endChar = -1;
+    int32_t fontId = -1;
+    int32_t faceStyle = -1;
+    int32_t fontSize = -1;
+    int32_t rgbaText = -1;
+    int32_t entryCount = -1;
+};
+
+struct FontRecord {
+    int32_t fontID = -1;
+    int32_t fontNameLength = -1;
+    const uint8_t *font = nullptr;
+};
+
+using namespace android;
+
+static TimedTextTestEnvironment *gEnv = nullptr;
+
+class TimedTextUnitTest : public ::testing::TestWithParam</*filename*/ string> {
+  public:
+    TimedTextUnitTest(){};
+
+    ~TimedTextUnitTest() {
+        if (mEleStream) mEleStream.close();
+    }
+
+    virtual void SetUp() override {
+        mInputFileName = gEnv->getRes() + GetParam();
+        mEleStream.open(mInputFileName, ifstream::binary);
+        ASSERT_EQ(mEleStream.is_open(), true) << "Failed to open " << GetParam();
+
+        struct stat buf;
+        status_t status = stat(mInputFileName.c_str(), &buf);
+        ASSERT_EQ(status, 0) << "Failed to get properties of input file: " << GetParam();
+        mFileSize = buf.st_size;
+        ALOGI("Size of the input file %s = %zu", GetParam().c_str(), mFileSize);
+    }
+
+    string mInputFileName;
+    size_t mFileSize;
+    ifstream mEleStream;
+};
+
+class SRTDescriptionTest : public TimedTextUnitTest {
+  public:
+    virtual void SetUp() override { TimedTextUnitTest::SetUp(); }
+};
+
+class Text3GPPDescriptionTest : public TimedTextUnitTest {
+  public:
+    virtual void SetUp() override { TimedTextUnitTest::SetUp(); }
+};
+
+TEST_P(SRTDescriptionTest, extractSRTDescriptionTest) {
+    char data[mFileSize];
+    mEleStream.read(data, sizeof(data));
+    ASSERT_EQ(mEleStream.gcount(), mFileSize);
+
+    Parcel parcel;
+    int32_t flag = TextDescriptions::OUT_OF_BAND_TEXT_SRT | TextDescriptions::LOCAL_DESCRIPTIONS;
+    status_t status = TextDescriptions::getParcelOfDescriptions((const uint8_t *)data, mFileSize,
+                                                                flag, kStartTimeMs, &parcel);
+    ASSERT_EQ(status, 0) << "getParcelOfDescriptions returned error";
+    ALOGI("Size of the Parcel: %zu", parcel.dataSize());
+    ASSERT_GT(parcel.dataSize(), 0) << "Parcel is empty";
+
+    parcel.setDataPosition(0);
+    int32_t key = parcel.readInt32();
+    ASSERT_EQ(key, KEY_LOCAL_SETTING) << "Parcel has invalid key";
+
+    key = parcel.readInt32();
+    ASSERT_EQ(key, KEY_START_TIME) << "Parcel has invalid start time key";
+    ASSERT_EQ(parcel.readInt32(), kStartTimeMs) << "Parcel has invalid timings";
+
+    key = parcel.readInt32();
+    ASSERT_EQ(key, KEY_STRUCT_TEXT) << "Parcel has invalid struct text key";
+    ASSERT_EQ(parcel.readInt32(), mFileSize) << "Parcel has invalid text data";
+    int32_t fileSize = parcel.readInt32();
+    ASSERT_EQ(fileSize, mFileSize) << "Parcel has invalid file size value";
+    uint8_t tmpData[fileSize];
+    status = parcel.read((void *)tmpData, fileSize);
+    ASSERT_EQ(status, 0) << "Failed to read the data from parcel";
+    // Make sure the end of the parcel has been reached
+    ASSERT_EQ(parcel.dataAvail(), 0) << "Parcel has some data left to read";
+}
+
+// This test uses the properties of the tx3g box described in the 3GPP Timed Text Format
+// specification (TS 26.245), Section 5.16 (Sample Description Format):
+// https://www.3gpp.org/ftp/Specs/archive/26_series/26.245/
+
+TEST_P(Text3GPPDescriptionTest, Text3GPPGlobalDescriptionTest) {
+    char data[mFileSize];
+    mEleStream.read(data, sizeof(data));
+    ASSERT_EQ(mEleStream.gcount(), mFileSize);
+
+    const uint8_t *tmpData = (const uint8_t *)data;
+    int32_t remaining = mFileSize;
+    FontInfo fontInfo;
+    vector<FontRecord> fontRecordEntries;
+
+    // Skipping the bytes containing information about the type of subbox(tx3g)
+    tmpData += 16;
+    remaining -= 16;
+
+    fontInfo.displayFlag = U32_AT(tmpData);
+    ALOGI("Display flag: %d", fontInfo.displayFlag);
+    fontInfo.horizontalJustification = tmpData[4];
+    ALOGI("Horizontal Justification: %d", fontInfo.horizontalJustification);
+    fontInfo.verticalJustification = tmpData[5];
+    ALOGI("Vertical Justification: %d", fontInfo.verticalJustification);
+    fontInfo.rgbaBackground =
+            *(tmpData + 6) << 24 | *(tmpData + 7) << 16 | *(tmpData + 8) << 8 | *(tmpData + 9);
+    ALOGI("rgba value of background: %d", fontInfo.rgbaBackground);
+
+    tmpData += 10;
+    remaining -= 10;
+
+    if (remaining >= 8) {
+        fontInfo.leftPos = U16_AT(tmpData);
+        ALOGI("Left: %d", fontInfo.leftPos);
+        fontInfo.topPos = U16_AT(tmpData + 2);
+        ALOGI("Top: %d", fontInfo.topPos);
+        fontInfo.bottomPos = U16_AT(tmpData + 4);
+        ALOGI("Bottom: %d", fontInfo.bottomPos);
+        fontInfo.rightPos = U16_AT(tmpData + 6);
+        ALOGI("Right: %d", fontInfo.rightPos);
+
+        tmpData += 8;
+        remaining -= 8;
+
+        if (remaining >= 12) {
+            fontInfo.startchar = U16_AT(tmpData);
+            ALOGI("Start character: %d", fontInfo.startchar);
+            fontInfo.endChar = U16_AT(tmpData + 2);
+            ALOGI("End character: %d", fontInfo.endChar);
+            fontInfo.fontId = U16_AT(tmpData + 4);
+            ALOGI("Value of font Identifier: %d", fontInfo.fontId);
+            fontInfo.faceStyle = *(tmpData + 6);
+            ALOGI("Face style flag : %d", fontInfo.faceStyle);
+            fontInfo.fontSize = *(tmpData + 7);
+            ALOGI("Size of the font: %d", fontInfo.fontSize);
+            fontInfo.rgbaText = *(tmpData + 8) << 24 | *(tmpData + 9) << 16 | *(tmpData + 10) << 8 |
+                                *(tmpData + 11);
+            ALOGI("rgba value of the text: %d", fontInfo.rgbaText);
+
+            tmpData += 12;
+            remaining -= 12;
+
+            if (remaining >= 10) {
+                // Skipping the bytes containing information about the type of subbox(ftab)
+                fontInfo.entryCount = U16_AT(tmpData + 8);
+                ALOGI("Value of entry count: %d", fontInfo.entryCount);
+
+                tmpData += 10;
+                remaining -= 10;
+
+                for (int32_t i = 0; i < fontInfo.entryCount; i++) {
+                    if (remaining < 3) break;
+                    int32_t tempFontID = U16_AT(tmpData);
+                    ALOGI("Font Id: %d", tempFontID);
+                    int32_t tempFontNameLength = *(tmpData + 2);
+                    ALOGI("Length of font name: %d", tempFontNameLength);
+
+                    tmpData += 3;
+                    remaining -= 3;
+
+                    if (remaining < tempFontNameLength) break;
+                    const uint8_t *tmpFont = tmpData;
+                    char *tmpFontName = strndup((const char *)tmpFont, tempFontNameLength);
+                    ASSERT_NE(tmpFontName, nullptr) << "Font Name is null";
+                    ALOGI("FontName = %s", tmpFontName);
+                    free(tmpFontName);
+                    tmpData += tempFontNameLength;
+                    remaining -= tempFontNameLength;
+                    fontRecordEntries.push_back({tempFontID, tempFontNameLength, tmpFont});
+                }
+            }
+        }
+    }
+
+    Parcel parcel;
+    int32_t flag = TextDescriptions::IN_BAND_TEXT_3GPP | TextDescriptions::GLOBAL_DESCRIPTIONS;
+    status_t status = TextDescriptions::getParcelOfDescriptions((const uint8_t *)data, mFileSize,
+                                                                flag, kStartTimeMs, &parcel);
+    ASSERT_EQ(status, 0) << "getParcelOfDescriptions returned error";
+    ALOGI("Size of the Parcel: %zu", parcel.dataSize());
+    ASSERT_GT(parcel.dataSize(), 0) << "Parcel is empty";
+
+    parcel.setDataPosition(0);
+    int32_t key = parcel.readInt32();
+    ASSERT_EQ(key, KEY_GLOBAL_SETTING) << "Parcel has invalid key";
+
+    key = parcel.readInt32();
+    ASSERT_EQ(key, KEY_DISPLAY_FLAGS) << "Parcel has invalid DISPLAY FLAGS Key";
+    ASSERT_EQ(parcel.readInt32(), fontInfo.displayFlag)
+            << "Parcel has invalid value of display flag";
+
+    key = parcel.readInt32();
+    ASSERT_EQ(key, KEY_STRUCT_JUSTIFICATION) << "Parcel has invalid STRUCT JUSTIFICATION key";
+    ASSERT_EQ(parcel.readInt32(), fontInfo.horizontalJustification)
+            << "Parcel has invalid value of Horizontal justification";
+    ASSERT_EQ(parcel.readInt32(), fontInfo.verticalJustification)
+            << "Parcel has invalid value of Vertical justification";
+
+    key = parcel.readInt32();
+    ASSERT_EQ(key, KEY_BACKGROUND_COLOR_RGBA) << "Parcel has invalid BACKGROUND COLOR key";
+    ASSERT_EQ(parcel.readInt32(), fontInfo.rgbaBackground)
+            << "Parcel has invalid rgba background color value";
+
+    if (parcel.dataAvail() == 0) {
+        ALOGV("Completed reading the parcel");
+        return;
+    }
+
+    key = parcel.readInt32();
+    ASSERT_EQ(key, KEY_STRUCT_TEXT_POS) << "Parcel has invalid STRUCT TEXT POSITION key";
+    ASSERT_EQ(parcel.readInt32(), fontInfo.leftPos)
+            << "Parcel has invalid left position value";
+    ASSERT_EQ(parcel.readInt32(), fontInfo.topPos)
+            << "Parcel has invalid top position value";
+    ASSERT_EQ(parcel.readInt32(), fontInfo.bottomPos)
+            << "Parcel has invalid bottom position value";
+    ASSERT_EQ(parcel.readInt32(), fontInfo.rightPos)
+            << "Parcel has invalid right position value";
+
+    if (parcel.dataAvail() == 0) {
+        ALOGV("Completed reading the parcel");
+        return;
+    }
+
+    key = parcel.readInt32();
+    ASSERT_EQ(key, KEY_STRUCT_STYLE_LIST) << "Parcel has invalid STRUCT STYLE LIST key";
+
+    key = parcel.readInt32();
+    ASSERT_EQ(key, KEY_START_CHAR) << "Parcel has invalid START CHAR key";
+    ASSERT_EQ(parcel.readInt32(), fontInfo.startchar)
+            << "Parcel has invalid value of start character";
+
+    key = parcel.readInt32();
+    ASSERT_EQ(key, KEY_END_CHAR) << "Parcel has invalid END CHAR key";
+    ASSERT_EQ(parcel.readInt32(), fontInfo.endChar) << "Parcel has invalid value of end character";
+
+    key = parcel.readInt32();
+    ASSERT_EQ(key, KEY_FONT_ID) << "Parcel has invalid FONT ID key";
+    ASSERT_EQ(parcel.readInt32(), fontInfo.fontId) << "Parcel has invalid value of font Id";
+
+    key = parcel.readInt32();
+    ASSERT_EQ(key, KEY_STYLE_FLAGS) << "Parcel has invalid STYLE FLAGS key";
+    ASSERT_EQ(parcel.readInt32(), fontInfo.faceStyle) << "Parcel has invalid value of style flags";
+
+    key = parcel.readInt32();
+    ASSERT_EQ(key, KEY_FONT_SIZE) << "Parcel has invalid FONT SIZE key";
+    ASSERT_EQ(parcel.readInt32(), fontInfo.fontSize) << "Parcel has invalid value of font size";
+
+    key = parcel.readInt32();
+    ASSERT_EQ(key, KEY_TEXT_COLOR_RGBA) << "Parcel has invalid TEXT COLOR RGBA key";
+    ASSERT_EQ(parcel.readInt32(), fontInfo.rgbaText) << "Parcel has invalid rgba text color value";
+
+    if (parcel.dataAvail() == 0) {
+        ALOGV("Completed reading the parcel");
+        return;
+    }
+
+    key = parcel.readInt32();
+    ASSERT_EQ(key, KEY_STRUCT_FONT_LIST) << "Parcel has invalid STRUCT FONT LIST key";
+    ASSERT_EQ(parcel.readInt32(), fontInfo.entryCount) << "Parcel has invalid value of entry count";
+    ASSERT_EQ(fontInfo.entryCount, fontRecordEntries.size())
+            << "Array size does not match expected number of entries";
+    for (int32_t i = 0; i < fontInfo.entryCount; i++) {
+        ASSERT_EQ(parcel.readInt32(), fontRecordEntries[i].fontID)
+                << "Parcel has invalid value of font Id";
+        ASSERT_EQ(parcel.readInt32(), fontRecordEntries[i].fontNameLength)
+                << "Parcel has invalid value of font name length";
+        uint8_t fontName[fontRecordEntries[i].fontNameLength];
+        // data written with writeByteArray() is stored as a count followed by the actual bytes
+        ASSERT_EQ(parcel.readInt32(), fontRecordEntries[i].fontNameLength);
+        status = parcel.read((void *)fontName, fontRecordEntries[i].fontNameLength);
+        ASSERT_EQ(status, 0) << "Failed to read the font name from parcel";
+        ASSERT_EQ(memcmp(fontName, fontRecordEntries[i].font, fontRecordEntries[i].fontNameLength),
+                  0)
+                << "Parcel has invalid font";
+    }
+    // Make sure the end of the parcel has been reached
+    ASSERT_EQ(parcel.dataAvail(), 0) << "Parcel has some data left to read";
+}
+
+INSTANTIATE_TEST_SUITE_P(TimedTextUnitTestAll, SRTDescriptionTest,
+                         ::testing::Values(("sampleTest1.srt"),
+                                           ("sampleTest2.srt")));
+
+INSTANTIATE_TEST_SUITE_P(TimedTextUnitTestAll, Text3GPPDescriptionTest,
+                         ::testing::Values(("tx3gBox1"),
+                                           ("tx3gBox2")));
+
+int main(int argc, char **argv) {
+    gEnv = new TimedTextTestEnvironment();
+    ::testing::AddGlobalTestEnvironment(gEnv);
+    ::testing::InitGoogleTest(&argc, argv);
+    int status = gEnv->initFromOptions(argc, argv);
+    if (status == 0) {
+        status = RUN_ALL_TESTS();
+        ALOGV("Test result = %d\n", status);
+    }
+    return status;
+}
diff --git a/media/libstagefright/xmlparser/MediaCodecsXmlParser.cpp b/media/libstagefright/xmlparser/MediaCodecsXmlParser.cpp
index a232150..dbdb43c 100644
--- a/media/libstagefright/xmlparser/MediaCodecsXmlParser.cpp
+++ b/media/libstagefright/xmlparser/MediaCodecsXmlParser.cpp
@@ -493,7 +493,7 @@
       mPath(path),
       mStatus(NO_INIT) {
     // determine href_base
-    std::string::size_type end = path.rfind("/");
+    std::string::size_type end = path.rfind('/');
     if (end != std::string::npos) {
         mHrefBase = path.substr(0, end + 1);
     }
@@ -1525,7 +1525,7 @@
                 nodeInfo.attributeList.push_back(Attribute{"rank", rank});
             }
             nodeList->insert(std::make_pair(
-                    std::move(order), std::move(nodeInfo)));
+                    order, std::move(nodeInfo)));
         }
     }
 }
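For context on the two MediaCodecsXmlParser.cpp hunks above: rfind('/') takes the single-character overload, so no temporary std::string is built for the needle, and std::move on a trivially copyable size_t key is a no-op, which is why it was dropped. A minimal sketch of the href_base derivation under that pattern (hypothetical helper name, not from the source):

```
#include <string>

// Derive the directory portion of a path, as the parser does for href_base.
std::string hrefBaseOf(const std::string &path) {
    // The char overload of rfind avoids constructing a temporary std::string.
    std::string::size_type end = path.rfind('/');
    return (end != std::string::npos) ? path.substr(0, end + 1) : std::string();
}
```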
diff --git a/media/libstagefright/xmlparser/TEST_MAPPING b/media/libstagefright/xmlparser/TEST_MAPPING
new file mode 100644
index 0000000..8626d72
--- /dev/null
+++ b/media/libstagefright/xmlparser/TEST_MAPPING
@@ -0,0 +1,6 @@
+// test mapping for frameworks/av/media/libstagefright/xmlparser
+{
+  "presubmit": [
+    { "name": "XMLParserTest" }
+  ]
+}
diff --git a/media/libstagefright/xmlparser/test/Android.bp b/media/libstagefright/xmlparser/test/Android.bp
new file mode 100644
index 0000000..ba02f84
--- /dev/null
+++ b/media/libstagefright/xmlparser/test/Android.bp
@@ -0,0 +1,53 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+cc_test {
+    name: "XMLParserTest",
+    test_suites: ["device-tests"],
+    gtest: true,
+
+    srcs: [
+        "XMLParserTest.cpp",
+    ],
+
+    shared_libs: [
+        "liblog",
+        "libstagefright_xmlparser",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+
+    data: [":xmlparsertest_test_files",],
+
+    sanitize: {
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+        cfi: true,
+    },
+}
+
+filegroup {
+    name: "xmlparsertest_test_files",
+    srcs: [
+        "testdata/media_codecs_unit_test.xml",
+        "testdata/media_codecs_unit_test_caller.xml",
+    ],
+}
diff --git a/media/libstagefright/xmlparser/test/AndroidTest.xml b/media/libstagefright/xmlparser/test/AndroidTest.xml
new file mode 100644
index 0000000..2e11b1b
--- /dev/null
+++ b/media/libstagefright/xmlparser/test/AndroidTest.xml
@@ -0,0 +1,28 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<configuration description="Test module config for xml parser unit test">
+    <option name="test-suite-tag" value="XMLParserTest" />
+    <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
+        <option name="cleanup" value="true" />
+        <option name="push" value="media_codecs_unit_test.xml->/data/local/tmp/media_codecs_unit_test.xml" />
+        <option name="push" value="media_codecs_unit_test_caller.xml->/data/local/tmp/media_codecs_unit_test_caller.xml" />
+        <option name="push" value="XMLParserTest->/data/local/tmp/XMLParserTest" />
+    </target_preparer>
+   <test class="com.android.tradefed.testtype.GTest" >
+        <option name="native-test-device-path" value="/data/local/tmp" />
+        <option name="module-name" value="XMLParserTest" />
+    </test>
+</configuration>
diff --git a/media/libstagefright/xmlparser/test/README.md b/media/libstagefright/xmlparser/test/README.md
new file mode 100644
index 0000000..e9363fd
--- /dev/null
+++ b/media/libstagefright/xmlparser/test/README.md
@@ -0,0 +1,33 @@
+## Media Testing ##
+---
+#### XML Parser
+The XMLParser Test Suite validates the MediaCodecsXmlParser available in libstagefright.
+
+Run the following steps to build the test suite:
+```
+m XMLParserTest
+```
+
+The 32-bit binaries will be created in the following path: ${OUT}/data/nativetest/
+
+The 64-bit binaries will be created in the following path: ${OUT}/data/nativetest64/
+
+To test the 64-bit binary, push it from nativetest64.
+```
+adb push ${OUT}/data/nativetest64/XMLParserTest/XMLParserTest /data/local/tmp/
+```
+
+To test the 32-bit binary, push it from nativetest.
+```
+adb push ${OUT}/data/nativetest/XMLParserTest/XMLParserTest /data/local/tmp/
+```
+
+usage: XMLParserTest
+```
+adb shell /data/local/tmp/XMLParserTest
+```
+Alternatively, the test can be run using the atest command.
+
+```
+atest XMLParserTest
+```
diff --git a/media/libstagefright/xmlparser/test/XMLParserTest.cpp b/media/libstagefright/xmlparser/test/XMLParserTest.cpp
new file mode 100644
index 0000000..9ddd374
--- /dev/null
+++ b/media/libstagefright/xmlparser/test/XMLParserTest.cpp
@@ -0,0 +1,392 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "XMLParserTest"
+
+#include <utils/Log.h>
+
+#include <fstream>
+
+#include <media/stagefright/xmlparser/MediaCodecsXmlParser.h>
+
+#include "XMLParserTestEnvironment.h"
+
+#define XML_FILE_NAME "media_codecs_unit_test_caller.xml"
+
+using namespace android;
+
+static XMLParserTestEnvironment *gEnv = nullptr;
+
+struct CodecProperties {
+    string codecName;
+    MediaCodecsXmlParser::CodecProperties codecProp;
+};
+
+struct RoleProperties {
+    string roleName;
+    string typeName;
+    string codecName;
+    bool isEncoder;
+    size_t order;
+    vector<pair<string, string>> attributeMap;
+};
+
+class XMLParseTest : public ::testing::Test {
+  public:
+    ~XMLParseTest() {
+        if (mEleStream.is_open()) mEleStream.close();
+        mInputDataVector.clear();
+        mInputRoleVector.clear();
+    }
+
+    virtual void SetUp() override { setUpDatabase(); }
+
+    void setUpDatabase();
+
+    void setCodecProperties(string codecName, bool isEncoder, int32_t order, set<string> quirkSet,
+                            set<string> domainSet, set<string> variantSet, string typeName,
+                            vector<pair<string, string>> domain, vector<string> aliases,
+                            string rank);
+
+    void setRoleProperties(string roleName, bool isEncoder, int32_t order, string typeName,
+                           string codecName, vector<pair<string, string>> domain);
+
+    void setServiceAttribute(map<string, string> serviceAttributeNameValuePair);
+
+    void printCodecMap(const MediaCodecsXmlParser::Codec mcodec);
+
+    void checkRoleMap(int32_t index, bool isEncoder, string typeName, string codecName,
+                      vector<pair<string, string>> attrMap);
+
+    bool compareMap(const map<string, string> &lhs, const map<string, string> &rhs);
+
+    ifstream mEleStream;
+    MediaCodecsXmlParser mParser;
+    vector<CodecProperties> mInputDataVector;
+    vector<RoleProperties> mInputRoleVector;
+    map<string, string> mInputServiceAttributeMap;
+};
+
+void XMLParseTest::setUpDatabase() {
+    // The values set below are specific to test vector testdata/media_codecs_unit_test.xml
+    setCodecProperties("test1.decoder", false, 1, {"attribute::disabled", "quirk::quirk1"},
+                       {"telephony"}, {}, "audio/mpeg", {}, {"alias1.decoder"}, "4");
+
+    setCodecProperties("test2.decoder", false, 2, {"quirk::quirk1"}, {}, {}, "audio/3gpp", {}, {},
+                       "");
+
+    setCodecProperties("test3.decoder", false, 3, {}, {}, {}, "audio/amr-wb",
+                       {
+                               pair<string, string>("feature-feature1", "feature1Val"),
+                               pair<string, string>("feature-feature2", "0"),
+                               pair<string, string>("feature-feature3", "0"),
+                       },
+                       {}, "");
+
+    setCodecProperties("test4.decoder", false, 4, {}, {}, {}, "audio/flac",
+                       {pair<string, string>("feature-feature1", "feature1Val")}, {}, "");
+
+    setCodecProperties("test5.decoder", false, 5, {"attribute::attributeQuirk1"}, {}, {},
+                       "audio/g711-mlaw", {}, {}, "");
+
+    setCodecProperties("test6.decoder", false, 6, {}, {}, {"variant1", "variant2"},
+                       "audio/mp4a-latm",
+                       {pair<string, string>("variant1:::variant1Limit1-range",
+                                             "variant1Limit1Min-variant1Limit1Max"),
+                        pair<string, string>("variant1:::variant1Limit2-range",
+                                             "variant1Limit2Low-variant1Limit2High"),
+                        pair<string, string>("variant2:::variant2Limit1", "variant2Limit1Value")},
+                       {}, "");
+
+    setCodecProperties(
+            "test7.decoder", false, 7, {}, {}, {}, "audio/vorbis",
+            {
+                    pair<string, string>("-min-limit1", "limit1Min"),
+                    /*pair<string, string>("limit1-in", "limit1In"),*/
+                    pair<string, string>("limit2-range", "limit2Min-limit2Max"),
+                    pair<string, string>("limit2-scale", "limit2Scale"),
+                    pair<string, string>("limit3-default", "limit3Val3"),
+                    pair<string, string>("limit3-ranges", "limit3Val1,limit3Val2,limit3Val3"),
+            },
+            {}, "");
+
+    setCodecProperties("test8.encoder", true, 8, {}, {}, {}, "audio/opus",
+                       {pair<string, string>("max-limit1", "limit1Max")}, {}, "");
+
+    setRoleProperties("audio_decoder.mp3", false, 1, "audio/mpeg", "test1.decoder",
+                      {pair<string, string>("attribute::disabled", "present"),
+                       pair<string, string>("rank", "4")});
+
+    setRoleProperties("audio_decoder.amrnb", false, 2, "audio/3gpp", "test2.decoder", {});
+
+    setRoleProperties("audio_decoder.amrwb", false, 3, "audio/amr-wb", "test3.decoder",
+                      {pair<string, string>("feature-feature1", "feature1Val"),
+                       pair<string, string>("feature-feature2", "0"),
+                       pair<string, string>("feature-feature3", "0")});
+
+    setRoleProperties("audio/flac", false, 4, "audio/flac", "test4.decoder",
+                      {pair<string, string>("feature-feature1", "feature1Val")});
+
+    setRoleProperties("audio_decoder.g711mlaw", false, 5, "audio/g711-mlaw", "test5.decoder",
+                      {pair<string, string>("attribute::attributeQuirk1", "present")});
+
+    setRoleProperties("audio_decoder.aac", false, 6, "audio/mp4a-latm", "test6.decoder",
+                      {pair<string, string>("variant1:::variant1Limit1-range",
+                                            "variant1Limit1Min-variant1Limit1Max"),
+                       pair<string, string>("variant1:::variant1Limit2-range",
+                                            "variant1Limit2Low-variant1Limit2High"),
+                       pair<string, string>("variant2:::variant2Limit1", "variant2Limit1Value")});
+
+    setRoleProperties("audio_decoder.vorbis", false, 7, "audio/vorbis", "test7.decoder",
+                      {pair<string, string>("-min-limit1", "limit1Min"),
+                       /*pair<string, string>("limit1-in", "limit1In"),*/
+                       pair<string, string>("limit2-range", "limit2Min-limit2Max"),
+                       pair<string, string>("limit2-scale", "limit2Scale"),
+                       pair<string, string>("limit3-default", "limit3Val3"),
+                       pair<string, string>("limit3-ranges", "limit3Val1,limit3Val2,limit3Val3")});
+
+    setRoleProperties("audio_encoder.opus", true, 8, "audio/opus", "test8.encoder",
+                      {pair<string, string>("max-limit1", "limit1Max")});
+
+    setServiceAttribute(
+            {pair<string, string>("domain-telephony", "0"), pair<string, string>("domain-tv", "0"),
+             pair<string, string>("setting2", "0"), pair<string, string>("variant-variant1", "0")});
+}
+
+bool XMLParseTest::compareMap(const map<string, string> &lhs, const map<string, string> &rhs) {
+    return lhs.size() == rhs.size() && equal(lhs.begin(), lhs.end(), rhs.begin());
+}
+
+void XMLParseTest::setCodecProperties(string codecName, bool isEncoder, int32_t order,
+                                      set<string> quirkSet, set<string> domainSet,
+                                      set<string> variantSet, string typeName,
+                                      vector<pair<string, string>> domain, vector<string> aliases,
+                                      string rank) {
+    map<string, string> AttributeMapDB;
+    for (const auto &AttrStr : domain) {
+        AttributeMapDB.insert(AttrStr);
+    }
+    map<string, MediaCodecsXmlParser::AttributeMap> TypeMapDataBase;
+    TypeMapDataBase.insert(
+            pair<string, MediaCodecsXmlParser::AttributeMap>(typeName, AttributeMapDB));
+    CodecProperties codecProperty;
+    codecProperty.codecName = codecName;
+    codecProperty.codecProp.isEncoder = isEncoder;
+    codecProperty.codecProp.order = order;
+    codecProperty.codecProp.quirkSet = quirkSet;
+    codecProperty.codecProp.domainSet = domainSet;
+    codecProperty.codecProp.variantSet = variantSet;
+    codecProperty.codecProp.typeMap = TypeMapDataBase;
+    codecProperty.codecProp.aliases = aliases;
+    codecProperty.codecProp.rank = rank;
+    mInputDataVector.push_back(codecProperty);
+}
+
+void XMLParseTest::setRoleProperties(string roleName, bool isEncoder, int32_t order,
+                                     string typeName, string codecName,
+                                     vector<pair<string, string>> attributeNameValuePair) {
+    struct RoleProperties roleProperty;
+    roleProperty.roleName = roleName;
+    roleProperty.typeName = typeName;
+    roleProperty.codecName = codecName;
+    roleProperty.isEncoder = isEncoder;
+    roleProperty.order = order;
+    roleProperty.attributeMap = attributeNameValuePair;
+    mInputRoleVector.push_back(roleProperty);
+}
+
+void XMLParseTest::setServiceAttribute(map<string, string> serviceAttributeNameValuePair) {
+    for (const auto &serviceAttrStr : serviceAttributeNameValuePair) {
+        mInputServiceAttributeMap.insert(serviceAttrStr);
+    }
+}
+
+void XMLParseTest::printCodecMap(const MediaCodecsXmlParser::Codec mcodec) {
+    const string &name = mcodec.first;
+    ALOGV("codec name = %s\n", name.c_str());
+    const MediaCodecsXmlParser::CodecProperties &properties = mcodec.second;
+    bool isEncoder = properties.isEncoder;
+    ALOGV("isEncoder = %d\n", isEncoder);
+    size_t order = properties.order;
+    ALOGV("order = %zu\n", order);
+    string rank = properties.rank;
+    ALOGV("rank = %s\n", rank.c_str());
+
+    for (auto &itrQuirkSet : properties.quirkSet) {
+        ALOGV("quirkSet= %s", itrQuirkSet.c_str());
+    }
+
+    for (auto &itrDomainSet : properties.domainSet) {
+        ALOGV("domainSet= %s", itrDomainSet.c_str());
+    }
+
+    for (auto &itrVariantSet : properties.variantSet) {
+        ALOGV("variantSet= %s", itrVariantSet.c_str());
+    }
+
+    map<string, MediaCodecsXmlParser::AttributeMap> TypeMap = properties.typeMap;
+    ALOGV("The TypeMap is :");
+
+    for (auto &itrTypeMap : TypeMap) {
+        ALOGV("itrTypeMap->first\t%s\t", itrTypeMap.first.c_str());
+
+        for (auto &itrAttributeMap : itrTypeMap.second) {
+            ALOGV("AttributeMap->first = %s", itrAttributeMap.first.c_str());
+            ALOGV("AttributeMap->second = %s", itrAttributeMap.second.c_str());
+        }
+    }
+}
+
+void XMLParseTest::checkRoleMap(int32_t index, bool isEncoder, string typeName, string codecName,
+                                vector<pair<string, string>> AttributePairMap) {
+    ASSERT_EQ(isEncoder, mInputRoleVector.at(index).isEncoder)
+            << "Invalid RoleMap data. IsEncoder mismatch";
+    ASSERT_EQ(typeName, mInputRoleVector.at(index).typeName)
+            << "Invalid RoleMap data. typeName mismatch";
+    ASSERT_EQ(codecName, mInputRoleVector.at(index).codecName)
+            << "Invalid RoleMap data. codecName mismatch";
+
+    vector<pair<string, string>>::iterator itr_attributeMapDB =
+            (mInputRoleVector.at(index).attributeMap).begin();
+    vector<pair<string, string>>::iterator itr_attributeMap = AttributePairMap.begin();
+    for (; itr_attributeMap != AttributePairMap.end() &&
+           itr_attributeMapDB != mInputRoleVector.at(index).attributeMap.end();
+         ++itr_attributeMap, ++itr_attributeMapDB) {
+        string attributeName = itr_attributeMap->first;
+        string attributeNameDB = itr_attributeMapDB->first;
+        string attributevalue = itr_attributeMap->second;
+        string attributeValueDB = itr_attributeMapDB->second;
+        ASSERT_EQ(attributeName, attributeNameDB)
+                << "Invalid RoleMap data. Attribute name mismatch\t" << attributeName << " != "
+                << attributeNameDB;
+        ASSERT_EQ(attributevalue, attributeValueDB)
+                << "Invalid RoleMap data. Attribute value mismatch\t" << attributevalue << " != "
+                << attributeValueDB;
+    }
+}
+
+TEST_F(XMLParseTest, CodecMapParseTest) {
+    string inputFileName = gEnv->getRes() + XML_FILE_NAME;
+    mEleStream.open(inputFileName, ifstream::binary);
+    ASSERT_EQ(mEleStream.is_open(), true) << "Failed to open inputfile " << inputFileName;
+
+    mParser.parseXmlPath(inputFileName);
+    for (const MediaCodecsXmlParser::Codec &mcodec : mParser.getCodecMap()) {
+        printCodecMap(mcodec);
+        const MediaCodecsXmlParser::CodecProperties &properties = mcodec.second;
+        int32_t index = properties.order - 1;
+        ASSERT_GE(index, 0) << "Invalid order";
+        ASSERT_EQ(mInputDataVector.at(index).codecName, mcodec.first.c_str())
+                << "Invalid CodecMap data. codecName mismatch";
+        ASSERT_EQ(properties.isEncoder, mInputDataVector.at(index).codecProp.isEncoder)
+                << "Invalid CodecMap data. isEncoder mismatch";
+        ASSERT_EQ(properties.order, mInputDataVector.at(index).codecProp.order)
+                << "Invalid CodecMap data. order mismatch";
+
+        set<string> quirkSetDB = mInputDataVector.at(index).codecProp.quirkSet;
+        set<string> quirkSet = properties.quirkSet;
+        set<string> quirkDifference;
+        set_difference(quirkSetDB.begin(), quirkSetDB.end(), quirkSet.begin(), quirkSet.end(),
+                       inserter(quirkDifference, quirkDifference.end()));
+        ASSERT_EQ(quirkDifference.size(), 0) << "CodecMap:quirk mismatch";
+
+        map<string, MediaCodecsXmlParser::AttributeMap> TypeMapDB =
+                mInputDataVector.at(index).codecProp.typeMap;
+        map<string, MediaCodecsXmlParser::AttributeMap> TypeMap = properties.typeMap;
+        map<string, MediaCodecsXmlParser::AttributeMap>::iterator itr_TypeMapDB = TypeMapDB.begin();
+        map<string, MediaCodecsXmlParser::AttributeMap>::iterator itr_TypeMap = TypeMap.begin();
+
+        ASSERT_EQ(TypeMapDB.size(), TypeMap.size())
+                << "Invalid CodecMap data. Typemap size mismatch";
+
+        for (; itr_TypeMap != TypeMap.end() && itr_TypeMapDB != TypeMapDB.end();
+             ++itr_TypeMap, ++itr_TypeMapDB) {
+            ASSERT_EQ(itr_TypeMap->first, itr_TypeMapDB->first)
+                    << "Invalid CodecMap data. type mismatch";
+            bool flag = compareMap(itr_TypeMap->second, itr_TypeMapDB->second);
+            ASSERT_TRUE(flag) << "typeMap mismatch";
+        }
+        ASSERT_EQ(mInputDataVector.at(index).codecProp.rank, properties.rank)
+                << "Invalid CodecMap data. rank mismatch";
+    }
+}
+
+TEST_F(XMLParseTest, RoleMapParseTest) {
+    string inputFileName = gEnv->getRes() + XML_FILE_NAME;
+    mEleStream.open(inputFileName, ifstream::binary);
+    ASSERT_EQ(mEleStream.is_open(), true) << "Failed to open inputfile " << inputFileName;
+
+    mParser.parseXmlPath(inputFileName);
+
+    for (auto &mRole : mParser.getRoleMap()) {
+        typedef pair<string, string> Attribute;
+        const string &roleName = mRole.first;
+        ALOGV("Role map:name = %s\n", roleName.c_str());
+        const MediaCodecsXmlParser::RoleProperties &properties = mRole.second;
+        string type = properties.type;
+        ALOGV("Role map: type = %s\n", type.c_str());
+
+        bool isEncoder = properties.isEncoder;
+        ALOGV("Role map: isEncoder = %d\n", isEncoder);
+
+        multimap<size_t, MediaCodecsXmlParser::NodeInfo> nodeList = properties.nodeList;
+        multimap<size_t, MediaCodecsXmlParser::NodeInfo>::iterator itr_Node;
+        ALOGV("\nThe multimap nodeList is : \n");
+        for (itr_Node = nodeList.begin(); itr_Node != nodeList.end(); ++itr_Node) {
+            ALOGV("itr_Node->first=ORDER=\t%zu\t", itr_Node->first);
+            int32_t index = itr_Node->first - 1;
+            MediaCodecsXmlParser::NodeInfo nodePtr = itr_Node->second;
+            ALOGV("Role map:itr_Node->second.name = %s\n", nodePtr.name.c_str());
+            vector<Attribute> attrList = nodePtr.attributeList;
+            for (auto attrNameValueList = attrList.begin(); attrNameValueList != attrList.end();
+                 ++attrNameValueList) {
+                ALOGV("Role map:nodePtr.attributeList->first = %s\n",
+                      attrNameValueList->first.c_str());
+                ALOGV("Role map:nodePtr.attributeList->second = %s\n",
+                      attrNameValueList->second.c_str());
+            }
+            checkRoleMap(index, isEncoder, properties.type, nodePtr.name.c_str(), attrList);
+        }
+    }
+}
+
+TEST_F(XMLParseTest, ServiceAttributeMapParseTest) {
+    string inputFileName = gEnv->getRes() + XML_FILE_NAME;
+    mEleStream.open(inputFileName, ifstream::binary);
+    ASSERT_EQ(mEleStream.is_open(), true) << "Failed to open inputfile " << inputFileName;
+
+    mParser.parseXmlPath(inputFileName);
+    const auto serviceAttributeMap = mParser.getServiceAttributeMap();
+    for (const auto &attributePair : serviceAttributeMap) {
+        ALOGV("serviceAttribute.key = %s \t serviceAttribute.value = %s",
+              attributePair.first.c_str(), attributePair.second.c_str());
+    }
+    bool flag = compareMap(mInputServiceAttributeMap, serviceAttributeMap);
+    ASSERT_TRUE(flag) << "ServiceMapParseTest: typeMap mismatch";
+}
+
+int main(int argc, char **argv) {
+    gEnv = new XMLParserTestEnvironment();
+    ::testing::AddGlobalTestEnvironment(gEnv);
+    ::testing::InitGoogleTest(&argc, argv);
+    int status = gEnv->initFromOptions(argc, argv);
+    if (status == 0) {
+        status = RUN_ALL_TESTS();
+        ALOGD("XML Parser Test Result = %d\n", status);
+    }
+    return status;
+}
diff --git a/media/libstagefright/xmlparser/test/XMLParserTestEnvironment.h b/media/libstagefright/xmlparser/test/XMLParserTestEnvironment.h
new file mode 100644
index 0000000..61a09e6
--- /dev/null
+++ b/media/libstagefright/xmlparser/test/XMLParserTestEnvironment.h
@@ -0,0 +1,73 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __XML_PARSER_TEST_ENVIRONMENT_H__
+#define __XML_PARSER_TEST_ENVIRONMENT_H__
+
+#include <gtest/gtest.h>
+
+#include <getopt.h>
+
+using namespace std;
+
+class XMLParserTestEnvironment : public ::testing::Environment {
+  public:
+    XMLParserTestEnvironment() : res("/data/local/tmp/") {}
+
+    // Parses the command line arguments
+    int initFromOptions(int argc, char **argv);
+
+    void setRes(const char *_res) { res = _res; }
+
+    const string getRes() const { return res; }
+
+  private:
+    string res;
+};
+
+int XMLParserTestEnvironment::initFromOptions(int argc, char **argv) {
+    static struct option options[] = {{"path", required_argument, 0, 'P'}, {0, 0, 0, 0}};
+
+    while (true) {
+        int index = 0;
+        int c = getopt_long(argc, argv, "P:", options, &index);
+        if (c == -1) {
+            break;
+        }
+
+        switch (c) {
+            case 'P': {
+                setRes(optarg);
+                break;
+            }
+            default:
+                break;
+        }
+    }
+
+    if (optind < argc) {
+        fprintf(stderr,
+                "unrecognized option: %s\n\n"
+                "usage: %s <gtest options> <test options>\n\n"
+                "test options are:\n\n"
+                "-P, --path: Resource files directory location\n",
+                argv[optind ?: 1], argv[0]);
+        return 2;
+    }
+    return 0;
+}
+
+#endif  // __XML_PARSER_TEST_ENVIRONMENT_H__
diff --git a/media/libstagefright/xmlparser/test/testdata/media_codecs_unit_test.xml b/media/libstagefright/xmlparser/test/testdata/media_codecs_unit_test.xml
new file mode 100644
index 0000000..a7299d3
--- /dev/null
+++ b/media/libstagefright/xmlparser/test/testdata/media_codecs_unit_test.xml
@@ -0,0 +1,80 @@
+<?xml version="1.0" encoding="utf-8" ?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<!-- REFERENCE : frameworks/av/media/libstagefright/xmlparser/media_codecs.xsd -->
+<Included>
+    <Settings>
+        <Domain name="telephony" enabled="false" />
+        <Domain name="tv" enabled="false" />
+        <Variant name="variant1" enabled="false" />
+        <Setting name="setting1" value="settingValue1" update="true" />
+        <Setting name="setting2" enabled="false" />
+    </Settings>
+    <Decoders>
+        <!-- entry for enabled, domain, rank and update properties -->
+        <MediaCodec name="test1.decoder" type="audio/mpeg" update="false" domain="telephony" enabled="false" rank="4">
+            <Alias name="alias1.decoder" />
+            <Quirk name="quirk1" value="quirk1Value"/>
+        </MediaCodec>
+        <!-- entry for testing Quirk -->
+        <MediaCodec name="test2.decoder" type="audio/3gpp" enabled="true" >
+            <Quirk name="quirk1" value="quirk1Value"/>
+        </MediaCodec>
+        <!-- entry for testing Feature -->
+        <!-- feature2 takes value 0 (feature with same name takes lower feature's value) -->
+        <!-- feature3 gives value as 0 since it's optional -->
+        <!-- optional="true" required="true" is not a valid combination. -->
+        <!-- optional="false" required="false" is not a valid combination. -->
+        <MediaCodec name="test3.decoder" type="audio/amr-wb" >
+            <Feature name="feature1" value="feature1Val" />
+            <Feature name="feature2" value="feature2Val"/>
+            <Feature name="feature2" />
+            <Feature name="feature3" optional="true" required="false" />
+        </MediaCodec>
+        <!-- entry for testing Type -->
+        <MediaCodec name="test4.decoder">
+            <Type name="audio/flac">
+                <Feature name="feature1" value="feature1Val" />
+            </Type>
+        </MediaCodec>
+        <!-- entry for testing Attribute -->
+        <MediaCodec name="test5.decoder" type="audio/g711-mlaw" >
+            <Attribute name="attributeQuirk1" />
+        </MediaCodec>
+        <!-- entry for testing Variant -->
+        <MediaCodec name="test6.decoder" type="audio/mp4a-latm" variant="variant1,variant2" >
+            <Variant name="variant1">
+                <Limit name="variant1Limit1" min="variant1Limit1Min" max="variant1Limit1Max" />
+                <Limit name="variant1Limit2" range="variant1Limit2Low-variant1Limit2High" />
+            </Variant>
+            <Variant name="variant2">
+                <Limit name="variant2Limit1" value="variant2Limit1Value" />
+            </Variant>
+        </MediaCodec>
+        <!-- entry for testing Limit -->
+        <!-- 'in' is present in xsd file but not handled in MediaCodecsXmlParser -->
+        <MediaCodec name="test7.decoder" type="audio/vorbis" >
+            <Limit name="limit1" in="limit1In" min="limit1Min"/>
+            <Limit name="limit2" min="limit2Min" max="limit2Max" scale="limit2Scale" />
+            <Limit name="limit3" ranges="limit3Val1,limit3Val2,limit3Val3" default="limit3Val3" />
+        </MediaCodec>
+    </Decoders>
+    <Encoders>
+        <MediaCodec name="test8.encoder" type="audio/opus">
+            <Limit name="limit1" max="limit1Max" />
+        </MediaCodec>
+    </Encoders>
+</Included>
diff --git a/media/libstagefright/xmlparser/test/testdata/media_codecs_unit_test_caller.xml b/media/libstagefright/xmlparser/test/testdata/media_codecs_unit_test_caller.xml
new file mode 100644
index 0000000..d864ce9
--- /dev/null
+++ b/media/libstagefright/xmlparser/test/testdata/media_codecs_unit_test_caller.xml
@@ -0,0 +1,4 @@
+<!-- entry for testing Include -->
+<MediaCodecs>
+    <Include href="media_codecs_unit_test.xml" />
+</MediaCodecs>
diff --git a/media/libstagefright/xmlparser/vts/Android.mk b/media/libstagefright/xmlparser/vts/Android.mk
deleted file mode 100644
index d5290ba..0000000
--- a/media/libstagefright/xmlparser/vts/Android.mk
+++ /dev/null
@@ -1,22 +0,0 @@
-#
-# Copyright (C) 2019 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-LOCAL_PATH := $(call my-dir)
-
-include $(CLEAR_VARS)
-
-LOCAL_MODULE := VtsValidateMediaCodecs
-include test/vts/tools/build/Android.host_config.mk
diff --git a/media/libstagefright/xmlparser/vts/AndroidTest.xml b/media/libstagefright/xmlparser/vts/AndroidTest.xml
deleted file mode 100644
index 97ee107..0000000
--- a/media/libstagefright/xmlparser/vts/AndroidTest.xml
+++ /dev/null
@@ -1,30 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<!-- Copyright (C) 2019 The Android Open Source Project
-
-     Licensed under the Apache License, Version 2.0 (the "License");
-     you may not use this file except in compliance with the License.
-     You may obtain a copy of the License at
-
-          http://www.apache.org/licenses/LICENSE-2.0
-
-     Unless required by applicable law or agreed to in writing, software
-     distributed under the License is distributed on an "AS IS" BASIS,
-     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     See the License for the specific language governing permissions and
-     limitations under the License.
--->
-<configuration description="Config for VTS VtsValidateMediaCodecs.">
-    <option name="config-descriptor:metadata" key="plan" value="vts-treble" />
-    <target_preparer class="com.android.compatibility.common.tradefed.targetprep.VtsFilePusher">
-        <option name="abort-on-push-failure" value="false"/>
-        <option name="push-group" value="HostDrivenTest.push"/>
-        <option name="push" value="DATA/etc/media_codecs.xsd->/data/local/tmp/media_codecs.xsd"/>
-    </target_preparer>
-    <test class="com.android.tradefed.testtype.VtsMultiDeviceTest">
-        <option name="test-module-name" value="VtsValidateMediaCodecs"/>
-        <option name="binary-test-source" value="_32bit::DATA/nativetest/vts_mediaCodecs_validate_test/vts_mediaCodecs_validate_test" />
-        <option name="binary-test-source" value="_64bit::DATA/nativetest64/vts_mediaCodecs_validate_test/vts_mediaCodecs_validate_test" />
-        <option name="binary-test-type" value="gtest"/>
-        <option name="test-timeout" value="30s"/>
-    </test>
-</configuration>
diff --git a/media/libwatchdog/Android.bp b/media/libwatchdog/Android.bp
index 1a87824..f7f0db7 100644
--- a/media/libwatchdog/Android.bp
+++ b/media/libwatchdog/Android.bp
@@ -14,6 +14,7 @@
 
 cc_library {
     name: "libwatchdog",
+    host_supported: true,
     srcs: [
         "Watchdog.cpp",
     ],
@@ -29,6 +30,11 @@
         darwin: {
             enabled: false,
         },
+        linux_glibc: {
+            cflags: [
+                "-Dsigev_notify_thread_id=_sigev_un._tid",
+            ],
+        },
     },
     apex_available: ["com.android.media"],
     min_sdk_version: "29",
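For context on the new linux_glibc cflag (my reading of the glibc headers, not something stated in the change): bionic exposes sigev_notify_thread_id as a named member of struct sigevent, while glibc keeps the thread id inside the _sigev_un union, so the define maps the bionic-style name onto glibc's field for the new host build. A hedged sketch of the call pattern this keeps compiling on both C libraries:

```
#define _GNU_SOURCE 1     // SIGEV_THREAD_ID is a Linux/GNU extension on glibc
#include <signal.h>
#include <time.h>
#include <sys/syscall.h>
#include <unistd.h>

// Sketch only: arm a POSIX timer whose expiry signal targets a single thread.
static int armThreadTimer(timer_t *timerId) {
    struct sigevent ev = {};
    ev.sigev_notify = SIGEV_THREAD_ID;                // Linux extension: deliver to one thread
    ev.sigev_signo = SIGALRM;
    ev.sigev_notify_thread_id =                       // bionic name; the -D maps it to _sigev_un._tid on glibc
            static_cast<pid_t>(syscall(SYS_gettid));
    return timer_create(CLOCK_MONOTONIC, &ev, timerId);
}
```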
diff --git a/media/mediaserver/Android.bp b/media/mediaserver/Android.bp
index afca7c4..8d5c77f 100644
--- a/media/mediaserver/Android.bp
+++ b/media/mediaserver/Android.bp
@@ -34,8 +34,10 @@
         "frameworks/av/services/mediaresourcemanager",
     ],
 
-    // back to 32-bit, b/126502613
-    compile_multilib: "32",
+    // mediaserver has only been verified on 32-bit, see b/126502613
+    // use "prefer32" to *only* enable 64-bit builds on 64-bit-only lunch
+    // targets, which allows them to reach 'boot_complete'.
+    compile_multilib: "prefer32",
 
     init_rc: ["mediaserver.rc"],
 
diff --git a/media/mediaserver/main_mediaserver.cpp b/media/mediaserver/main_mediaserver.cpp
index 316732b..58e2d2a 100644
--- a/media/mediaserver/main_mediaserver.cpp
+++ b/media/mediaserver/main_mediaserver.cpp
@@ -18,7 +18,6 @@
 #define LOG_TAG "mediaserver"
 //#define LOG_NDEBUG 0
 
-#include <aicu/AIcu.h>
 #include <binder/IPCThreadState.h>
 #include <binder/ProcessState.h>
 #include <binder/IServiceManager.h>
@@ -39,7 +38,6 @@
     sp<ProcessState> proc(ProcessState::self());
     sp<IServiceManager> sm(defaultServiceManager());
     ALOGI("ServiceManager: %p", sm.get());
-    AIcu_initializeIcuOrDie();
     MediaPlayerService::instantiate();
     ResourceManagerService::instantiate();
     registerExtensions();
diff --git a/media/mtp/Android.bp b/media/mtp/Android.bp
index 66a3139..e572249 100644
--- a/media/mtp/Android.bp
+++ b/media/mtp/Android.bp
@@ -52,5 +52,6 @@
         "liblog",
         "libusbhost",
     ],
+    header_libs: ["libcutils_headers"],
 }
 
diff --git a/media/mtp/MtpFfsHandle.cpp b/media/mtp/MtpFfsHandle.cpp
index bd6a6c6..c8b4a03 100644
--- a/media/mtp/MtpFfsHandle.cpp
+++ b/media/mtp/MtpFfsHandle.cpp
@@ -114,11 +114,11 @@
 void MtpFfsHandle::advise(int fd) {
     for (unsigned i = 0; i < NUM_IO_BUFS; i++) {
         if (posix_madvise(mIobuf[i].bufs.data(), MAX_FILE_CHUNK_SIZE,
-                POSIX_MADV_SEQUENTIAL | POSIX_MADV_WILLNEED) < 0)
+                POSIX_MADV_SEQUENTIAL | POSIX_MADV_WILLNEED) != 0)
             PLOG(ERROR) << "Failed to madvise";
     }
     if (posix_fadvise(fd, 0, 0,
-                POSIX_FADV_SEQUENTIAL | POSIX_FADV_NOREUSE | POSIX_FADV_WILLNEED) < 0)
+                POSIX_FADV_SEQUENTIAL | POSIX_FADV_NOREUSE | POSIX_FADV_WILLNEED) != 0)
         PLOG(ERROR) << "Failed to fadvise";
 }
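The comparisons above change from < 0 to != 0 because posix_madvise() and posix_fadvise() return 0 on success or a positive errno-style error number on failure, and they do not set errno, so a negative return can never occur. A small standalone sketch of that convention (illustration only, not MtpFfsHandle code); reporting strerror of the returned value, rather than errno, gives the accurate failure reason:

```cpp
#include <fcntl.h>
#include <sys/mman.h>
#include <cstdio>
#include <cstring>

// posix_madvise()/posix_fadvise() return 0 on success or an error number on
// failure; they do not set errno, so report the return value itself.
static void adviseSequential(int fd, void* buf, size_t len) {
    if (int err = posix_madvise(buf, len, POSIX_MADV_SEQUENTIAL); err != 0) {
        fprintf(stderr, "posix_madvise: %s\n", strerror(err));
    }
    if (int err = posix_fadvise(fd, 0, 0, POSIX_FADV_SEQUENTIAL); err != 0) {
        fprintf(stderr, "posix_fadvise: %s\n", strerror(err));
    }
}
```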
 
diff --git a/media/mtp/tests/Android.bp b/media/mtp/tests/Android.bp
deleted file mode 100644
index 0750208..0000000
--- a/media/mtp/tests/Android.bp
+++ /dev/null
@@ -1,47 +0,0 @@
-//
-// Copyright (C) 2017 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-cc_test {
-    name: "mtp_ffs_handle_test",
-    test_suites: ["device-tests"],
-    srcs: ["MtpFfsHandle_test.cpp"],
-    shared_libs: [
-        "libbase",
-        "libmtp",
-        "liblog",
-    ],
-    cflags: [
-        "-Wall",
-        "-Wextra",
-        "-Werror",
-    ],
-}
-
-cc_test {
-    name: "posix_async_io_test",
-    test_suites: ["device-tests"],
-    srcs: ["PosixAsyncIO_test.cpp"],
-    shared_libs: [
-        "libbase",
-        "libmtp",
-        "liblog",
-    ],
-    cflags: [
-        "-Wall",
-        "-Wextra",
-        "-Werror",
-    ],
-}
diff --git a/media/mtp/tests/AndroidTest.xml b/media/mtp/tests/AndroidTest.xml
deleted file mode 100644
index c1f4753..0000000
--- a/media/mtp/tests/AndroidTest.xml
+++ /dev/null
@@ -1,26 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<!-- Copyright (C) 2017 The Android Open Source Project
-
-     Licensed under the Apache License, Version 2.0 (the "License");
-     you may not use this file except in compliance with the License.
-     You may obtain a copy of the License at
-
-          http://www.apache.org/licenses/LICENSE-2.0
-
-     Unless required by applicable law or agreed to in writing, software
-     distributed under the License is distributed on an "AS IS" BASIS,
-     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     See the License for the specific language governing permissions and
-     limitations under the License.
--->
-<configuration description="Config for mtp_ffs_handle_test">
-    <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
-        <option name="cleanup" value="true" />
-        <option name="push" value="mtp_ffs_handle_test->/data/local/tmp/mtp_ffs_handle_test" />
-    </target_preparer>
-    <option name="test-suite-tag" value="apct" />
-    <test class="com.android.tradefed.testtype.GTest" >
-        <option name="native-test-device-path" value="/data/local/tmp" />
-        <option name="module-name" value="mtp_ffs_handle_test" />
-    </test>
-</configuration>
\ No newline at end of file
diff --git a/media/mtp/tests/MtpFfsHandleTest/Android.bp b/media/mtp/tests/MtpFfsHandleTest/Android.bp
new file mode 100644
index 0000000..e393067
--- /dev/null
+++ b/media/mtp/tests/MtpFfsHandleTest/Android.bp
@@ -0,0 +1,32 @@
+//
+// Copyright (C) 2020 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+cc_test {
+    name: "mtp_ffs_handle_test",
+    test_suites: ["device-tests"],
+    srcs: ["MtpFfsHandle_test.cpp"],
+    shared_libs: [
+        "libbase",
+        "libmtp",
+        "liblog",
+    ],
+    cflags: [
+        "-Wall",
+        "-Wextra",
+        "-Werror",
+    ],
+}
+
diff --git a/media/mtp/tests/MtpFfsHandleTest/AndroidTest.xml b/media/mtp/tests/MtpFfsHandleTest/AndroidTest.xml
new file mode 100644
index 0000000..38bab27
--- /dev/null
+++ b/media/mtp/tests/MtpFfsHandleTest/AndroidTest.xml
@@ -0,0 +1,26 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<configuration description="Config for mtp_ffs_handle_test">
+    <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
+        <option name="cleanup" value="true" />
+        <option name="push" value="mtp_ffs_handle_test->/data/local/tmp/mtp_ffs_handle_test" />
+    </target_preparer>
+    <option name="test-suite-tag" value="apct" />
+    <test class="com.android.tradefed.testtype.GTest" >
+        <option name="native-test-device-path" value="/data/local/tmp" />
+        <option name="module-name" value="mtp_ffs_handle_test" />
+    </test>
+</configuration>
\ No newline at end of file
diff --git a/media/mtp/tests/MtpFfsHandle_test.cpp b/media/mtp/tests/MtpFfsHandleTest/MtpFfsHandle_test.cpp
similarity index 100%
rename from media/mtp/tests/MtpFfsHandle_test.cpp
rename to media/mtp/tests/MtpFfsHandleTest/MtpFfsHandle_test.cpp
diff --git a/media/mtp/tests/MtpFuzzer/Android.bp b/media/mtp/tests/MtpFuzzer/Android.bp
new file mode 100644
index 0000000..9cd4669
--- /dev/null
+++ b/media/mtp/tests/MtpFuzzer/Android.bp
@@ -0,0 +1,31 @@
+cc_fuzz {
+    name: "mtp_fuzzer",
+    srcs: [
+        "mtp_fuzzer.cpp",
+        "MtpMockDatabase.cpp",
+    ],
+    shared_libs: [
+        "libmtp",
+        "libbase",
+        "liblog",
+        "libutils",
+    ],
+    cflags: [
+        "-Wall",
+        "-Wextra",
+        "-Werror",
+        "-DMTP_DEVICE",
+        "-Wno-unused-parameter",
+    ],
+    dictionary: "mtp_fuzzer.dict",
+    corpus: ["corpus/*"],
+
+    fuzz_config: {
+
+        cc: ["jameswei@google.com"],
+        componentid: 1344,
+        acknowledgement: [
+            "Grant Hernandez of Google",
+        ],
+    },
+}
diff --git a/media/mtp/tests/MtpFuzzer/MtpMockDatabase.cpp b/media/mtp/tests/MtpFuzzer/MtpMockDatabase.cpp
new file mode 100644
index 0000000..5d95aa2
--- /dev/null
+++ b/media/mtp/tests/MtpFuzzer/MtpMockDatabase.cpp
@@ -0,0 +1,315 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <stdlib.h>
+#include <string.h>
+#include <sys/stat.h>
+
+#include <string>
+
+#define LOG_TAG "MtpFuzzer"
+
+#include <log/log.h>
+
+#include "MtpDebug.h"
+#include "MtpMockDatabase.h"
+#include "MtpObjectInfo.h"
+
+namespace android {
+
+MtpMockDatabase::MtpMockDatabase() : mLastObjectHandle(0) {}
+
+MtpMockDatabase::~MtpMockDatabase() {
+    for (MtpObjectInfo* i : mObjects) {
+        delete i;
+    }
+    mObjects.clear();
+}
+
+void MtpMockDatabase::addObject(MtpObjectInfo* info) {
+    assert(hasStorage(info->storageID));
+
+    // we take ownership
+    mObjects.push_back(info);
+
+    return;
+}
+
+MtpObjectHandle MtpMockDatabase::allocateObjectHandle() {
+    // this is in sync with our mObjects database
+    return mLastObjectHandle++;
+}
+
+// Called from SendObjectInfo to reserve a database entry for the incoming file.
+MtpObjectHandle MtpMockDatabase::beginSendObject(const char* path, MtpObjectFormat format,
+                                                 MtpObjectHandle parent, MtpStorageID storage) {
+    if (!hasStorage(storage)) {
+        ALOGW("%s: Tried to look up storageID %u, but it doesn't exist\n", __func__, storage);
+        return kInvalidObjectHandle;
+    }
+
+    ALOGD("MockDatabase %s: path=%s oformat=0x%04x parent_handle=%u "
+          "storage_id=%u\n",
+          __func__, path, format, parent, storage);
+
+    return mLastObjectHandle;
+}
+
+// Called to report success or failure of the SendObject file transfer.
+void MtpMockDatabase::endSendObject(MtpObjectHandle handle, bool succeeded) {
+    ALOGD("MockDatabase %s: ohandle=%u succeeded=%d\n", __func__, handle, succeeded);
+}
+
+// Called to rescan a file, such as after an edit.
+void MtpMockDatabase::rescanFile(const char* path, MtpObjectHandle handle, MtpObjectFormat format) {
+    ALOGD("MockDatabase %s: path=%s ohandle=%u, oformat=0x%04x\n", __func__, path, handle, format);
+}
+
+MtpObjectHandleList* MtpMockDatabase::getObjectList(MtpStorageID storageID, MtpObjectFormat format,
+                                                    MtpObjectHandle parent) {
+    ALOGD("MockDatabase %s: storage_id=%u oformat=0x%04x ohandle=%u\n", __func__, storageID, format,
+          parent);
+    return nullptr;
+}
+
+int MtpMockDatabase::getNumObjects(MtpStorageID storageID, MtpObjectFormat format,
+                                   MtpObjectHandle parent) {
+    ALOGD("MockDatabase %s: storage_id=%u oformat=0x%04x ohandle=%u\n", __func__, storageID, format,
+          parent);
+    // TODO: return MTP_RESPONSE_OK when it stops segfaulting
+    return 0;
+}
+
+// callee should delete[] the results from these
+// results can be NULL
+MtpObjectFormatList* MtpMockDatabase::getSupportedPlaybackFormats() {
+    ALOGD("MockDatabase %s\n", __func__);
+    return nullptr;
+}
+MtpObjectFormatList* MtpMockDatabase::getSupportedCaptureFormats() {
+    ALOGD("MockDatabase %s\n", __func__);
+    return nullptr;
+}
+MtpObjectPropertyList* MtpMockDatabase::getSupportedObjectProperties(MtpObjectFormat format) {
+    ALOGD("MockDatabase %s: oformat=0x%04x\n", __func__, format);
+    return nullptr;
+}
+MtpDevicePropertyList* MtpMockDatabase::getSupportedDeviceProperties() {
+    ALOGD("MockDatabase %s\n", __func__);
+    return nullptr;
+}
+
+MtpResponseCode MtpMockDatabase::getObjectPropertyValue(MtpObjectHandle handle,
+                                                        MtpObjectProperty property,
+                                                        MtpDataPacket& packet) {
+    ALOGD("MockDatabase %s: ohandle=%u property=%s\n", __func__, handle,
+          MtpDebug::getObjectPropCodeName(property));
+    return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpMockDatabase::setObjectPropertyValue(MtpObjectHandle handle,
+                                                        MtpObjectProperty property,
+                                                        MtpDataPacket& packet) {
+    ALOGD("MockDatabase %s: ohandle=%u property=%s\n", __func__, handle,
+          MtpDebug::getObjectPropCodeName(property));
+    return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpMockDatabase::getDevicePropertyValue(MtpDeviceProperty property,
+                                                        MtpDataPacket& packet) {
+    ALOGD("MockDatabase %s: property=%s\n", __func__, MtpDebug::getDevicePropCodeName(property));
+    return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpMockDatabase::setDevicePropertyValue(MtpDeviceProperty property,
+                                                        MtpDataPacket& packet) {
+    ALOGD("MockDatabase %s: property=%s\n", __func__, MtpDebug::getDevicePropCodeName(property));
+    return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpMockDatabase::resetDeviceProperty(MtpDeviceProperty property) {
+    ALOGD("MockDatabase %s: property=%s\n", __func__, MtpDebug::getDevicePropCodeName(property));
+    return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpMockDatabase::getObjectPropertyList(MtpObjectHandle handle, uint32_t format,
+                                                       uint32_t property, int groupCode, int depth,
+                                                       MtpDataPacket& packet) {
+    ALOGD("MockDatabase %s: ohandle=%u format=%s property=%s groupCode=%d "
+          "depth=%d\n",
+          __func__, handle, MtpDebug::getFormatCodeName(format),
+          MtpDebug::getObjectPropCodeName(property), groupCode, depth);
+    return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpMockDatabase::getObjectInfo(MtpObjectHandle handle, MtpObjectInfo& info) {
+    ALOGD("MockDatabase %s: ohandle=%u\n", __func__, handle);
+
+    // used for the root
+    if (handle == kInvalidObjectHandle) {
+        return MTP_RESPONSE_INVALID_OBJECT_HANDLE;
+    } else {
+        if (mObjects.size() == 0) {
+            return MTP_RESPONSE_INVALID_OBJECT_HANDLE;
+        }
+
+        // this is used to let the fuzzer make progress, otherwise
+        // it has to brute-force a 32-bit handle
+        MtpObjectHandle reducedHandle = handle % mObjects.size();
+        MtpObjectInfo* obj = mObjects[reducedHandle];
+
+        // make a copy, but make sure to maintain ownership of string pointers
+        info = *obj;
+
+        // fixup the response handle
+        info.mHandle = handle;
+
+        if (obj->mName) info.mName = strdup(obj->mName);
+        if (obj->mKeywords) info.mKeywords = strdup(obj->mKeywords);
+
+        return MTP_RESPONSE_OK;
+    }
+}
+
+void* MtpMockDatabase::getThumbnail(MtpObjectHandle handle, size_t& outThumbSize) {
+    ALOGD("MockDatabase %s: ohandle=%u\n", __func__, handle);
+
+    size_t allocSize = handle % 0x1000;
+    void* data = calloc(allocSize, sizeof(uint8_t));
+    if (!data) {
+        return nullptr;
+    } else {
+        ALOGD("MockDatabase %s\n", __func__);
+        outThumbSize = allocSize;
+        return data;
+    }
+}
+
+MtpResponseCode MtpMockDatabase::getObjectFilePath(MtpObjectHandle handle,
+                                                   MtpStringBuffer& outFilePath,
+                                                   int64_t& outFileLength,
+                                                   MtpObjectFormat& outFormat) {
+    ALOGD("MockDatabase %s: ohandle=%u\n", __func__, handle);
+
+    if (mObjects.size() == 0) {
+        return MTP_RESPONSE_INVALID_OBJECT_HANDLE;
+    }
+
+    // this is used to let the fuzzer make progress, otherwise
+    // it has to brute-force a 32-bit handle
+    MtpObjectHandle reducedHandle = handle % mObjects.size();
+    MtpObjectInfo* obj = mObjects[reducedHandle];
+    MtpStorage* storage = mStorage[obj->mStorageID];
+
+    // walk up the tree to build a full path of the object
+    MtpObjectHandle currentHandle = reducedHandle;
+    std::string path = "";
+
+    while (currentHandle != MTP_PARENT_ROOT) {
+        MtpObjectInfo* next = mObjects[currentHandle];
+
+        // prepend the name
+        if (path == "")
+            path = std::string(next->mName);
+        else
+            path = std::string(next->mName) + "/" + path;
+
+        currentHandle = next->mParent;
+    }
+
+    outFilePath.set(storage->getPath());
+    outFilePath.append("/");
+    outFilePath.append(path.c_str());
+
+    outFormat = obj->mFormat;
+
+    ALOGD("MockDatabase %s: get file %s\n", __func__, (const char*)outFilePath);
+
+    struct stat sstat;
+    // this should not happen unless our database view of the filesystem is out of
+    // sync
+    if (stat((const char*)outFilePath, &sstat) < 0) {
+        ALOGE("MockDatabase %s: unable to stat %s\n", __func__, (const char*)outFilePath);
+
+        return MTP_RESPONSE_INVALID_OBJECT_HANDLE;
+    }
+
+    outFileLength = sstat.st_size;
+
+    return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpMockDatabase::beginDeleteObject(MtpObjectHandle handle) {
+    ALOGD("MockDatabase %s: ohandle=%u\n", __func__, handle);
+    return MTP_RESPONSE_OK;
+}
+void MtpMockDatabase::endDeleteObject(MtpObjectHandle handle, bool succeeded) {
+    ALOGD("MockDatabase %s: ohandle=%u succeeded=%d\n", __func__, handle, succeeded);
+    return;
+}
+
+MtpObjectHandleList* MtpMockDatabase::getObjectReferences(MtpObjectHandle handle) {
+    ALOGD("MockDatabase %s: ohandle=%u\n", __func__, handle);
+    return nullptr;
+}
+
+MtpResponseCode MtpMockDatabase::setObjectReferences(MtpObjectHandle handle,
+                                                     MtpObjectHandleList* references) {
+    ALOGD("MockDatabase %s: ohandle=%u\n", __func__, handle);
+    return MTP_RESPONSE_OK;
+}
+
+MtpProperty* MtpMockDatabase::getObjectPropertyDesc(MtpObjectProperty property,
+                                                    MtpObjectFormat format) {
+    ALOGD("MockDatabase %s: property=%s format=%s\n", __func__,
+          MtpDebug::getObjectPropCodeName(property), MtpDebug::getFormatCodeName(format));
+
+    return nullptr;
+}
+
+MtpProperty* MtpMockDatabase::getDevicePropertyDesc(MtpDeviceProperty property) {
+    ALOGD("MockDatabase %s: property=%s\n", __func__, MtpDebug::getDevicePropCodeName(property));
+    return nullptr;
+}
+
+MtpResponseCode MtpMockDatabase::beginMoveObject(MtpObjectHandle handle, MtpObjectHandle newParent,
+                                                 MtpStorageID newStorage) {
+    ALOGD("MockDatabase %s: ohandle=%u newParent=%u newStorage=%u\n", __func__, handle, newParent,
+          newStorage);
+    return MTP_RESPONSE_OK;
+}
+
+void MtpMockDatabase::endMoveObject(MtpObjectHandle oldParent, MtpObjectHandle newParent,
+                                    MtpStorageID oldStorage, MtpStorageID newStorage,
+                                    MtpObjectHandle handle, bool succeeded) {
+    ALOGD("MockDatabase %s: oldParent=%u newParent=%u oldStorage=%u newStorage=%u "
+          "ohandle=%u succeeded=%d\n",
+          __func__, oldParent, newParent, oldStorage, newStorage, handle, succeeded);
+    return;
+}
+
+MtpResponseCode MtpMockDatabase::beginCopyObject(MtpObjectHandle handle, MtpObjectHandle newParent,
+                                                 MtpStorageID newStorage) {
+    ALOGD("MockDatabase %s: ohandle=%u newParent=%u newStorage=%u\n", __func__, handle, newParent,
+          newStorage);
+    return MTP_RESPONSE_OK;
+}
+
+void MtpMockDatabase::endCopyObject(MtpObjectHandle handle, bool succeeded) {
+    ALOGD("MockDatabase %s: ohandle=%u succeeded=%d\n", __func__, handle, succeeded);
+}
+
+}; // namespace android
diff --git a/media/mtp/tests/MtpFuzzer/MtpMockDatabase.h b/media/mtp/tests/MtpFuzzer/MtpMockDatabase.h
new file mode 100644
index 0000000..876719e
--- /dev/null
+++ b/media/mtp/tests/MtpFuzzer/MtpMockDatabase.h
@@ -0,0 +1,118 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef _MTP_MOCK_DATABASE_H
+#define _MTP_MOCK_DATABASE_H
+
+#include <map>
+
+#include "IMtpDatabase.h"
+#include "MtpStorage.h"
+
+namespace android {
+
+class MtpMockDatabase : public IMtpDatabase {
+    std::map<MtpStorageID, MtpStorage*> mStorage;
+    std::vector<MtpObjectInfo*> mObjects;
+    uint32_t mLastObjectHandle;
+
+public:
+    MtpMockDatabase();
+    virtual ~MtpMockDatabase();
+
+    // MtpFuzzer methods
+    void addStorage(MtpStorage* storage) {
+        // we don't own this
+        mStorage[storage->getStorageID()] = storage;
+    }
+
+    bool hasStorage(MtpStorageID storage) { return mStorage.find(storage) != mStorage.end(); }
+
+    void addObject(MtpObjectInfo* info);
+    MtpObjectHandle allocateObjectHandle();
+
+    // libmtp interface methods
+    // Called from SendObjectInfo to reserve a database entry for the incoming
+    // file.
+    MtpObjectHandle beginSendObject(const char* path, MtpObjectFormat format,
+                                    MtpObjectHandle parent, MtpStorageID storage);
+
+    // Called to report success or failure of the SendObject file transfer.
+    void endSendObject(MtpObjectHandle handle, bool succeeded);
+
+    // Called to rescan a file, such as after an edit.
+    void rescanFile(const char* path, MtpObjectHandle handle, MtpObjectFormat format);
+
+    MtpObjectHandleList* getObjectList(MtpStorageID storageID, MtpObjectFormat format,
+                                       MtpObjectHandle parent);
+
+    int getNumObjects(MtpStorageID storageID, MtpObjectFormat format, MtpObjectHandle parent);
+
+    // callee should delete[] the results from these
+    // results can be NULL
+    MtpObjectFormatList* getSupportedPlaybackFormats();
+    MtpObjectFormatList* getSupportedCaptureFormats();
+    MtpObjectPropertyList* getSupportedObjectProperties(MtpObjectFormat format);
+    MtpDevicePropertyList* getSupportedDeviceProperties();
+
+    MtpResponseCode getObjectPropertyValue(MtpObjectHandle handle, MtpObjectProperty property,
+                                           MtpDataPacket& packet);
+
+    MtpResponseCode setObjectPropertyValue(MtpObjectHandle handle, MtpObjectProperty property,
+                                           MtpDataPacket& packet);
+
+    MtpResponseCode getDevicePropertyValue(MtpDeviceProperty property, MtpDataPacket& packet);
+
+    MtpResponseCode setDevicePropertyValue(MtpDeviceProperty property, MtpDataPacket& packet);
+
+    MtpResponseCode resetDeviceProperty(MtpDeviceProperty property);
+
+    MtpResponseCode getObjectPropertyList(MtpObjectHandle handle, uint32_t format,
+                                          uint32_t property, int groupCode, int depth,
+                                          MtpDataPacket& packet);
+
+    MtpResponseCode getObjectInfo(MtpObjectHandle handle, MtpObjectInfo& info);
+
+    void* getThumbnail(MtpObjectHandle handle, size_t& outThumbSize);
+
+    MtpResponseCode getObjectFilePath(MtpObjectHandle handle, MtpStringBuffer& outFilePath,
+                                      int64_t& outFileLength, MtpObjectFormat& outFormat);
+
+    MtpResponseCode beginDeleteObject(MtpObjectHandle handle);
+    void endDeleteObject(MtpObjectHandle handle, bool succeeded);
+
+    MtpObjectHandleList* getObjectReferences(MtpObjectHandle handle);
+
+    MtpResponseCode setObjectReferences(MtpObjectHandle handle, MtpObjectHandleList* references);
+
+    MtpProperty* getObjectPropertyDesc(MtpObjectProperty property, MtpObjectFormat format);
+
+    MtpProperty* getDevicePropertyDesc(MtpDeviceProperty property);
+
+    MtpResponseCode beginMoveObject(MtpObjectHandle handle, MtpObjectHandle newParent,
+                                    MtpStorageID newStorage);
+
+    void endMoveObject(MtpObjectHandle oldParent, MtpObjectHandle newParent,
+                       MtpStorageID oldStorage, MtpStorageID newStorage, MtpObjectHandle handle,
+                       bool succeeded);
+
+    MtpResponseCode beginCopyObject(MtpObjectHandle handle, MtpObjectHandle newParent,
+                                    MtpStorageID newStorage);
+    void endCopyObject(MtpObjectHandle handle, bool succeeded);
+};
+
+}; // namespace android
+
+#endif // _MTP_MOCK_DATABASE_H
diff --git a/media/mtp/tests/MtpFuzzer/MtpMockHandle.h b/media/mtp/tests/MtpFuzzer/MtpMockHandle.h
new file mode 100644
index 0000000..111485c
--- /dev/null
+++ b/media/mtp/tests/MtpFuzzer/MtpMockHandle.h
@@ -0,0 +1,94 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_MOCK_HANDLE_H
+#define _MTP_MOCK_HANDLE_H
+
+#include <vector>
+
+typedef std::vector<uint8_t> packet_t;
+
+namespace android {
+class MtpMockHandle : public IMtpHandle {
+private:
+    size_t mPacketNumber;
+    size_t mPacketOffset;
+    std::vector<packet_t> mPackets;
+
+public:
+    MtpMockHandle() : mPacketNumber(0), mPacketOffset(0) {}
+
+    void add_packet(packet_t pkt) { mPackets.push_back(pkt); }
+
+    // Return number of bytes read/written, or -1 and errno is set
+    int read(void *data, size_t len) {
+        if (mPacketNumber >= mPackets.size()) {
+            return 0;
+        } else {
+            int readAmt = 0;
+            packet_t pkt = mPackets[mPacketNumber];
+
+            ALOGD("%s: sz %zu, pkt %zu+%zu/%zu\n", __func__, len, mPacketNumber, mPacketOffset,
+                  pkt.size());
+
+            // packet is bigger than what the caller can handle,
+            if (pkt.size() > len) {
+                memcpy(data, pkt.data() + mPacketOffset, len);
+
+                mPacketOffset += len;
+                readAmt = len;
+                // packet is equal to or smaller than the caller's buffer
+            } else {
+                memcpy(data, pkt.data() + mPacketOffset, pkt.size());
+
+                mPacketNumber++;
+                mPacketOffset = 0;
+                readAmt = pkt.size();
+            }
+
+            return readAmt;
+        }
+    }
+    int write(const void *data, size_t len) {
+        ALOGD("MockHandle %s: len=%zu\n", __func__, len);
+        // fake the write
+        return len;
+    }
+
+    // Return 0 if send/receive is successful, or -1 and errno is set
+    int receiveFile(mtp_file_range mfr, bool zero_packet) {
+        ALOGD("MockHandle %s\n", __func__);
+        return 0;
+    }
+    int sendFile(mtp_file_range mfr) {
+        ALOGD("MockHandle %s\n", __func__);
+        return 0;
+    }
+    int sendEvent(mtp_event me) {
+        ALOGD("MockHandle %s: len=%zu\n", __func__, me.length);
+        return 0;
+    }
+
+    // Return 0 if the operation is successful, or -1 otherwise
+    int start(bool ptp) { return 0; }
+
+    void close() {}
+
+    virtual ~MtpMockHandle() {}
+};
+}; // namespace android
+
+#endif // _MTP_MOCK_HANDLE_H
diff --git a/media/mtp/tests/MtpFuzzer/corpus/1-mtp-open_session.pkt b/media/mtp/tests/MtpFuzzer/corpus/1-mtp-open_session.pkt
new file mode 100644
index 0000000..38f8ed2
--- /dev/null
+++ b/media/mtp/tests/MtpFuzzer/corpus/1-mtp-open_session.pkt
Binary files differ
diff --git a/media/mtp/tests/MtpFuzzer/corpus/2-mtp-get_device_info.pkt b/media/mtp/tests/MtpFuzzer/corpus/2-mtp-get_device_info.pkt
new file mode 100644
index 0000000..7759380
--- /dev/null
+++ b/media/mtp/tests/MtpFuzzer/corpus/2-mtp-get_device_info.pkt
Binary files differ
diff --git a/media/mtp/tests/MtpFuzzer/corpus/3-mtp-get_object_handles.pkt b/media/mtp/tests/MtpFuzzer/corpus/3-mtp-get_object_handles.pkt
new file mode 100644
index 0000000..e88410f
--- /dev/null
+++ b/media/mtp/tests/MtpFuzzer/corpus/3-mtp-get_object_handles.pkt
Binary files differ
diff --git a/media/mtp/tests/MtpFuzzer/corpus/4-mtp-get_object_info.pkt b/media/mtp/tests/MtpFuzzer/corpus/4-mtp-get_object_info.pkt
new file mode 100644
index 0000000..e283fb4
--- /dev/null
+++ b/media/mtp/tests/MtpFuzzer/corpus/4-mtp-get_object_info.pkt
Binary files differ
diff --git a/media/mtp/tests/MtpFuzzer/corpus/5-mtp-send_object_info.pkt b/media/mtp/tests/MtpFuzzer/corpus/5-mtp-send_object_info.pkt
new file mode 100644
index 0000000..7627f88
--- /dev/null
+++ b/media/mtp/tests/MtpFuzzer/corpus/5-mtp-send_object_info.pkt
Binary files differ
diff --git a/media/mtp/tests/MtpFuzzer/mtp_fuzzer.cpp b/media/mtp/tests/MtpFuzzer/mtp_fuzzer.cpp
new file mode 100644
index 0000000..f578462
--- /dev/null
+++ b/media/mtp/tests/MtpFuzzer/mtp_fuzzer.cpp
@@ -0,0 +1,165 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <android-base/unique_fd.h>
+#include <stddef.h>
+#include <stdint.h>
+#include <stdlib.h>
+#include <unistd.h>
+
+#include <string>
+
+#define LOG_TAG "MtpFuzzer"
+
+#include "IMtpHandle.h"
+#include "MtpMockDatabase.h"
+#include "MtpMockHandle.h"
+#include "MtpObjectInfo.h"
+#include "MtpServer.h"
+#include "MtpStorage.h"
+#include "MtpUtils.h"
+
+const char* storage_desc = "Fuzz Storage";
+// prefer tmpfs for file operations to avoid wearing out flash
+const char* storage_path = "/storage/fuzzer/0";
+const char* source_database = "srcdb/";
+
+namespace android {
+class MtpMockServer {
+public:
+    std::unique_ptr<MtpMockHandle> mHandle;
+    std::unique_ptr<MtpStorage> mStorage;
+    std::unique_ptr<MtpMockDatabase> mDatabase;
+    std::unique_ptr<MtpServer> mMtp;
+    int mStorageId;
+
+    MtpMockServer(const char* storage_path) : mStorageId(0) {
+        bool ptp = false;
+        const char* manu = "Google";
+        const char* model = "Pixel 3XL";
+        const char* version = "1.0";
+        const char* serial = "ABDEF1231";
+
+        // This is unused in our harness
+        int controlFd = -1;
+
+        mHandle = std::make_unique<MtpMockHandle>();
+        mStorage = std::make_unique<MtpStorage>(mStorageId, storage_path, storage_desc, true,
+                                                0x200000000L);
+        mDatabase = std::make_unique<MtpMockDatabase>();
+        mDatabase->addStorage(mStorage.get());
+
+        mMtp = std::make_unique<MtpServer>(mDatabase.get(), controlFd, ptp, manu, model, version,
+                                           serial);
+        mMtp->addStorage(mStorage.get());
+
+        // clear the old handle first, so we don't leak memory
+        delete mMtp->mHandle;
+        mMtp->mHandle = mHandle.get();
+    }
+
+    void run() { mMtp->run(); }
+
+    int createDatabaseFromSourceDir(const char* fromPath, const char* toPath,
+                                    MtpObjectHandle parentHandle) {
+        int ret = 0;
+        std::string fromPathStr(fromPath);
+        std::string toPathStr(toPath);
+
+        DIR* dir = opendir(fromPath);
+        if (!dir) {
+            ALOGE("opendir %s failed", fromPath);
+            return -1;
+        }
+        if (fromPathStr[fromPathStr.size() - 1] != '/') fromPathStr += '/';
+        if (toPathStr[toPathStr.size() - 1] != '/') toPathStr += '/';
+
+        struct dirent* entry;
+        while ((entry = readdir(dir))) {
+            const char* name = entry->d_name;
+
+            // ignore "." and ".."
+            if (name[0] == '.' && (name[1] == 0 || (name[1] == '.' && name[2] == 0))) {
+                continue;
+            }
+
+            std::string oldFile = fromPathStr + name;
+            std::string newFile = toPathStr + name;
+
+            if (entry->d_type == DT_DIR) {
+                ret += makeFolder(newFile.c_str());
+
+                MtpObjectInfo* objectInfo = new MtpObjectInfo(mDatabase->allocateObjectHandle());
+                objectInfo->mStorageID = mStorage->getStorageID();
+                objectInfo->mParent = parentHandle;
+                objectInfo->mFormat = MTP_FORMAT_ASSOCIATION; // folder
+                objectInfo->mName = strdup(name);
+                objectInfo->mKeywords = strdup("");
+
+                mDatabase->addObject(objectInfo);
+
+                ret += createDatabaseFromSourceDir(oldFile.c_str(), newFile.c_str(),
+                                                   objectInfo->mHandle);
+            } else {
+                ret += copyFile(oldFile.c_str(), newFile.c_str());
+
+                MtpObjectInfo* objectInfo = new MtpObjectInfo(mDatabase->allocateObjectHandle());
+                objectInfo->mStorageID = mStorage->getStorageID();
+                objectInfo->mParent = parentHandle;
+                objectInfo->mFormat = MTP_FORMAT_TEXT;
+                objectInfo->mName = strdup(name);
+                objectInfo->mKeywords = strdup("");
+
+                mDatabase->addObject(objectInfo);
+            }
+        }
+
+        closedir(dir);
+        return ret;
+    }
+};
+}; // namespace android
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) __attribute__((optnone)) {
+    // reset our storage (from MtpUtils.h)
+    android::deletePath(storage_path);
+    android::makeFolder("/storage/fuzzer");
+    android::makeFolder(storage_path);
+
+    std::unique_ptr<android::MtpMockServer> mtp =
+            std::make_unique<android::MtpMockServer>(storage_path);
+
+    size_t off = 0;
+
+    // Packetize the input stream
+    for (size_t i = 0; i < size; i++) {
+        // A longer delimiter could be used, but this worked in practice
+        if (data[i] == '@') {
+            size_t pktsz = i - off;
+            if (pktsz > 0) {
+                packet_t pkt = packet_t((unsigned char*)data + off, (unsigned char*)data + i);
+                // insert into packet buffer
+                mtp->mHandle->add_packet(pkt);
+                off = i;
+            }
+        }
+    }
+
+    mtp->createDatabaseFromSourceDir(source_database, storage_path, MTP_PARENT_ROOT);
+    mtp->run();
+
+    return 0;
+}
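The loop above splits the libFuzzer input on '@' bytes and queues each chunk into MtpMockHandle before MtpServer::run() consumes them. A self-contained sketch of equivalent splitting logic, shown only as an illustration (it differs slightly from the harness in that it skips the delimiter byte itself and keeps any trailing chunk):

```cpp
#include <cstddef>
#include <cstdint>
#include <vector>

using packet_t = std::vector<uint8_t>;

// Split a raw fuzz buffer into packets on '@' delimiters, dropping empty chunks.
static std::vector<packet_t> packetize(const uint8_t* data, size_t size) {
    std::vector<packet_t> packets;
    size_t off = 0;
    for (size_t i = 0; i < size; i++) {
        if (data[i] == '@') {
            if (i > off) packets.emplace_back(data + off, data + i);
            off = i + 1;  // start the next packet after the delimiter
        }
    }
    if (size > off) packets.emplace_back(data + off, data + size);  // trailing chunk
    return packets;
}
```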
diff --git a/media/mtp/tests/MtpFuzzer/mtp_fuzzer.dict b/media/mtp/tests/MtpFuzzer/mtp_fuzzer.dict
new file mode 100644
index 0000000..4c3f136
--- /dev/null
+++ b/media/mtp/tests/MtpFuzzer/mtp_fuzzer.dict
@@ -0,0 +1,74 @@
+mtp_operation_get_device_info="\x01\x10"
+mtp_operation_open_session="\x02\x10"
+mtp_operation_close_session="\x03\x10"
+mtp_operation_get_storage_ids="\x04\x10"
+mtp_operation_get_storage_info="\x05\x10"
+mtp_operation_get_num_objects="\x06\x10"
+mtp_operation_get_object_handles="\x07\x10"
+mtp_operation_get_object_info="\x08\x10"
+mtp_operation_get_object="\x09\x10"
+mtp_operation_get_thumb="\x0A\x10"
+mtp_operation_delete_object="\x0B\x10"
+mtp_operation_send_object_info="\x0C\x10"
+mtp_operation_send_object="\x0D\x10"
+mtp_operation_initiate_capture="\x0E\x10"
+mtp_operation_format_store="\x0F\x10"
+mtp_operation_reset_device="\x10\x10"
+mtp_operation_self_test="\x11\x10"
+mtp_operation_set_object_protection="\x12\x10"
+mtp_operation_power_down="\x13\x10"
+mtp_operation_get_device_prop_desc="\x14\x10"
+mtp_operation_get_device_prop_value="\x15\x10"
+mtp_operation_set_device_prop_value="\x16\x10"
+mtp_operation_reset_device_prop_value="\x17\x10"
+mtp_operation_terminate_open_capture="\x18\x10"
+mtp_operation_move_object="\x19\x10"
+mtp_operation_copy_object="\x1A\x10"
+mtp_operation_get_partial_object="\x1B\x10"
+mtp_operation_initiate_open_capture="\x1C\x10"
+mtp_operation_get_object_props_supported="\x01\x98"
+mtp_operation_get_object_prop_desc="\x02\x98"
+mtp_operation_get_object_prop_value="\x03\x98"
+mtp_operation_set_object_prop_value="\x04\x98"
+mtp_operation_get_object_prop_list="\x05\x98"
+mtp_operation_set_object_prop_list="\x06\x98"
+mtp_operation_get_interdependent_prop_desc="\x07\x98"
+mtp_operation_send_object_prop_list="\x08\x98"
+mtp_operation_get_object_references="\x10\x98"
+mtp_operation_set_object_references="\x11\x98"
+mtp_operation_skip="\x20\x98"
+mtp_operation_get_partial_object_64="\xC1\x95"
+mtp_operation_send_partial_object="\xC2\x95"
+mtp_operation_truncate_object="\xC3\x95"
+mtp_operation_begin_edit_object="\xC4\x95"
+mtp_operation_end_edit_object="\xC5\x95"
+
+# Association (for example, a folder)
+mtp_format_association="\x01\x30"
+
+# types
+mtp_type_undefined="\x00\x00"
+mtp_type_int8="\x01\x00"
+mtp_type_uint8="\x02\x00"
+mtp_type_int16="\x03\x00"
+mtp_type_uint16="\x04\x00"
+mtp_type_int32="\x05\x00"
+mtp_type_uint32="\x06\x00"
+mtp_type_int64="\x07\x00"
+mtp_type_uint64="\x08\x00"
+mtp_type_int128="\x09\x00"
+mtp_type_uint128="\x0A\x00"
+mtp_type_aint8="\x01\x40"
+mtp_type_auint8="\x02\x40"
+mtp_type_aint16="\x03\x40"
+mtp_type_auint16="\x04\x40"
+mtp_type_aint32="\x05\x40"
+mtp_type_auint32="\x06\x40"
+mtp_type_aint64="\x07\x40"
+mtp_type_auint64="\x08\x40"
+mtp_type_aint128="\x09\x40"
+mtp_type_auint128="\x0A\x40"
+mtp_type_str="\xFF\xFF"
+
+# also used for max size (>4GB)
+mtp_parent_root="\xFF\xFF\xFF\xFF"
diff --git a/media/mtp/tests/PosixAsyncIOTest/Android.bp b/media/mtp/tests/PosixAsyncIOTest/Android.bp
new file mode 100644
index 0000000..1d401b8
--- /dev/null
+++ b/media/mtp/tests/PosixAsyncIOTest/Android.bp
@@ -0,0 +1,31 @@
+//
+// Copyright (C) 2020 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+cc_test {
+    name: "posix_async_io_test",
+    test_suites: ["device-tests"],
+    srcs: ["PosixAsyncIO_test.cpp"],
+    shared_libs: [
+        "libbase",
+        "libmtp",
+        "liblog",
+    ],
+    cflags: [
+        "-Wall",
+        "-Wextra",
+        "-Werror",
+    ],
+}
diff --git a/media/mtp/tests/PosixAsyncIOTest/AndroidTest.xml b/media/mtp/tests/PosixAsyncIOTest/AndroidTest.xml
new file mode 100644
index 0000000..cbb10fb
--- /dev/null
+++ b/media/mtp/tests/PosixAsyncIOTest/AndroidTest.xml
@@ -0,0 +1,26 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<configuration description="Config for posix_async_io_test">
+    <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
+        <option name="cleanup" value="true" />
+        <option name="push" value="posix_async_io_test->/data/local/tmp/posix_async_io_test" />
+    </target_preparer>
+    <option name="test-suite-tag" value="apct" />
+    <test class="com.android.tradefed.testtype.GTest" >
+        <option name="native-test-device-path" value="/data/local/tmp" />
+        <option name="module-name" value="posix_async_io_test" />
+    </test>
+</configuration>
\ No newline at end of file
diff --git a/media/mtp/tests/PosixAsyncIO_test.cpp b/media/mtp/tests/PosixAsyncIOTest/PosixAsyncIO_test.cpp
similarity index 100%
rename from media/mtp/tests/PosixAsyncIO_test.cpp
rename to media/mtp/tests/PosixAsyncIOTest/PosixAsyncIO_test.cpp
diff --git a/media/ndk/Android.bp b/media/ndk/Android.bp
index 43989bb..755d6e6 100644
--- a/media/ndk/Android.bp
+++ b/media/ndk/Android.bp
@@ -43,11 +43,18 @@
         "com.android.media.swcodec",
     ],
     min_sdk_version: "29",
-    export_include_dirs: ["include"]
+    export_include_dirs: ["include"],
+    host_supported: true,
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
 }
 
 cc_library_shared {
     name: "libmediandk",
+    llndk_stubs: "libmediandk.llndk",
 
     srcs: [
         "NdkJavaVMHelper.cpp",
@@ -79,6 +86,7 @@
     ],
 
     header_libs: [
+        "jni_headers",
         "libmediadrm_headers",
         "libmediametrics_headers",
     ],
@@ -106,6 +114,8 @@
         "libnativehelper",
     ],
 
+    export_header_lib_headers: ["jni_headers"],
+
     export_include_dirs: ["include"],
 
     export_shared_lib_headers: [
@@ -125,7 +135,7 @@
 }
 
 llndk_library {
-    name: "libmediandk",
+    name: "libmediandk.llndk",
     symbol_file: "libmediandk.map.txt",
     export_include_dirs: [
         "include",
@@ -172,6 +182,7 @@
 
 cc_test {
     name: "AImageReaderWindowHandleTest",
+    test_suites: ["device-tests"],
     srcs: ["tests/AImageReaderWindowHandleTest.cpp"],
     shared_libs: [
         "libbinder",
@@ -195,3 +206,41 @@
         "frameworks/av/media/ndk/",
     ],
 }
+
+cc_library_static {
+    name: "libmediandk_format",
+
+    host_supported: true,
+
+    srcs: [
+        "NdkMediaFormat.cpp",
+    ],
+
+    header_libs: [
+        "libstagefright_foundation_headers",
+    ],
+
+    cflags: [
+        "-DEXPORT=__attribute__((visibility(\"default\")))",
+        "-Werror",
+        "-Wall",
+    ],
+
+    export_include_dirs: ["include"],
+
+    sanitize: {
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+        cfi: true,
+    },
+
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
+
+    apex_available: ["com.android.media"],
+}
diff --git a/media/ndk/NdkImagePriv.h b/media/ndk/NdkImagePriv.h
index 0e8cbcb..b019448 100644
--- a/media/ndk/NdkImagePriv.h
+++ b/media/ndk/NdkImagePriv.h
@@ -30,6 +30,18 @@
 
 using namespace android;
 
+// Formats not listed in the public API, but still available to AImageReader
+enum AIMAGE_PRIVATE_FORMATS {
+    /**
+     * Unprocessed implementation-dependent raw
+     * depth measurements, opaque with 16 bit
+     * samples.
+     *
+     */
+
+    AIMAGE_FORMAT_RAW_DEPTH = 0x1002,
+};
+
 // TODO: this only supports ImageReader
 struct AImage {
     AImage(AImageReader* reader, int32_t format, uint64_t usage, BufferItem* buffer,
diff --git a/media/ndk/NdkImageReader.cpp b/media/ndk/NdkImageReader.cpp
index c0ceb3d..5d8f0b8 100644
--- a/media/ndk/NdkImageReader.cpp
+++ b/media/ndk/NdkImageReader.cpp
@@ -21,7 +21,6 @@
 
 #include "NdkImagePriv.h"
 #include "NdkImageReaderPriv.h"
-#include <private/media/NdkImage.h>
 
 #include <cutils/atomic.h>
 #include <utils/Log.h>
diff --git a/media/ndk/NdkMediaCodec.cpp b/media/ndk/NdkMediaCodec.cpp
index af21a99..d771095 100644
--- a/media/ndk/NdkMediaCodec.cpp
+++ b/media/ndk/NdkMediaCodec.cpp
@@ -45,6 +45,10 @@
         return AMEDIA_OK;
     } else if (err == -EAGAIN) {
         return (media_status_t) AMEDIACODEC_INFO_TRY_AGAIN_LATER;
+    } else if (err == NO_MEMORY) {
+        return AMEDIACODEC_ERROR_INSUFFICIENT_RESOURCE;
+    } else if (err == DEAD_OBJECT) {
+        return AMEDIACODEC_ERROR_RECLAIMED;
     }
     ALOGE("sf error code: %d", err);
     return AMEDIA_ERROR_UNKNOWN;
@@ -255,7 +259,7 @@
                          break;
                      }
                      msg->findString("detail", &detail);
-                     ALOGE("Decoder reported error(0x%x), actionCode(%d), detail(%s)",
+                     ALOGE("Codec reported error(0x%x), actionCode(%d), detail(%s)",
                            err, actionCode, detail.c_str());
 
                      Mutex::Autolock _l(mCodec->mAsyncCallbackLock);
diff --git a/media/ndk/NdkMediaFormat.cpp b/media/ndk/NdkMediaFormat.cpp
index ab0cb63..47214c5 100644
--- a/media/ndk/NdkMediaFormat.cpp
+++ b/media/ndk/NdkMediaFormat.cpp
@@ -26,9 +26,6 @@
 #include <utils/StrongPointer.h>
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/AMessage.h>
-#include <android_util_Binder.h>
-
-#include <jni.h>
 
 using namespace android;
 
@@ -367,12 +364,14 @@
 EXPORT const char* AMEDIAFORMAT_KEY_SAR_WIDTH = "sar-width";
 EXPORT const char* AMEDIAFORMAT_KEY_SEI = "sei";
 EXPORT const char* AMEDIAFORMAT_KEY_SLICE_HEIGHT = "slice-height";
+EXPORT const char* AMEDIAFORMAT_KEY_SLOW_MOTION_MARKERS = "slow-motion-markers";
 EXPORT const char* AMEDIAFORMAT_KEY_STRIDE = "stride";
 EXPORT const char* AMEDIAFORMAT_KEY_TARGET_TIME = "target-time";
 EXPORT const char* AMEDIAFORMAT_KEY_TEMPORAL_LAYER_COUNT = "temporal-layer-count";
 EXPORT const char* AMEDIAFORMAT_KEY_TEMPORAL_LAYER_ID = "temporal-layer-id";
 EXPORT const char* AMEDIAFORMAT_KEY_TEMPORAL_LAYERING = "ts-schema";
 EXPORT const char* AMEDIAFORMAT_KEY_TEXT_FORMAT_DATA = "text-format-data";
+EXPORT const char* AMEDIAFORMAT_KEY_THUMBNAIL_CSD_AV1C = "thumbnail-csd-av1c";
 EXPORT const char* AMEDIAFORMAT_KEY_THUMBNAIL_CSD_HEVC = "thumbnail-csd-hevc";
 EXPORT const char* AMEDIAFORMAT_KEY_THUMBNAIL_HEIGHT = "thumbnail-height";
 EXPORT const char* AMEDIAFORMAT_KEY_THUMBNAIL_TIME = "thumbnail-time";
diff --git a/media/ndk/TEST_MAPPING b/media/ndk/TEST_MAPPING
new file mode 100644
index 0000000..1a81538
--- /dev/null
+++ b/media/ndk/TEST_MAPPING
@@ -0,0 +1,6 @@
+// mappings for frameworks/av/media/ndk
+{
+  "presubmit": [
+    { "name": "AImageReaderWindowHandleTest" }
+  ]
+}
diff --git a/media/ndk/include/media/NdkMediaCodec.h b/media/ndk/include/media/NdkMediaCodec.h
index 8fb6a87..80d5d50 100644
--- a/media/ndk/include/media/NdkMediaCodec.h
+++ b/media/ndk/include/media/NdkMediaCodec.h
@@ -114,12 +114,12 @@
         int32_t actionCode,
         const char *detail);
 
-struct AMediaCodecOnAsyncNotifyCallback {
+typedef struct AMediaCodecOnAsyncNotifyCallback {
       AMediaCodecOnAsyncInputAvailable  onAsyncInputAvailable;
       AMediaCodecOnAsyncOutputAvailable onAsyncOutputAvailable;
       AMediaCodecOnAsyncFormatChanged   onAsyncFormatChanged;
       AMediaCodecOnAsyncError           onAsyncError;
-};
+} AMediaCodecOnAsyncNotifyCallback;
 
 #if __ANDROID_API__ >= 21
 
diff --git a/media/ndk/include/media/NdkMediaExtractor.h b/media/ndk/include/media/NdkMediaExtractor.h
index 14319c4..a1cd9e3 100644
--- a/media/ndk/include/media/NdkMediaExtractor.h
+++ b/media/ndk/include/media/NdkMediaExtractor.h
@@ -36,6 +36,7 @@
 #ifndef _NDK_MEDIA_EXTRACTOR_H
 #define _NDK_MEDIA_EXTRACTOR_H
 
+#include <stdbool.h>
 #include <sys/cdefs.h>
 #include <sys/types.h>
 
diff --git a/media/ndk/include/media/NdkMediaFormat.h b/media/ndk/include/media/NdkMediaFormat.h
index 49d8b4a..8f39929 100644
--- a/media/ndk/include/media/NdkMediaFormat.h
+++ b/media/ndk/include/media/NdkMediaFormat.h
@@ -36,9 +36,24 @@
 #ifndef _NDK_MEDIA_FORMAT_H
 #define _NDK_MEDIA_FORMAT_H
 
+#include <stdbool.h>
 #include <sys/cdefs.h>
 #include <sys/types.h>
 
+#ifndef __ANDROID__
+// Value copied from 'bionic/libc/include/android/api-level.h', which is not available on
+// non-Android systems. It is set to 10000, the same as the __ANDROID_API_FUTURE__ value.
+#ifndef __ANDROID_API__
+#define __ANDROID_API__ 10000
+#endif
+
+// Value copied from 'bionic/libc/include/android/versioning.h', which is not available on
+// non-Android systems.
+#ifndef __INTRODUCED_IN
+#define __INTRODUCED_IN(api_level)
+#endif
+#endif
+
 #include "NdkMediaError.h"
 
 __BEGIN_DECLS
@@ -307,6 +322,11 @@
 extern const char* AMEDIAFORMAT_KEY_LOW_LATENCY __INTRODUCED_IN(30);
 #endif /* __ANDROID_API__ >= 30 */
 
+#if __ANDROID_API__ >= 31
+extern const char* AMEDIAFORMAT_KEY_SLOW_MOTION_MARKERS __INTRODUCED_IN(31);
+extern const char* AMEDIAFORMAT_KEY_THUMBNAIL_CSD_AV1C __INTRODUCED_IN(31);
+#endif /* __ANDROID_API__ >= 31 */
+
 __END_DECLS
 
 #endif // _NDK_MEDIA_FORMAT_H
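Since the two new keys are only declared for __ANDROID_API__ >= 31, callers that also build against older API levels typically guard their use the same way. A small hedged sketch, assuming the long-standing AMediaFormat_getBuffer accessor:

```cpp
#include <media/NdkMediaFormat.h>

// Fetch the AV1 thumbnail CSD if both the API level and the format provide it.
static bool getAv1ThumbnailCsd(AMediaFormat* fmt, void** data, size_t* size) {
#if __ANDROID_API__ >= 31
    return AMediaFormat_getBuffer(fmt, AMEDIAFORMAT_KEY_THUMBNAIL_CSD_AV1C, data, size);
#else
    (void)fmt; (void)data; (void)size;
    return false;  // key not declared before API 31
#endif
}
```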
diff --git a/media/ndk/include/private/media/NdkImage.h b/media/ndk/include/private/media/NdkImage.h
deleted file mode 100644
index 4368a56..0000000
--- a/media/ndk/include/private/media/NdkImage.h
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Copyright (C) 2019 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef _PRIVATE_MEDIA_NDKIMAGE_H_
-#define _PRIVATE_MEDIA_NDKIMAGE_H_
-// Formats not listed in the public API, but still available to AImageReader
-enum AIMAGE_PRIVATE_FORMATS {
-    /**
-     * Unprocessed implementation-dependent raw
-     * depth measurements, opaque with 16 bit
-     * samples.
-     *
-     */
-
-    AIMAGE_FORMAT_RAW_DEPTH = 0x1002,
-};
-#endif // _PRIVATE_MEDIA_NDKIMAGE
diff --git a/media/ndk/libmediandk.map.txt b/media/ndk/libmediandk.map.txt
index 29f1da8..44c3e52 100644
--- a/media/ndk/libmediandk.map.txt
+++ b/media/ndk/libmediandk.map.txt
@@ -131,12 +131,14 @@
     AMEDIAFORMAT_KEY_SAR_WIDTH; # var introduced=29
     AMEDIAFORMAT_KEY_SEI; # var introduced=28
     AMEDIAFORMAT_KEY_SLICE_HEIGHT; # var introduced=28
+    AMEDIAFORMAT_KEY_SLOW_MOTION_MARKERS; # var introduced=31
     AMEDIAFORMAT_KEY_STRIDE; # var introduced=21
     AMEDIAFORMAT_KEY_TARGET_TIME; # var introduced=29
     AMEDIAFORMAT_KEY_TEMPORAL_LAYER_COUNT; # var introduced=29
     AMEDIAFORMAT_KEY_TEMPORAL_LAYER_ID; # var introduced=28
     AMEDIAFORMAT_KEY_TEMPORAL_LAYERING; # var introduced=28
     AMEDIAFORMAT_KEY_TEXT_FORMAT_DATA; # var introduced=29
+    AMEDIAFORMAT_KEY_THUMBNAIL_CSD_AV1C; # var introduced=31
     AMEDIAFORMAT_KEY_THUMBNAIL_CSD_HEVC; # var introduced=29
     AMEDIAFORMAT_KEY_THUMBNAIL_HEIGHT; # var introduced=29
     AMEDIAFORMAT_KEY_THUMBNAIL_TIME; # var introduced=29
diff --git a/media/ndk/tests/AImageReaderWindowHandleTest.cpp b/media/ndk/tests/AImageReaderWindowHandleTest.cpp
index 5b65064..27864c2 100644
--- a/media/ndk/tests/AImageReaderWindowHandleTest.cpp
+++ b/media/ndk/tests/AImageReaderWindowHandleTest.cpp
@@ -17,10 +17,10 @@
 #include <gtest/gtest.h>
 #include <media/NdkImageReader.h>
 #include <media/NdkImage.h>
-#include <private/media/NdkImage.h>
 #include <mediautils/AImageReaderUtils.h>
 #include <gui/IGraphicBufferProducer.h>
 #include <gui/bufferqueue/1.0/H2BGraphicBufferProducer.h>
+#include <NdkImagePriv.h>
 #include <NdkImageReaderPriv.h>
 #include <vndk/hardware_buffer.h>
 #include <memory>
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/EncoderTest.java b/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/EncoderTest.java
index 48e1422..4202732 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/EncoderTest.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/EncoderTest.java
@@ -34,6 +34,7 @@
 import com.android.media.benchmark.library.Native;
 import com.android.media.benchmark.library.Stats;
 
+import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.runner.RunWith;
@@ -57,40 +58,87 @@
 public class EncoderTest {
     private static final Context mContext =
             InstrumentationRegistry.getInstrumentation().getTargetContext();
+    private static final String mFileDirPath = mContext.getFilesDir() + "/";
     private static final String mInputFilePath = mContext.getString(R.string.input_file_path);
     private static final String mOutputFilePath = mContext.getString(R.string.output_file_path);
     private static final String mStatsFile =
             mContext.getExternalFilesDir(null) + "/Encoder." + System.currentTimeMillis() + ".csv";
     private static final String TAG = "EncoderTest";
-    private static final long PER_TEST_TIMEOUT_MS = 120000;
     private static final boolean DEBUG = false;
     private static final boolean WRITE_OUTPUT = false;
+    private static final long PER_TEST_TIMEOUT_MS = 120000;
     private static final int ENCODE_DEFAULT_FRAME_RATE = 25;
-    private static final int ENCODE_DEFAULT_BIT_RATE = 8000000 /* 8 Mbps */;
-    private static final int ENCODE_MIN_BIT_RATE = 600000 /* 600 Kbps */;
+    private static final int ENCODE_DEFAULT_VIDEO_BIT_RATE = 8000000 /* 8 Mbps */;
+    private static final int ENCODE_MIN_VIDEO_BIT_RATE = 600000 /* 600 Kbps */;
     private static final int ENCODE_DEFAULT_AUDIO_BIT_RATE = 128000 /* 128 Kbps */;
+    private static int mColorFormat = COLOR_FormatYUV420Flexible;
+    private static File mDecodedFileQcif;
+    private static File mDecodedFileFullHd;
+    private static File mDecodedFileAudio;
     private String mInputFile;
+    private String mMime;
+    private int mBitRate;
+    private int mIFrameInterval;
+    private int mWidth;
+    private int mHeight;
+    private int mProfile;
+    private int mLevel;
+    private int mSampleRate;
+    private int mNumChannel;
+    private static final String DECODE_FULLHD_INPUT = "crowd_1920x1080_25fps_4000kbps_h265.mkv";
+    private static final String DECODE_QCIF_INPUT = "crowd_176x144_25fps_6000kbps_mpeg4.mp4";
+    private static final String DECODE_AUDIO_INPUT = "bbb_48000hz_2ch_100kbps_opus_30sec.webm";
+    private static final String DECODE_FULLHD_UNPACKED = "crowd_1920x1080_25fps_4000kbps_h265.yuv";
+    private static final String DECODE_QCIF_UNPACKED = "crowd_176x144_25fps_6000kbps_mpeg4.yuv";
+    private static final String DECODE_AUDIO_UNPACKED = "bbb_48000hz_2ch_100kbps_opus_30sec.raw";
 
     @Parameterized.Parameters
     public static Collection<Object[]> inputFiles() {
         return Arrays.asList(new Object[][]{
                 // Audio Test
-                {"bbb_44100hz_2ch_128kbps_aac_30sec.mp4"},
-                {"bbb_8000hz_1ch_8kbps_amrnb_30sec.3gp"},
-                {"bbb_16000hz_1ch_9kbps_amrwb_30sec.3gp"},
-                {"bbb_44100hz_2ch_600kbps_flac_30sec.mp4"},
-                {"bbb_48000hz_2ch_100kbps_opus_30sec.webm"},
+                // Parameters: Filename, mimeType, bitrate, width, height, iFrameInterval,
+                // profile, level, sampleRate, channelCount
+                {DECODE_AUDIO_UNPACKED, MediaFormat.MIMETYPE_AUDIO_AAC,
+                        ENCODE_DEFAULT_AUDIO_BIT_RATE, -1, -1, -1, -1, -1, 44100, 2},
+                {DECODE_AUDIO_UNPACKED, MediaFormat.MIMETYPE_AUDIO_AMR_NB,
+                        ENCODE_DEFAULT_AUDIO_BIT_RATE, -1, -1, -1, -1, -1, 8000, 1},
+                {DECODE_AUDIO_UNPACKED, MediaFormat.MIMETYPE_AUDIO_AMR_WB,
+                        ENCODE_DEFAULT_AUDIO_BIT_RATE, -1, -1, -1, -1, -1, 16000, 1},
+                {DECODE_AUDIO_UNPACKED, MediaFormat.MIMETYPE_AUDIO_FLAC,
+                        ENCODE_DEFAULT_AUDIO_BIT_RATE, -1, -1, -1, -1, -1, 44100, 2},
+                {DECODE_AUDIO_UNPACKED, MediaFormat.MIMETYPE_AUDIO_OPUS,
+                        ENCODE_DEFAULT_AUDIO_BIT_RATE, -1, -1, -1, -1, -1, 48000, 2},
+
                 // Video Test
-                {"crowd_1920x1080_25fps_4000kbps_vp8.webm"},
-                {"crowd_1920x1080_25fps_6700kbps_h264.ts"},
-                {"crowd_1920x1080_25fps_4000kbps_h265.mkv"},
-                {"crowd_1920x1080_25fps_4000kbps_vp9.webm"},
-                {"crowd_176x144_25fps_6000kbps_mpeg4.mp4"},
-                {"crowd_176x144_25fps_6000kbps_h263.3gp"}});
+                // Parameters: Filename, mimeType, bitrate, width, height, iFrameInterval,
+                // profile, level, sampleRate, channelCount
+                {DECODE_FULLHD_UNPACKED, MediaFormat.MIMETYPE_VIDEO_VP8,
+                        ENCODE_DEFAULT_VIDEO_BIT_RATE, 1920, 1080, 1, -1, -1, -1, -1},
+                {DECODE_FULLHD_UNPACKED, MediaFormat.MIMETYPE_VIDEO_AVC,
+                        ENCODE_DEFAULT_VIDEO_BIT_RATE, 1920, 1080, 1, -1, -1, -1, -1},
+                {DECODE_FULLHD_UNPACKED, MediaFormat.MIMETYPE_VIDEO_HEVC,
+                        ENCODE_DEFAULT_VIDEO_BIT_RATE, 1920, 1080, 1, -1, -1, -1, -1},
+                {DECODE_FULLHD_UNPACKED, MediaFormat.MIMETYPE_VIDEO_VP9,
+                        ENCODE_DEFAULT_VIDEO_BIT_RATE, 1920, 1080, 1, -1, -1, -1, -1},
+                {DECODE_QCIF_UNPACKED, MediaFormat.MIMETYPE_VIDEO_MPEG4, ENCODE_MIN_VIDEO_BIT_RATE,
+                        176, 144, 1, -1, -1, -1, -1},
+                {DECODE_QCIF_UNPACKED, MediaFormat.MIMETYPE_VIDEO_H263, ENCODE_MIN_VIDEO_BIT_RATE,
+                        176, 144, 1, -1, -1, -1, -1}});
     }
 
-    public EncoderTest(String inputFileName) {
-        this.mInputFile = inputFileName;
+    public EncoderTest(String filename, String mime, int bitrate, int width, int height,
+                       int frameInterval, int profile, int level, int samplerate,
+                       int channelCount) {
+        this.mInputFile = filename;
+        this.mMime = mime;
+        this.mBitRate = bitrate;
+        this.mIFrameInterval = frameInterval;
+        this.mWidth = width;
+        this.mHeight = height;
+        this.mProfile = profile;
+        this.mLevel = level;
+        this.mSampleRate = samplerate;
+        this.mNumChannel = channelCount;
     }
 
     @BeforeClass
@@ -101,33 +149,36 @@
         Log.d(TAG, "Saving Benchmark results in: " + mStatsFile);
     }
 
-    @Test(timeout = PER_TEST_TIMEOUT_MS)
-    public void testEncoder() throws Exception {
-        int status;
-        int frameSize;
-        //Parameters for video
-        int width = 0;
-        int height = 0;
-        int profile = 0;
-        int level = 0;
-        int frameRate = 0;
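+    // Decode the three reference clips (Full HD video, QCIF video, and audio) once so that each
+    // parameterized run can encode from raw input.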
+    @BeforeClass
+    public static void prepareInput() throws IOException {
 
-        //Parameters for audio
-        int bitRate = 0;
-        int sampleRate = 0;
-        int numChannels = 0;
-        File inputFile = new File(mInputFilePath + mInputFile);
-        assertTrue("Cannot find " + mInputFile + " in directory " + mInputFilePath,
-                inputFile.exists());
+        mDecodedFileFullHd = new File(mFileDirPath + DECODE_FULLHD_UNPACKED);
+        int status = decodeFile(mInputFilePath + DECODE_FULLHD_INPUT, mDecodedFileFullHd);
+        assertEquals("Decoder returned error " + status, 0, status);
+
+        mDecodedFileQcif = new File(mFileDirPath + DECODE_QCIF_UNPACKED);
+        status = decodeFile(mInputFilePath + DECODE_QCIF_INPUT, mDecodedFileQcif);
+        assertEquals("Decoder returned error " + status, 0, status);
+
+        mDecodedFileAudio = new File(mFileDirPath + DECODE_AUDIO_UNPACKED);
+        status = decodeFile(mInputFilePath + DECODE_AUDIO_INPUT, mDecodedFileAudio);
+        assertEquals("Decoder returned error " + status, 0, status);
+    }
+
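+    // Decodes the given compressed clip into outputDecodeFile; the raw output is later fed to the
+    // encoder under test.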
+    private static int decodeFile(String inputFileName, File outputDecodeFile) throws IOException {
+        int status = -1;
+        File inputFile = new File(inputFileName);
+        assertTrue("Cannot open input file " + inputFileName, inputFile.exists());
         FileInputStream fileInput = new FileInputStream(inputFile);
         FileDescriptor fileDescriptor = fileInput.getFD();
+        FileOutputStream decodeOutputStream = new FileOutputStream(outputDecodeFile);
+
         Extractor extractor = new Extractor();
         int trackCount = extractor.setUpExtractor(fileDescriptor);
-        assertTrue("Extraction failed. No tracks for file: " + mInputFile, (trackCount > 0));
+        assertTrue("Extraction failed. No tracks for the given input file", (trackCount > 0));
         ArrayList<ByteBuffer> inputBuffer = new ArrayList<>();
         ArrayList<MediaCodec.BufferInfo> frameInfo = new ArrayList<>();
         for (int currentTrack = 0; currentTrack < trackCount; currentTrack++) {
-            int colorFormat = COLOR_FormatYUV420Flexible;
             extractor.selectExtractorTrack(currentTrack);
             MediaFormat format = extractor.getFormat(currentTrack);
             // Get samples from extractor
@@ -146,163 +197,135 @@
                             bufInfo.presentationTimeUs + " size = " + bufInfo.size);
                 }
             } while (sampleSize > 0);
-            int tid = android.os.Process.myTid();
-            File decodedFile = new File(mContext.getFilesDir() + "/decoder_" + tid + ".out");
-            FileOutputStream decodeOutputStream = new FileOutputStream(decodedFile);
             Decoder decoder = new Decoder();
             decoder.setupDecoder(decodeOutputStream);
             status = decoder.decode(inputBuffer, frameInfo, false, format, "");
-            assertEquals("Decoder returned error " + status + " for file: " + mInputFile, 0,
-                    status);
             MediaFormat decoderFormat = decoder.getFormat();
+            if (decoderFormat.containsKey(MediaFormat.KEY_COLOR_FORMAT)) {
+                mColorFormat = decoderFormat.getInteger(MediaFormat.KEY_COLOR_FORMAT);
+            }
             decoder.deInitCodec();
             extractor.unselectExtractorTrack(currentTrack);
             inputBuffer.clear();
             frameInfo.clear();
-            if (decodeOutputStream != null) {
-                decodeOutputStream.close();
-            }
-            String mime = format.getString(MediaFormat.KEY_MIME);
-            ArrayList<String> mediaCodecs = CodecUtils.selectCodecs(mime, true);
-            assertTrue("No suitable codecs found for file: " + mInputFile + " track : " +
-                    currentTrack + " mime: " + mime, (mediaCodecs.size() > 0));
-            Boolean[] encodeMode = {true, false};
-            /* Encoding the decoder's output */
-            for (Boolean asyncMode : encodeMode) {
-                for (String codecName : mediaCodecs) {
-                    FileOutputStream encodeOutputStream = null;
-                    if (WRITE_OUTPUT) {
-                        File outEncodeFile = new File(mOutputFilePath + "encoder.out");
-                        if (outEncodeFile.exists()) {
-                            assertTrue(" Unable to delete existing file" + outEncodeFile.toString(),
-                                    outEncodeFile.delete());
-                        }
-                        assertTrue("Unable to create file to write encoder output: " +
-                                outEncodeFile.toString(), outEncodeFile.createNewFile());
-                        encodeOutputStream = new FileOutputStream(outEncodeFile);
-                    }
-                    File rawFile = new File(mContext.getFilesDir() + "/decoder_" + tid + ".out");
-                    assertTrue("Cannot open file to write decoded output", rawFile.exists());
-                    if (DEBUG) {
-                        Log.i(TAG, "Path of decoded input file: " + rawFile.toString());
-                    }
-                    FileInputStream eleStream = new FileInputStream(rawFile);
-                    if (mime.startsWith("video/")) {
-                        width = format.getInteger(MediaFormat.KEY_WIDTH);
-                        height = format.getInteger(MediaFormat.KEY_HEIGHT);
-                        if (format.containsKey(MediaFormat.KEY_FRAME_RATE)) {
-                            frameRate = format.getInteger(MediaFormat.KEY_FRAME_RATE);
-                        } else if (frameRate <= 0) {
-                            frameRate = ENCODE_DEFAULT_FRAME_RATE;
-                        }
-                        if (format.containsKey(MediaFormat.KEY_BIT_RATE)) {
-                            bitRate = format.getInteger(MediaFormat.KEY_BIT_RATE);
-                        } else if (bitRate <= 0) {
-                            if (mime.contains("video/3gpp") || mime.contains("video/mp4v-es")) {
-                                bitRate = ENCODE_MIN_BIT_RATE;
-                            } else {
-                                bitRate = ENCODE_DEFAULT_BIT_RATE;
-                            }
-                        }
-                        if (format.containsKey(MediaFormat.KEY_PROFILE)) {
-                            profile = format.getInteger(MediaFormat.KEY_PROFILE);
-                        }
-                        if (format.containsKey(MediaFormat.KEY_PROFILE)) {
-                            level = format.getInteger(MediaFormat.KEY_LEVEL);
-                        }
-                        if (decoderFormat.containsKey(MediaFormat.KEY_COLOR_FORMAT)) {
-                            colorFormat = decoderFormat.getInteger(MediaFormat.KEY_COLOR_FORMAT);
-                        }
-                    } else {
-                        sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
-                        numChannels = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
-                        if (decoderFormat.containsKey(MediaFormat.KEY_BIT_RATE)) {
-                            bitRate = decoderFormat.getInteger(MediaFormat.KEY_BIT_RATE);
-                        } else {
-                            bitRate = ENCODE_DEFAULT_AUDIO_BIT_RATE;
-                        }
-                    }
-                    /*Setup Encode Format*/
-                    MediaFormat encodeFormat;
-                    if (mime.startsWith("video/")) {
-                        frameSize = width * height * 3 / 2;
-                        encodeFormat = MediaFormat.createVideoFormat(mime, width, height);
-                        encodeFormat.setInteger(MediaFormat.KEY_FRAME_RATE, frameRate);
-                        encodeFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
-                        encodeFormat.setInteger(MediaFormat.KEY_PROFILE, profile);
-                        encodeFormat.setInteger(MediaFormat.KEY_LEVEL, level);
-                        encodeFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
-                        encodeFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, frameSize);
-                        encodeFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
-                    } else {
-                        encodeFormat = MediaFormat.createAudioFormat(mime, sampleRate, numChannels);
-                        encodeFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
-                        frameSize = 4096;
-                    }
-                    Encoder encoder = new Encoder();
-                    encoder.setupEncoder(encodeOutputStream, eleStream);
-                    status = encoder.encode(codecName, encodeFormat, mime, frameRate, sampleRate,
-                            frameSize, asyncMode);
-                    encoder.deInitEncoder();
-                    assertEquals(
-                            codecName + " encoder returned error " + status + " for " + "file:" +
-                                    " " + mInputFile, 0, status);
-                    encoder.dumpStatistics(mInputFile, codecName, (asyncMode ? "async" : "sync"),
-                            extractor.getClipDuration(), mStatsFile);
-                    Log.i(TAG, "Encoding complete for file: " + mInputFile + " with codec: " +
-                            codecName + " for aSyncMode = " + asyncMode);
-                    encoder.resetEncoder();
-                    eleStream.close();
-                    if (encodeOutputStream != null) {
-                        encodeOutputStream.close();
-                    }
-
-                }
-            }
-            //Cleanup temporary input file
-            if (decodedFile.exists()) {
-                assertTrue(" Unable to delete decoded file" + decodedFile.toString(),
-                        decodedFile.delete());
-                Log.i(TAG, "Successfully deleted decoded file");
-            }
         }
         extractor.deinitExtractor();
         fileInput.close();
+        decodeOutputStream.close();
+        return status;
     }
 
     @Test(timeout = PER_TEST_TIMEOUT_MS)
-    public void testNativeEncoder() throws Exception {
-        File inputFile = new File(mInputFilePath + mInputFile);
-        assertTrue("Cannot find " + mInputFile + " in directory " + mInputFilePath,
-                inputFile.exists());
-        int tid = android.os.Process.myTid();
-        final String mDecodedFile = mContext.getFilesDir() + "/decoder_" + tid + ".out";
-        FileInputStream fileInput = new FileInputStream(inputFile);
-        FileDescriptor fileDescriptor = fileInput.getFD();
-        Extractor extractor = new Extractor();
-        int trackCount = extractor.setUpExtractor(fileDescriptor);
-        assertTrue("Extraction failed. No tracks for file: ", trackCount > 0);
-        for (int currentTrack = 0; currentTrack < trackCount; currentTrack++) {
-            extractor.selectExtractorTrack(currentTrack);
-            MediaFormat format = extractor.getFormat(currentTrack);
-            String mime = format.getString(MediaFormat.KEY_MIME);
-            ArrayList<String> mediaCodecs = CodecUtils.selectCodecs(mime, true);
-            // Encoding the decoder's output
+    public void testEncoder() throws Exception {
+        int status;
+        int frameSize;
+
+        ArrayList<String> mediaCodecs = CodecUtils.selectCodecs(mMime, true);
+        assertTrue("No suitable codecs found for mimetype: " + mMime, (mediaCodecs.size() > 0));
+        Boolean[] encodeMode = {true, false};
+        // Encoding the decoded input file
+        for (Boolean asyncMode : encodeMode) {
             for (String codecName : mediaCodecs) {
-                Native nativeEncoder = new Native();
-                int status = nativeEncoder
-                        .Encode(mInputFilePath, mInputFile, mDecodedFile, mStatsFile, codecName);
+                FileOutputStream encodeOutputStream = null;
+                if (WRITE_OUTPUT) {
+                    File outEncodeFile = new File(mOutputFilePath + "encoder.out");
+                    if (outEncodeFile.exists()) {
+                        assertTrue("Unable to delete existing file " + outEncodeFile.toString(),
+                                outEncodeFile.delete());
+                    }
+                    assertTrue("Unable to create file to write encoder output: " +
+                            outEncodeFile.toString(), outEncodeFile.createNewFile());
+                    encodeOutputStream = new FileOutputStream(outEncodeFile);
+                }
+                File rawFile = new File(mFileDirPath + mInputFile);
+                assertTrue("Cannot open decoded input file", rawFile.exists());
+                if (DEBUG) {
+                    Log.i(TAG, "Path of decoded input file: " + rawFile.toString());
+                }
+                FileInputStream eleStream = new FileInputStream(rawFile);
+                // Setup Encode Format
+                MediaFormat encodeFormat;
+                if (mMime.startsWith("video/")) {
+                    frameSize = mWidth * mHeight * 3 / 2;
+                    encodeFormat = MediaFormat.createVideoFormat(mMime, mWidth, mHeight);
+                    encodeFormat.setInteger(MediaFormat.KEY_FRAME_RATE, ENCODE_DEFAULT_FRAME_RATE);
+                    encodeFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, mIFrameInterval);
+                    encodeFormat.setInteger(MediaFormat.KEY_BIT_RATE, mBitRate);
+                    encodeFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, mColorFormat);
+                    if (mProfile != -1 && mLevel != -1) {
+                        encodeFormat.setInteger(MediaFormat.KEY_PROFILE, mProfile);
+                        encodeFormat.setInteger(MediaFormat.KEY_LEVEL, mLevel);
+                    }
+                } else {
+                    frameSize = 4096;
+                    encodeFormat = MediaFormat.createAudioFormat(mMime, mSampleRate, mNumChannel);
+                    encodeFormat.setInteger(MediaFormat.KEY_BIT_RATE, mBitRate);
+                }
+                Encoder encoder = new Encoder();
+                encoder.setupEncoder(encodeOutputStream, eleStream);
+                status = encoder.encode(codecName, encodeFormat, mMime, ENCODE_DEFAULT_FRAME_RATE,
+                        mSampleRate, frameSize, asyncMode);
+                encoder.deInitEncoder();
                 assertEquals(
-                        codecName + " encoder returned error " + status + " for " + "file:" + " " +
-                                mInputFile, 0, status);
+                        codecName + " encoder returned error " + status + " for mime: " + mMime,
+                        0, status);
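+                // Approximate the clip duration (used only for the stats report) from the size of
+                // the raw input stream.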
+                String inputReference;
+                long durationUs;
+                if (mMime.startsWith("video/")) {
+                    inputReference =
+                            mInputFile + "_" + mWidth + "x" + mHeight + "_" + mBitRate + "bps";
+                    durationUs = (((eleStream.getChannel().size() + frameSize - 1) / frameSize) /
+                            ENCODE_DEFAULT_FRAME_RATE) * 1000000;
+                } else {
+                    inputReference = mInputFile + "_" + mSampleRate + "hz_" + mNumChannel + "ch_" +
+                            mBitRate + "bps";
+                    durationUs =
+                            (eleStream.getChannel().size() / (mSampleRate * mNumChannel)) * 1000000;
+                }
+                encoder.dumpStatistics(inputReference, codecName, (asyncMode ? "async" : "sync"),
+                        durationUs, mStatsFile);
+                Log.i(TAG, "Encoding complete for mime: " + mMime + " with codec: " + codecName +
+                        " for asyncMode = " + asyncMode);
+                encoder.resetEncoder();
+                eleStream.close();
+                if (encodeOutputStream != null) {
+                    encodeOutputStream.close();
+                }
             }
         }
-        File decodedFile = new File(mDecodedFile);
-        // Cleanup temporary input file
-        if (decodedFile.exists()) {
-            assertTrue("Unable to delete - " + mDecodedFile, decodedFile.delete());
-            Log.i(TAG, "Successfully deleted - " + mDecodedFile);
+    }
+
+    @Test(timeout = PER_TEST_TIMEOUT_MS)
+    public void testNativeEncoder() {
+        ArrayList<String> mediaCodecs = CodecUtils.selectCodecs(mMime, true);
+        assertTrue("No suitable codecs found for mimetype: " + mMime, (mediaCodecs.size() > 0));
+        for (String codecName : mediaCodecs) {
+            Native nativeEncoder = new Native();
+            int status = nativeEncoder
+                    .Encode(mFileDirPath, mInputFile, mStatsFile, codecName, mMime, mBitRate,
+                            mColorFormat, mIFrameInterval, mWidth, mHeight, mProfile, mLevel,
+                            mSampleRate, mNumChannel);
+            assertEquals(codecName + " encoder returned error " + status + " for mime: " + mMime,
+                    0, status);
         }
-        fileInput.close();
+    }
+
+    @AfterClass
+    public static void deleteDecodedFiles() {
+        if (mDecodedFileFullHd.exists()) {
+            assertTrue("Unable to delete decoded file " + mDecodedFileFullHd.toString(),
+                    mDecodedFileFullHd.delete());
+            Log.i(TAG, "Successfully deleted decoded file " + mDecodedFileFullHd.toString());
+        }
+        if (mDecodedFileQcif.exists()) {
+            assertTrue("Unable to delete decoded file " + mDecodedFileQcif.toString(),
+                    mDecodedFileQcif.delete());
+            Log.i(TAG, "Successfully deleted decoded file " + mDecodedFileQcif.toString());
+        }
+        if (mDecodedFileAudio.exists()) {
+            assertTrue("Unable to delete decoded file " + mDecodedFileAudio.toString(),
+                    mDecodedFileAudio.delete());
+            Log.i(TAG, "Successfully deleted decoded file " + mDecodedFileAudio.toString());
+        }
     }
 }
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/NativeEncoder.cpp b/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/NativeEncoder.cpp
index 1277c8b..2f658e3 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/NativeEncoder.cpp
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/NativeEncoder.cpp
@@ -30,189 +30,81 @@
 #include <stdio.h>
 
 constexpr int32_t ENCODE_DEFAULT_FRAME_RATE = 25;
-constexpr int32_t ENCODE_DEFAULT_AUDIO_BIT_RATE = 128000 /* 128 Kbps */;
-constexpr int32_t ENCODE_DEFAULT_BIT_RATE = 8000000 /* 8 Mbps */;
-constexpr int32_t ENCODE_MIN_BIT_RATE = 600000 /* 600 Kbps */;
 
 extern "C" JNIEXPORT int JNICALL Java_com_android_media_benchmark_library_Native_Encode(
-        JNIEnv *env, jobject thiz, jstring jFilePath, jstring jFileName, jstring jOutFilePath,
-        jstring jStatsFile, jstring jCodecName) {
+        JNIEnv *env, jobject thiz, jstring jFilePath, jstring jFileName, jstring jStatsFile,
+        jstring jCodecName, jstring jMime, jint jBitRate, jint jColorFormat, jint jFrameInterval,
+        jint jWidth, jint jHeight, jint jProfile, jint jLevel, jint jSampleRate,
+        jint jNumChannels) {
+    UNUSED(thiz);
     const char *filePath = env->GetStringUTFChars(jFilePath, nullptr);
     const char *fileName = env->GetStringUTFChars(jFileName, nullptr);
-    string sFilePath = string(filePath) + string(fileName);
-    UNUSED(thiz);
-    FILE *inputFp = fopen(sFilePath.c_str(), "rb");
-    env->ReleaseStringUTFChars(jFileName, fileName);
-    env->ReleaseStringUTFChars(jFilePath, filePath);
-    if (!inputFp) {
-        ALOGE("Unable to open input file for reading");
+    string inputFile = string(filePath) + string(fileName);
+    const char *codecName = env->GetStringUTFChars(jCodecName, nullptr);
+    string sCodecName = string(codecName);
+    const char *mime = env->GetStringUTFChars(jMime, nullptr);
+
+    ifstream eleStream;
+    eleStream.open(inputFile, ifstream::binary | ifstream::ate);
+    if (!eleStream.is_open()) {
+        ALOGE("%s - File failed to open for reading!", fileName);
+        env->ReleaseStringUTFChars(jFilePath, filePath);
+        env->ReleaseStringUTFChars(jFileName, fileName);
+        env->ReleaseStringUTFChars(jCodecName, codecName);
+        env->ReleaseStringUTFChars(jMime, mime);
         return -1;
     }
 
-    Decoder *decoder = new Decoder();
-    Extractor *extractor = decoder->getExtractor();
-    if (!extractor) {
-        ALOGE("Extractor creation failed");
-        return -1;
-    }
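+    // Benchmark each codec in both async and sync mode, rewinding the raw input stream each time.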
+    bool asyncMode[2] = {true, false};
+    for (bool mode : asyncMode) {
+        size_t eleSize = eleStream.tellg();
+        eleStream.seekg(0, ifstream::beg);
 
-    // Read file properties
-    struct stat buf;
-    stat(sFilePath.c_str(), &buf);
-    size_t fileSize = buf.st_size;
-    if (fileSize > kMaxBufferSize) {
-        ALOGE("File size greater than maximum buffer size");
-        return -1;
-    }
-    int32_t fd = fileno(inputFp);
-    int32_t trackCount = extractor->initExtractor(fd, fileSize);
-    if (trackCount <= 0) {
-        ALOGE("initExtractor failed");
-        return -1;
-    }
+        // Set encoder params
+        encParameter encParams;
+        encParams.width = jWidth;
+        encParams.height = jHeight;
+        encParams.bitrate = jBitRate;
+        encParams.iFrameInterval = jFrameInterval;
+        encParams.sampleRate = jSampleRate;
+        encParams.numChannels = jNumChannels;
+        encParams.frameRate = ENCODE_DEFAULT_FRAME_RATE;
+        encParams.colorFormat = jColorFormat;
+        encParams.profile = jProfile;
+        encParams.level = jLevel;
 
-    for (int curTrack = 0; curTrack < trackCount; curTrack++) {
-        int32_t status = extractor->setupTrackFormat(curTrack);
-        if (status != 0) {
-            ALOGE("Track Format invalid");
-            return -1;
-        }
-        uint8_t *inputBuffer = (uint8_t *)malloc(fileSize);
-        if (!inputBuffer) {
-            ALOGE("Insufficient memory");
-            return -1;
-        }
-        vector<AMediaCodecBufferInfo> frameInfo;
-        AMediaCodecBufferInfo info;
-        uint32_t inputBufferOffset = 0;
-
-        // Get frame data
-        while (1) {
-            status = extractor->getFrameSample(info);
-            if (status || !info.size) break;
-            // copy the meta data and buffer to be passed to decoder
-            if (inputBufferOffset + info.size > kMaxBufferSize) {
-                ALOGE("Memory allocated not sufficient");
-                free(inputBuffer);
-                return -1;
-            }
-            memcpy(inputBuffer + inputBufferOffset, extractor->getFrameBuf(), info.size);
-            frameInfo.push_back(info);
-            inputBufferOffset += info.size;
-        }
-        string decName = "";
-        const char *outputFilePath = env->GetStringUTFChars(jOutFilePath, nullptr);
-        FILE *outFp = fopen(outputFilePath, "wb");
-        if (outFp == nullptr) {
-            ALOGE("%s - File failed to open for writing!", outputFilePath);
-            free(inputBuffer);
-            return -1;
-        }
-        decoder->setupDecoder();
-        status = decoder->decode(inputBuffer, frameInfo, decName, false /*asyncMode */, outFp);
+        Encoder *encoder = new Encoder();
+        encoder->setupEncoder();
+        auto status = encoder->encode(sCodecName, eleStream, eleSize, mode, encParams,
+                                      const_cast<char *>(mime));
         if (status != AMEDIA_OK) {
-            ALOGE("Decode returned error");
-            free(inputBuffer);
+            ALOGE("Encoder returned error");
+            delete encoder;
             return -1;
         }
-
-        AMediaFormat *decoderFormat = decoder->getFormat();
-        AMediaFormat *format = extractor->getFormat();
-        if (inputBuffer) {
-            free(inputBuffer);
-            inputBuffer = nullptr;
+        ALOGV("Encoding complete with codec %s for asyncMode = %d", sCodecName.c_str(), mode);
+        encoder->deInitCodec();
+        const char *statsFile = env->GetStringUTFChars(jStatsFile, nullptr);
+        string inputReference;
+        int64_t clipDurationUs;
+        if (!strncmp(mime, "video/", 6)) {
+            inputReference = string(fileName) + "_" + to_string(jWidth) + "x" + to_string(jHeight) +
+                             "_" + to_string(jBitRate) + "bps";
+            int32_t frameSize = jWidth * jHeight * 3 / 2;
+            clipDurationUs =
+                    (((eleSize + frameSize - 1) / frameSize) / ENCODE_DEFAULT_FRAME_RATE) * 1000000;
+        } else {
+            inputReference = string(fileName) + "_" + to_string(jSampleRate) + "hz_" +
+                             to_string(jNumChannels) + "ch_" + to_string(jBitRate) + "bps";
+            clipDurationUs = (eleSize / (jSampleRate * jNumChannels)) * 1000000;
         }
-        const char *mime = nullptr;
-        AMediaFormat_getString(format, AMEDIAFORMAT_KEY_MIME, &mime);
-        if (!mime) {
-            ALOGE("Error in AMediaFormat_getString");
-            return -1;
-        }
-        ifstream eleStream;
-        eleStream.open(outputFilePath, ifstream::binary | ifstream::ate);
-        if (!eleStream.is_open()) {
-            ALOGE("%s - File failed to open for reading!", outputFilePath);
-            env->ReleaseStringUTFChars(jOutFilePath, outputFilePath);
-            return -1;
-        }
-        const char *codecName = env->GetStringUTFChars(jCodecName, NULL);
-        const char *inputReference = env->GetStringUTFChars(jFileName, nullptr);
-        string sCodecName = string(codecName);
-        string sInputReference = string(inputReference);
-
-        bool asyncMode[2] = {true, false};
-        for (int i = 0; i < 2; i++) {
-            size_t eleSize = eleStream.tellg();
-            eleStream.seekg(0, ifstream::beg);
-
-            // Get encoder params
-            encParameter encParams;
-            if (!strncmp(mime, "video/", 6)) {
-                AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_WIDTH, &encParams.width);
-                AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_HEIGHT, &encParams.height);
-                AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_FRAME_RATE, &encParams.frameRate);
-                AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_BIT_RATE, &encParams.bitrate);
-                if (encParams.bitrate <= 0 || encParams.frameRate <= 0) {
-                    encParams.frameRate = ENCODE_DEFAULT_FRAME_RATE;
-                    if (!strcmp(mime, "video/3gpp") || !strcmp(mime, "video/mp4v-es")) {
-                        encParams.bitrate = ENCODE_MIN_BIT_RATE /* 600 Kbps */;
-                    } else {
-                        encParams.bitrate = ENCODE_DEFAULT_BIT_RATE /* 8 Mbps */;
-                    }
-                }
-                AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_PROFILE, &encParams.profile);
-                AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_LEVEL, &encParams.level);
-                AMediaFormat_getInt32(decoderFormat, AMEDIAFORMAT_KEY_COLOR_FORMAT,
-                                      &encParams.colorFormat);
-            } else {
-                AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_SAMPLE_RATE, &encParams.sampleRate);
-                AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_CHANNEL_COUNT,
-                                      &encParams.numChannels);
-                encParams.bitrate = ENCODE_DEFAULT_AUDIO_BIT_RATE;
-            }
-            Encoder *encoder = new Encoder();
-            encoder->setupEncoder();
-            status = encoder->encode(sCodecName, eleStream, eleSize, asyncMode[i], encParams,
-                                     (char *)mime);
-            if (status != AMEDIA_OK) {
-                ALOGE("Encoder returned error");
-                return -1;
-            }
-            ALOGV("Encoding complete with codec %s for asyncMode = %d", sCodecName.c_str(),
-                  asyncMode[i]);
-            encoder->deInitCodec();
-            const char *statsFile = env->GetStringUTFChars(jStatsFile, nullptr);
-            encoder->dumpStatistics(sInputReference, extractor->getClipDuration(), sCodecName,
-                                    (asyncMode[i] ? "async" : "sync"), statsFile);
-            env->ReleaseStringUTFChars(jStatsFile, statsFile);
-            encoder->resetEncoder();
-            delete encoder;
-            encoder = nullptr;
-        }
-        eleStream.close();
-        if (outFp) {
-            fclose(outFp);
-            outFp = nullptr;
-        }
-        env->ReleaseStringUTFChars(jFileName, inputReference);
-        env->ReleaseStringUTFChars(jCodecName, codecName);
-        env->ReleaseStringUTFChars(jOutFilePath, outputFilePath);
-        if (format) {
-            AMediaFormat_delete(format);
-            format = nullptr;
-        }
-        if (decoderFormat) {
-            AMediaFormat_delete(decoderFormat);
-            decoderFormat = nullptr;
-        }
-        decoder->deInitCodec();
-        decoder->resetDecoder();
+        encoder->dumpStatistics(inputReference, clipDurationUs, sCodecName,
+                                (mode ? "async" : "sync"), statsFile);
+        env->ReleaseStringUTFChars(jStatsFile, statsFile);
+        encoder->resetEncoder();
+        delete encoder;
+        encoder = nullptr;
     }
-    if (inputFp) {
-        fclose(inputFp);
-        inputFp = nullptr;
-    }
-    extractor->deInitExtractor();
-    delete decoder;
+    eleStream.close();
+    env->ReleaseStringUTFChars(jFilePath, filePath);
+    env->ReleaseStringUTFChars(jFileName, fileName);
+    env->ReleaseStringUTFChars(jMime, mime);
+    env->ReleaseStringUTFChars(jCodecName, codecName);
     return 0;
 }
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Encoder.java b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Encoder.java
index 45e5574..754cd8e 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Encoder.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Encoder.java
@@ -175,10 +175,10 @@
 
                 @Override
                 public void onError(@NonNull MediaCodec mediaCodec, @NonNull CodecException e) {
-                    mediaCodec.stop();
-                    mediaCodec.release();
-                    Log.e(TAG, "CodecError: " + e.toString());
+                    mSignalledError = true;
+                    Log.e(TAG, "Codec Error: " + e.toString());
                     e.printStackTrace();
+                    synchronized (mLock) { mLock.notify(); }
                 }
 
                 @Override
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Native.java b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Native.java
index 38b608a..3e3969c 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Native.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Native.java
@@ -27,6 +27,7 @@
     public native int Decode(String inputFilePath, String inputFileName, String statsFile,
             String codecName, boolean asyncMode);
 
-    public native int Encode(String inputFilePath, String inputFileName, String outputFilePath,
-            String statsFile, String codecName);
+    public native int Encode(String inputFilePath, String inputFileName, String statsFile,
+            String codecName, String mime, int bitRate, int colorFormat, int frameInterval,
+            int width, int height, int profile, int level, int sampleRate, int numChannel);
 }
diff --git a/media/tests/benchmark/README.md b/media/tests/benchmark/README.md
index 05fbe6f..047c289 100644
--- a/media/tests/benchmark/README.md
+++ b/media/tests/benchmark/README.md
@@ -1,7 +1,7 @@
 # Benchmark tests
 
 Benchmark app analyses the time taken by MediaCodec, MediaExtractor and MediaMuxer for given set of inputs. It is used to benchmark these modules on android devices.
-Benchmark results are emitted to logcat.
+Benchmark results are published as a CSV report.
 
 This page describes steps to run the NDK and SDK layer test.
 
@@ -10,35 +10,49 @@
 mmm frameworks/av/media/tests/benchmark/
 ```
 
-# NDK
-
-To run the test suite for measuring performance of the native layer, follow the following steps:
-
-The binaries will be created in the following path : $OUT/data/nativetest64/
-
-adb push $OUT/data/nativetest64/* /data/local/tmp/
-
-Eg. adb push $OUT/data/nativetest64/extractorTest/extractorTest /data/local/tmp/
-
-To run the binary, follow the commands mentioned below under each module.
-
-The resource file for the tests is taken from [here](https://drive.google.com/open?id=1ghMr17BBJ7n0pqbm7oREiTN_MNemJUqy)
+# Resources
+The resource file for the tests is taken from [here](https://storage.googleapis.com/android_media/frameworks/av/media/tests/benchmark/MediaBenchmark.zip)
 
 Download the MediaBenchmark.zip file, unzip and push it to /data/local/tmp/ on the device.
 
 ```
 unzip MediaBenchmark.zip
-adb push MediaBenchmark /data/local/tmp
+adb push MediaBenchmark /data/local/tmp/MediaBenchmark/res/
 ```
 
+The resource files are assumed to be at /data/local/tmp/MediaBenchmark/res/. You can use a different location, but you have to modify the rest of the instructions to replace /data/local/tmp/MediaBenchmark/res/ with wherever you chose to put the files.
+
+# NDK CLI Tests
+Note: The [Benchmark Application](#BenchmarkApplication) now supports profiling both SDK and NDK APIs and is the preferred way to benchmark codecs.
+
+To run the test suite for measuring performance of the native layer, follow these steps:
+
+The 64-bit binaries will be created in the following path: ${OUT}/data/nativetest64/
+
+To test the 64-bit binaries, push them from nativetest64 with
+adb push $OUT/data/nativetest64/* /data/local/tmp/. For example:
+
+```
+adb push $OUT/data/nativetest64/extractorTest/extractorTest /data/local/tmp/
+```
+
+The 32-bit binaries will be created in the following path: ${OUT}/data/nativetest/
+
+To test the 32-bit binaries, push them from nativetest with
+adb push $OUT/data/nativetest/* /data/local/tmp/. For example:
+
+```
+adb push $OUT/data/nativetest/extractorTest/extractorTest /data/local/tmp/
+```
+
+To run the binary, follow the commands mentioned below under each module.
+
 ## Extractor
 
 The test extracts elementary stream and benchmarks the extractors available in NDK.
 
-The resource files are assumed to be at /data/local/tmp/MediaBenchmark/res/. You can use a different location, but you have to modify the rest of the instructions to replace /data/local/tmp/MediaBenchmark/res/ with wherever you chose to put the files.
-
-The path to these files on the device is required to be given for the test.
-
 ```
 adb shell /data/local/tmp/extractorTest -P /data/local/tmp/MediaBenchmark/res/
 ```
@@ -47,8 +61,6 @@
 
 The test decodes input stream and benchmarks the decoders available in NDK.
 
-Setup steps are same as extractor.
-
 ```
 adb shell /data/local/tmp/decoderTest -P /data/local/tmp/MediaBenchmark/res/
 ```
@@ -57,8 +69,6 @@
 
 The test muxes elementary stream and benchmarks the muxers available in NDK.
 
-Setup steps are same as extractor.
-
 ```
 adb shell /data/local/tmp/muxerTest -P /data/local/tmp/MediaBenchmark/res/
 ```
@@ -67,55 +77,82 @@
 
 The test encodes input stream and benchmarks the encoders available in NDK.
 
-Setup steps are same as extractor.
-
 ```
 adb shell /data/local/tmp/encoderTest -P /data/local/tmp/MediaBenchmark/res/
 ```
 
-# SDK
+# <a name="BenchmarkApplication"></a> Benchmark Application
+To run the test suite for measuring performance of the SDK and NDK APIs, follow the steps below.
+The Benchmark Application can be run in two ways.
 
-To run the test suite for measuring performance of the SDK APIs, follow the following steps:
+## Steps to run with atest
+Note that the atest command will install the Benchmark application and push the required test files to the device as well.
+
+For running all the tests, run the following command:
+```
+atest com.android.media.benchmark.tests -- --enable-module-dynamic-download=true
+```
+
+For running the tests individually, run the following atest commands:
+
+```
+atest com.android.media.benchmark.tests.ExtractorTest -- --enable-module-dynamic-download=true
+atest com.android.media.benchmark.tests.DecoderTest -- --enable-module-dynamic-download=true
+atest com.android.media.benchmark.tests.MuxerTest -- --enable-module-dynamic-download=true
+atest com.android.media.benchmark.tests.EncoderTest -- --enable-module-dynamic-download=true
+```
+
+## Steps to run without atest
 
 The apk will be created at the following path:
-$OUT/testcases/MediaBenchmarkTest/arm64/
 
-To get the resorce files for the test follow instructions given in [NDK](#NDK)
+The 64-bit apk will be created in the following path:
+$OUT/testcases/MediaBenchmarkTest/arm64/
 
 For installing the apk, run the command:
 ```
 adb install -f -r $OUT/testcases/MediaBenchmarkTest/arm64/MediaBenchmarkTest.apk
 ```
 
-For running all the tests, run the command:
+The 32-bit apk will be created in the following path:
+$OUT/testcases/MediaBenchmarkTest/arm/
+
+For installing the apk, run the command:
+```
+adb install -f -r $OUT/testcases/MediaBenchmarkTest/arm/MediaBenchmarkTest.apk
+```
+
+To get the resource files for the test follow instructions given in [Resources](#Resources)
+
+For running all the tests, run the following command:
 ```
 adb shell am instrument -w -r -e package com.android.media.benchmark.tests com.android.media.benchmark/androidx.test.runner.AndroidJUnitRunner
 ```
 
 ## Extractor
 
-The test extracts elementary stream and benchmarks the extractors available in SDK.
+The test extracts elementary stream and benchmarks the extractors available in SDK and NDK.
 ```
 adb shell am instrument -w -r -e class 'com.android.media.benchmark.tests.ExtractorTest' com.android.media.benchmark/androidx.test.runner.AndroidJUnitRunner
 ```
 
 ## Decoder
 
-The test decodes input stream and benchmarks the decoders available in SDK.
+The test decodes input stream and benchmarks the decoders available in SDK and NDK.
 ```
 adb shell am instrument -w -r -e class 'com.android.media.benchmark.tests.DecoderTest' com.android.media.benchmark/androidx.test.runner.AndroidJUnitRunner
 ```
 
 ## Muxer
 
-The test muxes elementary stream and benchmarks different writers available in SDK.
+The test muxes elementary stream and benchmarks different writers available in SDK and NDK.
 ```
 adb shell am instrument -w -r -e class 'com.android.media.benchmark.tests.MuxerTest' com.android.media.benchmark/androidx.test.runner.AndroidJUnitRunner
 ```
 
 ## Encoder
 
-The test encodes input stream and benchmarks the encoders available in SDK.
+The test encodes input stream and benchmarks the encoders available in SDK and NDK.
 ```
 adb shell am instrument -w -r -e class 'com.android.media.benchmark.tests.EncoderTest' com.android.media.benchmark/androidx.test.runner.AndroidJUnitRunner
 ```
@@ -124,24 +161,27 @@
 To run the test suite for measuring performance of the codec2 layer, follow the following steps:
 
 The 32-bit binaries will be created in the following path : ${OUT}/data/nativetest/
+
 The 64-bit binaries will be created in the following path : ${OUT}/data/nativetest64/
 
 To test 64-bit binary push binaries from nativetest64.
 adb push $(OUT)/data/nativetest64/* /data/local/tmp/
-Eg. adb push $(OUT)/data/nativetest64/C2DecoderTest/C2DecoderTest /data/local/tmp/
+```
+adb push $(OUT)/data/nativetest64/C2DecoderTest/C2DecoderTest /data/local/tmp/
+```
 
 To test 32-bit binary push binaries from nativetest.
 adb push $(OUT)/data/nativetest/* /data/local/tmp/
-Eg. adb push $(OUT)/data/nativetest/C2DecoderTest/C2DecoderTest /data/local/tmp/
+```
+adb push $(OUT)/data/nativetest/C2DecoderTest/C2DecoderTest /data/local/tmp/
+```
 
-To get the resource files for the test follow instructions given in [NDK](#NDK)
+To get the resource files for the test follow instructions given in [Resources](#Resources)
 
 ## C2 Decoder
 
 The test decodes input stream and benchmarks the codec2 decoders available in device.
 
-Setup steps are same as [extractor](#extractor).
-
 ```
 adb shell /data/local/tmp/C2DecoderTest -P /data/local/tmp/MediaBenchmark/res/
 ```
@@ -149,8 +189,95 @@
 
 The test encodes input stream and benchmarks the codec2 encoders available in device.
 
-Setup steps are same as [extractor](#extractor).
-
 ```
 adb shell /data/local/tmp/C2EncoderTest -P /data/local/tmp/MediaBenchmark/res/
 ```
+
+# Analysis
+
+The benchmark results are stored in a CSV file which can be used for analysis. These results are stored in the following format:
+<app directory>/<module_name>.<timestamp>.csv
+
+Note: The timestamp is in milliseconds (taken from the current system time), so it changes with every run.
+
+To find the location of the CSV file, look for the path in the logs. Example log below:
+
+```
+com.android.media.benchmark D/DecoderTest: Saving Benchmark results in: /storage/emulated/0/Android/data/com.android.media.benchmark/files/Decoder.1587732395387.csv
+```
+
+This file can be pulled from the device using the "adb pull" command.
+```
+adb pull /storage/emulated/0/Android/data/com.android.media.benchmark/files/Decoder.1587732395387.csv ./Decoder.1587732395387.csv
+```
+
+## CSV Columns
+
+The following columns are available in the CSV:
+
+Note: All time values are in nanoseconds.
+
+1. **currentTime**: The time recorded at the creation of the stats. This may be used to estimate the time between consecutive test clips.
+
+2. **fileName**: The file being used as an input for the benchmark test.
+
+3. **operation**: The current operation on the input test vector i.e. Extract/Mux/Encode/Decode.
+
+4. **NDK/SDK**: The target API layer, i.e. AMedia (NDK) vs. Media (SDK) calls, for the operation being performed.
+
+5. **sync/async**: This is specific to MediaCodec objects (i.e. Encoder and Decoder). It specifies the mode in which the MediaCodec APIs are used: in async mode, callbacks are set; in sync mode, the dequeue-buffer APIs are polled to queue and dequeue input and output buffers respectively (see the MediaCodec sketch after this list).
+
+6. **setupTime**: The time taken to set up the MediaExtractor/Muxer/Codec instance.
+
+    * MediaCodec: includes setting async/sync mode, configuring with a format and codec.start
+
+    * MediaExtractor: includes AMediaExtractor_new and setDataSource.
+
+    * MediaMuxer: includes creating the object, adding track, and starting the muxer.
+
+7. **destroyTime**: The time taken to stop and close MediaExtractor/Muxer/Codec instance.
+
+8. **minimumTime**: The minimum time taken to extract/mux/encode/decode a frame.
+
+9. **maximumTime**: The maximum time taken to extract/mux/encode/decode a frame.
+
+10. **averageTime**: Average time taken to extract/mux/encode/decode per frame.
+
+    * MediaCodec: computed as the total time taken to encode/decode all frames divided by the number of frames encoded/decoded.
+
+    * MediaExtractor: computed as the total time taken to extract all frames divided by the number of frames extracted.
+
+    * MediaMuxer: computed as the total time taken to mux all frames divided by the number of frames muxed.
+
+11. **timeToProcess1SecContent**: The time required to process one second's worth of input data.
+
+12. **totalBytesProcessedPerSec**: The number of bytes extracted/muxed/decoded/encoded per second.
+
+13. **timeToFirstFrame**: The time taken to receive the first output frame.
+
+14. **totalSizeInBytes**: The total output size of the operation (in bytes).
+
+15. **totalTime**: The time taken to perform the complete operation (i.e. Extract/Mux/Decode/Encode) for the respective test vector.
+
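+To make the difference concrete, below is a minimal sketch of a sync-mode polling loop. This is not the benchmark's own implementation; `FrameSource` and its methods are hypothetical helpers and error handling is omitted.
+
+```java
+import android.media.MediaCodec;
+
+import java.nio.ByteBuffer;
+
+public class SyncModeSketch {
+    // Hypothetical supplier of raw frames; not part of the benchmark library.
+    public interface FrameSource {
+        int fillInput(ByteBuffer buffer); // returns the frame size, or -1 at end of input
+        long ptsUs();                     // presentation timestamp for the current frame
+    }
+
+    // Polls the dequeue-buffer APIs until the end-of-stream flag is seen on the output side.
+    public static void run(MediaCodec codec, FrameSource source) {
+        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
+        boolean sawInputEOS = false;
+        boolean sawOutputEOS = false;
+        while (!sawOutputEOS) {
+            if (!sawInputEOS) {
+                int inIndex = codec.dequeueInputBuffer(10000 /* timeoutUs */);
+                if (inIndex >= 0) {
+                    ByteBuffer inBuf = codec.getInputBuffer(inIndex);
+                    int size = source.fillInput(inBuf);
+                    sawInputEOS = (size < 0);
+                    codec.queueInputBuffer(inIndex, 0, Math.max(size, 0), source.ptsUs(),
+                            sawInputEOS ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);
+                }
+            }
+            int outIndex = codec.dequeueOutputBuffer(info, 10000 /* timeoutUs */);
+            if (outIndex >= 0) {
+                // A real implementation would consume codec.getOutputBuffer(outIndex) here.
+                codec.releaseOutputBuffer(outIndex, false /* render */);
+                sawOutputEOS = (info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
+            }
+        }
+    }
+}
+```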
+
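+For quick offline analysis, a pulled CSV can be loaded with a few lines of Java. The sketch below assumes the report starts with the header row shown earlier and resolves column positions by name; the default file name is only an example.
+
+```java
+import java.io.BufferedReader;
+import java.io.FileReader;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.List;
+
+public class CsvQuickLook {
+    public static void main(String[] args) throws IOException {
+        // Hypothetical local copy of a report pulled with "adb pull".
+        String path = args.length > 0 ? args[0] : "Decoder.1587732395387.csv";
+        try (BufferedReader reader = new BufferedReader(new FileReader(path))) {
+            // Locate the columns of interest from the header row instead of hard-coding indices.
+            List<String> header = Arrays.asList(reader.readLine().split(",\\s*"));
+            int fileCol = header.indexOf("fileName");
+            int codecCol = header.indexOf("componentName");
+            int avgCol = header.indexOf("averageTime");
+            String line;
+            while ((line = reader.readLine()) != null) {
+                String[] row = line.split(",\\s*");
+                System.out.println(row[fileCol] + " / " + row[codecCol]
+                        + ": averageTime = " + row[avgCol] + " ns");
+            }
+        }
+    }
+}
+```
+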
+## Muxer
+1. **componentName**: The format of the output Media file. Following muxers are currently supported:
+     * Ogg, Webm, 3gpp, and mp4.
+
+## Decoder
+1. **componentName**: Includes all supported codecs on the device. Aliased components are skipped.
+    *   Video: H263, H264, H265, VPx, Mpeg4, Mpeg2, AV1
+    *   Audio: AAC, Flac, Opus, MP3, Vorbis, GSM, AMR-NB/WB
+
+## Encoder
+1. **componentName**: Includes all supported codecs on the device. Aliased components are skipped.
+    *   Video: H263, H264, H265, VPx, Mpeg4
+    *   Audio: AAC, Flac, Opus, AMR-NB/WB
+
+## Common Failures
+On some devices, if a codec isn't supported, some tests may report a "codec not found" failure for that mime type.
+
+For example, on mobile devices without support for the mpeg2 decoder, the following failure is observed:
+```
+Unable to create codec by mime: video/mpeg2
+```
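+
+If these failures need to be filtered out up front, codec availability can be checked before a test is run. Below is a minimal sketch using the platform MediaCodecList API; the format shown in the comment is only an example.
+
+```java
+import android.media.MediaCodecList;
+import android.media.MediaFormat;
+
+public class CodecSupportCheck {
+    // Returns true if the device has a decoder that can handle the given format, e.g.
+    // MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_MPEG2, 1920, 1080).
+    public static boolean hasDecoder(MediaFormat format) {
+        MediaCodecList codecList = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
+        return codecList.findDecoderForFormat(format) != null;
+    }
+}
+```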
diff --git a/media/tests/benchmark/src/native/decoder/C2Decoder.cpp b/media/tests/benchmark/src/native/decoder/C2Decoder.cpp
index 20a1468..46c4a7c 100644
--- a/media/tests/benchmark/src/native/decoder/C2Decoder.cpp
+++ b/media/tests/benchmark/src/native/decoder/C2Decoder.cpp
@@ -156,9 +156,11 @@
     mStats->setDeInitTime(timeTaken);
 }
 
-void C2Decoder::dumpStatistics(string inputReference, int64_t durationUs) {
+void C2Decoder::dumpStatistics(string inputReference, int64_t durationUs, string componentName,
+                               string statsFile) {
     string operation = "c2decode";
-    mStats->dumpStatistics(operation, inputReference, durationUs);
+    string mode = "async";
+    mStats->dumpStatistics(operation, inputReference, durationUs, componentName, mode, statsFile);
 }
 
 void C2Decoder::resetDecoder() {
diff --git a/media/tests/benchmark/src/native/decoder/C2Decoder.h b/media/tests/benchmark/src/native/decoder/C2Decoder.h
index 4a3eb96..fb35a66 100644
--- a/media/tests/benchmark/src/native/decoder/C2Decoder.h
+++ b/media/tests/benchmark/src/native/decoder/C2Decoder.h
@@ -31,7 +31,8 @@
 
     void deInitCodec();
 
-    void dumpStatistics(string inputReference, int64_t durationUs);
+    void dumpStatistics(string inputReference, int64_t durationUs, string componentName,
+                        string statsFile);
 
     void resetDecoder();
 
diff --git a/media/tests/benchmark/src/native/encoder/C2Encoder.cpp b/media/tests/benchmark/src/native/encoder/C2Encoder.cpp
index 33429ef..6a50d40 100644
--- a/media/tests/benchmark/src/native/encoder/C2Encoder.cpp
+++ b/media/tests/benchmark/src/native/encoder/C2Encoder.cpp
@@ -251,9 +251,11 @@
     mStats->setDeInitTime(timeTaken);
 }
 
-void C2Encoder::dumpStatistics(string inputReference, int64_t durationUs) {
+void C2Encoder::dumpStatistics(string inputReference, int64_t durationUs, string componentName,
+                               string statsFile) {
     string operation = "c2encode";
-    mStats->dumpStatistics(operation, inputReference, durationUs);
+    string mode = "async";
+    mStats->dumpStatistics(operation, inputReference, durationUs, componentName, mode, statsFile);
 }
 
 void C2Encoder::resetEncoder() {
diff --git a/media/tests/benchmark/src/native/encoder/C2Encoder.h b/media/tests/benchmark/src/native/encoder/C2Encoder.h
index a4ca097..7a021f4 100644
--- a/media/tests/benchmark/src/native/encoder/C2Encoder.h
+++ b/media/tests/benchmark/src/native/encoder/C2Encoder.h
@@ -44,7 +44,8 @@
 
     void deInitCodec();
 
-    void dumpStatistics(string inputReference, int64_t durationUs);
+    void dumpStatistics(string inputReference, int64_t durationUs, string componentName,
+                        string statsFile);
 
     void resetEncoder();
 
diff --git a/media/tests/benchmark/src/native/encoder/Encoder.cpp b/media/tests/benchmark/src/native/encoder/Encoder.cpp
index 26fb1b9..15c479d 100644
--- a/media/tests/benchmark/src/native/encoder/Encoder.cpp
+++ b/media/tests/benchmark/src/native/encoder/Encoder.cpp
@@ -203,13 +203,13 @@
         AMediaFormat_setInt32(mFormat, AMEDIAFORMAT_KEY_WIDTH, mParams.width);
         AMediaFormat_setInt32(mFormat, AMEDIAFORMAT_KEY_HEIGHT, mParams.height);
         AMediaFormat_setInt32(mFormat, AMEDIAFORMAT_KEY_FRAME_RATE, mParams.frameRate);
+        AMediaFormat_setInt32(mFormat, AMEDIAFORMAT_KEY_I_FRAME_INTERVAL, mParams.iFrameInterval);
         AMediaFormat_setInt32(mFormat, AMEDIAFORMAT_KEY_BIT_RATE, mParams.bitrate);
-        AMediaFormat_setInt32(mFormat, AMEDIAFORMAT_KEY_I_FRAME_INTERVAL, 1);
-        if (mParams.profile && mParams.level) {
+        AMediaFormat_setInt32(mFormat, AMEDIAFORMAT_KEY_COLOR_FORMAT, mParams.colorFormat);
+        if (mParams.profile != -1 && mParams.level != -1) {
             AMediaFormat_setInt32(mFormat, AMEDIAFORMAT_KEY_PROFILE, mParams.profile);
             AMediaFormat_setInt32(mFormat, AMEDIAFORMAT_KEY_LEVEL, mParams.level);
         }
-        AMediaFormat_setInt32(mFormat, AMEDIAFORMAT_KEY_COLOR_FORMAT, mParams.colorFormat);
     } else {
         AMediaFormat_setInt32(mFormat, AMEDIAFORMAT_KEY_SAMPLE_RATE, mParams.sampleRate);
         AMediaFormat_setInt32(mFormat, AMEDIAFORMAT_KEY_CHANNEL_COUNT, mParams.numChannels);
diff --git a/media/tests/benchmark/src/native/encoder/Encoder.h b/media/tests/benchmark/src/native/encoder/Encoder.h
index 5ad142b..324317c 100644
--- a/media/tests/benchmark/src/native/encoder/Encoder.h
+++ b/media/tests/benchmark/src/native/encoder/Encoder.h
@@ -23,10 +23,11 @@
 #include <queue>
 #include <thread>
 
-#include "media/NdkImage.h"
 #include "BenchmarkCommon.h"
 #include "Stats.h"
 
+// COLOR_FormatYUV420Flexible from the SDK (MediaCodecInfo.CodecCapabilities); the constant is not
+// defined in the NDK API.
+constexpr int32_t COLOR_FormatYUV420Flexible = 0x7F420888;
 
 struct encParameter {
     int32_t bitrate = -1;
@@ -38,9 +39,10 @@
     int32_t width = 0;
     int32_t height = 0;
     int32_t frameRate = -1;
-    int32_t profile = 0;
-    int32_t level = 0;
-    int32_t colorFormat = AIMAGE_FORMAT_YUV_420_888;
+    int32_t iFrameInterval = 0;
+    int32_t profile = -1;
+    int32_t level = -1;
+    int32_t colorFormat = COLOR_FormatYUV420Flexible;
 };
 
 class Encoder : public CallBackHandle {
diff --git a/media/tests/benchmark/tests/BenchmarkTestEnvironment.h b/media/tests/benchmark/tests/BenchmarkTestEnvironment.h
index ae2eee1..4edb048 100644
--- a/media/tests/benchmark/tests/BenchmarkTestEnvironment.h
+++ b/media/tests/benchmark/tests/BenchmarkTestEnvironment.h
@@ -25,7 +25,9 @@
 
 class BenchmarkTestEnvironment : public ::testing::Environment {
   public:
-    BenchmarkTestEnvironment() : res("/sdcard/media/") {}
+    BenchmarkTestEnvironment()
+        : res("/data/local/tmp/MediaBenchmark/res/"),
+          statsFile("/data/local/tmp/MediaBenchmark/res/stats.csv") {}
 
     // Parses the command line argument
     int initFromOptions(int argc, char **argv);
@@ -34,8 +36,15 @@
 
     const string getRes() const { return res; }
 
+    void setStatsFile(const string module) { statsFile = getRes() + module; }
+
+    const string getStatsFile() const { return statsFile; }
+
+    bool writeStatsHeader();
+
   private:
     string res;
+    string statsFile;
 };
 
 int BenchmarkTestEnvironment::initFromOptions(int argc, char **argv) {
@@ -70,4 +79,26 @@
     return 0;
 }
 
+/**
+ * Writes the CSV stats header to the file pointed to by statsFile.
+ *
+ * \return true if the header was written successfully, false otherwise
+ **/
+bool BenchmarkTestEnvironment::writeStatsHeader() {
+    char statsHeader[] =
+        "currentTime, fileName, operation, componentName, NDK/SDK, sync/async, setupTime, "
+        "destroyTime, minimumTime, maximumTime, averageTime, timeToProcess1SecContent, "
+        "totalBytesProcessedPerSec, timeToFirstFrame, totalSizeInBytes, totalTime\n";
+    FILE *fpStats = fopen(statsFile.c_str(), "w");
+    if (!fpStats) {
+        return false;
+    }
+    // sizeof(statsHeader) - 1 excludes the terminating '\0' so it is not written into the CSV.
+    size_t headerLen = sizeof(statsHeader) - 1;
+    size_t numBytes = fwrite(statsHeader, sizeof(char), headerLen, fpStats);
+    fclose(fpStats);
+    if (numBytes != headerLen) {
+        return false;
+    }
+    return true;
+}
+
 #endif  // __BENCHMARK_TEST_ENVIRONMENT_H__
diff --git a/media/tests/benchmark/tests/C2DecoderTest.cpp b/media/tests/benchmark/tests/C2DecoderTest.cpp
index dedc743..85dcbc1 100644
--- a/media/tests/benchmark/tests/C2DecoderTest.cpp
+++ b/media/tests/benchmark/tests/C2DecoderTest.cpp
@@ -136,7 +136,8 @@
                 mDecoder->deInitCodec();
                 int64_t durationUs = extractor->getClipDuration();
                 ALOGV("codec : %s", codecName.c_str());
-                mDecoder->dumpStatistics(GetParam().first, durationUs);
+                mDecoder->dumpStatistics(GetParam().first, durationUs, codecName,
+                                         gEnv->getStatsFile());
                 mDecoder->resetDecoder();
             }
         }
@@ -178,6 +179,9 @@
     ::testing::InitGoogleTest(&argc, argv);
     int status = gEnv->initFromOptions(argc, argv);
     if (status == 0) {
+        gEnv->setStatsFile("C2Decoder.csv");
+        status = gEnv->writeStatsHeader();
+        ALOGV("Stats file = %d\n", status);
         status = RUN_ALL_TESTS();
         ALOGV("C2 Decoder Test result = %d\n", status);
     }
diff --git a/media/tests/benchmark/tests/C2EncoderTest.cpp b/media/tests/benchmark/tests/C2EncoderTest.cpp
index 98eb17a..b18d856 100644
--- a/media/tests/benchmark/tests/C2EncoderTest.cpp
+++ b/media/tests/benchmark/tests/C2EncoderTest.cpp
@@ -108,7 +108,7 @@
         }
 
         string decName = "";
-        string outputFileName = "decode.out";
+        string outputFileName = "/data/local/tmp/decode.out";
         FILE *outFp = fopen(outputFileName.c_str(), "wb");
         ASSERT_NE(outFp, nullptr) << "Unable to open output file" << outputFileName
                                   << " for dumping decoder's output";
@@ -140,7 +140,8 @@
                 mEncoder->deInitCodec();
                 int64_t durationUs = extractor->getClipDuration();
                 ALOGV("codec : %s", codecName.c_str());
-                mEncoder->dumpStatistics(GetParam().first, durationUs);
+                mEncoder->dumpStatistics(GetParam().first, durationUs, codecName,
+                                         gEnv->getStatsFile());
                 mEncoder->resetEncoder();
             }
         }
@@ -180,6 +181,9 @@
     ::testing::InitGoogleTest(&argc, argv);
     int status = gEnv->initFromOptions(argc, argv);
     if (status == 0) {
+        gEnv->setStatsFile("C2Encoder.csv");
+        status = gEnv->writeStatsHeader();
+        ALOGV("Stats file header write status = %d\n", status);
         status = RUN_ALL_TESTS();
         ALOGV("C2 Encoder Test result = %d\n", status);
     }
diff --git a/media/tests/benchmark/tests/DecoderTest.cpp b/media/tests/benchmark/tests/DecoderTest.cpp
index 9f96d3b..81ef02a 100644
--- a/media/tests/benchmark/tests/DecoderTest.cpp
+++ b/media/tests/benchmark/tests/DecoderTest.cpp
@@ -84,7 +84,8 @@
         decoder->deInitCodec();
         ALOGV("codec : %s", codecName.c_str());
         string inputReference = get<0>(params);
-        decoder->dumpStatistics(inputReference);
+        decoder->dumpStatistics(inputReference, codecName, (asyncMode ? "async" : "sync"),
+                                gEnv->getStatsFile());
         free(inputBuffer);
         decoder->resetDecoder();
     }
@@ -179,8 +180,11 @@
     ::testing::InitGoogleTest(&argc, argv);
     int status = gEnv->initFromOptions(argc, argv);
     if (status == 0) {
+        gEnv->setStatsFile("Decoder.csv");
+        status = gEnv->writeStatsHeader();
+        ALOGV("Stats file header write status = %d\n", status);
         status = RUN_ALL_TESTS();
-        ALOGD("Decoder Test result = %d\n", status);
+        ALOGV("Decoder Test result = %d\n", status);
     }
     return status;
 }
\ No newline at end of file
diff --git a/media/tests/benchmark/tests/EncoderTest.cpp b/media/tests/benchmark/tests/EncoderTest.cpp
index dc2a2dd..7e1681d 100644
--- a/media/tests/benchmark/tests/EncoderTest.cpp
+++ b/media/tests/benchmark/tests/EncoderTest.cpp
@@ -23,6 +23,10 @@
 #include "Decoder.h"
 #include "Encoder.h"
 
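+// Bit rates used when configuring the encoder under test.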
+constexpr int32_t kEncodeDefaultVideoBitRate = 8000000 /* 8 Mbps */;
+constexpr int32_t kEncodeMinVideoBitRate = 600000 /* 600 Kbps */;
+constexpr int32_t kEncodeDefaultAudioBitRate = 128000 /* 128 Kbps */;
+
 static BenchmarkTestEnvironment *gEnv = nullptr;
 
 class EncoderTest : public ::testing::TestWithParam<tuple<string, string, bool>> {};
@@ -78,7 +82,7 @@
         }
 
         string decName = "";
-        string outputFileName = "decode.out";
+        string outputFileName = "/data/local/tmp/decode.out";
         FILE *outFp = fopen(outputFileName.c_str(), "wb");
         ASSERT_NE(outFp, nullptr) << "Unable to open output file" << outputFileName
                                   << " for dumping decoder's output";
@@ -86,6 +90,7 @@
         decoder->setupDecoder();
         status = decoder->decode(inputBuffer, frameInfo, decName, false /*asyncMode */, outFp);
         ASSERT_EQ(status, AMEDIA_OK) << "Decode returned error : " << status;
+        AMediaFormat *decoderFormat = decoder->getFormat();
 
         ifstream eleStream;
         eleStream.open(outputFileName.c_str(), ifstream::binary | ifstream::ate);
@@ -108,11 +113,13 @@
             if (encParams.bitrate <= 0 || encParams.frameRate <= 0) {
                 encParams.frameRate = 25;
                 if (!strcmp(mime, "video/3gpp") || !strcmp(mime, "video/mp4v-es")) {
-                    encParams.bitrate = 600000 /* 600 Kbps */;
+                    encParams.bitrate = kEncodeMinVideoBitRate;
                 } else {
-                    encParams.bitrate = 8000000 /* 8 Mbps */;
+                    encParams.bitrate = kEncodeDefaultVideoBitRate;
                 }
             }
+            AMediaFormat_getInt32(decoderFormat, AMEDIAFORMAT_KEY_COLOR_FORMAT,
+                                  &encParams.colorFormat);
             AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_PROFILE, &encParams.profile);
             AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_LEVEL, &encParams.level);
         } else {
@@ -120,8 +127,7 @@
                                               &encParams.sampleRate));
             ASSERT_TRUE(AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_CHANNEL_COUNT,
                                               &encParams.numChannels));
-            encParams.bitrate =
-                    encParams.sampleRate * encParams.numChannels * 16 /* bitsPerSample */;
+            encParams.bitrate = kEncodeDefaultAudioBitRate;
         }
 
         encoder->setupEncoder();
@@ -133,7 +139,8 @@
         encoder->deInitCodec();
         ALOGV("codec : %s", codecName.c_str());
         string inputReference = get<0>(params);
-        encoder->dumpStatistics(inputReference, extractor->getClipDuration());
+        encoder->dumpStatistics(inputReference, extractor->getClipDuration(), codecName,
+                                (asyncMode ? "async" : "sync"), gEnv->getStatsFile());
         eleStream.close();
         if (outFp) fclose(outFp);
 
@@ -141,6 +148,10 @@
             AMediaFormat_delete(format);
             format = nullptr;
         }
+        if (decoderFormat) {
+            AMediaFormat_delete(decoderFormat);
+            decoderFormat = nullptr;
+        }
         encoder->resetEncoder();
         decoder->deInitCodec();
         free(inputBuffer);
@@ -214,8 +225,11 @@
     ::testing::InitGoogleTest(&argc, argv);
     int status = gEnv->initFromOptions(argc, argv);
     if (status == 0) {
+        gEnv->setStatsFile("Encoder.csv");
+        status = gEnv->writeStatsHeader();
+        ALOGV("Stats file header write status = %d\n", status);
         status = RUN_ALL_TESTS();
-        ALOGD("Encoder Test result = %d\n", status);
+        ALOGV("Encoder Test result = %d\n", status);
     }
     return status;
 }
diff --git a/media/tests/benchmark/tests/ExtractorTest.cpp b/media/tests/benchmark/tests/ExtractorTest.cpp
index ad8f1e6..d14d15b 100644
--- a/media/tests/benchmark/tests/ExtractorTest.cpp
+++ b/media/tests/benchmark/tests/ExtractorTest.cpp
@@ -48,8 +48,7 @@
     ASSERT_EQ(status, AMEDIA_OK) << "Extraction failed \n";
 
     extractObj->deInitExtractor();
-
-    extractObj->dumpStatistics(GetParam().first);
+    extractObj->dumpStatistics(GetParam().first, "", gEnv->getStatsFile());
 
     fclose(inputFp);
     delete extractObj;
@@ -79,8 +78,11 @@
     ::testing::InitGoogleTest(&argc, argv);
     int status = gEnv->initFromOptions(argc, argv);
     if (status == 0) {
+        gEnv->setStatsFile("Extractor.csv");
+        status = gEnv->writeStatsHeader();
+        ALOGV("Stats file header write status = %d\n", status);
         status = RUN_ALL_TESTS();
-        ALOGD(" Extractor Test result = %d\n", status);
+        ALOGV("Extractor Test result = %d\n", status);
     }
     return status;
 }
diff --git a/media/tests/benchmark/tests/MuxerTest.cpp b/media/tests/benchmark/tests/MuxerTest.cpp
index fa2635d..991644b 100644
--- a/media/tests/benchmark/tests/MuxerTest.cpp
+++ b/media/tests/benchmark/tests/MuxerTest.cpp
@@ -113,7 +113,7 @@
         ASSERT_EQ(status, 0) << "Mux failed";
 
         muxerObj->deInitMuxer();
-        muxerObj->dumpStatistics(GetParam().first + "." + fmt.c_str());
+        muxerObj->dumpStatistics(GetParam().first + "." + fmt.c_str(), fmt, gEnv->getStatsFile());
         free(inputBuffer);
         fclose(outputFp);
         muxerObj->resetMuxer();
@@ -151,8 +151,11 @@
     ::testing::InitGoogleTest(&argc, argv);
     int status = gEnv->initFromOptions(argc, argv);
     if (status == 0) {
+        gEnv->setStatsFile("Muxer.csv");
+        status = gEnv->writeStatsHeader();
+        ALOGV("Stats file header write status = %d\n", status);
         status = RUN_ALL_TESTS();
-        ALOGV("Test result = %d\n", status);
+        ALOGV("Muxer Test result = %d\n", status);
     }
     return status;
 }
diff --git a/media/utils/EventLogTags.logtags b/media/utils/EventLogTags.logtags
index 67f0ea8..c397f34 100644
--- a/media/utils/EventLogTags.logtags
+++ b/media/utils/EventLogTags.logtags
@@ -31,7 +31,7 @@
 # 6: Percent
 # Default value for data of type int/long is 2 (bytes).
 #
-# See system/core/logcat/event.logtags for the master copy of the tags.
+# See system/core/logcat/event.logtags for the original definition of the tags.
 
 # 61000 - 61199 reserved for audioserver
 
diff --git a/media/utils/ProcessInfo.cpp b/media/utils/ProcessInfo.cpp
index 113e4a7..19225d3 100644
--- a/media/utils/ProcessInfo.cpp
+++ b/media/utils/ProcessInfo.cpp
@@ -27,6 +27,9 @@
 
 namespace android {
 
+static constexpr int32_t INVALID_ADJ = -10000;
+static constexpr int32_t NATIVE_ADJ = -1000;
+
 ProcessInfo::ProcessInfo() {}
 
 bool ProcessInfo::getPriority(int pid, int* priority) {
@@ -35,8 +38,6 @@
 
     size_t length = 1;
     int32_t state;
-    static const int32_t INVALID_ADJ = -10000;
-    static const int32_t NATIVE_ADJ = -1000;
     int32_t score = INVALID_ADJ;
     status_t err = service->getProcessStatesAndOomScoresFromPids(length, &pid, &state, &score);
     if (err != OK) {
@@ -45,8 +46,17 @@
     }
     ALOGV("pid %d state %d score %d", pid, state, score);
     if (score <= NATIVE_ADJ) {
-        ALOGE("pid %d invalid OOM adjustments value %d", pid, score);
-        return false;
+        std::scoped_lock lock{mOverrideLock};
+
+        // If this process is not tracked by ActivityManagerService, look for overrides.
+        auto it = mOverrideMap.find(pid);
+        if (it != mOverrideMap.end()) {
+            ALOGI("pid %d invalid OOM score %d, override to %d", pid, score, it->second.oomScore);
+            score = it->second.oomScore;
+        } else {
+            ALOGE("pid %d invalid OOM score %d", pid, score);
+            return false;
+        }
     }
 
     // Use OOM adjustments value as the priority. Lower the value, higher the priority.
@@ -61,6 +71,26 @@
     return (callingPid == getpid()) || (callingPid == pid) || (callingUid == AID_MEDIA);
 }
 
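+// Records an override for |pid| so that getPriority() can report a usable OOM score for
+// processes not tracked by ActivityManagerService. Returns false if oomScore is NATIVE_ADJ
+// or below, in which case any existing override is cleared instead.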
+bool ProcessInfo::overrideProcessInfo(int pid, int procState, int oomScore) {
+    std::scoped_lock lock{mOverrideLock};
+
+    mOverrideMap.erase(pid);
+
+    // Disable the override if oomScore is set to NATIVE_ADJ or below.
+    if (oomScore <= NATIVE_ADJ) {
+        return false;
+    }
+
+    mOverrideMap.emplace(pid, ProcessInfoOverride{procState, oomScore});
+    return true;
+}
+
+void ProcessInfo::removeProcessInfoOverride(int pid) {
+    std::scoped_lock lock{mOverrideLock};
+
+    mOverrideMap.erase(pid);
+}
+
 ProcessInfo::~ProcessInfo() {}
 
 }  // namespace android
diff --git a/media/utils/fuzzers/Android.bp b/media/utils/fuzzers/Android.bp
new file mode 100644
index 0000000..ca1123c
--- /dev/null
+++ b/media/utils/fuzzers/Android.bp
@@ -0,0 +1,51 @@
+cc_defaults {
+    name: "libmediautils_fuzzer_defaults",
+    shared_libs: [
+        "libbinder",
+        "libcutils",
+        "liblog",
+        "libmediautils",
+        "libutils",
+    ],
+
+    cflags: [
+        "-Wall",
+        "-Wextra",
+        "-Werror",
+        "-Wno-c++2a-extensions",
+    ],
+
+    header_libs: [
+        "bionic_libc_platform_headers",
+        "libmedia_headers",
+    ],
+
+    include_dirs: [
+        // For DEBUGGER_SIGNAL
+        "system/core/debuggerd/include",
+    ],
+}
+
+cc_fuzz {
+    name: "libmediautils_fuzzer_battery_notifier",
+    defaults: ["libmediautils_fuzzer_defaults"],
+    srcs: ["BatteryNotifierFuzz.cpp"],
+}
+
+cc_fuzz {
+    name: "libmediautils_fuzzer_scheduling_policy_service",
+    defaults: ["libmediautils_fuzzer_defaults"],
+    srcs: ["SchedulingPolicyServiceFuzz.cpp"],
+}
+
+cc_fuzz {
+    name: "libmediautils_fuzzer_service_utilities",
+    defaults: ["libmediautils_fuzzer_defaults"],
+    srcs: ["ServiceUtilitiesFuzz.cpp"],
+}
+
+cc_fuzz {
+    name: "libmediautils_fuzzer_time_check",
+    defaults: ["libmediautils_fuzzer_defaults"],
+    srcs: ["TimeCheckFuzz.cpp"],
+}
diff --git a/media/utils/fuzzers/BatteryNotifierFuzz.cpp b/media/utils/fuzzers/BatteryNotifierFuzz.cpp
new file mode 100644
index 0000000..00b3cce
--- /dev/null
+++ b/media/utils/fuzzers/BatteryNotifierFuzz.cpp
@@ -0,0 +1,90 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <functional>
+#include <string>
+#include <vector>
+
+#include <utils/String8.h>
+
+#include "fuzzer/FuzzedDataProvider.h"
+#include "mediautils/BatteryNotifier.h"
+
+static constexpr int kMaxOperations = 30;
+static constexpr int kMaxStringLength = 500;
+using android::BatteryNotifier;
+
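+// Table of BatteryNotifier operations the fuzzer can invoke; each lambda ignores the
+// arguments it does not need.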
+std::vector<std::function<void(std::string /*flashlight_name*/, std::string /*camera_name*/,
+                               uid_t /*video_id*/, uid_t /*audio_id*/, uid_t /*light_id*/,
+                               uid_t /*camera_id*/)>>
+    operations = {
+        [](std::string, std::string, uid_t, uid_t, uid_t, uid_t) -> void {
+            BatteryNotifier::getInstance().noteResetVideo();
+        },
+        [](std::string, std::string, uid_t, uid_t, uid_t, uid_t) -> void {
+            BatteryNotifier::getInstance().noteResetAudio();
+        },
+        [](std::string, std::string, uid_t, uid_t, uid_t, uid_t) -> void {
+            BatteryNotifier::getInstance().noteResetFlashlight();
+        },
+        [](std::string, std::string, uid_t, uid_t, uid_t, uid_t) -> void {
+            BatteryNotifier::getInstance().noteResetCamera();
+        },
+        [](std::string, std::string, uid_t video_id, uid_t, uid_t, uid_t) -> void {
+            BatteryNotifier::getInstance().noteStartVideo(video_id);
+        },
+        [](std::string, std::string, uid_t video_id, uid_t, uid_t, uid_t) -> void {
+            BatteryNotifier::getInstance().noteStopVideo(video_id);
+        },
+        [](std::string, std::string, uid_t, uid_t audio_id, uid_t, uid_t) -> void {
+            BatteryNotifier::getInstance().noteStartAudio(audio_id);
+        },
+        [](std::string, std::string, uid_t, uid_t audio_id, uid_t, uid_t) -> void {
+            BatteryNotifier::getInstance().noteStopAudio(audio_id);
+        },
+        [](std::string flashlight_name, std::string, uid_t, uid_t, uid_t light_id, uid_t) -> void {
+            android::String8 name(flashlight_name.c_str());
+            BatteryNotifier::getInstance().noteFlashlightOn(name, light_id);
+        },
+        [](std::string flashlight_name, std::string, uid_t, uid_t, uid_t light_id, uid_t) -> void {
+            android::String8 name(flashlight_name.c_str());
+            BatteryNotifier::getInstance().noteFlashlightOff(name, light_id);
+        },
+        [](std::string, std::string camera_name, uid_t, uid_t, uid_t, uid_t camera_id) -> void {
+            android::String8 name(camera_name.c_str());
+            BatteryNotifier::getInstance().noteStartCamera(name, camera_id);
+        },
+        [](std::string, std::string camera_name, uid_t, uid_t, uid_t, uid_t camera_id) -> void {
+            android::String8 name(camera_name.c_str());
+            BatteryNotifier::getInstance().noteStopCamera(name, camera_id);
+        },
+};
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    FuzzedDataProvider data_provider(data, size);
+    std::string camera_name = data_provider.ConsumeRandomLengthString(kMaxStringLength);
+    std::string flashlight_name = data_provider.ConsumeRandomLengthString(kMaxStringLength);
+    uid_t video_id = data_provider.ConsumeIntegral<uid_t>();
+    uid_t audio_id = data_provider.ConsumeIntegral<uid_t>();
+    uid_t light_id = data_provider.ConsumeIntegral<uid_t>();
+    uid_t camera_id = data_provider.ConsumeIntegral<uid_t>();
+    size_t ops_run = 0;
+    while (data_provider.remaining_bytes() > 0 && ops_run++ < kMaxOperations) {
+        uint8_t op = data_provider.ConsumeIntegralInRange<uint8_t>(0, operations.size() - 1);
+        operations[op](flashlight_name, camera_name, video_id, audio_id, light_id, camera_id);
+    }
+    return 0;
+}
diff --git a/media/utils/fuzzers/SchedulingPolicyServiceFuzz.cpp b/media/utils/fuzzers/SchedulingPolicyServiceFuzz.cpp
new file mode 100644
index 0000000..4521853
--- /dev/null
+++ b/media/utils/fuzzers/SchedulingPolicyServiceFuzz.cpp
@@ -0,0 +1,61 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#define LOG_TAG "SchedulingPolicyServiceFuzzer"
+#include <binder/IBatteryStats.h>
+#include <binder/IServiceManager.h>
+#include <utils/String16.h>
+#include <android/log.h>
+#include <mediautils/SchedulingPolicyService.h>
+#include "fuzzer/FuzzedDataProvider.h"
+using android::IBatteryStats;
+using android::IBinder;
+using android::IInterface;
+using android::IServiceManager;
+using android::sp;
+using android::String16;
+using android::defaultServiceManager;
+using android::requestCpusetBoost;
+using android::requestPriority;
+sp<IBatteryStats> getBatteryService() {
+    sp<IBatteryStats> batteryStatService;
+    const sp<IServiceManager> sm(defaultServiceManager());
+    if (sm != nullptr) {
+        const String16 name("batterystats");
+        batteryStatService = checked_interface_cast<IBatteryStats>(sm->checkService(name));
+        if (batteryStatService == nullptr) {
+            ALOGW("batterystats service unavailable!");
+            return nullptr;
+        }
+    }
+    return batteryStatService;
+}
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    FuzzedDataProvider data_provider(data, size);
+    sp<IBatteryStats> batteryStatService = getBatteryService();
+    // There is some state here, but it's mostly focused on thread safety, so
+    // we won't worry about order.
+    int32_t priority = data_provider.ConsumeIntegral<int32_t>();
+    bool is_for_app = data_provider.ConsumeBool();
+    bool async = data_provider.ConsumeBool();
+    requestPriority(getpid(), gettid(), priority, is_for_app, async);
+    // TODO: Verify and re-enable in AOSP (R).
+    // bool enable = data_provider.ConsumeBool();
+    // We are just using batterystats to avoid the need
+    // to register a new service.
+    // requestCpusetBoost(enable, IInterface::asBinder(batteryStatService));
+    return 0;
+}
+
diff --git a/media/utils/fuzzers/ServiceUtilitiesFuzz.cpp b/media/utils/fuzzers/ServiceUtilitiesFuzz.cpp
new file mode 100644
index 0000000..3d141b5
--- /dev/null
+++ b/media/utils/fuzzers/ServiceUtilitiesFuzz.cpp
@@ -0,0 +1,78 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <fcntl.h>
+
+#include <functional>
+
+#include "fuzzer/FuzzedDataProvider.h"
+#include "mediautils/ServiceUtilities.h"
+
+static constexpr int kMaxOperations = 50;
+static constexpr int kMaxStringLen = 256;
+
+const std::vector<std::function<void(FuzzedDataProvider*, android::MediaPackageManager)>>
+    operations = {
+        [](FuzzedDataProvider* data_provider, android::MediaPackageManager pm) -> void {
+            uid_t uid = data_provider->ConsumeIntegral<uid_t>();
+            pm.allowPlaybackCapture(uid);
+        },
+        [](FuzzedDataProvider* data_provider, android::MediaPackageManager pm) -> void {
+            int spaces = data_provider->ConsumeIntegral<int>();
+
+            // Dump everything into /dev/null
+            int fd = open("/dev/null", O_WRONLY);
+            pm.dump(fd, spaces);
+            close(fd);
+        },
+};
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    FuzzedDataProvider data_provider(data, size);
+    uid_t uid = data_provider.ConsumeIntegral<uid_t>();
+    pid_t pid = data_provider.ConsumeIntegral<pid_t>();
+
+    // There is no state here, and order is not significant,
+    // so we can simply call all of the target functions.
+    android::isServiceUid(uid);
+    android::isAudioServerUid(uid);
+    android::isAudioServerOrSystemServerUid(uid);
+    android::isAudioServerOrMediaServerUid(uid);
+    std::string packageNameStr = data_provider.ConsumeRandomLengthString(kMaxStringLen);
+    android::String16 opPackageName(packageNameStr.c_str());
+    android::recordingAllowed(opPackageName, pid, uid);
+    android::startRecording(opPackageName, pid, uid);
+    android::finishRecording(opPackageName, uid);
+    android::captureAudioOutputAllowed(pid, uid);
+    android::captureMediaOutputAllowed(pid, uid);
+    android::captureHotwordAllowed(opPackageName, pid, uid);
+    android::modifyPhoneStateAllowed(uid, pid);
+    android::bypassInterruptionPolicyAllowed(uid, pid);
+    android::settingsAllowed();
+    android::modifyAudioRoutingAllowed();
+    android::modifyDefaultAudioEffectsAllowed();
+    android::dumpAllowed();
+
+    // MediaPackageManager does have state, so we need the fuzzer to decide order
+    android::MediaPackageManager packageManager;
+    size_t ops_run = 0;
+    while (data_provider.remaining_bytes() > 0 && ops_run++ < kMaxOperations) {
+        uint8_t op = data_provider.ConsumeIntegralInRange<uint8_t>(0, operations.size() - 1);
+        operations[op](&data_provider, packageManager);
+    }
+
+    return 0;
+}
diff --git a/media/utils/fuzzers/TimeCheckFuzz.cpp b/media/utils/fuzzers/TimeCheckFuzz.cpp
new file mode 100644
index 0000000..eeb6ba6
--- /dev/null
+++ b/media/utils/fuzzers/TimeCheckFuzz.cpp
@@ -0,0 +1,64 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <chrono>
+#include <thread>
+
+#include "fuzzer/FuzzedDataProvider.h"
+#include "mediautils/TimeCheck.h"
+
+static constexpr int kMaxStringLen = 256;
+
+// While it might be interesting to test long-running
+// jobs, it seems unlikely it'd lead to the types of crashes
+// we're looking for, and would mean a significant increase in fuzzer time.
+// Therefore, we are setting a low cap.
+static constexpr uint32_t kMaxTimeoutMs = 1000;
+static constexpr uint32_t kMinTimeoutMs = 200;
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    FuzzedDataProvider data_provider(data, size);
+
+    // There are only a few operations that we can exercise in this class:
+    // 1. The time it takes to run this operation. As mentioned above,
+    //    long-running tasks are not good for fuzzing, but there will be
+    //    some change in the run time.
+    uint32_t timeoutMs =
+        data_provider.ConsumeIntegralInRange<uint32_t>(kMinTimeoutMs, kMaxTimeoutMs);
+    uint8_t pid_size = data_provider.ConsumeIntegral<uint8_t>();
+    std::vector<pid_t> pids(pid_size);
+    for (auto& pid : pids) {
+        pid = data_provider.ConsumeIntegral<pid_t>();
+    }
+
+    // 2. We also have setAudioHalPids, which is populated with the pids set
+    // above.
+    android::TimeCheck::setAudioHalPids(pids);
+    std::string name = data_provider.ConsumeRandomLengthString(kMaxStringLen);
+
+    // 3. The constructor, which is fuzzed here:
+    android::TimeCheck timeCheck(name.c_str(), timeoutMs);
+    // We will leave some buffer to avoid sleeping too long
+    uint8_t sleep_amount_ms = data_provider.ConsumeIntegralInRange<uint8_t>(0, timeoutMs / 2);
+
+    // We want to make sure we can cover the time out functionality.
+    if (sleep_amount_ms) {
+        auto ms = std::chrono::milliseconds(sleep_amount_ms);
+        std::this_thread::sleep_for(ms);
+    }
+
+    // 4. Finally, the TimeCheck destructor, which runs when timeCheck goes out of
+    // scope here. These seem to be the only factors in play.
+    return 0;
+}
diff --git a/services/OWNERS b/services/OWNERS
index 66a4bcb..f0b5e2f 100644
--- a/services/OWNERS
+++ b/services/OWNERS
@@ -5,3 +5,5 @@
 gkasten@google.com
 hunga@google.com
 marcone@google.com
+nchalko@google.com
+quxiangfang@google.com
diff --git a/services/audioflinger/Android.bp b/services/audioflinger/Android.bp
index 3873600..7443320 100644
--- a/services/audioflinger/Android.bp
+++ b/services/audioflinger/Android.bp
@@ -35,6 +35,10 @@
     ],
 
     shared_libs: [
+        "audioflinger-aidl-unstable-cpp",
+        "audioclient-types-aidl-unstable-cpp",
+        "av-types-aidl-unstable-cpp",
+        "libaudioclient_aidl_conversion",
         "libaudiofoundation",
         "libaudiohal",
         "libaudioprocessing",
@@ -54,6 +58,7 @@
         "libmediautils",
         "libmemunreachable",
         "libmedia_helper",
+        "libshmemcompat",
         "libvibrator",
     ],
 
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index f014209..959e858 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -22,6 +22,15 @@
 // Define AUDIO_ARRAYS_STATIC_CHECK to check all audio arrays are correct
 #define AUDIO_ARRAYS_STATIC_CHECK 1
 
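+// Unwraps a conversion result (an object exposing ok()/error()/value()); aborts the
+// process via LOG_ALWAYS_FATAL if the result carries an error.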
+#define VALUE_OR_FATAL(result)                   \
+    ({                                           \
+       auto _tmp = (result);                     \
+       LOG_ALWAYS_FATAL_IF(!_tmp.ok(),           \
+                           "Failed result (%d)", \
+                           _tmp.error());        \
+       std::move(_tmp.value());                  \
+     })
+
 #include "Configuration.h"
 #include <dirent.h>
 #include <math.h>
@@ -61,12 +70,14 @@
 #include <system/audio_effects/effect_visualizer.h>
 #include <system/audio_effects/effect_ns.h>
 #include <system/audio_effects/effect_aec.h>
+#include <system/audio_effects/effect_hapticgenerator.h>
 
 #include <audio_utils/primitives.h>
 
 #include <powermanager/PowerManager.h>
 
 #include <media/IMediaLogService.h>
+#include <media/AidlConversion.h>
 #include <media/nbaio/Pipe.h>
 #include <media/nbaio/PipeReader.h>
 #include <mediautils/BatteryNotifier.h>
@@ -97,6 +108,8 @@
 
 namespace android {
 
+using media::IEffectClient;
+
 static const char kDeadlockedString[] = "AudioFlinger may be deadlocked\n";
 static const char kHardwareLockedString[] = "Hardware lock is taken\n";
 static const char kClientLockedString[] = "Client lock is taken\n";
@@ -194,7 +207,12 @@
         mNextUniqueIds[use] = AUDIO_UNIQUE_ID_USE_MAX;
     }
 
+#if 1
+    // FIXME See bug 165702394 and bug 168511485
+    const bool doLog = false;
+#else
     const bool doLog = property_get_bool("ro.test_harness", false);
+#endif
     if (doLog) {
         mLogMemoryDealer = new MemoryDealer(kLogMemorySize, "LogWriters",
                 MemoryHeapBase::READ_ONLY);
@@ -398,7 +416,7 @@
             return ret;
         }
     }
-    return AudioMixer::HAPTIC_SCALE_MUTE;
+    return static_cast<int>(os::HapticScale::MUTE);
 }
 
 /* static */
@@ -684,8 +702,8 @@
 
 sp<NBLog::Writer> AudioFlinger::newWriter_l(size_t size, const char *name)
 {
-    // If there is no memory allocated for logs, return a dummy writer that does nothing.
-    // Similarly if we can't contact the media.log service, also return a dummy writer.
+    // If there is no memory allocated for logs, return a no-op writer.
+    // Similarly, if we can't contact the media.log service, also return a no-op writer.
     if (mLogMemoryDealer == 0 || sMediaLogService == 0) {
         return new NBLog::Writer();
     }
@@ -711,7 +729,7 @@
             }
         }
         // Even after garbage-collecting all old writers, there is still not enough memory,
-        // so return a dummy writer
+        // so return a no-op writer
         return new NBLog::Writer();
     }
 success:
@@ -739,10 +757,27 @@
 
 // IAudioFlinger interface
 
-sp<IAudioTrack> AudioFlinger::createTrack(const CreateTrackInput& input,
-                                          CreateTrackOutput& output,
-                                          status_t *status)
+sp<IAudioTrack> AudioFlinger::createTrack(const media::CreateTrackRequest& _input,
+                                          media::CreateTrackResponse& _output,
+                                          status_t* status)
 {
+    // Local version of VALUE_OR_RETURN, specific to this method's calling conventions.
+#define VALUE_OR_EXIT(expr)         \
+    ({                              \
+        auto _tmp = (expr);         \
+        if (!_tmp.ok()) {           \
+            *status = _tmp.error(); \
+            return nullptr;         \
+        }                           \
+        std::move(_tmp.value());    \
+    })
+
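+    // Convert the AIDL request into the legacy CreateTrackInput; conversion errors are
+    // returned to the caller via *status.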
+    CreateTrackInput input = VALUE_OR_EXIT(CreateTrackInput::fromAidl(_input));
+
+#undef VALUE_OR_EXIT
+
+    CreateTrackOutput output;
+
     sp<PlaybackThread::Track> track;
     sp<TrackHandle> trackHandle;
     sp<Client> client;
@@ -852,7 +887,8 @@
                                       input.notificationsPerBuffer, input.speed,
                                       input.sharedBuffer, sessionId, &output.flags,
                                       callingPid, input.clientInfo.clientTid, clientUid,
-                                      &lStatus, portId, input.audioTrackCallback);
+                                      &lStatus, portId, input.audioTrackCallback,
+                                      input.opPackageName);
         LOG_ALWAYS_FATAL_IF((lStatus == NO_ERROR) && (track == 0));
         // we don't abort yet if lStatus != NO_ERROR; there is still work to be done regardless
 
@@ -998,6 +1034,8 @@
         AudioSystem::moveEffectsToIo(effectIds, effectThreadId);
     }
 
+    _output = VALUE_OR_FATAL(output.toAidl());
+
     // return handle to client
     trackHandle = new TrackHandle(track);
 
@@ -1283,9 +1321,9 @@
     }
 
     // Now set the master mute in each playback thread.  Playback threads
-    // assigned to HALs which do not have master mute support will apply master
-    // mute during the mix operation.  Threads with HALs which do support master
-    // mute will simply ignore the setting.
+    // assigned to HALs which do not have master mute support will apply master mute
+    // during the mix operation.  Threads with HALs which do support master mute
+    // will simply ignore the setting.
     Vector<VolumeInterface *> volumeInterfaces = getAllVolumeInterfaces_l();
     for (size_t i = 0; i < volumeInterfaces.size(); i++) {
         volumeInterfaces[i]->setMasterMute(muted);
@@ -1430,7 +1468,7 @@
 }
 
 
-void AudioFlinger::broacastParametersToRecordThreads_l(const String8& keyValuePairs)
+void AudioFlinger::broadcastParametersToRecordThreads_l(const String8& keyValuePairs)
 {
     for (size_t i = 0; i < mRecordThreads.size(); i++) {
         mRecordThreads.valueAt(i)->setParameters(keyValuePairs);
@@ -1588,7 +1626,7 @@
             int value;
             if ((param.getInt(String8(AudioParameter::keyRouting), value) == NO_ERROR) &&
                     (value != 0)) {
-                broacastParametersToRecordThreads_l(filteredKeyValuePairs);
+                broadcastParametersToRecordThreads_l(filteredKeyValuePairs);
             }
         }
     }
@@ -1765,7 +1803,7 @@
     return BAD_VALUE;
 }
 
-void AudioFlinger::registerClient(const sp<IAudioFlingerClient>& client)
+void AudioFlinger::registerClient(const sp<media::IAudioFlingerClient>& client)
 {
     Mutex::Autolock _l(mLock);
     if (client == 0) {
@@ -1840,13 +1878,18 @@
 
 void AudioFlinger::ioConfigChanged(audio_io_config_event event,
                                    const sp<AudioIoDescriptor>& ioDesc,
-                                   pid_t pid)
-{
+                                   pid_t pid) {
+    media::AudioIoDescriptor descAidl = VALUE_OR_FATAL(
+            legacy2aidl_AudioIoDescriptor_AudioIoDescriptor(ioDesc));
+    media::AudioIoConfigEvent eventAidl = VALUE_OR_FATAL(
+            legacy2aidl_audio_io_config_event_AudioIoConfigEvent(event));
+
     Mutex::Autolock _l(mClientLock);
     size_t size = mNotificationClients.size();
     for (size_t i = 0; i < size; i++) {
         if ((pid == 0) || (mNotificationClients.keyAt(i) == pid)) {
-            mNotificationClients.valueAt(i)->audioFlingerClient()->ioConfigChanged(event, ioDesc);
+            mNotificationClients.valueAt(i)->audioFlingerClient()->ioConfigChanged(eventAidl,
+                                                                                   descAidl);
         }
     }
 }
@@ -1920,7 +1963,7 @@
 // ----------------------------------------------------------------------------
 
 AudioFlinger::NotificationClient::NotificationClient(const sp<AudioFlinger>& audioFlinger,
-                                                     const sp<IAudioFlingerClient>& client,
+                                                     const sp<media::IAudioFlingerClient>& client,
                                                      pid_t pid,
                                                      uid_t uid)
     : mAudioFlinger(audioFlinger), mPid(pid), mUid(uid), mAudioFlingerClient(client)
@@ -1975,10 +2018,26 @@
 
 // ----------------------------------------------------------------------------
 
-sp<media::IAudioRecord> AudioFlinger::createRecord(const CreateRecordInput& input,
-                                                   CreateRecordOutput& output,
-                                                   status_t *status)
+sp<media::IAudioRecord> AudioFlinger::createRecord(const media::CreateRecordRequest& _input,
+                                                   media::CreateRecordResponse& _output,
+                                                   status_t* status)
 {
+    // Local version of VALUE_OR_RETURN, specific to this method's calling conventions.
+#define VALUE_OR_EXIT(expr)         \
+    ({                              \
+        auto _tmp = (expr);         \
+        if (!_tmp.ok()) {           \
+            *status = _tmp.error(); \
+            return nullptr;         \
+        }                           \
+        std::move(_tmp.value());    \
+    })
+
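+    // Same pattern as createTrack(): convert the AIDL request, reporting conversion
+    // errors to the caller via *status.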
+    CreateRecordInput input = VALUE_OR_EXIT(CreateRecordInput::fromAidl(_input));
+
+#undef VALUE_OR_EXIT
+    CreateRecordOutput output;
+
     sp<RecordThread::RecordTrack> recordTrack;
     sp<RecordHandle> recordHandle;
     sp<Client> client;
@@ -2068,8 +2127,8 @@
         Mutex::Autolock _l(mLock);
         RecordThread *thread = checkRecordThread_l(output.inputId);
         if (thread == NULL) {
-            ALOGE("createRecord() checkRecordThread_l failed, input handle %d", output.inputId);
-            lStatus = BAD_VALUE;
+            ALOGW("createRecord() checkRecordThread_l failed, input handle %d", output.inputId);
+            lStatus = FAILED_TRANSACTION;
             goto Exit;
         }
 
@@ -2116,6 +2175,8 @@
     output.buffers = recordTrack->getBuffers();
     output.portId = portId;
 
+    _output = VALUE_OR_FATAL(output.toAidl());
+
     // return handle to client
     recordHandle = new RecordHandle(recordTrack);
 
@@ -3141,7 +3202,8 @@
 // dumpToThreadLog_l() must be called with AudioFlinger::mLock held
 void AudioFlinger::dumpToThreadLog_l(const sp<ThreadBase> &thread)
 {
-    audio_utils::FdToString fdToString;
+    constexpr int THREAD_DUMP_TIMEOUT_MS = 2;
+    audio_utils::FdToString fdToString("- ", THREAD_DUMP_TIMEOUT_MS);
     const int fd = fdToString.fd();
     if (fd >= 0) {
         thread->dump(fd, {} /* args */);
@@ -3297,6 +3359,16 @@
     return minThread;
 }
 
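+// Returns the first playback thread whose output supports haptic channels, or nullptr if
+// none exists.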
+AudioFlinger::ThreadBase *AudioFlinger::hapticPlaybackThread_l() const {
+    for (size_t i  = 0; i < mPlaybackThreads.size(); ++i) {
+        PlaybackThread *thread = mPlaybackThreads.valueAt(i).get();
+        if (thread->hapticChannelMask() != AUDIO_CHANNEL_NONE) {
+            return thread;
+        }
+    }
+    return nullptr;
+}
+
 sp<AudioFlinger::SyncEvent> AudioFlinger::createSyncEvent(AudioSystem::sync_event_t type,
                                     audio_session_t triggerSession,
                                     audio_session_t listenerSession,
@@ -3427,7 +3499,7 @@
     return status;
 }
 
-sp<IEffect> AudioFlinger::createEffect(
+sp<media::IEffect> AudioFlinger::createEffect(
         effect_descriptor_t *pDesc,
         const sp<IEffectClient>& effectClient,
         int32_t priority,
@@ -3538,6 +3610,16 @@
             goto Exit;
         }
 
+        const bool hapticPlaybackRequired = EffectModule::isHapticGenerator(&desc.type);
+        if (hapticPlaybackRequired
+                && (sessionId == AUDIO_SESSION_DEVICE
+                        || sessionId == AUDIO_SESSION_OUTPUT_MIX
+                        || sessionId == AUDIO_SESSION_OUTPUT_STAGE)) {
+            // haptic-generating effect is only valid when the session id is a general session id
+            lStatus = INVALID_OPERATION;
+            goto Exit;
+        }
+
         // return effect descriptor
         *pDesc = desc;
         if (io == AUDIO_IO_HANDLE_NONE && sessionId == AUDIO_SESSION_OUTPUT_MIX) {
@@ -3612,7 +3694,17 @@
             // allow only one effect chain per sessionId on mPlaybackThreads.
             for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
                 const audio_io_handle_t checkIo = mPlaybackThreads.keyAt(i);
-                if (io == checkIo) continue;
+                if (io == checkIo) {
+                    if (hapticPlaybackRequired
+                            && mPlaybackThreads.valueAt(i)
+                                    ->hapticChannelMask() == AUDIO_CHANNEL_NONE) {
+                        ALOGE("%s: haptic playback is required but the requested playback "
+                              "thread (io=%d) doesn't support haptic channels", __func__, (int)io);
+                        lStatus = BAD_VALUE;
+                        goto Exit;
+                    }
+                    continue;
+                }
                 const uint32_t sessionType =
                         mPlaybackThreads.valueAt(i)->hasAudioSession(sessionId);
                 if ((sessionType & ThreadBase::EFFECT_SESSION) != 0) {
@@ -3649,6 +3741,20 @@
 
         // create effect on selected output thread
         bool pinned = !audio_is_global_session(sessionId) && isSessionAcquired_l(sessionId);
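+        // A haptic-generating effect must run on a thread whose output has haptic channels;
+        // remember the originally selected thread so its tracks can be invalidated below.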
+        ThreadBase *oriThread = nullptr;
+        if (hapticPlaybackRequired && thread->hapticChannelMask() == AUDIO_CHANNEL_NONE) {
+            ThreadBase *hapticThread = hapticPlaybackThread_l();
+            if (hapticThread == nullptr) {
+                ALOGE("%s: a haptic playback thread is required but none was found", __func__);
+                lStatus = INVALID_OPERATION;
+                goto Exit;
+            }
+            if (hapticThread != thread) {
+                // Force to use haptic thread for haptic-generating effect.
+                oriThread = thread;
+                thread = hapticThread;
+            }
+        }
         handle = thread->createEffect_l(client, effectClient, priority, sessionId,
                 &desc, enabled, &lStatus, pinned, probe);
         if (lStatus != NO_ERROR && lStatus != ALREADY_EXISTS) {
@@ -3658,6 +3764,11 @@
         } else {
             // handle must be valid here, but check again to be safe.
             if (handle.get() != nullptr && id != nullptr) *id = handle->id();
+            // Invalidate audio session when haptic playback is created.
+            if (hapticPlaybackRequired && oriThread != nullptr) {
+                // invalidateTracksForAudioSession will trigger locking the thread.
+                oriThread->invalidateTracksForAudioSession(sessionId);
+            }
         }
     }
 
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 20f561e..cfe9264 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -33,16 +33,16 @@
 #include <sys/types.h>
 #include <limits.h>
 
+#include <android/media/IAudioFlingerClient.h>
 #include <android/media/IAudioTrackCallback.h>
 #include <android/os/BnExternalVibrationController.h>
-#include <android-base/macros.h>
 
+#include <android-base/macros.h>
 #include <cutils/atomic.h>
 #include <cutils/compiler.h>
-#include <cutils/properties.h>
 
+#include <cutils/properties.h>
 #include <media/IAudioFlinger.h>
-#include <media/IAudioFlingerClient.h>
 #include <media/IAudioTrack.h>
 #include <media/AudioSystem.h>
 #include <media/AudioTrack.h>
@@ -91,15 +91,17 @@
 #include "ThreadMetrics.h"
 #include "TrackMetrics.h"
 
-#include <powermanager/IPowerManager.h>
+#include <android/os/IPowerManager.h>
 
 #include <media/nblog/NBLog.h>
 #include <private/media/AudioEffectShared.h>
 #include <private/media/AudioTrackShared.h>
 
 #include <vibrator/ExternalVibration.h>
+#include <vibrator/ExternalVibrationUtils.h>
 
 #include "android/media/BnAudioRecord.h"
+#include "android/media/BnEffect.h"
 
 namespace android {
 
@@ -133,13 +135,13 @@
     virtual     status_t    dump(int fd, const Vector<String16>& args);
 
     // IAudioFlinger interface, in binder opcode order
-    virtual sp<IAudioTrack> createTrack(const CreateTrackInput& input,
-                                        CreateTrackOutput& output,
-                                        status_t *status);
+    virtual sp<IAudioTrack> createTrack(const media::CreateTrackRequest& input,
+                                        media::CreateTrackResponse& output,
+                                        status_t* status) override;
 
-    virtual sp<media::IAudioRecord> createRecord(const CreateRecordInput& input,
-                                                 CreateRecordOutput& output,
-                                                 status_t *status);
+    virtual sp<media::IAudioRecord> createRecord(const media::CreateRecordRequest& input,
+                                                 media::CreateRecordResponse& output,
+                                                 status_t* status) override;
 
     virtual     uint32_t    sampleRate(audio_io_handle_t ioHandle) const;
     virtual     audio_format_t format(audio_io_handle_t output) const;
@@ -175,7 +177,7 @@
     virtual     status_t    setParameters(audio_io_handle_t ioHandle, const String8& keyValuePairs);
     virtual     String8     getParameters(audio_io_handle_t ioHandle, const String8& keys) const;
 
-    virtual     void        registerClient(const sp<IAudioFlingerClient>& client);
+    virtual     void        registerClient(const sp<media::IAudioFlingerClient>& client);
 
     virtual     size_t      getInputBufferSize(uint32_t sampleRate, audio_format_t format,
                                                audio_channel_mask_t channelMask) const;
@@ -231,9 +233,9 @@
                                          uint32_t preferredTypeFlag,
                                          effect_descriptor_t *descriptor) const;
 
-    virtual sp<IEffect> createEffect(
+    virtual sp<media::IEffect> createEffect(
                         effect_descriptor_t *pDesc,
-                        const sp<IEffectClient>& effectClient,
+                        const sp<media::IEffectClient>& effectClient,
                         int32_t priority,
                         audio_io_handle_t io,
                         audio_session_t sessionId,
@@ -405,7 +407,7 @@
         case AUDIO_CHANNEL_REPRESENTATION_POSITION: {
             // Haptic channel mask is only applicable for channel position mask.
             const uint32_t channelCount = audio_channel_count_from_out_mask(
-                    channelMask & ~AUDIO_CHANNEL_HAPTIC_ALL);
+                    static_cast<audio_channel_mask_t>(channelMask & ~AUDIO_CHANNEL_HAPTIC_ALL));
             const uint32_t maxChannelCount = kEnableExtendedChannels
                     ? AudioMixer::MAX_NUM_CHANNELS : FCC_2;
             if (channelCount < FCC_2 // mono is not supported at this time
@@ -488,12 +490,12 @@
     class NotificationClient : public IBinder::DeathRecipient {
     public:
                             NotificationClient(const sp<AudioFlinger>& audioFlinger,
-                                                const sp<IAudioFlingerClient>& client,
+                                                const sp<media::IAudioFlingerClient>& client,
                                                 pid_t pid,
                                                 uid_t uid);
         virtual             ~NotificationClient();
 
-                sp<IAudioFlingerClient> audioFlingerClient() const { return mAudioFlingerClient; }
+                sp<media::IAudioFlingerClient> audioFlingerClient() const { return mAudioFlingerClient; }
                 pid_t getPid() const { return mPid; }
                 uid_t getUid() const { return mUid; }
 
@@ -506,7 +508,7 @@
         const sp<AudioFlinger>  mAudioFlinger;
         const pid_t             mPid;
         const uid_t             mUid;
-        const sp<IAudioFlingerClient> mAudioFlingerClient;
+        const sp<media::IAudioFlingerClient> mAudioFlingerClient;
     };
 
     // --- MediaLogNotifier ---
@@ -682,6 +684,7 @@
         virtual status_t createMmapBuffer(int32_t minSizeFrames,
                                           struct audio_mmap_buffer_info *info);
         virtual status_t getMmapPosition(struct audio_mmap_position *position);
+        virtual status_t getExternalPosition(uint64_t *position, int64_t *timeNanos);
         virtual status_t start(const AudioClient& client,
                                const audio_attributes_t *attr,
                                audio_port_handle_t *handle);
@@ -756,6 +759,8 @@
 
               sp<ThreadBase> getEffectThread_l(audio_session_t sessionId, int effectId);
 
+              ThreadBase *hapticPlaybackThread_l() const;
+
 
                 void        removeClient_l(pid_t pid);
                 void        removeNotificationClient(pid_t pid);
@@ -782,7 +787,7 @@
 
                 std::vector< sp<EffectModule> > purgeStaleEffects_l();
 
-                void broacastParametersToRecordThreads_l(const String8& keyValuePairs);
+                void broadcastParametersToRecordThreads_l(const String8& keyValuePairs);
                 void updateOutDevicesForRecordThreads_l(const DeviceDescriptorBaseVector& devices);
                 void forwardParametersToDownstreamPatches_l(
                         audio_io_handle_t upStream, const String8& keyValuePairs,
diff --git a/services/audioflinger/DeviceEffectManager.cpp b/services/audioflinger/DeviceEffectManager.cpp
index 5ff7215..cecd52b 100644
--- a/services/audioflinger/DeviceEffectManager.cpp
+++ b/services/audioflinger/DeviceEffectManager.cpp
@@ -30,6 +30,8 @@
 
 namespace android {
 
+using media::IEffectClient;
+
 void AudioFlinger::DeviceEffectManager::createAudioPatch(audio_patch_handle_t handle,
         const PatchPanel::Patch& patch) {
     ALOGV("%s handle %d mHalHandle %d num sinks %d device sink %08x",
@@ -115,10 +117,19 @@
 
 status_t AudioFlinger::DeviceEffectManager::checkEffectCompatibility(
         const effect_descriptor_t *desc) {
+    sp<EffectsFactoryHalInterface> effectsFactory = mAudioFlinger.getEffectsFactory();
+    if (effectsFactory == nullptr) {
+        return BAD_VALUE;
+    }
 
-    if ((desc->flags & EFFECT_FLAG_TYPE_MASK) != EFFECT_FLAG_TYPE_PRE_PROC
-        && (desc->flags & EFFECT_FLAG_TYPE_MASK) != EFFECT_FLAG_TYPE_POST_PROC) {
-        ALOGW("%s() non pre/post processing device effect %s", __func__, desc->name);
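+    // Device effects must be pre- or post-processing effects and require an effect HAL of
+    // at least version 6.0.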
+    static const float sMinDeviceEffectHalVersion = 6.0;
+    float halVersion = effectsFactory->getHalVersion();
+
+    if (((desc->flags & EFFECT_FLAG_TYPE_MASK) != EFFECT_FLAG_TYPE_PRE_PROC
+            && (desc->flags & EFFECT_FLAG_TYPE_MASK) != EFFECT_FLAG_TYPE_POST_PROC)
+            || halVersion < sMinDeviceEffectHalVersion) {
+        ALOGW("%s() non pre/post processing device effect %s or incompatible API version %f",
+                __func__, desc->name, halVersion);
         return BAD_VALUE;
     }
 
diff --git a/services/audioflinger/DeviceEffectManager.h b/services/audioflinger/DeviceEffectManager.h
index 81e6065..d187df2 100644
--- a/services/audioflinger/DeviceEffectManager.h
+++ b/services/audioflinger/DeviceEffectManager.h
@@ -33,7 +33,7 @@
     sp<EffectHandle> createEffect_l(effect_descriptor_t *descriptor,
                 const AudioDeviceTypeAddr& device,
                 const sp<AudioFlinger::Client>& client,
-                const sp<IEffectClient>& effectClient,
+                const sp<media::IEffectClient>& effectClient,
                 const std::map<audio_patch_handle_t, PatchPanel::Patch>& patches,
                 int *enabled,
                 status_t *status,
@@ -165,6 +165,7 @@
     uint32_t  sampleRate() const override { return 0; }
     audio_channel_mask_t channelMask() const override { return AUDIO_CHANNEL_NONE; }
     uint32_t channelCount() const override { return 0; }
+    audio_channel_mask_t hapticChannelMask() const override { return AUDIO_CHANNEL_NONE; }
     size_t    frameCount() const override  { return 0; }
     uint32_t  latency() const override  { return 0; }
 
diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp
index 3dfeb83..eaad6ef 100644
--- a/services/audioflinger/Effects.cpp
+++ b/services/audioflinger/Effects.cpp
@@ -25,6 +25,7 @@
 #include <utils/Log.h>
 #include <system/audio_effects/effect_aec.h>
 #include <system/audio_effects/effect_dynamicsprocessing.h>
+#include <system/audio_effects/effect_hapticgenerator.h>
 #include <system/audio_effects/effect_ns.h>
 #include <system/audio_effects/effect_visualizer.h>
 #include <audio_utils/channels.h>
@@ -33,6 +34,7 @@
 #include <media/AudioContainers.h>
 #include <media/AudioEffect.h>
 #include <media/AudioDeviceTypeAddr.h>
+#include <media/ShmemCompat.h>
 #include <media/audiohal/EffectHalInterface.h>
 #include <media/audiohal/EffectsFactoryHalInterface.h>
 #include <mediautils/ServiceUtilities.h>
@@ -58,6 +60,27 @@
 
 namespace android {
 
+using binder::Status;
+
+namespace {
+
+// Append a POD value into a vector of bytes.
+template<typename T>
+void appendToBuffer(const T& value, std::vector<uint8_t>* buffer) {
+    const uint8_t* ar(reinterpret_cast<const uint8_t*>(&value));
+    buffer->insert(buffer->end(), ar, ar + sizeof(T));
+}
+
+// Write a POD value into a vector of bytes (clears the previous buffer
+// content).
+template<typename T>
+void writeToBuffer(const T& value, std::vector<uint8_t>* buffer) {
+    buffer->clear();
+    appendToBuffer(value, buffer);
+}
+
+}  // namespace
+
 // ----------------------------------------------------------------------------
 //  EffectBase implementation
 // ----------------------------------------------------------------------------
@@ -292,6 +315,9 @@
         }
     }
 
+    // Prevent calls to process() and other functions on effect interface from now on.
+    // The effect engine will be released by the destructor when the last strong reference on
+    // this object is released which can happen after next process is called.
     if (mHandles.size() == 0 && !mPinned) {
         mState = DESTROYED;
     }
@@ -565,20 +591,6 @@
 
 }
 
-ssize_t AudioFlinger::EffectModule::removeHandle_l(EffectHandle *handle)
-{
-    ssize_t status = EffectBase::removeHandle_l(handle);
-
-    // Prevent calls to process() and other functions on effect interface from now on.
-    // The effect engine will be released by the destructor when the last strong reference on
-    // this object is released which can happen after next process is called.
-    if (status == 0 && !mPinned) {
-        mEffectInterface->close();
-    }
-
-    return status;
-}
-
 bool AudioFlinger::EffectModule::updateState() {
     Mutex::Autolock _l(mLock);
 
@@ -879,6 +891,11 @@
         }
 #endif
     }
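+    // The HapticGenerator effect needs the haptic channels present in both its input and
+    // output channel masks.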
+    if (isHapticGenerator()) {
+        audio_channel_mask_t hapticChannelMask = mCallback->hapticChannelMask();
+        mConfig.inputCfg.channels |= hapticChannelMask;
+        mConfig.outputCfg.channels |= hapticChannelMask;
+    }
     mInChannelCountRequested =
             audio_channel_count_from_out_mask(mConfig.inputCfg.channels);
     mOutChannelCountRequested =
@@ -1160,11 +1177,10 @@
     return remainder == 0 ? 0 : divisor - remainder;
 }
 
-status_t AudioFlinger::EffectModule::command(uint32_t cmdCode,
-                                             uint32_t cmdSize,
-                                             void *pCmdData,
-                                             uint32_t *replySize,
-                                             void *pReplyData)
+status_t AudioFlinger::EffectModule::command(int32_t cmdCode,
+                     const std::vector<uint8_t>& cmdData,
+                     int32_t maxReplySize,
+                     std::vector<uint8_t>* reply)
 {
     Mutex::Autolock _l(mLock);
     ALOGVV("command(), cmdCode: %d, mEffectInterface: %p", cmdCode, mEffectInterface.get());
@@ -1175,63 +1191,68 @@
     if (mStatus != NO_ERROR) {
         return mStatus;
     }
+    if (maxReplySize < 0 || maxReplySize > EFFECT_PARAM_SIZE_MAX) {
+        return -EINVAL;
+    }
+    size_t cmdSize = cmdData.size();
+    const effect_param_t* param = cmdSize >= sizeof(effect_param_t)
+                                  ? reinterpret_cast<const effect_param_t*>(cmdData.data())
+                                  : nullptr;
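+    // Validate effect_param_t framing before forwarding the command to the HAL
+    // (see the referenced security bugs).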
     if (cmdCode == EFFECT_CMD_GET_PARAM &&
-            (sizeof(effect_param_t) > cmdSize ||
-                    ((effect_param_t *)pCmdData)->psize > cmdSize
-                                                          - sizeof(effect_param_t))) {
+            (param == nullptr || param->psize > cmdSize - sizeof(effect_param_t))) {
         android_errorWriteLog(0x534e4554, "32438594");
         android_errorWriteLog(0x534e4554, "33003822");
         return -EINVAL;
     }
     if (cmdCode == EFFECT_CMD_GET_PARAM &&
-            (*replySize < sizeof(effect_param_t) ||
-                    ((effect_param_t *)pCmdData)->psize > *replySize - sizeof(effect_param_t))) {
+            (maxReplySize < sizeof(effect_param_t) ||
+                   param->psize > maxReplySize - sizeof(effect_param_t))) {
         android_errorWriteLog(0x534e4554, "29251553");
         return -EINVAL;
     }
     if (cmdCode == EFFECT_CMD_GET_PARAM &&
-        (sizeof(effect_param_t) > *replySize
-          || ((effect_param_t *)pCmdData)->psize > *replySize
-                                                   - sizeof(effect_param_t)
-          || ((effect_param_t *)pCmdData)->vsize > *replySize
-                                                   - sizeof(effect_param_t)
-                                                   - ((effect_param_t *)pCmdData)->psize
-          || roundUpDelta(((effect_param_t *)pCmdData)->psize, (uint32_t)sizeof(int)) >
-                                                   *replySize
-                                                   - sizeof(effect_param_t)
-                                                   - ((effect_param_t *)pCmdData)->psize
-                                                   - ((effect_param_t *)pCmdData)->vsize)) {
+            (sizeof(effect_param_t) > maxReplySize
+                    || param->psize > maxReplySize - sizeof(effect_param_t)
+                    || param->vsize > maxReplySize - sizeof(effect_param_t)
+                            - param->psize
+                    || roundUpDelta(param->psize, (uint32_t) sizeof(int)) >
+                            maxReplySize
+                                    - sizeof(effect_param_t)
+                                    - param->psize
+                                    - param->vsize)) {
         ALOGV("\tLVM_ERROR : EFFECT_CMD_GET_PARAM: reply size inconsistent");
                      android_errorWriteLog(0x534e4554, "32705438");
         return -EINVAL;
     }
     if ((cmdCode == EFFECT_CMD_SET_PARAM
-            || cmdCode == EFFECT_CMD_SET_PARAM_DEFERRED) &&  // DEFERRED not generally used
-        (sizeof(effect_param_t) > cmdSize
-            || ((effect_param_t *)pCmdData)->psize > cmdSize
-                                                     - sizeof(effect_param_t)
-            || ((effect_param_t *)pCmdData)->vsize > cmdSize
-                                                     - sizeof(effect_param_t)
-                                                     - ((effect_param_t *)pCmdData)->psize
-            || roundUpDelta(((effect_param_t *)pCmdData)->psize, (uint32_t)sizeof(int)) >
-                                                     cmdSize
-                                                     - sizeof(effect_param_t)
-                                                     - ((effect_param_t *)pCmdData)->psize
-                                                     - ((effect_param_t *)pCmdData)->vsize)) {
+            || cmdCode == EFFECT_CMD_SET_PARAM_DEFERRED)
+            &&  // DEFERRED not generally used
+                    (param == nullptr
+                            || param->psize > cmdSize - sizeof(effect_param_t)
+                            || param->vsize > cmdSize - sizeof(effect_param_t)
+                                    - param->psize
+                            || roundUpDelta(param->psize,
+                                            (uint32_t) sizeof(int)) >
+                                    cmdSize
+                                            - sizeof(effect_param_t)
+                                            - param->psize
+                                            - param->vsize)) {
         android_errorWriteLog(0x534e4554, "30204301");
         return -EINVAL;
     }
+    uint32_t replySize = maxReplySize;
+    reply->resize(replySize);
     status_t status = mEffectInterface->command(cmdCode,
                                                 cmdSize,
-                                                pCmdData,
-                                                replySize,
-                                                pReplyData);
+                                                const_cast<uint8_t*>(cmdData.data()),
+                                                &replySize,
+                                                reply->data());
+    reply->resize(status == NO_ERROR ? replySize : 0);
     if (cmdCode != EFFECT_CMD_GET_PARAM && status == NO_ERROR) {
-        uint32_t size = (replySize == NULL) ? 0 : *replySize;
         for (size_t i = 1; i < mHandles.size(); i++) {
             EffectHandle *h = mHandles[i];
             if (h != NULL && !h->disconnected()) {
-                h->commandExecuted(cmdCode, cmdSize, pCmdData, size, pReplyData);
+                h->commandExecuted(cmdCode, cmdData, *reply);
             }
         }
     }
@@ -1522,6 +1543,41 @@
     return mOffloaded;
 }
 
+/*static*/
+bool AudioFlinger::EffectModule::isHapticGenerator(const effect_uuid_t *type) {
+    return memcmp(type, FX_IID_HAPTICGENERATOR, sizeof(effect_uuid_t)) == 0;
+}
+
+bool AudioFlinger::EffectModule::isHapticGenerator() const {
+    return isHapticGenerator(&mDescriptor.type);
+}
+
+status_t AudioFlinger::EffectModule::setHapticIntensity(int id, int intensity)
+{
+    if (mStatus != NO_ERROR) {
+        return mStatus;
+    }
+    if (!isHapticGenerator()) {
+        ALOGW("Should not set haptic intensity for effects that are not HapticGenerator");
+        return INVALID_OPERATION;
+    }
+
+    std::vector<uint8_t> request(sizeof(effect_param_t) + 3 * sizeof(uint32_t));
+    effect_param_t *param = (effect_param_t*) request.data();
+    param->psize = sizeof(int32_t);
+    param->vsize = sizeof(int32_t) * 2;
+    *(int32_t*)param->data = HG_PARAM_HAPTIC_INTENSITY;
+    *((int32_t*)param->data + 1) = id;
+    *((int32_t*)param->data + 2) = intensity;
+    std::vector<uint8_t> response;
+    status_t status = command(EFFECT_CMD_SET_PARAM, request, sizeof(int32_t), &response);
+    if (status == NO_ERROR) {
+        LOG_ALWAYS_FATAL_IF(response.size() != 4);
+        status = *reinterpret_cast<const status_t*>(response.data());
+    }
+    return status;
+}
+
 static std::string dumpInOutBuffer(bool isInput, const sp<EffectBufferHalInterface> &buffer) {
     std::stringstream ss;
 
@@ -1600,9 +1656,9 @@
 #define LOG_TAG "AudioFlinger::EffectHandle"
 
 AudioFlinger::EffectHandle::EffectHandle(const sp<EffectBase>& effect,
-                                        const sp<AudioFlinger::Client>& client,
-                                        const sp<IEffectClient>& effectClient,
-                                        int32_t priority)
+                                         const sp<AudioFlinger::Client>& client,
+                                         const sp<media::IEffectClient>& effectClient,
+                                         int32_t priority)
     : BnEffect(),
     mEffect(effect), mEffectClient(effectClient), mClient(client), mCblk(NULL),
     mPriority(priority), mHasControl(false), mEnabled(false), mDisconnected(false)
@@ -1636,20 +1692,24 @@
     return mClient == 0 || mCblkMemory != 0 ? OK : NO_MEMORY;
 }
 
-status_t AudioFlinger::EffectHandle::enable()
+#define RETURN(code) \
+  *_aidl_return = (code); \
+  return Status::ok();
+
+Status AudioFlinger::EffectHandle::enable(int32_t* _aidl_return)
 {
     AutoMutex _l(mLock);
     ALOGV("enable %p", this);
     sp<EffectBase> effect = mEffect.promote();
     if (effect == 0 || mDisconnected) {
-        return DEAD_OBJECT;
+        RETURN(DEAD_OBJECT);
     }
     if (!mHasControl) {
-        return INVALID_OPERATION;
+        RETURN(INVALID_OPERATION);
     }
 
     if (mEnabled) {
-        return NO_ERROR;
+        RETURN(NO_ERROR);
     }
 
     mEnabled = true;
@@ -1657,54 +1717,55 @@
     status_t status = effect->updatePolicyState();
     if (status != NO_ERROR) {
         mEnabled = false;
-        return status;
+        RETURN(status);
     }
 
     effect->checkSuspendOnEffectEnabled(true, false /*threadLocked*/);
 
     // checkSuspendOnEffectEnabled() can suspend this same effect when enabled
     if (effect->suspended()) {
-        return NO_ERROR;
+        RETURN(NO_ERROR);
     }
 
     status = effect->setEnabled(true, true /*fromHandle*/);
     if (status != NO_ERROR) {
         mEnabled = false;
     }
-    return status;
+    RETURN(status);
 }
 
-status_t AudioFlinger::EffectHandle::disable()
+Status AudioFlinger::EffectHandle::disable(int32_t* _aidl_return)
 {
     ALOGV("disable %p", this);
     AutoMutex _l(mLock);
     sp<EffectBase> effect = mEffect.promote();
     if (effect == 0 || mDisconnected) {
-        return DEAD_OBJECT;
+        RETURN(DEAD_OBJECT);
     }
     if (!mHasControl) {
-        return INVALID_OPERATION;
+        RETURN(INVALID_OPERATION);
     }
 
     if (!mEnabled) {
-        return NO_ERROR;
+        RETURN(NO_ERROR);
     }
     mEnabled = false;
 
     effect->updatePolicyState();
 
     if (effect->suspended()) {
-        return NO_ERROR;
+        RETURN(NO_ERROR);
     }
 
     status_t status = effect->setEnabled(false, true /*fromHandle*/);
-    return status;
+    RETURN(status);
 }
 
-void AudioFlinger::EffectHandle::disconnect()
+Status AudioFlinger::EffectHandle::disconnect()
 {
     ALOGV("%s %p", __FUNCTION__, this);
     disconnect(true);
+    return Status::ok();
 }
 
 void AudioFlinger::EffectHandle::disconnect(bool unpinIfLast)
@@ -1741,11 +1802,16 @@
     }
 }
 
-status_t AudioFlinger::EffectHandle::command(uint32_t cmdCode,
-                                             uint32_t cmdSize,
-                                             void *pCmdData,
-                                             uint32_t *replySize,
-                                             void *pReplyData)
+Status AudioFlinger::EffectHandle::getCblk(media::SharedFileRegion* _aidl_return) {
+    LOG_ALWAYS_FATAL_IF(!convertIMemoryToSharedFileRegion(mCblkMemory, _aidl_return));
+    return Status::ok();
+}
+
+Status AudioFlinger::EffectHandle::command(int32_t cmdCode,
+                       const std::vector<uint8_t>& cmdData,
+                       int32_t maxResponseSize,
+                       std::vector<uint8_t>* response,
+                       int32_t* _aidl_return)
 {
     ALOGVV("command(), cmdCode: %d, mHasControl: %d, mEffect: %p",
             cmdCode, mHasControl, mEffect.unsafe_get());
@@ -1765,49 +1831,46 @@
                 break;
             }
             android_errorWriteLog(0x534e4554, "62019992");
-            return BAD_VALUE;
+            RETURN(BAD_VALUE);
     }
 
     if (cmdCode == EFFECT_CMD_ENABLE) {
-        if (*replySize < sizeof(int)) {
+        if (maxResponseSize < sizeof(int)) {
             android_errorWriteLog(0x534e4554, "32095713");
-            return BAD_VALUE;
+            RETURN(BAD_VALUE);
         }
-        *(int *)pReplyData = NO_ERROR;
-        *replySize = sizeof(int);
-        return enable();
+        writeToBuffer(NO_ERROR, response);
+        return enable(_aidl_return);
     } else if (cmdCode == EFFECT_CMD_DISABLE) {
-        if (*replySize < sizeof(int)) {
+        if (maxResponseSize < sizeof(int)) {
             android_errorWriteLog(0x534e4554, "32095713");
-            return BAD_VALUE;
+            RETURN(BAD_VALUE);
         }
-        *(int *)pReplyData = NO_ERROR;
-        *replySize = sizeof(int);
-        return disable();
+        writeToBuffer(NO_ERROR, response);
+        return disable(_aidl_return);
     }
 
     AutoMutex _l(mLock);
     sp<EffectBase> effect = mEffect.promote();
     if (effect == 0 || mDisconnected) {
-        return DEAD_OBJECT;
+        RETURN(DEAD_OBJECT);
     }
     // only get parameter command is permitted for applications not controlling the effect
     if (!mHasControl && cmdCode != EFFECT_CMD_GET_PARAM) {
-        return INVALID_OPERATION;
+        RETURN(INVALID_OPERATION);
     }
 
     // handle commands that are not forwarded transparently to effect engine
     if (cmdCode == EFFECT_CMD_SET_PARAM_COMMIT) {
         if (mClient == 0) {
-            return INVALID_OPERATION;
+            RETURN(INVALID_OPERATION);
         }
 
-        if (*replySize < sizeof(int)) {
+        if (maxResponseSize < sizeof(int)) {
             android_errorWriteLog(0x534e4554, "32095713");
-            return BAD_VALUE;
+            RETURN(BAD_VALUE);
         }
-        *(int *)pReplyData = NO_ERROR;
-        *replySize = sizeof(int);
+        writeToBuffer(NO_ERROR, response);
 
         // No need to trylock() here as this function is executed in the binder thread serving a
         // particular client process:  no risk to block the whole media server process or mixer
@@ -1820,10 +1883,10 @@
             serverIndex > EFFECT_PARAM_BUFFER_SIZE) {
             mCblk->serverIndex = 0;
             mCblk->clientIndex = 0;
-            return BAD_VALUE;
+            RETURN(BAD_VALUE);
         }
         status_t status = NO_ERROR;
-        effect_param_t *param = NULL;
+        std::vector<uint8_t> param;
         for (uint32_t index = serverIndex; index < clientIndex;) {
             int *p = (int *)(mBuffer + index);
             const int size = *p++;
@@ -1835,23 +1898,16 @@
                 break;
             }
 
-            // copy to local memory in case of client corruption b/32220769
-            auto *newParam = (effect_param_t *)realloc(param, size);
-            if (newParam == NULL) {
-                ALOGW("command(): out of memory");
-                status = NO_MEMORY;
-                break;
-            }
-            param = newParam;
-            memcpy(param, p, size);
+            std::copy(reinterpret_cast<const uint8_t*>(p),
+                      reinterpret_cast<const uint8_t*>(p) + size,
+                      std::back_inserter(param));
 
-            int reply = 0;
-            uint32_t rsize = sizeof(reply);
+            std::vector<uint8_t> replyBuffer;
             status_t ret = effect->command(EFFECT_CMD_SET_PARAM,
-                                            size,
                                             param,
-                                            &rsize,
-                                            &reply);
+                                            sizeof(int),
+                                            &replyBuffer);
+            int reply = *reinterpret_cast<const int*>(replyBuffer.data());
 
             // verify shared memory: server index shouldn't change; client index can't go back.
             if (serverIndex != mCblk->serverIndex
@@ -1864,21 +1920,24 @@
             // stop at first error encountered
             if (ret != NO_ERROR) {
                 status = ret;
-                *(int *)pReplyData = reply;
+                writeToBuffer(reply, response);
                 break;
             } else if (reply != NO_ERROR) {
-                *(int *)pReplyData = reply;
+                writeToBuffer(reply, response);
                 break;
             }
             index += size;
         }
-        free(param);
         mCblk->serverIndex = 0;
         mCblk->clientIndex = 0;
-        return status;
+        RETURN(status);
     }
 
-    return effect->command(cmdCode, cmdSize, pCmdData, replySize, pReplyData);
+    status_t status = effect->command(cmdCode,
+                                      cmdData,
+                                      maxResponseSize,
+                                      response);
+    RETURN(status);
 }
 
 void AudioFlinger::EffectHandle::setControl(bool hasControl, bool signal, bool enabled)
@@ -1894,13 +1953,11 @@
 }
 
 void AudioFlinger::EffectHandle::commandExecuted(uint32_t cmdCode,
-                                                 uint32_t cmdSize,
-                                                 void *pCmdData,
-                                                 uint32_t replySize,
-                                                 void *pReplyData)
+                         const std::vector<uint8_t>& cmdData,
+                         const std::vector<uint8_t>& replyData)
 {
     if (mEffectClient != 0) {
-        mEffectClient->commandExecuted(cmdCode, cmdSize, pCmdData, replySize, pReplyData);
+        mEffectClient->commandExecuted(cmdCode, cmdData, replyData);
     }
 }
 
@@ -1913,13 +1970,6 @@
     }
 }
 
-status_t AudioFlinger::EffectHandle::onTransact(
-    uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
-{
-    return BnEffect::onTransact(code, data, reply, flags);
-}
-
-
 void AudioFlinger::EffectHandle::dumpToBuffer(char* buffer, size_t size)
 {
     bool locked = mCblk != NULL && AudioFlinger::dumpTryLock(mCblk->lock);
@@ -2385,6 +2435,25 @@
     }
 }
 
+// containsHapticGeneratingEffect_l must be called with ThreadBase::mLock or EffectChain::mLock held
+bool AudioFlinger::EffectChain::containsHapticGeneratingEffect_l()
+{
+    for (size_t i = 0; i < mEffects.size(); ++i) {
+        if (mEffects[i]->isHapticGenerator()) {
+            return true;
+        }
+    }
+    return false;
+}
+
+void AudioFlinger::EffectChain::setHapticIntensity_l(int id, int intensity)
+{
+    Mutex::Autolock _l(mLock);
+    for (size_t i = 0; i < mEffects.size(); ++i) {
+        mEffects[i]->setHapticIntensity(id, intensity);
+    }
+}
+
 void AudioFlinger::EffectChain::syncHalEffectsState()
 {
     Mutex::Autolock _l(mLock);
@@ -2839,6 +2908,14 @@
     return t->channelCount();
 }
 
+audio_channel_mask_t AudioFlinger::EffectChain::EffectCallback::hapticChannelMask() const {
+    sp<ThreadBase> t = mThread.promote();
+    if (t == nullptr) {
+        return AUDIO_CHANNEL_NONE;
+    }
+    return t->hapticChannelMask();
+}
+
 size_t AudioFlinger::EffectChain::EffectCallback::frameCount() const {
     sp<ThreadBase> t = mThread.promote();
     if (t == nullptr) {
@@ -2943,10 +3020,14 @@
     Mutex::Autolock _l(mProxyLock);
     if (status == NO_ERROR) {
         for (auto& handle : mEffectHandles) {
+            Status bs;
             if (enabled) {
-                status = handle.second->enable();
+                bs = handle.second->enable(&status);
             } else {
-                status = handle.second->disable();
+                bs = handle.second->disable(&status);
+            }
+            if (!bs.isOk()) {
+              status = bs.transactionError();
             }
         }
     }
@@ -3005,7 +3086,7 @@
             __func__, port->type, port->ext.device.type,
             port->ext.device.address, port->id, patch.isSoftware());
     if (port->type != AUDIO_PORT_TYPE_DEVICE || port->ext.device.type != mDevice.mType
-        || port->ext.device.address != mDevice.mAddress) {
+        || port->ext.device.address != mDevice.address()) {
         return NAME_NOT_FOUND;
     }
     status_t status = NAME_NOT_FOUND;
@@ -3054,10 +3135,14 @@
         status = BAD_VALUE;
     }
     if (status == NO_ERROR || status == ALREADY_EXISTS) {
+        Status bs;
         if (isEnabled()) {
-            (*handle)->enable();
+            bs = (*handle)->enable(&status);
         } else {
-            (*handle)->disable();
+            bs = (*handle)->disable(&status);
+        }
+        if (!bs.isOk()) {
+            status = bs.transactionError();
         }
     }
     return status;
diff --git a/services/audioflinger/Effects.h b/services/audioflinger/Effects.h
index 2826297..03bdc60 100644
--- a/services/audioflinger/Effects.h
+++ b/services/audioflinger/Effects.h
@@ -36,6 +36,7 @@
     virtual uint32_t sampleRate() const = 0;
     virtual audio_channel_mask_t channelMask() const = 0;
     virtual uint32_t channelCount() const = 0;
+    virtual audio_channel_mask_t hapticChannelMask() const = 0;
     virtual size_t frameCount() const = 0;
 
     // Non trivial methods usually implemented with help from ThreadBase:
@@ -132,11 +133,10 @@
     void             setSuspended(bool suspended);
     bool             suspended() const;
 
-    virtual status_t command(uint32_t cmdCode __unused,
-                 uint32_t cmdSize __unused,
-                 void *pCmdData __unused,
-                 uint32_t *replySize __unused,
-                 void *pReplyData __unused) { return NO_ERROR; };
+    virtual status_t command(int32_t __unused,
+                             const std::vector<uint8_t>& __unused,
+                             int32_t __unused,
+                             std::vector<uint8_t>* __unused) { return NO_ERROR; };
 
     void setCallback(const sp<EffectCallbackInterface>& callback) { mCallback = callback; }
     sp<EffectCallbackInterface>&     callback() { return mCallback; }
@@ -144,7 +144,7 @@
     status_t addHandle(EffectHandle *handle);
     ssize_t disconnectHandle(EffectHandle *handle, bool unpinIfLast);
     ssize_t removeHandle(EffectHandle *handle);
-    virtual ssize_t removeHandle_l(EffectHandle *handle);
+    ssize_t removeHandle_l(EffectHandle *handle);
     EffectHandle* controlHandle_l();
     bool purgeHandles();
 
@@ -213,11 +213,10 @@
 
     void process();
     bool updateState();
-    status_t command(uint32_t cmdCode,
-                     uint32_t cmdSize,
-                     void *pCmdData,
-                     uint32_t *replySize,
-                     void *pReplyData) override;
+    status_t command(int32_t cmdCode,
+                     const std::vector<uint8_t>& cmdData,
+                     int32_t maxReplySize,
+                     std::vector<uint8_t>* reply) override;
 
     void reset_l();
     status_t configure();
@@ -240,8 +239,6 @@
         return mOutBuffer != 0 ? reinterpret_cast<int16_t*>(mOutBuffer->ptr()) : NULL;
     }
 
-    ssize_t removeHandle_l(EffectHandle *handle) override;
-
     status_t         setDevices(const AudioDeviceTypeAddrVector &devices);
     status_t         setInputDevice(const AudioDeviceTypeAddr &device);
     status_t         setVolume(uint32_t *left, uint32_t *right, bool controller);
@@ -257,6 +254,11 @@
 
     sp<EffectModule> asEffectModule() override { return this; }
 
+    static bool      isHapticGenerator(const effect_uuid_t* type);
+    bool             isHapticGenerator() const;
+
+    status_t         setHapticIntensity(int id, int intensity);
+
     void             dump(int fd, const Vector<String16>& args);
 
 private:
@@ -316,32 +318,29 @@
 // There is one EffectHandle object for each application controlling (or using)
 // an effect module.
 // The EffectHandle is obtained by calling AudioFlinger::createEffect().
-class EffectHandle: public android::BnEffect {
+class EffectHandle: public android::media::BnEffect {
 public:
 
     EffectHandle(const sp<EffectBase>& effect,
             const sp<AudioFlinger::Client>& client,
-            const sp<IEffectClient>& effectClient,
+            const sp<media::IEffectClient>& effectClient,
             int32_t priority);
     virtual ~EffectHandle();
     virtual status_t initCheck();
 
     // IEffect
-    virtual status_t enable();
-    virtual status_t disable();
-    virtual status_t command(uint32_t cmdCode,
-                             uint32_t cmdSize,
-                             void *pCmdData,
-                             uint32_t *replySize,
-                             void *pReplyData);
-    virtual void disconnect();
-private:
-            void disconnect(bool unpinIfLast);
-public:
-    virtual sp<IMemory> getCblk() const { return mCblkMemory; }
-    virtual status_t onTransact(uint32_t code, const Parcel& data,
-            Parcel* reply, uint32_t flags);
+    android::binder::Status enable(int32_t* _aidl_return) override;
+    android::binder::Status disable(int32_t* _aidl_return) override;
+    android::binder::Status command(int32_t cmdCode,
+                                    const std::vector<uint8_t>& cmdData,
+                                    int32_t maxResponseSize,
+                                    std::vector<uint8_t>* response,
+                                    int32_t* _aidl_return) override;
+    android::binder::Status disconnect() override;
+    android::binder::Status getCblk(media::SharedFileRegion* _aidl_return) override;
 
+private:
+    void disconnect(bool unpinIfLast);
 
     // Give or take control of effect module
     // - hasControl: true if control is given, false if removed
@@ -349,10 +348,8 @@
     // - enabled: state of the effect when control is passed
     void setControl(bool hasControl, bool signal, bool enabled);
     void commandExecuted(uint32_t cmdCode,
-                         uint32_t cmdSize,
-                         void *pCmdData,
-                         uint32_t replySize,
-                         void *pReplyData);
+                         const std::vector<uint8_t>& cmdData,
+                         const std::vector<uint8_t>& replyData);
     void setEnabled(bool enabled);
     bool enabled() const { return mEnabled; }
 
@@ -375,19 +372,20 @@
     friend class AudioFlinger;          // for mEffect, mHasControl, mEnabled
     DISALLOW_COPY_AND_ASSIGN(EffectHandle);
 
-    Mutex mLock;                        // protects IEffect method calls
-    wp<EffectBase> mEffect;           // pointer to controlled EffectModule
-    sp<IEffectClient> mEffectClient;    // callback interface for client notifications
-    /*const*/ sp<Client> mClient;       // client for shared memory allocation, see disconnect()
-    sp<IMemory>         mCblkMemory;    // shared memory for control block
-    effect_param_cblk_t* mCblk;         // control block for deferred parameter setting via
-                                        // shared memory
-    uint8_t*            mBuffer;        // pointer to parameter area in shared memory
-    int mPriority;                      // client application priority to control the effect
-    bool mHasControl;                   // true if this handle is controlling the effect
-    bool mEnabled;                      // cached enable state: needed when the effect is
-                                        // restored after being suspended
-    bool mDisconnected;                 // Set to true by disconnect()
+    Mutex mLock;                             // protects IEffect method calls
+    wp<EffectBase> mEffect;                  // pointer to controlled EffectModule
+    sp<media::IEffectClient> mEffectClient;  // callback interface for client notifications
+    /*const*/ sp<Client> mClient;            // client for shared memory allocation, see
+                                             //   disconnect()
+    sp<IMemory> mCblkMemory;                 // shared memory for control block
+    effect_param_cblk_t* mCblk;              // control block for deferred parameter setting via
+                                             // shared memory
+    uint8_t* mBuffer;                        // pointer to parameter area in shared memory
+    int mPriority;                           // client application priority to control the effect
+    bool mHasControl;                        // true if this handle is controlling the effect
+    bool mEnabled;                           // cached enable state: needed when the effect is
+                                             // restored after being suspended
+    bool mDisconnected;                      // Set to true by disconnect()
 };
 
 // the EffectChain class represents a group of effects associated to one audio session.
@@ -503,6 +501,10 @@
     // isCompatibleWithThread_l() must be called with thread->mLock held
     bool isCompatibleWithThread_l(const sp<ThreadBase>& thread) const;
 
+    bool containsHapticGeneratingEffect_l();
+
+    void setHapticIntensity_l(int id, int intensity);
+
     sp<EffectCallbackInterface> effectCallback() const { return mEffectCallback; }
     wp<ThreadBase> thread() const { return mEffectCallback->thread(); }
 
@@ -534,6 +536,7 @@
         uint32_t sampleRate() const override;
         audio_channel_mask_t channelMask() const override;
         uint32_t channelCount() const override;
+        audio_channel_mask_t hapticChannelMask() const override;
         size_t frameCount() const override;
         uint32_t latency() const override;
 
@@ -685,6 +688,7 @@
         uint32_t sampleRate() const override;
         audio_channel_mask_t channelMask() const override;
         uint32_t channelCount() const override;
+        audio_channel_mask_t hapticChannelMask() const override { return AUDIO_CHANNEL_NONE; }
         size_t frameCount() const override  { return 0; }
         uint32_t latency() const override  { return 0; }
 
diff --git a/services/audioflinger/FastMixer.cpp b/services/audioflinger/FastMixer.cpp
index 3eacc8c..cd3c743 100644
--- a/services/audioflinger/FastMixer.cpp
+++ b/services/audioflinger/FastMixer.cpp
@@ -27,7 +27,6 @@
 
 #include "Configuration.h"
 #include <time.h>
-#include <utils/Debug.h>
 #include <utils/Log.h>
 #include <utils/Trace.h>
 #include <system/audio.h>
@@ -40,6 +39,7 @@
 #include <audio_utils/channels.h>
 #include <audio_utils/format.h>
 #include <audio_utils/mono_blend.h>
+#include <cutils/bitops.h>
 #include <media/AudioMixer.h>
 #include "FastMixer.h"
 #include "TypedLogger.h"
diff --git a/services/audioflinger/FastMixerDumpState.cpp b/services/audioflinger/FastMixerDumpState.cpp
index a42e09c..3f20282 100644
--- a/services/audioflinger/FastMixerDumpState.cpp
+++ b/services/audioflinger/FastMixerDumpState.cpp
@@ -24,7 +24,6 @@
 #include <cpustats/ThreadCpuUsage.h>
 #endif
 #endif
-#include <utils/Debug.h>
 #include <utils/Log.h>
 #include "FastMixerDumpState.h"
 
diff --git a/services/audioflinger/FastMixerState.h b/services/audioflinger/FastMixerState.h
index 396c797..857d3de 100644
--- a/services/audioflinger/FastMixerState.h
+++ b/services/audioflinger/FastMixerState.h
@@ -23,6 +23,7 @@
 #include <media/ExtendedAudioBufferProvider.h>
 #include <media/nbaio/NBAIO.h>
 #include <media/nblog/NBLog.h>
+#include <vibrator/ExternalVibrationUtils.h>
 #include "FastThreadState.h"
 
 namespace android {
@@ -49,8 +50,7 @@
     audio_format_t          mFormat;         // track format
     int                     mGeneration;     // increment when any field is assigned
     bool                    mHapticPlaybackEnabled = false; // haptic playback is enabled or not
-    AudioMixer::haptic_intensity_t mHapticIntensity = AudioMixer::HAPTIC_SCALE_MUTE; // intensity of
-                                                                                     // haptic data
+    os::HapticScale         mHapticIntensity = os::HapticScale::MUTE; // intensity of haptic data
 };
 
 // Represents a single state of the fast mixer
diff --git a/services/audioflinger/OWNERS b/services/audioflinger/OWNERS
index d02d9e0..034d161 100644
--- a/services/audioflinger/OWNERS
+++ b/services/audioflinger/OWNERS
@@ -1,4 +1,4 @@
+gkasten@google.com
 hunga@google.com
 jmtrivi@google.com
 mnaganov@google.com
-gkasten@google.com
diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h
index d8eebf3..a4b8650 100644
--- a/services/audioflinger/PlaybackTracks.h
+++ b/services/audioflinger/PlaybackTracks.h
@@ -26,10 +26,11 @@
     bool hasOpPlayAudio() const;
 
     static sp<OpPlayAudioMonitor> createIfNeeded(
-            uid_t uid, const audio_attributes_t& attr, int id, audio_stream_type_t streamType);
+            uid_t uid, const audio_attributes_t& attr, int id, audio_stream_type_t streamType,
+            const std::string& opPackageName);
 
 private:
-    OpPlayAudioMonitor(uid_t uid, audio_usage_t usage, int id);
+    OpPlayAudioMonitor(uid_t uid, audio_usage_t usage, int id, const String16& opPackageName);
     void onFirstRef() override;
     static void getPackagesForUid(uid_t uid, Vector<String16>& packages);
 
@@ -49,10 +50,10 @@
     void checkPlayAudioForUsage();
 
     std::atomic_bool mHasOpPlayAudio;
-    Vector<String16> mPackages;
     const uid_t mUid;
     const int32_t mUsage; // on purpose not audio_usage_t because always checked in appOps as int32_t
     const int mId; // for logging purposes only
+    const String16 mOpPackageName;
 };
 
 // playback track
@@ -77,7 +78,8 @@
                                 audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE,
                                 /** default behaviour is to start when there are as many frames
                                   * ready as possible (aka. Buffer is full). */
-                                size_t frameCountToBeReady = SIZE_MAX);
+                                size_t frameCountToBeReady = SIZE_MAX,
+                                const std::string opPackageName = "");
     virtual             ~Track();
     virtual status_t    initCheck() const;
 
@@ -159,12 +161,12 @@
                 mHapticPlaybackEnabled = hapticPlaybackEnabled;
             }
             /** Return at what intensity to play haptics, used in mixer. */
-            AudioMixer::haptic_intensity_t getHapticIntensity() const { return mHapticIntensity; }
+            os::HapticScale getHapticIntensity() const { return mHapticIntensity; }
             /** Set intensity of haptic playback, should be set after querying vibrator service. */
-            void    setHapticIntensity(AudioMixer::haptic_intensity_t hapticIntensity) {
-                if (AudioMixer::isValidHapticIntensity(hapticIntensity)) {
+            void    setHapticIntensity(os::HapticScale hapticIntensity) {
+                if (os::isValidHapticScale(hapticIntensity)) {
                     mHapticIntensity = hapticIntensity;
-                    setHapticPlaybackEnabled(mHapticIntensity != AudioMixer::HAPTIC_SCALE_MUTE);
+                    setHapticPlaybackEnabled(mHapticIntensity != os::HapticScale::MUTE);
                 }
             }
             sp<os::ExternalVibration> getExternalVibration() const { return mExternalVibration; }
@@ -265,7 +267,7 @@
 
     bool                mHapticPlaybackEnabled = false; // indicates haptic playback enabled or not
     // intensity to play haptic data
-    AudioMixer::haptic_intensity_t mHapticIntensity = AudioMixer::HAPTIC_SCALE_MUTE;
+    os::HapticScale mHapticIntensity = os::HapticScale::MUTE;
     class AudioVibrationController : public os::BnExternalVibrationController {
     public:
         explicit AudioVibrationController(Track* track) : mTrack(track) {}
diff --git a/services/audioflinger/SpdifStreamOut.cpp b/services/audioflinger/SpdifStreamOut.cpp
index c7aba79..0ce5681 100644
--- a/services/audioflinger/SpdifStreamOut.cpp
+++ b/services/audioflinger/SpdifStreamOut.cpp
@@ -39,7 +39,7 @@
         , mSpdifEncoder(this, format)
         , mApplicationFormat(AUDIO_FORMAT_DEFAULT)
         , mApplicationSampleRate(0)
-        , mApplicationChannelMask(0)
+        , mApplicationChannelMask(AUDIO_CHANNEL_NONE)
 {
 }
 
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 2af27d8..b13b7be 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -29,6 +29,7 @@
 #include <linux/futex.h>
 #include <sys/stat.h>
 #include <sys/syscall.h>
+#include <cutils/bitops.h>
 #include <cutils/properties.h>
 #include <media/AudioContainers.h>
 #include <media/AudioDeviceTypeAddr.h>
@@ -115,6 +116,8 @@
 
 namespace android {
 
+using media::IEffectClient;
+
 // retry counts for buffer fill timeout
 // 50 * ~20msecs = 1 second
 static const int8_t kMaxTrackRetries = 50;
@@ -985,15 +988,16 @@
     if (mPowerManager != 0) {
         sp<IBinder> binder = new BBinder();
         // Uses AID_AUDIOSERVER for wakelock.  updateWakeLockUids_l() updates with client uids.
-        status_t status = mPowerManager->acquireWakeLock(POWERMANAGER_PARTIAL_WAKE_LOCK,
-                    binder,
+        binder::Status status = mPowerManager->acquireWakeLockAsync(binder,
+                    POWERMANAGER_PARTIAL_WAKE_LOCK,
                     getWakeLockTag(),
                     String16("audioserver"),
-                    true /* FIXME force oneway contrary to .aidl */);
-        if (status == NO_ERROR) {
+                    {} /* workSource */,
+                    {} /* historyTag */);
+        if (status.isOk()) {
             mWakeLockToken = binder;
         }
-        ALOGV("acquireWakeLock_l() %s status %d", mThreadName, status);
+        ALOGV("acquireWakeLock_l() %s status %d", mThreadName, status.exceptionCode());
     }
 
     gBoottime.acquire(mWakeLockToken);
@@ -1013,8 +1017,7 @@
     if (mWakeLockToken != 0) {
         ALOGV("releaseWakeLock_l() %s", mThreadName);
         if (mPowerManager != 0) {
-            mPowerManager->releaseWakeLock(mWakeLockToken, 0,
-                    true /* FIXME force oneway contrary to .aidl */);
+            mPowerManager->releaseWakeLockAsync(mWakeLockToken, 0);
         }
         mWakeLockToken.clear();
     }
@@ -1028,7 +1031,7 @@
         if (binder == 0) {
             ALOGW("Thread %s cannot connect to the power manager service", mThreadName);
         } else {
-            mPowerManager = interface_cast<IPowerManager>(binder);
+            mPowerManager = interface_cast<os::IPowerManager>(binder);
             binder->linkToDeath(mDeathRecipient);
         }
     }
@@ -1055,10 +1058,9 @@
     }
     if (mPowerManager != 0) {
         std::vector<int> uidsAsInt(uids.begin(), uids.end()); // powermanager expects uids as ints
-        status_t status = mPowerManager->updateWakeLockUids(
-                mWakeLockToken, uidsAsInt.size(), uidsAsInt.data(),
-                true /* FIXME force oneway contrary to .aidl */);
-        ALOGV("updateWakeLockUids_l() %s status %d", mThreadName, status);
+        binder::Status status = mPowerManager->updateWakeLockUidsAsync(
+                mWakeLockToken, uidsAsInt);
+        ALOGV("updateWakeLockUids_l() %s status %d", mThreadName, status.exceptionCode());
     }
 }
 
@@ -1243,6 +1245,11 @@
             return BAD_VALUE;
         }
     }
+
+    if (EffectModule::isHapticGenerator(&desc->type)) {
+        ALOGE("%s(): HapticGenerator is not supported in RecordThread", __func__);
+        return BAD_VALUE;
+    }
     return NO_ERROR;
 }
 
@@ -1262,6 +1269,12 @@
         return NO_ERROR;
     }
 
+    if (EffectModule::isHapticGenerator(&desc->type) && mHapticChannelCount == 0) {
+        ALOGW("%s: thread doesn't support haptic playback while the effect is HapticGenerator",
+                __func__);
+        return BAD_VALUE;
+    }
+
     switch (mType) {
     case MIXER: {
 #ifndef MULTICHANNEL_EFFECT_CHAIN
@@ -1901,9 +1914,8 @@
                                        : AUDIO_DEVICE_NONE));
     }
 
-    // ++ operator does not compile
-    for (audio_stream_type_t stream = AUDIO_STREAM_MIN; stream < AUDIO_STREAM_FOR_POLICY_CNT;
-            stream = (audio_stream_type_t) (stream + 1)) {
+    for (int i = AUDIO_STREAM_MIN; i < AUDIO_STREAM_FOR_POLICY_CNT; ++i) {
+        const audio_stream_type_t stream{static_cast<audio_stream_type_t>(i)};
         mStreamTypes[stream].volume = 0.0f;
         mStreamTypes[stream].mute = mAudioFlinger->streamMute_l(stream);
     }
@@ -1933,7 +1945,7 @@
         // here instead of constructor of PlaybackThread so that the onFirstRef
         // callback would not be made on an incompletely constructed object.
         if (mOutput->stream->setEventCallback(this) != OK) {
-            ALOGE("Failed to add event callback");
+            ALOGD("Failed to add event callback");
         }
     }
     run(mThreadName, ANDROID_PRIORITY_URGENT_AUDIO);
@@ -2067,7 +2079,8 @@
         uid_t uid,
         status_t *status,
         audio_port_handle_t portId,
-        const sp<media::IAudioTrackCallback>& callback)
+        const sp<media::IAudioTrackCallback>& callback,
+        const std::string& opPackageName)
 {
     size_t frameCount = *pFrameCount;
     size_t notificationFrameCount = *pNotificationFrameCount;
@@ -2345,10 +2358,21 @@
             }
         }
 
+        // Set the DIRECT flag if the current thread is a DirectOutputThread. This can
+        // happen when the playback is rerouted to a direct output thread by
+        // dynamic audio policy.
+        // Do NOT report the flag change back to the client, since the client
+        // did not explicitly request a direct flag.
+        audio_output_flags_t trackFlags = *flags;
+        if (mType == DIRECT) {
+            trackFlags = static_cast<audio_output_flags_t>(trackFlags | AUDIO_OUTPUT_FLAG_DIRECT);
+        }
+
         track = new Track(this, client, streamType, attr, sampleRate, format,
                           channelMask, frameCount,
                           nullptr /* buffer */, (size_t)0 /* bufferSize */, sharedBuffer,
-                          sessionId, creatorPid, uid, *flags, TrackBase::TYPE_DEFAULT, portId);
+                          sessionId, creatorPid, uid, trackFlags, TrackBase::TYPE_DEFAULT, portId,
+                          SIZE_MAX /*frameCountToBeReady*/, opPackageName);
 
         lStatus = track != 0 ? track->initCheck() : (status_t) NO_MEMORY;
         if (lStatus != NO_ERROR) {
@@ -2360,7 +2384,7 @@
         {
             Mutex::Autolock _atCbL(mAudioTrackCbLock);
             if (callback.get() != nullptr) {
-                mAudioTrackCallbacks.emplace(callback);
+                mAudioTrackCallbacks.emplace(track, callback);
             }
         }
 
@@ -2527,15 +2551,17 @@
                     track->sharedBuffer() != 0 ? Track::FS_FILLED : Track::FS_FILLING;
         }
 
-        if ((track->channelMask() & AUDIO_CHANNEL_HAPTIC_ALL) != AUDIO_CHANNEL_NONE
-                && mHapticChannelMask != AUDIO_CHANNEL_NONE) {
+        sp<EffectChain> chain = getEffectChain_l(track->sessionId());
+        if (mHapticChannelMask != AUDIO_CHANNEL_NONE
+                && ((track->channelMask() & AUDIO_CHANNEL_HAPTIC_ALL) != AUDIO_CHANNEL_NONE
+                        || (chain != nullptr && chain->containsHapticGeneratingEffect_l()))) {
             // Unlock due to VibratorService will lock for this call and will
             // call Tracks.mute/unmute which also require thread's lock.
             mLock.unlock();
             const int intensity = AudioFlinger::onExternalVibrationStart(
                     track->getExternalVibration());
             mLock.lock();
-            track->setHapticIntensity(static_cast<AudioMixer::haptic_intensity_t>(intensity));
+            track->setHapticIntensity(static_cast<os::HapticScale>(intensity));
             // Haptic playback should be enabled by vibrator service.
             if (track->getHapticPlaybackEnabled()) {
                 // Disable haptic playback of all active track to ensure only
@@ -2544,12 +2570,16 @@
                     t->setHapticPlaybackEnabled(false);
                 }
             }
+
+            // Set haptic intensity for effect
+            if (chain != nullptr) {
+                chain->setHapticIntensity_l(track->id(), intensity);
+            }
         }
 
         track->mResetDone = false;
         track->mPresentationCompleteFrames = 0;
         mActiveTracks.add(track);
-        sp<EffectChain> chain = getEffectChain_l(track->sessionId());
         if (chain != 0) {
             ALOGV("addTrack_l() starting track on chain %p for session %d", chain.get(),
                     track->sessionId());
@@ -2588,6 +2618,10 @@
     mLocalLog.log("removeTrack_l (%p) %s", track.get(), result.string());
 
     mTracks.remove(track);
+    {
+        Mutex::Autolock _atCbL(mAudioTrackCbLock);
+        mAudioTrackCallbacks.erase(track);
+    }
     if (track->isFastTrack()) {
         int index = track->mFastIndex;
         ALOG_ASSERT(0 < index && index < (int)FastMixerState::sMaxFastTracks);
@@ -2683,8 +2717,8 @@
                     audio_utils::metadata::byteStringFromData(metadata);
             std::vector metadataVec(metaDataStr.begin(), metaDataStr.end());
             Mutex::Autolock _l(mAudioTrackCbLock);
-            for (const auto& callback : mAudioTrackCallbacks) {
-                callback->onCodecFormatChanged(metadataVec);
+            for (const auto& callbackPair : mAudioTrackCallbacks) {
+                callbackPair.second->onCodecFormatChanged(metadataVec);
             }
     }).detach();
 }
@@ -2862,8 +2896,8 @@
         (void)posix_memalign(&mEffectBuffer, 32, mEffectBufferSize);
     }
 
-    mHapticChannelMask = mChannelMask & AUDIO_CHANNEL_HAPTIC_ALL;
-    mChannelMask &= ~mHapticChannelMask;
+    mHapticChannelMask = static_cast<audio_channel_mask_t>(mChannelMask & AUDIO_CHANNEL_HAPTIC_ALL);
+    mChannelMask = static_cast<audio_channel_mask_t>(mChannelMask & ~mHapticChannelMask);
     mHapticChannelCount = audio_channel_count_from_out_mask(mHapticChannelMask);
     mChannelCount -= mHapticChannelCount;
 
@@ -3481,7 +3515,7 @@
                     // latency of 5 seconds).
                     const double minLatency = 0., maxLatency = 5000.;
                     if (latencyMs >= minLatency && latencyMs <= maxLatency) {
-                        ALOGV("new downstream latency %lf ms", latencyMs);
+                        ALOGVV("new downstream latency %lf ms", latencyMs);
                     } else {
                         ALOGD("out of range downstream latency %lf ms", latencyMs);
                         if (latencyMs < minLatency) latencyMs = minLatency;
@@ -3530,7 +3564,7 @@
                         mSampleRate);
 
                 if (isTimestampCorrectionEnabled()) {
-                    ALOGV("TS_BEFORE: %d %lld %lld", id(),
+                    ALOGVV("TS_BEFORE: %d %lld %lld", id(),
                             (long long)timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL],
                             (long long)timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]);
                     auto correctedTimestamp = mTimestampVerifier.getLastCorrectedTimestamp();
@@ -3538,7 +3572,7 @@
                             = correctedTimestamp.mFrames;
                     timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL]
                             = correctedTimestamp.mTimeNs;
-                    ALOGV("TS_AFTER: %d %lld %lld", id(),
+                    ALOGVV("TS_AFTER: %d %lld %lld", id(),
                             (long long)timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL],
                             (long long)timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]);
 
@@ -3732,9 +3766,15 @@
 
             // Determine which session to pick up haptic data.
             // This must be done under the same lock as prepareTracks_l().
+            // Haptic data from the effect takes priority over haptic data from the track.
             // TODO: Write haptic data directly to sink buffer when mixing.
             if (mHapticChannelCount > 0 && effectChains.size() > 0) {
                 for (const auto& track : mActiveTracks) {
+                    sp<EffectChain> effectChain = getEffectChain_l(track->sessionId());
+                    if (effectChain != nullptr && effectChain->containsHapticGeneratingEffect_l()) {
+                        activeHapticSessionId = track->sessionId();
+                        break;
+                    }
                     if (track->getHapticPlaybackEnabled()) {
                         activeHapticSessionId = track->sessionId();
                         break;
@@ -4104,13 +4144,20 @@
             // remove from our tracks vector
             removeTrack_l(track);
         }
-        if ((track->channelMask() & AUDIO_CHANNEL_HAPTIC_ALL) != AUDIO_CHANNEL_NONE
-                && mHapticChannelCount > 0) {
+        if (mHapticChannelCount > 0 &&
+                ((track->channelMask() & AUDIO_CHANNEL_HAPTIC_ALL) != AUDIO_CHANNEL_NONE
+                        || (chain != nullptr && chain->containsHapticGeneratingEffect_l()))) {
             mLock.unlock();
             // Unlock due to VibratorService will lock for this call and will
             // call Tracks.mute/unmute which also require thread's lock.
             AudioFlinger::onExternalVibrationStop(track->getExternalVibration());
             mLock.lock();
+
+            // When the track is stopped, set the haptic intensity to MUTE
+            // for the HapticGenerator effect.
+            if (chain != nullptr) {
+                chain->setHapticIntensity_l(track->id(), static_cast<int>(os::HapticScale::MUTE));
+            }
         }
     }
 }
@@ -4199,7 +4246,7 @@
                             "Enumerated device type(%#x) must not be used "
                             "as it does not support audio patches",
                             patch->sinks[i].ext.device.type);
-        type |= patch->sinks[i].ext.device.type;
+        type = static_cast<audio_devices_t>(type | patch->sinks[i].ext.device.type);
         deviceTypeAddrs.push_back(AudioDeviceTypeAddr(patch->sinks[i].ext.device.type,
                 patch->sinks[i].ext.device.address));
     }
@@ -4449,11 +4496,12 @@
         // wrap the source side of the MonoPipe to make it an AudioBufferProvider
         fastTrack->mBufferProvider = new SourceAudioBufferProvider(new MonoPipeReader(monoPipe));
         fastTrack->mVolumeProvider = NULL;
-        fastTrack->mChannelMask = mChannelMask | mHapticChannelMask; // mPipeSink channel mask for
-                                                                     // audio to FastMixer
+        fastTrack->mChannelMask = static_cast<audio_channel_mask_t>(
+                mChannelMask | mHapticChannelMask); // mPipeSink channel mask for
+                                                    // audio to FastMixer
         fastTrack->mFormat = mFormat; // mPipeSink format for audio to FastMixer
         fastTrack->mHapticPlaybackEnabled = mHapticChannelMask != AUDIO_CHANNEL_NONE;
-        fastTrack->mHapticIntensity = AudioMixer::HAPTIC_SCALE_NONE;
+        fastTrack->mHapticIntensity = os::HapticScale::NONE;
         fastTrack->mGeneration++;
         state->mFastTracksGen++;
         state->mTrackMask = 1;
@@ -4464,7 +4512,8 @@
         // specify sink channel mask when haptic channel mask present as it can not
         // be calculated directly from channel count
         state->mSinkChannelMask = mHapticChannelMask == AUDIO_CHANNEL_NONE
-                ? AUDIO_CHANNEL_NONE : mChannelMask | mHapticChannelMask;
+                ? AUDIO_CHANNEL_NONE
+                : static_cast<audio_channel_mask_t>(mChannelMask | mHapticChannelMask);
         state->mCommand = FastMixerState::COLD_IDLE;
         // already done in constructor initialization list
         //mFastMixerFutex = 0;
@@ -6881,7 +6930,7 @@
     snprintf(mThreadName, kThreadNameLength, "AudioIn_%X", id);
     mNBLogWriter = audioFlinger->newWriter_l(kLogSize, mThreadName);
 
-    if (mInput != nullptr && mInput->audioHwDev != nullptr) {
+    if (mInput->audioHwDev != nullptr) {
         mIsMsdDevice = strcmp(
                 mInput->audioHwDev->moduleName(), AUDIO_HARDWARE_MODULE_ID_MSD) == 0;
     }
@@ -7312,7 +7361,7 @@
 
             const ssize_t availableToRead = mPipeSource->availableToRead();
             if (availableToRead >= 0) {
-                // PipeSource is the master clock.  It is up to the AudioRecord client to keep up.
+                // PipeSource is the primary clock.  It is up to the AudioRecord client to keep up.
                 LOG_ALWAYS_FATAL_IF((size_t)availableToRead > mPipeFramesP2,
                         "more frames to read than fifo size, %zd > %zu",
                         availableToRead, mPipeFramesP2);
@@ -7373,12 +7422,12 @@
 
                 // Correct timestamps
                 if (isTimestampCorrectionEnabled()) {
-                    ALOGV("TS_BEFORE: %d %lld %lld",
+                    ALOGVV("TS_BEFORE: %d %lld %lld",
                             id(), (long long)time, (long long)position);
                     auto correctedTimestamp = mTimestampVerifier.getLastCorrectedTimestamp();
                     position = correctedTimestamp.mFrames;
                     time = correctedTimestamp.mTimeNs;
-                    ALOGV("TS_AFTER: %d %lld %lld",
+                    ALOGVV("TS_AFTER: %d %lld %lld",
                             id(), (long long)time, (long long)position);
                 }
 
@@ -7428,7 +7477,7 @@
                         (framesRead - part1) * mFrameSize);
             }
         }
-        rear = mRsmpInRear += framesRead;
+        mRsmpInRear = audio_utils::safe_add_overflow(mRsmpInRear, (int32_t)framesRead);
 
         size = activeTracks.size();
 
@@ -7869,7 +7918,8 @@
         AutoMutex lock(mLock);
         if (recordTrack->isInvalid()) {
             recordTrack->clearSyncStartEvent();
-            return INVALID_OPERATION;
+            ALOGW("%s track %d: invalidated before startInput", __func__, recordTrack->portId());
+            return DEAD_OBJECT;
         }
         if (mActiveTracks.indexOf(recordTrack) >= 0) {
             if (recordTrack->mState == TrackBase::PAUSING) {
@@ -7899,7 +7949,8 @@
                     recordTrack->mState = TrackBase::STARTING_2;
                     // STARTING_2 forces destroy to call stopInput.
                 }
-                return INVALID_OPERATION;
+                ALOGW("%s track %d: invalidated after startInput", __func__, recordTrack->portId());
+                return DEAD_OBJECT;
             }
             if (recordTrack->mState != TrackBase::STARTING_1) {
                 ALOGW("%s(%d): unsynchronized mState:%d change",
@@ -8554,7 +8605,7 @@
 
     // store new device and send to effects
     mInDeviceTypeAddr.mType = patch->sources[0].ext.device.type;
-    mInDeviceTypeAddr.mAddress = patch->sources[0].ext.device.address;
+    mInDeviceTypeAddr.setAddress(patch->sources[0].ext.device.address);
     audio_port_handle_t deviceId = patch->sources[0].id;
     for (size_t i = 0; i < mEffectChains.size(); i++) {
         mEffectChains[i]->setInputDevice_l(inDeviceTypeAddr());
@@ -8696,6 +8747,11 @@
     return mThread->getMmapPosition(position);
 }
 
+status_t AudioFlinger::MmapThreadHandle::getExternalPosition(uint64_t *position,
+                                                             int64_t *timeNanos) {
+    return mThread->getExternalPosition(position, timeNanos);
+}
+
 status_t AudioFlinger::MmapThreadHandle::start(const AudioClient& client,
         const audio_attributes_t *attr, audio_port_handle_t *handle)
 
@@ -8731,7 +8787,6 @@
 
 AudioFlinger::MmapThread::~MmapThread()
 {
-    releaseWakeLock_l();
 }
 
 void AudioFlinger::MmapThread::onFirstRef()
@@ -8781,7 +8836,6 @@
         return NO_INIT;
     }
     mStandby = true;
-    acquireWakeLock();
     return mHalStream->createMmapBuffer(minSizeFrames, info);
 }
 
@@ -8820,8 +8874,12 @@
     status_t ret;
 
     if (*handle == mPortId) {
-        // for the first track, reuse portId and session allocated when the stream was opened
-        return exitStandby();
+        // For the first track, reuse portId and session allocated when the stream was opened.
+        ret = exitStandby();
+        if (ret == NO_ERROR) {
+            acquireWakeLock();
+        }
+        return ret;
     }
 
     audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
@@ -8942,6 +9000,7 @@
 
     if (handle == mPortId) {
         mHalStream->stop();
+        releaseWakeLock();
         return NO_ERROR;
     }
 
@@ -9184,7 +9243,7 @@
                                 "Enumerated device type(%#x) must not be used "
                                 "as it does not support audio patches",
                                 patch->sinks[i].ext.device.type);
-            type |= patch->sinks[i].ext.device.type;
+            type = static_cast<audio_devices_t>(type | patch->sinks[i].ext.device.type);
             sinkDeviceTypeAddrs.push_back(AudioDeviceTypeAddr(patch->sinks[i].ext.device.type,
                     patch->sinks[i].ext.device.address));
         }
@@ -9195,7 +9254,7 @@
         deviceId = patch->sources[0].id;
         numDevices = mPatch.num_sources;
         sourceDeviceTypeAddr.mType = patch->sources[0].ext.device.type;
-        sourceDeviceTypeAddr.mAddress = patch->sources[0].ext.device.address;
+        sourceDeviceTypeAddr.setAddress(patch->sources[0].ext.device.address);
     }
 
     for (size_t i = 0; i < mEffectChains.size(); i++) {
@@ -9393,6 +9452,11 @@
         return BAD_VALUE;
     }
 
+    if (EffectModule::isHapticGenerator(&desc->type)) {
+        ALOGE("%s(): HapticGenerator is not supported for MmapThread", __func__);
+        return BAD_VALUE;
+    }
+
     return NO_ERROR;
 }
 
@@ -9645,6 +9709,20 @@
     }
 }
 
+status_t AudioFlinger::MmapPlaybackThread::getExternalPosition(uint64_t *position,
+                                                               int64_t *timeNanos)
+{
+    if (mOutput == nullptr) {
+        return NO_INIT;
+    }
+    struct timespec timestamp;
+    status_t status = mOutput->getPresentationPosition(position, &timestamp);
+    if (status == NO_ERROR) {
+        *timeNanos = timestamp.tv_sec * NANOS_PER_SECOND + timestamp.tv_nsec;
+    }
+    return status;
+}
+
 void AudioFlinger::MmapPlaybackThread::dumpInternals_l(int fd, const Vector<String16>& args)
 {
     MmapThread::dumpInternals_l(fd, args);
@@ -9749,4 +9827,13 @@
     }
 }
 
+status_t AudioFlinger::MmapCaptureThread::getExternalPosition(
+        uint64_t *position, int64_t *timeNanos)
+{
+    if (mInput == nullptr) {
+        return NO_INIT;
+    }
+    return mInput->getCapturePosition((int64_t*)position, timeNanos);
+}
+
 } // namespace android
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index c1ac2e4..014f2d7 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -272,6 +272,7 @@
                 // Called by AudioFlinger::frameCount(audio_io_handle_t output) and effects,
                 // and returns the [normal mix] buffer's frame count.
     virtual     size_t      frameCount() const = 0;
+    virtual     audio_channel_mask_t hapticChannelMask() const { return AUDIO_CHANNEL_NONE; }
     virtual     uint32_t    latency_l() const { return 0; }
     virtual     void        setVolumeForOutput_l(float left __unused, float right __unused) const {}
 
@@ -348,7 +349,7 @@
 
                 sp<EffectHandle> createEffect_l(
                                     const sp<AudioFlinger::Client>& client,
-                                    const sp<IEffectClient>& effectClient,
+                                    const sp<media::IEffectClient>& effectClient,
                                     int32_t priority,
                                     audio_session_t sessionId,
                                     effect_descriptor_t *desc,
@@ -478,6 +479,25 @@
                 void onEffectEnable(const sp<EffectModule>& effect);
                 void onEffectDisable();
 
+                // invalidateTracksForAudioSession_l must be called while holding mLock.
+    virtual     void invalidateTracksForAudioSession_l(audio_session_t sessionId __unused) const { }
+                // Invalidate all the tracks with the given audio session.
+                void invalidateTracksForAudioSession(audio_session_t sessionId) const {
+                    Mutex::Autolock _l(mLock);
+                    invalidateTracksForAudioSession_l(sessionId);
+                }
+
+                template <typename T>
+                void invalidateTracksForAudioSession_l(audio_session_t sessionId,
+                                                       const T& tracks) const {
+                    for (size_t i = 0; i < tracks.size(); ++i) {
+                        const sp<TrackBase>& track = tracks[i];
+                        if (sessionId == track->sessionId()) {
+                            track->invalidate();
+                        }
+                    }
+                }
+
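The template above is a plain linear scan over a track collection. A self-contained sketch of the same pattern, with FakeTrack and std::vector as hypothetical stand-ins for TrackBase and the AudioFlinger track vectors:

    #include <vector>

    struct FakeTrack {                      // hypothetical stand-in for TrackBase
        int session;
        bool valid = true;
        int sessionId() const { return session; }
        void invalidate() { valid = false; }
    };

    template <typename T>
    void invalidateTracksForSession(int sessionId, T &tracks) {
        for (auto &track : tracks) {        // invalidate every track bound to the session
            if (track.sessionId() == sessionId) {
                track.invalidate();
            }
        }
    }

    // Usage: std::vector<FakeTrack> tracks{{1}, {2}, {1}};
    //        invalidateTracksForSession(1, tracks);  // marks the first and third invalid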
 protected:
 
                 // entry describing an effect being suspended in mSuspendedSessions keyed vector
@@ -575,7 +595,7 @@
 
                 static const int        kThreadNameLength = 16; // prctl(PR_SET_NAME) limit
                 char                    mThreadName[kThreadNameLength]; // guaranteed NUL-terminated
-                sp<IPowerManager>       mPowerManager;
+                sp<os::IPowerManager>   mPowerManager;
                 sp<IBinder>             mWakeLockToken;
                 const sp<PMDeathRecipient> mDeathRecipient;
                 // list of suspended effects per session and per type. The first (outer) vector is
@@ -864,7 +884,8 @@
                                 uid_t uid,
                                 status_t *status /*non-NULL*/,
                                 audio_port_handle_t portId,
-                                const sp<media::IAudioTrackCallback>& callback);
+                                const sp<media::IAudioTrackCallback>& callback,
+                                const std::string& opPackageName);
 
                 AudioStreamOut* getOutput() const;
                 AudioStreamOut* clearOutput();
@@ -939,6 +960,13 @@
                                         && outDeviceTypes().count(mTimestampCorrectedDevice) != 0;
                             }
 
+                audio_channel_mask_t hapticChannelMask() const override {
+                                         return mHapticChannelMask;
+                                     }
+                bool supportsHapticPlayback() const {
+                    return (mHapticChannelMask & AUDIO_CHANNEL_HAPTIC_ALL) != AUDIO_CHANNEL_NONE;
+                }
+
 protected:
     // updated by readOutputParameters_l()
     size_t                          mNormalFrameCount;  // normal mixer and effects
@@ -1061,6 +1089,11 @@
 
                 uint32_t    trackCountForUid_l(uid_t uid) const;
 
+                void        invalidateTracksForAudioSession_l(
+                                    audio_session_t sessionId) const override {
+                                ThreadBase::invalidateTracksForAudioSession_l(sessionId, mTracks);
+                            }
+
 private:
 
     friend class AudioFlinger;      // for numerous
@@ -1186,7 +1219,7 @@
 
     Mutex                                    mAudioTrackCbLock;
     // Record of IAudioTrackCallback
-    std::set<sp<media::IAudioTrackCallback>> mAudioTrackCallbacks;
+    std::map<sp<Track>, sp<media::IAudioTrackCallback>> mAudioTrackCallbacks;
 
 private:
     // The HAL output sink is treated as non-blocking, but current implementation is blocking
@@ -1791,6 +1824,7 @@
                    audio_port_handle_t *handle);
     status_t stop(audio_port_handle_t handle);
     status_t standby();
+    virtual status_t getExternalPosition(uint64_t *position, int64_t *timeNanos) = 0;
 
     // RefBase
     virtual     void        onFirstRef();
@@ -1902,6 +1936,8 @@
 
     virtual     void        toAudioPortConfig(struct audio_port_config *config);
 
+                status_t    getExternalPosition(uint64_t *position, int64_t *timeNanos) override;
+
 protected:
                 void        dumpInternals_l(int fd, const Vector<String16>& args) override;
 
@@ -1932,6 +1968,8 @@
 
     virtual     void           toAudioPortConfig(struct audio_port_config *config);
 
+                status_t       getExternalPosition(uint64_t *position, int64_t *timeNanos) override;
+
 protected:
 
                 AudioStreamIn*  mInput;
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index 126015f..1a12a5f 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -386,11 +386,12 @@
 // static
 sp<AudioFlinger::PlaybackThread::OpPlayAudioMonitor>
 AudioFlinger::PlaybackThread::OpPlayAudioMonitor::createIfNeeded(
-            uid_t uid, const audio_attributes_t& attr, int id, audio_stream_type_t streamType)
+            uid_t uid, const audio_attributes_t& attr, int id, audio_stream_type_t streamType,
+            const std::string& opPackageName)
 {
+    Vector <String16> packages;
+    getPackagesForUid(uid, packages);
     if (isServiceUid(uid)) {
-        Vector <String16> packages;
-        getPackagesForUid(uid, packages);
         if (packages.isEmpty()) {
             ALOGD("OpPlayAudio: not muting track:%d usage:%d for service UID %d",
                   id,
@@ -410,12 +411,32 @@
             id, attr.flags);
         return nullptr;
     }
-    return new OpPlayAudioMonitor(uid, attr.usage, id);
+
+    String16 opPackageNameStr(opPackageName.c_str());
+    if (opPackageName.empty()) {
+        // If no package name is provided by the client, use the first associated with the uid
+        if (!packages.isEmpty()) {
+            opPackageNameStr = packages[0];
+        }
+    } else {
+        // If the provided package name is invalid, we force app ops denial by clearing the package
+        // name passed to OpPlayAudioMonitor
+        if (std::find_if(packages.begin(), packages.end(),
+                [&opPackageNameStr](const auto& package) {
+                return opPackageNameStr == package; }) == packages.end()) {
+            ALOGW("The package name(%s) provided does not correspond to the uid %d, "
+                  "force muting the track", opPackageName.c_str(), uid);
+            // Set package name as an empty string so that hasOpPlayAudio will always return false.
+            opPackageNameStr = String16("");
+        }
+    }
+    return new OpPlayAudioMonitor(uid, attr.usage, id, opPackageNameStr);
 }
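The validation above boils down to: use the client-supplied package name only if it belongs to the uid, fall back to the uid's first package when none is supplied, and clear the name (forcing app-op denial) when it does not match. A standalone sketch of that decision, with std::string and std::vector standing in for String16 and Vector<String16> (hypothetical helper, illustrative only):

    #include <algorithm>
    #include <string>
    #include <vector>

    std::string resolveOpPackageName(const std::string &provided,
                                     const std::vector<std::string> &uidPackages) {
        if (provided.empty()) {
            // No name from the client: use the first package associated with the uid, if any.
            return uidPackages.empty() ? std::string() : uidPackages.front();
        }
        // A provided name that does not belong to the uid is cleared, so the op check always denies.
        const bool belongsToUid =
                std::find(uidPackages.begin(), uidPackages.end(), provided) != uidPackages.end();
        return belongsToUid ? provided : std::string();
    }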
 
 AudioFlinger::PlaybackThread::OpPlayAudioMonitor::OpPlayAudioMonitor(
-        uid_t uid, audio_usage_t usage, int id)
-        : mHasOpPlayAudio(true), mUid(uid), mUsage((int32_t) usage), mId(id)
+        uid_t uid, audio_usage_t usage, int id, const String16& opPackageName)
+        : mHasOpPlayAudio(true), mUid(uid), mUsage((int32_t) usage), mId(id),
+          mOpPackageName(opPackageName)
 {
 }
 
@@ -429,11 +450,10 @@
 
 void AudioFlinger::PlaybackThread::OpPlayAudioMonitor::onFirstRef()
 {
-    getPackagesForUid(mUid, mPackages);
     checkPlayAudioForUsage();
-    if (!mPackages.isEmpty()) {
+    if (mOpPackageName.size() != 0) {
         mOpCallback = new PlayAudioOpCallback(this);
-        mAppOpsManager.startWatchingMode(AppOpsManager::OP_PLAY_AUDIO, mPackages[0], mOpCallback);
+        mAppOpsManager.startWatchingMode(AppOpsManager::OP_PLAY_AUDIO, mOpPackageName, mOpCallback);
     }
 }
 
@@ -446,18 +466,11 @@
 // - not called from PlayAudioOpCallback because the callback is not installed in this case
 void AudioFlinger::PlaybackThread::OpPlayAudioMonitor::checkPlayAudioForUsage()
 {
-    if (mPackages.isEmpty()) {
+    if (mOpPackageName.size() == 0) {
         mHasOpPlayAudio.store(false);
     } else {
-        bool hasIt = true;
-        for (const String16& packageName : mPackages) {
-            const int32_t mode = mAppOpsManager.checkAudioOpNoThrow(AppOpsManager::OP_PLAY_AUDIO,
-                    mUsage, mUid, packageName);
-            if (mode != AppOpsManager::MODE_ALLOWED) {
-                hasIt = false;
-                break;
-            }
-        }
+        bool hasIt = mAppOpsManager.checkAudioOpNoThrow(AppOpsManager::OP_PLAY_AUDIO,
+                    mUsage, mUid, mOpPackageName) == AppOpsManager::MODE_ALLOWED;
         ALOGD("OpPlayAudio: track:%d usage:%d %smuted", mId, mUsage, hasIt ? "not " : "");
         mHasOpPlayAudio.store(hasIt);
     }
@@ -511,7 +524,8 @@
             audio_output_flags_t flags,
             track_type type,
             audio_port_handle_t portId,
-            size_t frameCountToBeReady)
+            size_t frameCountToBeReady,
+            const std::string opPackageName)
     :   TrackBase(thread, client, attr, sampleRate, format, channelMask, frameCount,
                   // TODO: Using unsecurePointer() has some associated security pitfalls
                   //       (see declaration for details).
@@ -534,7 +548,8 @@
     mPresentationCompleteFrames(0),
     mFrameMap(16 /* sink-frame-to-track-frame map memory */),
     mVolumeHandler(new media::VolumeHandler(sampleRate)),
-    mOpPlayAudioMonitor(OpPlayAudioMonitor::createIfNeeded(uid, attr, id(), streamType)),
+    mOpPlayAudioMonitor(OpPlayAudioMonitor::createIfNeeded(
+            uid, attr, id(), streamType, opPackageName)),
     // mSinkTimestamp
     mFrameCountToBeReady(frameCountToBeReady),
     mFastIndex(-1),
@@ -595,10 +610,13 @@
             + "_" + std::to_string(mId) + "_T");
 #endif
 
-    if (channelMask & AUDIO_CHANNEL_HAPTIC_ALL) {
+    if (thread->supportsHapticPlayback()) {
+        // If the track is attached to a haptic playback thread, it may carry a
+        // HapticGenerator effect, which generates haptic data on the track. In that case,
+        // an external vibration is always created for every track attached to the thread.
         mAudioVibrationController = new AudioVibrationController(this);
         mExternalVibration = new os::ExternalVibration(
-                mUid, "" /* pkg */, mAttr, mAudioVibrationController);
+                mUid, opPackageName, mAttr, mAudioVibrationController);
     }
 
     // Once this item is logged by the server, the client can add properties.
@@ -936,6 +954,11 @@
         // initial state-stopping. next state-pausing.
         // What if resume is called ?
 
+        if (state == FLUSHED) {
+            // avoid underrun glitches when starting after flush
+            reset();
+        }
+
         if (state == PAUSED || state == PAUSING) {
             if (mResumeToStopping) {
                 // happened we need to resume to STOPPING_1
@@ -1918,6 +1941,25 @@
 {
     mProxy->releaseBuffer(buffer);
     restartIfDisabled();
+
+    // Check if the PatchTrack has enough data to write once in releaseBuffer().
+    // If not, prevent an underrun from occurring by moving the track into FS_FILLING;
+    // this logic avoids glitches when suspending A2DP with AudioPlaybackCapture.
+    // TODO: perhaps underrun avoidance could be a track property checked in isReady() instead.
+    if (mFillingUpStatus == FS_ACTIVE
+            && audio_is_linear_pcm(mFormat)
+            && !isOffloadedOrDirect()) {
+        if (sp<ThreadBase> thread = mThread.promote();
+            thread != 0) {
+            PlaybackThread *playbackThread = (PlaybackThread *)thread.get();
+            const size_t frameCount = playbackThread->frameCount() * sampleRate()
+                    / playbackThread->sampleRate();
+            if (framesReady() < frameCount) {
+                ALOGD("%s(%d) Not enough data, wait for buffer to fill", __func__, mId);
+                mFillingUpStatus = FS_FILLING;
+            }
+        }
+    }
 }
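The check above compares the track's ready frames against one sink-thread period expressed in the track's own sample-rate domain. A minimal sketch of that rescaling (illustrative only):

    #include <cstddef>
    #include <cstdint>

    // One sink period, converted from the thread's sample rate to the track's.
    size_t periodInTrackFrames(size_t threadFrameCount,
                               uint32_t trackSampleRate,
                               uint32_t threadSampleRate) {
        // e.g. 960 frames on a 48000 Hz thread correspond to 882 frames for a 44100 Hz track.
        return threadFrameCount * trackSampleRate / threadSampleRate;
    }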
 
 void AudioFlinger::PlaybackThread::PatchTrack::restartIfDisabled()
@@ -2207,7 +2249,8 @@
         RecordThread *recordThread = (RecordThread *)thread.get();
         return recordThread->start(this, event, triggerSession);
     } else {
-        return BAD_VALUE;
+        ALOGW("%s track %d: thread was destroyed", __func__, portId());
+        return DEAD_OBJECT;
     }
 }
 
diff --git a/services/audioflinger/TypedLogger.h b/services/audioflinger/TypedLogger.h
index 6ef19bf..feb71e3 100644
--- a/services/audioflinger/TypedLogger.h
+++ b/services/audioflinger/TypedLogger.h
@@ -80,7 +80,7 @@
 
 // TODO Permit disabling of logging at compile-time.
 
-// TODO A non-nullptr dummy implementation that is a nop would be faster than checking for nullptr
+// TODO A non-nullptr stub implementation that is a nop would be faster than checking for nullptr
 //      in the case when logging is enabled at compile-time and enabled at runtime, but it might be
 //      slower than nullptr check when logging is enabled at compile-time and disabled at runtime.
 
@@ -129,8 +129,8 @@
 
 namespace android {
 extern "C" {
-// TODO consider adding a thread_local NBLog::Writer tlDummyNBLogWriter and then
-// initialize below tlNBLogWriter to &tlDummyNBLogWriter to remove the need to
+// TODO consider adding a thread_local NBLog::Writer tlStubNBLogWriter and then
+// initialize below tlNBLogWriter to &tlStubNBLogWriter to remove the need to
 // check for nullptr every time. Also reduces the need to add a new logging macro above
 // each time we want to log a new type.
 extern thread_local NBLog::Writer *tlNBLogWriter;
diff --git a/services/audiopolicy/AudioPolicyInterface.h b/services/audiopolicy/AudioPolicyInterface.h
index 8d0e5db..93819f5 100644
--- a/services/audiopolicy/AudioPolicyInterface.h
+++ b/services/audiopolicy/AudioPolicyInterface.h
@@ -250,12 +250,12 @@
     virtual status_t registerPolicyMixes(const Vector<AudioMix>& mixes) = 0;
     virtual status_t unregisterPolicyMixes(Vector<AudioMix> mixes) = 0;
 
-    virtual status_t setUidDeviceAffinities(uid_t uid, const Vector<AudioDeviceTypeAddr>& devices)
+    virtual status_t setUidDeviceAffinities(uid_t uid, const AudioDeviceTypeAddrVector& devices)
             = 0;
     virtual status_t removeUidDeviceAffinities(uid_t uid) = 0;
 
     virtual status_t setUserIdDeviceAffinities(int userId,
-            const Vector<AudioDeviceTypeAddr>& devices) = 0;
+            const AudioDeviceTypeAddrVector& devices) = 0;
     virtual status_t removeUserIdDeviceAffinities(int userId) = 0;
 
     virtual status_t startAudioSource(const struct audio_port_config *source,
@@ -295,13 +295,36 @@
 
     virtual bool     isCallScreenModeSupported() = 0;
 
-    virtual status_t setPreferredDeviceForStrategy(product_strategy_t strategy,
-                                                   const AudioDeviceTypeAddr &device) = 0;
+    virtual status_t setDevicesRoleForStrategy(product_strategy_t strategy,
+                                               device_role_t role,
+                                               const AudioDeviceTypeAddrVector &devices) = 0;
 
-    virtual status_t removePreferredDeviceForStrategy(product_strategy_t strategy) = 0;
+    virtual status_t removeDevicesRoleForStrategy(product_strategy_t strategy,
+                                                  device_role_t role) = 0;
 
-    virtual status_t getPreferredDeviceForStrategy(product_strategy_t strategy,
-                                                   AudioDeviceTypeAddr &device) = 0;
+
+    virtual status_t getDevicesForRoleAndStrategy(product_strategy_t strategy,
+                                                  device_role_t role,
+                                                  AudioDeviceTypeAddrVector &devices) = 0;
+
+    virtual status_t setDevicesRoleForCapturePreset(audio_source_t audioSource,
+                                                    device_role_t role,
+                                                    const AudioDeviceTypeAddrVector &devices) = 0;
+
+    virtual status_t addDevicesRoleForCapturePreset(audio_source_t audioSource,
+                                                    device_role_t role,
+                                                    const AudioDeviceTypeAddrVector &devices) = 0;
+
+    virtual status_t removeDevicesRoleForCapturePreset(
+            audio_source_t audioSource, device_role_t role,
+            const AudioDeviceTypeAddrVector& devices) = 0;
+
+    virtual status_t clearDevicesRoleForCapturePreset(audio_source_t audioSource,
+                                                      device_role_t role) = 0;
+
+    virtual status_t getDevicesForRoleAndCapturePreset(audio_source_t audioSource,
+                                                       device_role_t role,
+                                                       AudioDeviceTypeAddrVector &devices) = 0;
 };
 
 
diff --git a/services/audiopolicy/OWNERS b/services/audiopolicy/OWNERS
index a8483fa..da9d32f 100644
--- a/services/audiopolicy/OWNERS
+++ b/services/audiopolicy/OWNERS
@@ -1,3 +1,2 @@
 jmtrivi@google.com
-krocard@google.com
 mnaganov@google.com
diff --git a/services/audiopolicy/common/include/Volume.h b/services/audiopolicy/common/include/Volume.h
index 7c8ce83..736f8b2 100644
--- a/services/audiopolicy/common/include/Volume.h
+++ b/services/audiopolicy/common/include/Volume.h
@@ -126,6 +126,7 @@
         case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP:
         case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES:
         case AUDIO_DEVICE_OUT_USB_HEADSET:
+        case AUDIO_DEVICE_OUT_BLE_HEADSET:
             return DEVICE_CATEGORY_HEADSET;
         case AUDIO_DEVICE_OUT_HEARING_AID:
             return DEVICE_CATEGORY_HEARING_AID;
@@ -139,6 +140,7 @@
         case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER:
         case AUDIO_DEVICE_OUT_USB_ACCESSORY:
         case AUDIO_DEVICE_OUT_REMOTE_SUBMIX:
+        case AUDIO_DEVICE_OUT_BLE_SPEAKER:
         default:
             return DEVICE_CATEGORY_SPEAKER;
         }
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
index 39d1140..1d9223e 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
@@ -498,11 +498,6 @@
      */
     bool isA2dpOffloadedOnPrimary() const;
 
-    /**
-     * returns true if A2DP is supported (either via hardware offload or software encoding)
-     */
-    bool isA2dpSupported() const;
-
     sp<SwAudioOutputDescriptor> getOutputFromId(audio_port_handle_t id) const;
 
     sp<SwAudioOutputDescriptor> getPrimaryOutput() const;
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyConfig.h b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyConfig.h
index 395bc70..cf1f64c 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyConfig.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyConfig.h
@@ -33,6 +33,15 @@
 
 namespace android {
 
+// This class gathers together various bits of AudioPolicyManager
+// configuration, which are usually filled out as a result of parsing
+// the audio_policy_configuration.xml file.
+//
+// Note that AudioPolicyConfig doesn't own some of the data:
+// it simply proxies access to fields of the AudioPolicyManager
+// class. Be careful with the fields that are references,
+// e.g. 'mOutputDevices'. This also means that it is impossible
+// to implement "deep copying" of this class without redesigning it.
 class AudioPolicyConfig
 {
 public:
@@ -40,14 +49,24 @@
                       DeviceVector &outputDevices,
                       DeviceVector &inputDevices,
                       sp<DeviceDescriptor> &defaultOutputDevice)
-        : mEngineLibraryNameSuffix(kDefaultEngineLibraryNameSuffix),
-          mHwModules(hwModules),
+        : mHwModules(hwModules),
           mOutputDevices(outputDevices),
           mInputDevices(inputDevices),
-          mDefaultOutputDevice(defaultOutputDevice),
-          mIsSpeakerDrcEnabled(false),
-          mIsCallScreenModeSupported(false)
-    {}
+          mDefaultOutputDevice(defaultOutputDevice) {
+        clear();
+    }
+
+    void clear() {
+        mSource = {};
+        mEngineLibraryNameSuffix = kDefaultEngineLibraryNameSuffix;
+        mHwModules.clear();
+        mOutputDevices.clear();
+        mInputDevices.clear();
+        mDefaultOutputDevice.clear();
+        mIsSpeakerDrcEnabled = false;
+        mIsCallScreenModeSupported = false;
+        mSurroundFormats.clear();
+    }
 
     const std::string& getSource() const {
         return mSource;
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
index b82305d..c6bdb04 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
@@ -101,7 +101,7 @@
      *    An example of failure is when there are already rules in place to restrict
      *    a mix to the given uid (i.e. when a MATCH_UID rule was set for it).
      */
-    status_t setUidDeviceAffinities(uid_t uid, const Vector<AudioDeviceTypeAddr>& devices);
+    status_t setUidDeviceAffinities(uid_t uid, const AudioDeviceTypeAddrVector& devices);
     status_t removeUidDeviceAffinities(uid_t uid);
     status_t getDevicesForUid(uid_t uid, Vector<AudioDeviceTypeAddr>& devices) const;
 
@@ -115,7 +115,7 @@
      *    An example of failure is when there are already rules in place to restrict
      *    a mix to the given userId (i.e. when a MATCH_USERID rule was set for it).
      */
-    status_t setUserIdDeviceAffinities(int userId, const Vector<AudioDeviceTypeAddr>& devices);
+    status_t setUserIdDeviceAffinities(int userId, const AudioDeviceTypeAddrVector& devices);
     status_t removeUserIdDeviceAffinities(int userId);
     status_t getDevicesForUserId(int userId, Vector<AudioDeviceTypeAddr>& devices) const;
 
diff --git a/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
index 923310c..80afe9d 100644
--- a/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
@@ -113,6 +113,9 @@
     const sp<AudioPolicyMix> getPrimaryMix() const {
         return mPrimaryMix.promote();
     };
+    bool hasLostPrimaryMix() const {
+        return mPrimaryMix.unsafe_get() && !mPrimaryMix.promote();
+    }
 
     void setActive(bool active) override
     {
diff --git a/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
index dd1499c..ca29591 100644
--- a/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
@@ -65,6 +65,9 @@
 
     bool supportsFormat(audio_format_t format);
 
+    void setDynamic() { mIsDynamic = true; }
+    bool isDynamic() const { return mIsDynamic; }
+
     // PolicyAudioPortConfig
     virtual sp<PolicyAudioPort> getPolicyAudioPort() const {
         return static_cast<PolicyAudioPort*>(const_cast<DeviceDescriptor*>(this));
@@ -97,6 +100,8 @@
     std::string mTagName; // Unique human readable identifier for a device port found in conf file.
     FormatVector        mEncodedFormats;
     audio_format_t      mCurrentEncodedFormat;
+    bool                mIsDynamic = false;
+    const std::string   mDeclaredAddress; // Original device address
 };
 
 class DeviceVector : public SortedVector<sp<DeviceDescriptor> >
@@ -146,6 +151,15 @@
     //     4) the combination of all devices is invalid for selection
     sp<DeviceDescriptor> getDeviceForOpening() const;
 
+    // Return the device descriptor that matches the given AudioDeviceTypeAddr
+    sp<DeviceDescriptor> getDeviceFromDeviceTypeAddr(
+            const AudioDeviceTypeAddr& deviceTypeAddr) const;
+
+    // Return a device vector containing the device descriptors whose AudioDeviceTypeAddr
+    // appears in the given AudioDeviceTypeAddrVector
+    DeviceVector getDevicesFromDeviceTypeAddrVec(
+            const AudioDeviceTypeAddrVector& deviceTypeAddrVector) const;
+
     // If there are devices with the given type and the devices to add is not empty,
     // remove all the devices with the given type and add all the devices to add.
     void replaceDevicesByType(audio_devices_t typeToRemove, const DeviceVector &devicesToAdd);
@@ -248,7 +262,9 @@
         return String8("");
     }
 
-    std::string toString() const;
+    // Return a string to describe the DeviceVector. The sensitive information will only be
+    // added to the string if `includeSensitiveInfo` is true.
+    std::string toString(bool includeSensitiveInfo = false) const;
 
     void dump(String8 *dst, const String8 &tag, int spaces = 0, bool verbose = true) const;
 
diff --git a/services/audiopolicy/common/managerdefinitions/include/EffectDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/EffectDescriptor.h
index c4eab30..59eee52 100644
--- a/services/audiopolicy/common/managerdefinitions/include/EffectDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/EffectDescriptor.h
@@ -72,6 +72,9 @@
                      audio_io_handle_t dstOutput);
     void moveEffects(const std::vector<int>& ids, audio_io_handle_t dstOutput);
 
+    audio_io_handle_t getIoForSession(audio_session_t sessionId,
+                                      const effect_uuid_t *effectType = nullptr);
+
     void dump(String8 *dst, int spaces = 0, bool verbose = true) const;
 
 private:
diff --git a/services/audiopolicy/common/managerdefinitions/include/HwModule.h b/services/audiopolicy/common/managerdefinitions/include/HwModule.h
index 23f0c9a..b5b10f3 100644
--- a/services/audiopolicy/common/managerdefinitions/include/HwModule.h
+++ b/services/audiopolicy/common/managerdefinitions/include/HwModule.h
@@ -131,8 +131,17 @@
 public:
     sp<HwModule> getModuleFromName(const char *name) const;
 
+    /**
+     * @brief getModuleForDeviceType tries to find a module supporting the given device
+     * type / format across all modules
+     * @param device type to consider
+     * @param encodedFormat to consider
+     * @param[out] tagName if not null and a matching device is found, receives the tagName
+     * of the original device from the XML file so that audio route matching rules work.
+     * @return a valid module if the considered device is found, nullptr otherwise.
+     */
     sp<HwModule> getModuleForDeviceType(audio_devices_t device,
-                                        audio_format_t encodedFormat) const;
+                                        audio_format_t encodedFormat,
+                                        std::string *tagName = nullptr) const;
 
     sp<HwModule> getModuleForDevice(const sp<DeviceDescriptor> &device,
                                     audio_format_t encodedFormat) const;
diff --git a/services/audiopolicy/common/managerdefinitions/include/IOProfile.h b/services/audiopolicy/common/managerdefinitions/include/IOProfile.h
index 5f551d5..11d3a99 100644
--- a/services/audiopolicy/common/managerdefinitions/include/IOProfile.h
+++ b/services/audiopolicy/common/managerdefinitions/include/IOProfile.h
@@ -112,6 +112,19 @@
     }
 
     /**
+     * @brief getTag
+     * @param deviceTypes to be considered
+     * @return tagName of first matching device for the considered types, empty string otherwise.
+     */
+    std::string getTag(const DeviceTypeSet& deviceTypes) const
+    {
+        if (supportsDeviceTypes(deviceTypes)) {
+            return mSupportedDevices.getDevicesFromTypes(deviceTypes).itemAt(0)->getTagName();
+        }
+        return {};
+    }
+
+    /**
      * @brief supportsDevice
      * @param device to be checked against
      *        forceCheckOnAddress if true, check on type and address whatever the type, otherwise
@@ -150,6 +163,12 @@
     }
     void removeSupportedDevice(const sp<DeviceDescriptor> &device)
     {
+        ssize_t ret = mSupportedDevices.indexOf(device);
+        if (ret >= 0 && !mSupportedDevices.itemAt(ret)->isDynamic()) {
+            // Device equality checks only type, address, name and format.
+            // This prevents removing devices that were not dynamically added.
+            return;
+        }
         mSupportedDevices.remove(device);
     }
     void setSupportedDevices(const DeviceVector &devices)
diff --git a/services/audiopolicy/common/managerdefinitions/include/PolicyAudioPort.h b/services/audiopolicy/common/managerdefinitions/include/PolicyAudioPort.h
index d2f6297..e6eef24 100644
--- a/services/audiopolicy/common/managerdefinitions/include/PolicyAudioPort.h
+++ b/services/audiopolicy/common/managerdefinitions/include/PolicyAudioPort.h
@@ -42,6 +42,11 @@
 
     virtual const std::string getTagName() const = 0;
 
+    bool equals(const sp<PolicyAudioPort> &right) const
+    {
+        return getTagName() == right->getTagName();
+    }
+
     virtual sp<AudioPort> asAudioPort() const = 0;
 
     virtual void setFlags(uint32_t flags)
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp
index b963121..4922ebe 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp
@@ -516,7 +516,7 @@
     dst->appendFormat(" Sampling rate: %d\n", mSamplingRate);
     dst->appendFormat(" Format: %d\n", mFormat);
     dst->appendFormat(" Channels: %08x\n", mChannelMask);
-    dst->appendFormat(" Devices %s\n", mDevice->toString().c_str());
+    dst->appendFormat(" Devices %s\n", mDevice->toString(true /*includeSensitiveInfo*/).c_str());
     mEnabledEffects.dump(dst, 1 /*spaces*/, false /*verbose*/);
     dst->append(" AudioRecord Clients:\n");
     ClientMapHandler<RecordClientDescriptor>::dump(dst);
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
index d6d472b..25f7c27 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
@@ -245,7 +245,7 @@
     dst->appendFormat(" Sampling rate: %d\n", mSamplingRate);
     dst->appendFormat(" Format: %08x\n", mFormat);
     dst->appendFormat(" Channels: %08x\n", mChannelMask);
-    dst->appendFormat(" Devices: %s\n", devices().toString().c_str());
+    dst->appendFormat(" Devices: %s\n", devices().toString(true /*includeSensitiveInfo*/).c_str());
     dst->appendFormat(" Global active count: %u\n", mGlobalActiveCount);
     for (const auto &iter : mRoutingActivities) {
         dst->appendFormat(" Product Strategy id: %d", iter.first);
@@ -764,11 +764,6 @@
     return false;
 }
 
-bool SwAudioOutputCollection::isA2dpSupported() const
-{
-    return (isA2dpOffloadedOnPrimary() || (getA2dpOutput() != 0));
-}
-
 sp<SwAudioOutputDescriptor> SwAudioOutputCollection::getPrimaryOutput() const
 {
     for (size_t i = 0; i < size(); i++) {
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
index b6de4be..fc1d0e2 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
@@ -463,7 +463,7 @@
 }
 
 status_t AudioPolicyMixCollection::setUidDeviceAffinities(uid_t uid,
-        const Vector<AudioDeviceTypeAddr>& devices) {
+        const AudioDeviceTypeAddrVector& devices) {
     // verify feasibility: for each player mix: if it already contains a
     //    "match uid" rule for this uid, return an error
     //    (adding a uid-device affinity would result in contradictory rules)
@@ -565,7 +565,7 @@
 }
 
 status_t AudioPolicyMixCollection::setUserIdDeviceAffinities(int userId,
-        const Vector<AudioDeviceTypeAddr>& devices) {
+        const AudioDeviceTypeAddrVector& devices) {
     // verify feasibility: for each player mix: if it already contains a
     //    "match userId" rule for this userId, return an error
     //    (adding a userId-device affinity would result in contradictory rules)
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioRoute.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioRoute.cpp
index 2a18f19..c8e4e76 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioRoute.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioRoute.cpp
@@ -39,12 +39,12 @@
 bool AudioRoute::supportsPatch(const sp<PolicyAudioPort> &srcPort,
                                const sp<PolicyAudioPort> &dstPort) const
 {
-    if (mSink == 0 || dstPort == 0 || dstPort != mSink) {
+    if (mSink == 0 || dstPort == 0 || !dstPort->equals(mSink)) {
         return false;
     }
     ALOGV("%s: sinks %s matching", __FUNCTION__, mSink->getTagName().c_str());
     for (const auto &sourcePort : mSources) {
-        if (sourcePort == srcPort) {
+        if (sourcePort->equals(srcPort)) {
             ALOGV("%s: sources %s matching", __FUNCTION__, sourcePort->getTagName().c_str());
             return true;
         }
diff --git a/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
index a29e60e..6ff1a98 100644
--- a/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
@@ -52,7 +52,8 @@
 DeviceDescriptor::DeviceDescriptor(const AudioDeviceTypeAddr &deviceTypeAddr,
                                    const std::string &tagName,
                                    const FormatVector &encodedFormats) :
-        DeviceDescriptorBase(deviceTypeAddr), mTagName(tagName), mEncodedFormats(encodedFormats)
+        DeviceDescriptorBase(deviceTypeAddr), mTagName(tagName), mEncodedFormats(encodedFormats),
+        mDeclaredAddress(deviceTypeAddr.getAddress())
 {
     mCurrentEncodedFormat = AUDIO_FORMAT_DEFAULT;
     /* If framework runs against a pre 5.0 Audio HAL, encoded formats are absent from the config.
@@ -75,6 +76,10 @@
 void DeviceDescriptor::detach() {
     mId = AUDIO_PORT_HANDLE_NONE;
     PolicyAudioPort::detach();
+    // The device address may have been overwritten on device connection
+    setAddress(mDeclaredAddress);
+    // Device Port does not have a name unless provided by setDeviceConnectionState
+    setName("");
 }
 
 template<typename T>
@@ -227,6 +232,7 @@
 {
     bool added = false;
     for (const auto& device : devices) {
+        ALOG_ASSERT(device != nullptr, "Null pointer found when adding DeviceVector");
         if (indexOf(device) < 0 && SortedVector::add(device) >= 0) {
             added = true;
         }
@@ -238,6 +244,7 @@
 
 ssize_t DeviceVector::add(const sp<DeviceDescriptor>& item)
 {
+    ALOG_ASSERT(item != nullptr, "Adding null pointer to DeviceVector");
     ssize_t ret = indexOf(item);
 
     if (ret < 0) {
@@ -375,7 +382,7 @@
     if (isEmpty()) {
         // Return nullptr if this collection is empty.
         return nullptr;
-    } else if (areAllOfSameDeviceType(types(), audio_is_input_device)) {
+    } else if (areAllOfSameDeviceType(types(), audio_call_is_input_device)) {
         // For input case, return the first one when there is only one device.
         return size() > 1 ? nullptr : *begin();
     } else if (areAllOfSameDeviceType(types(), audio_is_output_device)) {
@@ -388,6 +395,24 @@
     return nullptr;
 }
 
+sp<DeviceDescriptor> DeviceVector::getDeviceFromDeviceTypeAddr(
+            const AudioDeviceTypeAddr& deviceTypeAddr) const {
+    return getDevice(deviceTypeAddr.mType, String8(deviceTypeAddr.getAddress()),
+            AUDIO_FORMAT_DEFAULT);
+}
+
+DeviceVector DeviceVector::getDevicesFromDeviceTypeAddrVec(
+        const AudioDeviceTypeAddrVector& deviceTypeAddrVector) const {
+    DeviceVector devices;
+    for (const auto& deviceTypeAddr : deviceTypeAddrVector) {
+        sp<DeviceDescriptor> device = getDeviceFromDeviceTypeAddr(deviceTypeAddr);
+        if (device != nullptr) {
+            devices.add(device);
+        }
+    }
+    return devices;
+}
+
 void DeviceVector::replaceDevicesByType(
         audio_devices_t typeToRemove, const DeviceVector &devicesToAdd) {
     DeviceVector devicesToRemove = getDevicesFromType(typeToRemove);
@@ -408,7 +433,7 @@
     }
 }
 
-std::string DeviceVector::toString() const
+std::string DeviceVector::toString(bool includeSensitiveInfo) const
 {
     if (isEmpty()) {
         return {"AUDIO_DEVICE_NONE"};
@@ -418,7 +443,7 @@
         if (device != *begin()) {
            result += ";";
         }
-        result += device->toString();
+        result += device->toString(includeSensitiveInfo);
     }
     return result + "}";
 }
diff --git a/services/audiopolicy/common/managerdefinitions/src/EffectDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/EffectDescriptor.cpp
index 415962a..843f5da 100644
--- a/services/audiopolicy/common/managerdefinitions/src/EffectDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/EffectDescriptor.cpp
@@ -202,6 +202,19 @@
     }
 }
 
+audio_io_handle_t EffectDescriptorCollection::getIoForSession(audio_session_t sessionId,
+                                                              const effect_uuid_t *effectType)
+{
+    for (size_t i = 0; i < size(); ++i) {
+        sp<EffectDescriptor> effect = valueAt(i);
+        if (effect->mSession == sessionId && (effectType == nullptr ||
+                memcmp(&effect->mDesc.type, effectType, sizeof(effect_uuid_t)) == 0)) {
+            return effect->mIo;
+        }
+    }
+    return AUDIO_IO_HANDLE_NONE;
+}
+
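The lookup above is a straight linear scan keyed on session id with an optional effect-type filter. A self-contained sketch of the same search, where EffectEntry is a hypothetical flattened stand-in for EffectDescriptor:

    #include <array>
    #include <cstring>
    #include <vector>

    struct EffectEntry {                      // hypothetical stand-in for EffectDescriptor
        int session;
        std::array<unsigned char, 16> typeUuid;
        int io;
    };

    int ioForSession(const std::vector<EffectEntry> &effects, int sessionId,
                     const unsigned char *typeUuid /* nullptr matches any type */) {
        for (const auto &e : effects) {
            if (e.session == sessionId &&
                    (typeUuid == nullptr ||
                     std::memcmp(e.typeUuid.data(), typeUuid, e.typeUuid.size()) == 0)) {
                return e.io;                  // first match wins
            }
        }
        return -1;                            // stands in for AUDIO_IO_HANDLE_NONE
    }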
 EffectDescriptorCollection EffectDescriptorCollection::getEffectsForIo(audio_io_handle_t io) const
 {
     EffectDescriptorCollection effects;
diff --git a/services/audiopolicy/common/managerdefinitions/src/HwModule.cpp b/services/audiopolicy/common/managerdefinitions/src/HwModule.cpp
index d31e443..2967014 100644
--- a/services/audiopolicy/common/managerdefinitions/src/HwModule.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/HwModule.cpp
@@ -271,8 +271,9 @@
     return nullptr;
 }
 
-sp <HwModule> HwModuleCollection::getModuleForDeviceType(audio_devices_t type,
-                                                         audio_format_t encodedFormat) const
+sp<HwModule> HwModuleCollection::getModuleForDeviceType(audio_devices_t type,
+                                                        audio_format_t encodedFormat,
+                                                        std::string *tagName) const
 {
     for (const auto& module : *this) {
         const auto& profiles = audio_is_output_device(type) ?
@@ -284,9 +285,15 @@
                     sp <DeviceDescriptor> deviceDesc =
                             declaredDevices.getDevice(type, String8(), encodedFormat);
                     if (deviceDesc) {
+                        if (tagName != nullptr) {
+                            *tagName = deviceDesc->getTagName();
+                        }
                         return module;
                     }
                 } else {
+                    if (tagName != nullptr) {
+                        *tagName = profile->getTag({type});
+                    }
                     return module;
                 }
             }
@@ -325,15 +332,32 @@
     }
 
     for (const auto& hwModule : *this) {
+        if (!allowToCreate) {
+            auto dynamicDevices = hwModule->getDynamicDevices();
+            auto dynamicDevice = dynamicDevices.getDevice(deviceType, devAddress, encodedFormat);
+            if (dynamicDevice) {
+                return dynamicDevice;
+            }
+        }
         DeviceVector moduleDevices = hwModule->getAllDevices();
         auto moduleDevice = moduleDevices.getDevice(deviceType, devAddress, encodedFormat);
+
+        // Prevent overwriting the moduleDevice address if the connected device does not have
+        // the same address (getDevice with an empty address ignores address matching);
+        // use a dynamic device instead.
+        if (moduleDevice && allowToCreate &&
+                (!moduleDevice->address().empty() &&
+                 (moduleDevice->address().compare(devAddress.c_str()) != 0))) {
+            break;
+        }
         if (moduleDevice) {
             if (encodedFormat != AUDIO_FORMAT_DEFAULT) {
                 moduleDevice->setEncodedFormat(encodedFormat);
             }
             if (allowToCreate) {
                 moduleDevice->attach(hwModule);
+                // Address may be overwritten, restored on detach.
                 moduleDevice->setAddress(devAddress.string());
+                // Name may be overwritten, restored on detach.
                 moduleDevice->setName(name);
             }
             return moduleDevice;
@@ -352,18 +376,19 @@
                                                       const char *name,
                                                       const audio_format_t encodedFormat) const
 {
-    sp<HwModule> hwModule = getModuleForDeviceType(type, encodedFormat);
+    std::string tagName = {};
+    sp<HwModule> hwModule = getModuleForDeviceType(type, encodedFormat, &tagName);
     if (hwModule == 0) {
         ALOGE("%s: could not find HW module for device %04x address %s", __FUNCTION__, type,
               address);
         return nullptr;
     }
 
-    sp<DeviceDescriptor> device = new DeviceDescriptor(type, name, address);
+    sp<DeviceDescriptor> device = new DeviceDescriptor(type, tagName, address);
     device->setName(name);
     device->setEncodedFormat(encodedFormat);
-
-  // Add the device to the list of dynamic devices
+    device->setDynamic();
+    // Add the device to the list of dynamic devices
     hwModule->addDynamicDevice(device);
     // Reciprocally attach the device to the module
     device->attach(hwModule);
@@ -375,7 +400,7 @@
     for (const auto &profile : profiles) {
         // Add the device as supported to all profile supporting "weakly" or not the device
         // according to its type
-        if (profile->supportsDevice(device, false /*matchAdress*/)) {
+        if (profile->supportsDevice(device, false /*matchAddress*/)) {
 
             // @todo quid of audio profile? import the profile from device of the same type?
             const auto &isoTypeDeviceForProfile =
@@ -406,10 +431,9 @@
 
         device->detach();
         // Only remove from dynamic list, not from declared list!!!
-        if (!hwModule->getDynamicDevices().contains(device)) {
+        if (!hwModule->removeDynamicDevice(device)) {
             return;
         }
-        hwModule->removeDynamicDevice(device);
         ALOGV("%s: removed dynamic device %s from module %s", __FUNCTION__,
               device->toString().c_str(), hwModule->getName());
 
diff --git a/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp b/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
index bf1a0f7..ae92b40 100644
--- a/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
@@ -17,7 +17,7 @@
 #define LOG_TAG "APM::IOProfile"
 //#define LOG_NDEBUG 0
 
-#include <system/audio-base.h>
+#include <system/audio.h>
 #include "IOProfile.h"
 #include "HwModule.h"
 #include "TypeConverter.h"
@@ -112,12 +112,11 @@
     dst->append(portStr.c_str());
 
     dst->appendFormat("    - flags: 0x%04x", getFlags());
-    std::string flagsLiteral;
-    if (getRole() == AUDIO_PORT_ROLE_SINK) {
-        InputFlagConverter::maskToString(getFlags(), flagsLiteral);
-    } else if (getRole() == AUDIO_PORT_ROLE_SOURCE) {
-        OutputFlagConverter::maskToString(getFlags(), flagsLiteral);
-    }
+    std::string flagsLiteral =
+            getRole() == AUDIO_PORT_ROLE_SINK ?
+            toString(static_cast<audio_input_flags_t>(getFlags())) :
+            getRole() == AUDIO_PORT_ROLE_SOURCE ?
+            toString(static_cast<audio_output_flags_t>(getFlags())) : "";
     if (!flagsLiteral.empty()) {
         dst->appendFormat(" (%s)", flagsLiteral.c_str());
     }
diff --git a/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp b/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp
index 883e713..0cc3a68 100644
--- a/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp
@@ -254,9 +254,8 @@
 constexpr void (*xmlDeleter)(T* t);
 template <>
 constexpr auto xmlDeleter<xmlDoc> = xmlFreeDoc;
-// http://b/111067277 - Add back constexpr when we switch to C++17.
 template <>
-auto xmlDeleter<xmlChar> = [](xmlChar *s) { xmlFree(s); };
+constexpr auto xmlDeleter<xmlChar> = [](xmlChar *s) { xmlFree(s); };
 
 /** @return a unique_ptr with the correct deleter for the libxml2 object. */
 template <class T>
@@ -337,7 +336,7 @@
 
     std::string mode = getXmlAttribute(cur, Attributes::mode);
     if (!mode.empty()) {
-        gain->setMode(GainModeConverter::maskFromString(mode));
+        gain->setMode(GainModeConverter::maskFromString(mode, " "));
     }
 
     std::string channelsLiteral = getXmlAttribute(cur, Attributes::channelMask);
@@ -501,7 +500,7 @@
                 AUDIO_PORT_ROLE_SOURCE : AUDIO_PORT_ROLE_SINK;
 
     audio_devices_t type = AUDIO_DEVICE_NONE;
-    if (!deviceFromString(typeName, type) ||
+    if (!DeviceConverter::fromString(typeName, type) ||
             (!audio_is_input_device(type) && portRole == AUDIO_PORT_ROLE_SOURCE) ||
             (!audio_is_output_devices(type) && portRole == AUDIO_PORT_ROLE_SINK)) {
         ALOGW("%s: bad type %08x", __func__, type);
@@ -804,7 +803,9 @@
 status_t deserializeAudioPolicyFile(const char *fileName, AudioPolicyConfig *config)
 {
     PolicySerializer serializer;
-    return serializer.deserialize(fileName, config);
+    status_t status = serializer.deserialize(fileName, config);
+    if (status != OK) config->clear();
+    return status;
 }
 
 } // namespace android
diff --git a/services/audiopolicy/config/a2dp_audio_policy_configuration_7_0.xml b/services/audiopolicy/config/a2dp_audio_policy_configuration_7_0.xml
new file mode 100644
index 0000000..2d323f6
--- /dev/null
+++ b/services/audiopolicy/config/a2dp_audio_policy_configuration_7_0.xml
@@ -0,0 +1,44 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- A2dp Audio HAL Audio Policy Configuration file -->
+<module name="a2dp" halVersion="2.0">
+    <mixPorts>
+        <mixPort name="a2dp output" role="source"/>
+        <mixPort name="a2dp input" role="sink">
+            <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                     samplingRates="44100 48000"
+                     channelMasks="AUDIO_CHANNEL_IN_MONO AUDIO_CHANNEL_IN_STEREO"/>
+        </mixPort>
+    </mixPorts>
+    <devicePorts>
+        <devicePort tagName="BT A2DP Out" type="AUDIO_DEVICE_OUT_BLUETOOTH_A2DP" role="sink">
+            <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                     samplingRates="44100"
+                     channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+        </devicePort>
+        <devicePort tagName="BT A2DP Headphones" type="AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES" role="sink">
+            <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                     samplingRates="44100"
+                     channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+        </devicePort>
+        <devicePort tagName="BT A2DP Speaker" type="AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER" role="sink">
+            <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                     samplingRates="44100"
+                     channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+        </devicePort>
+        <devicePort tagName="BT A2DP In" type="AUDIO_DEVICE_IN_BLUETOOTH_A2DP" role="source">
+            <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                     samplingRates="44100 48000"
+                     channelMasks="AUDIO_CHANNEL_IN_MONO AUDIO_CHANNEL_IN_STEREO"/>
+        </devicePort>
+    </devicePorts>
+    <routes>
+        <route type="mix" sink="BT A2DP Out"
+               sources="a2dp output"/>
+        <route type="mix" sink="BT A2DP Headphones"
+               sources="a2dp output"/>
+        <route type="mix" sink="BT A2DP Speaker"
+               sources="a2dp output"/>
+        <route type="mix" sink="a2dp input"
+               sources="BT A2DP In"/>
+    </routes>
+</module>
diff --git a/services/audiopolicy/config/a2dp_in_audio_policy_configuration_7_0.xml b/services/audiopolicy/config/a2dp_in_audio_policy_configuration_7_0.xml
new file mode 100644
index 0000000..d59ad70
--- /dev/null
+++ b/services/audiopolicy/config/a2dp_in_audio_policy_configuration_7_0.xml
@@ -0,0 +1,22 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- Bluetooth Input Audio HAL Audio Policy Configuration file -->
+<module name="a2dp" halVersion="2.0">
+    <mixPorts>
+        <mixPort name="a2dp input" role="sink">
+            <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                     samplingRates="44100 48000"
+                     channelMasks="AUDIO_CHANNEL_IN_MONO AUDIO_CHANNEL_IN_STEREO"/>
+        </mixPort>
+    </mixPorts>
+    <devicePorts>
+        <devicePort tagName="BT A2DP In" type="AUDIO_DEVICE_IN_BLUETOOTH_A2DP" role="source">
+            <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                     samplingRates="44100 48000"
+                     channelMasks="AUDIO_CHANNEL_IN_MONO AUDIO_CHANNEL_IN_STEREO"/>
+        </devicePort>
+    </devicePorts>
+    <routes>
+        <route type="mix" sink="a2dp input"
+               sources="BT A2DP In"/>
+    </routes>
+</module>
diff --git a/services/audiopolicy/config/audio_policy_configuration.xml b/services/audiopolicy/config/audio_policy_configuration.xml
index b28381b..dcdc035 100644
--- a/services/audiopolicy/config/audio_policy_configuration.xml
+++ b/services/audiopolicy/config/audio_policy_configuration.xml
@@ -91,7 +91,7 @@
                 <!-- Output devices declaration, i.e. Sink DEVICE PORT -->
                 <devicePort tagName="Earpiece" type="AUDIO_DEVICE_OUT_EARPIECE" role="sink">
                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
-                            samplingRates="48000" channelMasks="AUDIO_CHANNEL_IN_MONO"/>
+                            samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_MONO"/>
                 </devicePort>
                 <devicePort tagName="Speaker" role="sink" type="AUDIO_DEVICE_OUT_SPEAKER" address="">
                     <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
diff --git a/services/audiopolicy/config/audio_policy_configuration_7_0.xml b/services/audiopolicy/config/audio_policy_configuration_7_0.xml
new file mode 100644
index 0000000..31c8954
--- /dev/null
+++ b/services/audiopolicy/config/audio_policy_configuration_7_0.xml
@@ -0,0 +1,211 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<!-- Copyright (C) 2019 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<audioPolicyConfiguration version="1.0" xmlns:xi="http://www.w3.org/2001/XInclude">
+    <!-- version section contains a “version” tag in the form “major.minor” e.g version=”1.0” -->
+
+    <!-- Global configuration Declaration -->
+    <globalConfiguration speaker_drc_enabled="true"/>
+
+
+    <!-- Modules section:
+        There is one section per audio HW module present on the platform.
+        Each module section contains two mandatory tags for the audio HAL: “halVersion” and “name”.
+        The module names are the same as in current .conf file:
+                “primary”, “A2DP”, “remote_submix”, “USB”
+        Each module will contain the following sections:
+        “devicePorts”: a list of device descriptors for all input and output devices accessible via this
+        module.
+        This contains both permanently attached devices and removable devices.
+        “mixPorts”: listing all output and input streams exposed by the audio HAL
+        “routes”: list of possible connections between input and output devices or between stream and
+        devices.
+            "route": is defined by the following attributes:
+                -"type": <mux|mix> means all sources are mutually exclusive (mux) or can be mixed (mix)
+                -"sink": the sink involved in this route
+                -"sources": all the sources that can be connected to the sink via this route
+        “attachedDevices”: permanently attached devices.
+        The attachedDevices section is a list of device names. The names correspond to device names
+        defined in <devicePorts> section.
+        “defaultOutputDevice”: device to be used by default when no policy rule applies
+    -->
+    <modules>
+        <!-- Primary Audio HAL -->
+        <module name="primary" halVersion="3.0">
+            <attachedDevices>
+                <item>Speaker</item>
+                <item>Built-In Mic</item>
+                <item>Built-In Back Mic</item>
+            </attachedDevices>
+            <defaultOutputDevice>Speaker</defaultOutputDevice>
+            <mixPorts>
+                <mixPort name="primary output" role="source" flags="AUDIO_OUTPUT_FLAG_PRIMARY">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                </mixPort>
+                <mixPort name="deep_buffer" role="source"
+                        flags="AUDIO_OUTPUT_FLAG_DEEP_BUFFER">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                </mixPort>
+                <mixPort name="compressed_offload" role="source"
+                         flags="AUDIO_OUTPUT_FLAG_DIRECT AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD AUDIO_OUTPUT_FLAG_NON_BLOCKING">
+                    <profile name="" format="AUDIO_FORMAT_MP3"
+                             samplingRates="8000 11025 12000 16000 22050 24000 32000 44100 48000"
+                             channelMasks="AUDIO_CHANNEL_OUT_STEREO AUDIO_CHANNEL_OUT_MONO"/>
+                    <profile name="" format="AUDIO_FORMAT_AAC"
+                             samplingRates="8000 11025 12000 16000 22050 24000 32000 44100 48000"
+                             channelMasks="AUDIO_CHANNEL_OUT_STEREO AUDIO_CHANNEL_OUT_MONO"/>
+                    <profile name="" format="AUDIO_FORMAT_AAC_LC"
+                             samplingRates="8000 11025 12000 16000 22050 24000 32000 44100 48000"
+                             channelMasks="AUDIO_CHANNEL_OUT_STEREO AUDIO_CHANNEL_OUT_MONO"/>
+                </mixPort>
+                <mixPort name="voice_tx" role="source">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="8000 16000" channelMasks="AUDIO_CHANNEL_OUT_MONO"/>
+                </mixPort>
+                <mixPort name="primary input" role="sink">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="8000 11025 12000 16000 22050 24000 32000 44100 48000"
+                             channelMasks="AUDIO_CHANNEL_IN_MONO AUDIO_CHANNEL_IN_STEREO AUDIO_CHANNEL_IN_FRONT_BACK"/>
+                </mixPort>
+                <mixPort name="voice_rx" role="sink">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="8000 16000" channelMasks="AUDIO_CHANNEL_IN_MONO"/>
+                </mixPort>
+            </mixPorts>
+            <devicePorts>
+                <!-- Output devices declaration, i.e. Sink DEVICE PORT -->
+                <devicePort tagName="Earpiece" type="AUDIO_DEVICE_OUT_EARPIECE" role="sink">
+                   <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                            samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_MONO"/>
+                </devicePort>
+                <devicePort tagName="Speaker" role="sink" type="AUDIO_DEVICE_OUT_SPEAKER" address="">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                    <gains>
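+                        <!-- Gain values below are expressed in millibels (1 mB = 1/100 dB),
+                             i.e. a -84 dB to +40 dB range adjustable in 1 dB steps. -->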
+                        <gain name="gain_1" mode="AUDIO_GAIN_MODE_JOINT"
+                              minValueMB="-8400"
+                              maxValueMB="4000"
+                              defaultValueMB="0"
+                              stepValueMB="100"/>
+                    </gains>
+                </devicePort>
+                <devicePort tagName="Wired Headset" type="AUDIO_DEVICE_OUT_WIRED_HEADSET" role="sink">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                </devicePort>
+                <devicePort tagName="Wired Headphones" type="AUDIO_DEVICE_OUT_WIRED_HEADPHONE" role="sink">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                </devicePort>
+                <devicePort tagName="BT SCO" type="AUDIO_DEVICE_OUT_BLUETOOTH_SCO" role="sink">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="8000 16000" channelMasks="AUDIO_CHANNEL_OUT_MONO"/>
+                </devicePort>
+                <devicePort tagName="BT SCO Headset" type="AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET" role="sink">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="8000 16000" channelMasks="AUDIO_CHANNEL_OUT_MONO"/>
+                </devicePort>
+                <devicePort tagName="BT SCO Car Kit" type="AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT" role="sink">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="8000 16000" channelMasks="AUDIO_CHANNEL_OUT_MONO"/>
+                </devicePort>
+                <devicePort tagName="Telephony Tx" type="AUDIO_DEVICE_OUT_TELEPHONY_TX" role="sink">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="8000 16000" channelMasks="AUDIO_CHANNEL_OUT_MONO"/>
+                </devicePort>
+
+                <devicePort tagName="Built-In Mic" type="AUDIO_DEVICE_IN_BUILTIN_MIC" role="source">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="8000 11025 12000 16000 22050 24000 32000 44100 48000"
+                             channelMasks="AUDIO_CHANNEL_IN_MONO AUDIO_CHANNEL_IN_STEREO AUDIO_CHANNEL_IN_FRONT_BACK"/>
+                </devicePort>
+                <devicePort tagName="Built-In Back Mic" type="AUDIO_DEVICE_IN_BACK_MIC" role="source">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="8000 11025 12000 16000 22050 24000 32000 44100 48000"
+                             channelMasks="AUDIO_CHANNEL_IN_MONO AUDIO_CHANNEL_IN_STEREO AUDIO_CHANNEL_IN_FRONT_BACK"/>
+                </devicePort>
+                <devicePort tagName="Wired Headset Mic" type="AUDIO_DEVICE_IN_WIRED_HEADSET" role="source">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="8000 11025 12000 16000 22050 24000 32000 44100 48000"
+                             channelMasks="AUDIO_CHANNEL_IN_MONO AUDIO_CHANNEL_IN_STEREO AUDIO_CHANNEL_IN_FRONT_BACK"/>
+                </devicePort>
+                <devicePort tagName="BT SCO Headset Mic" type="AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET" role="source">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="8000 16000" channelMasks="AUDIO_CHANNEL_IN_MONO"/>
+                </devicePort>
+                <devicePort tagName="Telephony Rx" type="AUDIO_DEVICE_IN_TELEPHONY_RX" role="source">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="8000 16000" channelMasks="AUDIO_CHANNEL_IN_MONO"/>
+                </devicePort>
+            </devicePorts>
+            <!-- route declaration, i.e. list all available sources for a given sink -->
+            <routes>
+                <route type="mix" sink="Earpiece"
+                       sources="primary output,deep_buffer,BT SCO Headset Mic"/>
+                <route type="mix" sink="Speaker"
+                       sources="primary output,deep_buffer,compressed_offload,BT SCO Headset Mic,Telephony Rx"/>
+                <route type="mix" sink="Wired Headset"
+                       sources="primary output,deep_buffer,compressed_offload,BT SCO Headset Mic,Telephony Rx"/>
+                <route type="mix" sink="Wired Headphones"
+                       sources="primary output,deep_buffer,compressed_offload,BT SCO Headset Mic,Telephony Rx"/>
+                <route type="mix" sink="primary input"
+                       sources="Built-In Mic,Built-In Back Mic,Wired Headset Mic,BT SCO Headset Mic"/>
+                <route type="mix" sink="Telephony Tx"
+                       sources="Built-In Mic,Built-In Back Mic,Wired Headset Mic,BT SCO Headset Mic, voice_tx"/>
+                <route type="mix" sink="voice_rx"
+                       sources="Telephony Rx"/>
+            </routes>
+
+        </module>
+
+        <!-- A2dp Input Audio HAL -->
+        <xi:include href="a2dp_in_audio_policy_configuration_7_0.xml"/>
+
+        <!-- Usb Audio HAL -->
+        <xi:include href="usb_audio_policy_configuration.xml"/>
+
+        <!-- Remote Submix Audio HAL -->
+        <xi:include href="r_submix_audio_policy_configuration.xml"/>
+
+        <!-- Bluetooth Audio HAL -->
+        <xi:include href="bluetooth_audio_policy_configuration_7_0.xml"/>
+
+        <!-- MSD Audio HAL (optional) -->
+        <xi:include href="msd_audio_policy_configuration_7_0.xml"/>
+
+    </modules>
+    <!-- End of Modules section -->
+
+    <!-- Volume section:
+        IMPORTANT NOTE: Volume tables have been moved to the engine configuration.
+                        They are kept here for legacy support.
+                        The engine will fall back on these files if none are provided by the engine.
+     -->
+
+    <xi:include href="audio_policy_volumes.xml"/>
+    <xi:include href="default_volume_tables.xml"/>
+
+    <!-- End of Volume section -->
+
+    <!-- Surround Sound configuration -->
+
+    <xi:include href="surround_sound_configuration_5_0.xml"/>
+
+    <!-- End of Surround Sound configuration -->
+
+</audioPolicyConfiguration>
diff --git a/services/audiopolicy/config/bluetooth_audio_policy_configuration_7_0.xml b/services/audiopolicy/config/bluetooth_audio_policy_configuration_7_0.xml
new file mode 100644
index 0000000..2dffe02
--- /dev/null
+++ b/services/audiopolicy/config/bluetooth_audio_policy_configuration_7_0.xml
@@ -0,0 +1,44 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- Bluetooth Audio HAL Audio Policy Configuration file -->
+<module name="bluetooth" halVersion="2.0">
+    <mixPorts>
+        <!-- A2DP Audio Ports -->
+        <mixPort name="a2dp output" role="source"/>
+        <!-- Hearing Aids Audio Ports -->
+        <mixPort name="hearing aid output" role="source">
+            <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                     samplingRates="24000 16000"
+                     channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+        </mixPort>
+    </mixPorts>
+    <devicePorts>
+        <!-- A2DP Audio Ports -->
+        <devicePort tagName="BT A2DP Out" type="AUDIO_DEVICE_OUT_BLUETOOTH_A2DP" role="sink">
+            <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                     samplingRates="44100 48000 88200 96000"
+                     channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+        </devicePort>
+        <devicePort tagName="BT A2DP Headphones" type="AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES" role="sink">
+            <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                     samplingRates="44100 48000 88200 96000"
+                     channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+        </devicePort>
+        <devicePort tagName="BT A2DP Speaker" type="AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER" role="sink">
+            <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                     samplingRates="44100 48000 88200 96000"
+                     channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+        </devicePort>
+        <!-- Hearing Aids Audio Ports -->
+        <devicePort tagName="BT Hearing Aid Out" type="AUDIO_DEVICE_OUT_HEARING_AID" role="sink"/>
+    </devicePorts>
+    <routes>
+        <route type="mix" sink="BT A2DP Out"
+               sources="a2dp output"/>
+        <route type="mix" sink="BT A2DP Headphones"
+               sources="a2dp output"/>
+        <route type="mix" sink="BT A2DP Speaker"
+               sources="a2dp output"/>
+        <route type="mix" sink="BT Hearing Aid Out"
+               sources="hearing aid output"/>
+    </routes>
+</module>
diff --git a/services/audiopolicy/config/hearing_aid_audio_policy_configuration_7_0.xml b/services/audiopolicy/config/hearing_aid_audio_policy_configuration_7_0.xml
new file mode 100644
index 0000000..8c364e4
--- /dev/null
+++ b/services/audiopolicy/config/hearing_aid_audio_policy_configuration_7_0.xml
@@ -0,0 +1,17 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- Hearing aid Audio HAL Audio Policy Configuration file -->
+<module name="hearing_aid" halVersion="2.0">
+    <mixPorts>
+        <mixPort name="hearing aid output" role="source">
+            <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                     samplingRates="24000 16000"
+                     channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+        </mixPort>
+    </mixPorts>
+    <devicePorts>
+        <devicePort tagName="BT Hearing Aid Out" type="AUDIO_DEVICE_OUT_HEARING_AID" role="sink"/>
+    </devicePorts>
+    <routes>
+        <route type="mix" sink="BT Hearing Aid Out" sources="hearing aid output"/>
+    </routes>
+</module>
diff --git a/services/audiopolicy/config/msd_audio_policy_configuration_7_0.xml b/services/audiopolicy/config/msd_audio_policy_configuration_7_0.xml
new file mode 100644
index 0000000..f167f0b
--- /dev/null
+++ b/services/audiopolicy/config/msd_audio_policy_configuration_7_0.xml
@@ -0,0 +1,78 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- Copyright (C) 2017-2018 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<!-- Multi Stream Decoder Audio Policy Configuration file -->
+<module name="msd" halVersion="2.0">
+    <attachedDevices>
+        <item>MS12 Input</item>
+        <item>MS12 Output</item>
+    </attachedDevices>
+    <mixPorts>
+        <mixPort name="ms12 input" role="source">
+            <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                     samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+        </mixPort>
+        <mixPort name="ms12 compressed input" role="source"
+                flags="AUDIO_OUTPUT_FLAG_DIRECT AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD AUDIO_OUTPUT_FLAG_NON_BLOCKING">
+            <profile name="" format="AUDIO_FORMAT_AC3"
+                     samplingRates="32000 44100 48000"
+                     channelMasks="AUDIO_CHANNEL_OUT_MONO AUDIO_CHANNEL_OUT_STEREO AUDIO_CHANNEL_OUT_5POINT1"/>
+            <profile name="" format="AUDIO_FORMAT_E_AC3"
+                     samplingRates="32000 44100 48000"
+                     channelMasks="AUDIO_CHANNEL_OUT_MONO AUDIO_CHANNEL_OUT_STEREO AUDIO_CHANNEL_OUT_5POINT1 AUDIO_CHANNEL_OUT_7POINT1"/>
+            <profile name="" format="AUDIO_FORMAT_E_AC3_JOC"
+                     samplingRates="32000 44100 48000"
+                     channelMasks="AUDIO_CHANNEL_OUT_MONO AUDIO_CHANNEL_OUT_STEREO AUDIO_CHANNEL_OUT_5POINT1 AUDIO_CHANNEL_OUT_7POINT1"/>
+            <profile name="" format="AUDIO_FORMAT_AC4"
+                     samplingRates="32000 44100 48000"
+                     channelMasks="AUDIO_CHANNEL_OUT_MONO AUDIO_CHANNEL_OUT_STEREO AUDIO_CHANNEL_OUT_5POINT1 AUDIO_CHANNEL_OUT_7POINT1"/>
+        </mixPort>
+        <!-- The HW AV Sync flag is not required, but is recommended -->
+        <mixPort name="ms12 output" role="sink" flags="AUDIO_INPUT_FLAG_HW_AV_SYNC AUDIO_INPUT_FLAG_DIRECT">
+            <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                     samplingRates="48000" channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
+            <profile name="" format="AUDIO_FORMAT_AC3"
+                     samplingRates="48000" channelMasks="AUDIO_CHANNEL_IN_5POINT1"/>
+            <profile name="" format="AUDIO_FORMAT_E_AC3"
+                     samplingRates="48000" channelMasks="AUDIO_CHANNEL_IN_5POINT1"/>
+        </mixPort>
+   </mixPorts>
+   <devicePorts>
+       <devicePort tagName="MS12 Input" type="AUDIO_DEVICE_OUT_BUS"  role="sink">
+           <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                    samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+           <profile name="" format="AUDIO_FORMAT_AC3"
+                    samplingRates="32000 44100 48000"
+                    channelMasks="AUDIO_CHANNEL_OUT_MONO AUDIO_CHANNEL_OUT_STEREO AUDIO_CHANNEL_OUT_5POINT1"/>
+           <profile name="" format="AUDIO_FORMAT_E_AC3"
+                    samplingRates="32000 44100 48000"
+                    channelMasks="AUDIO_CHANNEL_OUT_MONO AUDIO_CHANNEL_OUT_STEREO AUDIO_CHANNEL_OUT_5POINT1 AUDIO_CHANNEL_OUT_7POINT1"/>
+            <profile name="" format="AUDIO_FORMAT_E_AC3_JOC"
+                     samplingRates="32000 44100 48000"
+                     channelMasks="AUDIO_CHANNEL_OUT_MONO AUDIO_CHANNEL_OUT_STEREO AUDIO_CHANNEL_OUT_5POINT1 AUDIO_CHANNEL_OUT_7POINT1"/>
+           <profile name="" format="AUDIO_FORMAT_AC4"
+                    samplingRates="32000 44100 48000"
+                    channelMasks="AUDIO_CHANNEL_OUT_MONO AUDIO_CHANNEL_OUT_STEREO AUDIO_CHANNEL_OUT_5POINT1 AUDIO_CHANNEL_OUT_7POINT1"/>
+       </devicePort>
+       <devicePort tagName="MS12 Output" type="AUDIO_DEVICE_IN_BUS"  role="source">
+           <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                    samplingRates="48000" channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
+        </devicePort>
+    </devicePorts>
+    <routes>
+        <route type="mix" sink="MS12 Input" sources="ms12 input,ms12 compressed input"/>
+        <route type="mix" sink="ms12 output" sources="MS12 Output"/>
+    </routes>
+</module>
diff --git a/services/audiopolicy/config/primary_audio_policy_configuration_7_0.xml b/services/audiopolicy/config/primary_audio_policy_configuration_7_0.xml
new file mode 100644
index 0000000..68a56b2
--- /dev/null
+++ b/services/audiopolicy/config/primary_audio_policy_configuration_7_0.xml
@@ -0,0 +1,32 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- Default Primary Audio HAL Module Audio Policy Configuration include file -->
+<module name="primary" halVersion="2.0">
+    <attachedDevices>
+        <item>Speaker</item>
+        <item>Built-In Mic</item>
+    </attachedDevices>
+    <defaultOutputDevice>Speaker</defaultOutputDevice>
+    <mixPorts>
+        <mixPort name="primary output" role="source" flags="AUDIO_OUTPUT_FLAG_PRIMARY">
+            <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                     samplingRates="44100" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+        </mixPort>
+        <mixPort name="primary input" role="sink">
+            <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                     samplingRates="8000 16000" channelMasks="AUDIO_CHANNEL_IN_MONO"/>
+        </mixPort>
+   </mixPorts>
+   <devicePorts>
+        <devicePort tagName="Speaker" type="AUDIO_DEVICE_OUT_SPEAKER" role="sink">
+        </devicePort>
+
+        <devicePort tagName="Built-In Mic" type="AUDIO_DEVICE_IN_BUILTIN_MIC" role="source">
+        </devicePort>
+    </devicePorts>
+    <routes>
+        <route type="mix" sink="Speaker"
+               sources="primary output"/>
+        <route type="mix" sink="primary input"
+               sources="Built-In Mic"/>
+    </routes>
+</module>
diff --git a/services/audiopolicy/engine/common/Android.bp b/services/audiopolicy/engine/common/Android.bp
old mode 100755
new mode 100644
diff --git a/services/audiopolicy/engine/common/include/EngineBase.h b/services/audiopolicy/engine/common/include/EngineBase.h
old mode 100755
new mode 100644
index 7f339dc..4510f63
--- a/services/audiopolicy/engine/common/include/EngineBase.h
+++ b/services/audiopolicy/engine/common/include/EngineBase.h
@@ -93,13 +93,13 @@
 
     void dump(String8 *dst) const override;
 
-    status_t setPreferredDeviceForStrategy(product_strategy_t strategy,
-            const AudioDeviceTypeAddr &device) override;
+    status_t setDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role,
+            const AudioDeviceTypeAddrVector &devices) override;
 
-    status_t removePreferredDeviceForStrategy(product_strategy_t strategy) override;
+    status_t removeDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role) override;
 
-    status_t getPreferredDeviceForStrategy(product_strategy_t strategy,
-            AudioDeviceTypeAddr &device) const override;
+    status_t getDevicesForRoleAndStrategy(product_strategy_t strategy, device_role_t role,
+            AudioDeviceTypeAddrVector &devices) const override;
 
     engineConfig::ParsingResult loadAudioPolicyEngineConfig();
 
@@ -127,11 +127,36 @@
 
     status_t restoreOriginVolumeCurve(audio_stream_type_t stream);
 
+    status_t setDevicesRoleForCapturePreset(audio_source_t audioSource, device_role_t role,
+            const AudioDeviceTypeAddrVector &devices) override;
+
+    status_t addDevicesRoleForCapturePreset(audio_source_t audioSource, device_role_t role,
+            const AudioDeviceTypeAddrVector &devices) override;
+
+    /**
+     * Removes the devices role for a capture preset. When `forceMatched` is true, all of the
+     * devices to be removed must currently be set as the role for the capture preset. Otherwise,
+     * only the devices that are currently set as the role for the capture preset are removed.
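+     *
+     * Illustrative example (placeholder device names): if the devices currently set for the role
+     * are {A, B} and `devices` is {B, C}, the call fails with BAD_VALUE when `forceMatched` is
+     * true, and leaves {A} cached when it is false.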
+     */
+    status_t doRemoveDevicesRoleForCapturePreset(audio_source_t audioSource,
+            device_role_t role, const AudioDeviceTypeAddrVector& devices,
+            bool forceMatched=true);
+
+    status_t removeDevicesRoleForCapturePreset(audio_source_t audioSource,
+            device_role_t role, const AudioDeviceTypeAddrVector& devices) override;
+
+    status_t clearDevicesRoleForCapturePreset(audio_source_t audioSource,
+            device_role_t role) override;
+
+    status_t getDevicesForRoleAndCapturePreset(audio_source_t audioSource,
+            device_role_t role, AudioDeviceTypeAddrVector &devices) const override;
+
 private:
     AudioPolicyManagerObserver *mApmObserver = nullptr;
 
     ProductStrategyMap mProductStrategies;
     ProductStrategyPreferredRoutingMap mProductStrategyPreferredDevices;
+    CapturePresetDevicesRoleMap mCapturePresetDevicesRole;
     VolumeGroupMap mVolumeGroups;
     LastRemovableMediaDevices mLastRemovableMediaDevices;
     audio_mode_t mPhoneState = AUDIO_MODE_NORMAL;  /**< current phone state. */
diff --git a/services/audiopolicy/engine/common/include/LastRemovableMediaDevices.h b/services/audiopolicy/engine/common/include/LastRemovableMediaDevices.h
old mode 100755
new mode 100644
diff --git a/services/audiopolicy/engine/common/include/ProductStrategy.h b/services/audiopolicy/engine/common/include/ProductStrategy.h
index 3ebe7d1..c505456 100644
--- a/services/audiopolicy/engine/common/include/ProductStrategy.h
+++ b/services/audiopolicy/engine/common/include/ProductStrategy.h
@@ -28,8 +28,11 @@
 #include <utils/String8.h>
 #include <media/AudioAttributes.h>
 #include <media/AudioContainers.h>
+#include <media/AudioDeviceTypeAddr.h>
 #include <media/AudioPolicy.h>
 
+#include <vector>
+
 namespace android {
 
 /**
@@ -164,7 +167,8 @@
     product_strategy_t mDefaultStrategy = PRODUCT_STRATEGY_NONE;
 };
 
-class ProductStrategyPreferredRoutingMap : public std::map<product_strategy_t, AudioDeviceTypeAddr>
+class ProductStrategyPreferredRoutingMap : public std::map<product_strategy_t,
+                                                           AudioDeviceTypeAddrVector>
 {
 public:
     void dump(String8 *dst, int spaces = 0) const;
diff --git a/services/audiopolicy/engine/common/src/EngineBase.cpp b/services/audiopolicy/engine/common/src/EngineBase.cpp
index 1bc7fe3..8c7fb97 100644
--- a/services/audiopolicy/engine/common/src/EngineBase.cpp
+++ b/services/audiopolicy/engine/common/src/EngineBase.cpp
@@ -339,8 +339,8 @@
     return NO_ERROR;
 }
 
-status_t EngineBase::setPreferredDeviceForStrategy(product_strategy_t strategy,
-            const AudioDeviceTypeAddr &device)
+status_t EngineBase::setDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role,
+            const AudioDeviceTypeAddrVector &devices)
 {
     // verify strategy exists
     if (mProductStrategies.find(strategy) == mProductStrategies.end()) {
@@ -348,11 +348,24 @@
         return BAD_VALUE;
     }
 
-    mProductStrategyPreferredDevices[strategy] = device;
+    switch (role) {
+    case DEVICE_ROLE_PREFERRED:
+        mProductStrategyPreferredDevices[strategy] = devices;
+        break;
+    case DEVICE_ROLE_DISABLED:
+        // TODO: support setting the devices role as disabled for a strategy.
+        ALOGI("%s not implemented for role %d", __func__, role);
+        break;
+    case DEVICE_ROLE_NONE:
+        // Intentionally fall-through as there is no need to set a device role of none
+        // for a strategy.
+    default:
+        ALOGE("%s invalid role %d", __func__, role);
+        return BAD_VALUE;
+    }
     return NO_ERROR;
 }
 
-status_t EngineBase::removePreferredDeviceForStrategy(product_strategy_t strategy)
+status_t EngineBase::removeDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role)
 {
     // verify strategy exists
     if (mProductStrategies.find(strategy) == mProductStrategies.end()) {
@@ -360,29 +373,218 @@
         return BAD_VALUE;
     }
 
-    if (mProductStrategyPreferredDevices.erase(strategy) == 0) {
-        // no preferred device was set
-        return NAME_NOT_FOUND;
+    switch (role) {
+    case DEVICE_ROLE_PREFERRED:
+        if (mProductStrategyPreferredDevices.erase(strategy) == 0) {
+            // no preferred device was set
+            return NAME_NOT_FOUND;
+        }
+        break;
+    case DEVICE_ROLE_DISABLED:
+        // TODO: support removing the devices role as disabled for a strategy.
+        ALOGI("%s not implemented for role %d", __func__, role);
+        break;
+    case DEVICE_ROLE_NONE:
+        // Intentionally fall-through as it makes no sense to remove devices with
+        // role as DEVICE_ROLE_NONE for a strategy
+    default:
+        ALOGE("%s invalid role %d", __func__, role);
+        return BAD_VALUE;
     }
     return NO_ERROR;
 }
 
-status_t EngineBase::getPreferredDeviceForStrategy(product_strategy_t strategy,
-            AudioDeviceTypeAddr &device) const
+status_t EngineBase::getDevicesForRoleAndStrategy(product_strategy_t strategy, device_role_t role,
+            AudioDeviceTypeAddrVector &devices) const
 {
     // verify strategy exists
     if (mProductStrategies.find(strategy) == mProductStrategies.end()) {
         ALOGE("%s unknown strategy %u", __func__, strategy);
         return BAD_VALUE;
     }
-    // preferred device for this strategy?
-    auto devIt = mProductStrategyPreferredDevices.find(strategy);
-    if (devIt == mProductStrategyPreferredDevices.end()) {
-        ALOGV("%s no preferred device for strategy %u", __func__, strategy);
-        return NAME_NOT_FOUND;
+
+    switch (role) {
+    case DEVICE_ROLE_PREFERRED: {
+        // preferred device for this strategy?
+        auto devIt = mProductStrategyPreferredDevices.find(strategy);
+        if (devIt == mProductStrategyPreferredDevices.end()) {
+            ALOGV("%s no preferred device for strategy %u", __func__, strategy);
+            return NAME_NOT_FOUND;
+        }
+
+        devices = devIt->second;
+    } break;
+    case DEVICE_ROLE_NONE:
+        // Intentionally fall-through as the DEVICE_ROLE_NONE is never set
+    default:
+        ALOGE("%s invalid role %d", __func__, role);
+        return BAD_VALUE;
+    }
+    return NO_ERROR;
+}
+
+status_t EngineBase::setDevicesRoleForCapturePreset(audio_source_t audioSource, device_role_t role,
+        const AudioDeviceTypeAddrVector &devices)
+{
+    // verify if the audio source is valid
+    if (!audio_is_valid_audio_source(audioSource)) {
+        ALOGE("%s unknown audio source %u", __func__, audioSource);
     }
 
-    device = devIt->second;
+    switch (role) {
+    case DEVICE_ROLE_PREFERRED:
+        mCapturePresetDevicesRole[audioSource][role] = devices;
+        // When the devices are set as preferred devices, remove them from the disabled devices.
+        doRemoveDevicesRoleForCapturePreset(
+                audioSource, DEVICE_ROLE_DISABLED, devices, false /*forceMatched*/);
+        break;
+    case DEVICE_ROLE_DISABLED:
+        // TODO: support setting devices role as disabled for capture preset.
+        ALOGI("%s no implemented for role as %d", __func__, role);
+        break;
+    case DEVICE_ROLE_NONE:
+        // Intentionally fall-through as there is no need to set a device role of none
+    default:
+        ALOGE("%s invalid role %d", __func__, role);
+        return BAD_VALUE;
+    }
+    return NO_ERROR;
+}
+
+status_t EngineBase::addDevicesRoleForCapturePreset(audio_source_t audioSource, device_role_t role,
+        const AudioDeviceTypeAddrVector &devices)
+{
+    // verify if the audio source is valid
+    if (!audio_is_valid_audio_source(audioSource)) {
+        ALOGE("%s unknown audio source %u", __func__, audioSource);
+    }
+
+    switch (role) {
+    case DEVICE_ROLE_PREFERRED:
+        mCapturePresetDevicesRole[audioSource][role] = excludeDeviceTypeAddrsFrom(
+                mCapturePresetDevicesRole[audioSource][role], devices);
+        for (const auto& device : devices) {
+            mCapturePresetDevicesRole[audioSource][role].push_back(device);
+        }
+        // When the devices are set as preferred devices, remove them from the disabled devices.
+        doRemoveDevicesRoleForCapturePreset(
+                audioSource, DEVICE_ROLE_DISABLED, devices, false /*forceMatched*/);
+        break;
+    case DEVICE_ROLE_DISABLED:
+        // TODO: support setting devices role as disabled for capture preset.
+        ALOGI("%s no implemented for role as %d", __func__, role);
+        break;
+    case DEVICE_ROLE_NONE:
+        // Intentionally fall-through as there is no need to set a device role of none
+    default:
+        ALOGE("%s invalid role %d", __func__, role);
+        return BAD_VALUE;
+    }
+    return NO_ERROR;
+}
+
+status_t EngineBase::removeDevicesRoleForCapturePreset(
+        audio_source_t audioSource, device_role_t role, const AudioDeviceTypeAddrVector& devices) {
+    return doRemoveDevicesRoleForCapturePreset(audioSource, role, devices);
+}
+
+status_t EngineBase::doRemoveDevicesRoleForCapturePreset(audio_source_t audioSource,
+        device_role_t role, const AudioDeviceTypeAddrVector& devices, bool forceMatched)
+{
+    // verify if the audio source is valid
+    if (!audio_is_valid_audio_source(audioSource)) {
+        ALOGE("%s unknown audio source %u", __func__, audioSource);
+    }
+
+    switch (role) {
+    case DEVICE_ROLE_PREFERRED:
+    case DEVICE_ROLE_DISABLED: {
+        if (mCapturePresetDevicesRole.count(audioSource) == 0 ||
+                mCapturePresetDevicesRole[audioSource].count(role) == 0) {
+            return NAME_NOT_FOUND;
+        }
+        AudioDeviceTypeAddrVector remainingDevices = excludeDeviceTypeAddrsFrom(
+                mCapturePresetDevicesRole[audioSource][role], devices);
+        if (forceMatched && remainingDevices.size() !=
+                mCapturePresetDevicesRole[audioSource][role].size() - devices.size()) {
+            // Some of the devices to remove are not present in the cached record
+            return BAD_VALUE;
+        }
+        mCapturePresetDevicesRole[audioSource][role] = remainingDevices;
+        if (mCapturePresetDevicesRole[audioSource][role].empty()) {
+            // Remove the role when device list is empty
+            mCapturePresetDevicesRole[audioSource].erase(role);
+        }
+    } break;
+    case DEVICE_ROLE_NONE:
+        // Intentionally fall-through as it makes no sense to remove devices with
+        // role as DEVICE_ROLE_NONE
+    default:
+        ALOGE("%s invalid role %d", __func__, role);
+        return BAD_VALUE;
+    }
+    return NO_ERROR;
+}
+
+status_t EngineBase::clearDevicesRoleForCapturePreset(audio_source_t audioSource,
+                                                      device_role_t role)
+{
+    // verify if the audio source is valid
+    if (!audio_is_valid_audio_source(audioSource)) {
+        ALOGE("%s unknown audio source %u", __func__, audioSource);
+    }
+
+    switch (role) {
+    case DEVICE_ROLE_PREFERRED:
+        if (mCapturePresetDevicesRole.count(audioSource) == 0 ||
+                mCapturePresetDevicesRole[audioSource].erase(role) == 0) {
+            // no preferred device for the given audio source
+            return NAME_NOT_FOUND;
+        }
+        break;
+    case DEVICE_ROLE_DISABLED:
+        // TODO: support removing the devices role as disabled for a capture preset.
+        ALOGI("%s not implemented for role %d", __func__, role);
+        break;
+    case DEVICE_ROLE_NONE:
+        // Intentionally fall-through as it makes no sense to remove devices with
+        // role as DEVICE_ROLE_NONE for a capture preset
+    default:
+        ALOGE("%s invalid role %d", __func__, role);
+        return BAD_VALUE;
+    }
+    return NO_ERROR;
+}
+
+status_t EngineBase::getDevicesForRoleAndCapturePreset(audio_source_t audioSource,
+        device_role_t role, AudioDeviceTypeAddrVector &devices) const
+{
+    // verify if the audio source is valid
+    if (!audio_is_valid_audio_source(audioSource)) {
+        ALOGE("%s unknown audio source %u", __func__, audioSource);
+        return BAD_VALUE;
+    }
+
+    switch (role) {
+    case DEVICE_ROLE_PREFERRED:
+    case DEVICE_ROLE_DISABLED: {
+        if (mCapturePresetDevicesRole.count(audioSource) == 0) {
+            return NAME_NOT_FOUND;
+        }
+        auto devIt = mCapturePresetDevicesRole.at(audioSource).find(role);
+        if (devIt == mCapturePresetDevicesRole.at(audioSource).end()) {
+            ALOGV("%s no devices role(%d) for capture preset %u", __func__, role, audioSource);
+            return NAME_NOT_FOUND;
+        }
+
+        devices = devIt->second;
+    } break;
+    case DEVICE_ROLE_NONE:
+        // Intentionally fall-through as the DEVICE_ROLE_NONE is never set
+    default:
+        ALOGE("%s invalid role %d", __func__, role);
+        return BAD_VALUE;
+    }
     return NO_ERROR;
 }
 
diff --git a/services/audiopolicy/engine/common/src/EngineDefaultConfig.h b/services/audiopolicy/engine/common/src/EngineDefaultConfig.h
index 981582e..d39eff6 100644
--- a/services/audiopolicy/engine/common/src/EngineDefaultConfig.h
+++ b/services/audiopolicy/engine/common/src/EngineDefaultConfig.h
@@ -26,8 +26,8 @@
     {"STRATEGY_PHONE",
      {
          {"phone", AUDIO_STREAM_VOICE_CALL, "AUDIO_STREAM_VOICE_CALL",
-          {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_VOICE_COMMUNICATION, AUDIO_SOURCE_DEFAULT, 0,
-            ""}},
+          {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_VOICE_COMMUNICATION, AUDIO_SOURCE_DEFAULT,
+            AUDIO_FLAG_NONE, ""}},
          },
          {"sco", AUDIO_STREAM_BLUETOOTH_SCO, "AUDIO_STREAM_BLUETOOTH_SCO",
           {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN, AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_SCO,
@@ -39,10 +39,11 @@
      {
          {"ring", AUDIO_STREAM_RING, "AUDIO_STREAM_RING",
           {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE,
-            AUDIO_SOURCE_DEFAULT, 0, ""}}
+            AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""}}
          },
          {"alarm", AUDIO_STREAM_ALARM, "AUDIO_STREAM_ALARM",
-          {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_ALARM, AUDIO_SOURCE_DEFAULT, 0, ""}},
+          {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_ALARM, AUDIO_SOURCE_DEFAULT,
+            AUDIO_FLAG_NONE, ""}},
          }
      },
     },
@@ -58,7 +59,7 @@
      {
          {"", AUDIO_STREAM_ACCESSIBILITY, "AUDIO_STREAM_ACCESSIBILITY",
           {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY,
-            AUDIO_SOURCE_DEFAULT, 0, ""}}
+            AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""}}
          }
      },
     },
@@ -66,15 +67,16 @@
      {
          {"", AUDIO_STREAM_NOTIFICATION, "AUDIO_STREAM_NOTIFICATION",
           {
-              {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_NOTIFICATION, AUDIO_SOURCE_DEFAULT, 0, ""},
+              {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_NOTIFICATION, AUDIO_SOURCE_DEFAULT,
+               AUDIO_FLAG_NONE, ""},
               {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST,
-               AUDIO_SOURCE_DEFAULT, 0, ""},
+               AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
               {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT,
-               AUDIO_SOURCE_DEFAULT, 0, ""},
+               AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
               {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED,
-               AUDIO_SOURCE_DEFAULT, 0, ""},
+               AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
               {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_NOTIFICATION_EVENT,
-               AUDIO_SOURCE_DEFAULT, 0, ""}
+               AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""}
           }
          }
      },
@@ -83,21 +85,25 @@
      {
          {"assistant", AUDIO_STREAM_ASSISTANT, "AUDIO_STREAM_ASSISTANT",
           {{AUDIO_CONTENT_TYPE_SPEECH, AUDIO_USAGE_ASSISTANT,
-            AUDIO_SOURCE_DEFAULT, 0, ""}}
+            AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""}}
          },
          {"music", AUDIO_STREAM_MUSIC, "AUDIO_STREAM_MUSIC",
           {
-              {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA, AUDIO_SOURCE_DEFAULT, 0, ""},
-              {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_GAME, AUDIO_SOURCE_DEFAULT, 0, ""},
-              {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_ASSISTANT, AUDIO_SOURCE_DEFAULT, 0, ""},
+              {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA, AUDIO_SOURCE_DEFAULT,
+               AUDIO_FLAG_NONE, ""},
+              {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_GAME, AUDIO_SOURCE_DEFAULT,
+               AUDIO_FLAG_NONE, ""},
+              {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_ASSISTANT, AUDIO_SOURCE_DEFAULT,
+               AUDIO_FLAG_NONE, ""},
               {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
-               AUDIO_SOURCE_DEFAULT, 0, ""},
-              {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN, AUDIO_SOURCE_DEFAULT, 0, ""}
+               AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
+              {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN, AUDIO_SOURCE_DEFAULT,
+               AUDIO_FLAG_NONE, ""}
           },
          },
          {"system", AUDIO_STREAM_SYSTEM, "AUDIO_STREAM_SYSTEM",
           {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_ASSISTANCE_SONIFICATION,
-            AUDIO_SOURCE_DEFAULT, 0, ""}}
+            AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""}}
          }
      },
     },
@@ -106,7 +112,7 @@
          {"", AUDIO_STREAM_DTMF, "AUDIO_STREAM_DTMF",
           {
               {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING,
-               AUDIO_SOURCE_DEFAULT, 0, ""}
+               AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""}
           }
          }
      },
@@ -114,7 +120,8 @@
     {"STRATEGY_CALL_ASSISTANT",
      {
          {"", AUDIO_STREAM_CALL_ASSISTANT, "AUDIO_STREAM_CALL_ASSISTANT",
-          {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_CALL_ASSISTANT, AUDIO_SOURCE_DEFAULT, 0, ""}}
+          {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_CALL_ASSISTANT, AUDIO_SOURCE_DEFAULT,
+            AUDIO_FLAG_NONE, ""}}
          }
      },
     },
@@ -136,14 +143,16 @@
     {"rerouting",
      {
          {"", AUDIO_STREAM_REROUTING, "AUDIO_STREAM_REROUTING",
-          {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN, AUDIO_SOURCE_DEFAULT, 0, ""}}
+          {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_VIRTUAL_SOURCE, AUDIO_SOURCE_DEFAULT,
+            AUDIO_FLAG_NONE, ""}}
          }
      },
     },
     {"patch",
      {
          {"", AUDIO_STREAM_PATCH, "AUDIO_STREAM_PATCH",
-          {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN, AUDIO_SOURCE_DEFAULT, 0, ""}}
+          {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN, AUDIO_SOURCE_DEFAULT,
+            AUDIO_FLAG_NONE, ""}}
          }
      },
     }
diff --git a/services/audiopolicy/engine/common/src/LastRemovableMediaDevices.cpp b/services/audiopolicy/engine/common/src/LastRemovableMediaDevices.cpp
old mode 100755
new mode 100644
index 87b6aaf..96cc140
--- a/services/audiopolicy/engine/common/src/LastRemovableMediaDevices.cpp
+++ b/services/audiopolicy/engine/common/src/LastRemovableMediaDevices.cpp
@@ -69,6 +69,11 @@
     case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP:
     case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES:
     case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER:
+    // TODO (b/122931261): remove when preferred device for strategy media will be used instead of
+    //  AUDIO_POLICY_FORCE_NO_BT_A2DP.
+    case AUDIO_DEVICE_OUT_HEARING_AID:
+    case AUDIO_DEVICE_OUT_BLE_HEADSET:
+    case AUDIO_DEVICE_OUT_BLE_SPEAKER:
         return GROUP_BT_A2DP;
     default:
         return GROUP_NONE;
diff --git a/services/audiopolicy/engine/common/src/ProductStrategy.cpp b/services/audiopolicy/engine/common/src/ProductStrategy.cpp
index 151c7bb..060568a 100644
--- a/services/audiopolicy/engine/common/src/ProductStrategy.cpp
+++ b/services/audiopolicy/engine/common/src/ProductStrategy.cpp
@@ -321,10 +321,11 @@
 void ProductStrategyPreferredRoutingMap::dump(android::String8* dst, int spaces) const {
     dst->appendFormat("\n%*sPreferred devices per product strategy dump:", spaces, "");
     for (const auto& iter : *this) {
-        dst->appendFormat("\n%*sStrategy %u dev:%08x addr:%s",
+        dst->appendFormat("\n%*sStrategy %u %s",
                           spaces + 2, "",
                           (uint32_t) iter.first,
-                          iter.second.mType, iter.second.mAddress.c_str());
+                          dumpAudioDeviceTypeAddrVector(iter.second, true /*includeSensitiveInfo*/)
+                                  .c_str());
     }
     dst->appendFormat("\n");
 }
diff --git a/services/audiopolicy/engine/common/src/VolumeCurve.cpp b/services/audiopolicy/engine/common/src/VolumeCurve.cpp
index c352578..8aa4b08 100644
--- a/services/audiopolicy/engine/common/src/VolumeCurve.cpp
+++ b/services/audiopolicy/engine/common/src/VolumeCurve.cpp
@@ -43,10 +43,24 @@
         indexInUi = volIndexMax;
     }
 
+    // Calculate the new volume index
     size_t nbCurvePoints = mCurvePoints.size();
-    // the volume index in the UI is relative to the min and max volume indices for this stream
-    int nbSteps = 1 + mCurvePoints[nbCurvePoints - 1].mIndex - mCurvePoints[0].mIndex;
-    int volIdx = (nbSteps * (indexInUi - volIndexMin)) / (volIndexMax - volIndexMin);
+
+    int volIdx;
+    if (volIndexMin == volIndexMax) {
+        if (indexInUi == volIndexMin) {
+            volIdx = volIndexMin;
+        } else {
+            // This would result in a divide-by-zero below
+            ALOG_ASSERT(volIndexMin != volIndexMax, "Invalid volume index range & value: 0");
+            return NAN;
+        }
+    } else {
+        // interpolate
+        // the volume index in the UI is relative to the min and max volume indices for this stream
+        int nbSteps = 1 + mCurvePoints[nbCurvePoints - 1].mIndex - mCurvePoints[0].mIndex;
+        volIdx = (nbSteps * (indexInUi - volIndexMin)) / (volIndexMax - volIndexMin);
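+        // Worked example with assumed values: for a curve whose points span indices 0..100
+        // (nbSteps = 101) and a stream with volIndexMin = 0, volIndexMax = 15, an indexInUi of 7
+        // maps to volIdx = (101 * 7) / 15 = 47 (integer division).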
+    }
 
     // Where would this volume index been inserted in the curve point
     size_t indexInUiPosition = mCurvePoints.orderOf(CurvePoint(volIdx, 0));
diff --git a/services/audiopolicy/engine/config/TEST_MAPPING b/services/audiopolicy/engine/config/TEST_MAPPING
new file mode 100644
index 0000000..06ce111
--- /dev/null
+++ b/services/audiopolicy/engine/config/TEST_MAPPING
@@ -0,0 +1,7 @@
+{
+  "presubmit": [
+    {
+       "name": "audiopolicy_engineconfig_tests"
+    }
+  ]
+}
diff --git a/services/audiopolicy/engine/config/include/EngineConfig.h b/services/audiopolicy/engine/config/include/EngineConfig.h
index 5d22c24..c565926 100644
--- a/services/audiopolicy/engine/config/include/EngineConfig.h
+++ b/services/audiopolicy/engine/config/include/EngineConfig.h
@@ -111,6 +111,8 @@
  */
 ParsingResult parse(const char* path = DEFAULT_PATH);
 android::status_t parseLegacyVolumes(VolumeGroups &volumeGroups);
+// Exposed for testing.
+android::status_t parseLegacyVolumeFile(const char* path, VolumeGroups &volumeGroups);
 
 } // namespace engineConfig
 } // namespace android
diff --git a/services/audiopolicy/engine/config/src/EngineConfig.cpp b/services/audiopolicy/engine/config/src/EngineConfig.cpp
index 4842cb2..7cfef5b 100644
--- a/services/audiopolicy/engine/config/src/EngineConfig.cpp
+++ b/services/audiopolicy/engine/config/src/EngineConfig.cpp
@@ -228,7 +228,8 @@
             std::string flags = getXmlAttribute(cur, "value");
 
             ALOGV("%s flags %s",  __FUNCTION__, flags.c_str());
-            attributes.flags = AudioFlagConverter::maskFromString(flags, " ");
+            attributes.flags = static_cast<audio_flags_mask_t>(
+                    AudioFlagConverter::maskFromString(flags, " "));
         }
         if (!xmlStrcmp(cur->name, (const xmlChar *)("Bundle"))) {
             std::string bundleKey = getXmlAttribute(cur, "key");
@@ -588,6 +589,7 @@
             }
         }
     }
+    VolumeGroups tempVolumeGroups = volumeGroups;
     for (const auto &volumeMapIter : legacyVolumeMap) {
         // In order to let AudioService setting the min and max (compatibility), set Min and Max
         // to -1 except for private streams
@@ -598,8 +600,10 @@
         }
         int indexMin = streamType >= AUDIO_STREAM_PUBLIC_CNT ? 0 : -1;
         int indexMax = streamType >= AUDIO_STREAM_PUBLIC_CNT ? 100 : -1;
-        volumeGroups.push_back({ volumeMapIter.first, indexMin, indexMax, volumeMapIter.second });
+        tempVolumeGroups.push_back(
+                { volumeMapIter.first, indexMin, indexMax, volumeMapIter.second });
     }
+    std::swap(tempVolumeGroups, volumeGroups);
     return NO_ERROR;
 }
 
@@ -694,35 +698,14 @@
     return deserializeLegacyVolumeCollection(doc, cur, volumeGroups, nbSkippedElements);
 }
 
-static const int gApmXmlConfigFilePathMaxLength = 128;
-
-static constexpr const char *apmXmlConfigFileName = "audio_policy_configuration.xml";
-static constexpr const char *apmA2dpOffloadDisabledXmlConfigFileName =
-        "audio_policy_configuration_a2dp_offload_disabled.xml";
-
 android::status_t parseLegacyVolumes(VolumeGroups &volumeGroups) {
-    char audioPolicyXmlConfigFile[gApmXmlConfigFilePathMaxLength];
-    std::vector<const char *> fileNames;
-    status_t ret;
-
-    if (property_get_bool("ro.bluetooth.a2dp_offload.supported", false) &&
-            property_get_bool("persist.bluetooth.a2dp_offload.disabled", false)) {
-        // A2DP offload supported but disabled: try to use special XML file
-        fileNames.push_back(apmA2dpOffloadDisabledXmlConfigFileName);
+    if (std::string audioPolicyXmlConfigFile = audio_get_audio_policy_config_file();
+            !audioPolicyXmlConfigFile.empty()) {
+        return parseLegacyVolumeFile(audioPolicyXmlConfigFile.c_str(), volumeGroups);
+    } else {
+        ALOGE("No readable audio policy config file found");
+        return BAD_VALUE;
     }
-    fileNames.push_back(apmXmlConfigFileName);
-
-    for (const char* fileName : fileNames) {
-        for (const auto& path : audio_get_configuration_paths()) {
-            snprintf(audioPolicyXmlConfigFile, sizeof(audioPolicyXmlConfigFile),
-                     "%s/%s", path.c_str(), fileName);
-            ret = parseLegacyVolumeFile(audioPolicyXmlConfigFile, volumeGroups);
-            if (ret == NO_ERROR) {
-                return ret;
-            }
-        }
-    }
-    return BAD_VALUE;
 }
 
 } // namespace engineConfig
diff --git a/services/audiopolicy/engine/config/tests/Android.bp b/services/audiopolicy/engine/config/tests/Android.bp
new file mode 100644
index 0000000..6b0774f
--- /dev/null
+++ b/services/audiopolicy/engine/config/tests/Android.bp
@@ -0,0 +1,25 @@
+cc_test {
+    name: "audiopolicy_engineconfig_tests",
+
+    shared_libs: [
+        "libbase",
+        "liblog",
+        "libmedia_helper",
+        "libutils",
+        "libxml2",
+    ],
+    static_libs: [
+        "libaudiopolicyengine_config",
+    ],
+
+    srcs: ["engineconfig_tests.cpp"],
+
+    data: [":audiopolicy_engineconfig_files"],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+
+    test_suites: ["device-tests"],
+}
diff --git a/services/audiopolicy/engine/config/tests/engineconfig_tests.cpp b/services/audiopolicy/engine/config/tests/engineconfig_tests.cpp
new file mode 100644
index 0000000..f61e02f
--- /dev/null
+++ b/services/audiopolicy/engine/config/tests/engineconfig_tests.cpp
@@ -0,0 +1,40 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <gtest/gtest.h>
+
+#define LOG_TAG "APM_Test"
+#include <android-base/file.h>
+#include <log/log.h>
+
+#include "EngineConfig.h"
+
+using namespace android;
+
+TEST(EngineConfigTestInit, LegacyVolumeGroupsLoadingIsTransactional) {
+    engineConfig::VolumeGroups groups;
+    ASSERT_TRUE(groups.empty());
+    status_t status = engineConfig::parseLegacyVolumeFile(
+            (base::GetExecutableDirectory() + "/test_invalid_apm_volume_tables.xml").c_str(),
+            groups);
+    ASSERT_NE(NO_ERROR, status);
+    EXPECT_TRUE(groups.empty());
+    status = engineConfig::parseLegacyVolumeFile(
+            (base::GetExecutableDirectory() + "/test_apm_volume_tables.xml").c_str(),
+            groups);
+    ASSERT_EQ(NO_ERROR, status);
+    EXPECT_FALSE(groups.empty());
+}
diff --git a/services/audiopolicy/engine/config/tests/resources/Android.bp b/services/audiopolicy/engine/config/tests/resources/Android.bp
new file mode 100644
index 0000000..0aee0e9
--- /dev/null
+++ b/services/audiopolicy/engine/config/tests/resources/Android.bp
@@ -0,0 +1,7 @@
+filegroup {
+    name: "audiopolicy_engineconfig_files",
+    srcs: [
+        "test_apm_volume_tables.xml",
+        "test_invalid_apm_volume_tables.xml",
+    ],
+}
diff --git a/services/audiopolicy/engine/config/tests/resources/test_apm_volume_tables.xml b/services/audiopolicy/engine/config/tests/resources/test_apm_volume_tables.xml
new file mode 100644
index 0000000..16126b6
--- /dev/null
+++ b/services/audiopolicy/engine/config/tests/resources/test_apm_volume_tables.xml
@@ -0,0 +1,42 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<audioPolicyConfiguration version="1.0" xmlns:xi="http://www.w3.org/2001/XInclude">
+    <globalConfiguration speaker_drc_enabled="true"/>
+    <volumes>
+        <volume stream="AUDIO_STREAM_VOICE_CALL" deviceCategory="DEVICE_CATEGORY_HEADSET">
+            <point>0,-4200</point>
+            <point>33,-2800</point>
+            <point>66,-1400</point>
+            <point>100,0</point>
+        </volume>
+        <volume stream="AUDIO_STREAM_VOICE_CALL" deviceCategory="DEVICE_CATEGORY_SPEAKER">
+            <point>0,-2400</point>
+            <point>33,-1600</point>
+            <point>66,-800</point>
+            <point>100,0</point>
+        </volume>
+        <volume stream="AUDIO_STREAM_VOICE_CALL" deviceCategory="DEVICE_CATEGORY_HEARING_AID"
+                ref="FULL_SCALE_VOLUME_CURVE"/>
+    </volumes>
+    <volumes>
+        <reference name="FULL_SCALE_VOLUME_CURVE">
+            <!-- Full Scale reference Volume Curve -->
+            <point>0,0</point>
+            <point>100,0</point>
+        </reference>
+    </volumes>
+</audioPolicyConfiguration>
diff --git a/services/audiopolicy/engine/config/tests/resources/test_invalid_apm_volume_tables.xml b/services/audiopolicy/engine/config/tests/resources/test_invalid_apm_volume_tables.xml
new file mode 100644
index 0000000..3ec5d10
--- /dev/null
+++ b/services/audiopolicy/engine/config/tests/resources/test_invalid_apm_volume_tables.xml
@@ -0,0 +1,59 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<!-- This file uses a non-existent audio stream name. -->
+<audioPolicyConfiguration version="1.0" xmlns:xi="http://www.w3.org/2001/XInclude">
+    <globalConfiguration speaker_drc_enabled="true"/>
+    <volumes>
+        <volume stream="AUDIO_STREAM_VOICE_CALL" deviceCategory="DEVICE_CATEGORY_HEADSET">
+            <point>0,-4200</point>
+            <point>33,-2800</point>
+            <point>66,-1400</point>
+            <point>100,0</point>
+        </volume>
+        <volume stream="AUDIO_STREAM_NON_EXISTING" deviceCategory="DEVICE_CATEGORY_SPEAKER">
+            <point>0,-2400</point>
+            <point>33,-1600</point>
+            <point>66,-800</point>
+            <point>100,0</point>
+        </volume>
+        <volume stream="AUDIO_STREAM_RING" deviceCategory="DEVICE_CATEGORY_HEADSET"
+                ref="FULL_SCALE_VOLUME_CURVE"/>
+        <volume stream="AUDIO_STREAM_MUSIC" deviceCategory="DEVICE_CATEGORY_HEADSET"
+                ref="FULL_SCALE_VOLUME_CURVE"/>
+        <volume stream="AUDIO_STREAM_ALARM" deviceCategory="DEVICE_CATEGORY_SPEAKER">
+            <point>0,-2970</point>
+            <point>33,-2010</point>
+            <point>66,-1020</point>
+            <point>100,0</point>
+        </volume>
+        <volume stream="AUDIO_STREAM_NOTIFICATION" deviceCategory="DEVICE_CATEGORY_HEADSET"
+                ref="FULL_SCALE_VOLUME_CURVE"/>
+        <volume stream="AUDIO_STREAM_BLUETOOTH_SCO" deviceCategory="DEVICE_CATEGORY_EXT_MEDIA"
+                ref="FULL_SCALE_VOLUME_CURVE"/>
+        <volume stream="AUDIO_STREAM_ENFORCED_AUDIBLE" deviceCategory="DEVICE_CATEGORY_HEARING_AID"
+                ref="FULL_SCALE_VOLUME_CURVE"/>
+        <volume stream="AUDIO_STREAM_DTMF" deviceCategory="DEVICE_CATEGORY_SPEAKER"
+                ref="FULL_SCALE_VOLUME_CURVE"/>
+    </volumes>
+    <volumes>
+        <reference name="FULL_SCALE_VOLUME_CURVE">
+            <!-- Full Scale reference Volume Curve -->
+            <point>0,0</point>
+            <point>100,0</point>
+        </reference>
+    </volumes>
+</audioPolicyConfiguration>
diff --git a/services/audiopolicy/engine/interface/EngineInterface.h b/services/audiopolicy/engine/interface/EngineInterface.h
index dfb20b5..f64608d 100644
--- a/services/audiopolicy/engine/interface/EngineInterface.h
+++ b/services/audiopolicy/engine/interface/EngineInterface.h
@@ -34,6 +34,8 @@
 using DeviceStrategyMap = std::map<product_strategy_t, DeviceVector>;
 using StrategyVector = std::vector<product_strategy_t>;
 using VolumeGroupVector = std::vector<volume_group_t>;
+using CapturePresetDevicesRoleMap =
+        std::map<audio_source_t, std::map<device_role_t, AudioDeviceTypeAddrVector>>;
 
 /**
  * This interface is dedicated to the policy manager that a Policy Engine shall implement.
@@ -293,36 +295,113 @@
     virtual status_t listAudioVolumeGroups(AudioVolumeGroupVector &groups) const = 0;
 
     /**
-     * @brief setPreferredDeviceForStrategy sets the default device to be used for a
-     * strategy when available
+     * @brief setDevicesRoleForStrategy sets the devices role for a strategy when available. To
+     * remove the devices role, removeDevicesRoleForStrategy must be called. When the devices role
+     * is set successfully, the previously set devices for the same role and strategy are removed.
      * @param strategy the audio strategy whose routing will be affected
-     * @param device the audio device to route to when available
-     * @return BAD_VALUE if the strategy is invalid,
-     *     or NO_ERROR if the preferred device was set
+     * @param role the role of the devices for the strategy. All device roles are defined at
+     *             system/media/audio/include/system/audio_policy.h. DEVICE_ROLE_NONE is invalid
+     *             for setting.
+     * @param devices the audio devices to be set
+     * @return BAD_VALUE if the strategy or role is invalid,
+     *     or NO_ERROR if the role of the devices for strategy was set
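+     *
+     * Illustrative usage (placeholder names; the AudioDeviceTypeAddr(type, address) constructor
+     * is assumed here):
+     *   engine->setDevicesRoleForStrategy(mediaStrategy, DEVICE_ROLE_PREFERRED,
+     *           {AudioDeviceTypeAddr(AUDIO_DEVICE_OUT_BLUETOOTH_A2DP, "00:11:22:33:44:55")});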
      */
-    virtual status_t setPreferredDeviceForStrategy(product_strategy_t strategy,
-            const AudioDeviceTypeAddr &device) = 0;
+    virtual status_t setDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role,
+            const AudioDeviceTypeAddrVector &devices) = 0;
 
     /**
-     * @brief removePreferredDeviceForStrategy removes the preferred device previously set
+     * @brief removeDevicesRoleForStrategy removes the role of device(s) previously set
      * for the given strategy
      * @param strategy the audio strategy whose routing will be affected
-     * @return BAD_VALUE if the strategy is invalid,
-     *     or NO_ERROR if the preferred device was removed
+     * @param role the role of the devices for the strategy
+     * @return BAD_VALUE if the strategy or role is invalid,
+     *     or NO_ERROR if the devices for this role were removed
      */
-    virtual status_t removePreferredDeviceForStrategy(product_strategy_t strategy) = 0;
+    virtual status_t removeDevicesRoleForStrategy(product_strategy_t strategy,
+            device_role_t role) = 0;
 
     /**
-     * @brief getPreferredDeviceForStrategy queries which device is set as the
-     * preferred device for the given strategy
+     * @brief getDevicesForRoleAndStrategy queries which devices have the specified role for the
+     * specified strategy
      * @param strategy the strategy to query
-     * @param device returns configured as the preferred device if one was set
-     * @return BAD_VALUE if the strategy is invalid,
-     *     or NAME_NOT_FOUND if no preferred device was set
-     *     or NO_ERROR if the device parameter was initialized to the preferred device
+     * @param role the role of the devices to query
+     * @param devices returns list of devices with matching role for the specified strategy.
+     *                DEVICE_ROLE_NONE is invalid as input.
+     * @return BAD_VALUE if the strategy or role is invalid,
+     *     or NAME_NOT_FOUND if no device for the role and strategy was set
+     *     or NO_ERROR if the devices parameter contains a list of devices
      */
-    virtual status_t getPreferredDeviceForStrategy(product_strategy_t strategy,
-            AudioDeviceTypeAddr &device) const = 0;
+    virtual status_t getDevicesForRoleAndStrategy(product_strategy_t strategy, device_role_t role,
+            AudioDeviceTypeAddrVector &devices) const = 0;
+
+    /**
+     * @brief setDevicesRoleForCapturePreset sets the role of devices for a capture preset when
+     * available. To remove the devices role, removeDevicesRoleForCapturePreset must be called.
+     * Calling clearDevicesRoleForCapturePreset removes all devices for the role. When the devices
+     * role is set successfully, devices previously set for the same role and preset are removed.
+     * @param audioSource the audio capture preset whose routing will be affected
+     * @param role the role of the devices for the capture preset. All device roles are defined at
+     *             system/media/audio/include/system/audio_policy.h. DEVICE_ROLE_NONE is invalid
+     *             for setting.
+     * @param devices the audio devices to be set
+     * @return BAD_VALUE if the capture preset or role is invalid,
+     *     or NO_ERROR if the role of the devices for capture preset was set
+     */
+    virtual status_t setDevicesRoleForCapturePreset(audio_source_t audioSource, device_role_t role,
+            const AudioDeviceTypeAddrVector &devices) = 0;
+
+    /**
+     * @brief addDevicesRoleForCapturePreset adds devices to a role for a capture preset when
+     * available. To remove the devices role, removeDevicesRoleForCapturePreset must be called.
+     * Calling clearDevicesRoleForCapturePreset removes all devices for the role.
+     * @param audioSource the audio capture preset whose routing will be affected
+     * @param role the role of the devices for the capture preset. All device roles are defined at
+     *             system/media/audio/include/system/audio_policy.h. DEVICE_ROLE_NONE is invalid
+     *             for setting.
+     * @param devices the audio devices to be added
+     * @return BAD_VALUE if the capture preset or role is invalid,
+     *     or NO_ERROR if the role of the devices for capture preset was added
+     */
+    virtual status_t addDevicesRoleForCapturePreset(audio_source_t audioSource, device_role_t role,
+            const AudioDeviceTypeAddrVector &devices) = 0;
+
+    /**
+     * @brief removeDevicesRoleForCapturePreset removes the role of device(s) previously set
+     * for the given capture preset
+     * @param audioSource the audio capture preset whose routing will be affected
+     * @param role the role of the devices for the capture preset
+     * @param devices the devices to be removed
+     * @return BAD_VALUE if 1) the capture preset is invalid, 2) the role is invalid or 3) the
+     *     devices to be removed are not all present for that role and capture preset,
+     *     or NO_ERROR if the devices for this role were removed
+     */
+    virtual status_t removeDevicesRoleForCapturePreset(audio_source_t audioSource,
+            device_role_t role, const AudioDeviceTypeAddrVector& devices) = 0;
+
+    /**
+     * @brief clearDevicesRoleForCapturePreset removes the role of all device(s) previously set
+     * for the given capture preset
+     * @param audioSource the audio capture preset whose routing will be affected
+     * @param role the role of the devices for the capture preset
+     * @return BAD_VALUE if the capture preset or role is invalid,
+     *     or NO_ERROR if the devices for this role were removed
+     */
+    virtual status_t clearDevicesRoleForCapturePreset(audio_source_t audioSource,
+            device_role_t role) = 0;
+
+    /**
+     * @brief getDevicesForRoleAndCapturePreset queries which devices have the specified role for
+     * the specified capture preset
+     * @param audioSource the capture preset to query
+     * @param role the role of the devices to query
+     * @param devices returns list of devices with matching role for the specified capture preset.
+     *                DEVICE_ROLE_NONE is invalid as input.
+     * @return BAD_VALUE if the capture preset or role is invalid,
+     *     or NAME_NOT_FOUND if no device for the role and capture preset was set
+     *     or NO_ERROR if the devices parameter contains a list of devices
+     */
+    virtual status_t getDevicesForRoleAndCapturePreset(audio_source_t audioSource,
+            device_role_t role, AudioDeviceTypeAddrVector &devices) const = 0;
 
 
     virtual void dump(String8 *dst) const = 0;
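
The set/remove/get contract documented in the new interface methods (setting replaces anything previously registered for the same strategy and role, DEVICE_ROLE_NONE is rejected, and a get with nothing registered reports NAME_NOT_FOUND) can be pictured as a plain map keyed by (strategy, role). The sketch below uses stand-in types (Strategy, Role, DeviceTypeAddr, Status) and only illustrates the documented semantics, not the engine's actual storage:

```cpp
// Sketch of the documented role-map semantics with stand-in types.
#include <cstdint>
#include <map>
#include <string>
#include <utility>
#include <vector>

using Strategy = uint32_t;                       // stand-in for product_strategy_t
enum class Role { None, Preferred, Disabled };   // stand-in for device_role_t
struct DeviceTypeAddr { uint32_t type; std::string address; };  // stand-in

enum Status { OK = 0, BAD_VALUE = -1, NAME_NOT_FOUND = -2 };

class RoleMap {
public:
    Status set(Strategy s, Role r, std::vector<DeviceTypeAddr> devices) {
        if (r == Role::None) return BAD_VALUE;   // DEVICE_ROLE_NONE is invalid for setting
        mMap[{s, r}] = std::move(devices);       // replaces previously set devices
        return OK;
    }
    Status remove(Strategy s, Role r) {
        return mMap.erase({s, r}) ? OK : BAD_VALUE;
    }
    Status get(Strategy s, Role r, std::vector<DeviceTypeAddr>* out) const {
        auto it = mMap.find({s, r});
        if (it == mMap.end()) return NAME_NOT_FOUND;  // nothing was set for this pair
        *out = it->second;
        return OK;
    }
private:
    std::map<std::pair<Strategy, Role>, std::vector<DeviceTypeAddr>> mMap;
};
```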
diff --git a/services/audiopolicy/engineconfigurable/config/example/phone/audio_policy_engine_product_strategies.xml b/services/audiopolicy/engineconfigurable/config/example/phone/audio_policy_engine_product_strategies.xml
index a7388da..bc32416 100644
--- a/services/audiopolicy/engineconfigurable/config/example/phone/audio_policy_engine_product_strategies.xml
+++ b/services/audiopolicy/engineconfigurable/config/example/phone/audio_policy_engine_product_strategies.xml
@@ -65,6 +65,12 @@
     </ProductStrategy>
 
     <ProductStrategy name="STRATEGY_MEDIA">
+        <AttributesGroup streamType="AUDIO_STREAM_ASSISTANT" volumeGroup="assistant">
+            <Attributes>
+                <ContentType value="AUDIO_CONTENT_TYPE_SPEECH"/>
+                <Usage value="AUDIO_USAGE_ASSISTANT"/>
+            </Attributes>
+        </AttributesGroup>
          <AttributesGroup streamType="AUDIO_STREAM_MUSIC" volumeGroup="music">
             <Attributes> <Usage value="AUDIO_USAGE_MEDIA"/> </Attributes>
             <Attributes> <Usage value="AUDIO_USAGE_GAME"/> </Attributes>
@@ -72,12 +78,6 @@
             <Attributes> <Usage value="AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE"/> </Attributes>
             <Attributes></Attributes>
         </AttributesGroup>
-        <AttributesGroup streamType="AUDIO_STREAM_ASSISTANT" volumeGroup="assistant">
-            <Attributes>
-                <ContentType value="AUDIO_CONTENT_TYPE_SPEECH"/>
-                <Usage value="AUDIO_USAGE_ASSISTANT"/>
-            </Attributes>
-        </AttributesGroup>
         <AttributesGroup streamType="AUDIO_STREAM_SYSTEM" volumeGroup="system">
             <Attributes> <Usage value="AUDIO_USAGE_ASSISTANCE_SONIFICATION"/> </Attributes>
         </AttributesGroup>
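
Attribute groups are matched in declaration order, and the MUSIC group ends with an empty `<Attributes></Attributes>` element that acts as a catch-all, which is why the ASSISTANT group has to move ahead of it. A hedged sketch of first-match selection with stand-in types (Attributes, AttributesGroup), assuming empty fields behave as wildcards:

```cpp
// Sketch of first-match attributes-group selection. Stand-in types only; this
// is not the engine's ProductStrategy matching code.
#include <cstdio>
#include <optional>
#include <string>
#include <vector>

struct Attributes { std::string usage; std::string contentType; };

struct AttributesGroup {
    std::string streamType;
    std::vector<Attributes> attributes;  // empty field values act as wildcards

    bool matches(const Attributes& a) const {
        for (const auto& ref : attributes) {
            const bool usageOk = ref.usage.empty() || ref.usage == a.usage;
            const bool contentOk = ref.contentType.empty() || ref.contentType == a.contentType;
            if (usageOk && contentOk) return true;  // {} matches anything (catch-all)
        }
        return false;
    }
};

std::optional<std::string> streamFor(const std::vector<AttributesGroup>& groups,
                                     const Attributes& a) {
    for (const auto& g : groups) {
        if (g.matches(a)) return g.streamType;  // first match wins, so order matters
    }
    return std::nullopt;
}

int main() {
    const std::vector<AttributesGroup> groups = {
        {"AUDIO_STREAM_ASSISTANT", {{"AUDIO_USAGE_ASSISTANT", "AUDIO_CONTENT_TYPE_SPEECH"}}},
        {"AUDIO_STREAM_MUSIC", {{"AUDIO_USAGE_MEDIA", ""}, {"", ""}}},  // ends with the catch-all
    };
    const auto s = streamFor(groups, {"AUDIO_USAGE_ASSISTANT", "AUDIO_CONTENT_TYPE_SPEECH"});
    std::printf("%s\n", s ? s->c_str() : "none");  // prints AUDIO_STREAM_ASSISTANT
    return 0;
}
```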
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/InputSource.cpp b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/InputSource.cpp
index f91f8d7..f8a6fc0 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/InputSource.cpp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/InputSource.cpp
@@ -45,7 +45,7 @@
 
 bool InputSource::sendToHW(string & /*error*/)
 {
-    uint32_t applicableInputDevice;
+    audio_devices_t applicableInputDevice;
     blackboardRead(&applicableInputDevice, sizeof(applicableInputDevice));
     return mPolicyPluginInterface->setDeviceForInputSource(mId, applicableInputDevice);
 }
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/ProductStrategy.h b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/ProductStrategy.h
index 244f082..6c8eb65 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/ProductStrategy.h
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/ProductStrategy.h
@@ -32,7 +32,7 @@
 
     struct Device
     {
-        uint32_t applicableDevice; /**< applicable device for this strategy. */
+        audio_devices_t applicableDevice; /**< applicable device for this strategy. */
         char deviceAddress[mMaxStringSize]; /**< device address associated with this strategy. */
     } __attribute__((packed));
 
diff --git a/services/audiopolicy/engineconfigurable/src/InputSource.cpp b/services/audiopolicy/engineconfigurable/src/InputSource.cpp
index aa06ae3..f4645e6 100644
--- a/services/audiopolicy/engineconfigurable/src/InputSource.cpp
+++ b/services/audiopolicy/engineconfigurable/src/InputSource.cpp
@@ -51,7 +51,7 @@
         mApplicableDevices = devices;
         return NO_ERROR;
     }
-    devices |= AUDIO_DEVICE_BIT_IN;
+    devices = static_cast<audio_devices_t>(devices | AUDIO_DEVICE_BIT_IN);
     if (!audio_is_input_device(devices)) {
         ALOGE("%s: trying to set an invalid device 0x%X for input source %s",
               __FUNCTION__, devices, getName().c_str());
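
The explicit static_cast is needed because bitwise OR on an unscoped enum yields an integer, which does not convert back to audio_devices_t implicitly once the value is kept strictly enum-typed. A minimal stand-alone sketch with a stand-in device_t enum (not the real system/audio.h definitions):

```cpp
// Why the explicit static_cast: bitwise-OR on an unscoped enum promotes to an
// integer type, so assigning the result back to the enum needs a cast when the
// code is built with warnings treated as errors. Stand-in enum only.
#include <cstdint>

enum device_t : uint32_t {           // stand-in for audio_devices_t
    DEVICE_NONE   = 0u,
    DEVICE_IN_MIC = 0x4u,
    DEVICE_BIT_IN = 0x80000000u,     // stand-in for AUDIO_DEVICE_BIT_IN
};

int main() {
    device_t devices = DEVICE_IN_MIC;
    // devices |= DEVICE_BIT_IN;                               // ill-formed: no |= for the enum
    devices = static_cast<device_t>(devices | DEVICE_BIT_IN);  // cast the integer result back
    return devices == static_cast<device_t>(0x80000004u) ? 0 : 1;
}
```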
diff --git a/services/audiopolicy/engineconfigurable/tools/buildCommonTypesStructureFile.py b/services/audiopolicy/engineconfigurable/tools/buildCommonTypesStructureFile.py
index 9a7fa8f..5083b14 100755
--- a/services/audiopolicy/engineconfigurable/tools/buildCommonTypesStructureFile.py
+++ b/services/audiopolicy/engineconfigurable/tools/buildCommonTypesStructureFile.py
@@ -52,13 +52,19 @@
 def findBitPos(decimal):
     pos = 0
     i = 1
-    while i != decimal:
+    while i < decimal:
         i = i << 1
         pos = pos + 1
         if pos == 32:
             return -1
-    return pos
 
+    # TODO: b/168065706. This is just to fix the build. That the problem of devices with
+    # multiple bits set must be addressed more generally in the configurable audio policy
+    # and parameter framework.
+    if i > decimal:
+        logging.info("Device:{} which has multiple bits set is skipped. b/168065706".format(decimal))
+        return -2
+    return pos
 
 def generateXmlStructureFile(componentTypeDict, structureTypesFile, outputFile):
 
@@ -74,10 +80,12 @@
                 if bitparameters_node is not None:
                     ordered_values = OrderedDict(sorted(values_dict.items(), key=lambda x: x[1]))
                     for key, value in ordered_values.items():
-                        value_node = ET.SubElement(bitparameters_node, "BitParameter")
-                        value_node.set('Name', key)
-                        value_node.set('Size', "1")
-                        value_node.set('Pos', str(findBitPos(value)))
+                        pos = findBitPos(value)
+                        if pos >= 0:
+                            value_node = ET.SubElement(bitparameters_node, "BitParameter")
+                            value_node.set('Name', key)
+                            value_node.set('Size', "1")
+                            value_node.set('Pos', str(pos))
 
                 enum_parameter_node = component_type.find("EnumParameter")
                 if enum_parameter_node is not None:
@@ -118,9 +126,9 @@
     ignored_values = ['CNT', 'MAX', 'ALL', 'NONE']
 
     criteria_pattern = re.compile(
-        r"\s*(?P<type>(?:"+'|'.join(component_type_mapping_table.keys()) + "))_" \
-        r"(?P<literal>(?!" + '|'.join(ignored_values) + ")\w*)\s*=\s*" \
-        r"(?P<values>(?:0[xX])?[0-9a-fA-F]+)")
+        r"\s*V\((?P<type>(?:"+'|'.join(component_type_mapping_table.keys()) + "))_" \
+        r"(?P<literal>(?!" + '|'.join(ignored_values) + ")\w*)\s*,\s*" \
+        r"(?:AUDIO_DEVICE_BIT_IN \| )?(?P<values>(?:0[xX])[0-9a-fA-F]+|[0-9]+)")
 
     logging.info("Checking Android Header file {}".format(androidaudiobaseheaderFile))
 
@@ -156,6 +164,13 @@
 
             logging.debug("type:{}, literal:{}, values:{}.".format(component_type_name, component_type_literal, component_type_numerical_value))
 
+    if "stub" not in all_component_types["OutputDevicesMask"]:
+        all_component_types["OutputDevicesMask"]["stub"] = 0x40000000
+        logging.info("added stub output device mask")
+    if "stub" not in all_component_types["InputDevicesMask"]:
+        all_component_types["InputDevicesMask"]["stub"] = 0x40000000
+        logging.info("added stub input device mask")
+
     # Transform input source in inclusive criterion
     shift = len(all_component_types['OutputDevicesMask'])
     if shift > 32:
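
findBitPos now rejects device masks that have more than one bit set (b/168065706) instead of looping past them. The same single-bit rule can be expressed in C++ with the usual mask-and-decrement check; this is an illustration only, the build tooling itself stays in Python:

```cpp
// Sketch of the single-bit rule: values without exactly one set bit get no
// position, which is the case b/168065706 works around. Illustration only.
#include <cstdint>
#include <cstdio>

int findSingleBitPos(uint32_t mask) {
    if (mask == 0 || (mask & (mask - 1)) != 0) {
        return -1;  // zero or multiple bits set: no single position to report
    }
    int pos = 0;
    while ((mask & 1u) == 0) { mask >>= 1; ++pos; }
    return pos;
}

int main() {
    std::printf("%d %d %d\n", findSingleBitPos(0x8u), findSingleBitPos(0x6u), findSingleBitPos(0u));
    // prints: 3 -1 -1
    return 0;
}
```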
diff --git a/services/audiopolicy/enginedefault/config/example/phone/audio_policy_engine_product_strategies.xml b/services/audiopolicy/enginedefault/config/example/phone/audio_policy_engine_product_strategies.xml
index a7388da..bc32416 100644
--- a/services/audiopolicy/enginedefault/config/example/phone/audio_policy_engine_product_strategies.xml
+++ b/services/audiopolicy/enginedefault/config/example/phone/audio_policy_engine_product_strategies.xml
@@ -65,6 +65,12 @@
     </ProductStrategy>
 
     <ProductStrategy name="STRATEGY_MEDIA">
+        <AttributesGroup streamType="AUDIO_STREAM_ASSISTANT" volumeGroup="assistant">
+            <Attributes>
+                <ContentType value="AUDIO_CONTENT_TYPE_SPEECH"/>
+                <Usage value="AUDIO_USAGE_ASSISTANT"/>
+            </Attributes>
+        </AttributesGroup>
          <AttributesGroup streamType="AUDIO_STREAM_MUSIC" volumeGroup="music">
             <Attributes> <Usage value="AUDIO_USAGE_MEDIA"/> </Attributes>
             <Attributes> <Usage value="AUDIO_USAGE_GAME"/> </Attributes>
@@ -72,12 +78,6 @@
             <Attributes> <Usage value="AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE"/> </Attributes>
             <Attributes></Attributes>
         </AttributesGroup>
-        <AttributesGroup streamType="AUDIO_STREAM_ASSISTANT" volumeGroup="assistant">
-            <Attributes>
-                <ContentType value="AUDIO_CONTENT_TYPE_SPEECH"/>
-                <Usage value="AUDIO_USAGE_ASSISTANT"/>
-            </Attributes>
-        </AttributesGroup>
         <AttributesGroup streamType="AUDIO_STREAM_SYSTEM" volumeGroup="system">
             <Attributes> <Usage value="AUDIO_USAGE_ASSISTANCE_SONIFICATION"/> </Attributes>
         </AttributesGroup>
diff --git a/services/audiopolicy/enginedefault/src/Engine.cpp b/services/audiopolicy/enginedefault/src/Engine.cpp
old mode 100755
new mode 100644
index b14d2bb..eccde7b
--- a/services/audiopolicy/enginedefault/src/Engine.cpp
+++ b/services/audiopolicy/enginedefault/src/Engine.cpp
@@ -241,10 +241,15 @@
         default:    // FORCE_NONE
             devices = availableOutputDevices.getDevicesFromType(AUDIO_DEVICE_OUT_HEARING_AID);
             if (!devices.isEmpty()) break;
+
+            // TODO (b/161358428): remove when preferred device
+            //  for strategy phone will be used instead of AUDIO_POLICY_FORCE_FOR_COMMUNICATION
+            devices = availableOutputDevices.getDevicesFromType(AUDIO_DEVICE_OUT_BLE_HEADSET);
+            if (!devices.isEmpty()) break;
+
             // when not in a phone call, phone strategy should route STREAM_VOICE_CALL to A2DP
             if (!isInCall() &&
-                    (getForceUse(AUDIO_POLICY_FORCE_FOR_MEDIA) != AUDIO_POLICY_FORCE_NO_BT_A2DP) &&
-                     outputs.isA2dpSupported()) {
+                    (getForceUse(AUDIO_POLICY_FORCE_FOR_MEDIA) != AUDIO_POLICY_FORCE_NO_BT_A2DP)) {
                 devices = availableOutputDevices.getFirstDevicesFromTypes({
                         AUDIO_DEVICE_OUT_BLUETOOTH_A2DP,
                         AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES});
@@ -267,12 +272,16 @@
         case AUDIO_POLICY_FORCE_SPEAKER:
             // when not in a phone call, phone strategy should route STREAM_VOICE_CALL to
             // A2DP speaker when forcing to speaker output
-            if (!isInCall() &&
-                    (getForceUse(AUDIO_POLICY_FORCE_FOR_MEDIA) != AUDIO_POLICY_FORCE_NO_BT_A2DP) &&
-                     outputs.isA2dpSupported()) {
+            if (!isInCall()) {
                 devices = availableOutputDevices.getDevicesFromType(
-                        AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER);
+                        AUDIO_DEVICE_OUT_BLE_SPEAKER);
                 if (!devices.isEmpty()) break;
+
+                if ((getForceUse(AUDIO_POLICY_FORCE_FOR_MEDIA) != AUDIO_POLICY_FORCE_NO_BT_A2DP)) {
+                    devices = availableOutputDevices.getDevicesFromType(
+                            AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER);
+                    if (!devices.isEmpty()) break;
+                }
             }
             if (!isInCall()) {
                 devices = availableOutputDevices.getFirstDevicesFromTypes({
@@ -386,18 +395,13 @@
                     STRATEGY_PHONE, availableOutputDevices, availableInputDevices, outputs);
             break;
         }
-        // FIXME: Find a better solution to prevent routing to BT hearing aid(b/122931261).
-        if ((devices2.isEmpty()) &&
-                (getForceUse(AUDIO_POLICY_FORCE_FOR_MEDIA) != AUDIO_POLICY_FORCE_NO_BT_A2DP)) {
-            devices2 = availableOutputDevices.getDevicesFromType(AUDIO_DEVICE_OUT_HEARING_AID);
-        }
+
         if ((devices2.isEmpty()) &&
             (getForceUse(AUDIO_POLICY_FORCE_FOR_MEDIA) == AUDIO_POLICY_FORCE_SPEAKER)) {
             devices2 = availableOutputDevices.getDevicesFromType(AUDIO_DEVICE_OUT_SPEAKER);
         }
         if (devices2.isEmpty() && (getLastRemovableMediaDevices().size() > 0)) {
-            if ((getForceUse(AUDIO_POLICY_FORCE_FOR_MEDIA) != AUDIO_POLICY_FORCE_NO_BT_A2DP) &&
-                    outputs.isA2dpSupported()) {
+            if ((getForceUse(AUDIO_POLICY_FORCE_FOR_MEDIA) != AUDIO_POLICY_FORCE_NO_BT_A2DP)) {
                 // Get the last connected device of wired and bluetooth a2dp
                 devices2 = availableOutputDevices.getFirstDevicesFromTypes(
                         getLastRemovableMediaDevices());
@@ -452,22 +456,26 @@
         devices = availableOutputDevices.getDevicesFromType(AUDIO_DEVICE_OUT_TELEPHONY_TX);
         break;
 
+    case STRATEGY_NONE:
+        // Happens when internal strategies are processed ("rerouting", "patch"...)
+        break;
+
     default:
-        ALOGW("getDevicesForStrategy() unknown strategy: %d", strategy);
+        ALOGW("%s unknown strategy: %d", __func__, strategy);
         break;
     }
 
     if (devices.isEmpty()) {
-        ALOGV("getDevicesForStrategy() no device found for strategy %d", strategy);
+        ALOGV("%s no device found for strategy %d", __func__, strategy);
         sp<DeviceDescriptor> defaultOutputDevice = getApmObserver()->getDefaultOutputDevice();
         if (defaultOutputDevice != nullptr) {
             devices.add(defaultOutputDevice);
         }
         ALOGE_IF(devices.isEmpty(),
-                 "getDevicesForStrategy() no default device defined");
+                 "%s no default device defined", __func__);
     }
 
-    ALOGVV("getDevices ForStrategy() strategy %d, device %s",
+    ALOGVV("%s strategy %d, device %s", __func__,
            strategy, dumpDeviceTypes(devices.types()).c_str());
     return devices;
 }
@@ -514,8 +522,9 @@
             if (device != nullptr) break;
         }
         device = availableDevices.getFirstExistingDevice({
-                AUDIO_DEVICE_IN_WIRED_HEADSET, AUDIO_DEVICE_IN_USB_HEADSET,
-                AUDIO_DEVICE_IN_USB_DEVICE, AUDIO_DEVICE_IN_BUILTIN_MIC});
+                AUDIO_DEVICE_IN_BLE_HEADSET, AUDIO_DEVICE_IN_WIRED_HEADSET,
+                AUDIO_DEVICE_IN_USB_HEADSET, AUDIO_DEVICE_IN_USB_DEVICE,
+                AUDIO_DEVICE_IN_BUILTIN_MIC});
         break;
 
     case AUDIO_SOURCE_VOICE_COMMUNICATION:
@@ -539,9 +548,13 @@
             FALLTHROUGH_INTENDED;
 
         default:    // FORCE_NONE
+            // TODO (b/161358428): remove AUDIO_DEVICE_IN_BLE_HEADSET from the list
+            //  when preferred device for strategy phone will be used instead of
+            //  AUDIO_POLICY_FORCE_FOR_COMMUNICATION.
             device = availableDevices.getFirstExistingDevice({
-                    AUDIO_DEVICE_IN_WIRED_HEADSET, AUDIO_DEVICE_IN_USB_HEADSET,
-                    AUDIO_DEVICE_IN_USB_DEVICE, AUDIO_DEVICE_IN_BUILTIN_MIC});
+                    AUDIO_DEVICE_IN_BLE_HEADSET, AUDIO_DEVICE_IN_WIRED_HEADSET,
+                    AUDIO_DEVICE_IN_USB_HEADSET, AUDIO_DEVICE_IN_USB_DEVICE,
+                    AUDIO_DEVICE_IN_BUILTIN_MIC});
             break;
 
         case AUDIO_POLICY_FORCE_SPEAKER:
@@ -566,8 +579,9 @@
             if (device != nullptr) break;
         }
         device = availableDevices.getFirstExistingDevice({
-                AUDIO_DEVICE_IN_WIRED_HEADSET, AUDIO_DEVICE_IN_USB_HEADSET,
-                AUDIO_DEVICE_IN_USB_DEVICE, AUDIO_DEVICE_IN_BUILTIN_MIC});
+                AUDIO_DEVICE_IN_BLE_HEADSET, AUDIO_DEVICE_IN_WIRED_HEADSET,
+                AUDIO_DEVICE_IN_USB_HEADSET, AUDIO_DEVICE_IN_USB_DEVICE,
+                AUDIO_DEVICE_IN_BUILTIN_MIC});
         break;
     case AUDIO_SOURCE_CAMCORDER:
         // For a device without built-in mic, adding usb device
@@ -631,19 +645,17 @@
 
     // check if this strategy has a preferred device that is available,
     // if yes, give priority to it
-    AudioDeviceTypeAddr preferredStrategyDevice;
-    const status_t status = getPreferredDeviceForStrategy(strategy, preferredStrategyDevice);
+    AudioDeviceTypeAddrVector preferredStrategyDevices;
+    const status_t status = getDevicesForRoleAndStrategy(
+            strategy, DEVICE_ROLE_PREFERRED, preferredStrategyDevices);
     if (status == NO_ERROR) {
         // there is a preferred device, is it available?
-        sp<DeviceDescriptor> preferredAvailableDevDescr = availableOutputDevices.getDevice(
-                preferredStrategyDevice.mType,
-                String8(preferredStrategyDevice.mAddress.c_str()),
-                AUDIO_FORMAT_DEFAULT);
-        if (preferredAvailableDevDescr != nullptr) {
-            ALOGVV("%s using pref device 0x%08x/%s for strategy %u",
-                   __func__, preferredStrategyDevice.mType,
-                   preferredStrategyDevice.mAddress.c_str(), strategy);
-            return DeviceVector(preferredAvailableDevDescr);
+        DeviceVector preferredAvailableDevVec =
+                availableOutputDevices.getDevicesFromDeviceTypeAddrVec(preferredStrategyDevices);
+        if (preferredAvailableDevVec.size() == preferredStrategyDevices.size()) {
+            ALOGVV("%s using pref device %s for strategy %u",
+                   __func__, preferredAvailableDevVec.toString().c_str(), strategy);
+            return preferredAvailableDevVec;
         }
     }
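
With the move to device vectors, the preferred list is meant to be honoured only when every preferred device is currently available, which is what the size comparison expresses. A hedged sketch of that filtering step with stand-in Device values, not the real Engine/DeviceVector code:

```cpp
// Sketch of the preferred-device check: honour the preference only when every
// preferred device is currently available, otherwise fall back to the regular
// strategy rules. Stand-in types only.
#include <algorithm>
#include <cstdint>
#include <string>
#include <vector>

struct Device { uint32_t type; std::string address; };

static bool sameDevice(const Device& a, const Device& b) {
    return a.type == b.type && a.address == b.address;
}

std::vector<Device> devicesForPreferredRole(const std::vector<Device>& available,
                                            const std::vector<Device>& preferred) {
    std::vector<Device> matched;
    for (const auto& p : preferred) {
        auto it = std::find_if(available.begin(), available.end(),
                               [&](const Device& d) { return sameDevice(d, p); });
        if (it != available.end()) matched.push_back(*it);
    }
    if (!matched.empty() && matched.size() == preferred.size()) return matched;
    return {};  // not all preferred devices available: use the default rules
}
```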
 
diff --git a/services/audiopolicy/manager/Android.bp b/services/audiopolicy/manager/Android.bp
new file mode 100644
index 0000000..5bb432f
--- /dev/null
+++ b/services/audiopolicy/manager/Android.bp
@@ -0,0 +1,32 @@
+cc_library_shared {
+    name: "libaudiopolicymanager",
+
+    srcs: [
+        "AudioPolicyFactory.cpp",
+    ],
+
+    include_dirs: [
+        "frameworks/av/services/audioflinger"
+    ],
+
+    shared_libs: [
+        "libaudiopolicymanagerdefault",
+    ],
+
+    static_libs: [
+        "libaudiopolicycomponents",
+    ],
+
+    header_libs: [
+        "libaudiopolicycommon",
+        "libaudiopolicyengine_interface_headers",
+        "libaudiopolicymanager_interface_headers",
+        "libaudioutils_headers",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+
+}
diff --git a/services/audiopolicy/manager/Android.mk b/services/audiopolicy/manager/Android.mk
deleted file mode 100644
index cae6cfa..0000000
--- a/services/audiopolicy/manager/Android.mk
+++ /dev/null
@@ -1,30 +0,0 @@
-LOCAL_PATH:= $(call my-dir)
-
-ifneq ($(USE_CUSTOM_AUDIO_POLICY), 1)
-
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES:= \
-    AudioPolicyFactory.cpp
-
-LOCAL_SHARED_LIBRARIES := \
-    libaudiopolicymanagerdefault
-
-LOCAL_STATIC_LIBRARIES := \
-    libaudiopolicycomponents
-
-LOCAL_C_INCLUDES += \
-    $(call include-path-for, audio-utils)
-
-LOCAL_HEADER_LIBRARIES := \
-    libaudiopolicycommon \
-    libaudiopolicyengine_interface_headers \
-    libaudiopolicymanager_interface_headers
-
-LOCAL_CFLAGS := -Wall -Werror
-
-LOCAL_MODULE:= libaudiopolicymanager
-
-include $(BUILD_SHARED_LIBRARY)
-
-endif #ifneq ($(USE_CUSTOM_AUDIO_POLICY), 1)
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index c5c13e9..4a3e31f 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -29,29 +29,26 @@
 #define ALOGVV(a...) do { } while(0)
 #endif
 
-#define AUDIO_POLICY_XML_CONFIG_FILE_PATH_MAX_LENGTH 128
-#define AUDIO_POLICY_XML_CONFIG_FILE_NAME "audio_policy_configuration.xml"
-#define AUDIO_POLICY_A2DP_OFFLOAD_DISABLED_XML_CONFIG_FILE_NAME \
-        "audio_policy_configuration_a2dp_offload_disabled.xml"
-#define AUDIO_POLICY_BLUETOOTH_LEGACY_HAL_XML_CONFIG_FILE_NAME \
-        "audio_policy_configuration_bluetooth_legacy_hal.xml"
-
 #include <algorithm>
 #include <inttypes.h>
 #include <math.h>
 #include <set>
 #include <unordered_set>
 #include <vector>
+
+#include <Serializer.h>
+#include <cutils/bitops.h>
 #include <cutils/properties.h>
-#include <utils/Log.h>
 #include <media/AudioParameter.h>
+#include <policy.h>
 #include <private/android_filesystem_config.h>
 #include <system/audio.h>
 #include <system/audio_config.h>
+#include <system/audio_effects/effect_hapticgenerator.h>
+#include <utils/Log.h>
+
 #include "AudioPolicyManager.h"
-#include <Serializer.h>
 #include "TypeConverter.h"
-#include <policy.h>
 
 namespace android {
 
@@ -461,7 +458,16 @@
             }
         }
     }
-
+    auto musicStrategy = streamToStrategy(AUDIO_STREAM_MUSIC);
+    for (size_t i = 0; i < mOutputs.size(); i++) {
+       sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
+       // mute media strategies and delay the device switch.
+       // This avoids sending the music tail into the earpiece or headset.
+       setStrategyMute(musicStrategy, true, desc);
+       setStrategyMute(musicStrategy, false, desc, MUTE_TIME_MS,
+          mEngine->getOutputDevicesForAttributes(attributes_initializer(AUDIO_USAGE_MEDIA),
+                                              nullptr, true /*fromCache*/).types());
+    }
     // Toggle the device state: UNAVAILABLE -> AVAILABLE
     // This will force reading again the device configuration
     status = setDeviceConnectionState(device,
@@ -893,7 +899,8 @@
     // Only honor audibility enforced when required. The client will be
     // forced to reconnect if the forced usage changes.
     if (mEngine->getForceUse(AUDIO_POLICY_FORCE_FOR_SYSTEM) != AUDIO_POLICY_FORCE_SYSTEM_ENFORCED) {
-        dstAttr->flags &= ~AUDIO_FLAG_AUDIBILITY_ENFORCED;
+        dstAttr->flags = static_cast<audio_flags_mask_t>(
+                dstAttr->flags & ~AUDIO_FLAG_AUDIBILITY_ENFORCED);
     }
 
     return NO_ERROR;
@@ -925,7 +932,7 @@
         return status;
     }
     if (auto it = mAllowedCapturePolicies.find(uid); it != end(mAllowedCapturePolicies)) {
-        resultAttr->flags |= it->second;
+        resultAttr->flags = static_cast<audio_flags_mask_t>(resultAttr->flags | it->second);
     }
     *stream = mEngine->getStreamTypeForAttributes(*resultAttr);
 
@@ -1243,7 +1250,8 @@
 
     // Discard haptic channel mask when forcing muting haptic channels.
     audio_channel_mask_t channelMask = forceMutingHaptic
-            ? (config->channel_mask & ~AUDIO_CHANNEL_HAPTIC_ALL) : config->channel_mask;
+            ? static_cast<audio_channel_mask_t>(config->channel_mask & ~AUDIO_CHANNEL_HAPTIC_ALL)
+            : config->channel_mask;
 
     // open a direct output if required by specified parameters
     //force direct flag if offload flag is set: offloading implies a direct output stream
@@ -1299,7 +1307,8 @@
 
         // at this stage we should ignore the DIRECT flag as no direct output could be found earlier
         *flags = (audio_output_flags_t)(*flags & ~AUDIO_OUTPUT_FLAG_DIRECT);
-        output = selectOutput(outputs, *flags, config->format, channelMask, config->sample_rate);
+        output = selectOutput(
+                outputs, *flags, config->format, channelMask, config->sample_rate, session);
     }
     ALOGW_IF((output == 0), "getOutputForDevices() could not find output for stream %d, "
             "sampling rate %d, format %#x, channels %#x, flags %#x",
@@ -1472,14 +1481,26 @@
 }
 
 audio_io_handle_t AudioPolicyManager::selectOutput(const SortedVector<audio_io_handle_t>& outputs,
-                                                       audio_output_flags_t flags,
-                                                       audio_format_t format,
-                                                       audio_channel_mask_t channelMask,
-                                                       uint32_t samplingRate)
+                                                   audio_output_flags_t flags,
+                                                   audio_format_t format,
+                                                   audio_channel_mask_t channelMask,
+                                                   uint32_t samplingRate,
+                                                   audio_session_t sessionId)
 {
     LOG_ALWAYS_FATAL_IF(!(format == AUDIO_FORMAT_INVALID || audio_is_linear_pcm(format)),
         "%s called with format %#x", __func__, format);
 
+    // Return the output that the haptic-generating effect is attached to when 1) a session id is
+    // specified, 2) a haptic-generating effect exists for the given session id and 3) the output
+    // that the effect is attached to is among the given outputs.
+    if (sessionId != AUDIO_SESSION_NONE) {
+        audio_io_handle_t hapticGeneratingOutput = mEffects.getIoForSession(
+                sessionId, FX_IID_HAPTICGENERATOR);
+        if (outputs.indexOf(hapticGeneratingOutput) >= 0) {
+            return hapticGeneratingOutput;
+        }
+    }
+
     // Flags disqualifying an output: the match must happen before calling selectOutput()
     static const audio_output_flags_t kExcludedFlags = (audio_output_flags_t)
         (AUDIO_OUTPUT_FLAG_HW_AV_SYNC | AUDIO_OUTPUT_FLAG_MMAP_NOIRQ | AUDIO_OUTPUT_FLAG_DIRECT);
@@ -1765,7 +1786,8 @@
         checkAndSetVolume(curves, client->volumeSource(),
                           curves.getVolumeIndex(outputDesc->devices().types()),
                           outputDesc,
-                          outputDesc->devices().types());
+                          outputDesc->devices().types(), 0 /*delay*/,
+                          outputDesc->useHwGain() /*force*/);
 
         // update the outputs if starting an output with a stream that can affect notification
         // routing
@@ -2256,7 +2278,7 @@
     sp<AudioInputDescriptor> inputDesc = mInputs.getInputForClient(portId);
     if (inputDesc == 0) {
         ALOGW("%s no input for client %d", __FUNCTION__, portId);
-        return BAD_VALUE;
+        return DEAD_OBJECT;
     }
     audio_io_handle_t input = inputDesc->mIoHandle;
     sp<RecordClientDescriptor> client = inputDesc->getClient(portId);
@@ -3083,16 +3105,16 @@
 
 // Returns true if all devices types match the predicate and are supported by one HW module
 bool  AudioPolicyManager::areAllDevicesSupported(
-        const Vector<AudioDeviceTypeAddr>& devices,
+        const AudioDeviceTypeAddrVector& devices,
         std::function<bool(audio_devices_t)> predicate,
         const char *context) {
     for (size_t i = 0; i < devices.size(); i++) {
         sp<DeviceDescriptor> devDesc = mHwModules.getDeviceDescriptor(
-                devices[i].mType, devices[i].mAddress.c_str(), String8(),
+                devices[i].mType, devices[i].getAddress(), String8(),
                 AUDIO_FORMAT_DEFAULT, false /*allowToCreate*/, true /*matchAddress*/);
         if (devDesc == nullptr || (predicate != nullptr && !predicate(devices[i].mType))) {
-            ALOGE("%s: device type %#x address %s not supported or not an output device",
-                    context, devices[i].mType, devices[i].mAddress.c_str());
+            ALOGE("%s: device type %#x address %s not supported or not match predicate",
+                    context, devices[i].mType, devices[i].getAddress());
             return false;
         }
     }
@@ -3100,7 +3122,7 @@
 }
 
 status_t AudioPolicyManager::setUidDeviceAffinities(uid_t uid,
-        const Vector<AudioDeviceTypeAddr>& devices) {
+        const AudioDeviceTypeAddrVector& devices) {
     ALOGV("%s() uid=%d num devices %zu", __FUNCTION__, uid, devices.size());
     if (!areAllDevicesSupported(devices, audio_is_output_device, __func__)) {
         return BAD_VALUE;
@@ -3132,20 +3154,19 @@
     return res;
 }
 
-status_t AudioPolicyManager::setPreferredDeviceForStrategy(product_strategy_t strategy,
-                                                   const AudioDeviceTypeAddr &device) {
-    ALOGV("%s() strategy=%d device=%08x addr=%s", __FUNCTION__,
-            strategy, device.mType, device.mAddress.c_str());
+status_t AudioPolicyManager::setDevicesRoleForStrategy(product_strategy_t strategy,
+                                                       device_role_t role,
+                                                       const AudioDeviceTypeAddrVector &devices) {
+    ALOGV("%s() strategy=%d role=%d %s", __func__, strategy, role,
+            dumpAudioDeviceTypeAddrVector(devices).c_str());
 
-    Vector<AudioDeviceTypeAddr> devices;
-    devices.add(device);
     if (!areAllDevicesSupported(devices, audio_is_output_device, __func__)) {
         return BAD_VALUE;
     }
-    status_t status = mEngine->setPreferredDeviceForStrategy(strategy, device);
+    status_t status = mEngine->setDevicesRoleForStrategy(strategy, role, devices);
     if (status != NO_ERROR) {
-        ALOGW("Engine could not set preferred device %08x %s for strategy %d",
-                device.mType, device.mAddress.c_str(), strategy);
+        ALOGW("Engine could not set preferred devices %s for strategy %d role %d",
+                dumpAudioDeviceTypeAddrVector(devices).c_str(), strategy, role);
         return status;
     }
 
@@ -3161,6 +3182,8 @@
     if (mEngine->getPhoneState() == AUDIO_MODE_IN_CALL && hasPrimaryOutput()) {
         DeviceVector newDevices = getNewOutputDevices(mPrimaryOutput, true /*fromCache*/);
         waitMs = updateCallRouting(newDevices, delayMs);
+        // Only apply special touch sound delay once
+        delayMs = 0;
     }
     for (size_t i = 0; i < mOutputs.size(); i++) {
         sp<SwAudioOutputDescriptor> outputDesc = mOutputs.valueAt(i);
@@ -3170,6 +3193,8 @@
             // preventing the force re-routing in case of default dev that distinguishes on address.
             // Let's give back to engine full device choice decision however.
             waitMs = setOutputDevices(outputDesc, newDevices, !newDevices.isEmpty(), delayMs);
+            // Only apply special touch sound delay once
+            delayMs = 0;
         }
         if (forceVolumeReeval && !newDevices.isEmpty()) {
             applyStreamVolumes(outputDesc, newDevices.types(), waitMs, true);
@@ -3177,11 +3202,12 @@
     }
 }
 
-status_t AudioPolicyManager::removePreferredDeviceForStrategy(product_strategy_t strategy)
+status_t AudioPolicyManager::removeDevicesRoleForStrategy(product_strategy_t strategy,
+                                                          device_role_t role)
 {
-    ALOGI("%s() strategy=%d", __FUNCTION__, strategy);
+    ALOGI("%s() strategy=%d role=%d", __func__, strategy, role);
 
-    status_t status = mEngine->removePreferredDeviceForStrategy(strategy);
+    status_t status = mEngine->removeDevicesRoleForStrategy(strategy, role);
     if (status != NO_ERROR) {
         ALOGW("Engine could not remove preferred device for strategy %d", strategy);
         return status;
@@ -3193,14 +3219,81 @@
     return NO_ERROR;
 }
 
-status_t AudioPolicyManager::getPreferredDeviceForStrategy(product_strategy_t strategy,
-                                                   AudioDeviceTypeAddr &device) {
-    return mEngine->getPreferredDeviceForStrategy(strategy, device);
+status_t AudioPolicyManager::getDevicesForRoleAndStrategy(product_strategy_t strategy,
+                                                          device_role_t role,
+                                                          AudioDeviceTypeAddrVector &devices) {
+    return mEngine->getDevicesForRoleAndStrategy(strategy, role, devices);
+}
+
+status_t AudioPolicyManager::setDevicesRoleForCapturePreset(
+        audio_source_t audioSource, device_role_t role, const AudioDeviceTypeAddrVector &devices) {
+    ALOGV("%s() audioSource=%d role=%d %s", __func__, audioSource, role,
+            dumpAudioDeviceTypeAddrVector(devices).c_str());
+
+    if (!areAllDevicesSupported(devices, audio_call_is_input_device, __func__)) {
+        return BAD_VALUE;
+    }
+    status_t status = mEngine->setDevicesRoleForCapturePreset(audioSource, role, devices);
+    ALOGW_IF(status != NO_ERROR,
+            "Engine could not set preferred devices %s for audio source %d role %d",
+            dumpAudioDeviceTypeAddrVector(devices).c_str(), audioSource, role);
+
+    return status;
+}
+
+status_t AudioPolicyManager::addDevicesRoleForCapturePreset(
+        audio_source_t audioSource, device_role_t role, const AudioDeviceTypeAddrVector &devices) {
+    ALOGV("%s() audioSource=%d role=%d %s", __func__, audioSource, role,
+            dumpAudioDeviceTypeAddrVector(devices).c_str());
+
+    if (!areAllDevicesSupported(devices, audio_call_is_input_device, __func__)) {
+        return BAD_VALUE;
+    }
+    status_t status = mEngine->addDevicesRoleForCapturePreset(audioSource, role, devices);
+    ALOGW_IF(status != NO_ERROR,
+            "Engine could not add preferred devices %s for audio source %d role %d",
+            dumpAudioDeviceTypeAddrVector(devices).c_str(), audioSource, role);
+
+    return status;
+}
+
+status_t AudioPolicyManager::removeDevicesRoleForCapturePreset(
+        audio_source_t audioSource, device_role_t role, const AudioDeviceTypeAddrVector& devices)
+{
+    ALOGV("%s() audioSource=%d role=%d devices=%s", __func__, audioSource, role,
+            dumpAudioDeviceTypeAddrVector(devices).c_str());
+
+    if (!areAllDevicesSupported(devices, audio_call_is_input_device, __func__)) {
+        return BAD_VALUE;
+    }
+
+    status_t status = mEngine->removeDevicesRoleForCapturePreset(
+            audioSource, role, devices);
+    ALOGW_IF(status != NO_ERROR,
+            "Engine could not remove devices role (%d) for capture preset %d", role, audioSource);
+
+    return status;
+}
+
+status_t AudioPolicyManager::clearDevicesRoleForCapturePreset(audio_source_t audioSource,
+                                                              device_role_t role) {
+    ALOGV("%s() audioSource=%d role=%d", __func__, audioSource, role);
+
+    status_t status = mEngine->clearDevicesRoleForCapturePreset(audioSource, role);
+    ALOGW_IF(status != NO_ERROR,
+            "Engine could not clear devices role (%d) for capture preset %d", role, audioSource);
+
+    return status;
+}
+
+status_t AudioPolicyManager::getDevicesForRoleAndCapturePreset(
+        audio_source_t audioSource, device_role_t role, AudioDeviceTypeAddrVector &devices) {
+    return mEngine->getDevicesForRoleAndCapturePreset(audioSource, role, devices);
 }
 
 status_t AudioPolicyManager::setUserIdDeviceAffinities(int userId,
-        const Vector<AudioDeviceTypeAddr>& devices) {
-    ALOGI("%s() userId=%d num devices %zu", __FUNCTION__, userId, devices.size());\
+        const AudioDeviceTypeAddrVector& devices) {
+    ALOGI("%s() userId=%d num devices %zu", __func__, userId, devices.size());
     if (!areAllDevicesSupported(devices, audio_is_output_device, __func__)) {
         return BAD_VALUE;
     }
@@ -4441,37 +4534,15 @@
 }
 
 static status_t deserializeAudioPolicyXmlConfig(AudioPolicyConfig &config) {
-    char audioPolicyXmlConfigFile[AUDIO_POLICY_XML_CONFIG_FILE_PATH_MAX_LENGTH];
-    std::vector<const char*> fileNames;
-    status_t ret;
-
-    if (property_get_bool("ro.bluetooth.a2dp_offload.supported", false)) {
-        if (property_get_bool("persist.bluetooth.bluetooth_audio_hal.disabled", false) &&
-            property_get_bool("persist.bluetooth.a2dp_offload.disabled", false)) {
-            // Both BluetoothAudio@2.0 and BluetoothA2dp@1.0 (Offlaod) are disabled, and uses
-            // the legacy hardware module for A2DP and hearing aid.
-            fileNames.push_back(AUDIO_POLICY_BLUETOOTH_LEGACY_HAL_XML_CONFIG_FILE_NAME);
-        } else if (property_get_bool("persist.bluetooth.a2dp_offload.disabled", false)) {
-            // A2DP offload supported but disabled: try to use special XML file
-            fileNames.push_back(AUDIO_POLICY_A2DP_OFFLOAD_DISABLED_XML_CONFIG_FILE_NAME);
+    if (std::string audioPolicyXmlConfigFile = audio_get_audio_policy_config_file();
+            !audioPolicyXmlConfigFile.empty()) {
+        status_t ret = deserializeAudioPolicyFile(audioPolicyXmlConfigFile.c_str(), &config);
+        if (ret == NO_ERROR) {
+            config.setSource(audioPolicyXmlConfigFile);
         }
-    } else if (property_get_bool("persist.bluetooth.bluetooth_audio_hal.disabled", false)) {
-        fileNames.push_back(AUDIO_POLICY_BLUETOOTH_LEGACY_HAL_XML_CONFIG_FILE_NAME);
+        return ret;
     }
-    fileNames.push_back(AUDIO_POLICY_XML_CONFIG_FILE_NAME);
-
-    for (const char* fileName : fileNames) {
-        for (const auto& path : audio_get_configuration_paths()) {
-            snprintf(audioPolicyXmlConfigFile, sizeof(audioPolicyXmlConfigFile),
-                     "%s/%s", path.c_str(), fileName);
-            ret = deserializeAudioPolicyFile(audioPolicyXmlConfigFile, &config);
-            if (ret == NO_ERROR) {
-                config.setSource(audioPolicyXmlConfigFile);
-                return ret;
-            }
-        }
-    }
-    return ret;
+    return BAD_VALUE;
 }
 
 AudioPolicyManager::AudioPolicyManager(AudioPolicyClientInterface *clientInterface,
@@ -5258,7 +5329,7 @@
             if (status != OK) {
                 continue;
             }
-            if (client->getPrimaryMix() != primaryMix) {
+            if (client->getPrimaryMix() != primaryMix || client->hasLostPrimaryMix()) {
                 invalidate = true;
                 if (desc->isStrategyActive(psId)) {
                     maxLatency = desc->latency();
@@ -5289,7 +5360,8 @@
                                                                      client->flags(),
                                                                      client->config().format,
                                                                      client->config().channel_mask,
-                                                                     client->config().sample_rate);
+                                                                     client->config().sample_rate,
+                                                                     client->session());
                     if (newOutput != srcOut) {
                         invalidate = true;
                         break;
@@ -5446,6 +5518,12 @@
         }
     }
 
+    // Do not retrieve engine device for outputs through MSD
+    // TODO: support explicit routing requests by resetting MSD patch to engine device.
+    if (outputDesc->devices() == getMsdAudioOutDevices()) {
+        return outputDesc->devices();
+    }
+
     // Honor explicit routing requests only if no client using default routing is active on this
     // input: a specific app can not force routing for other apps by setting a preferred device.
     bool active; // unused
@@ -5533,8 +5611,8 @@
     }
     DeviceVector activeDevices;
     DeviceVector devices;
-    for (audio_stream_type_t curStream = AUDIO_STREAM_MIN; curStream < AUDIO_STREAM_PUBLIC_CNT;
-         curStream = (audio_stream_type_t) (curStream + 1)) {
+    for (int i = AUDIO_STREAM_MIN; i < AUDIO_STREAM_PUBLIC_CNT; ++i) {
+        const audio_stream_type_t curStream{static_cast<audio_stream_type_t>(i)};
         if (!streamsMatchForvolume(stream, curStream)) {
             continue;
         }
@@ -5648,7 +5726,7 @@
             sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
             setVolumeSourceMute(ttsVolumeSource, mute/*on*/, desc, 0 /*delay*/, DeviceTypeSet());
             const uint32_t latency = desc->latency() * 2;
-            if (latency > maxLatency) {
+            if (desc->isActive(latency * 2) && latency > maxLatency) {
                 maxLatency = latency;
             }
         }
@@ -6031,7 +6109,8 @@
     if (!Intersection(deviceTypes,
             {AUDIO_DEVICE_OUT_BLUETOOTH_A2DP, AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES,
              AUDIO_DEVICE_OUT_WIRED_HEADSET, AUDIO_DEVICE_OUT_WIRED_HEADPHONE,
-             AUDIO_DEVICE_OUT_USB_HEADSET, AUDIO_DEVICE_OUT_HEARING_AID}).empty() &&
+             AUDIO_DEVICE_OUT_USB_HEADSET, AUDIO_DEVICE_OUT_HEARING_AID,
+             AUDIO_DEVICE_OUT_BLE_HEADSET}).empty() &&
             ((volumeSource == alarmVolumeSrc ||
               volumeSource == ringVolumeSrc) ||
              (volumeSource == toVolumeSource(AUDIO_STREAM_NOTIFICATION)) ||
@@ -6135,7 +6214,9 @@
              (isBtScoVolSrc && forceUseForComm != AUDIO_POLICY_FORCE_BT_SCO))) {
         ALOGV("%s cannot set volume group %d volume with force use = %d for comm", __func__,
              volumeSource, forceUseForComm);
-        return INVALID_OPERATION;
+        // Do not return an error here as AudioService will always set both voice call
+        // and bluetooth SCO volumes due to stream aliasing.
+        return NO_ERROR;
     }
     if (deviceTypes.empty()) {
         deviceTypes = outputDesc->devices().types();
@@ -6143,9 +6224,8 @@
 
     float volumeDb = computeVolume(curves, volumeSource, index, deviceTypes);
     if (outputDesc->isFixedVolume(deviceTypes) ||
-            // Force VoIP volume to max for bluetooth SCO
-
-            ((isVoiceVolSrc || isBtScoVolSrc) &&
+            // Force VoIP volume to max for bluetooth SCO device except if muted
+            (index != 0 && (isVoiceVolSrc || isBtScoVolSrc) &&
                     isSingleDeviceType(deviceTypes, audio_is_bluetooth_out_sco_device))) {
         volumeDb = 0.0f;
     }
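
Among the changes above, selectOutput() now pins a track to the output that already hosts the session's haptic-generating effect, so audio and haptics stay on the same stream. A simplified sketch of that rule with stand-in handle types and a plain map in place of the mEffects lookup:

```cpp
// Sketch of the haptic-generator routing rule added to selectOutput(): if the
// session's haptic-generating effect is attached to one of the candidate
// outputs, reuse that output. Stand-in types, not the real mEffects collection.
#include <map>
#include <vector>

using audio_io_handle_t = int;   // stand-in
using audio_session_t = int;     // stand-in
constexpr audio_session_t AUDIO_SESSION_NONE = 0;

audio_io_handle_t selectOutput(const std::vector<audio_io_handle_t>& candidates,
                               audio_session_t session,
                               const std::map<audio_session_t, audio_io_handle_t>& hapticIoForSession,
                               audio_io_handle_t fallback) {
    if (session != AUDIO_SESSION_NONE) {
        auto it = hapticIoForSession.find(session);
        if (it != hapticIoForSession.end()) {
            for (audio_io_handle_t h : candidates) {
                if (h == it->second) return h;  // reuse the haptic effect's output
            }
        }
    }
    return fallback;  // otherwise fall through to the normal scoring rules
}
```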
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index b588f89..217013f 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -263,17 +263,42 @@
         virtual status_t registerPolicyMixes(const Vector<AudioMix>& mixes);
         virtual status_t unregisterPolicyMixes(Vector<AudioMix> mixes);
         virtual status_t setUidDeviceAffinities(uid_t uid,
-                const Vector<AudioDeviceTypeAddr>& devices);
+                const AudioDeviceTypeAddrVector& devices);
         virtual status_t removeUidDeviceAffinities(uid_t uid);
         virtual status_t setUserIdDeviceAffinities(int userId,
-                const Vector<AudioDeviceTypeAddr>& devices);
+                const AudioDeviceTypeAddrVector& devices);
         virtual status_t removeUserIdDeviceAffinities(int userId);
 
-        virtual status_t setPreferredDeviceForStrategy(product_strategy_t strategy,
-                                                   const AudioDeviceTypeAddr &device);
-        virtual status_t removePreferredDeviceForStrategy(product_strategy_t strategy);
-        virtual status_t getPreferredDeviceForStrategy(product_strategy_t strategy,
-                                                   AudioDeviceTypeAddr &device);
+        virtual status_t setDevicesRoleForStrategy(product_strategy_t strategy,
+                                                   device_role_t role,
+                                                   const AudioDeviceTypeAddrVector &devices);
+
+        virtual status_t removeDevicesRoleForStrategy(product_strategy_t strategy,
+                                                      device_role_t role);
+
+
+        virtual status_t getDevicesForRoleAndStrategy(product_strategy_t strategy,
+                                                      device_role_t role,
+                                                      AudioDeviceTypeAddrVector &devices);
+
+        virtual status_t setDevicesRoleForCapturePreset(audio_source_t audioSource,
+                                                        device_role_t role,
+                                                        const AudioDeviceTypeAddrVector &devices);
+
+        virtual status_t addDevicesRoleForCapturePreset(audio_source_t audioSource,
+                                                        device_role_t role,
+                                                        const AudioDeviceTypeAddrVector &devices);
+
+        virtual status_t removeDevicesRoleForCapturePreset(
+                audio_source_t audioSource, device_role_t role,
+                const AudioDeviceTypeAddrVector& devices);
+
+        virtual status_t clearDevicesRoleForCapturePreset(audio_source_t audioSource,
+                                                          device_role_t role);
+
+        virtual status_t getDevicesForRoleAndCapturePreset(audio_source_t audioSource,
+                                                           device_role_t role,
+                                                           AudioDeviceTypeAddrVector &devices);
 
         virtual status_t startAudioSource(const struct audio_port_config *source,
                                           const audio_attributes_t *attributes,
@@ -608,7 +633,8 @@
                                        audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE,
                                        audio_format_t format = AUDIO_FORMAT_INVALID,
                                        audio_channel_mask_t channelMask = AUDIO_CHANNEL_NONE,
-                                       uint32_t samplingRate = 0);
+                                       uint32_t samplingRate = 0,
+                                       audio_session_t sessionId = AUDIO_SESSION_NONE);
         // samplingRate, format, channelMask are in/out and so may be modified
         sp<IOProfile> getInputProfile(const sp<DeviceDescriptor> & device,
                                       uint32_t& samplingRate,
@@ -938,7 +964,7 @@
                 sp<AudioPatch> *patchDescPtr);
 
         bool areAllDevicesSupported(
-                const Vector<AudioDeviceTypeAddr>& devices,
+                const AudioDeviceTypeAddrVector& devices,
                 std::function<bool(audio_devices_t)> predicate,
                 const char* context);
 
diff --git a/services/audiopolicy/service/Android.bp b/services/audiopolicy/service/Android.bp
new file mode 100644
index 0000000..8a7a1b2
--- /dev/null
+++ b/services/audiopolicy/service/Android.bp
@@ -0,0 +1,55 @@
+cc_library_shared {
+    name: "libaudiopolicyservice",
+
+    srcs: [
+        "AudioPolicyClientImpl.cpp",
+        "AudioPolicyEffects.cpp",
+        "AudioPolicyInterfaceImpl.cpp",
+        "AudioPolicyService.cpp",
+        "CaptureStateNotifier.cpp",
+    ],
+
+    include_dirs: [
+        "frameworks/av/services/audioflinger"
+    ],
+
+    shared_libs: [
+        "libaudioclient",
+        "libaudiofoundation",
+        "libaudiopolicymanager",
+        "libaudioutils",
+        "libbinder",
+        "libcutils",
+        "libeffectsconfig",
+        "libhardware_legacy",
+        "liblog",
+        "libmedia_helper",
+        "libmediametrics",
+        "libmediautils",
+        "libsensorprivacy",
+        "libutils",
+        "capture_state_listener-aidl-cpp",
+    ],
+
+    static_libs: [
+        "libaudiopolicycomponents",
+    ],
+
+    header_libs: [
+        "libaudiopolicycommon",
+        "libaudiopolicyengine_interface_headers",
+        "libaudiopolicymanager_interface_headers",
+        "libaudioutils_headers",
+    ],
+
+    cflags: [
+        "-fvisibility=hidden",
+        "-Werror",
+        "-Wall",
+        "-Wthread-safety",
+    ],
+
+    export_shared_lib_headers: [
+        "libsensorprivacy",
+    ],
+}
diff --git a/services/audiopolicy/service/Android.mk b/services/audiopolicy/service/Android.mk
deleted file mode 100644
index 680b077..0000000
--- a/services/audiopolicy/service/Android.mk
+++ /dev/null
@@ -1,50 +0,0 @@
-LOCAL_PATH:= $(call my-dir)
-
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES:= \
-    AudioPolicyService.cpp \
-    AudioPolicyEffects.cpp \
-    AudioPolicyInterfaceImpl.cpp \
-    AudioPolicyClientImpl.cpp \
-    CaptureStateNotifier.cpp
-
-LOCAL_C_INCLUDES := \
-    frameworks/av/services/audioflinger \
-    $(call include-path-for, audio-utils)
-
-LOCAL_HEADER_LIBRARIES := \
-    libaudiopolicycommon \
-    libaudiopolicyengine_interface_headers \
-    libaudiopolicymanager_interface_headers
-
-LOCAL_SHARED_LIBRARIES := \
-    libcutils \
-    libutils \
-    liblog \
-    libbinder \
-    libaudioclient \
-    libaudioutils \
-    libaudiofoundation \
-    libhardware_legacy \
-    libaudiopolicymanager \
-    libmedia_helper \
-    libmediametrics \
-    libmediautils \
-    libeffectsconfig \
-    libsensorprivacy \
-    capture_state_listener-aidl-cpp
-
-LOCAL_EXPORT_SHARED_LIBRARY_HEADERS := \
-    libsensorprivacy
-
-LOCAL_STATIC_LIBRARIES := \
-    libaudiopolicycomponents
-
-LOCAL_MODULE:= libaudiopolicyservice
-
-LOCAL_CFLAGS += -fvisibility=hidden
-LOCAL_CFLAGS += -Wall -Werror -Wthread-safety
-
-include $(BUILD_SHARED_LIBRARY)
-
diff --git a/services/audiopolicy/service/AudioPolicyEffects.cpp b/services/audiopolicy/service/AudioPolicyEffects.cpp
index 1ec0c5e..b738633 100644
--- a/services/audiopolicy/service/AudioPolicyEffects.cpp
+++ b/services/audiopolicy/service/AudioPolicyEffects.cpp
@@ -121,8 +121,8 @@
         Vector <EffectDesc *> effects = mInputSources.valueAt(index)->mEffects;
         for (size_t i = 0; i < effects.size(); i++) {
             EffectDesc *effect = effects[i];
-            sp<AudioEffect> fx = new AudioEffect(NULL, String16("android"), &effect->mUuid, -1, 0,
-                                                 0, audioSession, input);
+            sp<AudioEffect> fx = new AudioEffect(String16("android"));
+            fx->set(NULL, &effect->mUuid, -1, 0, 0, audioSession, input);
             status_t status = fx->initCheck();
             if (status != NO_ERROR && status != ALREADY_EXISTS) {
                 ALOGW("addInputEffects(): failed to create Fx %s on source %d",
@@ -270,8 +270,8 @@
         Vector <EffectDesc *> effects = mOutputStreams.valueAt(index)->mEffects;
         for (size_t i = 0; i < effects.size(); i++) {
             EffectDesc *effect = effects[i];
-            sp<AudioEffect> fx = new AudioEffect(NULL, String16("android"), &effect->mUuid, 0, 0, 0,
-                                                 audioSession, output);
+            sp<AudioEffect> fx = new AudioEffect(String16("android"));
+            fx->set(NULL, &effect->mUuid, 0, 0, 0, audioSession, output);
             status_t status = fx->initCheck();
             if (status != NO_ERROR && status != ALREADY_EXISTS) {
                 ALOGE("addOutputSessionEffects(): failed to create Fx  %s on session %d",
@@ -970,11 +970,11 @@
     for (const auto& deviceEffectsIter : mDeviceEffects) {
         const auto& deviceEffects =  deviceEffectsIter.second;
         for (const auto& effectDesc : deviceEffects->mEffectDescriptors->mEffects) {
-            auto fx = std::make_unique<AudioEffect>(
-                        EFFECT_UUID_NULL, String16("android"), &effectDesc->mUuid, 0, nullptr,
-                        nullptr, AUDIO_SESSION_DEVICE, AUDIO_IO_HANDLE_NONE,
-                        AudioDeviceTypeAddr{deviceEffects->getDeviceType(),
-                                            deviceEffects->getDeviceAddress()});
+            auto fx = std::make_unique<AudioEffect>(String16("android"));
+            fx->set(EFFECT_UUID_NULL, &effectDesc->mUuid, 0, nullptr,
+                    nullptr, AUDIO_SESSION_DEVICE, AUDIO_IO_HANDLE_NONE,
+                    AudioDeviceTypeAddr{deviceEffects->getDeviceType(),
+                                        deviceEffects->getDeviceAddress()});
             status_t status = fx->initCheck();
             if (status != NO_ERROR && status != ALREADY_EXISTS) {
                 ALOGE("%s(): failed to create Fx %s on port type=%d address=%s", __func__,
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index 34d07b6..df8e4c5 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -244,11 +244,12 @@
         uid = callingUid;
     }
     if (!mPackageManager.allowPlaybackCapture(uid)) {
-        attr->flags |= AUDIO_FLAG_NO_MEDIA_PROJECTION;
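+        // The bitwise OR yields an int, so the result must be cast back to audio_flags_mask_t.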
+        attr->flags = static_cast<audio_flags_mask_t>(attr->flags | AUDIO_FLAG_NO_MEDIA_PROJECTION);
     }
     if (((attr->flags & (AUDIO_FLAG_BYPASS_INTERRUPTION_POLICY|AUDIO_FLAG_BYPASS_MUTE)) != 0)
             && !bypassInterruptionPolicyAllowed(pid, uid)) {
-        attr->flags &= ~(AUDIO_FLAG_BYPASS_INTERRUPTION_POLICY|AUDIO_FLAG_BYPASS_MUTE);
+        attr->flags = static_cast<audio_flags_mask_t>(
+                attr->flags & ~(AUDIO_FLAG_BYPASS_INTERRUPTION_POLICY|AUDIO_FLAG_BYPASS_MUTE));
     }
     AutoCallerClear acc;
     AudioPolicyInterface::output_type_t outputType;
@@ -1257,7 +1258,7 @@
 }
 
 status_t AudioPolicyService::setUidDeviceAffinities(uid_t uid,
-        const Vector<AudioDeviceTypeAddr>& devices) {
+        const AudioDeviceTypeAddrVector& devices) {
     Mutex::Autolock _l(mLock);
     if(!modifyAudioRoutingAllowed()) {
         return PERMISSION_DENIED;
@@ -1282,7 +1283,7 @@
 }
 
 status_t AudioPolicyService::setUserIdDeviceAffinities(int userId,
-        const Vector<AudioDeviceTypeAddr>& devices) {
+        const AudioDeviceTypeAddrVector& devices) {
     Mutex::Autolock _l(mLock);
     if(!modifyAudioRoutingAllowed()) {
         return PERMISSION_DENIED;
@@ -1494,33 +1495,36 @@
     return mAudioPolicyManager->isCallScreenModeSupported();
 }
 
-status_t AudioPolicyService::setPreferredDeviceForStrategy(product_strategy_t strategy,
-                                                   const AudioDeviceTypeAddr &device)
+status_t AudioPolicyService::setDevicesRoleForStrategy(product_strategy_t strategy,
+                                                       device_role_t role,
+                                                       const AudioDeviceTypeAddrVector &devices)
 {
     if (mAudioPolicyManager == NULL) {
         return NO_INIT;
     }
     Mutex::Autolock _l(mLock);
-    return mAudioPolicyManager->setPreferredDeviceForStrategy(strategy, device);
+    return mAudioPolicyManager->setDevicesRoleForStrategy(strategy, role, devices);
 }
 
-status_t AudioPolicyService::removePreferredDeviceForStrategy(product_strategy_t strategy)
+status_t AudioPolicyService::removeDevicesRoleForStrategy(product_strategy_t strategy,
+                                                          device_role_t role)
 {
     if (mAudioPolicyManager == NULL) {
         return NO_INIT;
     }
     Mutex::Autolock _l(mLock);
-    return mAudioPolicyManager->removePreferredDeviceForStrategy(strategy);
+    return mAudioPolicyManager->removeDevicesRoleForStrategy(strategy, role);
 }
 
-status_t AudioPolicyService::getPreferredDeviceForStrategy(product_strategy_t strategy,
-                                                   AudioDeviceTypeAddr &device)
+status_t AudioPolicyService::getDevicesForRoleAndStrategy(product_strategy_t strategy,
+                                                          device_role_t role,
+                                                          AudioDeviceTypeAddrVector &devices)
 {
     if (mAudioPolicyManager == NULL) {
         return NO_INIT;
     }
     Mutex::Autolock _l(mLock);
-    return mAudioPolicyManager->getPreferredDeviceForStrategy(strategy, device);
+    return mAudioPolicyManager->getDevicesForRoleAndStrategy(strategy, role, devices);
 }
 
 status_t AudioPolicyService::registerSoundTriggerCaptureStateListener(
@@ -1531,4 +1535,55 @@
     return NO_ERROR;
 }
 
+status_t AudioPolicyService::setDevicesRoleForCapturePreset(
+        audio_source_t audioSource, device_role_t role, const AudioDeviceTypeAddrVector &devices)
+{
+    if (mAudioPolicyManager == nullptr) {
+        return NO_INIT;
+    }
+    Mutex::Autolock _l(mLock);
+    return mAudioPolicyManager->setDevicesRoleForCapturePreset(audioSource, role, devices);
+}
+
+status_t AudioPolicyService::addDevicesRoleForCapturePreset(
+        audio_source_t audioSource, device_role_t role, const AudioDeviceTypeAddrVector &devices)
+{
+    if (mAudioPolicyManager == nullptr) {
+        return NO_INIT;
+    }
+    Mutex::Autolock _l(mLock);
+    return mAudioPolicyManager->addDevicesRoleForCapturePreset(audioSource, role, devices);
+}
+
+status_t AudioPolicyService::removeDevicesRoleForCapturePreset(
+        audio_source_t audioSource, device_role_t role, const AudioDeviceTypeAddrVector& devices)
+{
+    if (mAudioPolicyManager == nullptr) {
+        return NO_INIT;
+    }
+    Mutex::Autolock _l(mLock);
+    return mAudioPolicyManager->removeDevicesRoleForCapturePreset(audioSource, role, devices);
+}
+
+status_t AudioPolicyService::clearDevicesRoleForCapturePreset(audio_source_t audioSource,
+                                                              device_role_t role)
+{
+    if (mAudioPolicyManager == nullptr) {
+        return NO_INIT;
+    }
+    Mutex::Autolock _l(mLock);
+    return mAudioPolicyManager->clearDevicesRoleForCapturePreset(audioSource, role);
+}
+
+status_t AudioPolicyService::getDevicesForRoleAndCapturePreset(audio_source_t audioSource,
+                                                               device_role_t role,
+                                                               AudioDeviceTypeAddrVector &devices)
+{
+    if (mAudioPolicyManager == nullptr) {
+        return NO_INIT;
+    }
+    Mutex::Autolock _l(mLock);
+    return mAudioPolicyManager->getDevicesForRoleAndCapturePreset(audioSource, role, devices);
+}
+
 } // namespace android
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index e847f9f..a6e8989 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -488,9 +488,9 @@
         }
 
         bool isAccessibility = mUidPolicy->isA11yUid(current->uid);
-        // Clients capturing for Accessibility services are not considered
+        // Clients capturing for Accessibility services or virtual sources are not considered
         // for top or latest active to avoid masking regular clients started before
-        if (!isAccessibility) {
+        if (!isAccessibility && !isVirtualSource(current->attributes.source)) {
             bool isAssistant = mUidPolicy->isAssistantUid(current->uid);
             bool isPrivacySensitive =
                     (current->attributes.flags & AUDIO_FLAG_CAPTURE_PRIVATE) != 0;
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index 869a963..0b218c2 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -226,19 +226,41 @@
 
     virtual status_t registerPolicyMixes(const Vector<AudioMix>& mixes, bool registration);
 
-    virtual status_t setUidDeviceAffinities(uid_t uid, const Vector<AudioDeviceTypeAddr>& devices);
+    virtual status_t setUidDeviceAffinities(uid_t uid, const AudioDeviceTypeAddrVector& devices);
 
     virtual status_t removeUidDeviceAffinities(uid_t uid);
 
-    virtual status_t setPreferredDeviceForStrategy(product_strategy_t strategy,
-                                                   const AudioDeviceTypeAddr &device);
+    virtual status_t setDevicesRoleForStrategy(product_strategy_t strategy,
+                                               device_role_t role,
+                                               const AudioDeviceTypeAddrVector &devices);
 
-    virtual status_t removePreferredDeviceForStrategy(product_strategy_t strategy);
+    virtual status_t removeDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role);
 
+    virtual status_t getDevicesForRoleAndStrategy(product_strategy_t strategy,
+                                                  device_role_t role,
+                                                  AudioDeviceTypeAddrVector &devices);
 
-    virtual status_t getPreferredDeviceForStrategy(product_strategy_t strategy,
-                                                   AudioDeviceTypeAddr &device);
-    virtual status_t setUserIdDeviceAffinities(int userId, const Vector<AudioDeviceTypeAddr>& devices);
+    virtual status_t setDevicesRoleForCapturePreset(audio_source_t audioSource,
+                                                    device_role_t role,
+                                                    const AudioDeviceTypeAddrVector &devices);
+
+    virtual status_t addDevicesRoleForCapturePreset(audio_source_t audioSource,
+                                                    device_role_t role,
+                                                    const AudioDeviceTypeAddrVector &devices);
+
+    virtual status_t removeDevicesRoleForCapturePreset(
+            audio_source_t audioSource, device_role_t role,
+            const AudioDeviceTypeAddrVector& devices);
+
+    virtual status_t clearDevicesRoleForCapturePreset(audio_source_t audioSource,
+                                                      device_role_t role);
+
+    virtual status_t getDevicesForRoleAndCapturePreset(audio_source_t audioSource,
+                                                       device_role_t role,
+                                                       AudioDeviceTypeAddrVector &devices);
+
+    virtual status_t setUserIdDeviceAffinities(int userId,
+            const AudioDeviceTypeAddrVector& devices);
 
     virtual status_t removeUserIdDeviceAffinities(int userId);
 
diff --git a/services/audiopolicy/tests/Android.bp b/services/audiopolicy/tests/Android.bp
index efdb241..daedf31 100644
--- a/services/audiopolicy/tests/Android.bp
+++ b/services/audiopolicy/tests/Android.bp
@@ -18,7 +18,10 @@
         "libxml2",
     ],
 
-    static_libs: ["libaudiopolicycomponents"],
+    static_libs: [
+        "libaudiopolicycomponents",
+        "libgmock"
+    ],
 
     header_libs: [
         "libaudiopolicycommon",
@@ -42,6 +45,7 @@
 
 cc_test {
     name: "audio_health_tests",
+    require_root: true,
 
     shared_libs: [
         "libaudiofoundation",
diff --git a/services/audiopolicy/tests/AudioPolicyManagerTestClient.h b/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
index e1721ea..bdddf06 100644
--- a/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
+++ b/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
@@ -75,6 +75,10 @@
     status_t createAudioPatch(const struct audio_patch *patch,
                               audio_patch_handle_t *handle,
                               int /*delayMs*/) override {
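+        // If the caller passes an existing handle, drop the old patch entry before assigning a new one.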
+        auto iter = mActivePatches.find(*handle);
+        if (iter != mActivePatches.end()) {
+            mActivePatches.erase(*handle);
+        }
         *handle = mNextPatchHandle++;
         mActivePatches.insert(std::make_pair(*handle, *patch));
         return NO_ERROR;
diff --git a/services/audiopolicy/tests/audio_health_tests.cpp b/services/audiopolicy/tests/audio_health_tests.cpp
index b5c67a1..9a62e72 100644
--- a/services/audiopolicy/tests/audio_health_tests.cpp
+++ b/services/audiopolicy/tests/audio_health_tests.cpp
@@ -16,6 +16,7 @@
 
 #define LOG_TAG "AudioPolicy_Boot_Test"
 
+#include <string>
 #include <unordered_set>
 
 #include <gtest/gtest.h>
@@ -74,3 +75,43 @@
         ASSERT_NE(attachedDevices.end(), attachedDevices.find(desc->type()));
     }
 }
+
+TEST(AudioHealthTest, ConnectSupportedDevice) {
+    AudioPolicyManagerTestClient client;
+    AudioPolicyTestManager manager(&client);
+    manager.loadConfig();
+    ASSERT_NE("AudioPolicyConfig::setDefault", manager.getConfig().getSource());
+
+    DeviceVector devices;
+    for (const auto& hwModule : manager.getConfig().getHwModules()) {
+        for (const auto& profile : hwModule->getOutputProfiles()) {
+            devices.merge(profile->getSupportedDevices());
+        }
+        for (const auto& profile : hwModule->getInputProfiles()) {
+            devices.merge(profile->getSupportedDevices());
+        }
+    }
+    for (const auto& device : devices) {
+        if (!audio_is_bluetooth_out_sco_device(device->type()) &&
+            !audio_is_bluetooth_in_sco_device(device->type())) {
+            // There are two reasons to test only the connection of BT devices:
+            // 1) It is easier to construct a fake address for them.
+            // 2) This test runs in presubmit, so it makes sense to keep the test
+            //    processing time short.
+            continue;
+        }
+        std::string address = "11:22:33:44:55:66";
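+        // Toggle the connection state and verify that the reported state follows each transition.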
+        ASSERT_EQ(AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+                AudioSystem::getDeviceConnectionState(device->type(), address.c_str()));
+        ASSERT_EQ(NO_ERROR, AudioSystem::setDeviceConnectionState(
+                device->type(), AUDIO_POLICY_DEVICE_STATE_AVAILABLE, address.c_str(),
+                "" /*device_name*/, AUDIO_FORMAT_DEFAULT));
+        ASSERT_EQ(AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+                AudioSystem::getDeviceConnectionState(device->type(), address.c_str()));
+        ASSERT_EQ(NO_ERROR, AudioSystem::setDeviceConnectionState(
+                device->type(), AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE, address.c_str(),
+                "" /*device_name*/, AUDIO_FORMAT_DEFAULT));
+        ASSERT_EQ(AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+                AudioSystem::getDeviceConnectionState(device->type(), address.c_str()));
+    }
+}
diff --git a/services/audiopolicy/tests/audiopolicymanager_tests.cpp b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
index a0074bc..7972dbf 100644
--- a/services/audiopolicy/tests/audiopolicymanager_tests.cpp
+++ b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
@@ -20,6 +20,7 @@
 #include <unistd.h>
 
 #include <gtest/gtest.h>
+#include <gmock/gmock.h>
 
 #define LOG_TAG "APM_Test"
 #include <Serializer.h>
@@ -36,6 +37,7 @@
 #include "AudioPolicyTestManager.h"
 
 using namespace android;
+using testing::UnorderedElementsAre;
 
 TEST(AudioPolicyManagerTestInit, EngineFailure) {
     AudioPolicyTestClient client;
@@ -56,6 +58,34 @@
     ASSERT_EQ(NO_INIT, manager.initCheck());
 }
 
+// Verifies that a failure while loading a config doesn't leave
+// the APM config in a "dirty" state. Since the AudioPolicyConfig object
+// is a proxy for the data hosted by APM, it isn't possible
+// to "deep copy" it, and thus we have to test its elements
+// individually.
+TEST(AudioPolicyManagerTestInit, ConfigLoadingIsTransactional) {
+    AudioPolicyTestClient client;
+    AudioPolicyTestManager manager(&client);
+    ASSERT_TRUE(manager.getConfig().getHwModules().isEmpty());
+    ASSERT_TRUE(manager.getConfig().getInputDevices().isEmpty());
+    ASSERT_TRUE(manager.getConfig().getOutputDevices().isEmpty());
+    status_t status = deserializeAudioPolicyFile(
+            (base::GetExecutableDirectory() +
+                    "/test_invalid_audio_policy_configuration.xml").c_str(),
+            &manager.getConfig());
+    ASSERT_NE(NO_ERROR, status);
+    EXPECT_TRUE(manager.getConfig().getHwModules().isEmpty());
+    EXPECT_TRUE(manager.getConfig().getInputDevices().isEmpty());
+    EXPECT_TRUE(manager.getConfig().getOutputDevices().isEmpty());
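+    // Loading a valid configuration afterwards must succeed and populate the config.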
+    status = deserializeAudioPolicyFile(
+            (base::GetExecutableDirectory() + "/test_audio_policy_configuration.xml").c_str(),
+            &manager.getConfig());
+    ASSERT_EQ(NO_ERROR, status);
+    EXPECT_FALSE(manager.getConfig().getHwModules().isEmpty());
+    EXPECT_FALSE(manager.getConfig().getInputDevices().isEmpty());
+    EXPECT_FALSE(manager.getConfig().getOutputDevices().isEmpty());
+}
+
 
 class PatchCountCheck {
   public:
@@ -87,7 +117,7 @@
     void getOutputForAttr(
             audio_port_handle_t *selectedDeviceId,
             audio_format_t format,
-            int channelMask,
+            audio_channel_mask_t channelMask,
             int sampleRate,
             audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE,
             audio_io_handle_t *output = nullptr,
@@ -98,7 +128,7 @@
             audio_unique_id_t riid,
             audio_port_handle_t *selectedDeviceId,
             audio_format_t format,
-            int channelMask,
+            audio_channel_mask_t channelMask,
             int sampleRate,
             audio_input_flags_t flags = AUDIO_INPUT_FLAG_NONE,
             audio_port_handle_t *portId = nullptr);
@@ -164,7 +194,7 @@
 void AudioPolicyManagerTest::getOutputForAttr(
         audio_port_handle_t *selectedDeviceId,
         audio_format_t format,
-        int channelMask,
+        audio_channel_mask_t channelMask,
         int sampleRate,
         audio_output_flags_t flags,
         audio_io_handle_t *output,
@@ -194,7 +224,7 @@
         audio_unique_id_t riid,
         audio_port_handle_t *selectedDeviceId,
         audio_format_t format,
-        int channelMask,
+        audio_channel_mask_t channelMask,
         int sampleRate,
         audio_input_flags_t flags,
         audio_port_handle_t *portId) {
@@ -707,7 +737,8 @@
 
     audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
     audio_source_t source = AUDIO_SOURCE_REMOTE_SUBMIX;
-    audio_attributes_t attr = {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN, source, 0, ""};
+    audio_attributes_t attr = {
+        AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN, source, AUDIO_FLAG_NONE, ""};
     std::string tags = "addr=" + mMixAddress;
     strncpy(attr.tags, tags.c_str(), AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - 1);
     getInputForAttr(attr, mTracker->getRiid(), &selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT,
@@ -757,9 +788,9 @@
         AudioPolicyManagerTestDPPlaybackReRouting,
         testing::Values(
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_MEDIA,
-                                     AUDIO_SOURCE_DEFAULT, 0, ""},
+                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_ALARM,
-                                     AUDIO_SOURCE_DEFAULT, 0, ""}
+                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""}
                 )
         );
 
@@ -768,47 +799,47 @@
         AudioPolicyManagerTestDPPlaybackReRouting,
         testing::Values(
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_MEDIA,
-                                     AUDIO_SOURCE_DEFAULT, 0, "addr=remote_submix_media"},
+                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_VOICE_COMMUNICATION,
-                                     AUDIO_SOURCE_DEFAULT, 0, "addr=remote_submix_media"},
+                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
-                                     AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING,
-                                     AUDIO_SOURCE_DEFAULT, 0, "addr=remote_submix_media"},
+                    AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING,
+                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_ALARM,
-                                     AUDIO_SOURCE_DEFAULT, 0, "addr=remote_submix_media"},
+                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_NOTIFICATION,
-                                     AUDIO_SOURCE_DEFAULT, 0, "addr=remote_submix_media"},
+                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
-                                     AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE,
-                                     AUDIO_SOURCE_DEFAULT, 0, "addr=remote_submix_media"},
+                    AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE,
+                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
-                                     AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST,
-                                     AUDIO_SOURCE_DEFAULT, 0, "addr=remote_submix_media"},
+                    AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST,
+                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
-                                     AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT,
-                                     AUDIO_SOURCE_DEFAULT, 0, "addr=remote_submix_media"},
+                    AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT,
+                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
-                                     AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED,
-                                     AUDIO_SOURCE_DEFAULT, 0, "addr=remote_submix_media"},
+                    AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED,
+                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_NOTIFICATION_EVENT,
-                                     AUDIO_SOURCE_DEFAULT, 0, "addr=remote_submix_media"},
+                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
-                                     AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY,
-                                     AUDIO_SOURCE_DEFAULT, 0, "addr=remote_submix_media"},
+                    AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY,
+                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
-                                     AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
-                                     AUDIO_SOURCE_DEFAULT, 0, "addr=remote_submix_media"},
+                    AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
+                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
-                                     AUDIO_USAGE_ASSISTANCE_SONIFICATION,
-                                     AUDIO_SOURCE_DEFAULT, 0, "addr=remote_submix_media"},
+                    AUDIO_USAGE_ASSISTANCE_SONIFICATION,
+                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_GAME,
-                                     AUDIO_SOURCE_DEFAULT, 0, "addr=remote_submix_media"},
+                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_VIRTUAL_SOURCE,
-                                     AUDIO_SOURCE_DEFAULT, 0, "addr=remote_submix_media"},
+                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_ASSISTANT,
-                                     AUDIO_SOURCE_DEFAULT, 0, "addr=remote_submix_media"},
+                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_SPEECH, AUDIO_USAGE_ASSISTANT,
-                                     AUDIO_SOURCE_DEFAULT, 0, "addr=remote_submix_media"}
+                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"}
                 )
         );
 
@@ -817,41 +848,41 @@
         AudioPolicyManagerTestDPPlaybackReRouting,
         testing::Values(
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_VOICE_COMMUNICATION,
-                                     AUDIO_SOURCE_DEFAULT, 0, ""},
+                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
                                      AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING,
-                                     AUDIO_SOURCE_DEFAULT, 0, ""},
+                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_NOTIFICATION,
-                                     AUDIO_SOURCE_DEFAULT, 0, ""},
+                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
                                      AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE,
-                                     AUDIO_SOURCE_DEFAULT, 0, ""},
+                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
                                      AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST,
-                                     AUDIO_SOURCE_DEFAULT, 0, ""},
+                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
                                      AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT,
-                                     AUDIO_SOURCE_DEFAULT, 0, ""},
+                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
                                      AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED,
-                                     AUDIO_SOURCE_DEFAULT, 0, ""},
+                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_NOTIFICATION_EVENT,
-                                     AUDIO_SOURCE_DEFAULT, 0, ""},
+                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
                                      AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY,
-                                     AUDIO_SOURCE_DEFAULT, 0, ""},
+                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
                                      AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
-                                     AUDIO_SOURCE_DEFAULT, 0, ""},
+                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
                                      AUDIO_USAGE_ASSISTANCE_SONIFICATION,
-                                     AUDIO_SOURCE_DEFAULT, 0, ""},
+                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_GAME,
-                                     AUDIO_SOURCE_DEFAULT, 0, ""},
+                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_ASSISTANT,
-                                     AUDIO_SOURCE_DEFAULT, 0, ""},
+                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_SPEECH, AUDIO_USAGE_ASSISTANT,
-                                     AUDIO_SOURCE_DEFAULT, 0, ""}
+                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""}
                 )
         );
 
@@ -892,7 +923,8 @@
 
     audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
     audio_usage_t usage = AUDIO_USAGE_VIRTUAL_SOURCE;
-    audio_attributes_t attr = {AUDIO_CONTENT_TYPE_UNKNOWN, usage, AUDIO_SOURCE_DEFAULT, 0, ""};
+    audio_attributes_t attr =
+            {AUDIO_CONTENT_TYPE_UNKNOWN, usage, AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
     std::string tags = std::string("addr=") + mMixAddress;
     strncpy(attr.tags, tags.c_str(), AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - 1);
     getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
@@ -941,17 +973,19 @@
         AudioPolicyManagerTestDPMixRecordInjection,
         testing::Values(
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
-                                     AUDIO_SOURCE_CAMCORDER, 0, ""},
+                                     AUDIO_SOURCE_CAMCORDER, AUDIO_FLAG_NONE, ""},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
-                                     AUDIO_SOURCE_CAMCORDER, 0, "addr=remote_submix_media"},
+                                     AUDIO_SOURCE_CAMCORDER, AUDIO_FLAG_NONE,
+                                     "addr=remote_submix_media"},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
-                                     AUDIO_SOURCE_MIC, 0, "addr=remote_submix_media"},
+                                     AUDIO_SOURCE_MIC, AUDIO_FLAG_NONE,
+                                     "addr=remote_submix_media"},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
-                                     AUDIO_SOURCE_MIC, 0, ""},
+                                     AUDIO_SOURCE_MIC, AUDIO_FLAG_NONE, ""},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
-                                     AUDIO_SOURCE_VOICE_COMMUNICATION, 0, ""},
+                                     AUDIO_SOURCE_VOICE_COMMUNICATION, AUDIO_FLAG_NONE, ""},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
-                                     AUDIO_SOURCE_VOICE_COMMUNICATION, 0,
+                                     AUDIO_SOURCE_VOICE_COMMUNICATION, AUDIO_FLAG_NONE,
                                      "addr=remote_submix_media"}
                 )
         );
@@ -962,14 +996,15 @@
         AudioPolicyManagerTestDPMixRecordInjection,
         testing::Values(
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
-                                     AUDIO_SOURCE_VOICE_RECOGNITION, 0, ""},
+                                     AUDIO_SOURCE_VOICE_RECOGNITION, AUDIO_FLAG_NONE, ""},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
-                                     AUDIO_SOURCE_HOTWORD, 0, ""},
+                                     AUDIO_SOURCE_HOTWORD, AUDIO_FLAG_NONE, ""},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
-                                     AUDIO_SOURCE_VOICE_RECOGNITION, 0,
+                                     AUDIO_SOURCE_VOICE_RECOGNITION, AUDIO_FLAG_NONE,
                                      "addr=remote_submix_media"},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
-                                     AUDIO_SOURCE_HOTWORD, 0, "addr=remote_submix_media"}
+                                     AUDIO_SOURCE_HOTWORD, AUDIO_FLAG_NONE,
+                                     "addr=remote_submix_media"}
                 )
         );
 
@@ -1188,3 +1223,109 @@
     EXPECT_GT(mClient->getAudioPortListUpdateCount(), prevAudioPortListUpdateCount);
     EXPECT_GT(mManager->getAudioPortGeneration(), prevAudioPortGeneration);
 }
+
+using DevicesRoleForCapturePresetParam = std::tuple<audio_source_t, device_role_t>;
+
+class AudioPolicyManagerDevicesRoleForCapturePresetTest
+        : public AudioPolicyManagerTestWithConfigurationFile,
+          public testing::WithParamInterface<DevicesRoleForCapturePresetParam> {
+protected:
+    // `inputDevice` and `inputDevice2` indicate the audio device types to be used when setting
+    // the device role. They must be declared in test_audio_policy_configuration.xml.
+    AudioDeviceTypeAddr inputDevice = AudioDeviceTypeAddr(AUDIO_DEVICE_IN_BUILTIN_MIC, "");
+    AudioDeviceTypeAddr inputDevice2 = AudioDeviceTypeAddr(AUDIO_DEVICE_IN_HDMI, "");
+};
+
+TEST_P(AudioPolicyManagerDevicesRoleForCapturePresetTest, DevicesRoleForCapturePreset) {
+    const audio_source_t audioSource = std::get<0>(GetParam());
+    const device_role_t role = std::get<1>(GetParam());
+
+    // Test that an output device is rejected for a capture-preset role
+    const AudioDeviceTypeAddr outputDevice(AUDIO_DEVICE_OUT_SPEAKER, "");
+    const AudioDeviceTypeAddrVector outputDevices = {outputDevice};
+    ASSERT_EQ(BAD_VALUE,
+              mManager->setDevicesRoleForCapturePreset(audioSource, role, outputDevices));
+    ASSERT_EQ(BAD_VALUE,
+              mManager->addDevicesRoleForCapturePreset(audioSource, role, outputDevices));
+    AudioDeviceTypeAddrVector devices;
+    ASSERT_EQ(NAME_NOT_FOUND,
+              mManager->getDevicesForRoleAndCapturePreset(audioSource, role, devices));
+    ASSERT_TRUE(devices.empty());
+    ASSERT_EQ(BAD_VALUE,
+              mManager->removeDevicesRoleForCapturePreset(audioSource, role, outputDevices));
+
+    // Without a prior set, calls to get/remove/clear must fail
+    ASSERT_EQ(NAME_NOT_FOUND,
+              mManager->getDevicesForRoleAndCapturePreset(audioSource, role, devices));
+    ASSERT_EQ(NAME_NOT_FOUND,
+              mManager->removeDevicesRoleForCapturePreset(audioSource, role, devices));
+    ASSERT_EQ(NAME_NOT_FOUND,
+              mManager->clearDevicesRoleForCapturePreset(audioSource, role));
+
+    // Test set/get devices role
+    const AudioDeviceTypeAddrVector inputDevices = {inputDevice};
+    ASSERT_EQ(NO_ERROR,
+              mManager->setDevicesRoleForCapturePreset(audioSource, role, inputDevices));
+    ASSERT_EQ(NO_ERROR, mManager->getDevicesForRoleAndCapturePreset(audioSource, role, devices));
+    EXPECT_THAT(devices, UnorderedElementsAre(inputDevice));
+
+    // Test that setting again replaces the previously set devices
+    const AudioDeviceTypeAddrVector inputDevices2 = {inputDevice2};
+    ASSERT_EQ(NO_ERROR,
+              mManager->setDevicesRoleForCapturePreset(audioSource, role, inputDevices2));
+    devices.clear();
+    ASSERT_EQ(NO_ERROR, mManager->getDevicesForRoleAndCapturePreset(audioSource, role, devices));
+    EXPECT_THAT(devices, UnorderedElementsAre(inputDevice2));
+
+    // Test add devices
+    ASSERT_EQ(NO_ERROR,
+              mManager->addDevicesRoleForCapturePreset(audioSource, role, inputDevices));
+    devices.clear();
+    ASSERT_EQ(NO_ERROR, mManager->getDevicesForRoleAndCapturePreset(audioSource, role, devices));
+    EXPECT_THAT(devices, UnorderedElementsAre(inputDevice, inputDevice2));
+
+    // Test remove devices
+    ASSERT_EQ(NO_ERROR,
+              mManager->removeDevicesRoleForCapturePreset(audioSource, role, inputDevices));
+    devices.clear();
+    ASSERT_EQ(NO_ERROR, mManager->getDevicesForRoleAndCapturePreset(audioSource, role, devices));
+    EXPECT_THAT(devices, UnorderedElementsAre(inputDevice2));
+
+    // Test removing devices that are not currently set for the device role
+    ASSERT_EQ(BAD_VALUE,
+              mManager->removeDevicesRoleForCapturePreset(audioSource, role, inputDevices));
+
+    // Test clear devices
+    ASSERT_EQ(NO_ERROR,
+              mManager->clearDevicesRoleForCapturePreset(audioSource, role));
+    devices.clear();
+    ASSERT_EQ(NAME_NOT_FOUND,
+              mManager->getDevicesForRoleAndCapturePreset(audioSource, role, devices));
+}
+
+INSTANTIATE_TEST_CASE_P(
+        DevicesRoleForCapturePresetOperation,
+        AudioPolicyManagerDevicesRoleForCapturePresetTest,
+        testing::Values(
+                DevicesRoleForCapturePresetParam({AUDIO_SOURCE_MIC, DEVICE_ROLE_PREFERRED}),
+                DevicesRoleForCapturePresetParam({AUDIO_SOURCE_VOICE_UPLINK,
+                                                  DEVICE_ROLE_PREFERRED}),
+                DevicesRoleForCapturePresetParam({AUDIO_SOURCE_VOICE_DOWNLINK,
+                                                  DEVICE_ROLE_PREFERRED}),
+                DevicesRoleForCapturePresetParam({AUDIO_SOURCE_VOICE_CALL, DEVICE_ROLE_PREFERRED}),
+                DevicesRoleForCapturePresetParam({AUDIO_SOURCE_CAMCORDER, DEVICE_ROLE_PREFERRED}),
+                DevicesRoleForCapturePresetParam({AUDIO_SOURCE_VOICE_RECOGNITION,
+                                                  DEVICE_ROLE_PREFERRED}),
+                DevicesRoleForCapturePresetParam({AUDIO_SOURCE_VOICE_COMMUNICATION,
+                                                  DEVICE_ROLE_PREFERRED}),
+                DevicesRoleForCapturePresetParam({AUDIO_SOURCE_REMOTE_SUBMIX,
+                                                  DEVICE_ROLE_PREFERRED}),
+                DevicesRoleForCapturePresetParam({AUDIO_SOURCE_UNPROCESSED, DEVICE_ROLE_PREFERRED}),
+                DevicesRoleForCapturePresetParam({AUDIO_SOURCE_VOICE_PERFORMANCE,
+                                                  DEVICE_ROLE_PREFERRED}),
+                DevicesRoleForCapturePresetParam({AUDIO_SOURCE_ECHO_REFERENCE,
+                                                  DEVICE_ROLE_PREFERRED}),
+                DevicesRoleForCapturePresetParam({AUDIO_SOURCE_FM_TUNER, DEVICE_ROLE_PREFERRED}),
+                DevicesRoleForCapturePresetParam({AUDIO_SOURCE_HOTWORD, DEVICE_ROLE_PREFERRED})
+                )
+        );
diff --git a/services/audiopolicy/tests/resources/Android.bp b/services/audiopolicy/tests/resources/Android.bp
index d9476d9..4f50dad 100644
--- a/services/audiopolicy/tests/resources/Android.bp
+++ b/services/audiopolicy/tests/resources/Android.bp
@@ -3,6 +3,7 @@
     srcs: [
         "test_audio_policy_configuration.xml",
         "test_audio_policy_primary_only_configuration.xml",
+        "test_invalid_audio_policy_configuration.xml",
         "test_tv_apm_configuration.xml",
     ],
 }
diff --git a/services/audiopolicy/tests/resources/test_invalid_audio_policy_configuration.xml b/services/audiopolicy/tests/resources/test_invalid_audio_policy_configuration.xml
new file mode 100644
index 0000000..25641d5
--- /dev/null
+++ b/services/audiopolicy/tests/resources/test_invalid_audio_policy_configuration.xml
@@ -0,0 +1,113 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<!-- This file contains an unnamed device port in the "r_submix" module section. -->
+<audioPolicyConfiguration version="1.0" xmlns:xi="http://www.w3.org/2001/XInclude">
+    <globalConfiguration speaker_drc_enabled="true"/>
+
+    <modules>
+        <!-- Primary module -->
+        <module name="primary" halVersion="2.0">
+            <attachedDevices>
+                <item>Speaker</item>
+                <item>Built-In Mic</item>
+            </attachedDevices>
+            <defaultOutputDevice>Speaker</defaultOutputDevice>
+            <mixPorts>
+                <mixPort name="primary output" role="source" flags="AUDIO_OUTPUT_FLAG_PRIMARY">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                </mixPort>
+                <mixPort name="primary input" role="sink">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000"
+                             channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
+                </mixPort>
+                <mixPort name="mixport_bt_hfp_output" role="source">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                </mixPort>
+                <mixPort name="mixport_bt_hfp_input" role="sink">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="8000,11025,16000,44100,48000"
+                             channelMasks="AUDIO_CHANNEL_IN_STEREO,AUDIO_CHANNEL_IN_MONO"/>
+                </mixPort>
+            </mixPorts>
+            <devicePorts>
+                <devicePort tagName="Speaker" type="AUDIO_DEVICE_OUT_SPEAKER" role="sink">
+                </devicePort>
+                <devicePort tagName="Built-In Mic" type="AUDIO_DEVICE_IN_BUILTIN_MIC" role="source">
+                </devicePort>
+                <devicePort tagName="Hdmi" type="AUDIO_DEVICE_OUT_HDMI" role="sink">
+                </devicePort>
+                <devicePort tagName="Hdmi-In Mic" type="AUDIO_DEVICE_IN_HDMI" role="source">
+                </devicePort>
+                <devicePort tagName="BT SCO" type="AUDIO_DEVICE_OUT_BLUETOOTH_SCO"
+                            role="sink" address="hfp_client_out">
+                </devicePort>
+                <devicePort tagName="BT SCO Headset Mic" type="AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET"
+                            role="source" address="hfp_client_in">
+                </devicePort>
+            </devicePorts>
+            <routes>
+                <route type="mix" sink="Speaker"
+                       sources="primary output"/>
+                <route type="mix" sink="primary input"
+                       sources="Built-In Mic,Hdmi-In Mic"/>
+                <route type="mix" sink="Hdmi"
+                       sources="primary output"/>
+                <route type="mix" sink="BT SCO"
+                       sources="mixport_bt_hfp_output"/>
+                <route type="mix" sink="mixport_bt_hfp_input"
+                       sources="BT SCO Headset Mic"/>
+            </routes>
+        </module>
+
+        <!-- Remote Submix module -->
+        <module name="r_submix" halVersion="2.0">
+            <attachedDevices>
+                <item>Remote Submix In</item>
+            </attachedDevices>
+            <mixPorts>
+                <mixPort name="r_submix output" role="source">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                </mixPort>
+                <mixPort name="r_submix input" role="sink">
+                   <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                            samplingRates="48000" channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
+                </mixPort>
+           </mixPorts>
+           <devicePorts>
+               <!-- This port is missing the "tagName" attribute. -->
+               <devicePort type="AUDIO_DEVICE_OUT_REMOTE_SUBMIX"  role="sink">
+                   <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                            samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+               </devicePort>
+               <devicePort tagName="Remote Submix In" type="AUDIO_DEVICE_IN_REMOTE_SUBMIX"  role="source">
+                   <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                            samplingRates="48000" channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
+                </devicePort>
+            </devicePorts>
+            <routes>
+                <route type="mix" sink="Remote Submix Out"
+                       sources="r_submix output"/>
+                <route type="mix" sink="r_submix input"
+                       sources="Remote Submix In"/>
+            </routes>
+        </module>
+    </modules>
+</audioPolicyConfiguration>
diff --git a/services/camera/libcameraservice/Android.bp b/services/camera/libcameraservice/Android.bp
index 501d922..4a36865 100644
--- a/services/camera/libcameraservice/Android.bp
+++ b/services/camera/libcameraservice/Android.bp
@@ -30,7 +30,6 @@
         "common/CameraProviderManager.cpp",
         "common/DepthPhotoProcessor.cpp",
         "common/FrameProcessorBase.cpp",
-        "api1/CameraClient.cpp",
         "api1/Camera2Client.cpp",
         "api1/client2/Parameters.cpp",
         "api1/client2/FrameProcessor.cpp",
@@ -46,7 +45,6 @@
         "api2/DepthCompositeStream.cpp",
         "api2/HeicEncoderInfoManager.cpp",
         "api2/HeicCompositeStream.cpp",
-        "device1/CameraHardwareInterface.cpp",
         "device3/BufferUtils.cpp",
         "device3/Camera3Device.cpp",
         "device3/Camera3OfflineSession.cpp",
@@ -54,7 +52,7 @@
         "device3/Camera3IOStreamBase.cpp",
         "device3/Camera3InputStream.cpp",
         "device3/Camera3OutputStream.cpp",
-        "device3/Camera3DummyStream.cpp",
+        "device3/Camera3FakeStream.cpp",
         "device3/Camera3SharedOutputStream.cpp",
         "device3/StatusTracker.cpp",
         "device3/Camera3BufferManager.cpp",
@@ -122,7 +120,6 @@
         "android.hardware.camera.provider@2.4",
         "android.hardware.camera.provider@2.5",
         "android.hardware.camera.provider@2.6",
-        "android.hardware.camera.device@1.0",
         "android.hardware.camera.device@3.2",
         "android.hardware.camera.device@3.3",
         "android.hardware.camera.device@3.4",
diff --git a/services/camera/libcameraservice/CameraFlashlight.cpp b/services/camera/libcameraservice/CameraFlashlight.cpp
index e629cdd..ccdd9e5 100644
--- a/services/camera/libcameraservice/CameraFlashlight.cpp
+++ b/services/camera/libcameraservice/CameraFlashlight.cpp
@@ -59,9 +59,8 @@
     if (mProviderManager->supportSetTorchMode(cameraId.string())) {
         mFlashControl = new ProviderFlashControl(mProviderManager);
     } else {
-        // Only HAL1 devices do not support setTorchMode
-        mFlashControl =
-                new CameraHardwareInterfaceFlashControl(mProviderManager, mCallbacks);
+        ALOGE("Flashlight control not supported by this device!");
+        return NO_INIT;
     }
 
     return OK;
@@ -309,271 +308,4 @@
 }
 // ProviderFlashControl implementation ends
 
-/////////////////////////////////////////////////////////////////////
-// CameraHardwareInterfaceFlashControl implementation begins
-// Flash control for camera module <= v2.3 and camera HAL v1
-/////////////////////////////////////////////////////////////////////
-
-CameraHardwareInterfaceFlashControl::CameraHardwareInterfaceFlashControl(
-        sp<CameraProviderManager> manager,
-        CameraProviderManager::StatusListener* callbacks) :
-        mProviderManager(manager),
-        mCallbacks(callbacks),
-        mTorchEnabled(false) {
-}
-
-CameraHardwareInterfaceFlashControl::~CameraHardwareInterfaceFlashControl() {
-    disconnectCameraDevice();
-
-    mSurface.clear();
-    mSurfaceTexture.clear();
-    mProducer.clear();
-    mConsumer.clear();
-
-    if (mTorchEnabled) {
-        if (mCallbacks) {
-            ALOGV("%s: notify the framework that torch was turned off",
-                    __FUNCTION__);
-            mCallbacks->onTorchStatusChanged(mCameraId, TorchModeStatus::AVAILABLE_OFF);
-        }
-    }
-}
-
-status_t CameraHardwareInterfaceFlashControl::setTorchMode(
-        const String8& cameraId, bool enabled) {
-    Mutex::Autolock l(mLock);
-
-    // pre-check
-    status_t res;
-    if (enabled) {
-        bool hasFlash = false;
-        // Check if it has a flash unit and leave camera device open.
-        res = hasFlashUnitLocked(cameraId, &hasFlash, /*keepDeviceOpen*/true);
-        // invalid camera?
-        if (res) {
-            // hasFlashUnitLocked() returns BAD_INDEX if mDevice is connected to
-            // another camera device.
-            return res == BAD_INDEX ? BAD_INDEX : -EINVAL;
-        }
-        // no flash unit?
-        if (!hasFlash) {
-            // Disconnect camera device if it has no flash.
-            disconnectCameraDevice();
-            return -ENOSYS;
-        }
-    } else if (mDevice == NULL || cameraId != mCameraId) {
-        // disabling the torch mode of an un-opened or different device.
-        return OK;
-    } else {
-        // disabling the torch mode of currently opened device
-        disconnectCameraDevice();
-        mTorchEnabled = false;
-        mCallbacks->onTorchStatusChanged(cameraId, TorchModeStatus::AVAILABLE_OFF);
-        return OK;
-    }
-
-    res = startPreviewAndTorch();
-    if (res) {
-        return res;
-    }
-
-    mTorchEnabled = true;
-    mCallbacks->onTorchStatusChanged(cameraId, TorchModeStatus::AVAILABLE_ON);
-    return OK;
-}
-
-status_t CameraHardwareInterfaceFlashControl::hasFlashUnit(
-        const String8& cameraId, bool *hasFlash) {
-    Mutex::Autolock l(mLock);
-    // Close device after checking if it has a flash unit.
-    return hasFlashUnitLocked(cameraId, hasFlash, /*keepDeviceOpen*/false);
-}
-
-status_t CameraHardwareInterfaceFlashControl::hasFlashUnitLocked(
-        const String8& cameraId, bool *hasFlash, bool keepDeviceOpen) {
-    bool closeCameraDevice = false;
-
-    if (!hasFlash) {
-        return BAD_VALUE;
-    }
-
-    status_t res;
-    if (mDevice == NULL) {
-        // Connect to camera device to query if it has a flash unit.
-        res = connectCameraDevice(cameraId);
-        if (res) {
-            return res;
-        }
-        // Close camera device only when it is just opened and the caller doesn't want to keep
-        // the camera device open.
-        closeCameraDevice = !keepDeviceOpen;
-    }
-
-    if (cameraId != mCameraId) {
-        return BAD_INDEX;
-    }
-
-    const char *flashMode =
-            mParameters.get(CameraParameters::KEY_SUPPORTED_FLASH_MODES);
-    if (flashMode && strstr(flashMode, CameraParameters::FLASH_MODE_TORCH)) {
-        *hasFlash = true;
-    } else {
-        *hasFlash = false;
-    }
-
-    if (closeCameraDevice) {
-        res = disconnectCameraDevice();
-        if (res != OK) {
-            ALOGE("%s: Failed to disconnect camera device. %s (%d)", __FUNCTION__,
-                    strerror(-res), res);
-            return res;
-        }
-    }
-
-    return OK;
-}
-
-status_t CameraHardwareInterfaceFlashControl::startPreviewAndTorch() {
-    status_t res = OK;
-    res = mDevice->startPreview();
-    if (res) {
-        ALOGE("%s: start preview failed. %s (%d)", __FUNCTION__,
-                strerror(-res), res);
-        return res;
-    }
-
-    mParameters.set(CameraParameters::KEY_FLASH_MODE,
-            CameraParameters::FLASH_MODE_TORCH);
-
-    return mDevice->setParameters(mParameters);
-}
-
-status_t CameraHardwareInterfaceFlashControl::getSmallestSurfaceSize(
-        int32_t *width, int32_t *height) {
-    if (!width || !height) {
-        return BAD_VALUE;
-    }
-
-    int32_t w = INT32_MAX;
-    int32_t h = 1;
-    Vector<Size> sizes;
-
-    mParameters.getSupportedPreviewSizes(sizes);
-    for (size_t i = 0; i < sizes.size(); i++) {
-        Size s = sizes[i];
-        if (w * h > s.width * s.height) {
-            w = s.width;
-            h = s.height;
-        }
-    }
-
-    if (w == INT32_MAX) {
-        return NAME_NOT_FOUND;
-    }
-
-    *width = w;
-    *height = h;
-
-    return OK;
-}
-
-status_t CameraHardwareInterfaceFlashControl::initializePreviewWindow(
-        const sp<CameraHardwareInterface>& device, int32_t width, int32_t height) {
-    status_t res;
-    BufferQueue::createBufferQueue(&mProducer, &mConsumer);
-
-    mSurfaceTexture = new GLConsumer(mConsumer, 0, GLConsumer::TEXTURE_EXTERNAL,
-            true, true);
-    if (mSurfaceTexture == NULL) {
-        return NO_MEMORY;
-    }
-
-    int32_t format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
-    res = mSurfaceTexture->setDefaultBufferSize(width, height);
-    if (res) {
-        return res;
-    }
-    res = mSurfaceTexture->setDefaultBufferFormat(format);
-    if (res) {
-        return res;
-    }
-
-    mSurface = new Surface(mProducer, /*useAsync*/ true);
-    if (mSurface == NULL) {
-        return NO_MEMORY;
-    }
-
-    res = native_window_api_connect(mSurface.get(), NATIVE_WINDOW_API_CAMERA);
-    if (res) {
-        ALOGE("%s: Unable to connect to native window", __FUNCTION__);
-        return res;
-    }
-
-    return device->setPreviewWindow(mSurface);
-}
-
-status_t CameraHardwareInterfaceFlashControl::connectCameraDevice(
-        const String8& cameraId) {
-    sp<CameraHardwareInterface> device =
-            new CameraHardwareInterface(cameraId.string());
-
-    status_t res = device->initialize(mProviderManager);
-    if (res) {
-        ALOGE("%s: initializing camera %s failed", __FUNCTION__,
-                cameraId.string());
-        return res;
-    }
-
-    // need to set __get_memory in set_callbacks().
-    device->setCallbacks(NULL, NULL, NULL, NULL, NULL);
-
-    mParameters = device->getParameters();
-
-    int32_t width, height;
-    res = getSmallestSurfaceSize(&width, &height);
-    if (res) {
-        ALOGE("%s: failed to get smallest surface size for camera %s",
-                __FUNCTION__, cameraId.string());
-        return res;
-    }
-
-    res = initializePreviewWindow(device, width, height);
-    if (res) {
-        ALOGE("%s: failed to initialize preview window for camera %s",
-                __FUNCTION__, cameraId.string());
-        return res;
-    }
-
-    mCameraId = cameraId;
-    mDevice = device;
-    return OK;
-}
-
-status_t CameraHardwareInterfaceFlashControl::disconnectCameraDevice() {
-    if (mDevice == NULL) {
-        return OK;
-    }
-
-    if (mParameters.get(CameraParameters::KEY_FLASH_MODE)) {
-        // There is a flash, turn if off.
-        // (If there isn't one, leave the parameter null)
-        mParameters.set(CameraParameters::KEY_FLASH_MODE,
-                CameraParameters::FLASH_MODE_OFF);
-        mDevice->setParameters(mParameters);
-    }
-    mDevice->stopPreview();
-    status_t res = native_window_api_disconnect(mSurface.get(),
-            NATIVE_WINDOW_API_CAMERA);
-    if (res) {
-        ALOGW("%s: native_window_api_disconnect failed: %s (%d)",
-                __FUNCTION__, strerror(-res), res);
-    }
-    mDevice->setPreviewWindow(NULL);
-    mDevice->release();
-    mDevice = NULL;
-
-    return OK;
-}
-// CameraHardwareInterfaceFlashControl implementation ends
-
 }
diff --git a/services/camera/libcameraservice/CameraFlashlight.h b/services/camera/libcameraservice/CameraFlashlight.h
index 1baaba2..b97fa5f 100644
--- a/services/camera/libcameraservice/CameraFlashlight.h
+++ b/services/camera/libcameraservice/CameraFlashlight.h
@@ -23,8 +23,6 @@
 #include <utils/SortedVector.h>
 #include "common/CameraProviderManager.h"
 #include "common/CameraDeviceBase.h"
-#include "device1/CameraHardwareInterface.h"
-
 
 namespace android {
 
@@ -124,59 +122,6 @@
         Mutex mLock;
 };
 
-/**
- * Flash control for camera module <= v2.3 and camera HAL v1
- */
-class CameraHardwareInterfaceFlashControl : public FlashControlBase {
-    public:
-        CameraHardwareInterfaceFlashControl(
-                sp<CameraProviderManager> manager,
-                CameraProviderManager::StatusListener* callbacks);
-        virtual ~CameraHardwareInterfaceFlashControl();
-
-        // FlashControlBase
-        status_t setTorchMode(const String8& cameraId, bool enabled);
-        status_t hasFlashUnit(const String8& cameraId, bool *hasFlash);
-
-    private:
-        // connect to a camera device
-        status_t connectCameraDevice(const String8& cameraId);
-
-        // disconnect and free mDevice
-        status_t disconnectCameraDevice();
-
-        // initialize the preview window
-        status_t initializePreviewWindow(const sp<CameraHardwareInterface>& device,
-                int32_t width, int32_t height);
-
-        // start preview and enable torch
-        status_t startPreviewAndTorch();
-
-        // get the smallest surface
-        status_t getSmallestSurfaceSize(int32_t *width, int32_t *height);
-
-        // protected by mLock
-        // If this function opens camera device in order to check if it has a flash unit, the
-        // camera device will remain open if keepDeviceOpen is true and the camera device will be
-        // closed if keepDeviceOpen is false. If camera device is already open when calling this
-        // function, keepDeviceOpen is ignored.
-        status_t hasFlashUnitLocked(const String8& cameraId, bool *hasFlash, bool keepDeviceOpen);
-
-        sp<CameraProviderManager> mProviderManager;
-        CameraProviderManager::StatusListener* mCallbacks;
-        sp<CameraHardwareInterface> mDevice;
-        String8 mCameraId;
-        CameraParameters mParameters;
-        bool mTorchEnabled;
-
-        sp<IGraphicBufferProducer> mProducer;
-        sp<IGraphicBufferConsumer>  mConsumer;
-        sp<GLConsumer> mSurfaceTexture;
-        sp<Surface> mSurface;
-
-        Mutex mLock;
-};
-
 } // namespace android
 
 #endif
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index af1e01d..138e429 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -70,7 +70,6 @@
 #include <system/camera.h>
 
 #include "CameraService.h"
-#include "api1/CameraClient.h"
 #include "api1/Camera2Client.h"
 #include "api2/CameraDeviceClient.h"
 #include "utils/CameraTraces.h"
@@ -679,9 +678,15 @@
     status_t res = mCameraProviderManager->getCameraCharacteristics(
             String8(cameraId).string(), cameraInfo);
     if (res != OK) {
-        return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION, "Unable to retrieve camera "
-                "characteristics for device %s: %s (%d)", String8(cameraId).string(),
-                strerror(-res), res);
+        if (res == NAME_NOT_FOUND) {
+            return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT, "Unable to retrieve camera "
+                    "characteristics for unknown device %s: %s (%d)", String8(cameraId).string(),
+                    strerror(-res), res);
+        } else {
+            return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION, "Unable to retrieve camera "
+                    "characteristics for device %s: %s (%d)", String8(cameraId).string(),
+                    strerror(-res), res);
+        }
     }
     SystemCameraKind deviceKind = SystemCameraKind::PUBLIC;
     if (getSystemCameraKind(String8(cameraId), &deviceKind) != OK) {
@@ -802,35 +807,26 @@
 
 Status CameraService::makeClient(const sp<CameraService>& cameraService,
         const sp<IInterface>& cameraCb, const String16& packageName,
-        const std::unique_ptr<String16>& featureId, const String8& cameraId, int api1CameraId,
-        int facing, int clientPid, uid_t clientUid, int servicePid, int halVersion,
+        const std::optional<String16>& featureId, const String8& cameraId,
+        int api1CameraId, int facing, int clientPid, uid_t clientUid, int servicePid,
         int deviceVersion, apiLevel effectiveApiLevel,
         /*out*/sp<BasicClient>* client) {
 
-    if (halVersion < 0 || halVersion == deviceVersion) {
-        // Default path: HAL version is unspecified by caller, create CameraClient
-        // based on device version reported by the HAL.
-        switch(deviceVersion) {
-          case CAMERA_DEVICE_API_VERSION_1_0:
-            if (effectiveApiLevel == API_1) {  // Camera1 API route
-                sp<ICameraClient> tmp = static_cast<ICameraClient*>(cameraCb.get());
-                *client = new CameraClient(cameraService, tmp, packageName, featureId,
-                        api1CameraId, facing, clientPid, clientUid,
-                        getpid());
-            } else { // Camera2 API route
-                ALOGW("Camera using old HAL version: %d", deviceVersion);
-                return STATUS_ERROR_FMT(ERROR_DEPRECATED_HAL,
-                        "Camera device \"%s\" HAL version %d does not support camera2 API",
-                        cameraId.string(), deviceVersion);
-            }
+    // Create the client based on the device version reported by the HAL.
+    switch(deviceVersion) {
+        case CAMERA_DEVICE_API_VERSION_1_0:
+            ALOGE("Camera using old HAL version: %d", deviceVersion);
+            return STATUS_ERROR_FMT(ERROR_DEPRECATED_HAL,
+                    "Camera device \"%s\" HAL version %d no longer supported",
+                    cameraId.string(), deviceVersion);
             break;
-          case CAMERA_DEVICE_API_VERSION_3_0:
-          case CAMERA_DEVICE_API_VERSION_3_1:
-          case CAMERA_DEVICE_API_VERSION_3_2:
-          case CAMERA_DEVICE_API_VERSION_3_3:
-          case CAMERA_DEVICE_API_VERSION_3_4:
-          case CAMERA_DEVICE_API_VERSION_3_5:
-          case CAMERA_DEVICE_API_VERSION_3_6:
+        case CAMERA_DEVICE_API_VERSION_3_0:
+        case CAMERA_DEVICE_API_VERSION_3_1:
+        case CAMERA_DEVICE_API_VERSION_3_2:
+        case CAMERA_DEVICE_API_VERSION_3_3:
+        case CAMERA_DEVICE_API_VERSION_3_4:
+        case CAMERA_DEVICE_API_VERSION_3_5:
+        case CAMERA_DEVICE_API_VERSION_3_6:
             if (effectiveApiLevel == API_1) { // Camera1 API route
                 sp<ICameraClient> tmp = static_cast<ICameraClient*>(cameraCb.get());
                 *client = new Camera2Client(cameraService, tmp, packageName, featureId,
@@ -844,32 +840,12 @@
                         cameraId, facing, clientPid, clientUid, servicePid);
             }
             break;
-          default:
+        default:
             // Should not be reachable
             ALOGE("Unknown camera device HAL version: %d", deviceVersion);
             return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION,
                     "Camera device \"%s\" has unknown HAL version %d",
                     cameraId.string(), deviceVersion);
-        }
-    } else {
-        // A particular HAL version is requested by caller. Create CameraClient
-        // based on the requested HAL version.
-        if (deviceVersion > CAMERA_DEVICE_API_VERSION_1_0 &&
-            halVersion == CAMERA_DEVICE_API_VERSION_1_0) {
-            // Only support higher HAL version device opened as HAL1.0 device.
-            sp<ICameraClient> tmp = static_cast<ICameraClient*>(cameraCb.get());
-            *client = new CameraClient(cameraService, tmp, packageName, featureId,
-                    api1CameraId, facing, clientPid, clientUid,
-                    servicePid);
-        } else {
-            // Other combinations (e.g. HAL3.x open as HAL2.x) are not supported yet.
-            ALOGE("Invalid camera HAL version %x: HAL %x device can only be"
-                    " opened as HAL %x device", halVersion, deviceVersion,
-                    CAMERA_DEVICE_API_VERSION_1_0);
-            return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT,
-                    "Camera device \"%s\" (HAL version %d) cannot be opened as HAL version %d",
-                    cameraId.string(), deviceVersion, halVersion);
-        }
     }
     return Status::ok();
 }
@@ -957,8 +933,7 @@
     sp<Client> tmp = nullptr;
     if (!(ret = connectHelper<ICameraClient,Client>(
             sp<ICameraClient>{nullptr}, id, cameraId,
-            static_cast<int>(CAMERA_HAL_API_VERSION_UNSPECIFIED),
-            internalPackageName, std::unique_ptr<String16>(), uid, USE_CALLING_PID,
+            internalPackageName, {}, uid, USE_CALLING_PID,
             API_1, /*shimUpdateOnly*/ true, /*out*/ tmp)
             ).isOk()) {
         ALOGE("%s: Error initializing shim metadata: %s", __FUNCTION__, ret.toString8().string());
@@ -1476,34 +1451,7 @@
     String8 id = cameraIdIntToStr(api1CameraId);
     sp<Client> client = nullptr;
     ret = connectHelper<ICameraClient,Client>(cameraClient, id, api1CameraId,
-            CAMERA_HAL_API_VERSION_UNSPECIFIED, clientPackageName, std::unique_ptr<String16>(),
-            clientUid, clientPid, API_1, /*shimUpdateOnly*/ false, /*out*/client);
-
-    if(!ret.isOk()) {
-        logRejected(id, CameraThreadState::getCallingPid(), String8(clientPackageName),
-                ret.toString8());
-        return ret;
-    }
-
-    *device = client;
-    return ret;
-}
-
-Status CameraService::connectLegacy(
-        const sp<ICameraClient>& cameraClient,
-        int api1CameraId, int halVersion,
-        const String16& clientPackageName,
-        int clientUid,
-        /*out*/
-        sp<ICamera>* device) {
-
-    ATRACE_CALL();
-    String8 id = cameraIdIntToStr(api1CameraId);
-
-    Status ret = Status::ok();
-    sp<Client> client = nullptr;
-    ret = connectHelper<ICameraClient,Client>(cameraClient, id, api1CameraId, halVersion,
-            clientPackageName, std::unique_ptr<String16>(), clientUid, USE_CALLING_PID, API_1,
+            clientPackageName, {}, clientUid, clientPid, API_1,
             /*shimUpdateOnly*/ false, /*out*/client);
 
     if(!ret.isOk()) {
@@ -1545,8 +1493,9 @@
     int cUid = CameraThreadState::getCallingUid();
     SystemCameraKind systemCameraKind = SystemCameraKind::PUBLIC;
     if (getSystemCameraKind(cameraId, &systemCameraKind) != OK) {
-        ALOGE("%s: Invalid camera id %s, ", __FUNCTION__, cameraId.c_str());
-        return true;
+        // This isn't a known camera ID, so it's not a system camera
+        ALOGV("%s: Unknown camera id %s, ", __FUNCTION__, cameraId.c_str());
+        return false;
     }
 
     // (1) Cameraserver trying to connect, accept.
@@ -1578,7 +1527,7 @@
         const sp<hardware::camera2::ICameraDeviceCallbacks>& cameraCb,
         const String16& cameraId,
         const String16& clientPackageName,
-        const std::unique_ptr<String16>& clientFeatureId,
+        const std::optional<String16>& clientFeatureId,
         int clientUid,
         /*out*/
         sp<hardware::camera2::ICameraDeviceUser>* device) {
@@ -1595,8 +1544,7 @@
         clientPackageNameAdj = String16(vendorClient.c_str());
     }
     ret = connectHelper<hardware::camera2::ICameraDeviceCallbacks,CameraDeviceClient>(cameraCb, id,
-            /*api1CameraId*/-1,
-            CAMERA_HAL_API_VERSION_UNSPECIFIED, clientPackageNameAdj, clientFeatureId,
+            /*api1CameraId*/-1, clientPackageNameAdj, clientFeatureId,
             clientUid, USE_CALLING_PID, API_2, /*shimUpdateOnly*/ false, /*out*/client);
 
     if(!ret.isOk()) {
@@ -1611,8 +1559,8 @@
 
 template<class CALLBACK, class CLIENT>
 Status CameraService::connectHelper(const sp<CALLBACK>& cameraCb, const String8& cameraId,
-        int api1CameraId, int halVersion, const String16& clientPackageName,
-        const std::unique_ptr<String16>& clientFeatureId, int clientUid, int clientPid,
+        int api1CameraId, const String16& clientPackageName,
+        const std::optional<String16>& clientFeatureId, int clientUid, int clientPid,
         apiLevel effectiveApiLevel, bool shimUpdateOnly,
         /*out*/sp<CLIENT>& device) {
     binder::Status ret = binder::Status::ok();
@@ -1621,9 +1569,8 @@
 
     int originalClientPid = 0;
 
-    ALOGI("CameraService::connect call (PID %d \"%s\", camera ID %s) for HAL version %s and "
+    ALOGI("CameraService::connect call (PID %d \"%s\", camera ID %s) and "
             "Camera API version %d", clientPid, clientName8.string(), cameraId.string(),
-            (halVersion == -1) ? "default" : std::to_string(halVersion).c_str(),
             static_cast<int>(effectiveApiLevel));
 
     sp<CLIENT> client = nullptr;
@@ -1703,7 +1650,7 @@
         if(!(ret = makeClient(this, cameraCb, clientPackageName, clientFeatureId,
                 cameraId, api1CameraId, facing,
                 clientPid, clientUid, getpid(),
-                halVersion, deviceVersion, effectiveApiLevel,
+                deviceVersion, effectiveApiLevel,
                 /*out*/&tmp)).isOk()) {
             return ret;
         }
@@ -2714,7 +2661,7 @@
 CameraService::Client::Client(const sp<CameraService>& cameraService,
         const sp<ICameraClient>& cameraClient,
         const String16& clientPackageName,
-        const std::unique_ptr<String16>& clientFeatureId,
+        const std::optional<String16>& clientFeatureId,
         const String8& cameraIdStr,
         int api1CameraId, int cameraFacing,
         int clientPid, uid_t clientUid,
@@ -2751,24 +2698,18 @@
 
 CameraService::BasicClient::BasicClient(const sp<CameraService>& cameraService,
         const sp<IBinder>& remoteCallback,
-        const String16& clientPackageName, const std::unique_ptr<String16>& clientFeatureId,
+        const String16& clientPackageName, const std::optional<String16>& clientFeatureId,
         const String8& cameraIdStr, int cameraFacing,
         int clientPid, uid_t clientUid,
         int servicePid):
         mCameraIdStr(cameraIdStr), mCameraFacing(cameraFacing),
-        mClientPackageName(clientPackageName),
+        mClientPackageName(clientPackageName), mClientFeatureId(clientFeatureId),
         mClientPid(clientPid), mClientUid(clientUid),
         mServicePid(servicePid),
         mDisconnected(false), mUidIsTrusted(false),
         mAudioRestriction(hardware::camera2::ICameraDeviceUser::AUDIO_RESTRICTION_NONE),
         mRemoteBinder(remoteCallback)
 {
-    if (clientFeatureId) {
-        mClientFeatureId = std::unique_ptr<String16>(new String16(*clientFeatureId));
-    } else {
-        mClientFeatureId = std::unique_ptr<String16>();
-    }
-
     if (sCameraService == nullptr) {
         sCameraService = cameraService;
     }
@@ -3751,9 +3692,14 @@
                 __FUNCTION__, cameraId.string());
         return;
     }
+
+    // Collect the logical cameras before updateStatus acquires mStatusLock;
+    // collecting them while that lock is held can lead to a deadlock (b/162192331).
+    auto logicalCameraIds = getLogicalCameras(cameraId);
     // Update the status for this camera state, then send the onStatusChangedCallbacks to each
     // of the listeners with both the mStatusLock and mStatusListenerLock held
-    state->updateStatus(status, cameraId, rejectSourceStates, [this, &deviceKind, &supportsHAL3]
+    state->updateStatus(status, cameraId, rejectSourceStates, [this, &deviceKind, &supportsHAL3,
+                        &logicalCameraIds]
             (const String8& cameraId, StatusInternal status) {
 
             if (status != StatusInternal::ENUMERATING) {
@@ -3773,8 +3719,8 @@
             }
 
             Mutex::Autolock lock(mStatusListenerLock);
-
-            notifyPhysicalCameraStatusLocked(mapToInterface(status), cameraId, deviceKind);
+            notifyPhysicalCameraStatusLocked(mapToInterface(status), String16(cameraId),
+                    logicalCameraIds, deviceKind);
 
             for (auto& listener : mListenerList) {
                 bool isVendorListener = listener->isVendorListener();
@@ -3892,8 +3838,9 @@
     return OK;
 }
 
-void CameraService::notifyPhysicalCameraStatusLocked(int32_t status, const String8& cameraId,
-        SystemCameraKind deviceKind) {
+std::list<String16> CameraService::getLogicalCameras(
+        const String8& physicalCameraId) {
+    std::list<String16> retList;
     Mutex::Autolock lock(mCameraStatesLock);
     for (const auto& state : mCameraStates) {
         std::vector<std::string> physicalCameraIds;
@@ -3901,26 +3848,39 @@
             // This is not a logical multi-camera.
             continue;
         }
-        if (std::find(physicalCameraIds.begin(), physicalCameraIds.end(), cameraId.c_str())
+        if (std::find(physicalCameraIds.begin(), physicalCameraIds.end(), physicalCameraId.c_str())
                 == physicalCameraIds.end()) {
             // cameraId is not a physical camera of this logical multi-camera.
             continue;
         }
 
-        String16 id16(state.first), physicalId16(cameraId);
+        retList.emplace_back(String16(state.first));
+    }
+    return retList;
+}
+
+void CameraService::notifyPhysicalCameraStatusLocked(int32_t status,
+        const String16& physicalCameraId, const std::list<String16>& logicalCameraIds,
+        SystemCameraKind deviceKind) {
+    // mStatusListenerLock is expected to be locked
+    for (const auto& logicalCameraId : logicalCameraIds) {
         for (auto& listener : mListenerList) {
+            // Note: we check only the deviceKind of the physical camera id,
+            // since logical camera ids and their physical camera ids are
+            // guaranteed to have the same system camera kind.
             if (shouldSkipStatusUpdates(deviceKind, listener->isVendorListener(),
                     listener->getListenerPid(), listener->getListenerUid())) {
                 ALOGV("Skipping discovery callback for system-only camera device %s",
-                        cameraId.c_str());
+                        String8(physicalCameraId).c_str());
                 continue;
             }
             listener->getListener()->onPhysicalCameraStatusChanged(status,
-                    id16, physicalId16);
+                    logicalCameraId, physicalCameraId);
         }
     }
 }
 
+
 void CameraService::blockClientsForUid(uid_t uid) {
     const auto clients = mActiveClientManager.getAll();
     for (auto& current : clients) {
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index 4321201..6f37e9f 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -49,8 +49,10 @@
 
 #include <set>
 #include <string>
+#include <list>
 #include <map>
 #include <memory>
+#include <optional>
 #include <utility>
 #include <unordered_map>
 #include <unordered_set>
@@ -69,7 +71,6 @@
     public virtual CameraProviderManager::StatusListener
 {
     friend class BinderService<CameraService>;
-    friend class CameraClient;
     friend class CameraOfflineSessionClient;
 public:
     class Client;
@@ -133,15 +134,9 @@
             /*out*/
             sp<hardware::ICamera>* device);
 
-    virtual binder::Status     connectLegacy(const sp<hardware::ICameraClient>& cameraClient,
-            int32_t cameraId, int32_t halVersion,
-            const String16& clientPackageName, int32_t clientUid,
-            /*out*/
-            sp<hardware::ICamera>* device);
-
     virtual binder::Status     connectDevice(
             const sp<hardware::camera2::ICameraDeviceCallbacks>& cameraCb, const String16& cameraId,
-            const String16& clientPackageName, const std::unique_ptr<String16>& clientFeatureId,
+            const String16& clientPackageName, const std::optional<String16>& clientFeatureId,
             int32_t clientUid,
             /*out*/
             sp<hardware::camera2::ICameraDeviceUser>* device);
@@ -298,7 +293,7 @@
         BasicClient(const sp<CameraService>& cameraService,
                 const sp<IBinder>& remoteCallback,
                 const String16& clientPackageName,
-                const std::unique_ptr<String16>& clientFeatureId,
+                const std::optional<String16>& clientFeatureId,
                 const String8& cameraIdStr,
                 int cameraFacing,
                 int clientPid,
@@ -318,7 +313,7 @@
         const String8                   mCameraIdStr;
         const int                       mCameraFacing;
         String16                        mClientPackageName;
-        std::unique_ptr<String16>       mClientFeatureId;
+        std::optional<String16>         mClientFeatureId;
         pid_t                           mClientPid;
         const uid_t                     mClientUid;
         const pid_t                     mServicePid;
@@ -390,7 +385,7 @@
         Client(const sp<CameraService>& cameraService,
                 const sp<hardware::ICameraClient>& cameraClient,
                 const String16& clientPackageName,
-                const std::unique_ptr<String16>& clientFeatureId,
+                const std::optional<String16>& clientFeatureId,
                 const String8& cameraIdStr,
                 int api1CameraId,
                 int cameraFacing,
@@ -727,8 +722,8 @@
     // Single implementation shared between the various connect calls
     template<class CALLBACK, class CLIENT>
     binder::Status connectHelper(const sp<CALLBACK>& cameraCb, const String8& cameraId,
-            int api1CameraId, int halVersion, const String16& clientPackageName,
-            const std::unique_ptr<String16>& clientFeatureId, int clientUid, int clientPid,
+            int api1CameraId, const String16& clientPackageName,
+            const std::optional<String16>& clientFeatureId, int clientUid, int clientPid,
             apiLevel effectiveApiLevel, bool shimUpdateOnly, /*out*/sp<CLIENT>& device);
 
     // Lock guarding camera service state
@@ -1005,8 +1000,13 @@
             hardware::camera::common::V1_0::TorchModeStatus status);
 
     // notify physical camera status when the physical camera is public.
-    void notifyPhysicalCameraStatusLocked(int32_t status, const String8& cameraId,
-            SystemCameraKind deviceKind);
+    // Expects mStatusListenerLock to be locked.
+    void notifyPhysicalCameraStatusLocked(int32_t status, const String16& physicalCameraId,
+            const std::list<String16>& logicalCameraIds, SystemCameraKind deviceKind);
+
+    // Get the list of logical cameras that are backed by physicalCameraId.
+    std::list<String16> getLogicalCameras(const String8& physicalCameraId);
+
 
     // IBinder::DeathRecipient implementation
     virtual void        binderDied(const wp<IBinder> &who);
@@ -1057,8 +1057,8 @@
 
     static binder::Status makeClient(const sp<CameraService>& cameraService,
             const sp<IInterface>& cameraCb, const String16& packageName,
-            const std::unique_ptr<String16>& featureId, const String8& cameraId, int api1CameraId,
-            int facing, int clientPid, uid_t clientUid, int servicePid, int halVersion,
+            const std::optional<String16>& featureId, const String8& cameraId, int api1CameraId,
+            int facing, int clientPid, uid_t clientUid, int servicePid,
             int deviceVersion, apiLevel effectiveApiLevel,
             /*out*/sp<BasicClient>* client);
 
diff --git a/services/camera/libcameraservice/TEST_MAPPING b/services/camera/libcameraservice/TEST_MAPPING
index 6fdac68..ca6cc58 100644
--- a/services/camera/libcameraservice/TEST_MAPPING
+++ b/services/camera/libcameraservice/TEST_MAPPING
@@ -1,7 +1,12 @@
 {
   "presubmit": [
     {
-       "name": "cameraservice_test"
+      "name": "cameraservice_test"
+    }
+  ],
+  "imports": [
+    {
+      "path": "frameworks/av/camera"
     }
   ]
 }
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index b043c0b..09e2c3f 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -50,7 +50,7 @@
 Camera2Client::Camera2Client(const sp<CameraService>& cameraService,
         const sp<hardware::ICameraClient>& cameraClient,
         const String16& clientPackageName,
-        const std::unique_ptr<String16>& clientFeatureId,
+        const std::optional<String16>& clientFeatureId,
         const String8& cameraDeviceId,
         int api1CameraId,
         int cameraFacing,
diff --git a/services/camera/libcameraservice/api1/Camera2Client.h b/services/camera/libcameraservice/api1/Camera2Client.h
index 03ca44a..f8da0b6 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.h
+++ b/services/camera/libcameraservice/api1/Camera2Client.h
@@ -94,7 +94,7 @@
     Camera2Client(const sp<CameraService>& cameraService,
             const sp<hardware::ICameraClient>& cameraClient,
             const String16& clientPackageName,
-            const std::unique_ptr<String16>& clientFeatureId,
+            const std::optional<String16>& clientFeatureId,
             const String8& cameraDeviceId,
             int api1CameraId,
             int cameraFacing,
diff --git a/services/camera/libcameraservice/api1/CameraClient.cpp b/services/camera/libcameraservice/api1/CameraClient.cpp
deleted file mode 100644
index 892996c..0000000
--- a/services/camera/libcameraservice/api1/CameraClient.cpp
+++ /dev/null
@@ -1,1208 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#define LOG_TAG "CameraClient"
-//#define LOG_NDEBUG 0
-
-#include <cutils/atomic.h>
-#include <cutils/properties.h>
-#include <gui/Surface.h>
-#include <media/hardware/HardwareAPI.h>
-
-#include "api1/CameraClient.h"
-#include "device1/CameraHardwareInterface.h"
-#include "CameraService.h"
-#include "utils/CameraThreadState.h"
-
-namespace android {
-
-#define LOG1(...) ALOGD_IF(gLogLevel >= 1, __VA_ARGS__);
-#define LOG2(...) ALOGD_IF(gLogLevel >= 2, __VA_ARGS__);
-
-CameraClient::CameraClient(const sp<CameraService>& cameraService,
-        const sp<hardware::ICameraClient>& cameraClient,
-        const String16& clientPackageName, const std::unique_ptr<String16>& clientFeatureId,
-        int cameraId, int cameraFacing,
-        int clientPid, int clientUid,
-        int servicePid):
-        Client(cameraService, cameraClient, clientPackageName, clientFeatureId,
-                String8::format("%d", cameraId), cameraId, cameraFacing, clientPid,
-                clientUid, servicePid)
-{
-    int callingPid = CameraThreadState::getCallingPid();
-    LOG1("CameraClient::CameraClient E (pid %d, id %d)", callingPid, cameraId);
-
-    mHardware = NULL;
-    mMsgEnabled = 0;
-    mSurface = 0;
-    mPreviewWindow = 0;
-    mDestructionStarted = false;
-
-    // Callback is disabled by default
-    mPreviewCallbackFlag = CAMERA_FRAME_CALLBACK_FLAG_NOOP;
-    mOrientation = getOrientation(0, mCameraFacing == CAMERA_FACING_FRONT);
-    mPlayShutterSound = true;
-    LOG1("CameraClient::CameraClient X (pid %d, id %d)", callingPid, cameraId);
-}
-
-status_t CameraClient::initialize(sp<CameraProviderManager> manager,
-        const String8& /*monitorTags*/) {
-    int callingPid = CameraThreadState::getCallingPid();
-    status_t res;
-
-    LOG1("CameraClient::initialize E (pid %d, id %d)", callingPid, mCameraId);
-
-    // Verify ops permissions
-    res = startCameraOps();
-    if (res != OK) {
-        return res;
-    }
-
-    char camera_device_name[10];
-    snprintf(camera_device_name, sizeof(camera_device_name), "%d", mCameraId);
-
-    mHardware = new CameraHardwareInterface(camera_device_name);
-    res = mHardware->initialize(manager);
-    if (res != OK) {
-        ALOGE("%s: Camera %d: unable to initialize device: %s (%d)",
-                __FUNCTION__, mCameraId, strerror(-res), res);
-        mHardware.clear();
-        return res;
-    }
-
-    mHardware->setCallbacks(notifyCallback,
-            dataCallback,
-            dataCallbackTimestamp,
-            handleCallbackTimestampBatch,
-            (void *)(uintptr_t)mCameraId);
-
-    // Enable zoom, error, focus, and metadata messages by default
-    enableMsgType(CAMERA_MSG_ERROR | CAMERA_MSG_ZOOM | CAMERA_MSG_FOCUS |
-                  CAMERA_MSG_PREVIEW_METADATA | CAMERA_MSG_FOCUS_MOVE);
-
-    LOG1("CameraClient::initialize X (pid %d, id %d)", callingPid, mCameraId);
-    return OK;
-}
-
-
-// tear down the client
-CameraClient::~CameraClient() {
-    mDestructionStarted = true;
-    int callingPid = CameraThreadState::getCallingPid();
-    LOG1("CameraClient::~CameraClient E (pid %d, this %p)", callingPid, this);
-
-    disconnect();
-    LOG1("CameraClient::~CameraClient X (pid %d, this %p)", callingPid, this);
-}
-
-status_t CameraClient::dump(int fd, const Vector<String16>& args) {
-    return BasicClient::dump(fd, args);
-}
-
-status_t CameraClient::dumpClient(int fd, const Vector<String16>& args) {
-    const size_t SIZE = 256;
-    char buffer[SIZE];
-
-    size_t len = snprintf(buffer, SIZE, "Client[%d] (%p) with UID %d\n",
-            mCameraId,
-            (getRemoteCallback() != NULL ?
-                    IInterface::asBinder(getRemoteCallback()).get() : NULL),
-            mClientUid);
-    len = (len > SIZE - 1) ? SIZE - 1 : len;
-    write(fd, buffer, len);
-
-    len = snprintf(buffer, SIZE, "Latest set parameters:\n");
-    len = (len > SIZE - 1) ? SIZE - 1 : len;
-    write(fd, buffer, len);
-
-    mLatestSetParameters.dump(fd, args);
-
-    const char *enddump = "\n\n";
-    write(fd, enddump, strlen(enddump));
-
-    sp<CameraHardwareInterface> hardware = mHardware;
-    if (hardware != nullptr) {
-        return hardware->dump(fd, args);
-    }
-    ALOGI("%s: camera device closed already, skip dumping", __FUNCTION__);
-    return OK;
-}
-
-// ----------------------------------------------------------------------------
-
-status_t CameraClient::checkPid() const {
-    int callingPid = CameraThreadState::getCallingPid();
-    if (callingPid == mClientPid) return NO_ERROR;
-
-    ALOGW("attempt to use a locked camera from a different process"
-         " (old pid %d, new pid %d)", mClientPid, callingPid);
-    return EBUSY;
-}
-
-status_t CameraClient::checkPidAndHardware() const {
-    if (mHardware == 0) {
-        ALOGE("attempt to use a camera after disconnect() (pid %d)",
-              CameraThreadState::getCallingPid());
-        return INVALID_OPERATION;
-    }
-    status_t result = checkPid();
-    if (result != NO_ERROR) return result;
-    return NO_ERROR;
-}
-
-status_t CameraClient::lock() {
-    int callingPid = CameraThreadState::getCallingPid();
-    LOG1("lock (pid %d)", callingPid);
-    Mutex::Autolock lock(mLock);
-
-    // lock camera to this client if the the camera is unlocked
-    if (mClientPid == 0) {
-        mClientPid = callingPid;
-        return NO_ERROR;
-    }
-
-    // returns NO_ERROR if the client already owns the camera, EBUSY otherwise
-    return checkPid();
-}
-
-status_t CameraClient::unlock() {
-    int callingPid = CameraThreadState::getCallingPid();
-    LOG1("unlock (pid %d)", callingPid);
-    Mutex::Autolock lock(mLock);
-
-    // allow anyone to use camera (after they lock the camera)
-    status_t result = checkPid();
-    if (result == NO_ERROR) {
-        if (mHardware->recordingEnabled()) {
-            ALOGE("Not allowed to unlock camera during recording.");
-            return INVALID_OPERATION;
-        }
-        mClientPid = 0;
-        LOG1("clear mRemoteCallback (pid %d)", callingPid);
-        // we need to remove the reference to ICameraClient so that when the app
-        // goes away, the reference count goes to 0.
-        mRemoteCallback.clear();
-    }
-    return result;
-}
-
-// connect a new client to the camera
-status_t CameraClient::connect(const sp<hardware::ICameraClient>& client) {
-    int callingPid = CameraThreadState::getCallingPid();
-    LOG1("connect E (pid %d)", callingPid);
-    Mutex::Autolock lock(mLock);
-
-    if (mClientPid != 0 && checkPid() != NO_ERROR) {
-        ALOGW("Tried to connect to a locked camera (old pid %d, new pid %d)",
-                mClientPid, callingPid);
-        return EBUSY;
-    }
-
-    if (mRemoteCallback != 0 &&
-        (IInterface::asBinder(client) == IInterface::asBinder(mRemoteCallback))) {
-        LOG1("Connect to the same client");
-        return NO_ERROR;
-    }
-
-    mPreviewCallbackFlag = CAMERA_FRAME_CALLBACK_FLAG_NOOP;
-    mClientPid = callingPid;
-    mRemoteCallback = client;
-
-    LOG1("connect X (pid %d)", callingPid);
-    return NO_ERROR;
-}
-
-static void disconnectWindow(const sp<ANativeWindow>& window) {
-    if (window != 0) {
-        status_t result = native_window_api_disconnect(window.get(),
-                NATIVE_WINDOW_API_CAMERA);
-        if (result != NO_ERROR) {
-            ALOGW("native_window_api_disconnect failed: %s (%d)", strerror(-result),
-                    result);
-        }
-    }
-}
-
-binder::Status CameraClient::disconnect() {
-    int callingPid = CameraThreadState::getCallingPid();
-    LOG1("disconnect E (pid %d)", callingPid);
-    Mutex::Autolock lock(mLock);
-
-    binder::Status res = binder::Status::ok();
-    // Allow both client and the cameraserver to disconnect at all times
-    if (callingPid != mClientPid && callingPid != mServicePid) {
-        ALOGW("different client - don't disconnect");
-        return res;
-    }
-
-    // Make sure disconnect() is done once and once only, whether it is called
-    // from the user directly, or called by the destructor.
-    if (mHardware == 0) return res;
-
-    LOG1("hardware teardown");
-    // Before destroying mHardware, we must make sure it's in the
-    // idle state.
-    // Turn off all messages.
-    disableMsgType(CAMERA_MSG_ALL_MSGS);
-    mHardware->stopPreview();
-    sCameraService->updateProxyDeviceState(
-            hardware::ICameraServiceProxy::CAMERA_STATE_IDLE,
-            mCameraIdStr, mCameraFacing, mClientPackageName,
-            hardware::ICameraServiceProxy::CAMERA_API_LEVEL_1);
-    mHardware->cancelPicture();
-    // Release the hardware resources.
-    mHardware->release();
-
-    // Release the held ANativeWindow resources.
-    if (mPreviewWindow != 0) {
-        disconnectWindow(mPreviewWindow);
-        mPreviewWindow = 0;
-        mHardware->setPreviewWindow(mPreviewWindow);
-    }
-    mHardware.clear();
-
-    CameraService::Client::disconnect();
-
-    LOG1("disconnect X (pid %d)", callingPid);
-
-    return res;
-}
-
-// ----------------------------------------------------------------------------
-
-status_t CameraClient::setPreviewWindow(const sp<IBinder>& binder,
-        const sp<ANativeWindow>& window) {
-    Mutex::Autolock lock(mLock);
-    status_t result = checkPidAndHardware();
-    if (result != NO_ERROR) return result;
-
-    // return if no change in surface.
-    if (binder == mSurface) {
-        return NO_ERROR;
-    }
-
-    if (window != 0) {
-        result = native_window_api_connect(window.get(), NATIVE_WINDOW_API_CAMERA);
-        if (result != NO_ERROR) {
-            ALOGE("native_window_api_connect failed: %s (%d)", strerror(-result),
-                    result);
-            return result;
-        }
-    }
-
-    // If preview has been already started, register preview buffers now.
-    if (mHardware->previewEnabled()) {
-        if (window != 0) {
-            mHardware->setPreviewScalingMode(NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
-            mHardware->setPreviewTransform(mOrientation);
-            result = mHardware->setPreviewWindow(window);
-        }
-    }
-
-    if (result == NO_ERROR) {
-        // Everything has succeeded.  Disconnect the old window and remember the
-        // new window.
-        disconnectWindow(mPreviewWindow);
-        mSurface = binder;
-        mPreviewWindow = window;
-    } else {
-        // Something went wrong after we connected to the new window, so
-        // disconnect here.
-        disconnectWindow(window);
-    }
-
-    return result;
-}
-
-// set the buffer consumer that the preview will use
-status_t CameraClient::setPreviewTarget(
-        const sp<IGraphicBufferProducer>& bufferProducer) {
-    LOG1("setPreviewTarget(%p) (pid %d)", bufferProducer.get(),
-            CameraThreadState::getCallingPid());
-
-    sp<IBinder> binder;
-    sp<ANativeWindow> window;
-    if (bufferProducer != 0) {
-        binder = IInterface::asBinder(bufferProducer);
-        // Using controlledByApp flag to ensure that the buffer queue remains in
-        // async mode for the old camera API, where many applications depend
-        // on that behavior.
-        window = new Surface(bufferProducer, /*controlledByApp*/ true);
-    }
-    return setPreviewWindow(binder, window);
-}
-
-// set the preview callback flag to affect how the received frames from
-// preview are handled.
-void CameraClient::setPreviewCallbackFlag(int callback_flag) {
-    LOG1("setPreviewCallbackFlag(%d) (pid %d)", callback_flag, CameraThreadState::getCallingPid());
-    Mutex::Autolock lock(mLock);
-    if (checkPidAndHardware() != NO_ERROR) return;
-
-    mPreviewCallbackFlag = callback_flag;
-    if (mPreviewCallbackFlag & CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK) {
-        enableMsgType(CAMERA_MSG_PREVIEW_FRAME);
-    } else {
-        disableMsgType(CAMERA_MSG_PREVIEW_FRAME);
-    }
-}
-
-status_t CameraClient::setPreviewCallbackTarget(
-        const sp<IGraphicBufferProducer>& callbackProducer) {
-    (void)callbackProducer;
-    ALOGE("%s: Unimplemented!", __FUNCTION__);
-    return INVALID_OPERATION;
-}
-
-// start preview mode
-status_t CameraClient::startPreview() {
-    LOG1("startPreview (pid %d)", CameraThreadState::getCallingPid());
-    return startCameraMode(CAMERA_PREVIEW_MODE);
-}
-
-// start recording mode
-status_t CameraClient::startRecording() {
-    LOG1("startRecording (pid %d)", CameraThreadState::getCallingPid());
-    return startCameraMode(CAMERA_RECORDING_MODE);
-}
-
-// start preview or recording
-status_t CameraClient::startCameraMode(camera_mode mode) {
-    LOG1("startCameraMode(%d)", mode);
-    Mutex::Autolock lock(mLock);
-    status_t result = checkPidAndHardware();
-    if (result != NO_ERROR) return result;
-
-    switch(mode) {
-        case CAMERA_PREVIEW_MODE:
-            if (mSurface == 0 && mPreviewWindow == 0) {
-                LOG1("mSurface is not set yet.");
-                // still able to start preview in this case.
-            }
-            return startPreviewMode();
-        case CAMERA_RECORDING_MODE:
-            if (mSurface == 0 && mPreviewWindow == 0) {
-                ALOGE("mSurface or mPreviewWindow must be set before startRecordingMode.");
-                return INVALID_OPERATION;
-            }
-            return startRecordingMode();
-        default:
-            return UNKNOWN_ERROR;
-    }
-}
-
-status_t CameraClient::startPreviewMode() {
-    LOG1("startPreviewMode");
-    status_t result = NO_ERROR;
-
-    // if preview has been enabled, nothing needs to be done
-    if (mHardware->previewEnabled()) {
-        return NO_ERROR;
-    }
-
-    if (mPreviewWindow != 0) {
-        mHardware->setPreviewScalingMode(
-            NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
-        mHardware->setPreviewTransform(mOrientation);
-    }
-    mHardware->setPreviewWindow(mPreviewWindow);
-    result = mHardware->startPreview();
-    if (result == NO_ERROR) {
-        sCameraService->updateProxyDeviceState(
-            hardware::ICameraServiceProxy::CAMERA_STATE_ACTIVE,
-            mCameraIdStr, mCameraFacing, mClientPackageName,
-            hardware::ICameraServiceProxy::CAMERA_API_LEVEL_1);
-    }
-    return result;
-}
-
-status_t CameraClient::startRecordingMode() {
-    LOG1("startRecordingMode");
-    status_t result = NO_ERROR;
-
-    // if recording has been enabled, nothing needs to be done
-    if (mHardware->recordingEnabled()) {
-        return NO_ERROR;
-    }
-
-    // if preview has not been started, start preview first
-    if (!mHardware->previewEnabled()) {
-        result = startPreviewMode();
-        if (result != NO_ERROR) {
-            return result;
-        }
-    }
-
-    // start recording mode
-    enableMsgType(CAMERA_MSG_VIDEO_FRAME);
-    sCameraService->playSound(CameraService::SOUND_RECORDING_START);
-    result = mHardware->startRecording();
-    if (result != NO_ERROR) {
-        ALOGE("mHardware->startRecording() failed with status %d", result);
-    }
-    return result;
-}
-
-// stop preview mode
-void CameraClient::stopPreview() {
-    LOG1("stopPreview (pid %d)", CameraThreadState::getCallingPid());
-    Mutex::Autolock lock(mLock);
-    if (checkPidAndHardware() != NO_ERROR) return;
-
-
-    disableMsgType(CAMERA_MSG_PREVIEW_FRAME);
-    mHardware->stopPreview();
-    sCameraService->updateProxyDeviceState(
-        hardware::ICameraServiceProxy::CAMERA_STATE_IDLE,
-        mCameraIdStr, mCameraFacing, mClientPackageName,
-        hardware::ICameraServiceProxy::CAMERA_API_LEVEL_1);
-    mPreviewBuffer.clear();
-}
-
-// stop recording mode
-void CameraClient::stopRecording() {
-    LOG1("stopRecording (pid %d)", CameraThreadState::getCallingPid());
-    {
-        Mutex::Autolock lock(mLock);
-        if (checkPidAndHardware() != NO_ERROR) return;
-
-        disableMsgType(CAMERA_MSG_VIDEO_FRAME);
-        mHardware->stopRecording();
-        sCameraService->playSound(CameraService::SOUND_RECORDING_STOP);
-
-        mPreviewBuffer.clear();
-    }
-
-    {
-        Mutex::Autolock l(mAvailableCallbackBuffersLock);
-        if (!mAvailableCallbackBuffers.empty()) {
-            mAvailableCallbackBuffers.clear();
-        }
-    }
-}
-
-// release a recording frame
-void CameraClient::releaseRecordingFrame(const sp<IMemory>& mem) {
-    Mutex::Autolock lock(mLock);
-    if (checkPidAndHardware() != NO_ERROR) return;
-    if (mem == nullptr) {
-        android_errorWriteWithInfoLog(CameraService::SN_EVENT_LOG_ID, "26164272",
-                CameraThreadState::getCallingUid(), nullptr, 0);
-        return;
-    }
-
-    mHardware->releaseRecordingFrame(mem);
-}
-
-void CameraClient::releaseRecordingFrameHandle(native_handle_t *handle) {
-    if (handle == nullptr) return;
-    Mutex::Autolock lock(mLock);
-    sp<IMemory> dataPtr;
-    {
-        Mutex::Autolock l(mAvailableCallbackBuffersLock);
-        if (!mAvailableCallbackBuffers.empty()) {
-            dataPtr = mAvailableCallbackBuffers.back();
-            mAvailableCallbackBuffers.pop_back();
-        }
-    }
-
-    if (dataPtr == nullptr) {
-        ALOGE("%s: %d: No callback buffer available. Dropping a native handle.", __FUNCTION__,
-                __LINE__);
-        native_handle_close(handle);
-        native_handle_delete(handle);
-        return;
-    } else if (dataPtr->size() != sizeof(VideoNativeHandleMetadata)) {
-        ALOGE("%s: %d: Callback buffer size doesn't match VideoNativeHandleMetadata", __FUNCTION__,
-                __LINE__);
-        native_handle_close(handle);
-        native_handle_delete(handle);
-        return;
-    }
-
-    if (mHardware != nullptr) {
-        VideoNativeHandleMetadata *metadata = (VideoNativeHandleMetadata*)(dataPtr->unsecurePointer());
-        metadata->eType = kMetadataBufferTypeNativeHandleSource;
-        metadata->pHandle = handle;
-        mHardware->releaseRecordingFrame(dataPtr);
-    }
-}
-
-void CameraClient::releaseRecordingFrameHandleBatch(const std::vector<native_handle_t*>& handles) {
-    Mutex::Autolock lock(mLock);
-    bool disconnected = (mHardware == nullptr);
-    size_t n = handles.size();
-    std::vector<sp<IMemory>> frames;
-    if (!disconnected) {
-        frames.reserve(n);
-    }
-    bool error = false;
-    for (auto& handle : handles) {
-        sp<IMemory> dataPtr;
-        {
-            Mutex::Autolock l(mAvailableCallbackBuffersLock);
-            if (!mAvailableCallbackBuffers.empty()) {
-                dataPtr = mAvailableCallbackBuffers.back();
-                mAvailableCallbackBuffers.pop_back();
-            }
-        }
-
-        if (dataPtr == nullptr) {
-            ALOGE("%s: %d: No callback buffer available. Dropping frames.", __FUNCTION__,
-                    __LINE__);
-            error = true;
-            break;
-        } else if (dataPtr->size() != sizeof(VideoNativeHandleMetadata)) {
-            ALOGE("%s: %d: Callback buffer must be VideoNativeHandleMetadata", __FUNCTION__,
-                    __LINE__);
-            error = true;
-            break;
-        }
-
-        if (!disconnected) {
-            VideoNativeHandleMetadata *metadata = (VideoNativeHandleMetadata*)(dataPtr->unsecurePointer());
-            metadata->eType = kMetadataBufferTypeNativeHandleSource;
-            metadata->pHandle = handle;
-            frames.push_back(dataPtr);
-        }
-    }
-
-    if (error) {
-        for (auto& handle : handles) {
-            native_handle_close(handle);
-            native_handle_delete(handle);
-        }
-    } else if (!disconnected) {
-        mHardware->releaseRecordingFrameBatch(frames);
-    }
-    return;
-}
-
-status_t CameraClient::setVideoBufferMode(int32_t videoBufferMode) {
-    LOG1("setVideoBufferMode: %d", videoBufferMode);
-    bool enableMetadataInBuffers = false;
-
-    if (videoBufferMode == VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA) {
-        enableMetadataInBuffers = true;
-    } else if (videoBufferMode != VIDEO_BUFFER_MODE_DATA_CALLBACK_YUV) {
-        ALOGE("%s: %d: videoBufferMode %d is not supported.", __FUNCTION__, __LINE__,
-                videoBufferMode);
-        return BAD_VALUE;
-    }
-
-    Mutex::Autolock lock(mLock);
-    if (checkPidAndHardware() != NO_ERROR) {
-        return UNKNOWN_ERROR;
-    }
-
-    return mHardware->storeMetaDataInBuffers(enableMetadataInBuffers);
-}
-
-bool CameraClient::previewEnabled() {
-    LOG1("previewEnabled (pid %d)", CameraThreadState::getCallingPid());
-
-    Mutex::Autolock lock(mLock);
-    if (checkPidAndHardware() != NO_ERROR) return false;
-    return mHardware->previewEnabled();
-}
-
-bool CameraClient::recordingEnabled() {
-    LOG1("recordingEnabled (pid %d)", CameraThreadState::getCallingPid());
-
-    Mutex::Autolock lock(mLock);
-    if (checkPidAndHardware() != NO_ERROR) return false;
-    return mHardware->recordingEnabled();
-}
-
-status_t CameraClient::autoFocus() {
-    LOG1("autoFocus (pid %d)", CameraThreadState::getCallingPid());
-
-    Mutex::Autolock lock(mLock);
-    status_t result = checkPidAndHardware();
-    if (result != NO_ERROR) return result;
-
-    return mHardware->autoFocus();
-}
-
-status_t CameraClient::cancelAutoFocus() {
-    LOG1("cancelAutoFocus (pid %d)", CameraThreadState::getCallingPid());
-
-    Mutex::Autolock lock(mLock);
-    status_t result = checkPidAndHardware();
-    if (result != NO_ERROR) return result;
-
-    return mHardware->cancelAutoFocus();
-}
-
-// take a picture - image is returned in callback
-status_t CameraClient::takePicture(int msgType) {
-    LOG1("takePicture (pid %d): 0x%x", CameraThreadState::getCallingPid(), msgType);
-
-    Mutex::Autolock lock(mLock);
-    status_t result = checkPidAndHardware();
-    if (result != NO_ERROR) return result;
-
-    if ((msgType & CAMERA_MSG_RAW_IMAGE) &&
-        (msgType & CAMERA_MSG_RAW_IMAGE_NOTIFY)) {
-        ALOGE("CAMERA_MSG_RAW_IMAGE and CAMERA_MSG_RAW_IMAGE_NOTIFY"
-                " cannot be both enabled");
-        return BAD_VALUE;
-    }
-
-    // We only accept picture related message types
-    // and ignore other types of messages for takePicture().
-    int picMsgType = msgType
-                        & (CAMERA_MSG_SHUTTER |
-                           CAMERA_MSG_POSTVIEW_FRAME |
-                           CAMERA_MSG_RAW_IMAGE |
-                           CAMERA_MSG_RAW_IMAGE_NOTIFY |
-                           CAMERA_MSG_COMPRESSED_IMAGE);
-
-    enableMsgType(picMsgType);
-
-    return mHardware->takePicture();
-}
-
-// set preview/capture parameters - key/value pairs
-status_t CameraClient::setParameters(const String8& params) {
-    LOG1("setParameters (pid %d) (%s)", CameraThreadState::getCallingPid(), params.string());
-
-    Mutex::Autolock lock(mLock);
-    status_t result = checkPidAndHardware();
-    if (result != NO_ERROR) return result;
-
-    mLatestSetParameters = CameraParameters(params);
-    CameraParameters p(params);
-    return mHardware->setParameters(p);
-}
-
-// get preview/capture parameters - key/value pairs
-String8 CameraClient::getParameters() const {
-    Mutex::Autolock lock(mLock);
-    // The camera service can unconditionally get the parameters at all times
-    if (CameraThreadState::getCallingPid() != mServicePid && checkPidAndHardware() != NO_ERROR) {
-        return String8();
-    }
-
-    String8 params(mHardware->getParameters().flatten());
-    LOG1("getParameters (pid %d) (%s)", CameraThreadState::getCallingPid(), params.string());
-    return params;
-}
-
-// enable shutter sound
-status_t CameraClient::enableShutterSound(bool enable) {
-    LOG1("enableShutterSound (pid %d)", CameraThreadState::getCallingPid());
-
-    status_t result = checkPidAndHardware();
-    if (result != NO_ERROR) return result;
-
-    if (enable) {
-        mPlayShutterSound = true;
-        return OK;
-    }
-
-    mPlayShutterSound = false;
-    return OK;
-}
-
-status_t CameraClient::sendCommand(int32_t cmd, int32_t arg1, int32_t arg2) {
-    LOG1("sendCommand (pid %d)", CameraThreadState::getCallingPid());
-    int orientation;
-    Mutex::Autolock lock(mLock);
-    status_t result = checkPidAndHardware();
-    if (result != NO_ERROR) return result;
-
-    if (cmd == CAMERA_CMD_SET_DISPLAY_ORIENTATION) {
-        // Mirror the preview if the camera is front-facing.
-        orientation = getOrientation(arg1, mCameraFacing == CAMERA_FACING_FRONT);
-        if (orientation == -1) return BAD_VALUE;
-
-        if (mOrientation != orientation) {
-            mOrientation = orientation;
-            if (mPreviewWindow != 0) {
-                mHardware->setPreviewTransform(mOrientation);
-            }
-        }
-        return OK;
-    } else if (cmd == CAMERA_CMD_ENABLE_SHUTTER_SOUND) {
-        switch (arg1) {
-            case 0:
-                return enableShutterSound(false);
-            case 1:
-                return enableShutterSound(true);
-            default:
-                return BAD_VALUE;
-        }
-        return OK;
-    } else if (cmd == CAMERA_CMD_PLAY_RECORDING_SOUND) {
-        sCameraService->playSound(CameraService::SOUND_RECORDING_START);
-    } else if (cmd == CAMERA_CMD_SET_VIDEO_BUFFER_COUNT) {
-        // Silently ignore this command
-        return INVALID_OPERATION;
-    } else if (cmd == CAMERA_CMD_PING) {
-        // If mHardware is 0, checkPidAndHardware will return error.
-        return OK;
-    }
-
-    return mHardware->sendCommand(cmd, arg1, arg2);
-}
-
-// ----------------------------------------------------------------------------
-
-void CameraClient::enableMsgType(int32_t msgType) {
-    android_atomic_or(msgType, &mMsgEnabled);
-    mHardware->enableMsgType(msgType);
-}
-
-void CameraClient::disableMsgType(int32_t msgType) {
-    android_atomic_and(~msgType, &mMsgEnabled);
-    mHardware->disableMsgType(msgType);
-}
-
-#define CHECK_MESSAGE_INTERVAL 10 // 10ms
-bool CameraClient::lockIfMessageWanted(int32_t msgType) {
-    int sleepCount = 0;
-    while (mMsgEnabled & msgType) {
-        if (mLock.tryLock() == NO_ERROR) {
-            if (sleepCount > 0) {
-                LOG1("lockIfMessageWanted(%d): waited for %d ms",
-                    msgType, sleepCount * CHECK_MESSAGE_INTERVAL);
-            }
-
-            // If messages are no longer enabled after acquiring lock, release and drop message
-            if ((mMsgEnabled & msgType) == 0) {
-                mLock.unlock();
-                break;
-            }
-
-            return true;
-        }
-        if (sleepCount++ == 0) {
-            LOG1("lockIfMessageWanted(%d): enter sleep", msgType);
-        }
-        usleep(CHECK_MESSAGE_INTERVAL * 1000);
-    }
-    ALOGW("lockIfMessageWanted(%d): dropped unwanted message", msgType);
-    return false;
-}
-
-sp<CameraClient> CameraClient::getClientFromCookie(void* user) {
-    String8 cameraId = String8::format("%d", (int)(intptr_t) user);
-    auto clientDescriptor = sCameraService->mActiveClientManager.get(cameraId);
-    if (clientDescriptor != nullptr) {
-        return sp<CameraClient>{
-                static_cast<CameraClient*>(clientDescriptor->getValue().get())};
-    }
-    return sp<CameraClient>{nullptr};
-}
-
-// Callback messages can be dispatched to internal handlers or pass to our
-// client's callback functions, depending on the message type.
-//
-// notifyCallback:
-//      CAMERA_MSG_SHUTTER              handleShutter
-//      (others)                        c->notifyCallback
-// dataCallback:
-//      CAMERA_MSG_PREVIEW_FRAME        handlePreviewData
-//      CAMERA_MSG_POSTVIEW_FRAME       handlePostview
-//      CAMERA_MSG_RAW_IMAGE            handleRawPicture
-//      CAMERA_MSG_COMPRESSED_IMAGE     handleCompressedPicture
-//      (others)                        c->dataCallback
-// dataCallbackTimestamp
-//      (others)                        c->dataCallbackTimestamp
-
-void CameraClient::notifyCallback(int32_t msgType, int32_t ext1,
-        int32_t ext2, void* user) {
-    LOG2("notifyCallback(%d)", msgType);
-
-    sp<CameraClient> client = getClientFromCookie(user);
-    if (client.get() == nullptr) return;
-
-    if (!client->lockIfMessageWanted(msgType)) return;
-
-    switch (msgType) {
-        case CAMERA_MSG_SHUTTER:
-            // ext1 is the dimension of the yuv picture.
-            client->handleShutter();
-            break;
-        default:
-            client->handleGenericNotify(msgType, ext1, ext2);
-            break;
-    }
-}
-
-void CameraClient::dataCallback(int32_t msgType,
-        const sp<IMemory>& dataPtr, camera_frame_metadata_t *metadata, void* user) {
-    LOG2("dataCallback(%d)", msgType);
-
-    sp<CameraClient> client = getClientFromCookie(user);
-    if (client.get() == nullptr) return;
-
-    if (!client->lockIfMessageWanted(msgType)) return;
-    if (dataPtr == 0 && metadata == NULL) {
-        ALOGE("Null data returned in data callback");
-        client->handleGenericNotify(CAMERA_MSG_ERROR, UNKNOWN_ERROR, 0);
-        return;
-    }
-
-    switch (msgType & ~CAMERA_MSG_PREVIEW_METADATA) {
-        case CAMERA_MSG_PREVIEW_FRAME:
-            client->handlePreviewData(msgType, dataPtr, metadata);
-            break;
-        case CAMERA_MSG_POSTVIEW_FRAME:
-            client->handlePostview(dataPtr);
-            break;
-        case CAMERA_MSG_RAW_IMAGE:
-            client->handleRawPicture(dataPtr);
-            break;
-        case CAMERA_MSG_COMPRESSED_IMAGE:
-            client->handleCompressedPicture(dataPtr);
-            break;
-        default:
-            client->handleGenericData(msgType, dataPtr, metadata);
-            break;
-    }
-}
-
-void CameraClient::dataCallbackTimestamp(nsecs_t timestamp,
-        int32_t msgType, const sp<IMemory>& dataPtr, void* user) {
-    LOG2("dataCallbackTimestamp(%d)", msgType);
-
-    sp<CameraClient> client = getClientFromCookie(user);
-    if (client.get() == nullptr) return;
-
-    if (!client->lockIfMessageWanted(msgType)) return;
-
-    if (dataPtr == 0) {
-        ALOGE("Null data returned in data with timestamp callback");
-        client->handleGenericNotify(CAMERA_MSG_ERROR, UNKNOWN_ERROR, 0);
-        return;
-    }
-
-    client->handleGenericDataTimestamp(timestamp, msgType, dataPtr);
-}
-
-void CameraClient::handleCallbackTimestampBatch(
-        int32_t msgType, const std::vector<HandleTimestampMessage>& msgs, void* user) {
-    LOG2("dataCallbackTimestampBatch");
-    sp<CameraClient> client = getClientFromCookie(user);
-    if (client.get() == nullptr) return;
-    if (!client->lockIfMessageWanted(msgType)) return;
-
-    sp<hardware::ICameraClient> c = client->mRemoteCallback;
-    client->mLock.unlock();
-    if (c != 0 && msgs.size() > 0) {
-        size_t n = msgs.size();
-        std::vector<nsecs_t> timestamps;
-        std::vector<native_handle_t*> handles;
-        timestamps.reserve(n);
-        handles.reserve(n);
-        for (auto& msg : msgs) {
-            native_handle_t* handle = nullptr;
-            if (msg.dataPtr->size() != sizeof(VideoNativeHandleMetadata)) {
-                ALOGE("%s: dataPtr does not contain VideoNativeHandleMetadata!", __FUNCTION__);
-                return;
-            }
-            // TODO: Using unsecurePointer() has some associated security pitfalls
-            //       (see declaration for details).
-            //       Either document why it is safe in this case or address the
-            //       issue (e.g. by copying).
-            VideoNativeHandleMetadata *metadata =
-                (VideoNativeHandleMetadata*)(msg.dataPtr->unsecurePointer());
-            if (metadata->eType == kMetadataBufferTypeNativeHandleSource) {
-                handle = metadata->pHandle;
-            }
-
-            if (handle == nullptr) {
-                ALOGE("%s: VideoNativeHandleMetadata type mismatch or null handle passed!",
-                        __FUNCTION__);
-                return;
-            }
-            {
-                Mutex::Autolock l(client->mAvailableCallbackBuffersLock);
-                client->mAvailableCallbackBuffers.push_back(msg.dataPtr);
-            }
-            timestamps.push_back(msg.timestamp);
-            handles.push_back(handle);
-        }
-        c->recordingFrameHandleCallbackTimestampBatch(timestamps, handles);
-    }
-}
-
-// snapshot taken callback
-void CameraClient::handleShutter(void) {
-    if (mPlayShutterSound) {
-        sCameraService->playSound(CameraService::SOUND_SHUTTER);
-    }
-
-    sp<hardware::ICameraClient> c = mRemoteCallback;
-    if (c != 0) {
-        mLock.unlock();
-        c->notifyCallback(CAMERA_MSG_SHUTTER, 0, 0);
-        if (!lockIfMessageWanted(CAMERA_MSG_SHUTTER)) return;
-    }
-    disableMsgType(CAMERA_MSG_SHUTTER);
-
-    // Shutters only happen in response to takePicture, so mark device as
-    // idle now, until preview is restarted
-    sCameraService->updateProxyDeviceState(
-        hardware::ICameraServiceProxy::CAMERA_STATE_IDLE,
-        mCameraIdStr, mCameraFacing, mClientPackageName,
-        hardware::ICameraServiceProxy::CAMERA_API_LEVEL_1);
-
-    mLock.unlock();
-}
-
-// preview callback - frame buffer update
-void CameraClient::handlePreviewData(int32_t msgType,
-                                              const sp<IMemory>& mem,
-                                              camera_frame_metadata_t *metadata) {
-    ssize_t offset;
-    size_t size;
-    sp<IMemoryHeap> heap = mem->getMemory(&offset, &size);
-
-    // local copy of the callback flags
-    int flags = mPreviewCallbackFlag;
-
-    // is callback enabled?
-    if (!(flags & CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK)) {
-        // If the enable bit is off, the copy-out and one-shot bits are ignored
-        LOG2("frame callback is disabled");
-        mLock.unlock();
-        return;
-    }
-
-    // hold a strong pointer to the client
-    sp<hardware::ICameraClient> c = mRemoteCallback;
-
-    // clear callback flags if no client or one-shot mode
-    if (c == 0 || (mPreviewCallbackFlag & CAMERA_FRAME_CALLBACK_FLAG_ONE_SHOT_MASK)) {
-        LOG2("Disable preview callback");
-        mPreviewCallbackFlag &= ~(CAMERA_FRAME_CALLBACK_FLAG_ONE_SHOT_MASK |
-                                  CAMERA_FRAME_CALLBACK_FLAG_COPY_OUT_MASK |
-                                  CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK);
-        disableMsgType(CAMERA_MSG_PREVIEW_FRAME);
-    }
-
-    if (c != 0) {
-        // Is the received frame copied out or not?
-        if (flags & CAMERA_FRAME_CALLBACK_FLAG_COPY_OUT_MASK) {
-            LOG2("frame is copied");
-            copyFrameAndPostCopiedFrame(msgType, c, heap, offset, size, metadata);
-        } else {
-            LOG2("frame is forwarded");
-            mLock.unlock();
-            c->dataCallback(msgType, mem, metadata);
-        }
-    } else {
-        mLock.unlock();
-    }
-}
-
-// picture callback - postview image ready
-void CameraClient::handlePostview(const sp<IMemory>& mem) {
-    disableMsgType(CAMERA_MSG_POSTVIEW_FRAME);
-
-    sp<hardware::ICameraClient> c = mRemoteCallback;
-    mLock.unlock();
-    if (c != 0) {
-        c->dataCallback(CAMERA_MSG_POSTVIEW_FRAME, mem, NULL);
-    }
-}
-
-// picture callback - raw image ready
-void CameraClient::handleRawPicture(const sp<IMemory>& mem) {
-    disableMsgType(CAMERA_MSG_RAW_IMAGE);
-
-    ssize_t offset;
-    size_t size;
-    sp<IMemoryHeap> heap = mem->getMemory(&offset, &size);
-
-    sp<hardware::ICameraClient> c = mRemoteCallback;
-    mLock.unlock();
-    if (c != 0) {
-        c->dataCallback(CAMERA_MSG_RAW_IMAGE, mem, NULL);
-    }
-}
-
-// picture callback - compressed picture ready
-void CameraClient::handleCompressedPicture(const sp<IMemory>& mem) {
-    disableMsgType(CAMERA_MSG_COMPRESSED_IMAGE);
-
-    sp<hardware::ICameraClient> c = mRemoteCallback;
-    mLock.unlock();
-    if (c != 0) {
-        c->dataCallback(CAMERA_MSG_COMPRESSED_IMAGE, mem, NULL);
-    }
-}
-
-
-void CameraClient::handleGenericNotify(int32_t msgType,
-    int32_t ext1, int32_t ext2) {
-    sp<hardware::ICameraClient> c = mRemoteCallback;
-    mLock.unlock();
-    if (c != 0) {
-        c->notifyCallback(msgType, ext1, ext2);
-    }
-}
-
-void CameraClient::handleGenericData(int32_t msgType,
-    const sp<IMemory>& dataPtr, camera_frame_metadata_t *metadata) {
-    sp<hardware::ICameraClient> c = mRemoteCallback;
-    mLock.unlock();
-    if (c != 0) {
-        c->dataCallback(msgType, dataPtr, metadata);
-    }
-}
-
-void CameraClient::handleGenericDataTimestamp(nsecs_t timestamp,
-    int32_t msgType, const sp<IMemory>& dataPtr) {
-    sp<hardware::ICameraClient> c = mRemoteCallback;
-    mLock.unlock();
-    if (c != 0 && dataPtr != nullptr) {
-        native_handle_t* handle = nullptr;
-
-        // Check if dataPtr contains a VideoNativeHandleMetadata.
-        if (dataPtr->size() == sizeof(VideoNativeHandleMetadata)) {
-            // TODO: Using unsecurePointer() has some associated security pitfalls
-            //       (see declaration for details).
-            //       Either document why it is safe in this case or address the
-            //       issue (e.g. by copying).
-            VideoNativeHandleMetadata *metadata =
-                (VideoNativeHandleMetadata*)(dataPtr->unsecurePointer());
-            if (metadata->eType == kMetadataBufferTypeNativeHandleSource) {
-                handle = metadata->pHandle;
-            }
-        }
-
-        // If dataPtr contains a native handle, send it via recordingFrameHandleCallbackTimestamp.
-        if (handle != nullptr) {
-            {
-                Mutex::Autolock l(mAvailableCallbackBuffersLock);
-                mAvailableCallbackBuffers.push_back(dataPtr);
-            }
-            c->recordingFrameHandleCallbackTimestamp(timestamp, handle);
-        } else {
-            c->dataCallbackTimestamp(timestamp, msgType, dataPtr);
-        }
-    }
-}
-
-void CameraClient::copyFrameAndPostCopiedFrame(
-        int32_t msgType, const sp<hardware::ICameraClient>& client,
-        const sp<IMemoryHeap>& heap, size_t offset, size_t size,
-        camera_frame_metadata_t *metadata) {
-    LOG2("copyFrameAndPostCopiedFrame");
-    // It is necessary to copy out of pmem before sending this to
-    // the callback. For efficiency, reuse the same MemoryHeapBase
-    // provided it's big enough. Don't allocate the memory or
-    // perform the copy if there's no callback.
-    // hold the preview lock while we grab a reference to the preview buffer
-    sp<MemoryHeapBase> previewBuffer;
-
-    if (mPreviewBuffer == 0) {
-        mPreviewBuffer = new MemoryHeapBase(size, 0, NULL);
-    } else if (size > mPreviewBuffer->virtualSize()) {
-        mPreviewBuffer.clear();
-        mPreviewBuffer = new MemoryHeapBase(size, 0, NULL);
-    }
-    if (mPreviewBuffer == 0) {
-        ALOGE("failed to allocate space for preview buffer");
-        mLock.unlock();
-        return;
-    }
-    previewBuffer = mPreviewBuffer;
-
-    void* previewBufferBase = previewBuffer->base();
-    void* heapBase = heap->base();
-
-    if (heapBase == MAP_FAILED) {
-        ALOGE("%s: Failed to mmap heap for preview frame.", __FUNCTION__);
-        mLock.unlock();
-        return;
-    } else if (previewBufferBase == MAP_FAILED) {
-        ALOGE("%s: Failed to mmap preview buffer for preview frame.", __FUNCTION__);
-        mLock.unlock();
-        return;
-    }
-
-    memcpy(previewBufferBase, (uint8_t *) heapBase + offset, size);
-
-    sp<MemoryBase> frame = new MemoryBase(previewBuffer, 0, size);
-    if (frame == 0) {
-        ALOGE("failed to allocate space for frame callback");
-        mLock.unlock();
-        return;
-    }
-
-    mLock.unlock();
-    client->dataCallback(msgType, frame, metadata);
-}
-
-int CameraClient::getOrientation(int degrees, bool mirror) {
-    if (!mirror) {
-        if (degrees == 0) return 0;
-        else if (degrees == 90) return HAL_TRANSFORM_ROT_90;
-        else if (degrees == 180) return HAL_TRANSFORM_ROT_180;
-        else if (degrees == 270) return HAL_TRANSFORM_ROT_270;
-    } else {  // Do mirror (horizontal flip)
-        if (degrees == 0) {           // FLIP_H and ROT_0
-            return HAL_TRANSFORM_FLIP_H;
-        } else if (degrees == 90) {   // FLIP_H and ROT_90
-            return HAL_TRANSFORM_FLIP_H | HAL_TRANSFORM_ROT_90;
-        } else if (degrees == 180) {  // FLIP_H and ROT_180
-            return HAL_TRANSFORM_FLIP_V;
-        } else if (degrees == 270) {  // FLIP_H and ROT_270
-            return HAL_TRANSFORM_FLIP_V | HAL_TRANSFORM_ROT_90;
-        }
-    }
-    ALOGE("Invalid setDisplayOrientation degrees=%d", degrees);
-    return -1;
-}
-
-status_t CameraClient::setVideoTarget(const sp<IGraphicBufferProducer>& bufferProducer) {
-    (void)bufferProducer;
-    ALOGE("%s: %d: CameraClient doesn't support setting a video target.", __FUNCTION__, __LINE__);
-    return INVALID_OPERATION;
-}
-
-status_t CameraClient::setAudioRestriction(int mode) {
-    if (!isValidAudioRestriction(mode)) {
-        ALOGE("%s: invalid audio restriction mode %d", __FUNCTION__, mode);
-        return BAD_VALUE;
-    }
-
-    Mutex::Autolock lock(mLock);
-    if (checkPidAndHardware() != NO_ERROR) {
-        return INVALID_OPERATION;
-    }
-    return BasicClient::setAudioRestriction(mode);
-}
-
-int32_t CameraClient::getGlobalAudioRestriction() {
-    Mutex::Autolock lock(mLock);
-    if (checkPidAndHardware() != NO_ERROR) {
-        return INVALID_OPERATION;
-    }
-    return BasicClient::getServiceAudioRestriction();
-}
-
-// API1->Device1 does not support this feature
-status_t CameraClient::setRotateAndCropOverride(uint8_t /*rotateAndCrop*/) {
-    return OK;
-}
-
-}; // namespace android
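The removed getOrientation() helper above folds the mirror flag into the rotation using the identity that a horizontal flip followed by a 180-degree rotation equals a vertical flip, which is why the mirrored 180-degree case returns HAL_TRANSFORM_FLIP_V rather than FLIP_H | ROT_180. A minimal standalone C++ sketch (illustrative only, not part of this patch) that checks that identity on normalized coordinates:

    // Verifies that FLIP_H followed by ROT_180 behaves like FLIP_V on a normalized (x, y) point.
    #include <cassert>
    #include <cstdio>
    #include <utility>

    using Pt = std::pair<float, float>;  // normalized coordinates in [0, 1]

    static Pt flipH(Pt p)  { return {1.0f - p.first, p.second}; }
    static Pt flipV(Pt p)  { return {p.first, 1.0f - p.second}; }
    static Pt rot180(Pt p) { return {1.0f - p.first, 1.0f - p.second}; }

    int main() {
        const Pt sample{0.25f, 0.75f};
        assert(rot180(flipH(sample)) == flipV(sample));
        std::puts("FLIP_H composed with ROT_180 matches FLIP_V");
        return 0;
    }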
diff --git a/services/camera/libcameraservice/api1/CameraClient.h b/services/camera/libcameraservice/api1/CameraClient.h
deleted file mode 100644
index a7eb960..0000000
--- a/services/camera/libcameraservice/api1/CameraClient.h
+++ /dev/null
@@ -1,190 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_SERVERS_CAMERA_CAMERACLIENT_H
-#define ANDROID_SERVERS_CAMERA_CAMERACLIENT_H
-
-#include "CameraService.h"
-
-namespace android {
-
-class MemoryHeapBase;
-class CameraHardwareInterface;
-
-/**
- * Interface between android.hardware.Camera API and Camera HAL device for version
- * CAMERA_DEVICE_API_VERSION_1_0.
- */
-
-class CameraClient : public CameraService::Client
-{
-public:
-    // ICamera interface (see ICamera for details)
-    virtual binder::Status  disconnect();
-    virtual status_t        connect(const sp<hardware::ICameraClient>& client);
-    virtual status_t        lock();
-    virtual status_t        unlock();
-    virtual status_t        setPreviewTarget(const sp<IGraphicBufferProducer>& bufferProducer);
-    virtual void            setPreviewCallbackFlag(int flag);
-    virtual status_t        setPreviewCallbackTarget(
-            const sp<IGraphicBufferProducer>& callbackProducer);
-    virtual status_t        startPreview();
-    virtual void            stopPreview();
-    virtual bool            previewEnabled();
-    virtual status_t        setVideoBufferMode(int32_t videoBufferMode);
-    virtual status_t        startRecording();
-    virtual void            stopRecording();
-    virtual bool            recordingEnabled();
-    virtual void            releaseRecordingFrame(const sp<IMemory>& mem);
-    virtual void            releaseRecordingFrameHandle(native_handle_t *handle);
-    virtual void            releaseRecordingFrameHandleBatch(
-                                    const std::vector<native_handle_t*>& handles);
-    virtual status_t        autoFocus();
-    virtual status_t        cancelAutoFocus();
-    virtual status_t        takePicture(int msgType);
-    virtual status_t        setParameters(const String8& params);
-    virtual String8         getParameters() const;
-    virtual status_t        sendCommand(int32_t cmd, int32_t arg1, int32_t arg2);
-    virtual status_t        setVideoTarget(const sp<IGraphicBufferProducer>& bufferProducer);
-    virtual status_t        setAudioRestriction(int mode);
-    virtual int32_t         getGlobalAudioRestriction();
-
-    virtual status_t        setRotateAndCropOverride(uint8_t override);
-
-    // Interface used by CameraService
-    CameraClient(const sp<CameraService>& cameraService,
-            const sp<hardware::ICameraClient>& cameraClient,
-            const String16& clientPackageName,
-            const std::unique_ptr<String16>& clientFeatureId,
-            int cameraId,
-            int cameraFacing,
-            int clientPid,
-            int clientUid,
-            int servicePid);
-    ~CameraClient();
-
-    virtual status_t initialize(sp<CameraProviderManager> manager,
-            const String8& monitorTags) override;
-
-    virtual status_t dump(int fd, const Vector<String16>& args);
-
-    virtual status_t dumpClient(int fd, const Vector<String16>& args);
-
-private:
-
-    // check whether the calling process matches mClientPid.
-    status_t                checkPid() const;
-    status_t                checkPidAndHardware() const;  // also check mHardware != 0
-
-    // these are internal functions used to set up preview buffers
-    status_t                registerPreviewBuffers();
-
-    // camera operation mode
-    enum camera_mode {
-        CAMERA_PREVIEW_MODE   = 0,  // frame automatically released
-        CAMERA_RECORDING_MODE = 1,  // frame has to be explicitly released by releaseRecordingFrame()
-    };
-    // these are internal functions used for preview/recording
-    status_t                startCameraMode(camera_mode mode);
-    status_t                startPreviewMode();
-    status_t                startRecordingMode();
-
-    // internal function used by sendCommand to enable/disable shutter sound.
-    status_t                enableShutterSound(bool enable);
-
-    static sp<CameraClient>        getClientFromCookie(void* user);
-
-    // these are static callback functions
-    static void             notifyCallback(int32_t msgType, int32_t ext1, int32_t ext2, void* user);
-    static void             dataCallback(int32_t msgType, const sp<IMemory>& dataPtr,
-            camera_frame_metadata_t *metadata, void* user);
-    static void             dataCallbackTimestamp(nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr, void* user);
-    static void             handleCallbackTimestampBatch(
-                                    int32_t msgType, const std::vector<HandleTimestampMessage>&, void* user);
-    // handlers for messages
-    void                    handleShutter(void);
-    void                    handlePreviewData(int32_t msgType, const sp<IMemory>& mem,
-            camera_frame_metadata_t *metadata);
-    void                    handlePostview(const sp<IMemory>& mem);
-    void                    handleRawPicture(const sp<IMemory>& mem);
-    void                    handleCompressedPicture(const sp<IMemory>& mem);
-    void                    handleGenericNotify(int32_t msgType, int32_t ext1, int32_t ext2);
-    void                    handleGenericData(int32_t msgType, const sp<IMemory>& dataPtr,
-            camera_frame_metadata_t *metadata);
-    void                    handleGenericDataTimestamp(nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr);
-
-    void                    copyFrameAndPostCopiedFrame(
-        int32_t msgType,
-        const sp<hardware::ICameraClient>& client,
-        const sp<IMemoryHeap>& heap,
-        size_t offset, size_t size,
-        camera_frame_metadata_t *metadata);
-
-    int                     getOrientation(int orientation, bool mirror);
-
-    status_t                setPreviewWindow(
-        const sp<IBinder>& binder,
-        const sp<ANativeWindow>& window);
-
-
-    // these are initialized in the constructor.
-    sp<CameraHardwareInterface>     mHardware;       // cleared after disconnect()
-    int                             mPreviewCallbackFlag;
-    int                             mOrientation;     // Current display orientation
-    bool                            mPlayShutterSound;
-    bool                            mLegacyMode; // camera2 api legacy mode?
-
-    // Ensures atomicity among the public methods
-    mutable Mutex                   mLock;
-    // This is the binder of the Surface used as the preview target.
-    sp<IBinder>                     mSurface;
-    sp<ANativeWindow>               mPreviewWindow;
-
-    // If the user wants us to return a copy of the preview frame (instead
-    // of the original one), we allocate mPreviewBuffer and reuse it if possible.
-    sp<MemoryHeapBase>              mPreviewBuffer;
-
-    // Debugging information
-    CameraParameters                mLatestSetParameters;
-
-    // mAvailableCallbackBuffers stores the sp<IMemory> buffers that the HAL uses to send
-    // VideoNativeHandleMetadata. They are reused to send VideoNativeHandleMetadata back to the
-    // HAL when the camera receives the native handle from releaseRecordingFrameHandle.
-    Mutex                           mAvailableCallbackBuffersLock;
-    std::vector<sp<IMemory>>        mAvailableCallbackBuffers;
-
-    // We need to avoid the deadlock when the incoming command thread and
-    // the CameraHardwareInterface callback thread both want to grab mLock.
-    // An extra flag is used to tell the callback thread that it should stop
-    // trying to deliver the callback messages if the client is not
-    // interested in it anymore. For example, if the client is calling
-    // stopPreview(), the preview frame messages do not need to be delivered
-    // anymore.
-
-    // These functions take the same parameter as the enableMsgType() and
-    // disableMsgType() functions in CameraHardwareInterface.
-    void                    enableMsgType(int32_t msgType);
-    void                    disableMsgType(int32_t msgType);
-    volatile int32_t        mMsgEnabled;
-
-    // This function keeps trying to grab mLock, or gives up if the message
-    // is found to be disabled. It returns true if mLock is grabbed.
-    bool                    lockIfMessageWanted(int32_t msgType);
-};
-
-}
-
-#endif
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.cpp b/services/camera/libcameraservice/api1/client2/Parameters.cpp
index dbc863b..d543cab 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.cpp
+++ b/services/camera/libcameraservice/api1/client2/Parameters.cpp
@@ -3253,6 +3253,8 @@
 
 status_t Parameters::calculatePictureFovs(float *horizFov, float *vertFov)
         const {
+    // For external cameras, use FOVs = (-1.0, -1.0) as default values. Calculate
+    // the FOVs only if there is sufficient information.
     if (fastInfo.isExternalCamera) {
         if (horizFov != NULL) {
             *horizFov = -1.0;
@@ -3260,16 +3262,29 @@
         if (vertFov != NULL) {
             *vertFov = -1.0;
         }
-        return OK;
     }
 
     camera_metadata_ro_entry_t sensorSize =
             staticInfo(ANDROID_SENSOR_INFO_PHYSICAL_SIZE, 2, 2);
-    if (!sensorSize.count) return NO_INIT;
+    if (!sensorSize.count) {
+        // This is non-fatal for external cameras since default values are already set.
+        if (fastInfo.isExternalCamera) {
+            return OK;
+        } else {
+            return NO_INIT;
+        }
+    }
 
     camera_metadata_ro_entry_t pixelArraySize =
             staticInfo(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE, 2, 2);
-    if (!pixelArraySize.count) return NO_INIT;
+    if (!pixelArraySize.count) {
+        // This is non-fatal for external cameras since default values are already set.
+        if (fastInfo.isExternalCamera) {
+            return OK;
+        } else {
+            return NO_INIT;
+        }
+    }
 
     float arrayAspect = static_cast<float>(fastInfo.arrayWidth) /
             fastInfo.arrayHeight;
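The Parameters.cpp change above no longer returns early for external cameras; it sets the (-1.0, -1.0) default FOVs first and then treats missing sensor metadata as non-fatal only for them. A simplified standalone C++ sketch of that control flow (hypothetical helper names, standard types in place of camera_metadata entries; the real calculation also accounts for pixel-array cropping):

    #include <cmath>
    #include <cstdio>
    #include <optional>
    #include <utility>

    // Returns std::nullopt to model the NO_INIT error path for built-in cameras.
    static std::optional<std::pair<float, float>> calculateFovs(
            bool isExternalCamera,
            std::optional<std::pair<float, float>> sensorSizeMm,  // physical sensor size, if known
            float focalLengthMm) {
        std::pair<float, float> fovs{-1.0f, -1.0f};  // defaults, meaningful for external cameras
        if (!sensorSizeMm) {
            if (isExternalCamera) return fovs;  // non-fatal: defaults already set
            return std::nullopt;                // fatal: no way to compute real FOVs
        }
        const float kRadToDeg = 180.0f / 3.14159265f;
        fovs.first  = 2.0f * std::atan2(sensorSizeMm->first  / 2.0f, focalLengthMm) * kRadToDeg;
        fovs.second = 2.0f * std::atan2(sensorSizeMm->second / 2.0f, focalLengthMm) * kRadToDeg;
        return fovs;
    }

    int main() {
        auto ext = calculateFovs(true, std::nullopt, 4.5f);
        auto in  = calculateFovs(false, std::make_pair(5.6f, 4.2f), 4.5f);
        std::printf("external: (%.1f, %.1f)\n", ext->first, ext->second);
        std::printf("built-in: (%.1f, %.1f)\n", in->first, in->second);
        return 0;
    }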
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index e35b436..e80838b 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -21,6 +21,7 @@
 #include <cutils/properties.h>
 #include <utils/CameraThreadState.h>
 #include <utils/Log.h>
+#include <utils/SessionConfigurationUtils.h>
 #include <utils/Trace.h>
 #include <gui/Surface.h>
 #include <camera/camera2/CaptureRequest.h>
@@ -54,7 +55,7 @@
         const sp<CameraService>& cameraService,
         const sp<hardware::camera2::ICameraDeviceCallbacks>& remoteCallback,
         const String16& clientPackageName,
-        const std::unique_ptr<String16>& clientFeatureId,
+        const std::optional<String16>& clientFeatureId,
         const String8& cameraId,
         int api1CameraId,
         int cameraFacing,
@@ -80,7 +81,7 @@
 CameraDeviceClient::CameraDeviceClient(const sp<CameraService>& cameraService,
         const sp<hardware::camera2::ICameraDeviceCallbacks>& remoteCallback,
         const String16& clientPackageName,
-        const std::unique_ptr<String16>& clientFeatureId,
+        const std::optional<String16>& clientFeatureId,
         const String8& cameraId,
         int cameraFacing,
         int clientPid,
@@ -492,7 +493,8 @@
         return STATUS_ERROR(CameraService::ERROR_DISCONNECTED, "Camera device no longer alive");
     }
 
-    res = checkOperatingMode(operatingMode, mDevice->info(), mCameraIdStr);
+    res = SessionConfigurationUtils::checkOperatingMode(operatingMode, mDevice->info(),
+            mCameraIdStr);
     if (!res.isOk()) {
         return res;
     }
@@ -550,247 +552,6 @@
     return res;
 }
 
-binder::Status CameraDeviceClient::checkSurfaceType(size_t numBufferProducers,
-        bool deferredConsumer, int surfaceType)  {
-    if (numBufferProducers > MAX_SURFACES_PER_STREAM) {
-        ALOGE("%s: GraphicBufferProducer count %zu for stream exceeds limit of %d",
-                __FUNCTION__, numBufferProducers, MAX_SURFACES_PER_STREAM);
-        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "Surface count is too high");
-    } else if ((numBufferProducers == 0) && (!deferredConsumer)) {
-        ALOGE("%s: Number of consumers cannot be smaller than 1", __FUNCTION__);
-        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "No valid consumers.");
-    }
-
-    bool validSurfaceType = ((surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW) ||
-            (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_TEXTURE));
-
-    if (deferredConsumer && !validSurfaceType) {
-        ALOGE("%s: Target surface has invalid surfaceType = %d.", __FUNCTION__, surfaceType);
-        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "Target Surface is invalid");
-    }
-
-    return binder::Status::ok();
-}
-
-binder::Status CameraDeviceClient::checkPhysicalCameraId(
-        const std::vector<std::string> &physicalCameraIds, const String8 &physicalCameraId,
-        const String8 &logicalCameraId) {
-    if (physicalCameraId.size() == 0) {
-        return binder::Status::ok();
-    }
-    if (std::find(physicalCameraIds.begin(), physicalCameraIds.end(),
-        physicalCameraId.string()) == physicalCameraIds.end()) {
-        String8 msg = String8::format("Camera %s: Camera doesn't support physicalCameraId %s.",
-                logicalCameraId.string(), physicalCameraId.string());
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
-    }
-    return binder::Status::ok();
-}
-
-binder::Status CameraDeviceClient::checkOperatingMode(int operatingMode,
-        const CameraMetadata &staticInfo, const String8 &cameraId) {
-    if (operatingMode < 0) {
-        String8 msg = String8::format(
-            "Camera %s: Invalid operating mode %d requested", cameraId.string(), operatingMode);
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
-                msg.string());
-    }
-
-    bool isConstrainedHighSpeed = (operatingMode == ICameraDeviceUser::CONSTRAINED_HIGH_SPEED_MODE);
-    if (isConstrainedHighSpeed) {
-        camera_metadata_ro_entry_t entry = staticInfo.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
-        bool isConstrainedHighSpeedSupported = false;
-        for(size_t i = 0; i < entry.count; ++i) {
-            uint8_t capability = entry.data.u8[i];
-            if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO) {
-                isConstrainedHighSpeedSupported = true;
-                break;
-            }
-        }
-        if (!isConstrainedHighSpeedSupported) {
-            String8 msg = String8::format(
-                "Camera %s: Try to create a constrained high speed configuration on a device"
-                " that doesn't support it.", cameraId.string());
-            ALOGE("%s: %s", __FUNCTION__, msg.string());
-            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
-                    msg.string());
-        }
-    }
-
-    return binder::Status::ok();
-}
-
-void CameraDeviceClient::mapStreamInfo(const OutputStreamInfo &streamInfo,
-            camera3_stream_rotation_t rotation, String8 physicalId,
-            hardware::camera::device::V3_4::Stream *stream /*out*/) {
-    if (stream == nullptr) {
-        return;
-    }
-
-    stream->v3_2.streamType = hardware::camera::device::V3_2::StreamType::OUTPUT;
-    stream->v3_2.width = streamInfo.width;
-    stream->v3_2.height = streamInfo.height;
-    stream->v3_2.format = Camera3Device::mapToPixelFormat(streamInfo.format);
-    auto u = streamInfo.consumerUsage;
-    camera3::Camera3OutputStream::applyZSLUsageQuirk(streamInfo.format, &u);
-    stream->v3_2.usage = Camera3Device::mapToConsumerUsage(u);
-    stream->v3_2.dataSpace = Camera3Device::mapToHidlDataspace(streamInfo.dataSpace);
-    stream->v3_2.rotation = Camera3Device::mapToStreamRotation(rotation);
-    stream->v3_2.id = -1; // Invalid stream id
-    stream->physicalCameraId = std::string(physicalId.string());
-    stream->bufferSize = 0;
-}
-
-binder::Status
-CameraDeviceClient::convertToHALStreamCombination(const SessionConfiguration& sessionConfiguration,
-        const String8 &logicalCameraId, const CameraMetadata &deviceInfo,
-        metadataGetter getMetadata, const std::vector<std::string> &physicalCameraIds,
-        hardware::camera::device::V3_4::StreamConfiguration &streamConfiguration,
-        bool *unsupported) {
-    auto operatingMode = sessionConfiguration.getOperatingMode();
-    binder::Status res = checkOperatingMode(operatingMode, deviceInfo, logicalCameraId);
-    if (!res.isOk()) {
-        return res;
-    }
-
-    if (unsupported == nullptr) {
-        String8 msg("unsupported nullptr");
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
-    }
-    *unsupported = false;
-    auto ret = Camera3Device::mapToStreamConfigurationMode(
-            static_cast<camera3_stream_configuration_mode_t> (operatingMode),
-            /*out*/ &streamConfiguration.operationMode);
-    if (ret != OK) {
-        String8 msg = String8::format(
-            "Camera %s: Failed mapping operating mode %d requested: %s (%d)",
-            logicalCameraId.string(), operatingMode, strerror(-ret), ret);
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
-                msg.string());
-    }
-
-    bool isInputValid = (sessionConfiguration.getInputWidth() > 0) &&
-            (sessionConfiguration.getInputHeight() > 0) &&
-            (sessionConfiguration.getInputFormat() > 0);
-    auto outputConfigs = sessionConfiguration.getOutputConfigurations();
-    size_t streamCount = outputConfigs.size();
-    streamCount = isInputValid ? streamCount + 1 : streamCount;
-    streamConfiguration.streams.resize(streamCount);
-    size_t streamIdx = 0;
-    if (isInputValid) {
-        streamConfiguration.streams[streamIdx++] = {{/*streamId*/0,
-                hardware::camera::device::V3_2::StreamType::INPUT,
-                static_cast<uint32_t> (sessionConfiguration.getInputWidth()),
-                static_cast<uint32_t> (sessionConfiguration.getInputHeight()),
-                Camera3Device::mapToPixelFormat(sessionConfiguration.getInputFormat()),
-                /*usage*/ 0, HAL_DATASPACE_UNKNOWN,
-                hardware::camera::device::V3_2::StreamRotation::ROTATION_0},
-                /*physicalId*/ nullptr, /*bufferSize*/0};
-    }
-
-    for (const auto &it : outputConfigs) {
-        const std::vector<sp<IGraphicBufferProducer>>& bufferProducers =
-            it.getGraphicBufferProducers();
-        bool deferredConsumer = it.isDeferred();
-        String8 physicalCameraId = String8(it.getPhysicalCameraId());
-        size_t numBufferProducers = bufferProducers.size();
-        bool isStreamInfoValid = false;
-        OutputStreamInfo streamInfo;
-
-        res = checkSurfaceType(numBufferProducers, deferredConsumer, it.getSurfaceType());
-        if (!res.isOk()) {
-            return res;
-        }
-        res = checkPhysicalCameraId(physicalCameraIds, physicalCameraId,
-                logicalCameraId);
-        if (!res.isOk()) {
-            return res;
-        }
-
-        if (deferredConsumer) {
-            streamInfo.width = it.getWidth();
-            streamInfo.height = it.getHeight();
-            streamInfo.format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
-            streamInfo.dataSpace = android_dataspace_t::HAL_DATASPACE_UNKNOWN;
-            auto surfaceType = it.getSurfaceType();
-            streamInfo.consumerUsage = GraphicBuffer::USAGE_HW_TEXTURE;
-            if (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW) {
-                streamInfo.consumerUsage |= GraphicBuffer::USAGE_HW_COMPOSER;
-            }
-            mapStreamInfo(streamInfo, CAMERA3_STREAM_ROTATION_0, physicalCameraId,
-                    &streamConfiguration.streams[streamIdx++]);
-            isStreamInfoValid = true;
-
-            if (numBufferProducers == 0) {
-                continue;
-            }
-        }
-
-        for (auto& bufferProducer : bufferProducers) {
-            sp<Surface> surface;
-            const CameraMetadata &physicalDeviceInfo = getMetadata(physicalCameraId);
-            res = createSurfaceFromGbp(streamInfo, isStreamInfoValid, surface, bufferProducer,
-                    logicalCameraId,
-                    physicalCameraId.size() > 0 ? physicalDeviceInfo : deviceInfo );
-
-            if (!res.isOk())
-                return res;
-
-            if (!isStreamInfoValid) {
-                bool isDepthCompositeStream =
-                        camera3::DepthCompositeStream::isDepthCompositeStream(surface);
-                bool isHeicCompositeStream =
-                        camera3::HeicCompositeStream::isHeicCompositeStream(surface);
-                if (isDepthCompositeStream || isHeicCompositeStream) {
-                    // We need to take into account that composite streams can have
-                    // additional internal camera streams.
-                    std::vector<OutputStreamInfo> compositeStreams;
-                    if (isDepthCompositeStream) {
-                        ret = camera3::DepthCompositeStream::getCompositeStreamInfo(streamInfo,
-                                deviceInfo, &compositeStreams);
-                    } else {
-                        ret = camera3::HeicCompositeStream::getCompositeStreamInfo(streamInfo,
-                            deviceInfo, &compositeStreams);
-                    }
-                    if (ret != OK) {
-                        String8 msg = String8::format(
-                                "Camera %s: Failed adding composite streams: %s (%d)",
-                                logicalCameraId.string(), strerror(-ret), ret);
-                        ALOGE("%s: %s", __FUNCTION__, msg.string());
-                        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
-                    }
-
-                    if (compositeStreams.size() == 0) {
-                        // No internal streams means composite stream not
-                        // supported.
-                        *unsupported = true;
-                        return binder::Status::ok();
-                    } else if (compositeStreams.size() > 1) {
-                        streamCount += compositeStreams.size() - 1;
-                        streamConfiguration.streams.resize(streamCount);
-                    }
-
-                    for (const auto& compositeStream : compositeStreams) {
-                        mapStreamInfo(compositeStream,
-                                static_cast<camera3_stream_rotation_t> (it.getRotation()),
-                                physicalCameraId, &streamConfiguration.streams[streamIdx++]);
-                    }
-                } else {
-                    mapStreamInfo(streamInfo,
-                            static_cast<camera3_stream_rotation_t> (it.getRotation()),
-                            physicalCameraId, &streamConfiguration.streams[streamIdx++]);
-                }
-                isStreamInfoValid = true;
-            }
-        }
-    }
-    return binder::Status::ok();
-}
-
 binder::Status CameraDeviceClient::isSessionConfigurationSupported(
         const SessionConfiguration& sessionConfiguration, bool *status /*out*/) {
     ATRACE_CALL();
@@ -806,7 +567,8 @@
     }
 
     auto operatingMode = sessionConfiguration.getOperatingMode();
-    res = checkOperatingMode(operatingMode, mDevice->info(), mCameraIdStr);
+    res = SessionConfigurationUtils::checkOperatingMode(operatingMode, mDevice->info(),
+            mCameraIdStr);
     if (!res.isOk()) {
         return res;
     }
@@ -821,8 +583,9 @@
     metadataGetter getMetadata = [this](const String8 &id) {return mDevice->infoPhysical(id);};
     std::vector<std::string> physicalCameraIds;
     mProviderManager->isLogicalCamera(mCameraIdStr.string(), &physicalCameraIds);
-    res = convertToHALStreamCombination(sessionConfiguration, mCameraIdStr,
-            mDevice->info(), getMetadata, physicalCameraIds, streamConfiguration, &earlyExit);
+    res = SessionConfigurationUtils::convertToHALStreamCombination(sessionConfiguration,
+            mCameraIdStr, mDevice->info(), getMetadata, physicalCameraIds, streamConfiguration,
+            &earlyExit);
     if (!res.isOk()) {
         return res;
     }
@@ -970,7 +733,7 @@
     String8 physicalCameraId = String8(outputConfiguration.getPhysicalCameraId());
     bool deferredConsumerOnly = deferredConsumer && numBufferProducers == 0;
 
-    res = checkSurfaceType(numBufferProducers, deferredConsumer,
+    res = SessionConfigurationUtils::checkSurfaceType(numBufferProducers, deferredConsumer,
             outputConfiguration.getSurfaceType());
     if (!res.isOk()) {
         return res;
@@ -981,7 +744,8 @@
     }
     std::vector<std::string> physicalCameraIds;
     mProviderManager->isLogicalCamera(mCameraIdStr.string(), &physicalCameraIds);
-    res = checkPhysicalCameraId(physicalCameraIds, physicalCameraId, mCameraIdStr);
+    res = SessionConfigurationUtils::checkPhysicalCameraId(physicalCameraIds, physicalCameraId,
+            mCameraIdStr);
     if (!res.isOk()) {
         return res;
     }
@@ -1009,8 +773,8 @@
         }
 
         sp<Surface> surface;
-        res = createSurfaceFromGbp(streamInfo, isStreamInfoValid, surface, bufferProducer,
-                mCameraIdStr, mDevice->infoPhysical(physicalCameraId));
+        res = SessionConfigurationUtils::createSurfaceFromGbp(streamInfo, isStreamInfoValid,
+                surface, bufferProducer, mCameraIdStr, mDevice->infoPhysical(physicalCameraId));
 
         if (!res.isOk())
             return res;
@@ -1313,8 +1077,9 @@
     for (size_t i = 0; i < newOutputsMap.size(); i++) {
         OutputStreamInfo outInfo;
         sp<Surface> surface;
-        res = createSurfaceFromGbp(outInfo, /*isStreamInfoValid*/ false, surface,
-                newOutputsMap.valueAt(i), mCameraIdStr, mDevice->infoPhysical(physicalCameraId));
+        res = SessionConfigurationUtils::createSurfaceFromGbp(outInfo, /*isStreamInfoValid*/ false,
+                surface, newOutputsMap.valueAt(i), mCameraIdStr,
+                mDevice->infoPhysical(physicalCameraId));
         if (!res.isOk())
             return res;
 
@@ -1364,226 +1129,6 @@
     return res;
 }
 
-bool CameraDeviceClient::isPublicFormat(int32_t format)
-{
-    switch(format) {
-        case HAL_PIXEL_FORMAT_RGBA_8888:
-        case HAL_PIXEL_FORMAT_RGBX_8888:
-        case HAL_PIXEL_FORMAT_RGB_888:
-        case HAL_PIXEL_FORMAT_RGB_565:
-        case HAL_PIXEL_FORMAT_BGRA_8888:
-        case HAL_PIXEL_FORMAT_YV12:
-        case HAL_PIXEL_FORMAT_Y8:
-        case HAL_PIXEL_FORMAT_Y16:
-        case HAL_PIXEL_FORMAT_RAW16:
-        case HAL_PIXEL_FORMAT_RAW10:
-        case HAL_PIXEL_FORMAT_RAW12:
-        case HAL_PIXEL_FORMAT_RAW_OPAQUE:
-        case HAL_PIXEL_FORMAT_BLOB:
-        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
-        case HAL_PIXEL_FORMAT_YCbCr_420_888:
-        case HAL_PIXEL_FORMAT_YCbCr_422_SP:
-        case HAL_PIXEL_FORMAT_YCrCb_420_SP:
-        case HAL_PIXEL_FORMAT_YCbCr_422_I:
-            return true;
-        default:
-            return false;
-    }
-}
-
-binder::Status CameraDeviceClient::createSurfaceFromGbp(
-        OutputStreamInfo& streamInfo, bool isStreamInfoValid,
-        sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp,
-        const String8 &cameraId, const CameraMetadata &physicalCameraMetadata) {
-
-    // bufferProducer must be non-null
-    if (gbp == nullptr) {
-        String8 msg = String8::format("Camera %s: Surface is NULL", cameraId.string());
-        ALOGW("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
-    }
-    // HACK b/10949105
-    // Query consumer usage bits to set async operation mode for
-    // GLConsumer using controlledByApp parameter.
-    bool useAsync = false;
-    uint64_t consumerUsage = 0;
-    status_t err;
-    if ((err = gbp->getConsumerUsage(&consumerUsage)) != OK) {
-        String8 msg = String8::format("Camera %s: Failed to query Surface consumer usage: %s (%d)",
-                cameraId.string(), strerror(-err), err);
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
-    }
-    if (consumerUsage & GraphicBuffer::USAGE_HW_TEXTURE) {
-        ALOGW("%s: Camera %s with consumer usage flag: %" PRIu64 ": Forcing asynchronous mode for stream",
-                __FUNCTION__, cameraId.string(), consumerUsage);
-        useAsync = true;
-    }
-
-    uint64_t disallowedFlags = GraphicBuffer::USAGE_HW_VIDEO_ENCODER |
-                              GRALLOC_USAGE_RENDERSCRIPT;
-    uint64_t allowedFlags = GraphicBuffer::USAGE_SW_READ_MASK |
-                           GraphicBuffer::USAGE_HW_TEXTURE |
-                           GraphicBuffer::USAGE_HW_COMPOSER;
-    bool flexibleConsumer = (consumerUsage & disallowedFlags) == 0 &&
-            (consumerUsage & allowedFlags) != 0;
-
-    surface = new Surface(gbp, useAsync);
-    ANativeWindow *anw = surface.get();
-
-    int width, height, format;
-    android_dataspace dataSpace;
-    if ((err = anw->query(anw, NATIVE_WINDOW_WIDTH, &width)) != OK) {
-        String8 msg = String8::format("Camera %s: Failed to query Surface width: %s (%d)",
-                 cameraId.string(), strerror(-err), err);
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
-    }
-    if ((err = anw->query(anw, NATIVE_WINDOW_HEIGHT, &height)) != OK) {
-        String8 msg = String8::format("Camera %s: Failed to query Surface height: %s (%d)",
-                cameraId.string(), strerror(-err), err);
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
-    }
-    if ((err = anw->query(anw, NATIVE_WINDOW_FORMAT, &format)) != OK) {
-        String8 msg = String8::format("Camera %s: Failed to query Surface format: %s (%d)",
-                cameraId.string(), strerror(-err), err);
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
-    }
-    if ((err = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE,
-            reinterpret_cast<int*>(&dataSpace))) != OK) {
-        String8 msg = String8::format("Camera %s: Failed to query Surface dataspace: %s (%d)",
-                cameraId.string(), strerror(-err), err);
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
-    }
-
-    // FIXME: remove this override since the default format should be
-    //       IMPLEMENTATION_DEFINED. b/9487482 & b/35317944
-    if ((format >= HAL_PIXEL_FORMAT_RGBA_8888 && format <= HAL_PIXEL_FORMAT_BGRA_8888) &&
-            ((consumerUsage & GRALLOC_USAGE_HW_MASK) &&
-             ((consumerUsage & GRALLOC_USAGE_SW_READ_MASK) == 0))) {
-        ALOGW("%s: Camera %s: Overriding format %#x to IMPLEMENTATION_DEFINED",
-                __FUNCTION__, cameraId.string(), format);
-        format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
-    }
-    // Round dimensions to the nearest dimensions available for this format
-    if (flexibleConsumer && isPublicFormat(format) &&
-            !CameraDeviceClient::roundBufferDimensionNearest(width, height,
-            format, dataSpace, physicalCameraMetadata, /*out*/&width, /*out*/&height)) {
-        String8 msg = String8::format("Camera %s: No supported stream configurations with "
-                "format %#x defined, failed to create output stream",
-                cameraId.string(), format);
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
-    }
-
-    if (!isStreamInfoValid) {
-        streamInfo.width = width;
-        streamInfo.height = height;
-        streamInfo.format = format;
-        streamInfo.dataSpace = dataSpace;
-        streamInfo.consumerUsage = consumerUsage;
-        return binder::Status::ok();
-    }
-    if (width != streamInfo.width) {
-        String8 msg = String8::format("Camera %s:Surface width doesn't match: %d vs %d",
-                cameraId.string(), width, streamInfo.width);
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
-    }
-    if (height != streamInfo.height) {
-        String8 msg = String8::format("Camera %s:Surface height doesn't match: %d vs %d",
-                 cameraId.string(), height, streamInfo.height);
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
-    }
-    if (format != streamInfo.format) {
-        String8 msg = String8::format("Camera %s:Surface format doesn't match: %d vs %d",
-                 cameraId.string(), format, streamInfo.format);
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
-    }
-    if (format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
-        if (dataSpace != streamInfo.dataSpace) {
-            String8 msg = String8::format("Camera %s:Surface dataSpace doesn't match: %d vs %d",
-                    cameraId.string(), dataSpace, streamInfo.dataSpace);
-            ALOGE("%s: %s", __FUNCTION__, msg.string());
-            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
-        }
-        // On the native side, there isn't a way to check whether two surfaces come from the
-        // same surface class type. Use the usage flags to approximate the comparison.
-        if (consumerUsage != streamInfo.consumerUsage) {
-            String8 msg = String8::format(
-                    "Camera %s:Surface usage flag doesn't match %" PRIu64 " vs %" PRIu64 "",
-                    cameraId.string(), consumerUsage, streamInfo.consumerUsage);
-            ALOGE("%s: %s", __FUNCTION__, msg.string());
-            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
-        }
-    }
-    return binder::Status::ok();
-}
-
-bool CameraDeviceClient::roundBufferDimensionNearest(int32_t width, int32_t height,
-        int32_t format, android_dataspace dataSpace, const CameraMetadata& info,
-        /*out*/int32_t* outWidth, /*out*/int32_t* outHeight) {
-
-    camera_metadata_ro_entry streamConfigs =
-            (dataSpace == HAL_DATASPACE_DEPTH) ?
-            info.find(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS) :
-            (dataSpace == static_cast<android_dataspace>(HAL_DATASPACE_HEIF)) ?
-            info.find(ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS) :
-            info.find(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
-
-    int32_t bestWidth = -1;
-    int32_t bestHeight = -1;
-
-    // Iterate through listed stream configurations and find the one with the smallest euclidean
-    // distance from the given dimensions for the given format.
-    for (size_t i = 0; i < streamConfigs.count; i += 4) {
-        int32_t fmt = streamConfigs.data.i32[i];
-        int32_t w = streamConfigs.data.i32[i + 1];
-        int32_t h = streamConfigs.data.i32[i + 2];
-
-        // Ignore input/output type for now
-        if (fmt == format) {
-            if (w == width && h == height) {
-                bestWidth = width;
-                bestHeight = height;
-                break;
-            } else if (w <= ROUNDING_WIDTH_CAP && (bestWidth == -1 ||
-                    CameraDeviceClient::euclidDistSquare(w, h, width, height) <
-                    CameraDeviceClient::euclidDistSquare(bestWidth, bestHeight, width, height))) {
-                bestWidth = w;
-                bestHeight = h;
-            }
-        }
-    }
-
-    if (bestWidth == -1) {
-        // Return false if no configurations for this format were listed
-        return false;
-    }
-
-    // Set the outputs to the closest width/height
-    if (outWidth != NULL) {
-        *outWidth = bestWidth;
-    }
-    if (outHeight != NULL) {
-        *outHeight = bestHeight;
-    }
-
-    // Return true if at least one configuration for this format was listed
-    return true;
-}
-
-int64_t CameraDeviceClient::euclidDistSquare(int32_t x0, int32_t y0, int32_t x1, int32_t y1) {
-    int64_t d0 = x0 - x1;
-    int64_t d1 = y0 - y1;
-    return d0 * d0 + d1 * d1;
-}
-
 // Create a request object from a template.
 binder::Status CameraDeviceClient::createDefaultRequest(int templateId,
         /*out*/
@@ -1896,8 +1441,9 @@
         }
 
         sp<Surface> surface;
-        res = createSurfaceFromGbp(mStreamInfoMap[streamId], true /*isStreamInfoValid*/,
-                surface, bufferProducer, mCameraIdStr, mDevice->infoPhysical(physicalId));
+        res = SessionConfigurationUtils::createSurfaceFromGbp(mStreamInfoMap[streamId],
+                true /*isStreamInfoValid*/, surface, bufferProducer, mCameraIdStr,
+                mDevice->infoPhysical(physicalId));
 
         if (!res.isOk())
             return res;
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index 9d3874f..2807aee 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -50,7 +50,7 @@
     CameraDeviceClientBase(const sp<CameraService>& cameraService,
             const sp<hardware::camera2::ICameraDeviceCallbacks>& remoteCallback,
             const String16& clientPackageName,
-            const std::unique_ptr<String16>& clientFeatureId,
+            const std::optional<String16>& clientFeatureId,
             const String8& cameraId,
             int api1CameraId,
             int cameraFacing,
@@ -175,7 +175,7 @@
     CameraDeviceClient(const sp<CameraService>& cameraService,
             const sp<hardware::camera2::ICameraDeviceCallbacks>& remoteCallback,
             const String16& clientPackageName,
-            const std::unique_ptr<String16>& clientFeatureId,
+            const std::optional<String16>& clientFeatureId,
             const String8& cameraId,
             int cameraFacing,
             int clientPid,
@@ -204,16 +204,6 @@
     virtual void notifyRequestQueueEmpty();
     virtual void notifyRepeatingRequestError(long lastFrameNumber);
 
-    // utility function to convert AIDL SessionConfiguration to HIDL
-    // streamConfiguration. Also checks for validity of SessionConfiguration and
-    // returns a non-ok binder::Status if the passed in session configuration
-    // isn't valid.
-    static binder::Status
-    convertToHALStreamCombination(const SessionConfiguration& sessionConfiguration,
-            const String8 &cameraId, const CameraMetadata &deviceInfo,
-            metadataGetter getMetadata, const std::vector<std::string> &physicalCameraIds,
-            hardware::camera::device::V3_4::StreamConfiguration &streamConfiguration,
-            bool *earlyExit);
     /**
      * Interface used by independent components of CameraDeviceClient.
      */
@@ -266,18 +256,8 @@
 
     /** Utility members */
     binder::Status checkPidStatus(const char* checkLocation);
-    static binder::Status checkOperatingMode(int operatingMode, const CameraMetadata &staticInfo,
-            const String8 &cameraId);
-    static binder::Status checkSurfaceType(size_t numBufferProducers, bool deferredConsumer,
-            int surfaceType);
-    static void mapStreamInfo(const OutputStreamInfo &streamInfo,
-            camera3_stream_rotation_t rotation, String8 physicalId,
-            hardware::camera::device::V3_4::Stream *stream /*out*/);
     bool enforceRequestPermissions(CameraMetadata& metadata);
 
-    // Find the square of the euclidean distance between two points
-    static int64_t euclidDistSquare(int32_t x0, int32_t y0, int32_t x1, int32_t y1);
-
     // Create an output stream with surface deferred for future.
     binder::Status createDeferredSurfaceStreamLocked(
             const hardware::camera2::params::OutputConfiguration &outputConfiguration,
@@ -288,33 +268,11 @@
     // cases.
     binder::Status setStreamTransformLocked(int streamId);
 
-    // Find the closest dimensions for a given format in available stream configurations with
-    // a width <= ROUNDING_WIDTH_CAP
-    static const int32_t ROUNDING_WIDTH_CAP = 1920;
-    static bool roundBufferDimensionNearest(int32_t width, int32_t height, int32_t format,
-            android_dataspace dataSpace, const CameraMetadata& info,
-            /*out*/int32_t* outWidth, /*out*/int32_t* outHeight);
-
-    // Check whether the given format is a public (non-custom) format.
-    static bool isPublicFormat(int32_t format);
-
-    // Create a Surface from an IGraphicBufferProducer. Returns error if
-    // IGraphicBufferProducer's property doesn't match with streamInfo
-    static binder::Status createSurfaceFromGbp(OutputStreamInfo& streamInfo, bool isStreamInfoValid,
-            sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp, const String8 &cameraId,
-            const CameraMetadata &physicalCameraMetadata);
-
-
     // Utility method to insert the surface into SurfaceMap
     binder::Status insertGbpLocked(const sp<IGraphicBufferProducer>& gbp,
             /*out*/SurfaceMap* surfaceMap, /*out*/Vector<int32_t>* streamIds,
             /*out*/int32_t*  currentStreamId);
 
-    // Check that the physicalCameraId passed in is supported by the camera
-    // device.
-    static binder::Status checkPhysicalCameraId(const std::vector<std::string> &physicalCameraIds,
-            const String8 &physicalCameraId, const String8 &logicalCameraId);
-
     // IGraphicsBufferProducer binder -> Stream ID + Surface ID for output streams
     KeyedVector<sp<IBinder>, StreamSurfaceId> mStreamMap;
 
@@ -346,7 +304,6 @@
 
     KeyedVector<sp<IBinder>, sp<CompositeStream>> mCompositeStreamMap;
 
-    static const int32_t MAX_SURFACES_PER_STREAM = 4;
     sp<CameraProviderManager> mProviderManager;
 };
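The constructor declarations above change the optional clientFeatureId parameter from std::unique_ptr<String16> to std::optional<String16>. As a general C++ illustration (hypothetical types, with std::string standing in for Android's String16), std::optional expresses an absent-or-present value without a heap allocation and keeps the enclosing type copyable:

    #include <iostream>
    #include <optional>
    #include <string>

    struct ClientInfo {
        std::string packageName;
        std::optional<std::string> featureId;  // absent or present, no extra allocation
    };

    static void printClient(const ClientInfo& c) {
        std::cout << c.packageName << " / "
                  << c.featureId.value_or("<no feature id>") << "\n";
    }

    int main() {
        ClientInfo a{"com.example.camera", std::nullopt};
        ClientInfo b{"com.example.camera", "scanner"};
        ClientInfo copyOfB = b;  // copies are fine; unique_ptr would make ClientInfo move-only
        printClient(a);
        printClient(b);
        printClient(copyOfB);
        return 0;
    }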
 
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
index b67fcb3..03621c8 100644
--- a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
@@ -48,7 +48,7 @@
             const KeyedVector<sp<IBinder>, sp<CompositeStream>>& offlineCompositeStreamMap,
             const sp<ICameraDeviceCallbacks>& remoteCallback,
             const String16& clientPackageName,
-            const std::unique_ptr<String16>& clientFeatureId,
+            const std::optional<String16>& clientFeatureId,
             const String8& cameraIdStr, int cameraFacing,
             int clientPid, uid_t clientUid, int servicePid) :
             CameraService::BasicClient(
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
index a63f402..4fe5adf 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
@@ -510,7 +510,8 @@
 
     sp<camera3::StatusTracker> statusTracker = mStatusTracker.promote();
     if (statusTracker != nullptr) {
-        mStatusId = statusTracker->addComponent();
+        std::string name = std::string("HeicStream ") + std::to_string(getStreamId());
+        mStatusId = statusTracker->addComponent(name);
     }
 
     run("HeicCompositeStreamProc");
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
index 0a41776..609698c 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
@@ -44,7 +44,7 @@
         const sp<CameraService>& cameraService,
         const sp<TCamCallbacks>& remoteCallback,
         const String16& clientPackageName,
-        const std::unique_ptr<String16>& clientFeatureId,
+        const std::optional<String16>& clientFeatureId,
         const String8& cameraId,
         int api1CameraId,
         int cameraFacing,
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.h b/services/camera/libcameraservice/common/Camera2ClientBase.h
index 042f5aa..d7506af 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.h
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.h
@@ -48,7 +48,7 @@
     Camera2ClientBase(const sp<CameraService>& cameraService,
                       const sp<TCamCallbacks>& remoteCallback,
                       const String16& clientPackageName,
-                      const std::unique_ptr<String16>& clientFeatureId,
+                      const std::optional<String16>& clientFeatureId,
                       const String8& cameraId,
                       int api1CameraId,
                       int cameraFacing,
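
Note: across CameraOfflineSessionClient.h, Camera2ClientBase.cpp and this header, clientFeatureId changes from std::unique_ptr<String16> to std::optional<String16>. The sketch below shows why std::optional is the natural fit for a maybe-absent value (no heap allocation, copyable, explicit emptiness handling); std::string stands in for android::String16 so the example stays self-contained.

    #include <iostream>
    #include <optional>
    #include <string>

    // std::string stands in for android::String16; only the std::optional pattern matters here.
    static void describeClient(const std::optional<std::string>& clientFeatureId) {
        // value_or() makes the "not provided" case explicit; a std::unique_ptr would be
        // move-only and force a heap allocation just to express optionality.
        std::cout << "featureId: " << clientFeatureId.value_or("<none>") << "\n";
    }

    int main() {
        describeClient(std::nullopt);                // caller omits the feature id
        describeClient(std::string("com.example"));  // caller provides one
        return 0;
    }
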
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.cpp b/services/camera/libcameraservice/common/CameraProviderManager.cpp
index 32d118d..e9dcb01 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.cpp
+++ b/services/camera/libcameraservice/common/CameraProviderManager.cpp
@@ -417,46 +417,6 @@
     return mapToStatusT(status);
 }
 
-status_t CameraProviderManager::openSession(const std::string &id,
-        const sp<device::V1_0::ICameraDeviceCallback>& callback,
-        /*out*/
-        sp<device::V1_0::ICameraDevice> *session) {
-
-    std::lock_guard<std::mutex> lock(mInterfaceMutex);
-
-    auto deviceInfo = findDeviceInfoLocked(id,
-            /*minVersion*/ {1,0}, /*maxVersion*/ {2,0});
-    if (deviceInfo == nullptr) return NAME_NOT_FOUND;
-
-    auto *deviceInfo1 = static_cast<ProviderInfo::DeviceInfo1*>(deviceInfo);
-    sp<ProviderInfo> parentProvider = deviceInfo->mParentProvider.promote();
-    if (parentProvider == nullptr) {
-        return DEAD_OBJECT;
-    }
-    const sp<provider::V2_4::ICameraProvider> provider = parentProvider->startProviderInterface();
-    if (provider == nullptr) {
-        return DEAD_OBJECT;
-    }
-    saveRef(DeviceMode::CAMERA, id, provider);
-
-    auto interface = deviceInfo1->startDeviceInterface<
-            CameraProviderManager::ProviderInfo::DeviceInfo1::InterfaceT>();
-    if (interface == nullptr) {
-        return DEAD_OBJECT;
-    }
-    hardware::Return<Status> status = interface->open(callback);
-    if (!status.isOk()) {
-        removeRef(DeviceMode::CAMERA, id);
-        ALOGE("%s: Transaction error opening a session for camera device %s: %s",
-                __FUNCTION__, id.c_str(), status.description().c_str());
-        return DEAD_OBJECT;
-    }
-    if (status == Status::OK) {
-        *session = interface;
-    }
-    return mapToStatusT(status);
-}
-
 void CameraProviderManager::saveRef(DeviceMode usageType, const std::string &cameraId,
         sp<provider::V2_4::ICameraProvider> provider) {
     if (!kEnableLazyHal) {
@@ -1344,6 +1304,20 @@
         }
     }
 
+    // cameraDeviceStatusChange callbacks may be called (and cause new devices to be added)
+    // before setCallback returns
+    hardware::Return<Status> status = interface->setCallback(this);
+    if (!status.isOk()) {
+        ALOGE("%s: Transaction error setting up callbacks with camera provider '%s': %s",
+                __FUNCTION__, mProviderName.c_str(), status.description().c_str());
+        return DEAD_OBJECT;
+    }
+    if (status != Status::OK) {
+        ALOGE("%s: Unable to register callbacks with camera provider '%s'",
+                __FUNCTION__, mProviderName.c_str());
+        return mapToStatusT(status);
+    }
+
     hardware::Return<bool> linked = interface->linkToDeath(this, /*cookie*/ mId);
     if (!linked.isOk()) {
         ALOGE("%s: Transaction error in linking to camera provider '%s' death: %s",
@@ -1372,7 +1346,6 @@
         return res;
     }
 
-    Status status;
     // Get initial list of camera devices, if any
     std::vector<std::string> devices;
     hardware::Return<void> ret = interface->getCameraIdList([&status, this, &devices](
@@ -1437,26 +1410,43 @@
         }
     }
 
-    // cameraDeviceStatusChange callbacks may be called (and causing new devices added)
-    // before setCallback returns. setCallback must be called after addDevice so that
-    // the physical camera status callback can look up available regular
-    // cameras.
-    hardware::Return<Status> st = interface->setCallback(this);
-    if (!st.isOk()) {
-        ALOGE("%s: Transaction error setting up callbacks with camera provider '%s': %s",
-                __FUNCTION__, mProviderName.c_str(), st.description().c_str());
-        return DEAD_OBJECT;
-    }
-    if (st != Status::OK) {
-        ALOGE("%s: Unable to register callbacks with camera provider '%s'",
-                __FUNCTION__, mProviderName.c_str());
-        return mapToStatusT(st);
-    }
-
     ALOGI("Camera provider %s ready with %zu camera devices",
             mProviderName.c_str(), mDevices.size());
 
-    mInitialized = true;
+    // Process cached status callbacks
+    std::unique_ptr<std::vector<CameraStatusInfoT>> cachedStatus =
+            std::make_unique<std::vector<CameraStatusInfoT>>();
+    {
+        std::lock_guard<std::mutex> lock(mInitLock);
+
+        for (auto& statusInfo : mCachedStatus) {
+            std::string id, physicalId;
+            status_t res = OK;
+            if (statusInfo.isPhysicalCameraStatus) {
+                res = physicalCameraDeviceStatusChangeLocked(&id, &physicalId,
+                    statusInfo.cameraId, statusInfo.physicalCameraId, statusInfo.status);
+            } else {
+                res = cameraDeviceStatusChangeLocked(&id, statusInfo.cameraId, statusInfo.status);
+            }
+            if (res == OK) {
+                cachedStatus->emplace_back(statusInfo.isPhysicalCameraStatus,
+                        id.c_str(), physicalId.c_str(), statusInfo.status);
+            }
+        }
+        mCachedStatus.clear();
+
+        mInitialized = true;
+    }
+
+    // The cached status change callbacks cannot be fired directly from this
+    // function, because doing so would deadlock by trying to acquire
+    // mInterfaceMutex twice on the same thread.
+    if (listener != nullptr) {
+        mInitialStatusCallbackFuture = std::async(std::launch::async,
+                &CameraProviderManager::ProviderInfo::notifyInitialStatusChange, this,
+                listener, std::move(cachedStatus));
+    }
+
     return OK;
 }
 
@@ -1537,9 +1527,9 @@
     std::unique_ptr<DeviceInfo> deviceInfo;
     switch (major) {
         case 1:
-            deviceInfo = initializeDeviceInfo<DeviceInfo1>(name, mProviderTagid,
-                    id, minor);
-            break;
+            ALOGE("%s: Device %s: Unsupported HIDL device HAL major version %d:", __FUNCTION__,
+                    name.c_str(), major);
+            return BAD_VALUE;
         case 3:
             deviceInfo = initializeDeviceInfo<DeviceInfo3>(name, mProviderTagid,
                     id, minor);
@@ -1734,104 +1724,139 @@
         CameraDeviceStatus newStatus) {
     sp<StatusListener> listener;
     std::string id;
-    bool initialized = false;
+    std::lock_guard<std::mutex> lock(mInitLock);
+
+    if (!mInitialized) {
+        mCachedStatus.emplace_back(false /*isPhysicalCameraStatus*/,
+                cameraDeviceName.c_str(), std::string().c_str(), newStatus);
+        return hardware::Void();
+    }
+
     {
         std::lock_guard<std::mutex> lock(mLock);
-        bool known = false;
-        for (auto& deviceInfo : mDevices) {
-            if (deviceInfo->mName == cameraDeviceName) {
-                ALOGI("Camera device %s status is now %s, was %s", cameraDeviceName.c_str(),
-                        deviceStatusToString(newStatus), deviceStatusToString(deviceInfo->mStatus));
-                deviceInfo->mStatus = newStatus;
-                // TODO: Handle device removal (NOT_PRESENT)
-                id = deviceInfo->mId;
-                known = true;
-                break;
-            }
-        }
-        // Previously unseen device; status must not be NOT_PRESENT
-        if (!known) {
-            if (newStatus == CameraDeviceStatus::NOT_PRESENT) {
-                ALOGW("Camera provider %s says an unknown camera device %s is not present. Curious.",
-                    mProviderName.c_str(), cameraDeviceName.c_str());
-                return hardware::Void();
-            }
-            addDevice(cameraDeviceName, newStatus, &id);
-        } else if (newStatus == CameraDeviceStatus::NOT_PRESENT) {
-            removeDevice(id);
+        if (OK != cameraDeviceStatusChangeLocked(&id, cameraDeviceName, newStatus)) {
+            return hardware::Void();
         }
         listener = mManager->getStatusListener();
-        initialized = mInitialized;
-        if (reCacheConcurrentStreamingCameraIdsLocked() != OK) {
-            ALOGE("%s: CameraProvider %s could not re-cache concurrent streaming camera id list ",
-                      __FUNCTION__, mProviderName.c_str());
-        }
     }
+
     // Call without lock held to allow reentrancy into provider manager
-    // Don't send the callback if providerInfo hasn't been initialized.
-    // CameraService will initialize device status after provider is
-    // initialized
-    if (listener != nullptr && initialized) {
+    if (listener != nullptr) {
         listener->onDeviceStatusChanged(String8(id.c_str()), newStatus);
     }
+
     return hardware::Void();
 }
 
+status_t CameraProviderManager::ProviderInfo::cameraDeviceStatusChangeLocked(
+        std::string* id, const hardware::hidl_string& cameraDeviceName,
+        CameraDeviceStatus newStatus) {
+    bool known = false;
+    std::string cameraId;
+    for (auto& deviceInfo : mDevices) {
+        if (deviceInfo->mName == cameraDeviceName) {
+            ALOGI("Camera device %s status is now %s, was %s", cameraDeviceName.c_str(),
+                    deviceStatusToString(newStatus), deviceStatusToString(deviceInfo->mStatus));
+            deviceInfo->mStatus = newStatus;
+            // TODO: Handle device removal (NOT_PRESENT)
+            cameraId = deviceInfo->mId;
+            known = true;
+            break;
+        }
+    }
+    // Previously unseen device; status must not be NOT_PRESENT
+    if (!known) {
+        if (newStatus == CameraDeviceStatus::NOT_PRESENT) {
+            ALOGW("Camera provider %s says an unknown camera device %s is not present. Curious.",
+                mProviderName.c_str(), cameraDeviceName.c_str());
+            return BAD_VALUE;
+        }
+        addDevice(cameraDeviceName, newStatus, &cameraId);
+    } else if (newStatus == CameraDeviceStatus::NOT_PRESENT) {
+        removeDevice(cameraId);
+    }
+    if (reCacheConcurrentStreamingCameraIdsLocked() != OK) {
+        ALOGE("%s: CameraProvider %s could not re-cache concurrent streaming camera id list ",
+                  __FUNCTION__, mProviderName.c_str());
+    }
+    *id = cameraId;
+    return OK;
+}
+
 hardware::Return<void> CameraProviderManager::ProviderInfo::physicalCameraDeviceStatusChange(
         const hardware::hidl_string& cameraDeviceName,
         const hardware::hidl_string& physicalCameraDeviceName,
         CameraDeviceStatus newStatus) {
     sp<StatusListener> listener;
     std::string id;
-    bool initialized = false;
+    std::string physicalId;
+    std::lock_guard<std::mutex> lock(mInitLock);
+
+    if (!mInitialized) {
+        mCachedStatus.emplace_back(true /*isPhysicalCameraStatus*/, cameraDeviceName,
+                physicalCameraDeviceName, newStatus);
+        return hardware::Void();
+    }
+
     {
         std::lock_guard<std::mutex> lock(mLock);
-        bool known = false;
-        for (auto& deviceInfo : mDevices) {
-            if (deviceInfo->mName == cameraDeviceName) {
-                id = deviceInfo->mId;
 
-                if (!deviceInfo->mIsLogicalCamera) {
-                    ALOGE("%s: Invalid combination of camera id %s, physical id %s",
-                            __FUNCTION__, id.c_str(), physicalCameraDeviceName.c_str());
-                    return hardware::Void();
-                }
-                if (std::find(deviceInfo->mPhysicalIds.begin(), deviceInfo->mPhysicalIds.end(),
-                        physicalCameraDeviceName) == deviceInfo->mPhysicalIds.end()) {
-                    ALOGE("%s: Invalid combination of camera id %s, physical id %s",
-                            __FUNCTION__, id.c_str(), physicalCameraDeviceName.c_str());
-                    return hardware::Void();
-                }
-                ALOGI("Camera device %s physical device %s status is now %s, was %s",
-                        cameraDeviceName.c_str(), physicalCameraDeviceName.c_str(),
-                        deviceStatusToString(newStatus), deviceStatusToString(
-                        deviceInfo->mPhysicalStatus[physicalCameraDeviceName]));
-                known = true;
-                break;
-            }
-        }
-        // Previously unseen device; status must not be NOT_PRESENT
-        if (!known) {
-            ALOGW("Camera provider %s says an unknown camera device %s-%s is not present. Curious.",
-                    mProviderName.c_str(), cameraDeviceName.c_str(),
-                    physicalCameraDeviceName.c_str());
+        if (OK != physicalCameraDeviceStatusChangeLocked(&id, &physicalId, cameraDeviceName,
+                physicalCameraDeviceName, newStatus)) {
             return hardware::Void();
         }
+
         listener = mManager->getStatusListener();
-        initialized = mInitialized;
     }
     // Call without lock held to allow reentrancy into provider manager
-    // Don't send the callback if providerInfo hasn't been initialized.
-    // CameraService will initialize device status after provider is
-    // initialized
-    if (listener != nullptr && initialized) {
-        String8 physicalId(physicalCameraDeviceName.c_str());
+    if (listener != nullptr) {
         listener->onDeviceStatusChanged(String8(id.c_str()),
-                physicalId, newStatus);
+                String8(physicalId.c_str()), newStatus);
     }
     return hardware::Void();
 }
 
+status_t CameraProviderManager::ProviderInfo::physicalCameraDeviceStatusChangeLocked(
+            std::string* id, std::string* physicalId,
+            const hardware::hidl_string& cameraDeviceName,
+            const hardware::hidl_string& physicalCameraDeviceName,
+            CameraDeviceStatus newStatus) {
+    bool known = false;
+    std::string cameraId;
+    for (auto& deviceInfo : mDevices) {
+        if (deviceInfo->mName == cameraDeviceName) {
+            cameraId = deviceInfo->mId;
+            if (!deviceInfo->mIsLogicalCamera) {
+                ALOGE("%s: Invalid combination of camera id %s, physical id %s",
+                        __FUNCTION__, cameraId.c_str(), physicalCameraDeviceName.c_str());
+                return BAD_VALUE;
+            }
+            if (std::find(deviceInfo->mPhysicalIds.begin(), deviceInfo->mPhysicalIds.end(),
+                    physicalCameraDeviceName) == deviceInfo->mPhysicalIds.end()) {
+                ALOGE("%s: Invalid combination of camera id %s, physical id %s",
+                        __FUNCTION__, cameraId.c_str(), physicalCameraDeviceName.c_str());
+                return BAD_VALUE;
+            }
+            ALOGI("Camera device %s physical device %s status is now %s",
+                    cameraDeviceName.c_str(), physicalCameraDeviceName.c_str(),
+                    deviceStatusToString(newStatus));
+            known = true;
+            break;
+        }
+    }
+    // Previously unseen device; status must not be NOT_PRESENT
+    if (!known) {
+        ALOGW("Camera provider %s says an unknown camera device %s-%s is not present. Curious.",
+                mProviderName.c_str(), cameraDeviceName.c_str(),
+                physicalCameraDeviceName.c_str());
+        return BAD_VALUE;
+    }
+
+    *id = cameraId;
+    *physicalId = physicalCameraDeviceName.c_str();
+    return OK;
+}
+
 hardware::Return<void> CameraProviderManager::ProviderInfo::torchModeStatusChange(
         const hardware::hidl_string& cameraDeviceName,
         TorchModeStatus newStatus) {
@@ -1986,6 +2011,20 @@
     return INVALID_OPERATION;
 }
 
+void CameraProviderManager::ProviderInfo::notifyInitialStatusChange(
+        sp<StatusListener> listener,
+        std::unique_ptr<std::vector<CameraStatusInfoT>> cachedStatus) {
+    for (auto& statusInfo : *cachedStatus) {
+        if (statusInfo.isPhysicalCameraStatus) {
+            listener->onDeviceStatusChanged(String8(statusInfo.cameraId.c_str()),
+                    String8(statusInfo.physicalCameraId.c_str()), statusInfo.status);
+        } else {
+            listener->onDeviceStatusChanged(
+                    String8(statusInfo.cameraId.c_str()), statusInfo.status);
+        }
+    }
+}
+
 template<class DeviceInfoT>
 std::unique_ptr<CameraProviderManager::ProviderInfo::DeviceInfo>
     CameraProviderManager::ProviderInfo::initializeDeviceInfo(
@@ -2034,35 +2073,6 @@
 }
 
 template<>
-sp<device::V1_0::ICameraDevice>
-CameraProviderManager::ProviderInfo::startDeviceInterface
-        <device::V1_0::ICameraDevice>(const std::string &name) {
-    Status status;
-    sp<device::V1_0::ICameraDevice> cameraInterface;
-    hardware::Return<void> ret;
-    const sp<provider::V2_4::ICameraProvider> interface = startProviderInterface();
-    if (interface == nullptr) {
-        return nullptr;
-    }
-    ret = interface->getCameraDeviceInterface_V1_x(name, [&status, &cameraInterface](
-        Status s, sp<device::V1_0::ICameraDevice> interface) {
-                status = s;
-                cameraInterface = interface;
-            });
-    if (!ret.isOk()) {
-        ALOGE("%s: Transaction error trying to obtain interface for camera device %s: %s",
-                __FUNCTION__, name.c_str(), ret.description().c_str());
-        return nullptr;
-    }
-    if (status != Status::OK) {
-        ALOGE("%s: Unable to obtain interface for camera device %s: %s", __FUNCTION__,
-                name.c_str(), statusToString(status));
-        return nullptr;
-    }
-    return cameraInterface;
-}
-
-template<>
 sp<device::V3_2::ICameraDevice>
 CameraProviderManager::ProviderInfo::startDeviceInterface
         <device::V3_2::ICameraDevice>(const std::string &name) {
@@ -2115,126 +2125,6 @@
     return mapToStatusT(s);
 }
 
-CameraProviderManager::ProviderInfo::DeviceInfo1::DeviceInfo1(const std::string& name,
-        const metadata_vendor_id_t tagId, const std::string &id,
-        uint16_t minorVersion,
-        const CameraResourceCost& resourceCost,
-        sp<ProviderInfo> parentProvider,
-        const std::vector<std::string>& publicCameraIds,
-        sp<InterfaceT> interface) :
-        DeviceInfo(name, tagId, id, hardware::hidl_version{1, minorVersion},
-                   publicCameraIds, resourceCost, parentProvider) {
-    // Get default parameters and initialize flash unit availability
-    // Requires powering on the camera device
-    hardware::Return<Status> status = interface->open(nullptr);
-    if (!status.isOk()) {
-        ALOGE("%s: Transaction error opening camera device %s to check for a flash unit: %s",
-                __FUNCTION__, id.c_str(), status.description().c_str());
-        return;
-    }
-    if (status != Status::OK) {
-        ALOGE("%s: Unable to open camera device %s to check for a flash unit: %s", __FUNCTION__,
-                id.c_str(), CameraProviderManager::statusToString(status));
-        return;
-    }
-    hardware::Return<void> ret;
-    ret = interface->getParameters([this](const hardware::hidl_string& parms) {
-                mDefaultParameters.unflatten(String8(parms.c_str()));
-            });
-    if (!ret.isOk()) {
-        ALOGE("%s: Transaction error reading camera device %s params to check for a flash unit: %s",
-                __FUNCTION__, id.c_str(), status.description().c_str());
-        return;
-    }
-    const char *flashMode =
-            mDefaultParameters.get(CameraParameters::KEY_SUPPORTED_FLASH_MODES);
-    if (flashMode && strstr(flashMode, CameraParameters::FLASH_MODE_TORCH)) {
-        mHasFlashUnit = true;
-    }
-
-    status_t res = cacheCameraInfo(interface);
-    if (res != OK) {
-        ALOGE("%s: Could not cache CameraInfo", __FUNCTION__);
-        return;
-    }
-
-    ret = interface->close();
-    if (!ret.isOk()) {
-        ALOGE("%s: Transaction error closing camera device %s after check for a flash unit: %s",
-                __FUNCTION__, id.c_str(), status.description().c_str());
-    }
-
-    if (!kEnableLazyHal) {
-        // Save HAL reference indefinitely
-        mSavedInterface = interface;
-    }
-}
-
-CameraProviderManager::ProviderInfo::DeviceInfo1::~DeviceInfo1() {}
-
-status_t CameraProviderManager::ProviderInfo::DeviceInfo1::setTorchMode(bool enabled) {
-    return setTorchModeForDevice<InterfaceT>(enabled);
-}
-
-status_t CameraProviderManager::ProviderInfo::DeviceInfo1::getCameraInfo(
-        hardware::CameraInfo *info) const {
-    if (info == nullptr) return BAD_VALUE;
-    *info = mInfo;
-    return OK;
-}
-
-status_t CameraProviderManager::ProviderInfo::DeviceInfo1::cacheCameraInfo(
-        sp<CameraProviderManager::ProviderInfo::DeviceInfo1::InterfaceT> interface) {
-    Status status;
-    device::V1_0::CameraInfo cInfo;
-    hardware::Return<void> ret;
-    ret = interface->getCameraInfo([&status, &cInfo](Status s, device::V1_0::CameraInfo camInfo) {
-                status = s;
-                cInfo = camInfo;
-            });
-    if (!ret.isOk()) {
-        ALOGE("%s: Transaction error reading camera info from device %s: %s",
-                __FUNCTION__, mId.c_str(), ret.description().c_str());
-        return DEAD_OBJECT;
-    }
-    if (status != Status::OK) {
-        return mapToStatusT(status);
-    }
-
-    switch(cInfo.facing) {
-        case device::V1_0::CameraFacing::BACK:
-            mInfo.facing = hardware::CAMERA_FACING_BACK;
-            break;
-        case device::V1_0::CameraFacing::EXTERNAL:
-            // Map external to front for legacy API
-        case device::V1_0::CameraFacing::FRONT:
-            mInfo.facing = hardware::CAMERA_FACING_FRONT;
-            break;
-        default:
-            ALOGW("%s: Device %s: Unknown camera facing: %d",
-                    __FUNCTION__, mId.c_str(), cInfo.facing);
-            mInfo.facing = hardware::CAMERA_FACING_BACK;
-    }
-    mInfo.orientation = cInfo.orientation;
-
-    return OK;
-}
-
-status_t CameraProviderManager::ProviderInfo::DeviceInfo1::dumpState(int fd) {
-    native_handle_t* handle = native_handle_create(1,0);
-    handle->data[0] = fd;
-    const sp<InterfaceT> interface = startDeviceInterface<InterfaceT>();
-    if (interface == nullptr) {
-        return DEAD_OBJECT;
-    }
-    hardware::Return<Status> s = interface->dumpState(handle);
-    native_handle_delete(handle);
-    if (!s.isOk()) {
-        return INVALID_OPERATION;
-    }
-    return mapToStatusT(s);
-}
-
 CameraProviderManager::ProviderInfo::DeviceInfo3::DeviceInfo3(const std::string& name,
         const metadata_vendor_id_t tagId, const std::string &id,
         uint16_t minorVersion,
@@ -2689,9 +2579,11 @@
 
 
 CameraProviderManager::ProviderInfo::~ProviderInfo() {
+    if (mInitialStatusCallbackFuture.valid()) {
+        mInitialStatusCallbackFuture.wait();
+    }
     // Destruction of ProviderInfo is only supposed to happen when the respective
     // CameraProvider interface dies, so do not unregister callbacks.
-
 }
 
 status_t CameraProviderManager::mapToStatusT(const Status& s)  {
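
Note: beyond dropping the HAL1 (device@1.x) code paths, the CameraProviderManager.cpp changes above reorder provider initialization: setCallback() is now issued before the camera id list is enumerated, status callbacks that arrive while mInitialized is still false are cached under mInitLock, and once initialize() completes the cached events are replayed to the listener from a std::async task so the replay does not try to take mInterfaceMutex again on the initializing thread. The condensed sketch below shows that cache-then-replay shape with stand-in names (Provider, Listener, Event); it is not the real camera service code.

    // Condensed sketch of the "cache early callbacks, replay them asynchronously after
    // initialization" pattern used above. All class names here are illustrative stand-ins.
    #include <future>
    #include <iostream>
    #include <memory>
    #include <mutex>
    #include <string>
    #include <vector>

    struct Listener {
        void onDeviceStatusChanged(const std::string& id, int status) {
            std::cout << "status of " << id << " -> " << status << "\n";
        }
    };

    class Provider {
    public:
        // Invoked by the HAL, possibly before initialize() has finished.
        void onDeviceStatusChange(const std::string& id, int status) {
            std::lock_guard<std::mutex> lock(mInitLock);
            if (!mInitialized) {
                mCached.push_back({id, status});  // replayed later, once initialization is done
                return;
            }
            mListener->onDeviceStatusChanged(id, status);
        }

        void initialize(std::shared_ptr<Listener> listener) {
            mListener = std::move(listener);
            auto cached = std::make_unique<std::vector<Event>>();
            {
                std::lock_guard<std::mutex> lock(mInitLock);
                cached->swap(mCached);
                mInitialized = true;
            }
            // Fire the cached callbacks from a separate task so the listener can safely
            // call back into this object without deadlocking on locks held by initialize().
            if (mListener && !cached->empty()) {
                mInitialFuture = std::async(std::launch::async, &Provider::replay, this,
                                            mListener, std::move(cached));
            }
        }

        ~Provider() {
            if (mInitialFuture.valid()) {
                mInitialFuture.wait();  // the async task must finish before members go away
            }
        }

    private:
        struct Event { std::string id; int status; };

        void replay(std::shared_ptr<Listener> listener,
                    std::unique_ptr<std::vector<Event>> cached) {
            for (const auto& e : *cached) {
                listener->onDeviceStatusChanged(e.id, e.status);
            }
        }

        std::mutex mInitLock;
        bool mInitialized = false;
        std::vector<Event> mCached;
        std::shared_ptr<Listener> mListener;
        std::future<void> mInitialFuture;
    };

    int main() {
        Provider p;
        p.onDeviceStatusChange("device@3.5/internal/0", 1);  // arrives before initialize()
        p.initialize(std::make_shared<Listener>());
        p.onDeviceStatusChange("device@3.5/internal/1", 1);  // arrives after, delivered directly
        return 0;  // ~Provider waits for the replay task
    }
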
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.h b/services/camera/libcameraservice/common/CameraProviderManager.h
index 25d3639..8727e7f 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.h
+++ b/services/camera/libcameraservice/common/CameraProviderManager.h
@@ -22,6 +22,7 @@
 #include <unordered_set>
 #include <string>
 #include <mutex>
+#include <future>
 
 #include <camera/camera2/ConcurrentCamera.h>
 #include <camera/CameraParameters2.h>
@@ -269,11 +270,6 @@
             /*out*/
             sp<hardware::camera::device::V3_2::ICameraDeviceSession> *session);
 
-    status_t openSession(const std::string &id,
-            const sp<hardware::camera::device::V1_0::ICameraDeviceCallback>& callback,
-            /*out*/
-            sp<hardware::camera::device::V1_0::ICameraDevice> *session);
-
     /**
      * Save the ICameraProvider while it is being used by a camera or torch client
      */
@@ -403,6 +399,15 @@
                 const hardware::hidl_string& physicalCameraDeviceName,
                 hardware::camera::common::V1_0::CameraDeviceStatus newStatus) override;
 
+        status_t cameraDeviceStatusChangeLocked(
+                std::string* id, const hardware::hidl_string& cameraDeviceName,
+                hardware::camera::common::V1_0::CameraDeviceStatus newStatus);
+        status_t physicalCameraDeviceStatusChangeLocked(
+                std::string* id, std::string* physicalId,
+                const hardware::hidl_string& cameraDeviceName,
+                const hardware::hidl_string& physicalCameraDeviceName,
+                hardware::camera::common::V1_0::CameraDeviceStatus newStatus);
+
         // hidl_death_recipient interface - this locks the parent mInterfaceMutex
         virtual void serviceDied(uint64_t cookie, const wp<hidl::base::V1_0::IBase>& who) override;
 
@@ -444,8 +449,6 @@
             const hardware::camera::common::V1_0::CameraResourceCost mResourceCost;
 
             hardware::camera::common::V1_0::CameraDeviceStatus mStatus;
-            std::map<std::string, hardware::camera::common::V1_0::CameraDeviceStatus>
-                    mPhysicalStatus;
 
             wp<ProviderInfo> mParentProvider;
 
@@ -513,27 +516,6 @@
         // physical camera IDs.
         std::vector<std::string> mProviderPublicCameraIds;
 
-        // HALv1-specific camera fields, including the actual device interface
-        struct DeviceInfo1 : public DeviceInfo {
-            typedef hardware::camera::device::V1_0::ICameraDevice InterfaceT;
-
-            virtual status_t setTorchMode(bool enabled) override;
-            virtual status_t getCameraInfo(hardware::CameraInfo *info) const override;
-            //In case of Device1Info assume that we are always API1 compatible
-            virtual bool isAPI1Compatible() const override { return true; }
-            virtual status_t dumpState(int fd) override;
-            DeviceInfo1(const std::string& name, const metadata_vendor_id_t tagId,
-                    const std::string &id, uint16_t minorVersion,
-                    const hardware::camera::common::V1_0::CameraResourceCost& resourceCost,
-                    sp<ProviderInfo> parentProvider,
-                    const std::vector<std::string>& publicCameraIds,
-                    sp<InterfaceT> interface);
-            virtual ~DeviceInfo1();
-        private:
-            CameraParameters2 mDefaultParameters;
-            status_t cacheCameraInfo(sp<InterfaceT> interface);
-        };
-
         // HALv3-specific camera fields, including the actual device interface
         struct DeviceInfo3 : public DeviceInfo {
             typedef hardware::camera::device::V3_2::ICameraDevice InterfaceT;
@@ -600,7 +582,27 @@
 
         CameraProviderManager *mManager;
 
+        struct CameraStatusInfoT {
+            bool isPhysicalCameraStatus = false;
+            hardware::hidl_string cameraId;
+            hardware::hidl_string physicalCameraId;
+            hardware::camera::common::V1_0::CameraDeviceStatus status;
+            CameraStatusInfoT(bool isForPhysicalCamera, const hardware::hidl_string& id,
+                    const hardware::hidl_string& physicalId,
+                    hardware::camera::common::V1_0::CameraDeviceStatus s) :
+                    isPhysicalCameraStatus(isForPhysicalCamera), cameraId(id),
+                    physicalCameraId(physicalId), status(s) {}
+        };
+
+        // Lock to synchronize between initialize() and camera status callbacks
+        std::mutex mInitLock;
         bool mInitialized = false;
+        std::vector<CameraStatusInfoT> mCachedStatus;
+        // End of scope for mInitLock
+
+        std::future<void> mInitialStatusCallbackFuture;
+        void notifyInitialStatusChange(sp<StatusListener> listener,
+                std::unique_ptr<std::vector<CameraStatusInfoT>> cachedStatus);
 
         std::vector<std::unordered_set<std::string>> mConcurrentCameraIdCombinations;
 
diff --git a/services/camera/libcameraservice/device1/CameraHardwareInterface.cpp b/services/camera/libcameraservice/device1/CameraHardwareInterface.cpp
deleted file mode 100644
index 62ef681..0000000
--- a/services/camera/libcameraservice/device1/CameraHardwareInterface.cpp
+++ /dev/null
@@ -1,818 +0,0 @@
-/*
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#define LOG_TAG "CameraHardwareInterface"
-//#define LOG_NDEBUG 0
-
-#include <inttypes.h>
-#include <media/hardware/HardwareAPI.h> // For VideoNativeHandleMetadata
-#include "CameraHardwareInterface.h"
-
-namespace android {
-
-using namespace hardware::camera::device::V1_0;
-using namespace hardware::camera::common::V1_0;
-using hardware::hidl_handle;
-
-CameraHardwareInterface::~CameraHardwareInterface()
-{
-    ALOGI("Destroying camera %s", mName.string());
-    if (mHidlDevice != nullptr) {
-        mHidlDevice->close();
-        mHidlDevice.clear();
-        cleanupCirculatingBuffers();
-    }
-}
-
-status_t CameraHardwareInterface::initialize(sp<CameraProviderManager> manager) {
-    ALOGI("Opening camera %s", mName.string());
-
-    status_t ret = manager->openSession(mName.string(), this, &mHidlDevice);
-    if (ret != OK) {
-        ALOGE("%s: openSession failed! %s (%d)", __FUNCTION__, strerror(-ret), ret);
-    }
-    return ret;
-}
-
-status_t CameraHardwareInterface::setPreviewScalingMode(int scalingMode)
-{
-    int rc = OK;
-    mPreviewScalingMode = scalingMode;
-    if (mPreviewWindow != nullptr) {
-        rc = native_window_set_scaling_mode(mPreviewWindow.get(),
-                scalingMode);
-    }
-    return rc;
-}
-
-status_t CameraHardwareInterface::setPreviewTransform(int transform) {
-    int rc = OK;
-    mPreviewTransform = transform;
-    if (mPreviewWindow != nullptr) {
-        rc = native_window_set_buffers_transform(mPreviewWindow.get(),
-                mPreviewTransform);
-    }
-    return rc;
-}
-
-/**
- * Implementation of android::hardware::camera::device::V1_0::ICameraDeviceCallback
- */
-hardware::Return<void> CameraHardwareInterface::notifyCallback(
-        NotifyCallbackMsg msgType, int32_t ext1, int32_t ext2) {
-    sNotifyCb((int32_t) msgType, ext1, ext2, (void*) this);
-    return hardware::Void();
-}
-
-hardware::Return<uint32_t> CameraHardwareInterface::registerMemory(
-        const hardware::hidl_handle& descriptor,
-        uint32_t bufferSize, uint32_t bufferCount) {
-    if (descriptor->numFds != 1) {
-        ALOGE("%s: camera memory descriptor has numFds %d (expect 1)",
-                __FUNCTION__, descriptor->numFds);
-        return 0;
-    }
-    if (descriptor->data[0] < 0) {
-        ALOGE("%s: camera memory descriptor has FD %d (expect >= 0)",
-                __FUNCTION__, descriptor->data[0]);
-        return 0;
-    }
-
-    camera_memory_t* mem = sGetMemory(descriptor->data[0], bufferSize, bufferCount, this);
-    sp<CameraHeapMemory> camMem(static_cast<CameraHeapMemory *>(mem->handle));
-    int memPoolId = camMem->mHeap->getHeapID();
-    if (memPoolId < 0) {
-        ALOGE("%s: CameraHeapMemory has FD %d (expect >= 0)", __FUNCTION__, memPoolId);
-        return 0;
-    }
-    std::lock_guard<std::mutex> lock(mHidlMemPoolMapLock);
-    mHidlMemPoolMap.insert(std::make_pair(memPoolId, mem));
-    return memPoolId;
-}
-
-hardware::Return<void> CameraHardwareInterface::unregisterMemory(uint32_t memId) {
-    camera_memory_t* mem = nullptr;
-    {
-        std::lock_guard<std::mutex> lock(mHidlMemPoolMapLock);
-        if (mHidlMemPoolMap.count(memId) == 0) {
-            ALOGE("%s: memory pool ID %d not found", __FUNCTION__, memId);
-            return hardware::Void();
-        }
-        mem = mHidlMemPoolMap.at(memId);
-        mHidlMemPoolMap.erase(memId);
-    }
-    sPutMemory(mem);
-    return hardware::Void();
-}
-
-hardware::Return<void> CameraHardwareInterface::dataCallback(
-        DataCallbackMsg msgType, uint32_t data, uint32_t bufferIndex,
-        const hardware::camera::device::V1_0::CameraFrameMetadata& metadata) {
-    camera_memory_t* mem = nullptr;
-    {
-        std::lock_guard<std::mutex> lock(mHidlMemPoolMapLock);
-        if (mHidlMemPoolMap.count(data) == 0) {
-            ALOGE("%s: memory pool ID %d not found", __FUNCTION__, data);
-            return hardware::Void();
-        }
-        mem = mHidlMemPoolMap.at(data);
-    }
-    camera_frame_metadata_t md;
-    md.number_of_faces = metadata.faces.size();
-    md.faces = (camera_face_t*) metadata.faces.data();
-    sDataCb((int32_t) msgType, mem, bufferIndex, &md, this);
-    return hardware::Void();
-}
-
-hardware::Return<void> CameraHardwareInterface::dataCallbackTimestamp(
-        DataCallbackMsg msgType, uint32_t data,
-        uint32_t bufferIndex, int64_t timestamp) {
-    camera_memory_t* mem = nullptr;
-    {
-        std::lock_guard<std::mutex> lock(mHidlMemPoolMapLock);
-        if (mHidlMemPoolMap.count(data) == 0) {
-            ALOGE("%s: memory pool ID %d not found", __FUNCTION__, data);
-            return hardware::Void();
-        }
-        mem = mHidlMemPoolMap.at(data);
-    }
-    sDataCbTimestamp(timestamp, (int32_t) msgType, mem, bufferIndex, this);
-    return hardware::Void();
-}
-
-hardware::Return<void> CameraHardwareInterface::handleCallbackTimestamp(
-        DataCallbackMsg msgType, const hidl_handle& frameData, uint32_t data,
-        uint32_t bufferIndex, int64_t timestamp) {
-    camera_memory_t* mem = nullptr;
-    {
-        std::lock_guard<std::mutex> lock(mHidlMemPoolMapLock);
-        if (mHidlMemPoolMap.count(data) == 0) {
-            ALOGE("%s: memory pool ID %d not found", __FUNCTION__, data);
-            return hardware::Void();
-        }
-        mem = mHidlMemPoolMap.at(data);
-    }
-    sp<CameraHeapMemory> heapMem(static_cast<CameraHeapMemory *>(mem->handle));
-    // TODO: Using unsecurePointer() has some associated security pitfalls
-    //       (see declaration for details).
-    //       Either document why it is safe in this case or address the
-    //       issue (e.g. by copying).
-    VideoNativeHandleMetadata* md = (VideoNativeHandleMetadata*)
-            heapMem->mBuffers[bufferIndex]->unsecurePointer();
-    md->pHandle = const_cast<native_handle_t*>(frameData.getNativeHandle());
-    sDataCbTimestamp(timestamp, (int32_t) msgType, mem, bufferIndex, this);
-    return hardware::Void();
-}
-
-hardware::Return<void> CameraHardwareInterface::handleCallbackTimestampBatch(
-        DataCallbackMsg msgType,
-        const hardware::hidl_vec<hardware::camera::device::V1_0::HandleTimestampMessage>& messages) {
-    std::vector<android::HandleTimestampMessage> msgs;
-    msgs.reserve(messages.size());
-    {
-        std::lock_guard<std::mutex> lock(mHidlMemPoolMapLock);
-        for (const auto& hidl_msg : messages) {
-            if (mHidlMemPoolMap.count(hidl_msg.data) == 0) {
-                ALOGE("%s: memory pool ID %d not found", __FUNCTION__, hidl_msg.data);
-                return hardware::Void();
-            }
-            sp<CameraHeapMemory> mem(
-                    static_cast<CameraHeapMemory *>(mHidlMemPoolMap.at(hidl_msg.data)->handle));
-
-            if (hidl_msg.bufferIndex >= mem->mNumBufs) {
-                ALOGE("%s: invalid buffer index %d, max allowed is %d", __FUNCTION__,
-                     hidl_msg.bufferIndex, mem->mNumBufs);
-                return hardware::Void();
-            }
-            // TODO: Using unsecurePointer() has some associated security pitfalls
-            //       (see declaration for details).
-            //       Either document why it is safe in this case or address the
-            //       issue (e.g. by copying).
-            VideoNativeHandleMetadata* md = (VideoNativeHandleMetadata*)
-                    mem->mBuffers[hidl_msg.bufferIndex]->unsecurePointer();
-            md->pHandle = const_cast<native_handle_t*>(hidl_msg.frameData.getNativeHandle());
-
-            msgs.push_back({hidl_msg.timestamp, mem->mBuffers[hidl_msg.bufferIndex]});
-        }
-    }
-    mDataCbTimestampBatch((int32_t) msgType, msgs, mCbUser);
-    return hardware::Void();
-}
-
-std::pair<bool, uint64_t> CameraHardwareInterface::getBufferId(
-        ANativeWindowBuffer* anb) {
-    std::lock_guard<std::mutex> lock(mBufferIdMapLock);
-
-    buffer_handle_t& buf = anb->handle;
-    auto it = mBufferIdMap.find(buf);
-    if (it == mBufferIdMap.end()) {
-        uint64_t bufId = mNextBufferId++;
-        mBufferIdMap[buf] = bufId;
-        mReversedBufMap[bufId] = anb;
-        return std::make_pair(true, bufId);
-    } else {
-        return std::make_pair(false, it->second);
-    }
-}
-
-void CameraHardwareInterface::cleanupCirculatingBuffers() {
-    std::lock_guard<std::mutex> lock(mBufferIdMapLock);
-    mBufferIdMap.clear();
-    mReversedBufMap.clear();
-}
-
-hardware::Return<void>
-CameraHardwareInterface::dequeueBuffer(dequeueBuffer_cb _hidl_cb) {
-    ANativeWindow *a = mPreviewWindow.get();
-    if (a == nullptr) {
-        ALOGE("%s: preview window is null", __FUNCTION__);
-        return hardware::Void();
-    }
-    ANativeWindowBuffer* anb;
-    int rc = native_window_dequeue_buffer_and_wait(a, &anb);
-    Status s = Status::INTERNAL_ERROR;
-    uint64_t bufferId = 0;
-    uint32_t stride = 0;
-    hidl_handle buf = nullptr;
-    if (rc == OK) {
-        s = Status::OK;
-        auto pair = getBufferId(anb);
-        buf = (pair.first) ? anb->handle : nullptr;
-        bufferId = pair.second;
-        stride = anb->stride;
-    }
-
-    _hidl_cb(s, bufferId, buf, stride);
-    return hardware::Void();
-}
-
-hardware::Return<Status>
-CameraHardwareInterface::enqueueBuffer(uint64_t bufferId) {
-    ANativeWindow *a = mPreviewWindow.get();
-    if (a == nullptr) {
-        ALOGE("%s: preview window is null", __FUNCTION__);
-        return Status::INTERNAL_ERROR;
-    }
-    if (mReversedBufMap.count(bufferId) == 0) {
-        ALOGE("%s: bufferId %" PRIu64 " not found", __FUNCTION__, bufferId);
-        return Status::ILLEGAL_ARGUMENT;
-    }
-    int rc = a->queueBuffer(a, mReversedBufMap.at(bufferId), -1);
-    if (rc == 0) {
-        return Status::OK;
-    }
-    return Status::INTERNAL_ERROR;
-}
-
-hardware::Return<Status>
-CameraHardwareInterface::cancelBuffer(uint64_t bufferId) {
-    ANativeWindow *a = mPreviewWindow.get();
-    if (a == nullptr) {
-        ALOGE("%s: preview window is null", __FUNCTION__);
-        return Status::INTERNAL_ERROR;
-    }
-    if (mReversedBufMap.count(bufferId) == 0) {
-        ALOGE("%s: bufferId %" PRIu64 " not found", __FUNCTION__, bufferId);
-        return Status::ILLEGAL_ARGUMENT;
-    }
-    int rc = a->cancelBuffer(a, mReversedBufMap.at(bufferId), -1);
-    if (rc == 0) {
-        return Status::OK;
-    }
-    return Status::INTERNAL_ERROR;
-}
-
-hardware::Return<Status>
-CameraHardwareInterface::setBufferCount(uint32_t count) {
-    ANativeWindow *a = mPreviewWindow.get();
-    if (a != nullptr) {
-        // Workaround for b/27039775
-        // Previously, setting the buffer count would reset the buffer
-        // queue's flag that allows for all buffers to be dequeued on the
-        // producer side, instead of just the producer's declared max count,
-        // if no filled buffers have yet been queued by the producer.  This
-        // reset no longer happens, but some HALs depend on this behavior,
-        // so it needs to be maintained for HAL backwards compatibility.
-        // Simulate the prior behavior by disconnecting/reconnecting to the
-        // window and setting the values again.  This has the drawback of
-        // actually causing memory reallocation, which may not have happened
-        // in the past.
-        native_window_api_disconnect(a, NATIVE_WINDOW_API_CAMERA);
-        native_window_api_connect(a, NATIVE_WINDOW_API_CAMERA);
-        if (mPreviewScalingMode != NOT_SET) {
-            native_window_set_scaling_mode(a, mPreviewScalingMode);
-        }
-        if (mPreviewTransform != NOT_SET) {
-            native_window_set_buffers_transform(a, mPreviewTransform);
-        }
-        if (mPreviewWidth != NOT_SET) {
-            native_window_set_buffers_dimensions(a,
-                    mPreviewWidth, mPreviewHeight);
-            native_window_set_buffers_format(a, mPreviewFormat);
-        }
-        if (mPreviewUsage != 0) {
-            native_window_set_usage(a, mPreviewUsage);
-        }
-        if (mPreviewSwapInterval != NOT_SET) {
-            a->setSwapInterval(a, mPreviewSwapInterval);
-        }
-        if (mPreviewCrop.left != NOT_SET) {
-            native_window_set_crop(a, &(mPreviewCrop));
-        }
-    }
-    int rc = native_window_set_buffer_count(a, count);
-    if (rc == OK) {
-        cleanupCirculatingBuffers();
-        return Status::OK;
-    }
-    return Status::INTERNAL_ERROR;
-}
-
-hardware::Return<Status>
-CameraHardwareInterface::setBuffersGeometry(
-        uint32_t w, uint32_t h, hardware::graphics::common::V1_0::PixelFormat format) {
-    Status s = Status::INTERNAL_ERROR;
-    ANativeWindow *a = mPreviewWindow.get();
-    if (a == nullptr) {
-        ALOGE("%s: preview window is null", __FUNCTION__);
-        return s;
-    }
-    mPreviewWidth = w;
-    mPreviewHeight = h;
-    mPreviewFormat = (int) format;
-    int rc = native_window_set_buffers_dimensions(a, w, h);
-    if (rc == OK) {
-        rc = native_window_set_buffers_format(a, mPreviewFormat);
-    }
-    if (rc == OK) {
-        cleanupCirculatingBuffers();
-        s = Status::OK;
-    }
-    return s;
-}
-
-hardware::Return<Status>
-CameraHardwareInterface::setCrop(int32_t left, int32_t top, int32_t right, int32_t bottom) {
-    Status s = Status::INTERNAL_ERROR;
-    ANativeWindow *a = mPreviewWindow.get();
-    if (a == nullptr) {
-        ALOGE("%s: preview window is null", __FUNCTION__);
-        return s;
-    }
-    mPreviewCrop.left = left;
-    mPreviewCrop.top = top;
-    mPreviewCrop.right = right;
-    mPreviewCrop.bottom = bottom;
-    int rc = native_window_set_crop(a, &mPreviewCrop);
-    if (rc == OK) {
-        s = Status::OK;
-    }
-    return s;
-}
-
-hardware::Return<Status>
-CameraHardwareInterface::setUsage(hardware::graphics::common::V1_0::BufferUsage usage) {
-    Status s = Status::INTERNAL_ERROR;
-    ANativeWindow *a = mPreviewWindow.get();
-    if (a == nullptr) {
-        ALOGE("%s: preview window is null", __FUNCTION__);
-        return s;
-    }
-    mPreviewUsage = static_cast<uint64_t> (usage);
-    int rc = native_window_set_usage(a, mPreviewUsage);
-    if (rc == OK) {
-        cleanupCirculatingBuffers();
-        s = Status::OK;
-    }
-    return s;
-}
-
-hardware::Return<Status>
-CameraHardwareInterface::setSwapInterval(int32_t interval) {
-    Status s = Status::INTERNAL_ERROR;
-    ANativeWindow *a = mPreviewWindow.get();
-    if (a == nullptr) {
-        ALOGE("%s: preview window is null", __FUNCTION__);
-        return s;
-    }
-    mPreviewSwapInterval = interval;
-    int rc = a->setSwapInterval(a, interval);
-    if (rc == OK) {
-        s = Status::OK;
-    }
-    return s;
-}
-
-hardware::Return<void>
-CameraHardwareInterface::getMinUndequeuedBufferCount(getMinUndequeuedBufferCount_cb _hidl_cb) {
-    ANativeWindow *a = mPreviewWindow.get();
-    if (a == nullptr) {
-        ALOGE("%s: preview window is null", __FUNCTION__);
-        return hardware::Void();
-    }
-    int count = 0;
-    int rc = a->query(a, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &count);
-    Status s = Status::INTERNAL_ERROR;
-    if (rc == OK) {
-        s = Status::OK;
-    }
-    _hidl_cb(s, count);
-    return hardware::Void();
-}
-
-hardware::Return<Status>
-CameraHardwareInterface::setTimestamp(int64_t timestamp) {
-    Status s = Status::INTERNAL_ERROR;
-    ANativeWindow *a = mPreviewWindow.get();
-    if (a == nullptr) {
-        ALOGE("%s: preview window is null", __FUNCTION__);
-        return s;
-    }
-    int rc = native_window_set_buffers_timestamp(a, timestamp);
-    if (rc == OK) {
-        s = Status::OK;
-    }
-    return s;
-}
-
-status_t CameraHardwareInterface::setPreviewWindow(const sp<ANativeWindow>& buf)
-{
-    ALOGV("%s(%s) buf %p", __FUNCTION__, mName.string(), buf.get());
-    if (CC_LIKELY(mHidlDevice != nullptr)) {
-        mPreviewWindow = buf;
-        if (buf != nullptr) {
-            if (mPreviewScalingMode != NOT_SET) {
-                setPreviewScalingMode(mPreviewScalingMode);
-            }
-            if (mPreviewTransform != NOT_SET) {
-                setPreviewTransform(mPreviewTransform);
-            }
-        }
-        return CameraProviderManager::mapToStatusT(
-                mHidlDevice->setPreviewWindow(buf.get() ? this : nullptr));
-    }
-    return INVALID_OPERATION;
-}
-
-void CameraHardwareInterface::setCallbacks(notify_callback notify_cb,
-        data_callback data_cb,
-        data_callback_timestamp data_cb_timestamp,
-        data_callback_timestamp_batch data_cb_timestamp_batch,
-        void* user)
-{
-    mNotifyCb = notify_cb;
-    mDataCb = data_cb;
-    mDataCbTimestamp = data_cb_timestamp;
-    mDataCbTimestampBatch = data_cb_timestamp_batch;
-    mCbUser = user;
-
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-}
-
-void CameraHardwareInterface::enableMsgType(int32_t msgType)
-{
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-    if (CC_LIKELY(mHidlDevice != nullptr)) {
-        mHidlDevice->enableMsgType(msgType);
-    }
-}
-
-void CameraHardwareInterface::disableMsgType(int32_t msgType)
-{
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-    if (CC_LIKELY(mHidlDevice != nullptr)) {
-        mHidlDevice->disableMsgType(msgType);
-    }
-}
-
-int CameraHardwareInterface::msgTypeEnabled(int32_t msgType)
-{
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-    if (CC_LIKELY(mHidlDevice != nullptr)) {
-        return mHidlDevice->msgTypeEnabled(msgType);
-    }
-    return false;
-}
-
-status_t CameraHardwareInterface::startPreview()
-{
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-    if (CC_LIKELY(mHidlDevice != nullptr)) {
-        return CameraProviderManager::mapToStatusT(
-                mHidlDevice->startPreview());
-    }
-    return INVALID_OPERATION;
-}
-
-void CameraHardwareInterface::stopPreview()
-{
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-    if (CC_LIKELY(mHidlDevice != nullptr)) {
-        mHidlDevice->stopPreview();
-    }
-}
-
-int CameraHardwareInterface::previewEnabled()
-{
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-    if (CC_LIKELY(mHidlDevice != nullptr)) {
-        return mHidlDevice->previewEnabled();
-    }
-    return false;
-}
-
-status_t CameraHardwareInterface::storeMetaDataInBuffers(int enable)
-{
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-    if (CC_LIKELY(mHidlDevice != nullptr)) {
-        return CameraProviderManager::mapToStatusT(
-                mHidlDevice->storeMetaDataInBuffers(enable));
-    }
-    return enable ? INVALID_OPERATION: OK;
-}
-
-status_t CameraHardwareInterface::startRecording()
-{
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-    if (CC_LIKELY(mHidlDevice != nullptr)) {
-        return CameraProviderManager::mapToStatusT(
-                mHidlDevice->startRecording());
-    }
-    return INVALID_OPERATION;
-}
-
-/**
- * Stop a previously started recording.
- */
-void CameraHardwareInterface::stopRecording()
-{
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-    if (CC_LIKELY(mHidlDevice != nullptr)) {
-        mHidlDevice->stopRecording();
-    }
-}
-
-/**
- * Returns true if recording is enabled.
- */
-int CameraHardwareInterface::recordingEnabled()
-{
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-    if (CC_LIKELY(mHidlDevice != nullptr)) {
-        return mHidlDevice->recordingEnabled();
-    }
-    return false;
-}
-
-void CameraHardwareInterface::releaseRecordingFrame(const sp<IMemory>& mem)
-{
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-    ssize_t offset;
-    size_t size;
-    sp<IMemoryHeap> heap = mem->getMemory(&offset, &size);
-    int heapId = heap->getHeapID();
-    int bufferIndex = offset / size;
-    if (CC_LIKELY(mHidlDevice != nullptr)) {
-        if (size == sizeof(VideoNativeHandleMetadata)) {
-            // TODO: Using unsecurePointer() has some associated security pitfalls
-            //       (see declaration for details).
-            //       Either document why it is safe in this case or address the
-            //       issue (e.g. by copying).
-            VideoNativeHandleMetadata* md = (VideoNativeHandleMetadata*) mem->unsecurePointer();
-            // Caching the handle here because md->pHandle will be subject to HAL's edit
-            native_handle_t* nh = md->pHandle;
-            hidl_handle frame = nh;
-            mHidlDevice->releaseRecordingFrameHandle(heapId, bufferIndex, frame);
-            native_handle_close(nh);
-            native_handle_delete(nh);
-        } else {
-            mHidlDevice->releaseRecordingFrame(heapId, bufferIndex);
-        }
-    }
-}
-
-void CameraHardwareInterface::releaseRecordingFrameBatch(const std::vector<sp<IMemory>>& frames)
-{
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-    size_t n = frames.size();
-    std::vector<VideoFrameMessage> msgs;
-    msgs.reserve(n);
-    for (auto& mem : frames) {
-        if (CC_LIKELY(mHidlDevice != nullptr)) {
-            ssize_t offset;
-            size_t size;
-            sp<IMemoryHeap> heap = mem->getMemory(&offset, &size);
-            if (size == sizeof(VideoNativeHandleMetadata)) {
-                uint32_t heapId = heap->getHeapID();
-                uint32_t bufferIndex = offset / size;
-                // TODO: Using unsecurePointer() has some associated security pitfalls
-                //       (see declaration for details).
-                //       Either document why it is safe in this case or address the
-                //       issue (e.g. by copying).
-                VideoNativeHandleMetadata* md = (VideoNativeHandleMetadata*) mem->unsecurePointer();
-                // Caching the handle here because md->pHandle will be subject to HAL's edit
-                native_handle_t* nh = md->pHandle;
-                VideoFrameMessage msg;
-                msgs.push_back({nh, heapId, bufferIndex});
-            } else {
-                ALOGE("%s only supports VideoNativeHandleMetadata mode", __FUNCTION__);
-                return;
-            }
-        }
-    }
-
-    mHidlDevice->releaseRecordingFrameHandleBatch(msgs);
-
-    for (auto& msg : msgs) {
-        native_handle_t* nh = const_cast<native_handle_t*>(msg.frameData.getNativeHandle());
-        native_handle_close(nh);
-        native_handle_delete(nh);
-    }
-}
-
-status_t CameraHardwareInterface::autoFocus()
-{
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-    if (CC_LIKELY(mHidlDevice != nullptr)) {
-        return CameraProviderManager::mapToStatusT(
-                mHidlDevice->autoFocus());
-    }
-    return INVALID_OPERATION;
-}
-
-status_t CameraHardwareInterface::cancelAutoFocus()
-{
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-    if (CC_LIKELY(mHidlDevice != nullptr)) {
-        return CameraProviderManager::mapToStatusT(
-                mHidlDevice->cancelAutoFocus());
-    }
-    return INVALID_OPERATION;
-}
-
-status_t CameraHardwareInterface::takePicture()
-{
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-    if (CC_LIKELY(mHidlDevice != nullptr)) {
-        return CameraProviderManager::mapToStatusT(
-                mHidlDevice->takePicture());
-    }
-    return INVALID_OPERATION;
-}
-
-status_t CameraHardwareInterface::cancelPicture()
-{
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-    if (CC_LIKELY(mHidlDevice != nullptr)) {
-        return CameraProviderManager::mapToStatusT(
-                mHidlDevice->cancelPicture());
-    }
-    return INVALID_OPERATION;
-}
-
-status_t CameraHardwareInterface::setParameters(const CameraParameters &params)
-{
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-    if (CC_LIKELY(mHidlDevice != nullptr)) {
-        return CameraProviderManager::mapToStatusT(
-                mHidlDevice->setParameters(params.flatten().string()));
-    }
-    return INVALID_OPERATION;
-}
-
-CameraParameters CameraHardwareInterface::getParameters() const
-{
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-    CameraParameters parms;
-    if (CC_LIKELY(mHidlDevice != nullptr)) {
-        hardware::hidl_string outParam;
-        mHidlDevice->getParameters(
-                [&outParam](const auto& outStr) {
-                    outParam = outStr;
-                });
-        String8 tmp(outParam.c_str());
-        parms.unflatten(tmp);
-    }
-    return parms;
-}
-
-status_t CameraHardwareInterface::sendCommand(int32_t cmd, int32_t arg1, int32_t arg2)
-{
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-    if (CC_LIKELY(mHidlDevice != nullptr)) {
-        return CameraProviderManager::mapToStatusT(
-                mHidlDevice->sendCommand((CommandType) cmd, arg1, arg2));
-    }
-    return INVALID_OPERATION;
-}
-
-/**
- * Release the hardware resources owned by this object.  Note that this is
- * *not* done in the destructor.
- */
-void CameraHardwareInterface::release() {
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-    if (CC_LIKELY(mHidlDevice != nullptr)) {
-        mHidlDevice->close();
-        mHidlDevice.clear();
-    }
-}
-
-/**
- * Dump state of the camera hardware
- */
-status_t CameraHardwareInterface::dump(int fd, const Vector<String16>& /*args*/) const
-{
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-    if (CC_LIKELY(mHidlDevice != nullptr)) {
-        native_handle_t* handle = native_handle_create(1,0);
-        handle->data[0] = fd;
-        Status s = mHidlDevice->dumpState(handle);
-        native_handle_delete(handle);
-        return CameraProviderManager::mapToStatusT(s);
-    }
-    return OK; // It's fine if the HAL doesn't implement dump()
-}
-
-void CameraHardwareInterface::sNotifyCb(int32_t msg_type, int32_t ext1,
-                        int32_t ext2, void *user)
-{
-    ALOGV("%s", __FUNCTION__);
-    CameraHardwareInterface *object =
-            static_cast<CameraHardwareInterface *>(user);
-    object->mNotifyCb(msg_type, ext1, ext2, object->mCbUser);
-}
-
-void CameraHardwareInterface::sDataCb(int32_t msg_type,
-                      const camera_memory_t *data, unsigned int index,
-                      camera_frame_metadata_t *metadata,
-                      void *user)
-{
-    ALOGV("%s", __FUNCTION__);
-    CameraHardwareInterface *object =
-            static_cast<CameraHardwareInterface *>(user);
-    sp<CameraHeapMemory> mem(static_cast<CameraHeapMemory *>(data->handle));
-    if (index >= mem->mNumBufs) {
-        ALOGE("%s: invalid buffer index %d, max allowed is %d", __FUNCTION__,
-             index, mem->mNumBufs);
-        return;
-    }
-    object->mDataCb(msg_type, mem->mBuffers[index], metadata, object->mCbUser);
-}
-
-void CameraHardwareInterface::sDataCbTimestamp(nsecs_t timestamp, int32_t msg_type,
-                         const camera_memory_t *data, unsigned index,
-                         void *user)
-{
-    ALOGV("%s", __FUNCTION__);
-    CameraHardwareInterface *object =
-            static_cast<CameraHardwareInterface *>(user);
-    // Start refcounting the heap object from here on.  When the clients
-    // drop all references, it will be destroyed (as well as the enclosed
-    // MemoryHeapBase.
-    sp<CameraHeapMemory> mem(static_cast<CameraHeapMemory *>(data->handle));
-    if (index >= mem->mNumBufs) {
-        ALOGE("%s: invalid buffer index %d, max allowed is %d", __FUNCTION__,
-             index, mem->mNumBufs);
-        return;
-    }
-    object->mDataCbTimestamp(timestamp, msg_type, mem->mBuffers[index], object->mCbUser);
-}
-
-camera_memory_t* CameraHardwareInterface::sGetMemory(
-        int fd, size_t buf_size, uint_t num_bufs,
-        void *user __attribute__((unused)))
-{
-    CameraHeapMemory *mem;
-    if (fd < 0) {
-        mem = new CameraHeapMemory(buf_size, num_bufs);
-    } else {
-        mem = new CameraHeapMemory(fd, buf_size, num_bufs);
-    }
-    mem->incStrong(mem);
-    return &mem->handle;
-}
-
-void CameraHardwareInterface::sPutMemory(camera_memory_t *data)
-{
-    if (!data) {
-        return;
-    }
-
-    CameraHeapMemory *mem = static_cast<CameraHeapMemory *>(data->handle);
-    mem->decStrong(mem);
-}
-
-}; // namespace android
diff --git a/services/camera/libcameraservice/device1/CameraHardwareInterface.h b/services/camera/libcameraservice/device1/CameraHardwareInterface.h
deleted file mode 100644
index e519b04..0000000
--- a/services/camera/libcameraservice/device1/CameraHardwareInterface.h
+++ /dev/null
@@ -1,488 +0,0 @@
-/*
- * Copyright (C) 2008 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_HARDWARE_CAMERA_HARDWARE_INTERFACE_H
-#define ANDROID_HARDWARE_CAMERA_HARDWARE_INTERFACE_H
-
-#include <unordered_map>
-#include <binder/IMemory.h>
-#include <binder/MemoryBase.h>
-#include <binder/MemoryHeapBase.h>
-#include <utils/RefBase.h>
-#include <ui/GraphicBuffer.h>
-#include <camera/Camera.h>
-#include <camera/CameraParameters.h>
-#include <system/window.h>
-#include <hardware/camera.h>
-
-#include <common/CameraProviderManager.h>
-
-namespace android {
-
-typedef void (*notify_callback)(int32_t msgType,
-                            int32_t ext1,
-                            int32_t ext2,
-                            void* user);
-
-typedef void (*data_callback)(int32_t msgType,
-                            const sp<IMemory> &dataPtr,
-                            camera_frame_metadata_t *metadata,
-                            void* user);
-
-typedef void (*data_callback_timestamp)(nsecs_t timestamp,
-                            int32_t msgType,
-                            const sp<IMemory> &dataPtr,
-                            void *user);
-
-struct HandleTimestampMessage {
-    nsecs_t timestamp;
-    const sp<IMemory> dataPtr;
-};
-
-typedef void (*data_callback_timestamp_batch)(
-        int32_t msgType,
-        const std::vector<HandleTimestampMessage>&, void* user);
-
-/**
- * CameraHardwareInterface.h defines the interface to the
- * camera hardware abstraction layer, used for setting and getting
- * parameters, live previewing, and taking pictures. It is used for
- * HAL devices with version CAMERA_DEVICE_API_VERSION_1_0 only.
- *
- * It is a reference-counted interface with RefBase as its base class.
- * CameraService calls openCameraHardware() to retrieve a strong pointer to an
- * instance of this interface; openCameraHardware() may be called multiple times.
- * The following steps describe a typical sequence:
- *
- *   -# After CameraService calls openCameraHardware(), getParameters() and
- *      setParameters() are used to initialize the camera instance.
- *   -# startPreview() is called.
- *
- * Prior to taking a picture, CameraService often calls autoFocus(). When auto
- * focusing has completed, the camera instance sends a CAMERA_MSG_FOCUS notification,
- * which informs the application whether focusing was successful. The camera instance
- * only sends this message once, and it is up to the application to call autoFocus()
- * again if refocusing is desired.
- *
- * CameraService calls takePicture() to request the camera instance take a
- * picture. At this point, if a shutter, postview, raw, and/or compressed
- * callback is desired, the corresponding message must be enabled. Any memory
- * provided in a data callback must be copied if it's needed after returning.
- */
-
-class CameraHardwareInterface :
-        public virtual RefBase,
-        public virtual hardware::camera::device::V1_0::ICameraDeviceCallback,
-        public virtual hardware::camera::device::V1_0::ICameraDevicePreviewCallback {
-
-public:
-    explicit CameraHardwareInterface(const char *name):
-            mHidlDevice(nullptr),
-            mName(name),
-            mPreviewScalingMode(NOT_SET),
-            mPreviewTransform(NOT_SET),
-            mPreviewWidth(NOT_SET),
-            mPreviewHeight(NOT_SET),
-            mPreviewFormat(NOT_SET),
-            mPreviewUsage(0),
-            mPreviewSwapInterval(NOT_SET),
-            mPreviewCrop{NOT_SET,NOT_SET,NOT_SET,NOT_SET}
-    {
-    }
-
-    ~CameraHardwareInterface();
-
-    status_t initialize(sp<CameraProviderManager> manager);
-
-    /** Set the ANativeWindow to which preview frames are sent */
-    status_t setPreviewWindow(const sp<ANativeWindow>& buf);
-
-    status_t setPreviewScalingMode(int scalingMode);
-
-    status_t setPreviewTransform(int transform);
-
-    /** Set the notification and data callbacks */
-    void setCallbacks(notify_callback notify_cb,
-                      data_callback data_cb,
-                      data_callback_timestamp data_cb_timestamp,
-                      data_callback_timestamp_batch data_cb_timestamp_batch,
-                      void* user);
-
-    /**
-     * The following three functions all take a msgtype,
-     * which is a bitmask of the messages defined in
-     * include/ui/Camera.h
-     */
-
-    /**
-     * Enable a message, or set of messages.
-     */
-    void enableMsgType(int32_t msgType);
-
-    /**
-     * Disable a message, or a set of messages.
-     *
-     * Once it receives a call to disableMsgType(CAMERA_MSG_VIDEO_FRAME), the
-     * camera hal should not rely on its client to call releaseRecordingFrame() to
-     * release video recording frames sent out by the camera hal before and after
-     * the disableMsgType(CAMERA_MSG_VIDEO_FRAME) call. Camera hal clients must not
-     * modify/access any video recording frame after calling
-     * disableMsgType(CAMERA_MSG_VIDEO_FRAME).
-     */
-    void disableMsgType(int32_t msgType);
-
-    /**
-     * Query whether a message, or a set of messages, is enabled.
-     * Note that this operates as an AND: if any of the messages
-     * queried are off, this will return false.
-     */
-    int msgTypeEnabled(int32_t msgType);
-
-    /**
-     * Start preview mode.
-     */
-    status_t startPreview();
-
-    /**
-     * Stop a previously started preview.
-     */
-    void stopPreview();
-
-    /**
-     * Returns true if preview is enabled.
-     */
-    int previewEnabled();
-
-    /**
-     * Request the camera hal to store meta data or real YUV data in
-     * the video buffers sent out via CAMERA_MSG_VIDEO_FRAME for a
-     * recording session. If it is not called, the default camera
-     * hal behavior is to store real YUV data in the video buffers.
-     *
-     * This method should be called before startRecording() in order
-     * to be effective.
-     *
-     * If meta data is stored in the video buffers, it is up to the
-     * receiver of the video buffers to interpret the contents and
-     * to find the actual frame data with the help of the meta data
-     * in the buffer. How this is done is outside of the scope of
-     * this method.
-     *
-     * Some camera hals may not support storing meta data in the video
-     * buffers, but all camera hals should support storing real YUV data
-     * in the video buffers. If the camera hal does not support storing
-     * the meta data in the video buffers when it is requested to do
-     * so, INVALID_OPERATION must be returned. It is very useful for
-     * the camera hal to pass meta data rather than the actual frame
-     * data directly to the video encoder, since the amount of the
-     * uncompressed frame data can be very large if video size is large.
-     *
-     * @param enable true to instruct the camera hal to store
-     *      meta data in the video buffers; false to instruct
-     *      the camera hal to store real YUV data in the video
-     *      buffers.
-     *
-     * @return OK on success.
-     */
-
-    status_t storeMetaDataInBuffers(int enable);
-
-    /**
-     * Start record mode. When a record image is available a CAMERA_MSG_VIDEO_FRAME
-     * message is sent with the corresponding frame. Every record frame must be released
-     * by a camera hal client via releaseRecordingFrame() before the client calls
-     * disableMsgType(CAMERA_MSG_VIDEO_FRAME). After the client calls
-     * disableMsgType(CAMERA_MSG_VIDEO_FRAME), it is camera hal's responsibility
-     * to manage the life-cycle of the video recording frames, and the client must
-     * not modify/access any video recording frames.
-     */
-    status_t startRecording();
-
-    /**
-     * Stop a previously started recording.
-     */
-    void stopRecording();
-
-    /**
-     * Returns true if recording is enabled.
-     */
-    int recordingEnabled();
-
-    /**
-     * Release a record frame previously returned by CAMERA_MSG_VIDEO_FRAME.
-     *
-     * It is camera hal client's responsibility to release video recording
-     * frames sent out by the camera hal before the camera hal receives
-     * a call to disableMsgType(CAMERA_MSG_VIDEO_FRAME). After it receives
-     * the call to disableMsgType(CAMERA_MSG_VIDEO_FRAME), it is camera hal's
-     * responsibility to manage the life-cycle of the video recording
-     * frames.
-     */
-    void releaseRecordingFrame(const sp<IMemory>& mem);
-
-    /**
-     * Release a batch of recording frames previously returned by
-     * CAMERA_MSG_VIDEO_FRAME. This method only supports frames that are
-     * stored as VideoNativeHandleMetadata.
-     *
-     * It is camera hal client's responsibility to release video recording
-     * frames sent out by the camera hal before the camera hal receives
-     * a call to disableMsgType(CAMERA_MSG_VIDEO_FRAME). After it receives
-     * the call to disableMsgType(CAMERA_MSG_VIDEO_FRAME), it is camera hal's
-     * responsibility to manage the life-cycle of the video recording
-     * frames.
-     */
-    void releaseRecordingFrameBatch(const std::vector<sp<IMemory>>& frames);
-
-    /**
-     * Start auto focus; the notification callback routine is called
-     * with CAMERA_MSG_FOCUS once when focusing is complete. autoFocus()
-     * will be called again if another auto focus is needed.
-     */
-    status_t autoFocus();
-
-    /**
-     * Cancels auto-focus function. If the auto-focus is still in progress,
-     * this function will cancel it. Whether the auto-focus is in progress
-     * or not, this function will return the focus position to the default.
-     * If the camera does not support auto-focus, this is a no-op.
-     */
-    status_t cancelAutoFocus();
-
-    /**
-     * Take a picture.
-     */
-    status_t takePicture();
-
-    /**
-     * Cancel a picture that was started with takePicture.  Calling this
-     * method when no picture is being taken is a no-op.
-     */
-    status_t cancelPicture();
-
-    /**
-     * Set the camera parameters. This returns BAD_VALUE if any parameter is
-     * invalid or not supported. */
-    status_t setParameters(const CameraParameters &params);
-
-    /** Return the camera parameters. */
-    CameraParameters getParameters() const;
-
-    /**
-     * Send command to camera driver.
-     */
-    status_t sendCommand(int32_t cmd, int32_t arg1, int32_t arg2);
-
-    /**
-     * Release the hardware resources owned by this object.  Note that this is
-     * *not* done in the destructor.
-     */
-    void release();
-
-    /**
-     * Dump state of the camera hardware
-     */
-    status_t dump(int fd, const Vector<String16>& /*args*/) const;
-
-private:
-    sp<hardware::camera::device::V1_0::ICameraDevice> mHidlDevice;
-    String8 mName;
-
-    static void sNotifyCb(int32_t msg_type, int32_t ext1,
-                            int32_t ext2, void *user);
-
-    static void sDataCb(int32_t msg_type,
-                          const camera_memory_t *data, unsigned int index,
-                          camera_frame_metadata_t *metadata,
-                          void *user);
-
-    static void sDataCbTimestamp(nsecs_t timestamp, int32_t msg_type,
-                             const camera_memory_t *data, unsigned index,
-                             void *user);
-
-    // This is a utility class that combines a MemoryHeapBase and a MemoryBase
-    // in one.  Since we tend to use them in a one-to-one relationship, this is
-    // handy.
-    class CameraHeapMemory : public RefBase {
-    public:
-        CameraHeapMemory(int fd, size_t buf_size, uint_t num_buffers = 1) :
-                         mBufSize(buf_size),
-                         mNumBufs(num_buffers)
-        {
-            mHeap = new MemoryHeapBase(fd, buf_size * num_buffers);
-            commonInitialization();
-        }
-
-        explicit CameraHeapMemory(size_t buf_size, uint_t num_buffers = 1) :
-                                  mBufSize(buf_size),
-                                  mNumBufs(num_buffers)
-        {
-            mHeap = new MemoryHeapBase(buf_size * num_buffers);
-            commonInitialization();
-        }
-
-        void commonInitialization()
-        {
-            handle.data = mHeap->base();
-            handle.size = mBufSize * mNumBufs;
-            handle.handle = this;
-
-            mBuffers = new sp<MemoryBase>[mNumBufs];
-            for (uint_t i = 0; i < mNumBufs; i++)
-                mBuffers[i] = new MemoryBase(mHeap,
-                                             i * mBufSize,
-                                             mBufSize);
-
-            handle.release = sPutMemory;
-        }
-
-        virtual ~CameraHeapMemory()
-        {
-            delete [] mBuffers;
-        }
-
-        size_t mBufSize;
-        uint_t mNumBufs;
-        sp<MemoryHeapBase> mHeap;
-        sp<MemoryBase> *mBuffers;
-
-        camera_memory_t handle;
-    };
-
-    static camera_memory_t* sGetMemory(int fd, size_t buf_size, uint_t num_bufs,
-                                         void *user __attribute__((unused)));
-
-    static void sPutMemory(camera_memory_t *data);
-
-    std::pair<bool, uint64_t> getBufferId(ANativeWindowBuffer* anb);
-    void cleanupCirculatingBuffers();
-
-    /**
-     * Implementation of android::hardware::camera::device::V1_0::ICameraDeviceCallback
-     */
-    hardware::Return<void> notifyCallback(
-            hardware::camera::device::V1_0::NotifyCallbackMsg msgType,
-            int32_t ext1, int32_t ext2) override;
-    hardware::Return<uint32_t> registerMemory(
-            const hardware::hidl_handle& descriptor,
-            uint32_t bufferSize, uint32_t bufferCount) override;
-    hardware::Return<void> unregisterMemory(uint32_t memId) override;
-    hardware::Return<void> dataCallback(
-            hardware::camera::device::V1_0::DataCallbackMsg msgType,
-            uint32_t data, uint32_t bufferIndex,
-            const hardware::camera::device::V1_0::CameraFrameMetadata& metadata) override;
-    hardware::Return<void> dataCallbackTimestamp(
-            hardware::camera::device::V1_0::DataCallbackMsg msgType,
-            uint32_t data, uint32_t bufferIndex, int64_t timestamp) override;
-    hardware::Return<void> handleCallbackTimestamp(
-            hardware::camera::device::V1_0::DataCallbackMsg msgType,
-            const hardware::hidl_handle& frameData, uint32_t data,
-            uint32_t bufferIndex, int64_t timestamp) override;
-    hardware::Return<void> handleCallbackTimestampBatch(
-            hardware::camera::device::V1_0::DataCallbackMsg msgType,
-            const hardware::hidl_vec<
-                    hardware::camera::device::V1_0::HandleTimestampMessage>&) override;
-
-    /**
-     * Implementation of android::hardware::camera::device::V1_0::ICameraDevicePreviewCallback
-     */
-    hardware::Return<void> dequeueBuffer(dequeueBuffer_cb _hidl_cb) override;
-    hardware::Return<hardware::camera::common::V1_0::Status>
-            enqueueBuffer(uint64_t bufferId) override;
-    hardware::Return<hardware::camera::common::V1_0::Status>
-            cancelBuffer(uint64_t bufferId) override;
-    hardware::Return<hardware::camera::common::V1_0::Status>
-            setBufferCount(uint32_t count) override;
-    hardware::Return<hardware::camera::common::V1_0::Status>
-            setBuffersGeometry(uint32_t w, uint32_t h,
-                    hardware::graphics::common::V1_0::PixelFormat format) override;
-    hardware::Return<hardware::camera::common::V1_0::Status>
-            setCrop(int32_t left, int32_t top, int32_t right, int32_t bottom) override;
-    hardware::Return<hardware::camera::common::V1_0::Status>
-            setUsage(hardware::graphics::common::V1_0::BufferUsage usage) override;
-    hardware::Return<hardware::camera::common::V1_0::Status>
-            setSwapInterval(int32_t interval) override;
-    hardware::Return<void> getMinUndequeuedBufferCount(
-        getMinUndequeuedBufferCount_cb _hidl_cb) override;
-    hardware::Return<hardware::camera::common::V1_0::Status>
-            setTimestamp(int64_t timestamp) override;
-
-    sp<ANativeWindow>        mPreviewWindow;
-
-    notify_callback               mNotifyCb;
-    data_callback                 mDataCb;
-    data_callback_timestamp       mDataCbTimestamp;
-    data_callback_timestamp_batch mDataCbTimestampBatch;
-    void *mCbUser;
-
-    // Cached values for preview stream parameters
-    static const int NOT_SET = -1;
-    int mPreviewScalingMode;
-    int mPreviewTransform;
-    int mPreviewWidth;
-    int mPreviewHeight;
-    int mPreviewFormat;
-    uint64_t mPreviewUsage;
-    int mPreviewSwapInterval;
-    android_native_rect_t mPreviewCrop;
-
-    struct BufferHasher {
-        size_t operator()(const buffer_handle_t& buf) const {
-            if (buf == nullptr)
-                return 0;
-
-            size_t result = 1;
-            result = 31 * result + buf->numFds;
-            result = 31 * result + buf->numInts;
-            int length = buf->numFds + buf->numInts;
-            for (int i = 0; i < length; i++) {
-                result = 31 * result + buf->data[i];
-            }
-            return result;
-        }
-    };
-
-    struct BufferComparator {
-        bool operator()(const buffer_handle_t& buf1, const buffer_handle_t& buf2) const {
-            if (buf1->numFds == buf2->numFds && buf1->numInts == buf2->numInts) {
-                int length = buf1->numFds + buf1->numInts;
-                for (int i = 0; i < length; i++) {
-                    if (buf1->data[i] != buf2->data[i]) {
-                        return false;
-                    }
-                }
-                return true;
-            }
-            return false;
-        }
-    };
-
-    std::mutex mBufferIdMapLock; // protecting mBufferIdMap and mNextBufferId
-    typedef std::unordered_map<const buffer_handle_t, uint64_t,
-            BufferHasher, BufferComparator> BufferIdMap;
-    // stream ID -> per stream buffer ID map
-    BufferIdMap mBufferIdMap;
-    std::unordered_map<uint64_t, ANativeWindowBuffer*> mReversedBufMap;
-    uint64_t mNextBufferId = 1;
-    static const uint64_t BUFFER_ID_NO_BUFFER = 0;
-
-    std::mutex mHidlMemPoolMapLock; // protecting mHidlMemPoolMap
-    std::unordered_map<int, camera_memory_t*> mHidlMemPoolMap;
-};
-
-};  // namespace android
-
-#endif
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 4a509aa..d27f11f 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -56,7 +56,7 @@
 #include "device3/Camera3Device.h"
 #include "device3/Camera3OutputStream.h"
 #include "device3/Camera3InputStream.h"
-#include "device3/Camera3DummyStream.h"
+#include "device3/Camera3FakeStream.h"
 #include "device3/Camera3SharedOutputStream.h"
 #include "CameraService.h"
 #include "utils/CameraThreadState.h"
@@ -270,7 +270,7 @@
     }
 
     /** Register in-flight map to the status tracker */
-    mInFlightStatusId = mStatusTracker->addComponent();
+    mInFlightStatusId = mStatusTracker->addComponent("InflightRequests");
 
     if (mUseHalBufManager) {
         res = mRequestBufferSM.initialize(mStatusTracker);
@@ -309,7 +309,7 @@
 
     internalUpdateStatusLocked(STATUS_UNCONFIGURED);
     mNextStreamId = 0;
-    mDummyStreamId = NO_STREAM;
+    mFakeStreamId = NO_STREAM;
     mNeedConfig = true;
     mPauseStateNotify = false;
 
@@ -1768,6 +1768,7 @@
             maxExpectedDuration);
     status_t res = waitUntilStateThenRelock(/*active*/ false, maxExpectedDuration);
     if (res != OK) {
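+        // Dump which named status-tracker components are still active to help diagnose the timeout.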
+        mStatusTracker->dumpActiveComponents();
         SET_ERR_L("Error waiting for HAL to drain: %s (%d)", strerror(-res),
                 res);
     }
@@ -1833,10 +1834,12 @@
 
     mStatusWaiters++;
 
+    bool signalPipelineDrain = false;
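+    // signalPipelineDrain is set below if we ask the request thread to drain; it lets us reset the drain state after the wait.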
     if (!active && mUseHalBufManager) {
         auto streamIds = mOutputStreams.getStreamIds();
         if (mStatus == STATUS_ACTIVE) {
             mRequestThread->signalPipelineDrain(streamIds);
+            signalPipelineDrain = true;
         }
         mRequestBufferSM.onWaitUntilIdle();
     }
@@ -1866,6 +1869,10 @@
         }
     } while (!stateSeen);
 
+    if (signalPipelineDrain) {
+        mRequestThread->resetPipelineDrain();
+    }
+
     mStatusWaiters--;
 
     return res;
@@ -2306,6 +2313,15 @@
         newRequest->mRotateAndCropAuto = false;
     }
 
+    auto zoomRatioEntry =
+            newRequest->mSettingsList.begin()->metadata.find(ANDROID_CONTROL_ZOOM_RATIO);
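+    // Cache whether the requested zoom ratio is exactly 1.0x so the request thread can
+    // check this flag instead of re-reading the metadata on every submission.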
+    if (zoomRatioEntry.count > 0 &&
+            zoomRatioEntry.data.f[0] == 1.0f) {
+        newRequest->mZoomRatioIs1x = true;
+    } else {
+        newRequest->mZoomRatioIs1x = false;
+    }
+
     return newRequest;
 }
 
@@ -2466,12 +2482,12 @@
     }
 
     // Workaround for device HALv3.2 or older spec bug - zero streams requires
-    // adding a dummy stream instead.
+    // adding a fake stream instead.
     // TODO: Bug: 17321404 for fixing the HAL spec and removing this workaround.
     if (mOutputStreams.size() == 0) {
-        addDummyStreamLocked();
+        addFakeStreamLocked();
     } else {
-        tryRemoveDummyStreamLocked();
+        tryRemoveFakeStreamLocked();
     }
 
     // Start configuring the streams
@@ -2633,7 +2649,7 @@
 
     mNeedConfig = false;
 
-    internalUpdateStatusLocked((mDummyStreamId == NO_STREAM) ?
+    internalUpdateStatusLocked((mFakeStreamId == NO_STREAM) ?
             STATUS_CONFIGURED : STATUS_UNCONFIGURED);
 
     ALOGV("%s: Camera %s: Stream configuration complete", __FUNCTION__, mId.string());
@@ -2647,69 +2663,69 @@
         return rc;
     }
 
-    if (mDummyStreamId == NO_STREAM) {
+    if (mFakeStreamId == NO_STREAM) {
         mRequestBufferSM.onStreamsConfigured();
     }
 
     return OK;
 }
 
-status_t Camera3Device::addDummyStreamLocked() {
+status_t Camera3Device::addFakeStreamLocked() {
     ATRACE_CALL();
     status_t res;
 
-    if (mDummyStreamId != NO_STREAM) {
-        // Should never be adding a second dummy stream when one is already
+    if (mFakeStreamId != NO_STREAM) {
+        // Should never be adding a second fake stream when one is already
         // active
-        SET_ERR_L("%s: Camera %s: A dummy stream already exists!",
+        SET_ERR_L("%s: Camera %s: A fake stream already exists!",
                 __FUNCTION__, mId.string());
         return INVALID_OPERATION;
     }
 
-    ALOGV("%s: Camera %s: Adding a dummy stream", __FUNCTION__, mId.string());
+    ALOGV("%s: Camera %s: Adding a fake stream", __FUNCTION__, mId.string());
 
-    sp<Camera3OutputStreamInterface> dummyStream =
-            new Camera3DummyStream(mNextStreamId);
+    sp<Camera3OutputStreamInterface> fakeStream =
+            new Camera3FakeStream(mNextStreamId);
 
-    res = mOutputStreams.add(mNextStreamId, dummyStream);
+    res = mOutputStreams.add(mNextStreamId, fakeStream);
     if (res < 0) {
-        SET_ERR_L("Can't add dummy stream to set: %s (%d)", strerror(-res), res);
+        SET_ERR_L("Can't add fake stream to set: %s (%d)", strerror(-res), res);
         return res;
     }
 
-    mDummyStreamId = mNextStreamId;
+    mFakeStreamId = mNextStreamId;
     mNextStreamId++;
 
     return OK;
 }
 
-status_t Camera3Device::tryRemoveDummyStreamLocked() {
+status_t Camera3Device::tryRemoveFakeStreamLocked() {
     ATRACE_CALL();
     status_t res;
 
-    if (mDummyStreamId == NO_STREAM) return OK;
+    if (mFakeStreamId == NO_STREAM) return OK;
     if (mOutputStreams.size() == 1) return OK;
 
-    ALOGV("%s: Camera %s: Removing the dummy stream", __FUNCTION__, mId.string());
+    ALOGV("%s: Camera %s: Removing the fake stream", __FUNCTION__, mId.string());
 
-    // Ok, have a dummy stream and there's at least one other output stream,
-    // so remove the dummy
+    // Ok, have a fake stream and there's at least one other output stream,
+    // so remove the fake
 
-    sp<Camera3StreamInterface> deletedStream = mOutputStreams.get(mDummyStreamId);
+    sp<Camera3StreamInterface> deletedStream = mOutputStreams.get(mFakeStreamId);
     if (deletedStream == nullptr) {
-        SET_ERR_L("Dummy stream %d does not appear to exist", mDummyStreamId);
+        SET_ERR_L("Fake stream %d does not appear to exist", mFakeStreamId);
         return INVALID_OPERATION;
     }
-    mOutputStreams.remove(mDummyStreamId);
+    mOutputStreams.remove(mFakeStreamId);
 
     // Free up the stream endpoint so that it can be used by some other stream
     res = deletedStream->disconnect();
     if (res != OK) {
-        SET_ERR_L("Can't disconnect deleted dummy stream %d", mDummyStreamId);
+        SET_ERR_L("Can't disconnect deleted fake stream %d", mFakeStreamId);
         // fall through since we want to still list the stream as deleted.
     }
     mDeletedStreams.add(deletedStream);
-    mDummyStreamId = NO_STREAM;
+    mFakeStreamId = NO_STREAM;
 
     return res;
 }
@@ -2814,7 +2830,7 @@
 }
 
 void Camera3Device::checkInflightMapLengthLocked() {
-    // Sanity check - if we have too many in-flight frames with long total inflight duration,
+    // Validation check - if we have too many in-flight frames with long total inflight duration,
     // something has likely gone wrong. This might still be legit only if the application sends in
     // a long burst of long exposure requests.
     if (mExpectedInflightDuration > kMinWarnInflightDuration) {
@@ -3779,7 +3795,7 @@
         mSessionParamKeys(sessionParamKeys),
         mLatestSessionParams(sessionParamKeys.size()),
         mUseHalBufManager(useHalBufManager) {
-    mStatusId = statusTracker->addComponent();
+    mStatusId = statusTracker->addComponent("RequestThread");
 }
 
 Camera3Device::RequestThread::~RequestThread() {}
@@ -4405,11 +4421,11 @@
             std::set<std::string> cameraIdsWithZoom;
             /**
              * HAL workaround:
-             * Insert a dummy trigger ID if a trigger is set but no trigger ID is
+             * Insert a fake trigger ID if a trigger is set but no trigger ID is present.
              */
-            res = addDummyTriggerIds(captureRequest);
+            res = addFakeTriggerIds(captureRequest);
             if (res != OK) {
-                SET_ERR("RequestThread: Unable to insert dummy trigger IDs "
+                SET_ERR("RequestThread: Unable to insert fake trigger IDs "
                         "(capture request %d, HAL device: %s (%d)",
                         halRequest->frame_number, strerror(-res), res);
                 return INVALID_OPERATION;
@@ -4426,13 +4442,17 @@
                                 parent->mDistortionMappers.end()) {
                             continue;
                         }
-                        res = parent->mDistortionMappers[it->cameraId].correctCaptureRequest(
-                            &(it->metadata));
-                        if (res != OK) {
-                            SET_ERR("RequestThread: Unable to correct capture requests "
-                                    "for lens distortion for request %d: %s (%d)",
-                                    halRequest->frame_number, strerror(-res), res);
-                            return INVALID_OPERATION;
+
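+                        // Apply distortion correction at most once per request;
+                        // resubmissions reuse the already-corrected metadata.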
+                        if (!captureRequest->mDistortionCorrectionUpdated) {
+                            res = parent->mDistortionMappers[it->cameraId].correctCaptureRequest(
+                                    &(it->metadata));
+                            if (res != OK) {
+                                SET_ERR("RequestThread: Unable to correct capture requests "
+                                        "for lens distortion for request %d: %s (%d)",
+                                        halRequest->frame_number, strerror(-res), res);
+                                return INVALID_OPERATION;
+                            }
+                            captureRequest->mDistortionCorrectionUpdated = true;
                         }
                     }
 
@@ -4443,21 +4463,24 @@
                             continue;
                         }
 
-                        camera_metadata_entry_t e = it->metadata.find(ANDROID_CONTROL_ZOOM_RATIO);
-                        if (e.count > 0 && e.data.f[0] != 1.0f) {
+                        if (!captureRequest->mZoomRatioIs1x) {
                             cameraIdsWithZoom.insert(it->cameraId);
                         }
 
-                        res = parent->mZoomRatioMappers[it->cameraId].updateCaptureRequest(
-                            &(it->metadata));
-                        if (res != OK) {
-                            SET_ERR("RequestThread: Unable to correct capture requests "
-                                    "for zoom ratio for request %d: %s (%d)",
-                                    halRequest->frame_number, strerror(-res), res);
-                            return INVALID_OPERATION;
+                        if (!captureRequest->mZoomRatioUpdated) {
+                            res = parent->mZoomRatioMappers[it->cameraId].updateCaptureRequest(
+                                    &(it->metadata));
+                            if (res != OK) {
+                                SET_ERR("RequestThread: Unable to correct capture requests "
+                                        "for zoom ratio for request %d: %s (%d)",
+                                        halRequest->frame_number, strerror(-res), res);
+                                return INVALID_OPERATION;
+                            }
+                            captureRequest->mZoomRatioUpdated = true;
                         }
                     }
-                    if (captureRequest->mRotateAndCropAuto) {
+                    if (captureRequest->mRotateAndCropAuto &&
+                            !captureRequest->mRotationAndCropUpdated) {
                         for (it = captureRequest->mSettingsList.begin();
                                 it != captureRequest->mSettingsList.end(); it++) {
                             auto mapper = parent->mRotateAndCropMappers.find(it->cameraId);
@@ -4471,6 +4494,7 @@
                                 }
                             }
                         }
+                        captureRequest->mRotationAndCropUpdated = true;
                     }
                 }
             }
@@ -4785,6 +4809,12 @@
     mStreamIdsToBeDrained = streamIds;
 }
 
+void Camera3Device::RequestThread::resetPipelineDrain() {
+    Mutex::Autolock pl(mPauseLock);
+    mNotifyPipelineDrain = false;
+    mStreamIdsToBeDrained.clear();
+}
+
 void Camera3Device::RequestThread::clearPreviousRequest() {
     Mutex::Autolock l(mRequestLock);
     mPrevRequest.clear();
@@ -5313,26 +5343,26 @@
     return OK;
 }
 
-status_t Camera3Device::RequestThread::addDummyTriggerIds(
+status_t Camera3Device::RequestThread::addFakeTriggerIds(
         const sp<CaptureRequest> &request) {
     // Trigger ID 0 had special meaning in the HAL2 spec, so avoid it here
-    static const int32_t dummyTriggerId = 1;
+    static const int32_t fakeTriggerId = 1;
     status_t res;
 
     CameraMetadata &metadata = request->mSettingsList.begin()->metadata;
 
-    // If AF trigger is active, insert a dummy AF trigger ID if none already
+    // If AF trigger is active, insert a fake AF trigger ID if none already
     // exists
     camera_metadata_entry afTrigger = metadata.find(ANDROID_CONTROL_AF_TRIGGER);
     camera_metadata_entry afId = metadata.find(ANDROID_CONTROL_AF_TRIGGER_ID);
     if (afTrigger.count > 0 &&
             afTrigger.data.u8[0] != ANDROID_CONTROL_AF_TRIGGER_IDLE &&
             afId.count == 0) {
-        res = metadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &dummyTriggerId, 1);
+        res = metadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &fakeTriggerId, 1);
         if (res != OK) return res;
     }
 
-    // If AE precapture trigger is active, insert a dummy precapture trigger ID
+    // If AE precapture trigger is active, insert a fake precapture trigger ID
     // if none already exists
     camera_metadata_entry pcTrigger =
             metadata.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER);
@@ -5341,7 +5371,7 @@
             pcTrigger.data.u8[0] != ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE &&
             pcId.count == 0) {
         res = metadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
-                &dummyTriggerId, 1);
+                &fakeTriggerId, 1);
         if (res != OK) return res;
     }
 
@@ -5607,7 +5637,7 @@
 
     std::lock_guard<std::mutex> lock(mLock);
     mStatusTracker = statusTracker;
-    mRequestBufferStatusId = statusTracker->addComponent();
+    mRequestBufferStatusId = statusTracker->addComponent("BufferRequestSM");
     return OK;
 }
 
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 408f1f9..c579071 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -474,7 +474,7 @@
     int                        mNextStreamId;
     bool                       mNeedConfig;
 
-    int                        mDummyStreamId;
+    int                        mFakeStreamId;
 
     // Whether to send state updates upstream
     // Pause when doing transparent reconfiguration
@@ -517,6 +517,19 @@
         // overriding of ROTATE_AND_CROP value and adjustment of coordinates
         // in several other controls in both the request and the result
         bool                                mRotateAndCropAuto;
+        // Whether this capture request has its zoom ratio set to 1.0x before
+        // the framework overrides it for camera HAL consumption.
+        bool                                mZoomRatioIs1x;
+
+        // Whether this capture request's distortion correction update has
+        // been done.
+        bool                                mDistortionCorrectionUpdated = false;
+        // Whether this capture request's rotation and crop update has been
+        // done.
+        bool                                mRotationAndCropUpdated = false;
+        // Whether this capture request's zoom ratio update has been done.
+        bool                                mZoomRatioUpdated = false;
     };
     typedef List<sp<CaptureRequest> > RequestList;
 
@@ -668,15 +681,15 @@
     void               cancelStreamsConfigurationLocked();
 
     /**
-     * Add a dummy stream to the current stream set as a workaround for
+     * Add a fake stream to the current stream set as a workaround for
      * not allowing 0 streams in the camera HAL spec.
      */
-    status_t           addDummyStreamLocked();
+    status_t           addFakeStreamLocked();
 
     /**
-     * Remove a dummy stream if the current config includes real streams.
+     * Remove a fake stream if the current config includes real streams.
      */
-    status_t           tryRemoveDummyStreamLocked();
+    status_t           tryRemoveFakeStreamLocked();
 
     /**
      * Set device into an error state due to some fatal failure, and set an
@@ -832,6 +845,7 @@
         }
 
         void signalPipelineDrain(const std::vector<int>& streamIds);
+        void resetPipelineDrain();
 
         status_t switchToOffline(
                 const std::vector<int32_t>& streamsToKeep,
@@ -860,7 +874,7 @@
 
         // HAL workaround: Make sure a trigger ID always exists if
         // a trigger does
-        status_t           addDummyTriggerIds(const sp<CaptureRequest> &request);
+        status_t           addFakeTriggerIds(const sp<CaptureRequest> &request);
 
         // Override rotate_and_crop control if needed; returns true if the current value was changed
         bool               overrideAutoRotateAndCrop(const sp<CaptureRequest> &request);
diff --git a/services/camera/libcameraservice/device3/Camera3DummyStream.cpp b/services/camera/libcameraservice/device3/Camera3DummyStream.cpp
deleted file mode 100644
index b637160..0000000
--- a/services/camera/libcameraservice/device3/Camera3DummyStream.cpp
+++ /dev/null
@@ -1,139 +0,0 @@
-/*
- * Copyright (C) 2014-2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#define LOG_TAG "Camera3-DummyStream"
-#define ATRACE_TAG ATRACE_TAG_CAMERA
-//#define LOG_NDEBUG 0
-
-#include <utils/Log.h>
-#include <utils/Trace.h>
-#include "Camera3DummyStream.h"
-
-namespace android {
-
-namespace camera3 {
-
-const String8 Camera3DummyStream::DUMMY_ID;
-
-Camera3DummyStream::Camera3DummyStream(int id) :
-        Camera3IOStreamBase(id, CAMERA3_STREAM_OUTPUT, DUMMY_WIDTH, DUMMY_HEIGHT,
-                /*maxSize*/0, DUMMY_FORMAT, DUMMY_DATASPACE, DUMMY_ROTATION,
-                DUMMY_ID) {
-
-}
-
-Camera3DummyStream::~Camera3DummyStream() {
-
-}
-
-status_t Camera3DummyStream::getBufferLocked(camera3_stream_buffer *,
-        const std::vector<size_t>&) {
-    ATRACE_CALL();
-    ALOGE("%s: Stream %d: Dummy stream cannot produce buffers!", __FUNCTION__, mId);
-    return INVALID_OPERATION;
-}
-
-status_t Camera3DummyStream::returnBufferLocked(
-        const camera3_stream_buffer &,
-        nsecs_t, const std::vector<size_t>&) {
-    ATRACE_CALL();
-    ALOGE("%s: Stream %d: Dummy stream cannot return buffers!", __FUNCTION__, mId);
-    return INVALID_OPERATION;
-}
-
-status_t Camera3DummyStream::returnBufferCheckedLocked(
-            const camera3_stream_buffer &,
-            nsecs_t,
-            bool,
-            const std::vector<size_t>&,
-            /*out*/
-            sp<Fence>*) {
-    ATRACE_CALL();
-    ALOGE("%s: Stream %d: Dummy stream cannot return buffers!", __FUNCTION__, mId);
-    return INVALID_OPERATION;
-}
-
-void Camera3DummyStream::dump(int fd, const Vector<String16> &args) const {
-    (void) args;
-    String8 lines;
-    lines.appendFormat("    Stream[%d]: Dummy\n", mId);
-    write(fd, lines.string(), lines.size());
-
-    Camera3IOStreamBase::dump(fd, args);
-}
-
-status_t Camera3DummyStream::setTransform(int) {
-    ATRACE_CALL();
-    // Do nothing
-    return OK;
-}
-
-status_t Camera3DummyStream::detachBuffer(sp<GraphicBuffer>* buffer, int* fenceFd) {
-    (void) buffer;
-    (void) fenceFd;
-    // Do nothing
-    return OK;
-}
-
-status_t Camera3DummyStream::configureQueueLocked() {
-    // Do nothing
-    return OK;
-}
-
-status_t Camera3DummyStream::disconnectLocked() {
-    mState = (mState == STATE_IN_RECONFIG) ? STATE_IN_CONFIG
-                                           : STATE_CONSTRUCTED;
-    return OK;
-}
-
-status_t Camera3DummyStream::getEndpointUsage(uint64_t *usage) const {
-    *usage = DUMMY_USAGE;
-    return OK;
-}
-
-bool Camera3DummyStream::isVideoStream() const {
-    return false;
-}
-
-bool Camera3DummyStream::isConsumerConfigurationDeferred(size_t /*surface_id*/) const {
-    return false;
-}
-
-status_t Camera3DummyStream::dropBuffers(bool /*dropping*/) {
-    return OK;
-}
-
-const String8& Camera3DummyStream::getPhysicalCameraId() const {
-    return DUMMY_ID;
-}
-
-status_t Camera3DummyStream::setConsumers(const std::vector<sp<Surface>>& /*consumers*/) {
-    ALOGE("%s: Stream %d: Dummy stream doesn't support set consumer surface!",
-            __FUNCTION__, mId);
-    return INVALID_OPERATION;
-}
-
-status_t Camera3DummyStream::updateStream(const std::vector<sp<Surface>> &/*outputSurfaces*/,
-            const std::vector<OutputStreamInfo> &/*outputInfo*/,
-            const std::vector<size_t> &/*removedSurfaceIds*/,
-            KeyedVector<sp<Surface>, size_t> * /*outputMap*/) {
-    ALOGE("%s: this method is not supported!", __FUNCTION__);
-    return INVALID_OPERATION;
-}
-
-}; // namespace camera3
-
-}; // namespace android
diff --git a/services/camera/libcameraservice/device3/Camera3DummyStream.h b/services/camera/libcameraservice/device3/Camera3DummyStream.h
deleted file mode 100644
index 4b67ea5..0000000
--- a/services/camera/libcameraservice/device3/Camera3DummyStream.h
+++ /dev/null
@@ -1,147 +0,0 @@
-/*
- * Copyright (C) 2014-2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_SERVERS_CAMERA3_DUMMY_STREAM_H
-#define ANDROID_SERVERS_CAMERA3_DUMMY_STREAM_H
-
-#include <utils/RefBase.h>
-#include <gui/Surface.h>
-
-#include "Camera3Stream.h"
-#include "Camera3IOStreamBase.h"
-#include "Camera3OutputStreamInterface.h"
-
-namespace android {
-namespace camera3 {
-
-/**
- * A dummy output stream class, to be used as a placeholder when no valid
- * streams are configured by the client.
- * This is necessary because camera HAL v3.2 or older disallows configuring
- * 0 output streams, while the public camera2 API allows for it.
- */
-class Camera3DummyStream :
-        public Camera3IOStreamBase,
-        public Camera3OutputStreamInterface {
-
-  public:
-    /**
-     * Set up a dummy stream; doesn't actually connect to anything, and uses
-     * a default dummy format and size.
-     */
-    explicit Camera3DummyStream(int id);
-
-    virtual ~Camera3DummyStream();
-
-    /**
-     * Camera3Stream interface
-     */
-
-    virtual void     dump(int fd, const Vector<String16> &args) const;
-
-    status_t         setTransform(int transform);
-
-    virtual status_t detachBuffer(sp<GraphicBuffer>* buffer, int* fenceFd);
-
-    /**
-     * Drop buffers for stream of streamId if dropping is true. If dropping is false, do not
-     * drop buffers for stream of streamId.
-     */
-    virtual status_t dropBuffers(bool /*dropping*/) override;
-
-    /**
-     * Query the physical camera id for the output stream.
-     */
-    virtual const String8& getPhysicalCameraId() const override;
-
-    /**
-     * Return if this output stream is for video encoding.
-     */
-    bool isVideoStream() const;
-
-    /**
-     * Return if the consumer configuration of this stream is deferred.
-     */
-    virtual bool isConsumerConfigurationDeferred(size_t surface_id) const;
-
-    /**
-     * Set the consumer surfaces to the output stream.
-     */
-    virtual status_t setConsumers(const std::vector<sp<Surface>>& consumers);
-
-    /**
-     * Query the output surface id.
-     */
-    virtual ssize_t getSurfaceId(const sp<Surface> &/*surface*/) { return 0; }
-
-    virtual status_t getUniqueSurfaceIds(const std::vector<size_t>&,
-            /*out*/std::vector<size_t>*) { return INVALID_OPERATION; };
-
-    /**
-     * Update the stream output surfaces.
-     */
-    virtual status_t updateStream(const std::vector<sp<Surface>> &outputSurfaces,
-            const std::vector<OutputStreamInfo> &outputInfo,
-            const std::vector<size_t> &removedSurfaceIds,
-            KeyedVector<sp<Surface>, size_t> *outputMap/*out*/);
-
-  protected:
-
-    /**
-     * Note that we release the lock briefly in this function
-     */
-    virtual status_t returnBufferCheckedLocked(
-            const camera3_stream_buffer &buffer,
-            nsecs_t timestamp,
-            bool output,
-            const std::vector<size_t>& surface_ids,
-            /*out*/
-            sp<Fence> *releaseFenceOut);
-
-    virtual status_t disconnectLocked();
-
-  private:
-
-    // Default dummy parameters; 320x240 is a required size for all devices,
-    // otherwise act like a SurfaceView would.
-    static const int DUMMY_WIDTH = 320;
-    static const int DUMMY_HEIGHT = 240;
-    static const int DUMMY_FORMAT = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
-    static const android_dataspace DUMMY_DATASPACE = HAL_DATASPACE_UNKNOWN;
-    static const camera3_stream_rotation_t DUMMY_ROTATION = CAMERA3_STREAM_ROTATION_0;
-    static const uint64_t DUMMY_USAGE = GRALLOC_USAGE_HW_COMPOSER;
-    static const String8 DUMMY_ID;
-
-    /**
-     * Internal Camera3Stream interface
-     */
-    virtual status_t getBufferLocked(camera3_stream_buffer *buffer,
-            const std::vector<size_t>& surface_ids = std::vector<size_t>());
-    virtual status_t returnBufferLocked(
-            const camera3_stream_buffer &buffer,
-            nsecs_t timestamp, const std::vector<size_t>& surface_ids);
-
-    virtual status_t configureQueueLocked();
-
-    virtual status_t getEndpointUsage(uint64_t *usage) const;
-
-}; // class Camera3DummyStream
-
-} // namespace camera3
-
-} // namespace android
-
-#endif
diff --git a/services/camera/libcameraservice/device3/Camera3FakeStream.cpp b/services/camera/libcameraservice/device3/Camera3FakeStream.cpp
new file mode 100644
index 0000000..230512a
--- /dev/null
+++ b/services/camera/libcameraservice/device3/Camera3FakeStream.cpp
@@ -0,0 +1,139 @@
+/*
+ * Copyright (C) 2014-2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "Camera3-FakeStream"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+//#define LOG_NDEBUG 0
+
+#include <utils/Log.h>
+#include <utils/Trace.h>
+#include "Camera3FakeStream.h"
+
+namespace android {
+
+namespace camera3 {
+
+const String8 Camera3FakeStream::FAKE_ID;
+
+Camera3FakeStream::Camera3FakeStream(int id) :
+        Camera3IOStreamBase(id, CAMERA3_STREAM_OUTPUT, FAKE_WIDTH, FAKE_HEIGHT,
+                /*maxSize*/0, FAKE_FORMAT, FAKE_DATASPACE, FAKE_ROTATION,
+                FAKE_ID) {
+
+}
+
+Camera3FakeStream::~Camera3FakeStream() {
+
+}
+
+status_t Camera3FakeStream::getBufferLocked(camera3_stream_buffer *,
+        const std::vector<size_t>&) {
+    ATRACE_CALL();
+    ALOGE("%s: Stream %d: Fake stream cannot produce buffers!", __FUNCTION__, mId);
+    return INVALID_OPERATION;
+}
+
+status_t Camera3FakeStream::returnBufferLocked(
+        const camera3_stream_buffer &,
+        nsecs_t, const std::vector<size_t>&) {
+    ATRACE_CALL();
+    ALOGE("%s: Stream %d: Fake stream cannot return buffers!", __FUNCTION__, mId);
+    return INVALID_OPERATION;
+}
+
+status_t Camera3FakeStream::returnBufferCheckedLocked(
+            const camera3_stream_buffer &,
+            nsecs_t,
+            bool,
+            const std::vector<size_t>&,
+            /*out*/
+            sp<Fence>*) {
+    ATRACE_CALL();
+    ALOGE("%s: Stream %d: Fake stream cannot return buffers!", __FUNCTION__, mId);
+    return INVALID_OPERATION;
+}
+
+void Camera3FakeStream::dump(int fd, const Vector<String16> &args) const {
+    (void) args;
+    String8 lines;
+    lines.appendFormat("    Stream[%d]: Fake\n", mId);
+    write(fd, lines.string(), lines.size());
+
+    Camera3IOStreamBase::dump(fd, args);
+}
+
+status_t Camera3FakeStream::setTransform(int) {
+    ATRACE_CALL();
+    // Do nothing
+    return OK;
+}
+
+status_t Camera3FakeStream::detachBuffer(sp<GraphicBuffer>* buffer, int* fenceFd) {
+    (void) buffer;
+    (void) fenceFd;
+    // Do nothing
+    return OK;
+}
+
+status_t Camera3FakeStream::configureQueueLocked() {
+    // Do nothing
+    return OK;
+}
+
+status_t Camera3FakeStream::disconnectLocked() {
+    mState = (mState == STATE_IN_RECONFIG) ? STATE_IN_CONFIG
+                                           : STATE_CONSTRUCTED;
+    return OK;
+}
+
+status_t Camera3FakeStream::getEndpointUsage(uint64_t *usage) const {
+    *usage = FAKE_USAGE;
+    return OK;
+}
+
+bool Camera3FakeStream::isVideoStream() const {
+    return false;
+}
+
+bool Camera3FakeStream::isConsumerConfigurationDeferred(size_t /*surface_id*/) const {
+    return false;
+}
+
+status_t Camera3FakeStream::dropBuffers(bool /*dropping*/) {
+    return OK;
+}
+
+const String8& Camera3FakeStream::getPhysicalCameraId() const {
+    return FAKE_ID;
+}
+
+status_t Camera3FakeStream::setConsumers(const std::vector<sp<Surface>>& /*consumers*/) {
+    ALOGE("%s: Stream %d: Fake stream doesn't support set consumer surface!",
+            __FUNCTION__, mId);
+    return INVALID_OPERATION;
+}
+
+status_t Camera3FakeStream::updateStream(const std::vector<sp<Surface>> &/*outputSurfaces*/,
+            const std::vector<OutputStreamInfo> &/*outputInfo*/,
+            const std::vector<size_t> &/*removedSurfaceIds*/,
+            KeyedVector<sp<Surface>, size_t> * /*outputMap*/) {
+    ALOGE("%s: this method is not supported!", __FUNCTION__);
+    return INVALID_OPERATION;
+}
+
+}; // namespace camera3
+
+}; // namespace android
diff --git a/services/camera/libcameraservice/device3/Camera3FakeStream.h b/services/camera/libcameraservice/device3/Camera3FakeStream.h
new file mode 100644
index 0000000..fbf37e6
--- /dev/null
+++ b/services/camera/libcameraservice/device3/Camera3FakeStream.h
@@ -0,0 +1,147 @@
+/*
+ * Copyright (C) 2014-2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA3_FAKE_STREAM_H
+#define ANDROID_SERVERS_CAMERA3_FAKE_STREAM_H
+
+#include <utils/RefBase.h>
+#include <gui/Surface.h>
+
+#include "Camera3Stream.h"
+#include "Camera3IOStreamBase.h"
+#include "Camera3OutputStreamInterface.h"
+
+namespace android {
+namespace camera3 {
+
+/**
+ * A fake output stream class, to be used as a placeholder when no valid
+ * streams are configured by the client.
+ * This is necessary because camera HAL v3.2 or older disallows configuring
+ * 0 output streams, while the public camera2 API allows for it.
+ */
+class Camera3FakeStream :
+        public Camera3IOStreamBase,
+        public Camera3OutputStreamInterface {
+
+  public:
+    /**
+     * Set up a fake stream; doesn't actually connect to anything, and uses
+     * a default fake format and size.
+     */
+    explicit Camera3FakeStream(int id);
+
+    virtual ~Camera3FakeStream();
+
+    /**
+     * Camera3Stream interface
+     */
+
+    virtual void     dump(int fd, const Vector<String16> &args) const;
+
+    status_t         setTransform(int transform);
+
+    virtual status_t detachBuffer(sp<GraphicBuffer>* buffer, int* fenceFd);
+
+    /**
+     * Drop buffers for stream of streamId if dropping is true. If dropping is false, do not
+     * drop buffers for stream of streamId.
+     */
+    virtual status_t dropBuffers(bool /*dropping*/) override;
+
+    /**
+     * Query the physical camera id for the output stream.
+     */
+    virtual const String8& getPhysicalCameraId() const override;
+
+    /**
+     * Return if this output stream is for video encoding.
+     */
+    bool isVideoStream() const;
+
+    /**
+     * Return if the consumer configuration of this stream is deferred.
+     */
+    virtual bool isConsumerConfigurationDeferred(size_t surface_id) const;
+
+    /**
+     * Set the consumer surfaces to the output stream.
+     */
+    virtual status_t setConsumers(const std::vector<sp<Surface>>& consumers);
+
+    /**
+     * Query the output surface id.
+     */
+    virtual ssize_t getSurfaceId(const sp<Surface> &/*surface*/) { return 0; }
+
+    virtual status_t getUniqueSurfaceIds(const std::vector<size_t>&,
+            /*out*/std::vector<size_t>*) { return INVALID_OPERATION; };
+
+    /**
+     * Update the stream output surfaces.
+     */
+    virtual status_t updateStream(const std::vector<sp<Surface>> &outputSurfaces,
+            const std::vector<OutputStreamInfo> &outputInfo,
+            const std::vector<size_t> &removedSurfaceIds,
+            KeyedVector<sp<Surface>, size_t> *outputMap/*out*/);
+
+  protected:
+
+    /**
+     * Note that we release the lock briefly in this function
+     */
+    virtual status_t returnBufferCheckedLocked(
+            const camera3_stream_buffer &buffer,
+            nsecs_t timestamp,
+            bool output,
+            const std::vector<size_t>& surface_ids,
+            /*out*/
+            sp<Fence> *releaseFenceOut);
+
+    virtual status_t disconnectLocked();
+
+  private:
+
+    // Default fake parameters; 320x240 is a required size for all devices,
+    // otherwise act like a SurfaceView would.
+    static const int FAKE_WIDTH = 320;
+    static const int FAKE_HEIGHT = 240;
+    static const int FAKE_FORMAT = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+    static const android_dataspace FAKE_DATASPACE = HAL_DATASPACE_UNKNOWN;
+    static const camera3_stream_rotation_t FAKE_ROTATION = CAMERA3_STREAM_ROTATION_0;
+    static const uint64_t FAKE_USAGE = GRALLOC_USAGE_HW_COMPOSER;
+    static const String8 FAKE_ID;
+
+    /**
+     * Internal Camera3Stream interface
+     */
+    virtual status_t getBufferLocked(camera3_stream_buffer *buffer,
+            const std::vector<size_t>& surface_ids = std::vector<size_t>());
+    virtual status_t returnBufferLocked(
+            const camera3_stream_buffer &buffer,
+            nsecs_t timestamp, const std::vector<size_t>& surface_ids);
+
+    virtual status_t configureQueueLocked();
+
+    virtual status_t getEndpointUsage(uint64_t *usage) const;
+
+}; // class Camera3FakeStream
+
+} // namespace camera3
+
+} // namespace android
+
+#endif
diff --git a/services/camera/libcameraservice/device3/Camera3InputStream.cpp b/services/camera/libcameraservice/device3/Camera3InputStream.cpp
index cb59a76..ebd33e9 100644
--- a/services/camera/libcameraservice/device3/Camera3InputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3InputStream.cpp
@@ -27,13 +27,13 @@
 
 namespace camera3 {
 
-const String8 Camera3InputStream::DUMMY_ID;
+const String8 Camera3InputStream::FAKE_ID;
 
 Camera3InputStream::Camera3InputStream(int id,
         uint32_t width, uint32_t height, int format) :
         Camera3IOStreamBase(id, CAMERA3_STREAM_INPUT, width, height, /*maxSize*/0,
                             format, HAL_DATASPACE_UNKNOWN, CAMERA3_STREAM_ROTATION_0,
-                            DUMMY_ID) {
+                            FAKE_ID) {
 
     if (format == HAL_PIXEL_FORMAT_BLOB) {
         ALOGE("%s: Bad format, BLOB not supported", __FUNCTION__);
diff --git a/services/camera/libcameraservice/device3/Camera3InputStream.h b/services/camera/libcameraservice/device3/Camera3InputStream.h
index 97a627a..22697b7 100644
--- a/services/camera/libcameraservice/device3/Camera3InputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3InputStream.h
@@ -53,7 +53,7 @@
     sp<IGraphicBufferProducer> mProducer;
     Vector<BufferItem> mBuffersInFlight;
 
-    static const String8 DUMMY_ID;
+    static const String8 FAKE_ID;
 
     /**
      * Camera3IOStreamBase
diff --git a/services/camera/libcameraservice/device3/Camera3OfflineSession.h b/services/camera/libcameraservice/device3/Camera3OfflineSession.h
index c4c7a85..ee9ed25 100644
--- a/services/camera/libcameraservice/device3/Camera3OfflineSession.h
+++ b/services/camera/libcameraservice/device3/Camera3OfflineSession.h
@@ -36,7 +36,6 @@
 #include "device3/RotateAndCropMapper.h"
 #include "device3/ZoomRatioMapper.h"
 #include "utils/TagMonitor.h"
-#include "utils/LatencyHistogram.h"
 #include <camera_metadata_hidden.h>
 
 namespace android {
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index 01ca006..7b812f2 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -114,7 +114,7 @@
         mState = STATE_ERROR;
     }
 
-    // Sanity check for the consumer usage flag.
+    // Validation check for the consumer usage flag.
     if ((consumerUsage & GraphicBuffer::USAGE_HW_TEXTURE) == 0 &&
             (consumerUsage & GraphicBuffer::USAGE_HW_COMPOSER) == 0) {
         ALOGE("%s: Deferred consumer usage flag is illegal %" PRIu64 "!",
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
index eea5ef1..90f6216 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
@@ -182,7 +182,33 @@
         return;
     }
 
-    insertResultLocked(states, &captureResult, frameNumber);
+    // Update partial result by removing keys remapped by DistortionCorrection, ZoomRatio,
+    // and RotateAndCrop mappers.
+    std::set<uint32_t> keysToRemove;
+
+    auto iter = states.distortionMappers.find(states.cameraId.c_str());
+    if (iter != states.distortionMappers.end()) {
+        const auto& remappedKeys = iter->second.getRemappedKeys();
+        keysToRemove.insert(remappedKeys.begin(), remappedKeys.end());
+    }
+
+    const auto& remappedKeys = states.zoomRatioMappers[states.cameraId.c_str()].getRemappedKeys();
+    keysToRemove.insert(remappedKeys.begin(), remappedKeys.end());
+
+    auto mapper = states.rotateAndCropMappers.find(states.cameraId.c_str());
+    if (mapper != states.rotateAndCropMappers.end()) {
+        const auto& remappedKeys = mapper->second.getRemappedKeys();
+        keysToRemove.insert(remappedKeys.begin(), remappedKeys.end());
+    }
+
+    for (uint32_t key : keysToRemove) {
+        captureResult.mMetadata.erase(key);
+    }
+
+    // Send partial result
+    if (captureResult.mMetadata.entryCount() > 0) {
+        insertResultLocked(states, &captureResult, frameNumber);
+    }
 }
 
 void sendCaptureResult(
@@ -416,7 +442,7 @@
 
         ATRACE_ASYNC_END("frame capture", frameNumber);
 
-        // Sanity check - if sensor timestamp matches shutter timestamp in the
+        // Validation check - if sensor timestamp matches shutter timestamp in the
         // case of request having callback.
         if (request.hasCallback && request.requestStatus == OK &&
                 sensorTimestamp != shutterTimestamp) {
@@ -1218,13 +1244,13 @@
             return;
         }
 
+        bufRet.streamId = streamId;
         if (outputStream->isAbandoned()) {
             bufRet.val.error(StreamBufferRequestError::STREAM_DISCONNECTED);
             allReqsSucceeds = false;
             continue;
         }
 
-        bufRet.streamId = streamId;
         size_t handOutBufferCount = outputStream->getOutstandingBuffersCount();
         uint32_t numBuffersRequested = bufReq.numBuffersRequested;
         size_t totalHandout = handOutBufferCount + numBuffersRequested;
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.h b/services/camera/libcameraservice/device3/Camera3OutputUtils.h
index 9946312..3ebbc17 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtils.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.h
@@ -72,8 +72,8 @@
         const String8& cameraId;
         std::mutex& inflightLock;
         int64_t& lastCompletedRegularFrameNumber;
-        int64_t& lastCompletedZslFrameNumber;
         int64_t& lastCompletedReprocessFrameNumber;
+        int64_t& lastCompletedZslFrameNumber;
         InFlightRequestMap& inflightMap; // end of inflightLock scope
         std::mutex& outputLock;
         std::list<CaptureResult>& resultQueue;
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp
index 20f6168..f208561 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp
@@ -330,7 +330,8 @@
     // Register for idle tracking
     sp<StatusTracker> statusTracker = mStatusTracker.promote();
     if (statusTracker != 0 && mStatusId == StatusTracker::NO_STATUS_ID) {
-        mStatusId = statusTracker->addComponent();
+        std::string name = std::string("Stream ") + std::to_string(mId);
+        mStatusId = statusTracker->addComponent(name.c_str());
     }
 
     // Check if the stream configuration is unchanged, and skip reallocation if
diff --git a/services/camera/libcameraservice/device3/CoordinateMapper.h b/services/camera/libcameraservice/device3/CoordinateMapper.h
index 5164856..558f4c0 100644
--- a/services/camera/libcameraservice/device3/CoordinateMapper.h
+++ b/services/camera/libcameraservice/device3/CoordinateMapper.h
@@ -18,16 +18,23 @@
 #define ANDROID_SERVERS_COORDINATEMAPPER_H
 
 #include <array>
+#include <set>
 
 namespace android {
 
 namespace camera3 {
 
 class CoordinateMapper {
-    // Right now only stores metadata tags containing 2D coordinates
-    // to be corrected.
+public:
+    // The result metadata tags that are to be re-mapped
+    const std::set<uint32_t>& getRemappedKeys() const {
+        return mRemappedKeys;
+    }
+
+    virtual ~CoordinateMapper() = default;
+
 protected:
-    // Metadata key lists to correct
+    // Metadata tags containing 2D coordinates to be corrected.
 
     // Both capture request and result
     static const std::array<uint32_t, 3> kMeteringRegionsToCorrect;
@@ -37,6 +44,10 @@
 
     // Only for capture results; don't clamp
     static const std::array<uint32_t, 2> kResultPointsToCorrectNoClamp;
+
+    virtual void initRemappedKeys() = 0;
+    std::set<uint32_t> mRemappedKeys;
+
 }; // class CoordinateMapper
 
 } // namespace camera3
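Each concrete mapper below (DistortionMapper, ZoomRatioMapper, RotateAndCropMapper) now follows the same pattern on top of this base class: every constructor, including the copy constructor, calls initRemappedKeys() so that getRemappedKeys() is populated before the mapper is ever queried. A minimal standalone sketch of that pattern, using a hypothetical ExampleMapper and placeholder tag values that are not part of this patch:

    #include <cstdint>
    #include <set>

    // Stands in for a CoordinateMapper subclass; illustrative only.
    class ExampleMapper {
      public:
        ExampleMapper() { initRemappedKeys(); }
        ExampleMapper(const ExampleMapper& other) : mState(other.mState) {
            // Copy constructors must re-register as well: mRemappedKeys is
            // rebuilt here rather than copied from 'other'.
            initRemappedKeys();
        }

        const std::set<uint32_t>& getRemappedKeys() const { return mRemappedKeys; }

      private:
        void initRemappedKeys() {
            mRemappedKeys.insert(0x0001);  // placeholder metadata tag values
            mRemappedKeys.insert(0x0002);
        }

        int mState = 0;
        std::set<uint32_t> mRemappedKeys;
    };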
diff --git a/services/camera/libcameraservice/device3/DistortionMapper.cpp b/services/camera/libcameraservice/device3/DistortionMapper.cpp
index 8132225..316303e 100644
--- a/services/camera/libcameraservice/device3/DistortionMapper.cpp
+++ b/services/camera/libcameraservice/device3/DistortionMapper.cpp
@@ -29,6 +29,20 @@
 
 
 DistortionMapper::DistortionMapper() : mValidMapping(false), mValidGrids(false) {
+    initRemappedKeys();
+}
+
+void DistortionMapper::initRemappedKeys() {
+    mRemappedKeys.insert(
+            kMeteringRegionsToCorrect.begin(),
+            kMeteringRegionsToCorrect.end());
+    mRemappedKeys.insert(
+            kRectsToCorrect.begin(),
+            kRectsToCorrect.end());
+    mRemappedKeys.insert(
+            kResultPointsToCorrectNoClamp.begin(),
+            kResultPointsToCorrectNoClamp.end());
+    mRemappedKeys.insert(ANDROID_DISTORTION_CORRECTION_MODE);
 }
 
 bool DistortionMapper::isDistortionSupported(const CameraMetadata &deviceInfo) {
@@ -485,7 +499,7 @@
 
     float det = b * b - 4 * a * c;
     if (det < 0) {
-        // Sanity check - should not happen if pt is within the quad
+        // Validation check - should not happen if pt is within the quad
         ALOGE("Bad determinant! a: %f, b: %f, c: %f, det: %f", a,b,c,det);
         return -1;
     }
diff --git a/services/camera/libcameraservice/device3/DistortionMapper.h b/services/camera/libcameraservice/device3/DistortionMapper.h
index 7dcb67b..5027bd0 100644
--- a/services/camera/libcameraservice/device3/DistortionMapper.h
+++ b/services/camera/libcameraservice/device3/DistortionMapper.h
@@ -32,7 +32,7 @@
  * Utilities to transform between raw (distorted) and warped (corrected) coordinate systems
  * for cameras that support geometric distortion
  */
-class DistortionMapper : private CoordinateMapper {
+class DistortionMapper : public CoordinateMapper {
   public:
     DistortionMapper();
 
@@ -43,7 +43,10 @@
             mArrayWidth(other.mArrayWidth), mArrayHeight(other.mArrayHeight),
             mActiveWidth(other.mActiveWidth), mActiveHeight(other.mActiveHeight),
             mArrayDiffX(other.mArrayDiffX), mArrayDiffY(other.mArrayDiffY),
-            mCorrectedGrid(other.mCorrectedGrid), mDistortedGrid(other.mDistortedGrid) {}
+            mCorrectedGrid(other.mCorrectedGrid), mDistortedGrid(other.mDistortedGrid) {
+            initRemappedKeys(); }
+
+    void initRemappedKeys() override;
 
     /**
      * Check whether distortion correction is supported by the camera HAL
diff --git a/services/camera/libcameraservice/device3/RotateAndCropMapper.cpp b/services/camera/libcameraservice/device3/RotateAndCropMapper.cpp
index 3718f54..a02e5f6 100644
--- a/services/camera/libcameraservice/device3/RotateAndCropMapper.cpp
+++ b/services/camera/libcameraservice/device3/RotateAndCropMapper.cpp
@@ -27,6 +27,18 @@
 
 namespace camera3 {
 
+void RotateAndCropMapper::initRemappedKeys() {
+    mRemappedKeys.insert(
+            kMeteringRegionsToCorrect.begin(),
+            kMeteringRegionsToCorrect.end());
+    mRemappedKeys.insert(
+            kResultPointsToCorrectNoClamp.begin(),
+            kResultPointsToCorrectNoClamp.end());
+
+    mRemappedKeys.insert(ANDROID_SCALER_ROTATE_AND_CROP);
+    mRemappedKeys.insert(ANDROID_SCALER_CROP_REGION);
+}
+
 bool RotateAndCropMapper::isNeeded(const CameraMetadata* deviceInfo) {
     auto entry = deviceInfo->find(ANDROID_SCALER_AVAILABLE_ROTATE_AND_CROP_MODES);
     for (size_t i = 0; i < entry.count; i++) {
@@ -36,6 +48,8 @@
 }
 
 RotateAndCropMapper::RotateAndCropMapper(const CameraMetadata* deviceInfo) {
+    initRemappedKeys();
+
     auto entry = deviceInfo->find(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE);
     if (entry.count != 4) return;
 
diff --git a/services/camera/libcameraservice/device3/RotateAndCropMapper.h b/services/camera/libcameraservice/device3/RotateAndCropMapper.h
index 459e27f..f9e2263 100644
--- a/services/camera/libcameraservice/device3/RotateAndCropMapper.h
+++ b/services/camera/libcameraservice/device3/RotateAndCropMapper.h
@@ -32,12 +32,14 @@
  * Utilities to transform between unrotated and rotated-and-cropped coordinate systems
  * for cameras that support SCALER_ROTATE_AND_CROP controls in AUTO mode.
  */
-class RotateAndCropMapper : private CoordinateMapper {
+class RotateAndCropMapper : public CoordinateMapper {
   public:
     static bool isNeeded(const CameraMetadata* deviceInfo);
 
     RotateAndCropMapper(const CameraMetadata* deviceInfo);
 
+    void initRemappedKeys() override;
+
     /**
      * Adjust capture request assuming rotate and crop AUTO is enabled
      */
diff --git a/services/camera/libcameraservice/device3/StatusTracker.cpp b/services/camera/libcameraservice/device3/StatusTracker.cpp
index 723b5c2..ea1f2c1 100644
--- a/services/camera/libcameraservice/device3/StatusTracker.cpp
+++ b/services/camera/libcameraservice/device3/StatusTracker.cpp
@@ -40,7 +40,7 @@
 StatusTracker::~StatusTracker() {
 }
 
-int StatusTracker::addComponent() {
+int StatusTracker::addComponent(std::string componentName) {
     int id;
     ssize_t err;
     {
@@ -49,8 +49,12 @@
         ALOGV("%s: Adding new component %d", __FUNCTION__, id);
 
         err = mStates.add(id, IDLE);
-        ALOGE_IF(err < 0, "%s: Can't add new component %d: %s (%zd)",
-                __FUNCTION__, id, strerror(-err), err);
+        if (componentName.empty()) {
+            componentName = std::to_string(id);
+        }
+        mComponentNames.add(id, componentName);
+        ALOGE_IF(err < 0, "%s: Can't add new component %d (%s): %s (%zd)",
+                __FUNCTION__, id, componentName.c_str(), strerror(-err), err);
     }
 
     if (err >= 0) {
@@ -68,6 +72,7 @@
         Mutex::Autolock l(mLock);
         ALOGV("%s: Removing component %d", __FUNCTION__, id);
         idx = mStates.removeItem(id);
+        mComponentNames.removeItem(id);
     }
 
     if (idx >= 0) {
@@ -80,6 +85,20 @@
 }
 
 
+void StatusTracker::dumpActiveComponents() {
+    Mutex::Autolock l(mLock);
+    if (mDeviceState == IDLE) {
+        ALOGI("%s: all components are IDLE", __FUNCTION__);
+        return;
+    }
+    for (size_t i = 0; i < mStates.size(); i++) {
+        if (mStates.valueAt(i) == ACTIVE) {
+            ALOGI("%s: component %d (%s) is active", __FUNCTION__, mStates.keyAt(i),
+                    mComponentNames.valueAt(i).c_str());
+        }
+    }
+}
+
 void StatusTracker::markComponentIdle(int id, const sp<Fence>& componentFence) {
     markComponent(id, IDLE, componentFence);
 }
diff --git a/services/camera/libcameraservice/device3/StatusTracker.h b/services/camera/libcameraservice/device3/StatusTracker.h
index 3a1d85c..3741cce 100644
--- a/services/camera/libcameraservice/device3/StatusTracker.h
+++ b/services/camera/libcameraservice/device3/StatusTracker.h
@@ -17,6 +17,7 @@
 #ifndef ANDROID_SERVERS_CAMERA3_STATUSTRACKER_H
 #define ANDROID_SERVERS_CAMERA3_STATUSTRACKER_H
 
+#include <string>
 #include <utils/Condition.h>
 #include <utils/Errors.h>
 #include <utils/List.h>
@@ -54,7 +55,7 @@
     // Add a component to track; returns non-negative unique ID for the new
     // component on success, negative error code on failure.
     // New components start in the idle state.
-    int addComponent();
+    int addComponent(std::string componentName);
 
     // Remove existing component from idle tracking. Ignores unknown IDs
     void removeComponent(int id);
@@ -68,6 +69,8 @@
     // Set the state of a tracked component to be active. Ignores unknown IDs.
     void markComponentActive(int id);
 
+    void dumpActiveComponents();
+
     virtual void requestExit();
   protected:
 
@@ -105,6 +108,7 @@
 
     // Current component states
     KeyedVector<int, ComponentState> mStates;
+    KeyedVector<int, std::string> mComponentNames;
     // Merged fence for all processed state changes
     sp<Fence> mIdleFence;
     // Current overall device state
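Taken together, the StatusTracker changes above let components register under a human-readable name and let dumpActiveComponents() attribute ongoing activity to those names. A hedged usage sketch, assuming an already-constructed tracker (for example the one Camera3Device owns) and the include paths used elsewhere in libcameraservice; this is illustrative client code, not code from the patch:

    #include <string>

    #include <ui/Fence.h>
    #include <utils/StrongPointer.h>

    #include "device3/StatusTracker.h"

    void trackExample(const android::sp<android::camera3::StatusTracker>& tracker) {
        // Named registration: the name is what dumpActiveComponents() prints.
        int streamId = tracker->addComponent("Stream 7");

        // Empty name: addComponent() substitutes the numeric component id.
        int anonId = tracker->addComponent(std::string());

        tracker->markComponentActive(streamId);
        tracker->dumpActiveComponents();  // logs the active "Stream 7" component

        tracker->markComponentIdle(streamId, android::Fence::NO_FENCE);
        tracker->removeComponent(anonId);
        tracker->removeComponent(streamId);
    }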
diff --git a/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp b/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp
index a87de77..81d7bf9 100644
--- a/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp
+++ b/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp
@@ -25,6 +25,19 @@
 
 namespace camera3 {
 
+void ZoomRatioMapper::initRemappedKeys() {
+    mRemappedKeys.insert(
+            kMeteringRegionsToCorrect.begin(),
+            kMeteringRegionsToCorrect.end());
+    mRemappedKeys.insert(
+            kRectsToCorrect.begin(),
+            kRectsToCorrect.end());
+    mRemappedKeys.insert(
+            kResultPointsToCorrectNoClamp.begin(),
+            kResultPointsToCorrectNoClamp.end());
+
+    mRemappedKeys.insert(ANDROID_CONTROL_ZOOM_RATIO);
+}
 
 status_t ZoomRatioMapper::initZoomRatioInTemplate(CameraMetadata *request) {
     camera_metadata_entry_t entry;
@@ -117,6 +130,8 @@
 
 ZoomRatioMapper::ZoomRatioMapper(const CameraMetadata* deviceInfo,
         bool supportNativeZoomRatio, bool usePrecorrectArray) {
+    initRemappedKeys();
+
     camera_metadata_ro_entry_t entry;
 
     entry = deviceInfo->find(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE);
diff --git a/services/camera/libcameraservice/device3/ZoomRatioMapper.h b/services/camera/libcameraservice/device3/ZoomRatioMapper.h
index 698f87f..3769299 100644
--- a/services/camera/libcameraservice/device3/ZoomRatioMapper.h
+++ b/services/camera/libcameraservice/device3/ZoomRatioMapper.h
@@ -33,7 +33,7 @@
  * - HAL supports zoomRatio and the application uses cropRegion, or
  * - HAL doesn't support zoomRatio, but the application uses zoomRatio
  */
-class ZoomRatioMapper : private CoordinateMapper {
+class ZoomRatioMapper : public CoordinateMapper {
   public:
     ZoomRatioMapper() = default;
     ZoomRatioMapper(const CameraMetadata *deviceInfo,
@@ -41,7 +41,9 @@
     ZoomRatioMapper(const ZoomRatioMapper& other) :
             mHalSupportsZoomRatio(other.mHalSupportsZoomRatio),
             mArrayWidth(other.mArrayWidth), mArrayHeight(other.mArrayHeight),
-            mIsValid(other.mIsValid) {}
+            mIsValid(other.mIsValid) { initRemappedKeys(); }
+
+    void initRemappedKeys() override;
 
     /**
      * Initialize request template with valid zoomRatio if necessary.
diff --git a/services/camera/libcameraservice/fuzzer/Android.bp b/services/camera/libcameraservice/fuzzer/Android.bp
new file mode 100644
index 0000000..c5b7f00
--- /dev/null
+++ b/services/camera/libcameraservice/fuzzer/Android.bp
@@ -0,0 +1,44 @@
+// Copyright 2020 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+cc_defaults {
+    name: "libcameraservice_fuzz_defaults",
+    fuzz_config: {
+        componentid: 41727
+    },
+}
+
+cc_fuzz {
+    name: "libcameraservice_distortion_mapper_fuzzer",
+    defaults: ["libcameraservice_fuzz_defaults"],
+    srcs: [
+        "DistortionMapperFuzzer.cpp",
+    ],
+    shared_libs: [
+        "libcameraservice",
+        "libcamera_client",
+    ],
+}
+
+cc_fuzz {
+    name: "libcameraservice_depth_processor_fuzzer",
+    defaults: ["libcameraservice_fuzz_defaults"],
+    srcs: [
+        "DepthProcessorFuzzer.cpp",
+    ],
+    shared_libs: [
+        "libcameraservice",
+    ],
+    corpus: ["corpus/*.jpg"],
+}
diff --git a/services/camera/libcameraservice/fuzzer/DepthProcessorFuzzer.cpp b/services/camera/libcameraservice/fuzzer/DepthProcessorFuzzer.cpp
new file mode 100644
index 0000000..650ca91
--- /dev/null
+++ b/services/camera/libcameraservice/fuzzer/DepthProcessorFuzzer.cpp
@@ -0,0 +1,62 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <array>
+#include <vector>
+
+#include <fuzzer/FuzzedDataProvider.h>
+
+#include "common/DepthPhotoProcessor.h"
+
+using namespace android;
+using namespace android::camera3;
+
+static const size_t kTestBufferWidth = 640;
+static const size_t kTestBufferHeight = 480;
+static const size_t kTestBufferDepthSize (kTestBufferWidth * kTestBufferHeight);
+
+void generateDepth16Buffer(const uint8_t* data, size_t size, std::array<uint16_t, kTestBufferDepthSize> *depth16Buffer /*out*/) {
+    FuzzedDataProvider dataProvider(data, size);
+    for (size_t i = 0; i < depth16Buffer->size(); i++) {
+        (*depth16Buffer)[i] = dataProvider.ConsumeIntegral<uint16_t>();
+    }
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    DepthPhotoInputFrame inputFrame;
+    // Worst case both depth and confidence maps have the same size as the main color image.
+    inputFrame.mMaxJpegSize = size * 3;  // 'size' is the main JPEG size, assigned below.
+
+    std::vector<uint8_t> depthPhotoBuffer(inputFrame.mMaxJpegSize);
+    size_t actualDepthPhotoSize = 0;
+
+    std::array<uint16_t, kTestBufferDepthSize> depth16Buffer;
+    generateDepth16Buffer(data, size, &depth16Buffer);
+
+    inputFrame.mMainJpegBuffer = reinterpret_cast<const char*> (data);
+    inputFrame.mMainJpegSize = size;
+    inputFrame.mDepthMapBuffer = depth16Buffer.data();
+    inputFrame.mDepthMapStride = kTestBufferWidth;
+    inputFrame.mDepthMapWidth = kTestBufferWidth;
+    inputFrame.mDepthMapHeight = kTestBufferHeight;
+    processDepthPhotoFrame(
+        inputFrame,
+        depthPhotoBuffer.size(),
+        depthPhotoBuffer.data(),
+        &actualDepthPhotoSize);
+
+    return 0;
+}
diff --git a/services/camera/libcameraservice/fuzzer/DistortionMapperFuzzer.cpp b/services/camera/libcameraservice/fuzzer/DistortionMapperFuzzer.cpp
new file mode 100644
index 0000000..96bab4e
--- /dev/null
+++ b/services/camera/libcameraservice/fuzzer/DistortionMapperFuzzer.cpp
@@ -0,0 +1,71 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <vector>
+
+#include <fuzzer/FuzzedDataProvider.h>
+
+#include "device3/DistortionMapper.h"
+#include <camera/CameraMetadata.h>
+
+using namespace android;
+using namespace android::camera3;
+
+int32_t testActiveArray[] = {100, 100, 1000, 750};
+float testICal[] = { 1000.f, 1000.f, 500.f, 500.f, 0.f };
+float identityDistortion[] = { 0.f, 0.f, 0.f, 0.f, 0.f};
+
+void setupTestMapper(DistortionMapper *m,
+        float distortion[5], float intrinsics[5],
+        int32_t activeArray[4], int32_t preCorrectionActiveArray[4]) {
+    CameraMetadata deviceInfo;
+
+    deviceInfo.update(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE,
+            preCorrectionActiveArray, 4);
+
+    deviceInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
+            activeArray, 4);
+
+    deviceInfo.update(ANDROID_LENS_INTRINSIC_CALIBRATION,
+            intrinsics, 5);
+
+    deviceInfo.update(ANDROID_LENS_DISTORTION,
+            distortion, 5);
+
+    m->setupStaticInfo(deviceInfo);
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    FuzzedDataProvider fdp(data, size);
+
+    DistortionMapper m;
+    setupTestMapper(&m, identityDistortion, testICal,
+        /*activeArray*/ testActiveArray,
+        /*preCorrectionActiveArray*/ testActiveArray);
+
+    bool clamp = fdp.ConsumeBool();
+    bool simple = fdp.ConsumeBool();
+    std::vector<int32_t> input;
+    while (fdp.remaining_bytes() > 0) {
+        input.push_back(fdp.ConsumeIntegral<int32_t>());
+    }
+
+    // The size argument counts how many coordinate pairs there are, so
+    // it is expected to be 1/2 the size of the input.
+    m.mapCorrectedToRaw(input.data(), input.size()/2, clamp, simple);
+
+    return 0;
+}
diff --git a/services/camera/libcameraservice/fuzzer/corpus/Canon_MakerNote_variant_type_1.jpg b/services/camera/libcameraservice/fuzzer/corpus/Canon_MakerNote_variant_type_1.jpg
new file mode 100644
index 0000000..1eb37d0
--- /dev/null
+++ b/services/camera/libcameraservice/fuzzer/corpus/Canon_MakerNote_variant_type_1.jpg
Binary files differ
diff --git a/services/camera/libcameraservice/fuzzer/corpus/Fuji_MakerNote_variant_type_1.jpg b/services/camera/libcameraservice/fuzzer/corpus/Fuji_MakerNote_variant_type_1.jpg
new file mode 100644
index 0000000..75e0371
--- /dev/null
+++ b/services/camera/libcameraservice/fuzzer/corpus/Fuji_MakerNote_variant_type_1.jpg
Binary files differ
diff --git a/services/camera/libcameraservice/fuzzer/corpus/Olympus_MakerNote_variant_type_2.jpg b/services/camera/libcameraservice/fuzzer/corpus/Olympus_MakerNote_variant_type_2.jpg
new file mode 100644
index 0000000..461d613
--- /dev/null
+++ b/services/camera/libcameraservice/fuzzer/corpus/Olympus_MakerNote_variant_type_2.jpg
Binary files differ
diff --git a/services/camera/libcameraservice/fuzzer/corpus/Olympus_MakerNote_variant_type_3.jpg b/services/camera/libcameraservice/fuzzer/corpus/Olympus_MakerNote_variant_type_3.jpg
new file mode 100644
index 0000000..42498e2
--- /dev/null
+++ b/services/camera/libcameraservice/fuzzer/corpus/Olympus_MakerNote_variant_type_3.jpg
Binary files differ
diff --git a/services/camera/libcameraservice/fuzzer/corpus/Olympus_MakerNote_variant_type_4.jpg b/services/camera/libcameraservice/fuzzer/corpus/Olympus_MakerNote_variant_type_4.jpg
new file mode 100644
index 0000000..233ff78
--- /dev/null
+++ b/services/camera/libcameraservice/fuzzer/corpus/Olympus_MakerNote_variant_type_4.jpg
Binary files differ
diff --git a/services/camera/libcameraservice/fuzzer/corpus/Olympus_MakerNote_variant_type_5.jpg b/services/camera/libcameraservice/fuzzer/corpus/Olympus_MakerNote_variant_type_5.jpg
new file mode 100644
index 0000000..f083f75
--- /dev/null
+++ b/services/camera/libcameraservice/fuzzer/corpus/Olympus_MakerNote_variant_type_5.jpg
Binary files differ
diff --git a/services/camera/libcameraservice/fuzzer/corpus/Pentax_MakerNote_variant_type_2.jpg b/services/camera/libcameraservice/fuzzer/corpus/Pentax_MakerNote_variant_type_2.jpg
new file mode 100644
index 0000000..0ef0ef2
--- /dev/null
+++ b/services/camera/libcameraservice/fuzzer/corpus/Pentax_MakerNote_variant_type_2.jpg
Binary files differ
diff --git a/services/camera/libcameraservice/fuzzer/corpus/Pentax_MakerNote_variant_type_3.jpg b/services/camera/libcameraservice/fuzzer/corpus/Pentax_MakerNote_variant_type_3.jpg
new file mode 100644
index 0000000..d93b86f
--- /dev/null
+++ b/services/camera/libcameraservice/fuzzer/corpus/Pentax_MakerNote_variant_type_3.jpg
Binary files differ
diff --git a/services/camera/libcameraservice/fuzzer/corpus/Pentax_MakerNote_variant_type_4.jpg b/services/camera/libcameraservice/fuzzer/corpus/Pentax_MakerNote_variant_type_4.jpg
new file mode 100644
index 0000000..297ea1c
--- /dev/null
+++ b/services/camera/libcameraservice/fuzzer/corpus/Pentax_MakerNote_variant_type_4.jpg
Binary files differ
diff --git a/services/camera/libcameraservice/hidl/HidlCameraService.cpp b/services/camera/libcameraservice/hidl/HidlCameraService.cpp
index a46133e..9ea9526 100644
--- a/services/camera/libcameraservice/hidl/HidlCameraService.cpp
+++ b/services/camera/libcameraservice/hidl/HidlCameraService.cpp
@@ -103,7 +103,7 @@
     }
     sp<hardware::camera2::ICameraDeviceCallbacks> callbacks = hybridCallbacks;
     binder::Status serviceRet = mAidlICameraService->connectDevice(
-            callbacks, String16(cameraId.c_str()), String16(""), std::unique_ptr<String16>(),
+            callbacks, String16(cameraId.c_str()), String16(""), {},
             hardware::ICameraService::USE_CALLING_UID, /*out*/&deviceRemote);
     HStatus status = HStatus::NO_ERROR;
     if (!serviceRet.isOk()) {
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
index 888671c..ba68a63 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
@@ -14,20 +14,493 @@
  * limitations under the License.
  */
 #include "SessionConfigurationUtils.h"
-#include "../api2/CameraDeviceClient.h"
+#include "../api2/DepthCompositeStream.h"
+#include "../api2/HeicCompositeStream.h"
+#include "common/CameraDeviceBase.h"
+#include "../CameraService.h"
+#include "device3/Camera3Device.h"
+#include "device3/Camera3OutputStream.h"
+
+// Convenience methods for constructing binder::Status objects for error returns
+
+#define STATUS_ERROR(errorCode, errorString) \
+    binder::Status::fromServiceSpecificError(errorCode, \
+            String8::format("%s:%d: %s", __FUNCTION__, __LINE__, errorString))
+
+#define STATUS_ERROR_FMT(errorCode, errorString, ...) \
+    binder::Status::fromServiceSpecificError(errorCode, \
+            String8::format("%s:%d: " errorString, __FUNCTION__, __LINE__, \
+                    __VA_ARGS__))
+
+using android::camera3::OutputStreamInfo;
+using android::hardware::camera2::ICameraDeviceUser;
 
 namespace android {
 
+int64_t SessionConfigurationUtils::euclidDistSquare(int32_t x0, int32_t y0, int32_t x1, int32_t y1) {
+    int64_t d0 = x0 - x1;
+    int64_t d1 = y0 - y1;
+    return d0 * d0 + d1 * d1;
+}
+
+bool SessionConfigurationUtils::roundBufferDimensionNearest(int32_t width, int32_t height,
+        int32_t format, android_dataspace dataSpace, const CameraMetadata& info,
+        /*out*/int32_t* outWidth, /*out*/int32_t* outHeight) {
+
+    camera_metadata_ro_entry streamConfigs =
+            (dataSpace == HAL_DATASPACE_DEPTH) ?
+            info.find(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS) :
+            (dataSpace == static_cast<android_dataspace>(HAL_DATASPACE_HEIF)) ?
+            info.find(ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS) :
+            info.find(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
+
+    int32_t bestWidth = -1;
+    int32_t bestHeight = -1;
+
+    // Iterate through listed stream configurations and find the one with the smallest euclidean
+    // distance from the given dimensions for the given format.
+    for (size_t i = 0; i < streamConfigs.count; i += 4) {
+        int32_t fmt = streamConfigs.data.i32[i];
+        int32_t w = streamConfigs.data.i32[i + 1];
+        int32_t h = streamConfigs.data.i32[i + 2];
+
+        // Ignore input/output type for now
+        if (fmt == format) {
+            if (w == width && h == height) {
+                bestWidth = width;
+                bestHeight = height;
+                break;
+            } else if (w <= ROUNDING_WIDTH_CAP && (bestWidth == -1 ||
+                    SessionConfigurationUtils::euclidDistSquare(w, h, width, height) <
+                    SessionConfigurationUtils::euclidDistSquare(bestWidth, bestHeight, width,
+                            height))) {
+                bestWidth = w;
+                bestHeight = h;
+            }
+        }
+    }
+
+    if (bestWidth == -1) {
+        // Return false if no configurations for this format were listed
+        return false;
+    }
+
+    // Set the outputs to the closest width/height
+    if (outWidth != NULL) {
+        *outWidth = bestWidth;
+    }
+    if (outHeight != NULL) {
+        *outHeight = bestHeight;
+    }
+
+    // Return true if at least one configuration for this format was listed
+    return true;
+}
+
+bool SessionConfigurationUtils::isPublicFormat(int32_t format)
+{
+    switch(format) {
+        case HAL_PIXEL_FORMAT_RGBA_8888:
+        case HAL_PIXEL_FORMAT_RGBX_8888:
+        case HAL_PIXEL_FORMAT_RGB_888:
+        case HAL_PIXEL_FORMAT_RGB_565:
+        case HAL_PIXEL_FORMAT_BGRA_8888:
+        case HAL_PIXEL_FORMAT_YV12:
+        case HAL_PIXEL_FORMAT_Y8:
+        case HAL_PIXEL_FORMAT_Y16:
+        case HAL_PIXEL_FORMAT_RAW16:
+        case HAL_PIXEL_FORMAT_RAW10:
+        case HAL_PIXEL_FORMAT_RAW12:
+        case HAL_PIXEL_FORMAT_RAW_OPAQUE:
+        case HAL_PIXEL_FORMAT_BLOB:
+        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
+        case HAL_PIXEL_FORMAT_YCbCr_420_888:
+        case HAL_PIXEL_FORMAT_YCbCr_422_SP:
+        case HAL_PIXEL_FORMAT_YCrCb_420_SP:
+        case HAL_PIXEL_FORMAT_YCbCr_422_I:
+            return true;
+        default:
+            return false;
+    }
+}
+
+binder::Status SessionConfigurationUtils::createSurfaceFromGbp(
+        OutputStreamInfo& streamInfo, bool isStreamInfoValid,
+        sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp,
+        const String8 &cameraId, const CameraMetadata &physicalCameraMetadata) {
+
+    // bufferProducer must be non-null
+    if (gbp == nullptr) {
+        String8 msg = String8::format("Camera %s: Surface is NULL", cameraId.string());
+        ALOGW("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+    }
+    // HACK b/10949105
+    // Query consumer usage bits to set async operation mode for
+    // GLConsumer using controlledByApp parameter.
+    bool useAsync = false;
+    uint64_t consumerUsage = 0;
+    status_t err;
+    if ((err = gbp->getConsumerUsage(&consumerUsage)) != OK) {
+        String8 msg = String8::format("Camera %s: Failed to query Surface consumer usage: %s (%d)",
+                cameraId.string(), strerror(-err), err);
+        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
+    }
+    if (consumerUsage & GraphicBuffer::USAGE_HW_TEXTURE) {
+        ALOGW("%s: Camera %s with consumer usage flag: %" PRIu64 ": Forcing asynchronous mode for"
+                "stream", __FUNCTION__, cameraId.string(), consumerUsage);
+        useAsync = true;
+    }
+
+    uint64_t disallowedFlags = GraphicBuffer::USAGE_HW_VIDEO_ENCODER |
+                              GRALLOC_USAGE_RENDERSCRIPT;
+    uint64_t allowedFlags = GraphicBuffer::USAGE_SW_READ_MASK |
+                           GraphicBuffer::USAGE_HW_TEXTURE |
+                           GraphicBuffer::USAGE_HW_COMPOSER;
+    bool flexibleConsumer = (consumerUsage & disallowedFlags) == 0 &&
+            (consumerUsage & allowedFlags) != 0;
+
+    surface = new Surface(gbp, useAsync);
+    ANativeWindow *anw = surface.get();
+
+    int width, height, format;
+    android_dataspace dataSpace;
+    if ((err = anw->query(anw, NATIVE_WINDOW_WIDTH, &width)) != OK) {
+        String8 msg = String8::format("Camera %s: Failed to query Surface width: %s (%d)",
+                 cameraId.string(), strerror(-err), err);
+        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
+    }
+    if ((err = anw->query(anw, NATIVE_WINDOW_HEIGHT, &height)) != OK) {
+        String8 msg = String8::format("Camera %s: Failed to query Surface height: %s (%d)",
+                cameraId.string(), strerror(-err), err);
+        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
+    }
+    if ((err = anw->query(anw, NATIVE_WINDOW_FORMAT, &format)) != OK) {
+        String8 msg = String8::format("Camera %s: Failed to query Surface format: %s (%d)",
+                cameraId.string(), strerror(-err), err);
+        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
+    }
+    if ((err = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE,
+            reinterpret_cast<int*>(&dataSpace))) != OK) {
+        String8 msg = String8::format("Camera %s: Failed to query Surface dataspace: %s (%d)",
+                cameraId.string(), strerror(-err), err);
+        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
+    }
+
+    // FIXME: remove this override since the default format should be
+    //       IMPLEMENTATION_DEFINED. b/9487482 & b/35317944
+    if ((format >= HAL_PIXEL_FORMAT_RGBA_8888 && format <= HAL_PIXEL_FORMAT_BGRA_8888) &&
+            ((consumerUsage & GRALLOC_USAGE_HW_MASK) &&
+             ((consumerUsage & GRALLOC_USAGE_SW_READ_MASK) == 0))) {
+        ALOGW("%s: Camera %s: Overriding format %#x to IMPLEMENTATION_DEFINED",
+                __FUNCTION__, cameraId.string(), format);
+        format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+    }
+    // Round dimensions to the nearest dimensions available for this format
+    if (flexibleConsumer && isPublicFormat(format) &&
+            !SessionConfigurationUtils::roundBufferDimensionNearest(width, height,
+            format, dataSpace, physicalCameraMetadata, /*out*/&width, /*out*/&height)) {
+        String8 msg = String8::format("Camera %s: No supported stream configurations with "
+                "format %#x defined, failed to create output stream",
+                cameraId.string(), format);
+        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+    }
+
+    if (!isStreamInfoValid) {
+        streamInfo.width = width;
+        streamInfo.height = height;
+        streamInfo.format = format;
+        streamInfo.dataSpace = dataSpace;
+        streamInfo.consumerUsage = consumerUsage;
+        return binder::Status::ok();
+    }
+    if (width != streamInfo.width) {
+        String8 msg = String8::format("Camera %s:Surface width doesn't match: %d vs %d",
+                cameraId.string(), width, streamInfo.width);
+        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+    }
+    if (height != streamInfo.height) {
+        String8 msg = String8::format("Camera %s:Surface height doesn't match: %d vs %d",
+                 cameraId.string(), height, streamInfo.height);
+        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+    }
+    if (format != streamInfo.format) {
+        String8 msg = String8::format("Camera %s:Surface format doesn't match: %d vs %d",
+                 cameraId.string(), format, streamInfo.format);
+        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+    }
+    if (format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
+        if (dataSpace != streamInfo.dataSpace) {
+            String8 msg = String8::format("Camera %s:Surface dataSpace doesn't match: %d vs %d",
+                    cameraId.string(), dataSpace, streamInfo.dataSpace);
+            ALOGE("%s: %s", __FUNCTION__, msg.string());
+            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+        }
+        // On the native side, there isn't a way to check whether two surfaces come from the
+        // same surface class type. Use the usage flag to approximate the comparison.
+        if (consumerUsage != streamInfo.consumerUsage) {
+            String8 msg = String8::format(
+                    "Camera %s:Surface usage flag doesn't match %" PRIu64 " vs %" PRIu64 "",
+                    cameraId.string(), consumerUsage, streamInfo.consumerUsage);
+            ALOGE("%s: %s", __FUNCTION__, msg.string());
+            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+        }
+    }
+    return binder::Status::ok();
+}
+
+
+void SessionConfigurationUtils::mapStreamInfo(const OutputStreamInfo &streamInfo,
+            camera3_stream_rotation_t rotation, String8 physicalId,
+            hardware::camera::device::V3_4::Stream *stream /*out*/) {
+    if (stream == nullptr) {
+        return;
+    }
+
+    stream->v3_2.streamType = hardware::camera::device::V3_2::StreamType::OUTPUT;
+    stream->v3_2.width = streamInfo.width;
+    stream->v3_2.height = streamInfo.height;
+    stream->v3_2.format = Camera3Device::mapToPixelFormat(streamInfo.format);
+    auto u = streamInfo.consumerUsage;
+    camera3::Camera3OutputStream::applyZSLUsageQuirk(streamInfo.format, &u);
+    stream->v3_2.usage = Camera3Device::mapToConsumerUsage(u);
+    stream->v3_2.dataSpace = Camera3Device::mapToHidlDataspace(streamInfo.dataSpace);
+    stream->v3_2.rotation = Camera3Device::mapToStreamRotation(rotation);
+    stream->v3_2.id = -1; // Invalid stream id
+    stream->physicalCameraId = std::string(physicalId.string());
+    stream->bufferSize = 0;
+}
+
+binder::Status SessionConfigurationUtils::checkPhysicalCameraId(
+        const std::vector<std::string> &physicalCameraIds, const String8 &physicalCameraId,
+        const String8 &logicalCameraId) {
+    if (physicalCameraId.size() == 0) {
+        return binder::Status::ok();
+    }
+    if (std::find(physicalCameraIds.begin(), physicalCameraIds.end(),
+        physicalCameraId.string()) == physicalCameraIds.end()) {
+        String8 msg = String8::format("Camera %s: Camera doesn't support physicalCameraId %s.",
+                logicalCameraId.string(), physicalCameraId.string());
+        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+    }
+    return binder::Status::ok();
+}
+
+binder::Status SessionConfigurationUtils::checkSurfaceType(size_t numBufferProducers,
+        bool deferredConsumer, int surfaceType)  {
+    if (numBufferProducers > MAX_SURFACES_PER_STREAM) {
+        ALOGE("%s: GraphicBufferProducer count %zu for stream exceeds limit of %d",
+                __FUNCTION__, numBufferProducers, MAX_SURFACES_PER_STREAM);
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "Surface count is too high");
+    } else if ((numBufferProducers == 0) && (!deferredConsumer)) {
+        ALOGE("%s: Number of consumers cannot be smaller than 1", __FUNCTION__);
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "No valid consumers.");
+    }
+
+    bool validSurfaceType = ((surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW) ||
+            (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_TEXTURE));
+
+    if (deferredConsumer && !validSurfaceType) {
+        ALOGE("%s: Target surface has invalid surfaceType = %d.", __FUNCTION__, surfaceType);
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "Target Surface is invalid");
+    }
+
+    return binder::Status::ok();
+}
+
+binder::Status SessionConfigurationUtils::checkOperatingMode(int operatingMode,
+        const CameraMetadata &staticInfo, const String8 &cameraId) {
+    if (operatingMode < 0) {
+        String8 msg = String8::format(
+            "Camera %s: Invalid operating mode %d requested", cameraId.string(), operatingMode);
+        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+                msg.string());
+    }
+
+    bool isConstrainedHighSpeed = (operatingMode == ICameraDeviceUser::CONSTRAINED_HIGH_SPEED_MODE);
+    if (isConstrainedHighSpeed) {
+        camera_metadata_ro_entry_t entry = staticInfo.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
+        bool isConstrainedHighSpeedSupported = false;
+        for(size_t i = 0; i < entry.count; ++i) {
+            uint8_t capability = entry.data.u8[i];
+            if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO) {
+                isConstrainedHighSpeedSupported = true;
+                break;
+            }
+        }
+        if (!isConstrainedHighSpeedSupported) {
+            String8 msg = String8::format(
+                "Camera %s: Try to create a constrained high speed configuration on a device"
+                " that doesn't support it.", cameraId.string());
+            ALOGE("%s: %s", __FUNCTION__, msg.string());
+            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+                    msg.string());
+        }
+    }
+
+    return binder::Status::ok();
+}
+
 binder::Status
 SessionConfigurationUtils::convertToHALStreamCombination(
         const SessionConfiguration& sessionConfiguration,
         const String8 &logicalCameraId, const CameraMetadata &deviceInfo,
         metadataGetter getMetadata, const std::vector<std::string> &physicalCameraIds,
         hardware::camera::device::V3_4::StreamConfiguration &streamConfiguration, bool *earlyExit) {
-    // TODO: http://b/148329298 Move the other dependencies from
-    // CameraDeviceClient into SessionConfigurationUtils.
-    return CameraDeviceClient::convertToHALStreamCombination(sessionConfiguration, logicalCameraId,
-            deviceInfo, getMetadata, physicalCameraIds, streamConfiguration, earlyExit);
+
+    auto operatingMode = sessionConfiguration.getOperatingMode();
+    binder::Status res = checkOperatingMode(operatingMode, deviceInfo, logicalCameraId);
+    if (!res.isOk()) {
+        return res;
+    }
+
+    if (earlyExit == nullptr) {
+        String8 msg("earlyExit nullptr");
+        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+    }
+    *earlyExit = false;
+    auto ret = Camera3Device::mapToStreamConfigurationMode(
+            static_cast<camera3_stream_configuration_mode_t> (operatingMode),
+            /*out*/ &streamConfiguration.operationMode);
+    if (ret != OK) {
+        String8 msg = String8::format(
+            "Camera %s: Failed mapping operating mode %d requested: %s (%d)",
+            logicalCameraId.string(), operatingMode, strerror(-ret), ret);
+        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+                msg.string());
+    }
+
+    bool isInputValid = (sessionConfiguration.getInputWidth() > 0) &&
+            (sessionConfiguration.getInputHeight() > 0) &&
+            (sessionConfiguration.getInputFormat() > 0);
+    auto outputConfigs = sessionConfiguration.getOutputConfigurations();
+    size_t streamCount = outputConfigs.size();
+    streamCount = isInputValid ? streamCount + 1 : streamCount;
+    streamConfiguration.streams.resize(streamCount);
+    size_t streamIdx = 0;
+    if (isInputValid) {
+        streamConfiguration.streams[streamIdx++] = {{/*streamId*/0,
+                hardware::camera::device::V3_2::StreamType::INPUT,
+                static_cast<uint32_t> (sessionConfiguration.getInputWidth()),
+                static_cast<uint32_t> (sessionConfiguration.getInputHeight()),
+                Camera3Device::mapToPixelFormat(sessionConfiguration.getInputFormat()),
+                /*usage*/ 0, HAL_DATASPACE_UNKNOWN,
+                hardware::camera::device::V3_2::StreamRotation::ROTATION_0},
+                /*physicalId*/ nullptr, /*bufferSize*/0};
+    }
+
+    for (const auto &it : outputConfigs) {
+        const std::vector<sp<IGraphicBufferProducer>>& bufferProducers =
+            it.getGraphicBufferProducers();
+        bool deferredConsumer = it.isDeferred();
+        String8 physicalCameraId = String8(it.getPhysicalCameraId());
+        size_t numBufferProducers = bufferProducers.size();
+        bool isStreamInfoValid = false;
+        OutputStreamInfo streamInfo;
+
+        res = checkSurfaceType(numBufferProducers, deferredConsumer, it.getSurfaceType());
+        if (!res.isOk()) {
+            return res;
+        }
+        res = checkPhysicalCameraId(physicalCameraIds, physicalCameraId,
+                logicalCameraId);
+        if (!res.isOk()) {
+            return res;
+        }
+
+        if (deferredConsumer) {
+            streamInfo.width = it.getWidth();
+            streamInfo.height = it.getHeight();
+            streamInfo.format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+            streamInfo.dataSpace = android_dataspace_t::HAL_DATASPACE_UNKNOWN;
+            auto surfaceType = it.getSurfaceType();
+            streamInfo.consumerUsage = GraphicBuffer::USAGE_HW_TEXTURE;
+            if (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW) {
+                streamInfo.consumerUsage |= GraphicBuffer::USAGE_HW_COMPOSER;
+            }
+            mapStreamInfo(streamInfo, CAMERA3_STREAM_ROTATION_0, physicalCameraId,
+                    &streamConfiguration.streams[streamIdx++]);
+            isStreamInfoValid = true;
+
+            if (numBufferProducers == 0) {
+                continue;
+            }
+        }
+
+        for (auto& bufferProducer : bufferProducers) {
+            sp<Surface> surface;
+            const CameraMetadata &physicalDeviceInfo = getMetadata(physicalCameraId);
+            res = createSurfaceFromGbp(streamInfo, isStreamInfoValid, surface, bufferProducer,
+                    logicalCameraId,
+                    physicalCameraId.size() > 0 ? physicalDeviceInfo : deviceInfo );
+
+            if (!res.isOk())
+                return res;
+
+            if (!isStreamInfoValid) {
+                bool isDepthCompositeStream =
+                        camera3::DepthCompositeStream::isDepthCompositeStream(surface);
+                bool isHeicCompositeStream =
+                        camera3::HeicCompositeStream::isHeicCompositeStream(surface);
+                if (isDepthCompositeStream || isHeicCompositeStream) {
+                    // We need to take into account that composite streams can have
+                    // additional internal camera streams.
+                    std::vector<OutputStreamInfo> compositeStreams;
+                    if (isDepthCompositeStream) {
+                        ret = camera3::DepthCompositeStream::getCompositeStreamInfo(streamInfo,
+                                deviceInfo, &compositeStreams);
+                    } else {
+                        ret = camera3::HeicCompositeStream::getCompositeStreamInfo(streamInfo,
+                            deviceInfo, &compositeStreams);
+                    }
+                    if (ret != OK) {
+                        String8 msg = String8::format(
+                                "Camera %s: Failed adding composite streams: %s (%d)",
+                                logicalCameraId.string(), strerror(-ret), ret);
+                        ALOGE("%s: %s", __FUNCTION__, msg.string());
+                        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+                    }
+
+                    if (compositeStreams.size() == 0) {
+                        // No internal streams means composite stream not
+                        // supported.
+                        *earlyExit = true;
+                        return binder::Status::ok();
+                    } else if (compositeStreams.size() > 1) {
+                        streamCount += compositeStreams.size() - 1;
+                        streamConfiguration.streams.resize(streamCount);
+                    }
+
+                    for (const auto& compositeStream : compositeStreams) {
+                        mapStreamInfo(compositeStream,
+                                static_cast<camera3_stream_rotation_t> (it.getRotation()),
+                                physicalCameraId, &streamConfiguration.streams[streamIdx++]);
+                    }
+                } else {
+                    mapStreamInfo(streamInfo,
+                            static_cast<camera3_stream_rotation_t> (it.getRotation()),
+                            physicalCameraId, &streamConfiguration.streams[streamIdx++]);
+                }
+                isStreamInfoValid = true;
+            }
+        }
+    }
+    return binder::Status::ok();
+
 }
 
 }// namespace android
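As a worked illustration of the new rounding helper added above (a sketch under assumed include paths and illustrative stream-configuration data, not code from this change): with a metadata blob that lists 1280x720 and 1920x1080 for IMPLEMENTATION_DEFINED output, a request for 1910x1070 rounds to 1920x1080, the listed size with the smallest squared euclidean distance whose width is within ROUNDING_WIDTH_CAP.

    #include <camera/CameraMetadata.h>
    #include <system/graphics.h>

    #include "utils/SessionConfigurationUtils.h"

    static void roundingExample() {
        android::CameraMetadata info;
        // {format, width, height, direction} quadruples; the helper iterates in
        // steps of four and ignores the direction column.
        int32_t configs[] = {
            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 1280, 720,  0,
            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 1920, 1080, 0,
        };
        info.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, configs, 8);

        int32_t w = 0, h = 0;
        bool found = android::SessionConfigurationUtils::roundBufferDimensionNearest(
                1910, 1070, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
                HAL_DATASPACE_UNKNOWN, info, /*out*/ &w, /*out*/ &h);
        // For this listing: found == true, w == 1920, h == 1080.
        (void) found;
    }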
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
index cfb9f17..6ce2cd7 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
@@ -23,6 +23,9 @@
 #include <camera/camera2/SubmitInfo.h>
 #include <android/hardware/camera/device/3.4/ICameraDeviceSession.h>
 
+#include <hardware/camera3.h>
+#include <device3/Camera3StreamInterface.h>
+
 #include <stdint.h>
 
 namespace android {
@@ -31,6 +34,41 @@
 
 class SessionConfigurationUtils {
 public:
+
+    static int64_t euclidDistSquare(int32_t x0, int32_t y0, int32_t x1, int32_t y1);
+
+    // Find the closest dimensions for a given format in available stream configurations with
+    // a width <= ROUNDING_WIDTH_CAP
+    static bool roundBufferDimensionNearest(int32_t width, int32_t height, int32_t format,
+            android_dataspace dataSpace, const CameraMetadata& info,
+            /*out*/int32_t* outWidth, /*out*/int32_t* outHeight);
+
+    // Check whether the format is a public (non-custom) format
+    static bool isPublicFormat(int32_t format);
+
+    // Create a Surface from an IGraphicBufferProducer. Returns error if
+    // the IGraphicBufferProducer's properties don't match streamInfo.
+    static binder::Status createSurfaceFromGbp(
+        camera3::OutputStreamInfo& streamInfo, bool isStreamInfoValid,
+        sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp,
+        const String8 &cameraId, const CameraMetadata &physicalCameraMetadata);
+
+    static void mapStreamInfo(const camera3::OutputStreamInfo &streamInfo,
+            camera3_stream_rotation_t rotation, String8 physicalId,
+            hardware::camera::device::V3_4::Stream *stream /*out*/);
+
+    // Check that the physicalCameraId passed in is supported by the camera
+    // device.
+    static binder::Status checkPhysicalCameraId(
+        const std::vector<std::string> &physicalCameraIds, const String8 &physicalCameraId,
+        const String8 &logicalCameraId);
+
+    static binder::Status checkSurfaceType(size_t numBufferProducers,
+        bool deferredConsumer, int surfaceType);
+
+    static binder::Status checkOperatingMode(int operatingMode,
+        const CameraMetadata &staticInfo, const String8 &cameraId);
+
     // utility function to convert AIDL SessionConfiguration to HIDL
     // streamConfiguration. Also checks for validity of SessionConfiguration and
     // returns a non-ok binder::Status if the passed in session configuration
@@ -41,6 +79,10 @@
             metadataGetter getMetadata, const std::vector<std::string> &physicalCameraIds,
             hardware::camera::device::V3_4::StreamConfiguration &streamConfiguration,
             bool *earlyExit);
+
+    static const int32_t MAX_SURFACES_PER_STREAM = 4;
+
+    static const int32_t ROUNDING_WIDTH_CAP = 1920;
 };
 
 } // android
diff --git a/services/mediacodec/Android.bp b/services/mediacodec/Android.bp
index c4efbaa..dc0773b 100644
--- a/services/mediacodec/Android.bp
+++ b/services/mediacodec/Android.bp
@@ -15,24 +15,10 @@
         "libmedia_codecserviceregistrant",
     ],
 
-    target: {
-        android: {
-            product_variables: {
-                malloc_not_svelte: {
-                    // Scudo increases memory footprint, so only enable on
-                    // non-svelte devices.
-                    shared_libs: ["libc_scudo"],
-                },
-            },
-        },
-    },
-
     header_libs: [
         "libmedia_headers",
     ],
 
-    init_rc: ["mediaswcodec.rc"],
-
     cflags: [
         "-Werror",
         "-Wall",
diff --git a/services/mediacodec/mediaswcodec.rc b/services/mediacodec/mediaswcodec.rc
deleted file mode 100644
index 3549666..0000000
--- a/services/mediacodec/mediaswcodec.rc
+++ /dev/null
@@ -1,7 +0,0 @@
-service media.swcodec /system/bin/mediaswcodec
-    class main
-    user mediacodec
-    group camera drmrpc mediadrm
-    updatable
-    ioprio rt 4
-    writepid /dev/cpuset/foreground/tasks
diff --git a/services/mediaextractor/Android.bp b/services/mediaextractor/Android.bp
index 0b25d62..03e1e41 100644
--- a/services/mediaextractor/Android.bp
+++ b/services/mediaextractor/Android.bp
@@ -35,20 +35,6 @@
         "liblog",
         "libavservices_minijail",
     ],
-    header_libs: [
-        "bionic_libc_platform_headers",
-    ],
-    target: {
-        android: {
-            product_variables: {
-                malloc_not_svelte: {
-                    // Scudo increases memory footprint, so only enable on
-                    // non-svelte devices.
-                    shared_libs: ["libc_scudo"],
-                },
-            },
-        },
-    },
     init_rc: ["mediaextractor.rc"],
 
     cflags: [
diff --git a/services/mediaextractor/MediaExtractorService.cpp b/services/mediaextractor/MediaExtractorService.cpp
index 9992d1c..71a5bff 100644
--- a/services/mediaextractor/MediaExtractorService.cpp
+++ b/services/mediaextractor/MediaExtractorService.cpp
@@ -39,23 +39,23 @@
 
 ::android::binder::Status MediaExtractorService::makeExtractor(
         const ::android::sp<::android::IDataSource>& remoteSource,
-        const ::std::unique_ptr< ::std::string> &mime,
+        const ::std::optional< ::std::string> &mime,
         ::android::sp<::android::IMediaExtractor>* _aidl_return) {
-    ALOGV("@@@ MediaExtractorService::makeExtractor for %s", mime.get()->c_str());
+    ALOGV("@@@ MediaExtractorService::makeExtractor for %s", mime ? mime->c_str() : nullptr);
 
     sp<DataSource> localSource = CreateDataSourceFromIDataSource(remoteSource);
 
     MediaBuffer::useSharedMemory();
     sp<IMediaExtractor> extractor = MediaExtractorFactory::CreateFromService(
             localSource,
-            mime.get() ? mime.get()->c_str() : nullptr);
+            mime ? mime->c_str() : nullptr);
 
     ALOGV("extractor service created %p (%s)",
             extractor.get(),
             extractor == nullptr ? "" : extractor->name());
 
     if (extractor != nullptr) {
-        registerMediaExtractor(extractor, localSource, mime.get() ? mime.get()->c_str() : nullptr);
+        registerMediaExtractor(extractor, localSource, mime ? mime->c_str() : nullptr);
     }
     *_aidl_return = extractor;
     return binder::Status::ok();
diff --git a/services/mediaextractor/MediaExtractorService.h b/services/mediaextractor/MediaExtractorService.h
index 1b40bf9..0081e7e 100644
--- a/services/mediaextractor/MediaExtractorService.h
+++ b/services/mediaextractor/MediaExtractorService.h
@@ -33,7 +33,7 @@
 
     virtual ::android::binder::Status makeExtractor(
             const ::android::sp<::android::IDataSource>& source,
-            const ::std::unique_ptr< ::std::string> &mime,
+            const ::std::optional< ::std::string> &mime,
             ::android::sp<::android::IMediaExtractor>* _aidl_return);
 
     virtual ::android::binder::Status makeIDataSource(
diff --git a/services/mediaextractor/main_extractorservice.cpp b/services/mediaextractor/main_extractorservice.cpp
index f21574f..b7b51a6 100644
--- a/services/mediaextractor/main_extractorservice.cpp
+++ b/services/mediaextractor/main_extractorservice.cpp
@@ -28,8 +28,6 @@
 #include <android-base/properties.h>
 #include <utils/misc.h>
 
-#include <bionic/reserved_signals.h>
-
 // from LOCAL_C_INCLUDES
 #include "MediaExtractorService.h"
 #include "minijail.h"
@@ -50,10 +48,6 @@
 
     signal(SIGPIPE, SIG_IGN);
 
-    // Do not assist platform profilers (relevant only on debug builds).
-    // Otherwise, the signal handler can violate the seccomp policy.
-    signal(BIONIC_SIGNAL_PROFILER, SIG_IGN);
-
     //b/62255959: this forces libutis.so to dlopen vendor version of libutils.so
     //before minijail is on. This is dirty but required since some syscalls such
     //as pread64 are used by linker but aren't allowed in the minijail. By
diff --git a/services/medialog/Android.bp b/services/medialog/Android.bp
index 74b63d5..3a27a43 100644
--- a/services/medialog/Android.bp
+++ b/services/medialog/Android.bp
@@ -1,4 +1,4 @@
-cc_library_shared {
+cc_library {
     name: "libmedialogservice",
 
     srcs: [
diff --git a/services/medialog/fuzzer/Android.bp b/services/medialog/fuzzer/Android.bp
new file mode 100644
index 0000000..2afaaae
--- /dev/null
+++ b/services/medialog/fuzzer/Android.bp
@@ -0,0 +1,33 @@
+cc_fuzz {
+    name: "media_log_fuzzer",
+    static_libs: [
+        "libmedialogservice",
+    ],
+    srcs: [
+        "media_log_fuzzer.cpp",
+    ],
+    header_libs: [
+        "libmedia_headers",
+    ],
+    shared_libs: [
+        "libaudioutils",
+        "libbinder",
+        "liblog",
+        "libmediautils",
+        "libnblog",
+        "libutils",
+    ],
+    include_dirs: [
+        "frameworks/av/services/medialog",
+    ],
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+    fuzz_config: {
+        cc: [
+            "android-media-fuzzing-reports@google.com",
+        ],
+        componentid: 155276,
+    },
+}
diff --git a/services/medialog/fuzzer/README.md b/services/medialog/fuzzer/README.md
new file mode 100644
index 0000000..b79e5c8
--- /dev/null
+++ b/services/medialog/fuzzer/README.md
@@ -0,0 +1,50 @@
+# Fuzzer for libmedialogservice
+
+## Plugin Design Considerations
+The fuzzer plugin for libmedialogservice is designed based on an understanding of the
+service and tries to achieve the following:
+
+##### Maximize code coverage
+The configuration parameters are not hardcoded, but instead selected based on
+incoming data. This ensures more code paths are reached by the fuzzer.
+
+medialogservice supports the following parameters:
+1. Writer name (parameter name: `writerNameIdx`)
+2. Log size (parameter name: `logSize`)
+3. Enable dump before unregister API (parameter name: `shouldDumpBeforeUnregister`)
+4. Size of string for log dump (parameter name: `numberOfLines`)
+
+| Parameter| Valid Values| Configured Value|
+|------------- |-------------| ----- |
+| `writerNameIdx` | 0. `0` 1. `1` | Value obtained from FuzzedDataProvider |
+| `logSize` | In the range `256 to 65536` | Value obtained from FuzzedDataProvider |
+| `shouldDumpBeforeUnregister` | 0. `0` 1. `1` | Value obtained from FuzzedDataProvider |
+| `numberOfLines` | In the range `0 to 65535` | Value obtained from FuzzedDataProvider |
+
+This also ensures that the plugin is always deterministic for any given input.
+
+## Build
+
+This describes the steps to build the media_log_fuzzer binary.
+
+### Android
+
+#### Steps to build
+Build the fuzzer
+```
+  $ mm -j$(nproc) media_log_fuzzer
+```
+
+#### Steps to run
+Create a directory CORPUS_DIR and copy some files into it.
+Push this directory to the device.
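+
+For example, the corpus can be seeded with a few small binary blobs (hypothetical file names):
+```
+  $ mkdir CORPUS_DIR
+  $ cp seed1.bin seed2.bin CORPUS_DIR/
+```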
+
+To run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/media_log_fuzzer/media_log_fuzzer CORPUS_DIR
+```
+
+## References:
+ * http://llvm.org/docs/LibFuzzer.html
+ * https://github.com/google/oss-fuzz
diff --git a/services/medialog/fuzzer/media_log_fuzzer.cpp b/services/medialog/fuzzer/media_log_fuzzer.cpp
new file mode 100644
index 0000000..bd50d0f
--- /dev/null
+++ b/services/medialog/fuzzer/media_log_fuzzer.cpp
@@ -0,0 +1,76 @@
+/**
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <binder/IMemory.h>
+#include <binder/MemoryDealer.h>
+#include <private/android_filesystem_config.h>
+#include "MediaLogService.h"
+#include "fuzzer/FuzzedDataProvider.h"
+
+constexpr const char* kWriterNames[2] = {"FastMixer", "FastCapture"};
+constexpr size_t kMinSize = 0x100;
+constexpr size_t kMaxSize = 0x10000;
+constexpr size_t kLogMemorySize = 400 * 1024;
+constexpr size_t kMaxNumLines = USHRT_MAX;
+
+using namespace android;
+
+class MediaLogFuzzer {
+   public:
+    void init();
+    void process(const uint8_t* data, size_t size);
+
+   private:
+    sp<MemoryDealer> mMemoryDealer = nullptr;
+    sp<MediaLogService> mService = nullptr;
+};
+
+void MediaLogFuzzer::init() {
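+    // Run as AID_MEDIA so that MediaLogService's caller-uid checks accept the in-process
+    // calls made below (assumption based on the service's permission checks).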
+    setuid(AID_MEDIA);
+    mService = new MediaLogService();
+    mMemoryDealer = new MemoryDealer(kLogMemorySize, "MediaLogFuzzer", MemoryHeapBase::READ_ONLY);
+}
+
+void MediaLogFuzzer::process(const uint8_t* data, size_t size) {
+    FuzzedDataProvider fuzzedDataProvider(data, size);
+    size_t writerNameIdx =
+        fuzzedDataProvider.ConsumeIntegralInRange<size_t>(0, std::size(kWriterNames) - 1);
+    bool shouldDumpBeforeUnregister = fuzzedDataProvider.ConsumeBool();
+    size_t logSize = fuzzedDataProvider.ConsumeIntegralInRange<size_t>(kMinSize, kMaxSize);
+    sp<IMemory> logBuffer = mMemoryDealer->allocate(NBLog::Timeline::sharedSize(logSize));
+    Vector<String16> args;
+    size_t numberOfLines = fuzzedDataProvider.ConsumeIntegralInRange<size_t>(0, kMaxNumLines);
+    for (size_t lineIdx = 0; lineIdx < numberOfLines; ++lineIdx) {
+        args.add(static_cast<String16>(fuzzedDataProvider.ConsumeRandomLengthString().c_str()));
+    }
+    const char* fileName = "logDumpFile";
+    int fd = memfd_create(fileName, MFD_ALLOW_SEALING);
+    fuzzedDataProvider.ConsumeData(logBuffer->unsecurePointer(), logBuffer->size());
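+    // Register the fuzzed shared buffer as a writer, then exercise dump() and
+    // unregisterWriter() in both possible orders.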
+    mService->registerWriter(logBuffer, logSize, kWriterNames[writerNameIdx]);
+    if (shouldDumpBeforeUnregister) {
+        mService->dump(fd, args);
+        mService->unregisterWriter(logBuffer);
+    } else {
+        mService->unregisterWriter(logBuffer);
+        mService->dump(fd, args);
+    }
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    MediaLogFuzzer mediaLogFuzzer = MediaLogFuzzer();
+    mediaLogFuzzer.init();
+    mediaLogFuzzer.process(data, size);
+    return 0;
+}
diff --git a/services/mediametrics/AudioPowerUsage.cpp b/services/mediametrics/AudioPowerUsage.cpp
index cca6b41..34be0b9 100644
--- a/services/mediametrics/AudioPowerUsage.cpp
+++ b/services/mediametrics/AudioPowerUsage.cpp
@@ -28,7 +28,7 @@
 #include <cutils/properties.h>
 #include <statslog.h>
 #include <sys/timerfd.h>
-#include <system/audio-base.h>
+#include <system/audio.h>
 
 // property to disable audio power use metrics feature, default is enabled
 #define PROP_AUDIO_METRICS_DISABLED "persist.media.audio_metrics.power_usage_disabled"
@@ -200,6 +200,34 @@
     return true;
 }
 
+bool AudioPowerUsage::saveAsItems_l(
+        int32_t device, int64_t duration_ns, int32_t type, double average_vol)
+{
+    ALOGV("%s: (%#x, %d, %lld, %f)", __func__, device, type,
+                                   (long long)duration_ns, average_vol );
+    if (duration_ns == 0) {
+        return true; // skip duration 0 usage
+    }
+    if (device == 0) {
+        return true; // ignore unknown device
+    }
+
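+    // The device argument may have several device bits set; save one item per device,
+    // re-attaching the input bit to each before recording.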
+    bool ret = false;
+    const int32_t input_bit = device & INPUT_DEVICE_BIT;
+    int32_t device_bits = device ^ input_bit;
+
+    while (device_bits != 0) {
+        int32_t tmp_device = device_bits & -device_bits; // get lowest bit
+        device_bits ^= tmp_device;  // clear lowest bit
+        tmp_device |= input_bit;    // restore input bit
+        ret = saveAsItem_l(tmp_device, duration_ns, type, average_vol);
+
+        ALOGV("%s: device %#x recorded, remaining device_bits = %#x", __func__,
+            tmp_device, device_bits);
+    }
+    return ret;
+}
+
 void AudioPowerUsage::checkTrackRecord(
         const std::shared_ptr<const mediametrics::Item>& item, bool isTrack)
 {
@@ -245,7 +273,7 @@
         ALOGV("device = %s => %d", device_strings.c_str(), device);
     }
     std::lock_guard l(mLock);
-    saveAsItem_l(device, deviceTimeNs, type, deviceVolume);
+    saveAsItems_l(device, deviceTimeNs, type, deviceVolume);
 }
 
 void AudioPowerUsage::checkMode(const std::shared_ptr<const mediametrics::Item>& item)
@@ -262,7 +290,7 @@
         if (durationNs > 0) {
             mDeviceVolume = (mDeviceVolume * double(mVolumeTimeNs - mDeviceTimeNs) +
                     mVoiceVolume * double(endCallNs - mVolumeTimeNs)) / durationNs;
-            saveAsItem_l(mPrimaryDevice, durationNs, VOICE_CALL_TYPE, mDeviceVolume);
+            saveAsItems_l(mPrimaryDevice, durationNs, VOICE_CALL_TYPE, mDeviceVolume);
         }
     } else if (mode == "AUDIO_MODE_IN_CALL") { // entering call mode
         mStartCallNs = item->getTimestamp(); // advisory only
@@ -321,7 +349,7 @@
         if (durationNs > 0) {
             mDeviceVolume = (mDeviceVolume * double(mVolumeTimeNs - mDeviceTimeNs) +
                     mVoiceVolume * double(endDeviceNs - mVolumeTimeNs)) / durationNs;
-            saveAsItem_l(mPrimaryDevice, durationNs, VOICE_CALL_TYPE, mDeviceVolume);
+            saveAsItems_l(mPrimaryDevice, durationNs, VOICE_CALL_TYPE, mDeviceVolume);
         }
         // reset statistics
         mDeviceVolume = 0;
diff --git a/services/mediametrics/AudioPowerUsage.h b/services/mediametrics/AudioPowerUsage.h
index 446ff4f..b705a6a 100644
--- a/services/mediametrics/AudioPowerUsage.h
+++ b/services/mediametrics/AudioPowerUsage.h
@@ -85,6 +85,8 @@
          REQUIRES(mLock);
     static void sendItem(const std::shared_ptr<const mediametrics::Item>& item);
     void collect();
+    bool saveAsItems_l(int32_t device, int64_t duration, int32_t type, double average_vol)
+         REQUIRES(mLock);
 
     AudioAnalytics * const mAudioAnalytics;
     const bool mDisabled;
diff --git a/services/mediametrics/AudioTypes.cpp b/services/mediametrics/AudioTypes.cpp
index aa44447..5d044bb 100644
--- a/services/mediametrics/AudioTypes.cpp
+++ b/services/mediametrics/AudioTypes.cpp
@@ -76,6 +76,7 @@
         {"AUDIO_DEVICE_IN_ECHO_REFERENCE",         1LL << 27},
         {"AUDIO_DEVICE_IN_DEFAULT",                1LL << 28},
         // R values above.
+        {"AUDIO_DEVICE_IN_BLE_HEADSET",            1LL << 29},
     };
     return map;
 }
@@ -121,6 +122,8 @@
         {"AUDIO_DEVICE_OUT_ECHO_CANCELLER",            1LL << 29},
         {"AUDIO_DEVICE_OUT_DEFAULT",                   1LL << 30},
         // R values above.
+        {"AUDIO_DEVICE_OUT_BLE_HEADSET",               1LL << 31},
+        {"AUDIO_DEVICE_OUT_BLE_SPAEKER",               1LL << 32},
     };
     return map;
 }
diff --git a/services/mediaresourcemanager/Android.bp b/services/mediaresourcemanager/Android.bp
index a3519d5..cdf5a4e 100644
--- a/services/mediaresourcemanager/Android.bp
+++ b/services/mediaresourcemanager/Android.bp
@@ -1,10 +1,53 @@
+filegroup {
+    name: "resourcemanager_aidl",
+    srcs: [
+        "aidl/android/media/IResourceManagerClient.aidl",
+        "aidl/android/media/IResourceManagerService.aidl",
+        "aidl/android/media/MediaResourceType.aidl",
+        "aidl/android/media/MediaResourceSubType.aidl",
+        "aidl/android/media/MediaResourceParcel.aidl",
+        "aidl/android/media/MediaResourcePolicyParcel.aidl",
+    ],
+    path: "aidl",
+}
 
+filegroup {
+    name: "resourceobserver_aidl",
+    srcs: [
+        "aidl/android/media/IResourceObserver.aidl",
+        "aidl/android/media/IResourceObserverService.aidl",
+        "aidl/android/media/MediaObservableEvent.aidl",
+        "aidl/android/media/MediaObservableFilter.aidl",
+        "aidl/android/media/MediaObservableType.aidl",
+        "aidl/android/media/MediaObservableParcel.aidl",
+    ],
+    path: "aidl",
+}
 
-cc_library_shared {
+aidl_interface {
+    name: "resourcemanager_aidl_interface",
+    unstable: true,
+    local_include_dir: "aidl",
+    srcs: [
+        ":resourcemanager_aidl",
+    ],
+}
+
+aidl_interface {
+    name: "resourceobserver_aidl_interface",
+    unstable: true,
+    local_include_dir: "aidl",
+    srcs: [
+        ":resourceobserver_aidl",
+    ],
+}
+
+cc_library {
     name: "libresourcemanagerservice",
 
     srcs: [
         "ResourceManagerService.cpp",
+        "ResourceObserverService.cpp",
         "ServiceLog.cpp",
     ],
 
@@ -17,6 +60,10 @@
         "liblog",
     ],
 
+    static_libs: [
+        "resourceobserver_aidl_interface-ndk_platform",
+    ],
+
     include_dirs: ["frameworks/av/include"],
 
     cflags: [
@@ -25,5 +72,4 @@
     ],
 
     export_include_dirs: ["."],
-
 }
diff --git a/services/mediaresourcemanager/ResourceManagerService.cpp b/services/mediaresourcemanager/ResourceManagerService.cpp
index 3d36f8e..32ac583 100644
--- a/services/mediaresourcemanager/ResourceManagerService.cpp
+++ b/services/mediaresourcemanager/ResourceManagerService.cpp
@@ -36,18 +36,54 @@
 #include <unistd.h>
 
 #include "ResourceManagerService.h"
+#include "ResourceObserverService.h"
 #include "ServiceLog.h"
 
 namespace android {
 
+//static
+std::mutex ResourceManagerService::sCookieLock;
+//static
+uintptr_t ResourceManagerService::sCookieCounter = 0;
+//static
+std::map<uintptr_t, sp<DeathNotifier> > ResourceManagerService::sCookieToDeathNotifierMap;
+
+class DeathNotifier : public RefBase {
+public:
+    DeathNotifier(const std::shared_ptr<ResourceManagerService> &service,
+            int pid, int64_t clientId);
+
+    virtual ~DeathNotifier() {}
+
+    // Implement death recipient
+    static void BinderDiedCallback(void* cookie);
+    virtual void binderDied();
+
+protected:
+    std::weak_ptr<ResourceManagerService> mService;
+    int mPid;
+    int64_t mClientId;
+};
+
 DeathNotifier::DeathNotifier(const std::shared_ptr<ResourceManagerService> &service,
         int pid, int64_t clientId)
     : mService(service), mPid(pid), mClientId(clientId) {}
 
 //static
 void DeathNotifier::BinderDiedCallback(void* cookie) {
-    auto thiz = static_cast<DeathNotifier*>(cookie);
-    thiz->binderDied();
+    sp<DeathNotifier> notifier;
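+    // Resolve the cookie to its DeathNotifier under sCookieLock; if it has already been
+    // removed by removeCookieAndUnlink_l(), ignore the stale death notification.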
+    {
+        std::scoped_lock lock{ResourceManagerService::sCookieLock};
+        auto it = ResourceManagerService::sCookieToDeathNotifierMap.find(
+                reinterpret_cast<uintptr_t>(cookie));
+        if (it == ResourceManagerService::sCookieToDeathNotifierMap.end()) {
+            return;
+        }
+        notifier = it->second;
+    }
+    if (notifier.get() != nullptr) {
+        notifier->binderDied();
+    }
 }
 
 void DeathNotifier::binderDied() {
@@ -61,7 +97,27 @@
     service->overridePid(mPid, -1);
     // thiz is freed in the call below, so it must be last call referring thiz
     service->removeResource(mPid, mClientId, false);
+}
 
+class OverrideProcessInfoDeathNotifier : public DeathNotifier {
+public:
+    OverrideProcessInfoDeathNotifier(const std::shared_ptr<ResourceManagerService> &service,
+            int pid) : DeathNotifier(service, pid, 0) {}
+
+    virtual ~OverrideProcessInfoDeathNotifier() {}
+
+    virtual void binderDied();
+};
+
+void OverrideProcessInfoDeathNotifier::binderDied() {
+    // Don't check for pid validity since we know it's already dead.
+    std::shared_ptr<ResourceManagerService> service = mService.lock();
+    if (service == nullptr) {
+        ALOGW("ResourceManagerService is dead as well.");
+        return;
+    }
+
+    service->removeProcessInfoOverride(mPid);
 }
 
 template <typename T>
@@ -116,6 +172,7 @@
         info.uid = uid;
         info.clientId = clientId;
         info.client = client;
+        info.cookie = 0;
         info.pendingRemoval = false;
 
         index = infos.add(clientId, info);
@@ -267,6 +324,13 @@
     if (status != STATUS_OK) {
         return;
     }
+
+    std::shared_ptr<ResourceObserverService> observerService =
+            ResourceObserverService::instantiate();
+
+    if (observerService != nullptr) {
+        service->setObserverService(observerService);
+    }
     // TODO: mediaserver main() is already starting the thread pool,
     // move this to mediaserver main() when other services in mediaserver
     // are converted to ndk-platform aidl.
@@ -275,6 +339,11 @@
 
 ResourceManagerService::~ResourceManagerService() {}
 
+void ResourceManagerService::setObserverService(
+        const std::shared_ptr<ResourceObserverService>& observerService) {
+    mObserverService = observerService;
+}
+
 Status ResourceManagerService::config(const std::vector<MediaResourcePolicyParcel>& policies) {
     String8 log = String8::format("config(%s)", getString(policies).string());
     mServiceLog->add(log);
@@ -358,6 +427,7 @@
     }
     ResourceInfos& infos = getResourceInfosForEdit(pid, mMap);
     ResourceInfo& info = getResourceInfoForEdit(uid, clientId, client, infos);
+    ResourceList resourceAdded;
 
     for (size_t i = 0; i < resources.size(); ++i) {
         const auto &res = resources[i];
@@ -379,11 +449,20 @@
         } else {
             mergeResources(info.resources[resType], res);
         }
+        // Add it to the list of added resources for observers.
+        auto it = resourceAdded.find(resType);
+        if (it == resourceAdded.end()) {
+            resourceAdded[resType] = res;
+        } else {
+            mergeResources(it->second, res);
+        }
     }
-    if (info.deathNotifier == nullptr && client != nullptr) {
-        info.deathNotifier = new DeathNotifier(ref<ResourceManagerService>(), pid, clientId);
-        AIBinder_linkToDeath(client->asBinder().get(),
-                mDeathRecipient.get(), info.deathNotifier.get());
+    if (info.cookie == 0 && client != nullptr) {
+        info.cookie = addCookieAndLink_l(client->asBinder(),
+                new DeathNotifier(ref<ResourceManagerService>(), pid, clientId));
+    }
+    if (mObserverService != nullptr && !resourceAdded.empty()) {
+        mObserverService->onResourceAdded(uid, pid, resourceAdded);
     }
     notifyResourceGranted(pid, resources);
     return Status::ok();
@@ -415,7 +494,7 @@
     }
 
     ResourceInfo &info = infos.editValueAt(index);
-
+    ResourceList resourceRemoved;
     for (size_t i = 0; i < resources.size(); ++i) {
         const auto &res = resources[i];
         const auto resType = std::tuple(res.type, res.subType, res.id);
@@ -427,14 +506,27 @@
         // ignore if we don't have it
         if (info.resources.find(resType) != info.resources.end()) {
             MediaResourceParcel &resource = info.resources[resType];
+            MediaResourceParcel actualRemoved = res;
             if (resource.value > res.value) {
                 resource.value -= res.value;
             } else {
                 onLastRemoved(res, info);
+                actualRemoved.value = resource.value;
                 info.resources.erase(resType);
             }
+
+            // Add it to the list of removed resources for observers.
+            auto it = resourceRemoved.find(resType);
+            if (it == resourceRemoved.end()) {
+                resourceRemoved[resType] = actualRemoved;
+            } else {
+                mergeResources(it->second, actualRemoved);
+            }
         }
     }
+    if (mObserverService != nullptr && !resourceRemoved.empty()) {
+        mObserverService->onResourceRemoved(info.uid, pid, resourceRemoved);
+    }
     return Status::ok();
 }
 
@@ -472,8 +564,11 @@
         onLastRemoved(it->second, info);
     }
 
-    AIBinder_unlinkToDeath(info.client->asBinder().get(),
-            mDeathRecipient.get(), info.deathNotifier.get());
+    removeCookieAndUnlink_l(info.client->asBinder(), info.cookie);
+
+    if (mObserverService != nullptr && !info.resources.empty()) {
+        mObserverService->onResourceRemoved(info.uid, pid, info.resources);
+    }
 
     infos.removeItemsAt(index);
     return Status::ok();
@@ -575,13 +670,19 @@
         }
     }
 
+    *_aidl_return = reclaimInternal(clients);
+    return Status::ok();
+}
+
+bool ResourceManagerService::reclaimInternal(
+        const Vector<std::shared_ptr<IResourceManagerClient>> &clients) {
     if (clients.size() == 0) {
-        return Status::ok();
+        return false;
     }
 
     std::shared_ptr<IResourceManagerClient> failedClient;
     for (size_t i = 0; i < clients.size(); ++i) {
-        log = String8::format("reclaimResource from client %p", clients[i].get());
+        String8 log = String8::format("reclaimResource from client %p", clients[i].get());
         mServiceLog->add(log);
         bool success;
         Status status = clients[i]->reclaimResource(&success);
@@ -592,8 +693,7 @@
     }
 
     if (failedClient == NULL) {
-        *_aidl_return = true;
-        return Status::ok();
+        return true;
     }
 
     {
@@ -618,7 +718,7 @@
         }
     }
 
-    return Status::ok();
+    return false;
 }
 
 Status ResourceManagerService::overridePid(
@@ -651,6 +751,83 @@
     return Status::ok();
 }
 
+Status ResourceManagerService::overrideProcessInfo(
+        const std::shared_ptr<IResourceManagerClient>& client,
+        int pid,
+        int procState,
+        int oomScore) {
+    String8 log = String8::format("overrideProcessInfo(pid %d, procState %d, oomScore %d)",
+            pid, procState, oomScore);
+    mServiceLog->add(log);
+
+    // Only allow the override if the caller already can access process state and oom scores.
+    int callingPid = AIBinder_getCallingPid();
+    if (callingPid != getpid() && (callingPid != pid || !checkCallingPermission(String16(
+            "android.permission.GET_PROCESS_STATE_AND_OOM_SCORE")))) {
+        ALOGE("Permission Denial: overrideProcessInfo method from pid=%d", callingPid);
+        return Status::fromServiceSpecificError(PERMISSION_DENIED);
+    }
+
+    if (client == nullptr) {
+        return Status::fromServiceSpecificError(BAD_VALUE);
+    }
+
+    Mutex::Autolock lock(mLock);
+    removeProcessInfoOverride_l(pid);
+
+    if (!mProcessInfo->overrideProcessInfo(pid, procState, oomScore)) {
+        // Override value is rejected by ProcessInfo.
+        return Status::fromServiceSpecificError(BAD_VALUE);
+    }
+
+    uintptr_t cookie = addCookieAndLink_l(client->asBinder(),
+            new OverrideProcessInfoDeathNotifier(ref<ResourceManagerService>(), pid));
+
+    mProcessInfoOverrideMap.emplace(pid, ProcessInfoOverride{cookie, client});
+
+    return Status::ok();
+}
+
+uintptr_t ResourceManagerService::addCookieAndLink_l(
+        ::ndk::SpAIBinder binder, const sp<DeathNotifier>& notifier) {
+    std::scoped_lock lock{sCookieLock};
+
+    uintptr_t cookie;
+    // Need to skip cookie 0 (if it wraps around). ResourceInfo has cookie initialized to 0
+    // indicating the death notifier is not created yet.
+    while ((cookie = ++sCookieCounter) == 0);
+    AIBinder_linkToDeath(binder.get(), mDeathRecipient.get(), (void*)cookie);
+    sCookieToDeathNotifierMap.emplace(cookie, notifier);
+
+    return cookie;
+}
+
+void ResourceManagerService::removeCookieAndUnlink_l(
+        ::ndk::SpAIBinder binder, uintptr_t cookie) {
+    std::scoped_lock lock{sCookieLock};
+    AIBinder_unlinkToDeath(binder.get(), mDeathRecipient.get(), (void*)cookie);
+    sCookieToDeathNotifierMap.erase(cookie);
+}
+
+void ResourceManagerService::removeProcessInfoOverride(int pid) {
+    Mutex::Autolock lock(mLock);
+
+    removeProcessInfoOverride_l(pid);
+}
+
+void ResourceManagerService::removeProcessInfoOverride_l(int pid) {
+    auto it = mProcessInfoOverrideMap.find(pid);
+    if (it == mProcessInfoOverrideMap.end()) {
+        return;
+    }
+
+    mProcessInfo->removeProcessInfoOverride(pid);
+
+    removeCookieAndUnlink_l(it->second.client->asBinder(), it->second.cookie);
+
+    mProcessInfoOverrideMap.erase(pid);
+}
+
 Status ResourceManagerService::markClientForPendingRemoval(int32_t pid, int64_t clientId) {
     String8 log = String8::format(
             "markClientForPendingRemoval(pid %d, clientId %lld)",
@@ -681,6 +858,36 @@
     return Status::ok();
 }
 
+Status ResourceManagerService::reclaimResourcesFromClientsPendingRemoval(int32_t pid) {
+    String8 log = String8::format("reclaimResourcesFromClientsPendingRemoval(pid %d)", pid);
+    mServiceLog->add(log);
+
+    Vector<std::shared_ptr<IResourceManagerClient>> clients;
+    {
+        Mutex::Autolock lock(mLock);
+        if (!mProcessInfo->isValidPid(pid)) {
+            ALOGE("Rejected reclaimResourcesFromClientsPendingRemoval call with invalid pid.");
+            return Status::fromServiceSpecificError(BAD_VALUE);
+        }
+
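+        // Walk the resource types in order and reclaim at most one client: the biggest
+        // client marked for pending removal of the first type that has one.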
+        for (MediaResource::Type type : {MediaResource::Type::kSecureCodec,
+                                         MediaResource::Type::kNonSecureCodec,
+                                         MediaResource::Type::kGraphicMemory,
+                                         MediaResource::Type::kDrmSession}) {
+            std::shared_ptr<IResourceManagerClient> client;
+            if (getBiggestClient_l(pid, type, &client, true /* pendingRemovalOnly */)) {
+                clients.add(client);
+                break;
+            }
+        }
+    }
+
+    if (!clients.empty()) {
+        reclaimInternal(clients);
+    }
+    return Status::ok();
+}
+
 bool ResourceManagerService::getPriority_l(int pid, int* priority) {
     int newPid = pid;
 
@@ -804,7 +1011,8 @@
         bool pendingRemovalOnly) {
     ssize_t index = mMap.indexOfKey(pid);
     if (index < 0) {
-        ALOGE("getBiggestClient_l: can't find resource info for pid %d", pid);
+        ALOGE_IF(!pendingRemovalOnly,
+                 "getBiggestClient_l: can't find resource info for pid %d", pid);
         return false;
     }
 
@@ -828,7 +1036,9 @@
     }
 
     if (clientTemp == NULL) {
-        ALOGE("getBiggestClient_l: can't find resource type %s for pid %d", asString(type), pid);
+        ALOGE_IF(!pendingRemovalOnly,
+                 "getBiggestClient_l: can't find resource type %s for pid %d",
+                 asString(type), pid);
         return false;
     }
 
diff --git a/services/mediaresourcemanager/ResourceManagerService.h b/services/mediaresourcemanager/ResourceManagerService.h
index ee982b7..9c2636e 100644
--- a/services/mediaresourcemanager/ResourceManagerService.h
+++ b/services/mediaresourcemanager/ResourceManagerService.h
@@ -19,6 +19,7 @@
 #define ANDROID_MEDIA_RESOURCEMANAGERSERVICE_H
 
 #include <map>
+#include <mutex>
 
 #include <aidl/android/media/BnResourceManagerService.h>
 #include <arpa/inet.h>
@@ -33,6 +34,7 @@
 
 class DeathNotifier;
 class ResourceManagerService;
+class ResourceObserverService;
 class ServiceLog;
 struct ProcessInfoInterface;
 
@@ -43,14 +45,14 @@
 using ::aidl::android::media::MediaResourcePolicyParcel;
 
 typedef std::map<std::tuple<
-        MediaResource::Type, MediaResource::SubType, std::vector<int8_t>>,
+        MediaResource::Type, MediaResource::SubType, std::vector<uint8_t>>,
         MediaResourceParcel> ResourceList;
 
 struct ResourceInfo {
     int64_t clientId;
     uid_t uid;
     std::shared_ptr<IResourceManagerClient> client;
-    sp<DeathNotifier> deathNotifier;
+    uintptr_t cookie{0};
     ResourceList resources;
     bool pendingRemoval{false};
 };
@@ -59,22 +61,6 @@
 typedef KeyedVector<int64_t, ResourceInfo> ResourceInfos;
 typedef KeyedVector<int, ResourceInfos> PidResourceInfosMap;
 
-class DeathNotifier : public RefBase {
-public:
-    DeathNotifier(const std::shared_ptr<ResourceManagerService> &service,
-            int pid, int64_t clientId);
-
-    ~DeathNotifier() {}
-
-    // Implement death recipient
-    static void BinderDiedCallback(void* cookie);
-    void binderDied();
-
-private:
-    std::weak_ptr<ResourceManagerService> mService;
-    int mPid;
-    int64_t mClientId;
-};
 class ResourceManagerService : public BnResourceManagerService {
 public:
     struct SystemCallbackInterface : public RefBase {
@@ -95,6 +81,8 @@
             const sp<ProcessInfoInterface> &processInfo,
             const sp<SystemCallbackInterface> &systemResource);
     virtual ~ResourceManagerService();
+    void setObserverService(
+            const std::shared_ptr<ResourceObserverService>& observerService);
 
     // IResourceManagerService interface
     Status config(const std::vector<MediaResourcePolicyParcel>& policies) override;
@@ -125,12 +113,27 @@
             int originalPid,
             int newPid) override;
 
+    Status overrideProcessInfo(
+            const std::shared_ptr<IResourceManagerClient>& client,
+            int pid,
+            int procState,
+            int oomScore) override;
+
     Status markClientForPendingRemoval(int32_t pid, int64_t clientId) override;
 
+    Status reclaimResourcesFromClientsPendingRemoval(int32_t pid) override;
+
     Status removeResource(int pid, int64_t clientId, bool checkValid);
 
 private:
     friend class ResourceManagerServiceTest;
+    friend class DeathNotifier;
+    friend class OverrideProcessInfoDeathNotifier;
+
+    // Reclaims resources from |clients|. Returns true if reclaim succeeded
+    // for all clients.
+    bool reclaimInternal(
+            const Vector<std::shared_ptr<IResourceManagerClient>> &clients);
 
     // Gets the list of all the clients who own the specified resource type.
     // Returns false if any client belongs to a process with higher priority than the
@@ -170,6 +173,12 @@
     // Get priority from process's pid
     bool getPriority_l(int pid, int* priority);
 
+    void removeProcessInfoOverride(int pid);
+
+    void removeProcessInfoOverride_l(int pid);
+    uintptr_t addCookieAndLink_l(::ndk::SpAIBinder binder, const sp<DeathNotifier>& notifier);
+    void removeCookieAndUnlink_l(::ndk::SpAIBinder binder, uintptr_t cookie);
+
     mutable Mutex mLock;
     sp<ProcessInfoInterface> mProcessInfo;
     sp<SystemCallbackInterface> mSystemCB;
@@ -179,7 +188,17 @@
     bool mSupportsSecureWithNonSecureCodec;
     int32_t mCpuBoostCount;
     ::ndk::ScopedAIBinder_DeathRecipient mDeathRecipient;
+    struct ProcessInfoOverride {
+        uintptr_t cookie;
+        std::shared_ptr<IResourceManagerClient> client;
+    };
     std::map<int, int> mOverridePidMap;
+    std::map<pid_t, ProcessInfoOverride> mProcessInfoOverrideMap;
+    static std::mutex sCookieLock;
+    static uintptr_t sCookieCounter GUARDED_BY(sCookieLock);
+    static std::map<uintptr_t, sp<DeathNotifier> > sCookieToDeathNotifierMap
+            GUARDED_BY(sCookieLock);
+    std::shared_ptr<ResourceObserverService> mObserverService;
 };
 
 // ----------------------------------------------------------------------------
diff --git a/services/mediaresourcemanager/ResourceObserverService.cpp b/services/mediaresourcemanager/ResourceObserverService.cpp
new file mode 100644
index 0000000..44fe72d
--- /dev/null
+++ b/services/mediaresourcemanager/ResourceObserverService.cpp
@@ -0,0 +1,328 @@
+/**
+ *
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ResourceObserverService"
+#include <utils/Log.h>
+
+#include <android/binder_manager.h>
+#include <android/binder_process.h>
+#include <binder/IServiceManager.h>
+#include <utils/String16.h>
+#include <aidl/android/media/MediaResourceParcel.h>
+
+#include "ResourceObserverService.h"
+
+namespace aidl {
+namespace android {
+namespace media {
+bool operator<(const MediaObservableFilter& lhs, const MediaObservableFilter &rhs) {
+    return lhs.type < rhs.type || (lhs.type == rhs.type && lhs.eventFilter < rhs.eventFilter);
+}
+}}} // namespace ::aidl::android::media
+
+namespace android {
+
+using ::aidl::android::media::MediaResourceParcel;
+using ::aidl::android::media::MediaObservableEvent;
+
+// MediaObservableEvent will be used as uint64_t flags.
+static_assert(sizeof(MediaObservableEvent) == sizeof(uint64_t));
+
+static std::vector<MediaObservableEvent> sEvents = {
+        MediaObservableEvent::kBusy,
+        MediaObservableEvent::kIdle,
+};
+
+static MediaObservableType getObservableType(const MediaResourceParcel& res) {
+    if (res.subType == MediaResourceSubType::kVideoCodec) {
+        if (res.type == MediaResourceType::kNonSecureCodec) {
+            return MediaObservableType::kVideoNonSecureCodec;
+        }
+        if (res.type == MediaResourceType::kSecureCodec) {
+            return MediaObservableType::kVideoSecureCodec;
+        }
+    }
+    return MediaObservableType::kInvalid;
+}
+
+//static
+std::mutex ResourceObserverService::sDeathRecipientLock;
+//static
+std::map<uintptr_t, std::shared_ptr<ResourceObserverService::DeathRecipient> >
+ResourceObserverService::sDeathRecipientMap;
+
+struct ResourceObserverService::DeathRecipient {
+    DeathRecipient(ResourceObserverService* _service,
+            const std::shared_ptr<IResourceObserver>& _observer)
+        : service(_service), observer(_observer) {}
+    ~DeathRecipient() {}
+
+    void binderDied() {
+        if (service != nullptr) {
+            service->unregisterObserver(observer);
+        }
+    }
+
+    ResourceObserverService* service;
+    std::shared_ptr<IResourceObserver> observer;
+};
+
+// static
+void ResourceObserverService::BinderDiedCallback(void* cookie) {
+    uintptr_t id = reinterpret_cast<uintptr_t>(cookie);
+
+    ALOGW("Observer %lld is dead", (long long)id);
+
+    std::shared_ptr<DeathRecipient> recipient;
+
+    {
+        std::scoped_lock lock{sDeathRecipientLock};
+
+        auto it = sDeathRecipientMap.find(id);
+        if (it != sDeathRecipientMap.end()) {
+            recipient = it->second;
+        }
+    }
+
+    if (recipient != nullptr) {
+        recipient->binderDied();
+    }
+}
+
+//static
+std::shared_ptr<ResourceObserverService> ResourceObserverService::instantiate() {
+    std::shared_ptr<ResourceObserverService> observerService =
+            ::ndk::SharedRefBase::make<ResourceObserverService>();
+    binder_status_t status = AServiceManager_addService(observerService->asBinder().get(),
+            ResourceObserverService::getServiceName());
+    if (status != STATUS_OK) {
+        return nullptr;
+    }
+    return observerService;
+}
+
+ResourceObserverService::ResourceObserverService()
+    : mDeathRecipient(AIBinder_DeathRecipient_new(BinderDiedCallback)) {}
+
+binder_status_t ResourceObserverService::dump(
+        int fd, const char** /*args*/, uint32_t /*numArgs*/) {
+    String8 result;
+
+    if (checkCallingPermission(String16("android.permission.DUMP")) == false) {
+        result.format("Permission Denial: "
+                "can't dump ResourceManagerService from pid=%d, uid=%d\n",
+                AIBinder_getCallingPid(),
+                AIBinder_getCallingUid());
+        write(fd, result.string(), result.size());
+        return PERMISSION_DENIED;
+    }
+
+    result.appendFormat("ResourceObserverService: %p\n", this);
+    result.appendFormat("  Registered Observers: %zu\n", mObserverInfoMap.size());
+
+    {
+        std::scoped_lock lock{mObserverLock};
+
+        for (auto &observer : mObserverInfoMap) {
+            result.appendFormat("    Observer %p:\n", observer.second.binder.get());
+            for (auto &observable : observer.second.filters) {
+                String8 enabledEventsStr;
+                for (auto &event : sEvents) {
+                    if (((uint64_t)observable.eventFilter & (uint64_t)event) != 0) {
+                        if (!enabledEventsStr.isEmpty()) {
+                            enabledEventsStr.append("|");
+                        }
+                        enabledEventsStr.append(toString(event).c_str());
+                    }
+                }
+                result.appendFormat("      %s: %s\n",
+                        toString(observable.type).c_str(), enabledEventsStr.c_str());
+            }
+        }
+    }
+
+    write(fd, result.string(), result.size());
+    return OK;
+}
+
+Status ResourceObserverService::registerObserver(
+        const std::shared_ptr<IResourceObserver>& in_observer,
+        const std::vector<MediaObservableFilter>& in_filters) {
+    if ((getpid() != AIBinder_getCallingPid()) &&
+            checkCallingPermission(
+            String16("android.permission.REGISTER_MEDIA_RESOURCE_OBSERVER")) == false) {
+        ALOGE("Permission Denial: "
+                "can't registerObserver from pid=%d, uid=%d\n",
+                AIBinder_getCallingPid(),
+                AIBinder_getCallingUid());
+        return Status::fromServiceSpecificError(PERMISSION_DENIED);
+    }
+
+    ::ndk::SpAIBinder binder = in_observer->asBinder();
+
+    {
+        std::scoped_lock lock{mObserverLock};
+
+        if (mObserverInfoMap.find((uintptr_t)binder.get()) != mObserverInfoMap.end()) {
+            return Status::fromServiceSpecificError(ALREADY_EXISTS);
+        }
+
+        if (in_filters.empty()) {
+            return Status::fromServiceSpecificError(BAD_VALUE);
+        }
+
+        // Add observer info.
+        mObserverInfoMap.emplace((uintptr_t)binder.get(),
+                ObserverInfo{binder, in_observer, in_filters});
+
+        // Add observer to observable->subscribers map.
+        for (auto &filter : in_filters) {
+            for (auto &event : sEvents) {
+                if (!((uint64_t)filter.eventFilter & (uint64_t)event)) {
+                    continue;
+                }
+                MediaObservableFilter key{filter.type, event};
+                mObservableToSubscribersMap[key].emplace((uintptr_t)binder.get(), in_observer);
+            }
+        }
+    }
+
+    // Add death binder and link.
+    uintptr_t cookie = (uintptr_t)binder.get();
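+    // The observer's binder pointer value doubles as the death-recipient cookie and as
+    // the key into sDeathRecipientMap.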
+    {
+        std::scoped_lock lock{sDeathRecipientLock};
+        sDeathRecipientMap.emplace(
+                cookie, std::make_shared<DeathRecipient>(this, in_observer));
+    }
+
+    AIBinder_linkToDeath(binder.get(), mDeathRecipient.get(),
+                         reinterpret_cast<void*>(cookie));
+
+    return Status::ok();
+}
+
+Status ResourceObserverService::unregisterObserver(
+        const std::shared_ptr<IResourceObserver>& in_observer) {
+    if ((getpid() != AIBinder_getCallingPid()) &&
+            checkCallingPermission(
+            String16("android.permission.REGISTER_MEDIA_RESOURCE_OBSERVER")) == false) {
+        ALOGE("Permission Denial: "
+                "can't unregisterObserver from pid=%d, uid=%d\n",
+                AIBinder_getCallingPid(),
+                AIBinder_getCallingUid());
+        return Status::fromServiceSpecificError(PERMISSION_DENIED);
+    }
+
+    ::ndk::SpAIBinder binder = in_observer->asBinder();
+
+    {
+        std::scoped_lock lock{mObserverLock};
+
+        auto it = mObserverInfoMap.find((uintptr_t)binder.get());
+        if (it == mObserverInfoMap.end()) {
+            return Status::fromServiceSpecificError(NAME_NOT_FOUND);
+        }
+
+        // Remove observer from observable->subscribers map.
+        for (auto &filter : it->second.filters) {
+            for (auto &event : sEvents) {
+                if (!((uint64_t)filter.eventFilter & (uint64_t)event)) {
+                    continue;
+                }
+                MediaObservableFilter key{filter.type, event};
+                mObservableToSubscribersMap[key].erase((uintptr_t)binder.get());
+
+                // Remove the entry if there are no more subscribers.
+                if (mObservableToSubscribersMap[key].empty()) {
+                    mObservableToSubscribersMap.erase(key);
+                }
+            }
+        }
+
+        // Remove observer info.
+        mObserverInfoMap.erase(it);
+    }
+
+    // Unlink and remove death binder.
+    uintptr_t cookie = (uintptr_t)binder.get();
+    AIBinder_unlinkToDeath(binder.get(), mDeathRecipient.get(),
+            reinterpret_cast<void*>(cookie));
+
+    {
+        std::scoped_lock lock{sDeathRecipientLock};
+        sDeathRecipientMap.erase(cookie);
+    }
+
+    return Status::ok();
+}
+
+void ResourceObserverService::notifyObservers(
+        MediaObservableEvent event, int uid, int pid, const ResourceList &resources) {
+    struct CalleeInfo {
+        std::shared_ptr<IResourceObserver> observer;
+        std::vector<MediaObservableParcel> monitors;
+    };
+    // Build a consolidated list of observers to call with their respective observables.
+    std::map<uintptr_t, CalleeInfo> calleeList;
+
+    {
+        std::scoped_lock lock{mObserverLock};
+
+        for (auto &res : resources) {
+            // Skip if this resource doesn't map to any observable type.
+            MediaObservableType observableType = getObservableType(res.second);
+            if (observableType == MediaObservableType::kInvalid) {
+                continue;
+            }
+            MediaObservableFilter key{observableType, event};
+            // Skip if no one subscribed to this observable.
+            auto observableIt = mObservableToSubscribersMap.find(key);
+            if (observableIt == mObservableToSubscribersMap.end()) {
+                continue;
+            }
+            // Loop through all subscribers.
+            for (auto &subscriber : observableIt->second) {
+                auto calleeIt = calleeList.find(subscriber.first);
+                if (calleeIt == calleeList.end()) {
+                    calleeList.emplace(subscriber.first, CalleeInfo{
+                        subscriber.second, {{observableType, res.second.value}}});
+                } else {
+                    calleeIt->second.monitors.push_back({observableType, res.second.value});
+                }
+            }
+        }
+    }
+
+    // Finally call the observers about the status change.
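+    // The callbacks run outside of mObserverLock, so a slow observer cannot block
+    // register/unregister calls.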
+    for (auto &calleeInfo : calleeList) {
+        calleeInfo.second.observer->onStatusChanged(
+                event, uid, pid, calleeInfo.second.monitors);
+    }
+}
+
+void ResourceObserverService::onResourceAdded(
+        int uid, int pid, const ResourceList &resources) {
+    notifyObservers(MediaObservableEvent::kBusy, uid, pid, resources);
+}
+
+void ResourceObserverService::onResourceRemoved(
+        int uid, int pid, const ResourceList &resources) {
+    notifyObservers(MediaObservableEvent::kIdle, uid, pid, resources);
+}
+
+} // namespace android
diff --git a/services/mediaresourcemanager/ResourceObserverService.h b/services/mediaresourcemanager/ResourceObserverService.h
new file mode 100644
index 0000000..46bc5fb
--- /dev/null
+++ b/services/mediaresourcemanager/ResourceObserverService.h
@@ -0,0 +1,95 @@
+/**
+ *
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_RESOURCE_OBSERVER_SERVICE_H
+#define ANDROID_MEDIA_RESOURCE_OBSERVER_SERVICE_H
+
+#include <map>
+
+#include <aidl/android/media/BnResourceObserverService.h>
+#include "ResourceManagerService.h"
+
+namespace android {
+
+using Status = ::ndk::ScopedAStatus;
+using ::aidl::android::media::BnResourceObserverService;
+using ::aidl::android::media::IResourceObserver;
+using ::aidl::android::media::MediaObservableFilter;
+using ::aidl::android::media::MediaObservableParcel;
+using ::aidl::android::media::MediaObservableType;
+using ::aidl::android::media::MediaObservableEvent;
+
+class ResourceObserverService : public BnResourceObserverService {
+public:
+
+    static char const *getServiceName() { return "media.resource_observer"; }
+    static std::shared_ptr<ResourceObserverService> instantiate();
+
+    virtual inline binder_status_t dump(
+            int /*fd*/, const char** /*args*/, uint32_t /*numArgs*/);
+
+    ResourceObserverService();
+    virtual ~ResourceObserverService() {}
+
+    // IResourceObserverService interface
+    Status registerObserver(const std::shared_ptr<IResourceObserver>& in_observer,
+            const std::vector<MediaObservableFilter>& in_filters) override;
+
+    Status unregisterObserver(const std::shared_ptr<IResourceObserver>& in_observer) override;
+    // ~IResourceObserverService interface
+
+    // Called by ResourceManagerService when resources are added.
+    void onResourceAdded(int uid, int pid, const ResourceList &resources);
+
+    // Called by ResourceManagerService when resources are removed.
+    void onResourceRemoved(int uid, int pid, const ResourceList &resources);
+
+private:
+    struct ObserverInfo {
+        ::ndk::SpAIBinder binder;
+        std::shared_ptr<IResourceObserver> observer;
+        std::vector<MediaObservableFilter> filters;
+    };
+    struct DeathRecipient;
+
+    // Below maps are all keyed on the observer's binder ptr value.
+    using ObserverInfoMap = std::map<uintptr_t, ObserverInfo>;
+    using SubscriberMap = std::map<uintptr_t, std::shared_ptr<IResourceObserver>>;
+
+    std::mutex mObserverLock;
+    // Binder->ObserverInfo
+    ObserverInfoMap mObserverInfoMap GUARDED_BY(mObserverLock);
+    // Observable(<type,event>)->Subscribers
+    std::map<MediaObservableFilter, SubscriberMap> mObservableToSubscribersMap
+            GUARDED_BY(mObserverLock);
+
+    ::ndk::ScopedAIBinder_DeathRecipient mDeathRecipient;
+
+    // Binder death handling.
+    static std::mutex sDeathRecipientLock;
+    static std::map<uintptr_t, std::shared_ptr<DeathRecipient>> sDeathRecipientMap
+            GUARDED_BY(sDeathRecipientLock);
+    static void BinderDiedCallback(void* cookie);
+
+    void notifyObservers(MediaObservableEvent event,
+            int uid, int pid, const ResourceList &resources);
+};
+
+// ----------------------------------------------------------------------------
+} // namespace android
+
+#endif // ANDROID_MEDIA_RESOURCE_OBSERVER_SERVICE_H
diff --git a/services/mediaresourcemanager/TEST_MAPPING b/services/mediaresourcemanager/TEST_MAPPING
index 418b159..52ad441 100644
--- a/services/mediaresourcemanager/TEST_MAPPING
+++ b/services/mediaresourcemanager/TEST_MAPPING
@@ -5,6 +5,9 @@
     },
     {
        "name": "ServiceLog_test"
+    },
+    {
+       "name": "ResourceObserverService_test"
     }
   ]
 }
diff --git a/media/libmedia/aidl/android/media/IResourceManagerClient.aidl b/services/mediaresourcemanager/aidl/android/media/IResourceManagerClient.aidl
similarity index 100%
rename from media/libmedia/aidl/android/media/IResourceManagerClient.aidl
rename to services/mediaresourcemanager/aidl/android/media/IResourceManagerClient.aidl
diff --git a/services/mediaresourcemanager/aidl/android/media/IResourceManagerService.aidl b/services/mediaresourcemanager/aidl/android/media/IResourceManagerService.aidl
new file mode 100644
index 0000000..7a0a50f
--- /dev/null
+++ b/services/mediaresourcemanager/aidl/android/media/IResourceManagerService.aidl
@@ -0,0 +1,134 @@
+/**
+ * Copyright (c) 2019, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.IResourceManagerClient;
+import android.media.MediaResourceParcel;
+import android.media.MediaResourcePolicyParcel;
+
+/**
+ * ResourceManagerService interface that keeps track of media resources
+ * owned by clients, and reclaims resources based on configured policies
+ * when necessary.
+ *
+ * {@hide}
+ */
+interface IResourceManagerService {
+    const @utf8InCpp String kPolicySupportsMultipleSecureCodecs
+            = "supports-multiple-secure-codecs";
+    const @utf8InCpp String kPolicySupportsSecureWithNonSecureCodec
+            = "supports-secure-with-non-secure-codec";
+
+    /**
+     * Configure the ResourceManagerService to adopt particular policies when
+     * managing the resources.
+     *
+     * @param policies an array of policies to be adopted.
+     */
+    void config(in MediaResourcePolicyParcel[] policies);
+
+    /**
+     * Add a client to a process with a list of resources.
+     *
+     * @param pid pid of the client.
+     * @param uid uid of the client.
+     * @param clientId an identifier that uniquely identifies the client within the pid.
+     * @param client interface for the ResourceManagerService to call the client.
+     * @param resources an array of resources to be added.
+     */
+    void addResource(
+            int pid,
+            int uid,
+            long clientId,
+            IResourceManagerClient client,
+            in MediaResourceParcel[] resources);
+
+    /**
+     * Remove the listed resources from a client.
+     *
+     * @param pid pid from which the list of resources will be removed.
+     * @param clientId clientId within the pid from which the list of resources will be removed.
+     * @param resources an array of resources to be removed from the client.
+     */
+    void removeResource(int pid, long clientId, in MediaResourceParcel[] resources);
+
+    /**
+     * Remove all resources from a client.
+     *
+     * @param pid pid from which the client's resources will be removed.
+     * @param clientId clientId within the pid that will be removed.
+     */
+    void removeClient(int pid, long clientId);
+
+    /**
+     * Tries to reclaim resources from processes with lower priority than the
+     * calling process, according to the requested resources.
+     *
+     * @param callingPid pid of the calling process.
+     * @param resources an array of resources to be reclaimed.
+     *
+     * @return true if the reclaim was successful and false otherwise.
+     */
+    boolean reclaimResource(int callingPid, in MediaResourceParcel[] resources);
+
+    /**
+     * Override the pid of the original calling process with the pid of the process
+     * that actually uses the requested resources.
+     *
+     * @param originalPid pid of the original calling process.
+     * @param newPid pid of the actual process that uses the resources.
+     *        Removes the existing override on originalPid if newPid is -1.
+     */
+    void overridePid(int originalPid, int newPid);
+
+    /**
+     * Override the process state and OOM score of the calling process with the
+     * specified values. This is used by native service processes to specify
+     * these values for ResourceManagerService to use. ResourceManagerService usually
+     * gets these values from ActivityManagerService, however, ActivityManagerService
+     * doesn't track native service processes.
+     *
+     * @param client a token for the ResourceManagerService to link to the caller and
+     *              receive notification if it goes away. This is needed for clearing
+     *              the overrides.
+     * @param pid pid of the calling process.
+     * @param procState the process state value that ResourceManagerService should
+     *                  use for this pid.
+     * @param oomScore the oom score value that ResourceManagerService should
+     *                  use for this pid.
+     */
+    void overrideProcessInfo(
+            IResourceManagerClient client,
+            int pid,
+            int procState,
+            int oomScore);
+
+    /**
+     * Mark a client for pending removal.
+     *
+     * @param pid pid from which the client's resources will be removed.
+     * @param clientId clientId within the pid that will be removed.
+     */
+    void markClientForPendingRemoval(int pid, long clientId);
+
+    /**
+     * Reclaim resources from clients pending removal, if any.
+     *
+     * @param pid pid from which resources will be reclaimed.
+     */
+    void reclaimResourcesFromClientsPendingRemoval(int pid);
+}
diff --git a/services/mediaresourcemanager/aidl/android/media/IResourceObserver.aidl b/services/mediaresourcemanager/aidl/android/media/IResourceObserver.aidl
new file mode 100644
index 0000000..462009a
--- /dev/null
+++ b/services/mediaresourcemanager/aidl/android/media/IResourceObserver.aidl
@@ -0,0 +1,39 @@
+/**
+ * Copyright (c) 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.MediaObservableEvent;
+import android.media.MediaObservableParcel;
+
+/**
+ * IResourceObserver interface for receiving observable resource updates
+ * from IResourceObserverService.
+ *
+ * {@hide}
+ */
+interface IResourceObserver {
+    /**
+     * Called when the status of an observed resource changes.
+     *
+     * @param event the status change that happened to the resource.
+     * @param uid uid to which the resource is associated.
+     * @param pid pid to which the resource is associated.
+     * @param observables the resources whose status has changed.
+     */
+    oneway void onStatusChanged(MediaObservableEvent event,
+        int uid, int pid, in MediaObservableParcel[] observables);
+}
diff --git a/services/mediaresourcemanager/aidl/android/media/IResourceObserverService.aidl b/services/mediaresourcemanager/aidl/android/media/IResourceObserverService.aidl
new file mode 100644
index 0000000..08f4ca0
--- /dev/null
+++ b/services/mediaresourcemanager/aidl/android/media/IResourceObserverService.aidl
@@ -0,0 +1,49 @@
+/**
+ * Copyright (c) 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.IResourceObserver;
+import android.media.MediaObservableFilter;
+
+/**
+ * IResourceObserverService interface for registering an IResourceObserver
+ * callback to receive status updates about observable media resources.
+ *
+ * {@hide}
+ */
+interface IResourceObserverService {
+
+    /**
+     * Register an observer on the IResourceObserverService to receive
+     * status updates for observable resources.
+     *
+     * @param observer the observer to register.
+     * @param filters an array of filters for resources and events to receive
+     *                updates for.
+     */
+    void registerObserver(
+            IResourceObserver observer,
+            in MediaObservableFilter[] filters);
+
+    /**
+     * Unregister an observer from the IResourceObserverService.
+     * The observer will stop receiving the status updates.
+     *
+     * @param observer the observer to unregister.
+     */
+    void unregisterObserver(IResourceObserver observer);
+}
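
For reference, a condensed sketch of how a client might drive registerObserver()/unregisterObserver() from C++ with the NDK backend, modeled on the tests added later in this change; the CodecObserver name and the generated header paths are illustrative assumptions.

    #include <aidl/android/media/BnResourceObserver.h>
    #include <aidl/android/media/IResourceObserverService.h>

    using namespace aidl::android::media;

    // Minimal observer; a real implementation would act on the updates.
    struct CodecObserver : public BnResourceObserver {
        ndk::ScopedAStatus onStatusChanged(MediaObservableEvent /*event*/, int32_t /*uid*/,
                int32_t /*pid*/,
                const std::vector<MediaObservableParcel>& /*observables*/) override {
            return ndk::ScopedAStatus::ok();
        }
    };

    void watchSecureCodecs(const std::shared_ptr<IResourceObserverService>& service) {
        auto observer = ndk::SharedRefBase::make<CodecObserver>();
        // Receive both kBusy and kIdle updates for secure video codecs.
        std::vector<MediaObservableFilter> filters =
                {{MediaObservableType::kVideoSecureCodec, MediaObservableEvent::kAll}};
        service->registerObserver(observer, filters);
        // ... later, stop receiving updates.
        service->unregisterObserver(observer);
    }
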
diff --git a/services/mediaresourcemanager/aidl/android/media/MediaObservableEvent.aidl b/services/mediaresourcemanager/aidl/android/media/MediaObservableEvent.aidl
new file mode 100644
index 0000000..56ab24d
--- /dev/null
+++ b/services/mediaresourcemanager/aidl/android/media/MediaObservableEvent.aidl
@@ -0,0 +1,44 @@
+/**
+ * Copyright (c) 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * Enums for media observable events.
+ *
+ * These values are used as bitmasks in the MediaObservableFilter objects
+ * passed to IResourceObserverService::registerObserver() to indicate which
+ * events the observer is interested in.
+ *
+ * {@hide}
+ */
+@Backing(type="long")
+enum MediaObservableEvent {
+    /**
+     * A media resource is granted to a client and becomes busy.
+     */
+    kBusy = 1,
+
+    /**
+     * A media resource is released by a client and becomes idle.
+     */
+    kIdle = 2,
+
+    /**
+     * A bitmask that covers all observable events defined.
+     */
+    kAll = ~0,
+}
diff --git a/services/mediaresourcemanager/aidl/android/media/MediaObservableFilter.aidl b/services/mediaresourcemanager/aidl/android/media/MediaObservableFilter.aidl
new file mode 100644
index 0000000..38f7e39
--- /dev/null
+++ b/services/mediaresourcemanager/aidl/android/media/MediaObservableFilter.aidl
@@ -0,0 +1,43 @@
+/**
+ * Copyright (c) 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.MediaObservableType;
+import android.media.MediaObservableEvent;
+
+/**
+ * Description of an observable resource and its associated events that the
+ * observer is interested in.
+ *
+ * {@hide}
+ */
+parcelable MediaObservableFilter {
+    /**
+     * Type of the observable media resource.
+     */
+    MediaObservableType type;
+
+    /**
+     * Events that the observer is interested in.
+     *
+     * This field is a bitwise-OR of the events in MediaObservableEvent. If a
+     * particular event's bit is set, it means that updates should be sent for
+     * that event. For example, if the observer is only interested in receiving
+     * updates when a resource becomes available, it should only set 'kIdle'.
+     */
+    MediaObservableEvent eventFilter;
+}
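
Because eventFilter is a bitmask, a filter can subscribe to a subset of events or to an explicit combination. A small illustrative fragment, assuming the NDK backend's int64_t-backed enum for MediaObservableEvent:

    using aidl::android::media::MediaObservableEvent;
    using aidl::android::media::MediaObservableFilter;
    using aidl::android::media::MediaObservableType;

    // Only deliver kIdle ("a non-secure video codec became available") updates.
    MediaObservableFilter idleOnly{MediaObservableType::kVideoNonSecureCodec,
                                   MediaObservableEvent::kIdle};

    // kAll already covers everything; an explicit combination would look like:
    MediaObservableFilter busyOrIdle{MediaObservableType::kVideoSecureCodec,
            static_cast<MediaObservableEvent>(
                    static_cast<int64_t>(MediaObservableEvent::kBusy) |
                    static_cast<int64_t>(MediaObservableEvent::kIdle))};
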
diff --git a/services/mediaresourcemanager/aidl/android/media/MediaObservableParcel.aidl b/services/mediaresourcemanager/aidl/android/media/MediaObservableParcel.aidl
new file mode 100644
index 0000000..c4233e1
--- /dev/null
+++ b/services/mediaresourcemanager/aidl/android/media/MediaObservableParcel.aidl
@@ -0,0 +1,37 @@
+/**
+ * Copyright (c) 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.MediaObservableType;
+
+/**
+ * Description of an observable resource whose status has changed.
+ *
+ * {@hide}
+ */
+parcelable MediaObservableParcel {
+    /**
+     * Type of the observable media resource.
+     */
+    MediaObservableType type;// = MediaObservableType::kInvalid;
+
+    /**
+     * Number of units of the observable resource (number of codecs, bytes of
+     * graphic memory, etc.).
+     */
+    long value = 0;
+}
diff --git a/services/mediaresourcemanager/aidl/android/media/MediaObservableType.aidl b/services/mediaresourcemanager/aidl/android/media/MediaObservableType.aidl
new file mode 100644
index 0000000..ed202da
--- /dev/null
+++ b/services/mediaresourcemanager/aidl/android/media/MediaObservableType.aidl
@@ -0,0 +1,35 @@
+/**
+ * Copyright (c) 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * Type enums of observable media resources.
+ *
+ * {@hide}
+ */
+@Backing(type="int")
+enum MediaObservableType {
+    kInvalid = 0,
+
+    //kVideoStart = 1000,
+    kVideoSecureCodec = 1000,
+    kVideoNonSecureCodec = 1001,
+
+    //kAudioStart = 2000,
+
+    //kGraphicMemory = 3000,
+}
diff --git a/media/libmedia/aidl/android/media/MediaResourceParcel.aidl b/services/mediaresourcemanager/aidl/android/media/MediaResourceParcel.aidl
similarity index 100%
rename from media/libmedia/aidl/android/media/MediaResourceParcel.aidl
rename to services/mediaresourcemanager/aidl/android/media/MediaResourceParcel.aidl
diff --git a/media/libmedia/aidl/android/media/MediaResourcePolicyParcel.aidl b/services/mediaresourcemanager/aidl/android/media/MediaResourcePolicyParcel.aidl
similarity index 100%
rename from media/libmedia/aidl/android/media/MediaResourcePolicyParcel.aidl
rename to services/mediaresourcemanager/aidl/android/media/MediaResourcePolicyParcel.aidl
diff --git a/media/libmedia/aidl/android/media/MediaResourceSubType.aidl b/services/mediaresourcemanager/aidl/android/media/MediaResourceSubType.aidl
similarity index 100%
rename from media/libmedia/aidl/android/media/MediaResourceSubType.aidl
rename to services/mediaresourcemanager/aidl/android/media/MediaResourceSubType.aidl
diff --git a/media/libmedia/aidl/android/media/MediaResourceType.aidl b/services/mediaresourcemanager/aidl/android/media/MediaResourceType.aidl
similarity index 100%
rename from media/libmedia/aidl/android/media/MediaResourceType.aidl
rename to services/mediaresourcemanager/aidl/android/media/MediaResourceType.aidl
diff --git a/services/mediaresourcemanager/test/Android.bp b/services/mediaresourcemanager/test/Android.bp
index b6c548c..308ee91 100644
--- a/services/mediaresourcemanager/test/Android.bp
+++ b/services/mediaresourcemanager/test/Android.bp
@@ -3,12 +3,12 @@
     name: "ResourceManagerService_test",
     srcs: ["ResourceManagerService_test.cpp"],
     test_suites: ["device-tests"],
+    static_libs: ["libresourcemanagerservice"],
     shared_libs: [
         "libbinder",
         "libbinder_ndk",
         "liblog",
         "libmedia",
-        "libresourcemanagerservice",
         "libutils",
     ],
     include_dirs: [
@@ -19,17 +19,16 @@
         "-Werror",
         "-Wall",
     ],
-    compile_multilib: "32",
 }
 
 cc_test {
     name: "ServiceLog_test",
     srcs: ["ServiceLog_test.cpp"],
     test_suites: ["device-tests"],
+    static_libs: ["libresourcemanagerservice"],
     shared_libs: [
         "liblog",
         "libmedia",
-        "libresourcemanagerservice",
         "libutils",
     ],
     include_dirs: [
@@ -40,5 +39,29 @@
         "-Werror",
         "-Wall",
     ],
-    compile_multilib: "32",
+}
+
+cc_test {
+    name: "ResourceObserverService_test",
+    srcs: ["ResourceObserverService_test.cpp"],
+    test_suites: ["device-tests"],
+    static_libs: [
+        "libresourcemanagerservice",
+        "resourceobserver_aidl_interface-ndk_platform",
+    ],
+    shared_libs: [
+        "libbinder",
+        "libbinder_ndk",
+        "liblog",
+        "libmedia",
+        "libutils",
+    ],
+    include_dirs: [
+        "frameworks/av/include",
+        "frameworks/av/services/mediaresourcemanager",
+    ],
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
 }
diff --git a/services/mediaresourcemanager/test/ResourceManagerServiceTestUtils.h b/services/mediaresourcemanager/test/ResourceManagerServiceTestUtils.h
new file mode 100644
index 0000000..4cf5f0a
--- /dev/null
+++ b/services/mediaresourcemanager/test/ResourceManagerServiceTestUtils.h
@@ -0,0 +1,224 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <gtest/gtest.h>
+
+#include "ResourceManagerService.h"
+#include <aidl/android/media/BnResourceManagerClient.h>
+#include <media/MediaResource.h>
+#include <media/MediaResourcePolicy.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/ProcessInfoInterface.h>
+
+namespace aidl {
+namespace android {
+namespace media {
+bool operator== (const MediaResourceParcel& lhs, const MediaResourceParcel& rhs) {
+    return lhs.type == rhs.type && lhs.subType == rhs.subType &&
+            lhs.id == rhs.id && lhs.value == rhs.value;
+}
+}}}
+
+namespace android {
+
+using Status = ::ndk::ScopedAStatus;
+using ::aidl::android::media::BnResourceManagerClient;
+using ::aidl::android::media::IResourceManagerService;
+using ::aidl::android::media::IResourceManagerClient;
+using ::aidl::android::media::MediaResourceParcel;
+
+static int64_t getId(const std::shared_ptr<IResourceManagerClient>& client) {
+    return (int64_t) client.get();
+}
+
+struct TestProcessInfo : public ProcessInfoInterface {
+    TestProcessInfo() {}
+    virtual ~TestProcessInfo() {}
+
+    virtual bool getPriority(int pid, int *priority) {
+        // For testing, use the pid as the priority.
+        // The lower the value, the higher the priority.
+        *priority = pid;
+        return true;
+    }
+
+    virtual bool isValidPid(int /* pid */) {
+        return true;
+    }
+
+    virtual bool overrideProcessInfo(
+            int /* pid */, int /* procState */, int /* oomScore */) {
+        return true;
+    }
+
+    virtual void removeProcessInfoOverride(int /* pid */) {
+    }
+
+private:
+    DISALLOW_EVIL_CONSTRUCTORS(TestProcessInfo);
+};
+
+struct TestSystemCallback :
+        public ResourceManagerService::SystemCallbackInterface {
+    TestSystemCallback() :
+        mLastEvent({EventType::INVALID, 0}), mEventCount(0) {}
+
+    enum EventType {
+        INVALID          = -1,
+        VIDEO_ON         = 0,
+        VIDEO_OFF        = 1,
+        VIDEO_RESET      = 2,
+        CPUSET_ENABLE    = 3,
+        CPUSET_DISABLE   = 4,
+    };
+
+    struct EventEntry {
+        EventType type;
+        int arg;
+    };
+
+    virtual void noteStartVideo(int uid) override {
+        mLastEvent = {EventType::VIDEO_ON, uid};
+        mEventCount++;
+    }
+
+    virtual void noteStopVideo(int uid) override {
+        mLastEvent = {EventType::VIDEO_OFF, uid};
+        mEventCount++;
+    }
+
+    virtual void noteResetVideo() override {
+        mLastEvent = {EventType::VIDEO_RESET, 0};
+        mEventCount++;
+    }
+
+    virtual bool requestCpusetBoost(bool enable) override {
+        mLastEvent = {enable ? EventType::CPUSET_ENABLE : EventType::CPUSET_DISABLE, 0};
+        mEventCount++;
+        return true;
+    }
+
+    size_t eventCount() { return mEventCount; }
+    EventType lastEventType() { return mLastEvent.type; }
+    EventEntry lastEvent() { return mLastEvent; }
+
+protected:
+    virtual ~TestSystemCallback() {}
+
+private:
+    EventEntry mLastEvent;
+    size_t mEventCount;
+
+    DISALLOW_EVIL_CONSTRUCTORS(TestSystemCallback);
+};
+
+
+struct TestClient : public BnResourceManagerClient {
+    TestClient(int pid, const std::shared_ptr<ResourceManagerService> &service)
+        : mReclaimed(false), mPid(pid), mService(service) {}
+
+    Status reclaimResource(bool* _aidl_return) override {
+        mService->removeClient(mPid, getId(ref<TestClient>()));
+        mReclaimed = true;
+        *_aidl_return = true;
+        return Status::ok();
+    }
+
+    Status getName(::std::string* _aidl_return) override {
+        *_aidl_return = "test_client";
+        return Status::ok();
+    }
+
+    bool reclaimed() const {
+        return mReclaimed;
+    }
+
+    void reset() {
+        mReclaimed = false;
+    }
+
+    virtual ~TestClient() {}
+
+private:
+    bool mReclaimed;
+    int mPid;
+    std::shared_ptr<ResourceManagerService> mService;
+    DISALLOW_EVIL_CONSTRUCTORS(TestClient);
+};
+
+static const int kTestPid1 = 30;
+static const int kTestUid1 = 1010;
+
+static const int kTestPid2 = 20;
+static const int kTestUid2 = 1011;
+
+static const int kLowPriorityPid = 40;
+static const int kMidPriorityPid = 25;
+static const int kHighPriorityPid = 10;
+
+using EventType = TestSystemCallback::EventType;
+using EventEntry = TestSystemCallback::EventEntry;
+bool operator== (const EventEntry& lhs, const EventEntry& rhs) {
+    return lhs.type == rhs.type && lhs.arg == rhs.arg;
+}
+
+#define CHECK_STATUS_TRUE(condition) \
+    EXPECT_TRUE((condition).isOk() && (result))
+
+#define CHECK_STATUS_FALSE(condition) \
+    EXPECT_TRUE((condition).isOk() && !(result))
+
+class ResourceManagerServiceTestBase : public ::testing::Test {
+public:
+    ResourceManagerServiceTestBase()
+        : mSystemCB(new TestSystemCallback()),
+          mService(::ndk::SharedRefBase::make<ResourceManagerService>(
+                  new TestProcessInfo, mSystemCB)),
+          mTestClient1(::ndk::SharedRefBase::make<TestClient>(kTestPid1, mService)),
+          mTestClient2(::ndk::SharedRefBase::make<TestClient>(kTestPid2, mService)),
+          mTestClient3(::ndk::SharedRefBase::make<TestClient>(kTestPid2, mService)) {
+    }
+
+    sp<TestSystemCallback> mSystemCB;
+    std::shared_ptr<ResourceManagerService> mService;
+    std::shared_ptr<IResourceManagerClient> mTestClient1;
+    std::shared_ptr<IResourceManagerClient> mTestClient2;
+    std::shared_ptr<IResourceManagerClient> mTestClient3;
+
+protected:
+    static bool isEqualResources(const std::vector<MediaResourceParcel> &resources1,
+            const ResourceList &resources2) {
+        // Convert resources1 to a ResourceList.
+        ResourceList r1;
+        for (size_t i = 0; i < resources1.size(); ++i) {
+            const auto &res = resources1[i];
+            const auto resType = std::tuple(res.type, res.subType, res.id);
+            r1[resType] = res;
+        }
+        return r1 == resources2;
+    }
+
+    static void expectEqResourceInfo(const ResourceInfo &info,
+            int uid,
+            std::shared_ptr<IResourceManagerClient> client,
+            const std::vector<MediaResourceParcel> &resources) {
+        EXPECT_EQ(uid, info.uid);
+        EXPECT_EQ(client, info.client);
+        EXPECT_TRUE(isEqualResources(resources, info.resources));
+    }
+};
+
+} // namespace android
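
One note on the CHECK_STATUS_TRUE / CHECK_STATUS_FALSE macros above: they expect a local bool named result in the calling scope. A minimal sketch (reclaimResource's NDK signature follows from the AIDL earlier in this change):

    bool result;
    // Passes when the binder call succeeds and the reclaim returns true.
    CHECK_STATUS_TRUE(mService->reclaimResource(kLowPriorityPid, resources, &result));
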
diff --git a/services/mediaresourcemanager/test/ResourceManagerService_test.cpp b/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
index 702935d..a029d45 100644
--- a/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
+++ b/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
@@ -16,197 +16,17 @@
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "ResourceManagerService_test"
+
 #include <utils/Log.h>
 
-#include <gtest/gtest.h>
-
+#include "ResourceManagerServiceTestUtils.h"
 #include "ResourceManagerService.h"
-#include <aidl/android/media/BnResourceManagerClient.h>
-#include <media/MediaResource.h>
-#include <media/MediaResourcePolicy.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/ProcessInfoInterface.h>
-
-namespace aidl {
-namespace android {
-namespace media {
-bool operator== (const MediaResourceParcel& lhs, const MediaResourceParcel& rhs) {
-    return lhs.type == rhs.type && lhs.subType == rhs.subType &&
-            lhs.id == rhs.id && lhs.value == rhs.value;
-}}}}
 
 namespace android {
 
-using Status = ::ndk::ScopedAStatus;
-using ::aidl::android::media::BnResourceManagerClient;
-using ::aidl::android::media::IResourceManagerService;
-using ::aidl::android::media::IResourceManagerClient;
-
-static int64_t getId(const std::shared_ptr<IResourceManagerClient>& client) {
-    return (int64_t) client.get();
-}
-
-struct TestProcessInfo : public ProcessInfoInterface {
-    TestProcessInfo() {}
-    virtual ~TestProcessInfo() {}
-
-    virtual bool getPriority(int pid, int *priority) {
-        // For testing, use pid as priority.
-        // Lower the value higher the priority.
-        *priority = pid;
-        return true;
-    }
-
-    virtual bool isValidPid(int /* pid */) {
-        return true;
-    }
-
-private:
-    DISALLOW_EVIL_CONSTRUCTORS(TestProcessInfo);
-};
-
-struct TestSystemCallback :
-        public ResourceManagerService::SystemCallbackInterface {
-    TestSystemCallback() :
-        mLastEvent({EventType::INVALID, 0}), mEventCount(0) {}
-
-    enum EventType {
-        INVALID          = -1,
-        VIDEO_ON         = 0,
-        VIDEO_OFF        = 1,
-        VIDEO_RESET      = 2,
-        CPUSET_ENABLE    = 3,
-        CPUSET_DISABLE   = 4,
-    };
-
-    struct EventEntry {
-        EventType type;
-        int arg;
-    };
-
-    virtual void noteStartVideo(int uid) override {
-        mLastEvent = {EventType::VIDEO_ON, uid};
-        mEventCount++;
-    }
-
-    virtual void noteStopVideo(int uid) override {
-        mLastEvent = {EventType::VIDEO_OFF, uid};
-        mEventCount++;
-    }
-
-    virtual void noteResetVideo() override {
-        mLastEvent = {EventType::VIDEO_RESET, 0};
-        mEventCount++;
-    }
-
-    virtual bool requestCpusetBoost(bool enable) override {
-        mLastEvent = {enable ? EventType::CPUSET_ENABLE : EventType::CPUSET_DISABLE, 0};
-        mEventCount++;
-        return true;
-    }
-
-    size_t eventCount() { return mEventCount; }
-    EventType lastEventType() { return mLastEvent.type; }
-    EventEntry lastEvent() { return mLastEvent; }
-
-protected:
-    virtual ~TestSystemCallback() {}
-
-private:
-    EventEntry mLastEvent;
-    size_t mEventCount;
-
-    DISALLOW_EVIL_CONSTRUCTORS(TestSystemCallback);
-};
-
-
-struct TestClient : public BnResourceManagerClient {
-    TestClient(int pid, const std::shared_ptr<ResourceManagerService> &service)
-        : mReclaimed(false), mPid(pid), mService(service) {}
-
-    Status reclaimResource(bool* _aidl_return) override {
-        mService->removeClient(mPid, getId(ref<TestClient>()));
-        mReclaimed = true;
-        *_aidl_return = true;
-        return Status::ok();
-    }
-
-    Status getName(::std::string* _aidl_return) override {
-        *_aidl_return = "test_client";
-        return Status::ok();
-    }
-
-    bool reclaimed() const {
-        return mReclaimed;
-    }
-
-    void reset() {
-        mReclaimed = false;
-    }
-
-    virtual ~TestClient() {}
-
-private:
-    bool mReclaimed;
-    int mPid;
-    std::shared_ptr<ResourceManagerService> mService;
-    DISALLOW_EVIL_CONSTRUCTORS(TestClient);
-};
-
-static const int kTestPid1 = 30;
-static const int kTestUid1 = 1010;
-
-static const int kTestPid2 = 20;
-static const int kTestUid2 = 1011;
-
-static const int kLowPriorityPid = 40;
-static const int kMidPriorityPid = 25;
-static const int kHighPriorityPid = 10;
-
-using EventType = TestSystemCallback::EventType;
-using EventEntry = TestSystemCallback::EventEntry;
-bool operator== (const EventEntry& lhs, const EventEntry& rhs) {
-    return lhs.type == rhs.type && lhs.arg == rhs.arg;
-}
-
-#define CHECK_STATUS_TRUE(condition) \
-    EXPECT_TRUE((condition).isOk() && (result))
-
-#define CHECK_STATUS_FALSE(condition) \
-    EXPECT_TRUE((condition).isOk() && !(result))
-
-class ResourceManagerServiceTest : public ::testing::Test {
+class ResourceManagerServiceTest : public ResourceManagerServiceTestBase {
 public:
-    ResourceManagerServiceTest()
-        : mSystemCB(new TestSystemCallback()),
-          mService(::ndk::SharedRefBase::make<ResourceManagerService>(
-                  new TestProcessInfo, mSystemCB)),
-          mTestClient1(::ndk::SharedRefBase::make<TestClient>(kTestPid1, mService)),
-          mTestClient2(::ndk::SharedRefBase::make<TestClient>(kTestPid2, mService)),
-          mTestClient3(::ndk::SharedRefBase::make<TestClient>(kTestPid2, mService)) {
-    }
-
-protected:
-    static bool isEqualResources(const std::vector<MediaResourceParcel> &resources1,
-            const ResourceList &resources2) {
-        // convert resource1 to ResourceList
-        ResourceList r1;
-        for (size_t i = 0; i < resources1.size(); ++i) {
-            const auto &res = resources1[i];
-            const auto resType = std::tuple(res.type, res.subType, res.id);
-            r1[resType] = res;
-        }
-        return r1 == resources2;
-    }
-
-    static void expectEqResourceInfo(const ResourceInfo &info,
-            int uid,
-            std::shared_ptr<IResourceManagerClient> client,
-            const std::vector<MediaResourceParcel> &resources) {
-        EXPECT_EQ(uid, info.uid);
-        EXPECT_EQ(client, info.client);
-        EXPECT_TRUE(isEqualResources(resources, info.resources));
-    }
+    ResourceManagerServiceTest() : ResourceManagerServiceTestBase() {}
 
     void verifyClients(bool c1, bool c2, bool c3) {
         TestClient *client1 = static_cast<TestClient*>(mTestClient1.get());
@@ -520,6 +340,30 @@
             // clean up client 3 which still left
             mService->removeClient(kTestPid2, getId(mTestClient3));
         }
+
+        {
+            addResource();
+            mService->mSupportsSecureWithNonSecureCodec = true;
+
+            mService->markClientForPendingRemoval(kTestPid2, getId(mTestClient2));
+
+            // client marked for pending removal got reclaimed
+            EXPECT_TRUE(mService->reclaimResourcesFromClientsPendingRemoval(kTestPid2).isOk());
+            verifyClients(false /* c1 */, true /* c2 */, false /* c3 */);
+
+            // No more clients marked for removal
+            EXPECT_TRUE(mService->reclaimResourcesFromClientsPendingRemoval(kTestPid2).isOk());
+            verifyClients(false /* c1 */, false /* c2 */, false /* c3 */);
+
+            mService->markClientForPendingRemoval(kTestPid2, getId(mTestClient3));
+
+            // client marked for pending removal got reclaimed
+            EXPECT_TRUE(mService->reclaimResourcesFromClientsPendingRemoval(kTestPid2).isOk());
+            verifyClients(false /* c1 */, false /* c2 */, true /* c3 */);
+
+            // clean up client 1, which is still registered
+            mService->removeClient(kTestPid1, getId(mTestClient1));
+        }
     }
 
     void testRemoveClient() {
@@ -881,12 +725,6 @@
         EXPECT_EQ(4u, mSystemCB->eventCount());
         EXPECT_EQ(EventType::CPUSET_DISABLE, mSystemCB->lastEventType());
     }
-
-    sp<TestSystemCallback> mSystemCB;
-    std::shared_ptr<ResourceManagerService> mService;
-    std::shared_ptr<IResourceManagerClient> mTestClient1;
-    std::shared_ptr<IResourceManagerClient> mTestClient2;
-    std::shared_ptr<IResourceManagerClient> mTestClient3;
 };
 
 TEST_F(ResourceManagerServiceTest, config) {
diff --git a/services/mediaresourcemanager/test/ResourceObserverService_test.cpp b/services/mediaresourcemanager/test/ResourceObserverService_test.cpp
new file mode 100644
index 0000000..4c26246
--- /dev/null
+++ b/services/mediaresourcemanager/test/ResourceObserverService_test.cpp
@@ -0,0 +1,463 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ResourceObserverService_test"
+
+#include <iostream>
+#include <list>
+
+#include <aidl/android/media/BnResourceObserver.h>
+#include <utils/Log.h>
+#include "ResourceObserverService.h"
+#include "ResourceManagerServiceTestUtils.h"
+
+namespace aidl {
+namespace android {
+namespace media {
+bool operator==(const MediaObservableParcel& lhs, const MediaObservableParcel& rhs) {
+    return lhs.type == rhs.type && lhs.value == rhs.value;
+}
+}}} // namespace ::aidl::android::media
+
+namespace android {
+
+using ::aidl::android::media::BnResourceObserver;
+using ::aidl::android::media::MediaObservableParcel;
+using ::aidl::android::media::MediaObservableType;
+
+#define BUSY ::aidl::android::media::MediaObservableEvent::kBusy
+#define IDLE ::aidl::android::media::MediaObservableEvent::kIdle
+#define ALL ::aidl::android::media::MediaObservableEvent::kAll
+
+struct EventTracker {
+    struct Event {
+        enum { NoEvent, Busy, Idle } type = NoEvent;
+        int uid = 0;
+        int pid = 0;
+        std::vector<MediaObservableParcel> observables;
+    };
+
+    static const Event NoEvent;
+
+    static std::string toString(const MediaObservableParcel& observable) {
+        return "{" + ::aidl::android::media::toString(observable.type)
+        + ", " + std::to_string(observable.value) + "}";
+    }
+    static std::string toString(const Event& event) {
+        std::string eventStr;
+        switch (event.type) {
+        case Event::Busy:
+            eventStr = "Busy";
+            break;
+        case Event::Idle:
+            eventStr = "Idle";
+            break;
+        default:
+            return "NoEvent";
+        }
+        std::string observableStr;
+        for (auto &observable : event.observables) {
+            if (!observableStr.empty()) {
+                observableStr += ", ";
+            }
+            observableStr += toString(observable);
+        }
+        return "{" + eventStr + ", " + std::to_string(event.uid) + ", "
+                + std::to_string(event.pid) + ", {" + observableStr + "}}";
+    }
+
+    static Event Busy(int uid, int pid, const std::vector<MediaObservableParcel>& observables) {
+        return { Event::Busy, uid, pid, observables };
+    }
+    static Event Idle(int uid, int pid, const std::vector<MediaObservableParcel>& observables) {
+        return { Event::Idle, uid, pid, observables };
+    }
+
+    // Pop 1 event from front, wait for up to timeoutUs if empty.
+    const Event& pop(int64_t timeoutUs = 0) {
+        std::unique_lock lock(mLock);
+
+        if (mEventQueue.empty() && timeoutUs > 0) {
+            mCondition.wait_for(lock, std::chrono::microseconds(timeoutUs));
+        }
+
+        if (mEventQueue.empty()) {
+            mPoppedEvent = NoEvent;
+        } else {
+            mPoppedEvent = *mEventQueue.begin();
+            mEventQueue.pop_front();
+        }
+
+        return mPoppedEvent;
+    }
+
+    // Push 1 event to back.
+    void append(const Event& event) {
+        ALOGD("%s", toString(event).c_str());
+
+        std::unique_lock lock(mLock);
+
+        mEventQueue.push_back(event);
+        mCondition.notify_one();
+    }
+
+private:
+    std::mutex mLock;
+    std::condition_variable mCondition;
+    Event mPoppedEvent;
+    std::list<Event> mEventQueue;
+};
+
+const EventTracker::Event EventTracker::NoEvent;
+
+// Operators for GTest macros.
+bool operator==(const EventTracker::Event& lhs, const EventTracker::Event& rhs) {
+    return lhs.type == rhs.type && lhs.uid == rhs.uid && lhs.pid == rhs.pid &&
+            lhs.observables == rhs.observables;
+}
+
+std::ostream& operator<<(std::ostream& str, const EventTracker::Event& v) {
+    str << EventTracker::toString(v);
+    return str;
+}
+
+struct TestObserver : public BnResourceObserver, public EventTracker {
+    TestObserver(const char *name) : mName(name) {}
+    ~TestObserver() = default;
+    Status onStatusChanged(MediaObservableEvent event, int32_t uid, int32_t pid,
+            const std::vector<MediaObservableParcel>& observables) override {
+        ALOGD("%s: %s", mName.c_str(), __FUNCTION__);
+        if (event == MediaObservableEvent::kBusy) {
+            append(Busy(uid, pid, observables));
+        } else {
+            append(Idle(uid, pid, observables));
+        }
+
+        return Status::ok();
+    }
+    std::string mName;
+};
+
+class ResourceObserverServiceTest : public ResourceManagerServiceTestBase {
+public:
+    ResourceObserverServiceTest() : ResourceManagerServiceTestBase(),
+        mObserverService(::ndk::SharedRefBase::make<ResourceObserverService>()),
+        mTestObserver1(::ndk::SharedRefBase::make<TestObserver>("observer1")),
+        mTestObserver2(::ndk::SharedRefBase::make<TestObserver>("observer2")),
+        mTestObserver3(::ndk::SharedRefBase::make<TestObserver>("observer3")) {
+        mService->setObserverService(mObserverService);
+    }
+
+    void registerObservers(MediaObservableEvent filter = ALL) {
+        std::vector<MediaObservableFilter> filters1, filters2, filters3;
+        filters1 = {{MediaObservableType::kVideoSecureCodec, filter}};
+        filters2 = {{MediaObservableType::kVideoNonSecureCodec, filter}};
+        filters3 = {{MediaObservableType::kVideoSecureCodec, filter},
+                   {MediaObservableType::kVideoNonSecureCodec, filter}};
+
+        // mTestObserver1 monitors secure video codecs.
+        EXPECT_TRUE(mObserverService->registerObserver(mTestObserver1, filters1).isOk());
+
+        // mTestObserver2 monitors non-secure video codecs.
+        EXPECT_TRUE(mObserverService->registerObserver(mTestObserver2, filters2).isOk());
+
+        // mTestObserver3 monitors both secure & non-secure video codecs.
+        EXPECT_TRUE(mObserverService->registerObserver(mTestObserver3, filters3).isOk());
+    }
+
+protected:
+    std::shared_ptr<ResourceObserverService> mObserverService;
+    std::shared_ptr<TestObserver> mTestObserver1;
+    std::shared_ptr<TestObserver> mTestObserver2;
+    std::shared_ptr<TestObserver> mTestObserver3;
+};
+
+TEST_F(ResourceObserverServiceTest, testRegisterObserver) {
+    std::vector<MediaObservableFilter> filters1;
+    Status status;
+
+    // Register with empty observables should fail.
+    status = mObserverService->registerObserver(mTestObserver1, filters1);
+    EXPECT_FALSE(status.isOk());
+    EXPECT_EQ(status.getServiceSpecificError(), BAD_VALUE);
+
+    // mTestObserver1 monitors secure video codecs.
+    filters1 = {{MediaObservableType::kVideoSecureCodec, ALL}};
+    EXPECT_TRUE(mObserverService->registerObserver(mTestObserver1, filters1).isOk());
+
+    // Register duplicates should fail.
+    status = mObserverService->registerObserver(mTestObserver1, filters1);
+    EXPECT_FALSE(status.isOk());
+    EXPECT_EQ(status.getServiceSpecificError(), ALREADY_EXISTS);
+}
+
+TEST_F(ResourceObserverServiceTest, testUnregisterObserver) {
+    std::vector<MediaObservableFilter> filters1;
+    Status status;
+
+    // Unregister without registering first should fail.
+    status = mObserverService->unregisterObserver(mTestObserver1);
+    EXPECT_FALSE(status.isOk());
+    EXPECT_EQ(status.getServiceSpecificError(), NAME_NOT_FOUND);
+
+    // mTestObserver1 monitors secure video codecs.
+    filters1 = {{MediaObservableType::kVideoSecureCodec, ALL}};
+    EXPECT_TRUE(mObserverService->registerObserver(mTestObserver1, filters1).isOk());
+    EXPECT_TRUE(mObserverService->unregisterObserver(mTestObserver1).isOk());
+
+    // Unregister again should fail.
+    status = mObserverService->unregisterObserver(mTestObserver1);
+    EXPECT_FALSE(status.isOk());
+    EXPECT_EQ(status.getServiceSpecificError(), NAME_NOT_FOUND);
+}
+
+TEST_F(ResourceObserverServiceTest, testAddResourceBasic) {
+    registerObservers();
+
+    std::vector<MediaObservableParcel> observables1, observables2, observables3;
+    observables1 = {{MediaObservableType::kVideoSecureCodec, 1}};
+    observables2 = {{MediaObservableType::kVideoNonSecureCodec, 1}};
+    observables3 = {{MediaObservableType::kVideoSecureCodec, 1},
+                   {MediaObservableType::kVideoNonSecureCodec, 1}};
+
+    std::vector<MediaResourceParcel> resources;
+    // Add secure video codec.
+    resources = {MediaResource::CodecResource(1 /*secure*/, 1 /*video*/)};
+    mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources);
+    EXPECT_EQ(mTestObserver1->pop(), EventTracker::Busy(kTestUid1, kTestPid1, observables1));
+    EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
+    EXPECT_EQ(mTestObserver3->pop(), EventTracker::Busy(kTestUid1, kTestPid1, observables1));
+
+    // Add non-secure video codec.
+    resources = {MediaResource::CodecResource(0 /*secure*/, 1 /*video*/)};
+    mService->addResource(kTestPid2, kTestUid2, getId(mTestClient2), mTestClient2, resources);
+    EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
+    EXPECT_EQ(mTestObserver2->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables2));
+    EXPECT_EQ(mTestObserver3->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables2));
+
+    // Add secure & non-secure video codecs.
+    resources = {MediaResource::CodecResource(1 /*secure*/, 1 /*video*/),
+                 MediaResource::CodecResource(0 /*secure*/, 1 /*video*/)};
+    mService->addResource(kTestPid2, kTestUid2, getId(mTestClient3), mTestClient3, resources);
+    EXPECT_EQ(mTestObserver1->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables1));
+    EXPECT_EQ(mTestObserver2->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables2));
+    EXPECT_EQ(mTestObserver3->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables3));
+
+    // Add additional audio codecs; they should be ignored.
+    resources.push_back(MediaResource::CodecResource(1 /*secure*/, 0 /*video*/));
+    resources.push_back(MediaResource::CodecResource(0 /*secure*/, 0 /*video*/));
+    mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources);
+    EXPECT_EQ(mTestObserver1->pop(), EventTracker::Busy(kTestUid1, kTestPid1, observables1));
+    EXPECT_EQ(mTestObserver2->pop(), EventTracker::Busy(kTestUid1, kTestPid1, observables2));
+    EXPECT_EQ(mTestObserver3->pop(), EventTracker::Busy(kTestUid1, kTestPid1, observables3));
+}
+
+TEST_F(ResourceObserverServiceTest, testAddResourceMultiple) {
+    registerObservers();
+
+    std::vector<MediaObservableParcel> observables1, observables2, observables3;
+    observables1 = {{MediaObservableType::kVideoSecureCodec, 1}};
+    observables2 = {{MediaObservableType::kVideoNonSecureCodec, 1}};
+    observables3 = {{MediaObservableType::kVideoSecureCodec, 1},
+                   {MediaObservableType::kVideoNonSecureCodec, 1}};
+
+    std::vector<MediaResourceParcel> resources;
+
+    // Add multiple secure & non-secure video codecs.
+    // Multiple entries of the same type should be merged; counts should be propagated correctly.
+    resources = {MediaResource::CodecResource(1 /*secure*/, 1 /*video*/),
+                 MediaResource::CodecResource(1 /*secure*/, 1 /*video*/),
+                 MediaResource::CodecResource(0 /*secure*/, 1 /*video*/, 3 /*count*/)};
+    observables1 = {{MediaObservableType::kVideoSecureCodec, 2}};
+    observables2 = {{MediaObservableType::kVideoNonSecureCodec, 3}};
+    observables3 = {{MediaObservableType::kVideoSecureCodec, 2},
+                   {MediaObservableType::kVideoNonSecureCodec, 3}};
+    mService->addResource(kTestPid2, kTestUid2, getId(mTestClient3), mTestClient3, resources);
+    EXPECT_EQ(mTestObserver1->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables1));
+    EXPECT_EQ(mTestObserver2->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables2));
+    EXPECT_EQ(mTestObserver3->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables3));
+}
+
+TEST_F(ResourceObserverServiceTest, testRemoveResourceBasic) {
+    registerObservers();
+
+    std::vector<MediaObservableParcel> observables1, observables2, observables3;
+    observables1 = {{MediaObservableType::kVideoSecureCodec, 1}};
+    observables2 = {{MediaObservableType::kVideoNonSecureCodec, 1}};
+    observables3 = {{MediaObservableType::kVideoSecureCodec, 1},
+                   {MediaObservableType::kVideoNonSecureCodec, 1}};
+
+    std::vector<MediaResourceParcel> resources;
+    // Add secure video codec to client1.
+    resources = {MediaResource::CodecResource(1 /*secure*/, 1 /*video*/)};
+    mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources);
+    EXPECT_EQ(mTestObserver1->pop(), EventTracker::Busy(kTestUid1, kTestPid1, observables1));
+    EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
+    EXPECT_EQ(mTestObserver3->pop(), EventTracker::Busy(kTestUid1, kTestPid1, observables1));
+    // Remove secure video codec. Observers 1 & 3 should receive updates.
+    mService->removeResource(kTestPid1, getId(mTestClient1), resources);
+    EXPECT_EQ(mTestObserver1->pop(), EventTracker::Idle(kTestUid1, kTestPid1, observables1));
+    EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
+    EXPECT_EQ(mTestObserver3->pop(), EventTracker::Idle(kTestUid1, kTestPid1, observables1));
+    // Remove secure video codec again, should have no event.
+    mService->removeResource(kTestPid1, getId(mTestClient1), resources);
+    EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
+    EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
+    EXPECT_EQ(mTestObserver3->pop(), EventTracker::NoEvent);
+    // Remove client1, should have no event.
+    mService->removeClient(kTestPid1, getId(mTestClient1));
+    EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
+    EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
+    EXPECT_EQ(mTestObserver3->pop(), EventTracker::NoEvent);
+
+    // Add non-secure video codec to client2.
+    resources = {MediaResource::CodecResource(0 /*secure*/, 1 /*video*/)};
+    mService->addResource(kTestPid2, kTestUid2, getId(mTestClient2), mTestClient2, resources);
+    EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
+    EXPECT_EQ(mTestObserver2->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables2));
+    EXPECT_EQ(mTestObserver3->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables2));
+    // Remove client2; observers 2 & 3 should receive updates.
+    mService->removeClient(kTestPid2, getId(mTestClient2));
+    EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
+    EXPECT_EQ(mTestObserver2->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables2));
+    EXPECT_EQ(mTestObserver3->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables2));
+    // Remove non-secure codec after client2 removed, should have no event.
+    mService->removeResource(kTestPid2, getId(mTestClient2), resources);
+    EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
+    EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
+    EXPECT_EQ(mTestObserver3->pop(), EventTracker::NoEvent);
+    // Remove client2 again, should have no event.
+    mService->removeClient(kTestPid2, getId(mTestClient2));
+    EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
+    EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
+    EXPECT_EQ(mTestObserver3->pop(), EventTracker::NoEvent);
+
+    // Add secure & non-secure video codecs, plus audio codecs (that's ignored).
+    resources = {MediaResource::CodecResource(1 /*secure*/, 1 /*video*/),
+                 MediaResource::CodecResource(0 /*secure*/, 1 /*video*/),
+                 MediaResource::CodecResource(1 /*secure*/, 0 /*video*/),
+                 MediaResource::CodecResource(0 /*secure*/, 0 /*video*/)};
+    mService->addResource(kTestPid2, kTestUid2, getId(mTestClient3), mTestClient3, resources);
+    EXPECT_EQ(mTestObserver1->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables1));
+    EXPECT_EQ(mTestObserver2->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables2));
+    EXPECT_EQ(mTestObserver3->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables3));
+    // Remove one audio codec, should have no event.
+    resources = {MediaResource::CodecResource(1 /*secure*/, 0 /*video*/)};
+    mService->removeResource(kTestPid2, getId(mTestClient3), resources);
+    EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
+    EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
+    EXPECT_EQ(mTestObserver3->pop(), EventTracker::NoEvent);
+    // Remove the other audio codec and the secure video codec; only the secure
+    // video codec removal should be reported.
+    resources = {MediaResource::CodecResource(0 /*secure*/, 0 /*video*/),
+                 MediaResource::CodecResource(1 /*secure*/, 1 /*video*/)};
+    mService->removeResource(kTestPid2, getId(mTestClient3), resources);
+    EXPECT_EQ(mTestObserver1->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables1));
+    EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
+    EXPECT_EQ(mTestObserver3->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables1));
+    // Remove client3 entirely. Non-secure video codec removal should be reported.
+    mService->removeClient(kTestPid2, getId(mTestClient3));
+    EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
+    EXPECT_EQ(mTestObserver2->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables2));
+    EXPECT_EQ(mTestObserver3->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables2));
+}
+
+TEST_F(ResourceObserverServiceTest, testRemoveResourceMultiple) {
+    registerObservers();
+
+    std::vector<MediaObservableParcel> observables1, observables2, observables3;
+    observables1 = {{MediaObservableType::kVideoSecureCodec, 1}};
+    observables2 = {{MediaObservableType::kVideoNonSecureCodec, 1}};
+    observables3 = {{MediaObservableType::kVideoSecureCodec, 1},
+                    {MediaObservableType::kVideoNonSecureCodec, 1}};
+
+    std::vector<MediaResourceParcel> resources;
+
+    // Add multiple secure & non-secure video codecs, plus audio codecs (which are ignored).
+    // (ResourceManager will merge these internally.)
+    resources = {MediaResource::CodecResource(1 /*secure*/, 1 /*video*/),
+                 MediaResource::CodecResource(0 /*secure*/, 1 /*video*/, 4 /*count*/),
+                 MediaResource::CodecResource(1 /*secure*/, 0 /*video*/),
+                 MediaResource::CodecResource(0 /*secure*/, 0 /*video*/)};
+    mService->addResource(kTestPid2, kTestUid2, getId(mTestClient3), mTestClient3, resources);
+    observables1 = {{MediaObservableType::kVideoSecureCodec, 1}};
+    observables2 = {{MediaObservableType::kVideoNonSecureCodec, 4}};
+    observables3 = {{MediaObservableType::kVideoSecureCodec, 1},
+                    {MediaObservableType::kVideoNonSecureCodec, 4}};
+    EXPECT_EQ(mTestObserver1->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables1));
+    EXPECT_EQ(mTestObserver2->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables2));
+    EXPECT_EQ(mTestObserver3->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables3));
+    // Remove one audio codec, 2 secure video codecs and 2 non-secure video codecs.
+    // 1 secure video codec removal and 2 non-secure video codec removals should be reported.
+    resources = {MediaResource::CodecResource(0 /*secure*/, 0 /*video*/),
+                 MediaResource::CodecResource(1 /*secure*/, 1 /*video*/),
+                 MediaResource::CodecResource(1 /*secure*/, 1 /*video*/),
+                 MediaResource::CodecResource(0 /*secure*/, 1 /*video*/, 2 /*count*/)};
+    mService->removeResource(kTestPid2, getId(mTestClient3), resources);
+    observables1 = {{MediaObservableType::kVideoSecureCodec, 1}};
+    observables2 = {{MediaObservableType::kVideoNonSecureCodec, 2}};
+    observables3 = {{MediaObservableType::kVideoSecureCodec, 1},
+                    {MediaObservableType::kVideoNonSecureCodec, 2}};
+    EXPECT_EQ(mTestObserver1->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables1));
+    EXPECT_EQ(mTestObserver2->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables2));
+    EXPECT_EQ(mTestObserver3->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables3));
+    // Remove client3 entirely. 2 non-secure video codec removals should be reported.
+    mService->removeClient(kTestPid2, getId(mTestClient3));
+    EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
+    EXPECT_EQ(mTestObserver2->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables2));
+    EXPECT_EQ(mTestObserver3->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables2));
+}
+
+TEST_F(ResourceObserverServiceTest, testEventFilters) {
+    // Register observers with different event filters.
+    std::vector<MediaObservableFilter> filters1, filters2, filters3;
+    filters1 = {{MediaObservableType::kVideoSecureCodec, BUSY}};
+    filters2 = {{MediaObservableType::kVideoNonSecureCodec, IDLE}};
+    filters3 = {{MediaObservableType::kVideoSecureCodec, IDLE},
+               {MediaObservableType::kVideoNonSecureCodec, BUSY}};
+
+    // mTestObserver1 monitors secure video codecs.
+    EXPECT_TRUE(mObserverService->registerObserver(mTestObserver1, filters1).isOk());
+
+    // mTestObserver2 monitors non-secure video codecs.
+    EXPECT_TRUE(mObserverService->registerObserver(mTestObserver2, filters2).isOk());
+
+    // mTestObserver3 monitors both secure & non-secure video codecs.
+    EXPECT_TRUE(mObserverService->registerObserver(mTestObserver3, filters3).isOk());
+
+    std::vector<MediaObservableParcel> observables1, observables2;
+    observables1 = {{MediaObservableType::kVideoSecureCodec, 1}};
+    observables2 = {{MediaObservableType::kVideoNonSecureCodec, 1}};
+
+    std::vector<MediaResourceParcel> resources;
+
+    // Add secure & non-secure video codecs.
+    resources = {MediaResource::CodecResource(1 /*secure*/, 1 /*video*/),
+                 MediaResource::CodecResource(0 /*secure*/, 1 /*video*/)};
+    mService->addResource(kTestPid2, kTestUid2, getId(mTestClient3), mTestClient3, resources);
+    EXPECT_EQ(mTestObserver1->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables1));
+    EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
+    EXPECT_EQ(mTestObserver3->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables2));
+
+    // Remove secure & non-secure video codecs.
+    mService->removeResource(kTestPid2, getId(mTestClient3), resources);
+    EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
+    EXPECT_EQ(mTestObserver2->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables2));
+    EXPECT_EQ(mTestObserver3->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables1));
+}
+
+} // namespace android
diff --git a/services/mediaresourcemanager/test/build_and_run_all_unit_tests.sh b/services/mediaresourcemanager/test/build_and_run_all_unit_tests.sh
new file mode 100755
index 0000000..1c4ae98
--- /dev/null
+++ b/services/mediaresourcemanager/test/build_and_run_all_unit_tests.sh
@@ -0,0 +1,34 @@
+#!/bin/bash
+#
+# Run tests in this directory.
+#
+
+if [ "$SYNC_FINISHED" != true ]; then
+  if [ -z "$ANDROID_BUILD_TOP" ]; then
+      echo "Android build environment not set"
+      exit -1
+  fi
+
+  # ensure we have mm
+  . $ANDROID_BUILD_TOP/build/envsetup.sh
+
+  mm
+
+  echo "waiting for device"
+
+  adb root && adb wait-for-device remount && adb sync
+fi
+
+echo "========================================"
+
+echo "testing ResourceManagerService"
+#adb shell /data/nativetest64/ResourceManagerService_test/ResourceManagerService_test
+adb shell /data/nativetest/ResourceManagerService_test/ResourceManagerService_test
+
+echo "testing ServiceLog"
+#adb shell /data/nativetest64/ServiceLog_test/ServiceLog_test
+adb shell /data/nativetest/ServiceLog_test/ServiceLog_test
+
+echo "testing ResourceObserverService"
+#adb shell /data/nativetest64/ResourceObserverService_test/ResourceObserverService_test
+adb shell /data/nativetest/ResourceObserverService_test/ResourceObserverService_test
diff --git a/services/mediatranscoding/Android.bp b/services/mediatranscoding/Android.bp
index 17347a9..2dbcf5a 100644
--- a/services/mediatranscoding/Android.bp
+++ b/services/mediatranscoding/Android.bp
@@ -2,16 +2,25 @@
 cc_library_shared {
     name: "libmediatranscodingservice",
 
-    srcs: ["MediaTranscodingService.cpp"],
+    srcs: [
+        "MediaTranscodingService.cpp",
+        "SimulatedTranscoder.cpp",
+    ],
 
     shared_libs: [
         "libbase",
+        "libbinder",
         "libbinder_ndk",
+        "libcutils",
         "liblog",
         "libmediatranscoding",
         "libutils",
     ],
 
+    export_shared_lib_headers: [
+        "libmediatranscoding",
+    ],
+
     static_libs: [
         "mediatranscoding_aidl_interface-ndk_platform",
     ],
@@ -44,18 +53,6 @@
         "mediatranscoding_aidl_interface-ndk_platform",
     ],
 
-    target: {
-        android: {
-            product_variables: {
-                malloc_not_svelte: {
-                    // Scudo increases memory footprint, so only enable on
-                    // non-svelte devices.
-                    shared_libs: ["libc_scudo"],
-                },
-            },
-        },
-    },
-
     init_rc: ["mediatranscoding.rc"],
 
     cflags: [
diff --git a/services/mediatranscoding/MediaTranscodingService.cpp b/services/mediatranscoding/MediaTranscodingService.cpp
index 82d4161..56f327e 100644
--- a/services/mediatranscoding/MediaTranscodingService.cpp
+++ b/services/mediatranscoding/MediaTranscodingService.cpp
@@ -16,13 +16,22 @@
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "MediaTranscodingService"
-#include <MediaTranscodingService.h>
+#include "MediaTranscodingService.h"
+
 #include <android/binder_manager.h>
 #include <android/binder_process.h>
-#include <private/android_filesystem_config.h>
+#include <binder/IServiceManager.h>
+#include <cutils/properties.h>
+#include <media/TranscoderWrapper.h>
+#include <media/TranscodingClientManager.h>
+#include <media/TranscodingResourcePolicy.h>
+#include <media/TranscodingSessionController.h>
+#include <media/TranscodingUidPolicy.h>
 #include <utils/Log.h>
 #include <utils/Vector.h>
 
+#include "SimulatedTranscoder.h"
+
 namespace android {
 
 // Convenience methods for constructing binder::Status objects for error returns
@@ -31,23 +40,17 @@
             errorCode,                                \
             String8::format("%s:%d: " errorString, __FUNCTION__, __LINE__, ##__VA_ARGS__))
 
-// Can MediaTranscoding service trust the caller based on the calling UID?
-// TODO(hkuang): Add MediaProvider's UID.
-static bool isTrustedCallingUid(uid_t uid) {
-    switch (uid) {
-    case AID_ROOT:  // root user
-    case AID_SYSTEM:
-    case AID_SHELL:
-    case AID_MEDIA:  // mediaserver
-        return true;
-    default:
-        return false;
-    }
-}
-
-MediaTranscodingService::MediaTranscodingService()
-      : mTranscodingClientManager(TranscodingClientManager::getInstance()) {
+MediaTranscodingService::MediaTranscodingService(
+        const std::shared_ptr<TranscoderInterface>& transcoder)
+      : mUidPolicy(new TranscodingUidPolicy()),
+        mResourcePolicy(new TranscodingResourcePolicy()),
+        mSessionController(
+                new TranscodingSessionController(transcoder, mUidPolicy, mResourcePolicy)),
+        mClientManager(new TranscodingClientManager(mSessionController)) {
     ALOGV("MediaTranscodingService is created");
+    transcoder->setCallback(mSessionController);
+    mUidPolicy->setCallback(mSessionController);
+    mResourcePolicy->setCallback(mSessionController);
 }
 
 MediaTranscodingService::~MediaTranscodingService() {
@@ -56,6 +59,17 @@
 
 binder_status_t MediaTranscodingService::dump(int fd, const char** /*args*/, uint32_t /*numArgs*/) {
     String8 result;
+
+    // TODO(b/161549994): Remove libbinder dependencies for mainline.
+    if (checkCallingPermission(String16("android.permission.DUMP")) == false) {
+        result.format(
+                "Permission Denial: "
+                "can't dump MediaTranscodingService from pid=%d, uid=%d\n",
+                AIBinder_getCallingPid(), AIBinder_getCallingUid());
+        write(fd, result.string(), result.size());
+        return PERMISSION_DENIED;
+    }
+
     const size_t SIZE = 256;
     char buffer[SIZE];
 
@@ -64,14 +78,22 @@
     write(fd, result.string(), result.size());
 
     Vector<String16> args;
-    mTranscodingClientManager.dumpAllClients(fd, args);
+    mClientManager->dumpAllClients(fd, args);
+    mSessionController->dumpAllSessions(fd, args);
     return OK;
 }
 
 //static
 void MediaTranscodingService::instantiate() {
+    std::shared_ptr<TranscoderInterface> transcoder;
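+    // Use the simulated transcoder when the debug property is set; otherwise use the
+    // real TranscoderWrapper.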
+    if (property_get_bool("debug.transcoding.simulated_transcoder", false)) {
+        transcoder = std::make_shared<SimulatedTranscoder>();
+    } else {
+        transcoder = std::make_shared<TranscoderWrapper>();
+    }
+
     std::shared_ptr<MediaTranscodingService> service =
-            ::ndk::SharedRefBase::make<MediaTranscodingService>();
+            ::ndk::SharedRefBase::make<MediaTranscodingService>(transcoder);
     binder_status_t status =
             AServiceManager_addService(service->asBinder().get(), getServiceName());
     if (status != STATUS_OK) {
@@ -80,118 +102,31 @@
 }
 
 Status MediaTranscodingService::registerClient(
-        const std::shared_ptr<ITranscodingServiceClient>& in_client,
-        const std::string& in_opPackageName, int32_t in_clientUid, int32_t in_clientPid,
-        int32_t* _aidl_return) {
-    if (in_client == nullptr) {
-        ALOGE("Client can not be null");
-        *_aidl_return = kInvalidJobId;
-        return Status::fromServiceSpecificError(ERROR_ILLEGAL_ARGUMENT);
-    }
-
-    int32_t callingPid = AIBinder_getCallingPid();
-    int32_t callingUid = AIBinder_getCallingUid();
-
-    // Check if we can trust clientUid. Only privilege caller could forward the uid on app client's behalf.
-    if (in_clientUid == USE_CALLING_UID) {
-        in_clientUid = callingUid;
-    } else if (!isTrustedCallingUid(callingUid)) {
-        ALOGE("MediaTranscodingService::registerClient failed (calling PID %d, calling UID %d) "
-              "rejected "
-              "(don't trust clientUid %d)",
-              in_clientPid, in_clientUid, in_clientUid);
-        return STATUS_ERROR_FMT(ERROR_PERMISSION_DENIED,
-                                "Untrusted caller (calling PID %d, UID %d) trying to "
-                                "register client",
-                                in_clientPid, in_clientUid);
-    }
-
-    // Check if we can trust clientPid. Only privilege caller could forward the pid on app client's behalf.
-    if (in_clientPid == USE_CALLING_PID) {
-        in_clientPid = callingPid;
-    } else if (!isTrustedCallingUid(callingUid)) {
-        ALOGE("MediaTranscodingService::registerClient client failed (calling PID %d, calling UID "
-              "%d) rejected "
-              "(don't trust clientPid %d)",
-              in_clientPid, in_clientUid, in_clientPid);
-        return STATUS_ERROR_FMT(ERROR_PERMISSION_DENIED,
-                                "Untrusted caller (calling PID %d, UID %d) trying to "
-                                "register client",
-                                in_clientPid, in_clientUid);
-    }
-
-    // We know the clientId must be equal to its pid as we assigned client's pid as its clientId.
-    int32_t clientId = in_clientPid;
-
-    // Checks if the client already registers.
-    if (mTranscodingClientManager.isClientIdRegistered(clientId)) {
-        return Status::fromServiceSpecificError(ERROR_ALREADY_EXISTS);
+        const std::shared_ptr<ITranscodingClientCallback>& in_callback,
+        const std::string& in_clientName, const std::string& in_opPackageName,
+        std::shared_ptr<ITranscodingClient>* _aidl_return) {
+    if (in_callback == nullptr) {
+        *_aidl_return = nullptr;
+        return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT, "Client callback cannot be null!");
     }
 
     // Creates the client and uses its process id as client id.
-    std::unique_ptr<TranscodingClientManager::ClientInfo> newClient =
-            std::make_unique<TranscodingClientManager::ClientInfo>(
-                    in_client, clientId, in_clientPid, in_clientUid, in_opPackageName);
-    status_t err = mTranscodingClientManager.addClient(std::move(newClient));
+    std::shared_ptr<ITranscodingClient> newClient;
+
+    status_t err =
+            mClientManager->addClient(in_callback, in_clientName, in_opPackageName, &newClient);
     if (err != OK) {
-        *_aidl_return = kInvalidClientId;
+        *_aidl_return = nullptr;
         return STATUS_ERROR_FMT(err, "Failed to add client to TranscodingClientManager");
     }
 
-    ALOGD("Assign client: %s pid: %d, uid: %d with id: %d", in_opPackageName.c_str(), in_clientPid,
-          in_clientUid, clientId);
-
-    *_aidl_return = clientId;
-    return Status::ok();
-}
-
-Status MediaTranscodingService::unregisterClient(int32_t clientId, bool* _aidl_return) {
-    ALOGD("unregisterClient id: %d", clientId);
-    int32_t callingUid = AIBinder_getCallingUid();
-    int32_t callingPid = AIBinder_getCallingPid();
-
-    // Only the client with clientId or the trusted caller could unregister the client.
-    if (callingPid != clientId) {
-        if (!isTrustedCallingUid(callingUid)) {
-            ALOGE("Untrusted caller (calling PID %d, UID %d) trying to "
-                  "unregister client with id: %d",
-                  callingUid, callingPid, clientId);
-            *_aidl_return = true;
-            return STATUS_ERROR_FMT(ERROR_PERMISSION_DENIED,
-                                    "Untrusted caller (calling PID %d, UID %d) trying to "
-                                    "unregister client with id: %d",
-                                    callingUid, callingPid, clientId);
-        }
-    }
-
-    *_aidl_return = (mTranscodingClientManager.removeClient(clientId) == OK);
+    *_aidl_return = newClient;
     return Status::ok();
 }
 
 Status MediaTranscodingService::getNumOfClients(int32_t* _aidl_return) {
     ALOGD("MediaTranscodingService::getNumOfClients");
-    *_aidl_return = mTranscodingClientManager.getNumOfClients();
-    return Status::ok();
-}
-
-Status MediaTranscodingService::submitRequest(int32_t /*clientId*/,
-                                              const TranscodingRequestParcel& /*request*/,
-                                              TranscodingJobParcel* /*job*/,
-                                              int32_t* /*_aidl_return*/) {
-    // TODO(hkuang): Add implementation.
-    return Status::ok();
-}
-
-Status MediaTranscodingService::cancelJob(int32_t /*in_clientId*/, int32_t /*in_jobId*/,
-                                          bool* /*_aidl_return*/) {
-    // TODO(hkuang): Add implementation.
-    return Status::ok();
-}
-
-Status MediaTranscodingService::getJobWithId(int32_t /*in_jobId*/,
-                                             TranscodingJobParcel* /*out_job*/,
-                                             bool* /*_aidl_return*/) {
-    // TODO(hkuang): Add implementation.
+    *_aidl_return = mClientManager->getNumOfClients();
     return Status::ok();
 }
 
diff --git a/services/mediatranscoding/MediaTranscodingService.h b/services/mediatranscoding/MediaTranscodingService.h
index cc69727..428f777 100644
--- a/services/mediatranscoding/MediaTranscodingService.h
+++ b/services/mediatranscoding/MediaTranscodingService.h
@@ -19,44 +19,39 @@
 
 #include <aidl/android/media/BnMediaTranscodingService.h>
 #include <binder/IServiceManager.h>
-#include <media/TranscodingClientManager.h>
 
 namespace android {
 
 using Status = ::ndk::ScopedAStatus;
 using ::aidl::android::media::BnMediaTranscodingService;
-using ::aidl::android::media::ITranscodingServiceClient;
-using ::aidl::android::media::TranscodingJobParcel;
+using ::aidl::android::media::ITranscodingClient;
+using ::aidl::android::media::ITranscodingClientCallback;
 using ::aidl::android::media::TranscodingRequestParcel;
+using ::aidl::android::media::TranscodingSessionParcel;
+class TranscodingClientManager;
+class TranscodingSessionController;
+class TranscoderInterface;
+class UidPolicyInterface;
+class ResourcePolicyInterface;
 
 class MediaTranscodingService : public BnMediaTranscodingService {
 public:
-    static constexpr int32_t kInvalidJobId = -1;
+    static constexpr int32_t kInvalidSessionId = -1;
     static constexpr int32_t kInvalidClientId = -1;
 
-    MediaTranscodingService();
+    MediaTranscodingService(const std::shared_ptr<TranscoderInterface>& transcoder);
     virtual ~MediaTranscodingService();
 
     static void instantiate();
 
     static const char* getServiceName() { return "media.transcoding"; }
 
-    Status registerClient(const std::shared_ptr<ITranscodingServiceClient>& in_client,
-                          const std::string& in_opPackageName, int32_t in_clientUid,
-                          int32_t in_clientPid, int32_t* _aidl_return) override;
-
-    Status unregisterClient(int32_t clientId, bool* _aidl_return) override;
+    Status registerClient(const std::shared_ptr<ITranscodingClientCallback>& in_callback,
+                          const std::string& in_clientName, const std::string& in_opPackageName,
+                          std::shared_ptr<ITranscodingClient>* _aidl_return) override;
 
     Status getNumOfClients(int32_t* _aidl_return) override;
 
-    Status submitRequest(int32_t in_clientId, const TranscodingRequestParcel& in_request,
-                         TranscodingJobParcel* out_job, int32_t* _aidl_return) override;
-
-    Status cancelJob(int32_t in_clientId, int32_t in_jobId, bool* _aidl_return) override;
-
-    Status getJobWithId(int32_t in_jobId, TranscodingJobParcel* out_job,
-                        bool* _aidl_return) override;
-
     virtual inline binder_status_t dump(int /*fd*/, const char** /*args*/, uint32_t /*numArgs*/);
 
 private:
@@ -64,7 +59,10 @@
 
     mutable std::mutex mServiceLock;
 
-    TranscodingClientManager& mTranscodingClientManager;
+    std::shared_ptr<UidPolicyInterface> mUidPolicy;
+    std::shared_ptr<ResourcePolicyInterface> mResourcePolicy;
+    std::shared_ptr<TranscodingSessionController> mSessionController;
+    std::shared_ptr<TranscodingClientManager> mClientManager;
 };
 
 }  // namespace android
diff --git a/services/mediatranscoding/SimulatedTranscoder.cpp b/services/mediatranscoding/SimulatedTranscoder.cpp
new file mode 100644
index 0000000..03ee886
--- /dev/null
+++ b/services/mediatranscoding/SimulatedTranscoder.cpp
@@ -0,0 +1,171 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "SimulatedTranscoder"
+#include "SimulatedTranscoder.h"
+
+#include <utils/Log.h>
+
+#include <thread>
+
+namespace android {
+
+//static
+const char* SimulatedTranscoder::toString(Event::Type type) {
+    switch (type) {
+    case Event::Start:
+        return "Start";
+    case Event::Pause:
+        return "Pause";
+    case Event::Resume:
+        return "Resume";
+    default:
+        break;
+    }
+    return "(unknown)";
+}
+
+SimulatedTranscoder::SimulatedTranscoder() {
+    std::thread(&SimulatedTranscoder::threadLoop, this).detach();
+}
+
+void SimulatedTranscoder::setCallback(const std::shared_ptr<TranscoderCallbackInterface>& cb) {
+    mCallback = cb;
+}
+
+void SimulatedTranscoder::start(
+        ClientIdType clientId, SessionIdType sessionId, const TranscodingRequestParcel& request,
+        const std::shared_ptr<ITranscodingClientCallback>& /*clientCallback*/) {
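+    // Tests may override the simulated processing time via the request's testConfig.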
+    if (request.testConfig.has_value() && request.testConfig->processingTotalTimeMs > 0) {
+        mSessionProcessingTimeMs = request.testConfig->processingTotalTimeMs;
+    }
+    ALOGV("%s: session {%d}: processingTime: %lld", __FUNCTION__, sessionId,
+          (long long)mSessionProcessingTimeMs);
+    queueEvent(Event::Start, clientId, sessionId, [=] {
+        auto callback = mCallback.lock();
+        if (callback != nullptr) {
+            callback->onStarted(clientId, sessionId);
+        }
+    });
+}
+
+void SimulatedTranscoder::pause(ClientIdType clientId, SessionIdType sessionId) {
+    queueEvent(Event::Pause, clientId, sessionId, [=] {
+        auto callback = mCallback.lock();
+        if (callback != nullptr) {
+            callback->onPaused(clientId, sessionId);
+        }
+    });
+}
+
+void SimulatedTranscoder::resume(
+        ClientIdType clientId, SessionIdType sessionId, const TranscodingRequestParcel& /*request*/,
+        const std::shared_ptr<ITranscodingClientCallback>& /*clientCallback*/) {
+    queueEvent(Event::Resume, clientId, sessionId, [=] {
+        auto callback = mCallback.lock();
+        if (callback != nullptr) {
+            callback->onResumed(clientId, sessionId);
+        }
+    });
+}
+
+void SimulatedTranscoder::stop(ClientIdType clientId, SessionIdType sessionId) {
+    queueEvent(Event::Stop, clientId, sessionId, nullptr);
+}
+
+void SimulatedTranscoder::queueEvent(Event::Type type, ClientIdType clientId,
+                                     SessionIdType sessionId, std::function<void()> runnable) {
+    ALOGV("%s: session {%lld, %d}: %s", __FUNCTION__, (long long)clientId, sessionId,
+          toString(type));
+
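+    // Queue the event; the detached threadLoop drains the queue and runs the callbacks.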
+    auto lock = std::scoped_lock(mLock);
+
+    mQueue.push_back({type, clientId, sessionId, runnable});
+    mCondition.notify_one();
+}
+
+void SimulatedTranscoder::threadLoop() {
+    bool running = false;
+    std::chrono::microseconds remainingUs(kSessionDurationUs);
+    std::chrono::system_clock::time_point lastRunningTime;
+    Event lastRunningEvent;
+
+    std::unique_lock<std::mutex> lock(mLock);
+    // SimulatedTranscoder lives in the transcoding service for as long as
+    // MediaTranscodingService itself does, so this loop runs for the life of the process.
+    while (true) {
+        // Wait for the next event.
+        while (mQueue.empty()) {
+            if (!running) {
+                mCondition.wait(lock);
+                continue;
+            }
+            // If running, wait for the remaining life of this session. Report finish if timed out.
+            std::cv_status status = mCondition.wait_for(lock, remainingUs);
+            if (status == std::cv_status::timeout) {
+                running = false;
+
+                auto callback = mCallback.lock();
+                if (callback != nullptr) {
+                    lock.unlock();
+                    callback->onFinish(lastRunningEvent.clientId, lastRunningEvent.sessionId);
+                    lock.lock();
+                }
+            } else {
+                // Advance the last running time and the remaining time. This guards
+                // against bad events (which will be ignored) and spurious wakeups, so
+                // that we don't wait for the same amount of time again.
+                auto now = std::chrono::system_clock::now();
+                remainingUs -= (now - lastRunningTime);
+                lastRunningTime = now;
+            }
+        }
+
+        // Handle the events, adjust state and send updates to client accordingly.
+        while (!mQueue.empty()) {
+            Event event = *mQueue.begin();
+            mQueue.pop_front();
+
+            ALOGV("%s: session {%lld, %d}: %s", __FUNCTION__, (long long)event.clientId,
+                  event.sessionId, toString(event.type));
+
+            if (!running && (event.type == Event::Start || event.type == Event::Resume)) {
+                running = true;
+                lastRunningTime = std::chrono::system_clock::now();
+                lastRunningEvent = event;
+                if (event.type == Event::Start) {
+                    remainingUs = std::chrono::milliseconds(mSessionProcessingTimeMs);
+                }
+            } else if (running && (event.type == Event::Pause || event.type == Event::Stop)) {
+                running = false;
+                remainingUs -= (std::chrono::system_clock::now() - lastRunningTime);
+            } else {
+                ALOGW("%s: discarding bad event: session {%lld, %d}: %s", __FUNCTION__,
+                      (long long)event.clientId, event.sessionId, toString(event.type));
+                continue;
+            }
+
+            if (event.runnable != nullptr) {
+                lock.unlock();
+                event.runnable();
+                lock.lock();
+            }
+        }
+    }
+}
+
+}  // namespace android
diff --git a/services/mediatranscoding/SimulatedTranscoder.h b/services/mediatranscoding/SimulatedTranscoder.h
new file mode 100644
index 0000000..ba2bba0
--- /dev/null
+++ b/services/mediatranscoding/SimulatedTranscoder.h
@@ -0,0 +1,81 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_SIMULATED_TRANSCODER_H
+#define ANDROID_MEDIA_SIMULATED_TRANSCODER_H
+
+#include <android-base/thread_annotations.h>
+#include <media/TranscoderInterface.h>
+
+#include <list>
+#include <mutex>
+
+namespace android {
+
+/**
+ * SimulatedTranscoder is currently used to instantiate MediaTranscodingService
+ * on the service side for testing, so that we can exercise the IPC calls of
+ * MediaTranscodingService and expose issues that are observable only over IPC.
+ * SimulatedTranscoder is used when useSimulatedTranscoder in TranscodingTestConfig
+ * is set to true.
+ *
+ * SimulatedTranscoder simulates session execution by reporting finish after kSessionDurationUs.
+ * Session lifecycle events are reported via progress updates with special progress
+ * numbers (equal to the Event's type).
+ */
+class SimulatedTranscoder : public TranscoderInterface {
+public:
+    struct Event {
+        enum Type { NoEvent, Start, Pause, Resume, Stop, Finished, Failed } type;
+        ClientIdType clientId;
+        SessionIdType sessionId;
+        std::function<void()> runnable;
+    };
+
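+    // Default duration (1 second) a simulated session runs before reporting finish.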
+    static constexpr int64_t kSessionDurationUs = 1000000;
+
+    SimulatedTranscoder();
+
+    // TranscoderInterface
+    void setCallback(const std::shared_ptr<TranscoderCallbackInterface>& cb) override;
+    void start(ClientIdType clientId, SessionIdType sessionId,
+               const TranscodingRequestParcel& request,
+               const std::shared_ptr<ITranscodingClientCallback>& clientCallback) override;
+    void pause(ClientIdType clientId, SessionIdType sessionId) override;
+    void resume(ClientIdType clientId, SessionIdType sessionId,
+                const TranscodingRequestParcel& request,
+                const std::shared_ptr<ITranscodingClientCallback>& clientCallback) override;
+    void stop(ClientIdType clientId, SessionIdType sessionId) override;
+    // ~TranscoderInterface
+
+private:
+    std::weak_ptr<TranscoderCallbackInterface> mCallback;
+    std::mutex mLock;
+    std::condition_variable mCondition;
+    std::list<Event> mQueue GUARDED_BY(mLock);
+
+    // Minimum time spent transcoding the video. This is used just for testing.
+    int64_t mSessionProcessingTimeMs = kSessionDurationUs / 1000;
+
+    static const char* toString(Event::Type type);
+    void queueEvent(Event::Type type, ClientIdType clientId, SessionIdType sessionId,
+                    std::function<void()> runnable);
+    void threadLoop();
+};
+
+}  // namespace android
+
+#endif  // ANDROID_MEDIA_SIMULATED_TRANSCODER_H
diff --git a/services/mediatranscoding/tests/Android.bp b/services/mediatranscoding/tests/Android.bp
index e0e040c..6497685 100644
--- a/services/mediatranscoding/tests/Android.bp
+++ b/services/mediatranscoding/tests/Android.bp
@@ -19,17 +19,40 @@
         "liblog",
         "libutils",
         "libmediatranscodingservice",
+        "libcutils",
     ],
 
     static_libs: [
         "mediatranscoding_aidl_interface-ndk_platform",
     ],
+
+    required: [
+        "TranscodingUidPolicy_TestAppA",
+        "TranscodingUidPolicy_TestAppB",
+        "TranscodingUidPolicy_TestAppC",
+    ],
 }
 
-// MediaTranscodingService unit test
+// MediaTranscodingService unit test using simulated transcoder
 cc_test {
-    name: "mediatranscodingservice_tests",
+    name: "mediatranscodingservice_simulated_tests",
     defaults: ["mediatranscodingservice_test_defaults"],
 
-    srcs: ["mediatranscodingservice_tests.cpp"],
-}
\ No newline at end of file
+    srcs: ["mediatranscodingservice_simulated_tests.cpp"],
+}
+
+// MediaTranscodingService unit test using real transcoder
+cc_test {
+    name: "mediatranscodingservice_real_tests",
+    defaults: ["mediatranscodingservice_test_defaults"],
+
+    srcs: ["mediatranscodingservice_real_tests.cpp"],
+}
+
+// MediaTranscodingService unit test related to resource management
+cc_test {
+    name: "mediatranscodingservice_resource_tests",
+    defaults: ["mediatranscodingservice_test_defaults"],
+
+    srcs: ["mediatranscodingservice_resource_tests.cpp"],
+}
diff --git a/services/mediatranscoding/tests/MediaTranscodingServiceTestHelper.h b/services/mediatranscoding/tests/MediaTranscodingServiceTestHelper.h
new file mode 100644
index 0000000..f4d3ff8
--- /dev/null
+++ b/services/mediatranscoding/tests/MediaTranscodingServiceTestHelper.h
@@ -0,0 +1,496 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Unit Test for MediaTranscodingService.
+
+#include <aidl/android/media/BnTranscodingClientCallback.h>
+#include <aidl/android/media/IMediaTranscodingService.h>
+#include <aidl/android/media/ITranscodingClient.h>
+#include <aidl/android/media/ITranscodingClientCallback.h>
+#include <aidl/android/media/TranscodingRequestParcel.h>
+#include <aidl/android/media/TranscodingSessionParcel.h>
+#include <aidl/android/media/TranscodingSessionPriority.h>
+#include <android-base/logging.h>
+#include <android/binder_manager.h>
+#include <android/binder_process.h>
+#include <binder/PermissionController.h>
+#include <cutils/multiuser.h>
+#include <fcntl.h>
+#include <gtest/gtest.h>
+#include <sys/stat.h>
+#include <sys/types.h>
+#include <utils/Log.h>
+
+#include <iostream>
+#include <list>
+
+#include "SimulatedTranscoder.h"
+
+namespace android {
+
+namespace media {
+
+using Status = ::ndk::ScopedAStatus;
+using aidl::android::media::BnTranscodingClientCallback;
+using aidl::android::media::IMediaTranscodingService;
+using aidl::android::media::ITranscodingClient;
+using aidl::android::media::ITranscodingClientCallback;
+using aidl::android::media::TranscodingRequestParcel;
+using aidl::android::media::TranscodingSessionParcel;
+using aidl::android::media::TranscodingSessionPriority;
+using aidl::android::media::TranscodingVideoTrackFormat;
+
+constexpr int32_t kClientUseCallingPid = IMediaTranscodingService::USE_CALLING_PID;
+
+constexpr uid_t kClientUid = 5000;
+#define UID(n) (kClientUid + (n))
+
+constexpr pid_t kClientPid = 10000;
+#define PID(n) (kClientPid + (n))
+
+constexpr int32_t kClientId = 0;
+#define CLIENT(n) (kClientId + (n))
+
+constexpr const char* kClientName = "TestClient";
+constexpr const char* kClientPackageA = "com.android.tests.transcoding.testapp.A";
+constexpr const char* kClientPackageB = "com.android.tests.transcoding.testapp.B";
+constexpr const char* kClientPackageC = "com.android.tests.transcoding.testapp.C";
+
+constexpr const char* kTestActivityName = "/com.android.tests.transcoding.MainActivity";
+
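+// Resolve the uid of |packageName| for the given |userId| via the PermissionController.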
+static status_t getUidForPackage(String16 packageName, userid_t userId, /*inout*/ uid_t& uid) {
+    PermissionController pc;
+    uid = pc.getPackageUid(packageName, 0);
+    if (uid <= 0) {
+        ALOGE("Unknown package: '%s'", String8(packageName).string());
+        return BAD_VALUE;
+    }
+
+    if (userId < 0) {
+        ALOGE("Invalid user: %d", userId);
+        return BAD_VALUE;
+    }
+
+    uid = multiuser_get_uid(userId, uid);
+    return NO_ERROR;
+}
+
+struct ShellHelper {
+    static bool RunCmd(const std::string& cmdStr) {
+        int ret = system(cmdStr.c_str());
+        if (ret != 0) {
+            ALOGE("Failed to run cmd: %s, exitcode %d", cmdStr.c_str(), ret);
+            return false;
+        }
+        return true;
+    }
+
+    static bool Start(const char* packageName, const char* activityName) {
+        return RunCmd("am start -W " + std::string(packageName) + std::string(activityName) +
+                      " &> /dev/null");
+    }
+
+    static bool Stop(const char* packageName) {
+        return RunCmd("am force-stop " + std::string(packageName));
+    }
+};
+
+struct EventTracker {
+    struct Event {
+        enum { NoEvent, Start, Pause, Resume, Finished, Failed } type;
+        int64_t clientId;
+        int32_t sessionId;
+    };
+
+#define DECLARE_EVENT(action)                                  \
+    static Event action(int32_t clientId, int32_t sessionId) { \
+        return {Event::action, clientId, sessionId};           \
+    }
+
+    DECLARE_EVENT(Start);
+    DECLARE_EVENT(Pause);
+    DECLARE_EVENT(Resume);
+    DECLARE_EVENT(Finished);
+    DECLARE_EVENT(Failed);
+
+    static constexpr Event NoEvent = {Event::NoEvent, 0, 0};
+
+    static std::string toString(const Event& event) {
+        std::string eventStr;
+        switch (event.type) {
+        case Event::Start:
+            eventStr = "Start";
+            break;
+        case Event::Pause:
+            eventStr = "Pause";
+            break;
+        case Event::Resume:
+            eventStr = "Resume";
+            break;
+        case Event::Finished:
+            eventStr = "Finished";
+            break;
+        case Event::Failed:
+            eventStr = "Failed";
+            break;
+        default:
+            return "NoEvent";
+        }
+        return "session {" + std::to_string(event.clientId) + ", " +
+               std::to_string(event.sessionId) + "}: " + eventStr;
+    }
+
+    // Pop 1 event from front, wait for up to timeoutUs if empty.
+    const Event& pop(int64_t timeoutUs = 0) {
+        std::unique_lock lock(mLock);
+
+        if (mEventQueue.empty() && timeoutUs > 0) {
+            mCondition.wait_for(lock, std::chrono::microseconds(timeoutUs));
+        }
+
+        if (mEventQueue.empty()) {
+            mPoppedEvent = NoEvent;
+        } else {
+            mPoppedEvent = *mEventQueue.begin();
+            mEventQueue.pop_front();
+        }
+
+        return mPoppedEvent;
+    }
+
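+    // Wait (up to timeoutUs) until a specific event arrives, then pop it, together with
+    // all events queued before it, into outEvents. Returns false if it doesn't arrive in time.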
+    bool waitForSpecificEventAndPop(const Event& target, std::list<Event>* outEvents,
+                                    int64_t timeoutUs = 0) {
+        std::unique_lock lock(mLock);
+
+        auto startTime = std::chrono::system_clock::now();
+
+        std::list<Event>::iterator it;
+        while (((it = std::find(mEventQueue.begin(), mEventQueue.end(), target)) ==
+                mEventQueue.end()) &&
+               timeoutUs > 0) {
+            std::cv_status status = mCondition.wait_for(lock, std::chrono::microseconds(timeoutUs));
+            if (status == std::cv_status::timeout) {
+                break;
+            }
+            std::chrono::microseconds elapsedTime = std::chrono::system_clock::now() - startTime;
+            timeoutUs -= elapsedTime.count();
+        }
+
+        if (it == mEventQueue.end()) {
+            return false;
+        }
+        *outEvents = std::list<Event>(mEventQueue.begin(), std::next(it));
+        mEventQueue.erase(mEventQueue.begin(), std::next(it));
+        return true;
+    }
+
+    // Push 1 event to back.
+    void append(const Event& event,
+                const TranscodingErrorCode err = TranscodingErrorCode::kNoError) {
+        ALOGD("%s", toString(event).c_str());
+
+        std::unique_lock lock(mLock);
+
+        mEventQueue.push_back(event);
+        mLastErr = err;
+        mCondition.notify_one();
+    }
+
+    void updateProgress(int progress) {
+        std::unique_lock lock(mLock);
+        mLastProgress = progress;
+        mUpdateCount++;
+    }
+
+    int getUpdateCount(int* lastProgress) {
+        std::unique_lock lock(mLock);
+        *lastProgress = mLastProgress;
+        return mUpdateCount;
+    }
+
+    TranscodingErrorCode getLastError() {
+        std::unique_lock lock(mLock);
+        return mLastErr;
+    }
+
+private:
+    std::mutex mLock;
+    std::condition_variable mCondition;
+    Event mPoppedEvent;
+    std::list<Event> mEventQueue;
+    TranscodingErrorCode mLastErr;
+    int mUpdateCount = 0;
+    int mLastProgress = -1;
+};
+
+// Operators for GTest macros.
+bool operator==(const EventTracker::Event& lhs, const EventTracker::Event& rhs) {
+    return lhs.type == rhs.type && lhs.clientId == rhs.clientId && lhs.sessionId == rhs.sessionId;
+}
+
+std::ostream& operator<<(std::ostream& str, const EventTracker::Event& v) {
+    str << EventTracker::toString(v);
+    return str;
+}
+
+static constexpr bool success = true;
+static constexpr bool fail = false;
+
+struct TestClientCallback : public BnTranscodingClientCallback,
+                            public EventTracker,
+                            public std::enable_shared_from_this<TestClientCallback> {
+    TestClientCallback(const char* packageName, int32_t id)
+          : mClientId(id), mClientPid(PID(id)), mClientUid(UID(id)), mPackageName(packageName) {
+        ALOGI("TestClientCallback %d created: pid %d, uid %d", id, PID(id), UID(id));
+
+        // Use package uid if that's available.
+        uid_t packageUid;
+        if (getUidForPackage(String16(packageName), 0 /*userId*/, packageUid) == NO_ERROR) {
+            mClientUid = packageUid;
+        }
+    }
+
+    virtual ~TestClientCallback() { ALOGI("TestClientCallback %d destroyed", mClientId); }
+
+    Status openFileDescriptor(const std::string& in_fileUri, const std::string& in_mode,
+                              ::ndk::ScopedFileDescriptor* _aidl_return) override {
+        ALOGD("@@@ openFileDescriptor: %s", in_fileUri.c_str());
+        int fd;
+        if (in_mode == "w" || in_mode == "rw") {
+            int kOpenFlags;
+            if (in_mode == "w") {
+                // Write-only, create file if non-existent, truncate existing file.
+                kOpenFlags = O_WRONLY | O_CREAT | O_TRUNC;
+            } else {
+                // Read-Write, create if non-existent, no truncate (service will truncate if needed)
+                kOpenFlags = O_RDWR | O_CREAT;
+            }
+            // User R+W permission.
+            constexpr int kFileMode = S_IRUSR | S_IWUSR;
+            fd = open(in_fileUri.c_str(), kOpenFlags, kFileMode);
+        } else {
+            fd = open(in_fileUri.c_str(), O_RDONLY);
+        }
+        _aidl_return->set(fd);
+        return Status::ok();
+    }
+
+    Status onTranscodingStarted(int32_t in_sessionId) override {
+        append(EventTracker::Start(mClientId, in_sessionId));
+        return Status::ok();
+    }
+
+    Status onTranscodingPaused(int32_t in_sessionId) override {
+        append(EventTracker::Pause(mClientId, in_sessionId));
+        return Status::ok();
+    }
+
+    Status onTranscodingResumed(int32_t in_sessionId) override {
+        append(EventTracker::Resume(mClientId, in_sessionId));
+        return Status::ok();
+    }
+
+    Status onTranscodingFinished(
+            int32_t in_sessionId,
+            const ::aidl::android::media::TranscodingResultParcel& /* in_result */) override {
+        append(Finished(mClientId, in_sessionId));
+        return Status::ok();
+    }
+
+    Status onTranscodingFailed(int32_t in_sessionId,
+                               ::aidl::android::media::TranscodingErrorCode in_errorCode) override {
+        append(Failed(mClientId, in_sessionId), in_errorCode);
+        return Status::ok();
+    }
+
+    Status onAwaitNumberOfSessionsChanged(int32_t /* in_sessionId */,
+                                          int32_t /* in_oldAwaitNumber */,
+                                          int32_t /* in_newAwaitNumber */) override {
+        return Status::ok();
+    }
+
+    Status onProgressUpdate(int32_t /* in_sessionId */, int32_t in_progress) override {
+        updateProgress(in_progress);
+        return Status::ok();
+    }
+
+    Status registerClient(const char* packageName,
+                          const std::shared_ptr<IMediaTranscodingService>& service) {
+        // Override the default uid if the package uid is found.
+        uid_t uid;
+        if (getUidForPackage(String16(packageName), 0 /*userId*/, uid) == NO_ERROR) {
+            mClientUid = uid;
+        }
+
+        ALOGD("registering %s with uid %d", packageName, mClientUid);
+
+        std::shared_ptr<ITranscodingClient> client;
+        Status status =
+                service->registerClient(shared_from_this(), kClientName, packageName, &client);
+
+        mClient = status.isOk() ? client : nullptr;
+        return status;
+    }
+
+    Status unregisterClient() {
+        Status status;
+        if (mClient != nullptr) {
+            status = mClient->unregister();
+            mClient = nullptr;
+        }
+        return status;
+    }
+
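+    // Submit a transcoding request and check that it succeeds (or fails) as expected.
+    // overridePid/overrideUid, when given, are sent in place of the client's own pid/uid.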
+    template <bool expectation = success>
+    bool submit(int32_t sessionId, const char* sourceFilePath, const char* destinationFilePath,
+                TranscodingSessionPriority priority = TranscodingSessionPriority::kNormal,
+                int bitrateBps = -1, int overridePid = -1, int overrideUid = -1) {
+        constexpr bool shouldSucceed = (expectation == success);
+        bool result;
+        TranscodingRequestParcel request;
+        TranscodingSessionParcel session;
+
+        request.sourceFilePath = sourceFilePath;
+        request.destinationFilePath = destinationFilePath;
+        request.priority = priority;
+        request.clientPid = (overridePid == -1) ? mClientPid : overridePid;
+        request.clientUid = (overrideUid == -1) ? mClientUid : overrideUid;
+        request.clientPackageName = (overrideUid == -1) ? mPackageName : "";
+        if (bitrateBps > 0) {
+            request.requestedVideoTrackFormat.emplace(TranscodingVideoTrackFormat());
+            request.requestedVideoTrackFormat->bitrateBps = bitrateBps;
+        }
+        Status status = mClient->submitRequest(request, &session, &result);
+
+        EXPECT_TRUE(status.isOk());
+        EXPECT_EQ(result, shouldSucceed);
+        if (shouldSucceed) {
+            EXPECT_EQ(session.sessionId, sessionId);
+        }
+
+        return status.isOk() && (result == shouldSucceed) &&
+               (!shouldSucceed || session.sessionId == sessionId);
+    }
+
+    template <bool expectation = success>
+    bool cancel(int32_t sessionId) {
+        constexpr bool shouldSucceed = (expectation == success);
+        bool result;
+        Status status = mClient->cancelSession(sessionId, &result);
+
+        EXPECT_TRUE(status.isOk());
+        EXPECT_EQ(result, shouldSucceed);
+
+        return status.isOk() && (result == shouldSucceed);
+    }
+
+    template <bool expectation = success>
+    bool getSession(int32_t sessionId, const char* sourceFilePath,
+                    const char* destinationFilePath) {
+        constexpr bool shouldSucceed = (expectation == success);
+        bool result;
+        TranscodingSessionParcel session;
+        Status status = mClient->getSessionWithId(sessionId, &session, &result);
+
+        EXPECT_TRUE(status.isOk());
+        EXPECT_EQ(result, shouldSucceed);
+        if (shouldSucceed) {
+            EXPECT_EQ(session.sessionId, sessionId);
+            EXPECT_EQ(session.request.sourceFilePath, sourceFilePath);
+        }
+
+        return status.isOk() && (result == shouldSucceed) &&
+               (!shouldSucceed || (session.sessionId == sessionId &&
+                                   session.request.sourceFilePath == sourceFilePath &&
+                                   session.request.destinationFilePath == destinationFilePath));
+    }
+
+    int32_t mClientId;
+    pid_t mClientPid;
+    uid_t mClientUid;
+    std::string mPackageName;
+    std::shared_ptr<ITranscodingClient> mClient;
+};
+
+class MediaTranscodingServiceTestBase : public ::testing::Test {
+public:
+    MediaTranscodingServiceTestBase() { ALOGI("MediaTranscodingServiceTestBase created"); }
+
+    virtual ~MediaTranscodingServiceTestBase() {
+        ALOGI("MediaTranscodingServiceTestBase destroyed");
+    }
+
+    void SetUp() override {
+        // Need thread pool to receive callbacks, otherwise oneway callbacks are
+        // silently ignored.
+        ABinderProcess_startThreadPool();
+        ::ndk::SpAIBinder binder(AServiceManager_getService("media.transcoding"));
+        mService = IMediaTranscodingService::fromBinder(binder);
+        if (mService == nullptr) {
+            ALOGE("Failed to connect to the media.trascoding service.");
+            return;
+        }
+
+        mClient1 = ::ndk::SharedRefBase::make<TestClientCallback>(kClientPackageA, 1);
+        mClient2 = ::ndk::SharedRefBase::make<TestClientCallback>(kClientPackageB, 2);
+        mClient3 = ::ndk::SharedRefBase::make<TestClientCallback>(kClientPackageC, 3);
+    }
+
+    Status registerOneClient(const std::shared_ptr<TestClientCallback>& callback) {
+        ALOGD("registering %s with uid %d", callback->mPackageName.c_str(), callback->mClientUid);
+
+        std::shared_ptr<ITranscodingClient> client;
+        Status status =
+                mService->registerClient(callback, kClientName, callback->mPackageName, &client);
+
+        if (status.isOk()) {
+            callback->mClient = client;
+        } else {
+            callback->mClient = nullptr;
+        }
+        return status;
+    }
+
+    void registerMultipleClients() {
+        // Register 3 clients.
+        EXPECT_TRUE(registerOneClient(mClient1).isOk());
+        EXPECT_TRUE(registerOneClient(mClient2).isOk());
+        EXPECT_TRUE(registerOneClient(mClient3).isOk());
+
+        // Check the number of clients.
+        int32_t numOfClients;
+        Status status = mService->getNumOfClients(&numOfClients);
+        EXPECT_TRUE(status.isOk());
+        EXPECT_GE(numOfClients, 3);
+    }
+
+    void unregisterMultipleClients() {
+        // Unregister the clients.
+        EXPECT_TRUE(mClient1->unregisterClient().isOk());
+        EXPECT_TRUE(mClient2->unregisterClient().isOk());
+        EXPECT_TRUE(mClient3->unregisterClient().isOk());
+    }
+
+    void deleteFile(const char* path) { unlink(path); }
+
+    std::shared_ptr<IMediaTranscodingService> mService;
+    std::shared_ptr<TestClientCallback> mClient1;
+    std::shared_ptr<TestClientCallback> mClient2;
+    std::shared_ptr<TestClientCallback> mClient3;
+};
+
+}  // namespace media
+}  // namespace android
diff --git a/services/mediatranscoding/tests/README.txt b/services/mediatranscoding/tests/README.txt
new file mode 100644
index 0000000..cde465e
--- /dev/null
+++ b/services/mediatranscoding/tests/README.txt
@@ -0,0 +1,8 @@
+mediatranscodingservice_simulated_tests:
+	Tests media transcoding service with simulated transcoder.
+
+mediatranscodingservice_real_tests:
+	Tests media transcoding service with real transcoder. Uses the same test assets
+	as the MediaTranscoder unit tests. Before running the test, please make sure
+	to push the test assets to /data/local/tmp:
+	adb push $TOP/frameworks/av/media/libmediatranscoding/tests/assets /data/local/tmp/TranscodingTestAssets
diff --git a/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/Android.bp b/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/Android.bp
new file mode 100644
index 0000000..95a94fc
--- /dev/null
+++ b/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/Android.bp
@@ -0,0 +1,23 @@
+android_test_helper_app {
+    name: "TranscodingUidPolicy_TestAppA",
+    manifest: "TestAppA.xml",
+    static_libs: ["androidx.test.rules"],
+    sdk_version: "test_current",
+    srcs: ["src/**/*.java"],
+}
+
+android_test_helper_app {
+    name: "TranscodingUidPolicy_TestAppB",
+    manifest: "TestAppB.xml",
+    static_libs: ["androidx.test.rules"],
+    sdk_version: "test_current",
+    srcs: ["src/**/*.java"],
+}
+
+android_test_helper_app {
+    name: "TranscodingUidPolicy_TestAppC",
+    manifest: "TestAppC.xml",
+    static_libs: ["androidx.test.rules"],
+    sdk_version: "test_current",
+    srcs: ["src/**/*.java"],
+}
\ No newline at end of file
diff --git a/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/TestAppA.xml b/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/TestAppA.xml
new file mode 100644
index 0000000..0dff171
--- /dev/null
+++ b/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/TestAppA.xml
@@ -0,0 +1,41 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+    package="com.android.tests.transcoding.testapp.A"
+    android:versionCode="1"
+    android:versionName="1.0" >
+
+    <application android:label="TestAppA">
+        <activity android:name="com.android.tests.transcoding.MainActivity"
+            android:exported="true">
+            <intent-filter>
+                <action android:name="android.intent.action.MAIN" />
+                <category android:name="android.intent.category.DEFAULT"/>
+                <category android:name="android.intent.category.LAUNCHER" />
+            </intent-filter>
+        </activity>
+        <activity android:name="com.android.tests.transcoding.ResourcePolicyTestActivity"
+            android:exported="true">
+            <intent-filter>
+                <action android:name="android.intent.action.MAIN" />
+                <category android:name="android.intent.category.DEFAULT"/>
+                <category android:name="android.intent.category.LAUNCHER" />
+            </intent-filter>
+        </activity>
+    </application>
+</manifest>
+
diff --git a/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/TestAppB.xml b/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/TestAppB.xml
new file mode 100644
index 0000000..4baa35a
--- /dev/null
+++ b/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/TestAppB.xml
@@ -0,0 +1,33 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+    package="com.android.tests.transcoding.testapp.B"
+    android:versionCode="1"
+    android:versionName="1.0" >
+
+    <application android:label="TestAppB">
+        <activity android:name="com.android.tests.transcoding.MainActivity"
+            android:exported="true">
+            <intent-filter>
+                <action android:name="android.intent.action.MAIN" />
+                <category android:name="android.intent.category.DEFAULT"/>
+                <category android:name="android.intent.category.LAUNCHER" />
+            </intent-filter>
+        </activity>
+    </application>
+</manifest>
+
diff --git a/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/TestAppC.xml b/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/TestAppC.xml
new file mode 100644
index 0000000..3dde3af
--- /dev/null
+++ b/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/TestAppC.xml
@@ -0,0 +1,32 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+    package="com.android.tests.transcoding.testapp.C"
+    android:versionCode="1"
+    android:versionName="1.0" >
+
+    <application android:label="TestAppC">
+        <activity android:name="com.android.tests.transcoding.MainActivity"
+            android:exported="true">
+            <intent-filter>
+                <action android:name="android.intent.action.MAIN" />
+                <category android:name="android.intent.category.DEFAULT"/>
+                <category android:name="android.intent.category.LAUNCHER" />
+            </intent-filter>
+        </activity>
+    </application>
+</manifest>
diff --git a/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/src/com/android/tests/transcoding/MainActivity.java b/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/src/com/android/tests/transcoding/MainActivity.java
new file mode 100644
index 0000000..b79164d
--- /dev/null
+++ b/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/src/com/android/tests/transcoding/MainActivity.java
@@ -0,0 +1,107 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.tests.transcoding;
+
+import android.app.Activity;
+import android.content.Intent;
+import android.os.Bundle;
+import android.util.Log;
+
+/**
+ * This is an empty activity for testing the UID policy of media transcoding service.
+ */
+public class MainActivity extends Activity {
+    private static final String TAG = "MainActivity";
+
+    // Called at the start of the full lifetime.
+    @Override
+    public void onCreate(Bundle savedInstanceState) {
+        super.onCreate(savedInstanceState);
+        // Initialize Activity and inflate the UI.
+    }
+
+    // Called after onCreate has finished; used to restore UI state.
+    @Override
+    public void onRestoreInstanceState(Bundle savedInstanceState) {
+        super.onRestoreInstanceState(savedInstanceState);
+        // Restore UI state from the savedInstanceState.
+        // This bundle has also been passed to onCreate.
+        // Will only be called if the Activity has been
+        // killed by the system since it was last visible.
+    }
+
+    // Called before subsequent visible lifetimes
+    // for an activity process.
+    @Override
+    public void onRestart() {
+        super.onRestart();
+        // Load changes knowing that the Activity has already
+        // been visible within this process.
+    }
+
+    // Called at the start of the visible lifetime.
+    @Override
+    public void onStart() {
+        super.onStart();
+        // Apply any required UI change now that the Activity is visible.
+    }
+
+    // Called at the start of the active lifetime.
+    @Override
+    public void onResume() {
+        super.onResume();
+        // Resume any paused UI updates, threads, or processes required
+        // by the Activity but suspended when it was inactive.
+    }
+
+    // Called to save UI state changes at the
+    // end of the active lifecycle.
+    @Override
+    public void onSaveInstanceState(Bundle savedInstanceState) {
+        // Save UI state changes to the savedInstanceState.
+        // This bundle will be passed to onCreate and
+        // onRestoreInstanceState if the process is
+        // killed and restarted by the run time.
+        super.onSaveInstanceState(savedInstanceState);
+    }
+
+    // Called at the end of the active lifetime.
+    @Override
+    public void onPause() {
+        // Suspend UI updates, threads, or CPU intensive processes
+        // that don't need to be updated when the Activity isn't
+        // the active foreground Activity.
+        super.onPause();
+    }
+
+    // Called at the end of the visible lifetime.
+    @Override
+    public void onStop() {
+        // Suspend remaining UI updates, threads, or processing
+        // that aren't required when the Activity isn't visible.
+        // Persist all edits or state changes
+        // as after this call the process is likely to be killed.
+        super.onStop();
+    }
+
+    // Sometimes called at the end of the full lifetime.
+    @Override
+    public void onDestroy() {
+        // Clean up any resources including ending threads,
+        // closing database connections etc.
+        super.onDestroy();
+    }
+}
diff --git a/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/src/com/android/tests/transcoding/ResourcePolicyTestActivity.java b/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/src/com/android/tests/transcoding/ResourcePolicyTestActivity.java
new file mode 100644
index 0000000..c9e2ddb
--- /dev/null
+++ b/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/src/com/android/tests/transcoding/ResourcePolicyTestActivity.java
@@ -0,0 +1,272 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.tests.transcoding;
+
+import android.app.Activity;
+import android.media.MediaCodec;
+import android.media.MediaCodecInfo;
+import android.media.MediaCodecInfo.CodecCapabilities;
+import android.media.MediaCodecInfo.VideoCapabilities;
+import android.media.MediaCodecList;
+import android.media.MediaFormat;
+import android.os.Bundle;
+import android.util.Log;
+import java.io.IOException;
+import java.util.Vector;
+
+public class ResourcePolicyTestActivity extends Activity {
+    public static final int TYPE_NONSECURE = 0;
+    public static final int TYPE_SECURE = 1;
+    public static final int TYPE_MIX = 2;
+
+    protected String TAG;
+    private static final int FRAME_RATE = 10;
+    private static final int IFRAME_INTERVAL = 10; // 10 seconds between I-frames
+    private static final String MIME = MediaFormat.MIMETYPE_VIDEO_AVC;
+    private static final int TIMEOUT_MS = 5000;
+
+    private Vector<MediaCodec> mCodecs = new Vector<MediaCodec>();
+
+    private class TestCodecCallback extends MediaCodec.Callback {
+        @Override
+        public void onInputBufferAvailable(MediaCodec codec, int index) {
+            Log.d(TAG, "onInputBufferAvailable " + codec.toString());
+        }
+
+        @Override
+        public void onOutputBufferAvailable(
+                MediaCodec codec, int index, MediaCodec.BufferInfo info) {
+            Log.d(TAG, "onOutputBufferAvailable " + codec.toString());
+        }
+
+        @Override
+        public void onError(MediaCodec codec, MediaCodec.CodecException e) {
+            Log.d(TAG, "onError " + codec.toString() + " errorCode " + e.getErrorCode());
+        }
+
+        @Override
+        public void onOutputFormatChanged(MediaCodec codec, MediaFormat format) {
+            Log.d(TAG, "onOutputFormatChanged " + codec.toString());
+        }
+    }
+
+    private MediaCodec.Callback mCallback = new TestCodecCallback();
+
+    private MediaFormat getTestFormat(CodecCapabilities caps, boolean securePlayback) {
+        VideoCapabilities vcaps = caps.getVideoCapabilities();
+        int width = vcaps.getSupportedWidths().getLower();
+        int height = vcaps.getSupportedHeightsFor(width).getLower();
+        int bitrate = vcaps.getBitrateRange().getLower();
+
+        MediaFormat format = MediaFormat.createVideoFormat(MIME, width, height);
+        format.setInteger(MediaFormat.KEY_COLOR_FORMAT, caps.colorFormats[0]);
+        format.setInteger(MediaFormat.KEY_BIT_RATE, bitrate);
+        format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
+        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
+        format.setFeatureEnabled(CodecCapabilities.FEATURE_SecurePlayback, securePlayback);
+        return format;
+    }
+
+    private MediaCodecInfo getTestCodecInfo(boolean securePlayback) {
+        // Use avc decoder for testing.
+        boolean isEncoder = false;
+
+        MediaCodecList mcl = new MediaCodecList(MediaCodecList.ALL_CODECS);
+        for (MediaCodecInfo info : mcl.getCodecInfos()) {
+            if (info.isEncoder() != isEncoder) {
+                continue;
+            }
+            CodecCapabilities caps;
+            try {
+                caps = info.getCapabilitiesForType(MIME);
+                boolean securePlaybackSupported =
+                        caps.isFeatureSupported(CodecCapabilities.FEATURE_SecurePlayback);
+                boolean securePlaybackRequired =
+                        caps.isFeatureRequired(CodecCapabilities.FEATURE_SecurePlayback);
+                if ((securePlayback && securePlaybackSupported)
+                        || (!securePlayback && !securePlaybackRequired)) {
+                    Log.d(TAG, "securePlayback " + securePlayback + " will use " + info.getName());
+                } else {
+                    Log.d(TAG, "securePlayback " + securePlayback + " skip " + info.getName());
+                    continue;
+                }
+            } catch (IllegalArgumentException e) {
+                // mime is not supported
+                continue;
+            }
+            return info;
+        }
+
+        return null;
+    }
+
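+    // Allocate up to |max| codecs of the type requested via the "test-type" intent extra
+    // (non-secure, secure, or a mix); finish immediately if no suitable codec is available.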
+    protected int allocateCodecs(int max) {
+        Bundle extras = getIntent().getExtras();
+        int type = TYPE_NONSECURE;
+        if (extras != null) {
+            type = extras.getInt("test-type", type);
+            Log.d(TAG, "type is: " + type);
+        }
+
+        boolean shouldSkip = false;
+        boolean securePlayback;
+        if (type == TYPE_NONSECURE || type == TYPE_MIX) {
+            securePlayback = false;
+            MediaCodecInfo info = getTestCodecInfo(securePlayback);
+            if (info != null) {
+                allocateCodecs(max, info, securePlayback);
+            } else {
+                shouldSkip = true;
+            }
+        }
+
+        if (!shouldSkip) {
+            if (type == TYPE_SECURE || type == TYPE_MIX) {
+                securePlayback = true;
+                MediaCodecInfo info = getTestCodecInfo(securePlayback);
+                if (info != null) {
+                    allocateCodecs(max, info, securePlayback);
+                } else {
+                    shouldSkip = true;
+                }
+            }
+        }
+
+        if (shouldSkip) {
+            Log.d(TAG, "test skipped as there's no supported codec.");
+            finishWithResult(RESULT_OK);
+        }
+
+        Log.d(TAG, "allocateCodecs returned " + mCodecs.size());
+        return mCodecs.size();
+    }
+
+    protected void allocateCodecs(int max, MediaCodecInfo info, boolean securePlayback) {
+        String name = info.getName();
+        CodecCapabilities caps = info.getCapabilitiesForType(MIME);
+        MediaFormat format = getTestFormat(caps, securePlayback);
+        MediaCodec codec = null;
+        for (int i = mCodecs.size(); i < max; ++i) {
+            try {
+                Log.d(TAG, "Create codec " + name + " #" + i);
+                codec = MediaCodec.createByCodecName(name);
+                codec.setCallback(mCallback);
+                Log.d(TAG, "Configure codec " + format);
+                codec.configure(format, null, null, 0);
+                Log.d(TAG, "Start codec " + format);
+                codec.start();
+                mCodecs.add(codec);
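+                // Ownership moves to mCodecs; clear the local reference so the finally block doesn't release it.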
+                codec = null;
+            } catch (IllegalArgumentException e) {
+                Log.d(TAG, "IllegalArgumentException " + e.getMessage());
+                break;
+            } catch (IOException e) {
+                Log.d(TAG, "IOException " + e.getMessage());
+                break;
+            } catch (MediaCodec.CodecException e) {
+                Log.d(TAG, "CodecException 0x" + Integer.toHexString(e.getErrorCode()));
+                break;
+            } finally {
+                if (codec != null) {
+                    Log.d(TAG, "release codec");
+                    codec.release();
+                    codec = null;
+                }
+            }
+        }
+    }
+
+    protected void finishWithResult(int result) {
+        for (int i = 0; i < mCodecs.size(); ++i) {
+            Log.d(TAG, "release codec #" + i);
+            mCodecs.get(i).release();
+        }
+        mCodecs.clear();
+        setResult(result);
+        finish();
+        Log.d(TAG, "activity finished");
+    }
+
+    private void doUseCodecs() {
+        int current = 0;
+        try {
+            for (current = 0; current < mCodecs.size(); ++current) {
+                mCodecs.get(current).getName();
+            }
+        } catch (MediaCodec.CodecException e) {
+            Log.d(TAG, "useCodecs got CodecException 0x" + Integer.toHexString(e.getErrorCode()));
+            if (e.getErrorCode() == MediaCodec.CodecException.ERROR_RECLAIMED) {
+                Log.d(TAG, "Remove codec " + current + " from the list");
+                mCodecs.get(current).release();
+                mCodecs.remove(current);
+                mGotReclaimedException = true;
+                mUseCodecs = false;
+            }
+            return;
+        }
+    }
+
+    private Thread mWorkerThread;
+    private volatile boolean mUseCodecs = true;
+    private volatile boolean mGotReclaimedException = false;
+    protected void useCodecs() {
+        mWorkerThread = new Thread(new Runnable() {
+            @Override
+            public void run() {
+                long start = System.currentTimeMillis();
+                long timeSinceStartedMs = 0;
+                while (mUseCodecs && (timeSinceStartedMs < TIMEOUT_MS)) {
+                    doUseCodecs();
+                    try {
+                        Thread.sleep(50 /* millis */);
+                    } catch (InterruptedException e) {
+                    }
+                    timeSinceStartedMs = System.currentTimeMillis() - start;
+                }
+                if (mGotReclaimedException) {
+                    Log.d(TAG, "Got expected reclaim exception.");
+                }
+                finishWithResult(RESULT_OK);
+            }
+        });
+        mWorkerThread.start();
+    }
+
+    private static final int MAX_INSTANCES = 32;
+
+    @Override
+    protected void onCreate(Bundle savedInstanceState) {
+        TAG = "ResourcePolicyTestActivity";
+
+        Log.d(TAG, "onCreate called.");
+        super.onCreate(savedInstanceState);
+
+        if (allocateCodecs(MAX_INSTANCES) == MAX_INSTANCES) {
+            // Didn't hit the resource limit even with MAX_INSTANCES codecs, so a reclaim exception isn't expected.
+            //mWaitForReclaim = false;
+            Log.d(TAG, "Didn't hit resource limitation");
+        }
+
+        useCodecs();
+    }
+
+    @Override
+    protected void onDestroy() {
+        Log.d(TAG, "onDestroy called.");
+        super.onDestroy();
+    }
+}
diff --git a/services/mediatranscoding/tests/build_and_run_all_unit_tests.sh b/services/mediatranscoding/tests/build_and_run_all_unit_tests.sh
old mode 100644
new mode 100755
index bcdc7f7..1b42a22
--- a/services/mediatranscoding/tests/build_and_run_all_unit_tests.sh
+++ b/services/mediatranscoding/tests/build_and_run_all_unit_tests.sh
@@ -13,11 +13,35 @@
 
 mm
 
-echo "waiting for device"
+# Push the files onto the device.
+. $ANDROID_BUILD_TOP/frameworks/av/media/libmediatranscoding/tests/assets/push_assets.sh
 
-adb root && adb wait-for-device remount && adb sync
+echo "[==========] installing test apps"
+adb root
+adb install -t -r -g -d $ANDROID_TARGET_OUT_TESTCASES/TranscodingUidPolicy_TestAppA/arm64/TranscodingUidPolicy_TestAppA.apk
+adb install -t -r -g -d $ANDROID_TARGET_OUT_TESTCASES/TranscodingUidPolicy_TestAppB/arm64/TranscodingUidPolicy_TestAppB.apk
+adb install -t -r -g -d $ANDROID_TARGET_OUT_TESTCASES/TranscodingUidPolicy_TestAppC/arm64/TranscodingUidPolicy_TestAppC.apk
 
-echo "========================================"
+echo "[==========] waiting for device and sync"
+adb wait-for-device remount && adb sync
 
-echo "testing mediatranscodingservice"
-adb shell /data/nativetest64/mediatranscodingservice_tests/mediatranscodingservice_tests
+echo "[==========] running simulated tests"
+adb shell setprop debug.transcoding.simulated_transcoder true
+adb shell kill -9 `pid media.transcoding`
+#adb shell /data/nativetest64/mediatranscodingservice_simulated_tests/mediatranscodingservice_simulated_tests
+adb shell /data/nativetest/mediatranscodingservice_simulated_tests/mediatranscodingservice_simulated_tests
+
+echo "[==========] running real tests"
+adb shell setprop debug.transcoding.simulated_transcoder false
+adb shell kill -9 `pid media.transcoding`
+#adb shell /data/nativetest64/mediatranscodingservice_real_tests/mediatranscodingservice_real_tests
+adb shell /data/nativetest/mediatranscodingservice_real_tests/mediatranscodingservice_real_tests
+
+echo "[==========] running resource tests"
+adb shell kill -9 `pid media.transcoding`
+#adb shell /data/nativetest64/mediatranscodingservice_resource_tests/mediatranscodingservice_resource_tests
+adb shell /data/nativetest/mediatranscodingservice_resource_tests/mediatranscodingservice_resource_tests
+
+echo "[==========] removing debug properties"
+adb shell setprop debug.transcoding.simulated_transcoder \"\"
+adb shell kill -9 `pid media.transcoding`
diff --git a/services/mediatranscoding/tests/mediatranscodingservice_real_tests.cpp b/services/mediatranscoding/tests/mediatranscodingservice_real_tests.cpp
new file mode 100644
index 0000000..0550d77
--- /dev/null
+++ b/services/mediatranscoding/tests/mediatranscodingservice_real_tests.cpp
@@ -0,0 +1,305 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Unit Test for MediaTranscodingService.
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaTranscodingServiceRealTest"
+
+#include "MediaTranscodingServiceTestHelper.h"
+
+/*
+ * Tests media transcoding service with real transcoder.
+ *
+ * Uses the same test assets as the MediaTranscoder unit tests. Before running the test,
+ * please make sure to push the test assets to /data/local/tmp:
+ *
+ * adb push $TOP/frameworks/av/media/libmediatranscoding/transcoder/tests/assets /data/local/tmp/TranscodingTestAssets
+ */
+namespace android {
+
+namespace media {
+
+constexpr int64_t kPaddingUs = 400000;
+constexpr int64_t kSessionWithPaddingUs = 10000000 + kPaddingUs;
+constexpr int32_t kBitRate = 8 * 1000 * 1000;  // 8 Mbps
+
+constexpr const char* kShortSrcPath =
+        "/data/local/tmp/TranscodingTestAssets/cubicle_avc_480x240_aac_24KHz.mp4";
+constexpr const char* kLongSrcPath = "/data/local/tmp/TranscodingTestAssets/longtest_15s.mp4";
+
+#define OUTPATH(name) "/data/local/tmp/MediaTranscodingService_" #name ".MP4"
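+// For example, OUTPATH(TestPassthru) expands to "/data/local/tmp/MediaTranscodingService_TestPassthru.MP4".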
+
+class MediaTranscodingServiceRealTest : public MediaTranscodingServiceTestBase {
+public:
+    MediaTranscodingServiceRealTest() { ALOGI("MediaTranscodingServiceRealTest created"); }
+
+    virtual ~MediaTranscodingServiceRealTest() {
+        ALOGI("MediaTranscodingServiceRealTest destroyed");
+    }
+};
+
+TEST_F(MediaTranscodingServiceRealTest, TestInvalidSource) {
+    registerMultipleClients();
+
+    const char* srcPath = "bad_file_uri";
+    const char* dstPath = OUTPATH(TestInvalidSource);
+    deleteFile(dstPath);
+
+    // Submit one session.
+    EXPECT_TRUE(
+            mClient1->submit(0, srcPath, dstPath, TranscodingSessionPriority::kNormal, kBitRate));
+
+    // Check expected error.
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Failed(CLIENT(1), 0));
+    EXPECT_EQ(mClient1->getLastError(), TranscodingErrorCode::kErrorIO);
+
+    unregisterMultipleClients();
+}
+
+TEST_F(MediaTranscodingServiceRealTest, TestPassthru) {
+    registerMultipleClients();
+
+    const char* dstPath = OUTPATH(TestPassthru);
+    deleteFile(dstPath);
+
+    // Submit one session.
+    EXPECT_TRUE(mClient1->submit(0, kShortSrcPath, dstPath));
+
+    // Wait for session to finish.
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+    EXPECT_EQ(mClient1->pop(kSessionWithPaddingUs), EventTracker::Finished(CLIENT(1), 0));
+
+    unregisterMultipleClients();
+}
+
+TEST_F(MediaTranscodingServiceRealTest, TestTranscodeVideo) {
+    registerMultipleClients();
+
+    const char* dstPath = OUTPATH(TestTranscodeVideo);
+    deleteFile(dstPath);
+
+    // Submit one session.
+    EXPECT_TRUE(mClient1->submit(0, kShortSrcPath, dstPath, TranscodingSessionPriority::kNormal,
+                                 kBitRate));
+
+    // Wait for session to finish.
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+    EXPECT_EQ(mClient1->pop(kSessionWithPaddingUs), EventTracker::Finished(CLIENT(1), 0));
+
+    unregisterMultipleClients();
+}
+
+TEST_F(MediaTranscodingServiceRealTest, TestTranscodeVideoProgress) {
+    registerMultipleClients();
+
+    const char* dstPath = OUTPATH(TestTranscodeVideoProgress);
+    deleteFile(dstPath);
+
+    // Submit one session.
+    EXPECT_TRUE(mClient1->submit(0, kLongSrcPath, dstPath, TranscodingSessionPriority::kNormal,
+                                 kBitRate));
+
+    // Wait for session to finish.
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+    EXPECT_EQ(mClient1->pop(kSessionWithPaddingUs), EventTracker::Finished(CLIENT(1), 0));
+
+    // Check that progress update messages are received. For this clip (around 15 seconds long),
+    // expect at least 10 updates, and the last update should be 100.
+    int lastProgress;
+    EXPECT_GE(mClient1->getUpdateCount(&lastProgress), 10);
+    EXPECT_EQ(lastProgress, 100);
+
+    unregisterMultipleClients();
+}
+
+/*
+ * Test cancel immediately after start.
+ */
+TEST_F(MediaTranscodingServiceRealTest, TestCancelImmediately) {
+    registerMultipleClients();
+
+    const char* srcPath0 = kLongSrcPath;
+    const char* srcPath1 = kShortSrcPath;
+    const char* dstPath0 = OUTPATH(TestCancelImmediately_Session0);
+    const char* dstPath1 = OUTPATH(TestCancelImmediately_Session1);
+
+    deleteFile(dstPath0);
+    deleteFile(dstPath1);
+    // Submit one session, should start immediately.
+    EXPECT_TRUE(
+            mClient1->submit(0, srcPath0, dstPath0, TranscodingSessionPriority::kNormal, kBitRate));
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+    EXPECT_TRUE(mClient1->getSession(0, srcPath0, dstPath0));
+
+    // Test cancel session immediately, getSession should fail after cancel.
+    EXPECT_TRUE(mClient1->cancel(0));
+    EXPECT_TRUE(mClient1->getSession<fail>(0, "", ""));
+
+    // Submit new session, new session should start immediately and finish.
+    EXPECT_TRUE(
+            mClient1->submit(1, srcPath1, dstPath1, TranscodingSessionPriority::kNormal, kBitRate));
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 1));
+    EXPECT_EQ(mClient1->pop(kSessionWithPaddingUs), EventTracker::Finished(CLIENT(1), 1));
+
+    unregisterMultipleClients();
+}
+
+/*
+ * Test cancel in the middle of transcoding.
+ */
+TEST_F(MediaTranscodingServiceRealTest, TestCancelWhileRunning) {
+    registerMultipleClients();
+
+    const char* srcPath0 = kLongSrcPath;
+    const char* srcPath1 = kShortSrcPath;
+    const char* dstPath0 = OUTPATH(TestCancelWhileRunning_Session0);
+    const char* dstPath1 = OUTPATH(TestCancelWhileRunning_Session1);
+
+    deleteFile(dstPath0);
+    deleteFile(dstPath1);
+    // Submit two sessions, session 0 should start immediately, session 1 should be queued.
+    EXPECT_TRUE(
+            mClient1->submit(0, srcPath0, dstPath0, TranscodingSessionPriority::kNormal, kBitRate));
+    EXPECT_TRUE(
+            mClient1->submit(1, srcPath1, dstPath1, TranscodingSessionPriority::kNormal, kBitRate));
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+    EXPECT_TRUE(mClient1->getSession(0, srcPath0, dstPath0));
+    EXPECT_TRUE(mClient1->getSession(1, srcPath1, dstPath1));
+
+    // Session 0 (longtest) shouldn't finish in 1 second.
+    EXPECT_EQ(mClient1->pop(1000000), EventTracker::NoEvent);
+
+    // Now cancel session 0. Session 1 should start immediately and finish.
+    EXPECT_TRUE(mClient1->cancel(0));
+    EXPECT_TRUE(mClient1->getSession<fail>(0, "", ""));
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 1));
+    EXPECT_EQ(mClient1->pop(kSessionWithPaddingUs), EventTracker::Finished(CLIENT(1), 1));
+
+    unregisterMultipleClients();
+}
+
+TEST_F(MediaTranscodingServiceRealTest, TestPauseResumeSingleClient) {
+    registerMultipleClients();
+
+    const char* srcPath0 = kLongSrcPath;
+    const char* srcPath1 = kShortSrcPath;
+    const char* dstPath0 = OUTPATH(TestPauseResumeSingleClient_Session0);
+    const char* dstPath1 = OUTPATH(TestPauseResumeSingleClient_Session1);
+    deleteFile(dstPath0);
+    deleteFile(dstPath1);
+
+    // Submit one offline session, should start immediately.
+    EXPECT_TRUE(mClient1->submit(0, srcPath0, dstPath0, TranscodingSessionPriority::kUnspecified,
+                                 kBitRate));
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+    // Test getSession after the session starts.
+    EXPECT_TRUE(mClient1->getSession(0, srcPath0, dstPath0));
+
+    // Submit one realtime session.
+    EXPECT_TRUE(
+            mClient1->submit(1, srcPath1, dstPath1, TranscodingSessionPriority::kNormal, kBitRate));
+
+    // Offline session should pause.
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Pause(CLIENT(1), 0));
+    EXPECT_TRUE(mClient1->getSession(0, srcPath0, dstPath0));
+
+    // Realtime session should start immediately, and run to finish.
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 1));
+    EXPECT_EQ(mClient1->pop(kSessionWithPaddingUs), EventTracker::Finished(CLIENT(1), 1));
+
+    // Test get session after finish fails.
+    EXPECT_TRUE(mClient1->getSession<fail>(1, "", ""));
+
+    // Then offline session should resume.
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Resume(CLIENT(1), 0));
+    // Test get session after resume.
+    EXPECT_TRUE(mClient1->getSession(0, srcPath0, dstPath0));
+
+    // Offline session should finish.
+    EXPECT_EQ(mClient1->pop(kSessionWithPaddingUs), EventTracker::Finished(CLIENT(1), 0));
+    // Test get session after finish fails.
+    EXPECT_TRUE(mClient1->getSession<fail>(0, "", ""));
+
+    unregisterMultipleClients();
+}
+
+/*
+ * Basic test for pause/resume with two clients, with one session each.
+ * Top app's session should preempt the other app's session.
+ */
+TEST_F(MediaTranscodingServiceRealTest, TestPauseResumeMultiClients) {
+    ALOGD("TestPauseResumeMultiClients starting...");
+
+    EXPECT_TRUE(ShellHelper::RunCmd("input keyevent KEYCODE_WAKEUP"));
+    EXPECT_TRUE(ShellHelper::RunCmd("wm dismiss-keyguard"));
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageA));
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageB));
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageC));
+
+    registerMultipleClients();
+
+    const char* srcPath0 = kLongSrcPath;
+    const char* srcPath1 = kShortSrcPath;
+    const char* dstPath0 = OUTPATH(TestPauseResumeMultiClients_Client0);
+    const char* dstPath1 = OUTPATH(TestPauseResumeMultiClients_Client1);
+    deleteFile(dstPath0);
+    deleteFile(dstPath1);
+
+    ALOGD("Moving app A to top...");
+    EXPECT_TRUE(ShellHelper::Start(kClientPackageA, kTestActivityName));
+
+    // Submit session to Client1.
+    ALOGD("Submitting session to client1 (app A) ...");
+    EXPECT_TRUE(
+            mClient1->submit(0, srcPath0, dstPath0, TranscodingSessionPriority::kNormal, kBitRate));
+
+    // Client1's session should start immediately.
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+
+    ALOGD("Moving app B to top...");
+    EXPECT_TRUE(ShellHelper::Start(kClientPackageB, kTestActivityName));
+
+    // Client1's session should continue to run, since Client2 (app B) doesn't have any session.
+    EXPECT_EQ(mClient1->pop(1000000), EventTracker::NoEvent);
+
+    // Submit session to Client2.
+    ALOGD("Submitting session to client2 (app B) ...");
+    EXPECT_TRUE(
+            mClient2->submit(0, srcPath1, dstPath1, TranscodingSessionPriority::kNormal, kBitRate));
+
+    // Client1's session should pause, client2's session should start.
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Pause(CLIENT(1), 0));
+    EXPECT_EQ(mClient2->pop(kPaddingUs), EventTracker::Start(CLIENT(2), 0));
+
+    // Client2's session should finish, then Client1's session should resume.
+    EXPECT_EQ(mClient2->pop(kSessionWithPaddingUs), EventTracker::Finished(CLIENT(2), 0));
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Resume(CLIENT(1), 0));
+
+    // Client1's session should finish.
+    EXPECT_EQ(mClient1->pop(kSessionWithPaddingUs), EventTracker::Finished(CLIENT(1), 0));
+
+    unregisterMultipleClients();
+
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageA));
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageB));
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageC));
+
+    ALOGD("TestPauseResumeMultiClients finished.");
+}
+
+}  // namespace media
+}  // namespace android
diff --git a/services/mediatranscoding/tests/mediatranscodingservice_resource_tests.cpp b/services/mediatranscoding/tests/mediatranscodingservice_resource_tests.cpp
new file mode 100644
index 0000000..bf99efc
--- /dev/null
+++ b/services/mediatranscoding/tests/mediatranscodingservice_resource_tests.cpp
@@ -0,0 +1,135 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Unit Test for MediaTranscodingService.
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaTranscodingServiceResourceTest"
+
+#include "MediaTranscodingServiceTestHelper.h"
+
+/*
+ * Tests media transcoding service resource management using the real transcoder.
+ *
+ * Uses the same test assets as the MediaTranscoder unit tests. Before running the test,
+ * please make sure to push the test assets to /data/local/tmp:
+ *
+ * adb push $TOP/frameworks/av/media/libmediatranscoding/transcoder/tests/assets /data/local/tmp/TranscodingTestAssets
+ */
+namespace android {
+
+namespace media {
+
+constexpr int64_t kPaddingUs = 400000;
+constexpr int32_t kBitRate = 8 * 1000 * 1000;  // 8 Mbps
+
+constexpr const char* kLongSrcPath = "/data/local/tmp/TranscodingTestAssets/longtest_15s.mp4";
+
+constexpr const char* kResourcePolicyTestActivity =
+        "/com.android.tests.transcoding.ResourcePolicyTestActivity";
+
+#define OUTPATH(name) "/data/local/tmp/MediaTranscodingService_" #name ".MP4"
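+// For example, OUTPATH(TestResourceLost) expands to "/data/local/tmp/MediaTranscodingService_TestResourceLost.MP4".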
+
+class MediaTranscodingServiceResourceTest : public MediaTranscodingServiceTestBase {
+public:
+    MediaTranscodingServiceResourceTest() { ALOGI("MediaTranscodingServiceResourceTest created"); }
+
+    virtual ~MediaTranscodingServiceResourceTest() {
+        ALOGI("MediaTranscodingServiceResourceTest destroyed");
+    }
+};
+
+/**
+ * Basic testing for handling resource lost.
+ *
+ * This test starts a transcoding session (a fairly long one, taking several seconds),
+ * then launches an activity that allocates video codec instances until it hits an insufficient
+ * resource error. Because the activity is running in the foreground,
+ * ResourceManager would reclaim codecs from transcoding service which should
+ * cause the session to be paused. The activity will hold the codecs for a few seconds
+ * before releasing them, and the transcoding service should be able to resume
+ * and complete the session.
+ */
+TEST_F(MediaTranscodingServiceResourceTest, TestResourceLost) {
+    ALOGD("TestResourceLost starting...");
+
+    EXPECT_TRUE(ShellHelper::RunCmd("input keyevent KEYCODE_WAKEUP"));
+    EXPECT_TRUE(ShellHelper::RunCmd("wm dismiss-keyguard"));
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageA));
+
+    registerMultipleClients();
+
+    const char* srcPath0 = kLongSrcPath;
+    const char* dstPath0 = OUTPATH(TestResourceLost);
+    deleteFile(dstPath0);
+
+    ALOGD("Moving app A to top...");
+    EXPECT_TRUE(ShellHelper::Start(kClientPackageA, kTestActivityName));
+
+    // Submit session to Client1.
+    ALOGD("Submitting session to client1 (app A) ...");
+    EXPECT_TRUE(
+            mClient1->submit(0, srcPath0, dstPath0, TranscodingSessionPriority::kNormal, kBitRate));
+
+    // Client1's session should start immediately.
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+
+    // Launch ResourcePolicyTestActivity, which tries to allocate up to 32 codec
+    // instances; on most devices this should trigger an insufficient-resource error.
+    // (Note that it's possible the device supports a very high number of codec
+    // instances, in which case we simply require that the session completes.)
+    ALOGD("Launch ResourcePolicyTestActivity...");
+    EXPECT_TRUE(ShellHelper::Start(kClientPackageA, kResourcePolicyTestActivity));
+
+    // The basic requirement is that the session should complete. Wait for the finish
+    // event and pop all events received so far.
+    std::list<EventTracker::Event> events;
+    EXPECT_TRUE(mClient1->waitForSpecificEventAndPop(EventTracker::Finished(CLIENT(1), 0), &events,
+                                                     15000000));
+
+    // If there is only 1 event, it must be the finish event (otherwise waitForSpecificEventAndPop
+    // wouldn't have popped anything), and we're ok.
+    //
+    // TODO: If there is only 1 event (finish), and no pause/resume happened, we need
+    // to verify that the ResourcePolicyTestActivity actually was able to allocate
+    // all 32 instances without hitting insufficient resources. Otherwise, it could
+    // be that ResourceManager was not able to reclaim codecs from the transcoding
+    // service at all, which means the resource management is broken.
+    if (events.size() > 1) {
+        EXPECT_TRUE(events.size() >= 3);
+        size_t i = 0;
+        for (auto& event : events) {
+            if (i == 0) {
+                EXPECT_EQ(event, EventTracker::Pause(CLIENT(1), 0));
+            } else if (i == events.size() - 2) {
+                EXPECT_EQ(event, EventTracker::Resume(CLIENT(1), 0));
+            } else if (i == events.size() - 1) {
+                EXPECT_EQ(event, EventTracker::Finished(CLIENT(1), 0));
+            } else {
+                EXPECT_TRUE(event == EventTracker::Pause(CLIENT(1), 0) ||
+                            event == EventTracker::Resume(CLIENT(1), 0));
+            }
+            i++;
+        }
+    }
+
+    unregisterMultipleClients();
+
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageA));
+}
+
+}  // namespace media
+}  // namespace android
diff --git a/services/mediatranscoding/tests/mediatranscodingservice_simulated_tests.cpp b/services/mediatranscoding/tests/mediatranscodingservice_simulated_tests.cpp
new file mode 100644
index 0000000..7dfda44
--- /dev/null
+++ b/services/mediatranscoding/tests/mediatranscodingservice_simulated_tests.cpp
@@ -0,0 +1,358 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Unit Test for MediaTranscodingService.
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaTranscodingServiceSimulatedTest"
+
+#include <aidl/android/media/BnTranscodingClientCallback.h>
+#include <aidl/android/media/IMediaTranscodingService.h>
+#include <aidl/android/media/ITranscodingClient.h>
+#include <aidl/android/media/ITranscodingClientCallback.h>
+#include <aidl/android/media/TranscodingRequestParcel.h>
+#include <aidl/android/media/TranscodingSessionParcel.h>
+#include <aidl/android/media/TranscodingSessionPriority.h>
+#include <android-base/logging.h>
+#include <android/binder_manager.h>
+#include <android/binder_process.h>
+#include <binder/PermissionController.h>
+#include <cutils/multiuser.h>
+#include <gtest/gtest.h>
+#include <utils/Log.h>
+
+#include <iostream>
+#include <list>
+
+#include "MediaTranscodingServiceTestHelper.h"
+#include "SimulatedTranscoder.h"
+
+namespace android {
+
+namespace media {
+
+// Note that -1 is valid and means using the calling pid/uid for the service, but only a privileged
+// caller may use them. This test is not a privileged caller.
+constexpr int32_t kInvalidClientPid = -5;
+constexpr int32_t kInvalidClientUid = -10;
+constexpr const char* kInvalidClientName = "";
+constexpr const char* kInvalidClientOpPackageName = "";
+
+constexpr int64_t kPaddingUs = 1000000;
+constexpr int64_t kSessionWithPaddingUs = SimulatedTranscoder::kSessionDurationUs + kPaddingUs;
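+// Maximum wait for a simulated session to finish: its fixed duration plus padding.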
+
+constexpr const char* kClientOpPackageName = "TestClientPackage";
+
+class MediaTranscodingServiceSimulatedTest : public MediaTranscodingServiceTestBase {
+public:
+    MediaTranscodingServiceSimulatedTest() { ALOGI("MediaTranscodingServiceSimulatedTest created"); }
+
+    virtual ~MediaTranscodingServiceSimulatedTest() {
+        ALOGI("MediaTranscodingServiceSimulatedTest destroyed");
+    }
+};
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestRegisterNullClient) {
+    std::shared_ptr<ITranscodingClient> client;
+
+    // Register the client with null callback.
+    Status status = mService->registerClient(nullptr, kClientName, kClientOpPackageName, &client);
+    EXPECT_FALSE(status.isOk());
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestRegisterClientWithInvalidClientName) {
+    std::shared_ptr<ITranscodingClient> client;
+
+    // Register the client with the service.
+    Status status = mService->registerClient(mClient1, kInvalidClientName,
+                                             kInvalidClientOpPackageName, &client);
+    EXPECT_FALSE(status.isOk());
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestRegisterClientWithInvalidClientPackageName) {
+    std::shared_ptr<ITranscodingClient> client;
+
+    // Register the client with the service.
+    Status status =
+            mService->registerClient(mClient1, kClientName, kInvalidClientOpPackageName, &client);
+    EXPECT_FALSE(status.isOk());
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestRegisterOneClient) {
+    std::shared_ptr<ITranscodingClient> client;
+
+    Status status = mService->registerClient(mClient1, kClientName, kClientOpPackageName, &client);
+    EXPECT_TRUE(status.isOk());
+
+    // Validate the client.
+    EXPECT_TRUE(client != nullptr);
+
+    // Check the number of Clients.
+    int32_t numOfClients;
+    status = mService->getNumOfClients(&numOfClients);
+    EXPECT_TRUE(status.isOk());
+    EXPECT_GE(numOfClients, 1);
+
+    // Unregister the client.
+    status = client->unregister();
+    EXPECT_TRUE(status.isOk());
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestRegisterClientTwice) {
+    std::shared_ptr<ITranscodingClient> client;
+
+    Status status = mService->registerClient(mClient1, kClientName, kClientOpPackageName, &client);
+    EXPECT_TRUE(status.isOk());
+
+    // Validate the client.
+    EXPECT_TRUE(client != nullptr);
+
+    // Register the client again and expect failure.
+    std::shared_ptr<ITranscodingClient> client1;
+    status = mService->registerClient(mClient1, kClientName, kClientOpPackageName, &client1);
+    EXPECT_FALSE(status.isOk());
+
+    // Unregister the client.
+    status = client->unregister();
+    EXPECT_TRUE(status.isOk());
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestRegisterMultipleClients) {
+    registerMultipleClients();
+    unregisterMultipleClients();
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestSessionIdIndependence) {
+    registerMultipleClients();
+
+    // Submit 2 requests on client1 first.
+    EXPECT_TRUE(mClient1->submit(0, "test_source_file", "test_destination_file"));
+    EXPECT_TRUE(mClient1->submit(1, "test_source_file", "test_destination_file"));
+
+    // Submit 2 requests on client2, sessionId should be independent for each client.
+    EXPECT_TRUE(mClient2->submit(0, "test_source_file", "test_destination_file"));
+    EXPECT_TRUE(mClient2->submit(1, "test_source_file", "test_destination_file"));
+
+    // Cancel all sessions.
+    EXPECT_TRUE(mClient1->cancel(0));
+    EXPECT_TRUE(mClient1->cancel(1));
+    EXPECT_TRUE(mClient2->cancel(0));
+    EXPECT_TRUE(mClient2->cancel(1));
+
+    unregisterMultipleClients();
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestSubmitCancelSessions) {
+    registerMultipleClients();
+
+    // Test sessionId assignment.
+    EXPECT_TRUE(mClient1->submit(0, "test_source_file_0", "test_destination_file"));
+    EXPECT_TRUE(mClient1->submit(1, "test_source_file_1", "test_destination_file"));
+    EXPECT_TRUE(mClient1->submit(2, "test_source_file_2", "test_destination_file"));
+
+    // Test submit bad request (no valid sourceFilePath) fails.
+    EXPECT_TRUE(mClient1->submit<fail>(0, "", ""));
+
+    // Test submit bad request (invalid pid/uid) fails.
+    EXPECT_TRUE(mClient1->submit<fail>(0, "src", "dst", TranscodingSessionPriority::kNormal,
+                                       1000000, kInvalidClientPid, kInvalidClientUid));
+
+    // Test cancel non-existent session fails.
+    EXPECT_TRUE(mClient1->cancel<fail>(100));
+
+    // Session 0 should start immediately and finish in 2 seconds, followed by Session 1 start.
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+    EXPECT_EQ(mClient1->pop(kSessionWithPaddingUs), EventTracker::Finished(CLIENT(1), 0));
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 1));
+
+    // Test cancel valid sessionId in random order.
+    // Test cancel finished session fails.
+    EXPECT_TRUE(mClient1->cancel(2));
+    EXPECT_TRUE(mClient1->cancel<fail>(0));
+    EXPECT_TRUE(mClient1->cancel(1));
+
+    // Test cancel session again fails.
+    EXPECT_TRUE(mClient1->cancel<fail>(1));
+
+    // Test no more events arriving after cancel.
+    EXPECT_EQ(mClient1->pop(kSessionWithPaddingUs), EventTracker::NoEvent);
+
+    unregisterMultipleClients();
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestGetSessions) {
+    registerMultipleClients();
+
+    // Submit 3 requests.
+    EXPECT_TRUE(mClient1->submit(0, "test_source_file_0", "test_destination_file_0"));
+    EXPECT_TRUE(mClient1->submit(1, "test_source_file_1", "test_destination_file_1"));
+    EXPECT_TRUE(mClient1->submit(2, "test_source_file_2", "test_destination_file_2"));
+
+    // Test get sessions by id.
+    EXPECT_TRUE(mClient1->getSession(2, "test_source_file_2", "test_destination_file_2"));
+    EXPECT_TRUE(mClient1->getSession(1, "test_source_file_1", "test_destination_file_1"));
+    EXPECT_TRUE(mClient1->getSession(0, "test_source_file_0", "test_destination_file_0"));
+
+    // Test get session by invalid id fails.
+    EXPECT_TRUE(mClient1->getSession<fail>(100, "", ""));
+    EXPECT_TRUE(mClient1->getSession<fail>(-1, "", ""));
+
+    // Test get session after cancel fails.
+    EXPECT_TRUE(mClient1->cancel(2));
+    EXPECT_TRUE(mClient1->getSession<fail>(2, "", ""));
+
+    // Session 0 should start immediately and finish in 2 seconds, followed by Session 1 start.
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+    EXPECT_EQ(mClient1->pop(kSessionWithPaddingUs), EventTracker::Finished(CLIENT(1), 0));
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 1));
+
+    // Test get session after finish fails.
+    EXPECT_TRUE(mClient1->getSession<fail>(0, "", ""));
+
+    // Test get the remaining session 1.
+    EXPECT_TRUE(mClient1->getSession(1, "test_source_file_1", "test_destination_file_1"));
+
+    // Cancel remaining session 1.
+    EXPECT_TRUE(mClient1->cancel(1));
+
+    unregisterMultipleClients();
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestSubmitCancelWithOfflineSessions) {
+    registerMultipleClients();
+
+    // Submit some offline sessions first.
+    EXPECT_TRUE(mClient1->submit(0, "test_source_file_0", "test_destination_file_0",
+                                 TranscodingSessionPriority::kUnspecified));
+    EXPECT_TRUE(mClient1->submit(1, "test_source_file_1", "test_destination_file_1",
+                                 TranscodingSessionPriority::kUnspecified));
+
+    // Session 0 should start immediately.
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+
+    // Submit more real-time sessions.
+    EXPECT_TRUE(mClient1->submit(2, "test_source_file_2", "test_destination_file_2"));
+    EXPECT_TRUE(mClient1->submit(3, "test_source_file_3", "test_destination_file_3"));
+
+    // Session 0 should pause immediately and session 2 should start.
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Pause(CLIENT(1), 0));
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 2));
+
+    // Session 2 should finish in 2 seconds and session 3 should start.
+    EXPECT_EQ(mClient1->pop(kSessionWithPaddingUs), EventTracker::Finished(CLIENT(1), 2));
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 3));
+
+    // Cancel session 3 now
+    EXPECT_TRUE(mClient1->cancel(3));
+
+    // Session 0 should resume and finish in 2 seconds, followed by session 1 start.
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Resume(CLIENT(1), 0));
+    EXPECT_EQ(mClient1->pop(kSessionWithPaddingUs), EventTracker::Finished(CLIENT(1), 0));
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 1));
+
+    // Cancel remaining session 1.
+    EXPECT_TRUE(mClient1->cancel(1));
+
+    unregisterMultipleClients();
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestClientUseAfterUnregister) {
+    std::shared_ptr<ITranscodingClient> client;
+
+    // Register a client, then unregister.
+    Status status = mService->registerClient(mClient1, kClientName, kClientOpPackageName, &client);
+    EXPECT_TRUE(status.isOk());
+
+    status = client->unregister();
+    EXPECT_TRUE(status.isOk());
+
+    // Test various operations on the client, should fail with ERROR_DISCONNECTED.
+    TranscodingSessionParcel session;
+    bool result;
+    status = client->getSessionWithId(0, &session, &result);
+    EXPECT_EQ(status.getServiceSpecificError(), IMediaTranscodingService::ERROR_DISCONNECTED);
+
+    status = client->cancelSession(0, &result);
+    EXPECT_EQ(status.getServiceSpecificError(), IMediaTranscodingService::ERROR_DISCONNECTED);
+
+    TranscodingRequestParcel request;
+    status = client->submitRequest(request, &session, &result);
+    EXPECT_EQ(status.getServiceSpecificError(), IMediaTranscodingService::ERROR_DISCONNECTED);
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestTranscodingUidPolicy) {
+    ALOGD("TestTranscodingUidPolicy starting...");
+
+    EXPECT_TRUE(ShellHelper::RunCmd("input keyevent KEYCODE_WAKEUP"));
+    EXPECT_TRUE(ShellHelper::RunCmd("wm dismiss-keyguard"));
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageA));
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageB));
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageC));
+
+    registerMultipleClients();
+
+    ALOGD("Moving app A to top...");
+    EXPECT_TRUE(ShellHelper::Start(kClientPackageA, kTestActivityName));
+
+    // Submit 3 requests.
+    ALOGD("Submitting session to client1 (app A) ...");
+    EXPECT_TRUE(mClient1->submit(0, "test_source_file_0", "test_destination_file_0"));
+    EXPECT_TRUE(mClient1->submit(1, "test_source_file_1", "test_destination_file_1"));
+    EXPECT_TRUE(mClient1->submit(2, "test_source_file_2", "test_destination_file_2"));
+
+    // Session 0 should start immediately.
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+
+    ALOGD("Moving app B to top...");
+    EXPECT_TRUE(ShellHelper::Start(kClientPackageB, kTestActivityName));
+
+    // Session 0 should continue and finish in 2 seconds, then session 1 should start.
+    EXPECT_EQ(mClient1->pop(kSessionWithPaddingUs), EventTracker::Finished(CLIENT(1), 0));
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 1));
+
+    ALOGD("Submitting session to client2 (app B) ...");
+    EXPECT_TRUE(mClient2->submit(0, "test_source_file_0", "test_destination_file_0"));
+
+    // Client1's session should pause, client2's session should start.
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Pause(CLIENT(1), 1));
+    EXPECT_EQ(mClient2->pop(kPaddingUs), EventTracker::Start(CLIENT(2), 0));
+
+    ALOGD("Moving app A back to top...");
+    EXPECT_TRUE(ShellHelper::Start(kClientPackageA, kTestActivityName));
+
+    // Client2's session should pause, client1's session 1 should resume.
+    EXPECT_EQ(mClient2->pop(kPaddingUs), EventTracker::Pause(CLIENT(2), 0));
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Resume(CLIENT(1), 1));
+
+    // Client1's session 1 should finish in 2 seconds, then its session 2 should start.
+    EXPECT_EQ(mClient1->pop(kSessionWithPaddingUs), EventTracker::Finished(CLIENT(1), 1));
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 2));
+
+    // After client1's sessions finish, client2's session should resume.
+    EXPECT_EQ(mClient1->pop(kSessionWithPaddingUs), EventTracker::Finished(CLIENT(1), 2));
+    EXPECT_EQ(mClient2->pop(kPaddingUs), EventTracker::Resume(CLIENT(2), 0));
+
+    unregisterMultipleClients();
+
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageA));
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageB));
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageC));
+
+    ALOGD("TestTranscodingUidPolicy finished.");
+}
+
+}  // namespace media
+}  // namespace android
diff --git a/services/mediatranscoding/tests/mediatranscodingservice_tests.cpp b/services/mediatranscoding/tests/mediatranscodingservice_tests.cpp
deleted file mode 100644
index 5a791fe..0000000
--- a/services/mediatranscoding/tests/mediatranscodingservice_tests.cpp
+++ /dev/null
@@ -1,239 +0,0 @@
-/*
- * Copyright (C) 2019 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-// Unit Test for MediaTranscoding Service.
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "MediaTranscodingServiceTest"
-
-#include <aidl/android/media/BnTranscodingServiceClient.h>
-#include <aidl/android/media/IMediaTranscodingService.h>
-#include <aidl/android/media/ITranscodingServiceClient.h>
-#include <android-base/logging.h>
-#include <android-base/unique_fd.h>
-#include <android/binder_ibinder_jni.h>
-#include <android/binder_manager.h>
-#include <android/binder_process.h>
-#include <cutils/ashmem.h>
-#include <gtest/gtest.h>
-#include <stdlib.h>
-#include <sys/mman.h>
-#include <utils/Log.h>
-
-namespace android {
-
-namespace media {
-
-using Status = ::ndk::ScopedAStatus;
-using aidl::android::media::BnTranscodingServiceClient;
-using aidl::android::media::IMediaTranscodingService;
-using aidl::android::media::ITranscodingServiceClient;
-
-constexpr int32_t kInvalidClientId = -5;
-
-// Note that -1 is valid and means using calling pid/uid for the service. But only privilege caller could
-// use them. This test is not a privilege caller.
-constexpr int32_t kInvalidClientPid = -5;
-constexpr int32_t kInvalidClientUid = -5;
-constexpr const char* kInvalidClientOpPackageName = "";
-
-constexpr int32_t kClientUseCallingPid = -1;
-constexpr int32_t kClientUseCallingUid = -1;
-constexpr const char* kClientOpPackageName = "TestClient";
-
-class MediaTranscodingServiceTest : public ::testing::Test {
-public:
-    MediaTranscodingServiceTest() { ALOGD("MediaTranscodingServiceTest created"); }
-
-    void SetUp() override {
-        ::ndk::SpAIBinder binder(AServiceManager_getService("media.transcoding"));
-        mService = IMediaTranscodingService::fromBinder(binder);
-        if (mService == nullptr) {
-            ALOGE("Failed to connect to the media.trascoding service.");
-            return;
-        }
-    }
-
-    ~MediaTranscodingServiceTest() { ALOGD("MediaTranscodingingServiceTest destroyed"); }
-
-    std::shared_ptr<IMediaTranscodingService> mService = nullptr;
-};
-
-struct TestClient : public BnTranscodingServiceClient {
-    TestClient(const std::shared_ptr<IMediaTranscodingService>& service) : mService(service) {
-        ALOGD("TestClient Created");
-    }
-
-    Status getName(std::string* _aidl_return) override {
-        *_aidl_return = "test_client";
-        return Status::ok();
-    }
-
-    Status onTranscodingFinished(
-            int32_t /* in_jobId */,
-            const ::aidl::android::media::TranscodingResultParcel& /* in_result */) override {
-        return Status::ok();
-    }
-
-    Status onTranscodingFailed(
-            int32_t /* in_jobId */,
-            ::aidl::android::media::TranscodingErrorCode /*in_errorCode */) override {
-        return Status::ok();
-    }
-
-    Status onAwaitNumberOfJobsChanged(int32_t /* in_jobId */, int32_t /* in_oldAwaitNumber */,
-                                      int32_t /* in_newAwaitNumber */) override {
-        return Status::ok();
-    }
-
-    Status onProgressUpdate(int32_t /* in_jobId */, int32_t /* in_progress */) override {
-        return Status::ok();
-    }
-
-    virtual ~TestClient() { ALOGI("TestClient destroyed"); };
-
-private:
-    std::shared_ptr<IMediaTranscodingService> mService;
-};
-
-TEST_F(MediaTranscodingServiceTest, TestRegisterNullClient) {
-    std::shared_ptr<ITranscodingServiceClient> client = nullptr;
-    int32_t clientId = 0;
-    Status status = mService->registerClient(client, kClientOpPackageName, kClientUseCallingUid,
-                                             kClientUseCallingPid, &clientId);
-    EXPECT_FALSE(status.isOk());
-}
-
-TEST_F(MediaTranscodingServiceTest, TestRegisterClientWithInvalidClientPid) {
-    std::shared_ptr<ITranscodingServiceClient> client =
-            ::ndk::SharedRefBase::make<TestClient>(mService);
-    EXPECT_TRUE(client != nullptr);
-
-    // Register the client with the service.
-    int32_t clientId = 0;
-    Status status = mService->registerClient(client, kClientOpPackageName, kClientUseCallingUid,
-                                             kInvalidClientPid, &clientId);
-    EXPECT_FALSE(status.isOk());
-}
-
-TEST_F(MediaTranscodingServiceTest, TestRegisterClientWithInvalidClientUid) {
-    std::shared_ptr<ITranscodingServiceClient> client =
-            ::ndk::SharedRefBase::make<TestClient>(mService);
-    EXPECT_TRUE(client != nullptr);
-
-    // Register the client with the service.
-    int32_t clientId = 0;
-    Status status = mService->registerClient(client, kClientOpPackageName, kInvalidClientUid,
-                                             kClientUseCallingPid, &clientId);
-    EXPECT_FALSE(status.isOk());
-}
-
-TEST_F(MediaTranscodingServiceTest, TestRegisterClientWithInvalidClientPackageName) {
-    std::shared_ptr<ITranscodingServiceClient> client =
-            ::ndk::SharedRefBase::make<TestClient>(mService);
-    EXPECT_TRUE(client != nullptr);
-
-    // Register the client with the service.
-    int32_t clientId = 0;
-    Status status = mService->registerClient(client, kInvalidClientOpPackageName,
-                                             kClientUseCallingUid, kClientUseCallingPid, &clientId);
-    EXPECT_FALSE(status.isOk());
-}
-
-TEST_F(MediaTranscodingServiceTest, TestRegisterOneClient) {
-    std::shared_ptr<ITranscodingServiceClient> client =
-            ::ndk::SharedRefBase::make<TestClient>(mService);
-    EXPECT_TRUE(client != nullptr);
-
-    // Register the client with the service.
-    int32_t clientId = 0;
-    Status status = mService->registerClient(client, kClientOpPackageName, kClientUseCallingPid,
-                                             kClientUseCallingUid, &clientId);
-    ALOGD("client id is %d", clientId);
-    EXPECT_TRUE(status.isOk());
-
-    // Validate the clientId.
-    EXPECT_TRUE(clientId > 0);
-
-    // Check the number of Clients.
-    int32_t numOfClients;
-    status = mService->getNumOfClients(&numOfClients);
-    EXPECT_TRUE(status.isOk());
-    EXPECT_EQ(1, numOfClients);
-
-    // Unregister the client.
-    bool res;
-    status = mService->unregisterClient(clientId, &res);
-    EXPECT_TRUE(status.isOk());
-    EXPECT_TRUE(res);
-}
-
-TEST_F(MediaTranscodingServiceTest, TestUnRegisterClientWithInvalidClientId) {
-    std::shared_ptr<ITranscodingServiceClient> client =
-            ::ndk::SharedRefBase::make<TestClient>(mService);
-    EXPECT_TRUE(client != nullptr);
-
-    // Register the client with the service.
-    int32_t clientId = 0;
-    Status status = mService->registerClient(client, kClientOpPackageName, kClientUseCallingUid,
-                                             kClientUseCallingPid, &clientId);
-    ALOGD("client id is %d", clientId);
-    EXPECT_TRUE(status.isOk());
-
-    // Validate the clientId.
-    EXPECT_TRUE(clientId > 0);
-
-    // Check the number of Clients.
-    int32_t numOfClients;
-    status = mService->getNumOfClients(&numOfClients);
-    EXPECT_TRUE(status.isOk());
-    EXPECT_EQ(1, numOfClients);
-
-    // Unregister the client with invalid ID
-    bool res;
-    mService->unregisterClient(kInvalidClientId, &res);
-    EXPECT_FALSE(res);
-
-    // Unregister the valid client.
-    mService->unregisterClient(clientId, &res);
-}
-
-TEST_F(MediaTranscodingServiceTest, TestRegisterClientTwice) {
-    std::shared_ptr<ITranscodingServiceClient> client =
-            ::ndk::SharedRefBase::make<TestClient>(mService);
-    EXPECT_TRUE(client != nullptr);
-
-    // Register the client with the service.
-    int32_t clientId = 0;
-    Status status = mService->registerClient(client, kClientOpPackageName, kClientUseCallingUid,
-                                             kClientUseCallingPid, &clientId);
-    EXPECT_TRUE(status.isOk());
-
-    // Validate the clientId.
-    EXPECT_TRUE(clientId > 0);
-
-    // Register the client again and expects failure.
-    status = mService->registerClient(client, kClientOpPackageName, kClientUseCallingUid,
-                                      kClientUseCallingPid, &clientId);
-    EXPECT_FALSE(status.isOk());
-
-    // Unregister the valid client.
-    bool res;
-    mService->unregisterClient(clientId, &res);
-}
-
-}  // namespace media
-}  // namespace android
diff --git a/services/oboeservice/AAudioClientTracker.cpp b/services/oboeservice/AAudioClientTracker.cpp
index 9d9ca63..054a896 100644
--- a/services/oboeservice/AAudioClientTracker.cpp
+++ b/services/oboeservice/AAudioClientTracker.cpp
@@ -41,7 +41,7 @@
         : Singleton<AAudioClientTracker>() {
 }
 
-std::string AAudioClientTracker::dump() const {
+std::string AAudioClientTracker::dump() const NO_THREAD_SAFETY_ANALYSIS {
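+    // Locking below is conditional (try_lock() via AAudio_tryUntilTrue()), a pattern the
+    // thread-safety analysis cannot model, hence NO_THREAD_SAFETY_ANALYSIS on the signature.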
     std::stringstream result;
     const bool isLocked = AAudio_tryUntilTrue(
             [this]()->bool { return mLock.try_lock(); } /* f */,
@@ -198,7 +198,7 @@
         for (const auto& serviceStream : streamsToClose) {
             aaudio_handle_t handle = serviceStream->getHandle();
             ALOGW("binderDied() close abandoned stream 0x%08X\n", handle);
-            aaudioService->closeStream(handle);
+            aaudioService->asAAudioServiceInterface().closeStream(handle);
         }
         // mStreams should be empty now
     }
@@ -207,7 +207,7 @@
 }
 
 
-std::string AAudioClientTracker::NotificationClient::dump() const {
+std::string AAudioClientTracker::NotificationClient::dump() const NO_THREAD_SAFETY_ANALYSIS {
     std::stringstream result;
     const bool isLocked = AAudio_tryUntilTrue(
             [this]()->bool { return mLock.try_lock(); } /* f */,
diff --git a/services/oboeservice/AAudioClientTracker.h b/services/oboeservice/AAudioClientTracker.h
index 943b809..2b38621 100644
--- a/services/oboeservice/AAudioClientTracker.h
+++ b/services/oboeservice/AAudioClientTracker.h
@@ -21,10 +21,11 @@
 #include <mutex>
 #include <set>
 
+#include <android-base/thread_annotations.h>
 #include <utils/Singleton.h>
 
 #include <aaudio/AAudio.h>
-#include "binding/IAAudioClient.h"
+#include <aaudio/IAAudioClient.h>
 #include "AAudioService.h"
 
 namespace aaudio {
@@ -46,7 +47,7 @@
      */
     std::string dump() const;
 
-    aaudio_result_t registerClient(pid_t pid, const android::sp<android::IAAudioClient>& client);
+    aaudio_result_t registerClient(pid_t pid, const android::sp<IAAudioClient>& client);
 
     void unregisterClient(pid_t pid);
 
@@ -114,10 +115,12 @@
     };
 
     // This must be called under mLock
-    android::sp<NotificationClient> getNotificationClient_l(pid_t pid);
+    android::sp<NotificationClient> getNotificationClient_l(pid_t pid)
+            REQUIRES(mLock);
 
     mutable std::mutex                               mLock;
-    std::map<pid_t, android::sp<NotificationClient>> mNotificationClients;
+    std::map<pid_t, android::sp<NotificationClient>> mNotificationClients
+            GUARDED_BY(mLock);
     android::AAudioService                          *mAAudioService = nullptr;
 };
 
diff --git a/services/oboeservice/AAudioEndpointManager.cpp b/services/oboeservice/AAudioEndpointManager.cpp
index 9f34153..407f6d5 100644
--- a/services/oboeservice/AAudioEndpointManager.cpp
+++ b/services/oboeservice/AAudioEndpointManager.cpp
@@ -43,7 +43,7 @@
         , mExclusiveStreams() {
 }
 
-std::string AAudioEndpointManager::dump() const {
+std::string AAudioEndpointManager::dump() const NO_THREAD_SAFETY_ANALYSIS {
     std::stringstream result;
     int index = 0;
 
@@ -306,6 +306,7 @@
                 mSharedStreams.end());
 
         serviceEndpoint->close();
+
         mSharedCloseCount++;
         ALOGV("%s(%p) closed for device %d",
               __func__, serviceEndpoint.get(), serviceEndpoint->getDeviceId());
diff --git a/services/oboeservice/AAudioEndpointManager.h b/services/oboeservice/AAudioEndpointManager.h
index ae776b1..b07bcef 100644
--- a/services/oboeservice/AAudioEndpointManager.h
+++ b/services/oboeservice/AAudioEndpointManager.h
@@ -20,6 +20,8 @@
 #include <map>
 #include <mutex>
 #include <sys/types.h>
+
+#include <android-base/thread_annotations.h>
 #include <utils/Singleton.h>
 
 #include "binding/AAudioServiceMessage.h"
@@ -70,10 +72,12 @@
                                               const aaudio::AAudioStreamRequest &request);
 
     android::sp<AAudioServiceEndpoint> findExclusiveEndpoint_l(
-            const AAudioStreamConfiguration& configuration);
+            const AAudioStreamConfiguration& configuration)
+            REQUIRES(mExclusiveLock);
 
     android::sp<AAudioServiceEndpointShared> findSharedEndpoint_l(
-            const AAudioStreamConfiguration& configuration);
+            const AAudioStreamConfiguration& configuration)
+            REQUIRES(mSharedLock);
 
     void closeExclusiveEndpoint(android::sp<AAudioServiceEndpoint> serviceEndpoint);
     void closeSharedEndpoint(android::sp<AAudioServiceEndpoint> serviceEndpoint);
@@ -83,23 +87,25 @@
     // Lock mSharedLock before mExclusiveLock.
     // it is OK to only lock mExclusiveLock.
     mutable std::mutex                                     mSharedLock;
-    std::vector<android::sp<AAudioServiceEndpointShared>>  mSharedStreams;
+    std::vector<android::sp<AAudioServiceEndpointShared>>  mSharedStreams
+            GUARDED_BY(mSharedLock);
 
     mutable std::mutex                                     mExclusiveLock;
-    std::vector<android::sp<AAudioServiceEndpointMMAP>>    mExclusiveStreams;
+    std::vector<android::sp<AAudioServiceEndpointMMAP>>    mExclusiveStreams
+            GUARDED_BY(mExclusiveLock);
 
-    // Modified under a lock.
-    int32_t mExclusiveSearchCount = 0; // number of times we SEARCHED for an exclusive endpoint
-    int32_t mExclusiveFoundCount  = 0; // number of times we FOUND an exclusive endpoint
-    int32_t mExclusiveOpenCount   = 0; // number of times we OPENED an exclusive endpoint
-    int32_t mExclusiveCloseCount  = 0; // number of times we CLOSED an exclusive endpoint
-    int32_t mExclusiveStolenCount = 0; // number of times we STOLE an exclusive endpoint
+    // Counts related to an exclusive endpoint.
+    int32_t mExclusiveSearchCount GUARDED_BY(mExclusiveLock) = 0; // # SEARCHED
+    int32_t mExclusiveFoundCount  GUARDED_BY(mExclusiveLock) = 0; // # FOUND
+    int32_t mExclusiveOpenCount   GUARDED_BY(mExclusiveLock) = 0; // # OPENED
+    int32_t mExclusiveCloseCount  GUARDED_BY(mExclusiveLock) = 0; // # CLOSED
+    int32_t mExclusiveStolenCount GUARDED_BY(mExclusiveLock) = 0; // # STOLEN
 
     // Same as above but for SHARED endpoints.
-    int32_t mSharedSearchCount    = 0;
-    int32_t mSharedFoundCount     = 0;
-    int32_t mSharedOpenCount      = 0;
-    int32_t mSharedCloseCount     = 0;
+    int32_t mSharedSearchCount    GUARDED_BY(mSharedLock) = 0;
+    int32_t mSharedFoundCount     GUARDED_BY(mSharedLock) = 0;
+    int32_t mSharedOpenCount      GUARDED_BY(mSharedLock) = 0;
+    int32_t mSharedCloseCount     GUARDED_BY(mSharedLock) = 0;
 
     // For easily disabling the stealing of exclusive streams.
     static constexpr bool kStealingEnabled = true;
diff --git a/services/oboeservice/AAudioMixer.cpp b/services/oboeservice/AAudioMixer.cpp
index 1c03b7f..ad4b830 100644
--- a/services/oboeservice/AAudioMixer.cpp
+++ b/services/oboeservice/AAudioMixer.cpp
@@ -33,25 +33,21 @@
 using android::FifoBuffer;
 using android::fifo_frames_t;
 
-AAudioMixer::~AAudioMixer() {
-    delete[] mOutputBuffer;
-}
-
 void AAudioMixer::allocate(int32_t samplesPerFrame, int32_t framesPerBurst) {
     mSamplesPerFrame = samplesPerFrame;
     mFramesPerBurst = framesPerBurst;
     int32_t samplesPerBuffer = samplesPerFrame * framesPerBurst;
-    mOutputBuffer = new float[samplesPerBuffer];
+    mOutputBuffer = std::make_unique<float[]>(samplesPerBuffer);
     mBufferSizeInBytes = samplesPerBuffer * sizeof(float);
 }
 
 void AAudioMixer::clear() {
-    memset(mOutputBuffer, 0, mBufferSizeInBytes);
+    memset(mOutputBuffer.get(), 0, mBufferSizeInBytes);
 }
 
-int32_t AAudioMixer::mix(int streamIndex, FifoBuffer *fifo, bool allowUnderflow) {
+int32_t AAudioMixer::mix(int streamIndex, std::shared_ptr<FifoBuffer> fifo, bool allowUnderflow) {
     WrappingBuffer wrappingBuffer;
-    float *destination = mOutputBuffer;
+    float *destination = mOutputBuffer.get();
 
 #if AAUDIO_MIXER_ATRACE_ENABLED
     ATRACE_BEGIN("aaMix");
@@ -117,5 +113,5 @@
 }
 
 float *AAudioMixer::getOutputBuffer() {
-    return mOutputBuffer;
+    return mOutputBuffer.get();
 }
diff --git a/services/oboeservice/AAudioMixer.h b/services/oboeservice/AAudioMixer.h
index d5abc5b..1a120f2 100644
--- a/services/oboeservice/AAudioMixer.h
+++ b/services/oboeservice/AAudioMixer.h
@@ -25,7 +25,6 @@
 class AAudioMixer {
 public:
     AAudioMixer() {}
-    ~AAudioMixer();
 
     void allocate(int32_t samplesPerFrame, int32_t framesPerBurst);
 
@@ -38,7 +37,7 @@
      * @param allowUnderflow if true then allow mixer to advance read index past the write index
      * @return frames read from this stream
      */
-    int32_t mix(int streamIndex, android::FifoBuffer *fifo, bool allowUnderflow);
+    int32_t mix(int streamIndex, std::shared_ptr<android::FifoBuffer> fifo, bool allowUnderflow);
 
     float *getOutputBuffer();
 
@@ -47,7 +46,7 @@
 private:
     void mixPart(float *destination, float *source, int32_t numFrames);
 
-    float   *mOutputBuffer = nullptr;
+    std::unique_ptr<float[]> mOutputBuffer;
     int32_t  mSamplesPerFrame = 0;
     int32_t  mFramesPerBurst = 0;
     int32_t  mBufferSizeInBytes = 0;
diff --git a/services/oboeservice/AAudioService.cpp b/services/oboeservice/AAudioService.cpp
index 22cdb35..69e58f6 100644
--- a/services/oboeservice/AAudioService.cpp
+++ b/services/oboeservice/AAudioService.cpp
@@ -32,26 +32,26 @@
 #include "AAudioService.h"
 #include "AAudioServiceStreamMMAP.h"
 #include "AAudioServiceStreamShared.h"
-#include "binding/IAAudioService.h"
 
 using namespace android;
 using namespace aaudio;
 
 #define MAX_STREAMS_PER_PROCESS   8
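+// Sets the AIDL out-parameter to the AAudio result and returns a successful binder Status.
+// AAudio errors are carried in the returned int32_t, not in the Status itself.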
+#define AIDL_RETURN(x) *_aidl_return = (x); return Status::ok();
+
 
 using android::AAudioService;
+using binder::Status;
 
 android::AAudioService::AAudioService()
-    : BnAAudioService() {
+    : BnAAudioService(),
+      mAdapter(this) {
     mAudioClient.clientUid = getuid();   // TODO consider using geteuid()
     mAudioClient.clientPid = getpid();
     mAudioClient.packageName = String16("");
     AAudioClientTracker::getInstance().setAAudioService(this);
 }
 
-AAudioService::~AAudioService() {
-}
-
 status_t AAudioService::dump(int fd, const Vector<String16>& args) {
     std::string result;
 
@@ -72,18 +72,21 @@
     return NO_ERROR;
 }
 
-void AAudioService::registerClient(const sp<IAAudioClient>& client) {
+Status AAudioService::registerClient(const sp<IAAudioClient> &client) {
     pid_t pid = IPCThreadState::self()->getCallingPid();
     AAudioClientTracker::getInstance().registerClient(pid, client);
+    return Status::ok();
 }
 
-bool AAudioService::isCallerInService() {
-    return mAudioClient.clientPid == IPCThreadState::self()->getCallingPid() &&
-        mAudioClient.clientUid == IPCThreadState::self()->getCallingUid();
-}
+Status
+AAudioService::openStream(const StreamRequest &_request, StreamParameters* _paramsOut,
+                          int32_t *_aidl_return) {
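+    // Ensure the AIDL out-parameter type matches aaudio_result_t before using AIDL_RETURN.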
+    static_assert(std::is_same_v<aaudio_result_t, std::decay_t<typeof(*_aidl_return)>>);
 
-aaudio_handle_t AAudioService::openStream(const aaudio::AAudioStreamRequest &request,
-                                          aaudio::AAudioStreamConfiguration &configurationOutput) {
+    // Create wrapper objects for simple usage of the parcelables.
+    const AAudioStreamRequest request(_request);
+    AAudioStreamConfiguration paramsOut;
+
     // A lock is used to order the opening of endpoints when an
     // EXCLUSIVE endpoint is stolen. We want the order to be:
     // 1) Thread A opens exclusive MMAP endpoint
@@ -108,13 +111,13 @@
         if (count >= MAX_STREAMS_PER_PROCESS) {
             ALOGE("openStream(): exceeded max streams per process %d >= %d",
                   count,  MAX_STREAMS_PER_PROCESS);
-            return AAUDIO_ERROR_UNAVAILABLE;
+            AIDL_RETURN(AAUDIO_ERROR_UNAVAILABLE);
         }
     }
 
     if (sharingMode != AAUDIO_SHARING_MODE_EXCLUSIVE && sharingMode != AAUDIO_SHARING_MODE_SHARED) {
         ALOGE("openStream(): unrecognized sharing mode = %d", sharingMode);
-        return AAUDIO_ERROR_ILLEGAL_ARGUMENT;
+        AIDL_RETURN(AAUDIO_ERROR_ILLEGAL_ARGUMENT);
     }
 
     if (sharingMode == AAUDIO_SHARING_MODE_EXCLUSIVE
@@ -147,29 +150,124 @@
 
     if (result != AAUDIO_OK) {
         serviceStream.clear();
-        return result;
+        AIDL_RETURN(result);
     } else {
         aaudio_handle_t handle = mStreamTracker.addStreamForHandle(serviceStream.get());
         serviceStream->setHandle(handle);
         pid_t pid = request.getProcessId();
         AAudioClientTracker::getInstance().registerClientStream(pid, serviceStream);
-        configurationOutput.copyFrom(*serviceStream);
+        paramsOut.copyFrom(*serviceStream);
+        *_paramsOut = std::move(paramsOut).parcelable();
         // Log open in MediaMetrics after we have the handle because we need the handle to
         // create the metrics ID.
         serviceStream->logOpen(handle);
         ALOGV("%s(): return handle = 0x%08X", __func__, handle);
-        return handle;
+        AIDL_RETURN(handle);
     }
 }
 
-aaudio_result_t AAudioService::closeStream(aaudio_handle_t streamHandle) {
+Status AAudioService::closeStream(int32_t streamHandle, int32_t *_aidl_return) {
+    static_assert(std::is_same_v<aaudio_result_t, std::decay_t<typeof(*_aidl_return)>>);
+
     // Check permission and ownership first.
     sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
     if (serviceStream.get() == nullptr) {
         ALOGE("closeStream(0x%0x), illegal stream handle", streamHandle);
-        return AAUDIO_ERROR_INVALID_HANDLE;
+        AIDL_RETURN(AAUDIO_ERROR_INVALID_HANDLE);
     }
-    return closeStream(serviceStream);
+    AIDL_RETURN(closeStream(serviceStream));
+}
+
+Status AAudioService::getStreamDescription(int32_t streamHandle, Endpoint* endpoint,
+                                           int32_t *_aidl_return) {
+    static_assert(std::is_same_v<aaudio_result_t, std::decay_t<typeof(*_aidl_return)>>);
+
+    sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
+    if (serviceStream.get() == nullptr) {
+        ALOGE("getStreamDescription(), illegal stream handle = 0x%0x", streamHandle);
+        AIDL_RETURN(AAUDIO_ERROR_INVALID_HANDLE);
+    }
+    AudioEndpointParcelable endpointParcelable;
+    aaudio_result_t result = serviceStream->getDescription(endpointParcelable);
+    if (result == AAUDIO_OK) {
+        *endpoint = std::move(endpointParcelable).parcelable();
+    }
+    AIDL_RETURN(result);
+}
+
+Status AAudioService::startStream(int32_t streamHandle, int32_t *_aidl_return) {
+    static_assert(std::is_same_v<aaudio_result_t, std::decay_t<typeof(*_aidl_return)>>);
+
+    sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
+    if (serviceStream.get() == nullptr) {
+        ALOGW("%s(), invalid streamHandle = 0x%0x", __func__, streamHandle);
+        AIDL_RETURN(AAUDIO_ERROR_INVALID_HANDLE);
+    }
+    AIDL_RETURN(serviceStream->start());
+}
+
+Status AAudioService::pauseStream(int32_t streamHandle, int32_t *_aidl_return) {
+    static_assert(std::is_same_v<aaudio_result_t, std::decay_t<typeof(*_aidl_return)>>);
+
+    sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
+    if (serviceStream.get() == nullptr) {
+        ALOGW("%s(), invalid streamHandle = 0x%0x", __func__, streamHandle);
+        AIDL_RETURN(AAUDIO_ERROR_INVALID_HANDLE);
+    }
+    AIDL_RETURN(serviceStream->pause());
+}
+
+Status AAudioService::stopStream(int32_t streamHandle, int32_t *_aidl_return) {
+    static_assert(std::is_same_v<aaudio_result_t, std::decay_t<typeof(*_aidl_return)>>);
+
+    sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
+    if (serviceStream.get() == nullptr) {
+        ALOGW("%s(), invalid streamHandle = 0x%0x", __func__, streamHandle);
+        AIDL_RETURN(AAUDIO_ERROR_INVALID_HANDLE);
+    }
+    AIDL_RETURN(serviceStream->stop());
+}
+
+Status AAudioService::flushStream(int32_t streamHandle, int32_t *_aidl_return) {
+    static_assert(std::is_same_v<aaudio_result_t, std::decay_t<typeof(*_aidl_return)>>);
+
+    sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
+    if (serviceStream.get() == nullptr) {
+        ALOGW("%s(), invalid streamHandle = 0x%0x", __func__, streamHandle);
+        AIDL_RETURN(AAUDIO_ERROR_INVALID_HANDLE);
+    }
+    AIDL_RETURN(serviceStream->flush());
+}
+
+Status AAudioService::registerAudioThread(int32_t streamHandle, int32_t clientThreadId, int64_t periodNanoseconds,
+                                          int32_t *_aidl_return) {
+    static_assert(std::is_same_v<aaudio_result_t, std::decay_t<typeof(*_aidl_return)>>);
+
+    sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
+    if (serviceStream.get() == nullptr) {
+        ALOGW("%s(), invalid streamHandle = 0x%0x", __func__, streamHandle);
+        AIDL_RETURN(AAUDIO_ERROR_INVALID_HANDLE);
+    }
+    int32_t priority = isCallerInService()
+        ? kRealTimeAudioPriorityService : kRealTimeAudioPriorityClient;
+    AIDL_RETURN(serviceStream->registerAudioThread(clientThreadId, priority));
+}
+
+Status AAudioService::unregisterAudioThread(int32_t streamHandle, int32_t clientThreadId,
+                                            int32_t *_aidl_return) {
+    static_assert(std::is_same_v<aaudio_result_t, std::decay_t<typeof(*_aidl_return)>>);
+
+    sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
+    if (serviceStream.get() == nullptr) {
+        ALOGW("%s(), invalid streamHandle = 0x%0x", __func__, streamHandle);
+        AIDL_RETURN(AAUDIO_ERROR_INVALID_HANDLE);
+    }
+    AIDL_RETURN(serviceStream->unregisterAudioThread(clientThreadId));
+}
+
+bool AAudioService::isCallerInService() {
+    return mAudioClient.clientPid == IPCThreadState::self()->getCallingPid() &&
+        mAudioClient.clientUid == IPCThreadState::self()->getCallingUid();
 }
 
 aaudio_result_t AAudioService::closeStream(sp<AAudioServiceStreamBase> serviceStream) {
@@ -205,76 +303,6 @@
     return serviceStream;
 }
 
-aaudio_result_t AAudioService::getStreamDescription(
-                aaudio_handle_t streamHandle,
-                aaudio::AudioEndpointParcelable &parcelable) {
-    sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
-    if (serviceStream.get() == nullptr) {
-        ALOGE("getStreamDescription(), illegal stream handle = 0x%0x", streamHandle);
-        return AAUDIO_ERROR_INVALID_HANDLE;
-    }
-    return serviceStream->getDescription(parcelable);
-}
-
-aaudio_result_t AAudioService::startStream(aaudio_handle_t streamHandle) {
-    sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
-    if (serviceStream.get() == nullptr) {
-        ALOGW("%s(), invalid streamHandle = 0x%0x", __func__, streamHandle);
-        return AAUDIO_ERROR_INVALID_HANDLE;
-    }
-    return serviceStream->start();
-}
-
-aaudio_result_t AAudioService::pauseStream(aaudio_handle_t streamHandle) {
-    sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
-    if (serviceStream.get() == nullptr) {
-        ALOGW("%s(), invalid streamHandle = 0x%0x", __func__, streamHandle);
-        return AAUDIO_ERROR_INVALID_HANDLE;
-    }
-    return serviceStream->pause();
-}
-
-aaudio_result_t AAudioService::stopStream(aaudio_handle_t streamHandle) {
-    sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
-    if (serviceStream.get() == nullptr) {
-        ALOGW("%s(), invalid streamHandle = 0x%0x", __func__, streamHandle);
-        return AAUDIO_ERROR_INVALID_HANDLE;
-    }
-    return serviceStream->stop();
-}
-
-aaudio_result_t AAudioService::flushStream(aaudio_handle_t streamHandle) {
-    sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
-    if (serviceStream.get() == nullptr) {
-        ALOGW("%s(), invalid streamHandle = 0x%0x", __func__, streamHandle);
-        return AAUDIO_ERROR_INVALID_HANDLE;
-    }
-    return serviceStream->flush();
-}
-
-aaudio_result_t AAudioService::registerAudioThread(aaudio_handle_t streamHandle,
-                                                   pid_t clientThreadId,
-                                                   int64_t /* periodNanoseconds */) {
-    sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
-    if (serviceStream.get() == nullptr) {
-        ALOGW("%s(), invalid streamHandle = 0x%0x", __func__, streamHandle);
-        return AAUDIO_ERROR_INVALID_HANDLE;
-    }
-    int32_t priority = isCallerInService()
-        ? kRealTimeAudioPriorityService : kRealTimeAudioPriorityClient;
-    return serviceStream->registerAudioThread(clientThreadId, priority);
-}
-
-aaudio_result_t AAudioService::unregisterAudioThread(aaudio_handle_t streamHandle,
-                                                     pid_t clientThreadId) {
-    sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
-    if (serviceStream.get() == nullptr) {
-        ALOGW("%s(), invalid streamHandle = 0x%0x", __func__, streamHandle);
-        return AAUDIO_ERROR_INVALID_HANDLE;
-    }
-    return serviceStream->unregisterAudioThread(clientThreadId);
-}
-
 aaudio_result_t AAudioService::startClient(aaudio_handle_t streamHandle,
                                            const android::AudioClient& client,
                                            const audio_attributes_t *attr,
diff --git a/services/oboeservice/AAudioService.h b/services/oboeservice/AAudioService.h
index caf48a5..7c1b796 100644
--- a/services/oboeservice/AAudioService.h
+++ b/services/oboeservice/AAudioService.h
@@ -24,69 +24,71 @@
 #include <media/AudioClient.h>
 
 #include <aaudio/AAudio.h>
+#include <aaudio/BnAAudioService.h>
 
 #include "binding/AAudioCommon.h"
+#include "binding/AAudioBinderAdapter.h"
 #include "binding/AAudioServiceInterface.h"
-#include "binding/IAAudioService.h"
 
 #include "AAudioServiceStreamBase.h"
 #include "AAudioStreamTracker.h"
 
 namespace android {
 
+#define AAUDIO_SERVICE_NAME  "media.aaudio"
+
 class AAudioService :
     public BinderService<AAudioService>,
-    public BnAAudioService,
-    public aaudio::AAudioServiceInterface
+    public aaudio::BnAAudioService
 {
     friend class BinderService<AAudioService>;
 
 public:
     AAudioService();
-    virtual ~AAudioService();
+    virtual ~AAudioService() = default;
+
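+    /**
+     * Returns an AAudioServiceInterface view of this service, used by the
+     * service's own internal streams (see the shared endpoints) to call back
+     * into the service.
+     */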
+    aaudio::AAudioServiceInterface& asAAudioServiceInterface() {
+        return mAdapter;
+    }
 
     static const char* getServiceName() { return AAUDIO_SERVICE_NAME; }
 
     virtual status_t        dump(int fd, const Vector<String16>& args) override;
 
-    virtual void            registerClient(const sp<IAAudioClient>& client);
+    binder::Status registerClient(const ::android::sp<::aaudio::IAAudioClient>& client) override;
 
-    aaudio::aaudio_handle_t openStream(const aaudio::AAudioStreamRequest &request,
-                                       aaudio::AAudioStreamConfiguration &configurationOutput)
-                                       override;
+    binder::Status openStream(const ::aaudio::StreamRequest& request,
+                              ::aaudio::StreamParameters* paramsOut,
+                              int32_t* _aidl_return) override;
 
-    /*
-     * This is called from Binder. It checks for permissions
-     * and converts the handle passed through Binder to a stream pointer.
-     */
-    aaudio_result_t closeStream(aaudio::aaudio_handle_t streamHandle) override;
+    binder::Status closeStream(int32_t streamHandle, int32_t* _aidl_return) override;
 
-    aaudio_result_t getStreamDescription(
-                aaudio::aaudio_handle_t streamHandle,
-                aaudio::AudioEndpointParcelable &parcelable) override;
+    binder::Status
+    getStreamDescription(int32_t streamHandle, ::aaudio::Endpoint* endpoint,
+                         int32_t* _aidl_return) override;
 
-    aaudio_result_t startStream(aaudio::aaudio_handle_t streamHandle) override;
+    binder::Status startStream(int32_t streamHandle, int32_t* _aidl_return) override;
 
-    aaudio_result_t pauseStream(aaudio::aaudio_handle_t streamHandle) override;
+    binder::Status pauseStream(int32_t streamHandle, int32_t* _aidl_return) override;
 
-    aaudio_result_t stopStream(aaudio::aaudio_handle_t streamHandle) override;
+    binder::Status stopStream(int32_t streamHandle, int32_t* _aidl_return) override;
 
-    aaudio_result_t flushStream(aaudio::aaudio_handle_t streamHandle) override;
+    binder::Status flushStream(int32_t streamHandle, int32_t* _aidl_return) override;
 
-    aaudio_result_t registerAudioThread(aaudio::aaudio_handle_t streamHandle,
-                                                pid_t tid,
-                                                int64_t periodNanoseconds) override;
+    binder::Status
+    registerAudioThread(int32_t streamHandle, int32_t clientThreadId, int64_t periodNanoseconds,
+                        int32_t* _aidl_return) override;
 
-    aaudio_result_t unregisterAudioThread(aaudio::aaudio_handle_t streamHandle,
-                                                  pid_t tid) override;
+    binder::Status unregisterAudioThread(int32_t streamHandle, int32_t clientThreadId,
+                                         int32_t* _aidl_return) override;
 
     aaudio_result_t startClient(aaudio::aaudio_handle_t streamHandle,
                                 const android::AudioClient& client,
                                 const audio_attributes_t *attr,
-                                audio_port_handle_t *clientHandle) override;
+                                audio_port_handle_t *clientHandle);
 
     aaudio_result_t stopClient(aaudio::aaudio_handle_t streamHandle,
-                                       audio_port_handle_t clientHandle) override;
+                                       audio_port_handle_t clientHandle);
 
  // ===============================================================================
  // The following public methods are only called from the service and NOT by Binder.
@@ -101,6 +103,29 @@
     aaudio_result_t closeStream(sp<aaudio::AAudioServiceStreamBase> serviceStream);
 
 private:
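+    // Presents this service as an aaudio::AAudioServiceInterface (via AAudioBinderAdapter)
+    // for the service's own internal streams; startClient() and stopClient() are
+    // forwarded directly to this AAudioService.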
+    class Adapter : public aaudio::AAudioBinderAdapter {
+    public:
+        explicit Adapter(AAudioService *service)
+                : aaudio::AAudioBinderAdapter(service),
+                  mService(service) {}
+
+        aaudio_result_t startClient(aaudio::aaudio_handle_t streamHandle,
+                                    const android::AudioClient &client,
+                                    const audio_attributes_t *attr,
+                                    audio_port_handle_t *clientHandle) override {
+            return mService->startClient(streamHandle, client, attr, clientHandle);
+        }
+
+        aaudio_result_t stopClient(aaudio::aaudio_handle_t streamHandle,
+                                   audio_port_handle_t clientHandle) override {
+            return mService->stopClient(streamHandle, clientHandle);
+        }
+
+    private:
+        AAudioService* const mService;
+    };
+
+    Adapter mAdapter;
 
     /** @return true if the client is the audioserver
      */
diff --git a/services/oboeservice/AAudioServiceEndpoint.cpp b/services/oboeservice/AAudioServiceEndpoint.cpp
index ceefe93..faea58f 100644
--- a/services/oboeservice/AAudioServiceEndpoint.cpp
+++ b/services/oboeservice/AAudioServiceEndpoint.cpp
@@ -38,7 +38,7 @@
 using namespace android;  // TODO just import names needed
 using namespace aaudio;   // TODO just import names needed
 
-std::string AAudioServiceEndpoint::dump() const {
+std::string AAudioServiceEndpoint::dump() const NO_THREAD_SAFETY_ANALYSIS {
     std::stringstream result;
 
     const bool isLocked = AAudio_tryUntilTrue(
@@ -182,11 +182,12 @@
             : AUDIO_SOURCE_DEFAULT;
     audio_flags_mask_t flags;
     if (direction == AAUDIO_DIRECTION_OUTPUT) {
-        flags = AUDIO_FLAG_LOW_LATENCY
-            | AAudioConvert_allowCapturePolicyToAudioFlagsMask(params->getAllowedCapturePolicy());
+        flags = static_cast<audio_flags_mask_t>(AUDIO_FLAG_LOW_LATENCY
+                | AAudioConvert_allowCapturePolicyToAudioFlagsMask(
+                        params->getAllowedCapturePolicy()));
     } else {
-        flags = AUDIO_FLAG_LOW_LATENCY
-            | AAudioConvert_privacySensitiveToAudioFlagsMask(params->isPrivacySensitive());
+        flags = static_cast<audio_flags_mask_t>(AUDIO_FLAG_LOW_LATENCY
+                | AAudioConvert_privacySensitiveToAudioFlagsMask(params->isPrivacySensitive()));
     }
     return {
             .content_type = contentType,
diff --git a/services/oboeservice/AAudioServiceEndpoint.h b/services/oboeservice/AAudioServiceEndpoint.h
index a171cb0..72090c2 100644
--- a/services/oboeservice/AAudioServiceEndpoint.h
+++ b/services/oboeservice/AAudioServiceEndpoint.h
@@ -22,6 +22,8 @@
 #include <mutex>
 #include <vector>
 
+#include <android-base/thread_annotations.h>
+
 #include "client/AudioStreamInternal.h"
 #include "client/AudioStreamInternalPlay.h"
 #include "core/AAudioStreamParameters.h"
@@ -47,7 +49,11 @@
 
     virtual aaudio_result_t open(const aaudio::AAudioStreamRequest &request) = 0;
 
-    virtual aaudio_result_t close() = 0;
+    /*
+     * Perform any cleanup necessary before deleting the stream.
+     * This might include releasing and closing internal streams.
+     */
+    virtual void close() = 0;
 
     aaudio_result_t registerStream(android::sp<AAudioServiceStreamBase> stream);
 
@@ -137,7 +143,8 @@
     std::vector<android::sp<AAudioServiceStreamBase>> disconnectRegisteredStreams();
 
     mutable std::mutex       mLockStreams;
-    std::vector<android::sp<AAudioServiceStreamBase>> mRegisteredStreams;
+    std::vector<android::sp<AAudioServiceStreamBase>> mRegisteredStreams
+            GUARDED_BY(mLockStreams);
 
     SimpleDoubleBuffer<Timestamp>  mAtomicEndpointTimestamp;
 
diff --git a/services/oboeservice/AAudioServiceEndpointCapture.cpp b/services/oboeservice/AAudioServiceEndpointCapture.cpp
index 37d105b..bc769f0 100644
--- a/services/oboeservice/AAudioServiceEndpointCapture.cpp
+++ b/services/oboeservice/AAudioServiceEndpointCapture.cpp
@@ -35,22 +35,17 @@
 using namespace android;  // TODO just import names needed
 using namespace aaudio;   // TODO just import names needed
 
-AAudioServiceEndpointCapture::AAudioServiceEndpointCapture(AAudioService &audioService)
-        : mStreamInternalCapture(audioService, true) {
-    mStreamInternal = &mStreamInternalCapture;
-}
-
-AAudioServiceEndpointCapture::~AAudioServiceEndpointCapture() {
-    delete mDistributionBuffer;
+AAudioServiceEndpointCapture::AAudioServiceEndpointCapture(AAudioService& audioService)
+        : AAudioServiceEndpointShared(
+                new AudioStreamInternalCapture(audioService.asAAudioServiceInterface(), true)) {
 }
 
 aaudio_result_t AAudioServiceEndpointCapture::open(const aaudio::AAudioStreamRequest &request) {
     aaudio_result_t result = AAudioServiceEndpointShared::open(request);
     if (result == AAUDIO_OK) {
-        delete mDistributionBuffer;
         int distributionBufferSizeBytes = getStreamInternal()->getFramesPerBurst()
                                           * getStreamInternal()->getBytesPerFrame();
-        mDistributionBuffer = new uint8_t[distributionBufferSizeBytes];
+        mDistributionBuffer = std::make_unique<uint8_t[]>(distributionBufferSizeBytes);
     }
     return result;
 }
@@ -67,9 +62,12 @@
         int64_t mmapFramesRead = getStreamInternal()->getFramesRead();
 
         // Read audio data from stream using a blocking read.
-        result = getStreamInternal()->read(mDistributionBuffer, getFramesPerBurst(), timeoutNanos);
+        result = getStreamInternal()->read(mDistributionBuffer.get(),
+                getFramesPerBurst(), timeoutNanos);
         if (result == AAUDIO_ERROR_DISCONNECTED) {
-            disconnectRegisteredStreams();
+            ALOGD("%s() read() returned AAUDIO_ERROR_DISCONNECTED", __func__);
+            // We do not need the returned vector.
+            (void) AAudioServiceEndpointShared::disconnectRegisteredStreams();
             break;
         } else if (result != getFramesPerBurst()) {
             ALOGW("callbackLoop() read %d / %d",
@@ -79,48 +77,14 @@
 
         // Distribute data to each active stream.
         { // brackets are for lock_guard
-
             std::lock_guard <std::mutex> lock(mLockStreams);
             for (const auto& clientStream : mRegisteredStreams) {
                 if (clientStream->isRunning() && !clientStream->isSuspended()) {
-                    int64_t clientFramesWritten = 0;
-
                     sp<AAudioServiceStreamShared> streamShared =
                             static_cast<AAudioServiceStreamShared *>(clientStream.get());
-
-                    {
-                        // Lock the AudioFifo to protect against close.
-                        std::lock_guard <std::mutex> lock(streamShared->getAudioDataQueueLock());
-
-                        FifoBuffer *fifo = streamShared->getAudioDataFifoBuffer_l();
-                        if (fifo != nullptr) {
-
-                            // Determine offset between framePosition in client's stream
-                            // vs the underlying MMAP stream.
-                            clientFramesWritten = fifo->getWriteCounter();
-                            // There are two indices that refer to the same frame.
-                            int64_t positionOffset = mmapFramesRead - clientFramesWritten;
-                            streamShared->setTimestampPositionOffset(positionOffset);
-
-                            // Is the buffer too full to write a burst?
-                            if (fifo->getEmptyFramesAvailable() <
-                                    getFramesPerBurst()) {
-                                streamShared->incrementXRunCount();
-                            } else {
-                                fifo->write(mDistributionBuffer, getFramesPerBurst());
-                            }
-                            clientFramesWritten = fifo->getWriteCounter();
-                        }
-                    }
-
-                    if (clientFramesWritten > 0) {
-                        // This timestamp represents the completion of data being written into the
-                        // client buffer. It is sent to the client and used in the timing model
-                        // to decide when data will be available to read.
-                        Timestamp timestamp(clientFramesWritten, AudioClock::getNanoseconds());
-                        streamShared->markTransferTime(timestamp);
-                    }
-
+                    streamShared->writeDataIfRoom(mmapFramesRead,
+                                                  mDistributionBuffer.get(),
+                                                  getFramesPerBurst());
                 }
             }
         }
diff --git a/services/oboeservice/AAudioServiceEndpointCapture.h b/services/oboeservice/AAudioServiceEndpointCapture.h
index 971da9a..2ca43cf 100644
--- a/services/oboeservice/AAudioServiceEndpointCapture.h
+++ b/services/oboeservice/AAudioServiceEndpointCapture.h
@@ -17,6 +17,8 @@
 #ifndef AAUDIO_SERVICE_ENDPOINT_CAPTURE_H
 #define AAUDIO_SERVICE_ENDPOINT_CAPTURE_H
 
+#include <memory>
+
 #include "client/AudioStreamInternal.h"
 #include "client/AudioStreamInternalCapture.h"
 
@@ -28,16 +30,14 @@
 class AAudioServiceEndpointCapture : public AAudioServiceEndpointShared {
 public:
     explicit AAudioServiceEndpointCapture(android::AAudioService &audioService);
-    virtual ~AAudioServiceEndpointCapture();
+    virtual ~AAudioServiceEndpointCapture() = default;
 
     aaudio_result_t open(const aaudio::AAudioStreamRequest &request) override;
 
-
     void *callbackLoop() override;
 
 private:
-    AudioStreamInternalCapture  mStreamInternalCapture;
-    uint8_t                    *mDistributionBuffer = nullptr;
+    std::unique_ptr<uint8_t[]>  mDistributionBuffer;
 };
 
 } /* namespace aaudio */
diff --git a/services/oboeservice/AAudioServiceEndpointMMAP.cpp b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
index 0843e0b..5bccfd5 100644
--- a/services/oboeservice/AAudioServiceEndpointMMAP.cpp
+++ b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
@@ -226,7 +226,7 @@
     return result;
 }
 
-aaudio_result_t AAudioServiceEndpointMMAP::close() {
+void AAudioServiceEndpointMMAP::close() {
     if (mMmapStream != nullptr) {
         // Needs to be explicitly cleared or CTS will fail but it is not clear why.
         mMmapStream.clear();
@@ -235,8 +235,6 @@
         // FIXME Make closing synchronous.
         AudioClock::sleepForNanos(100 * AAUDIO_NANOS_PER_MILLISECOND);
     }
-
-    return AAUDIO_OK;
 }
 
 aaudio_result_t AAudioServiceEndpointMMAP::startStream(sp<AAudioServiceStreamBase> stream,
@@ -380,3 +378,18 @@
     parcelable.mDownDataQueueParcelable.setCapacityInFrames(getBufferCapacity());
     return AAUDIO_OK;
 }
+
+aaudio_result_t AAudioServiceEndpointMMAP::getExternalPosition(uint64_t *positionFrames,
+                                                               int64_t *timeNanos)
+{
+    if (!mExternalPositionSupported) {
+        return AAUDIO_ERROR_INVALID_STATE;
+    }
+    status_t status = mMmapStream->getExternalPosition(positionFrames, timeNanos);
+    if (status == INVALID_OPERATION) {
+        // getExternalPosition() is not supported. Set mExternalPositionSupported to false
+        // so that the call will not go to the HAL next time.
+        mExternalPositionSupported = false;
+    }
+    return AAudioConvert_androidToAAudioResult(status);
+}
diff --git a/services/oboeservice/AAudioServiceEndpointMMAP.h b/services/oboeservice/AAudioServiceEndpointMMAP.h
index 3d10861..a2a0922 100644
--- a/services/oboeservice/AAudioServiceEndpointMMAP.h
+++ b/services/oboeservice/AAudioServiceEndpointMMAP.h
@@ -50,7 +50,7 @@
 
     aaudio_result_t open(const aaudio::AAudioStreamRequest &request) override;
 
-    aaudio_result_t close() override;
+    void close() override;
 
     aaudio_result_t startStream(android::sp<AAudioServiceStreamBase> stream,
                                 audio_port_handle_t *clientHandle) override;
@@ -85,6 +85,8 @@
         return mHardwareTimeOffsetNanos;
     }
 
+    aaudio_result_t getExternalPosition(uint64_t *positionFrames, int64_t *timeNanos);
+
 private:
     MonotonicCounter                          mFramesTransferred;
 
@@ -101,6 +103,8 @@
 
     int64_t                                   mHardwareTimeOffsetNanos = 0; // TODO get from HAL
 
+    bool                                      mExternalPositionSupported = true;
+
 };
 
 } /* namespace aaudio */
diff --git a/services/oboeservice/AAudioServiceEndpointPlay.cpp b/services/oboeservice/AAudioServiceEndpointPlay.cpp
index bda4b90..4e46033 100644
--- a/services/oboeservice/AAudioServiceEndpointPlay.cpp
+++ b/services/oboeservice/AAudioServiceEndpointPlay.cpp
@@ -41,10 +41,9 @@
 
 #define BURSTS_PER_BUFFER_DEFAULT   2
 
-AAudioServiceEndpointPlay::AAudioServiceEndpointPlay(AAudioService &audioService)
-        : mStreamInternalPlay(audioService, true) {
-    mStreamInternal = &mStreamInternalPlay;
-}
+AAudioServiceEndpointPlay::AAudioServiceEndpointPlay(AAudioService& audioService)
+        : AAudioServiceEndpointShared(
+                new AudioStreamInternalPlay(audioService.asAAudioServiceInterface(), true)) {}
 
 aaudio_result_t AAudioServiceEndpointPlay::open(const aaudio::AAudioStreamRequest &request) {
     aaudio_result_t result = AAudioServiceEndpointShared::open(request);
@@ -99,10 +98,11 @@
 
                 {
                     // Lock the AudioFifo to protect against close.
-                    std::lock_guard <std::mutex> lock(streamShared->getAudioDataQueueLock());
-
-                    FifoBuffer *fifo = streamShared->getAudioDataFifoBuffer_l();
-                    if (fifo != nullptr) {
+                    std::lock_guard <std::mutex> lock(streamShared->audioDataQueueLock);
+                    std::shared_ptr<SharedRingBuffer> audioDataQueue
+                            = streamShared->getAudioDataQueue_l();
+                    std::shared_ptr<FifoBuffer> fifo;
+                    if (audioDataQueue && (fifo = audioDataQueue->getFifoBuffer())) {
 
                         // Determine offset between framePosition in client's stream
                         // vs the underlying MMAP stream.
@@ -145,7 +145,9 @@
         result = getStreamInternal()->write(mMixer.getOutputBuffer(),
                                             getFramesPerBurst(), timeoutNanos);
         if (result == AAUDIO_ERROR_DISCONNECTED) {
-            AAudioServiceEndpointShared::disconnectRegisteredStreams();
+            ALOGD("%s() write() returned AAUDIO_ERROR_DISCONNECTED", __func__);
+            // We do not need the returned vector.
+            (void) AAudioServiceEndpointShared::disconnectRegisteredStreams();
             break;
         } else if (result != getFramesPerBurst()) {
             ALOGW("callbackLoop() wrote %d / %d",
diff --git a/services/oboeservice/AAudioServiceEndpointPlay.h b/services/oboeservice/AAudioServiceEndpointPlay.h
index 981e430..160a1de 100644
--- a/services/oboeservice/AAudioServiceEndpointPlay.h
+++ b/services/oboeservice/AAudioServiceEndpointPlay.h
@@ -45,7 +45,6 @@
     void *callbackLoop() override;
 
 private:
-    AudioStreamInternalPlay  mStreamInternalPlay; // for playing output of mixer
     bool                     mLatencyTuningEnabled = false; // TODO implement tuning
     AAudioMixer              mMixer;    //
 };
diff --git a/services/oboeservice/AAudioServiceEndpointShared.cpp b/services/oboeservice/AAudioServiceEndpointShared.cpp
index dc21886..501e8c0 100644
--- a/services/oboeservice/AAudioServiceEndpointShared.cpp
+++ b/services/oboeservice/AAudioServiceEndpointShared.cpp
@@ -40,6 +40,9 @@
 // This is the maximum size in frames. The effective size can be tuned smaller at runtime.
 #define DEFAULT_BUFFER_CAPACITY   (48 * 8)
 
+AAudioServiceEndpointShared::AAudioServiceEndpointShared(AudioStreamInternal *streamInternal)
+    : mStreamInternal(streamInternal) {}
+
 std::string AAudioServiceEndpointShared::dump() const {
     std::stringstream result;
 
@@ -84,24 +87,31 @@
     return result;
 }
 
-aaudio_result_t AAudioServiceEndpointShared::close() {
-    return getStreamInternal()->releaseCloseFinal();
+void AAudioServiceEndpointShared::close() {
+    stopSharingThread();
+    getStreamInternal()->safeReleaseClose();
 }
 
 // Glue between C and C++ callbacks.
 static void *aaudio_endpoint_thread_proc(void *arg) {
     assert(arg != nullptr);
+    ALOGD("%s() called", __func__);
 
-    // The caller passed in a smart pointer to prevent the endpoint from getting deleted
-    // while the thread was launching.
-    sp<AAudioServiceEndpointShared> *endpointForThread =
-            static_cast<sp<AAudioServiceEndpointShared> *>(arg);
-    sp<AAudioServiceEndpointShared> endpoint = *endpointForThread;
-    delete endpointForThread; // Just use scoped smart pointer. Don't need this anymore.
+    // Prevent the endpoint from being deleted while it is in use.
+    // This is just for extra safety. It is probably not needed because
+    // this callback should be joined before the stream is closed.
+    AAudioServiceEndpointShared *endpointPtr =
+        static_cast<AAudioServiceEndpointShared *>(arg);
+    android::sp<AAudioServiceEndpointShared> endpoint(endpointPtr);
+    // Balance the incStrong() in startSharingThread_l().
+    endpoint->decStrong(nullptr);
+
     void *result = endpoint->callbackLoop();
     // Close now so that the HW resource is freed and we can open a new device.
     if (!endpoint->isConnected()) {
-        endpoint->close();
+        ALOGD("%s() call safeReleaseCloseFromCallback()", __func__);
+        // Release and close under a lock with no check for callback collisions.
+        endpoint->getStreamInternal()->safeReleaseCloseFromCallback();
     }
 
     return result;
@@ -113,38 +123,39 @@
                           * AAUDIO_NANOS_PER_SECOND
                           / getSampleRate();
     mCallbackEnabled.store(true);
-    // Pass a smart pointer so the thread can hold a reference.
-    sp<AAudioServiceEndpointShared> *endpointForThread = new sp<AAudioServiceEndpointShared>(this);
-    aaudio_result_t result = getStreamInternal()->createThread(periodNanos,
-                                                               aaudio_endpoint_thread_proc,
-                                                               endpointForThread);
+    // Prevent this object from getting deleted before the thread has a chance to create
+    // its strong pointer. Assume the thread will call decStrong().
+    this->incStrong(nullptr);
+    aaudio_result_t result = getStreamInternal()->createThread_l(periodNanos,
+                                                                 aaudio_endpoint_thread_proc,
+                                                                 this);
     if (result != AAUDIO_OK) {
-        // The thread can't delete it so we have to do it here.
-        delete endpointForThread;
+        this->decStrong(nullptr); // Because the thread won't do it.
     }
     return result;
 }
 
 aaudio_result_t aaudio::AAudioServiceEndpointShared::stopSharingThread() {
     mCallbackEnabled.store(false);
-    aaudio_result_t result = getStreamInternal()->joinThread(NULL);
-    return result;
+    return getStreamInternal()->joinThread(NULL);
 }
 
-aaudio_result_t AAudioServiceEndpointShared::startStream(sp<AAudioServiceStreamBase> sharedStream,
-                                                         audio_port_handle_t *clientHandle) {
+aaudio_result_t AAudioServiceEndpointShared::startStream(
+        sp<AAudioServiceStreamBase> sharedStream,
+        audio_port_handle_t *clientHandle)
+        NO_THREAD_SAFETY_ANALYSIS {
     aaudio_result_t result = AAUDIO_OK;
 
     {
         std::lock_guard<std::mutex> lock(mLockStreams);
         if (++mRunningStreamCount == 1) { // atomic
-            result = getStreamInternal()->requestStart();
+            result = getStreamInternal()->systemStart();
             if (result != AAUDIO_OK) {
                 --mRunningStreamCount;
             } else {
                 result = startSharingThread_l();
                 if (result != AAUDIO_OK) {
-                    getStreamInternal()->requestStop();
+                    getStreamInternal()->systemStopFromApp();
                     --mRunningStreamCount;
                 }
             }
@@ -158,7 +169,7 @@
         if (result != AAUDIO_OK) {
             if (--mRunningStreamCount == 0) { // atomic
                 stopSharingThread();
-                getStreamInternal()->requestStop();
+                getStreamInternal()->systemStopFromApp();
             }
         }
     }
@@ -173,7 +184,7 @@
 
     if (--mRunningStreamCount == 0) { // atomic
         stopSharingThread(); // the sharing thread locks mLockStreams
-        getStreamInternal()->requestStop();
+        getStreamInternal()->systemStopFromApp();
     }
     return AAUDIO_OK;
 }
diff --git a/services/oboeservice/AAudioServiceEndpointShared.h b/services/oboeservice/AAudioServiceEndpointShared.h
index bfc1744..8357567 100644
--- a/services/oboeservice/AAudioServiceEndpointShared.h
+++ b/services/oboeservice/AAudioServiceEndpointShared.h
@@ -20,6 +20,8 @@
 #include <atomic>
 #include <mutex>
 
+#include <android-base/thread_annotations.h>
+
 #include "AAudioServiceEndpoint.h"
 #include "client/AudioStreamInternal.h"
 #include "client/AudioStreamInternalPlay.h"
@@ -35,12 +37,15 @@
 class AAudioServiceEndpointShared : public AAudioServiceEndpoint {
 
 public:
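+    // Takes ownership of streamInternal, which is then held in an sp<>.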
+    explicit AAudioServiceEndpointShared(AudioStreamInternal *streamInternal);
+
+    virtual ~AAudioServiceEndpointShared() = default;
 
     std::string dump() const override;
 
     aaudio_result_t open(const aaudio::AAudioStreamRequest &request) override;
 
-    aaudio_result_t close() override;
+    void close() override;
 
     aaudio_result_t startStream(android::sp<AAudioServiceStreamBase> stream,
                                 audio_port_handle_t *clientHandle) override;
@@ -54,18 +59,18 @@
 
     virtual void   *callbackLoop() = 0;
 
-protected:
-
     AudioStreamInternal *getStreamInternal() const {
-        return mStreamInternal;
+        return mStreamInternal.get();
     };
 
-    aaudio_result_t          startSharingThread_l();
+protected:
+
+    aaudio_result_t          startSharingThread_l() REQUIRES(mLockStreams);
 
     aaudio_result_t          stopSharingThread();
 
-    // pointer to object statically allocated in subclasses
-    AudioStreamInternal     *mStreamInternal = nullptr;
+    // An MMAP stream that is shared by multiple clients.
+    android::sp<AudioStreamInternal> mStreamInternal;
 
     std::atomic<bool>        mCallbackEnabled{false};
 
diff --git a/services/oboeservice/AAudioServiceStreamBase.cpp b/services/oboeservice/AAudioServiceStreamBase.cpp
index 663dae2..7edc25c 100644
--- a/services/oboeservice/AAudioServiceStreamBase.cpp
+++ b/services/oboeservice/AAudioServiceStreamBase.cpp
@@ -26,7 +26,6 @@
 #include <media/TypeConverter.h>
 #include <mediautils/SchedulingPolicyService.h>
 
-#include "binding/IAAudioService.h"
 #include "binding/AAudioServiceMessage.h"
 #include "core/AudioGlobal.h"
 #include "utility/AudioClock.h"
@@ -46,8 +45,7 @@
  */
 
 AAudioServiceStreamBase::AAudioServiceStreamBase(AAudioService &audioService)
-        : mUpMessageQueue(nullptr)
-        , mTimestampThread("AATime")
+        : mTimestampThread("AATime")
         , mAtomicStreamTimestamp()
         , mAudioService(audioService) {
     mMmapClient.clientUid = -1;
@@ -56,6 +54,8 @@
 }
 
 AAudioServiceStreamBase::~AAudioServiceStreamBase() {
+    ALOGD("%s() called", __func__);
+
     // May not be set if open failed.
     if (mMetricsId.size() > 0) {
         mediametrics::LogItem(mMetricsId)
@@ -140,7 +140,7 @@
             return AAUDIO_ERROR_INVALID_STATE;
         }
 
-        mUpMessageQueue = new SharedRingBuffer();
+        mUpMessageQueue = std::make_shared<SharedRingBuffer>();
         result = mUpMessageQueue->allocate(sizeof(AAudioServiceMessage),
                                            QUEUE_UP_CAPACITY_COMMANDS);
         if (result != AAUDIO_OK) {
@@ -179,6 +179,8 @@
         return AAUDIO_OK;
     }
 
+    // This will call stopTimestampThread() and also stop the stream,
+    // just in case it was not already stopped.
     stop_l();
 
     aaudio_result_t result = AAUDIO_OK;
@@ -194,13 +196,6 @@
         mServiceEndpoint.clear(); // endpoint will hold the pointer after this method returns.
     }
 
-    {
-        std::lock_guard<std::mutex> lock(mUpMessageQueueLock);
-        stopTimestampThread();
-        delete mUpMessageQueue;
-        mUpMessageQueue = nullptr;
-    }
-
     setState(AAUDIO_STREAM_STATE_CLOSED);
 
     mediametrics::LogItem(mMetricsId)
@@ -514,12 +509,8 @@
         ALOGE("%s(): mUpMessageQueue null! - stream not open", __func__);
         return true;
     }
-    int32_t framesAvailable = mUpMessageQueue->getFifoBuffer()
-        ->getFullFramesAvailable();
-    int32_t capacity = mUpMessageQueue->getFifoBuffer()
-        ->getBufferCapacityInFrames();
     // Is it half full or more?
-    return framesAvailable >= (capacity / 2);
+    return mUpMessageQueue->getFractionalFullness() >= 0.5;
 }
 
 aaudio_result_t AAudioServiceStreamBase::writeUpMessageQueue(AAudioServiceMessage *command) {
@@ -604,14 +595,3 @@
 void AAudioServiceStreamBase::onVolumeChanged(float volume) {
     sendServiceEvent(AAUDIO_SERVICE_EVENT_VOLUME, volume);
 }
-
-int32_t AAudioServiceStreamBase::incrementServiceReferenceCount_l() {
-    return ++mCallingCount;
-}
-
-int32_t AAudioServiceStreamBase::decrementServiceReferenceCount_l() {
-    int32_t count = --mCallingCount;
-    // Each call to increment should be balanced with one call to decrement.
-    assert(count >= 0);
-    return count;
-}
diff --git a/services/oboeservice/AAudioServiceStreamBase.h b/services/oboeservice/AAudioServiceStreamBase.h
index 94cc980..0f752b7 100644
--- a/services/oboeservice/AAudioServiceStreamBase.h
+++ b/services/oboeservice/AAudioServiceStreamBase.h
@@ -20,13 +20,15 @@
 #include <assert.h>
 #include <mutex>
 
+#include <android-base/thread_annotations.h>
 #include <media/AudioClient.h>
 #include <utils/RefBase.h>
 
 #include "fifo/FifoBuffer.h"
-#include "binding/IAAudioService.h"
 #include "binding/AudioEndpointParcelable.h"
 #include "binding/AAudioServiceMessage.h"
+#include "binding/AAudioStreamRequest.h"
+#include "core/AAudioStreamParameters.h"
 #include "utility/AAudioUtilities.h"
 #include "utility/AudioClock.h"
 
@@ -208,25 +210,6 @@
         return mSuspended;
     }
 
-    /**
-     * Atomically increment the number of active references to the stream by AAudioService.
-     *
-     * This is called under a global lock in AAudioStreamTracker.
-     *
-     * @return value after the increment
-     */
-    int32_t incrementServiceReferenceCount_l();
-
-    /**
-     * Atomically decrement the number of active references to the stream by AAudioService.
-     * This should only be called after incrementServiceReferenceCount_l().
-     *
-     * This is called under a global lock in AAudioStreamTracker.
-     *
-     * @return value after the decrement
-     */
-    int32_t decrementServiceReferenceCount_l();
-
     bool isCloseNeeded() const {
         return mCloseNeeded.load();
     }
@@ -249,11 +232,10 @@
     aaudio_result_t open(const aaudio::AAudioStreamRequest &request,
                          aaudio_sharing_mode_t sharingMode);
 
-    // These must be called under mLock
-    virtual aaudio_result_t close_l();
-    virtual aaudio_result_t pause_l();
-    virtual aaudio_result_t stop_l();
-    void disconnect_l();
+    virtual aaudio_result_t close_l() REQUIRES(mLock);
+    virtual aaudio_result_t pause_l() REQUIRES(mLock);
+    virtual aaudio_result_t stop_l() REQUIRES(mLock);
+    void disconnect_l() REQUIRES(mLock);
 
     void setState(aaudio_stream_state_t state);
 
@@ -284,8 +266,8 @@
 
     pid_t                   mRegisteredClientThread = ILLEGAL_THREAD_ID;
 
-    SharedRingBuffer*       mUpMessageQueue;
     std::mutex              mUpMessageQueueLock;
+    std::shared_ptr<SharedRingBuffer> mUpMessageQueue;
 
     AAudioThread            mTimestampThread;
     // This is used by one thread to tell another thread to exit. So it must be atomic.
@@ -331,18 +313,17 @@
     aaudio_handle_t         mHandle = -1;
     bool                    mFlowing = false;
 
-    // This is modified under a global lock in AAudioStreamTracker.
-    int32_t                 mCallingCount = 0;
-
-    // This indicates that a stream that is being referenced by a binder call needs to closed.
-    std::atomic<bool>       mCloseNeeded{false};
+    // This indicates that a stream that is being referenced by a binder call
+    // needs to be closed.
+    std::atomic<bool>       mCloseNeeded{false}; // TODO remove
 
     // This indicates that a running stream should not be processed because of an error,
     // for example a full message queue. Note that this atomic is unrelated to mCloseNeeded.
     std::atomic<bool>       mSuspended{false};
 
+protected:
     // Locking order is important.
-    // Always acquire mLock before acquiring AAudioServiceEndpoint::mLockStreams
+    // Acquire mLock before acquiring AAudioServiceEndpoint::mLockStreams.
     std::mutex              mLock; // Prevent start/stop/close etcetera from colliding
 };
 
diff --git a/services/oboeservice/AAudioServiceStreamMMAP.cpp b/services/oboeservice/AAudioServiceStreamMMAP.cpp
index 54d7d06..57dc1ab 100644
--- a/services/oboeservice/AAudioServiceStreamMMAP.cpp
+++ b/services/oboeservice/AAudioServiceStreamMMAP.cpp
@@ -19,6 +19,7 @@
 #include <utils/Log.h>
 
 #include <atomic>
+#include <inttypes.h>
 #include <iomanip>
 #include <iostream>
 #include <stdint.h>
@@ -162,7 +163,8 @@
     return result;
 }
 
-// Get timestamp that was written by getFreeRunningPosition()
+// Get the timestamp from the presentation position.
+// If that fails, fall back to the timestamp written by getFreeRunningPosition().
 aaudio_result_t AAudioServiceStreamMMAP::getHardwareTimestamp(int64_t *positionFrames,
                                                                 int64_t *timeNanos) {
 
@@ -174,7 +176,17 @@
     sp<AAudioServiceEndpointMMAP> serviceEndpointMMAP =
             static_cast<AAudioServiceEndpointMMAP *>(endpoint.get());
 
-    // TODO Get presentation timestamp from the HAL
+    // Disable this code temporarily because the HAL is not returning
+    // a useful result.
+#if 0
+    uint64_t position;
+    if (serviceEndpointMMAP->getExternalPosition(&position, timeNanos) == AAUDIO_OK) {
+        ALOGD("%s() getExternalPosition() says pos = %" PRIi64 ", time = %" PRIi64,
+                __func__, position, *timeNanos);
+        *positionFrames = (int64_t) position;
+        return AAUDIO_OK;
+    } else
+#endif
     if (mAtomicStreamTimestamp.isValid()) {
         Timestamp timestamp = mAtomicStreamTimestamp.read();
         *positionFrames = timestamp.getPosition();
diff --git a/services/oboeservice/AAudioServiceStreamMMAP.h b/services/oboeservice/AAudioServiceStreamMMAP.h
index 5902613..6ba1725 100644
--- a/services/oboeservice/AAudioServiceStreamMMAP.h
+++ b/services/oboeservice/AAudioServiceStreamMMAP.h
@@ -19,6 +19,7 @@
 
 #include <atomic>
 
+#include <android-base/thread_annotations.h>
 #include <android-base/unique_fd.h>
 #include <media/audiohal/StreamHalInterface.h>
 #include <media/MmapStreamCallback.h>
@@ -34,10 +35,8 @@
 #include "TimestampScheduler.h"
 #include "utility/MonotonicCounter.h"
 
-
 namespace aaudio {
 
-
 /**
  * This corresponds to an EXCLUSIVE mode MMAP client stream.
  * It has exclusive use of one AAudioServiceEndpointMMAP to communicate with the underlying
@@ -68,9 +67,9 @@
      * This is not guaranteed to be synchronous but it currently is.
      * An AAUDIO_SERVICE_EVENT_PAUSED will be sent to the client when complete.
     */
-    aaudio_result_t pause_l() override;
+    aaudio_result_t pause_l() REQUIRES(mLock) override;
 
-    aaudio_result_t stop_l() override;
+    aaudio_result_t stop_l() REQUIRES(mLock) override;
 
     aaudio_result_t getAudioDataDescription(AudioEndpointParcelable &parcelable) override;
 
diff --git a/services/oboeservice/AAudioServiceStreamShared.cpp b/services/oboeservice/AAudioServiceStreamShared.cpp
index 01b1c2e..c665cda 100644
--- a/services/oboeservice/AAudioServiceStreamShared.cpp
+++ b/services/oboeservice/AAudioServiceStreamShared.cpp
@@ -24,8 +24,6 @@
 
 #include <aaudio/AAudio.h>
 
-#include "binding/IAAudioService.h"
-
 #include "binding/AAudioServiceMessage.h"
 #include "AAudioServiceStreamBase.h"
 #include "AAudioServiceStreamShared.h"
@@ -54,19 +52,26 @@
     return result.str();
 }
 
-std::string AAudioServiceStreamShared::dump() const {
+std::string AAudioServiceStreamShared::dump() const NO_THREAD_SAFETY_ANALYSIS {
     std::stringstream result;
 
+    const bool isLocked = AAudio_tryUntilTrue(
+            [this]()->bool { return audioDataQueueLock.try_lock(); } /* f */,
+            50 /* times */,
+            20 /* sleepMs */);
+    if (!isLocked) {
+        result << "AAudioServiceStreamShared may be deadlocked\n";
+    }
+
     result << AAudioServiceStreamBase::dump();
 
-    auto fifo = mAudioDataQueue->getFifoBuffer();
-    int32_t readCounter = fifo->getReadCounter();
-    int32_t writeCounter = fifo->getWriteCounter();
-    result << std::setw(10) << writeCounter;
-    result << std::setw(10) << readCounter;
-    result << std::setw(8) << (writeCounter - readCounter);
+    result << mAudioDataQueue->dump();
     result << std::setw(8) << getXRunCount();
 
+    if (isLocked) {
+        audioDataQueueLock.unlock();
+    }
+
     return result.str();
 }
 
@@ -105,7 +110,7 @@
     }
     int32_t capacityInFrames = numBursts * framesPerBurst;
 
-    // Final sanity check.
+    // Final range check.
     if (capacityInFrames > MAX_FRAMES_PER_BUFFER) {
         ALOGE("calculateBufferCapacity() calc capacity %d > max %d",
               capacityInFrames, MAX_FRAMES_PER_BUFFER);
@@ -178,9 +183,9 @@
     }
 
     {
-        std::lock_guard<std::mutex> lock(mAudioDataQueueLock);
+        std::lock_guard<std::mutex> lock(audioDataQueueLock);
         // Create audio data shared memory buffer for client.
-        mAudioDataQueue = new SharedRingBuffer();
+        mAudioDataQueue = std::make_shared<SharedRingBuffer>();
         result = mAudioDataQueue->allocate(calculateBytesPerFrame(), getBufferCapacity());
         if (result != AAUDIO_OK) {
             ALOGE("%s() could not allocate FIFO with %d frames",
@@ -203,25 +208,13 @@
     return result;
 }
 
-aaudio_result_t AAudioServiceStreamShared::close_l()  {
-    aaudio_result_t result = AAudioServiceStreamBase::close_l();
-
-    {
-        std::lock_guard<std::mutex> lock(mAudioDataQueueLock);
-        delete mAudioDataQueue;
-        mAudioDataQueue = nullptr;
-    }
-
-    return result;
-}
-
 /**
  * Get an immutable description of the data queue created by this service.
  */
 aaudio_result_t AAudioServiceStreamShared::getAudioDataDescription(
         AudioEndpointParcelable &parcelable)
 {
-    std::lock_guard<std::mutex> lock(mAudioDataQueueLock);
+    std::lock_guard<std::mutex> lock(audioDataQueueLock);
     if (mAudioDataQueue == nullptr) {
         ALOGW("%s(): mUpMessageQueue null! - stream not open", __func__);
         return AAUDIO_ERROR_NULL;
@@ -273,3 +266,37 @@
     *positionFrames = position;
     return result;
 }
+
+void AAudioServiceStreamShared::writeDataIfRoom(int64_t mmapFramesRead,
+                                                const void *buffer, int32_t numFrames) {
+    int64_t clientFramesWritten = 0;
+
+    // Lock the AudioFifo to protect against close.
+    std::lock_guard <std::mutex> lock(audioDataQueueLock);
+
+    if (mAudioDataQueue != nullptr) {
+        std::shared_ptr<FifoBuffer> fifo = mAudioDataQueue->getFifoBuffer();
+        // Determine offset between framePosition in client's stream
+        // vs the underlying MMAP stream.
+        clientFramesWritten = fifo->getWriteCounter();
+        // There are two indices that refer to the same frame.
+        int64_t positionOffset = mmapFramesRead - clientFramesWritten;
+        setTimestampPositionOffset(positionOffset);
+
+        // Is the buffer too full to write a burst?
+        if (fifo->getEmptyFramesAvailable() < getFramesPerBurst()) {
+            incrementXRunCount();
+        } else {
+            fifo->write(buffer, numFrames);
+        }
+        clientFramesWritten = fifo->getWriteCounter();
+    }
+
+    if (clientFramesWritten > 0) {
+        // This timestamp represents the completion of data being written into the
+        // client buffer. It is sent to the client and used in the timing model
+        // to decide when data will be available to read.
+        Timestamp timestamp(clientFramesWritten, AudioClock::getNanoseconds());
+        markTransferTime(timestamp);
+    }
+}
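
The writeDataIfRoom() path above records positionOffset = mmapFramesRead - clientFramesWritten via setTimestampPositionOffset(); presumably the timestamp path later subtracts that offset to translate a position on the underlying MMAP stream into the client's frame index space. A minimal standalone sketch of that arithmetic, under that assumption and with illustrative names (this is not AAudio code):

    #include <atomic>
    #include <cstdint>
    #include <cstdio>

    static std::atomic<int64_t> gPositionOffset{0};

    // Called when a burst has been copied from the MMAP stream into the client FIFO.
    // Both counters refer to the same frame, so their difference maps one index
    // space onto the other (mirrors setTimestampPositionOffset() above).
    static void onBurstCopied(int64_t mmapFramesRead, int64_t clientFramesWritten) {
        gPositionOffset.store(mmapFramesRead - clientFramesWritten);
    }

    // Translate a position on the MMAP stream into the client's frame index space.
    static int64_t mmapToClientPosition(int64_t mmapPosition) {
        return mmapPosition - gPositionOffset.load();
    }

    int main() {
        onBurstCopied(/*mmapFramesRead=*/960, /*clientFramesWritten=*/480);
        std::printf("client position = %lld\n",
                    static_cast<long long>(mmapToClientPosition(1920)));  // prints 1440
        return 0;
    }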
diff --git a/services/oboeservice/AAudioServiceStreamShared.h b/services/oboeservice/AAudioServiceStreamShared.h
index abcb782..4fae5b4 100644
--- a/services/oboeservice/AAudioServiceStreamShared.h
+++ b/services/oboeservice/AAudioServiceStreamShared.h
@@ -52,23 +52,16 @@
 
     aaudio_result_t open(const aaudio::AAudioStreamRequest &request) override;
 
-    aaudio_result_t close_l() override;
+    void writeDataIfRoom(int64_t mmapFramesRead, const void *buffer, int32_t numFrames);
 
     /**
-     * This must be locked when calling getAudioDataFifoBuffer_l() and while
-     * using the FifoBuffer it returns.
-     */
-    std::mutex &getAudioDataQueueLock() {
-        return mAudioDataQueueLock;
-    }
-
-    /**
-     * This must only be call under getAudioDataQueueLock().
+     * This must only be called while holding audioDataQueueLock.
      * @return
      */
-    android::FifoBuffer *getAudioDataFifoBuffer_l() { return (mAudioDataQueue == nullptr)
-                                                      ? nullptr
-                                                      : mAudioDataQueue->getFifoBuffer(); }
+    std::shared_ptr<SharedRingBuffer> getAudioDataQueue_l()
+            REQUIRES(audioDataQueueLock) {
+        return mAudioDataQueue;
+    }
 
     /* Keep a record of when a buffer transfer completed.
      * This allows for a more accurate timing model.
@@ -89,6 +82,10 @@
 
     const char *getTypeText() const override { return "Shared"; }
 
+    // This is public so that the GUARDED_BY() thread-safety annotation
+    // can be checked when another object takes the lock.
+    mutable std::mutex   audioDataQueueLock;
+
 protected:
 
     aaudio_result_t getAudioDataDescription(AudioEndpointParcelable &parcelable) override;
@@ -106,8 +103,8 @@
                                             int32_t framesPerBurst);
 
 private:
-    SharedRingBuffer        *mAudioDataQueue = nullptr; // protected by mAudioDataQueueLock
-    std::mutex               mAudioDataQueueLock;
+
+    std::shared_ptr<SharedRingBuffer> mAudioDataQueue GUARDED_BY(audioDataQueueLock);
 
     std::atomic<int64_t>     mTimestampPositionOffset;
     std::atomic<int32_t>     mXRunCount;
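
This header now leans on clang's thread-safety analysis (GUARDED_BY on the queue, REQUIRES on the accessor), enforced by the -Wthread-safety flag added to services/oboeservice/Android.bp further down. A minimal sketch of the idiom, assuming the android-base annotation macros already used by this change; it is illustrative only, not part of the patch:

    #include <mutex>
    #include <android-base/thread_annotations.h>

    class Counter {
    public:
        void increment() EXCLUDES(mLock) {
            std::lock_guard<std::mutex> guard(mLock);
            incrementLocked();
        }

    private:
        // May only be called with mLock held; -Wthread-safety flags violations.
        void incrementLocked() REQUIRES(mLock) { ++mValue; }

        mutable std::mutex mLock;
        int mValue GUARDED_BY(mLock) = 0;
    };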
diff --git a/services/oboeservice/AAudioStreamTracker.cpp b/services/oboeservice/AAudioStreamTracker.cpp
index 8e66b94..9bbbc73 100644
--- a/services/oboeservice/AAudioStreamTracker.cpp
+++ b/services/oboeservice/AAudioStreamTracker.cpp
@@ -96,7 +96,7 @@
     return handle;
 }
 
-std::string AAudioStreamTracker::dump() const {
+std::string AAudioStreamTracker::dump() const NO_THREAD_SAFETY_ANALYSIS {
     std::stringstream result;
     const bool isLocked = AAudio_tryUntilTrue(
             [this]()->bool { return mHandleLock.try_lock(); } /* f */,
diff --git a/services/oboeservice/AAudioStreamTracker.h b/services/oboeservice/AAudioStreamTracker.h
index d1301a2..43870fc 100644
--- a/services/oboeservice/AAudioStreamTracker.h
+++ b/services/oboeservice/AAudioStreamTracker.h
@@ -17,13 +17,13 @@
 #ifndef AAUDIO_AAUDIO_STREAM_TRACKER_H
 #define AAUDIO_AAUDIO_STREAM_TRACKER_H
 
+#include <mutex>
 #include <time.h>
-#include <pthread.h>
 
+#include <android-base/thread_annotations.h>
 #include <aaudio/AAudio.h>
 
 #include "binding/AAudioCommon.h"
-
 #include "AAudioServiceStreamBase.h"
 
 namespace aaudio {
@@ -75,11 +75,10 @@
     static aaudio_handle_t bumpHandle(aaudio_handle_t handle);
 
     // Track stream using a unique handle that wraps. Only use positive half.
-    mutable std::mutex                mHandleLock;
-    // protected by mHandleLock
-    aaudio_handle_t                   mPreviousHandle = 0;
-    // protected by mHandleLock
-    std::map<aaudio_handle_t, android::sp<aaudio::AAudioServiceStreamBase>> mStreamsByHandle;
+    mutable std::mutex            mHandleLock;
+    aaudio_handle_t               mPreviousHandle GUARDED_BY(mHandleLock) = 0;
+    std::map<aaudio_handle_t, android::sp<aaudio::AAudioServiceStreamBase>>
+            mStreamsByHandle GUARDED_BY(mHandleLock);
 };
 
 
diff --git a/services/oboeservice/AAudioThread.cpp b/services/oboeservice/AAudioThread.cpp
index ed7895b..68496ac 100644
--- a/services/oboeservice/AAudioThread.cpp
+++ b/services/oboeservice/AAudioThread.cpp
@@ -37,10 +37,13 @@
     setup("AAudio");
 }
 
-void AAudioThread::setup(const char *prefix) {
-    // mThread is a pthread_t of unknown size so we need memset().
-    memset(&mThread, 0, sizeof(mThread));
+AAudioThread::~AAudioThread() {
+    ALOGE_IF(pthread_equal(pthread_self(), mThread),
+            "%s() destructor running in thread", __func__);
+    ALOGE_IF(mHasThread, "%s() thread never joined", __func__);
+}
 
+void AAudioThread::setup(const char *prefix) {
     // Name the thread with an increasing index, "prefix_#", for debugging.
     uint32_t index = mNextThreadIndex++;
     // Wrap the index so that we do not hit the 16 char limit
@@ -57,7 +60,7 @@
     }
 }
 
-// This is the entry point for the new thread created by createThread().
+// This is the entry point for the new thread created by createThread_l().
 // It converts the 'C' function call to a C++ method call.
 static void * AAudioThread_internalThreadProc(void *arg) {
     AAudioThread *aaudioThread = (AAudioThread *) arg;
@@ -90,13 +93,18 @@
         ALOGE("stop() but no thread running");
         return AAUDIO_ERROR_INVALID_STATE;
     }
+    // Check to see if the thread is trying to stop itself.
+    if (pthread_equal(pthread_self(), mThread)) {
+        ALOGE("%s() attempt to pthread_join() from launched thread!", __func__);
+        return AAUDIO_ERROR_INTERNAL;
+    }
+
     int err = pthread_join(mThread, nullptr);
-    mHasThread = false;
     if (err != 0) {
         ALOGE("stop() - pthread_join() returned %d %s", err, strerror(err));
         return AAudioConvert_androidToAAudioResult(-err);
     } else {
+        mHasThread = false;
         return AAUDIO_OK;
     }
 }
-
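
AAudioThread::stop() now refuses to pthread_join() from the launched thread itself, since a self-join would otherwise deadlock or fail with EDEADLK. A small standalone sketch of that guard (illustrative names, not AAudio code):

    #include <pthread.h>
    #include <cstdio>

    // Join 'thread' unless the caller *is* that thread, which would self-deadlock.
    static int safeJoin(pthread_t thread) {
        if (pthread_equal(pthread_self(), thread)) {
            std::printf("refusing to join from the launched thread\n");
            return -1;
        }
        return pthread_join(thread, nullptr);
    }

    static void *workerProc(void *) {
        safeJoin(pthread_self());  // trips the guard: a thread cannot join itself
        return nullptr;
    }

    int main() {
        pthread_t worker;
        pthread_create(&worker, nullptr, workerProc, nullptr);
        return safeJoin(worker);   // joins normally from main()
    }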
diff --git a/services/oboeservice/AAudioThread.h b/services/oboeservice/AAudioThread.h
index dcce68a..08a8a98 100644
--- a/services/oboeservice/AAudioThread.h
+++ b/services/oboeservice/AAudioThread.h
@@ -46,7 +46,7 @@
 
     explicit AAudioThread(const char *prefix);
 
-    virtual ~AAudioThread() = default;
+    virtual ~AAudioThread();
 
     /**
      * Start the thread running.
@@ -73,7 +73,7 @@
 
     Runnable    *mRunnable = nullptr;
     bool         mHasThread = false;
-    pthread_t    mThread; // initialized in constructor
+    pthread_t    mThread = {};
 
     static std::atomic<uint32_t> mNextThreadIndex;
     char         mName[16]; // max length for a pthread_name
diff --git a/services/oboeservice/Android.bp b/services/oboeservice/Android.bp
index 8b1e2c0..80f17f4 100644
--- a/services/oboeservice/Android.bp
+++ b/services/oboeservice/Android.bp
@@ -37,6 +37,7 @@
     ],
 
     cflags: [
+        "-Wthread-safety",
         "-Wno-unused-parameter",
         "-Wall",
         "-Werror",
@@ -55,6 +56,11 @@
         "libcutils",
         "liblog",
         "libutils",
+        "aaudio-aidl-cpp",
+    ],
+
+    export_shared_lib_headers: [
+        "libaaudio_internal",
     ],
 
     header_libs: [
diff --git a/services/oboeservice/SharedMemoryProxy.cpp b/services/oboeservice/SharedMemoryProxy.cpp
index c43ed22..78d4884 100644
--- a/services/oboeservice/SharedMemoryProxy.cpp
+++ b/services/oboeservice/SharedMemoryProxy.cpp
@@ -20,6 +20,7 @@
 
 #include <errno.h>
 #include <string.h>
+#include <unistd.h>
 
 #include <aaudio/AAudio.h>
 #include "SharedMemoryProxy.h"
diff --git a/services/oboeservice/SharedRingBuffer.cpp b/services/oboeservice/SharedRingBuffer.cpp
index 2454446..c1d4e16 100644
--- a/services/oboeservice/SharedRingBuffer.cpp
+++ b/services/oboeservice/SharedRingBuffer.cpp
@@ -18,6 +18,8 @@
 //#define LOG_NDEBUG 0
 #include <utils/Log.h>
 
+#include <iomanip>
+#include <iostream>
 #include <sys/mman.h>
 
 #include "binding/RingBufferParcelable.h"
@@ -30,8 +32,8 @@
 
 SharedRingBuffer::~SharedRingBuffer()
 {
+    mFifoBuffer.reset(); // uses mSharedMemory
     if (mSharedMemory != nullptr) {
-        delete mFifoBuffer;
         munmap(mSharedMemory, mSharedMemorySizeInBytes);
         mSharedMemory = nullptr;
     }
@@ -58,16 +60,18 @@
         return AAUDIO_ERROR_INTERNAL; // TODO convert errno to a better AAUDIO_ERROR;
     }
 
-    // Map the fd to memory addresses.
-    mSharedMemory = (uint8_t *) mmap(0, mSharedMemorySizeInBytes,
+    // Map the fd into memory. Keep the mmap() result in a temporary pointer and assign
+    // it to `mSharedMemory` only if the mapping succeeds.
+    uint8_t* tmpPtr = (uint8_t *) mmap(0, mSharedMemorySizeInBytes,
                          PROT_READ|PROT_WRITE,
                          MAP_SHARED,
                          mFileDescriptor.get(), 0);
-    if (mSharedMemory == MAP_FAILED) {
+    if (tmpPtr == MAP_FAILED) {
         ALOGE("allocate() mmap() failed %d", errno);
         mFileDescriptor.reset();
         return AAUDIO_ERROR_INTERNAL; // TODO convert errno to a better AAUDIO_ERROR;
     }
+    mSharedMemory = tmpPtr;
 
     // Get addresses for our counters and data from the shared memory.
     fifo_counter_t *readCounterAddress =
@@ -76,7 +80,7 @@
             (fifo_counter_t *) &mSharedMemory[SHARED_RINGBUFFER_WRITE_OFFSET];
     uint8_t *dataAddress = &mSharedMemory[SHARED_RINGBUFFER_DATA_OFFSET];
 
-    mFifoBuffer = new FifoBuffer(bytesPerFrame, capacityInFrames,
+    mFifoBuffer = std::make_shared<FifoBufferIndirect>(bytesPerFrame, capacityInFrames,
                                  readCounterAddress, writeCounterAddress, dataAddress);
     return AAUDIO_OK;
 }
@@ -94,3 +98,19 @@
     ringBufferParcelable.setFramesPerBurst(1);
     ringBufferParcelable.setCapacityInFrames(mCapacityInFrames);
 }
+
+double SharedRingBuffer::getFractionalFullness() const {
+    int32_t framesAvailable = mFifoBuffer->getFullFramesAvailable();
+    int32_t capacity = mFifoBuffer->getBufferCapacityInFrames();
+    return framesAvailable / (double) capacity;
+}
+
+std::string SharedRingBuffer::dump() const {
+    std::stringstream result;
+    int32_t readCounter = mFifoBuffer->getReadCounter();
+    int32_t writeCounter = mFifoBuffer->getWriteCounter();
+    result << std::setw(10) << writeCounter;
+    result << std::setw(10) << readCounter;
+    result << std::setw(8) << (writeCounter - readCounter);
+    return result.str();
+}
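
allocate() now maps into a temporary pointer and commits it to mSharedMemory only on success, so a failed mmap() can never leave the member holding MAP_FAILED for the destructor to hand to munmap(). A standalone sketch of that pattern (illustrative names, not the AAudio implementation):

    #include <sys/mman.h>
    #include <cstddef>
    #include <cstdint>
    #include <cstdio>

    static uint8_t *gMapping = nullptr;   // plays the role of mSharedMemory
    static size_t   gMappingSize = 0;

    static bool mapSharedRegion(int fd, size_t sizeInBytes) {
        void *tmpPtr = mmap(nullptr, sizeInBytes, PROT_READ | PROT_WRITE,
                            MAP_SHARED, fd, 0);
        if (tmpPtr == MAP_FAILED) {
            std::perror("mmap");
            return false;                           // gMapping is left untouched (nullptr)
        }
        gMapping = static_cast<uint8_t *>(tmpPtr);  // commit only after success
        gMappingSize = sizeInBytes;
        return true;
    }

    static void unmapSharedRegion() {
        if (gMapping != nullptr) {                  // MAP_FAILED can never reach munmap()
            munmap(gMapping, gMappingSize);
            gMapping = nullptr;
        }
    }

    int main() {
        bool ok = mapSharedRegion(/*fd=*/-1, 4096); // fails: bad fd, mapping stays null
        std::printf("mapped: %s\n", ok ? "yes" : "no");
        unmapSharedRegion();                        // safe no-op after the failure
        return 0;
    }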
diff --git a/services/oboeservice/SharedRingBuffer.h b/services/oboeservice/SharedRingBuffer.h
index 79169bc..c3a9bb7 100644
--- a/services/oboeservice/SharedRingBuffer.h
+++ b/services/oboeservice/SharedRingBuffer.h
@@ -18,8 +18,9 @@
 #define AAUDIO_SHARED_RINGBUFFER_H
 
 #include <android-base/unique_fd.h>
-#include <stdint.h>
 #include <cutils/ashmem.h>
+#include <stdint.h>
+#include <string>
 #include <sys/mman.h>
 
 #include "fifo/FifoBuffer.h"
@@ -47,15 +48,25 @@
     void fillParcelable(AudioEndpointParcelable &endpointParcelable,
                         RingBufferParcelable &ringBufferParcelable);
 
-    android::FifoBuffer * getFifoBuffer() {
+    /**
+     * Return available frames as a fraction of the capacity.
+     * @return fullness between 0.0 and 1.0
+     */
+    double getFractionalFullness() const;
+
+    // dump: write# read# available
+    std::string dump() const;
+
+    std::shared_ptr<android::FifoBuffer> getFifoBuffer() {
         return mFifoBuffer;
     }
 
 private:
     android::base::unique_fd  mFileDescriptor;
-    android::FifoBuffer      *mFifoBuffer = nullptr;
-    uint8_t                  *mSharedMemory = nullptr;
+    std::shared_ptr<android::FifoBufferIndirect>  mFifoBuffer;
+    uint8_t                  *mSharedMemory = nullptr; // mmap
     int32_t                   mSharedMemorySizeInBytes = 0;
+    // Size of the data region only (excludes the read/write counters).
     int32_t                   mDataMemorySizeInBytes = 0;
     android::fifo_frames_t    mCapacityInFrames = 0;
 };
diff --git a/services/tuner/Android.bp b/services/tuner/Android.bp
new file mode 100644
index 0000000..6078e54
--- /dev/null
+++ b/services/tuner/Android.bp
@@ -0,0 +1,84 @@
+filegroup {
+    name: "tv_tuner_aidl",
+    srcs: [
+        "aidl/android/media/tv/tuner/ITunerService.aidl",
+        "aidl/android/media/tv/tuner/TunerFrontendAnalogCapabilities.aidl",
+        "aidl/android/media/tv/tuner/TunerFrontendAtscCapabilities.aidl",
+        "aidl/android/media/tv/tuner/TunerFrontendAtsc3Capabilities.aidl",
+        "aidl/android/media/tv/tuner/TunerFrontendCableCapabilities.aidl",
+        "aidl/android/media/tv/tuner/TunerFrontendCapabilities.aidl",
+        "aidl/android/media/tv/tuner/TunerFrontendDvbsCapabilities.aidl",
+        "aidl/android/media/tv/tuner/TunerFrontendDvbtCapabilities.aidl",
+        "aidl/android/media/tv/tuner/TunerFrontendIsdbsCapabilities.aidl",
+        "aidl/android/media/tv/tuner/TunerFrontendIsdbs3Capabilities.aidl",
+        "aidl/android/media/tv/tuner/TunerFrontendIsdbtCapabilities.aidl",
+        "aidl/android/media/tv/tuner/TunerServiceFrontendInfo.aidl",
+    ],
+    path: "aidl",
+}
+
+aidl_interface {
+    name: "tv_tuner_aidl_interface",
+    unstable: true,
+    local_include_dir: "aidl",
+    srcs: [
+        ":tv_tuner_aidl",
+    ],
+}
+
+cc_library {
+    name: "libtunerservice",
+
+    srcs: [
+        "TunerService.cpp",
+    ],
+
+    shared_libs: [
+        "android.hardware.tv.tuner@1.0",
+        "libbinder",
+        "libbinder_ndk",
+        "libhidlbase",
+        "liblog",
+        "libmedia",
+        "libutils",
+        "tv_tuner_aidl_interface-ndk_platform",
+    ],
+
+    include_dirs: ["frameworks/av/include"],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+
+    export_include_dirs: ["."],
+}
+
+
+cc_binary {
+    name: "mediatuner",
+
+    srcs: [
+        "main_tunerservice.cpp",
+    ],
+
+    shared_libs: [
+        "android.hardware.tv.tuner@1.0",
+        "libbase",
+        "libbinder",
+        "liblog",
+        "libtunerservice",
+        "libutils",
+    ],
+
+    static_libs: [
+        "tv_tuner_aidl_interface-ndk_platform",
+    ],
+
+    init_rc: ["mediatuner.rc"],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+}
\ No newline at end of file
diff --git a/services/tuner/OWNERS b/services/tuner/OWNERS
new file mode 100644
index 0000000..0ceb8e8
--- /dev/null
+++ b/services/tuner/OWNERS
@@ -0,0 +1,2 @@
+nchalko@google.com
+quxiangfang@google.com
diff --git a/services/tuner/TunerService.cpp b/services/tuner/TunerService.cpp
new file mode 100644
index 0000000..77250aa
--- /dev/null
+++ b/services/tuner/TunerService.cpp
@@ -0,0 +1,219 @@
+/**
+ * Copyright (c) 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "TunerService"
+
+#include <android/binder_manager.h>
+#include <utils/Log.h>
+#include "TunerService.h"
+
+using ::aidl::android::media::tv::tuner::TunerFrontendAnalogCapabilities;
+using ::aidl::android::media::tv::tuner::TunerFrontendAtsc3Capabilities;
+using ::aidl::android::media::tv::tuner::TunerFrontendAtscCapabilities;
+using ::aidl::android::media::tv::tuner::TunerFrontendCableCapabilities;
+using ::aidl::android::media::tv::tuner::TunerFrontendCapabilities;
+using ::aidl::android::media::tv::tuner::TunerFrontendDvbsCapabilities;
+using ::aidl::android::media::tv::tuner::TunerFrontendDvbtCapabilities;
+using ::aidl::android::media::tv::tuner::TunerFrontendIsdbs3Capabilities;
+using ::aidl::android::media::tv::tuner::TunerFrontendIsdbsCapabilities;
+using ::aidl::android::media::tv::tuner::TunerFrontendIsdbtCapabilities;
+using ::android::hardware::hidl_vec;
+using ::android::hardware::tv::tuner::V1_0::FrontendId;
+using ::android::hardware::tv::tuner::V1_0::FrontendType;
+using ::android::hardware::tv::tuner::V1_0::Result;
+
+namespace android {
+
+sp<ITuner> TunerService::mTuner;
+
+TunerService::TunerService() {}
+TunerService::~TunerService() {}
+
+void TunerService::instantiate() {
+    std::shared_ptr<TunerService> service =
+            ::ndk::SharedRefBase::make<TunerService>();
+    AServiceManager_addService(service->asBinder().get(), getServiceName());
+}
+
+Status TunerService::getFrontendIds(std::vector<int32_t>* ids, int32_t* /* _aidl_return */) {
+    if (mTuner == nullptr) {
+        // TODO: create a method for init.
+        mTuner = ITuner::getService();
+        if (mTuner == nullptr) {
+            ALOGE("Failed to get ITuner service.");
+            return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                    static_cast<int32_t>(Result::UNAVAILABLE));
+        }
+    }
+    hidl_vec<FrontendId> feIds;
+    Result res;
+    mTuner->getFrontendIds([&](Result r, const hidl_vec<FrontendId>& frontendIds) {
+        feIds = frontendIds;
+        res = r;
+    });
+    if (res != Result::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+    ids->resize(feIds.size());
+    std::copy(feIds.begin(), feIds.end(), ids->begin());
+
+    return ::ndk::ScopedAStatus::ok();
+}
+
+Status TunerService::getFrontendInfo(
+        int32_t frontendHandle, TunerServiceFrontendInfo* _aidl_return) {
+    if (mTuner == nullptr) {
+        // TODO: create a method for init.
+        mTuner = ITuner::getService();
+        if (mTuner == nullptr) {
+            ALOGE("Failed to get ITuner service.");
+            return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                    static_cast<int32_t>(Result::UNAVAILABLE));
+        }
+    }
+
+    Result res;
+    FrontendInfo info;
+    int feId = getResourceIdFromHandle(frontendHandle);
+    mTuner->getFrontendInfo(feId, [&](Result r, const FrontendInfo& feInfo) {
+        info = feInfo;
+        res = r;
+    });
+    if (res != Result::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+
+    TunerServiceFrontendInfo tunerInfo = convertToAidlFrontendInfo(feId, info);
+    *_aidl_return = tunerInfo;
+    return ::ndk::ScopedAStatus::ok();
+}
+
+TunerServiceFrontendInfo TunerService::convertToAidlFrontendInfo(int feId, FrontendInfo halInfo) {
+    TunerServiceFrontendInfo info{
+        .id = feId,
+        .type = (int)halInfo.type,
+        .minFrequency = (int)halInfo.minFrequency,
+        .maxFrequency = (int)halInfo.maxFrequency,
+        .minSymbolRate = (int)halInfo.minSymbolRate,
+        .maxSymbolRate = (int)halInfo.maxSymbolRate,
+        .acquireRange = (int)halInfo.acquireRange,
+        .exclusiveGroupId = (int)halInfo.exclusiveGroupId,
+    };
+    for (int i = 0; i < halInfo.statusCaps.size(); i++) {
+        info.statusCaps.push_back((int)halInfo.statusCaps[i]);
+    }
+
+    TunerFrontendCapabilities caps;
+    switch (halInfo.type) {
+        case FrontendType::ANALOG: {
+            TunerFrontendAnalogCapabilities analogCaps{
+                .typeCap = (int)halInfo.frontendCaps.analogCaps().typeCap,
+                .sifStandardCap = (int)halInfo.frontendCaps.analogCaps().sifStandardCap,
+            };
+            caps.set<TunerFrontendCapabilities::analogCaps>(analogCaps);
+            break;
+        }
+        case FrontendType::ATSC: {
+            TunerFrontendAtscCapabilities atscCaps{
+                .modulationCap = (int)halInfo.frontendCaps.atscCaps().modulationCap,
+            };
+            caps.set<TunerFrontendCapabilities::atscCaps>(atscCaps);
+            break;
+        }
+        case FrontendType::ATSC3: {
+            TunerFrontendAtsc3Capabilities atsc3Caps{
+                .bandwidthCap = (int)halInfo.frontendCaps.atsc3Caps().bandwidthCap,
+                .modulationCap = (int)halInfo.frontendCaps.atsc3Caps().modulationCap,
+                .timeInterleaveModeCap =
+                        (int)halInfo.frontendCaps.atsc3Caps().timeInterleaveModeCap,
+                .codeRateCap = (int)halInfo.frontendCaps.atsc3Caps().codeRateCap,
+                .demodOutputFormatCap = (int)halInfo.frontendCaps.atsc3Caps().demodOutputFormatCap,
+                .fecCap = (int)halInfo.frontendCaps.atsc3Caps().fecCap,
+            };
+            caps.set<TunerFrontendCapabilities::atsc3Caps>(atsc3Caps);
+            break;
+        }
+        case FrontendType::DVBC: {
+            TunerFrontendCableCapabilities cableCaps{
+                .modulationCap = (int)halInfo.frontendCaps.dvbcCaps().modulationCap,
+                .codeRateCap = (int)halInfo.frontendCaps.dvbcCaps().fecCap,
+                .annexCap = (int)halInfo.frontendCaps.dvbcCaps().annexCap,
+            };
+            caps.set<TunerFrontendCapabilities::cableCaps>(cableCaps);
+            break;
+        }
+        case FrontendType::DVBS: {
+            TunerFrontendDvbsCapabilities dvbsCaps{
+                .modulationCap = (int)halInfo.frontendCaps.dvbsCaps().modulationCap,
+                .codeRateCap = (long)halInfo.frontendCaps.dvbsCaps().innerfecCap,
+                .standard = (int)halInfo.frontendCaps.dvbsCaps().standard,
+            };
+            caps.set<TunerFrontendCapabilities::dvbsCaps>(dvbsCaps);
+            break;
+        }
+        case FrontendType::DVBT: {
+            TunerFrontendDvbtCapabilities dvbtCaps{
+                .transmissionModeCap = (int)halInfo.frontendCaps.dvbtCaps().transmissionModeCap,
+                .bandwidthCap = (int)halInfo.frontendCaps.dvbtCaps().bandwidthCap,
+                .constellationCap = (int)halInfo.frontendCaps.dvbtCaps().constellationCap,
+                .codeRateCap = (int)halInfo.frontendCaps.dvbtCaps().coderateCap,
+                .hierarchyCap = (int)halInfo.frontendCaps.dvbtCaps().hierarchyCap,
+                .guardIntervalCap = (int)halInfo.frontendCaps.dvbtCaps().guardIntervalCap,
+                .isT2Supported = (bool)halInfo.frontendCaps.dvbtCaps().isT2Supported,
+                .isMisoSupported = (bool)halInfo.frontendCaps.dvbtCaps().isMisoSupported,
+            };
+            caps.set<TunerFrontendCapabilities::dvbtCaps>(dvbtCaps);
+            break;
+        }
+        case FrontendType::ISDBS: {
+            TunerFrontendIsdbsCapabilities isdbsCaps{
+                .modulationCap = (int)halInfo.frontendCaps.isdbsCaps().modulationCap,
+                .codeRateCap = (int)halInfo.frontendCaps.isdbsCaps().coderateCap,
+            };
+            caps.set<TunerFrontendCapabilities::isdbsCaps>(isdbsCaps);
+            break;
+        }
+        case FrontendType::ISDBS3: {
+            TunerFrontendIsdbs3Capabilities isdbs3Caps{
+                .modulationCap = (int)halInfo.frontendCaps.isdbs3Caps().modulationCap,
+                .codeRateCap = (int)halInfo.frontendCaps.isdbs3Caps().coderateCap,
+            };
+            caps.set<TunerFrontendCapabilities::isdbs3Caps>(isdbs3Caps);
+            break;
+        }
+        case FrontendType::ISDBT: {
+            TunerFrontendIsdbtCapabilities isdbtCaps{
+                .modeCap = (int)halInfo.frontendCaps.isdbtCaps().modeCap,
+                .bandwidthCap = (int)halInfo.frontendCaps.isdbtCaps().bandwidthCap,
+                .modulationCap = (int)halInfo.frontendCaps.isdbtCaps().modulationCap,
+                .codeRateCap = (int)halInfo.frontendCaps.isdbtCaps().coderateCap,
+                .guardIntervalCap = (int)halInfo.frontendCaps.isdbtCaps().guardIntervalCap,
+            };
+            caps.set<TunerFrontendCapabilities::isdbtCaps>(isdbtCaps);
+            break;
+        }
+        default:
+            break;
+    }
+
+    info.caps = caps;
+    return info;
+}
+
+int TunerService::getResourceIdFromHandle(int resourceHandle) {
+    return (resourceHandle & 0x00ff0000) >> 16;
+}
+} // namespace android
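
getResourceIdFromHandle() implies that the resource ID handed out by the Tuner Resource Manager sits in bits 16-23 of the handle; that layout is inferred from the mask above and is not documented in this change. A tiny illustrative decode using a hypothetical handle value:

    #include <cstdio>

    // Mirrors TunerService::getResourceIdFromHandle(): bits 16-23 carry the ID.
    static int getResourceIdFromHandle(int resourceHandle) {
        return (resourceHandle & 0x00ff0000) >> 16;
    }

    int main() {
        int handle = (0x2A << 16) | 0x1234;   // hypothetical handle carrying ID 42
        std::printf("resource id = %d\n", getResourceIdFromHandle(handle));  // prints 42
        return 0;
    }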
diff --git a/services/tuner/TunerService.h b/services/tuner/TunerService.h
new file mode 100644
index 0000000..f3d5ff7
--- /dev/null
+++ b/services/tuner/TunerService.h
@@ -0,0 +1,51 @@
+/**
+ * Copyright (c) 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_TUNERSERVICE_H
+#define ANDROID_MEDIA_TUNERSERVICE_H
+
+#include <aidl/android/media/tv/tuner/BnTunerService.h>
+#include <aidl/android/media/tv/tuner/TunerServiceFrontendInfo.h>
+#include <android/hardware/tv/tuner/1.0/ITuner.h>
+
+using Status = ::ndk::ScopedAStatus;
+using ::aidl::android::media::tv::tuner::BnTunerService;
+using ::aidl::android::media::tv::tuner::TunerServiceFrontendInfo;
+using ::android::hardware::tv::tuner::V1_0::FrontendInfo;
+using ::android::hardware::tv::tuner::V1_0::ITuner;
+
+namespace android {
+
+class TunerService : public BnTunerService {
+
+public:
+    static char const *getServiceName() { return "media.tuner"; }
+    static void instantiate();
+    TunerService();
+    virtual ~TunerService();
+    Status getFrontendIds(std::vector<int32_t>* ids, int32_t* _aidl_return) override;
+    Status getFrontendInfo(int32_t frontendHandle, TunerServiceFrontendInfo* _aidl_return) override;
+
+private:
+    static sp<ITuner> mTuner;
+
+    int getResourceIdFromHandle(int resourceHandle);
+    TunerServiceFrontendInfo convertToAidlFrontendInfo(int feId, FrontendInfo halInfo);
+};
+
+} // namespace android
+
+#endif // ANDROID_MEDIA_TUNERSERVICE_H
diff --git a/services/tuner/aidl/android/media/tv/OWNERS b/services/tuner/aidl/android/media/tv/OWNERS
new file mode 100644
index 0000000..0ceb8e8
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/OWNERS
@@ -0,0 +1,2 @@
+nchalko@google.com
+quxiangfang@google.com
diff --git a/services/tuner/aidl/android/media/tv/tuner/ITunerService.aidl b/services/tuner/aidl/android/media/tv/tuner/ITunerService.aidl
new file mode 100644
index 0000000..1d3671d
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/ITunerService.aidl
@@ -0,0 +1,42 @@
+/**
+ * Copyright (c) 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+import android.media.tv.tuner.TunerServiceFrontendInfo;
+
+/**
+ * TunerService interface handles tuner related operations.
+ *
+ * {@hide}
+ */
+interface ITunerService {
+
+    /**
+     * Gets frontend IDs.
+     *
+     * @return the result code of the operation.
+     */
+    int getFrontendIds(out int[] ids);
+
+    /**
+     * Retrieves the frontend's information.
+     *
+     * @param frontendHandle the handle of the frontend granted by TRM.
+     * @return the information for the frontend.
+     */
+    TunerServiceFrontendInfo getFrontendInfo(in int frontendHandle);
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAnalogCapabilities.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAnalogCapabilities.aidl
new file mode 100644
index 0000000..74bf04e
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAnalogCapabilities.aidl
@@ -0,0 +1,34 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * Analog Frontend Capabilities interface.
+ *
+ * {@hide}
+ */
+parcelable TunerFrontendAnalogCapabilities {
+    /**
+     * Signal Type capability
+     */
+    int typeCap;
+
+    /**
+     * Standard Interchange Format (SIF) capability
+     */
+    int sifStandardCap;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAtsc3Capabilities.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAtsc3Capabilities.aidl
new file mode 100644
index 0000000..6c9be77
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAtsc3Capabilities.aidl
@@ -0,0 +1,54 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * ATSC3 Frontend Capabilities interface.
+ *
+ * {@hide}
+ */
+parcelable TunerFrontendAtsc3Capabilities {
+    /**
+     * Bandwidth capability
+     */
+    int bandwidthCap;
+
+    /**
+     * Modulation capability
+     */
+    int modulationCap;
+
+    /**
+     * TimeInterleaveMode capability
+     */
+    int timeInterleaveModeCap;
+
+    /**
+     * CodeRate capability
+     */
+    int codeRateCap;
+
+    /**
+     * FEC capability
+     */
+    int fecCap;
+
+    /**
+     * Demodulator Output Format capability
+     */
+    int demodOutputFormatCap;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAtscCapabilities.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAtscCapabilities.aidl
new file mode 100644
index 0000000..2b6c2fc
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAtscCapabilities.aidl
@@ -0,0 +1,29 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * ATSC Frontend Capabilities interface.
+ *
+ * {@hide}
+ */
+parcelable TunerFrontendAtscCapabilities {
+    /**
+     * Modulation capability
+     */
+    int modulationCap;
+}
\ No newline at end of file
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendCableCapabilities.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendCableCapabilities.aidl
new file mode 100644
index 0000000..7df452a
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendCableCapabilities.aidl
@@ -0,0 +1,39 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * Cable(DVBC) Frontend Capabilities interface.
+ *
+ * {@hide}
+ */
+parcelable TunerFrontendCableCapabilities {
+    /**
+     * Modulation capability
+     */
+    int modulationCap;
+
+    /**
+     * Code Rate capability
+     */
+    int codeRateCap; // inner FEC will converge to codeRate
+
+    /**
+     * Annex capability
+     */
+    int annexCap;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendCapabilities.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendCapabilities.aidl
new file mode 100644
index 0000000..19f31f1
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendCapabilities.aidl
@@ -0,0 +1,85 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+import android.media.tv.tuner.TunerFrontendAnalogCapabilities;
+import android.media.tv.tuner.TunerFrontendAtscCapabilities;
+import android.media.tv.tuner.TunerFrontendAtsc3Capabilities;
+import android.media.tv.tuner.TunerFrontendCableCapabilities;
+import android.media.tv.tuner.TunerFrontendDvbsCapabilities;
+import android.media.tv.tuner.TunerFrontendDvbtCapabilities;
+import android.media.tv.tuner.TunerFrontendIsdbsCapabilities;
+import android.media.tv.tuner.TunerFrontendIsdbs3Capabilities;
+import android.media.tv.tuner.TunerFrontendIsdbtCapabilities;
+
+/**
+ * Frontend Capabilities union.
+ *
+ * The client may use FrontendInfo.type as the discriminator to determine which
+ * field of the union carries the valid capabilities for the frontend.
+ *
+ * {@hide}
+ */
+union TunerFrontendCapabilities {
+    /**
+     * Analog Frontend Capabilities
+     */
+    TunerFrontendAnalogCapabilities analogCaps;
+
+    /**
+     * ATSC Frontend Capabilities
+     */
+    TunerFrontendAtscCapabilities atscCaps;
+
+    /**
+     * ATSC3 Frontend Capabilities
+     */
+    TunerFrontendAtsc3Capabilities atsc3Caps;
+
+    /**
+     * Cable Frontend Capabilities
+     */
+    TunerFrontendCableCapabilities cableCaps;
+
+    /**
+     * DVBS Frontend Capabilities
+     */
+    TunerFrontendDvbsCapabilities dvbsCaps;
+
+    /**
+     * DVBT Frontend Capabilities
+     */
+    TunerFrontendDvbtCapabilities dvbtCaps;
+
+    /**
+     * ISDB-S Frontend Capabilities
+     */
+    TunerFrontendIsdbsCapabilities isdbsCaps;
+
+    /**
+     * ISDB-S3 Frontend Capabilities
+     */
+    TunerFrontendIsdbs3Capabilities isdbs3Caps;
+
+    /**
+     * ISDB-T Frontend Capabilities
+     */
+    TunerFrontendIsdbtCapabilities isdbtCaps;
+}
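
On the native client side, the generated union is expected to expose getTag()/get<Tag>() accessors mirroring the set<Tag>() calls used in TunerService.cpp above. A hedged sketch of reading one arm of the union, assuming the NDK-backend generated API and header paths (printDvbtCodeRate is a made-up helper for illustration):

    #include <cstdio>
    #include <aidl/android/media/tv/tuner/TunerFrontendCapabilities.h>
    #include <aidl/android/media/tv/tuner/TunerServiceFrontendInfo.h>

    using ::aidl::android::media::tv::tuner::TunerFrontendCapabilities;
    using ::aidl::android::media::tv::tuner::TunerServiceFrontendInfo;

    // Print the DVBT code-rate capability only if the union actually holds dvbtCaps.
    static void printDvbtCodeRate(const TunerServiceFrontendInfo& info) {
        if (info.caps.getTag() == TunerFrontendCapabilities::dvbtCaps) {
            std::printf("DVBT codeRateCap = %d\n",
                        info.caps.get<TunerFrontendCapabilities::dvbtCaps>().codeRateCap);
        }
    }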
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendDvbsCapabilities.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendDvbsCapabilities.aidl
new file mode 100644
index 0000000..5e4322c
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendDvbsCapabilities.aidl
@@ -0,0 +1,39 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * DVBS Frontend Capabilities interface.
+ *
+ * {@hide}
+ */
+parcelable TunerFrontendDvbsCapabilities {
+    /**
+     * Modulation capability
+     */
+    int modulationCap;
+
+    /**
+     * Code Rate capability
+     */
+    long codeRateCap;  // inner FEC will converge to codeRate
+
+    /**
+     * Sub standards capability
+     */
+    int standard;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendDvbtCapabilities.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendDvbtCapabilities.aidl
new file mode 100644
index 0000000..73f16dd
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendDvbtCapabilities.aidl
@@ -0,0 +1,64 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * DVBT Frontend Capabilities interface.
+ *
+ * {@hide}
+ */
+parcelable TunerFrontendDvbtCapabilities {
+    /**
+     * Transmission Mode capability
+     */
+    int transmissionModeCap;
+
+    /**
+     * Bandwidth capability
+     */
+    int bandwidthCap;
+
+    /**
+     * Constellation capability
+     */
+    int constellationCap;
+
+    /**
+     * Code Rate capability
+     */
+    int codeRateCap;
+
+    /**
+     * Hierarchy Type capability
+     */
+    int hierarchyCap;
+
+    /**
+     * Guard Interval capability
+     */
+    int guardIntervalCap;
+
+    /**
+     * T2 Support capability
+     */
+    boolean isT2Supported;
+
+    /**
+     * Miso Support capability
+     */
+    boolean isMisoSupported;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbs3Capabilities.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbs3Capabilities.aidl
new file mode 100644
index 0000000..84dd67a
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbs3Capabilities.aidl
@@ -0,0 +1,34 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * ISDB-S3 Frontend Capabilities interface.
+ *
+ * {@hide}
+ */
+parcelable TunerFrontendIsdbs3Capabilities {
+    /**
+     * Modulation capability
+     */
+    int modulationCap;
+
+    /**
+     * Code Rate capability
+     */
+    int codeRateCap;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbsCapabilities.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbsCapabilities.aidl
new file mode 100644
index 0000000..15dfdf7
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbsCapabilities.aidl
@@ -0,0 +1,34 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * ISDB-S Frontend Capabilities interface.
+ *
+ * {@hide}
+ */
+parcelable TunerFrontendIsdbsCapabilities {
+    /**
+     * Modulation capability
+     */
+    int modulationCap;
+
+    /**
+     * Code Rate capability
+     */
+    int codeRateCap;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbtCapabilities.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbtCapabilities.aidl
new file mode 100644
index 0000000..c9295d8
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbtCapabilities.aidl
@@ -0,0 +1,49 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * ISDB-T Frontend Capabilities interface.
+ *
+ * {@hide}
+ */
+parcelable TunerFrontendIsdbtCapabilities {
+    /**
+     * ISDB-T Mode capability
+     */
+    int modeCap;
+
+    /**
+     * Bandwidth capability
+     */
+    int bandwidthCap;
+
+    /**
+     * Modulation capability
+     */
+    int modulationCap;
+
+    /**
+     * Code Rate capability
+     */
+    int codeRateCap;
+
+    /**
+     * Guard Interval capability
+     */
+    int guardIntervalCap;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerServiceFrontendInfo.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerServiceFrontendInfo.aidl
new file mode 100644
index 0000000..ddcbcdc
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerServiceFrontendInfo.aidl
@@ -0,0 +1,77 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+import android.media.tv.tuner.TunerFrontendCapabilities;
+
+/**
+ * FrontendInfo interface that carries tuner frontend information.
+ *
+ * {@hide}
+ */
+parcelable TunerServiceFrontendInfo {
+    /**
+     * Frontend Id
+     */
+    int id;
+
+    /**
+     * Frontend Type
+     */
+    int type;
+
+    /**
+     * Minimum Frequency in Hertz
+     */
+    int minFrequency;
+
+    /**
+     * Maximum Frequency in Hertz
+     */
+    int maxFrequency;
+
+    /**
+     * Minimum symbols per second
+     */
+    int minSymbolRate;
+
+    /**
+     * Maximum symbols per second
+     */
+    int maxSymbolRate;
+
+    /**
+     * Range in Hertz
+     */
+    int acquireRange;
+
+    /**
+     * Frontends are assigned the same exclusiveGroupId if they cannot function
+     * at the same time, for instance because they share the same hardware module.
+     */
+    int exclusiveGroupId;
+
+    /**
+     * A list of supported status types that the client can query
+     */
+    int[] statusCaps;
+
+    /**
+     * Frontend Capabilities
+     */
+    TunerFrontendCapabilities caps;
+}
diff --git a/services/tuner/main_tunerservice.cpp b/services/tuner/main_tunerservice.cpp
new file mode 100644
index 0000000..a0e7a9f
--- /dev/null
+++ b/services/tuner/main_tunerservice.cpp
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <utils/Log.h>
+#include <binder/IPCThreadState.h>
+#include <binder/IServiceManager.h>
+#include <binder/ProcessState.h>
+#include <hidl/HidlTransportSupport.h>
+
+#include "TunerService.h"
+
+using namespace android;
+
+int main(int argc __unused, char** argv) {
+    ALOGD("Tuner service starting");
+
+    strcpy(argv[0], "media.tuner");
+    sp<ProcessState> proc(ProcessState::self());
+    sp<IServiceManager> sm = defaultServiceManager();
+    ALOGD("ServiceManager: %p", sm.get());
+
+    TunerService::instantiate();
+
+    ProcessState::self()->startThreadPool();
+    IPCThreadState::self()->joinThreadPool();
+}
diff --git a/services/tuner/mediatuner.rc b/services/tuner/mediatuner.rc
new file mode 100644
index 0000000..b0347be
--- /dev/null
+++ b/services/tuner/mediatuner.rc
@@ -0,0 +1,6 @@
+service media.tuner /system/bin/mediatuner
+    class main
+    user media
+    group media
+    ioprio rt 4
+    task_profiles ProcessCapacityHigh HighPerformance
\ No newline at end of file