Merge "Fix heap buffer overflow in clearkey CryptoPlugin::decrypt"
diff --git a/apex/ld.config.txt b/apex/ld.config.txt
index 1c44e65..713f0b7 100644
--- a/apex/ld.config.txt
+++ b/apex/ld.config.txt
@@ -22,6 +22,12 @@
 namespace.default.search.paths      = /apex/com.android.media.swcodec/${LIB}
 namespace.default.asan.search.paths = /apex/com.android.media.swcodec/${LIB}
 
+# The lines below are required to access libs in APEXes that are actually
+# symlinks to files under /system/lib. The symlinks exist for bundled APEXes
+# to reduce space.
+namespace.default.permitted.paths   = /system/${LIB}
+namespace.default.asan.permitted.paths = /system/${LIB}
+
 namespace.default.links = platform
 
 # TODO: replace the following when apex has a way to auto-generate this list
diff --git a/camera/CameraMetadata.cpp b/camera/CameraMetadata.cpp
index 92fe84b..15c295e 100644
--- a/camera/CameraMetadata.cpp
+++ b/camera/CameraMetadata.cpp
@@ -17,7 +17,9 @@
 // #define LOG_NDEBUG 0
 
 #define LOG_TAG "Camera2-Metadata"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
 #include <utils/Log.h>
+#include <utils/Trace.h>
 #include <utils/Errors.h>
 
 #include <binder/Parcel.h>
@@ -38,11 +40,13 @@
 CameraMetadata::CameraMetadata(size_t entryCapacity, size_t dataCapacity) :
         mLocked(false)
 {
+    ATRACE_CALL();
     mBuffer = allocate_camera_metadata(entryCapacity, dataCapacity);
 }
 
 CameraMetadata::CameraMetadata(const CameraMetadata &other) :
         mLocked(false) {
+    ATRACE_CALL();
     mBuffer = clone_camera_metadata(other.mBuffer);
 }
 
@@ -104,6 +108,7 @@
 }
 
 void CameraMetadata::clear() {
+    ATRACE_CALL();
     if (mLocked) {
         ALOGE("%s: CameraMetadata is locked", __FUNCTION__);
         return;
@@ -115,6 +120,7 @@
 }
 
 void CameraMetadata::acquire(camera_metadata_t *buffer) {
+    ATRACE_CALL();
     if (mLocked) {
         ALOGE("%s: CameraMetadata is locked", __FUNCTION__);
         return;
@@ -128,6 +134,7 @@
 }
 
 void CameraMetadata::acquire(CameraMetadata &other) {
+    ATRACE_CALL();
     if (mLocked) {
         ALOGE("%s: CameraMetadata is locked", __FUNCTION__);
         return;
@@ -136,10 +143,12 @@
 }
 
 status_t CameraMetadata::append(const CameraMetadata &other) {
+    ATRACE_CALL();
     return append(other.mBuffer);
 }
 
 status_t CameraMetadata::append(const camera_metadata_t* other) {
+    ATRACE_CALL();
     if (mLocked) {
         ALOGE("%s: CameraMetadata is locked", __FUNCTION__);
         return INVALID_OPERATION;
@@ -161,6 +170,7 @@
 }
 
 status_t CameraMetadata::sort() {
+    ATRACE_CALL();
     if (mLocked) {
         ALOGE("%s: CameraMetadata is locked", __FUNCTION__);
         return INVALID_OPERATION;
@@ -291,6 +301,7 @@
 
 status_t CameraMetadata::updateImpl(uint32_t tag, const void *data,
         size_t data_count) {
+    ATRACE_CALL();
     status_t res;
     if (mLocked) {
         ALOGE("%s: CameraMetadata is locked", __FUNCTION__);
@@ -348,11 +359,13 @@
 }
 
 bool CameraMetadata::exists(uint32_t tag) const {
+    ATRACE_CALL();
     camera_metadata_ro_entry entry;
     return find_camera_metadata_ro_entry(mBuffer, tag, &entry) == 0;
 }
 
 camera_metadata_entry_t CameraMetadata::find(uint32_t tag) {
+    ATRACE_CALL();
     status_t res;
     camera_metadata_entry entry;
     if (mLocked) {
@@ -369,6 +382,7 @@
 }
 
 camera_metadata_ro_entry_t CameraMetadata::find(uint32_t tag) const {
+    ATRACE_CALL();
     status_t res;
     camera_metadata_ro_entry entry;
     res = find_camera_metadata_ro_entry(mBuffer, tag, &entry);
@@ -380,6 +394,7 @@
 }
 
 status_t CameraMetadata::erase(uint32_t tag) {
+    ATRACE_CALL();
     camera_metadata_entry_t entry;
     status_t res;
     if (mLocked) {
@@ -410,6 +425,7 @@
 
 status_t CameraMetadata::removePermissionEntries(metadata_vendor_id_t vendorId,
         std::vector<int32_t> *tagsRemoved) {
+    ATRACE_CALL();
     uint32_t tagCount = 0;
     std::vector<uint32_t> tagsToRemove;
 
@@ -486,6 +502,7 @@
 }
 
 status_t CameraMetadata::resizeIfNeeded(size_t extraEntries, size_t extraData) {
+    ATRACE_CALL();
     if (mBuffer == NULL) {
         mBuffer = allocate_camera_metadata(extraEntries * 2, extraData * 2);
         if (mBuffer == NULL) {
@@ -525,7 +542,7 @@
 
 status_t CameraMetadata::readFromParcel(const Parcel& data,
                                         camera_metadata_t** out) {
-
+    ATRACE_CALL();
     status_t err = OK;
 
     camera_metadata_t* metadata = NULL;
@@ -616,6 +633,7 @@
 
 status_t CameraMetadata::writeToParcel(Parcel& data,
                                        const camera_metadata_t* metadata) {
+    ATRACE_CALL();
     status_t res = OK;
 
     /**
@@ -710,7 +728,7 @@
 }
 
 status_t CameraMetadata::readFromParcel(const Parcel *parcel) {
-
+    ATRACE_CALL();
     ALOGV("%s: parcel = %p", __FUNCTION__, parcel);
 
     status_t res = OK;
@@ -742,7 +760,7 @@
 }
 
 status_t CameraMetadata::writeToParcel(Parcel *parcel) const {
-
+    ATRACE_CALL();
     ALOGV("%s: parcel = %p", __FUNCTION__, parcel);
 
     if (parcel == NULL) {
@@ -771,7 +789,7 @@
 
 status_t CameraMetadata::getTagFromName(const char *name,
         const VendorTagDescriptor* vTags, uint32_t *tag) {
-
+    ATRACE_CALL();
     if (name == nullptr || tag == nullptr) return BAD_VALUE;
 
     size_t nameLength = strlen(name);
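
The hunks above instrument every CameraMetadata entry point with ATRACE_CALL() so the calls appear as named sections under the camera systrace tag. A minimal stand-alone sketch of the idiom (hypothetical function name, not part of this change): ATRACE_TAG has to be defined before <utils/Trace.h> is included, and ATRACE_CALL() opens a section that closes when the enclosing scope exits.

#define ATRACE_TAG ATRACE_TAG_CAMERA  // must come before the include so the macros use this tag
#include <utils/Trace.h>

void exampleTracedFunction() {
    ATRACE_CALL();  // begins a trace section named after this function; ends at scope exit
    // ... work accounted to the "camera" systrace category ...
}
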
diff --git a/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl b/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
index cdb7ac3..b183ccc 100644
--- a/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
+++ b/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
@@ -83,8 +83,9 @@
      * @param operatingMode The kind of session to create; either NORMAL_MODE or
      *     CONSTRAINED_HIGH_SPEED_MODE. Must be a non-negative value.
      * @param sessionParams Session wide camera parameters
+     * @return a list of stream ids that can be used in offline mode via "switchToOffline"
      */
-    void endConfigure(int operatingMode, in CameraMetadataNative sessionParams);
+    int[] endConfigure(int operatingMode, in CameraMetadataNative sessionParams);
 
     /**
       * Check whether a particular session configuration has camera device
@@ -189,5 +190,5 @@
      * @return Offline session object.
      */
     ICameraOfflineSession switchToOffline(in ICameraDeviceCallbacks callbacks,
-            in Surface[] offlineOutputs);
+            in int[] offlineOutputIds);
 }
diff --git a/camera/ndk/Android.bp b/camera/ndk/Android.bp
index d8220eb..56f209c 100644
--- a/camera/ndk/Android.bp
+++ b/camera/ndk/Android.bp
@@ -57,6 +57,9 @@
         "libmediandk",
         "libnativewindow",
     ],
+    header_libs: [
+        "jni_headers",
+    ],
     cflags: [
         "-fvisibility=hidden",
         "-DEXPORT=__attribute__ ((visibility (\"default\")))",
@@ -121,7 +124,6 @@
         "android.frameworks.cameraservice.common@2.0",
         "android.frameworks.cameraservice.service@2.0",
     ],
-
     static_libs: [
         "android.hardware.camera.common@1.0-helper",
         "libarect",
diff --git a/camera/ndk/NdkCameraMetadata.cpp b/camera/ndk/NdkCameraMetadata.cpp
index 9a39ed8..99691ed 100644
--- a/camera/ndk/NdkCameraMetadata.cpp
+++ b/camera/ndk/NdkCameraMetadata.cpp
@@ -26,6 +26,68 @@
 
 using namespace android;
 
+#ifndef __ANDROID_VNDK__
+namespace {
+
+constexpr const char* android_hardware_camera2_CameraMetadata_jniClassName =
+    "android/hardware/camera2/CameraMetadata";
+constexpr const char* android_hardware_camera2_CameraCharacteristics_jniClassName =
+    "android/hardware/camera2/CameraCharacteristics";
+constexpr const char* android_hardware_camera2_CaptureResult_jniClassName =
+    "android/hardware/camera2/CaptureResult";
+
+jclass android_hardware_camera2_CameraCharacteristics_clazz = nullptr;
+jclass android_hardware_camera2_CaptureResult_clazz = nullptr;
+jmethodID android_hardware_camera2_CameraMetadata_getNativeMetadataPtr = nullptr;
+
+// Called at most once to initialize the global variables used by JNI.
+bool InitJni(JNIEnv* env) {
+    // From C++11 onward, static initializers are guaranteed to be executed at most once,
+    // even if called from multiple threads.
+    static bool ok = [env]() -> bool {
+        const jclass cameraMetadataClazz = env->FindClass(
+            android_hardware_camera2_CameraMetadata_jniClassName);
+        if (cameraMetadataClazz == nullptr) {
+            return false;
+        }
+        android_hardware_camera2_CameraMetadata_getNativeMetadataPtr =
+            env->GetMethodID(cameraMetadataClazz, "getNativeMetadataPtr", "()J");
+        if (android_hardware_camera2_CameraMetadata_getNativeMetadataPtr == nullptr) {
+            return false;
+        }
+
+        android_hardware_camera2_CameraCharacteristics_clazz = env->FindClass(
+            android_hardware_camera2_CameraCharacteristics_jniClassName);
+        if (android_hardware_camera2_CameraCharacteristics_clazz == nullptr) {
+            return false;
+        }
+
+        android_hardware_camera2_CaptureResult_clazz = env->FindClass(
+            android_hardware_camera2_CaptureResult_jniClassName);
+        if (android_hardware_camera2_CaptureResult_clazz == nullptr) {
+            return false;
+        }
+
+        return true;
+    }();
+    return ok;
+}
+
+// Given cameraMetadata, an instance of android.hardware.camera2.CameraMetadata, invokes
+// cameraMetadata.getNativeMetadataPtr() and returns it as a CameraMetadata*.
+CameraMetadata* CameraMetadata_getNativeMetadataPtr(JNIEnv* env, jobject cameraMetadata) {
+    if (cameraMetadata == nullptr) {
+        ALOGE("%s: Invalid Java CameraMetadata object.", __FUNCTION__);
+        return nullptr;
+    }
+    jlong ret = env->CallLongMethod(cameraMetadata,
+                                    android_hardware_camera2_CameraMetadata_getNativeMetadataPtr);
+    return reinterpret_cast<CameraMetadata *>(ret);
+}
+
+}  // namespace
+#endif  /* __ANDROID_VNDK__ */
+
 EXPORT
 camera_status_t ACameraMetadata_getConstEntry(
         const ACameraMetadata* acm, uint32_t tag, ACameraMetadata_const_entry* entry) {
@@ -58,7 +120,7 @@
         return nullptr;
     }
     ACameraMetadata* copy = new ACameraMetadata(*src);
-    copy->incStrong((void*) ACameraMetadata_copy);
+    copy->incStrong(/*id=*/(void*) ACameraMetadata_copy);
     return copy;
 }
 
@@ -86,3 +148,34 @@
 
     return staticMetadata->isLogicalMultiCamera(numPhysicalCameras, physicalCameraIds);
 }
+
+#ifndef __ANDROID_VNDK__
+EXPORT
+ACameraMetadata* ACameraMetadata_fromCameraMetadata(JNIEnv* env, jobject cameraMetadata) {
+    ATRACE_CALL();
+
+    const bool ok = InitJni(env);
+    LOG_ALWAYS_FATAL_IF(!ok, "Failed to find CameraMetadata Java classes.");
+
+    if (cameraMetadata == nullptr) {
+        return nullptr;
+    }
+
+    ACameraMetadata::ACAMERA_METADATA_TYPE type;
+    if (env->IsInstanceOf(cameraMetadata,
+        android_hardware_camera2_CameraCharacteristics_clazz)) {
+        type = ACameraMetadata::ACM_CHARACTERISTICS;
+    } else if (env->IsInstanceOf(cameraMetadata,
+        android_hardware_camera2_CaptureResult_clazz)) {
+        type = ACameraMetadata::ACM_RESULT;
+    } else {
+        return nullptr;
+    }
+
+    CameraMetadata* src = CameraMetadata_getNativeMetadataPtr(env,
+                                                              cameraMetadata);
+    ACameraMetadata* output = new ACameraMetadata(src, type);
+    output->incStrong(/*id=*/(void*) ACameraMetadata_fromCameraMetadata);
+    return output;
+}
+#endif  /* __ANDROID_VNDK__ */
\ No newline at end of file
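
InitJni() above relies on the C++11 guarantee that a block-scope static is initialized exactly once, even when first reached from several threads at the same time. A minimal stand-alone sketch of that idiom (unrelated to the camera code, hypothetical names):

#include <cstdio>

bool initOnce() {
    // The lambda runs only on the first call; concurrent first callers block until
    // initialization finishes, and every caller then sees the same cached result.
    static const bool ok = []() {
        std::puts("one-time setup");
        return true;
    }();
    return ok;
}
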
diff --git a/camera/ndk/impl/ACameraDevice.cpp b/camera/ndk/impl/ACameraDevice.cpp
index 46a8dae..0d7180a 100644
--- a/camera/ndk/impl/ACameraDevice.cpp
+++ b/camera/ndk/impl/ACameraDevice.cpp
@@ -710,7 +710,8 @@
     if ((sessionParameters != nullptr) && (sessionParameters->settings != nullptr)) {
         params.append(sessionParameters->settings->getInternalData());
     }
-    remoteRet = mRemote->endConfigure(/*isConstrainedHighSpeed*/ false, params);
+    std::vector<int> offlineStreamIds;
+    remoteRet = mRemote->endConfigure(/*isConstrainedHighSpeed*/ false, params, &offlineStreamIds);
     if (remoteRet.serviceSpecificErrorCode() == hardware::ICameraService::ERROR_ILLEGAL_ARGUMENT) {
         ALOGE("Camera device %s cannnot support app output configuration: %s", getId(),
                 remoteRet.toString8().string());
diff --git a/camera/ndk/impl/ACameraMetadata.cpp b/camera/ndk/impl/ACameraMetadata.cpp
index 7c41b5e..0ff78ab 100644
--- a/camera/ndk/impl/ACameraMetadata.cpp
+++ b/camera/ndk/impl/ACameraMetadata.cpp
@@ -28,7 +28,37 @@
  * ACameraMetadata Implementation
  */
 ACameraMetadata::ACameraMetadata(camera_metadata_t* buffer, ACAMERA_METADATA_TYPE type) :
-        mData(buffer), mType(type) {
+        mData(new CameraMetadata(buffer)),
+        mOwnsData(true),
+        mType(type) {
+    init();
+}
+
+ACameraMetadata::ACameraMetadata(CameraMetadata* cameraMetadata, ACAMERA_METADATA_TYPE type) :
+        mData(cameraMetadata),
+        mOwnsData(false),
+        mType(type) {
+    init();
+}
+
+ACameraMetadata::ACameraMetadata(const ACameraMetadata& other) :
+        mOwnsData(other.mOwnsData),
+        mType(other.mType) {
+    if (other.mOwnsData) {
+        mData = new CameraMetadata(*(other.mData));
+    } else {
+        mData = other.mData;
+    }
+}
+
+ACameraMetadata::~ACameraMetadata() {
+    if (mOwnsData) {
+        delete mData;
+    }
+}
+
+void
+ACameraMetadata::init() {
     if (mType == ACM_CHARACTERISTICS) {
         filterUnsupportedFeatures();
         filterStreamConfigurations();
@@ -61,7 +91,7 @@
 void
 ACameraMetadata::filterUnsupportedFeatures() {
     // Hide unsupported capabilities (reprocessing)
-    camera_metadata_entry entry = mData.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
+    camera_metadata_entry entry = mData->find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
     if (entry.count == 0 || entry.type != TYPE_BYTE) {
         ALOGE("%s: malformed available capability key! count %zu, type %d",
                 __FUNCTION__, entry.count, entry.type);
@@ -80,7 +110,7 @@
             }
         }
     }
-    mData.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES, capabilities);
+    mData->update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES, capabilities);
 }
 
 void
@@ -118,7 +148,7 @@
     const int STREAM_WIDTH_OFFSET = 1;
     const int STREAM_HEIGHT_OFFSET = 2;
     const int STREAM_DURATION_OFFSET = 3;
-    camera_metadata_entry entry = mData.find(tag);
+    camera_metadata_entry entry = mData->find(tag);
     if (entry.count == 0 || entry.count % 4 || entry.type != TYPE_INT64) {
         ALOGE("%s: malformed duration key %d! count %zu, type %d",
                 __FUNCTION__, tag, entry.count, entry.type);
@@ -194,7 +224,7 @@
         }
     }
 
-    mData.update(tag, filteredDurations);
+    mData->update(tag, filteredDurations);
 }
 
 void
@@ -204,7 +234,7 @@
     const int STREAM_WIDTH_OFFSET = 1;
     const int STREAM_HEIGHT_OFFSET = 2;
     const int STREAM_IS_INPUT_OFFSET = 3;
-    camera_metadata_entry entry = mData.find(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
+    camera_metadata_entry entry = mData->find(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
     if (entry.count > 0 && (entry.count % 4 || entry.type != TYPE_INT32)) {
         ALOGE("%s: malformed available stream configuration key! count %zu, type %d",
                 __FUNCTION__, entry.count, entry.type);
@@ -234,10 +264,10 @@
     }
 
     if (filteredStreamConfigs.size() > 0) {
-        mData.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, filteredStreamConfigs);
+        mData->update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, filteredStreamConfigs);
     }
 
-    entry = mData.find(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS);
+    entry = mData->find(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS);
     if (entry.count > 0 && (entry.count % 4 || entry.type != TYPE_INT32)) {
         ALOGE("%s: malformed available depth stream configuration key! count %zu, type %d",
                 __FUNCTION__, entry.count, entry.type);
@@ -270,11 +300,11 @@
     }
 
     if (filteredDepthStreamConfigs.size() > 0) {
-        mData.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
+        mData->update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
                 filteredDepthStreamConfigs);
     }
 
-    entry = mData.find(ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS);
+    entry = mData->find(ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS);
     Vector<int32_t> filteredHeicStreamConfigs;
     filteredHeicStreamConfigs.setCapacity(entry.count);
 
@@ -297,9 +327,9 @@
         filteredHeicStreamConfigs.push_back(height);
         filteredHeicStreamConfigs.push_back(isInput);
     }
-    mData.update(ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS, filteredHeicStreamConfigs);
+    mData->update(ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS, filteredHeicStreamConfigs);
 
-    entry = mData.find(ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS);
+    entry = mData->find(ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS);
     Vector<int32_t> filteredDynamicDepthStreamConfigs;
     filteredDynamicDepthStreamConfigs.setCapacity(entry.count);
 
@@ -322,7 +352,7 @@
         filteredDynamicDepthStreamConfigs.push_back(height);
         filteredDynamicDepthStreamConfigs.push_back(isInput);
     }
-    mData.update(ACAMERA_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS,
+    mData->update(ACAMERA_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS,
             filteredDynamicDepthStreamConfigs);
 }
 
@@ -343,7 +373,7 @@
 
     Mutex::Autolock _l(mLock);
 
-    camera_metadata_ro_entry rawEntry = mData.find(tag);
+    camera_metadata_ro_entry rawEntry = static_cast<const CameraMetadata*>(mData)->find(tag);
     if (rawEntry.count == 0) {
         ALOGE("%s: cannot find metadata tag %d", __FUNCTION__, tag);
         return ACAMERA_ERROR_METADATA_NOT_FOUND;
@@ -390,9 +420,9 @@
                          /*out*/const uint32_t** tags) const {
     Mutex::Autolock _l(mLock);
     if (mTags.size() == 0) {
-        size_t entry_count = mData.entryCount();
+        size_t entry_count = mData->entryCount();
         mTags.setCapacity(entry_count);
-        const camera_metadata_t* rawMetadata = mData.getAndLock();
+        const camera_metadata_t* rawMetadata = mData->getAndLock();
         for (size_t i = 0; i < entry_count; i++) {
             camera_metadata_ro_entry_t entry;
             int ret = get_camera_metadata_ro_entry(rawMetadata, i, &entry);
@@ -405,7 +435,7 @@
                 mTags.push_back(entry.tag);
             }
         }
-        mData.unlock(rawMetadata);
+        mData->unlock(rawMetadata);
     }
 
     *numTags = mTags.size();
@@ -415,7 +445,7 @@
 
 const CameraMetadata&
 ACameraMetadata::getInternalData() const {
-    return mData;
+    return (*mData);
 }
 
 bool
@@ -480,6 +510,7 @@
         case ACAMERA_CONTROL_POST_RAW_SENSITIVITY_BOOST:
         case ACAMERA_CONTROL_ENABLE_ZSL:
         case ACAMERA_CONTROL_BOKEH_MODE:
+        case ACAMERA_CONTROL_ZOOM_RATIO:
         case ACAMERA_EDGE_MODE:
         case ACAMERA_FLASH_MODE:
         case ACAMERA_HOT_PIXEL_MODE:
diff --git a/camera/ndk/impl/ACameraMetadata.h b/camera/ndk/impl/ACameraMetadata.h
index 97f7f48..a57b2ff 100644
--- a/camera/ndk/impl/ACameraMetadata.h
+++ b/camera/ndk/impl/ACameraMetadata.h
@@ -36,8 +36,8 @@
 using namespace android;
 
 /**
- * ACameraMetadata opaque struct definition
- * Leave outside of android namespace because it's NDK struct
+ * ACameraMetadata is an opaque struct definition.
+ * It is intentionally left outside of the android namespace because it is an NDK struct.
  */
 struct ACameraMetadata : public RefBase {
   public:
@@ -47,11 +47,20 @@
         ACM_RESULT,          // Read only
     } ACAMERA_METADATA_TYPE;
 
-    // Takes ownership of pass-in buffer
-    ACameraMetadata(camera_metadata_t *buffer, ACAMERA_METADATA_TYPE type);
-    // Clone
-    ACameraMetadata(const ACameraMetadata& other) :
-            mData(other.mData), mType(other.mType) {};
+    // Constructs an ACameraMetadata that takes ownership of `buffer`.
+    ACameraMetadata(camera_metadata_t* buffer, ACAMERA_METADATA_TYPE type);
+
+    // Constructs an ACameraMetadata that is a view of `cameraMetadata`.
+    // `cameraMetadata` will not be deleted by ~ACameraMetadata().
+    ACameraMetadata(CameraMetadata* cameraMetadata, ACAMERA_METADATA_TYPE type);
+
+    // Copy constructor.
+    //
+    // If `other` owns its CameraMetadata, this constructor makes a deep copy.
+    // Otherwise, the new instance is also a view of the same data.
+    ACameraMetadata(const ACameraMetadata& other);
+
+    ~ACameraMetadata();
 
     camera_status_t getConstEntry(uint32_t tag, ACameraMetadata_const_entry* entry) const;
 
@@ -70,6 +79,9 @@
 
   private:
 
+    // Common code called by constructors.
+    void init();
+
     // This function does not check whether the capability passed to it is valid.
     // The caller must make sure that it is.
     bool isNdkSupportedCapability(const int32_t capability);
@@ -95,11 +107,11 @@
 
         status_t ret = OK;
         if (count == 0 && data == nullptr) {
-            ret = mData.erase(tag);
+            ret = mData->erase(tag);
         } else {
             // Here we have to use reinterpret_cast because the NDK data type is
             // exact copy of internal data type but they do not inherit from each other
-            ret = mData.update(tag, reinterpret_cast<const INTERNAL_T*>(data), count);
+            ret = mData->update(tag, reinterpret_cast<const INTERNAL_T*>(data), count);
         }
 
         if (ret == OK) {
@@ -110,10 +122,14 @@
         }
     }
 
-    // guard access of public APIs: get/update/getTags
-    mutable Mutex    mLock;
-    CameraMetadata   mData;
-    mutable Vector<uint32_t> mTags; // updated in getTags, cleared by update
+    // Guard access of public APIs: get/update/getTags.
+    mutable Mutex mLock;
+
+    CameraMetadata* mData;
+    // If true, has ownership of mData. Otherwise, mData is a view of an external instance.
+    bool mOwnsData;
+
+    mutable Vector<uint32_t> mTags; // Updated by `getTags()`, cleared by `update()`.
     const ACAMERA_METADATA_TYPE mType;
 
     static std::unordered_set<uint32_t> sSystemTags;
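
The mData/mOwnsData pair introduced above lets an ACameraMetadata act either as the owner of its CameraMetadata or as a view of one owned elsewhere, with the copy constructor deep-copying only in the owning case. A generic sketch of that owner-vs-view idiom (hypothetical types; assignment operators omitted for brevity):

struct Payload { int value = 0; };

class PayloadHandle {
  public:
    PayloadHandle() : mData(new Payload()), mOwnsData(true) {}                        // owning
    explicit PayloadHandle(Payload* external) : mData(external), mOwnsData(false) {}  // view
    PayloadHandle(const PayloadHandle& other)
        : mData(other.mOwnsData ? new Payload(*other.mData) : other.mData),
          mOwnsData(other.mOwnsData) {}  // deep copy only when the source owns its data
    ~PayloadHandle() { if (mOwnsData) delete mData; }

  private:
    Payload* mData;
    bool mOwnsData;
};
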
diff --git a/camera/ndk/include/camera/NdkCameraMetadata.h b/camera/ndk/include/camera/NdkCameraMetadata.h
index 9bbfb83..ee75610 100644
--- a/camera/ndk/include/camera/NdkCameraMetadata.h
+++ b/camera/ndk/include/camera/NdkCameraMetadata.h
@@ -39,6 +39,12 @@
 #include <stdint.h>
 #include <sys/cdefs.h>
 
+#ifndef __ANDROID_VNDK__
+#if __ANDROID_API__ >= 30
+#include "jni.h"
+#endif  /* __ANDROID_API__ >= 30 */
+#endif  /* __ANDROID_VNDK__ */
+
 #include "NdkCameraError.h"
 #include "NdkCameraMetadataTags.h"
 
@@ -255,8 +261,40 @@
 
 #endif /* __ANDROID_API__ >= 29 */
 
+#ifndef __ANDROID_VNDK__
+#if __ANDROID_API__ >= 30
+
+/**
+ * Return a {@link ACameraMetadata} that references the same data as
+ * {@link cameraMetadata}, which is an instance of
+ * {@link android.hardware.camera2.CameraMetadata} (e.g., a
+ * {@link android.hardware.camera2.CameraCharacteristics} or
+ * {@link android.hardware.camera2.CaptureResult}).
+ *
+ * <p>The returned ACameraMetadata must be freed by the application with
+ * {@link ACameraMetadata_free} after the application is done using it.</p>
+ *
+ * <p>This function does not affect the lifetime of {@link cameraMetadata}. Attempting to use the
+ * returned ACameraMetadata object after {@link cameraMetadata} has been garbage collected is
+ * unsafe. To manage the lifetime beyond the current JNI function call, use
+ * {@code env->NewGlobalRef()} and {@code env->DeleteGlobalRef()}.</p>
+ *
+ * @param env the JNI environment.
+ * @param cameraMetadata the source {@link android.hardware.camera2.CameraMetadata} from which the
+ *                       returned {@link ACameraMetadata} is a view.
+ *
+ * @return a valid ACameraMetadata pointer or NULL if {@link cameraMetadata} is null or not a valid
+ *         instance of {@link android.hardware.camera2.CameraMetadata}.
+ *
+ *
+ */
+ACameraMetadata* ACameraMetadata_fromCameraMetadata(JNIEnv* env, jobject cameraMetadata)
+        __INTRODUCED_IN(30);
+
+#endif /* __ANDROID_API__ >= 30 */
+#endif  /* __ANDROID_VNDK__ */
+
 __END_DECLS
 
 #endif /* _NDK_CAMERA_METADATA_H */
 
-/** @} */
+/** @} */
\ No newline at end of file
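
A hedged usage sketch of the entry point documented above, assuming a hypothetical JNI bridge class com.example.app.NativeBridge and a CameraCharacteristics object passed down from Java; only functions already declared in the NDK camera headers are used.

#include <camera/NdkCameraMetadata.h>
#include <jni.h>

extern "C" JNIEXPORT jint JNICALL
Java_com_example_app_NativeBridge_readSensorOrientation(
        JNIEnv* env, jobject /*thiz*/, jobject characteristics) {
    ACameraMetadata* metadata = ACameraMetadata_fromCameraMetadata(env, characteristics);
    if (metadata == nullptr) {
        return -1;  // null or not a CameraCharacteristics/CaptureResult instance
    }
    ACameraMetadata_const_entry entry = {};
    jint orientation = -1;
    if (ACameraMetadata_getConstEntry(metadata, ACAMERA_SENSOR_ORIENTATION, &entry) == ACAMERA_OK &&
            entry.count > 0) {
        orientation = entry.data.i32[0];
    }
    ACameraMetadata_free(metadata);  // frees the wrapper only; the Java object is untouched
    return orientation;
}
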
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index 0652cbb..3ac3ded 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -518,11 +518,22 @@
      * region and output only the intersection rectangle as the metering region in the result
      * metadata.  If the region is entirely outside the crop region, it will be ignored and
      * not reported in the result metadata.</p>
+     * <p>Starting from API level 30, the coordinate system of activeArraySize or
+     * preCorrectionActiveArraySize is used to represent post-zoomRatio field of view, not
+     * pre-zoom field of view. This means that the same aeRegions values at different
+     * ACAMERA_CONTROL_ZOOM_RATIO represent different parts of the scene. The aeRegions
+     * coordinates are relative to the activeArray/preCorrectionActiveArray representing the
+     * zoomed field of view. If ACAMERA_CONTROL_ZOOM_RATIO is set to 1.0 (default), the same
+     * aeRegions at different ACAMERA_SCALER_CROP_REGION still represent the same parts of the
+     * scene as they do before. See ACAMERA_CONTROL_ZOOM_RATIO for details. Whether to use
+     * activeArraySize or preCorrectionActiveArraySize still depends on distortion correction
+     * mode.</p>
      * <p>The data representation is <code>int[5 * area_count]</code>.
      * Every five elements represent a metering region of <code>(xmin, ymin, xmax, ymax, weight)</code>.
      * The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and
      * ymax.</p>
      *
+     * @see ACAMERA_CONTROL_ZOOM_RATIO
      * @see ACAMERA_DISTORTION_CORRECTION_MODE
      * @see ACAMERA_SCALER_CROP_REGION
      * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
@@ -698,11 +709,22 @@
      * region and output only the intersection rectangle as the metering region in the result
      * metadata. If the region is entirely outside the crop region, it will be ignored and
      * not reported in the result metadata.</p>
+     * <p>Starting from API level 30, the coordinate system of activeArraySize or
+     * preCorrectionActiveArraySize is used to represent post-zoomRatio field of view, not
+     * pre-zoom field of view. This means that the same afRegions values at different
+     * ACAMERA_CONTROL_ZOOM_RATIO represent different parts of the scene. The afRegions
+     * coordinates are relative to the activeArray/preCorrectionActiveArray representing the
+     * zoomed field of view. If ACAMERA_CONTROL_ZOOM_RATIO is set to 1.0 (default), the same
+     * afRegions at different ACAMERA_SCALER_CROP_REGION still represent the same parts of the
+     * scene as they do before. See ACAMERA_CONTROL_ZOOM_RATIO for details. Whether to use
+     * activeArraySize or preCorrectionActiveArraySize still depends on distortion correction
+     * mode.</p>
      * <p>The data representation is <code>int[5 * area_count]</code>.
      * Every five elements represent a metering region of <code>(xmin, ymin, xmax, ymax, weight)</code>.
      * The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and
      * ymax.</p>
      *
+     * @see ACAMERA_CONTROL_ZOOM_RATIO
      * @see ACAMERA_DISTORTION_CORRECTION_MODE
      * @see ACAMERA_SCALER_CROP_REGION
      * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
@@ -873,11 +895,22 @@
      * region and output only the intersection rectangle as the metering region in the result
      * metadata.  If the region is entirely outside the crop region, it will be ignored and
      * not reported in the result metadata.</p>
+     * <p>Starting from API level 30, the coordinate system of activeArraySize or
+     * preCorrectionActiveArraySize is used to represent post-zoomRatio field of view, not
+     * pre-zoom field of view. This means that the same awbRegions values at different
+     * ACAMERA_CONTROL_ZOOM_RATIO represent different parts of the scene. The awbRegions
+     * coordinates are relative to the activeArray/preCorrectionActiveArray representing the
+     * zoomed field of view. If ACAMERA_CONTROL_ZOOM_RATIO is set to 1.0 (default), the same
+     * awbRegions at different ACAMERA_SCALER_CROP_REGION still represent the same parts of
+     * the scene as they do before. See ACAMERA_CONTROL_ZOOM_RATIO for details. Whether to use
+     * activeArraySize or preCorrectionActiveArraySize still depends on distortion correction
+     * mode.</p>
      * <p>The data representation is <code>int[5 * area_count]</code>.
      * Every five elements represent a metering region of <code>(xmin, ymin, xmax, ymax, weight)</code>.
      * The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and
      * ymax.</p>
      *
+     * @see ACAMERA_CONTROL_ZOOM_RATIO
      * @see ACAMERA_DISTORTION_CORRECTION_MODE
      * @see ACAMERA_SCALER_CROP_REGION
      * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
@@ -1735,8 +1768,10 @@
     ACAMERA_CONTROL_AF_SCENE_CHANGE =                           // byte (acamera_metadata_enum_android_control_af_scene_change_t)
             ACAMERA_CONTROL_START + 42,
     /**
-     * <p>The list of bokeh modes that are supported by this camera device, and each bokeh mode's
-     * maximum streaming (non-stall) size with bokeh effect.</p>
+     * <p>The list of bokeh modes for ACAMERA_CONTROL_BOKEH_MODE that are supported by this camera
+     * device, and each bokeh mode's maximum streaming (non-stall) size with bokeh effect.</p>
+     *
+     * @see ACAMERA_CONTROL_BOKEH_MODE
      *
      * <p>Type: int32[3*n]</p>
      *
@@ -1761,9 +1796,31 @@
      * application configures a stream with larger dimension, the stream may not have bokeh
      * effect applied.</p>
      */
-    ACAMERA_CONTROL_AVAILABLE_BOKEH_CAPABILITIES =              // int32[3*n]
+    ACAMERA_CONTROL_AVAILABLE_BOKEH_MAX_SIZES =                 // int32[3*n]
             ACAMERA_CONTROL_START + 43,
     /**
+     * <p>The ranges of supported zoom ratio for non-OFF ACAMERA_CONTROL_BOKEH_MODE.</p>
+     *
+     * @see ACAMERA_CONTROL_BOKEH_MODE
+     *
+     * <p>Type: float[2*n]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>When bokeh mode is enabled, the camera device may have a limited range of zoom ratios
+     * compared to when bokeh mode is disabled. This tag lists the zoom ratio ranges for all
+     * supported non-OFF bokeh modes, in the same order as in
+     * ACAMERA_CONTROL_AVAILABLE_BOKEH_CAPABILITIES.</p>
+     * <p>Range [1.0, 1.0] means that no zoom (optical or digital) is supported.</p>
+     *
+     * @see ACAMERA_CONTROL_AVAILABLE_BOKEH_CAPABILITIES
+     */
+    ACAMERA_CONTROL_AVAILABLE_BOKEH_ZOOM_RATIO_RANGES =         // float[2*n]
+            ACAMERA_CONTROL_START + 44,
+    /**
      * <p>Whether bokeh mode is enabled for a particular capture request.</p>
      *
      * <p>Type: byte (acamera_metadata_enum_android_control_bokeh_mode_t)</p>
@@ -1800,11 +1857,79 @@
      * stream combinations of LIMITED hardware level are guaranteed.</p>
      * <p>For a logical multi-camera, bokeh may be implemented by stereo vision from sub-cameras
      * with different field of view. As a result, when bokeh mode is enabled, the camera device
-     * may override android.scaler.CropRegion, and the field of view will be smaller than when
+     * may override ACAMERA_SCALER_CROP_REGION, and the field of view will be smaller than when
      * bokeh mode is off.</p>
+     *
+     * @see ACAMERA_SCALER_CROP_REGION
      */
     ACAMERA_CONTROL_BOKEH_MODE =                                // byte (acamera_metadata_enum_android_control_bokeh_mode_t)
-            ACAMERA_CONTROL_START + 44,
+            ACAMERA_CONTROL_START + 45,
+    /**
+     * <p>Minimum and maximum zoom ratios supported by this camera device.</p>
+     *
+     * <p>Type: float[2]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>If the camera device supports zoom-out from 1x zoom, minZoom will be less than 1.0, and
+     * setting ACAMERA_CONTROL_ZOOM_RATIO to values less than 1.0 increases the camera's field
+     * of view.</p>
+     *
+     * @see ACAMERA_CONTROL_ZOOM_RATIO
+     */
+    ACAMERA_CONTROL_ZOOM_RATIO_RANGE =                          // float[2]
+            ACAMERA_CONTROL_START + 46,
+    /**
+     * <p>The desired zoom ratio.</p>
+     *
+     * <p>Type: float</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+     *   <li>ACaptureRequest</li>
+     * </ul></p>
+     *
+     * <p>Instead of using ACAMERA_SCALER_CROP_REGION with dual purposes of crop and zoom, the
+     * application can now choose to use this tag to specify the desired zoom level. The
+     * ACAMERA_SCALER_CROP_REGION can still be used to specify the horizontal or vertical
+     * crop to achieve aspect ratios different than the native camera sensor.</p>
+     * <p>By using this control, the application gains a simpler way to control zoom, which can
+     * be a combination of optical and digital zoom. More specifically, for a logical
+     * multi-camera with more than one focal length, using a floating point zoom ratio offers
+     * more zoom precision when a telephoto lens is used, as well as allowing zoom ratio of
+     * less than 1.0 to zoom out to a wide field of view.</p>
+     * <p>Note that the coordinate system of cropRegion, AE/AWB/AF regions, and faces now changes
+     * to the effective after-zoom field-of-view represented by rectangle of (0, 0,
+     * activeArrayWidth, activeArrayHeight).</p>
+     * <p>For example, if ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE is 4032*3024, and the preview stream
+     * is configured to the same 4:3 aspect ratio, the application can achieve 2.0x zoom in
+     * one of two ways:</p>
+     * <ul>
+     * <li>zoomRatio = 2.0, scaler.cropRegion = (0, 0, 4032, 3024)</li>
+     * <li>zoomRatio = 1.0 (default), scaler.cropRegion = (1008, 756, 3024, 2268)</li>
+     * </ul>
+     * <p>If the application intends to set aeRegions to be top-left quarter of the preview
+     * field-of-view, the ACAMERA_CONTROL_AE_REGIONS should be set to (0, 0, 2016, 1512) with
+     * zoomRatio set to 2.0. Alternatively, the application can set aeRegions to the equivalent
+     * region of (1008, 756, 2016, 1512) for zoomRatio of 1.0. If the application doesn't
+     * explicitly set ACAMERA_CONTROL_ZOOM_RATIO, its value defaults to 1.0.</p>
+     * <p>This coordinate system change isn't applicable to RAW capture and its related metadata
+     * such as intrinsicCalibration and lensShadingMap.</p>
+     * <p>One limitation of controlling zoom using zoomRatio is that the ACAMERA_SCALER_CROP_REGION
+     * must only be used for letterboxing or pillarboxing of the sensor active array, and no
+     * FREEFORM cropping can be used with ACAMERA_CONTROL_ZOOM_RATIO other than 1.0.</p>
+     *
+     * @see ACAMERA_CONTROL_AE_REGIONS
+     * @see ACAMERA_CONTROL_ZOOM_RATIO
+     * @see ACAMERA_SCALER_CROP_REGION
+     * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
+     */
+    ACAMERA_CONTROL_ZOOM_RATIO =                                // float
+            ACAMERA_CONTROL_START + 47,
     ACAMERA_CONTROL_END,
 
     /**
@@ -2384,6 +2509,11 @@
      * <p><code>p' = Rp</code></p>
      * <p>where <code>p</code> is in the device sensor coordinate system, and
      *  <code>p'</code> is in the camera-oriented coordinate system.</p>
+     * <p>If ACAMERA_LENS_POSE_REFERENCE is UNDEFINED, the quaternion rotation cannot
+     *  be accurately represented by the camera device, and will be represented by
+     *  default values matching its default facing.</p>
+     *
+     * @see ACAMERA_LENS_POSE_REFERENCE
      */
     ACAMERA_LENS_POSE_ROTATION =                                // float[4]
             ACAMERA_LENS_START + 6,
@@ -2424,6 +2554,8 @@
      * <p>When ACAMERA_LENS_POSE_REFERENCE is GYROSCOPE, then this position is relative to
      * the center of the primary gyroscope on the device. The axis definitions are the same as
      * with PRIMARY_CAMERA.</p>
+     * <p>When ACAMERA_LENS_POSE_REFERENCE is UNDEFINED, this position cannot be accurately
+     * represented by the camera device, and will be represented as <code>(0, 0, 0)</code>.</p>
      *
      * @see ACAMERA_LENS_DISTORTION
      * @see ACAMERA_LENS_INTRINSIC_CALIBRATION
@@ -2566,8 +2698,10 @@
     ACAMERA_LENS_RADIAL_DISTORTION =                            // Deprecated! DO NOT USE
             ACAMERA_LENS_START + 11,
     /**
-     * <p>The origin for ACAMERA_LENS_POSE_TRANSLATION.</p>
+     * <p>The origin for ACAMERA_LENS_POSE_TRANSLATION, and the accuracy of
+     * ACAMERA_LENS_POSE_TRANSLATION and ACAMERA_LENS_POSE_ROTATION.</p>
      *
+     * @see ACAMERA_LENS_POSE_ROTATION
      * @see ACAMERA_LENS_POSE_TRANSLATION
      *
      * <p>Type: byte (acamera_metadata_enum_android_lens_pose_reference_t)</p>
@@ -3163,32 +3297,70 @@
      * <p>For devices not supporting ACAMERA_DISTORTION_CORRECTION_MODE control, the coordinate
      * system always follows that of ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, with <code>(0, 0)</code> being
      * the top-left pixel of the active array.</p>
-     * <p>For devices supporting ACAMERA_DISTORTION_CORRECTION_MODE control, the coordinate
-     * system depends on the mode being set.
-     * When the distortion correction mode is OFF, the coordinate system follows
-     * ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE, with
-     * <code>(0, 0)</code> being the top-left pixel of the pre-correction active array.
-     * When the distortion correction mode is not OFF, the coordinate system follows
-     * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, with
-     * <code>(0, 0)</code> being the top-left pixel of the active array.</p>
-     * <p>Output streams use this rectangle to produce their output,
-     * cropping to a smaller region if necessary to maintain the
-     * stream's aspect ratio, then scaling the sensor input to
+     * <p>For devices supporting ACAMERA_DISTORTION_CORRECTION_MODE control, the coordinate system
+     * depends on the mode being set.  When the distortion correction mode is OFF, the
+     * coordinate system follows ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE, with <code>(0,
+     * 0)</code> being the top-left pixel of the pre-correction active array.  When the distortion
+     * correction mode is not OFF, the coordinate system follows
+     * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, with <code>(0, 0)</code> being the top-left pixel of the
+     * active array.</p>
+     * <p>Output streams use this rectangle to produce their output, cropping to a smaller region
+     * if necessary to maintain the stream's aspect ratio, then scaling the sensor input to
      * match the output's configured resolution.</p>
-     * <p>The crop region is applied after the RAW to other color
-     * space (e.g. YUV) conversion. Since raw streams
-     * (e.g. RAW16) don't have the conversion stage, they are not
+     * <p>The crop region is applied after the RAW to other color space (e.g. YUV)
+     * conversion. Since raw streams (e.g. RAW16) don't have the conversion stage, they are not
      * croppable. The crop region will be ignored by raw streams.</p>
-     * <p>For non-raw streams, any additional per-stream cropping will
-     * be done to maximize the final pixel area of the stream.</p>
-     * <p>For example, if the crop region is set to a 4:3 aspect
-     * ratio, then 4:3 streams will use the exact crop
-     * region. 16:9 streams will further crop vertically
-     * (letterbox).</p>
-     * <p>Conversely, if the crop region is set to a 16:9, then 4:3
-     * outputs will crop horizontally (pillarbox), and 16:9
-     * streams will match exactly. These additional crops will
-     * be centered within the crop region.</p>
+     * <p>For non-raw streams, any additional per-stream cropping will be done to maximize the
+     * final pixel area of the stream.</p>
+     * <p>For example, if the crop region is set to a 4:3 aspect ratio, then 4:3 streams will use
+     * the exact crop region. 16:9 streams will further crop vertically (letterbox).</p>
+     * <p>Conversely, if the crop region is set to a 16:9, then 4:3 outputs will crop horizontally
+     * (pillarbox), and 16:9 streams will match exactly. These additional crops will be
+     * centered within the crop region.</p>
+     * <p>To illustrate, here are several scenarios of different crop regions and output streams,
+     * for a hypothetical camera device with an active array of size <code>(2000,1500)</code>.  Note that
+     * several of these examples use non-centered crop regions for ease of illustration; such
+     * regions are only supported on devices with FREEFORM capability
+     * (ACAMERA_SCALER_CROPPING_TYPE <code>== FREEFORM</code>), but this does not affect the way the crop
+     * rules work otherwise.</p>
+     * <ul>
+     * <li>Camera Configuration:<ul>
+     * <li>Active array size: <code>2000x1500</code> (3 MP, 4:3 aspect ratio)</li>
+     * <li>Output stream #1: <code>640x480</code> (VGA, 4:3 aspect ratio)</li>
+     * <li>Output stream #2: <code>1280x720</code> (720p, 16:9 aspect ratio)</li>
+     * </ul>
+     * </li>
+     * <li>Case #1: 4:3 crop region with 2x digital zoom<ul>
+     * <li>Crop region: <code>Rect(500, 375, 1500, 1125) // (left, top, right, bottom)</code></li>
+     * <li><img alt="4:3 aspect ratio crop diagram" src="../images/camera2/metadata/android.scaler.cropRegion/crop-region-43-ratio.png" /></li>
+     * <li><code>640x480</code> stream source area: <code>(500, 375, 1500, 1125)</code> (equal to crop region)</li>
+     * <li><code>1280x720</code> stream source area: <code>(500, 469, 1500, 1031)</code> (letterboxed)</li>
+     * </ul>
+     * </li>
+     * <li>Case #2: 16:9 crop region with ~1.5x digital zoom.<ul>
+     * <li>Crop region: <code>Rect(500, 375, 1833, 1125)</code></li>
+     * <li><img alt="16:9 aspect ratio crop diagram" src="../images/camera2/metadata/android.scaler.cropRegion/crop-region-169-ratio.png" /></li>
+     * <li><code>640x480</code> stream source area: <code>(666, 375, 1666, 1125)</code> (pillarboxed)</li>
+     * <li><code>1280x720</code> stream source area: <code>(500, 375, 1833, 1125)</code> (equal to crop region)</li>
+     * </ul>
+     * </li>
+     * <li>Case #3: 1:1 crop region with ~2.6x digital zoom.<ul>
+     * <li>Crop region: <code>Rect(500, 375, 1250, 1125)</code></li>
+     * <li><img alt="1:1 aspect ratio crop diagram" src="../images/camera2/metadata/android.scaler.cropRegion/crop-region-11-ratio.png" /></li>
+     * <li><code>640x480</code> stream source area: <code>(500, 469, 1250, 1031)</code> (letterboxed)</li>
+     * <li><code>1280x720</code> stream source area: <code>(500, 543, 1250, 957)</code> (letterboxed)</li>
+     * </ul>
+     * </li>
+     * <li>Case #4: Replace <code>640x480</code> stream with <code>1024x1024</code> stream, with 4:3 crop region:<ul>
+     * <li>Crop region: <code>Rect(500, 375, 1500, 1125)</code></li>
+     * <li><img alt="Square output, 4:3 aspect ratio crop diagram" src="../images/camera2/metadata/android.scaler.cropRegion/crop-region-43-square-ratio.png" /></li>
+     * <li><code>1024x1024</code> stream source area: <code>(625, 375, 1375, 1125)</code> (pillarboxed)</li>
+     * <li><code>1280x720</code> stream source area: <code>(500, 469, 1500, 1031)</code> (letterboxed)</li>
+     * <li>Note that in this case, neither of the two outputs is a subset of the other, with
+     *   each containing image data the other doesn't have.</li>
+     * </ul>
+     * </li>
+     * </ul>
      * <p>If the coordinate system is ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, the width and height
      * of the crop region cannot be set to be smaller than
      * <code>floor( activeArraySize.width / ACAMERA_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM )</code> and
@@ -3199,14 +3371,22 @@
      * and
      * <code>floor( preCorrectionActiveArraySize.height / ACAMERA_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM )</code>,
      * respectively.</p>
-     * <p>The camera device may adjust the crop region to account
-     * for rounding and other hardware requirements; the final
-     * crop region used will be included in the output capture
-     * result.</p>
+     * <p>The camera device may adjust the crop region to account for rounding and other hardware
+     * requirements; the final crop region used will be included in the output capture result.</p>
+     * <p>Starting from API level 30, it's strongly recommended to use ACAMERA_CONTROL_ZOOM_RATIO
+     * to take advantage of better support for zoom with logical multi-camera. The benefits
+     * include better precision with optical-digital zoom combination, and ability to do
+     * zoom-out from 1.0x. When using ACAMERA_CONTROL_ZOOM_RATIO for zoom, the crop region in
+     * the capture request must be either letterboxing or pillarboxing (but not both). The
+     * coordinate system is post-zoom, meaning that the activeArraySize or
+     * preCorrectionActiveArraySize covers the camera device's field of view "after" zoom.  See
+     * ACAMERA_CONTROL_ZOOM_RATIO for details.</p>
      * <p>The data representation is int[4], which maps to (left, top, width, height).</p>
      *
+     * @see ACAMERA_CONTROL_ZOOM_RATIO
      * @see ACAMERA_DISTORTION_CORRECTION_MODE
      * @see ACAMERA_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM
+     * @see ACAMERA_SCALER_CROPPING_TYPE
      * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
      * @see ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE
      */
@@ -3232,6 +3412,12 @@
      * <p>Crop regions that have a width or height that is smaller
      * than this ratio allows will be rounded up to the minimum
      * allowed size by the camera device.</p>
+     * <p>Starting from API level 30, when using ACAMERA_CONTROL_ZOOM_RATIO to zoom in or out,
+     * the application must use ACAMERA_CONTROL_ZOOM_RATIO_RANGE to query both the minimum and
+     * maximum zoom ratio.</p>
+     *
+     * @see ACAMERA_CONTROL_ZOOM_RATIO
+     * @see ACAMERA_CONTROL_ZOOM_RATIO_RANGE
      */
     ACAMERA_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM =                 // float
             ACAMERA_SCALER_START + 4,
@@ -3406,8 +3592,24 @@
      * <p>Camera devices that support FREEFORM cropping will support any crop region that
      * is inside of the active array. The camera device will apply the same crop region and
      * return the final used crop region in capture result metadata ACAMERA_SCALER_CROP_REGION.</p>
+     * <p>Starting from API level 30,</p>
+     * <ul>
+     * <li>If the camera device supports FREEFORM cropping, in order to do FREEFORM cropping, the
+     * application must set ACAMERA_CONTROL_ZOOM_RATIO to 1.0, and use ACAMERA_SCALER_CROP_REGION
+     * for zoom.</li>
+     * <li>To do CENTER_ONLY zoom, the application has the following two options:<ol>
+     * <li>Set ACAMERA_CONTROL_ZOOM_RATIO to 1.0; adjust zoom by ACAMERA_SCALER_CROP_REGION.</li>
+     * <li>Adjust zoom by ACAMERA_CONTROL_ZOOM_RATIO; use ACAMERA_SCALER_CROP_REGION to crop
+     * the field of view vertically (letterboxing) or horizontally (pillarboxing), but not
+     * windowboxing.</li>
+     * </ol>
+     * </li>
+     * <li>Setting ACAMERA_CONTROL_ZOOM_RATIO to values different than 1.0 and
+     * ACAMERA_SCALER_CROP_REGION to be windowboxing at the same time is undefined behavior.</li>
+     * </ul>
      * <p>LEGACY capability devices will only support CENTER_ONLY cropping.</p>
      *
+     * @see ACAMERA_CONTROL_ZOOM_RATIO
      * @see ACAMERA_SCALER_CROP_REGION
      * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
      */
@@ -4707,9 +4909,20 @@
      * When the distortion correction mode is not OFF, the coordinate system follows
      * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, with
      * <code>(0, 0)</code> being the top-left pixel of the active array.</p>
-     * <p>Only available if ACAMERA_STATISTICS_FACE_DETECT_MODE == FULL</p>
+     * <p>Only available if ACAMERA_STATISTICS_FACE_DETECT_MODE == FULL.</p>
+     * <p>Starting from API level 30, the coordinate system of activeArraySize or
+     * preCorrectionActiveArraySize is used to represent post-zoomRatio field of view, not
+     * pre-zoomRatio field of view. This means that if the relative position of faces and
+     * the camera device doesn't change, when zooming in by increasing
+     * ACAMERA_CONTROL_ZOOM_RATIO, the face landmarks move farther away from the center of the
+     * activeArray or preCorrectionActiveArray. If ACAMERA_CONTROL_ZOOM_RATIO is set to 1.0
+     * (default), the face landmarks coordinates won't change as ACAMERA_SCALER_CROP_REGION
+     * changes. See ACAMERA_CONTROL_ZOOM_RATIO for details. Whether to use activeArraySize or
+     * preCorrectionActiveArraySize still depends on distortion correction mode.</p>
      *
+     * @see ACAMERA_CONTROL_ZOOM_RATIO
      * @see ACAMERA_DISTORTION_CORRECTION_MODE
+     * @see ACAMERA_SCALER_CROP_REGION
      * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
      * @see ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE
      * @see ACAMERA_STATISTICS_FACE_DETECT_MODE
@@ -4738,10 +4951,21 @@
      * When the distortion correction mode is not OFF, the coordinate system follows
      * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, with
      * <code>(0, 0)</code> being the top-left pixel of the active array.</p>
-     * <p>Only available if ACAMERA_STATISTICS_FACE_DETECT_MODE != OFF
-     * The data representation is <code>int[4]</code>, which maps to <code>(left, top, right, bottom)</code>.</p>
+     * <p>Only available if ACAMERA_STATISTICS_FACE_DETECT_MODE != OFF.</p>
+     * <p>Starting from API level 30, the coordinate system of activeArraySize or
+     * preCorrectionActiveArraySize is used to represent post-zoomRatio field of view, not
+     * pre-zoomRatio field of view. This means that if the relative position of faces and
+     * the camera device doesn't change, when zooming in by increasing
+     * ACAMERA_CONTROL_ZOOM_RATIO, the face rectangles grow larger and move farther away from
+     * the center of the activeArray or preCorrectionActiveArray. If ACAMERA_CONTROL_ZOOM_RATIO
+     * is set to 1.0 (default), the face rectangles won't change as ACAMERA_SCALER_CROP_REGION
+     * changes. See ACAMERA_CONTROL_ZOOM_RATIO for details. Whether to use activeArraySize or
+     * preCorrectionActiveArraySize still depends on distortion correction mode.</p>
+     * <p>The data representation is <code>int[4]</code>, which maps to <code>(left, top, right, bottom)</code>.</p>
      *
+     * @see ACAMERA_CONTROL_ZOOM_RATIO
      * @see ACAMERA_DISTORTION_CORRECTION_MODE
+     * @see ACAMERA_SCALER_CROP_REGION
      * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
      * @see ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE
      * @see ACAMERA_STATISTICS_FACE_DETECT_MODE
@@ -7324,6 +7548,19 @@
      */
     ACAMERA_LENS_POSE_REFERENCE_GYROSCOPE                            = 1,
 
+    /**
+     * <p>The camera device cannot represent the values of ACAMERA_LENS_POSE_TRANSLATION
+     * and ACAMERA_LENS_POSE_ROTATION accurately enough. One such example is a camera device
+     * on the cover of a foldable phone: in order to measure the pose translation and rotation,
+     * some kind of hinge position sensor would be needed.</p>
+     * <p>The value of ACAMERA_LENS_POSE_TRANSLATION must be all zeros, and
+     * ACAMERA_LENS_POSE_ROTATION must be values matching its default facing.</p>
+     *
+     * @see ACAMERA_LENS_POSE_ROTATION
+     * @see ACAMERA_LENS_POSE_TRANSLATION
+     */
+    ACAMERA_LENS_POSE_REFERENCE_UNDEFINED                            = 2,
+
 } acamera_metadata_enum_android_lens_pose_reference_t;
 
 
@@ -8165,12 +8402,16 @@
 // ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE
 typedef enum acamera_metadata_enum_acamera_sensor_info_timestamp_source {
     /**
-     * <p>Timestamps from ACAMERA_SENSOR_TIMESTAMP are in nanoseconds and monotonic,
-     * but can not be compared to timestamps from other subsystems
-     * (e.g. accelerometer, gyro etc.), or other instances of the same or different
-     * camera devices in the same system. Timestamps between streams and results for
-     * a single camera instance are comparable, and the timestamps for all buffers
-     * and the result metadata generated by a single capture are identical.</p>
+     * <p>Timestamps from ACAMERA_SENSOR_TIMESTAMP are in nanoseconds and monotonic, but can
+     * not be compared to timestamps from other subsystems (e.g. accelerometer, gyro etc.),
+     * or other instances of the same or different camera devices in the same system with
+     * accuracy. However, the timestamps are roughly in the same timebase as
+     * <a href="https://developer.android.com/reference/android/os/SystemClock.html#uptimeMillis">SystemClock#uptimeMillis</a>.  The accuracy is sufficient for tasks
+     * like A/V synchronization for video recording, at least, and the timestamps can be
+     * directly used together with timestamps from the audio subsystem for that task.</p>
+     * <p>Timestamps between streams and results for a single camera instance are comparable,
+     * and the timestamps for all buffers and the result metadata generated by a single
+     * capture are identical.</p>
      *
      * @see ACAMERA_SENSOR_TIMESTAMP
      */
@@ -8180,6 +8421,14 @@
      * <p>Timestamps from ACAMERA_SENSOR_TIMESTAMP are in the same timebase as
      * <a href="https://developer.android.com/reference/android/os/SystemClock.html#elapsedRealtimeNanos">SystemClock#elapsedRealtimeNanos</a>,
      * and they can be compared to other timestamps using that base.</p>
+     * <p>When buffers from a REALTIME device are passed directly to a video encoder from the
+     * camera, automatic compensation is done to account for differing timebases of the
+     * audio and camera subsystems.  If the application is receiving buffers and then later
+     * sending them to a video encoder or other application where they are compared with
+     * audio subsystem timestamps or similar, this compensation is not present.  In those
+     * cases, applications need to adjust the timestamps themselves.  Since <a href="https://developer.android.com/reference/android/os/SystemClock.html#elapsedRealtimeNanos">SystemClock#elapsedRealtimeNanos</a> and <a href="https://developer.android.com/reference/android/os/SystemClock.html#uptimeMillis">SystemClock#uptimeMillis</a> only diverge while the device is asleep, an
+     * offset between the two sources can be measured once per active session and applied
+     * to timestamps for sufficient accuracy for A/V sync.</p>
      *
      * @see ACAMERA_SENSOR_TIMESTAMP
      */
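
As a hedged illustration of the ACAMERA_CONTROL_ZOOM_RATIO control documented above (assuming an API level 30 device and an ACaptureRequest already created by the application), 2.0x zoom can be requested by setting the ratio while leaving the crop region at the full post-zoom field of view:

#include <camera/NdkCameraMetadataTags.h>
#include <camera/NdkCaptureRequest.h>

camera_status_t applyTwoTimesZoom(ACaptureRequest* request,
                                  int32_t activeArrayWidth, int32_t activeArrayHeight) {
    const float zoomRatio = 2.0f;
    camera_status_t status = ACaptureRequest_setEntry_float(
            request, ACAMERA_CONTROL_ZOOM_RATIO, /*count=*/1, &zoomRatio);
    if (status != ACAMERA_OK) {
        return status;
    }
    // With zoomRatio carrying the magnification, the crop region stays at the full
    // (post-zoom) active array: (left, top, width, height).
    const int32_t cropRegion[4] = {0, 0, activeArrayWidth, activeArrayHeight};
    return ACaptureRequest_setEntry_i32(
            request, ACAMERA_SCALER_CROP_REGION, /*count=*/4, cropRegion);
}
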
diff --git a/camera/ndk/libcamera2ndk.map.txt b/camera/ndk/libcamera2ndk.map.txt
index b6f1553..2b630db 100644
--- a/camera/ndk/libcamera2ndk.map.txt
+++ b/camera/ndk/libcamera2ndk.map.txt
@@ -31,6 +31,7 @@
     ACameraMetadata_getAllTags;
     ACameraMetadata_getConstEntry;
     ACameraMetadata_isLogicalMultiCamera; # introduced=29
+    ACameraMetadata_fromCameraMetadata; # introduced=30
     ACameraOutputTarget_create;
     ACameraOutputTarget_free;
     ACaptureRequest_addTarget;
diff --git a/camera/tests/CameraBinderTests.cpp b/camera/tests/CameraBinderTests.cpp
index 9a18b10..cd5bdd1 100644
--- a/camera/tests/CameraBinderTests.cpp
+++ b/camera/tests/CameraBinderTests.cpp
@@ -498,7 +498,9 @@
         EXPECT_TRUE(res.isOk()) << res;
         EXPECT_LE(0, streamId);
         CameraMetadata sessionParams;
-        res = device->endConfigure(/*isConstrainedHighSpeed*/ false, sessionParams);
+        std::vector<int> offlineStreamIds;
+        res = device->endConfigure(/*isConstrainedHighSpeed*/ false, sessionParams,
+                &offlineStreamIds);
         EXPECT_TRUE(res.isOk()) << res;
         EXPECT_FALSE(callbacks->hadError());
 
@@ -609,7 +611,8 @@
         EXPECT_TRUE(res.isOk()) << res;
         res = device->deleteStream(streamId);
         EXPECT_TRUE(res.isOk()) << res;
-        res = device->endConfigure(/*isConstrainedHighSpeed*/ false, sessionParams);
+        res = device->endConfigure(/*isConstrainedHighSpeed*/ false, sessionParams,
+                &offlineStreamIds);
         EXPECT_TRUE(res.isOk()) << res;
 
         sleep(/*second*/1); // allow some time for errors to show up, if any
diff --git a/cmds/screenrecord/screenrecord.cpp b/cmds/screenrecord/screenrecord.cpp
index b534f8a..c66dea2 100644
--- a/cmds/screenrecord/screenrecord.cpp
+++ b/cmds/screenrecord/screenrecord.cpp
@@ -84,14 +84,13 @@
 using android::sp;
 using android::status_t;
 
-using android::DISPLAY_ORIENTATION_0;
-using android::DISPLAY_ORIENTATION_180;
-using android::DISPLAY_ORIENTATION_90;
 using android::INVALID_OPERATION;
 using android::NAME_NOT_FOUND;
 using android::NO_ERROR;
 using android::UNKNOWN_ERROR;
 
+namespace ui = android::ui;
+
 static const uint32_t kMinBitRate = 100000;         // 0.1Mbps
 static const uint32_t kMaxBitRate = 200 * 1000000;  // 200Mbps
 static const uint32_t kMaxTimeLimitSec = 180;       // 3 minutes
@@ -328,7 +327,7 @@
     }
 
     t.setDisplayProjection(dpy,
-            gRotate ? DISPLAY_ORIENTATION_90 : DISPLAY_ORIENTATION_0,
+            gRotate ? ui::ROTATION_90 : ui::ROTATION_0,
             layerStackRect, displayRect);
     return NO_ERROR;
 }
@@ -414,7 +413,7 @@
  */
 static status_t runEncoder(const sp<MediaCodec>& encoder,
         AMediaMuxer *muxer, FILE* rawFp, const sp<IBinder>& display,
-        const sp<IBinder>& virtualDpy, uint8_t orientation) {
+        const sp<IBinder>& virtualDpy, ui::Rotation orientation) {
     static int kTimeout = 250000;   // be responsive on signal
     status_t err;
     ssize_t trackIdx = -1;
@@ -484,7 +483,7 @@
                     if (err != NO_ERROR) {
                         ALOGW("getDisplayInfo(main) failed: %d", err);
                     } else if (orientation != displayInfo.orientation) {
-                        ALOGD("orientation changed, now %d", displayInfo.orientation);
+                        ALOGD("orientation changed, now %s", toCString(displayInfo.orientation));
                         SurfaceComposerClient::Transaction t;
                         setDisplayProjection(t, virtualDpy, displayInfo);
                         t.apply();
@@ -691,9 +690,9 @@
     }
 
     if (gVerbose) {
-        printf("Display is %dx%d @%.2ffps (orientation=%u), layerStack=%u\n",
+        printf("Display is %dx%d @%.2ffps (orientation=%s), layerStack=%u\n",
                 displayInfo.viewportW, displayInfo.viewportH, displayInfo.fps,
-                displayInfo.orientation, displayInfo.layerStack);
+                toCString(displayInfo.orientation), displayInfo.layerStack);
         fflush(stdout);
     }
 
diff --git a/cmds/stagefright/Android.mk b/cmds/stagefright/Android.mk
index defc94f..7b447d3 100644
--- a/cmds/stagefright/Android.mk
+++ b/cmds/stagefright/Android.mk
@@ -23,6 +23,7 @@
 
 LOCAL_MODULE_TAGS := optional
 
+LOCAL_SYSTEM_EXT_MODULE:= true
 LOCAL_MODULE:= stagefright
 
 include $(BUILD_EXECUTABLE)
diff --git a/cmds/stagefright/AudioPlayer.cpp b/cmds/stagefright/AudioPlayer.cpp
index 208713d..eb76953 100644
--- a/cmds/stagefright/AudioPlayer.cpp
+++ b/cmds/stagefright/AudioPlayer.cpp
@@ -23,7 +23,7 @@
 
 #include <binder/IPCThreadState.h>
 #include <media/AudioTrack.h>
-#include <media/MediaSource.h>
+#include <media/stagefright/MediaSource.h>
 #include <media/openmax/OMX_Audio.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/ALookup.h>
diff --git a/cmds/stagefright/AudioPlayer.h b/cmds/stagefright/AudioPlayer.h
index 7c2c36f..107bd71 100644
--- a/cmds/stagefright/AudioPlayer.h
+++ b/cmds/stagefright/AudioPlayer.h
@@ -18,7 +18,7 @@
 
 #define AUDIO_PLAYER_H_
 
-#include <media/MediaSource.h>
+#include <media/stagefright/MediaSource.h>
 #include <media/MediaPlayerInterface.h>
 #include <media/stagefright/MediaBuffer.h>
 #include <utils/threads.h>
diff --git a/cmds/stagefright/SineSource.h b/cmds/stagefright/SineSource.h
index 1817291..6f1d98c 100644
--- a/cmds/stagefright/SineSource.h
+++ b/cmds/stagefright/SineSource.h
@@ -2,7 +2,7 @@
 
 #define SINE_SOURCE_H_
 
-#include <media/MediaSource.h>
+#include <media/stagefright/MediaSource.h>
 #include <utils/Compat.h>
 
 namespace android {
diff --git a/cmds/stagefright/stagefright.cpp b/cmds/stagefright/stagefright.cpp
index e8cfece..c430f05 100644
--- a/cmds/stagefright/stagefright.cpp
+++ b/cmds/stagefright/stagefright.cpp
@@ -33,7 +33,7 @@
 #include <binder/ProcessState.h>
 #include <datasource/DataSourceFactory.h>
 #include <media/DataSource.h>
-#include <media/MediaSource.h>
+#include <media/stagefright/MediaSource.h>
 #include <media/IMediaHTTPService.h>
 #include <media/IMediaPlayerService.h>
 #include <media/stagefright/foundation/ABuffer.h>
diff --git a/cmds/stagefright/stream.cpp b/cmds/stagefright/stream.cpp
index fe613a8..5b29158 100644
--- a/cmds/stagefright/stream.cpp
+++ b/cmds/stagefright/stream.cpp
@@ -26,7 +26,7 @@
 #include <media/IMediaHTTPService.h>
 #include <media/IStreamSource.h>
 #include <media/mediaplayer.h>
-#include <media/MediaSource.h>
+#include <media/stagefright/MediaSource.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AMessage.h>
 #include <media/stagefright/InterfaceUtils.h>
diff --git a/drm/libmediadrm/Android.bp b/drm/libmediadrm/Android.bp
index 6b1b475..255640f 100644
--- a/drm/libmediadrm/Android.bp
+++ b/drm/libmediadrm/Android.bp
@@ -165,4 +165,8 @@
         "libmediadrmmetrics_full",
         "libutils",
     ],
+
+    header_libs: [
+        "libstagefright_foundation_headers",
+    ],
 }
diff --git a/drm/mediadrm/plugins/clearkey/hidl/Android.bp b/drm/mediadrm/plugins/clearkey/hidl/Android.bp
index eb623f9..a194416 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/Android.bp
+++ b/drm/mediadrm/plugins/clearkey/hidl/Android.bp
@@ -78,18 +78,37 @@
     },
     srcs: ["protos/DeviceFiles.proto"],
 }
+
 cc_binary {
     name: "android.hardware.drm@1.2-service.clearkey",
     defaults: ["clearkey_service_defaults"],
     srcs: ["service.cpp"],
     init_rc: ["android.hardware.drm@1.2-service.clearkey.rc"],
-    vintf_fragments: ["manifest_android.hardware.drm@1.3-service.clearkey.xml"],
+    vintf_fragments: ["manifest_android.hardware.drm@1.2-service.clearkey.xml"],
 }
+
 cc_binary {
     name: "android.hardware.drm@1.2-service-lazy.clearkey",
     overrides: ["android.hardware.drm@1.2-service.clearkey"],
     defaults: ["clearkey_service_defaults"],
     srcs: ["serviceLazy.cpp"],
     init_rc: ["android.hardware.drm@1.2-service-lazy.clearkey.rc"],
+    vintf_fragments: ["manifest_android.hardware.drm@1.2-service.clearkey.xml"],
+}
+
+cc_binary {
+    name: "android.hardware.drm@1.3-service.clearkey",
+    defaults: ["clearkey_service_defaults"],
+    srcs: ["service.cpp"],
+    init_rc: ["android.hardware.drm@1.3-service.clearkey.rc"],
+    vintf_fragments: ["manifest_android.hardware.drm@1.3-service.clearkey.xml"],
+}
+
+cc_binary {
+    name: "android.hardware.drm@1.3-service-lazy.clearkey",
+    overrides: ["android.hardware.drm@1.3-service.clearkey"],
+    defaults: ["clearkey_service_defaults"],
+    srcs: ["serviceLazy.cpp"],
+    init_rc: ["android.hardware.drm@1.3-service-lazy.clearkey.rc"],
     vintf_fragments: ["manifest_android.hardware.drm@1.3-service.clearkey.xml"],
 }
diff --git a/drm/mediadrm/plugins/clearkey/hidl/DrmPlugin.cpp b/drm/mediadrm/plugins/clearkey/hidl/DrmPlugin.cpp
index 942ea7d..546eb3e 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/DrmPlugin.cpp
+++ b/drm/mediadrm/plugins/clearkey/hidl/DrmPlugin.cpp
@@ -829,6 +829,12 @@
     // and the drm service. The clearkey implementation consists of:
     //    count - number of secure stops
     //    list of fixed length secure stops
+    size_t countBufferSize = sizeof(uint32_t);
+    if (input.size() < countBufferSize) {
+        // SafetyNet logging
+        android_errorWriteLog(0x534e4554, "144766455");
+        return Status::BAD_VALUE;
+    }
     uint32_t count = 0;
     sscanf(reinterpret_cast<char*>(input.data()), "%04" PRIu32, &count);
 
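For illustration only, a hedged sketch of the validation pattern that the added size check enables: verify the buffer can hold the 4-byte count field before scanning it, and that the advertised count fits in the remaining bytes. The function, its names, and the stopSize parameter are assumptions for the example, not the plugin's actual parser:

    #include <cinttypes>
    #include <cstdio>
    #include <cstring>
    #include <vector>

    // Parse a 4-character decimal count followed by fixed-size records.
    // stopSize is a placeholder for the fixed per-stop record size.
    static bool parseSecureStopCount(const std::vector<uint8_t> &input,
                                     size_t stopSize, uint32_t *count) {
        constexpr size_t kCountFieldSize = sizeof(uint32_t);  // 4 ASCII digits
        if (input.size() < kCountFieldSize) {
            return false;  // would read past the end, as guarded above
        }
        char field[kCountFieldSize + 1] = {};
        memcpy(field, input.data(), kCountFieldSize);
        if (sscanf(field, "%04" PRIu32, count) != 1) {
            return false;
        }
        // The advertised count must also fit in the remaining bytes.
        return stopSize != 0 &&
                (input.size() - kCountFieldSize) / stopSize >= *count;
    }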
diff --git a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.2-service-lazy.clearkey.rc b/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.2-service-lazy.clearkey.rc
index cff4d74..9afd3d7 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.2-service-lazy.clearkey.rc
+++ b/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.2-service-lazy.clearkey.rc
@@ -5,8 +5,6 @@
     interface android.hardware.drm@1.1::IDrmFactory clearkey
     interface android.hardware.drm@1.2::ICryptoFactory clearkey
     interface android.hardware.drm@1.2::IDrmFactory clearkey
-    interface android.hardware.drm@1.3::ICryptoFactory clearkey
-    interface android.hardware.drm@1.3::IDrmFactory clearkey
     disabled
     oneshot
     class hal
diff --git a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.2-service.clearkey.rc b/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.2-service.clearkey.rc
index dca232a..c1abe7f 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.2-service.clearkey.rc
+++ b/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.2-service.clearkey.rc
@@ -5,8 +5,7 @@
     interface android.hardware.drm@1.1::IDrmFactory clearkey
     interface android.hardware.drm@1.2::ICryptoFactory clearkey
     interface android.hardware.drm@1.2::IDrmFactory clearkey
-    interface android.hardware.drm@1.3::ICryptoFactory clearkey
-    interface android.hardware.drm@1.3::IDrmFactory clearkey
+    disabled
     class hal
     user media
     group media mediadrm
diff --git a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.3-service-lazy.clearkey.rc b/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.3-service-lazy.clearkey.rc
new file mode 100644
index 0000000..1e0d431
--- /dev/null
+++ b/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.3-service-lazy.clearkey.rc
@@ -0,0 +1,16 @@
+service vendor.drm-clearkey-hal-1-3 /vendor/bin/hw/android.hardware.drm@1.3-service-lazy.clearkey
+    interface android.hardware.drm@1.0::ICryptoFactory clearkey
+    interface android.hardware.drm@1.0::IDrmFactory clearkey
+    interface android.hardware.drm@1.1::ICryptoFactory clearkey
+    interface android.hardware.drm@1.1::IDrmFactory clearkey
+    interface android.hardware.drm@1.2::ICryptoFactory clearkey
+    interface android.hardware.drm@1.2::IDrmFactory clearkey
+    interface android.hardware.drm@1.3::ICryptoFactory clearkey
+    interface android.hardware.drm@1.3::IDrmFactory clearkey
+    disabled
+    oneshot
+    class hal
+    user media
+    group media mediadrm
+    ioprio rt 4
+    writepid /dev/cpuset/foreground/tasks
diff --git a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.3-service.clearkey.rc b/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.3-service.clearkey.rc
new file mode 100644
index 0000000..8130511
--- /dev/null
+++ b/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.3-service.clearkey.rc
@@ -0,0 +1,14 @@
+service vendor.drm-clearkey-hal-1-3 /vendor/bin/hw/android.hardware.drm@1.3-service.clearkey
+    interface android.hardware.drm@1.0::ICryptoFactory clearkey
+    interface android.hardware.drm@1.0::IDrmFactory clearkey
+    interface android.hardware.drm@1.1::ICryptoFactory clearkey
+    interface android.hardware.drm@1.1::IDrmFactory clearkey
+    interface android.hardware.drm@1.2::ICryptoFactory clearkey
+    interface android.hardware.drm@1.2::IDrmFactory clearkey
+    interface android.hardware.drm@1.3::ICryptoFactory clearkey
+    interface android.hardware.drm@1.3::IDrmFactory clearkey
+    class hal
+    user media
+    group media mediadrm
+    ioprio rt 4
+    writepid /dev/cpuset/foreground/tasks
diff --git a/drm/mediadrm/plugins/clearkey/hidl/manifest_android.hardware.drm@1.2-service.clearkey.xml b/drm/mediadrm/plugins/clearkey/hidl/manifest_android.hardware.drm@1.2-service.clearkey.xml
new file mode 100644
index 0000000..16cba11
--- /dev/null
+++ b/drm/mediadrm/plugins/clearkey/hidl/manifest_android.hardware.drm@1.2-service.clearkey.xml
@@ -0,0 +1,23 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2019 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<manifest version="1.0" type="device">
+    <hal format="hidl">
+        <name>android.hardware.drm</name>
+        <transport>hwbinder</transport>
+        <fqname>@1.2::ICryptoFactory/clearkey</fqname>
+        <fqname>@1.2::IDrmFactory/clearkey</fqname>
+    </hal>
+</manifest>
diff --git a/include/media/DataSource.h b/include/media/DataSource.h
index 8efa809..2abd5f5 100644
--- a/include/media/DataSource.h
+++ b/include/media/DataSource.h
@@ -22,7 +22,7 @@
 
 #include <android/IDataSource.h>
 #include <media/stagefright/MediaErrors.h>
-#include <media/DataSourceBase.h>
+#include <media/stagefright/DataSourceBase.h>
 #include <media/MediaExtractorPluginApi.h>
 #include <utils/Errors.h>
 #include <utils/RefBase.h>
diff --git a/include/media/DataSourceBase.h b/include/media/DataSourceBase.h
deleted file mode 120000
index d2ab2f1..0000000
--- a/include/media/DataSourceBase.h
+++ /dev/null
@@ -1 +0,0 @@
-stagefright/DataSourceBase.h
\ No newline at end of file
diff --git a/include/media/MediaSource.h b/include/media/MediaSource.h
deleted file mode 120000
index 34bf65d..0000000
--- a/include/media/MediaSource.h
+++ /dev/null
@@ -1 +0,0 @@
-../../media/libstagefright/include/media/stagefright/MediaSource.h
\ No newline at end of file
diff --git a/include/media/audiohal b/include/media/audiohal
deleted file mode 120000
index f400582..0000000
--- a/include/media/audiohal
+++ /dev/null
@@ -1 +0,0 @@
-../../media/libaudiohal/include/media/audiohal/
\ No newline at end of file
diff --git a/include/media/nbaio/AudioStreamOutSink.h b/include/media/nbaio/AudioStreamOutSink.h
deleted file mode 120000
index 43bfac5..0000000
--- a/include/media/nbaio/AudioStreamOutSink.h
+++ /dev/null
@@ -1 +0,0 @@
-../../../media/libnbaio/include/media/nbaio/AudioStreamOutSink.h
\ No newline at end of file
diff --git a/include/media/nbaio/NBAIO.h b/include/media/nbaio/NBAIO.h
deleted file mode 120000
index ff6a151..0000000
--- a/include/media/nbaio/NBAIO.h
+++ /dev/null
@@ -1 +0,0 @@
-../../../media/libnbaio/include_mono/media/nbaio/NBAIO.h
\ No newline at end of file
diff --git a/include/media/nbaio/SingleStateQueue.h b/include/media/nbaio/SingleStateQueue.h
deleted file mode 120000
index d3e0553..0000000
--- a/include/media/nbaio/SingleStateQueue.h
+++ /dev/null
@@ -1 +0,0 @@
-../../../media/libnbaio/include_mono/media/nbaio/SingleStateQueue.h
\ No newline at end of file
diff --git a/include/media/stagefright b/include/media/stagefright
deleted file mode 120000
index 5393f68..0000000
--- a/include/media/stagefright
+++ /dev/null
@@ -1 +0,0 @@
-../../media/libstagefright/include/media/stagefright/
\ No newline at end of file
diff --git a/media/audioserver/Android.mk b/media/audioserver/Android.mk
index fcf7cb1..79b77a0 100644
--- a/media/audioserver/Android.mk
+++ b/media/audioserver/Android.mk
@@ -22,6 +22,9 @@
 	libutils \
 	libvibrator
 
+LOCAL_HEADER_LIBRARIES := \
+	libaudiohal_headers \
+
 # TODO oboeservice is the old folder name for aaudioservice. It will be changed.
 LOCAL_C_INCLUDES := \
 	frameworks/av/services/audioflinger \
diff --git a/media/bufferpool/2.0/Android.bp b/media/bufferpool/2.0/Android.bp
index 97f114a..557b7ef 100644
--- a/media/bufferpool/2.0/Android.bp
+++ b/media/bufferpool/2.0/Android.bp
@@ -30,6 +30,8 @@
     name: "libstagefright_bufferpool@2.0.1",
     defaults: ["libstagefright_bufferpool@2.0-default"],
     vendor_available: true,
+    // TODO: b/147147992
+    double_loadable: true,
     cflags: [
         "-DBUFFERPOOL_CLONE_HANDLES",
     ],
@@ -40,6 +42,8 @@
     name: "libstagefright_bufferpool@2.0",
     defaults: ["libstagefright_bufferpool@2.0-default"],
     vendor_available: true,
+    // TODO: b/147147992
+    double_loadable: true,
     vndk: {
         enabled: true,
     },
diff --git a/media/codec2/components/avc/C2SoftAvcDec.cpp b/media/codec2/components/avc/C2SoftAvcDec.cpp
index 2662f0f..56813c4 100644
--- a/media/codec2/components/avc/C2SoftAvcDec.cpp
+++ b/media/codec2/components/avc/C2SoftAvcDec.cpp
@@ -501,7 +501,7 @@
 status_t C2SoftAvcDec::initDecoder() {
     if (OK != createDecoder()) return UNKNOWN_ERROR;
     mNumCores = MIN(getCpuCoreCount(), MAX_NUM_CORES);
-    mStride = ALIGN64(mWidth);
+    mStride = ALIGN128(mWidth);
     mSignalledError = false;
     resetPlugin();
     (void) setNumCores();
@@ -756,8 +756,8 @@
         ALOGE("not supposed to be here, invalid decoder context");
         return C2_CORRUPTED;
     }
-    if (mStride != ALIGN64(mWidth)) {
-        mStride = ALIGN64(mWidth);
+    if (mStride != ALIGN128(mWidth)) {
+        mStride = ALIGN128(mWidth);
         if (OK != setParams(mStride, IVD_DECODE_FRAME)) return C2_CORRUPTED;
     }
     if (mOutBlock &&
@@ -909,7 +909,7 @@
         if (0 < s_decode_op.u4_pic_wd && 0 < s_decode_op.u4_pic_ht) {
             if (mHeaderDecoded == false) {
                 mHeaderDecoded = true;
-                setParams(ALIGN64(s_decode_op.u4_pic_wd), IVD_DECODE_FRAME);
+                setParams(ALIGN128(s_decode_op.u4_pic_wd), IVD_DECODE_FRAME);
             }
             if (s_decode_op.u4_pic_wd != mWidth || s_decode_op.u4_pic_ht != mHeight) {
                 mWidth = s_decode_op.u4_pic_wd;
diff --git a/media/codec2/components/avc/C2SoftAvcDec.h b/media/codec2/components/avc/C2SoftAvcDec.h
index 4414a26..ed27493 100644
--- a/media/codec2/components/avc/C2SoftAvcDec.h
+++ b/media/codec2/components/avc/C2SoftAvcDec.h
@@ -40,6 +40,7 @@
 #define ivdext_ctl_get_vui_params_ip_t  ih264d_ctl_get_vui_params_ip_t
 #define ivdext_ctl_get_vui_params_op_t  ih264d_ctl_get_vui_params_op_t
 #define ALIGN64(x)                      ((((x) + 63) >> 6) << 6)
+#define ALIGN128(x)                     ((((x) + 127) >> 7) << 7)
 #define MAX_NUM_CORES                   4
 #define IVDEXT_CMD_CTL_SET_NUM_CORES    \
         (IVD_CONTROL_API_COMMAND_TYPE_T)IH264D_CMD_CTL_SET_NUM_CORES
diff --git a/media/codec2/components/cmds/codec2.cpp b/media/codec2/components/cmds/codec2.cpp
index 38eaf88..d6025de 100644
--- a/media/codec2/components/cmds/codec2.cpp
+++ b/media/codec2/components/cmds/codec2.cpp
@@ -34,7 +34,7 @@
 #include <media/DataSource.h>
 #include <mediadrm/ICrypto.h>
 #include <media/IMediaHTTPService.h>
-#include <media/MediaSource.h>
+#include <media/stagefright/MediaSource.h>
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ALooper.h>
 #include <media/stagefright/foundation/AMessage.h>
diff --git a/media/codec2/components/hevc/C2SoftHevcDec.cpp b/media/codec2/components/hevc/C2SoftHevcDec.cpp
index df677c2..6db4387 100644
--- a/media/codec2/components/hevc/C2SoftHevcDec.cpp
+++ b/media/codec2/components/hevc/C2SoftHevcDec.cpp
@@ -497,7 +497,7 @@
 status_t C2SoftHevcDec::initDecoder() {
     if (OK != createDecoder()) return UNKNOWN_ERROR;
     mNumCores = MIN(getCpuCoreCount(), MAX_NUM_CORES);
-    mStride = ALIGN64(mWidth);
+    mStride = ALIGN128(mWidth);
     mSignalledError = false;
     resetPlugin();
     (void) setNumCores();
@@ -752,8 +752,8 @@
         ALOGE("not supposed to be here, invalid decoder context");
         return C2_CORRUPTED;
     }
-    if (mStride != ALIGN64(mWidth)) {
-        mStride = ALIGN64(mWidth);
+    if (mStride != ALIGN128(mWidth)) {
+        mStride = ALIGN128(mWidth);
         if (OK != setParams(mStride, IVD_DECODE_FRAME)) return C2_CORRUPTED;
     }
     if (mOutBlock &&
@@ -904,7 +904,7 @@
         if (0 < s_decode_op.u4_pic_wd && 0 < s_decode_op.u4_pic_ht) {
             if (mHeaderDecoded == false) {
                 mHeaderDecoded = true;
-                setParams(ALIGN64(s_decode_op.u4_pic_wd), IVD_DECODE_FRAME);
+                setParams(ALIGN128(s_decode_op.u4_pic_wd), IVD_DECODE_FRAME);
             }
             if (s_decode_op.u4_pic_wd != mWidth ||  s_decode_op.u4_pic_ht != mHeight) {
                 mWidth = s_decode_op.u4_pic_wd;
diff --git a/media/codec2/components/hevc/C2SoftHevcDec.h b/media/codec2/components/hevc/C2SoftHevcDec.h
index ce63a6c..aecd101 100644
--- a/media/codec2/components/hevc/C2SoftHevcDec.h
+++ b/media/codec2/components/hevc/C2SoftHevcDec.h
@@ -38,6 +38,7 @@
 #define ivdext_ctl_get_vui_params_ip_t  ihevcd_cxa_ctl_get_vui_params_ip_t
 #define ivdext_ctl_get_vui_params_op_t  ihevcd_cxa_ctl_get_vui_params_op_t
 #define ALIGN64(x)                      ((((x) + 63) >> 6) << 6)
+#define ALIGN128(x)                     ((((x) + 127) >> 7) << 7)
 #define MAX_NUM_CORES                   4
 #define IVDEXT_CMD_CTL_SET_NUM_CORES    \
         (IVD_CONTROL_API_COMMAND_TYPE_T)IHEVCD_CXA_CMD_CTL_SET_NUM_CORES
diff --git a/media/codec2/core/Android.bp b/media/codec2/core/Android.bp
index a7e8997..1f9d7ab 100644
--- a/media/codec2/core/Android.bp
+++ b/media/codec2/core/Android.bp
@@ -10,6 +10,7 @@
     vndk: {
         enabled: true,
     },
+    double_loadable: true,
 
     srcs: ["C2.cpp"],
 
diff --git a/media/codec2/sfplugin/Android.bp b/media/codec2/sfplugin/Android.bp
index 59ac94b..534c1a7 100644
--- a/media/codec2/sfplugin/Android.bp
+++ b/media/codec2/sfplugin/Android.bp
@@ -1,6 +1,8 @@
 cc_library_shared {
     name: "libsfplugin_ccodec",
 
+    export_include_dirs: ["include"],
+
     srcs: [
         "C2OMXNode.cpp",
         "CCodec.cpp",
@@ -11,7 +13,6 @@
         "Codec2InfoBuilder.cpp",
         "PipelineWatcher.cpp",
         "ReflectedParamUpdater.cpp",
-        "SkipCutBuffer.cpp",
     ],
 
     cflags: [
@@ -32,6 +33,7 @@
         "android.hardware.media.omx@1.0",
         "libbase",
         "libbinder",
+        "libcodec2",
         "libcodec2_client",
         "libcodec2_vndk",
         "libcutils",
@@ -51,6 +53,11 @@
         "libutils",
     ],
 
+    export_shared_lib_headers: [
+        "libcodec2",
+        "libcodec2_client",
+    ],
+
     sanitize: {
         cfi: true,
         misc_undefined: [
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index 31e5406..39263f9 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -40,13 +40,15 @@
 #include <media/openmax/OMX_IndexExt.h>
 #include <media/stagefright/omx/1.0/WGraphicBufferSource.h>
 #include <media/stagefright/omx/OmxGraphicBufferSource.h>
+#include <media/stagefright/CCodec.h>
 #include <media/stagefright/BufferProducerWrapper.h>
 #include <media/stagefright/MediaCodecConstants.h>
 #include <media/stagefright/PersistentSurface.h>
 
 #include "C2OMXNode.h"
-#include "CCodec.h"
 #include "CCodecBufferChannel.h"
+#include "CCodecConfig.h"
+#include "Codec2Mapper.h"
 #include "InputSurfaceWrapper.h"
 
 extern "C" android::PersistentSurface *CreateInputSurface();
@@ -59,6 +61,7 @@
 using ::android::hardware::media::c2::V1_0::IInputSurface;
 
 typedef hardware::media::omx::V1_0::IGraphicBufferSource HGraphicBufferSource;
+typedef CCodecConfig Config;
 
 namespace {
 
@@ -571,7 +574,8 @@
 // CCodec
 
 CCodec::CCodec()
-    : mChannel(new CCodecBufferChannel(std::make_shared<CCodecCallbackImpl>(this))) {
+    : mChannel(new CCodecBufferChannel(std::make_shared<CCodecCallbackImpl>(this))),
+      mConfig(new CCodecConfig) {
 }
 
 CCodec::~CCodec() {
@@ -662,7 +666,8 @@
     }
 
     // initialize config here in case setParameters is called prior to configure
-    Mutexed<Config>::Locked config(mConfig);
+    Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+    const std::unique_ptr<Config> &config = *configLocked;
     status_t err = config->initialize(mClient, comp);
     if (err != OK) {
         ALOGW("Failed to initialize configuration support");
@@ -713,6 +718,11 @@
             encoder = false;
         }
 
+        int32_t flags;
+        if (!msg->findInt32("flags", &flags)) {
+            return BAD_VALUE;
+        }
+
         // TODO: read from intf()
         if ((!encoder) != (comp->getName().find("encoder") == std::string::npos)) {
             return UNKNOWN_ERROR;
@@ -736,8 +746,12 @@
             setSurface(surface);
         }
 
-        Mutexed<Config>::Locked config(mConfig);
+        Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+        const std::unique_ptr<Config> &config = *configLocked;
         config->mUsingSurface = surface != nullptr;
+        config->mBuffersBoundToCodec = ((flags & CONFIGURE_FLAG_USE_BLOCK_MODEL) == 0);
+        ALOGD("[%s] buffers are %sbound to CCodec for this session",
+              comp->getName().c_str(), config->mBuffersBoundToCodec ? "" : "not ");
 
         // Enforce required parameters
         int32_t i32;
@@ -1052,7 +1066,8 @@
         return;
     }
 
-    Mutexed<Config>::Locked config(mConfig);
+    Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+    const std::unique_ptr<Config> &config = *configLocked;
 
     mCallback->onComponentConfigured(config->mInputFormat, config->mOutputFormat);
 }
@@ -1126,7 +1141,8 @@
     sp<AMessage> outputFormat;
     uint64_t usage = 0;
     {
-        Mutexed<Config>::Locked config(mConfig);
+        Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+        const std::unique_ptr<Config> &config = *configLocked;
         inputFormat = config->mInputFormat;
         outputFormat = config->mOutputFormat;
         usage = config->mISConfig ? config->mISConfig->mUsage : 0;
@@ -1170,7 +1186,8 @@
 }
 
 status_t CCodec::setupInputSurface(const std::shared_ptr<InputSurfaceWrapper> &surface) {
-    Mutexed<Config>::Locked config(mConfig);
+    Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+    const std::unique_ptr<Config> &config = *configLocked;
     config->mUsingSurface = true;
 
     // we are now using surface - apply default color aspects to input format - as well as
@@ -1215,7 +1232,8 @@
     sp<AMessage> outputFormat;
     uint64_t usage = 0;
     {
-        Mutexed<Config>::Locked config(mConfig);
+        Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+        const std::unique_ptr<Config> &config = *configLocked;
         inputFormat = config->mInputFormat;
         outputFormat = config->mOutputFormat;
         usage = config->mISConfig ? config->mISConfig->mUsage : 0;
@@ -1290,19 +1308,22 @@
     sp<AMessage> inputFormat;
     sp<AMessage> outputFormat;
     status_t err2 = OK;
+    bool buffersBoundToCodec = false;
     {
-        Mutexed<Config>::Locked config(mConfig);
+        Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+        const std::unique_ptr<Config> &config = *configLocked;
         inputFormat = config->mInputFormat;
         outputFormat = config->mOutputFormat;
         if (config->mInputSurface) {
             err2 = config->mInputSurface->start();
         }
+        buffersBoundToCodec = config->mBuffersBoundToCodec;
     }
     if (err2 != OK) {
         mCallback->onError(err2, ACTION_CODE_FATAL);
         return;
     }
-    err2 = mChannel->start(inputFormat, outputFormat);
+    err2 = mChannel->start(inputFormat, outputFormat, buffersBoundToCodec);
     if (err2 != OK) {
         mCallback->onError(err2, ACTION_CODE_FATAL);
         return;
@@ -1377,7 +1398,8 @@
     }
 
     {
-        Mutexed<Config>::Locked config(mConfig);
+        Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+        const std::unique_ptr<Config> &config = *configLocked;
         if (config->mInputSurface) {
             config->mInputSurface->disconnect();
             config->mInputSurface = nullptr;
@@ -1425,7 +1447,8 @@
     }
 
     if (clearInputSurfaceIfNeeded) {
-        Mutexed<Config>::Locked config(mConfig);
+        Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+        const std::unique_ptr<Config> &config = *configLocked;
         if (config->mInputSurface) {
             config->mInputSurface->disconnect();
             config->mInputSurface = nullptr;
@@ -1544,7 +1567,11 @@
         return;
     }
 
-    (void)mChannel->start(nullptr, nullptr);
+    (void)mChannel->start(nullptr, nullptr, [&]{
+        Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+        const std::unique_ptr<Config> &config = *configLocked;
+        return config->mBuffersBoundToCodec;
+    }());
 
     {
         Mutexed<State>::Locked state(mState);
@@ -1583,7 +1610,8 @@
         params->removeEntryAt(params->findEntryByName(KEY_BIT_RATE));
     }
 
-    Mutexed<Config>::Locked config(mConfig);
+    Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+    const std::unique_ptr<Config> &config = *configLocked;
 
     /**
      * Handle input surface parameters
@@ -1642,7 +1670,8 @@
         comp = state->comp;
     }
     ALOGV("request IDR");
-    Mutexed<Config>::Locked config(mConfig);
+    Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+    const std::unique_ptr<Config> &config = *configLocked;
     std::vector<std::unique_ptr<C2Param>> params;
     params.push_back(
             std::make_unique<C2StreamRequestSyncFrameTuning::output>(0u, true));
@@ -1661,7 +1690,8 @@
     mChannel->onInputBufferDone(frameIndex, arrayIndex);
     if (arrayIndex == 0) {
         // We always put no more than one buffer per work, if we use an input surface.
-        Mutexed<Config>::Locked config(mConfig);
+        Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+        const std::unique_ptr<Config> &config = *configLocked;
         if (config->mInputSurface) {
             config->mInputSurface->onInputBufferDone(frameIndex);
         }
@@ -1738,7 +1768,8 @@
             }
 
             // handle configuration changes in work done
-            Mutexed<Config>::Locked config(mConfig);
+            Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+            const std::unique_ptr<Config> &config = *configLocked;
             bool changed = false;
             Config::Watcher<C2StreamInitDataInfo::output> initData =
                 config->watch<C2StreamInitDataInfo::output>();
@@ -1867,14 +1898,8 @@
     mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
 }
 
-}  // namespace android
-
-extern "C" android::CodecBase *CreateCodec() {
-    return new android::CCodec;
-}
-
-// Create Codec 2.0 input surface
-extern "C" android::PersistentSurface *CreateInputSurface() {
+// static
+PersistentSurface *CCodec::CreateInputSurface() {
     using namespace android;
     using ::android::hardware::media::omx::V1_0::implementation::TWGraphicBufferSource;
     // Attempt to create a Codec2's input surface.
@@ -1901,3 +1926,243 @@
             inputSurface->getHalInterface()));
 }
 
+static status_t GetCommonAllocatorIds(
+        const std::vector<std::string> &names,
+        C2Allocator::type_t type,
+        std::set<C2Allocator::id_t> *ids) {
+    int poolMask = GetCodec2PoolMask();
+    C2PlatformAllocatorStore::id_t preferredLinearId = GetPreferredLinearAllocatorId(poolMask);
+    C2Allocator::id_t defaultAllocatorId =
+        (type == C2Allocator::LINEAR) ? preferredLinearId : C2PlatformAllocatorStore::GRALLOC;
+
+    ids->clear();
+    if (names.empty()) {
+        return OK;
+    }
+    std::shared_ptr<Codec2Client::Interface> intf{
+        Codec2Client::CreateInterfaceByName(names[0].c_str())};
+    std::vector<std::unique_ptr<C2Param>> params;
+    c2_status_t err = intf->query(
+            {}, {C2PortAllocatorsTuning::input::PARAM_TYPE}, C2_MAY_BLOCK, &params);
+    if (err == C2_OK && params.size() == 1u) {
+        C2PortAllocatorsTuning::input *allocators =
+            C2PortAllocatorsTuning::input::From(params[0].get());
+        if (allocators && allocators->flexCount() > 0) {
+            ids->insert(allocators->m.values, allocators->m.values + allocators->flexCount());
+        }
+    }
+    if (ids->empty()) {
+        // The component does not advertise allocators. Use default.
+        ids->insert(defaultAllocatorId);
+    }
+    for (size_t i = 1; i < names.size(); ++i) {
+        intf = Codec2Client::CreateInterfaceByName(names[i].c_str());
+        err = intf->query(
+                {}, {C2PortAllocatorsTuning::input::PARAM_TYPE}, C2_MAY_BLOCK, &params);
+        bool filtered = false;
+        if (err == C2_OK && params.size() == 1u) {
+            C2PortAllocatorsTuning::input *allocators =
+                C2PortAllocatorsTuning::input::From(params[0].get());
+            if (allocators && allocators->flexCount() > 0) {
+                filtered = true;
+                for (auto it = ids->begin(); it != ids->end(); ) {
+                    bool found = false;
+                    for (size_t j = 0; j < allocators->flexCount(); ++j) {
+                        if (allocators->m.values[j] == *it) {
+                            found = true;
+                            break;
+                        }
+                    }
+                    if (found) {
+                        ++it;
+                    } else {
+                        it = ids->erase(it);
+                    }
+                }
+            }
+        }
+        if (!filtered) {
+            // The component does not advertise supported allocators. Use default.
+            bool containsDefault = (ids->count(defaultAllocatorId) > 0u);
+            if (ids->size() != (containsDefault ? 1 : 0)) {
+                ids->clear();
+                if (containsDefault) {
+                    ids->insert(defaultAllocatorId);
+                }
+            }
+        }
+    }
+    // Finally, filter with pool masks
+    for (auto it = ids->begin(); it != ids->end(); ) {
+        if ((poolMask >> *it) & 1) {
+            ++it;
+        } else {
+            it = ids->erase(it);
+        }
+    }
+    return OK;
+}
+
+static status_t CalculateMinMaxUsage(
+        const std::vector<std::string> &names, uint64_t *minUsage, uint64_t *maxUsage) {
+    static C2StreamUsageTuning::input sUsage{0u /* stream id */};
+    *minUsage = 0;
+    *maxUsage = ~0ull;
+    for (const std::string &name : names) {
+        std::shared_ptr<Codec2Client::Interface> intf{
+            Codec2Client::CreateInterfaceByName(name.c_str())};
+        std::vector<C2FieldSupportedValuesQuery> fields;
+        fields.push_back(C2FieldSupportedValuesQuery::Possible(
+                C2ParamField{&sUsage, &sUsage.value}));
+        c2_status_t err = intf->querySupportedValues(fields, C2_MAY_BLOCK);
+        if (err != C2_OK) {
+            continue;
+        }
+        if (fields[0].status != C2_OK) {
+            continue;
+        }
+        const C2FieldSupportedValues &supported = fields[0].values;
+        if (supported.type != C2FieldSupportedValues::FLAGS) {
+            continue;
+        }
+        if (supported.values.empty()) {
+            *maxUsage = 0;
+            continue;
+        }
+        *minUsage |= supported.values[0].u64;
+        int64_t currentMaxUsage = 0;
+        for (const C2Value::Primitive &flags : supported.values) {
+            currentMaxUsage |= flags.u64;
+        }
+        *maxUsage &= currentMaxUsage;
+    }
+    return OK;
+}
+
+// static
+status_t CCodec::CanFetchLinearBlock(
+        const std::vector<std::string> &names, const C2MemoryUsage &usage, bool *isCompatible) {
+    uint64_t minUsage = usage.expected;
+    uint64_t maxUsage = ~0ull;
+    std::set<C2Allocator::id_t> allocators;
+    GetCommonAllocatorIds(names, C2Allocator::LINEAR, &allocators);
+    if (allocators.empty()) {
+        *isCompatible = false;
+        return OK;
+    }
+    CalculateMinMaxUsage(names, &minUsage, &maxUsage);
+    *isCompatible = ((maxUsage & minUsage) == minUsage);
+    return OK;
+}
+
+static std::shared_ptr<C2BlockPool> GetPool(C2Allocator::id_t allocId) {
+    static std::mutex sMutex{};
+    static std::map<C2Allocator::id_t, std::shared_ptr<C2BlockPool>> sPools;
+    std::unique_lock<std::mutex> lock{sMutex};
+    std::shared_ptr<C2BlockPool> pool;
+    auto it = sPools.find(allocId);
+    if (it == sPools.end()) {
+        c2_status_t err = CreateCodec2BlockPool(allocId, nullptr, &pool);
+        if (err == OK) {
+            sPools.emplace(allocId, pool);
+        } else {
+            pool.reset();
+        }
+    } else {
+        pool = it->second;
+    }
+    return pool;
+}
+
+// static
+std::shared_ptr<C2LinearBlock> CCodec::FetchLinearBlock(
+        size_t capacity, const C2MemoryUsage &usage, const std::vector<std::string> &names) {
+    uint64_t minUsage = usage.expected;
+    uint64_t maxUsage = ~0ull;
+    std::set<C2Allocator::id_t> allocators;
+    GetCommonAllocatorIds(names, C2Allocator::LINEAR, &allocators);
+    if (allocators.empty()) {
+        allocators.insert(C2PlatformAllocatorStore::DEFAULT_LINEAR);
+    }
+    CalculateMinMaxUsage(names, &minUsage, &maxUsage);
+    if ((maxUsage & minUsage) != minUsage) {
+        allocators.clear();
+        allocators.insert(C2PlatformAllocatorStore::DEFAULT_LINEAR);
+    }
+    std::shared_ptr<C2LinearBlock> block;
+    for (C2Allocator::id_t allocId : allocators) {
+        std::shared_ptr<C2BlockPool> pool = GetPool(allocId);
+        if (!pool) {
+            continue;
+        }
+        c2_status_t err = pool->fetchLinearBlock(capacity, C2MemoryUsage{minUsage}, &block);
+        if (err != C2_OK || !block) {
+            block.reset();
+            continue;
+        }
+        break;
+    }
+    return block;
+}
+
+// static
+status_t CCodec::CanFetchGraphicBlock(
+        const std::vector<std::string> &names, bool *isCompatible) {
+    uint64_t minUsage = 0;
+    uint64_t maxUsage = ~0ull;
+    std::set<C2Allocator::id_t> allocators;
+    GetCommonAllocatorIds(names, C2Allocator::GRAPHIC, &allocators);
+    if (allocators.empty()) {
+        *isCompatible = false;
+        return OK;
+    }
+    CalculateMinMaxUsage(names, &minUsage, &maxUsage);
+    *isCompatible = ((maxUsage & minUsage) == minUsage);
+    return OK;
+}
+
+// static
+std::shared_ptr<C2GraphicBlock> CCodec::FetchGraphicBlock(
+        int32_t width,
+        int32_t height,
+        int32_t format,
+        uint64_t usage,
+        const std::vector<std::string> &names) {
+    uint32_t halPixelFormat = HAL_PIXEL_FORMAT_YCBCR_420_888;
+    if (!C2Mapper::mapPixelFormatFrameworkToCodec(format, &halPixelFormat)) {
+        ALOGD("Unrecognized pixel format: %d", format);
+        return nullptr;
+    }
+    uint64_t minUsage = 0;
+    uint64_t maxUsage = ~0ull;
+    std::set<C2Allocator::id_t> allocators;
+    GetCommonAllocatorIds(names, C2Allocator::GRAPHIC, &allocators);
+    if (allocators.empty()) {
+        allocators.insert(C2PlatformAllocatorStore::DEFAULT_GRAPHIC);
+    }
+    CalculateMinMaxUsage(names, &minUsage, &maxUsage);
+    minUsage |= usage;
+    if ((maxUsage & minUsage) != minUsage) {
+        allocators.clear();
+        allocators.insert(C2PlatformAllocatorStore::DEFAULT_GRAPHIC);
+    }
+    std::shared_ptr<C2GraphicBlock> block;
+    for (C2Allocator::id_t allocId : allocators) {
+        std::shared_ptr<C2BlockPool> pool;
+        c2_status_t err = CreateCodec2BlockPool(allocId, nullptr, &pool);
+        if (err != C2_OK || !pool) {
+            continue;
+        }
+        err = pool->fetchGraphicBlock(
+                width, height, halPixelFormat, C2MemoryUsage{minUsage}, &block);
+        if (err != C2_OK || !block) {
+            block.reset();
+            continue;
+        }
+        break;
+    }
+    return block;
+}
+
+}  // namespace android
+
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index 2bcb7e2..ae95336 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -29,6 +29,7 @@
 #include <android/hardware/cas/native/1.0/IDescrambler.h>
 #include <android/hardware/drm/1.0/types.h>
 #include <android-base/stringprintf.h>
+#include <binder/MemoryBase.h>
 #include <binder/MemoryDealer.h>
 #include <cutils/properties.h>
 #include <gui/Surface.h>
@@ -41,12 +42,12 @@
 #include <media/stagefright/foundation/hexdump.h>
 #include <media/stagefright/MediaCodec.h>
 #include <media/stagefright/MediaCodecConstants.h>
+#include <media/stagefright/SkipCutBuffer.h>
 #include <media/MediaCodecBuffer.h>
 #include <system/window.h>
 
 #include "CCodecBufferChannel.h"
 #include "Codec2Buffer.h"
-#include "SkipCutBuffer.h"
 
 namespace android {
 
@@ -249,7 +250,7 @@
 }
 
 CCodecBufferChannel::~CCodecBufferChannel() {
-    if (mCrypto != nullptr && mDealer != nullptr && mHeapSeqNum >= 0) {
+    if (mCrypto != nullptr && mHeapSeqNum >= 0) {
         mCrypto->unsetHeap(mHeapSeqNum);
     }
 }
@@ -409,6 +410,173 @@
     return OK;
 }
 
+status_t CCodecBufferChannel::attachBuffer(
+        const std::shared_ptr<C2Buffer> &c2Buffer,
+        const sp<MediaCodecBuffer> &buffer) {
+    if (!buffer->copy(c2Buffer)) {
+        return -ENOSYS;
+    }
+    return OK;
+}
+
+void CCodecBufferChannel::ensureDecryptDestination(size_t size) {
+    if (!mDecryptDestination || mDecryptDestination->size() < size) {
+        sp<IMemoryHeap> heap{new MemoryHeapBase(size * 2)};
+        if (mDecryptDestination && mCrypto && mHeapSeqNum >= 0) {
+            mCrypto->unsetHeap(mHeapSeqNum);
+        }
+        mDecryptDestination = new MemoryBase(heap, 0, size * 2);
+        if (mCrypto) {
+            mHeapSeqNum = mCrypto->setHeap(hardware::fromHeap(heap));
+        }
+    }
+}
+
+int32_t CCodecBufferChannel::getHeapSeqNum(const sp<HidlMemory> &memory) {
+    CHECK(mCrypto);
+    auto it = mHeapSeqNumMap.find(memory);
+    int32_t heapSeqNum = -1;
+    if (it == mHeapSeqNumMap.end()) {
+        heapSeqNum = mCrypto->setHeap(memory);
+        mHeapSeqNumMap.emplace(memory, heapSeqNum);
+    } else {
+        heapSeqNum = it->second;
+    }
+    return heapSeqNum;
+}
+
+status_t CCodecBufferChannel::attachEncryptedBuffer(
+        const sp<hardware::HidlMemory> &memory,
+        bool secure,
+        const uint8_t *key,
+        const uint8_t *iv,
+        CryptoPlugin::Mode mode,
+        CryptoPlugin::Pattern pattern,
+        size_t offset,
+        const CryptoPlugin::SubSample *subSamples,
+        size_t numSubSamples,
+        const sp<MediaCodecBuffer> &buffer) {
+    static const C2MemoryUsage kSecureUsage{C2MemoryUsage::READ_PROTECTED, 0};
+    static const C2MemoryUsage kDefaultReadWriteUsage{
+        C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
+
+    size_t size = 0;
+    for (size_t i = 0; i < numSubSamples; ++i) {
+        size += subSamples[i].mNumBytesOfClearData + subSamples[i].mNumBytesOfEncryptedData;
+    }
+    std::shared_ptr<C2BlockPool> pool = mBlockPools.lock()->inputPool;
+    std::shared_ptr<C2LinearBlock> block;
+    c2_status_t err = pool->fetchLinearBlock(
+            size,
+            secure ? kSecureUsage : kDefaultReadWriteUsage,
+            &block);
+    if (err != C2_OK) {
+        return NO_MEMORY;
+    }
+    if (!secure) {
+        ensureDecryptDestination(size);
+    }
+    ssize_t result = -1;
+    ssize_t codecDataOffset = 0;
+    if (mCrypto) {
+        AString errorDetailMsg;
+        int32_t heapSeqNum = getHeapSeqNum(memory);
+        hardware::drm::V1_0::SharedBuffer src{(uint32_t)heapSeqNum, offset, size};
+        hardware::drm::V1_0::DestinationBuffer dst;
+        if (secure) {
+            dst.type = DrmBufferType::NATIVE_HANDLE;
+            dst.secureMemory = hardware::hidl_handle(block->handle());
+        } else {
+            dst.type = DrmBufferType::SHARED_MEMORY;
+            IMemoryToSharedBuffer(
+                    mDecryptDestination, mHeapSeqNum, &dst.nonsecureMemory);
+        }
+        result = mCrypto->decrypt(
+                key, iv, mode, pattern, src, 0, subSamples, numSubSamples,
+                dst, &errorDetailMsg);
+        if (result < 0) {
+            return result;
+        }
+        if (dst.type == DrmBufferType::SHARED_MEMORY) {
+            C2WriteView view = block->map().get();
+            if (view.error() != C2_OK) {
+                return UNKNOWN_ERROR;
+            }
+            if (view.size() < result) {
+                return UNKNOWN_ERROR;
+            }
+            memcpy(view.data(), mDecryptDestination->unsecurePointer(), result);
+        }
+    } else {
+        // Here we cast CryptoPlugin::SubSample to hardware::cas::native::V1_0::SubSample
+        // directly; the structure definitions must match, as checked in DescramblerImpl.cpp.
+        hidl_vec<SubSample> hidlSubSamples;
+        hidlSubSamples.setToExternal((SubSample *)subSamples, numSubSamples, false /*own*/);
+
+        hardware::cas::native::V1_0::SharedBuffer src{*memory, offset, size};
+        hardware::cas::native::V1_0::DestinationBuffer dst;
+        if (secure) {
+            dst.type = BufferType::NATIVE_HANDLE;
+            dst.secureMemory = hardware::hidl_handle(block->handle());
+        } else {
+            dst.type = BufferType::SHARED_MEMORY;
+            dst.nonsecureMemory = src;
+        }
+
+        CasStatus status = CasStatus::OK;
+        hidl_string detailedError;
+        ScramblingControl sctrl = ScramblingControl::UNSCRAMBLED;
+
+        if (key != nullptr) {
+            sctrl = (ScramblingControl)key[0];
+            // Adjust for the PES offset
+            codecDataOffset = key[2] | (key[3] << 8);
+        }
+
+        auto returnVoid = mDescrambler->descramble(
+                sctrl,
+                hidlSubSamples,
+                src,
+                0,
+                dst,
+                0,
+                [&status, &result, &detailedError] (
+                        CasStatus _status, uint32_t _bytesWritten,
+                        const hidl_string& _detailedError) {
+                    status = _status;
+                    result = (ssize_t)_bytesWritten;
+                    detailedError = _detailedError;
+                });
+
+        if (!returnVoid.isOk() || status != CasStatus::OK || result < 0) {
+            ALOGI("[%s] descramble failed, trans=%s, status=%d, result=%zd",
+                    mName, returnVoid.description().c_str(), status, result);
+            return UNKNOWN_ERROR;
+        }
+
+        if (result < codecDataOffset) {
+            ALOGD("invalid codec data offset: %zd, result %zd", codecDataOffset, result);
+            return BAD_VALUE;
+        }
+    }
+    if (!secure) {
+        C2WriteView view = block->map().get();
+        if (view.error() != C2_OK) {
+            return UNKNOWN_ERROR;
+        }
+        if (view.size() < result) {
+            return UNKNOWN_ERROR;
+        }
+        memcpy(view.data(), mDecryptDestination->unsecurePointer(), result);
+    }
+    std::shared_ptr<C2Buffer> c2Buffer{C2Buffer::CreateLinearBuffer(
+            block->share(codecDataOffset, result - codecDataOffset, C2Fence{}))};
+    if (!buffer->copy(c2Buffer)) {
+        return -ENOSYS;
+    }
+    return OK;
+}
+
 status_t CCodecBufferChannel::queueInputBuffer(const sp<MediaCodecBuffer> &buffer) {
     QueueGuard guard(mSync);
     if (!guard.isRunning()) {
@@ -774,7 +942,9 @@
 }
 
 status_t CCodecBufferChannel::start(
-        const sp<AMessage> &inputFormat, const sp<AMessage> &outputFormat) {
+        const sp<AMessage> &inputFormat,
+        const sp<AMessage> &outputFormat,
+        bool buffersBoundToCodec) {
     C2StreamBufferTypeSetting::input iStreamFormat(0u);
     C2StreamBufferTypeSetting::output oStreamFormat(0u);
     C2PortReorderBufferDepthTuning::output reorderDepth;
@@ -897,7 +1067,9 @@
         input->numSlots = numInputSlots;
         input->extraBuffers.flush();
         input->numExtraSlots = 0u;
-        if (graphic) {
+        if (!buffersBoundToCodec) {
+            input->buffers.reset(new SlotInputBuffers(mName));
+        } else if (graphic) {
             if (mInputSurface) {
                 input->buffers.reset(new DummyInputBuffers(mName));
             } else if (mMetaMode == MODE_ANW) {
@@ -1071,7 +1243,7 @@
         output->outputDelay = outputDelayValue;
         output->numSlots = numOutputSlots;
         if (graphic) {
-            if (outputSurface) {
+            if (outputSurface || !buffersBoundToCodec) {
                 output->buffers.reset(new GraphicOutputBuffers(mName));
             } else {
                 output->buffers.reset(new RawGraphicOutputBuffers(numOutputSlots, mName));
@@ -1669,6 +1841,16 @@
 }
 
 void CCodecBufferChannel::setCrypto(const sp<ICrypto> &crypto) {
+    if (mCrypto != nullptr) {
+        for (std::pair<wp<HidlMemory>, int32_t> entry : mHeapSeqNumMap) {
+            mCrypto->unsetHeap(entry.second);
+        }
+        mHeapSeqNumMap.clear();
+        if (mHeapSeqNum >= 0) {
+            mCrypto->unsetHeap(mHeapSeqNum);
+            mHeapSeqNum = -1;
+        }
+    }
     mCrypto = crypto;
 }
 
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.h b/media/codec2/sfplugin/CCodecBufferChannel.h
index 82fec18..0263211 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.h
+++ b/media/codec2/sfplugin/CCodecBufferChannel.h
@@ -70,6 +70,20 @@
             const CryptoPlugin::SubSample *subSamples,
             size_t numSubSamples,
             AString *errorDetailMsg) override;
+    virtual status_t attachBuffer(
+            const std::shared_ptr<C2Buffer> &c2Buffer,
+            const sp<MediaCodecBuffer> &buffer) override;
+    virtual status_t attachEncryptedBuffer(
+            const sp<hardware::HidlMemory> &memory,
+            bool secure,
+            const uint8_t *key,
+            const uint8_t *iv,
+            CryptoPlugin::Mode mode,
+            CryptoPlugin::Pattern pattern,
+            size_t offset,
+            const CryptoPlugin::SubSample *subSamples,
+            size_t numSubSamples,
+            const sp<MediaCodecBuffer> &buffer) override;
     virtual status_t renderOutputBuffer(
             const sp<MediaCodecBuffer> &buffer, int64_t timestampNs) override;
     virtual status_t discardBuffer(const sp<MediaCodecBuffer> &buffer) override;
@@ -108,7 +122,10 @@
      * Start queueing buffers to the component. This object should never queue
      * buffers before this call has completed.
      */
-    status_t start(const sp<AMessage> &inputFormat, const sp<AMessage> &outputFormat);
+    status_t start(
+            const sp<AMessage> &inputFormat,
+            const sp<AMessage> &outputFormat,
+            bool buffersBoundToCodec);
 
     /**
      * Request initial input buffers to be filled by client.
@@ -216,11 +233,14 @@
             std::unique_ptr<C2Work> work, const sp<AMessage> &outputFormat,
             const C2StreamInitDataInfo::output *initData);
     void sendOutputBuffers();
+    void ensureDecryptDestination(size_t size);
+    int32_t getHeapSeqNum(const sp<hardware::HidlMemory> &memory);
 
     QueueSync mSync;
     sp<MemoryDealer> mDealer;
     sp<IMemory> mDecryptDestination;
     int32_t mHeapSeqNum;
+    std::map<wp<hardware::HidlMemory>, int32_t> mHeapSeqNumMap;
 
     std::shared_ptr<Codec2Client::Component> mComponent;
     std::string mComponentName; ///< component name for debugging
diff --git a/media/codec2/sfplugin/CCodecBuffers.cpp b/media/codec2/sfplugin/CCodecBuffers.cpp
index ed8b832..265eeb7 100644
--- a/media/codec2/sfplugin/CCodecBuffers.cpp
+++ b/media/codec2/sfplugin/CCodecBuffers.cpp
@@ -22,6 +22,7 @@
 
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/MediaCodecConstants.h>
+#include <media/stagefright/SkipCutBuffer.h>
 
 #include "CCodecBuffers.h"
 
@@ -493,6 +494,44 @@
     return mAllocate();
 }
 
+// SlotInputBuffers
+
+bool SlotInputBuffers::requestNewBuffer(size_t *index, sp<MediaCodecBuffer> *buffer) {
+    sp<Codec2Buffer> newBuffer = createNewBuffer();
+    *index = mImpl.assignSlot(newBuffer);
+    *buffer = newBuffer;
+    return true;
+}
+
+bool SlotInputBuffers::releaseBuffer(
+        const sp<MediaCodecBuffer> &buffer,
+        std::shared_ptr<C2Buffer> *c2buffer,
+        bool release) {
+    return mImpl.releaseSlot(buffer, c2buffer, release);
+}
+
+bool SlotInputBuffers::expireComponentBuffer(
+        const std::shared_ptr<C2Buffer> &c2buffer) {
+    return mImpl.expireComponentBuffer(c2buffer);
+}
+
+void SlotInputBuffers::flush() {
+    mImpl.flush();
+}
+
+std::unique_ptr<InputBuffers> SlotInputBuffers::toArrayMode(size_t) {
+    TRESPASS("Array mode should not be called in non-legacy mode");
+    return nullptr;
+}
+
+size_t SlotInputBuffers::numClientBuffers() const {
+    return mImpl.numClientBuffers();
+}
+
+sp<Codec2Buffer> SlotInputBuffers::createNewBuffer() {
+    return new DummyContainerBuffer{mFormat, nullptr};
+}
+
 // LinearInputBuffers
 
 bool LinearInputBuffers::requestNewBuffer(size_t *index, sp<MediaCodecBuffer> *buffer) {
diff --git a/media/codec2/sfplugin/CCodecBuffers.h b/media/codec2/sfplugin/CCodecBuffers.h
index 2cb6b81..bae08e0 100644
--- a/media/codec2/sfplugin/CCodecBuffers.h
+++ b/media/codec2/sfplugin/CCodecBuffers.h
@@ -25,10 +25,11 @@
 #include <media/MediaCodecBuffer.h>
 
 #include "Codec2Buffer.h"
-#include "SkipCutBuffer.h"
 
 namespace android {
 
+class SkipCutBuffer;
+
 constexpr size_t kLinearBufferSize = 1048576;
 // This can fit 4K RGBA frame, and most likely client won't need more than this.
 constexpr size_t kMaxLinearBufferSize = 4096 * 2304 * 4;
@@ -546,6 +547,36 @@
     std::function<sp<Codec2Buffer>()> mAllocate;
 };
 
+class SlotInputBuffers : public InputBuffers {
+public:
+    SlotInputBuffers(const char *componentName, const char *name = "Slot-Input")
+        : InputBuffers(componentName, name),
+          mImpl(mName) { }
+    ~SlotInputBuffers() override = default;
+
+    bool requestNewBuffer(size_t *index, sp<MediaCodecBuffer> *buffer) final;
+
+    bool releaseBuffer(
+            const sp<MediaCodecBuffer> &buffer,
+            std::shared_ptr<C2Buffer> *c2buffer,
+            bool release) final;
+
+    bool expireComponentBuffer(
+            const std::shared_ptr<C2Buffer> &c2buffer) final;
+
+    void flush() final;
+
+    std::unique_ptr<InputBuffers> toArrayMode(size_t size) final;
+
+    size_t numClientBuffers() const final;
+
+protected:
+    sp<Codec2Buffer> createNewBuffer() final;
+
+private:
+    FlexBuffersImpl mImpl;
+};
+
 class LinearInputBuffers : public InputBuffers {
 public:
     LinearInputBuffers(const char *componentName, const char *name = "1D-Input")
diff --git a/media/codec2/sfplugin/CCodecConfig.cpp b/media/codec2/sfplugin/CCodecConfig.cpp
index ee3cdf6..d2f5ea7 100644
--- a/media/codec2/sfplugin/CCodecConfig.cpp
+++ b/media/codec2/sfplugin/CCodecConfig.cpp
@@ -595,34 +595,18 @@
         .withMappers([](C2Value v) -> C2Value {
             int32_t value;
             if (v.get(&value)) {
-                switch (value) {
-                    case COLOR_FormatSurface:
-                        return (uint32_t)HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
-                    case COLOR_FormatYUV420Flexible:
-                        return (uint32_t)HAL_PIXEL_FORMAT_YCBCR_420_888;
-                    case COLOR_FormatYUV420Planar:
-                    case COLOR_FormatYUV420SemiPlanar:
-                    case COLOR_FormatYUV420PackedPlanar:
-                    case COLOR_FormatYUV420PackedSemiPlanar:
-                        return (uint32_t)HAL_PIXEL_FORMAT_YV12;
-                    default:
-                        // TODO: support some sort of passthrough
-                        break;
+                uint32_t result;
+                if (C2Mapper::mapPixelFormatFrameworkToCodec(value, &result)) {
+                    return result;
                 }
             }
             return C2Value();
         }, [](C2Value v) -> C2Value {
             uint32_t value;
             if (v.get(&value)) {
-                switch (value) {
-                    case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
-                        return COLOR_FormatSurface;
-                    case HAL_PIXEL_FORMAT_YV12:
-                    case HAL_PIXEL_FORMAT_YCBCR_420_888:
-                        return COLOR_FormatYUV420Flexible;
-                    default:
-                        // TODO: support some sort of passthrough
-                        break;
+                int32_t result;
+                if (C2Mapper::mapPixelFormatCodecToFramework(value, &result)) {
+                    return result;
                 }
             }
             return C2Value();
diff --git a/media/codec2/sfplugin/CCodecConfig.h b/media/codec2/sfplugin/CCodecConfig.h
index a61c8b7..093bfdd 100644
--- a/media/codec2/sfplugin/CCodecConfig.h
+++ b/media/codec2/sfplugin/CCodecConfig.h
@@ -118,6 +118,7 @@
     sp<AMessage> mOutputFormat;
 
     bool mUsingSurface; ///< using input or output surface
+    bool mBuffersBoundToCodec; ///< whether buffers are bound to codecs or not.
 
     std::shared_ptr<InputSurfaceWrapper> mInputSurface;
     std::unique_ptr<InputSurfaceWrapper::Config> mISConfig;
diff --git a/media/codec2/sfplugin/Codec2Buffer.h b/media/codec2/sfplugin/Codec2Buffer.h
index 9291c52..09475ef 100644
--- a/media/codec2/sfplugin/Codec2Buffer.h
+++ b/media/codec2/sfplugin/Codec2Buffer.h
@@ -57,36 +57,6 @@
     using MediaCodecBuffer::MediaCodecBuffer;
     ~Codec2Buffer() override = default;
 
-    /**
-     * \return  C2Buffer object represents this buffer.
-     */
-    virtual std::shared_ptr<C2Buffer> asC2Buffer() = 0;
-
-    /**
-     * Test if we can copy the content of |buffer| into this object.
-     *
-     * \param   buffer  C2Buffer object to copy.
-     * \return  true    if the content of buffer can be copied over to this buffer
-     *          false   otherwise.
-     */
-    virtual bool canCopy(const std::shared_ptr<C2Buffer> &buffer) const {
-        (void)buffer;
-        return false;
-    }
-
-    /**
-     * Copy the content of |buffer| into this object. This method assumes that
-     * canCopy() check already passed.
-     *
-     * \param   buffer  C2Buffer object to copy.
-     * \return  true    if successful
-     *          false   otherwise.
-     */
-    virtual bool copy(const std::shared_ptr<C2Buffer> &buffer) {
-        (void)buffer;
-        return false;
-    }
-
     sp<ABuffer> getImageData() const { return mImageData; }
 
 protected:
diff --git a/media/codec2/sfplugin/Codec2InfoBuilder.cpp b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
index 745d701..5990116 100644
--- a/media/codec2/sfplugin/Codec2InfoBuilder.cpp
+++ b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
@@ -43,13 +43,12 @@
 #include <codec2/hidl/client.h>
 #include <cutils/native_handle.h>
 #include <media/omx/1.0/WOmxNode.h>
-#include <media/stagefright/MediaCodecConstants.h>
 #include <media/stagefright/foundation/ALookup.h>
 #include <media/stagefright/foundation/MediaDefs.h>
 #include <media/stagefright/omx/OMXUtils.h>
 #include <media/stagefright/xmlparser/MediaCodecsXmlParser.h>
-
-#include "Codec2InfoBuilder.h"
+#include <media/stagefright/Codec2InfoBuilder.h>
+#include <media/stagefright/MediaCodecConstants.h>
 
 namespace android {
 
diff --git a/media/codec2/sfplugin/SkipCutBuffer.cpp b/media/codec2/sfplugin/SkipCutBuffer.cpp
deleted file mode 100644
index 8d1de65..0000000
--- a/media/codec2/sfplugin/SkipCutBuffer.cpp
+++ /dev/null
@@ -1,206 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "SkipCutBuffer"
-#include <utils/Log.h>
-
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/MediaBuffer.h>
-#include "SkipCutBuffer.h"
-
-namespace android {
-
-SkipCutBuffer::SkipCutBuffer(size_t skip, size_t cut, size_t num16BitChannels) {
-
-    mWriteHead = 0;
-    mReadHead = 0;
-    mCapacity = 0;
-    mCutBuffer = nullptr;
-
-    if (num16BitChannels == 0 || num16BitChannels > INT32_MAX / 2) {
-        ALOGW("# channels out of range: %zu, using passthrough instead", num16BitChannels);
-        return;
-    }
-    size_t frameSize = num16BitChannels * 2;
-    if (skip > INT32_MAX / frameSize || cut > INT32_MAX / frameSize
-            || cut * frameSize > INT32_MAX - 4096) {
-        ALOGW("out of range skip/cut: %zu/%zu, using passthrough instead",
-                skip, cut);
-        return;
-    }
-    skip *= frameSize;
-    cut *= frameSize;
-
-    mFrontPadding = mSkip = skip;
-    mBackPadding = cut;
-    mCapacity = cut + 4096;
-    mCutBuffer = new (std::nothrow) char[mCapacity];
-    ALOGV("skipcutbuffer %zu %zu %d", skip, cut, mCapacity);
-}
-
-SkipCutBuffer::~SkipCutBuffer() {
-    delete[] mCutBuffer;
-}
-
-void SkipCutBuffer::submit(MediaBuffer *buffer) {
-    if (mCutBuffer == nullptr) {
-        // passthrough mode
-        return;
-    }
-
-    int32_t offset = buffer->range_offset();
-    int32_t buflen = buffer->range_length();
-
-    // drop the initial data from the buffer if needed
-    if (mFrontPadding > 0) {
-        // still data left to drop
-        int32_t to_drop = (buflen < mFrontPadding) ? buflen : mFrontPadding;
-        offset += to_drop;
-        buflen -= to_drop;
-        buffer->set_range(offset, buflen);
-        mFrontPadding -= to_drop;
-    }
-
-
-    // append data to cutbuffer
-    char *src = ((char*) buffer->data()) + offset;
-    write(src, buflen);
-
-
-    // the mediabuffer is now empty. Fill it from cutbuffer, always leaving
-    // at least mBackPadding bytes in the cutbuffer
-    char *dst = (char*) buffer->data();
-    size_t copied = read(dst, buffer->size());
-    buffer->set_range(0, copied);
-}
-
-template <typename T>
-void SkipCutBuffer::submitInternal(const sp<T>& buffer) {
-    if (mCutBuffer == nullptr) {
-        // passthrough mode
-        return;
-    }
-
-    int32_t offset = buffer->offset();
-    int32_t buflen = buffer->size();
-
-    // drop the initial data from the buffer if needed
-    if (mFrontPadding > 0) {
-        // still data left to drop
-        int32_t to_drop = (buflen < mFrontPadding) ? buflen : mFrontPadding;
-        offset += to_drop;
-        buflen -= to_drop;
-        buffer->setRange(offset, buflen);
-        mFrontPadding -= to_drop;
-    }
-
-
-    // append data to cutbuffer
-    char *src = (char*) buffer->data();
-    write(src, buflen);
-
-
-    // the mediabuffer is now empty. Fill it from cutbuffer, always leaving
-    // at least mBackPadding bytes in the cutbuffer
-    char *dst = (char*) buffer->base();
-    size_t copied = read(dst, buffer->capacity());
-    buffer->setRange(0, copied);
-}
-
-void SkipCutBuffer::submit(const sp<ABuffer>& buffer) {
-    submitInternal(buffer);
-}
-
-void SkipCutBuffer::submit(const sp<MediaCodecBuffer>& buffer) {
-    submitInternal(buffer);
-}
-
-void SkipCutBuffer::clear() {
-    mWriteHead = mReadHead = 0;
-    mFrontPadding = mSkip;
-}
-
-void SkipCutBuffer::write(const char *src, size_t num) {
-    int32_t sizeused = (mWriteHead - mReadHead);
-    if (sizeused < 0) sizeused += mCapacity;
-
-    // Everything must fit. Make sure the buffer is a little larger than needed,
-    // so there is no ambiguity as to whether mWriteHead == mReadHead means buffer
-    // full or empty
-    size_t available = mCapacity - sizeused - 32;
-    if (available < num) {
-        int32_t newcapacity = mCapacity + (num - available);
-        char * newbuffer = new char[newcapacity];
-        memcpy(newbuffer, mCutBuffer, mCapacity);
-        delete [] mCutBuffer;
-        mCapacity = newcapacity;
-        mCutBuffer = newbuffer;
-        ALOGV("reallocated buffer at size %d", newcapacity);
-    }
-
-    size_t copyfirst = (mCapacity - mWriteHead);
-    if (copyfirst > num) copyfirst = num;
-    if (copyfirst) {
-        memcpy(mCutBuffer + mWriteHead, src, copyfirst);
-        num -= copyfirst;
-        src += copyfirst;
-        mWriteHead += copyfirst;
-        CHECK_LE(mWriteHead, mCapacity);
-        if (mWriteHead == mCapacity) mWriteHead = 0;
-        if (num) {
-            memcpy(mCutBuffer, src, num);
-            mWriteHead += num;
-        }
-    }
-}
-
-size_t SkipCutBuffer::read(char *dst, size_t num) {
-    int32_t available = (mWriteHead - mReadHead);
-    if (available < 0) available += mCapacity;
-
-    available -= mBackPadding;
-    if (available <=0) {
-        return 0;
-    }
-    if (available < int32_t(num)) {
-        num = available;
-    }
-
-    size_t copyfirst = (mCapacity - mReadHead);
-    if (copyfirst > num) copyfirst = num;
-    if (copyfirst) {
-        memcpy(dst, mCutBuffer + mReadHead, copyfirst);
-        num -= copyfirst;
-        dst += copyfirst;
-        mReadHead += copyfirst;
-        CHECK_LE(mReadHead, mCapacity);
-        if (mReadHead == mCapacity) mReadHead = 0;
-        if (num) {
-            memcpy(dst, mCutBuffer, num);
-            mReadHead += num;
-        }
-    }
-    return available;
-}
-
-size_t SkipCutBuffer::size() {
-    int32_t available = (mWriteHead - mReadHead);
-    if (available < 0) available += mCapacity;
-    return available;
-}
-
-}  // namespace android
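For reference, the write()/read() pair removed above keeps a ring buffer that is deliberately a little larger than the largest payload, so that mWriteHead == mReadHead can only ever mean "empty". A minimal sketch of the occupancy rule both paths rely on (illustrative only, not part of the patch; the function name is hypothetical):

    #include <cstdint>

    // Bytes currently stored between readHead and writeHead in a ring buffer of
    // the given capacity; returns 0 whenever the two heads coincide, which is
    // the "empty" case the removed code preserves by over-allocating.
    static int32_t ringBytesUsed(int32_t writeHead, int32_t readHead, int32_t capacity) {
        int32_t used = writeHead - readHead;
        if (used < 0) {
            used += capacity;  // the writer has wrapped past the end of the buffer
        }
        return used;
    }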
diff --git a/media/codec2/sfplugin/SkipCutBuffer.h b/media/codec2/sfplugin/SkipCutBuffer.h
deleted file mode 100644
index 0fb5690..0000000
--- a/media/codec2/sfplugin/SkipCutBuffer.h
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef SKIP_CUT_BUFFER_H_
-
-#define SKIP_CUT_BUFFER_H_
-
-#include <media/MediaCodecBuffer.h>
-#include <media/stagefright/MediaBuffer.h>
-#include <media/stagefright/foundation/ABuffer.h>
-
-namespace android {
-
-/**
- * utility class to cut the start and end off a stream of data in MediaBuffers
- *
- */
-class SkipCutBuffer: public RefBase {
- public:
-    // 'skip' is the number of frames to skip from the beginning
-    // 'cut' is the number of frames to cut from the end
-    // 'num16BitChannels' is the number of channels, which are assumed to be 16 bit wide each
-    SkipCutBuffer(size_t skip, size_t cut, size_t num16Channels);
-
-    // Submit one MediaBuffer for skipping and cutting. This may consume all or
-    // some of the data in the buffer, or it may add data to it.
-    // After this, the caller should continue processing the buffer as usual.
-    void submit(MediaBuffer *buffer);
-    void submit(const sp<ABuffer>& buffer);    // same as above, but with an ABuffer
-    void submit(const sp<MediaCodecBuffer>& buffer);    // same as above, but with an ABuffer
-    void clear();
-    size_t size(); // how many bytes are currently stored in the buffer
-
- protected:
-    virtual ~SkipCutBuffer();
-
- private:
-    void write(const char *src, size_t num);
-    size_t read(char *dst, size_t num);
-    template <typename T>
-    void submitInternal(const sp<T>& buffer);
-    int32_t mSkip;
-    int32_t mFrontPadding;
-    int32_t mBackPadding;
-    int32_t mWriteHead;
-    int32_t mReadHead;
-    int32_t mCapacity;
-    char* mCutBuffer;
-    DISALLOW_EVIL_CONSTRUCTORS(SkipCutBuffer);
-};
-
-}  // namespace android
-
-#endif  // OMX_CODEC_H_
diff --git a/media/codec2/sfplugin/CCodec.h b/media/codec2/sfplugin/include/media/stagefright/CCodec.h
similarity index 87%
rename from media/codec2/sfplugin/CCodec.h
rename to media/codec2/sfplugin/include/media/stagefright/CCodec.h
index b0b3c4f..6ff2c4a 100644
--- a/media/codec2/sfplugin/CCodec.h
+++ b/media/codec2/sfplugin/include/media/stagefright/CCodec.h
@@ -36,12 +36,11 @@
 #include <hardware/gralloc.h>
 #include <nativebase/nativebase.h>
 
-#include "CCodecConfig.h"
-
 namespace android {
 
 class CCodecBufferChannel;
 class InputSurfaceWrapper;
+struct CCodecConfig;
 struct MediaCodecInfo;
 
 class CCodec : public CodecBase {
@@ -69,6 +68,24 @@
     void onWorkDone(std::list<std::unique_ptr<C2Work>> &workItems);
     void onInputBufferDone(uint64_t frameIndex, size_t arrayIndex);
 
+    static PersistentSurface *CreateInputSurface();
+
+    static status_t CanFetchLinearBlock(
+            const std::vector<std::string> &names, const C2MemoryUsage &usage, bool *isCompatible);
+
+    static std::shared_ptr<C2LinearBlock> FetchLinearBlock(
+            size_t capacity, const C2MemoryUsage &usage, const std::vector<std::string> &names);
+
+    static status_t CanFetchGraphicBlock(
+            const std::vector<std::string> &names, bool *isCompatible);
+
+    static std::shared_ptr<C2GraphicBlock> FetchGraphicBlock(
+            int32_t width,
+            int32_t height,
+            int32_t format,
+            uint64_t usage,
+            const std::vector<std::string> &names);
+
 protected:
     virtual ~CCodec();
 
@@ -173,7 +190,7 @@
 
     Mutexed<NamedTimePoint> mDeadline;
     typedef CCodecConfig Config;
-    Mutexed<Config> mConfig;
+    Mutexed<std::unique_ptr<CCodecConfig>> mConfig;
     Mutexed<std::list<std::unique_ptr<C2Work>>> mWorkDoneQueue;
 
     friend class CCodecCallbackImpl;
diff --git a/media/codec2/sfplugin/Codec2InfoBuilder.h b/media/codec2/sfplugin/include/media/stagefright/Codec2InfoBuilder.h
similarity index 100%
rename from media/codec2/sfplugin/Codec2InfoBuilder.h
rename to media/codec2/sfplugin/include/media/stagefright/Codec2InfoBuilder.h
diff --git a/media/codec2/sfplugin/utils/Android.bp b/media/codec2/sfplugin/utils/Android.bp
index 8c8f025..205abdc 100644
--- a/media/codec2/sfplugin/utils/Android.bp
+++ b/media/codec2/sfplugin/utils/Android.bp
@@ -1,6 +1,7 @@
 cc_library_shared {
     name: "libsfplugin_ccodec_utils",
     vendor_available: true,
+    double_loadable: true,
 
     srcs: [
         "Codec2BufferUtils.cpp",
diff --git a/media/codec2/sfplugin/utils/Codec2Mapper.cpp b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
index 2f3d688..903db6c 100644
--- a/media/codec2/sfplugin/utils/Codec2Mapper.cpp
+++ b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
@@ -949,3 +949,41 @@
 bool C2Mapper::map(ColorAspects::Transfer from, C2Color::transfer_t *to) {
     return sColorTransfersSf.map(from, to);
 }
+
+// static
+bool C2Mapper::mapPixelFormatFrameworkToCodec(
+        int32_t frameworkValue, uint32_t *c2Value) {
+    switch (frameworkValue) {
+        case COLOR_FormatSurface:
+            *c2Value = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+            return true;
+        case COLOR_FormatYUV420Flexible:
+            *c2Value = HAL_PIXEL_FORMAT_YCBCR_420_888;
+            return true;
+        case COLOR_FormatYUV420Planar:
+        case COLOR_FormatYUV420SemiPlanar:
+        case COLOR_FormatYUV420PackedPlanar:
+        case COLOR_FormatYUV420PackedSemiPlanar:
+            *c2Value = HAL_PIXEL_FORMAT_YV12;
+            return true;
+        default:
+            // TODO: support some sort of passthrough
+            return false;
+    }
+}
+
+// static
+bool C2Mapper::mapPixelFormatCodecToFramework(
+        uint32_t c2Value, int32_t *frameworkValue) {
+    switch (c2Value) {
+        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
+            *frameworkValue = COLOR_FormatSurface;
+            return true;
+        case HAL_PIXEL_FORMAT_YV12:
+        case HAL_PIXEL_FORMAT_YCBCR_420_888:
+            *frameworkValue = COLOR_FormatYUV420Flexible;
+            return true;
+        default:
+            return false;
+    }
+}
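The two C2Mapper helpers added above centralize the pixel-format tables that CCodecConfig.cpp previously open-coded in its withMappers() lambdas. A minimal sketch of round-tripping a framework color format through them; only the C2Mapper signatures and the constants come from this patch, while the includes, namespace handling, and function name are assumptions of the sketch:

    #include <media/stagefright/MediaCodecConstants.h>  // COLOR_Format* constants (assumed header)
    #include <Codec2Mapper.h>                           // C2Mapper (assumed include path)

    using namespace android;

    static bool roundTripsFlexibleYuv() {
        uint32_t halFormat = 0;
        if (!C2Mapper::mapPixelFormatFrameworkToCodec(COLOR_FormatYUV420Flexible, &halFormat)) {
            return false;  // unmapped framework value; CCodecConfig falls back to C2Value()
        }
        // Per the mapping above, halFormat is HAL_PIXEL_FORMAT_YCBCR_420_888 at this point.
        int32_t frameworkFormat = 0;
        if (!C2Mapper::mapPixelFormatCodecToFramework(halFormat, &frameworkFormat)) {
            return false;
        }
        return frameworkFormat == COLOR_FormatYUV420Flexible;
    }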
diff --git a/media/codec2/sfplugin/utils/Codec2Mapper.h b/media/codec2/sfplugin/utils/Codec2Mapper.h
index cec6f07..797c8a8 100644
--- a/media/codec2/sfplugin/utils/Codec2Mapper.h
+++ b/media/codec2/sfplugin/utils/Codec2Mapper.h
@@ -75,6 +75,11 @@
         static bool map(ColorAspects::MatrixCoeffs, C2Color::matrix_t*);
         static bool map(C2Color::transfer_t, ColorAspects::Transfer*);
         static bool map(ColorAspects::Transfer, C2Color::transfer_t*);
+
+        static bool mapPixelFormatFrameworkToCodec(
+                int32_t frameworkValue, uint32_t *c2Value);
+        static bool mapPixelFormatCodecToFramework(
+                uint32_t c2Value, int32_t *frameworkValue);
     };
 }
 
diff --git a/media/codec2/vndk/Android.bp b/media/codec2/vndk/Android.bp
index 071bb74..a1f145b 100644
--- a/media/codec2/vndk/Android.bp
+++ b/media/codec2/vndk/Android.bp
@@ -14,6 +14,8 @@
 cc_library_shared {
     name: "libcodec2_vndk",
     vendor_available: true,
+    // TODO: b/147147883
+    double_loadable: true,
 
     srcs: [
         "C2AllocatorBlob.cpp",
@@ -68,6 +70,8 @@
         "libutils",
     ],
 
+    tidy: false, // b/146435095, clang-tidy segmentation fault
+
     cflags: [
         "-Werror",
         "-Wall",
diff --git a/media/codec2/vndk/C2AllocatorIon.cpp b/media/codec2/vndk/C2AllocatorIon.cpp
index 0470a31..dfc90b1 100644
--- a/media/codec2/vndk/C2AllocatorIon.cpp
+++ b/media/codec2/vndk/C2AllocatorIon.cpp
@@ -262,7 +262,7 @@
                 *fence = C2Fence(); // not using fences
             }
             (void)mMappings.erase(it);
-            ALOGV("successfully unmapped: %d", mHandle.bufferFd());
+            ALOGV("successfully unmapped: addr=%p size=%zu fd=%d", addr, size, mHandle.bufferFd());
             return C2_OK;
         }
         ALOGD("unmap failed to find specified map");
diff --git a/media/extractors/Android.bp b/media/extractors/Android.bp
new file mode 100644
index 0000000..bb42580
--- /dev/null
+++ b/media/extractors/Android.bp
@@ -0,0 +1,46 @@
+// Copyright (C) 2020 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+cc_defaults {
+    name: "extractor-defaults",
+
+    include_dirs: [
+        "frameworks/av/media/libstagefright/include",
+    ],
+
+    shared_libs: [
+        "liblog",
+        "libmediandk#29",
+    ],
+
+    relative_install_path: "extractors",
+
+    compile_multilib: "first",
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+        "-fvisibility=hidden",
+    ],
+
+    version_script: "exports.lds",
+
+    sanitize: {
+        cfi: true,
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+    },
+}
\ No newline at end of file
diff --git a/media/extractors/aac/Android.bp b/media/extractors/aac/Android.bp
index a58167a..60d3ae1 100644
--- a/media/extractors/aac/Android.bp
+++ b/media/extractors/aac/Android.bp
@@ -1,40 +1,13 @@
-cc_library_shared {
+cc_library {
+    name: "libaacextractor",
+    defaults: ["extractor-defaults"],
 
     srcs: ["AACExtractor.cpp"],
 
-    include_dirs: [
-        "frameworks/av/media/libstagefright/",
-    ],
-
-    shared_libs: [
-        "liblog",
-        "libmediandk",
-    ],
-
     static_libs: [
         "libstagefright_foundation",
         "libstagefright_metadatautils",
         "libutils",
     ],
 
-    name: "libaacextractor",
-    relative_install_path: "extractors",
-
-    compile_multilib: "first",
-
-    cflags: [
-        "-Werror",
-        "-Wall",
-        "-fvisibility=hidden",
-    ],
-    version_script: "exports.lds",
-
-    sanitize: {
-        cfi: true,
-        misc_undefined: [
-            "unsigned-integer-overflow",
-            "signed-integer-overflow",
-        ],
-    },
-
 }
diff --git a/media/extractors/amr/Android.bp b/media/extractors/amr/Android.bp
index 4bd933d..49c9567 100644
--- a/media/extractors/amr/Android.bp
+++ b/media/extractors/amr/Android.bp
@@ -1,38 +1,11 @@
-cc_library_shared {
+cc_library {
+    name: "libamrextractor",
+    defaults: ["extractor-defaults"],
 
     srcs: ["AMRExtractor.cpp"],
 
-    include_dirs: [
-        "frameworks/av/media/libstagefright/include",
-    ],
-
-    shared_libs: [
-        "liblog",
-        "libmediandk",
-    ],
-
     static_libs: [
         "libstagefright_foundation",
     ],
 
-    name: "libamrextractor",
-    relative_install_path: "extractors",
-
-    compile_multilib: "first",
-
-    cflags: [
-        "-Werror",
-        "-Wall",
-        "-fvisibility=hidden",
-    ],
-    version_script: "exports.lds",
-
-    sanitize: {
-        cfi: true,
-        misc_undefined: [
-            "unsigned-integer-overflow",
-            "signed-integer-overflow",
-        ],
-    },
-
 }
diff --git a/media/extractors/flac/Android.bp b/media/extractors/flac/Android.bp
index 3a3d051..3675611 100644
--- a/media/extractors/flac/Android.bp
+++ b/media/extractors/flac/Android.bp
@@ -1,16 +1,15 @@
-cc_library_shared {
+cc_library {
+    name: "libflacextractor",
+    defaults: ["extractor-defaults"],
 
     srcs: ["FLACExtractor.cpp"],
 
     include_dirs: [
-        "frameworks/av/media/libstagefright/include",
         "external/flac/include",
     ],
 
     shared_libs: [
         "libbinder_ndk",
-        "liblog",
-        "libmediandk",
     ],
 
     static_libs: [
@@ -21,24 +20,4 @@
         "libutils",
     ],
 
-    name: "libflacextractor",
-    relative_install_path: "extractors",
-
-    compile_multilib: "first",
-
-    cflags: [
-        "-Werror",
-        "-Wall",
-        "-fvisibility=hidden",
-    ],
-    version_script: "exports.lds",
-
-    sanitize: {
-        cfi: true,
-        misc_undefined: [
-            "unsigned-integer-overflow",
-            "signed-integer-overflow",
-        ],
-    },
-
 }
diff --git a/media/extractors/midi/Android.bp b/media/extractors/midi/Android.bp
index d36cb49..592ffa9 100644
--- a/media/extractors/midi/Android.bp
+++ b/media/extractors/midi/Android.bp
@@ -1,43 +1,18 @@
-cc_library_shared {
+cc_library {
+    name: "libmidiextractor",
+    defaults: ["extractor-defaults"],
 
     srcs: ["MidiExtractor.cpp"],
 
-    include_dirs: [
-        "frameworks/av/media/libstagefright/include",
-    ],
-
     header_libs: [
         "libmedia_headers",
     ],
 
-    shared_libs: [
-        "liblog",
-        "libmediandk",
-    ],
-
     static_libs: [
         "libmedia_midiiowrapper",
         "libsonivox",
         "libstagefright_foundation"
     ],
-    name: "libmidiextractor",
-    relative_install_path: "extractors",
 
-    compile_multilib: "first",
-
-    cflags: [
-        "-Werror",
-        "-Wall",
-        "-fvisibility=hidden",
-    ],
-    version_script: "exports.lds",
-
-    sanitize: {
-        cfi: true,
-        misc_undefined: [
-            "unsigned-integer-overflow",
-            "signed-integer-overflow",
-        ],
-    },
 
 }
diff --git a/media/extractors/mkv/Android.bp b/media/extractors/mkv/Android.bp
index 38821fd..7ad8cc1 100644
--- a/media/extractors/mkv/Android.bp
+++ b/media/extractors/mkv/Android.bp
@@ -1,4 +1,6 @@
-cc_library_shared {
+cc_library {
+    name: "libmkvextractor",
+    defaults: ["extractor-defaults"],
 
     srcs: ["MatroskaExtractor.cpp"],
 
@@ -6,12 +8,9 @@
         "external/flac/include",
         "external/libvpx/libwebm",
         "frameworks/av/media/libstagefright/flac/dec",
-        "frameworks/av/media/libstagefright/include",
     ],
 
     shared_libs: [
-        "liblog",
-        "libmediandk",
         "libstagefright_flacdec",
     ],
 
@@ -22,24 +21,4 @@
         "libutils",
     ],
 
-    name: "libmkvextractor",
-    relative_install_path: "extractors",
-
-    compile_multilib: "first",
-
-    cflags: [
-        "-Werror",
-        "-Wall",
-        "-fvisibility=hidden",
-    ],
-    version_script: "exports.lds",
-
-    sanitize: {
-        cfi: true,
-        misc_undefined: [
-            "unsigned-integer-overflow",
-            "signed-integer-overflow",
-        ],
-    },
-
 }
diff --git a/media/extractors/mkv/MatroskaExtractor.cpp b/media/extractors/mkv/MatroskaExtractor.cpp
index 23022e4..044c4d0 100644
--- a/media/extractors/mkv/MatroskaExtractor.cpp
+++ b/media/extractors/mkv/MatroskaExtractor.cpp
@@ -22,7 +22,7 @@
 #include "MatroskaExtractor.h"
 #include "common/webmids.h"
 
-#include <media/DataSourceBase.h>
+#include <media/stagefright/DataSourceBase.h>
 #include <media/ExtractorUtils.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AUtils.h>
diff --git a/media/extractors/mp3/Android.bp b/media/extractors/mp3/Android.bp
index 4e2f248..102ac81 100644
--- a/media/extractors/mp3/Android.bp
+++ b/media/extractors/mp3/Android.bp
@@ -1,44 +1,16 @@
-cc_library_shared {
-
+cc_library {
+    name: "libmp3extractor",
+    defaults: ["extractor-defaults"],
     srcs: [
             "MP3Extractor.cpp",
             "VBRISeeker.cpp",
             "XINGSeeker.cpp",
     ],
 
-    include_dirs: [
-        "frameworks/av/media/libstagefright/include",
-    ],
-
-    shared_libs: [
-        "liblog",
-        "libmediandk",
-    ],
-
     static_libs: [
         "libutils",
         "libstagefright_id3",
         "libstagefright_foundation",
     ],
 
-    name: "libmp3extractor",
-    relative_install_path: "extractors",
-
-    compile_multilib: "first",
-
-    cflags: [
-        "-Werror",
-        "-Wall",
-        "-fvisibility=hidden",
-    ],
-    version_script: "exports.lds",
-
-    sanitize: {
-        cfi: true,
-        misc_undefined: [
-            "unsigned-integer-overflow",
-            "signed-integer-overflow",
-        ],
-    },
-
 }
diff --git a/media/extractors/mp4/Android.bp b/media/extractors/mp4/Android.bp
index 1b308aa..e48e1b7 100644
--- a/media/extractors/mp4/Android.bp
+++ b/media/extractors/mp4/Android.bp
@@ -1,5 +1,6 @@
-cc_defaults {
-    name: "libmp4extractor_defaults",
+cc_library {
+    name: "libmp4extractor",
+    defaults: ["extractor-defaults"],
 
     srcs: [
         "AC4Parser.cpp",
@@ -9,50 +10,10 @@
         "SampleTable.cpp",
     ],
 
-    include_dirs: [
-        "frameworks/av/media/libstagefright/",
-    ],
-
-    shared_libs: [
-        "liblog",
-        "libmediandk"
-    ],
-
     static_libs: [
         "libstagefright_esds",
         "libstagefright_foundation",
         "libstagefright_id3",
         "libutils",
     ],
-
-    cflags: [
-        "-Werror",
-        "-Wall",
-        "-fvisibility=hidden",
-    ],
-    version_script: "exports.lds",
-    relative_install_path: "extractors",
-    compile_multilib: "first",
-}
-
-cc_library_shared {
-
-
-    name: "libmp4extractor",
-    defaults: ["libmp4extractor_defaults"],
-
-    sanitize: {
-        cfi: true,
-        misc_undefined: [
-            "unsigned-integer-overflow",
-            "signed-integer-overflow",
-        ],
-    },
-
-}
-
-cc_library_static {
-    name: "libmp4extractor_fuzzing",
-
-    defaults: ["libmp4extractor_defaults"],
 }
diff --git a/media/extractors/mp4/MPEG4Extractor.cpp b/media/extractors/mp4/MPEG4Extractor.cpp
index 8c0ecf2..c8079eb 100755
--- a/media/extractors/mp4/MPEG4Extractor.cpp
+++ b/media/extractors/mp4/MPEG4Extractor.cpp
@@ -31,9 +31,10 @@
 #include "MPEG4Extractor.h"
 #include "SampleTable.h"
 #include "ItemTable.h"
-#include "include/ESDS.h"
 
-#include <media/DataSourceBase.h>
+#include <ESDS.h>
+#include <ID3.h>
+#include <media/stagefright/DataSourceBase.h>
 #include <media/ExtractorUtils.h>
 #include <media/stagefright/foundation/ABitReader.h>
 #include <media/stagefright/foundation/ABuffer.h>
@@ -52,7 +53,6 @@
 #include <utils/String8.h>
 
 #include <byteswap.h>
-#include "include/ID3.h"
 
 #ifndef UINT32_MAX
 #define UINT32_MAX       (4294967295U)
@@ -83,7 +83,8 @@
                 const Trex *trex,
                 off64_t firstMoofOffset,
                 const sp<ItemTable> &itemTable,
-                uint64_t elstShiftStartTicks);
+                uint64_t elstShiftStartTicks,
+                uint64_t elstInitialEmptyEditTicks);
     virtual status_t init();
 
     virtual media_status_t start();
@@ -151,6 +152,7 @@
     // Start offset from composition time to presentation time.
     // Support shift only for video tracks through mElstShiftStartTicks for now.
     uint64_t mElstShiftStartTicks;
+    uint64_t mElstInitialEmptyEditTicks;
 
     size_t parseNALSize(const uint8_t *data) const;
     status_t parseChunk(off64_t *offset);
@@ -484,12 +486,11 @@
         int64_t duration;
         int32_t samplerate;
         // Only for audio track.
-        if (track->has_elst && mHeaderTimescale != 0 &&
-                AMediaFormat_getInt64(track->meta, AMEDIAFORMAT_KEY_DURATION, &duration) &&
-                AMediaFormat_getInt32(track->meta, AMEDIAFORMAT_KEY_SAMPLE_RATE, &samplerate)) {
-
+        if (track->elst_needs_processing && mHeaderTimescale != 0 &&
+            AMediaFormat_getInt64(track->meta, AMEDIAFORMAT_KEY_DURATION, &duration) &&
+            AMediaFormat_getInt32(track->meta, AMEDIAFORMAT_KEY_SAMPLE_RATE, &samplerate)) {
             // Elst has to be processed only the first time this function is called.
-            track->has_elst = false;
+            track->elst_needs_processing = false;
 
             if (track->elst_segment_duration > INT64_MAX) {
                 return;
@@ -1098,10 +1099,11 @@
                             track_b->sampleTable = mLastTrack->sampleTable;
                             track_b->includes_expensive_metadata = mLastTrack->includes_expensive_metadata;
                             track_b->skipTrack = mLastTrack->skipTrack;
-                            track_b->has_elst = mLastTrack->has_elst;
+                            track_b->elst_needs_processing = mLastTrack->elst_needs_processing;
                             track_b->elst_media_time = mLastTrack->elst_media_time;
                             track_b->elst_segment_duration = mLastTrack->elst_segment_duration;
-                            track_b->elstShiftStartTicks = mLastTrack->elstShiftStartTicks;
+                            track_b->elst_shift_start_ticks = mLastTrack->elst_shift_start_ticks;
+                            track_b->elst_initial_empty_edit_ticks = mLastTrack->elst_initial_empty_edit_ticks;
                             track_b->subsample_encryption = mLastTrack->subsample_encryption;
 
                             track_b->mTx3gBuffer = mLastTrack->mTx3gBuffer;
@@ -1204,39 +1206,86 @@
                 return ERROR_IO;
             }
 
-            if (entry_count != 1) {
-                // we only support a single entry at the moment, for gapless playback
-                // or start offset
+            if (entry_count > 2) {
+                /* We support a single entry for gapless playback or negating offset for
+                 * reordering B frames, two entries (empty edit) for start offset at the moment.
+                 */
                 ALOGW("ignoring edit list with %d entries", entry_count);
             } else {
                 off64_t entriesoffset = data_offset + 8;
                 uint64_t segment_duration;
                 int64_t media_time;
-
-                if (version == 1) {
-                    if (!mDataSource->getUInt64(entriesoffset, &segment_duration) ||
-                            !mDataSource->getUInt64(entriesoffset + 8, (uint64_t*)&media_time)) {
-                        return ERROR_IO;
-                    }
-                } else if (version == 0) {
-                    uint32_t sd;
-                    int32_t mt;
-                    if (!mDataSource->getUInt32(entriesoffset, &sd) ||
+                uint64_t empty_edit_ticks = 0;
+                bool empty_edit_present = false;
+                for (int i = 0; i < entry_count; ++i) {
+                    switch (version) {
+                    case 0: {
+                        uint32_t sd;
+                        int32_t mt;
+                        if (!mDataSource->getUInt32(entriesoffset, &sd) ||
                             !mDataSource->getUInt32(entriesoffset + 4, (uint32_t*)&mt)) {
-                        return ERROR_IO;
+                            return ERROR_IO;
+                        }
+                        segment_duration = sd;
+                        media_time = mt;
+                        // 4(segment duration) + 4(media time) + 4(media rate)
+                        entriesoffset += 12;
+                        break;
                     }
-                    segment_duration = sd;
-                    media_time = mt;
-                } else {
-                    return ERROR_IO;
+                    case 1: {
+                        if (!mDataSource->getUInt64(entriesoffset, &segment_duration) ||
+                            !mDataSource->getUInt64(entriesoffset + 8, (uint64_t*)&media_time)) {
+                            return ERROR_IO;
+                        }
+                        // 8(segment duration) + 8(media time) + 4(media rate)
+                        entriesoffset += 20;
+                        break;
+                    }
+                    default:
+                        return ERROR_IO;
+                        break;
+                    }
+                    // Empty edit entry would have to be first entry.
+                    if (media_time == -1 && i == 0) {
+                        int64_t durationUs;
+                        if (AMediaFormat_getInt64(mFileMetaData, AMEDIAFORMAT_KEY_DURATION,
+                                                  &durationUs)) {
+                            empty_edit_ticks = segment_duration;
+                            ALOGV("initial empty edit ticks: %" PRIu64, empty_edit_ticks);
+                            empty_edit_present = true;
+                        }
+                    }
+                    // Process second entry only when the first entry was an empty edit entry.
+                    if (empty_edit_present && i == 1) {
+                        int64_t durationUs;
+                        if (AMediaFormat_getInt64(mLastTrack->meta, AMEDIAFORMAT_KEY_DURATION,
+                                                  &durationUs) &&
+                            mHeaderTimescale != 0) {
+                            // Support only segment_duration<=track_duration and media_time==0 case.
+                            uint64_t segmentDurationUs =
+                                    segment_duration * 1000000 / mHeaderTimescale;
+                            if (segmentDurationUs == 0 || segmentDurationUs > durationUs ||
+                                media_time != 0) {
+                                ALOGW("for now, unsupported second entry in empty edit list");
+                            }
+                        }
+                    }
                 }
-
                 // save these for later, because the elst atom might precede
                 // the atoms that actually gives us the duration and sample rate
                 // needed to calculate the padding and delay values
-                mLastTrack->has_elst = true;
-                mLastTrack->elst_media_time = media_time;
-                mLastTrack->elst_segment_duration = segment_duration;
+                mLastTrack->elst_needs_processing = true;
+                if (empty_edit_present) {
+                    /* In movie header timescale, and needs to be converted to media timescale once
+                     * we get that from a track's 'mdhd' atom, which at times come after 'elst'.
+                     */
+                    mLastTrack->elst_initial_empty_edit_ticks = empty_edit_ticks;
+                } else {
+                    mLastTrack->elst_media_time = media_time;
+                    mLastTrack->elst_segment_duration = segment_duration;
+                    ALOGV("segment_duration: %" PRIu64 " media_time: %" PRId64, segment_duration,
+                          media_time);
+                }
             }
             break;
         }
@@ -4275,15 +4324,28 @@
         }
     }
 
-    if (track->has_elst and !strncasecmp("video/", mime, 6) and track->elst_media_time > 0) {
-        track->elstShiftStartTicks = track->elst_media_time;
-        ALOGV("video track->elstShiftStartTicks :%" PRIu64, track->elstShiftStartTicks);
+    // media_time is in media timescale as are STTS/CTTS entries.
+    track->elst_shift_start_ticks = track->elst_media_time;
+    ALOGV("track->elst_shift_start_ticks :%" PRIu64, track->elst_shift_start_ticks);
+    if (mHeaderTimescale != 0) {
+        // Convert empty_edit_ticks from movie timescale to media timescale.
+        uint64_t elst_initial_empty_edit_ticks_mul = 0, elst_initial_empty_edit_ticks_add = 0;
+        if (__builtin_mul_overflow(track->elst_initial_empty_edit_ticks, track->timescale,
+                                   &elst_initial_empty_edit_ticks_mul) ||
+            __builtin_add_overflow(elst_initial_empty_edit_ticks_mul, (mHeaderTimescale / 2),
+                                   &elst_initial_empty_edit_ticks_add)) {
+            ALOGE("track->elst_initial_empty_edit_ticks overflow");
+            return nullptr;
+        }
+        track->elst_initial_empty_edit_ticks = elst_initial_empty_edit_ticks_add / mHeaderTimescale;
+        ALOGV("track->elst_initial_empty_edit_ticks :%" PRIu64,
+              track->elst_initial_empty_edit_ticks);
     }
 
-    MPEG4Source *source =  new MPEG4Source(
-            track->meta, mDataSource, track->timescale, track->sampleTable,
-            mSidxEntries, trex, mMoofOffset, itemTable,
-            track->elstShiftStartTicks);
+    MPEG4Source* source =
+            new MPEG4Source(track->meta, mDataSource, track->timescale, track->sampleTable,
+                            mSidxEntries, trex, mMoofOffset, itemTable,
+                            track->elst_shift_start_ticks, track->elst_initial_empty_edit_ticks);
     if (source->init() != OK) {
         delete source;
         return NULL;
@@ -4776,7 +4838,8 @@
         const Trex *trex,
         off64_t firstMoofOffset,
         const sp<ItemTable> &itemTable,
-        uint64_t elstShiftStartTicks)
+        uint64_t elstShiftStartTicks,
+        uint64_t elstInitialEmptyEditTicks)
     : mFormat(format),
       mDataSource(dataSource),
       mTimescale(timeScale),
@@ -4806,7 +4869,8 @@
       mSrcBuffer(NULL),
       mIsHeif(itemTable != NULL),
       mItemTable(itemTable),
-      mElstShiftStartTicks(elstShiftStartTicks) {
+      mElstShiftStartTicks(elstShiftStartTicks),
+      mElstInitialEmptyEditTicks(elstInitialEmptyEditTicks) {
 
     memset(&mTrackFragmentHeaderInfo, 0, sizeof(mTrackFragmentHeaderInfo));
 
@@ -4925,35 +4989,14 @@
     }
 
     CHECK(AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_TRACK_ID, &mTrackId));
-
 }
 
 status_t MPEG4Source::init() {
-    status_t err = OK;
-    const char *mime;
-    CHECK(AMediaFormat_getString(mFormat, AMEDIAFORMAT_KEY_MIME, &mime));
     if (mFirstMoofOffset != 0) {
         off64_t offset = mFirstMoofOffset;
-        err = parseChunk(&offset);
-        if(err == OK && !strncasecmp("video/", mime, 6)
-            && !mCurrentSamples.isEmpty()) {
-            // Start offset should be less or equal to composition time of first sample.
-            // ISO : sample_composition_time_offset, version 0 (unsigned) for major brands.
-            mElstShiftStartTicks = std::min(mElstShiftStartTicks,
-                                            (uint64_t)(*mCurrentSamples.begin()).compositionOffset);
-        }
-        return err;
+        return parseChunk(&offset);
     }
-
-    if (!strncasecmp("video/", mime, 6)) {
-        uint64_t firstSampleCTS = 0;
-        err = mSampleTable->getMetaDataForSample(0, NULL, NULL, &firstSampleCTS);
-        // Start offset should be less or equal to composition time of first sample.
-        // Composition time stamp of first sample cannot be negative.
-        mElstShiftStartTicks = std::min(mElstShiftStartTicks, firstSampleCTS);
-    }
-
-    return err;
+    return OK;
 }
 
 MPEG4Source::~MPEG4Source() {
@@ -5801,6 +5844,7 @@
 
     int64_t seekTimeUs;
     ReadOptions::SeekMode mode;
+
     if (options && options->getSeekTo(&seekTimeUs, &mode)) {
 
         if (mIsHeif) {
@@ -5842,6 +5886,8 @@
             }
             if( mode != ReadOptions::SEEK_FRAME_INDEX) {
                 seekTimeUs += ((long double)mElstShiftStartTicks * 1000000) / mTimescale;
+                ALOGV("shifted seekTimeUs :%" PRId64 ", mElstShiftStartTicks:%" PRIu64, seekTimeUs,
+                      mElstShiftStartTicks);
             }
 
             uint32_t sampleIndex;
@@ -5916,7 +5962,8 @@
 
     off64_t offset = 0;
     size_t size = 0;
-    uint64_t cts, stts;
+    int64_t cts;
+    uint64_t stts;
     bool isSyncSample;
     bool newBuffer = false;
     if (mBuffer == NULL) {
@@ -5924,14 +5971,15 @@
 
         status_t err;
         if (!mIsHeif) {
-            err = mSampleTable->getMetaDataForSample(
-                    mCurrentSampleIndex, &offset, &size, &cts, &isSyncSample, &stts);
+            err = mSampleTable->getMetaDataForSample(mCurrentSampleIndex, &offset, &size,
+                                                    (uint64_t*)&cts, &isSyncSample, &stts);
             if(err == OK) {
-                /* Composition Time Stamp cannot be negative. Some files have video Sample
-                * Time(STTS)delta with zero value(b/117402420).  Hence subtract only
-                * min(cts, mElstShiftStartTicks), so that audio tracks can be played.
-                */
-                cts -= std::min(cts, mElstShiftStartTicks);
+                if (mElstInitialEmptyEditTicks > 0) {
+                    cts += mElstInitialEmptyEditTicks;
+                } else {
+                    // cts can be negative. for example, initial audio samples for gapless playback.
+                    cts -= (int64_t)mElstShiftStartTicks;
+                }
             }
 
         } else {
@@ -6272,7 +6320,7 @@
 
     off64_t offset = 0;
     size_t size = 0;
-    uint64_t cts = 0;
+    int64_t cts = 0;
     bool isSyncSample = false;
     bool newBuffer = false;
     if (mBuffer == NULL || mCurrentSampleIndex >= mCurrentSamples.size()) {
@@ -6304,11 +6352,13 @@
         offset = smpl->offset;
         size = smpl->size;
         cts = mCurrentTime + smpl->compositionOffset;
-        /* Composition Time Stamp cannot be negative. Some files have video Sample
-        * Time(STTS)delta with zero value(b/117402420).  Hence subtract only
-        * min(cts, mElstShiftStartTicks), so that audio tracks can be played.
-        */
-        cts -= std::min(cts, mElstShiftStartTicks);
+
+        if (mElstInitialEmptyEditTicks > 0) {
+            cts += mElstInitialEmptyEditTicks;
+        } else {
+            // cts can be negative. for example, initial audio samples for gapless playback.
+            cts -= (int64_t)mElstShiftStartTicks;
+        }
 
         mCurrentTime += smpl->duration;
         isSyncSample = (mCurrentSampleIndex == 0);
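The empty-edit handling above stores elst_initial_empty_edit_ticks in the movie-header timescale and later rescales it to the track's media timescale using round-to-nearest, overflow-checked arithmetic before constructing the MPEG4Source. A standalone sketch of that conversion, assuming the same rounding rule as the patch; the names here are local to the sketch, not from the source:

    #include <cstdint>

    // Mirrors the overflow-checked rescaling done when the track is created:
    // media_ticks = (movie_ticks * media_timescale + movie_timescale / 2) / movie_timescale
    static bool rescaleEmptyEditTicks(uint64_t emptyEditTicksMovieTb,  // ticks in movie timescale
                                      uint32_t mediaTimescale,
                                      uint32_t movieTimescale,
                                      uint64_t *emptyEditTicksMediaTb) {
        if (movieTimescale == 0) {
            return false;  // nothing to convert against
        }
        uint64_t mul = 0, add = 0;
        if (__builtin_mul_overflow(emptyEditTicksMovieTb, (uint64_t)mediaTimescale, &mul) ||
            __builtin_add_overflow(mul, (uint64_t)(movieTimescale / 2), &add)) {
            return false;  // the patch logs an error and bails out of track creation here
        }
        *emptyEditTicksMediaTb = add / movieTimescale;  // rounded to the nearest media tick
        return true;
    }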
diff --git a/media/extractors/mp4/MPEG4Extractor.h b/media/extractors/mp4/MPEG4Extractor.h
index fcddbb8..53ec6bc 100644
--- a/media/extractors/mp4/MPEG4Extractor.h
+++ b/media/extractors/mp4/MPEG4Extractor.h
@@ -82,14 +82,16 @@
         sp<SampleTable> sampleTable;
         bool includes_expensive_metadata;
         bool skipTrack;
-        bool has_elst;
+        bool elst_needs_processing;
         /* signed int, ISO Spec allows media_time = -1 for other use cases.
          * but we don't support empty edits for now.
          */
         int64_t elst_media_time;
         uint64_t elst_segment_duration;
-        // unsigned int, shift start offset only when media_time > 0.
-        uint64_t elstShiftStartTicks;
+        // Shift start offset only when media_time > 0.
+        uint64_t elst_shift_start_ticks;
+        // Initial start offset, empty edit list entry.
+        uint64_t elst_initial_empty_edit_ticks;
         bool subsample_encryption;
 
         uint8_t *mTx3gBuffer;
@@ -102,9 +104,11 @@
             timescale = 0;
             includes_expensive_metadata = false;
             skipTrack = false;
-            has_elst = false;
+            elst_needs_processing = false;
             elst_media_time = 0;
-            elstShiftStartTicks = 0;
+            elst_segment_duration = 0;
+            elst_shift_start_ticks = 0;
+            elst_initial_empty_edit_ticks = 0;
             subsample_encryption = false;
             mTx3gBuffer = NULL;
             mTx3gSize = mTx3gFilled = 0;
diff --git a/media/extractors/mpeg2/Android.bp b/media/extractors/mpeg2/Android.bp
index 8638cdc..ef8431e 100644
--- a/media/extractors/mpeg2/Android.bp
+++ b/media/extractors/mpeg2/Android.bp
@@ -1,4 +1,7 @@
-cc_library_shared {
+cc_library {
+    name: "libmpeg2extractor",
+
+    defaults: ["extractor-defaults"],
 
     srcs: [
         "ExtractorBundle.cpp",
@@ -6,15 +9,8 @@
         "MPEG2TSExtractor.cpp",
     ],
 
-    include_dirs: [
-        "frameworks/av/media/libstagefright",
-        "frameworks/av/media/libstagefright/include",
-    ],
-
     shared_libs: [
         "libcgrouprc#29",
-        "liblog#10000",
-        "libmediandk#29",
         "libvndksupport#29",
     ],
 
@@ -46,29 +42,15 @@
         "libutils",
     ],
 
-    name: "libmpeg2extractor",
-    relative_install_path: "extractors",
-
-    compile_multilib: "first",
-
-    cflags: [
-        "-Werror",
-        "-Wall",
-        "-fvisibility=hidden",
-    ],
-    version_script: "exports.lds",
-
-    sanitize: {
-        // STOPSHIP: turn on cfi once b/139945549 is resolved.
-        cfi: false,
-        misc_undefined: [
-            "unsigned-integer-overflow",
-            "signed-integer-overflow",
-        ],
-    },
-
     apex_available: [
         "com.android.media",
         "test_com.android.media",
     ],
+
+    static: {
+        apex_available: [
+            // Needed for unit tests
+            "//apex_available:platform",
+        ],
+    },
 }
diff --git a/media/extractors/mpeg2/MPEG2PSExtractor.cpp b/media/extractors/mpeg2/MPEG2PSExtractor.cpp
index 002a855..d431b05 100644
--- a/media/extractors/mpeg2/MPEG2PSExtractor.cpp
+++ b/media/extractors/mpeg2/MPEG2PSExtractor.cpp
@@ -20,8 +20,8 @@
 
 #include "MPEG2PSExtractor.h"
 
-#include "mpeg2ts/AnotherPacketSource.h"
-#include "mpeg2ts/ESQueue.h"
+#include <AnotherPacketSource.h>
+#include <ESQueue.h>
 
 #include <media/stagefright/foundation/ABitReader.h>
 #include <media/stagefright/foundation/ABuffer.h>
diff --git a/media/extractors/mpeg2/MPEG2TSExtractor.cpp b/media/extractors/mpeg2/MPEG2TSExtractor.cpp
index 50ce657..9e093eb 100644
--- a/media/extractors/mpeg2/MPEG2TSExtractor.cpp
+++ b/media/extractors/mpeg2/MPEG2TSExtractor.cpp
@@ -24,21 +24,20 @@
 
 #include "MPEG2TSExtractor.h"
 
-#include <media/DataSourceBase.h>
 #include <media/IStreamSource.h>
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/ALooper.h>
 #include <media/stagefright/foundation/AUtils.h>
 #include <media/stagefright/foundation/MediaKeys.h>
+#include <media/stagefright/DataSourceBase.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaErrors.h>
 #include <media/stagefright/MetaData.h>
 #include <media/stagefright/Utils.h>
 #include <utils/String8.h>
 
-#include "mpeg2ts/AnotherPacketSource.h"
-#include "mpeg2ts/ATSParser.h"
+#include <AnotherPacketSource.h>
 
 #include <hidl/HybridInterface.h>
 #include <android/hardware/cas/1.0/ICas.h>
diff --git a/media/extractors/mpeg2/MPEG2TSExtractor.h b/media/extractors/mpeg2/MPEG2TSExtractor.h
index dcd1e7b..fd77b08 100644
--- a/media/extractors/mpeg2/MPEG2TSExtractor.h
+++ b/media/extractors/mpeg2/MPEG2TSExtractor.h
@@ -27,7 +27,7 @@
 #include <utils/KeyedVector.h>
 #include <utils/Vector.h>
 
-#include "mpeg2ts/ATSParser.h"
+#include <ATSParser.h>
 
 namespace android {
 
diff --git a/media/extractors/ogg/Android.bp b/media/extractors/ogg/Android.bp
index 604ec59..7aed683 100644
--- a/media/extractors/ogg/Android.bp
+++ b/media/extractors/ogg/Android.bp
@@ -1,9 +1,11 @@
-cc_library_shared {
+cc_library {
+    name: "liboggextractor",
+
+    defaults: ["extractor-defaults"],
 
     srcs: ["OggExtractor.cpp"],
 
     include_dirs: [
-        "frameworks/av/media/libstagefright/include",
         "external/tremolo",
     ],
 
@@ -11,11 +13,6 @@
         "libaudio_system_headers",
     ],
 
-    shared_libs: [
-        "liblog",
-        "libmediandk",
-    ],
-
     static_libs: [
         "libstagefright_foundation",
         "libstagefright_metadatautils",
@@ -23,24 +20,4 @@
         "libvorbisidec",
     ],
 
-    name: "liboggextractor",
-    relative_install_path: "extractors",
-
-    compile_multilib: "first",
-
-    cflags: [
-        "-Werror",
-        "-Wall",
-        "-fvisibility=hidden",
-    ],
-    version_script: "exports.lds",
-
-    sanitize: {
-        cfi: true,
-        misc_undefined: [
-            "unsigned-integer-overflow",
-            "signed-integer-overflow",
-        ],
-    },
-
 }
diff --git a/media/extractors/ogg/OggExtractor.cpp b/media/extractors/ogg/OggExtractor.cpp
index bfb2deb..828bcd6 100644
--- a/media/extractors/ogg/OggExtractor.cpp
+++ b/media/extractors/ogg/OggExtractor.cpp
@@ -22,7 +22,7 @@
 
 #include <cutils/properties.h>
 #include <utils/Vector.h>
-#include <media/DataSourceBase.h>
+#include <media/stagefright/DataSourceBase.h>
 #include <media/ExtractorUtils.h>
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
diff --git a/media/extractors/tests/Android.bp b/media/extractors/tests/Android.bp
new file mode 100644
index 0000000..059c308
--- /dev/null
+++ b/media/extractors/tests/Android.bp
@@ -0,0 +1,96 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+cc_test {
+    name: "ExtractorUnitTest",
+    gtest: true,
+
+    srcs: ["ExtractorUnitTest.cpp"],
+
+    static_libs: [
+        "libaacextractor",
+        "libamrextractor",
+        "libmp3extractor",
+        "libwavextractor",
+        "liboggextractor",
+        "libflacextractor",
+        "libmidiextractor",
+        "libmkvextractor",
+        "libmpeg2extractor",
+        "libmp4extractor",
+        "libaudioutils",
+        "libdatasource",
+
+        "libstagefright",
+        "libstagefright_id3",
+        "libstagefright_flacdec",
+        "libstagefright_esds",
+        "libstagefright_mpeg2support",
+        "libstagefright_mpeg2extractor",
+        "libstagefright_foundation",
+        "libstagefright_metadatautils",
+
+        "libmedia_midiiowrapper",
+        "libsonivox",
+        "libvorbisidec",
+        "libwebm",
+        "libFLAC",
+    ],
+
+    shared_libs: [
+        "android.hardware.cas@1.0",
+        "android.hardware.cas.native@1.0",
+        "android.hidl.token@1.0-utils",
+        "android.hidl.allocator@1.0",
+        "libbinder",
+        "libbinder_ndk",
+        "libutils",
+        "liblog",
+        "libcutils",
+        "libmediandk",
+        "libmedia",
+        "libcrypto",
+        "libhidlmemory",
+        "libhidlbase",
+    ],
+
+    include_dirs: [
+        "frameworks/av/media/extractors/",
+        "frameworks/av/media/libstagefright/",
+    ],
+
+    compile_multilib: "first",
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+
+    ldflags: [
+        "-Wl",
+        "-Bsymbolic",
+        // to ignore duplicate symbol: GETEXTRACTORDEF
+        "-z muldefs",
+    ],
+
+    sanitize: {
+        cfi: true,
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+    },
+}
diff --git a/media/extractors/tests/ExtractorUnitTest.cpp b/media/extractors/tests/ExtractorUnitTest.cpp
new file mode 100644
index 0000000..518166e
--- /dev/null
+++ b/media/extractors/tests/ExtractorUnitTest.cpp
@@ -0,0 +1,528 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ExtractorUnitTest"
+#include <utils/Log.h>
+
+#include <datasource/FileSource.h>
+#include <media/stagefright/MediaBufferGroup.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MetaDataUtils.h>
+
+#include "aac/AACExtractor.h"
+#include "amr/AMRExtractor.h"
+#include "flac/FLACExtractor.h"
+#include "midi/MidiExtractor.h"
+#include "mkv/MatroskaExtractor.h"
+#include "mp3/MP3Extractor.h"
+#include "mp4/MPEG4Extractor.h"
+#include "mp4/SampleTable.h"
+#include "mpeg2/MPEG2PSExtractor.h"
+#include "mpeg2/MPEG2TSExtractor.h"
+#include "ogg/OggExtractor.h"
+#include "wav/WAVExtractor.h"
+
+#include "ExtractorUnitTestEnvironment.h"
+
+using namespace android;
+
+#define OUTPUT_DUMP_FILE "/data/local/tmp/extractorOutput"
+
+constexpr int32_t kMaxCount = 10;
+constexpr int32_t kOpusSeekPreRollUs = 80000;  // 80 ms;
+
+static ExtractorUnitTestEnvironment *gEnv = nullptr;
+
+class ExtractorUnitTest : public ::testing::TestWithParam<pair<string, string>> {
+  public:
+    ExtractorUnitTest() : mInputFp(nullptr), mDataSource(nullptr), mExtractor(nullptr) {}
+
+    ~ExtractorUnitTest() {
+        if (mInputFp) {
+            fclose(mInputFp);
+            mInputFp = nullptr;
+        }
+        if (mDataSource) {
+            mDataSource.clear();
+            mDataSource = nullptr;
+        }
+        if (mExtractor) {
+            delete mExtractor;
+            mExtractor = nullptr;
+        }
+    }
+
+    virtual void SetUp() override {
+        mExtractorName = unknown_comp;
+        mDisableTest = false;
+
+        static const std::map<std::string, standardExtractors> mapExtractor = {
+                {"aac", AAC},     {"amr", AMR},         {"mp3", MP3},        {"ogg", OGG},
+                {"wav", WAV},     {"mkv", MKV},         {"flac", FLAC},      {"midi", MIDI},
+                {"mpeg4", MPEG4}, {"mpeg2ts", MPEG2TS}, {"mpeg2ps", MPEG2PS}};
+        // Find the component type
+        string writerFormat = GetParam().first;
+        if (mapExtractor.find(writerFormat) != mapExtractor.end()) {
+            mExtractorName = mapExtractor.at(writerFormat);
+        }
+        if (mExtractorName == standardExtractors::unknown_comp) {
+            cout << "[   WARN   ] Test Skipped. Invalid extractor\n";
+            mDisableTest = true;
+        }
+    }
+
+    int32_t setDataSource(string inputFileName);
+
+    int32_t createExtractor();
+
+    enum standardExtractors {
+        AAC,
+        AMR,
+        FLAC,
+        MIDI,
+        MKV,
+        MP3,
+        MPEG4,
+        MPEG2PS,
+        MPEG2TS,
+        OGG,
+        WAV,
+        unknown_comp,
+    };
+
+    bool mDisableTest;
+    standardExtractors mExtractorName;
+
+    FILE *mInputFp;
+    sp<DataSource> mDataSource;
+    MediaExtractorPluginHelper *mExtractor;
+};
+
+int32_t ExtractorUnitTest::setDataSource(string inputFileName) {
+    mInputFp = fopen(inputFileName.c_str(), "rb");
+    if (!mInputFp) {
+        ALOGE("Unable to open input file for reading");
+        return -1;
+    }
+    struct stat buf;
+    stat(inputFileName.c_str(), &buf);
+    int32_t fd = fileno(mInputFp);
+    mDataSource = new FileSource(dup(fd), 0, buf.st_size);
+    if (!mDataSource) return -1;
+    return 0;
+}
+
+int32_t ExtractorUnitTest::createExtractor() {
+    switch (mExtractorName) {
+        case AAC:
+            mExtractor = new AACExtractor(new DataSourceHelper(mDataSource->wrap()), 0);
+            break;
+        case AMR:
+            mExtractor = new AMRExtractor(new DataSourceHelper(mDataSource->wrap()));
+            break;
+        case MP3:
+            mExtractor = new MP3Extractor(new DataSourceHelper(mDataSource->wrap()), nullptr);
+            break;
+        case OGG:
+            mExtractor = new OggExtractor(new DataSourceHelper(mDataSource->wrap()));
+            break;
+        case WAV:
+            mExtractor = new WAVExtractor(new DataSourceHelper(mDataSource->wrap()));
+            break;
+        case MKV:
+            mExtractor = new MatroskaExtractor(new DataSourceHelper(mDataSource->wrap()));
+            break;
+        case FLAC:
+            mExtractor = new FLACExtractor(new DataSourceHelper(mDataSource->wrap()));
+            break;
+        case MPEG4:
+            mExtractor = new MPEG4Extractor(new DataSourceHelper(mDataSource->wrap()));
+            break;
+        case MPEG2TS:
+            mExtractor = new MPEG2TSExtractor(new DataSourceHelper(mDataSource->wrap()));
+            break;
+        case MPEG2PS:
+            mExtractor = new MPEG2PSExtractor(new DataSourceHelper(mDataSource->wrap()));
+            break;
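+        // Unlike the other extractors, MidiExtractor consumes the wrapped CDataSource
+        // directly rather than a DataSourceHelper.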
+        case MIDI:
+            mExtractor = new MidiExtractor(mDataSource->wrap());
+            break;
+        default:
+            return -1;
+    }
+    if (!mExtractor) return -1;
+    return 0;
+}
+
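+// Reads the given track to completion and collects the timestamps of all sync samples
+// in seekablePoints.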
+void getSeekablePoints(vector<int64_t> &seekablePoints, MediaTrackHelper *track) {
+    int32_t status = 0;
+    if (!seekablePoints.empty()) {
+        seekablePoints.clear();
+    }
+    int64_t timeStamp;
+    while (status != AMEDIA_ERROR_END_OF_STREAM) {
+        MediaBufferHelper *buffer = nullptr;
+        status = track->read(&buffer);
+        if (buffer) {
+            AMediaFormat *metaData = buffer->meta_data();
+            int32_t isSync = 0;
+            AMediaFormat_getInt32(metaData, AMEDIAFORMAT_KEY_IS_SYNC_FRAME, &isSync);
+            if (isSync) {
+                AMediaFormat_getInt64(metaData, AMEDIAFORMAT_KEY_TIME_US, &timeStamp);
+                seekablePoints.push_back(timeStamp);
+            }
+            buffer->release();
+        }
+    }
+}
+
+TEST_P(ExtractorUnitTest, CreateExtractorTest) {
+    if (mDisableTest) return;
+
+    ALOGV("Checks if a valid extractor is created for a given input file");
+    string inputFileName = gEnv->getRes() + GetParam().second;
+
+    ASSERT_EQ(setDataSource(inputFileName), 0)
+            << "SetDataSource failed for " << GetParam().first << " extractor";
+
+    ASSERT_EQ(createExtractor(), 0)
+            << "Extractor creation failed for " << GetParam().first << " extractor";
+
+    // A valid extractor instance should return success for the following calls
+    ASSERT_GT(mExtractor->countTracks(), 0);
+
+    AMediaFormat *format = AMediaFormat_new();
+    ASSERT_NE(format, nullptr) << "AMediaFormat_new returned null AMediaformat";
+
+    ASSERT_EQ(mExtractor->getMetaData(format), AMEDIA_OK);
+    AMediaFormat_delete(format);
+}
+
+TEST_P(ExtractorUnitTest, ExtractorTest) {
+    if (mDisableTest) return;
+
+    ALOGV("Validates %s Extractor for a given input file", GetParam().first.c_str());
+    string inputFileName = gEnv->getRes() + GetParam().second;
+
+    int32_t status = setDataSource(inputFileName);
+    ASSERT_EQ(status, 0) << "SetDataSource failed for " << GetParam().first << " extractor";
+
+    status = createExtractor();
+    ASSERT_EQ(status, 0) << "Extractor creation failed for " << GetParam().first << " extractor";
+
+    int32_t numTracks = mExtractor->countTracks();
+    ASSERT_GT(numTracks, 0) << "Extractor didn't find any track for the given clip";
+
+    for (int32_t idx = 0; idx < numTracks; idx++) {
+        MediaTrackHelper *track = mExtractor->getTrack(idx);
+        ASSERT_NE(track, nullptr) << "Failed to get track for index " << idx;
+
+        CMediaTrack *cTrack = wrap(track);
+        ASSERT_NE(cTrack, nullptr) << "Failed to get track wrapper for index " << idx;
+
+        MediaBufferGroup *bufferGroup = new MediaBufferGroup();
+        status = cTrack->start(track, bufferGroup->wrap());
+        ASSERT_EQ(OK, (media_status_t)status) << "Failed to start the track";
+
+        FILE *outFp = fopen((OUTPUT_DUMP_FILE + to_string(idx)).c_str(), "wb");
+        if (!outFp) {
+            ALOGW("Unable to open output file for dumping extracted stream");
+        }
+
+        while (status != AMEDIA_ERROR_END_OF_STREAM) {
+            MediaBufferHelper *buffer = nullptr;
+            status = track->read(&buffer);
+            ALOGV("track->read Status = %d buffer %p", status, buffer);
+            if (buffer) {
+                ALOGV("buffer->data %p buffer->size() %zu buffer->range_length() %zu",
+                      buffer->data(), buffer->size(), buffer->range_length());
+                if (outFp) fwrite(buffer->data(), 1, buffer->range_length(), outFp);
+                buffer->release();
+            }
+        }
+        if (outFp) fclose(outFp);
+        status = cTrack->stop(track);
+        ASSERT_EQ(OK, status) << "Failed to stop the track";
+        delete bufferGroup;
+        delete track;
+    }
+}
+
+TEST_P(ExtractorUnitTest, MetaDataComparisonTest) {
+    if (mDisableTest) return;
+
+    ALOGV("Validates Extractor's meta data for a given input file");
+    string inputFileName = gEnv->getRes() + GetParam().second;
+
+    int32_t status = setDataSource(inputFileName);
+    ASSERT_EQ(status, 0) << "SetDataSource failed for " << GetParam().first << " extractor";
+
+    status = createExtractor();
+    ASSERT_EQ(status, 0) << "Extractor creation failed for " << GetParam().first << " extractor";
+
+    int32_t numTracks = mExtractor->countTracks();
+    ASSERT_GT(numTracks, 0) << "Extractor didn't find any track for the given clip";
+
+    AMediaFormat *extractorFormat = AMediaFormat_new();
+    ASSERT_NE(extractorFormat, nullptr) << "AMediaFormat_new returned null AMediaformat";
+    AMediaFormat *trackFormat = AMediaFormat_new();
+    ASSERT_NE(trackFormat, nullptr) << "AMediaFormat_new returned null AMediaformat";
+
+    for (int32_t idx = 0; idx < numTracks; idx++) {
+        MediaTrackHelper *track = mExtractor->getTrack(idx);
+        ASSERT_NE(track, nullptr) << "Failed to get track for index " << idx;
+
+        CMediaTrack *cTrack = wrap(track);
+        ASSERT_NE(cTrack, nullptr) << "Failed to get track wrapper for index " << idx;
+
+        MediaBufferGroup *bufferGroup = new MediaBufferGroup();
+        status = cTrack->start(track, bufferGroup->wrap());
+        ASSERT_EQ(OK, (media_status_t)status) << "Failed to start the track";
+
+        status = mExtractor->getTrackMetaData(extractorFormat, idx, 1);
+        ASSERT_EQ(OK, (media_status_t)status) << "Failed to get trackMetaData";
+
+        status = track->getFormat(trackFormat);
+        ASSERT_EQ(OK, (media_status_t)status) << "Failed to get track meta data";
+
+        const char *extractorMime, *trackMime;
+        AMediaFormat_getString(extractorFormat, AMEDIAFORMAT_KEY_MIME, &extractorMime);
+        AMediaFormat_getString(trackFormat, AMEDIAFORMAT_KEY_MIME, &trackMime);
+        ASSERT_TRUE(!strcmp(extractorMime, trackMime))
+                << "Extractor's format doesn't match track format";
+
+        if (!strncmp(extractorMime, "audio/", 6)) {
+            int32_t exSampleRate, exChannelCount;
+            int32_t trackSampleRate, trackChannelCount;
+            ASSERT_TRUE(AMediaFormat_getInt32(extractorFormat, AMEDIAFORMAT_KEY_CHANNEL_COUNT,
+                                              &exChannelCount));
+            ASSERT_TRUE(AMediaFormat_getInt32(extractorFormat, AMEDIAFORMAT_KEY_SAMPLE_RATE,
+                                              &exSampleRate));
+            ASSERT_TRUE(AMediaFormat_getInt32(trackFormat, AMEDIAFORMAT_KEY_CHANNEL_COUNT,
+                                              &trackChannelCount));
+            ASSERT_TRUE(AMediaFormat_getInt32(trackFormat, AMEDIAFORMAT_KEY_SAMPLE_RATE,
+                                              &trackSampleRate));
+            ASSERT_EQ(exChannelCount, trackChannelCount) << "ChannelCount not as expected";
+            ASSERT_EQ(exSampleRate, trackSampleRate) << "SampleRate not as expected";
+        } else {
+            int32_t exWidth, exHeight;
+            int32_t trackWidth, trackHeight;
+            ASSERT_TRUE(AMediaFormat_getInt32(extractorFormat, AMEDIAFORMAT_KEY_WIDTH, &exWidth));
+            ASSERT_TRUE(AMediaFormat_getInt32(extractorFormat, AMEDIAFORMAT_KEY_HEIGHT, &exHeight));
+            ASSERT_TRUE(AMediaFormat_getInt32(trackFormat, AMEDIAFORMAT_KEY_WIDTH, &trackWidth));
+            ASSERT_TRUE(AMediaFormat_getInt32(trackFormat, AMEDIAFORMAT_KEY_HEIGHT, &trackHeight));
+            ASSERT_EQ(exWidth, trackWidth) << "Width not as expected";
+            ASSERT_EQ(exHeight, trackHeight) << "Height not as expected";
+        }
+        status = cTrack->stop(track);
+        ASSERT_EQ(OK, status) << "Failed to stop the track";
+        delete bufferGroup;
+        delete track;
+    }
+    AMediaFormat_delete(trackFormat);
+    AMediaFormat_delete(extractorFormat);
+}
+
+TEST_P(ExtractorUnitTest, MultipleStartStopTest) {
+    if (mDisableTest) return;
+
+    ALOGV("Test %s extractor for multiple start and stop calls", GetParam().first.c_str());
+    string inputFileName = gEnv->getRes() + GetParam().second;
+
+    int32_t status = setDataSource(inputFileName);
+    ASSERT_EQ(status, 0) << "SetDataSource failed for " << GetParam().first << " extractor";
+
+    status = createExtractor();
+    ASSERT_EQ(status, 0) << "Extractor creation failed for " << GetParam().first << " extractor";
+
+    int32_t numTracks = mExtractor->countTracks();
+    ASSERT_GT(numTracks, 0) << "Extractor didn't find any track for the given clip";
+
+    // start/stop the tracks multiple times
+    for (int32_t count = 0; count < kMaxCount; count++) {
+        for (int32_t idx = 0; idx < numTracks; idx++) {
+            MediaTrackHelper *track = mExtractor->getTrack(idx);
+            ASSERT_NE(track, nullptr) << "Failed to get track for index " << idx;
+
+            CMediaTrack *cTrack = wrap(track);
+            ASSERT_NE(cTrack, nullptr) << "Failed to get track wrapper for index " << idx;
+
+            MediaBufferGroup *bufferGroup = new MediaBufferGroup();
+            status = cTrack->start(track, bufferGroup->wrap());
+            ASSERT_EQ(OK, (media_status_t)status) << "Failed to start the track";
+            MediaBufferHelper *buffer = nullptr;
+            status = track->read(&buffer);
+            if (buffer) {
+                ALOGV("buffer->data %p buffer->size() %zu buffer->range_length() %zu",
+                      buffer->data(), buffer->size(), buffer->range_length());
+                buffer->release();
+            }
+            status = cTrack->stop(track);
+            ASSERT_EQ(OK, status) << "Failed to stop the track";
+            delete bufferGroup;
+            delete track;
+        }
+    }
+}
+
+TEST_P(ExtractorUnitTest, SeekTest) {
+    // Both the FLAC and WAV extractors can return samples from any pts and mark the returned
+    // sample as a sync frame, so this seek test does not apply to FLAC and WAV.
+    if (mDisableTest || mExtractorName == FLAC || mExtractorName == WAV) return;
+
+    ALOGV("Validates %s Extractor behaviour for different seek modes", GetParam().first.c_str());
+    string inputFileName = gEnv->getRes() + GetParam().second;
+
+    int32_t status = setDataSource(inputFileName);
+    ASSERT_EQ(status, 0) << "SetDataSource failed for " << GetParam().first << " extractor";
+
+    status = createExtractor();
+    ASSERT_EQ(status, 0) << "Extractor creation failed for " << GetParam().first << " extractor";
+
+    int32_t numTracks = mExtractor->countTracks();
+    ASSERT_GT(numTracks, 0) << "Extractor didn't find any track for the given clip";
+
+    uint32_t seekFlag = mExtractor->flags();
+    if (!(seekFlag & MediaExtractorPluginHelper::CAN_SEEK)) {
+        cout << "[   WARN   ] Test Skipped. " << GetParam().first
+             << " Extractor doesn't support seek\n";
+        return;
+    }
+
+    vector<int64_t> seekablePoints;
+    for (int32_t idx = 0; idx < numTracks; idx++) {
+        MediaTrackHelper *track = mExtractor->getTrack(idx);
+        ASSERT_NE(track, nullptr) << "Failed to get track for index " << idx;
+
+        CMediaTrack *cTrack = wrap(track);
+        ASSERT_NE(cTrack, nullptr) << "Failed to get track wrapper for index " << idx;
+
+        // Get all the seekable points of a given input
+        MediaBufferGroup *bufferGroup = new MediaBufferGroup();
+        status = cTrack->start(track, bufferGroup->wrap());
+        ASSERT_EQ(OK, (media_status_t)status) << "Failed to start the track";
+        getSeekablePoints(seekablePoints, track);
+        ASSERT_GT(seekablePoints.size(), 0)
+                << "Failed to get seekable points for " << GetParam().first << " extractor";
+
+        AMediaFormat *trackFormat = AMediaFormat_new();
+        ASSERT_NE(trackFormat, nullptr) << "AMediaFormat_new returned null format";
+        status = track->getFormat(trackFormat);
+        ASSERT_EQ(OK, (media_status_t)status) << "Failed to get track meta data";
+
+        bool isOpus = false;
+        const char *mime;
+        AMediaFormat_getString(trackFormat, AMEDIAFORMAT_KEY_MIME, &mime);
+        if (!strcmp(mime, "audio/opus")) isOpus = true;
+        AMediaFormat_delete(trackFormat);
+
+        int32_t seekIdx = 0;
+        size_t seekablePointsSize = seekablePoints.size();
+        for (int32_t mode = CMediaTrackReadOptions::SEEK_PREVIOUS_SYNC;
+             mode <= CMediaTrackReadOptions::SEEK_CLOSEST; mode++) {
+            for (int32_t seekCount = 0; seekCount < kMaxCount; seekCount++) {
+                seekIdx = rand() % seekablePointsSize + 1;
+                if (seekIdx >= seekablePointsSize) seekIdx = seekablePointsSize - 1;
+
+                int64_t seekToTimeStamp = seekablePoints[seekIdx];
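+                // Move the seek target slightly before the chosen sync point (by 1/8th of
+                // the gap to the previous sync point) so that SEEK_PREVIOUS_SYNC and
+                // SEEK_NEXT_SYNC resolve to different, predictable sync frames.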
+                if (seekablePointsSize > 1) {
+                    int64_t prevTimeStamp = seekablePoints[seekIdx - 1];
+                    seekToTimeStamp = seekToTimeStamp - ((seekToTimeStamp - prevTimeStamp) >> 3);
+                }
+
+                // Opus has a seekPreRollUs. TimeStamp returned by the
+                // extractor is calculated based on (seekPts - seekPreRollUs).
+                // So we add the preRoll value to the timeStamp we want to seek to.
+                if (isOpus) {
+                    seekToTimeStamp += kOpusSeekPreRollUs;
+                }
+
+                MediaTrackHelper::ReadOptions *options = new MediaTrackHelper::ReadOptions(
+                        mode | CMediaTrackReadOptions::SEEK, seekToTimeStamp);
+                ASSERT_NE(options, nullptr) << "Cannot create read option";
+
+                MediaBufferHelper *buffer = nullptr;
+                status = track->read(&buffer, options);
+                if (status == AMEDIA_ERROR_END_OF_STREAM) {
+                    delete options;
+                    continue;
+                }
+                if (buffer) {
+                    AMediaFormat *metaData = buffer->meta_data();
+                    int64_t timeStamp;
+                    AMediaFormat_getInt64(metaData, AMEDIAFORMAT_KEY_TIME_US, &timeStamp);
+                    buffer->release();
+
+                    // CMediaTrackReadOptions::SEEK is 8. Using mask 0111b to get true modes
+                    switch (mode & 0x7) {
+                        case CMediaTrackReadOptions::SEEK_PREVIOUS_SYNC:
+                            if (seekablePointsSize == 1) {
+                                EXPECT_EQ(timeStamp, seekablePoints[seekIdx]);
+                            } else {
+                                EXPECT_EQ(timeStamp, seekablePoints[seekIdx - 1]);
+                            }
+                            break;
+                        case CMediaTrackReadOptions::SEEK_NEXT_SYNC:
+                        case CMediaTrackReadOptions::SEEK_CLOSEST_SYNC:
+                        case CMediaTrackReadOptions::SEEK_CLOSEST:
+                            EXPECT_EQ(timeStamp, seekablePoints[seekIdx]);
+                            break;
+                        default:
+                            break;
+                    }
+                }
+                delete options;
+            }
+        }
+        status = cTrack->stop(track);
+        ASSERT_EQ(OK, status) << "Failed to stop the track";
+        delete bufferGroup;
+        delete track;
+    }
+    seekablePoints.clear();
+}
+
+// TODO: (b/145332185)
+// Add MIDI inputs
+INSTANTIATE_TEST_SUITE_P(ExtractorUnitTestAll, ExtractorUnitTest,
+                         ::testing::Values(make_pair("aac", "loudsoftaac.aac"),
+                                           make_pair("amr", "testamr.amr"),
+                                           make_pair("amr", "amrwb.wav"),
+                                           make_pair("ogg", "john_cage.ogg"),
+                                           make_pair("wav", "monotestgsm.wav"),
+                                           make_pair("mpeg2ts", "segment000001.ts"),
+                                           make_pair("flac", "sinesweepflac.flac"),
+                                           make_pair("ogg", "testopus.opus"),
+                                           make_pair("mkv", "sinesweepvorbis.mkv"),
+                                           make_pair("mpeg4", "sinesweepoggmp4.mp4"),
+                                           make_pair("mp3", "sinesweepmp3lame.mp3"),
+                                           make_pair("mkv", "swirl_144x136_vp9.webm"),
+                                           make_pair("mkv", "swirl_144x136_vp8.webm"),
+                                           make_pair("mpeg2ps", "swirl_144x136_mpeg2.mpg"),
+                                           make_pair("mpeg4", "swirl_132x130_mpeg4.mp4")));
+
+int main(int argc, char **argv) {
+    gEnv = new ExtractorUnitTestEnvironment();
+    ::testing::AddGlobalTestEnvironment(gEnv);
+    ::testing::InitGoogleTest(&argc, argv);
+    int status = gEnv->initFromOptions(argc, argv);
+    if (status == 0) {
+        status = RUN_ALL_TESTS();
+        ALOGV("Test result = %d\n", status);
+    }
+    return status;
+}
diff --git a/media/extractors/tests/ExtractorUnitTestEnvironment.h b/media/extractors/tests/ExtractorUnitTestEnvironment.h
new file mode 100644
index 0000000..fce8fc2
--- /dev/null
+++ b/media/extractors/tests/ExtractorUnitTestEnvironment.h
@@ -0,0 +1,72 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __EXTRACTOR_UNIT_TEST_ENVIRONMENT_H__
+#define __EXTRACTOR_UNIT_TEST_ENVIRONMENT_H__
+
+#include <gtest/gtest.h>
+
+#include <getopt.h>
+
+using namespace std;
+
+class ExtractorUnitTestEnvironment : public ::testing::Environment {
+  public:
+    ExtractorUnitTestEnvironment() : res("/data/local/tmp/") {}
+
+    // Parses the command line arguments
+    int initFromOptions(int argc, char **argv);
+
+    void setRes(const char *_res) { res = _res; }
+
+    const string getRes() const { return res; }
+
+  private:
+    string res;
+};
+
+int ExtractorUnitTestEnvironment::initFromOptions(int argc, char **argv) {
+    static struct option options[] = {{"res", required_argument, 0, 'P'}, {0, 0, 0, 0}};
+
+    while (true) {
+        int index = 0;
+        int c = getopt_long(argc, argv, "P:", options, &index);
+        if (c == -1) {
+            break;
+        }
+
+        switch (c) {
+            case 'P':
+                setRes(optarg);
+                break;
+            default:
+                break;
+        }
+    }
+
+    if (optind < argc) {
+        fprintf(stderr,
+                "unrecognized option: %s\n\n"
+                "usage: %s <gtest options> <test options>\n\n"
+                "test options are:\n\n"
+                "-P, --res: Resource files directory location\n",
+                argv[optind ?: 1], argv[0]);
+        return 2;
+    }
+    return 0;
+}
+
+#endif  // __EXTRACTOR_UNIT_TEST_ENVIRONMENT_H__
diff --git a/media/extractors/tests/README.md b/media/extractors/tests/README.md
new file mode 100644
index 0000000..6e02d3e
--- /dev/null
+++ b/media/extractors/tests/README.md
@@ -0,0 +1,34 @@
+## Media Testing ##
+---
+#### Extractor :
+The Extractor Test Suite validates the extractors available in the device.
+
+Run the following steps to build the test suite:
+```
+m ExtractorUnitTest
+```
+
+The 32-bit binaries will be created in the following path: ${OUT}/data/nativetest/
+
+The 64-bit binaries will be created in the following path: ${OUT}/data/nativetest64/
+
+To test the 64-bit binary, push it from nativetest64.
+```
+adb push ${OUT}/data/nativetest64/ExtractorUnitTest/ExtractorUnitTest /data/local/tmp/
+```
+
+To test the 32-bit binary, push it from nativetest.
+```
+adb push ${OUT}/data/nativetest/ExtractorUnitTest/ExtractorUnitTest /data/local/tmp/
+```
+
+The resource files for the tests are taken from [here](https://drive.google.com/drive/folders/1Z9nCIRB6pGLvb5mPkF8BURa5Nc6cY9pY).
+Download the extractor folder and push all the files in it to /data/local/tmp/ on the device for testing.
+```
+adb push extractor /data/local/tmp/
+```
+
+usage: ExtractorUnitTest -P \<path_to_folder\>
+```
+adb shell /data/local/tmp/ExtractorUnitTest -P /data/local/tmp/extractor/
+```
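+
+The binary is built on googletest, so the standard gtest options should also work; for example, a
+single test can be selected with `--gtest_filter` (illustrative invocation, assuming the resource
+files have been pushed as above):
+```
+adb shell /data/local/tmp/ExtractorUnitTest -P /data/local/tmp/extractor/ --gtest_filter="*SeekTest*"
+```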
diff --git a/media/extractors/wav/Android.bp b/media/extractors/wav/Android.bp
index 7e89271..8ce5c3f 100644
--- a/media/extractors/wav/Android.bp
+++ b/media/extractors/wav/Android.bp
@@ -1,4 +1,7 @@
-cc_library_shared {
+cc_library {
+    name: "libwavextractor",
+
+    defaults: ["extractor-defaults"],
 
     srcs: ["WAVExtractor.cpp"],
 
@@ -8,8 +11,6 @@
 
     shared_libs: [
         "libbinder_ndk",
-        "liblog",
-        "libmediandk",
     ],
 
     static_libs: [
@@ -17,25 +18,4 @@
         "libfifo",
         "libstagefright_foundation",
     ],
-
-    name: "libwavextractor",
-    relative_install_path: "extractors",
-
-    compile_multilib: "first",
-
-    cflags: [
-        "-Werror",
-        "-Wall",
-        "-fvisibility=hidden",
-    ],
-    version_script: "exports.lds",
-
-    sanitize: {
-        cfi: true,
-        misc_undefined: [
-            "unsigned-integer-overflow",
-            "signed-integer-overflow",
-        ],
-    },
-
 }
diff --git a/media/libaaudio/examples/loopback/src/LoopbackAnalyzer.h b/media/libaaudio/examples/loopback/src/LoopbackAnalyzer.h
deleted file mode 100644
index 8eb70b1..0000000
--- a/media/libaaudio/examples/loopback/src/LoopbackAnalyzer.h
+++ /dev/null
@@ -1,1114 +0,0 @@
-/*
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * Tools for measuring latency and for detecting glitches.
- * These classes are pure math and can be used with any audio system.
- */
-
-#ifndef AAUDIO_EXAMPLES_LOOPBACK_ANALYSER_H
-#define AAUDIO_EXAMPLES_LOOPBACK_ANALYSER_H
-
-#include <algorithm>
-#include <assert.h>
-#include <cctype>
-#include <math.h>
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-
-#include <audio_utils/sndfile.h>
-
-// Tag for machine readable results as property = value pairs
-#define LOOPBACK_RESULT_TAG      "RESULT: "
-
-constexpr int32_t kDefaultSampleRate = 48000;
-constexpr int32_t kMillisPerSecond   = 1000;
-constexpr int32_t kMinLatencyMillis  = 4;    // arbitrary and very low
-constexpr int32_t kMaxLatencyMillis  = 400;  // arbitrary and generous
-constexpr double  kMaxEchoGain       = 10.0; // based on experiments, otherwise too noisy
-constexpr double  kMinimumConfidence = 0.5;
-
-static void printAudioScope(float sample) {
-    const int maxStars = 80; // arbitrary, fits on one line
-    char c = '*';
-    if (sample < -1.0) {
-        sample = -1.0;
-        c = '$';
-    } else if (sample > 1.0) {
-        sample = 1.0;
-        c = '$';
-    }
-    int numSpaces = (int) (((sample + 1.0) * 0.5) * maxStars);
-    for (int i = 0; i < numSpaces; i++) {
-        putchar(' ');
-    }
-    printf("%c\n", c);
-}
-
-/*
-
-FIR filter designed with
-http://t-filter.appspot.com
-
-sampling frequency: 48000 Hz
-
-* 0 Hz - 8000 Hz
-  gain = 1.2
-  desired ripple = 5 dB
-  actual ripple = 5.595266169703693 dB
-
-* 12000 Hz - 20000 Hz
-  gain = 0
-  desired attenuation = -40 dB
-  actual attenuation = -37.58691566571914 dB
-
-*/
-
-#define FILTER_TAP_NUM 11
-
-static const float sFilterTaps8000[FILTER_TAP_NUM] = {
-        -0.05944219353343189f,
-        -0.07303434839503208f,
-        -0.037690487672689066f,
-        0.1870480506596512f,
-        0.3910337357836833f,
-        0.5333672385425637f,
-        0.3910337357836833f,
-        0.1870480506596512f,
-        -0.037690487672689066f,
-        -0.07303434839503208f,
-        -0.05944219353343189f
-};
-
-class LowPassFilter {
-public:
-
-    /*
-     * Filter one input sample.
-     * @return filtered output
-     */
-    float filter(float input) {
-        float output = 0.0f;
-        mX[mCursor] = input;
-        // Index backwards over x.
-        int xIndex = mCursor + FILTER_TAP_NUM;
-        // Write twice so we avoid having to wrap in the middle of the convolution.
-        mX[xIndex] = input;
-        for (int i = 0; i < FILTER_TAP_NUM; i++) {
-            output += sFilterTaps8000[i] * mX[xIndex--];
-        }
-        if (++mCursor >= FILTER_TAP_NUM) {
-            mCursor = 0;
-        }
-        return output;
-    }
-
-    /**
-     * @return true if PASSED
-     */
-    bool test() {
-        // Measure the impulse of the filter at different phases so we exercise
-        // all the wraparound cases in the FIR.
-        for (int offset = 0; offset < (FILTER_TAP_NUM * 2); offset++ ) {
-            // printf("LowPassFilter: cursor = %d\n", mCursor);
-            // Offset by one each time.
-            if (filter(0.0f) != 0.0f) {
-                printf("ERROR: filter should return 0.0 before impulse response\n");
-                return false;
-            }
-            for (int i = 0; i < FILTER_TAP_NUM; i++) {
-                float output = filter((i == 0) ? 1.0f : 0.0f); // impulse
-                if (output != sFilterTaps8000[i]) {
-                    printf("ERROR: filter should return impulse response\n");
-                    return false;
-                }
-            }
-            for (int i = 0; i < FILTER_TAP_NUM; i++) {
-                if (filter(0.0f) != 0.0f) {
-                    printf("ERROR: filter should return 0.0 after impulse response\n");
-                    return false;
-                }
-            }
-        }
-        return true;
-    }
-
-private:
-    float   mX[FILTER_TAP_NUM * 2]{}; // twice as big as needed to avoid wrapping
-    int32_t mCursor = 0;
-};
-
-// A narrow impulse seems to have better immunity against over estimating the
-// latency due to detecting subharmonics by the auto-correlator.
-static const float s_Impulse[] = {
-        0.0f, 0.0f, 0.0f, 0.0f, 0.3f, // silence on each side of the impulse
-        0.99f, 0.0f, -0.99f, // bipolar with one zero crossing in middle
-        -0.3f, 0.0f, 0.0f, 0.0f, 0.0f
-};
-
-constexpr int32_t kImpulseSizeInFrames = (int32_t)(sizeof(s_Impulse) / sizeof(s_Impulse[0]));
-
-class PseudoRandom {
-public:
-    PseudoRandom() {}
-    PseudoRandom(int64_t seed)
-            :    mSeed(seed)
-    {}
-
-    /**
-     * Returns the next random double from -1.0 to 1.0
-     *
-     * @return value from -1.0 to 1.0
-     */
-     double nextRandomDouble() {
-        return nextRandomInteger() * (0.5 / (((int32_t)1) << 30));
-    }
-
-    /** Calculate random 32 bit number using linear-congruential method. */
-    int32_t nextRandomInteger() {
-        // Use values for 64-bit sequence from MMIX by Donald Knuth.
-        mSeed = (mSeed * (int64_t)6364136223846793005) + (int64_t)1442695040888963407;
-        return (int32_t) (mSeed >> 32); // The higher bits have a longer sequence.
-    }
-
-private:
-    int64_t mSeed = 99887766;
-};
-
-
-typedef struct LatencyReport_s {
-    double latencyInFrames;
-    double confidence;
-} LatencyReport;
-
-static double calculateCorrelation(const float *a,
-                                   const float *b,
-                                   int windowSize)
-{
-    double correlation = 0.0;
-    double sumProducts = 0.0;
-    double sumSquares = 0.0;
-
-    // Correlate a against b.
-    for (int i = 0; i < windowSize; i++) {
-        float s1 = a[i];
-        float s2 = b[i];
-        // Use a normalized cross-correlation.
-        sumProducts += s1 * s2;
-        sumSquares += ((s1 * s1) + (s2 * s2));
-    }
-
-    if (sumSquares >= 0.00000001) {
-        correlation = (float) (2.0 * sumProducts / sumSquares);
-    }
-    return correlation;
-}
-
-static int measureLatencyFromEchos(const float *data,
-                                   int32_t numFloats,
-                                   int32_t sampleRate,
-                                   LatencyReport *report) {
-    // Allocate results array
-    const int minReasonableLatencyFrames = sampleRate * kMinLatencyMillis / kMillisPerSecond;
-    const int maxReasonableLatencyFrames = sampleRate * kMaxLatencyMillis / kMillisPerSecond;
-    int32_t maxCorrelationSize = maxReasonableLatencyFrames * 3;
-    int numCorrelations = std::min(numFloats, maxCorrelationSize);
-    float *correlations = new float[numCorrelations]{};
-    float *harmonicSums = new float[numCorrelations]{};
-
-    // Perform sliding auto-correlation.
-    // Skip first frames to avoid huge peak at zero offset.
-    for (int i = minReasonableLatencyFrames; i < numCorrelations; i++) {
-        int32_t remaining = numFloats - i;
-        float correlation = (float) calculateCorrelation(&data[i], data, remaining);
-        correlations[i] = correlation;
-        // printf("correlation[%d] = %f\n", ic, correlation);
-    }
-
-    // Apply a technique similar to Harmonic Product Spectrum Analysis to find echo fundamental.
-    // Add higher harmonics mapped onto lower harmonics. This reinforces the "fundamental" echo.
-    const int numEchoes = 8;
-    for (int partial = 1; partial < numEchoes; partial++) {
-        for (int i = minReasonableLatencyFrames; i < numCorrelations; i++) {
-            harmonicSums[i / partial] += correlations[i] / partial;
-        }
-    }
-
-    // Find highest peak in correlation array.
-    float maxCorrelation = 0.0;
-    int peakIndex = 0;
-    for (int i = 0; i < numCorrelations; i++) {
-        if (harmonicSums[i] > maxCorrelation) {
-            maxCorrelation = harmonicSums[i];
-            peakIndex = i;
-            // printf("maxCorrelation = %f at %d\n", maxCorrelation, peakIndex);
-        }
-    }
-    report->latencyInFrames = peakIndex;
-/*
-    {
-        int32_t topPeak = peakIndex * 7 / 2;
-        for (int i = 0; i < topPeak; i++) {
-            float sample = harmonicSums[i];
-            printf("%4d: %7.5f ", i, sample);
-            printAudioScope(sample);
-        }
-    }
-*/
-
-    // Calculate confidence.
-    if (maxCorrelation < 0.001) {
-        report->confidence = 0.0;
-    } else {
-        // Compare peak to average value around peak.
-        int32_t numSamples = std::min(numCorrelations, peakIndex * 2);
-        if (numSamples <= 0) {
-            report->confidence = 0.0;
-        } else {
-            double sum = 0.0;
-            for (int i = 0; i < numSamples; i++) {
-                sum += harmonicSums[i];
-            }
-            const double average = sum / numSamples;
-            const double ratio = average / maxCorrelation; // will be < 1.0
-            report->confidence = 1.0 - sqrt(ratio);
-        }
-    }
-
-    delete[] correlations;
-    delete[] harmonicSums;
-    return 0;
-}
-
-class AudioRecording
-{
-public:
-    AudioRecording() {
-    }
-    ~AudioRecording() {
-        delete[] mData;
-    }
-
-    void allocate(int maxFrames) {
-        delete[] mData;
-        mData = new float[maxFrames];
-        mMaxFrames = maxFrames;
-    }
-
-    // Write SHORT data from the first channel.
-    int32_t write(int16_t *inputData, int32_t inputChannelCount, int32_t numFrames) {
-        // stop at end of buffer
-        if ((mFrameCounter + numFrames) > mMaxFrames) {
-            numFrames = mMaxFrames - mFrameCounter;
-        }
-        for (int i = 0; i < numFrames; i++) {
-            mData[mFrameCounter++] = inputData[i * inputChannelCount] * (1.0f / 32768);
-        }
-        return numFrames;
-    }
-
-    // Write FLOAT data from the first channel.
-    int32_t write(float *inputData, int32_t inputChannelCount, int32_t numFrames) {
-        // stop at end of buffer
-        if ((mFrameCounter + numFrames) > mMaxFrames) {
-            numFrames = mMaxFrames - mFrameCounter;
-        }
-        for (int i = 0; i < numFrames; i++) {
-            mData[mFrameCounter++] = inputData[i * inputChannelCount];
-        }
-        return numFrames;
-    }
-
-    int32_t size() {
-        return mFrameCounter;
-    }
-
-    float *getData() {
-        return mData;
-    }
-
-    void setSampleRate(int32_t sampleRate) {
-        mSampleRate = sampleRate;
-    }
-
-    int32_t getSampleRate() {
-        return mSampleRate;
-    }
-
-    int save(const char *fileName, bool writeShorts = true) {
-        SNDFILE *sndFile = nullptr;
-        int written = 0;
-        SF_INFO info = {
-                .frames = mFrameCounter,
-                .samplerate = mSampleRate,
-                .channels = 1,
-                .format = SF_FORMAT_WAV | (writeShorts ? SF_FORMAT_PCM_16 : SF_FORMAT_FLOAT)
-        };
-
-        sndFile = sf_open(fileName, SFM_WRITE, &info);
-        if (sndFile == nullptr) {
-            printf("AudioRecording::save(%s) failed to open file\n", fileName);
-            return -errno;
-        }
-
-        written = sf_writef_float(sndFile, mData, mFrameCounter);
-
-        sf_close(sndFile);
-        return written;
-    }
-
-    int load(const char *fileName) {
-        SNDFILE *sndFile = nullptr;
-        SF_INFO info;
-
-        sndFile = sf_open(fileName, SFM_READ, &info);
-        if (sndFile == nullptr) {
-            printf("AudioRecording::load(%s) failed to open file\n", fileName);
-            return -errno;
-        }
-
-        assert(info.channels == 1);
-        assert(info.format == SF_FORMAT_FLOAT);
-
-        setSampleRate(info.samplerate);
-        allocate(info.frames);
-        mFrameCounter = sf_readf_float(sndFile, mData, info.frames);
-
-        sf_close(sndFile);
-        return mFrameCounter;
-    }
-
-    /**
-     * Square the samples so they are all positive and so the peaks are emphasized.
-     */
-    void square() {
-        for (int i = 0; i < mFrameCounter; i++) {
-            const float sample = mData[i];
-            mData[i] = sample * sample;
-        }
-    }
-
-    /**
-     * Low pass filter the recording using a simple FIR filter.
-     * Note that the lowpass filter cutoff tracks the sample rate.
-     * That is OK because the impulse width is a fixed number of samples.
-     */
-    void lowPassFilter() {
-        for (int i = 0; i < mFrameCounter; i++) {
-            mData[i] = mLowPassFilter.filter(mData[i]);
-        }
-    }
-
-    /**
-     * Remove DC offset using a one-pole one-zero IIR filter.
-     */
-    void dcBlocker() {
-        const float R = 0.996; // narrow notch at zero Hz
-        float x1 = 0.0;
-        float y1 = 0.0;
-        for (int i = 0; i < mFrameCounter; i++) {
-            const float x = mData[i];
-            const float y = x - x1 + (R * y1);
-            mData[i] = y;
-            y1 = y;
-            x1 = x;
-        }
-    }
-
-private:
-    float        *mData = nullptr;
-    int32_t       mFrameCounter = 0;
-    int32_t       mMaxFrames = 0;
-    int32_t       mSampleRate = kDefaultSampleRate; // common default
-    LowPassFilter mLowPassFilter;
-};
-
-// ====================================================================================
-class LoopbackProcessor {
-public:
-    virtual ~LoopbackProcessor() = default;
-
-
-    enum process_result {
-        PROCESS_RESULT_OK,
-        PROCESS_RESULT_GLITCH
-    };
-
-    virtual void reset() {}
-
-    virtual process_result process(float *inputData, int inputChannelCount,
-                 float *outputData, int outputChannelCount,
-                 int numFrames) = 0;
-
-
-    virtual void report() = 0;
-
-    virtual void printStatus() {};
-
-    int32_t getResult() {
-        return mResult;
-    }
-
-    void setResult(int32_t result) {
-        mResult = result;
-    }
-
-    virtual bool isDone() {
-        return false;
-    }
-
-    virtual int save(const char *fileName) {
-        (void) fileName;
-        return AAUDIO_ERROR_UNIMPLEMENTED;
-    }
-
-    virtual int load(const char *fileName) {
-        (void) fileName;
-        return AAUDIO_ERROR_UNIMPLEMENTED;
-    }
-
-    virtual void setSampleRate(int32_t sampleRate) {
-        mSampleRate = sampleRate;
-    }
-
-    int32_t getSampleRate() {
-        return mSampleRate;
-    }
-
-    // Measure peak amplitude of buffer.
-    static float measurePeakAmplitude(float *inputData, int inputChannelCount, int numFrames) {
-        float peak = 0.0f;
-        for (int i = 0; i < numFrames; i++) {
-            const float pos = fabs(*inputData);
-            if (pos > peak) {
-                peak = pos;
-            }
-            inputData += inputChannelCount;
-        }
-        return peak;
-    }
-
-
-private:
-    int32_t mSampleRate = kDefaultSampleRate;
-    int32_t mResult = 0;
-};
-
-class PeakDetector {
-public:
-    float process(float input) {
-        float output = mPrevious * mDecay;
-        if (input > output) {
-            output = input;
-        }
-        mPrevious = output;
-        return output;
-    }
-
-private:
-    float  mDecay = 0.99f;
-    float  mPrevious = 0.0f;
-};
-
-// ====================================================================================
-/**
- * Measure latency given a loopback stream data.
- * Uses a state machine to cycle through various stages including:
- *
- */
-class EchoAnalyzer : public LoopbackProcessor {
-public:
-
-    EchoAnalyzer() : LoopbackProcessor() {
-        mAudioRecording.allocate(2 * getSampleRate());
-        mAudioRecording.setSampleRate(getSampleRate());
-    }
-
-    void setSampleRate(int32_t sampleRate) override {
-        LoopbackProcessor::setSampleRate(sampleRate);
-        mAudioRecording.setSampleRate(sampleRate);
-    }
-
-    void reset() override {
-        mDownCounter = getSampleRate() / 2;
-        mLoopCounter = 0;
-        mMeasuredLoopGain = 0.0f;
-        mEchoGain = 1.0f;
-        mState = STATE_INITIAL_SILENCE;
-    }
-
-    virtual bool isDone() {
-        return mState == STATE_DONE || mState == STATE_FAILED;
-    }
-
-    void setGain(float gain) {
-        mEchoGain = gain;
-    }
-
-    float getGain() {
-        return mEchoGain;
-    }
-
-    bool testLowPassFilter() {
-        LowPassFilter filter;
-        return filter.test();
-    }
-
-    void report() override {
-        printf("EchoAnalyzer ---------------\n");
-        if (getResult() != 0) {
-            printf(LOOPBACK_RESULT_TAG "result          = %d\n", getResult());
-            return;
-        }
-
-        // printf("LowPassFilter test %s\n", testLowPassFilter() ? "PASSED" : "FAILED");
-
-        printf(LOOPBACK_RESULT_TAG "measured.gain          = %8f\n", mMeasuredLoopGain);
-        printf(LOOPBACK_RESULT_TAG "echo.gain              = %8f\n", mEchoGain);
-        printf(LOOPBACK_RESULT_TAG "test.state             = %8d\n", mState);
-        printf(LOOPBACK_RESULT_TAG "test.state.name        = %8s\n", convertStateToText(mState));
-
-        if (mState == STATE_WAITING_FOR_SILENCE) {
-            printf("WARNING - Stuck waiting for silence. Input may be too noisy!\n");
-            setResult(ERROR_NOISY);
-        } else if (mMeasuredLoopGain >= 0.9999) {
-            printf("   ERROR - clipping, turn down volume slightly\n");
-            setResult(ERROR_CLIPPING);
-        } else if (mState != STATE_DONE && mState != STATE_GATHERING_ECHOS) {
-            printf("WARNING - Bad state. Check volume on device.\n");
-            setResult(ERROR_INVALID_STATE);
-        } else {
-            // Cleanup the signal to improve the auto-correlation.
-            mAudioRecording.dcBlocker();
-            mAudioRecording.square();
-            mAudioRecording.lowPassFilter();
-
-            printf("Please wait several seconds for auto-correlation to complete.\n");
-            measureLatencyFromEchos(mAudioRecording.getData(),
-                                    mAudioRecording.size(),
-                                    getSampleRate(),
-                                    &mLatencyReport);
-
-            double latencyMillis = kMillisPerSecond * (double) mLatencyReport.latencyInFrames
-                                   / getSampleRate();
-            printf(LOOPBACK_RESULT_TAG "latency.frames         = %8.2f\n",
-                   mLatencyReport.latencyInFrames);
-            printf(LOOPBACK_RESULT_TAG "latency.msec           = %8.2f\n",
-                   latencyMillis);
-            printf(LOOPBACK_RESULT_TAG "latency.confidence     = %8.6f\n",
-                   mLatencyReport.confidence);
-            if (mLatencyReport.confidence < kMinimumConfidence) {
-                printf("   ERROR - confidence too low!\n");
-                setResult(ERROR_CONFIDENCE);
-            }
-        }
-    }
-
-    void printStatus() override {
-        printf("st = %d, echo gain = %f ", mState, mEchoGain);
-    }
-
-    void sendImpulses(float *outputData, int outputChannelCount, int numFrames) {
-        while (numFrames-- > 0) {
-            float sample = s_Impulse[mSampleIndex++];
-            if (mSampleIndex >= kImpulseSizeInFrames) {
-                mSampleIndex = 0;
-            }
-
-            *outputData = sample;
-            outputData += outputChannelCount;
-        }
-    }
-
-    void sendOneImpulse(float *outputData, int outputChannelCount) {
-        mSampleIndex = 0;
-        sendImpulses(outputData, outputChannelCount, kImpulseSizeInFrames);
-    }
-
-    // @return number of frames for a typical block of processing
-    int32_t getBlockFrames() {
-        return getSampleRate() / 8;
-    }
-
-    process_result process(float *inputData, int inputChannelCount,
-                 float *outputData, int outputChannelCount,
-                 int numFrames) override {
-        int channelsValid = std::min(inputChannelCount, outputChannelCount);
-        float peak = 0.0f;
-        int numWritten;
-        int numSamples;
-
-        echo_state nextState = mState;
-
-        switch (mState) {
-            case STATE_INITIAL_SILENCE:
-                // Output silence at the beginning.
-                numSamples = numFrames * outputChannelCount;
-                for (int i = 0; i < numSamples; i++) {
-                    outputData[i] = 0;
-                }
-                mDownCounter -= numFrames;
-                if (mDownCounter <= 0) {
-                    nextState = STATE_MEASURING_GAIN;
-                    //printf("%5d: switch to STATE_MEASURING_GAIN\n", mLoopCounter);
-                    mDownCounter = getBlockFrames() * 2;
-                }
-                break;
-
-            case STATE_MEASURING_GAIN:
-                sendImpulses(outputData, outputChannelCount, numFrames);
-                peak = measurePeakAmplitude(inputData, inputChannelCount, numFrames);
-                // If we get several in a row then go to next state.
-                if (peak > mPulseThreshold) {
-                    mDownCounter -= numFrames;
-                    if (mDownCounter <= 0) {
-                        //printf("%5d: switch to STATE_WAITING_FOR_SILENCE, measured peak = %f\n",
-                        //       mLoopCounter, peak);
-                        mDownCounter = getBlockFrames();
-                        mMeasuredLoopGain = peak;  // assumes original pulse amplitude is one
-                        mSilenceThreshold = peak * 0.1; // scale silence to measured pulse
-                        // Calculate gain that will give us a nice decaying echo.
-                        mEchoGain = mDesiredEchoGain / mMeasuredLoopGain;
-                        if (mEchoGain > kMaxEchoGain) {
-                            printf("ERROR - loop gain too low. Increase the volume.\n");
-                            nextState = STATE_FAILED;
-                        } else {
-                            nextState = STATE_WAITING_FOR_SILENCE;
-                        }
-                    }
-                } else if (numFrames > kImpulseSizeInFrames){ // ignore short callbacks
-                    mDownCounter = getBlockFrames();
-                }
-                break;
-
-            case STATE_WAITING_FOR_SILENCE:
-                // Output silence and wait for the echos to die down.
-                numSamples = numFrames * outputChannelCount;
-                for (int i = 0; i < numSamples; i++) {
-                    outputData[i] = 0;
-                }
-                peak = measurePeakAmplitude(inputData, inputChannelCount, numFrames);
-                // If we get several in a row then go to next state.
-                if (peak < mSilenceThreshold) {
-                    mDownCounter -= numFrames;
-                    if (mDownCounter <= 0) {
-                        nextState = STATE_SENDING_PULSE;
-                        //printf("%5d: switch to STATE_SENDING_PULSE\n", mLoopCounter);
-                        mDownCounter = getBlockFrames();
-                    }
-                } else {
-                    mDownCounter = getBlockFrames();
-                }
-                break;
-
-            case STATE_SENDING_PULSE:
-                mAudioRecording.write(inputData, inputChannelCount, numFrames);
-                sendOneImpulse(outputData, outputChannelCount);
-                nextState = STATE_GATHERING_ECHOS;
-                //printf("%5d: switch to STATE_GATHERING_ECHOS\n", mLoopCounter);
-                break;
-
-            case STATE_GATHERING_ECHOS:
-                numWritten = mAudioRecording.write(inputData, inputChannelCount, numFrames);
-                peak = measurePeakAmplitude(inputData, inputChannelCount, numFrames);
-                if (peak > mMeasuredLoopGain) {
-                    mMeasuredLoopGain = peak;  // AGC might be raising gain so adjust it on the fly.
-                    // Recalculate gain that will give us a nice decaying echo.
-                    mEchoGain = mDesiredEchoGain / mMeasuredLoopGain;
-                }
-                // Echo input to output.
-                for (int i = 0; i < numFrames; i++) {
-                    int ic;
-                    for (ic = 0; ic < channelsValid; ic++) {
-                        outputData[ic] = inputData[ic] * mEchoGain;
-                    }
-                    for (; ic < outputChannelCount; ic++) {
-                        outputData[ic] = 0;
-                    }
-                    inputData += inputChannelCount;
-                    outputData += outputChannelCount;
-                }
-                if (numWritten  < numFrames) {
-                    nextState = STATE_DONE;
-                }
-                break;
-
-            case STATE_DONE:
-            case STATE_FAILED:
-            default:
-                break;
-        }
-
-        mState = nextState;
-        mLoopCounter++;
-        return PROCESS_RESULT_OK;
-    }
-
-    int save(const char *fileName) override {
-        return mAudioRecording.save(fileName);
-    }
-
-    int load(const char *fileName) override {
-        int result = mAudioRecording.load(fileName);
-        setSampleRate(mAudioRecording.getSampleRate());
-        mState = STATE_DONE;
-        return result;
-    }
-
-private:
-
-    enum error_code {
-        ERROR_OK = 0,
-        ERROR_NOISY = -99,
-        ERROR_CLIPPING,
-        ERROR_CONFIDENCE,
-        ERROR_INVALID_STATE
-    };
-
-    enum echo_state {
-        STATE_INITIAL_SILENCE,
-        STATE_MEASURING_GAIN,
-        STATE_WAITING_FOR_SILENCE,
-        STATE_SENDING_PULSE,
-        STATE_GATHERING_ECHOS,
-        STATE_DONE,
-        STATE_FAILED
-    };
-
-    const char *convertStateToText(echo_state state) {
-        const char *result = "Unknown";
-        switch(state) {
-            case STATE_INITIAL_SILENCE:
-                result = "INIT";
-                break;
-            case STATE_MEASURING_GAIN:
-                result = "GAIN";
-                break;
-            case STATE_WAITING_FOR_SILENCE:
-                result = "SILENCE";
-                break;
-            case STATE_SENDING_PULSE:
-                result = "PULSE";
-                break;
-            case STATE_GATHERING_ECHOS:
-                result = "ECHOS";
-                break;
-            case STATE_DONE:
-                result = "DONE";
-                break;
-            case STATE_FAILED:
-                result = "FAILED";
-                break;
-        }
-        return result;
-    }
-
-
-    int32_t         mDownCounter = 500;
-    int32_t         mLoopCounter = 0;
-    int32_t         mSampleIndex = 0;
-    float           mPulseThreshold = 0.02f;
-    float           mSilenceThreshold = 0.002f;
-    float           mMeasuredLoopGain = 0.0f;
-    float           mDesiredEchoGain = 0.95f;
-    float           mEchoGain = 1.0f;
-    echo_state      mState = STATE_INITIAL_SILENCE;
-
-    AudioRecording  mAudioRecording; // contains only the input after the gain detection burst
-    LatencyReport   mLatencyReport;
-    // PeakDetector    mPeakDetector;
-};
-
-
-// ====================================================================================
-/**
- * Output a steady sinewave and analyze the return signal.
- *
- * Use a cosine transform to measure the predicted magnitude and relative phase of the
- * looped back sine wave. Then generate a predicted signal and compare with the actual signal.
- */
-class SineAnalyzer : public LoopbackProcessor {
-public:
-
-    void report() override {
-        printf("SineAnalyzer ------------------\n");
-        printf(LOOPBACK_RESULT_TAG "peak.amplitude     = %8f\n", mPeakAmplitude);
-        printf(LOOPBACK_RESULT_TAG "sine.magnitude     = %8f\n", mMagnitude);
-        printf(LOOPBACK_RESULT_TAG "peak.noise         = %8f\n", mPeakNoise);
-        printf(LOOPBACK_RESULT_TAG "rms.noise          = %8f\n", mRootMeanSquareNoise);
-        float amplitudeRatio = mMagnitude / mPeakNoise;
-        float signalToNoise = amplitudeRatio * amplitudeRatio;
-        printf(LOOPBACK_RESULT_TAG "signal.to.noise    = %8.2f\n", signalToNoise);
-        float signalToNoiseDB = 10.0 * log(signalToNoise);
-        printf(LOOPBACK_RESULT_TAG "signal.to.noise.db = %8.2f\n", signalToNoiseDB);
-        if (signalToNoiseDB < MIN_SNRATIO_DB) {
-            printf("ERROR - signal to noise ratio is too low! < %d dB. Adjust volume.\n", MIN_SNRATIO_DB);
-            setResult(ERROR_NOISY);
-        }
-        printf(LOOPBACK_RESULT_TAG "frames.accumulated = %8d\n", mFramesAccumulated);
-        printf(LOOPBACK_RESULT_TAG "sine.period        = %8d\n", mSinePeriod);
-        printf(LOOPBACK_RESULT_TAG "test.state         = %8d\n", mState);
-        printf(LOOPBACK_RESULT_TAG "frame.count        = %8d\n", mFrameCounter);
-        // Did we ever get a lock?
-        bool gotLock = (mState == STATE_LOCKED) || (mGlitchCount > 0);
-        if (!gotLock) {
-            printf("ERROR - failed to lock on reference sine tone\n");
-            setResult(ERROR_NO_LOCK);
-        } else {
-            // Only print if meaningful.
-            printf(LOOPBACK_RESULT_TAG "glitch.count       = %8d\n", mGlitchCount);
-            printf(LOOPBACK_RESULT_TAG "max.glitch         = %8f\n", mMaxGlitchDelta);
-            if (mGlitchCount > 0) {
-                printf("ERROR - number of glitches > 0\n");
-                setResult(ERROR_GLITCHES);
-            }
-        }
-    }
-
-    void printStatus() override {
-        printf("st = %d, #gl = %3d,", mState, mGlitchCount);
-    }
-
-    double calculateMagnitude(double *phasePtr = NULL) {
-        if (mFramesAccumulated == 0) {
-            return 0.0;
-        }
-        double sinMean = mSinAccumulator / mFramesAccumulated;
-        double cosMean = mCosAccumulator / mFramesAccumulated;
-        double magnitude = 2.0 * sqrt( (sinMean * sinMean) + (cosMean * cosMean ));
-        if( phasePtr != NULL )
-        {
-            double phase = M_PI_2 - atan2( sinMean, cosMean );
-            *phasePtr = phase;
-        }
-        return magnitude;
-    }
-
-    /**
-     * @param inputData contains microphone data with sine signal feedback
-     * @param outputData contains the reference sine wave
-     */
-    process_result process(float *inputData, int inputChannelCount,
-                 float *outputData, int outputChannelCount,
-                 int numFrames) override {
-        process_result result = PROCESS_RESULT_OK;
-        mProcessCount++;
-
-        float peak = measurePeakAmplitude(inputData, inputChannelCount, numFrames);
-        if (peak > mPeakAmplitude) {
-            mPeakAmplitude = peak;
-        }
-
-        for (int i = 0; i < numFrames; i++) {
-            bool sineEnabled = true;
-            float sample = inputData[i * inputChannelCount];
-
-            float sinOut = sinf(mPhase);
-
-            switch (mState) {
-                case STATE_IDLE:
-                    sineEnabled = false;
-                    mDownCounter--;
-                    if (mDownCounter <= 0) {
-                        mState = STATE_MEASURE_NOISE;
-                        mDownCounter = NOISE_FRAME_COUNT;
-                    }
-                    break;
-                case STATE_MEASURE_NOISE:
-                    sineEnabled = false;
-                    mPeakNoise = std::max(abs(sample), mPeakNoise);
-                    mNoiseSumSquared += sample * sample;
-                    mDownCounter--;
-                    if (mDownCounter <= 0) {
-                        mState = STATE_WAITING_FOR_SIGNAL;
-                        mRootMeanSquareNoise = sqrt(mNoiseSumSquared / NOISE_FRAME_COUNT);
-                        mTolerance = std::max(MIN_TOLERANCE, mPeakNoise * 2.0f);
-                        mPhase = 0.0; // prevent spike at start
-                    }
-                    break;
-
-                case STATE_IMMUNE:
-                    mDownCounter--;
-                    if (mDownCounter <= 0) {
-                        mState = STATE_WAITING_FOR_SIGNAL;
-                    }
-                    break;
-
-                case STATE_WAITING_FOR_SIGNAL:
-                    if (peak > mThreshold) {
-                        mState = STATE_WAITING_FOR_LOCK;
-                        //printf("%5d: switch to STATE_WAITING_FOR_LOCK\n", mFrameCounter);
-                        resetAccumulator();
-                    }
-                    break;
-
-                case STATE_WAITING_FOR_LOCK:
-                    mSinAccumulator += sample * sinOut;
-                    mCosAccumulator += sample * cosf(mPhase);
-                    mFramesAccumulated++;
-                    // Must be a multiple of the period or the calculation will not be accurate.
-                    if (mFramesAccumulated == mSinePeriod * PERIODS_NEEDED_FOR_LOCK) {
-                        mPhaseOffset = 0.0;
-                        mMagnitude = calculateMagnitude(&mPhaseOffset);
-                        if (mMagnitude > mThreshold) {
-                            if (fabs(mPreviousPhaseOffset - mPhaseOffset) < 0.001) {
-                                mState = STATE_LOCKED;
-                                //printf("%5d: switch to STATE_LOCKED\n", mFrameCounter);
-                            }
-                            mPreviousPhaseOffset = mPhaseOffset;
-                        }
-                        resetAccumulator();
-                    }
-                    break;
-
-                case STATE_LOCKED: {
-                    // Predict next sine value
-                    float predicted = sinf(mPhase + mPhaseOffset) * mMagnitude;
-                    // printf("    predicted = %f, actual = %f\n", predicted, sample);
-
-                    float diff = predicted - sample;
-                    float absDiff = fabs(diff);
-                    mMaxGlitchDelta = std::max(mMaxGlitchDelta, absDiff);
-                    if (absDiff > mTolerance) {
-                        mGlitchCount++;
-                        result = PROCESS_RESULT_GLITCH;
-                        //printf("%5d: Got a glitch # %d, predicted = %f, actual = %f\n",
-                        //       mFrameCounter, mGlitchCount, predicted, sample);
-                        mState = STATE_IMMUNE;
-                        mDownCounter = mSinePeriod * PERIODS_IMMUNE;
-                    }
-
-                    // Track incoming signal and slowly adjust magnitude to account
-                    // for drift in the DRC or AGC.
-                    mSinAccumulator += sample * sinOut;
-                    mCosAccumulator += sample * cosf(mPhase);
-                    mFramesAccumulated++;
-                    // Must be a multiple of the period or the calculation will not be accurate.
-                    if (mFramesAccumulated == mSinePeriod) {
-                        const double coefficient = 0.1;
-                        double phaseOffset = 0.0;
-                        double magnitude = calculateMagnitude(&phaseOffset);
-                        // One pole averaging filter.
-                        mMagnitude = (mMagnitude * (1.0 - coefficient)) + (magnitude * coefficient);
-                        resetAccumulator();
-                    }
-                } break;
-            }
-
-            float output = 0.0f;
-            // Output sine wave so we can measure it.
-            if (sineEnabled) {
-                output = (sinOut * mOutputAmplitude)
-                         + (mWhiteNoise.nextRandomDouble() * mNoiseAmplitude);
-                // printf("%5d: sin(%f) = %f, %f\n", i, mPhase, sinOut,  mPhaseIncrement);
-                // advance and wrap phase
-                mPhase += mPhaseIncrement;
-                if (mPhase > M_PI) {
-                    mPhase -= (2.0 * M_PI);
-                }
-            }
-            outputData[i * outputChannelCount] = output;
-
-
-            mFrameCounter++;
-        }
-        return result;
-    }
-
-    void resetAccumulator() {
-        mFramesAccumulated = 0;
-        mSinAccumulator = 0.0;
-        mCosAccumulator = 0.0;
-    }
-
-    void reset() override {
-        mGlitchCount = 0;
-        mState = STATE_IDLE;
-        mDownCounter = IDLE_FRAME_COUNT;
-        mPhaseIncrement = 2.0 * M_PI / mSinePeriod;
-        printf("phaseInc = %f for period %d\n", mPhaseIncrement, mSinePeriod);
-        resetAccumulator();
-        mProcessCount = 0;
-        mPeakNoise = 0.0f;
-        mNoiseSumSquared = 0.0;
-        mRootMeanSquareNoise = 0.0;
-        mPhase = 0.0f;
-        mMaxGlitchDelta = 0.0;
-    }
-
-private:
-
-    enum error_code {
-        OK,
-        ERROR_NO_LOCK = -80,
-        ERROR_GLITCHES,
-        ERROR_NOISY
-    };
-
-    enum sine_state_t {
-        STATE_IDLE,
-        STATE_MEASURE_NOISE,
-        STATE_IMMUNE,
-        STATE_WAITING_FOR_SIGNAL,
-        STATE_WAITING_FOR_LOCK,
-        STATE_LOCKED
-    };
-
-    enum constants {
-        // Arbitrary durations, assuming 48000 Hz
-        IDLE_FRAME_COUNT = 48 * 100,
-        NOISE_FRAME_COUNT = 48 * 600,
-        PERIODS_NEEDED_FOR_LOCK = 8,
-        PERIODS_IMMUNE = 2,
-        MIN_SNRATIO_DB = 65
-    };
-
-    static constexpr float MIN_TOLERANCE = 0.01;
-
-    int     mSinePeriod = 79;
-    double  mPhaseIncrement = 0.0;
-    double  mPhase = 0.0;
-    double  mPhaseOffset = 0.0;
-    double  mPreviousPhaseOffset = 0.0;
-    double  mMagnitude = 0.0;
-    double  mThreshold = 0.005;
-    double  mTolerance = MIN_TOLERANCE;
-    int32_t mFramesAccumulated = 0;
-    int32_t mProcessCount = 0;
-    double  mSinAccumulator = 0.0;
-    double  mCosAccumulator = 0.0;
-    float   mMaxGlitchDelta = 0.0f;
-    int32_t mGlitchCount = 0;
-    double  mPeakAmplitude = 0.0;
-    int     mDownCounter = IDLE_FRAME_COUNT;
-    int32_t mFrameCounter = 0;
-    float   mOutputAmplitude = 0.75;
-
-    // measure background noise
-    float   mPeakNoise = 0.0f;
-    double  mNoiseSumSquared = 0.0;
-    double  mRootMeanSquareNoise = 0.0;
-
-    PseudoRandom  mWhiteNoise;
-    float   mNoiseAmplitude = 0.00; // Used to experiment with warbling caused by DRC.
-
-    sine_state_t  mState = STATE_IDLE;
-};
-
-#undef LOOPBACK_RESULT_TAG
-
-#endif /* AAUDIO_EXAMPLES_LOOPBACK_ANALYSER_H */
diff --git a/media/libaaudio/examples/loopback/src/analyzer/GlitchAnalyzer.h b/media/libaaudio/examples/loopback/src/analyzer/GlitchAnalyzer.h
new file mode 100644
index 0000000..04435d1
--- /dev/null
+++ b/media/libaaudio/examples/loopback/src/analyzer/GlitchAnalyzer.h
@@ -0,0 +1,445 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANALYZER_GLITCH_ANALYZER_H
+#define ANALYZER_GLITCH_ANALYZER_H
+
+#include <algorithm>
+#include <cctype>
+#include <iomanip>
+#include <iostream>
+
+#include "LatencyAnalyzer.h"
+#include "PseudoRandom.h"
+
+/**
+ * Output a steady sine wave and analyze the return signal.
+ *
+ * Use a cosine transform to measure the magnitude and relative phase of the
+ * looped back sine wave. Then generate a predicted signal and compare it with the actual signal.
+ */
+class GlitchAnalyzer : public LoopbackProcessor {
+public:
+
+    int32_t getState() const {
+        return mState;
+    }
+
+    double getPeakAmplitude() const {
+        return mPeakFollower.getLevel();
+    }
+
+    double getTolerance() {
+        return mTolerance;
+    }
+
+    void setTolerance(double tolerance) {
+        mTolerance = tolerance;
+        mScaledTolerance = mMagnitude * mTolerance;
+    }
+
+    void setMagnitude(double magnitude) {
+        mMagnitude = magnitude;
+        mScaledTolerance = mMagnitude * mTolerance;
+    }
+
+    int32_t getGlitchCount() const {
+        return mGlitchCount;
+    }
+
+    int32_t getStateFrameCount(int state) const {
+        return mStateFrameCounters[state];
+    }
+
+    double getSignalToNoiseDB() {
+        static const double threshold = 1.0e-14;
+        if (mMeanSquareSignal < threshold || mMeanSquareNoise < threshold) {
+            return 0.0;
+        } else {
+            double signalToNoise = mMeanSquareSignal / mMeanSquareNoise; // power ratio
+            double signalToNoiseDB = 10.0 * log10(signalToNoise);
+            if (signalToNoiseDB < MIN_SNR_DB) {
+                ALOGD("ERROR - signal to noise ratio is too low! < %d dB. Adjust volume.",
+                     MIN_SNR_DB);
+                setResult(ERROR_VOLUME_TOO_LOW);
+            }
+            return signalToNoiseDB;
+        }
+    }
+
+    std::string analyze() override {
+        std::stringstream report;
+        report << "GlitchAnalyzer ------------------\n";
+        report << LOOPBACK_RESULT_TAG "peak.amplitude     = " << std::setw(8)
+               << getPeakAmplitude() << "\n";
+        report << LOOPBACK_RESULT_TAG "sine.magnitude     = " << std::setw(8)
+               << mMagnitude << "\n";
+        report << LOOPBACK_RESULT_TAG "rms.noise          = " << std::setw(8)
+               << mMeanSquareNoise << "\n";
+        report << LOOPBACK_RESULT_TAG "signal.to.noise.db = " << std::setw(8)
+               << getSignalToNoiseDB() << "\n";
+        report << LOOPBACK_RESULT_TAG "frames.accumulated = " << std::setw(8)
+               << mFramesAccumulated << "\n";
+        report << LOOPBACK_RESULT_TAG "sine.period        = " << std::setw(8)
+               << mSinePeriod << "\n";
+        report << LOOPBACK_RESULT_TAG "test.state         = " << std::setw(8)
+               << mState << "\n";
+        report << LOOPBACK_RESULT_TAG "frame.count        = " << std::setw(8)
+               << mFrameCounter << "\n";
+        // Did we ever get a lock?
+        bool gotLock = (mState == STATE_LOCKED) || (mGlitchCount > 0);
+        if (!gotLock) {
+            report << "ERROR - failed to lock on reference sine tone.\n";
+            setResult(ERROR_NO_LOCK);
+        } else {
+            // Only print if meaningful.
+            report << LOOPBACK_RESULT_TAG "glitch.count       = " << std::setw(8)
+                   << mGlitchCount << "\n";
+            report << LOOPBACK_RESULT_TAG "max.glitch         = " << std::setw(8)
+                   << mMaxGlitchDelta << "\n";
+            if (mGlitchCount > 0) {
+                report << "ERROR - number of glitches > 0\n";
+                setResult(ERROR_GLITCHES);
+            }
+        }
+        return report.str();
+    }
+
+    void printStatus() override {
+        ALOGD("st = %d, #gl = %3d,", mState, mGlitchCount);
+    }
+    /**
+     * Calculate the magnitude of the component of the input signal
+     * that matches the analysis frequency.
+     * Also calculate the phase that we can use to create a
+     * signal that matches that component.
+     * The phase will be between -PI and +PI.
+     */
+    double calculateMagnitude(double *phasePtr = nullptr) {
+        if (mFramesAccumulated == 0) {
+            return 0.0;
+        }
+        double sinMean = mSinAccumulator / mFramesAccumulated;
+        double cosMean = mCosAccumulator / mFramesAccumulated;
+        double magnitude = 2.0 * sqrt((sinMean * sinMean) + (cosMean * cosMean));
+        if (phasePtr != nullptr) {
+            double phase = M_PI_2 - atan2(sinMean, cosMean);
+            *phasePtr = phase;
+        }
+        return magnitude;
+    }
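+
+    // For intuition (a sketch of the identity relied on, assuming a clean input sine
+    // and accumulation over an exact whole number of periods): if each accumulated
+    // sample is A * sin(analysisPhase + phi), then sinMean ~= (A/2) * cos(phi) and
+    // cosMean ~= (A/2) * sin(phi), so 2 * sqrt(sinMean^2 + cosMean^2) recovers the
+    // amplitude A and M_PI_2 - atan2(sinMean, cosMean) recovers the phase offset phi.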
+
+    /**
+     * @param frameData contains microphone data with sine signal feedback
+     * @param channelCount number of channels in frameData; only channel 0 is analyzed
+     */
+    result_code processInputFrame(float *frameData, int /* channelCount */) override {
+        result_code result = RESULT_OK;
+
+        float sample = frameData[0];
+        float peak = mPeakFollower.process(sample);
+
+        // Force a periodic glitch to test the detector!
+        if (mForceGlitchDuration > 0) {
+            if (mForceGlitchCounter == 0) {
+                ALOGE("%s: force a glitch!!", __func__);
+                mForceGlitchCounter = getSampleRate();
+            } else if (mForceGlitchCounter <= mForceGlitchDuration) {
+                // Force an abrupt offset.
+                sample += (sample > 0.0) ? -0.5f : 0.5f;
+            }
+            --mForceGlitchCounter;
+        }
+
+        mStateFrameCounters[mState]++; // count how many frames we are in each state
+
+        switch (mState) {
+            case STATE_IDLE:
+                mDownCounter--;
+                if (mDownCounter <= 0) {
+                    mState = STATE_IMMUNE;
+                    mDownCounter = IMMUNE_FRAME_COUNT;
+                    mInputPhase = 0.0; // prevent spike at start
+                    mOutputPhase = 0.0;
+                }
+                break;
+
+            case STATE_IMMUNE:
+                mDownCounter--;
+                if (mDownCounter <= 0) {
+                    mState = STATE_WAITING_FOR_SIGNAL;
+                }
+                break;
+
+            case STATE_WAITING_FOR_SIGNAL:
+                if (peak > mThreshold) {
+                    mState = STATE_WAITING_FOR_LOCK;
+                    //ALOGD("%5d: switch to STATE_WAITING_FOR_LOCK", mFrameCounter);
+                    resetAccumulator();
+                }
+                break;
+
+            case STATE_WAITING_FOR_LOCK:
+                mSinAccumulator += sample * sinf(mInputPhase);
+                mCosAccumulator += sample * cosf(mInputPhase);
+                mFramesAccumulated++;
+                // Must be a multiple of the period or the calculation will not be accurate.
+                if (mFramesAccumulated == mSinePeriod * PERIODS_NEEDED_FOR_LOCK) {
+                    double phaseOffset = 0.0;
+                    setMagnitude(calculateMagnitude(&phaseOffset));
+//                    ALOGD("%s() mag = %f, offset = %f, prev = %f",
+//                            __func__, mMagnitude, mPhaseOffset, mPreviousPhaseOffset);
+                    if (mMagnitude > mThreshold) {
+                        if (abs(phaseOffset) < kMaxPhaseError) {
+                            mState = STATE_LOCKED;
+//                            ALOGD("%5d: switch to STATE_LOCKED", mFrameCounter);
+                        }
+                        // Adjust mInputPhase to match measured phase
+                        mInputPhase += phaseOffset;
+                    }
+                    resetAccumulator();
+                }
+                incrementInputPhase();
+                break;
+
+            case STATE_LOCKED: {
+                // Predict next sine value
+                double predicted = sinf(mInputPhase) * mMagnitude;
+                double diff = predicted - sample;
+                double absDiff = fabs(diff);
+                mMaxGlitchDelta = std::max(mMaxGlitchDelta, absDiff);
+                if (absDiff > mScaledTolerance) {
+                    result = ERROR_GLITCHES;
+                    onGlitchStart();
+//                    LOGI("diff glitch detected, absDiff = %g", absDiff);
+                } else {
+                    mSumSquareSignal += predicted * predicted;
+                    mSumSquareNoise += diff * diff;
+                    // Track incoming signal and slowly adjust magnitude to account
+                    // for drift in the DRC or AGC.
+                    mSinAccumulator += sample * sinf(mInputPhase);
+                    mCosAccumulator += sample * cosf(mInputPhase);
+                    mFramesAccumulated++;
+                    // Must be a multiple of the period or the calculation will not be accurate.
+                    if (mFramesAccumulated == mSinePeriod) {
+                        const double coefficient = 0.1;
+                        double phaseOffset = 0.0;
+                        double magnitude = calculateMagnitude(&phaseOffset);
+                        // One pole averaging filter.
+                        setMagnitude((mMagnitude * (1.0 - coefficient)) + (magnitude * coefficient));
+
+                        mMeanSquareNoise = mSumSquareNoise * mInverseSinePeriod;
+                        mMeanSquareSignal = mSumSquareSignal * mInverseSinePeriod;
+                        resetAccumulator();
+
+                        if (abs(phaseOffset) > kMaxPhaseError) {
+                            result = ERROR_GLITCHES;
+                            onGlitchStart();
+                            ALOGD("phase glitch detected, phaseOffset = %g", phaseOffset);
+                        } else if (mMagnitude < mThreshold) {
+                            result = ERROR_GLITCHES;
+                            onGlitchStart();
+                            ALOGD("magnitude glitch detected, mMagnitude = %g", mMagnitude);
+                        }
+                    }
+                }
+                incrementInputPhase();
+            } break;
+
+            case STATE_GLITCHING: {
+                // Predict next sine value
+                mGlitchLength++;
+                double predicted = sinf(mInputPhase) * mMagnitude;
+                double diff = predicted - sample;
+                double absDiff = fabs(diff);
+                mMaxGlitchDelta = std::max(mMaxGlitchDelta, absDiff);
+                if (absDiff < mScaledTolerance) { // close enough?
+                    // If we get a full sine period of non-glitch samples in a row then consider the glitch over.
+                    // We don't want to just consider a zero crossing the end of a glitch.
+                    if (mNonGlitchCount++ > mSinePeriod) {
+                        onGlitchEnd();
+                    }
+                } else {
+                    mNonGlitchCount = 0;
+                    if (mGlitchLength > (4 * mSinePeriod)) {
+                        relock();
+                    }
+                }
+                incrementInputPhase();
+            } break;
+
+            case NUM_STATES: // not a real state
+                break;
+        }
+
+        mFrameCounter++;
+
+        return result;
+    }
+
+    // advance and wrap phase
+    void incrementInputPhase() {
+        mInputPhase += mPhaseIncrement;
+        if (mInputPhase > M_PI) {
+            mInputPhase -= (2.0 * M_PI);
+        }
+    }
+
+    // advance and wrap phase
+    void incrementOutputPhase() {
+        mOutputPhase += mPhaseIncrement;
+        if (mOutputPhase > M_PI) {
+            mOutputPhase -= (2.0 * M_PI);
+        }
+    }
+
+    /**
+     * @param frameData upon return, contains the reference sine wave
+     * @param channelCount number of channels to fill; channel 0 carries the sine, the rest are zeroed
+     */
+    result_code processOutputFrame(float *frameData, int channelCount) override {
+        float output = 0.0f;
+        // Output sine wave so we can measure it.
+        if (mState != STATE_IDLE) {
+            float sinOut = sinf(mOutputPhase);
+            incrementOutputPhase();
+            output = (sinOut * mOutputAmplitude)
+                     + (mWhiteNoise.nextRandomDouble() * kNoiseAmplitude);
+            // ALOGD("sin(%f) = %f, %f\n", mOutputPhase, sinOut,  mPhaseIncrement);
+        }
+        frameData[0] = output;
+        for (int i = 1; i < channelCount; i++) {
+            frameData[i] = 0.0f;
+        }
+        return RESULT_OK;
+    }
+
+    void onGlitchStart() {
+        mGlitchCount++;
+//        ALOGD("%5d: STARTED a glitch # %d", mFrameCounter, mGlitchCount);
+        mState = STATE_GLITCHING;
+        mGlitchLength = 1;
+        mNonGlitchCount = 0;
+    }
+
+    void onGlitchEnd() {
+//        ALOGD("%5d: ENDED a glitch # %d, length = %d", mFrameCounter, mGlitchCount, mGlitchLength);
+        mState = STATE_LOCKED;
+        resetAccumulator();
+    }
+
+    // reset the sine wave detector
+    void resetAccumulator() {
+        mFramesAccumulated = 0;
+        mSinAccumulator = 0.0;
+        mCosAccumulator = 0.0;
+        mSumSquareSignal = 0.0;
+        mSumSquareNoise = 0.0;
+    }
+
+    void relock() {
+//        ALOGD("relock: %d because of a very long %d glitch", mFrameCounter, mGlitchLength);
+        mState = STATE_WAITING_FOR_LOCK;
+        resetAccumulator();
+    }
+
+    void reset() override {
+        LoopbackProcessor::reset();
+        mState = STATE_IDLE;
+        mDownCounter = IDLE_FRAME_COUNT;
+        resetAccumulator();
+    }
+
+    void prepareToTest() override {
+        LoopbackProcessor::prepareToTest();
+        mSinePeriod = getSampleRate() / kTargetGlitchFrequency;
+        mOutputPhase = 0.0f;
+        mInverseSinePeriod = 1.0 / mSinePeriod;
+        mPhaseIncrement = 2.0 * M_PI * mInverseSinePeriod;
+        mGlitchCount = 0;
+        mMaxGlitchDelta = 0.0;
+        for (int i = 0; i < NUM_STATES; i++) {
+            mStateFrameCounters[i] = 0;
+        }
+    }
+
+private:
+
+    // These must match the values in GlitchActivity.java
+    enum sine_state_t {
+        STATE_IDLE,               // beginning
+        STATE_IMMUNE,             // ignoring input, waiting for HW to settle
+        STATE_WAITING_FOR_SIGNAL, // looking for a loud signal
+        STATE_WAITING_FOR_LOCK,   // trying to lock onto the phase of the sine
+        STATE_LOCKED,             // locked on the sine wave, looking for glitches
+        STATE_GLITCHING,          // locked on the sine wave but glitching
+        NUM_STATES
+    };
+
+    enum constants {
+        // Arbitrary durations, assuming 48000 Hz
+        IDLE_FRAME_COUNT = 48 * 100,
+        IMMUNE_FRAME_COUNT = 48 * 100,
+        PERIODS_NEEDED_FOR_LOCK = 8,
+        MIN_SNR_DB = 65
+    };
+
+    static constexpr float kNoiseAmplitude = 0.00; // Used to experiment with warbling caused by DRC.
+    static constexpr int kTargetGlitchFrequency = 607;
+    static constexpr double kMaxPhaseError = M_PI * 0.05;
+
+    float   mTolerance = 0.10; // scaled from 0.0 to 1.0
+    double  mThreshold = 0.005;
+    int     mSinePeriod = 1; // this will be set before use
+    double  mInverseSinePeriod = 1.0;
+
+    int32_t mStateFrameCounters[NUM_STATES];
+
+    double  mPhaseIncrement = 0.0;
+    double  mInputPhase = 0.0;
+    double  mOutputPhase = 0.0;
+    double  mMagnitude = 0.0;
+    int32_t mFramesAccumulated = 0;
+    double  mSinAccumulator = 0.0;
+    double  mCosAccumulator = 0.0;
+    double  mMaxGlitchDelta = 0.0;
+    int32_t mGlitchCount = 0;
+    int32_t mNonGlitchCount = 0;
+    int32_t mGlitchLength = 0;
+    // This is used for processing every frame so we cache it here.
+    double  mScaledTolerance = 0.0;
+    int     mDownCounter = IDLE_FRAME_COUNT;
+    int32_t mFrameCounter = 0;
+    double  mOutputAmplitude = 0.75;
+
+    int32_t mForceGlitchDuration = 0; // if > 0 then force a glitch for debugging
+    int32_t mForceGlitchCounter = 4 * 48000; // count down and trigger at zero
+
+    // measure background noise continuously as a deviation from the expected signal
+    double  mSumSquareSignal = 0.0;
+    double  mSumSquareNoise = 0.0;
+    double  mMeanSquareSignal = 0.0;
+    double  mMeanSquareNoise = 0.0;
+
+    PeakDetector  mPeakFollower;
+
+    PseudoRandom  mWhiteNoise;
+
+    sine_state_t  mState = STATE_IDLE;
+};
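+
+// Rough usage sketch (illustrative only; the stream setup and the per-frame driver
+// loop are assumed to live elsewhere):
+//
+//     GlitchAnalyzer analyzer;
+//     analyzer.setSampleRate(48000);
+//     analyzer.prepareToTest();
+//     // for each frame: analyzer.processInputFrame(in, inChannels);
+//     //                 analyzer.processOutputFrame(out, outChannels);
+//     std::string report = analyzer.analyze();      // includes glitch.count etc.
+//     double snrDb = analyzer.getSignalToNoiseDB();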
+
+
+#endif //ANALYZER_GLITCH_ANALYZER_H
diff --git a/media/libaaudio/examples/loopback/src/analyzer/LatencyAnalyzer.h b/media/libaaudio/examples/loopback/src/analyzer/LatencyAnalyzer.h
new file mode 100644
index 0000000..e506791
--- /dev/null
+++ b/media/libaaudio/examples/loopback/src/analyzer/LatencyAnalyzer.h
@@ -0,0 +1,606 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Tools for measuring latency and for detecting glitches.
+ * These classes are pure math and can be used with any audio system.
+ */
+
+#ifndef ANALYZER_LATENCY_ANALYZER_H
+#define ANALYZER_LATENCY_ANALYZER_H
+
+#include <algorithm>
+#include <assert.h>
+#include <cctype>
+#include <iomanip>
+#include <iostream>
+#include <math.h>
+#include <memory>
+#include <sstream>
+#include <stdio.h>
+#include <stdlib.h>
+#include <unistd.h>
+#include <vector>
+
+#include "PeakDetector.h"
+#include "PseudoRandom.h"
+#include "RandomPulseGenerator.h"
+
+// This is used when the code is in Oboe.
+#ifndef ALOGD
+#define ALOGD printf
+#define ALOGE printf
+#define ALOGW printf
+#endif
+
+#define LOOPBACK_RESULT_TAG  "RESULT: "
+
+static constexpr int32_t kDefaultSampleRate = 48000;
+static constexpr int32_t kMillisPerSecond   = 1000;
+static constexpr int32_t kMaxLatencyMillis  = 700;  // arbitrary and generous
+static constexpr double  kMinimumConfidence = 0.2;
+
+struct LatencyReport {
+    int32_t latencyInFrames = 0;
+    double confidence = 0.0;
+
+    void reset() {
+        latencyInFrames = 0;
+        confidence = 0.0;
+    }
+};
+
+// Calculate a normalized cross correlation.
+static double calculateNormalizedCorrelation(const float *a,
+                                             const float *b,
+                                             int windowSize) {
+    double correlation = 0.0;
+    double sumProducts = 0.0;
+    double sumSquares = 0.0;
+
+    // Correlate a against b.
+    for (int i = 0; i < windowSize; i++) {
+        float s1 = a[i];
+        float s2 = b[i];
+        // Use a normalized cross-correlation.
+        sumProducts += s1 * s2;
+        sumSquares += ((s1 * s1) + (s2 * s2));
+    }
+
+    if (sumSquares >= 1.0e-9) {
+        correlation = 2.0 * sumProducts / sumSquares;
+    }
+    return correlation;
+}
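+
+// Note on the normalization above: since 2*s1*s2 <= s1*s1 + s2*s2 for any reals,
+// the returned value always lies in [-1.0, 1.0]; +1.0 means the two windows are
+// identical, -1.0 means identical but inverted, and values near 0 mean uncorrelated.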
+
+static double calculateRootMeanSquare(float *data, int32_t numSamples) {
+    double sum = 0.0;
+    for (int32_t i = 0; i < numSamples; i++) {
+        float sample = data[i];
+        sum += sample * sample;
+    }
+    return sqrt(sum / numSamples);
+}
+
+/**
+ * Monophonic recording with processing.
+ */
+class AudioRecording
+{
+public:
+
+    void allocate(int maxFrames) {
+        mData = std::make_unique<float[]>(maxFrames);
+        mMaxFrames = maxFrames;
+    }
+
+    // Write SHORT data from the first channel.
+    int32_t write(int16_t *inputData, int32_t inputChannelCount, int32_t numFrames) {
+        // stop at end of buffer
+        if ((mFrameCounter + numFrames) > mMaxFrames) {
+            numFrames = mMaxFrames - mFrameCounter;
+        }
+        for (int i = 0; i < numFrames; i++) {
+            mData[mFrameCounter++] = inputData[i * inputChannelCount] * (1.0f / 32768);
+        }
+        return numFrames;
+    }
+
+    // Write FLOAT data from the first channel.
+    int32_t write(float *inputData, int32_t inputChannelCount, int32_t numFrames) {
+        // stop at end of buffer
+        if ((mFrameCounter + numFrames) > mMaxFrames) {
+            numFrames = mMaxFrames - mFrameCounter;
+        }
+        for (int i = 0; i < numFrames; i++) {
+            mData[mFrameCounter++] = inputData[i * inputChannelCount];
+        }
+        return numFrames;
+    }
+
+    // Write a single FLOAT sample.
+    int32_t write(float sample) {
+        // stop at end of buffer
+        if (mFrameCounter < mMaxFrames) {
+            mData[mFrameCounter++] = sample;
+            return 1;
+        }
+        return 0;
+    }
+
+    void clear() {
+        mFrameCounter = 0;
+    }
+    int32_t size() const {
+        return mFrameCounter;
+    }
+
+    bool isFull() const {
+        return mFrameCounter >= mMaxFrames;
+    }
+
+    float *getData() const {
+        return mData.get();
+    }
+
+    void setSampleRate(int32_t sampleRate) {
+        mSampleRate = sampleRate;
+    }
+
+    int32_t getSampleRate() const {
+        return mSampleRate;
+    }
+
+    /**
+     * Square the samples so they are all positive and so the peaks are emphasized.
+     */
+    void square() {
+        float *x = mData.get();
+        for (int i = 0; i < mFrameCounter; i++) {
+            x[i] *= x[i];
+        }
+    }
+
+    /**
+     * Amplify a signal so that the peak matches the specified target.
+     *
+     * @param target final max value
+     * @return gain applied to signal
+     */
+    float normalize(float target) {
+        float maxValue = 1.0e-9f;
+        for (int i = 0; i < mFrameCounter; i++) {
+            maxValue = std::max(maxValue, abs(mData[i]));
+        }
+        float gain = target / maxValue;
+        for (int i = 0; i < mFrameCounter; i++) {
+            mData[i] *= gain;
+        }
+        return gain;
+    }
+
+private:
+    std::unique_ptr<float[]> mData;
+    int32_t       mFrameCounter = 0;
+    int32_t       mMaxFrames = 0;
+    int32_t       mSampleRate = kDefaultSampleRate; // common default
+};
+
+static int measureLatencyFromPulse(AudioRecording &recorded,
+                                   AudioRecording &pulse,
+                                   LatencyReport *report) {
+
+    report->latencyInFrames = 0;
+    report->confidence = 0.0;
+
+    int numCorrelations = recorded.size() - pulse.size();
+    if (numCorrelations < 10) {
+        ALOGE("%s() recording too small = %d frames\n", __func__, recorded.size());
+        return -1;
+    }
+    std::unique_ptr<float[]> correlations = std::make_unique<float[]>(numCorrelations);
+
+    // Correlate pulse against the recorded data.
+    for (int i = 0; i < numCorrelations; i++) {
+        float correlation = (float) calculateNormalizedCorrelation(&recorded.getData()[i],
+                                                                   &pulse.getData()[0],
+                                                                   pulse.size());
+        correlations[i] = correlation;
+    }
+
+    // Find highest peak in correlation array.
+    float peakCorrelation = 0.0;
+    int peakIndex = -1;
+    for (int i = 0; i < numCorrelations; i++) {
+        float value = abs(correlations[i]);
+        if (value > peakCorrelation) {
+            peakCorrelation = value;
+            peakIndex = i;
+        }
+    }
+    if (peakIndex < 0) {
+        ALOGE("%s() no signal for correlation\n", __func__);
+        return -2;
+    }
+
+    report->latencyInFrames = peakIndex;
+    report->confidence = peakCorrelation;
+
+    return 0;
+}
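+
+// Minimal illustration of driving the helper above (the two recordings are
+// placeholders for the captured input and the reference pulse):
+//
+//     LatencyReport report;
+//     if (measureLatencyFromPulse(recordedInput, referencePulse, &report) == 0) {
+//         double millis = kMillisPerSecond * (double) report.latencyInFrames
+//                         / recordedInput.getSampleRate();
+//     }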
+
+// ====================================================================================
+class LoopbackProcessor {
+public:
+    virtual ~LoopbackProcessor() = default;
+
+    enum result_code {
+        RESULT_OK = 0,
+        ERROR_NOISY = -99,
+        ERROR_VOLUME_TOO_LOW,
+        ERROR_VOLUME_TOO_HIGH,
+        ERROR_CONFIDENCE,
+        ERROR_INVALID_STATE,
+        ERROR_GLITCHES,
+        ERROR_NO_LOCK
+    };
+
+    virtual void prepareToTest() {
+        reset();
+    }
+
+    virtual void reset() {
+        mResult = 0;
+        mResetCount++;
+    }
+
+    virtual result_code processInputFrame(float *frameData, int channelCount) = 0;
+    virtual result_code processOutputFrame(float *frameData, int channelCount) = 0;
+
+    void process(float *inputData, int inputChannelCount, int numInputFrames,
+                 float *outputData, int outputChannelCount, int numOutputFrames) {
+        int numBoth = std::min(numInputFrames, numOutputFrames);
+        // Process one frame at a time.
+        for (int i = 0; i < numBoth; i++) {
+            processInputFrame(inputData, inputChannelCount);
+            inputData += inputChannelCount;
+            processOutputFrame(outputData, outputChannelCount);
+            outputData += outputChannelCount;
+        }
+        // If there is more input than output.
+        for (int i = numBoth; i < numInputFrames; i++) {
+            processInputFrame(inputData, inputChannelCount);
+            inputData += inputChannelCount;
+        }
+        // If there is more output than input.
+        for (int i = numBoth; i < numOutputFrames; i++) {
+            processOutputFrame(outputData, outputChannelCount);
+            outputData += outputChannelCount;
+        }
+    }
+
+    virtual std::string analyze() = 0;
+
+    virtual void printStatus() {};
+
+    int32_t getResult() {
+        return mResult;
+    }
+
+    void setResult(int32_t result) {
+        mResult = result;
+    }
+
+    virtual bool isDone() {
+        return false;
+    }
+
+    virtual int save(const char *fileName) {
+        (void) fileName;
+        return -1;
+    }
+
+    virtual int load(const char *fileName) {
+        (void) fileName;
+        return -1;
+    }
+
+    virtual void setSampleRate(int32_t sampleRate) {
+        mSampleRate = sampleRate;
+    }
+
+    int32_t getSampleRate() const {
+        return mSampleRate;
+    }
+
+    int32_t getResetCount() const {
+        return mResetCount;
+    }
+
+    /** Called when not enough input frames could be read after synchronization.
+     */
+    virtual void onInsufficientRead() {
+        reset();
+    }
+
+protected:
+    int32_t   mResetCount = 0;
+
+private:
+    int32_t mSampleRate = kDefaultSampleRate;
+    int32_t mResult = 0;
+};
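+
+// Sketch of the intended call pattern for a duplex audio callback (buffer names
+// and channel counts here are placeholders):
+//
+//     LoopbackProcessor *processor = &someAnalyzer;
+//     processor->setSampleRate(sampleRate);
+//     processor->prepareToTest();
+//     // per callback, input and output frame counts may differ:
+//     processor->process(inputFloats, inChannels, numInputFrames,
+//                        outputFloats, outChannels, numOutputFrames);
+//     // after the run:
+//     std::string report = processor->analyze();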
+
+class LatencyAnalyzer : public LoopbackProcessor {
+public:
+
+    LatencyAnalyzer() : LoopbackProcessor() {}
+    virtual ~LatencyAnalyzer() = default;
+
+    virtual int32_t getProgress() const = 0;
+
+    virtual int getState() = 0;
+
+    // @return latency in frames
+    virtual int32_t getMeasuredLatency() = 0;
+
+    virtual double getMeasuredConfidence() = 0;
+
+    virtual double getBackgroundRMS() = 0;
+
+    virtual double getSignalRMS() = 0;
+
+};
+
+// ====================================================================================
+/**
+ * Measure latency given a loopback stream data.
+ * Use an encoded bit train as the sound source because it
+ * has an unambiguous correlation value.
+ * Uses a state machine to cycle through various stages.
+ *
+ */
+class PulseLatencyAnalyzer : public LatencyAnalyzer {
+public:
+
+    PulseLatencyAnalyzer() : LatencyAnalyzer() {
+        int32_t maxLatencyFrames = getSampleRate() * kMaxLatencyMillis / kMillisPerSecond;
+        int32_t numPulseBits = getSampleRate() * kPulseLengthMillis
+                / (kFramesPerEncodedBit * kMillisPerSecond);
+        int32_t  pulseLength = numPulseBits * kFramesPerEncodedBit;
+        mFramesToRecord = pulseLength + maxLatencyFrames;
+        mAudioRecording.allocate(mFramesToRecord);
+        mAudioRecording.setSampleRate(getSampleRate());
+        generateRandomPulse(pulseLength);
+    }
+
+    void generateRandomPulse(int32_t pulseLength) {
+        mPulse.allocate(pulseLength);
+        RandomPulseGenerator pulser(kFramesPerEncodedBit);
+        for (int i = 0; i < pulseLength; i++) {
+            mPulse.write(pulser.nextFloat());
+        }
+    }
+
+    int getState() override {
+        return mState;
+    }
+
+    void setSampleRate(int32_t sampleRate) override {
+        LoopbackProcessor::setSampleRate(sampleRate);
+        mAudioRecording.setSampleRate(sampleRate);
+    }
+
+    void reset() override {
+        LoopbackProcessor::reset();
+        mDownCounter = getSampleRate() / 2;
+        mLoopCounter = 0;
+
+        mPulseCursor = 0;
+        mBackgroundSumSquare = 0.0f;
+        mBackgroundSumCount = 0;
+        mBackgroundRMS = 0.0f;
+        mSignalRMS = 0.0f;
+
+        mState = STATE_MEASURE_BACKGROUND;
+        mAudioRecording.clear();
+        mLatencyReport.reset();
+    }
+
+    bool hasEnoughData() {
+        return mAudioRecording.isFull();
+    }
+
+    bool isDone() override {
+        return mState == STATE_DONE;
+    }
+
+    int32_t getProgress() const override {
+        return mAudioRecording.size();
+    }
+
+    std::string analyze() override {
+        std::stringstream report;
+        report << "PulseLatencyAnalyzer ---------------\n";
+        report << LOOPBACK_RESULT_TAG "test.state             = "
+                << std::setw(8) << mState << "\n";
+        report << LOOPBACK_RESULT_TAG "test.state.name        = "
+                << convertStateToText(mState) << "\n";
+        report << LOOPBACK_RESULT_TAG "background.rms         = "
+                << std::setw(8) << mBackgroundRMS << "\n";
+
+        int32_t newResult = RESULT_OK;
+        if (mState != STATE_GOT_DATA) {
+            report << "WARNING - Bad state. Check volume on device.\n";
+            // setResult(ERROR_INVALID_STATE);
+        } else {
+            float gain = mAudioRecording.normalize(1.0f);
+            measureLatencyFromPulse(mAudioRecording,
+                                    mPulse,
+                                    &mLatencyReport);
+
+            if (mLatencyReport.confidence < kMinimumConfidence) {
+                report << "   ERROR - confidence too low!";
+                newResult = ERROR_CONFIDENCE;
+            } else {
+                mSignalRMS = calculateRootMeanSquare(
+                        &mAudioRecording.getData()[mLatencyReport.latencyInFrames], mPulse.size())
+                                / gain;
+            }
+            double latencyMillis = kMillisPerSecond * (double) mLatencyReport.latencyInFrames
+                                   / getSampleRate();
+            report << LOOPBACK_RESULT_TAG "latency.frames         = " << std::setw(8)
+                   << mLatencyReport.latencyInFrames << "\n";
+            report << LOOPBACK_RESULT_TAG "latency.msec           = " << std::setw(8)
+                   << latencyMillis << "\n";
+            report << LOOPBACK_RESULT_TAG "latency.confidence     = " << std::setw(8)
+                   << mLatencyReport.confidence << "\n";
+        }
+        mState = STATE_DONE;
+        if (getResult() == RESULT_OK) {
+            setResult(newResult);
+        }
+
+        return report.str();
+    }
+
+    int32_t getMeasuredLatency() override {
+        return mLatencyReport.latencyInFrames;
+    }
+
+    double getMeasuredConfidence() override {
+        return mLatencyReport.confidence;
+    }
+
+    double getBackgroundRMS() override {
+        return mBackgroundRMS;
+    }
+
+    double getSignalRMS() override {
+        return mSignalRMS;
+    }
+
+    void printStatus() override {
+        ALOGD("st = %d", mState);
+    }
+
+    result_code processInputFrame(float *frameData, int channelCount) override {
+        echo_state nextState = mState;
+        mLoopCounter++;
+
+        switch (mState) {
+            case STATE_MEASURE_BACKGROUND:
+                // Measure background RMS on channel 0
+                mBackgroundSumSquare += frameData[0] * frameData[0];
+                mBackgroundSumCount++;
+                mDownCounter--;
+                if (mDownCounter <= 0) {
+                    mBackgroundRMS = sqrtf(mBackgroundSumSquare / mBackgroundSumCount);
+                    nextState = STATE_IN_PULSE;
+                    mPulseCursor = 0;
+                }
+                break;
+
+            case STATE_IN_PULSE:
+                // Record input until the mAudioRecording is full.
+                mAudioRecording.write(frameData, channelCount, 1);
+                if (hasEnoughData()) {
+                    nextState = STATE_GOT_DATA;
+                }
+                break;
+
+            case STATE_GOT_DATA:
+            case STATE_DONE:
+            default:
+                break;
+        }
+
+        mState = nextState;
+        return RESULT_OK;
+    }
+
+    result_code processOutputFrame(float *frameData, int channelCount) override {
+        switch (mState) {
+            case STATE_IN_PULSE:
+                if (mPulseCursor < mPulse.size()) {
+                    float pulseSample = mPulse.getData()[mPulseCursor++];
+                    for (int i = 0; i < channelCount; i++) {
+                        frameData[i] = pulseSample;
+                    }
+                } else {
+                    for (int i = 0; i < channelCount; i++) {
+                        frameData[i] = 0;
+                    }
+                }
+                break;
+
+            case STATE_MEASURE_BACKGROUND:
+            case STATE_GOT_DATA:
+            case STATE_DONE:
+            default:
+                for (int i = 0; i < channelCount; i++) {
+                    frameData[i] = 0.0f; // silence
+                }
+                break;
+        }
+
+        return RESULT_OK;
+    }
+
+private:
+
+    enum echo_state {
+        STATE_MEASURE_BACKGROUND,
+        STATE_IN_PULSE,
+        STATE_GOT_DATA, // must match RoundTripLatencyActivity.java
+        STATE_DONE,
+    };
+
+    const char *convertStateToText(echo_state state) {
+        switch (state) {
+            case STATE_MEASURE_BACKGROUND:
+                return "INIT";
+            case STATE_IN_PULSE:
+                return "PULSE";
+            case STATE_GOT_DATA:
+                return "GOT_DATA";
+            case STATE_DONE:
+                return "DONE";
+        }
+        return "UNKNOWN";
+    }
+
+    int32_t         mDownCounter = 500;
+    int32_t         mLoopCounter = 0;
+    echo_state      mState = STATE_MEASURE_BACKGROUND;
+
+    static constexpr int32_t kFramesPerEncodedBit = 8; // multiple of 2
+    static constexpr int32_t kPulseLengthMillis = 500;
+
+    AudioRecording     mPulse;
+    int32_t            mPulseCursor = 0;
+
+    double             mBackgroundSumSquare = 0.0;
+    int32_t            mBackgroundSumCount = 0;
+    double             mBackgroundRMS = 0.0;
+    double             mSignalRMS = 0.0;
+    int32_t            mFramesToRecord = 0;
+
+    AudioRecording     mAudioRecording; // contains only the input after starting the pulse
+    LatencyReport      mLatencyReport;
+};
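+
+// Typical measurement flow (the frame-delivery loop is assumed to live in the app):
+//
+//     PulseLatencyAnalyzer analyzer;
+//     analyzer.reset();
+//     // feed frames through processInputFrame()/processOutputFrame() until
+//     // analyzer.hasEnoughData() returns true, then:
+//     std::string report = analyzer.analyze();
+//     int32_t latencyFrames = analyzer.getMeasuredLatency();
+//     double confidence = analyzer.getMeasuredConfidence();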
+
+#endif // ANALYZER_LATENCY_ANALYZER_H
diff --git a/media/libaaudio/examples/loopback/src/analyzer/ManchesterEncoder.h b/media/libaaudio/examples/loopback/src/analyzer/ManchesterEncoder.h
new file mode 100644
index 0000000..0a4bd5b
--- /dev/null
+++ b/media/libaaudio/examples/loopback/src/analyzer/ManchesterEncoder.h
@@ -0,0 +1,98 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANALYZER_MANCHESTER_ENCODER_H
+#define ANALYZER_MANCHESTER_ENCODER_H
+
+#include <cstdint>
+
+/**
+ * Encode bytes using Manchester Coding scheme.
+ *
+ * Manchester Code is self clocking.
+ * There is a transition in the middle of every bit.
+ * Zero is high then low.
+ * One is low then high.
+ *
+ * This avoids having long DC sections that would droop when
+ * passed through analog circuits with AC coupling.
+ *
+ * IEEE 802.3 compatible.
+ */
+
+class ManchesterEncoder {
+public:
+    ManchesterEncoder(int samplesPerPulse)
+            : mSamplesPerPulse(samplesPerPulse)
+            , mSamplesPerPulseHalf(samplesPerPulse / 2)
+            , mCursor(samplesPerPulse) {
+    }
+
+    virtual ~ManchesterEncoder() = default;
+
+    /**
+     * This will be called when the next byte is needed.
+     * @return
+     */
+    virtual uint8_t onNextByte() = 0;
+
+    /**
+     * Generate the next floating point sample.
+     * @return
+     */
+    virtual float nextFloat() {
+        advanceSample();
+        if (mCurrentBit) {
+            return (mCursor < mSamplesPerPulseHalf) ? -1.0f : 1.0f; // one
+        } else {
+            return (mCursor < mSamplesPerPulseHalf) ? 1.0f : -1.0f; // zero
+        }
+    }
+
+protected:
+    /**
+     * This will be called when a new bit is ready to be encoded.
+     * It can be used to prepare the encoded samples.
+     * @param current
+     */
+    virtual void onNextBit(bool /* current */) {};
+
+    void advanceSample() {
+        // Are we ready for a new bit?
+        if (++mCursor >= mSamplesPerPulse) {
+            mCursor = 0;
+            if (mBitsLeft == 0) {
+                mCurrentByte = onNextByte();
+                mBitsLeft = 8;
+            }
+            --mBitsLeft;
+            mCurrentBit = (mCurrentByte >> mBitsLeft) & 1;
+            onNextBit(mCurrentBit);
+        }
+    }
+
+    bool getCurrentBit() {
+        return mCurrentBit;
+    }
+
+    const int mSamplesPerPulse;
+    const int mSamplesPerPulseHalf;
+    int       mCursor;
+    int       mBitsLeft = 0;
+    uint8_t   mCurrentByte = 0;
+    bool      mCurrentBit = false;
+};
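+
+// Worked example (assuming samplesPerPulse = 8, i.e. 4 samples per half-bit):
+// successive nextFloat() calls produce, per encoded bit,
+//     bit 1: -1,-1,-1,-1, +1,+1,+1,+1   (low then high)
+//     bit 0: +1,+1,+1,+1, -1,-1,-1,-1   (high then low)
+// which guarantees a transition in the middle of every bit period.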
+#endif //ANALYZER_MANCHESTER_ENCODER_H
diff --git a/media/libaaudio/examples/loopback/src/analyzer/PeakDetector.h b/media/libaaudio/examples/loopback/src/analyzer/PeakDetector.h
new file mode 100644
index 0000000..4b3b4e7
--- /dev/null
+++ b/media/libaaudio/examples/loopback/src/analyzer/PeakDetector.h
@@ -0,0 +1,68 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANALYZER_PEAK_DETECTOR_H
+#define ANALYZER_PEAK_DETECTOR_H
+
+#include <math.h>
+
+/**
+ * Measure a peak envelope by rising with the peaks,
+ * and decaying exponentially after each peak.
+ * The absolute value of the input signal is used.
+ */
+class PeakDetector {
+public:
+
+    void reset() {
+        mLevel = 0.0;
+    }
+
+    double process(double input) {
+        mLevel *= mDecay; // exponential decay
+        input = fabs(input);
+        // never fall below the input signal
+        if (input > mLevel) {
+            mLevel = input;
+        }
+        return mLevel;
+    }
+
+    double getLevel() const {
+        return mLevel;
+    }
+
+    double getDecay() const {
+        return mDecay;
+    }
+
+    /**
+     * Multiply the level by this amount on every iteration.
+     * This provides an exponential decay curve.
+     * A value just under 1.0 is best, for example 0.99.
+     * @param decay scale level for each input
+     */
+    void setDecay(double decay) {
+        mDecay = decay;
+    }
+
+private:
+    static constexpr double kDefaultDecay = 0.99;
+
+    double mLevel = 0.0;
+    double mDecay = kDefaultDecay;
+};
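+
+// Example of the intended per-sample use ('samples' is a placeholder buffer and
+// the decay value is just the default):
+//
+//     PeakDetector peak;
+//     peak.setDecay(0.99);
+//     for (float s : samples) {
+//         double envelope = peak.process(s);
+//     }
+//     double level = peak.getLevel();
+//
+// With decay d, an isolated peak falls to d^N of its height after N samples,
+// e.g. 0.99^458 ~= 0.01, so it decays by about 40 dB in roughly 10 ms at 48 kHz.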
+#endif //ANALYZER_PEAK_DETECTOR_H
diff --git a/media/libaaudio/examples/loopback/src/analyzer/PseudoRandom.h b/media/libaaudio/examples/loopback/src/analyzer/PseudoRandom.h
new file mode 100644
index 0000000..1c4938c
--- /dev/null
+++ b/media/libaaudio/examples/loopback/src/analyzer/PseudoRandom.h
@@ -0,0 +1,57 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+#ifndef ANALYZER_PSEUDORANDOM_H
+#define ANALYZER_PSEUDORANDOM_H
+
+#include <cctype>
+
+class PseudoRandom {
+public:
+    PseudoRandom(int64_t seed = 99887766)
+            :    mSeed(seed)
+    {}
+
+    /**
+     * Returns the next random double from -1.0 to 1.0
+     *
+     * @return value from -1.0 to 1.0
+     */
+    double nextRandomDouble() {
+        return nextRandomInteger() * (0.5 / (((int32_t)1) << 30));
+    }
+
+    /** Calculate random 32 bit number using linear-congruential method
+     * with known real-time performance.
+     */
+    int32_t nextRandomInteger() {
+#if __has_builtin(__builtin_mul_overflow) && __has_builtin(__builtin_add_overflow)
+        int64_t prod;
+        // Use values for 64-bit sequence from MMIX by Donald Knuth.
+        __builtin_mul_overflow(mSeed, (int64_t)6364136223846793005, &prod);
+        __builtin_add_overflow(prod, (int64_t)1442695040888963407, &mSeed);
+#else
+        mSeed = (mSeed * (int64_t)6364136223846793005) + (int64_t)1442695040888963407;
+#endif
+        return (int32_t) (mSeed >> 32); // The higher bits have a longer sequence.
+    }
+
+private:
+    int64_t mSeed;
+};
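+
+// Example: generating deterministic noise in [-1.0, 1.0) for test signals
+// (the seed value is arbitrary):
+//
+//     PseudoRandom random(12345);
+//     float noise = (float) random.nextRandomDouble();
+//
+// The same seed always yields the same sequence, which keeps test runs repeatable.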
+
+#endif //ANALYZER_PSEUDORANDOM_H
diff --git a/media/libaaudio/examples/loopback/src/analyzer/RandomPulseGenerator.h b/media/libaaudio/examples/loopback/src/analyzer/RandomPulseGenerator.h
new file mode 100644
index 0000000..030050b
--- /dev/null
+++ b/media/libaaudio/examples/loopback/src/analyzer/RandomPulseGenerator.h
@@ -0,0 +1,43 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANALYZER_RANDOM_PULSE_GENERATOR_H
+#define ANALYZER_RANDOM_PULSE_GENERATOR_H
+
+#include <stdlib.h>
+#include "RoundedManchesterEncoder.h"
+
+/**
+ * Encode random ones and zeros using Manchester Code per IEEE 802.3.
+ */
+class RandomPulseGenerator : public RoundedManchesterEncoder {
+public:
+    RandomPulseGenerator(int samplesPerPulse)
+    : RoundedManchesterEncoder(samplesPerPulse) {
+    }
+
+    virtual ~RandomPulseGenerator() = default;
+
+    /**
+     * This will be called when the next byte is needed.
+     * @return random byte
+     */
+    uint8_t onNextByte() override {
+        return static_cast<uint8_t>(rand());
+    }
+};
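+
+// Note: rand() is not seeded here, so the generated bit pattern is identical from
+// run to run; that determinism makes repeated measurements easier to compare, and a
+// caller could call srand() first if a different pulse were wanted.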
+
+#endif //ANALYZER_RANDOM_PULSE_GENERATOR_H
diff --git a/media/libaaudio/examples/loopback/src/analyzer/RoundedManchesterEncoder.h b/media/libaaudio/examples/loopback/src/analyzer/RoundedManchesterEncoder.h
new file mode 100644
index 0000000..f2eba84
--- /dev/null
+++ b/media/libaaudio/examples/loopback/src/analyzer/RoundedManchesterEncoder.h
@@ -0,0 +1,88 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANALYZER_ROUNDED_MANCHESTER_ENCODER_H
+#define ANALYZER_ROUNDED_MANCHESTER_ENCODER_H
+
+#include <math.h>
+#include <memory.h>
+#include <stdlib.h>
+#include "ManchesterEncoder.h"
+
+/**
+ * Encode bytes using Manchester Code.
+ * Round the edges using a half cosine to reduce ringing caused by a hard edge.
+ */
+
+class RoundedManchesterEncoder : public ManchesterEncoder {
+public:
+    RoundedManchesterEncoder(int samplesPerPulse)
+            : ManchesterEncoder(samplesPerPulse) {
+        int rampSize = samplesPerPulse / 4;
+        mZeroAfterZero = std::make_unique<float[]>(samplesPerPulse);
+        mZeroAfterOne = std::make_unique<float[]>(samplesPerPulse);
+
+        int sampleIndex = 0;
+        for (int rampIndex = 0; rampIndex < rampSize; rampIndex++) {
+            float phase = (rampIndex + 1) * M_PI / rampSize;
+            float sample = -cosf(phase);
+            mZeroAfterZero[sampleIndex] = sample;
+            mZeroAfterOne[sampleIndex] = 1.0f;
+            sampleIndex++;
+        }
+        for (int rampIndex = 0; rampIndex < rampSize; rampIndex++) {
+            mZeroAfterZero[sampleIndex] = 1.0f;
+            mZeroAfterOne[sampleIndex] = 1.0f;
+            sampleIndex++;
+        }
+        for (int rampIndex = 0; rampIndex < rampSize; rampIndex++) {
+            float phase = (rampIndex + 1) * M_PI / rampSize;
+            float sample = cosf(phase);
+            mZeroAfterZero[sampleIndex] = sample;
+            mZeroAfterOne[sampleIndex] = sample;
+            sampleIndex++;
+        }
+        for (int rampIndex = 0; rampIndex < rampSize; rampIndex++) {
+            mZeroAfterZero[sampleIndex] = -1.0f;
+            mZeroAfterOne[sampleIndex] = -1.0f;
+            sampleIndex++;
+        }
+    }
+
+    void onNextBit(bool current) override {
+        // Do we need to use the rounded edge?
+        mCurrentSamples = (current ^ mPreviousBit)
+                          ? mZeroAfterOne.get()
+                          : mZeroAfterZero.get();
+        mPreviousBit = current;
+    }
+
+    float nextFloat() override {
+        advanceSample();
+        float output = mCurrentSamples[mCursor];
+        if (getCurrentBit()) output = -output;
+        return output;
+    }
+
+private:
+
+    bool mPreviousBit = false;
+    float *mCurrentSamples = nullptr;
+    std::unique_ptr<float[]> mZeroAfterZero;
+    std::unique_ptr<float[]> mZeroAfterOne;
+};
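+
+// Note on the tables built above: each pulse is split into four quarter-length
+// segments. mZeroAfterZero holds a zero bit that follows another zero (rising
+// half-cosine, flat high, falling half-cosine, flat low); mZeroAfterOne has the
+// same shape but starts flat high because the previous bit already ended high.
+// nextFloat() negates the table value when the current bit is a one.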
+
+#endif //ANALYZER_ROUNDED_MANCHESTER_ENCODER_H
diff --git a/media/libaaudio/examples/loopback/src/loopback.cpp b/media/libaaudio/examples/loopback/src/loopback.cpp
index 49d921f..0d2ec70 100644
--- a/media/libaaudio/examples/loopback/src/loopback.cpp
+++ b/media/libaaudio/examples/loopback/src/loopback.cpp
@@ -20,6 +20,8 @@
 #include <assert.h>
 #include <cctype>
 #include <errno.h>
+#include <iomanip>
+#include <iostream>
 #include <math.h>
 #include <stdio.h>
 #include <stdlib.h>
@@ -33,7 +35,9 @@
 #include "AAudioSimplePlayer.h"
 #include "AAudioSimpleRecorder.h"
 #include "AAudioExampleUtils.h"
-#include "LoopbackAnalyzer.h"
+
+#include "analyzer/GlitchAnalyzer.h"
+#include "analyzer/LatencyAnalyzer.h"
 #include "../../utils/AAudioExampleUtils.h"
 
 // V0.4.00 = rectify and low-pass filter the echos, auto-correlate entire echo
@@ -41,7 +45,8 @@
 //           fix -n option to set output buffer for -tm
 //           plot first glitch
 // V0.4.02 = allow -n0 for minimal buffer size
-#define APP_VERSION             "0.4.02"
+// V0.5.00 = use latency analyzer from OboeTester, uses random noise for latency
+#define APP_VERSION             "0.5.00"
 
 // Tag for machine readable results as property = value pairs
 #define RESULT_TAG              "RESULT: "
@@ -57,6 +62,20 @@
 constexpr int kDefaultHangTimeMillis = 50;
 constexpr int kMaxGlitchEventsToSave = 32;
 
+static void printAudioScope(float sample) {
+    const int maxStars = 80; // arbitrary, fits on one line
+    char c = '*';
+    if (sample < -1.0) {
+        sample = -1.0;
+        c = '$';
+    } else if (sample > 1.0) {
+        sample = 1.0;
+        c = '$';
+    }
+    int numSpaces = (int) (((sample + 1.0) * 0.5) * maxStars);
+    printf("%*c%c\n", numSpaces, ' ', c);
+}
+
 struct LoopbackData {
     AAudioStream      *inputStream = nullptr;
     AAudioStream      *outputStream = nullptr;
@@ -83,8 +102,8 @@
     aaudio_result_t    inputError = AAUDIO_OK;
     aaudio_result_t    outputError = AAUDIO_OK;
 
-    SineAnalyzer       sineAnalyzer;
-    EchoAnalyzer       echoAnalyzer;
+    GlitchAnalyzer     sineAnalyzer;
+    PulseLatencyAnalyzer echoAnalyzer;
     AudioRecording     audioRecording;
     LoopbackProcessor *loopbackProcessor;
 
@@ -254,17 +273,18 @@
             }
 
             // Analyze the data.
-            LoopbackProcessor::process_result procResult = myData->loopbackProcessor->process(myData->inputFloatData,
+            myData->loopbackProcessor->process(myData->inputFloatData,
                                                myData->actualInputChannelCount,
+                                               numFrames,
                                                outputData,
                                                myData->actualOutputChannelCount,
                                                numFrames);
-
-            if (procResult == LoopbackProcessor::PROCESS_RESULT_GLITCH) {
-                if (myData->numGlitchEvents < kMaxGlitchEventsToSave) {
-                    myData->glitchFrames[myData->numGlitchEvents++] = myData->audioRecording.size();
-                }
-            }
+//
+//            if (procResult == LoopbackProcessor::PROCESS_RESULT_GLITCH) {
+//                if (myData->numGlitchEvents < kMaxGlitchEventsToSave) {
+//                    myData->glitchFrames[myData->numGlitchEvents++] = myData->audioRecording.size();
+//                }
+//            }
 
             // Save for later.
             myData->audioRecording.write(myData->inputFloatData,
@@ -283,8 +303,8 @@
 }
 
 static void MyErrorCallbackProc(
-        AAudioStream *stream __unused,
-        void *userData __unused,
+        AAudioStream * /* stream */,
+        void * userData,
         aaudio_result_t error) {
     printf("Error Callback, error: %d\n",(int)error);
     LoopbackData *myData = (LoopbackData *) userData;
@@ -305,8 +325,8 @@
     printf("          l for _LATENCY\n");
     printf("          p for _POWER_SAVING\n");
     printf("      -t{test}          select test mode\n");
-    printf("          m for sine magnitude\n");
-    printf("          e for echo latency (default)\n");
+    printf("          g for Glitch detection\n");
+    printf("          l for round trip Latency (default)\n");
     printf("          f for file latency, analyzes %s\n\n", FILENAME_ECHOS);
     printf("      -X  use EXCLUSIVE mode for input\n");
     printf("Example:  aaudio_loopback -n2 -pl -Pl -x\n");
@@ -333,20 +353,22 @@
 }
 
 enum {
-    TEST_SINE_MAGNITUDE = 0,
-    TEST_ECHO_LATENCY,
+    TEST_GLITCHES = 0,
+    TEST_LATENCY,
     TEST_FILE_LATENCY,
 };
 
 static int parseTestMode(char c) {
-    int testMode = TEST_ECHO_LATENCY;
+    int testMode = TEST_LATENCY;
     c = tolower(c);
     switch (c) {
-        case 'm':
-            testMode = TEST_SINE_MAGNITUDE;
+        case 'm': // deprecated
+        case 'g':
+            testMode = TEST_GLITCHES;
             break;
-        case 'e':
-            testMode = TEST_ECHO_LATENCY;
+        case 'e': // deprecated
+        case 'l':
+            testMode = TEST_LATENCY;
             break;
         case 'f':
             testMode = TEST_FILE_LATENCY;
@@ -408,9 +430,10 @@
     int32_t               actualSampleRate           = 0;
     int                   written                    = 0;
 
-    int                   testMode                   = TEST_ECHO_LATENCY;
+    int                   testMode                   = TEST_LATENCY;
     double                gain                       = 1.0;
     int                   hangTimeMillis             = 0;
+    std::string           report;
 
     // Make printf print immediately so that debug info is not stuck
     // in a buffer if we hang or crash.
@@ -488,22 +511,21 @@
     int32_t requestedOutputBursts = argParser.getNumberOfBursts();
 
     switch(testMode) {
-        case TEST_SINE_MAGNITUDE:
+        case TEST_GLITCHES:
             loopbackData.loopbackProcessor = &loopbackData.sineAnalyzer;
             break;
-        case TEST_ECHO_LATENCY:
-            loopbackData.echoAnalyzer.setGain(gain);
+        case TEST_LATENCY:
+            // TODO loopbackData.echoAnalyzer.setGain(gain);
             loopbackData.loopbackProcessor = &loopbackData.echoAnalyzer;
             break;
         case TEST_FILE_LATENCY: {
-            loopbackData.echoAnalyzer.setGain(gain);
-
+            // TODO loopbackData.echoAnalyzer.setGain(gain);
             loopbackData.loopbackProcessor = &loopbackData.echoAnalyzer;
             int read = loopbackData.loopbackProcessor->load(FILENAME_ECHOS);
             printf("main() read %d mono samples from %s on Android device, rate = %d\n",
                    read, FILENAME_ECHOS,
                    loopbackData.loopbackProcessor->getSampleRate());
-            loopbackData.loopbackProcessor->report();
+            std::cout << loopbackData.loopbackProcessor->analyze();
             goto report_result;
         }
             break;
@@ -557,7 +579,7 @@
         int32_t actualCapacity = AAudioStream_getBufferCapacityInFrames(inputStream);
         (void) AAudioStream_setBufferSizeInFrames(inputStream, actualCapacity);
 
-        if (testMode == TEST_SINE_MAGNITUDE
+        if (testMode == TEST_GLITCHES
                 && requestedOutputBursts == AAUDIO_UNSPECIFIED) {
             result = AAudioStream_setBufferSizeInFrames(outputStream, actualCapacity);
             if (result < 0) {
@@ -594,10 +616,10 @@
     loopbackData.inputFloatData = new float[loopbackData.inputFramesMaximum *
                                               loopbackData.actualInputChannelCount]{};
 
-    loopbackData.loopbackProcessor->reset();
-
     loopbackData.hangTimeMillis = hangTimeMillis;
 
+    loopbackData.loopbackProcessor->prepareToTest();
+
     // Start OUTPUT first so INPUT does not overflow.
     result = player.start();
     if (result != AAUDIO_OK) {
@@ -669,7 +691,8 @@
 
     printf("input error = %d = %s\n",
            loopbackData.inputError, AAudio_convertResultToText(loopbackData.inputError));
-
+/*
+    // TODO Restore this code some day if we want to save files.
     written = loopbackData.loopbackProcessor->save(FILENAME_ECHOS);
     if (written > 0) {
         printf("main() wrote %8d mono samples to \"%s\" on Android device\n",
@@ -681,9 +704,9 @@
         printf("main() wrote %8d mono samples to \"%s\" on Android device\n",
                written, FILENAME_ALL);
     }
-
+*/
     if (loopbackData.inputError == AAUDIO_OK) {
-        if (testMode == TEST_SINE_MAGNITUDE) {
+        if (testMode == TEST_GLITCHES) {
             if (loopbackData.numGlitchEvents > 0) {
                 // Graph around the first glitch if there is one.
                 const int32_t start = loopbackData.glitchFrames[0] - 8;
@@ -697,7 +720,8 @@
             }
         }
 
-        loopbackData.loopbackProcessor->report();
+        std::cout << "Please wait several seconds for analysis to complete.\n";
+        std::cout << loopbackData.loopbackProcessor->analyze();
     }
 
     {
diff --git a/media/libaaudio/include/aaudio/AAudio.h b/media/libaaudio/include/aaudio/AAudio.h
index 5bebd61..2f43b22 100644
--- a/media/libaaudio/include/aaudio/AAudio.h
+++ b/media/libaaudio/include/aaudio/AAudio.h
@@ -227,6 +227,8 @@
 };
 typedef int32_t aaudio_performance_mode_t;
 
+#define AAUDIO_SYSTEM_USAGE_OFFSET 1000
+
 /**
  * The USAGE attribute expresses "why" you are playing a sound, what is this sound used for.
  * This information is used by certain platforms or routing policies
@@ -297,7 +299,31 @@
     /**
      * Use this for audio responses to user queries, audio instructions or help utterances.
      */
-    AAUDIO_USAGE_ASSISTANT = 16
+    AAUDIO_USAGE_ASSISTANT = 16,
+
+    /**
+     * Use this when playing sounds in an emergency.
+     * Privileged MODIFY_AUDIO_ROUTING permission required.
+     */
+    AAUDIO_SYSTEM_USAGE_EMERGENCY = AAUDIO_SYSTEM_USAGE_OFFSET,
+
+    /**
+     * Use this for safety sounds and alerts, for example backup camera obstacle detection.
+     * Privileged MODIFY_AUDIO_ROUTING permission required.
+     */
+    AAUDIO_SYSTEM_USAGE_SAFETY = AAUDIO_SYSTEM_USAGE_OFFSET + 1,
+
+    /**
+     * Use this for vehicle status alerts and information, for example the check engine light.
+     * Privileged MODIFY_AUDIO_ROUTING permission required.
+     */
+    AAUDIO_SYSTEM_USAGE_VEHICLE_STATUS = AAUDIO_SYSTEM_USAGE_OFFSET + 2,
+
+    /**
+     * Use this for traffic announcements, etc.
+     * Privileged MODIFY_AUDIO_ROUTING permission required.
+     */
+    AAUDIO_SYSTEM_USAGE_ANNOUNCEMENT = AAUDIO_SYSTEM_USAGE_OFFSET + 3,
 };
 typedef int32_t aaudio_usage_t;
 
@@ -995,10 +1021,26 @@
 // Stream Control
 // ============================================================
 
+#if __ANDROID_API__ >= 30
 /**
- * Free the resources associated with a stream created by AAudioStreamBuilder_openStream()
+ * Free the audio resources associated with a stream created by
+ * AAudioStreamBuilder_openStream().
+ * AAudioStream_close() should be called at some point after calling
+ * this function.
  *
- * Available since API level 26.
+ * After this call, the stream will be in {@link #AAUDIO_STREAM_STATE_CLOSING}.
+ *
+ * @param stream reference provided by AAudioStreamBuilder_openStream()
+ * @return {@link #AAUDIO_OK} or a negative error.
+ */
+AAUDIO_API aaudio_result_t  AAudioStream_release(AAudioStream* stream) __INTRODUCED_IN(30);
+#endif // __ANDROID_API__
+
+/**
+ * Delete the internal data structures associated with the stream created
+ * by AAudioStreamBuilder_openStream().
+ *
+ * If AAudioStream_release() has not been called then it will be called automatically.
  *
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  * @return {@link #AAUDIO_OK} or a negative error.
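
A minimal sketch of the intended call order, based on the documentation above; the checkReleaseThenClose() test added to test_various.cpp later in this patch exercises the same sequence:

    AAudioStream_requestStop(stream);
    AAudioStream_release(stream);  // frees audio resources; state becomes AAUDIO_STREAM_STATE_CLOSING
    // The handle itself remains valid between release() and close().
    AAudioStream_close(stream);    // deletes the internal data structures
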
diff --git a/media/libaaudio/src/Android.bp b/media/libaaudio/src/Android.bp
index baada22..8753aa9 100644
--- a/media/libaaudio/src/Android.bp
+++ b/media/libaaudio/src/Android.bp
@@ -44,14 +44,6 @@
     sanitize: {
         integer_overflow: true,
         misc_undefined: ["bounds"],
-        diag: {
-            integer_overflow: true,
-            misc_undefined: ["bounds"],
-            no_recover: [
-                "bounds",
-                "integer",
-            ],
-        },
     },
 
     stubs: {
@@ -137,14 +129,5 @@
     sanitize: {
         integer_overflow: true,
         misc_undefined: ["bounds"],
-        diag: {
-            integer_overflow: true,
-            misc_undefined: ["bounds"],
-            no_recover: [
-                "bounds",
-                "integer",
-            ],
-        },
     },
-
 }
diff --git a/media/libaaudio/src/client/AudioStreamInternal.cpp b/media/libaaudio/src/client/AudioStreamInternal.cpp
index bfad254..b6548e6 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternal.cpp
@@ -258,18 +258,17 @@
     return result;
 
 error:
-    close();
+    releaseCloseFinal();
     return result;
 }
 
 // This must be called under mStreamLock.
-aaudio_result_t AudioStreamInternal::close() {
+aaudio_result_t AudioStreamInternal::release_l() {
     aaudio_result_t result = AAUDIO_OK;
     ALOGV("%s(): mServiceStreamHandle = 0x%08X", __func__, mServiceStreamHandle);
     if (mServiceStreamHandle != AAUDIO_HANDLE_INVALID) {
-        // Don't close a stream while it is running.
         aaudio_stream_state_t currentState = getState();
-        // Don't close a stream while it is running. Stop it first.
+        // Don't release a stream while it is running. Stop it first.
         // If DISCONNECTED then we should still try to stop in case the
         // error callback is still running.
         if (isActive() || currentState == AAUDIO_STREAM_STATE_DISCONNECTED) {
@@ -282,10 +281,8 @@
         mServiceInterface.closeStream(serviceStreamHandle);
         delete[] mCallbackBuffer;
         mCallbackBuffer = nullptr;
-
-        setState(AAUDIO_STREAM_STATE_CLOSED);
         result = mEndPointParcelable.close();
-        aaudio_result_t result2 = AudioStream::close();
+        aaudio_result_t result2 = AudioStream::release_l();
         return (result != AAUDIO_OK) ? result : result2;
     } else {
         return AAUDIO_ERROR_INVALID_HANDLE;
diff --git a/media/libaaudio/src/client/AudioStreamInternal.h b/media/libaaudio/src/client/AudioStreamInternal.h
index 596d37f..8843a8a 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.h
+++ b/media/libaaudio/src/client/AudioStreamInternal.h
@@ -58,7 +58,7 @@
 
     aaudio_result_t open(const AudioStreamBuilder &builder) override;
 
-    aaudio_result_t close() override;
+    aaudio_result_t release_l() override;
 
     aaudio_result_t setBufferSize(int32_t requestedFrames) override;
 
diff --git a/media/libaaudio/src/client/AudioStreamInternalPlay.cpp b/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
index dc9f48c..536009a 100644
--- a/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
@@ -49,7 +49,7 @@
                              getDeviceChannelCount());
 
         if (result != AAUDIO_OK) {
-            close();
+            releaseCloseFinal();
         }
         // Sample rate is constrained to common values by now and should not overflow.
         int32_t numFrames = kRampMSec * getSampleRate() / AAUDIO_MILLIS_PER_SECOND;
diff --git a/media/libaaudio/src/core/AAudioAudio.cpp b/media/libaaudio/src/core/AAudioAudio.cpp
index 184e9cb..8965875 100644
--- a/media/libaaudio/src/core/AAudioAudio.cpp
+++ b/media/libaaudio/src/core/AAudioAudio.cpp
@@ -25,7 +25,6 @@
 
 #include <aaudio/AAudio.h>
 #include <aaudio/AAudioTesting.h>
-
 #include "AudioClock.h"
 #include "AudioGlobal.h"
 #include "AudioStreamBuilder.h"
@@ -231,21 +230,42 @@
     return AAUDIO_ERROR_NULL;
 }
 
-AAUDIO_API aaudio_result_t  AAudioStream_close(AAudioStream* stream)
-{
+AAUDIO_API aaudio_result_t  AAudioStream_release(AAudioStream* stream) {
     aaudio_result_t result = AAUDIO_ERROR_NULL;
-    AudioStream *audioStream = convertAAudioStreamToAudioStream(stream);
+    AudioStream* audioStream = convertAAudioStreamToAudioStream(stream);
     if (audioStream != nullptr) {
         aaudio_stream_id_t id = audioStream->getId();
         ALOGD("%s(s#%u) called ---------------", __func__, id);
-        result = audioStream->safeClose();
-        // Close will only fail if called illegally, for example, from a callback.
+        result = audioStream->safeRelease();
+        // safeRelease() will only fail if called illegally, for example, from a callback.
+        // That would result in the release of an active stream, which would cause a crash.
+        if (result != AAUDIO_OK) {
+            ALOGW("%s(s#%u) failed. Release it from another thread.",
+                  __func__, id);
+        }
+        ALOGD("%s(s#%u) returned %d %s ---------", __func__,
+                id, result, AAudio_convertResultToText(result));
+    }
+    return result;
+}
+
+AAUDIO_API aaudio_result_t  AAudioStream_close(AAudioStream* stream) {
+    aaudio_result_t result = AAUDIO_ERROR_NULL;
+    AudioStream* audioStream = convertAAudioStreamToAudioStream(stream);
+    if (audioStream != nullptr) {
+        aaudio_stream_id_t id = audioStream->getId();
+        ALOGD("%s(s#%u) called ---------------", __func__, id);
+        result = audioStream->safeRelease();
+        // safeRelease will only fail if called illegally, for example, from a callback.
         // That would result in deleting an active stream, which would cause a crash.
-        if (result == AAUDIO_OK) {
-            audioStream->unregisterPlayerBase();
-            delete audioStream;
+        if (result != AAUDIO_OK) {
+            ALOGW("%s(s#%u) failed. Close it from another thread.",
+                  __func__, id);
         } else {
-            ALOGW("%s attempt to close failed. Close it from another thread.", __func__);
+            audioStream->unregisterPlayerBase();
+            // Mark CLOSED to keep destructors from asserting.
+            audioStream->closeFinal();
+            delete audioStream;
         }
         ALOGD("%s(s#%u) returned %d ---------", __func__, id, result);
     }
diff --git a/media/libaaudio/src/core/AAudioStreamParameters.cpp b/media/libaaudio/src/core/AAudioStreamParameters.cpp
index 58058f5..5f45261 100644
--- a/media/libaaudio/src/core/AAudioStreamParameters.cpp
+++ b/media/libaaudio/src/core/AAudioStreamParameters.cpp
@@ -133,6 +133,10 @@
         case AAUDIO_USAGE_ASSISTANCE_SONIFICATION:
         case AAUDIO_USAGE_GAME:
         case AAUDIO_USAGE_ASSISTANT:
+        case AAUDIO_SYSTEM_USAGE_EMERGENCY:
+        case AAUDIO_SYSTEM_USAGE_SAFETY:
+        case AAUDIO_SYSTEM_USAGE_VEHICLE_STATUS:
+        case AAUDIO_SYSTEM_USAGE_ANNOUNCEMENT:
             break; // valid
         default:
             ALOGD("usage not valid = %d", mUsage);
diff --git a/media/libaaudio/src/core/AudioGlobal.cpp b/media/libaaudio/src/core/AudioGlobal.cpp
index e6d9a0d..d299761 100644
--- a/media/libaaudio/src/core/AudioGlobal.cpp
+++ b/media/libaaudio/src/core/AudioGlobal.cpp
@@ -87,9 +87,9 @@
         AAUDIO_CASE_ENUM(AAUDIO_STREAM_STATE_FLUSHED);
         AAUDIO_CASE_ENUM(AAUDIO_STREAM_STATE_STOPPING);
         AAUDIO_CASE_ENUM(AAUDIO_STREAM_STATE_STOPPED);
-        AAUDIO_CASE_ENUM(AAUDIO_STREAM_STATE_DISCONNECTED);
         AAUDIO_CASE_ENUM(AAUDIO_STREAM_STATE_CLOSING);
         AAUDIO_CASE_ENUM(AAUDIO_STREAM_STATE_CLOSED);
+        AAUDIO_CASE_ENUM(AAUDIO_STREAM_STATE_DISCONNECTED);
     }
     return "Unrecognized AAudio state.";
 }
diff --git a/media/libaaudio/src/core/AudioStream.cpp b/media/libaaudio/src/core/AudioStream.cpp
index 1560e0c..f51db70 100644
--- a/media/libaaudio/src/core/AudioStream.cpp
+++ b/media/libaaudio/src/core/AudioStream.cpp
@@ -249,25 +249,29 @@
     return requestStop();
 }
 
-aaudio_result_t AudioStream::safeClose() {
-    // This get temporarily unlocked in the close when joining callback threads.
+aaudio_result_t AudioStream::safeRelease() {
+    // This gets temporarily unlocked in the release() call when joining callback threads.
     std::lock_guard<std::mutex> lock(mStreamLock);
     if (collidesWithCallback()) {
         ALOGE("%s cannot be called from a callback!", __func__);
         return AAUDIO_ERROR_INVALID_STATE;
     }
-    return close();
+    if (getState() == AAUDIO_STREAM_STATE_CLOSING) {
+        return AAUDIO_OK;
+    }
+    return release_l();
 }
 
 void AudioStream::setState(aaudio_stream_state_t state) {
-    ALOGV("%s(%d) from %d to %d", __func__, getId(), mState, state);
+    ALOGD("%s(s#%d) from %d to %d", __func__, getId(), mState, state);
     // CLOSED is a final state
     if (mState == AAUDIO_STREAM_STATE_CLOSED) {
         ALOGE("%s(%d) tried to set to %d but already CLOSED", __func__, getId(), state);
 
-    // Once DISCONNECTED, we can only move to CLOSED state.
+    // Once DISCONNECTED, we can only move to CLOSING or CLOSED state.
     } else if (mState == AAUDIO_STREAM_STATE_DISCONNECTED
-               && state != AAUDIO_STREAM_STATE_CLOSED) {
+               && !(state == AAUDIO_STREAM_STATE_CLOSING
+                   || state == AAUDIO_STREAM_STATE_CLOSED)) {
         ALOGE("%s(%d) tried to set to %d but already DISCONNECTED", __func__, getId(), state);
 
     } else {
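
The guard above can be restated as a standalone predicate (a sketch for clarity; isTransitionAllowed() is a hypothetical helper, not something setState() actually calls):

    // CLOSED is terminal; DISCONNECTED may only advance to CLOSING or CLOSED;
    // any other state may move anywhere.
    static bool isTransitionAllowed(aaudio_stream_state_t from, aaudio_stream_state_t to) {
        if (from == AAUDIO_STREAM_STATE_CLOSED) return false;
        if (from == AAUDIO_STREAM_STATE_DISCONNECTED) {
            return to == AAUDIO_STREAM_STATE_CLOSING
                    || to == AAUDIO_STREAM_STATE_CLOSED;
        }
        return true;
    }
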
diff --git a/media/libaaudio/src/core/AudioStream.h b/media/libaaudio/src/core/AudioStream.h
index b4ffcf2..9bda41b 100644
--- a/media/libaaudio/src/core/AudioStream.h
+++ b/media/libaaudio/src/core/AudioStream.h
@@ -115,13 +115,32 @@
     virtual aaudio_result_t open(const AudioStreamBuilder& builder);
 
     /**
-     * Close the stream and deallocate any resources from the open() call.
-     * It is safe to call close() multiple times.
+     * Free any hardware or system resources from the open() call.
+     * It is safe to call release_l() multiple times.
      */
-    virtual aaudio_result_t close() {
+    virtual aaudio_result_t release_l() {
+        setState(AAUDIO_STREAM_STATE_CLOSING);
         return AAUDIO_OK;
     }
 
+    aaudio_result_t closeFinal() {
+        // State is checked by destructor.
+        setState(AAUDIO_STREAM_STATE_CLOSED);
+        return AAUDIO_OK;
+    }
+
+    /**
+     * Release then close the stream.
+     * @return AAUDIO_OK or negative error.
+     */
+    aaudio_result_t releaseCloseFinal() {
+        aaudio_result_t result = release_l(); // TODO review locking
+        if (result == AAUDIO_OK) {
+            result = closeFinal();
+        }
+        return result;
+    }
+
     // This is only used to identify a stream in the logs without
     // revealing any pointers.
     aaudio_stream_id_t getId() {
@@ -373,7 +392,7 @@
      */
     aaudio_result_t systemStopFromCallback();
 
-    aaudio_result_t safeClose();
+    aaudio_result_t safeRelease();
 
 protected:
 
diff --git a/media/libaaudio/src/legacy/AudioStreamRecord.cpp b/media/libaaudio/src/legacy/AudioStreamRecord.cpp
index 54af580..2ed82d2 100644
--- a/media/libaaudio/src/legacy/AudioStreamRecord.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamRecord.cpp
@@ -182,7 +182,7 @@
         // Did we get a valid track?
         status_t status = mAudioRecord->initCheck();
         if (status != OK) {
-            close();
+            releaseCloseFinal();
             ALOGE("open(), initCheck() returned %d", status);
             return AAudioConvert_androidToAAudioResult(status);
         }
@@ -278,16 +278,17 @@
     return AAUDIO_OK;
 }
 
-aaudio_result_t AudioStreamRecord::close()
-{
-    // TODO add close() or release() to AudioRecord API then call it from here
-    if (getState() != AAUDIO_STREAM_STATE_CLOSED) {
+aaudio_result_t AudioStreamRecord::release_l() {
+    // TODO add close() or release() to AudioFlinger's AudioRecord API.
+    //  Then call it from here
+    if (getState() != AAUDIO_STREAM_STATE_CLOSING) {
         mAudioRecord->removeAudioDeviceCallback(mDeviceCallback);
         mAudioRecord.clear();
-        setState(AAUDIO_STREAM_STATE_CLOSED);
+        mFixedBlockWriter.close();
+        return AudioStream::release_l();
+    } else {
+        return AAUDIO_OK; // already released
     }
-    mFixedBlockWriter.close();
-    return AudioStream::close();
 }
 
 const void * AudioStreamRecord::maybeConvertDeviceData(const void *audioData, int32_t numFrames) {
diff --git a/media/libaaudio/src/legacy/AudioStreamRecord.h b/media/libaaudio/src/legacy/AudioStreamRecord.h
index 2f41d34..c5944c7 100644
--- a/media/libaaudio/src/legacy/AudioStreamRecord.h
+++ b/media/libaaudio/src/legacy/AudioStreamRecord.h
@@ -38,7 +38,7 @@
     virtual ~AudioStreamRecord();
 
     aaudio_result_t open(const AudioStreamBuilder & builder) override;
-    aaudio_result_t close() override;
+    aaudio_result_t release_l() override;
 
     aaudio_result_t requestStart() override;
     aaudio_result_t requestStop() override;
diff --git a/media/libaaudio/src/legacy/AudioStreamTrack.cpp b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
index 094cdd1..00963d6 100644
--- a/media/libaaudio/src/legacy/AudioStreamTrack.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
@@ -176,7 +176,7 @@
     // Did we get a valid track?
     status_t status = mAudioTrack->initCheck();
     if (status != NO_ERROR) {
-        close();
+        releaseCloseFinal();
         ALOGE("open(), initCheck() returned %d", status);
         return AAudioConvert_androidToAAudioResult(status);
     }
@@ -239,14 +239,18 @@
     return AAUDIO_OK;
 }
 
-aaudio_result_t AudioStreamTrack::close()
-{
-    if (getState() != AAUDIO_STREAM_STATE_CLOSED) {
+aaudio_result_t AudioStreamTrack::release_l() {
+    if (getState() != AAUDIO_STREAM_STATE_CLOSING) {
         mAudioTrack->removeAudioDeviceCallback(mDeviceCallback);
-        setState(AAUDIO_STREAM_STATE_CLOSED);
+        // TODO Investigate why clear() causes a hang in test_various.cpp
+        // when close() is called from a data callback.
+        // The same call in AudioRecord is OK!
+        // mAudioTrack.clear();
+        mFixedBlockReader.close();
+        return AudioStream::release_l();
+    } else {
+        return AAUDIO_OK; // already released
     }
-    mFixedBlockReader.close();
-    return AAUDIO_OK;
 }
 
 void AudioStreamTrack::processCallback(int event, void *info) {
diff --git a/media/libaaudio/src/legacy/AudioStreamTrack.h b/media/libaaudio/src/legacy/AudioStreamTrack.h
index 68608de..550f693 100644
--- a/media/libaaudio/src/legacy/AudioStreamTrack.h
+++ b/media/libaaudio/src/legacy/AudioStreamTrack.h
@@ -41,7 +41,7 @@
 
 
     aaudio_result_t open(const AudioStreamBuilder & builder) override;
-    aaudio_result_t close() override;
+    aaudio_result_t release_l() override;
 
     aaudio_result_t requestStart() override;
     aaudio_result_t requestPause() override;
diff --git a/media/libaaudio/src/libaaudio.map.txt b/media/libaaudio/src/libaaudio.map.txt
index a87ede3..2e00aa5 100644
--- a/media/libaaudio/src/libaaudio.map.txt
+++ b/media/libaaudio/src/libaaudio.map.txt
@@ -22,6 +22,7 @@
     AAudioStreamBuilder_setInputPreset; # introduced=28
     AAudioStreamBuilder_setAllowedCapturePolicy; # introduced=29
     AAudioStreamBuilder_setSessionId;   # introduced=28
+    AAudioStreamBuilder_setPrivacySensitive;   # introduced=30
     AAudioStreamBuilder_openStream;
     AAudioStreamBuilder_delete;
     AAudioStream_close;
@@ -56,6 +57,8 @@
     AAudioStream_getSessionId;   # introduced=28
     AAudioStream_getTimestamp;
     AAudioStream_isMMapUsed;
+    AAudioStream_isPrivacySensitive;   # introduced=30
+    AAudioStream_release;        # introduced=30
   local:
     *;
 };
diff --git a/media/libaaudio/src/utility/AAudioUtilities.cpp b/media/libaaudio/src/utility/AAudioUtilities.cpp
index ef89697..9007b10 100644
--- a/media/libaaudio/src/utility/AAudioUtilities.cpp
+++ b/media/libaaudio/src/utility/AAudioUtilities.cpp
@@ -183,6 +183,10 @@
     STATIC_ASSERT(AAUDIO_USAGE_ASSISTANCE_SONIFICATION == AUDIO_USAGE_ASSISTANCE_SONIFICATION);
     STATIC_ASSERT(AAUDIO_USAGE_GAME == AUDIO_USAGE_GAME);
     STATIC_ASSERT(AAUDIO_USAGE_ASSISTANT == AUDIO_USAGE_ASSISTANT);
+    STATIC_ASSERT(AAUDIO_SYSTEM_USAGE_EMERGENCY == AUDIO_USAGE_EMERGENCY);
+    STATIC_ASSERT(AAUDIO_SYSTEM_USAGE_SAFETY == AUDIO_USAGE_SAFETY);
+    STATIC_ASSERT(AAUDIO_SYSTEM_USAGE_VEHICLE_STATUS == AUDIO_USAGE_VEHICLE_STATUS);
+    STATIC_ASSERT(AAUDIO_SYSTEM_USAGE_ANNOUNCEMENT == AUDIO_USAGE_ANNOUNCEMENT);
     if (usage == AAUDIO_UNSPECIFIED) {
         usage = AAUDIO_USAGE_MEDIA;
     }
diff --git a/media/libaaudio/tests/test_attributes.cpp b/media/libaaudio/tests/test_attributes.cpp
index 32ee2a3..d540866 100644
--- a/media/libaaudio/tests/test_attributes.cpp
+++ b/media/libaaudio/tests/test_attributes.cpp
@@ -33,6 +33,7 @@
                             aaudio_content_type_t contentType,
                             aaudio_input_preset_t preset = DONT_SET,
                             aaudio_allowed_capture_policy_t capturePolicy = DONT_SET,
+                            int privacyMode = DONT_SET,
                             aaudio_direction_t direction = AAUDIO_DIRECTION_OUTPUT) {
 
     float *buffer = new float[kNumFrames * kChannelCount];
@@ -60,6 +61,9 @@
     if (capturePolicy != DONT_SET) {
         AAudioStreamBuilder_setAllowedCapturePolicy(aaudioBuilder, capturePolicy);
     }
+    if (privacyMode != DONT_SET) {
+        AAudioStreamBuilder_setPrivacySensitive(aaudioBuilder, (bool)privacyMode);
+    }
 
     // Create an AAudioStream using the Builder.
     ASSERT_EQ(AAUDIO_OK, AAudioStreamBuilder_openStream(aaudioBuilder, &aaudioStream));
@@ -90,6 +94,13 @@
             : preset;
     EXPECT_EQ(expectedCapturePolicy, AAudioStream_getAllowedCapturePolicy(aaudioStream));
 
+    bool expectedPrivacyMode =
+            (privacyMode == DONT_SET)
+                ? (preset == AAUDIO_INPUT_PRESET_VOICE_COMMUNICATION
+                        || preset == AAUDIO_INPUT_PRESET_CAMCORDER)
+                : privacyMode;
+    EXPECT_EQ(expectedPrivacyMode, AAudioStream_isPrivacySensitive(aaudioStream));
+
     EXPECT_EQ(AAUDIO_OK, AAudioStream_requestStart(aaudioStream));
 
     if (direction == AAUDIO_DIRECTION_INPUT) {
@@ -120,7 +131,11 @@
     AAUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
     AAUDIO_USAGE_ASSISTANCE_SONIFICATION,
     AAUDIO_USAGE_GAME,
-    AAUDIO_USAGE_ASSISTANT
+    AAUDIO_USAGE_ASSISTANT,
+    AAUDIO_SYSTEM_USAGE_EMERGENCY,
+    AAUDIO_SYSTEM_USAGE_SAFETY,
+    AAUDIO_SYSTEM_USAGE_VEHICLE_STATUS,
+    AAUDIO_SYSTEM_USAGE_ANNOUNCEMENT
 };
 
 static const aaudio_content_type_t sContentypes[] = {
@@ -151,6 +166,12 @@
     AAUDIO_ALLOW_CAPTURE_BY_NONE,
 };
 
+static const int sPrivacyModes[] = {
+    DONT_SET,
+    false,
+    true,
+};
+
 static void checkAttributesUsage(aaudio_performance_mode_t perfMode) {
     for (aaudio_usage_t usage : sUsages) {
         checkAttributes(perfMode, usage, DONT_SET);
@@ -170,6 +191,7 @@
                         DONT_SET,
                         inputPreset,
                         DONT_SET,
+                        DONT_SET,
                         AAUDIO_DIRECTION_INPUT);
     }
 }
@@ -185,6 +207,18 @@
     }
 }
 
+static void checkAttributesPrivacySensitive(aaudio_performance_mode_t perfMode) {
+    for (int privacyMode : sPrivacyModes) {
+        checkAttributes(perfMode,
+                        DONT_SET,
+                        DONT_SET,
+                        DONT_SET,
+                        DONT_SET,
+                        privacyMode,
+                        AAUDIO_DIRECTION_INPUT);
+    }
+}
+
 TEST(test_attributes, aaudio_usage_perfnone) {
     checkAttributesUsage(AAUDIO_PERFORMANCE_MODE_NONE);
 }
@@ -216,3 +250,7 @@
 TEST(test_attributes, aaudio_allowed_capture_policy_lowlat) {
     checkAttributesAllowedCapturePolicy(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY);
 }
+
+TEST(test_attributes, aaudio_allowed_privacy_sensitive_lowlat) {
+    checkAttributesPrivacySensitive(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY);
+}
diff --git a/media/libaaudio/tests/test_various.cpp b/media/libaaudio/tests/test_various.cpp
index 4b065c9..5bb1046 100644
--- a/media/libaaudio/tests/test_various.cpp
+++ b/media/libaaudio/tests/test_various.cpp
@@ -40,10 +40,62 @@
     return AAUDIO_CALLBACK_RESULT_CONTINUE;
 }
 
-// Test AAudioStream_setBufferSizeInFrames()
-
 constexpr int64_t NANOS_PER_MILLISECOND = 1000 * 1000;
 
+void checkReleaseThenClose(aaudio_performance_mode_t perfMode,
+        aaudio_sharing_mode_t sharingMode) {
+    AAudioStreamBuilder* aaudioBuilder = nullptr;
+    AAudioStream* aaudioStream = nullptr;
+
+    // Use an AAudioStreamBuilder to contain requested parameters.
+    ASSERT_EQ(AAUDIO_OK, AAudio_createStreamBuilder(&aaudioBuilder));
+
+    // Request stream properties.
+    AAudioStreamBuilder_setDataCallback(aaudioBuilder,
+                                        NoopDataCallbackProc,
+                                        nullptr);
+    AAudioStreamBuilder_setPerformanceMode(aaudioBuilder, perfMode);
+    AAudioStreamBuilder_setSharingMode(aaudioBuilder, sharingMode);
+
+    // Create an AAudioStream using the Builder.
+    ASSERT_EQ(AAUDIO_OK,
+              AAudioStreamBuilder_openStream(aaudioBuilder, &aaudioStream));
+    AAudioStreamBuilder_delete(aaudioBuilder);
+
+    ASSERT_EQ(AAUDIO_OK, AAudioStream_requestStart(aaudioStream));
+
+    sleep(1);
+
+    EXPECT_EQ(AAUDIO_OK, AAudioStream_requestStop(aaudioStream));
+
+    EXPECT_EQ(AAUDIO_OK, AAudioStream_release(aaudioStream));
+    aaudio_stream_state_t state = AAudioStream_getState(aaudioStream);
+    EXPECT_EQ(AAUDIO_STREAM_STATE_CLOSING, state);
+
+    // We should be able to call this again without crashing.
+    EXPECT_EQ(AAUDIO_OK, AAudioStream_release(aaudioStream));
+    state = AAudioStream_getState(aaudioStream);
+    EXPECT_EQ(AAUDIO_STREAM_STATE_CLOSING, state);
+
+    EXPECT_EQ(AAUDIO_OK, AAudioStream_close(aaudioStream));
+}
+
+TEST(test_various, aaudio_release_close_none) {
+    checkReleaseThenClose(AAUDIO_PERFORMANCE_MODE_NONE,
+            AAUDIO_SHARING_MODE_SHARED);
+    // No EXCLUSIVE streams with MODE_NONE.
+}
+
+TEST(test_various, aaudio_release_close_low_shared) {
+    checkReleaseThenClose(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY,
+            AAUDIO_SHARING_MODE_SHARED);
+}
+
+TEST(test_various, aaudio_release_close_low_exclusive) {
+    checkReleaseThenClose(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY,
+            AAUDIO_SHARING_MODE_EXCLUSIVE);
+}
+
 enum FunctionToCall {
     CALL_START, CALL_STOP, CALL_PAUSE, CALL_FLUSH
 };
@@ -441,7 +493,7 @@
            bufferCapacity, bufferCapacity % framesPerBurst);
 
     actualSize = AAudioStream_setBufferSizeInFrames(aaudioStream, 0);
-    EXPECT_GT(actualSize, 0);
+    EXPECT_GE(actualSize, 0); // 0 is legal in R
     EXPECT_LE(actualSize, bufferCapacity);
 
     actualSize = AAudioStream_setBufferSizeInFrames(aaudioStream, 2 * framesPerBurst);
@@ -469,7 +521,7 @@
     EXPECT_LE(actualSize, bufferCapacity);
 
     actualSize = AAudioStream_setBufferSizeInFrames(aaudioStream, INT32_MIN);
-    EXPECT_GT(actualSize, 0);
+    EXPECT_GE(actualSize, 0); // 0 is legal in R
     EXPECT_LE(actualSize, bufferCapacity);
 
     AAudioStream_close(aaudioStream);
diff --git a/media/libaudioclient/Android.bp b/media/libaudioclient/Android.bp
index d1812e6..3638d2c 100644
--- a/media/libaudioclient/Android.bp
+++ b/media/libaudioclient/Android.bp
@@ -90,7 +90,12 @@
     ],
     export_shared_lib_headers: ["libbinder"],
 
-    local_include_dirs: ["include/media", "aidl"],
+    include_dirs: [
+        "frameworks/av/media/libnbaio/include_mono/",
+    ],
+    local_include_dirs: [
+        "include/media", "aidl"
+    ],
     header_libs: [
         "libaudioclient_headers",
         "libbase_headers",
diff --git a/media/libaudioclient/AudioEffect.cpp b/media/libaudioclient/AudioEffect.cpp
index 7d5b690..a1b141b 100644
--- a/media/libaudioclient/AudioEffect.cpp
+++ b/media/libaudioclient/AudioEffect.cpp
@@ -138,6 +138,7 @@
 
     mIEffectClient = new EffectClient(this);
     mClientPid = IPCThreadState::self()->getCallingPid();
+    mClientUid = IPCThreadState::self()->getCallingUid();
 
     iEffect = audioFlinger->createEffect((effect_descriptor_t *)&mDescriptor,
             mIEffectClient, priority, io, mSessionId, device, mOpPackageName, mClientPid,
@@ -179,7 +180,7 @@
             mStatus, mEnabled, mClientPid);
 
     if (!audio_is_global_session(mSessionId)) {
-        AudioSystem::acquireAudioSessionId(mSessionId, mClientPid);
+        AudioSystem::acquireAudioSessionId(mSessionId, mClientPid, mClientUid);
     }
 
     return mStatus;
diff --git a/media/libaudioclient/AudioPolicy.cpp b/media/libaudioclient/AudioPolicy.cpp
index 06fc23c..d468239 100644
--- a/media/libaudioclient/AudioPolicy.cpp
+++ b/media/libaudioclient/AudioPolicy.cpp
@@ -53,6 +53,10 @@
     case RULE_EXCLUDE_UID:
         mValue.mUid = (uid_t) parcel->readInt32();
         break;
+    case RULE_MATCH_USERID:
+    case RULE_EXCLUDE_USERID:
+        mValue.mUserId = (int) parcel->readInt32();
+        break;
     default:
         ALOGE("Trying to build AudioMixMatchCriterion from unknown rule %d", mRule);
         return BAD_VALUE;
@@ -163,9 +167,50 @@
     return false;
 }
 
+void AudioMix::setExcludeUserId(int userId) const {
+    AudioMixMatchCriterion crit;
+    crit.mRule = RULE_EXCLUDE_USERID;
+    crit.mValue.mUserId = userId;
+    mCriteria.add(crit);
+}
+
+void AudioMix::setMatchUserId(int userId) const {
+    AudioMixMatchCriterion crit;
+    crit.mRule = RULE_MATCH_USERID;
+    crit.mValue.mUserId = userId;
+    mCriteria.add(crit);
+}
+
+bool AudioMix::hasUserIdRule(bool match, int userId) const {
+    const uint32_t rule = match ? RULE_MATCH_USERID : RULE_EXCLUDE_USERID;
+    for (size_t i = 0; i < mCriteria.size(); i++) {
+        if (mCriteria[i].mRule == rule
+                && mCriteria[i].mValue.mUserId == userId) {
+            return true;
+        }
+    }
+    return false;
+}
+
+bool AudioMix::hasMatchUserIdRule() const {
+    for (size_t i = 0; i < mCriteria.size(); i++) {
+        if (mCriteria[i].mRule == RULE_MATCH_USERID) {
+            return true;
+        }
+    }
+    return false;
+}
+
 bool AudioMix::isDeviceAffinityCompatible() const {
     return ((mMixType == MIX_TYPE_PLAYERS)
             && (mRouteFlags == MIX_ROUTE_FLAG_RENDER));
 }
 
+bool AudioMix::hasMatchingRuleForUsage(std::function<bool (audio_usage_t)>const& func) const {
+    return std::any_of(mCriteria.begin(), mCriteria.end(), [func](auto& criterion) {
+            return criterion.mRule == RULE_MATCH_ATTRIBUTE_USAGE
+              && func(criterion.mValue.mUsage);
+          });
+}
+
 } // namespace android
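
A small sketch of how the new userId criteria might be used by policy code (addUserRestriction() is a hypothetical helper; constructing the AudioMix itself is elided):

    // Match only players belonging to the given Android user in this mix,
    // unless an equivalent rule is already present.
    void addUserRestriction(const android::AudioMix& mix, int userId) {
        if (!mix.hasUserIdRule(true /*match*/, userId)) {
            mix.setMatchUserId(userId);
        }
    }
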
diff --git a/media/libaudioclient/AudioRecord.cpp b/media/libaudioclient/AudioRecord.cpp
index 2494313..981cfee 100644
--- a/media/libaudioclient/AudioRecord.cpp
+++ b/media/libaudioclient/AudioRecord.cpp
@@ -186,7 +186,7 @@
         IPCThreadState::self()->flushCommands();
         ALOGV("%s(%d): releasing session id %d",
                 __func__, mPortId, mSessionId);
-        AudioSystem::releaseAudioSessionId(mSessionId, -1 /*pid*/);
+        AudioSystem::releaseAudioSessionId(mSessionId, mClientPid);
     }
 }
 
@@ -361,7 +361,7 @@
     mMarkerReached = false;
     mNewPosition = 0;
     mUpdatePeriod = 0;
-    AudioSystem::acquireAudioSessionId(mSessionId, -1);
+    AudioSystem::acquireAudioSessionId(mSessionId, mClientPid, mClientUid);
     mSequence = 1;
     mObservedSequence = mSequence;
     mInOverrun = false;
@@ -896,7 +896,6 @@
 {
     // previous and new IAudioRecord sequence numbers are used to detect track re-creation
     uint32_t oldSequence = 0;
-    uint32_t newSequence;
 
     Proxy::Buffer buffer;
     status_t status = NO_ERROR;
@@ -914,7 +913,7 @@
             // start of lock scope
             AutoMutex lock(mLock);
 
-            newSequence = mSequence;
+            uint32_t newSequence = mSequence;
             // did previous obtainBuffer() fail due to media server death or voluntary invalidation?
             if (status == DEAD_OBJECT) {
                 // re-create track, unless someone else has already done so
@@ -951,6 +950,7 @@
     audioBuffer->frameCount = buffer.mFrameCount;
     audioBuffer->size = buffer.mFrameCount * mFrameSize;
     audioBuffer->raw = buffer.mRaw;
+    audioBuffer->sequence = oldSequence;
     if (nonContig != NULL) {
         *nonContig = buffer.mNonContig;
     }
@@ -971,6 +971,12 @@
     buffer.mRaw = audioBuffer->raw;
 
     AutoMutex lock(mLock);
+    if (audioBuffer->sequence != mSequence) {
+        // This Buffer came from a different IAudioRecord instance, so ignore the releaseBuffer
+        ALOGD("%s is no-op due to IAudioRecord sequence mismatch %u != %u",
+                __func__, audioBuffer->sequence, mSequence);
+        return;
+    }
     mInOverrun = false;
     mProxy->releaseBuffer(&buffer);
 
diff --git a/media/libaudioclient/AudioSystem.cpp b/media/libaudioclient/AudioSystem.cpp
index ad39abe..1769062 100644
--- a/media/libaudioclient/AudioSystem.cpp
+++ b/media/libaudioclient/AudioSystem.cpp
@@ -36,10 +36,11 @@
 
 // client singleton for AudioFlinger binder interface
 Mutex AudioSystem::gLock;
+Mutex AudioSystem::gLockErrorCallbacks;
 Mutex AudioSystem::gLockAPS;
 sp<IAudioFlinger> AudioSystem::gAudioFlinger;
 sp<AudioSystem::AudioFlingerClient> AudioSystem::gAudioFlingerClient;
-audio_error_callback AudioSystem::gAudioErrorCallback = NULL;
+std::set<audio_error_callback> AudioSystem::gAudioErrorCallbacks;
 dynamic_policy_callback AudioSystem::gDynPolicyCallback = NULL;
 record_config_callback AudioSystem::gRecordConfigCallback = NULL;
 
@@ -63,9 +64,7 @@
             if (gAudioFlingerClient == NULL) {
                 gAudioFlingerClient = new AudioFlingerClient();
             } else {
-                if (gAudioErrorCallback) {
-                    gAudioErrorCallback(NO_ERROR);
-                }
+                reportError(NO_ERROR);
             }
             binder->linkToDeath(gAudioFlingerClient);
             gAudioFlinger = interface_cast<IAudioFlinger>(binder);
@@ -434,11 +433,11 @@
     return af->newAudioUniqueId(use);
 }
 
-void AudioSystem::acquireAudioSessionId(audio_session_t audioSession, pid_t pid)
+void AudioSystem::acquireAudioSessionId(audio_session_t audioSession, pid_t pid, uid_t uid)
 {
     const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
     if (af != 0) {
-        af->acquireAudioSessionId(audioSession, pid);
+        af->acquireAudioSessionId(audioSession, pid, uid);
     }
 }
 
@@ -500,19 +499,16 @@
 
 void AudioSystem::AudioFlingerClient::binderDied(const wp<IBinder>& who __unused)
 {
-    audio_error_callback cb = NULL;
     {
         Mutex::Autolock _l(AudioSystem::gLock);
         AudioSystem::gAudioFlinger.clear();
-        cb = gAudioErrorCallback;
     }
 
     // clear output handles and stream to output map caches
     clearIoCache();
 
-    if (cb) {
-        cb(DEAD_OBJECT);
-    }
+    reportError(DEAD_OBJECT);
+
     ALOGW("AudioFlinger server died!");
 }
 
@@ -717,10 +713,23 @@
     return NO_ERROR;
 }
 
-/* static */ void AudioSystem::setErrorCallback(audio_error_callback cb)
+/* static */ uintptr_t AudioSystem::addErrorCallback(audio_error_callback cb)
 {
-    Mutex::Autolock _l(gLock);
-    gAudioErrorCallback = cb;
+    Mutex::Autolock _l(gLockErrorCallbacks);
+    gAudioErrorCallbacks.insert(cb);
+    return reinterpret_cast<uintptr_t>(cb);
+}
+
+/* static */ void AudioSystem::removeErrorCallback(uintptr_t cb) {
+    Mutex::Autolock _l(gLockErrorCallbacks);
+    gAudioErrorCallbacks.erase(reinterpret_cast<audio_error_callback>(cb));
+}
+
+/* static */ void AudioSystem::reportError(status_t err) {
+    Mutex::Autolock _l(gLockErrorCallbacks);
+    for (auto callback : gAudioErrorCallbacks) {
+        callback(err);
+    }
 }
 
 /*static*/ void AudioSystem::setDynPolicyCallback(dynamic_policy_callback cb)
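
A sketch of client-side usage of the new registration API (onAudioServerError() and its body are hypothetical; LOG_TAG and log includes are elided):

    static void onAudioServerError(status_t err) {
        ALOGW("audioserver reported error %d", err);
    }

    void registerForAudioFlingerErrors() {
        const uintptr_t token = AudioSystem::addErrorCallback(onAudioServerError);
        // Keep the token and pass it to AudioSystem::removeErrorCallback(token) on teardown.
    }
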
@@ -1017,6 +1026,16 @@
     return aps->getDevicesForStream(stream);
 }
 
+status_t AudioSystem::getDevicesForAttributes(const AudioAttributes &aa,
+                                              AudioDeviceTypeAddrVector *devices) {
+    if (devices == nullptr) {
+        return BAD_VALUE;
+    }
+    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    if (aps == 0) return PERMISSION_DENIED;
+    return aps->getDevicesForAttributes(aa, devices);
+}
+
 audio_io_handle_t AudioSystem::getOutputForEffect(const effect_descriptor_t *desc)
 {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
@@ -1123,6 +1142,12 @@
     }
 }
 
+status_t AudioSystem::setSupportedSystemUsages(const std::vector<audio_usage_t>& systemUsages) {
+    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    if (aps == nullptr) return PERMISSION_DENIED;
+    return aps->setSupportedSystemUsages(systemUsages);
+}
+
 status_t AudioSystem::setAllowedCapturePolicy(uid_t uid, audio_flags_mask_t flags) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == nullptr) return PERMISSION_DENIED;
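
For illustration, a system component could declare the system usages it intends to play roughly as follows (a sketch; the calling context and error policy are assumed):

    std::vector<audio_usage_t> usages = { AUDIO_USAGE_EMERGENCY, AUDIO_USAGE_SAFETY };
    status_t status = AudioSystem::setSupportedSystemUsages(usages);
    if (status != NO_ERROR) {
        ALOGW("setSupportedSystemUsages failed: %d", status);
    }
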
@@ -1334,6 +1359,21 @@
     return aps->removeUidDeviceAffinities(uid);
 }
 
+status_t AudioSystem::setUserIdDeviceAffinities(int userId,
+                                                const Vector<AudioDeviceTypeAddr>& devices)
+{
+    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    if (aps == 0) return PERMISSION_DENIED;
+    return aps->setUserIdDeviceAffinities(userId, devices);
+}
+
+status_t AudioSystem::removeUserIdDeviceAffinities(int userId)
+{
+    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    if (aps == 0) return PERMISSION_DENIED;
+    return aps->removeUserIdDeviceAffinities(userId);
+}
+
 status_t AudioSystem::startAudioSource(const struct audio_port_config *source,
                                        const audio_attributes_t *attributes,
                                        audio_port_handle_t *portId)
@@ -1491,7 +1531,14 @@
             }
         }
     }
-    ALOGE("invalid attributes %s when converting to stream",  toString(attr).c_str());
+    switch (attr.usage) {
+        case AUDIO_USAGE_VIRTUAL_SOURCE:
+            // virtual source is not expected to have an associated product strategy
+            break;
+        default:
+            ALOGE("invalid attributes %s when converting to stream",  toString(attr).c_str());
+            break;
+    }
     return AUDIO_STREAM_MUSIC;
 }
 
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index 3151feb..8cfee50 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -599,7 +599,7 @@
     mReleased = 0;
     mStartNs = 0;
     mStartFromZeroUs = 0;
-    AudioSystem::acquireAudioSessionId(mSessionId, mClientPid);
+    AudioSystem::acquireAudioSessionId(mSessionId, mClientPid, mClientUid);
     mSequence = 1;
     mObservedSequence = mSequence;
     mInUnderrun = false;
diff --git a/media/libaudioclient/IAudioFlinger.cpp b/media/libaudioclient/IAudioFlinger.cpp
index cbc98bd..513da2b 100644
--- a/media/libaudioclient/IAudioFlinger.cpp
+++ b/media/libaudioclient/IAudioFlinger.cpp
@@ -571,12 +571,13 @@
         return id;
     }
 
-    virtual void acquireAudioSessionId(audio_session_t audioSession, int pid)
+    void acquireAudioSessionId(audio_session_t audioSession, pid_t pid, uid_t uid) override
     {
         Parcel data, reply;
         data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
         data.writeInt32(audioSession);
-        data.writeInt32(pid);
+        data.writeInt32((int32_t)pid);
+        data.writeInt32((int32_t)uid);
         remote()->transact(ACQUIRE_AUDIO_SESSION_ID, data, &reply);
     }
 
@@ -1326,8 +1327,9 @@
         case ACQUIRE_AUDIO_SESSION_ID: {
             CHECK_INTERFACE(IAudioFlinger, data, reply);
             audio_session_t audioSession = (audio_session_t) data.readInt32();
-            int pid = data.readInt32();
-            acquireAudioSessionId(audioSession, pid);
+            const pid_t pid = (pid_t)data.readInt32();
+            const uid_t uid = (uid_t)data.readInt32();
+            acquireAudioSessionId(audioSession, pid, uid);
             return NO_ERROR;
         } break;
         case RELEASE_AUDIO_SESSION_ID: {
diff --git a/media/libaudioclient/IAudioPolicyService.cpp b/media/libaudioclient/IAudioPolicyService.cpp
index b9e6e33..ce38414 100644
--- a/media/libaudioclient/IAudioPolicyService.cpp
+++ b/media/libaudioclient/IAudioPolicyService.cpp
@@ -97,11 +97,14 @@
     IS_HAPTIC_PLAYBACK_SUPPORTED,
     SET_UID_DEVICE_AFFINITY,
     REMOVE_UID_DEVICE_AFFINITY,
+    SET_USERID_DEVICE_AFFINITY,
+    REMOVE_USERID_DEVICE_AFFINITY,
     GET_OFFLOAD_FORMATS_A2DP,
     LIST_AUDIO_PRODUCT_STRATEGIES,
     GET_STRATEGY_FOR_ATTRIBUTES,
     LIST_AUDIO_VOLUME_GROUPS,
     GET_VOLUME_GROUP_FOR_ATTRIBUTES,
+    SET_SUPPORTED_SYSTEM_USAGES,
     SET_ALLOWED_CAPTURE_POLICY,
     MOVE_EFFECTS_TO_IO,
     SET_RTT_ENABLED,
@@ -109,6 +112,7 @@
     SET_PREFERRED_DEVICE_FOR_PRODUCT_STRATEGY,
     REMOVE_PREFERRED_DEVICE_FOR_PRODUCT_STRATEGY,
     GET_PREFERRED_DEVICE_FOR_PRODUCT_STRATEGY,
+    GET_DEVICES_FOR_ATTRIBUTES,
 };
 
 #define MAX_ITEMS_PER_LIST 1024
@@ -331,6 +335,7 @@
             ALOGE("getInputForAttr NULL portId - shouldn't happen");
             return BAD_VALUE;
         }
+
         data.write(attr, sizeof(audio_attributes_t));
         data.writeInt32(*input);
         data.writeInt32(riid);
@@ -621,6 +626,20 @@
         return status;
     }
 
+    status_t setSupportedSystemUsages(const std::vector<audio_usage_t>& systemUsages) {
+        Parcel data, reply;
+        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+        data.writeInt32(systemUsages.size());
+        for (auto systemUsage : systemUsages) {
+            data.writeInt32(systemUsage);
+        }
+        status_t status = remote()->transact(SET_SUPPORTED_SYSTEM_USAGES, data, &reply);
+        if (status != NO_ERROR) {
+            return status;
+        }
+        return static_cast <status_t> (reply.readInt32());
+    }
+
     status_t setAllowedCapturePolicy(uid_t uid, audio_flags_mask_t flags) override {
         Parcel data, reply;
         data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
@@ -1180,6 +1199,52 @@
         return status;
     }
 
+    virtual status_t setUserIdDeviceAffinities(int userId,
+            const Vector<AudioDeviceTypeAddr>& devices)
+    {
+        Parcel data, reply;
+        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+
+        data.writeInt32((int32_t) userId);
+        size_t size = devices.size();
+        size_t sizePosition = data.dataPosition();
+        data.writeInt32((int32_t) size);
+        size_t finalSize = size;
+        for (size_t i = 0; i < size; i++) {
+            size_t position = data.dataPosition();
+            if (devices[i].writeToParcel(&data) != NO_ERROR) {
+                data.setDataPosition(position);
+                finalSize--;
+            }
+        }
+        if (size != finalSize) {
+            size_t position = data.dataPosition();
+            data.setDataPosition(sizePosition);
+            data.writeInt32(finalSize);
+            data.setDataPosition(position);
+        }
+
+        status_t status = remote()->transact(SET_USERID_DEVICE_AFFINITY, data, &reply);
+        if (status == NO_ERROR) {
+            status = (status_t)reply.readInt32();
+        }
+        return status;
+    }
+
+    virtual status_t removeUserIdDeviceAffinities(int userId) {
+        Parcel data, reply;
+        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+
+        data.writeInt32((int32_t) userId);
+
+        status_t status =
+            remote()->transact(REMOVE_USERID_DEVICE_AFFINITY, data, &reply);
+        if (status == NO_ERROR) {
+            status = (status_t) reply.readInt32();
+        }
+        return status;
+    }
+
     virtual status_t listAudioProductStrategies(AudioProductStrategyVector &strategies)
     {
         Parcel data, reply;
@@ -1348,6 +1413,41 @@
         }
         return static_cast<status_t>(reply.readInt32());
     }
+
+    virtual status_t getDevicesForAttributes(const AudioAttributes &aa,
+            AudioDeviceTypeAddrVector *devices) const
+    {
+        if (devices == nullptr) {
+            return BAD_VALUE;
+        }
+        Parcel data, reply;
+        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+        status_t status = aa.writeToParcel(&data);
+        if (status != NO_ERROR) {
+            return status;
+        }
+        status = remote()->transact(GET_DEVICES_FOR_ATTRIBUTES, data, &reply);
+        if (status != NO_ERROR) {
+            // transaction failed, return error
+            return status;
+        }
+        status = static_cast<status_t>(reply.readInt32());
+        if (status != NO_ERROR) {
+            // APM method call failed, return error
+            return status;
+        }
+
+        const size_t numberOfDevices = (size_t)reply.readInt32();
+        for (size_t i = 0; i < numberOfDevices; i++) {
+            AudioDeviceTypeAddr device;
+            if (device.readFromParcel((Parcel*)&reply) == NO_ERROR) {
+                devices->push_back(device);
+            } else {
+                return FAILED_TRANSACTION;
+            }
+        }
+        return NO_ERROR;
+    }
 };
 
 IMPLEMENT_META_INTERFACE(AudioPolicyService, "android.media.IAudioPolicyService");
@@ -1405,6 +1505,8 @@
         case SET_A11Y_SERVICES_UIDS:
         case SET_UID_DEVICE_AFFINITY:
         case REMOVE_UID_DEVICE_AFFINITY:
+        case SET_USERID_DEVICE_AFFINITY:
+        case REMOVE_USERID_DEVICE_AFFINITY:
         case GET_OFFLOAD_FORMATS_A2DP:
         case LIST_AUDIO_VOLUME_GROUPS:
         case GET_VOLUME_GROUP_FOR_ATTRIBUTES:
@@ -1413,8 +1515,10 @@
         case SET_RTT_ENABLED:
         case IS_CALL_SCREEN_MODE_SUPPORTED:
         case SET_PREFERRED_DEVICE_FOR_PRODUCT_STRATEGY:
+        case SET_SUPPORTED_SYSTEM_USAGES:
         case REMOVE_PREFERRED_DEVICE_FOR_PRODUCT_STRATEGY:
-        case GET_PREFERRED_DEVICE_FOR_PRODUCT_STRATEGY: {
+        case GET_PREFERRED_DEVICE_FOR_PRODUCT_STRATEGY:
+        case GET_DEVICES_FOR_ATTRIBUTES: {
             if (!isServiceUid(IPCThreadState::self()->getCallingUid())) {
                 ALOGW("%s: transaction %d received from PID %d unauthorized UID %d",
                       __func__, code, IPCThreadState::self()->getCallingPid(),
@@ -2325,6 +2429,30 @@
             return NO_ERROR;
         }
 
+        case SET_USERID_DEVICE_AFFINITY: {
+            CHECK_INTERFACE(IAudioPolicyService, data, reply);
+            const int userId = (int) data.readInt32();
+            Vector<AudioDeviceTypeAddr> devices;
+            size_t size = (size_t)data.readInt32();
+            for (size_t i = 0; i < size; i++) {
+                AudioDeviceTypeAddr device;
+                if (device.readFromParcel((Parcel*)&data) == NO_ERROR) {
+                    devices.add(device);
+                }
+            }
+            status_t status = setUserIdDeviceAffinities(userId, devices);
+            reply->writeInt32(status);
+            return NO_ERROR;
+        }
+
+        case REMOVE_USERID_DEVICE_AFFINITY: {
+            CHECK_INTERFACE(IAudioPolicyService, data, reply);
+            const int userId = (int) data.readInt32();
+            status_t status = removeUserIdDeviceAffinities(userId);
+            reply->writeInt32(status);
+            return NO_ERROR;
+        }
+
         case LIST_AUDIO_PRODUCT_STRATEGIES: {
             CHECK_INTERFACE(IAudioPolicyService, data, reply);
             AudioProductStrategyVector strategies;
@@ -2405,16 +2533,46 @@
             if (status != NO_ERROR) {
                 return status;
             }
+
             volume_group_t group;
             status = getVolumeGroupFromAudioAttributes(attributes, group);
-            reply->writeInt32(status);
             if (status != NO_ERROR) {
                 return NO_ERROR;
             }
+
+            reply->writeInt32(status);
             reply->writeUint32(static_cast<int>(group));
             return NO_ERROR;
         }
 
+        case SET_SUPPORTED_SYSTEM_USAGES: {
+            CHECK_INTERFACE(IAudioPolicyService, data, reply);
+            std::vector<audio_usage_t> systemUsages;
+
+            int32_t size;
+            status_t status = data.readInt32(&size);
+            if (status != NO_ERROR) {
+                return status;
+            }
+            if (size > MAX_ITEMS_PER_LIST) {
+                size = MAX_ITEMS_PER_LIST;
+            }
+
+            for (int32_t i = 0; i < size; i++) {
+                int32_t systemUsageInt;
+                status = data.readInt32(&systemUsageInt);
+                if (status != NO_ERROR) {
+                    return status;
+                }
+
+                audio_usage_t systemUsage = static_cast<audio_usage_t>(systemUsageInt);
+                systemUsages.push_back(systemUsage);
+            }
+            status = setSupportedSystemUsages(systemUsages);
+            reply->writeInt32(static_cast<int32_t>(status));
+            return NO_ERROR;
+        }
+
         case SET_ALLOWED_CAPTURE_POLICY: {
             CHECK_INTERFACE(IAudioPolicyService, data, reply);
             uid_t uid = data.readInt32();
@@ -2473,6 +2631,37 @@
             return NO_ERROR;
         }
 
+        case GET_DEVICES_FOR_ATTRIBUTES: {
+            CHECK_INTERFACE(IAudioPolicyService, data, reply);
+            AudioAttributes attributes;
+            status_t status = attributes.readFromParcel(&data);
+            if (status != NO_ERROR) {
+                return status;
+            }
+            AudioDeviceTypeAddrVector devices;
+            status = getDevicesForAttributes(attributes.getAttributes(), &devices);
+            // reply data formatted as:
+            //  - (int32) method call result from APM
+            //  - (int32) number of devices (n) if method call returned NO_ERROR
+            //  - n AudioDeviceTypeAddr         if method call returned NO_ERROR
+            reply->writeInt32(status);
+            if (status != NO_ERROR) {
+                return NO_ERROR;
+            }
+            status = reply->writeInt32(devices.size());
+            if (status != NO_ERROR) {
+                return status;
+            }
+            for (const auto& device : devices) {
+                status = device.writeToParcel(reply);
+                if (status != NO_ERROR) {
+                    return status;
+                }
+            }
+
+            return NO_ERROR;
+        }
+
         default:
             return BBinder::onTransact(code, data, reply, flags);
     }
diff --git a/media/libaudioclient/include/media/AudioEffect.h b/media/libaudioclient/include/media/AudioEffect.h
index f17d737..eec9dfc 100644
--- a/media/libaudioclient/include/media/AudioEffect.h
+++ b/media/libaudioclient/include/media/AudioEffect.h
@@ -639,7 +639,8 @@
     sp<EffectClient>        mIEffectClient;     // IEffectClient implementation
     sp<IMemory>             mCblkMemory;        // shared memory for deferred parameter setting
     effect_param_cblk_t*    mCblk;              // control block for deferred parameter setting
-    pid_t                   mClientPid;
+    pid_t                   mClientPid = (pid_t)-1;
+    uid_t                   mClientUid = (uid_t)-1;
 };
 
 
diff --git a/media/libaudioclient/include/media/AudioPolicy.h b/media/libaudioclient/include/media/AudioPolicy.h
index 0ab1c9d..20d2c0b 100644
--- a/media/libaudioclient/include/media/AudioPolicy.h
+++ b/media/libaudioclient/include/media/AudioPolicy.h
@@ -18,12 +18,14 @@
 #ifndef ANDROID_AUDIO_POLICY_H
 #define ANDROID_AUDIO_POLICY_H
 
+#include <functional>
 #include <binder/Parcel.h>
 #include <media/AudioDeviceTypeAddr.h>
 #include <system/audio.h>
 #include <system/audio_policy.h>
 #include <utils/String8.h>
 #include <utils/Vector.h>
+#include <cutils/multiuser.h>
 
 namespace android {
 
@@ -32,10 +34,12 @@
 #define RULE_MATCH_ATTRIBUTE_USAGE           0x1
 #define RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET (0x1 << 1)
 #define RULE_MATCH_UID                      (0x1 << 2)
+#define RULE_MATCH_USERID                   (0x1 << 3)
 #define RULE_EXCLUDE_ATTRIBUTE_USAGE  (RULE_EXCLUSION_MASK|RULE_MATCH_ATTRIBUTE_USAGE)
 #define RULE_EXCLUDE_ATTRIBUTE_CAPTURE_PRESET \
                                       (RULE_EXCLUSION_MASK|RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET)
 #define RULE_EXCLUDE_UID              (RULE_EXCLUSION_MASK|RULE_MATCH_UID)
+#define RULE_EXCLUDE_USERID           (RULE_EXCLUSION_MASK|RULE_MATCH_USERID)
 
 #define MIX_TYPE_INVALID (-1)
 #define MIX_TYPE_PLAYERS 0
@@ -73,6 +77,7 @@
         audio_usage_t   mUsage;
         audio_source_t  mSource;
         uid_t           mUid;
+        int             mUserId;
     } mValue;
     uint32_t        mRule;
 };
@@ -98,9 +103,23 @@
     bool hasUidRule(bool match, uid_t uid) const;
     /** returns true if this mix has a rule for uid match (any uid) */
     bool hasMatchUidRule() const;
+
+    void setExcludeUserId(int userId) const;
+    void setMatchUserId(int userId) const;
+    /** returns true if this mix has a rule to match or exclude the given userId */
+    bool hasUserIdRule(bool match, int userId) const;
+    /** returns true if this mix has a rule for userId match (any userId) */
+    bool hasMatchUserIdRule() const;
     /** returns true if this mix can be used for uid-device affinity routing */
     bool isDeviceAffinityCompatible() const;
 
+    /**
+     * returns true if the mix has a capture rule for a usage that
+     * matches the given predicate
+     */
+    bool hasMatchingRuleForUsage(
+        std::function<bool(audio_usage_t)> const& func) const;
+
     mutable Vector<AudioMixMatchCriterion> mCriteria;
     uint32_t        mMixType;
     audio_config_t  mFormat;
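
As a usage sketch of the predicate-based helper declared above (the "mix" object and the chosen usage constant are placeholders):

    // Does this mix have a capture rule covering voice-communication usage?
    const bool capturesVoiceComm = mix.hasMatchingRuleForUsage(
            [](audio_usage_t usage) { return usage == AUDIO_USAGE_VOICE_COMMUNICATION; });
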
diff --git a/media/libaudioclient/include/media/AudioRecord.h b/media/libaudioclient/include/media/AudioRecord.h
index db90e6a..b510378 100644
--- a/media/libaudioclient/include/media/AudioRecord.h
+++ b/media/libaudioclient/include/media/AudioRecord.h
@@ -92,6 +92,11 @@
             int8_t*     i8;         // unsigned 8-bit, offset by 0x80
                                     // input to obtainBuffer(): unused, output: pointer to buffer
         };
+
+        uint32_t    sequence;       // IAudioRecord instance sequence number, as of obtainBuffer().
+                                    // It is set by obtainBuffer() and confirmed by releaseBuffer().
+                                    // Not "user-serviceable".
+                                    // TODO Consider sp<IMemory> instead, or in addition to this.
     };
 
     /* As a convenience, if a callback is supplied, a handler thread
@@ -420,14 +425,17 @@
      *  frameCount  number of frames requested
      *  size        ignored
      *  raw         ignored
+     *  sequence    ignored
      * After error return:
      *  frameCount  0
      *  size        0
      *  raw         undefined
+     *  sequence    undefined
      * After successful return:
      *  frameCount  actual number of frames available, <= number requested
      *  size        actual number of bytes available
      *  raw         pointer to the buffer
+     *  sequence    IAudioRecord instance sequence number, as of obtainBuffer()
      */
 
             status_t    obtainBuffer(Buffer* audioBuffer, int32_t waitCount,
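
A hedged sketch of the obtain/release cycle these comments describe; "record", "dest" and "framesWanted" are placeholders, and error handling is elided.

    AudioRecord::Buffer buffer;
    buffer.frameCount = framesWanted;                        // input: frames requested
    if (record->obtainBuffer(&buffer, 1 /* waitCount */) == NO_ERROR) {
        memcpy(dest, buffer.raw, buffer.size);               // sequence now tags the IAudioRecord used
        record->releaseBuffer(&buffer);                      // releaseBuffer() confirms that same sequence
    }
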
diff --git a/media/libaudioclient/include/media/AudioSystem.h b/media/libaudioclient/include/media/AudioSystem.h
index c4b528e..9d3f8b6 100644
--- a/media/libaudioclient/include/media/AudioSystem.h
+++ b/media/libaudioclient/include/media/AudioSystem.h
@@ -27,6 +27,7 @@
 #include <media/IAudioFlingerClient.h>
 #include <media/IAudioPolicyServiceClient.h>
 #include <media/MicrophoneInfo.h>
+#include <set>
 #include <system/audio.h>
 #include <system/audio_effect.h>
 #include <system/audio_policy.h>
@@ -107,7 +108,16 @@
     static status_t setParameters(const String8& keyValuePairs);
     static String8  getParameters(const String8& keys);
 
-    static void setErrorCallback(audio_error_callback cb);
+    // Registers an error callback. When this callback is invoked, it means all
+    // state implied by this interface has been reset.
+    // Returns a token that can be used for un-registering.
+    // Might block while callbacks are being invoked.
+    static uintptr_t addErrorCallback(audio_error_callback cb);
+
+    // Un-registers a callback previously added with addErrorCallback.
+    // Might block while callbacks are being invoked.
+    static void removeErrorCallback(uintptr_t cb);
+
     static void setDynPolicyCallback(dynamic_policy_callback cb);
     static void setRecordConfigCallback(record_config_callback);
 
@@ -172,7 +182,7 @@
     //       or an unspecified existing unique ID.
     static audio_unique_id_t newAudioUniqueId(audio_unique_id_use_t use);
 
-    static void acquireAudioSessionId(audio_session_t audioSession, pid_t pid);
+    static void acquireAudioSessionId(audio_session_t audioSession, pid_t pid, uid_t uid);
     static void releaseAudioSessionId(audio_session_t audioSession, pid_t pid);
 
     // Get the HW synchronization source used for an audio session.
@@ -279,6 +289,8 @@
 
     static uint32_t getStrategyForStream(audio_stream_type_t stream);
     static audio_devices_t getDevicesForStream(audio_stream_type_t stream);
+    static status_t getDevicesForAttributes(const AudioAttributes &aa,
+                                            AudioDeviceTypeAddrVector *devices);
 
     static audio_io_handle_t getOutputForEffect(const effect_descriptor_t *desc);
     static status_t registerEffect(const effect_descriptor_t *desc,
@@ -302,6 +314,8 @@
 
     static status_t setLowRamDevice(bool isLowRamDevice, int64_t totalMemory);
 
+    static status_t setSupportedSystemUsages(const std::vector<audio_usage_t>& systemUsages);
+
     static status_t setAllowedCapturePolicy(uid_t uid, audio_flags_mask_t flags);
 
     // Check if hw offload is possible for given format, stream type, sample rate,
@@ -350,6 +364,10 @@
 
     static status_t removeUidDeviceAffinities(uid_t uid);
 
+    static status_t setUserIdDeviceAffinities(int userId, const Vector<AudioDeviceTypeAddr>& devices);
+
+    static status_t removeUserIdDeviceAffinities(int userId);
+
     static status_t startAudioSource(const struct audio_port_config *source,
                                      const audio_attributes_t *attributes,
                                      audio_port_handle_t *portId);
@@ -560,15 +578,19 @@
     static const sp<AudioFlingerClient> getAudioFlingerClient();
     static sp<AudioIoDescriptor> getIoDescriptor(audio_io_handle_t ioHandle);
 
+    // Invokes all registered error callbacks with the given error code.
+    static void reportError(status_t err);
+
     static sp<AudioFlingerClient> gAudioFlingerClient;
     static sp<AudioPolicyServiceClient> gAudioPolicyServiceClient;
     friend class AudioFlingerClient;
     friend class AudioPolicyServiceClient;
 
-    static Mutex gLock;      // protects gAudioFlinger and gAudioErrorCallback,
+    static Mutex gLock;      // protects gAudioFlinger
+    static Mutex gLockErrorCallbacks;      // protects gAudioErrorCallbacks
     static Mutex gLockAPS;   // protects gAudioPolicyService and gAudioPolicyServiceClient
     static sp<IAudioFlinger> gAudioFlinger;
-    static audio_error_callback gAudioErrorCallback;
+    static std::set<audio_error_callback> gAudioErrorCallbacks;
     static dynamic_policy_callback gDynPolicyCallback;
     static record_config_callback gRecordConfigCallback;
 
diff --git a/media/libaudioclient/include/media/IAudioFlinger.h b/media/libaudioclient/include/media/IAudioFlinger.h
index 8703f55..8bc1d83 100644
--- a/media/libaudioclient/include/media/IAudioFlinger.h
+++ b/media/libaudioclient/include/media/IAudioFlinger.h
@@ -446,7 +446,7 @@
 
     virtual audio_unique_id_t newAudioUniqueId(audio_unique_id_use_t use) = 0;
 
-    virtual void acquireAudioSessionId(audio_session_t audioSession, pid_t pid) = 0;
+    virtual void acquireAudioSessionId(audio_session_t audioSession, pid_t pid, uid_t uid) = 0;
     virtual void releaseAudioSessionId(audio_session_t audioSession, pid_t pid) = 0;
 
     virtual status_t queryNumberEffects(uint32_t *numEffects) const = 0;
diff --git a/media/libaudioclient/include/media/IAudioPolicyService.h b/media/libaudioclient/include/media/IAudioPolicyService.h
index 6623061..caa1d13 100644
--- a/media/libaudioclient/include/media/IAudioPolicyService.h
+++ b/media/libaudioclient/include/media/IAudioPolicyService.h
@@ -108,6 +108,8 @@
 
     virtual uint32_t getStrategyForStream(audio_stream_type_t stream) = 0;
     virtual audio_devices_t getDevicesForStream(audio_stream_type_t stream) = 0;
+    virtual status_t getDevicesForAttributes(const AudioAttributes &aa,
+            AudioDeviceTypeAddrVector *devices) const = 0;
     virtual audio_io_handle_t getOutputForEffect(const effect_descriptor_t *desc) = 0;
     virtual status_t registerEffect(const effect_descriptor_t *desc,
                                     audio_io_handle_t io,
@@ -138,6 +140,7 @@
                                             audio_unique_id_t* id) = 0;
     virtual status_t removeSourceDefaultEffect(audio_unique_id_t id) = 0;
     virtual status_t removeStreamDefaultEffect(audio_unique_id_t id) = 0;
+    virtual status_t setSupportedSystemUsages(const std::vector<audio_usage_t>& systemUsages) = 0;
     virtual status_t setAllowedCapturePolicy(uid_t uid, audio_flags_mask_t flags) = 0;
    // Check if offload is possible for given format, stream type, sample rate,
     // bit rate, duration, video and streaming or offload property is enabled
@@ -192,6 +195,11 @@
 
     virtual status_t removeUidDeviceAffinities(uid_t uid) = 0;
 
+    virtual status_t setUserIdDeviceAffinities(int userId,
+            const Vector<AudioDeviceTypeAddr>& devices) = 0;
+
+    virtual status_t removeUserIdDeviceAffinities(int userId) = 0;
+
     virtual status_t startAudioSource(const struct audio_port_config *source,
                                       const audio_attributes_t *attributes,
                                       audio_port_handle_t *portId) = 0;
diff --git a/media/libaudiofoundation/AudioDeviceTypeAddr.cpp b/media/libaudiofoundation/AudioDeviceTypeAddr.cpp
index d390467..b44043a 100644
--- a/media/libaudiofoundation/AudioDeviceTypeAddr.cpp
+++ b/media/libaudiofoundation/AudioDeviceTypeAddr.cpp
@@ -26,6 +26,16 @@
     return mType == other.mType && mAddress == other.mAddress;
 }
 
+bool AudioDeviceTypeAddr::operator<(const AudioDeviceTypeAddr& other) const {
+    if (mType < other.mType)  return true;
+    if (mType > other.mType)  return false;
+
+    if (mAddress < other.mAddress)  return true;
+    // if (mAddress > other.mAddress)  return false;
+
+    return false;
+}
+
 void AudioDeviceTypeAddr::reset() {
     mType = AUDIO_DEVICE_NONE;
     mAddress = "";
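
Illustrative only: the strict-weak ordering defined above lets AudioDeviceTypeAddr act as a key in ordered containers, for example to de-duplicate a device list ("devices" is a placeholder AudioDeviceTypeAddrVector).

    const std::set<AudioDeviceTypeAddr> uniqueDevices(devices.begin(), devices.end());
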
diff --git a/media/libaudiofoundation/include/media/AudioDeviceTypeAddr.h b/media/libaudiofoundation/include/media/AudioDeviceTypeAddr.h
index acc37ca..60ea78e 100644
--- a/media/libaudiofoundation/include/media/AudioDeviceTypeAddr.h
+++ b/media/libaudiofoundation/include/media/AudioDeviceTypeAddr.h
@@ -39,6 +39,8 @@
 
     AudioDeviceTypeAddr& operator= (const AudioDeviceTypeAddr&) = default;
 
+    bool operator<(const AudioDeviceTypeAddr& other) const;
+
     void reset();
 
     status_t readFromParcel(const Parcel *parcel) override;
diff --git a/media/libaudiohal/impl/DeviceHalHidl.cpp b/media/libaudiohal/impl/DeviceHalHidl.cpp
index 342ceb6..7d0d83d 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalHidl.cpp
@@ -28,6 +28,7 @@
 #include <common/all-versions/VersionUtils.h>
 
 #include "DeviceHalHidl.h"
+#include "EffectHalHidl.h"
 #include "HidlUtils.h"
 #include "StreamHalHidl.h"
 #include "VersionUtils.h"
@@ -43,6 +44,8 @@
 using namespace ::android::hardware::audio::common::CPP_VERSION;
 using namespace ::android::hardware::audio::CPP_VERSION;
 
+using EffectHalHidl = ::android::effect::CPP_VERSION::EffectHalHidl;
+
 namespace {
 
 status_t deviceAddressFromHal(
@@ -417,6 +420,36 @@
 }
 #endif
 
+#if MAJOR_VERSION >= 6
+status_t DeviceHalHidl::addDeviceEffect(
+        audio_port_handle_t device, sp<EffectHalInterface> effect) {
+    if (mDevice == 0) return NO_INIT;
+    return processReturn("addDeviceEffect", mDevice->addDeviceEffect(
+            static_cast<AudioPortHandle>(device),
+            static_cast<EffectHalHidl*>(effect.get())->effectId()));
+}
+#else
+status_t DeviceHalHidl::addDeviceEffect(
+        audio_port_handle_t device __unused, sp<EffectHalInterface> effect __unused) {
+    return INVALID_OPERATION;
+}
+#endif
+
+#if MAJOR_VERSION >= 6
+status_t DeviceHalHidl::removeDeviceEffect(
+        audio_port_handle_t device, sp<EffectHalInterface> effect) {
+    if (mDevice == 0) return NO_INIT;
+    return processReturn("removeDeviceEffect", mDevice->removeDeviceEffect(
+            static_cast<AudioPortHandle>(device),
+            static_cast<EffectHalHidl*>(effect.get())->effectId()));
+}
+#else
+status_t DeviceHalHidl::removeDeviceEffect(
+        audio_port_handle_t device __unused, sp<EffectHalInterface> effect __unused) {
+    return INVALID_OPERATION;
+}
+#endif
+
 status_t DeviceHalHidl::dump(int fd) {
     if (mDevice == 0) return NO_INIT;
     native_handle_t* hidlHandle = native_handle_create(1, 0);
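
Caller-side sketch (hypothetical): device effects are only wired up for HAL major version 6 and above, so callers should be prepared for INVALID_OPERATION on older HALs.

    status_t status = deviceHal->addDeviceEffect(devicePortId, effect);
    if (status == INVALID_OPERATION) {
        ALOGW("audio HAL predates device effects; addDeviceEffect is unsupported");
    }
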
diff --git a/media/libaudiohal/impl/DeviceHalHidl.h b/media/libaudiohal/impl/DeviceHalHidl.h
index f7d465f..d342d4a 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.h
+++ b/media/libaudiohal/impl/DeviceHalHidl.h
@@ -113,6 +113,9 @@
     // List microphones
     virtual status_t getMicrophones(std::vector<media::MicrophoneInfo> *microphones);
 
+    status_t addDeviceEffect(audio_port_handle_t device, sp<EffectHalInterface> effect) override;
+    status_t removeDeviceEffect(audio_port_handle_t device, sp<EffectHalInterface> effect) override;
+
     virtual status_t dump(int fd);
 
   private:
diff --git a/media/libaudiohal/impl/DeviceHalLocal.cpp b/media/libaudiohal/impl/DeviceHalLocal.cpp
index dfbb6b2..8021d92 100644
--- a/media/libaudiohal/impl/DeviceHalLocal.cpp
+++ b/media/libaudiohal/impl/DeviceHalLocal.cpp
@@ -206,6 +206,17 @@
 }
 #endif
 
+// Local HAL implementation does not support effects
+status_t DeviceHalLocal::addDeviceEffect(
+        audio_port_handle_t device __unused, sp<EffectHalInterface> effect __unused) {
+    return INVALID_OPERATION;
+}
+
+status_t DeviceHalLocal::removeDeviceEffect(
+        audio_port_handle_t device __unused, sp<EffectHalInterface> effect __unused) {
+    return INVALID_OPERATION;
+}
+
 status_t DeviceHalLocal::dump(int fd) {
     return mDev->dump(mDev, fd);
 }
diff --git a/media/libaudiohal/impl/DeviceHalLocal.h b/media/libaudiohal/impl/DeviceHalLocal.h
index 36db72e..d85e2a7 100644
--- a/media/libaudiohal/impl/DeviceHalLocal.h
+++ b/media/libaudiohal/impl/DeviceHalLocal.h
@@ -106,6 +106,9 @@
     // List microphones
     virtual status_t getMicrophones(std::vector<media::MicrophoneInfo> *microphones);
 
+    status_t addDeviceEffect(audio_port_handle_t device, sp<EffectHalInterface> effect) override;
+    status_t removeDeviceEffect(audio_port_handle_t device, sp<EffectHalInterface> effect) override;
+
     virtual status_t dump(int fd);
 
     void closeOutputStream(struct audio_stream_out *stream_out);
diff --git a/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h b/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
index 2200a7f..1e04b21 100644
--- a/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
@@ -17,6 +17,7 @@
 #ifndef ANDROID_HARDWARE_DEVICE_HAL_INTERFACE_H
 #define ANDROID_HARDWARE_DEVICE_HAL_INTERFACE_H
 
+#include <media/audiohal/EffectHalInterface.h>
 #include <media/MicrophoneInfo.h>
 #include <system/audio.h>
 #include <utils/Errors.h>
@@ -111,6 +112,11 @@
     // List microphones
     virtual status_t getMicrophones(std::vector<media::MicrophoneInfo> *microphones) = 0;
 
+    virtual status_t addDeviceEffect(
+            audio_port_handle_t device, sp<EffectHalInterface> effect) = 0;
+    virtual status_t removeDeviceEffect(
+            audio_port_handle_t device, sp<EffectHalInterface> effect) = 0;
+
     virtual status_t dump(int fd) = 0;
 
   protected:
diff --git a/media/libaudioprocessing/Android.bp b/media/libaudioprocessing/Android.bp
index 9b5d58c..1a6a5a2 100644
--- a/media/libaudioprocessing/Android.bp
+++ b/media/libaudioprocessing/Android.bp
@@ -32,6 +32,7 @@
     ],
 
     header_libs: [
+        "libaudiohal_headers",
         "libbase_headers",
         "libmedia_headers"
     ],
diff --git a/media/libeffects/config/src/EffectsConfig.cpp b/media/libeffects/config/src/EffectsConfig.cpp
index f39eb0c..218249d 100644
--- a/media/libeffects/config/src/EffectsConfig.cpp
+++ b/media/libeffects/config/src/EffectsConfig.cpp
@@ -100,6 +100,7 @@
         {AUDIO_STREAM_ENFORCED_AUDIBLE, "enforced_audible"},
         {AUDIO_STREAM_DTMF, "dtmf"},
         {AUDIO_STREAM_TTS, "tts"},
+        {AUDIO_STREAM_ASSISTANT, "assistant"},
 };
 
 /** All input stream types which support effects.
diff --git a/media/libheif/HeifDecoderImpl.cpp b/media/libheif/HeifDecoderImpl.cpp
index 9d79f89..2208eca 100644
--- a/media/libheif/HeifDecoderImpl.cpp
+++ b/media/libheif/HeifDecoderImpl.cpp
@@ -26,7 +26,7 @@
 #include <binder/MemoryDealer.h>
 #include <drm/drm_framework_common.h>
 #include <media/mediametadataretriever.h>
-#include <media/MediaSource.h>
+#include <media/stagefright/MediaSource.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <private/media/VideoFrame.h>
 #include <utils/Log.h>
diff --git a/media/libmedia/IMediaSource.cpp b/media/libmedia/IMediaSource.cpp
index 50826c5..b18571f 100644
--- a/media/libmedia/IMediaSource.cpp
+++ b/media/libmedia/IMediaSource.cpp
@@ -26,7 +26,7 @@
 #include <media/IMediaSource.h>
 #include <media/stagefright/MediaBuffer.h>
 #include <media/stagefright/MediaBufferGroup.h>
-#include <media/MediaSource.h>
+#include <media/stagefright/MediaSource.h>
 #include <media/stagefright/MetaData.h>
 
 namespace android {
diff --git a/media/libmedia/include/media/IMediaPlayer.h b/media/libmedia/include/media/IMediaPlayer.h
index 97a998e..3c13540 100644
--- a/media/libmedia/include/media/IMediaPlayer.h
+++ b/media/libmedia/include/media/IMediaPlayer.h
@@ -23,7 +23,7 @@
 #include <utils/KeyedVector.h>
 #include <system/audio.h>
 
-#include <media/MediaSource.h>
+#include <media/stagefright/MediaSource.h>
 #include <media/VolumeShaper.h>
 
 // Fwd decl to make sure everyone agrees that the scope of struct sockaddr_in is
diff --git a/media/libmedia/include/media/IMediaSource.h b/media/libmedia/include/media/IMediaSource.h
index 381df24..f3fa39b 100644
--- a/media/libmedia/include/media/IMediaSource.h
+++ b/media/libmedia/include/media/IMediaSource.h
@@ -22,7 +22,7 @@
 
 #include <binder/IInterface.h>
 #include <binder/IMemory.h>
-#include <media/MediaSource.h>
+#include <media/stagefright/MediaSource.h>
 #include <media/stagefright/MediaBuffer.h>
 #include <media/stagefright/MediaErrors.h>
 
diff --git a/media/libmedia/include/media/MediaCodecBuffer.h b/media/libmedia/include/media/MediaCodecBuffer.h
index 2c16fba..101377c 100644
--- a/media/libmedia/include/media/MediaCodecBuffer.h
+++ b/media/libmedia/include/media/MediaCodecBuffer.h
@@ -22,6 +22,8 @@
 #include <utils/RefBase.h>
 #include <utils/StrongPointer.h>
 
+class C2Buffer;
+
 namespace android {
 
 struct ABuffer;
@@ -57,6 +59,36 @@
 
     void setFormat(const sp<AMessage> &format);
 
+    /**
+     * \return  C2Buffer object represents this buffer.
+     */
+    virtual std::shared_ptr<C2Buffer> asC2Buffer() { return nullptr; }
+
+    /**
+     * Test if we can copy the content of |buffer| into this object.
+     *
+     * \param   buffer  C2Buffer object to copy.
+     * \return  true    if the content of buffer can be copied over to this buffer
+     *          false   otherwise.
+     */
+    virtual bool canCopy(const std::shared_ptr<C2Buffer> &buffer) const {
+        (void)buffer;
+        return false;
+    }
+
+    /**
+     * Copy the content of |buffer| into this object. This method assumes that
+     * canCopy() check already passed.
+     *
+     * \param   buffer  C2Buffer object to copy.
+     * \return  true    if successful
+     *          false   otherwise.
+     */
+    virtual bool copy(const std::shared_ptr<C2Buffer> &buffer) {
+        (void)buffer;
+        return false;
+    }
+
 private:
     MediaCodecBuffer() = delete;
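
Sketch of the intended call pattern for the C2Buffer hooks added above: the base class declines both operations, and Codec2-backed subclasses are expected to override them. "src" and "dst" are hypothetical sp<MediaCodecBuffer> instances.

    std::shared_ptr<C2Buffer> c2buffer = src->asC2Buffer();
    if (c2buffer != nullptr && dst->canCopy(c2buffer)) {
        if (!dst->copy(c2buffer)) {
            // copy failed despite canCopy(); fall back to copying the raw data instead
        }
    }
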
 
diff --git a/media/libmedia/include/media/MidiIoWrapper.h b/media/libmedia/include/media/MidiIoWrapper.h
index b19d49e..7beb619 100644
--- a/media/libmedia/include/media/MidiIoWrapper.h
+++ b/media/libmedia/include/media/MidiIoWrapper.h
@@ -19,8 +19,6 @@
 
 #include <libsonivox/eas_types.h>
 
-#include <media/DataSourceBase.h>
-
 namespace android {
 
 struct CDataSource;
diff --git a/media/libmedia/include/media/NdkWrapper.h b/media/libmedia/include/media/NdkWrapper.h
index 8a417f6..39c0574 100644
--- a/media/libmedia/include/media/NdkWrapper.h
+++ b/media/libmedia/include/media/NdkWrapper.h
@@ -19,7 +19,7 @@
 #define NDK_WRAPPER_H_
 
 #include <media/DataSource.h>
-#include <media/MediaSource.h>
+#include <media/stagefright/MediaSource.h>
 #include <media/NdkMediaDataSource.h>
 #include <media/NdkMediaError.h>
 #include <media/NdkMediaExtractor.h>
diff --git a/media/libmedia/mediaplayer.cpp b/media/libmedia/mediaplayer.cpp
index 60d2f85..1fadc94 100644
--- a/media/libmedia/mediaplayer.cpp
+++ b/media/libmedia/mediaplayer.cpp
@@ -60,7 +60,7 @@
     mVideoWidth = mVideoHeight = 0;
     mLockThreadId = 0;
     mAudioSessionId = (audio_session_t) AudioSystem::newAudioUniqueId(AUDIO_UNIQUE_ID_USE_SESSION);
-    AudioSystem::acquireAudioSessionId(mAudioSessionId, -1);
+    AudioSystem::acquireAudioSessionId(mAudioSessionId, (pid_t)-1, (uid_t)-1); // always in client.
     mSendLevel = 0;
     mRetransmitEndpointValid = false;
 }
@@ -72,7 +72,7 @@
         delete mAudioAttributesParcel;
         mAudioAttributesParcel = NULL;
     }
-    AudioSystem::releaseAudioSessionId(mAudioSessionId, -1);
+    AudioSystem::releaseAudioSessionId(mAudioSessionId, (pid_t)-1);
     disconnect();
     IPCThreadState::self()->flushCommands();
 }
@@ -709,8 +709,8 @@
         return BAD_VALUE;
     }
     if (sessionId != mAudioSessionId) {
-        AudioSystem::acquireAudioSessionId(sessionId, -1);
-        AudioSystem::releaseAudioSessionId(mAudioSessionId, -1);
+        AudioSystem::acquireAudioSessionId(sessionId, (pid_t)-1, (uid_t)-1);
+        AudioSystem::releaseAudioSessionId(mAudioSessionId, (pid_t)-1);
         mAudioSessionId = sessionId;
     }
     return NO_ERROR;
diff --git a/media/libmediahelper/TypeConverter.cpp b/media/libmediahelper/TypeConverter.cpp
index aa54b82..6f14081 100644
--- a/media/libmediahelper/TypeConverter.cpp
+++ b/media/libmediahelper/TypeConverter.cpp
@@ -312,6 +312,7 @@
     MAKE_STRING_FROM_ENUM(AUDIO_STREAM_DTMF),
     MAKE_STRING_FROM_ENUM(AUDIO_STREAM_TTS),
     MAKE_STRING_FROM_ENUM(AUDIO_STREAM_ACCESSIBILITY),
+    MAKE_STRING_FROM_ENUM(AUDIO_STREAM_ASSISTANT),
     MAKE_STRING_FROM_ENUM(AUDIO_STREAM_REROUTING),
     MAKE_STRING_FROM_ENUM(AUDIO_STREAM_PATCH),
     TERMINATOR
@@ -359,6 +360,10 @@
     MAKE_STRING_FROM_ENUM(AUDIO_USAGE_VIRTUAL_SOURCE),
     MAKE_STRING_FROM_ENUM(AUDIO_USAGE_ASSISTANT),
     MAKE_STRING_FROM_ENUM(AUDIO_USAGE_CALL_ASSISTANT),
+    MAKE_STRING_FROM_ENUM(AUDIO_USAGE_EMERGENCY),
+    MAKE_STRING_FROM_ENUM(AUDIO_USAGE_SAFETY),
+    MAKE_STRING_FROM_ENUM(AUDIO_USAGE_VEHICLE_STATUS),
+    MAKE_STRING_FROM_ENUM(AUDIO_USAGE_ANNOUNCEMENT),
     TERMINATOR
 };
 
diff --git a/media/libmediametrics/MediaMetrics.cpp b/media/libmediametrics/MediaMetrics.cpp
index 4aceac5..a3c2f1a 100644
--- a/media/libmediametrics/MediaMetrics.cpp
+++ b/media/libmediametrics/MediaMetrics.cpp
@@ -31,28 +31,31 @@
 // ALL functions returning a char * give responsibility for the allocated buffer
 // to the caller. The caller is responsible to call free() on that pointer.
 //
+//
+
+using namespace android::mediametrics;
 
 // manage the overall record
 mediametrics_handle_t mediametrics_create(mediametricskey_t key) {
-    android::mediametrics::Item *item = android::mediametrics::Item::create(key);
+    Item *item = Item::create(key);
     return (mediametrics_handle_t) item;
 }
 
 void mediametrics_delete(mediametrics_handle_t handle) {
-    android::mediametrics::Item *item = (android::mediametrics::Item *) handle;
+    Item *item = (Item *) handle;
     if (item == NULL) return;
     delete item;
 }
 
 mediametricskey_t mediametrics_getKey(mediametrics_handle_t handle) {
-    android::mediametrics::Item *item = (android::mediametrics::Item *) handle;
+    Item *item = (Item *) handle;
     if (item == NULL) return NULL;
     return strdup(item->getKey().c_str());
 }
 
 // nuplayer, et al use it when acting as proxies
 void mediametrics_setUid(mediametrics_handle_t handle, uid_t uid) {
-    android::mediametrics::Item *item = (android::mediametrics::Item *) handle;
+    Item *item = (Item *) handle;
     if (item != NULL) item->setUid(uid);
 }
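
A minimal usage sketch of the C handle API wrapped in this file; the key and attribute names are illustrative, not canonical.

    mediametrics_handle_t handle = mediametrics_create("audiotrack");
    mediametrics_setInt32(handle, "channelCount", 2);
    mediametrics_setCString(handle, "encoding", "AUDIO_FORMAT_PCM_16_BIT");
    if (!mediametrics_selfRecord(handle)) {
        // submission failed, e.g. the media.metrics service is unavailable
    }
    mediametrics_delete(handle);
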
 
@@ -61,31 +64,31 @@
 
 void mediametrics_setInt32(mediametrics_handle_t handle, attr_t attr,
                                 int32_t value) {
-    android::mediametrics::Item *item = (android::mediametrics::Item *) handle;
+    Item *item = (Item *) handle;
     if (item != NULL) item->setInt32(attr, value);
 }
 
 void mediametrics_setInt64(mediametrics_handle_t handle, attr_t attr,
                                 int64_t value) {
-    android::mediametrics::Item *item = (android::mediametrics::Item *) handle;
+    Item *item = (Item *) handle;
     if (item != NULL) item->setInt64(attr, value);
 }
 
 void mediametrics_setDouble(mediametrics_handle_t handle, attr_t attr,
                                  double value) {
-    android::mediametrics::Item *item = (android::mediametrics::Item *) handle;
+    Item *item = (Item *) handle;
     if (item != NULL) item->setDouble(attr, value);
 }
 
 void mediametrics_setRate(mediametrics_handle_t handle, attr_t attr,
                                int64_t count, int64_t duration) {
-    android::mediametrics::Item *item = (android::mediametrics::Item *) handle;
+    Item *item = (Item *) handle;
     if (item != NULL) item->setRate(attr, count, duration);
 }
 
 void mediametrics_setCString(mediametrics_handle_t handle, attr_t attr,
                                  const char *value) {
-    android::mediametrics::Item *item = (android::mediametrics::Item *) handle;
+    Item *item = (Item *) handle;
     if (item != NULL) item->setCString(attr, value);
 }
 
@@ -94,25 +97,25 @@
 
 void mediametrics_addInt32(mediametrics_handle_t handle, attr_t attr,
                                 int32_t value) {
-    android::mediametrics::Item *item = (android::mediametrics::Item *) handle;
+    Item *item = (Item *) handle;
     if (item != NULL) item->addInt32(attr, value);
 }
 
 void mediametrics_addInt64(mediametrics_handle_t handle, attr_t attr,
                                 int64_t value) {
-    android::mediametrics::Item *item = (android::mediametrics::Item *) handle;
+    Item *item = (Item *) handle;
     if (item != NULL) item->addInt64(attr, value);
 }
 
 void mediametrics_addDouble(mediametrics_handle_t handle, attr_t attr,
                                  double value) {
-    android::mediametrics::Item *item = (android::mediametrics::Item *) handle;
+    Item *item = (Item *) handle;
     if (item != NULL) item->addDouble(attr, value);
 }
 
 void mediametrics_addRate(mediametrics_handle_t handle, attr_t attr,
                                int64_t count, int64_t duration) {
-    android::mediametrics::Item *item = (android::mediametrics::Item *) handle;
+    Item *item = (Item *) handle;
     if (item != NULL) item->addRate(attr, count, duration);
 }
 
@@ -123,28 +126,28 @@
 
 bool mediametrics_getInt32(mediametrics_handle_t handle, attr_t attr,
                                 int32_t * value) {
-    android::mediametrics::Item *item = (android::mediametrics::Item *) handle;
+    Item *item = (Item *) handle;
     if (item == NULL) return false;
     return item->getInt32(attr, value);
 }
 
 bool mediametrics_getInt64(mediametrics_handle_t handle, attr_t attr,
                                 int64_t * value) {
-    android::mediametrics::Item *item = (android::mediametrics::Item *) handle;
+    Item *item = (Item *) handle;
     if (item == NULL) return false;
     return item->getInt64(attr, value);
 }
 
 bool mediametrics_getDouble(mediametrics_handle_t handle, attr_t attr,
                                  double *value) {
-    android::mediametrics::Item *item = (android::mediametrics::Item *) handle;
+    Item *item = (Item *) handle;
     if (item == NULL) return false;
     return item->getDouble(attr, value);
 }
 
 bool mediametrics_getRate(mediametrics_handle_t handle, attr_t attr,
                                int64_t * count, int64_t * duration, double *rate) {
-    android::mediametrics::Item *item = (android::mediametrics::Item *) handle;
+    Item *item = (Item *) handle;
     if (item == NULL) return false;
     return item->getRate(attr, count, duration, rate);
 }
@@ -152,7 +155,7 @@
 // NB: caller owns the string that comes back, is responsible for freeing it
 bool mediametrics_getCString(mediametrics_handle_t handle, attr_t attr,
                                  char **value) {
-    android::mediametrics::Item *item = (android::mediametrics::Item *) handle;
+    Item *item = (Item *) handle;
     if (item == NULL) return false;
 
     return item->getCString(attr, value);
@@ -164,36 +167,36 @@
 }
 
 bool mediametrics_selfRecord(mediametrics_handle_t handle) {
-    android::mediametrics::Item *item = (android::mediametrics::Item *) handle;
+    Item *item = (Item *) handle;
     if (item == NULL) return false;
     return item->selfrecord();
 }
 
 mediametrics_handle_t mediametrics_dup(mediametrics_handle_t handle) {
-    android::mediametrics::Item *item = (android::mediametrics::Item *) handle;
-    if (item == NULL) return android::mediametrics::Item::convert(item);
-    return android::mediametrics::Item::convert(item->dup());
+    Item *item = (Item *) handle;
+    if (item == NULL) return Item::convert(item);
+    return Item::convert(item->dup());
 }
 
 const char *mediametrics_readable(mediametrics_handle_t handle) {
-    android::mediametrics::Item *item = (android::mediametrics::Item *) handle;
+    Item *item = (Item *) handle;
     if (item == NULL) return "";
     return item->toCString();
 }
 
 int32_t mediametrics_count(mediametrics_handle_t handle) {
-    android::mediametrics::Item *item = (android::mediametrics::Item *) handle;
+    Item *item = (Item *) handle;
     if (item == NULL) return 0;
     return item->count();
 }
 
 bool mediametrics_isEnabled() {
     // static, so doesn't need an instance
-    return android::mediametrics::Item::isEnabled();
+    return Item::isEnabled();
 }
 
 bool mediametrics_getAttributes(mediametrics_handle_t handle, char **buffer, size_t *length) {
-    android::mediametrics::Item *item = (android::mediametrics::Item *) handle;
+    Item *item = (Item *) handle;
     if (item == NULL) return false;
     return item->writeToByteString(buffer, length) == android::NO_ERROR;
 
diff --git a/media/libmediametrics/MediaMetricsItem.cpp b/media/libmediametrics/MediaMetricsItem.cpp
index 98158d2..62af0f7 100644
--- a/media/libmediametrics/MediaMetricsItem.cpp
+++ b/media/libmediametrics/MediaMetricsItem.cpp
@@ -63,51 +63,6 @@
     if (DEBUG_ALLOCATIONS) {
         ALOGD("Destroy  mediametrics::Item @ %p", this);
     }
-    clear();
-}
-
-void mediametrics::Item::clear() {
-
-    // clean allocated storage from key
-    mKey.clear();
-
-    // clean attributes
-    // contents of the attributes
-    for (size_t i = 0 ; i < mPropCount; i++ ) {
-        mProps[i].clear();
-    }
-    // the attribute records themselves
-    if (mProps != NULL) {
-        free(mProps);
-        mProps = NULL;
-    }
-    mPropSize = 0;
-    mPropCount = 0;
-
-    return;
-}
-
-// make a deep copy of myself
-mediametrics::Item *mediametrics::Item::dup() {
-    mediametrics::Item *dst = new mediametrics::Item(this->mKey);
-
-    if (dst != NULL) {
-        // key as part of constructor
-        dst->mPid = this->mPid;
-        dst->mUid = this->mUid;
-        dst->mPkgName = this->mPkgName;
-        dst->mPkgVersionCode = this->mPkgVersionCode;
-        dst->mTimestamp = this->mTimestamp;
-
-        // properties aka attributes
-        dst->growProps(this->mPropCount);
-        for(size_t i=0;i<mPropCount;i++) {
-            dst->mProps[i] = this->mProps[i];
-        }
-        dst->mPropCount = this->mPropCount;
-    }
-
-    return dst;
 }
 
 mediametrics::Item &mediametrics::Item::setTimestamp(nsecs_t ts) {
@@ -151,84 +106,12 @@
     return mPkgVersionCode;
 }
 
-
-// find the proper entry in the list
-size_t mediametrics::Item::findPropIndex(const char *name) const
-{
-    size_t i = 0;
-    for (; i < mPropCount; i++) {
-        if (mProps[i].isNamed(name)) break;
-    }
-    return i;
-}
-
-mediametrics::Item::Prop *mediametrics::Item::findProp(const char *name) const {
-    const size_t i = findPropIndex(name);
-    if (i < mPropCount) {
-        return &mProps[i];
-    }
-    return nullptr;
-}
-
-// consider this "find-or-allocate".
-// caller validates type and uses clearPropValue() accordingly
-mediametrics::Item::Prop *mediametrics::Item::allocateProp(const char *name) {
-    const size_t i = findPropIndex(name);
-    if (i < mPropCount) {
-        return &mProps[i]; // already have it, return
-    }
-
-    Prop *prop = allocateProp(); // get a new prop
-    if (prop == nullptr) return nullptr;
-    prop->setName(name);
-    return prop;
-}
-
-mediametrics::Item::Prop *mediametrics::Item::allocateProp() {
-    if (mPropCount == mPropSize && growProps() == false) {
-        ALOGE("%s: failed allocation for new properties", __func__);
-        return nullptr;
-    }
-    return &mProps[mPropCount++];
-}
-
-// used within the summarizers; return whether property existed
-bool mediametrics::Item::removeProp(const char *name) {
-    const size_t i = findPropIndex(name);
-    if (i < mPropCount) {
-        mProps[i].clear();
-        if (i != mPropCount-1) {
-            // in the middle, bring last one down to fill gap
-            mProps[i].swap(mProps[mPropCount-1]);
-        }
-        mPropCount--;
-        return true;
-    }
-    return false;
-}
-
 // remove indicated keys and their values
 // return value is # keys removed
 size_t mediametrics::Item::filter(size_t n, const char *attrs[]) {
     size_t zapped = 0;
     for (size_t i = 0; i < n; ++i) {
-        const char *name = attrs[i];
-        size_t j = findPropIndex(name);
-        if (j >= mPropCount) {
-            // not there
-            continue;
-        } else if (j + 1 == mPropCount) {
-            // last one, shorten
-            zapped++;
-            mProps[j].clear();
-            mPropCount--;
-        } else {
-            // in the middle, bring last one down and shorten
-            zapped++;
-            mProps[j].clear();
-            mProps[j] = mProps[mPropCount-1];
-            mPropCount--;
-        }
+        zapped += mProps.erase(attrs[i]);
     }
     return zapped;
 }
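
Illustrative call of the map-backed filter()/filterNot() shown above (attribute names are hypothetical):

    const char *noisy[] = { "underrunFrames", "bufferSizeFrames" };
    size_t dropped = item->filter(2, noisy);      // erases only the listed attributes
    size_t pruned  = item->filterNot(2, noisy);   // erases every attribute NOT listed
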
@@ -238,49 +121,17 @@
 size_t mediametrics::Item::filterNot(size_t n, const char *attrs[]) {
     std::set<std::string> check(attrs, attrs + n);
     size_t zapped = 0;
-    for (size_t j = 0; j < mPropCount;) {
-        if (check.find(mProps[j].getName()) != check.end()) {
-            ++j;
-            continue;
-        }
-        if (j + 1 == mPropCount) {
-            // last one, shorten
-            zapped++;
-            mProps[j].clear();
-            mPropCount--;
-            break;
+    for (auto it = mProps.begin(); it != mProps.end();) {
+        if (check.find(it->first) != check.end()) {
+            ++it;
         } else {
-            // in the middle, bring last one down and shorten
-            zapped++;
-            mProps[j].clear();
-            mProps[j] = mProps[mPropCount-1];
-            mPropCount--;
+            it = mProps.erase(it);
+            ++zapped;
         }
     }
     return zapped;
 }
 
-bool mediametrics::Item::growProps(int increment)
-{
-    if (increment <= 0) {
-        increment = kGrowProps;
-    }
-    int nsize = mPropSize + increment;
-    Prop *ni = (Prop *)realloc(mProps, sizeof(Prop) * nsize);
-
-    if (ni != NULL) {
-        for (int i = mPropSize; i < nsize; i++) {
-            new (&ni[i]) Prop(); // placement new
-        }
-        mProps = ni;
-        mPropSize = nsize;
-        return true;
-    } else {
-        ALOGW("mediametrics::Item::growProps fails");
-        return false;
-    }
-}
-
 // Parcel / serialize things for binder calls
 //
 
@@ -315,10 +166,11 @@
     if (count < 0) return BAD_VALUE;
     mPkgVersionCode = version;
     mTimestamp = timestamp;
-    for (int i = 0; i < count ; i++) {
-        Prop *prop = allocateProp();
-        status_t status = prop->readFromParcel(data);
+    for (int i = 0; i < count; i++) {
+        Prop prop;
+        status_t status = prop.readFromParcel(data);
         if (status != NO_ERROR) return status;
+        mProps[prop.getName()] = std::move(prop);
     }
     return NO_ERROR;
 }
@@ -349,9 +201,9 @@
         ?: data->writeInt64(mTimestamp);
     if (status != NO_ERROR) return status;
 
-    data->writeInt32((int32_t)mPropCount);
-    for (size_t i = 0 ; i < mPropCount; ++i) {
-        status = mProps[i].writeToParcel(data);
+    data->writeInt32((int32_t)mProps.size());
+    for (auto &prop : *this) {
+        status = prop.writeToParcel(data);
         if (status != NO_ERROR) return status;
     }
     return NO_ERROR;
@@ -376,10 +228,10 @@
 
     snprintf(buffer, sizeof(buffer), "[%d:%s:%d:%d:%lld:%s:%zu:",
             version, mKey.c_str(), mPid, mUid, (long long)mTimestamp,
-            mPkgName.c_str(), mPropCount);
+            mPkgName.c_str(), mProps.size());
     result.append(buffer);
-    for (size_t i = 0 ; i < mPropCount; ++i) {
-        mProps[i].toString(buffer, sizeof(buffer));
+    for (auto &prop : *this) {
+        prop.toStringBuffer(buffer, sizeof(buffer));
         result.append(buffer);
     }
     result.append("]");
@@ -390,7 +242,7 @@
 // calls the appropriate daemon
 bool mediametrics::Item::selfrecord() {
     ALOGD_IF(DEBUG_API, "%s: delivering %s", __func__, this->toString().c_str());
-    sp<IMediaMetricsService> svc = getInstance();
+    sp<IMediaMetricsService> svc = getService();
     if (svc != NULL) {
         status_t status = svc->submit(this);
         if (status != NO_ERROR) {
@@ -460,7 +312,7 @@
     */
 
     ALOGD_IF(DEBUG_API, "%s: delivering %zu bytes", __func__, size);
-    sp<IMediaMetricsService> svc = getInstance();
+    sp<IMediaMetricsService> svc = getService();
     if (svc != nullptr) {
         const status_t status = svc->submitBuffer(buffer, size);
         if (status != NO_ERROR) {
@@ -473,7 +325,7 @@
 }
 
 //static
-sp<IMediaMetricsService> BaseItem::getInstance() {
+sp<IMediaMetricsService> BaseItem::getService() {
     static const char *servicename = "media.metrics";
     static const bool enabled = isEnabled(); // singleton initialized
 
@@ -512,115 +364,6 @@
 }
 
 
-// merge the info from 'incoming' into this record.
-// we finish with a union of this+incoming and special handling for collisions
-bool mediametrics::Item::merge(mediametrics::Item *incoming) {
-
-    // if I don't have key or session id, take them from incoming
-    // 'this' should never be missing both of them...
-    if (mKey.empty()) {
-        mKey = incoming->mKey;
-    }
-
-    // for each attribute from 'incoming', resolve appropriately
-    int nattr = incoming->mPropCount;
-    for (int i = 0 ; i < nattr; i++ ) {
-        Prop *iprop = &incoming->mProps[i];
-        const char *p = iprop->mName;
-        size_t len = strlen(p);
-
-        // should ignore a zero length name...
-        if (len == 0) {
-            continue;
-        }
-
-        Prop *oprop = findProp(iprop->mName);
-
-        if (oprop == NULL) {
-            // no oprop, so we insert the new one
-            oprop = allocateProp(p);
-            if (oprop != NULL) {
-                *oprop = *iprop;
-            } else {
-                ALOGW("dropped property '%s'", iprop->mName);
-            }
-        } else {
-            *oprop = *iprop;
-        }
-    }
-
-    // not sure when we'd return false...
-    return true;
-}
-
-namespace {
-
-template <typename T>
-status_t insert(const T& val, char **bufferpptr, char *bufferptrmax)
-{
-    const size_t size = sizeof(val);
-    if (*bufferpptr + size > bufferptrmax) {
-        ALOGE("%s: buffer exceeded with size %zu", __func__, size);
-        return BAD_VALUE;
-    }
-    memcpy(*bufferpptr, &val, size);
-    *bufferpptr += size;
-    return NO_ERROR;
-}
-
-template <>
-status_t insert(const char * const& val, char **bufferpptr, char *bufferptrmax)
-{
-    const size_t size = strlen(val) + 1;
-    if (size > UINT16_MAX || *bufferpptr + size > bufferptrmax) {
-        ALOGE("%s: buffer exceeded with size %zu", __func__, size);
-        return BAD_VALUE;
-    }
-    memcpy(*bufferpptr, val, size);
-    *bufferpptr += size;
-    return NO_ERROR;
-}
-
-template <>
- __unused
-status_t insert(char * const& val, char **bufferpptr, char *bufferptrmax)
-{
-    return insert((const char *)val, bufferpptr, bufferptrmax);
-}
-
-template <typename T>
-status_t extract(T *val, const char **bufferpptr, const char *bufferptrmax)
-{
-    const size_t size = sizeof(*val);
-    if (*bufferpptr + size > bufferptrmax) {
-        ALOGE("%s: buffer exceeded with size %zu", __func__, size);
-        return BAD_VALUE;
-    }
-    memcpy(val, *bufferpptr, size);
-    *bufferpptr += size;
-    return NO_ERROR;
-}
-
-template <>
-status_t extract(char **val, const char **bufferpptr, const char *bufferptrmax)
-{
-    const char *ptr = *bufferpptr;
-    while (*ptr != 0) {
-        if (ptr >= bufferptrmax) {
-            ALOGE("%s: buffer exceeded", __func__);
-            return BAD_VALUE;
-        }
-        ++ptr;
-    }
-    const size_t size = (ptr - *bufferpptr) + 1;
-    *val = (char *)malloc(size);
-    memcpy(*val, *bufferpptr, size);
-    *bufferpptr += size;
-    return NO_ERROR;
-}
-
-} // namespace
-
 status_t mediametrics::Item::writeToByteString(char **pbuffer, size_t *plength) const
 {
     if (pbuffer == nullptr || plength == nullptr)
@@ -647,14 +390,14 @@
     uint32_t size = header_size
         + sizeof(uint32_t) // # properties
         ;
-    for (size_t i = 0 ; i < mPropCount; ++i) {
-        const size_t propSize = mProps[i].getByteStringSize();
+    for (auto &prop : *this) {
+        const size_t propSize = prop.getByteStringSize();
         if (propSize > UINT16_MAX) {
-            ALOGW("%s: prop %zu size %zu too large", __func__, i, propSize);
+            ALOGW("%s: prop %s size %zu too large", __func__, prop.getName(), propSize);
             return INVALID_OPERATION;
         }
         if (__builtin_add_overflow(size, propSize, &size)) {
-            ALOGW("%s: item size overflow at property %zu", __func__, i);
+            ALOGW("%s: item size overflow at property %s", __func__, prop.getName());
             return INVALID_OPERATION;
         }
     }
@@ -674,16 +417,16 @@
             || insert((int32_t)mPid, &filling, buildmax) != NO_ERROR
             || insert((int32_t)mUid, &filling, buildmax) != NO_ERROR
             || insert((int64_t)mTimestamp, &filling, buildmax) != NO_ERROR
-            || insert((uint32_t)mPropCount, &filling, buildmax) != NO_ERROR) {
+            || insert((uint32_t)mProps.size(), &filling, buildmax) != NO_ERROR) {
         ALOGE("%s:could not write header", __func__);  // shouldn't happen
         free(build);
         return INVALID_OPERATION;
     }
-    for (size_t i = 0 ; i < mPropCount; ++i) {
-        if (mProps[i].writeToByteString(&filling, buildmax) != NO_ERROR) {
+    for (auto &prop : *this) {
+        if (prop.writeToByteString(&filling, buildmax) != NO_ERROR) {
             free(build);
             // shouldn't happen
-            ALOGE("%s:could not write prop %zu of %zu", __func__, i, mPropCount);
+            ALOGE("%s:could not write prop %s", __func__, prop.getName());
             return INVALID_OPERATION;
         }
     }
@@ -710,7 +453,7 @@
     uint32_t header_size;
     uint16_t version;
     uint16_t key_size;
-    char *key = nullptr;
+    std::string key;
     int32_t pid;
     int32_t uid;
     int64_t timestamp;
@@ -724,14 +467,12 @@
             || extract(&uid, &read, readend) != NO_ERROR
             || extract(&timestamp, &read, readend) != NO_ERROR
             || size > length
-            || strlen(key) + 1 != key_size
+            || key.size() + 1 != key_size
             || header_size > size) {
-        free(key);
         ALOGW("%s: invalid header", __func__);
         return INVALID_OPERATION;
     }
-    mKey = key;
-    free(key);
+    mKey = std::move(key);
     const size_t pos = read - bufferptr;
     if (pos > header_size) {
         ALOGW("%s: invalid header pos:%zu > header_size:%u",
@@ -750,45 +491,16 @@
     mUid = uid;
     mTimestamp = timestamp;
     for (size_t i = 0; i < propCount; ++i) {
-        Prop *prop = allocateProp();
-        if (prop->readFromByteString(&read, readend) != NO_ERROR) {
+        Prop prop;
+        if (prop.readFromByteString(&read, readend) != NO_ERROR) {
             ALOGW("%s: cannot read prop %zu", __func__, i);
             return INVALID_OPERATION;
         }
+        mProps[prop.getName()] = std::move(prop);
     }
     return NO_ERROR;
 }
 
-status_t mediametrics::Item::Prop::writeToParcel(Parcel *data) const
-{
-   switch (mType) {
-   case kTypeInt32:
-       return data->writeCString(mName)
-               ?: data->writeInt32(mType)
-               ?: data->writeInt32(u.int32Value);
-   case kTypeInt64:
-       return data->writeCString(mName)
-               ?: data->writeInt32(mType)
-               ?: data->writeInt64(u.int64Value);
-   case kTypeDouble:
-       return data->writeCString(mName)
-               ?: data->writeInt32(mType)
-               ?: data->writeDouble(u.doubleValue);
-   case kTypeRate:
-       return data->writeCString(mName)
-               ?: data->writeInt32(mType)
-               ?: data->writeInt64(u.rate.first)
-               ?: data->writeInt64(u.rate.second);
-   case kTypeCString:
-       return data->writeCString(mName)
-               ?: data->writeInt32(mType)
-               ?: data->writeCString(u.CStringValue);
-   default:
-       ALOGE("%s: found bad type: %d, name %s", __func__, mType, mName);
-       return BAD_VALUE;
-   }
-}
-
 status_t mediametrics::Item::Prop::readFromParcel(const Parcel& data)
 {
     const char *key = data.readCString();
@@ -797,239 +509,99 @@
     status_t status = data.readInt32(&type);
     if (status != NO_ERROR) return status;
     switch (type) {
-    case kTypeInt32:
-        status = data.readInt32(&u.int32Value);
-        break;
-    case kTypeInt64:
-        status = data.readInt64(&u.int64Value);
-        break;
-    case kTypeDouble:
-        status = data.readDouble(&u.doubleValue);
-        break;
-    case kTypeCString: {
+    case mediametrics::kTypeInt32: {
+        int32_t value;
+        status = data.readInt32(&value);
+        if (status != NO_ERROR) return status;
+        mElem = value;
+    } break;
+    case mediametrics::kTypeInt64: {
+        int64_t value;
+        status = data.readInt64(&value);
+        if (status != NO_ERROR) return status;
+        mElem = value;
+    } break;
+    case mediametrics::kTypeDouble: {
+        double value;
+        status = data.readDouble(&value);
+        if (status != NO_ERROR) return status;
+        mElem = value;
+    } break;
+    case mediametrics::kTypeCString: {
         const char *s = data.readCString();
         if (s == nullptr) return BAD_VALUE;
-        set(s);
-        break;
-        }
-    case kTypeRate: {
+        mElem = s;
+    } break;
+    case mediametrics::kTypeRate: {
         std::pair<int64_t, int64_t> rate;
         status = data.readInt64(&rate.first)
                 ?: data.readInt64(&rate.second);
-        if (status == NO_ERROR) {
-            set(rate);
-        }
-        break;
-        }
+        if (status != NO_ERROR) return status;
+        mElem = rate;
+    } break;
+    case mediametrics::kTypeNone: {
+        mElem = std::monostate{};
+    } break;
     default:
-        ALOGE("%s: reading bad item type: %d", __func__, mType);
+        ALOGE("%s: reading bad item type: %d", __func__, type);
         return BAD_VALUE;
     }
-    if (status == NO_ERROR) {
-        setName(key);
-        mType = (Type)type;
-    }
-    return status;
-}
-
-void mediametrics::Item::Prop::toString(char *buffer, size_t length) const
-{
-    switch (mType) {
-    case kTypeInt32:
-        snprintf(buffer, length, "%s=%d:", mName, u.int32Value);
-        break;
-    case mediametrics::Item::kTypeInt64:
-        snprintf(buffer, length, "%s=%lld:", mName, (long long)u.int64Value);
-        break;
-    case mediametrics::Item::kTypeDouble:
-        snprintf(buffer, length, "%s=%e:", mName, u.doubleValue);
-        break;
-    case mediametrics::Item::kTypeRate:
-        snprintf(buffer, length, "%s=%lld/%lld:",
-                mName, (long long)u.rate.first, (long long)u.rate.second);
-        break;
-    case mediametrics::Item::kTypeCString:
-        // TODO sanitize string for ':' '='
-        snprintf(buffer, length, "%s=%s:", mName, u.CStringValue);
-        break;
-    default:
-        ALOGE("%s: bad item type: %d for %s", __func__, mType, mName);
-        if (length > 0) buffer[0] = 0;
-        break;
-    }
-}
-
-size_t mediametrics::Item::Prop::getByteStringSize() const
-{
-    const size_t header =
-        sizeof(uint16_t)      // length
-        + sizeof(uint8_t)     // type
-        + strlen(mName) + 1;  // mName + 0 termination
-    size_t payload = 0;
-    switch (mType) {
-    case mediametrics::Item::kTypeInt32:
-        payload = sizeof(u.int32Value);
-        break;
-    case mediametrics::Item::kTypeInt64:
-        payload = sizeof(u.int64Value);
-        break;
-    case mediametrics::Item::kTypeDouble:
-        payload = sizeof(u.doubleValue);
-        break;
-    case mediametrics::Item::kTypeRate:
-        payload = sizeof(u.rate.first) + sizeof(u.rate.second);
-        break;
-    case mediametrics::Item::kTypeCString:
-        payload = strlen(u.CStringValue) + 1;
-        break;
-    default:
-        ALOGE("%s: found bad prop type: %d, name %s",
-                __func__, mType, mName); // no payload computed
-        break;
-    }
-    return header + payload;
-}
-
-
-// TODO: fold into a template later.
-status_t BaseItem::writeToByteString(
-        const char *name, int32_t value, char **bufferpptr, char *bufferptrmax)
-{
-    const size_t len = 2 + 1 + strlen(name) + 1 + sizeof(value);
-    if (len > UINT16_MAX) return BAD_VALUE;
-    return insert((uint16_t)len, bufferpptr, bufferptrmax)
-            ?: insert((uint8_t)kTypeInt32, bufferpptr, bufferptrmax)
-            ?: insert(name, bufferpptr, bufferptrmax)
-            ?: insert(value, bufferpptr, bufferptrmax);
-}
-
-status_t BaseItem::writeToByteString(
-        const char *name, int64_t value, char **bufferpptr, char *bufferptrmax)
-{
-    const size_t len = 2 + 1 + strlen(name) + 1 + sizeof(value);
-    if (len > UINT16_MAX) return BAD_VALUE;
-    return insert((uint16_t)len, bufferpptr, bufferptrmax)
-            ?: insert((uint8_t)kTypeInt64, bufferpptr, bufferptrmax)
-            ?: insert(name, bufferpptr, bufferptrmax)
-            ?: insert(value, bufferpptr, bufferptrmax);
-}
-
-status_t BaseItem::writeToByteString(
-        const char *name, double value, char **bufferpptr, char *bufferptrmax)
-{
-    const size_t len = 2 + 1 + strlen(name) + 1 + sizeof(value);
-    if (len > UINT16_MAX) return BAD_VALUE;
-    return insert((uint16_t)len, bufferpptr, bufferptrmax)
-            ?: insert((uint8_t)kTypeDouble, bufferpptr, bufferptrmax)
-            ?: insert(name, bufferpptr, bufferptrmax)
-            ?: insert(value, bufferpptr, bufferptrmax);
-}
-
-status_t BaseItem::writeToByteString(
-        const char *name, const std::pair<int64_t, int64_t> &value, char **bufferpptr, char *bufferptrmax)
-{
-    const size_t len = 2 + 1 + strlen(name) + 1 + 8 + 8;
-    if (len > UINT16_MAX) return BAD_VALUE;
-    return insert((uint16_t)len, bufferpptr, bufferptrmax)
-            ?: insert((uint8_t)kTypeRate, bufferpptr, bufferptrmax)
-            ?: insert(name, bufferpptr, bufferptrmax)
-            ?: insert(value.first, bufferpptr, bufferptrmax)
-            ?: insert(value.second, bufferpptr, bufferptrmax);
-}
-
-status_t BaseItem::writeToByteString(
-        const char *name, char * const &value, char **bufferpptr, char *bufferptrmax)
-{
-    return writeToByteString(name, (const char *)value, bufferpptr, bufferptrmax);
-}
-
-status_t BaseItem::writeToByteString(
-        const char *name, const char * const &value, char **bufferpptr, char *bufferptrmax)
-{
-    const size_t len = 2 + 1 + strlen(name) + 1 + strlen(value) + 1;
-    if (len > UINT16_MAX) return BAD_VALUE;
-    return insert((uint16_t)len, bufferpptr, bufferptrmax)
-            ?: insert((uint8_t)kTypeCString, bufferpptr, bufferptrmax)
-            ?: insert(name, bufferpptr, bufferptrmax)
-            ?: insert(value, bufferpptr, bufferptrmax);
-}
-
-
-status_t BaseItem::writeToByteString(
-        const char *name, const none_t &, char **bufferpptr, char *bufferptrmax)
-{
-    const size_t len = 2 + 1 + strlen(name) + 1;
-    if (len > UINT16_MAX) return BAD_VALUE;
-    return insert((uint16_t)len, bufferpptr, bufferptrmax)
-            ?: insert((uint8_t)kTypeCString, bufferpptr, bufferptrmax)
-            ?: insert(name, bufferpptr, bufferptrmax);
-}
-
-
-status_t mediametrics::Item::Prop::writeToByteString(
-        char **bufferpptr, char *bufferptrmax) const
-{
-    switch (mType) {
-    case kTypeInt32:
-        return BaseItem::writeToByteString(mName, u.int32Value, bufferpptr, bufferptrmax);
-    case kTypeInt64:
-        return BaseItem::writeToByteString(mName, u.int64Value, bufferpptr, bufferptrmax);
-    case kTypeDouble:
-        return BaseItem::writeToByteString(mName, u.doubleValue, bufferpptr, bufferptrmax);
-    case kTypeRate:
-        return BaseItem::writeToByteString(mName, u.rate, bufferpptr, bufferptrmax);
-    case kTypeCString:
-        return BaseItem::writeToByteString(mName, u.CStringValue, bufferpptr, bufferptrmax);
-    case kTypeNone:
-        return BaseItem::writeToByteString(mName, none_t{}, bufferpptr, bufferptrmax);
-    default:
-        ALOGE("%s: found bad prop type: %d, name %s",
-                __func__, mType, mName);  // no payload sent
-        return BAD_VALUE;
-    }
+    setName(key);
+    return NO_ERROR;
 }
 
 status_t mediametrics::Item::Prop::readFromByteString(
         const char **bufferpptr, const char *bufferptrmax)
 {
     uint16_t len;
-    char *name;
+    std::string name;
     uint8_t type;
     status_t status = extract(&len, bufferpptr, bufferptrmax)
             ?: extract(&type, bufferpptr, bufferptrmax)
             ?: extract(&name, bufferpptr, bufferptrmax);
     if (status != NO_ERROR) return status;
-    if (mName != nullptr) {
-        free(mName);
-    }
-    mName = name;
-    if (mType == kTypeCString) {
-        free(u.CStringValue);
-        u.CStringValue = nullptr;
-    }
-    mType = (Type)type;
-    switch (mType) {
-    case kTypeInt32:
-        return extract(&u.int32Value, bufferpptr, bufferptrmax);
-    case kTypeInt64:
-        return extract(&u.int64Value, bufferpptr, bufferptrmax);
-    case kTypeDouble:
-        return extract(&u.doubleValue, bufferpptr, bufferptrmax);
-    case kTypeRate:
-        return extract(&u.rate.first, bufferpptr, bufferptrmax)
-                ?: extract(&u.rate.second, bufferpptr, bufferptrmax);
-    case kTypeCString:
-        status = extract(&u.CStringValue, bufferpptr, bufferptrmax);
-        if (status != NO_ERROR) mType = kTypeNone;
-        return status;
-    case kTypeNone:
-        return NO_ERROR;
+    switch (type) {
+    case mediametrics::kTypeInt32: {
+        int32_t value;
+        status = extract(&value, bufferpptr, bufferptrmax);
+        if (status != NO_ERROR) return status;
+        mElem = value;
+    } break;
+    case mediametrics::kTypeInt64: {
+        int64_t value;
+        status = extract(&value, bufferpptr, bufferptrmax);
+        if (status != NO_ERROR) return status;
+        mElem = value;
+    } break;
+    case mediametrics::kTypeDouble: {
+        double value;
+        status = extract(&value, bufferpptr, bufferptrmax);
+        if (status != NO_ERROR) return status;
+        mElem = value;
+    } break;
+    case mediametrics::kTypeRate: {
+        std::pair<int64_t, int64_t> value;
+        status = extract(&value.first, bufferpptr, bufferptrmax)
+                ?: extract(&value.second, bufferpptr, bufferptrmax);
+        if (status != NO_ERROR) return status;
+        mElem = value;
+    } break;
+    case mediametrics::kTypeCString: {
+        std::string value;
+        status = extract(&value, bufferpptr, bufferptrmax);
+        if (status != NO_ERROR) return status;
+        mElem = std::move(value);
+    } break;
+    case mediametrics::kTypeNone: {
+        mElem = std::monostate{};
+    } break;
     default:
-        mType = kTypeNone;
         ALOGE("%s: found bad prop type: %d, name %s",
-                __func__, mType, mName);  // no payload sent
+                __func__, (int)type, name.c_str());  // no payload sent
         return BAD_VALUE;
     }
+    mName = name;
+    return NO_ERROR;
 }
 
 } // namespace android::mediametrics
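
A minimal round-trip sketch of the variant-based Prop serialization above, assuming the header is reachable as <media/MediaMetricsItem.h> and that the including file defines LOG_TAG (the header's inline helpers use ALOGE):

    #define LOG_TAG "MediaMetricsPropExample"
    #include <media/MediaMetricsItem.h>   // assumed include path for MediaMetricsItem.h

    using android::mediametrics::Item;

    bool roundTripOneProp() {
        Item::Prop out;
        out.setName("latencyMs");
        out.set(12.5);                    // held as kTypeDouble in the Elem variant

        char buffer[64];                  // this property needs 21 bytes
        char *wptr = buffer;
        if (out.writeToByteString(&wptr, buffer + sizeof(buffer)) != android::NO_ERROR) {
            return false;
        }

        Item::Prop in;
        const char *rptr = buffer;
        if (in.readFromByteString(&rptr, wptr) != android::NO_ERROR) {
            return false;
        }

        double latencyMs = 0.;
        return in.get(&latencyMs) && latencyMs == 12.5;   // typed get() from the variant
    }
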
diff --git a/media/libmediametrics/include/MediaMetricsItem.h b/media/libmediametrics/include/MediaMetricsItem.h
index 5f33650..6141b36 100644
--- a/media/libmediametrics/include/MediaMetricsItem.h
+++ b/media/libmediametrics/include/MediaMetricsItem.h
@@ -14,15 +14,18 @@
  * limitations under the License.
  */
 
-#ifndef ANDROID_MEDIA_MEDIAANALYTICSITEM_H
-#define ANDROID_MEDIA_MEDIAANALYTICSITEM_H
+#ifndef ANDROID_MEDIA_MEDIAMETRICSITEM_H
+#define ANDROID_MEDIA_MEDIAMETRICSITEM_H
 
 #include "MediaMetrics.h"
 
 #include <algorithm>
+#include <map>
 #include <string>
 #include <sys/types.h>
+#include <variant>
 
+#include <binder/Parcel.h>
 #include <cutils/properties.h>
 #include <utils/Errors.h>
 #include <utils/KeyedVector.h>
@@ -32,13 +35,61 @@
 
 namespace android {
 
+/*
+ * MediaMetrics Keys and Properties for Audio.
+ *
+ * C/C++ friendly constants that ensure
+ * 1) Compilation error on misspelling
+ * 2) Consistent behavior and documentation.
+ *
+ * TODO: Move to separate header file.
+ */
+
+// Taxonomy of audio keys
+
+// Key Prefixes are used for MediaMetrics Item Keys and end with a ".".
+// They must be appended with another value to make a key.
+#define AMEDIAMETRICS_KEY_PREFIX_AUDIO "audio."
+
+// The AudioRecord key appends the "trackId" to the prefix.
+#define AMEDIAMETRICS_KEY_PREFIX_AUDIO_RECORD AMEDIAMETRICS_KEY_PREFIX_AUDIO "record."
+
+// The AudioThread key appends the "threadId" to the prefix.
+#define AMEDIAMETRICS_KEY_PREFIX_AUDIO_THREAD AMEDIAMETRICS_KEY_PREFIX_AUDIO "thread."
+
+// The AudioTrack key appends the "trackId" to the prefix.
+#define AMEDIAMETRICS_KEY_PREFIX_AUDIO_TRACK  AMEDIAMETRICS_KEY_PREFIX_AUDIO "track."
+
+// Keys are strings used for MediaMetrics Item Keys
+#define AMEDIAMETRICS_KEY_AUDIO_FLINGER       AMEDIAMETRICS_KEY_PREFIX_AUDIO "flinger"
+#define AMEDIAMETRICS_KEY_AUDIO_POLICY        AMEDIAMETRICS_KEY_PREFIX_AUDIO "policy"
+
+// Props are properties allowed for MediaMetrics Items.
+#define AMEDIAMETRICS_PROP_EVENT          "event"          // string value (often func name)
+#define AMEDIAMETRICS_PROP_LATENCYMS      "latencyMs"      // double value
+#define AMEDIAMETRICS_PROP_OUTPUTDEVICES  "outputDevices"  // string value
+#define AMEDIAMETRICS_PROP_STARTUPMS      "startupMs"      // double value
+#define AMEDIAMETRICS_PROP_THREADID       "threadId"       // int32 value io handle
+
+// Values are strings accepted for a given property.
+
+// An event is a general description, which often is a function name.
+#define AMEDIAMETRICS_PROP_EVENT_VALUE_CTOR       "ctor"
+#define AMEDIAMETRICS_PROP_EVENT_VALUE_DTOR       "dtor"
+#define AMEDIAMETRICS_PROP_EVENT_VALUE_UNDERRUN   "underrun" // from Thread
+
 class IMediaMetricsService;
 class Parcel;
 
 /*
  * MediaMetrics Item
  *
- * Byte string format.
+ * The MediaMetrics Item allows get/set operations and recording to the service.
+ *
+ * The MediaMetrics LogItem is a faster logging variant. It allows set operations only,
+ * and then recording to the service.
+ *
+ * The Byte String format is as follows:
  *
  * For Java
  *  int64 corresponds to long
@@ -46,10 +97,16 @@
  *  uint16 corresponds to char
  *  uint8, int8 corresponds to byte
  *
- * Hence uint8 and uint32 values are limited to INT8_MAX and INT32_MAX.
+ * For items transmitted from Java, uint8 and uint32 values are limited
+ * to INT8_MAX and INT32_MAX.  This constrains the size of large items
+ * to 2GB, which is consistent with ByteBuffer max size. A native item
+ * can conceivably have a size of 4GB.
  *
  * Physical layout of integers and doubles within the MediaMetrics byte string
- * is in Native / host order, which is nearly always little endian.
+ * is in Native / host order, which is usually little endian.
+ *
+ * Note that primitive data (ints, doubles) within a Byte String has
+ * no extra padding or alignment requirements, like ByteBuffer.
  *
  * -- begin of item
  * -- begin of header
@@ -57,7 +114,7 @@
  * (uint32) header size, including the item size and header size fields.
  * (uint16) version: exactly 0
  * (uint16) key size, that is key strlen + 1 for zero termination.
- * (int8)+ key string which is 0 terminated
+ * (int8)+ key, a string which is 0 terminated (UTF-8).
  * (int32) pid
  * (int32) uid
  * (int64) timestamp
@@ -72,10 +129,12 @@
  *       (int32)
  *       (int64)
  *       (double)
- *       (int8)+ for cstring, including 0 termination
+ *       (int8)+ for TYPE_CSTRING, including 0 termination
  *       (int64, int64) for rate
  * -- end body
  * -- end of item
+ *
+ * The Byte String format must match MediaMetrics.java.
  */
 
 namespace mediametrics {
@@ -90,11 +149,50 @@
     kTypeRate = 5,
 };
 
+/**
+ * The MediaMetrics Item has special Item properties,
+ * derived internally or through dedicated setters.
+ *
+ * For consistency we use the following keys to represent
+ * these special Item properties when in a generic Bundle
+ * or in a std::map.
+ *
+ * These values must match MediaMetrics.java
+ */
+static inline constexpr const char *BUNDLE_TOTAL_SIZE = "_totalSize";
+static inline constexpr const char *BUNDLE_HEADER_SIZE = "_headerSize";
+static inline constexpr const char *BUNDLE_VERSION = "_version";
+static inline constexpr const char *BUNDLE_KEY_SIZE = "_keySize";
+static inline constexpr const char *BUNDLE_KEY = "_key";
+static inline constexpr const char *BUNDLE_PID = "_pid";
+static inline constexpr const char *BUNDLE_UID = "_uid";
+static inline constexpr const char *BUNDLE_TIMESTAMP = "_timestamp";
+static inline constexpr const char *BUNDLE_PROPERTY_COUNT = "_propertyCount";
+
 template<size_t N>
 static inline bool startsWith(const std::string &s, const char (&comp)[N]) {
-    return !strncmp(s.c_str(), comp, N-1);
+    return !strncmp(s.c_str(), comp, N - 1);
 }
 
+static inline bool startsWith(const std::string& s, const std::string& comp) {
+    return !strncmp(s.c_str(), comp.c_str(), comp.size());
+}
+
+/**
+ * Defers a function to run in the destructor.
+ *
+ * This helper class is used to log results on exit of a method.
+ */
+class Defer {
+public:
+    template <typename U>
+    Defer(U &&f) : mThunk(std::forward<U>(f)) {}
+    ~Defer() { mThunk(); }
+
+private:
+    const std::function<void()> mThunk;
+};
+
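
A one-line usage sketch of the Defer helper above, assuming ALOGD is usable (LOG_TAG defined) in the including file:

    android::status_t doSomething() {
        android::status_t status = android::NO_ERROR;
        android::mediametrics::Defer logOnExit(
                [&] { ALOGD("doSomething returns %d", status); });
        // Any early return added later still runs the deferred lambda in ~Defer().
        return status;
    }
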
 /**
  * Media Metrics BaseItem
  *
@@ -106,8 +204,9 @@
     friend class MediaMetricsDeathNotifier; // for dropInstance
     // enabled 1, disabled 0
 public:
-    // are we collecting analytics data
+    // are we collecting metrics data
     static bool isEnabled();
+    static sp<IMediaMetricsService> getService();
 
 protected:
     static constexpr const char * const EnabledProperty = "media.metrics.enabled";
@@ -116,43 +215,253 @@
 
     // let's reuse a binder connection
     static sp<IMediaMetricsService> sMediaMetricsService;
-    static sp<IMediaMetricsService> getInstance();
+
     static void dropInstance();
     static bool submitBuffer(const char *buffer, size_t len);
 
-    static status_t writeToByteString(
-            const char *name, int32_t value, char **bufferpptr, char *bufferptrmax);
-    static status_t writeToByteString(
-            const char *name, int64_t value, char **bufferpptr, char *bufferptrmax);
-    static status_t writeToByteString(
-            const char *name, double value, char **bufferpptr, char *bufferptrmax);
-    static status_t writeToByteString(
-            const char *name, const std::pair<int64_t, int64_t> &value,
-            char **bufferpptr, char *bufferptrmax);
-    static status_t writeToByteString(
-            const char *name, char * const &value, char **bufferpptr, char *bufferptrmax);
-    static status_t writeToByteString(
-            const char *name, const char * const &value, char **bufferpptr, char *bufferptrmax);
-    struct none_t {}; // for kTypeNone
-    static status_t writeToByteString(
-            const char *name, const none_t &, char **bufferpptr, char *bufferptrmax);
+    template <typename T>
+    struct is_item_type {
+        static constexpr inline bool value =
+             std::is_same<T, int32_t>::value
+             || std::is_same<T, int64_t>::value
+             || std::is_same<T, double>::value
+             || std::is_same<T, std::pair<int64_t, int64_t>>::value
+             || std::is_same<T, std::string>::value
+             || std::is_same<T, std::monostate>::value;
+    };
 
-    template<typename T>
-    static status_t sizeOfByteString(const char *name, const T& value) {
+    template <typename T>
+    struct get_type_of {
+        static_assert(is_item_type<T>::value);
+        static constexpr inline Type value =
+             std::is_same<T, int32_t>::value ? kTypeInt32
+             : std::is_same<T, int64_t>::value ? kTypeInt64
+             : std::is_same<T, double>::value ? kTypeDouble
+             : std::is_same<T, std::pair<int64_t, int64_t>>::value ? kTypeRate
+             : std::is_same<T, std::string>::value ? kTypeCString
+             : std::is_same<T, std::monostate>::value ? kTypeNone
+             : kTypeNone;
+    };
+
+    template <typename T>
+    static size_t sizeOfByteString(const char *name, const T& value) {
+        static_assert(is_item_type<T>::value);
         return 2 + 1 + strlen(name) + 1 + sizeof(value);
     }
-    template<> // static
-    status_t sizeOfByteString(const char *name, char * const &value) {
-        return 2 + 1 + strlen(name) + 1 + strlen(value) + 1;
+    template <> // static
+    size_t sizeOfByteString(const char *name, const std::string& value) {
+        return 2 + 1 + strlen(name) + 1 + value.size() + 1;
     }
-    template<> // static
-    status_t sizeOfByteString(const char *name, const char * const &value) {
-        return 2 + 1 + strlen(name) + 1 + strlen(value) + 1;
-    }
-    template<> // static
-    status_t sizeOfByteString(const char *name, const none_t &) {
+    template <> // static
+    size_t sizeOfByteString(const char *name, const std::monostate&) {
          return 2 + 1 + strlen(name) + 1;
     }
+    // for speed
+    static size_t sizeOfByteString(const char *name, const char *value) {
+        return 2 + 1 + strlen(name) + 1 + strlen(value) + 1;
+    }
+
+    template <typename T>
+    static status_t insert(const T& val, char **bufferpptr, char *bufferptrmax) {
+        static_assert(std::is_trivially_constructible<T>::value);
+        const size_t size = sizeof(val);
+        if (*bufferpptr + size > bufferptrmax) {
+            ALOGE("%s: buffer exceeded with size %zu", __func__, size);
+            return BAD_VALUE;
+        }
+        memcpy(*bufferpptr, &val, size);
+        *bufferpptr += size;
+        return NO_ERROR;
+    }
+    template <> // static
+    status_t insert(const std::string& val, char **bufferpptr, char *bufferptrmax) {
+        const size_t size = val.size() + 1;
+        if (size > UINT16_MAX || *bufferpptr + size > bufferptrmax) {
+            ALOGE("%s: buffer exceeded with size %zu", __func__, size);
+            return BAD_VALUE;
+        }
+        memcpy(*bufferpptr, val.c_str(), size);
+        *bufferpptr += size;
+        return NO_ERROR;
+    }
+    template <> // static
+    status_t insert(const std::pair<int64_t, int64_t>& val,
+            char **bufferpptr, char *bufferptrmax) {
+        const size_t size = sizeof(val.first) + sizeof(val.second);
+        if (*bufferpptr + size > bufferptrmax) {
+            ALOGE("%s: buffer exceeded with size %zu", __func__, size);
+            return BAD_VALUE;
+        }
+        memcpy(*bufferpptr, &val.first, sizeof(val.first));
+        memcpy(*bufferpptr + sizeof(val.first), &val.second, sizeof(val.second));
+        *bufferpptr += size;
+        return NO_ERROR;
+    }
+    template <> // static
+    status_t insert(const std::monostate&, char **, char *) {
+        return NO_ERROR;
+    }
+    // for speed
+    static status_t insert(const char *val, char **bufferpptr, char *bufferptrmax) {
+        const size_t size = strlen(val) + 1;
+        if (size > UINT16_MAX || *bufferpptr + size > bufferptrmax) {
+            ALOGE("%s: buffer exceeded with size %zu", __func__, size);
+            return BAD_VALUE;
+        }
+        memcpy(*bufferpptr, val, size);
+        *bufferpptr += size;
+        return NO_ERROR;
+    }
+
+    template <typename T>
+    static status_t writeToByteString(
+            const char *name, const T& value, char **bufferpptr, char *bufferptrmax) {
+        static_assert(is_item_type<T>::value);
+        const size_t len = sizeOfByteString(name, value);
+        if (len > UINT16_MAX) return BAD_VALUE;
+        return insert((uint16_t)len, bufferpptr, bufferptrmax)
+                ?: insert((uint8_t)get_type_of<T>::value, bufferpptr, bufferptrmax)
+                ?: insert(name, bufferpptr, bufferptrmax)
+                ?: insert(value, bufferpptr, bufferptrmax);
+    }
+    // for speed
+    static status_t writeToByteString(
+            const char *name, const char *value, char **bufferpptr, char *bufferptrmax) {
+        const size_t len = sizeOfByteString(name, value);
+        if (len > UINT16_MAX) return BAD_VALUE;
+        return insert((uint16_t)len, bufferpptr, bufferptrmax)
+                ?: insert((uint8_t)kTypeCString, bufferpptr, bufferptrmax)
+                ?: insert(name, bufferpptr, bufferptrmax)
+                ?: insert(value, bufferpptr, bufferptrmax);
+    }
+
+    template <typename T>
+    static void toStringBuffer(
+            const char *name, const T& value, char *buffer, size_t length) = delete;
+    template <> // static
+    void toStringBuffer(
+            const char *name, const int32_t& value, char *buffer, size_t length) {
+        snprintf(buffer, length, "%s=%d:", name, value);
+    }
+    template <> // static
+    void toStringBuffer(
+            const char *name, const int64_t& value, char *buffer, size_t length) {
+        snprintf(buffer, length, "%s=%lld:", name, (long long)value);
+    }
+    template <> // static
+    void toStringBuffer(
+            const char *name, const double& value, char *buffer, size_t length) {
+        snprintf(buffer, length, "%s=%e:", name, value);
+    }
+    template <> // static
+    void toStringBuffer(
+            const char *name, const std::pair<int64_t, int64_t>& value,
+            char *buffer, size_t length) {
+        snprintf(buffer, length, "%s=%lld/%lld:",
+                name, (long long)value.first, (long long)value.second);
+    }
+    template <> // static
+    void toStringBuffer(
+            const char *name, const std::string& value, char *buffer, size_t length) {
+        // TODO sanitize string for ':' '='
+        snprintf(buffer, length, "%s=%s:", name, value.c_str());
+    }
+    template <> // static
+    void toStringBuffer(
+            const char *name, const std::monostate&, char *buffer, size_t length) {
+        snprintf(buffer, length, "%s=():", name);
+    }
+
+    template <typename T>
+    static status_t writeToParcel(
+            const char *name, const T& value, Parcel *parcel) = delete;
+    template <> // static
+    status_t writeToParcel(
+            const char *name, const int32_t& value, Parcel *parcel) {
+        return parcel->writeCString(name)
+               ?: parcel->writeInt32(get_type_of<int32_t>::value)
+               ?: parcel->writeInt32(value);
+    }
+    template <> // static
+    status_t writeToParcel(
+            const char *name, const int64_t& value, Parcel *parcel) {
+        return parcel->writeCString(name)
+               ?: parcel->writeInt32(get_type_of<int64_t>::value)
+               ?: parcel->writeInt64(value);
+    }
+    template <> // static
+    status_t writeToParcel(
+            const char *name, const double& value, Parcel *parcel) {
+        return parcel->writeCString(name)
+               ?: parcel->writeInt32(get_type_of<double>::value)
+               ?: parcel->writeDouble(value);
+    }
+    template <> // static
+    status_t writeToParcel(
+            const char *name, const std::pair<int64_t, int64_t>& value, Parcel *parcel) {
+        return parcel->writeCString(name)
+               ?: parcel->writeInt32(get_type_of<std::pair<int64_t, int64_t>>::value)
+               ?: parcel->writeInt64(value.first)
+               ?: parcel->writeInt64(value.second);
+    }
+    template <> // static
+    status_t writeToParcel(
+            const char *name, const std::string& value, Parcel *parcel) {
+        return parcel->writeCString(name)
+               ?: parcel->writeInt32(get_type_of<std::string>::value)
+               ?: parcel->writeCString(value.c_str());
+    }
+    template <> // static
+    status_t writeToParcel(
+            const char *name, const std::monostate&, Parcel *parcel) {
+        return parcel->writeCString(name)
+               ?: parcel->writeInt32(get_type_of<std::monostate>::value);
+    }
+
+    template <typename T>
+    static status_t extract(T *val, const char **bufferpptr, const char *bufferptrmax) {
+        static_assert(std::is_trivially_constructible<T>::value);
+        const size_t size = sizeof(*val);
+        if (*bufferpptr + size > bufferptrmax) {
+            ALOGE("%s: buffer exceeded with size %zu", __func__, size);
+            return BAD_VALUE;
+        }
+        memcpy(val, *bufferpptr, size);
+        *bufferpptr += size;
+        return NO_ERROR;
+    }
+    template <> // static
+    status_t extract(std::string *val, const char **bufferpptr, const char *bufferptrmax) {
+        const char *ptr = *bufferpptr;
+        while (*ptr != 0) {
+            if (ptr >= bufferptrmax) {
+                ALOGE("%s: buffer exceeded", __func__);
+                return BAD_VALUE;
+            }
+            ++ptr;
+        }
+        const size_t size = (ptr - *bufferpptr) + 1;
+        *val = *bufferpptr;
+        *bufferpptr += size;
+        return NO_ERROR;
+    }
+    template <> // static
+    status_t extract(std::pair<int64_t, int64_t> *val,
+            const char **bufferpptr, const char *bufferptrmax) {
+        const size_t size = sizeof(val->first) + sizeof(val->second);
+        if (*bufferpptr + size > bufferptrmax) {
+            ALOGE("%s: buffer exceeded with size %zu", __func__, size);
+            return BAD_VALUE;
+        }
+        memcpy(&val->first, *bufferpptr, sizeof(val->first));
+        memcpy(&val->second, *bufferpptr + sizeof(val->first), sizeof(val->second));
+        *bufferpptr += size;
+        return NO_ERROR;
+    }
+    template <> // static
+    status_t extract(std::monostate *, const char **, const char *) {
+        return NO_ERROR;
+    }
 };
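
As a concrete reading of the sizeOfByteString() helpers above, a double-valued property named "latencyMs" occupies 21 bytes in the byte string; a standalone check of that arithmetic:

    #include <cstddef>

    // uint16 property length            2 bytes
    // uint8  type tag (kTypeDouble)     1 byte
    // "latencyMs" plus NUL terminator  10 bytes  (strlen("latencyMs") + 1)
    // double payload                    8 bytes
    constexpr std::size_t kLatencyMsPropSize = 2 + 1 + (9 + 1) + 8;
    static_assert(kLatencyMsPropSize == 21, "matches sizeOfByteString(\"latencyMs\", 0.0)");
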
 
 /**
@@ -329,7 +638,7 @@
 };
 
 /**
- * MediaMetrics LogItem is a stack allocated media analytics item used for
+ * MediaMetrics LogItem is a stack allocated mediametrics item used for
  * fast logging.  It falls over to a malloc if needed.
  *
  * This is templated with a buffer size to allocate on the stack.
@@ -369,23 +678,166 @@
  *
  * The Item is designed for the service as it has getters.
  */
-class Item : public mediametrics::BaseItem {
-    friend class MediaMetricsJNI;           // TODO: remove this access
-
+class Item final : public mediametrics::BaseItem {
 public:
 
-     // TODO: remove this duplicate definition when frameworks base is updated.
-            enum Type {
-                kTypeNone = 0,
-                kTypeInt32 = 1,
-                kTypeInt64 = 2,
-                kTypeDouble = 3,
-                kTypeCString = 4,
-                kTypeRate = 5,
-            };
+    class Prop {
+    public:
+        using Elem = std::variant<
+                std::monostate,               // kTypeNone
+                int32_t,                      // kTypeInt32
+                int64_t,                      // kTypeInt64
+                double,                       // kTypeDouble
+                std::string,                  // kTypeCString
+                std::pair<int64_t, int64_t>   // kTypeRate
+                >;
 
-    static constexpr const char * const kKeyNone = "none";
-    static constexpr const char * const kKeyAny = "any";
+        Prop() = default;
+        Prop(const Prop& other) {
+           *this = other;
+        }
+        Prop& operator=(const Prop& other) {
+            mName = other.mName;
+            mElem = other.mElem;
+            return *this;
+        }
+        Prop(Prop&& other) {
+            *this = std::move(other);
+        }
+        Prop& operator=(Prop&& other) {
+            mName = std::move(other.mName);
+            mElem = std::move(other.mElem);
+            return *this;
+        }
+
+        bool operator==(const Prop& other) const {
+            return mName == other.mName && mElem == other.mElem;
+        }
+        bool operator!=(const Prop& other) const {
+            return !(*this == other);
+        }
+
+        void clear() {
+            mName.clear();
+            mElem = std::monostate{};
+        }
+        void clearValue() {
+            mElem = std::monostate{};
+        }
+
+        const char *getName() const {
+            return mName.c_str();
+        }
+
+        void swap(Prop& other) {
+            std::swap(mName, other.mName);
+            std::swap(mElem, other.mElem);
+        }
+
+        void setName(const char *name) {
+            mName = name;
+        }
+
+        bool isNamed(const char *name) const {
+            return mName == name;
+        }
+
+        template <typename T> void visit(T f) const {
+            std::visit(f, mElem);
+        }
+
+        template <typename T> bool get(T *value) const {
+            auto pval = std::get_if<T>(&mElem);
+            if (pval != nullptr) {
+                *value = *pval;
+                return true;
+            }
+            return false;
+        }
+
+        const Elem& get() const {
+            return mElem;
+        }
+
+        template <typename T> void set(const T& value) {
+            mElem = value;
+        }
+
+        template <typename T> void add(const T& value) {
+            auto pval = std::get_if<T>(&mElem);
+            if (pval != nullptr) {
+                *pval += value;
+            } else {
+                mElem = value;
+            }
+        }
+
+        template <> void add(const std::pair<int64_t, int64_t>& value) {
+            auto pval = std::get_if<std::pair<int64_t, int64_t>>(&mElem);
+            if (pval != nullptr) {
+                pval->first += value.first;
+                pval->second += value.second;
+            } else {
+                mElem = value;
+            }
+        }
+
+        status_t writeToParcel(Parcel *parcel) const {
+            return std::visit([this, parcel](auto &value) {
+                    return BaseItem::writeToParcel(mName.c_str(), value, parcel);}, mElem);
+        }
+
+        void toStringBuffer(char *buffer, size_t length) const {
+            return std::visit([this, buffer, length](auto &value) {
+                BaseItem::toStringBuffer(mName.c_str(), value, buffer, length);}, mElem);
+        }
+
+        size_t getByteStringSize() const {
+            return std::visit([this](auto &value) {
+                return BaseItem::sizeOfByteString(mName.c_str(), value);}, mElem);
+        }
+
+        status_t writeToByteString(char **bufferpptr, char *bufferptrmax) const {
+            return std::visit([this, bufferpptr, bufferptrmax](auto &value) {
+                return BaseItem::writeToByteString(mName.c_str(), value, bufferpptr, bufferptrmax);
+            }, mElem);
+        }
+
+        status_t readFromParcel(const Parcel& data);
+
+        status_t readFromByteString(const char **bufferpptr, const char *bufferptrmax);
+
+    private:
+        std::string mName;
+        Elem mElem;
+    };
+
+    // Iteration of props within item
+    class iterator {
+    public:
+        iterator(const std::map<std::string, Prop>::const_iterator &_it) : it(_it) { }
+        iterator &operator++() {
+            ++it;
+            return *this;
+        }
+        bool operator!=(iterator &other) const {
+            return it != other.it;
+        }
+        const Prop &operator*() const {
+            return it->second;
+        }
+
+    private:
+        std::map<std::string, Prop>::const_iterator it;
+    };
+
+    iterator begin() const {
+        return iterator(mProps.cbegin());
+    }
+
+    iterator end() const {
+        return iterator(mProps.cend());
+    }
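
A short sketch of walking the map-backed props through the iterator above (illustrative; assumes LOG_TAG is defined by the including file):

    void dumpProps(const android::mediametrics::Item &item) {
        for (const auto &prop : item) {                   // uses begin()/end() above
            char buffer[256];
            prop.toStringBuffer(buffer, sizeof(buffer));  // renders "name=value:"
            ALOGD("%s", buffer);
        }
    }
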
 
         enum {
             PROTO_V0 = 0,
@@ -400,22 +852,22 @@
         : mKey(key) { }
     Item() = default;
 
-    Item(const Item&) = delete;
-    Item &operator=(const Item&) = delete;
+    // We enable default copy and move constructors and make this class final
+    // to prevent a derived class; this avoids possible data slicing.
+    Item(const Item& other) = default;
+    Item(Item&& other) = default;
+    Item& operator=(const Item& other) = default;
+    Item& operator=(Item&& other) = default;
 
     bool operator==(const Item& other) const {
-        if (mPropCount != other.mPropCount
-            || mPid != other.mPid
-            || mUid != other.mUid
-            || mPkgName != other.mPkgName
-            || mPkgVersionCode != other.mPkgVersionCode
-            || mKey != other.mKey
-            || mTimestamp != other.mTimestamp) return false;
-         for (size_t i = 0; i < mPropCount; ++i) {
-             Prop *p = other.findProp(mProps[i].getName());
-             if (p == nullptr || mProps[i] != *p) return false;
-         }
-         return true;
+        return mPid == other.mPid
+            && mUid == other.mUid
+            && mPkgName == other.mPkgName
+            && mPkgVersionCode == other.mPkgVersionCode
+            && mKey == other.mKey
+            && mTimestamp == other.mTimestamp
+            && mProps == other.mProps
+            ;
     }
     bool operator!=(const Item& other) const {
         return !(*this == other);
@@ -435,9 +887,17 @@
         // access functions for the class
         ~Item();
 
-        // reset all contents, discarding any extra data
-        void clear();
-        Item *dup();
+    void clear() {
+        mPid = -1;
+        mUid = -1;
+        mPkgName.clear();
+        mPkgVersionCode = 0;
+        mTimestamp = 0;
+        mKey.clear();
+        mProps.clear();
+    }
+
+    Item *dup() const { return new Item(*this); }
 
     Item &setKey(const char *key) {
         mKey = key;
@@ -446,11 +906,11 @@
     const std::string& getKey() const { return mKey; }
 
     // # of properties in the record
-    size_t count() const { return mPropCount; }
+    size_t count() const { return mProps.size(); }
 
     template<typename S, typename T>
     Item &set(S key, T value) {
-        allocateProp(key)->set(value);
+        findOrAllocateProp(key).set(value);
         return *this;
     }
 
@@ -475,7 +935,7 @@
     // type-mismatch counts as "wasn't there".
     template<typename S, typename T>
     Item &add(S key, T value) {
-        allocateProp(key)->add(value);
+        findOrAllocateProp(key).add(value);
         return *this;
     }
 
@@ -497,7 +957,7 @@
     // NULL parameter value suppresses storage of value.
     template<typename S, typename T>
     bool get(S key, T *value) const {
-        Prop *prop = findProp(key);
+        const Prop *prop = findProp(key);
         return prop != nullptr && prop->get(value);
     }
 
@@ -526,9 +986,9 @@
     }
     // Caller owns the returned string
     bool getCString(const char *key, char **value) const {
-        const char *cs;
-        if (get(key, &cs)) {
-            *value = cs != nullptr ? strdup(cs) : nullptr;
+        std::string s;
+        if (get(key, &s)) {
+            *value = strdup(s.c_str());
             return true;
         }
         return false;
@@ -537,6 +997,11 @@
         return get(key, value);
     }
 
+    const Prop::Elem* get(const char *key) const {
+        const Prop *prop = findProp(key);
+        return prop == nullptr ? nullptr : &prop->get();
+    }
+
         // Deliver the item to MediaMetrics
         bool selfrecord();
 
@@ -582,362 +1047,109 @@
         const char *toCString();
         const char *toCString(int version);
 
-    protected:
+    /**
+     * Returns true if the item has a property with a target value.
+     *
+     * If propName is nullptr, hasPropElem() returns false.
+     *
+     * \param propName is the property name.
+     * \param elem is the value to match.  std::monostate matches any.
+     */
+    bool hasPropElem(const char *propName, const Prop::Elem& elem) const {
+        if (propName == nullptr) return false;
+        const Prop::Elem *e = get(propName);
+        return e != nullptr && (std::holds_alternative<std::monostate>(elem) || elem == *e);
+    }
 
-        // merge fields from arg into this
-        // with rules for first/last/add, etc
-        // XXX: document semantics and how they are indicated
-        // caller continues to own 'incoming'
-        bool merge(Item *incoming);
+    /**
+     * Returns -2, -1, 0 (success) if the item has a property (wildcard matched) with a
+     * target value.
+     *
+     * The enum RecursiveWildcardCheck designates the meaning of the returned value.
+     *
+     * RECURSIVE_WILDCARD_CHECK_NO_MATCH_NO_WILDCARD = -2,
+     * RECURSIVE_WILDCARD_CHECK_NO_MATCH_WILDCARD_FOUND = -1,
+     * RECURSIVE_WILDCARD_CHECK_MATCH_FOUND = 0.
+     *
+     * If url is nullptr, RECURSIVE_WILDCARD_CHECK_NO_MATCH_NO_WILDCARD is returned.
+     *
+     * \param url is the full item + property name, which may have wildcards '*'
+     *            denoting an arbitrary sequence of 0 or more characters.
+     * \param elem is the target property value to match. std::monostate matches any.
+     * \return 0 if the property was matched,
+     *         -1 if the property was not matched and a wildcard char was encountered,
+     *         -2 if the property was not matched with no wildcard char encountered.
+     */
+    enum RecursiveWildcardCheck {
+        RECURSIVE_WILDCARD_CHECK_NO_MATCH_NO_WILDCARD = -2,
+        RECURSIVE_WILDCARD_CHECK_NO_MATCH_WILDCARD_FOUND = -1,
+        RECURSIVE_WILDCARD_CHECK_MATCH_FOUND = 0,
+    };
+
+    enum RecursiveWildcardCheck recursiveWildcardCheckElem(
+        const char *url, const Prop::Elem& elem) const {
+        if (url == nullptr) return RECURSIVE_WILDCARD_CHECK_NO_MATCH_NO_WILDCARD;
+        return recursiveWildcardCheckElem(getKey().c_str(), url, elem);
+    }
 
 private:
+
+    enum RecursiveWildcardCheck recursiveWildcardCheckElem(
+            const char *itemKeyPtr, const char *url, const Prop::Elem& elem) const {
+        for (; *url && *itemKeyPtr; ++url, ++itemKeyPtr) {
+            if (*url != *itemKeyPtr) {
+                if (*url == '*') { // wildcard
+                    ++url;
+                    while (true) {
+                        if (recursiveWildcardCheckElem(itemKeyPtr, url, elem)
+                                == RECURSIVE_WILDCARD_CHECK_MATCH_FOUND) {
+                            return RECURSIVE_WILDCARD_CHECK_MATCH_FOUND;
+                        }
+                        if (*itemKeyPtr == 0) break;
+                        ++itemKeyPtr;
+                    }
+                    return RECURSIVE_WILDCARD_CHECK_NO_MATCH_WILDCARD_FOUND;
+                }
+                return RECURSIVE_WILDCARD_CHECK_NO_MATCH_NO_WILDCARD;
+            }
+        }
+        if (itemKeyPtr[0] != 0 || url[0] != '.') {
+            return RECURSIVE_WILDCARD_CHECK_NO_MATCH_NO_WILDCARD;
+        }
+        const char *propName = url + 1; // skip the '.'
+        return hasPropElem(propName, elem)
+                ? RECURSIVE_WILDCARD_CHECK_MATCH_FOUND
+                : RECURSIVE_WILDCARD_CHECK_NO_MATCH_NO_WILDCARD;
+    }
+
     // handle Parcel version 0
     int32_t writeToParcel0(Parcel *) const;
     int32_t readFromParcel0(const Parcel&);
 
-
-
-    // checks equality even with nullptr.
-    static bool stringEquals(const char *a, const char *b) {
-        if (a == nullptr) {
-            return b == nullptr;
-        } else {
-            return b != nullptr && strcmp(a, b) == 0;
-        }
+    const Prop *findProp(const char *key) const {
+        auto it = mProps.find(key);
+        return it != mProps.end() ? &it->second : nullptr;
     }
 
-public:
-
-    class Prop {
-    friend class MediaMetricsJNI;           // TODO: remove this access
-    public:
-        Prop() = default;
-        Prop(const Prop& other) {
-           *this = other;
-        }
-        Prop& operator=(const Prop& other) {
-            if (other.mName != nullptr) {
-                mName = strdup(other.mName);
-            } else {
-                mName = nullptr;
-            }
-            mType = other.mType;
-            switch (mType) {
-            case kTypeInt32:
-                u.int32Value = other.u.int32Value;
-                break;
-            case kTypeInt64:
-                u.int64Value = other.u.int64Value;
-                break;
-            case kTypeDouble:
-                u.doubleValue = other.u.doubleValue;
-                break;
-            case kTypeCString:
-                u.CStringValue = strdup(other.u.CStringValue);
-                break;
-            case kTypeRate:
-                u.rate = other.u.rate;
-                break;
-            case kTypeNone:
-                break;
-            default:
-                // abort?
-                break;
-            }
-            return *this;
-        }
-        bool operator==(const Prop& other) const {
-            if (!stringEquals(mName, other.mName)
-                    || mType != other.mType) return false;
-            switch (mType) {
-            case kTypeInt32:
-                return u.int32Value == other.u.int32Value;
-            case kTypeInt64:
-                return u.int64Value == other.u.int64Value;
-            case kTypeDouble:
-                return u.doubleValue == other.u.doubleValue;
-            case kTypeCString:
-                return stringEquals(u.CStringValue, other.u.CStringValue);
-            case kTypeRate:
-                return u.rate == other.u.rate;
-            case kTypeNone:
-            default:
-                return true;
-            }
-        }
-        bool operator!=(const Prop& other) const {
-            return !(*this == other);
-        }
-
-        void clear() {
-            free(mName);
-            mName = nullptr;
-            clearValue();
-        }
-        void clearValue() {
-            if (mType == kTypeCString) {
-                free(u.CStringValue);
-                u.CStringValue = nullptr;
-            }
-            mType = kTypeNone;
-        }
-
-        Type getType() const {
-            return mType;
-        }
-
-        const char *getName() const {
-            return mName;
-        }
-
-        void swap(Prop& other) {
-            std::swap(mName, other.mName);
-            std::swap(mType, other.mType);
-            std::swap(u, other.u);
-        }
-
-        void setName(const char *name) {
-            free(mName);
-            if (name != nullptr) {
-                mName = strdup(name);
-            } else {
-                mName = nullptr;
-            }
-        }
-
-        bool isNamed(const char *name) const {
-            return stringEquals(name, mName);
-        }
-
-        template <typename T> void visit(T f) const {
-            switch (mType) {
-            case Item::kTypeInt32:
-                f(u.int32Value);
-                return;
-            case Item::kTypeInt64:
-                f(u.int64Value);
-                return;
-            case Item::kTypeDouble:
-                f(u.doubleValue);
-                return;
-            case Item::kTypeRate:
-                f(u.rate);
-                return;
-            case Item::kTypeCString:
-                f(u.CStringValue);
-                return;
-            default:
-                return;
-            }
-        }
-
-        template <typename T> bool get(T *value) const = delete;
-        template <>
-        bool get(int32_t *value) const {
-           if (mType != kTypeInt32) return false;
-           if (value != nullptr) *value = u.int32Value;
-           return true;
-        }
-        template <>
-        bool get(int64_t *value) const {
-           if (mType != kTypeInt64) return false;
-           if (value != nullptr) *value = u.int64Value;
-           return true;
-        }
-        template <>
-        bool get(double *value) const {
-           if (mType != kTypeDouble) return false;
-           if (value != nullptr) *value = u.doubleValue;
-           return true;
-        }
-        template <>
-        bool get(const char** value) const {
-            if (mType != kTypeCString) return false;
-            if (value != nullptr) *value = u.CStringValue;
-            return true;
-        }
-        template <>
-        bool get(std::string* value) const {
-            if (mType != kTypeCString) return false;
-            if (value != nullptr) *value = u.CStringValue;
-            return true;
-        }
-        template <>
-        bool get(std::pair<int64_t, int64_t> *value) const {
-           if (mType != kTypeRate) return false;
-           if (value != nullptr) {
-               *value = u.rate;
-           }
-           return true;
-        }
-
-        template <typename T> void set(const T& value) = delete;
-        template <>
-        void set(const int32_t& value) {
-            mType = kTypeInt32;
-            u.int32Value = value;
-        }
-        template <>
-        void set(const int64_t& value) {
-            mType = kTypeInt64;
-            u.int64Value = value;
-        }
-        template <>
-        void set(const double& value) {
-            mType = kTypeDouble;
-            u.doubleValue = value;
-        }
-        template <>
-        void set(const char* const& value) {
-            if (mType == kTypeCString) {
-                free(u.CStringValue);
-            } else {
-                mType = kTypeCString;
-            }
-            if (value == nullptr) {
-                u.CStringValue = nullptr;
-            } else {
-                size_t len = strlen(value);
-                if (len > UINT16_MAX - 1) {
-                    len = UINT16_MAX - 1;
-                }
-                u.CStringValue = (char *)malloc(len + 1);
-                strncpy(u.CStringValue, value, len);
-                u.CStringValue[len] = 0;
-            }
-        }
-        template <>
-        void set(const std::pair<int64_t, int64_t> &value) {
-            mType = kTypeRate;
-            u.rate = {value.first, value.second};
-        }
-
-        template <typename T> void add(const T& value) = delete;
-        template <>
-        void add(const int32_t& value) {
-            if (mType == kTypeInt32) {
-                u.int32Value += value;
-            } else {
-                mType = kTypeInt32;
-                u.int32Value = value;
-            }
-        }
-        template <>
-        void add(const int64_t& value) {
-            if (mType == kTypeInt64) {
-                u.int64Value += value;
-            } else {
-                mType = kTypeInt64;
-                u.int64Value = value;
-            }
-        }
-        template <>
-        void add(const double& value) {
-            if (mType == kTypeDouble) {
-                u.doubleValue += value;
-            } else {
-                mType = kTypeDouble;
-                u.doubleValue = value;
-            }
-        }
-        template <>
-        void add(const std::pair<int64_t, int64_t>& value) {
-            if (mType == kTypeRate) {
-                u.rate.first += value.first;
-                u.rate.second += value.second;
-            } else {
-                mType = kTypeRate;
-                u.rate = value;
-            }
-        }
-
-        status_t writeToParcel(Parcel *data) const;
-        status_t readFromParcel(const Parcel& data);
-        void toString(char *buffer, size_t length) const;
-        size_t getByteStringSize() const;
-        status_t writeToByteString(char **bufferpptr, char *bufferptrmax) const;
-        status_t readFromByteString(const char **bufferpptr, const char *bufferptrmax);
-
-    // TODO: make private (and consider converting to std::variant)
-    // private:
-        char *mName = nullptr;
-        Type mType = kTypeNone;
-        union u__ {
-            u__() { zero(); }
-            u__(u__ &&other) {
-                *this = std::move(other);
-            }
-            u__& operator=(u__ &&other) {
-                memcpy(this, &other, sizeof(*this));
-                other.zero();
-                return *this;
-            }
-            void zero() { memset(this, 0, sizeof(*this)); }
-
-            int32_t int32Value;
-            int64_t int64Value;
-            double doubleValue;
-            char *CStringValue;
-            std::pair<int64_t, int64_t> rate;
-        } u;
-    };
-
-    class iterator {
-    public:
-       iterator(size_t pos, const Item &_item)
-           : i(std::min(pos, _item.count()))
-           , item(_item) { }
-       iterator &operator++() {
-           i = std::min(i + 1, item.count());
-           return *this;
-       }
-       bool operator!=(iterator &other) const {
-           return i != other.i;
-       }
-       Prop &operator*() const {
-           return item.mProps[i];
-       }
-
-    private:
-      size_t i;
-      const Item &item;
-    };
-
-    iterator begin() const {
-        return iterator(0, *this);
-    }
-    iterator end() const {
-        return iterator(SIZE_MAX, *this);
+    Prop &findOrAllocateProp(const char *key) {
+        auto it = mProps.find(key);
+        if (it != mProps.end()) return it->second;
+        Prop &prop = mProps[key];
+        prop.setName(key);
+        return prop;
     }
 
-private:
-
-    // TODO: make prop management class
-    size_t findPropIndex(const char *name) const;
-    Prop *findProp(const char *name) const;
-    Prop *allocateProp();
-
-        enum {
-            kGrowProps = 10
-        };
-        bool growProps(int increment = kGrowProps);
-        Prop *allocateProp(const char *name);
-        bool removeProp(const char *name);
-    Prop *allocateProp(const std::string& name) { return allocateProp(name.c_str()); }
-
-        size_t mPropCount = 0;
-        size_t mPropSize = 0;
-        Prop *mProps = nullptr;
-
+    // Changes to member variables below require changes to clear().
     pid_t         mPid = -1;
     uid_t         mUid = -1;
     std::string   mPkgName;
     int64_t       mPkgVersionCode = 0;
-    std::string   mKey{kKeyNone};
+    std::string   mKey;
     nsecs_t       mTimestamp = 0;
+    std::map<std::string, Prop> mProps;
 };
 
 } // namespace mediametrics
 } // namespace android
 
-#endif
+#endif // ANDROID_MEDIA_MEDIAMETRICSITEM_H
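
Pulling the pieces together, a typical producer-side use of the audio keys, properties, and the variant-backed Item might look like the sketch below (the include path is assumed; selfrecord() needs a reachable MediaMetrics service):

    #define LOG_TAG "MediaMetricsItemExample"
    #include <media/MediaMetricsItem.h>    // assumed include path

    using android::mediametrics::Item;

    void logTrackCreation(int32_t trackId, double latencyMs) {
        // Keys are formed by appending an id to a prefix, e.g. "audio.track.42".
        const std::string key =
                std::string(AMEDIAMETRICS_KEY_PREFIX_AUDIO_TRACK) + std::to_string(trackId);

        Item item;
        item.setKey(key.c_str());
        item.set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_CTOR)
            .set(AMEDIAMETRICS_PROP_LATENCYMS, latencyMs);
        (void)item.selfrecord();           // deliver the item to the MediaMetrics service

        // Typed retrieval from the same item:
        double storedLatencyMs = 0.;
        if (item.get(AMEDIAMETRICS_PROP_LATENCYMS, &storedLatencyMs)) {
            // storedLatencyMs == latencyMs
        }

        // Wildcard queries match "<item key>.<property name>":
        if (item.recursiveWildcardCheckElem(
                "audio.track.*." AMEDIAMETRICS_PROP_EVENT,
                std::string(AMEDIAMETRICS_PROP_EVENT_VALUE_CTOR))
                == Item::RECURSIVE_WILDCARD_CHECK_MATCH_FOUND) {
            // this item records a "ctor" event
        }
    }
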
diff --git a/media/libmediaplayerservice/Android.bp b/media/libmediaplayerservice/Android.bp
index 762ed19..5301f5c 100644
--- a/media/libmediaplayerservice/Android.bp
+++ b/media/libmediaplayerservice/Android.bp
@@ -39,7 +39,6 @@
         "libpowermanager",
         "libstagefright",
         "libstagefright_foundation",
-        "libstagefright_framecapture_utils",
         "libstagefright_httplive",
         "libutils",
     ],
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index f2a38dd..81ffcbc 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -58,7 +58,6 @@
 #include <media/Metadata.h>
 #include <media/AudioTrack.h>
 #include <media/MemoryLeakTrackUtil.h>
-#include <media/stagefright/FrameCaptureProcessor.h>
 #include <media/stagefright/InterfaceUtils.h>
 #include <media/stagefright/MediaCodecConstants.h>
 #include <media/stagefright/MediaCodecList.h>
@@ -448,8 +447,6 @@
     mNextConnId = 1;
 
     MediaPlayerFactory::registerBuiltinFactories();
-    // initialize the frame capture utilities
-    (void)FrameCaptureProcessor::getInstance();
 }
 
 MediaPlayerService::~MediaPlayerService()
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index 001dccf..c10e6e0 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -2002,7 +2002,6 @@
         (*meta)->setInt32(kKeyTimeScale, mMovieTimeScale);
     }
     if (mOutputFormat != OUTPUT_FORMAT_WEBM) {
-        (*meta)->setInt32(kKey64BitFileOffset, mUse64BitFileOffset);
         if (mTrackEveryTimeDurationUs > 0) {
             (*meta)->setInt64(kKeyTrackTimeStatus, mTrackEveryTimeDurationUs);
         }
diff --git a/media/libmediaplayerservice/nuplayer/GenericSource.cpp b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
index 4657f9e..0eaa503 100644
--- a/media/libmediaplayerservice/nuplayer/GenericSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
@@ -29,7 +29,7 @@
 #include <datasource/NuCachedSource2.h>
 #include <media/DataSource.h>
 #include <media/MediaBufferHolder.h>
-#include <media/MediaSource.h>
+#include <media/stagefright/MediaSource.h>
 #include <android/IMediaExtractorService.h>
 #include <media/IMediaHTTPService.h>
 #include <media/stagefright/foundation/ABuffer.h>
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index 3388097..c1c4b55 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -1798,7 +1798,9 @@
 }
 
 void NuPlayer::closeAudioSink() {
-    mRenderer->closeAudioSink();
+    if (mRenderer != NULL) {
+        mRenderer->closeAudioSink();
+    }
 }
 
 void NuPlayer::restartAudio(
diff --git a/media/libmediaplayerservice/nuplayer/StreamingSource.cpp b/media/libmediaplayerservice/nuplayer/StreamingSource.cpp
index f21d2b3..14f1323 100644
--- a/media/libmediaplayerservice/nuplayer/StreamingSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/StreamingSource.cpp
@@ -24,7 +24,7 @@
 #include "AnotherPacketSource.h"
 #include "NuPlayerStreamListener.h"
 
-#include <media/MediaSource.h>
+#include <media/stagefright/MediaSource.h>
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AMessage.h>
diff --git a/media/libmediatranscoding/Android.bp b/media/libmediatranscoding/Android.bp
index 8f0e278..9d0c534 100644
--- a/media/libmediatranscoding/Android.bp
+++ b/media/libmediatranscoding/Android.bp
@@ -6,10 +6,11 @@
         "aidl/android/media/IMediaTranscodingService.aidl",
         "aidl/android/media/ITranscodingServiceClient.aidl",
         "aidl/android/media/TranscodingErrorCode.aidl",
+        "aidl/android/media/TranscodingJobPriority.aidl",
         "aidl/android/media/TranscodingType.aidl",
         "aidl/android/media/TranscodingVideoCodecType.aidl",
-        "aidl/android/media/TranscodingJob.aidl",
-        "aidl/android/media/TranscodingRequest.aidl",
-        "aidl/android/media/TranscodingResult.aidl",
+        "aidl/android/media/TranscodingJobParcel.aidl",
+        "aidl/android/media/TranscodingRequestParcel.aidl",
+        "aidl/android/media/TranscodingResultParcel.aidl",
     ],
 }
diff --git a/media/libmediatranscoding/aidl/android/media/IMediaTranscodingService.aidl b/media/libmediatranscoding/aidl/android/media/IMediaTranscodingService.aidl
index cae8ff0..798300a 100644
--- a/media/libmediatranscoding/aidl/android/media/IMediaTranscodingService.aidl
+++ b/media/libmediatranscoding/aidl/android/media/IMediaTranscodingService.aidl
@@ -16,8 +16,8 @@
 
 package android.media;
 
-import android.media.TranscodingJob;
-import android.media.TranscodingRequest;
+import android.media.TranscodingJobParcel;
+import android.media.TranscodingRequestParcel;
 import android.media.ITranscodingServiceClient;
 
 /**
@@ -27,6 +27,25 @@
  */
 interface IMediaTranscodingService {
     /**
+     * All MediaTranscoding service and device Binder calls may return a
+     * ServiceSpecificException with the following error codes
+     */
+    const int ERROR_PERMISSION_DENIED = 1;
+    const int ERROR_ALREADY_EXISTS = 2;
+    const int ERROR_ILLEGAL_ARGUMENT = 3;
+    const int ERROR_DISCONNECTED = 4;
+    const int ERROR_TIMED_OUT = 5;
+    const int ERROR_DISABLED = 6;
+    const int ERROR_INVALID_OPERATION = 7;
+
+    /**
+     * Default UID/PID values for non-privileged callers of
+     * registerClient().
+     */
+    const int USE_CALLING_UID = -1;
+    const int USE_CALLING_PID = -1;
+
+    /**
      * Register the client with the MediaTranscodingService.
      *
      * Client must call this function to register itself with the service in order to perform
@@ -34,10 +53,16 @@
      * Client should save this Id and use it for all the transactions with the service.
      *
      * @param client interface for the MediaTranscodingService to call the client.
+     * @param opPackageName op package name of the client.
+     * @param clientUid user id of the client.
+     * @param clientPid process id of the client.
      * @return a unique positive Id assigned to the client by the service, -1  means failed to
      * register.
      */
-    int registerClient(in ITranscodingServiceClient client);
+    int registerClient(in ITranscodingServiceClient client,
+                       in String opPackageName,
+                       in int clientUid,
+                       in int clientPid);
 
     /**
     * Unregister the client with the MediaTranscodingService.
@@ -58,8 +83,8 @@
      * @return a unique positive jobId generated by the MediaTranscodingService, -1 means failure.
      */
     int submitRequest(in int clientId,
-                      in TranscodingRequest request,
-                      out TranscodingJob job);
+                      in TranscodingRequestParcel request,
+                      out TranscodingJobParcel job);
 
     /**
      * Cancels a transcoding job.
@@ -77,5 +102,5 @@
      * @param job(output variable) the TranscodingJob associated with the jobId.
      * @return true if succeeds, false otherwise.
      */
-    boolean getJobWithId(in int jobId, out TranscodingJob job);
+    boolean getJobWithId(in int jobId, out TranscodingJobParcel job);
 }
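
A hypothetical client-side call into this interface, assuming the cpp AIDL backend's usual mappings (String maps to String16, the int return becomes a trailing out-parameter, and errors surface as android::binder::Status); header paths and the service name are placeholders:

    #include <binder/IServiceManager.h>
    #include <android/media/IMediaTranscodingService.h>      // assumed generated header path
    #include <android/media/ITranscodingServiceClient.h>     // assumed generated header path

    using namespace android;
    using media::IMediaTranscodingService;
    using media::ITranscodingServiceClient;

    int32_t registerWithService(const sp<ITranscodingServiceClient>& callback) {
        sp<IMediaTranscodingService> service = interface_cast<IMediaTranscodingService>(
                defaultServiceManager()->getService(String16("media.transcoding")));  // assumed name
        if (service == nullptr) return -1;

        int32_t clientId = -1;
        binder::Status status = service->registerClient(
                callback,
                String16("com.example.app"),                  // opPackageName (example value)
                IMediaTranscodingService::USE_CALLING_UID,
                IMediaTranscodingService::USE_CALLING_PID,
                &clientId);
        if (!status.isOk()) {
            if (status.exceptionCode() == binder::Status::EX_SERVICE_SPECIFIC
                    && status.serviceSpecificErrorCode()
                            == IMediaTranscodingService::ERROR_PERMISSION_DENIED) {
                // caller may not act on behalf of the requested clientUid/clientPid
            }
            return -1;
        }
        return clientId;  // positive on success, per the interface contract
    }
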
diff --git a/media/libmediatranscoding/aidl/android/media/ITranscodingServiceClient.aidl b/media/libmediatranscoding/aidl/android/media/ITranscodingServiceClient.aidl
index 29b6f35..e23c833 100644
--- a/media/libmediatranscoding/aidl/android/media/ITranscodingServiceClient.aidl
+++ b/media/libmediatranscoding/aidl/android/media/ITranscodingServiceClient.aidl
@@ -17,8 +17,8 @@
 package android.media;
 
 import android.media.TranscodingErrorCode;
-import android.media.TranscodingJob;
-import android.media.TranscodingResult;
+import android.media.TranscodingJobParcel;
+import android.media.TranscodingResultParcel;
 
 /**
  * ITranscodingServiceClient interface for the MediaTranscodingService to communicate with the
@@ -39,7 +39,7 @@
     * @param jobId jobId assigned by the MediaTranscodingService upon receiving request.
     * @param result contains the transcoded file stats and other transcoding metrics if requested.
     */
-    oneway void onTranscodingFinished(in int jobId, in TranscodingResult result);
+    oneway void onTranscodingFinished(in int jobId, in TranscodingResultParcel result);
 
     /**
     * Called when the transcoding associated with the jobId failed.
@@ -60,7 +60,9 @@
     * @param oldAwaitNumber previous number of jobs ahead of current job.
     * @param newAwaitNumber updated number of jobs ahead of current job.
     */
-    oneway void onAwaitNumberOfJobsChanged(in int jobId, in int oldAwaitNumber, in int newAwaitNumber);
+    oneway void onAwaitNumberOfJobsChanged(in int jobId,
+                                           in int oldAwaitNumber,
+                                           in int newAwaitNumber);
 
     /**
     * Called when there is an update on the progress of the TranscodingJob.
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingJob.aidl b/media/libmediatranscoding/aidl/android/media/TranscodingJobParcel.aidl
similarity index 91%
rename from media/libmediatranscoding/aidl/android/media/TranscodingJob.aidl
rename to media/libmediatranscoding/aidl/android/media/TranscodingJobParcel.aidl
index 42ad8ad..d912c38 100644
--- a/media/libmediatranscoding/aidl/android/media/TranscodingJob.aidl
+++ b/media/libmediatranscoding/aidl/android/media/TranscodingJobParcel.aidl
@@ -16,7 +16,7 @@
 
 package android.media;
 
-import android.media.TranscodingRequest;
+import android.media.TranscodingRequestParcel;
 
 /**
  * TranscodingJob is generated by the MediaTranscodingService upon receiving a TranscodingRequest.
@@ -26,7 +26,7 @@
  * {@hide}
  */
 //TODO(hkuang): Implement the parcelable.
-parcelable TranscodingJob {
+parcelable TranscodingJobParcel {
     /**
      * A unique positive Id generated by the MediaTranscodingService.
      */
@@ -35,7 +35,7 @@
     /**
      * The request associated with the TranscodingJob.
      */
-    TranscodingRequest request;
+    TranscodingRequestParcel request;
 
     /**
     * Current number of jobs ahead of this job. The service schedules the job based on the priority
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingJobPriority.aidl b/media/libmediatranscoding/aidl/android/media/TranscodingJobPriority.aidl
new file mode 100644
index 0000000..1a5d81a
--- /dev/null
+++ b/media/libmediatranscoding/aidl/android/media/TranscodingJobPriority.aidl
@@ -0,0 +1,37 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * Priority of a transcoding job.
+ *
+ * {@hide}
+ */
+@Backing(type="int")
+enum TranscodingJobPriority {
+    // TODO(hkuang): define what each priority level actually mean.
+    kUnspecified = 0,
+    kLow = 1,
+    /**
+     * 2 ~ 20 is reserved for future use.
+     */
+    kNormal = 21,
+    /**
+     * 22 ~ 30 is reserved for future use.
+     */
+    kHigh = 31,
+}
\ No newline at end of file
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingRequest.aidl b/media/libmediatranscoding/aidl/android/media/TranscodingRequestParcel.aidl
similarity index 91%
rename from media/libmediatranscoding/aidl/android/media/TranscodingRequest.aidl
rename to media/libmediatranscoding/aidl/android/media/TranscodingRequestParcel.aidl
index 6b51ee3..7b7986d 100644
--- a/media/libmediatranscoding/aidl/android/media/TranscodingRequest.aidl
+++ b/media/libmediatranscoding/aidl/android/media/TranscodingRequestParcel.aidl
@@ -16,6 +16,7 @@
 
 package android.media;
 
+import android.media.TranscodingJobPriority;
 import android.media.TranscodingType;
 
 /**
@@ -24,7 +25,7 @@
  * {@hide}
  */
 //TODO(hkuang): Implement the parcelable.
-parcelable TranscodingRequest {
+parcelable TranscodingRequestParcel {
     /**
      * Name of file to be transcoded.
      */
@@ -48,8 +49,7 @@
     /**
      * Priority of this transcoding. Service will schedule the transcoding based on the priority.
      */
-    // TODO(hkuang): Define the priority level.
-    int priority;
+    TranscodingJobPriority priority;
 
     /**
      * Whether to receive update on progress and change of awaitNumJobs.
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingResult.aidl b/media/libmediatranscoding/aidl/android/media/TranscodingResultParcel.aidl
similarity index 93%
rename from media/libmediatranscoding/aidl/android/media/TranscodingResult.aidl
rename to media/libmediatranscoding/aidl/android/media/TranscodingResultParcel.aidl
index a41e025..65c49e7 100644
--- a/media/libmediatranscoding/aidl/android/media/TranscodingResult.aidl
+++ b/media/libmediatranscoding/aidl/android/media/TranscodingResultParcel.aidl
@@ -16,15 +16,13 @@
 
 package android.media;
 
-import android.media.TranscodingJob;
-
 /**
  * Result of the transcoding.
  *
  * {@hide}
  */
 //TODO(hkuang): Implement the parcelable.
-parcelable TranscodingResult {
+parcelable TranscodingResultParcel {
     /**
      * The jobId associated with the TranscodingResult.
      */
diff --git a/media/libstagefright/AACWriter.cpp b/media/libstagefright/AACWriter.cpp
index e173974..9c0a7a5 100644
--- a/media/libstagefright/AACWriter.cpp
+++ b/media/libstagefright/AACWriter.cpp
@@ -31,7 +31,7 @@
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaErrors.h>
 #include <media/stagefright/MetaData.h>
-#include <media/MediaSource.h>
+#include <media/stagefright/MediaSource.h>
 #include <media/mediarecorder.h>
 
 namespace android {
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index ef9d253..960120f 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -1310,7 +1310,7 @@
     OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers;
     status_t err = configureOutputBuffersFromNativeWindow(
             &bufferCount, &bufferSize, &minUndequeuedBuffers,
-            false /* preregister */);
+            mFlags & kFlagPreregisterMetadataBuffers /* preregister */);
     if (err != OK)
         return err;
     mNumUndequeuedBuffers = minUndequeuedBuffers;
@@ -1896,6 +1896,19 @@
             setPortMode(kPortIndexInput, IOMX::kPortModePresetByteBuffer);
             err = OK; // ignore error for now
         }
+
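+        // Query the "OMX.google.android.index.preregisterMetadataBuffers" extension; if the
+        // component reports it enabled, pre-register metadata buffers when configuring output
+        // buffers from the native window (see the preregister flag passed above).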
+        OMX_INDEXTYPE index;
+        if (mOMXNode->getExtensionIndex(
+                "OMX.google.android.index.preregisterMetadataBuffers", &index) == OK) {
+            OMX_CONFIG_BOOLEANTYPE param;
+            InitOMXParams(&param);
+            param.bEnabled = OMX_FALSE;
+            if (mOMXNode->getParameter(index, &param, sizeof(param)) == OK) {
+                if (param.bEnabled == OMX_TRUE) {
+                    mFlags |= kFlagPreregisterMetadataBuffers;
+                }
+            }
+        }
     }
     if (haveNativeWindow) {
         sp<ANativeWindow> nativeWindow =
diff --git a/media/libstagefright/ACodecBufferChannel.cpp b/media/libstagefright/ACodecBufferChannel.cpp
index bf8f09c..e5115d9 100644
--- a/media/libstagefright/ACodecBufferChannel.cpp
+++ b/media/libstagefright/ACodecBufferChannel.cpp
@@ -20,6 +20,8 @@
 
 #include <numeric>
 
+#include <C2Buffer.h>
+
 #include <android/hardware/cas/native/1.0/IDescrambler.h>
 #include <android/hardware/drm/1.0/types.h>
 #include <binder/MemoryDealer.h>
@@ -250,6 +252,178 @@
     return OK;
 }
 
+status_t ACodecBufferChannel::attachBuffer(
+        const std::shared_ptr<C2Buffer> &c2Buffer,
+        const sp<MediaCodecBuffer> &buffer) {
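+    // Copy the contents of a single linear C2Buffer into the codec buffer; graphic and
+    // chunked buffer types are not handled here and return -ENOSYS.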
+    switch (c2Buffer->data().type()) {
+        case C2BufferData::LINEAR: {
+            if (c2Buffer->data().linearBlocks().size() != 1u) {
+                return -ENOSYS;
+            }
+            C2ConstLinearBlock block{c2Buffer->data().linearBlocks().front()};
+            C2ReadView view{block.map().get()};
+            if (view.capacity() > buffer->capacity()) {
+                return -ENOSYS;
+            }
+            memcpy(buffer->base(), view.data(), view.capacity());
+            buffer->setRange(0, view.capacity());
+            break;
+        }
+        case C2BufferData::GRAPHIC: {
+            // TODO
+            return -ENOSYS;
+        }
+        case C2BufferData::LINEAR_CHUNKS:  [[fallthrough]];
+        case C2BufferData::GRAPHIC_CHUNKS: [[fallthrough]];
+        default:
+            return -ENOSYS;
+    }
+
+    return OK;
+}
+
+int32_t ACodecBufferChannel::getHeapSeqNum(const sp<HidlMemory> &memory) {
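+    // Register each shared memory heap with the crypto object at most once; the returned
+    // heap sequence number is cached in mHeapSeqNumMap, keyed by the HidlMemory object.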
+    CHECK(mCrypto);
+    auto it = mHeapSeqNumMap.find(memory);
+    int32_t heapSeqNum = -1;
+    if (it == mHeapSeqNumMap.end()) {
+        heapSeqNum = mCrypto->setHeap(memory);
+        mHeapSeqNumMap.emplace(memory, heapSeqNum);
+    } else {
+        heapSeqNum = it->second;
+    }
+    return heapSeqNum;
+}
+
+status_t ACodecBufferChannel::attachEncryptedBuffer(
+        const sp<hardware::HidlMemory> &memory,
+        bool secure,
+        const uint8_t *key,
+        const uint8_t *iv,
+        CryptoPlugin::Mode mode,
+        CryptoPlugin::Pattern pattern,
+        size_t offset,
+        const CryptoPlugin::SubSample *subSamples,
+        size_t numSubSamples,
+        const sp<MediaCodecBuffer> &buffer) {
+    std::shared_ptr<const std::vector<const BufferInfo>> array(
+            std::atomic_load(&mInputBuffers));
+    BufferInfoIterator it = findClientBuffer(array, buffer);
+    if (it == array->end()) {
+        return -ENOENT;
+    }
+
+    native_handle_t *secureHandle = NULL;
+    if (secure) {
+        sp<SecureBuffer> secureData =
+                static_cast<SecureBuffer *>(it->mCodecBuffer.get());
+        if (secureData->getDestinationType() != ICrypto::kDestinationTypeNativeHandle) {
+            return BAD_VALUE;
+        }
+        secureHandle = static_cast<native_handle_t *>(secureData->getDestinationPointer());
+    }
+    size_t size = 0;
+    for (size_t i = 0; i < numSubSamples; ++i) {
+        size += subSamples[i].mNumBytesOfClearData + subSamples[i].mNumBytesOfEncryptedData;
+    }
+    ssize_t result = -1;
+    ssize_t codecDataOffset = 0;
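+    // Two paths follow: decrypt through ICrypto when a crypto object is attached, otherwise
+    // descramble through the CAS descrambler.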
+    if (mCrypto != NULL) {
+        AString errorDetailMsg;
+        hardware::drm::V1_0::DestinationBuffer destination;
+        if (secure) {
+            destination.type = DrmBufferType::NATIVE_HANDLE;
+            destination.secureMemory = hidl_handle(secureHandle);
+        } else {
+            destination.type = DrmBufferType::SHARED_MEMORY;
+            IMemoryToSharedBuffer(
+                    mDecryptDestination, mHeapSeqNum, &destination.nonsecureMemory);
+        }
+
+        int32_t heapSeqNum = getHeapSeqNum(memory);
+        hardware::drm::V1_0::SharedBuffer source{(uint32_t)heapSeqNum, offset, size};
+
+        result = mCrypto->decrypt(key, iv, mode, pattern,
+                source, it->mClientBuffer->offset(),
+                subSamples, numSubSamples, destination, &errorDetailMsg);
+
+        if (result < 0) {
+            return result;
+        }
+
+        if (destination.type == DrmBufferType::SHARED_MEMORY) {
+            memcpy(it->mCodecBuffer->base(), mDecryptDestination->unsecurePointer(), result);
+        }
+    } else {
+        // Here we cast CryptoPlugin::SubSample to hardware::cas::native::V1_0::SubSample
+        // directly; the structure definitions should match, as checked in DescramblerImpl.cpp.
+        hidl_vec<SubSample> hidlSubSamples;
+        hidlSubSamples.setToExternal((SubSample *)subSamples, numSubSamples, false /*own*/);
+
+        hardware::cas::native::V1_0::SharedBuffer srcBuffer = {
+                .heapBase = *memory,
+                .offset = (uint64_t) offset,
+                .size = size
+        };
+
+        DestinationBuffer dstBuffer;
+        if (secure) {
+            dstBuffer.type = BufferType::NATIVE_HANDLE;
+            dstBuffer.secureMemory = hidl_handle(secureHandle);
+        } else {
+            dstBuffer.type = BufferType::SHARED_MEMORY;
+            dstBuffer.nonsecureMemory = srcBuffer;
+        }
+
+        Status status = Status::OK;
+        hidl_string detailedError;
+        ScramblingControl sctrl = ScramblingControl::UNSCRAMBLED;
+
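+        // For the descrambler path, the "key" buffer carries the scrambling control in byte 0
+        // and a little-endian PES offset in bytes 2 and 3 (used as the codec data offset).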
+        if (key != NULL) {
+            sctrl = (ScramblingControl)key[0];
+            // Adjust for the PES offset
+            codecDataOffset = key[2] | (key[3] << 8);
+        }
+
+        auto returnVoid = mDescrambler->descramble(
+                sctrl,
+                hidlSubSamples,
+                srcBuffer,
+                0,
+                dstBuffer,
+                0,
+                [&status, &result, &detailedError] (
+                        Status _status, uint32_t _bytesWritten,
+                        const hidl_string& _detailedError) {
+                    status = _status;
+                    result = (ssize_t)_bytesWritten;
+                    detailedError = _detailedError;
+                });
+
+        if (!returnVoid.isOk() || status != Status::OK || result < 0) {
+            ALOGE("descramble failed, trans=%s, status=%d, result=%zd",
+                    returnVoid.description().c_str(), status, result);
+            return UNKNOWN_ERROR;
+        }
+
+        if (result < codecDataOffset) {
+            ALOGD("invalid codec data offset: %zd, result %zd", codecDataOffset, result);
+            return BAD_VALUE;
+        }
+
+        ALOGV("descramble succeeded, %zd bytes", result);
+
+        if (dstBuffer.type == BufferType::SHARED_MEMORY) {
+            memcpy(it->mCodecBuffer->base(),
+                    (uint8_t*)it->mSharedEncryptedBuffer->unsecurePointer(),
+                    result);
+        }
+    }
+
+    it->mCodecBuffer->setRange(codecDataOffset, result - codecDataOffset);
+    return OK;
+}
+
 status_t ACodecBufferChannel::renderOutputBuffer(
         const sp<MediaCodecBuffer> &buffer, int64_t timestampNs) {
     std::shared_ptr<const std::vector<const BufferInfo>> array(
@@ -434,6 +608,16 @@
 }
 
 void ACodecBufferChannel::setCrypto(const sp<ICrypto> &crypto) {
+    if (mCrypto != nullptr) {
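+        // Release every heap registered with the outgoing crypto object (the cached per-memory
+        // heaps as well as the one tracked by mHeapSeqNum) before switching to the new one.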
+        for (std::pair<wp<HidlMemory>, int32_t> entry : mHeapSeqNumMap) {
+            mCrypto->unsetHeap(entry.second);
+        }
+        mHeapSeqNumMap.clear();
+        if (mHeapSeqNum >= 0) {
+            mCrypto->unsetHeap(mHeapSeqNum);
+            mHeapSeqNum = -1;
+        }
+    }
     mCrypto = crypto;
 }
 
diff --git a/media/libstagefright/AMRWriter.cpp b/media/libstagefright/AMRWriter.cpp
index 24f6f0b..a1aa5dd 100644
--- a/media/libstagefright/AMRWriter.cpp
+++ b/media/libstagefright/AMRWriter.cpp
@@ -26,7 +26,7 @@
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaErrors.h>
 #include <media/stagefright/MetaData.h>
-#include <media/MediaSource.h>
+#include <media/stagefright/MediaSource.h>
 #include <media/mediarecorder.h>
 
 namespace android {
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index e78e1e7..2336a1f 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -53,6 +53,7 @@
         "CodecBase.cpp",
         "FrameRenderTracker.cpp",
         "MediaCodecListWriter.cpp",
+        "SkipCutBuffer.cpp",
     ],
 
     cflags: [
@@ -62,6 +63,7 @@
 
     header_libs: [
         "libmediadrm_headers",
+        "media_ndk_headers",
     ],
 
     shared_libs: [
@@ -69,6 +71,7 @@
         "libhidlallocatorutils",
         "liblog",
         "libmedia_codeclist",
+        "libmedia_omx",
         "libstagefright_foundation",
         "libui",
         "libutils",
@@ -212,9 +215,7 @@
         "RemoteMediaExtractor.cpp",
         "RemoteMediaSource.cpp",
         "SimpleDecodingSource.cpp",
-        "SkipCutBuffer.cpp",
         "StagefrightMediaScanner.cpp",
-        "StagefrightPluginLoader.cpp",
         "SurfaceUtils.cpp",
         "ThrottledSource.cpp",
         "Utils.cpp",
@@ -229,6 +230,8 @@
         "libbinder",
         "libbinder_ndk",
         "libcamera_client",
+        "libcodec2",
+        "libcodec2_vndk",
         "libcutils",
         "libdatasource",
         "libdl",
@@ -244,6 +247,7 @@
         "libui",
         "libutils",
         "libmedia_helper",
+        "libsfplugin_ccodec",
         "libstagefright_codecbase",
         "libstagefright_foundation",
         "libstagefright_omx_utils",
diff --git a/media/libstagefright/FrameCaptureProcessor.cpp b/media/libstagefright/FrameCaptureProcessor.cpp
index c517e33..96c1195 100644
--- a/media/libstagefright/FrameCaptureProcessor.cpp
+++ b/media/libstagefright/FrameCaptureProcessor.cpp
@@ -136,7 +136,7 @@
 status_t FrameCaptureProcessor::onCapture(const sp<Layer> &layer,
         const Rect &sourceCrop, const sp<GraphicBuffer> &buffer) {
     renderengine::DisplaySettings clientCompositionDisplay;
-    std::vector<renderengine::LayerSettings> clientCompositionLayers;
+    std::vector<const renderengine::LayerSettings*> clientCompositionLayers;
 
     clientCompositionDisplay.physicalDisplay = sourceCrop;
     clientCompositionDisplay.clip = sourceCrop;
@@ -150,7 +150,7 @@
 
     layer->getLayerSettings(sourceCrop, mTextureName, &layerSettings);
 
-    clientCompositionLayers.push_back(layerSettings);
+    clientCompositionLayers.push_back(&layerSettings);
 
     // Use an empty fence for the buffer fence, since we just created the buffer so
     // there is no need for synchronization with the GPU.
diff --git a/media/libstagefright/MPEG2TSWriter.cpp b/media/libstagefright/MPEG2TSWriter.cpp
index a9715c9..34b840e 100644
--- a/media/libstagefright/MPEG2TSWriter.cpp
+++ b/media/libstagefright/MPEG2TSWriter.cpp
@@ -17,7 +17,7 @@
 //#define LOG_NDEBUG 0
 #define LOG_TAG "MPEG2TSWriter"
 
-#include <media/MediaSource.h>
+#include <media/stagefright/MediaSource.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/hexdump.h>
 #include <media/stagefright/foundation/ABuffer.h>
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index bf4e7de..c88a82a 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -31,8 +31,9 @@
 #include <utils/Log.h>
 
 #include <functional>
+#include <fcntl.h>
 
-#include <media/MediaSource.h>
+#include <media/stagefright/MediaSource.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AMessage.h>
 #include <media/stagefright/foundation/AUtils.h>
@@ -64,9 +65,6 @@
 namespace android {
 
 static const int64_t kMinStreamableFileSizeInBytes = 5 * 1024 * 1024;
-static const int64_t kMax32BitFileSize = 0x00ffffffffLL; // 2^32-1 : max FAT32
-                                                         // filesystem file size
-                                                         // used by most SD cards
 static const uint8_t kNalUnitTypeSeqParamSet = 0x07;
 static const uint8_t kNalUnitTypePicParamSet = 0x08;
 static const int64_t kInitialDelayTimeUs     = 700000LL;
@@ -118,7 +116,7 @@
     int64_t getDurationUs() const;
     int64_t getEstimatedTrackSizeBytes() const;
     int32_t getMetaSizeIncrease(int32_t angle, int32_t trackCount) const;
-    void writeTrackHeader(bool use32BitOffset = true);
+    void writeTrackHeader();
     int64_t getMinCttsOffsetTimeUs();
     void bufferChunk(int64_t timestampUs);
     bool isAvc() const { return mIsAvc; }
@@ -136,6 +134,7 @@
     static const char *getFourCCForMime(const char *mime);
     const char *getTrackType() const;
     void resetInternal();
+    int64_t trackMetaDataSize();
 
 private:
     // A helper class to handle faster write box with table entries
@@ -295,20 +294,16 @@
     int64_t mTrackDurationUs;
     int64_t mMaxChunkDurationUs;
     int64_t mLastDecodingTimeUs;
-
     int64_t mEstimatedTrackSizeBytes;
     int64_t mMdatSizeBytes;
     int32_t mTimeScale;
 
     pthread_t mThread;
 
-
     List<MediaBuffer *> mChunkSamples;
 
-    bool                mSamplesHaveSameSize;
+    bool mSamplesHaveSameSize;
     ListTableEntries<uint32_t, 1> *mStszTableEntries;
-
-    ListTableEntries<uint32_t, 1> *mStcoTableEntries;
     ListTableEntries<off64_t, 1> *mCo64TableEntries;
     ListTableEntries<uint32_t, 3> *mStscTableEntries;
     ListTableEntries<uint32_t, 1> *mStssTableEntries;
@@ -352,6 +347,8 @@
     int64_t mStartTimestampUs;
     int64_t mStartTimeRealUs;
     int64_t mFirstSampleTimeRealUs;
+    // Captures the negative start offset of a track (track start time < 0).
+    int64_t mFirstSampleStartOffsetUs;
     int64_t mPreviousTrackTimeUs;
     int64_t mTrackEveryTimeDurationUs;
 
@@ -361,6 +358,7 @@
     ItemRefs mDimgRefs;
     Vector<uint16_t> mExifList;
     uint16_t mImageItemId;
+    uint16_t mItemIdBase;
     int32_t mIsPrimary;
     int32_t mWidth, mHeight;
     int32_t mTileWidth, mTileHeight;
@@ -410,10 +408,8 @@
     void updateTrackSizeEstimate();
     void addOneStscTableEntry(size_t chunkId, size_t sampleId);
     void addOneStssTableEntry(size_t sampleId);
-
-    // Duration is time scale based
-    void addOneSttsTableEntry(size_t sampleCount, int32_t timescaledDur);
-    void addOneCttsTableEntry(size_t sampleCount, int32_t timescaledDur);
+    void addOneSttsTableEntry(size_t sampleCount, int32_t delta /* media time scale based */);
+    void addOneCttsTableEntry(size_t sampleCount, int32_t sampleOffset);
     void addOneElstTableEntry(uint32_t segmentDuration, int32_t mediaTime,
         int16_t mediaRate, int16_t mediaRateFraction);
 
@@ -421,7 +417,7 @@
     void sendTrackSummary(bool hasMultipleTracks);
 
     // Write the boxes
-    void writeStcoBox(bool use32BitOffset);
+    void writeCo64Box();
     void writeStscBox();
     void writeStszBox();
     void writeStssBox();
@@ -447,7 +443,7 @@
     void writeAudioFourCCBox();
     void writeVideoFourCCBox();
     void writeMetadataFourCCBox();
-    void writeStblBox(bool use32BitOffset);
+    void writeStblBox();
     void writeEdtsBox();
 
     Track(const Track &);
@@ -489,14 +485,17 @@
     mStarted = false;
     mWriterThreadStarted = false;
     mSendNotify = false;
+    mWriteSeekErr = false;
+    mFallocateErr = false;
 
     // Reset following variables for all the sessions and they will be
     // initialized in start(MetaData *param).
     mIsRealTimeRecording = true;
     mUse4ByteNalLength = true;
-    mUse32BitOffset = true;
     mOffset = 0;
+    mPreAllocateFileEndOffset = 0;
     mMdatOffset = 0;
+    mMdatEndOffset = 0;
     mInMemoryCache = NULL;
     mInMemoryCacheOffset = 0;
     mInMemoryCacheSize = 0;
@@ -505,10 +504,14 @@
     mStreamableFile = false;
     mTimeScale = -1;
     mHasFileLevelMeta = false;
+    mFileLevelMetaDataSize = 0;
     mPrimaryItemId = 0;
     mAssociationEntryCount = 0;
     mNumGrids = 0;
+    mNextItemId = kItemIdBase;
     mHasRefs = false;
+    mPreAllocFirstTime = true;
+    mPrevAllTracksTotalMetaDataSizeEstimate = 0;
 
     // Following variables only need to be set for the first recording session.
     // And they will stay the same for all the recording sessions.
@@ -530,6 +533,15 @@
         ALOGE("cannot seek mFd: %s (%d) %lld", strerror(errno), errno, (long long)mFd);
         release();
     }
+
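+    // Probe whether fallocate() is supported on this file descriptor; preallocation is
+    // enabled only if the probe succeeds.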
+    if (fallocate(mFd, 0, 0, 1) == 0) {
+        ALOGD("PreAllocation enabled");
+        mPreAllocationEnabled = true;
+    } else {
+        ALOGD("PreAllocation disabled. fallocate : %s, %d", strerror(errno), errno);
+        mPreAllocationEnabled = false;
+    }
+
     for (List<Track *>::iterator it = mTracks.begin();
          it != mTracks.end(); ++it) {
         (*it)->resetInternal();
@@ -736,9 +748,8 @@
 
     // If the estimation is wrong, we will pay the price of wasting
     // some reserved space. This should not happen so often statistically.
-    static const int32_t factor = mUse32BitOffset? 1: 2;
-    static const int64_t MIN_MOOV_BOX_SIZE = 3 * 1024;  // 3 KB
-    static const int64_t MAX_MOOV_BOX_SIZE = (180 * 3000000 * 6LL / 8000);
+    static const int64_t MIN_MOOV_BOX_SIZE = 3 * 1024;                      // 3 KiB
+    static const int64_t MAX_MOOV_BOX_SIZE = (180 * 3000000 * 6LL / 8000);  // 395.5 KiB
     int64_t size = MIN_MOOV_BOX_SIZE;
 
     // Max file size limit is set
@@ -781,10 +792,7 @@
          " estimated moov size %" PRId64 " bytes",
          mMaxFileSizeLimitBytes, mMaxFileDurationLimitUs, bitRate, size);
 
-    int64_t estimatedSize = factor * size;
-    CHECK_GE(estimatedSize, 8);
-
-    return estimatedSize;
+    return size;
 }
 
 status_t MPEG4Writer::start(MetaData *param) {
@@ -794,36 +802,24 @@
     mStartMeta = param;
 
     /*
-     * Check mMaxFileSizeLimitBytes at the beginning
-     * since mMaxFileSizeLimitBytes may be implicitly
-     * changed later for 32-bit file offset even if
-     * user does not ask to set it explicitly.
+     * Check mMaxFileSizeLimitBytes at the beginning since mMaxFileSizeLimitBytes may be implicitly
+     * changed later as per filesizebits of filesystem even if user does not set it explicitly.
      */
     if (mMaxFileSizeLimitBytes != 0) {
         mIsFileSizeLimitExplicitlyRequested = true;
     }
 
-    int32_t use64BitOffset;
-    if (param &&
-        param->findInt32(kKey64BitFileOffset, &use64BitOffset) &&
-        use64BitOffset) {
-        mUse32BitOffset = false;
-    }
-
-    if (mUse32BitOffset) {
-        // Implicit 32 bit file size limit
-        if (mMaxFileSizeLimitBytes == 0) {
-            mMaxFileSizeLimitBytes = kMax32BitFileSize;
-        }
-
-        // If file size is set to be larger than the 32 bit file
-        // size limit, treat it as an error.
-        if (mMaxFileSizeLimitBytes > kMax32BitFileSize) {
-            ALOGW("32-bit file size limit (%" PRId64 " bytes) too big. "
-                 "It is changed to %" PRId64 " bytes",
-                mMaxFileSizeLimitBytes, kMax32BitFileSize);
-            mMaxFileSizeLimitBytes = kMax32BitFileSize;
-        }
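+    // Derive the maximum file size from the filesystem's reported file size bits instead of
+    // the old fixed 32-bit limit, capping it below 4 petabytes.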
+    int32_t fileSizeBits = fpathconf(mFd, _PC_FILESIZEBITS);
+    ALOGD("fpathconf _PC_FILESIZEBITS:%" PRId32, fileSizeBits);
+    fileSizeBits = std::min(fileSizeBits, 52 /* cap it below 4 petabytes */);
+    int64_t maxFileSizeBytes = ((int64_t)1 << fileSizeBits) - 1;
+    if (mMaxFileSizeLimitBytes > maxFileSizeBytes) {
+        mMaxFileSizeLimitBytes = maxFileSizeBytes;
+        ALOGD("File size limit (%" PRId64 " bytes) too big. It is changed to %" PRId64 " bytes",
+              mMaxFileSizeLimitBytes, maxFileSizeBytes);
+    } else if (mMaxFileSizeLimitBytes == 0) {
+        mMaxFileSizeLimitBytes = maxFileSizeBytes;
+        ALOGD("File size limit set to %" PRId64 " bytes implicitly", maxFileSizeBytes);
     }
 
     int32_t use2ByteNalLength;
@@ -850,7 +846,8 @@
 
     if (!param ||
         !param->findInt32(kKeyTimeScale, &mTimeScale)) {
-        mTimeScale = 1000;
+        // Increased by a factor of 10 to improve the precision of segment duration in edit list entries.
+        mTimeScale = 10000;
     }
     CHECK_GT(mTimeScale, 0);
     ALOGV("movie time scale: %d", mTimeScale);
@@ -915,7 +912,8 @@
     if (mInMemoryCacheSize == 0) {
         int32_t bitRate = -1;
         if (mHasFileLevelMeta) {
-            mInMemoryCacheSize += estimateFileLevelMetaSize(param);
+            mFileLevelMetaDataSize = estimateFileLevelMetaSize(param);
+            mInMemoryCacheSize += mFileLevelMetaDataSize;
         }
         if (mHasMoovBox) {
             if (param) {
@@ -926,7 +924,7 @@
     }
     if (mStreamableFile) {
         // Reserve a 'free' box only for streamable file
-        lseek64(mFd, mFreeBoxOffset, SEEK_SET);
+        seekOrPostError(mFd, mFreeBoxOffset, SEEK_SET);
         writeInt32(mInMemoryCacheSize);
         write("free", 4);
         mMdatOffset = mFreeBoxOffset + mInMemoryCacheSize;
@@ -935,18 +933,16 @@
     }
 
     mOffset = mMdatOffset;
-    lseek64(mFd, mMdatOffset, SEEK_SET);
-    if (mUse32BitOffset) {
-        write("????mdat", 8);
-    } else {
-        write("\x00\x00\x00\x01mdat????????", 16);
-    }
+    seekOrPostError(mFd, mMdatOffset, SEEK_SET);
+    write("\x00\x00\x00\x01mdat????????", 16);
 
     status_t err = startWriterThread();
     if (err != OK) {
         return err;
     }
 
+    setupAndStartLooper();
+
     err = startTracks(param);
     if (err != OK) {
         return err;
@@ -956,32 +952,30 @@
     return OK;
 }
 
-bool MPEG4Writer::use32BitFileOffset() const {
-    return mUse32BitOffset;
-}
-
 status_t MPEG4Writer::pause() {
     ALOGW("MPEG4Writer: pause is not supported");
     return ERROR_UNSUPPORTED;
 }
 
-void MPEG4Writer::stopWriterThread() {
-    ALOGD("Stopping writer thread");
+status_t MPEG4Writer::stopWriterThread() {
+    ALOGV("Stopping writer thread");
     if (!mWriterThreadStarted) {
-        return;
+        return OK;
     }
-
     {
         Mutex::Autolock autolock(mLock);
-
         mDone = true;
         mChunkReadyCondition.signal();
     }
 
     void *dummy;
-    pthread_join(mThread, &dummy);
+    status_t err = pthread_join(mThread, &dummy);
+    WARN_UNLESS(err == 0, "stopWriterThread pthread_join err: %d", err);
+
+    err = static_cast<status_t>(reinterpret_cast<uintptr_t>(dummy));
     mWriterThreadStarted = false;
-    ALOGD("Writer thread stopped");
+    WARN_UNLESS(err == 0, "stopWriterThread pthread_join retVal: %d, writer thread stopped", err);
+    return err;
 }
 
 /*
@@ -1037,12 +1031,21 @@
 }
 
 void MPEG4Writer::release() {
-    close(mFd);
+    ALOGD("release()");
+    if (mPreAllocationEnabled) {
+        truncatePreAllocation();
+    }
+    int retVal = fsync(mFd);
+    WARN_UNLESS(retVal == 0, "fsync retVal:%d", retVal);
+    retVal = close(mFd);
+    WARN_UNLESS(retVal == 0, "close mFd retVal :%d", retVal);
     mFd = -1;
     if (mNextFd != -1) {
-        close(mNextFd);
+        retVal = close(mNextFd);
         mNextFd = -1;
+        WARN_UNLESS(retVal == 0, "close mNextFd retVal :%d", retVal);
     }
+    stopAndReleaseLooper();
     mInitCheck = NO_INIT;
     mStarted = false;
     free(mInMemoryCache);
@@ -1073,16 +1076,19 @@
 }
 
 status_t MPEG4Writer::reset(bool stopSource) {
+    ALOGD("reset()");
+    std::lock_guard<std::mutex> l(mResetMutex);
     if (mInitCheck != OK) {
         return OK;
     } else {
         if (!mWriterThreadStarted ||
             !mStarted) {
+            status_t err = OK;
             if (mWriterThreadStarted) {
-                stopWriterThread();
+                err = stopWriterThread();
             }
             release();
-            return OK;
+            return err;
         }
     }
 
@@ -1092,9 +1098,10 @@
     int32_t nonImageTrackCount = 0;
     for (List<Track *>::iterator it = mTracks.begin();
         it != mTracks.end(); ++it) {
-        status_t status = (*it)->stop(stopSource);
-        if (err == OK && status != OK) {
-            err = status;
+        status_t trackErr = (*it)->stop(stopSource);
+        if (err == OK && trackErr != OK) {
+            ALOGW("%s track stopped with an error", (*it)->getTrackType());
+            err = trackErr;
         }
 
         // skip image tracks
@@ -1110,12 +1117,18 @@
         }
     }
 
+
     if (nonImageTrackCount > 1) {
         ALOGD("Duration from tracks range is [%" PRId64 ", %" PRId64 "] us",
             minDurationUs, maxDurationUs);
     }
 
-    stopWriterThread();
+    status_t writerErr = stopWriterThread();
+
+    // TODO: which error to propagate, writerErr or trackErr?
+    if (err == OK && writerErr != OK) {
+        err = writerErr;
+    }
 
     // Do not write out movie header on error.
     if (err != OK) {
@@ -1124,17 +1137,12 @@
     }
 
     // Fix up the size of the 'mdat' chunk.
-    if (mUse32BitOffset) {
-        lseek64(mFd, mMdatOffset, SEEK_SET);
-        uint32_t size = htonl(static_cast<uint32_t>(mOffset - mMdatOffset));
-        ::write(mFd, &size, 4);
-    } else {
-        lseek64(mFd, mMdatOffset + 8, SEEK_SET);
-        uint64_t size = mOffset - mMdatOffset;
-        size = hton64(size);
-        ::write(mFd, &size, 8);
-    }
-    lseek64(mFd, mOffset, SEEK_SET);
+    seekOrPostError(mFd, mMdatOffset + 8, SEEK_SET);
+    uint64_t size = mOffset - mMdatOffset;
+    size = hton64(size);
+    writeOrPostError(mFd, &size, 8);
+    seekOrPostError(mFd, mOffset, SEEK_SET);
+    mMdatEndOffset = mOffset;
 
     // Construct file-level meta and moov box now
     mInMemoryCacheOffset = 0;
@@ -1198,12 +1206,12 @@
     CHECK_LE(mInMemoryCacheOffset + 8, mInMemoryCacheSize);
 
     // Cached box
-    lseek64(mFd, mFreeBoxOffset, SEEK_SET);
+    seekOrPostError(mFd, mFreeBoxOffset, SEEK_SET);
     mOffset = mFreeBoxOffset;
     write(mInMemoryCache, 1, mInMemoryCacheOffset);
 
     // Free box
-    lseek64(mFd, mOffset, SEEK_SET);
+    seekOrPostError(mFd, mOffset, SEEK_SET);
     mFreeBoxOffset = mOffset;
     writeInt32(mInMemoryCacheSize - mInMemoryCacheOffset);
     write("free", 4);
@@ -1272,20 +1280,19 @@
                 std::min(minCttsOffsetTimeUs, (*it)->getMinCttsOffsetTimeUs());
         }
     }
-    ALOGI("Ajust the moov start time from %lld us -> %lld us",
-            (long long)mStartTimestampUs,
-            (long long)(mStartTimestampUs + minCttsOffsetTimeUs - kMaxCttsOffsetTimeUs));
-    // Adjust the global start time.
+    ALOGI("Adjust the moov start time from %lld us -> %lld us", (long long)mStartTimestampUs,
+          (long long)(mStartTimestampUs + minCttsOffsetTimeUs - kMaxCttsOffsetTimeUs));
+    // Adjust movie start time.
     mStartTimestampUs += minCttsOffsetTimeUs - kMaxCttsOffsetTimeUs;
 
-    // Add mStartTimeOffsetBFramesUs(-ve or zero) to the duration of first entry in STTS.
+    // Add mStartTimeOffsetBFramesUs(-ve or zero) to the start offset of tracks.
     mStartTimeOffsetBFramesUs = minCttsOffsetTimeUs - kMaxCttsOffsetTimeUs;
     ALOGV("mStartTimeOffsetBFramesUs :%" PRId32, mStartTimeOffsetBFramesUs);
 
     for (List<Track *>::iterator it = mTracks.begin();
         it != mTracks.end(); ++it) {
         if (!(*it)->isHeic()) {
-            (*it)->writeTrackHeader(mUse32BitOffset);
+            (*it)->writeTrackHeader();
         }
     }
     endBox();  // moov
@@ -1376,17 +1383,15 @@
     } else {
         if (tiffHdrOffset > 0) {
             tiffHdrOffset = htonl(tiffHdrOffset);
-            ::write(mFd, &tiffHdrOffset, 4); // exif_tiff_header_offset field
+            writeOrPostError(mFd, &tiffHdrOffset, 4);  // exif_tiff_header_offset field
             mOffset += 4;
         }
 
-        ::write(mFd,
-              (const uint8_t *)buffer->data() + buffer->range_offset(),
-              buffer->range_length());
+        writeOrPostError(mFd, (const uint8_t*)buffer->data() + buffer->range_offset(),
+                         buffer->range_length());
 
         mOffset += buffer->range_length();
     }
-
     *bytesWritten = mOffset - old_offset;
     return old_offset;
 }
@@ -1431,30 +1436,27 @@
 
 void MPEG4Writer::addLengthPrefixedSample_l(MediaBuffer *buffer) {
     size_t length = buffer->range_length();
-
     if (mUse4ByteNalLength) {
         uint8_t x = length >> 24;
-        ::write(mFd, &x, 1);
+        writeOrPostError(mFd, &x, 1);
         x = (length >> 16) & 0xff;
-        ::write(mFd, &x, 1);
+        writeOrPostError(mFd, &x, 1);
         x = (length >> 8) & 0xff;
-        ::write(mFd, &x, 1);
+        writeOrPostError(mFd, &x, 1);
         x = length & 0xff;
-        ::write(mFd, &x, 1);
+        writeOrPostError(mFd, &x, 1);
 
-        ::write(mFd,
-              (const uint8_t *)buffer->data() + buffer->range_offset(),
-              length);
+        writeOrPostError(mFd, (const uint8_t*)buffer->data() + buffer->range_offset(), length);
 
         mOffset += length + 4;
     } else {
         CHECK_LT(length, 65536u);
 
         uint8_t x = length >> 8;
-        ::write(mFd, &x, 1);
+        writeOrPostError(mFd, &x, 1);
         x = length & 0xff;
-        ::write(mFd, &x, 1);
-        ::write(mFd, (const uint8_t *)buffer->data() + buffer->range_offset(), length);
+        writeOrPostError(mFd, &x, 1);
+        writeOrPostError(mFd, (const uint8_t*)buffer->data() + buffer->range_offset(), length);
         mOffset += length + 2;
     }
 }
@@ -1476,9 +1478,9 @@
                  it != mBoxes.end(); ++it) {
                 (*it) += mOffset;
             }
-            lseek64(mFd, mOffset, SEEK_SET);
-            ::write(mFd, mInMemoryCache, mInMemoryCacheOffset);
-            ::write(mFd, ptr, bytes);
+            seekOrPostError(mFd, mOffset, SEEK_SET);
+            writeOrPostError(mFd, mInMemoryCache, mInMemoryCacheOffset);
+            writeOrPostError(mFd, ptr, bytes);
             mOffset += (bytes + mInMemoryCacheOffset);
 
             // All subsequent boxes will be written to the end of the file.
@@ -1488,13 +1490,54 @@
             mInMemoryCacheOffset += bytes;
         }
     } else {
-        ::write(mFd, ptr, size * nmemb);
+        writeOrPostError(mFd, ptr, bytes);
         mOffset += bytes;
     }
     return bytes;
 }
 
+void MPEG4Writer::writeOrPostError(int fd, const void* buf, size_t count) {
+    if (mWriteSeekErr == true)
+        return;
+    ssize_t bytesWritten = ::write(fd, buf, count);
+    /* Write as much as possible during stop() execution when there was an error
+     * (mWriteSeekErr == true) in the previous call to write() or lseek64().
+     */
+    if (bytesWritten == count)
+        return;
+    mWriteSeekErr = true;
+    // Note that errno is not changed even when bytesWritten < count.
+    ALOGE("writeOrPostError bytesWritten:%zd, count:%zu, error:%s(%d)", bytesWritten, count,
+          std::strerror(errno), errno);
+
+    // Can't guarantee that the file is usable or that a write would succeed anymore, hence signal to stop.
+    sp<AMessage> msg = new AMessage(kWhatHandleIOError, mReflector);
+    status_t err = msg->post();
+    ALOGE("writeOrPostError post:%d", err);
+}
+
+void MPEG4Writer::seekOrPostError(int fd, off64_t offset, int whence) {
+    if (mWriteSeekErr == true)
+        return;
+    off64_t resOffset = lseek64(fd, offset, whence);
+    /* Allow seeking during stop() execution even when there was an error
+     * (mWriteSeekErr == true) in the previous call to write() or lseek64().
+     */
+    if (resOffset == offset)
+        return;
+    mWriteSeekErr = true;
+    ALOGE("seekOrPostError resOffset:%" PRIu64 ", offset:%" PRIu64 ", error:%s(%d)", resOffset,
+          offset, std::strerror(errno), errno);
+
+    // Can't guarantee that the file is usable or that a seek would succeed anymore, hence signal to stop.
+    sp<AMessage> msg = new AMessage(kWhatHandleIOError, mReflector);
+    status_t err = msg->post();
+    ALOGE("seekOrPostError post:%d", err);
+}
+
 void MPEG4Writer::beginBox(uint32_t id) {
+    ALOGV("beginBox:%" PRIu32, id);
+
     mBoxes.push_back(mWriteBoxToMemory?
             mInMemoryCacheOffset: mOffset);
 
@@ -1503,6 +1546,7 @@
 }
 
 void MPEG4Writer::beginBox(const char *fourcc) {
+    ALOGV("beginBox:%s", fourcc);
     CHECK_EQ(strlen(fourcc), 4u);
 
     mBoxes.push_back(mWriteBoxToMemory?
@@ -1522,10 +1566,11 @@
         int32_t x = htonl(mInMemoryCacheOffset - offset);
         memcpy(mInMemoryCache + offset, &x, 4);
     } else {
-        lseek64(mFd, offset, SEEK_SET);
+        seekOrPostError(mFd, offset, SEEK_SET);
         writeInt32(mOffset - offset);
+        ALOGV("box size:%" PRIu64, mOffset - offset);
         mOffset -= 4;
-        lseek64(mFd, mOffset, SEEK_SET);
+        seekOrPostError(mFd, mOffset, SEEK_SET);
     }
 }
 
@@ -1679,6 +1724,79 @@
     return mStreamableFile;
 }
 
+bool MPEG4Writer::preAllocate(uint64_t wantSize) {
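+    // Reserve file space ahead of writes: the incoming sample (wantSize) plus an estimate of
+    // the moov/meta data that will eventually be written, so that a later stop() still has
+    // room for the headers.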
+    if (!mPreAllocationEnabled)
+        return true;
+
+    std::lock_guard<std::mutex> l(mFallocMutex);
+
+    if (mFallocateErr == true)
+        return false;
+
+    // approxMOOVHeadersSize is a rough estimate; adjust it in the future if needed.
+    uint64_t approxMOOVHeadersSize = 500;
+    // approxTrackHeadersSize is a rough estimate; adjust it in the future if needed.
+    const uint64_t approxTrackHeadersSize = 800;
+
+    uint64_t approxMOOVBoxSize = 0;
+    if (mPreAllocFirstTime) {
+        mPreAllocFirstTime = false;
+        approxMOOVBoxSize = approxMOOVHeadersSize + mFileLevelMetaDataSize + mMoovExtraSize +
+                            (approxTrackHeadersSize * numTracks());
+        ALOGV("firstTimeAllocation approxMOOVBoxSize:%" PRIu64, approxMOOVBoxSize);
+    }
+
+    uint64_t allTracksTotalMetaDataSizeEstimate = 0;
+    for (List<Track *>::iterator it = mTracks.begin(); it != mTracks.end(); ++it) {
+        allTracksTotalMetaDataSizeEstimate += ((*it)->trackMetaDataSize());
+    }
+    ALOGV(" allTracksTotalMetaDataSizeEstimate:%" PRIu64, allTracksTotalMetaDataSizeEstimate);
+
+    /* The MOOV box size grows whenever a sample gets written to the file. After the very first
+     * allocation, it is enough to allocate only the delta increase for each sample.
+     */
+    uint64_t approxMetaDataSizeIncrease =
+            allTracksTotalMetaDataSizeEstimate - mPrevAllTracksTotalMetaDataSizeEstimate;
+    ALOGV("approxMetaDataSizeIncrease:%" PRIu64  " wantSize:%" PRIu64, approxMetaDataSizeIncrease,
+          wantSize);
+    mPrevAllTracksTotalMetaDataSizeEstimate = allTracksTotalMetaDataSizeEstimate;
+    ALOGV("mPreAllocateFileEndOffset:%" PRIu64 " mOffset:%" PRIu64, mPreAllocateFileEndOffset,
+          mOffset);
+    off64_t lastFileEndOffset = std::max(mPreAllocateFileEndOffset, mOffset);
+    uint64_t preAllocateSize = wantSize + approxMOOVBoxSize + approxMetaDataSizeIncrease;
+    ALOGV("preAllocateSize :%" PRIu64 " lastFileEndOffset:%" PRIu64, preAllocateSize,
+          lastFileEndOffset);
+
+    int res = fallocate(mFd, 0, lastFileEndOffset, preAllocateSize);
+    if (res == -1) {
+        ALOGE("fallocate err:%s, %d, fd:%d", strerror(errno), errno, mFd);
+        sp<AMessage> msg = new AMessage(kWhatHandleFallocateError, mReflector);
+        status_t err = msg->post();
+        mFallocateErr = true;
+        ALOGD("preAllocation post:%d", err);
+    } else {
+        mPreAllocateFileEndOffset = lastFileEndOffset + preAllocateSize;
+        ALOGV("mPreAllocateFileEndOffset:%" PRIu64, mPreAllocateFileEndOffset);
+    }
+    return (res == -1) ? false : true;
+}
+
+bool MPEG4Writer::truncatePreAllocation() {
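+    // Trim any unused preallocated space back to the last byte actually written.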
+    bool status = true;
+    off64_t endOffset = std::max(mMdatEndOffset, mOffset);
+    /* If mPreAllocateFileEndOffset >= endOffset (diff >= 0), the preallocation logic is working
+     * as intended. Otherwise, the logic needs to be revisited.
+     */
+    ALOGD("ftruncate mPreAllocateFileEndOffset:%" PRId64 " mOffset:%" PRIu64
+          " mMdatEndOffset:%" PRIu64 " diff:%" PRId64, mPreAllocateFileEndOffset, mOffset,
+          mMdatEndOffset, mPreAllocateFileEndOffset - endOffset);
+    if(ftruncate(mFd, endOffset) == -1) {
+        ALOGE("ftruncate err:%s, %d, fd:%d", strerror(errno), errno, mFd);
+        status = false;
+    }
+    return status;
+}
+
 bool MPEG4Writer::exceedsFileSizeLimit() {
     // No limit
     if (mMaxFileSizeLimitBytes == 0) {
@@ -1764,6 +1882,9 @@
     return mStartTimestampUs;
 }
 
+/* Returns a negative value when reordering is needed because of B-frames, or zero otherwise.
+ * CTTS values for tracks with B-frames offset this negative value.
+ */
 int32_t MPEG4Writer::getStartTimeOffsetBFramesUs() {
     Mutex::Autolock autoLock(mLock);
     return mStartTimeOffsetBFramesUs;
@@ -1792,7 +1913,6 @@
       mEstimatedTrackSizeBytes(0),
       mSamplesHaveSameSize(true),
       mStszTableEntries(new ListTableEntries<uint32_t, 1>(1000)),
-      mStcoTableEntries(new ListTableEntries<uint32_t, 1>(1000)),
       mCo64TableEntries(new ListTableEntries<off64_t, 1>(1000)),
       mStscTableEntries(new ListTableEntries<uint32_t, 3>(1000)),
       mStssTableEntries(new ListTableEntries<uint32_t, 1>(1000)),
@@ -1807,9 +1927,12 @@
       mGotAllCodecSpecificData(false),
       mReachedEOS(false),
       mStartTimestampUs(-1),
+      mFirstSampleTimeRealUs(0),
+      mFirstSampleStartOffsetUs(0),
       mRotation(0),
       mDimgRefs("dimg"),
       mImageItemId(0),
+      mItemIdBase(0),
       mIsPrimary(0),
       mWidth(0),
       mHeight(0),
@@ -1872,15 +1995,11 @@
     mIsMalformed = false;
     mTrackDurationUs = 0;
     mEstimatedTrackSizeBytes = 0;
-    mSamplesHaveSameSize = 0;
+    mSamplesHaveSameSize = false;
     if (mStszTableEntries != NULL) {
         delete mStszTableEntries;
         mStszTableEntries = new ListTableEntries<uint32_t, 1>(1000);
     }
-    if (mStcoTableEntries != NULL) {
-        delete mStcoTableEntries;
-        mStcoTableEntries = new ListTableEntries<uint32_t, 1>(1000);
-    }
     if (mCo64TableEntries != NULL) {
         delete mCo64TableEntries;
         mCo64TableEntries = new ListTableEntries<off64_t, 1>(1000);
@@ -1908,25 +2027,24 @@
     mReachedEOS = false;
 }
 
+int64_t MPEG4Writer::Track::trackMetaDataSize() {
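+    // Estimated size, in bytes, of this track's sample table boxes; used by preAllocate()
+    // and updateTrackSizeEstimate() to account for metadata not yet written to the file.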
+    int64_t co64BoxSizeBytes = mCo64TableEntries->count() * 8;
+    int64_t stszBoxSizeBytes = mStszTableEntries->count() * 4;
+    int64_t trackMetaDataSize = mStscTableEntries->count() * 12 +  // stsc box size
+                                mStssTableEntries->count() * 4 +   // stss box size
+                                mSttsTableEntries->count() * 8 +   // stts box size
+                                mCttsTableEntries->count() * 8 +   // ctts box size
+                                mElstTableEntries->count() * 12 +  // elst box size
+                                co64BoxSizeBytes +                 // stco box size
+                                stszBoxSizeBytes;                  // stsz box size
+    return trackMetaDataSize;
+}
+
+
 void MPEG4Writer::Track::updateTrackSizeEstimate() {
     mEstimatedTrackSizeBytes = mMdatSizeBytes;  // media data size
-
     if (!isHeic() && !mOwner->isFileStreamable()) {
-        uint32_t stcoBoxCount = (mOwner->use32BitFileOffset()
-                                ? mStcoTableEntries->count()
-                                : mCo64TableEntries->count());
-        int64_t stcoBoxSizeBytes = stcoBoxCount * 4;
-        int64_t stszBoxSizeBytes = mSamplesHaveSameSize? 4: (mStszTableEntries->count() * 4);
-
-        // Reserved free space is not large enough to hold
-        // all meta data and thus wasted.
-        mEstimatedTrackSizeBytes += mStscTableEntries->count() * 12 +  // stsc box size
-                                    mStssTableEntries->count() * 4 +   // stss box size
-                                    mSttsTableEntries->count() * 8 +   // stts box size
-                                    mCttsTableEntries->count() * 8 +   // ctts box size
-                                    mElstTableEntries->count() * 12 +   // elst box size
-                                    stcoBoxSizeBytes +           // stco box size
-                                    stszBoxSizeBytes;            // stsz box size
+        mEstimatedTrackSizeBytes += trackMetaDataSize();
     }
 }
 
@@ -1941,24 +2059,20 @@
     mStssTableEntries->add(htonl(sampleId));
 }
 
-void MPEG4Writer::Track::addOneSttsTableEntry(
-        size_t sampleCount, int32_t duration) {
-
-    if (duration == 0) {
+void MPEG4Writer::Track::addOneSttsTableEntry(size_t sampleCount, int32_t delta) {
+    if (delta == 0) {
         ALOGW("0-duration samples found: %zu", sampleCount);
     }
     mSttsTableEntries->add(htonl(sampleCount));
-    mSttsTableEntries->add(htonl(duration));
+    mSttsTableEntries->add(htonl(delta));
 }
 
-void MPEG4Writer::Track::addOneCttsTableEntry(
-        size_t sampleCount, int32_t duration) {
-
+void MPEG4Writer::Track::addOneCttsTableEntry(size_t sampleCount, int32_t sampleOffset) {
     if (!mIsVideo) {
         return;
     }
     mCttsTableEntries->add(htonl(sampleCount));
-    mCttsTableEntries->add(htonl(duration));
+    mCttsTableEntries->add(htonl(sampleOffset));
 }
 
 void MPEG4Writer::Track::addOneElstTableEntry(
@@ -1971,16 +2085,30 @@
     mElstTableEntries->add(htonl((((uint32_t)mediaRate) << 16) | (uint32_t)mediaRateFraction));
 }
 
-status_t MPEG4Writer::setNextFd(int fd) {
-    ALOGV("addNextFd");
-    Mutex::Autolock l(mLock);
-    if (mLooper == NULL) {
-        mReflector = new AHandlerReflector<MPEG4Writer>(this);
+void MPEG4Writer::setupAndStartLooper() {
+    if (mLooper == nullptr) {
         mLooper = new ALooper;
-        mLooper->registerHandler(mReflector);
+        mLooper->setName("MP4WriterLooper");
         mLooper->start();
+        mReflector = new AHandlerReflector<MPEG4Writer>(this);
+        mLooper->registerHandler(mReflector);
     }
+}
 
+void MPEG4Writer::stopAndReleaseLooper() {
+    if (mLooper != nullptr) {
+        if (mReflector != nullptr) {
+            ALOGD("unregisterHandler");
+            mLooper->unregisterHandler(mReflector->id());
+            mReflector.clear();
+        }
+        mLooper->stop();
+        mLooper.clear();
+    }
+}
+
+status_t MPEG4Writer::setNextFd(int fd) {
+    Mutex::Autolock l(mLock);
     if (mNextFd != -1) {
         // No need to set a new FD yet.
         return INVALID_OPERATION;
@@ -2021,12 +2149,7 @@
 
 void MPEG4Writer::Track::addChunkOffset(off64_t offset) {
     CHECK(!mIsHeic);
-    if (mOwner->use32BitFileOffset()) {
-        uint32_t value = offset;
-        mStcoTableEntries->add(htonl(value));
-    } else {
-        mCo64TableEntries->add(hton64(offset));
-    }
+    mCo64TableEntries->add(hton64(offset));
 }
 
 void MPEG4Writer::Track::addItemOffsetAndSize(off64_t offset, size_t size, bool isExif) {
@@ -2042,8 +2165,14 @@
     }
 
     if (isExif) {
-         mExifList.push_back(mOwner->addItem_l({
+        uint16_t exifItemId;
+        if (mOwner->reserveItemId_l(1, &exifItemId) != OK) {
+            return;
+        }
+
+        mExifList.push_back(mOwner->addItem_l({
             .itemType = "Exif",
+            .itemId = exifItemId,
             .isPrimary = false,
             .isHidden = false,
             .offset = (uint32_t)offset,
@@ -2092,6 +2221,7 @@
     if (hasGrid) {
         mDimgRefs.value.push_back(mOwner->addItem_l({
             .itemType = "hvc1",
+            .itemId = mItemIdBase++,
             .isPrimary = false,
             .isHidden = true,
             .offset = (uint32_t)offset,
@@ -2114,6 +2244,7 @@
             }
             mImageItemId = mOwner->addItem_l({
                 .itemType = "grid",
+                .itemId = mItemIdBase++,
                 .isPrimary = (mIsPrimary != 0),
                 .isHidden = false,
                 .rows = (uint32_t)mGridRows,
@@ -2126,6 +2257,7 @@
     } else {
         mImageItemId = mOwner->addItem_l({
             .itemType = "hvc1",
+            .itemId = mItemIdBase++,
             .isPrimary = (mIsPrimary != 0),
             .isHidden = false,
             .offset = (uint32_t)offset,
@@ -2195,6 +2327,22 @@
             notify(MEDIA_RECORDER_EVENT_INFO, MEDIA_RECORDER_INFO_NEXT_OUTPUT_FILE_STARTED, 0);
             break;
         }
+        // ::write() or lseek64() did not succeed; the file could be malformed.
+        case kWhatHandleIOError: {
+            ALOGE("kWhatHandleIOError");
+            // Stop tracks' threads and main writer thread.
+            notify(MEDIA_RECORDER_EVENT_ERROR, MEDIA_RECORDER_ERROR_UNKNOWN, ERROR_MALFORMED);
+            stop();
+            break;
+        }
+        // fallocate() failed, hence notify the app about it and stop().
+        case kWhatHandleFallocateError: {
+            ALOGE("kWhatHandleFallocateError");
+            // TODO: introduce a new MEDIA_RECORDER_INFO_STOPPED instead of MEDIA_RECORDER_INFO_UNKNOWN?
+            notify(MEDIA_RECORDER_EVENT_INFO, MEDIA_RECORDER_INFO_UNKNOWN, ERROR_IO);
+            stop();
+            break;
+        }
         default:
         TRESPASS();
     }
@@ -2236,7 +2384,6 @@
     stop();
 
     delete mStszTableEntries;
-    delete mStcoTableEntries;
     delete mCo64TableEntries;
     delete mStscTableEntries;
     delete mSttsTableEntries;
@@ -2245,7 +2392,6 @@
     delete mElstTableEntries;
 
     mStszTableEntries = NULL;
-    mStcoTableEntries = NULL;
     mCo64TableEntries = NULL;
     mStscTableEntries = NULL;
     mSttsTableEntries = NULL;
@@ -2385,7 +2531,6 @@
             if (interChunkTimeUs > it->mPrevChunkTimestampUs) {
                 it->mMaxInterChunkDurUs = interChunkTimeUs;
             }
-
             return true;
         }
     }
@@ -2468,6 +2613,22 @@
             params->findInt32(kKeyRotation, &rotationDegrees)) {
         mRotation = rotationDegrees;
     }
+    if (mIsHeic) {
+        // Reserve the item ids so that they are ordered in the same order
+        // that the image tracks are added.
+        // If we leave the item ids to be assigned when the sample is written out,
+        // the original track order may not be preserved, if two image tracks
+        // have data around the same time. (This could happen especially when
+        // we're encoding with single tile.) The reordering may be undesirable,
+        // even if the file is well-formed and the primary picture is correct.
+
+        // Reserve item ids for samples + grid
+        size_t numItemsToReserve = mNumTiles + (mNumTiles > 1);
+        status_t err = mOwner->reserveItemId_l(numItemsToReserve, &mItemIdBase);
+        if (err != OK) {
+            return err;
+        }
+    }
 
     initTrackingProgressStatus(params);
 
@@ -2544,10 +2705,11 @@
     mDone = true;
 
     void *dummy;
-    pthread_join(mThread, &dummy);
-    status_t err = static_cast<status_t>(reinterpret_cast<uintptr_t>(dummy));
-
-    ALOGD("%s track stopped. %s source", getTrackType(), stopSource ? "Stop" : "Not Stop");
+    status_t err = pthread_join(mThread, &dummy);
+    WARN_UNLESS(err == 0, "track::stop: pthread_join status:%d", err);
+    err = static_cast<status_t>(reinterpret_cast<uintptr_t>(dummy));
+    WARN_UNLESS(err == 0, "%s track stopped. Status :%d. %s source", getTrackType(), err,
+                stopSource ? "Stop" : "Not Stop");
     return err;
 }
 
@@ -2959,11 +3121,11 @@
     uint32_t lastSamplesPerChunk = 0;
 
     if (mIsAudio) {
-        prctl(PR_SET_NAME, (unsigned long)"AudioTrackEncoding", 0, 0, 0);
+        prctl(PR_SET_NAME, (unsigned long)"AudioTrackWriterThread", 0, 0, 0);
     } else if (mIsVideo) {
-        prctl(PR_SET_NAME, (unsigned long)"VideoTrackEncoding", 0, 0, 0);
+        prctl(PR_SET_NAME, (unsigned long)"VideoTrackWriterThread", 0, 0, 0);
     } else {
-        prctl(PR_SET_NAME, (unsigned long)"MetadataTrackEncoding", 0, 0, 0);
+        prctl(PR_SET_NAME, (unsigned long)"MetadataTrackWriterThread", 0, 0, 0);
     }
 
     if (mOwner->isRealTimeRecording()) {
@@ -2983,6 +3145,7 @@
             continue;
         }
 
+
         // If the codec specific data has not been received yet, delay pause.
         // After the codec specific data is received, discard what we received
         // when the track is to be paused.
@@ -2993,7 +3156,6 @@
         }
 
         ++count;
-
         int32_t isCodecConfig;
         if (buffer->meta_data().findInt32(kKeyIsCodecConfig, &isCodecConfig)
                 && isCodecConfig) {
@@ -3060,6 +3222,18 @@
             }
         }
 
+        /*
+         * Reserve space in the file for the current sample and the MOOV box to be written later.
+         * If reservation for a new sample fails, preAllocate(...) stops the muxing session
+         * completely. stop() can still write the MOOV box successfully because space for it was
+         * reserved in the prior call. Release the current buffer/sample only here.
+         */
+        if (!mOwner->preAllocate(buffer->range_length())) {
+            buffer->release();
+            buffer = nullptr;
+            break;
+        }
+
         ++nActualFrames;
 
         // Make a deep copy of the MediaBuffer and Metadata and release
@@ -3071,7 +3245,6 @@
         meta_data = new MetaData(buffer->meta_data());
         buffer->release();
         buffer = NULL;
-
         if (isExif) {
             copy->meta_data().setInt32(kKeyExifTiffOffset, tiffHdrOffset);
         }
@@ -3119,10 +3292,10 @@
         if (mOwner->approachingFileSizeLimit()) {
             mOwner->notifyApproachingLimit();
         }
-
         int32_t isSync = false;
         meta_data->findInt32(kKeyIsSyncFrame, &isSync);
         CHECK(meta_data->findInt64(kKeyTime, &timestampUs));
+        timestampUs += mFirstSampleStartOffsetUs;
 
         // For video, skip the first several non-key frames until getting the first key frame.
         if (mIsVideo && !mGotStartKeyFrame && !isSync) {
@@ -3134,10 +3307,13 @@
             mGotStartKeyFrame = true;
         }
 ////////////////////////////////////////////////////////////////////////////////
-
         if (!mIsHeic) {
             if (mStszTableEntries->count() == 0) {
                 mFirstSampleTimeRealUs = systemTime() / 1000;
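+                // If the track's first timestamp is negative, remember its magnitude as the
+                // first-sample start offset and shift the track so that it starts at time 0.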
+                if (timestampUs < 0 && mFirstSampleStartOffsetUs == 0) {
+                    mFirstSampleStartOffsetUs = -timestampUs;
+                    timestampUs = 0;
+                }
                 mOwner->setStartTimestampUs(timestampUs);
                 mStartTimestampUs = timestampUs;
                 previousPausedDurationUs = mStartTimestampUs;
@@ -3309,17 +3485,17 @@
                 }
             }
             mStszTableEntries->add(htonl(sampleSize));
+
             if (mStszTableEntries->count() > 2) {
 
                 // Force the first sample to have its own stts entry so that
                 // we can adjust its value later to maintain the A/V sync.
-                if (mStszTableEntries->count() == 3 || currDurationTicks != lastDurationTicks) {
+                if (lastDurationTicks && currDurationTicks != lastDurationTicks) {
                     addOneSttsTableEntry(sampleCount, lastDurationTicks);
                     sampleCount = 1;
                 } else {
                     ++sampleCount;
                 }
-
             }
             if (mSamplesHaveSameSize) {
                 if (mStszTableEntries->count() >= 2 && previousSampleSize != sampleSize) {
@@ -3352,11 +3528,7 @@
             if (mIsHeic) {
                 addItemOffsetAndSize(offset, bytesWritten, isExif);
             } else {
-                uint32_t count = (mOwner->use32BitFileOffset()
-                            ? mStcoTableEntries->count()
-                            : mCo64TableEntries->count());
-
-                if (count == 0) {
+                if (mCo64TableEntries->count() == 0) {
                     addChunkOffset(offset);
                 }
             }
@@ -3392,9 +3564,7 @@
                 }
             }
         }
-
     }
-
     if (isTrackMalFormed()) {
         dumpTimeStamps();
         err = ERROR_MALFORMED;
@@ -3426,17 +3596,10 @@
             ++sampleCount;  // Count for the last sample
         }
 
-        if (mStszTableEntries->count() <= 2) {
-            addOneSttsTableEntry(1, lastDurationTicks);
-            if (sampleCount - 1 > 0) {
-                addOneSttsTableEntry(sampleCount - 1, lastDurationTicks);
-            }
-        } else {
-            addOneSttsTableEntry(sampleCount, lastDurationTicks);
-        }
+        addOneSttsTableEntry(sampleCount, lastDurationTicks);
 
-        // The last ctts box may not have been written yet, and this
-        // is to make sure that we write out the last ctts box.
+        // The last ctts box entry may not have been written yet, and this
+        // is to make sure that we write out the last ctts box entry.
         if (currCttsOffsetTimeTicks == lastCttsOffsetTimeTicks) {
             if (cttsSampleCount > 0) {
                 addOneCttsTableEntry(cttsSampleCount, lastCttsOffsetTimeTicks);
@@ -3687,7 +3850,7 @@
                       "Metadata";
 }
 
-void MPEG4Writer::Track::writeTrackHeader(bool use32BitOffset) {
+void MPEG4Writer::Track::writeTrackHeader() {
     uint32_t now = getMpeg4Time();
     mOwner->beginBox("trak");
         writeTkhdBox(now);
@@ -3704,7 +3867,7 @@
                     writeNmhdBox();
                 }
                 writeDinfBox();
-                writeStblBox(use32BitOffset);
+                writeStblBox();
             mOwner->endBox();  // minf
         mOwner->endBox();  // mdia
     mOwner->endBox();  // trak
@@ -3720,7 +3883,7 @@
     return mMinCttsOffsetTimeUs;
 }
 
-void MPEG4Writer::Track::writeStblBox(bool use32BitOffset) {
+void MPEG4Writer::Track::writeStblBox() {
     mOwner->beginBox("stbl");
     mOwner->beginBox("stsd");
     mOwner->writeInt32(0);               // version=0, flags=0
@@ -3740,7 +3903,7 @@
     }
     writeStszBox();
     writeStscBox();
-    writeStcoBox(use32BitOffset);
+    writeCo64Box();
     mOwner->endBox();  // stbl
 }
 
@@ -4099,19 +4262,122 @@
     mOwner->endBox();
 }
 
-void MPEG4Writer::Track::writeEdtsBox(){
+void MPEG4Writer::Track::writeEdtsBox() {
     ALOGV("%s : getStartTimeOffsetTimeUs of track:%" PRId64 " us", getTrackType(),
         getStartTimeOffsetTimeUs());
 
-    // Prepone video playback.
-    if (mMinCttsOffsetTicks != mMaxCttsOffsetTicks) {
-        int32_t mvhdTimeScale = mOwner->getTimeScale();
-        uint32_t tkhdDuration = (getDurationUs() * mvhdTimeScale + 5E5) / 1E6;
-        int64_t mediaTime = ((kMaxCttsOffsetTimeUs - getMinCttsOffsetTimeUs())
-            * mTimeScale + 5E5) / 1E6;
-        if (tkhdDuration > 0 && mediaTime > 0) {
-            addOneElstTableEntry(tkhdDuration, mediaTime, 1, 0);
+    int32_t mvhdTimeScale = mOwner->getTimeScale();
+    ALOGV("mvhdTimeScale:%" PRId32, mvhdTimeScale);
+    /* trackStartOffsetUs of this track is the sum of the longest offset needed by any track
+     * with B frames in this movie and this track's own start offset.
+     */
+    int64_t trackStartOffsetUs = getStartTimeOffsetTimeUs();
+    ALOGV("trackStartOffsetUs:%" PRId64, trackStartOffsetUs);
+
+    // Longest offset needed by a track among all tracks with B frames.
+    int32_t movieStartOffsetBFramesUs = mOwner->getStartTimeOffsetBFramesUs();
+    ALOGV("movieStartOffsetBFramesUs:%" PRId32, movieStartOffsetBFramesUs);
+
+    // This media/track's real duration (sum of duration of all samples in this track).
+    uint32_t tkhdDurationTicks = (mTrackDurationUs * mvhdTimeScale + 5E5) / 1E6;
+    ALOGV("mTrackDurationUs:%" PRId64 "us", mTrackDurationUs);
+
+    int64_t movieStartTimeUs = mOwner->getStartTimestampUs();
+    ALOGV("movieStartTimeUs:%" PRId64, movieStartTimeUs);
+
+    int64_t trackStartTimeUs = movieStartTimeUs + trackStartOffsetUs;
+    ALOGV("trackStartTimeUs:%" PRId64, trackStartTimeUs);
+
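+    /* Edit list ("elst") cases handled below:
+     *  - movieStartOffsetBFramesUs == 0 (no B frames in any track): write an empty edit for a
+     *    positive start offset, a media_time offset for a negative first-sample offset, or no
+     *    entry when the track starts at zero.
+     *  - movieStartOffsetBFramesUs < 0 (B frames in at least one track): write an edit that
+     *    cancels the shared ctts offset and any per-track start offset.
+     */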
+    if (movieStartOffsetBFramesUs == 0) {
+        // No B frames in any track.
+        if (trackStartOffsetUs > 0) {
+            // Track with positive start offset.
+            uint32_t segDuration = (trackStartOffsetUs * mvhdTimeScale + 5E5) / 1E6;
+            ALOGV("segDuration:%" PRId64 "us", trackStartOffsetUs);
+            /* The first entry is an empty edit (indicated by media_time equal to -1), and its
+             * duration (segment_duration) is equal to the difference of the presentation times of
+             * the earliest media sample among all tracks and the earliest media sample of the track.
+             */
+            ALOGV("Empty edit list entry");
+            addOneElstTableEntry(segDuration, -1, 1, 0);
+            addOneElstTableEntry(tkhdDurationTicks, 0, 1, 0);
+        } else if (mFirstSampleStartOffsetUs > 0) {
+            // Track with start time < 0 / negative start offset.
+            ALOGV("Normal edit list entry");
+            int32_t mediaTime = (mFirstSampleStartOffsetUs * mTimeScale + 5E5) / 1E6;
+            int32_t firstSampleOffsetTicks =
+                    (mFirstSampleStartOffsetUs * mvhdTimeScale + 5E5) / 1E6;
+            // Samples before 0 don't count for duration, hence subtract firstSampleOffsetTicks.
+            addOneElstTableEntry(tkhdDurationTicks - firstSampleOffsetTicks, mediaTime, 1, 0);
+        } else {
+            // Track starting at zero.
+            ALOGV("No edit list entry required for this track");
         }
+    } else if (movieStartOffsetBFramesUs < 0) {
+        // B frames present in at least one of the tracks.
+        ALOGV("writeEdtsBox - Reordered frames(B frames) present");
+        if (trackStartOffsetUs == std::abs(movieStartOffsetBFramesUs)) {
+            // Track starting at 0, no start offset.
+            // TODO : need to take care of mFirstSampleStartOffsetUs > 0 and trackStartOffsetUs > 0
+            // separately
+            if (mMinCttsOffsetTicks == mMaxCttsOffsetTicks) {
+                // Video with no B frame or non-video track.
+                if (mFirstSampleStartOffsetUs > 0) {
+                    // Track with start time < 0 / negative start offset.
+                    ALOGV("Normal edit list entry");
+                    ALOGV("mFirstSampleStartOffsetUs:%" PRId64 "us", mFirstSampleStartOffsetUs);
+                    int32_t mediaTimeTicks = (mFirstSampleStartOffsetUs * mTimeScale + 5E5) / 1E6;
+                    int32_t firstSampleOffsetTicks =
+                            (mFirstSampleStartOffsetUs * mvhdTimeScale + 5E5) / 1E6;
+                    // Samples before 0 don't count for duration, subtract firstSampleOffsetTicks.
+                    addOneElstTableEntry(tkhdDurationTicks - firstSampleOffsetTicks, mediaTimeTicks,
+                                         1, 0);
+                }
+            } else {
+                // Track with B Frames.
+                int32_t mediaTimeTicks = (trackStartOffsetUs * mTimeScale + 5E5) / 1E6;
+                ALOGV("mediaTime:%" PRId64 "us", trackStartOffsetUs);
+                ALOGV("Normal edit list entry to negate start offset by B Frames in other tracks");
+                addOneElstTableEntry(tkhdDurationTicks, mediaTimeTicks, 1, 0);
+            }
+        } else if (trackStartOffsetUs > std::abs(movieStartOffsetBFramesUs)) {
+            // Track with start offset.
+            ALOGV("Tracks starting > 0");
+            int32_t editDurationTicks = 0;
+            if (mMinCttsOffsetTicks == mMaxCttsOffsetTicks) {
+                // Video with no B frame or non-video track.
+                editDurationTicks =
+                        ((trackStartOffsetUs + movieStartOffsetBFramesUs) * mvhdTimeScale + 5E5) /
+                        1E6;
+                ALOGV("editDuration:%" PRId64 "us", (trackStartOffsetUs + movieStartOffsetBFramesUs));
+            } else {
+                // Track with B frame.
+                int32_t trackStartOffsetBFramesUs = getMinCttsOffsetTimeUs() - kMaxCttsOffsetTimeUs;
+                ALOGV("trackStartOffsetBFramesUs:%" PRId32, trackStartOffsetBFramesUs);
+                editDurationTicks =
+                        ((trackStartOffsetUs + movieStartOffsetBFramesUs +
+                          trackStartOffsetBFramesUs) * mvhdTimeScale + 5E5) / 1E6;
+                ALOGV("editDuration:%" PRId64 "us", (trackStartOffsetUs + movieStartOffsetBFramesUs + trackStartOffsetBFramesUs));
+            }
+            ALOGV("editDurationTicks:%" PRId32, editDurationTicks);
+            if (editDurationTicks > 0) {
+                ALOGV("Empty edit list entry");
+                addOneElstTableEntry(editDurationTicks, -1, 1, 0);
+                addOneElstTableEntry(tkhdDurationTicks, 0, 1, 0);
+            } else if (editDurationTicks < 0) {
+                // Only video tracks with B Frames would hit this case.
+                ALOGV("Edit list entry to negate start offset by B frames in other tracks");
+                addOneElstTableEntry(tkhdDurationTicks, std::abs(editDurationTicks), 1, 0);
+            } else {
+                ALOGV("No edit list entry needed for this track");
+            }
+        } else {
+            // Not expecting this case as we adjust negative start timestamps to zero.
+            ALOGW("trackStartOffsetUs < std::abs(movieStartOffsetBFramesUs)");
+        }
+    } else {
+        // Unexpected case: movieStartOffsetBFramesUs should never be positive.
+        ALOGW("movieStartOffsetBFramesUs > 0");
     }
 
     if (mElstTableEntries->count() == 0) {
@@ -4253,19 +4519,6 @@
 void MPEG4Writer::Track::writeSttsBox() {
     mOwner->beginBox("stts");
     mOwner->writeInt32(0);  // version=0, flags=0
-    if (mMinCttsOffsetTicks == mMaxCttsOffsetTicks) {
-        // For non-vdeio tracks or video tracks without ctts table,
-        // adjust duration of first sample for tracks to account for
-        // first sample not starting at the media start time.
-        // TODO: consider signaling this using some offset
-        // as this is not quite correct.
-        uint32_t duration;
-        CHECK(mSttsTableEntries->get(duration, 1));
-        duration = htonl(duration);  // Back to host byte order
-        int32_t startTimeOffsetScaled = (((getStartTimeOffsetTimeUs() +
-            mOwner->getStartTimeOffsetBFramesUs()) * mTimeScale) + 500000LL) / 1000000LL;
-        mSttsTableEntries->set(htonl((int32_t)duration + startTimeOffsetScaled), 1);
-    }
     mSttsTableEntries->write(mOwner);
     mOwner->endBox();  // stts
 }
@@ -4286,7 +4539,9 @@
 
     mOwner->beginBox("ctts");
     mOwner->writeInt32(0);  // version=0, flags=0
-    int64_t deltaTimeUs = kMaxCttsOffsetTimeUs - getStartTimeOffsetTimeUs();
+    // Adjust ctts entries to have only offset needed for reordering frames.
+    int64_t deltaTimeUs = mMinCttsOffsetTimeUs;
+    ALOGV("ctts deltaTimeUs:%" PRId64, deltaTimeUs);
     int64_t delta = (deltaTimeUs * mTimeScale + 500000LL) / 1000000LL;
     mCttsTableEntries->adjustEntries([delta](size_t /* ix */, uint32_t (&value)[2]) {
         // entries are <count, ctts> pairs; adjust only ctts
@@ -4327,14 +4582,10 @@
     mOwner->endBox();  // stsc
 }
 
-void MPEG4Writer::Track::writeStcoBox(bool use32BitOffset) {
-    mOwner->beginBox(use32BitOffset? "stco": "co64");
+void MPEG4Writer::Track::writeCo64Box() {
+    mOwner->beginBox("co64");
     mOwner->writeInt32(0);  // version=0, flags=0
-    if (use32BitOffset) {
-        mStcoTableEntries->write(mOwner);
-    } else {
-        mCo64TableEntries->write(mOwner);
-    }
+    mCo64TableEntries->write(mOwner);
     mOwner->endBox();  // stco or co64
 }
 
@@ -4457,9 +4708,11 @@
     }
     writeInt16((uint16_t)itemCount);
 
-    for (size_t i = 0; i < itemCount; i++) {
-        writeInt16(mItems[i].itemId);
-        bool isGrid = mItems[i].isGrid();
+    for (auto it = mItems.begin(); it != mItems.end(); it++) {
+        ItemInfo &item = it->second;
+
+        writeInt16(item.itemId);
+        bool isGrid = item.isGrid();
 
         writeInt16(isGrid ? 1 : 0); // construction_method
         writeInt16(0); // data_reference_index = 0
@@ -4470,8 +4723,8 @@
             writeInt32(mNumGrids++ * 8);
             writeInt32(8);
         } else {
-            writeInt32(mItems[i].offset);
-            writeInt32(mItems[i].size);
+            writeInt32(item.offset);
+            writeInt32(item.size);
         }
     }
     endBox();
@@ -4500,9 +4753,11 @@
     }
 
     writeInt16((uint16_t)itemCount);
-    for (size_t i = 0; i < itemCount; i++) {
-        writeInfeBox(mItems[i].itemId, mItems[i].itemType,
-                (mItems[i].isImage() && mItems[i].isHidden) ? 1 : 0);
+    for (auto it = mItems.begin(); it != mItems.end(); it++) {
+        ItemInfo &item = it->second;
+
+        writeInfeBox(item.itemId, item.itemType,
+                (item.isImage() && item.isHidden) ? 1 : 0);
     }
 
     endBox();
@@ -4511,20 +4766,22 @@
 void MPEG4Writer::writeIdatBox() {
     beginBox("idat");
 
-    for (size_t i = 0; i < mItems.size(); i++) {
-        if (mItems[i].isGrid()) {
+    for (auto it = mItems.begin(); it != mItems.end(); it++) {
+        ItemInfo &item = it->second;
+
+        if (item.isGrid()) {
             writeInt8(0); // version
             // flags == 1 means 32-bit width,height
-            int8_t flags = (mItems[i].width > 65535 || mItems[i].height > 65535);
+            int8_t flags = (item.width > 65535 || item.height > 65535);
             writeInt8(flags);
-            writeInt8(mItems[i].rows - 1);
-            writeInt8(mItems[i].cols - 1);
+            writeInt8(item.rows - 1);
+            writeInt8(item.cols - 1);
             if (flags) {
-                writeInt32(mItems[i].width);
-                writeInt32(mItems[i].height);
+                writeInt32(item.width);
+                writeInt32(item.height);
             } else {
-                writeInt16((uint16_t)mItems[i].width);
-                writeInt16((uint16_t)mItems[i].height);
+                writeInt16((uint16_t)item.width);
+                writeInt16((uint16_t)item.height);
             }
         }
     }
@@ -4536,11 +4793,13 @@
     beginBox("iref");
     writeInt32(0);          // Version = 0, Flags = 0
     {
-        for (size_t i = 0; i < mItems.size(); i++) {
-            for (size_t r = 0; r < mItems[i].refsList.size(); r++) {
-                const ItemRefs &refs = mItems[i].refsList[r];
+        for (auto it = mItems.begin(); it != mItems.end(); it++) {
+            ItemInfo &item = it->second;
+
+            for (size_t r = 0; r < item.refsList.size(); r++) {
+                const ItemRefs &refs = item.refsList[r];
                 beginBox(refs.key);
-                writeInt16(mItems[i].itemId);
+                writeInt16(item.itemId);
                 size_t refCount = refs.value.size();
                 if (refCount > 65535) {
                     ALOGW("too many entries in %s", refs.key);
@@ -4615,12 +4874,14 @@
     writeInt32(flags); // Version = 0
 
     writeInt32(mAssociationEntryCount);
-    for (size_t itemIndex = 0; itemIndex < mItems.size(); itemIndex++) {
-        const Vector<uint16_t> &properties = mItems[itemIndex].properties;
+    for (auto it = mItems.begin(); it != mItems.end(); it++) {
+        ItemInfo &item = it->second;
+
+        const Vector<uint16_t> &properties = item.properties;
         if (properties.empty()) {
             continue;
         }
-        writeInt16(mItems[itemIndex].itemId);
+        writeInt16(item.itemId);
 
         size_t entryCount = properties.size();
         if (entryCount > 255) {
@@ -4650,19 +4911,21 @@
     // patch up the mPrimaryItemId and count items with prop associations
     uint16_t firstVisibleItemId = 0;
     uint16_t firstImageItemId = 0;
-    for (size_t index = 0; index < mItems.size(); index++) {
-        if (!mItems[index].isImage()) continue;
+    for (auto it = mItems.begin(); it != mItems.end(); it++) {
+        ItemInfo &item = it->second;
 
-        if (mItems[index].isPrimary) {
-            mPrimaryItemId = mItems[index].itemId;
+        if (!item.isImage()) continue;
+
+        if (item.isPrimary) {
+            mPrimaryItemId = item.itemId;
         }
         if (!firstImageItemId) {
-            firstImageItemId = mItems[index].itemId;
+            firstImageItemId = item.itemId;
         }
-        if (!firstVisibleItemId && !mItems[index].isHidden) {
-            firstVisibleItemId = mItems[index].itemId;
+        if (!firstVisibleItemId && !item.isHidden) {
+            firstVisibleItemId = item.itemId;
         }
-        if (!mItems[index].properties.empty()) {
+        if (!item.properties.empty()) {
             mAssociationEntryCount++;
         }
     }
@@ -4716,15 +4979,25 @@
     return mProperties.size();
 }
 
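+// Reserves numItems consecutive item ids starting at mNextItemId and returns the first one in
+// *itemIdBase; fails with ERROR_OUT_OF_RANGE if the request would exceed the 16-bit id space.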
+status_t MPEG4Writer::reserveItemId_l(size_t numItems, uint16_t *itemIdBase) {
+    if (numItems > UINT16_MAX - mNextItemId) {
+        ALOGE("couldn't reserve item ids for %zu items", numItems);
+        return ERROR_OUT_OF_RANGE;
+    }
+    *itemIdBase = mNextItemId;
+    mNextItemId += numItems;
+    return OK;
+}
+
 uint16_t MPEG4Writer::addItem_l(const ItemInfo &info) {
     ALOGV("addItem_l: type %s, offset %u, size %u",
             info.itemType, info.offset, info.size);
 
-    size_t index = mItems.size();
-    mItems.push_back(info);
+    if (info.itemId < kItemIdBase || info.itemId >= mNextItemId) {
+        ALOGW("Item id %u is used without reservation!", info.itemId);
+    }
 
-    // make the item id start at kItemIdBase
-    mItems.editItemAt(index).itemId = index + kItemIdBase;
+    mItems[info.itemId] = info;
 
 #if (LOG_NDEBUG==0)
     if (!info.properties.empty()) {
@@ -4735,24 +5008,28 @@
             }
             str.append(info.properties[i]);
         }
-        ALOGV("addItem_l: id %d, properties: %s", mItems[index].itemId, str.c_str());
+        ALOGV("addItem_l: id %d, properties: %s", info.itemId, str.c_str());
     }
 #endif // (LOG_NDEBUG==0)
 
-    return mItems[index].itemId;
+    return info.itemId;
 }
 
 void MPEG4Writer::addRefs_l(uint16_t itemId, const ItemRefs &refs) {
     if (refs.value.empty()) {
         return;
     }
-    if (itemId < kItemIdBase) {
-        ALOGW("itemId shouldn't be smaller than kItemIdBase");
+    if (itemId < kItemIdBase || itemId >= mNextItemId) {
+        ALOGW("itemId %u for ref is invalid!", itemId);
         return;
     }
 
-    size_t index = itemId - kItemIdBase;
-    mItems.editItemAt(index).refsList.push_back(refs);
+    auto it = mItems.find(itemId);
+    if (it == mItems.end()) {
+        ALOGW("itemId %u was not added yet", itemId);
+        return;
+    }
+    it->second.refsList.push_back(refs);
     mHasRefs = true;
 }
 
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 709bc08..6757474 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -21,8 +21,9 @@
 #include <inttypes.h>
 #include <stdlib.h>
 
+#include <C2Buffer.h>
+
 #include "include/SoftwareRenderer.h"
-#include "StagefrightPluginLoader.h"
 
 #include <android/hardware/cas/native/1.0/IDescrambler.h>
 #include <android/hardware/media/omx/1.0/IGraphicBufferSource.h>
@@ -39,6 +40,7 @@
 #include <mediadrm/ICrypto.h>
 #include <media/IOMX.h>
 #include <media/MediaCodecBuffer.h>
+#include <media/MediaCodecInfo.h>
 #include <media/MediaMetricsItem.h>
 #include <media/MediaResource.h>
 #include <media/stagefright/foundation/ABuffer.h>
@@ -51,6 +53,7 @@
 #include <media/stagefright/ACodec.h>
 #include <media/stagefright/BatteryChecker.h>
 #include <media/stagefright/BufferProducerWrapper.h>
+#include <media/stagefright/CCodec.h>
 #include <media/stagefright/MediaCodec.h>
 #include <media/stagefright/MediaCodecList.h>
 #include <media/stagefright/MediaDefs.h>
@@ -128,6 +131,9 @@
 static const int kMaxReclaimWaitTimeInUs = 500000;  // 0.5s
 static const int kNumBuffersAlign = 16;
 
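+// Default Codec2 memory usage for blocks fetched on behalf of clients: CPU-readable and
+// CPU-writable.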
+static const C2MemoryUsage kDefaultReadWriteUsage{
+    C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
+
 ////////////////////////////////////////////////////////////////////////////////
 
 struct ResourceManagerClient : public BnResourceManagerClient {
@@ -538,9 +544,7 @@
 
 // static
 sp<PersistentSurface> MediaCodec::CreatePersistentInputSurface() {
-    // allow plugin to create surface
-    sp<PersistentSurface> pluginSurface =
-        StagefrightPluginLoader::GetCCodecInstance()->createInputSurface();
+    sp<PersistentSurface> pluginSurface = CCodec::CreateInputSurface();
     if (pluginSurface != nullptr) {
         return pluginSurface;
     }
@@ -969,7 +973,7 @@
 }
 
 static CodecBase *CreateCCodec() {
-    return StagefrightPluginLoader::GetCCodecInstance()->createCodec();
+    return new CCodec;
 }
 
 //static
@@ -994,6 +998,28 @@
     }
 }
 
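+// Builds a codec-name -> MediaCodecInfo map from MediaCodecList once, on first use, so the
+// static block-fetch helpers below can look up codec ownership without re-querying the list.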
+struct CodecListCache {
+    CodecListCache()
+        : mCodecInfoMap{[] {
+              const sp<IMediaCodecList> mcl = MediaCodecList::getInstance();
+              size_t count = mcl->countCodecs();
+              std::map<std::string, sp<MediaCodecInfo>> codecInfoMap;
+              for (size_t i = 0; i < count; ++i) {
+                  sp<MediaCodecInfo> info = mcl->getCodecInfo(i);
+                  codecInfoMap.emplace(info->getCodecName(), info);
+              }
+              return codecInfoMap;
+          }()} {
+    }
+
+    const std::map<std::string, sp<MediaCodecInfo>> mCodecInfoMap;
+};
+
+static const CodecListCache &GetCodecListCache() {
+    static CodecListCache sCache{};
+    return sCache;
+}
+
 status_t MediaCodec::init(const AString &name) {
     mResourceManagerProxy->init();
 
@@ -1503,6 +1529,75 @@
     return err;
 }
 
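+// Block-model input path: queues a client-provided C2Buffer by wrapping it in a WrapperObject
+// and posting it to kWhatQueueInputBuffer along with the timestamp, flags and tunings.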
+status_t MediaCodec::queueBuffer(
+        size_t index,
+        const std::shared_ptr<C2Buffer> &buffer,
+        int64_t presentationTimeUs,
+        uint32_t flags,
+        const sp<AMessage> &tunings,
+        AString *errorDetailMsg) {
+    if (errorDetailMsg != NULL) {
+        errorDetailMsg->clear();
+    }
+
+    sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
+    msg->setSize("index", index);
+    sp<WrapperObject<std::shared_ptr<C2Buffer>>> obj{
+        new WrapperObject<std::shared_ptr<C2Buffer>>{buffer}};
+    msg->setObject("c2buffer", obj);
+    msg->setInt64("timeUs", presentationTimeUs);
+    msg->setInt32("flags", flags);
+    msg->setMessage("tunings", tunings);
+    msg->setPointer("errorDetailMsg", errorDetailMsg);
+
+    sp<AMessage> response;
+    status_t err = PostAndAwaitResponse(msg, &response);
+
+    return err;
+}
+
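+// Block-model secure input path: the encrypted payload arrives as HidlMemory together with its
+// crypto parameters (subsamples, key, IV, mode, pattern) and is attached to the codec's input
+// buffer when the message is handled.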
+status_t MediaCodec::queueEncryptedBuffer(
+        size_t index,
+        const sp<hardware::HidlMemory> &buffer,
+        size_t offset,
+        const CryptoPlugin::SubSample *subSamples,
+        size_t numSubSamples,
+        const uint8_t key[16],
+        const uint8_t iv[16],
+        CryptoPlugin::Mode mode,
+        const CryptoPlugin::Pattern &pattern,
+        int64_t presentationTimeUs,
+        uint32_t flags,
+        const sp<AMessage> &tunings,
+        AString *errorDetailMsg) {
+    if (errorDetailMsg != NULL) {
+        errorDetailMsg->clear();
+    }
+
+    sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
+    msg->setSize("index", index);
+    sp<WrapperObject<sp<hardware::HidlMemory>>> memory{
+        new WrapperObject<sp<hardware::HidlMemory>>{buffer}};
+    msg->setObject("memory", memory);
+    msg->setSize("offset", offset);
+    msg->setPointer("subSamples", (void *)subSamples);
+    msg->setSize("numSubSamples", numSubSamples);
+    msg->setPointer("key", (void *)key);
+    msg->setPointer("iv", (void *)iv);
+    msg->setInt32("mode", mode);
+    msg->setInt32("encryptBlocks", pattern.mEncryptBlocks);
+    msg->setInt32("skipBlocks", pattern.mSkipBlocks);
+    msg->setInt64("timeUs", presentationTimeUs);
+    msg->setInt32("flags", flags);
+    msg->setMessage("tunings", tunings);
+    msg->setPointer("errorDetailMsg", errorDetailMsg);
+
+    sp<AMessage> response;
+    status_t err = PostAndAwaitResponse(msg, &response);
+
+    return err;
+}
+
 status_t MediaCodec::dequeueInputBuffer(size_t *index, int64_t timeoutUs) {
     sp<AMessage> msg = new AMessage(kWhatDequeueInputBuffer, this);
     msg->setInt64("timeoutUs", timeoutUs);
@@ -2357,6 +2452,23 @@
                     sp<MediaCodecBuffer> buffer = static_cast<MediaCodecBuffer *>(obj.get());
 
                     if (mOutputFormat != buffer->format()) {
+                        if (mFlags & kFlagUseBlockModel) {
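+                            // In block model, collect the keys that differ between the old and
+                            // new output formats (the union of both diffs) and attach them to
+                            // the buffer so the client can see exactly what changed.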
+                            sp<AMessage> diff1 = mOutputFormat->changesFrom(buffer->format());
+                            sp<AMessage> diff2 = buffer->format()->changesFrom(mOutputFormat);
+                            std::set<std::string> keys;
+                            size_t numEntries = diff1->countEntries();
+                            AMessage::Type type;
+                            for (size_t i = 0; i < numEntries; ++i) {
+                                keys.emplace(diff1->getEntryNameAt(i, &type));
+                            }
+                            numEntries = diff2->countEntries();
+                            for (size_t i = 0; i < numEntries; ++i) {
+                                keys.emplace(diff2->getEntryNameAt(i, &type));
+                            }
+                            sp<WrapperObject<std::set<std::string>>> changedKeys{
+                                new WrapperObject<std::set<std::string>>{std::move(keys)}};
+                            buffer->meta()->setObject("changedKeys", changedKeys);
+                        }
                         mOutputFormat = buffer->format();
                         ALOGV("[%s] output format changed to: %s",
                                 mComponentName.c_str(), mOutputFormat->debugString(4).c_str());
@@ -2624,6 +2736,15 @@
                 handleSetSurface(NULL);
             }
 
+            uint32_t flags;
+            CHECK(msg->findInt32("flags", (int32_t *)&flags));
+            if (flags & CONFIGURE_FLAG_USE_BLOCK_MODEL) {
+                if (!(mFlags & kFlagIsAsync)) {
+                    PostReplyWithError(replyID, INVALID_OPERATION);
+                    break;
+                }
+                mFlags |= kFlagUseBlockModel;
+            }
             mReplyID = replyID;
             setState(CONFIGURING);
 
@@ -2649,9 +2770,7 @@
             mDescrambler = static_cast<IDescrambler *>(descrambler);
             mBufferChannel->setDescrambler(mDescrambler);
 
-            uint32_t flags;
-            CHECK(msg->findInt32("flags", (int32_t *)&flags));
-
+            format->setInt32("flags", flags);
             if (flags & CONFIGURE_FLAG_ENCODE) {
                 format->setInt32("encoder", true);
                 mFlags |= kFlagIsEncoder;
@@ -3236,22 +3355,36 @@
 status_t MediaCodec::queueCSDInputBuffer(size_t bufferIndex) {
     CHECK(!mCSD.empty());
 
-    const BufferInfo &info = mPortBuffers[kPortIndexInput][bufferIndex];
-
     sp<ABuffer> csd = *mCSD.begin();
     mCSD.erase(mCSD.begin());
+    std::shared_ptr<C2Buffer> c2Buffer;
 
-    const sp<MediaCodecBuffer> &codecInputData = info.mData;
+    if ((mFlags & kFlagUseBlockModel) && mOwnerName.startsWith("codec2::")) {
+        std::shared_ptr<C2LinearBlock> block =
+            FetchLinearBlock(csd->size(), {std::string{mComponentName.c_str()}});
+        C2WriteView view{block->map().get()};
+        if (view.error() != C2_OK) {
+            return -EINVAL;
+        }
+        if (csd->size() > view.capacity()) {
+            return -EINVAL;
+        }
+        memcpy(view.base(), csd->data(), csd->size());
+        c2Buffer = C2Buffer::CreateLinearBuffer(block->share(0, csd->size(), C2Fence{}));
+    } else {
+        const BufferInfo &info = mPortBuffers[kPortIndexInput][bufferIndex];
+        const sp<MediaCodecBuffer> &codecInputData = info.mData;
 
-    if (csd->size() > codecInputData->capacity()) {
-        return -EINVAL;
+        if (csd->size() > codecInputData->capacity()) {
+            return -EINVAL;
+        }
+        if (codecInputData->data() == NULL) {
+            ALOGV("Input buffer %zu is not properly allocated", bufferIndex);
+            return -EINVAL;
+        }
+
+        memcpy(codecInputData->data(), csd->data(), csd->size());
     }
-    if (codecInputData->data() == NULL) {
-        ALOGV("Input buffer %zu is not properly allocated", bufferIndex);
-        return -EINVAL;
-    }
-
-    memcpy(codecInputData->data(), csd->data(), csd->size());
 
     AString errorDetailMsg;
 
@@ -3262,6 +3395,12 @@
     msg->setInt64("timeUs", 0LL);
     msg->setInt32("flags", BUFFER_FLAG_CODECCONFIG);
     msg->setPointer("errorDetailMsg", &errorDetailMsg);
+    if (c2Buffer) {
+        sp<WrapperObject<std::shared_ptr<C2Buffer>>> obj{
+            new WrapperObject<std::shared_ptr<C2Buffer>>{c2Buffer}};
+        msg->setObject("c2buffer", obj);
+        msg->setMessage("tunings", new AMessage);
+    }
 
     return onQueueInputBuffer(msg);
 }
@@ -3367,10 +3506,20 @@
     int64_t timeUs;
     uint32_t flags;
     CHECK(msg->findSize("index", &index));
-    CHECK(msg->findSize("offset", &offset));
     CHECK(msg->findInt64("timeUs", &timeUs));
     CHECK(msg->findInt32("flags", (int32_t *)&flags));
-
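+    // In block model the input arrives either as a C2Buffer ("c2buffer") or as HidlMemory
+    // ("memory"); only the legacy path carries an explicit "offset".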
+    std::shared_ptr<C2Buffer> c2Buffer;
+    sp<hardware::HidlMemory> memory;
+    sp<RefBase> obj;
+    if (msg->findObject("c2buffer", &obj)) {
+        CHECK(obj);
+        c2Buffer = static_cast<WrapperObject<std::shared_ptr<C2Buffer>> *>(obj.get())->value;
+    } else if (msg->findObject("memory", &obj)) {
+        CHECK(obj);
+        memory = static_cast<WrapperObject<sp<hardware::HidlMemory>> *>(obj.get())->value;
+    } else {
+        CHECK(msg->findSize("offset", &offset));
+    }
     const CryptoPlugin::SubSample *subSamples;
     size_t numSubSamples;
     const uint8_t *key;
@@ -3394,7 +3543,7 @@
             pattern.mEncryptBlocks = 0;
             pattern.mSkipBlocks = 0;
         }
-    } else {
+    } else if (!c2Buffer) {
         if (!hasCryptoOrDescrambler()) {
             ALOGE("[%s] queuing secure buffer without mCrypto or mDescrambler!",
                     mComponentName.c_str());
@@ -3425,31 +3574,52 @@
     }
 
     BufferInfo *info = &mPortBuffers[kPortIndexInput][index];
+    sp<MediaCodecBuffer> buffer = info->mData;
 
-    if (info->mData == nullptr || !info->mOwnedByClient) {
+    if (c2Buffer || memory) {
+        sp<AMessage> tunings;
+        CHECK(msg->findMessage("tunings", &tunings));
+        onSetParameters(tunings);
+
+        status_t err = OK;
+        if (c2Buffer) {
+            err = mBufferChannel->attachBuffer(c2Buffer, buffer);
+        } else if (memory) {
+            err = mBufferChannel->attachEncryptedBuffer(
+                    memory, (mFlags & kFlagIsSecure), key, iv, mode, pattern,
+                    offset, subSamples, numSubSamples, buffer);
+        }
+        offset = buffer->offset();
+        size = buffer->size();
+        if (err != OK) {
+            return err;
+        }
+    }
+
+    if (buffer == nullptr || !info->mOwnedByClient) {
         return -EACCES;
     }
 
-    if (offset + size > info->mData->capacity()) {
+    if (offset + size > buffer->capacity()) {
         return -EINVAL;
     }
 
-    info->mData->setRange(offset, size);
-    info->mData->meta()->setInt64("timeUs", timeUs);
+    buffer->setRange(offset, size);
+    buffer->meta()->setInt64("timeUs", timeUs);
     if (flags & BUFFER_FLAG_EOS) {
-        info->mData->meta()->setInt32("eos", true);
+        buffer->meta()->setInt32("eos", true);
     }
 
     if (flags & BUFFER_FLAG_CODECCONFIG) {
-        info->mData->meta()->setInt32("csd", true);
+        buffer->meta()->setInt32("csd", true);
     }
 
-    sp<MediaCodecBuffer> buffer = info->mData;
     status_t err = OK;
     if (hasCryptoOrDescrambler()) {
         AString *errorDetailMsg;
         CHECK(msg->findPointer("errorDetailMsg", (void **)&errorDetailMsg));
 
+        ALOGI("calling queueSec");
         err = mBufferChannel->queueSecureInputBuffer(
                 buffer,
                 (mFlags & kFlagIsSecure),
@@ -3837,4 +4007,70 @@
     return rval;
 }
 
+// static
+status_t MediaCodec::CanFetchLinearBlock(
+        const std::vector<std::string> &names, bool *isCompatible) {
+    *isCompatible = false;
+    if (names.size() == 0) {
+        *isCompatible = true;
+        return OK;
+    }
+    const CodecListCache &cache = GetCodecListCache();
+    for (const std::string &name : names) {
+        auto it = cache.mCodecInfoMap.find(name);
+        if (it == cache.mCodecInfoMap.end()) {
+            return NAME_NOT_FOUND;
+        }
+        const char *owner = it->second->getOwnerName();
+        if (owner == nullptr || strncmp(owner, "default", 8) == 0) {
+            *isCompatible = false;
+            return OK;
+        } else if (strncmp(owner, "codec2::", 8) != 0) {
+            return NAME_NOT_FOUND;
+        }
+    }
+    return CCodec::CanFetchLinearBlock(names, kDefaultReadWriteUsage, isCompatible);
+}
+
+// static
+std::shared_ptr<C2LinearBlock> MediaCodec::FetchLinearBlock(
+        size_t capacity, const std::vector<std::string> &names) {
+    return CCodec::FetchLinearBlock(capacity, kDefaultReadWriteUsage, names);
+}
+
+// static
+status_t MediaCodec::CanFetchGraphicBlock(
+        const std::vector<std::string> &names, bool *isCompatible) {
+    *isCompatible = false;
+    if (names.size() == 0) {
+        *isCompatible = true;
+        return OK;
+    }
+    const CodecListCache &cache = GetCodecListCache();
+    for (const std::string &name : names) {
+        auto it = cache.mCodecInfoMap.find(name);
+        if (it == cache.mCodecInfoMap.end()) {
+            return NAME_NOT_FOUND;
+        }
+        const char *owner = it->second->getOwnerName();
+        if (owner == nullptr || strncmp(owner, "default", 8) == 0) {
+            *isCompatible = false;
+            return OK;
+        } else if (strncmp(owner, "codec2::", 8) != 0) {
+            return NAME_NOT_FOUND;
+        }
+    }
+    return CCodec::CanFetchGraphicBlock(names, isCompatible);
+}
+
+// static
+std::shared_ptr<C2GraphicBlock> MediaCodec::FetchGraphicBlock(
+        int32_t width,
+        int32_t height,
+        int32_t format,
+        uint64_t usage,
+        const std::vector<std::string> &names) {
+    return CCodec::FetchGraphicBlock(width, height, format, usage, names);
+}
+
 }  // namespace android
diff --git a/media/libstagefright/MediaCodecList.cpp b/media/libstagefright/MediaCodecList.cpp
index a267f7e..ac54fa1 100644
--- a/media/libstagefright/MediaCodecList.cpp
+++ b/media/libstagefright/MediaCodecList.cpp
@@ -19,7 +19,6 @@
 #include <utils/Log.h>
 
 #include "MediaCodecListOverrides.h"
-#include "StagefrightPluginLoader.h"
 
 #include <binder/IServiceManager.h>
 
@@ -30,11 +29,14 @@
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AMessage.h>
 #include <media/stagefright/foundation/MediaDefs.h>
+#include <media/stagefright/omx/OMXUtils.h>
+#include <media/stagefright/xmlparser/MediaCodecsXmlParser.h>
+#include <media/stagefright/CCodec.h>
+#include <media/stagefright/Codec2InfoBuilder.h>
 #include <media/stagefright/MediaCodecList.h>
 #include <media/stagefright/MediaErrors.h>
 #include <media/stagefright/OmxInfoBuilder.h>
-#include <media/stagefright/omx/OMXUtils.h>
-#include <xmlparser/include/media/stagefright/xmlparser/MediaCodecsXmlParser.h>
+#include <media/stagefright/PersistentSurface.h>
 
 #include <sys/stat.h>
 #include <utils/threads.h>
@@ -86,8 +88,7 @@
 MediaCodecListBuilderBase *GetCodec2InfoBuilder() {
     Mutex::Autolock _l(sCodec2InfoBuilderMutex);
     if (!sCodec2InfoBuilder) {
-        sCodec2InfoBuilder.reset(
-                StagefrightPluginLoader::GetCCodecInstance()->createBuilder());
+        sCodec2InfoBuilder.reset(new Codec2InfoBuilder);
     }
     return sCodec2InfoBuilder.get();
 }
@@ -96,8 +97,7 @@
     std::vector<MediaCodecListBuilderBase *> builders;
     // if plugin provides the input surface, we cannot use OMX video encoders.
     // In this case, rely on plugin to provide list of OMX codecs that are usable.
-    sp<PersistentSurface> surfaceTest =
-        StagefrightPluginLoader::GetCCodecInstance()->createInputSurface();
+    sp<PersistentSurface> surfaceTest = CCodec::CreateInputSurface();
     if (surfaceTest == nullptr) {
         ALOGD("Allowing all OMX codecs");
         builders.push_back(&sOmxInfoBuilder);
diff --git a/media/libstagefright/MediaCodecSource.cpp b/media/libstagefright/MediaCodecSource.cpp
index 7243b82..7fec072 100644
--- a/media/libstagefright/MediaCodecSource.cpp
+++ b/media/libstagefright/MediaCodecSource.cpp
@@ -25,7 +25,7 @@
 #include <mediadrm/ICrypto.h>
 #include <media/MediaBufferHolder.h>
 #include <media/MediaCodecBuffer.h>
-#include <media/MediaSource.h>
+#include <media/stagefright/MediaSource.h>
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/ALooper.h>
diff --git a/media/libstagefright/MediaMuxer.cpp b/media/libstagefright/MediaMuxer.cpp
index 7ebdb1a..3808d7e 100644
--- a/media/libstagefright/MediaMuxer.cpp
+++ b/media/libstagefright/MediaMuxer.cpp
@@ -24,7 +24,7 @@
 #include <media/stagefright/MediaMuxer.h>
 
 #include <media/mediarecorder.h>
-#include <media/MediaSource.h>
+#include <media/stagefright/MediaSource.h>
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AMessage.h>
@@ -48,7 +48,8 @@
 
 MediaMuxer::MediaMuxer(int fd, OutputFormat format)
     : mFormat(format),
-      mState(UNINITIALIZED) {
+      mState(UNINITIALIZED),
+      mError(OK) {
     if (isMp4Format(format)) {
         mWriter = new MPEG4Writer(fd);
     } else if (format == OUTPUT_FORMAT_WEBM) {
@@ -58,6 +59,7 @@
     }
 
     if (mWriter != NULL) {
+        mWriter->setMuxerListener(this);
         mFileMeta = new MetaData;
         if (format == OUTPUT_FORMAT_HEIF) {
             // Note that the key uses recorder file types.
@@ -156,14 +158,22 @@
 status_t MediaMuxer::stop() {
     Mutex::Autolock autoLock(mMuxerLock);
 
-    if (mState == STARTED) {
+    if (mState == STARTED || mState == ERROR) {
         mState = STOPPED;
         for (size_t i = 0; i < mTrackList.size(); i++) {
             if (mTrackList[i]->stop() != OK) {
                 return INVALID_OPERATION;
             }
         }
-        return mWriter->stop();
+        status_t err = mWriter->stop();
+        if (err != OK || mError != OK) {
+            ALOGE("stop err: %d, mError:%d", err, mError);
+        }
+        // Prioritize mError over err.
+        if (mError != OK) {
+            err = mError;
+        }
+        return err;
     } else {
         ALOGE("stop() is called in invalid state %d", mState);
         return INVALID_OPERATION;
@@ -212,4 +222,29 @@
     return currentTrack->pushBuffer(mediaBuffer);
 }
 
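+// Listener callback registered with the writer via setMuxerListener(); records writer errors so
+// that stop() can report them to the caller.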
+void MediaMuxer::notify(int msg, int ext1, int ext2) {
+    switch (msg) {
+        case MEDIA_RECORDER_EVENT_ERROR:
+        case MEDIA_RECORDER_TRACK_EVENT_ERROR: {
+            Mutex::Autolock autoLock(mMuxerLock);
+            mState = ERROR;
+            mError = ext2;
+            ALOGW("message received msg=%d, ext1=%d, ext2=%d", msg, ext1, ext2);
+            break;
+        }
+        case MEDIA_RECORDER_EVENT_INFO: {
+            if (ext1 == MEDIA_RECORDER_INFO_UNKNOWN) {
+                Mutex::Autolock autoLock(mMuxerLock);
+                mState = ERROR;
+                mError = ext2;
+                ALOGW("message received msg=%d, ext1=%d, ext2=%d", msg, ext1, ext2);
+            }
+            break;
+        }
+        default:
+            // Ignore other notifications for now.
+            break;
+    }
+}
+
 }  // namespace android
diff --git a/media/libstagefright/MediaSource.cpp b/media/libstagefright/MediaSource.cpp
index 5bbd3d8..ee0031f 100644
--- a/media/libstagefright/MediaSource.cpp
+++ b/media/libstagefright/MediaSource.cpp
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-#include <media/MediaSource.h>
+#include <media/stagefright/MediaSource.h>
 #include <media/IMediaSource.h>
 
 namespace android {
diff --git a/media/libstagefright/NuMediaExtractor.cpp b/media/libstagefright/NuMediaExtractor.cpp
index 66fb4b0..9fe09fa 100644
--- a/media/libstagefright/NuMediaExtractor.cpp
+++ b/media/libstagefright/NuMediaExtractor.cpp
@@ -25,7 +25,7 @@
 #include <datasource/DataSourceFactory.h>
 #include <datasource/FileSource.h>
 #include <media/DataSource.h>
-#include <media/MediaSource.h>
+#include <media/stagefright/MediaSource.h>
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AMessage.h>
diff --git a/media/libstagefright/OggWriter.cpp b/media/libstagefright/OggWriter.cpp
index b738fef..0bc5976 100644
--- a/media/libstagefright/OggWriter.cpp
+++ b/media/libstagefright/OggWriter.cpp
@@ -22,7 +22,7 @@
 #include <sys/stat.h>
 #include <sys/types.h>
 
-#include <media/MediaSource.h>
+#include <media/stagefright/MediaSource.h>
 #include <media/mediarecorder.h>
 #include <media/stagefright/MediaBuffer.h>
 #include <media/stagefright/MediaDefs.h>
diff --git a/media/libstagefright/RemoteMediaExtractor.cpp b/media/libstagefright/RemoteMediaExtractor.cpp
index 2e26fd0..c841d10 100644
--- a/media/libstagefright/RemoteMediaExtractor.cpp
+++ b/media/libstagefright/RemoteMediaExtractor.cpp
@@ -21,7 +21,7 @@
 #include <binder/IPCThreadState.h>
 #include <media/stagefright/InterfaceUtils.h>
 #include <media/MediaMetricsItem.h>
-#include <media/MediaSource.h>
+#include <media/stagefright/MediaSource.h>
 #include <media/stagefright/RemoteMediaExtractor.h>
 
 // still doing some on/off toggling here.
diff --git a/media/libstagefright/StagefrightPluginLoader.cpp b/media/libstagefright/StagefrightPluginLoader.cpp
deleted file mode 100644
index fb03c5e..0000000
--- a/media/libstagefright/StagefrightPluginLoader.cpp
+++ /dev/null
@@ -1,104 +0,0 @@
-/*
- * Copyright 2018, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "StagefrightPluginLoader"
-#include <utils/Log.h>
-
-#include <android-base/properties.h>
-#include <dlfcn.h>
-
-#include "StagefrightPluginLoader.h"
-
-namespace android {
-
-/* static */ Mutex StagefrightPluginLoader::sMutex;
-/* static */ std::unique_ptr<StagefrightPluginLoader> StagefrightPluginLoader::sInstance;
-
-namespace /* unnamed */ {
-
-constexpr const char kCCodecPluginPath[] = "libsfplugin_ccodec.so";
-
-}  // unnamed namespace
-
-StagefrightPluginLoader::StagefrightPluginLoader(const char *libPath) {
-    if (android::base::GetIntProperty("debug.stagefright.ccodec", 1) == 0) {
-        ALOGD("CCodec is disabled.");
-        return;
-    }
-    mLibHandle = dlopen(libPath, RTLD_NOW | RTLD_NODELETE);
-    if (mLibHandle == nullptr) {
-        ALOGD("Failed to load library: %s (%s)", libPath, dlerror());
-        return;
-    }
-    mCreateCodec = (CodecBase::CreateCodecFunc)dlsym(mLibHandle, "CreateCodec");
-    if (mCreateCodec == nullptr) {
-        ALOGD("Failed to find symbol: CreateCodec (%s)", dlerror());
-    }
-    mCreateBuilder = (MediaCodecListBuilderBase::CreateBuilderFunc)dlsym(
-            mLibHandle, "CreateBuilder");
-    if (mCreateBuilder == nullptr) {
-        ALOGD("Failed to find symbol: CreateBuilder (%s)", dlerror());
-    }
-    mCreateInputSurface = (CodecBase::CreateInputSurfaceFunc)dlsym(
-            mLibHandle, "CreateInputSurface");
-    if (mCreateInputSurface == nullptr) {
-        ALOGD("Failed to find symbol: CreateInputSurface (%s)", dlerror());
-    }
-}
-
-StagefrightPluginLoader::~StagefrightPluginLoader() {
-    if (mLibHandle != nullptr) {
-        ALOGV("Closing handle");
-        dlclose(mLibHandle);
-    }
-}
-
-CodecBase *StagefrightPluginLoader::createCodec() {
-    if (mLibHandle == nullptr || mCreateCodec == nullptr) {
-        ALOGD("Handle or CreateCodec symbol is null");
-        return nullptr;
-    }
-    return mCreateCodec();
-}
-
-MediaCodecListBuilderBase *StagefrightPluginLoader::createBuilder() {
-    if (mLibHandle == nullptr || mCreateBuilder == nullptr) {
-        ALOGD("Handle or CreateBuilder symbol is null");
-        return nullptr;
-    }
-    return mCreateBuilder();
-}
-
-PersistentSurface *StagefrightPluginLoader::createInputSurface() {
-    if (mLibHandle == nullptr || mCreateInputSurface == nullptr) {
-        ALOGD("Handle or CreateInputSurface symbol is null");
-        return nullptr;
-    }
-    return mCreateInputSurface();
-}
-
-//static
-const std::unique_ptr<StagefrightPluginLoader> &StagefrightPluginLoader::GetCCodecInstance() {
-    Mutex::Autolock _l(sMutex);
-    if (!sInstance) {
-        ALOGV("Loading library");
-        sInstance.reset(new StagefrightPluginLoader(kCCodecPluginPath));
-    }
-    return sInstance;
-}
-
-}  // namespace android
diff --git a/media/libstagefright/StagefrightPluginLoader.h b/media/libstagefright/StagefrightPluginLoader.h
deleted file mode 100644
index 78effbf..0000000
--- a/media/libstagefright/StagefrightPluginLoader.h
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Copyright 2018, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef STAGEFRIGHT_PLUGIN_LOADER_H_
-
-#define STAGEFRIGHT_PLUGIN_LOADER_H_
-
-#include <media/stagefright/CodecBase.h>
-#include <media/stagefright/MediaCodecListWriter.h>
-#include <media/stagefright/PersistentSurface.h>
-#include <utils/Mutex.h>
-
-namespace android {
-
-class StagefrightPluginLoader {
-public:
-    static const std::unique_ptr<StagefrightPluginLoader> &GetCCodecInstance();
-    ~StagefrightPluginLoader();
-
-    CodecBase *createCodec();
-    MediaCodecListBuilderBase *createBuilder();
-    PersistentSurface *createInputSurface();
-
-private:
-    explicit StagefrightPluginLoader(const char *libPath);
-
-    static Mutex sMutex;
-    static std::unique_ptr<StagefrightPluginLoader> sInstance;
-
-    void *mLibHandle{nullptr};
-    CodecBase::CreateCodecFunc mCreateCodec{nullptr};
-    MediaCodecListBuilderBase::CreateBuilderFunc mCreateBuilder{nullptr};
-    CodecBase::CreateInputSurfaceFunc mCreateInputSurface{nullptr};
-};
-
-}  // namespace android
-
-#endif  // STAGEFRIGHT_PLUGIN_LOADER_H_
diff --git a/media/libstagefright/codecs/amrnb/dec/test/AmrnbDecTestEnvironment.h b/media/libstagefright/codecs/amrnb/dec/test/AmrnbDecTestEnvironment.h
new file mode 100644
index 0000000..0344ac5
--- /dev/null
+++ b/media/libstagefright/codecs/amrnb/dec/test/AmrnbDecTestEnvironment.h
@@ -0,0 +1,72 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __AMRNBDEC_TEST_ENVIRONMENT_H__
+#define __AMRNBDEC_TEST_ENVIRONMENT_H__
+
+#include <gtest/gtest.h>
+
+#include <getopt.h>
+
+using namespace std;
+
+class AmrnbDecTestEnvironment : public ::testing::Environment {
+  public:
+    AmrnbDecTestEnvironment() : res("/data/local/tmp/") {}
+
+    // Parses the command line arguments
+    int initFromOptions(int argc, char **argv);
+
+    void setRes(const char *_res) { res = _res; }
+
+    const string getRes() const { return res; }
+
+  private:
+    string res;
+};
+
+int AmrnbDecTestEnvironment::initFromOptions(int argc, char **argv) {
+    static struct option options[] = {{"res", required_argument, 0, 'P'}, {0, 0, 0, 0}};
+
+    while (true) {
+        int index = 0;
+        int c = getopt_long(argc, argv, "P:", options, &index);
+        if (c == -1) {
+            break;
+        }
+
+        switch (c) {
+            case 'P':
+                setRes(optarg);
+                break;
+            default:
+                break;
+        }
+    }
+
+    if (optind < argc) {
+        fprintf(stderr,
+                "unrecognized option: %s\n\n"
+                "usage: %s <gtest options> <test options>\n\n"
+                "test options are:\n\n"
+                "-P, --path: Resource files directory location\n",
+                argv[optind ?: 1], argv[0]);
+        return 2;
+    }
+    return 0;
+}
+
+#endif  // __AMRNBDEC_TEST_ENVIRONMENT_H__
diff --git a/media/libstagefright/codecs/amrnb/dec/test/AmrnbDecoderTest.cpp b/media/libstagefright/codecs/amrnb/dec/test/AmrnbDecoderTest.cpp
new file mode 100644
index 0000000..af62074
--- /dev/null
+++ b/media/libstagefright/codecs/amrnb/dec/test/AmrnbDecoderTest.cpp
@@ -0,0 +1,175 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "AmrnbDecoderTest"
+#define OUTPUT_FILE "/data/local/tmp/amrnbDecode.out"
+
+#include <utils/Log.h>
+
+#include <audio_utils/sndfile.h>
+#include <stdio.h>
+
+#include "gsmamr_dec.h"
+
+#include "AmrnbDecTestEnvironment.h"
+
+// Constants for AMR-NB
+constexpr int32_t kInputBufferSize = 64;
+constexpr int32_t kSamplesPerFrame = L_FRAME;
+constexpr int32_t kBitsPerSample = 16;
+constexpr int32_t kSampleRate = 8000;
+constexpr int32_t kChannels = 1;
+constexpr int32_t kOutputBufferSize = kSamplesPerFrame * kBitsPerSample / 8;
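+// Frame payload size in bytes for each AMR-NB frame type; -1 marks types this test does not
+// decode.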
+const int32_t kFrameSizes[] = {12, 13, 15, 17, 19, 20, 26, 31, -1, -1, -1, -1, -1, -1, -1, -1};
+
+constexpr int32_t kNumFrameReset = 150;
+
+static AmrnbDecTestEnvironment *gEnv = nullptr;
+
+class AmrnbDecoderTest : public ::testing::TestWithParam<string> {
+  public:
+    AmrnbDecoderTest() : mFpInput(nullptr) {}
+
+    ~AmrnbDecoderTest() {
+        if (mFpInput) {
+            fclose(mFpInput);
+            mFpInput = nullptr;
+        }
+    }
+
+    FILE *mFpInput;
+    SNDFILE *openOutputFile(SF_INFO *sfInfo);
+    int32_t DecodeFrames(void *amrHandle, SNDFILE *outFileHandle, int32_t frameCount = INT32_MAX);
+};
+
+SNDFILE *AmrnbDecoderTest::openOutputFile(SF_INFO *sfInfo) {
+    memset(sfInfo, 0, sizeof(SF_INFO));
+    sfInfo->channels = kChannels;
+    sfInfo->format = SF_FORMAT_WAV | SF_FORMAT_PCM_16;
+    sfInfo->samplerate = kSampleRate;
+    SNDFILE *outFileHandle = sf_open(OUTPUT_FILE, SFM_WRITE, sfInfo);
+    return outFileHandle;
+}
+
+int32_t AmrnbDecoderTest::DecodeFrames(void *amrHandle, SNDFILE *outFileHandle,
+                                       int32_t frameCount) {
+    uint8_t inputBuf[kInputBufferSize];
+    int16_t outputBuf[kOutputBufferSize];
+
+    while (frameCount > 0) {
+        uint8_t mode;
+        int32_t bytesRead = fread(&mode, 1, 1, mFpInput);
+        if (bytesRead != 1) break;
+
+        // Find frame type
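+        // (In AMR storage format each frame begins with a ToC byte; bits 3-6 hold the frame type.)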
+        Frame_Type_3GPP frameType = (Frame_Type_3GPP)((mode >> 3) & 0x0f);
+        int32_t frameSize = kFrameSizes[frameType];
+        if (frameSize < 0) {
+            ALOGE("Illegal frame type");
+            return -1;
+        }
+        bytesRead = fread(inputBuf, 1, frameSize, mFpInput);
+        if (bytesRead != frameSize) break;
+
+        int32_t bytesDecoded = AMRDecode(amrHandle, frameType, inputBuf, outputBuf, MIME_IETF);
+        if (bytesDecoded == -1) {
+            ALOGE("Failed to decode the input file");
+            return -1;
+        }
+
+        sf_writef_short(outFileHandle, outputBuf, kSamplesPerFrame);
+        frameCount--;
+    }
+    return 0;
+}
+
+TEST_F(AmrnbDecoderTest, CreateAmrnbDecoderTest) {
+    void *amrHandle;
+    int32_t status = GSMInitDecode(&amrHandle, (Word8 *)"AMRNBDecoder");
+    ASSERT_EQ(status, 0) << "Error creating AMR-NB decoder";
+    GSMDecodeFrameExit(&amrHandle);
+    ASSERT_EQ(amrHandle, nullptr) << "Error deleting AMR-NB decoder";
+}
+
+TEST_P(AmrnbDecoderTest, DecodeTest) {
+    string inputFile = gEnv->getRes() + GetParam();
+    mFpInput = fopen(inputFile.c_str(), "rb");
+    ASSERT_NE(mFpInput, nullptr) << "Error opening input file " << inputFile;
+
+    // Open the output file.
+    SF_INFO sfInfo;
+    SNDFILE *outFileHandle = openOutputFile(&sfInfo);
+    ASSERT_NE(outFileHandle, nullptr) << "Error opening output file for writing decoded output";
+
+    void *amrHandle;
+    int32_t status = GSMInitDecode(&amrHandle, (Word8 *)"AMRNBDecoder");
+    ASSERT_EQ(status, 0) << "Error creating AMR-NB decoder";
+
+    // Decode
+    int32_t decoderErr = DecodeFrames(amrHandle, outFileHandle);
+    ASSERT_EQ(decoderErr, 0) << "DecodeFrames returned error";
+
+    sf_close(outFileHandle);
+    GSMDecodeFrameExit(&amrHandle);
+    ASSERT_EQ(amrHandle, nullptr) << "Error deleting AMR-NB decoder";
+}
+
+TEST_P(AmrnbDecoderTest, ResetDecodeTest) {
+    string inputFile = gEnv->getRes() + GetParam();
+    mFpInput = fopen(inputFile.c_str(), "rb");
+    ASSERT_NE(mFpInput, nullptr) << "Error opening input file " << inputFile;
+
+    // Open the output file.
+    SF_INFO sfInfo;
+    SNDFILE *outFileHandle = openOutputFile(&sfInfo);
+    ASSERT_NE(outFileHandle, nullptr) << "Error opening output file for writing decoded output";
+
+    void *amrHandle;
+    int32_t status = GSMInitDecode(&amrHandle, (Word8 *)"AMRNBDecoder");
+    ASSERT_EQ(status, 0) << "Error creating AMR-NB decoder";
+
+    // Decode kNumFrameReset first
+    int32_t decoderErr = DecodeFrames(amrHandle, outFileHandle, kNumFrameReset);
+    ASSERT_EQ(decoderErr, 0) << "DecodeFrames returned error";
+
+    status = Speech_Decode_Frame_reset(amrHandle);
+    ASSERT_EQ(status, 0) << "Error resetting AMR-NB decoder";
+
+    // Start decoding again
+    decoderErr = DecodeFrames(amrHandle, outFileHandle);
+    ASSERT_EQ(decoderErr, 0) << "DecodeFrames returned error";
+
+    sf_close(outFileHandle);
+    GSMDecodeFrameExit(&amrHandle);
+    ASSERT_EQ(amrHandle, nullptr) << "Error deleting AMR-NB decoder";
+}
+
+INSTANTIATE_TEST_SUITE_P(AmrnbDecoderTestAll, AmrnbDecoderTest,
+                         ::testing::Values(("bbb_8000hz_1ch_8kbps_amrnb_30sec.amrnb"),
+                                           ("sine_amrnb_1ch_12kbps_8000hz.amrnb")));
+
+int main(int argc, char **argv) {
+    gEnv = new AmrnbDecTestEnvironment();
+    ::testing::AddGlobalTestEnvironment(gEnv);
+    ::testing::InitGoogleTest(&argc, argv);
+    int status = gEnv->initFromOptions(argc, argv);
+    if (status == 0) {
+        status = RUN_ALL_TESTS();
+        ALOGV("Test result = %d\n", status);
+    }
+    return status;
+}
diff --git a/media/libstagefright/codecs/amrnb/dec/test/Android.bp b/media/libstagefright/codecs/amrnb/dec/test/Android.bp
new file mode 100644
index 0000000..7a95cfa
--- /dev/null
+++ b/media/libstagefright/codecs/amrnb/dec/test/Android.bp
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+cc_test {
+    name: "AmrnbDecoderTest",
+    gtest: true,
+
+    srcs: [
+        "AmrnbDecoderTest.cpp",
+    ],
+
+    static_libs: [
+        "libstagefright_amrnb_common",
+        "libstagefright_amrnbdec",
+        "libaudioutils",
+        "libsndfile",
+    ],
+
+    shared_libs: [
+        "liblog",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+
+    sanitize: {
+        cfi: true,
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+    },
+}
diff --git a/media/libstagefright/codecs/amrnb/dec/test/README.md b/media/libstagefright/codecs/amrnb/dec/test/README.md
new file mode 100644
index 0000000..62e13ae
--- /dev/null
+++ b/media/libstagefright/codecs/amrnb/dec/test/README.md
@@ -0,0 +1,34 @@
+## Media Testing ##
+---
+#### AMR-NB Decoder :
+The AMR-NB decoder test suite validates the AMR-NB decoder available in libstagefright.
+
+Run the following steps to build the test suite:
+```
+m AmrnbDecoderTest
+```
+
+The 32-bit binaries will be created in the following path : ${OUT}/data/nativetest/
+
+The 64-bit binaries will be created in the following path : ${OUT}/data/nativetest64/
+
+To test the 64-bit binary, push the binary from nativetest64.
+```
+adb push ${OUT}/data/nativetest64/AmrnbDecoderTest/AmrnbDecoderTest /data/local/tmp/
+```
+
+To test 32-bit binary push binaries from nativetest.
+```
+adb push ${OUT}/data/nativetest/AmrnbDecoderTest/AmrnbDecoderTest /data/local/tmp/
+```
+
+The resource files for the tests are taken from [here](https://drive.google.com/drive/folders/13cM4tAaVFrmr-zGFqaAzFBbKs75pnm9b).
+Download the amr-nb folder and push all the files in it to /data/local/tmp/ on the device for testing.
+```
+adb push amr-nb/. /data/local/tmp/
+```
+
+usage: AmrnbDecoderTest -P \<path_to_folder\>
+```
+adb shell /data/local/tmp/AmrnbDecoderTest -P /data/local/tmp/
+```
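+
+Individual tests can be selected with gtest's standard `--gtest_filter` option; for example, to run only the parameterized decode tests:
+```
+adb shell /data/local/tmp/AmrnbDecoderTest -P /data/local/tmp/ --gtest_filter="AmrnbDecoderTestAll*"
+```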
diff --git a/media/libstagefright/codecs/amrwb/test/AmrwbDecTestEnvironment.h b/media/libstagefright/codecs/amrwb/test/AmrwbDecTestEnvironment.h
new file mode 100644
index 0000000..84d337d
--- /dev/null
+++ b/media/libstagefright/codecs/amrwb/test/AmrwbDecTestEnvironment.h
@@ -0,0 +1,72 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __AMRWBDEC_TEST_ENVIRONMENT_H__
+#define __AMRWBDEC_TEST_ENVIRONMENT_H__
+
+#include <gtest/gtest.h>
+
+#include <getopt.h>
+
+using namespace std;
+
+class AmrwbDecTestEnvironment : public ::testing::Environment {
+  public:
+    AmrwbDecTestEnvironment() : res("/data/local/tmp/") {}
+
+    // Parses the command line arguments
+    int initFromOptions(int argc, char **argv);
+
+    void setRes(const char *_res) { res = _res; }
+
+    const string getRes() const { return res; }
+
+  private:
+    string res;
+};
+
+int AmrwbDecTestEnvironment::initFromOptions(int argc, char **argv) {
+    static struct option options[] = {{"res", required_argument, 0, 'P'}, {0, 0, 0, 0}};
+
+    while (true) {
+        int index = 0;
+        int c = getopt_long(argc, argv, "P:", options, &index);
+        if (c == -1) {
+            break;
+        }
+
+        switch (c) {
+            case 'P':
+                setRes(optarg);
+                break;
+            default:
+                break;
+        }
+    }
+
+    if (optind < argc) {
+        fprintf(stderr,
+                "unrecognized option: %s\n\n"
+                "usage: %s <gtest options> <test options>\n\n"
+                "test options are:\n\n"
+                "-P, --path: Resource files directory location\n",
+                argv[optind ?: 1], argv[0]);
+        return 2;
+    }
+    return 0;
+}
+
+#endif  // __AMRWBDEC_TEST_ENVIRONMENT_H__
diff --git a/media/libstagefright/codecs/amrwb/test/AmrwbDecoderTest.cpp b/media/libstagefright/codecs/amrwb/test/AmrwbDecoderTest.cpp
new file mode 100644
index 0000000..2aad81b
--- /dev/null
+++ b/media/libstagefright/codecs/amrwb/test/AmrwbDecoderTest.cpp
@@ -0,0 +1,223 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "AmrwbDecoderTest"
+#define OUTPUT_FILE "/data/local/tmp/amrwbDecode.out"
+
+#include <utils/Log.h>
+
+#include <audio_utils/sndfile.h>
+#include <stdio.h>
+
+#include "pvamrwbdecoder.h"
+#include "pvamrwbdecoder_api.h"
+
+#include "AmrwbDecTestEnvironment.h"
+
+// Constants for AMR-WB.
+constexpr int32_t kInputBufferSize = 64;
+constexpr int32_t kSamplesPerFrame = 320;
+constexpr int32_t kBitsPerSample = 16;
+constexpr int32_t kSampleRate = 16000;
+constexpr int32_t kChannels = 1;
+constexpr int32_t kMaxSourceDataUnitSize = KAMRWB_NB_BITS_MAX * sizeof(int16_t);
+constexpr int32_t kOutputBufferSize = kSamplesPerFrame * kBitsPerSample / 8;
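+// Storage-format payload sizes in bytes for AMR-WB frame types 0-8 (6.60 kbps to 23.85 kbps);
+// -1 marks frame types that carry no fixed-size speech payload and are handled separately.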
+const int32_t kFrameSizes[16] = {17, 23, 32, 36, 40, 46, 50, 58, 60, -1, -1, -1, -1, -1, -1, -1};
+constexpr int32_t kNumFrameReset = 150;
+
+constexpr int32_t kMaxCount = 10;
+
+static AmrwbDecTestEnvironment *gEnv = nullptr;
+
+class AmrwbDecoderTest : public ::testing::TestWithParam<string> {
+  public:
+    AmrwbDecoderTest() : mFpInput(nullptr) {}
+
+    ~AmrwbDecoderTest() {
+        if (mFpInput) {
+            fclose(mFpInput);
+            mFpInput = nullptr;
+        }
+    }
+
+    FILE *mFpInput;
+    int32_t DecodeFrames(int16_t *decoderCookie, void *decoderBuf, SNDFILE *outFileHandle,
+                         int32_t frameCount = INT32_MAX);
+    SNDFILE *openOutputFile(SF_INFO *sfInfo);
+};
+
+SNDFILE *AmrwbDecoderTest::openOutputFile(SF_INFO *sfInfo) {
+    memset(sfInfo, 0, sizeof(SF_INFO));
+    sfInfo->channels = kChannels;
+    sfInfo->format = SF_FORMAT_WAV | SF_FORMAT_PCM_16;
+    sfInfo->samplerate = kSampleRate;
+    SNDFILE *outFileHandle = sf_open(OUTPUT_FILE, SFM_WRITE, sfInfo);
+    return outFileHandle;
+}
+
+int32_t AmrwbDecoderTest::DecodeFrames(int16_t *decoderCookie, void *decoderBuf,
+                                       SNDFILE *outFileHandle, int32_t frameCount) {
+    uint8_t inputBuf[kInputBufferSize];
+    int16_t inputSampleBuf[kMaxSourceDataUnitSize];
+    int16_t outputBuf[kOutputBufferSize];
+
+    while (frameCount > 0) {
+        uint8_t modeByte;
+        int32_t bytesRead = fread(&modeByte, 1, 1, mFpInput);
+        if (bytesRead != 1) break;
+
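+        // In the AMR-WB storage format (RFC 4867), each frame starts with a one-octet header
+        // whose bits 3-6 hold the frame type (FT) index, extracted below.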
+        int16 mode = ((modeByte >> 3) & 0x0f);
+        if (mode >= 9) {
+            // Produce silence for comfort noise, speech lost and no data.
+            int32_t outputBufferSize = kSamplesPerFrame * kBitsPerSample / 8;
+            memset(outputBuf, 0, outputBufferSize);
+        } else {
+            // Read rest of the frame.
+            int32_t frameSize = kFrameSizes[mode];
+            // AMR-WB file format cannot have mode 10, 11, 12 and 13.
+            if (frameSize < 0) {
+                ALOGE("Illegal frame mode");
+                return -1;
+            }
+            bytesRead = fread(inputBuf, 1, frameSize, mFpInput);
+            if (bytesRead != frameSize) break;
+
+            int16 frameMode = mode;
+            int16 frameType;
+            RX_State_wb rx_state;
+            mime_unsorting(inputBuf, inputSampleBuf, &frameType, &frameMode, 1, &rx_state);
+
+            int16_t numSamplesOutput;
+            pvDecoder_AmrWb(frameMode, inputSampleBuf, outputBuf, &numSamplesOutput, decoderBuf,
+                            frameType, decoderCookie);
+            if (numSamplesOutput != kSamplesPerFrame) {
+                ALOGE("Failed to decode the input file");
+                return -1;
+            }
+            for (int count = 0; count < kSamplesPerFrame; ++count) {
+                /* Delete the 2 LSBs (14-bit output) */
+                outputBuf[count] &= 0xfffc;
+            }
+        }
+        sf_writef_short(outFileHandle, outputBuf, kSamplesPerFrame / kChannels);
+        frameCount--;
+    }
+    return 0;
+}
+
+TEST_F(AmrwbDecoderTest, MultiCreateAmrwbDecoderTest) {
+    uint32_t memRequirements = pvDecoder_AmrWbMemRequirements();
+    void *decoderBuf = malloc(memRequirements);
+    ASSERT_NE(decoderBuf, nullptr)
+            << "Failed to allocate decoder memory of size " << memRequirements;
+
+    // Create AMR-WB decoder instance.
+    void *amrHandle = nullptr;
+    int16_t *decoderCookie;
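+    // Re-initialize the decoder into the same buffer kMaxCount times to verify that
+    // repeated initialization succeeds without reallocating memory.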
+    for (int count = 0; count < kMaxCount; count++) {
+        pvDecoder_AmrWb_Init(&amrHandle, decoderBuf, &decoderCookie);
+        ASSERT_NE(amrHandle, nullptr) << "Failed to initialize decoder";
+        ALOGV("Decoder created successfully");
+    }
+    if (decoderBuf) {
+        free(decoderBuf);
+        decoderBuf = nullptr;
+    }
+}
+
+TEST_P(AmrwbDecoderTest, DecodeTest) {
+    uint32_t memRequirements = pvDecoder_AmrWbMemRequirements();
+    void *decoderBuf = malloc(memRequirements);
+    ASSERT_NE(decoderBuf, nullptr)
+            << "Failed to allocate decoder memory of size " << memRequirements;
+
+    void *amrHandle = nullptr;
+    int16_t *decoderCookie;
+    pvDecoder_AmrWb_Init(&amrHandle, decoderBuf, &decoderCookie);
+    ASSERT_NE(amrHandle, nullptr) << "Failed to initialize decoder";
+
+    string inputFile = gEnv->getRes() + GetParam();
+    mFpInput = fopen(inputFile.c_str(), "rb");
+    ASSERT_NE(mFpInput, nullptr) << "Error opening input file " << inputFile;
+
+    // Open the output file.
+    SF_INFO sfInfo;
+    SNDFILE *outFileHandle = openOutputFile(&sfInfo);
+    ASSERT_NE(outFileHandle, nullptr) << "Error opening output file for writing decoded output";
+
+    int32_t decoderErr = DecodeFrames(decoderCookie, decoderBuf, outFileHandle);
+    ASSERT_EQ(decoderErr, 0) << "DecodeFrames returned error";
+
+    sf_close(outFileHandle);
+    if (decoderBuf) {
+        free(decoderBuf);
+        decoderBuf = nullptr;
+    }
+}
+
+TEST_P(AmrwbDecoderTest, ResetDecoderTest) {
+    uint32_t memRequirements = pvDecoder_AmrWbMemRequirements();
+    void *decoderBuf = malloc(memRequirements);
+    ASSERT_NE(decoderBuf, nullptr)
+            << "Failed to allocate decoder memory of size " << memRequirements;
+
+    void *amrHandle = nullptr;
+    int16_t *decoderCookie;
+    pvDecoder_AmrWb_Init(&amrHandle, decoderBuf, &decoderCookie);
+    ASSERT_NE(amrHandle, nullptr) << "Failed to initialize decoder";
+
+    string inputFile = gEnv->getRes() + GetParam();
+    mFpInput = fopen(inputFile.c_str(), "rb");
+    ASSERT_NE(mFpInput, nullptr) << "Error opening input file " << inputFile;
+
+    // Open the output file.
+    SF_INFO sfInfo;
+    SNDFILE *outFileHandle = openOutputFile(&sfInfo);
+    ASSERT_NE(outFileHandle, nullptr) << "Error opening output file for writing decoded output";
+
+    // Decode kNumFrameReset frames first
+    int32_t decoderErr = DecodeFrames(decoderCookie, decoderBuf, outFileHandle, kNumFrameReset);
+    ASSERT_EQ(decoderErr, 0) << "DecodeFrames returned error";
+
+    // Reset Decoder
+    pvDecoder_AmrWb_Reset(decoderBuf, 1);
+
+    // Start decoding again
+    decoderErr = DecodeFrames(decoderCookie, decoderBuf, outFileHandle);
+    ASSERT_EQ(decoderErr, 0) << "DecodeFrames returned error";
+
+    sf_close(outFileHandle);
+    if (decoderBuf) {
+        free(decoderBuf);
+    }
+}
+
+INSTANTIATE_TEST_SUITE_P(AmrwbDecoderTestAll, AmrwbDecoderTest,
+                         ::testing::Values(("bbb_amrwb_1ch_14kbps_16000hz.amrwb"),
+                                           ("bbb_16000hz_1ch_9kbps_amrwb_30sec.amrwb")));
+
+int main(int argc, char **argv) {
+    gEnv = new AmrwbDecTestEnvironment();
+    ::testing::AddGlobalTestEnvironment(gEnv);
+    ::testing::InitGoogleTest(&argc, argv);
+    int status = gEnv->initFromOptions(argc, argv);
+    if (status == 0) {
+        status = RUN_ALL_TESTS();
+        ALOGV("Test result = %d\n", status);
+    }
+    return status;
+}
diff --git a/media/libstagefright/codecs/amrwb/test/Android.bp b/media/libstagefright/codecs/amrwb/test/Android.bp
new file mode 100644
index 0000000..968215a
--- /dev/null
+++ b/media/libstagefright/codecs/amrwb/test/Android.bp
@@ -0,0 +1,47 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+cc_test {
+    name: "AmrwbDecoderTest",
+    gtest: true,
+
+    srcs: [
+        "AmrwbDecoderTest.cpp",
+    ],
+
+    static_libs: [
+        "libstagefright_amrwbdec",
+        "libsndfile",
+        "libaudioutils",
+    ],
+
+    shared_libs: [
+        "liblog",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+
+    sanitize: {
+        cfi: true,
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+    },
+}
diff --git a/media/libstagefright/codecs/amrwb/test/README.md b/media/libstagefright/codecs/amrwb/test/README.md
new file mode 100644
index 0000000..502a20d
--- /dev/null
+++ b/media/libstagefright/codecs/amrwb/test/README.md
@@ -0,0 +1,34 @@
+## Media Testing ##
+---
+#### AMR-WB Decoder:
+The AMR-WB decoder test suite validates the AMR-WB decoder available in libstagefright.
+
+Run the following command to build the test suite:
+```
+m AmrwbDecoderTest
+```
+
+The 32-bit binaries will be created in the following path: ${OUT}/data/nativetest/
+
+The 64-bit binaries will be created in the following path: ${OUT}/data/nativetest64/
+
+To test the 64-bit binary, push it from nativetest64.
+```
+adb push ${OUT}/data/nativetest64/AmrwbDecoderTest/AmrwbDecoderTest /data/local/tmp/
+```
+
+To test the 32-bit binary, push it from nativetest.
+```
+adb push ${OUT}/data/nativetest/AmrwbDecoderTest/AmrwbDecoderTest /data/local/tmp/
+```
+
+The resource files for the tests are taken from [here](https://drive.google.com/drive/folders/13cM4tAaVFrmr-zGFqaAzFBbKs75pnm9b).
+Download the amr-wb folder and push all the files in it to /data/local/tmp/ on the device for testing.
+```
+adb push amr-wb/. /data/local/tmp/
+```
+
+usage: AmrwbDecoderTest -P \<path_to_folder\>
+```
+adb shell /data/local/tmp/AmrwbDecoderTest -P /data/local/tmp/
+```
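+
+The decoded output is written as a WAV file to /data/local/tmp/amrwbDecode.out (the OUTPUT_FILE path defined in AmrwbDecoderTest.cpp) and can be pulled back to the host for inspection:
+```
+adb pull /data/local/tmp/amrwbDecode.out
+```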
diff --git a/media/libstagefright/colorconversion/Android.bp b/media/libstagefright/colorconversion/Android.bp
index ba57497..6b08b08 100644
--- a/media/libstagefright/colorconversion/Android.bp
+++ b/media/libstagefright/colorconversion/Android.bp
@@ -15,6 +15,11 @@
         "libnativewindow",
     ],
 
+    header_libs: [
+        "libstagefright_headers",
+        "libstagefright_foundation_headers",
+    ],
+
     static_libs: ["libyuv_static"],
 
     cflags: ["-Werror"],
diff --git a/media/libstagefright/flac/dec/Android.bp b/media/libstagefright/flac/dec/Android.bp
index 7ebe71f..d65a663 100644
--- a/media/libstagefright/flac/dec/Android.bp
+++ b/media/libstagefright/flac/dec/Android.bp
@@ -1,4 +1,4 @@
-cc_library_shared {
+cc_library {
     name: "libstagefright_flacdec",
     vendor_available: true,
 
diff --git a/media/libstagefright/foundation/Android.bp b/media/libstagefright/foundation/Android.bp
index 5485f6d..7398690 100644
--- a/media/libstagefright/foundation/Android.bp
+++ b/media/libstagefright/foundation/Android.bp
@@ -2,6 +2,7 @@
     name: "libstagefright_foundation_headers",
     export_include_dirs: ["include"],
     vendor_available: true,
+    host_supported: true,
 }
 
 cc_defaults {
diff --git a/media/libstagefright/id3/test/Android.bp b/media/libstagefright/id3/test/Android.bp
new file mode 100644
index 0000000..9d26eec
--- /dev/null
+++ b/media/libstagefright/id3/test/Android.bp
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+cc_test {
+    name: "ID3Test",
+    gtest: true,
+
+    srcs: ["ID3Test.cpp"],
+
+    static_libs: [
+        "libdatasource",
+        "libstagefright_id3",
+        "libstagefright",
+        "libstagefright_foundation",
+    ],
+
+    shared_libs: [
+        "libutils",
+        "liblog",
+        "libbinder",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+
+    sanitize: {
+        cfi: true,
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+    },
+}
diff --git a/media/libstagefright/id3/test/ID3Test.cpp b/media/libstagefright/id3/test/ID3Test.cpp
new file mode 100644
index 0000000..a8f1470
--- /dev/null
+++ b/media/libstagefright/id3/test/ID3Test.cpp
@@ -0,0 +1,217 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ID3Test"
+#include <utils/Log.h>
+
+#include <ctype.h>
+#include <string>
+#include <sys/stat.h>
+#include <datasource/FileSource.h>
+
+#include <media/stagefright/foundation/hexdump.h>
+#include <ID3.h>
+
+#include "ID3TestEnvironment.h"
+
+using namespace android;
+
+static ID3TestEnvironment *gEnv = nullptr;
+
+class ID3tagTest : public ::testing::TestWithParam<string> {};
+class ID3versionTest : public ::testing::TestWithParam<pair<string, int>> {};
+class ID3textTagTest : public ::testing::TestWithParam<pair<string, int>> {};
+class ID3albumArtTest : public ::testing::TestWithParam<pair<string, bool>> {};
+class ID3multiAlbumArtTest : public ::testing::TestWithParam<pair<string, int>> {};
+
+TEST_P(ID3tagTest, TagTest) {
+    string path = gEnv->getRes() + GetParam();
+    sp<FileSource> file = new FileSource(path.c_str());
+    ASSERT_EQ(file->initCheck(), (status_t)OK) << "File initialization failed! \n";
+    ID3 tag(file.get());
+    ASSERT_TRUE(tag.isValid()) << "No valid ID3 tag found for " << path.c_str() << "\n";
+
+    ID3::Iterator it(tag, nullptr);
+    while (!it.done()) {
+        String8 id;
+        it.getID(&id);
+        ASSERT_GT(id.length(), 0) << "No ID tag found! \n";
+        ALOGV("Found ID tag: %s\n", String8(id).c_str());
+        it.next();
+    }
+}
+
+TEST_P(ID3versionTest, VersionTest) {
+    int versionNumber = GetParam().second;
+    string path = gEnv->getRes() + GetParam().first;
+    sp<android::FileSource> file = new FileSource(path.c_str());
+    ASSERT_EQ(file->initCheck(), (status_t)OK) << "File initialization failed! \n";
+
+    ID3 tag(file.get());
+    ASSERT_TRUE(tag.isValid()) << "No valid ID3 tag found for " << path.c_str() << "\n";
+    ASSERT_TRUE(tag.version() >= versionNumber)
+            << "Expected version >= " << versionNumber << ", found version: " << tag.version();
+}
+
+TEST_P(ID3textTagTest, TextTagTest) {
+    int numTextFrames = GetParam().second;
+    string path = gEnv->getRes() + GetParam().first;
+    sp<android::FileSource> file = new FileSource(path.c_str());
+    ASSERT_EQ(file->initCheck(), (status_t)OK) << "File initialization failed! \n";
+
+    ID3 tag(file.get());
+    ASSERT_TRUE(tag.isValid()) << "No valid ID3 tag found for " << path.c_str() << "\n";
+    int countTextFrames = 0;
+    ID3::Iterator it(tag, nullptr);
+    while (!it.done()) {
+        String8 id;
+        it.getID(&id);
+        ASSERT_GT(id.length(), 0);
+        if (id[0] == 'T') {
+            String8 text;
+            countTextFrames++;
+            it.getString(&text);
+            ALOGV("Found text frame %s : %s \n", id.string(), text.string());
+        }
+        it.next();
+    }
+    ASSERT_EQ(countTextFrames, numTextFrames)
+            << "Expected " << numTextFrames << " text frames, found " << countTextFrames;
+}
+
+TEST_P(ID3albumArtTest, AlbumArtTest) {
+    bool albumArtPresent = GetParam().second;
+    string path = gEnv->getRes() + GetParam().first;
+    sp<android::FileSource> file = new FileSource(path.c_str());
+    ASSERT_EQ(file->initCheck(), (status_t)OK) << "File initialization failed! \n";
+
+    ID3 tag(file.get());
+    ASSERT_TRUE(tag.isValid()) << "No valid ID3 tag found for " << path.c_str() << "\n";
+    size_t dataSize;
+    String8 mime;
+    const void *data = tag.getAlbumArt(&dataSize, &mime);
+
+    if (albumArtPresent) {
+        if (data) {
+            ALOGV("Found album art: size = %zu mime = %s \n", dataSize, mime.string());
+        }
+        ASSERT_NE(data, nullptr) << "Expected album art, found none in " << path;
+    } else {
+        ASSERT_EQ(data, nullptr) << "Found album art when expected none!";
+    }
+#if (LOG_NDEBUG == 0)
+    hexdump(data, dataSize > 128 ? 128 : dataSize);
+#endif
+}
+
+TEST_P(ID3multiAlbumArtTest, MultiAlbumArtTest) {
+    int numAlbumArt = GetParam().second;
+    string path = gEnv->getRes() + GetParam().first;
+    sp<android::FileSource> file = new FileSource(path.c_str());
+    ASSERT_EQ(file->initCheck(), (status_t)OK) << "File initialization failed! \n";
+
+    ID3 tag(file.get());
+    ASSERT_TRUE(tag.isValid()) << "No valid ID3 tag found for " << path.c_str() << "\n";
+    int count = 0;
+    ID3::Iterator it(tag, nullptr);
+    while (!it.done()) {
+        String8 id;
+        it.getID(&id);
+        ASSERT_GT(id.length(), 0);
+        // Check if the tag is an "APIC/PIC" tag.
+        if (String8(id) == "APIC" || String8(id) == "PIC") {
+            count++;
+            size_t dataSize;
+            String8 mime;
+            const void *data = tag.getAlbumArt(&dataSize, &mime);
+            if (data) {
+                ALOGV("Found album art: size = %zu mime = %s \n", dataSize, mime.string());
+#if (LOG_NDEBUG == 0)
+                hexdump(data, dataSize > 128 ? 128 : dataSize);
+#endif
+            }
+            ASSERT_NE(data, nullptr) << "Expected album art, found none in " << path;
+        }
+        it.next();
+    }
+    ASSERT_EQ(count, numAlbumArt) << "Found " << count << " album arts, expected " << numAlbumArt
+                                  << " album arts! \n";
+}
+
+INSTANTIATE_TEST_SUITE_P(id3TestAll, ID3tagTest,
+                         ::testing::Values("bbb_44100hz_2ch_128kbps_mp3_30sec.mp3",
+                                           "bbb_44100hz_2ch_128kbps_mp3_30sec_1_image.mp3",
+                                           "bbb_44100hz_2ch_128kbps_mp3_30sec_2_image.mp3",
+                                           "bbb_44100hz_2ch_128kbps_mp3_5mins.mp3",
+                                           "bbb_44100hz_2ch_128kbps_mp3_5mins_1_image.mp3",
+                                           "bbb_44100hz_2ch_128kbps_mp3_5mins_2_image.mp3",
+                                           "bbb_44100hz_2ch_128kbps_mp3_5mins_largeSize.mp3",
+                                           "bbb_44100hz_2ch_128kbps_mp3_30sec_moreTextFrames.mp3"));
+
+INSTANTIATE_TEST_SUITE_P(
+        id3TestAll, ID3versionTest,
+        ::testing::Values(make_pair("bbb_44100hz_2ch_128kbps_mp3_30sec.mp3", 4),
+                          make_pair("bbb_44100hz_2ch_128kbps_mp3_30sec_1_image.mp3", 4),
+                          make_pair("bbb_44100hz_2ch_128kbps_mp3_30sec_2_image.mp3", 4),
+                          make_pair("bbb_44100hz_2ch_128kbps_mp3_5mins.mp3", 4),
+                          make_pair("bbb_44100hz_2ch_128kbps_mp3_5mins_1_image.mp3", 4),
+                          make_pair("bbb_44100hz_2ch_128kbps_mp3_5mins_2_image.mp3", 4),
+                          make_pair("bbb_44100hz_2ch_128kbps_mp3_5mins_largeSize.mp3", 4),
+                          make_pair("bbb_44100hz_2ch_128kbps_mp3_30sec_moreTextFrames.mp3", 4)));
+
+INSTANTIATE_TEST_SUITE_P(
+        id3TestAll, ID3textTagTest,
+        ::testing::Values(make_pair("bbb_44100hz_2ch_128kbps_mp3_30sec.mp3", 1),
+                          make_pair("bbb_44100hz_2ch_128kbps_mp3_30sec_1_image.mp3", 1),
+                          make_pair("bbb_44100hz_2ch_128kbps_mp3_30sec_2_image.mp3", 1),
+                          make_pair("bbb_44100hz_2ch_128kbps_mp3_5mins.mp3", 1),
+                          make_pair("bbb_44100hz_2ch_128kbps_mp3_5mins_1_image.mp3", 1),
+                          make_pair("bbb_44100hz_2ch_128kbps_mp3_5mins_2_image.mp3", 1),
+                          make_pair("bbb_44100hz_2ch_128kbps_mp3_5mins_largeSize.mp3", 1),
+                          make_pair("bbb_44100hz_2ch_128kbps_mp3_30sec_moreTextFrames.mp3", 5)));
+
+INSTANTIATE_TEST_SUITE_P(
+        id3TestAll, ID3albumArtTest,
+        ::testing::Values(make_pair("bbb_44100hz_2ch_128kbps_mp3_30sec.mp3", false),
+                          make_pair("bbb_44100hz_2ch_128kbps_mp3_30sec_1_image.mp3", true),
+                          make_pair("bbb_44100hz_2ch_128kbps_mp3_30sec_2_image.mp3", true),
+                          make_pair("bbb_44100hz_2ch_128kbps_mp3_5mins.mp3", false),
+                          make_pair("bbb_44100hz_2ch_128kbps_mp3_5mins_1_image.mp3", true),
+                          make_pair("bbb_44100hz_2ch_128kbps_mp3_5mins_2_image.mp3", true),
+                          make_pair("bbb_44100hz_2ch_128kbps_mp3_5mins_largeSize.mp3", true)));
+
+INSTANTIATE_TEST_SUITE_P(
+        id3TestAll, ID3multiAlbumArtTest,
+        ::testing::Values(make_pair("bbb_44100hz_2ch_128kbps_mp3_30sec.mp3", 0),
+                          make_pair("bbb_44100hz_2ch_128kbps_mp3_5mins.mp3", 0),
+                          make_pair("bbb_44100hz_2ch_128kbps_mp3_30sec_1_image.mp3", 1),
+                          make_pair("bbb_44100hz_2ch_128kbps_mp3_5mins_1_image.mp3", 1),
+                          make_pair("bbb_44100hz_2ch_128kbps_mp3_30sec_2_image.mp3", 2),
+                          make_pair("bbb_44100hz_2ch_128kbps_mp3_5mins_2_image.mp3", 2),
+                          make_pair("bbb_44100hz_2ch_128kbps_mp3_5mins_largeSize.mp3", 3)));
+
+int main(int argc, char **argv) {
+    gEnv = new ID3TestEnvironment();
+    ::testing::AddGlobalTestEnvironment(gEnv);
+    ::testing::InitGoogleTest(&argc, argv);
+    int status = gEnv->initFromOptions(argc, argv);
+    if (status == 0) {
+        status = RUN_ALL_TESTS();
+        ALOGI("ID3 Test result = %d\n", status);
+    }
+    return status;
+}
diff --git a/media/libstagefright/id3/test/ID3TestEnvironment.h b/media/libstagefright/id3/test/ID3TestEnvironment.h
new file mode 100644
index 0000000..2229718
--- /dev/null
+++ b/media/libstagefright/id3/test/ID3TestEnvironment.h
@@ -0,0 +1,73 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __ID3_TEST_ENVIRONMENT_H__
+#define __ID3_TEST_ENVIRONMENT_H__
+
+#include <gtest/gtest.h>
+
+#include <getopt.h>
+
+using namespace std;
+
+class ID3TestEnvironment : public ::testing::Environment {
+  public:
+    ID3TestEnvironment() : res("/data/local/tmp/") {}
+
+    // Parses the command line arguments
+    int initFromOptions(int argc, char **argv);
+
+    void setRes(const char *_res) { res = _res; }
+
+    const string getRes() const { return res; }
+
+  private:
+    string res;
+};
+
+int ID3TestEnvironment::initFromOptions(int argc, char **argv) {
+    static struct option options[] = {{"path", required_argument, 0, 'P'}, {0, 0, 0, 0}};
+
+    while (true) {
+        int index = 0;
+        int c = getopt_long(argc, argv, "P:", options, &index);
+        if (c == -1) {
+            break;
+        }
+
+        switch (c) {
+            case 'P': {
+                setRes(optarg);
+                break;
+            }
+            default:
+                break;
+        }
+    }
+
+    if (optind < argc) {
+        fprintf(stderr,
+                "unrecognized option: %s\n\n"
+                "usage: %s <gtest options> <test options>\n\n"
+                "test options are:\n\n"
+                "-P, --path: Resource files directory location\n",
+                argv[optind ?: 1], argv[0]);
+        return 2;
+    }
+    return 0;
+}
+
+#endif  // __ID3_TEST_ENVIRONMENT_H__
diff --git a/media/libstagefright/id3/test/README.md b/media/libstagefright/id3/test/README.md
new file mode 100644
index 0000000..72f730c
--- /dev/null
+++ b/media/libstagefright/id3/test/README.md
@@ -0,0 +1,34 @@
+## Media Testing ##
+---
+#### ID3 Test:
+The ID3 Test Suite validates the ID3 parser available in libstagefright.
+
+Run the following command in the id3 folder to build the test suite:
+```
+m ID3Test
+```
+
+The 32-bit binaries will be created in the following path: ${OUT}/data/nativetest/
+
+The 64-bit binaries will be created in the following path: ${OUT}/data/nativetest64/
+
+To test the 64-bit binary, push it from nativetest64.
+```
+adb push ${OUT}/data/nativetest64/ID3Test/ID3Test /data/local/tmp/
+```
+
+To test the 32-bit binary, push it from nativetest.
+```
+adb push ${OUT}/data/nativetest/ID3Test/ID3Test /data/local/tmp/
+```
+
+The resource files for the tests are taken from [here](https://drive.google.com/drive/folders/1pt5HFVSysbqfyqY1sVJ9MTupZKCdqjYZ).
+Download the ID3 folder and push all the files in it to /data/local/tmp/ID3 on the device for testing.
+```
+adb push ID3/. /data/local/tmp/ID3
+```
+
+usage: ID3Test -P \<path_to_folder\>
+```
+adb shell /data/local/tmp/ID3Test -P /data/local/tmp/ID3/
+```
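+
+To list the tests in the binary without running them, gtest's standard --gtest_list_tests flag can be used:
+```
+adb shell /data/local/tmp/ID3Test --gtest_list_tests
+```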
diff --git a/media/libstagefright/include/ACodecBufferChannel.h b/media/libstagefright/include/ACodecBufferChannel.h
index 5f65575..da962d1 100644
--- a/media/libstagefright/include/ACodecBufferChannel.h
+++ b/media/libstagefright/include/ACodecBufferChannel.h
@@ -81,6 +81,20 @@
             const CryptoPlugin::SubSample *subSamples,
             size_t numSubSamples,
             AString *errorDetailMsg) override;
+    virtual status_t attachBuffer(
+            const std::shared_ptr<C2Buffer> &c2Buffer,
+            const sp<MediaCodecBuffer> &buffer) override;
+    virtual status_t attachEncryptedBuffer(
+            const sp<hardware::HidlMemory> &memory,
+            bool secure,
+            const uint8_t *key,
+            const uint8_t *iv,
+            CryptoPlugin::Mode mode,
+            CryptoPlugin::Pattern pattern,
+            size_t offset,
+            const CryptoPlugin::SubSample *subSamples,
+            size_t numSubSamples,
+            const sp<MediaCodecBuffer> &buffer) override;
     virtual status_t renderOutputBuffer(
             const sp<MediaCodecBuffer> &buffer, int64_t timestampNs) override;
     virtual status_t discardBuffer(const sp<MediaCodecBuffer> &buffer) override;
@@ -118,12 +132,15 @@
     void drainThisBuffer(IOMX::buffer_id bufferID, OMX_U32 omxFlags);
 
 private:
+    int32_t getHeapSeqNum(const sp<HidlMemory> &memory);
+
     const sp<AMessage> mInputBufferFilled;
     const sp<AMessage> mOutputBufferDrained;
 
     sp<MemoryDealer> mDealer;
     sp<IMemory> mDecryptDestination;
     int32_t mHeapSeqNum;
+    std::map<wp<HidlMemory>, int32_t> mHeapSeqNumMap;
     sp<HidlMemory> mHidlMemory;
 
     // These should only be accessed via std::atomic_* functions.
diff --git a/media/libstagefright/include/FrameDecoder.h b/media/libstagefright/include/FrameDecoder.h
index 353c957..19ae0e3 100644
--- a/media/libstagefright/include/FrameDecoder.h
+++ b/media/libstagefright/include/FrameDecoder.h
@@ -22,7 +22,7 @@
 
 #include <media/stagefright/foundation/AString.h>
 #include <media/stagefright/foundation/ABase.h>
-#include <media/MediaSource.h>
+#include <media/stagefright/MediaSource.h>
 #include <media/openmax/OMX_Video.h>
 #include <ui/GraphicTypes.h>
 
diff --git a/media/libstagefright/include/media/stagefright/AACWriter.h b/media/libstagefright/include/media/stagefright/AACWriter.h
index 7c63ddd..2671138 100644
--- a/media/libstagefright/include/media/stagefright/AACWriter.h
+++ b/media/libstagefright/include/media/stagefright/AACWriter.h
@@ -17,7 +17,7 @@
 #ifndef AAC_WRITER_H_
 #define AAC_WRITER_H_
 
-#include "foundation/ABase.h"
+#include "media/stagefright/foundation/ABase.h"
 #include <media/stagefright/MediaWriter.h>
 #include <utils/threads.h>
 
diff --git a/media/libstagefright/include/media/stagefright/ACodec.h b/media/libstagefright/include/media/stagefright/ACodec.h
index 7754de4..d56ec4f 100644
--- a/media/libstagefright/include/media/stagefright/ACodec.h
+++ b/media/libstagefright/include/media/stagefright/ACodec.h
@@ -158,6 +158,7 @@
         kFlagIsSecure                                 = 1,
         kFlagPushBlankBuffersToNativeWindowOnShutdown = 2,
         kFlagIsGrallocUsageProtected                  = 4,
+        kFlagPreregisterMetadataBuffers               = 8,
     };
 
     enum {
diff --git a/media/libstagefright/include/media/stagefright/AudioSource.h b/media/libstagefright/include/media/stagefright/AudioSource.h
index ad4ab47..451aa57 100644
--- a/media/libstagefright/include/media/stagefright/AudioSource.h
+++ b/media/libstagefright/include/media/stagefright/AudioSource.h
@@ -20,7 +20,7 @@
 
 #include <media/AudioRecord.h>
 #include <media/AudioSystem.h>
-#include <media/MediaSource.h>
+#include <media/stagefright/MediaSource.h>
 #include <media/MicrophoneInfo.h>
 #include <media/stagefright/MediaBuffer.h>
 #include <utils/List.h>
diff --git a/media/libstagefright/include/media/stagefright/CallbackMediaSource.h b/media/libstagefright/include/media/stagefright/CallbackMediaSource.h
index 33453fa..d2adbb9 100644
--- a/media/libstagefright/include/media/stagefright/CallbackMediaSource.h
+++ b/media/libstagefright/include/media/stagefright/CallbackMediaSource.h
@@ -17,7 +17,7 @@
 #ifndef CALLBACK_MEDIA_SOURCE_H_
 #define CALLBACK_MEDIA_SOURCE_H_
 
-#include <media/MediaSource.h>
+#include <media/stagefright/MediaSource.h>
 #include <media/stagefright/foundation/ABase.h>
 
 namespace android {
diff --git a/media/libstagefright/include/media/stagefright/CameraSource.h b/media/libstagefright/include/media/stagefright/CameraSource.h
index 3037b72..6f0d3b5 100644
--- a/media/libstagefright/include/media/stagefright/CameraSource.h
+++ b/media/libstagefright/include/media/stagefright/CameraSource.h
@@ -19,7 +19,7 @@
 #define CAMERA_SOURCE_H_
 
 #include <deque>
-#include <media/MediaSource.h>
+#include <media/stagefright/MediaSource.h>
 #include <media/stagefright/MediaBuffer.h>
 #include <camera/android/hardware/ICamera.h>
 #include <camera/ICameraRecordingProxy.h>
diff --git a/media/libstagefright/include/media/stagefright/CodecBase.h b/media/libstagefright/include/media/stagefright/CodecBase.h
index bc7881c..dd6df90 100644
--- a/media/libstagefright/include/media/stagefright/CodecBase.h
+++ b/media/libstagefright/include/media/stagefright/CodecBase.h
@@ -34,6 +34,8 @@
 #include <system/graphics.h>
 #include <utils/NativeHandle.h>
 
+class C2Buffer;
+
 namespace android {
 class BufferChannelBase;
 struct BufferProducerWrapper;
@@ -45,6 +47,7 @@
 class IMemory;
 
 namespace hardware {
+class HidlMemory;
 namespace cas {
 namespace native {
 namespace V1_0 {
@@ -295,6 +298,50 @@
             size_t numSubSamples,
             AString *errorDetailMsg) = 0;
     /**
+     * Attach a Codec 2.0 buffer to MediaCodecBuffer.
+     *
+     * @return    OK if successful;
+     *            -ENOENT if index is not recognized
+     *            -ENOSYS if attaching buffer is not possible or not supported
+     */
+    virtual status_t attachBuffer(
+            const std::shared_ptr<C2Buffer> &c2Buffer,
+            const sp<MediaCodecBuffer> &buffer) {
+        (void)c2Buffer;
+        (void)buffer;
+        return -ENOSYS;
+    }
+    /**
+     * Attach an encrypted HidlMemory buffer to an index
+     *
+     * @return    OK if successful;
+     *            -ENOENT if index is not recognized
+     *            -ENOSYS if attaching buffer is not possible or not supported
+     */
+    virtual status_t attachEncryptedBuffer(
+            const sp<hardware::HidlMemory> &memory,
+            bool secure,
+            const uint8_t *key,
+            const uint8_t *iv,
+            CryptoPlugin::Mode mode,
+            CryptoPlugin::Pattern pattern,
+            size_t offset,
+            const CryptoPlugin::SubSample *subSamples,
+            size_t numSubSamples,
+            const sp<MediaCodecBuffer> &buffer) {
+        (void)memory;
+        (void)secure;
+        (void)key;
+        (void)iv;
+        (void)mode;
+        (void)pattern;
+        (void)offset;
+        (void)subSamples;
+        (void)numSubSamples;
+        (void)buffer;
+        return -ENOSYS;
+    }
+    /**
      * Request buffer rendering at specified time.
      *
      * @param     timestampNs   nanosecond timestamp for rendering time.
diff --git a/media/libstagefright/include/media/stagefright/InterfaceUtils.h b/media/libstagefright/include/media/stagefright/InterfaceUtils.h
index 671f2ff..92ef543 100644
--- a/media/libstagefright/include/media/stagefright/InterfaceUtils.h
+++ b/media/libstagefright/include/media/stagefright/InterfaceUtils.h
@@ -19,7 +19,7 @@
 
 #include <utils/RefBase.h>
 #include <media/stagefright/RemoteMediaExtractor.h>
-#include <media/MediaSource.h>
+#include <media/stagefright/MediaSource.h>
 #include <android/IMediaExtractor.h>
 #include <media/IMediaSource.h>
 
diff --git a/media/libstagefright/include/media/stagefright/JPEGSource.h b/media/libstagefright/include/media/stagefright/JPEGSource.h
index 8ab3d11..53cb344 100644
--- a/media/libstagefright/include/media/stagefright/JPEGSource.h
+++ b/media/libstagefright/include/media/stagefright/JPEGSource.h
@@ -18,7 +18,7 @@
 
 #define JPEG_SOURCE_H_
 
-#include <media/MediaSource.h>
+#include <media/stagefright/MediaSource.h>
 
 namespace android {
 
diff --git a/media/libstagefright/include/media/stagefright/MPEG4Writer.h b/media/libstagefright/include/media/stagefright/MPEG4Writer.h
index 6f19023..34a7d55 100644
--- a/media/libstagefright/include/media/stagefright/MPEG4Writer.h
+++ b/media/libstagefright/include/media/stagefright/MPEG4Writer.h
@@ -23,8 +23,10 @@
 #include <media/stagefright/MediaWriter.h>
 #include <utils/List.h>
 #include <utils/threads.h>
+#include <map>
 #include <media/stagefright/foundation/AHandlerReflector.h>
 #include <media/stagefright/foundation/ALooper.h>
+#include <mutex>
 
 namespace android {
 
@@ -58,6 +60,10 @@
     void writeFourcc(const char *fourcc);
     void write(const void *data, size_t size);
     inline size_t write(const void *ptr, size_t size, size_t nmemb);
+    // Write to file system by calling ::write() or post error message to looper on failure.
+    void writeOrPostError(int fd, const void *buf, size_t count);
+    // Seek in the file by calling ::lseek64() or post error message to looper on failure.
+    void seekOrPostError(int fd, off64_t offset, int whence);
     void endBox();
     uint32_t interleaveDuration() const { return mInterleaveDurationUs; }
     status_t setInterleaveDuration(uint32_t duration);
@@ -80,6 +86,8 @@
 
     enum {
         kWhatSwitch                          = 'swch',
+        kWhatHandleIOError                   = 'ioer',
+        kWhatHandleFallocateError            = 'faer'
     };
 
     int  mFd;
@@ -88,14 +96,15 @@
     status_t mInitCheck;
     bool mIsRealTimeRecording;
     bool mUse4ByteNalLength;
-    bool mUse32BitOffset;
     bool mIsFileSizeLimitExplicitlyRequested;
     bool mPaused;
     bool mStarted;  // Writer thread + track threads started successfully
     bool mWriterThreadStarted;  // Only writer thread started successfully
     bool mSendNotify;
     off64_t mOffset;
-    off_t mMdatOffset;
+    off64_t mPreAllocateFileEndOffset;  // End of file offset during preallocation.
+    off64_t mMdatOffset;
+    off64_t mMdatEndOffset;  // End offset of mdat atom.
     uint8_t *mInMemoryCache;
     off64_t mInMemoryCacheOffset;
     off64_t mInMemoryCacheSize;
@@ -106,17 +115,24 @@
     uint32_t mInterleaveDurationUs;
     int32_t mTimeScale;
     int64_t mStartTimestampUs;
-    int32_t mStartTimeOffsetBFramesUs; // Start time offset when B Frames are present
+    int32_t mStartTimeOffsetBFramesUs;  // Longest offset needed for reordering tracks with B Frames
     int mLatitudex10000;
     int mLongitudex10000;
     bool mAreGeoTagsAvailable;
     int32_t mStartTimeOffsetMs;
     bool mSwitchPending;
+    bool mWriteSeekErr;
+    bool mFallocateErr;
+    bool mPreAllocationEnabled;
 
     sp<ALooper> mLooper;
     sp<AHandlerReflector<MPEG4Writer> > mReflector;
 
     Mutex mLock;
+    std::mutex mResetMutex;
+    std::mutex mFallocMutex;
+    bool mPreAllocFirstTime; // Pre-allocate space for file and track headers only once per file.
+    uint64_t mPrevAllTracksTotalMetaDataSizeEstimate;
 
     List<Track *> mTracks;
 
@@ -200,19 +216,23 @@
     } ItemProperty;
 
     bool mHasFileLevelMeta;
+    uint64_t mFileLevelMetaDataSize;
     bool mHasMoovBox;
     uint32_t mPrimaryItemId;
     uint32_t mAssociationEntryCount;
     uint32_t mNumGrids;
+    uint16_t mNextItemId;
     bool mHasRefs;
-    Vector<ItemInfo> mItems;
+    std::map<uint32_t, ItemInfo> mItems;
     Vector<ItemProperty> mProperties;
 
     // Writer thread handling
     status_t startWriterThread();
-    void stopWriterThread();
+    status_t stopWriterThread();
     static void *ThreadWrapper(void *me);
     void threadFunc();
+    void setupAndStartLooper();
+    void stopAndReleaseLooper();
 
     // Buffer a single chunk to be written out later.
     void bufferChunk(const Chunk& chunk);
@@ -259,11 +279,11 @@
     void addLengthPrefixedSample_l(MediaBuffer *buffer);
     void addMultipleLengthPrefixedSamples_l(MediaBuffer *buffer);
     uint16_t addProperty_l(const ItemProperty &);
+    status_t reserveItemId_l(size_t numItems, uint16_t *itemIdBase);
     uint16_t addItem_l(const ItemInfo &);
     void addRefs_l(uint16_t itemId, const ItemRefs &);
 
     bool exceedsFileSizeLimit();
-    bool use32BitFileOffset() const;
     bool exceedsFileDurationLimit();
     bool approachingFileSizeLimit();
     bool isFileStreamable() const;
@@ -284,6 +304,16 @@
     void writeIlst();
     void writeMoovLevelMetaBox();
 
+    /*
+     * Allocate space needed for MOOV atom in advance and maintain just enough before write
+     * of any data.  Stop writing and save MOOV atom if there was any error.
+     */
+    bool preAllocate(uint64_t wantSize);
+    /*
+     * Truncate file as per the size used for meta data and actual data in a session.
+     */
+    bool truncatePreAllocation();
+
     // HEIF writing
     void writeIlocBox();
     void writeInfeBox(uint16_t itemId, const char *type, uint32_t flags);
diff --git a/media/libstagefright/include/media/stagefright/MediaAdapter.h b/media/libstagefright/include/media/stagefright/MediaAdapter.h
index 589c827..177a9e9 100644
--- a/media/libstagefright/include/media/stagefright/MediaAdapter.h
+++ b/media/libstagefright/include/media/stagefright/MediaAdapter.h
@@ -17,7 +17,7 @@
 #ifndef MEDIA_ADAPTER_H
 #define MEDIA_ADAPTER_H
 
-#include <media/MediaSource.h>
+#include <media/stagefright/MediaSource.h>
 #include <media/stagefright/foundation/ABase.h>
 #include <media/stagefright/MediaBuffer.h>
 #include <media/stagefright/MetaData.h>
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index 5c5c23a..022c48e 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -29,6 +29,10 @@
 #include <media/stagefright/FrameRenderTracker.h>
 #include <utils/Vector.h>
 
+class C2Buffer;
+class C2GraphicBlock;
+class C2LinearBlock;
+
 namespace aidl {
 namespace android {
 namespace media {
@@ -65,7 +69,8 @@
 
 struct MediaCodec : public AHandler {
     enum ConfigureFlags {
-        CONFIGURE_FLAG_ENCODE   = 1,
+        CONFIGURE_FLAG_ENCODE           = 1,
+        CONFIGURE_FLAG_USE_BLOCK_MODEL  = 2,
     };
 
     enum BufferFlags {
@@ -157,6 +162,38 @@
             uint32_t flags,
             AString *errorDetailMsg = NULL);
 
+    status_t queueBuffer(
+            size_t index,
+            const std::shared_ptr<C2Buffer> &buffer,
+            int64_t presentationTimeUs,
+            uint32_t flags,
+            const sp<AMessage> &tunings,
+            AString *errorDetailMsg = NULL);
+
+    status_t queueEncryptedBuffer(
+            size_t index,
+            const sp<hardware::HidlMemory> &memory,
+            size_t offset,
+            const CryptoPlugin::SubSample *subSamples,
+            size_t numSubSamples,
+            const uint8_t key[16],
+            const uint8_t iv[16],
+            CryptoPlugin::Mode mode,
+            const CryptoPlugin::Pattern &pattern,
+            int64_t presentationTimeUs,
+            uint32_t flags,
+            const sp<AMessage> &tunings,
+            AString *errorDetailMsg = NULL);
+
+    std::shared_ptr<C2Buffer> decrypt(
+            const std::shared_ptr<C2Buffer> &buffer,
+            const CryptoPlugin::SubSample *subSamples,
+            size_t numSubSamples,
+            const uint8_t key[16],
+            const uint8_t iv[16],
+            CryptoPlugin::Mode mode,
+            const CryptoPlugin::Pattern &pattern);
+
     status_t dequeueInputBuffer(size_t *index, int64_t timeoutUs = 0ll);
 
     status_t dequeueOutputBuffer(
@@ -206,6 +243,29 @@
     static size_t CreateFramesRenderedMessage(
             const std::list<FrameRenderTracker::Info> &done, sp<AMessage> &msg);
 
+    static status_t CanFetchLinearBlock(
+            const std::vector<std::string> &names, bool *isCompatible);
+
+    static std::shared_ptr<C2LinearBlock> FetchLinearBlock(
+            size_t capacity, const std::vector<std::string> &names);
+
+    static status_t CanFetchGraphicBlock(
+            const std::vector<std::string> &names, bool *isCompatible);
+
+    static std::shared_ptr<C2GraphicBlock> FetchGraphicBlock(
+            int32_t width,
+            int32_t height,
+            int32_t format,
+            uint64_t usage,
+            const std::vector<std::string> &names);
+
+    template <typename T>
+    struct WrapperObject : public RefBase {
+        WrapperObject(const T& v) : value(v) {}
+        WrapperObject(T&& v) : value(std::move(v)) {}
+        T value;
+    };
+
 protected:
     virtual ~MediaCodec();
     virtual void onMessageReceived(const sp<AMessage> &msg);
@@ -282,6 +342,7 @@
         kFlagIsAsync                    = 1024,
         kFlagIsComponentAllocated       = 2048,
         kFlagPushBlankBuffersOnShutdown = 4096,
+        kFlagUseBlockModel              = 8192,
     };
 
     struct BufferInfo {
diff --git a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
index 16e207d..eb7e5f7 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
@@ -830,6 +830,7 @@
 constexpr int32_t BUFFER_FLAG_PARTIAL_FRAME = 8;
 constexpr int32_t BUFFER_FLAG_SYNC_FRAME = 1;
 constexpr int32_t CONFIGURE_FLAG_ENCODE = 1;
+constexpr int32_t CONFIGURE_FLAG_USE_BLOCK_MODEL = 2;
 constexpr int32_t CRYPTO_MODE_AES_CBC     = 2;
 constexpr int32_t CRYPTO_MODE_AES_CTR     = 1;
 constexpr int32_t CRYPTO_MODE_UNENCRYPTED = 0;
diff --git a/media/libstagefright/include/media/stagefright/MediaCodecSource.h b/media/libstagefright/include/media/stagefright/MediaCodecSource.h
index a68cc19..2f98af1 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodecSource.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodecSource.h
@@ -17,7 +17,7 @@
 #ifndef MediaCodecSource_H_
 #define MediaCodecSource_H_
 
-#include <media/MediaSource.h>
+#include <media/stagefright/MediaSource.h>
 #include <media/stagefright/foundation/ABase.h>
 #include <media/stagefright/foundation/AHandlerReflector.h>
 #include <media/stagefright/foundation/Mutexed.h>
diff --git a/media/libstagefright/include/media/stagefright/MediaErrors.h b/media/libstagefright/include/media/stagefright/MediaErrors.h
index 6f48c5d..5418f10 100644
--- a/media/libstagefright/include/media/stagefright/MediaErrors.h
+++ b/media/libstagefright/include/media/stagefright/MediaErrors.h
@@ -105,7 +105,8 @@
     ERROR_CAS_CARD_MUTE                      = CAS_ERROR_BASE - 15,
     ERROR_CAS_CARD_INVALID                   = CAS_ERROR_BASE - 16,
     ERROR_CAS_BLACKOUT                       = CAS_ERROR_BASE - 17,
-    ERROR_CAS_LAST_USED_ERRORCODE            = CAS_ERROR_BASE - 17,
+    ERROR_CAS_REBOOTING                      = CAS_ERROR_BASE - 18,
+    ERROR_CAS_LAST_USED_ERRORCODE            = CAS_ERROR_BASE - 18,
 
     ERROR_CAS_VENDOR_MAX                     = CAS_ERROR_BASE - 500,
     ERROR_CAS_VENDOR_MIN                     = CAS_ERROR_BASE - 999,
diff --git a/media/libstagefright/include/media/stagefright/MediaMuxer.h b/media/libstagefright/include/media/stagefright/MediaMuxer.h
index 69d6cde..7c75f74 100644
--- a/media/libstagefright/include/media/stagefright/MediaMuxer.h
+++ b/media/libstagefright/include/media/stagefright/MediaMuxer.h
@@ -22,7 +22,7 @@
 #include <utils/Vector.h>
 #include <utils/threads.h>
 
-#include "foundation/ABase.h"
+#include "media/stagefright/foundation/ABase.h"
 
 namespace android {
 
@@ -117,21 +117,24 @@
     status_t writeSampleData(const sp<ABuffer> &buffer, size_t trackIndex,
                              int64_t timeUs, uint32_t flags) ;
 
+    void notify(int msg, int ext1, int ext2);
+
 private:
     const OutputFormat mFormat;
     sp<MediaWriter> mWriter;
     Vector< sp<MediaAdapter> > mTrackList;  // Each track has its MediaAdapter.
     sp<MetaData> mFileMeta;  // Metadata for the whole file.
-
     Mutex mMuxerLock;
 
     enum State {
         UNINITIALIZED,
         INITIALIZED,
         STARTED,
-        STOPPED
+        STOPPED,
+        ERROR
     };
     State mState;
+    status_t mError;
 
     DISALLOW_EVIL_CONSTRUCTORS(MediaMuxer);
 };
diff --git a/media/libstagefright/include/media/stagefright/MediaWriter.h b/media/libstagefright/include/media/stagefright/MediaWriter.h
index 972ae1d..08e54b3 100644
--- a/media/libstagefright/include/media/stagefright/MediaWriter.h
+++ b/media/libstagefright/include/media/stagefright/MediaWriter.h
@@ -19,8 +19,9 @@
 #define MEDIA_WRITER_H_
 
 #include <utils/RefBase.h>
-#include <media/MediaSource.h>
+#include <media/stagefright/MediaSource.h>
 #include <media/IMediaRecorderClient.h>
+#include <media/stagefright/MediaMuxer.h>
 
 namespace android {
 
@@ -36,7 +37,7 @@
     virtual status_t stop() = 0;
     virtual status_t pause() = 0;
     virtual status_t setCaptureRate(float /* captureFps */) {
-        ALOGW("setCaptureRate unsupported");
+        ALOG(LOG_WARN, "MediaWriter", "setCaptureRate unsupported");
         return ERROR_UNSUPPORTED;
     }
 
@@ -45,6 +46,7 @@
     virtual void setListener(const sp<IMediaRecorderClient>& listener) {
         mListener = listener;
     }
+    virtual void setMuxerListener(const wp<MediaMuxer>& muxer) { mMuxer = muxer; }
 
     virtual status_t dump(int /*fd*/, const Vector<String16>& /*args*/) {
         return OK;
@@ -59,11 +61,17 @@
     int64_t mMaxFileSizeLimitBytes;
     int64_t mMaxFileDurationLimitUs;
     sp<IMediaRecorderClient> mListener;
+    wp<MediaMuxer> mMuxer;
 
     void notify(int msg, int ext1, int ext2) {
-        if (mListener != NULL) {
+        ALOG(LOG_VERBOSE, "MediaWriter", "notify msg:%d, ext1:%d, ext2:%d", msg, ext1, ext2);
+        if (mListener != nullptr) {
             mListener->notify(msg, ext1, ext2);
         }
+        sp<MediaMuxer> muxer = mMuxer.promote();
+        if (muxer != nullptr) {
+            muxer->notify(msg, ext1, ext2);
+        }
     }
 private:
     MediaWriter(const MediaWriter &);
diff --git a/media/libstagefright/include/media/stagefright/MetaDataBase.h b/media/libstagefright/include/media/stagefright/MetaDataBase.h
index e17093a..b9cd18a 100644
--- a/media/libstagefright/include/media/stagefright/MetaDataBase.h
+++ b/media/libstagefright/include/media/stagefright/MetaDataBase.h
@@ -113,8 +113,6 @@
     kKeyVideoProfile      = 'vprf',  // int32_t
     kKeyVideoLevel        = 'vlev',  // int32_t
 
-    // Set this key to enable authoring files in 64-bit offset
-    kKey64BitFileOffset   = 'fobt',  // int32_t (bool)
     kKey2ByteNalLength    = '2NAL',  // int32_t (bool)
 
     // Identify the file output format for authoring
diff --git a/media/libstagefright/include/media/stagefright/NuMediaExtractor.h b/media/libstagefright/include/media/stagefright/NuMediaExtractor.h
index 98e4b22..227cead 100644
--- a/media/libstagefright/include/media/stagefright/NuMediaExtractor.h
+++ b/media/libstagefright/include/media/stagefright/NuMediaExtractor.h
@@ -22,7 +22,7 @@
 #include <media/stagefright/foundation/ABase.h>
 #include <media/stagefright/foundation/AudioPresentationInfo.h>
 #include <android/IMediaExtractor.h>
-#include <media/MediaSource.h>
+#include <media/stagefright/MediaSource.h>
 #include <utils/Errors.h>
 #include <utils/KeyedVector.h>
 #include <utils/RefBase.h>
diff --git a/media/libstagefright/include/media/stagefright/RemoteMediaSource.h b/media/libstagefright/include/media/stagefright/RemoteMediaSource.h
index 03d3869..2cd23f0 100644
--- a/media/libstagefright/include/media/stagefright/RemoteMediaSource.h
+++ b/media/libstagefright/include/media/stagefright/RemoteMediaSource.h
@@ -18,7 +18,7 @@
 #define REMOTE_MEDIA_SOURCE_H_
 
 #include <media/IMediaSource.h>
-#include <media/MediaSource.h>
+#include <media/stagefright/MediaSource.h>
 #include <media/stagefright/foundation/ABase.h>
 
 namespace android {
diff --git a/media/libstagefright/include/media/stagefright/SimpleDecodingSource.h b/media/libstagefright/include/media/stagefright/SimpleDecodingSource.h
index 23defb4..a97ae23 100644
--- a/media/libstagefright/include/media/stagefright/SimpleDecodingSource.h
+++ b/media/libstagefright/include/media/stagefright/SimpleDecodingSource.h
@@ -17,7 +17,7 @@
 #ifndef SIMPLE_DECODING_SOURCE_H_
 #define SIMPLE_DECODING_SOURCE_H_
 
-#include <media/MediaSource.h>
+#include <media/stagefright/MediaSource.h>
 #include <media/stagefright/foundation/AString.h>
 #include <media/stagefright/foundation/Mutexed.h>
 
diff --git a/media/libstagefright/include/media/stagefright/foundation b/media/libstagefright/include/media/stagefright/foundation
deleted file mode 120000
index b9fd3b3..0000000
--- a/media/libstagefright/include/media/stagefright/foundation
+++ /dev/null
@@ -1 +0,0 @@
-../../../foundation/include/media/stagefright/foundation/
\ No newline at end of file
diff --git a/media/libstagefright/mpeg2ts/ATSParser.h b/media/libstagefright/mpeg2ts/ATSParser.h
index 0ff2d7e..49578d3 100644
--- a/media/libstagefright/mpeg2ts/ATSParser.h
+++ b/media/libstagefright/mpeg2ts/ATSParser.h
@@ -20,7 +20,7 @@
 
 #include <sys/types.h>
 
-#include <media/MediaSource.h>
+#include <media/stagefright/MediaSource.h>
 #include <media/stagefright/foundation/ABase.h>
 #include <media/stagefright/foundation/AMessage.h>
 #include <media/stagefright/foundation/AudioPresentationInfo.h>
diff --git a/media/libstagefright/mpeg2ts/Android.bp b/media/libstagefright/mpeg2ts/Android.bp
index a507b91..42afea3 100644
--- a/media/libstagefright/mpeg2ts/Android.bp
+++ b/media/libstagefright/mpeg2ts/Android.bp
@@ -29,7 +29,6 @@
 
     shared_libs: [
         "libcrypto",
-        "libmedia",
         "libhidlmemory",
         "android.hardware.cas.native@1.0",
         "android.hidl.memory@1.0",
@@ -37,9 +36,13 @@
     ],
 
     header_libs: [
+        "libmedia_headers",
+        "libaudioclient_headers",
         "media_ndk_headers",
     ],
 
+    export_include_dirs: ["."],
+
     whole_static_libs: [
         "libstagefright_metadatautils",
     ],
diff --git a/media/libstagefright/mpeg2ts/AnotherPacketSource.h b/media/libstagefright/mpeg2ts/AnotherPacketSource.h
index f4a6acb..44fe2c8 100644
--- a/media/libstagefright/mpeg2ts/AnotherPacketSource.h
+++ b/media/libstagefright/mpeg2ts/AnotherPacketSource.h
@@ -18,7 +18,7 @@
 
 #define ANOTHER_PACKET_SOURCE_H_
 
-#include <media/MediaSource.h>
+#include <media/stagefright/MediaSource.h>
 #include <media/stagefright/foundation/ABase.h>
 #include <utils/threads.h>
 #include <utils/List.h>
diff --git a/media/libstagefright/omx/Android.bp b/media/libstagefright/omx/Android.bp
index 7d612b4..78b4f19 100644
--- a/media/libstagefright/omx/Android.bp
+++ b/media/libstagefright/omx/Android.bp
@@ -4,6 +4,7 @@
     vndk: {
         enabled: true,
     },
+    double_loadable: true,
 
     srcs: [
         "OMXMaster.cpp",
diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp
index e1c3916..2f69f45 100644
--- a/media/libstagefright/omx/OMXNodeInstance.cpp
+++ b/media/libstagefright/omx/OMXNodeInstance.cpp
@@ -492,6 +492,12 @@
         }
 
         case OMX_StateLoaded:
+        {
+            if (mActiveBuffers.size() > 0) {
+                freeActiveBuffers();
+            }
+            FALLTHROUGH_INTENDED;
+        }
         case OMX_StateInvalid:
             break;
 
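The OMXNodeInstance.cpp hunk above makes the OMX_StateLoaded case free any buffers still registered with the node before falling through to the OMX_StateInvalid handling, presumably so that buffers left behind at teardown are released rather than leaked. Below is a minimal, self-contained sketch of that switch/fallthrough pattern; the `Node` type, `onRelease()`, and the local `FALLTHROUGH_INTENDED` fallback are illustrative stand-ins, not the real OMX code.

```cpp
// Sketch of the "free active buffers, then fall through" pattern from the hunk above.
// Node and onRelease() are hypothetical; FALLTHROUGH_INTENDED is defined locally here
// as a stand-in for the Android macro.
#include <cstdio>

#ifndef FALLTHROUGH_INTENDED
#define FALLTHROUGH_INTENDED [[fallthrough]]
#endif

enum State { StateLoaded, StateInvalid, StateExecuting };

struct Node {
    int activeBuffers = 2;
    void freeActiveBuffers() { activeBuffers = 0; }
};

void onRelease(State state, Node &node) {
    switch (state) {
        case StateLoaded:
        {
            // Buffers may still be outstanding when the node is torn down in the
            // Loaded state; release them before treating the node as invalid.
            if (node.activeBuffers > 0) {
                node.freeActiveBuffers();
            }
            FALLTHROUGH_INTENDED;
        }
        case StateInvalid:
            break;
        default:
            break;
    }
}

int main() {
    Node node;
    onRelease(StateLoaded, node);
    std::printf("active buffers after release: %d\n", node.activeBuffers);  // prints 0
    return 0;
}
```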
diff --git a/media/libstagefright/omx/tests/OMXHarness.cpp b/media/libstagefright/omx/tests/OMXHarness.cpp
index 6848a83..7893148 100644
--- a/media/libstagefright/omx/tests/OMXHarness.cpp
+++ b/media/libstagefright/omx/tests/OMXHarness.cpp
@@ -30,7 +30,7 @@
 #include <datasource/DataSourceFactory.h>
 #include <media/DataSource.h>
 #include <media/IMediaHTTPService.h>
-#include <media/MediaSource.h>
+#include <media/stagefright/MediaSource.h>
 #include <media/OMXBuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/ALooper.h>
diff --git a/media/libstagefright/rtsp/ARTPWriter.cpp b/media/libstagefright/rtsp/ARTPWriter.cpp
index 4f86773..58d6086 100644
--- a/media/libstagefright/rtsp/ARTPWriter.cpp
+++ b/media/libstagefright/rtsp/ARTPWriter.cpp
@@ -22,7 +22,7 @@
 
 #include <fcntl.h>
 
-#include <media/MediaSource.h>
+#include <media/stagefright/MediaSource.h>
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AMessage.h>
diff --git a/media/libstagefright/rtsp/VideoSource.h b/media/libstagefright/rtsp/VideoSource.h
index 4be9bf6..f29db57 100644
--- a/media/libstagefright/rtsp/VideoSource.h
+++ b/media/libstagefright/rtsp/VideoSource.h
@@ -18,7 +18,7 @@
 
 #define VIDEO_SOURCE_H_
 
-#include <media/MediaSource.h>
+#include <media/stagefright/MediaSource.h>
 #include <media/stagefright/MediaBufferGroup.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MetaData.h>
diff --git a/media/libstagefright/tests/writer/README.md b/media/libstagefright/tests/writer/README.md
index 52db6f0..ae07917 100644
--- a/media/libstagefright/tests/writer/README.md
+++ b/media/libstagefright/tests/writer/README.md
@@ -19,12 +19,13 @@
 
 adb push ${OUT}/data/nativetest/writerTest/writerTest /data/local/tmp/
 
-The resource file for the tests is taken from Codec2 VTS resource folder. Push these files into device for testing.
+The resource files for the tests can be downloaded from [here](https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/tests/writer/writerTestRes.zip).
+Download and extract the archive, then push all the files in it to /data/local/tmp/ on the device.
 ```
-adb push  $ANDROID_BUILD_TOP/frameworks/av/media/codec2/hidl/1.0/vts/functional/res /sdcard/
+adb push writerTestRes /data/local/tmp/
 ```
 
 usage: writerTest -P \<path_to_res_folder\>
 ```
-adb shell /data/local/tmp/writerTest -P /sdcard/res/
+adb shell /data/local/tmp/writerTest -P /data/local/tmp/
 ```
diff --git a/media/libstagefright/tests/writer/WriterTest.cpp b/media/libstagefright/tests/writer/WriterTest.cpp
index d68438c..409f141 100644
--- a/media/libstagefright/tests/writer/WriterTest.cpp
+++ b/media/libstagefright/tests/writer/WriterTest.cpp
@@ -29,9 +29,9 @@
 
 #include <media/stagefright/AACWriter.h>
 #include <media/stagefright/AMRWriter.h>
-#include <media/stagefright/OggWriter.h>
-#include <media/stagefright/MPEG4Writer.h>
 #include <media/stagefright/MPEG2TSWriter.h>
+#include <media/stagefright/MPEG4Writer.h>
+#include <media/stagefright/OggWriter.h>
 #include <webm/WebmWriter.h>
 
 #include "WriterTestEnvironment.h"
@@ -89,19 +89,36 @@
 
 class WriterTest : public ::testing::TestWithParam<pair<string, int32_t>> {
   public:
+    WriterTest() : mWriter(nullptr), mFileMeta(nullptr), mCurrentTrack(nullptr) {}
+
+    ~WriterTest() {
+        if (mWriter) {
+            mWriter.clear();
+            mWriter = nullptr;
+        }
+        if (mFileMeta) {
+            mFileMeta.clear();
+            mFileMeta = nullptr;
+        }
+        if (mCurrentTrack) {
+            mCurrentTrack.clear();
+            mCurrentTrack = nullptr;
+        }
+    }
+
     virtual void SetUp() override {
         mNumCsds = 0;
         mInputFrameId = 0;
         mWriterName = unknown_comp;
         mDisableTest = false;
 
-        std::map<std::string, standardWriters> mapWriter = {
+        static const std::map<std::string, standardWriters> mapWriter = {
                 {"ogg", OGG},     {"aac", AAC},      {"aac_adts", AAC_ADTS}, {"webm", WEBM},
                 {"mpeg4", MPEG4}, {"amrnb", AMR_NB}, {"amrwb", AMR_WB},      {"mpeg2Ts", MPEG2TS}};
         // Find the component type
         string writerFormat = GetParam().first;
         if (mapWriter.find(writerFormat) != mapWriter.end()) {
-            mWriterName = mapWriter[writerFormat];
+            mWriterName = mapWriter.at(writerFormat);
         }
         if (mWriterName == standardWriters::unknown_comp) {
             cout << "[   WARN   ] Test Skipped. No specific writer mentioned\n";
@@ -110,10 +127,8 @@
     }
 
     virtual void TearDown() override {
-        mWriter.clear();
-        mFileMeta.clear();
         mBufferInfo.clear();
-        if (mInputStream) mInputStream.close();
+        if (mInputStream.is_open()) mInputStream.close();
     }
 
     void getInputBufferInfo(string inputFileName, string inputInfo);
@@ -149,7 +164,7 @@
 void WriterTest::getInputBufferInfo(string inputFileName, string inputInfo) {
     std::ifstream eleInfo;
     eleInfo.open(inputInfo.c_str());
-    CHECK_EQ(eleInfo.is_open(), true);
+    ASSERT_EQ(eleInfo.is_open(), true);
     int32_t bytesCount = 0;
     uint32_t flags = 0;
     int64_t timestamp = 0;
@@ -162,7 +177,7 @@
     }
     eleInfo.close();
     mInputStream.open(inputFileName.c_str(), std::ifstream::binary);
-    CHECK_EQ(mInputStream.is_open(), true);
+    ASSERT_EQ(mInputStream.is_open(), true);
 }
 
 int32_t WriterTest::createWriter(int32_t fd) {
@@ -258,14 +273,11 @@
     string outputFile = OUTPUT_FILE_NAME;
     int32_t fd =
             open(outputFile.c_str(), O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
-    if (fd < 0) return;
+    ASSERT_GE(fd, 0) << "Failed to open output file to dump writer's data";
 
     // Creating writer within a test scope. Destructor should be called when the test ends
-    int32_t status = createWriter(fd);
-    if (status) {
-        cout << "Failed to create writer for output format:" << GetParam().first << "\n";
-        ASSERT_TRUE(false);
-    }
+    ASSERT_EQ((status_t)OK, createWriter(fd))
+            << "Failed to create writer for output format:" << GetParam().first;
 }
 
 TEST_P(WriterTest, WriterTest) {
@@ -276,38 +288,32 @@
     string outputFile = OUTPUT_FILE_NAME;
     int32_t fd =
             open(outputFile.c_str(), O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
-    if (fd < 0) return;
+    ASSERT_GE(fd, 0) << "Failed to open output file to dump writer's data";
+
     int32_t status = createWriter(fd);
-    if (status) {
-        cout << "Failed to create writer for output format:" << writerFormat << "\n";
-        ASSERT_TRUE(false);
-    }
+    ASSERT_EQ((status_t)OK, status) << "Failed to create writer for output format:" << writerFormat;
+
     string inputFile = gEnv->getRes();
     string inputInfo = gEnv->getRes();
     configFormat param;
     bool isAudio;
     int32_t inputFileIdx = GetParam().second;
     getFileDetails(inputFile, inputInfo, param, isAudio, inputFileIdx);
-    if (!inputFile.compare(gEnv->getRes())) {
-        ALOGV("No input file specified");
-        return;
-    }
+    ASSERT_NE(inputFile.compare(gEnv->getRes()), 0) << "No input file specified";
+
     getInputBufferInfo(inputFile, inputInfo);
     status = addWriterSource(isAudio, param);
-    if (status) {
-        cout << "Failed to add source for " << writerFormat << "Writer \n";
-        ASSERT_TRUE(false);
-    }
-    CHECK_EQ((status_t)OK, mWriter->start(mFileMeta.get()));
+    ASSERT_EQ((status_t)OK, status) << "Failed to add source for " << writerFormat << "Writer";
+
+    status = mWriter->start(mFileMeta.get());
+    ASSERT_EQ((status_t)OK, status);
     status = sendBuffersToWriter(mInputStream, mBufferInfo, mInputFrameId, mCurrentTrack, 0,
                                  mBufferInfo.size());
+    ASSERT_EQ((status_t)OK, status) << writerFormat << " writer failed";
     mCurrentTrack->stop();
-    if (status) {
-        cout << writerFormat << " writer failed \n";
-        mWriter->stop();
-        ASSERT_TRUE(false);
-    }
-    CHECK_EQ((status_t)OK, mWriter->stop());
+
+    status = mWriter->stop();
+    ASSERT_EQ((status_t)OK, status) << "Failed to stop the writer";
     close(fd);
 }
 
@@ -319,59 +325,125 @@
     string outputFile = OUTPUT_FILE_NAME;
     int32_t fd =
             open(outputFile.c_str(), O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
-    if (fd < 0) return;
+    ASSERT_GE(fd, 0) << "Failed to open output file to dump writer's data";
+
     int32_t status = createWriter(fd);
-    if (status) {
-        cout << "Failed to create writer for output format:" << writerFormat << "\n";
-        ASSERT_TRUE(false);
-    }
+    ASSERT_EQ((status_t)OK, status) << "Failed to create writer for output format:" << writerFormat;
+
     string inputFile = gEnv->getRes();
     string inputInfo = gEnv->getRes();
     configFormat param;
     bool isAudio;
     int32_t inputFileIdx = GetParam().second;
     getFileDetails(inputFile, inputInfo, param, isAudio, inputFileIdx);
-    if (!inputFile.compare(gEnv->getRes())) {
-        ALOGV("No input file specified");
-        return;
-    }
+    ASSERT_NE(inputFile.compare(gEnv->getRes()), 0) << "No input file specified";
+
     getInputBufferInfo(inputFile, inputInfo);
     status = addWriterSource(isAudio, param);
-    if (status) {
-        cout << "Failed to add source for " << writerFormat << "Writer \n";
-        ASSERT_TRUE(false);
-    }
-    CHECK_EQ((status_t)OK, mWriter->start(mFileMeta.get()));
+    ASSERT_EQ((status_t)OK, status) << "Failed to add source for " << writerFormat << "Writer";
+
+    status = mWriter->start(mFileMeta.get());
+    ASSERT_EQ((status_t)OK, status);
     status = sendBuffersToWriter(mInputStream, mBufferInfo, mInputFrameId, mCurrentTrack, 0,
                                  mBufferInfo.size() / 4);
-    if (status) {
-        cout << writerFormat << " writer failed \n";
-        mCurrentTrack->stop();
-        mWriter->stop();
-        ASSERT_TRUE(false);
-    }
+    ASSERT_EQ((status_t)OK, status) << writerFormat << " writer failed";
 
     bool isPaused = false;
     if ((mWriterName != standardWriters::MPEG2TS) && (mWriterName != standardWriters::MPEG4)) {
-        CHECK_EQ((status_t)OK, mWriter->pause());
+        status = mWriter->pause();
+        ASSERT_EQ((status_t)OK, status);
         isPaused = true;
     }
     // In the pause state, writers shouldn't write anything. Testing the writers for the same
     int32_t numFramesPaused = mBufferInfo.size() / 4;
-    status |= sendBuffersToWriter(mInputStream, mBufferInfo, mInputFrameId, mCurrentTrack,
+    status = sendBuffersToWriter(mInputStream, mBufferInfo, mInputFrameId, mCurrentTrack,
                                   mInputFrameId, numFramesPaused, isPaused);
+    ASSERT_EQ((status_t)OK, status) << writerFormat << " writer failed";
+
     if (isPaused) {
-        CHECK_EQ((status_t)OK, mWriter->start(mFileMeta.get()));
+        status = mWriter->start(mFileMeta.get());
+        ASSERT_EQ((status_t)OK, status);
     }
-    status |= sendBuffersToWriter(mInputStream, mBufferInfo, mInputFrameId, mCurrentTrack,
+    status = sendBuffersToWriter(mInputStream, mBufferInfo, mInputFrameId, mCurrentTrack,
                                   mInputFrameId, mBufferInfo.size());
+    ASSERT_EQ((status_t)OK, status) << writerFormat << " writer failed";
     mCurrentTrack->stop();
-    if (status) {
-        cout << writerFormat << " writer failed \n";
-        mWriter->stop();
-        ASSERT_TRUE(false);
+
+    status = mWriter->stop();
+    ASSERT_EQ((status_t)OK, status) << "Failed to stop the writer";
+    close(fd);
+}
+
+TEST_P(WriterTest, MultiStartStopPauseTest) {
+    // TODO: (b/144821804)
+    // Enable the test for MPEG2TS writer
+    if (mDisableTest || mWriterName == standardWriters::MPEG2TS) return;
+    ALOGV("Test writers for multiple start, stop and pause calls");
+
+    string outputFile = OUTPUT_FILE_NAME;
+    int32_t fd =
+            open(outputFile.c_str(), O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
+    ASSERT_GE(fd, 0) << "Failed to open output file to dump writer's data";
+
+    string writerFormat = GetParam().first;
+    int32_t status = createWriter(fd);
+    ASSERT_EQ((status_t)OK, status) << "Failed to create writer for output format:" << writerFormat;
+
+    string inputFile = gEnv->getRes();
+    string inputInfo = gEnv->getRes();
+    configFormat param;
+    bool isAudio;
+    int32_t inputFileIdx = GetParam().second;
+    getFileDetails(inputFile, inputInfo, param, isAudio, inputFileIdx);
+    ASSERT_NE(inputFile.compare(gEnv->getRes()), 0) << "No input file specified";
+
+    getInputBufferInfo(inputFile, inputInfo);
+    status = addWriterSource(isAudio, param);
+    ASSERT_EQ((status_t)OK, status) << "Failed to add source for " << writerFormat << "Writer";
+
+    // first start should succeed.
+    status = mWriter->start(mFileMeta.get());
+    ASSERT_EQ((status_t)OK, status) << "Could not start the writer";
+
+    // Multiple start() may/may not succeed.
+    // Writers are expected to not crash on multiple start() calls.
+    for (int32_t count = 0; count < kMaxCount; count++) {
+        mWriter->start(mFileMeta.get());
     }
-    CHECK_EQ((status_t)OK, mWriter->stop());
+
+    status = sendBuffersToWriter(mInputStream, mBufferInfo, mInputFrameId, mCurrentTrack, 0,
+                              mBufferInfo.size() / 4);
+    ASSERT_EQ((status_t)OK, status) << writerFormat << " writer failed";
+
+    for (int32_t count = 0; count < kMaxCount; count++) {
+        mWriter->pause();
+        mWriter->start(mFileMeta.get());
+    }
+
+    mWriter->pause();
+    int32_t numFramesPaused = mBufferInfo.size() / 4;
+    status = sendBuffersToWriter(mInputStream, mBufferInfo, mInputFrameId, mCurrentTrack,
+                              mInputFrameId, numFramesPaused, true);
+    ASSERT_EQ((status_t)OK, status) << writerFormat << " writer failed";
+
+    for (int32_t count = 0; count < kMaxCount; count++) {
+        mWriter->start(mFileMeta.get());
+    }
+
+    status = sendBuffersToWriter(mInputStream, mBufferInfo, mInputFrameId, mCurrentTrack,
+                              mInputFrameId, mBufferInfo.size());
+    ASSERT_EQ((status_t)OK, status) << writerFormat << " writer failed";
+
+    mCurrentTrack->stop();
+
+    // first stop should succeed.
+    status = mWriter->stop();
+    ASSERT_EQ((status_t)OK, status) << "Failed to stop the writer";
+    // Multiple stop() may/may not succeed.
+    // Writers are expected to not crash on multiple stop() calls.
+    for (int32_t count = 0; count < kMaxCount; count++) {
+        mWriter->stop();
+    }
     close(fd);
 }
 
diff --git a/media/libstagefright/tests/writer/WriterTestEnvironment.h b/media/libstagefright/tests/writer/WriterTestEnvironment.h
index 34c2baa..99e686f 100644
--- a/media/libstagefright/tests/writer/WriterTestEnvironment.h
+++ b/media/libstagefright/tests/writer/WriterTestEnvironment.h
@@ -25,7 +25,7 @@
 
 class WriterTestEnvironment : public ::testing::Environment {
   public:
-    WriterTestEnvironment() : res("/sdcard/media/") {}
+    WriterTestEnvironment() : res("/data/local/tmp/") {}
 
     // Parses the command line arguments
     int initFromOptions(int argc, char **argv);
diff --git a/media/libstagefright/tests/writer/WriterUtility.cpp b/media/libstagefright/tests/writer/WriterUtility.cpp
index 2ba90a0..f24ccb6 100644
--- a/media/libstagefright/tests/writer/WriterUtility.cpp
+++ b/media/libstagefright/tests/writer/WriterUtility.cpp
@@ -26,7 +26,7 @@
                             int32_t &inputFrameId, sp<MediaAdapter> &currentTrack, int32_t offset,
                             int32_t range, bool isPaused) {
     while (1) {
-        if (inputFrameId == (int)bufferInfo.size() || inputFrameId >= (offset + range)) break;
+        if (inputFrameId >= (int)bufferInfo.size() || inputFrameId >= (offset + range)) break;
         int32_t size = bufferInfo[inputFrameId].size;
         char *data = (char *)malloc(size);
         if (!data) {
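The WriterUtility.cpp change above widens the loop-exit check from an exact match to `>=`, so the send loop cannot read past `bufferInfo` if `inputFrameId` already points beyond the last extracted frame (for example after an earlier partial pass). A minimal, self-contained sketch of that defensive bound follows; `sendFrames()` and the trimmed-down `BufferInfo` are illustrative stand-ins for the test utility, not its real signature.

```cpp
// Sketch of the '>=' bound fixed above: exit when the frame id is at or past the
// end of the list, or past the requested range, instead of only on exact equality.
#include <cstdio>
#include <vector>

struct BufferInfo { int size; };  // simplified stand-in

int sendFrames(const std::vector<BufferInfo> &info, int &frameId, int offset, int range) {
    int sent = 0;
    while (true) {
        // '>=' guards both "already past the end" and "range exhausted".
        if (frameId >= (int)info.size() || frameId >= (offset + range)) break;
        ++sent;
        ++frameId;
    }
    return sent;
}

int main() {
    std::vector<BufferInfo> info{{10}, {20}, {30}};
    int frameId = 5;  // starts beyond the buffer list
    std::printf("frames sent: %d\n", sendFrames(info, frameId, 0, 4));  // prints 0, no overrun
    return 0;
}
```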
diff --git a/media/libstagefright/tests/writer/WriterUtility.h b/media/libstagefright/tests/writer/WriterUtility.h
index d402798..cdd6246 100644
--- a/media/libstagefright/tests/writer/WriterUtility.h
+++ b/media/libstagefright/tests/writer/WriterUtility.h
@@ -33,6 +33,7 @@
 #define CODEC_CONFIG_FLAG 32
 
 constexpr uint32_t kMaxCSDStrlen = 16;
+constexpr uint32_t kMaxCount = 20;
 
 struct BufferInfo {
     int32_t size;
diff --git a/media/libstagefright/webm/WebmFrameThread.h b/media/libstagefright/webm/WebmFrameThread.h
index 2dde20a..5aa6feb 100644
--- a/media/libstagefright/webm/WebmFrameThread.h
+++ b/media/libstagefright/webm/WebmFrameThread.h
@@ -21,7 +21,7 @@
 #include "LinkedBlockingQueue.h"
 
 #include <datasource/FileSource.h>
-#include <media/MediaSource.h>
+#include <media/stagefright/MediaSource.h>
 
 #include <utils/List.h>
 #include <utils/Errors.h>
diff --git a/media/libstagefright/webm/WebmWriter.h b/media/libstagefright/webm/WebmWriter.h
index ffe4c79..ed5bc4c 100644
--- a/media/libstagefright/webm/WebmWriter.h
+++ b/media/libstagefright/webm/WebmWriter.h
@@ -21,7 +21,7 @@
 #include "WebmFrameThread.h"
 #include "LinkedBlockingQueue.h"
 
-#include <media/MediaSource.h>
+#include <media/stagefright/MediaSource.h>
 #include <media/stagefright/MediaWriter.h>
 
 #include <utils/Errors.h>
diff --git a/media/libstagefright/xmlparser/Android.bp b/media/libstagefright/xmlparser/Android.bp
index 38de831..7ed0e88 100644
--- a/media/libstagefright/xmlparser/Android.bp
+++ b/media/libstagefright/xmlparser/Android.bp
@@ -10,6 +10,7 @@
     vndk: {
         enabled: true,
     },
+    double_loadable: true,
 
     srcs: [
         "MediaCodecsXmlParser.cpp",
diff --git a/media/ndk/Android.bp b/media/ndk/Android.bp
index f83d530..7b285a6 100644
--- a/media/ndk/Android.bp
+++ b/media/ndk/Android.bp
@@ -179,6 +179,10 @@
         "libcutils",
         "android.hardware.graphics.bufferqueue@1.0",
     ],
+    header_libs: [
+        "libstagefright_foundation_headers",
+    ],
+
     cflags: [
         "-D__ANDROID_VNDK__",
     ],
diff --git a/media/tests/benchmark/MediaBenchmarkTest/Android.bp b/media/tests/benchmark/MediaBenchmarkTest/Android.bp
index 91b03f1..d80d9a5 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/Android.bp
+++ b/media/tests/benchmark/MediaBenchmarkTest/Android.bp
@@ -17,11 +17,13 @@
 android_test {
     name: "MediaBenchmarkTest",
 
+    defaults: [
+        "MediaBenchmark-defaults",
+    ],
+
     // Include all the test code
     srcs: ["src/androidTest/**/*.java"],
 
-    sdk_version: "system_current",
-
     resource_dirs: ["res"],
 
     libs: [
@@ -29,6 +31,10 @@
         "android.test.base",
     ],
 
+    jni_libs: [
+        "libmediabenchmark_jni",
+    ],
+
     static_libs: [
         "libMediaBenchmark",
         "junit",
@@ -39,12 +45,22 @@
 android_library {
     name: "libMediaBenchmark",
 
+    defaults: [
+        "MediaBenchmark-defaults",
+    ],
+
     // Include all the libraries
     srcs: ["src/main/**/*.java"],
 
-    sdk_version: "system_current",
-
     static_libs: [
         "androidx.test.core",
     ],
 }
+
+java_defaults {
+    name: "MediaBenchmark-defaults",
+
+    sdk_version: "system_current",
+    min_sdk_version: "28",
+    target_sdk_version: "29",
+}
diff --git a/media/tests/benchmark/MediaBenchmarkTest/AndroidTest.xml b/media/tests/benchmark/MediaBenchmarkTest/AndroidTest.xml
index 89d6ce2..1179d6c 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/AndroidTest.xml
+++ b/media/tests/benchmark/MediaBenchmarkTest/AndroidTest.xml
@@ -15,6 +15,7 @@
 -->
 <configuration description="Runs Media Benchmark Tests">
     <target_preparer class="com.android.tradefed.targetprep.TestAppInstallSetup">
+        <option name="cleanup-apks" value="false" />
         <option name="test-file-name" value="MediaBenchmarkTest.apk" />
     </target_preparer>
 
diff --git a/media/tests/benchmark/MediaBenchmarkTest/build.gradle b/media/tests/benchmark/MediaBenchmarkTest/build.gradle
index b0ee692..b2aee1a 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/build.gradle
+++ b/media/tests/benchmark/MediaBenchmarkTest/build.gradle
@@ -30,7 +30,7 @@
     compileSdkVersion 29
     defaultConfig {
         applicationId "com.android.media.benchmark"
-        minSdkVersion 21
+        minSdkVersion 28
         targetSdkVersion 29
         versionCode 1
         versionName "1.0"
@@ -48,6 +48,18 @@
             manifest.srcFile 'AndroidManifest.xml'
         }
     }
+    buildTypes {
+        release {
+            minifyEnabled false
+            proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
+        }
+    }
+    externalNativeBuild {
+        cmake {
+            path "src/main/cpp/CMakeLists.txt"
+            version "3.10.2"
+        }
+    }
 }
 
 repositories {
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/DecoderTest.java b/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/DecoderTest.java
index be2633d..c41f198 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/DecoderTest.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/DecoderTest.java
@@ -27,7 +27,10 @@
 import com.android.media.benchmark.library.CodecUtils;
 import com.android.media.benchmark.library.Decoder;
 import com.android.media.benchmark.library.Extractor;
+import com.android.media.benchmark.library.Native;
+import com.android.media.benchmark.library.Stats;
 
+import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
@@ -44,12 +47,17 @@
 import java.util.Arrays;
 import java.util.Collection;
 
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertEquals;
+
 @RunWith(Parameterized.class)
 public class DecoderTest {
     private static final Context mContext =
             InstrumentationRegistry.getInstrumentation().getTargetContext();
     private static final String mInputFilePath = mContext.getString(R.string.input_file_path);
     private static final String mOutputFilePath = mContext.getString(R.string.output_file_path);
+    private static final String mStatsFile =
+            mContext.getExternalFilesDir(null) + "/Decoder." + System.currentTimeMillis() + ".csv";
     private static final String TAG = "DecoderTest";
     private static final long PER_TEST_TIMEOUT_MS = 60000;
     private static final boolean DEBUG = false;
@@ -101,97 +109,112 @@
                 {"crowd_1920x1080_25fps_4000kbps_h265.mkv", true}});
     }
 
+    @BeforeClass
+    public static void writeStatsHeaderToFile() throws IOException {
+        Stats mStats = new Stats();
+        boolean status = mStats.writeStatsHeader(mStatsFile);
+        assertTrue("Unable to open stats file for writing!", status);
+        Log.d(TAG, "Saving Benchmark results in: " + mStatsFile);
+    }
+
     @Test(timeout = PER_TEST_TIMEOUT_MS)
     public void testDecoder() throws IOException {
         File inputFile = new File(mInputFilePath + mInputFile);
-        if (inputFile.exists()) {
-            FileInputStream fileInput = new FileInputStream(inputFile);
-            FileDescriptor fileDescriptor = fileInput.getFD();
-            Extractor extractor = new Extractor();
-            int trackCount = extractor.setUpExtractor(fileDescriptor);
-            ArrayList<ByteBuffer> inputBuffer = new ArrayList<>();
-            ArrayList<MediaCodec.BufferInfo> frameInfo = new ArrayList<>();
-            if (trackCount <= 0) {
-                Log.e(TAG, "Extraction failed. No tracks for file: " + mInputFile);
-                return;
-            }
-            for (int currentTrack = 0; currentTrack < trackCount; currentTrack++) {
-                extractor.selectExtractorTrack(currentTrack);
-                MediaFormat format = extractor.getFormat(currentTrack);
-                String mime = format.getString(MediaFormat.KEY_MIME);
-                ArrayList<String> mediaCodecs = CodecUtils.selectCodecs(mime, false);
-                if (mediaCodecs.size() <= 0) {
-                    Log.e(TAG,
-                            "No suitable codecs found for file: " + mInputFile
-                                    + " track : " + currentTrack + " mime: " + mime);
-                    continue;
+        assertTrue("Cannot find " + mInputFile + " in directory " + mInputFilePath,
+                inputFile.exists());
+        FileInputStream fileInput = new FileInputStream(inputFile);
+        FileDescriptor fileDescriptor = fileInput.getFD();
+        Extractor extractor = new Extractor();
+        int trackCount = extractor.setUpExtractor(fileDescriptor);
+        assertTrue("Extraction failed. No tracks for file: " + mInputFile, (trackCount > 0));
+        ArrayList<ByteBuffer> inputBuffer = new ArrayList<>();
+        ArrayList<MediaCodec.BufferInfo> frameInfo = new ArrayList<>();
+        for (int currentTrack = 0; currentTrack < trackCount; currentTrack++) {
+            extractor.selectExtractorTrack(currentTrack);
+            MediaFormat format = extractor.getFormat(currentTrack);
+            String mime = format.getString(MediaFormat.KEY_MIME);
+            ArrayList<String> mediaCodecs = CodecUtils.selectCodecs(mime, false);
+            assertTrue("No suitable codecs found for file: " + mInputFile + " track : " +
+                    currentTrack + " mime: " + mime, (mediaCodecs.size() > 0));
+
+            // Get samples from extractor
+            int sampleSize;
+            do {
+                sampleSize = extractor.getFrameSample();
+                MediaCodec.BufferInfo bufInfo = new MediaCodec.BufferInfo();
+                MediaCodec.BufferInfo info = extractor.getBufferInfo();
+                ByteBuffer dataBuffer = ByteBuffer.allocate(info.size);
+                dataBuffer.put(extractor.getFrameBuffer().array(), 0, info.size);
+                bufInfo.set(info.offset, info.size, info.presentationTimeUs, info.flags);
+                inputBuffer.add(dataBuffer);
+                frameInfo.add(bufInfo);
+                if (DEBUG) {
+                    Log.d(TAG, "Extracted bufInfo: flag = " + bufInfo.flags + " timestamp = " +
+                            bufInfo.presentationTimeUs + " size = " + bufInfo.size);
                 }
-                // Get samples from extractor
-                int sampleSize;
-                do {
-                    sampleSize = extractor.getFrameSample();
-                    MediaCodec.BufferInfo bufInfo = new MediaCodec.BufferInfo();
-                    MediaCodec.BufferInfo info = extractor.getBufferInfo();
-                    ByteBuffer dataBuffer = ByteBuffer.allocate(info.size);
-                    dataBuffer.put(extractor.getFrameBuffer().array(), 0, info.size);
-                    bufInfo.set(info.offset, info.size, info.presentationTimeUs, info.flags);
-                    inputBuffer.add(dataBuffer);
-                    frameInfo.add(bufInfo);
-                    if (DEBUG) {
-                        Log.d(TAG,
-                                "Extracted bufInfo: flag = " + bufInfo.flags + " timestamp = "
-                                        + bufInfo.presentationTimeUs + " size = " + bufInfo.size);
+            } while (sampleSize > 0);
+            for (String codecName : mediaCodecs) {
+                FileOutputStream decodeOutputStream = null;
+                if (WRITE_OUTPUT) {
+                    if (!Paths.get(mOutputFilePath).toFile().exists()) {
+                        Files.createDirectories(Paths.get(mOutputFilePath));
                     }
-                } while (sampleSize > 0);
-                for (String codecName : mediaCodecs) {
-                    FileOutputStream decodeOutputStream = null;
-                    if (WRITE_OUTPUT) {
-                        if (!Paths.get(mOutputFilePath).toFile().exists()) {
-                            Files.createDirectories(Paths.get(mOutputFilePath));
-                        }
-                        File outFile = new File(mOutputFilePath + "decoder.out");
-                        if (outFile.exists()) {
-                            if (!outFile.delete()) {
-                                Log.e(TAG, " Unable to delete existing file" + outFile.toString());
-                            }
-                        }
-                        if (outFile.createNewFile()) {
-                            decodeOutputStream = new FileOutputStream(outFile);
-                        } else {
-                            Log.e(TAG, "Unable to create file: " + outFile.toString());
-                        }
+                    File outFile = new File(mOutputFilePath + "decoder.out");
+                    if (outFile.exists()) {
+                        assertTrue("Unable to delete existing file: " + outFile.toString(),
+                                outFile.delete());
                     }
-                    Decoder decoder = new Decoder();
-                    decoder.setupDecoder(decodeOutputStream);
-                    int status =
-                            decoder.decode(inputBuffer, frameInfo, mAsyncMode, format, codecName);
-                    decoder.deInitCodec();
-                    if (status == 0) {
-                        decoder.dumpStatistics(
-                                mInputFile + " " + codecName, extractor.getClipDuration());
-                        Log.i(TAG,
-                                "Decoding Successful for file: " + mInputFile
-                                        + " with codec: " + codecName);
-                    } else {
-                        Log.e(TAG,
-                                "Decoder returned error " + status + " for file: " + mInputFile
-                                        + " with codec: " + codecName);
-                    }
-                    decoder.resetDecoder();
-                    if (decodeOutputStream != null) {
-                        decodeOutputStream.close();
-                    }
+                    assertTrue("Unable to create file: " + outFile.toString(),
+                            outFile.createNewFile());
+                    decodeOutputStream = new FileOutputStream(outFile);
                 }
-                extractor.unselectExtractorTrack(currentTrack);
-                inputBuffer.clear();
-                frameInfo.clear();
+                Decoder decoder = new Decoder();
+                decoder.setupDecoder(decodeOutputStream);
+                int status = decoder.decode(inputBuffer, frameInfo, mAsyncMode, format, codecName);
+                decoder.deInitCodec();
+                assertEquals("Decoder returned error " + status + " for file: " + mInputFile +
+                        " with codec: " + codecName, 0, status);
+                decoder.dumpStatistics(mInputFile, codecName, (mAsyncMode ? "async" : "sync"),
+                        extractor.getClipDuration(), mStatsFile);
+                Log.i(TAG, "Decoding Successful for file: " + mInputFile + " with codec: " +
+                        codecName);
+                decoder.resetDecoder();
+                if (decodeOutputStream != null) {
+                    decodeOutputStream.close();
+                }
             }
-            extractor.deinitExtractor();
-            fileInput.close();
-        } else {
-            Log.w(TAG,
-                    "Warning: Test Skipped. Cannot find " + mInputFile + " in directory "
-                            + mInputFilePath);
+            extractor.unselectExtractorTrack(currentTrack);
+            inputBuffer.clear();
+            frameInfo.clear();
         }
+        extractor.deinitExtractor();
+        fileInput.close();
+    }
+
+    @Test
+    public void testNativeDecoder() throws IOException {
+        File inputFile = new File(mInputFilePath + mInputFile);
+        assertTrue("Cannot find " + mInputFile + " in directory " + mInputFilePath,
+                inputFile.exists());
+        FileInputStream fileInput = new FileInputStream(inputFile);
+        FileDescriptor fileDescriptor = fileInput.getFD();
+        Extractor extractor = new Extractor();
+        int trackCount = extractor.setUpExtractor(fileDescriptor);
+        assertTrue("Extraction failed. No tracks for file: " + mInputFile, trackCount > 0);
+        for (int currentTrack = 0; currentTrack < trackCount; currentTrack++) {
+            extractor.selectExtractorTrack(currentTrack);
+            MediaFormat format = extractor.getFormat(currentTrack);
+            String mime = format.getString(MediaFormat.KEY_MIME);
+            ArrayList<String> mediaCodecs = CodecUtils.selectCodecs(mime, false);
+            for (String codecName : mediaCodecs) {
+                Log.i(TAG, "Test: " + mInputFile);
+                Native nativeDecoder = new Native();
+                int status = nativeDecoder.Decode(
+                        mInputFilePath, mInputFile, mStatsFile, codecName, mAsyncMode);
+                assertEquals("Decoder returned error " + status + " for file: " + mInputFile, 0,
+                        status);
+            }
+        }
+        fileInput.close();
     }
 }
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/EncoderTest.java b/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/EncoderTest.java
index 9db9c84..831467a 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/EncoderTest.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/EncoderTest.java
@@ -28,7 +28,10 @@
 import com.android.media.benchmark.library.Decoder;
 import com.android.media.benchmark.library.Encoder;
 import com.android.media.benchmark.library.Extractor;
+import com.android.media.benchmark.library.Native;
+import com.android.media.benchmark.library.Stats;
 
+import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
@@ -37,18 +40,24 @@
 import java.io.FileDescriptor;
 import java.io.FileInputStream;
 import java.io.FileOutputStream;
+import java.io.IOException;
 import java.nio.ByteBuffer;
 
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
 
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertEquals;
+
 @RunWith(Parameterized.class)
 public class EncoderTest {
     private static final Context mContext =
             InstrumentationRegistry.getInstrumentation().getTargetContext();
     private static final String mInputFilePath = mContext.getString(R.string.input_file_path);
     private static final String mOutputFilePath = mContext.getString(R.string.output_file_path);
+    private static final String mStatsFile =
+            mContext.getExternalFilesDir(null) + "/Encoder." + System.currentTimeMillis() + ".csv";
     private static final String TAG = "EncoderTest";
     private static final long PER_TEST_TIMEOUT_MS = 120000;
     private static final boolean DEBUG = false;
@@ -56,7 +65,6 @@
     private static final int ENCODE_DEFAULT_FRAME_RATE = 25;
     private static final int ENCODE_DEFAULT_BIT_RATE = 8000000 /* 8 Mbps */;
     private static final int ENCODE_MIN_BIT_RATE = 600000 /* 600 Kbps */;
-
     private String mInputFile;
 
     @Parameterized.Parameters
@@ -81,192 +89,205 @@
         this.mInputFile = inputFileName;
     }
 
+    @BeforeClass
+    public static void writeStatsHeaderToFile() throws IOException {
+        Stats mStats = new Stats();
+        boolean status = mStats.writeStatsHeader(mStatsFile);
+        assertTrue("Unable to open stats file for writing!", status);
+        Log.d(TAG, "Saving Benchmark results in: " + mStatsFile);
+    }
+
     @Test(timeout = PER_TEST_TIMEOUT_MS)
     public void sampleEncoderTest() throws Exception {
         int status;
         int frameSize;
-
         //Parameters for video
         int width = 0;
         int height = 0;
         int profile = 0;
         int level = 0;
         int frameRate = 0;
-
         //Parameters for audio
         int bitRate = 0;
         int sampleRate = 0;
         int numChannels = 0;
-
         File inputFile = new File(mInputFilePath + mInputFile);
-        if (inputFile.exists()) {
-            FileInputStream fileInput = new FileInputStream(inputFile);
-            FileDescriptor fileDescriptor = fileInput.getFD();
-            Extractor extractor = new Extractor();
-            int trackCount = extractor.setUpExtractor(fileDescriptor);
-            if (trackCount <= 0) {
-                Log.e(TAG, "Extraction failed. No tracks for file: " + mInputFile);
-                return;
+        assertTrue("Cannot find " + mInputFile + " in directory " + mInputFilePath,
+                inputFile.exists());
+        FileInputStream fileInput = new FileInputStream(inputFile);
+        FileDescriptor fileDescriptor = fileInput.getFD();
+        Extractor extractor = new Extractor();
+        int trackCount = extractor.setUpExtractor(fileDescriptor);
+        assertTrue("Extraction failed. No tracks for file: " + mInputFile, (trackCount > 0));
+        ArrayList<ByteBuffer> inputBuffer = new ArrayList<>();
+        ArrayList<MediaCodec.BufferInfo> frameInfo = new ArrayList<>();
+        for (int currentTrack = 0; currentTrack < trackCount; currentTrack++) {
+            extractor.selectExtractorTrack(currentTrack);
+            MediaFormat format = extractor.getFormat(currentTrack);
+            // Get samples from extractor
+            int sampleSize;
+            do {
+                sampleSize = extractor.getFrameSample();
+                MediaCodec.BufferInfo bufInfo = new MediaCodec.BufferInfo();
+                MediaCodec.BufferInfo info = extractor.getBufferInfo();
+                ByteBuffer dataBuffer = ByteBuffer.allocate(info.size);
+                dataBuffer.put(extractor.getFrameBuffer().array(), 0, info.size);
+                bufInfo.set(info.offset, info.size, info.presentationTimeUs, info.flags);
+                inputBuffer.add(dataBuffer);
+                frameInfo.add(bufInfo);
+                if (DEBUG) {
+                    Log.d(TAG, "Extracted bufInfo: flag = " + bufInfo.flags + " timestamp = " +
+                            bufInfo.presentationTimeUs + " size = " + bufInfo.size);
+                }
+            } while (sampleSize > 0);
+            int tid = android.os.Process.myTid();
+            File decodedFile = new File(mContext.getFilesDir() + "/decoder_" + tid + ".out");
+            FileOutputStream decodeOutputStream = new FileOutputStream(decodedFile);
+            Decoder decoder = new Decoder();
+            decoder.setupDecoder(decodeOutputStream);
+            status = decoder.decode(inputBuffer, frameInfo, false, format, "");
+            assertEquals("Decoder returned error " + status + " for file: " + mInputFile, 0,
+                    status);
+            decoder.deInitCodec();
+            extractor.unselectExtractorTrack(currentTrack);
+            inputBuffer.clear();
+            frameInfo.clear();
+            if (decodeOutputStream != null) {
+                decodeOutputStream.close();
             }
-            ArrayList<ByteBuffer> inputBuffer = new ArrayList<>();
-            ArrayList<MediaCodec.BufferInfo> frameInfo = new ArrayList<>();
-            for (int currentTrack = 0; currentTrack < trackCount; currentTrack++) {
-                extractor.selectExtractorTrack(currentTrack);
-                MediaFormat format = extractor.getFormat(currentTrack);
-                // Get samples from extractor
-                int sampleSize;
-                do {
-                    sampleSize = extractor.getFrameSample();
-                    MediaCodec.BufferInfo bufInfo = new MediaCodec.BufferInfo();
-                    MediaCodec.BufferInfo info = extractor.getBufferInfo();
-                    ByteBuffer dataBuffer = ByteBuffer.allocate(info.size);
-                    dataBuffer.put(extractor.getFrameBuffer().array(), 0, info.size);
-                    bufInfo.set(info.offset, info.size, info.presentationTimeUs, info.flags);
-                    inputBuffer.add(dataBuffer);
-                    frameInfo.add(bufInfo);
+            String mime = format.getString(MediaFormat.KEY_MIME);
+            ArrayList<String> mediaCodecs = CodecUtils.selectCodecs(mime, true);
+            assertTrue("No suitable codecs found for file: " + mInputFile + " track : " +
+                    currentTrack + " mime: " + mime, (mediaCodecs.size() > 0));
+            Boolean[] encodeMode = {true, false};
+            /* Encoding the decoder's output */
+            for (Boolean asyncMode : encodeMode) {
+                for (String codecName : mediaCodecs) {
+                    FileOutputStream encodeOutputStream = null;
+                    if (WRITE_OUTPUT) {
+                        File outEncodeFile = new File(mOutputFilePath + "encoder.out");
+                        if (outEncodeFile.exists()) {
+                            assertTrue("Unable to delete existing file: " + outEncodeFile.toString(),
+                                    outEncodeFile.delete());
+                        }
+                        assertTrue("Unable to create file to write encoder output: " +
+                                outEncodeFile.toString(), outEncodeFile.createNewFile());
+                        encodeOutputStream = new FileOutputStream(outEncodeFile);
+                    }
+                    File rawFile = new File(mContext.getFilesDir() + "/decoder_" + tid + ".out");
+                    assertTrue("Cannot find decoded input file", rawFile.exists());
                     if (DEBUG) {
-                        Log.d(TAG, "Extracted bufInfo: flag = " + bufInfo.flags + " timestamp = " +
-                                bufInfo.presentationTimeUs + " size = " + bufInfo.size);
+                        Log.i(TAG, "Path of decoded input file: " + rawFile.toString());
                     }
-                } while (sampleSize > 0);
-
-                int tid = android.os.Process.myTid();
-                File decodedFile = new File(mContext.getFilesDir() + "/decoder_" + tid + ".out");
-                FileOutputStream decodeOutputStream = new FileOutputStream(decodedFile);
-                Decoder decoder = new Decoder();
-                decoder.setupDecoder(decodeOutputStream);
-                status = decoder.decode(inputBuffer, frameInfo, false, format, "");
-                if (status == 0) {
-                    Log.i(TAG, "Decoding complete.");
-                } else {
-                    Log.e(TAG, "Decode returned error. Encoding did not take place." + status);
-                    return;
-                }
-                decoder.deInitCodec();
-                extractor.unselectExtractorTrack(currentTrack);
-                inputBuffer.clear();
-                frameInfo.clear();
-                if (decodeOutputStream != null) {
-                    decodeOutputStream.close();
-                }
-                String mime = format.getString(MediaFormat.KEY_MIME);
-                ArrayList<String> mediaCodecs = CodecUtils.selectCodecs(mime, true);
-                if (mediaCodecs.size() <= 0) {
-                    Log.e(TAG, "No suitable codecs found for file: " + mInputFile + " track : " +
-                            currentTrack + " mime: " + mime);
-                    return;
-                }
-                Boolean[] encodeMode = {true, false};
-                /* Encoding the decoder's output */
-                for (Boolean asyncMode : encodeMode) {
-                    for (String codecName : mediaCodecs) {
-                        FileOutputStream encodeOutputStream = null;
-                        if (WRITE_OUTPUT) {
-                            File outEncodeFile = new File(mOutputFilePath + "encoder.out");
-                            if (outEncodeFile.exists()) {
-                                if (!outEncodeFile.delete()) {
-                                    Log.e(TAG, "Unable to delete existing file" +
-                                            decodedFile.toString());
-                                }
-                            }
-                            if (outEncodeFile.createNewFile()) {
-                                encodeOutputStream = new FileOutputStream(outEncodeFile);
+                    FileInputStream eleStream = new FileInputStream(rawFile);
+                    if (mime.startsWith("video/")) {
+                        width = format.getInteger(MediaFormat.KEY_WIDTH);
+                        height = format.getInteger(MediaFormat.KEY_HEIGHT);
+                        if (format.containsKey(MediaFormat.KEY_FRAME_RATE)) {
+                            frameRate = format.getInteger(MediaFormat.KEY_FRAME_RATE);
+                        } else if (frameRate <= 0) {
+                            frameRate = ENCODE_DEFAULT_FRAME_RATE;
+                        }
+                        if (format.containsKey(MediaFormat.KEY_BIT_RATE)) {
+                            bitRate = format.getInteger(MediaFormat.KEY_BIT_RATE);
+                        } else if (bitRate <= 0) {
+                            if (mime.contains("video/3gpp") || mime.contains("video/mp4v-es")) {
+                                bitRate = ENCODE_MIN_BIT_RATE;
                             } else {
-                                Log.e(TAG, "Unable to create file to write encoder output: " +
-                                        outEncodeFile.toString());
+                                bitRate = ENCODE_DEFAULT_BIT_RATE;
                             }
                         }
-                        File rawFile =
-                                new File(mContext.getFilesDir() + "/decoder_" + tid + ".out");
-                        if (rawFile.exists()) {
-                            if (DEBUG) {
-                                Log.i(TAG, "Path of decoded input file: " + rawFile.toString());
-                            }
-                            FileInputStream eleStream = new FileInputStream(rawFile);
-                            if (mime.startsWith("video/")) {
-                                width = format.getInteger(MediaFormat.KEY_WIDTH);
-                                height = format.getInteger(MediaFormat.KEY_HEIGHT);
-                                if (format.containsKey(MediaFormat.KEY_FRAME_RATE)) {
-                                    frameRate = format.getInteger(MediaFormat.KEY_FRAME_RATE);
-                                } else if (frameRate <= 0) {
-                                    frameRate = ENCODE_DEFAULT_FRAME_RATE;
-                                }
-                                if (format.containsKey(MediaFormat.KEY_BIT_RATE)) {
-                                    bitRate = format.getInteger(MediaFormat.KEY_BIT_RATE);
-                                } else if (bitRate <= 0) {
-                                    if (mime.contains("video/3gpp") ||
-                                            mime.contains("video/mp4v-es")) {
-                                        bitRate = ENCODE_MIN_BIT_RATE;
-                                    } else {
-                                        bitRate = ENCODE_DEFAULT_BIT_RATE;
-                                    }
-                                }
-                                if (format.containsKey(MediaFormat.KEY_PROFILE)) {
-                                    profile = format.getInteger(MediaFormat.KEY_PROFILE);
-                                }
-                                if (format.containsKey(MediaFormat.KEY_PROFILE)) {
-                                    level = format.getInteger(MediaFormat.KEY_LEVEL);
-                                }
-                            } else {
-                                sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
-                                numChannels = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
-                                bitRate = sampleRate * numChannels * 16;
-                            }
-                            /*Setup Encode Format*/
-                            MediaFormat encodeFormat;
-                            if (mime.startsWith("video/")) {
-                                frameSize = width * height * 3 / 2;
-                                encodeFormat = MediaFormat.createVideoFormat(mime, width, height);
-                                encodeFormat.setInteger(MediaFormat.KEY_FRAME_RATE, frameRate);
-                                encodeFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
-                                encodeFormat.setInteger(MediaFormat.KEY_PROFILE, profile);
-                                encodeFormat.setInteger(MediaFormat.KEY_LEVEL, level);
-                                encodeFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
-                                encodeFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, frameSize);
-                            } else {
-                                encodeFormat = MediaFormat
-                                        .createAudioFormat(mime, sampleRate, numChannels);
-                                encodeFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
-                                frameSize = 4096;
-                            }
-                            Encoder encoder = new Encoder();
-                            encoder.setupEncoder(encodeOutputStream, eleStream);
-                            status = encoder.encode(codecName, encodeFormat, mime, frameRate,
-                                    sampleRate, frameSize, asyncMode);
-                            encoder.deInitEncoder();
-                            if (status == 0) {
-                                encoder.dumpStatistics(mInputFile + "with " + codecName + " for " +
-                                        "aSyncMode = " + asyncMode, extractor.getClipDuration());
-                                Log.i(TAG, "Encoding complete for file: " + mInputFile +
-                                        " with codec: " + codecName + " for aSyncMode = " +
-                                        asyncMode);
-                            } else {
-                                Log.e(TAG,
-                                        codecName + " encoder returned error " + status + " for " +
-                                                "file:" + " " + mInputFile);
-                            }
-                            encoder.resetEncoder();
-                            eleStream.close();
-                            if (encodeOutputStream != null) {
-                                encodeOutputStream.close();
-                            }
+                        if (format.containsKey(MediaFormat.KEY_PROFILE)) {
+                            profile = format.getInteger(MediaFormat.KEY_PROFILE);
                         }
-                    }
-                }
-                //Cleanup temporary input file
-                if (decodedFile.exists()) {
-                    if (decodedFile.delete()) {
-                        Log.i(TAG, "Successfully deleted decoded file");
+                        if (format.containsKey(MediaFormat.KEY_PROFILE)) {
+                            level = format.getInteger(MediaFormat.KEY_LEVEL);
+                        }
                     } else {
-                        Log.e(TAG, "Unable to delete decoded file");
+                        sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
+                        numChannels = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
+                        bitRate = sampleRate * numChannels * 16;
                     }
+                    /*Setup Encode Format*/
+                    MediaFormat encodeFormat;
+                    if (mime.startsWith("video/")) {
+                        frameSize = width * height * 3 / 2;
+                        encodeFormat = MediaFormat.createVideoFormat(mime, width, height);
+                        encodeFormat.setInteger(MediaFormat.KEY_FRAME_RATE, frameRate);
+                        encodeFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
+                        encodeFormat.setInteger(MediaFormat.KEY_PROFILE, profile);
+                        encodeFormat.setInteger(MediaFormat.KEY_LEVEL, level);
+                        encodeFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
+                        encodeFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, frameSize);
+                    } else {
+                        encodeFormat = MediaFormat.createAudioFormat(mime, sampleRate, numChannels);
+                        encodeFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
+                        frameSize = 4096;
+                    }
+                    Encoder encoder = new Encoder();
+                    encoder.setupEncoder(encodeOutputStream, eleStream);
+                    status = encoder.encode(codecName, encodeFormat, mime, frameRate, sampleRate,
+                            frameSize, asyncMode);
+                    encoder.deInitEncoder();
+                    assertEquals(codecName + " encoder returned error " + status +
+                            " for file: " + mInputFile, 0, status);
+                    encoder.dumpStatistics(mInputFile, codecName, (asyncMode ? "async" : "sync"),
+                            extractor.getClipDuration(), mStatsFile);
+                    Log.i(TAG, "Encoding complete for file: " + mInputFile + " with codec: " +
+                            codecName + " for aSyncMode = " + asyncMode);
+                    encoder.resetEncoder();
+                    eleStream.close();
+                    if (encodeOutputStream != null) {
+                        encodeOutputStream.close();
+                    }
+
                 }
             }
-            extractor.deinitExtractor();
-            fileInput.close();
-        } else {
-            Log.w(TAG, "Warning: Test Skipped. Cannot find " + mInputFile + " in directory " +
-                    mInputFilePath);
+            //Cleanup temporary input file
+            if (decodedFile.exists()) {
+                assertTrue("Unable to delete decoded file: " + decodedFile.toString(),
+                        decodedFile.delete());
+                Log.i(TAG, "Successfully deleted decoded file");
+            }
         }
+        extractor.deinitExtractor();
+        fileInput.close();
+    }
+
+    @Test
+    public void testNativeEncoder() throws Exception {
+        File inputFile = new File(mInputFilePath + mInputFile);
+        assertTrue("Cannot find " + mInputFile + " in directory " + mInputFilePath,
+                inputFile.exists());
+        int tid = android.os.Process.myTid();
+        final String mDecodedFile = mContext.getFilesDir() + "/decoder_" + tid + ".out";
+        FileInputStream fileInput = new FileInputStream(inputFile);
+        FileDescriptor fileDescriptor = fileInput.getFD();
+        Extractor extractor = new Extractor();
+        int trackCount = extractor.setUpExtractor(fileDescriptor);
+        assertTrue("Extraction failed. No tracks found for file: " + mInputFile, trackCount > 0);
+        for (int currentTrack = 0; currentTrack < trackCount; currentTrack++) {
+            extractor.selectExtractorTrack(currentTrack);
+            MediaFormat format = extractor.getFormat(currentTrack);
+            String mime = format.getString(MediaFormat.KEY_MIME);
+            ArrayList<String> mediaCodecs = CodecUtils.selectCodecs(mime, true);
+            // Encoding the decoder's output
+            for (String codecName : mediaCodecs) {
+                Native nativeEncoder = new Native();
+                int status = nativeEncoder.Encode(
+                        mInputFilePath, mInputFile, mDecodedFile, mStatsFile, codecName);
+                assertEquals(codecName + " encoder returned error " + status +
+                        " for file: " + mInputFile, 0, status);
+            }
+        }
+        File decodedFile = new File(mDecodedFile);
+        // Cleanup temporary input file
+        if (decodedFile.exists()) {
+            assertTrue("Unable to delete - " + mDecodedFile, decodedFile.delete());
+            Log.i(TAG, "Successfully deleted - " + mDecodedFile);
+        }
+        fileInput.close();
     }
 }
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/ExtractorTest.java b/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/ExtractorTest.java
index a02011c..6b7aad1 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/ExtractorTest.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/ExtractorTest.java
@@ -18,12 +18,16 @@
 
 import com.android.media.benchmark.R;
 import com.android.media.benchmark.library.Extractor;
+import com.android.media.benchmark.library.Native;
+import com.android.media.benchmark.library.Stats;
 
 import android.content.Context;
+import android.media.MediaFormat;
 import android.util.Log;
 
 import androidx.test.platform.app.InstrumentationRegistry;
 
+import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
@@ -31,19 +35,23 @@
 import java.io.File;
 import java.io.FileDescriptor;
 import java.io.FileInputStream;
+import java.io.FileOutputStream;
 import java.io.IOException;
 import java.util.Arrays;
 import java.util.Collection;
 
-import static org.hamcrest.CoreMatchers.equalTo;
-import static org.hamcrest.CoreMatchers.is;
-import static org.junit.Assert.assertThat;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
 
 @RunWith(Parameterized.class)
 public class ExtractorTest {
     private static Context mContext =
             InstrumentationRegistry.getInstrumentation().getTargetContext();
     private static final String mInputFilePath = mContext.getString(R.string.input_file_path);
+    private static final String mStatsFile = mContext.getExternalFilesDir(null) + "/Extractor."
+            + System.currentTimeMillis() + ".csv";
     private static final String TAG = "ExtractorTest";
     private String mInputFileName;
     private int mTrackId;
@@ -71,22 +79,43 @@
         this.mTrackId = track;
     }
 
+    @BeforeClass
+    public static void writeStatsHeaderToFile() throws IOException {
+        Stats mStats = new Stats();
+        boolean status = mStats.writeStatsHeader(mStatsFile);
+        assertTrue("Unable to open stats file for writing!", status);
+        Log.d(TAG, "Saving Benchmark results in: " + mStatsFile);
+    }
+
     @Test
     public void sampleExtractTest() throws IOException {
-        int status = -1;
         File inputFile = new File(mInputFilePath + mInputFileName);
-        if (inputFile.exists()) {
-            FileInputStream fileInput = new FileInputStream(inputFile);
-            FileDescriptor fileDescriptor = fileInput.getFD();
-            Extractor extractor = new Extractor();
-            extractor.setUpExtractor(fileDescriptor);
-            status = extractor.extractSample(mTrackId);
-            extractor.deinitExtractor();
-            extractor.dumpStatistics(mInputFileName);
-            fileInput.close();
-        } else {
-            Log.e(TAG, "Cannot find " + mInputFileName + " in directory " + mInputFilePath);
-        }
-        assertThat(status, is(equalTo(0)));
+        assertTrue("Cannot find " + mInputFileName + " in directory " + mInputFilePath,
+                inputFile.exists());
+        FileInputStream fileInput = new FileInputStream(inputFile);
+        FileDescriptor fileDescriptor = fileInput.getFD();
+        Extractor extractor = new Extractor();
+        extractor.setUpExtractor(fileDescriptor);
+        MediaFormat format = extractor.getFormat(mTrackId);
+        String mime = format.getString(MediaFormat.KEY_MIME);
+        int status = extractor.extractSample(mTrackId);
+        assertEquals("Extraction failed for " + mInputFileName, 0, status);
+        Log.i(TAG, "Extracted " + mInputFileName + " successfully.");
+        extractor.deinitExtractor();
+        extractor.dumpStatistics(mInputFileName, mime, mStatsFile);
+        fileInput.close();
+    }
+
+    @Test
+    public void sampleExtractNativeTest() throws IOException {
+        Native nativeExtractor = new Native();
+        File inputFile = new File(mInputFilePath + mInputFileName);
+        assertTrue("Cannot find " + mInputFileName + " in directory " + mInputFilePath,
+                inputFile.exists());
+        FileInputStream fileInput = new FileInputStream(inputFile);
+        int status = nativeExtractor.Extract(mInputFilePath, mInputFileName, mStatsFile);
+        fileInput.close();
+        assertEquals("Extraction failed for " + mInputFileName, 0, status);
+        Log.i(TAG, "Extracted " + mInputFileName + " successfully.");
     }
 }
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/MuxerTest.java b/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/MuxerTest.java
index 8c3080c..2efdba2 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/MuxerTest.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/MuxerTest.java
@@ -18,6 +18,8 @@
 import com.android.media.benchmark.R;
 import com.android.media.benchmark.library.Extractor;
 import com.android.media.benchmark.library.Muxer;
+import com.android.media.benchmark.library.Native;
+import com.android.media.benchmark.library.Stats;
 
 import androidx.test.platform.app.InstrumentationRegistry;
 
@@ -27,6 +29,7 @@
 import android.media.MediaMuxer;
 import android.util.Log;
 
+import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
@@ -34,6 +37,7 @@
 import java.io.File;
 import java.io.FileDescriptor;
 import java.io.FileInputStream;
+import java.io.FileOutputStream;
 import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
@@ -42,15 +46,19 @@
 import java.util.Hashtable;
 import java.util.Map;
 
-import static org.hamcrest.CoreMatchers.equalTo;
-import static org.hamcrest.CoreMatchers.is;
-import static org.junit.Assert.assertThat;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotEquals;
+import static org.junit.Assert.assertTrue;
 
 @RunWith(Parameterized.class)
 public class MuxerTest {
     private static Context mContext =
             InstrumentationRegistry.getInstrumentation().getTargetContext();
     private static final String mInputFilePath = mContext.getString(R.string.input_file_path);
+    private static final String mStatsFile =
+            mContext.getExternalFilesDir(null) + "/Muxer." + System.currentTimeMillis() + ".csv";
     private static final String TAG = "MuxerTest";
     private static final Map<String, Integer> mMapFormat = new Hashtable<String, Integer>() {
         {
@@ -93,60 +101,80 @@
         this.mFormat = outputFormat;
     }
 
+    @BeforeClass
+    public static void writeStatsHeaderToFile() throws IOException {
+        Stats mStats = new Stats();
+        boolean status = mStats.writeStatsHeader(mStatsFile);
+        assertTrue("Unable to open stats file for writing!", status);
+        Log.d(TAG, "Saving Benchmark results in: " + mStatsFile);
+    }
+
     @Test
     public void sampleMuxerTest() throws IOException {
-        int status = -1;
         File inputFile = new File(mInputFilePath + mInputFileName);
-        if (inputFile.exists()) {
-            FileInputStream fileInput = new FileInputStream(inputFile);
-            FileDescriptor fileDescriptor = fileInput.getFD();
-            ArrayList<ByteBuffer> inputBuffer = new ArrayList<>();
-            ArrayList<MediaCodec.BufferInfo> inputBufferInfo = new ArrayList<>();
-            Extractor extractor = new Extractor();
-            int trackCount = extractor.setUpExtractor(fileDescriptor);
-            for (int currentTrack = 0; currentTrack < trackCount; currentTrack++) {
-                extractor.selectExtractorTrack(currentTrack);
-                while (true) {
-                    int sampleSize = extractor.getFrameSample();
-                    MediaCodec.BufferInfo bufferInfo = extractor.getBufferInfo();
-                    MediaCodec.BufferInfo tempBufferInfo = new MediaCodec.BufferInfo();
-                    tempBufferInfo
-                            .set(bufferInfo.offset, bufferInfo.size, bufferInfo.presentationTimeUs,
-                                    bufferInfo.flags);
-                    inputBufferInfo.add(tempBufferInfo);
-                    ByteBuffer tempSampleBuffer = ByteBuffer.allocate(tempBufferInfo.size);
-                    tempSampleBuffer.put(extractor.getFrameBuffer().array(), 0, bufferInfo.size);
-                    inputBuffer.add(tempSampleBuffer);
-                    if (sampleSize < 0) {
-                        break;
-                    }
-                }
-                MediaFormat format = extractor.getFormat(currentTrack);
-                int outputFormat = mMapFormat.getOrDefault(mFormat, -1);
-                if (outputFormat != -1) {
-                    Muxer muxer = new Muxer();
-                    int trackIndex = muxer.setUpMuxer(mContext, outputFormat, format);
-                    status = muxer.mux(trackIndex, inputBuffer, inputBufferInfo);
-                    if (status != 0) {
-                        Log.e(TAG, "Cannot perform write operation for " + mInputFileName);
-                    }
-                    muxer.deInitMuxer();
-                    muxer.dumpStatistics(mInputFileName, extractor.getClipDuration());
-                    muxer.resetMuxer();
-                    extractor.unselectExtractorTrack(currentTrack);
-                    inputBufferInfo.clear();
-                    inputBuffer.clear();
-                } else {
-                    Log.e(TAG, "Test failed for " + mInputFileName + ". Returned invalid " +
-                            "output format for given " + mFormat + " format.");
+        assertTrue("Cannot find " + mInputFileName + " in directory " + mInputFilePath,
+                inputFile.exists());
+        FileInputStream fileInput = new FileInputStream(inputFile);
+        FileDescriptor fileDescriptor = fileInput.getFD();
+        ArrayList<ByteBuffer> inputBuffer = new ArrayList<>();
+        ArrayList<MediaCodec.BufferInfo> inputBufferInfo = new ArrayList<>();
+        Extractor extractor = new Extractor();
+        int trackCount = extractor.setUpExtractor(fileDescriptor);
+        for (int currentTrack = 0; currentTrack < trackCount; currentTrack++) {
+            extractor.selectExtractorTrack(currentTrack);
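+            // Collect every sample of the track (data plus BufferInfo) before muxing.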
+            while (true) {
+                int sampleSize = extractor.getFrameSample();
+                MediaCodec.BufferInfo bufferInfo = extractor.getBufferInfo();
+                MediaCodec.BufferInfo tempBufferInfo = new MediaCodec.BufferInfo();
+                tempBufferInfo
+                        .set(bufferInfo.offset, bufferInfo.size, bufferInfo.presentationTimeUs,
+                                bufferInfo.flags);
+                inputBufferInfo.add(tempBufferInfo);
+                ByteBuffer tempSampleBuffer = ByteBuffer.allocate(tempBufferInfo.size);
+                tempSampleBuffer.put(extractor.getFrameBuffer().array(), 0, bufferInfo.size);
+                inputBuffer.add(tempSampleBuffer);
+                if (sampleSize < 0) {
+                    break;
                 }
             }
-            extractor.deinitExtractor();
-            fileInput.close();
-        } else {
-            Log.w(TAG, "Warning: Test Skipped. Cannot find " + mInputFileName + " in directory " +
-                    mInputFilePath);
+            MediaFormat format = extractor.getFormat(currentTrack);
+            int outputFormat = mMapFormat.getOrDefault(mFormat, -1);
+            assertNotEquals("Test failed for " + mInputFileName + ". Returned invalid " +
+                    "output format for given " + mFormat + " format.", -1, outputFormat);
+            Muxer muxer = new Muxer();
+            int trackIndex = muxer.setUpMuxer(mContext, outputFormat, format);
+            int status = muxer.mux(trackIndex, inputBuffer, inputBufferInfo);
+            assertEquals("Cannot perform write operation for " + mInputFileName, 0, status);
+            Log.i(TAG, "Muxed " + mInputFileName + " successfully.");
+            muxer.deInitMuxer();
+            muxer.dumpStatistics(mInputFileName, mFormat, extractor.getClipDuration(), mStatsFile);
+            muxer.resetMuxer();
+            extractor.unselectExtractorTrack(currentTrack);
+            inputBufferInfo.clear();
+            inputBuffer.clear();
         }
-        assertThat(status, is(equalTo(0)));
+        extractor.deinitExtractor();
+        fileInput.close();
+    }
+
+    @Test
+    public void sampleMuxerNativeTest() {
+        Native nativeMuxer = new Native();
+        File inputFile = new File(mInputFilePath + mInputFileName);
+        assertTrue("Cannot find " + mInputFileName + " in directory " + mInputFilePath,
+                inputFile.exists());
+        int tid = android.os.Process.myTid();
+        String mMuxOutputFile = (mContext.getFilesDir() + "/mux_" + tid + ".out");
+        int status = nativeMuxer.Mux(
+                mInputFilePath, mInputFileName, mMuxOutputFile, mStatsFile, mFormat);
+        assertEquals("Cannot perform write operation for " + mInputFileName, 0, status);
+        Log.i(TAG, "Muxed " + mInputFileName + " successfully.");
+        File muxedFile = new File(mMuxOutputFile);
+        // Cleanup temporary output file
+        if (muxedFile.exists()) {
+            assertTrue("Unable to delete " + mMuxOutputFile + " file.",
+                    muxedFile.delete());
+        }
     }
 }
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/Android.bp b/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/Android.bp
new file mode 100644
index 0000000..c72eb55
--- /dev/null
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/Android.bp
@@ -0,0 +1,32 @@
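+// JNI library loaded by the MediaBenchmark Java tests via System.loadLibrary("mediabenchmark_jni").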
+cc_test_library {
+    name: "libmediabenchmark_jni",
+
+    defaults: [
+        "libmediabenchmark_common-defaults",
+        "libmediabenchmark_soft_sanitize_all-defaults",
+    ],
+
+    srcs: [
+        "NativeExtractor.cpp",
+        "NativeMuxer.cpp",
+        "NativeEncoder.cpp",
+        "NativeDecoder.cpp",
+    ],
+
+    shared_libs: [
+        "liblog",
+    ],
+
+    static_libs: [
+        "libmediabenchmark_common",
+        "libmediabenchmark_extractor",
+        "libmediabenchmark_muxer",
+        "libmediabenchmark_decoder",
+        "libmediabenchmark_encoder",
+    ],
+
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
+}
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/CMakeLists.txt b/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/CMakeLists.txt
new file mode 100644
index 0000000..5823883
--- /dev/null
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/CMakeLists.txt
@@ -0,0 +1,44 @@
+#
+# Copyright (C) 2019 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+#
+
+cmake_minimum_required(VERSION 3.4.1)
+
+set(native_source_path "../../../../src/native")
+set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -Werror")
+
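+# Build the JNI library from the shared native benchmark sources under src/native.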
+add_library(
+  mediabenchmark_jni SHARED
+  NativeExtractor.cpp
+  NativeMuxer.cpp
+  NativeDecoder.cpp
+  NativeEncoder.cpp
+  ${native_source_path}/common/BenchmarkCommon.cpp
+  ${native_source_path}/common/Stats.cpp
+  ${native_source_path}/common/utils/Timers.cpp
+  ${native_source_path}/extractor/Extractor.cpp
+  ${native_source_path}/muxer/Muxer.cpp
+  ${native_source_path}/decoder/Decoder.cpp
+  ${native_source_path}/encoder/Encoder.cpp)
+
+include_directories(${native_source_path}/common)
+include_directories(${native_source_path}/extractor)
+include_directories(${native_source_path}/muxer)
+include_directories(${native_source_path}/decoder)
+include_directories(${native_source_path}/encoder)
+
+find_library(log-lib log)
+
+target_link_libraries(mediabenchmark_jni mediandk ${log-lib})
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/NativeDecoder.cpp b/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/NativeDecoder.cpp
new file mode 100644
index 0000000..043bc9e
--- /dev/null
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/NativeDecoder.cpp
@@ -0,0 +1,130 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NativeDecoder"
+
+#include <jni.h>
+#include <fstream>
+#include <stdio.h>
+#include <string.h>
+#include <sys/stat.h>
+
+#include <android/log.h>
+
+#include "Decoder.h"
+
+extern "C" JNIEXPORT int JNICALL Java_com_android_media_benchmark_library_Native_Decode(
+        JNIEnv *env, jobject thiz, jstring jFilePath, jstring jFileName, jstring jStatsFile,
+        jstring jCodecName, jboolean asyncMode) {
+    const char *filePath = env->GetStringUTFChars(jFilePath, nullptr);
+    const char *fileName = env->GetStringUTFChars(jFileName, nullptr);
+    string sFilePath = string(filePath) + string(fileName);
+    UNUSED(thiz);
+    FILE *inputFp = fopen(sFilePath.c_str(), "rb");
+    env->ReleaseStringUTFChars(jFileName, fileName);
+    env->ReleaseStringUTFChars(jFilePath, filePath);
+    if (!inputFp) {
+        ALOGE("Unable to open input file for reading");
+        return -1;
+    }
+
+    Decoder *decoder = new Decoder();
+    Extractor *extractor = decoder->getExtractor();
+    if (!extractor) {
+        ALOGE("Extractor creation failed");
+        return -1;
+    }
+
+    // Read file properties
+    struct stat buf;
+    stat(sFilePath.c_str(), &buf);
+    size_t fileSize = buf.st_size;
+    if (fileSize > kMaxBufferSize) {
+        ALOGE("File size greater than maximum buffer size");
+        return -1;
+    }
+    int32_t fd = fileno(inputFp);
+    int32_t trackCount = extractor->initExtractor(fd, fileSize);
+    if (trackCount <= 0) {
+        ALOGE("initExtractor failed");
+        return -1;
+    }
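+    // Decode each track of the clip with the requested codec.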
+    for (int curTrack = 0; curTrack < trackCount; curTrack++) {
+        int32_t status = extractor->setupTrackFormat(curTrack);
+        if (status != 0) {
+            ALOGE("Track Format invalid");
+            return -1;
+        }
+
+        uint8_t *inputBuffer = (uint8_t *) malloc(fileSize);
+        if (!inputBuffer) {
+            ALOGE("Insufficient memory");
+            return -1;
+        }
+
+        vector<AMediaCodecBufferInfo> frameInfo;
+        AMediaCodecBufferInfo info;
+        uint32_t inputBufferOffset = 0;
+
+        // Get frame data
+        while (1) {
+            status = extractor->getFrameSample(info);
+            if (status || !info.size) break;
+            // copy the meta data and buffer to be passed to decoder
+            if (inputBufferOffset + info.size > fileSize) {
+                ALOGE("Memory allocated not sufficient");
+                free(inputBuffer);
+                return -1;
+            }
+            memcpy(inputBuffer + inputBufferOffset, extractor->getFrameBuf(), info.size);
+            frameInfo.push_back(info);
+            inputBufferOffset += info.size;
+        }
+
+        const char *codecName = env->GetStringUTFChars(jCodecName, nullptr);
+        string sCodecName = string(codecName);
+        decoder->setupDecoder();
+        status = decoder->decode(inputBuffer, frameInfo, sCodecName, asyncMode);
+        if (status != AMEDIA_OK) {
+            ALOGE("Decode returned error");
+            free(inputBuffer);
+            env->ReleaseStringUTFChars(jCodecName, codecName);
+            return -1;
+        }
+        decoder->deInitCodec();
+        const char *inputReference = env->GetStringUTFChars(jFileName, nullptr);
+        const char *statsFile = env->GetStringUTFChars(jStatsFile, nullptr);
+        string sInputReference = string(inputReference);
+        decoder->dumpStatistics(sInputReference, sCodecName, (asyncMode ? "async" : "sync"),
+                                statsFile);
+        env->ReleaseStringUTFChars(jCodecName, codecName);
+        env->ReleaseStringUTFChars(jStatsFile, statsFile);
+        env->ReleaseStringUTFChars(jFileName, inputReference);
+        if (inputBuffer) {
+            free(inputBuffer);
+            inputBuffer = nullptr;
+        }
+        decoder->resetDecoder();
+    }
+    if (inputFp) {
+        fclose(inputFp);
+        inputFp = nullptr;
+    }
+    extractor->deInitExtractor();
+    delete decoder;
+    return 0;
+}
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/NativeEncoder.cpp b/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/NativeEncoder.cpp
new file mode 100644
index 0000000..271b852
--- /dev/null
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/NativeEncoder.cpp
@@ -0,0 +1,202 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NativeEncoder"
+
+#include <jni.h>
+#include <fstream>
+#include <iostream>
+#include <sys/stat.h>
+
+#include <android/log.h>
+
+#include "Decoder.h"
+#include "Encoder.h"
+
+#include <stdio.h>
+
+extern "C" JNIEXPORT int JNICALL Java_com_android_media_benchmark_library_Native_Encode(
+        JNIEnv *env, jobject thiz, jstring jFilePath, jstring jFileName, jstring jOutFilePath,
+        jstring jStatsFile, jstring jCodecName) {
+    const char *filePath = env->GetStringUTFChars(jFilePath, nullptr);
+    const char *fileName = env->GetStringUTFChars(jFileName, nullptr);
+    string sFilePath = string(filePath) + string(fileName);
+    UNUSED(thiz);
+    FILE *inputFp = fopen(sFilePath.c_str(), "rb");
+    env->ReleaseStringUTFChars(jFileName, fileName);
+    env->ReleaseStringUTFChars(jFilePath, filePath);
+    if (!inputFp) {
+        ALOGE("Unable to open input file for reading");
+        return -1;
+    }
+
+    Decoder *decoder = new Decoder();
+    Extractor *extractor = decoder->getExtractor();
+    if (!extractor) {
+        ALOGE("Extractor creation failed");
+        return -1;
+    }
+
+    // Read file properties
+    struct stat buf;
+    stat(sFilePath.c_str(), &buf);
+    size_t fileSize = buf.st_size;
+    if (fileSize > kMaxBufferSize) {
+        ALOGE("File size greater than maximum buffer size");
+        return -1;
+    }
+    int32_t fd = fileno(inputFp);
+    int32_t trackCount = extractor->initExtractor(fd, fileSize);
+    if (trackCount <= 0) {
+        ALOGE("initExtractor failed");
+        return -1;
+    }
+
+    for (int curTrack = 0; curTrack < trackCount; curTrack++) {
+        int32_t status = extractor->setupTrackFormat(curTrack);
+        if (status != 0) {
+            ALOGE("Track Format invalid");
+            return -1;
+        }
+        uint8_t *inputBuffer = (uint8_t *) malloc(fileSize);
+        if (!inputBuffer) {
+            ALOGE("Insufficient memory");
+            return -1;
+        }
+        vector<AMediaCodecBufferInfo> frameInfo;
+        AMediaCodecBufferInfo info;
+        uint32_t inputBufferOffset = 0;
+
+        // Get frame data
+        while (1) {
+            status = extractor->getFrameSample(info);
+            if (status || !info.size) break;
+            // copy the meta data and buffer to be passed to decoder
+            if (inputBufferOffset + info.size > fileSize) {
+                ALOGE("Memory allocated not sufficient");
+                free(inputBuffer);
+                return -1;
+            }
+            memcpy(inputBuffer + inputBufferOffset, extractor->getFrameBuf(), info.size);
+            frameInfo.push_back(info);
+            inputBufferOffset += info.size;
+        }
+        string decName = "";
+        const char *outputFilePath = env->GetStringUTFChars(jOutFilePath, nullptr);
+        FILE *outFp = fopen(outputFilePath, "wb");
+        if (outFp == nullptr) {
+            ALOGE("%s - File failed to open for writing!", outputFilePath);
+            free(inputBuffer);
+            return -1;
+        }
+        decoder->setupDecoder();
+        status = decoder->decode(inputBuffer, frameInfo, decName, false /*asyncMode */, outFp);
+        if (status != AMEDIA_OK) {
+            ALOGE("Decode returned error");
+            free(inputBuffer);
+            return -1;
+        }
+        AMediaFormat *format = extractor->getFormat();
+        if (inputBuffer) {
+            free(inputBuffer);
+            inputBuffer = nullptr;
+        }
+        const char *mime = nullptr;
+        AMediaFormat_getString(format, AMEDIAFORMAT_KEY_MIME, &mime);
+        if (!mime) {
+            ALOGE("Error in AMediaFormat_getString");
+            return -1;
+        }
+        ifstream eleStream;
+        eleStream.open(outputFilePath, ifstream::binary | ifstream::ate);
+        if (!eleStream.is_open()) {
+            ALOGE("%s - File failed to open for reading!", outputFilePath);
+            env->ReleaseStringUTFChars(jOutFilePath, outputFilePath);
+            return -1;
+        }
+        const char *codecName = env->GetStringUTFChars(jCodecName, nullptr);
+        const char *inputReference = env->GetStringUTFChars(jFileName, nullptr);
+        string sCodecName = string(codecName);
+        string sInputReference = string(inputReference);
+
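+        // Encode the decoded elementary stream in both async and sync modes.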
+        bool asyncMode[2] = {true, false};
+        for (int i = 0; i < 2; i++) {
+            size_t eleSize = eleStream.tellg();
+            eleStream.seekg(0, ifstream::beg);
+
+            // Get encoder params
+            encParameter encParams;
+            if (!strncmp(mime, "video/", 6)) {
+                AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_WIDTH, &encParams.width);
+                AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_HEIGHT, &encParams.height);
+                AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_FRAME_RATE, &encParams.frameRate);
+                AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_BIT_RATE, &encParams.bitrate);
+                if (encParams.bitrate <= 0 || encParams.frameRate <= 0) {
+                    encParams.frameRate = 25;
+                    if (!strcmp(mime, "video/3gpp") || !strcmp(mime, "video/mp4v-es")) {
+                        encParams.bitrate = 600000 /* 600 Kbps */;
+                    } else {
+                        encParams.bitrate = 8000000 /* 8 Mbps */;
+                    }
+                }
+                AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_PROFILE, &encParams.profile);
+                AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_LEVEL, &encParams.level);
+            } else {
+                AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_SAMPLE_RATE, &encParams.sampleRate);
+                AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_CHANNEL_COUNT,
+                                      &encParams.numChannels);
+                encParams.bitrate =
+                        encParams.sampleRate * encParams.numChannels * 16 /* bitsPerSample */;
+            }
+            Encoder *encoder = new Encoder();
+            encoder->setupEncoder();
+            status = encoder->encode(sCodecName, eleStream, eleSize, asyncMode[i], encParams,
+                                     (char *) mime);
+            encoder->deInitCodec();
+            cout << "codec : " << codecName << endl;
+            ALOGV(" asyncMode = %d \n", asyncMode[i]);
+            const char *statsFile = env->GetStringUTFChars(jStatsFile, nullptr);
+            encoder->dumpStatistics(sInputReference, extractor->getClipDuration(), sCodecName,
+                                    (asyncMode[i] ? "async" : "sync"), statsFile);
+            env->ReleaseStringUTFChars(jStatsFile, statsFile);
+            encoder->resetEncoder();
+            delete encoder;
+            encoder = nullptr;
+        }
+        eleStream.close();
+        if (outFp) {
+            fclose(outFp);
+            outFp = nullptr;
+        }
+        env->ReleaseStringUTFChars(jFileName, inputReference);
+        env->ReleaseStringUTFChars(jCodecName, codecName);
+        env->ReleaseStringUTFChars(jOutFilePath, outputFilePath);
+        if (format) {
+            AMediaFormat_delete(format);
+            format = nullptr;
+        }
+        decoder->deInitCodec();
+        decoder->resetDecoder();
+    }
+    if (inputFp) {
+        fclose(inputFp);
+        inputFp = nullptr;
+    }
+    extractor->deInitExtractor();
+    delete decoder;
+    return 0;
+}
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/NativeExtractor.cpp b/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/NativeExtractor.cpp
new file mode 100644
index 0000000..a762760
--- /dev/null
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/NativeExtractor.cpp
@@ -0,0 +1,81 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NativeExtractor"
+
+#include <jni.h>
+#include <fstream>
+#include <string>
+#include <sys/stat.h>
+
+#include "Extractor.h"
+
+extern "C" JNIEXPORT int32_t JNICALL Java_com_android_media_benchmark_library_Native_Extract(
+        JNIEnv *env, jobject thiz, jstring jInputFilePath, jstring jInputFileName,
+        jstring jStatsFile) {
+    UNUSED(thiz);
+    const char *inputFilePath = env->GetStringUTFChars(jInputFilePath, nullptr);
+    const char *inputFileName = env->GetStringUTFChars(jInputFileName, nullptr);
+    string sFilePath = string(inputFilePath) + string(inputFileName);
+    FILE *inputFp = fopen(sFilePath.c_str(), "rb");
+    if (!inputFp) {
+        ALOGE("Unable to open input file for reading");
+        return -1;
+    }
+
+    // Read file properties
+    struct stat buf;
+    stat(sFilePath.c_str(), &buf);
+    size_t fileSize = buf.st_size;
+    int32_t fd = fileno(inputFp);
+
+    Extractor *extractObj = new Extractor();
+    int32_t trackCount = extractObj->initExtractor((long) fd, fileSize);
+    if (trackCount <= 0) {
+        ALOGE("initExtractor failed");
+        return -1;
+    }
+
+    int32_t trackID = 0;
+    const char *mime = nullptr;
+    int32_t status = extractObj->extract(trackID);
+    if (status != AMEDIA_OK) {
+        ALOGE("Extraction failed");
+        return -1;
+    }
+
+    if (inputFp) {
+        fclose(inputFp);
+        inputFp = nullptr;
+    }
+    status = extractObj->setupTrackFormat(trackID);
+    AMediaFormat *format = extractObj->getFormat();
+    if (!format) {
+        ALOGE("format is null!");
+        return -1;
+    }
+    AMediaFormat_getString(format, AMEDIAFORMAT_KEY_MIME, &mime);
+    if (!mime) {
+        ALOGE("mime is null!");
+        return -1;
+    }
+    extractObj->deInitExtractor();
+    const char *statsFile = env->GetStringUTFChars(jStatsFile, nullptr);
+    extractObj->dumpStatistics(string(inputFileName), string(mime), statsFile);
+    env->ReleaseStringUTFChars(jStatsFile, statsFile);
+    env->ReleaseStringUTFChars(jInputFilePath, inputFilePath);
+    env->ReleaseStringUTFChars(jInputFileName, inputFileName);
+
+    delete extractObj;
+    return status;
+}
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/NativeMuxer.cpp b/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/NativeMuxer.cpp
new file mode 100644
index 0000000..a5ef5b8
--- /dev/null
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/NativeMuxer.cpp
@@ -0,0 +1,184 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NativeMuxer"
+
+#include <jni.h>
+#include <fstream>
+#include <string>
+#include <sys/stat.h>
+
+#include "Muxer.h"
+
+MUXER_OUTPUT_T getMuxerOutFormat(const char *fmt);
+
+extern "C" JNIEXPORT int32_t JNICALL Java_com_android_media_benchmark_library_Native_Mux(
+        JNIEnv *env, jobject thiz, jstring jInputFilePath, jstring jInputFileName,
+        jstring jOutputFilePath, jstring jStatsFile, jstring jFormat) {
+    UNUSED(thiz);
+    ALOGV("Mux the samples given by extractor");
+    const char *inputFilePath = env->GetStringUTFChars(jInputFilePath, nullptr);
+    const char *inputFileName = env->GetStringUTFChars(jInputFileName, nullptr);
+    string sInputFile = string(inputFilePath) + string(inputFileName);
+    FILE *inputFp = fopen(sInputFile.c_str(), "rb");
+    if (!inputFp) {
+        ALOGE("Unable to open input file for reading");
+        return -1;
+    }
+
+    const char *fmt = env->GetStringUTFChars(jFormat, nullptr);
+    MUXER_OUTPUT_T outputFormat = getMuxerOutFormat(fmt);
+    if (outputFormat == MUXER_OUTPUT_FORMAT_INVALID) {
+        ALOGE("output format is MUXER_OUTPUT_FORMAT_INVALID");
+        return MUXER_OUTPUT_FORMAT_INVALID;
+    }
+
+    Muxer *muxerObj = new Muxer();
+    Extractor *extractor = muxerObj->getExtractor();
+    if (!extractor) {
+        ALOGE("Extractor creation failed");
+        return -1;
+    }
+
+    // Read file properties
+    struct stat buf;
+    stat(sInputFile.c_str(), &buf);
+    size_t fileSize = buf.st_size;
+    int32_t fd = fileno(inputFp);
+
+    int32_t trackCount = extractor->initExtractor(fd, fileSize);
+    if (trackCount <= 0) {
+        ALOGE("initExtractor failed");
+        return -1;
+    }
+
+    for (int curTrack = 0; curTrack < trackCount; curTrack++) {
+        int32_t status = extractor->setupTrackFormat(curTrack);
+        if (status != 0) {
+            ALOGE("Track Format invalid");
+            return -1;
+        }
+
+        uint8_t *inputBuffer = (uint8_t *) malloc(fileSize);
+        if (!inputBuffer) {
+            ALOGE("Allocation Failed");
+            return -1;
+        }
+        vector<AMediaCodecBufferInfo> frameInfos;
+        AMediaCodecBufferInfo info;
+        uint32_t inputBufferOffset = 0;
+
+        // Get Frame Data
+        while (1) {
+            status = extractor->getFrameSample(info);
+            if (status || !info.size) break;
+            // copy the meta data and buffer to be passed to muxer
+            if (inputBufferOffset + info.size > fileSize) {
+                ALOGE("Memory allocated not sufficient");
+                if (inputBuffer) {
+                    free(inputBuffer);
+                    inputBuffer = nullptr;
+                }
+                return -1;
+            }
+            memcpy(inputBuffer + inputBufferOffset, extractor->getFrameBuf(),
+                   static_cast<size_t>(info.size));
+            info.offset = inputBufferOffset;
+            frameInfos.push_back(info);
+            inputBufferOffset += info.size;
+        }
+
+        const char *outputFilePath = env->GetStringUTFChars(jOutputFilePath, nullptr);
+        FILE *outputFp = fopen(((string) outputFilePath).c_str(), "w+b");
+        env->ReleaseStringUTFChars(jOutputFilePath, outputFilePath);
+
+        if (!outputFp) {
+            ALOGE("Unable to open output file for writing");
+            if (inputBuffer) {
+                free(inputBuffer);
+                inputBuffer = nullptr;
+            }
+            return -1;
+        }
+        int32_t outFd = fileno(outputFp);
+
+        status = muxerObj->initMuxer(outFd, (MUXER_OUTPUT_T) outputFormat);
+        if (status != 0) {
+            ALOGE("initMuxer failed");
+            if (inputBuffer) {
+                free(inputBuffer);
+                inputBuffer = nullptr;
+            }
+            return -1;
+        }
+
+        status = muxerObj->mux(inputBuffer, frameInfos);
+        if (status != 0) {
+            ALOGE("Mux failed");
+            if (inputBuffer) {
+                free(inputBuffer);
+                inputBuffer = nullptr;
+            }
+            return -1;
+        }
+        muxerObj->deInitMuxer();
+        const char *statsFile = env->GetStringUTFChars(jStatsFile, nullptr);
+        string muxFormat(fmt);
+        muxerObj->dumpStatistics(string(inputFileName), muxFormat, statsFile);
+        env->ReleaseStringUTFChars(jStatsFile, statsFile);
+        env->ReleaseStringUTFChars(jInputFilePath, inputFilePath);
+        env->ReleaseStringUTFChars(jInputFileName, inputFileName);
+
+        if (inputBuffer) {
+            free(inputBuffer);
+            inputBuffer = nullptr;
+        }
+        if (outputFp) {
+            fclose(outputFp);
+            outputFp = nullptr;
+        }
+        muxerObj->resetMuxer();
+    }
+    if (inputFp) {
+        fclose(inputFp);
+        inputFp = nullptr;
+    }
+    env->ReleaseStringUTFChars(jFormat, fmt);
+    extractor->deInitExtractor();
+    delete muxerObj;
+
+    return 0;
+}
+
+MUXER_OUTPUT_T getMuxerOutFormat(const char *fmt) {
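+    // Map the format string passed from the Java layer to the corresponding muxer output format.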
+    static const struct {
+        const char *name;
+        int value;
+    } kFormatMaps[] = {{"mp4",  MUXER_OUTPUT_FORMAT_MPEG_4},
+                       {"webm", MUXER_OUTPUT_FORMAT_WEBM},
+                       {"3gpp", MUXER_OUTPUT_FORMAT_3GPP},
+                       {"ogg",  MUXER_OUTPUT_FORMAT_OGG}};
+
+    int32_t muxOutputFormat = MUXER_OUTPUT_FORMAT_INVALID;
+    for (auto kFormatMap : kFormatMaps) {
+        if (!strcmp(fmt, kFormatMap.name)) {
+            muxOutputFormat = kFormatMap.value;
+            break;
+        }
+    }
+    return (MUXER_OUTPUT_T) muxOutputFormat;
+}
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Decoder.java b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Decoder.java
index 2cd27c2..3b1eed4 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Decoder.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Decoder.java
@@ -239,11 +239,16 @@
      * Prints out the statistics in the information log
      *
      * @param inputReference The operation being performed, in this case decode
+     * @param componentName  Name of the component/codec
+     * @param mode           The operating mode: Sync/Async
      * @param durationUs     Duration of the clip in microseconds
+     * @param statsFile      The output file where the stats data is written
      */
-    public void dumpStatistics(String inputReference, long durationUs) {
+    public void dumpStatistics(String inputReference, String componentName, String mode,
+            long durationUs, String statsFile) throws IOException {
         String operation = "decode";
-        mStats.dumpStatistics(operation, inputReference, durationUs);
+        mStats.dumpStatistics(
+                inputReference, operation, componentName, mode, durationUs, statsFile);
     }
 
     /**
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Encoder.java b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Encoder.java
index 03db294..40cf8bd 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Encoder.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Encoder.java
@@ -326,12 +326,17 @@
     /**
      * Prints out the statistics in the information log
      *
-     * @param inputReference The operation being performed, in this case encode
+     * @param inputReference The operation being performed, in this case encode
+     * @param componentName  Name of the component/codec
+     * @param mode           The operating mode: Sync/Async
      * @param durationUs     Duration of the clip in microseconds
+     * @param statsFile      The output file where the stats data is written
      */
-    public void dumpStatistics(String inputReference, long durationUs) {
+    public void dumpStatistics(String inputReference, String componentName, String mode,
+                               long durationUs, String statsFile) throws IOException {
         String operation = "encode";
-        mStats.dumpStatistics(operation, inputReference, durationUs);
+        mStats.dumpStatistics(
+                inputReference, operation, componentName, mode, durationUs, statsFile);
     }
 
     /**
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Extractor.java b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Extractor.java
index 459e2a9..f3024e7 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Extractor.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Extractor.java
@@ -167,9 +167,12 @@
      * Write the benchmark logs for the given input file
      *
      * @param inputReference Name of the input file
+     * @param mimeType       Mime type of the muxed file
+     * @param statsFile      The output file where the stats data is written
      */
-    public void dumpStatistics(String inputReference) {
+    public void dumpStatistics(String inputReference, String mimeType, String statsFile)
+            throws IOException {
         String operation = "extract";
-        mStats.dumpStatistics(operation, inputReference, mDurationUs);
+        mStats.dumpStatistics(inputReference, operation, mimeType, "", mDurationUs, statsFile);
     }
 }
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Muxer.java b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Muxer.java
index 49eaa1c..340b539 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Muxer.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Muxer.java
@@ -101,10 +101,13 @@
      * Write the benchmark logs for the given input file
      *
      * @param inputReference Name of the input file
+     * @param muxFormat      Format of the muxed output
      * @param clipDuration   Duration of the given inputReference file
+     * @param statsFile      The output file where the stats data is written
      */
-    public void dumpStatistics(String inputReference, long clipDuration) {
+    public void dumpStatistics(String inputReference, String muxFormat, long clipDuration,
+                               String statsFile) throws IOException {
         String operation = "mux";
-        mStats.dumpStatistics(operation, inputReference, clipDuration);
+        mStats.dumpStatistics(inputReference, operation, muxFormat, "", clipDuration, statsFile);
     }
 }
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Native.java b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Native.java
new file mode 100644
index 0000000..38b608a
--- /dev/null
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Native.java
@@ -0,0 +1,32 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.media.benchmark.library;
+
+public class Native {
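+    // Thin JNI wrapper exposing the native extract/mux/decode/encode benchmark paths to the Java tests.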
+    static { System.loadLibrary("mediabenchmark_jni"); }
+
+    public native int Extract(String inputFilePath, String inputFileName, String statsFile);
+
+    public native int Mux(String inputFilePath, String inputFileName, String outputFilePath,
+            String statsFile, String format);
+
+    public native int Decode(String inputFilePath, String inputFileName, String statsFile,
+            String codecName, boolean asyncMode);
+
+    public native int Encode(String inputFilePath, String inputFileName, String outputFilePath,
+            String statsFile, String codecName);
+}
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Stats.java b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Stats.java
index 18ab5be..7245a3a 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Stats.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Stats.java
@@ -18,6 +18,10 @@
 
 import android.util.Log;
 
+import java.io.File;
+import java.io.FileDescriptor;
+import java.io.FileOutputStream;
+import java.io.IOException;
 import java.util.ArrayList;
 
 /**
@@ -91,14 +95,38 @@
     }
 
     /**
+     * Writes the stats header to a file
+     * <p>
+     * \param statsFile    file where the stats data is to be written
+     **/
+    public boolean writeStatsHeader(String statsFile) throws IOException {
+        File outputFile = new File(statsFile);
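+        // Opened in append mode; the header is written once per test class from a @BeforeClass hook.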
+        FileOutputStream out = new FileOutputStream(outputFile, true);
+        if (!outputFile.exists()) {
+            out.close();
+            return false;
+        }
+        String statsHeader =
+                "currentTime, fileName, operation, componentName, NDK/SDK, sync/async, setupTime, "
+                        + "destroyTime, minimumTime, maximumTime, "
+                        + "averageTime, timeToProcess1SecContent, totalBytesProcessedPerSec, "
+                        + "timeToFirstFrame, totalSizeInBytes, totalTime\n";
+        out.write(statsHeader.getBytes());
+        out.close();
+        return true;
+    }
+
+    /**
      * Dumps the stats of the operation for a given input media.
      * <p>
+     * \param inputReference input media
      * \param operation      describes the operation performed on the input media
      * (i.e. extract/mux/decode/encode)
-     * \param inputReference input media
-     * \param durationUs    is a duration of the input media in microseconds.
+     * \param componentName  name of the codec/muxFormat/mime
+     * \param mode           the operating mode: sync/async.
+     * \param durationUs     is a duration of the input media in microseconds.
+     * \param statsFile      the file where the stats data is to be written.
      */
-    public void dumpStatistics(String operation, String inputReference, long durationUs) {
+    public void dumpStatistics(String inputReference, String operation, String componentName,
+            String mode, long durationUs, String statsFile) throws IOException {
         if (mOutputTimer.size() == 0) {
             Log.e(TAG, "No output produced");
             return;
@@ -121,18 +149,30 @@
                 maxTimeTakenNs = intervalNs;
             }
         }
-        // Print the Stats
-        Log.i(TAG, "Input Reference : " + inputReference);
-        Log.i(TAG, "Setup Time in nano sec : " + mInitTimeNs);
-        Log.i(TAG, "Average Time in nano sec : " + totalTimeTakenNs / mOutputTimer.size());
-        Log.i(TAG, "Time to first frame in nano sec : " + timeToFirstFrameNs);
-        Log.i(TAG, "Time taken (in nano sec) to " + operation + " 1 sec of content : " +
-                timeTakenPerSec);
-        Log.i(TAG, "Total bytes " + operation + "ed : " + size);
-        Log.i(TAG, "Number of bytes " + operation + "ed per second : " +
-                (size * 1000000000) / totalTimeTakenNs);
-        Log.i(TAG, "Minimum Time in nano sec : " + minTimeTakenNs);
-        Log.i(TAG, "Maximum Time in nano sec : " + maxTimeTakenNs);
-        Log.i(TAG, "Destroy Time in nano sec : " + mDeInitTimeNs);
+
+        // Write the stats row data to file
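+        // Keep the column order in sync with the header written by writeStatsHeader().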
+        String rowData = "";
+        rowData += System.nanoTime() + ", ";
+        rowData += inputReference + ", ";
+        rowData += operation + ", ";
+        rowData += componentName + ", ";
+        rowData += "SDK, ";
+        rowData += mode + ", ";
+        rowData += mInitTimeNs + ", ";
+        rowData += mDeInitTimeNs + ", ";
+        rowData += minTimeTakenNs + ", ";
+        rowData += maxTimeTakenNs + ", ";
+        rowData += totalTimeTakenNs / mOutputTimer.size() + ", ";
+        rowData += timeTakenPerSec + ", ";
+        rowData += (size * 1000000000) / totalTimeTakenNs + ", ";
+        rowData += timeToFirstFrameNs + ", ";
+        rowData += size + ", ";
+        rowData += totalTimeTakenNs + "\n";
+
+        File outputFile = new File(statsFile);
+        FileOutputStream out = new FileOutputStream(outputFile, true);
+        assert outputFile.exists() : "Failed to open the stats file for writing!";
+        out.write(rowData.getBytes());
+        out.close();
     }
-}
\ No newline at end of file
+}
diff --git a/media/tests/benchmark/README.md b/media/tests/benchmark/README.md
index 6627462..05fbe6f 100644
--- a/media/tests/benchmark/README.md
+++ b/media/tests/benchmark/README.md
@@ -145,3 +145,12 @@
 ```
 adb shell /data/local/tmp/C2DecoderTest -P /data/local/tmp/MediaBenchmark/res/
 ```
+## C2 Encoder
+
+The test encodes the input stream and benchmarks the codec2 encoders available on the device.
+
+The setup steps are the same as for the [extractor](#extractor).
+
+```
+adb shell /data/local/tmp/C2EncoderTest -P /data/local/tmp/MediaBenchmark/res/
+```
diff --git a/media/tests/benchmark/src/native/common/Android.bp b/media/tests/benchmark/src/native/common/Android.bp
index babc329..f8ea25c 100644
--- a/media/tests/benchmark/src/native/common/Android.bp
+++ b/media/tests/benchmark/src/native/common/Android.bp
@@ -29,7 +29,7 @@
 
     export_include_dirs: ["."],
 
-    ldflags: ["-Wl,-Bsymbolic"]
+    ldflags: ["-Wl,-Bsymbolic"],
 }
 
 cc_defaults {
@@ -55,7 +55,7 @@
     cflags: [
         "-Wall",
         "-Werror",
-    ]
+    ],
 }
 
 cc_library_static {
@@ -66,18 +66,20 @@
 
     srcs: [
         "BenchmarkC2Common.cpp",
+        "BenchmarkCommon.cpp",
+        "Stats.cpp",
+        "utils/Timers.cpp",
     ],
 
     export_include_dirs: ["."],
 
-    ldflags: ["-Wl,-Bsymbolic"]
+    ldflags: ["-Wl,-Bsymbolic"],
 }
 
 cc_defaults {
     name: "libmediabenchmark_codec2_common-defaults",
 
     defaults: [
-        "libmediabenchmark_common-defaults",
         "libcodec2-hidl-client-defaults",
         "libmediabenchmark_soft_sanitize_all-defaults",
     ],
@@ -88,7 +90,14 @@
 
     shared_libs: [
         "libcodec2_client",
-    ]
+        "libmediandk",
+        "liblog",
+    ],
+
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
 }
 
 // public dependency for native implementation
@@ -102,5 +111,5 @@
             "signed-integer-overflow",
         ],
         cfi: true,
-    }
+    },
 }
diff --git a/media/tests/benchmark/src/native/common/BenchmarkC2Common.cpp b/media/tests/benchmark/src/native/common/BenchmarkC2Common.cpp
index 622a0e1..e09f468 100644
--- a/media/tests/benchmark/src/native/common/BenchmarkC2Common.cpp
+++ b/media/tests/benchmark/src/native/common/BenchmarkC2Common.cpp
@@ -22,6 +22,9 @@
 int32_t BenchmarkC2Common::setupCodec2() {
     ALOGV("In %s", __func__);
     mClient = android::Codec2Client::CreateFromService("default");
+    if (!mClient) {
+        mClient = android::Codec2Client::CreateFromService("software");
+    }
     if (!mClient) return -1;
 
     std::shared_ptr<C2AllocatorStore> store = android::GetCodec2PlatformAllocatorStore();
diff --git a/media/tests/benchmark/src/native/common/BenchmarkCommon.cpp b/media/tests/benchmark/src/native/common/BenchmarkCommon.cpp
index ab74508..cb49b8e 100644
--- a/media/tests/benchmark/src/native/common/BenchmarkCommon.cpp
+++ b/media/tests/benchmark/src/native/common/BenchmarkCommon.cpp
@@ -56,6 +56,7 @@
 
 void OnErrorCB(AMediaCodec *codec, void *userdata, media_status_t err, int32_t actionCode,
                const char *detail) {
+    (void)codec;
     ALOGE("OnErrorCB: err(%d), actionCode(%d), detail(%s)", err, actionCode, detail);
     CallBackHandle *self = (CallBackHandle *)userdata;
     self->mSawError = true;
@@ -91,7 +92,7 @@
 
     /* Configure codec with the given format*/
     const char *s = AMediaFormat_toString(format);
-    ALOGV("Input format: %s\n", s);
+    ALOGI("Input format: %s\n", s);
 
     media_status_t status = AMediaCodec_configure(codec, format, nullptr, nullptr, isEncoder);
     if (status != AMEDIA_OK) {
@@ -99,4 +100,4 @@
         return nullptr;
     }
     return codec;
-}
\ No newline at end of file
+}
diff --git a/media/tests/benchmark/src/native/common/BenchmarkCommon.h b/media/tests/benchmark/src/native/common/BenchmarkCommon.h
index 8153a86..c11fe36 100644
--- a/media/tests/benchmark/src/native/common/BenchmarkCommon.h
+++ b/media/tests/benchmark/src/native/common/BenchmarkCommon.h
@@ -17,15 +17,18 @@
 #ifndef __BENCHMARK_COMMON_H__
 #define __BENCHMARK_COMMON_H__
 
+#include <sys/stat.h>
 #include <inttypes.h>
 #include <mutex>
 #include <queue>
 #include <thread>
+#include <iostream>
 
 #include <media/NdkMediaCodec.h>
 #include <media/NdkMediaError.h>
 
 #include "Stats.h"
+#define UNUSED(x) (void)(x)
 
 using namespace std;
 
diff --git a/media/tests/benchmark/src/native/common/Stats.cpp b/media/tests/benchmark/src/native/common/Stats.cpp
index 2d9bb31..bfde125 100644
--- a/media/tests/benchmark/src/native/common/Stats.cpp
+++ b/media/tests/benchmark/src/native/common/Stats.cpp
@@ -17,8 +17,10 @@
 //#define LOG_NDEBUG 0
 #define LOG_TAG "Stats"
 
+#include <ctime>
 #include <iostream>
 #include <stdint.h>
+#include <fstream>
 
 #include "Stats.h"
 
@@ -28,16 +30,20 @@
  * \param operation      describes the operation performed on the input media
  *                       (i.e. extract/mux/decode/encode)
  * \param inputReference input media
- * \param duarationUs    is a duration of the input media in microseconds.
+ * \param durationUs     is a duration of the input media in microseconds.
+ * \param componentName  describes the codecName/muxFormat/mimeType.
+ * \param mode           the operating mode: sync/async.
+ * \param statsFile      the file where the stats data is to be written.
  */
-void Stats::dumpStatistics(std::string operation, std::string inputReference, int64_t duarationUs) {
+void Stats::dumpStatistics(string operation, string inputReference, int64_t durationUs,
+                           string componentName, string mode, string statsFile) {
     ALOGV("In %s", __func__);
     if (!mOutputTimer.size()) {
         ALOGE("No output produced");
         return;
     }
     nsecs_t totalTimeTakenNs = getTotalTime();
-    nsecs_t timeTakenPerSec = (totalTimeTakenNs * 1000000) / duarationUs;
+    nsecs_t timeTakenPerSec = (totalTimeTakenNs * 1000000) / durationUs;
     nsecs_t timeToFirstFrameNs = *mOutputTimer.begin() - mStartTimeNs;
     int32_t size = std::accumulate(mFrameSizes.begin(), mFrameSizes.end(), 0);
     // get min and max output intervals.
@@ -52,15 +58,32 @@
         else if (maxTimeTakenNs < intervalNs) maxTimeTakenNs = intervalNs;
     }
 
-    // Print the Stats
-    std::cout << "Input Reference : " << inputReference << endl;
-    std::cout << "Setup Time in nano sec : " << mInitTimeNs << endl;
-    std::cout << "Average Time in nano sec : " << totalTimeTakenNs / mOutputTimer.size() << endl;
-    std::cout << "Time to first frame in nano sec : " << timeToFirstFrameNs << endl;
-    std::cout << "Time taken (in nano sec) to " << operation
-              << " 1 sec of content : " << timeTakenPerSec << endl;
-    std::cout << "Total bytes " << operation << "ed : " << size << endl;
-    std::cout << "Minimum Time in nano sec : " << minTimeTakenNs << endl;
-    std::cout << "Maximum Time in nano sec : " << maxTimeTakenNs << endl;
-    std::cout << "Destroy Time in nano sec : " << mDeInitTimeNs << endl;
+    // Write the stats data to file.
+    int64_t dataSize = size;
+    int64_t bytesPerSec = ((int64_t)dataSize * 1000000000) / totalTimeTakenNs;
+    string rowData = "";
+    rowData.append(to_string(systemTime(CLOCK_MONOTONIC)) + ", ");
+    rowData.append(inputReference + ", ");
+    rowData.append(operation + ", ");
+    rowData.append(componentName + ", ");
+    rowData.append("NDK, ");
+    rowData.append(mode + ", ");
+    rowData.append(to_string(mInitTimeNs) + ", ");
+    rowData.append(to_string(mDeInitTimeNs) + ", ");
+    rowData.append(to_string(minTimeTakenNs) + ", ");
+    rowData.append(to_string(maxTimeTakenNs) + ", ");
+    rowData.append(to_string(totalTimeTakenNs / mOutputTimer.size()) + ", ");
+    rowData.append(to_string(timeTakenPerSec) + ", ");
+    rowData.append(to_string(bytesPerSec) + ", ");
+    rowData.append(to_string(timeToFirstFrameNs) + ", ");
+    rowData.append(to_string(size) + ",");
+    rowData.append(to_string(totalTimeTakenNs) + ",\n");
+
+    ofstream out(statsFile, ios::out | ios::app);
+    if (!out.is_open()) {
+        ALOGE("Failed to open stats file for writing!");
+        return;
+    }
+    out << rowData;
+    out.close();
 }
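
For reference, the rewritten Stats::dumpStatistics() above appends one comma-separated row per run to the given stats file instead of printing to stdout. Below is a minimal sketch of a matching header row; the column names are assumptions inferred from the rowData appends in this hunk and are not part of the patch itself.

```
// Illustrative only: writes a header row matching the column order produced by
// the rewritten Stats::dumpStatistics() above. Column names are assumptions
// inferred from the patch, not taken from it.
#include <fstream>
#include <string>

static void writeStatsHeaderRow(const std::string &statsFile) {
    std::ofstream out(statsFile, std::ios::out | std::ios::app);
    if (!out.is_open()) return;
    out << "currentTime, fileName, operation, componentName, NDK/SDK, sync/async, "
           "setupTimeNs, destroyTimeNs, minimumTimeNs, maximumTimeNs, averageTimeNs, "
           "timeToProcess1SecContentNs, bytesProcessedPerSec, timeToFirstFrameNs, "
           "totalBytesProcessed, totalTimeNs,\n";
}
```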
diff --git a/media/tests/benchmark/src/native/common/Stats.h b/media/tests/benchmark/src/native/common/Stats.h
index 2f556ee..18e4b06 100644
--- a/media/tests/benchmark/src/native/common/Stats.h
+++ b/media/tests/benchmark/src/native/common/Stats.h
@@ -18,6 +18,7 @@
 #define __STATS_H__
 
 #include <android/log.h>
+#include <inttypes.h>
 
 #ifndef ALOG
 #define ALOG(priority, tag, ...) ((void)__android_log_print(ANDROID_##priority, tag, __VA_ARGS__))
@@ -25,6 +26,12 @@
 #define ALOGI(...) ALOG(LOG_INFO, LOG_TAG, __VA_ARGS__)
 #define ALOGE(...) ALOG(LOG_ERROR, LOG_TAG, __VA_ARGS__)
 #define ALOGD(...) ALOG(LOG_DEBUG, LOG_TAG, __VA_ARGS__)
+#define ALOGW(...) ALOG(LOG_WARN, LOG_TAG, __VA_ARGS__)
+
+#ifndef LOG_NDEBUG
+#define LOG_NDEBUG 1
+#endif
+
 #if LOG_NDEBUG
 #define ALOGV(cond, ...)   ((void)0)
 #else
@@ -95,7 +102,8 @@
         return (*(mOutputTimer.end() - 1) - mStartTimeNs);
     }
 
-    void dumpStatistics(std::string operation, std::string inputReference, int64_t duarationUs);
+    void dumpStatistics(string operation, string inputReference, int64_t durationUs,
+                        string codecName = "", string mode = "", string statsFile = "");
 };
 
 #endif  // __STATS_H__
diff --git a/media/tests/benchmark/src/native/decoder/Android.bp b/media/tests/benchmark/src/native/decoder/Android.bp
index b5072ab..9791c11 100644
--- a/media/tests/benchmark/src/native/decoder/Android.bp
+++ b/media/tests/benchmark/src/native/decoder/Android.bp
@@ -27,24 +27,26 @@
 
     export_include_dirs: ["."],
 
-    ldflags: ["-Wl,-Bsymbolic"]
+    ldflags: ["-Wl,-Bsymbolic"],
 }
 
 cc_library_static {
     name: "libmediabenchmark_codec2_decoder",
     defaults: [
-        "libmediabenchmark_common-defaults",
         "libmediabenchmark_codec2_common-defaults",
     ],
 
-    srcs: ["C2Decoder.cpp"],
+    srcs: [
+        "C2Decoder.cpp",
+        "Decoder.cpp",
+    ],
 
     static_libs: [
         "libmediabenchmark_codec2_common",
-        "libmediabenchmark_extractor",
+        "libmediabenchmark_codec2_extractor",
     ],
 
     export_include_dirs: ["."],
 
-    ldflags: ["-Wl,-Bsymbolic"]
+    ldflags: ["-Wl,-Bsymbolic"],
 }
diff --git a/media/tests/benchmark/src/native/decoder/Decoder.cpp b/media/tests/benchmark/src/native/decoder/Decoder.cpp
index ac0d525..2171589 100644
--- a/media/tests/benchmark/src/native/decoder/Decoder.cpp
+++ b/media/tests/benchmark/src/native/decoder/Decoder.cpp
@@ -225,12 +225,12 @@
 }
 
 void Decoder::deInitCodec() {
-    int64_t sTime = mStats->getCurTime();
     if (mFormat) {
         AMediaFormat_delete(mFormat);
         mFormat = nullptr;
     }
     if (!mCodec) return;
+    int64_t sTime = mStats->getCurTime();
     AMediaCodec_stop(mCodec);
     AMediaCodec_delete(mCodec);
     int64_t eTime = mStats->getCurTime();
@@ -238,10 +238,11 @@
     mStats->setDeInitTime(timeTaken);
 }
 
-void Decoder::dumpStatistics(string inputReference) {
+void Decoder::dumpStatistics(string inputReference, string componentName, string mode,
+                             string statsFile) {
     int64_t durationUs = mExtractor->getClipDuration();
     string operation = "decode";
-    mStats->dumpStatistics(operation, inputReference, durationUs);
+    mStats->dumpStatistics(operation, inputReference, durationUs, componentName, mode, statsFile);
 }
 
 void Decoder::resetDecoder() {
diff --git a/media/tests/benchmark/src/native/decoder/Decoder.h b/media/tests/benchmark/src/native/decoder/Decoder.h
index aeda080..f3fa6a1 100644
--- a/media/tests/benchmark/src/native/decoder/Decoder.h
+++ b/media/tests/benchmark/src/native/decoder/Decoder.h
@@ -71,7 +71,8 @@
     int32_t decode(uint8_t *inputBuffer, vector<AMediaCodecBufferInfo> &frameInfo,
                    string &codecName, bool asyncMode, FILE *outFp = nullptr);
 
-    void dumpStatistics(string inputReference);
+    void dumpStatistics(string inputReference, string componentName = "", string mode = "",
+                        string statsFile = "");
 
   private:
     AMediaCodec *mCodec;
diff --git a/media/tests/benchmark/src/native/encoder/Android.bp b/media/tests/benchmark/src/native/encoder/Android.bp
index 239f378..8de7823 100644
--- a/media/tests/benchmark/src/native/encoder/Android.bp
+++ b/media/tests/benchmark/src/native/encoder/Android.bp
@@ -23,11 +23,31 @@
 
     srcs: ["Encoder.cpp"],
 
-    static_libs: ["libmediabenchmark_extractor",
-                  "libmediabenchmark_decoder",
+    static_libs: [
+        "libmediabenchmark_extractor",
+        "libmediabenchmark_decoder",
     ],
 
     export_include_dirs: ["."],
 
-    ldflags: ["-Wl,-Bsymbolic"]
+    ldflags: ["-Wl,-Bsymbolic"],
+}
+
+cc_library_static {
+    name: "libmediabenchmark_codec2_encoder",
+    defaults: [
+        "libmediabenchmark_codec2_common-defaults",
+    ],
+
+    srcs: ["C2Encoder.cpp"],
+
+    static_libs: [
+        "libmediabenchmark_codec2_common",
+        "libmediabenchmark_codec2_extractor",
+        "libmediabenchmark_codec2_decoder",
+    ],
+
+    export_include_dirs: ["."],
+
+    ldflags: ["-Wl,-Bsymbolic"],
 }
diff --git a/media/tests/benchmark/src/native/encoder/C2Encoder.cpp b/media/tests/benchmark/src/native/encoder/C2Encoder.cpp
new file mode 100644
index 0000000..33429ef
--- /dev/null
+++ b/media/tests/benchmark/src/native/encoder/C2Encoder.cpp
@@ -0,0 +1,264 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2Encoder"
+
+#include "C2Encoder.h"
+
+int32_t C2Encoder::createCodec2Component(string compName, AMediaFormat *format) {
+    ALOGV("In %s", __func__);
+    mListener.reset(new CodecListener(
+            [this](std::list<std::unique_ptr<C2Work>> &workItems) { handleWorkDone(workItems); }));
+    if (!mListener) return -1;
+
+    const char *mime = nullptr;
+    AMediaFormat_getString(format, AMEDIAFORMAT_KEY_MIME, &mime);
+    if (!mime) {
+        ALOGE("Error in AMediaFormat_getString");
+        return -1;
+    }
+    // Configure the plugin with Input properties
+    std::vector<C2Param *> configParam;
+    if (!strncmp(mime, "audio/", 6)) {
+        mIsAudioEncoder = true;
+        int32_t numChannels;
+        if (!AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_SAMPLE_RATE, &mSampleRate)) {
+            ALOGE("AMEDIAFORMAT_KEY_SAMPLE_RATE not set");
+            return -1;
+        }
+        if (!AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_CHANNEL_COUNT, &numChannels)) {
+            ALOGE("AMEDIAFORMAT_KEY_CHANNEL_COUNT not set");
+            return -1;
+        }
+        C2StreamSampleRateInfo::input sampleRateInfo(0u, mSampleRate);
+        C2StreamChannelCountInfo::input channelCountInfo(0u, numChannels);
+        configParam.push_back(&sampleRateInfo);
+        configParam.push_back(&channelCountInfo);
+    } else {
+        mIsAudioEncoder = false;
+        if (!AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_WIDTH, &mWidth)) {
+            ALOGE("AMEDIAFORMAT_KEY_WIDTH not set");
+            return -1;
+        }
+        if (!AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_HEIGHT, &mHeight)) {
+            ALOGE("AMEDIAFORMAT_KEY_HEIGHT not set");
+            return -1;
+        }
+        C2StreamPictureSizeInfo::input inputSize(0u, mWidth, mHeight);
+        configParam.push_back(&inputSize);
+
+        if (!AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_FRAME_RATE, &mFrameRate) ||
+            (mFrameRate <= 0)) {
+            mFrameRate = KDefaultFrameRate;
+        }
+    }
+
+    int64_t sTime = mStats->getCurTime();
+    mComponent = mClient->CreateComponentByName(compName.c_str(), mListener, &mClient);
+    if (mComponent == nullptr) {
+        ALOGE("Create component failed for %s", compName.c_str());
+        return -1;
+    }
+    std::vector<std::unique_ptr<C2SettingResult>> failures;
+    int32_t status = mComponent->config(configParam, C2_DONT_BLOCK, &failures);
+    if (failures.size() != 0) {
+        ALOGE("Invalid Configuration");
+        return -1;
+    }
+
+    status |= mComponent->start();
+    int64_t eTime = mStats->getCurTime();
+    int64_t timeTaken = mStats->getTimeDiff(sTime, eTime);
+    mStats->setInitTime(timeTaken);
+    return status;
+}
+
+// In encoder components, fetch the size of input buffer allocated
+int32_t C2Encoder::getInputMaxBufSize() {
+    int32_t bitStreamInfo[1] = {0};
+    std::vector<std::unique_ptr<C2Param>> inParams;
+    c2_status_t status = mComponent->query({}, {C2StreamMaxBufferSizeInfo::input::PARAM_TYPE},
+                                           C2_DONT_BLOCK, &inParams);
+    if (status != C2_OK && inParams.size() == 0) {
+        ALOGE("Query MaxBufferSizeInfo failed => %d", status);
+        return status;
+    } else {
+        size_t offset = sizeof(C2Param);
+        for (size_t i = 0; i < inParams.size(); ++i) {
+            C2Param *param = inParams[i].get();
+            bitStreamInfo[i] = *(int32_t *)((uint8_t *)param + offset);
+        }
+    }
+    mInputMaxBufSize = bitStreamInfo[0];
+    if (mInputMaxBufSize < 0) {
+        ALOGE("Invalid mInputMaxBufSize %d\n", mInputMaxBufSize);
+        return -1;
+    }
+    return status;
+}
+
+int32_t C2Encoder::encodeFrames(ifstream &eleStream, size_t inputBufferSize) {
+    ALOGV("In %s", __func__);
+    int32_t frameSize = 0;
+    if (!mIsAudioEncoder) {
+        frameSize = mWidth * mHeight * 3 / 2;
+    } else {
+        frameSize = DEFAULT_AUDIO_FRAME_SIZE;
+        if (getInputMaxBufSize() != 0) return -1;
+        if (frameSize > mInputMaxBufSize) {
+            frameSize = mInputMaxBufSize;
+        }
+    }
+    int32_t numFrames = (inputBufferSize + frameSize - 1) / frameSize;
+    // Temporary buffer to read data from the input file
+    char *data = (char *)malloc(frameSize);
+    if (!data) {
+        ALOGE("Insufficient memory to read from input file");
+        return -1;
+    }
+
+    typedef std::unique_lock<std::mutex> ULock;
+    uint64_t presentationTimeUs = 0;
+    size_t offset = 0;
+    c2_status_t status = C2_OK;
+
+    mStats->setStartTime();
+    while (numFrames > 0) {
+        std::unique_ptr<C2Work> work;
+        // Prepare C2Work
+        {
+            ULock l(mQueueLock);
+            if (mWorkQueue.empty()) mQueueCondition.wait_for(l, MAX_RETRY * TIME_OUT);
+            if (!mWorkQueue.empty()) {
+                mStats->addInputTime();
+                work.swap(mWorkQueue.front());
+                mWorkQueue.pop_front();
+            } else {
+                cout << "Wait for generating C2Work exceeded timeout" << endl;
+                return -1;
+            }
+        }
+
+        if (mIsAudioEncoder) {
+            presentationTimeUs = mNumInputFrame * frameSize * (1000000 / mSampleRate);
+        } else {
+            presentationTimeUs = mNumInputFrame * (1000000 / mFrameRate);
+        }
+        uint32_t flags = 0;
+        if (numFrames == 1) flags |= C2FrameData::FLAG_END_OF_STREAM;
+
+        work->input.flags = (C2FrameData::flags_t)flags;
+        work->input.ordinal.timestamp = presentationTimeUs;
+        work->input.ordinal.frameIndex = mNumInputFrame;
+        work->input.buffers.clear();
+
+        if (inputBufferSize - offset < frameSize) {
+            frameSize = inputBufferSize - offset;
+        }
+        eleStream.read(data, frameSize);
+        if (eleStream.gcount() != frameSize) {
+            ALOGE("read() from file failed. Incorrect bytes read");
+            return -1;
+        }
+        offset += frameSize;
+
+        if (frameSize) {
+            if (mIsAudioEncoder) {
+                std::shared_ptr<C2LinearBlock> block;
+                status = mLinearPool->fetchLinearBlock(
+                        frameSize, {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE}, &block);
+                if (status != C2_OK || !block) {
+                    cout << "fetchLinearBlock failed : " << status << endl;
+                    return status;
+                }
+                C2WriteView view = block->map().get();
+                if (view.error() != C2_OK) {
+                    cout << "C2LinearBlock::map() failed : " << view.error() << endl;
+                    return view.error();
+                }
+
+                memcpy(view.base(), data, frameSize);
+                work->input.buffers.emplace_back(new LinearBuffer(block));
+            } else {
+                std::shared_ptr<C2GraphicBlock> block;
+                status = mGraphicPool->fetchGraphicBlock(
+                        mWidth, mHeight, HAL_PIXEL_FORMAT_YV12,
+                        {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE}, &block);
+                if (status != C2_OK || !block) {
+                    cout << "fetchGraphicBlock failed : " << status << endl;
+                    return status;
+                }
+                C2GraphicView view = block->map().get();
+                if (view.error() != C2_OK) {
+                    cout << "C2GraphicBlock::map() failed : " << view.error() << endl;
+                    return view.error();
+                }
+
+                uint8_t *pY = view.data()[C2PlanarLayout::PLANE_Y];
+                uint8_t *pU = view.data()[C2PlanarLayout::PLANE_U];
+                uint8_t *pV = view.data()[C2PlanarLayout::PLANE_V];
+                memcpy(pY, data, mWidth * mHeight);
+                memcpy(pU, data + mWidth * mHeight, (mWidth * mHeight >> 2));
+                memcpy(pV, data + (mWidth * mHeight * 5 >> 2), mWidth * mHeight >> 2);
+                work->input.buffers.emplace_back(new GraphicBuffer(block));
+            }
+            mStats->addFrameSize(frameSize);
+        }
+
+        work->worklets.clear();
+        work->worklets.emplace_back(new C2Worklet);
+
+        std::list<std::unique_ptr<C2Work>> items;
+        items.push_back(std::move(work));
+        // queue() invokes process() function of C2 Plugin.
+        status = mComponent->queue(&items);
+        if (status != C2_OK) {
+            ALOGE("queue failed");
+            return status;
+        }
+        ALOGV("Frame #%d size = %d queued", mNumInputFrame, frameSize);
+        numFrames--;
+        mNumInputFrame++;
+    }
+    free(data);
+    return status;
+}
+
+void C2Encoder::deInitCodec() {
+    ALOGV("In %s", __func__);
+    if (!mComponent) return;
+
+    int64_t sTime = mStats->getCurTime();
+    mComponent->stop();
+    mComponent->release();
+    mComponent = nullptr;
+    int64_t eTime = mStats->getCurTime();
+    int64_t timeTaken = mStats->getTimeDiff(sTime, eTime);
+    mStats->setDeInitTime(timeTaken);
+}
+
+void C2Encoder::dumpStatistics(string inputReference, int64_t durationUs) {
+    string operation = "c2encode";
+    mStats->dumpStatistics(operation, inputReference, durationUs);
+}
+
+void C2Encoder::resetEncoder() {
+    mIsAudioEncoder = false;
+    mNumInputFrame = 0;
+    mEos = false;
+    if (mStats) mStats->reset();
+}
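
For orientation, the sketch below shows how the new C2Encoder is intended to be driven; it mirrors the call sequence used by C2EncoderTest.cpp later in this change (setup, component creation, frame encoding, draining, teardown). It is an illustrative sketch only, with error handling reduced to early returns, and assumes the C2Encoder/BenchmarkC2Common APIs added by this patch.

```
// Sketch of the C2Encoder call sequence (assumes the APIs added in this patch;
// mirrors C2EncoderTest.cpp below). Not part of the patch itself.
#include <fstream>
#include "C2Encoder.h"

static int32_t runC2Encode(const std::string &compName, AMediaFormat *format,
                           std::ifstream &eleStream, size_t eleSize) {
    C2Encoder encoder;
    if (encoder.setupCodec2() != 0) return -1;                  // connect to the Codec2 client
    if (encoder.createCodec2Component(compName, format) != 0) return -1;
    eleStream.seekg(0, std::ifstream::beg);
    int32_t status = encoder.encodeFrames(eleStream, eleSize);  // queue all input frames
    encoder.waitOnInputConsumption();                           // wait until EOS work returns
    encoder.deInitCodec();
    encoder.resetEncoder();
    return status;
}
```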
diff --git a/media/tests/benchmark/src/native/encoder/C2Encoder.h b/media/tests/benchmark/src/native/encoder/C2Encoder.h
new file mode 100644
index 0000000..a4ca097
--- /dev/null
+++ b/media/tests/benchmark/src/native/encoder/C2Encoder.h
@@ -0,0 +1,66 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __C2_ENCODER_H__
+#define __C2_ENCODER_H__
+
+#include <stdio.h>
+#include <algorithm>
+#include <fstream>
+
+#include "BenchmarkC2Common.h"
+
+#define DEFAULT_AUDIO_FRAME_SIZE 4096
+
+constexpr int32_t KDefaultFrameRate = 25;
+
+class C2Encoder : public BenchmarkC2Common {
+  public:
+    C2Encoder()
+        : mIsAudioEncoder(false),
+          mWidth(0),
+          mHeight(0),
+          mNumInputFrame(0),
+          mComponent(nullptr) {}
+
+    int32_t createCodec2Component(string codecName, AMediaFormat *format);
+
+    int32_t encodeFrames(ifstream &eleStream, size_t inputBufferSize);
+
+    int32_t getInputMaxBufSize();
+
+    void deInitCodec();
+
+    void dumpStatistics(string inputReference, int64_t durationUs);
+
+    void resetEncoder();
+
+  private:
+    bool mIsAudioEncoder;
+
+    int32_t mWidth;
+    int32_t mHeight;
+    int32_t mFrameRate;
+    int32_t mSampleRate;
+
+    int32_t mNumInputFrame;
+    int32_t mInputMaxBufSize;
+
+    std::shared_ptr<android::Codec2Client::Listener> mListener;
+    std::shared_ptr<android::Codec2Client::Component> mComponent;
+};
+
+#endif  // __C2_ENCODER_H__
diff --git a/media/tests/benchmark/src/native/encoder/Encoder.cpp b/media/tests/benchmark/src/native/encoder/Encoder.cpp
index a5605de..2db612c 100644
--- a/media/tests/benchmark/src/native/encoder/Encoder.cpp
+++ b/media/tests/benchmark/src/native/encoder/Encoder.cpp
@@ -154,11 +154,12 @@
 }
 
 void Encoder::deInitCodec() {
-    int64_t sTime = mStats->getCurTime();
     if (mFormat) {
         AMediaFormat_delete(mFormat);
         mFormat = nullptr;
     }
+    if (!mCodec) return;
+    int64_t sTime = mStats->getCurTime();
     AMediaCodec_stop(mCodec);
     AMediaCodec_delete(mCodec);
     int64_t eTime = mStats->getCurTime();
@@ -174,9 +175,10 @@
     memset(&mParams, 0, sizeof mParams);
 }
 
-void Encoder::dumpStatistics(string inputReference, int64_t durationUs) {
+void Encoder::dumpStatistics(string inputReference, int64_t durationUs, string componentName,
+                             string mode, string statsFile) {
     string operation = "encode";
-    mStats->dumpStatistics(operation, inputReference, durationUs);
+    mStats->dumpStatistics(operation, inputReference, durationUs, componentName, mode, statsFile);
 }
 
 int32_t Encoder::encode(string &codecName, ifstream &eleStream, size_t eleSize,
@@ -206,7 +208,7 @@
         AMediaFormat_setInt32(mFormat, AMEDIAFORMAT_KEY_BIT_RATE, mParams.bitrate);
     }
     const char *s = AMediaFormat_toString(mFormat);
-    ALOGV("Input format: %s\n", s);
+    ALOGI("Input format: %s\n", s);
 
     int64_t sTime = mStats->getCurTime();
     mCodec = createMediaCodec(mFormat, mMime, codecName, true /*isEncoder*/);
diff --git a/media/tests/benchmark/src/native/encoder/Encoder.h b/media/tests/benchmark/src/native/encoder/Encoder.h
index 6059c4a..3d12600 100644
--- a/media/tests/benchmark/src/native/encoder/Encoder.h
+++ b/media/tests/benchmark/src/native/encoder/Encoder.h
@@ -75,7 +75,8 @@
     int32_t encode(std::string &codecName, std::ifstream &eleStream, size_t eleSize, bool asyncMode,
                    encParameter encParams, char *mime);
 
-    void dumpStatistics(string inputReference, int64_t durationUs);
+    void dumpStatistics(string inputReference, int64_t durationUs, string codecName = "",
+                        string mode = "", string statsFile = "");
 
   private:
     AMediaCodec *mCodec;
diff --git a/media/tests/benchmark/src/native/extractor/Android.bp b/media/tests/benchmark/src/native/extractor/Android.bp
index dfd0d49..7ed9476 100644
--- a/media/tests/benchmark/src/native/extractor/Android.bp
+++ b/media/tests/benchmark/src/native/extractor/Android.bp
@@ -27,3 +27,20 @@
 
     ldflags: ["-Wl,-Bsymbolic"]
 }
+
+cc_library_static {
+    name: "libmediabenchmark_codec2_extractor",
+    defaults: [
+        "libmediabenchmark_codec2_common-defaults",
+    ],
+
+    srcs: ["Extractor.cpp"],
+
+    static_libs: [
+        "libmediabenchmark_codec2_common",
+    ],
+
+    export_include_dirs: ["."],
+
+    ldflags: ["-Wl,-Bsymbolic"]
+}
diff --git a/media/tests/benchmark/src/native/extractor/Extractor.cpp b/media/tests/benchmark/src/native/extractor/Extractor.cpp
index b4cad0b..f0bb3b9 100644
--- a/media/tests/benchmark/src/native/extractor/Extractor.cpp
+++ b/media/tests/benchmark/src/native/extractor/Extractor.cpp
@@ -111,9 +111,9 @@
     return AMEDIA_OK;
 }
 
-void Extractor::dumpStatistics(string inputReference) {
+void Extractor::dumpStatistics(string inputReference, string componentName, string statsFile) {
     string operation = "extract";
-    mStats->dumpStatistics(operation, inputReference, mDurationUs);
+    mStats->dumpStatistics(operation, inputReference, mDurationUs, componentName, "", statsFile);
 }
 
 void Extractor::deInitExtractor() {
diff --git a/media/tests/benchmark/src/native/extractor/Extractor.h b/media/tests/benchmark/src/native/extractor/Extractor.h
index 4c39a72..1694fc7 100644
--- a/media/tests/benchmark/src/native/extractor/Extractor.h
+++ b/media/tests/benchmark/src/native/extractor/Extractor.h
@@ -45,7 +45,7 @@
 
     int32_t extract(int32_t trackId);
 
-    void dumpStatistics(std::string inputReference);
+    void dumpStatistics(string inputReference, string componentName = "", string statsFile = "");
 
     void deInitExtractor();
 
diff --git a/media/tests/benchmark/src/native/muxer/Muxer.cpp b/media/tests/benchmark/src/native/muxer/Muxer.cpp
index b297a66..3e150ca 100644
--- a/media/tests/benchmark/src/native/muxer/Muxer.cpp
+++ b/media/tests/benchmark/src/native/muxer/Muxer.cpp
@@ -29,7 +29,7 @@
     int64_t sTime = mStats->getCurTime();
     mMuxer = AMediaMuxer_new(fd, (OutputFormat)outputFormat);
     if (!mMuxer) {
-        cout << "[   WARN   ] Test Skipped. Unable to create muxer \n";
+        ALOGV("Unable to create muxer");
         return AMEDIA_ERROR_INVALID_OBJECT;
     }
     /*
@@ -38,7 +38,7 @@
      */
     ssize_t index = AMediaMuxer_addTrack(mMuxer, mFormat);
     if (index < 0) {
-        cout << "[   WARN   ] Test Skipped. Format not supported \n";
+        ALOGV("Format not supported");
         return index;
     }
     AMediaMuxer_start(mMuxer);
@@ -49,12 +49,12 @@
 }
 
 void Muxer::deInitMuxer() {
-    int64_t sTime = mStats->getCurTime();
     if (mFormat) {
         AMediaFormat_delete(mFormat);
         mFormat = nullptr;
     }
     if (!mMuxer) return;
+    int64_t sTime = mStats->getCurTime();
     AMediaMuxer_stop(mMuxer);
     AMediaMuxer_delete(mMuxer);
     int64_t eTime = mStats->getCurTime();
@@ -66,9 +66,10 @@
     if (mStats) mStats->reset();
 }
 
-void Muxer::dumpStatistics(string inputReference) {
+void Muxer::dumpStatistics(string inputReference, string componentName, string statsFile) {
     string operation = "mux";
-    mStats->dumpStatistics(operation, inputReference, mExtractor->getClipDuration());
+    mStats->dumpStatistics(operation, inputReference, mExtractor->getClipDuration(), componentName,
+                           "", statsFile);
 }
 
 int32_t Muxer::mux(uint8_t *inputBuffer, vector<AMediaCodecBufferInfo> &frameInfos) {
diff --git a/media/tests/benchmark/src/native/muxer/Muxer.h b/media/tests/benchmark/src/native/muxer/Muxer.h
index eee3146..860fdaf 100644
--- a/media/tests/benchmark/src/native/muxer/Muxer.h
+++ b/media/tests/benchmark/src/native/muxer/Muxer.h
@@ -51,7 +51,7 @@
     /* Process the frames and give Muxed output */
     int32_t mux(uint8_t *inputBuffer, vector<AMediaCodecBufferInfo> &frameSizes);
 
-    void dumpStatistics(string inputReference);
+    void dumpStatistics(string inputReference, string codecName = "", string statsFile = "");
 
   private:
     AMediaFormat *mFormat;
diff --git a/media/tests/benchmark/tests/Android.bp b/media/tests/benchmark/tests/Android.bp
index 128d055..f46fa4a 100644
--- a/media/tests/benchmark/tests/Android.bp
+++ b/media/tests/benchmark/tests/Android.bp
@@ -87,8 +87,25 @@
     srcs: ["C2DecoderTest.cpp"],
 
     static_libs: [
-        "libmediabenchmark_extractor",
+        "libmediabenchmark_codec2_extractor",
         "libmediabenchmark_codec2_common",
         "libmediabenchmark_codec2_decoder",
     ],
 }
+
+cc_test {
+    name: "C2EncoderTest",
+    gtest: true,
+    defaults: [
+        "libmediabenchmark_codec2_common-defaults",
+    ],
+
+    srcs: ["C2EncoderTest.cpp"],
+
+    static_libs: [
+        "libmediabenchmark_codec2_extractor",
+        "libmediabenchmark_codec2_decoder",
+        "libmediabenchmark_codec2_common",
+        "libmediabenchmark_codec2_encoder",
+    ],
+}
diff --git a/media/tests/benchmark/tests/C2DecoderTest.cpp b/media/tests/benchmark/tests/C2DecoderTest.cpp
index 3531d8a..ecd9759 100644
--- a/media/tests/benchmark/tests/C2DecoderTest.cpp
+++ b/media/tests/benchmark/tests/C2DecoderTest.cpp
@@ -29,82 +29,64 @@
 
 class C2DecoderTest : public ::testing::TestWithParam<pair<string, string>> {
   public:
-    C2DecoderTest() : mDecoder(nullptr), disableTest(false) { setupC2DecoderTest(); }
+    C2DecoderTest() : mDecoder(nullptr) {}
+
+    ~C2DecoderTest() {
+        if (!mCodecList.empty()) {
+            mCodecList.clear();
+        }
+        if (mDecoder) {
+            delete mDecoder;
+            mDecoder = nullptr;
+        }
+    }
+
+    virtual void SetUp() override { setupC2DecoderTest(); }
 
     void setupC2DecoderTest();
 
     vector<string> mCodecList;
     C2Decoder *mDecoder;
-    bool disableTest;
 };
 
 void C2DecoderTest::setupC2DecoderTest() {
     mDecoder = new C2Decoder();
-    if (!mDecoder) {
-        cout << "[   WARN   ] Test Skipped. C2Decoder creation failed\n";
-        disableTest = true;
-        return;
-    }
+    ASSERT_NE(mDecoder, nullptr) << "C2Decoder creation failed";
+
     int32_t status = mDecoder->setupCodec2();
-    if (status != 0) {
-        cout << "[   WARN   ] Test Skipped. Codec2 setup failed \n";
-        disableTest = true;
-        return;
-    }
+    ASSERT_EQ(status, 0) << "Codec2 setup failed";
+
     mCodecList = mDecoder->getSupportedComponentList(false /* isEncoder*/);
-    if (!mCodecList.size()) {
-        cout << "[   WARN   ] Test Skipped. Codec2 client didn't recognise any component \n";
-        disableTest = true;
-        return;
-    }
+    ASSERT_GT(mCodecList.size(), 0) << "Codec2 client didn't recognise any component";
 }
 
 TEST_P(C2DecoderTest, Codec2Decode) {
-    if (disableTest) return;
-
     ALOGV("Decode the samples given by extractor using codec2");
     string inputFile = gEnv->getRes() + GetParam().first;
     FILE *inputFp = fopen(inputFile.c_str(), "rb");
-    if (!inputFp) {
-        cout << "[   WARN   ] Test Skipped. Unable to open input file" << inputFile
-             << " for reading \n";
-        return;
-    }
+    ASSERT_NE(inputFp, nullptr) << "Unable to open " << inputFile << " file for reading";
 
     Extractor *extractor = new Extractor();
-    if (!extractor) {
-        cout << "[   WARN   ] Test Skipped. Extractor creation failed \n";
-        return;
-    }
+    ASSERT_NE(extractor, nullptr) << "Extractor creation failed";
 
     // Read file properties
-    fseek(inputFp, 0, SEEK_END);
-    size_t fileSize = ftell(inputFp);
-    fseek(inputFp, 0, SEEK_SET);
+    struct stat buf;
+    stat(inputFile.c_str(), &buf);
+    size_t fileSize = buf.st_size;
     int32_t fd = fileno(inputFp);
 
-    if (fileSize > kMaxBufferSize) {
-        cout << "[   WARN   ] Test Skipped. Input file size is greater than the threshold memory "
-                "dedicated to the test \n";
-    }
+    ASSERT_LE(fileSize, kMaxBufferSize)
+            << "Input file size is greater than the threshold memory dedicated to the test";
 
     int32_t trackCount = extractor->initExtractor(fd, fileSize);
-    if (trackCount <= 0) {
-        cout << "[   WARN   ] Test Skipped. initExtractor failed\n";
-        return;
-    }
+    ASSERT_GT(trackCount, 0) << "initExtractor failed";
+
     for (int32_t curTrack = 0; curTrack < trackCount; curTrack++) {
         int32_t status = extractor->setupTrackFormat(curTrack);
-        if (status != 0) {
-            cout << "[   WARN   ] Test Skipped. Track Format invalid \n";
-            return;
-        }
+        ASSERT_EQ(status, 0) << "Track Format invalid";
 
         uint8_t *inputBuffer = (uint8_t *)malloc(fileSize);
-        if (!inputBuffer) {
-            cout << "[   WARN   ] Test Skipped. Insufficient memory \n";
-            return;
-        }
+        ASSERT_NE(inputBuffer, nullptr) << "Insufficient memory";
 
         vector<AMediaCodecBufferInfo> frameInfo;
         AMediaCodecBufferInfo info;
@@ -116,11 +98,8 @@
             void *csdBuffer = extractor->getCSDSample(info, idx);
             if (!csdBuffer || !info.size) break;
             // copy the meta data and buffer to be passed to decoder
-            if (inputBufferOffset + info.size > fileSize) {
-                cout << "[   WARN   ] Test Skipped. Memory allocated not sufficient\n";
-                free(inputBuffer);
-                return;
-            }
+            ASSERT_LE(inputBufferOffset + info.size, fileSize) << "Memory allocated not sufficient";
+
             memcpy(inputBuffer + inputBufferOffset, csdBuffer, info.size);
             frameInfo.push_back(info);
             inputBufferOffset += info.size;
@@ -132,11 +111,8 @@
             status = extractor->getFrameSample(info);
             if (status || !info.size) break;
             // copy the meta data and buffer to be passed to decoder
-            if (inputBufferOffset + info.size > fileSize) {
-                cout << "[   WARN   ] Test Skipped. Memory allocated not sufficient\n";
-                free(inputBuffer);
-                return;
-            }
+            ASSERT_LE(inputBufferOffset + info.size, fileSize) << "Memory allocated not sufficient";
+
             memcpy(inputBuffer + inputBufferOffset, extractor->getFrameBuf(), info.size);
             frameInfo.push_back(info);
             inputBufferOffset += info.size;
@@ -148,21 +124,18 @@
             if (codecName.find(GetParam().second) != string::npos &&
                 codecName.find("secure") == string::npos) {
                 status = mDecoder->createCodec2Component(codecName, format);
-                if (status != 0) {
-                    cout << "[   WARN   ] Test Skipped. Create component failed for " << codecName
-                         << "\n";
-                    continue;
-                }
+                ASSERT_EQ(status, 0) << "Create component failed for " << codecName;
 
                 // Send the inputs to C2 Decoder and wait till all buffers are returned.
-                mDecoder->decodeFrames(inputBuffer, frameInfo);
+                status = mDecoder->decodeFrames(inputBuffer, frameInfo);
+                ASSERT_EQ(status, 0) << "Decoder failed for " << codecName;
+
                 mDecoder->waitOnInputConsumption();
-                if (!mDecoder->mEos) {
-                    cout << "[   WARN   ] Test Failed. Didn't receive EOS \n";
-                }
+                ASSERT_TRUE(mDecoder->mEos) << "Test Failed. Didn't receive EOS \n";
+
                 mDecoder->deInitCodec();
                 int64_t durationUs = extractor->getClipDuration();
-                cout << "codec: " << codecName << endl;
+                ALOGV("codec : %s", codecName.c_str());
                 mDecoder->dumpStatistics(GetParam().first, durationUs);
                 mDecoder->resetDecoder();
             }
@@ -172,6 +145,7 @@
         extractor->deInitExtractor();
         delete extractor;
         delete mDecoder;
+        mDecoder = nullptr;
     }
 }
 
@@ -179,26 +153,24 @@
 // Add wav files
 INSTANTIATE_TEST_SUITE_P(
         AudioDecoderTest, C2DecoderTest,
-        ::testing::Values(
-                make_pair("bbb_44100hz_2ch_128kbps_aac_30sec.mp4", "aac"),
-                make_pair("bbb_44100hz_2ch_128kbps_mp3_30sec.mp3", "mp3"),
-                make_pair("bbb_8000hz_1ch_8kbps_amrnb_30sec.3gp", "amrnb"),
-                make_pair("bbb_16000hz_1ch_9kbps_amrwb_30sec.3gp", "amrnb"),
-                make_pair("bbb_44100hz_2ch_80kbps_vorbis_30sec.mp4", "vorbis"),
-                make_pair("bbb_44100hz_2ch_600kbps_flac_30sec.mp4", "flac"),
-                make_pair("bbb_48000hz_2ch_100kbps_opus_30sec.webm", "opus")));
+        ::testing::Values(make_pair("bbb_44100hz_2ch_128kbps_aac_30sec.mp4", "aac"),
+                          make_pair("bbb_44100hz_2ch_128kbps_mp3_30sec.mp3", "mp3"),
+                          make_pair("bbb_8000hz_1ch_8kbps_amrnb_30sec.3gp", "amrnb"),
+                          make_pair("bbb_16000hz_1ch_9kbps_amrwb_30sec.3gp", "amrnb"),
+                          make_pair("bbb_44100hz_2ch_80kbps_vorbis_30sec.mp4", "vorbis"),
+                          make_pair("bbb_44100hz_2ch_600kbps_flac_30sec.mp4", "flac"),
+                          make_pair("bbb_48000hz_2ch_100kbps_opus_30sec.webm", "opus")));
 
 INSTANTIATE_TEST_SUITE_P(
         VideoDecoderTest, C2DecoderTest,
-        ::testing::Values(
-                make_pair("crowd_1920x1080_25fps_4000kbps_vp9.webm", "vp9"),
-                make_pair("crowd_1920x1080_25fps_4000kbps_vp8.webm", "vp8"),
-                make_pair("crowd_1920x1080_25fps_4000kbps_av1.webm", "av1"),
-                make_pair("crowd_1920x1080_25fps_7300kbps_mpeg2.mp4", "mpeg2"),
-                make_pair("crowd_1920x1080_25fps_6000kbps_mpeg4.mp4", "mpeg4"),
-                make_pair("crowd_352x288_25fps_6000kbps_h263.3gp", "h263"),
-                make_pair("crowd_1920x1080_25fps_6700kbps_h264.ts", "avc"),
-                make_pair("crowd_1920x1080_25fps_4000kbps_h265.mkv", "hevc")));
+        ::testing::Values(make_pair("crowd_1920x1080_25fps_4000kbps_vp9.webm", "vp9"),
+                          make_pair("crowd_1920x1080_25fps_4000kbps_vp8.webm", "vp8"),
+                          make_pair("crowd_1920x1080_25fps_4000kbps_av1.webm", "av1"),
+                          make_pair("crowd_1920x1080_25fps_7300kbps_mpeg2.mp4", "mpeg2"),
+                          make_pair("crowd_1920x1080_25fps_6000kbps_mpeg4.mp4", "mpeg4"),
+                          make_pair("crowd_352x288_25fps_6000kbps_h263.3gp", "h263"),
+                          make_pair("crowd_1920x1080_25fps_6700kbps_h264.ts", "avc"),
+                          make_pair("crowd_1920x1080_25fps_4000kbps_h265.mkv", "hevc")));
 
 int main(int argc, char **argv) {
     gEnv = new BenchmarkTestEnvironment();
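
A side note on the file-size change above: the tests now use stat() on the input path instead of fseek/ftell on the open FILE*. Below is a hedged sketch of the same lookup with the return value checked (the patch itself ignores stat()'s return value); the helper name is illustrative, not from the patch.

```
// Sketch only: stat()-based size lookup as used by the updated tests, with the
// return value checked. Helper name is illustrative, not from the patch.
#include <sys/stat.h>
#include <string>

static size_t getInputFileSize(const std::string &path) {
    struct stat buf;
    if (stat(path.c_str(), &buf) != 0) return 0;  // caller can treat 0 as failure
    return static_cast<size_t>(buf.st_size);
}
```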
diff --git a/media/tests/benchmark/tests/C2EncoderTest.cpp b/media/tests/benchmark/tests/C2EncoderTest.cpp
new file mode 100644
index 0000000..98eb17a
--- /dev/null
+++ b/media/tests/benchmark/tests/C2EncoderTest.cpp
@@ -0,0 +1,187 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2EncoderTest"
+
+#include <fstream>
+#include <iostream>
+#include <limits>
+
+#include "BenchmarkTestEnvironment.h"
+#include "C2Encoder.h"
+#include "Decoder.h"
+
+static BenchmarkTestEnvironment *gEnv = nullptr;
+
+class C2EncoderTest : public ::testing::TestWithParam<pair<string, string>> {
+  public:
+    C2EncoderTest() : mEncoder(nullptr) {}
+
+    ~C2EncoderTest() {
+        if (!mCodecList.empty()) {
+            mCodecList.clear();
+        }
+        if (mEncoder) {
+            delete mEncoder;
+            mEncoder = nullptr;
+        }
+    }
+
+    virtual void SetUp() override { setupC2EncoderTest(); }
+
+    void setupC2EncoderTest();
+
+    vector<string> mCodecList;
+    C2Encoder *mEncoder;
+};
+
+void C2EncoderTest::setupC2EncoderTest() {
+    mEncoder = new C2Encoder();
+    ASSERT_NE(mEncoder, nullptr) << "C2Encoder creation failed";
+
+    int32_t status = mEncoder->setupCodec2();
+    ASSERT_EQ(status, 0) << "Codec2 setup failed";
+
+    mCodecList = mEncoder->getSupportedComponentList(true /* isEncoder*/);
+    ASSERT_GT(mCodecList.size(), 0) << "Codec2 client didn't recognise any component";
+}
+
+TEST_P(C2EncoderTest, Codec2Encode) {
+    ALOGV("Encodes the input using codec2 framework");
+    string inputFile = gEnv->getRes() + GetParam().first;
+    FILE *inputFp = fopen(inputFile.c_str(), "rb");
+    ASSERT_NE(inputFp, nullptr) << "Unable to open input file for reading";
+
+    Decoder *decoder = new Decoder();
+    ASSERT_NE(decoder, nullptr) << "Decoder creation failed";
+
+    Extractor *extractor = decoder->getExtractor();
+    ASSERT_NE(extractor, nullptr) << "Extractor creation failed";
+
+    // Read file properties
+    struct stat buf;
+    stat(inputFile.c_str(), &buf);
+    size_t fileSize = buf.st_size;
+    int32_t fd = fileno(inputFp);
+
+    ASSERT_LE(fileSize, kMaxBufferSize)
+            << "Input file size is greater than the threshold memory dedicated to the test";
+
+    int32_t trackCount = extractor->initExtractor(fd, fileSize);
+    ASSERT_GT(trackCount, 0) << "initExtractor failed";
+
+    for (int curTrack = 0; curTrack < trackCount; curTrack++) {
+        int32_t status = extractor->setupTrackFormat(curTrack);
+        ASSERT_EQ(status, 0) << "Track Format invalid";
+
+        uint8_t *inputBuffer = (uint8_t *)malloc(fileSize);
+        ASSERT_NE(inputBuffer, nullptr) << "Insufficient memory";
+
+        vector<AMediaCodecBufferInfo> frameInfo;
+        AMediaCodecBufferInfo info;
+        uint32_t inputBufferOffset = 0;
+
+        // Get frame data
+        while (1) {
+            status = extractor->getFrameSample(info);
+            if (status || !info.size) break;
+            // copy the meta data and buffer to be passed to decoder
+            ASSERT_LE(inputBufferOffset + info.size, fileSize) << "Memory allocated not sufficient";
+
+            memcpy(inputBuffer + inputBufferOffset, extractor->getFrameBuf(), info.size);
+            frameInfo.push_back(info);
+            inputBufferOffset += info.size;
+        }
+
+        string decName = "";
+        string outputFileName = "decode.out";
+        FILE *outFp = fopen(outputFileName.c_str(), "wb");
+        ASSERT_NE(outFp, nullptr) << "Unable to open output file " << outputFileName
+                                  << " for dumping decoder's output";
+
+        decoder->setupDecoder();
+        status = decoder->decode(inputBuffer, frameInfo, decName, false /*asyncMode */, outFp);
+        ASSERT_EQ(status, AMEDIA_OK) << "Decode returned error : " << status;
+
+        // Encode the given input stream for all C2 codecs supported by device
+        AMediaFormat *format = extractor->getFormat();
+        ifstream eleStream;
+        eleStream.open(outputFileName.c_str(), ifstream::binary | ifstream::ate);
+        ASSERT_EQ(eleStream.is_open(), true) << outputFileName.c_str() << " - file not found";
+        size_t eleSize = eleStream.tellg();
+
+        for (string codecName : mCodecList) {
+            if (codecName.find(GetParam().second) != string::npos) {
+                status = mEncoder->createCodec2Component(codecName, format);
+                ASSERT_EQ(status, 0) << "Create component failed for " << codecName;
+
+                // Send the inputs to C2 Encoder and wait till all buffers are returned.
+                eleStream.seekg(0, ifstream::beg);
+                status = mEncoder->encodeFrames(eleStream, eleSize);
+                ASSERT_EQ(status, 0) << "Encoder failed for " << codecName;
+
+                mEncoder->waitOnInputConsumption();
+                ASSERT_TRUE(mEncoder->mEos) << "Test Failed. Didn't receive EOS \n";
+
+                mEncoder->deInitCodec();
+                int64_t durationUs = extractor->getClipDuration();
+                ALOGV("codec : %s", codecName.c_str());
+                mEncoder->dumpStatistics(GetParam().first, durationUs);
+                mEncoder->resetEncoder();
+            }
+        }
+
+        // Destroy the decoder for the given input
+        decoder->deInitCodec();
+        decoder->resetDecoder();
+        free(inputBuffer);
+    }
+    fclose(inputFp);
+    extractor->deInitExtractor();
+    delete decoder;
+    delete mEncoder;
+    mEncoder = nullptr;
+}
+
+INSTANTIATE_TEST_SUITE_P(
+        AudioEncoderTest, C2EncoderTest,
+        ::testing::Values(make_pair("bbb_44100hz_2ch_128kbps_aac_30sec.mp4", "aac"),
+                          make_pair("bbb_8000hz_1ch_8kbps_amrnb_30sec.3gp", "amrnb"),
+                          make_pair("bbb_16000hz_1ch_9kbps_amrwb_30sec.3gp", "amrwb"),
+                          make_pair("bbb_44100hz_2ch_600kbps_flac_30sec.mp4", "flac"),
+                          make_pair("bbb_48000hz_2ch_100kbps_opus_30sec.webm", "opus")));
+
+INSTANTIATE_TEST_SUITE_P(
+        VideoEncoderTest, C2EncoderTest,
+        ::testing::Values(make_pair("crowd_1920x1080_25fps_4000kbps_vp9.webm", "vp9"),
+                          make_pair("crowd_1920x1080_25fps_4000kbps_vp8.webm", "vp8"),
+                          make_pair("crowd_176x144_25fps_6000kbps_mpeg4.mp4", "mpeg4"),
+                          make_pair("crowd_176x144_25fps_6000kbps_h263.3gp", "h263"),
+                          make_pair("crowd_1920x1080_25fps_6700kbps_h264.ts", "avc"),
+                          make_pair("crowd_1920x1080_25fps_4000kbps_h265.mkv", "hevc")));
+
+int main(int argc, char **argv) {
+    gEnv = new BenchmarkTestEnvironment();
+    ::testing::AddGlobalTestEnvironment(gEnv);
+    ::testing::InitGoogleTest(&argc, argv);
+    int status = gEnv->initFromOptions(argc, argv);
+    if (status == 0) {
+        status = RUN_ALL_TESTS();
+        ALOGV("C2 Encoder Test result = %d\n", status);
+    }
+    return status;
+}
diff --git a/media/tests/benchmark/tests/DecoderTest.cpp b/media/tests/benchmark/tests/DecoderTest.cpp
index fa37435..5c6aa5b 100644
--- a/media/tests/benchmark/tests/DecoderTest.cpp
+++ b/media/tests/benchmark/tests/DecoderTest.cpp
@@ -21,8 +21,8 @@
 #include <iostream>
 #include <limits>
 
-#include "Decoder.h"
 #include "BenchmarkTestEnvironment.h"
+#include "Decoder.h"
 
 static BenchmarkTestEnvironment *gEnv = nullptr;
 
@@ -34,41 +34,30 @@
 
     string inputFile = gEnv->getRes() + get<0>(params);
     FILE *inputFp = fopen(inputFile.c_str(), "rb");
-    if (!inputFp) {
-        cout << "[   WARN   ] Test Skipped. Unable to open input file for reading \n";
-        return;
-    }
+    ASSERT_NE(inputFp, nullptr) << "Unable to open " << inputFile << " file for reading";
 
     Decoder *decoder = new Decoder();
+    ASSERT_NE(decoder, nullptr) << "Decoder creation failed";
+
     Extractor *extractor = decoder->getExtractor();
-    if (!extractor) {
-        cout << "[   WARN   ] Test Skipped. Extractor creation failed \n";
-        return;
-    }
+    ASSERT_NE(extractor, nullptr) << "Extractor creation failed";
 
     // Read file properties
-    fseek(inputFp, 0, SEEK_END);
-    size_t fileSize = ftell(inputFp);
-    fseek(inputFp, 0, SEEK_SET);
+    struct stat buf;
+    stat(inputFile.c_str(), &buf);
+    size_t fileSize = buf.st_size;
     int32_t fd = fileno(inputFp);
 
     int32_t trackCount = extractor->initExtractor(fd, fileSize);
-    if (trackCount <= 0) {
-        cout << "[   WARN   ] Test Skipped. initExtractor failed\n";
-        return;
-    }
+    ASSERT_GT(trackCount, 0) << "initExtractor failed";
+
     for (int curTrack = 0; curTrack < trackCount; curTrack++) {
         int32_t status = extractor->setupTrackFormat(curTrack);
-        if (status != 0) {
-            cout << "[   WARN   ] Test Skipped. Track Format invalid \n";
-            return;
-        }
+        ASSERT_EQ(status, 0) << "Track Format invalid";
 
         uint8_t *inputBuffer = (uint8_t *)malloc(kMaxBufferSize);
-        if (!inputBuffer) {
-            cout << "[   WARN   ] Test Skipped. Insufficient memory \n";
-            return;
-        }
+        ASSERT_NE(inputBuffer, nullptr) << "Insufficient memory";
+
         vector<AMediaCodecBufferInfo> frameInfo;
         AMediaCodecBufferInfo info;
         uint32_t inputBufferOffset = 0;
@@ -78,11 +67,9 @@
             status = extractor->getFrameSample(info);
             if (status || !info.size) break;
             // copy the meta data and buffer to be passed to decoder
-            if (inputBufferOffset + info.size > kMaxBufferSize) {
-                cout << "[   WARN   ] Test Skipped. Memory allocated not sufficient\n";
-                free(inputBuffer);
-                return;
-            }
+            ASSERT_LE(inputBufferOffset + info.size, kMaxBufferSize)
+                    << "Memory allocated not sufficient";
+
             memcpy(inputBuffer + inputBufferOffset, extractor->getFrameBuf(), info.size);
             frameInfo.push_back(info);
             inputBufferOffset += info.size;
@@ -92,13 +79,10 @@
         bool asyncMode = get<2>(params);
         decoder->setupDecoder();
         status = decoder->decode(inputBuffer, frameInfo, codecName, asyncMode);
-        if (status != AMEDIA_OK) {
-            cout << "[   WARN   ] Test Failed. Decode returned error " << status << endl;
-            free(inputBuffer);
-            return;
-        }
+        ASSERT_EQ(status, AMEDIA_OK) << "Decoder failed for " << codecName;
+
         decoder->deInitCodec();
-        cout << "codec : " << codecName << endl;
+        ALOGV("codec : %s", codecName.c_str());
         string inputReference = get<0>(params);
         decoder->dumpStatistics(inputReference);
         free(inputBuffer);
diff --git a/media/tests/benchmark/tests/EncoderTest.cpp b/media/tests/benchmark/tests/EncoderTest.cpp
index c3963f8..dc2a2dd 100644
--- a/media/tests/benchmark/tests/EncoderTest.cpp
+++ b/media/tests/benchmark/tests/EncoderTest.cpp
@@ -20,8 +20,8 @@
 #include <fstream>
 
 #include "BenchmarkTestEnvironment.h"
-#include "Encoder.h"
 #include "Decoder.h"
+#include "Encoder.h"
 
 static BenchmarkTestEnvironment *gEnv = nullptr;
 
@@ -33,42 +33,33 @@
 
     string inputFile = gEnv->getRes() + get<0>(params);
     FILE *inputFp = fopen(inputFile.c_str(), "rb");
-    if (!inputFp) {
-        cout << "[   WARN   ] Test Skipped. Unable to open input file for reading \n";
-        return;
-    }
+    ASSERT_NE(inputFp, nullptr) << "Unable to open " << inputFile << " file for reading";
 
     Decoder *decoder = new Decoder();
+    ASSERT_NE(decoder, nullptr) << "Decoder creation failed";
+
     Extractor *extractor = decoder->getExtractor();
-    if (!extractor) {
-        cout << "[   WARN   ] Test Skipped. Extractor creation failed \n";
-        return;
-    }
+    ASSERT_NE(extractor, nullptr) << "Extractor creation failed";
+
     // Read file properties
-    fseek(inputFp, 0, SEEK_END);
-    size_t fileSize = ftell(inputFp);
-    fseek(inputFp, 0, SEEK_SET);
+    struct stat buf;
+    stat(inputFile.c_str(), &buf);
+    size_t fileSize = buf.st_size;
     int32_t fd = fileno(inputFp);
 
     int32_t trackCount = extractor->initExtractor(fd, fileSize);
-    if (trackCount <= 0) {
-        cout << "[   WARN   ] Test Skipped. initExtractor failed\n";
-        return;
-    }
+    ASSERT_GT(trackCount, 0) << "initExtractor failed";
 
     Encoder *encoder = new Encoder();
+    ASSERT_NE(encoder, nullptr) << "Encoder creation failed";
+
     for (int curTrack = 0; curTrack < trackCount; curTrack++) {
         int32_t status = extractor->setupTrackFormat(curTrack);
-        if (status != 0) {
-            cout << "[   WARN   ] Test Skipped. Track Format invalid \n";
-            return;
-        }
+        ASSERT_EQ(status, 0) << "Track Format invalid";
 
         uint8_t *inputBuffer = (uint8_t *)malloc(kMaxBufferSize);
-        if (!inputBuffer) {
-            cout << "[   WARN   ] Test Skipped. Insufficient memory \n";
-            return;
-        }
+        ASSERT_NE(inputBuffer, nullptr) << "Insufficient memory";
+
         vector<AMediaCodecBufferInfo> frameInfo;
         AMediaCodecBufferInfo info;
         uint32_t inputBufferOffset = 0;
@@ -78,11 +69,9 @@
             status = extractor->getFrameSample(info);
             if (status || !info.size) break;
             // copy the meta data and buffer to be passed to decoder
-            if (inputBufferOffset + info.size > kMaxBufferSize) {
-                cout << "[   WARN   ] Test Skipped. Memory allocated not sufficient\n";
-                free(inputBuffer);
-                return;
-            }
+            ASSERT_LE(inputBufferOffset + info.size, kMaxBufferSize)
+                    << "Memory allocated not sufficient";
+
             memcpy(inputBuffer + inputBufferOffset, extractor->getFrameBuf(), info.size);
             frameInfo.push_back(info);
             inputBufferOffset += info.size;
@@ -91,16 +80,12 @@
         string decName = "";
         string outputFileName = "decode.out";
         FILE *outFp = fopen(outputFileName.c_str(), "wb");
-        if (outFp == nullptr) {
-            ALOGE("Unable to open output file for writing");
-            return;
-        }
+        ASSERT_NE(outFp, nullptr) << "Unable to open output file " << outputFileName
+                                  << " for dumping decoder's output";
+
         decoder->setupDecoder();
         status = decoder->decode(inputBuffer, frameInfo, decName, false /*asyncMode */, outFp);
-        if (status != AMEDIA_OK) {
-            cout << "[   WARN   ] Test Skipped. Decode returned error \n";
-            return;
-        }
+        ASSERT_EQ(status, AMEDIA_OK) << "Decode returned error : " << status;
 
         ifstream eleStream;
         eleStream.open(outputFileName.c_str(), ifstream::binary | ifstream::ate);
@@ -111,15 +96,13 @@
         AMediaFormat *format = extractor->getFormat();
         const char *mime = nullptr;
         AMediaFormat_getString(format, AMEDIAFORMAT_KEY_MIME, &mime);
-        if (!mime) {
-            ALOGE("Error in AMediaFormat_getString");
-            return;
-        }
+        ASSERT_NE(mime, nullptr) << "Invalid mime type";
+
         // Get encoder params
         encParameter encParams;
         if (!strncmp(mime, "video/", 6)) {
-            AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_WIDTH, &encParams.width);
-            AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_HEIGHT, &encParams.height);
+            ASSERT_TRUE(AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_WIDTH, &encParams.width));
+            ASSERT_TRUE(AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_HEIGHT, &encParams.height));
             AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_FRAME_RATE, &encParams.frameRate);
             AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_BIT_RATE, &encParams.bitrate);
             if (encParams.bitrate <= 0 || encParams.frameRate <= 0) {
@@ -133,8 +116,10 @@
             AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_PROFILE, &encParams.profile);
             AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_LEVEL, &encParams.level);
         } else {
-            AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_SAMPLE_RATE, &encParams.sampleRate);
-            AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_CHANNEL_COUNT, &encParams.numChannels);
+            ASSERT_TRUE(AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_SAMPLE_RATE,
+                                              &encParams.sampleRate));
+            ASSERT_TRUE(AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_CHANNEL_COUNT,
+                                              &encParams.numChannels));
             encParams.bitrate =
                     encParams.sampleRate * encParams.numChannels * 16 /* bitsPerSample */;
         }
@@ -143,13 +128,10 @@
         string codecName = get<1>(params);
         bool asyncMode = get<2>(params);
         status = encoder->encode(codecName, eleStream, eleSize, asyncMode, encParams, (char *)mime);
-        if (status != AMEDIA_OK) {
-            cout << "[   WARN   ] Test Failed. Encode returned error " << status << endl;
-            free(inputBuffer);
-            return;
-        }
+        ASSERT_EQ(status, 0) << "Encoder failed for " << codecName;
+
         encoder->deInitCodec();
-        cout << "codec : " << codecName << endl;
+        ALOGV("codec : %s", codecName.c_str());
         string inputReference = get<0>(params);
         encoder->dumpStatistics(inputReference, extractor->getClipDuration());
         eleStream.close();
diff --git a/media/tests/benchmark/tests/ExtractorTest.cpp b/media/tests/benchmark/tests/ExtractorTest.cpp
index dd0d711..c2d72ff 100644
--- a/media/tests/benchmark/tests/ExtractorTest.cpp
+++ b/media/tests/benchmark/tests/ExtractorTest.cpp
@@ -19,8 +19,8 @@
 
 #include <gtest/gtest.h>
 
-#include "Extractor.h"
 #include "BenchmarkTestEnvironment.h"
+#include "Extractor.h"
 
 static BenchmarkTestEnvironment *gEnv = nullptr;
 
@@ -28,33 +28,24 @@
 
 TEST_P(ExtractorTest, Extract) {
     Extractor *extractObj = new Extractor();
+    ASSERT_NE(extractObj, nullptr) << "Extractor creation failed";
 
     string inputFile = gEnv->getRes() + GetParam().first;
     FILE *inputFp = fopen(inputFile.c_str(), "rb");
-    if (!inputFp) {
-        cout << "[   WARN   ] Test Skipped. Unable to open input file for reading \n";
-        return;
-    }
+    ASSERT_NE(inputFp, nullptr) << "Unable to open " << inputFile << " file for reading";
 
     // Read file properties
-    size_t fileSize = 0;
-    fseek(inputFp, 0, SEEK_END);
-    fileSize = ftell(inputFp);
-    fseek(inputFp, 0, SEEK_SET);
+    struct stat buf;
+    stat(inputFile.c_str(), &buf);
+    size_t fileSize = buf.st_size;
     int32_t fd = fileno(inputFp);
 
     int32_t trackCount = extractObj->initExtractor(fd, fileSize);
-    if (trackCount <= 0) {
-        cout << "[   WARN   ] Test Skipped. initExtractor failed\n";
-        return;
-    }
+    ASSERT_GT(trackCount, 0) << "initExtractor failed";
 
     int32_t trackID = GetParam().second;
     int32_t status = extractObj->extract(trackID);
-    if (status != AMEDIA_OK) {
-        cout << "[   WARN   ] Test Skipped. Extraction failed \n";
-        return;
-    }
+    ASSERT_EQ(status, AMEDIA_OK) << "Extraction failed";
 
     extractObj->deInitExtractor();
 
@@ -79,7 +70,8 @@
                                            make_pair("bbb_8000hz_1ch_8kbps_amrnb_5mins.3gp", 0),
                                            make_pair("bbb_16000hz_1ch_9kbps_amrwb_5mins.3gp", 0),
                                            make_pair("bbb_44100hz_2ch_80kbps_vorbis_5mins.mp4", 0),
-                                           make_pair("bbb_48000hz_2ch_100kbps_opus_5mins.webm", 0)));
+                                           make_pair("bbb_48000hz_2ch_100kbps_opus_5mins.webm",
+                                                     0)));
 
 int main(int argc, char **argv) {
     gEnv = new BenchmarkTestEnvironment();
diff --git a/media/tests/benchmark/tests/MuxerTest.cpp b/media/tests/benchmark/tests/MuxerTest.cpp
index e814f90..7b01732 100644
--- a/media/tests/benchmark/tests/MuxerTest.cpp
+++ b/media/tests/benchmark/tests/MuxerTest.cpp
@@ -21,8 +21,8 @@
 #include <fstream>
 #include <iostream>
 
-#include "Muxer.h"
 #include "BenchmarkTestEnvironment.h"
+#include "Muxer.h"
 
 #define OUTPUT_FILE_NAME "/data/local/tmp/mux.out"
 
@@ -53,49 +53,34 @@
     ALOGV("Mux the samples given by extractor");
     string inputFile = gEnv->getRes() + GetParam().first;
     FILE *inputFp = fopen(inputFile.c_str(), "rb");
-    if (!inputFp) {
-        cout << "[   WARN   ] Test Skipped. Unable to open input file for reading \n";
-        return;
-    }
+    ASSERT_NE(inputFp, nullptr) << "Unable to open " << inputFile << " file for reading";
+
     string fmt = GetParam().second;
     MUXER_OUTPUT_T outputFormat = getMuxerOutFormat(fmt);
-    if (outputFormat == MUXER_OUTPUT_FORMAT_INVALID) {
-        ALOGE("output format is MUXER_OUTPUT_FORMAT_INVALID");
-        return;
-    }
+    ASSERT_NE(outputFormat, MUXER_OUTPUT_FORMAT_INVALID) << "Invalid muxer output format";
 
     Muxer *muxerObj = new Muxer();
+    ASSERT_NE(muxerObj, nullptr) << "Muxer creation failed";
+
     Extractor *extractor = muxerObj->getExtractor();
-    if (!extractor) {
-        cout << "[   WARN   ] Test Skipped. Extractor creation failed \n";
-        return;
-    }
+    ASSERT_NE(extractor, nullptr) << "Extractor creation failed";
 
     // Read file properties
-    size_t fileSize = 0;
-    fseek(inputFp, 0, SEEK_END);
-    fileSize = ftell(inputFp);
-    fseek(inputFp, 0, SEEK_SET);
+    struct stat buf;
+    stat(inputFile.c_str(), &buf);
+    size_t fileSize = buf.st_size;
     int32_t fd = fileno(inputFp);
 
     int32_t trackCount = extractor->initExtractor(fd, fileSize);
-    if (trackCount <= 0) {
-        cout << "[   WARN   ] Test Skipped. initExtractor failed\n";
-        return;
-    }
+    ASSERT_GT(trackCount, 0) << "initExtractor failed";
 
     for (int curTrack = 0; curTrack < trackCount; curTrack++) {
         int32_t status = extractor->setupTrackFormat(curTrack);
-        if (status != 0) {
-            cout << "[   WARN   ] Test Skipped. Track Format invalid \n";
-            return;
-        }
+        ASSERT_EQ(status, 0) << "Track Format invalid";
 
         uint8_t *inputBuffer = (uint8_t *)malloc(kMaxBufferSize);
-        if (!inputBuffer) {
-            std::cout << "[   WARN   ] Test Skipped. Insufficient memory \n";
-            return;
-        }
+        ASSERT_NE(inputBuffer, nullptr) << "Insufficient memory";
+
         // AMediaCodecBufferInfo : <size of frame> <flags> <presentationTimeUs> <offset>
         vector<AMediaCodecBufferInfo> frameInfos;
         AMediaCodecBufferInfo info;
@@ -106,11 +91,9 @@
             status = extractor->getFrameSample(info);
             if (status || !info.size) break;
             // copy the meta data and buffer to be passed to muxer
-            if (inputBufferOffset + info.size > kMaxBufferSize) {
-                cout << "[   WARN   ] Test Skipped. Memory allocated not sufficient\n";
-                free(inputBuffer);
-                return;
-            }
+            ASSERT_LE(inputBufferOffset + info.size, kMaxBufferSize)
+                    << "Allocated memory is not sufficient";
+
             memcpy(inputBuffer + inputBufferOffset, extractor->getFrameBuf(), info.size);
             info.offset = inputBufferOffset;
             frameInfos.push_back(info);
@@ -119,22 +102,16 @@
 
         string outputFileName = OUTPUT_FILE_NAME;
         FILE *outputFp = fopen(outputFileName.c_str(), "w+b");
-        if (!outputFp) {
-            cout << "[   WARN   ] Test Skipped. Unable to open output file for writing \n";
-            return;
-        }
+        ASSERT_NE(outputFp, nullptr)
+                << "Unable to open output file" << outputFileName << " for writing";
+
         int32_t fd = fileno(outputFp);
         status = muxerObj->initMuxer(fd, outputFormat);
-        if (status != 0) {
-            cout << "[   WARN   ] Test Skipped. initMuxer failed\n";
-            return;
-        }
+        ASSERT_EQ(status, 0) << "initMuxer failed";
 
         status = muxerObj->mux(inputBuffer, frameInfos);
-        if (status != 0) {
-            cout << "[   WARN   ] Test Skipped. Mux failed \n";
-            return;
-        }
+        ASSERT_EQ(status, 0) << "Mux failed";
+
         muxerObj->deInitMuxer();
         muxerObj->dumpStatistics(GetParam().first + "." + fmt.c_str());
         free(inputBuffer);
diff --git a/media/utils/Android.bp b/media/utils/Android.bp
index 5047b19..b7037e9 100644
--- a/media/utils/Android.bp
+++ b/media/utils/Android.bp
@@ -29,6 +29,7 @@
         "libc_malloc_debug_backtrace",
     ],
     shared_libs: [
+        "libaudioutils", // for clock.h
         "libbinder",
         "libcutils",
         "liblog",
diff --git a/media/utils/ServiceUtilities.cpp b/media/utils/ServiceUtilities.cpp
index a77311e..87ea084 100644
--- a/media/utils/ServiceUtilities.cpp
+++ b/media/utils/ServiceUtilities.cpp
@@ -16,6 +16,7 @@
 
 #define LOG_TAG "ServiceUtilities"
 
+#include <audio_utils/clock.h>
 #include <binder/AppOpsManager.h>
 #include <binder/IPCThreadState.h>
 #include <binder/IServiceManager.h>
@@ -24,6 +25,7 @@
 
 #include <iterator>
 #include <algorithm>
+#include <pwd.h>
 
 /* When performing permission checks we do not use permission cache for
  * runtime permissions (protection level dangerous) as they may change at
@@ -143,6 +145,15 @@
     return ok;
 }
 
+bool captureVoiceCommunicationOutputAllowed(pid_t pid, uid_t uid) {
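+    // Capturing the output of a voice communication is restricted to audioserver/root
+    // or to callers holding android.permission.CAPTURE_VOICE_COMMUNICATION_OUTPUT.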
+    if (isAudioServerOrRootUid(uid)) return true;
+    static const String16 sCaptureVoiceCommOutput(
+        "android.permission.CAPTURE_VOICE_COMMUNICATION_OUTPUT");
+    bool ok = PermissionCache::checkPermission(sCaptureVoiceCommOutput, pid, uid);
+    if (!ok) ALOGE("Request requires android.permission.CAPTURE_VOICE_COMMUNICATION_OUTPUT");
+    return ok;
+}
+
 bool captureHotwordAllowed(const String16& opPackageName, pid_t pid, uid_t uid) {
     // CAPTURE_AUDIO_HOTWORD permission implies RECORD_AUDIO permission
     bool ok = recordingAllowed(opPackageName, pid, uid);
@@ -329,4 +340,130 @@
     }
 }
 
+// How long we hold info before we re-fetch it (24 hours) if we found it previously.
+static constexpr nsecs_t INFO_EXPIRATION_NS = 24 * 60 * 60 * NANOS_PER_SECOND;
+// Maximum info records we retain before clearing everything.
+static constexpr size_t INFO_CACHE_MAX = 1000;
+
+// The original code is from MediaMetricsService.cpp.
+mediautils::UidInfo::Info mediautils::UidInfo::getInfo(uid_t uid)
+{
+    const nsecs_t now = systemTime(SYSTEM_TIME_REALTIME);
+    struct mediautils::UidInfo::Info info;
+    {
+        std::lock_guard _l(mLock);
+        auto it = mInfoMap.find(uid);
+        if (it != mInfoMap.end()) {
+            info = it->second;
+            ALOGV("%s: uid %d expiration %lld now %lld",
+                    __func__, uid, (long long)info.expirationNs, (long long)now);
+            if (info.expirationNs <= now) {
+                // purge the stale entry and fall into re-fetching
+                ALOGV("%s: entry for uid %d expired, now %lld",
+                        __func__, uid, (long long)now);
+                mInfoMap.erase(it);
+                info.uid = (uid_t)-1;  // this is always fully overwritten
+            }
+        }
+    }
+
+    // if we did not find it in our map, look it up
+    if (info.uid == (uid_t)(-1)) {
+        sp<IServiceManager> sm = defaultServiceManager();
+        sp<content::pm::IPackageManagerNative> package_mgr;
+        if (sm.get() == nullptr) {
+            ALOGE("%s: Cannot find service manager", __func__);
+        } else {
+            sp<IBinder> binder = sm->getService(String16("package_native"));
+            if (binder.get() == nullptr) {
+                ALOGE("%s: Cannot find package_native", __func__);
+            } else {
+                package_mgr = interface_cast<content::pm::IPackageManagerNative>(binder);
+            }
+        }
+
+        // find package name
+        std::string pkg;
+        if (package_mgr != nullptr) {
+            std::vector<std::string> names;
+            binder::Status status = package_mgr->getNamesForUids({(int)uid}, &names);
+            if (!status.isOk()) {
+                ALOGE("%s: getNamesForUids failed: %s",
+                        __func__, status.exceptionMessage().c_str());
+            } else {
+                if (!names[0].empty()) {
+                    pkg = names[0].c_str();
+                }
+            }
+        }
+
+        if (pkg.empty()) {
+            struct passwd pw{}, *result;
+            char buf[8192]; // extra buffer space - should exceed what is
+                            // required in struct passwd (tested),
+                            // and even then this path is only used as a
+                            // fallback when the package manager is unavailable.
+            if (getpwuid_r(uid, &pw, buf, sizeof(buf), &result) == 0
+                    && result != nullptr
+                    && result->pw_name != nullptr) {
+                pkg = result->pw_name;
+            }
+        }
+
+        // strip any leading "shared:" strings that came back
+        if (pkg.compare(0, 7, "shared:") == 0) {
+            pkg.erase(0, 7);
+        }
+
+        // determine how pkg was installed and the versionCode
+        std::string installer;
+        int64_t versionCode = 0;
+        bool notFound = false;
+        if (pkg.empty()) {
+            pkg = std::to_string(uid); // not found
+            notFound = true;
+        } else if (strchr(pkg.c_str(), '.') == nullptr) {
+            // not of form 'com.whatever...'; assume internal
+            // so we don't need to look it up in package manager.
+        } else if (strncmp(pkg.c_str(), "android.", 8) == 0) {
+            // android.* packages are assumed fine
+        } else if (package_mgr.get() != nullptr) {
+            String16 pkgName16(pkg.c_str());
+            binder::Status status = package_mgr->getInstallerForPackage(pkgName16, &installer);
+            if (!status.isOk()) {
+                ALOGE("%s: getInstallerForPackage failed: %s",
+                        __func__, status.exceptionMessage().c_str());
+            }
+
+            // skip if we didn't get an installer
+            if (status.isOk()) {
+                status = package_mgr->getVersionCodeForPackage(pkgName16, &versionCode);
+                if (!status.isOk()) {
+                    ALOGE("%s: getVersionCodeForPackage failed: %s",
+                            __func__, status.exceptionMessage().c_str());
+                }
+            }
+
+            ALOGV("%s: package '%s' installed by '%s' versioncode %lld",
+                    __func__, pkg.c_str(), installer.c_str(), (long long)versionCode);
+        }
+
+        // add it to the map, to save a subsequent lookup
+        std::lock_guard _l(mLock);
+        // first clear if we have too many cached elements.  This would be rare.
+        if (mInfoMap.size() >= INFO_CACHE_MAX) mInfoMap.clear();
+
+        // always overwrite
+        info.uid = uid;
+        info.package = std::move(pkg);
+        info.installer = std::move(installer);
+        info.versionCode = versionCode;
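+        // An unresolved uid expires immediately so it is looked up again on the next query.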
+        info.expirationNs = now + (notFound ? 0 : INFO_EXPIRATION_NS);
+        ALOGV("%s: adding uid %d package '%s' expirationNs: %lld",
+                __func__, uid, info.package.c_str(), (long long)info.expirationNs);
+        mInfoMap[uid] = info;
+    }
+    return info;
+}
+
 } // namespace android
diff --git a/media/utils/include/mediautils/ServiceUtilities.h b/media/utils/include/mediautils/ServiceUtilities.h
index e467058..212599a 100644
--- a/media/utils/include/mediautils/ServiceUtilities.h
+++ b/media/utils/include/mediautils/ServiceUtilities.h
@@ -28,6 +28,7 @@
 #include <map>
 #include <optional>
 #include <string>
+#include <unordered_map>
 #include <vector>
 
 namespace android {
@@ -82,6 +83,7 @@
 void finishRecording(const String16& opPackageName, uid_t uid);
 bool captureAudioOutputAllowed(pid_t pid, uid_t uid);
 bool captureMediaOutputAllowed(pid_t pid, uid_t uid);
+bool captureVoiceCommunicationOutputAllowed(pid_t pid, uid_t uid);
 bool captureHotwordAllowed(const String16& opPackageName, pid_t pid, uid_t uid);
 bool settingsAllowed();
 bool modifyAudioRoutingAllowed();
@@ -118,6 +120,40 @@
     using Packages = std::vector<Package>;
     std::map<uid_t, Packages> mDebugLog;
 };
-}
+
+namespace mediautils {
+
+/**
+ * This class is used to retrieve (and cache) package information
+ * for a given uid.
+ */
+class UidInfo {
+public:
+    struct Info {
+        uid_t uid = -1;           // uid used for lookup.
+        std::string package;      // package name.
+        std::string installer;    // installer for the package (e.g. preload, play store).
+        int64_t versionCode = 0;  // reported version code.
+        int64_t expirationNs = 0; // after this time in SYSTEM_TIME_REALTIME we refetch.
+    };
+
+    /**
+     * Returns the package information for a UID.
+     *
+     * The package name will be the uid if we cannot find the associated name.
+     *
+     * \param uid is the uid of the app or service.
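+     *
+     * Illustrative usage (mirroring AudioFlinger::dumpClients()):
+     *   const mediautils::UidInfo::Info info = mUidInfo.getInfo(uid);
+     *   // info.package then holds the resolved package name, or the uid as a string.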
+     */
+    Info getInfo(uid_t uid);
+
+private:
+    std::mutex mLock;
+    // TODO: use concurrent hashmap with striped lock.
+    std::unordered_map<uid_t, Info> mInfoMap; // GUARDED_BY(mLock)
+};
+
+} // namespace mediautils
+
+} // namespace android
 
 #endif // ANDROID_MEDIAUTILS_SERVICEUTILITIES_H
diff --git a/services/audioflinger/Android.bp b/services/audioflinger/Android.bp
index de8c7e7..f06d026 100644
--- a/services/audioflinger/Android.bp
+++ b/services/audioflinger/Android.bp
@@ -3,12 +3,15 @@
 cc_library_shared {
     name: "libaudioflinger",
 
+    tidy: false, // b/146435095, segmentation fault with Effects.cpp
+
     srcs: [
         "AudioFlinger.cpp",
         "AudioHwDevice.cpp",
         "AudioStreamOut.cpp",
         "AudioWatchdog.cpp",
         "BufLog.cpp",
+        "DeviceEffectManager.cpp",
         "Effects.cpp",
         "FastCapture.cpp",
         "FastCaptureDumpState.cpp",
@@ -62,6 +65,7 @@
     ],
 
     header_libs: [
+        "libaudiohal_headers",
         "libmedia_headers",
     ],
 
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index e953c8e..d6f0824 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -170,6 +170,7 @@
       mClientSharedHeapSize(kMinimumClientSharedHeapSizeBytes),
       mGlobalEffectEnableTime(0),
       mPatchPanel(this),
+      mDeviceEffectManager(this),
       mSystemReady(false)
 {
     // unsigned instead of audio_unique_id_use_t, because ++ operator is unavailable for enum
@@ -347,6 +348,9 @@
         thread->configure(&localAttr, streamType, actualSessionId, callback, *deviceId, portId);
         *handle = portId;
         *sessionId = actualSessionId;
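+        // Report the stream parameters actually selected by the mmap thread back to the caller.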
+        config->sample_rate = thread->sampleRate();
+        config->channel_mask = thread->channelMask();
+        config->format = thread->format();
     } else {
         if (direction == MmapStreamInterface::DIRECTION_OUTPUT) {
             AudioSystem::releaseOutput(portId);
@@ -382,6 +386,24 @@
     }
 }
 
+status_t AudioFlinger::addEffectToHal(audio_port_handle_t deviceId,
+        audio_module_handle_t hwModuleId, sp<EffectHalInterface> effect) {
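+    // Called by DeviceEffectManager to attach a device effect to the HAL of the audio
+    // module identified by hwModuleId; returns NO_INIT if that module is not opened.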
+    AudioHwDevice *audioHwDevice = mAudioHwDevs.valueFor(hwModuleId);
+    if (audioHwDevice == nullptr) {
+        return NO_INIT;
+    }
+    return audioHwDevice->hwDevice()->addDeviceEffect(deviceId, effect);
+}
+
+status_t AudioFlinger::removeEffectFromHal(audio_port_handle_t deviceId,
+        audio_module_handle_t hwModuleId, sp<EffectHalInterface> effect) {
+    AudioHwDevice *audioHwDevice = mAudioHwDevs.valueFor(hwModuleId);
+    if (audioHwDevice == nullptr) {
+        return NO_INIT;
+    }
+    return audioHwDevice->hwDevice()->removeDeviceEffect(deviceId, effect);
+}
+
 static const char * const audio_interfaces[] = {
     AUDIO_HARDWARE_MODULE_ID_PRIMARY,
     AUDIO_HARDWARE_MODULE_ID_A2DP,
@@ -422,31 +444,32 @@
 
 void AudioFlinger::dumpClients(int fd, const Vector<String16>& args __unused)
 {
-    const size_t SIZE = 256;
-    char buffer[SIZE];
     String8 result;
 
     result.append("Clients:\n");
     for (size_t i = 0; i < mClients.size(); ++i) {
         sp<Client> client = mClients.valueAt(i).promote();
         if (client != 0) {
-            snprintf(buffer, SIZE, "  pid: %d\n", client->pid());
-            result.append(buffer);
+            result.appendFormat("  pid: %d\n", client->pid());
         }
     }
 
     result.append("Notification Clients:\n");
+    result.append("   pid    uid  name\n");
     for (size_t i = 0; i < mNotificationClients.size(); ++i) {
-        snprintf(buffer, SIZE, "  pid: %d\n", mNotificationClients.keyAt(i));
-        result.append(buffer);
+        const pid_t pid = mNotificationClients[i]->getPid();
+        const uid_t uid = mNotificationClients[i]->getUid();
+        const mediautils::UidInfo::Info info = mUidInfo.getInfo(uid);
+        result.appendFormat("%6d %6u  %s\n", pid, uid, info.package.c_str());
     }
 
     result.append("Global session refs:\n");
-    result.append("  session   pid count\n");
+    result.append("  session  cnt     pid    uid  name\n");
     for (size_t i = 0; i < mAudioSessionRefs.size(); i++) {
         AudioSessionRef *r = mAudioSessionRefs[i];
-        snprintf(buffer, SIZE, "  %7d %5d %5d\n", r->mSessionid, r->mPid, r->mCnt);
-        result.append(buffer);
+        const mediautils::UidInfo::Info info = mUidInfo.getInfo(r->mUid);
+        result.appendFormat("  %7d %4d %7d %6u  %s\n", r->mSessionid, r->mCnt, r->mPid,
+                r->mUid, info.package.c_str());
     }
     write(fd, result.string(), result.size());
 }
@@ -558,6 +581,8 @@
 
         mPatchPanel.dump(fd);
 
+        mDeviceEffectManager.dump(fd);
+
         // dump external setParameters
         auto dumpLogger = [fd](SimpleLog& logger, const char* name) {
             dprintf(fd, "\n%s setParameters:\n", name);
@@ -1582,11 +1607,6 @@
 
     AutoMutex lock(mHardwareLock);
     mHardwareStatus = AUDIO_HW_GET_INPUT_BUFFER_SIZE;
-    audio_config_t config, proposed;
-    memset(&proposed, 0, sizeof(proposed));
-    proposed.sample_rate = sampleRate;
-    proposed.channel_mask = channelMask;
-    proposed.format = format;
 
     sp<DeviceHalInterface> dev = mPrimaryHardwareDev->hwDevice();
     std::vector<audio_channel_mask_t> channelMasks = {channelMask};
@@ -1610,12 +1630,16 @@
 
     mHardwareStatus = AUDIO_HW_IDLE;
 
+    // Change parameters of the configuration each iteration until we find a
+    // configuration that the device will support.
+    audio_config_t config = AUDIO_CONFIG_INITIALIZER;
     for (auto testChannelMask : channelMasks) {
         config.channel_mask = testChannelMask;
         for (auto testFormat : formats) {
             config.format = testFormat;
             for (auto testSampleRate : sampleRates) {
                 config.sample_rate = testSampleRate;
+
                 size_t bytes = 0;
                 status_t result = dev->getInputBufferSize(&config, &bytes);
                 if (result != OK || bytes == 0) {
@@ -1695,13 +1719,16 @@
         return;
     }
     pid_t pid = IPCThreadState::self()->getCallingPid();
+    const uid_t uid = IPCThreadState::self()->getCallingUid();
     {
         Mutex::Autolock _cl(mClientLock);
         if (mNotificationClients.indexOfKey(pid) < 0) {
             sp<NotificationClient> notificationClient = new NotificationClient(this,
                                                                                 client,
-                                                                                pid);
-            ALOGV("registerClient() client %p, pid %d", notificationClient.get(), pid);
+                                                                                pid,
+                                                                                uid);
+            ALOGV("registerClient() client %p, pid %d, uid %u",
+                    notificationClient.get(), pid, uid);
 
             mNotificationClients.add(pid, notificationClient);
 
@@ -1841,8 +1868,9 @@
 
 AudioFlinger::NotificationClient::NotificationClient(const sp<AudioFlinger>& audioFlinger,
                                                      const sp<IAudioFlingerClient>& client,
-                                                     pid_t pid)
-    : mAudioFlinger(audioFlinger), mPid(pid), mAudioFlingerClient(client)
+                                                     pid_t pid,
+                                                     uid_t uid)
+    : mAudioFlinger(audioFlinger), mPid(pid), mUid(uid), mAudioFlingerClient(client)
 {
 }
 
@@ -2869,14 +2897,18 @@
     return nextUniqueId(use);
 }
 
-void AudioFlinger::acquireAudioSessionId(audio_session_t audioSession, pid_t pid)
+void AudioFlinger::acquireAudioSessionId(
+        audio_session_t audioSession, pid_t pid, uid_t uid)
 {
     Mutex::Autolock _l(mLock);
     pid_t caller = IPCThreadState::self()->getCallingPid();
     ALOGV("acquiring %d from %d, for %d", audioSession, caller, pid);
     const uid_t callerUid = IPCThreadState::self()->getCallingUid();
-    if (pid != -1 && isAudioServerUid(callerUid)) { // check must match releaseAudioSessionId()
-        caller = pid;
+    if (pid != (pid_t)-1 && isAudioServerOrMediaServerUid(callerUid)) {
+        caller = pid;  // check must match releaseAudioSessionId()
+    }
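+    // Attribute the session to the caller's own uid unless a trusted server supplied one.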
+    if (uid == (uid_t)-1 || !isAudioServerOrMediaServerUid(callerUid)) {
+        uid = callerUid;
     }
 
     {
@@ -2900,7 +2932,7 @@
             return;
         }
     }
-    mAudioSessionRefs.push(new AudioSessionRef(audioSession, caller));
+    mAudioSessionRefs.push(new AudioSessionRef(audioSession, caller, uid));
     ALOGV(" added new entry for %d", audioSession);
 }
 
@@ -2912,8 +2944,8 @@
         pid_t caller = IPCThreadState::self()->getCallingPid();
         ALOGV("releasing %d from %d for %d", audioSession, caller, pid);
         const uid_t callerUid = IPCThreadState::self()->getCallingUid();
-        if (pid != -1 && isAudioServerUid(callerUid)) { // check must match acquireAudioSessionId()
-            caller = pid;
+        if (pid != (pid_t)-1 && isAudioServerOrMediaServerUid(callerUid)) {
+            caller = pid;  // check must match acquireAudioSessionId()
         }
         size_t num = mAudioSessionRefs.size();
         for (size_t i = 0; i < num; i++) {
@@ -3316,7 +3348,7 @@
         int32_t priority,
         audio_io_handle_t io,
         audio_session_t sessionId,
-        const AudioDeviceTypeAddr& device __unused,
+        const AudioDeviceTypeAddr& device,
         const String16& opPackageName,
         pid_t pid,
         status_t *status,
@@ -3380,7 +3412,6 @@
             lStatus = BAD_VALUE;
             goto Exit;
         }
-        //TODO: add check on device ID when added to arguments
     } else {
         // general sessionId.
 
@@ -3433,6 +3464,23 @@
 
         Mutex::Autolock _l(mLock);
 
+        if (sessionId == AUDIO_SESSION_DEVICE) {
+            sp<Client> client = registerPid(pid);
+            ALOGV("%s device type %d address %s", __func__, device.mType, device.getAddress());
+            handle = mDeviceEffectManager.createEffect_l(
+                    &desc, device, client, effectClient, mPatchPanel.patches_l(),
+                    enabled, &lStatus);
+            if (lStatus != NO_ERROR && lStatus != ALREADY_EXISTS) {
+                // remove local strong reference to Client with mClientLock held
+                Mutex::Autolock _cl(mClientLock);
+                client.clear();
+            } else {
+                // handle must be valid here, but check again to be safe.
+                if (handle.get() != nullptr && id != nullptr) *id = handle->id();
+            }
+            goto Register;
+        }
+
         // If output is not specified try to find a matching audio session ID in one of the
         // output threads.
         // If output is 0 here, sessionId is neither SESSION_OUTPUT_STAGE nor SESSION_OUTPUT_MIX
@@ -3527,9 +3575,10 @@
         }
     }
 
+Register:
     if (lStatus == NO_ERROR || lStatus == ALREADY_EXISTS) {
         // Check CPU and memory usage
-        sp<EffectModule> effect = handle->effect().promote();
+        sp<EffectBase> effect = handle->effect().promote();
         if (effect != nullptr) {
             status_t rStatus = effect->updatePolicyState();
             if (rStatus != NO_ERROR) {
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index eb851ec..d4e0ae2 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -71,6 +71,7 @@
 #include <media/DeviceDescriptorBase.h>
 #include <media/ExtendedAudioBufferProvider.h>
 #include <media/VolumeShaper.h>
+#include <mediautils/ServiceUtilities.h>
 
 #include <audio_utils/clock.h>
 #include <audio_utils/FdToString.h>
@@ -213,7 +214,7 @@
     // This is the binder API.  For the internal API see nextUniqueId().
     virtual audio_unique_id_t newAudioUniqueId(audio_unique_id_use_t use);
 
-    virtual void acquireAudioSessionId(audio_session_t audioSession, pid_t pid);
+    void acquireAudioSessionId(audio_session_t audioSession, pid_t pid, uid_t uid) override;
 
     virtual void releaseAudioSessionId(audio_session_t audioSession, pid_t pid);
 
@@ -308,6 +309,12 @@
 
     static int onExternalVibrationStart(const sp<os::ExternalVibration>& externalVibration);
     static void onExternalVibrationStop(const sp<os::ExternalVibration>& externalVibration);
+
+    status_t addEffectToHal(audio_port_handle_t deviceId,
+            audio_module_handle_t hwModuleId, sp<EffectHalInterface> effect);
+    status_t removeEffectFromHal(audio_port_handle_t deviceId,
+            audio_module_handle_t hwModuleId, sp<EffectHalInterface> effect);
+
 private:
     // FIXME The 400 is temporarily too high until a leak of writers in media.log is fixed.
     static const size_t kLogMemorySize = 400 * 1024;
@@ -336,7 +343,10 @@
 
         virtual ~SyncEvent() {}
 
-        void trigger() { Mutex::Autolock _l(mLock); if (mCallback) mCallback(this); }
+        void trigger() {
+            Mutex::Autolock _l(mLock);
+            if (mCallback) mCallback(wp<SyncEvent>(this));
+        }
         bool isCancelled() const { Mutex::Autolock _l(mLock); return (mCallback == NULL); }
         void cancel() { Mutex::Autolock _l(mLock); mCallback = NULL; }
         AudioSystem::sync_event_t type() const { return mType; }
@@ -474,10 +484,13 @@
     public:
                             NotificationClient(const sp<AudioFlinger>& audioFlinger,
                                                 const sp<IAudioFlingerClient>& client,
-                                                pid_t pid);
+                                                pid_t pid,
+                                                uid_t uid);
         virtual             ~NotificationClient();
 
                 sp<IAudioFlingerClient> audioFlingerClient() const { return mAudioFlingerClient; }
+                pid_t getPid() const { return mPid; }
+                uid_t getUid() const { return mUid; }
 
                 // IBinder::DeathRecipient
                 virtual     void        binderDied(const wp<IBinder>& who);
@@ -487,6 +500,7 @@
 
         const sp<AudioFlinger>  mAudioFlinger;
         const pid_t             mPid;
+        const uid_t             mUid;
         const sp<IAudioFlingerClient> mAudioFlingerClient;
     };
 
@@ -533,9 +547,14 @@
     class AsyncCallbackThread;
     class Track;
     class RecordTrack;
+    class EffectBase;
     class EffectModule;
     class EffectHandle;
     class EffectChain;
+    class DeviceEffectProxy;
+    class DeviceEffectManager;
+    class PatchPanel;
+    class DeviceEffectManagerCallback;
 
     struct AudioStreamIn;
     struct TeePatch;
@@ -573,9 +592,11 @@
 
 #include "Threads.h"
 
+#include "PatchPanel.h"
+
 #include "Effects.h"
 
-#include "PatchPanel.h"
+#include "DeviceEffectManager.h"
 
     // Find io handle by session id.
     // Preference is given to an io handle with a matching effect chain to session id.
@@ -790,10 +811,11 @@
 
     // for mAudioSessionRefs only
     struct AudioSessionRef {
-        AudioSessionRef(audio_session_t sessionid, pid_t pid) :
-            mSessionid(sessionid), mPid(pid), mCnt(1) {}
+        AudioSessionRef(audio_session_t sessionid, pid_t pid, uid_t uid) :
+            mSessionid(sessionid), mPid(pid), mUid(uid), mCnt(1) {}
         const audio_session_t mSessionid;
         const pid_t mPid;
+        const uid_t mUid;
         int         mCnt;
     };
 
@@ -921,8 +943,12 @@
     PatchPanel mPatchPanel;
     sp<EffectsFactoryHalInterface> mEffectsFactoryHal;
 
+    DeviceEffectManager mDeviceEffectManager;
+
     bool       mSystemReady;
 
+    mediautils::UidInfo mUidInfo;
+
     SimpleLog  mRejectedSetParameterLog;
     SimpleLog  mAppSetParameterLog;
     SimpleLog  mSystemSetParameterLog;
diff --git a/services/audioflinger/DeviceEffectManager.cpp b/services/audioflinger/DeviceEffectManager.cpp
new file mode 100644
index 0000000..87a4c6e
--- /dev/null
+++ b/services/audioflinger/DeviceEffectManager.cpp
@@ -0,0 +1,277 @@
+/*
+**
+** Copyright 2019, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+
+#define LOG_TAG "AudioFlinger::DeviceEffectManager"
+//#define LOG_NDEBUG 0
+
+#include <utils/Log.h>
+#include <audio_utils/primitives.h>
+
+#include "AudioFlinger.h"
+#include <media/audiohal/EffectsFactoryHalInterface.h>
+
+// ----------------------------------------------------------------------------
+
+
+namespace android {
+
+void AudioFlinger::DeviceEffectManager::createAudioPatch(audio_patch_handle_t handle,
+        const PatchPanel::Patch& patch) {
+    ALOGV("%s handle %d mHalHandle %d num sinks %d device sink %08x",
+            __func__, handle, patch.mHalHandle,
+            patch.mAudioPatch.num_sinks,
+            patch.mAudioPatch.num_sinks > 0 ? patch.mAudioPatch.sinks[0].ext.device.type : 0);
+
+    mCommandThread->createAudioPatchCommand(handle, patch);
+}
+
+void AudioFlinger::DeviceEffectManager::onCreateAudioPatch(audio_patch_handle_t handle,
+        const PatchPanel::Patch& patch) {
+    ALOGV("%s handle %d mHalHandle %d device sink %08x",
+            __func__, handle, patch.mHalHandle,
+            patch.mAudioPatch.num_sinks > 0 ? patch.mAudioPatch.sinks[0].ext.device.type : 0);
+    Mutex::Autolock _l(mLock);
+    for (auto& effect : mDeviceEffects) {
+        status_t status = effect.second->onCreatePatch(handle, patch);
+        ALOGV("%s Effect onCreatePatch status %d", __func__, status);
+        ALOGW_IF(status == BAD_VALUE, "%s onCreatePatch error %d", __func__, status);
+    }
+}
+
+void AudioFlinger::DeviceEffectManager::releaseAudioPatch(audio_patch_handle_t handle) {
+    ALOGV("%s", __func__);
+    mCommandThread->releaseAudioPatchCommand(handle);
+}
+
+void AudioFlinger::DeviceEffectManager::onReleaseAudioPatch(audio_patch_handle_t handle) {
+    ALOGV("%s", __func__);
+    Mutex::Autolock _l(mLock);
+    for (auto& effect : mDeviceEffects) {
+        effect.second->onReleasePatch(handle);
+    }
+}
+
+// DeviceEffectManager::createEffect_l() must be called with AudioFlinger::mLock held
+sp<AudioFlinger::EffectHandle> AudioFlinger::DeviceEffectManager::createEffect_l(
+        effect_descriptor_t *descriptor,
+        const AudioDeviceTypeAddr& device,
+        const sp<AudioFlinger::Client>& client,
+        const sp<IEffectClient>& effectClient,
+        const std::map<audio_patch_handle_t, PatchPanel::Patch>& patches,
+        int *enabled,
+        status_t *status) {
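+    // Reuse the DeviceEffectProxy already registered for this device if one exists,
+    // otherwise create a new proxy, then connect a new EffectHandle to it.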
+    sp<DeviceEffectProxy> effect;
+    sp<EffectHandle> handle;
+    status_t lStatus;
+
+    lStatus = checkEffectCompatibility(descriptor);
+    if (lStatus != NO_ERROR) {
+        *status = lStatus;
+        return handle;
+    }
+
+    {
+        Mutex::Autolock _l(mLock);
+        auto iter = mDeviceEffects.find(device);
+        if (iter != mDeviceEffects.end()) {
+            effect = iter->second;
+        } else {
+            effect = new DeviceEffectProxy(device, mMyCallback,
+                    descriptor, mAudioFlinger.nextUniqueId(AUDIO_UNIQUE_ID_USE_EFFECT));
+        }
+        // create effect handle and connect it to effect module
+        handle = new EffectHandle(effect, client, effectClient, 0 /*priority*/);
+        lStatus = handle->initCheck();
+        if (lStatus == NO_ERROR) {
+            lStatus = effect->addHandle(handle.get());
+            if (lStatus == NO_ERROR) {
+                effect->init(patches);
+                mDeviceEffects.emplace(device, effect);
+            }
+        }
+    }
+    if (enabled != NULL) {
+        *enabled = (int)effect->isEnabled();
+    }
+    *status = lStatus;
+    return handle;
+}
+
+status_t AudioFlinger::DeviceEffectManager::checkEffectCompatibility(
+        const effect_descriptor_t *desc) {
+
+    if ((desc->flags & EFFECT_FLAG_TYPE_MASK) != EFFECT_FLAG_TYPE_PRE_PROC
+        && (desc->flags & EFFECT_FLAG_TYPE_MASK) != EFFECT_FLAG_TYPE_POST_PROC) {
+        ALOGW("%s() non pre/post processing device effect %s", __func__, desc->name);
+        return BAD_VALUE;
+    }
+
+    return NO_ERROR;
+}
+
+status_t AudioFlinger::DeviceEffectManager::createEffectHal(
+        const effect_uuid_t *pEffectUuid, int32_t sessionId, int32_t deviceId,
+        sp<EffectHalInterface> *effect) {
+    status_t status = NO_INIT;
+    sp<EffectsFactoryHalInterface> effectsFactory = mAudioFlinger.getEffectsFactory();
+    if (effectsFactory != 0) {
+        status = effectsFactory->createEffect(
+                pEffectUuid, sessionId, AUDIO_IO_HANDLE_NONE, deviceId, effect);
+    }
+    return status;
+}
+
+void AudioFlinger::DeviceEffectManager::dump(int fd) {
+    const bool locked = dumpTryLock(mLock);
+    if (!locked) {
+        String8 result("DeviceEffectManager may be deadlocked\n");
+        write(fd, result.string(), result.size());
+    }
+
+    write(fd, "\nDevice Effects:\n", sizeof("\nDevice Effects:\n"));
+    for (const auto& iter : mDeviceEffects) {
+        String8 outStr;
+        outStr.appendFormat("%*sEffect for device %s address %s:\n", 2, "",
+                ::android::toString(iter.first.mType).c_str(), iter.first.getAddress());
+        write(fd, outStr.string(), outStr.size());
+        iter.second->dump(fd, 4);
+    }
+
+    if (locked) {
+        mLock.unlock();
+    }
+}
+
+
+size_t AudioFlinger::DeviceEffectManager::removeEffect(const sp<DeviceEffectProxy>& effect)
+{
+    Mutex::Autolock _l(mLock);
+    mDeviceEffects.erase(effect->device());
+    return mDeviceEffects.size();
+}
+
+bool AudioFlinger::DeviceEffectManagerCallback::disconnectEffectHandle(
+        EffectHandle *handle, bool unpinIfLast) {
+    sp<EffectBase> effectBase = handle->effect().promote();
+    if (effectBase == nullptr) {
+        return false;
+    }
+
+    sp<DeviceEffectProxy> effect = effectBase->asDeviceEffectProxy();
+    if (effect == nullptr) {
+        return false;
+    }
+    // Restore suspended effects if the disconnected handle was enabled and was the last one.
+    bool remove = (effect->removeHandle(handle) == 0) && (!effect->isPinned() || unpinIfLast);
+    if (remove) {
+        mManager.removeEffect(effect);
+        if (handle->enabled()) {
+            effectBase->checkSuspendOnEffectEnabled(false, false /*threadLocked*/);
+        }
+    }
+    return true;
+}
+
+// -----------  DeviceEffectManager::CommandThread implementation ----------
+
+
+AudioFlinger::DeviceEffectManager::CommandThread::~CommandThread()
+{
+    Mutex::Autolock _l(mLock);
+    mCommands.clear();
+}
+
+void AudioFlinger::DeviceEffectManager::CommandThread::onFirstRef()
+{
+    run("DeviceEffectManage_CommandThread", ANDROID_PRIORITY_AUDIO);
+}
+
+bool AudioFlinger::DeviceEffectManager::CommandThread::threadLoop()
+{
+    mLock.lock();
+    while (!exitPending())
+    {
+        while (!mCommands.empty() && !exitPending()) {
+            sp<Command> command = mCommands.front();
+            mCommands.pop_front();
+            mLock.unlock();
+
+            switch (command->mCommand) {
+            case CREATE_AUDIO_PATCH: {
+                CreateAudioPatchData *data = (CreateAudioPatchData *)command->mData.get();
+                ALOGV("CommandThread() processing create audio patch handle %d", data->mHandle);
+                mManager.onCreateAudioPatch(data->mHandle, data->mPatch);
+                } break;
+            case RELEASE_AUDIO_PATCH: {
+                ReleaseAudioPatchData *data = (ReleaseAudioPatchData *)command->mData.get();
+                ALOGV("CommandThread() processing release audio patch handle %d", data->mHandle);
+                mManager.onReleaseAudioPatch(data->mHandle);
+                } break;
+            default:
+                ALOGW("CommandThread() unknown command %d", command->mCommand);
+            }
+            mLock.lock();
+        }
+
+        // At this stage the command queue is empty (or we are exiting), so it is
+        // safe to wait until a new command is enqueued.
+        if (!exitPending()) {
+            ALOGV("CommandThread() going to sleep");
+            mWaitWorkCV.wait(mLock);
+        }
+    }
+    mLock.unlock();
+    return false;
+}
+
+void AudioFlinger::DeviceEffectManager::CommandThread::sendCommand(sp<Command> command) {
+    Mutex::Autolock _l(mLock);
+    mCommands.push_back(command);
+    mWaitWorkCV.signal();
+}
+
+void AudioFlinger::DeviceEffectManager::CommandThread::createAudioPatchCommand(
+        audio_patch_handle_t handle, const PatchPanel::Patch& patch)
+{
+    sp<Command> command = new Command(CREATE_AUDIO_PATCH, new CreateAudioPatchData(handle, patch));
+    ALOGV("CommandThread() adding create patch handle %d mHalHandle %d.", handle, patch.mHalHandle);
+    sendCommand(command);
+}
+
+void AudioFlinger::DeviceEffectManager::CommandThread::releaseAudioPatchCommand(
+        audio_patch_handle_t handle)
+{
+    sp<Command> command = new Command(RELEASE_AUDIO_PATCH, new ReleaseAudioPatchData(handle));
+    ALOGV("CommandThread() adding release patch");
+    sendCommand(command);
+}
+
+void AudioFlinger::DeviceEffectManager::CommandThread::exit()
+{
+    ALOGV("CommandThread::exit");
+    {
+        AutoMutex _l(mLock);
+        requestExit();
+        mWaitWorkCV.signal();
+    }
+    // Note that exit() can be called from the thread loop itself if all other references
+    // have been released, in which case requestExitAndWait() safely returns WOULD_BLOCK.
+    requestExitAndWait();
+}
+
+} // namespace android
diff --git a/services/audioflinger/DeviceEffectManager.h b/services/audioflinger/DeviceEffectManager.h
new file mode 100644
index 0000000..14ff14d
--- /dev/null
+++ b/services/audioflinger/DeviceEffectManager.h
@@ -0,0 +1,203 @@
+/*
+**
+** Copyright 2019, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#ifndef INCLUDING_FROM_AUDIOFLINGER_H
+    #error This header file should only be included from AudioFlinger.h
+#endif
+
+// DeviceEffectManager is concealed within AudioFlinger; their lifetimes are the same.
+class DeviceEffectManager {
+public:
+    explicit DeviceEffectManager(AudioFlinger* audioFlinger)
+        : mCommandThread(new CommandThread(*this)), mAudioFlinger(*audioFlinger),
+        mMyCallback(new DeviceEffectManagerCallback(this)) {}
+
+            ~DeviceEffectManager() {
+                mCommandThread->exit();
+            }
+
+    sp<EffectHandle> createEffect_l(effect_descriptor_t *descriptor,
+                const AudioDeviceTypeAddr& device,
+                const sp<AudioFlinger::Client>& client,
+                const sp<IEffectClient>& effectClient,
+                const std::map<audio_patch_handle_t, PatchPanel::Patch>& patches,
+                int *enabled,
+                status_t *status);
+    void createAudioPatch(audio_patch_handle_t handle, const PatchPanel::Patch& patch);
+    void releaseAudioPatch(audio_patch_handle_t handle);
+
+    size_t removeEffect(const sp<DeviceEffectProxy>& effect);
+    status_t createEffectHal(const effect_uuid_t *pEffectUuid,
+           int32_t sessionId, int32_t deviceId,
+           sp<EffectHalInterface> *effect);
+    status_t addEffectToHal(audio_port_handle_t deviceId, audio_module_handle_t hwModuleId,
+            sp<EffectHalInterface> effect) {
+        return mAudioFlinger.addEffectToHal(deviceId, hwModuleId, effect);
+    };
+    status_t removeEffectFromHal(audio_port_handle_t deviceId, audio_module_handle_t hwModuleId,
+            sp<EffectHalInterface> effect) {
+        return mAudioFlinger.removeEffectFromHal(deviceId, hwModuleId, effect);
+    };
+
+    AudioFlinger& audioFlinger() const { return mAudioFlinger; }
+
+    void dump(int fd);
+
+private:
+
+    // Thread to execute create and release patch commands asynchronously. This is needed because
+    // PatchPanel::createAudioPatch and releaseAudioPatch are executed from the audio policy
+    // service with its mutex held, while effect management requires calling back into the audio
+    // policy service.
+    class Command;
+    class CommandThread : public Thread {
+    public:
+
+        enum {
+            CREATE_AUDIO_PATCH,
+            RELEASE_AUDIO_PATCH,
+        };
+
+        CommandThread(DeviceEffectManager& manager)
+            : Thread(false), mManager(manager) {}
+        ~CommandThread() override;
+
+        // Thread virtuals
+        void onFirstRef() override;
+        bool threadLoop() override;
+
+                void exit();
+
+                void createAudioPatchCommand(audio_patch_handle_t handle,
+                        const PatchPanel::Patch& patch);
+                void releaseAudioPatchCommand(audio_patch_handle_t handle);
+
+    private:
+        class CommandData;
+
+        // descriptor for a queued create/release patch command
+        class Command: public RefBase {
+        public:
+            Command() = default;
+            Command(int command, sp<CommandData> data)
+                : mCommand(command), mData(data) {}
+
+            int mCommand = -1;
+            sp<CommandData> mData;
+        };
+
+        class CommandData: public RefBase {
+        public:
+            virtual ~CommandData() = default;
+        };
+
+        class CreateAudioPatchData : public CommandData {
+        public:
+            CreateAudioPatchData(audio_patch_handle_t handle, const PatchPanel::Patch& patch)
+                :   mHandle(handle), mPatch(patch) {}
+
+            audio_patch_handle_t mHandle;
+            const PatchPanel::Patch mPatch;
+        };
+
+        class ReleaseAudioPatchData : public CommandData {
+        public:
+            ReleaseAudioPatchData(audio_patch_handle_t handle)
+                :   mHandle(handle) {}
+
+            audio_patch_handle_t mHandle;
+        };
+
+        void sendCommand(sp<Command> command);
+
+        Mutex   mLock;
+        Condition mWaitWorkCV;
+        std::deque <sp<Command>> mCommands; // list of pending commands
+        DeviceEffectManager& mManager;
+    };
+
+    void onCreateAudioPatch(audio_patch_handle_t handle, const PatchPanel::Patch& patch);
+    void onReleaseAudioPatch(audio_patch_handle_t handle);
+
+    status_t checkEffectCompatibility(const effect_descriptor_t *desc);
+
+    Mutex mLock;
+    sp<CommandThread> mCommandThread;
+    AudioFlinger &mAudioFlinger;
+    const sp<DeviceEffectManagerCallback> mMyCallback;
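+    // Device effects indexed by the device type/address they are attached to, guarded by mLock.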
+    std::map<AudioDeviceTypeAddr, sp<DeviceEffectProxy>> mDeviceEffects;
+};
+
+class DeviceEffectManagerCallback : public EffectCallbackInterface {
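+    // EffectCallbackInterface implementation used by device effects: thread/stream
+    // related queries return neutral values because a device effect is not attached
+    // to an output or input thread.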
+public:
+            DeviceEffectManagerCallback(DeviceEffectManager *manager)
+                : mManager(*manager) {}
+
+    status_t createEffectHal(const effect_uuid_t *pEffectUuid,
+           int32_t sessionId, int32_t deviceId,
+           sp<EffectHalInterface> *effect) override {
+                return mManager.createEffectHal(pEffectUuid, sessionId, deviceId, effect);
+            }
+    status_t allocateHalBuffer(size_t size __unused,
+            sp<EffectBufferHalInterface>* buffer __unused) override { return NO_ERROR; }
+    bool updateOrphanEffectChains(const sp<EffectBase>& effect __unused) override { return false; }
+
+    audio_io_handle_t io() const override  { return AUDIO_IO_HANDLE_NONE; }
+    bool isOutput() const override { return false; }
+    bool isOffload() const override { return false; }
+    bool isOffloadOrDirect() const override { return false; }
+    bool isOffloadOrMmap() const override { return false; }
+
+    uint32_t  sampleRate() const override { return 0; }
+    audio_channel_mask_t channelMask() const override { return AUDIO_CHANNEL_NONE; }
+    uint32_t channelCount() const override { return 0; }
+    size_t    frameCount() const override  { return 0; }
+    uint32_t  latency() const override  { return 0; }
+
+    status_t addEffectToHal(sp<EffectHalInterface> effect __unused) override {
+        return NO_ERROR;
+    }
+    status_t removeEffectFromHal(sp<EffectHalInterface> effect __unused) override {
+        return NO_ERROR;
+    }
+
+    bool disconnectEffectHandle(EffectHandle *handle, bool unpinIfLast) override;
+    void setVolumeForOutput(float left __unused, float right __unused) const override {}
+
+    // check if effects should be suspended or restored when a given effect is enable or disabled
+    void checkSuspendOnEffectEnabled(const sp<EffectBase>& effect __unused,
+                          bool enabled __unused, bool threadLocked __unused) override {}
+    void resetVolume() override {}
+    uint32_t strategy() const override  { return 0; }
+    int32_t activeTrackCnt() const override { return 0; }
+    void onEffectEnable(const sp<EffectBase>& effect __unused) override {}
+    void onEffectDisable(const sp<EffectBase>& effect __unused) override {}
+
+    wp<EffectChain> chain() const override { return nullptr; }
+
+    int newEffectId() { return mManager.audioFlinger().nextUniqueId(AUDIO_UNIQUE_ID_USE_EFFECT); }
+
+    status_t addEffectToHal(audio_port_handle_t deviceId,
+            audio_module_handle_t hwModuleId, sp<EffectHalInterface> effect) {
+        return mManager.addEffectToHal(deviceId, hwModuleId, effect);
+    }
+    status_t removeEffectFromHal(audio_port_handle_t deviceId,
+            audio_module_handle_t hwModuleId, sp<EffectHalInterface> effect) {
+        return mManager.removeEffectFromHal(deviceId, hwModuleId, effect);
+    }
+private:
+    DeviceEffectManager& mManager;
+};
diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp
index 2a83fd3..2f3724f 100644
--- a/services/audioflinger/Effects.cpp
+++ b/services/audioflinger/Effects.cpp
@@ -58,71 +58,115 @@
 namespace android {
 
 // ----------------------------------------------------------------------------
-//  EffectModule implementation
+//  EffectBase implementation
 // ----------------------------------------------------------------------------
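+// EffectBase gathers the handle, state and suspend management previously hosted by
+// EffectModule so that it can be shared between EffectModule (session effects) and
+// DeviceEffectProxy (device effects).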
 
 #undef LOG_TAG
-#define LOG_TAG "AudioFlinger::EffectModule"
+#define LOG_TAG "AudioFlinger::EffectBase"
 
-AudioFlinger::EffectModule::EffectModule(const sp<AudioFlinger::EffectCallbackInterface>& callback,
+AudioFlinger::EffectBase::EffectBase(const sp<AudioFlinger::EffectCallbackInterface>& callback,
                                         effect_descriptor_t *desc,
                                         int id,
                                         audio_session_t sessionId,
                                         bool pinned)
     : mPinned(pinned),
       mCallback(callback), mId(id), mSessionId(sessionId),
-      mDescriptor(*desc),
-      // clear mConfig to ensure consistent initial value of buffer framecount
-      // in case buffers are associated by setInBuffer() or setOutBuffer()
-      // prior to configure().
-      mConfig{{}, {}},
-      mStatus(NO_INIT), mState(IDLE),
-      mMaxDisableWaitCnt(1), // set by configure(), should be >= 1
-      mDisableWaitCnt(0),    // set by process() and updateState()
-      mSuspended(false),
-      mOffloaded(false)
-#ifdef FLOAT_EFFECT_CHAIN
-      , mSupportsFloat(false)
-#endif
+      mDescriptor(*desc)
 {
-    ALOGV("Constructor %p pinned %d", this, pinned);
-    int lStatus;
-
-    // create effect engine from effect factory
-    mStatus = callback->createEffectHal(
-            &desc->uuid, sessionId, AUDIO_PORT_HANDLE_NONE, &mEffectInterface);
-    if (mStatus != NO_ERROR) {
-        return;
-    }
-    lStatus = init();
-    if (lStatus < 0) {
-        mStatus = lStatus;
-        goto Error;
-    }
-
-    setOffloaded(callback->isOffload(), callback->io());
-    ALOGV("Constructor success name %s, Interface %p", mDescriptor.name, mEffectInterface.get());
-
-    return;
-Error:
-    mEffectInterface.clear();
-    ALOGV("Constructor Error %d", mStatus);
 }
 
-AudioFlinger::EffectModule::~EffectModule()
+// must be called with EffectBase::mLock held
+status_t AudioFlinger::EffectBase::setEnabled_l(bool enabled)
 {
-    ALOGV("Destructor %p", this);
-    if (mEffectInterface != 0) {
-        char uuidStr[64];
-        AudioEffect::guidToString(&mDescriptor.uuid, uuidStr, sizeof(uuidStr));
-        ALOGW("EffectModule %p destructor called with unreleased interface, effect %s",
-                this, uuidStr);
-        release_l();
-    }
 
+    ALOGV("setEnabled %p enabled %d", this, enabled);
+
+    if (enabled != isEnabled()) {
+        switch (mState) {
+        // going from disabled to enabled
+        case IDLE:
+            mState = STARTING;
+            break;
+        case STOPPED:
+            mState = RESTART;
+            break;
+        case STOPPING:
+            mState = ACTIVE;
+            break;
+
+        // going from enabled to disabled
+        case RESTART:
+            mState = STOPPED;
+            break;
+        case STARTING:
+            mState = IDLE;
+            break;
+        case ACTIVE:
+            mState = STOPPING;
+            break;
+        case DESTROYED:
+            return NO_ERROR; // simply ignore as we are being destroyed
+        }
+        for (size_t i = 1; i < mHandles.size(); i++) {
+            EffectHandle *h = mHandles[i];
+            if (h != NULL && !h->disconnected()) {
+                h->setEnabled(enabled);
+            }
+        }
+    }
+    return NO_ERROR;
 }
 
-status_t AudioFlinger::EffectModule::addHandle(EffectHandle *handle)
+status_t AudioFlinger::EffectBase::setEnabled(bool enabled, bool fromHandle)
+{
+    status_t status;
+    {
+        Mutex::Autolock _l(mLock);
+        status = setEnabled_l(enabled);
+    }
+    if (fromHandle) {
+        if (enabled) {
+            if (status != NO_ERROR) {
+                mCallback->checkSuspendOnEffectEnabled(this, false, false /*threadLocked*/);
+            } else {
+                mCallback->onEffectEnable(this);
+            }
+        } else {
+            mCallback->onEffectDisable(this);
+        }
+    }
+    return status;
+}
+
+bool AudioFlinger::EffectBase::isEnabled() const
+{
+    switch (mState) {
+    case RESTART:
+    case STARTING:
+    case ACTIVE:
+        return true;
+    case IDLE:
+    case STOPPING:
+    case STOPPED:
+    case DESTROYED:
+    default:
+        return false;
+    }
+}
+
+void AudioFlinger::EffectBase::setSuspended(bool suspended)
+{
+    Mutex::Autolock _l(mLock);
+    mSuspended = suspended;
+}
+
+bool AudioFlinger::EffectBase::suspended() const
+{
+    Mutex::Autolock _l(mLock);
+    return mSuspended;
+}
+
+status_t AudioFlinger::EffectBase::addHandle(EffectHandle *handle)
 {
     status_t status;
 
@@ -161,7 +205,7 @@
     return status;
 }
 
-status_t AudioFlinger::EffectModule::updatePolicyState()
+status_t AudioFlinger::EffectBase::updatePolicyState()
 {
     status_t status = NO_ERROR;
     bool doRegister = false;
@@ -217,13 +261,13 @@
 }
 
 
-ssize_t AudioFlinger::EffectModule::removeHandle(EffectHandle *handle)
+ssize_t AudioFlinger::EffectBase::removeHandle(EffectHandle *handle)
 {
     Mutex::Autolock _l(mLock);
     return removeHandle_l(handle);
 }
 
-ssize_t AudioFlinger::EffectModule::removeHandle_l(EffectHandle *handle)
+ssize_t AudioFlinger::EffectBase::removeHandle_l(EffectHandle *handle)
 {
     size_t size = mHandles.size();
     size_t i;
@@ -247,19 +291,15 @@
         }
     }
 
-    // Prevent calls to process() and other functions on effect interface from now on.
-    // The effect engine will be released by the destructor when the last strong reference on
-    // this object is released which can happen after next process is called.
     if (mHandles.size() == 0 && !mPinned) {
         mState = DESTROYED;
-        mEffectInterface->close();
     }
 
     return mHandles.size();
 }
 
 // must be called with EffectModule::mLock held
-AudioFlinger::EffectHandle *AudioFlinger::EffectModule::controlHandle_l()
+AudioFlinger::EffectHandle *AudioFlinger::EffectBase::controlHandle_l()
 {
     // the first valid handle in the list has control over the module
     for (size_t i = 0; i < mHandles.size(); i++) {
@@ -273,7 +313,7 @@
 }
 
 // unsafe method called when the effect parent thread has been destroyed
-ssize_t AudioFlinger::EffectModule::disconnectHandle(EffectHandle *handle, bool unpinIfLast)
+ssize_t AudioFlinger::EffectBase::disconnectHandle(EffectHandle *handle, bool unpinIfLast)
 {
     ALOGV("disconnect() %p handle %p", this, handle);
     if (mCallback->disconnectEffectHandle(handle, unpinIfLast)) {
@@ -290,6 +330,254 @@
     return numHandles;
 }
 
+bool AudioFlinger::EffectBase::purgeHandles()
+{
+    bool enabled = false;
+    Mutex::Autolock _l(mLock);
+    EffectHandle *handle = controlHandle_l();
+    if (handle != NULL) {
+        enabled = handle->enabled();
+    }
+    mHandles.clear();
+    return enabled;
+}
+
+void AudioFlinger::EffectBase::checkSuspendOnEffectEnabled(bool enabled, bool threadLocked) {
+    mCallback->checkSuspendOnEffectEnabled(this, enabled, threadLocked);
+}
+
+static String8 effectFlagsToString(uint32_t flags) {
+    String8 s;
+
+    s.append("conn. mode: ");
+    switch (flags & EFFECT_FLAG_TYPE_MASK) {
+    case EFFECT_FLAG_TYPE_INSERT: s.append("insert"); break;
+    case EFFECT_FLAG_TYPE_AUXILIARY: s.append("auxiliary"); break;
+    case EFFECT_FLAG_TYPE_REPLACE: s.append("replace"); break;
+    case EFFECT_FLAG_TYPE_PRE_PROC: s.append("preproc"); break;
+    case EFFECT_FLAG_TYPE_POST_PROC: s.append("postproc"); break;
+    default: s.append("unknown/reserved"); break;
+    }
+    s.append(", ");
+
+    s.append("insert pref: ");
+    switch (flags & EFFECT_FLAG_INSERT_MASK) {
+    case EFFECT_FLAG_INSERT_ANY: s.append("any"); break;
+    case EFFECT_FLAG_INSERT_FIRST: s.append("first"); break;
+    case EFFECT_FLAG_INSERT_LAST: s.append("last"); break;
+    case EFFECT_FLAG_INSERT_EXCLUSIVE: s.append("exclusive"); break;
+    default: s.append("unknown/reserved"); break;
+    }
+    s.append(", ");
+
+    s.append("volume mgmt: ");
+    switch (flags & EFFECT_FLAG_VOLUME_MASK) {
+    case EFFECT_FLAG_VOLUME_NONE: s.append("none"); break;
+    case EFFECT_FLAG_VOLUME_CTRL: s.append("implements control"); break;
+    case EFFECT_FLAG_VOLUME_IND: s.append("requires indication"); break;
+    case EFFECT_FLAG_VOLUME_MONITOR: s.append("monitors volume"); break;
+    default: s.append("unknown/reserved"); break;
+    }
+    s.append(", ");
+
+    uint32_t devind = flags & EFFECT_FLAG_DEVICE_MASK;
+    if (devind) {
+        s.append("device indication: ");
+        switch (devind) {
+        case EFFECT_FLAG_DEVICE_IND: s.append("requires updates"); break;
+        default: s.append("unknown/reserved"); break;
+        }
+        s.append(", ");
+    }
+
+    s.append("input mode: ");
+    switch (flags & EFFECT_FLAG_INPUT_MASK) {
+    case EFFECT_FLAG_INPUT_DIRECT: s.append("direct"); break;
+    case EFFECT_FLAG_INPUT_PROVIDER: s.append("provider"); break;
+    case EFFECT_FLAG_INPUT_BOTH: s.append("direct+provider"); break;
+    default: s.append("not set"); break;
+    }
+    s.append(", ");
+
+    s.append("output mode: ");
+    switch (flags & EFFECT_FLAG_OUTPUT_MASK) {
+    case EFFECT_FLAG_OUTPUT_DIRECT: s.append("direct"); break;
+    case EFFECT_FLAG_OUTPUT_PROVIDER: s.append("provider"); break;
+    case EFFECT_FLAG_OUTPUT_BOTH: s.append("direct+provider"); break;
+    default: s.append("not set"); break;
+    }
+    s.append(", ");
+
+    uint32_t accel = flags & EFFECT_FLAG_HW_ACC_MASK;
+    if (accel) {
+        s.append("hardware acceleration: ");
+        switch (accel) {
+        case EFFECT_FLAG_HW_ACC_SIMPLE: s.append("non-tunneled"); break;
+        case EFFECT_FLAG_HW_ACC_TUNNEL: s.append("tunneled"); break;
+        default: s.append("unknown/reserved"); break;
+        }
+        s.append(", ");
+    }
+
+    uint32_t modeind = flags & EFFECT_FLAG_AUDIO_MODE_MASK;
+    if (modeind) {
+        s.append("mode indication: ");
+        switch (modeind) {
+        case EFFECT_FLAG_AUDIO_MODE_IND: s.append("required"); break;
+        default: s.append("unknown/reserved"); break;
+        }
+        s.append(", ");
+    }
+
+    uint32_t srcind = flags & EFFECT_FLAG_AUDIO_SOURCE_MASK;
+    if (srcind) {
+        s.append("source indication: ");
+        switch (srcind) {
+        case EFFECT_FLAG_AUDIO_SOURCE_IND: s.append("required"); break;
+        default: s.append("unknown/reserved"); break;
+        }
+        s.append(", ");
+    }
+
+    if (flags & EFFECT_FLAG_OFFLOAD_MASK) {
+        s.append("offloadable, ");
+    }
+
+    int len = s.length();
+    if (s.length() > 2) {
+        (void) s.lockBuffer(len);
+        s.unlockBuffer(len - 2);
+    }
+    return s;
+}
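
For context, a minimal usage sketch of the helper above (illustrative only; exampleDumpFlags() is hypothetical, and the trailing ", " is trimmed by the lockBuffer()/unlockBuffer() step):

    static void exampleDumpFlags() {
        // EFFECT_FLAG_* constants come from the effect framework headers already used above.
        const uint32_t flags = EFFECT_FLAG_TYPE_INSERT | EFFECT_FLAG_INSERT_LAST
                | EFFECT_FLAG_VOLUME_CTRL;
        // Expected output, roughly:
        // "conn. mode: insert, insert pref: last, volume mgmt: implements control,
        //  input mode: not set, output mode: not set"
        ALOGV("flags: %s", effectFlagsToString(flags).string());
    }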
+
+void AudioFlinger::EffectBase::dump(int fd, const Vector<String16>& args __unused)
+{
+    String8 result;
+
+    result.appendFormat("\tEffect ID %d:\n", mId);
+
+    bool locked = AudioFlinger::dumpTryLock(mLock);
+    // failed to lock - AudioFlinger is probably deadlocked
+    if (!locked) {
+        result.append("\t\tCould not lock Fx mutex:\n");
+    }
+
+    result.append("\t\tSession State Registered Enabled Suspended:\n");
+    result.appendFormat("\t\t%05d   %03d   %s          %s       %s\n",
+            mSessionId, mState, mPolicyRegistered ? "y" : "n",
+            mPolicyEnabled ? "y" : "n", mSuspended ? "y" : "n");
+
+    result.append("\t\tDescriptor:\n");
+    char uuidStr[64];
+    AudioEffect::guidToString(&mDescriptor.uuid, uuidStr, sizeof(uuidStr));
+    result.appendFormat("\t\t- UUID: %s\n", uuidStr);
+    AudioEffect::guidToString(&mDescriptor.type, uuidStr, sizeof(uuidStr));
+    result.appendFormat("\t\t- TYPE: %s\n", uuidStr);
+    result.appendFormat("\t\t- apiVersion: %08X\n\t\t- flags: %08X (%s)\n",
+            mDescriptor.apiVersion,
+            mDescriptor.flags,
+            effectFlagsToString(mDescriptor.flags).string());
+    result.appendFormat("\t\t- name: %s\n",
+            mDescriptor.name);
+
+    result.appendFormat("\t\t- implementor: %s\n",
+            mDescriptor.implementor);
+
+    result.appendFormat("\t\t%zu Clients:\n", mHandles.size());
+    result.append("\t\t\t  Pid Priority Ctrl Locked client server\n");
+    char buffer[256];
+    for (size_t i = 0; i < mHandles.size(); ++i) {
+        EffectHandle *handle = mHandles[i];
+        if (handle != NULL && !handle->disconnected()) {
+            handle->dumpToBuffer(buffer, sizeof(buffer));
+            result.append(buffer);
+        }
+    }
+    if (locked) {
+        mLock.unlock();
+    }
+
+    write(fd, result.string(), result.length());
+}
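
A minimal sketch (illustrative only, not part of the patch) of the dump-under-try-lock pattern used above, written against the plain Mutex::tryLock() API; dumpWithTryLock() and the state it would append are hypothetical:

    static void dumpWithTryLock(int fd, Mutex& mutex, String8& result) {
        const bool locked = (mutex.tryLock() == NO_ERROR);
        if (!locked) {
            // keep dumping anyway: a dump must never block on a possibly deadlocked service
            result.append("\t\tCould not lock mutex, state may be inconsistent\n");
        }
        // ... append the protected state to result here ...
        if (locked) {
            mutex.unlock();   // only release what was actually acquired
        }
        write(fd, result.string(), result.length());
    }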
+
+// ----------------------------------------------------------------------------
+//  EffectModule implementation
+// ----------------------------------------------------------------------------
+
+#undef LOG_TAG
+#define LOG_TAG "AudioFlinger::EffectModule"
+
+AudioFlinger::EffectModule::EffectModule(const sp<AudioFlinger::EffectCallbackInterface>& callback,
+                                         effect_descriptor_t *desc,
+                                         int id,
+                                         audio_session_t sessionId,
+                                         bool pinned,
+                                         audio_port_handle_t deviceId)
+    : EffectBase(callback, desc, id, sessionId, pinned),
+      // clear mConfig to ensure consistent initial value of buffer framecount
+      // in case buffers are associated by setInBuffer() or setOutBuffer()
+      // prior to configure().
+      mConfig{{}, {}},
+      mStatus(NO_INIT),
+      mMaxDisableWaitCnt(1), // set by configure(), should be >= 1
+      mDisableWaitCnt(0),    // set by process() and updateState()
+      mOffloaded(false)
+#ifdef FLOAT_EFFECT_CHAIN
+      , mSupportsFloat(false)
+#endif
+{
+    ALOGV("Constructor %p pinned %d", this, pinned);
+    int lStatus;
+
+    // create effect engine from effect factory
+    mStatus = callback->createEffectHal(
+            &desc->uuid, sessionId, deviceId, &mEffectInterface);
+    if (mStatus != NO_ERROR) {
+        return;
+    }
+    lStatus = init();
+    if (lStatus < 0) {
+        mStatus = lStatus;
+        goto Error;
+    }
+
+    setOffloaded(callback->isOffload(), callback->io());
+    ALOGV("Constructor success name %s, Interface %p", mDescriptor.name, mEffectInterface.get());
+
+    return;
+Error:
+    mEffectInterface.clear();
+    ALOGV("Constructor Error %d", mStatus);
+}
+
+AudioFlinger::EffectModule::~EffectModule()
+{
+    ALOGV("Destructor %p", this);
+    if (mEffectInterface != 0) {
+        char uuidStr[64];
+        AudioEffect::guidToString(&mDescriptor.uuid, uuidStr, sizeof(uuidStr));
+        ALOGW("EffectModule %p destructor called with unreleased interface, effect %s",
+                this, uuidStr);
+        release_l();
+    }
+
+}
+
+ssize_t AudioFlinger::EffectModule::removeHandle_l(EffectHandle *handle)
+{
+    ssize_t status = EffectBase::removeHandle_l(handle);
+
+    // Prevent calls to process() and other functions on the effect interface from now on.
+    // The effect engine will be released by the destructor when the last strong reference on
+    // this object is released, which can happen after the next process() call.
+    if (status == 0 && !mPinned) {
+        mEffectInterface->close();
+    }
+
+    return status;
+}
+
 bool AudioFlinger::EffectModule::updateState() {
     Mutex::Autolock _l(mLock);
 
@@ -449,7 +737,7 @@
             }
             if (!mSupportsFloat) { // convert input to int16_t as effect doesn't support float.
                 if (!auxType) {
-                    if (mInConversionBuffer.get() == nullptr) {
+                    if (mInConversionBuffer == nullptr) {
                         ALOGW("%s: mInConversionBuffer is null, bypassing", __func__);
                         goto data_bypass;
                     }
@@ -460,7 +748,7 @@
                     inBuffer = mInConversionBuffer;
                 }
                 if (mConfig.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE) {
-                    if (mOutConversionBuffer.get() == nullptr) {
+                    if (mOutConversionBuffer == nullptr) {
                         ALOGW("%s: mOutConversionBuffer is null, bypassing", __func__);
                         goto data_bypass;
                     }
@@ -599,7 +887,7 @@
     mConfig.outputCfg.format = EFFECT_BUFFER_FORMAT;
 
     // Don't use sample rate for thread if effect isn't offloadable.
-    if (mCallback->isOffload() && !isOffloaded()) {
+    if (mCallback->isOffloadOrDirect() && !isOffloaded()) {
         mConfig.inputCfg.samplingRate = DEFAULT_OUTPUT_SAMPLE_RATE;
         ALOGV("Overriding effect input as 48kHz");
     } else {
@@ -633,9 +921,8 @@
     mConfig.outputCfg.buffer.frameCount = mConfig.inputCfg.buffer.frameCount;
 
     ALOGV("configure() %p chain %p buffer %p framecount %zu",
-            this, mCallback->chain().promote() != nullptr ? mCallback->chain().promote().get() :
-                                                            nullptr,
-              mConfig.inputCfg.buffer.raw, mConfig.inputCfg.buffer.frameCount);
+          this, mCallback->chain().promote().get(),
+          mConfig.inputCfg.buffer.raw, mConfig.inputCfg.buffer.frameCount);
 
     status_t cmdStatus;
     size = sizeof(int);
@@ -950,89 +1237,6 @@
     return status;
 }
 
-void AudioFlinger::EffectModule::checkSuspendOnEffectEnabled(bool enabled, bool threadLocked) {
-    mCallback->checkSuspendOnEffectEnabled(this, enabled, threadLocked);
-}
-
-status_t AudioFlinger::EffectModule::setEnabled(bool enabled, bool fromHandle)
-{
-    status_t status;
-    {
-        Mutex::Autolock _l(mLock);
-        status = setEnabled_l(enabled);
-    }
-    if (fromHandle) {
-        if (enabled) {
-            if (status != NO_ERROR) {
-                mCallback->checkSuspendOnEffectEnabled(this, false, false /*threadLocked*/);
-            } else {
-                mCallback->onEffectEnable(this);
-            }
-        } else {
-            mCallback->onEffectDisable(this);
-        }
-    }
-    return status;
-}
-
-// must be called with EffectModule::mLock held
-status_t AudioFlinger::EffectModule::setEnabled_l(bool enabled)
-{
-
-    ALOGV("setEnabled %p enabled %d", this, enabled);
-
-    if (enabled != isEnabled()) {
-        switch (mState) {
-        // going from disabled to enabled
-        case IDLE:
-            mState = STARTING;
-            break;
-        case STOPPED:
-            mState = RESTART;
-            break;
-        case STOPPING:
-            mState = ACTIVE;
-            break;
-
-        // going from enabled to disabled
-        case RESTART:
-            mState = STOPPED;
-            break;
-        case STARTING:
-            mState = IDLE;
-            break;
-        case ACTIVE:
-            mState = STOPPING;
-            break;
-        case DESTROYED:
-            return NO_ERROR; // simply ignore as we are being destroyed
-        }
-        for (size_t i = 1; i < mHandles.size(); i++) {
-            EffectHandle *h = mHandles[i];
-            if (h != NULL && !h->disconnected()) {
-                h->setEnabled(enabled);
-            }
-        }
-    }
-    return NO_ERROR;
-}
-
-bool AudioFlinger::EffectModule::isEnabled() const
-{
-    switch (mState) {
-    case RESTART:
-    case STARTING:
-    case ACTIVE:
-        return true;
-    case IDLE:
-    case STOPPING:
-    case STOPPED:
-    case DESTROYED:
-    default:
-        return false;
-    }
-}
-
 bool AudioFlinger::EffectModule::isProcessEnabled() const
 {
     if (mStatus != NO_ERROR) {
@@ -1085,7 +1289,7 @@
     const uint32_t inChannelCount =
             audio_channel_count_from_out_mask(mConfig.inputCfg.channels);
     const bool formatMismatch = !mSupportsFloat || mInChannelCountRequested != inChannelCount;
-    if (!auxType && formatMismatch && mInBuffer.get() != nullptr) {
+    if (!auxType && formatMismatch && mInBuffer != nullptr) {
         // we need to translate - create hidl shared buffer and intercept
         const size_t inFrameCount = mConfig.inputCfg.buffer.frameCount;
         // Use FCC_2 in case mInChannelCountRequested is mono and the effect is stereo.
@@ -1095,13 +1299,13 @@
         ALOGV("%s: setInBuffer updating for inChannels:%d inFrameCount:%zu total size:%zu",
                 __func__, inChannels, inFrameCount, size);
 
-        if (size > 0 && (mInConversionBuffer.get() == nullptr
+        if (size > 0 && (mInConversionBuffer == nullptr
                 || size > mInConversionBuffer->getSize())) {
             mInConversionBuffer.clear();
             ALOGV("%s: allocating mInConversionBuffer %zu", __func__, size);
             (void)mCallback->allocateHalBuffer(size, &mInConversionBuffer);
         }
-        if (mInConversionBuffer.get() != nullptr) {
+        if (mInConversionBuffer != nullptr) {
             mInConversionBuffer->setFrameCount(inFrameCount);
             mEffectInterface->setInBuffer(mInConversionBuffer);
         } else if (size > 0) {
@@ -1130,7 +1334,7 @@
     const uint32_t outChannelCount =
             audio_channel_count_from_out_mask(mConfig.outputCfg.channels);
     const bool formatMismatch = !mSupportsFloat || mOutChannelCountRequested != outChannelCount;
-    if (formatMismatch && mOutBuffer.get() != nullptr) {
+    if (formatMismatch && mOutBuffer != nullptr) {
         const size_t outFrameCount = mConfig.outputCfg.buffer.frameCount;
         // Use FCC_2 in case mOutChannelCountRequested is mono and the effect is stereo.
         const uint32_t outChannels = std::max((uint32_t)FCC_2, mOutChannelCountRequested);
@@ -1139,13 +1343,13 @@
         ALOGV("%s: setOutBuffer updating for outChannels:%d outFrameCount:%zu total size:%zu",
                 __func__, outChannels, outFrameCount, size);
 
-        if (size > 0 && (mOutConversionBuffer.get() == nullptr
+        if (size > 0 && (mOutConversionBuffer == nullptr
                 || size > mOutConversionBuffer->getSize())) {
             mOutConversionBuffer.clear();
             ALOGV("%s: allocating mOutConversionBuffer %zu", __func__, size);
             (void)mCallback->allocateHalBuffer(size, &mOutConversionBuffer);
         }
-        if (mOutConversionBuffer.get() != nullptr) {
+        if (mOutConversionBuffer != nullptr) {
             mOutConversionBuffer->setFrameCount(outFrameCount);
             mEffectInterface->setOutBuffer(mOutConversionBuffer);
         } else if (size > 0) {
@@ -1274,30 +1478,6 @@
     return status;
 }
 
-void AudioFlinger::EffectModule::setSuspended(bool suspended)
-{
-    Mutex::Autolock _l(mLock);
-    mSuspended = suspended;
-}
-
-bool AudioFlinger::EffectModule::suspended() const
-{
-    Mutex::Autolock _l(mLock);
-    return mSuspended;
-}
-
-bool AudioFlinger::EffectModule::purgeHandles()
-{
-    bool enabled = false;
-    Mutex::Autolock _l(mLock);
-    EffectHandle *handle = controlHandle_l();
-    if (handle != NULL) {
-        enabled = handle->enabled();
-    }
-    mHandles.clear();
-    return enabled;
-}
-
 status_t AudioFlinger::EffectModule::setOffloaded(bool offloaded, audio_io_handle_t io)
 {
     Mutex::Autolock _l(mLock);
@@ -1337,115 +1517,10 @@
     return mOffloaded;
 }
 
-String8 effectFlagsToString(uint32_t flags) {
-    String8 s;
-
-    s.append("conn. mode: ");
-    switch (flags & EFFECT_FLAG_TYPE_MASK) {
-    case EFFECT_FLAG_TYPE_INSERT: s.append("insert"); break;
-    case EFFECT_FLAG_TYPE_AUXILIARY: s.append("auxiliary"); break;
-    case EFFECT_FLAG_TYPE_REPLACE: s.append("replace"); break;
-    case EFFECT_FLAG_TYPE_PRE_PROC: s.append("preproc"); break;
-    case EFFECT_FLAG_TYPE_POST_PROC: s.append("postproc"); break;
-    default: s.append("unknown/reserved"); break;
-    }
-    s.append(", ");
-
-    s.append("insert pref: ");
-    switch (flags & EFFECT_FLAG_INSERT_MASK) {
-    case EFFECT_FLAG_INSERT_ANY: s.append("any"); break;
-    case EFFECT_FLAG_INSERT_FIRST: s.append("first"); break;
-    case EFFECT_FLAG_INSERT_LAST: s.append("last"); break;
-    case EFFECT_FLAG_INSERT_EXCLUSIVE: s.append("exclusive"); break;
-    default: s.append("unknown/reserved"); break;
-    }
-    s.append(", ");
-
-    s.append("volume mgmt: ");
-    switch (flags & EFFECT_FLAG_VOLUME_MASK) {
-    case EFFECT_FLAG_VOLUME_NONE: s.append("none"); break;
-    case EFFECT_FLAG_VOLUME_CTRL: s.append("implements control"); break;
-    case EFFECT_FLAG_VOLUME_IND: s.append("requires indication"); break;
-    case EFFECT_FLAG_VOLUME_MONITOR: s.append("monitors volume"); break;
-    default: s.append("unknown/reserved"); break;
-    }
-    s.append(", ");
-
-    uint32_t devind = flags & EFFECT_FLAG_DEVICE_MASK;
-    if (devind) {
-        s.append("device indication: ");
-        switch (devind) {
-        case EFFECT_FLAG_DEVICE_IND: s.append("requires updates"); break;
-        default: s.append("unknown/reserved"); break;
-        }
-        s.append(", ");
-    }
-
-    s.append("input mode: ");
-    switch (flags & EFFECT_FLAG_INPUT_MASK) {
-    case EFFECT_FLAG_INPUT_DIRECT: s.append("direct"); break;
-    case EFFECT_FLAG_INPUT_PROVIDER: s.append("provider"); break;
-    case EFFECT_FLAG_INPUT_BOTH: s.append("direct+provider"); break;
-    default: s.append("not set"); break;
-    }
-    s.append(", ");
-
-    s.append("output mode: ");
-    switch (flags & EFFECT_FLAG_OUTPUT_MASK) {
-    case EFFECT_FLAG_OUTPUT_DIRECT: s.append("direct"); break;
-    case EFFECT_FLAG_OUTPUT_PROVIDER: s.append("provider"); break;
-    case EFFECT_FLAG_OUTPUT_BOTH: s.append("direct+provider"); break;
-    default: s.append("not set"); break;
-    }
-    s.append(", ");
-
-    uint32_t accel = flags & EFFECT_FLAG_HW_ACC_MASK;
-    if (accel) {
-        s.append("hardware acceleration: ");
-        switch (accel) {
-        case EFFECT_FLAG_HW_ACC_SIMPLE: s.append("non-tunneled"); break;
-        case EFFECT_FLAG_HW_ACC_TUNNEL: s.append("tunneled"); break;
-        default: s.append("unknown/reserved"); break;
-        }
-        s.append(", ");
-    }
-
-    uint32_t modeind = flags & EFFECT_FLAG_AUDIO_MODE_MASK;
-    if (modeind) {
-        s.append("mode indication: ");
-        switch (modeind) {
-        case EFFECT_FLAG_AUDIO_MODE_IND: s.append("required"); break;
-        default: s.append("unknown/reserved"); break;
-        }
-        s.append(", ");
-    }
-
-    uint32_t srcind = flags & EFFECT_FLAG_AUDIO_SOURCE_MASK;
-    if (srcind) {
-        s.append("source indication: ");
-        switch (srcind) {
-        case EFFECT_FLAG_AUDIO_SOURCE_IND: s.append("required"); break;
-        default: s.append("unknown/reserved"); break;
-        }
-        s.append(", ");
-    }
-
-    if (flags & EFFECT_FLAG_OFFLOAD_MASK) {
-        s.append("offloadable, ");
-    }
-
-    int len = s.length();
-    if (s.length() > 2) {
-        (void) s.lockBuffer(len);
-        s.unlockBuffer(len - 2);
-    }
-    return s;
-}
-
 static std::string dumpInOutBuffer(bool isInput, const sp<EffectBufferHalInterface> &buffer) {
     std::stringstream ss;
 
-    if (buffer.get() == nullptr) {
+    if (buffer == nullptr) {
         return "nullptr"; // make different than below
     } else if (buffer->externalData() != nullptr) {
         ss << (isInput ? buffer->externalData() : buffer->audioBuffer()->raw)
@@ -1457,38 +1532,16 @@
     return ss.str();
 }
 
-void AudioFlinger::EffectModule::dump(int fd, const Vector<String16>& args __unused)
+void AudioFlinger::EffectModule::dump(int fd, const Vector<String16>& args)
 {
+    EffectBase::dump(fd, args);
+
     String8 result;
-
-    result.appendFormat("\tEffect ID %d:\n", mId);
-
     bool locked = AudioFlinger::dumpTryLock(mLock);
-    // failed to lock - AudioFlinger is probably deadlocked
-    if (!locked) {
-        result.append("\t\tCould not lock Fx mutex:\n");
-    }
 
-    result.append("\t\tSession Status State Registered Enabled Suspended Engine:\n");
-    result.appendFormat("\t\t%05d   %03d    %03d   %s          %s       %s         %p\n",
-            mSessionId, mStatus, mState, mPolicyRegistered ? "y" : "n", mPolicyEnabled ? "y" : "n",
-            mSuspended ? "y" : "n", mEffectInterface.get());
-
-    result.append("\t\tDescriptor:\n");
-    char uuidStr[64];
-    AudioEffect::guidToString(&mDescriptor.uuid, uuidStr, sizeof(uuidStr));
-    result.appendFormat("\t\t- UUID: %s\n", uuidStr);
-    AudioEffect::guidToString(&mDescriptor.type, uuidStr, sizeof(uuidStr));
-    result.appendFormat("\t\t- TYPE: %s\n", uuidStr);
-    result.appendFormat("\t\t- apiVersion: %08X\n\t\t- flags: %08X (%s)\n",
-            mDescriptor.apiVersion,
-            mDescriptor.flags,
-            effectFlagsToString(mDescriptor.flags).string());
-    result.appendFormat("\t\t- name: %s\n",
-            mDescriptor.name);
-
-    result.appendFormat("\t\t- implementor: %s\n",
-            mDescriptor.implementor);
+    result.append("\t\tStatus Engine:\n");
+    result.appendFormat("\t\t%03d    %p\n",
+            mStatus, mEffectInterface.get());
 
     result.appendFormat("\t\t- data: %s\n", mSupportsFloat ? "float" : "int16");
 
@@ -1522,17 +1575,6 @@
             dumpInOutBuffer(false /* isInput */, mOutConversionBuffer).c_str());
 #endif
 
-    result.appendFormat("\t\t%zu Clients:\n", mHandles.size());
-    result.append("\t\t\t  Pid Priority Ctrl Locked client server\n");
-    char buffer[256];
-    for (size_t i = 0; i < mHandles.size(); ++i) {
-        EffectHandle *handle = mHandles[i];
-        if (handle != NULL && !handle->disconnected()) {
-            handle->dumpToBuffer(buffer, sizeof(buffer));
-            result.append(buffer);
-        }
-    }
-
     write(fd, result.string(), result.length());
 
     if (mEffectInterface != 0) {
@@ -1552,7 +1594,7 @@
 #undef LOG_TAG
 #define LOG_TAG "AudioFlinger::EffectHandle"
 
-AudioFlinger::EffectHandle::EffectHandle(const sp<EffectModule>& effect,
+AudioFlinger::EffectHandle::EffectHandle(const sp<EffectBase>& effect,
                                         const sp<AudioFlinger::Client>& client,
                                         const sp<IEffectClient>& effectClient,
                                         int32_t priority)
@@ -1560,7 +1602,7 @@
     mEffect(effect), mEffectClient(effectClient), mClient(client), mCblk(NULL),
     mPriority(priority), mHasControl(false), mEnabled(false), mDisconnected(false)
 {
-    ALOGV("constructor %p", this);
+    ALOGV("constructor %p client %p", this, client.get());
 
     if (client == 0) {
         return;
@@ -1593,7 +1635,7 @@
 {
     AutoMutex _l(mLock);
     ALOGV("enable %p", this);
-    sp<EffectModule> effect = mEffect.promote();
+    sp<EffectBase> effect = mEffect.promote();
     if (effect == 0 || mDisconnected) {
         return DEAD_OBJECT;
     }
@@ -1631,7 +1673,7 @@
 {
     ALOGV("disable %p", this);
     AutoMutex _l(mLock);
-    sp<EffectModule> effect = mEffect.promote();
+    sp<EffectBase> effect = mEffect.promote();
     if (effect == 0 || mDisconnected) {
         return DEAD_OBJECT;
     }
@@ -1672,7 +1714,7 @@
     }
     mDisconnected = true;
     {
-        sp<EffectModule> effect = mEffect.promote();
+        sp<EffectBase> effect = mEffect.promote();
         if (effect != 0) {
             if (effect->disconnectHandle(this, unpinIfLast) > 0) {
                 ALOGW("%s Effect handle %p disconnected after thread destruction",
@@ -1740,7 +1782,7 @@
     }
 
     AutoMutex _l(mLock);
-    sp<EffectModule> effect = mEffect.promote();
+    sp<EffectBase> effect = mEffect.promote();
     if (effect == 0 || mDisconnected) {
         return DEAD_OBJECT;
     }
@@ -1748,12 +1790,13 @@
     if (!mHasControl && cmdCode != EFFECT_CMD_GET_PARAM) {
         return INVALID_OPERATION;
     }
-    if (mClient == 0) {
-        return INVALID_OPERATION;
-    }
 
     // handle commands that are not forwarded transparently to effect engine
     if (cmdCode == EFFECT_CMD_SET_PARAM_COMMIT) {
+        if (mClient == 0) {
+            return INVALID_OPERATION;
+        }
+
         if (*replySize < sizeof(int)) {
             android_errorWriteLog(0x534e4554, "32095713");
             return BAD_VALUE;
@@ -1788,12 +1831,13 @@
             }
 
             // copy to local memory in case of client corruption b/32220769
-            param = (effect_param_t *)realloc(param, size);
-            if (param == NULL) {
+            auto *newParam = (effect_param_t *)realloc(param, size);
+            if (newParam == NULL) {
                 ALOGW("command(): out of memory");
                 status = NO_MEMORY;
                 break;
             }
+            param = newParam;
             memcpy(param, p, size);
 
             int reply = 0;
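
The hunk above keeps the original pointer in a temporary so the old allocation is not leaked when realloc() fails; a standalone sketch of that idiom (growParam() and its ownership policy are illustrative only, not part of the patch):

    static effect_param_t *growParam(effect_param_t *param, size_t size) {
        auto *newParam = (effect_param_t *)realloc(param, size);
        if (newParam == NULL) {
            // realloc() failed: the original block is still valid and still owned by the
            // caller, instead of being leaked by writing "param = realloc(param, size)".
            return NULL;
        }
        return newParam;   // may or may not be the same address as the original block
    }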
@@ -1892,19 +1936,20 @@
 #undef LOG_TAG
 #define LOG_TAG "AudioFlinger::EffectChain"
 
-AudioFlinger::EffectChain::EffectChain(ThreadBase *thread,
-                                        audio_session_t sessionId)
+AudioFlinger::EffectChain::EffectChain(const wp<ThreadBase>& thread,
+                                       audio_session_t sessionId)
     : mSessionId(sessionId), mActiveTrackCnt(0), mTrackCnt(0), mTailBufferCount(0),
       mVolumeCtrlIdx(-1), mLeftVolume(UINT_MAX), mRightVolume(UINT_MAX),
       mNewLeftVolume(UINT_MAX), mNewRightVolume(UINT_MAX),
-      mEffectCallback(new EffectCallback(this, thread, thread->mAudioFlinger.get()))
+      mEffectCallback(new EffectCallback(wp<EffectChain>(this), thread))
 {
     mStrategy = AudioSystem::getStrategyForStream(AUDIO_STREAM_MUSIC);
-    if (thread == nullptr) {
+    sp<ThreadBase> p = thread.promote();
+    if (p == nullptr) {
         return;
     }
-    mMaxTailBuffers = ((kProcessTailDurationMs * thread->sampleRate()) / 1000) /
-                                    thread->frameCount();
+    mMaxTailBuffers = ((kProcessTailDurationMs * p->sampleRate()) / 1000) /
+                                    p->frameCount();
 }
 
 AudioFlinger::EffectChain::~EffectChain()
@@ -2042,7 +2087,7 @@
                                                    bool pinned)
 {
     Mutex::Autolock _l(mLock);
-    effect = new EffectModule(mEffectCallback, desc, id, sessionId, pinned);
+    effect = new EffectModule(mEffectCallback, desc, id, sessionId, pinned, AUDIO_PORT_HANDLE_NONE);
     status_t lStatus = effect->status();
     if (lStatus == NO_ERROR) {
         lStatus = addEffect_ll(effect);
@@ -2593,7 +2638,7 @@
 void AudioFlinger::EffectChain::setThread(const sp<ThreadBase>& thread)
 {
     Mutex::Autolock _l(mLock);
-    mEffectCallback->setThread(thread.get());
+    mEffectCallback->setThread(thread);
 }
 
 void AudioFlinger::EffectChain::checkOutputFlagCompatibility(audio_output_flags_t *flags) const
@@ -2670,12 +2715,13 @@
 }
 
 bool AudioFlinger::EffectChain::EffectCallback::updateOrphanEffectChains(
-        const sp<AudioFlinger::EffectModule>& effect) {
+        const sp<AudioFlinger::EffectBase>& effect) {
     sp<AudioFlinger> af = mAudioFlinger.promote();
     if (af == nullptr) {
         return false;
     }
-    return af->updateOrphanEffectChains(effect);
+    // in EffectChain context, an EffectBase is always from an EffectModule so static cast is safe
+    return af->updateOrphanEffectChains(effect->asEffectModule());
 }
 
 status_t AudioFlinger::EffectChain::EffectCallback::allocateHalBuffer(
@@ -2806,7 +2852,7 @@
 }
 
 void AudioFlinger::EffectChain::EffectCallback::checkSuspendOnEffectEnabled(
-        const sp<EffectModule>& effect, bool enabled, bool threadLocked) {
+        const sp<EffectBase>& effect, bool enabled, bool threadLocked) {
     sp<ThreadBase> t = mThread.promote();
     if (t == nullptr) {
         return;
@@ -2817,18 +2863,20 @@
     if (c == nullptr) {
         return;
     }
-    c->checkSuspendOnEffectEnabled(effect, enabled);
+    // in EffectChain context, an EffectBase is always from an EffectModule so static cast is safe
+    c->checkSuspendOnEffectEnabled(effect->asEffectModule(), enabled);
 }
 
-void AudioFlinger::EffectChain::EffectCallback::onEffectEnable(const sp<EffectModule>& effect) {
+void AudioFlinger::EffectChain::EffectCallback::onEffectEnable(const sp<EffectBase>& effect) {
     sp<ThreadBase> t = mThread.promote();
     if (t == nullptr) {
         return;
     }
-    t->onEffectEnable(effect);
+    // in EffectChain context, an EffectBase is always from an EffectModule so static cast is safe
+    t->onEffectEnable(effect->asEffectModule());
 }
 
-void AudioFlinger::EffectChain::EffectCallback::onEffectDisable(const sp<EffectModule>& effect) {
+void AudioFlinger::EffectChain::EffectCallback::onEffectDisable(const sp<EffectBase>& effect) {
     checkSuspendOnEffectEnabled(effect, false, false /*threadLocked*/);
 
     sp<ThreadBase> t = mThread.promote();
@@ -2873,4 +2921,326 @@
     return c->activeTrackCnt();
 }
 
+
+#undef LOG_TAG
+#define LOG_TAG "AudioFlinger::DeviceEffectProxy"
+
+status_t AudioFlinger::DeviceEffectProxy::setEnabled(bool enabled, bool fromHandle)
+{
+    status_t status = EffectBase::setEnabled(enabled, fromHandle);
+    Mutex::Autolock _l(mProxyLock);
+    if (status == NO_ERROR) {
+        for (auto& handle : mEffectHandles) {
+            if (enabled) {
+                status = handle.second->enable();
+            } else {
+                status = handle.second->disable();
+            }
+        }
+    }
+    ALOGV("%s enable %d status %d", __func__, enabled, status);
+    return status;
+}
+
+status_t AudioFlinger::DeviceEffectProxy::init(
+        const std::map <audio_patch_handle_t, PatchPanel::Patch>& patches) {
+    // For all audio patches
+    // If src or sink device match
+    // If the effect is HW accelerated
+    //     if no corresponding effect module
+    //         Create EffectModule: mHalEffect
+    // Create and attach EffectHandle
+    // If the effect is not HW accelerated and the patch sink or src is a mixer port
+    //     Create Effect on patch input or output thread on session -1
+    // Add EffectHandle to EffectHandle map of Effect Proxy:
+    ALOGV("%s device type %d address %s", __func__,  mDevice.mType, mDevice.getAddress());
+    status_t status = NO_ERROR;
+    for (auto &patch : patches) {
+        status = onCreatePatch(patch.first, patch.second);
+        ALOGV("%s onCreatePatch status %d", __func__, status);
+        if (status == BAD_VALUE) {
+            return status;
+        }
+    }
+    return status;
+}
+
+status_t AudioFlinger::DeviceEffectProxy::onCreatePatch(
+        audio_patch_handle_t patchHandle, const AudioFlinger::PatchPanel::Patch& patch) {
+    status_t status = NAME_NOT_FOUND;
+    sp<EffectHandle> handle;
+    // only consider source[0] as this is the only "true" source of a patch
+    status = checkPort(patch, &patch.mAudioPatch.sources[0], &handle);
+    ALOGV("%s source checkPort status %d", __func__, status);
+    for (uint32_t i = 0; i < patch.mAudioPatch.num_sinks && status == NAME_NOT_FOUND; i++) {
+        status = checkPort(patch, &patch.mAudioPatch.sinks[i], &handle);
+        ALOGV("%s sink %d checkPort status %d", __func__, i, status);
+    }
+    if (status == NO_ERROR || status == ALREADY_EXISTS) {
+        Mutex::Autolock _l(mProxyLock);
+        mEffectHandles.emplace(patchHandle, handle);
+    }
+    ALOGW_IF(status == BAD_VALUE,
+            "%s cannot attach effect %s on patch %d", __func__, mDescriptor.name, patchHandle);
+
+    return status;
+}
+
+status_t AudioFlinger::DeviceEffectProxy::checkPort(const PatchPanel::Patch& patch,
+        const struct audio_port_config *port, sp <EffectHandle> *handle) {
+
+    ALOGV("%s type %d device type %d address %s device ID %d patch.isSoftware() %d",
+            __func__, port->type, port->ext.device.type,
+            port->ext.device.address, port->id, patch.isSoftware());
+    if (port->type != AUDIO_PORT_TYPE_DEVICE || port->ext.device.type != mDevice.mType
+        || port->ext.device.address != mDevice.mAddress) {
+        return NAME_NOT_FOUND;
+    }
+    status_t status = NAME_NOT_FOUND;
+
+    if (mDescriptor.flags & EFFECT_FLAG_HW_ACC_TUNNEL) {
+        Mutex::Autolock _l(mProxyLock);
+        mDevicePort = *port;
+        mHalEffect = new EffectModule(mMyCallback,
+                                      const_cast<effect_descriptor_t *>(&mDescriptor),
+                                      mMyCallback->newEffectId(), AUDIO_SESSION_DEVICE,
+                                      false /* pinned */, port->id);
+        if (audio_is_input_device(mDevice.mType)) {
+            mHalEffect->setInputDevice(mDevice);
+        } else {
+            mHalEffect->setDevices({mDevice});
+        }
+        *handle = new EffectHandle(mHalEffect, nullptr, nullptr, 0 /*priority*/);
+        status = (*handle)->initCheck();
+        if (status == OK) {
+            status = mHalEffect->addHandle((*handle).get());
+        } else {
+            mHalEffect.clear();
+            mDevicePort.id = AUDIO_PORT_HANDLE_NONE;
+        }
+    } else if (patch.isSoftware() || patch.thread().promote() != nullptr) {
+        sp <ThreadBase> thread;
+        if (audio_port_config_has_input_direction(port)) {
+            if (patch.isSoftware()) {
+                thread = patch.mRecord.thread();
+            } else {
+                thread = patch.thread().promote();
+            }
+        } else {
+            if (patch.isSoftware()) {
+                thread = patch.mPlayback.thread();
+            } else {
+                thread = patch.thread().promote();
+            }
+        }
+        int enabled;
+        *handle = thread->createEffect_l(nullptr, nullptr, 0, AUDIO_SESSION_DEVICE,
+                                         const_cast<effect_descriptor_t *>(&mDescriptor),
+                                         &enabled, &status, false);
+        ALOGV("%s thread->createEffect_l status %d", __func__, status);
+    } else {
+        status = BAD_VALUE;
+    }
+    if (status == NO_ERROR || status == ALREADY_EXISTS) {
+        if (isEnabled()) {
+            (*handle)->enable();
+        } else {
+            (*handle)->disable();
+        }
+    }
+    return status;
+}
+
+void AudioFlinger::DeviceEffectProxy::onReleasePatch(audio_patch_handle_t patchHandle) {
+    Mutex::Autolock _l(mProxyLock);
+    mEffectHandles.erase(patchHandle);
+}
+
+
+size_t AudioFlinger::DeviceEffectProxy::removeEffect(const sp<EffectModule>& effect)
+{
+    Mutex::Autolock _l(mProxyLock);
+    if (effect == mHalEffect) {
+        mHalEffect.clear();
+        mDevicePort.id = AUDIO_PORT_HANDLE_NONE;
+    }
+    return mHalEffect == nullptr ? 0 : 1;
+}
+
+status_t AudioFlinger::DeviceEffectProxy::addEffectToHal(
+    sp<EffectHalInterface> effect) {
+    if (mHalEffect == nullptr) {
+        return NO_INIT;
+    }
+    return mManagerCallback->addEffectToHal(
+            mDevicePort.id, mDevicePort.ext.device.hw_module, effect);
+}
+
+status_t AudioFlinger::DeviceEffectProxy::removeEffectFromHal(
+    sp<EffectHalInterface> effect) {
+    if (mHalEffect == nullptr) {
+        return NO_INIT;
+    }
+    return mManagerCallback->removeEffectFromHal(
+            mDevicePort.id, mDevicePort.ext.device.hw_module, effect);
+}
+
+bool AudioFlinger::DeviceEffectProxy::isOutput() const {
+    if (mDevicePort.id != AUDIO_PORT_HANDLE_NONE) {
+        return mDevicePort.role == AUDIO_PORT_ROLE_SINK;
+    }
+    return true;
+}
+
+uint32_t AudioFlinger::DeviceEffectProxy::sampleRate() const {
+    if (mDevicePort.id != AUDIO_PORT_HANDLE_NONE &&
+            (mDevicePort.config_mask & AUDIO_PORT_CONFIG_SAMPLE_RATE) != 0) {
+        return mDevicePort.sample_rate;
+    }
+    return DEFAULT_OUTPUT_SAMPLE_RATE;
+}
+
+audio_channel_mask_t AudioFlinger::DeviceEffectProxy::channelMask() const {
+    if (mDevicePort.id != AUDIO_PORT_HANDLE_NONE &&
+            (mDevicePort.config_mask & AUDIO_PORT_CONFIG_CHANNEL_MASK) != 0) {
+        return mDevicePort.channel_mask;
+    }
+    return AUDIO_CHANNEL_OUT_STEREO;
+}
+
+uint32_t AudioFlinger::DeviceEffectProxy::channelCount() const {
+    if (isOutput()) {
+        return audio_channel_count_from_out_mask(channelMask());
+    }
+    return audio_channel_count_from_in_mask(channelMask());
+}
+
+void AudioFlinger::DeviceEffectProxy::dump(int fd, int spaces) {
+    const Vector<String16> args;
+    EffectBase::dump(fd, args);
+
+    const bool locked = dumpTryLock(mProxyLock);
+    if (!locked) {
+        String8 result("DeviceEffectProxy may be deadlocked\n");
+        write(fd, result.string(), result.size());
+    }
+
+    String8 outStr;
+    if (mHalEffect != nullptr) {
+        outStr.appendFormat("%*sHAL Effect Id: %d\n", spaces, "", mHalEffect->id());
+    } else {
+        outStr.appendFormat("%*sNO HAL Effect\n", spaces, "");
+    }
+    write(fd, outStr.string(), outStr.size());
+    outStr.clear();
+
+    outStr.appendFormat("%*sSub Effects:\n", spaces, "");
+    write(fd, outStr.string(), outStr.size());
+    outStr.clear();
+
+    for (const auto& iter : mEffectHandles) {
+        outStr.appendFormat("%*sEffect for patch handle %d:\n", spaces + 2, "", iter.first);
+        write(fd, outStr.string(), outStr.size());
+        outStr.clear();
+        sp<EffectBase> effect = iter.second->effect().promote();
+        if (effect != nullptr) {
+            effect->dump(fd, args);
+        }
+    }
+
+    if (locked) {
+        mProxyLock.unlock();
+    }
+}
+
+#undef LOG_TAG
+#define LOG_TAG "AudioFlinger::DeviceEffectProxy::ProxyCallback"
+
+int AudioFlinger::DeviceEffectProxy::ProxyCallback::newEffectId() {
+    return mManagerCallback->newEffectId();
+}
+
+
+bool AudioFlinger::DeviceEffectProxy::ProxyCallback::disconnectEffectHandle(
+        EffectHandle *handle, bool unpinIfLast) {
+    sp<EffectBase> effectBase = handle->effect().promote();
+    if (effectBase == nullptr) {
+        return false;
+    }
+
+    sp<EffectModule> effect = effectBase->asEffectModule();
+    if (effect == nullptr) {
+        return false;
+    }
+
+    // restore suspended effects if the disconnected handle was enabled and the last one.
+    bool remove = (effect->removeHandle(handle) == 0) && (!effect->isPinned() || unpinIfLast);
+    if (remove) {
+        sp<DeviceEffectProxy> proxy = mProxy.promote();
+        if (proxy != nullptr) {
+            proxy->removeEffect(effect);
+        }
+        if (handle->enabled()) {
+            effectBase->checkSuspendOnEffectEnabled(false, false /*threadLocked*/);
+        }
+    }
+    return true;
+}
+
+status_t AudioFlinger::DeviceEffectProxy::ProxyCallback::createEffectHal(
+        const effect_uuid_t *pEffectUuid, int32_t sessionId, int32_t deviceId,
+        sp<EffectHalInterface> *effect) {
+    return mManagerCallback->createEffectHal(pEffectUuid, sessionId, deviceId, effect);
+}
+
+status_t AudioFlinger::DeviceEffectProxy::ProxyCallback::addEffectToHal(
+        sp<EffectHalInterface> effect) {
+    sp<DeviceEffectProxy> proxy = mProxy.promote();
+    if (proxy == nullptr) {
+        return NO_INIT;
+    }
+    return proxy->addEffectToHal(effect);
+}
+
+status_t AudioFlinger::DeviceEffectProxy::ProxyCallback::removeEffectFromHal(
+        sp<EffectHalInterface> effect) {
+    sp<DeviceEffectProxy> proxy = mProxy.promote();
+    if (proxy == nullptr) {
+        return NO_INIT;
+    }
+    return proxy->removeEffectFromHal(effect);
+}
+
+bool AudioFlinger::DeviceEffectProxy::ProxyCallback::isOutput() const {
+    sp<DeviceEffectProxy> proxy = mProxy.promote();
+    if (proxy == nullptr) {
+        return true;
+    }
+    return proxy->isOutput();
+}
+
+uint32_t AudioFlinger::DeviceEffectProxy::ProxyCallback::sampleRate() const {
+    sp<DeviceEffectProxy> proxy = mProxy.promote();
+    if (proxy == nullptr) {
+        return DEFAULT_OUTPUT_SAMPLE_RATE;
+    }
+    return proxy->sampleRate();
+}
+
+audio_channel_mask_t AudioFlinger::DeviceEffectProxy::ProxyCallback::channelMask() const {
+    sp<DeviceEffectProxy> proxy = mProxy.promote();
+    if (proxy == nullptr) {
+        return AUDIO_CHANNEL_OUT_STEREO;
+    }
+    return proxy->channelMask();
+}
+
+uint32_t AudioFlinger::DeviceEffectProxy::ProxyCallback::channelCount() const {
+    sp<DeviceEffectProxy> proxy = mProxy.promote();
+    if (proxy == nullptr) {
+        return 2;
+    }
+    return proxy->channelCount();
+}
+
 } // namespace android
diff --git a/services/audioflinger/Effects.h b/services/audioflinger/Effects.h
index 0b0f9f5..1dfa1ae 100644
--- a/services/audioflinger/Effects.h
+++ b/services/audioflinger/Effects.h
@@ -33,7 +33,7 @@
     virtual bool isOffload() const = 0;
     virtual bool isOffloadOrDirect() const = 0;
     virtual bool isOffloadOrMmap() const = 0;
-    virtual uint32_t  sampleRate() const = 0;
+    virtual uint32_t sampleRate() const = 0;
     virtual audio_channel_mask_t channelMask() const = 0;
     virtual uint32_t channelCount() const = 0;
     virtual size_t frameCount() const = 0;
@@ -45,17 +45,17 @@
     virtual status_t removeEffectFromHal(sp<EffectHalInterface> effect) = 0;
     virtual void setVolumeForOutput(float left, float right) const = 0;
     virtual bool disconnectEffectHandle(EffectHandle *handle, bool unpinIfLast) = 0;
-    virtual void checkSuspendOnEffectEnabled(const sp<EffectModule>& effect,
+    virtual void checkSuspendOnEffectEnabled(const sp<EffectBase>& effect,
                                              bool enabled,
                                              bool threadLocked) = 0;
-    virtual void onEffectEnable(const sp<EffectModule>& effect) = 0;
-    virtual void onEffectDisable(const sp<EffectModule>& effect) = 0;
+    virtual void onEffectEnable(const sp<EffectBase>& effect) = 0;
+    virtual void onEffectDisable(const sp<EffectBase>& effect) = 0;
 
     // Methods usually implemented with help from AudioFlinger: pay attention to mutex locking order
     virtual status_t createEffectHal(const effect_uuid_t *pEffectUuid,
                     int32_t sessionId, int32_t deviceId, sp<EffectHalInterface> *effect) = 0;
     virtual status_t allocateHalBuffer(size_t size, sp<EffectBufferHalInterface>* buffer) = 0;
-    virtual bool updateOrphanEffectChains(const sp<EffectModule>& effect) = 0;
+    virtual bool updateOrphanEffectChains(const sp<EffectBase>& effect) = 0;
 
     // Methods usually implemented with help from EffectChain: pay attention to mutex locking order
     virtual uint32_t strategy() const = 0;
@@ -65,11 +65,11 @@
     virtual wp<EffectChain> chain() const = 0;
 };
 
-// EffectModule and EffectChain classes both have their own mutex to protect
+// EffectBase(EffectModule) and EffectChain classes both have their own mutex to protect
 // state changes or resource modifications. Always respect the following order
 // if multiple mutexes must be acquired to avoid cross deadlock:
-// AudioFlinger -> ThreadBase -> EffectChain -> EffectModule
-// AudioHandle -> ThreadBase -> EffectChain -> EffectModule
+// AudioFlinger -> ThreadBase -> EffectChain -> EffectBase(EffectModule)
+// AudioHandle -> ThreadBase -> EffectChain -> EffectBase(EffectModule)
 
 // NOTE: When implementing the EffectCallbackInterface, in an EffectChain or other, it is important
 // to pay attention to this locking order as some callback methods can be called from a state where
@@ -80,23 +80,19 @@
 // Threadbase mutex locked to avoid cross deadlock with other clients calling AudioPolicyService
 // methods that in turn call AudioFlinger thus locking the same mutexes in the reverse order.
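
To make the ordering concrete, a minimal sketch (illustrative only; the helper is hypothetical and assumes EffectChain exposes lock()/unlock() the same way EffectBase does below):

    void withChainAndEffectLocked(const sp<EffectChain>& chain, const sp<EffectBase>& effect) {
        chain->lock();      // EffectChain before EffectBase(EffectModule), per the order above
        effect->lock();
        // ... state changes that need both objects locked ...
        effect->unlock();   // release in reverse order of acquisition
        chain->unlock();
    }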
 
-// The EffectModule class is a wrapper object controlling the effect engine implementation
-// in the effect library. It prevents concurrent calls to process() and command() functions
-// from different client threads. It keeps a list of EffectHandle objects corresponding
-// to all client applications using this effect and notifies applications of effect state,
-// control or parameter changes. It manages the activation state machine to send appropriate
-// reset, enable, disable commands to effect engine and provide volume
-// ramping when effects are activated/deactivated.
-// When controlling an auxiliary effect, the EffectModule also provides an input buffer used by
-// the attached track(s) to accumulate their auxiliary channel.
-class EffectModule : public RefBase {
+
+// The EffectBase class contains common properties, state and behavior for an EffectModule or
+// other derived classes managing an audio effect instance within the effect framework.
+// It also contains the class mutex (see comment on locking order above).
+class EffectBase : public RefBase {
 public:
-    EffectModule(const sp<EffectCallbackInterface>& chain,
-                    effect_descriptor_t *desc,
-                    int id,
-                    audio_session_t sessionId,
-                    bool pinned);
-    virtual ~EffectModule();
+    EffectBase(const sp<EffectCallbackInterface>& callback,
+               effect_descriptor_t *desc,
+               int id,
+               audio_session_t sessionId,
+               bool pinned);
+
+    ~EffectBase() override = default;
 
     enum effect_state {
         IDLE,
@@ -108,69 +104,14 @@
         DESTROYED
     };
 
-    int         id() const { return mId; }
-    void process();
-    bool updateState();
-    status_t command(uint32_t cmdCode,
-                     uint32_t cmdSize,
-                     void *pCmdData,
-                     uint32_t *replySize,
-                     void *pReplyData);
-
-    void reset_l();
-    status_t configure();
-    status_t init();
+    int id() const { return mId; }
     effect_state state() const {
         return mState;
     }
-    uint32_t status() {
-        return mStatus;
-    }
     audio_session_t sessionId() const {
         return mSessionId;
     }
-    status_t    setEnabled(bool enabled, bool fromHandle);
-    status_t    setEnabled_l(bool enabled);
-    bool isEnabled() const;
-    bool isProcessEnabled() const;
-    bool isOffloadedOrDirect() const;
-    bool isVolumeControlEnabled() const;
-
-    void        setInBuffer(const sp<EffectBufferHalInterface>& buffer);
-    int16_t     *inBuffer() const {
-        return mInBuffer != 0 ? reinterpret_cast<int16_t*>(mInBuffer->ptr()) : NULL;
-    }
-    void        setOutBuffer(const sp<EffectBufferHalInterface>& buffer);
-    int16_t     *outBuffer() const {
-        return mOutBuffer != 0 ? reinterpret_cast<int16_t*>(mOutBuffer->ptr()) : NULL;
-    }
-    void        setCallback(const sp<EffectCallbackInterface>& callback) { mCallback = callback; }
-
-    status_t addHandle(EffectHandle *handle);
-    ssize_t  disconnectHandle(EffectHandle *handle, bool unpinIfLast);
-    ssize_t removeHandle(EffectHandle *handle);
-    ssize_t removeHandle_l(EffectHandle *handle);
-
     const effect_descriptor_t& desc() const { return mDescriptor; }
-    sp<EffectCallbackInterface>&     callback() { return mCallback; }
-
-    status_t         setDevices(const AudioDeviceTypeAddrVector &devices);
-    status_t         setInputDevice(const AudioDeviceTypeAddr &device);
-    status_t         setVolume(uint32_t *left, uint32_t *right, bool controller);
-    status_t         setMode(audio_mode_t mode);
-    status_t         setAudioSource(audio_source_t source);
-    status_t         start();
-    status_t         stop();
-    void             setSuspended(bool suspended);
-    bool             suspended() const;
-
-    EffectHandle*    controlHandle_l();
-
-    bool             isPinned() const { return mPinned; }
-    void             unPin() { mPinned = false; }
-    bool             purgeHandles();
-    void             lock() { mLock.lock(); }
-    void             unlock() { mLock.unlock(); }
     bool             isOffloadable() const
                         { return (mDescriptor.flags & EFFECT_FLAG_OFFLOAD_SUPPORTED) != 0; }
     bool             isImplementationSoftware() const
@@ -183,19 +124,143 @@
     bool             isVolumeMonitor() const
                         { return (mDescriptor.flags & EFFECT_FLAG_VOLUME_MASK)
                             == EFFECT_FLAG_VOLUME_MONITOR; }
-    status_t         setOffloaded(bool offloaded, audio_io_handle_t io);
-    bool             isOffloaded() const;
-    void             addEffectToHal_l();
-    void             release_l();
+
+    virtual status_t setEnabled(bool enabled, bool fromHandle);
+    status_t    setEnabled_l(bool enabled);
+    bool isEnabled() const;
+
+    void             setSuspended(bool suspended);
+    bool             suspended() const;
+
+    virtual status_t command(uint32_t cmdCode __unused,
+                 uint32_t cmdSize __unused,
+                 void *pCmdData __unused,
+                 uint32_t *replySize __unused,
+                 void *pReplyData __unused) { return NO_ERROR; }
+
+    void setCallback(const sp<EffectCallbackInterface>& callback) { mCallback = callback; }
+    sp<EffectCallbackInterface>&     callback() { return mCallback; }
+
+    status_t addHandle(EffectHandle *handle);
+    ssize_t disconnectHandle(EffectHandle *handle, bool unpinIfLast);
+    ssize_t removeHandle(EffectHandle *handle);
+    virtual ssize_t removeHandle_l(EffectHandle *handle);
+    EffectHandle* controlHandle_l();
+    bool purgeHandles();
+
+    void             checkSuspendOnEffectEnabled(bool enabled, bool threadLocked);
+
+    bool             isPinned() const { return mPinned; }
+    void             unPin() { mPinned = false; }
+
+    void             lock() { mLock.lock(); }
+    void             unlock() { mLock.unlock(); }
 
     status_t         updatePolicyState();
-    void             checkSuspendOnEffectEnabled(bool enabled, bool threadLocked);
+
+    virtual          sp<EffectModule> asEffectModule() { return nullptr; }
+    virtual          sp<DeviceEffectProxy> asDeviceEffectProxy() { return nullptr; }
 
     void             dump(int fd, const Vector<String16>& args);
 
 private:
     friend class AudioFlinger;      // for mHandles
-    bool                mPinned;
+    bool             mPinned = false;
+
+    DISALLOW_COPY_AND_ASSIGN(EffectBase);
+
+    mutable Mutex             mLock;      // mutex for process, commands and handles list protection
+    sp<EffectCallbackInterface> mCallback; // parent effect chain
+    const int                 mId;        // this instance unique ID
+    const audio_session_t     mSessionId; // audio session ID
+    const effect_descriptor_t mDescriptor;// effect descriptor received from effect engine
+    effect_state              mState = IDLE; // current activation state
+    // effect is suspended: temporarily disabled by framework
+    bool                      mSuspended = false;
+
+    Vector<EffectHandle *>    mHandles;   // list of client handles
+                // First handle in mHandles has highest priority and controls the effect module
+
+    // Audio policy effect state management
+    // Mutex protecting transactions with audio policy manager as mLock cannot
+    // be held to avoid cross deadlocks with audio policy mutex
+    Mutex                     mPolicyLock;
+    // Effect is registered in APM or not
+    bool                      mPolicyRegistered = false;
+    // Effect enabled state communicated to APM. Enabled state corresponds to
+    // state requested by the EffectHandle with control
+    bool                      mPolicyEnabled = false;
+};
+
+// The EffectModule class is a wrapper object controlling the effect engine implementation
+// in the effect library. It prevents concurrent calls to process() and command() functions
+// from different client threads. It keeps a list of EffectHandle objects corresponding
+// to all client applications using this effect and notifies applications of effect state,
+// control or parameter changes. It manages the activation state machine to send appropriate
+// reset, enable, disable commands to effect engine and provide volume
+// ramping when effects are activated/deactivated.
+// When controlling an auxiliary effect, the EffectModule also provides an input buffer used by
+// the attached track(s) to accumulate their auxiliary channel.
+class EffectModule : public EffectBase {
+public:
+    EffectModule(const sp<EffectCallbackInterface>& callback,
+                    effect_descriptor_t *desc,
+                    int id,
+                    audio_session_t sessionId,
+                    bool pinned,
+                    audio_port_handle_t deviceId);
+    virtual ~EffectModule();
+
+    void process();
+    bool updateState();
+    status_t command(uint32_t cmdCode,
+                     uint32_t cmdSize,
+                     void *pCmdData,
+                     uint32_t *replySize,
+                     void *pReplyData) override;
+
+    void reset_l();
+    status_t configure();
+    status_t init();
+
+    uint32_t status() {
+        return mStatus;
+    }
+
+    bool isProcessEnabled() const;
+    bool isOffloadedOrDirect() const;
+    bool isVolumeControlEnabled() const;
+
+    void        setInBuffer(const sp<EffectBufferHalInterface>& buffer);
+    int16_t     *inBuffer() const {
+        return mInBuffer != 0 ? reinterpret_cast<int16_t*>(mInBuffer->ptr()) : NULL;
+    }
+    void        setOutBuffer(const sp<EffectBufferHalInterface>& buffer);
+    int16_t     *outBuffer() const {
+        return mOutBuffer != 0 ? reinterpret_cast<int16_t*>(mOutBuffer->ptr()) : NULL;
+    }
+
+    ssize_t removeHandle_l(EffectHandle *handle) override;
+
+    status_t         setDevices(const AudioDeviceTypeAddrVector &devices);
+    status_t         setInputDevice(const AudioDeviceTypeAddr &device);
+    status_t         setVolume(uint32_t *left, uint32_t *right, bool controller);
+    status_t         setMode(audio_mode_t mode);
+    status_t         setAudioSource(audio_source_t source);
+    status_t         start();
+    status_t         stop();
+
+    status_t         setOffloaded(bool offloaded, audio_io_handle_t io);
+    bool             isOffloaded() const;
+    void             addEffectToHal_l();
+    void             release_l();
+
+    sp<EffectModule> asEffectModule() override { return this; }
+
+    void             dump(int fd, const Vector<String16>& args);
+
+private:
+    friend class AudioFlinger;      // for mHandles
 
     // Maximum time allocated to effect engines to complete the turn off sequence
     static const uint32_t MAX_DISABLE_TIME_MS = 10000;
@@ -207,23 +272,15 @@
     status_t removeEffectFromHal_l();
     status_t sendSetAudioDevicesCommand(const AudioDeviceTypeAddrVector &devices, uint32_t cmdCode);
 
-mutable Mutex               mLock;      // mutex for process, commands and handles list protection
-    sp<EffectCallbackInterface>     mCallback;     // parent effect chain
-    const int           mId;        // this instance unique ID
-    const audio_session_t mSessionId; // audio session ID
-    const effect_descriptor_t mDescriptor;// effect descriptor received from effect engine
     effect_config_t     mConfig;    // input and output audio configuration
     sp<EffectHalInterface> mEffectInterface; // Effect module HAL
     sp<EffectBufferHalInterface> mInBuffer;  // Buffers for interacting with HAL
     sp<EffectBufferHalInterface> mOutBuffer;
     status_t            mStatus;    // initialization status
-    effect_state        mState;     // current activation state
-    Vector<EffectHandle *> mHandles;    // list of client handles
                 // First handle in mHandles has highest priority and controls the effect module
     uint32_t mMaxDisableWaitCnt;    // maximum grace period before forcing an effect off after
                                     // sending disable command.
     uint32_t mDisableWaitCnt;       // current process() calls count during disable period.
-    bool     mSuspended;            // effect is suspended: temporarily disabled by framework
     bool     mOffloaded;            // effect is currently offloaded to the audio DSP
 
 #ifdef FLOAT_EFFECT_CHAIN
@@ -251,16 +308,6 @@
     static constexpr pid_t INVALID_PID = (pid_t)-1;
     // this tid is allowed to call setVolume() without acquiring the mutex.
     pid_t mSetVolumeReentrantTid = INVALID_PID;
-
-    // Audio policy effect state management
-    // Mutex protecting transactions with audio policy manager as mLock cannot
-    // be held to avoid cross deadlocks with audio policy mutex
-    Mutex   mPolicyLock;
-    // Effect is registered in APM or not
-    bool    mPolicyRegistered = false;
-    // Effect enabled state communicated to APM. Enabled state corresponds to
-    // state requested by the EffectHandle with control
-    bool    mPolicyEnabled = false;
 };
 
 // The EffectHandle class implements the IEffect interface. It provides resources
@@ -272,7 +319,7 @@
 class EffectHandle: public android::BnEffect {
 public:
 
-    EffectHandle(const sp<EffectModule>& effect,
+    EffectHandle(const sp<EffectBase>& effect,
             const sp<AudioFlinger::Client>& client,
             const sp<IEffectClient>& effectClient,
             int32_t priority);
@@ -310,9 +357,9 @@
     bool enabled() const { return mEnabled; }
 
     // Getters
-    wp<EffectModule> effect() const { return mEffect; }
+    wp<EffectBase> effect() const { return mEffect; }
     int id() const {
-        sp<EffectModule> effect = mEffect.promote();
+        sp<EffectBase> effect = mEffect.promote();
         if (effect == 0) {
             return 0;
         }
@@ -329,7 +376,7 @@
     DISALLOW_COPY_AND_ASSIGN(EffectHandle);
 
     Mutex mLock;                        // protects IEffect method calls
-    wp<EffectModule> mEffect;           // pointer to controlled EffectModule
+    wp<EffectBase> mEffect;           // pointer to controlled EffectModule
     sp<IEffectClient> mEffectClient;    // callback interface for client notifications
     /*const*/ sp<Client> mClient;       // client for shared memory allocation, see disconnect()
     sp<IMemory>         mCblkMemory;    // shared memory for control block
@@ -355,7 +402,6 @@
 class EffectChain : public RefBase {
 public:
     EffectChain(const wp<ThreadBase>& wThread, audio_session_t sessionId);
-    EffectChain(ThreadBase *thread, audio_session_t sessionId);
     virtual ~EffectChain();
 
     // special key used for an entry in mSuspendedEffects keyed vector
@@ -466,13 +512,16 @@
 
     class EffectCallback :  public EffectCallbackInterface {
     public:
-        EffectCallback(EffectChain *chain, ThreadBase *thread, AudioFlinger *audioFlinger)
-            : mChain(chain), mThread(thread), mAudioFlinger(audioFlinger) {}
+        EffectCallback(const wp<EffectChain>& chain,
+                       const wp<ThreadBase>& thread)
+            : mChain(chain) {
+            setThread(thread);
+        }
 
         status_t createEffectHal(const effect_uuid_t *pEffectUuid,
                int32_t sessionId, int32_t deviceId, sp<EffectHalInterface> *effect) override;
         status_t allocateHalBuffer(size_t size, sp<EffectBufferHalInterface>* buffer) override;
-        bool updateOrphanEffectChains(const sp<EffectModule>& effect) override;
+        bool updateOrphanEffectChains(const sp<EffectBase>& effect) override;
 
         audio_io_handle_t io() const override;
         bool isOutput() const override;
@@ -492,18 +541,23 @@
         void setVolumeForOutput(float left, float right) const override;
 
         // check if effects should be suspended/restored when a given effect is enable/disabled
-        void checkSuspendOnEffectEnabled(const sp<EffectModule>& effect,
+        void checkSuspendOnEffectEnabled(const sp<EffectBase>& effect,
                               bool enabled, bool threadLocked) override;
         void resetVolume() override;
         uint32_t strategy() const override;
         int32_t activeTrackCnt() const override;
-        void onEffectEnable(const sp<EffectModule>& effect) override;
-        void onEffectDisable(const sp<EffectModule>& effect) override;
+        void onEffectEnable(const sp<EffectBase>& effect) override;
+        void onEffectDisable(const sp<EffectBase>& effect) override;
 
         wp<EffectChain> chain() const override { return mChain; }
 
         wp<ThreadBase> thread() { return mThread; }
-        void setThread(ThreadBase *thread) { mThread = thread; };
+
+        void setThread(const wp<ThreadBase>& thread) {
+            mThread = thread;
+            sp<ThreadBase> p = thread.promote();
+            mAudioFlinger = p ? p->mAudioFlinger : nullptr;
+        }
 
     private:
         wp<EffectChain> mChain;
@@ -568,3 +622,97 @@
 
              const sp<EffectCallback> mEffectCallback;
 };
+
+class DeviceEffectProxy : public EffectBase {
+public:
+        DeviceEffectProxy (const AudioDeviceTypeAddr& device,
+                const sp<DeviceEffectManagerCallback>& callback,
+                effect_descriptor_t *desc, int id)
+            : EffectBase(callback, desc, id, AUDIO_SESSION_DEVICE, false),
+                mDevice(device), mManagerCallback(callback),
+                mMyCallback(new ProxyCallback(wp<DeviceEffectProxy>(this),
+                                              callback)) {}
+
+    status_t setEnabled(bool enabled, bool fromHandle) override;
+    sp<DeviceEffectProxy> asDeviceEffectProxy() override { return this; }
+
+    status_t init(const std::map<audio_patch_handle_t, PatchPanel::Patch>& patches);
+    status_t onCreatePatch(audio_patch_handle_t patchHandle, const PatchPanel::Patch& patch);
+    void onReleasePatch(audio_patch_handle_t patchHandle);
+
+    size_t removeEffect(const sp<EffectModule>& effect);
+
+    status_t addEffectToHal(sp<EffectHalInterface> effect);
+    status_t removeEffectFromHal(sp<EffectHalInterface> effect);
+
+    const AudioDeviceTypeAddr& device() { return mDevice; };
+    bool isOutput() const;
+    uint32_t sampleRate() const;
+    audio_channel_mask_t channelMask() const;
+    uint32_t channelCount() const;
+
+    void dump(int fd, int spaces);
+
+private:
+
+    class ProxyCallback :  public EffectCallbackInterface {
+    public:
+                ProxyCallback(const wp<DeviceEffectProxy>& proxy,
+                        const sp<DeviceEffectManagerCallback>& callback)
+                    : mProxy(proxy), mManagerCallback(callback) {}
+
+        status_t createEffectHal(const effect_uuid_t *pEffectUuid,
+               int32_t sessionId, int32_t deviceId, sp<EffectHalInterface> *effect) override;
+        status_t allocateHalBuffer(size_t size __unused,
+                sp<EffectBufferHalInterface>* buffer __unused) override { return NO_ERROR; }
+        bool updateOrphanEffectChains(const sp<EffectBase>& effect __unused) override {
+                    return false;
+        }
+
+        audio_io_handle_t io() const override { return AUDIO_IO_HANDLE_NONE; }
+        bool isOutput() const override;
+        bool isOffload() const override { return false; }
+        bool isOffloadOrDirect() const override { return false; }
+        bool isOffloadOrMmap() const override { return false; }
+
+        uint32_t sampleRate() const override;
+        audio_channel_mask_t channelMask() const override;
+        uint32_t channelCount() const override;
+        size_t frameCount() const override  { return 0; }
+        uint32_t latency() const override  { return 0; }
+
+        status_t addEffectToHal(sp<EffectHalInterface> effect) override;
+        status_t removeEffectFromHal(sp<EffectHalInterface> effect) override;
+
+        bool disconnectEffectHandle(EffectHandle *handle, bool unpinIfLast) override;
+        void setVolumeForOutput(float left __unused, float right __unused) const override {}
+
+        void checkSuspendOnEffectEnabled(const sp<EffectBase>& effect __unused,
+                              bool enabled __unused, bool threadLocked __unused) override {}
+        void resetVolume() override {}
+        uint32_t strategy() const override  { return 0; }
+        int32_t activeTrackCnt() const override { return 0; }
+        void onEffectEnable(const sp<EffectBase>& effect __unused) override {}
+        void onEffectDisable(const sp<EffectBase>& effect __unused) override {}
+
+        wp<EffectChain> chain() const override { return nullptr; }
+
+        int newEffectId();
+
+    private:
+        const wp<DeviceEffectProxy> mProxy;
+        const sp<DeviceEffectManagerCallback> mManagerCallback;
+    };
+
+    status_t checkPort(const PatchPanel::Patch& patch, const struct audio_port_config *port,
+            sp<EffectHandle> *handle);
+
+    const AudioDeviceTypeAddr mDevice;
+    const sp<DeviceEffectManagerCallback> mManagerCallback;
+    const sp<ProxyCallback> mMyCallback;
+
+    Mutex mProxyLock;
+    std::map<audio_patch_handle_t, sp<EffectHandle>> mEffectHandles; // protected by mProxyLock
+    sp<EffectModule> mHalEffect; // protected by mProxyLock
+    struct audio_port_config mDevicePort = { .id = AUDIO_PORT_HANDLE_NONE };
+};
diff --git a/services/audioflinger/PatchPanel.cpp b/services/audioflinger/PatchPanel.cpp
index 53e2dd5..b58fd8b 100644
--- a/services/audioflinger/PatchPanel.cpp
+++ b/services/audioflinger/PatchPanel.cpp
@@ -170,8 +170,7 @@
                 }
                 halHandle = removedPatch.mHalHandle;
             }
-            mPatches.erase(iter);
-            removeSoftwarePatchFromInsertedModules(*handle);
+            erasePatch(*handle);
         }
     }
 
@@ -326,10 +325,14 @@
                         }
                     }
                     status = thread->sendCreateAudioPatchConfigEvent(patch, &halHandle);
+                    if (status == NO_ERROR) {
+                        newPatch.setThread(thread);
+                    }
+
                     // remove stale audio patch with same input as sink if any
                     for (auto& iter : mPatches) {
                         if (iter.second.mAudioPatch.sinks[0].ext.mix.handle == thread->id()) {
-                            mPatches.erase(iter.first);
+                            erasePatch(iter.first);
                             break;
                         }
                     }
@@ -388,11 +391,14 @@
             }
 
             status = thread->sendCreateAudioPatchConfigEvent(patch, &halHandle);
+            if (status == NO_ERROR) {
+                newPatch.setThread(thread);
+            }
 
             // remove stale audio patch with same output as source if any
             for (auto& iter : mPatches) {
                 if (iter.second.mAudioPatch.sources[0].ext.mix.handle == thread->id()) {
-                    mPatches.erase(iter.first);
+                    erasePatch(iter.first);
                     break;
                 }
             }
@@ -406,11 +412,11 @@
     if (status == NO_ERROR) {
         *handle = (audio_patch_handle_t) mAudioFlinger.nextUniqueId(AUDIO_UNIQUE_ID_USE_PATCH);
         newPatch.mHalHandle = halHandle;
+        mAudioFlinger.mDeviceEffectManager.createAudioPatch(*handle, newPatch);
         mPatches.insert(std::make_pair(*handle, std::move(newPatch)));
         if (insertedModule != AUDIO_MODULE_HANDLE_NONE) {
             addSoftwarePatchToInsertedModules(insertedModule, *handle);
         }
-        ALOGV("%s() added new patch handle %d halHandle %d", __func__, *handle, halHandle);
     } else {
         newPatch.clearConnections(this);
     }
@@ -448,18 +454,6 @@
         *mPlayback.handlePtr() = AUDIO_PATCH_HANDLE_NONE;
     }
 
-    // use a pseudo LCM between input and output framecount
-    size_t playbackFrameCount = mPlayback.thread()->frameCount();
-    int playbackShift = __builtin_ctz(playbackFrameCount);
-    size_t recordFrameCount = mRecord.thread()->frameCount();
-    int shift = __builtin_ctz(recordFrameCount);
-    if (playbackShift < shift) {
-        shift = playbackShift;
-    }
-    size_t frameCount = (playbackFrameCount * recordFrameCount) >> shift;
-    ALOGV("%s() playframeCount %zu recordFrameCount %zu frameCount %zu",
-            __func__, playbackFrameCount, recordFrameCount, frameCount);
-
     // create a special record track to capture from record thread
     uint32_t channelCount = mPlayback.thread()->channelCount();
     audio_channel_mask_t inChannelMask = audio_channel_in_mask_from_count(channelCount);
@@ -503,7 +497,17 @@
     }
 
     sp<RecordThread::PatchRecord> tempRecordTrack;
-    if ((inputFlags & AUDIO_INPUT_FLAG_DIRECT) && (outputFlags & AUDIO_OUTPUT_FLAG_DIRECT)) {
+    const bool usePassthruPatchRecord =
+            (inputFlags & AUDIO_INPUT_FLAG_DIRECT) && (outputFlags & AUDIO_OUTPUT_FLAG_DIRECT);
+    const size_t playbackFrameCount = mPlayback.thread()->frameCount();
+    const size_t recordFrameCount = mRecord.thread()->frameCount();
+    size_t frameCount = 0;
+    if (usePassthruPatchRecord) {
+        // PassthruPatchRecord produces buffers on demand, so use the
+        // maximum of the playback and record thread frame counts
+        frameCount = std::max(playbackFrameCount, recordFrameCount);
+        ALOGV("%s() playframeCount %zu recordFrameCount %zu frameCount %zu",
+            __func__, playbackFrameCount, recordFrameCount, frameCount);
         tempRecordTrack = new RecordThread::PassthruPatchRecord(
                                                  mRecord.thread().get(),
                                                  sampleRate,
@@ -512,6 +516,16 @@
                                                  frameCount,
                                                  inputFlags);
     } else {
+        // use a pseudo LCM between input and output framecount
+        int playbackShift = __builtin_ctz(playbackFrameCount);
+        int shift = __builtin_ctz(recordFrameCount);
+        if (playbackShift < shift) {
+            shift = playbackShift;
+        }
+        frameCount = (playbackFrameCount * recordFrameCount) >> shift;
+        ALOGV("%s() playframeCount %zu recordFrameCount %zu frameCount %zu",
+            __func__, playbackFrameCount, recordFrameCount, frameCount);
+
         tempRecordTrack = new RecordThread::PatchRecord(
                                                  mRecord.thread().get(),
                                                  sampleRate,
@@ -545,8 +559,14 @@
     }
 
     // tie playback and record tracks together
-    mRecord.setTrackAndPeer(tempRecordTrack, tempPatchTrack);
-    mPlayback.setTrackAndPeer(tempPatchTrack, tempRecordTrack);
+    // In the case of PassthruPatchRecord no I/O activity happens on RecordThread,
+    // everything is driven from PlaybackThread. Thus AudioBufferProvider methods
+    // of PassthruPatchRecord can only be called if the corresponding PatchTrack
+    // is alive. There is no need to hold a reference, and there is no need
+    // to clear it. In fact, since playback stopping is asynchronous, there is
+    // no proper time when clearing could be done.
+    mRecord.setTrackAndPeer(tempRecordTrack, tempPatchTrack, !usePassthruPatchRecord);
+    mPlayback.setTrackAndPeer(tempPatchTrack, tempRecordTrack, true /*holdReference*/);
 
     // start capture and playback
     mRecord.track()->start(AudioSystem::SYNC_EVENT_NONE, AUDIO_SESSION_NONE);
@@ -628,8 +648,21 @@
 String8 AudioFlinger::PatchPanel::Patch::dump(audio_patch_handle_t myHandle) const
 {
     // TODO: Consider table dump form for patches, just like tracks.
-    String8 result = String8::format("Patch %d: thread %p => thread %p",
-            myHandle, mRecord.const_thread().get(), mPlayback.const_thread().get());
+    String8 result = String8::format("Patch %d: %s (thread %p => thread %p)",
+            myHandle, isSoftware() ? "Software bridge between" : "No software bridge",
+            mRecord.const_thread().get(), mPlayback.const_thread().get());
+
+    bool hasSinkDevice =
+            mAudioPatch.num_sinks > 0 && mAudioPatch.sinks[0].type == AUDIO_PORT_TYPE_DEVICE;
+    bool hasSourceDevice =
+            mAudioPatch.num_sources > 0 && mAudioPatch.sources[0].type == AUDIO_PORT_TYPE_DEVICE;
+    result.appendFormat(" thread %p %s (%d) first device type %08x", mThread.unsafe_get(),
+            hasSinkDevice ? "num sinks" :
+                (hasSourceDevice ? "num sources" : "no devices"),
+            hasSinkDevice ? mAudioPatch.num_sinks :
+                (hasSourceDevice ? mAudioPatch.num_sources : 0),
+            hasSinkDevice ? mAudioPatch.sinks[0].ext.device.type :
+                (hasSourceDevice ? mAudioPatch.sources[0].ext.device.type : 0));
 
     // add latency if it exists
     double latencyMs;
@@ -705,11 +738,16 @@
             status = BAD_VALUE;
     }
 
-    mPatches.erase(iter);
-    removeSoftwarePatchFromInsertedModules(handle);
+    erasePatch(handle);
     return status;
 }
 
+void AudioFlinger::PatchPanel::erasePatch(audio_patch_handle_t handle) {
+    mPatches.erase(handle);
+    removeSoftwarePatchFromInsertedModules(handle);
+    mAudioFlinger.mDeviceEffectManager.releaseAudioPatch(handle);
+}
+
 /* List connected audio ports and their attributes */
 status_t AudioFlinger::PatchPanel::listAudioPatches(unsigned int *num_patches __unused,
                                   struct audio_patch *patches __unused)
@@ -793,16 +831,13 @@
     String8 patchPanelDump;
     const char *indent = "  ";
 
-    // Only dump software patches.
     bool headerPrinted = false;
     for (const auto& iter : mPatches) {
-        if (iter.second.isSoftware()) {
-            if (!headerPrinted) {
-                patchPanelDump += "\nSoftware patches:\n";
-                headerPrinted = true;
-            }
-            patchPanelDump.appendFormat("%s%s\n", indent, iter.second.dump(iter.first).string());
+        if (!headerPrinted) {
+            patchPanelDump += "\nPatches:\n";
+            headerPrinted = true;
         }
+        patchPanelDump.appendFormat("%s%s\n", indent, iter.second.dump(iter.first).string());
     }
 
     headerPrinted = false;
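
For reference, the frame count selection added to createConnections() above reduces to the following standalone sketch. The function name and sample values are invented for illustration, and __builtin_ctz assumes a GCC/Clang toolchain, as in the original code. The passthru path can use the smaller bound because PassthruPatchRecord hands out buffers on demand instead of accumulating a common period.

// Standalone illustration of the two frame count computations above; names and
// sample values are made up, only the arithmetic mirrors the diff.
#include <algorithm>
#include <cstdio>

size_t bridgeFrameCount(size_t playbackFrameCount, size_t recordFrameCount,
                        bool usePassthruPatchRecord) {
    if (usePassthruPatchRecord) {
        // Passthru path: buffers are produced on demand, so the larger of the two
        // thread frame counts is sufficient.
        return std::max(playbackFrameCount, recordFrameCount);
    }
    // Software bridge path ("pseudo LCM"): multiply the counts, then drop the common
    // power-of-two factor so the result remains a multiple of both frame counts.
    int shift = std::min(__builtin_ctz((unsigned)playbackFrameCount),
                         __builtin_ctz((unsigned)recordFrameCount));
    return (playbackFrameCount * recordFrameCount) >> shift;
}

int main() {
    // 960 = 2^6 * 15 and 1024 = 2^10 share the factor 2^6, so the bridge path yields
    // (960 * 1024) >> 6 = 15360, a multiple of both; the passthru path yields 1024.
    printf("bridge:   %zu\n", bridgeFrameCount(960, 1024, false));  // 15360
    printf("passthru: %zu\n", bridgeFrameCount(960, 1024, true));   // 1024
    return 0;
}
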
diff --git a/services/audioflinger/PatchPanel.h b/services/audioflinger/PatchPanel.h
index 181e27c..89d4eb1 100644
--- a/services/audioflinger/PatchPanel.h
+++ b/services/audioflinger/PatchPanel.h
@@ -76,13 +76,18 @@
 
     void dump(int fd) const;
 
-private:
     template<typename ThreadType, typename TrackType>
-    class Endpoint {
+    class Endpoint final {
     public:
         Endpoint() = default;
         Endpoint(const Endpoint&) = delete;
-        Endpoint& operator=(const Endpoint&) = delete;
+        Endpoint& operator=(const Endpoint& other) noexcept {
+            mThread = other.mThread;
+            mCloseThread = other.mCloseThread;
+            mHandle = other.mHandle;
+            mTrack = other.mTrack;
+            return *this;
+        }
         Endpoint(Endpoint&& other) noexcept { swap(other); }
         Endpoint& operator=(Endpoint&& other) noexcept {
             swap(other);
@@ -98,8 +103,8 @@
             return trackOrNull->initCheck();
         }
         audio_patch_handle_t handle() const { return mHandle; }
-        sp<ThreadType> thread() { return mThread; }
-        sp<TrackType> track() { return mTrack; }
+        sp<ThreadType> thread() const { return mThread; }
+        sp<TrackType> track() const { return mTrack; }
         sp<const ThreadType> const_thread() const { return mThread; }
         sp<const TrackType> const_track() const { return mTrack; }
 
@@ -123,18 +128,20 @@
             mCloseThread = closeThread;
         }
         template <typename T>
-        void setTrackAndPeer(const sp<TrackType>& track, const sp<T> &peer) {
+        void setTrackAndPeer(const sp<TrackType>& track, const sp<T> &peer, bool holdReference) {
             mTrack = track;
             mThread->addPatchTrack(mTrack);
-            mTrack->setPeerProxy(peer, true /* holdReference */);
+            mTrack->setPeerProxy(peer, holdReference);
+            mClearPeerProxy = holdReference;
         }
-        void clearTrackPeer() { if (mTrack) mTrack->clearPeerProxy(); }
+        void clearTrackPeer() { if (mClearPeerProxy && mTrack) mTrack->clearPeerProxy(); }
         void stopTrack() { if (mTrack) mTrack->stop(); }
 
         void swap(Endpoint &other) noexcept {
             using std::swap;
             swap(mThread, other.mThread);
             swap(mCloseThread, other.mCloseThread);
+            swap(mClearPeerProxy, other.mClearPeerProxy);
             swap(mHandle, other.mHandle);
             swap(mTrack, other.mTrack);
         }
@@ -146,18 +153,41 @@
     private:
         sp<ThreadType> mThread;
         bool mCloseThread = true;
+        bool mClearPeerProxy = true;
         audio_patch_handle_t mHandle = AUDIO_PATCH_HANDLE_NONE;
         sp<TrackType> mTrack;
     };
 
-    class Patch {
+    class Patch final {
     public:
         explicit Patch(const struct audio_patch &patch) : mAudioPatch(patch) {}
+        Patch() = default;
         ~Patch();
-        Patch(const Patch&) = delete;
-        Patch(Patch&&) = default;
-        Patch& operator=(const Patch&) = delete;
-        Patch& operator=(Patch&&) = default;
+        Patch(const Patch& other) noexcept {
+            mAudioPatch = other.mAudioPatch;
+            mHalHandle = other.mHalHandle;
+            mPlayback = other.mPlayback;
+            mRecord = other.mRecord;
+            mThread = other.mThread;
+        }
+        Patch(Patch&& other) noexcept { swap(other); }
+        Patch& operator=(Patch&& other) noexcept {
+            swap(other);
+            return *this;
+        }
+
+        void swap(Patch &other) noexcept {
+            using std::swap;
+            swap(mAudioPatch, other.mAudioPatch);
+            swap(mHalHandle, other.mHalHandle);
+            swap(mPlayback, other.mPlayback);
+            swap(mRecord, other.mRecord);
+            swap(mThread, other.mThread);
+        }
+
+        friend void swap(Patch &a, Patch &b) noexcept {
+            a.swap(b);
+        }
 
         status_t createConnections(PatchPanel *panel);
         void clearConnections(PatchPanel *panel);
@@ -165,6 +195,9 @@
             return mRecord.handle() != AUDIO_PATCH_HANDLE_NONE ||
                     mPlayback.handle() != AUDIO_PATCH_HANDLE_NONE; }
 
+        void setThread(sp<ThreadBase> thread) { mThread = thread; }
+        wp<ThreadBase> thread() const { return mThread; }
+
         // returns the latency of the patch (from record to playback).
         status_t getLatencyMs(double *latencyMs) const;
 
@@ -182,13 +215,20 @@
         Endpoint<PlaybackThread, PlaybackThread::PatchTrack> mPlayback;
         // connects source device to record thread input
         Endpoint<RecordThread, RecordThread::PatchRecord> mRecord;
+
+        wp<ThreadBase> mThread;
     };
 
+    // Call with AudioFlinger mLock held
+    std::map<audio_patch_handle_t, Patch>& patches_l() { return mPatches; }
+
+private:
     AudioHwDevice* findAudioHwDeviceByModule(audio_module_handle_t module);
     sp<DeviceHalInterface> findHwDeviceByModule(audio_module_handle_t module);
     void addSoftwarePatchToInsertedModules(
             audio_module_handle_t module, audio_patch_handle_t handle);
     void removeSoftwarePatchFromInsertedModules(audio_patch_handle_t handle);
+    void erasePatch(audio_patch_handle_t handle);
 
     AudioFlinger &mAudioFlinger;
     std::map<audio_patch_handle_t, Patch> mPatches;
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index a6d6914..87b72fb 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -1419,8 +1419,11 @@
     sp<EffectModule> effect;
     {
         Mutex::Autolock _l(mLock);
-
-        effect = handle->effect().promote();
+        sp<EffectBase> effectBase = handle->effect().promote();
+        if (effectBase == nullptr) {
+            return;
+        }
+        effect = effectBase->asEffectModule();
         if (effect == nullptr) {
             return;
         }
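
The Threads.cpp change above is an instance of the pattern enabled by the new EffectBase::asEffectModule()/asDeviceEffectProxy() accessors in Effects.h: promote the weak pointer to the base class first, then ask the object for its concrete type instead of casting. A minimal sketch of that pattern, using stand-in types rather than the AudioFlinger classes:

// Minimal sketch only: "Base" and "Module" stand in for EffectBase and EffectModule.
#include <utils/RefBase.h>

using android::sp;
using android::wp;

struct Module;  // concrete type, defined below

struct Base : public android::RefBase {
    virtual sp<Module> asModule();  // returns nullptr unless overridden by Module
};

struct Module : public Base {
    sp<Module> asModule() override { return this; }
};

sp<Module> Base::asModule() { return nullptr; }

// Mirrors the Threads.cpp hunk: promote the weak pointer, then ask the object
// whether it is the concrete type the caller needs.
sp<Module> moduleFor(const wp<Base>& weak) {
    sp<Base> base = weak.promote();
    if (base == nullptr) {
        return nullptr;
    }
    return base->asModule();
}
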
diff --git a/services/audiopolicy/AudioPolicyInterface.h b/services/audiopolicy/AudioPolicyInterface.h
index c5cdc25..4d53be4 100644
--- a/services/audiopolicy/AudioPolicyInterface.h
+++ b/services/audiopolicy/AudioPolicyInterface.h
@@ -192,6 +192,10 @@
     // return the enabled output devices for the given stream type
     virtual audio_devices_t getDevicesForStream(audio_stream_type_t stream) = 0;
 
+    // retrieves the list of enabled output devices for the given audio attributes
+    virtual status_t getDevicesForAttributes(const audio_attributes_t &attr,
+                                             AudioDeviceTypeAddrVector *devices) = 0;
+
     // Audio effect management
     virtual audio_io_handle_t getOutputForEffect(const effect_descriptor_t *desc) = 0;
     virtual status_t registerEffect(const effect_descriptor_t *desc,
@@ -246,6 +250,10 @@
             = 0;
     virtual status_t removeUidDeviceAffinities(uid_t uid) = 0;
 
+    virtual status_t setUserIdDeviceAffinities(int userId,
+            const Vector<AudioDeviceTypeAddr>& devices) = 0;
+    virtual status_t removeUserIdDeviceAffinities(int userId) = 0;
+
     virtual status_t startAudioSource(const struct audio_port_config *source,
                                       const audio_attributes_t *attributes,
                                       audio_port_handle_t *portId,
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
index fc79ab1..a757551 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
@@ -105,12 +105,27 @@
     status_t removeUidDeviceAffinities(uid_t uid);
     status_t getDevicesForUid(uid_t uid, Vector<AudioDeviceTypeAddr>& devices) const;
 
+    /**
+     * Updates the mix rules in order to make streams associated with the given user
+     * be routed to the given audio devices.
+     * @param userId the userId for which the device affinity is set
+     * @param devices the vector of devices that this userId may be routed to. A typical
+     *    use is to pass the devices associated with a given zone in a multi-zone setup.
+     * @return NO_ERROR if the update was successful, INVALID_OPERATION otherwise.
+     *    An example of failure is when there are already rules in place to restrict
+     *    a mix to the given userId (i.e. when a MATCH_USERID rule was set for it).
+     */
+    status_t setUserIdDeviceAffinities(int userId, const Vector<AudioDeviceTypeAddr>& devices);
+    status_t removeUserIdDeviceAffinities(int userId);
+    status_t getDevicesForUserId(int userId, Vector<AudioDeviceTypeAddr>& devices) const;
+
     void dump(String8 *dst) const;
 
 private:
     enum class MixMatchStatus { MATCH, NO_MATCH, INVALID_MIX };
     MixMatchStatus mixMatch(const AudioMix* mix, size_t mixIndex,
-                            const audio_attributes_t& attributes, uid_t uid);
+                            const audio_attributes_t& attributes,
+                            uid_t uid);
 };
 
 } // namespace android
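
A hypothetical caller of the userId affinity API declared above might look as follows. The collection instance, the user id and the bus device address are invented for the example; only the method signatures and return codes come from the header, and include paths for the supporting types are assumed.

// Illustrative sketch, not part of the patch.
#include <media/AudioDeviceTypeAddr.h>
#include <system/audio.h>
#include <utils/Errors.h>
#include <utils/Vector.h>

#include "AudioPolicyMix.h"  // assumed to be on the include path

namespace android {

void routeUserToZone(AudioPolicyMixCollection& mixes) {
    Vector<AudioDeviceTypeAddr> zoneDevices;
    zoneDevices.add(AudioDeviceTypeAddr(AUDIO_DEVICE_OUT_BUS, "bus1_rear_seat_out"));

    // Restrict streams of Android user 10 to the listed zone devices. This fails with
    // INVALID_OPERATION when a MATCH_USERID rule already constrains a mix for that user.
    status_t res = mixes.setUserIdDeviceAffinities(10 /*userId*/, zoneDevices);
    if (res != NO_ERROR) {
        return;  // nothing was changed, keep the previous routing
    }

    // Later, when the zone is torn down, drop the exclusion rules added above.
    mixes.removeUserIdDeviceAffinities(10 /*userId*/);
}

}  // namespace android
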
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
index 20c0a24..11660f0 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
@@ -60,6 +60,9 @@
         case RULE_MATCH_UID:
             ruleValue = std::to_string(criterion.mValue.mUid);
             break;
+        case RULE_MATCH_USERID:
+            ruleValue = std::to_string(criterion.mValue.mUserId);
+            break;
         default:
             unknownRule = true;
         }
@@ -197,7 +200,9 @@
                 ALOGV("%s: Mix %zu ignored as secondaryOutput because not opened yet", __func__, i);
             } else {
                 ALOGV("%s: Add a secondary desc %zu", __func__, i);
-                secondaryDescs->push_back(policyDesc);
+                if (secondaryDescs != nullptr) {
+                    secondaryDescs->push_back(policyDesc);
+                }
             }
         }
     }
@@ -221,10 +226,14 @@
             }
             if (!(attributes.usage == AUDIO_USAGE_UNKNOWN ||
                   attributes.usage == AUDIO_USAGE_MEDIA ||
-                  attributes.usage == AUDIO_USAGE_GAME)) {
+                  attributes.usage == AUDIO_USAGE_GAME ||
+                  attributes.usage == AUDIO_USAGE_VOICE_COMMUNICATION)) {
                 return MixMatchStatus::NO_MATCH;
             }
         }
+
+        int userId = (int) multiuser_get_user_id(uid);
+
         // TODO if adding more player rules (currently only 2), make rule handling "generic"
         //      as there is no difference in the treatment of usage- or uid-based rules
         bool hasUsageMatchRules = false;
@@ -237,6 +246,12 @@
         bool uidMatchFound = false;
         bool uidExclusionFound = false;
 
+        bool hasUserIdExcludeRules = false;
+        bool userIdExclusionFound = false;
+        bool hasUserIdMatchRules = false;
+        bool userIdMatchFound = false;
+
+
         bool hasAddrMatch = false;
 
         // iterate over all mix criteria to list what rules this mix contains
@@ -288,6 +303,24 @@
                     uidExclusionFound = true;
                 }
                 break;
+            case RULE_MATCH_USERID:
+                ALOGV("\tmix has RULE_MATCH_USERID for userId %d",
+                    mix->mCriteria[j].mValue.mUserId);
+                hasUserIdMatchRules = true;
+                if (mix->mCriteria[j].mValue.mUserId == userId) {
+                    // found one userId match against all allowed userIds
+                    userIdMatchFound = true;
+                }
+                break;
+            case RULE_EXCLUDE_USERID:
+                ALOGV("\tmix has RULE_EXCLUDE_USERID for userId %d",
+                    mix->mCriteria[j].mValue.mUserId);
+                hasUserIdExcludeRules = true;
+                if (mix->mCriteria[j].mValue.mUserId == userId) {
+                    // found this userId is to be excluded
+                    userIdExclusionFound = true;
+                }
+                break;
             default:
                 break;
             }
@@ -304,20 +337,27 @@
                         " and RULE_EXCLUDE_UID in mix %zu", mixIndex);
                 return MixMatchStatus::INVALID_MIX;
             }
-
-            if ((hasUsageExcludeRules && usageExclusionFound)
-                    || (hasUidExcludeRules && uidExclusionFound)) {
-                break; // stop iterating on criteria because an exclusion was found (will fail)
+            if (hasUserIdMatchRules && hasUserIdExcludeRules) {
+                ALOGE("getOutputForAttr: invalid combination of RULE_MATCH_USERID"
+                        " and RULE_EXCLUDE_USERID in mix %zu", mixIndex);
+                return MixMatchStatus::INVALID_MIX;
             }
 
+            if ((hasUsageExcludeRules && usageExclusionFound)
+                    || (hasUidExcludeRules && uidExclusionFound)
+                    || (hasUserIdExcludeRules && userIdExclusionFound)) {
+                break; // stop iterating on criteria because an exclusion was found (will fail)
+            }
         }//iterate on mix criteria
 
         // determine if exiting on success (or implicit failure as desc is 0)
         if (hasAddrMatch ||
                 !((hasUsageExcludeRules && usageExclusionFound) ||
+                  (hasUserIdExcludeRules && userIdExclusionFound) ||
                   (hasUsageMatchRules && !usageMatchFound)  ||
                   (hasUidExcludeRules && uidExclusionFound) ||
-                  (hasUidMatchRules && !uidMatchFound))) {
+                  (hasUidMatchRules && !uidMatchFound) ||
+                  (hasUserIdMatchRules && !userIdMatchFound))) {
             ALOGV("\tgetOutputForAttr will use mix %zu", mixIndex);
             return MixMatchStatus::MATCH;
         }
@@ -528,6 +568,109 @@
     return NO_ERROR;
 }
 
+status_t AudioPolicyMixCollection::setUserIdDeviceAffinities(int userId,
+        const Vector<AudioDeviceTypeAddr>& devices) {
+    // verify feasibility: for each player mix: if it already contains a
+    //    "match userId" rule for this userId, return an error
+    //    (adding a userId-device affinity would result in contradictory rules)
+    for (size_t i = 0; i < size(); i++) {
+        const AudioPolicyMix* mix = itemAt(i).get();
+        if (!mix->isDeviceAffinityCompatible()) {
+            continue;
+        }
+        if (mix->hasUserIdRule(true /*match*/, userId)) {
+            return INVALID_OPERATION;
+        }
+    }
+
+    // remove existing rules for this userId
+    removeUserIdDeviceAffinities(userId);
+
+    // for each player mix:
+    //   IF    device is not a target for the mix,
+    //     AND it doesn't have a "match userId" rule
+    //   THEN add a rule to exclude the userId
+    for (size_t i = 0; i < size(); i++) {
+        const AudioPolicyMix *mix = itemAt(i).get();
+        if (!mix->isDeviceAffinityCompatible()) {
+            continue;
+        }
+        // check if this mix goes to a device in the list of devices
+        bool deviceMatch = false;
+        const AudioDeviceTypeAddr mixDevice(mix->mDeviceType, mix->mDeviceAddress.string());
+        for (size_t j = 0; j < devices.size(); j++) {
+            if (mixDevice.equals(devices[j])) {
+                deviceMatch = true;
+                break;
+            }
+        }
+        if (!deviceMatch && !mix->hasMatchUserIdRule()) {
+            // this mix doesn't go to one of the listed devices for the given userId,
+            // and it's not already restricting the mix on a userId,
+            // modify its rules to exclude the userId
+            if (!mix->hasUserIdRule(false /*match*/, userId)) {
+                // no need to do it again if userId is already excluded
+                mix->setExcludeUserId(userId);
+            }
+        }
+    }
+
+    return NO_ERROR;
+}
+
+status_t AudioPolicyMixCollection::removeUserIdDeviceAffinities(int userId) {
+    // for each player mix: remove existing rules that match or exclude this userId
+    for (size_t i = 0; i < size(); i++) {
+        bool foundUserIdRule = false;
+        const AudioPolicyMix *mix = itemAt(i).get();
+        if (!mix->isDeviceAffinityCompatible()) {
+            continue;
+        }
+        std::vector<size_t> criteriaToRemove;
+        for (size_t j = 0; j < mix->mCriteria.size(); j++) {
+            const uint32_t rule = mix->mCriteria[j].mRule;
+            // is this rule excluding the userId? (not considering userId match rules
+            // as those are not used for userId-device affinity)
+            if (rule == RULE_EXCLUDE_USERID
+                    && userId == mix->mCriteria[j].mValue.mUserId) {
+                foundUserIdRule = true;
+                criteriaToRemove.insert(criteriaToRemove.begin(), j);
+            }
+        }
+        if (foundUserIdRule) {
+            for (size_t j = 0; j < criteriaToRemove.size(); j++) {
+                mix->mCriteria.removeAt(criteriaToRemove[j]);
+            }
+        }
+    }
+    return NO_ERROR;
+}
+
+status_t AudioPolicyMixCollection::getDevicesForUserId(int userId,
+        Vector<AudioDeviceTypeAddr>& devices) const {
+    // for each player mix:
+    // find rules that don't exclude this userId, and add the device to the list
+    for (size_t i = 0; i < size(); i++) {
+        bool ruleAllowsUserId = true;
+        const AudioPolicyMix *mix = itemAt(i).get();
+        if (mix->mMixType != MIX_TYPE_PLAYERS) {
+            continue;
+        }
+        for (size_t j = 0; j < mix->mCriteria.size(); j++) {
+            const uint32_t rule = mix->mCriteria[j].mRule;
+            if (rule == RULE_EXCLUDE_USERID
+                    && userId == mix->mCriteria[j].mValue.mUserId) {
+                ruleAllowsUserId = false;
+                break;
+            }
+        }
+        if (ruleAllowsUserId) {
+            devices.add(AudioDeviceTypeAddr(mix->mDeviceType, mix->mDeviceAddress.string()));
+        }
+    }
+    return NO_ERROR;
+}
+
 void AudioPolicyMixCollection::dump(String8 *dst) const
 {
     dst->append("\nAudio Policy Mix:\n");
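
The RULE_MATCH_USERID / RULE_EXCLUDE_USERID handling above keys off the Android user derived from the calling uid in mixMatch(). As a reminder of that mapping, multiuser_get_user_id() from <cutils/multiuser.h> strips the per-user uid offset (100000 per user on standard builds); the sample uid below is arbitrary.

// Reminder sketch of the uid -> Android user mapping relied on above.
#include <cstdio>
#include <cutils/multiuser.h>

int main() {
    uid_t uid = 1010052;  // app id 10052 running as Android user 10
    int userId = (int) multiuser_get_user_id(uid);  // 1010052 / 100000 -> 10
    printf("uid %u -> userId %d, appId %u\n", uid, userId, multiuser_get_app_id(uid));
    return 0;
}
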
diff --git a/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp b/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp
index 4376802..1b2f7c7 100644
--- a/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp
@@ -200,6 +200,7 @@
     {
         static constexpr const char *speakerDrcEnabled = "speaker_drc_enabled";
         static constexpr const char *callScreenModeSupported= "call_screen_mode_supported";
+        static constexpr const char *engineLibrarySuffix = "engine_library";
     };
 
     static status_t deserialize(const xmlNode *root, AudioPolicyConfig *config);
@@ -692,6 +693,11 @@
                     convertTo<std::string, bool>(attr, value)) {
                 config->setCallScreenModeSupported(value);
             }
+            std::string engineLibrarySuffix = getXmlAttribute(cur, Attributes::engineLibrarySuffix);
+            if (!engineLibrarySuffix.empty()) {
+                config->setEngineLibraryNameSuffix(engineLibrarySuffix);
+            }
+            return NO_ERROR;
         }
     }
     return NO_ERROR;
diff --git a/services/audiopolicy/common/managerdefinitions/src/TypeConverter.cpp b/services/audiopolicy/common/managerdefinitions/src/TypeConverter.cpp
index 2b5455e..c5b3546 100644
--- a/services/audiopolicy/common/managerdefinitions/src/TypeConverter.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/TypeConverter.cpp
@@ -55,9 +55,11 @@
     MAKE_STRING_FROM_ENUM(RULE_MATCH_ATTRIBUTE_USAGE),
     MAKE_STRING_FROM_ENUM(RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET),
     MAKE_STRING_FROM_ENUM(RULE_MATCH_UID),
+    MAKE_STRING_FROM_ENUM(RULE_MATCH_USERID),
     MAKE_STRING_FROM_ENUM(RULE_EXCLUDE_ATTRIBUTE_USAGE),
     MAKE_STRING_FROM_ENUM(RULE_EXCLUDE_ATTRIBUTE_CAPTURE_PRESET),
     MAKE_STRING_FROM_ENUM(RULE_EXCLUDE_UID),
+    MAKE_STRING_FROM_ENUM(RULE_EXCLUDE_USERID),
     TERMINATOR
 };
 
diff --git a/services/audiopolicy/config/Android.bp b/services/audiopolicy/config/Android.bp
index 4b5e788..f4610bb 100644
--- a/services/audiopolicy/config/Android.bp
+++ b/services/audiopolicy/config/Android.bp
@@ -92,6 +92,10 @@
     srcs: ["audio_policy_configuration_generic.xml"],
 }
 filegroup {
+    name: "audio_policy_configuration_generic_configurable",
+    srcs: ["audio_policy_configuration_generic_configurable.xml"],
+}
+filegroup {
     name: "usb_audio_policy_configuration",
     srcs: ["usb_audio_policy_configuration.xml"],
 }
diff --git a/services/audiopolicy/config/audio_policy_configuration_generic_configurable.xml b/services/audiopolicy/config/audio_policy_configuration_generic_configurable.xml
new file mode 100644
index 0000000..fbe4f7f
--- /dev/null
+++ b/services/audiopolicy/config/audio_policy_configuration_generic_configurable.xml
@@ -0,0 +1,50 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<audioPolicyConfiguration version="1.0" xmlns:xi="http://www.w3.org/2001/XInclude">
+    <!-- version section contains a “version” tag in the form “major.minor” e.g version=”1.0” -->
+
+    <!-- Global configuration Declaration -->
+    <globalConfiguration speaker_drc_enabled="false" engine_library="configurable"/>
+
+    <modules>
+        <!-- Primary Audio HAL -->
+        <xi:include href="primary_audio_policy_configuration.xml"/>
+
+        <!-- Remote Submix Audio HAL -->
+        <xi:include href="r_submix_audio_policy_configuration.xml"/>
+
+    </modules>
+    <!-- End of Modules section -->
+
+    <!-- Volume section:
+        IMPORTANT NOTE: Volume tables have been moved to engine configuration.
+                        Keep it here for legacy.
+                        Engine will fallback on these files if none are provided by engine.
+     -->
+
+    <xi:include href="audio_policy_volumes.xml"/>
+    <xi:include href="default_volume_tables.xml"/>
+
+    <!-- End of Volume section -->
+
+    <!-- Surround Sound configuration -->
+
+    <xi:include href="surround_sound_configuration_5_0.xml"/>
+
+    <!-- End of Surround Sound configuration -->
+
+</audioPolicyConfiguration>
diff --git a/services/audiopolicy/config/audio_policy_volumes.xml b/services/audiopolicy/config/audio_policy_volumes.xml
index 27bd3ff..1dec6f4 100644
--- a/services/audiopolicy/config/audio_policy_volumes.xml
+++ b/services/audiopolicy/config/audio_policy_volumes.xml
@@ -181,6 +181,16 @@
                                                 ref="DEFAULT_NON_MUTABLE_VOLUME_CURVE"/>
     <volume stream="AUDIO_STREAM_ACCESSIBILITY" deviceCategory="DEVICE_CATEGORY_HEARING_AID"
                                                 ref="DEFAULT_NON_MUTABLE_HEARING_AID_VOLUME_CURVE"/>
+    <volume stream="AUDIO_STREAM_ASSISTANT" deviceCategory="DEVICE_CATEGORY_HEADSET"
+                                                ref="DEFAULT_MEDIA_VOLUME_CURVE"/>
+    <volume stream="AUDIO_STREAM_ASSISTANT" deviceCategory="DEVICE_CATEGORY_SPEAKER"
+                                                ref="DEFAULT_DEVICE_CATEGORY_SPEAKER_VOLUME_CURVE"/>
+    <volume stream="AUDIO_STREAM_ASSISTANT" deviceCategory="DEVICE_CATEGORY_EARPIECE"
+                                                ref="DEFAULT_MEDIA_VOLUME_CURVE"/>
+    <volume stream="AUDIO_STREAM_ASSISTANT" deviceCategory="DEVICE_CATEGORY_EXT_MEDIA"
+                                                ref="DEFAULT_MEDIA_VOLUME_CURVE"/>
+    <volume stream="AUDIO_STREAM_ASSISTANT" deviceCategory="DEVICE_CATEGORY_HEARING_AID"
+                                                ref="DEFAULT_HEARING_AID_VOLUME_CURVE"/>
     <volume stream="AUDIO_STREAM_REROUTING" deviceCategory="DEVICE_CATEGORY_HEADSET"
                                             ref="FULL_SCALE_VOLUME_CURVE"/>
     <volume stream="AUDIO_STREAM_REROUTING" deviceCategory="DEVICE_CATEGORY_SPEAKER"
diff --git a/services/audiopolicy/engine/common/src/EngineBase.cpp b/services/audiopolicy/engine/common/src/EngineBase.cpp
index 01efc7a..46b950c 100644
--- a/services/audiopolicy/engine/common/src/EngineBase.cpp
+++ b/services/audiopolicy/engine/common/src/EngineBase.cpp
@@ -106,48 +106,33 @@
 
 engineConfig::ParsingResult EngineBase::loadAudioPolicyEngineConfig()
 {
-    auto loadProductStrategies =
-            [](auto& strategyConfigs, auto& productStrategies, auto& volumeGroups) {
-        for (auto& strategyConfig : strategyConfigs) {
-            sp<ProductStrategy> strategy = new ProductStrategy(strategyConfig.name);
-            for (const auto &group : strategyConfig.attributesGroups) {
-                const auto &iter = std::find_if(begin(volumeGroups), end(volumeGroups),
-                                         [&group](const auto &volumeGroup) {
-                        return group.volumeGroup == volumeGroup.second->getName(); });
-                ALOG_ASSERT(iter != end(volumeGroups), "Invalid Volume Group Name %s",
-                            group.volumeGroup.c_str());
-                if (group.stream != AUDIO_STREAM_DEFAULT) {
-                    iter->second->addSupportedStream(group.stream);
-                }
-                for (const auto &attr : group.attributesVect) {
-                    strategy->addAttributes({group.stream, iter->second->getId(), attr});
-                    iter->second->addSupportedAttributes(attr);
-                }
-            }
-            product_strategy_t strategyId = strategy->getId();
-            productStrategies[strategyId] = strategy;
-        }
-    };
-    auto loadVolumeGroups = [](auto &volumeConfigs, auto &volumeGroups) {
-        for (auto &volumeConfig : volumeConfigs) {
-            sp<VolumeGroup> volumeGroup = new VolumeGroup(volumeConfig.name, volumeConfig.indexMin,
-                                                          volumeConfig.indexMax);
-            volumeGroups[volumeGroup->getId()] = volumeGroup;
+    auto loadVolumeConfig = [](auto &volumeGroups, auto &volumeConfig) {
+        sp<VolumeGroup> volumeGroup = new VolumeGroup(volumeConfig.name, volumeConfig.indexMin,
+                                                      volumeConfig.indexMax);
+        volumeGroups[volumeGroup->getId()] = volumeGroup;
 
-            for (auto &configCurve : volumeConfig.volumeCurves) {
-                device_category deviceCat = DEVICE_CATEGORY_SPEAKER;
-                if (!DeviceCategoryConverter::fromString(configCurve.deviceCategory, deviceCat)) {
-                    ALOGE("%s: Invalid %s", __FUNCTION__, configCurve.deviceCategory.c_str());
-                    continue;
-                }
-                sp<VolumeCurve> curve = new VolumeCurve(deviceCat);
-                for (auto &point : configCurve.curvePoints) {
-                    curve->add({point.index, point.attenuationInMb});
-                }
-                volumeGroup->add(curve);
+        for (auto &configCurve : volumeConfig.volumeCurves) {
+            device_category deviceCat = DEVICE_CATEGORY_SPEAKER;
+            if (!DeviceCategoryConverter::fromString(configCurve.deviceCategory, deviceCat)) {
+                ALOGE("%s: Invalid %s", __FUNCTION__, configCurve.deviceCategory.c_str());
+                continue;
             }
+            sp<VolumeCurve> curve = new VolumeCurve(deviceCat);
+            for (auto &point : configCurve.curvePoints) {
+                curve->add({point.index, point.attenuationInMb});
+            }
+            volumeGroup->add(curve);
+        }
+        return volumeGroup;
+    };
+    auto addSupportedStreamAttributes = [](auto &group, auto &volumeGroup, auto &strategy) {
+        volumeGroup->addSupportedStream(group.stream);
+        for (const auto &attr : group.attributesVect) {
+            strategy->addAttributes({group.stream, volumeGroup->getId(), attr});
+            volumeGroup->addSupportedAttributes(attr);
         }
     };
+
     auto result = engineConfig::parse();
     if (result.parsedConfig == nullptr) {
         ALOGW("%s: No configuration found, using default matching phone experience.", __FUNCTION__);
@@ -157,9 +142,37 @@
                   static_cast<size_t>(ret == NO_ERROR ? 0 : 1)};
     }
     ALOGE_IF(result.nbSkippedElement != 0, "skipped %zu elements", result.nbSkippedElement);
-    loadVolumeGroups(result.parsedConfig->volumeGroups, mVolumeGroups);
-    loadProductStrategies(result.parsedConfig->productStrategies, mProductStrategies,
-                          mVolumeGroups);
+
+    engineConfig::VolumeGroup defaultVolumeConfig;
+    for (auto &volumeConfig : result.parsedConfig->volumeGroups) {
+        // save default volume config for streams not defined in configuration
+        if (volumeConfig.name.compare("AUDIO_STREAM_MUSIC") == 0) {
+            defaultVolumeConfig = volumeConfig;
+        }
+        loadVolumeConfig(mVolumeGroups, volumeConfig);
+    }
+    for (auto& strategyConfig : result.parsedConfig->productStrategies) {
+        sp<ProductStrategy> strategy = new ProductStrategy(strategyConfig.name);
+        for (const auto &group : strategyConfig.attributesGroups) {
+            const auto &iter = std::find_if(begin(mVolumeGroups), end(mVolumeGroups),
+                                         [&group](const auto &volumeGroup) {
+                    return group.volumeGroup == volumeGroup.second->getName(); });
+            if (group.stream != AUDIO_STREAM_DEFAULT) {
+                if (iter == end(mVolumeGroups)) {
+                    ALOGW("%s: No configuration of %s found, using default volume configuration"
+                            , __FUNCTION__, group.volumeGroup.c_str());
+                    defaultVolumeConfig.name = group.volumeGroup;
+                    sp<VolumeGroup> volumeGroup =
+                            loadVolumeConfig(mVolumeGroups, defaultVolumeConfig);
+                    addSupportedStreamAttributes(group, volumeGroup, strategy);
+                } else {
+                    addSupportedStreamAttributes(group, iter->second, strategy);
+                }
+            }
+        }
+        product_strategy_t strategyId = strategy->getId();
+        mProductStrategies[strategyId] = strategy;
+    }
     mProductStrategies.initialize();
     return result;
 }
diff --git a/services/audiopolicy/engine/common/src/EngineDefaultConfig.h b/services/audiopolicy/engine/common/src/EngineDefaultConfig.h
index 20c57ee..fbce801 100644
--- a/services/audiopolicy/engine/common/src/EngineDefaultConfig.h
+++ b/services/audiopolicy/engine/common/src/EngineDefaultConfig.h
@@ -81,6 +81,10 @@
     },
     {"STRATEGY_MEDIA",
      {
+         {"assistant", AUDIO_STREAM_ASSISTANT, "AUDIO_STREAM_ASSISTANT",
+          {{AUDIO_CONTENT_TYPE_SPEECH, AUDIO_USAGE_ASSISTANT,
+            AUDIO_SOURCE_DEFAULT, 0, ""}}
+         },
          {"music", AUDIO_STREAM_MUSIC, "AUDIO_STREAM_MUSIC",
           {
               {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA, AUDIO_SOURCE_DEFAULT, 0, ""},
diff --git a/services/audiopolicy/engine/config/Android.bp b/services/audiopolicy/engine/config/Android.bp
index 885b5fa..ff840f9 100644
--- a/services/audiopolicy/engine/config/Android.bp
+++ b/services/audiopolicy/engine/config/Android.bp
@@ -1,4 +1,4 @@
-cc_library_static {
+cc_library {
     name: "libaudiopolicyengine_config",
     export_include_dirs: ["include"],
     include_dirs: [
@@ -14,17 +14,14 @@
     ],
     shared_libs: [
         "libmedia_helper",
-        "libandroidicu",
         "libxml2",
         "libutils",
         "liblog",
         "libcutils",
     ],
-    static_libs: [
-        "libaudiopolicycomponents",
-    ],
     header_libs: [
         "libaudio_system_headers",
-        "libaudiopolicycommon",
+        "libmedia_headers",
+        "libaudioclient_headers",
     ],
 }
diff --git a/services/audiopolicy/engine/config/src/EngineConfig.cpp b/services/audiopolicy/engine/config/src/EngineConfig.cpp
index d47fbd2..7f8cdd9 100644
--- a/services/audiopolicy/engine/config/src/EngineConfig.cpp
+++ b/services/audiopolicy/engine/config/src/EngineConfig.cpp
@@ -18,7 +18,6 @@
 //#define LOG_NDEBUG 0
 
 #include "EngineConfig.h"
-#include <policy.h>
 #include <cutils/properties.h>
 #include <media/TypeConverter.h>
 #include <media/convert.h>
diff --git a/services/audiopolicy/engineconfigurable/config/example/phone/audio_policy_engine_product_strategies.xml b/services/audiopolicy/engineconfigurable/config/example/phone/audio_policy_engine_product_strategies.xml
index 9398743..b1c0dcf 100644
--- a/services/audiopolicy/engineconfigurable/config/example/phone/audio_policy_engine_product_strategies.xml
+++ b/services/audiopolicy/engineconfigurable/config/example/phone/audio_policy_engine_product_strategies.xml
@@ -72,6 +72,12 @@
             <Attributes> <Usage value="AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE"/> </Attributes>
             <Attributes></Attributes>
         </AttributesGroup>
+        <AttributesGroup streamType="AUDIO_STREAM_ASSISTANT" volumeGroup="assistant">
+            <Attributes>
+                <ContentType value="AUDIO_CONTENT_TYPE_SPEECH"/>
+                <Usage value="AUDIO_USAGE_ASSISTANT"/>
+            </Attributes>
+        </AttributesGroup>
         <AttributesGroup streamType="AUDIO_STREAM_SYSTEM" volumeGroup="system">
             <Attributes> <Usage value="AUDIO_USAGE_ASSISTANCE_SONIFICATION"/> </Attributes>
         </AttributesGroup>
diff --git a/services/audiopolicy/engineconfigurable/config/example/phone/audio_policy_engine_stream_volumes.xml b/services/audiopolicy/engineconfigurable/config/example/phone/audio_policy_engine_stream_volumes.xml
index 707a184..0f9614e 100644
--- a/services/audiopolicy/engineconfigurable/config/example/phone/audio_policy_engine_stream_volumes.xml
+++ b/services/audiopolicy/engineconfigurable/config/example/phone/audio_policy_engine_stream_volumes.xml
@@ -205,7 +205,16 @@
         <volume deviceCategory="DEVICE_CATEGORY_EXT_MEDIA" ref="DEFAULT_NON_MUTABLE_VOLUME_CURVE"/>
         <volume deviceCategory="DEVICE_CATEGORY_HEARING_AID" ref="DEFAULT_NON_MUTABLE_HEARING_AID_VOLUME_CURVE"/>
     </volumeGroup>
-
+    <volumeGroup>
+        <name>assistant</name>
+        <indexMin>0</indexMin>
+        <indexMax>15</indexMax>
+        <volume deviceCategory="DEVICE_CATEGORY_HEADSET" ref="DEFAULT_MEDIA_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_SPEAKER" ref="DEFAULT_DEVICE_CATEGORY_SPEAKER_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_EARPIECE" ref="DEFAULT_MEDIA_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_EXT_MEDIA" ref="DEFAULT_MEDIA_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_HEARING_AID"  ref="DEFAULT_HEARING_AID_VOLUME_CURVE"/>
+    </volumeGroup>
     <volumeGroup>
         <name>rerouting</name>
         <indexMin>0</indexMin>
diff --git a/services/audiopolicy/engineconfigurable/src/Engine.cpp b/services/audiopolicy/engineconfigurable/src/Engine.cpp
index 752ba92..7e5c5e3 100644
--- a/services/audiopolicy/engineconfigurable/src/Engine.cpp
+++ b/services/audiopolicy/engineconfigurable/src/Engine.cpp
@@ -80,8 +80,9 @@
 
 status_t Engine::initCheck()
 {
-    if (mPolicyParameterMgr == nullptr || mPolicyParameterMgr->start() != NO_ERROR) {
-        ALOGE("%s: could not start Policy PFW", __FUNCTION__);
+    std::string error;
+    if (mPolicyParameterMgr == nullptr || mPolicyParameterMgr->start(error) != NO_ERROR) {
+        ALOGE("%s: could not start Policy PFW: %s", __FUNCTION__, error.c_str());
         return NO_INIT;
     }
     return EngineBase::initCheck();
@@ -162,21 +163,21 @@
     return mPolicyParameterMgr->getForceUse(usage);
 }
 
-status_t Engine::setDeviceConnectionState(const sp<DeviceDescriptor> devDesc,
+status_t Engine::setDeviceConnectionState(const sp<DeviceDescriptor> device,
                                           audio_policy_dev_state_t state)
 {
-    mPolicyParameterMgr->setDeviceConnectionState(devDesc, state);
-
-    if (audio_is_output_device(devDesc->type())) {
+    mPolicyParameterMgr->setDeviceConnectionState(
+                device->type(), device->address().c_str(), state);
+    if (audio_is_output_device(device->type())) {
         // FIXME: Use DeviceTypeSet when the interface is ready
         return mPolicyParameterMgr->setAvailableOutputDevices(
                     deviceTypesToBitMask(getApmObserver()->getAvailableOutputDevices().types()));
-    } else if (audio_is_input_device(devDesc->type())) {
+    } else if (audio_is_input_device(device->type())) {
         // FIXME: Use DeviceTypeSet when the interface is ready
         return mPolicyParameterMgr->setAvailableInputDevices(
                     deviceTypesToBitMask(getApmObserver()->getAvailableInputDevices().types()));
     }
-    return EngineBase::setDeviceConnectionState(devDesc, state);
+    return EngineBase::setDeviceConnectionState(device, state);
 }
 
 status_t Engine::loadAudioPolicyEngineConfig()
diff --git a/services/audiopolicy/engineconfigurable/wrapper/Android.bp b/services/audiopolicy/engineconfigurable/wrapper/Android.bp
index 6f59487..301ecc0 100644
--- a/services/audiopolicy/engineconfigurable/wrapper/Android.bp
+++ b/services/audiopolicy/engineconfigurable/wrapper/Android.bp
@@ -11,7 +11,6 @@
         "libbase_headers",
         "libaudiopolicycommon",
     ],
-    static_libs: ["libaudiopolicycomponents"],
     shared_libs: [
         "liblog",
         "libutils",
diff --git a/services/audiopolicy/engineconfigurable/wrapper/ParameterManagerWrapper.cpp b/services/audiopolicy/engineconfigurable/wrapper/ParameterManagerWrapper.cpp
index 465a6f9..1b3b9a0 100644
--- a/services/audiopolicy/engineconfigurable/wrapper/ParameterManagerWrapper.cpp
+++ b/services/audiopolicy/engineconfigurable/wrapper/ParameterManagerWrapper.cpp
@@ -92,7 +92,8 @@
 template <>
 struct ParameterManagerWrapper::parameterManagerElementSupported<ISelectionCriterionTypeInterface> {};
 
-ParameterManagerWrapper::ParameterManagerWrapper()
+ParameterManagerWrapper::ParameterManagerWrapper(bool enableSchemaVerification,
+                                                 const std::string &schemaUri)
     : mPfwConnectorLogger(new ParameterMgrPlatformConnectorLogger)
 {
     // Connector
@@ -104,6 +105,15 @@
 
     // Logger
     mPfwConnector->setLogger(mPfwConnectorLogger);
+
+    // Schema validation
+    std::string error;
+    bool ret = mPfwConnector->setValidateSchemasOnStart(enableSchemaVerification, error);
+    ALOGE_IF(!ret, "Failed to activate schema validation: %s", error.c_str());
+    if (enableSchemaVerification && ret && !schemaUri.empty()) {
+        ALOGE("Schema verification activated with schema URI: %s", schemaUri.c_str());
+        mPfwConnector->setSchemaUri(schemaUri);
+    }
 }
 
 status_t ParameterManagerWrapper::addCriterion(const std::string &name, bool isInclusive,
@@ -145,11 +155,10 @@
     delete mPfwConnector;
 }
 
-status_t ParameterManagerWrapper::start()
+status_t ParameterManagerWrapper::start(std::string &error)
 {
     ALOGD("%s: in", __FUNCTION__);
     /// Start PFW
-    std::string error;
     if (!mPfwConnector->start(error)) {
         ALOGE("%s: Policy PFW start error: %s", __FUNCTION__, error.c_str());
         return NO_INIT;
@@ -253,13 +262,13 @@
     return interface->getLiteralValue(valueToCheck, literalValue);
 }
 
-status_t ParameterManagerWrapper::setDeviceConnectionState(const sp<DeviceDescriptor> devDesc,
-                                                           audio_policy_dev_state_t state)
+status_t ParameterManagerWrapper::setDeviceConnectionState(
+        audio_devices_t type, const std::string address, audio_policy_dev_state_t state)
 {
-    std::string criterionName = audio_is_output_device(devDesc->type()) ?
+    std::string criterionName = audio_is_output_device(type) ?
                 gOutputDeviceAddressCriterionName : gInputDeviceAddressCriterionName;
 
-    ALOGV("%s: device with address %s %s", __FUNCTION__, devDesc->address().c_str(),
+    ALOGV("%s: device with address %s %s", __FUNCTION__, address.c_str(),
           state != AUDIO_POLICY_DEVICE_STATE_AVAILABLE? "disconnected" : "connected");
     ISelectionCriterionInterface *criterion =
             getElement<ISelectionCriterionInterface>(criterionName, mPolicyCriteria);
@@ -271,8 +280,8 @@
 
     auto criterionType = criterion->getCriterionType();
     int deviceAddressId;
-    if (not criterionType->getNumericalValue(devDesc->address().c_str(), deviceAddressId)) {
-        ALOGW("%s: unknown device address reported (%s)", __FUNCTION__, devDesc->address().c_str());
+    if (not criterionType->getNumericalValue(address.c_str(), deviceAddressId)) {
+        ALOGW("%s: unknown device address reported (%s)", __FUNCTION__, address.c_str());
         return BAD_TYPE;
     }
     int currentValueMask = criterion->getCriterionState();
diff --git a/services/audiopolicy/engineconfigurable/wrapper/include/ParameterManagerWrapper.h b/services/audiopolicy/engineconfigurable/wrapper/include/ParameterManagerWrapper.h
index 8443008..62b129a 100644
--- a/services/audiopolicy/engineconfigurable/wrapper/include/ParameterManagerWrapper.h
+++ b/services/audiopolicy/engineconfigurable/wrapper/include/ParameterManagerWrapper.h
@@ -16,9 +16,6 @@
 
 #pragma once
 
-#include <PolicyAudioPort.h>
-#include <HwModule.h>
-#include <DeviceDescriptor.h>
 #include <system/audio.h>
 #include <system/audio_policy.h>
 #include <utils/Errors.h>
@@ -47,16 +44,18 @@
     using Criteria = std::map<std::string, ISelectionCriterionInterface *>;
 
 public:
-    ParameterManagerWrapper();
+    ParameterManagerWrapper(bool enableSchemaVerification = false,
+                            const std::string &schemaUri = {});
     ~ParameterManagerWrapper();
 
     /**
      * Starts the platform state service.
      * It starts the parameter framework policy instance.
+     * @param[out] error contains a human readable error message if start failed
      *
-     * @return NO_ERROR if success, error code otherwise.
+     * @return NO_ERROR on success, error code otherwise with error set to a human readable string.
      */
-    status_t start();
+    status_t start(std::string &error);
 
     /**
      * The following API wrap policy action to criteria
@@ -117,7 +116,15 @@
      */
     status_t setAvailableOutputDevices(audio_devices_t outputDevices);
 
-    status_t setDeviceConnectionState(const sp<DeviceDescriptor> devDesc,
+    /**
+     * @brief setDeviceConnectionState propagates a state event for the given device(s)
+     * @param type bit mask of the device(s) whose state has changed
+     * @param address address of the device whose state has changed
+     * @param state new state of the given device
+     * @return NO_ERROR if the new state was correctly propagated to the Engine Parameter-Framework,
+     * error code otherwise.
+     */
+    status_t setDeviceConnectionState(audio_devices_t type, const std::string address,
                                       audio_policy_dev_state_t state);
 
     /**
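
The wrapper API above now reports start failures through an out parameter instead of swallowing the reason internally. The caller-side sketch below condenses the pattern used by Engine::initCheck() in this patch; the helper name, namespace qualification, and include paths are assumptions for illustration, and only the start(std::string&) signature comes from the change itself.

#define LOG_TAG "PfwStartSketch"
#include <string>
#include <utils/Errors.h>              // status_t, NO_ERROR, NO_INIT
#include <utils/Log.h>                 // ALOGE
#include <ParameterManagerWrapper.h>   // header modified above (include path assumed)

// Hypothetical helper, not part of the patch: shows how the new out parameter
// surfaces the human readable failure reason to the caller.
android::status_t startPolicyFramework(android::audio_policy::ParameterManagerWrapper &pfw) {
    std::string error;
    if (pfw.start(error) != android::NO_ERROR) {
        // The failure reason is now available to the caller for logging or reporting.
        ALOGE("Policy PFW start failed: %s", error.c_str());
        return android::NO_INIT;
    }
    return android::NO_ERROR;
}
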
diff --git a/services/audiopolicy/enginedefault/config/example/phone/audio_policy_engine_product_strategies.xml b/services/audiopolicy/enginedefault/config/example/phone/audio_policy_engine_product_strategies.xml
index 9398743..b1c0dcf 100644
--- a/services/audiopolicy/enginedefault/config/example/phone/audio_policy_engine_product_strategies.xml
+++ b/services/audiopolicy/enginedefault/config/example/phone/audio_policy_engine_product_strategies.xml
@@ -72,6 +72,12 @@
             <Attributes> <Usage value="AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE"/> </Attributes>
             <Attributes></Attributes>
         </AttributesGroup>
+        <AttributesGroup streamType="AUDIO_STREAM_ASSISTANT" volumeGroup="assistant">
+            <Attributes>
+                <ContentType value="AUDIO_CONTENT_TYPE_SPEECH"/>
+                <Usage value="AUDIO_USAGE_ASSISTANT"/>
+            </Attributes>
+        </AttributesGroup>
         <AttributesGroup streamType="AUDIO_STREAM_SYSTEM" volumeGroup="system">
             <Attributes> <Usage value="AUDIO_USAGE_ASSISTANCE_SONIFICATION"/> </Attributes>
         </AttributesGroup>
diff --git a/services/audiopolicy/enginedefault/config/example/phone/audio_policy_engine_stream_volumes.xml b/services/audiopolicy/enginedefault/config/example/phone/audio_policy_engine_stream_volumes.xml
index 707a184..a259950 100644
--- a/services/audiopolicy/enginedefault/config/example/phone/audio_policy_engine_stream_volumes.xml
+++ b/services/audiopolicy/enginedefault/config/example/phone/audio_policy_engine_stream_volumes.xml
@@ -207,6 +207,17 @@
     </volumeGroup>
 
     <volumeGroup>
+        <name>assistant</name>
+        <indexMin>0</indexMin>
+        <indexMax>15</indexMax>
+        <volume deviceCategory="DEVICE_CATEGORY_HEADSET" ref="DEFAULT_MEDIA_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_SPEAKER" ref="DEFAULT_DEVICE_CATEGORY_SPEAKER_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_EARPIECE" ref="DEFAULT_MEDIA_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_EXT_MEDIA" ref="DEFAULT_MEDIA_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_HEARING_AID"  ref="DEFAULT_HEARING_AID_VOLUME_CURVE"/>
+    </volumeGroup>
+
+    <volumeGroup>
         <name>rerouting</name>
         <indexMin>0</indexMin>
         <indexMax>1</indexMax>
diff --git a/services/audiopolicy/enginedefault/src/Engine.cpp b/services/audiopolicy/enginedefault/src/Engine.cpp
index a3fa974..a7d7cf6 100755
--- a/services/audiopolicy/enginedefault/src/Engine.cpp
+++ b/services/audiopolicy/enginedefault/src/Engine.cpp
@@ -463,8 +463,8 @@
                  "getDevicesForStrategy() no default device defined");
     }
 
-    ALOGVV("getDevices"
-           "ForStrategy() strategy %d, device %x", strategy, devices.types());
+    ALOGVV("getDevices ForStrategy() strategy %d, device %s",
+           strategy, dumpDeviceTypes(devices.types()).c_str());
     return devices;
 }
 
@@ -636,8 +636,9 @@
                 String8(preferredStrategyDevice.mAddress.c_str()),
                 AUDIO_FORMAT_DEFAULT);
         if (preferredAvailableDevDescr != nullptr) {
-            ALOGVV("%s using pref device 0x%08x/%s for strategy %u", __FUNCTION__,
-                   preferredStrategyDevice.mType, preferredStrategyDevice.mAddress, strategy);
+            ALOGVV("%s using pref device 0x%08x/%s for strategy %u",
+                   __func__, preferredStrategyDevice.mType,
+                   preferredStrategyDevice.mAddress.c_str(), strategy);
             return DeviceVector(preferredAvailableDevDescr);
         }
     }
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 2124646..c3b1b9d 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -971,19 +971,21 @@
     if (usePrimaryOutputFromPolicyMixes) {
         *output = policyDesc->mIoHandle;
         sp<AudioPolicyMix> mix = policyDesc->mPolicyMix.promote();
-        sp<DeviceDescriptor> deviceDesc =
-                mAvailableOutputDevices.getDevice(mix->mDeviceType,
-                                                  mix->mDeviceAddress,
-                                                  AUDIO_FORMAT_DEFAULT);
-        *selectedDeviceId = deviceDesc != 0 ? deviceDesc->getId() : AUDIO_PORT_HANDLE_NONE;
+        if (mix != nullptr) {
+            sp<DeviceDescriptor> deviceDesc =
+                    mAvailableOutputDevices.getDevice(mix->mDeviceType,
+                                                      mix->mDeviceAddress,
+                                                      AUDIO_FORMAT_DEFAULT);
+            *selectedDeviceId = deviceDesc != 0 ? deviceDesc->getId() : AUDIO_PORT_HANDLE_NONE;
 
-        ALOGV("getOutputForAttr() returns output %d", *output);
-        if (resultAttr->usage == AUDIO_USAGE_VIRTUAL_SOURCE) {
-            *outputType = API_OUT_MIX_PLAYBACK;
-        } else {
-            *outputType = API_OUTPUT_LEGACY;
+            ALOGV("getOutputForAttr() returns output %d", *output);
+            if (resultAttr->usage == AUDIO_USAGE_VIRTUAL_SOURCE) {
+                *outputType = API_OUT_MIX_PLAYBACK;
+            } else {
+                *outputType = API_OUTPUT_LEGACY;
+            }
+            return NO_ERROR;
         }
-        return NO_ERROR;
     }
     // Virtual sources must always be dynamicaly or explicitly routed
     if (resultAttr->usage == AUDIO_USAGE_VIRTUAL_SOURCE) {
@@ -1369,6 +1371,14 @@
     // For encoded streams force direct flag to prevent downstream mixing.
     sinkConfig->flags.output = static_cast<audio_output_flags_t>(
             sinkConfig->flags.output | AUDIO_OUTPUT_FLAG_DIRECT);
+    if (audio_is_iec61937_compatible(sinkConfig->format)) {
+        // For formats compatible with IEC61937 encapsulation, assume that
+        // the record thread input from MSD is IEC61937 framed (for proportional buffer sizing).
+        // Add the AUDIO_OUTPUT_FLAG_IEC958_NONAUDIO flag so downstream HAL can distinguish between
+        // raw and IEC61937 framed streams.
+        sinkConfig->flags.output = static_cast<audio_output_flags_t>(
+                sinkConfig->flags.output | AUDIO_OUTPUT_FLAG_IEC958_NONAUDIO);
+    }
     sourceConfig->sample_rate = bestSinkConfig.sample_rate;
     // Specify exact channel mask to prevent guessing by bit count in PatchPanel.
     sourceConfig->channel_mask = audio_channel_mask_out_to_in(bestSinkConfig.channel_mask);
@@ -1638,7 +1648,7 @@
     DeviceVector devices;
     sp<AudioPolicyMix> policyMix = outputDesc->mPolicyMix.promote();
     const char *address = NULL;
-    if (policyMix != NULL) {
+    if (policyMix != nullptr) {
         audio_devices_t newDeviceType;
         address = policyMix->mDeviceAddress.string();
         if ((policyMix->mRouteFlags & MIX_ROUTE_FLAG_LOOP_BACK) == MIX_ROUTE_FLAG_LOOP_BACK) {
@@ -1820,7 +1830,7 @@
             sp<AudioPolicyMix> policyMix = outputDesc->mPolicyMix.promote();
             if (isSingleDeviceType(
                     outputDesc->devices().types(), &audio_is_remote_submix_device) &&
-                policyMix != NULL &&
+                policyMix != nullptr &&
                 policyMix->mMixType == MIX_TYPE_RECORDERS) {
                 setDeviceConnectionStateInt(AUDIO_DEVICE_IN_REMOTE_SUBMIX,
                                             AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
@@ -2267,7 +2277,7 @@
     if (status == NO_ERROR && inputDesc->activeCount() == 1) {
         sp<AudioPolicyMix> policyMix = inputDesc->mPolicyMix.promote();
         // if input maps to a dynamic policy with an activity listener, notify of state change
-        if ((policyMix != NULL)
+        if ((policyMix != nullptr)
                 && ((policyMix->mCbFlags & AudioMix::kCbFlagNotifyActivity) != 0)) {
             mpClientInterface->onDynamicPolicyMixStateUpdate(policyMix->mDeviceAddress,
                     MIX_STATE_MIXING);
@@ -2284,7 +2294,7 @@
         // For remote submix (a virtual device), we open only one input per capture request.
         if (audio_is_remote_submix_device(inputDesc->getDeviceType())) {
             String8 address = String8("");
-            if (policyMix == NULL) {
+            if (policyMix == nullptr) {
                 address = String8("0");
             } else if (policyMix->mMixType == MIX_TYPE_PLAYERS) {
                 address = policyMix->mDeviceAddress;
@@ -2331,7 +2341,7 @@
     } else {
         sp<AudioPolicyMix> policyMix = inputDesc->mPolicyMix.promote();
         // if input maps to a dynamic policy with an activity listener, notify of state change
-        if ((policyMix != NULL)
+        if ((policyMix != nullptr)
                 && ((policyMix->mCbFlags & AudioMix::kCbFlagNotifyActivity) != 0)) {
             mpClientInterface->onDynamicPolicyMixStateUpdate(policyMix->mDeviceAddress,
                     MIX_STATE_IDLE);
@@ -2341,7 +2351,7 @@
         // used by a policy mix of type MIX_TYPE_RECORDERS
         if (audio_is_remote_submix_device(inputDesc->getDeviceType())) {
             String8 address = String8("");
-            if (policyMix == NULL) {
+            if (policyMix == nullptr) {
                 address = String8("0");
             } else if (policyMix->mMixType == MIX_TYPE_PLAYERS) {
                 address = policyMix->mDeviceAddress;
@@ -2762,12 +2772,14 @@
                                 int session,
                                 int id)
 {
-    ssize_t index = mOutputs.indexOfKey(io);
-    if (index < 0) {
-        index = mInputs.indexOfKey(io);
+    if (session != AUDIO_SESSION_DEVICE) {
+        ssize_t index = mOutputs.indexOfKey(io);
         if (index < 0) {
-            ALOGW("registerEffect() unknown io %d", io);
-            return INVALID_OPERATION;
+            index = mInputs.indexOfKey(io);
+            if (index < 0) {
+                ALOGW("registerEffect() unknown io %d", io);
+                return INVALID_OPERATION;
+            }
         }
     }
     return mEffects.registerEffect(desc, io, session, id,
@@ -2787,16 +2799,6 @@
     return mEffects.unregisterEffect(id);
 }
 
-void AudioPolicyManager::cleanUpEffectsForIo(audio_io_handle_t io)
-{
-    EffectDescriptorCollection effects = mEffects.getEffectsForIo(io);
-    for (size_t i = 0; i < effects.size(); i++) {
-        ALOGW("%s removing stale effect %s, id %d on closed IO %d",
-              __func__, effects.valueAt(i)->mDesc.name, effects.keyAt(i), io);
-        unregisterEffect(effects.keyAt(i));
-    }
-}
-
 status_t AudioPolicyManager::setEffectEnabled(int id, bool enabled)
 {
     sp<EffectDescriptor> effect = mEffects.getEffect(id);
@@ -3148,6 +3150,49 @@
     return mEngine->getPreferredDeviceForStrategy(strategy, device);
 }
 
+status_t AudioPolicyManager::setUserIdDeviceAffinities(int userId,
+        const Vector<AudioDeviceTypeAddr>& devices) {
+    ALOGI("%s() userId=%d num devices %zu", __FUNCTION__, userId, devices.size());
+    // userId/device affinity is only for output devices
+    for (size_t i = 0; i < devices.size(); i++) {
+        if (!audio_is_output_device(devices[i].mType)) {
+            ALOGE("%s() device=%08x is NOT an output device",
+                    __FUNCTION__,
+                    devices[i].mType);
+            return BAD_VALUE;
+        }
+    }
+
+    status_t status = mPolicyMixes.setUserIdDeviceAffinities(userId, devices);
+    if (status != NO_ERROR) {
+        ALOGE("%s() could not set device affinity for userId %d",
+            __FUNCTION__, userId);
+        return status;
+    }
+
+    // reevaluate outputs for all devices
+    checkForDeviceAndOutputChanges();
+    updateCallAndOutputRouting();
+
+    return NO_ERROR;
+}
+
+status_t AudioPolicyManager::removeUserIdDeviceAffinities(int userId) {
+    ALOGI("%s() userId=%d", __FUNCTION__, userId);
+    status_t status = mPolicyMixes.removeUserIdDeviceAffinities(userId);
+    if (status != NO_ERROR) {
+        ALOGE("%s() Could not remove all device affinities fo userId = %d",
+            __FUNCTION__, userId);
+        return status;
+    }
+
+    // reevaluate outputs for all devices
+    checkForDeviceAndOutputChanges();
+    updateCallAndOutputRouting();
+
+    return NO_ERROR;
+}
+
 void AudioPolicyManager::dump(String8 *dst) const
 {
     dst->appendFormat("\nAudioPolicyManager Dump: %p\n", this);
@@ -5043,8 +5088,6 @@
             setMsdPatch();
         }
     }
-
-    cleanUpEffectsForIo(output);
 }
 
 void AudioPolicyManager::closeInput(audio_io_handle_t input)
@@ -5076,8 +5119,6 @@
             mInputs.activeInputsCountOnDevices(primaryInputDevices) == 0) {
         mpClientInterface->setSoundTriggerCaptureState(false);
     }
-
-    cleanUpEffectsForIo(input);
 }
 
 SortedVector<audio_io_handle_t> AudioPolicyManager::getOutputsForDevices(
@@ -5344,7 +5385,8 @@
         auto attr = mEngine->getAllAttributesForProductStrategy(productStrategy).front();
 
         if ((hasVoiceStream(streams) &&
-             (isInCall() || mOutputs.isStrategyActiveOnSameModule(productStrategy, outputDesc))) ||
+             (isInCall() || mOutputs.isStrategyActiveOnSameModule(productStrategy, outputDesc)) &&
+             !isStreamActive(AUDIO_STREAM_ENFORCED_AUDIBLE, 0)) ||
              ((hasStream(streams, AUDIO_STREAM_ALARM) || hasStream(streams, AUDIO_STREAM_ENFORCED_AUDIBLE)) &&
                 mOutputs.isStrategyActiveOnSameModule(productStrategy, outputDesc)) ||
                 outputDesc->isStrategyActive(productStrategy)) {
@@ -5440,6 +5482,35 @@
     return deviceTypesToBitMask(devices.types());
 }
 
+status_t AudioPolicyManager::getDevicesForAttributes(
+        const audio_attributes_t &attr, AudioDeviceTypeAddrVector *devices) {
+    if (devices == nullptr) {
+        return BAD_VALUE;
+    }
+    // check dynamic policies but only for primary descriptors (secondary not used for audible
+    // audio routing, only used for duplication for playback capture)
+    sp<SwAudioOutputDescriptor> policyDesc;
+    status_t status = mPolicyMixes.getOutputForAttr(attr, 0 /*uid unknown here*/,
+            AUDIO_OUTPUT_FLAG_NONE, policyDesc, nullptr);
+    if (status != OK) {
+        return status;
+    }
+    if (policyDesc != nullptr) {
+        sp<AudioPolicyMix> mix = policyDesc->mPolicyMix.promote();
+        if (mix != nullptr) {
+            AudioDeviceTypeAddr device(mix->mDeviceType, mix->mDeviceAddress.c_str());
+            devices->push_back(device);
+            return NO_ERROR;
+        }
+    }
+
+    DeviceVector curDevices = mEngine->getOutputDevicesForAttributes(attr, nullptr, false);
+    for (const auto& device : curDevices) {
+        devices->push_back(device->getDeviceTypeAddr());
+    }
+    return NO_ERROR;
+}
+
 void AudioPolicyManager::handleNotificationRoutingForStream(audio_stream_type_t stream) {
     switch(stream) {
     case AUDIO_STREAM_MUSIC:
@@ -6019,7 +6090,7 @@
                                             int delayMs,
                                             bool force)
 {
-    ALOGVV("applyStreamVolumes() for device %08x", device);
+    ALOGVV("applyStreamVolumes() for device %s", dumpDeviceTypes(deviceTypes).c_str());
     for (const auto &volumeGroup : mEngine->getVolumeGroups()) {
         auto &curves = getVolumeCurves(toVolumeSource(volumeGroup));
         checkAndSetVolume(curves, toVolumeSource(volumeGroup),
@@ -6113,6 +6184,10 @@
     case AUDIO_USAGE_VIRTUAL_SOURCE:
     case AUDIO_USAGE_ASSISTANT:
     case AUDIO_USAGE_CALL_ASSISTANT:
+    case AUDIO_USAGE_EMERGENCY:
+    case AUDIO_USAGE_SAFETY:
+    case AUDIO_USAGE_VEHICLE_STATUS:
+    case AUDIO_USAGE_ANNOUNCEMENT:
         break;
     default:
         return false;
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index f6e4fc1..10adeb4 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -193,6 +193,10 @@
         // return the enabled output devices for the given stream type
         virtual audio_devices_t getDevicesForStream(audio_stream_type_t stream);
 
+        virtual status_t getDevicesForAttributes(
+                const audio_attributes_t &attributes,
+                AudioDeviceTypeAddrVector *devices);
+
         virtual audio_io_handle_t getOutputForEffect(const effect_descriptor_t *desc = NULL);
         virtual status_t registerEffect(const effect_descriptor_t *desc,
                                         audio_io_handle_t io,
@@ -258,6 +262,9 @@
         virtual status_t setUidDeviceAffinities(uid_t uid,
                 const Vector<AudioDeviceTypeAddr>& devices);
         virtual status_t removeUidDeviceAffinities(uid_t uid);
+        virtual status_t setUserIdDeviceAffinities(int userId,
+                const Vector<AudioDeviceTypeAddr>& devices);
+        virtual status_t removeUserIdDeviceAffinities(int userId);
 
         virtual status_t setPreferredDeviceForStrategy(product_strategy_t strategy,
                                                    const AudioDeviceTypeAddr &device);
@@ -886,8 +893,6 @@
                 int delayMs,
                 uid_t uid,
                 sp<AudioPatch> *patchDescPtr);
-
-        void cleanUpEffectsForIo(audio_io_handle_t io);
 };
 
 };
diff --git a/services/audiopolicy/service/Android.mk b/services/audiopolicy/service/Android.mk
index 80f4eab..d34dbe3 100644
--- a/services/audiopolicy/service/Android.mk
+++ b/services/audiopolicy/service/Android.mk
@@ -26,6 +26,7 @@
     libaudioutils \
     libaudiofoundation \
     libhardware_legacy \
+    libaudiopolicy \
     libaudiopolicymanager \
     libmedia_helper \
     libmediametrics \
diff --git a/services/audiopolicy/service/AudioPolicyEffects.cpp b/services/audiopolicy/service/AudioPolicyEffects.cpp
index 4947714..60caa31 100644
--- a/services/audiopolicy/service/AudioPolicyEffects.cpp
+++ b/services/audiopolicy/service/AudioPolicyEffects.cpp
@@ -562,7 +562,8 @@
     AUDIO_STREAM_BLUETOOTH_SCO_TAG,
     AUDIO_STREAM_ENFORCED_AUDIBLE_TAG,
     AUDIO_STREAM_DTMF_TAG,
-    AUDIO_STREAM_TTS_TAG
+    AUDIO_STREAM_TTS_TAG,
+    AUDIO_STREAM_ASSISTANT_TAG
 };
 
 // returns the audio_stream_t enum corresponding to the output stream name or
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index 44c1d09..ee32afe 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -25,6 +25,44 @@
 
 namespace android {
 
+const std::vector<audio_usage_t>& SYSTEM_USAGES = {
+    AUDIO_USAGE_CALL_ASSISTANT,
+    AUDIO_USAGE_EMERGENCY,
+    AUDIO_USAGE_SAFETY,
+    AUDIO_USAGE_VEHICLE_STATUS,
+    AUDIO_USAGE_ANNOUNCEMENT
+};
+
+bool isSystemUsage(audio_usage_t usage) {
+    return std::find(std::begin(SYSTEM_USAGES), std::end(SYSTEM_USAGES), usage)
+        != std::end(SYSTEM_USAGES);
+}
+
+bool AudioPolicyService::isSupportedSystemUsage(audio_usage_t usage) {
+    return std::find(std::begin(mSupportedSystemUsages), std::end(mSupportedSystemUsages), usage)
+        != std::end(mSupportedSystemUsages);
+}
+
+status_t AudioPolicyService::validateUsage(audio_usage_t usage) {
+    return validateUsage(usage, IPCThreadState::self()->getCallingPid(),
+        IPCThreadState::self()->getCallingUid());
+}
+
+status_t AudioPolicyService::validateUsage(audio_usage_t usage, pid_t pid, uid_t uid) {
+    if (isSystemUsage(usage)) {
+        if (isSupportedSystemUsage(usage)) {
+            if (!modifyAudioRoutingAllowed(pid, uid)) {
+                ALOGE("permission denied: modify audio routing not allowed for uid %d", uid);
+                return PERMISSION_DENIED;
+            }
+        } else {
+            return BAD_VALUE;
+        }
+    }
+    return NO_ERROR;
+}
+
+
 
 // ----------------------------------------------------------------------------
 
@@ -181,6 +219,12 @@
     if (mAudioPolicyManager == NULL) {
         return NO_INIT;
     }
+
+    status_t result = validateUsage(attr->usage, pid, uid);
+    if (result != NO_ERROR) {
+        return result;
+    }
+
     ALOGV("%s()", __func__);
     Mutex::Autolock _l(mLock);
 
@@ -199,7 +243,7 @@
     }
     AutoCallerClear acc;
     AudioPolicyInterface::output_type_t outputType;
-    status_t result = mAudioPolicyManager->getOutputForAttr(attr, output, session, stream, uid,
+    result = mAudioPolicyManager->getOutputForAttr(attr, output, session, stream, uid,
                                                  config,
                                                  &flags, selectedDeviceId, portId,
                                                  secondaryOutputs,
@@ -363,12 +407,22 @@
         return NO_INIT;
     }
 
+    status_t result = validateUsage(attr->usage, pid, uid);
+    if (result != NO_ERROR) {
+        return result;
+    }
+
+    audio_source_t inputSource = attr->source;
+    if (inputSource == AUDIO_SOURCE_DEFAULT) {
+        inputSource = AUDIO_SOURCE_MIC;
+    }
+
     // already checked by client, but double-check in case the client wrapper is bypassed
-    if ((attr->source < AUDIO_SOURCE_DEFAULT)
-            || (attr->source >= AUDIO_SOURCE_CNT
-                && attr->source != AUDIO_SOURCE_HOTWORD
-                && attr->source != AUDIO_SOURCE_FM_TUNER
-                && attr->source != AUDIO_SOURCE_ECHO_REFERENCE)) {
+    if ((inputSource < AUDIO_SOURCE_DEFAULT)
+            || (inputSource >= AUDIO_SOURCE_CNT
+                && inputSource != AUDIO_SOURCE_HOTWORD
+                && inputSource != AUDIO_SOURCE_FM_TUNER
+                && inputSource != AUDIO_SOURCE_ECHO_REFERENCE)) {
         return BAD_VALUE;
     }
 
@@ -399,17 +453,17 @@
     }
 
     bool canCaptureOutput = captureAudioOutputAllowed(pid, uid);
-    if ((attr->source == AUDIO_SOURCE_VOICE_UPLINK ||
-        attr->source == AUDIO_SOURCE_VOICE_DOWNLINK ||
-        attr->source == AUDIO_SOURCE_VOICE_CALL ||
-        attr->source == AUDIO_SOURCE_ECHO_REFERENCE||
-        attr->source == AUDIO_SOURCE_FM_TUNER) &&
+    if ((inputSource == AUDIO_SOURCE_VOICE_UPLINK ||
+        inputSource == AUDIO_SOURCE_VOICE_DOWNLINK ||
+        inputSource == AUDIO_SOURCE_VOICE_CALL ||
+        inputSource == AUDIO_SOURCE_ECHO_REFERENCE ||
+        inputSource == AUDIO_SOURCE_FM_TUNER) &&
         !canCaptureOutput) {
         return PERMISSION_DENIED;
     }
 
     bool canCaptureHotword = captureHotwordAllowed(opPackageName, pid, uid);
-    if ((attr->source == AUDIO_SOURCE_HOTWORD) && !canCaptureHotword) {
+    if ((inputSource == AUDIO_SOURCE_HOTWORD) && !canCaptureHotword) {
         return BAD_VALUE;
     }
 
@@ -474,7 +528,7 @@
 
     if (audioPolicyEffects != 0) {
         // create audio pre processors according to input source
-        status_t status = audioPolicyEffects->addInputEffects(*input, attr->source, session);
+        status_t status = audioPolicyEffects->addInputEffects(*input, inputSource, session);
         if (status != NO_ERROR && status != ALREADY_EXISTS) {
             ALOGW("Failed to add effects on input %d", *input);
         }
@@ -801,6 +855,17 @@
     return mAudioPolicyManager->getDevicesForStream(stream);
 }
 
+status_t AudioPolicyService::getDevicesForAttributes(const AudioAttributes &aa,
+                                                     AudioDeviceTypeAddrVector *devices) const
+{
+    if (mAudioPolicyManager == NULL) {
+        return NO_INIT;
+    }
+    Mutex::Autolock _l(mLock);
+    AutoCallerClear acc;
+    return mAudioPolicyManager->getDevicesForAttributes(aa.getAttributes(), devices);
+}
+
 audio_io_handle_t AudioPolicyService::getOutputForEffect(const effect_descriptor_t *desc)
 {
     // FIXME change return type to status_t, and return NO_INIT here
@@ -986,6 +1051,22 @@
     return audioPolicyEffects->removeStreamDefaultEffect(id);
 }
 
+status_t AudioPolicyService::setSupportedSystemUsages(const std::vector<audio_usage_t>& systemUsages) {
+    Mutex::Autolock _l(mLock);
+    if (!modifyAudioRoutingAllowed()) {
+        return PERMISSION_DENIED;
+    }
+
+    bool areAllSystemUsages = std::all_of(begin(systemUsages), end(systemUsages),
+        [](audio_usage_t usage) { return isSystemUsage(usage); });
+    if (!areAllSystemUsages) {
+        return BAD_VALUE;
+    }
+
+    mSupportedSystemUsages = systemUsages;
+    return NO_ERROR;
+}
+
 status_t AudioPolicyService::setAllowedCapturePolicy(uid_t uid, audio_flags_mask_t capturePolicy) {
     Mutex::Autolock _l(mLock);
     if (mAudioPolicyManager == NULL) {
@@ -1017,6 +1098,12 @@
         ALOGV("mAudioPolicyManager == NULL");
         return false;
     }
+
+    status_t result = validateUsage(attributes.usage);
+    if (result != NO_ERROR) {
+        return result;
+    }
+
     Mutex::Autolock _l(mLock);
     return mAudioPolicyManager->isDirectOutputSupported(config, attributes);
 }
@@ -1133,14 +1220,39 @@
         return PERMISSION_DENIED;
     }
 
+    // Require CAPTURE_VOICE_COMMUNICATION_OUTPUT if one of the
+    // mixes is a render|loopback mix that aim to capture audio played with
+    // USAGE_VOICE_COMMUNICATION.
+    bool needCaptureVoiceCommunicationOutput =
+        std::any_of(mixes.begin(), mixes.end(), [](auto& mix) {
+            return is_mix_loopback_render(mix.mRouteFlags) &&
+                mix.hasMatchingRuleForUsage([] (auto usage) {
+                    return usage == AUDIO_USAGE_VOICE_COMMUNICATION;});
+            });
+
+    // Require CAPTURE_MEDIA_OUTPUT if there is a mix for privileged capture
+    // which is trying to capture any usage which is not USAGE_VOICE_COMMUNICATION.
+    // (If USAGE_VOICE_COMMUNICATION should be captured, then CAPTURE_VOICE_COMMUNICATION_OUTPUT
+    //  is required, even if it is not privileged capture).
     bool needCaptureMediaOutput = std::any_of(mixes.begin(), mixes.end(), [](auto& mix) {
-            return mix.mAllowPrivilegedPlaybackCapture; });
+            return mix.mAllowPrivilegedPlaybackCapture &&
+                mix.hasMatchingRuleForUsage([] (auto usage) {
+                    return usage != AUDIO_USAGE_VOICE_COMMUNICATION;
+                });
+            });
+
     const uid_t callingUid = IPCThreadState::self()->getCallingUid();
     const pid_t callingPid = IPCThreadState::self()->getCallingPid();
+
     if (needCaptureMediaOutput && !captureMediaOutputAllowed(callingPid, callingUid)) {
         return PERMISSION_DENIED;
     }
 
+    if (needCaptureVoiceCommunicationOutput &&
+        !captureVoiceCommunicationOutputAllowed(callingPid, callingUid)) {
+        return PERMISSION_DENIED;
+    }
+
     if (mAudioPolicyManager == NULL) {
         return NO_INIT;
     }
@@ -1177,6 +1289,31 @@
     return mAudioPolicyManager->removeUidDeviceAffinities(uid);
 }
 
+status_t AudioPolicyService::setUserIdDeviceAffinities(int userId,
+        const Vector<AudioDeviceTypeAddr>& devices) {
+    Mutex::Autolock _l(mLock);
+    if (!modifyAudioRoutingAllowed()) {
+        return PERMISSION_DENIED;
+    }
+    if (mAudioPolicyManager == NULL) {
+        return NO_INIT;
+    }
+    AutoCallerClear acc;
+    return mAudioPolicyManager->setUserIdDeviceAffinities(userId, devices);
+}
+
+status_t AudioPolicyService::removeUserIdDeviceAffinities(int userId) {
+    Mutex::Autolock _l(mLock);
+    if (!modifyAudioRoutingAllowed()) {
+        return PERMISSION_DENIED;
+    }
+    if (mAudioPolicyManager == NULL) {
+        return NO_INIT;
+    }
+    AutoCallerClear acc;
+    return mAudioPolicyManager->removeUserIdDeviceAffinities(userId);
+}
+
 status_t AudioPolicyService::startAudioSource(const struct audio_port_config *source,
                                               const audio_attributes_t *attributes,
                                               audio_port_handle_t *portId)
@@ -1185,6 +1322,12 @@
     if (mAudioPolicyManager == NULL) {
         return NO_INIT;
     }
+
+    status_t result = validateUsage(attributes->usage);
+    if (result != NO_ERROR) {
+        return result;
+    }
+
     // startAudioSource should be created as the calling uid
     const uid_t callingUid = IPCThreadState::self()->getCallingUid();
     AutoCallerClear acc;
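
The service-side changes above gate the new system usages behind a two-step check: only usages from the SYSTEM_USAGES list are restricted, and for those the usage must have been whitelisted via setSupportedSystemUsages() and the caller must hold the modify-audio-routing permission. The self-contained sketch below restates that decision logic; the enum, function, and parameter names are illustrative stand-ins for validateUsage() and modifyAudioRoutingAllowed().

#include <algorithm>
#include <vector>
#include <system/audio.h>   // audio_usage_t and AUDIO_USAGE_* values (assumed location)

enum class UsageCheck { Allowed, NeedsRoutingPermission, Rejected };

// Same list as SYSTEM_USAGES introduced above.
static const std::vector<audio_usage_t> kSystemUsages = {
    AUDIO_USAGE_CALL_ASSISTANT, AUDIO_USAGE_EMERGENCY, AUDIO_USAGE_SAFETY,
    AUDIO_USAGE_VEHICLE_STATUS, AUDIO_USAGE_ANNOUNCEMENT,
};

UsageCheck checkUsage(audio_usage_t usage,
                      const std::vector<audio_usage_t> &supportedSystemUsages,
                      bool callerMayModifyRouting) {
    const bool isSystem = std::find(kSystemUsages.begin(), kSystemUsages.end(), usage)
            != kSystemUsages.end();
    if (!isSystem) {
        return UsageCheck::Allowed;   // non-system usages are not restricted by this check
    }
    const bool isSupported = std::find(supportedSystemUsages.begin(),
            supportedSystemUsages.end(), usage) != supportedSystemUsages.end();
    if (!isSupported) {
        return UsageCheck::Rejected;  // corresponds to BAD_VALUE in validateUsage()
    }
    // Corresponds to PERMISSION_DENIED vs NO_ERROR in validateUsage().
    return callerMayModifyRouting ? UsageCheck::Allowed : UsageCheck::NeedsRoutingPermission;
}
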
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index c83512f..e5c36ea 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -74,6 +74,8 @@
 
         mAudioPolicyClient = new AudioPolicyClient(this);
         mAudioPolicyManager = createAudioPolicyManager(mAudioPolicyClient);
+
+        mSupportedSystemUsages = std::vector<audio_usage_t> {};
     }
     // load audio processing modules
     sp<AudioPolicyEffects>audioPolicyEffects = new AudioPolicyEffects();
@@ -392,6 +394,14 @@
     snprintf(buffer, SIZE, "Command Thread: %p\n", mAudioCommandThread.get());
     result.append(buffer);
 
+    snprintf(buffer, SIZE, "Supported System Usages:\n");
+    result.append(buffer);
+    for (std::vector<audio_usage_t>::iterator it = mSupportedSystemUsages.begin();
+        it != mSupportedSystemUsages.end(); ++it) {
+        snprintf(buffer, SIZE, "\t%d\n", *it);
+        result.append(buffer);
+    }
+
     write(fd, result.string(), result.size());
     return NO_ERROR;
 }
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index 135b3ac..e297f34 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -127,6 +127,8 @@
 
     virtual uint32_t getStrategyForStream(audio_stream_type_t stream);
     virtual audio_devices_t getDevicesForStream(audio_stream_type_t stream);
+    virtual status_t getDevicesForAttributes(const AudioAttributes &aa,
+                                             AudioDeviceTypeAddrVector *devices) const;
 
     virtual audio_io_handle_t getOutputForEffect(const effect_descriptor_t *desc);
     virtual status_t registerEffect(const effect_descriptor_t *desc,
@@ -185,6 +187,7 @@
                                      audio_io_handle_t output,
                                      int delayMs = 0);
     virtual status_t setVoiceVolume(float volume, int delayMs = 0);
+    status_t setSupportedSystemUsages(const std::vector<audio_usage_t>& systemUsages);
     status_t setAllowedCapturePolicy(uint_t uid, audio_flags_mask_t capturePolicy) override;
     virtual bool isOffloadSupported(const audio_offload_info_t &config);
     virtual bool isDirectOutputSupported(const audio_config_base_t& config,
@@ -232,6 +235,9 @@
 
     virtual status_t getPreferredDeviceForStrategy(product_strategy_t strategy,
                                                    AudioDeviceTypeAddr &device);
+    virtual status_t setUserIdDeviceAffinities(int userId, const Vector<AudioDeviceTypeAddr>& devices);
+
+    virtual status_t removeUserIdDeviceAffinities(int userId);
 
     virtual status_t startAudioSource(const struct audio_port_config *source,
                                       const audio_attributes_t *attributes,
@@ -342,6 +348,10 @@
 
     app_state_t apmStatFromAmState(int amState);
 
+    bool isSupportedSystemUsage(audio_usage_t usage);
+    status_t validateUsage(audio_usage_t usage);
+    status_t validateUsage(audio_usage_t usage, pid_t pid, uid_t uid);
+
     void updateUidStates();
     void updateUidStates_l();
 
@@ -861,6 +871,7 @@
     struct audio_policy *mpAudioPolicy;
     AudioPolicyInterface *mAudioPolicyManager;
     AudioPolicyClient *mAudioPolicyClient;
+    std::vector<audio_usage_t> mSupportedSystemUsages;
 
     DefaultKeyedVector< int64_t, sp<NotificationClient> >    mNotificationClients;
     Mutex mNotificationClientsLock;  // protects mNotificationClients
diff --git a/services/audiopolicy/tests/audiopolicymanager_tests.cpp b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
index e4a19ea..2a8349c 100644
--- a/services/audiopolicy/tests/audiopolicymanager_tests.cpp
+++ b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
@@ -800,6 +800,8 @@
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_VIRTUAL_SOURCE,
                                      AUDIO_SOURCE_DEFAULT, 0, "addr=remote_submix_media"},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_ASSISTANT,
+                                     AUDIO_SOURCE_DEFAULT, 0, "addr=remote_submix_media"},
+                (audio_attributes_t){AUDIO_CONTENT_TYPE_SPEECH, AUDIO_USAGE_ASSISTANT,
                                      AUDIO_SOURCE_DEFAULT, 0, "addr=remote_submix_media"}
                 )
         );
@@ -841,6 +843,8 @@
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_GAME,
                                      AUDIO_SOURCE_DEFAULT, 0, ""},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_ASSISTANT,
+                                     AUDIO_SOURCE_DEFAULT, 0, ""},
+                (audio_attributes_t){AUDIO_CONTENT_TYPE_SPEECH, AUDIO_USAGE_ASSISTANT,
                                      AUDIO_SOURCE_DEFAULT, 0, ""}
                 )
         );
diff --git a/services/camera/libcameraservice/Android.bp b/services/camera/libcameraservice/Android.bp
index 9cc15cd..496a21b 100644
--- a/services/camera/libcameraservice/Android.bp
+++ b/services/camera/libcameraservice/Android.bp
@@ -28,6 +28,7 @@
         "common/CameraDeviceBase.cpp",
         "common/CameraOfflineSessionBase.cpp",
         "common/CameraProviderManager.cpp",
+        "common/DepthPhotoProcessor.cpp",
         "common/FrameProcessorBase.cpp",
         "api1/CameraClient.cpp",
         "api1/Camera2Client.cpp",
@@ -40,11 +41,13 @@
         "api1/client2/CaptureSequencer.cpp",
         "api1/client2/ZslProcessor.cpp",
         "api2/CameraDeviceClient.cpp",
+        "api2/CameraOfflineSessionClient.cpp",
         "api2/CompositeStream.cpp",
         "api2/DepthCompositeStream.cpp",
         "api2/HeicEncoderInfoManager.cpp",
         "api2/HeicCompositeStream.cpp",
         "device1/CameraHardwareInterface.cpp",
+        "device3/BufferUtils.cpp",
         "device3/Camera3Device.cpp",
         "device3/Camera3OfflineSession.cpp",
         "device3/Camera3Stream.cpp",
@@ -56,7 +59,11 @@
         "device3/StatusTracker.cpp",
         "device3/Camera3BufferManager.cpp",
         "device3/Camera3StreamSplitter.cpp",
+        "device3/CoordinateMapper.cpp",
         "device3/DistortionMapper.cpp",
+        "device3/ZoomRatioMapper.cpp",
+        "device3/Camera3OutputStreamInterface.cpp",
+        "device3/Camera3OutputUtils.cpp",
         "gui/RingBufferConsumer.cpp",
         "utils/CameraThreadState.cpp",
         "hidl/AidlCameraDeviceCallbacks.cpp",
@@ -89,10 +96,12 @@
         "libmediautils",
         "libcamera_client",
         "libcamera_metadata",
+        "libdynamic_depth",
         "libfmq",
         "libgui",
         "libhardware",
         "libhidlbase",
+        "libimage_io",
         "libjpeg",
         "libmedia_codeclist",
         "libmedia_omx",
@@ -100,6 +109,7 @@
         "libsensorprivacy",
         "libstagefright",
         "libstagefright_foundation",
+        "libxml2",
         "libyuv",
         "android.frameworks.cameraservice.common@2.0",
         "android.frameworks.cameraservice.service@2.0",
@@ -141,40 +151,3 @@
 
 }
 
-cc_library_shared {
-    name: "libdepthphoto",
-
-    srcs: [
-        "utils/ExifUtils.cpp",
-        "common/DepthPhotoProcessor.cpp",
-    ],
-
-    shared_libs: [
-        "libimage_io",
-        "libdynamic_depth",
-        "libxml2",
-        "liblog",
-        "libutilscallstack",
-        "libutils",
-        "libcutils",
-        "libjpeg",
-        "libmemunreachable",
-        "libexif",
-        "libcamera_client",
-    ],
-
-    include_dirs: [
-        "external/dynamic_depth/includes",
-        "external/dynamic_depth/internal",
-    ],
-
-    export_include_dirs: ["."],
-
-    cflags: [
-        "-Wall",
-        "-Wextra",
-        "-Werror",
-        "-Wno-ignored-qualifiers",
-    ],
-
-}
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index 18ed3a5..2fe7179 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -128,6 +128,7 @@
 static constexpr int32_t kVendorClientScore = 200;
 // Matches with PROCESS_STATE_PERSISTENT_UI in ActivityManager.java
 static constexpr int32_t kVendorClientState = 1;
+const String8 CameraService::kOfflineDevice("offline-");
 
 Mutex CameraService::sProxyMutex;
 sp<hardware::ICameraServiceProxy> CameraService::sCameraServiceProxy;
@@ -394,7 +395,7 @@
         // to this device until the status changes
         updateStatus(StatusInternal::NOT_PRESENT, id);
 
-        sp<BasicClient> clientToDisconnect;
+        sp<BasicClient> clientToDisconnectOnline, clientToDisconnectOffline;
         {
             // Don't do this in updateStatus to avoid deadlock over mServiceLock
             Mutex::Autolock lock(mServiceLock);
@@ -402,23 +403,14 @@
             // Remove cached shim parameters
             state->setShimParams(CameraParameters());
 
-            // Remove the client from the list of active clients, if there is one
-            clientToDisconnect = removeClientLocked(id);
+            // Remove online as well as offline client from the list of active clients,
+            // if they are present
+            clientToDisconnectOnline = removeClientLocked(id);
+            clientToDisconnectOffline = removeClientLocked(kOfflineDevice + id);
         }
 
-        // Disconnect client
-        if (clientToDisconnect.get() != nullptr) {
-            ALOGI("%s: Client for camera ID %s evicted due to device status change from HAL",
-                    __FUNCTION__, id.string());
-            // Notify the client of disconnection
-            clientToDisconnect->notifyError(
-                    hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DISCONNECTED,
-                    CaptureResultExtras{});
-            // Ensure not in binder RPC so client disconnect PID checks work correctly
-            LOG_ALWAYS_FATAL_IF(CameraThreadState::getCallingPid() != getpid(),
-                    "onDeviceStatusChanged must be called from the camera service process!");
-            clientToDisconnect->disconnect();
-        }
+        disconnectClient(id, clientToDisconnectOnline);
+        disconnectClient(kOfflineDevice + id, clientToDisconnectOffline);
 
         removeStates(id);
     } else {
@@ -431,6 +423,21 @@
 
 }
 
+void CameraService::disconnectClient(const String8& id, sp<BasicClient> clientToDisconnect) {
+    if (clientToDisconnect.get() != nullptr) {
+        ALOGI("%s: Client for camera ID %s evicted due to device status change from HAL",
+                __FUNCTION__, id.string());
+        // Notify the client of disconnection
+        clientToDisconnect->notifyError(
+                hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DISCONNECTED,
+                CaptureResultExtras{});
+        // Ensure not in binder RPC so client disconnect PID checks work correctly
+        LOG_ALWAYS_FATAL_IF(CameraThreadState::getCallingPid() != getpid(),
+                "onDeviceStatusChanged must be called from the camera service process!");
+        clientToDisconnect->disconnect();
+    }
+}
+
 void CameraService::onTorchStatusChanged(const String8& cameraId,
         TorchModeStatus newStatus) {
     Mutex::Autolock al(mTorchStatusMutex);
@@ -761,6 +768,7 @@
           case CAMERA_DEVICE_API_VERSION_3_3:
           case CAMERA_DEVICE_API_VERSION_3_4:
           case CAMERA_DEVICE_API_VERSION_3_5:
+          case CAMERA_DEVICE_API_VERSION_3_6:
             if (effectiveApiLevel == API_1) { // Camera1 API route
                 sp<ICameraClient> tmp = static_cast<ICameraClient*>(cameraCb.get());
                 *client = new Camera2Client(cameraService, tmp, packageName, featureId,
@@ -1695,6 +1703,77 @@
     return ret;
 }
 
+status_t CameraService::addOfflineClient(String8 cameraId, sp<BasicClient> offlineClient) {
+    if (offlineClient.get() == nullptr) {
+        return BAD_VALUE;
+    }
+
+    {
+        // Acquire mServiceLock and prevent other clients from connecting
+        std::unique_ptr<AutoConditionLock> lock =
+                AutoConditionLock::waitAndAcquire(mServiceLockWrapper, DEFAULT_CONNECT_TIMEOUT_NS);
+
+        if (lock == nullptr) {
+            ALOGE("%s: (PID %d) rejected (too many other clients connecting)."
+                    , __FUNCTION__, offlineClient->getClientPid());
+            return TIMED_OUT;
+        }
+
+        auto onlineClientDesc = mActiveClientManager.get(cameraId);
+        if (onlineClientDesc.get() == nullptr) {
+            ALOGE("%s: No active online client using camera id: %s", __FUNCTION__,
+                    cameraId.c_str());
+            return BAD_VALUE;
+        }
+
+        // Offline clients do not evict or conflict with other online devices. Resource sharing
+        // conflicts are handled by the camera provider which will either succeed or fail before
+        // reaching this method.
+        const auto& onlinePriority = onlineClientDesc->getPriority();
+        auto offlineClientDesc = CameraClientManager::makeClientDescriptor(
+                kOfflineDevice + onlineClientDesc->getKey(), offlineClient, /*cost*/ 0,
+                /*conflictingKeys*/ std::set<String8>(), onlinePriority.getScore(),
+                onlineClientDesc->getOwnerId(), onlinePriority.getState());
+
+        // Allow only one offline device per camera
+        auto incompatibleClients = mActiveClientManager.getIncompatibleClients(offlineClientDesc);
+        if (!incompatibleClients.empty()) {
+            ALOGE("%s: Incompatible offline clients present!", __FUNCTION__);
+            return BAD_VALUE;
+        }
+
+        auto err = offlineClient->initialize(mCameraProviderManager, mMonitorTags);
+        if (err != OK) {
+            ALOGE("%s: Could not initialize offline client.", __FUNCTION__);
+            return err;
+        }
+
+        auto evicted = mActiveClientManager.addAndEvict(offlineClientDesc);
+        if (evicted.size() > 0) {
+            for (auto& i : evicted) {
+                ALOGE("%s: Invalid state: Offline client for camera %s was not removed ",
+                        __FUNCTION__, i->getKey().string());
+            }
+
+            LOG_ALWAYS_FATAL("%s: Invalid state for CameraService, offline clients not evicted "
+                    "properly", __FUNCTION__);
+
+            return BAD_VALUE;
+        }
+
+        logConnectedOffline(offlineClientDesc->getKey(),
+                static_cast<int>(offlineClientDesc->getOwnerId()),
+                String8(offlineClient->getPackageName()));
+
+        sp<IBinder> remoteCallback = offlineClient->getRemote();
+        if (remoteCallback != nullptr) {
+            remoteCallback->linkToDeath(this);
+        }
+    } // lock is destroyed, allow further connect calls
+
+    return OK;
+}
+
 Status CameraService::setTorchMode(const String16& cameraId, bool enabled,
         const sp<IBinder>& clientBinder) {
     Mutex::Autolock lock(mServiceLock);
@@ -2093,6 +2172,7 @@
         case CAMERA_DEVICE_API_VERSION_3_3:
         case CAMERA_DEVICE_API_VERSION_3_4:
         case CAMERA_DEVICE_API_VERSION_3_5:
+        case CAMERA_DEVICE_API_VERSION_3_6:
             ALOGV("%s: Camera id %s uses HAL3.2 or newer, supports api1/api2 directly",
                     __FUNCTION__, id.string());
             *isSupported = true;
@@ -2298,6 +2378,13 @@
             clientPackage, clientPid));
 }
 
+void CameraService::logDisconnectedOffline(const char* cameraId, int clientPid,
+        const char* clientPackage) {
+    // Log the offline device client disconnection
+    logEvent(String8::format("DISCONNECT offline device %s client for package %s (PID %d)",
+                cameraId, clientPackage, clientPid));
+}
+
 void CameraService::logConnected(const char* cameraId, int clientPid,
         const char* clientPackage) {
     // Log the clients evicted
@@ -2305,6 +2392,13 @@
             clientPackage, clientPid));
 }
 
+void CameraService::logConnectedOffline(const char* cameraId, int clientPid,
+        const char* clientPackage) {
+    // Log the offline device client connection
+    logEvent(String8::format("CONNECT offline device %s client for package %s (PID %d)", cameraId,
+            clientPackage, clientPid));
+}
+
 void CameraService::logRejected(const char* cameraId, int clientPid,
         const char* clientPackage, const char* reason) {
     // Log the client rejected
@@ -2742,6 +2836,7 @@
     if (mAppOpsManager == nullptr) {
         return;
     }
+    // TODO: add offline camera session case
     if (op != AppOpsManager::OP_CAMERA) {
         ALOGW("Unexpected app ops notification received: %d", op);
         return;
@@ -2776,20 +2871,6 @@
 
 // ----------------------------------------------------------------------------
 
-sp<CameraService> CameraService::OfflineClient::sCameraService;
-
-status_t CameraService::OfflineClient::startCameraOps() {
-    // TODO
-    return OK;
-}
-
-status_t CameraService::OfflineClient::finishCameraOps() {
-    // TODO
-    return OK;
-}
-
-// ----------------------------------------------------------------------------
-
 void CameraService::Client::notifyError(int32_t errorCode,
         const CaptureResultExtras& resultExtras) {
     (void) resultExtras;
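
The CameraService changes above register offline sessions in the same client manager as regular clients, but under an "offline-"-prefixed key, so the two entries never evict each other and onDeviceStatusChanged() can tear both down explicitly. A minimal sketch of that dual-key bookkeeping follows, with std::map and std::string standing in for the service's ClientManager and String8; the types and function are placeholders, not service APIs.

#include <map>
#include <memory>
#include <string>

struct Client {};  // placeholder for the service's BasicClient
using ClientTable = std::map<std::string, std::shared_ptr<Client>>;

static const std::string kOfflinePrefix = "offline-";  // mirrors kOfflineDevice above

// Mirrors the removal done in onDeviceStatusChanged(): drop the online entry and,
// if present, the offline entry registered by addOfflineClient().
void removeOnlineAndOffline(ClientTable &clients, const std::string &id) {
    clients.erase(id);
    clients.erase(kOfflinePrefix + id);
}
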
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index dae9c09..726cb0f 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -68,6 +68,7 @@
 {
     friend class BinderService<CameraService>;
     friend class CameraClient;
+    friend class CameraOfflineSessionClient;
 public:
     class Client;
     class BasicClient;
@@ -185,6 +186,9 @@
     // Monitored UIDs availability notification
     void                notifyMonitoredUids();
 
+    // Register an offline client for a given active camera id
+    status_t addOfflineClient(String8 cameraId, sp<BasicClient> offlineClient);
+
     /////////////////////////////////////////////////////////////////////
     // Client functionality
 
@@ -310,10 +314,9 @@
         sp<IBinder>                     mRemoteBinder;   // immutable after constructor
 
         // permissions management
-        status_t                        startCameraOps();
-        status_t                        finishCameraOps();
+        virtual status_t                startCameraOps();
+        virtual status_t                finishCameraOps();
 
-    private:
         std::unique_ptr<AppOpsManager>  mAppOpsManager = nullptr;
 
         class OpsCallback : public BnAppOpsCallback {
@@ -402,87 +405,6 @@
         int mCameraId;  // All API1 clients use integer camera IDs
     }; // class Client
 
-
-    // Client for offline session. Note that offline session client does not affect camera service's
-    // client arbitration logic. It is camera HAL's decision to decide whether a normal camera
-    // client is conflicting with existing offline client(s).
-    // The other distinctive difference between offline clients and normal clients is that normal
-    // clients are created through ICameraService binder calls, while the offline session client
-    // is created through ICameraDeviceUser::switchToOffline call.
-    class OfflineClient : public virtual RefBase {
-
-        virtual status_t dump(int fd, const Vector<String16>& args) = 0;
-
-        // Block the client form using the camera
-        virtual void block() = 0;
-
-        // Return the package name for this client
-        virtual String16 getPackageName() const = 0;
-
-        // Notify client about a fatal error
-        // TODO: maybe let impl notify within block?
-        virtual void notifyError(int32_t errorCode,
-                const CaptureResultExtras& resultExtras) = 0;
-
-        // Get the UID of the application client using this
-        virtual uid_t getClientUid() const = 0;
-
-        // Get the PID of the application client using this
-        virtual int getClientPid() const = 0;
-
-        protected:
-            OfflineClient(const sp<CameraService>& cameraService,
-                    const String16& clientPackageName,
-                    const String8& cameraIdStr,
-                    int clientPid,
-                    uid_t clientUid,
-                    int servicePid): mCameraIdStr(cameraIdStr),
-                            mClientPackageName(clientPackageName), mClientPid(clientPid),
-                            mClientUid(clientUid), mServicePid(servicePid) {
-                if (sCameraService == nullptr) {
-                    sCameraService = cameraService;
-                }
-            }
-
-            virtual ~OfflineClient() { /*TODO*/ }
-
-            // these are initialized in the constructor.
-            static sp<CameraService>        sCameraService;
-            const String8                   mCameraIdStr;
-            String16                        mClientPackageName;
-            pid_t                           mClientPid;
-            const uid_t                     mClientUid;
-            const pid_t                     mServicePid;
-            bool                            mDisconnected;
-
-            // - The app-side Binder interface to receive callbacks from us
-            sp<IBinder>                     mRemoteBinder;   // immutable after constructor
-
-            // permissions management
-            status_t                        startCameraOps();
-            status_t                        finishCameraOps();
-
-        private:
-            std::unique_ptr<AppOpsManager>  mAppOpsManager = nullptr;
-
-            class OpsCallback : public BnAppOpsCallback {
-            public:
-                explicit OpsCallback(wp<OfflineClient> client) : mClient(client) {}
-                virtual void opChanged(int32_t /*op*/, const String16& /*packageName*/) {
-                    //TODO
-                }
-
-            private:
-                wp<OfflineClient> mClient;
-
-            }; // class OpsCallback
-
-            sp<OpsCallback> mOpsCallback;
-
-            // IAppOpsCallback interface, indirected through opListener
-            // virtual void opChanged(int32_t op, const String16& packageName);
-    }; // class OfflineClient
-
     /**
      * A listener class that implements the LISTENER interface for use with a ClientManager, and
      * implements the following methods:
@@ -872,6 +794,17 @@
     void logDisconnected(const char* cameraId, int clientPid, const char* clientPackage);
 
     /**
+     * Add an event log message that a client has been disconnected from offline device.
+     */
+    void logDisconnectedOffline(const char* cameraId, int clientPid, const char* clientPackage);
+
+    /**
+     * Add an event log message that an offline client has been connected.
+     */
+    void logConnectedOffline(const char* cameraId, int clientPid,
+            const char* clientPackage);
+
+    /**
      * Add an event log message that a client has been connected.
      */
     void logConnected(const char* cameraId, int clientPid, const char* clientPackage);
@@ -1095,6 +1028,12 @@
     void broadcastTorchModeStatus(const String8& cameraId,
             hardware::camera::common::V1_0::TorchModeStatus status);
 
+    void disconnectClient(const String8& id, sp<BasicClient> clientToDisconnect);
+
+    // Regular online and offline devices must not conflict at the camera service layer.
+    // Use separate keys for offline devices.
+    static const String8 kOfflineDevice;
+
     // TODO: right now each BasicClient holds one AppOpsManager instance.
     // We can refactor the code so all of clients share this instance
     AppOpsManager mAppOps;
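
For illustration, a minimal sketch of the separate-key idea above, assuming kOfflineDevice is a short string prefix; its concrete value is not shown in this change, so the example values below are assumptions, not service code:

    // Sketch only; the prefix value and the example ids are assumptions.
    String8 onlineKey  = cameraIdStr;                      // e.g. "0"
    String8 offlineKey = kOfflineDevice + cameraIdStr;     // e.g. "offline-0"
    // Because the two keys differ, ClientManager arbitration treats the offline
    // session as an unrelated device and never evicts the online client for it.
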
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.cpp b/services/camera/libcameraservice/api1/client2/Parameters.cpp
index 18addb5..20333d1 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.cpp
+++ b/services/camera/libcameraservice/api1/client2/Parameters.cpp
@@ -2454,12 +2454,9 @@
 
     camera_metadata_ro_entry_t availableFocalLengths =
         staticInfo(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS, 0, 0, /*required*/false);
-    if (!availableFocalLengths.count && !fastInfo.isExternalCamera) return NO_INIT;
 
     // Find focal length in PREVIEW template to use as default focal length.
-    if (fastInfo.isExternalCamera) {
-        fastInfo.defaultFocalLength = -1.0;
-    } else {
+    if (availableFocalLengths.count) {
         // Find smallest (widest-angle) focal length to use as basis of still
         // picture FOV reporting.
         fastInfo.defaultFocalLength = availableFocalLengths.data.f[0];
@@ -2481,6 +2478,10 @@
         if (entry.count != 0) {
             fastInfo.defaultFocalLength = entry.data.f[0];
         }
+    } else if (fastInfo.isExternalCamera) {
+        fastInfo.defaultFocalLength = -1.0;
+    } else {
+        return NO_INIT;
     }
     return OK;
 }
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index 28421ba..9deb662 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -470,7 +470,8 @@
 }
 
 binder::Status CameraDeviceClient::endConfigure(int operatingMode,
-        const hardware::camera2::impl::CameraMetadataNative& sessionParams) {
+        const hardware::camera2::impl::CameraMetadataNative& sessionParams,
+        std::vector<int>* offlineStreamIds /*out*/) {
     ATRACE_CALL();
     ALOGV("%s: ending configure (%d input stream, %zu output surfaces)",
             __FUNCTION__, mInputStream.configured ? 1 : 0,
@@ -479,6 +480,12 @@
     binder::Status res;
     if (!(res = checkPidStatus(__FUNCTION__)).isOk()) return res;
 
+    if (offlineStreamIds == nullptr) {
+        String8 msg = String8::format("Invalid offline stream ids");
+        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+    }
+
     Mutex::Autolock icl(mBinderSerializationLock);
 
     if (!mDevice.get()) {
@@ -502,15 +509,41 @@
         ALOGE("%s: %s", __FUNCTION__, msg.string());
         res = STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
     } else {
+        offlineStreamIds->clear();
+        mDevice->getOfflineStreamIds(offlineStreamIds);
+
         for (size_t i = 0; i < mCompositeStreamMap.size(); ++i) {
             err = mCompositeStreamMap.valueAt(i)->configureStream();
-            if (err != OK ) {
+            if (err != OK) {
                 String8 msg = String8::format("Camera %s: Error configuring composite "
                         "streams: %s (%d)", mCameraIdStr.string(), strerror(-err), err);
                 ALOGE("%s: %s", __FUNCTION__, msg.string());
                 res = STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
                 break;
             }
+
+            // Composite streams can only support offline mode if all of their
+            // internal streams support it as well.
+            std::vector<int> internalStreams;
+            mCompositeStreamMap.valueAt(i)->insertCompositeStreamIds(&internalStreams);
+            offlineStreamIds->erase(std::remove_if(offlineStreamIds->begin(),
+                    offlineStreamIds->end(), [&internalStreams] (int streamId) {
+                        auto it = std::find(internalStreams.begin(), internalStreams.end(),
+                                streamId);
+                        if (it != internalStreams.end()) {
+                            internalStreams.erase(it);
+                            return true;
+                        }
+
+                        return false;
+                    }), offlineStreamIds->end());
+            if (internalStreams.empty()) {
+                offlineStreamIds->push_back(mCompositeStreamMap.valueAt(i)->getStreamId());
+            }
+        }
+
+        for (const auto& offlineStreamId : *offlineStreamIds) {
+            mStreamInfoMap[offlineStreamId].supportsOffline = true;
         }
     }
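
The composite handling above can be read as one small algorithm: strip a composite's internal stream ids from the HAL-reported offline list, and advertise the composite's own id only when every internal stream was offline-capable. A self-contained sketch of that step (helper name and parameters are illustrative), using the erase-remove idiom so the filtered ids are actually dropped from the vector:

    #include <algorithm>
    #include <vector>

    // Sketch: fold one composite stream into the offline-capable id list.
    void foldCompositeIntoOfflineIds(std::vector<int>& offlineIds,
            std::vector<int> internalIds, int compositeId) {
        // Hide the composite's internal ids from the client-visible list,
        // remembering which of them the HAL reported as offline-capable.
        offlineIds.erase(std::remove_if(offlineIds.begin(), offlineIds.end(),
                [&internalIds] (int streamId) {
                    auto it = std::find(internalIds.begin(), internalIds.end(), streamId);
                    if (it != internalIds.end()) {
                        internalIds.erase(it);
                        return true;
                    }
                    return false;
                }), offlineIds.end());

        // Every internal stream was offline-capable, so the composite itself is.
        if (internalIds.empty()) {
            offlineIds.push_back(compositeId);
        }
    }
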
 
@@ -1903,7 +1936,7 @@
 
 binder::Status CameraDeviceClient::switchToOffline(
         const sp<hardware::camera2::ICameraDeviceCallbacks>& cameraCb,
-        const std::vector<view::Surface>& offlineOutputs,
+        const std::vector<int>& offlineOutputIds,
         /*out*/
         sp<hardware::camera2::ICameraOfflineSession>* session) {
     ATRACE_CALL();
@@ -1917,8 +1950,8 @@
         return STATUS_ERROR(CameraService::ERROR_DISCONNECTED, "Camera device no longer alive");
     }
 
-    if (offlineOutputs.empty()) {
-        String8 msg = String8::format("Offline outputs must not be empty");
+    if (offlineOutputIds.empty()) {
+        String8 msg = String8::format("Offline surfaces must not be empty");
         ALOGE("%s: %s", __FUNCTION__, msg.string());
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
     }
@@ -1929,24 +1962,47 @@
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
     }
 
-    std::vector<int32_t> offlineStreamIds(offlineOutputs.size());
-    for (auto& surface : offlineOutputs) {
-        sp<IBinder> binder = IInterface::asBinder(surface.graphicBufferProducer);
-        ssize_t index = mStreamMap.indexOfKey(binder);
+    std::vector<int32_t> offlineStreamIds(offlineOutputIds.size());
+    KeyedVector<sp<IBinder>, sp<CompositeStream>> offlineCompositeStreamMap;
+    for (const auto& streamId : offlineOutputIds) {
+        ssize_t index = mConfiguredOutputs.indexOfKey(streamId);
         if (index == NAME_NOT_FOUND) {
-            String8 msg = String8::format("Offline output is invalid");
+            String8 msg = String8::format("Offline surface with id: %d is not registered",
+                    streamId);
             ALOGE("%s: %s", __FUNCTION__, msg.string());
             return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
         }
-        // TODO: Also check whether the offline output is supported by Hal for offline mode.
 
-        sp<Surface> s = new Surface(surface.graphicBufferProducer);
-        bool isCompositeStream = camera3::DepthCompositeStream::isDepthCompositeStream(s);
-        isCompositeStream |= camera3::HeicCompositeStream::isHeicCompositeStream(s);
-        if (isCompositeStream) {
-            // TODO: Add composite specific handling
-        } else {
-            offlineStreamIds.push_back(mStreamMap.valueAt(index).streamId());
+        if (!mStreamInfoMap[streamId].supportsOffline) {
+            String8 msg = String8::format("Offline surface with id: %d doesn't support "
+                    "offline mode", streamId);
+            ALOGE("%s: %s", __FUNCTION__, msg.string());
+            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+        }
+
+        bool isCompositeStream = false;
+        for (const auto& gbp : mConfiguredOutputs[streamId].getGraphicBufferProducers()) {
+            sp<Surface> s = new Surface(gbp, false /*controlledByApp*/);
+            isCompositeStream = camera3::DepthCompositeStream::isDepthCompositeStream(s) |
+                camera3::HeicCompositeStream::isHeicCompositeStream(s);
+            if (isCompositeStream) {
+                auto compositeIdx = mCompositeStreamMap.indexOfKey(IInterface::asBinder(gbp));
+                if (compositeIdx == NAME_NOT_FOUND) {
+                    ALOGE("%s: Unknown composite stream", __FUNCTION__);
+                    return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+                            "Unknown composite stream");
+                }
+
+                mCompositeStreamMap.valueAt(compositeIdx)->insertCompositeStreamIds(
+                        &offlineStreamIds);
+                offlineCompositeStreamMap.add(mCompositeStreamMap.keyAt(compositeIdx),
+                        mCompositeStreamMap.valueAt(compositeIdx));
+                break;
+            }
+        }
+
+        if (!isCompositeStream) {
+            offlineStreamIds.push_back(streamId);
         }
     }
 
@@ -1958,16 +2014,36 @@
                 mCameraIdStr.string(), strerror(ret), ret);
     }
 
-    sp<CameraOfflineSessionClient> offlineClient = new CameraOfflineSessionClient(sCameraService,
-            offlineSession, cameraCb, mClientPackageName, mCameraIdStr, mClientPid, mClientUid,
-            mServicePid);
-    ret = offlineClient->initialize();
+    sp<CameraOfflineSessionClient> offlineClient;
+    if (offlineSession.get() != nullptr) {
+        offlineClient = new CameraOfflineSessionClient(sCameraService,
+                offlineSession, offlineCompositeStreamMap, cameraCb, mClientPackageName,
+                mClientFeatureId, mCameraIdStr, mCameraFacing, mClientPid, mClientUid, mServicePid);
+        ret = sCameraService->addOfflineClient(mCameraIdStr, offlineClient);
+    }
+
     if (ret == OK) {
-        // TODO: We need to update mStreamMap, mConfiguredOutputs
+        // A successful offline session switch must reset the current camera client
+        // and release any resources occupied by previously configured streams.
+        mStreamMap.clear();
+        mConfiguredOutputs.clear();
+        mDeferredStreams.clear();
+        mStreamInfoMap.clear();
+        mCompositeStreamMap.clear();
+        mInputStream = {false, 0, 0, 0, 0};
     } else {
-        return STATUS_ERROR_FMT(CameraService::ERROR_ILLEGAL_ARGUMENT,
-                "Camera %s: Failed to initilize offline session: %s (%d)",
-                mCameraIdStr.string(), strerror(ret), ret);
+        switch(ret) {
+            case BAD_VALUE:
+                return STATUS_ERROR_FMT(CameraService::ERROR_ILLEGAL_ARGUMENT,
+                        "Illegal argument to HAL module for camera \"%s\"", mCameraIdStr.c_str());
+            case TIMED_OUT:
+                return STATUS_ERROR_FMT(CameraService::ERROR_CAMERA_IN_USE,
+                        "Camera \"%s\" is already open", mCameraIdStr.c_str());
+            default:
+                return STATUS_ERROR_FMT(CameraService::ERROR_INVALID_OPERATION,
+                        "Failed to initialize camera \"%s\": %s (%d)", mCameraIdStr.c_str(),
+                        strerror(-ret), ret);
+        }
     }
 
     *session = offlineClient;
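
The error handling above follows a common status_t-to-binder translation pattern; a condensed sketch of the same mapping factored into a helper (the helper itself is illustrative and not part of this change):

    // Sketch: translate a native status_t from switchToOffline into the
    // service-specific binder errors returned to the client.
    binder::Status mapSwitchToOfflineError(status_t ret, const String8& cameraId) {
        switch (ret) {
            case OK:
                return binder::Status::ok();
            case BAD_VALUE:
                return STATUS_ERROR_FMT(CameraService::ERROR_ILLEGAL_ARGUMENT,
                        "Illegal argument to HAL module for camera \"%s\"", cameraId.c_str());
            case TIMED_OUT:
                return STATUS_ERROR_FMT(CameraService::ERROR_CAMERA_IN_USE,
                        "Camera \"%s\" is already open", cameraId.c_str());
            default:
                return STATUS_ERROR_FMT(CameraService::ERROR_INVALID_OPERATION,
                        "Failed to initialize camera \"%s\": %s (%d)", cameraId.c_str(),
                        strerror(-ret), ret);
        }
    }
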
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index 0a8f377..a3a3189 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -92,7 +92,9 @@
     virtual binder::Status beginConfigure() override;
 
     virtual binder::Status endConfigure(int operatingMode,
-            const hardware::camera2::impl::CameraMetadataNative& sessionParams) override;
+            const hardware::camera2::impl::CameraMetadataNative& sessionParams,
+            /*out*/
+            std::vector<int>* offlineStreamIds) override;
 
     // Verify specific session configuration.
     virtual binder::Status isSessionConfigurationSupported(
@@ -160,7 +162,7 @@
 
     virtual binder::Status switchToOffline(
             const sp<hardware::camera2::ICameraDeviceCallbacks>& cameraCb,
-            const std::vector<view::Surface>& offlineOutputs,
+            const std::vector<int>& offlineOutputIds,
             /*out*/
             sp<hardware::camera2::ICameraOfflineSession>* session) override;
 
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
new file mode 100644
index 0000000..af7b9e1
--- /dev/null
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
@@ -0,0 +1,287 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "CameraOfflineClient"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+//#define LOG_NDEBUG 0
+
+#include "CameraOfflineSessionClient.h"
+#include "utils/CameraThreadState.h"
+#include <utils/Trace.h>
+
+namespace android {
+
+using binder::Status;
+
+status_t CameraOfflineSessionClient::initialize(sp<CameraProviderManager>, const String8&) {
+    ATRACE_CALL();
+
+    // Verify ops permissions
+    auto res = startCameraOps();
+    if (res != OK) {
+        return res;
+    }
+
+    if (mOfflineSession.get() == nullptr) {
+        ALOGE("%s: Camera %s: No valid offline session",
+                __FUNCTION__, mCameraIdStr.string());
+        return NO_INIT;
+    }
+
+    wp<NotificationListener> weakThis(this);
+    res = mOfflineSession->initialize(weakThis);
+    if (res != OK) {
+        ALOGE("%s: Camera %s: unable to initialize device: %s (%d)",
+                __FUNCTION__, mCameraIdStr.string(), strerror(-res), res);
+        return res;
+    }
+
+    return OK;
+}
+
+status_t CameraOfflineSessionClient::dump(int fd, const Vector<String16>& args) {
+    return BasicClient::dump(fd, args);
+}
+
+status_t CameraOfflineSessionClient::dumpClient(int fd, const Vector<String16>& /*args*/) {
+    String8 result;
+
+    result = "  Offline session dump:\n";
+    write(fd, result.string(), result.size());
+
+    if (mOfflineSession.get() == nullptr) {
+        result = "  *** Offline session is detached\n";
+        write(fd, result.string(), result.size());
+        return NO_ERROR;
+    }
+
+    auto res = mOfflineSession->dump(fd);
+    if (res != OK) {
+        result = String8::format("   Error dumping offline session: %s (%d)",
+                strerror(-res), res);
+        write(fd, result.string(), result.size());
+    }
+
+    return OK;
+}
+
+binder::Status CameraOfflineSessionClient::disconnect() {
+    Mutex::Autolock icl(mBinderSerializationLock);
+
+    binder::Status res = Status::ok();
+    if (mDisconnected) {
+        return res;
+    }
+    // Allow both client and the media server to disconnect at all times
+    int callingPid = CameraThreadState::getCallingPid();
+    if (callingPid != mClientPid &&
+            callingPid != mServicePid) {
+        return res;
+    }
+
+    mDisconnected = true;
+
+    sCameraService->removeByClient(this);
+    sCameraService->logDisconnectedOffline(mCameraIdStr, mClientPid, String8(mClientPackageName));
+
+    sp<IBinder> remote = getRemote();
+    if (remote != nullptr) {
+        remote->unlinkToDeath(sCameraService);
+    }
+
+    finishCameraOps();
+    ALOGI("%s: Disconnected client for offline camera %s for PID %d", __FUNCTION__,
+            mCameraIdStr.string(), mClientPid);
+
+    // client shouldn't be able to call into us anymore
+    mClientPid = 0;
+
+    if (mOfflineSession.get() != nullptr) {
+        auto ret = mOfflineSession->disconnect();
+        if (ret != OK) {
+            ALOGE("%s: Failed disconnecting from offline session %s (%d)", __FUNCTION__,
+                    strerror(-ret), ret);
+        }
+        mOfflineSession = nullptr;
+    }
+
+    for (size_t i = 0; i < mCompositeStreamMap.size(); i++) {
+        auto ret = mCompositeStreamMap.valueAt(i)->deleteInternalStreams();
+        if (ret != OK) {
+            ALOGE("%s: Failed removing composite stream  %s (%d)", __FUNCTION__,
+                    strerror(-ret), ret);
+        }
+    }
+    mCompositeStreamMap.clear();
+
+    return res;
+}
+
+void CameraOfflineSessionClient::notifyError(int32_t errorCode,
+        const CaptureResultExtras& resultExtras) {
+    // Thread safe. Don't bother locking.
+    // Composites can have multiple internal streams. Error notifications coming from such
+    // internal streams may need to remain within the camera service.
+    bool skipClientNotification = false;
+    for (size_t i = 0; i < mCompositeStreamMap.size(); i++) {
+        skipClientNotification |= mCompositeStreamMap.valueAt(i)->onError(errorCode, resultExtras);
+    }
+
+    if ((mRemoteCallback.get() != nullptr) && (!skipClientNotification)) {
+        mRemoteCallback->onDeviceError(errorCode, resultExtras);
+    }
+}
+
+status_t CameraOfflineSessionClient::startCameraOps() {
+    ATRACE_CALL();
+    {
+        ALOGV("%s: Start camera ops, package name = %s, client UID = %d",
+              __FUNCTION__, String8(mClientPackageName).string(), mClientUid);
+    }
+
+    if (mAppOpsManager != nullptr) {
+        // Notify app ops that the camera is not available
+        mOpsCallback = new OpsCallback(this);
+        int32_t res;
+        // TODO : possibly change this to OP_OFFLINE_CAMERA_SESSION
+        mAppOpsManager->startWatchingMode(AppOpsManager::OP_CAMERA,
+                mClientPackageName, mOpsCallback);
+        // TODO : possibly change this to OP_OFFLINE_CAMERA_SESSION
+        res = mAppOpsManager->startOpNoThrow(AppOpsManager::OP_CAMERA,
+                mClientUid, mClientPackageName, /*startIfModeDefault*/ false);
+
+        if (res == AppOpsManager::MODE_ERRORED) {
+            ALOGI("Offline Camera %s: Access for \"%s\" has been revoked",
+                    mCameraIdStr.string(), String8(mClientPackageName).string());
+            return PERMISSION_DENIED;
+        }
+
+        if (res == AppOpsManager::MODE_IGNORED) {
+            ALOGI("Offline Camera %s: Access for \"%s\" has been restricted",
+                    mCameraIdStr.string(), String8(mClientPackageName).string());
+            // Return the same error as for device policy manager rejection
+            return -EACCES;
+        }
+    }
+
+    mOpsActive = true;
+
+    // Register the client UID so the service's UID policy can monitor its state
+    sCameraService->mUidPolicy->registerMonitorUid(mClientUid);
+
+    return OK;
+}
+
+status_t CameraOfflineSessionClient::finishCameraOps() {
+    ATRACE_CALL();
+
+    // Check if startCameraOps succeeded, and if so, finish the camera op
+    if (mOpsActive) {
+        // Notify app ops that the camera is available again
+        if (mAppOpsManager != nullptr) {
+            // TODO: possibly change this to OP_OFFLINE_CAMERA_SESSION
+            mAppOpsManager->finishOp(AppOpsManager::OP_CAMERA, mClientUid,
+                    mClientPackageName);
+            mOpsActive = false;
+        }
+    }
+    // Always stop watching, even if no camera op is active
+    if (mOpsCallback != nullptr && mAppOpsManager != nullptr) {
+        mAppOpsManager->stopWatchingMode(mOpsCallback);
+    }
+    mOpsCallback.clear();
+
+    sCameraService->mUidPolicy->unregisterMonitorUid(mClientUid);
+
+    return OK;
+}
+
+void CameraOfflineSessionClient::onResultAvailable(const CaptureResult& result) {
+    ATRACE_CALL();
+    ALOGV("%s", __FUNCTION__);
+
+    if (mRemoteCallback.get() != NULL) {
+        mRemoteCallback->onResultReceived(result.mMetadata, result.mResultExtras,
+                result.mPhysicalMetadatas);
+    }
+
+    for (size_t i = 0; i < mCompositeStreamMap.size(); i++) {
+        mCompositeStreamMap.valueAt(i)->onResultAvailable(result);
+    }
+}
+
+void CameraOfflineSessionClient::notifyShutter(const CaptureResultExtras& resultExtras,
+        nsecs_t timestamp) {
+
+    if (mRemoteCallback.get() != nullptr) {
+        mRemoteCallback->onCaptureStarted(resultExtras, timestamp);
+    }
+
+    for (size_t i = 0; i < mCompositeStreamMap.size(); i++) {
+        mCompositeStreamMap.valueAt(i)->onShutter(resultExtras, timestamp);
+    }
+}
+
+void CameraOfflineSessionClient::notifyIdle() {
+    if (mRemoteCallback.get() != nullptr) {
+        mRemoteCallback->onDeviceIdle();
+    }
+}
+
+void CameraOfflineSessionClient::notifyAutoFocus(uint8_t newState, int triggerId) {
+    (void)newState;
+    (void)triggerId;
+
+    ALOGV("%s: Autofocus state now %d, last trigger %d",
+          __FUNCTION__, newState, triggerId);
+}
+
+void CameraOfflineSessionClient::notifyAutoExposure(uint8_t newState, int triggerId) {
+    (void)newState;
+    (void)triggerId;
+
+    ALOGV("%s: Autoexposure state now %d, last trigger %d",
+            __FUNCTION__, newState, triggerId);
+}
+
+void CameraOfflineSessionClient::notifyAutoWhitebalance(uint8_t newState, int triggerId) {
+    (void)newState;
+    (void)triggerId;
+
+    ALOGV("%s: Auto-whitebalance state now %d, last trigger %d", __FUNCTION__, newState,
+            triggerId);
+}
+
+void CameraOfflineSessionClient::notifyPrepared(int /*streamId*/) {
+    ALOGE("%s: Unexpected stream prepare notification in offline mode!", __FUNCTION__);
+    notifyError(hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DEVICE,
+                CaptureResultExtras());
+}
+
+void CameraOfflineSessionClient::notifyRequestQueueEmpty() {
+    if (mRemoteCallback.get() != nullptr) {
+        mRemoteCallback->onRequestQueueEmpty();
+    }
+}
+
+void CameraOfflineSessionClient::notifyRepeatingRequestError(long /*lastFrameNumber*/) {
+    ALOGE("%s: Unexpected repeating request error in offline mode!", __FUNCTION__);
+    notifyError(hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DEVICE,
+                CaptureResultExtras());
+}
+
+// ----------------------------------------------------------------------------
+}; // namespace android
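
For orientation, a hypothetical sketch of the lifecycle this file implements, as seen from the calling side; the local variable names mirror the switchToOffline call site and are illustrative only:

    // Sketch, not actual service code.
    sp<CameraOfflineSessionClient> client = new CameraOfflineSessionClient(
            cameraService, offlineSession, offlineCompositeStreamMap, cameraCb,
            clientPackageName, clientFeatureId, cameraIdStr, cameraFacing,
            clientPid, clientUid, servicePid);

    // initialize() checks app-ops permissions via startCameraOps() and registers
    // the client as the offline session's NotificationListener.
    status_t res = client->initialize(/*manager*/ nullptr, /*monitorTags*/ String8());

    // ... offline processing runs; results arrive via onResultAvailable(),
    //     shutter/idle/error callbacks via the NotificationListener interface ...

    // disconnect() is idempotent: it finishes camera ops, disconnects the offline
    // session, and deletes any internal composite streams.
    client->disconnect();
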
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
index cb83e29..b0f000d 100644
--- a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
@@ -19,65 +19,88 @@
 
 #include <android/hardware/camera2/BnCameraOfflineSession.h>
 #include <android/hardware/camera2/ICameraDeviceCallbacks.h>
+#include "common/FrameProcessorBase.h"
+#include "common/CameraDeviceBase.h"
 #include "CameraService.h"
+#include "CompositeStream.h"
 
 namespace android {
 
 using android::hardware::camera2::ICameraDeviceCallbacks;
+using camera3::CompositeStream;
 
+// Client for offline session. Note that the offline session client does not affect the camera
+// service's client arbitration logic. It is up to the camera HAL to decide whether a normal
+// camera client conflicts with existing offline client(s).
+// The other notable difference between offline clients and normal clients is that normal
+// clients are created through ICameraService binder calls, while the offline session client
+// is created through the ICameraDeviceUser::switchToOffline call.
 class CameraOfflineSessionClient :
-        public CameraService::OfflineClient,
-        public hardware::camera2::BnCameraOfflineSession
-        // public camera2::FrameProcessorBase::FilteredListener?
+        public CameraService::BasicClient,
+        public hardware::camera2::BnCameraOfflineSession,
+        public camera2::FrameProcessorBase::FilteredListener,
+        public NotificationListener
 {
 public:
     CameraOfflineSessionClient(
             const sp<CameraService>& cameraService,
             sp<CameraOfflineSessionBase> session,
+            const KeyedVector<sp<IBinder>, sp<CompositeStream>>& offlineCompositeStreamMap,
             const sp<ICameraDeviceCallbacks>& remoteCallback,
             const String16& clientPackageName,
-            const String8& cameraIdStr,
+            const std::unique_ptr<String16>& clientFeatureId,
+            const String8& cameraIdStr, int cameraFacing,
             int clientPid, uid_t clientUid, int servicePid) :
-                    CameraService::OfflineClient(cameraService, clientPackageName,
-                            cameraIdStr, clientPid, clientUid, servicePid),
-                            mRemoteCallback(remoteCallback), mOfflineSession(session) {}
+            CameraService::BasicClient(
+                    cameraService,
+                    IInterface::asBinder(remoteCallback),
+                    clientPackageName, clientFeatureId,
+                    cameraIdStr, cameraFacing, clientPid, clientUid, servicePid),
+            mRemoteCallback(remoteCallback), mOfflineSession(session),
+            mCompositeStreamMap(offlineCompositeStreamMap) {}
 
-    ~CameraOfflineSessionClient() {}
+    virtual ~CameraOfflineSessionClient() {}
 
-    virtual binder::Status disconnect() override { return binder::Status::ok(); }
-
-    virtual status_t dump(int /*fd*/, const Vector<String16>& /*args*/) override {
-        return OK;
+    sp<IBinder> asBinderWrapper() override {
+        return IInterface::asBinder(this);
     }
 
-    // Block the client form using the camera
-    virtual void block() override {};
+    binder::Status disconnect() override;
 
-    // Return the package name for this client
-    virtual String16 getPackageName() const override { String16 ret; return ret; };
+    status_t dump(int /*fd*/, const Vector<String16>& /*args*/) override;
 
-    // Notify client about a fatal error
-    // TODO: maybe let impl notify within block?
-    virtual void notifyError(int32_t /*errorCode*/,
-            const CaptureResultExtras& /*resultExtras*/) override {}
+    status_t dumpClient(int /*fd*/, const Vector<String16>& /*args*/) override;
 
-    // Get the UID of the application client using this
-    virtual uid_t getClientUid() const override { return 0; }
+    status_t initialize(sp<CameraProviderManager> /*manager*/,
+            const String8& /*monitorTags*/) override;
 
-    // Get the PID of the application client using this
-    virtual int getClientPid() const override { return 0; }
+    // permissions management
+    status_t startCameraOps() override;
+    status_t finishCameraOps() override;
 
-    status_t initialize() {
-        // TODO: Talk to camera service to add the offline session client book keeping
-        return OK;
-    }
+    // FilteredResultListener API
+    void onResultAvailable(const CaptureResult& result) override;
+
+    // NotificationListener API
+    void notifyError(int32_t errorCode, const CaptureResultExtras& resultExtras) override;
+    void notifyShutter(const CaptureResultExtras& resultExtras, nsecs_t timestamp) override;
+    void notifyIdle() override;
+    void notifyAutoFocus(uint8_t newState, int triggerId) override;
+    void notifyAutoExposure(uint8_t newState, int triggerId) override;
+    void notifyAutoWhitebalance(uint8_t newState, int triggerId) override;
+    void notifyPrepared(int streamId) override;
+    void notifyRequestQueueEmpty() override;
+    void notifyRepeatingRequestError(long lastFrameNumber) override;
+
 private:
-    sp<CameraOfflineSessionBase> mSession;
+    mutable Mutex mBinderSerializationLock;
 
     sp<hardware::camera2::ICameraDeviceCallbacks> mRemoteCallback;
-    // This class is responsible to convert HAL callbacks to AIDL callbacks
 
     sp<CameraOfflineSessionBase> mOfflineSession;
+
+    // Offline composite stream map, output surface -> composite stream
+    KeyedVector<sp<IBinder>, sp<CompositeStream>> mCompositeStreamMap;
 };
 
 } // namespace android
diff --git a/services/camera/libcameraservice/api2/CompositeStream.h b/services/camera/libcameraservice/api2/CompositeStream.h
index a401a82..de894f3 100644
--- a/services/camera/libcameraservice/api2/CompositeStream.h
+++ b/services/camera/libcameraservice/api2/CompositeStream.h
@@ -64,6 +64,10 @@
     virtual status_t insertGbp(SurfaceMap* /*out*/outSurfaceMap,
             Vector<int32_t>* /*out*/outputStreamIds, int32_t* /*out*/currentStreamId) = 0;
 
+    // Attach the internal composite stream ids.
+    virtual status_t insertCompositeStreamIds(
+            std::vector<int32_t>* compositeStreamIds /*out*/) = 0;
+
     // Return composite stream id.
     virtual int getStreamId() = 0;
 
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
index 0b91016..acad8c6 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
@@ -20,7 +20,6 @@
 
 #include "api1/client2/JpegProcessor.h"
 #include "common/CameraProviderManager.h"
-#include "dlfcn.h"
 #include <gui/Surface.h>
 #include <utils/Log.h>
 #include <utils/Trace.h>
@@ -43,9 +42,7 @@
         mBlobBufferAcquired(false),
         mProducerListener(new ProducerListener()),
         mMaxJpegSize(-1),
-        mIsLogicalCamera(false),
-        mDepthPhotoLibHandle(nullptr),
-        mDepthPhotoProcess(nullptr) {
+        mIsLogicalCamera(false) {
     sp<CameraDeviceBase> cameraDevice = device.promote();
     if (cameraDevice.get() != nullptr) {
         CameraMetadata staticInfo = cameraDevice->info();
@@ -83,19 +80,6 @@
         }
 
         getSupportedDepthSizes(staticInfo, &mSupportedDepthSizes);
-
-        mDepthPhotoLibHandle = dlopen(camera3::kDepthPhotoLibrary, RTLD_NOW | RTLD_LOCAL);
-        if (mDepthPhotoLibHandle != nullptr) {
-            mDepthPhotoProcess = reinterpret_cast<camera3::process_depth_photo_frame> (
-                    dlsym(mDepthPhotoLibHandle, camera3::kDepthPhotoProcessFunction));
-            if (mDepthPhotoProcess == nullptr) {
-                ALOGE("%s: Failed to link to depth photo process function: %s", __FUNCTION__,
-                        dlerror());
-            }
-        } else {
-            ALOGE("%s: Failed to link to depth photo library: %s", __FUNCTION__, dlerror());
-        }
-
     }
 }
 
@@ -108,11 +92,6 @@
     mDepthSurface.clear();
     mDepthConsumer = nullptr;
     mDepthSurface = nullptr;
-    if (mDepthPhotoLibHandle != nullptr) {
-        dlclose(mDepthPhotoLibHandle);
-        mDepthPhotoLibHandle = nullptr;
-    }
-    mDepthPhotoProcess = nullptr;
 }
 
 void DepthCompositeStream::compilePendingInputLocked() {
@@ -356,7 +335,7 @@
     }
 
     size_t actualJpegSize = 0;
-    res = mDepthPhotoProcess(depthPhoto, finalJpegBufferSize, dstBuffer, &actualJpegSize);
+    res = processDepthPhotoFrame(depthPhoto, finalJpegBufferSize, dstBuffer, &actualJpegSize);
     if (res != 0) {
         ALOGE("%s: Depth photo processing failed: %s (%d)", __FUNCTION__, strerror(-res), res);
         outputANW->cancelBuffer(mOutputSurface.get(), anb, /*fence*/ -1);
@@ -583,11 +562,6 @@
         return NO_ERROR;
     }
 
-    if ((mDepthPhotoLibHandle == nullptr) || (mDepthPhotoProcess == nullptr)) {
-        ALOGE("%s: Depth photo library is not present!", __FUNCTION__);
-        return NO_INIT;
-    }
-
     if (mOutputSurface.get() == nullptr) {
         ALOGE("%s: No valid output surface set!", __FUNCTION__);
         return NO_INIT;
@@ -709,6 +683,18 @@
     return NO_ERROR;
 }
 
+status_t DepthCompositeStream::insertCompositeStreamIds(
+        std::vector<int32_t>* compositeStreamIds /*out*/) {
+    if (compositeStreamIds == nullptr) {
+        return BAD_VALUE;
+    }
+
+    compositeStreamIds->push_back(mDepthStreamId);
+    compositeStreamIds->push_back(mBlobStreamId);
+
+    return OK;
+}
+
 void DepthCompositeStream::onResultError(const CaptureResultExtras& resultExtras) {
     // Processing can continue even in case of result errors.
     // At the moment depth composite stream processing relies mainly on static camera
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.h b/services/camera/libcameraservice/api2/DepthCompositeStream.h
index 28a7826..1bf714d 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.h
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.h
@@ -56,6 +56,7 @@
     status_t configureStream() override;
     status_t insertGbp(SurfaceMap* /*out*/outSurfaceMap, Vector<int32_t>* /*out*/outputStreamIds,
             int32_t* /*out*/currentStreamId) override;
+    status_t insertCompositeStreamIds(std::vector<int32_t>* compositeStreamIds /*out*/) override;
     int getStreamId() override { return mBlobStreamId; }
 
     // CpuConsumer listener implementation
@@ -126,8 +127,6 @@
     std::vector<std::tuple<size_t, size_t>> mSupportedDepthSizes;
     std::vector<float>   mIntrinsicCalibration, mLensDistortion;
     bool                 mIsLogicalCamera;
-    void*                mDepthPhotoLibHandle;
-    process_depth_photo_frame mDepthPhotoProcess;
 
     // Keep all incoming Depth buffer timestamps pending further processing.
     std::vector<int64_t> mInputDepthBuffers;
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
index bf4d1c8..d25e467 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
@@ -67,6 +67,7 @@
         mDequeuedOutputBufferCnt(0),
         mLockedAppSegmentBufferCnt(0),
         mCodecOutputCounter(0),
+        mQuality(-1),
         mGridTimestampUs(0) {
 }
 
@@ -503,6 +504,18 @@
     return NO_ERROR;
 }
 
+status_t HeicCompositeStream::insertCompositeStreamIds(
+        std::vector<int32_t>* compositeStreamIds /*out*/) {
+    if (compositeStreamIds == nullptr) {
+        return BAD_VALUE;
+    }
+
+    compositeStreamIds->push_back(mAppSegmentStreamId);
+    compositeStreamIds->push_back(mMainImageStreamId);
+
+    return OK;
+}
+
 void HeicCompositeStream::onShutter(const CaptureResultExtras& resultExtras, nsecs_t timestamp) {
     Mutex::Autolock l(mMutex);
     if (mErrorState) {
@@ -525,6 +538,12 @@
         mPendingInputFrames[it->first].orientation = it->second.first;
         mPendingInputFrames[it->first].quality = it->second.second;
         mSettingsByTimestamp.erase(it);
+
+        // Set encoder quality if no inflight encoding
+        if (mPendingInputFrames.size() == 1) {
+            int32_t newQuality = mPendingInputFrames.begin()->second.quality;
+            updateCodecQualityLocked(newQuality);
+        }
     }
 
     while (!mInputAppSegmentBuffers.empty()) {
@@ -851,17 +870,6 @@
                 strerror(-res), res);
         return res;
     }
-    // Set encoder quality
-    {
-        sp<AMessage> qualityParams = new AMessage;
-        qualityParams->setInt32(PARAMETER_KEY_VIDEO_BITRATE, inputFrame.quality);
-        res = mCodec->setParameters(qualityParams);
-        if (res != OK) {
-            ALOGE("%s: Failed to set codec quality: %s (%d)",
-                    __FUNCTION__, strerror(-res), res);
-            return res;
-        }
-    }
 
     ssize_t trackId = inputFrame.muxer->addTrack(inputFrame.format);
     if (trackId < 0) {
@@ -1148,16 +1156,30 @@
 
 void HeicCompositeStream::releaseInputFramesLocked() {
     auto it = mPendingInputFrames.begin();
+    bool inputFrameDone = false;
     while (it != mPendingInputFrames.end()) {
         auto& inputFrame = it->second;
         if (inputFrame.error ||
             (inputFrame.appSegmentWritten && inputFrame.pendingOutputTiles == 0)) {
             releaseInputFrameLocked(&inputFrame);
             it = mPendingInputFrames.erase(it);
+            inputFrameDone = true;
         } else {
             it++;
         }
     }
+
+    // Update the codec quality based on the first upcoming input frame.
+    // Note that when encoding is in surface mode, there is currently no way
+    // for the camera service to synchronize the quality setting on a per-frame
+    // basis: we don't get notified when the codec is ready to consume a new
+    // input frame. So we update the codec quality on a best-effort basis.
+    if (inputFrameDone) {
+        auto firstPendingFrame = mPendingInputFrames.begin();
+        if (firstPendingFrame != mPendingInputFrames.end()) {
+            updateCodecQualityLocked(firstPendingFrame->second.quality);
+        }
+    }
 }
 
 status_t HeicCompositeStream::initializeCodec(uint32_t width, uint32_t height,
@@ -1546,6 +1568,20 @@
     return maxAppsSegment * (2 + 0xFFFF) + sizeof(struct CameraBlob);
 }
 
+void HeicCompositeStream::updateCodecQualityLocked(int32_t quality) {
+    if (quality != mQuality) {
+        sp<AMessage> qualityParams = new AMessage;
+        qualityParams->setInt32(PARAMETER_KEY_VIDEO_BITRATE, quality);
+        status_t res = mCodec->setParameters(qualityParams);
+        if (res != OK) {
+            ALOGE("%s: Failed to set codec quality: %s (%d)",
+                    __FUNCTION__, strerror(-res), res);
+        } else {
+            mQuality = quality;
+        }
+    }
+}
+
 bool HeicCompositeStream::threadLoop() {
     int64_t currentTs = INT64_MAX;
     bool newInputAvailable = false;
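
The quality handling above boils down to caching the last value pushed to the codec; a minimal sketch of that pattern (helper name is illustrative), using the same PARAMETER_KEY_VIDEO_BITRATE parameter the stream already relies on:

    // Sketch of the caching pattern behind updateCodecQualityLocked().
    void updateQualityIfChanged(const sp<MediaCodec>& codec, int32_t& lastQuality,
            int32_t newQuality) {
        if (newQuality == lastQuality) {
            return;  // Avoid a redundant setParameters() round trip.
        }
        sp<AMessage> params = new AMessage;
        params->setInt32(PARAMETER_KEY_VIDEO_BITRATE, newQuality);
        if (codec->setParameters(params) == OK) {
            lastQuality = newQuality;  // Remember only successfully applied values.
        }
    }

Since surface-mode encoding gives no per-frame synchronization point, the update remains best effort: the codec may pick up the new quality one or more frames late.
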
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.h b/services/camera/libcameraservice/api2/HeicCompositeStream.h
index 04e7b83..8fc521e 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.h
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.h
@@ -55,6 +55,8 @@
     status_t insertGbp(SurfaceMap* /*out*/outSurfaceMap, Vector<int32_t>* /*out*/outputStreamIds,
             int32_t* /*out*/currentStreamId) override;
 
+    status_t insertCompositeStreamIds(std::vector<int32_t>* compositeStreamIds /*out*/) override;
+
     void onShutter(const CaptureResultExtras& resultExtras, nsecs_t timestamp) override;
 
     int getStreamId() override { return mMainImageStreamId; }
@@ -199,6 +201,7 @@
             size_t top, size_t left, size_t width, size_t height);
     void initCopyRowFunction(int32_t width);
     static size_t calcAppSegmentMaxSize(const CameraMetadata& info);
+    void updateCodecQualityLocked(int32_t quality);
 
     static const nsecs_t kWaitDuration = 10000000; // 10 ms
     static const int32_t kDefaultJpegQuality = 99;
@@ -240,6 +243,7 @@
     std::vector<CodecOutputBufferInfo> mCodecOutputBuffers;
     std::queue<int64_t> mCodecOutputBufferTimestamps;
     size_t mCodecOutputCounter;
+    int32_t mQuality;
 
     // Keep all incoming Yuv buffer pending tiling and encoding (for HEVC YUV tiling only)
     std::vector<int64_t> mInputYuvBuffers;
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
index 8792f9a..0a41776 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
@@ -111,7 +111,7 @@
         return res;
     }
 
-    wp<CameraDeviceBase::NotificationListener> weakThis(this);
+    wp<NotificationListener> weakThis(this);
     res = mDevice->setNotifyCallback(weakThis);
 
     return OK;
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.h b/services/camera/libcameraservice/common/Camera2ClientBase.h
index 12cba0b..042f5aa 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.h
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.h
@@ -29,7 +29,7 @@
 template <typename TClientBase>
 class Camera2ClientBase :
         public TClientBase,
-        public CameraDeviceBase::NotificationListener
+        public NotificationListener
 {
 public:
     typedef typename TClientBase::TCamCallbacks TCamCallbacks;
@@ -61,7 +61,7 @@
     virtual status_t      dumpClient(int fd, const Vector<String16>& args);
 
     /**
-     * CameraDeviceBase::NotificationListener implementation
+     * NotificationListener implementation
      */
 
     virtual void          notifyError(int32_t errorCode,
diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.cpp b/services/camera/libcameraservice/common/CameraDeviceBase.cpp
index 6c4e87f..0efe4bc 100644
--- a/services/camera/libcameraservice/common/CameraDeviceBase.cpp
+++ b/services/camera/libcameraservice/common/CameraDeviceBase.cpp
@@ -24,7 +24,4 @@
 CameraDeviceBase::~CameraDeviceBase() {
 }
 
-CameraDeviceBase::NotificationListener::~NotificationListener() {
-}
-
 } // namespace android
diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h
index 1026fdf..2f01198 100644
--- a/services/camera/libcameraservice/common/CameraDeviceBase.h
+++ b/services/camera/libcameraservice/common/CameraDeviceBase.h
@@ -234,6 +234,12 @@
     virtual status_t configureStreams(const CameraMetadata& sessionParams,
             int operatingMode = 0) = 0;
 
+    /**
+     * Retrieve a list of all stream ids that were advertised as capable of
+     * supporting offline processing mode by the HAL after the last stream configuration.
+     */
+    virtual void getOfflineStreamIds(std::vector<int> *offlineStreamIds) = 0;
+
     // get the buffer producer of the input stream
     virtual status_t getInputBufferProducer(
             sp<IGraphicBufferProducer> *producer) = 0;
@@ -259,35 +265,6 @@
     virtual ssize_t getJpegBufferSize(uint32_t width, uint32_t height) const = 0;
 
     /**
-     * Abstract class for HAL notification listeners
-     */
-    class NotificationListener : public virtual RefBase {
-      public:
-        // The set of notifications is a merge of the notifications required for
-        // API1 and API2.
-
-        // Required for API 1 and 2
-        virtual void notifyError(int32_t errorCode,
-                                 const CaptureResultExtras &resultExtras) = 0;
-
-        // Required only for API2
-        virtual void notifyIdle() = 0;
-        virtual void notifyShutter(const CaptureResultExtras &resultExtras,
-                nsecs_t timestamp) = 0;
-        virtual void notifyPrepared(int streamId) = 0;
-        virtual void notifyRequestQueueEmpty() = 0;
-
-        // Required only for API1
-        virtual void notifyAutoFocus(uint8_t newState, int triggerId) = 0;
-        virtual void notifyAutoExposure(uint8_t newState, int triggerId) = 0;
-        virtual void notifyAutoWhitebalance(uint8_t newState,
-                int triggerId) = 0;
-        virtual void notifyRepeatingRequestError(long lastFrameNumber) = 0;
-      protected:
-        virtual ~NotificationListener();
-    };
-
-    /**
      * Connect HAL notifications to a listener. Overwrites previous
      * listener. Set to NULL to stop receiving notifications.
      */
diff --git a/services/camera/libcameraservice/common/CameraOfflineSessionBase.h b/services/camera/libcameraservice/common/CameraOfflineSessionBase.h
index 3862521..05ea7fb 100644
--- a/services/camera/libcameraservice/common/CameraOfflineSessionBase.h
+++ b/services/camera/libcameraservice/common/CameraOfflineSessionBase.h
@@ -25,10 +25,42 @@
 
 namespace android {
 
+/**
+ * Abstract class for HAL notification listeners
+ */
+class NotificationListener : public virtual RefBase {
+  public:
+    // The set of notifications is a merge of the notifications required for
+    // API1 and API2.
+
+    // Required for API 1 and 2
+    virtual void notifyError(int32_t errorCode,
+                             const CaptureResultExtras &resultExtras) = 0;
+
+    // Required only for API2
+    virtual void notifyIdle() = 0;
+    virtual void notifyShutter(const CaptureResultExtras &resultExtras,
+            nsecs_t timestamp) = 0;
+    virtual void notifyPrepared(int streamId) = 0;
+    virtual void notifyRequestQueueEmpty() = 0;
+
+    // Required only for API1
+    virtual void notifyAutoFocus(uint8_t newState, int triggerId) = 0;
+    virtual void notifyAutoExposure(uint8_t newState, int triggerId) = 0;
+    virtual void notifyAutoWhitebalance(uint8_t newState,
+            int triggerId) = 0;
+    virtual void notifyRepeatingRequestError(long lastFrameNumber) = 0;
+  protected:
+    virtual ~NotificationListener() {}
+};
+
 class CameraOfflineSessionBase : public virtual RefBase {
   public:
     virtual ~CameraOfflineSessionBase();
 
+    virtual status_t initialize(
+            wp<NotificationListener> listener) = 0;
+
     // The session's original camera ID
     virtual const String8& getId() const = 0;
 
@@ -36,8 +68,6 @@
 
     virtual status_t dump(int fd) = 0;
 
-    virtual status_t abort() = 0;
-
     /**
      * Capture result passing
      */
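
With NotificationListener promoted to a standalone interface, any component can implement it and hand a weak reference to the offline session, as CameraOfflineSessionClient::initialize() does. A minimal, hypothetical stub (class name and log strings are illustrative):

    // Hypothetical listener: only the notifications of interest do real work; the
    // remaining pure virtuals are satisfied with no-ops.
    class MyOfflineListener : public NotificationListener {
      public:
        void notifyError(int32_t errorCode,
                const CaptureResultExtras& /*resultExtras*/) override {
            ALOGE("Offline device reported error %d", errorCode);
        }
        void notifyShutter(const CaptureResultExtras& /*resultExtras*/,
                nsecs_t timestamp) override {
            ALOGV("Shutter at %lld", (long long) timestamp);
        }
        void notifyIdle() override {}
        void notifyPrepared(int /*streamId*/) override {}
        void notifyRequestQueueEmpty() override {}
        void notifyAutoFocus(uint8_t /*newState*/, int /*triggerId*/) override {}
        void notifyAutoExposure(uint8_t /*newState*/, int /*triggerId*/) override {}
        void notifyAutoWhitebalance(uint8_t /*newState*/, int /*triggerId*/) override {}
        void notifyRepeatingRequestError(long /*lastFrameNumber*/) override {}
    };

    // Registration mirrors CameraOfflineSessionClient::initialize():
    //     sp<MyOfflineListener> listener = new MyOfflineListener();
    //     offlineSession->initialize(wp<NotificationListener>(listener));
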
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.cpp b/services/camera/libcameraservice/common/CameraProviderManager.cpp
index 7ed4c3d..0f74a48 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.cpp
+++ b/services/camera/libcameraservice/common/CameraProviderManager.cpp
@@ -40,6 +40,7 @@
 #include <utils/Trace.h>
 
 #include "api2/HeicCompositeStream.h"
+#include "device3/ZoomRatioMapper.h"
 
 namespace android {
 
@@ -232,6 +233,15 @@
     return deviceInfo->hasFlashUnit();
 }
 
+bool CameraProviderManager::supportNativeZoomRatio(const std::string &id) const {
+    std::lock_guard<std::mutex> lock(mInterfaceMutex);
+
+    auto deviceInfo = findDeviceInfoLocked(id);
+    if (deviceInfo == nullptr) return false;
+
+    return deviceInfo->supportNativeZoomRatio();
+}
+
 status_t CameraProviderManager::getResourceCost(const std::string &id,
         CameraResourceCost* cost) const {
     std::lock_guard<std::mutex> lock(mInterfaceMutex);
@@ -706,31 +716,6 @@
     }
 }
 
-bool CameraProviderManager::ProviderInfo::DeviceInfo3::isDepthPhotoLibraryPresent() {
-    static bool libraryPresent = false;
-    static bool initialized = false;
-    if (initialized) {
-        return libraryPresent;
-    } else {
-        initialized = true;
-    }
-
-    void* depthLibHandle = dlopen(camera3::kDepthPhotoLibrary, RTLD_NOW | RTLD_LOCAL);
-    if (depthLibHandle == nullptr) {
-        return false;
-    }
-
-    auto processFunc = dlsym(depthLibHandle, camera3::kDepthPhotoProcessFunction);
-    if (processFunc != nullptr) {
-        libraryPresent = true;
-    } else {
-        libraryPresent = false;
-    }
-    dlclose(depthLibHandle);
-
-    return libraryPresent;
-}
-
 status_t CameraProviderManager::ProviderInfo::DeviceInfo3::addDynamicDepthTags() {
     uint32_t depthExclTag = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE;
     uint32_t depthSizesTag = ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS;
@@ -778,11 +763,6 @@
         return OK;
     }
 
-    if(!isDepthPhotoLibraryPresent()) {
-        // Depth photo processing library is not present, nothing more to do.
-        return OK;
-    }
-
     std::vector<int32_t> dynamicDepthEntries;
     for (const auto& it : supportedDynamicDepthSizes) {
         int32_t entry[4] = {HAL_PIXEL_FORMAT_BLOB, static_cast<int32_t> (std::get<0>(it)),
@@ -2035,6 +2015,13 @@
                 __FUNCTION__, strerror(-res), res);
     }
 
+    res = camera3::ZoomRatioMapper::overrideZoomRatioTags(
+            &mCameraCharacteristics, &mSupportNativeZoomRatio);
+    if (OK != res) {
+        ALOGE("%s: Unable to override zoomRatio related tags: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+    }
+
     camera_metadata_entry flashAvailable =
             mCameraCharacteristics.find(ANDROID_FLASH_INFO_AVAILABLE);
     if (flashAvailable.count == 1 &&
@@ -2095,6 +2082,13 @@
                         CameraProviderManager::statusToString(status), status);
                 return;
             }
+
+            res = camera3::ZoomRatioMapper::overrideZoomRatioTags(
+                    &mPhysicalCameraCharacteristics[id], &mSupportNativeZoomRatio);
+            if (OK != res) {
+                ALOGE("%s: Unable to override zoomRatio related tags: %s (%d)",
+                        __FUNCTION__, strerror(-res), res);
+            }
         }
     }
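
A short usage sketch for the new native zoom ratio query; the caller shown here is illustrative and not part of this change:

    // Sketch: decide whether zoom ratio requests can be passed through to the HAL
    // or need to be emulated by the zoom ratio mapper.
    bool halHandlesZoomRatio = providerManager->supportNativeZoomRatio(cameraId);
    ALOGV("Camera %s native zoom ratio support: %d", cameraId.c_str(), halHandlesZoomRatio);
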
 
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.h b/services/camera/libcameraservice/common/CameraProviderManager.h
index 2ef1f6f..58df0e8 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.h
+++ b/services/camera/libcameraservice/common/CameraProviderManager.h
@@ -187,6 +187,11 @@
     bool hasFlashUnit(const std::string &id) const;
 
     /**
+     * Return true if the camera device has native zoom ratio support.
+     */
+    bool supportNativeZoomRatio(const std::string &id) const;
+
+    /**
      * Return the resource cost of this camera device
      */
     status_t getResourceCost(const std::string &id,
@@ -416,6 +421,7 @@
             sp<ProviderInfo> mParentProvider;
 
             bool hasFlashUnit() const { return mHasFlashUnit; }
+            bool supportNativeZoomRatio() const { return mSupportNativeZoomRatio; }
             virtual status_t setTorchMode(bool enabled) = 0;
             virtual status_t getCameraInfo(hardware::CameraInfo *info) const = 0;
             virtual bool isAPI1Compatible() const = 0;
@@ -449,10 +455,11 @@
                     mIsLogicalCamera(false), mResourceCost(resourceCost),
                     mStatus(hardware::camera::common::V1_0::CameraDeviceStatus::PRESENT),
                     mParentProvider(parentProvider), mHasFlashUnit(false),
-                    mPublicCameraIds(publicCameraIds) {}
+                    mSupportNativeZoomRatio(false), mPublicCameraIds(publicCameraIds) {}
             virtual ~DeviceInfo();
         protected:
-            bool mHasFlashUnit;
+            bool mHasFlashUnit; // const after constructor
+            bool mSupportNativeZoomRatio; // const after constructor
             const std::vector<std::string>& mPublicCameraIds;
 
             template<class InterfaceT>
@@ -538,7 +545,6 @@
             void getSupportedDynamicDepthDurations(const std::vector<int64_t>& depthDurations,
                     const std::vector<int64_t>& blobDurations,
                     std::vector<int64_t> *dynamicDepthDurations /*out*/);
-            static bool isDepthPhotoLibraryPresent();
             static void getSupportedDynamicDepthSizes(
                     const std::vector<std::tuple<size_t, size_t>>& blobSizes,
                     const std::vector<std::tuple<size_t, size_t>>& depthSizes,
diff --git a/services/camera/libcameraservice/common/DepthPhotoProcessor.cpp b/services/camera/libcameraservice/common/DepthPhotoProcessor.cpp
index 94541d8..c995670 100644
--- a/services/camera/libcameraservice/common/DepthPhotoProcessor.cpp
+++ b/services/camera/libcameraservice/common/DepthPhotoProcessor.cpp
@@ -410,7 +410,7 @@
     return DepthMap::FromData(depthParams, items);
 }
 
-extern "C" int processDepthPhotoFrame(DepthPhotoInputFrame inputFrame, size_t depthPhotoBufferSize,
+int processDepthPhotoFrame(DepthPhotoInputFrame inputFrame, size_t depthPhotoBufferSize,
         void* depthPhotoBuffer /*out*/, size_t* depthPhotoActualSize /*out*/) {
     if ((inputFrame.mMainJpegBuffer == nullptr) || (inputFrame.mDepthMapBuffer == nullptr) ||
             (depthPhotoBuffer == nullptr) || (depthPhotoActualSize == nullptr)) {
diff --git a/services/camera/libcameraservice/common/DepthPhotoProcessor.h b/services/camera/libcameraservice/common/DepthPhotoProcessor.h
index ba5ca9e..09b6935 100644
--- a/services/camera/libcameraservice/common/DepthPhotoProcessor.h
+++ b/services/camera/libcameraservice/common/DepthPhotoProcessor.h
@@ -64,9 +64,7 @@
             mOrientation(DepthPhotoOrientation::DEPTH_ORIENTATION_0_DEGREES) {}
 };
 
-static const char *kDepthPhotoLibrary = "libdepthphoto.so";
-static const char *kDepthPhotoProcessFunction = "processDepthPhotoFrame";
-typedef int (*process_depth_photo_frame) (DepthPhotoInputFrame /*inputFrame*/,
+int processDepthPhotoFrame(DepthPhotoInputFrame /*inputFrame*/,
         size_t /*depthPhotoBufferSize*/, void* /*depthPhotoBuffer out*/,
         size_t* /*depthPhotoActualSize out*/);
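
With the dlopen/dlsym indirection gone, callers simply link against and call processDepthPhotoFrame directly. A hedged sketch of a call site; the buffer variables and size bound are assumptions, only the function signature and the two pointer fields come from this change:

    // Sketch: direct call into the statically linked depth photo processor.
    DepthPhotoInputFrame inputFrame;
    inputFrame.mMainJpegBuffer = mainJpegData;   // assumed: compressed main image
    inputFrame.mDepthMapBuffer = depthMapData;   // assumed: depth map samples
    // ... remaining DepthPhotoInputFrame fields are filled in by the caller ...

    std::vector<uint8_t> depthPhoto(maxDepthPhotoSize);  // assumed size bound
    size_t actualSize = 0;
    int res = processDepthPhotoFrame(inputFrame, depthPhoto.size(), depthPhoto.data(),
            &actualSize);
    if (res != 0) {
        ALOGE("Depth photo processing failed: %s (%d)", strerror(-res), res);
    }
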
 
diff --git a/services/camera/libcameraservice/device3/BufferUtils.cpp b/services/camera/libcameraservice/device3/BufferUtils.cpp
new file mode 100644
index 0000000..cc29390
--- /dev/null
+++ b/services/camera/libcameraservice/device3/BufferUtils.cpp
@@ -0,0 +1,277 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "Camera3-BufUtils"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+//#define LOG_NDEBUG 0
+//#define LOG_NNDEBUG 0  // Per-frame verbose logging
+
+#include <inttypes.h>
+
+#include <utils/Log.h>
+
+#include "device3/BufferUtils.h"
+
+namespace android {
+namespace camera3 {
+
+camera3_buffer_status_t mapHidlBufferStatus(hardware::camera::device::V3_2::BufferStatus status) {
+    using hardware::camera::device::V3_2::BufferStatus;
+
+    switch (status) {
+        case BufferStatus::OK: return CAMERA3_BUFFER_STATUS_OK;
+        case BufferStatus::ERROR: return CAMERA3_BUFFER_STATUS_ERROR;
+    }
+    return CAMERA3_BUFFER_STATUS_ERROR;
+}
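
mapHidlBufferStatus() deliberately falls back to CAMERA3_BUFFER_STATUS_ERROR for any unrecognized value; a one-line hypothetical usage (the halBuffer variable is an assumption):

    // Sketch: convert the HIDL buffer status reported by the HAL.
    camera3_buffer_status_t status = mapHidlBufferStatus(halBuffer.status);
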
+
+void BufferRecords::takeInflightBufferMap(BufferRecords& other) {
+    std::lock_guard<std::mutex> oLock(other.mInflightLock);
+    std::lock_guard<std::mutex> lock(mInflightLock);
+    if (mInflightBufferMap.size() > 0) {
+        ALOGE("%s: inflight map is set in non-empty state!", __FUNCTION__);
+    }
+    mInflightBufferMap = std::move(other.mInflightBufferMap);
+    other.mInflightBufferMap.clear();
+}
+
+void BufferRecords::takeRequestedBufferMap(BufferRecords& other) {
+    std::lock_guard<std::mutex> oLock(other.mRequestedBuffersLock);
+    std::lock_guard<std::mutex> lock(mRequestedBuffersLock);
+    if (mRequestedBufferMap.size() > 0) {
+        ALOGE("%s: requested buffer map is set in non-empty state!", __FUNCTION__);
+    }
+    mRequestedBufferMap = std::move(other.mRequestedBufferMap);
+    other.mRequestedBufferMap.clear();
+}
+
+void BufferRecords::takeBufferCaches(BufferRecords& other, const std::vector<int32_t>& streams) {
+    std::lock_guard<std::mutex> oLock(other.mBufferIdMapLock);
+    std::lock_guard<std::mutex> lock(mBufferIdMapLock);
+    if (mBufferIdMaps.size() > 0) {
+        ALOGE("%s: buffer ID map is set in non-empty state!", __FUNCTION__);
+    }
+    for (auto streamId : streams) {
+        mBufferIdMaps.insert({streamId, std::move(other.mBufferIdMaps.at(streamId))});
+    }
+    other.mBufferIdMaps.clear();
+}
+
+std::pair<bool, uint64_t> BufferRecords::getBufferId(
+        const buffer_handle_t& buf, int streamId) {
+    std::lock_guard<std::mutex> lock(mBufferIdMapLock);
+
+    BufferIdMap& bIdMap = mBufferIdMaps.at(streamId);
+    auto it = bIdMap.find(buf);
+    if (it == bIdMap.end()) {
+        bIdMap[buf] = mNextBufferId++;
+        ALOGV("stream %d now have %zu buffer caches, buf %p",
+                streamId, bIdMap.size(), buf);
+        return std::make_pair(true, mNextBufferId - 1);
+    } else {
+        return std::make_pair(false, it->second);
+    }
+}
+
+void BufferRecords::tryCreateBufferCache(int streamId) {
+    std::lock_guard<std::mutex> lock(mBufferIdMapLock);
+    if (mBufferIdMaps.count(streamId) == 0) {
+        mBufferIdMaps.emplace(streamId, BufferIdMap{});
+    }
+}
+
+void BufferRecords::removeInactiveBufferCaches(const std::set<int32_t>& activeStreams) {
+    std::lock_guard<std::mutex> lock(mBufferIdMapLock);
+    for(auto it = mBufferIdMaps.begin(); it != mBufferIdMaps.end();) {
+        int streamId = it->first;
+        bool active = activeStreams.count(streamId) > 0;
+        if (!active) {
+            it = mBufferIdMaps.erase(it);
+        } else {
+            ++it;
+        }
+    }
+}
+
+uint64_t BufferRecords::removeOneBufferCache(int streamId, const native_handle_t* handle) {
+    std::lock_guard<std::mutex> lock(mBufferIdMapLock);
+    uint64_t bufferId = BUFFER_ID_NO_BUFFER;
+    auto mapIt = mBufferIdMaps.find(streamId);
+    if (mapIt == mBufferIdMaps.end()) {
+        // streamId might be from a deleted stream here
+        ALOGI("%s: stream %d has been removed",
+                __FUNCTION__, streamId);
+        return BUFFER_ID_NO_BUFFER;
+    }
+    BufferIdMap& bIdMap = mapIt->second;
+    auto it = bIdMap.find(handle);
+    if (it == bIdMap.end()) {
+        ALOGW("%s: cannot find buffer %p in stream %d",
+                __FUNCTION__, handle, streamId);
+        return BUFFER_ID_NO_BUFFER;
+    } else {
+        bufferId = it->second;
+        bIdMap.erase(it);
+        ALOGV("%s: stream %d now have %zu buffer caches after removing buf %p",
+                __FUNCTION__, streamId, bIdMap.size(), handle);
+    }
+    return bufferId;
+}
+
+std::vector<uint64_t> BufferRecords::clearBufferCaches(int streamId) {
+    std::lock_guard<std::mutex> lock(mBufferIdMapLock);
+    std::vector<uint64_t> ret;
+    auto mapIt = mBufferIdMaps.find(streamId);
+    if (mapIt == mBufferIdMaps.end()) {
+        ALOGE("%s: streamId %d not found!", __FUNCTION__, streamId);
+        return ret;
+    }
+    BufferIdMap& bIdMap = mapIt->second;
+    ret.reserve(bIdMap.size());
+    for (const auto& it : bIdMap) {
+        ret.push_back(it.second);
+    }
+    bIdMap.clear();
+    return ret;
+}
+
+bool BufferRecords::isStreamCached(int streamId) {
+    std::lock_guard<std::mutex> lock(mBufferIdMapLock);
+    return mBufferIdMaps.find(streamId) != mBufferIdMaps.end();
+}
+
+bool BufferRecords::verifyBufferIds(
+        int32_t streamId, std::vector<uint64_t>& bufIds) {
+    std::lock_guard<std::mutex> lock(mBufferIdMapLock);
+    camera3::BufferIdMap& bIdMap = mBufferIdMaps.at(streamId);
+    if (bIdMap.size() != bufIds.size()) {
+        ALOGE("%s: stream ID %d buffer cache number mismatch: %zu/%zu (service/HAL)",
+                __FUNCTION__, streamId, bIdMap.size(), bufIds.size());
+        return false;
+    }
+    std::vector<uint64_t> internalBufIds;
+    internalBufIds.reserve(bIdMap.size());
+    for (const auto& pair : bIdMap) {
+        internalBufIds.push_back(pair.second);
+    }
+    std::sort(bufIds.begin(), bufIds.end());
+    std::sort(internalBufIds.begin(), internalBufIds.end());
+    for (size_t i = 0; i < bufIds.size(); i++) {
+        if (bufIds[i] != internalBufIds[i]) {
+            ALOGE("%s: buffer cache mismatch! Service %" PRIu64 ", HAL %" PRIu64,
+                    __FUNCTION__, internalBufIds[i], bufIds[i]);
+            return false;
+        }
+    }
+    return true;
+}
+
+void BufferRecords::getInflightBufferKeys(
+        std::vector<std::pair<int32_t, int32_t>>* out) {
+    std::lock_guard<std::mutex> lock(mInflightLock);
+    out->clear();
+    out->reserve(mInflightBufferMap.size());
+    for (auto& pair : mInflightBufferMap) {
+        uint64_t key = pair.first;
+        int32_t streamId = key & 0xFFFFFFFF;
+        int32_t frameNumber = (key >> 32) & 0xFFFFFFFF;
+        out->push_back(std::make_pair(frameNumber, streamId));
+    }
+    return;
+}
+
+status_t BufferRecords::pushInflightBuffer(
+        int32_t frameNumber, int32_t streamId, buffer_handle_t *buffer) {
+    std::lock_guard<std::mutex> lock(mInflightLock);
+    uint64_t key = static_cast<uint64_t>(frameNumber) << 32 | static_cast<uint64_t>(streamId);
+    mInflightBufferMap[key] = buffer;
+    return OK;
+}
+
+status_t BufferRecords::popInflightBuffer(
+        int32_t frameNumber, int32_t streamId,
+        /*out*/ buffer_handle_t **buffer) {
+    std::lock_guard<std::mutex> lock(mInflightLock);
+
+    uint64_t key = static_cast<uint64_t>(frameNumber) << 32 | static_cast<uint64_t>(streamId);
+    auto it = mInflightBufferMap.find(key);
+    if (it == mInflightBufferMap.end()) return NAME_NOT_FOUND;
+    if (buffer != nullptr) {
+        *buffer = it->second;
+    }
+    mInflightBufferMap.erase(it);
+    return OK;
+}
+
+void BufferRecords::popInflightBuffers(
+        const std::vector<std::pair<int32_t, int32_t>>& buffers) {
+    for (const auto& pair : buffers) {
+        int32_t frameNumber = pair.first;
+        int32_t streamId = pair.second;
+        popInflightBuffer(frameNumber, streamId, nullptr);
+    }
+}
+
+status_t BufferRecords::pushInflightRequestBuffer(
+        uint64_t bufferId, buffer_handle_t* buf, int32_t streamId) {
+    std::lock_guard<std::mutex> lock(mRequestedBuffersLock);
+    auto pair = mRequestedBufferMap.insert({bufferId, {streamId, buf}});
+    if (!pair.second) {
+        ALOGE("%s: bufId %" PRIu64 " is already inflight!",
+                __FUNCTION__, bufferId);
+        return BAD_VALUE;
+    }
+    return OK;
+}
+
+// Find and pop a buffer_handle_t based on bufferId
+status_t BufferRecords::popInflightRequestBuffer(
+        uint64_t bufferId,
+        /*out*/ buffer_handle_t** buffer,
+        /*optional out*/ int32_t* streamId) {
+    if (buffer == nullptr) {
+        ALOGE("%s: buffer (%p) must not be null", __FUNCTION__, buffer);
+        return BAD_VALUE;
+    }
+    std::lock_guard<std::mutex> lock(mRequestedBuffersLock);
+    auto it = mRequestedBufferMap.find(bufferId);
+    if (it == mRequestedBufferMap.end()) {
+        ALOGE("%s: bufId %" PRIu64 " is not inflight!",
+                __FUNCTION__, bufferId);
+        return BAD_VALUE;
+    }
+    *buffer = it->second.second;
+    if (streamId != nullptr) {
+        *streamId = it->second.first;
+    }
+    mRequestedBufferMap.erase(it);
+    return OK;
+}
+
+void BufferRecords::getInflightRequestBufferKeys(
+        std::vector<uint64_t>* out) {
+    std::lock_guard<std::mutex> lock(mRequestedBuffersLock);
+    out->clear();
+    out->reserve(mRequestedBufferMap.size());
+    for (auto& pair : mRequestedBufferMap) {
+        out->push_back(pair.first);
+    }
+    return;
+}
+
+
+} // namespace camera3
+} // namespace android
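
BufferRecords keys its inflight-buffer map on a single 64-bit value that packs the frame number into the high 32 bits and the stream ID into the low 32 bits; pushInflightBuffer() builds the key and getInflightBufferKeys() unpacks it. The standalone sketch below shows that packing with hypothetical helper names (makeInflightKey/splitInflightKey are not part of BufferUtils).

    #include <cassert>
    #include <cstdint>
    #include <utility>

    // Hypothetical helpers mirroring the key layout used by BufferRecords;
    // stream IDs are assumed non-negative, as in the camera service.
    static uint64_t makeInflightKey(int32_t frameNumber, int32_t streamId) {
        return static_cast<uint64_t>(frameNumber) << 32 | static_cast<uint64_t>(streamId);
    }

    static std::pair<int32_t, int32_t> splitInflightKey(uint64_t key) {
        int32_t streamId = key & 0xFFFFFFFF;
        int32_t frameNumber = (key >> 32) & 0xFFFFFFFF;
        return std::make_pair(frameNumber, streamId);
    }

    int main() {
        uint64_t key = makeInflightKey(/*frameNumber*/ 1001, /*streamId*/ 3);
        std::pair<int32_t, int32_t> unpacked = splitInflightKey(key);
        assert(unpacked.first == 1001 && unpacked.second == 3);
        return 0;
    }

Packing both values into one integer lets a single std::unordered_map track (frameNumber, streamId) pairs without defining a composite key type and hasher.
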
diff --git a/services/camera/libcameraservice/device3/BufferUtils.h b/services/camera/libcameraservice/device3/BufferUtils.h
new file mode 100644
index 0000000..452a908
--- /dev/null
+++ b/services/camera/libcameraservice/device3/BufferUtils.h
@@ -0,0 +1,167 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA3_BUFFER_UTILS_H
+#define ANDROID_SERVERS_CAMERA3_BUFFER_UTILS_H
+
+#include <unordered_map>
+#include <mutex>
+#include <set>
+
+#include <cutils/native_handle.h>
+
+#include <android/hardware/camera/device/3.2/ICameraDevice.h>
+
+// TODO: remove legacy camera3.h references
+#include "hardware/camera3.h"
+
+#include <device3/Camera3OutputInterface.h>
+
+namespace android {
+
+namespace camera3 {
+
+    struct BufferHasher {
+        size_t operator()(const buffer_handle_t& buf) const {
+            if (buf == nullptr)
+                return 0;
+
+            size_t result = 1;
+            result = 31 * result + buf->numFds;
+            for (int i = 0; i < buf->numFds; i++) {
+                result = 31 * result + buf->data[i];
+            }
+            return result;
+        }
+    };
+
+    struct BufferComparator {
+        bool operator()(const buffer_handle_t& buf1, const buffer_handle_t& buf2) const {
+            if (buf1->numFds == buf2->numFds) {
+                for (int i = 0; i < buf1->numFds; i++) {
+                    if (buf1->data[i] != buf2->data[i]) {
+                        return false;
+                    }
+                }
+                return true;
+            }
+            return false;
+        }
+    };
+
+    // Per-stream map from buffer native handle to bufId
+    typedef std::unordered_map<const buffer_handle_t, uint64_t,
+            BufferHasher, BufferComparator> BufferIdMap;
+
+    // streamId -> BufferIdMap
+    typedef std::unordered_map<int, BufferIdMap> BufferIdMaps;
+
+    // Map of inflight buffers sent along in capture requests.
+    // Key is composed of (frameNumber << 32 | streamId)
+    typedef std::unordered_map<uint64_t, buffer_handle_t*> InflightBufferMap;
+
+    // Map of inflight buffers handed out by the requestStreamBuffers API
+    typedef std::unordered_map<uint64_t, std::pair<int32_t, buffer_handle_t*>> RequestedBufferMap;
+
+    // A class containing all buffer tracking information, such as inflight
+    // buffers and buffer ID caches
+    class BufferRecords : public BufferRecordsInterface {
+
+    public:
+        BufferRecords() {}
+
+        BufferRecords(BufferRecords&& other) :
+                mBufferIdMaps(other.mBufferIdMaps),
+                mNextBufferId(other.mNextBufferId),
+                mInflightBufferMap(other.mInflightBufferMap),
+                mRequestedBufferMap(other.mRequestedBufferMap) {}
+
+        virtual ~BufferRecords() {}
+
+        // Helper methods for taking over buffer records from another instance
+        void takeInflightBufferMap(BufferRecords& other);
+        void takeRequestedBufferMap(BufferRecords& other);
+        void takeBufferCaches(BufferRecords& other, const std::vector<int32_t>& streams);
+
+        // Look up, or newly assign, a buffer's unique ID.
+        // Returns a pair of (newlySeenBuffer, bufferId)
+        virtual std::pair<bool, uint64_t> getBufferId(
+                const buffer_handle_t& buf, int streamId) override;
+
+        void tryCreateBufferCache(int streamId);
+
+        void removeInactiveBufferCaches(const std::set<int32_t>& activeStreams);
+
+        // Return the removed buffer ID if the cache entry is found.
+        // Otherwise return BUFFER_ID_NO_BUFFER
+        uint64_t removeOneBufferCache(int streamId, const native_handle_t* handle);
+
+        // Clear all buffer caches for the given stream, but do not remove the stream itself.
+        // The IDs of the removed buffers are returned.
+        std::vector<uint64_t> clearBufferCaches(int streamId);
+
+        bool isStreamCached(int streamId);
+
+        // Return true if the input caches match what we have; otherwise false
+        bool verifyBufferIds(int32_t streamId, std::vector<uint64_t>& inBufIds);
+
+        // Get a vector of (frameNumber, streamId) pairs for the currently
+        // inflight buffers
+        void getInflightBufferKeys(std::vector<std::pair<int32_t, int32_t>>* out);
+
+        status_t pushInflightBuffer(int32_t frameNumber, int32_t streamId,
+                buffer_handle_t *buffer);
+
+        // Find and pop a buffer_handle_t based on frame number and stream ID
+        virtual status_t popInflightBuffer(int32_t frameNumber, int32_t streamId,
+                /*out*/ buffer_handle_t **buffer) override;
+
+        // Pop inflight buffers based on pairs of (frameNumber,streamId)
+        void popInflightBuffers(const std::vector<std::pair<int32_t, int32_t>>& buffers);
+
+        // Get a vector of bufferId of currently inflight buffers
+        void getInflightRequestBufferKeys(std::vector<uint64_t>* out);
+
+        // Register a bufferId -> (streamId, buffer_handle_t) entry in the inflight request buffer map
+        virtual status_t pushInflightRequestBuffer(
+                uint64_t bufferId, buffer_handle_t* buf, int32_t streamId) override;
+
+        // Find and pop a buffer_handle_t based on bufferId
+        virtual status_t popInflightRequestBuffer(uint64_t bufferId,
+                /*out*/ buffer_handle_t** buffer,
+                /*optional out*/ int32_t* streamId = nullptr) override;
+
+    private:
+        std::mutex mBufferIdMapLock;
+        BufferIdMaps mBufferIdMaps;
+        uint64_t mNextBufferId = 1; // 0 means no buffer
+
+        std::mutex mInflightLock;
+        InflightBufferMap mInflightBufferMap;
+
+        std::mutex mRequestedBuffersLock;
+        RequestedBufferMap mRequestedBufferMap;
+    }; // class BufferRecords
+
+    static const uint64_t BUFFER_ID_NO_BUFFER = 0;
+
+    camera3_buffer_status_t mapHidlBufferStatus(
+            hardware::camera::device::V3_2::BufferStatus status);
+} // namespace camera3
+
+} // namespace android
+
+#endif
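
BufferIdMap above keys an unordered_map directly on buffer_handle_t, with BufferHasher and BufferComparator hashing and comparing the handle's file descriptors rather than the pointer value. The self-contained sketch below mimics that scheme with a simplified stand-in type (FakeHandle is illustrative and not the real cutils native_handle_t).

    #include <cinttypes>
    #include <cstdint>
    #include <cstdio>
    #include <unordered_map>

    // Simplified stand-in for native_handle_t: only the fields the hasher touches.
    struct FakeHandle {
        int numFds;
        int data[4];   // first numFds entries are file descriptors
    };
    typedef const FakeHandle* handle_t;

    struct HandleHasher {
        size_t operator()(const handle_t& h) const {
            if (h == nullptr) return 0;
            size_t result = 1;
            result = 31 * result + h->numFds;
            for (int i = 0; i < h->numFds; i++) {
                result = 31 * result + h->data[i];
            }
            return result;
        }
    };

    struct HandleComparator {
        bool operator()(const handle_t& a, const handle_t& b) const {
            if (a->numFds != b->numFds) return false;
            for (int i = 0; i < a->numFds; i++) {
                if (a->data[i] != b->data[i]) return false;
            }
            return true;
        }
    };

    int main() {
        std::unordered_map<handle_t, uint64_t, HandleHasher, HandleComparator> idMap;
        FakeHandle h1{2, {10, 11, 0, 0}};
        FakeHandle h2{2, {10, 11, 0, 0}};   // different object, same fds
        idMap[&h1] = 42;
        // Lookup via &h2 succeeds because hashing and equality use the fds,
        // not the pointer value.
        printf("lookup via h2 -> %" PRIu64 "\n", idMap.at(&h2));
        return 0;
    }

In the sketch, &h2 finds the entry inserted under &h1 because both hash and equality are computed from the handle contents, which is the property the buffer ID cache relies on.
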
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 4e5c8d6..23e26ce 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -58,10 +58,10 @@
 #include "device3/Camera3InputStream.h"
 #include "device3/Camera3DummyStream.h"
 #include "device3/Camera3SharedOutputStream.h"
-#include "device3/Camera3OfflineSession.h"
 #include "CameraService.h"
 #include "utils/CameraThreadState.h"
 
+#include <algorithm>
 #include <tuple>
 
 using namespace android::camera3;
@@ -133,6 +133,7 @@
         session->close();
         return res;
     }
+    mSupportNativeZoomRatio = manager->supportNativeZoomRatio(mId.string());
 
     std::vector<std::string> physicalCameraIds;
     bool isLogical = manager->isLogicalCamera(mId.string(), &physicalCameraIds);
@@ -147,8 +148,11 @@
                 return res;
             }
 
-            if (DistortionMapper::isDistortionSupported(mPhysicalDeviceInfoMap[physicalId])) {
-                mDistortionMappers[physicalId].setupStaticInfo(mPhysicalDeviceInfoMap[physicalId]);
+            bool usePrecorrectArray =
+                    DistortionMapper::isDistortionSupported(mPhysicalDeviceInfoMap[physicalId]);
+            if (usePrecorrectArray) {
+                res = mDistortionMappers[physicalId].setupStaticInfo(
+                        mPhysicalDeviceInfoMap[physicalId]);
                 if (res != OK) {
                     SET_ERR_L("Unable to read camera %s's calibration fields for distortion "
                             "correction", physicalId.c_str());
@@ -156,6 +160,10 @@
                     return res;
                 }
             }
+
+            mZoomRatioMappers[physicalId] = ZoomRatioMapper(
+                    &mPhysicalDeviceInfoMap[physicalId],
+                    mSupportNativeZoomRatio, usePrecorrectArray);
         }
     }
 
@@ -331,13 +339,18 @@
         }
     }
 
-    if (DistortionMapper::isDistortionSupported(mDeviceInfo)) {
+    bool usePrecorrectArray = DistortionMapper::isDistortionSupported(mDeviceInfo);
+    if (usePrecorrectArray) {
         res = mDistortionMappers[mId.c_str()].setupStaticInfo(mDeviceInfo);
         if (res != OK) {
             SET_ERR_L("Unable to read necessary calibration fields for distortion correction");
             return res;
         }
     }
+
+    mZoomRatioMappers[mId.c_str()] = ZoomRatioMapper(&mDeviceInfo,
+            mSupportNativeZoomRatio, usePrecorrectArray);
+
     return OK;
 }
 
@@ -561,14 +574,6 @@
     return OK;
 }
 
-camera3_buffer_status_t Camera3Device::mapHidlBufferStatus(BufferStatus status) {
-    switch (status) {
-        case BufferStatus::OK: return CAMERA3_BUFFER_STATUS_OK;
-        case BufferStatus::ERROR: return CAMERA3_BUFFER_STATUS_ERROR;
-    }
-    return CAMERA3_BUFFER_STATUS_ERROR;
-}
-
 int Camera3Device::mapToFrameworkFormat(
         hardware::graphics::common::V1_0::PixelFormat pixelFormat) {
     return static_cast<uint32_t>(pixelFormat);
@@ -988,230 +993,18 @@
 hardware::Return<void> Camera3Device::requestStreamBuffers(
         const hardware::hidl_vec<hardware::camera::device::V3_5::BufferRequest>& bufReqs,
         requestStreamBuffers_cb _hidl_cb) {
-    using hardware::camera::device::V3_5::BufferRequestStatus;
-    using hardware::camera::device::V3_5::StreamBufferRet;
-    using hardware::camera::device::V3_5::StreamBufferRequestError;
-
-    std::lock_guard<std::mutex> lock(mRequestBufferInterfaceLock);
-
-    hardware::hidl_vec<StreamBufferRet> bufRets;
-    if (!mUseHalBufManager) {
-        ALOGE("%s: Camera %s does not support HAL buffer management",
-                __FUNCTION__, mId.string());
-        _hidl_cb(BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS, bufRets);
-        return hardware::Void();
-    }
-
-    SortedVector<int32_t> streamIds;
-    ssize_t sz = streamIds.setCapacity(bufReqs.size());
-    if (sz < 0 || static_cast<size_t>(sz) != bufReqs.size()) {
-        ALOGE("%s: failed to allocate memory for %zu buffer requests",
-                __FUNCTION__, bufReqs.size());
-        _hidl_cb(BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS, bufRets);
-        return hardware::Void();
-    }
-
-    if (bufReqs.size() > mOutputStreams.size()) {
-        ALOGE("%s: too many buffer requests (%zu > # of output streams %zu)",
-                __FUNCTION__, bufReqs.size(), mOutputStreams.size());
-        _hidl_cb(BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS, bufRets);
-        return hardware::Void();
-    }
-
-    // Check for repeated streamId
-    for (const auto& bufReq : bufReqs) {
-        if (streamIds.indexOf(bufReq.streamId) != NAME_NOT_FOUND) {
-            ALOGE("%s: Stream %d appear multiple times in buffer requests",
-                    __FUNCTION__, bufReq.streamId);
-            _hidl_cb(BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS, bufRets);
-            return hardware::Void();
-        }
-        streamIds.add(bufReq.streamId);
-    }
-
-    if (!mRequestBufferSM.startRequestBuffer()) {
-        ALOGE("%s: request buffer disallowed while camera service is configuring",
-                __FUNCTION__);
-        _hidl_cb(BufferRequestStatus::FAILED_CONFIGURING, bufRets);
-        return hardware::Void();
-    }
-
-    bufRets.resize(bufReqs.size());
-
-    bool allReqsSucceeds = true;
-    bool oneReqSucceeds = false;
-    for (size_t i = 0; i < bufReqs.size(); i++) {
-        const auto& bufReq = bufReqs[i];
-        auto& bufRet = bufRets[i];
-        int32_t streamId = bufReq.streamId;
-        sp<Camera3OutputStreamInterface> outputStream = mOutputStreams.get(streamId);
-        if (outputStream == nullptr) {
-            ALOGE("%s: Output stream id %d not found!", __FUNCTION__, streamId);
-            hardware::hidl_vec<StreamBufferRet> emptyBufRets;
-            _hidl_cb(BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS, emptyBufRets);
-            mRequestBufferSM.endRequestBuffer();
-            return hardware::Void();
-        }
-
-        if (outputStream->isAbandoned()) {
-            bufRet.val.error(StreamBufferRequestError::STREAM_DISCONNECTED);
-            allReqsSucceeds = false;
-            continue;
-        }
-
-        bufRet.streamId = streamId;
-        size_t handOutBufferCount = outputStream->getOutstandingBuffersCount();
-        uint32_t numBuffersRequested = bufReq.numBuffersRequested;
-        size_t totalHandout = handOutBufferCount + numBuffersRequested;
-        uint32_t maxBuffers = outputStream->asHalStream()->max_buffers;
-        if (totalHandout > maxBuffers) {
-            // Not able to allocate enough buffer. Exit early for this stream
-            ALOGE("%s: request too much buffers for stream %d: at HAL: %zu + requesting: %d"
-                    " > max: %d", __FUNCTION__, streamId, handOutBufferCount,
-                    numBuffersRequested, maxBuffers);
-            bufRet.val.error(StreamBufferRequestError::MAX_BUFFER_EXCEEDED);
-            allReqsSucceeds = false;
-            continue;
-        }
-
-        hardware::hidl_vec<StreamBuffer> tmpRetBuffers(numBuffersRequested);
-        bool currentReqSucceeds = true;
-        std::vector<camera3_stream_buffer_t> streamBuffers(numBuffersRequested);
-        size_t numAllocatedBuffers = 0;
-        size_t numPushedInflightBuffers = 0;
-        for (size_t b = 0; b < numBuffersRequested; b++) {
-            camera3_stream_buffer_t& sb = streamBuffers[b];
-            // Since this method can run concurrently with request thread
-            // We need to update the wait duration everytime we call getbuffer
-            nsecs_t waitDuration = kBaseGetBufferWait + getExpectedInFlightDuration();
-            status_t res = outputStream->getBuffer(&sb, waitDuration);
-            if (res != OK) {
-                if (res == NO_INIT || res == DEAD_OBJECT) {
-                    ALOGV("%s: Can't get output buffer for stream %d: %s (%d)",
-                            __FUNCTION__, streamId, strerror(-res), res);
-                    bufRet.val.error(StreamBufferRequestError::STREAM_DISCONNECTED);
-                } else {
-                    ALOGE("%s: Can't get output buffer for stream %d: %s (%d)",
-                            __FUNCTION__, streamId, strerror(-res), res);
-                    if (res == TIMED_OUT || res == NO_MEMORY) {
-                        bufRet.val.error(StreamBufferRequestError::NO_BUFFER_AVAILABLE);
-                    } else {
-                        bufRet.val.error(StreamBufferRequestError::UNKNOWN_ERROR);
-                    }
-                }
-                currentReqSucceeds = false;
-                break;
-            }
-            numAllocatedBuffers++;
-
-            buffer_handle_t *buffer = sb.buffer;
-            auto pair = mInterface->getBufferId(*buffer, streamId);
-            bool isNewBuffer = pair.first;
-            uint64_t bufferId = pair.second;
-            StreamBuffer& hBuf = tmpRetBuffers[b];
-
-            hBuf.streamId = streamId;
-            hBuf.bufferId = bufferId;
-            hBuf.buffer = (isNewBuffer) ? *buffer : nullptr;
-            hBuf.status = BufferStatus::OK;
-            hBuf.releaseFence = nullptr;
-
-            native_handle_t *acquireFence = nullptr;
-            if (sb.acquire_fence != -1) {
-                acquireFence = native_handle_create(1,0);
-                acquireFence->data[0] = sb.acquire_fence;
-            }
-            hBuf.acquireFence.setTo(acquireFence, /*shouldOwn*/true);
-            hBuf.releaseFence = nullptr;
-
-            res = mInterface->pushInflightRequestBuffer(bufferId, buffer, streamId);
-            if (res != OK) {
-                ALOGE("%s: Can't get register request buffers for stream %d: %s (%d)",
-                        __FUNCTION__, streamId, strerror(-res), res);
-                bufRet.val.error(StreamBufferRequestError::UNKNOWN_ERROR);
-                currentReqSucceeds = false;
-                break;
-            }
-            numPushedInflightBuffers++;
-        }
-        if (currentReqSucceeds) {
-            bufRet.val.buffers(std::move(tmpRetBuffers));
-            oneReqSucceeds = true;
-        } else {
-            allReqsSucceeds = false;
-            for (size_t b = 0; b < numPushedInflightBuffers; b++) {
-                StreamBuffer& hBuf = tmpRetBuffers[b];
-                buffer_handle_t* buffer;
-                status_t res = mInterface->popInflightRequestBuffer(hBuf.bufferId, &buffer);
-                if (res != OK) {
-                    SET_ERR("%s: popInflightRequestBuffer failed for stream %d: %s (%d)",
-                            __FUNCTION__, streamId, strerror(-res), res);
-                }
-            }
-            for (size_t b = 0; b < numAllocatedBuffers; b++) {
-                camera3_stream_buffer_t& sb = streamBuffers[b];
-                sb.acquire_fence = -1;
-                sb.status = CAMERA3_BUFFER_STATUS_ERROR;
-            }
-            returnOutputBuffers(streamBuffers.data(), numAllocatedBuffers, 0);
-        }
-    }
-
-    _hidl_cb(allReqsSucceeds ? BufferRequestStatus::OK :
-            oneReqSucceeds ? BufferRequestStatus::FAILED_PARTIAL :
-                             BufferRequestStatus::FAILED_UNKNOWN,
-            bufRets);
-    mRequestBufferSM.endRequestBuffer();
+    RequestBufferStates states {
+        mId, mRequestBufferInterfaceLock, mUseHalBufManager, mOutputStreams,
+        *this, *mInterface, *this};
+    camera3::requestStreamBuffers(states, bufReqs, _hidl_cb);
     return hardware::Void();
 }
 
 hardware::Return<void> Camera3Device::returnStreamBuffers(
         const hardware::hidl_vec<hardware::camera::device::V3_2::StreamBuffer>& buffers) {
-    if (!mUseHalBufManager) {
-        ALOGE("%s: Camera %s does not support HAL buffer managerment",
-                __FUNCTION__, mId.string());
-        return hardware::Void();
-    }
-
-    for (const auto& buf : buffers) {
-        if (buf.bufferId == HalInterface::BUFFER_ID_NO_BUFFER) {
-            ALOGE("%s: cannot return a buffer without bufferId", __FUNCTION__);
-            continue;
-        }
-
-        buffer_handle_t* buffer;
-        status_t res = mInterface->popInflightRequestBuffer(buf.bufferId, &buffer);
-
-        if (res != OK) {
-            ALOGE("%s: cannot find in-flight buffer %" PRIu64 " for stream %d",
-                    __FUNCTION__, buf.bufferId, buf.streamId);
-            continue;
-        }
-
-        camera3_stream_buffer_t streamBuffer;
-        streamBuffer.buffer = buffer;
-        streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
-        streamBuffer.acquire_fence = -1;
-        streamBuffer.release_fence = -1;
-
-        if (buf.releaseFence == nullptr) {
-            streamBuffer.release_fence = -1;
-        } else if (buf.releaseFence->numFds == 1) {
-            streamBuffer.release_fence = dup(buf.releaseFence->data[0]);
-        } else {
-            ALOGE("%s: Invalid release fence, fd count is %d, not 1",
-                    __FUNCTION__, buf.releaseFence->numFds);
-            continue;
-        }
-
-        sp<Camera3StreamInterface> stream = mOutputStreams.get(buf.streamId);
-        if (stream == nullptr) {
-            ALOGE("%s: Output stream id %d not found!", __FUNCTION__, buf.streamId);
-            continue;
-        }
-        streamBuffer.stream = stream->asHalStream();
-        returnOutputBuffers(&streamBuffer, /*size*/1, /*timestamp*/ 0);
-    }
+    ReturnBufferStates states {
+        mId, mUseHalBufManager, mOutputStreams, *mInterface};
+    camera3::returnStreamBuffers(states, buffers);
     return hardware::Void();
 }
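
requestStreamBuffers() and returnStreamBuffers() are now thin shims: each packs references to the Camera3Device members it needs into an aggregate (RequestBufferStates, ReturnBufferStates, and CaptureOutputStates below) and hands that to a free function in the camera3 namespace. A rough sketch of that aggregate-of-references pattern under hypothetical names (WorkerStates, doWork, and Device are illustrative, not the real camera3 helpers):

    #include <cstdio>
    #include <mutex>
    #include <string>

    // Hypothetical aggregate of references, mirroring the shape of
    // RequestBufferStates / CaptureOutputStates in this change.
    struct WorkerStates {
        const std::string& id;
        std::mutex& lock;
        bool useHalBufManager;
        int& counter;              // stands in for device state the helper mutates
    };

    // Free function that operates on the device only through the states bundle,
    // so the logic can live outside the class that owns the members.
    void doWork(WorkerStates& states) {
        std::lock_guard<std::mutex> l(states.lock);
        if (!states.useHalBufManager) return;
        states.counter++;
        printf("%s: counter=%d\n", states.id.c_str(), states.counter);
    }

    class Device {
      public:
        void handleEvent() {
            WorkerStates states{mId, mLock, mUseHalBufManager, mCounter};
            doWork(states);
        }
      private:
        std::string mId = "0";
        std::mutex mLock;
        bool mUseHalBufManager = true;
        int mCounter = 0;
    };

    int main() {
        Device d;
        d.handleEvent();
        return 0;
    }

Because the free functions see only the states bundle rather than a Camera3Device pointer, the buffer-handling logic is decoupled from the class, presumably so the same code paths can be reused outside Camera3Device.
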
 
@@ -1228,6 +1021,12 @@
         ALOGW("%s: received capture result in error state.", __FUNCTION__);
     }
 
+    sp<NotificationListener> listener;
+    {
+        std::lock_guard<std::mutex> l(mOutputLock);
+        listener = mListener.promote();
+    }
+
     if (mProcessCaptureResultLock.tryLock() != OK) {
         // This should never happen; it indicates a wrong client implementation
         // that doesn't follow the contract. But, we can be tolerant here.
@@ -1240,8 +1039,22 @@
             return hardware::Void();
         }
     }
+    CaptureOutputStates states {
+        mId,
+        mInFlightLock, mInFlightMap,
+        mOutputLock,  mResultQueue, mResultSignal,
+        mNextShutterFrameNumber,
+        mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
+        mNextResultFrameNumber,
+        mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
+        mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
+        mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
+        mResultMetadataQueue, mDistortionMappers, mZoomRatioMappers, mTagMonitor,
+        mInputStream, mOutputStreams, listener, *this, *this, *mInterface
+    };
+
     for (const auto& result : results) {
-        processOneCaptureResultLocked(result.v3_2, result.physicalCameraMetadata);
+        processOneCaptureResultLocked(states, result.v3_2, result.physicalCameraMetadata);
     }
     mProcessCaptureResultLock.unlock();
     return hardware::Void();
@@ -1264,6 +1077,12 @@
         ALOGW("%s: received capture result in error state.", __FUNCTION__);
     }
 
+    sp<NotificationListener> listener;
+    {
+        std::lock_guard<std::mutex> l(mOutputLock);
+        listener = mListener.promote();
+    }
+
     if (mProcessCaptureResultLock.tryLock() != OK) {
         // This should never happen; it indicates a wrong client implementation
         // that doesn't follow the contract. But, we can be tolerant here.
@@ -1276,186 +1095,28 @@
             return hardware::Void();
         }
     }
+
+    CaptureOutputStates states {
+        mId,
+        mInFlightLock, mInFlightMap,
+        mOutputLock,  mResultQueue, mResultSignal,
+        mNextShutterFrameNumber,
+        mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
+        mNextResultFrameNumber,
+        mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
+        mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
+        mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
+        mResultMetadataQueue, mDistortionMappers, mZoomRatioMappers, mTagMonitor,
+        mInputStream, mOutputStreams, listener, *this, *this, *mInterface
+    };
+
     for (const auto& result : results) {
-        processOneCaptureResultLocked(result, noPhysMetadata);
+        processOneCaptureResultLocked(states, result, noPhysMetadata);
     }
     mProcessCaptureResultLock.unlock();
     return hardware::Void();
 }
 
-status_t Camera3Device::readOneCameraMetadataLocked(
-        uint64_t fmqResultSize, hardware::camera::device::V3_2::CameraMetadata& resultMetadata,
-        const hardware::camera::device::V3_2::CameraMetadata& result) {
-    if (fmqResultSize > 0) {
-        resultMetadata.resize(fmqResultSize);
-        if (mResultMetadataQueue == nullptr) {
-            return NO_MEMORY; // logged in initialize()
-        }
-        if (!mResultMetadataQueue->read(resultMetadata.data(), fmqResultSize)) {
-            ALOGE("%s: Cannot read camera metadata from fmq, size = %" PRIu64,
-                    __FUNCTION__, fmqResultSize);
-            return INVALID_OPERATION;
-        }
-    } else {
-        resultMetadata.setToExternal(const_cast<uint8_t *>(result.data()),
-                result.size());
-    }
-
-    if (resultMetadata.size() != 0) {
-        status_t res;
-        const camera_metadata_t* metadata =
-                reinterpret_cast<const camera_metadata_t*>(resultMetadata.data());
-        size_t expected_metadata_size = resultMetadata.size();
-        if ((res = validate_camera_metadata_structure(metadata, &expected_metadata_size)) != OK) {
-            ALOGE("%s: Invalid camera metadata received by camera service from HAL: %s (%d)",
-                    __FUNCTION__, strerror(-res), res);
-            return INVALID_OPERATION;
-        }
-    }
-
-    return OK;
-}
-
-void Camera3Device::processOneCaptureResultLocked(
-        const hardware::camera::device::V3_2::CaptureResult& result,
-        const hardware::hidl_vec<
-                hardware::camera::device::V3_4::PhysicalCameraMetadata> physicalCameraMetadata) {
-    camera3_capture_result r;
-    status_t res;
-    r.frame_number = result.frameNumber;
-
-    // Read and validate the result metadata.
-    hardware::camera::device::V3_2::CameraMetadata resultMetadata;
-    res = readOneCameraMetadataLocked(result.fmqResultSize, resultMetadata, result.result);
-    if (res != OK) {
-        ALOGE("%s: Frame %d: Failed to read capture result metadata",
-                __FUNCTION__, result.frameNumber);
-        return;
-    }
-    r.result = reinterpret_cast<const camera_metadata_t*>(resultMetadata.data());
-
-    // Read and validate physical camera metadata
-    size_t physResultCount = physicalCameraMetadata.size();
-    std::vector<const char*> physCamIds(physResultCount);
-    std::vector<const camera_metadata_t *> phyCamMetadatas(physResultCount);
-    std::vector<hardware::camera::device::V3_2::CameraMetadata> physResultMetadata;
-    physResultMetadata.resize(physResultCount);
-    for (size_t i = 0; i < physicalCameraMetadata.size(); i++) {
-        res = readOneCameraMetadataLocked(physicalCameraMetadata[i].fmqMetadataSize,
-                physResultMetadata[i], physicalCameraMetadata[i].metadata);
-        if (res != OK) {
-            ALOGE("%s: Frame %d: Failed to read capture result metadata for camera %s",
-                    __FUNCTION__, result.frameNumber,
-                    physicalCameraMetadata[i].physicalCameraId.c_str());
-            return;
-        }
-        physCamIds[i] = physicalCameraMetadata[i].physicalCameraId.c_str();
-        phyCamMetadatas[i] = reinterpret_cast<const camera_metadata_t*>(
-                physResultMetadata[i].data());
-    }
-    r.num_physcam_metadata = physResultCount;
-    r.physcam_ids = physCamIds.data();
-    r.physcam_metadata = phyCamMetadatas.data();
-
-    std::vector<camera3_stream_buffer_t> outputBuffers(result.outputBuffers.size());
-    std::vector<buffer_handle_t> outputBufferHandles(result.outputBuffers.size());
-    for (size_t i = 0; i < result.outputBuffers.size(); i++) {
-        auto& bDst = outputBuffers[i];
-        const StreamBuffer &bSrc = result.outputBuffers[i];
-
-        sp<Camera3StreamInterface> stream = mOutputStreams.get(bSrc.streamId);
-        if (stream == nullptr) {
-            ALOGE("%s: Frame %d: Buffer %zu: Invalid output stream id %d",
-                    __FUNCTION__, result.frameNumber, i, bSrc.streamId);
-            return;
-        }
-        bDst.stream = stream->asHalStream();
-
-        bool noBufferReturned = false;
-        buffer_handle_t *buffer = nullptr;
-        if (mUseHalBufManager) {
-            // This is suspicious most of the time but can be correct during flush where HAL
-            // has to return capture result before a buffer is requested
-            if (bSrc.bufferId == HalInterface::BUFFER_ID_NO_BUFFER) {
-                if (bSrc.status == BufferStatus::OK) {
-                    ALOGE("%s: Frame %d: Buffer %zu: No bufferId for stream %d",
-                            __FUNCTION__, result.frameNumber, i, bSrc.streamId);
-                    // Still proceeds so other buffers can be returned
-                }
-                noBufferReturned = true;
-            }
-            if (noBufferReturned) {
-                res = OK;
-            } else {
-                res = mInterface->popInflightRequestBuffer(bSrc.bufferId, &buffer);
-            }
-        } else {
-            res = mInterface->popInflightBuffer(result.frameNumber, bSrc.streamId, &buffer);
-        }
-
-        if (res != OK) {
-            ALOGE("%s: Frame %d: Buffer %zu: No in-flight buffer for stream %d",
-                    __FUNCTION__, result.frameNumber, i, bSrc.streamId);
-            return;
-        }
-
-        bDst.buffer = buffer;
-        bDst.status = mapHidlBufferStatus(bSrc.status);
-        bDst.acquire_fence = -1;
-        if (bSrc.releaseFence == nullptr) {
-            bDst.release_fence = -1;
-        } else if (bSrc.releaseFence->numFds == 1) {
-            if (noBufferReturned) {
-                ALOGE("%s: got releaseFence without output buffer!", __FUNCTION__);
-            }
-            bDst.release_fence = dup(bSrc.releaseFence->data[0]);
-        } else {
-            ALOGE("%s: Frame %d: Invalid release fence for buffer %zu, fd count is %d, not 1",
-                    __FUNCTION__, result.frameNumber, i, bSrc.releaseFence->numFds);
-            return;
-        }
-    }
-    r.num_output_buffers = outputBuffers.size();
-    r.output_buffers = outputBuffers.data();
-
-    camera3_stream_buffer_t inputBuffer;
-    if (result.inputBuffer.streamId == -1) {
-        r.input_buffer = nullptr;
-    } else {
-        if (mInputStream->getId() != result.inputBuffer.streamId) {
-            ALOGE("%s: Frame %d: Invalid input stream id %d", __FUNCTION__,
-                    result.frameNumber, result.inputBuffer.streamId);
-            return;
-        }
-        inputBuffer.stream = mInputStream->asHalStream();
-        buffer_handle_t *buffer;
-        res = mInterface->popInflightBuffer(result.frameNumber, result.inputBuffer.streamId,
-                &buffer);
-        if (res != OK) {
-            ALOGE("%s: Frame %d: Input buffer: No in-flight buffer for stream %d",
-                    __FUNCTION__, result.frameNumber, result.inputBuffer.streamId);
-            return;
-        }
-        inputBuffer.buffer = buffer;
-        inputBuffer.status = mapHidlBufferStatus(result.inputBuffer.status);
-        inputBuffer.acquire_fence = -1;
-        if (result.inputBuffer.releaseFence == nullptr) {
-            inputBuffer.release_fence = -1;
-        } else if (result.inputBuffer.releaseFence->numFds == 1) {
-            inputBuffer.release_fence = dup(result.inputBuffer.releaseFence->data[0]);
-        } else {
-            ALOGE("%s: Frame %d: Invalid release fence for input buffer, fd count is %d, not 1",
-                    __FUNCTION__, result.frameNumber, result.inputBuffer.releaseFence->numFds);
-            return;
-        }
-        r.input_buffer = &inputBuffer;
-    }
-
-    r.partial_result = result.partialResult;
-
-    processCaptureResult(&r);
-}
-
 hardware::Return<void> Camera3Device::notify(
         const hardware::hidl_vec<hardware::camera::device::V3_2::NotifyMsg>& msgs) {
     // Ideally we should grab mLock, but that can lead to deadlock, and
@@ -1468,55 +1129,31 @@
         ALOGW("%s: received notify message in error state.", __FUNCTION__);
     }
 
+    sp<NotificationListener> listener;
+    {
+        std::lock_guard<std::mutex> l(mOutputLock);
+        listener = mListener.promote();
+    }
+
+    CaptureOutputStates states {
+        mId,
+        mInFlightLock, mInFlightMap,
+        mOutputLock,  mResultQueue, mResultSignal,
+        mNextShutterFrameNumber,
+        mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
+        mNextResultFrameNumber,
+        mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
+        mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
+        mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
+        mResultMetadataQueue, mDistortionMappers, mZoomRatioMappers, mTagMonitor,
+        mInputStream, mOutputStreams, listener, *this, *this, *mInterface
+    };
     for (const auto& msg : msgs) {
-        notify(msg);
+        camera3::notify(states, msg);
     }
     return hardware::Void();
 }
 
-void Camera3Device::notify(
-        const hardware::camera::device::V3_2::NotifyMsg& msg) {
-
-    camera3_notify_msg m;
-    switch (msg.type) {
-        case MsgType::ERROR:
-            m.type = CAMERA3_MSG_ERROR;
-            m.message.error.frame_number = msg.msg.error.frameNumber;
-            if (msg.msg.error.errorStreamId >= 0) {
-                sp<Camera3StreamInterface> stream = mOutputStreams.get(msg.msg.error.errorStreamId);
-                if (stream == nullptr) {
-                    ALOGE("%s: Frame %d: Invalid error stream id %d", __FUNCTION__,
-                            m.message.error.frame_number, msg.msg.error.errorStreamId);
-                    return;
-                }
-                m.message.error.error_stream = stream->asHalStream();
-            } else {
-                m.message.error.error_stream = nullptr;
-            }
-            switch (msg.msg.error.errorCode) {
-                case ErrorCode::ERROR_DEVICE:
-                    m.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
-                    break;
-                case ErrorCode::ERROR_REQUEST:
-                    m.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
-                    break;
-                case ErrorCode::ERROR_RESULT:
-                    m.message.error.error_code = CAMERA3_MSG_ERROR_RESULT;
-                    break;
-                case ErrorCode::ERROR_BUFFER:
-                    m.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
-                    break;
-            }
-            break;
-        case MsgType::SHUTTER:
-            m.type = CAMERA3_MSG_SHUTTER;
-            m.message.shutter.frame_number = msg.msg.shutter.frameNumber;
-            m.message.shutter.timestamp = msg.msg.shutter.timestamp;
-            break;
-    }
-    notify(&m);
-}
-
 status_t Camera3Device::captureList(const List<const PhysicalCameraSettingsList> &requestsList,
                                     const std::list<const SurfaceMap> &surfaceMaps,
                                     int64_t *lastFrameNumber) {
@@ -1670,56 +1307,6 @@
     return OK;
 }
 
-status_t Camera3Device::StreamSet::add(
-        int streamId, sp<camera3::Camera3OutputStreamInterface> stream) {
-    if (stream == nullptr) {
-        ALOGE("%s: cannot add null stream", __FUNCTION__);
-        return BAD_VALUE;
-    }
-    std::lock_guard<std::mutex> lock(mLock);
-    return mData.add(streamId, stream);
-}
-
-ssize_t Camera3Device::StreamSet::remove(int streamId) {
-    std::lock_guard<std::mutex> lock(mLock);
-    return mData.removeItem(streamId);
-}
-
-sp<camera3::Camera3OutputStreamInterface>
-Camera3Device::StreamSet::get(int streamId) {
-    std::lock_guard<std::mutex> lock(mLock);
-    ssize_t idx = mData.indexOfKey(streamId);
-    if (idx == NAME_NOT_FOUND) {
-        return nullptr;
-    }
-    return mData.editValueAt(idx);
-}
-
-sp<camera3::Camera3OutputStreamInterface>
-Camera3Device::StreamSet::operator[] (size_t index) {
-    std::lock_guard<std::mutex> lock(mLock);
-    return mData.editValueAt(index);
-}
-
-size_t Camera3Device::StreamSet::size() const {
-    std::lock_guard<std::mutex> lock(mLock);
-    return mData.size();
-}
-
-void Camera3Device::StreamSet::clear() {
-    std::lock_guard<std::mutex> lock(mLock);
-    return mData.clear();
-}
-
-std::vector<int> Camera3Device::StreamSet::getStreamIds() {
-    std::lock_guard<std::mutex> lock(mLock);
-    std::vector<int> streamIds(mData.size());
-    for (size_t i = 0; i < mData.size(); i++) {
-        streamIds[i] = mData.keyAt(i);
-    }
-    return streamIds;
-}
-
 status_t Camera3Device::createStream(sp<Surface> consumer,
             uint32_t width, uint32_t height, int format,
             android_dataspace dataSpace, camera3_stream_rotation_t rotation, int *id,
@@ -2124,6 +1711,15 @@
         set_camera_metadata_vendor_id(rawRequest, mVendorTagId);
         mRequestTemplateCache[templateId].acquire(rawRequest);
 
+        // Have the zoom ratio mapper initialize the zoom ratio field in the template request
+        res = mZoomRatioMappers[mId.c_str()].initZoomRatioInTemplate(
+                &mRequestTemplateCache[templateId]);
+        if (res != OK) {
+            CLOGE("Failed to update zoom ratio for template %d: %s (%d)",
+                    templateId, strerror(-res), res);
+            return res;
+        }
+
         *request = mRequestTemplateCache[templateId];
         mLastTemplateId = templateId;
     }
@@ -2173,7 +1769,9 @@
 }
 
 void Camera3Device::pauseStateNotify(bool enable) {
-    Mutex::Autolock il(mInterfaceLock);
+    // We must not hold mInterfaceLock here since this function is called from
+    // RequestThread::threadLoop and holding mInterfaceLock could lead to
+    // deadlocks (http://b/143513518)
     Mutex::Autolock l(mLock);
 
     mPauseStateNotify = enable;
@@ -2272,7 +1870,7 @@
 
 status_t Camera3Device::setNotifyCallback(wp<NotificationListener> listener) {
     ATRACE_CALL();
-    Mutex::Autolock l(mOutputLock);
+    std::lock_guard<std::mutex> l(mOutputLock);
 
     if (listener != NULL && mListener != NULL) {
         ALOGW("%s: Replacing old callback listener", __FUNCTION__);
@@ -2290,17 +1888,12 @@
 
 status_t Camera3Device::waitForNextFrame(nsecs_t timeout) {
     ATRACE_CALL();
-    status_t res;
-    Mutex::Autolock l(mOutputLock);
+    std::unique_lock<std::mutex> l(mOutputLock);
 
     while (mResultQueue.empty()) {
-        res = mResultSignal.waitRelative(mOutputLock, timeout);
-        if (res == TIMED_OUT) {
-            return res;
-        } else if (res != OK) {
-            ALOGW("%s: Camera %s: No frame in %" PRId64 " ns: %s (%d)",
-                    __FUNCTION__, mId.string(), timeout, strerror(-res), res);
-            return res;
+        auto st = mResultSignal.wait_for(l, std::chrono::nanoseconds(timeout));
+        if (st == std::cv_status::timeout) {
+            return TIMED_OUT;
         }
     }
     return OK;
@@ -2308,7 +1901,7 @@
 
 status_t Camera3Device::getNextResult(CaptureResult *frame) {
     ATRACE_CALL();
-    Mutex::Autolock l(mOutputLock);
+    std::lock_guard<std::mutex> l(mOutputLock);
 
     if (mResultQueue.empty()) {
         return NOT_ENOUGH_DATA;
@@ -2504,7 +2097,7 @@
 
     sp<NotificationListener> listener;
     {
-        Mutex::Autolock l(mOutputLock);
+        std::lock_guard<std::mutex> l(mOutputLock);
         listener = mListener.promote();
     }
     if (idle && listener != NULL) {
@@ -2703,17 +2296,6 @@
     return newRequest;
 }
 
-bool Camera3Device::isOpaqueInputSizeSupported(uint32_t width, uint32_t height) {
-    for (uint32_t i = 0; i < mSupportedOpaqueInputSizes.size(); i++) {
-        Size size = mSupportedOpaqueInputSizes[i];
-        if (size.width == width && size.height == height) {
-            return true;
-        }
-    }
-
-    return false;
-}
-
 void Camera3Device::cancelStreamsConfigurationLocked() {
     int res = OK;
     if (mInputStream != NULL && mInputStream->isConfiguring()) {
@@ -2750,7 +2332,9 @@
     ATRACE_CALL();
     bool ret = false;
 
-    Mutex::Autolock il(mInterfaceLock);
+    // We must not hold mInterfaceLock here since this function is called from
+    // RequestThread::threadLoop and holding mInterfaceLock could lead to
+    // deadlocks (http://b/143513518)
     nsecs_t maxExpectedDuration = getExpectedInFlightDuration();
 
     Mutex::Autolock l(mLock);
@@ -3146,14 +2730,15 @@
         int32_t numBuffers, CaptureResultExtras resultExtras, bool hasInput,
         bool hasAppCallback, nsecs_t maxExpectedDuration,
         std::set<String8>& physicalCameraIds, bool isStillCapture,
-        bool isZslCapture, const SurfaceMap& outputSurfaces) {
+        bool isZslCapture, const std::set<std::string>& cameraIdsWithZoom,
+        const SurfaceMap& outputSurfaces) {
     ATRACE_CALL();
-    Mutex::Autolock l(mInFlightLock);
+    std::lock_guard<std::mutex> l(mInFlightLock);
 
     ssize_t res;
     res = mInFlightMap.add(frameNumber, InFlightRequest(numBuffers, resultExtras, hasInput,
             hasAppCallback, maxExpectedDuration, physicalCameraIds, isStillCapture, isZslCapture,
-            outputSurfaces));
+            cameraIdsWithZoom, outputSurfaces));
     if (res < 0) return res;
 
     if (mInFlightMap.size() == 1) {
@@ -3169,79 +2754,7 @@
     return OK;
 }
 
-void Camera3Device::returnOutputBuffers(
-        const camera3_stream_buffer_t *outputBuffers, size_t numBuffers,
-        nsecs_t timestamp, bool timestampIncreasing,
-        const SurfaceMap& outputSurfaces,
-        const CaptureResultExtras &inResultExtras) {
-
-    for (size_t i = 0; i < numBuffers; i++)
-    {
-        if (outputBuffers[i].buffer == nullptr) {
-            if (!mUseHalBufManager) {
-                // With HAL buffer management API, HAL sometimes will have to return buffers that
-                // has not got a output buffer handle filled yet. This is though illegal if HAL
-                // buffer management API is not being used.
-                ALOGE("%s: cannot return a null buffer!", __FUNCTION__);
-            }
-            continue;
-        }
-
-        Camera3StreamInterface *stream = Camera3Stream::cast(outputBuffers[i].stream);
-        int streamId = stream->getId();
-        const auto& it = outputSurfaces.find(streamId);
-        status_t res = OK;
-        if (it != outputSurfaces.end()) {
-            res = stream->returnBuffer(
-                    outputBuffers[i], timestamp, timestampIncreasing, it->second,
-                    inResultExtras.frameNumber);
-        } else {
-            res = stream->returnBuffer(
-                    outputBuffers[i], timestamp, timestampIncreasing, std::vector<size_t> (),
-                    inResultExtras.frameNumber);
-        }
-
-        // Note: stream may be deallocated at this point, if this buffer was
-        // the last reference to it.
-        if (res == NO_INIT || res == DEAD_OBJECT) {
-            ALOGV("Can't return buffer to its stream: %s (%d)", strerror(-res), res);
-        } else if (res != OK) {
-            ALOGE("Can't return buffer to its stream: %s (%d)", strerror(-res), res);
-        }
-
-        // Long processing consumers can cause returnBuffer timeout for shared stream
-        // If that happens, cancel the buffer and send a buffer error to client
-        if (it != outputSurfaces.end() && res == TIMED_OUT &&
-                outputBuffers[i].status == CAMERA3_BUFFER_STATUS_OK) {
-            // cancel the buffer
-            camera3_stream_buffer_t sb = outputBuffers[i];
-            sb.status = CAMERA3_BUFFER_STATUS_ERROR;
-            stream->returnBuffer(sb, /*timestamp*/0, timestampIncreasing, std::vector<size_t> (),
-                    inResultExtras.frameNumber);
-
-            // notify client buffer error
-            sp<NotificationListener> listener;
-            {
-                Mutex::Autolock l(mOutputLock);
-                listener = mListener.promote();
-            }
-
-            if (listener != nullptr) {
-                CaptureResultExtras extras = inResultExtras;
-                extras.errorStreamId = streamId;
-                listener->notifyError(
-                        hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER,
-                        extras);
-            }
-        }
-    }
-}
-
-void Camera3Device::removeInFlightMapEntryLocked(int idx) {
-    ATRACE_CALL();
-    nsecs_t duration = mInFlightMap.valueAt(idx).maxExpectedDuration;
-    mInFlightMap.removeItemsAt(idx, 1);
-
+void Camera3Device::onInflightEntryRemovedLocked(nsecs_t duration) {
     // Indicate idle inFlightMap to the status tracker
     if (mInFlightMap.size() == 0) {
         mRequestBufferSM.onInflightMapEmpty();
@@ -3255,50 +2768,7 @@
     mExpectedInflightDuration -= duration;
 }
 
-void Camera3Device::removeInFlightRequestIfReadyLocked(int idx) {
-
-    const InFlightRequest &request = mInFlightMap.valueAt(idx);
-    const uint32_t frameNumber = mInFlightMap.keyAt(idx);
-
-    nsecs_t sensorTimestamp = request.sensorTimestamp;
-    nsecs_t shutterTimestamp = request.shutterTimestamp;
-
-    // Check if it's okay to remove the request from InFlightMap:
-    // In the case of a successful request:
-    //      all input and output buffers, all result metadata, shutter callback
-    //      arrived.
-    // In the case of a unsuccessful request:
-    //      all input and output buffers arrived.
-    if (request.numBuffersLeft == 0 &&
-            (request.skipResultMetadata ||
-            (request.haveResultMetadata && shutterTimestamp != 0))) {
-        if (request.stillCapture) {
-            ATRACE_ASYNC_END("still capture", frameNumber);
-        }
-
-        ATRACE_ASYNC_END("frame capture", frameNumber);
-
-        // Sanity check - if sensor timestamp matches shutter timestamp in the
-        // case of request having callback.
-        if (request.hasCallback && request.requestStatus == OK &&
-                sensorTimestamp != shutterTimestamp) {
-            SET_ERR("sensor timestamp (%" PRId64
-                ") for frame %d doesn't match shutter timestamp (%" PRId64 ")",
-                sensorTimestamp, frameNumber, shutterTimestamp);
-        }
-
-        // for an unsuccessful request, it may have pending output buffers to
-        // return.
-        assert(request.requestStatus != OK ||
-               request.pendingOutputBuffers.size() == 0);
-        returnOutputBuffers(request.pendingOutputBuffers.array(),
-            request.pendingOutputBuffers.size(), 0, /*timestampIncreasing*/true,
-            request.outputSurfaces, request.resultExtras);
-
-        removeInFlightMapEntryLocked(idx);
-        ALOGVV("%s: removed frame %d from InFlightMap", __FUNCTION__, frameNumber);
-     }
-
+void Camera3Device::checkInflightMapLengthLocked() {
     // Sanity check - if we have too many in-flight frames with long total inflight duration,
     // something has likely gone wrong. This might still be legit only if application send in
     // a long burst of long exposure requests.
@@ -3315,714 +2785,32 @@
     }
 }
 
+void Camera3Device::onInflightMapFlushedLocked() {
+    mExpectedInflightDuration = 0;
+}
+
+void Camera3Device::removeInFlightMapEntryLocked(int idx) {
+    ATRACE_CALL();
+    nsecs_t duration = mInFlightMap.valueAt(idx).maxExpectedDuration;
+    mInFlightMap.removeItemsAt(idx, 1);
+
+    onInflightEntryRemovedLocked(duration);
+}
+
+
 void Camera3Device::flushInflightRequests() {
     ATRACE_CALL();
-    { // First return buffers cached in mInFlightMap
-        Mutex::Autolock l(mInFlightLock);
-        for (size_t idx = 0; idx < mInFlightMap.size(); idx++) {
-            const InFlightRequest &request = mInFlightMap.valueAt(idx);
-            returnOutputBuffers(request.pendingOutputBuffers.array(),
-                request.pendingOutputBuffers.size(), 0,
-                /*timestampIncreasing*/true, request.outputSurfaces,
-                request.resultExtras);
-        }
-        mInFlightMap.clear();
-        mExpectedInflightDuration = 0;
-    }
-
-    // Then return all inflight buffers not returned by HAL
-    std::vector<std::pair<int32_t, int32_t>> inflightKeys;
-    mInterface->getInflightBufferKeys(&inflightKeys);
-
-    // Inflight buffers for HAL buffer manager
-    std::vector<uint64_t> inflightRequestBufferKeys;
-    mInterface->getInflightRequestBufferKeys(&inflightRequestBufferKeys);
-
-    // (streamId, frameNumber, buffer_handle_t*) tuple for all inflight buffers.
-    // frameNumber will be -1 for buffers from HAL buffer manager
-    std::vector<std::tuple<int32_t, int32_t, buffer_handle_t*>> inflightBuffers;
-    inflightBuffers.reserve(inflightKeys.size() + inflightRequestBufferKeys.size());
-
-    for (auto& pair : inflightKeys) {
-        int32_t frameNumber = pair.first;
-        int32_t streamId = pair.second;
-        buffer_handle_t* buffer;
-        status_t res = mInterface->popInflightBuffer(frameNumber, streamId, &buffer);
-        if (res != OK) {
-            ALOGE("%s: Frame %d: No in-flight buffer for stream %d",
-                    __FUNCTION__, frameNumber, streamId);
-            continue;
-        }
-        inflightBuffers.push_back(std::make_tuple(streamId, frameNumber, buffer));
-    }
-
-    for (auto& bufferId : inflightRequestBufferKeys) {
-        int32_t streamId = -1;
-        buffer_handle_t* buffer = nullptr;
-        status_t res = mInterface->popInflightRequestBuffer(bufferId, &buffer, &streamId);
-        if (res != OK) {
-            ALOGE("%s: cannot find in-flight buffer %" PRIu64, __FUNCTION__, bufferId);
-            continue;
-        }
-        inflightBuffers.push_back(std::make_tuple(streamId, /*frameNumber*/-1, buffer));
-    }
-
-    int32_t inputStreamId = (mInputStream != nullptr) ? mInputStream->getId() : -1;
-    for (auto& tuple : inflightBuffers) {
-        status_t res = OK;
-        int32_t streamId = std::get<0>(tuple);
-        int32_t frameNumber = std::get<1>(tuple);
-        buffer_handle_t* buffer = std::get<2>(tuple);
-
-        camera3_stream_buffer_t streamBuffer;
-        streamBuffer.buffer = buffer;
-        streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
-        streamBuffer.acquire_fence = -1;
-        streamBuffer.release_fence = -1;
-
-        // First check if the buffer belongs to deleted stream
-        bool streamDeleted = false;
-        for (auto& stream : mDeletedStreams) {
-            if (streamId == stream->getId()) {
-                streamDeleted = true;
-                // Return buffer to deleted stream
-                camera3_stream* halStream = stream->asHalStream();
-                streamBuffer.stream = halStream;
-                switch (halStream->stream_type) {
-                    case CAMERA3_STREAM_OUTPUT:
-                        res = stream->returnBuffer(streamBuffer, /*timestamp*/ 0,
-                                /*timestampIncreasing*/true, std::vector<size_t> (), frameNumber);
-                        if (res != OK) {
-                            ALOGE("%s: Can't return output buffer for frame %d to"
-                                  " stream %d: %s (%d)",  __FUNCTION__,
-                                  frameNumber, streamId, strerror(-res), res);
-                        }
-                        break;
-                    case CAMERA3_STREAM_INPUT:
-                        res = stream->returnInputBuffer(streamBuffer);
-                        if (res != OK) {
-                            ALOGE("%s: Can't return input buffer for frame %d to"
-                                  " stream %d: %s (%d)",  __FUNCTION__,
-                                  frameNumber, streamId, strerror(-res), res);
-                        }
-                        break;
-                    default: // Bi-directional stream is deprecated
-                        ALOGE("%s: stream %d has unknown stream type %d",
-                                __FUNCTION__, streamId, halStream->stream_type);
-                        break;
-                }
-                break;
-            }
-        }
-        if (streamDeleted) {
-            continue;
-        }
-
-        // Then check against configured streams
-        if (streamId == inputStreamId) {
-            streamBuffer.stream = mInputStream->asHalStream();
-            res = mInputStream->returnInputBuffer(streamBuffer);
-            if (res != OK) {
-                ALOGE("%s: Can't return input buffer for frame %d to"
-                      " stream %d: %s (%d)",  __FUNCTION__,
-                      frameNumber, streamId, strerror(-res), res);
-            }
-        } else {
-            sp<Camera3StreamInterface> stream = mOutputStreams.get(streamId);
-            if (stream == nullptr) {
-                ALOGE("%s: Output stream id %d not found!", __FUNCTION__, streamId);
-                continue;
-            }
-            streamBuffer.stream = stream->asHalStream();
-            returnOutputBuffers(&streamBuffer, /*size*/1, /*timestamp*/ 0);
-        }
-    }
-}
-
-void Camera3Device::insertResultLocked(CaptureResult *result,
-        uint32_t frameNumber) {
-    if (result == nullptr) return;
-
-    camera_metadata_t *meta = const_cast<camera_metadata_t *>(
-            result->mMetadata.getAndLock());
-    set_camera_metadata_vendor_id(meta, mVendorTagId);
-    result->mMetadata.unlock(meta);
-
-    if (result->mMetadata.update(ANDROID_REQUEST_FRAME_COUNT,
-            (int32_t*)&frameNumber, 1) != OK) {
-        SET_ERR("Failed to set frame number %d in metadata", frameNumber);
-        return;
-    }
-
-    if (result->mMetadata.update(ANDROID_REQUEST_ID, &result->mResultExtras.requestId, 1) != OK) {
-        SET_ERR("Failed to set request ID in metadata for frame %d", frameNumber);
-        return;
-    }
-
-    // Update vendor tag id for physical metadata
-    for (auto& physicalMetadata : result->mPhysicalMetadatas) {
-        camera_metadata_t *pmeta = const_cast<camera_metadata_t *>(
-                physicalMetadata.mPhysicalCameraMetadata.getAndLock());
-        set_camera_metadata_vendor_id(pmeta, mVendorTagId);
-        physicalMetadata.mPhysicalCameraMetadata.unlock(pmeta);
-    }
-
-    // Valid result, insert into queue
-    List<CaptureResult>::iterator queuedResult =
-            mResultQueue.insert(mResultQueue.end(), CaptureResult(*result));
-    ALOGVV("%s: result requestId = %" PRId32 ", frameNumber = %" PRId64
-           ", burstId = %" PRId32, __FUNCTION__,
-           queuedResult->mResultExtras.requestId,
-           queuedResult->mResultExtras.frameNumber,
-           queuedResult->mResultExtras.burstId);
-
-    mResultSignal.signal();
-}
-
-
-void Camera3Device::sendPartialCaptureResult(const camera_metadata_t * partialResult,
-        const CaptureResultExtras &resultExtras, uint32_t frameNumber) {
-    ATRACE_CALL();
-    Mutex::Autolock l(mOutputLock);
-
-    CaptureResult captureResult;
-    captureResult.mResultExtras = resultExtras;
-    captureResult.mMetadata = partialResult;
-
-    // Fix up result metadata for monochrome camera.
-    status_t res = fixupMonochromeTags(mDeviceInfo, captureResult.mMetadata);
-    if (res != OK) {
-        SET_ERR("Failed to override result metadata: %s (%d)", strerror(-res), res);
-        return;
-    }
-
-    insertResultLocked(&captureResult, frameNumber);
-}
-
-
-void Camera3Device::sendCaptureResult(CameraMetadata &pendingMetadata,
-        CaptureResultExtras &resultExtras,
-        CameraMetadata &collectedPartialResult,
-        uint32_t frameNumber,
-        bool reprocess, bool zslStillCapture,
-        const std::vector<PhysicalCaptureResultInfo>& physicalMetadatas) {
-    ATRACE_CALL();
-    if (pendingMetadata.isEmpty())
-        return;
-
-    Mutex::Autolock l(mOutputLock);
-
-    // TODO: need to track errors for tighter bounds on expected frame number
-    if (reprocess) {
-        if (frameNumber < mNextReprocessResultFrameNumber) {
-            SET_ERR("Out-of-order reprocess capture result metadata submitted! "
-                "(got frame number %d, expecting %d)",
-                frameNumber, mNextReprocessResultFrameNumber);
-            return;
-        }
-        mNextReprocessResultFrameNumber = frameNumber + 1;
-    } else if (zslStillCapture) {
-        if (frameNumber < mNextZslStillResultFrameNumber) {
-            SET_ERR("Out-of-order ZSL still capture result metadata submitted! "
-                "(got frame number %d, expecting %d)",
-                frameNumber, mNextZslStillResultFrameNumber);
-            return;
-        }
-        mNextZslStillResultFrameNumber = frameNumber + 1;
-    } else {
-        if (frameNumber < mNextResultFrameNumber) {
-            SET_ERR("Out-of-order capture result metadata submitted! "
-                    "(got frame number %d, expecting %d)",
-                    frameNumber, mNextResultFrameNumber);
-            return;
-        }
-        mNextResultFrameNumber = frameNumber + 1;
-    }
-
-    CaptureResult captureResult;
-    captureResult.mResultExtras = resultExtras;
-    captureResult.mMetadata = pendingMetadata;
-    captureResult.mPhysicalMetadatas = physicalMetadatas;
-
-    // Append any previous partials to form a complete result
-    if (mUsePartialResult && !collectedPartialResult.isEmpty()) {
-        captureResult.mMetadata.append(collectedPartialResult);
-    }
-
-    captureResult.mMetadata.sort();
-
-    // Check that there's a timestamp in the result metadata
-    camera_metadata_entry timestamp = captureResult.mMetadata.find(ANDROID_SENSOR_TIMESTAMP);
-    if (timestamp.count == 0) {
-        SET_ERR("No timestamp provided by HAL for frame %d!",
-                frameNumber);
-        return;
-    }
-    for (auto& physicalMetadata : captureResult.mPhysicalMetadatas) {
-        camera_metadata_entry timestamp =
-                physicalMetadata.mPhysicalCameraMetadata.find(ANDROID_SENSOR_TIMESTAMP);
-        if (timestamp.count == 0) {
-            SET_ERR("No timestamp provided by HAL for physical camera %s frame %d!",
-                    String8(physicalMetadata.mPhysicalCameraId).c_str(), frameNumber);
-            return;
-        }
-    }
-
-    // Fix up some result metadata to account for HAL-level distortion correction
-    status_t res =
-            mDistortionMappers[mId.c_str()].correctCaptureResult(&captureResult.mMetadata);
-    if (res != OK) {
-        SET_ERR("Unable to correct capture result metadata for frame %d: %s (%d)",
-                frameNumber, strerror(res), res);
-        return;
-    }
-    for (auto& physicalMetadata : captureResult.mPhysicalMetadatas) {
-        String8 cameraId8(physicalMetadata.mPhysicalCameraId);
-        if (mDistortionMappers.find(cameraId8.c_str()) == mDistortionMappers.end()) {
-            continue;
-        }
-        res = mDistortionMappers[cameraId8.c_str()].correctCaptureResult(
-                &physicalMetadata.mPhysicalCameraMetadata);
-        if (res != OK) {
-            SET_ERR("Unable to correct physical capture result metadata for frame %d: %s (%d)",
-                    frameNumber, strerror(res), res);
-            return;
-        }
-    }
-
-    // Fix up result metadata for monochrome camera.
-    res = fixupMonochromeTags(mDeviceInfo, captureResult.mMetadata);
-    if (res != OK) {
-        SET_ERR("Failed to override result metadata: %s (%d)", strerror(-res), res);
-        return;
-    }
-    for (auto& physicalMetadata : captureResult.mPhysicalMetadatas) {
-        String8 cameraId8(physicalMetadata.mPhysicalCameraId);
-        res = fixupMonochromeTags(mPhysicalDeviceInfoMap.at(cameraId8.c_str()),
-                physicalMetadata.mPhysicalCameraMetadata);
-        if (res != OK) {
-            SET_ERR("Failed to override result metadata: %s (%d)", strerror(-res), res);
-            return;
-        }
-    }
-
-    std::unordered_map<std::string, CameraMetadata> monitoredPhysicalMetadata;
-    for (auto& m : physicalMetadatas) {
-        monitoredPhysicalMetadata.emplace(String8(m.mPhysicalCameraId).string(),
-                CameraMetadata(m.mPhysicalCameraMetadata));
-    }
-    mTagMonitor.monitorMetadata(TagMonitor::RESULT,
-            frameNumber, timestamp.data.i64[0], captureResult.mMetadata,
-            monitoredPhysicalMetadata);
-
-    insertResultLocked(&captureResult, frameNumber);
-}
-
-/**
- * Camera HAL device callback methods
- */
-
-void Camera3Device::processCaptureResult(const camera3_capture_result *result) {
-    ATRACE_CALL();
-
-    status_t res;
-
-    uint32_t frameNumber = result->frame_number;
-    if (result->result == NULL && result->num_output_buffers == 0 &&
-            result->input_buffer == NULL) {
-        SET_ERR("No result data provided by HAL for frame %d",
-                frameNumber);
-        return;
-    }
-
-    if (!mUsePartialResult &&
-            result->result != NULL &&
-            result->partial_result != 1) {
-        SET_ERR("Result is malformed for frame %d: partial_result %u must be 1"
-                " if partial result is not supported",
-                frameNumber, result->partial_result);
-        return;
-    }
-
-    bool isPartialResult = false;
-    CameraMetadata collectedPartialResult;
-    bool hasInputBufferInRequest = false;
-
-    // Get shutter timestamp and resultExtras from list of in-flight requests,
-    // where it was added by the shutter notification for this frame. If the
-    // shutter timestamp isn't received yet, append the output buffers to the
-    // in-flight request and they will be returned when the shutter timestamp
-    // arrives. Update the in-flight status and remove the in-flight entry if
-    // all result data and shutter timestamp have been received.
-    nsecs_t shutterTimestamp = 0;
-
-    {
-        Mutex::Autolock l(mInFlightLock);
-        ssize_t idx = mInFlightMap.indexOfKey(frameNumber);
-        if (idx == NAME_NOT_FOUND) {
-            SET_ERR("Unknown frame number for capture result: %d",
-                    frameNumber);
-            return;
-        }
-        InFlightRequest &request = mInFlightMap.editValueAt(idx);
-        ALOGVV("%s: got InFlightRequest requestId = %" PRId32
-                ", frameNumber = %" PRId64 ", burstId = %" PRId32
-                ", partialResultCount = %d, hasCallback = %d",
-                __FUNCTION__, request.resultExtras.requestId,
-                request.resultExtras.frameNumber, request.resultExtras.burstId,
-                result->partial_result, request.hasCallback);
-        // Always update the partial count to the latest one if it's not 0
-        // (buffers only). When framework aggregates adjacent partial results
-        // into one, the latest partial count will be used.
-        if (result->partial_result != 0)
-            request.resultExtras.partialResultCount = result->partial_result;
-
-        // Check if this result carries only partial metadata
-        if (mUsePartialResult && result->result != NULL) {
-            if (result->partial_result > mNumPartialResults || result->partial_result < 1) {
-                SET_ERR("Result is malformed for frame %d: partial_result %u must be  in"
-                        " the range of [1, %d] when metadata is included in the result",
-                        frameNumber, result->partial_result, mNumPartialResults);
-                return;
-            }
-            isPartialResult = (result->partial_result < mNumPartialResults);
-            if (isPartialResult && result->num_physcam_metadata) {
-                SET_ERR("Result is malformed for frame %d: partial_result not allowed for"
-                        " physical camera result", frameNumber);
-                return;
-            }
-            if (isPartialResult) {
-                request.collectedPartialResult.append(result->result);
-            }
-
-            if (isPartialResult && request.hasCallback) {
-                // Send partial capture result
-                sendPartialCaptureResult(result->result, request.resultExtras,
-                        frameNumber);
-            }
-        }
-
-        shutterTimestamp = request.shutterTimestamp;
-        hasInputBufferInRequest = request.hasInputBuffer;
-
-        // Did we get the (final) result metadata for this capture?
-        if (result->result != NULL && !isPartialResult) {
-            if (request.physicalCameraIds.size() != result->num_physcam_metadata) {
-                SET_ERR("Expected physical Camera metadata count %d not equal to actual count %d",
-                        request.physicalCameraIds.size(), result->num_physcam_metadata);
-                return;
-            }
-            if (request.haveResultMetadata) {
-                SET_ERR("Called multiple times with metadata for frame %d",
-                        frameNumber);
-                return;
-            }
-            for (uint32_t i = 0; i < result->num_physcam_metadata; i++) {
-                String8 physicalId(result->physcam_ids[i]);
-                std::set<String8>::iterator cameraIdIter =
-                        request.physicalCameraIds.find(physicalId);
-                if (cameraIdIter != request.physicalCameraIds.end()) {
-                    request.physicalCameraIds.erase(cameraIdIter);
-                } else {
-                    SET_ERR("Total result for frame %d has already returned for camera %s",
-                            frameNumber, physicalId.c_str());
-                    return;
-                }
-            }
-            if (mUsePartialResult &&
-                    !request.collectedPartialResult.isEmpty()) {
-                collectedPartialResult.acquire(
-                    request.collectedPartialResult);
-            }
-            request.haveResultMetadata = true;
-        }
-
-        uint32_t numBuffersReturned = result->num_output_buffers;
-        if (result->input_buffer != NULL) {
-            if (hasInputBufferInRequest) {
-                numBuffersReturned += 1;
-            } else {
-                ALOGW("%s: Input buffer should be NULL if there is no input"
-                        " buffer sent in the request",
-                        __FUNCTION__);
-            }
-        }
-        request.numBuffersLeft -= numBuffersReturned;
-        if (request.numBuffersLeft < 0) {
-            SET_ERR("Too many buffers returned for frame %d",
-                    frameNumber);
-            return;
-        }
-
-        camera_metadata_ro_entry_t entry;
-        res = find_camera_metadata_ro_entry(result->result,
-                ANDROID_SENSOR_TIMESTAMP, &entry);
-        if (res == OK && entry.count == 1) {
-            request.sensorTimestamp = entry.data.i64[0];
-        }
-
-        // If shutter event isn't received yet, append the output buffers to
-        // the in-flight request. Otherwise, return the output buffers to
-        // streams.
-        if (shutterTimestamp == 0) {
-            request.pendingOutputBuffers.appendArray(result->output_buffers,
-                result->num_output_buffers);
-        } else {
-            bool timestampIncreasing = !(request.zslCapture || request.hasInputBuffer);
-            returnOutputBuffers(result->output_buffers,
-                result->num_output_buffers, shutterTimestamp, timestampIncreasing,
-                request.outputSurfaces, request.resultExtras);
-        }
-
-        if (result->result != NULL && !isPartialResult) {
-            for (uint32_t i = 0; i < result->num_physcam_metadata; i++) {
-                CameraMetadata physicalMetadata;
-                physicalMetadata.append(result->physcam_metadata[i]);
-                request.physicalMetadatas.push_back({String16(result->physcam_ids[i]),
-                        physicalMetadata});
-            }
-            if (shutterTimestamp == 0) {
-                request.pendingMetadata = result->result;
-                request.collectedPartialResult = collectedPartialResult;
-            } else if (request.hasCallback) {
-                CameraMetadata metadata;
-                metadata = result->result;
-                sendCaptureResult(metadata, request.resultExtras,
-                    collectedPartialResult, frameNumber,
-                    hasInputBufferInRequest, request.zslCapture && request.stillCapture,
-                    request.physicalMetadatas);
-            }
-        }
-
-        removeInFlightRequestIfReadyLocked(idx);
-    } // scope for mInFlightLock
-
-    if (result->input_buffer != NULL) {
-        if (hasInputBufferInRequest) {
-            Camera3Stream *stream =
-                Camera3Stream::cast(result->input_buffer->stream);
-            res = stream->returnInputBuffer(*(result->input_buffer));
-            // Note: stream may be deallocated at this point, if this buffer was the
-            // last reference to it.
-            if (res != OK) {
-                ALOGE("%s: RequestThread: Can't return input buffer for frame %d to"
-                      "  its stream:%s (%d)",  __FUNCTION__,
-                      frameNumber, strerror(-res), res);
-            }
-        } else {
-            ALOGW("%s: Input buffer should be NULL if there is no input"
-                    " buffer sent in the request, skipping input buffer return.",
-                    __FUNCTION__);
-        }
-    }
-}
-
-void Camera3Device::notify(const camera3_notify_msg *msg) {
-    ATRACE_CALL();
     sp<NotificationListener> listener;
     {
-        Mutex::Autolock l(mOutputLock);
+        std::lock_guard<std::mutex> l(mOutputLock);
         listener = mListener.promote();
     }
 
-    if (msg == NULL) {
-        SET_ERR("HAL sent NULL notify message!");
-        return;
-    }
+    FlushInflightReqStates states {
+        mId, mInFlightLock, mInFlightMap, mUseHalBufManager,
+        listener, *this, *mInterface, *this};
 
-    switch (msg->type) {
-        case CAMERA3_MSG_ERROR: {
-            notifyError(msg->message.error, listener);
-            break;
-        }
-        case CAMERA3_MSG_SHUTTER: {
-            notifyShutter(msg->message.shutter, listener);
-            break;
-        }
-        default:
-            SET_ERR("Unknown notify message from HAL: %d",
-                    msg->type);
-    }
-}
-
-void Camera3Device::notifyError(const camera3_error_msg_t &msg,
-        sp<NotificationListener> listener) {
-    ATRACE_CALL();
-    // Map camera HAL error codes to ICameraDeviceCallback error codes
-    // Index into this with the HAL error code
-    static const int32_t halErrorMap[CAMERA3_MSG_NUM_ERRORS] = {
-        // 0 = Unused error code
-        hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_INVALID_ERROR,
-        // 1 = CAMERA3_MSG_ERROR_DEVICE
-        hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DEVICE,
-        // 2 = CAMERA3_MSG_ERROR_REQUEST
-        hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST,
-        // 3 = CAMERA3_MSG_ERROR_RESULT
-        hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT,
-        // 4 = CAMERA3_MSG_ERROR_BUFFER
-        hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER
-    };
-
-    int32_t errorCode =
-            ((msg.error_code >= 0) &&
-                    (msg.error_code < CAMERA3_MSG_NUM_ERRORS)) ?
-            halErrorMap[msg.error_code] :
-            hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_INVALID_ERROR;
-
-    int streamId = 0;
-    String16 physicalCameraId;
-    if (msg.error_stream != NULL) {
-        Camera3Stream *stream =
-                Camera3Stream::cast(msg.error_stream);
-        streamId = stream->getId();
-        physicalCameraId = String16(stream->physicalCameraId());
-    }
-    ALOGV("Camera %s: %s: HAL error, frame %d, stream %d: %d",
-            mId.string(), __FUNCTION__, msg.frame_number,
-            streamId, msg.error_code);
-
-    CaptureResultExtras resultExtras;
-    switch (errorCode) {
-        case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DEVICE:
-            // SET_ERR calls notifyError
-            SET_ERR("Camera HAL reported serious device error");
-            break;
-        case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST:
-        case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT:
-        case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER:
-            {
-                Mutex::Autolock l(mInFlightLock);
-                ssize_t idx = mInFlightMap.indexOfKey(msg.frame_number);
-                if (idx >= 0) {
-                    InFlightRequest &r = mInFlightMap.editValueAt(idx);
-                    r.requestStatus = msg.error_code;
-                    resultExtras = r.resultExtras;
-                    bool logicalDeviceResultError = false;
-                    if (hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT ==
-                            errorCode) {
-                        if (physicalCameraId.size() > 0) {
-                            String8 cameraId(physicalCameraId);
-                            auto iter = r.physicalCameraIds.find(cameraId);
-                            if (iter == r.physicalCameraIds.end()) {
-                                ALOGE("%s: Reported result failure for physical camera device: %s "
-                                        " which is not part of the respective request!",
-                                        __FUNCTION__, cameraId.string());
-                                break;
-                            }
-                            r.physicalCameraIds.erase(iter);
-                            resultExtras.errorPhysicalCameraId = physicalCameraId;
-                        } else {
-                            logicalDeviceResultError = true;
-                        }
-                    }
-
-                    if (logicalDeviceResultError
-                            ||  hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST ==
-                            errorCode) {
-                        r.skipResultMetadata = true;
-                    }
-                    if (logicalDeviceResultError) {
-                        // In case of missing result check whether the buffers
-                        // returned. If they returned, then remove inflight
-                        // request.
-                        // TODO: should we call this for ERROR_CAMERA_REQUEST as well?
-                        //       otherwise we are depending on HAL to send the buffers back after
-                        //       calling notifyError. Not sure if that's in the spec.
-                        removeInFlightRequestIfReadyLocked(idx);
-                    }
-                } else {
-                    resultExtras.frameNumber = msg.frame_number;
-                    ALOGE("Camera %s: %s: cannot find in-flight request on "
-                            "frame %" PRId64 " error", mId.string(), __FUNCTION__,
-                            resultExtras.frameNumber);
-                }
-            }
-            resultExtras.errorStreamId = streamId;
-            if (listener != NULL) {
-                listener->notifyError(errorCode, resultExtras);
-            } else {
-                ALOGE("Camera %s: %s: no listener available", mId.string(), __FUNCTION__);
-            }
-            break;
-        default:
-            // SET_ERR calls notifyError
-            SET_ERR("Unknown error message from HAL: %d", msg.error_code);
-            break;
-    }
-}
-
-void Camera3Device::notifyShutter(const camera3_shutter_msg_t &msg,
-        sp<NotificationListener> listener) {
-    ATRACE_CALL();
-    ssize_t idx;
-
-    // Set timestamp for the request in the in-flight tracking
-    // and get the request ID to send upstream
-    {
-        Mutex::Autolock l(mInFlightLock);
-        idx = mInFlightMap.indexOfKey(msg.frame_number);
-        if (idx >= 0) {
-            InFlightRequest &r = mInFlightMap.editValueAt(idx);
-
-            // Verify ordering of shutter notifications
-            {
-                Mutex::Autolock l(mOutputLock);
-                // TODO: need to track errors for tighter bounds on expected frame number.
-                if (r.hasInputBuffer) {
-                    if (msg.frame_number < mNextReprocessShutterFrameNumber) {
-                        SET_ERR("Reprocess shutter notification out-of-order. Expected "
-                                "notification for frame %d, got frame %d",
-                                mNextReprocessShutterFrameNumber, msg.frame_number);
-                        return;
-                    }
-                    mNextReprocessShutterFrameNumber = msg.frame_number + 1;
-                } else if (r.zslCapture && r.stillCapture) {
-                    if (msg.frame_number < mNextZslStillShutterFrameNumber) {
-                        SET_ERR("ZSL still capture shutter notification out-of-order. Expected "
-                                "notification for frame %d, got frame %d",
-                                mNextZslStillShutterFrameNumber, msg.frame_number);
-                        return;
-                    }
-                    mNextZslStillShutterFrameNumber = msg.frame_number + 1;
-                } else {
-                    if (msg.frame_number < mNextShutterFrameNumber) {
-                        SET_ERR("Shutter notification out-of-order. Expected "
-                                "notification for frame %d, got frame %d",
-                                mNextShutterFrameNumber, msg.frame_number);
-                        return;
-                    }
-                    mNextShutterFrameNumber = msg.frame_number + 1;
-                }
-            }
-
-            r.shutterTimestamp = msg.timestamp;
-            if (r.hasCallback) {
-                ALOGVV("Camera %s: %s: Shutter fired for frame %d (id %d) at %" PRId64,
-                    mId.string(), __FUNCTION__,
-                    msg.frame_number, r.resultExtras.requestId, msg.timestamp);
-                // Call listener, if any
-                if (listener != NULL) {
-                    listener->notifyShutter(r.resultExtras, msg.timestamp);
-                }
-                // send pending result and buffers
-                sendCaptureResult(r.pendingMetadata, r.resultExtras,
-                    r.collectedPartialResult, msg.frame_number,
-                    r.hasInputBuffer, r.zslCapture && r.stillCapture,
-                    r.physicalMetadatas);
-            }
-            bool timestampIncreasing = !(r.zslCapture || r.hasInputBuffer);
-            returnOutputBuffers(r.pendingOutputBuffers.array(),
-                    r.pendingOutputBuffers.size(), r.shutterTimestamp, timestampIncreasing,
-                    r.outputSurfaces, r.resultExtras);
-            r.pendingOutputBuffers.clear();
-
-            removeInFlightRequestIfReadyLocked(idx);
-        }
-    }
-    if (idx < 0) {
-        SET_ERR("Shutter notification for non-existent frame number %d",
-                msg.frame_number);
-    }
+    camera3::flushInflightRequests(states);
 }
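Editor's note: flushInflightRequests() now only collects the state the shared helper needs and
forwards it to the free function camera3::flushInflightRequests(). The sketch below illustrates
that "aggregate of references" pattern in isolation; every name and member in it is an
illustrative assumption, not the actual FlushInflightReqStates definition.

#include <mutex>
#include <string>
#include <vector>

// Illustrative stand-ins only; the real types live in the camera3 helpers.
struct InFlightEntrySketch { int64_t frameNumber; };

struct FlushStatesSketch {
    const std::string& cameraId;                 // used for logging
    std::mutex& inFlightLock;                    // guards the in-flight list
    std::vector<InFlightEntrySketch>& inFlight;  // pending requests to drain
    bool useHalBufManager;
};

// A free helper takes the aggregate instead of a Camera3Device pointer, which is what lets
// the live device and an offline session share the same flush code path.
void flushSketch(FlushStatesSketch& states) {
    std::lock_guard<std::mutex> l(states.inFlightLock);
    states.inFlight.clear();  // return/drop everything still pending
}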
 
 CameraMetadata Camera3Device::getLatestRequestLocked() {
@@ -4037,7 +2825,6 @@
     return retVal;
 }
 
-
 void Camera3Device::monitorMetadata(TagMonitor::eventSource source,
         int64_t frameNumber, nsecs_t timestamp, const CameraMetadata& metadata,
         const std::unordered_map<std::string, CameraMetadata>& physicalMetadata) {
@@ -4272,20 +3059,10 @@
 
         activeStreams.insert(streamId);
         // Create Buffer ID map if necessary
-        if (mBufferIdMaps.count(streamId) == 0) {
-            mBufferIdMaps.emplace(streamId, BufferIdMap{});
-        }
+        mBufferRecords.tryCreateBufferCache(streamId);
     }
     // remove BufferIdMap for deleted streams
-    for(auto it = mBufferIdMaps.begin(); it != mBufferIdMaps.end();) {
-        int streamId = it->first;
-        bool active = activeStreams.count(streamId) > 0;
-        if (!active) {
-            it = mBufferIdMaps.erase(it);
-        } else {
-            ++it;
-        }
-    }
+    mBufferRecords.removeInactiveBufferCaches(activeStreams);
 
     StreamConfigurationMode operationMode;
     res = mapToStreamConfigurationMode(
@@ -4303,6 +3080,7 @@
     // Invoke configureStreams
     device::V3_3::HalStreamConfiguration finalConfiguration;
     device::V3_4::HalStreamConfiguration finalConfiguration3_4;
+    device::V3_6::HalStreamConfiguration finalConfiguration3_6;
     common::V1_0::Status status;
 
     auto configStream34Cb = [&status, &finalConfiguration3_4]
@@ -4311,6 +3089,12 @@
                 status = s;
             };
 
+    auto configStream36Cb = [&status, &finalConfiguration3_6]
+            (common::V1_0::Status s, const device::V3_6::HalStreamConfiguration& halConfiguration) {
+                finalConfiguration3_6 = halConfiguration;
+                status = s;
+            };
+
     auto postprocConfigStream34 = [&finalConfiguration, &finalConfiguration3_4]
             (hardware::Return<void>& err) -> status_t {
                 if (!err.isOk()) {
@@ -4324,8 +3108,32 @@
                 return OK;
             };
 
+    auto postprocConfigStream36 = [&finalConfiguration, &finalConfiguration3_6]
+            (hardware::Return<void>& err) -> status_t {
+                if (!err.isOk()) {
+                    ALOGE("%s: Transaction error: %s", __FUNCTION__, err.description().c_str());
+                    return DEAD_OBJECT;
+                }
+                finalConfiguration.streams.resize(finalConfiguration3_6.streams.size());
+                for (size_t i = 0; i < finalConfiguration3_6.streams.size(); i++) {
+                    finalConfiguration.streams[i] = finalConfiguration3_6.streams[i].v3_4.v3_3;
+                }
+                return OK;
+            };
+
     // See which version of HAL we have
-    if (mHidlSession_3_5 != nullptr) {
+    if (mHidlSession_3_6 != nullptr) {
+        ALOGV("%s: v3.6 device found", __FUNCTION__);
+        device::V3_5::StreamConfiguration requestedConfiguration3_5;
+        requestedConfiguration3_5.v3_4 = requestedConfiguration3_4;
+        requestedConfiguration3_5.streamConfigCounter = mNextStreamConfigCounter++;
+        auto err = mHidlSession_3_6->configureStreams_3_6(
+                requestedConfiguration3_5, configStream36Cb);
+        res = postprocConfigStream36(err);
+        if (res != OK) {
+            return res;
+        }
+    } else if (mHidlSession_3_5 != nullptr) {
         ALOGV("%s: v3.5 device found", __FUNCTION__);
         device::V3_5::StreamConfiguration requestedConfiguration3_5;
         requestedConfiguration3_5.v3_4 = requestedConfiguration3_4;
@@ -4407,11 +3215,16 @@
             return INVALID_OPERATION;
         }
         device::V3_3::HalStream &src = finalConfiguration.streams[realIdx];
+        device::V3_6::HalStream &src_36 = finalConfiguration3_6.streams[realIdx];
 
         Camera3Stream* dstStream = Camera3Stream::cast(dst);
         int overrideFormat = mapToFrameworkFormat(src.v3_2.overrideFormat);
         android_dataspace overrideDataSpace = mapToFrameworkDataspace(src.overrideDataSpace);
 
+        if (mHidlSession_3_6 != nullptr) {
+            dstStream->setOfflineProcessingSupport(src_36.supportOffline);
+        }
+
         if (dstStream->getOriginalFormat() != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
             dstStream->setFormatOverride(false);
             dstStream->setDataSpaceOverride(false);
@@ -4475,7 +3288,6 @@
     captureRequest->fmqSettingsSize = 0;
 
     {
-        std::lock_guard<std::mutex> lock(mInflightLock);
         if (request->input_buffer != nullptr) {
             int32_t streamId = Camera3Stream::cast(request->input_buffer->stream)->getId();
             buffer_handle_t buf = *(request->input_buffer->buffer);
@@ -4495,7 +3307,7 @@
             captureRequest->inputBuffer.acquireFence = acquireFence;
             captureRequest->inputBuffer.releaseFence = nullptr;
 
-            pushInflightBufferLocked(captureRequest->frameNumber, streamId,
+            mBufferRecords.pushInflightBuffer(captureRequest->frameNumber, streamId,
                     request->input_buffer->buffer);
             inflightBuffers->push_back(std::make_pair(captureRequest->frameNumber, streamId));
         } else {
@@ -4536,7 +3348,8 @@
 
             // Output buffers are empty when using HAL buffer manager
             if (!mUseHalBufManager) {
-                pushInflightBufferLocked(captureRequest->frameNumber, streamId, src->buffer);
+                mBufferRecords.pushInflightBuffer(
+                        captureRequest->frameNumber, streamId, src->buffer);
                 inflightBuffers->push_back(std::make_pair(captureRequest->frameNumber, streamId));
             }
         }
@@ -4593,7 +3406,7 @@
                     /*out*/&handlesCreated, /*out*/&inflightBuffers);
         }
         if (res != OK) {
-            popInflightBuffers(inflightBuffers);
+            mBufferRecords.popInflightBuffers(inflightBuffers);
             cleanupNativeHandles(&handlesCreated);
             return res;
         }
@@ -4601,10 +3414,10 @@
 
     std::vector<device::V3_2::BufferCache> cachesToRemove;
     {
-        std::lock_guard<std::mutex> lock(mBufferIdMapLock);
+        std::lock_guard<std::mutex> lock(mFreedBuffersLock);
         for (auto& pair : mFreedBuffers) {
             // The stream might have been removed since onBufferFreed
-            if (mBufferIdMaps.find(pair.first) != mBufferIdMaps.end()) {
+            if (mBufferRecords.isStreamCached(pair.first)) {
                 cachesToRemove.push_back({pair.first, pair.second});
             }
         }
@@ -4711,7 +3524,7 @@
             cleanupNativeHandles(&handlesCreated);
         }
     } else {
-        popInflightBuffers(inflightBuffers);
+        mBufferRecords.popInflightBuffers(inflightBuffers);
         cleanupNativeHandles(&handlesCreated);
     }
     return res;
@@ -4773,20 +3586,20 @@
 status_t Camera3Device::HalInterface::switchToOffline(
         const std::vector<int32_t>& streamsToKeep,
         /*out*/hardware::camera::device::V3_6::CameraOfflineSessionInfo* offlineSessionInfo,
-        /*out*/sp<hardware::camera::device::V3_6::ICameraOfflineSession>* offlineSession) {
+        /*out*/sp<hardware::camera::device::V3_6::ICameraOfflineSession>* offlineSession,
+        /*out*/camera3::BufferRecords* bufferRecords) {
     ATRACE_NAME("CameraHal::switchToOffline");
     if (!valid() || mHidlSession_3_6 == nullptr) {
         ALOGE("%s called on invalid camera!", __FUNCTION__);
         return INVALID_OPERATION;
     }
 
-    if (offlineSessionInfo == nullptr || offlineSession == nullptr) {
-        ALOGE("%s: offlineSessionInfo and offlineSession must not be null!", __FUNCTION__);
+    if (offlineSessionInfo == nullptr || offlineSession == nullptr || bufferRecords == nullptr) {
+        ALOGE("%s: output arguments must not be null!", __FUNCTION__);
         return INVALID_OPERATION;
     }
 
     common::V1_0::Status status = common::V1_0::Status::INTERNAL_ERROR;
-
     auto resultCallback =
         [&status, &offlineSessionInfo, &offlineSession] (auto s, auto info, auto session) {
                 status = s;
@@ -4799,75 +3612,65 @@
         ALOGE("%s: Transaction error: %s", __FUNCTION__, err.description().c_str());
         return DEAD_OBJECT;
     }
-    return CameraProviderManager::mapToStatusT(status);
+
+    status_t ret = CameraProviderManager::mapToStatusT(status);
+    if (ret != OK) {
+        return ret;
+    }
+
+    // TODO: assert no ongoing requestBuffer/returnBuffer call here
+    // TODO: update RequestBufferStateMachine to block requestBuffer/returnBuffer once HAL
+    //       returns from switchToOffline.
+
+
+    // Validate buffer caches
+    std::vector<int32_t> streams;
+    streams.reserve(offlineSessionInfo->offlineStreams.size());
+    for (auto offlineStream : offlineSessionInfo->offlineStreams) {
+        int32_t id = offlineStream.id;
+        streams.push_back(id);
+        // Verify buffer caches
+        std::vector<uint64_t> bufIds(offlineStream.circulatingBufferIds.begin(),
+                offlineStream.circulatingBufferIds.end());
+        if (!verifyBufferIds(id, bufIds)) {
+            ALOGE("%s: stream ID %d buffer cache records mismatch!", __FUNCTION__, id);
+            return UNKNOWN_ERROR;
+        }
+    }
+
+    // Move buffer records
+    bufferRecords->takeBufferCaches(mBufferRecords, streams);
+    bufferRecords->takeInflightBufferMap(mBufferRecords);
+    bufferRecords->takeRequestedBufferMap(mBufferRecords);
+    return ret;
 }
 
 void Camera3Device::HalInterface::getInflightBufferKeys(
         std::vector<std::pair<int32_t, int32_t>>* out) {
-    std::lock_guard<std::mutex> lock(mInflightLock);
-    out->clear();
-    out->reserve(mInflightBufferMap.size());
-    for (auto& pair : mInflightBufferMap) {
-        uint64_t key = pair.first;
-        int32_t streamId = key & 0xFFFFFFFF;
-        int32_t frameNumber = (key >> 32) & 0xFFFFFFFF;
-        out->push_back(std::make_pair(frameNumber, streamId));
-    }
+    mBufferRecords.getInflightBufferKeys(out);
     return;
 }
 
 void Camera3Device::HalInterface::getInflightRequestBufferKeys(
         std::vector<uint64_t>* out) {
-    std::lock_guard<std::mutex> lock(mRequestedBuffersLock);
-    out->clear();
-    out->reserve(mRequestedBuffers.size());
-    for (auto& pair : mRequestedBuffers) {
-        out->push_back(pair.first);
-    }
+    mBufferRecords.getInflightRequestBufferKeys(out);
     return;
 }
 
-status_t Camera3Device::HalInterface::pushInflightBufferLocked(
-        int32_t frameNumber, int32_t streamId, buffer_handle_t *buffer) {
-    uint64_t key = static_cast<uint64_t>(frameNumber) << 32 | static_cast<uint64_t>(streamId);
-    mInflightBufferMap[key] = buffer;
-    return OK;
+bool Camera3Device::HalInterface::verifyBufferIds(
+        int32_t streamId, std::vector<uint64_t>& bufIds) {
+    return mBufferRecords.verifyBufferIds(streamId, bufIds);
 }
 
 status_t Camera3Device::HalInterface::popInflightBuffer(
         int32_t frameNumber, int32_t streamId,
         /*out*/ buffer_handle_t **buffer) {
-    std::lock_guard<std::mutex> lock(mInflightLock);
-
-    uint64_t key = static_cast<uint64_t>(frameNumber) << 32 | static_cast<uint64_t>(streamId);
-    auto it = mInflightBufferMap.find(key);
-    if (it == mInflightBufferMap.end()) return NAME_NOT_FOUND;
-    if (buffer != nullptr) {
-        *buffer = it->second;
-    }
-    mInflightBufferMap.erase(it);
-    return OK;
-}
-
-void Camera3Device::HalInterface::popInflightBuffers(
-        const std::vector<std::pair<int32_t, int32_t>>& buffers) {
-    for (const auto& pair : buffers) {
-        int32_t frameNumber = pair.first;
-        int32_t streamId = pair.second;
-        popInflightBuffer(frameNumber, streamId, nullptr);
-    }
+    return mBufferRecords.popInflightBuffer(frameNumber, streamId, buffer);
 }
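Editor's note: the deleted HalInterface code above shows the keying scheme that
camera3::BufferRecords now encapsulates: the frame number goes in the upper 32 bits and the
stream ID in the lower 32 bits of a single 64-bit map key. A self-contained sketch of that
encoding (helper names are illustrative):

#include <cstdint>

// Packing identical to the removed pushInflightBufferLocked():
inline uint64_t makeInflightKey(int32_t frameNumber, int32_t streamId) {
    return static_cast<uint64_t>(frameNumber) << 32 | static_cast<uint64_t>(streamId);
}

// Unpacking identical to the removed getInflightBufferKeys():
inline int32_t inflightFrameNumber(uint64_t key) { return (key >> 32) & 0xFFFFFFFF; }
inline int32_t inflightStreamId(uint64_t key)    { return key & 0xFFFFFFFF; }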
 
 status_t Camera3Device::HalInterface::pushInflightRequestBuffer(
         uint64_t bufferId, buffer_handle_t* buf, int32_t streamId) {
-    std::lock_guard<std::mutex> lock(mRequestedBuffersLock);
-    auto pair = mRequestedBuffers.insert({bufferId, {streamId, buf}});
-    if (!pair.second) {
-        ALOGE("%s: bufId %" PRIu64 " is already inflight!",
-                __FUNCTION__, bufferId);
-        return BAD_VALUE;
-    }
-    return OK;
+    return mBufferRecords.pushInflightRequestBuffer(bufferId, buf, streamId);
 }
 
 // Find and pop a buffer_handle_t based on bufferId
@@ -4875,81 +3678,29 @@
         uint64_t bufferId,
         /*out*/ buffer_handle_t** buffer,
         /*optional out*/ int32_t* streamId) {
-    if (buffer == nullptr) {
-        ALOGE("%s: buffer (%p) must not be null", __FUNCTION__, buffer);
-        return BAD_VALUE;
-    }
-    std::lock_guard<std::mutex> lock(mRequestedBuffersLock);
-    auto it = mRequestedBuffers.find(bufferId);
-    if (it == mRequestedBuffers.end()) {
-        ALOGE("%s: bufId %" PRIu64 " is not inflight!",
-                __FUNCTION__, bufferId);
-        return BAD_VALUE;
-    }
-    *buffer = it->second.second;
-    if (streamId != nullptr) {
-        *streamId = it->second.first;
-    }
-    mRequestedBuffers.erase(it);
-    return OK;
+    return mBufferRecords.popInflightRequestBuffer(bufferId, buffer, streamId);
 }
 
 std::pair<bool, uint64_t> Camera3Device::HalInterface::getBufferId(
         const buffer_handle_t& buf, int streamId) {
-    std::lock_guard<std::mutex> lock(mBufferIdMapLock);
-
-    BufferIdMap& bIdMap = mBufferIdMaps.at(streamId);
-    auto it = bIdMap.find(buf);
-    if (it == bIdMap.end()) {
-        bIdMap[buf] = mNextBufferId++;
-        ALOGV("stream %d now have %zu buffer caches, buf %p",
-                streamId, bIdMap.size(), buf);
-        return std::make_pair(true, mNextBufferId - 1);
-    } else {
-        return std::make_pair(false, it->second);
-    }
+    return mBufferRecords.getBufferId(buf, streamId);
 }
 
 void Camera3Device::HalInterface::onBufferFreed(
         int streamId, const native_handle_t* handle) {
-    std::lock_guard<std::mutex> lock(mBufferIdMapLock);
-    uint64_t bufferId = BUFFER_ID_NO_BUFFER;
-    auto mapIt = mBufferIdMaps.find(streamId);
-    if (mapIt == mBufferIdMaps.end()) {
-        // streamId might be from a deleted stream here
-        ALOGI("%s: stream %d has been removed",
-                __FUNCTION__, streamId);
-        return;
+    uint64_t bufferId = mBufferRecords.removeOneBufferCache(streamId, handle);
+    std::lock_guard<std::mutex> lock(mFreedBuffersLock);
+    if (bufferId != BUFFER_ID_NO_BUFFER) {
+        mFreedBuffers.push_back(std::make_pair(streamId, bufferId));
     }
-    BufferIdMap& bIdMap = mapIt->second;
-    auto it = bIdMap.find(handle);
-    if (it == bIdMap.end()) {
-        ALOGW("%s: cannot find buffer %p in stream %d",
-                __FUNCTION__, handle, streamId);
-        return;
-    } else {
-        bufferId = it->second;
-        bIdMap.erase(it);
-        ALOGV("%s: stream %d now have %zu buffer caches after removing buf %p",
-                __FUNCTION__, streamId, bIdMap.size(), handle);
-    }
-    mFreedBuffers.push_back(std::make_pair(streamId, bufferId));
 }
 
 void Camera3Device::HalInterface::onStreamReConfigured(int streamId) {
-    std::lock_guard<std::mutex> lock(mBufferIdMapLock);
-    auto mapIt = mBufferIdMaps.find(streamId);
-    if (mapIt == mBufferIdMaps.end()) {
-        ALOGE("%s: streamId %d not found!", __FUNCTION__, streamId);
-        return;
-    }
-
-    BufferIdMap& bIdMap = mapIt->second;
-    for (const auto& it : bIdMap) {
-        uint64_t bufferId = it.second;
+    std::vector<uint64_t> bufIds = mBufferRecords.clearBufferCaches(streamId);
+    std::lock_guard<std::mutex> lock(mFreedBuffersLock);
+    for (auto bufferId : bufIds) {
         mFreedBuffers.push_back(std::make_pair(streamId, bufferId));
     }
-    bIdMap.clear();
 }
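Editor's note: after this refactor, onBufferFreed() and onStreamReConfigured() only queue freed
buffer IDs under mFreedBuffersLock; the queue is drained into the HAL's BufferCache-removal list
when the next request batch is assembled (see the cachesToRemove block earlier in this patch).
A standalone sketch of that deferred-removal pattern, with illustrative names:

#include <cstdint>
#include <mutex>
#include <utility>
#include <vector>

class FreedBufferQueueSketch {
  public:
    // Called from buffer-freed / stream-reconfigured callbacks: just record the ID.
    void onBufferFreed(int32_t streamId, uint64_t bufferId) {
        std::lock_guard<std::mutex> lock(mLock);
        mFreed.emplace_back(streamId, bufferId);
    }

    // Called while building the next capture request: hand the batch to the caller.
    std::vector<std::pair<int32_t, uint64_t>> drain() {
        std::lock_guard<std::mutex> lock(mLock);
        return std::exchange(mFreed, {});
    }

  private:
    std::mutex mLock;
    std::vector<std::pair<int32_t, uint64_t>> mFreed;
};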
 
 /**
@@ -5445,6 +4196,9 @@
 bool Camera3Device::RequestThread::threadLoop() {
     ATRACE_CALL();
     status_t res;
+    // Any function called from threadLoop() must not hold mInterfaceLock, since
+    // that could lead to deadlocks (disconnect() -> hold mInterfaceLock -> wait for request
+    // thread to finish -> request thread waits on mInterfaceLock) http://b/143513518
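Editor's note: the comment above describes a classic join-while-holding-a-lock deadlock. A
minimal standalone illustration of that shape (not Camera3Device code; running it is expected
to deadlock, which is the point):

#include <atomic>
#include <mutex>
#include <thread>

std::mutex gInterfaceLock;
std::atomic<bool> gRunning{true};

void workerLoop() {
    while (gRunning) {
        // If the "disconnect" path below already holds the lock, a worker that has
        // passed the gRunning check blocks here and never exits.
        std::lock_guard<std::mutex> l(gInterfaceLock);
        // ... per-iteration work ...
    }
}

void disconnectLikeCall(std::thread& worker) {
    std::lock_guard<std::mutex> l(gInterfaceLock);  // lock taken first...
    gRunning = false;
    worker.join();  // ...then join: both threads now wait on each other forever
}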
 
     // Handle paused state.
     if (waitIfPaused()) {
@@ -5610,6 +4364,7 @@
                 // request in a batch as new
                 !(batchedRequest && i > 0);
         if (newRequest) {
+            std::set<std::string> cameraIdsWithZoom;
             /**
              * HAL workaround:
              * Insert a dummy trigger ID if a trigger is set but no trigger ID is
@@ -5642,6 +4397,28 @@
                             return INVALID_OPERATION;
                         }
                     }
+
+                    for (it = captureRequest->mSettingsList.begin();
+                            it != captureRequest->mSettingsList.end(); it++) {
+                        if (parent->mZoomRatioMappers.find(it->cameraId) ==
+                                parent->mZoomRatioMappers.end()) {
+                            continue;
+                        }
+
+                        camera_metadata_entry_t e = it->metadata.find(ANDROID_CONTROL_ZOOM_RATIO);
+                        if (e.count > 0 && e.data.f[0] != 1.0f) {
+                            cameraIdsWithZoom.insert(it->cameraId);
+                        }
+
+                        res = parent->mZoomRatioMappers[it->cameraId].updateCaptureRequest(
+                            &(it->metadata));
+                        if (res != OK) {
+                            SET_ERR("RequestThread: Unable to correct capture requests "
+                                    "for zoom ratio for request %d: %s (%d)",
+                                    halRequest->frame_number, strerror(-res), res);
+                            return INVALID_OPERATION;
+                        }
+                    }
                 }
             }
 
@@ -5652,6 +4429,7 @@
             captureRequest->mSettingsList.begin()->metadata.sort();
             halRequest->settings = captureRequest->mSettingsList.begin()->metadata.getAndLock();
             mPrevRequest = captureRequest;
+            mPrevCameraIdsWithZoom = cameraIdsWithZoom;
             ALOGVV("%s: Request settings are NEW", __FUNCTION__);
 
             IF_ALOGV() {
@@ -5839,7 +4617,7 @@
                 /*hasInput*/halRequest->input_buffer != NULL,
                 hasCallback,
                 calculateMaxExpectedDuration(halRequest->settings),
-                requestedPhysicalCameras, isStillCapture, isZslCapture,
+                requestedPhysicalCameras, isStillCapture, isZslCapture, mPrevCameraIdsWithZoom,
                 (mUseHalBufManager) ? uniqueSurfaceIdMap :
                                       SurfaceMap{});
         ALOGVV("%s: registered in flight requestId = %" PRId32 ", frameNumber = %" PRId64
@@ -5956,17 +4734,22 @@
 status_t Camera3Device::RequestThread::switchToOffline(
         const std::vector<int32_t>& streamsToKeep,
         /*out*/hardware::camera::device::V3_6::CameraOfflineSessionInfo* offlineSessionInfo,
-        /*out*/sp<hardware::camera::device::V3_6::ICameraOfflineSession>* offlineSession) {
+        /*out*/sp<hardware::camera::device::V3_6::ICameraOfflineSession>* offlineSession,
+        /*out*/camera3::BufferRecords* bufferRecords) {
     Mutex::Autolock l(mRequestLock);
     clearRepeatingRequestsLocked(/*lastFrameNumber*/nullptr);
 
-    // Theoretically we should also check for mRepeatingRequests.empty(), but the API interface
-    // is serialized by mInterfaceLock so skip that check.
+    // Wait until the request thread is fully stopped
+    // TBD: check if request thread is being paused by other APIs (shouldn't be)
+
+    // We could also check for mRepeatingRequests.empty(), but the API interface
+    // is serialized by Camera3Device::mInterfaceLock so no one should be able to submit any
+    // new requests during the call; hence skip that check.
     bool queueEmpty = mNextRequests.empty() && mRequestQueue.empty();
     while (!queueEmpty) {
         status_t res = mRequestSubmittedSignal.waitRelative(mRequestLock, kRequestSubmitTimeout);
         if (res == TIMED_OUT) {
-            ALOGE("%s: request thread failed to submit a request within timeout!", __FUNCTION__);
+            ALOGE("%s: request thread failed to submit one request within timeout!", __FUNCTION__);
             return res;
         } else if (res != OK) {
             ALOGE("%s: request thread failed to submit a request: %s (%d)!",
@@ -5975,13 +4758,14 @@
         }
         queueEmpty = mNextRequests.empty() && mRequestQueue.empty();
     }
+
     return mInterface->switchToOffline(
-            streamsToKeep, offlineSessionInfo, offlineSession);
+            streamsToKeep, offlineSessionInfo, offlineSession, bufferRecords);
 }
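Editor's note: RequestThread::switchToOffline() above drains the pending request queues by
repeatedly waiting on mRequestSubmittedSignal with a per-step timeout. The same loop expressed
with std::condition_variable, as a generic illustration rather than the patch's actual API:

#include <chrono>
#include <condition_variable>
#include <deque>
#include <mutex>

// Returns true once the queue is empty; false if a single wait step times out
// (the caller would log an error and return TIMED_OUT, as the patch does).
template <typename T>
bool drainQueue(std::deque<T>& queue, std::mutex& lock,
                std::condition_variable& submitted, std::chrono::nanoseconds perStepTimeout) {
    std::unique_lock<std::mutex> l(lock);
    while (!queue.empty()) {
        if (submitted.wait_for(l, perStepTimeout) == std::cv_status::timeout) {
            return false;
        }
    }
    return true;
}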
 
 nsecs_t Camera3Device::getExpectedInFlightDuration() {
     ATRACE_CALL();
-    Mutex::Autolock al(mInFlightLock);
+    std::lock_guard<std::mutex> l(mInFlightLock);
     return mExpectedInflightDuration > kMinInflightDuration ?
             mExpectedInflightDuration : kMinInflightDuration;
 }
@@ -6067,7 +4851,7 @@
         {
           sp<Camera3Device> parent = mParent.promote();
           if (parent != NULL) {
-              Mutex::Autolock l(parent->mInFlightLock);
+              std::lock_guard<std::mutex> l(parent->mInFlightLock);
               ssize_t idx = parent->mInFlightMap.indexOfKey(captureRequest->mResultExtras.frameNumber);
               if (idx >= 0) {
                   ALOGV("%s: Remove inflight request from queue: frameNumber %" PRId64,
@@ -6756,6 +5540,7 @@
 
 void Camera3Device::RequestBufferStateMachine::onStreamsConfigured() {
     std::lock_guard<std::mutex> lock(mLock);
+    mSwitchedToOffline = false;
     mStatus = RB_STATUS_READY;
     return;
 }
@@ -6798,6 +5583,20 @@
     return;
 }
 
+bool Camera3Device::RequestBufferStateMachine::onSwitchToOfflineSuccess() {
+    std::lock_guard<std::mutex> lock(mLock);
+    if (mRequestBufferOngoing) {
+        ALOGE("%s: HAL must not be requesting buffer after HAL returns switchToOffline!",
+                __FUNCTION__);
+        return false;
+    }
+    mSwitchedToOffline = true;
+    mInflightMapEmpty = true;
+    mRequestThreadPaused = true;
+    mStatus = RB_STATUS_STOPPED;
+    return true;
+}
+
 void Camera3Device::RequestBufferStateMachine::notifyTrackerLocked(bool active) {
     sp<StatusTracker> statusTracker = mStatusTracker.promote();
     if (statusTracker != nullptr) {
@@ -6817,75 +5616,40 @@
     return false;
 }
 
-status_t Camera3Device::fixupMonochromeTags(const CameraMetadata& deviceInfo,
-        CameraMetadata& resultMetadata) {
-    status_t res = OK;
-    if (!mNeedFixupMonochromeTags) {
-        return res;
-    }
+bool Camera3Device::startRequestBuffer() {
+    return mRequestBufferSM.startRequestBuffer();
+}
 
-    // Remove tags that are not applicable to monochrome camera.
-    int32_t tagsToRemove[] = {
-           ANDROID_SENSOR_GREEN_SPLIT,
-           ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
-           ANDROID_COLOR_CORRECTION_MODE,
-           ANDROID_COLOR_CORRECTION_TRANSFORM,
-           ANDROID_COLOR_CORRECTION_GAINS,
-    };
-    for (auto tag : tagsToRemove) {
-        res = resultMetadata.erase(tag);
-        if (res != OK) {
-            ALOGE("%s: Failed to remove tag %d for monochrome camera", __FUNCTION__, tag);
-            return res;
-        }
-    }
+void Camera3Device::endRequestBuffer() {
+    mRequestBufferSM.endRequestBuffer();
+}
 
-    // ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
-    camera_metadata_entry blEntry = resultMetadata.find(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
-    for (size_t i = 1; i < blEntry.count; i++) {
-        blEntry.data.f[i] = blEntry.data.f[0];
-    }
+nsecs_t Camera3Device::getWaitDuration() {
+    return kBaseGetBufferWait + getExpectedInFlightDuration();
+}
 
-    // ANDROID_SENSOR_NOISE_PROFILE
-    camera_metadata_entry npEntry = resultMetadata.find(ANDROID_SENSOR_NOISE_PROFILE);
-    if (npEntry.count > 0 && npEntry.count % 2 == 0) {
-        double np[] = {npEntry.data.d[0], npEntry.data.d[1]};
-        res = resultMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, np, 2);
-        if (res != OK) {
-             ALOGE("%s: Failed to update SENSOR_NOISE_PROFILE: %s (%d)",
-                    __FUNCTION__, strerror(-res), res);
-            return res;
-        }
-    }
+void Camera3Device::getInflightBufferKeys(std::vector<std::pair<int32_t, int32_t>>* out) {
+    mInterface->getInflightBufferKeys(out);
+}
 
-    // ANDROID_STATISTICS_LENS_SHADING_MAP
-    camera_metadata_ro_entry lsSizeEntry = deviceInfo.find(ANDROID_LENS_INFO_SHADING_MAP_SIZE);
-    camera_metadata_entry lsEntry = resultMetadata.find(ANDROID_STATISTICS_LENS_SHADING_MAP);
-    if (lsSizeEntry.count == 2 && lsEntry.count > 0
-            && (int32_t)lsEntry.count == 4 * lsSizeEntry.data.i32[0] * lsSizeEntry.data.i32[1]) {
-        for (int32_t i = 0; i < lsSizeEntry.data.i32[0] * lsSizeEntry.data.i32[1]; i++) {
-            lsEntry.data.f[4*i+1] = lsEntry.data.f[4*i];
-            lsEntry.data.f[4*i+2] = lsEntry.data.f[4*i];
-            lsEntry.data.f[4*i+3] = lsEntry.data.f[4*i];
-        }
-    }
+void Camera3Device::getInflightRequestBufferKeys(std::vector<uint64_t>* out) {
+    mInterface->getInflightRequestBufferKeys(out);
+}
 
-    // ANDROID_TONEMAP_CURVE_BLUE
-    // ANDROID_TONEMAP_CURVE_GREEN
-    // ANDROID_TONEMAP_CURVE_RED
-    camera_metadata_entry tcbEntry = resultMetadata.find(ANDROID_TONEMAP_CURVE_BLUE);
-    camera_metadata_entry tcgEntry = resultMetadata.find(ANDROID_TONEMAP_CURVE_GREEN);
-    camera_metadata_entry tcrEntry = resultMetadata.find(ANDROID_TONEMAP_CURVE_RED);
-    if (tcbEntry.count > 0
-            && tcbEntry.count == tcgEntry.count
-            && tcbEntry.count == tcrEntry.count) {
-        for (size_t i = 0; i < tcbEntry.count; i++) {
-            tcbEntry.data.f[i] = tcrEntry.data.f[i];
-            tcgEntry.data.f[i] = tcrEntry.data.f[i];
-        }
+std::vector<sp<Camera3StreamInterface>> Camera3Device::getAllStreams() {
+    std::vector<sp<Camera3StreamInterface>> ret;
+    bool hasInputStream = mInputStream != nullptr;
+    ret.reserve(mOutputStreams.size() + mDeletedStreams.size() + ((hasInputStream) ? 1 : 0));
+    if (hasInputStream) {
+        ret.push_back(mInputStream);
     }
-
-    return res;
+    for (size_t i = 0; i < mOutputStreams.size(); i++) {
+        ret.push_back(mOutputStreams[i]);
+    }
+    for (size_t i = 0; i < mDeletedStreams.size(); i++) {
+        ret.push_back(mDeletedStreams[i]);
+    }
+    return ret;
 }
 
 status_t Camera3Device::switchToOffline(
@@ -6898,24 +5662,231 @@
     }
 
     Mutex::Autolock il(mInterfaceLock);
-    Mutex::Autolock l(mLock);
 
-    // 1. Stop repeating request, wait until request thread submitted all requests, then
-    //    call HAL switchToOffline
+    bool hasInputStream = mInputStream != nullptr;
+    int32_t inputStreamId = hasInputStream ? mInputStream->getId() : -1;
+    bool inputStreamSupportsOffline = hasInputStream ?
+            mInputStream->getOfflineProcessingSupport() : false;
+    auto outputStreamIds = mOutputStreams.getStreamIds();
+    auto streamIds = outputStreamIds;
+    if (hasInputStream) {
+        streamIds.push_back(mInputStream->getId());
+    }
+
+    // Check that all streams in streamsToKeep support offline mode
+    for (auto id : streamsToKeep) {
+        if (std::find(streamIds.begin(), streamIds.end(), id) == streamIds.end()) {
+            ALOGE("%s: Unknown stream ID %d", __FUNCTION__, id);
+            return BAD_VALUE;
+        } else if (id == inputStreamId) {
+            if (!inputStreamSupportsOffline) {
+                ALOGE("%s: input stream %d cannot be switched to offline",
+                        __FUNCTION__, id);
+                return BAD_VALUE;
+            }
+        } else {
+            sp<camera3::Camera3OutputStreamInterface> stream = mOutputStreams.get(id);
+            if (!stream->getOfflineProcessingSupport()) {
+                ALOGE("%s: output stream %d cannot be switched to offline",
+                        __FUNCTION__, id);
+                return BAD_VALUE;
+            }
+        }
+    }
+
+    // TODO: block surface sharing and surface group streams until we can support them
+
+    // Stop the repeating request, wait until all remaining requests are submitted, then call
+    // into HAL switchToOffline
     hardware::camera::device::V3_6::CameraOfflineSessionInfo offlineSessionInfo;
     sp<hardware::camera::device::V3_6::ICameraOfflineSession> offlineSession;
+    camera3::BufferRecords bufferRecords;
+    status_t ret = mRequestThread->switchToOffline(
+            streamsToKeep, &offlineSessionInfo, &offlineSession, &bufferRecords);
 
-    mRequestThread->switchToOffline(streamsToKeep, &offlineSessionInfo, &offlineSession);
+    if (ret != OK) {
+        SET_ERR("Switch to offline failed: %s (%d)", strerror(-ret), ret);
+        return ret;
+    }
 
+    bool succ = mRequestBufferSM.onSwitchToOfflineSuccess();
+    if (!succ) {
+        SET_ERR("HAL must not be calling requestStreamBuffers call");
+        // TODO: block ALL callbacks from HAL till app configured new streams?
+        return UNKNOWN_ERROR;
+    }
 
-    // 2. Verify offlineSessionInfo
-    // 3. create Camera3OfflineSession and transfer object ownership
+    // Verify offlineSessionInfo
+    std::vector<int32_t> offlineStreamIds;
+    offlineStreamIds.reserve(offlineSessionInfo.offlineStreams.size());
+    for (auto offlineStream : offlineSessionInfo.offlineStreams) {
+        // verify stream IDs
+        int32_t id = offlineStream.id;
+        if (std::find(streamIds.begin(), streamIds.end(), id) == streamIds.end()) {
+            SET_ERR("stream ID %d not found!", id);
+            return UNKNOWN_ERROR;
+        }
+
+        // When not using HAL buf manager, only allow streams requested by app to be preserved
+        if (!mUseHalBufManager) {
+            if (std::find(streamsToKeep.begin(), streamsToKeep.end(), id) == streamsToKeep.end()) {
+                SET_ERR("stream ID %d must not be switched to offline!", id);
+                return UNKNOWN_ERROR;
+            }
+        }
+
+        offlineStreamIds.push_back(id);
+        sp<Camera3StreamInterface> stream = (id == inputStreamId) ?
+                static_cast<sp<Camera3StreamInterface>>(mInputStream) :
+                static_cast<sp<Camera3StreamInterface>>(mOutputStreams.get(id));
+        // Verify number of outstanding buffers
+        if (stream->getOutstandingBuffersCount() != offlineStream.numOutstandingBuffers) {
+            SET_ERR("Offline stream %d # of remaining buffer mismatch: (%zu,%d) (service/HAL)",
+                    id, stream->getOutstandingBuffersCount(), offlineStream.numOutstandingBuffers);
+            return UNKNOWN_ERROR;
+        }
+    }
+
+    // Verify all streams to be deleted don't have any outstanding buffers
+    if (hasInputStream && std::find(offlineStreamIds.begin(), offlineStreamIds.end(),
+                inputStreamId) == offlineStreamIds.end()) {
+        if (mInputStream->hasOutstandingBuffers()) {
+            SET_ERR("Input stream %d still has %zu outstanding buffer!",
+                    inputStreamId, mInputStream->getOutstandingBuffersCount());
+            return UNKNOWN_ERROR;
+        }
+    }
+
+    for (const auto& outStreamId : outputStreamIds) {
+        if (std::find(offlineStreamIds.begin(), offlineStreamIds.end(),
+                outStreamId) == offlineStreamIds.end()) {
+            auto outStream = mOutputStreams.get(outStreamId);
+            if (outStream->hasOutstandingBuffers()) {
+                SET_ERR("Output stream %d still has %zu outstanding buffer!",
+                        outStreamId, outStream->getOutstandingBuffersCount());
+                return UNKNOWN_ERROR;
+            }
+        }
+    }
+
+    InFlightRequestMap offlineReqs;
+    // Verify inflight requests and their pending buffers
+    {
+        std::lock_guard<std::mutex> l(mInFlightLock);
+        for (auto offlineReq : offlineSessionInfo.offlineRequests) {
+            int idx = mInFlightMap.indexOfKey(offlineReq.frameNumber);
+            if (idx == NAME_NOT_FOUND) {
+                SET_ERR("Offline request frame number %d not found!", offlineReq.frameNumber);
+                return UNKNOWN_ERROR;
+            }
+
+            const auto& inflightReq = mInFlightMap.valueAt(idx);
+            // TODO: check specific stream IDs
+            size_t numBuffersLeft = static_cast<size_t>(inflightReq.numBuffersLeft);
+            if (numBuffersLeft != offlineReq.pendingStreams.size()) {
+                SET_ERR("Offline request # of remaining buffer mismatch: (%d,%d) (service/HAL)",
+                        inflightReq.numBuffersLeft, offlineReq.pendingStreams.size());
+                return UNKNOWN_ERROR;
+            }
+            offlineReqs.add(offlineReq.frameNumber, inflightReq);
+        }
+    }
+
+    // Create Camera3OfflineSession and transfer object ownership
     //   (streams, inflight requests, buffer caches)
-    *session = new Camera3OfflineSession(mId);
+    camera3::StreamSet offlineStreamSet;
+    sp<camera3::Camera3Stream> inputStream;
+    for (auto offlineStream : offlineSessionInfo.offlineStreams) {
+        int32_t id = offlineStream.id;
+        if (mInputStream != nullptr && id == mInputStream->getId()) {
+            inputStream = mInputStream;
+        } else {
+            offlineStreamSet.add(id, mOutputStreams.get(id));
+        }
+    }
 
-    // 4. Delete unneeded streams
-    // 5. Verify Camera3Device is in unconfigured state
+    // TODO: check if we need to lock before copying states
+    //       though technically no other thread should be talking to Camera3Device at this point
+    Camera3OfflineStates offlineStates(
+            mTagMonitor, mVendorTagId, mUseHalBufManager, mNeedFixupMonochromeTags,
+            mUsePartialResult, mNumPartialResults, mNextResultFrameNumber,
+            mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
+            mNextShutterFrameNumber, mNextReprocessShutterFrameNumber,
+            mNextZslStillShutterFrameNumber, mDeviceInfo, mPhysicalDeviceInfoMap,
+            mDistortionMappers, mZoomRatioMappers);
+
+    *session = new Camera3OfflineSession(mId, inputStream, offlineStreamSet,
+            std::move(bufferRecords), offlineReqs, offlineStates, offlineSession);
+
+    // Delete all streams that have been transferred to the offline session
+    Mutex::Autolock l(mLock);
+    for (auto offlineStream : offlineSessionInfo.offlineStreams) {
+        int32_t id = offlineStream.id;
+        if (mInputStream != nullptr && id == mInputStream->getId()) {
+            mInputStream.clear();
+        } else {
+            mOutputStreams.remove(id);
+        }
+    }
+
+    // disconnect all other streams and switch to UNCONFIGURED state
+    if (mInputStream != nullptr) {
+        ret = mInputStream->disconnect();
+        if (ret != OK) {
+            SET_ERR_L("disconnect input stream failed!");
+            return UNKNOWN_ERROR;
+        }
+    }
+
+    for (auto streamId : mOutputStreams.getStreamIds()) {
+        sp<Camera3StreamInterface> stream = mOutputStreams.get(streamId);
+        ret = stream->disconnect();
+        if (ret != OK) {
+            SET_ERR_L("disconnect output stream %d failed!", streamId);
+            return UNKNOWN_ERROR;
+        }
+    }
+
+    mInputStream.clear();
+    mOutputStreams.clear();
+    mNeedConfig = true;
+    internalUpdateStatusLocked(STATUS_UNCONFIGURED);
+    mOperatingMode = NO_MODE;
+    mIsConstrainedHighSpeedConfiguration = false;
+
     return OK;
+    // To be done by CameraDeviceClient/Camera3OfflineSession:
+    // register the offline client with the camera service
+    // set up result-passing threads etc.
+    // initialize the offline session so HAL can start sending callbacks to it (result FMQ)
+    // TODO: check how many onIdle callbacks will be sent
+    // Java side needs to make sure the CameraCaptureSession is properly closed
+}
+
+void Camera3Device::getOfflineStreamIds(std::vector<int> *offlineStreamIds) {
+    ATRACE_CALL();
+
+    if (offlineStreamIds == nullptr) {
+        return;
+    }
+
+    Mutex::Autolock il(mInterfaceLock);
+
+    auto streamIds = mOutputStreams.getStreamIds();
+    bool hasInputStream = mInputStream != nullptr;
+    if (hasInputStream && mInputStream->getOfflineProcessingSupport()) {
+        offlineStreamIds->push_back(mInputStream->getId());
+    }
+
+    for (const auto & streamId : streamIds) {
+        sp<camera3::Camera3OutputStreamInterface> stream = mOutputStreams.get(streamId);
+        // Streams that use the camera buffer manager are currently not supported in
+        // offline mode
+        if (stream->getOfflineProcessingSupport() &&
+                (stream->getStreamSetId() == CAMERA3_STREAM_SET_ID_INVALID)) {
+            offlineStreamIds->push_back(streamId);
+        }
+    }
 }
 
 }; // namespace android
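
For context, here is a minimal sketch (not part of the patch itself) of how a caller such as CameraDeviceClient might drive the offline-switch path implemented above. The helper name, the device/listener variables, and the choice to keep every offline-capable stream are assumptions for illustration; only getOfflineStreamIds(), switchToOffline(), and the initialize() entry point (assuming it is exposed on the base interface, as the override in Camera3OfflineSession.h below suggests) come from this change.

    // Illustrative sketch only; not part of this patch.
    static status_t switchDeviceToOffline(const sp<Camera3Device>& device,
            const wp<NotificationListener>& listener,
            /*out*/ sp<CameraOfflineSessionBase>* session) {
        // Ask the device which streams can be moved to an offline session.
        std::vector<int> offlineStreamIds;
        device->getOfflineStreamIds(&offlineStreamIds);

        // Simplifying assumption for this sketch: keep every offline-capable stream.
        std::vector<int32_t> streamsToKeep(offlineStreamIds.begin(), offlineStreamIds.end());

        status_t res = device->switchToOffline(streamsToKeep, session);
        if (res != OK || *session == nullptr) {
            return (res != OK) ? res : UNKNOWN_ERROR;
        }
        // The returned session still has to be initialized before the HAL can push results to it.
        return (*session)->initialize(listener);
    }
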
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index eabc44d..7279baf 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -43,9 +43,14 @@
 #include <camera/CaptureResult.h>
 
 #include "common/CameraDeviceBase.h"
+#include "device3/BufferUtils.h"
 #include "device3/StatusTracker.h"
 #include "device3/Camera3BufferManager.h"
 #include "device3/DistortionMapper.h"
+#include "device3/ZoomRatioMapper.h"
+#include "device3/InFlightRequest.h"
+#include "device3/Camera3OutputInterface.h"
+#include "device3/Camera3OfflineSession.h"
 #include "utils/TagMonitor.h"
 #include "utils/LatencyHistogram.h"
 #include <camera_metadata_hidden.h>
@@ -68,7 +73,11 @@
  */
 class Camera3Device :
             public CameraDeviceBase,
-            virtual public hardware::camera::device::V3_5::ICameraDeviceCallback {
+            virtual public hardware::camera::device::V3_5::ICameraDeviceCallback,
+            public camera3::SetErrorInterface,
+            public camera3::InflightRequestUpdateInterface,
+            public camera3::RequestBufferInterface,
+            public camera3::FlushBufferInterface {
   public:
 
     explicit Camera3Device(const String8& id);
@@ -141,6 +150,8 @@
     status_t getInputBufferProducer(
             sp<IGraphicBufferProducer> *producer) override;
 
+    void getOfflineStreamIds(std::vector<int> *offlineStreamIds) override;
+
     status_t createDefaultRequest(int templateId, CameraMetadata *request) override;
 
     // Transitions to the idle state on success
@@ -200,6 +211,16 @@
     status_t switchToOffline(const std::vector<int32_t>& streamsToKeep,
             /*out*/ sp<CameraOfflineSessionBase>* session) override;
 
+    // RequestBufferInterface
+    bool startRequestBuffer() override;
+    void endRequestBuffer() override;
+    nsecs_t getWaitDuration() override;
+
+    // FlushBufferInterface
+    void getInflightBufferKeys(std::vector<std::pair<int32_t, int32_t>>* out) override;
+    void getInflightRequestBufferKeys(std::vector<uint64_t>* out) override;
+    std::vector<sp<camera3::Camera3StreamInterface>> getAllStreams() override;
+
     /**
      * Helper functions to map between framework and HIDL values
      */
@@ -212,8 +233,6 @@
     // Returns a negative error code if the passed-in operation mode is not valid.
     static status_t mapToStreamConfigurationMode(camera3_stream_configuration_mode_t operationMode,
             /*out*/ hardware::camera::device::V3_2::StreamConfigurationMode *mode);
-    static camera3_buffer_status_t mapHidlBufferStatus(
-            hardware::camera::device::V3_2::BufferStatus status);
     static int mapToFrameworkFormat(hardware::graphics::common::V1_0::PixelFormat pixelFormat);
     static android_dataspace mapToFrameworkDataspace(
             hardware::camera::device::V3_2::DataspaceFlags);
@@ -228,7 +247,6 @@
 
     // internal typedefs
     using RequestMetadataQueue = hardware::MessageQueue<uint8_t, hardware::kSynchronizedReadWrite>;
-    using ResultMetadataQueue  = hardware::MessageQueue<uint8_t, hardware::kSynchronizedReadWrite>;
 
     static const size_t        kDumpLockAttempts  = 10;
     static const size_t        kDumpSleepDuration = 100000; // 0.10 sec
@@ -282,7 +300,8 @@
      * Adapter for legacy HAL / HIDL HAL interface calls; calls either into legacy HALv3 or the
      * HIDL HALv3 interfaces.
      */
-    class HalInterface : public camera3::Camera3StreamBufferFreedListener {
+    class HalInterface : public camera3::Camera3StreamBufferFreedListener,
+            public camera3::BufferRecordsInterface {
       public:
         HalInterface(sp<hardware::camera::device::V3_2::ICameraDeviceSession> &session,
                      std::shared_ptr<RequestMetadataQueue> queue,
@@ -320,27 +339,31 @@
         bool isReconfigurationRequired(CameraMetadata& oldSessionParams,
                 CameraMetadata& newSessionParams);
 
+        // Upon successful return, HalInterface will return buffer maps needed for offline
+        // processing, and clear all its internal buffer maps.
         status_t switchToOffline(
                 const std::vector<int32_t>& streamsToKeep,
                 /*out*/hardware::camera::device::V3_6::CameraOfflineSessionInfo* offlineSessionInfo,
-                /*out*/sp<hardware::camera::device::V3_6::ICameraOfflineSession>* offlineSession);
+                /*out*/sp<hardware::camera::device::V3_6::ICameraOfflineSession>* offlineSession,
+                /*out*/camera3::BufferRecords* bufferRecords);
 
-        // method to extract buffer's unique ID
-        // return pair of (newlySeenBuffer?, bufferId)
-        std::pair<bool, uint64_t> getBufferId(const buffer_handle_t& buf, int streamId);
+        /////////////////////////////////////////////////////////////////////
+        // Implements BufferRecordsInterface
 
-        // Find a buffer_handle_t based on frame number and stream ID
+        std::pair<bool, uint64_t> getBufferId(
+                const buffer_handle_t& buf, int streamId) override;
+
         status_t popInflightBuffer(int32_t frameNumber, int32_t streamId,
-                /*out*/ buffer_handle_t **buffer);
+                /*out*/ buffer_handle_t **buffer) override;
 
-        // Register a bufId (streamId, buffer_handle_t) to inflight request buffer
         status_t pushInflightRequestBuffer(
-                uint64_t bufferId, buffer_handle_t* buf, int32_t streamId);
+                uint64_t bufferId, buffer_handle_t* buf, int32_t streamId) override;
 
-        // Find a buffer_handle_t based on bufferId
         status_t popInflightRequestBuffer(uint64_t bufferId,
                 /*out*/ buffer_handle_t** buffer,
-                /*optional out*/ int32_t* streamId = nullptr);
+                /*optional out*/ int32_t* streamId = nullptr) override;
+
+        /////////////////////////////////////////////////////////////////////
 
         // Get a vector of (frameNumber, streamId) pair of currently inflight
         // buffers
@@ -351,7 +374,6 @@
 
         void onStreamReConfigured(int streamId);
 
-        static const uint64_t BUFFER_ID_NO_BUFFER = 0;
       private:
         // Always valid
         sp<hardware::camera::device::V3_2::ICameraDeviceSession> mHidlSession;
@@ -366,8 +388,6 @@
 
         std::shared_ptr<RequestMetadataQueue> mRequestMetadataQueue;
 
-        std::mutex mInflightLock;
-
         // The output HIDL request still depends on input camera3_capture_request_t
         // Do not free input camera3_capture_request_t before output HIDL request
         status_t wrapAsHidlRequest(camera3_capture_request_t* in,
@@ -381,55 +401,20 @@
         // Pop inflight buffers based on pairs of (frameNumber,streamId)
         void popInflightBuffers(const std::vector<std::pair<int32_t, int32_t>>& buffers);
 
-        // Cache of buffer handles keyed off (frameNumber << 32 | streamId)
-        std::unordered_map<uint64_t, buffer_handle_t*> mInflightBufferMap;
+        // Return true if the input caches match what we have; otherwise false
+        bool verifyBufferIds(int32_t streamId, std::vector<uint64_t>& inBufIds);
 
         // Delete and optionally close native handles and clear the input vector afterward
         static void cleanupNativeHandles(
                 std::vector<native_handle_t*> *handles, bool closeFd = false);
 
-        struct BufferHasher {
-            size_t operator()(const buffer_handle_t& buf) const {
-                if (buf == nullptr)
-                    return 0;
-
-                size_t result = 1;
-                result = 31 * result + buf->numFds;
-                for (int i = 0; i < buf->numFds; i++) {
-                    result = 31 * result + buf->data[i];
-                }
-                return result;
-            }
-        };
-
-        struct BufferComparator {
-            bool operator()(const buffer_handle_t& buf1, const buffer_handle_t& buf2) const {
-                if (buf1->numFds == buf2->numFds) {
-                    for (int i = 0; i < buf1->numFds; i++) {
-                        if (buf1->data[i] != buf2->data[i]) {
-                            return false;
-                        }
-                    }
-                    return true;
-                }
-                return false;
-            }
-        };
-
-        std::mutex mBufferIdMapLock; // protecting mBufferIdMaps and mNextBufferId
-        typedef std::unordered_map<const buffer_handle_t, uint64_t,
-                BufferHasher, BufferComparator> BufferIdMap;
-        // stream ID -> per stream buffer ID map
-        std::unordered_map<int, BufferIdMap> mBufferIdMaps;
-        uint64_t mNextBufferId = 1; // 0 means no buffer
-
         virtual void onBufferFreed(int streamId, const native_handle_t* handle) override;
 
+        std::mutex mFreedBuffersLock;
         std::vector<std::pair<int, uint64_t>> mFreedBuffers;
 
-        // Buffers given to HAL through requestStreamBuffer API
-        std::mutex mRequestedBuffersLock;
-        std::unordered_map<uint64_t, std::pair<int32_t, buffer_handle_t*>> mRequestedBuffers;
+        // Keep track of buffer cache and inflight buffer records
+        camera3::BufferRecords mBufferRecords;
 
         uint32_t mNextStreamConfigCounter = 1;
 
@@ -442,6 +427,7 @@
     sp<HalInterface> mInterface;
 
     CameraMetadata             mDeviceInfo;
+    bool                       mSupportNativeZoomRatio;
     std::unordered_map<std::string, CameraMetadata> mPhysicalDeviceInfoMap;
 
     CameraMetadata             mRequestTemplateCache[CAMERA3_TEMPLATE_COUNT];
@@ -471,24 +457,7 @@
     // Tracking cause of fatal errors when in STATUS_ERROR
     String8                    mErrorCause;
 
-    // Synchronized mapping of stream IDs to stream instances
-    class StreamSet {
-      public:
-        status_t add(int streamId, sp<camera3::Camera3OutputStreamInterface>);
-        ssize_t remove(int streamId);
-        sp<camera3::Camera3OutputStreamInterface> get(int streamId);
-        // get by (underlying) vector index
-        sp<camera3::Camera3OutputStreamInterface> operator[] (size_t index);
-        size_t size() const;
-        std::vector<int> getStreamIds();
-        void clear();
-
-      private:
-        mutable std::mutex mLock;
-        KeyedVector<int, sp<camera3::Camera3OutputStreamInterface>> mData;
-    };
-
-    StreamSet                  mOutputStreams;
+    camera3::StreamSet         mOutputStreams;
     sp<camera3::Camera3Stream> mInputStream;
     int                        mNextStreamId;
     bool                       mNeedConfig;
@@ -577,15 +546,6 @@
             const hardware::hidl_vec<
                     hardware::camera::device::V3_2::StreamBuffer>& buffers) override;
 
-    // Handle one capture result. Assume that mProcessCaptureResultLock is held.
-    void processOneCaptureResultLocked(
-            const hardware::camera::device::V3_2::CaptureResult& result,
-            const hardware::hidl_vec<
-            hardware::camera::device::V3_4::PhysicalCameraMetadata> physicalCameraMetadata);
-    status_t readOneCameraMetadataLocked(uint64_t fmqResultSize,
-            hardware::camera::device::V3_2::CameraMetadata& resultMetadata,
-            const hardware::camera::device::V3_2::CameraMetadata& result);
-
     // Handle one notify message
     void notify(const hardware::camera::device::V3_2::NotifyMsg& msg);
 
@@ -708,11 +668,20 @@
      * error message to indicate why. Only the first call's message will be
      * used. The message is also sent to the log.
      */
-    void               setErrorState(const char *fmt, ...);
+    void               setErrorState(const char *fmt, ...) override;
+    void               setErrorStateLocked(const char *fmt, ...) override;
     void               setErrorStateV(const char *fmt, va_list args);
-    void               setErrorStateLocked(const char *fmt, ...);
     void               setErrorStateLockedV(const char *fmt, va_list args);
 
+    /////////////////////////////////////////////////////////////////////
+    // Implements InflightRequestUpdateInterface
+
+    void onInflightEntryRemovedLocked(nsecs_t duration) override;
+    void checkInflightMapLengthLocked() override;
+    void onInflightMapFlushedLocked() override;
+
+    /////////////////////////////////////////////////////////////////////
+
     /**
      * Debugging trylock/spin method
      * Try to acquire a lock a few times with sleeps between before giving up.
@@ -720,12 +689,6 @@
     bool               tryLockSpinRightRound(Mutex& lock);
 
     /**
-     * Helper function to determine if an input size for implementation defined
-     * format is supported.
-     */
-    bool isOpaqueInputSizeSupported(uint32_t width, uint32_t height);
-
-    /**
      * Helper function to get the largest Jpeg resolution (in area)
      * Return Size(0, 0) if static metadata is invalid
      */
@@ -858,7 +821,8 @@
         status_t switchToOffline(
                 const std::vector<int32_t>& streamsToKeep,
                 /*out*/hardware::camera::device::V3_6::CameraOfflineSessionInfo* offlineSessionInfo,
-                /*out*/sp<hardware::camera::device::V3_6::ICameraOfflineSession>* offlineSession);
+                /*out*/sp<hardware::camera::device::V3_6::ICameraOfflineSession>* offlineSession,
+                /*out*/camera3::BufferRecords* bufferRecords);
 
       protected:
 
@@ -980,6 +944,7 @@
 
         sp<CaptureRequest> mPrevRequest;
         int32_t            mPrevTriggers;
+        std::set<std::string> mPrevCameraIdsWithZoom;
 
         uint32_t           mFrameNumber;
 
@@ -1018,120 +983,17 @@
     /**
      * In-flight queue for tracking completion of capture requests.
      */
+    std::mutex                    mInFlightLock;
+    camera3::InFlightRequestMap   mInFlightMap;
+    nsecs_t                       mExpectedInflightDuration = 0;
+    // End of mInFlightLock protection scope
 
-    struct InFlightRequest {
-        // Set by notify() SHUTTER call.
-        nsecs_t shutterTimestamp;
-        // Set by process_capture_result().
-        nsecs_t sensorTimestamp;
-        int     requestStatus;
-        // Set by process_capture_result call with valid metadata
-        bool    haveResultMetadata;
-        // Decremented by calls to process_capture_result with valid output
-        // and input buffers
-        int     numBuffersLeft;
-        CaptureResultExtras resultExtras;
-        // If this request has any input buffer
-        bool hasInputBuffer;
-
-        // The last metadata that framework receives from HAL and
-        // not yet send out because the shutter event hasn't arrived.
-        // It's added by process_capture_result and sent when framework
-        // receives the shutter event.
-        CameraMetadata pendingMetadata;
-
-        // The metadata of the partial results that framework receives from HAL so far
-        // and has sent out.
-        CameraMetadata collectedPartialResult;
-
-        // Buffers are added by process_capture_result when output buffers
-        // return from HAL but framework has not yet received the shutter
-        // event. They will be returned to the streams when framework receives
-        // the shutter event.
-        Vector<camera3_stream_buffer_t> pendingOutputBuffers;
-
-        // Whether this inflight request's shutter and result callback are to be
-        // called. The policy is that if the request is the last one in the constrained
-        // high speed recording request list, this flag will be true. If the request list
-        // is not for constrained high speed recording, this flag will also be true.
-        bool hasCallback;
-
-        // Maximum expected frame duration for this request.
-        // For manual captures, equal to the max of requested exposure time and frame duration
-        // For auto-exposure modes, equal to 1/(lower end of target FPS range)
-        nsecs_t maxExpectedDuration;
-
-        // Whether the result metadata for this request is to be skipped. The
-        // result metadata should be skipped in the case of
-        // REQUEST/RESULT error.
-        bool skipResultMetadata;
-
-        // The physical camera ids being requested.
-        std::set<String8> physicalCameraIds;
-
-        // Map of physicalCameraId <-> Metadata
-        std::vector<PhysicalCaptureResultInfo> physicalMetadatas;
-
-        // Indicates a still capture request.
-        bool stillCapture;
-
-        // Indicates a ZSL capture request
-        bool zslCapture;
-
-        // What shared surfaces an output should go to
-        SurfaceMap outputSurfaces;
-
-        // Default constructor needed by KeyedVector
-        InFlightRequest() :
-                shutterTimestamp(0),
-                sensorTimestamp(0),
-                requestStatus(OK),
-                haveResultMetadata(false),
-                numBuffersLeft(0),
-                hasInputBuffer(false),
-                hasCallback(true),
-                maxExpectedDuration(kDefaultExpectedDuration),
-                skipResultMetadata(false),
-                stillCapture(false),
-                zslCapture(false) {
-        }
-
-        InFlightRequest(int numBuffers, CaptureResultExtras extras, bool hasInput,
-                bool hasAppCallback, nsecs_t maxDuration,
-                const std::set<String8>& physicalCameraIdSet, bool isStillCapture,
-                bool isZslCapture,
-                const SurfaceMap& outSurfaces = SurfaceMap{}) :
-                shutterTimestamp(0),
-                sensorTimestamp(0),
-                requestStatus(OK),
-                haveResultMetadata(false),
-                numBuffersLeft(numBuffers),
-                resultExtras(extras),
-                hasInputBuffer(hasInput),
-                hasCallback(hasAppCallback),
-                maxExpectedDuration(maxDuration),
-                skipResultMetadata(false),
-                physicalCameraIds(physicalCameraIdSet),
-                stillCapture(isStillCapture),
-                zslCapture(isZslCapture),
-                outputSurfaces(outSurfaces) {
-        }
-    };
-
-    // Map from frame number to the in-flight request state
-    typedef KeyedVector<uint32_t, InFlightRequest> InFlightMap;
-
-
-    Mutex                  mInFlightLock; // Protects mInFlightMap and
-                                          // mExpectedInflightDuration
-    InFlightMap            mInFlightMap;
-    nsecs_t                mExpectedInflightDuration = 0;
-    int                    mInFlightStatusId;
+    int mInFlightStatusId; // const after initialize
 
     status_t registerInFlight(uint32_t frameNumber,
             int32_t numBuffers, CaptureResultExtras resultExtras, bool hasInput,
             bool callback, nsecs_t maxExpectedDuration, std::set<String8>& physicalCameraIds,
-            bool isStillCapture, bool isZslCapture,
+            bool isStillCapture, bool isZslCapture, const std::set<std::string>& cameraIdsWithZoom,
             const SurfaceMap& outputSurfaces);
 
     /**
@@ -1204,7 +1066,7 @@
      */
 
     // Lock for output side of device
-    Mutex                  mOutputLock;
+    std::mutex             mOutputLock;
 
     /**** Scope for mOutputLock ****/
     // the minimal frame number of the next non-reprocess result
@@ -1219,60 +1081,18 @@
     uint32_t               mNextReprocessShutterFrameNumber;
     // the minimal frame number of the next ZSL still capture shutter
     uint32_t               mNextZslStillShutterFrameNumber;
-    List<CaptureResult>   mResultQueue;
-    Condition              mResultSignal;
-    wp<NotificationListener>  mListener;
+    List<CaptureResult>    mResultQueue;
+    std::condition_variable  mResultSignal;
+    wp<NotificationListener> mListener;
 
     /**** End scope for mOutputLock ****/
 
-    /**
-     * Callback functions from HAL device
-     */
-    void processCaptureResult(const camera3_capture_result *result);
-
-    void notify(const camera3_notify_msg *msg);
-
-    // Specific notify handlers
-    void notifyError(const camera3_error_msg_t &msg,
-            sp<NotificationListener> listener);
-    void notifyShutter(const camera3_shutter_msg_t &msg,
-            sp<NotificationListener> listener);
-
-    // helper function to return the output buffers to the streams.
-    void returnOutputBuffers(const camera3_stream_buffer_t *outputBuffers,
-            size_t numBuffers, nsecs_t timestamp, bool timestampIncreasing = true,
-            // The following arguments are only meant for surface sharing use case
-            const SurfaceMap& outputSurfaces = SurfaceMap{},
-            // Used to send buffer error callback when failing to return buffer
-            const CaptureResultExtras &resultExtras = CaptureResultExtras{});
-
-    // Send a partial capture result.
-    void sendPartialCaptureResult(const camera_metadata_t * partialResult,
-            const CaptureResultExtras &resultExtras, uint32_t frameNumber);
-
-    // Send a total capture result given the pending metadata and result extras,
-    // partial results, and the frame number to the result queue.
-    void sendCaptureResult(CameraMetadata &pendingMetadata,
-            CaptureResultExtras &resultExtras,
-            CameraMetadata &collectedPartialResult, uint32_t frameNumber,
-            bool reprocess, bool zslStillCapture,
-            const std::vector<PhysicalCaptureResultInfo>& physicalMetadatas);
-
-    bool isLastFullResult(const InFlightRequest& inFlightRequest);
-
-    // Insert the result to the result queue after updating frame number and overriding AE
-    // trigger cancel.
-    // mOutputLock must be held when calling this function.
-    void insertResultLocked(CaptureResult *result, uint32_t frameNumber);
-
     /**** Scope for mInFlightLock ****/
 
     // Remove the in-flight map entry of the given index from mInFlightMap.
     // It must only be called with mInFlightLock held.
     void removeInFlightMapEntryLocked(int idx);
-    // Remove the in-flight request of the given index from mInFlightMap
-    // if it's no longer needed. It must only be called with mInFlightLock held.
-    void removeInFlightRequestIfReadyLocked(int idx);
+
     // Remove all in-flight requests and return all buffers.
     // This is used after HAL interface is closed to cleanup any request/buffers
     // not returned by HAL.
@@ -1288,6 +1108,11 @@
     // logical camera and its physical subcameras.
     std::unordered_map<std::string, camera3::DistortionMapper> mDistortionMappers;
 
+    /**
+     * Zoom ratio mapper support
+     */
+    std::unordered_map<std::string, camera3::ZoomRatioMapper> mZoomRatioMappers;
+
     // Debug tracker for metadata tag value changes
     // - Enabled with the -m <taglist> option to dumpsys, such as
     //   dumpsys -m android.control.aeState,android.control.aeMode
@@ -1365,6 +1190,10 @@
         void onSubmittingRequest();
         void onRequestThreadPaused();
 
+        // Events triggered by a successful switchToOffline call
+        // Return true if there is no ongoing requestBuffer call.
+        bool onSwitchToOfflineSuccess();
+
       private:
         void notifyTrackerLocked(bool active);
 
@@ -1378,6 +1207,7 @@
         bool mRequestThreadPaused = true;
         bool mInflightMapEmpty = true;
         bool mRequestBufferOngoing = false;
+        bool mSwitchedToOffline = false;
 
         wp<camera3::StatusTracker> mStatusTracker;
         int  mRequestBufferStatusId;
@@ -1385,7 +1215,6 @@
 
     // Fix up result metadata for monochrome camera.
     bool mNeedFixupMonochromeTags;
-    status_t fixupMonochromeTags(const CameraMetadata& deviceInfo, CameraMetadata& resultMetadata);
 
     // Whether HAL supports offline processing capability.
     bool mSupportOfflineProcessing = false;
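
As background (not part of the patch): the per-stream buffer-ID maps and the inflight-buffer map removed from HalInterface above move into camera3::BufferRecords (device3/BufferUtils.h, not shown in this diff). The removed comment described the conventional (frameNumber << 32 | streamId) key packing; the sketch below shows that packing purely for illustration, and the real BufferRecords implementation may differ.

    // Illustrative only: pack/unpack helpers matching the removed
    // "(frameNumber << 32 | streamId)" comment; not part of this patch.
    inline uint64_t toInflightKey(int32_t frameNumber, int32_t streamId) {
        return (static_cast<uint64_t>(frameNumber) << 32) |
               static_cast<uint32_t>(streamId);
    }
    inline void fromInflightKey(uint64_t key, int32_t* frameNumber, int32_t* streamId) {
        *frameNumber = static_cast<int32_t>(key >> 32);
        *streamId = static_cast<int32_t>(key & 0xFFFFFFFFu);
    }

Packing both IDs into one 64-bit key keeps the inflight lookup a single unordered_map access, which is presumably why the original HalInterface cache was keyed this way.
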
diff --git a/services/camera/libcameraservice/device3/Camera3OfflineSession.cpp b/services/camera/libcameraservice/device3/Camera3OfflineSession.cpp
index dc5cbb3..0f05632 100644
--- a/services/camera/libcameraservice/device3/Camera3OfflineSession.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OfflineSession.cpp
@@ -29,90 +29,435 @@
 
 #include <utils/Trace.h>
 
+#include <android/hardware/camera2/ICameraDeviceCallbacks.h>
+
 #include "device3/Camera3OfflineSession.h"
 #include "device3/Camera3OutputStream.h"
 #include "device3/Camera3InputStream.h"
 #include "device3/Camera3SharedOutputStream.h"
+#include "utils/CameraTraces.h"
 
 using namespace android::camera3;
 using namespace android::hardware::camera;
 
 namespace android {
 
-Camera3OfflineSession::Camera3OfflineSession(const String8 &id):
-        mId(id)
-{
+Camera3OfflineSession::Camera3OfflineSession(const String8 &id,
+        const sp<camera3::Camera3Stream>& inputStream,
+        const camera3::StreamSet& offlineStreamSet,
+        camera3::BufferRecords&& bufferRecords,
+        const camera3::InFlightRequestMap& offlineReqs,
+        const Camera3OfflineStates& offlineStates,
+        sp<hardware::camera::device::V3_6::ICameraOfflineSession> offlineSession) :
+        mId(id),
+        mInputStream(inputStream),
+        mOutputStreams(offlineStreamSet),
+        mBufferRecords(std::move(bufferRecords)),
+        mOfflineReqs(offlineReqs),
+        mSession(offlineSession),
+        mTagMonitor(offlineStates.mTagMonitor),
+        mVendorTagId(offlineStates.mVendorTagId),
+        mUseHalBufManager(offlineStates.mUseHalBufManager),
+        mNeedFixupMonochromeTags(offlineStates.mNeedFixupMonochromeTags),
+        mUsePartialResult(offlineStates.mUsePartialResult),
+        mNumPartialResults(offlineStates.mNumPartialResults),
+        mNextResultFrameNumber(offlineStates.mNextResultFrameNumber),
+        mNextReprocessResultFrameNumber(offlineStates.mNextReprocessResultFrameNumber),
+        mNextZslStillResultFrameNumber(offlineStates.mNextZslStillResultFrameNumber),
+        mNextShutterFrameNumber(offlineStates.mNextShutterFrameNumber),
+        mNextReprocessShutterFrameNumber(offlineStates.mNextReprocessShutterFrameNumber),
+        mNextZslStillShutterFrameNumber(offlineStates.mNextZslStillShutterFrameNumber),
+        mDeviceInfo(offlineStates.mDeviceInfo),
+        mPhysicalDeviceInfoMap(offlineStates.mPhysicalDeviceInfoMap),
+        mDistortionMappers(offlineStates.mDistortionMappers),
+        mZoomRatioMappers(offlineStates.mZoomRatioMappers),
+        mStatus(STATUS_UNINITIALIZED) {
     ATRACE_CALL();
     ALOGV("%s: Created offline session for camera %s", __FUNCTION__, mId.string());
 }
 
-Camera3OfflineSession::~Camera3OfflineSession()
-{
+Camera3OfflineSession::~Camera3OfflineSession() {
     ATRACE_CALL();
     ALOGV("%s: Tearing down offline session for camera id %s", __FUNCTION__, mId.string());
+    disconnectImpl();
 }
 
 const String8& Camera3OfflineSession::getId() const {
     return mId;
 }
 
-status_t Camera3OfflineSession::initialize(
-        sp<hardware::camera::device::V3_6::ICameraOfflineSession> /*hidlSession*/) {
+status_t Camera3OfflineSession::initialize(wp<NotificationListener> listener) {
     ATRACE_CALL();
+
+    if (mSession == nullptr) {
+        ALOGE("%s: HIDL session is null!", __FUNCTION__);
+        return DEAD_OBJECT;
+    }
+
+    {
+        std::lock_guard<std::mutex> lock(mLock);
+
+        mListener = listener;
+
+        // setup result FMQ
+        std::unique_ptr<ResultMetadataQueue>& resQueue = mResultMetadataQueue;
+        auto resultQueueRet = mSession->getCaptureResultMetadataQueue(
+            [&resQueue](const auto& descriptor) {
+                resQueue = std::make_unique<ResultMetadataQueue>(descriptor);
+                if (!resQueue->isValid() || resQueue->availableToWrite() <= 0) {
+                    ALOGE("HAL returns empty result metadata fmq, not use it");
+                    resQueue = nullptr;
+                    // Don't use resQueue onwards.
+                }
+            });
+        if (!resultQueueRet.isOk()) {
+            ALOGE("Transaction error when getting result metadata queue from camera session: %s",
+                    resultQueueRet.description().c_str());
+            return DEAD_OBJECT;
+        }
+        mStatus = STATUS_ACTIVE;
+    }
+
+    mSession->setCallback(this);
+
     return OK;
 }
 
 status_t Camera3OfflineSession::dump(int /*fd*/) {
     ATRACE_CALL();
-    return OK;
-}
-
-status_t Camera3OfflineSession::abort() {
-    ATRACE_CALL();
+    std::lock_guard<std::mutex> il(mInterfaceLock);
     return OK;
 }
 
 status_t Camera3OfflineSession::disconnect() {
     ATRACE_CALL();
+    return disconnectImpl();
+}
+
+status_t Camera3OfflineSession::disconnectImpl() {
+    ATRACE_CALL();
+    std::lock_guard<std::mutex> il(mInterfaceLock);
+
+    sp<NotificationListener> listener;
+    {
+        std::lock_guard<std::mutex> lock(mLock);
+        if (mStatus == STATUS_CLOSED) {
+            return OK; // don't close twice
+        } else if (mStatus == STATUS_ERROR) {
+            ALOGE("%s: offline session %s shutting down in error state",
+                    __FUNCTION__, mId.string());
+        }
+        listener = mListener.promote();
+    }
+
+    ALOGV("%s: E", __FUNCTION__);
+
+    {
+        std::lock_guard<std::mutex> lock(mRequestBufferInterfaceLock);
+        mAllowRequestBuffer = false;
+    }
+
+    std::vector<wp<Camera3StreamInterface>> streams;
+    streams.reserve(mOutputStreams.size() + (mInputStream != nullptr ? 1 : 0));
+    for (size_t i = 0; i < mOutputStreams.size(); i++) {
+        streams.push_back(mOutputStreams[i]);
+    }
+    if (mInputStream != nullptr) {
+        streams.push_back(mInputStream);
+    }
+
+    mSession->close();
+
+    FlushInflightReqStates states {
+        mId, mOfflineReqsLock, mOfflineReqs, mUseHalBufManager,
+        listener, *this, mBufferRecords, *this};
+
+    camera3::flushInflightRequests(states);
+
+    {
+        std::lock_guard<std::mutex> lock(mLock);
+        mSession.clear();
+        mOutputStreams.clear();
+        mInputStream.clear();
+        mStatus = STATUS_CLOSED;
+    }
+
+    for (auto& weakStream : streams) {
+        sp<Camera3StreamInterface> stream = weakStream.promote();
+        if (stream != nullptr) {
+            ALOGE("%s: Stream %d leaked! strong reference (%d)!",
+                    __FUNCTION__, stream->getId(), stream->getStrongCount() - 1);
+        }
+    }
+
+    ALOGV("%s: X", __FUNCTION__);
     return OK;
 }
 
-status_t Camera3OfflineSession::waitForNextFrame(nsecs_t /*timeout*/) {
+status_t Camera3OfflineSession::waitForNextFrame(nsecs_t timeout) {
     ATRACE_CALL();
+    std::unique_lock<std::mutex> lk(mOutputLock);
+
+    while (mResultQueue.empty()) {
+        auto st = mResultSignal.wait_for(lk, std::chrono::nanoseconds(timeout));
+        if (st == std::cv_status::timeout) {
+            return TIMED_OUT;
+        }
+    }
     return OK;
 }
 
-status_t Camera3OfflineSession::getNextResult(CaptureResult* /*frame*/) {
+status_t Camera3OfflineSession::getNextResult(CaptureResult* frame) {
     ATRACE_CALL();
+    std::lock_guard<std::mutex> l(mOutputLock);
+
+    if (mResultQueue.empty()) {
+        return NOT_ENOUGH_DATA;
+    }
+
+    if (frame == nullptr) {
+        ALOGE("%s: argument cannot be NULL", __FUNCTION__);
+        return BAD_VALUE;
+    }
+
+    CaptureResult &result = *(mResultQueue.begin());
+    frame->mResultExtras = result.mResultExtras;
+    frame->mMetadata.acquire(result.mMetadata);
+    frame->mPhysicalMetadatas = std::move(result.mPhysicalMetadatas);
+    mResultQueue.erase(mResultQueue.begin());
+
     return OK;
 }
 
 hardware::Return<void> Camera3OfflineSession::processCaptureResult_3_4(
         const hardware::hidl_vec<
-                hardware::camera::device::V3_4::CaptureResult>& /*results*/) {
+                hardware::camera::device::V3_4::CaptureResult>& results) {
+    sp<NotificationListener> listener;
+    {
+        std::lock_guard<std::mutex> lock(mLock);
+        if (mStatus != STATUS_ACTIVE) {
+            ALOGE("%s called in wrong state %d", __FUNCTION__, mStatus);
+            return hardware::Void();
+        }
+        listener = mListener.promote();
+    }
+
+    CaptureOutputStates states {
+        mId,
+        mOfflineReqsLock, mOfflineReqs,
+        mOutputLock, mResultQueue, mResultSignal,
+        mNextShutterFrameNumber,
+        mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
+        mNextResultFrameNumber,
+        mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
+        mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
+        mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
+        mResultMetadataQueue, mDistortionMappers, mZoomRatioMappers, mTagMonitor,
+        mInputStream, mOutputStreams, listener, *this, *this, mBufferRecords
+    };
+
+    std::lock_guard<std::mutex> lock(mProcessCaptureResultLock);
+    for (const auto& result : results) {
+        processOneCaptureResultLocked(states, result.v3_2, result.physicalCameraMetadata);
+    }
     return hardware::Void();
 }
 
 hardware::Return<void> Camera3OfflineSession::processCaptureResult(
         const hardware::hidl_vec<
-                hardware::camera::device::V3_2::CaptureResult>& /*results*/) {
+                hardware::camera::device::V3_2::CaptureResult>& results) {
+    // TODO: change impl to call into processCaptureResult_3_4 instead?
+    //       might need to figure out how to reduce copying though.
+    sp<NotificationListener> listener;
+    {
+        std::lock_guard<std::mutex> lock(mLock);
+        if (mStatus != STATUS_ACTIVE) {
+            ALOGE("%s called in wrong state %d", __FUNCTION__, mStatus);
+            return hardware::Void();
+        }
+        listener = mListener.promote();
+    }
+
+    hardware::hidl_vec<hardware::camera::device::V3_4::PhysicalCameraMetadata> noPhysMetadata;
+
+    CaptureOutputStates states {
+        mId,
+        mOfflineReqsLock, mOfflineReqs,
+        mOutputLock, mResultQueue, mResultSignal,
+        mNextShutterFrameNumber,
+        mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
+        mNextResultFrameNumber,
+        mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
+        mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
+        mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
+        mResultMetadataQueue, mDistortionMappers, mZoomRatioMappers, mTagMonitor,
+        mInputStream, mOutputStreams, listener, *this, *this, mBufferRecords
+    };
+
+    std::lock_guard<std::mutex> lock(mProcessCaptureResultLock);
+    for (const auto& result : results) {
+        processOneCaptureResultLocked(states, result, noPhysMetadata);
+    }
     return hardware::Void();
 }
 
 hardware::Return<void> Camera3OfflineSession::notify(
-        const hardware::hidl_vec<hardware::camera::device::V3_2::NotifyMsg>& /*msgs*/) {
+        const hardware::hidl_vec<hardware::camera::device::V3_2::NotifyMsg>& msgs) {
+    sp<NotificationListener> listener;
+    {
+        std::lock_guard<std::mutex> lock(mLock);
+        if (mStatus != STATUS_ACTIVE) {
+            ALOGE("%s called in wrong state %d", __FUNCTION__, mStatus);
+            return hardware::Void();
+        }
+        listener = mListener.promote();
+    }
+
+    CaptureOutputStates states {
+        mId,
+        mOfflineReqsLock, mOfflineReqs,
+        mOutputLock, mResultQueue, mResultSignal,
+        mNextShutterFrameNumber,
+        mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
+        mNextResultFrameNumber,
+        mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
+        mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
+        mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
+        mResultMetadataQueue, mDistortionMappers, mZoomRatioMappers, mTagMonitor,
+        mInputStream, mOutputStreams, listener, *this, *this, mBufferRecords
+    };
+    for (const auto& msg : msgs) {
+        camera3::notify(states, msg);
+    }
     return hardware::Void();
 }
 
 hardware::Return<void> Camera3OfflineSession::requestStreamBuffers(
-        const hardware::hidl_vec<hardware::camera::device::V3_5::BufferRequest>& /*bufReqs*/,
-        requestStreamBuffers_cb /*_hidl_cb*/) {
+        const hardware::hidl_vec<hardware::camera::device::V3_5::BufferRequest>& bufReqs,
+        requestStreamBuffers_cb _hidl_cb) {
+    {
+        std::lock_guard<std::mutex> lock(mLock);
+        if (mStatus != STATUS_ACTIVE) {
+            ALOGE("%s called in wrong state %d", __FUNCTION__, mStatus);
+            return hardware::Void();
+        }
+    }
+
+    RequestBufferStates states {
+        mId, mRequestBufferInterfaceLock, mUseHalBufManager, mOutputStreams,
+        *this, mBufferRecords, *this};
+    camera3::requestStreamBuffers(states, bufReqs, _hidl_cb);
     return hardware::Void();
 }
 
 hardware::Return<void> Camera3OfflineSession::returnStreamBuffers(
-        const hardware::hidl_vec<hardware::camera::device::V3_2::StreamBuffer>& /*buffers*/) {
+        const hardware::hidl_vec<hardware::camera::device::V3_2::StreamBuffer>& buffers) {
+    {
+        std::lock_guard<std::mutex> lock(mLock);
+        if (mStatus != STATUS_ACTIVE) {
+            ALOGE("%s called in wrong state %d", __FUNCTION__, mStatus);
+            return hardware::Void();
+        }
+    }
+
+    ReturnBufferStates states {
+        mId, mUseHalBufManager, mOutputStreams, mBufferRecords};
+    camera3::returnStreamBuffers(states, buffers);
     return hardware::Void();
 }
 
+void Camera3OfflineSession::setErrorState(const char *fmt, ...) {
+    ATRACE_CALL();
+    std::lock_guard<std::mutex> lock(mLock);
+    va_list args;
+    va_start(args, fmt);
+
+    setErrorStateLockedV(fmt, args);
+
+    va_end(args);
+
+    //FIXME: automatically disconnect here?
+}
+
+void Camera3OfflineSession::setErrorStateLocked(const char *fmt, ...) {
+    va_list args;
+    va_start(args, fmt);
+
+    setErrorStateLockedV(fmt, args);
+
+    va_end(args);
+}
+
+void Camera3OfflineSession::setErrorStateLockedV(const char *fmt, va_list args) {
+    // Print out all error messages to log
+    String8 errorCause = String8::formatV(fmt, args);
+    ALOGE("Camera %s: %s", mId.string(), errorCause.string());
+
+    // But only do error state transition steps for the first error
+    if (mStatus == STATUS_ERROR || mStatus == STATUS_UNINITIALIZED) return;
+
+    mErrorCause = errorCause;
+
+    mStatus = STATUS_ERROR;
+
+    // Notify upstream about a device error
+    sp<NotificationListener> listener = mListener.promote();
+    if (listener != NULL) {
+        listener->notifyError(hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DEVICE,
+                CaptureResultExtras());
+    }
+
+    // Save stack trace. View by dumping it later.
+    CameraTraces::saveTrace();
+}
+
+void Camera3OfflineSession::onInflightEntryRemovedLocked(nsecs_t /*duration*/) {
+    if (mOfflineReqs.size() == 0) {
+        std::lock_guard<std::mutex> lock(mRequestBufferInterfaceLock);
+        mAllowRequestBuffer = false;
+    }
+}
+
+void Camera3OfflineSession::checkInflightMapLengthLocked() {
+    // Intentionally empty impl.
+}
+
+void Camera3OfflineSession::onInflightMapFlushedLocked() {
+    // Intentionally empty impl.
+}
+
+bool Camera3OfflineSession::startRequestBuffer() {
+    return mAllowRequestBuffer;
+}
+
+void Camera3OfflineSession::endRequestBuffer() {
+    // Intentionally empty impl.
+}
+
+nsecs_t Camera3OfflineSession::getWaitDuration() {
+    const nsecs_t kBaseGetBufferWait = 3000000000; // 3 sec.
+    return kBaseGetBufferWait;
+}
+
+void Camera3OfflineSession::getInflightBufferKeys(std::vector<std::pair<int32_t, int32_t>>* out) {
+    mBufferRecords.getInflightBufferKeys(out);
+}
+
+void Camera3OfflineSession::getInflightRequestBufferKeys(std::vector<uint64_t>* out) {
+    mBufferRecords.getInflightRequestBufferKeys(out);
+}
+
+std::vector<sp<Camera3StreamInterface>> Camera3OfflineSession::getAllStreams() {
+    std::vector<sp<Camera3StreamInterface>> ret;
+    bool hasInputStream = mInputStream != nullptr;
+    ret.reserve(mOutputStreams.size() + ((hasInputStream) ? 1 : 0));
+    if (hasInputStream) {
+        ret.push_back(mInputStream);
+    }
+    for (size_t i = 0; i < mOutputStreams.size(); i++) {
+        ret.push_back(mOutputStreams[i]);
+    }
+    return ret;
+}
+
 }; // namespace android
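
For illustration only (not part of the patch): a rough sketch of how a client could drain capture results from the offline session via the waitForNextFrame()/getNextResult() pair implemented above. The helper name, the 100 ms poll interval, and the stop-on-timeout policy are assumptions, not anything this change prescribes.

    // Illustrative sketch only; not part of this patch.
    static void drainOfflineResults(const sp<Camera3OfflineSession>& session) {
        constexpr nsecs_t kWaitNs = 100 * 1000000LL; // 100 ms poll timeout (assumption)
        while (true) {
            status_t res = session->waitForNextFrame(kWaitNs);
            if (res == TIMED_OUT) {
                break; // for this sketch, assume no further results are coming
            }
            CaptureResult result;
            if (session->getNextResult(&result) != OK) {
                break;
            }
            // Consume result.mResultExtras / result.mMetadata / result.mPhysicalMetadatas here.
        }
    }
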
diff --git a/services/camera/libcameraservice/device3/Camera3OfflineSession.h b/services/camera/libcameraservice/device3/Camera3OfflineSession.h
index 30e8c4f..b6b8a7d 100644
--- a/services/camera/libcameraservice/device3/Camera3OfflineSession.h
+++ b/services/camera/libcameraservice/device3/Camera3OfflineSession.h
@@ -17,16 +17,23 @@
 #ifndef ANDROID_SERVERS_CAMERA3OFFLINESESSION_H
 #define ANDROID_SERVERS_CAMERA3OFFLINESESSION_H
 
+#include <memory>
+#include <mutex>
+
 #include <utils/String8.h>
 #include <utils/String16.h>
 
 #include <android/hardware/camera/device/3.6/ICameraOfflineSession.h>
+
 #include <fmq/MessageQueue.h>
 
 #include "common/CameraOfflineSessionBase.h"
 
 #include "device3/Camera3BufferManager.h"
 #include "device3/DistortionMapper.h"
+#include "device3/InFlightRequest.h"
+#include "device3/Camera3OutputUtils.h"
+#include "device3/ZoomRatioMapper.h"
 #include "utils/TagMonitor.h"
 #include "utils/LatencyHistogram.h"
 #include <camera_metadata_hidden.h>
@@ -41,26 +48,90 @@
 
 } // namespace camera3
 
+
+// An immutable struct containing general states that will be copied from Camera3Device to
+// Camera3OfflineSession
+struct Camera3OfflineStates {
+    Camera3OfflineStates(
+            const TagMonitor& tagMonitor, const metadata_vendor_id_t vendorTagId,
+            const bool useHalBufManager, const bool needFixupMonochromeTags,
+            const bool usePartialResult, const uint32_t numPartialResults,
+            const uint32_t nextResultFN, const uint32_t nextReprocResultFN,
+            const uint32_t nextZslResultFN,  const uint32_t nextShutterFN,
+            const uint32_t nextReprocShutterFN, const uint32_t nextZslShutterFN,
+            const CameraMetadata& deviceInfo,
+            const std::unordered_map<std::string, CameraMetadata>& physicalDeviceInfoMap,
+            const std::unordered_map<std::string, camera3::DistortionMapper>& distortionMappers,
+            const std::unordered_map<std::string, camera3::ZoomRatioMapper>& zoomRatioMappers) :
+            mTagMonitor(tagMonitor), mVendorTagId(vendorTagId),
+            mUseHalBufManager(useHalBufManager), mNeedFixupMonochromeTags(needFixupMonochromeTags),
+            mUsePartialResult(usePartialResult), mNumPartialResults(numPartialResults),
+            mNextResultFrameNumber(nextResultFN),
+            mNextReprocessResultFrameNumber(nextReprocResultFN),
+            mNextZslStillResultFrameNumber(nextZslResultFN),
+            mNextShutterFrameNumber(nextShutterFN),
+            mNextReprocessShutterFrameNumber(nextReprocShutterFN),
+            mNextZslStillShutterFrameNumber(nextZslShutterFN),
+            mDeviceInfo(deviceInfo),
+            mPhysicalDeviceInfoMap(physicalDeviceInfoMap),
+            mDistortionMappers(distortionMappers),
+            mZoomRatioMappers(zoomRatioMappers) {}
+
+    const TagMonitor& mTagMonitor;
+    const metadata_vendor_id_t mVendorTagId;
+
+    const bool mUseHalBufManager;
+    const bool mNeedFixupMonochromeTags;
+
+    const bool mUsePartialResult;
+    const uint32_t mNumPartialResults;
+
+    // the minimal frame number of the next non-reprocess result
+    const uint32_t mNextResultFrameNumber;
+    // the minimal frame number of the next reprocess result
+    const uint32_t mNextReprocessResultFrameNumber;
+    // the minimal frame number of the next ZSL still capture result
+    const uint32_t mNextZslStillResultFrameNumber;
+    // the minimal frame number of the next non-reprocess shutter
+    const uint32_t mNextShutterFrameNumber;
+    // the minimal frame number of the next reprocess shutter
+    const uint32_t mNextReprocessShutterFrameNumber;
+    // the minimal frame number of the next ZSL still capture shutter
+    const uint32_t mNextZslStillShutterFrameNumber;
+
+    const CameraMetadata& mDeviceInfo;
+
+    const std::unordered_map<std::string, CameraMetadata>& mPhysicalDeviceInfoMap;
+
+    const std::unordered_map<std::string, camera3::DistortionMapper>& mDistortionMappers;
+
+    const std::unordered_map<std::string, camera3::ZoomRatioMapper>& mZoomRatioMappers;
+};
+
 /**
  * Camera3OfflineSession for offline session defined in HIDL ICameraOfflineSession@3.6 or higher
  */
 class Camera3OfflineSession :
             public CameraOfflineSessionBase,
-            virtual public hardware::camera::device::V3_5::ICameraDeviceCallback {
-
+            virtual public hardware::camera::device::V3_5::ICameraDeviceCallback,
+            public camera3::SetErrorInterface,
+            public camera3::InflightRequestUpdateInterface,
+            public camera3::RequestBufferInterface,
+            public camera3::FlushBufferInterface {
   public:
 
-    // initialize by Camera3Device. Camera3Device must send all info in separate argument.
-    // monitored tags
-    // mUseHalBufManager
-    // mUsePartialResult
-    // mNumPartialResults
-    explicit Camera3OfflineSession(const String8& id);
+    // initialize by Camera3Device.
+    explicit Camera3OfflineSession(const String8& id,
+            const sp<camera3::Camera3Stream>& inputStream,
+            const camera3::StreamSet& offlineStreamSet,
+            camera3::BufferRecords&& bufferRecords,
+            const camera3::InFlightRequestMap& offlineReqs,
+            const Camera3OfflineStates& offlineStates,
+            sp<hardware::camera::device::V3_6::ICameraOfflineSession> offlineSession);
 
     virtual ~Camera3OfflineSession();
 
-    status_t initialize(
-        sp<hardware::camera::device::V3_6::ICameraOfflineSession> hidlSession);
+    virtual status_t initialize(wp<NotificationListener> listener) override;
 
     /**
      * CameraOfflineSessionBase interface
@@ -71,8 +142,6 @@
 
     status_t dump(int fd) override;
 
-    status_t abort() override;
-
     // methods for capture result passing
     status_t waitForNextFrame(nsecs_t timeout) override;
     status_t getNextResult(CaptureResult *frame) override;
@@ -115,10 +184,99 @@
      */
 
   private:
-
     // Camera device ID
     const String8 mId;
+    sp<camera3::Camera3Stream> mInputStream;
+    camera3::StreamSet mOutputStreams;
+    camera3::BufferRecords mBufferRecords;
 
+    std::mutex mOfflineReqsLock;
+    camera3::InFlightRequestMap mOfflineReqs;
+
+    sp<hardware::camera::device::V3_6::ICameraOfflineSession> mSession;
+
+    TagMonitor mTagMonitor;
+    const metadata_vendor_id_t mVendorTagId;
+
+    const bool mUseHalBufManager;
+    const bool mNeedFixupMonochromeTags;
+
+    const bool mUsePartialResult;
+    const uint32_t mNumPartialResults;
+
+    std::mutex mOutputLock;
+    List<CaptureResult> mResultQueue;
+    std::condition_variable mResultSignal;
+    // the minimal frame number of the next non-reprocess result
+    uint32_t mNextResultFrameNumber;
+    // the minimal frame number of the next reprocess result
+    uint32_t mNextReprocessResultFrameNumber;
+    // the minimal frame number of the next ZSL still capture result
+    uint32_t mNextZslStillResultFrameNumber;
+    // the minimal frame number of the next non-reprocess shutter
+    uint32_t mNextShutterFrameNumber;
+    // the minimal frame number of the next reprocess shutter
+    uint32_t mNextReprocessShutterFrameNumber;
+    // the minimal frame number of the next ZSL still capture shutter
+    uint32_t mNextZslStillShutterFrameNumber;
+    // End of mOutputLock scope
+
+    const CameraMetadata mDeviceInfo;
+    std::unordered_map<std::string, CameraMetadata> mPhysicalDeviceInfoMap;
+
+    std::unordered_map<std::string, camera3::DistortionMapper> mDistortionMappers;
+
+    std::unordered_map<std::string, camera3::ZoomRatioMapper> mZoomRatioMappers;
+
+    mutable std::mutex mLock;
+
+    enum Status {
+        STATUS_UNINITIALIZED = 0,
+        STATUS_ACTIVE,
+        STATUS_ERROR,
+        STATUS_CLOSED
+    } mStatus;
+
+    wp<NotificationListener> mListener;
+    // End of mLock protected scope
+
+    std::mutex mProcessCaptureResultLock;
+    // FMQ to write result on. Must be guarded by mProcessCaptureResultLock.
+    std::unique_ptr<ResultMetadataQueue> mResultMetadataQueue;
+
+    // Tracking cause of fatal errors when in STATUS_ERROR
+    String8 mErrorCause;
+
+    // Lock to ensure requestStreamBuffers() callbacks are serialized
+    std::mutex mRequestBufferInterfaceLock;
+    // Allow buffer requests until all requests are processed or disconnectImpl is called
+    bool mAllowRequestBuffer = true;
+
+    // For client methods such as disconnect/dump
+    std::mutex mInterfaceLock;
+
+    // SetErrorInterface
+    void setErrorState(const char *fmt, ...) override;
+    void setErrorStateLocked(const char *fmt, ...) override;
+
+    // InflightRequestUpdateInterface
+    void onInflightEntryRemovedLocked(nsecs_t duration) override;
+    void checkInflightMapLengthLocked() override;
+    void onInflightMapFlushedLocked() override;
+
+    // RequestBufferInterface
+    bool startRequestBuffer() override;
+    void endRequestBuffer() override;
+    nsecs_t getWaitDuration() override;
+
+    // FlushBufferInterface
+    void getInflightBufferKeys(std::vector<std::pair<int32_t, int32_t>>* out) override;
+    void getInflightRequestBufferKeys(std::vector<uint64_t>* out) override;
+    std::vector<sp<camera3::Camera3StreamInterface>> getAllStreams() override;
+
+    void setErrorStateLockedV(const char *fmt, va_list args);
+
+    status_t disconnectImpl();
 }; // class Camera3OfflineSession
 
 }; // namespace android
diff --git a/services/camera/libcameraservice/device3/Camera3OutputInterface.h b/services/camera/libcameraservice/device3/Camera3OutputInterface.h
new file mode 100644
index 0000000..8817833
--- /dev/null
+++ b/services/camera/libcameraservice/device3/Camera3OutputInterface.h
@@ -0,0 +1,111 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA3_OUTPUT_INTERFACE_H
+#define ANDROID_SERVERS_CAMERA3_OUTPUT_INTERFACE_H
+
+#include <memory>
+
+#include <cutils/native_handle.h>
+
+#include <utils/Timers.h>
+
+#include "device3/Camera3StreamInterface.h"
+
+namespace android {
+
+namespace camera3 {
+
+    /**
+     * Interfaces used by result/notification path shared between Camera3Device and
+     * Camera3OfflineSession
+     */
+    class SetErrorInterface {
+    public:
+        // Switch the device into the error state and send an ERROR_DEVICE notification
+        virtual void setErrorState(const char *fmt, ...) = 0;
+        // Same as setErrorState, except this method assumes the caller holds the main object lock
+        virtual void setErrorStateLocked(const char *fmt, ...) = 0;
+
+        virtual ~SetErrorInterface() {}
+    };
+
+    // Interface used by callback path to update buffer records
+    class BufferRecordsInterface {
+    public:
+        // Extracts the buffer's unique ID for the given stream.
+        // Returns a pair of (whether this buffer is newly seen, bufferId).
+        virtual std::pair<bool, uint64_t> getBufferId(const buffer_handle_t& buf, int streamId) = 0;
+
+        // Find a buffer_handle_t based on frame number and stream ID
+        virtual status_t popInflightBuffer(int32_t frameNumber, int32_t streamId,
+                /*out*/ buffer_handle_t **buffer) = 0;
+
+        // Register a bufferId (with its streamId and buffer_handle_t) as an in-flight request buffer
+        virtual status_t pushInflightRequestBuffer(
+                uint64_t bufferId, buffer_handle_t* buf, int32_t streamId) = 0;
+
+        // Find a buffer_handle_t based on bufferId
+        virtual status_t popInflightRequestBuffer(uint64_t bufferId,
+                /*out*/ buffer_handle_t** buffer,
+                /*optional out*/ int32_t* streamId = nullptr) = 0;
+
+        virtual ~BufferRecordsInterface() {}
+    };
+
+    class InflightRequestUpdateInterface {
+    public:
+        // Caller must hold the lock protecting the InflightRequestMap
+        // duration: the maxExpectedDuration of the removed entry
+        virtual void onInflightEntryRemovedLocked(nsecs_t duration) = 0;
+
+        virtual void checkInflightMapLengthLocked() = 0;
+
+        virtual void onInflightMapFlushedLocked() = 0;
+
+        virtual ~InflightRequestUpdateInterface() {}
+    };
+
+    class RequestBufferInterface {
+    public:
+        // Returns whether the state machine currently allows requestBuffers calls.
+        // If this returns true, the caller must call endRequestBuffer() later to signal the end
+        // of the request buffer transaction.
+        virtual bool startRequestBuffer() = 0;
+
+        virtual void endRequestBuffer() = 0;
+
+        // Returns how long the implementation should wait for a buffer to be returned
+        virtual nsecs_t getWaitDuration() = 0;
+
+        virtual ~RequestBufferInterface() {}
+    };
+
+    class FlushBufferInterface {
+    public:
+        virtual void getInflightBufferKeys(std::vector<std::pair<int32_t, int32_t>>* out) = 0;
+
+        virtual void getInflightRequestBufferKeys(std::vector<uint64_t>* out) = 0;
+
+        virtual std::vector<sp<Camera3StreamInterface>> getAllStreams() = 0;
+
+        virtual ~FlushBufferInterface() {}
+    };
+} // namespace camera3
+
+} // namespace android
+
+#endif
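
The interfaces above decouple the shared result/notification path from Camera3Device and Camera3OfflineSession. In particular, RequestBufferInterface defines a begin/end transaction: every successful startRequestBuffer() must be paired with endRequestBuffer(). A minimal sketch of how a caller could honor that contract, assuming a hypothetical RAII helper (RequestBufferGuard and tryServeBufferRequest are illustrative names only):

    #include <utils/Timers.h>

    #include "device3/Camera3OutputInterface.h"

    namespace android {
    namespace camera3 {

    // Hypothetical RAII helper: pairs startRequestBuffer() with endRequestBuffer().
    class RequestBufferGuard {
      public:
        explicit RequestBufferGuard(RequestBufferInterface& intf)
            : mIntf(intf), mStarted(intf.startRequestBuffer()) {}
        ~RequestBufferGuard() {
            if (mStarted) mIntf.endRequestBuffer();
        }
        bool ok() const { return mStarted; }
      private:
        RequestBufferInterface& mIntf;
        bool mStarted;
    };

    // Hypothetical caller, e.g. the body of a requestStreamBuffers handler.
    bool tryServeBufferRequest(RequestBufferInterface& intf) {
        RequestBufferGuard guard(intf);
        if (!guard.ok()) {
            return false;  // device state does not allow buffer requests right now
        }
        nsecs_t waitDuration = intf.getWaitDuration();
        (void) waitDuration;  // ... dequeue buffers here, waiting at most waitDuration ...
        return true;
    }

    } // namespace camera3
    } // namespace android
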
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.cpp b/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.cpp
new file mode 100644
index 0000000..64af91e
--- /dev/null
+++ b/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.cpp
@@ -0,0 +1,84 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "Camera3-OutStrmIntf"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+//#define LOG_NDEBUG 0
+//#define LOG_NNDEBUG 0  // Per-frame verbose logging
+
+
+#include "Camera3OutputStreamInterface.h"
+
+namespace android {
+
+namespace camera3 {
+
+status_t StreamSet::add(
+        int streamId, sp<camera3::Camera3OutputStreamInterface> stream) {
+    if (stream == nullptr) {
+        ALOGE("%s: cannot add null stream", __FUNCTION__);
+        return BAD_VALUE;
+    }
+    std::lock_guard<std::mutex> lock(mLock);
+    return mData.add(streamId, stream);
+}
+
+ssize_t StreamSet::remove(int streamId) {
+    std::lock_guard<std::mutex> lock(mLock);
+    return mData.removeItem(streamId);
+}
+
+sp<camera3::Camera3OutputStreamInterface> StreamSet::get(int streamId) {
+    std::lock_guard<std::mutex> lock(mLock);
+    ssize_t idx = mData.indexOfKey(streamId);
+    if (idx == NAME_NOT_FOUND) {
+        return nullptr;
+    }
+    return mData.editValueAt(idx);
+}
+
+sp<camera3::Camera3OutputStreamInterface> StreamSet::operator[] (size_t index) {
+    std::lock_guard<std::mutex> lock(mLock);
+    return mData.editValueAt(index);
+}
+
+size_t StreamSet::size() const {
+    std::lock_guard<std::mutex> lock(mLock);
+    return mData.size();
+}
+
+void StreamSet::clear() {
+    std::lock_guard<std::mutex> lock(mLock);
+    return mData.clear();
+}
+
+std::vector<int> StreamSet::getStreamIds() {
+    std::lock_guard<std::mutex> lock(mLock);
+    std::vector<int> streamIds(mData.size());
+    for (size_t i = 0; i < mData.size(); i++) {
+        streamIds[i] = mData.keyAt(i);
+    }
+    return streamIds;
+}
+
+StreamSet::StreamSet(const StreamSet& other) {
+    std::lock_guard<std::mutex> lock(other.mLock);
+    mData = other.mData;
+}
+
+} // namespace camera3
+
+} // namespace android
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h b/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
index 2bde949..7f5c87a 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
@@ -97,6 +97,26 @@
     virtual const String8& getPhysicalCameraId() const = 0;
 };
 
+// Helper class to organize a synchronized mapping of stream IDs to stream instances
+class StreamSet {
+  public:
+    status_t add(int streamId, sp<camera3::Camera3OutputStreamInterface>);
+    ssize_t remove(int streamId);
+    sp<camera3::Camera3OutputStreamInterface> get(int streamId);
+    // get by (underlying) vector index
+    sp<camera3::Camera3OutputStreamInterface> operator[] (size_t index);
+    size_t size() const;
+    std::vector<int> getStreamIds();
+    void clear();
+
+    StreamSet() {};
+    StreamSet(const StreamSet& other);
+
+  private:
+    mutable std::mutex mLock;
+    KeyedVector<int, sp<camera3::Camera3OutputStreamInterface>> mData;
+};
+
 } // namespace camera3
 
 } // namespace android
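
StreamSet locks internally on every accessor, so callers can take a consistent snapshot of the stream IDs without holding any external lock. A minimal usage sketch (dumpStreams is a hypothetical helper; the set itself would come from Camera3Device or Camera3OfflineSession):

    #define LOG_TAG "StreamSetExample"
    #include <vector>

    #include <utils/Log.h>

    #include "device3/Camera3OutputStreamInterface.h"

    namespace android {
    namespace camera3 {

    // Hypothetical helper: logs every stream currently tracked by the set.
    void dumpStreams(StreamSet& streams) {
        std::vector<int> ids = streams.getStreamIds();  // snapshot taken under the internal lock
        ALOGV("StreamSet holds %zu streams", streams.size());
        for (int id : ids) {
            sp<Camera3OutputStreamInterface> stream = streams.get(id);
            if (stream == nullptr) {
                continue;  // the stream may have been removed after the snapshot
            }
            ALOGV("  stream %d (physical camera '%s')",
                    id, stream->getPhysicalCameraId().string());
        }
    }

    } // namespace camera3
    } // namespace android
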
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
new file mode 100644
index 0000000..eb7b666
--- /dev/null
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
@@ -0,0 +1,1414 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "Camera3-OutputUtils"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+//#define LOG_NDEBUG 0
+//#define LOG_NNDEBUG 0  // Per-frame verbose logging
+
+#ifdef LOG_NNDEBUG
+#define ALOGVV(...) ALOGV(__VA_ARGS__)
+#else
+#define ALOGVV(...) ((void)0)
+#endif
+
+// Convenience macros for transitioning to the error state
+#define SET_ERR(fmt, ...) states.setErrIntf.setErrorState(   \
+    "%s: " fmt, __FUNCTION__,                         \
+    ##__VA_ARGS__)
+
+#include <inttypes.h>
+
+#include <utils/Log.h>
+#include <utils/SortedVector.h>
+#include <utils/Trace.h>
+
+#include <android/hardware/camera2/ICameraDeviceCallbacks.h>
+
+#include <android/hardware/camera/device/3.4/ICameraDeviceCallback.h>
+#include <android/hardware/camera/device/3.5/ICameraDeviceCallback.h>
+#include <android/hardware/camera/device/3.5/ICameraDeviceSession.h>
+
+#include <camera_metadata_hidden.h>
+
+#include "device3/Camera3OutputUtils.h"
+
+using namespace android::camera3;
+using namespace android::hardware::camera;
+
+namespace android {
+namespace camera3 {
+
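+// For monochrome cameras, removes color-only result tags and collapses per-channel
+// values (dynamic black level, noise profile, lens shading map, tonemap curves) down
+// to a single channel so the result metadata stays self-consistent.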
+status_t fixupMonochromeTags(
+        CaptureOutputStates& states,
+        const CameraMetadata& deviceInfo,
+        CameraMetadata& resultMetadata) {
+    status_t res = OK;
+    if (!states.needFixupMonoChrome) {
+        return res;
+    }
+
+    // Remove tags that are not applicable to monochrome camera.
+    int32_t tagsToRemove[] = {
+           ANDROID_SENSOR_GREEN_SPLIT,
+           ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
+           ANDROID_COLOR_CORRECTION_MODE,
+           ANDROID_COLOR_CORRECTION_TRANSFORM,
+           ANDROID_COLOR_CORRECTION_GAINS,
+    };
+    for (auto tag : tagsToRemove) {
+        res = resultMetadata.erase(tag);
+        if (res != OK) {
+            ALOGE("%s: Failed to remove tag %d for monochrome camera", __FUNCTION__, tag);
+            return res;
+        }
+    }
+
+    // ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
+    camera_metadata_entry blEntry = resultMetadata.find(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
+    for (size_t i = 1; i < blEntry.count; i++) {
+        blEntry.data.f[i] = blEntry.data.f[0];
+    }
+
+    // ANDROID_SENSOR_NOISE_PROFILE
+    camera_metadata_entry npEntry = resultMetadata.find(ANDROID_SENSOR_NOISE_PROFILE);
+    if (npEntry.count > 0 && npEntry.count % 2 == 0) {
+        double np[] = {npEntry.data.d[0], npEntry.data.d[1]};
+        res = resultMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, np, 2);
+        if (res != OK) {
+             ALOGE("%s: Failed to update SENSOR_NOISE_PROFILE: %s (%d)",
+                    __FUNCTION__, strerror(-res), res);
+            return res;
+        }
+    }
+
+    // ANDROID_STATISTICS_LENS_SHADING_MAP
+    camera_metadata_ro_entry lsSizeEntry = deviceInfo.find(ANDROID_LENS_INFO_SHADING_MAP_SIZE);
+    camera_metadata_entry lsEntry = resultMetadata.find(ANDROID_STATISTICS_LENS_SHADING_MAP);
+    if (lsSizeEntry.count == 2 && lsEntry.count > 0
+            && (int32_t)lsEntry.count == 4 * lsSizeEntry.data.i32[0] * lsSizeEntry.data.i32[1]) {
+        for (int32_t i = 0; i < lsSizeEntry.data.i32[0] * lsSizeEntry.data.i32[1]; i++) {
+            lsEntry.data.f[4*i+1] = lsEntry.data.f[4*i];
+            lsEntry.data.f[4*i+2] = lsEntry.data.f[4*i];
+            lsEntry.data.f[4*i+3] = lsEntry.data.f[4*i];
+        }
+    }
+
+    // ANDROID_TONEMAP_CURVE_BLUE
+    // ANDROID_TONEMAP_CURVE_GREEN
+    // ANDROID_TONEMAP_CURVE_RED
+    camera_metadata_entry tcbEntry = resultMetadata.find(ANDROID_TONEMAP_CURVE_BLUE);
+    camera_metadata_entry tcgEntry = resultMetadata.find(ANDROID_TONEMAP_CURVE_GREEN);
+    camera_metadata_entry tcrEntry = resultMetadata.find(ANDROID_TONEMAP_CURVE_RED);
+    if (tcbEntry.count > 0
+            && tcbEntry.count == tcgEntry.count
+            && tcbEntry.count == tcrEntry.count) {
+        for (size_t i = 0; i < tcbEntry.count; i++) {
+            tcbEntry.data.f[i] = tcrEntry.data.f[i];
+            tcgEntry.data.f[i] = tcrEntry.data.f[i];
+        }
+    }
+
+    return res;
+}
+
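+// Stamps the result with the vendor tag ID, frame number and request ID, then appends
+// it to the result queue and signals any thread waiting for the next frame.
+// Caller must hold the output lock.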
+void insertResultLocked(CaptureOutputStates& states, CaptureResult *result, uint32_t frameNumber) {
+    if (result == nullptr) return;
+
+    camera_metadata_t *meta = const_cast<camera_metadata_t *>(
+            result->mMetadata.getAndLock());
+    set_camera_metadata_vendor_id(meta, states.vendorTagId);
+    result->mMetadata.unlock(meta);
+
+    if (result->mMetadata.update(ANDROID_REQUEST_FRAME_COUNT,
+            (int32_t*)&frameNumber, 1) != OK) {
+        SET_ERR("Failed to set frame number %d in metadata", frameNumber);
+        return;
+    }
+
+    if (result->mMetadata.update(ANDROID_REQUEST_ID, &result->mResultExtras.requestId, 1) != OK) {
+        SET_ERR("Failed to set request ID in metadata for frame %d", frameNumber);
+        return;
+    }
+
+    // Update vendor tag id for physical metadata
+    for (auto& physicalMetadata : result->mPhysicalMetadatas) {
+        camera_metadata_t *pmeta = const_cast<camera_metadata_t *>(
+                physicalMetadata.mPhysicalCameraMetadata.getAndLock());
+        set_camera_metadata_vendor_id(pmeta, states.vendorTagId);
+        physicalMetadata.mPhysicalCameraMetadata.unlock(pmeta);
+    }
+
+    // Valid result, insert into queue
+    List<CaptureResult>::iterator queuedResult =
+            states.resultQueue.insert(states.resultQueue.end(), CaptureResult(*result));
+    ALOGVV("%s: result requestId = %" PRId32 ", frameNumber = %" PRId64
+           ", burstId = %" PRId32, __FUNCTION__,
+           queuedResult->mResultExtras.requestId,
+           queuedResult->mResultExtras.frameNumber,
+           queuedResult->mResultExtras.burstId);
+
+    states.resultSignal.notify_one();
+}
+
+
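+// Builds a metadata-only partial capture result, applies the monochrome fix-up, and
+// queues it under the output lock.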
+void sendPartialCaptureResult(CaptureOutputStates& states,
+        const camera_metadata_t * partialResult,
+        const CaptureResultExtras &resultExtras, uint32_t frameNumber) {
+    ATRACE_CALL();
+    std::lock_guard<std::mutex> l(states.outputLock);
+
+    CaptureResult captureResult;
+    captureResult.mResultExtras = resultExtras;
+    captureResult.mMetadata = partialResult;
+
+    // Fix up result metadata for monochrome camera.
+    status_t res = fixupMonochromeTags(states, states.deviceInfo, captureResult.mMetadata);
+    if (res != OK) {
+        SET_ERR("Failed to override result metadata: %s (%d)", strerror(-res), res);
+        return;
+    }
+
+    insertResultLocked(states, &captureResult, frameNumber);
+}
+
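+// Builds the final capture result for a frame: enforces per-type result ordering,
+// merges any collected partial results, validates timestamps, applies distortion,
+// zoom-ratio and monochrome fix-ups, and queues the completed result.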
+void sendCaptureResult(
+        CaptureOutputStates& states,
+        CameraMetadata &pendingMetadata,
+        CaptureResultExtras &resultExtras,
+        CameraMetadata &collectedPartialResult,
+        uint32_t frameNumber,
+        bool reprocess, bool zslStillCapture,
+        const std::set<std::string>& cameraIdsWithZoom,
+        const std::vector<PhysicalCaptureResultInfo>& physicalMetadatas) {
+    ATRACE_CALL();
+    if (pendingMetadata.isEmpty())
+        return;
+
+    std::lock_guard<std::mutex> l(states.outputLock);
+
+    // TODO: need to track errors for tighter bounds on expected frame number
+    if (reprocess) {
+        if (frameNumber < states.nextReprocResultFrameNum) {
+            SET_ERR("Out-of-order reprocess capture result metadata submitted! "
+                "(got frame number %d, expecting %d)",
+                frameNumber, states.nextReprocResultFrameNum);
+            return;
+        }
+        states.nextReprocResultFrameNum = frameNumber + 1;
+    } else if (zslStillCapture) {
+        if (frameNumber < states.nextZslResultFrameNum) {
+            SET_ERR("Out-of-order ZSL still capture result metadata submitted! "
+                "(got frame number %d, expecting %d)",
+                frameNumber, states.nextZslResultFrameNum);
+            return;
+        }
+        states.nextZslResultFrameNum = frameNumber + 1;
+    } else {
+        if (frameNumber < states.nextResultFrameNum) {
+            SET_ERR("Out-of-order capture result metadata submitted! "
+                    "(got frame number %d, expecting %d)",
+                    frameNumber, states.nextResultFrameNum);
+            return;
+        }
+        states.nextResultFrameNum = frameNumber + 1;
+    }
+
+    CaptureResult captureResult;
+    captureResult.mResultExtras = resultExtras;
+    captureResult.mMetadata = pendingMetadata;
+    captureResult.mPhysicalMetadatas = physicalMetadatas;
+
+    // Append any previous partials to form a complete result
+    if (states.usePartialResult && !collectedPartialResult.isEmpty()) {
+        captureResult.mMetadata.append(collectedPartialResult);
+    }
+
+    captureResult.mMetadata.sort();
+
+    // Check that there's a timestamp in the result metadata
+    camera_metadata_entry timestamp = captureResult.mMetadata.find(ANDROID_SENSOR_TIMESTAMP);
+    if (timestamp.count == 0) {
+        SET_ERR("No timestamp provided by HAL for frame %d!",
+                frameNumber);
+        return;
+    }
+    for (auto& physicalMetadata : captureResult.mPhysicalMetadatas) {
+        camera_metadata_entry timestamp =
+                physicalMetadata.mPhysicalCameraMetadata.find(ANDROID_SENSOR_TIMESTAMP);
+        if (timestamp.count == 0) {
+            SET_ERR("No timestamp provided by HAL for physical camera %s frame %d!",
+                    String8(physicalMetadata.mPhysicalCameraId).c_str(), frameNumber);
+            return;
+        }
+    }
+
+    // Fix up some result metadata to account for HAL-level distortion correction
+    status_t res =
+            states.distortionMappers[states.cameraId.c_str()].correctCaptureResult(
+                    &captureResult.mMetadata);
+    if (res != OK) {
+        SET_ERR("Unable to correct capture result metadata for frame %d: %s (%d)",
+                frameNumber, strerror(-res), res);
+        return;
+    }
+
+    // Fix up result metadata to account for zoom ratio availabilities between
+    // HAL and app.
+    bool zoomRatioIs1 = cameraIdsWithZoom.find(states.cameraId.c_str()) == cameraIdsWithZoom.end();
+    res = states.zoomRatioMappers[states.cameraId.c_str()].updateCaptureResult(
+            &captureResult.mMetadata, zoomRatioIs1);
+    if (res != OK) {
+        SET_ERR("Failed to update capture result zoom ratio metadata for frame %d: %s (%d)",
+                frameNumber, strerror(-res), res);
+        return;
+    }
+
+    for (auto& physicalMetadata : captureResult.mPhysicalMetadatas) {
+        String8 cameraId8(physicalMetadata.mPhysicalCameraId);
+        if (states.distortionMappers.find(cameraId8.c_str()) != states.distortionMappers.end()) {
+            res = states.distortionMappers[cameraId8.c_str()].correctCaptureResult(
+                    &physicalMetadata.mPhysicalCameraMetadata);
+            if (res != OK) {
+                SET_ERR("Unable to correct physical capture result metadata for frame %d: %s (%d)",
+                        frameNumber, strerror(-res), res);
+                return;
+            }
+        }
+
+        zoomRatioIs1 = cameraIdsWithZoom.find(cameraId8.c_str()) == cameraIdsWithZoom.end();
+        res = states.zoomRatioMappers[cameraId8.c_str()].updateCaptureResult(
+                &physicalMetadata.mPhysicalCameraMetadata, zoomRatioIs1);
+        if (res != OK) {
+            SET_ERR("Failed to update camera %s's physical zoom ratio metadata for "
+                    "frame %d: %s(%d)", cameraId8.c_str(), frameNumber, strerror(-res), res);
+            return;
+        }
+    }
+
+    // Fix up result metadata for monochrome camera.
+    res = fixupMonochromeTags(states, states.deviceInfo, captureResult.mMetadata);
+    if (res != OK) {
+        SET_ERR("Failed to override result metadata: %s (%d)", strerror(-res), res);
+        return;
+    }
+    for (auto& physicalMetadata : captureResult.mPhysicalMetadatas) {
+        String8 cameraId8(physicalMetadata.mPhysicalCameraId);
+        res = fixupMonochromeTags(states,
+                states.physicalDeviceInfoMap.at(cameraId8.c_str()),
+                physicalMetadata.mPhysicalCameraMetadata);
+        if (res != OK) {
+            SET_ERR("Failed to override result metadata: %s (%d)", strerror(-res), res);
+            return;
+        }
+    }
+
+    std::unordered_map<std::string, CameraMetadata> monitoredPhysicalMetadata;
+    for (auto& m : physicalMetadatas) {
+        monitoredPhysicalMetadata.emplace(String8(m.mPhysicalCameraId).string(),
+                CameraMetadata(m.mPhysicalCameraMetadata));
+    }
+    states.tagMonitor.monitorMetadata(TagMonitor::RESULT,
+            frameNumber, timestamp.data.i64[0], captureResult.mMetadata,
+            monitoredPhysicalMetadata);
+
+    insertResultLocked(states, &captureResult, frameNumber);
+}
+
+// Reads one camera metadata blob, either from the FMQ (when fmqResultSize > 0) or directly
+// from the result argument. Assumes the FMQ is already protected by a lock.
+status_t readOneCameraMetadataLocked(
+        std::unique_ptr<ResultMetadataQueue>& fmq,
+        uint64_t fmqResultSize,
+        hardware::camera::device::V3_2::CameraMetadata& resultMetadata,
+        const hardware::camera::device::V3_2::CameraMetadata& result) {
+    if (fmqResultSize > 0) {
+        resultMetadata.resize(fmqResultSize);
+        if (fmq == nullptr) {
+            return NO_MEMORY; // logged in initialize()
+        }
+        if (!fmq->read(resultMetadata.data(), fmqResultSize)) {
+            ALOGE("%s: Cannot read camera metadata from fmq, size = %" PRIu64,
+                    __FUNCTION__, fmqResultSize);
+            return INVALID_OPERATION;
+        }
+    } else {
+        resultMetadata.setToExternal(const_cast<uint8_t *>(result.data()),
+                result.size());
+    }
+
+    if (resultMetadata.size() != 0) {
+        status_t res;
+        const camera_metadata_t* metadata =
+                reinterpret_cast<const camera_metadata_t*>(resultMetadata.data());
+        size_t expected_metadata_size = resultMetadata.size();
+        if ((res = validate_camera_metadata_structure(metadata, &expected_metadata_size)) != OK) {
+            ALOGE("%s: Invalid camera metadata received by camera service from HAL: %s (%d)",
+                    __FUNCTION__, strerror(-res), res);
+            return INVALID_OPERATION;
+        }
+    }
+
+    return OK;
+}
+
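+// Removes the in-flight entry at the given index and notifies the owner so it can
+// update its expected-duration bookkeeping. Caller must hold the in-flight lock.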
+void removeInFlightMapEntryLocked(CaptureOutputStates& states, int idx) {
+    ATRACE_CALL();
+    InFlightRequestMap& inflightMap = states.inflightMap;
+    nsecs_t duration = inflightMap.valueAt(idx).maxExpectedDuration;
+    inflightMap.removeItemsAt(idx, 1);
+
+    states.inflightIntf.onInflightEntryRemovedLocked(duration);
+}
+
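+// Removes an in-flight entry once all buffers, result metadata and the shutter
+// notification have arrived (or the request failed), returning any pending output
+// buffers to their streams first. Caller must hold the in-flight lock.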
+void removeInFlightRequestIfReadyLocked(CaptureOutputStates& states, int idx) {
+    InFlightRequestMap& inflightMap = states.inflightMap;
+    const InFlightRequest &request = inflightMap.valueAt(idx);
+    const uint32_t frameNumber = inflightMap.keyAt(idx);
+
+    nsecs_t sensorTimestamp = request.sensorTimestamp;
+    nsecs_t shutterTimestamp = request.shutterTimestamp;
+
+    // Check if it's okay to remove the request from InFlightMap:
+    // In the case of a successful request:
+    //      all input and output buffers, all result metadata, shutter callback
+    //      arrived.
+    // In the case of an unsuccessful request:
+    //      all input and output buffers arrived.
+    if (request.numBuffersLeft == 0 &&
+            (request.skipResultMetadata ||
+            (request.haveResultMetadata && shutterTimestamp != 0))) {
+        if (request.stillCapture) {
+            ATRACE_ASYNC_END("still capture", frameNumber);
+        }
+
+        ATRACE_ASYNC_END("frame capture", frameNumber);
+
+        // Sanity check: the sensor timestamp should match the shutter timestamp
+        // when the request has a callback.
+        if (request.hasCallback && request.requestStatus == OK &&
+                sensorTimestamp != shutterTimestamp) {
+            SET_ERR("sensor timestamp (%" PRId64
+                ") for frame %d doesn't match shutter timestamp (%" PRId64 ")",
+                sensorTimestamp, frameNumber, shutterTimestamp);
+        }
+
+        // An unsuccessful request may still have pending output buffers to
+        // return.
+        assert(request.requestStatus != OK ||
+               request.pendingOutputBuffers.size() == 0);
+
+        returnOutputBuffers(
+            states.useHalBufManager, states.listener,
+            request.pendingOutputBuffers.array(),
+            request.pendingOutputBuffers.size(), 0, /*timestampIncreasing*/true,
+            request.outputSurfaces, request.resultExtras);
+
+        removeInFlightMapEntryLocked(states, idx);
+        ALOGVV("%s: removed frame %d from InFlightMap", __FUNCTION__, frameNumber);
+    }
+
+    states.inflightIntf.checkInflightMapLengthLocked();
+}
+
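+// Main handler for a single HAL capture result: validates partial result counts,
+// updates the matching in-flight request, sends metadata once complete, and returns
+// output and input buffers (or defers them until the shutter timestamp arrives).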
+void processCaptureResult(CaptureOutputStates& states, const camera3_capture_result *result) {
+    ATRACE_CALL();
+
+    status_t res;
+
+    uint32_t frameNumber = result->frame_number;
+    if (result->result == NULL && result->num_output_buffers == 0 &&
+            result->input_buffer == NULL) {
+        SET_ERR("No result data provided by HAL for frame %d",
+                frameNumber);
+        return;
+    }
+
+    if (!states.usePartialResult &&
+            result->result != NULL &&
+            result->partial_result != 1) {
+        SET_ERR("Result is malformed for frame %d: partial_result %u must be 1"
+                " if partial result is not supported",
+                frameNumber, result->partial_result);
+        return;
+    }
+
+    bool isPartialResult = false;
+    CameraMetadata collectedPartialResult;
+    bool hasInputBufferInRequest = false;
+
+    // Get shutter timestamp and resultExtras from list of in-flight requests,
+    // where it was added by the shutter notification for this frame. If the
+    // shutter timestamp isn't received yet, append the output buffers to the
+    // in-flight request and they will be returned when the shutter timestamp
+    // arrives. Update the in-flight status and remove the in-flight entry if
+    // all result data and shutter timestamp have been received.
+    nsecs_t shutterTimestamp = 0;
+    {
+        std::lock_guard<std::mutex> l(states.inflightLock);
+        ssize_t idx = states.inflightMap.indexOfKey(frameNumber);
+        if (idx == NAME_NOT_FOUND) {
+            SET_ERR("Unknown frame number for capture result: %d",
+                    frameNumber);
+            return;
+        }
+        InFlightRequest &request = states.inflightMap.editValueAt(idx);
+        ALOGVV("%s: got InFlightRequest requestId = %" PRId32
+                ", frameNumber = %" PRId64 ", burstId = %" PRId32
+                ", partialResultCount = %d, hasCallback = %d",
+                __FUNCTION__, request.resultExtras.requestId,
+                request.resultExtras.frameNumber, request.resultExtras.burstId,
+                result->partial_result, request.hasCallback);
+        // Always update the partial count to the latest one if it's not 0
+        // (buffers only). When the framework aggregates adjacent partial results
+        // into one, the latest partial count will be used.
+        if (result->partial_result != 0)
+            request.resultExtras.partialResultCount = result->partial_result;
+
+        // Check if this result carries only partial metadata
+        if (states.usePartialResult && result->result != NULL) {
+            if (result->partial_result > states.numPartialResults || result->partial_result < 1) {
+                SET_ERR("Result is malformed for frame %d: partial_result %u must be  in"
+                        " the range of [1, %d] when metadata is included in the result",
+                        frameNumber, result->partial_result, states.numPartialResults);
+                return;
+            }
+            isPartialResult = (result->partial_result < states.numPartialResults);
+            if (isPartialResult && result->num_physcam_metadata) {
+                SET_ERR("Result is malformed for frame %d: partial_result not allowed for"
+                        " physical camera result", frameNumber);
+                return;
+            }
+            if (isPartialResult) {
+                request.collectedPartialResult.append(result->result);
+            }
+
+            if (isPartialResult && request.hasCallback) {
+                // Send partial capture result
+                sendPartialCaptureResult(states, result->result, request.resultExtras,
+                        frameNumber);
+            }
+        }
+
+        shutterTimestamp = request.shutterTimestamp;
+        hasInputBufferInRequest = request.hasInputBuffer;
+
+        // Did we get the (final) result metadata for this capture?
+        if (result->result != NULL && !isPartialResult) {
+            if (request.physicalCameraIds.size() != result->num_physcam_metadata) {
+                SET_ERR("Expected physical Camera metadata count %d not equal to actual count %d",
+                        request.physicalCameraIds.size(), result->num_physcam_metadata);
+                return;
+            }
+            if (request.haveResultMetadata) {
+                SET_ERR("Called multiple times with metadata for frame %d",
+                        frameNumber);
+                return;
+            }
+            for (uint32_t i = 0; i < result->num_physcam_metadata; i++) {
+                String8 physicalId(result->physcam_ids[i]);
+                std::set<String8>::iterator cameraIdIter =
+                        request.physicalCameraIds.find(physicalId);
+                if (cameraIdIter != request.physicalCameraIds.end()) {
+                    request.physicalCameraIds.erase(cameraIdIter);
+                } else {
+                    SET_ERR("Total result for frame %d has already returned for camera %s",
+                            frameNumber, physicalId.c_str());
+                    return;
+                }
+            }
+            if (states.usePartialResult &&
+                    !request.collectedPartialResult.isEmpty()) {
+                collectedPartialResult.acquire(
+                    request.collectedPartialResult);
+            }
+            request.haveResultMetadata = true;
+        }
+
+        uint32_t numBuffersReturned = result->num_output_buffers;
+        if (result->input_buffer != NULL) {
+            if (hasInputBufferInRequest) {
+                numBuffersReturned += 1;
+            } else {
+                ALOGW("%s: Input buffer should be NULL if there is no input"
+                        " buffer sent in the request",
+                        __FUNCTION__);
+            }
+        }
+        request.numBuffersLeft -= numBuffersReturned;
+        if (request.numBuffersLeft < 0) {
+            SET_ERR("Too many buffers returned for frame %d",
+                    frameNumber);
+            return;
+        }
+
+        camera_metadata_ro_entry_t entry;
+        res = find_camera_metadata_ro_entry(result->result,
+                ANDROID_SENSOR_TIMESTAMP, &entry);
+        if (res == OK && entry.count == 1) {
+            request.sensorTimestamp = entry.data.i64[0];
+        }
+
+        // If shutter event isn't received yet, append the output buffers to
+        // the in-flight request. Otherwise, return the output buffers to
+        // streams.
+        if (shutterTimestamp == 0) {
+            request.pendingOutputBuffers.appendArray(result->output_buffers,
+                result->num_output_buffers);
+        } else {
+            bool timestampIncreasing = !(request.zslCapture || request.hasInputBuffer);
+            returnOutputBuffers(states.useHalBufManager, states.listener,
+                result->output_buffers, result->num_output_buffers,
+                shutterTimestamp, timestampIncreasing,
+                request.outputSurfaces, request.resultExtras);
+        }
+
+        if (result->result != NULL && !isPartialResult) {
+            for (uint32_t i = 0; i < result->num_physcam_metadata; i++) {
+                CameraMetadata physicalMetadata;
+                physicalMetadata.append(result->physcam_metadata[i]);
+                request.physicalMetadatas.push_back({String16(result->physcam_ids[i]),
+                        physicalMetadata});
+            }
+            if (shutterTimestamp == 0) {
+                request.pendingMetadata = result->result;
+                request.collectedPartialResult = collectedPartialResult;
+            } else if (request.hasCallback) {
+                CameraMetadata metadata;
+                metadata = result->result;
+                sendCaptureResult(states, metadata, request.resultExtras,
+                    collectedPartialResult, frameNumber,
+                    hasInputBufferInRequest, request.zslCapture && request.stillCapture,
+                    request.cameraIdsWithZoom, request.physicalMetadatas);
+            }
+        }
+        removeInFlightRequestIfReadyLocked(states, idx);
+    } // scope for states.inFlightLock
+
+    if (result->input_buffer != NULL) {
+        if (hasInputBufferInRequest) {
+            Camera3Stream *stream =
+                Camera3Stream::cast(result->input_buffer->stream);
+            res = stream->returnInputBuffer(*(result->input_buffer));
+            // Note: stream may be deallocated at this point, if this buffer was the
+            // last reference to it.
+            if (res != OK) {
+                ALOGE("%s: RequestThread: Can't return input buffer for frame %d to"
+                      "  its stream:%s (%d)",  __FUNCTION__,
+                      frameNumber, strerror(-res), res);
+            }
+        } else {
+            ALOGW("%s: Input buffer should be NULL if there is no input"
+                    " buffer sent in the request, skipping input buffer return.",
+                    __FUNCTION__);
+        }
+    }
+}
+
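+// Converts a HIDL CaptureResult (with metadata possibly delivered over the result FMQ)
+// into a camera3_capture_result and forwards it to processCaptureResult().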
+void processOneCaptureResultLocked(
+        CaptureOutputStates& states,
+        const hardware::camera::device::V3_2::CaptureResult& result,
+        const hardware::hidl_vec<
+                hardware::camera::device::V3_4::PhysicalCameraMetadata> physicalCameraMetadata) {
+    using hardware::camera::device::V3_2::StreamBuffer;
+    using hardware::camera::device::V3_2::BufferStatus;
+    std::unique_ptr<ResultMetadataQueue>& fmq = states.fmq;
+    BufferRecordsInterface& bufferRecords = states.bufferRecordsIntf;
+    camera3_capture_result r;
+    status_t res;
+    r.frame_number = result.frameNumber;
+
+    // Read and validate the result metadata.
+    hardware::camera::device::V3_2::CameraMetadata resultMetadata;
+    res = readOneCameraMetadataLocked(
+            fmq, result.fmqResultSize,
+            resultMetadata, result.result);
+    if (res != OK) {
+        ALOGE("%s: Frame %d: Failed to read capture result metadata",
+                __FUNCTION__, result.frameNumber);
+        return;
+    }
+    r.result = reinterpret_cast<const camera_metadata_t*>(resultMetadata.data());
+
+    // Read and validate physical camera metadata
+    size_t physResultCount = physicalCameraMetadata.size();
+    std::vector<const char*> physCamIds(physResultCount);
+    std::vector<const camera_metadata_t *> phyCamMetadatas(physResultCount);
+    std::vector<hardware::camera::device::V3_2::CameraMetadata> physResultMetadata;
+    physResultMetadata.resize(physResultCount);
+    for (size_t i = 0; i < physicalCameraMetadata.size(); i++) {
+        res = readOneCameraMetadataLocked(fmq, physicalCameraMetadata[i].fmqMetadataSize,
+                physResultMetadata[i], physicalCameraMetadata[i].metadata);
+        if (res != OK) {
+            ALOGE("%s: Frame %d: Failed to read capture result metadata for camera %s",
+                    __FUNCTION__, result.frameNumber,
+                    physicalCameraMetadata[i].physicalCameraId.c_str());
+            return;
+        }
+        physCamIds[i] = physicalCameraMetadata[i].physicalCameraId.c_str();
+        phyCamMetadatas[i] = reinterpret_cast<const camera_metadata_t*>(
+                physResultMetadata[i].data());
+    }
+    r.num_physcam_metadata = physResultCount;
+    r.physcam_ids = physCamIds.data();
+    r.physcam_metadata = phyCamMetadatas.data();
+
+    std::vector<camera3_stream_buffer_t> outputBuffers(result.outputBuffers.size());
+    std::vector<buffer_handle_t> outputBufferHandles(result.outputBuffers.size());
+    for (size_t i = 0; i < result.outputBuffers.size(); i++) {
+        auto& bDst = outputBuffers[i];
+        const StreamBuffer &bSrc = result.outputBuffers[i];
+
+        sp<Camera3StreamInterface> stream = states.outputStreams.get(bSrc.streamId);
+        if (stream == nullptr) {
+            ALOGE("%s: Frame %d: Buffer %zu: Invalid output stream id %d",
+                    __FUNCTION__, result.frameNumber, i, bSrc.streamId);
+            return;
+        }
+        bDst.stream = stream->asHalStream();
+
+        bool noBufferReturned = false;
+        buffer_handle_t *buffer = nullptr;
+        if (states.useHalBufManager) {
+            // This is suspicious most of the time, but can be correct during flush, where the HAL
+            // has to return a capture result before a buffer is requested.
+            if (bSrc.bufferId == BUFFER_ID_NO_BUFFER) {
+                if (bSrc.status == BufferStatus::OK) {
+                    ALOGE("%s: Frame %d: Buffer %zu: No bufferId for stream %d",
+                            __FUNCTION__, result.frameNumber, i, bSrc.streamId);
+                    // Still proceeds so other buffers can be returned
+                }
+                noBufferReturned = true;
+            }
+            if (noBufferReturned) {
+                res = OK;
+            } else {
+                res = bufferRecords.popInflightRequestBuffer(bSrc.bufferId, &buffer);
+            }
+        } else {
+            res = bufferRecords.popInflightBuffer(result.frameNumber, bSrc.streamId, &buffer);
+        }
+
+        if (res != OK) {
+            ALOGE("%s: Frame %d: Buffer %zu: No in-flight buffer for stream %d",
+                    __FUNCTION__, result.frameNumber, i, bSrc.streamId);
+            return;
+        }
+
+        bDst.buffer = buffer;
+        bDst.status = mapHidlBufferStatus(bSrc.status);
+        bDst.acquire_fence = -1;
+        if (bSrc.releaseFence == nullptr) {
+            bDst.release_fence = -1;
+        } else if (bSrc.releaseFence->numFds == 1) {
+            if (noBufferReturned) {
+                ALOGE("%s: got releaseFence without output buffer!", __FUNCTION__);
+            }
+            bDst.release_fence = dup(bSrc.releaseFence->data[0]);
+        } else {
+            ALOGE("%s: Frame %d: Invalid release fence for buffer %zu, fd count is %d, not 1",
+                    __FUNCTION__, result.frameNumber, i, bSrc.releaseFence->numFds);
+            return;
+        }
+    }
+    r.num_output_buffers = outputBuffers.size();
+    r.output_buffers = outputBuffers.data();
+
+    camera3_stream_buffer_t inputBuffer;
+    if (result.inputBuffer.streamId == -1) {
+        r.input_buffer = nullptr;
+    } else {
+        if (states.inputStream->getId() != result.inputBuffer.streamId) {
+            ALOGE("%s: Frame %d: Invalid input stream id %d", __FUNCTION__,
+                    result.frameNumber, result.inputBuffer.streamId);
+            return;
+        }
+        inputBuffer.stream = states.inputStream->asHalStream();
+        buffer_handle_t *buffer;
+        res = bufferRecords.popInflightBuffer(result.frameNumber, result.inputBuffer.streamId,
+                &buffer);
+        if (res != OK) {
+            ALOGE("%s: Frame %d: Input buffer: No in-flight buffer for stream %d",
+                    __FUNCTION__, result.frameNumber, result.inputBuffer.streamId);
+            return;
+        }
+        inputBuffer.buffer = buffer;
+        inputBuffer.status = mapHidlBufferStatus(result.inputBuffer.status);
+        inputBuffer.acquire_fence = -1;
+        if (result.inputBuffer.releaseFence == nullptr) {
+            inputBuffer.release_fence = -1;
+        } else if (result.inputBuffer.releaseFence->numFds == 1) {
+            inputBuffer.release_fence = dup(result.inputBuffer.releaseFence->data[0]);
+        } else {
+            ALOGE("%s: Frame %d: Invalid release fence for input buffer, fd count is %d, not 1",
+                    __FUNCTION__, result.frameNumber, result.inputBuffer.releaseFence->numFds);
+            return;
+        }
+        r.input_buffer = &inputBuffer;
+    }
+
+    r.partial_result = result.partialResult;
+
+    processCaptureResult(states, &r);
+}
+
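+// Returns a set of filled (or error) output buffers to their streams; for shared
+// streams, a timed-out buffer is cancelled and a buffer error is reported to the client.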
+void returnOutputBuffers(
+        bool useHalBufManager,
+        sp<NotificationListener> listener,
+        const camera3_stream_buffer_t *outputBuffers, size_t numBuffers,
+        nsecs_t timestamp, bool timestampIncreasing,
+        const SurfaceMap& outputSurfaces,
+        const CaptureResultExtras &inResultExtras) {
+
+    for (size_t i = 0; i < numBuffers; i++)
+    {
+        if (outputBuffers[i].buffer == nullptr) {
+            if (!useHalBufManager) {
+                // With the HAL buffer management API, the HAL sometimes has to return buffers
+                // that do not yet have an output buffer handle filled in. Without the HAL buffer
+                // management API, however, this is illegal.
+                ALOGE("%s: cannot return a null buffer!", __FUNCTION__);
+            }
+            continue;
+        }
+
+        Camera3StreamInterface *stream = Camera3Stream::cast(outputBuffers[i].stream);
+        int streamId = stream->getId();
+        const auto& it = outputSurfaces.find(streamId);
+        status_t res = OK;
+        if (it != outputSurfaces.end()) {
+            res = stream->returnBuffer(
+                    outputBuffers[i], timestamp, timestampIncreasing, it->second,
+                    inResultExtras.frameNumber);
+        } else {
+            res = stream->returnBuffer(
+                    outputBuffers[i], timestamp, timestampIncreasing, std::vector<size_t> (),
+                    inResultExtras.frameNumber);
+        }
+
+        // Note: stream may be deallocated at this point, if this buffer was
+        // the last reference to it.
+        if (res == NO_INIT || res == DEAD_OBJECT) {
+            ALOGV("Can't return buffer to its stream: %s (%d)", strerror(-res), res);
+        } else if (res != OK) {
+            ALOGE("Can't return buffer to its stream: %s (%d)", strerror(-res), res);
+        }
+
+        // Long-processing consumers can cause a returnBuffer timeout for a shared stream.
+        // If that happens, cancel the buffer and send a buffer error to the client.
+        if (it != outputSurfaces.end() && res == TIMED_OUT &&
+                outputBuffers[i].status == CAMERA3_BUFFER_STATUS_OK) {
+            // cancel the buffer
+            camera3_stream_buffer_t sb = outputBuffers[i];
+            sb.status = CAMERA3_BUFFER_STATUS_ERROR;
+            stream->returnBuffer(sb, /*timestamp*/0, timestampIncreasing, std::vector<size_t> (),
+                    inResultExtras.frameNumber);
+
+            if (listener != nullptr) {
+                CaptureResultExtras extras = inResultExtras;
+                extras.errorStreamId = streamId;
+                listener->notifyError(
+                        hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER,
+                        extras);
+            }
+        }
+    }
+}
+
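+// Handles a shutter notification from the HAL: verifies shutter ordering, records the
+// timestamp, notifies the listener, and sends any pending result and buffers for the frame.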
+void notifyShutter(CaptureOutputStates& states, const camera3_shutter_msg_t &msg) {
+    ATRACE_CALL();
+    ssize_t idx;
+
+    // Set timestamp for the request in the in-flight tracking
+    // and get the request ID to send upstream
+    {
+        std::lock_guard<std::mutex> l(states.inflightLock);
+        InFlightRequestMap& inflightMap = states.inflightMap;
+        idx = inflightMap.indexOfKey(msg.frame_number);
+        if (idx >= 0) {
+            InFlightRequest &r = inflightMap.editValueAt(idx);
+
+            // Verify ordering of shutter notifications
+            {
+                std::lock_guard<std::mutex> l(states.outputLock);
+                // TODO: need to track errors for tighter bounds on expected frame number.
+                if (r.hasInputBuffer) {
+                    if (msg.frame_number < states.nextReprocShutterFrameNum) {
+                        SET_ERR("Reprocess shutter notification out-of-order. Expected "
+                                "notification for frame %d, got frame %d",
+                                states.nextReprocShutterFrameNum, msg.frame_number);
+                        return;
+                    }
+                    states.nextReprocShutterFrameNum = msg.frame_number + 1;
+                } else if (r.zslCapture && r.stillCapture) {
+                    if (msg.frame_number < states.nextZslShutterFrameNum) {
+                        SET_ERR("ZSL still capture shutter notification out-of-order. Expected "
+                                "notification for frame %d, got frame %d",
+                                states.nextZslShutterFrameNum, msg.frame_number);
+                        return;
+                    }
+                    states.nextZslShutterFrameNum = msg.frame_number + 1;
+                } else {
+                    if (msg.frame_number < states.nextShutterFrameNum) {
+                        SET_ERR("Shutter notification out-of-order. Expected "
+                                "notification for frame %d, got frame %d",
+                                states.nextShutterFrameNum, msg.frame_number);
+                        return;
+                    }
+                    states.nextShutterFrameNum = msg.frame_number + 1;
+                }
+            }
+
+            r.shutterTimestamp = msg.timestamp;
+            if (r.hasCallback) {
+                ALOGVV("Camera %s: %s: Shutter fired for frame %d (id %d) at %" PRId64,
+                    states.cameraId.string(), __FUNCTION__,
+                    msg.frame_number, r.resultExtras.requestId, msg.timestamp);
+                // Call listener, if any
+                if (states.listener != nullptr) {
+                    states.listener->notifyShutter(r.resultExtras, msg.timestamp);
+                }
+                // send pending result and buffers
+                sendCaptureResult(states,
+                    r.pendingMetadata, r.resultExtras,
+                    r.collectedPartialResult, msg.frame_number,
+                    r.hasInputBuffer, r.zslCapture && r.stillCapture,
+                    r.cameraIdsWithZoom, r.physicalMetadatas);
+            }
+            bool timestampIncreasing = !(r.zslCapture || r.hasInputBuffer);
+            returnOutputBuffers(
+                    states.useHalBufManager, states.listener,
+                    r.pendingOutputBuffers.array(),
+                    r.pendingOutputBuffers.size(), r.shutterTimestamp, timestampIncreasing,
+                    r.outputSurfaces, r.resultExtras);
+            r.pendingOutputBuffers.clear();
+
+            removeInFlightRequestIfReadyLocked(states, idx);
+        }
+    }
+    if (idx < 0) {
+        SET_ERR("Shutter notification for non-existent frame number %d",
+                msg.frame_number);
+    }
+}
+
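+// Maps a HAL error notification onto the corresponding ICameraDeviceCallbacks error code,
+// updates the affected in-flight request (if any), and notifies the listener.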
+void notifyError(CaptureOutputStates& states, const camera3_error_msg_t &msg) {
+    ATRACE_CALL();
+    // Map camera HAL error codes to ICameraDeviceCallback error codes
+    // Index into this with the HAL error code
+    static const int32_t halErrorMap[CAMERA3_MSG_NUM_ERRORS] = {
+        // 0 = Unused error code
+        hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_INVALID_ERROR,
+        // 1 = CAMERA3_MSG_ERROR_DEVICE
+        hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DEVICE,
+        // 2 = CAMERA3_MSG_ERROR_REQUEST
+        hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST,
+        // 3 = CAMERA3_MSG_ERROR_RESULT
+        hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT,
+        // 4 = CAMERA3_MSG_ERROR_BUFFER
+        hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER
+    };
+
+    int32_t errorCode =
+            ((msg.error_code >= 0) &&
+                    (msg.error_code < CAMERA3_MSG_NUM_ERRORS)) ?
+            halErrorMap[msg.error_code] :
+            hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_INVALID_ERROR;
+
+    int streamId = 0;
+    String16 physicalCameraId;
+    if (msg.error_stream != nullptr) {
+        Camera3Stream *stream =
+                Camera3Stream::cast(msg.error_stream);
+        streamId = stream->getId();
+        physicalCameraId = String16(stream->physicalCameraId());
+    }
+    ALOGV("Camera %s: %s: HAL error, frame %d, stream %d: %d",
+            states.cameraId.string(), __FUNCTION__, msg.frame_number,
+            streamId, msg.error_code);
+
+    CaptureResultExtras resultExtras;
+    switch (errorCode) {
+        case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DEVICE:
+            // SET_ERR calls into listener to notify application
+            SET_ERR("Camera HAL reported serious device error");
+            break;
+        case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST:
+        case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT:
+        case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER:
+            {
+                std::lock_guard<std::mutex> l(states.inflightLock);
+                ssize_t idx = states.inflightMap.indexOfKey(msg.frame_number);
+                if (idx >= 0) {
+                    InFlightRequest &r = states.inflightMap.editValueAt(idx);
+                    r.requestStatus = msg.error_code;
+                    resultExtras = r.resultExtras;
+                    bool logicalDeviceResultError = false;
+                    if (hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT ==
+                            errorCode) {
+                        if (physicalCameraId.size() > 0) {
+                            String8 cameraId(physicalCameraId);
+                            auto iter = r.physicalCameraIds.find(cameraId);
+                            if (iter == r.physicalCameraIds.end()) {
+                                ALOGE("%s: Reported result failure for physical camera device: %s "
+                                        " which is not part of the respective request!",
+                                        __FUNCTION__, cameraId.string());
+                                break;
+                            }
+                            r.physicalCameraIds.erase(iter);
+                            resultExtras.errorPhysicalCameraId = physicalCameraId;
+                        } else {
+                            logicalDeviceResultError = true;
+                        }
+                    }
+
+                    if (logicalDeviceResultError
+                            ||  hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST ==
+                            errorCode) {
+                        r.skipResultMetadata = true;
+                    }
+                    if (logicalDeviceResultError) {
+                        // In case of missing result check whether the buffers
+                        // returned. If they returned, then remove inflight
+                        // request.
+                        // TODO: should we call this for ERROR_CAMERA_REQUEST as well?
+                        //       otherwise we are depending on HAL to send the buffers back after
+                        //       calling notifyError. Not sure if that's in the spec.
+                        removeInFlightRequestIfReadyLocked(states, idx);
+                    }
+                } else {
+                    resultExtras.frameNumber = msg.frame_number;
+                    ALOGE("Camera %s: %s: cannot find in-flight request on "
+                            "frame %" PRId64 " error", states.cameraId.string(), __FUNCTION__,
+                            resultExtras.frameNumber);
+                }
+            }
+            resultExtras.errorStreamId = streamId;
+            if (states.listener != nullptr) {
+                states.listener->notifyError(errorCode, resultExtras);
+            } else {
+                ALOGE("Camera %s: %s: no listener available",
+                        states.cameraId.string(), __FUNCTION__);
+            }
+            break;
+        default:
+            // SET_ERR calls notifyError
+            SET_ERR("Unknown error message from HAL: %d", msg.error_code);
+            break;
+    }
+}
+
+void notify(CaptureOutputStates& states, const camera3_notify_msg *msg) {
+    switch (msg->type) {
+        case CAMERA3_MSG_ERROR: {
+            notifyError(states, msg->message.error);
+            break;
+        }
+        case CAMERA3_MSG_SHUTTER: {
+            notifyShutter(states, msg->message.shutter);
+            break;
+        }
+        default:
+            SET_ERR("Unknown notify message from HAL: %d",
+                    msg->type);
+    }
+}
+
+void notify(CaptureOutputStates& states,
+        const hardware::camera::device::V3_2::NotifyMsg& msg) {
+    using android::hardware::camera::device::V3_2::MsgType;
+    using android::hardware::camera::device::V3_2::ErrorCode;
+
+    ATRACE_CALL();
+    camera3_notify_msg m;
+    switch (msg.type) {
+        case MsgType::ERROR:
+            m.type = CAMERA3_MSG_ERROR;
+            m.message.error.frame_number = msg.msg.error.frameNumber;
+            if (msg.msg.error.errorStreamId >= 0) {
+                sp<Camera3StreamInterface> stream =
+                        states.outputStreams.get(msg.msg.error.errorStreamId);
+                if (stream == nullptr) {
+                    ALOGE("%s: Frame %d: Invalid error stream id %d", __FUNCTION__,
+                            m.message.error.frame_number, msg.msg.error.errorStreamId);
+                    return;
+                }
+                m.message.error.error_stream = stream->asHalStream();
+            } else {
+                m.message.error.error_stream = nullptr;
+            }
+            switch (msg.msg.error.errorCode) {
+                case ErrorCode::ERROR_DEVICE:
+                    m.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
+                    break;
+                case ErrorCode::ERROR_REQUEST:
+                    m.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
+                    break;
+                case ErrorCode::ERROR_RESULT:
+                    m.message.error.error_code = CAMERA3_MSG_ERROR_RESULT;
+                    break;
+                case ErrorCode::ERROR_BUFFER:
+                    m.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
+                    break;
+            }
+            break;
+        case MsgType::SHUTTER:
+            m.type = CAMERA3_MSG_SHUTTER;
+            m.message.shutter.frame_number = msg.msg.shutter.frameNumber;
+            m.message.shutter.timestamp = msg.msg.shutter.timestamp;
+            break;
+    }
+    notify(states, &m);
+}
+
+void requestStreamBuffers(RequestBufferStates& states,
+        const hardware::hidl_vec<hardware::camera::device::V3_5::BufferRequest>& bufReqs,
+        hardware::camera::device::V3_5::ICameraDeviceCallback::requestStreamBuffers_cb _hidl_cb) {
+    using android::hardware::camera::device::V3_2::BufferStatus;
+    using android::hardware::camera::device::V3_2::StreamBuffer;
+    using android::hardware::camera::device::V3_5::BufferRequestStatus;
+    using android::hardware::camera::device::V3_5::StreamBufferRet;
+    using android::hardware::camera::device::V3_5::StreamBufferRequestError;
+
+    std::lock_guard<std::mutex> lock(states.reqBufferLock);
+
+    hardware::hidl_vec<StreamBufferRet> bufRets;
+    if (!states.useHalBufManager) {
+        ALOGE("%s: Camera %s does not support HAL buffer management",
+                __FUNCTION__, states.cameraId.string());
+        _hidl_cb(BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS, bufRets);
+        return;
+    }
+
+    SortedVector<int32_t> streamIds;
+    ssize_t sz = streamIds.setCapacity(bufReqs.size());
+    if (sz < 0 || static_cast<size_t>(sz) != bufReqs.size()) {
+        ALOGE("%s: failed to allocate memory for %zu buffer requests",
+                __FUNCTION__, bufReqs.size());
+        _hidl_cb(BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS, bufRets);
+        return;
+    }
+
+    if (bufReqs.size() > states.outputStreams.size()) {
+        ALOGE("%s: too many buffer requests (%zu > # of output streams %zu)",
+                __FUNCTION__, bufReqs.size(), states.outputStreams.size());
+        _hidl_cb(BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS, bufRets);
+        return;
+    }
+
+    // Check for repeated streamId
+    for (const auto& bufReq : bufReqs) {
+        if (streamIds.indexOf(bufReq.streamId) != NAME_NOT_FOUND) {
+            ALOGE("%s: Stream %d appear multiple times in buffer requests",
+                    __FUNCTION__, bufReq.streamId);
+            _hidl_cb(BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS, bufRets);
+            return;
+        }
+        streamIds.add(bufReq.streamId);
+    }
+
+    if (!states.reqBufferIntf.startRequestBuffer()) {
+        ALOGE("%s: request buffer disallowed while camera service is configuring",
+                __FUNCTION__);
+        _hidl_cb(BufferRequestStatus::FAILED_CONFIGURING, bufRets);
+        return;
+    }
+
+    bufRets.resize(bufReqs.size());
+
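+    // Track the overall outcome so the final status can be OK (all requests
+    // succeeded), FAILED_PARTIAL (only some succeeded), or FAILED_UNKNOWN.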
+    bool allReqsSucceeds = true;
+    bool oneReqSucceeds = false;
+    for (size_t i = 0; i < bufReqs.size(); i++) {
+        const auto& bufReq = bufReqs[i];
+        auto& bufRet = bufRets[i];
+        int32_t streamId = bufReq.streamId;
+        sp<Camera3OutputStreamInterface> outputStream = states.outputStreams.get(streamId);
+        if (outputStream == nullptr) {
+            ALOGE("%s: Output stream id %d not found!", __FUNCTION__, streamId);
+            hardware::hidl_vec<StreamBufferRet> emptyBufRets;
+            _hidl_cb(BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS, emptyBufRets);
+            states.reqBufferIntf.endRequestBuffer();
+            return;
+        }
+
+        if (outputStream->isAbandoned()) {
+            bufRet.val.error(StreamBufferRequestError::STREAM_DISCONNECTED);
+            allReqsSucceeds = false;
+            continue;
+        }
+
+        bufRet.streamId = streamId;
+        size_t handOutBufferCount = outputStream->getOutstandingBuffersCount();
+        uint32_t numBuffersRequested = bufReq.numBuffersRequested;
+        size_t totalHandout = handOutBufferCount + numBuffersRequested;
+        uint32_t maxBuffers = outputStream->asHalStream()->max_buffers;
+        if (totalHandout > maxBuffers) {
+            // Not able to allocate enough buffers. Exit early for this stream.
+            ALOGE("%s: requested too many buffers for stream %d: at HAL: %zu + requesting: %d"
+                    " > max: %d", __FUNCTION__, streamId, handOutBufferCount,
+                    numBuffersRequested, maxBuffers);
+            bufRet.val.error(StreamBufferRequestError::MAX_BUFFER_EXCEEDED);
+            allReqsSucceeds = false;
+            continue;
+        }
+
+        hardware::hidl_vec<StreamBuffer> tmpRetBuffers(numBuffersRequested);
+        bool currentReqSucceeds = true;
+        std::vector<camera3_stream_buffer_t> streamBuffers(numBuffersRequested);
+        size_t numAllocatedBuffers = 0;
+        size_t numPushedInflightBuffers = 0;
+        for (size_t b = 0; b < numBuffersRequested; b++) {
+            camera3_stream_buffer_t& sb = streamBuffers[b];
+            // Since this method can run concurrently with the request thread,
+            // we need to update the wait duration every time we call getBuffer.
+            nsecs_t waitDuration = states.reqBufferIntf.getWaitDuration();
+            status_t res = outputStream->getBuffer(&sb, waitDuration);
+            if (res != OK) {
+                if (res == NO_INIT || res == DEAD_OBJECT) {
+                    ALOGV("%s: Can't get output buffer for stream %d: %s (%d)",
+                            __FUNCTION__, streamId, strerror(-res), res);
+                    bufRet.val.error(StreamBufferRequestError::STREAM_DISCONNECTED);
+                } else {
+                    ALOGE("%s: Can't get output buffer for stream %d: %s (%d)",
+                            __FUNCTION__, streamId, strerror(-res), res);
+                    if (res == TIMED_OUT || res == NO_MEMORY) {
+                        bufRet.val.error(StreamBufferRequestError::NO_BUFFER_AVAILABLE);
+                    } else {
+                        bufRet.val.error(StreamBufferRequestError::UNKNOWN_ERROR);
+                    }
+                }
+                currentReqSucceeds = false;
+                break;
+            }
+            numAllocatedBuffers++;
+
+            buffer_handle_t *buffer = sb.buffer;
+            auto pair = states.bufferRecordsIntf.getBufferId(*buffer, streamId);
+            bool isNewBuffer = pair.first;
+            uint64_t bufferId = pair.second;
+            StreamBuffer& hBuf = tmpRetBuffers[b];
+
+            hBuf.streamId = streamId;
+            hBuf.bufferId = bufferId;
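+            // Only pass the buffer handle across when this bufferId is new to
+            // the HAL; for a known id the HAL looks up its cached handle, so
+            // nullptr is sent instead.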
+            hBuf.buffer = (isNewBuffer) ? *buffer : nullptr;
+            hBuf.status = BufferStatus::OK;
+            hBuf.releaseFence = nullptr;
+
+            native_handle_t *acquireFence = nullptr;
+            if (sb.acquire_fence != -1) {
+                acquireFence = native_handle_create(1,0);
+                acquireFence->data[0] = sb.acquire_fence;
+            }
+            hBuf.acquireFence.setTo(acquireFence, /*shouldOwn*/true);
+            hBuf.releaseFence = nullptr;
+
+            res = states.bufferRecordsIntf.pushInflightRequestBuffer(bufferId, buffer, streamId);
+            if (res != OK) {
+                ALOGE("%s: Can't get register request buffers for stream %d: %s (%d)",
+                        __FUNCTION__, streamId, strerror(-res), res);
+                bufRet.val.error(StreamBufferRequestError::UNKNOWN_ERROR);
+                currentReqSucceeds = false;
+                break;
+            }
+            numPushedInflightBuffers++;
+        }
+        if (currentReqSucceeds) {
+            bufRet.val.buffers(std::move(tmpRetBuffers));
+            oneReqSucceeds = true;
+        } else {
+            allReqsSucceeds = false;
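+            // Roll back this stream's partial progress: pop the inflight
+            // records already pushed and return the allocated buffers to the
+            // stream in error state.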
+            for (size_t b = 0; b < numPushedInflightBuffers; b++) {
+                StreamBuffer& hBuf = tmpRetBuffers[b];
+                buffer_handle_t* buffer;
+                status_t res = states.bufferRecordsIntf.popInflightRequestBuffer(
+                        hBuf.bufferId, &buffer);
+                if (res != OK) {
+                    SET_ERR("%s: popInflightRequestBuffer failed for stream %d: %s (%d)",
+                            __FUNCTION__, streamId, strerror(-res), res);
+                }
+            }
+            for (size_t b = 0; b < numAllocatedBuffers; b++) {
+                camera3_stream_buffer_t& sb = streamBuffers[b];
+                sb.acquire_fence = -1;
+                sb.status = CAMERA3_BUFFER_STATUS_ERROR;
+            }
+            returnOutputBuffers(states.useHalBufManager, /*listener*/nullptr,
+                    streamBuffers.data(), numAllocatedBuffers, 0);
+        }
+    }
+
+    _hidl_cb(allReqsSucceeds ? BufferRequestStatus::OK :
+            oneReqSucceeds ? BufferRequestStatus::FAILED_PARTIAL :
+                             BufferRequestStatus::FAILED_UNKNOWN,
+            bufRets);
+    states.reqBufferIntf.endRequestBuffer();
+}
+
+void returnStreamBuffers(ReturnBufferStates& states,
+        const hardware::hidl_vec<hardware::camera::device::V3_2::StreamBuffer>& buffers) {
+    if (!states.useHalBufManager) {
+        ALOGE("%s: Camera %s does not support HAL buffer managerment",
+                __FUNCTION__, states.cameraId.string());
+        return;
+    }
+
+    for (const auto& buf : buffers) {
+        if (buf.bufferId == BUFFER_ID_NO_BUFFER) {
+            ALOGE("%s: cannot return a buffer without bufferId", __FUNCTION__);
+            continue;
+        }
+
+        buffer_handle_t* buffer;
+        status_t res = states.bufferRecordsIntf.popInflightRequestBuffer(buf.bufferId, &buffer);
+
+        if (res != OK) {
+            ALOGE("%s: cannot find in-flight buffer %" PRIu64 " for stream %d",
+                    __FUNCTION__, buf.bufferId, buf.streamId);
+            continue;
+        }
+
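+        // The HAL is handing these buffers back unused, so return them to the
+        // stream marked as error.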
+        camera3_stream_buffer_t streamBuffer;
+        streamBuffer.buffer = buffer;
+        streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
+        streamBuffer.acquire_fence = -1;
+        streamBuffer.release_fence = -1;
+
+        if (buf.releaseFence == nullptr) {
+            streamBuffer.release_fence = -1;
+        } else if (buf.releaseFence->numFds == 1) {
+            streamBuffer.release_fence = dup(buf.releaseFence->data[0]);
+        } else {
+            ALOGE("%s: Invalid release fence, fd count is %d, not 1",
+                    __FUNCTION__, buf.releaseFence->numFds);
+            continue;
+        }
+
+        sp<Camera3StreamInterface> stream = states.outputStreams.get(buf.streamId);
+        if (stream == nullptr) {
+            ALOGE("%s: Output stream id %d not found!", __FUNCTION__, buf.streamId);
+            continue;
+        }
+        streamBuffer.stream = stream->asHalStream();
+        returnOutputBuffers(states.useHalBufManager, /*listener*/nullptr,
+                &streamBuffer, /*size*/1, /*timestamp*/ 0);
+    }
+}
+
+void flushInflightRequests(FlushInflightReqStates& states) {
+    ATRACE_CALL();
+    { // First return buffers cached in mInFlightMap
+        std::lock_guard<std::mutex> l(states.inflightLock);
+        for (size_t idx = 0; idx < states.inflightMap.size(); idx++) {
+            const InFlightRequest &request = states.inflightMap.valueAt(idx);
+            returnOutputBuffers(
+                states.useHalBufManager, states.listener,
+                request.pendingOutputBuffers.array(),
+                request.pendingOutputBuffers.size(), 0,
+                /*timestampIncreasing*/true, request.outputSurfaces,
+                request.resultExtras);
+        }
+        states.inflightMap.clear();
+        states.inflightIntf.onInflightMapFlushedLocked();
+    }
+
+    // Then return all inflight buffers not returned by HAL
+    std::vector<std::pair<int32_t, int32_t>> inflightKeys;
+    states.flushBufferIntf.getInflightBufferKeys(&inflightKeys);
+
+    // Inflight buffers for HAL buffer manager
+    std::vector<uint64_t> inflightRequestBufferKeys;
+    states.flushBufferIntf.getInflightRequestBufferKeys(&inflightRequestBufferKeys);
+
+    // (streamId, frameNumber, buffer_handle_t*) tuple for all inflight buffers.
+    // frameNumber will be -1 for buffers from HAL buffer manager
+    std::vector<std::tuple<int32_t, int32_t, buffer_handle_t*>> inflightBuffers;
+    inflightBuffers.reserve(inflightKeys.size() + inflightRequestBufferKeys.size());
+
+    for (auto& pair : inflightKeys) {
+        int32_t frameNumber = pair.first;
+        int32_t streamId = pair.second;
+        buffer_handle_t* buffer;
+        status_t res = states.bufferRecordsIntf.popInflightBuffer(frameNumber, streamId, &buffer);
+        if (res != OK) {
+            ALOGE("%s: Frame %d: No in-flight buffer for stream %d",
+                    __FUNCTION__, frameNumber, streamId);
+            continue;
+        }
+        inflightBuffers.push_back(std::make_tuple(streamId, frameNumber, buffer));
+    }
+
+    for (auto& bufferId : inflightRequestBufferKeys) {
+        int32_t streamId = -1;
+        buffer_handle_t* buffer = nullptr;
+        status_t res = states.bufferRecordsIntf.popInflightRequestBuffer(
+                bufferId, &buffer, &streamId);
+        if (res != OK) {
+            ALOGE("%s: cannot find in-flight buffer %" PRIu64, __FUNCTION__, bufferId);
+            continue;
+        }
+        inflightBuffers.push_back(std::make_tuple(streamId, /*frameNumber*/-1, buffer));
+    }
+
+    std::vector<sp<Camera3StreamInterface>> streams = states.flushBufferIntf.getAllStreams();
+
+    for (auto& tuple : inflightBuffers) {
+        status_t res = OK;
+        int32_t streamId = std::get<0>(tuple);
+        int32_t frameNumber = std::get<1>(tuple);
+        buffer_handle_t* buffer = std::get<2>(tuple);
+
+        camera3_stream_buffer_t streamBuffer;
+        streamBuffer.buffer = buffer;
+        streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
+        streamBuffer.acquire_fence = -1;
+        streamBuffer.release_fence = -1;
+
+        for (auto& stream : streams) {
+            if (streamId == stream->getId()) {
+                // Return buffer to deleted stream
+                camera3_stream* halStream = stream->asHalStream();
+                streamBuffer.stream = halStream;
+                switch (halStream->stream_type) {
+                    case CAMERA3_STREAM_OUTPUT:
+                        res = stream->returnBuffer(streamBuffer, /*timestamp*/ 0,
+                                /*timestampIncreasing*/true, std::vector<size_t> (), frameNumber);
+                        if (res != OK) {
+                            ALOGE("%s: Can't return output buffer for frame %d to"
+                                  " stream %d: %s (%d)",  __FUNCTION__,
+                                  frameNumber, streamId, strerror(-res), res);
+                        }
+                        break;
+                    case CAMERA3_STREAM_INPUT:
+                        res = stream->returnInputBuffer(streamBuffer);
+                        if (res != OK) {
+                            ALOGE("%s: Can't return input buffer for frame %d to"
+                                  " stream %d: %s (%d)",  __FUNCTION__,
+                                  frameNumber, streamId, strerror(-res), res);
+                        }
+                        break;
+                    default: // Bi-directional stream is deprecated
+                        ALOGE("%s: stream %d has unknown stream type %d",
+                                __FUNCTION__, streamId, halStream->stream_type);
+                        break;
+                }
+                break;
+            }
+        }
+    }
+}
+
+} // namespace camera3
+} // namespace android
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.h b/services/camera/libcameraservice/device3/Camera3OutputUtils.h
new file mode 100644
index 0000000..47d8095
--- /dev/null
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.h
@@ -0,0 +1,143 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA3_OUTPUT_UTILS_H
+#define ANDROID_SERVERS_CAMERA3_OUTPUT_UTILS_H
+
+#include <memory>
+#include <mutex>
+
+#include <cutils/native_handle.h>
+
+#include <fmq/MessageQueue.h>
+
+#include <common/CameraDeviceBase.h>
+
+#include "device3/BufferUtils.h"
+#include "device3/DistortionMapper.h"
+#include "device3/ZoomRatioMapper.h"
+#include "device3/InFlightRequest.h"
+#include "device3/Camera3Stream.h"
+#include "device3/Camera3OutputStreamInterface.h"
+#include "utils/TagMonitor.h"
+
+namespace android {
+
+using ResultMetadataQueue = hardware::MessageQueue<uint8_t, hardware::kSynchronizedReadWrite>;
+
+namespace camera3 {
+
+    /**
+     * Helper methods shared between Camera3Device/Camera3OfflineSession for HAL callbacks
+     */
+    // helper function to return the output buffers to output streams.
+    void returnOutputBuffers(
+            bool useHalBufManager,
+            sp<NotificationListener> listener, // Only needed when outputSurfaces is not empty
+            const camera3_stream_buffer_t *outputBuffers,
+            size_t numBuffers, nsecs_t timestamp, bool timestampIncreasing = true,
+            // The following arguments are only meant for surface sharing use case
+            const SurfaceMap& outputSurfaces = SurfaceMap{},
+            // Used to send buffer error callback when failing to return buffer
+            const CaptureResultExtras &resultExtras = CaptureResultExtras{});
+
+    // Camera3Device/Camera3OfflineSession internal states used in notify/processCaptureResult
+    // callbacks
+    struct CaptureOutputStates {
+        const String8& cameraId;
+        std::mutex& inflightLock;
+        InFlightRequestMap& inflightMap; // end of inflightLock scope
+        std::mutex& outputLock;
+        List<CaptureResult>& resultQueue;
+        std::condition_variable& resultSignal;
+        uint32_t& nextShutterFrameNum;
+        uint32_t& nextReprocShutterFrameNum;
+        uint32_t& nextZslShutterFrameNum;
+        uint32_t& nextResultFrameNum;
+        uint32_t& nextReprocResultFrameNum;
+        uint32_t& nextZslResultFrameNum; // end of outputLock scope
+        const bool useHalBufManager;
+        const bool usePartialResult;
+        const bool needFixupMonoChrome;
+        const uint32_t numPartialResults;
+        const metadata_vendor_id_t vendorTagId;
+        const CameraMetadata& deviceInfo;
+        const std::unordered_map<std::string, CameraMetadata>& physicalDeviceInfoMap;
+        std::unique_ptr<ResultMetadataQueue>& fmq;
+        std::unordered_map<std::string, camera3::DistortionMapper>& distortionMappers;
+        std::unordered_map<std::string, camera3::ZoomRatioMapper>& zoomRatioMappers;
+        TagMonitor& tagMonitor;
+        sp<Camera3Stream> inputStream;
+        StreamSet& outputStreams;
+        sp<NotificationListener> listener;
+        SetErrorInterface& setErrIntf;
+        InflightRequestUpdateInterface& inflightIntf;
+        BufferRecordsInterface& bufferRecordsIntf;
+    };
+
+    // Handle one capture result. Assume callers hold the lock to serialize all
+    // processCaptureResult calls
+    void processOneCaptureResultLocked(
+            CaptureOutputStates& states,
+            const hardware::camera::device::V3_2::CaptureResult& result,
+            const hardware::hidl_vec<
+                    hardware::camera::device::V3_4::PhysicalCameraMetadata> physicalCameraMetadata);
+
+    // Handle one notify message
+    void notify(CaptureOutputStates& states,
+            const hardware::camera::device::V3_2::NotifyMsg& msg);
+
+    struct RequestBufferStates {
+        const String8& cameraId;
+        std::mutex& reqBufferLock; // lock to serialize request buffer calls
+        const bool useHalBufManager;
+        StreamSet& outputStreams;
+        SetErrorInterface& setErrIntf;
+        BufferRecordsInterface& bufferRecordsIntf;
+        RequestBufferInterface& reqBufferIntf;
+    };
+
+    void requestStreamBuffers(RequestBufferStates& states,
+            const hardware::hidl_vec<hardware::camera::device::V3_5::BufferRequest>& bufReqs,
+            hardware::camera::device::V3_5::ICameraDeviceCallback::requestStreamBuffers_cb _hidl_cb);
+
+    struct ReturnBufferStates {
+        const String8& cameraId;
+        const bool useHalBufManager;
+        StreamSet& outputStreams;
+        BufferRecordsInterface& bufferRecordsIntf;
+    };
+
+    void returnStreamBuffers(ReturnBufferStates& states,
+            const hardware::hidl_vec<hardware::camera::device::V3_2::StreamBuffer>& buffers);
+
+    struct FlushInflightReqStates {
+        const String8& cameraId;
+        std::mutex& inflightLock;
+        InFlightRequestMap& inflightMap; // end of inflightLock scope
+        const bool useHalBufManager;
+        sp<NotificationListener> listener;
+        InflightRequestUpdateInterface& inflightIntf;
+        BufferRecordsInterface& bufferRecordsIntf;
+        FlushBufferInterface& flushBufferIntf;
+    };
+
+    void flushInflightRequests(FlushInflightReqStates& states);
+} // namespace camera3
+
+} // namespace android
+
+#endif
diff --git a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
index b5e37c2..e645e05 100644
--- a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
@@ -65,6 +65,12 @@
             const std::vector<size_t> &removedSurfaceIds,
             KeyedVector<sp<Surface>, size_t> *outputMap/*out*/);
 
+    virtual bool getOfflineProcessingSupport() const {
+        // As per the camera spec, shared streams currently do not support
+        // offline mode.
+        return false;
+    }
+
 private:
 
     static const size_t kMaxOutputs = 4;
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp
index f707ef8..7916ddb 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp
@@ -151,6 +151,14 @@
     return mPhysicalCameraId;
 }
 
+void Camera3Stream::setOfflineProcessingSupport(bool support) {
+    mSupportOfflineProcessing = support;
+}
+
+bool Camera3Stream::getOfflineProcessingSupport() const {
+    return mSupportOfflineProcessing;
+}
+
 status_t Camera3Stream::forceToIdle() {
     ATRACE_CALL();
     Mutex::Autolock l(mLock);
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.h b/services/camera/libcameraservice/device3/Camera3Stream.h
index 805df82..d768d3d 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.h
+++ b/services/camera/libcameraservice/device3/Camera3Stream.h
@@ -167,6 +167,9 @@
     android_dataspace getOriginalDataSpace() const;
     const String8&    physicalCameraId() const;
 
+    void              setOfflineProcessingSupport(bool) override;
+    bool              getOfflineProcessingSupport() const override;
+
     camera3_stream*   asHalStream() override {
         return this;
     }
@@ -592,6 +595,8 @@
 
     String8 mPhysicalCameraId;
     nsecs_t mLastTimestamp;
+
+    bool mSupportOfflineProcessing = false;
 }; // class Camera3Stream
 
 }; // namespace camera3
diff --git a/services/camera/libcameraservice/device3/Camera3StreamInterface.h b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
index 73f501a..667e3bb 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
@@ -23,6 +23,7 @@
 #include "Camera3StreamBufferListener.h"
 #include "Camera3StreamBufferFreedListener.h"
 
+struct camera3_stream;
 struct camera3_stream_buffer;
 
 namespace android {
@@ -54,6 +55,7 @@
         android_dataspace dataSpace;
         uint64_t consumerUsage;
         bool finalized = false;
+        bool supportsOffline = false;
         OutputStreamInfo() :
             width(-1), height(-1), format(-1), dataSpace(HAL_DATASPACE_UNKNOWN),
             consumerUsage(0) {}
@@ -99,6 +101,12 @@
     virtual android_dataspace getOriginalDataSpace() const = 0;
 
     /**
+     * Offline processing
+     */
+    virtual void setOfflineProcessingSupport(bool support) = 0;
+    virtual bool getOfflineProcessingSupport() const = 0;
+
+    /**
      * Get a HAL3 handle for the stream, without starting stream configuration.
      */
     virtual camera3_stream* asHalStream() = 0;
diff --git a/services/camera/libcameraservice/device3/CoordinateMapper.cpp b/services/camera/libcameraservice/device3/CoordinateMapper.cpp
new file mode 100644
index 0000000..d62f397
--- /dev/null
+++ b/services/camera/libcameraservice/device3/CoordinateMapper.cpp
@@ -0,0 +1,49 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <system/camera_metadata_tags.h>
+
+#include "device3/CoordinateMapper.h"
+
+namespace android {
+
+namespace camera3 {
+
+/**
+ * Metadata keys to correct when adjusting coordinates for distortion correction
+ */
+
+// Both capture request and result
+constexpr std::array<uint32_t, 3> CoordinateMapper::kMeteringRegionsToCorrect = {
+    ANDROID_CONTROL_AF_REGIONS,
+    ANDROID_CONTROL_AE_REGIONS,
+    ANDROID_CONTROL_AWB_REGIONS
+};
+
+// Both capture request and result
+constexpr std::array<uint32_t, 1> CoordinateMapper::kRectsToCorrect = {
+    ANDROID_SCALER_CROP_REGION,
+};
+
+// Only for capture result
+constexpr std::array<uint32_t, 2> CoordinateMapper::kResultPointsToCorrectNoClamp = {
+    ANDROID_STATISTICS_FACE_RECTANGLES, // Says rectangles, is really points
+    ANDROID_STATISTICS_FACE_LANDMARKS,
+};
+
+} // namespace camera3
+
+} // namespace android
diff --git a/services/camera/libcameraservice/device3/CoordinateMapper.h b/services/camera/libcameraservice/device3/CoordinateMapper.h
new file mode 100644
index 0000000..5164856
--- /dev/null
+++ b/services/camera/libcameraservice/device3/CoordinateMapper.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_COORDINATEMAPPER_H
+#define ANDROID_SERVERS_COORDINATEMAPPER_H
+
+#include <array>
+
+namespace android {
+
+namespace camera3 {
+
+class CoordinateMapper {
+    // Right now only stores metadata tags containing 2D coordinates
+    // to be corrected.
+protected:
+    // Metadata key lists to correct
+
+    // Both capture request and result
+    static const std::array<uint32_t, 3> kMeteringRegionsToCorrect;
+
+    // Both capture request and result
+    static const std::array<uint32_t, 1> kRectsToCorrect;
+
+    // Only for capture results; don't clamp
+    static const std::array<uint32_t, 2> kResultPointsToCorrectNoClamp;
+}; // class CoordinateMapper
+
+} // namespace camera3
+
+} // namespace android
+
+#endif
diff --git a/services/camera/libcameraservice/device3/DistortionMapper.cpp b/services/camera/libcameraservice/device3/DistortionMapper.cpp
index ae7af8e..8132225 100644
--- a/services/camera/libcameraservice/device3/DistortionMapper.cpp
+++ b/services/camera/libcameraservice/device3/DistortionMapper.cpp
@@ -27,41 +27,14 @@
 
 namespace camera3 {
 
-/**
- * Metadata keys to correct when adjusting coordinates for distortion correction
- */
-
-// Both capture request and result
-constexpr std::array<uint32_t, 3> DistortionMapper::kMeteringRegionsToCorrect = {
-    ANDROID_CONTROL_AF_REGIONS,
-    ANDROID_CONTROL_AE_REGIONS,
-    ANDROID_CONTROL_AWB_REGIONS
-};
-
-// Only capture request
-constexpr std::array<uint32_t, 1> DistortionMapper::kRequestRectsToCorrect = {
-    ANDROID_SCALER_CROP_REGION,
-};
-
-// Only for capture result
-constexpr std::array<uint32_t, 1> DistortionMapper::kResultRectsToCorrect = {
-    ANDROID_SCALER_CROP_REGION,
-};
-
-// Only for capture result
-constexpr std::array<uint32_t, 2> DistortionMapper::kResultPointsToCorrectNoClamp = {
-    ANDROID_STATISTICS_FACE_RECTANGLES, // Says rectangles, is really points
-    ANDROID_STATISTICS_FACE_LANDMARKS,
-};
-
 
 DistortionMapper::DistortionMapper() : mValidMapping(false), mValidGrids(false) {
 }
 
-bool DistortionMapper::isDistortionSupported(const CameraMetadata &result) {
+bool DistortionMapper::isDistortionSupported(const CameraMetadata &deviceInfo) {
     bool isDistortionCorrectionSupported = false;
     camera_metadata_ro_entry_t distortionCorrectionModes =
-            result.find(ANDROID_DISTORTION_CORRECTION_AVAILABLE_MODES);
+            deviceInfo.find(ANDROID_DISTORTION_CORRECTION_AVAILABLE_MODES);
     for (size_t i = 0; i < distortionCorrectionModes.count; i++) {
         if (distortionCorrectionModes.data.u8[i] !=
                 ANDROID_DISTORTION_CORRECTION_MODE_OFF) {
@@ -124,7 +97,7 @@
                 if (res != OK) return res;
             }
         }
-        for (auto rect : kRequestRectsToCorrect) {
+        for (auto rect : kRectsToCorrect) {
             e = request->find(rect);
             res = mapCorrectedRectToRaw(e.data.i32, e.count / 4, /*clamp*/true);
             if (res != OK) return res;
@@ -160,7 +133,7 @@
                 if (res != OK) return res;
             }
         }
-        for (auto rect : kResultRectsToCorrect) {
+        for (auto rect : kRectsToCorrect) {
             e = result->find(rect);
             res = mapRawRectToCorrected(e.data.i32, e.count / 4, /*clamp*/true);
             if (res != OK) return res;
@@ -390,7 +363,6 @@
     return OK;
 }
 
-
 status_t DistortionMapper::mapCorrectedRectToRaw(int32_t *rects, int rectCount, bool clamp,
         bool simple) const {
     if (!mValidMapping) return INVALID_OPERATION;
diff --git a/services/camera/libcameraservice/device3/DistortionMapper.h b/services/camera/libcameraservice/device3/DistortionMapper.h
index 4c0a1a6..7dcb67b 100644
--- a/services/camera/libcameraservice/device3/DistortionMapper.h
+++ b/services/camera/libcameraservice/device3/DistortionMapper.h
@@ -22,6 +22,7 @@
 #include <mutex>
 
 #include "camera/CameraMetadata.h"
+#include "device3/CoordinateMapper.h"
 
 namespace android {
 
@@ -31,10 +32,19 @@
  * Utilities to transform between raw (distorted) and warped (corrected) coordinate systems
  * for cameras that support geometric distortion
  */
-class DistortionMapper {
+class DistortionMapper : private CoordinateMapper {
   public:
     DistortionMapper();
 
+    DistortionMapper(const DistortionMapper& other) :
+            mValidMapping(other.mValidMapping), mValidGrids(other.mValidGrids),
+            mFx(other.mFx), mFy(other.mFy), mCx(other.mCx), mCy(other.mCy), mS(other.mS),
+            mInvFx(other.mInvFx), mInvFy(other.mInvFy), mK(other.mK),
+            mArrayWidth(other.mArrayWidth), mArrayHeight(other.mArrayHeight),
+            mActiveWidth(other.mActiveWidth), mActiveHeight(other.mActiveHeight),
+            mArrayDiffX(other.mArrayDiffX), mArrayDiffY(other.mArrayDiffY),
+            mCorrectedGrid(other.mCorrectedGrid), mDistortedGrid(other.mDistortedGrid) {}
+
     /**
      * Check whether distortion correction is supported by the camera HAL
      */
@@ -150,20 +160,6 @@
     // Fuzziness for float inequality tests
     constexpr static float kFloatFuzz = 1e-4;
 
-    // Metadata key lists to correct
-
-    // Both capture request and result
-    static const std::array<uint32_t, 3> kMeteringRegionsToCorrect;
-
-    // Only capture request
-    static const std::array<uint32_t, 1> kRequestRectsToCorrect;
-
-    // Only capture result
-    static const std::array<uint32_t, 1> kResultRectsToCorrect;
-
-    // Only for capture results; don't clamp
-    static const std::array<uint32_t, 2> kResultPointsToCorrectNoClamp;
-
     // Single implementation for various mapCorrectedToRaw methods
     template<typename T>
     status_t mapCorrectedToRawImpl(T* coordPairs, int coordCount, bool clamp, bool simple) const;
@@ -186,7 +182,7 @@
     // pre-calculated inverses for speed
     float mInvFx, mInvFy;
     // radial/tangential distortion parameters
-    float mK[5];
+    std::array<float, 5> mK;
 
     // pre-correction active array dimensions
     float mArrayWidth, mArrayHeight;
diff --git a/services/camera/libcameraservice/device3/InFlightRequest.h b/services/camera/libcameraservice/device3/InFlightRequest.h
new file mode 100644
index 0000000..ceeaa71
--- /dev/null
+++ b/services/camera/libcameraservice/device3/InFlightRequest.h
@@ -0,0 +1,148 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA3_INFLIGHT_REQUEST_H
+#define ANDROID_SERVERS_CAMERA3_INFLIGHT_REQUEST_H
+
+#include <set>
+
+#include <camera/CaptureResult.h>
+#include <camera/CameraMetadata.h>
+#include <utils/String8.h>
+#include <utils/Timers.h>
+
+#include "hardware/camera3.h"
+
+#include "common/CameraDeviceBase.h"
+
+namespace android {
+
+namespace camera3 {
+
+struct InFlightRequest {
+    // Set by notify() SHUTTER call.
+    nsecs_t shutterTimestamp;
+    // Set by process_capture_result().
+    nsecs_t sensorTimestamp;
+    int     requestStatus;
+    // Set by process_capture_result call with valid metadata
+    bool    haveResultMetadata;
+    // Decremented by calls to process_capture_result with valid output
+    // and input buffers
+    int     numBuffersLeft;
+    CaptureResultExtras resultExtras;
+    // If this request has any input buffer
+    bool hasInputBuffer;
+
+    // The last metadata that the framework received from the HAL and
+    // has not yet sent out because the shutter event hasn't arrived.
+    // It's added by process_capture_result and sent when the framework
+    // receives the shutter event.
+    CameraMetadata pendingMetadata;
+
+    // The metadata of the partial results that the framework has received
+    // from the HAL so far and has already sent out.
+    CameraMetadata collectedPartialResult;
+
+    // Buffers are added by process_capture_result when output buffers
+    // return from HAL but framework has not yet received the shutter
+    // event. They will be returned to the streams when framework receives
+    // the shutter event.
+    Vector<camera3_stream_buffer_t> pendingOutputBuffers;
+
+    // Whether this inflight request's shutter and result callback are to be
+    // called. The policy is that if the request is the last one in the constrained
+    // high speed recording request list, this flag will be true. If the request list
+    // is not for constrained high speed recording, this flag will also be true.
+    bool hasCallback;
+
+    // Maximum expected frame duration for this request.
+    // For manual captures, equal to the max of requested exposure time and frame duration
+    // For auto-exposure modes, equal to 1/(lower end of target FPS range)
+    nsecs_t maxExpectedDuration;
+
+    // Whether the result metadata for this request is to be skipped. The
+    // result metadata should be skipped in the case of
+    // REQUEST/RESULT error.
+    bool skipResultMetadata;
+
+    // The physical camera ids being requested.
+    std::set<String8> physicalCameraIds;
+
+    // Map of physicalCameraId <-> Metadata
+    std::vector<PhysicalCaptureResultInfo> physicalMetadatas;
+
+    // Indicates a still capture request.
+    bool stillCapture;
+
+    // Indicates a ZSL capture request
+    bool zslCapture;
+
+    // Requested camera ids (both logical and physical) with zoomRatio != 1.0f
+    std::set<std::string> cameraIdsWithZoom;
+
+    // What shared surfaces an output should go to
+    SurfaceMap outputSurfaces;
+
+    // TODO: dedupe
+    static const nsecs_t kDefaultExpectedDuration = 100000000; // 100 ms
+
+    // Default constructor needed by KeyedVector
+    InFlightRequest() :
+            shutterTimestamp(0),
+            sensorTimestamp(0),
+            requestStatus(OK),
+            haveResultMetadata(false),
+            numBuffersLeft(0),
+            hasInputBuffer(false),
+            hasCallback(true),
+            maxExpectedDuration(kDefaultExpectedDuration),
+            skipResultMetadata(false),
+            stillCapture(false),
+            zslCapture(false) {
+    }
+
+    InFlightRequest(int numBuffers, CaptureResultExtras extras, bool hasInput,
+            bool hasAppCallback, nsecs_t maxDuration,
+            const std::set<String8>& physicalCameraIdSet, bool isStillCapture,
+            bool isZslCapture, const std::set<std::string>& idsWithZoom,
+            const SurfaceMap& outSurfaces = SurfaceMap{}) :
+            shutterTimestamp(0),
+            sensorTimestamp(0),
+            requestStatus(OK),
+            haveResultMetadata(false),
+            numBuffersLeft(numBuffers),
+            resultExtras(extras),
+            hasInputBuffer(hasInput),
+            hasCallback(hasAppCallback),
+            maxExpectedDuration(maxDuration),
+            skipResultMetadata(false),
+            physicalCameraIds(physicalCameraIdSet),
+            stillCapture(isStillCapture),
+            zslCapture(isZslCapture),
+            cameraIdsWithZoom(idsWithZoom),
+            outputSurfaces(outSurfaces) {
+    }
+};
+
+// Map from frame number to the in-flight request state
+typedef KeyedVector<uint32_t, InFlightRequest> InFlightRequestMap;
+
+} // namespace camera3
+
+} // namespace android
+
+#endif
diff --git a/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp b/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp
new file mode 100644
index 0000000..84da45a
--- /dev/null
+++ b/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp
@@ -0,0 +1,370 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "Camera3-ZoomRatioMapper"
+//#define LOG_NDEBUG 0
+
+#include <algorithm>
+
+#include "device3/ZoomRatioMapper.h"
+
+namespace android {
+
+namespace camera3 {
+
+
+status_t ZoomRatioMapper::initZoomRatioInTemplate(CameraMetadata *request) {
+    camera_metadata_entry_t entry;
+    entry = request->find(ANDROID_CONTROL_ZOOM_RATIO);
+    float defaultZoomRatio = 1.0f;
+    if (entry.count == 0) {
+        return request->update(ANDROID_CONTROL_ZOOM_RATIO, &defaultZoomRatio, 1);
+    }
+    return OK;
+}
+
+status_t ZoomRatioMapper::overrideZoomRatioTags(
+        CameraMetadata* deviceInfo, bool* supportNativeZoomRatio) {
+    if (deviceInfo == nullptr || supportNativeZoomRatio == nullptr) {
+        return BAD_VALUE;
+    }
+
+    camera_metadata_entry_t entry;
+    entry = deviceInfo->find(ANDROID_CONTROL_ZOOM_RATIO_RANGE);
+    if (entry.count != 2 && entry.count != 0) return BAD_VALUE;
+
+    // HAL has native zoom ratio support
+    if (entry.count == 2) {
+        *supportNativeZoomRatio = true;
+        return OK;
+    }
+
+    // HAL has no native zoom ratio support
+    *supportNativeZoomRatio = false;
+
+    entry = deviceInfo->find(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM);
+    if (entry.count != 1) {
+        ALOGI("%s: Camera device doesn't support SCALER_AVAILABLE_MAX_DIGITAL_ZOOM key!",
+                __FUNCTION__);
+        return OK;
+    }
+
+    float zoomRange[] = {1.0f, entry.data.f[0]};
+    status_t res = deviceInfo->update(ANDROID_CONTROL_ZOOM_RATIO_RANGE, zoomRange, 2);
+    if (res != OK) {
+        ALOGE("%s: Failed to update CONTROL_ZOOM_RATIO_RANGE key: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+        return res;
+    }
+
+    std::vector<int32_t> requestKeys;
+    entry = deviceInfo->find(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS);
+    if (entry.count > 0) {
+        requestKeys.insert(requestKeys.end(), entry.data.i32, entry.data.i32 + entry.count);
+    }
+    requestKeys.push_back(ANDROID_CONTROL_ZOOM_RATIO);
+    res = deviceInfo->update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
+            requestKeys.data(), requestKeys.size());
+    if (res != OK) {
+        ALOGE("%s: Failed to update REQUEST_AVAILABLE_REQUEST_KEYS: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+        return res;
+    }
+
+    std::vector<int32_t> resultKeys;
+    entry = deviceInfo->find(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS);
+    if (entry.count > 0) {
+        resultKeys.insert(resultKeys.end(), entry.data.i32, entry.data.i32 + entry.count);
+    }
+    resultKeys.push_back(ANDROID_CONTROL_ZOOM_RATIO);
+    res = deviceInfo->update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
+            resultKeys.data(), resultKeys.size());
+    if (res != OK) {
+        ALOGE("%s: Failed to update REQUEST_AVAILABLE_RESULT_KEYS: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+        return res;
+    }
+
+    std::vector<int32_t> charKeys;
+    entry = deviceInfo->find(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS);
+    if (entry.count > 0) {
+        charKeys.insert(charKeys.end(), entry.data.i32, entry.data.i32 + entry.count);
+    }
+    charKeys.push_back(ANDROID_CONTROL_ZOOM_RATIO_RANGE);
+    res = deviceInfo->update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
+            charKeys.data(), charKeys.size());
+    if (res != OK) {
+        ALOGE("%s: Failed to update REQUEST_AVAILABLE_CHARACTERISTICS_KEYS: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+        return res;
+    }
+
+    return OK;
+}
+
+ZoomRatioMapper::ZoomRatioMapper(const CameraMetadata* deviceInfo,
+        bool supportNativeZoomRatio, bool usePrecorrectArray) {
+    camera_metadata_ro_entry_t entry;
+
+    entry = deviceInfo->find(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE);
+    if (entry.count != 4) return;
+    int32_t arrayW = entry.data.i32[2];
+    int32_t arrayH = entry.data.i32[3];
+
+    entry = deviceInfo->find(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE);
+    if (entry.count != 4) return;
+    int32_t activeW = entry.data.i32[2];
+    int32_t activeH = entry.data.i32[3];
+
+    if (usePrecorrectArray) {
+        mArrayWidth = arrayW;
+        mArrayHeight = arrayH;
+    } else {
+        mArrayWidth = activeW;
+        mArrayHeight = activeH;
+    }
+    mHalSupportsZoomRatio = supportNativeZoomRatio;
+
+    ALOGV("%s: array size: %d x %d, mHalSupportsZoomRatio %d",
+            __FUNCTION__, mArrayWidth, mArrayHeight, mHalSupportsZoomRatio);
+    mIsValid = true;
+}
+
+status_t ZoomRatioMapper::updateCaptureRequest(CameraMetadata* request) {
+    if (!mIsValid) return INVALID_OPERATION;
+
+    status_t res = OK;
+    bool zoomRatioIs1 = true;
+    camera_metadata_entry_t entry;
+
+    entry = request->find(ANDROID_CONTROL_ZOOM_RATIO);
+    if (entry.count == 1 && entry.data.f[0] != 1.0f) {
+        zoomRatioIs1 = false;
+    }
+
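+    // Two directions of conversion: if the HAL understands zoomRatio but the
+    // app supplied only cropRegion (ratio == 1), derive a zoomRatio from the
+    // crop; if the HAL only understands cropRegion but the app supplied a
+    // zoomRatio, fold that ratio into the crop region.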
+    if (mHalSupportsZoomRatio && zoomRatioIs1) {
+        res = separateZoomFromCropLocked(request, false/*isResult*/);
+    } else if (!mHalSupportsZoomRatio && !zoomRatioIs1) {
+        res = combineZoomAndCropLocked(request, false/*isResult*/);
+    }
+
+    // If CONTROL_ZOOM_RATIO is in request, but HAL doesn't support
+    // CONTROL_ZOOM_RATIO, remove it from the request.
+    if (!mHalSupportsZoomRatio && entry.count == 1) {
+        request->erase(ANDROID_CONTROL_ZOOM_RATIO);
+    }
+
+    return res;
+}
+
+status_t ZoomRatioMapper::updateCaptureResult(CameraMetadata* result, bool requestedZoomRatioIs1) {
+    if (!mIsValid) return INVALID_OPERATION;
+
+    status_t res = OK;
+
+    if (mHalSupportsZoomRatio && requestedZoomRatioIs1) {
+        res = combineZoomAndCropLocked(result, true/*isResult*/);
+    } else if (!mHalSupportsZoomRatio && !requestedZoomRatioIs1) {
+        res = separateZoomFromCropLocked(result, true/*isResult*/);
+    } else {
+        camera_metadata_entry_t entry = result->find(ANDROID_CONTROL_ZOOM_RATIO);
+        if (entry.count == 0) {
+            float zoomRatio1x = 1.0f;
+            result->update(ANDROID_CONTROL_ZOOM_RATIO, &zoomRatio1x, 1);
+        }
+    }
+
+    return res;
+}
+
+float ZoomRatioMapper::deriveZoomRatio(const CameraMetadata* metadata) {
+    float zoomRatio = 1.0;
+
+    camera_metadata_ro_entry_t entry;
+    entry = metadata->find(ANDROID_SCALER_CROP_REGION);
+    if (entry.count != 4) return zoomRatio;
+
+    // Center of the preCorrection/active size
+    float arrayCenterX = mArrayWidth / 2.0;
+    float arrayCenterY = mArrayHeight / 2.0;
+
+    // Re-map crop region to coordinate system centered to (arrayCenterX,
+    // arrayCenterY).
+    float cropRegionLeft = arrayCenterX - entry.data.i32[0];
+    float cropRegionTop = arrayCenterY - entry.data.i32[1];
+    float cropRegionRight = entry.data.i32[0] + entry.data.i32[2] - arrayCenterX;
+    float cropRegionBottom = entry.data.i32[1] + entry.data.i32[3] - arrayCenterY;
+
+    // Calculate the scaling factor for left, top, bottom, right
+    float zoomRatioLeft = std::max(mArrayWidth / (2 * cropRegionLeft), 1.0f);
+    float zoomRatioTop = std::max(mArrayHeight / (2 * cropRegionTop), 1.0f);
+    float zoomRatioRight = std::max(mArrayWidth / (2 * cropRegionRight), 1.0f);
+    float zoomRatioBottom = std::max(mArrayHeight / (2 * cropRegionBottom), 1.0f);
+
+    // Use minimum scaling factor to handle letterboxing or pillarboxing
+    zoomRatio = std::min(std::min(zoomRatioLeft, zoomRatioRight),
+            std::min(zoomRatioTop, zoomRatioBottom));
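+    // For example, a 4000x3000 array with a centered crop region of
+    // (1000, 750, 2000, 1500) gives a per-edge factor of 2.0 on every side,
+    // so the derived zoomRatio is 2.0.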
+
+    ALOGV("%s: derived zoomRatio is %f", __FUNCTION__, zoomRatio);
+    return zoomRatio;
+}
+
+status_t ZoomRatioMapper::separateZoomFromCropLocked(CameraMetadata* metadata, bool isResult) {
+    status_t res;
+    float zoomRatio = deriveZoomRatio(metadata);
+
+    // Update zoomRatio metadata tag
+    res = metadata->update(ANDROID_CONTROL_ZOOM_RATIO, &zoomRatio, 1);
+    if (res != OK) {
+        ALOGE("%s: Failed to update ANDROID_CONTROL_ZOOM_RATIO: %s(%d)",
+                __FUNCTION__, strerror(-res), res);
+        return res;
+    }
+
+    // Scale regions using zoomRatio
+    camera_metadata_entry_t entry;
+    for (auto region : kMeteringRegionsToCorrect) {
+        entry = metadata->find(region);
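+        // Metering regions are stored as (xmin, ymin, xmax, ymax, weight)
+        // 5-tuples; zero-weight regions are unused and skipped.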
+        for (size_t j = 0; j < entry.count; j += 5) {
+            int32_t weight = entry.data.i32[j + 4];
+            if (weight == 0) {
+                continue;
+            }
+            // Top-left is inclusively clamped
+            scaleCoordinates(entry.data.i32 + j, 1, zoomRatio, ClampInclusive);
+            // Bottom-right is exclusively clamped
+            scaleCoordinates(entry.data.i32 + j + 2, 1, zoomRatio, ClampExclusive);
+        }
+    }
+
+    for (auto rect : kRectsToCorrect) {
+        entry = metadata->find(rect);
+        scaleRects(entry.data.i32, entry.count / 4, zoomRatio);
+    }
+
+    if (isResult) {
+        for (auto pts : kResultPointsToCorrectNoClamp) {
+            entry = metadata->find(pts);
+            scaleCoordinates(entry.data.i32, entry.count / 2, zoomRatio, ClampOff);
+        }
+    }
+
+    return OK;
+}
+
+status_t ZoomRatioMapper::combineZoomAndCropLocked(CameraMetadata* metadata, bool isResult) {
+    float zoomRatio = 1.0f;
+    camera_metadata_entry_t entry;
+    entry = metadata->find(ANDROID_CONTROL_ZOOM_RATIO);
+    if (entry.count == 1) {
+        zoomRatio = entry.data.f[0];
+    }
+
+    // Unscale regions with zoomRatio
+    status_t res;
+    for (auto region : kMeteringRegionsToCorrect) {
+        entry = metadata->find(region);
+        for (size_t j = 0; j < entry.count; j += 5) {
+            int32_t weight = entry.data.i32[j + 4];
+            if (weight == 0) {
+                continue;
+            }
+            // Top-left is inclusively clamped
+            scaleCoordinates(entry.data.i32 + j, 1, 1.0 / zoomRatio, ClampInclusive);
+            // Bottom-right is exclusively clamped
+            scaleCoordinates(entry.data.i32 + j + 2, 1, 1.0 / zoomRatio, ClampExclusive);
+        }
+    }
+    for (auto rect : kRectsToCorrect) {
+        entry = metadata->find(rect);
+        scaleRects(entry.data.i32, entry.count / 4, 1.0 / zoomRatio);
+    }
+    if (isResult) {
+        for (auto pts : kResultPointsToCorrectNoClamp) {
+            entry = metadata->find(pts);
+            scaleCoordinates(entry.data.i32, entry.count / 2, 1.0 / zoomRatio, ClampOff);
+        }
+    }
+
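+    // The zoom has been folded into the crop region above, so reset the
+    // reported zoomRatio to a neutral 1.0x.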
+    zoomRatio = 1.0;
+    res = metadata->update(ANDROID_CONTROL_ZOOM_RATIO, &zoomRatio, 1);
+    if (res != OK) {
+        return res;
+    }
+
+    return OK;
+}
+
+void ZoomRatioMapper::scaleCoordinates(int32_t* coordPairs, int coordCount,
+        float scaleRatio, ClampMode clamp) {
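+    // Scale each (x, y) pair about the array center: ratios greater than 1
+    // push points away from the center, ratios less than 1 pull them toward it.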
+    for (int i = 0; i < coordCount * 2; i += 2) {
+        float x = coordPairs[i];
+        float y = coordPairs[i + 1];
+        float xCentered = x - mArrayWidth / 2;
+        float yCentered = y - mArrayHeight / 2;
+        float scaledX = xCentered * scaleRatio;
+        float scaledY = yCentered * scaleRatio;
+        scaledX += mArrayWidth / 2;
+        scaledY += mArrayHeight / 2;
+        // Clamp to within activeArray/preCorrectionActiveArray
+        coordPairs[i] = static_cast<int32_t>(scaledX);
+        coordPairs[i+1] = static_cast<int32_t>(scaledY);
+        if (clamp != ClampOff) {
+            int32_t right, bottom;
+            if (clamp == ClampInclusive) {
+                right = mArrayWidth - 1;
+                bottom = mArrayHeight - 1;
+            } else {
+                right = mArrayWidth;
+                bottom = mArrayHeight;
+            }
+            coordPairs[i] =
+                    std::min(right, std::max(0, coordPairs[i]));
+            coordPairs[i+1] =
+                    std::min(bottom, std::max(0, coordPairs[i+1]));
+        }
+        ALOGV("%s: coordinates: %d, %d", __FUNCTION__, coordPairs[i], coordPairs[i+1]);
+    }
+}
+
+void ZoomRatioMapper::scaleRects(int32_t* rects, int rectCount,
+        float scaleRatio) {
+    for (int i = 0; i < rectCount * 4; i += 4) {
+        // Map from (l, t, width, height) to (l, t, r, b).
+        // [l, t] is inclusive, and [r, b] is exclusive.
+        int32_t coords[4] = {
+            rects[i],
+            rects[i + 1],
+            rects[i] + rects[i + 2],
+            rects[i + 1] + rects[i + 3]
+        };
+
+        // top-left
+        scaleCoordinates(coords, 1, scaleRatio, ClampInclusive);
+        // bottom-right
+        scaleCoordinates(coords+2, 1, scaleRatio, ClampExclusive);
+
+        // Map back to (l, t, width, height)
+        rects[i] = coords[0];
+        rects[i + 1] = coords[1];
+        rects[i + 2] = coords[2] - coords[0];
+        rects[i + 3] = coords[3] - coords[1];
+    }
+}
+
+} // namespace camera3
+
+} // namespace android
diff --git a/services/camera/libcameraservice/device3/ZoomRatioMapper.h b/services/camera/libcameraservice/device3/ZoomRatioMapper.h
new file mode 100644
index 0000000..16b223b
--- /dev/null
+++ b/services/camera/libcameraservice/device3/ZoomRatioMapper.h
@@ -0,0 +1,96 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_ZOOMRATIOMAPPER_H
+#define ANDROID_SERVERS_ZOOMRATIOMAPPER_H
+
+#include <utils/Errors.h>
+#include <array>
+
+#include "camera/CameraMetadata.h"
+#include "device3/CoordinateMapper.h"
+
+namespace android {
+
+namespace camera3 {
+
+/**
+ * Utilities to convert between the new zoomRatio and existing cropRegion
+ * metadata tags. Note that this class does conversions in 2 scenarios:
+ * - HAL supports zoomRatio and the application uses cropRegion, or
+ * - HAL doesn't support zoomRatio, but the application uses zoomRatio
+ */
+class ZoomRatioMapper : private CoordinateMapper {
+  public:
+    ZoomRatioMapper() = default;
+    ZoomRatioMapper(const CameraMetadata *deviceInfo,
+            bool supportNativeZoomRatio, bool usePrecorrectArray);
+    ZoomRatioMapper(const ZoomRatioMapper& other) :
+            mHalSupportsZoomRatio(other.mHalSupportsZoomRatio),
+            mArrayWidth(other.mArrayWidth), mArrayHeight(other.mArrayHeight) {}
+
+    /**
+     * Initialize request template with valid zoomRatio if necessary.
+     */
+    static status_t initZoomRatioInTemplate(CameraMetadata *request);
+
+    /**
+     * Override zoomRatio related tags in the static metadata.
+     */
+    static status_t overrideZoomRatioTags(
+            CameraMetadata* deviceInfo, bool* supportNativeZoomRatio);
+
+    /**
+     * Update capture request to handle both cropRegion and zoomRatio.
+     */
+    status_t updateCaptureRequest(CameraMetadata *request);
+
+    /**
+     * Update capture result to handle both cropRegion and zoomRatio.
+     */
+    status_t updateCaptureResult(CameraMetadata *request, bool requestedZoomRatioIs1);
+
+  public: // Visible for testing. Do not use concurrently.
+    enum ClampMode {
+        ClampOff,
+        ClampInclusive,
+        ClampExclusive,
+    };
+
+    void scaleCoordinates(int32_t* coordPairs, int coordCount,
+            float scaleRatio, ClampMode clamp);
+
+    bool isValid() { return mIsValid; }
+  private:
+    // const after construction
+    bool mHalSupportsZoomRatio;
+    // active array / pre-correction array dimension
+    int32_t mArrayWidth, mArrayHeight;
+
+    bool mIsValid = false;
+
+    float deriveZoomRatio(const CameraMetadata* metadata);
+    void scaleRects(int32_t* rects, int rectCount, float scaleRatio);
+
+    status_t separateZoomFromCropLocked(CameraMetadata* metadata, bool isResult);
+    status_t combineZoomAndCropLocked(CameraMetadata* metadata, bool isResult);
+};
+
+} // namespace camera3
+
+} // namespace android
+
+#endif
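
Taken together with setupTestMapper() in ZoomRatioTest.cpp further down, the intended call order for this API is: overrideZoomRatioTags() on the static metadata, then construct the mapper, then run requests and results through it. A hedged wiring sketch; the function name is invented and the real call sites are not shown in this excerpt:

#include "device3/ZoomRatioMapper.h"

using android::CameraMetadata;
using android::camera3::ZoomRatioMapper;

android::status_t wireUpMapper(CameraMetadata* deviceInfo, CameraMetadata* request) {
    // Patch up the static metadata first; this also reports whether the HAL
    // natively supports ANDROID_CONTROL_ZOOM_RATIO.
    bool supportNativeZoomRatio = false;
    android::status_t res =
            ZoomRatioMapper::overrideZoomRatioTags(deviceInfo, &supportNativeZoomRatio);
    if (res != android::OK) return res;

    ZoomRatioMapper mapper(deviceInfo, supportNativeZoomRatio, /*usePrecorrectArray*/ false);
    if (!mapper.isValid()) return android::INVALID_OPERATION;

    // Make sure the request carries a zoomRatio, then map it in place.
    res = ZoomRatioMapper::initZoomRatioInTemplate(request);
    if (res != android::OK) return res;
    return mapper.updateCaptureRequest(request);
}
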
diff --git a/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp b/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp
index 675ad24..bf89ca5 100644
--- a/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp
+++ b/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp
@@ -201,8 +201,9 @@
         return HStatus::ILLEGAL_ARGUMENT;
     }
 
+    std::vector<int> offlineStreamIds;
     binder::Status ret = mDeviceRemote->endConfigure(convertFromHidl(operatingMode),
-                                                     cameraMetadata);
+                                                     cameraMetadata, &offlineStreamIds);
     return B2HStatus(ret);
 }
 
diff --git a/services/camera/libcameraservice/tests/DepthProcessorTest.cpp b/services/camera/libcameraservice/tests/DepthProcessorTest.cpp
index 2162514..673c149 100644
--- a/services/camera/libcameraservice/tests/DepthProcessorTest.cpp
+++ b/services/camera/libcameraservice/tests/DepthProcessorTest.cpp
@@ -20,7 +20,6 @@
 #include <array>
 #include <random>
 
-#include <dlfcn.h>
 #include <gtest/gtest.h>
 
 #include "../common/DepthPhotoProcessor.h"
@@ -36,19 +35,6 @@
 static const size_t kTestBufferDepthSize (kTestBufferWidth * kTestBufferHeight);
 static const size_t kSeed = 1234;
 
-void linkToDepthPhotoLibrary(void **libHandle /*out*/,
-        process_depth_photo_frame *processFrameFunc /*out*/) {
-    ASSERT_NE(libHandle, nullptr);
-    ASSERT_NE(processFrameFunc, nullptr);
-
-    *libHandle = dlopen(kDepthPhotoLibrary, RTLD_NOW | RTLD_LOCAL);
-    if (*libHandle != nullptr) {
-        *processFrameFunc = reinterpret_cast<camera3::process_depth_photo_frame> (
-                dlsym(*libHandle, kDepthPhotoProcessFunction));
-        ASSERT_NE(*processFrameFunc, nullptr);
-    }
-}
-
 void generateColorJpegBuffer(int jpegQuality, ExifOrientation orientationValue, bool includeExif,
         bool switchDimensions, std::vector<uint8_t> *colorJpegBuffer /*out*/) {
     ASSERT_NE(colorJpegBuffer, nullptr);
@@ -91,26 +77,9 @@
     }
 }
 
-TEST(DepthProcessorTest, LinkToLibray) {
-    void *libHandle;
-    process_depth_photo_frame processFunc;
-    linkToDepthPhotoLibrary(&libHandle, &processFunc);
-    if (libHandle != nullptr) {
-        dlclose(libHandle);
-    }
-}
-
 TEST(DepthProcessorTest, BadInput) {
-    void *libHandle;
     int jpegQuality = 95;
 
-    process_depth_photo_frame processFunc;
-    linkToDepthPhotoLibrary(&libHandle, &processFunc);
-    if (libHandle == nullptr) {
-        // Depth library no present, nothing more to test.
-        return;
-    }
-
     DepthPhotoInputFrame inputFrame;
     // Worst case both depth and confidence maps have the same size as the main color image.
     inputFrame.mMaxJpegSize = inputFrame.mMainJpegSize * 3;
@@ -128,37 +97,27 @@
     inputFrame.mMainJpegWidth = kTestBufferWidth;
     inputFrame.mMainJpegHeight = kTestBufferHeight;
     inputFrame.mJpegQuality = jpegQuality;
-    ASSERT_NE(processFunc(inputFrame, depthPhotoBuffer.size(), depthPhotoBuffer.data(),
+    ASSERT_NE(processDepthPhotoFrame(inputFrame, depthPhotoBuffer.size(), depthPhotoBuffer.data(),
                 &actualDepthPhotoSize), 0);
 
     inputFrame.mMainJpegBuffer = reinterpret_cast<const char*> (colorJpegBuffer.data());
     inputFrame.mMainJpegSize = colorJpegBuffer.size();
-    ASSERT_NE(processFunc(inputFrame, depthPhotoBuffer.size(), depthPhotoBuffer.data(),
+    ASSERT_NE(processDepthPhotoFrame(inputFrame, depthPhotoBuffer.size(), depthPhotoBuffer.data(),
                 &actualDepthPhotoSize), 0);
 
     inputFrame.mDepthMapBuffer = depth16Buffer.data();
     inputFrame.mDepthMapWidth = inputFrame.mDepthMapStride = kTestBufferWidth;
     inputFrame.mDepthMapHeight = kTestBufferHeight;
-    ASSERT_NE(processFunc(inputFrame, depthPhotoBuffer.size(), nullptr,
+    ASSERT_NE(processDepthPhotoFrame(inputFrame, depthPhotoBuffer.size(), nullptr,
                 &actualDepthPhotoSize), 0);
 
-    ASSERT_NE(processFunc(inputFrame, depthPhotoBuffer.size(), depthPhotoBuffer.data(), nullptr),
-            0);
-
-    dlclose(libHandle);
+    ASSERT_NE(processDepthPhotoFrame(inputFrame, depthPhotoBuffer.size(), depthPhotoBuffer.data(),
+                nullptr), 0);
 }
 
 TEST(DepthProcessorTest, BasicDepthPhotoValidation) {
-    void *libHandle;
     int jpegQuality = 95;
 
-    process_depth_photo_frame processFunc;
-    linkToDepthPhotoLibrary(&libHandle, &processFunc);
-    if (libHandle == nullptr) {
-        // Depth library no present, nothing more to test.
-        return;
-    }
-
     std::vector<uint8_t> colorJpegBuffer;
     generateColorJpegBuffer(jpegQuality, ExifOrientation::ORIENTATION_UNDEFINED,
             /*includeExif*/ false, /*switchDimensions*/ false, &colorJpegBuffer);
@@ -180,7 +139,7 @@
 
     std::vector<uint8_t> depthPhotoBuffer(inputFrame.mMaxJpegSize);
     size_t actualDepthPhotoSize = 0;
-    ASSERT_EQ(processFunc(inputFrame, depthPhotoBuffer.size(), depthPhotoBuffer.data(),
+    ASSERT_EQ(processDepthPhotoFrame(inputFrame, depthPhotoBuffer.size(), depthPhotoBuffer.data(),
                 &actualDepthPhotoSize), 0);
     ASSERT_TRUE((actualDepthPhotoSize > 0) && (depthPhotoBuffer.size() >= actualDepthPhotoSize));
 
@@ -196,21 +155,11 @@
     ASSERT_EQ(NV12Compressor::findJpegSize(depthPhotoBuffer.data() + mainJpegSize,
                 actualDepthPhotoSize - mainJpegSize, &depthMapSize), OK);
     ASSERT_TRUE((depthMapSize > 0) && (depthMapSize < (actualDepthPhotoSize - mainJpegSize)));
-
-    dlclose(libHandle);
 }
 
 TEST(DepthProcessorTest, TestDepthPhotoExifOrientation) {
-    void *libHandle;
     int jpegQuality = 95;
 
-    process_depth_photo_frame processFunc;
-    linkToDepthPhotoLibrary(&libHandle, &processFunc);
-    if (libHandle == nullptr) {
-        // Depth library no present, nothing more to test.
-        return;
-    }
-
     ExifOrientation exifOrientations[] = { ExifOrientation::ORIENTATION_UNDEFINED,
             ExifOrientation::ORIENTATION_0_DEGREES, ExifOrientation::ORIENTATION_90_DEGREES,
             ExifOrientation::ORIENTATION_180_DEGREES, ExifOrientation::ORIENTATION_270_DEGREES };
@@ -242,8 +191,8 @@
 
         std::vector<uint8_t> depthPhotoBuffer(inputFrame.mMaxJpegSize);
         size_t actualDepthPhotoSize = 0;
-        ASSERT_EQ(processFunc(inputFrame, depthPhotoBuffer.size(), depthPhotoBuffer.data(),
-                &actualDepthPhotoSize), 0);
+        ASSERT_EQ(processDepthPhotoFrame(inputFrame, depthPhotoBuffer.size(),
+                    depthPhotoBuffer.data(), &actualDepthPhotoSize), 0);
         ASSERT_TRUE((actualDepthPhotoSize > 0) &&
                 (depthPhotoBuffer.size() >= actualDepthPhotoSize));
 
@@ -281,21 +230,11 @@
             ASSERT_EQ(confidenceJpegExifOrientation, exifOrientation);
         }
     }
-
-    dlclose(libHandle);
 }
 
 TEST(DepthProcessorTest, TestDephtPhotoPhysicalRotation) {
-    void *libHandle;
     int jpegQuality = 95;
 
-    process_depth_photo_frame processFunc;
-    linkToDepthPhotoLibrary(&libHandle, &processFunc);
-    if (libHandle == nullptr) {
-        // Depth library no present, nothing more to test.
-        return;
-    }
-
     // In case of physical rotation, the EXIF orientation must always be 0.
     auto exifOrientation = ExifOrientation::ORIENTATION_0_DEGREES;
     DepthPhotoOrientation depthOrientations[] = {
@@ -339,8 +278,8 @@
 
         std::vector<uint8_t> depthPhotoBuffer(inputFrame.mMaxJpegSize);
         size_t actualDepthPhotoSize = 0;
-        ASSERT_EQ(processFunc(inputFrame, depthPhotoBuffer.size(), depthPhotoBuffer.data(),
-                &actualDepthPhotoSize), 0);
+        ASSERT_EQ(processDepthPhotoFrame(inputFrame, depthPhotoBuffer.size(),
+                    depthPhotoBuffer.data(), &actualDepthPhotoSize), 0);
         ASSERT_TRUE((actualDepthPhotoSize > 0) &&
                 (depthPhotoBuffer.size() >= actualDepthPhotoSize));
 
@@ -377,6 +316,4 @@
         ASSERT_EQ(confidenceMapWidth, expectedWidth);
         ASSERT_EQ(confidenceMapHeight, expectedHeight);
     }
-
-    dlclose(libHandle);
 }
diff --git a/services/camera/libcameraservice/tests/ZoomRatioTest.cpp b/services/camera/libcameraservice/tests/ZoomRatioTest.cpp
new file mode 100644
index 0000000..300da09
--- /dev/null
+++ b/services/camera/libcameraservice/tests/ZoomRatioTest.cpp
@@ -0,0 +1,457 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_NDEBUG 0
+#define LOG_TAG "ZoomRatioMapperTest"
+
+#include <gtest/gtest.h>
+#include <utils/Errors.h>
+
+#include "../device3/ZoomRatioMapper.h"
+
+using namespace std;
+using namespace android;
+using namespace android::camera3;
+
+constexpr int kMaxAllowedPixelError = 1;
+constexpr float kMaxAllowedRatioError = 0.1;
+
+constexpr int32_t testActiveArraySize[] = {100, 100, 1024, 768};
+constexpr int32_t testPreCorrActiveArraySize[] = {90, 90, 1044, 788};
+
+constexpr int32_t testDefaultCropSize[][4] = {
+      {0, 0, 1024, 768},   // active array default crop
+      {0, 0, 1044, 788},   // preCorrection active array default crop
+};
+
+constexpr int32_t test2xCropRegion[][4] = {
+      {256, 192, 512, 384}, // active array 2x zoom crop
+      {261, 197, 522, 394}, // preCorrection active array 2x zoom crop
+};
+
+constexpr int32_t testLetterBoxSize[][4] = {
+      {0, 96, 1024, 576}, // active array letterbox crop
+      {0, 106, 1024, 576}, // preCorrection active array letterbox crop
+};
+
+status_t setupTestMapper(ZoomRatioMapper *m, float maxDigitalZoom,
+        const int32_t activeArray[4], const int32_t preCorrectArray[4],
+        bool hasZoomRatioRange, float zoomRatioRange[2],
+        bool usePreCorrectArray) {
+    CameraMetadata deviceInfo;
+
+    deviceInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE, activeArray, 4);
+    deviceInfo.update(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE, preCorrectArray, 4);
+    deviceInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, &maxDigitalZoom, 1);
+    if (hasZoomRatioRange) {
+        deviceInfo.update(ANDROID_CONTROL_ZOOM_RATIO_RANGE, zoomRatioRange, 2);
+    }
+
+    bool supportNativeZoomRatio;
+    status_t res = ZoomRatioMapper::overrideZoomRatioTags(&deviceInfo, &supportNativeZoomRatio);
+    if (res != OK) {
+        return res;
+    }
+
+    *m = ZoomRatioMapper(&deviceInfo, hasZoomRatioRange, usePreCorrectArray);
+    return OK;
+}
+
+TEST(ZoomRatioTest, Initialization) {
+    CameraMetadata deviceInfo;
+    status_t res;
+    camera_metadata_entry_t entry;
+
+    deviceInfo.update(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE,
+            testPreCorrActiveArraySize, 4);
+    deviceInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE, testActiveArraySize, 4);
+
+    // Test initialization from devices not supporting zoomRange
+    float maxDigitalZoom = 4.0f;
+    ZoomRatioMapper mapperNoZoomRange;
+    deviceInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, &maxDigitalZoom, 1);
+    bool supportNativeZoomRatio;
+    res = ZoomRatioMapper::overrideZoomRatioTags(&deviceInfo, &supportNativeZoomRatio);
+    ASSERT_EQ(res, OK);
+    ASSERT_EQ(supportNativeZoomRatio, false);
+    mapperNoZoomRange = ZoomRatioMapper(&deviceInfo,
+            supportNativeZoomRatio, true/*usePreCorrectArray*/);
+    ASSERT_TRUE(mapperNoZoomRange.isValid());
+    mapperNoZoomRange = ZoomRatioMapper(&deviceInfo,
+            supportNativeZoomRatio, false/*usePreCorrectArray*/);
+    ASSERT_TRUE(mapperNoZoomRange.isValid());
+
+    entry = deviceInfo.find(ANDROID_CONTROL_ZOOM_RATIO_RANGE);
+    ASSERT_EQ(entry.count, 2U);
+    ASSERT_EQ(entry.data.f[0], 1.0);
+    ASSERT_EQ(entry.data.f[1], maxDigitalZoom);
+
+    entry = deviceInfo.find(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS);
+    ASSERT_GT(entry.count, 0U);
+    ASSERT_NE(std::find(entry.data.i32, entry.data.i32 + entry.count,
+            ANDROID_CONTROL_ZOOM_RATIO_RANGE), entry.data.i32 + entry.count);
+
+    entry = deviceInfo.find(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS);
+    ASSERT_GT(entry.count, 0U);
+    ASSERT_NE(std::find(entry.data.i32, entry.data.i32 + entry.count,
+            ANDROID_CONTROL_ZOOM_RATIO), entry.data.i32 + entry.count);
+
+    entry = deviceInfo.find(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS);
+    ASSERT_GT(entry.count, 0U);
+    ASSERT_NE(std::find(entry.data.i32, entry.data.i32 + entry.count,
+            ANDROID_CONTROL_ZOOM_RATIO), entry.data.i32 + entry.count);
+
+    // Test initialization from devices supporting zoomRange
+    float ratioRange[2] = {0.2f, maxDigitalZoom};
+    deviceInfo.update(ANDROID_CONTROL_ZOOM_RATIO_RANGE, ratioRange, 2);
+    res = ZoomRatioMapper::overrideZoomRatioTags(&deviceInfo, &supportNativeZoomRatio);
+    ASSERT_EQ(res, OK);
+    ASSERT_EQ(supportNativeZoomRatio, true);
+    ZoomRatioMapper mapperWithZoomRange;
+    mapperWithZoomRange = ZoomRatioMapper(&deviceInfo,
+            supportNativeZoomRatio, true/*usePreCorrectArray*/);
+    ASSERT_TRUE(mapperWithZoomRange.isValid());
+    mapperWithZoomRange = ZoomRatioMapper(&deviceInfo,
+            supportNativeZoomRatio, false/*usePreCorrectArray*/);
+    ASSERT_TRUE(mapperWithZoomRange.isValid());
+
+    entry = deviceInfo.find(ANDROID_CONTROL_ZOOM_RATIO_RANGE);
+    ASSERT_EQ(entry.count, 2U);
+    ASSERT_EQ(entry.data.f[0], ratioRange[0]);
+    ASSERT_EQ(entry.data.f[1], ratioRange[1]);
+
+    // Test default zoom ratio in template
+    CameraMetadata requestTemplate;
+    res = ZoomRatioMapper::initZoomRatioInTemplate(&requestTemplate);
+    ASSERT_EQ(res, OK);
+    entry = requestTemplate.find(ANDROID_CONTROL_ZOOM_RATIO);
+    ASSERT_EQ(entry.count, 1U);
+    ASSERT_EQ(entry.data.f[0], 1.0f);
+
+    float customRatio = 0.5f;
+    res = requestTemplate.update(ANDROID_CONTROL_ZOOM_RATIO, &customRatio, 1);
+    ASSERT_EQ(res, OK);
+    res = ZoomRatioMapper::initZoomRatioInTemplate(&requestTemplate);
+    ASSERT_EQ(res, OK);
+    entry = requestTemplate.find(ANDROID_CONTROL_ZOOM_RATIO);
+    ASSERT_EQ(entry.count, 1U);
+    ASSERT_EQ(entry.data.f[0], customRatio);
+}
+
+void subScaleCoordinatesTest(bool usePreCorrectArray) {
+    ZoomRatioMapper mapper;
+    float maxDigitalZoom = 4.0f;
+    float zoomRatioRange[2];
+    ASSERT_EQ(OK, setupTestMapper(&mapper, maxDigitalZoom,
+            testActiveArraySize, testPreCorrActiveArraySize,
+            false/*hasZoomRatioRange*/, zoomRatioRange,
+            usePreCorrectArray));
+
+    size_t index = 0;
+    int32_t width = testActiveArraySize[2];
+    int32_t height = testActiveArraySize[3];
+    if (usePreCorrectArray) {
+        index = 1;
+        width = testPreCorrActiveArraySize[2];
+        height = testPreCorrActiveArraySize[3];
+    }
+
+    std::array<int32_t, 16> originalCoords = {
+            0, 0, // top-left
+            width, 0, // top-right
+            0, height, // bottom-left
+            width, height, // bottom-right
+            width / 2, height / 2, // center
+            width / 4, height / 4, // top-left after 2x
+            width / 3, height * 2 / 3, // bottom-left after 3x zoom
+            width * 7 / 8, height / 2, // middle-right after 1.33x zoom
+    };
+
+    // Verify 1.0x zoom doesn't change the coordinates
+    auto coords = originalCoords;
+    mapper.scaleCoordinates(coords.data(), coords.size()/2, 1.0f, ZoomRatioMapper::ClampOff);
+    for (size_t i = 0; i < coords.size(); i++) {
+        EXPECT_EQ(coords[i], originalCoords[i]);
+    }
+
+    // Verify 2.0x zoom works as expected (no clamping)
+    std::array<float, 16> expected2xCoords = {
+            - width / 2.0f, - height / 2.0f, // top-left
+            width * 3 / 2.0f, - height / 2.0f, // top-right
+            - width / 2.0f, height * 3 / 2.0f, // bottom-left
+            width * 3 / 2.0f, height * 3 / 2.0f, // bottom-right
+            width / 2.0f, height / 2.0f, // center
+            0, 0, // top-left after 2x
+            width / 6.0f, height - height / 6.0f, // bottom-left after 3x zoom
+            width + width / 4.0f, height / 2.0f, // middle-right after 1.33x zoom
+    };
+    coords = originalCoords;
+    mapper.scaleCoordinates(coords.data(), coords.size()/2, 2.0f, ZoomRatioMapper::ClampOff);
+    for (size_t i = 0; i < coords.size(); i++) {
+        EXPECT_LE(std::abs(coords[i] - expected2xCoords[i]), kMaxAllowedPixelError);
+    }
+
+    // Verify 2.0x zoom works as expected (with inclusive clamping)
+    std::array<float, 16> expected2xCoordsClampedInc = {
+            0, 0, // top-left
+            static_cast<float>(width) - 1, 0, // top-right
+            0, static_cast<float>(height) - 1, // bottom-left
+            static_cast<float>(width) - 1, static_cast<float>(height) - 1, // bottom-right
+            width / 2.0f, height / 2.0f, // center
+            0, 0, // top-left after 2x
+            width / 6.0f, height - height / 6.0f, // bottom-left after 3x zoom
+            static_cast<float>(width) - 1, height / 2.0f, // middle-right after 1.33x zoom
+    };
+    coords = originalCoords;
+    mapper.scaleCoordinates(coords.data(), coords.size()/2, 2.0f, ZoomRatioMapper::ClampInclusive);
+    for (size_t i = 0; i < coords.size(); i++) {
+        EXPECT_LE(std::abs(coords[i] - expected2xCoordsClampedInc[i]), kMaxAllowedPixelError);
+    }
+
+    // Verify 2.0x zoom works as expected (with exclusive clamping)
+    std::array<float, 16> expected2xCoordsClampedExc = {
+            0, 0, // top-left
+            static_cast<float>(width), 0, // top-right
+            0, static_cast<float>(height), // bottom-left
+            static_cast<float>(width), static_cast<float>(height), // bottom-right
+            width / 2.0f, height / 2.0f, // center
+            0, 0, // top-left after 2x
+            width / 6.0f, height - height / 6.0f, // bottom-left after 3x zoom
+            static_cast<float>(width), height / 2.0f, // middle-right after 1.33x zoom
+    };
+    coords = originalCoords;
+    mapper.scaleCoordinates(coords.data(), coords.size()/2, 2.0f, ZoomRatioMapper::ClampExclusive);
+    for (size_t i = 0; i < coords.size(); i++) {
+        EXPECT_LE(std::abs(coords[i] - expected2xCoordsClampedExc[i]), kMaxAllowedPixelError);
+    }
+
+    // Verify 0.33x zoom works as expected
+    std::array<float, 16> expectedZoomOutCoords = {
+            width / 3.0f, height / 3.0f, // top-left
+            width * 2 / 3.0f, height / 3.0f, // top-right
+            width / 3.0f, height * 2 / 3.0f, // bottom-left
+            width * 2 / 3.0f, height * 2 / 3.0f, // bottom-right
+            width / 2.0f, height / 2.0f, // center
+            width * 5 / 12.0f, height * 5 / 12.0f, // top-left after 2x
+            width * 4 / 9.0f, height * 5 / 9.0f, // bottom-left after 3x zoom-in
+            width * 5 / 8.0f, height / 2.0f, // middle-right after 1.33x zoom-in
+    };
+    coords = originalCoords;
+    mapper.scaleCoordinates(coords.data(), coords.size()/2, 1.0f/3, ZoomRatioMapper::ClampOff);
+    for (size_t i = 0; i < coords.size(); i++) {
+        EXPECT_LE(std::abs(coords[i] - expectedZoomOutCoords[i]), kMaxAllowedPixelError);
+    }
+}
+
+TEST(ZoomRatioTest, scaleCoordinatesTest) {
+    subScaleCoordinatesTest(false/*usePreCorrectArray*/);
+    subScaleCoordinatesTest(true/*usePreCorrectArray*/);
+}
+
+void subCropOverMaxDigitalZoomTest(bool usePreCorrectArray) {
+    status_t res;
+    ZoomRatioMapper mapper;
+    float noZoomRatioRange[2];
+    res = setupTestMapper(&mapper, 4.0/*maxDigitalZoom*/,
+            testActiveArraySize, testPreCorrActiveArraySize,
+            false/*hasZoomRatioRange*/, noZoomRatioRange,
+            usePreCorrectArray);
+    ASSERT_EQ(res, OK);
+
+    CameraMetadata metadata;
+    camera_metadata_entry_t entry;
+
+    size_t index = usePreCorrectArray ? 1 : 0;
+    metadata.update(ANDROID_SCALER_CROP_REGION, testDefaultCropSize[index], 4);
+    res = mapper.updateCaptureRequest(&metadata);
+    ASSERT_EQ(res, OK);
+    entry = metadata.find(ANDROID_SCALER_CROP_REGION);
+    ASSERT_EQ(entry.count, 4U);
+    for (int i = 0; i < 4; i ++) {
+        EXPECT_EQ(entry.data.i32[i], testDefaultCropSize[index][i]);
+    }
+
+    metadata.update(ANDROID_SCALER_CROP_REGION, test2xCropRegion[index], 4);
+    res = mapper.updateCaptureResult(&metadata, true/*requestedZoomRatioIs1*/);
+    ASSERT_EQ(res, OK);
+    entry = metadata.find(ANDROID_SCALER_CROP_REGION);
+    ASSERT_EQ(entry.count, 4U);
+    for (int i = 0; i < 4; i ++) {
+        EXPECT_EQ(entry.data.i32[i], test2xCropRegion[index][i]);
+    }
+    entry = metadata.find(ANDROID_CONTROL_ZOOM_RATIO);
+    ASSERT_TRUE(entry.count == 0 || (entry.count == 1 && entry.data.f[0] == 1.0f));
+}
+
+TEST(ZoomRatioTest, CropOverMaxDigitalZoomTest) {
+    subCropOverMaxDigitalZoomTest(false/*usePreCorrectArray*/);
+    subCropOverMaxDigitalZoomTest(true/*usePreCorrectArray*/);
+}
+
+void subCropOverZoomRangeTest(bool usePreCorrectArray) {
+    status_t res;
+    ZoomRatioMapper mapper;
+    float zoomRatioRange[2] = {0.5f, 4.0f};
+    res = setupTestMapper(&mapper, 4.0/*maxDigitalZoom*/,
+            testActiveArraySize, testPreCorrActiveArraySize,
+            true/*hasZoomRatioRange*/, zoomRatioRange,
+            usePreCorrectArray);
+    ASSERT_EQ(res, OK);
+
+    CameraMetadata metadata;
+    camera_metadata_entry_t entry;
+
+    size_t index = usePreCorrectArray ? 1 : 0;
+
+    // 2x zoom crop region, zoomRatio is 1.0f
+    metadata.update(ANDROID_SCALER_CROP_REGION, test2xCropRegion[index], 4);
+    res = mapper.updateCaptureRequest(&metadata);
+    ASSERT_EQ(res, OK);
+    entry = metadata.find(ANDROID_SCALER_CROP_REGION);
+    ASSERT_EQ(entry.count, 4U);
+    for (int i = 0; i < 4; i++) {
+        EXPECT_EQ(entry.data.i32[i], testDefaultCropSize[index][i]);
+    }
+    entry = metadata.find(ANDROID_CONTROL_ZOOM_RATIO);
+    EXPECT_NEAR(entry.data.f[0], 2.0f, kMaxAllowedRatioError);
+
+    res = mapper.updateCaptureResult(&metadata, true/*requestedZoomRatioIs1*/);
+    ASSERT_EQ(res, OK);
+    entry = metadata.find(ANDROID_CONTROL_ZOOM_RATIO);
+    EXPECT_NEAR(entry.data.f[0], 1.0f, kMaxAllowedRatioError);
+    entry = metadata.find(ANDROID_SCALER_CROP_REGION);
+    ASSERT_EQ(entry.count, 4U);
+    for (int i = 0; i < 4; i++) {
+        EXPECT_EQ(entry.data.i32[i], test2xCropRegion[index][i]);
+    }
+
+    // Letter boxing crop region, zoomRatio is 1.0
+    float zoomRatio = 1.0f;
+    metadata.update(ANDROID_CONTROL_ZOOM_RATIO, &zoomRatio, 1);
+    metadata.update(ANDROID_SCALER_CROP_REGION, testLetterBoxSize[index], 4);
+    res = mapper.updateCaptureRequest(&metadata);
+    ASSERT_EQ(res, OK);
+    entry = metadata.find(ANDROID_SCALER_CROP_REGION);
+    ASSERT_EQ(entry.count, 4U);
+    for (int i = 0; i < 4; i++) {
+        EXPECT_EQ(entry.data.i32[i], testLetterBoxSize[index][i]);
+    }
+    entry = metadata.find(ANDROID_CONTROL_ZOOM_RATIO);
+    EXPECT_NEAR(entry.data.f[0], 1.0f, kMaxAllowedRatioError);
+
+    res = mapper.updateCaptureResult(&metadata, true/*requestedZoomRatioIs1*/);
+    ASSERT_EQ(res, OK);
+    entry = metadata.find(ANDROID_SCALER_CROP_REGION);
+    ASSERT_EQ(entry.count, 4U);
+    for (int i = 0; i < 4; i++) {
+        EXPECT_EQ(entry.data.i32[i], testLetterBoxSize[index][i]);
+    }
+    entry = metadata.find(ANDROID_CONTROL_ZOOM_RATIO);
+    EXPECT_NEAR(entry.data.f[0], 1.0f, kMaxAllowedRatioError);
+}
+
+TEST(ZoomRatioTest, CropOverZoomRangeTest) {
+    subCropOverZoomRangeTest(false/*usePreCorrectArray*/);
+    subCropOverZoomRangeTest(true/*usePreCorrectArray*/);
+}
+
+void subZoomOverMaxDigitalZoomTest(bool usePreCorrectArray) {
+    status_t res;
+    ZoomRatioMapper mapper;
+    float noZoomRatioRange[2];
+    res = setupTestMapper(&mapper, 4.0/*maxDigitalZoom*/,
+            testActiveArraySize, testPreCorrActiveArraySize,
+            false/*hasZoomRatioRange*/, noZoomRatioRange,
+            usePreCorrectArray);
+    ASSERT_EQ(res, OK);
+
+    CameraMetadata metadata;
+    float zoomRatio = 3.0f;
+    camera_metadata_entry_t entry;
+
+    size_t index = usePreCorrectArray ? 1 : 0;
+
+    // Full active array crop, zoomRatio is 3.0f
+    metadata.update(ANDROID_SCALER_CROP_REGION, testDefaultCropSize[index], 4);
+    metadata.update(ANDROID_CONTROL_ZOOM_RATIO, &zoomRatio, 1);
+    res = mapper.updateCaptureRequest(&metadata);
+    ASSERT_EQ(res, OK);
+    entry = metadata.find(ANDROID_SCALER_CROP_REGION);
+    ASSERT_EQ(entry.count, 4U);
+    std::array<float, 4> expectedCrop = {
+        testDefaultCropSize[index][2] / 3.0f, /*x*/
+        testDefaultCropSize[index][3] / 3.0f, /*y*/
+        testDefaultCropSize[index][2] / 3.0f, /*width*/
+        testDefaultCropSize[index][3] / 3.0f, /*height*/
+    };
+    for (int i = 0; i < 4; i++) {
+        EXPECT_LE(std::abs(entry.data.i32[i] - expectedCrop[i]), kMaxAllowedPixelError);
+    }
+
+    entry = metadata.find(ANDROID_CONTROL_ZOOM_RATIO);
+    if (entry.count == 1) {
+        EXPECT_NEAR(entry.data.f[0], 1.0f, kMaxAllowedRatioError);
+    }
+}
+
+TEST(ZoomRatioTest, ZoomOverMaxDigitalZoomTest) {
+    subZoomOverMaxDigitalZoomTest(false/*usePreCorrectArray*/);
+    subZoomOverMaxDigitalZoomTest(true/*usePreCorrectArray*/);
+}
+
+void subZoomOverZoomRangeTest(bool usePreCorrectArray) {
+    status_t res;
+    ZoomRatioMapper mapper;
+    float zoomRatioRange[2] = {1.0f, 4.0f};
+    res = setupTestMapper(&mapper, 4.0/*maxDigitalZoom*/,
+            testActiveArraySize, testPreCorrActiveArraySize,
+            true/*hasZoomRatioRange*/, zoomRatioRange,
+            usePreCorrectArray);
+    ASSERT_EQ(res, OK);
+
+    CameraMetadata metadata;
+    float zoomRatio = 3.0f;
+    camera_metadata_entry_t entry;
+    size_t index = usePreCorrectArray ? 1 : 0;
+
+    // Full active array crop, zoomRatio is 3.0f
+    metadata.update(ANDROID_SCALER_CROP_REGION, testDefaultCropSize[index], 4);
+    metadata.update(ANDROID_CONTROL_ZOOM_RATIO, &zoomRatio, 1);
+    res = mapper.updateCaptureRequest(&metadata);
+    ASSERT_EQ(res, OK);
+    entry = metadata.find(ANDROID_SCALER_CROP_REGION);
+    ASSERT_EQ(entry.count, 4U);
+    for (int i = 0; i < 4; i ++) {
+        EXPECT_EQ(entry.data.i32[i], testDefaultCropSize[index][i]);
+    }
+    entry = metadata.find(ANDROID_CONTROL_ZOOM_RATIO);
+    ASSERT_EQ(entry.data.f[0], zoomRatio);
+
+    res = mapper.updateCaptureResult(&metadata, false/*requestedZoomRatioIs1*/);
+    ASSERT_EQ(res, OK);
+    entry = metadata.find(ANDROID_SCALER_CROP_REGION);
+    ASSERT_EQ(entry.count, 4U);
+    for (int i = 0; i < 4; i ++) {
+        EXPECT_EQ(entry.data.i32[i], testDefaultCropSize[index][i]);
+    }
+    entry = metadata.find(ANDROID_CONTROL_ZOOM_RATIO);
+    ASSERT_EQ(entry.data.f[0], zoomRatio);
+}
+
+TEST(ZoomRatioTest, ZoomOverZoomRangeTest) {
+    subZoomOverZoomRangeTest(false/*usePreCorrectArray*/);
+    subZoomOverZoomRangeTest(true/*usePreCorrectArray*/);
+}
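
The expected coordinate tables above all follow one relation, which can be read off the test vectors (this is inferred from the expectations, not taken from the mapper source): each point is scaled about the array center, x' = (x - w/2) * ratio + w/2, and likewise for y. A standalone sanity check of that inference, illustrative only:

#include <cassert>
#include <cmath>

int main() {
    const float w = 1024.0f;  // testActiveArraySize width
    // Scale a coordinate about the array center.
    auto scale = [](float v, float size, float ratio) {
        return (v - size / 2.0f) * ratio + size / 2.0f;
    };
    // 2x zoom maps the corner (0) to -w/2, matching expected2xCoords pre-clamp.
    assert(std::abs(scale(0.0f, w, 2.0f) - (-w / 2.0f)) < 1.0f);
    // 1/3x zoom maps the corner to w/3, matching expectedZoomOutCoords.
    assert(std::abs(scale(0.0f, w, 1.0f / 3.0f) - w / 3.0f) < 1.0f);
    // The array center is a fixed point for any ratio.
    assert(scale(w / 2.0f, w, 2.0f) == w / 2.0f);
    return 0;
}
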
diff --git a/services/camera/libcameraservice/utils/TagMonitor.cpp b/services/camera/libcameraservice/utils/TagMonitor.cpp
index 4037a66..262f962 100644
--- a/services/camera/libcameraservice/utils/TagMonitor.cpp
+++ b/services/camera/libcameraservice/utils/TagMonitor.cpp
@@ -33,6 +33,16 @@
         mVendorTagId(CAMERA_METADATA_INVALID_VENDOR_ID)
 {}
 
+TagMonitor::TagMonitor(const TagMonitor& other):
+        mMonitoringEnabled(other.mMonitoringEnabled.load()),
+        mMonitoredTagList(other.mMonitoredTagList),
+        mLastMonitoredRequestValues(other.mLastMonitoredRequestValues),
+        mLastMonitoredResultValues(other.mLastMonitoredResultValues),
+        mLastMonitoredPhysicalRequestKeys(other.mLastMonitoredPhysicalRequestKeys),
+        mLastMonitoredPhysicalResultKeys(other.mLastMonitoredPhysicalResultKeys),
+        mMonitoringEvents(other.mMonitoringEvents),
+        mVendorTagId(other.mVendorTagId) {}
+
 const String16 TagMonitor::kMonitorOption = String16("-m");
 
 const char* TagMonitor::k3aTags =
diff --git a/services/camera/libcameraservice/utils/TagMonitor.h b/services/camera/libcameraservice/utils/TagMonitor.h
index 1b7b033..413f502 100644
--- a/services/camera/libcameraservice/utils/TagMonitor.h
+++ b/services/camera/libcameraservice/utils/TagMonitor.h
@@ -50,6 +50,8 @@
 
     TagMonitor();
 
+    TagMonitor(const TagMonitor& other);
+
     void initialize(metadata_vendor_id_t id) { mVendorTagId = id; }
 
     // Parse tag name list (comma-separated) and if valid, enable monitoring
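
The new TagMonitor copy constructor above copies mMonitoringEnabled via load(), which indicates the member is a std::atomic (TagMonitor.h is only partially shown here, so this is an inference). A standalone illustration of why the explicit load is needed:

#include <atomic>

struct Flagged {
    std::atomic<bool> enabled{false};
    Flagged() = default;
    // std::atomic is neither copyable nor movable, so a copy constructor has
    // to read the value explicitly, exactly as TagMonitor does above.
    Flagged(const Flagged& other) : enabled(other.enabled.load()) {}
};
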
diff --git a/services/mediacodec/Android.bp b/services/mediacodec/Android.bp
index 3141c31..5811068 100644
--- a/services/mediacodec/Android.bp
+++ b/services/mediacodec/Android.bp
@@ -15,10 +15,6 @@
     ],
 
     target: {
-        vendor: {
-            exclude_shared_libs: ["libavservices_minijail"],
-            shared_libs: ["libavservices_minijail_vendor"],
-        },
         android: {
             product_variables: {
                 malloc_not_svelte: {
diff --git a/services/mediacodec/Android.mk b/services/mediacodec/Android.mk
index 1cf0534..88a79e7 100644
--- a/services/mediacodec/Android.mk
+++ b/services/mediacodec/Android.mk
@@ -37,7 +37,7 @@
     libutils \
     liblog \
     libbase \
-    libavservices_minijail_vendor \
+    libavservices_minijail \
     libcutils \
     libhidlbase \
     libstagefright_omx \
diff --git a/services/mediametrics/AnalyticsActions.h b/services/mediametrics/AnalyticsActions.h
new file mode 100644
index 0000000..5568c91
--- /dev/null
+++ b/services/mediametrics/AnalyticsActions.h
@@ -0,0 +1,150 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <media/MediaMetricsItem.h>
+#include <mutex>
+
+namespace android::mediametrics {
+
+/**
+ * AnalyticsActions consists of a map of pairs <trigger, action> which
+ * are evaluated for a given incoming MediaMetrics item.
+ *
+ * getActionsForItem() returns a vector of Actions, which should be
+ * executed outside of any locks.
+ *
+ * Mediametrics assumes weak consistency, which is fine as the analytics database
+ * is generally strictly increasing in size (until gc removes values that are
+ * supposedly no longer needed).
+ */
+
+class AnalyticsActions {
+public:
+
+    using Elem = mediametrics::Item::Prop::Elem;
+    /**
+     * Trigger: a pair consisting of
+     * std::string: A wildcard url specifying a property in the item,
+     *              where '*' indicates 0 or more arbitrary characters
+     *              for the item key match.
+     * Elem: A value that needs to match exactly.
+     *
+     * Triggers are stored in a sorted map using the default std::less
+     * comparison, with the std::string url as the primary key.  A wildcard is
+     * preferred over a general regexp to keep lookup simple and fast.
+     *
+     * TODO: incorporate a regexp option.
+     */
+    using Trigger = std::pair<std::string, Elem>;
+
+    /**
+     * Function: The function to be executed.
+     */
+    using Function = std::function<
+            void(const std::shared_ptr<const mediametrics::Item>& item)>;
+
+    /**
+     * Action:  An action to execute.  This is a shared pointer to Function.
+     */
+    using Action = std::shared_ptr<Function>;
+
+    /**
+     * Adds a new action.
+     *
+     * \param url references a property in the item with wildcards
+     * \param value references a value (cast to Elem automatically)
+     *              so be careful of the type.  It must be one of
+     *              the types acceptable to Elem.
+     * \param action is a function or lambda to execute if the url matches value
+     *               in the item.
+     */
+    template <typename T, typename U, typename A>
+    void addAction(T&& url, U&& value, A&& action) {
+        std::lock_guard l(mLock);
+        mFilters[ { std::forward<T>(url), std::forward<U>(value) } ]
+                = std::forward<A>(action);
+    }
+
+    // TODO: remove an action.
+
+    /**
+     * Get all the actions triggered for a particular item.
+     *
+     * \param item to be analyzed for actions.
+     */
+    std::vector<Action>
+    getActionsForItem(const std::shared_ptr<const mediametrics::Item>& item) {
+        std::vector<Action> actions;
+        std::lock_guard l(mLock);
+
+        // Essentially the code looks like this:
+        /*
+        for (auto &[trigger, action] : mFilters) {
+            if (isMatch(trigger, item)) {
+                actions.push_back(action);
+            }
+        }
+        */
+
+        // Optimization: there should only be one match for a non-wildcard url.
+        auto it = mFilters.upper_bound( {item->getKey(), std::monostate{} });
+        if (it != mFilters.end()) {
+            const auto &[trigger, action] = *it;
+            if (isMatch(trigger, item)) {
+                actions.push_back(action);
+            }
+        }
+
+        // Optimization: for wildcard URLs we go backwards until there is no
+        // match with the prefix before the wildcard.
+        while (it != mFilters.begin()) {  // this walks backwards, cannot start at begin.
+            const auto &[trigger, action] = *--it;  // look backwards
+            int ret = isWildcardMatch(trigger, item);
+            if (ret == mediametrics::Item::RECURSIVE_WILDCARD_CHECK_MATCH_FOUND) {
+                actions.push_back(action);    // match found.
+            } else if (ret == mediametrics::Item::RECURSIVE_WILDCARD_CHECK_NO_MATCH_NO_WILDCARD) {
+                break;                        // no match before wildcard.
+            }
+            // a wildcard was encountered when matching prefix, so we should check again.
+        }
+        return actions;
+    }
+
+private:
+
+    static inline bool isMatch(const Trigger& trigger,
+            const std::shared_ptr<const mediametrics::Item>& item) {
+        const auto& [key, elem] = trigger;
+        if (!startsWith(key, item->getKey())) return false;
+        // The trigger key is in the format (item key).propName, so + 1 skips the '.' delimiter.
+        const char *propName = key.c_str() + item->getKey().size() + 1;
+        return item->hasPropElem(propName, elem);
+    }
+
+    static inline int isWildcardMatch(const Trigger& trigger,
+            const std::shared_ptr<const mediametrics::Item>& item) {
+        const auto& [key, elem] = trigger;
+        return item->recursiveWildcardCheckElem(key.c_str(), elem);
+    }
+
+    mutable std::mutex mLock;
+    std::map<Trigger, Action> mFilters; // GUARDED_BY mLock
+};
+
+} // namespace android::mediametrics
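
A minimal registration sketch against the API above, mirroring how AudioAnalytics wires its own actions later in this change; the key, property, and value strings here are hypothetical, not real metrics names:

#include "AnalyticsActions.h"

using android::mediametrics::AnalyticsActions;

void registerExampleAction(AnalyticsActions& actions) {
    // Trigger url is "(item key).propName"; the Elem value must match exactly.
    actions.addAction(
            "example.service.event",
            std::string("ctor"),
            std::make_shared<AnalyticsActions::Function>(
                [](const std::shared_ptr<const android::mediametrics::Item>& item) {
                    // Runs outside AnalyticsActions' lock, from whoever called
                    // getActionsForItem() -- see AudioAnalytics::checkActions()
                    // in the AudioAnalytics.cpp hunk below.
                    (void)item;
                }));
}
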
diff --git a/services/mediametrics/AnalyticsState.h b/services/mediametrics/AnalyticsState.h
new file mode 100644
index 0000000..290ed21
--- /dev/null
+++ b/services/mediametrics/AnalyticsState.h
@@ -0,0 +1,131 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include "TimeMachine.h"
+#include "TransactionLog.h"
+
+namespace android::mediametrics {
+
+/**
+ * AnalyticsState consists of a TimeMachine and TransactionLog for a set
+ * of MediaMetrics Items.
+ *
+ * One can add new Items with the submit() method.
+ *
+ * When a service crash is detected, the AnalyticsState may be duplicated to
+ * preserve the prior state, and then cleared.
+ *
+ * As its members may not be moveable due to mutexes, we use this encapsulation
+ * with a shared pointer in order to save it or duplicate it.
+ */
+class AnalyticsState {
+public:
+    /**
+     * Returns success if AnalyticsState accepts the item.
+     *
+     * A trusted source can create a new key; an untrusted source
+     * can only modify the key if its uid matches the uid authorized
+     * on the existing key.
+     *
+     * \param item the item to be submitted.
+     * \param isTrusted whether the transaction comes from a trusted source.
+     *        In this case, a trusted source is verified by binder
+     *        UID to be a system service by MediaMetrics service.
+     *        Do not use true if you haven't really checked!
+     *
+     * \return NO_ERROR on success or
+     *         PERMISSION_DENIED if the item cannot be put into the AnalyticsState.
+     */
+    status_t submit(const std::shared_ptr<const mediametrics::Item>& item, bool isTrusted) {
+        return mTimeMachine.put(item, isTrusted) ?: mTransactionLog.put(item);
+    }
+
+    /**
+     * Returns the TimeMachine.
+     *
+     * The TimeMachine object is internally locked, so access is safe and defined,
+     * but multiple threaded access may change results after calling.
+     */
+    TimeMachine& timeMachine() { return mTimeMachine; }
+    const TimeMachine& timeMachine() const { return mTimeMachine; }
+
+    /**
+     * Returns the TransactionLog.
+     *
+     * The TransactionLog object is internally locked, so access is safe and defined,
+     * but multiple threaded access may change results after calling.
+     */
+    TransactionLog& transactionLog() { return mTransactionLog; }
+    const TransactionLog& transactionLog() const { return mTransactionLog; }
+
+    /**
+     * Returns a pair consisting of the dump string, and the number of lines in the string.
+     *
+     * The number of lines in the returned pair is used as an optimization
+     * for subsequent line limiting.
+     *
+     * The TimeMachine and the TransactionLog are dumped separately under
+     * different locks, so may not be 100% consistent with the last data
+     * delivered.
+     *
+     * \param lines the maximum number of lines in the string returned.
+     */
+    std::pair<std::string, int32_t> dump(int32_t lines = INT32_MAX) const {
+        std::stringstream ss;
+        int32_t ll = lines;
+
+        if (ll > 0) {
+            ss << "TransactionLog:\n";
+            --ll;
+        }
+        if (ll > 0) {
+            auto [s, l] = mTransactionLog.dump(ll);
+            ss << s;
+            ll -= l;
+        }
+        if (ll > 0) {
+            ss << "TimeMachine:\n";
+            --ll;
+        }
+        if (ll > 0) {
+            auto [s, l] = mTimeMachine.dump(ll);
+            ss << s;
+            ll -= l;
+        }
+        return { ss.str(), lines - ll };
+    }
+
+    /**
+     * Clears the AnalyticsState.
+     */
+    void clear() {
+        mTimeMachine.clear();
+        mTransactionLog.clear();
+    }
+
+private:
+    // Note: TimeMachine and TransactionLog are individually locked.
+    // Access to these objects under multiple threads will be weakly synchronized,
+    // which is acceptable as modifications only increase the history (or with GC,
+    // eliminates very old history).
+
+    TimeMachine    mTimeMachine;
+    TransactionLog mTransactionLog;
+};
+
+} // namespace android::mediametrics
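
The dump() contract above (string plus the number of lines used) exists so callers can budget output; the MediaMetricsService.cpp hunk below consumes it exactly this way. A small hedged sketch of that pattern (the function name is illustrative):

#include <cstdint>
#include <string>

#include "AnalyticsState.h"

std::string dumpWithBudget(const android::mediametrics::AnalyticsState& state) {
    constexpr int32_t kMaxLines = 1000;
    auto [text, lines] = state.dump(kMaxLines);
    if (lines == kMaxLines) {
        // The budget was exhausted, so the tail was probably cut off.
        text += "-- some lines may be truncated --\n";
    }
    return text;
}
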
diff --git a/services/mediametrics/Android.bp b/services/mediametrics/Android.bp
index 0840f31..20f346c 100644
--- a/services/mediametrics/Android.bp
+++ b/services/mediametrics/Android.bp
@@ -12,6 +12,7 @@
         "libbinder",
         "liblog",
         "libmediametricsservice",
+        "libmediautils",
         "libutils",
     ],
 
@@ -52,6 +53,7 @@
         "libbinder",
         "liblog",
         "libmediametrics",
+        "libmediautils",
         "libprotobuf-cpp-lite",
         "libstatslog",
         "libutils",
diff --git a/services/mediametrics/AudioAnalytics.cpp b/services/mediametrics/AudioAnalytics.cpp
index fffe517..126e501 100644
--- a/services/mediametrics/AudioAnalytics.cpp
+++ b/services/mediametrics/AudioAnalytics.cpp
@@ -27,6 +27,66 @@
 AudioAnalytics::AudioAnalytics()
 {
     ALOGD("%s", __func__);
+
+    // Add action to save AnalyticsState if audioserver is restarted.
+    // This triggers on an item of "audio.flinger"
+    // with a property "event" set to "AudioFlinger" (the constructor).
+    mActions.addAction(
+        AMEDIAMETRICS_KEY_AUDIO_FLINGER "." AMEDIAMETRICS_PROP_EVENT,
+        std::string(AMEDIAMETRICS_PROP_EVENT_VALUE_CTOR),
+        std::make_shared<AnalyticsActions::Function>(
+            [this](const std::shared_ptr<const android::mediametrics::Item> &item){
+                ALOGW("(key=%s) Audioflinger constructor event detected", item->getKey().c_str());
+                mPreviousAnalyticsState.set(std::make_shared<AnalyticsState>(
+                        *mAnalyticsState.get()));
+                // Note: get() returns a temporary shared_ptr whose lifetime is
+                // extended to the end of the full expression.
+                mAnalyticsState->clear();  // TODO: filter the analytics state.
+                // Perhaps report this.
+            }));
+
+    // Check underruns
+    mActions.addAction(
+        AMEDIAMETRICS_KEY_PREFIX_AUDIO_THREAD "*." AMEDIAMETRICS_PROP_EVENT,
+        std::string(AMEDIAMETRICS_PROP_EVENT_VALUE_UNDERRUN),
+        std::make_shared<AnalyticsActions::Function>(
+            [this](const std::shared_ptr<const android::mediametrics::Item> &item){
+                std::string threadId = item->getKey().substr(
+                        sizeof(AMEDIAMETRICS_KEY_PREFIX_AUDIO_THREAD) - 1);
+                std::string outputDevices;
+                mAnalyticsState->timeMachine().get(
+                        item->getKey(), AMEDIAMETRICS_PROP_OUTPUTDEVICES, &outputDevices);
+                ALOGD("(key=%s) Thread underrun event detected on io handle:%s device:%s",
+                        item->getKey().c_str(), threadId.c_str(), outputDevices.c_str());
+                if (outputDevices.find("AUDIO_DEVICE_OUT_BLUETOOTH") != std::string::npos) {
+                    // report this for Bluetooth
+                }
+            }));
+
+    // Check latencies, playback and startup
+    mActions.addAction(
+        AMEDIAMETRICS_KEY_PREFIX_AUDIO_TRACK "*." AMEDIAMETRICS_PROP_LATENCYMS,
+        std::monostate{},  // accept any value
+        std::make_shared<AnalyticsActions::Function>(
+            [this](const std::shared_ptr<const android::mediametrics::Item> &item){
+                double latencyMs{};
+                double startupMs{};
+                if (!item->get(AMEDIAMETRICS_PROP_LATENCYMS, &latencyMs)
+                        || !item->get(AMEDIAMETRICS_PROP_STARTUPMS, &startupMs)) return;
+
+                std::string trackId = item->getKey().substr(
+                        sizeof(AMEDIAMETRICS_KEY_PREFIX_AUDIO_TRACK) - 1);
+                std::string thread = getThreadFromTrack(item->getKey());
+                std::string outputDevices;
+                mAnalyticsState->timeMachine().get(
+                        thread, AMEDIAMETRICS_PROP_OUTPUTDEVICES, &outputDevices);
+                ALOGD("(key=%s) Track latencyMs:%lf startupMs:%lf detected on port:%s device:%s",
+                        item->getKey().c_str(), latencyMs, startupMs,
+                        trackId.c_str(), outputDevices.c_str());
+                if (outputDevices.find("AUDIO_DEVICE_OUT_BLUETOOTH") != std::string::npos) {
+                    // report this for Bluetooth
+                }
+            }));
 }
 
 AudioAnalytics::~AudioAnalytics()
@@ -37,11 +97,14 @@
 status_t AudioAnalytics::submit(
         const std::shared_ptr<const mediametrics::Item>& item, bool isTrusted)
 {
-    if (startsWith(item->getKey(), "audio.")) {
-        return mTimeMachine.put(item, isTrusted)
-                ?: mTransactionLog.put(item);
-    }
-    return BAD_VALUE;
+    if (!startsWith(item->getKey(), AMEDIAMETRICS_KEY_PREFIX_AUDIO)) return BAD_VALUE;
+    status_t status = mAnalyticsState->submit(item, isTrusted);
+    if (status != NO_ERROR) return status;  // may not be permitted.
+
+    // Only if the item was successfully submitted (permission)
+    // do we check triggered actions.
+    checkActions(item);
+    return NO_ERROR;
 }
 
 std::pair<std::string, int32_t> AudioAnalytics::dump(int32_t lines) const
@@ -50,24 +113,41 @@
     int32_t ll = lines;
 
     if (ll > 0) {
-        ss << "TransactionLog:\n";
-        --ll;
-    }
-    if (ll > 0) {
-        auto [s, l] = mTransactionLog.dump(ll);
+        auto [s, l] = mAnalyticsState->dump(ll);
         ss << s;
         ll -= l;
     }
     if (ll > 0) {
-        ss << "TimeMachine:\n";
+        ss << "Prior audioserver state:\n";
         --ll;
     }
     if (ll > 0) {
-        auto [s, l] = mTimeMachine.dump(ll);
+        auto [s, l] = mPreviousAnalyticsState->dump(ll);
         ss << s;
         ll -= l;
     }
     return { ss.str(), lines - ll };
 }
 
+void AudioAnalytics::checkActions(const std::shared_ptr<const mediametrics::Item>& item)
+{
+    auto actions = mActions.getActionsForItem(item); // internally locked.
+    // Execute actions with no lock held.
+    for (const auto& action : actions) {
+        (*action)(item);
+    }
+}
+
+// HELPER METHODS
+
+std::string AudioAnalytics::getThreadFromTrack(const std::string& track) const
+{
+    int32_t threadId_int32{};
+    if (mAnalyticsState->timeMachine().get(
+            track, AMEDIAMETRICS_PROP_THREADID, &threadId_int32) != NO_ERROR) {
+        return {};
+    }
+    return std::string(AMEDIAMETRICS_KEY_PREFIX_AUDIO_THREAD) + std::to_string(threadId_int32);
+}
+
 } // namespace android
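
From the caller's side, the reworked submit() path above behaves as documented in AudioAnalytics.h: BAD_VALUE for keys that do not start with "audio.", PERMISSION_DENIED for rejected writes, and on success any triggered actions have already been executed. A hedged caller sketch; routeAudioItem is an invented name and the real call site is not shown in this excerpt:

#include <memory>

#include "AudioAnalytics.h"

using android::mediametrics::AudioAnalytics;

android::status_t routeAudioItem(AudioAnalytics& analytics,
        const std::shared_ptr<const android::mediametrics::Item>& item,
        bool isTrusted) {
    const android::status_t status = analytics.submit(item, isTrusted);
    if (status == android::BAD_VALUE) {
        // Key does not start with "audio."; the caller should route it elsewhere.
        return status;
    }
    // NO_ERROR: the item was stored and any matching actions already ran.
    // PERMISSION_DENIED: an untrusted caller tried to touch someone else's key.
    return status;
}
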
diff --git a/services/mediametrics/AudioAnalytics.h b/services/mediametrics/AudioAnalytics.h
index b931258..4a42e22 100644
--- a/services/mediametrics/AudioAnalytics.h
+++ b/services/mediametrics/AudioAnalytics.h
@@ -16,8 +16,9 @@
 
 #pragma once
 
-#include "TimeMachine.h"
-#include "TransactionLog.h"
+#include "AnalyticsActions.h"
+#include "AnalyticsState.h"
+#include "Wrap.h"
 
 namespace android::mediametrics {
 
@@ -27,7 +28,6 @@
     AudioAnalytics();
     ~AudioAnalytics();
 
-    // TODO: update with conditions for keys.
     /**
      * Returns success if AudioAnalytics recognizes item.
      *
@@ -42,6 +42,10 @@
      *        In this case, a trusted source is verified by binder
      *        UID to be a system service by MediaMetrics service.
      *        Do not use true if you haven't really checked!
+     *
+     * \return NO_ERROR on success,
+     *         PERMISSION_DENIED if the item cannot be put into the AnalyticsState,
+     *         BAD_VALUE if the item key does not start with "audio.".
      */
     status_t submit(const std::shared_ptr<const mediametrics::Item>& item, bool isTrusted);
 
@@ -60,9 +64,30 @@
     std::pair<std::string, int32_t> dump(int32_t lines = INT32_MAX) const;
 
 private:
-    // The following are locked internally
-    TimeMachine mTimeMachine;
-    TransactionLog mTransactionLog;
+
+    /**
+     * Checks for any pending actions for a particular item.
+     *
+     * \param item to check against the current AnalyticsActions.
+     */
+    void checkActions(const std::shared_ptr<const mediametrics::Item>& item);
+
+    // HELPER METHODS
+    /**
+     * Return the audio thread associated with an audio track name.
+     * e.g. "audio.track.32" -> "audio.thread.10" if the associated
+     * threadId for the audio track is 10.
+     */
+    std::string getThreadFromTrack(const std::string& track) const;
+
+    // Actions is individually locked
+    AnalyticsActions mActions;
+
+    // AnalyticsState is individually locked, and we use SharedPtrWrap
+    // to allow safe access even if the shared pointer changes underneath.
+
+    SharedPtrWrap<AnalyticsState> mAnalyticsState;
+    SharedPtrWrap<AnalyticsState> mPreviousAnalyticsState;
 };
 
 } // namespace android::mediametrics
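
Wrap.h is not part of this excerpt; based on the calls used in AudioAnalytics.cpp (get(), set(), operator->), a minimal SharedPtrWrap could look roughly like the sketch below. This is an assumption about the idea, not the real header, hence the distinct name:

#include <memory>
#include <mutex>

template <typename T>
class SharedPtrWrapSketch {
public:
    SharedPtrWrapSketch() : mPtr(std::make_shared<T>()) {}
    // Returns a snapshot; callers keep the object alive even if set() races.
    std::shared_ptr<T> get() const {
        std::lock_guard<std::mutex> l(mLock);
        return mPtr;
    }
    void set(std::shared_ptr<T> ptr) {
        std::lock_guard<std::mutex> l(mLock);
        mPtr = std::move(ptr);
    }
    // operator-> works on a snapshot, so the pointee outlives the call.
    std::shared_ptr<T> operator->() const { return get(); }
private:
    mutable std::mutex mLock;
    std::shared_ptr<T> mPtr;  // GUARDED_BY mLock
};
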
diff --git a/services/mediametrics/MediaMetricsService.cpp b/services/mediametrics/MediaMetricsService.cpp
index bbd13fa..3b95f7a 100644
--- a/services/mediametrics/MediaMetricsService.cpp
+++ b/services/mediametrics/MediaMetricsService.cpp
@@ -57,6 +57,25 @@
     return (timeNs + NANOS_PER_SECOND / 2) / NANOS_PER_SECOND * NANOS_PER_SECOND;
 }
 
+/* static */
+bool MediaMetricsService::useUidForPackage(
+        const std::string& package, const std::string& installer)
+{
+    if (strchr(package.c_str(), '.') == NULL) {
+        return false;  // not of form 'com.whatever...'; assume internal and ok
+    } else if (strncmp(package.c_str(), "android.", 8) == 0) {
+        return false;  // android.* packages are assumed fine
+    } else if (strncmp(installer.c_str(), "com.android.", 12) == 0) {
+        return false;  // from play store
+    } else if (strncmp(installer.c_str(), "com.google.", 11) == 0) {
+        return false;  // some google source
+    } else if (strcmp(installer.c_str(), "preload") == 0) {
+        return false;  // preloads
+    } else {
+        return true;  // we're not sure where it came from, use uid only.
+    }
+}
+
 MediaMetricsService::MediaMetricsService()
         : mMaxRecords(kMaxRecords),
           mMaxRecordAgeNs(kMaxRecordAgeNs),
@@ -112,14 +131,19 @@
         break;
     }
 
-    // Overwrite package name and version if the caller was untrusted.
-    if (!isTrusted) {
-        mUidInfo.setPkgInfo(item, item->getUid(), true, true);
-    } else if (item->getPkgName().empty()) {
-        // empty, so fill out both parts
-        mUidInfo.setPkgInfo(item, item->getUid(), true, true);
-    } else {
-        // trusted, provided a package, do nothing
+    // Overwrite package name and version if the caller was untrusted or empty
+    if (!isTrusted || item->getPkgName().empty()) {
+        const uid_t uid = item->getUid();
+        mediautils::UidInfo::Info info = mUidInfo.getInfo(uid);
+        if (useUidForPackage(info.package, info.installer)) {
+            // Report only the uid for packages from unknown installers.
+            // TODO: perhaps this can be done just before uploading to Westworld.
+            item->setPkgName(std::to_string(uid));
+            item->setPkgVersionCode(0);
+        } else {
+            item->setPkgName(info.package);
+            item->setPkgVersionCode(info.versionCode);
+        }
     }
 
     ALOGV("%s: given uid %d; sanitized uid: %d sanitized pkg: %s "
@@ -267,7 +291,11 @@
         }
         // TODO: maybe consider a better way of dumping audio analytics info.
         constexpr int32_t linesToDump = 1000;
-        result.append(mAudioAnalytics.dump(linesToDump).first.c_str());
+        auto [ dumpString, lines ] = mAudioAnalytics.dump(linesToDump);
+        result.append(dumpString.c_str());
+        if (lines == linesToDump) {
+            result.append("-- some lines may be truncated --\n");
+        }
     }
 
     write(fd, result.string(), result.size());
@@ -450,150 +478,4 @@
     return false;
 }
 
-// How long we hold package info before we re-fetch it
-constexpr nsecs_t PKG_EXPIRATION_NS = 30 * 60 * NANOS_PER_SECOND; // 30 minutes
-
-// give me the package name, perhaps going to find it
-// manages its own mutex operations internally
-void MediaMetricsService::UidInfo::setPkgInfo(
-        mediametrics::Item *item, uid_t uid, bool setName, bool setVersion)
-{
-    ALOGV("%s: uid=%d", __func__, uid);
-
-    if (!setName && !setVersion) {
-        return;  // setting nothing? strange
-    }
-
-    const nsecs_t now = systemTime(SYSTEM_TIME_REALTIME);
-    struct UidToPkgInfo mapping;
-    {
-        std::lock_guard _l(mUidInfoLock);
-        auto it = mPkgMappings.find(uid);
-        if (it != mPkgMappings.end()) {
-            mapping = it->second;
-            ALOGV("%s: uid %d expiration %lld now %lld",
-                    __func__, uid, (long long)mapping.expiration, (long long)now);
-            if (mapping.expiration <= now) {
-                // purge the stale entry and fall into re-fetching
-                ALOGV("%s: entry for uid %d expired, now %lld",
-                        __func__, uid, (long long)now);
-                mPkgMappings.erase(it);
-                mapping.uid = (uid_t)-1;  // this is always fully overwritten
-            }
-        }
-    }
-
-    // if we did not find it
-    if (mapping.uid == (uid_t)(-1)) {
-        std::string pkg;
-        std::string installer;
-        int64_t versionCode = 0;
-
-        const struct passwd *pw = getpwuid(uid);
-        if (pw) {
-            pkg = pw->pw_name;
-        }
-
-        sp<IServiceManager> sm = defaultServiceManager();
-        sp<content::pm::IPackageManagerNative> package_mgr;
-        if (sm.get() == nullptr) {
-            ALOGE("%s: Cannot find service manager", __func__);
-        } else {
-            sp<IBinder> binder = sm->getService(String16("package_native"));
-            if (binder.get() == nullptr) {
-                ALOGE("%s: Cannot find package_native", __func__);
-            } else {
-                package_mgr = interface_cast<content::pm::IPackageManagerNative>(binder);
-            }
-        }
-
-        if (package_mgr != nullptr) {
-            std::vector<int> uids;
-            std::vector<std::string> names;
-            uids.push_back(uid);
-            binder::Status status = package_mgr->getNamesForUids(uids, &names);
-            if (!status.isOk()) {
-                ALOGE("%s: getNamesForUids failed: %s",
-                        __func__, status.exceptionMessage().c_str());
-            } else {
-                if (!names[0].empty()) {
-                    pkg = names[0].c_str();
-                }
-            }
-        }
-
-        // strip any leading "shared:" strings that came back
-        if (pkg.compare(0, 7, "shared:") == 0) {
-            pkg.erase(0, 7);
-        }
-        // determine how pkg was installed and the versionCode
-        if (pkg.empty()) {
-            pkg = std::to_string(uid); // no name for us to manage
-        } else if (strchr(pkg.c_str(), '.') == NULL) {
-            // not of form 'com.whatever...'; assume internal and ok
-        } else if (strncmp(pkg.c_str(), "android.", 8) == 0) {
-            // android.* packages are assumed fine
-        } else if (package_mgr.get() != nullptr) {
-            String16 pkgName16(pkg.c_str());
-            binder::Status status = package_mgr->getInstallerForPackage(pkgName16, &installer);
-            if (!status.isOk()) {
-                ALOGE("%s: getInstallerForPackage failed: %s",
-                        __func__, status.exceptionMessage().c_str());
-            }
-
-            // skip if we didn't get an installer
-            if (status.isOk()) {
-                status = package_mgr->getVersionCodeForPackage(pkgName16, &versionCode);
-                if (!status.isOk()) {
-                    ALOGE("%s: getVersionCodeForPackage failed: %s",
-                            __func__, status.exceptionMessage().c_str());
-                }
-            }
-
-            ALOGV("%s: package '%s' installed by '%s' versioncode %lld",
-                    __func__, pkg.c_str(), installer.c_str(), (long long)versionCode);
-
-            if (strncmp(installer.c_str(), "com.android.", 12) == 0) {
-                // from play store, we keep info
-            } else if (strncmp(installer.c_str(), "com.google.", 11) == 0) {
-                // some google source, we keep info
-            } else if (strcmp(installer.c_str(), "preload") == 0) {
-                // preloads, we keep the info
-            } else if (installer.c_str()[0] == '\0') {
-                // sideload (no installer); report UID only
-                pkg = std::to_string(uid);
-                versionCode = 0;
-            } else {
-                // unknown installer; report UID only
-                pkg = std::to_string(uid);
-                versionCode = 0;
-            }
-        } else {
-            // unvalidated by package_mgr just send uid.
-            pkg = std::to_string(uid);
-        }
-
-        // add it to the map, to save a subsequent lookup
-        std::lock_guard _l(mUidInfoLock);
-        // always overwrite
-        mapping.uid = uid;
-        mapping.pkg = std::move(pkg);
-        mapping.installer = std::move(installer);
-        mapping.versionCode = versionCode;
-        mapping.expiration = now + PKG_EXPIRATION_NS;
-        ALOGV("%s: adding uid %d pkg '%s' expiration: %lld",
-                __func__, uid, mapping.pkg.c_str(), (long long)mapping.expiration);
-        mPkgMappings[uid] = mapping;
-    }
-
-    if (mapping.uid != (uid_t)(-1)) {
-        if (setName) {
-            item->setPkgName(mapping.pkg);
-        }
-        if (setVersion) {
-            item->setPkgVersionCode(mapping.versionCode);
-        }
-    }
-}
-
 } // namespace android
diff --git a/services/mediametrics/MediaMetricsService.h b/services/mediametrics/MediaMetricsService.h
index b736c30..74a114a 100644
--- a/services/mediametrics/MediaMetricsService.h
+++ b/services/mediametrics/MediaMetricsService.h
@@ -24,6 +24,7 @@
 
 // IMediaMetricsService must include Vector, String16, Errors
 #include <media/IMediaMetricsService.h>
+#include <mediautils/ServiceUtilities.h>
 #include <utils/String8.h>
 
 #include "AudioAnalytics.h"
@@ -62,6 +63,11 @@
      */
     static nsecs_t roundTime(nsecs_t timeNs);
 
+    /**
+     * Returns true if we should use the uid as the package name when uploading to WestWorld.
+     */
+    static bool useUidForPackage(const std::string& package, const std::string& installer);
+
 protected:
 
     // Internal call where release is true if ownership of item is transferred
@@ -96,27 +102,10 @@
     const size_t mMaxRecordsExpiredAtOnce;
     const int mDumpProtoDefault;
 
-    class UidInfo {
-    public:
-        void setPkgInfo(mediametrics::Item *item, uid_t uid, bool setName, bool setVersion);
-
-    private:
-        std::mutex mUidInfoLock;
-
-        struct UidToPkgInfo {
-            uid_t uid = -1;
-            std::string pkg;
-            std::string installer;
-            int64_t versionCode = 0;
-            nsecs_t expiration = 0;  // TODO: remove expiration.
-        };
-
-        // TODO: use concurrent hashmap with striped lock.
-        std::unordered_map<uid_t, struct UidToPkgInfo> mPkgMappings; // GUARDED_BY(mUidInfoLock)
-    } mUidInfo;  // mUidInfo can be accessed without lock (locked internally)
-
     std::atomic<int64_t> mItemsSubmitted{}; // accessed outside of lock.
 
+    mediautils::UidInfo mUidInfo;  // mUidInfo can be accessed without lock (locked internally)
+
     mediametrics::AudioAnalytics mAudioAnalytics;
 
     std::mutex mLock;
diff --git a/services/mediametrics/TimeMachine.h b/services/mediametrics/TimeMachine.h
index 0cd8dc6..4d24ce4 100644
--- a/services/mediametrics/TimeMachine.h
+++ b/services/mediametrics/TimeMachine.h
@@ -35,6 +35,14 @@
     return s;
 }
 
+// define a way of printing a std::pair.
+template <typename T, typename U>
+std::ostream & operator<< (std::ostream& s,
+                           const std::pair<T, U>& v) {
+    s << "{ " << v.first << ", " << v.second << " }";
+    return s;
+}
+
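A minimal standalone sketch of the pair printer added above, showing how a rate-style std::pair streams through it (the surrounding main() is illustrative only):

#include <cstdint>
#include <iostream>
#include <sstream>
#include <utility>

// Same shape as the overload above: print a std::pair as "{ first, second }".
template <typename T, typename U>
std::ostream& operator<<(std::ostream& s, const std::pair<T, U>& v) {
    s << "{ " << v.first << ", " << v.second << " }";
    return s;
}

int main() {
    std::stringstream ss;
    ss << std::pair<int64_t, int64_t>{11, 12};  // a "rate" style property
    std::cout << ss.str() << std::endl;         // prints: { 11, 12 }
    return 0;
}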
 // define a way of printing a variant
 // see https://en.cppreference.com/w/cpp/utility/variant/visit
 template <typename T0, typename ... Ts>
@@ -52,11 +60,13 @@
  *
  * The TimeMachine is NOT thread safe.
  */
-class TimeMachine {
-
-    using Elem = std::variant<std::monostate, int32_t, int64_t, double, std::string>;
+class TimeMachine final { // made final as we have copy constructor instead of dup() override.
+public:
+    using Elem = Item::Prop::Elem;  // use the Item property element.
     using PropertyHistory = std::multimap<int64_t /* time */, Elem>;
 
+private:
+
     // KeyHistory contains no lock.
     // Access is through the TimeMachine, and a hash-striped lock is used
     // before calling into KeyHistory.
@@ -70,10 +80,12 @@
             , mCreationTime(time)
             , mLastModificationTime(time)
         {
-            putValue("_pid", (int32_t)pid, time);
-            putValue("_uid", (int32_t)uid, time);
+            putValue(BUNDLE_PID, (int32_t)pid, time);
+            putValue(BUNDLE_UID, (int32_t)uid, time);
         }
 
+        KeyHistory(const KeyHistory &other) = default;
+
         status_t checkPermission(uid_t uidCheck) const {
             return uidCheck != (uid_t)-1 && uidCheck != mUid ? PERMISSION_DENIED : NO_ERROR;
         }
@@ -102,7 +114,8 @@
 
         void putProp(
                 const std::string &name, const mediametrics::Item::Prop &prop, int64_t time = 0) {
-            prop.visit([&](auto value) { putValue(name, value, time); });
+            // alternatively: prop.visit([&](auto value) { putValue(name, value, time); });
+            putValue(name, prop.get(), time);
         }
 
         template <typename T>
@@ -119,13 +132,6 @@
             }
         }
 
-        // Explicitly ignore rate properties - we don't expose them for now.
-        void putValue(
-                      const std::string &property __unused,
-                      std::pair<int64_t, int64_t>& e __unused,
-                      int64_t time __unused) {
-        }
-
         std::pair<std::string, int32_t> dump(int32_t lines, int64_t time) const {
             std::stringstream ss;
             int32_t ll = lines;
@@ -185,6 +191,34 @@
                   __func__, keyHighWaterMark, keyLowWaterMark);
     }
 
+    // The TimeMachine copy constructor/assignment uses a deep copy,
+    // though the snapshot is neither instantaneous nor isochronous.
+    //
+    // If there are concurrent operations ongoing in the other TimeMachine,
+    // some keys' histories may be more recent than others (a time shear).
+    // This is expected to be benign, as only a small number of future
+    // elements are incorporated into the history.
+    TimeMachine(const TimeMachine& other) {
+        *this = other;
+    }
+    TimeMachine& operator=(const TimeMachine& other) {
+        std::lock_guard lock(mLock);
+        mHistory.clear();
+
+        {
+            std::lock_guard lock2(other.mLock);
+            mHistory = other.mHistory;
+        }
+
+        // Now that we safely hold our own copies of the shared pointers, dup the
+        // KeyHistory objects so they are decoupled from the other TimeMachine.
+        // We do this while holding the other TimeMachine's per-key lock.
+        for (const auto &[lkey, lhist] : mHistory) {
+            std::lock_guard lock2(other.getLockForKey(lkey));
+            mHistory[lkey] = std::make_shared<KeyHistory>(*lhist);
+        }
+        return *this;
+    }
+
     /**
      * Put all the properties from an item into the Time Machine log.
      */
@@ -358,10 +392,10 @@
 
         std::stringstream ss;
         int32_t ll = lines;
-        for (const auto &keyPair : mHistory) {
-            std::lock_guard lock(getLockForKey(keyPair.first));
+        for (const auto &[lkey, lhist] : mHistory) {
+            std::lock_guard lock(getLockForKey(lkey));
             if (lines <= 0) break;
-            auto [s, l] = keyPair.second->dump(ll, time);
+            auto [s, l] = lhist->dump(ll, time);
             ss << s;
             ll -= l;
         }
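The deep-copy snapshot described in the comments above can be illustrated with a small standalone sketch; SnapshotLog and its members are simplified stand-ins, not the actual TimeMachine:

#include <map>
#include <memory>
#include <mutex>
#include <string>
#include <vector>

// Simplified stand-in: shallow-copy the map of shared_ptr histories under the
// source lock, then deep-copy each history so the snapshot is decoupled from
// concurrent writers (accepting a possible time shear between keys).
class SnapshotLog {
public:
    SnapshotLog() = default;
    SnapshotLog(const SnapshotLog& other) { *this = other; }

    SnapshotLog& operator=(const SnapshotLog& other) {
        if (this == &other) return *this;  // avoid self-deadlock
        std::lock_guard lock(mLock);
        mHistory.clear();
        {
            std::lock_guard lock2(other.mLock);
            mHistory = other.mHistory;  // shallow copy: shared pointers only
        }
        for (auto& entry : mHistory) {
            // decouple each history from the source
            entry.second = std::make_shared<std::vector<int>>(*entry.second);
        }
        return *this;
    }

    void put(const std::string& key, int value) {
        std::lock_guard lock(mLock);
        auto& hist = mHistory[key];
        if (!hist) hist = std::make_shared<std::vector<int>>();
        hist->push_back(value);
    }

private:
    mutable std::mutex mLock;
    std::map<std::string, std::shared_ptr<std::vector<int>>> mHistory;
};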
diff --git a/services/mediametrics/TransactionLog.h b/services/mediametrics/TransactionLog.h
index 7a520d9..0c5d12b 100644
--- a/services/mediametrics/TransactionLog.h
+++ b/services/mediametrics/TransactionLog.h
@@ -36,7 +36,7 @@
  *
  * The TransactionLog is NOT thread safe.
  */
-class TransactionLog {
+class TransactionLog final { // made final as we have copy constructor instead of dup() override.
 public:
     // In long term run, the garbage collector aims to keep the
     // Transaction Log between the Low Water Mark and the High Water Mark.
@@ -58,6 +58,30 @@
                   __func__, highWaterMark, lowWaterMark);
     }
 
+    // The TransactionLog copy constructor/assignment is effectively an
+    // instantaneous, isochronous snapshot of the other TransactionLog.
+    //
+    // The contents of the Transaction Log are shared pointers to immutable instances -
+    // std::shared_ptr<const mediametrics::Item>, so we use a shallow copy,
+    // which is more efficient in space and execution time than a deep copy,
+    // and gives the same results.
+
+    TransactionLog(const TransactionLog &other) {
+        *this = other;
+    }
+
+    TransactionLog& operator=(const TransactionLog &other) {
+        std::lock_guard lock(mLock);
+        mLog.clear();
+        mItemMap.clear();
+
+        std::lock_guard lock2(other.mLock);
+        mLog = other.mLog;
+        mItemMap = other.mItemMap;
+
+        return *this;
+    }
+
     /**
      * Put an item in the TransactionLog.
      */
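Because the log stores std::shared_ptr<const ...> handles to immutable items, a shallow copy observes exactly the same contents; a tiny illustrative sketch follows (Item here is a stand-in, not the mediametrics::Item):

#include <cassert>
#include <cstdint>
#include <map>
#include <memory>
#include <string>

struct Item {             // stand-in for an immutable log entry
    std::string key;
    int value;
};

int main() {
    std::multimap<int64_t, std::shared_ptr<const Item>> log;
    auto item = std::make_shared<const Item>(Item{"audio.track.1", 42});
    log.emplace(1000 /* time */, item);

    auto snapshot = log;  // shallow copy: only shared_ptr handles are duplicated
    assert(snapshot.begin()->second.get() == log.begin()->second.get());  // same object
    assert(item.use_count() == 3);  // local handle + one entry per container
    return 0;
}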
diff --git a/services/mediametrics/Wrap.h b/services/mediametrics/Wrap.h
new file mode 100644
index 0000000..3584e08
--- /dev/null
+++ b/services/mediametrics/Wrap.h
@@ -0,0 +1,205 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <memory>
+#include <mutex>
+
+namespace android::mediametrics {
+
+/**
+ * Wraps a shared_ptr so that member access through operator->() behaves
+ * as if the shared_ptr is atomically copied and the member is then invoked
+ * on that copy (without holding the lock).
+ *
+ * See related C++ 20:
+ * https://en.cppreference.com/w/cpp/memory/shared_ptr/atomic2
+ *
+ * EXAMPLE:
+ *
+ * SharedPtrWrap<T> t{};
+ *
+ * thread1() {
+ *   t->func();  // safely executes either the original t or the one created by thread2.
+ * }
+ *
+ * thread2() {
+ *  t.set(std::make_shared<T>()); // overwrites the original t.
+ * }
+ */
+template <typename T>
+class SharedPtrWrap {
+    mutable std::mutex mLock;
+    std::shared_ptr<T> mPtr;
+
+public:
+    template <typename... Args>
+    explicit SharedPtrWrap(Args&&... args)
+        : mPtr(std::make_shared<T>(std::forward<Args>(args)...))
+    {}
+
+    /**
+     * Gets the current shared pointer.  This must return a value, not a reference.
+     *
+     * For compatibility with existing shared_ptr, we do not pass back a
+     * shared_ptr<const T> for the const getter.
+     */
+    std::shared_ptr<T> get() const {
+        std::lock_guard lock(mLock);
+        return mPtr;
+    }
+
+    /**
+     * Sets the current shared pointer, returning the previous shared pointer.
+     */
+    std::shared_ptr<T> set(std::shared_ptr<T> ptr) { // pass by value as we use swap.
+        std::lock_guard lock(mLock);
+        std::swap(ptr, mPtr);
+        return ptr;
+    }
+
+    /**
+     * Returns a shared pointer value representing T at the instant the call
+     * executes. The lifetime of the pointee is extended because we return an
+     * instance of the shared_ptr, not a reference to it.  The destructor of the
+     * returned shared_ptr runs only after the full expression containing the
+     * member function call or member variable access has been evaluated.
+     * Do not change this to return a reference!
+     */
+
+    // For compatibility with existing shared_ptr, we do not pass back a
+    // shared_ptr<const T> for the const operator->() access.
+    std::shared_ptr<T> operator->() const {
+        return get();
+    }
+    /**
+     * We do not overload operator*() as the reference is not stable if the
+     * lock is not held.
+     */
+};
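For comparison, a rough sketch of the same read/update pattern using the C++20 facility linked in the comment above (assumes library support for std::atomic<std::shared_ptr<T>>; this is not the class defined here):

#include <atomic>
#include <memory>
#include <string>

// Readers atomically copy the pointer and then use the copy without a lock;
// writers atomically replace the pointer.
std::atomic<std::shared_ptr<std::string>> gName{std::make_shared<std::string>("initial")};

void reader() {
    std::shared_ptr<std::string> snap = gName.load();  // atomic copy
    (void)snap->size();  // safe even if a writer replaces gName concurrently
}

void writer() {
    gName.store(std::make_shared<std::string>("updated"));  // overwrite the pointer
}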
+
+/**
+ * Wraps member access to the class T by a lock.
+ *
+ * The object T is constructed within the LockWrap to guarantee
+ * locked access at all times.  When T's methods are accessed through ->,
+ * a monitor style lock is obtained to prevent multiple threads from executing
+ * methods in the object T at the same time.
+ * Suggested by Kevin R.
+ *
+ * EXAMPLE:
+ *
+ * // Accumulator class which is very slow, requires locking for multiple threads.
+ *
+ * class Accumulator {
+ *   int32_t value_ = 0;
+ * public:
+ *   void add(int32_t incr) {
+ *     const int32_t temp = value_;
+ *     sleep(0);  // yield
+ *     value_ = temp + incr;
+ *   }
+ *   int32_t get() { return value_; }
+ * };
+ *
+ * // We use LockWrap on Accumulator to have safe multithread access.
+ * android::mediametrics::LockWrap<Accumulator> a{}; // locked accumulator succeeds
+ *
+ * // Conversely, the following line fails:
+ * // auto a = std::make_shared<Accumulator>(); // this fails: only ~50% of the adds take effect.
+ *
+ * constexpr size_t THREADS = 100;
+ * constexpr size_t ITERATIONS = 10;
+ * constexpr int32_t INCREMENT = 1;
+ *
+ * // Test by generating multiple threads, all adding simultaneously.
+ * std::vector<std::future<void>> threads(THREADS);
+ * for (size_t i = 0; i < THREADS; ++i) {
+ *     threads.push_back(std::async(std::launch::async, [&] {
+ *         for (size_t j = 0; j < ITERATIONS; ++j) {
+ *             a->add(INCREMENT);  // add needs locked access here.
+ *         }
+ *     }));
+ * }
+ * threads.clear();
+ *
+ * // If the add operations are not atomic, value will be smaller than expected.
+ * ASSERT_EQ(INCREMENT * THREADS * ITERATIONS, (size_t)a->get());
+ *
+ */
+template <typename T>
+class LockWrap {
+    /**
+      * Holding class that keeps the pointer and the lock.
+      *
+      * We return this holding class from operator->() to keep the lock until the
+      * member function call or member variable access is completed.
+      */
+    class LockedPointer {
+        friend LockWrap;
+        LockedPointer(T *t, std::recursive_mutex *lock, std::atomic<size_t> *recursionDepth)
+            : mT(t), mLock(*lock), mRecursionDepth(recursionDepth) { ++*mRecursionDepth; }
+
+        T* const mT;
+        std::lock_guard<std::recursive_mutex> mLock;
+        std::atomic<size_t>* mRecursionDepth;
+    public:
+        ~LockedPointer() {
+            --*mRecursionDepth; // Used for testing, we do not check underflow.
+        }
+
+        const T* operator->() const {
+            return mT;
+        }
+        T* operator->() {
+            return mT;
+        }
+    };
+
+    // We must use a recursive mutex because the lock is held until the end of
+    // the full expression, which may access T through operator->() again.
+    //
+    // A recursive mutex allows the same thread to recursively acquire,
+    // but a different thread would block.
+    //
+    // Example which fails with a normal mutex:
+    //
+    // android::mediametrics::LockWrap<std::vector<int>> v{std::initializer_list<int>{1, 2}};
+    // const int sum = v->operator[](0) + v->operator[](1);
+    //
+    mutable std::recursive_mutex mLock;
+    mutable T mT;
+    mutable std::atomic<size_t> mRecursionDepth{};  // Used for testing.
+
+public:
+    template <typename... Args>
+    explicit LockWrap(Args&&... args) : mT(std::forward<Args>(args)...) {}
+
+    const LockedPointer operator->() const {
+        return LockedPointer(&mT, &mLock, &mRecursionDepth);
+    }
+    LockedPointer operator->() {
+        return LockedPointer(&mT, &mLock, &mRecursionDepth);
+    }
+
+    // Returns the lock depth of the recursive mutex.
+    // @TestApi
+    size_t getRecursionDepth() const {
+        return mRecursionDepth;
+    }
+};
+
+} // namespace android::mediametrics
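A stripped-down standalone monitor sketch (illustrative names only, not LockWrap itself) showing why operator->() must use a recursive mutex when the proxy holds the lock to the end of the full expression:

#include <initializer_list>
#include <mutex>
#include <utility>
#include <vector>

template <typename T>
class Monitor {
    // Temporary proxy returned by operator->(); it holds the lock until the end
    // of the full expression that contains the member access.
    class Proxy {
        friend Monitor;
        Proxy(T* t, std::recursive_mutex* m) : mT(t), mLock(*m) {}
        T* const mT;
        std::lock_guard<std::recursive_mutex> mLock;
    public:
        T* operator->() { return mT; }
    };

    std::recursive_mutex mLock;
    T mT;

public:
    template <typename... Args>
    explicit Monitor(Args&&... args) : mT(std::forward<Args>(args)...) {}

    Proxy operator->() { return Proxy(&mT, &mLock); }
};

int main() {
    Monitor<std::vector<int>> v(std::initializer_list<int>{1, 2});
    // Two proxies coexist until the ';', so the same thread locks twice;
    // with a plain std::mutex this full expression would deadlock.
    const int sum = v->operator[](0) + v->operator[](1);
    return sum == 3 ? 0 : 1;
}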
diff --git a/services/mediametrics/tests/Android.bp b/services/mediametrics/tests/Android.bp
index 9eb2d89..bdeda30 100644
--- a/services/mediametrics/tests/Android.bp
+++ b/services/mediametrics/tests/Android.bp
@@ -17,6 +17,7 @@
         "liblog",
         "libmediametrics",
         "libmediametricsservice",
+        "libmediautils",
         "libutils",
     ],
 
diff --git a/services/mediametrics/tests/build_and_run_all_unit_tests.sh b/services/mediametrics/tests/build_and_run_all_unit_tests.sh
index 2511c30..382be47 100755
--- a/services/mediametrics/tests/build_and_run_all_unit_tests.sh
+++ b/services/mediametrics/tests/build_and_run_all_unit_tests.sh
@@ -20,5 +20,5 @@
 echo "========================================"
 
 echo "testing mediametrics"
-adb push $OUT/data/nativetest/mediametrics_tests/mediametrics_tests /system/bin
+adb push $OUT/data/nativetest64/mediametrics_tests/mediametrics_tests /system/bin
 adb shell /system/bin/mediametrics_tests
diff --git a/services/mediametrics/tests/mediametrics_tests.cpp b/services/mediametrics/tests/mediametrics_tests.cpp
index 285a1ba..27b72eb 100644
--- a/services/mediametrics/tests/mediametrics_tests.cpp
+++ b/services/mediametrics/tests/mediametrics_tests.cpp
@@ -35,6 +35,157 @@
     return count;
 }
 
+TEST(mediametrics_tests, startsWith) {
+  std::string s("test");
+  ASSERT_EQ(true, android::mediametrics::startsWith(s, "te"));
+  ASSERT_EQ(true, android::mediametrics::startsWith(s, std::string("tes")));
+  ASSERT_EQ(false, android::mediametrics::startsWith(s, "ts"));
+  ASSERT_EQ(false, android::mediametrics::startsWith(s, std::string("est")));
+}
+
+TEST(mediametrics_tests, defer) {
+  bool check = false;
+  {
+      android::mediametrics::Defer defer([&] { check = true; });
+      ASSERT_EQ(false, check);
+  }
+  ASSERT_EQ(true, check);
+}
+
+TEST(mediametrics_tests, shared_ptr_wrap) {
+  // Test shared pointer wrap with simple access
+  android::mediametrics::SharedPtrWrap<std::string> s("123");
+  ASSERT_EQ('1', s->at(0));
+  ASSERT_EQ('2', s->at(1));
+  s->push_back('4');
+  ASSERT_EQ('4', s->at(3));
+
+  const android::mediametrics::SharedPtrWrap<std::string> s2("345");
+  ASSERT_EQ('3', s2->operator[](0));  // s2[0] == '3'
+  // we allow modification through a const shared pointer wrap
+  // for compatibility with shared_ptr.
+  s2->push_back('6');
+  ASSERT_EQ('6', s2->operator[](3));  // s2[3] == '6'
+
+  android::mediametrics::SharedPtrWrap<std::string> s3("");
+  s3.set(std::make_shared<std::string>("abc"));
+  ASSERT_EQ('b', s3->operator[](1)); // s3[1] == 'b'
+
+  // Use Thunk to check whether the destructor was called prematurely
+  // when setting the shared ptr wrap in the middle of a method.
+
+  class Thunk {
+    std::function<void(int)> mF;
+    const int mFinal;
+
+    public:
+      Thunk(decltype(mF) f, int final) : mF(f), mFinal(final) {}
+      ~Thunk() { mF(mFinal); }
+      void thunk(int value) { mF(value); }
+  };
+
+  int counter = 0;
+  android::mediametrics::SharedPtrWrap<Thunk> s4(
+    [&](int value) {
+      s4.set(std::make_shared<Thunk>([](int){}, 0)); // recursively set s4 while in s4.
+      ++counter;
+      ASSERT_EQ(value, counter);  // on thunk() value is 1, on destructor this is 2.
+    }, 2);
+
+  // This will fail if the shared ptr wrap doesn't hold a ref count during method access.
+  s4->thunk(1);
+}
+
+TEST(mediametrics_tests, lock_wrap) {
+  // Test lock wrap with simple access
+  android::mediametrics::LockWrap<std::string> s("123");
+  ASSERT_EQ('1', s->at(0));
+  ASSERT_EQ('2', s->at(1));
+  s->push_back('4');
+  ASSERT_EQ('4', s->at(3));
+
+  const android::mediametrics::LockWrap<std::string> s2("345");
+  ASSERT_EQ('3', s2->operator[](0));  // s2[0] == '3'
+  // note: we can't modify s2 due to const, s2->push_back('6');
+
+  android::mediametrics::LockWrap<std::string> s3("");
+  s3->operator=("abc");
+  ASSERT_EQ('b', s3->operator[](1)); // s3[1] == 'b'
+
+  // Check that we can recursively hold lock.
+  android::mediametrics::LockWrap<std::vector<int>> v{std::initializer_list<int>{1, 2}};
+  v->push_back(3);
+  v->push_back(4);
+  ASSERT_EQ(1, v->operator[](0));
+  ASSERT_EQ(2, v->operator[](1));
+  ASSERT_EQ(3, v->operator[](2));
+  ASSERT_EQ(4, v->operator[](3));
+  // The end of the full expression here requires recursive depth of 4.
+  ASSERT_EQ(10, v->operator[](0) + v->operator[](1) + v->operator[](2) + v->operator[](3));
+
+  // Mikhail's note: a non-recursive lock implementation could be used if one obtains
+  // the LockedPointer helper object like this and directly holds the lock through RAII,
+  // though it is trickier in use.
+  //
+  // We include an example here for completeness.
+  {
+    auto l = v.operator->();
+    ASSERT_EQ(10, l->operator[](0) + l->operator[](1) + l->operator[](2) + l->operator[](3));
+  }
+
+  // Use Thunk to check whether we have the lock when calling a method through LockWrap.
+
+  class Thunk {
+    std::function<void()> mF;
+
+    public:
+      Thunk(decltype(mF) f) : mF(f) {}
+      void thunk() { mF(); }
+  };
+
+  android::mediametrics::LockWrap<Thunk> s4([&]{
+    ASSERT_EQ((size_t)1, s4.getRecursionDepth()); // we must be locked when thunk() is called.
+  });
+
+  ASSERT_EQ((size_t)0, s4.getRecursionDepth());
+  // This will fail if we are not locked during method access.
+  s4->thunk();
+  ASSERT_EQ((size_t)0, s4.getRecursionDepth());
+}
+
+TEST(mediametrics_tests, lock_wrap_multithread) {
+  class Accumulator {
+    int32_t value_ = 0;
+  public:
+    void add(int32_t incr) {
+      const int32_t temp = value_;
+      sleep(0);  // yield
+      value_ = temp + incr;
+    }
+    int32_t get() { return value_; }
+  };
+
+  android::mediametrics::LockWrap<Accumulator> a{}; // locked accumulator succeeds
+  // auto a = std::make_shared<Accumulator>(); // this fails: only ~50% of the adds take effect.
+
+  constexpr size_t THREADS = 100;
+  constexpr size_t ITERATIONS = 10;
+  constexpr int32_t INCREMENT = 1;
+
+  std::vector<std::future<void>> threads(THREADS);
+  for (size_t i = 0; i < THREADS; ++i) {
+    threads.push_back(std::async(std::launch::async, [&] {
+        for (size_t j = 0; j < ITERATIONS; ++j) {
+          a->add(INCREMENT);
+        }
+      }));
+  }
+  threads.clear();
+
+  // If the add operations are not atomic, value will be smaller than expected.
+  ASSERT_EQ(INCREMENT * THREADS * ITERATIONS, (size_t)a->get());
+}
+
 TEST(mediametrics_tests, instantiate) {
   sp mediaMetrics = new MediaMetricsService();
   status_t status;
@@ -72,6 +223,32 @@
   mediaMetrics->dump(fileno(stdout), {} /* args */);
 }
 
+TEST(mediametrics_tests, package_installer_check) {
+  ASSERT_EQ(false, MediaMetricsService::useUidForPackage(
+      "abcd", "installer"));  // ok, package name has no dot.
+  ASSERT_EQ(false, MediaMetricsService::useUidForPackage(
+      "android.com", "installer"));  // ok, package name starts with android
+
+  ASSERT_EQ(false, MediaMetricsService::useUidForPackage(
+      "abc.def", "com.android.foo"));  // ok, installer name starts with com.android
+  ASSERT_EQ(false, MediaMetricsService::useUidForPackage(
+      "123.456", "com.google.bar"));  // ok, installer name starts with com.google
+  ASSERT_EQ(false, MediaMetricsService::useUidForPackage(
+      "r2.d2", "preload"));  // ok, installer name is preload
+
+  ASSERT_EQ(true, MediaMetricsService::useUidForPackage(
+      "abc.def", "installer"));  // unknown installer
+  ASSERT_EQ(true, MediaMetricsService::useUidForPackage(
+      "123.456", "installer")); // unknown installer
+  ASSERT_EQ(true, MediaMetricsService::useUidForPackage(
+      "r2.d2", "preload23"));  // unknown installer
+
+  ASSERT_EQ(true, MediaMetricsService::useUidForPackage(
+      "com.android.foo", "abc.def"));  // unknown installer
+  ASSERT_EQ(true, MediaMetricsService::useUidForPackage(
+      "com.google.bar", "123.456"));  // unknown installer
+}
+
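The expected values above (together with the installer checks in the UidInfo code removed earlier in this patch) suggest decision logic along the following lines; this is only a sketch, not the actual MediaMetricsService::useUidForPackage implementation:

#include <string>

// Return true to report only the uid, unless the package looks internal or
// was installed by a known source (sketch of the rules the test exercises).
static bool useUidForPackageSketch(const std::string& package, const std::string& installer) {
    if (package.find('.') == std::string::npos) {
        return false;  // not of form "com.whatever..."; assume internal and ok
    }
    if (package.compare(0, 8, "android.") == 0) {
        return false;  // android.* packages are assumed fine
    }
    if (installer.compare(0, 12, "com.android.") == 0) {
        return false;  // installed through the platform installer, keep info
    }
    if (installer.compare(0, 11, "com.google.") == 0) {
        return false;  // some google source, keep info
    }
    if (installer == "preload") {
        return false;  // preloads keep info
    }
    return true;  // sideload or unknown installer: report uid only
}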
 TEST(mediametrics_tests, item_manipulation) {
   mediametrics::Item item("audiorecord");
 
@@ -262,27 +439,32 @@
           int32_t i32;
           ASSERT_TRUE(prop.get(&i32));
           ASSERT_EQ(1, i32);
+          ASSERT_EQ(1, std::get<int32_t>(prop.get()));
           mask |= 1;
       } else if (!strcmp(name, "i64")) {
           int64_t i64;
           ASSERT_TRUE(prop.get(&i64));
           ASSERT_EQ(2, i64);
+          ASSERT_EQ(2, std::get<int64_t>(prop.get()));
           mask |= 2;
       } else if (!strcmp(name, "double")) {
           double d;
           ASSERT_TRUE(prop.get(&d));
           ASSERT_EQ(3.125, d);
+          ASSERT_EQ(3.125, std::get<double>(prop.get()));
           mask |= 4;
       } else if (!strcmp(name, "string")) {
-          const char *s;
+          std::string s;
           ASSERT_TRUE(prop.get(&s));
-          ASSERT_EQ(0, strcmp(s, "abc"));
+          ASSERT_EQ("abc", s);
+          ASSERT_EQ(s, std::get<std::string>(prop.get()));
           mask |= 8;
       } else if (!strcmp(name, "rate")) {
           std::pair<int64_t, int64_t> r;
           ASSERT_TRUE(prop.get(&r));
           ASSERT_EQ(11, r.first);
           ASSERT_EQ(12, r.second);
+          ASSERT_EQ(r, std::get<decltype(r)>(prop.get()));
           mask |= 16;
       } else {
           FAIL();
@@ -323,9 +505,9 @@
           ASSERT_EQ(3.125, d);
           mask |= 4;
       } else if (!strcmp(name, "string")) {
-          const char *s;
+          std::string s;
           ASSERT_TRUE(prop.get(&s));
-          ASSERT_EQ(0, strcmp(s, "abcdefghijklmnopqrstuvwxyz"));
+          ASSERT_EQ("abcdefghijklmnopqrstuvwxyz", s);
           mask |= 8;
       } else if (!strcmp(name, "rate")) {
           std::pair<int64_t, int64_t> r;
@@ -544,6 +726,61 @@
   ASSERT_EQ((size_t)2, transactionLog.size());
 }
 
+TEST(mediametrics_tests, analytics_actions) {
+  mediametrics::AnalyticsActions analyticsActions;
+  bool action1 = false;
+  bool action2 = false;
+  bool action3 = false;
+  bool action4 = false;
+
+  // check to see whether various actions have been matched.
+  analyticsActions.addAction(
+      "audio.flinger.event",
+      std::string("AudioFlinger"),
+      std::make_shared<mediametrics::AnalyticsActions::Function>(
+          [&](const std::shared_ptr<const android::mediametrics::Item> &) {
+            action1 = true;
+          }));
+
+  analyticsActions.addAction(
+      "audio.*.event",
+      std::string("AudioFlinger"),
+      std::make_shared<mediametrics::AnalyticsActions::Function>(
+          [&](const std::shared_ptr<const android::mediametrics::Item> &) {
+            action2 = true;
+          }));
+
+  analyticsActions.addAction("audio.fl*n*g*r.event",
+      std::string("AudioFlinger"),
+      std::make_shared<mediametrics::AnalyticsActions::Function>(
+          [&](const std::shared_ptr<const android::mediametrics::Item> &) {
+            action3 = true;
+          }));
+
+  analyticsActions.addAction("audio.fl*gn*r.event",
+      std::string("AudioFlinger"),
+      std::make_shared<mediametrics::AnalyticsActions::Function>(
+          [&](const std::shared_ptr<const android::mediametrics::Item> &) {
+            action4 = true;
+          }));
+
+  // make a test item
+  auto item = std::make_shared<mediametrics::Item>("audio.flinger");
+  (*item).set("event", "AudioFlinger");
+
+  // get the actions and execute them
+  auto actions = analyticsActions.getActionsForItem(item);
+  for (const auto& action : actions) {
+    action->operator()(item);
+  }
+
+  // The following should match.
+  ASSERT_EQ(true, action1);
+  ASSERT_EQ(true, action2);
+  ASSERT_EQ(true, action3);
+  ASSERT_EQ(false, action4); // audio.fl*gn*r != audio.flinger
+}
+
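The trigger strings above use shell-style '*' wildcards; a small matcher consistent with these expectations (illustrative only, not the AnalyticsActions implementation) looks like:

#include <cassert>

// '*' matches any run of characters (including none); other characters match literally.
static bool wildcardMatch(const char* pattern, const char* text) {
    if (*pattern == '\0') return *text == '\0';
    if (*pattern == '*') {
        return wildcardMatch(pattern + 1, text) ||                   // '*' matches nothing
               (*text != '\0' && wildcardMatch(pattern, text + 1));  // or one more char
    }
    return *pattern == *text && wildcardMatch(pattern + 1, text + 1);
}

int main() {
    assert(wildcardMatch("audio.*.event", "audio.flinger.event"));
    assert(wildcardMatch("audio.fl*n*g*r.event", "audio.flinger.event"));
    assert(!wildcardMatch("audio.fl*gn*r.event", "audio.flinger.event"));
    return 0;
}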
 TEST(mediametrics_tests, audio_analytics_permission) {
   auto item = std::make_shared<mediametrics::Item>("audio.1");
   (*item).set("one", (int32_t)1)
@@ -566,14 +803,14 @@
 
   // TODO: Verify contents of AudioAnalytics.
   // Currently there is no getter API in AudioAnalytics besides dump.
-  ASSERT_EQ(4, audioAnalytics.dump(1000).second /* lines */);
+  ASSERT_EQ(9, audioAnalytics.dump(1000).second /* lines */);
 
   ASSERT_EQ(NO_ERROR, audioAnalytics.submit(item, true /* isTrusted */));
   // untrusted entities can add to an existing key
   ASSERT_EQ(NO_ERROR, audioAnalytics.submit(item2, false /* isTrusted */));
 
   // Check that we have some info in the dump.
-  ASSERT_LT(4, audioAnalytics.dump(1000).second /* lines */);
+  ASSERT_LT(9, audioAnalytics.dump(1000).second /* lines */);
 }
 
 TEST(mediametrics_tests, audio_analytics_dump) {
diff --git a/services/mediatranscoding/MediaTranscodingService.cpp b/services/mediatranscoding/MediaTranscodingService.cpp
index 2680cee..0269896 100644
--- a/services/mediatranscoding/MediaTranscodingService.cpp
+++ b/services/mediatranscoding/MediaTranscodingService.cpp
@@ -25,10 +25,6 @@
 
 namespace android {
 
-namespace media {
-
-using android::media::MediaTranscodingService;
-
 MediaTranscodingService::MediaTranscodingService() {
     ALOGV("MediaTranscodingService is created");
 }
@@ -56,7 +52,8 @@
 
 Status MediaTranscodingService::registerClient(
         const std::shared_ptr<ITranscodingServiceClient>& /*in_client*/,
-        int32_t* /*_aidl_return*/) {
+        const std::string& /* in_opPackageName */, int32_t /* in_clientUid */,
+        int32_t /* in_clientPid */, int32_t* /*_aidl_return*/) {
     // TODO(hkuang): Add implementation.
     return Status::ok();
 }
@@ -67,8 +64,9 @@
 }
 
 Status MediaTranscodingService::submitRequest(int32_t /*clientId*/,
-                                              const TranscodingRequest& /*request*/,
-                                              TranscodingJob* /*job*/, int32_t* /*_aidl_return*/) {
+                                              const TranscodingRequestParcel& /*request*/,
+                                              TranscodingJobParcel* /*job*/,
+                                              int32_t* /*_aidl_return*/) {
     // TODO(hkuang): Add implementation.
     return Status::ok();
 }
@@ -79,11 +77,11 @@
     return Status::ok();
 }
 
-Status MediaTranscodingService::getJobWithId(int32_t /*in_jobId*/, TranscodingJob* /*out_job*/,
+Status MediaTranscodingService::getJobWithId(int32_t /*in_jobId*/,
+                                             TranscodingJobParcel* /*out_job*/,
                                              bool* /*_aidl_return*/) {
     // TODO(hkuang): Add implementation.
     return Status::ok();
 }
 
-}  // namespace media
 }  // namespace android
diff --git a/services/mediatranscoding/MediaTranscodingService.h b/services/mediatranscoding/MediaTranscodingService.h
index c2c30b9..d225f9a 100644
--- a/services/mediatranscoding/MediaTranscodingService.h
+++ b/services/mediatranscoding/MediaTranscodingService.h
@@ -22,13 +22,11 @@
 
 namespace android {
 
-namespace media {
-
 using Status = ::ndk::ScopedAStatus;
 using ::aidl::android::media::BnMediaTranscodingService;
 using ::aidl::android::media::ITranscodingServiceClient;
-using ::aidl::android::media::TranscodingJob;
-using ::aidl::android::media::TranscodingRequest;
+using ::aidl::android::media::TranscodingJobParcel;
+using ::aidl::android::media::TranscodingRequestParcel;
 
 class MediaTranscodingService : public BnMediaTranscodingService {
 public:
@@ -40,23 +38,24 @@
     static const char* getServiceName() { return "media.transcoding"; }
 
     Status registerClient(const std::shared_ptr<ITranscodingServiceClient>& in_client,
-                          int32_t* _aidl_return) override;
+                          const std::string& in_opPackageName, int32_t in_clientUid,
+                          int32_t in_clientPid, int32_t* _aidl_return) override;
 
     Status unregisterClient(int32_t clientId, bool* _aidl_return) override;
 
-    Status submitRequest(int32_t in_clientId, const TranscodingRequest& in_request,
-                         TranscodingJob* out_job, int32_t* _aidl_return) override;
+    Status submitRequest(int32_t in_clientId, const TranscodingRequestParcel& in_request,
+                         TranscodingJobParcel* out_job, int32_t* _aidl_return) override;
 
     Status cancelJob(int32_t in_clientId, int32_t in_jobId, bool* _aidl_return) override;
 
-    Status getJobWithId(int32_t in_jobId, TranscodingJob* out_job, bool* _aidl_return) override;
+    Status getJobWithId(int32_t in_jobId, TranscodingJobParcel* out_job,
+                        bool* _aidl_return) override;
 
     virtual inline binder_status_t dump(int /*fd*/, const char** /*args*/, uint32_t /*numArgs*/);
 
 private:
 };
 
-}  // namespace media
 }  // namespace android
 
 #endif  // ANDROID_MEDIA_TRANSCODING_SERVICE_H
diff --git a/services/mediatranscoding/main_mediatranscodingservice.cpp b/services/mediatranscoding/main_mediatranscodingservice.cpp
index 1978eb4..7d862e6 100644
--- a/services/mediatranscoding/main_mediatranscodingservice.cpp
+++ b/services/mediatranscoding/main_mediatranscodingservice.cpp
@@ -29,7 +29,7 @@
     strcpy(argv[0], "media.transcoding");
     sp<ProcessState> proc(ProcessState::self());
     sp<IServiceManager> sm = defaultServiceManager();
-    android::media::MediaTranscodingService::instantiate();
+    android::MediaTranscodingService::instantiate();
 
     ProcessState::self()->startThreadPool();
     IPCThreadState::self()->joinThreadPool();
diff --git a/services/minijail/Android.bp b/services/minijail/Android.bp
index 07a94cc..0713a87 100644
--- a/services/minijail/Android.bp
+++ b/services/minijail/Android.bp
@@ -17,10 +17,14 @@
 cc_library_shared {
     name: "libavservices_minijail",
     defaults: ["libavservices_minijail_defaults"],
+    vendor_available: true,
     export_include_dirs: ["."],
 }
 
-// Small library for media.extractor and media.codec sandboxing.
+// By adding "vendor_available: true" to "libavservices_minijail", we don't
+// need to have "libavservices_minijail_vendor" any longer.
+// "libavservices_minijail_vendor" will be removed, once we replace it with
+// "libavservices_minijail" in all vendor modules. (b/146313710)
 cc_library_shared {
     name: "libavservices_minijail_vendor",
     vendor: true,
diff --git a/services/oboeservice/AAudioEndpointManager.cpp b/services/oboeservice/AAudioEndpointManager.cpp
index a1fc0ea..82cc90e 100644
--- a/services/oboeservice/AAudioEndpointManager.cpp
+++ b/services/oboeservice/AAudioEndpointManager.cpp
@@ -284,7 +284,7 @@
 
         serviceEndpoint->close();
         mSharedCloseCount++;
-        ALOGV("%s() %p for device %d",
+        ALOGV("%s(%p) closed for device %d",
               __func__, serviceEndpoint.get(), serviceEndpoint->getDeviceId());
     }
 }
diff --git a/services/oboeservice/AAudioServiceEndpointShared.cpp b/services/oboeservice/AAudioServiceEndpointShared.cpp
index 0a415fd..9b3b3b8 100644
--- a/services/oboeservice/AAudioServiceEndpointShared.cpp
+++ b/services/oboeservice/AAudioServiceEndpointShared.cpp
@@ -85,7 +85,7 @@
 }
 
 aaudio_result_t AAudioServiceEndpointShared::close() {
-    return getStreamInternal()->close();
+    return getStreamInternal()->releaseCloseFinal();
 }
 
 // Glue between C and C++ callbacks.
diff --git a/services/oboeservice/AAudioServiceStreamBase.cpp b/services/oboeservice/AAudioServiceStreamBase.cpp
index 880a3d7..39e90b1 100644
--- a/services/oboeservice/AAudioServiceStreamBase.cpp
+++ b/services/oboeservice/AAudioServiceStreamBase.cpp
@@ -56,7 +56,8 @@
     LOG_ALWAYS_FATAL_IF(!(getState() == AAUDIO_STREAM_STATE_CLOSED
                         || getState() == AAUDIO_STREAM_STATE_UNINITIALIZED
                         || getState() == AAUDIO_STREAM_STATE_DISCONNECTED),
-                        "service stream still open, state = %d", getState());
+                        "service stream %p still open, state = %d",
+                        this, getState());
 }
 
 std::string AAudioServiceStreamBase::dumpHeader() {
@@ -126,13 +127,13 @@
 }
 
 aaudio_result_t AAudioServiceStreamBase::close() {
-    aaudio_result_t result = AAUDIO_OK;
     if (getState() == AAUDIO_STREAM_STATE_CLOSED) {
         return AAUDIO_OK;
     }
 
     stop();
 
+    aaudio_result_t result = AAUDIO_OK;
     sp<AAudioServiceEndpoint> endpoint = mServiceEndpointWeak.promote();
     if (endpoint == nullptr) {
         result = AAUDIO_ERROR_INVALID_STATE;
diff --git a/services/oboeservice/AAudioServiceStreamMMAP.cpp b/services/oboeservice/AAudioServiceStreamMMAP.cpp
index 837b080..f4e72b7 100644
--- a/services/oboeservice/AAudioServiceStreamMMAP.cpp
+++ b/services/oboeservice/AAudioServiceStreamMMAP.cpp
@@ -49,16 +49,6 @@
         , mInService(inService) {
 }
 
-aaudio_result_t AAudioServiceStreamMMAP::close() {
-    if (getState() == AAUDIO_STREAM_STATE_CLOSED) {
-        return AAUDIO_OK;
-    }
-
-    stop();
-
-    return AAudioServiceStreamBase::close();
-}
-
 // Open stream on HAL and pass information about the shared memory buffer back to the client.
 aaudio_result_t AAudioServiceStreamMMAP::open(const aaudio::AAudioStreamRequest &request) {
 
diff --git a/services/oboeservice/AAudioServiceStreamMMAP.h b/services/oboeservice/AAudioServiceStreamMMAP.h
index 1509f7d..3d56623 100644
--- a/services/oboeservice/AAudioServiceStreamMMAP.h
+++ b/services/oboeservice/AAudioServiceStreamMMAP.h
@@ -67,8 +67,6 @@
 
     aaudio_result_t stopClient(audio_port_handle_t clientHandle) override;
 
-    aaudio_result_t close() override;
-
     const char *getTypeText() const override { return "MMAP"; }
 
 protected:
diff --git a/services/oboeservice/Android.bp b/services/oboeservice/Android.bp
index ca1354d..64d835b 100644
--- a/services/oboeservice/Android.bp
+++ b/services/oboeservice/Android.bp
@@ -55,4 +55,12 @@
         "libutils",
     ],
 
+    header_libs: [
+        "libaudiohal_headers",
+    ],
+
+    include_dirs: [
+        "frameworks/av/media/libnbaio/include_mono",
+        "frameworks/av/media/libnbaio/include",
+    ],
 }