Merge "listByInterface -> listManifestByInterface"
diff --git a/apex/Android.bp b/apex/Android.bp
index 2cc6fcb..42a620b 100644
--- a/apex/Android.bp
+++ b/apex/Android.bp
@@ -33,6 +33,9 @@
             ],
         },
     },
+    prebuilts: [
+        "mediaextractor.policy",
+    ],
     key: "com.android.media.key",
     certificate: ":com.android.media.certificate",
 
@@ -65,6 +68,7 @@
         "com.android.media.swcodec-mediaswcodec.rc",
         "com.android.media.swcodec-ld.config.txt",
         "mediaswcodec.policy",
+        "mediaswcodec.xml",
     ],
     use_vendor: true,
     key: "com.android.media.swcodec.key",
diff --git a/apex/AndroidManifest-media.xml b/apex/AndroidManifest-media.xml
index 17d3f3a..78ed0ed 100644
--- a/apex/AndroidManifest-media.xml
+++ b/apex/AndroidManifest-media.xml
@@ -18,8 +18,10 @@
   package="com.android.media">
   <!-- APEX does not have classes.dex -->
   <application android:hasCode="false" />
+  <!-- Setting maxSdk to lock the module to Q. minSdk is 28 for now to cover Q beta devices. -->
   <uses-sdk
       android:minSdkVersion="28"
+      android:maxSdkVersion="29"
       android:targetSdkVersion="28"
   />
 </manifest>
diff --git a/apex/AndroidManifest-swcodec.xml b/apex/AndroidManifest-swcodec.xml
index bd20dc0..9558644 100644
--- a/apex/AndroidManifest-swcodec.xml
+++ b/apex/AndroidManifest-swcodec.xml
@@ -18,8 +18,10 @@
   package="com.android.media.swcodec">
   <!-- APEX does not have classes.dex -->
   <application android:hasCode="false" />
+  <!-- Setting maxSdk to lock the module to Q. minSdk is 28 for now to cover Q beta devices. -->
   <uses-sdk
       android:minSdkVersion="28"
+      android:maxSdkVersion="29"
       android:targetSdkVersion="28"
   />
 </manifest>
diff --git a/apex/ld.config.txt b/apex/ld.config.txt
index 87af5a1..e74415c 100644
--- a/apex/ld.config.txt
+++ b/apex/ld.config.txt
@@ -37,9 +37,11 @@
 
 namespace.platform.isolated = true
 
-namespace.platform.search.paths = /system/${LIB}
+namespace.platform.search.paths  = /system/${LIB}
+namespace.platform.search.paths += /apex/com.android.runtime/${LIB}
 namespace.platform.asan.search.paths  = /data/asan/system/${LIB}
 namespace.platform.asan.search.paths +=           /system/${LIB}
+namespace.platform.asan.search.paths += /apex/com.android.runtime/${LIB}
 
 # /system/lib/libc.so, etc are symlinks to /apex/com.android.lib/lib/bionic/libc.so, etc.
 # Add /apex/... path to the permitted paths because linker uses realpath(3)
diff --git a/apex/manifest.json b/apex/manifest.json
index cee94e2..03b9dd0 100644
--- a/apex/manifest.json
+++ b/apex/manifest.json
@@ -1,4 +1,4 @@
 {
   "name": "com.android.media",
-  "version": 210000000
+  "version": 220000000
 }
diff --git a/apex/manifest_codec.json b/apex/manifest_codec.json
index b83e65a..58ce868 100644
--- a/apex/manifest_codec.json
+++ b/apex/manifest_codec.json
@@ -1,4 +1,4 @@
 {
   "name": "com.android.media.swcodec",
-  "version": 210000000
+  "version": 220000000
 }
diff --git a/camera/ndk/Android.bp b/camera/ndk/Android.bp
index 7786856..a2ee65d 100644
--- a/camera/ndk/Android.bp
+++ b/camera/ndk/Android.bp
@@ -77,7 +77,7 @@
 
 cc_library_shared {
     name: "libcamera2ndk_vendor",
-    vendor_available: true,
+    vendor: true,
     srcs: [
         "ndk_vendor/impl/ACameraDevice.cpp",
         "ndk_vendor/impl/ACameraManager.cpp",
diff --git a/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp b/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
index 1fdff40..529c167 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
+++ b/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
@@ -259,15 +259,27 @@
     }
 }
 
+static void addMetadataToPhysicalCameraSettings(const CameraMetadata *metadata,
+        const std::string &cameraId, PhysicalCameraSettings *physicalCameraSettings) {
+    CameraMetadata metadataCopy = *metadata;
+    camera_metadata_t *camera_metadata = metadataCopy.release();
+    HCameraMetadata hCameraMetadata;
+    utils::convertToHidl(camera_metadata, &hCameraMetadata, /*shouldOwn*/ true);
+    physicalCameraSettings->settings.metadata(std::move(hCameraMetadata));
+    physicalCameraSettings->id = cameraId;
+}
+
 void CameraDevice::addRequestSettingsMetadata(ACaptureRequest *aCaptureRequest,
         sp<CaptureRequest> &req) {
-    CameraMetadata metadataCopy = aCaptureRequest->settings->getInternalData();
-    const camera_metadata_t *camera_metadata = metadataCopy.getAndLock();
-    HCameraMetadata hCameraMetadata;
-    utils::convertToHidl(camera_metadata, &hCameraMetadata);
-    metadataCopy.unlock(camera_metadata);
-    req->mPhysicalCameraSettings.resize(1);
-    req->mPhysicalCameraSettings[0].settings.metadata(std::move(hCameraMetadata));
+    req->mPhysicalCameraSettings.resize(1 + aCaptureRequest->physicalSettings.size());
+    addMetadataToPhysicalCameraSettings(&(aCaptureRequest->settings->getInternalData()), getId(),
+                    &(req->mPhysicalCameraSettings[0]));
+    size_t i = 1;
+    for (auto &physicalSetting : aCaptureRequest->physicalSettings) {
+        addMetadataToPhysicalCameraSettings(&(physicalSetting.second->getInternalData()),
+                physicalSetting.first, &(req->mPhysicalCameraSettings[i]));
+        i++;
+    }
 }
 
 camera_status_t CameraDevice::updateOutputConfigurationLocked(ACaptureSessionOutput *output) {
@@ -398,10 +410,9 @@
     cameraSettings.id = id;
     // TODO: Do we really need to copy the metadata here ?
     CameraMetadata metadataCopy = metadata->getInternalData();
-    const camera_metadata_t *cameraMetadata = metadataCopy.getAndLock();
+    camera_metadata_t *cameraMetadata = metadataCopy.release();
     HCameraMetadata hCameraMetadata;
-    utils::convertToHidl(cameraMetadata, &hCameraMetadata);
-    metadataCopy.unlock(cameraMetadata);
+    utils::convertToHidl(cameraMetadata, &hCameraMetadata, true);
     if (metadata != nullptr) {
         if (hCameraMetadata.data() != nullptr &&
             mCaptureRequestMetadataQueue != nullptr &&
@@ -426,11 +437,12 @@
         const std::string& id = req->mPhysicalCameraSettings[i].id;
         CameraMetadata clone;
         utils::convertFromHidlCloned(req->mPhysicalCameraSettings[i].settings.metadata(), &clone);
+        camera_metadata_t *clonep = clone.release();
         if (id == deviceId) {
-            pRequest->settings = new ACameraMetadata(clone.release(), ACameraMetadata::ACM_REQUEST);
+            pRequest->settings = new ACameraMetadata(clonep, ACameraMetadata::ACM_REQUEST);
         } else {
             pRequest->physicalSettings[req->mPhysicalCameraSettings[i].id] =
-                    new ACameraMetadata(clone.release(), ACameraMetadata::ACM_REQUEST);
+                    new ACameraMetadata(clonep, ACameraMetadata::ACM_REQUEST);
         }
     }
     pRequest->targets = new ACameraOutputTargets();
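
Reviewer note: the request-metadata path above stops borrowing the buffer via CameraMetadata::getAndLock()/unlock() (where the CameraMetadata copy keeps ownership) and instead uses CameraMetadata::release(), which hands the raw camera_metadata_t over so convertToHidl(..., /*shouldOwn*/ true) can let the HIDL vector own it. A minimal sketch of the two ownership contracts, assuming the standard android::CameraMetadata API (header paths are an assumption):

    #include <camera/CameraMetadata.h>
    #include <system/camera_metadata.h>

    // Borrowing: the CameraMetadata object still owns the buffer; it must be
    // unlocked, and the pointer is only valid while 'copy' is alive.
    void borrowMetadata(android::CameraMetadata &copy) {
        const camera_metadata_t *borrowed = copy.getAndLock();
        // ... read-only use of 'borrowed' ...
        copy.unlock(borrowed);
    }

    // Transferring: release() relinquishes ownership to the caller, which must
    // eventually free the buffer (or hand it to a container that will).
    void transferMetadata(android::CameraMetadata &copy) {
        camera_metadata_t *owned = copy.release();
        // ... give 'owned' to e.g. a hidl_vec with shouldOwn == true ...
        free_camera_metadata(owned);  // only if no one else took ownership
    }
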
diff --git a/camera/ndk/ndk_vendor/impl/utils.cpp b/camera/ndk/ndk_vendor/impl/utils.cpp
index 5d2d47c..e4fb204 100644
--- a/camera/ndk/ndk_vendor/impl/utils.cpp
+++ b/camera/ndk/ndk_vendor/impl/utils.cpp
@@ -64,13 +64,14 @@
     return true;
 }
 
-// Note: existing data in dst will be gone. Caller still owns the memory of src
-void convertToHidl(const camera_metadata_t *src, HCameraMetadata* dst) {
+// Note: existing data in dst will be gone. dst owns memory if shouldOwn is set
+//       to true.
+void convertToHidl(const camera_metadata_t *src, HCameraMetadata* dst, bool shouldOwn) {
     if (src == nullptr) {
         return;
     }
     size_t size = get_camera_metadata_size(src);
-    dst->setToExternal((uint8_t *) src, size);
+    dst->setToExternal((uint8_t *) src, size, shouldOwn);
     return;
 }
 
diff --git a/camera/ndk/ndk_vendor/impl/utils.h b/camera/ndk/ndk_vendor/impl/utils.h
index a03c7bc..f389f03 100644
--- a/camera/ndk/ndk_vendor/impl/utils.h
+++ b/camera/ndk/ndk_vendor/impl/utils.h
@@ -168,8 +168,8 @@
 
 bool convertFromHidlCloned(const HCameraMetadata &metadata, CameraMetadata *rawMetadata);
 
-// Note: existing data in dst will be gone. Caller still owns the memory of src
-void convertToHidl(const camera_metadata_t *src, HCameraMetadata* dst);
+// Note: existing data in dst will be gone. dst owns the memory if shouldOwn is set to true.
+void convertToHidl(const camera_metadata_t *src, HCameraMetadata* dst, bool shouldOwn = false);
 
 TemplateId convertToHidl(ACameraDevice_request_template templateId);
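
Reviewer note: the new shouldOwn argument is forwarded to hidl_vec::setToExternal(), whose third parameter (defaulting to false) controls whether the vector adopts the external buffer and frees it on destruction, or merely points at memory the caller keeps owning. A small sketch of the distinction, assuming libhidl's hidl_vec API:

    #include <hidl/HidlSupport.h>
    #include <cstdint>

    using ::android::hardware::hidl_vec;

    void setToExternalSketch() {
        uint8_t *kept = new uint8_t[16];
        hidl_vec<uint8_t> view;
        view.setToExternal(kept, 16);            // shouldOwn defaults to false:
                                                 // the caller still owns 'kept'.

        uint8_t *adopted = new uint8_t[16];
        hidl_vec<uint8_t> owner;
        owner.setToExternal(adopted, 16, true);  // shouldOwn == true: 'owner'
                                                 // frees 'adopted' when destroyed.

        delete[] kept;                           // only the borrowed buffer is
                                                 // freed by hand here.
    }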
 
diff --git a/drm/libmediadrm/Android.bp b/drm/libmediadrm/Android.bp
index 01efb22..d6db1d4 100644
--- a/drm/libmediadrm/Android.bp
+++ b/drm/libmediadrm/Android.bp
@@ -59,6 +59,7 @@
     shared_libs: [
         "android.hardware.drm@1.0",
         "android.hardware.drm@1.1",
+        "android.hardware.drm@1.2",
         "libbinder",
         "libhidlbase",
         "liblog",
@@ -89,6 +90,7 @@
     shared_libs: [
         "android.hardware.drm@1.0",
         "android.hardware.drm@1.1",
+        "android.hardware.drm@1.2",
         "libbase",
         "libbinder",
         "libhidlbase",
diff --git a/drm/libmediadrm/CryptoHal.cpp b/drm/libmediadrm/CryptoHal.cpp
index b757f35..954608f 100644
--- a/drm/libmediadrm/CryptoHal.cpp
+++ b/drm/libmediadrm/CryptoHal.cpp
@@ -300,7 +300,7 @@
     ssize_t offset;
     size_t size;
 
-    if (memory == NULL && buffer == NULL) {
+    if (memory == NULL || buffer == NULL) {
         return UNEXPECTED_NULL;
     }
 
diff --git a/drm/libmediadrm/DrmHal.cpp b/drm/libmediadrm/DrmHal.cpp
index 731b8d0..7cfe900 100644
--- a/drm/libmediadrm/DrmHal.cpp
+++ b/drm/libmediadrm/DrmHal.cpp
@@ -39,7 +39,6 @@
 #include <mediadrm/DrmSessionManager.h>
 
 using drm::V1_0::KeyedVector;
-using drm::V1_0::KeyStatusType;
 using drm::V1_0::KeyRequestType;
 using drm::V1_0::KeyType;
 using drm::V1_0::KeyValue;
@@ -50,6 +49,7 @@
 using drm::V1_1::SecureStopRelease;
 using drm::V1_1::SecurityLevel;
 using drm::V1_2::KeySetId;
+using drm::V1_2::KeyStatusType;
 using ::android::hardware::drm::V1_1::DrmMetricGroup;
 using ::android::hardware::hidl_array;
 using ::android::hardware::hidl_string;
@@ -515,6 +515,17 @@
 }
 
 Return<void> DrmHal::sendKeysChange(const hidl_vec<uint8_t>& sessionId,
+        const hidl_vec<KeyStatus_V1_0>& keyStatusList_V1_0, bool hasNewUsableKey) {
+    std::vector<KeyStatus> keyStatusVec;
+    for (const auto &keyStatus_V1_0 : keyStatusList_V1_0) {
+        keyStatusVec.push_back({keyStatus_V1_0.keyId,
+                static_cast<KeyStatusType>(keyStatus_V1_0.type)});
+    }
+    hidl_vec<KeyStatus> keyStatusList_V1_2(keyStatusVec);
+    return sendKeysChange_1_2(sessionId, keyStatusList_V1_2, hasNewUsableKey);
+}
+
+Return<void> DrmHal::sendKeysChange_1_2(const hidl_vec<uint8_t>& sessionId,
         const hidl_vec<KeyStatus>& keyStatusList, bool hasNewUsableKey) {
 
     mEventLock.lock();
@@ -544,6 +555,9 @@
             case KeyStatusType::STATUSPENDING:
                 type = DrmPlugin::kKeyStatusType_StatusPending;
                 break;
+            case KeyStatusType::USABLEINFUTURE:
+                type = DrmPlugin::kKeyStatusType_UsableInFuture;
+                break;
             case KeyStatusType::INTERNALERROR:
             default:
                 type = DrmPlugin::kKeyStatusType_InternalError;
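
Reviewer note: the @1.0 sendKeysChange callback now funnels into sendKeysChange_1_2 by casting each V1_0 KeyStatusType into the wider V1_2 enum. This is safe because a HIDL minor-version enum may only append values, so every V1_0 value keeps the same underlying integer in V1_2 (which then adds USABLEINFUTURE, handled in the new switch case above). A self-contained illustration with hypothetical stand-in enums shaped like the generated ones:

    #include <cstdint>

    // Hypothetical stand-ins for the generated drm@1.0 / drm@1.2 enums.
    enum class KeyStatusType_V1_0 : uint32_t {
        USABLE, EXPIRED, OUTPUTNOTALLOWED, STATUSPENDING, INTERNALERROR,
    };
    enum class KeyStatusType_V1_2 : uint32_t {
        USABLE, EXPIRED, OUTPUTNOTALLOWED, STATUSPENDING, INTERNALERROR,
        USABLEINFUTURE,  // appended by the newer version; earlier values unchanged
    };

    // Because the newer enum only appends, widening by static_cast is lossless.
    constexpr KeyStatusType_V1_2 widen(KeyStatusType_V1_0 s) {
        return static_cast<KeyStatusType_V1_2>(s);
    }

    static_assert(widen(KeyStatusType_V1_0::STATUSPENDING) ==
                  KeyStatusType_V1_2::STATUSPENDING,
                  "V1_0 values must be a prefix of V1_2");
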
diff --git a/drm/libmediadrm/DrmMetrics.cpp b/drm/libmediadrm/DrmMetrics.cpp
index 4fed707..3080802 100644
--- a/drm/libmediadrm/DrmMetrics.cpp
+++ b/drm/libmediadrm/DrmMetrics.cpp
@@ -32,7 +32,7 @@
 using ::android::hardware::hidl_string;
 using ::android::hardware::hidl_vec;
 using ::android::hardware::drm::V1_0::EventType;
-using ::android::hardware::drm::V1_0::KeyStatusType;
+using ::android::hardware::drm::V1_2::KeyStatusType;
 using ::android::hardware::drm::V1_1::DrmMetricGroup;
 using ::android::os::PersistableBundle;
 
diff --git a/drm/libmediadrm/tests/DrmMetrics_test.cpp b/drm/libmediadrm/tests/DrmMetrics_test.cpp
index 64aa9d0..5c8a1b0 100644
--- a/drm/libmediadrm/tests/DrmMetrics_test.cpp
+++ b/drm/libmediadrm/tests/DrmMetrics_test.cpp
@@ -30,7 +30,7 @@
 using ::android::drm_metrics::DrmFrameworkMetrics;
 using ::android::hardware::hidl_vec;
 using ::android::hardware::drm::V1_0::EventType;
-using ::android::hardware::drm::V1_0::KeyStatusType;
+using ::android::hardware::drm::V1_2::KeyStatusType;
 using ::android::hardware::drm::V1_0::Status;
 using ::android::hardware::drm::V1_1::DrmMetricGroup;
 using ::android::os::PersistableBundle;
diff --git a/drm/mediadrm/plugins/clearkey/hidl/InitDataParser.cpp b/drm/mediadrm/plugins/clearkey/hidl/InitDataParser.cpp
index 8ebb42b..b988ce0 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/InitDataParser.cpp
+++ b/drm/mediadrm/plugins/clearkey/hidl/InitDataParser.cpp
@@ -51,16 +51,16 @@
     // Build a list of the key IDs
     std::vector<const uint8_t*> keyIds;
 
-    if (mimeType == kIsoBmffVideoMimeType ||
-        mimeType == kIsoBmffAudioMimeType ||
-        mimeType == kCencInitDataFormat) {
+    if (mimeType == kIsoBmffVideoMimeType.c_str() ||
+        mimeType == kIsoBmffAudioMimeType.c_str() ||
+        mimeType == kCencInitDataFormat.c_str()) {
         Status res = parsePssh(initData, &keyIds);
         if (res != Status::OK) {
             return res;
         }
-    } else if (mimeType == kWebmVideoMimeType ||
-        mimeType == kWebmAudioMimeType ||
-        mimeType == kWebmInitDataFormat) {
+    } else if (mimeType == kWebmVideoMimeType.c_str() ||
+        mimeType == kWebmAudioMimeType.c_str() ||
+        mimeType == kWebmInitDataFormat.c_str()) {
         // WebM "init data" is just a single key ID
         if (initData.size() != kKeyIdSize) {
             return Status::ERROR_DRM_CANNOT_HANDLE;
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/DrmPlugin.h b/drm/mediadrm/plugins/clearkey/hidl/include/DrmPlugin.h
index ba5fa65..f294d4d 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/include/DrmPlugin.h
+++ b/drm/mediadrm/plugins/clearkey/hidl/include/DrmPlugin.h
@@ -62,6 +62,7 @@
 
 typedef drm::V1_1::KeyRequestType KeyRequestType_V1_1;
 typedef drm::V1_2::IDrmPluginListener IDrmPluginListener_V1_2;
+typedef drm::V1_2::KeyStatus KeyStatus_V1_2;
 typedef drm::V1_2::Status Status_V1_2;
 typedef drm::V1_2::HdcpLevel HdcpLevel_V1_2;
 
@@ -335,6 +336,15 @@
         return Void();
     }
 
+    Return<void> sendKeysChange_1_2(
+            const hidl_vec<uint8_t>& sessionId,
+            const hidl_vec<KeyStatus_V1_2>& keyStatusList, bool hasNewUsableKey) {
+        if (mListenerV1_2 != NULL) {
+            mListenerV1_2->sendKeysChange_1_2(sessionId, keyStatusList, hasNewUsableKey);
+        }
+        return Void();
+    }
+
     Return<void> sendSessionLostState(
             const hidl_vec<uint8_t>& sessionId) {
         if (mListenerV1_2 != NULL) {
diff --git a/media/bufferpool/1.0/Connection.cpp b/media/bufferpool/1.0/Connection.cpp
index e58f595..be89701 100644
--- a/media/bufferpool/1.0/Connection.cpp
+++ b/media/bufferpool/1.0/Connection.cpp
@@ -32,14 +32,22 @@
             status = mAccessor->fetch(
                     mConnectionId, transactionId, bufferId, &handle);
             if (status == ResultStatus::OK) {
-                _hidl_cb(status, Buffer{bufferId, handle});
+                Buffer buffer = {};
+                buffer.id = bufferId;
+                buffer.buffer = handle;
+                _hidl_cb(status, buffer);
                 return Void();
             }
         } else {
             mAccessor->cleanUp(false);
         }
     }
-    _hidl_cb(status, Buffer{0, nullptr});
+
+    Buffer buffer = {};
+    buffer.id = 0;
+    buffer.buffer = nullptr;
+
+    _hidl_cb(status, buffer);
     return Void();
 }
 
diff --git a/media/bufferpool/2.0/Connection.cpp b/media/bufferpool/2.0/Connection.cpp
index 6bd0e79..57d0c7e 100644
--- a/media/bufferpool/2.0/Connection.cpp
+++ b/media/bufferpool/2.0/Connection.cpp
@@ -32,14 +32,22 @@
             status = mAccessor->fetch(
                     mConnectionId, transactionId, bufferId, &handle);
             if (status == ResultStatus::OK) {
-                _hidl_cb(status, Buffer{bufferId, handle});
+                Buffer buffer = {};
+                buffer.id = bufferId;
+                buffer.buffer = handle;
+                _hidl_cb(status, buffer);
                 return Void();
             }
         } else {
             mAccessor->cleanUp(false);
         }
     }
-    _hidl_cb(status, Buffer{0, nullptr});
+
+    Buffer buffer = {};
+    buffer.id = 0;
+    buffer.buffer = nullptr;
+
+    _hidl_cb(status, buffer);
     return Void();
 }
 
diff --git a/media/codec2/components/aac/C2SoftAacDec.cpp b/media/codec2/components/aac/C2SoftAacDec.cpp
index 4d00d35..2d4e126 100644
--- a/media/codec2/components/aac/C2SoftAacDec.cpp
+++ b/media/codec2/components/aac/C2SoftAacDec.cpp
@@ -205,10 +205,6 @@
     int32_t getDrcEffectType() const { return mDrcEffectType->value; }
 
 private:
-    std::shared_ptr<C2StreamBufferTypeSetting::input> mInputFormat;
-    std::shared_ptr<C2StreamBufferTypeSetting::output> mOutputFormat;
-    std::shared_ptr<C2PortMediaTypeSetting::input> mInputMediaType;
-    std::shared_ptr<C2PortMediaTypeSetting::output> mOutputMediaType;
     std::shared_ptr<C2StreamSampleRateInfo::output> mSampleRate;
     std::shared_ptr<C2StreamChannelCountInfo::output> mChannelCount;
     std::shared_ptr<C2StreamBitrateInfo::input> mBitrate;
diff --git a/media/codec2/components/aac/C2SoftAacEnc.cpp b/media/codec2/components/aac/C2SoftAacEnc.cpp
index 71eb1ac..7efc7f1 100644
--- a/media/codec2/components/aac/C2SoftAacEnc.cpp
+++ b/media/codec2/components/aac/C2SoftAacEnc.cpp
@@ -29,33 +29,32 @@
 
 namespace android {
 
-class C2SoftAacEnc::IntfImpl : public C2InterfaceHelper {
+namespace {
+
+constexpr char COMPONENT_NAME[] = "c2.android.aac.encoder";
+
+}  // namespace
+
+class C2SoftAacEnc::IntfImpl : public SimpleInterface<void>::BaseParams {
 public:
     explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
-        : C2InterfaceHelper(helper) {
-
+        : SimpleInterface<void>::BaseParams(
+                helper,
+                COMPONENT_NAME,
+                C2Component::KIND_ENCODER,
+                C2Component::DOMAIN_AUDIO,
+                MEDIA_MIMETYPE_AUDIO_AAC) {
+        noPrivateBuffers();
+        noInputReferences();
+        noOutputReferences();
+        noInputLatency();
+        noTimeStretch();
         setDerivedInstance(this);
 
         addParameter(
-                DefineParam(mInputFormat, C2_PARAMKEY_INPUT_STREAM_BUFFER_TYPE)
-                .withConstValue(new C2StreamBufferTypeSetting::input(0u, C2BufferData::LINEAR))
-                .build());
-
-        addParameter(
-                DefineParam(mOutputFormat, C2_PARAMKEY_OUTPUT_STREAM_BUFFER_TYPE)
-                .withConstValue(new C2StreamBufferTypeSetting::output(0u, C2BufferData::LINEAR))
-                .build());
-
-        addParameter(
-                DefineParam(mInputMediaType, C2_PARAMKEY_INPUT_MEDIA_TYPE)
-                .withConstValue(AllocSharedString<C2PortMediaTypeSetting::input>(
-                        MEDIA_MIMETYPE_AUDIO_RAW))
-                .build());
-
-        addParameter(
-                DefineParam(mOutputMediaType, C2_PARAMKEY_OUTPUT_MEDIA_TYPE)
-                .withConstValue(AllocSharedString<C2PortMediaTypeSetting::output>(
-                        MEDIA_MIMETYPE_AUDIO_AAC))
+                DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
+                .withConstValue(new C2ComponentAttributesSetting(
+                    C2Component::ATTRIB_IS_TEMPORAL))
                 .build());
 
         addParameter(
@@ -125,10 +124,6 @@
     }
 
 private:
-    std::shared_ptr<C2StreamBufferTypeSetting::input> mInputFormat;
-    std::shared_ptr<C2StreamBufferTypeSetting::output> mOutputFormat;
-    std::shared_ptr<C2PortMediaTypeSetting::input> mInputMediaType;
-    std::shared_ptr<C2PortMediaTypeSetting::output> mOutputMediaType;
     std::shared_ptr<C2StreamSampleRateInfo::input> mSampleRate;
     std::shared_ptr<C2StreamChannelCountInfo::input> mChannelCount;
     std::shared_ptr<C2StreamBitrateInfo::output> mBitrate;
@@ -136,8 +131,6 @@
     std::shared_ptr<C2StreamProfileLevelInfo::output> mProfileLevel;
 };
 
-constexpr char COMPONENT_NAME[] = "c2.android.aac.encoder";
-
 C2SoftAacEnc::C2SoftAacEnc(
         const char *name,
         c2_node_id_t id,
diff --git a/media/codec2/components/amr_nb_wb/C2SoftAmrDec.cpp b/media/codec2/components/amr_nb_wb/C2SoftAmrDec.cpp
index edad75a..6578ad2 100644
--- a/media/codec2/components/amr_nb_wb/C2SoftAmrDec.cpp
+++ b/media/codec2/components/amr_nb_wb/C2SoftAmrDec.cpp
@@ -33,43 +33,41 @@
 
 namespace android {
 
+namespace {
+
 #ifdef AMRNB
   constexpr char COMPONENT_NAME[] = "c2.android.amrnb.decoder";
 #else
   constexpr char COMPONENT_NAME[] = "c2.android.amrwb.decoder";
 #endif
 
-class C2SoftAmrDec::IntfImpl : public C2InterfaceHelper {
+}  // namespace
+
+class C2SoftAmrDec::IntfImpl : public SimpleInterface<void>::BaseParams {
 public:
     explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
-        : C2InterfaceHelper(helper) {
-
+        : SimpleInterface<void>::BaseParams(
+                helper,
+                COMPONENT_NAME,
+                C2Component::KIND_DECODER,
+                C2Component::DOMAIN_AUDIO,
+#ifdef AMRNB
+                MEDIA_MIMETYPE_AUDIO_AMR_NB
+#else
+                MEDIA_MIMETYPE_AUDIO_AMR_WB
+#endif
+                ) {
+        noPrivateBuffers();
+        noInputReferences();
+        noOutputReferences();
+        noInputLatency();
+        noTimeStretch();
         setDerivedInstance(this);
 
         addParameter(
-                DefineParam(mInputFormat, C2_PARAMKEY_INPUT_STREAM_BUFFER_TYPE)
-                .withConstValue(new C2StreamBufferTypeSetting::input(0u, C2BufferData::LINEAR))
-                .build());
-
-        addParameter(
-                DefineParam(mOutputFormat, C2_PARAMKEY_OUTPUT_STREAM_BUFFER_TYPE)
-                .withConstValue(new C2StreamBufferTypeSetting::output(0u, C2BufferData::LINEAR))
-                .build());
-
-        addParameter(
-                DefineParam(mInputMediaType, C2_PARAMKEY_INPUT_MEDIA_TYPE)
-                .withConstValue(AllocSharedString<C2PortMediaTypeSetting::input>(
-#ifdef AMRNB
-                        MEDIA_MIMETYPE_AUDIO_AMR_NB
-#else
-                        MEDIA_MIMETYPE_AUDIO_AMR_WB
-#endif
-                )).build());
-
-        addParameter(
-                DefineParam(mOutputMediaType, C2_PARAMKEY_OUTPUT_MEDIA_TYPE)
-                .withConstValue(AllocSharedString<C2PortMediaTypeSetting::output>(
-                        MEDIA_MIMETYPE_AUDIO_RAW))
+                DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
+                .withConstValue(new C2ComponentAttributesSetting(
+                    C2Component::ATTRIB_IS_TEMPORAL))
                 .build());
 
         addParameter(
@@ -110,10 +108,6 @@
     }
 
 private:
-    std::shared_ptr<C2StreamBufferTypeSetting::input> mInputFormat;
-    std::shared_ptr<C2StreamBufferTypeSetting::output> mOutputFormat;
-    std::shared_ptr<C2PortMediaTypeSetting::input> mInputMediaType;
-    std::shared_ptr<C2PortMediaTypeSetting::output> mOutputMediaType;
     std::shared_ptr<C2StreamSampleRateInfo::output> mSampleRate;
     std::shared_ptr<C2StreamChannelCountInfo::output> mChannelCount;
     std::shared_ptr<C2StreamBitrateInfo::input> mBitrate;
diff --git a/media/codec2/components/amr_nb_wb/C2SoftAmrNbEnc.cpp b/media/codec2/components/amr_nb_wb/C2SoftAmrNbEnc.cpp
index 3d3aa7d..e2d8cb6 100644
--- a/media/codec2/components/amr_nb_wb/C2SoftAmrNbEnc.cpp
+++ b/media/codec2/components/amr_nb_wb/C2SoftAmrNbEnc.cpp
@@ -27,36 +27,33 @@
 
 namespace android {
 
+namespace {
+
 constexpr char COMPONENT_NAME[] = "c2.android.amrnb.encoder";
 
-class C2SoftAmrNbEnc::IntfImpl : public C2InterfaceHelper {
-   public:
-    explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper>& helper)
-        : C2InterfaceHelper(helper) {
+}  // namespace
+
+
+class C2SoftAmrNbEnc::IntfImpl : public SimpleInterface<void>::BaseParams {
+public:
+    explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
+        : SimpleInterface<void>::BaseParams(
+                helper,
+                COMPONENT_NAME,
+                C2Component::KIND_ENCODER,
+                C2Component::DOMAIN_AUDIO,
+                MEDIA_MIMETYPE_AUDIO_AMR_NB) {
+        noPrivateBuffers();
+        noInputReferences();
+        noOutputReferences();
+        noInputLatency();
+        noTimeStretch();
         setDerivedInstance(this);
 
         addParameter(
-            DefineParam(mInputFormat, C2_PARAMKEY_INPUT_STREAM_BUFFER_TYPE)
-                .withConstValue(
-                    new C2StreamBufferTypeSetting::input(0u, C2BufferData::LINEAR))
-                .build());
-
-        addParameter(
-            DefineParam(mOutputFormat, C2_PARAMKEY_OUTPUT_STREAM_BUFFER_TYPE)
-                .withConstValue(
-                    new C2StreamBufferTypeSetting::output(0u, C2BufferData::LINEAR))
-                .build());
-
-        addParameter(
-            DefineParam(mInputMediaType, C2_PARAMKEY_INPUT_MEDIA_TYPE)
-                .withConstValue(AllocSharedString<C2PortMediaTypeSetting::input>(
-                    MEDIA_MIMETYPE_AUDIO_RAW))
-                .build());
-
-        addParameter(
-            DefineParam(mOutputMediaType, C2_PARAMKEY_OUTPUT_MEDIA_TYPE)
-                .withConstValue(AllocSharedString<C2PortMediaTypeSetting::output>(
-                    MEDIA_MIMETYPE_AUDIO_AMR_NB))
+                DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
+                .withConstValue(new C2ComponentAttributesSetting(
+                    C2Component::ATTRIB_IS_TEMPORAL))
                 .build());
 
         addParameter(
@@ -91,11 +88,7 @@
     uint32_t getChannelCount() const { return mChannelCount->value; }
     uint32_t getBitrate() const { return mBitrate->value; }
 
-   private:
-    std::shared_ptr<C2StreamBufferTypeSetting::input> mInputFormat;
-    std::shared_ptr<C2StreamBufferTypeSetting::output> mOutputFormat;
-    std::shared_ptr<C2PortMediaTypeSetting::input> mInputMediaType;
-    std::shared_ptr<C2PortMediaTypeSetting::output> mOutputMediaType;
+private:
     std::shared_ptr<C2StreamSampleRateInfo::input> mSampleRate;
     std::shared_ptr<C2StreamChannelCountInfo::input> mChannelCount;
     std::shared_ptr<C2StreamBitrateInfo::output> mBitrate;
diff --git a/media/codec2/components/amr_nb_wb/C2SoftAmrWbEnc.cpp b/media/codec2/components/amr_nb_wb/C2SoftAmrWbEnc.cpp
index 379cb32..84ae4b7 100644
--- a/media/codec2/components/amr_nb_wb/C2SoftAmrWbEnc.cpp
+++ b/media/codec2/components/amr_nb_wb/C2SoftAmrWbEnc.cpp
@@ -29,36 +29,32 @@
 
 namespace android {
 
+namespace {
+
 constexpr char COMPONENT_NAME[] = "c2.android.amrwb.encoder";
 
-class C2SoftAmrWbEnc::IntfImpl : public C2InterfaceHelper {
-   public:
-    explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper>& helper)
-        : C2InterfaceHelper(helper) {
+}  // namespace
+
+class C2SoftAmrWbEnc::IntfImpl : public SimpleInterface<void>::BaseParams {
+public:
+    explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
+        : SimpleInterface<void>::BaseParams(
+                helper,
+                COMPONENT_NAME,
+                C2Component::KIND_ENCODER,
+                C2Component::DOMAIN_AUDIO,
+                MEDIA_MIMETYPE_AUDIO_AMR_WB) {
+        noPrivateBuffers();
+        noInputReferences();
+        noOutputReferences();
+        noInputLatency();
+        noTimeStretch();
         setDerivedInstance(this);
 
         addParameter(
-            DefineParam(mInputFormat, C2_PARAMKEY_INPUT_STREAM_BUFFER_TYPE)
-                .withConstValue(
-                    new C2StreamBufferTypeSetting::input(0u, C2BufferData::LINEAR))
-                .build());
-
-        addParameter(
-            DefineParam(mOutputFormat, C2_PARAMKEY_OUTPUT_STREAM_BUFFER_TYPE)
-                .withConstValue(
-                    new C2StreamBufferTypeSetting::output(0u, C2BufferData::LINEAR))
-                .build());
-
-        addParameter(
-            DefineParam(mInputMediaType, C2_PARAMKEY_INPUT_MEDIA_TYPE)
-                .withConstValue(AllocSharedString<C2PortMediaTypeSetting::input>(
-                    MEDIA_MIMETYPE_AUDIO_RAW))
-                .build());
-
-        addParameter(
-            DefineParam(mOutputMediaType, C2_PARAMKEY_OUTPUT_MEDIA_TYPE)
-                .withConstValue(AllocSharedString<C2PortMediaTypeSetting::output>(
-                    MEDIA_MIMETYPE_AUDIO_AMR_WB))
+                DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
+                .withConstValue(new C2ComponentAttributesSetting(
+                    C2Component::ATTRIB_IS_TEMPORAL))
                 .build());
 
         addParameter(
@@ -93,11 +89,7 @@
     uint32_t getChannelCount() const { return mChannelCount->value; }
     uint32_t getBitrate() const { return mBitrate->value; }
 
-   private:
-    std::shared_ptr<C2StreamBufferTypeSetting::input> mInputFormat;
-    std::shared_ptr<C2StreamBufferTypeSetting::output> mOutputFormat;
-    std::shared_ptr<C2PortMediaTypeSetting::input> mInputMediaType;
-    std::shared_ptr<C2PortMediaTypeSetting::output> mOutputMediaType;
+private:
     std::shared_ptr<C2StreamSampleRateInfo::input> mSampleRate;
     std::shared_ptr<C2StreamChannelCountInfo::input> mChannelCount;
     std::shared_ptr<C2StreamBitrateInfo::output> mBitrate;
diff --git a/media/codec2/components/aom/C2SoftAomDec.cpp b/media/codec2/components/aom/C2SoftAomDec.cpp
index 4bcc2c6..769895c 100644
--- a/media/codec2/components/aom/C2SoftAomDec.cpp
+++ b/media/codec2/components/aom/C2SoftAomDec.cpp
@@ -78,6 +78,26 @@
                 .withSetter(ProfileLevelSetter, mSize)
                 .build());
 
+        mHdr10PlusInfoInput = C2StreamHdr10PlusInfo::input::AllocShared(0);
+        addParameter(
+                DefineParam(mHdr10PlusInfoInput, C2_PARAMKEY_INPUT_HDR10_PLUS_INFO)
+                .withDefault(mHdr10PlusInfoInput)
+                .withFields({
+                    C2F(mHdr10PlusInfoInput, m.value).any(),
+                })
+                .withSetter(Hdr10PlusInfoInputSetter)
+                .build());
+
+        mHdr10PlusInfoOutput = C2StreamHdr10PlusInfo::output::AllocShared(0);
+        addParameter(
+                DefineParam(mHdr10PlusInfoOutput, C2_PARAMKEY_OUTPUT_HDR10_PLUS_INFO)
+                .withDefault(mHdr10PlusInfoOutput)
+                .withFields({
+                    C2F(mHdr10PlusInfoOutput, m.value).any(),
+                })
+                .withSetter(Hdr10PlusInfoOutputSetter)
+                .build());
+
         addParameter(DefineParam(mMaxSize, C2_PARAMKEY_MAX_PICTURE_SIZE)
                          .withDefault(new C2StreamMaxPictureSizeTuning::output(
                              0u, 320, 240))
@@ -203,6 +223,18 @@
         return mDefaultColorAspects;
     }
 
+    static C2R Hdr10PlusInfoInputSetter(bool mayBlock, C2P<C2StreamHdr10PlusInfo::input> &me) {
+        (void)mayBlock;
+        (void)me;  // TODO: validate
+        return C2R::Ok();
+    }
+
+    static C2R Hdr10PlusInfoOutputSetter(bool mayBlock, C2P<C2StreamHdr10PlusInfo::output> &me) {
+        (void)mayBlock;
+        (void)me;  // TODO: validate
+        return C2R::Ok();
+    }
+
   private:
     std::shared_ptr<C2StreamProfileLevelInfo::input> mProfileLevel;
     std::shared_ptr<C2StreamPictureSizeInfo::output> mSize;
@@ -211,6 +243,8 @@
     std::shared_ptr<C2StreamColorInfo::output> mColorInfo;
     std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormat;
     std::shared_ptr<C2StreamColorAspectsTuning::output> mDefaultColorAspects;
+    std::shared_ptr<C2StreamHdr10PlusInfo::input> mHdr10PlusInfoInput;
+    std::shared_ptr<C2StreamHdr10PlusInfo::output> mHdr10PlusInfoOutput;
 };
 
 C2SoftAomDec::C2SoftAomDec(const char* name, c2_node_id_t id,
@@ -341,7 +375,8 @@
                               const std::shared_ptr<C2GraphicBlock>& block) {
     std::shared_ptr<C2Buffer> buffer =
         createGraphicBuffer(block, C2Rect(mWidth, mHeight));
-    auto fillWork = [buffer, index](const std::unique_ptr<C2Work>& work) {
+    auto fillWork = [buffer, index, intf = this->mIntf](
+            const std::unique_ptr<C2Work>& work) {
         uint32_t flags = 0;
         if ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) &&
             (c2_cntr64_t(index) == work->input.ordinal.frameIndex)) {
@@ -353,6 +388,28 @@
         work->worklets.front()->output.buffers.push_back(buffer);
         work->worklets.front()->output.ordinal = work->input.ordinal;
         work->workletsProcessed = 1u;
+
+        for (const std::unique_ptr<C2Param> &param: work->input.configUpdate) {
+            if (param) {
+                C2StreamHdr10PlusInfo::input *hdr10PlusInfo =
+                        C2StreamHdr10PlusInfo::input::From(param.get());
+
+                if (hdr10PlusInfo != nullptr) {
+                    std::vector<std::unique_ptr<C2SettingResult>> failures;
+                    std::unique_ptr<C2Param> outParam = C2Param::CopyAsStream(
+                            *param.get(), true /*output*/, param->stream());
+                    c2_status_t err = intf->config(
+                            { outParam.get() }, C2_MAY_BLOCK, &failures);
+                    if (err == C2_OK) {
+                        work->worklets.front()->output.configUpdate.push_back(
+                                C2Param::Copy(*outParam.get()));
+                    } else {
+                        ALOGE("finishWork: Config update size failed");
+                    }
+                    break;
+                }
+            }
+        }
     };
     if (work && c2_cntr64_t(index) == work->input.ordinal.frameIndex) {
         fillWork(work);
@@ -444,15 +501,13 @@
     }
 }
 
-static void copyOutputBufferToYV12Frame(uint8_t *dst,
-        const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV,
+static void copyOutputBufferToYuvPlanarFrame(
+        uint8_t *dst, const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV,
         size_t srcYStride, size_t srcUStride, size_t srcVStride,
+        size_t dstYStride, size_t dstUVStride,
         uint32_t width, uint32_t height) {
-    size_t dstYStride = align(width, 16);
-    size_t dstUVStride = align(dstYStride / 2, 16);
     uint8_t* dstStart = dst;
 
-
     for (size_t i = 0; i < height; ++i) {
         memcpy(dst, srcY, width);
         srcY += srcYStride;
@@ -540,11 +595,10 @@
 static void convertYUV420Planar16ToYUV420Planar(uint8_t *dst,
         const uint16_t *srcY, const uint16_t *srcU, const uint16_t *srcV,
         size_t srcYStride, size_t srcUStride, size_t srcVStride,
-        size_t dstStride, size_t width, size_t height) {
+        size_t dstYStride, size_t dstUVStride, size_t width, size_t height) {
 
     uint8_t *dstY = (uint8_t *)dst;
-    size_t dstYSize = dstStride * height;
-    size_t dstUVStride = align(dstStride / 2, 16);
+    size_t dstYSize = dstYStride * height;
     size_t dstUVSize = dstUVStride * height / 2;
     uint8_t *dstV = dstY + dstYSize;
     uint8_t *dstU = dstV + dstUVSize;
@@ -555,7 +609,7 @@
         }
 
         srcY += srcYStride;
-        dstY += dstStride;
+        dstY += dstYStride;
     }
 
     for (size_t y = 0; y < (height + 1) / 2; ++y) {
@@ -642,6 +696,9 @@
     size_t srcYStride = img->stride[AOM_PLANE_Y];
     size_t srcUStride = img->stride[AOM_PLANE_U];
     size_t srcVStride = img->stride[AOM_PLANE_V];
+    C2PlanarLayout layout = wView.layout();
+    size_t dstYStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
+    size_t dstUVStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
 
     if (img->fmt == AOM_IMG_FMT_I42016) {
         const uint16_t *srcY = (const uint16_t *)img->planes[AOM_PLANE_Y];
@@ -651,20 +708,23 @@
         if (format == HAL_PIXEL_FORMAT_RGBA_1010102) {
             convertYUV420Planar16ToY410((uint32_t *)dst, srcY, srcU, srcV, srcYStride / 2,
                                     srcUStride / 2, srcVStride / 2,
-                                    align(mWidth, 16),
+                                    dstYStride / sizeof(uint32_t),
                                     mWidth, mHeight);
         } else {
             convertYUV420Planar16ToYUV420Planar(dst, srcY, srcU, srcV, srcYStride / 2,
                                     srcUStride / 2, srcVStride / 2,
-                                    align(mWidth, 16),
+                                    dstYStride, dstUVStride,
                                     mWidth, mHeight);
         }
     } else {
         const uint8_t *srcY = (const uint8_t *)img->planes[AOM_PLANE_Y];
         const uint8_t *srcU = (const uint8_t *)img->planes[AOM_PLANE_U];
         const uint8_t *srcV = (const uint8_t *)img->planes[AOM_PLANE_V];
-        copyOutputBufferToYV12Frame(dst, srcY, srcU, srcV,
-                                srcYStride, srcUStride, srcVStride, mWidth, mHeight);
+        copyOutputBufferToYuvPlanarFrame(
+                dst, srcY, srcU, srcV,
+                srcYStride, srcUStride, srcVStride,
+                dstYStride, dstUVStride,
+                mWidth, mHeight);
     }
     finishWork(*(int64_t*)img->user_priv, work, std::move(block));
     block = nullptr;
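
Reviewer note: the AOM output path above now takes the destination strides from the mapped block's C2PlanarLayout (the rowInc of the Y and U planes) instead of assuming align(width, 16), so graphic buffers whose allocator chose a different row alignment are written correctly; the same hunk also threads HDR10+ config updates from input to output. The core idea is that source and destination strides are independent and only 'width' bytes per row carry pixels — a hypothetical, self-contained plane copy:

    #include <cstddef>
    #include <cstdint>
    #include <cstring>

    // Hypothetical helper (not from the patch): copy one plane while honoring
    // separate source and destination strides.
    static void copyPlane(uint8_t *dst, size_t dstStride,
                          const uint8_t *src, size_t srcStride,
                          size_t width, size_t height) {
        for (size_t row = 0; row < height; ++row) {
            std::memcpy(dst, src, width);  // only 'width' bytes per row are pixels
            dst += dstStride;              // strides may differ and exceed 'width'
            src += srcStride;
        }
    }
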
diff --git a/media/codec2/components/avc/C2SoftAvcDec.cpp b/media/codec2/components/avc/C2SoftAvcDec.cpp
index 9290d74..3f015b4 100644
--- a/media/codec2/components/avc/C2SoftAvcDec.cpp
+++ b/media/codec2/components/avc/C2SoftAvcDec.cpp
@@ -335,7 +335,8 @@
       mIvColorFormat(IV_YUV_420P),
       mWidth(320),
       mHeight(240),
-      mHeaderDecoded(false) {
+      mHeaderDecoded(false),
+      mOutIndex(0u) {
     GENERATE_FILE_NAMES();
     CREATE_DUMP_FILE(mInFile);
 }
@@ -692,6 +693,33 @@
         buffer->setInfo(mIntf->getColorAspects_l());
     }
 
+    class FillWork {
+       public:
+        FillWork(uint32_t flags, C2WorkOrdinalStruct ordinal,
+                 const std::shared_ptr<C2Buffer>& buffer)
+            : mFlags(flags), mOrdinal(ordinal), mBuffer(buffer) {}
+        ~FillWork() = default;
+
+        void operator()(const std::unique_ptr<C2Work>& work) {
+            work->worklets.front()->output.flags = (C2FrameData::flags_t)mFlags;
+            work->worklets.front()->output.buffers.clear();
+            work->worklets.front()->output.ordinal = mOrdinal;
+            work->workletsProcessed = 1u;
+            work->result = C2_OK;
+            if (mBuffer) {
+                work->worklets.front()->output.buffers.push_back(mBuffer);
+            }
+            ALOGV("timestamp = %lld, index = %lld, w/%s buffer",
+                  mOrdinal.timestamp.peekll(), mOrdinal.frameIndex.peekll(),
+                  mBuffer ? "" : "o");
+        }
+
+       private:
+        const uint32_t mFlags;
+        const C2WorkOrdinalStruct mOrdinal;
+        const std::shared_ptr<C2Buffer> mBuffer;
+    };
+
     auto fillWork = [buffer](const std::unique_ptr<C2Work> &work) {
         work->worklets.front()->output.flags = (C2FrameData::flags_t)0;
         work->worklets.front()->output.buffers.clear();
@@ -700,7 +728,20 @@
         work->workletsProcessed = 1u;
     };
     if (work && c2_cntr64_t(index) == work->input.ordinal.frameIndex) {
-        fillWork(work);
+        bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);
+        // TODO: Check if cloneAndSend can be avoided by tracking number of frames remaining
+        if (eos) {
+            if (buffer) {
+                mOutIndex = index;
+                C2WorkOrdinalStruct outOrdinal = work->input.ordinal;
+                cloneAndSend(
+                    mOutIndex, work,
+                    FillWork(C2FrameData::FLAG_INCOMPLETE, outOrdinal, buffer));
+                buffer.reset();
+            }
+        } else {
+            fillWork(work);
+        }
     } else {
         finish(index, fillWork);
     }
@@ -812,19 +853,19 @@
             ALOGV("decodeTime=%6d delay=%6d numBytes=%6d", decodeTime, delay,
                   s_decode_op.u4_num_bytes_consumed);
         }
-        if (IVD_MEM_ALLOC_FAILED == (s_decode_op.u4_error_code & 0xFF)) {
+        if (IVD_MEM_ALLOC_FAILED == (s_decode_op.u4_error_code & IVD_ERROR_MASK)) {
             ALOGE("allocation failure in decoder");
             mSignalledError = true;
             work->workletsProcessed = 1u;
             work->result = C2_CORRUPTED;
             return;
-        } else if (IVD_STREAM_WIDTH_HEIGHT_NOT_SUPPORTED == (s_decode_op.u4_error_code & 0xFF)) {
+        } else if (IVD_STREAM_WIDTH_HEIGHT_NOT_SUPPORTED == (s_decode_op.u4_error_code & IVD_ERROR_MASK)) {
             ALOGE("unsupported resolution : %dx%d", mWidth, mHeight);
             mSignalledError = true;
             work->workletsProcessed = 1u;
             work->result = C2_CORRUPTED;
             return;
-        } else if (IVD_RES_CHANGED == (s_decode_op.u4_error_code & 0xFF)) {
+        } else if (IVD_RES_CHANGED == (s_decode_op.u4_error_code & IVD_ERROR_MASK)) {
             ALOGV("resolution changed");
             drainInternal(DRAIN_COMPONENT_NO_EOS, pool, work);
             resetDecoder();
@@ -834,6 +875,12 @@
             /* Decode header and get new dimensions */
             setParams(mStride, IVD_DECODE_HEADER);
             (void) ivdec_api_function(mDecHandle, &s_decode_ip, &s_decode_op);
+        } else if (IS_IVD_FATAL_ERROR(s_decode_op.u4_error_code)) {
+            ALOGE("Fatal error in decoder 0x%x", s_decode_op.u4_error_code);
+            mSignalledError = true;
+            work->workletsProcessed = 1u;
+            work->result = C2_CORRUPTED;
+            return;
         }
         if (0 < s_decode_op.u4_pic_wd && 0 < s_decode_op.u4_pic_ht) {
             if (mHeaderDecoded == false) {
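
Reviewer note: in finishWork() above, when the work item that carries FLAG_END_OF_STREAM also produces a final picture, that picture is returned on a clone of the work (via cloneAndSend and the new FillWork functor) marked FLAG_INCOMPLETE, and the original work is left to be completed empty with the EOS flag; error handling also switches from the magic 0xFF mask to IVD_ERROR_MASK and treats fatal decoder errors as C2_CORRUPTED. A toy model (hypothetical, standard C++ only) of the two completions produced for one EOS input:

    #include <cstdio>
    #include <vector>

    enum : unsigned { FLAG_INCOMPLETE = 1u, FLAG_END_OF_STREAM = 2u };

    struct Completion { unsigned flags; bool hasBuffer; };

    // One EOS-flagged input that still yields a picture results in two
    // completions: the cloned work with the buffer, then the original with EOS.
    static std::vector<Completion> completeEosWork(bool havePicture) {
        std::vector<Completion> out;
        if (havePicture) {
            out.push_back({FLAG_INCOMPLETE, true});   // clone: final picture
        }
        out.push_back({FLAG_END_OF_STREAM, false});   // original: EOS, no buffer
        return out;
    }

    int main() {
        for (const Completion &c : completeEosWork(true)) {
            std::printf("flags=%u hasBuffer=%d\n", c.flags, c.hasBuffer);
        }
        return 0;
    }
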
diff --git a/media/codec2/components/avc/C2SoftAvcDec.h b/media/codec2/components/avc/C2SoftAvcDec.h
index 2127a93..72ee583 100644
--- a/media/codec2/components/avc/C2SoftAvcDec.h
+++ b/media/codec2/components/avc/C2SoftAvcDec.h
@@ -21,6 +21,7 @@
 
 #include <media/stagefright/foundation/ColorUtils.h>
 
+#include <atomic>
 #include <SimpleC2Component.h>
 
 #include "ih264_typedefs.h"
@@ -163,6 +164,7 @@
     bool mSignalledOutputEos;
     bool mSignalledError;
     bool mHeaderDecoded;
+    std::atomic_uint64_t mOutIndex;
     // Color aspects. These are ISO values and are meant to detect changes in aspects to avoid
     // converting them to C2 values for each frame
     struct VuiColorAspects {
diff --git a/media/codec2/components/avc/C2SoftAvcEnc.cpp b/media/codec2/components/avc/C2SoftAvcEnc.cpp
index b851908..8d9f21a 100644
--- a/media/codec2/components/avc/C2SoftAvcEnc.cpp
+++ b/media/codec2/components/avc/C2SoftAvcEnc.cpp
@@ -37,42 +37,40 @@
 
 namespace android {
 
-class C2SoftAvcEnc::IntfImpl : public C2InterfaceHelper {
+namespace {
+
+constexpr char COMPONENT_NAME[] = "c2.android.avc.encoder";
+
+}  // namespace
+
+class C2SoftAvcEnc::IntfImpl : public SimpleInterface<void>::BaseParams {
 public:
     explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
-        : C2InterfaceHelper(helper) {
-
+        : SimpleInterface<void>::BaseParams(
+                helper,
+                COMPONENT_NAME,
+                C2Component::KIND_ENCODER,
+                C2Component::DOMAIN_VIDEO,
+                MEDIA_MIMETYPE_VIDEO_AVC) {
+        noPrivateBuffers(); // TODO: account for our buffers here
+        noInputReferences();
+        noOutputReferences();
+        noTimeStretch();
         setDerivedInstance(this);
 
         addParameter(
-                DefineParam(mInputFormat, C2_PARAMKEY_INPUT_STREAM_BUFFER_TYPE)
-                .withConstValue(new C2StreamBufferTypeSetting::input(0u, C2BufferData::GRAPHIC))
-                .build());
-
-        addParameter(
-                DefineParam(mOutputFormat, C2_PARAMKEY_OUTPUT_STREAM_BUFFER_TYPE)
-                .withConstValue(new C2StreamBufferTypeSetting::output(0u, C2BufferData::LINEAR))
-                .build());
-
-        addParameter(
-                DefineParam(mInputMediaType, C2_PARAMKEY_INPUT_MEDIA_TYPE)
-                .withConstValue(AllocSharedString<C2PortMediaTypeSetting::input>(
-                        MEDIA_MIMETYPE_VIDEO_RAW))
-                .build());
-
-        addParameter(
-                DefineParam(mOutputMediaType, C2_PARAMKEY_OUTPUT_MEDIA_TYPE)
-                .withConstValue(AllocSharedString<C2PortMediaTypeSetting::output>(
-                        MEDIA_MIMETYPE_VIDEO_AVC))
-                .build());
-
-        addParameter(
                 DefineParam(mUsage, C2_PARAMKEY_INPUT_STREAM_USAGE)
                 .withConstValue(new C2StreamUsageTuning::input(
                         0u, (uint64_t)C2MemoryUsage::CPU_READ))
                 .build());
 
         addParameter(
+                DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
+                .withConstValue(new C2ComponentAttributesSetting(
+                    C2Component::ATTRIB_IS_TEMPORAL))
+                .build());
+
+        addParameter(
                 DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
                 .withDefault(new C2StreamPictureSizeInfo::input(0u, 320, 240))
                 .withFields({
@@ -83,6 +81,13 @@
                 .build());
 
         addParameter(
+                DefineParam(mActualInputDelay, C2_PARAMKEY_INPUT_DELAY)
+                .withDefault(new C2PortActualDelayTuning::input(DEFAULT_B_FRAMES))
+                .withFields({C2F(mActualInputDelay, value).inRange(0, MAX_B_FRAMES)})
+                .withSetter(Setter<decltype(*mActualInputDelay)>::StrictValueWithNoDeps)
+                .build());
+
+        addParameter(
                 DefineParam(mFrameRate, C2_PARAMKEY_FRAME_RATE)
                 .withDefault(new C2StreamFrameRateInfo::output(0u, 30.))
                 // TODO: More restriction?
@@ -325,10 +330,6 @@
     std::shared_ptr<C2StreamRequestSyncFrameTuning::output> getRequestSync_l() const { return mRequestSync; }
 
 private:
-    std::shared_ptr<C2StreamBufferTypeSetting::input> mInputFormat;
-    std::shared_ptr<C2StreamBufferTypeSetting::output> mOutputFormat;
-    std::shared_ptr<C2PortMediaTypeSetting::input> mInputMediaType;
-    std::shared_ptr<C2PortMediaTypeSetting::output> mOutputMediaType;
     std::shared_ptr<C2StreamUsageTuning::input> mUsage;
     std::shared_ptr<C2StreamPictureSizeInfo::input> mSize;
     std::shared_ptr<C2StreamFrameRateInfo::output> mFrameRate;
@@ -341,8 +342,6 @@
 
 #define ive_api_function  ih264e_api_function
 
-constexpr char COMPONENT_NAME[] = "c2.android.avc.encoder";
-
 namespace {
 
 // From external/libavc/encoder/ih264e_bitstream.h
@@ -372,9 +371,9 @@
       mAVCEncLevel(41),
       mStarted(false),
       mSawInputEOS(false),
-      mSawOutputEOS(false),
       mSignalledError(false),
       mCodecCtx(nullptr),
+      mOutBlock(nullptr),
       // TODO: output buffer size
       mOutBufferSize(524288) {
 
@@ -387,7 +386,7 @@
 }
 
 C2SoftAvcEnc::~C2SoftAvcEnc() {
-    releaseEncoder();
+    onRelease();
 }
 
 c2_status_t C2SoftAvcEnc::onInit() {
@@ -401,11 +400,17 @@
 void C2SoftAvcEnc::onReset() {
     // TODO: use IVE_CMD_CTL_RESET?
     releaseEncoder();
+    if (mOutBlock) {
+        mOutBlock.reset();
+    }
     initEncParams();
 }
 
 void C2SoftAvcEnc::onRelease() {
     releaseEncoder();
+    if (mOutBlock) {
+        mOutBlock.reset();
+    }
 }
 
 c2_status_t C2SoftAvcEnc::onFlush_sm() {
@@ -1113,8 +1118,10 @@
         const C2GraphicView *const input,
         uint8_t *base,
         uint32_t capacity,
-        uint64_t timestamp) {
+        uint64_t workIndex) {
     iv_raw_buf_t *ps_inp_raw_buf;
+    memset(ps_encode_ip, 0, sizeof(*ps_encode_ip));
+    memset(ps_encode_op, 0, sizeof(*ps_encode_op));
 
     ps_inp_raw_buf = &ps_encode_ip->s_inp_buf;
     ps_encode_ip->s_out_buf.pv_buf = base;
@@ -1130,8 +1137,8 @@
     ps_encode_ip->u4_mb_info_type = 0;
     ps_encode_ip->u4_pic_info_type = 0;
     ps_encode_ip->u4_is_last = 0;
-    ps_encode_ip->u4_timestamp_high = timestamp >> 32;
-    ps_encode_ip->u4_timestamp_low = timestamp & 0xFFFFFFFF;
+    ps_encode_ip->u4_timestamp_high = workIndex >> 32;
+    ps_encode_ip->u4_timestamp_low = workIndex & 0xFFFFFFFF;
     ps_encode_op->s_out_buf.pv_buf = nullptr;
 
     /* Initialize color formats */
@@ -1139,7 +1146,7 @@
     ps_inp_raw_buf->u4_size = sizeof(iv_raw_buf_t);
     ps_inp_raw_buf->e_color_fmt = mIvVideoColorFormat;
     if (input == nullptr) {
-        if (mSawInputEOS){
+        if (mSawInputEOS) {
             ps_encode_ip->u4_is_last = 1;
         }
         return C2_OK;
@@ -1278,17 +1285,46 @@
     return C2_OK;
 }
 
+void C2SoftAvcEnc::finishWork(uint64_t workIndex, const std::unique_ptr<C2Work> &work,
+                              ive_video_encode_op_t *ps_encode_op) {
+    std::shared_ptr<C2Buffer> buffer =
+            createLinearBuffer(mOutBlock, 0, ps_encode_op->s_out_buf.u4_bytes);
+    if (IV_IDR_FRAME == ps_encode_op->u4_encoded_frame_type) {
+        ALOGV("IDR frame produced");
+        buffer->setInfo(std::make_shared<C2StreamPictureTypeMaskInfo::output>(
+                0u /* stream id */, C2Config::SYNC_FRAME));
+    }
+    mOutBlock = nullptr;
+
+    auto fillWork = [buffer](const std::unique_ptr<C2Work> &work) {
+        work->worklets.front()->output.flags = (C2FrameData::flags_t)0;
+        work->worklets.front()->output.buffers.clear();
+        work->worklets.front()->output.buffers.push_back(buffer);
+        work->worklets.front()->output.ordinal = work->input.ordinal;
+        work->workletsProcessed = 1u;
+    };
+    if (work && c2_cntr64_t(workIndex) == work->input.ordinal.frameIndex) {
+        fillWork(work);
+        if (mSawInputEOS) {
+            work->worklets.front()->output.flags = C2FrameData::FLAG_END_OF_STREAM;
+        }
+    } else {
+        finish(workIndex, fillWork);
+    }
+}
+
 void C2SoftAvcEnc::process(
         const std::unique_ptr<C2Work> &work,
         const std::shared_ptr<C2BlockPool> &pool) {
     // Initialize output work
     work->result = C2_OK;
-    work->workletsProcessed = 1u;
+    work->workletsProcessed = 0u;
     work->worklets.front()->output.flags = work->input.flags;
 
     IV_STATUS_T status;
-    WORD32 timeDelay, timeTaken;
-    uint64_t timestamp = work->input.ordinal.timestamp.peekull();
+    WORD32 timeDelay = 0;
+    WORD32 timeTaken = 0;
+    uint64_t workIndex = work->input.ordinal.frameIndex.peekull();
 
     // Initialize encoder if not already initialized
     if (mCodecCtx == nullptr) {
@@ -1296,27 +1332,29 @@
             ALOGE("Failed to initialize encoder");
             mSignalledError = true;
             work->result = C2_CORRUPTED;
+            work->workletsProcessed = 1u;
             return;
         }
     }
     if (mSignalledError) {
         return;
     }
-
     // while (!mSawOutputEOS && !outQueue.empty()) {
     c2_status_t error;
     ive_video_encode_ip_t s_encode_ip;
     ive_video_encode_op_t s_encode_op;
+    memset(&s_encode_op, 0, sizeof(s_encode_op));
 
     if (!mSpsPpsHeaderReceived) {
         constexpr uint32_t kHeaderLength = MIN_STREAM_SIZE;
         uint8_t header[kHeaderLength];
         error = setEncodeArgs(
-                &s_encode_ip, &s_encode_op, nullptr, header, kHeaderLength, timestamp);
+                &s_encode_ip, &s_encode_op, nullptr, header, kHeaderLength, workIndex);
         if (error != C2_OK) {
             ALOGE("setEncodeArgs failed: %d", error);
             mSignalledError = true;
             work->result = C2_CORRUPTED;
+            work->workletsProcessed = 1u;
             return;
         }
         status = ive_api_function(mCodecCtx, &s_encode_ip, &s_encode_op);
@@ -1324,6 +1362,7 @@
         if (IV_SUCCESS != status) {
             ALOGE("Encode header failed = 0x%x\n",
                     s_encode_op.u4_error_code);
+            work->workletsProcessed = 1u;
             return;
         } else {
             ALOGV("Bytes Generated in header %d\n",
@@ -1338,6 +1377,7 @@
             ALOGE("CSD allocation failed");
             mSignalledError = true;
             work->result = C2_NO_MEMORY;
+            work->workletsProcessed = 1u;
             return;
         }
         memcpy(csd->m.value, header, s_encode_op.s_out_buf.u4_bytes);
@@ -1345,6 +1385,10 @@
 
         DUMP_TO_FILE(
                 mOutFile, csd->m.value, csd->flexCount());
+        if (work->input.buffers.empty()) {
+            work->workletsProcessed = 1u;
+            return;
+        }
     }
 
     // handle dynamic config parameters
@@ -1401,34 +1445,41 @@
                 inputBuffer->data().graphicBlocks().front().map().get());
         if (view->error() != C2_OK) {
             ALOGE("graphic view map err = %d", view->error());
+            work->workletsProcessed = 1u;
             return;
         }
     }
 
-    std::shared_ptr<C2LinearBlock> block;
-
     do {
-        C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
-        // TODO: error handling, proper usage, etc.
-        c2_status_t err = pool->fetchLinearBlock(mOutBufferSize, usage, &block);
-        if (err != C2_OK) {
-            ALOGE("fetch linear block err = %d", err);
-            work->result = err;
-            return;
+        if (mSawInputEOS && work->input.buffers.empty()) break;
+        if (!mOutBlock) {
+            C2MemoryUsage usage = {C2MemoryUsage::CPU_READ,
+                                   C2MemoryUsage::CPU_WRITE};
+            // TODO: error handling, proper usage, etc.
+            c2_status_t err =
+                pool->fetchLinearBlock(mOutBufferSize, usage, &mOutBlock);
+            if (err != C2_OK) {
+                ALOGE("fetch linear block err = %d", err);
+                work->result = err;
+                work->workletsProcessed = 1u;
+                return;
+            }
         }
-        C2WriteView wView = block->map().get();
+        C2WriteView wView = mOutBlock->map().get();
         if (wView.error() != C2_OK) {
             ALOGE("write view map err = %d", wView.error());
             work->result = wView.error();
+            work->workletsProcessed = 1u;
             return;
         }
 
         error = setEncodeArgs(
-                &s_encode_ip, &s_encode_op, view.get(), wView.base(), wView.capacity(), timestamp);
+                &s_encode_ip, &s_encode_op, view.get(), wView.base(), wView.capacity(), workIndex);
         if (error != C2_OK) {
             ALOGE("setEncodeArgs failed : %d", error);
             mSignalledError = true;
             work->result = error;
+            work->workletsProcessed = 1u;
             return;
         }
 
@@ -1446,12 +1497,14 @@
             if ((s_encode_op.u4_error_code & 0xFF) == IH264E_BITSTREAM_BUFFER_OVERFLOW) {
                 // TODO: use IVE_CMD_CTL_GETBUFINFO for proper max input size?
                 mOutBufferSize *= 2;
+                mOutBlock.reset();
                 continue;
             }
             ALOGE("Encode Frame failed = 0x%x\n",
                     s_encode_op.u4_error_code);
             mSignalledError = true;
             work->result = C2_CORRUPTED;
+            work->workletsProcessed = 1u;
             return;
         }
     } while (IV_SUCCESS != status);
@@ -1480,41 +1533,104 @@
         }
     }
 
-    work->worklets.front()->output.flags = work->input.flags;
-    work->worklets.front()->output.ordinal = work->input.ordinal;
-    work->worklets.front()->output.ordinal.timestamp =
-        ((uint64_t)s_encode_op.u4_timestamp_high << 32) | s_encode_op.u4_timestamp_low;
-    work->worklets.front()->output.buffers.clear();
-
-    if (s_encode_op.s_out_buf.u4_bytes) {
-        std::shared_ptr<C2Buffer> buffer =
-            createLinearBuffer(block, 0, s_encode_op.s_out_buf.u4_bytes);
-        if (IV_IDR_FRAME == s_encode_op.u4_encoded_frame_type) {
-            ALOGV("IDR frame produced");
-            buffer->setInfo(std::make_shared<C2StreamPictureTypeMaskInfo::output>(
-                    0u /* stream id */, C2Config::SYNC_FRAME));
+    if (s_encode_op.output_present) {
+        if (!s_encode_op.s_out_buf.u4_bytes) {
+            ALOGE("Error: Output present but bytes generated is zero");
+            mSignalledError = true;
+            work->result = C2_CORRUPTED;
+            work->workletsProcessed = 1u;
+            return;
         }
-        work->worklets.front()->output.buffers.push_back(buffer);
+        uint64_t workId = ((uint64_t)s_encode_op.u4_timestamp_high << 32) |
+                      s_encode_op.u4_timestamp_low;
+        finishWork(workId, work, &s_encode_op);
+    }
+    if (mSawInputEOS) {
+        drainInternal(DRAIN_COMPONENT_WITH_EOS, pool, work);
+    }
+}
+
+c2_status_t C2SoftAvcEnc::drainInternal(
+        uint32_t drainMode,
+        const std::shared_ptr<C2BlockPool> &pool,
+        const std::unique_ptr<C2Work> &work) {
+
+    if (drainMode == NO_DRAIN) {
+        ALOGW("drain with NO_DRAIN: no-op");
+        return C2_OK;
+    }
+    if (drainMode == DRAIN_CHAIN) {
+        ALOGW("DRAIN_CHAIN not supported");
+        return C2_OMITTED;
     }
 
-    if (s_encode_op.u4_is_last) {
-        // outputBufferHeader->nFlags |= OMX_BUFFERFLAG_EOS;
-        mSawOutputEOS = true;
-    } else {
-        // outputBufferHeader->nFlags &= ~OMX_BUFFERFLAG_EOS;
+    while (true) {
+        if (!mOutBlock) {
+            C2MemoryUsage usage = {C2MemoryUsage::CPU_READ,
+                                   C2MemoryUsage::CPU_WRITE};
+            // TODO: error handling, proper usage, etc.
+            c2_status_t err =
+                pool->fetchLinearBlock(mOutBufferSize, usage, &mOutBlock);
+            if (err != C2_OK) {
+                ALOGE("fetch linear block err = %d", err);
+                work->result = err;
+                work->workletsProcessed = 1u;
+                return err;
+            }
+        }
+        C2WriteView wView = mOutBlock->map().get();
+        if (wView.error()) {
+            ALOGE("graphic view map failed %d", wView.error());
+            return C2_CORRUPTED;
+        }
+        ive_video_encode_ip_t s_encode_ip;
+        ive_video_encode_op_t s_encode_op;
+        if (C2_OK != setEncodeArgs(&s_encode_ip, &s_encode_op, nullptr,
+                                   wView.base(), wView.capacity(), 0)) {
+            ALOGE("setEncodeArgs failed for drainInternal");
+            mSignalledError = true;
+            work->result = C2_CORRUPTED;
+            work->workletsProcessed = 1u;
+            return C2_CORRUPTED;
+        }
+        (void)ive_api_function(mCodecCtx, &s_encode_ip, &s_encode_op);
+
+        void *freed = s_encode_op.s_inp_buf.apv_bufs[0];
+        /* If encoder frees up an input buffer, mark it as free */
+        if (freed != nullptr) {
+            if (mBuffers.count(freed) == 0u) {
+                ALOGD("buffer not tracked");
+            } else {
+                // Release input buffer reference
+                mBuffers.erase(freed);
+                mConversionBuffersInUse.erase(freed);
+            }
+        }
+
+        if (s_encode_op.output_present) {
+            uint64_t workId = ((uint64_t)s_encode_op.u4_timestamp_high << 32) |
+                          s_encode_op.u4_timestamp_low;
+            finishWork(workId, work, &s_encode_op);
+        } else {
+            if (work->workletsProcessed != 1u) {
+                work->worklets.front()->output.flags = work->input.flags;
+                work->worklets.front()->output.ordinal = work->input.ordinal;
+                work->worklets.front()->output.buffers.clear();
+                work->workletsProcessed = 1u;
+            }
+            break;
+        }
     }
+
+    return C2_OK;
 }
 
 c2_status_t C2SoftAvcEnc::drain(
         uint32_t drainMode,
         const std::shared_ptr<C2BlockPool> &pool) {
-    // TODO: use IVE_CMD_CTL_FLUSH?
-    (void)drainMode;
-    (void)pool;
-    return C2_OK;
+    return drainInternal(drainMode, pool, nullptr);
 }
 
-
 class C2SoftAvcEncFactory : public C2ComponentFactory {
 public:
     C2SoftAvcEncFactory() : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
diff --git a/media/codec2/components/avc/C2SoftAvcEnc.h b/media/codec2/components/avc/C2SoftAvcEnc.h
index aa3ca61..555055b 100644
--- a/media/codec2/components/avc/C2SoftAvcEnc.h
+++ b/media/codec2/components/avc/C2SoftAvcEnc.h
@@ -33,12 +33,13 @@
 #define LEN_STATUS_BUFFER        (10  * 1024)
 #define MAX_VBV_BUFF_SIZE        (120 * 16384)
 #define MAX_NUM_IO_BUFS           3
+#define MAX_B_FRAMES              1
 
 #define DEFAULT_MAX_REF_FRM         2
 #define DEFAULT_MAX_REORDER_FRM     0
 #define DEFAULT_QP_MIN              10
 #define DEFAULT_QP_MAX              40
-#define DEFAULT_MAX_BITRATE         20000000
+#define DEFAULT_MAX_BITRATE         240000000
 #define DEFAULT_MAX_SRCH_RANGE_X    256
 #define DEFAULT_MAX_SRCH_RANGE_Y    256
 #define DEFAULT_MAX_FRAMERATE       120000
@@ -167,7 +168,6 @@
     bool     mSpsPpsHeaderReceived;
 
     bool     mSawInputEOS;
-    bool     mSawOutputEOS;
     bool     mSignalledError;
     bool     mIntra4x4;
     bool     mEnableFastSad;
@@ -183,6 +183,8 @@
     size_t mNumMemRecords;       // Number of memory records requested by codec
     size_t mNumCores;            // Number of cores used by the codec
 
+    std::shared_ptr<C2LinearBlock> mOutBlock;
+
     // configurations used by component in process
     // (TODO: keep this in intf but make them internal only)
     std::shared_ptr<C2StreamPictureSizeInfo::input> mSize;
@@ -230,7 +232,13 @@
             const C2GraphicView *const input,
             uint8_t *base,
             uint32_t capacity,
-            uint64_t timestamp);
+            uint64_t workIndex);
+    void finishWork(uint64_t workIndex,
+            const std::unique_ptr<C2Work> &work,
+            ive_video_encode_op_t *ps_encode_op);
+    c2_status_t drainInternal(uint32_t drainMode,
+            const std::shared_ptr<C2BlockPool> &pool,
+            const std::unique_ptr<C2Work> &work);
 
     C2_DO_NOT_COPY(C2SoftAvcEnc);
 };
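
The header above only declares the new finishWork()/drainInternal() helpers; their bodies are not part of this excerpt. As a rough sketch of what finishWork() plausibly does, modeled on the C2SoftHevcEnc::finishWork() added later in this same change (treat the exact body as an assumption, not the committed implementation): it wraps mOutBlock in a C2Buffer, tags IDR frames as sync frames, and attaches the buffer either to the current work item or, via finish(), to the earlier item whose frameIndex matches the returned workIndex.

    // Sketch only -- assumed body, mirroring the HEVC encoder variant later in this patch.
    void C2SoftAvcEnc::finishWork(uint64_t workIndex,
                                  const std::unique_ptr<C2Work> &work,
                                  ive_video_encode_op_t *ps_encode_op) {
        std::shared_ptr<C2Buffer> buffer =
            createLinearBuffer(mOutBlock, 0, ps_encode_op->s_out_buf.u4_bytes);
        if (IV_IDR_FRAME == ps_encode_op->u4_encoded_frame_type) {
            buffer->setInfo(std::make_shared<C2StreamPictureTypeMaskInfo::output>(
                    0u /* stream id */, C2Config::SYNC_FRAME));
        }
        mOutBlock.reset();  // a fresh block is fetched for the next frame
        auto fillWork = [buffer](const std::unique_ptr<C2Work> &work) {
            work->worklets.front()->output.flags = (C2FrameData::flags_t)0;
            work->worklets.front()->output.buffers.clear();
            work->worklets.front()->output.buffers.push_back(buffer);
            work->worklets.front()->output.ordinal = work->input.ordinal;
            work->workletsProcessed = 1u;
        };
        // The encoded output may belong to an earlier queued frame; match by index.
        if (work && c2_cntr64_t(workIndex) == work->input.ordinal.frameIndex) {
            fillWork(work);
        } else {
            finish(workIndex, fillWork);
        }
    }
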
diff --git a/media/codec2/components/flac/C2SoftFlacDec.cpp b/media/codec2/components/flac/C2SoftFlacDec.cpp
index 10b14ce..4039b9b 100644
--- a/media/codec2/components/flac/C2SoftFlacDec.cpp
+++ b/media/codec2/components/flac/C2SoftFlacDec.cpp
@@ -27,35 +27,32 @@
 
 namespace android {
 
+namespace {
+
 constexpr char COMPONENT_NAME[] = "c2.android.flac.decoder";
 
-class C2SoftFlacDec::IntfImpl : public C2InterfaceHelper {
+}  // namespace
+
+class C2SoftFlacDec::IntfImpl : public SimpleInterface<void>::BaseParams {
 public:
     explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
-        : C2InterfaceHelper(helper) {
-
+        : SimpleInterface<void>::BaseParams(
+                helper,
+                COMPONENT_NAME,
+                C2Component::KIND_DECODER,
+                C2Component::DOMAIN_AUDIO,
+                MEDIA_MIMETYPE_AUDIO_FLAC) {
+        noPrivateBuffers();
+        noInputReferences();
+        noOutputReferences();
+        noInputLatency();
+        noTimeStretch();
         setDerivedInstance(this);
 
         addParameter(
-                DefineParam(mInputFormat, C2_PARAMKEY_INPUT_STREAM_BUFFER_TYPE)
-                .withConstValue(new C2StreamBufferTypeSetting::input(0u, C2BufferData::LINEAR))
-                .build());
-
-        addParameter(
-                DefineParam(mOutputFormat, C2_PARAMKEY_OUTPUT_STREAM_BUFFER_TYPE)
-                .withConstValue(new C2StreamBufferTypeSetting::output(0u, C2BufferData::LINEAR))
-                .build());
-
-        addParameter(
-                DefineParam(mInputMediaType, C2_PARAMKEY_INPUT_MEDIA_TYPE)
-                .withConstValue(AllocSharedString<C2PortMediaTypeSetting::input>(
-                        MEDIA_MIMETYPE_AUDIO_FLAC))
-                .build());
-
-        addParameter(
-                DefineParam(mOutputMediaType, C2_PARAMKEY_OUTPUT_MEDIA_TYPE)
-                .withConstValue(AllocSharedString<C2PortMediaTypeSetting::output>(
-                        MEDIA_MIMETYPE_AUDIO_RAW))
+                DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
+                .withConstValue(new C2ComponentAttributesSetting(
+                    C2Component::ATTRIB_IS_TEMPORAL))
                 .build());
 
         addParameter(
@@ -99,10 +96,6 @@
     int32_t getPcmEncodingInfo() const { return mPcmEncodingInfo->value; }
 
 private:
-    std::shared_ptr<C2StreamBufferTypeSetting::input> mInputFormat;
-    std::shared_ptr<C2StreamBufferTypeSetting::output> mOutputFormat;
-    std::shared_ptr<C2PortMediaTypeSetting::input> mInputMediaType;
-    std::shared_ptr<C2PortMediaTypeSetting::output> mOutputMediaType;
     std::shared_ptr<C2StreamSampleRateInfo::output> mSampleRate;
     std::shared_ptr<C2StreamChannelCountInfo::output> mChannelCount;
     std::shared_ptr<C2StreamBitrateInfo::input> mBitrate;
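
The same IntfImpl rewrite recurs for every simple audio codec touched below (FLAC encoder, G711, GSM, MP3, Opus decoder and encoder, raw, Vorbis): the hand-rolled buffer-type and media-type parameters are dropped in favor of SimpleInterface<void>::BaseParams, which derives them from the component name, kind, domain, and MIME type, and each constructor then opts out of unused features and declares only its component-specific parameters. Condensed, the shared skeleton is:

    // Common shape of the migrated IntfImpl constructors in this change
    // (component-specific values shown for the FLAC decoder above).
    class IntfImpl : public SimpleInterface<void>::BaseParams {
    public:
        explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
            : SimpleInterface<void>::BaseParams(
                    helper, COMPONENT_NAME,
                    C2Component::KIND_DECODER,      // or KIND_ENCODER
                    C2Component::DOMAIN_AUDIO,
                    MEDIA_MIMETYPE_AUDIO_FLAC) {    // per-component MIME type
            noPrivateBuffers();
            noInputReferences();
            noOutputReferences();
            noInputLatency();
            noTimeStretch();
            setDerivedInstance(this);
            addParameter(
                    DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
                    .withConstValue(new C2ComponentAttributesSetting(
                        C2Component::ATTRIB_IS_TEMPORAL))
                    .build());
            // ... followed by the component-specific parameters (sample rate,
            // channel count, bitrate, etc.), which are unchanged by this patch.
        }
    };
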
diff --git a/media/codec2/components/flac/C2SoftFlacEnc.cpp b/media/codec2/components/flac/C2SoftFlacEnc.cpp
index 0ce2543..cf34dff 100644
--- a/media/codec2/components/flac/C2SoftFlacEnc.cpp
+++ b/media/codec2/components/flac/C2SoftFlacEnc.cpp
@@ -28,28 +28,32 @@
 
 namespace android {
 
-class C2SoftFlacEnc::IntfImpl : public C2InterfaceHelper {
+namespace {
+
+constexpr char COMPONENT_NAME[] = "c2.android.flac.encoder";
+
+}  // namespace
+
+class C2SoftFlacEnc::IntfImpl : public SimpleInterface<void>::BaseParams {
 public:
     explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
-        : C2InterfaceHelper(helper) {
+        : SimpleInterface<void>::BaseParams(
+                helper,
+                COMPONENT_NAME,
+                C2Component::KIND_ENCODER,
+                C2Component::DOMAIN_AUDIO,
+                MEDIA_MIMETYPE_AUDIO_FLAC) {
+        noPrivateBuffers();
+        noInputReferences();
+        noOutputReferences();
+        noInputLatency();
+        noTimeStretch();
         setDerivedInstance(this);
+
         addParameter(
-                DefineParam(mInputFormat, C2_PARAMKEY_INPUT_STREAM_BUFFER_TYPE)
-                .withConstValue(new C2StreamBufferTypeSetting::input(0u, C2BufferData::LINEAR))
-                .build());
-        addParameter(
-                DefineParam(mOutputFormat, C2_PARAMKEY_OUTPUT_STREAM_BUFFER_TYPE)
-                .withConstValue(new C2StreamBufferTypeSetting::output(0u, C2BufferData::LINEAR))
-                .build());
-        addParameter(
-                DefineParam(mInputMediaType, C2_PARAMKEY_INPUT_MEDIA_TYPE)
-                .withConstValue(AllocSharedString<C2PortMediaTypeSetting::input>(
-                        MEDIA_MIMETYPE_AUDIO_RAW))
-                .build());
-        addParameter(
-                DefineParam(mOutputMediaType, C2_PARAMKEY_OUTPUT_MEDIA_TYPE)
-                .withConstValue(AllocSharedString<C2PortMediaTypeSetting::output>(
-                        MEDIA_MIMETYPE_AUDIO_FLAC))
+                DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
+                .withConstValue(new C2ComponentAttributesSetting(
+                    C2Component::ATTRIB_IS_TEMPORAL))
                 .build());
         addParameter(
                 DefineParam(mSampleRate, C2_PARAMKEY_SAMPLE_RATE)
@@ -92,17 +96,12 @@
     int32_t getPcmEncodingInfo() const { return mPcmEncodingInfo->value; }
 
 private:
-    std::shared_ptr<C2StreamBufferTypeSetting::input> mInputFormat;
-    std::shared_ptr<C2StreamBufferTypeSetting::output> mOutputFormat;
-    std::shared_ptr<C2PortMediaTypeSetting::input> mInputMediaType;
-    std::shared_ptr<C2PortMediaTypeSetting::output> mOutputMediaType;
     std::shared_ptr<C2StreamSampleRateInfo::input> mSampleRate;
     std::shared_ptr<C2StreamChannelCountInfo::input> mChannelCount;
     std::shared_ptr<C2StreamBitrateInfo::output> mBitrate;
     std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mInputMaxBufSize;
     std::shared_ptr<C2StreamPcmEncodingInfo::input> mPcmEncodingInfo;
 };
-constexpr char COMPONENT_NAME[] = "c2.android.flac.encoder";
 
 C2SoftFlacEnc::C2SoftFlacEnc(
         const char *name,
diff --git a/media/codec2/components/g711/C2SoftG711Dec.cpp b/media/codec2/components/g711/C2SoftG711Dec.cpp
index 504ca78..43b843a 100644
--- a/media/codec2/components/g711/C2SoftG711Dec.cpp
+++ b/media/codec2/components/g711/C2SoftG711Dec.cpp
@@ -27,43 +27,41 @@
 
 namespace android {
 
+namespace {
+
 #ifdef ALAW
 constexpr char COMPONENT_NAME[] = "c2.android.g711.alaw.decoder";
 #else
 constexpr char COMPONENT_NAME[] = "c2.android.g711.mlaw.decoder";
 #endif
 
-class C2SoftG711Dec::IntfImpl : public C2InterfaceHelper {
+}  // namespace
+
+class C2SoftG711Dec::IntfImpl : public SimpleInterface<void>::BaseParams {
 public:
     explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
-        : C2InterfaceHelper(helper) {
-
+        : SimpleInterface<void>::BaseParams(
+                helper,
+                COMPONENT_NAME,
+                C2Component::KIND_DECODER,
+                C2Component::DOMAIN_AUDIO,
+#ifdef ALAW
+                MEDIA_MIMETYPE_AUDIO_G711_ALAW
+#else
+                MEDIA_MIMETYPE_AUDIO_G711_MLAW
+#endif
+                ) {
+        noPrivateBuffers();
+        noInputReferences();
+        noOutputReferences();
+        noInputLatency();
+        noTimeStretch();
         setDerivedInstance(this);
 
         addParameter(
-                DefineParam(mInputFormat, C2_PARAMKEY_INPUT_STREAM_BUFFER_TYPE)
-                .withConstValue(new C2StreamBufferTypeSetting::input(0u, C2BufferData::LINEAR))
-                .build());
-
-        addParameter(
-                DefineParam(mOutputFormat, C2_PARAMKEY_OUTPUT_STREAM_BUFFER_TYPE)
-                .withConstValue(new C2StreamBufferTypeSetting::output(0u, C2BufferData::LINEAR))
-                .build());
-
-        addParameter(
-                DefineParam(mInputMediaType, C2_PARAMKEY_INPUT_MEDIA_TYPE)
-                .withConstValue(AllocSharedString<C2PortMediaTypeSetting::input>(
-#ifdef ALAW
-                        MEDIA_MIMETYPE_AUDIO_G711_ALAW
-#else
-                        MEDIA_MIMETYPE_AUDIO_G711_MLAW
-#endif
-                )).build());
-
-        addParameter(
-                DefineParam(mOutputMediaType, C2_PARAMKEY_OUTPUT_MEDIA_TYPE)
-                .withConstValue(AllocSharedString<C2PortMediaTypeSetting::output>(
-                        MEDIA_MIMETYPE_AUDIO_RAW))
+                DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
+                .withConstValue(new C2ComponentAttributesSetting(
+                    C2Component::ATTRIB_IS_TEMPORAL))
                 .build());
 
         addParameter(
@@ -94,10 +92,6 @@
     }
 
 private:
-    std::shared_ptr<C2StreamBufferTypeSetting::input> mInputFormat;
-    std::shared_ptr<C2StreamBufferTypeSetting::output> mOutputFormat;
-    std::shared_ptr<C2PortMediaTypeSetting::input> mInputMediaType;
-    std::shared_ptr<C2PortMediaTypeSetting::output> mOutputMediaType;
     std::shared_ptr<C2StreamSampleRateInfo::output> mSampleRate;
     std::shared_ptr<C2StreamChannelCountInfo::output> mChannelCount;
     std::shared_ptr<C2StreamBitrateInfo::input> mBitrate;
diff --git a/media/codec2/components/gsm/C2SoftGsmDec.cpp b/media/codec2/components/gsm/C2SoftGsmDec.cpp
index 69d4885..287cfc6 100644
--- a/media/codec2/components/gsm/C2SoftGsmDec.cpp
+++ b/media/codec2/components/gsm/C2SoftGsmDec.cpp
@@ -27,34 +27,32 @@
 
 namespace android {
 
+namespace {
+
 constexpr char COMPONENT_NAME[] = "c2.android.gsm.decoder";
 
-class C2SoftGsmDec::IntfImpl : public C2InterfaceHelper {
-   public:
-    explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper>& helper)
-        : C2InterfaceHelper(helper) {
+}  // namespace
+
+class C2SoftGsmDec::IntfImpl : public SimpleInterface<void>::BaseParams {
+public:
+    explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
+        : SimpleInterface<void>::BaseParams(
+                helper,
+                COMPONENT_NAME,
+                C2Component::KIND_DECODER,
+                C2Component::DOMAIN_AUDIO,
+                MEDIA_MIMETYPE_AUDIO_MSGSM) {
+        noPrivateBuffers();
+        noInputReferences();
+        noOutputReferences();
+        noInputLatency();
+        noTimeStretch();
         setDerivedInstance(this);
 
         addParameter(
-                DefineParam(mInputFormat, C2_PARAMKEY_INPUT_STREAM_BUFFER_TYPE)
-                .withConstValue(new C2StreamBufferTypeSetting::input(0u, C2BufferData::LINEAR))
-                .build());
-
-        addParameter(
-                DefineParam(mOutputFormat, C2_PARAMKEY_OUTPUT_STREAM_BUFFER_TYPE)
-                .withConstValue(new C2StreamBufferTypeSetting::output(0u, C2BufferData::LINEAR))
-                .build());
-
-        addParameter(
-                DefineParam(mInputMediaType, C2_PARAMKEY_INPUT_MEDIA_TYPE)
-                .withConstValue(AllocSharedString<C2PortMediaTypeSetting::input>(
-                        MEDIA_MIMETYPE_AUDIO_MSGSM))
-                .build());
-
-        addParameter(
-                DefineParam(mOutputMediaType, C2_PARAMKEY_OUTPUT_MEDIA_TYPE)
-                .withConstValue(AllocSharedString<C2PortMediaTypeSetting::output>(
-                        MEDIA_MIMETYPE_AUDIO_RAW))
+                DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
+                .withConstValue(new C2ComponentAttributesSetting(
+                    C2Component::ATTRIB_IS_TEMPORAL))
                 .build());
 
         addParameter(
@@ -85,10 +83,6 @@
     }
 
    private:
-    std::shared_ptr<C2StreamBufferTypeSetting::input> mInputFormat;
-    std::shared_ptr<C2StreamBufferTypeSetting::output> mOutputFormat;
-    std::shared_ptr<C2PortMediaTypeSetting::input> mInputMediaType;
-    std::shared_ptr<C2PortMediaTypeSetting::output> mOutputMediaType;
     std::shared_ptr<C2StreamSampleRateInfo::output> mSampleRate;
     std::shared_ptr<C2StreamChannelCountInfo::output> mChannelCount;
     std::shared_ptr<C2StreamBitrateInfo::input> mBitrate;
diff --git a/media/codec2/components/hevc/C2SoftHevcDec.cpp b/media/codec2/components/hevc/C2SoftHevcDec.cpp
index bb8dda0..7232572 100644
--- a/media/codec2/components/hevc/C2SoftHevcDec.cpp
+++ b/media/codec2/components/hevc/C2SoftHevcDec.cpp
@@ -329,7 +329,8 @@
         mIvColorformat(IV_YUV_420P),
         mWidth(320),
         mHeight(240),
-        mHeaderDecoded(false) {
+        mHeaderDecoded(false),
+        mOutIndex(0u) {
 }
 
 C2SoftHevcDec::~C2SoftHevcDec() {
@@ -688,6 +689,33 @@
         buffer->setInfo(mIntf->getColorAspects_l());
     }
 
+    class FillWork {
+       public:
+        FillWork(uint32_t flags, C2WorkOrdinalStruct ordinal,
+                 const std::shared_ptr<C2Buffer>& buffer)
+            : mFlags(flags), mOrdinal(ordinal), mBuffer(buffer) {}
+        ~FillWork() = default;
+
+        void operator()(const std::unique_ptr<C2Work>& work) {
+            work->worklets.front()->output.flags = (C2FrameData::flags_t)mFlags;
+            work->worklets.front()->output.buffers.clear();
+            work->worklets.front()->output.ordinal = mOrdinal;
+            work->workletsProcessed = 1u;
+            work->result = C2_OK;
+            if (mBuffer) {
+                work->worklets.front()->output.buffers.push_back(mBuffer);
+            }
+            ALOGV("timestamp = %lld, index = %lld, w/%s buffer",
+                  mOrdinal.timestamp.peekll(), mOrdinal.frameIndex.peekll(),
+                  mBuffer ? "" : "o");
+        }
+
+       private:
+        const uint32_t mFlags;
+        const C2WorkOrdinalStruct mOrdinal;
+        const std::shared_ptr<C2Buffer> mBuffer;
+    };
+
     auto fillWork = [buffer](const std::unique_ptr<C2Work> &work) {
         work->worklets.front()->output.flags = (C2FrameData::flags_t)0;
         work->worklets.front()->output.buffers.clear();
@@ -696,7 +724,20 @@
         work->workletsProcessed = 1u;
     };
     if (work && c2_cntr64_t(index) == work->input.ordinal.frameIndex) {
-        fillWork(work);
+        bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);
+        // TODO: Check if cloneAndSend can be avoided by tracking number of frames remaining
+        if (eos) {
+            if (buffer) {
+                mOutIndex = index;
+                C2WorkOrdinalStruct outOrdinal = work->input.ordinal;
+                cloneAndSend(
+                    mOutIndex, work,
+                    FillWork(C2FrameData::FLAG_INCOMPLETE, outOrdinal, buffer));
+                buffer.reset();
+            }
+        } else {
+            fillWork(work);
+        }
     } else {
         finish(index, fillWork);
     }
@@ -806,19 +847,20 @@
         TIME_DIFF(mTimeStart, mTimeEnd, decodeTime);
         ALOGV("decodeTime=%6d delay=%6d numBytes=%6d", decodeTime, delay,
               s_decode_op.u4_num_bytes_consumed);
-        if (IVD_MEM_ALLOC_FAILED == (s_decode_op.u4_error_code & 0xFF)) {
+        if (IVD_MEM_ALLOC_FAILED == (s_decode_op.u4_error_code & IVD_ERROR_MASK)) {
             ALOGE("allocation failure in decoder");
             mSignalledError = true;
             work->workletsProcessed = 1u;
             work->result = C2_CORRUPTED;
             return;
-        } else if (IVD_STREAM_WIDTH_HEIGHT_NOT_SUPPORTED == (s_decode_op.u4_error_code & 0xFF)) {
+        } else if (IVD_STREAM_WIDTH_HEIGHT_NOT_SUPPORTED ==
+                   (s_decode_op.u4_error_code & IVD_ERROR_MASK)) {
             ALOGE("unsupported resolution : %dx%d", mWidth, mHeight);
             mSignalledError = true;
             work->workletsProcessed = 1u;
             work->result = C2_CORRUPTED;
             return;
-        } else if (IVD_RES_CHANGED == (s_decode_op.u4_error_code & 0xFF)) {
+        } else if (IVD_RES_CHANGED == (s_decode_op.u4_error_code & IVD_ERROR_MASK)) {
             ALOGV("resolution changed");
             drainInternal(DRAIN_COMPONENT_NO_EOS, pool, work);
             resetDecoder();
@@ -828,6 +870,12 @@
             /* Decode header and get new dimensions */
             setParams(mStride, IVD_DECODE_HEADER);
             (void) ivdec_api_function(mDecHandle, &s_decode_ip, &s_decode_op);
+        } else if (IS_IVD_FATAL_ERROR(s_decode_op.u4_error_code)) {
+            ALOGE("Fatal error in decoder 0x%x", s_decode_op.u4_error_code);
+            mSignalledError = true;
+            work->workletsProcessed = 1u;
+            work->result = C2_CORRUPTED;
+            return;
         }
         if (0 < s_decode_op.u4_pic_wd && 0 < s_decode_op.u4_pic_ht) {
             if (mHeaderDecoded == false) {
diff --git a/media/codec2/components/hevc/C2SoftHevcDec.h b/media/codec2/components/hevc/C2SoftHevcDec.h
index 75111fc..b7664e6 100644
--- a/media/codec2/components/hevc/C2SoftHevcDec.h
+++ b/media/codec2/components/hevc/C2SoftHevcDec.h
@@ -19,6 +19,7 @@
 
 #include <media/stagefright/foundation/ColorUtils.h>
 
+#include <atomic>
 #include <SimpleC2Component.h>
 
 #include "ihevc_typedefs.h"
@@ -121,6 +122,7 @@
     bool mSignalledOutputEos;
     bool mSignalledError;
     bool mHeaderDecoded;
+    std::atomic_uint64_t mOutIndex;
 
     // Color aspects. These are ISO values and are meant to detect changes in aspects to avoid
     // converting them to C2 values for each frame
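
With the hunks above, the HEVC decoder (and, further down, the MPEG-2 decoder) no longer attaches the last picture directly to the work item that carries END_OF_STREAM. Instead it clones that work item via cloneAndSend(), marks the clone FLAG_INCOMPLETE, ships the buffer on the clone (remembering its index in the new atomic mOutIndex), and leaves the original item to be completed without a buffer. That final completion of the original EOS item is not shown in this excerpt; under the usual SimpleC2Component conventions it would look roughly like this (an assumption, not part of the patch):

    // Assumed final step for the original EOS work item, after the clone has
    // delivered the last picture with FLAG_INCOMPLETE.
    work->worklets.front()->output.flags = C2FrameData::FLAG_END_OF_STREAM;
    work->worklets.front()->output.buffers.clear();
    work->worklets.front()->output.ordinal = work->input.ordinal;
    work->workletsProcessed = 1u;
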
diff --git a/media/codec2/components/hevc/C2SoftHevcEnc.cpp b/media/codec2/components/hevc/C2SoftHevcEnc.cpp
index 402d9aa..0d3357f 100644
--- a/media/codec2/components/hevc/C2SoftHevcEnc.cpp
+++ b/media/codec2/components/hevc/C2SoftHevcEnc.cpp
@@ -38,48 +38,56 @@
 
 namespace android {
 
-class C2SoftHevcEnc::IntfImpl : public C2InterfaceHelper {
+namespace {
+
+constexpr char COMPONENT_NAME[] = "c2.android.hevc.encoder";
+
+} // namespace
+
+class C2SoftHevcEnc::IntfImpl : public SimpleInterface<void>::BaseParams {
   public:
-    explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper>& helper)
-        : C2InterfaceHelper(helper) {
+    explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
+        : SimpleInterface<void>::BaseParams(
+                helper,
+                COMPONENT_NAME,
+                C2Component::KIND_ENCODER,
+                C2Component::DOMAIN_VIDEO,
+                MEDIA_MIMETYPE_VIDEO_HEVC) {
+        noPrivateBuffers(); // TODO: account for our buffers here
+        noInputReferences();
+        noOutputReferences();
+        noTimeStretch();
         setDerivedInstance(this);
 
         addParameter(
-            DefineParam(mInputFormat, C2_PARAMKEY_INPUT_STREAM_BUFFER_TYPE)
-                .withConstValue(
-                    new C2StreamBufferTypeSetting::input(0u, C2BufferData::GRAPHIC))
+                DefineParam(mActualInputDelay, C2_PARAMKEY_INPUT_DELAY)
+                .withDefault(new C2PortActualDelayTuning::input(
+                    DEFAULT_B_FRAMES + DEFAULT_RC_LOOKAHEAD))
+                .withFields({C2F(mActualInputDelay, value).inRange(
+                    0, MAX_B_FRAMES + MAX_RC_LOOKAHEAD)})
+                .withSetter(
+                    Setter<decltype(*mActualInputDelay)>::StrictValueWithNoDeps)
                 .build());
 
         addParameter(
-            DefineParam(mOutputFormat, C2_PARAMKEY_OUTPUT_STREAM_BUFFER_TYPE)
-                .withConstValue(
-                    new C2StreamBufferTypeSetting::output(0u, C2BufferData::LINEAR))
+                DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
+                .withConstValue(new C2ComponentAttributesSetting(
+                    C2Component::ATTRIB_IS_TEMPORAL))
                 .build());
 
         addParameter(
-            DefineParam(mInputMediaType, C2_PARAMKEY_INPUT_MEDIA_TYPE)
-                .withConstValue(AllocSharedString<C2PortMediaTypeSetting::input>(
-                    MEDIA_MIMETYPE_VIDEO_RAW))
+                DefineParam(mUsage, C2_PARAMKEY_INPUT_STREAM_USAGE)
+                .withConstValue(new C2StreamUsageTuning::input(
+                        0u, (uint64_t)C2MemoryUsage::CPU_READ))
                 .build());
 
-        addParameter(
-            DefineParam(mOutputMediaType, C2_PARAMKEY_OUTPUT_MEDIA_TYPE)
-                .withConstValue(AllocSharedString<C2PortMediaTypeSetting::output>(
-                    MEDIA_MIMETYPE_VIDEO_HEVC))
-                .build());
-
-        addParameter(DefineParam(mUsage, C2_PARAMKEY_INPUT_STREAM_USAGE)
-                         .withConstValue(new C2StreamUsageTuning::input(
-                             0u, (uint64_t)C2MemoryUsage::CPU_READ))
-                         .build());
-
         // matches size limits in codec library
         addParameter(
             DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
                 .withDefault(new C2StreamPictureSizeInfo::input(0u, 320, 240))
                 .withFields({
-                    C2F(mSize, width).inRange(320, 1920, 2),
-                    C2F(mSize, height).inRange(128, 1088, 2),
+                    C2F(mSize, width).inRange(2, 1920, 2),
+                    C2F(mSize, height).inRange(2, 1088, 2),
                 })
                 .withSetter(SizeSetter)
                 .build());
@@ -332,10 +340,6 @@
     }
 
    private:
-    std::shared_ptr<C2StreamBufferTypeSetting::input> mInputFormat;
-    std::shared_ptr<C2StreamBufferTypeSetting::output> mOutputFormat;
-    std::shared_ptr<C2PortMediaTypeSetting::input> mInputMediaType;
-    std::shared_ptr<C2PortMediaTypeSetting::output> mOutputMediaType;
     std::shared_ptr<C2StreamUsageTuning::input> mUsage;
     std::shared_ptr<C2StreamPictureSizeInfo::input> mSize;
     std::shared_ptr<C2StreamFrameRateInfo::output> mFrameRate;
@@ -348,8 +352,6 @@
     std::shared_ptr<C2StreamSyncFrameIntervalTuning::output> mSyncFramePeriod;
 };
 
-constexpr char COMPONENT_NAME[] = "c2.android.hevc.encoder";
-
 static size_t GetCPUCoreCount() {
     long cpuCoreCount = 0;
 
@@ -469,7 +471,8 @@
     mIvVideoColorFormat = IV_YUV_420P;
     mEncParams.s_multi_thrd_prms.i4_max_num_cores = mNumCores;
     mEncParams.s_out_strm_prms.i4_codec_profile = mHevcEncProfile;
-    mEncParams.s_lap_prms.i4_rc_look_ahead_pics = 0;
+    mEncParams.s_lap_prms.i4_rc_look_ahead_pics = DEFAULT_RC_LOOKAHEAD;
+    mEncParams.s_coding_tools_prms.i4_max_temporal_layers = DEFAULT_B_FRAMES;
 
     switch (mBitrateMode->value) {
         case C2Config::BITRATE_IGNORE:
@@ -519,10 +522,9 @@
 
 c2_status_t C2SoftHevcEnc::drain(uint32_t drainMode,
                                  const std::shared_ptr<C2BlockPool>& pool) {
-    (void)drainMode;
-    (void)pool;
-    return C2_OK;
+    return drainInternal(drainMode, pool, nullptr);
 }
+
 c2_status_t C2SoftHevcEnc::initEncoder() {
     CHECK(!mCodecCtx);
     {
@@ -559,7 +561,7 @@
 
 c2_status_t C2SoftHevcEnc::setEncodeArgs(ihevce_inp_buf_t* ps_encode_ip,
                                          const C2GraphicView* const input,
-                                         uint64_t timestamp) {
+                                         uint64_t workIndex) {
     ihevce_static_cfg_params_t* params = &mEncParams;
     memset(ps_encode_ip, 0, sizeof(*ps_encode_ip));
 
@@ -703,7 +705,92 @@
     ps_encode_ip->i4_curr_peak_bitrate =
         params->s_tgt_lyr_prms.as_tgt_params[0].ai4_peak_bitrate[0];
     ps_encode_ip->i4_curr_rate_factor = params->s_config_prms.i4_rate_factor;
-    ps_encode_ip->u8_pts = timestamp;
+    ps_encode_ip->u8_pts = workIndex;
+    return C2_OK;
+}
+
+void C2SoftHevcEnc::finishWork(uint64_t index,
+                               const std::unique_ptr<C2Work>& work,
+                               const std::shared_ptr<C2BlockPool>& pool,
+                               ihevce_out_buf_t* ps_encode_op) {
+    std::shared_ptr<C2LinearBlock> block;
+    C2MemoryUsage usage = {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
+    c2_status_t status =
+        pool->fetchLinearBlock(ps_encode_op->i4_bytes_generated, usage, &block);
+    if (C2_OK != status) {
+        ALOGE("fetchLinearBlock for Output failed with status 0x%x", status);
+        mSignalledError = true;
+        work->result = status;
+        work->workletsProcessed = 1u;
+        return;
+    }
+    C2WriteView wView = block->map().get();
+    if (C2_OK != wView.error()) {
+        ALOGE("write view map failed with status 0x%x", wView.error());
+        mSignalledError = true;
+        work->result = wView.error();
+        work->workletsProcessed = 1u;
+        return;
+    }
+    memcpy(wView.data(), ps_encode_op->pu1_output_buf,
+           ps_encode_op->i4_bytes_generated);
+
+    std::shared_ptr<C2Buffer> buffer =
+        createLinearBuffer(block, 0, ps_encode_op->i4_bytes_generated);
+
+    DUMP_TO_FILE(mOutFile, ps_encode_op->pu1_output_buf,
+                 ps_encode_op->i4_bytes_generated);
+
+    if (ps_encode_op->i4_is_key_frame) {
+        ALOGV("IDR frame produced");
+        buffer->setInfo(std::make_shared<C2StreamPictureTypeMaskInfo::output>(
+            0u /* stream id */, C2Config::SYNC_FRAME));
+    }
+
+    auto fillWork = [buffer](const std::unique_ptr<C2Work>& work) {
+        work->worklets.front()->output.flags = (C2FrameData::flags_t)0;
+        work->worklets.front()->output.buffers.clear();
+        work->worklets.front()->output.buffers.push_back(buffer);
+        work->worklets.front()->output.ordinal = work->input.ordinal;
+        work->workletsProcessed = 1u;
+    };
+    if (work && c2_cntr64_t(index) == work->input.ordinal.frameIndex) {
+        fillWork(work);
+        if (mSignalledEos) {
+            work->worklets.front()->output.flags =
+                C2FrameData::FLAG_END_OF_STREAM;
+        }
+    } else {
+        finish(index, fillWork);
+    }
+}
+
+c2_status_t C2SoftHevcEnc::drainInternal(
+        uint32_t drainMode,
+        const std::shared_ptr<C2BlockPool> &pool,
+        const std::unique_ptr<C2Work> &work) {
+
+    if (drainMode == NO_DRAIN) {
+        ALOGW("drain with NO_DRAIN: no-op");
+        return C2_OK;
+    }
+    if (drainMode == DRAIN_CHAIN) {
+        ALOGW("DRAIN_CHAIN not supported");
+        return C2_OMITTED;
+    }
+
+    while (true) {
+        ihevce_out_buf_t s_encode_op{};
+        memset(&s_encode_op, 0, sizeof(s_encode_op));
+
+        ihevce_encode(mCodecCtx, nullptr, &s_encode_op);
+        if (s_encode_op.i4_bytes_generated) {
+            finishWork(s_encode_op.u8_pts, work, pool, &s_encode_op);
+        } else {
+            if (work->workletsProcessed != 1u) fillEmptyWork(work);
+            break;
+        }
+    }
     return C2_OK;
 }
 
@@ -711,7 +798,7 @@
                             const std::shared_ptr<C2BlockPool>& pool) {
     // Initialize output work
     work->result = C2_OK;
-    work->workletsProcessed = 1u;
+    work->workletsProcessed = 0u;
     work->worklets.front()->output.flags = work->input.flags;
 
     if (mSignalledError || mSignalledEos) {
@@ -728,6 +815,7 @@
             ALOGE("Failed to initialize encoder : 0x%x", status);
             mSignalledError = true;
             work->result = status;
+            work->workletsProcessed = 1u;
             return;
         }
     }
@@ -735,6 +823,8 @@
     std::shared_ptr<const C2GraphicView> view;
     std::shared_ptr<C2Buffer> inputBuffer = nullptr;
     bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);
+    if (eos) mSignalledEos = true;
+
     if (!work->input.buffers.empty()) {
         inputBuffer = work->input.buffers[0];
         view = std::make_shared<const C2GraphicView>(
@@ -743,13 +833,12 @@
             ALOGE("graphic view map err = %d", view->error());
             mSignalledError = true;
             work->result = C2_CORRUPTED;
+            work->workletsProcessed = 1u;
             return;
         }
     }
-
     IHEVCE_PLUGIN_STATUS_T err = IHEVCE_EOK;
 
-    fillEmptyWork(work);
     if (!mSpsPpsHeaderReceived) {
         ihevce_out_buf_t s_header_op{};
         err = ihevce_encode_header(mCodecCtx, &s_header_op);
@@ -761,6 +850,7 @@
                 ALOGE("CSD allocation failed");
                 mSignalledError = true;
                 work->result = C2_NO_MEMORY;
+                work->workletsProcessed = 1u;
                 return;
             }
             memcpy(csd->m.value, s_header_op.pu1_output_buf,
@@ -771,34 +861,40 @@
             mSpsPpsHeaderReceived = true;
         }
         if (!inputBuffer) {
+            work->workletsProcessed = 1u;
             return;
         }
     }
     ihevce_inp_buf_t s_encode_ip{};
     ihevce_out_buf_t s_encode_op{};
-    uint64_t timestamp = work->input.ordinal.timestamp.peekull();
+    uint64_t workIndex = work->input.ordinal.frameIndex.peekull();
 
-    status = setEncodeArgs(&s_encode_ip, view.get(), timestamp);
+    status = setEncodeArgs(&s_encode_ip, view.get(), workIndex);
     if (C2_OK != status) {
         ALOGE("setEncodeArgs failed : 0x%x", status);
         mSignalledError = true;
         work->result = status;
+        work->workletsProcessed = 1u;
         return;
     }
 
     uint64_t timeDelay = 0;
     uint64_t timeTaken = 0;
+    memset(&s_encode_op, 0, sizeof(s_encode_op));
     GETTIME(&mTimeStart, nullptr);
     TIME_DIFF(mTimeEnd, mTimeStart, timeDelay);
 
-    ihevce_inp_buf_t* ps_encode_ip = (inputBuffer) ? &s_encode_ip : nullptr;
-
-    err = ihevce_encode(mCodecCtx, ps_encode_ip, &s_encode_op);
-    if (IHEVCE_EOK != err) {
-        ALOGE("Encode Frame failed : 0x%x", err);
-        mSignalledError = true;
-        work->result = C2_CORRUPTED;
-        return;
+    if (inputBuffer) {
+        err = ihevce_encode(mCodecCtx, &s_encode_ip, &s_encode_op);
+        if (IHEVCE_EOK != err) {
+            ALOGE("Encode Frame failed : 0x%x", err);
+            mSignalledError = true;
+            work->result = C2_CORRUPTED;
+            work->workletsProcessed = 1u;
+            return;
+        }
+    } else if (!eos) {
+        fillEmptyWork(work);
     }
 
     GETTIME(&mTimeEnd, nullptr);
@@ -809,42 +905,11 @@
           (int)timeDelay, s_encode_op.i4_bytes_generated);
 
     if (s_encode_op.i4_bytes_generated) {
-        std::shared_ptr<C2LinearBlock> block;
-        C2MemoryUsage usage = {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
-        status = pool->fetchLinearBlock(s_encode_op.i4_bytes_generated, usage, &block);
-        if (C2_OK != status) {
-            ALOGE("fetchLinearBlock for Output failed with status 0x%x", status);
-            work->result = C2_NO_MEMORY;
-            mSignalledError = true;
-            return;
-        }
-        C2WriteView wView = block->map().get();
-        if (C2_OK != wView.error()) {
-            ALOGE("write view map failed with status 0x%x", wView.error());
-            work->result = wView.error();
-            mSignalledError = true;
-            return;
-        }
-        memcpy(wView.data(), s_encode_op.pu1_output_buf,
-               s_encode_op.i4_bytes_generated);
-
-        std::shared_ptr<C2Buffer> buffer =
-            createLinearBuffer(block, 0, s_encode_op.i4_bytes_generated);
-
-        DUMP_TO_FILE(mOutFile, s_encode_op.pu1_output_buf,
-                     s_encode_op.i4_bytes_generated);
-
-        work->worklets.front()->output.ordinal.timestamp = s_encode_op.u8_pts;
-        if (s_encode_op.i4_is_key_frame) {
-            ALOGV("IDR frame produced");
-            buffer->setInfo(
-                std::make_shared<C2StreamPictureTypeMaskInfo::output>(
-                    0u /* stream id */, C2Config::SYNC_FRAME));
-        }
-        work->worklets.front()->output.buffers.push_back(buffer);
+        finishWork(s_encode_op.u8_pts, work, pool, &s_encode_op);
     }
+
     if (eos) {
-        mSignalledEos = true;
+        drainInternal(DRAIN_COMPONENT_WITH_EOS, pool, work);
     }
 }
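
Both the process() and drainInternal() paths above fall back to fillEmptyWork() when the codec produces no bytes; that helper is pre-existing and unchanged, so it does not appear in this diff. For reference, in these soft components it has roughly the following shape (a sketch of the existing helper, not something introduced by this patch):

    // Existing helper (sketch): completes a work item with no output buffer,
    // propagating only the end-of-stream flag from its input.
    static void fillEmptyWork(const std::unique_ptr<C2Work> &work) {
        uint32_t flags = 0;
        if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
            flags |= C2FrameData::FLAG_END_OF_STREAM;
        }
        work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
        work->worklets.front()->output.buffers.clear();
        work->worklets.front()->output.ordinal = work->input.ordinal;
        work->workletsProcessed = 1u;
    }
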
 
diff --git a/media/codec2/components/hevc/C2SoftHevcEnc.h b/media/codec2/components/hevc/C2SoftHevcEnc.h
index 8569a3e..f2c7642 100644
--- a/media/codec2/components/hevc/C2SoftHevcEnc.h
+++ b/media/codec2/components/hevc/C2SoftHevcEnc.h
@@ -35,7 +35,12 @@
     diff = (((end).tv_sec - (start).tv_sec) * 1000000) + \
            ((end).tv_usec - (start).tv_usec);
 
-#define CODEC_MAX_CORES 4
+#define CODEC_MAX_CORES  4
+#define MAX_B_FRAMES     1
+#define MAX_RC_LOOKAHEAD 1
+
+#define DEFAULT_B_FRAMES     0
+#define DEFAULT_RC_LOOKAHEAD 0
 
 struct C2SoftHevcEnc : public SimpleC2Component {
     class IntfImpl;
@@ -95,10 +100,15 @@
     c2_status_t releaseEncoder();
     c2_status_t setEncodeArgs(ihevce_inp_buf_t* ps_encode_ip,
                               const C2GraphicView* const input,
-                              uint64_t timestamp);
+                              uint64_t workIndex);
+    void finishWork(uint64_t index, const std::unique_ptr<C2Work>& work,
+                    const std::shared_ptr<C2BlockPool>& pool,
+                    ihevce_out_buf_t* ps_encode_op);
+    c2_status_t drainInternal(uint32_t drainMode,
+                              const std::shared_ptr<C2BlockPool>& pool,
+                              const std::unique_ptr<C2Work>& work);
     C2_DO_NOT_COPY(C2SoftHevcEnc);
 };
-
 #ifdef FILE_DUMP_ENABLE
 
 #define INPUT_DUMP_PATH "/data/local/tmp/hevc"
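
The new B-frame and rate-control-lookahead constants above exist to size the encoder's advertised input delay: the IntfImpl hunk earlier defaults C2_PARAMKEY_INPUT_DELAY to DEFAULT_B_FRAMES + DEFAULT_RC_LOOKAHEAD and allows it up to MAX_B_FRAMES + MAX_RC_LOOKAHEAD, and the encoder setup code feeds the same defaults into i4_rc_look_ahead_pics and i4_max_temporal_layers. In terms of these definitions (the k* names below are illustrative only):

    // Input delay bounds implied by the definitions above (illustrative names).
    constexpr uint32_t kDefaultHevcInputDelay = DEFAULT_B_FRAMES + DEFAULT_RC_LOOKAHEAD;  // 0 frames
    constexpr uint32_t kMaxHevcInputDelay     = MAX_B_FRAMES + MAX_RC_LOOKAHEAD;          // 2 frames
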
diff --git a/media/codec2/components/mp3/C2SoftMp3Dec.cpp b/media/codec2/components/mp3/C2SoftMp3Dec.cpp
index 9db6d8f..5ba7e3d 100644
--- a/media/codec2/components/mp3/C2SoftMp3Dec.cpp
+++ b/media/codec2/components/mp3/C2SoftMp3Dec.cpp
@@ -30,35 +30,32 @@
 
 namespace android {
 
+namespace {
+
 constexpr char COMPONENT_NAME[] = "c2.android.mp3.decoder";
 
-class C2SoftMP3::IntfImpl : public C2InterfaceHelper {
+}  // namespace
+
+class C2SoftMP3::IntfImpl : public SimpleInterface<void>::BaseParams {
 public:
     explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
-        : C2InterfaceHelper(helper) {
-
+        : SimpleInterface<void>::BaseParams(
+                helper,
+                COMPONENT_NAME,
+                C2Component::KIND_DECODER,
+                C2Component::DOMAIN_AUDIO,
+                MEDIA_MIMETYPE_AUDIO_MPEG) {
+        noPrivateBuffers();
+        noInputReferences();
+        noOutputReferences();
+        noInputLatency();
+        noTimeStretch();
         setDerivedInstance(this);
 
         addParameter(
-                DefineParam(mInputFormat, C2_PARAMKEY_INPUT_STREAM_BUFFER_TYPE)
-                .withConstValue(new C2StreamBufferTypeSetting::input(0u, C2BufferData::LINEAR))
-                .build());
-
-        addParameter(
-                DefineParam(mOutputFormat, C2_PARAMKEY_OUTPUT_STREAM_BUFFER_TYPE)
-                .withConstValue(new C2StreamBufferTypeSetting::output(0u, C2BufferData::LINEAR))
-                .build());
-
-        addParameter(
-                DefineParam(mInputMediaType, C2_PARAMKEY_INPUT_MEDIA_TYPE)
-                .withConstValue(AllocSharedString<C2PortMediaTypeSetting::input>(
-                        MEDIA_MIMETYPE_AUDIO_MPEG))
-                .build());
-
-        addParameter(
-                DefineParam(mOutputMediaType, C2_PARAMKEY_OUTPUT_MEDIA_TYPE)
-                .withConstValue(AllocSharedString<C2PortMediaTypeSetting::output>(
-                        MEDIA_MIMETYPE_AUDIO_RAW))
+                DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
+                .withConstValue(new C2ComponentAttributesSetting(
+                    C2Component::ATTRIB_IS_TEMPORAL))
                 .build());
 
         addParameter(
@@ -90,10 +87,6 @@
     }
 
 private:
-    std::shared_ptr<C2StreamBufferTypeSetting::input> mInputFormat;
-    std::shared_ptr<C2StreamBufferTypeSetting::output> mOutputFormat;
-    std::shared_ptr<C2PortMediaTypeSetting::input> mInputMediaType;
-    std::shared_ptr<C2PortMediaTypeSetting::output> mOutputMediaType;
     std::shared_ptr<C2StreamSampleRateInfo::output> mSampleRate;
     std::shared_ptr<C2StreamChannelCountInfo::output> mChannelCount;
     std::shared_ptr<C2StreamBitrateInfo::input> mBitrate;
diff --git a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
index 290677e..df7b403 100644
--- a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
+++ b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
@@ -315,7 +315,8 @@
         mOutBufferDrain(nullptr),
         mIvColorformat(IV_YUV_420P),
         mWidth(320),
-        mHeight(240) {
+        mHeight(240),
+        mOutIndex(0u) {
     // If input dump is enabled, then open create an empty file
     GENERATE_FILE_NAMES();
     CREATE_DUMP_FILE(mInFile);
@@ -766,6 +767,33 @@
         buffer->setInfo(mIntf->getColorAspects_l());
     }
 
+    class FillWork {
+       public:
+        FillWork(uint32_t flags, C2WorkOrdinalStruct ordinal,
+                 const std::shared_ptr<C2Buffer>& buffer)
+            : mFlags(flags), mOrdinal(ordinal), mBuffer(buffer) {}
+        ~FillWork() = default;
+
+        void operator()(const std::unique_ptr<C2Work>& work) {
+            work->worklets.front()->output.flags = (C2FrameData::flags_t)mFlags;
+            work->worklets.front()->output.buffers.clear();
+            work->worklets.front()->output.ordinal = mOrdinal;
+            work->workletsProcessed = 1u;
+            work->result = C2_OK;
+            if (mBuffer) {
+                work->worklets.front()->output.buffers.push_back(mBuffer);
+            }
+            ALOGV("timestamp = %lld, index = %lld, w/%s buffer",
+                  mOrdinal.timestamp.peekll(), mOrdinal.frameIndex.peekll(),
+                  mBuffer ? "" : "o");
+        }
+
+       private:
+        const uint32_t mFlags;
+        const C2WorkOrdinalStruct mOrdinal;
+        const std::shared_ptr<C2Buffer> mBuffer;
+    };
+
     auto fillWork = [buffer](const std::unique_ptr<C2Work> &work) {
         work->worklets.front()->output.flags = (C2FrameData::flags_t)0;
         work->worklets.front()->output.buffers.clear();
@@ -774,7 +802,20 @@
         work->workletsProcessed = 1u;
     };
     if (work && c2_cntr64_t(index) == work->input.ordinal.frameIndex) {
-        fillWork(work);
+        bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);
+        // TODO: Check if cloneAndSend can be avoided by tracking number of frames remaining
+        if (eos) {
+            if (buffer) {
+                mOutIndex = index;
+                C2WorkOrdinalStruct outOrdinal = work->input.ordinal;
+                cloneAndSend(
+                    mOutIndex, work,
+                    FillWork(C2FrameData::FLAG_INCOMPLETE, outOrdinal, buffer));
+                buffer.reset();
+            }
+        } else {
+            fillWork(work);
+        }
     } else {
         finish(index, fillWork);
     }
diff --git a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.h b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.h
index 9999872..65d3b87 100644
--- a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.h
+++ b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.h
@@ -17,6 +17,7 @@
 #ifndef ANDROID_C2_SOFT_MPEG2_DEC_H_
 #define ANDROID_C2_SOFT_MPEG2_DEC_H_
 
+#include <atomic>
 #include <SimpleC2Component.h>
 
 #include <media/stagefright/foundation/ColorUtils.h>
@@ -161,6 +162,7 @@
     uint32_t mStride;
     bool mSignalledOutputEos;
     bool mSignalledError;
+    std::atomic_uint64_t mOutIndex;
 
     // Color aspects. These are ISO values and are meant to detect changes in aspects to avoid
     // converting them to C2 values for each frame
diff --git a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
index 3d4a733..7e6685e 100644
--- a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
+++ b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
@@ -466,9 +466,11 @@
 
 /* TODO: can remove temporary copy after library supports writing to display
  * buffer Y, U and V plane pointers using stride info. */
-static void copyOutputBufferToYV12Frame(uint8_t *dst, uint8_t *src, size_t dstYStride,
-                                        size_t srcYStride, uint32_t width, uint32_t height) {
-    size_t dstUVStride = align(dstYStride / 2, 16);
+static void copyOutputBufferToYuvPlanarFrame(
+        uint8_t *dst, uint8_t *src,
+        size_t dstYStride, size_t dstUVStride,
+        size_t srcYStride, uint32_t width,
+        uint32_t height) {
     size_t srcUVStride = srcYStride / 2;
     uint8_t *srcStart = src;
     uint8_t *dstStart = dst;
@@ -673,8 +675,14 @@
         }
 
         uint8_t *outputBufferY = wView.data()[C2PlanarLayout::PLANE_Y];
-        (void)copyOutputBufferToYV12Frame(outputBufferY, mOutputBuffer[mNumSamplesOutput & 1],
-                                          wView.width(), align(mWidth, 16), mWidth, mHeight);
+        C2PlanarLayout layout = wView.layout();
+        size_t dstYStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
+        size_t dstUVStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
+        (void)copyOutputBufferToYuvPlanarFrame(
+                outputBufferY,
+                mOutputBuffer[mNumSamplesOutput & 1],
+                dstYStride, dstUVStride,
+                align(mWidth, 16), mWidth, mHeight);
 
         inPos += inSize - (size_t)tmpInSize;
         finishWork(workIndex, work);
diff --git a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp
index 89fa59d..36053f6 100644
--- a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp
+++ b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp
@@ -39,53 +39,47 @@
 
 namespace android {
 
+namespace {
+
 #ifdef MPEG4
 constexpr char COMPONENT_NAME[] = "c2.android.mpeg4.encoder";
+const char *MEDIA_MIMETYPE_VIDEO = MEDIA_MIMETYPE_VIDEO_MPEG4;
 #else
 constexpr char COMPONENT_NAME[] = "c2.android.h263.encoder";
+const char *MEDIA_MIMETYPE_VIDEO = MEDIA_MIMETYPE_VIDEO_H263;
 #endif
 
-class C2SoftMpeg4Enc::IntfImpl : public C2InterfaceHelper {
+} // namespace
+
+class C2SoftMpeg4Enc::IntfImpl : public SimpleInterface<void>::BaseParams {
    public:
-    explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper>& helper)
-        : C2InterfaceHelper(helper) {
+    explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
+        : SimpleInterface<void>::BaseParams(
+                helper,
+                COMPONENT_NAME,
+                C2Component::KIND_ENCODER,
+                C2Component::DOMAIN_VIDEO,
+                MEDIA_MIMETYPE_VIDEO) {
+        noPrivateBuffers(); // TODO: account for our buffers here
+        noInputReferences();
+        noOutputReferences();
+        noInputLatency();
+        noTimeStretch();
         setDerivedInstance(this);
 
         addParameter(
-            DefineParam(mInputFormat, C2_PARAMKEY_INPUT_STREAM_BUFFER_TYPE)
-                .withConstValue(
-                    new C2StreamBufferTypeSetting::input(0u, C2BufferData::GRAPHIC))
+                DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
+                .withConstValue(new C2ComponentAttributesSetting(
+                    C2Component::ATTRIB_IS_TEMPORAL))
                 .build());
 
         addParameter(
-            DefineParam(mOutputFormat, C2_PARAMKEY_OUTPUT_STREAM_BUFFER_TYPE)
-                .withConstValue(
-                    new C2StreamBufferTypeSetting::output(0u, C2BufferData::LINEAR))
+                DefineParam(mUsage, C2_PARAMKEY_INPUT_STREAM_USAGE)
+                .withConstValue(new C2StreamUsageTuning::input(
+                        0u, (uint64_t)C2MemoryUsage::CPU_READ))
                 .build());
 
         addParameter(
-            DefineParam(mInputMediaType, C2_PARAMKEY_INPUT_MEDIA_TYPE)
-                .withConstValue(AllocSharedString<C2PortMediaTypeSetting::input>(
-                    MEDIA_MIMETYPE_VIDEO_RAW))
-                .build());
-
-        addParameter(
-            DefineParam(mOutputMediaType, C2_PARAMKEY_OUTPUT_MEDIA_TYPE)
-                .withConstValue(AllocSharedString<C2PortMediaTypeSetting::output>(
-#ifdef MPEG4
-                    MEDIA_MIMETYPE_VIDEO_MPEG4
-#else
-                    MEDIA_MIMETYPE_VIDEO_H263
-#endif
-                    ))
-                .build());
-
-        addParameter(DefineParam(mUsage, C2_PARAMKEY_INPUT_STREAM_USAGE)
-                         .withConstValue(new C2StreamUsageTuning::input(
-                             0u, (uint64_t)C2MemoryUsage::CPU_READ))
-                         .build());
-
-        addParameter(
             DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
                 .withDefault(new C2StreamPictureSizeInfo::input(0u, 176, 144))
                 .withFields({
@@ -217,10 +211,6 @@
     }
 
    private:
-    std::shared_ptr<C2StreamBufferTypeSetting::input> mInputFormat;
-    std::shared_ptr<C2StreamBufferTypeSetting::output> mOutputFormat;
-    std::shared_ptr<C2PortMediaTypeSetting::input> mInputMediaType;
-    std::shared_ptr<C2PortMediaTypeSetting::output> mOutputMediaType;
     std::shared_ptr<C2StreamUsageTuning::input> mUsage;
     std::shared_ptr<C2StreamPictureSizeInfo::input> mSize;
     std::shared_ptr<C2StreamFrameRateInfo::output> mFrameRate;
diff --git a/media/codec2/components/opus/C2SoftOpusDec.cpp b/media/codec2/components/opus/C2SoftOpusDec.cpp
index 7dcd53d..6b6974f 100644
--- a/media/codec2/components/opus/C2SoftOpusDec.cpp
+++ b/media/codec2/components/opus/C2SoftOpusDec.cpp
@@ -31,34 +31,32 @@
 
 namespace android {
 
+namespace {
+
 constexpr char COMPONENT_NAME[] = "c2.android.opus.decoder";
 
-class C2SoftOpusDec::IntfImpl : public C2InterfaceHelper {
-   public:
-    explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper>& helper)
-        : C2InterfaceHelper(helper) {
+}  // namespace
+
+class C2SoftOpusDec::IntfImpl : public SimpleInterface<void>::BaseParams {
+public:
+    explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
+        : SimpleInterface<void>::BaseParams(
+                helper,
+                COMPONENT_NAME,
+                C2Component::KIND_DECODER,
+                C2Component::DOMAIN_AUDIO,
+                MEDIA_MIMETYPE_AUDIO_OPUS) {
+        noPrivateBuffers();
+        noInputReferences();
+        noOutputReferences();
+        noInputLatency();
+        noTimeStretch();
         setDerivedInstance(this);
 
         addParameter(
-                DefineParam(mInputFormat, C2_PARAMKEY_INPUT_STREAM_BUFFER_TYPE)
-                .withConstValue(new C2StreamBufferTypeSetting::input(0u, C2BufferData::LINEAR))
-                .build());
-
-        addParameter(
-                DefineParam(mOutputFormat, C2_PARAMKEY_OUTPUT_STREAM_BUFFER_TYPE)
-                .withConstValue(new C2StreamBufferTypeSetting::output(0u, C2BufferData::LINEAR))
-                .build());
-
-        addParameter(
-                DefineParam(mInputMediaType, C2_PARAMKEY_INPUT_MEDIA_TYPE)
-                .withConstValue(AllocSharedString<C2PortMediaTypeSetting::input>(
-                        MEDIA_MIMETYPE_AUDIO_OPUS))
-                .build());
-
-        addParameter(
-                DefineParam(mOutputMediaType, C2_PARAMKEY_OUTPUT_MEDIA_TYPE)
-                .withConstValue(AllocSharedString<C2PortMediaTypeSetting::output>(
-                        MEDIA_MIMETYPE_AUDIO_RAW))
+                DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
+                .withConstValue(new C2ComponentAttributesSetting(
+                    C2Component::ATTRIB_IS_TEMPORAL))
                 .build());
 
         addParameter(
@@ -88,11 +86,7 @@
                 .build());
     }
 
-   private:
-    std::shared_ptr<C2StreamBufferTypeSetting::input> mInputFormat;
-    std::shared_ptr<C2StreamBufferTypeSetting::output> mOutputFormat;
-    std::shared_ptr<C2PortMediaTypeSetting::input> mInputMediaType;
-    std::shared_ptr<C2PortMediaTypeSetting::output> mOutputMediaType;
+private:
     std::shared_ptr<C2StreamSampleRateInfo::output> mSampleRate;
     std::shared_ptr<C2StreamChannelCountInfo::output> mChannelCount;
     std::shared_ptr<C2StreamBitrateInfo::input> mBitrate;
diff --git a/media/codec2/components/opus/C2SoftOpusEnc.cpp b/media/codec2/components/opus/C2SoftOpusEnc.cpp
index f1020c3..7b58c9b 100644
--- a/media/codec2/components/opus/C2SoftOpusEnc.cpp
+++ b/media/codec2/components/opus/C2SoftOpusEnc.cpp
@@ -32,35 +32,32 @@
 #define DEFAULT_FRAME_DURATION_MS 20
 namespace android {
 
+namespace {
+
 constexpr char COMPONENT_NAME[] = "c2.android.opus.encoder";
 
-class C2SoftOpusEnc::IntfImpl : public C2InterfaceHelper {
+}  // namespace
+
+class C2SoftOpusEnc::IntfImpl : public SimpleInterface<void>::BaseParams {
 public:
     explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
-        : C2InterfaceHelper(helper) {
-
+        : SimpleInterface<void>::BaseParams(
+                helper,
+                COMPONENT_NAME,
+                C2Component::KIND_ENCODER,
+                C2Component::DOMAIN_AUDIO,
+                MEDIA_MIMETYPE_AUDIO_OPUS) {
+        noPrivateBuffers();
+        noInputReferences();
+        noOutputReferences();
+        noInputLatency();
+        noTimeStretch();
         setDerivedInstance(this);
 
         addParameter(
-                DefineParam(mInputFormat, C2_PARAMKEY_INPUT_STREAM_BUFFER_TYPE)
-                .withConstValue(new C2StreamBufferTypeSetting::input(0u, C2BufferData::LINEAR))
-                .build());
-
-        addParameter(
-                DefineParam(mOutputFormat, C2_PARAMKEY_OUTPUT_STREAM_BUFFER_TYPE)
-                .withConstValue(new C2StreamBufferTypeSetting::output(0u, C2BufferData::LINEAR))
-                .build());
-
-        addParameter(
-                DefineParam(mInputMediaType, C2_PARAMKEY_INPUT_MEDIA_TYPE)
-                .withConstValue(AllocSharedString<C2PortMediaTypeSetting::input>(
-                        MEDIA_MIMETYPE_AUDIO_RAW))
-                .build());
-
-        addParameter(
-                DefineParam(mOutputMediaType, C2_PARAMKEY_OUTPUT_MEDIA_TYPE)
-                .withConstValue(AllocSharedString<C2PortMediaTypeSetting::output>(
-                        MEDIA_MIMETYPE_AUDIO_OPUS))
+                DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
+                .withConstValue(new C2ComponentAttributesSetting(
+                    C2Component::ATTRIB_IS_TEMPORAL))
                 .build());
 
         addParameter(
@@ -104,10 +101,6 @@
     uint32_t getComplexity() const { return mComplexity->value; }
 
 private:
-    std::shared_ptr<C2StreamBufferTypeSetting::input> mInputFormat;
-    std::shared_ptr<C2StreamBufferTypeSetting::output> mOutputFormat;
-    std::shared_ptr<C2PortMediaTypeSetting::input> mInputMediaType;
-    std::shared_ptr<C2PortMediaTypeSetting::output> mOutputMediaType;
     std::shared_ptr<C2StreamSampleRateInfo::input> mSampleRate;
     std::shared_ptr<C2StreamChannelCountInfo::input> mChannelCount;
     std::shared_ptr<C2StreamBitrateInfo::output> mBitrate;
diff --git a/media/codec2/components/raw/C2SoftRawDec.cpp b/media/codec2/components/raw/C2SoftRawDec.cpp
index 802caa4..8e94a18 100644
--- a/media/codec2/components/raw/C2SoftRawDec.cpp
+++ b/media/codec2/components/raw/C2SoftRawDec.cpp
@@ -27,35 +27,32 @@
 
 namespace android {
 
+namespace {
+
 constexpr char COMPONENT_NAME[] = "c2.android.raw.decoder";
 
-class C2SoftRawDec::IntfImpl : public C2InterfaceHelper {
+}  // namespace
+
+class C2SoftRawDec::IntfImpl : public SimpleInterface<void>::BaseParams {
 public:
     explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
-        : C2InterfaceHelper(helper) {
-
+        : SimpleInterface<void>::BaseParams(
+                helper,
+                COMPONENT_NAME,
+                C2Component::KIND_DECODER,
+                C2Component::DOMAIN_AUDIO,
+                MEDIA_MIMETYPE_AUDIO_RAW) {
+        noPrivateBuffers();
+        noInputReferences();
+        noOutputReferences();
+        noInputLatency();
+        noTimeStretch();
         setDerivedInstance(this);
 
         addParameter(
-                DefineParam(mInputFormat, C2_PARAMKEY_INPUT_STREAM_BUFFER_TYPE)
-                .withConstValue(new C2StreamBufferTypeSetting::input(0u, C2BufferData::LINEAR))
-                .build());
-
-        addParameter(
-                DefineParam(mOutputFormat, C2_PARAMKEY_OUTPUT_STREAM_BUFFER_TYPE)
-                .withConstValue(new C2StreamBufferTypeSetting::output(0u, C2BufferData::LINEAR))
-                .build());
-
-        addParameter(
-                DefineParam(mInputMediaType, C2_PARAMKEY_INPUT_MEDIA_TYPE)
-                .withConstValue(AllocSharedString<C2PortMediaTypeSetting::input>(
-                        MEDIA_MIMETYPE_AUDIO_RAW))
-                .build());
-
-        addParameter(
-                DefineParam(mOutputMediaType, C2_PARAMKEY_OUTPUT_MEDIA_TYPE)
-                .withConstValue(AllocSharedString<C2PortMediaTypeSetting::output>(
-                        MEDIA_MIMETYPE_AUDIO_RAW))
+                DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
+                .withConstValue(new C2ComponentAttributesSetting(
+                    C2Component::ATTRIB_IS_TEMPORAL))
                 .build());
 
         addParameter(
@@ -98,10 +95,6 @@
     }
 
 private:
-    std::shared_ptr<C2StreamBufferTypeSetting::input> mInputFormat;
-    std::shared_ptr<C2StreamBufferTypeSetting::output> mOutputFormat;
-    std::shared_ptr<C2PortMediaTypeSetting::input> mInputMediaType;
-    std::shared_ptr<C2PortMediaTypeSetting::output> mOutputMediaType;
     std::shared_ptr<C2StreamSampleRateInfo::output> mSampleRate;
     std::shared_ptr<C2StreamChannelCountInfo::output> mChannelCount;
     std::shared_ptr<C2StreamBitrateInfo::input> mBitrate;
diff --git a/media/codec2/components/vorbis/C2SoftVorbisDec.cpp b/media/codec2/components/vorbis/C2SoftVorbisDec.cpp
index e3bafd3..18e6db2 100644
--- a/media/codec2/components/vorbis/C2SoftVorbisDec.cpp
+++ b/media/codec2/components/vorbis/C2SoftVorbisDec.cpp
@@ -35,35 +35,32 @@
 
 namespace android {
 
+namespace {
+
 constexpr char COMPONENT_NAME[] = "c2.android.vorbis.decoder";
 
-class C2SoftVorbisDec::IntfImpl : public C2InterfaceHelper {
+}  // namespace
+
+class C2SoftVorbisDec::IntfImpl : public SimpleInterface<void>::BaseParams {
 public:
     explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
-        : C2InterfaceHelper(helper) {
-
+        : SimpleInterface<void>::BaseParams(
+                helper,
+                COMPONENT_NAME,
+                C2Component::KIND_DECODER,
+                C2Component::DOMAIN_AUDIO,
+                MEDIA_MIMETYPE_AUDIO_VORBIS) {
+        noPrivateBuffers();
+        noInputReferences();
+        noOutputReferences();
+        noInputLatency();
+        noTimeStretch();
         setDerivedInstance(this);
 
         addParameter(
-                DefineParam(mInputFormat, C2_PARAMKEY_INPUT_STREAM_BUFFER_TYPE)
-                .withConstValue(new C2StreamBufferTypeSetting::input(0u, C2BufferData::LINEAR))
-                .build());
-
-        addParameter(
-                DefineParam(mOutputFormat, C2_PARAMKEY_OUTPUT_STREAM_BUFFER_TYPE)
-                .withConstValue(new C2StreamBufferTypeSetting::output(0u, C2BufferData::LINEAR))
-                .build());
-
-        addParameter(
-                DefineParam(mInputMediaType, C2_PARAMKEY_INPUT_MEDIA_TYPE)
-                .withConstValue(AllocSharedString<C2PortMediaTypeSetting::input>(
-                        MEDIA_MIMETYPE_AUDIO_VORBIS))
-                .build());
-
-        addParameter(
-                DefineParam(mOutputMediaType, C2_PARAMKEY_OUTPUT_MEDIA_TYPE)
-                .withConstValue(AllocSharedString<C2PortMediaTypeSetting::output>(
-                        MEDIA_MIMETYPE_AUDIO_RAW))
+                DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
+                .withConstValue(new C2ComponentAttributesSetting(
+                    C2Component::ATTRIB_IS_TEMPORAL))
                 .build());
 
         addParameter(
@@ -94,10 +91,6 @@
     }
 
 private:
-    std::shared_ptr<C2StreamBufferTypeSetting::input> mInputFormat;
-    std::shared_ptr<C2StreamBufferTypeSetting::output> mOutputFormat;
-    std::shared_ptr<C2PortMediaTypeSetting::input> mInputMediaType;
-    std::shared_ptr<C2PortMediaTypeSetting::output> mOutputMediaType;
     std::shared_ptr<C2StreamSampleRateInfo::output> mSampleRate;
     std::shared_ptr<C2StreamChannelCountInfo::output> mChannelCount;
     std::shared_ptr<C2StreamBitrateInfo::input> mBitrate;
diff --git a/media/codec2/components/vpx/Android.bp b/media/codec2/components/vpx/Android.bp
index abfd379..34f5753 100644
--- a/media/codec2/components/vpx/Android.bp
+++ b/media/codec2/components/vpx/Android.bp
@@ -7,7 +7,7 @@
 
     srcs: ["C2SoftVpxDec.cpp"],
 
-    static_libs: ["libvpx"],
+    shared_libs: ["libvpx"],
 
     cflags: [
         "-DVP9",
@@ -23,7 +23,7 @@
 
     srcs: ["C2SoftVpxDec.cpp"],
 
-    static_libs: ["libvpx"],
+    shared_libs: ["libvpx"],
 }
 
 cc_library_shared {
@@ -38,7 +38,7 @@
         "C2SoftVpxEnc.cpp",
     ],
 
-    static_libs: ["libvpx"],
+    shared_libs: ["libvpx"],
 
     cflags: ["-DVP9"],
 }
@@ -55,6 +55,6 @@
         "C2SoftVpxEnc.cpp",
     ],
 
-    static_libs: ["libvpx"],
+    shared_libs: ["libvpx"],
 }
 
diff --git a/media/codec2/components/vpx/C2SoftVp8Enc.cpp b/media/codec2/components/vpx/C2SoftVp8Enc.cpp
index 0ae717a..f18f5d0 100644
--- a/media/codec2/components/vpx/C2SoftVp8Enc.cpp
+++ b/media/codec2/components/vpx/C2SoftVp8Enc.cpp
@@ -23,8 +23,6 @@
 
 namespace android {
 
-constexpr char COMPONENT_NAME[] = "c2.android.vp8.encoder";
-
 C2SoftVp8Enc::C2SoftVp8Enc(const char* name, c2_node_id_t id,
                            const std::shared_ptr<IntfImpl>& intfImpl)
     : C2SoftVpxEnc(name, id, intfImpl), mDCTPartitions(0), mProfile(1) {}
diff --git a/media/codec2/components/vpx/C2SoftVp9Enc.cpp b/media/codec2/components/vpx/C2SoftVp9Enc.cpp
index b26170f..740dbda 100644
--- a/media/codec2/components/vpx/C2SoftVp9Enc.cpp
+++ b/media/codec2/components/vpx/C2SoftVp9Enc.cpp
@@ -23,8 +23,6 @@
 
 namespace android {
 
-constexpr char COMPONENT_NAME[] = "c2.android.vp9.encoder";
-
 C2SoftVp9Enc::C2SoftVp9Enc(const char* name, c2_node_id_t id,
                            const std::shared_ptr<IntfImpl>& intfImpl)
     : C2SoftVpxEnc(name, id, intfImpl),
diff --git a/media/codec2/components/vpx/C2SoftVpxDec.cpp b/media/codec2/components/vpx/C2SoftVpxDec.cpp
index 3120f7a..a52ca15 100644
--- a/media/codec2/components/vpx/C2SoftVpxDec.cpp
+++ b/media/codec2/components/vpx/C2SoftVpxDec.cpp
@@ -18,6 +18,8 @@
 #define LOG_TAG "C2SoftVpxDec"
 #include <log/log.h>
 
+#include <algorithm>
+
 #include <media/stagefright/foundation/AUtils.h>
 #include <media/stagefright/foundation/MediaDefs.h>
 
@@ -303,13 +305,43 @@
 #endif
 };
 
+C2SoftVpxDec::ConverterThread::ConverterThread(
+        const std::shared_ptr<Mutexed<ConversionQueue>> &queue)
+    : Thread(false), mQueue(queue) {}
+
+bool C2SoftVpxDec::ConverterThread::threadLoop() {
+    Mutexed<ConversionQueue>::Locked queue(*mQueue);
+    if (queue->entries.empty()) {
+        queue.waitForCondition(queue->cond);
+        if (queue->entries.empty()) {
+            return true;
+        }
+    }
+    std::function<void()> convert = queue->entries.front();
+    queue->entries.pop_front();
+    if (!queue->entries.empty()) {
+        queue->cond.signal();
+    }
+    queue.unlock();
+
+    convert();
+
+    queue.lock();
+    if (--queue->numPending == 0u) {
+        queue->cond.broadcast();
+    }
+    return true;
+}
+
 C2SoftVpxDec::C2SoftVpxDec(
         const char *name,
         c2_node_id_t id,
         const std::shared_ptr<IntfImpl> &intfImpl)
     : SimpleC2Component(std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
       mIntf(intfImpl),
-      mCodecCtx(nullptr) {
+      mCodecCtx(nullptr),
+      mCoreCount(1),
+      mQueue(new Mutexed<ConversionQueue>) {
 }
 
 C2SoftVpxDec::~C2SoftVpxDec() {
@@ -399,7 +431,7 @@
 
     vpx_codec_dec_cfg_t cfg;
     memset(&cfg, 0, sizeof(vpx_codec_dec_cfg_t));
-    cfg.threads = GetCPUCoreCount();
+    cfg.threads = mCoreCount = GetCPUCoreCount();
 
     vpx_codec_flags_t flags;
     memset(&flags, 0, sizeof(vpx_codec_flags_t));
@@ -413,6 +445,18 @@
         return UNKNOWN_ERROR;
     }
 
+    if (mMode == MODE_VP9) {
+        using namespace std::string_literals;
+        for (int i = 0; i < mCoreCount; ++i) {
+            sp<ConverterThread> thread(new ConverterThread(mQueue));
+            mConverterThreads.push_back(thread);
+            if (thread->run(("vp9conv #"s + std::to_string(i)).c_str(),
+                            ANDROID_PRIORITY_AUDIO) != OK) {
+                return UNKNOWN_ERROR;
+            }
+        }
+    }
+
     return OK;
 }
 
@@ -422,6 +466,21 @@
         delete mCodecCtx;
         mCodecCtx = nullptr;
     }
+    bool running = true;
+    for (const sp<ConverterThread> &thread : mConverterThreads) {
+        thread->requestExit();
+    }
+    while (running) {
+        mQueue->lock()->cond.broadcast();
+        running = false;
+        for (const sp<ConverterThread> &thread : mConverterThreads) {
+            if (thread->isRunning()) {
+                running = true;
+                break;
+            }
+        }
+    }
+    mConverterThreads.clear();
 
     return OK;
 }
@@ -559,12 +618,11 @@
     }
 }
 
-static void copyOutputBufferToYV12Frame(uint8_t *dst,
-        const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV,
+static void copyOutputBufferToYuvPlanarFrame(
+        uint8_t *dst, const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV,
         size_t srcYStride, size_t srcUStride, size_t srcVStride,
+        size_t dstYStride, size_t dstUVStride,
         uint32_t width, uint32_t height) {
-    size_t dstYStride = align(width, 16);
-    size_t dstUVStride = align(dstYStride / 2, 16);
     uint8_t *dstStart = dst;
 
     for (size_t i = 0; i < height; ++i) {
@@ -654,11 +712,10 @@
 static void convertYUV420Planar16ToYUV420Planar(uint8_t *dst,
         const uint16_t *srcY, const uint16_t *srcU, const uint16_t *srcV,
         size_t srcYStride, size_t srcUStride, size_t srcVStride,
-        size_t dstStride, size_t width, size_t height) {
+        size_t dstYStride, size_t dstUVStride, size_t width, size_t height) {
 
     uint8_t *dstY = (uint8_t *)dst;
-    size_t dstYSize = dstStride * height;
-    size_t dstUVStride = align(dstStride / 2, 16);
+    size_t dstYSize = dstYStride * height;
     size_t dstUVSize = dstUVStride * height / 2;
     uint8_t *dstV = dstY + dstYSize;
     uint8_t *dstU = dstV + dstUVSize;
@@ -669,7 +726,7 @@
         }
 
         srcY += srcYStride;
-        dstY += dstStride;
+        dstY += dstYStride;
     }
 
     for (size_t y = 0; y < (height + 1) / 2; ++y) {
@@ -751,6 +808,9 @@
     size_t srcYStride = img->stride[VPX_PLANE_Y];
     size_t srcUStride = img->stride[VPX_PLANE_U];
     size_t srcVStride = img->stride[VPX_PLANE_V];
+    C2PlanarLayout layout = wView.layout();
+    size_t dstYStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
+    size_t dstUVStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
 
     if (img->fmt == VPX_IMG_FMT_I42016) {
         const uint16_t *srcY = (const uint16_t *)img->planes[VPX_PLANE_Y];
@@ -758,22 +818,45 @@
         const uint16_t *srcV = (const uint16_t *)img->planes[VPX_PLANE_V];
 
         if (format == HAL_PIXEL_FORMAT_RGBA_1010102) {
-            convertYUV420Planar16ToY410((uint32_t *)dst, srcY, srcU, srcV, srcYStride / 2,
-                                    srcUStride / 2, srcVStride / 2,
-                                    align(mWidth, 16),
-                                    mWidth, mHeight);
+            Mutexed<ConversionQueue>::Locked queue(*mQueue);
+            size_t i = 0;
+            constexpr size_t kHeight = 64;
+            for (; i < mHeight; i += kHeight) {
+                queue->entries.push_back(
+                        [dst, srcY, srcU, srcV,
+                         srcYStride, srcUStride, srcVStride, dstYStride,
+                         width = mWidth, height = std::min(mHeight - i, kHeight)] {
+                            convertYUV420Planar16ToY410(
+                                    (uint32_t *)dst, srcY, srcU, srcV, srcYStride / 2,
+                                    srcUStride / 2, srcVStride / 2, dstYStride / sizeof(uint32_t),
+                                    width, height);
+                        });
+                srcY += srcYStride / 2 * kHeight;
+                srcU += srcUStride / 2 * (kHeight / 2);
+                srcV += srcVStride / 2 * (kHeight / 2);
+                dst += dstYStride * kHeight;
+            }
+            CHECK_EQ(0u, queue->numPending);
+            queue->numPending = queue->entries.size();
+            while (queue->numPending > 0) {
+                queue->cond.signal();
+                queue.waitForCondition(queue->cond);
+            }
         } else {
             convertYUV420Planar16ToYUV420Planar(dst, srcY, srcU, srcV, srcYStride / 2,
-                                    srcUStride / 2, srcVStride / 2,
-                                    align(mWidth, 16),
-                                    mWidth, mHeight);
+                                                srcUStride / 2, srcVStride / 2,
+                                                dstYStride, dstUVStride,
+                                                mWidth, mHeight);
         }
     } else {
         const uint8_t *srcY = (const uint8_t *)img->planes[VPX_PLANE_Y];
         const uint8_t *srcU = (const uint8_t *)img->planes[VPX_PLANE_U];
         const uint8_t *srcV = (const uint8_t *)img->planes[VPX_PLANE_V];
-        copyOutputBufferToYV12Frame(dst, srcY, srcU, srcV,
-                                srcYStride, srcUStride, srcVStride, mWidth, mHeight);
+        copyOutputBufferToYuvPlanarFrame(
+                dst, srcY, srcU, srcV,
+                srcYStride, srcUStride, srcVStride,
+                dstYStride, dstUVStride,
+                mWidth, mHeight);
     }
     finishWork(*(int64_t *)img->user_priv, work, std::move(block));
     return true;
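
The C2SoftVpxDec change above parallelizes the 10-bit VP9 to Y410 conversion by slicing each frame into 64-row bands, fanning the bands out to a pool of ConverterThread workers, and blocking until numPending drops back to zero. A minimal standalone sketch of that fan-out/fan-in shape, assuming plain std::mutex and std::condition_variable in place of the Mutexed<>/Condition helpers the patch relies on:

    #include <algorithm>
    #include <condition_variable>
    #include <cstddef>
    #include <functional>
    #include <list>
    #include <mutex>

    struct ConversionQueue {
        std::mutex lock;
        std::condition_variable cond;
        std::list<std::function<void()>> entries;
        size_t numPending = 0;
        bool exiting = false;
    };

    // Worker loop: pop one band-conversion job at a time, run it outside the
    // lock, and wake the producer when the last pending job completes.
    void converterLoop(ConversionQueue *q) {
        for (;;) {
            std::function<void()> job;
            {
                std::unique_lock<std::mutex> l(q->lock);
                q->cond.wait(l, [q] { return q->exiting || !q->entries.empty(); });
                if (q->exiting) return;
                job = std::move(q->entries.front());
                q->entries.pop_front();
            }
            job();
            std::lock_guard<std::mutex> l(q->lock);
            if (--q->numPending == 0) q->cond.notify_all();
        }
    }

    // Producer: queue one job per band of rows, then wait for all of them.
    void convertInBands(ConversionQueue *q, size_t height, size_t bandHeight,
                        const std::function<void(size_t, size_t)> &convertBand) {
        std::unique_lock<std::mutex> l(q->lock);
        for (size_t row = 0; row < height; row += bandHeight) {
            size_t rows = std::min(bandHeight, height - row);
            q->entries.push_back([convertBand, row, rows] { convertBand(row, rows); });
        }
        q->numPending = q->entries.size();
        q->cond.notify_all();
        q->cond.wait(l, [q] { return q->numPending == 0; });
    }
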
diff --git a/media/codec2/components/vpx/C2SoftVpxDec.h b/media/codec2/components/vpx/C2SoftVpxDec.h
index 60c8484..e51bcee 100644
--- a/media/codec2/components/vpx/C2SoftVpxDec.h
+++ b/media/codec2/components/vpx/C2SoftVpxDec.h
@@ -50,6 +50,19 @@
         MODE_VP9,
     } mMode;
 
+    struct ConversionQueue;
+
+    class ConverterThread : public Thread {
+    public:
+        explicit ConverterThread(
+                const std::shared_ptr<Mutexed<ConversionQueue>> &queue);
+        ~ConverterThread() override = default;
+        bool threadLoop() override;
+
+    private:
+        std::shared_ptr<Mutexed<ConversionQueue>> mQueue;
+    };
+
     std::shared_ptr<IntfImpl> mIntf;
     vpx_codec_ctx_t *mCodecCtx;
     bool mFrameParallelMode;  // Frame parallel is only supported by VP9 decoder.
@@ -59,6 +72,15 @@
     bool mSignalledOutputEos;
     bool mSignalledError;
 
+    int mCoreCount;
+    struct ConversionQueue {
+        std::list<std::function<void()>> entries;
+        Condition cond;
+        size_t numPending{0u};
+    };
+    std::shared_ptr<Mutexed<ConversionQueue>> mQueue;
+    std::vector<sp<ConverterThread>> mConverterThreads;
+
     status_t initDecoder();
     status_t destroyDecoder();
     void finishWork(uint64_t index, const std::unique_ptr<C2Work> &work,
diff --git a/media/codec2/components/vpx/C2SoftVpxEnc.h b/media/codec2/components/vpx/C2SoftVpxEnc.h
index 5591a49..90758f9 100644
--- a/media/codec2/components/vpx/C2SoftVpxEnc.h
+++ b/media/codec2/components/vpx/C2SoftVpxEnc.h
@@ -222,47 +222,47 @@
      C2_DO_NOT_COPY(C2SoftVpxEnc);
 };
 
-class C2SoftVpxEnc::IntfImpl : public C2InterfaceHelper {
+namespace {
+
+#ifdef VP9
+constexpr char COMPONENT_NAME[] = "c2.android.vp9.encoder";
+const char *MEDIA_MIMETYPE_VIDEO = MEDIA_MIMETYPE_VIDEO_VP9;
+#else
+constexpr char COMPONENT_NAME[] = "c2.android.vp8.encoder";
+const char *MEDIA_MIMETYPE_VIDEO = MEDIA_MIMETYPE_VIDEO_VP8;
+#endif
+
+} // namespace
+
+class C2SoftVpxEnc::IntfImpl : public SimpleInterface<void>::BaseParams {
    public:
-    explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper>& helper)
-        : C2InterfaceHelper(helper) {
+    explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
+        : SimpleInterface<void>::BaseParams(
+                helper,
+                COMPONENT_NAME,
+                C2Component::KIND_ENCODER,
+                C2Component::DOMAIN_VIDEO,
+                MEDIA_MIMETYPE_VIDEO) {
+        noPrivateBuffers(); // TODO: account for our buffers here
+        noInputReferences();
+        noOutputReferences();
+        noInputLatency();
+        noTimeStretch();
         setDerivedInstance(this);
 
         addParameter(
-            DefineParam(mInputFormat, C2_PARAMKEY_INPUT_STREAM_BUFFER_TYPE)
-                .withConstValue(
-                    new C2StreamBufferTypeSetting::input(0u, C2BufferData::GRAPHIC))
+                DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
+                .withConstValue(new C2ComponentAttributesSetting(
+                    C2Component::ATTRIB_IS_TEMPORAL))
                 .build());
 
         addParameter(
-            DefineParam(mOutputFormat, C2_PARAMKEY_OUTPUT_STREAM_BUFFER_TYPE)
-                .withConstValue(
-                    new C2StreamBufferTypeSetting::output(0u, C2BufferData::LINEAR))
+                DefineParam(mUsage, C2_PARAMKEY_INPUT_STREAM_USAGE)
+                .withConstValue(new C2StreamUsageTuning::input(
+                        0u, (uint64_t)C2MemoryUsage::CPU_READ))
                 .build());
 
         addParameter(
-            DefineParam(mInputMediaType, C2_PARAMKEY_INPUT_MEDIA_TYPE)
-                .withConstValue(AllocSharedString<C2PortMediaTypeSetting::input>(
-                    MEDIA_MIMETYPE_VIDEO_RAW))
-                .build());
-
-        addParameter(
-            DefineParam(mOutputMediaType, C2_PARAMKEY_OUTPUT_MEDIA_TYPE)
-                .withConstValue(AllocSharedString<C2PortMediaTypeSetting::output>(
-#ifdef VP9
-                    MEDIA_MIMETYPE_VIDEO_VP9
-#else
-                    MEDIA_MIMETYPE_VIDEO_VP8
-#endif
-                    ))
-                .build());
-
-        addParameter(DefineParam(mUsage, C2_PARAMKEY_INPUT_STREAM_USAGE)
-                         .withConstValue(new C2StreamUsageTuning::input(
-                             0u, (uint64_t)C2MemoryUsage::CPU_READ))
-                         .build());
-
-        addParameter(
             DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
                 .withDefault(new C2StreamPictureSizeInfo::input(0u, 320, 240))
                 .withFields({
@@ -416,10 +416,6 @@
     }
 
    private:
-    std::shared_ptr<C2StreamBufferTypeSetting::input> mInputFormat;
-    std::shared_ptr<C2StreamBufferTypeSetting::output> mOutputFormat;
-    std::shared_ptr<C2PortMediaTypeSetting::input> mInputMediaType;
-    std::shared_ptr<C2PortMediaTypeSetting::output> mOutputMediaType;
     std::shared_ptr<C2StreamUsageTuning::input> mUsage;
     std::shared_ptr<C2StreamPictureSizeInfo::input> mSize;
     std::shared_ptr<C2StreamFrameRateInfo::output> mFrameRate;
diff --git a/media/codec2/components/xaac/C2SoftXaacDec.cpp b/media/codec2/components/xaac/C2SoftXaacDec.cpp
index ed730c3..a3ebadb 100644
--- a/media/codec2/components/xaac/C2SoftXaacDec.cpp
+++ b/media/codec2/components/xaac/C2SoftXaacDec.cpp
@@ -56,35 +56,31 @@
 
 namespace android {
 
+namespace {
+
 constexpr char COMPONENT_NAME[] = "c2.android.xaac.decoder";
 
-class C2SoftXaacDec::IntfImpl : public C2InterfaceHelper {
+}  // namespace
+
+class C2SoftXaacDec::IntfImpl : public SimpleInterface<void>::BaseParams {
 public:
     explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
-        : C2InterfaceHelper(helper) {
-
-        setDerivedInstance(this);
+        : SimpleInterface<void>::BaseParams(
+                helper,
+                COMPONENT_NAME,
+                C2Component::KIND_DECODER,
+                C2Component::DOMAIN_AUDIO,
+                MEDIA_MIMETYPE_AUDIO_AAC) {
+        noPrivateBuffers();
+        noInputReferences();
+        noOutputReferences();
+        noInputLatency();
+        noTimeStretch();
 
         addParameter(
-                DefineParam(mInputFormat, C2_PARAMKEY_INPUT_STREAM_BUFFER_TYPE)
-                .withConstValue(new C2StreamBufferTypeSetting::input(0u, C2BufferData::LINEAR))
-                .build());
-
-        addParameter(
-                DefineParam(mOutputFormat, C2_PARAMKEY_OUTPUT_STREAM_BUFFER_TYPE)
-                .withConstValue(new C2StreamBufferTypeSetting::output(0u, C2BufferData::LINEAR))
-                .build());
-
-        addParameter(
-                DefineParam(mInputMediaType, C2_PARAMKEY_INPUT_MEDIA_TYPE)
-                .withConstValue(AllocSharedString<C2PortMediaTypeSetting::input>(
-                        MEDIA_MIMETYPE_AUDIO_AAC))
-                .build());
-
-        addParameter(
-                DefineParam(mOutputMediaType, C2_PARAMKEY_OUTPUT_MEDIA_TYPE)
-                .withConstValue(AllocSharedString<C2PortMediaTypeSetting::output>(
-                        MEDIA_MIMETYPE_AUDIO_RAW))
+                DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
+                .withConstValue(new C2ComponentAttributesSetting(
+                    C2Component::ATTRIB_IS_TEMPORAL))
                 .build());
 
         addParameter(
@@ -218,10 +214,6 @@
     int32_t getDrcEffectType() const { return mDrcEffectType->value; }
 
 private:
-    std::shared_ptr<C2StreamBufferTypeSetting::input> mInputFormat;
-    std::shared_ptr<C2StreamBufferTypeSetting::output> mOutputFormat;
-    std::shared_ptr<C2PortMediaTypeSetting::input> mInputMediaType;
-    std::shared_ptr<C2PortMediaTypeSetting::output> mOutputMediaType;
     std::shared_ptr<C2StreamSampleRateInfo::output> mSampleRate;
     std::shared_ptr<C2StreamChannelCountInfo::output> mChannelCount;
     std::shared_ptr<C2StreamBitrateInfo::input> mBitrate;
diff --git a/media/codec2/core/include/media/stagefright/codec2/1.0/InputSurface.h b/media/codec2/core/include/media/stagefright/codec2/1.0/InputSurface.h
index b011a06..0a82a68 100644
--- a/media/codec2/core/include/media/stagefright/codec2/1.0/InputSurface.h
+++ b/media/codec2/core/include/media/stagefright/codec2/1.0/InputSurface.h
@@ -20,7 +20,6 @@
 #include <memory>
 
 #include <C2Component.h>
-#include <media/stagefright/bqhelper/WGraphicBufferProducer.h>
 #include <media/stagefright/codec2/1.0/InputSurfaceConnection.h>
 
 namespace android {
diff --git a/media/codec2/core/include/media/stagefright/codec2/1.0/InputSurfaceConnection.h b/media/codec2/core/include/media/stagefright/codec2/1.0/InputSurfaceConnection.h
index b24a416..5eae3af 100644
--- a/media/codec2/core/include/media/stagefright/codec2/1.0/InputSurfaceConnection.h
+++ b/media/codec2/core/include/media/stagefright/codec2/1.0/InputSurfaceConnection.h
@@ -21,7 +21,6 @@
 
 #include <C2Component.h>
 #include <media/stagefright/bqhelper/GraphicBufferSource.h>
-#include <media/stagefright/bqhelper/WGraphicBufferProducer.h>
 #include <media/stagefright/codec2/1.0/InputSurfaceConnection.h>
 
 namespace android {
diff --git a/media/codec2/hidl/1.0/utils/Android.bp b/media/codec2/hidl/1.0/utils/Android.bp
index b73f0c8..63fe36b 100644
--- a/media/codec2/hidl/1.0/utils/Android.bp
+++ b/media/codec2/hidl/1.0/utils/Android.bp
@@ -48,9 +48,6 @@
 cc_library {
     name: "libcodec2_hidl@1.0",
     vendor_available: true,
-    vndk: {
-        enabled: true,
-    },
 
     defaults: ["hidl_defaults"],
 
diff --git a/media/codec2/hidl/1.0/utils/ComponentStore.cpp b/media/codec2/hidl/1.0/utils/ComponentStore.cpp
index d6f84b4..1e0a190 100644
--- a/media/codec2/hidl/1.0/utils/ComponentStore.cpp
+++ b/media/codec2/hidl/1.0/utils/ComponentStore.cpp
@@ -23,7 +23,6 @@
 #include <codec2/hidl/1.0/types.h>
 
 #include <android-base/file.h>
-#include <gui/bufferqueue/2.0/B2HGraphicBufferProducer.h>
 #include <media/stagefright/bqhelper/GraphicBufferSource.h>
 #include <utils/Errors.h>
 
diff --git a/media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/InputSurface.h b/media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/InputSurface.h
index 34ea959..29ed7ff 100644
--- a/media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/InputSurface.h
+++ b/media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/InputSurface.h
@@ -22,7 +22,6 @@
 #include <android/hardware/graphics/bufferqueue/2.0/IGraphicBufferProducer.h>
 #include <android/hardware/media/c2/1.0/IInputSink.h>
 #include <android/hardware/media/c2/1.0/IInputSurface.h>
-#include <gui/IGraphicBufferProducer.h>
 #include <hidl/Status.h>
 #include <media/stagefright/bqhelper/GraphicBufferSource.h>
 
diff --git a/media/codec2/hidl/1.0/vts/functional/audio/VtsHidlC2V1_0TargetAudioDecTest.cpp b/media/codec2/hidl/1.0/vts/functional/audio/VtsHidlC2V1_0TargetAudioDecTest.cpp
index 89947d4..6469735 100644
--- a/media/codec2/hidl/1.0/vts/functional/audio/VtsHidlC2V1_0TargetAudioDecTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/audio/VtsHidlC2V1_0TargetAudioDecTest.cpp
@@ -117,17 +117,10 @@
         const size_t kNumStringToName =
             sizeof(kStringToName) / sizeof(kStringToName[0]);
 
-        std::string substring;
-        std::string comp;
-        substring = std::string(gEnv->getComponent());
-        /* TODO: better approach to find the component */
-        /* "c2.android." => 11th position */
-        size_t pos = 11;
-        size_t len = substring.find(".decoder", pos);
-        comp = substring.substr(pos, len - pos);
-
+        // Find the component type
+        std::string comp = std::string(gEnv->getComponent());
         for (size_t i = 0; i < kNumStringToName; ++i) {
-            if (!strcasecmp(comp.c_str(), kStringToName[i].Name)) {
+            if (strcasestr(comp.c_str(), kStringToName[i].Name)) {
                 mCompName = kStringToName[i].CompName;
                 break;
             }
diff --git a/media/codec2/hidl/1.0/vts/functional/audio/VtsHidlC2V1_0TargetAudioEncTest.cpp b/media/codec2/hidl/1.0/vts/functional/audio/VtsHidlC2V1_0TargetAudioEncTest.cpp
index 0946fa6..01baf7e 100644
--- a/media/codec2/hidl/1.0/vts/functional/audio/VtsHidlC2V1_0TargetAudioEncTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/audio/VtsHidlC2V1_0TargetAudioEncTest.cpp
@@ -103,17 +103,10 @@
         const size_t kNumStringToName =
             sizeof(kStringToName) / sizeof(kStringToName[0]);
 
-        std::string substring;
-        std::string comp;
-        substring = std::string(gEnv->getComponent());
-        /* TODO: better approach to find the component */
-        /* "c2.android." => 11th position */
-        size_t pos = 11;
-        size_t len = substring.find(".encoder", pos);
-        comp = substring.substr(pos, len - pos);
-
+        // Find the component type
+        std::string comp = std::string(gEnv->getComponent());
         for (size_t i = 0; i < kNumStringToName; ++i) {
-            if (!strcasecmp(comp.c_str(), kStringToName[i].Name)) {
+            if (strcasestr(comp.c_str(), kStringToName[i].Name)) {
                 mCompName = kStringToName[i].CompName;
                 break;
             }
diff --git a/media/codec2/hidl/1.0/vts/functional/common/README.md b/media/codec2/hidl/1.0/vts/functional/common/README.md
index da569a8..3deab10 100644
--- a/media/codec2/hidl/1.0/vts/functional/common/README.md
+++ b/media/codec2/hidl/1.0/vts/functional/common/README.md
@@ -1,22 +1,31 @@
-## Codec2 Hal @ 1.0 tests ##
+## Codec2 VTS Hal @ 1.0 tests ##
 ---
 #### master :
 Functionality of master is to enumerate all the Codec2 components available in C2 media service.
 
-usage: MtsHidlC2V1\_0TargetMasterTest -I software
+usage: VtsHidlC2V1\_0TargetMasterTest -I default
 
 #### component :
-Functionality of component is to test common functionality across all the Codec2 components available in C2 media service. For a standard C2 component, these tests are expected to pass.
+Functionality of component test is to validate common functionality across all the Codec2 components available in C2 media service. For a standard C2 component, these tests are expected to pass.
 
-usage: MtsHidlC2V1\_0TargetComponentTest -I software -C <comp name>
+usage: VtsHidlC2V1\_0TargetComponentTest -I software -C <comp name>
+example: VtsHidlC2V1\_0TargetComponentTest -I software -C c2.android.vorbis.decoder
 
 #### audio :
-Functionality of audio test is to validate audio specific functionality Codec2 components. The resource files for this test are taken from hardware/interfaces/media/res. The path to these files on the device is required to be given for bitstream tests.
+Functionality of audio test is to validate audio specific functionality of Codec2 components. The resource files for this test are taken from media/codec2/hidl/1.0/vts/functional/res. The path to these files on the device is required to be given for bitstream tests.
 
-usage: MtsHidlC2V1\_0TargetAudioDecTest -I software -C <comp name> -P /sdcard/media
+usage: VtsHidlC2V1\_0TargetAudioDecTest -I default -C <comp name> -P /sdcard/res/
+usage: VtsHidlC2V1\_0TargetAudioEncTest -I software -C <comp name> -P /sdcard/res/
+
+example: VtsHidlC2V1\_0TargetAudioDecTest -I software -C c2.android.flac.decoder -P /sdcard/res/
+example: VtsHidlC2V1\_0TargetAudioEncTest -I software -C c2.android.opus.encoder -P /sdcard/res/
 
 #### video :
-Functionality of video test is to validate video specific functionality Codec2 components. The resource files for this test are taken from hardware/interfaces/media/res. The path to these files on the device is required to be given for bitstream tests.
+Functionality of video test is to validate video specific functionality of Codec2 components. The resource files for this test are taken from media/codec2/hidl/1.0/vts/functional/res. The path to these files on the device is required to be given for bitstream tests.
 
-usage: MtsHidlC2V1\_0TargetVideoDecTest -I software -C <comp name> -P /sdcard/media
+usage: VtsHidlC2V1\_0TargetVideoDecTest -I default -C <comp name> -P /sdcard/res/
+usage: VtsHidlC2V1\_0TargetVideoEncTest -I software -C <comp name> -P /sdcard/res/
+
+example: VtsHidlC2V1\_0TargetVideoDecTest -I software -C c2.android.avc.decoder -P /sdcard/res/
+example: VtsHidlC2V1\_0TargetVideoEncTest -I software -C c2.android.vp9.encoder -P /sdcard/res/
 
diff --git a/media/codec2/hidl/1.0/vts/functional/video/VtsHidlC2V1_0TargetVideoDecTest.cpp b/media/codec2/hidl/1.0/vts/functional/video/VtsHidlC2V1_0TargetVideoDecTest.cpp
index dffcb6e..33fa848 100644
--- a/media/codec2/hidl/1.0/vts/functional/video/VtsHidlC2V1_0TargetVideoDecTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/video/VtsHidlC2V1_0TargetVideoDecTest.cpp
@@ -107,17 +107,10 @@
         const size_t kNumStringToName =
             sizeof(kStringToName) / sizeof(kStringToName[0]);
 
-        std::string substring;
-        std::string comp;
-        substring = std::string(gEnv->getComponent());
-        /* TODO: better approach to find the component */
-        /* "c2.android." => 11th position */
-        size_t pos = 11;
-        size_t len = substring.find(".decoder", pos);
-        comp = substring.substr(pos, len - pos);
-
+        // Find the component type
+        std::string comp = std::string(gEnv->getComponent());
         for (size_t i = 0; i < kNumStringToName; ++i) {
-            if (!strcasecmp(comp.c_str(), kStringToName[i].Name)) {
+            if (strcasestr(comp.c_str(), kStringToName[i].Name)) {
                 mCompName = kStringToName[i].CompName;
                 break;
             }
@@ -445,6 +438,9 @@
     int bytesCount = 0;
     uint32_t flags = 0;
     uint32_t timestamp = 0;
+    mTimestampDevTest = true;
+    mFlushedIndices.clear();
+    mTimestampUslist.clear();
     while (1) {
         if (!(eleInfo >> bytesCount)) break;
         eleInfo >> flags;
diff --git a/media/codec2/hidl/1.0/vts/functional/video/VtsHidlC2V1_0TargetVideoEncTest.cpp b/media/codec2/hidl/1.0/vts/functional/video/VtsHidlC2V1_0TargetVideoEncTest.cpp
index 673dc85..6bcf840 100644
--- a/media/codec2/hidl/1.0/vts/functional/video/VtsHidlC2V1_0TargetVideoEncTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/video/VtsHidlC2V1_0TargetVideoEncTest.cpp
@@ -101,17 +101,10 @@
         const size_t kNumStringToName =
             sizeof(kStringToName) / sizeof(kStringToName[0]);
 
-        std::string substring;
-        std::string comp;
-        substring = std::string(gEnv->getComponent());
-        /* TODO: better approach to find the component */
-        /* "c2.android." => 11th position */
-        size_t pos = 11;
-        size_t len = substring.find(".encoder", pos);
-        comp = substring.substr(pos, len - pos);
-
+        // Find the component type
+        std::string comp = std::string(gEnv->getComponent());
         for (size_t i = 0; i < kNumStringToName; ++i) {
-            if (!strcasecmp(comp.c_str(), kStringToName[i].Name)) {
+            if (strcasestr(comp.c_str(), kStringToName[i].Name)) {
                 mCompName = kStringToName[i].CompName;
                 break;
             }
@@ -120,6 +113,8 @@
         mCsd = false;
         mFramesReceived = 0;
         mFailedWorkReceived = 0;
+        mTimestampUs = 0u;
+        mTimestampDevTest = false;
         if (mCompName == unknown_comp) mDisableTest = true;
         if (mDisableTest) std::cout << "[   WARN   ] Test Disabled \n";
     }
@@ -139,6 +134,45 @@
     void handleWorkDone(std::list<std::unique_ptr<C2Work>>& workItems) {
         for (std::unique_ptr<C2Work>& work : workItems) {
             if (!work->worklets.empty()) {
+                // For encoder components, the current timestamp always
+                // exceeds the previous timestamp.
+                typedef std::unique_lock<std::mutex> ULock;
+                if (!mTimestampUslist.empty()) {
+                    EXPECT_GE((work->worklets.front()
+                                   ->output.ordinal.timestamp.peeku()),
+                              mTimestampUs);
+                    mTimestampUs = work->worklets.front()
+                                       ->output.ordinal.timestamp.peeku();
+                    // Currently this lock is redundant, as mTimestampUslist is only initialized
+                    // before any work is queued to the component. Once an AdaptiveTest similar to
+                    // the one in the video decoder tests is added, this lock will be needed.
+                    ULock l(mQueueLock);
+
+                    if (mTimestampDevTest) {
+                        bool tsHit = false;
+                        std::list<uint64_t>::iterator it =
+                            mTimestampUslist.begin();
+                        while (it != mTimestampUslist.end()) {
+                            if (*it == mTimestampUs) {
+                                mTimestampUslist.erase(it);
+                                tsHit = true;
+                                break;
+                            }
+                            it++;
+                        }
+                        if (tsHit == false) {
+                            if (mTimestampUslist.empty() == false) {
+                                EXPECT_EQ(tsHit, true)
+                                    << "TimeStamp not recognized";
+                            } else {
+                                std::cout
+                                    << "[   INFO   ] Received non-zero "
+                                       "output / TimeStamp not recognized \n";
+                            }
+                        }
+                    }
+                }
+
                 if (work->result != C2_OK) mFailedWorkReceived++;
                 workDone(mComponent, work, mFlushedIndices, mQueueLock,
                          mQueueCondition, mWorkQueue, mEos, mCsd,
@@ -161,10 +195,13 @@
     bool mCsd;
     bool mDisableTest;
     bool mConfig;
+    bool mTimestampDevTest;
     standardComp mCompName;
     uint32_t mFramesReceived;
     uint32_t mFailedWorkReceived;
+    uint64_t mTimestampUs;
 
+    std::list<uint64_t> mTimestampUslist;
     std::list<uint64_t> mFlushedIndices;
 
     C2BlockPool::local_id_t mBlockPoolId;
@@ -364,6 +401,18 @@
     ASSERT_EQ(eleStream.is_open(), true) << mURL << " file not found";
     ALOGV("mURL : %s", mURL);
 
+    mTimestampUs = 0;
+    mTimestampDevTest = true;
+    mFlushedIndices.clear();
+    mTimestampUslist.clear();
+    uint32_t inputFrames = ENC_NUM_FRAMES;
+    uint32_t timestamp = 0;
+    // Add input timestamp to timestampUslist
+    while (inputFrames) {
+        if (mTimestampDevTest) mTimestampUslist.push_back(timestamp);
+        timestamp += ENCODER_TIMESTAMP_INCREMENT;
+        inputFrames--;
+    }
     if (!setupConfigParam(nWidth, nHeight)) {
         std::cout << "[   WARN   ] Test Skipped \n";
         return;
@@ -375,7 +424,7 @@
                       0, ENC_NUM_FRAMES, nWidth, nHeight, false, signalEOS));
 
     // If EOS is not sent, sending empty input with EOS flag
-    uint32_t inputFrames = ENC_NUM_FRAMES;
+    inputFrames = ENC_NUM_FRAMES;
     if (!signalEOS) {
         ASSERT_NO_FATAL_FAILURE(
             waitOnInputConsumption(mQueueLock, mQueueCondition, mWorkQueue, 1));
@@ -407,6 +456,7 @@
         ASSERT_TRUE(false) << "CSD Buffer not expected";
     }
 
+    if (mTimestampDevTest) EXPECT_EQ(mTimestampUslist.empty(), true);
     ASSERT_EQ(mComponent->stop(), C2_OK);
 }
 
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index 715e78b..7669421 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -1203,6 +1203,9 @@
 
 void CCodecBufferChannel::onInputBufferDone(
         uint64_t frameIndex, size_t arrayIndex) {
+    if (mInputSurface) {
+        return;
+    }
     std::shared_ptr<C2Buffer> buffer =
             mPipelineWatcher.lock()->onInputBufferReleased(frameIndex, arrayIndex);
     bool newInputSlotAvailable;
diff --git a/media/codec2/sfplugin/Codec2InfoBuilder.cpp b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
index ead0a9b..c54c601 100644
--- a/media/codec2/sfplugin/Codec2InfoBuilder.cpp
+++ b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
@@ -212,6 +212,76 @@
     }
 }
 
+class Switch {
+    enum Flags : uint8_t {
+        // flags
+        IS_ENABLED = (1 << 0),
+        BY_DEFAULT = (1 << 1),
+    };
+
+    constexpr Switch(uint8_t flags) : mFlags(flags) {}
+
+    uint8_t mFlags;
+
+public:
+    // We have to create a class because of this bool conversion operator...
+    constexpr operator bool() const {
+        return mFlags & IS_ENABLED;
+    }
+
+    constexpr Switch operator!() const {
+        return Switch(mFlags ^ IS_ENABLED);
+    }
+
+    static constexpr Switch DISABLED() { return 0; };
+    static constexpr Switch ENABLED() { return IS_ENABLED; };
+    static constexpr Switch DISABLED_BY_DEFAULT() { return BY_DEFAULT; };
+    static constexpr Switch ENABLED_BY_DEFAULT() { return IS_ENABLED | BY_DEFAULT; };
+
+    const char *toString(const char *def = "??") const {
+        switch (mFlags) {
+        case 0:                         return "0";
+        case IS_ENABLED:                return "1";
+        case BY_DEFAULT:                return "(0)";
+        case IS_ENABLED | BY_DEFAULT:   return "(1)";
+        default: return def;
+        }
+    }
+
+};
+
+const char *asString(const Switch &s, const char *def = "??") {
+    return s.toString(def);
+}
+
+Switch isSettingEnabled(
+        std::string setting, const MediaCodecsXmlParser::AttributeMap &settings,
+        Switch def = Switch::DISABLED_BY_DEFAULT()) {
+    const auto enablement = settings.find(setting);
+    if (enablement == settings.end()) {
+        return def;
+    }
+    return enablement->second == "1" ? Switch::ENABLED() : Switch::DISABLED();
+}
+
+Switch isVariantEnabled(
+        std::string variant, const MediaCodecsXmlParser::AttributeMap &settings) {
+    return isSettingEnabled("variant-" + variant, settings);
+}
+
+Switch isVariantExpressionEnabled(
+        std::string exp, const MediaCodecsXmlParser::AttributeMap &settings) {
+    if (!exp.empty() && exp.at(0) == '!') {
+        return !isVariantEnabled(exp.substr(1, exp.size() - 1), settings);
+    }
+    return isVariantEnabled(exp, settings);
+}
+
+Switch isDomainEnabled(
+        std::string domain, const MediaCodecsXmlParser::AttributeMap &settings) {
+    return isSettingEnabled("domain-" + domain, settings);
+}
+
 } // unnamed namespace
 
 status_t Codec2InfoBuilder::buildMediaCodecList(MediaCodecListWriter* writer) {
@@ -243,20 +313,38 @@
     //
     // Note: Currently, OMX components have default rank 0x100, while all
     // Codec2.0 software components have default rank 0x200.
-    int option = ::android::base::GetIntProperty("debug.stagefright.ccodec", 1);
+    int option = ::android::base::GetIntProperty("debug.stagefright.ccodec", 4);
 
     // Obtain Codec2Client
     std::vector<Traits> traits = Codec2Client::ListComponents();
 
-    MediaCodecsXmlParser parser(
-            MediaCodecsXmlParser::defaultSearchDirs,
-            "media_codecs_c2.xml",
-            "media_codecs_performance_c2.xml");
+    // parse APEX XML first, followed by vendor XML
+    MediaCodecsXmlParser parser;
+    parser.parseXmlFilesInSearchDirs(
+            parser.getDefaultXmlNames(),
+            { "/apex/com.android.media.swcodec/etc" });
+
+    // TODO: remove these c2-specific files once product moved to default file names
+    parser.parseXmlFilesInSearchDirs(
+            { "media_codecs_c2.xml", "media_codecs_performance_c2.xml" });
+
+    // parse default XML files
+    parser.parseXmlFilesInSearchDirs();
+
     if (parser.getParsingStatus() != OK) {
         ALOGD("XML parser no good");
         return OK;
     }
 
+    MediaCodecsXmlParser::AttributeMap settings = parser.getServiceAttributeMap();
+    for (const auto &v : settings) {
+        if (!hasPrefix(v.first, "media-type-")
+                && !hasPrefix(v.first, "domain-")
+                && !hasPrefix(v.first, "variant-")) {
+            writer->addGlobalSetting(v.first.c_str(), v.second.c_str());
+        }
+    }
+
     for (const Traits& trait : traits) {
         C2Component::rank_t rank = trait.rank;
 
@@ -341,12 +429,42 @@
                 break;
             }
 
+            const MediaCodecsXmlParser::CodecProperties &codec =
+                parser.getCodecMap().at(nameOrAlias);
+
+            // verify that either the codec is explicitly enabled, or one of its domains is
+            bool codecEnabled = codec.quirkSet.find("attribute::disabled") == codec.quirkSet.end();
+            if (!codecEnabled) {
+                for (const std::string &domain : codec.domainSet) {
+                    const Switch enabled = isDomainEnabled(domain, settings);
+                    ALOGV("codec entry '%s' is in domain '%s' that is '%s'",
+                            nameOrAlias.c_str(), domain.c_str(), asString(enabled));
+                    if (enabled) {
+                        codecEnabled = true;
+                        break;
+                    }
+                }
+            }
+            // if codec has variants, also check that at least one of them is enabled
+            bool variantEnabled = codec.variantSet.empty();
+            for (const std::string &variant : codec.variantSet) {
+                const Switch enabled = isVariantExpressionEnabled(variant, settings);
+                ALOGV("codec entry '%s' has a variant '%s' that is '%s'",
+                        nameOrAlias.c_str(), variant.c_str(), asString(enabled));
+                if (enabled) {
+                    variantEnabled = true;
+                    break;
+                }
+            }
+            if (!codecEnabled || !variantEnabled) {
+                ALOGD("codec entry for '%s' is disabled", nameOrAlias.c_str());
+                continue;
+            }
+
             ALOGV("adding codec entry for '%s'", nameOrAlias.c_str());
             std::unique_ptr<MediaCodecInfoWriter> codecInfo = writer->addMediaCodecInfo();
             codecInfo->setName(nameOrAlias.c_str());
             codecInfo->setOwner(("codec2::" + trait.owner).c_str());
-            const MediaCodecsXmlParser::CodecProperties &codec =
-                parser.getCodecMap().at(nameOrAlias);
 
             bool encoder = trait.kind == C2Component::KIND_ENCODER;
             typename std::underlying_type<MediaCodecInfo::Attributes>::type attrs = 0;
@@ -373,6 +491,7 @@
                     rank = xmlRank;
                 }
             }
+            ALOGV("rank: %u", (unsigned)rank);
             codecInfo->setRank(rank);
 
             for (const std::string &alias : codec.aliases) {
@@ -382,12 +501,39 @@
 
             for (auto typeIt = codec.typeMap.begin(); typeIt != codec.typeMap.end(); ++typeIt) {
                 const std::string &mediaType = typeIt->first;
+                const Switch typeEnabled = isSettingEnabled(
+                        "media-type-" + mediaType, settings, Switch::ENABLED_BY_DEFAULT());
+                const Switch domainTypeEnabled = isSettingEnabled(
+                        "media-type-" + mediaType + (encoder ? "-encoder" : "-decoder"),
+                        settings, Switch::ENABLED_BY_DEFAULT());
+                ALOGV("type '%s-%s' is '%s/%s'",
+                        mediaType.c_str(), (encoder ? "encoder" : "decoder"),
+                        asString(typeEnabled), asString(domainTypeEnabled));
+                if (!typeEnabled || !domainTypeEnabled) {
+                    ALOGD("media type '%s' for codec entry '%s' is disabled", mediaType.c_str(),
+                            nameOrAlias.c_str());
+                    continue;
+                }
+
+                ALOGI("adding type '%s'", typeIt->first.c_str());
                 const MediaCodecsXmlParser::AttributeMap &attrMap = typeIt->second;
                 std::unique_ptr<MediaCodecInfo::CapabilitiesWriter> caps =
                     codecInfo->addMediaType(mediaType.c_str());
-                for (auto attrIt = attrMap.begin(); attrIt != attrMap.end(); ++attrIt) {
-                    std::string key, value;
-                    std::tie(key, value) = *attrIt;
+                for (const auto &v : attrMap) {
+                    std::string key = v.first;
+                    std::string value = v.second;
+
+                    size_t variantSep = key.find(":::");
+                    if (variantSep != std::string::npos) {
+                        std::string variant = key.substr(0, variantSep);
+                        const Switch enabled = isVariantExpressionEnabled(variant, settings);
+                        ALOGV("variant '%s' is '%s'", variant.c_str(), asString(enabled));
+                        if (!enabled) {
+                            continue;
+                        }
+                        key = key.substr(variantSep + 3);
+                    }
+
                     if (key.find("feature-") == 0 && key.find("feature-bitrate-modes") != 0) {
                         int32_t intValue = 0;
                         // Ignore trailing bad characters and default to 0.
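
The Codec2InfoBuilder changes above gate codec entries, their variants, and their media types on service attributes of the form "domain-...", "variant-..." and "media-type-...", where a variant expression may be negated with a leading '!'. A rough sketch of that lookup semantics, using a plain std::map in place of MediaCodecsXmlParser::AttributeMap (the helper names below are illustrative, not the actual parser API):

    #include <map>
    #include <string>

    using AttributeMap = std::map<std::string, std::string>;

    // "1" means enabled, any other present value means disabled,
    // and an absent key falls back to the caller-supplied default.
    bool settingEnabled(const std::string &key, const AttributeMap &settings, bool def) {
        auto it = settings.find(key);
        return it == settings.end() ? def : it->second == "1";
    }

    // A variant expression is either "name" or "!name"; unknown variants are
    // disabled by default, so "!unknown" evaluates to enabled by default.
    bool variantExpressionEnabled(const std::string &exp, const AttributeMap &settings) {
        if (!exp.empty() && exp[0] == '!') {
            return !settingEnabled("variant-" + exp.substr(1), settings, /* def */ false);
        }
        return settingEnabled("variant-" + exp, settings, /* def */ false);
    }

For example, with {"variant-slow-cpu": "1"} among the service attributes, a codec variant tagged "slow-cpu" is kept while one tagged "!slow-cpu" is dropped.
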
diff --git a/media/codec2/sfplugin/PipelineWatcher.cpp b/media/codec2/sfplugin/PipelineWatcher.cpp
index df81d49..74d14e8 100644
--- a/media/codec2/sfplugin/PipelineWatcher.cpp
+++ b/media/codec2/sfplugin/PipelineWatcher.cpp
@@ -121,6 +121,13 @@
               sizeWithInputReleased);
         return true;
     }
+
+    size_t sizeWithInputsPending = mFramesInPipeline.size() - sizeWithInputReleased;
+    if (sizeWithInputsPending > mPipelineDelay + mInputDelay + mSmoothnessFactor) {
+        ALOGV("pipelineFull: too many inputs pending (%zu) in pipeline, with inputs released (%zu)",
+              sizeWithInputsPending, sizeWithInputReleased);
+        return true;
+    }
     ALOGV("pipeline has room (total: %zu, input released: %zu)",
           mFramesInPipeline.size(), sizeWithInputReleased);
     return false;
diff --git a/media/codec2/sfplugin/utils/Codec2Mapper.cpp b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
index d62944a..40160c7 100644
--- a/media/codec2/sfplugin/utils/Codec2Mapper.cpp
+++ b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
@@ -379,11 +379,19 @@
 
 
 ALookup<C2Config::profile_t, int32_t> sAv1Profiles = {
-    { C2Config::PROFILE_AV1_0, AV1Profile0 },
-    { C2Config::PROFILE_AV1_1, AV1Profile1 },
-    { C2Config::PROFILE_AV1_2, AV1Profile2 },
+    // TODO: will need to disambiguate between Main8 and Main10
+    { C2Config::PROFILE_AV1_0, AV1ProfileMain8 },
+    { C2Config::PROFILE_AV1_0, AV1ProfileMain10 },
 };
 
+ALookup<C2Config::profile_t, int32_t> sAv1HdrProfiles = {
+    { C2Config::PROFILE_AV1_0, AV1ProfileMain10 },
+    { C2Config::PROFILE_AV1_0, AV1ProfileMain10HDR10 },
+};
+
+ALookup<C2Config::profile_t, int32_t> sAv1Hdr10PlusProfiles = {
+    { C2Config::PROFILE_AV1_0, AV1ProfileMain10HDR10Plus },
+};
 
 /**
  * A helper that passes through vendor extension profile and level values.
@@ -590,6 +598,10 @@
 };
 
 struct Av1ProfileLevelMapper : ProfileLevelMapperHelper {
+    Av1ProfileLevelMapper(bool isHdr = false, bool isHdr10Plus = false) :
+        ProfileLevelMapperHelper(),
+        mIsHdr(isHdr), mIsHdr10Plus(isHdr10Plus) {}
+
     virtual bool simpleMap(C2Config::level_t from, int32_t *to) {
         return sAv1Levels.map(from, to);
     }
@@ -597,11 +609,19 @@
         return sAv1Levels.map(from, to);
     }
     virtual bool simpleMap(C2Config::profile_t from, int32_t *to) {
-        return sAv1Profiles.map(from, to);
+        return mIsHdr10Plus ? sAv1Hdr10PlusProfiles.map(from, to) :
+                     mIsHdr ? sAv1HdrProfiles.map(from, to) :
+                              sAv1Profiles.map(from, to);
     }
     virtual bool simpleMap(int32_t from, C2Config::profile_t *to) {
-        return sAv1Profiles.map(from, to);
+        return mIsHdr10Plus ? sAv1Hdr10PlusProfiles.map(from, to) :
+                     mIsHdr ? sAv1HdrProfiles.map(from, to) :
+                              sAv1Profiles.map(from, to);
     }
+
+private:
+    bool mIsHdr;
+    bool mIsHdr10Plus;
 };
 
 } // namespace
diff --git a/media/codec2/vndk/Android.bp b/media/codec2/vndk/Android.bp
index 198bd72..ca69810 100644
--- a/media/codec2/vndk/Android.bp
+++ b/media/codec2/vndk/Android.bp
@@ -14,9 +14,6 @@
 cc_library_shared {
     name: "libcodec2_vndk",
     vendor_available: true,
-    vndk: {
-        enabled: true,
-    },
 
     srcs: [
         "C2AllocatorIon.cpp",
diff --git a/media/codec2/vndk/C2Store.cpp b/media/codec2/vndk/C2Store.cpp
index 10c4dcc..f8afa7c 100644
--- a/media/codec2/vndk/C2Store.cpp
+++ b/media/codec2/vndk/C2Store.cpp
@@ -872,7 +872,6 @@
     emplace("libcodec2_soft_vp8enc.so");
     emplace("libcodec2_soft_vp9dec.so");
     emplace("libcodec2_soft_vp9enc.so");
-    emplace("libcodec2_soft_xaacdec.so");
 }
 
 c2_status_t C2PlatformComponentStore::copyBuffer(
diff --git a/media/extractors/mkv/MatroskaExtractor.cpp b/media/extractors/mkv/MatroskaExtractor.cpp
index a1c81f3..b1eb301 100644
--- a/media/extractors/mkv/MatroskaExtractor.cpp
+++ b/media/extractors/mkv/MatroskaExtractor.cpp
@@ -151,6 +151,7 @@
         HEVC,
         MP3,
         PCM,
+        VORBIS,
         OTHER
     };
 
@@ -273,6 +274,8 @@
         mType = MP3;
     } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) {
         mType = PCM;
+    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_VORBIS)) {
+        mType = VORBIS;
     }
 }
 
@@ -802,6 +805,26 @@
         AMediaFormat_setInt64(meta, AMEDIAFORMAT_KEY_TIME_US, timeUs);
         AMediaFormat_setInt32(meta, AMEDIAFORMAT_KEY_IS_SYNC_FRAME, block->IsKey());
 
+        if (mType == VORBIS) {
+            int32_t sampleRate;
+            if (!AMediaFormat_getInt32(trackInfo->mMeta, AMEDIAFORMAT_KEY_SAMPLE_RATE,
+                                       &sampleRate)) {
+                return AMEDIA_ERROR_MALFORMED;
+            }
+            int64_t durationUs;
+            if (!AMediaFormat_getInt64(trackInfo->mMeta, AMEDIAFORMAT_KEY_DURATION,
+                                       &durationUs)) {
+                return AMEDIA_ERROR_MALFORMED;
+            }
+            // TODO: Explore whether this can be handled the same way as in the MPEG4 extractor,
+            // where padding is signalled instead of VALID_SAMPLES.
+            // Remaining valid samples in the Vorbis track:
+            if (durationUs > timeUs) {
+                int32_t validSamples = ((durationUs - timeUs) * sampleRate) / 1000000ll;
+                AMediaFormat_setInt32(meta, AMEDIAFORMAT_KEY_VALID_SAMPLES, validSamples);
+            }
+        }
+
         status_t err = frame.Read(mExtractor->mReader, data + trackInfo->mHeaderLen);
         if (err == OK
                 && mExtractor->mIsWebm
@@ -1970,6 +1993,12 @@
                     }
                 } else if (!strcmp("V_AV1", codecID)) {
                     AMediaFormat_setString(meta, AMEDIAFORMAT_KEY_MIME, MEDIA_MIMETYPE_VIDEO_AV1);
+                    if (codecPrivateSize > 0) {
+                        // 'csd-0' for AV1 is the Blob of Codec Private data as
+                        // specified in https://aomediacodec.github.io/av1-isobmff/.
+                        AMediaFormat_setBuffer(
+                                meta, AMEDIAFORMAT_KEY_CSD_0, codecPrivate, codecPrivateSize);
+                    }
                 } else if (!strcmp("V_MPEG2", codecID) || !strcmp("V_MPEG1", codecID)) {
                         AMediaFormat_setString(meta, AMEDIAFORMAT_KEY_MIME,
                                 MEDIA_MIMETYPE_VIDEO_MPEG2);
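
The VALID_SAMPLES value added for Vorbis tracks above is simply the remaining track duration converted to a sample count at the track's sample rate. A worked example of the arithmetic (the numbers are illustrative, not taken from any real stream):

    #include <cstdint>

    // validSamples = (durationUs - timeUs) * sampleRate / 1e6, as in the hunk above.
    int32_t remainingValidSamples() {
        int64_t durationUs = 5000000;   // 5 s track duration
        int64_t timeUs     = 4980000;   // final block starts 20 ms before the end
        int32_t sampleRate = 48000;
        // (5000000 - 4980000) * 48000 / 1000000 = 960 valid samples
        return (int32_t)(((durationUs - timeUs) * sampleRate) / 1000000ll);
    }
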
diff --git a/media/extractors/mp4/ItemTable.cpp b/media/extractors/mp4/ItemTable.cpp
index d56abaa..8c8e6d1 100644
--- a/media/extractors/mp4/ItemTable.cpp
+++ b/media/extractors/mp4/ItemTable.cpp
@@ -426,10 +426,8 @@
         }
         ALOGV("extent_count %d", extent_count);
 
-        if (extent_count > 1 && (offset_size == 0 || length_size == 0)) {
-            // if the item is dividec into more than one extents, offset and
-            // length must be present.
-            return ERROR_MALFORMED;
+        if (extent_count > 1) {
+            return ERROR_UNSUPPORTED;
         }
         offset += 2;
 
diff --git a/media/extractors/mp4/MPEG4Extractor.cpp b/media/extractors/mp4/MPEG4Extractor.cpp
index 3d1c95d..aa3b5b7 100755
--- a/media/extractors/mp4/MPEG4Extractor.cpp
+++ b/media/extractors/mp4/MPEG4Extractor.cpp
@@ -45,6 +45,7 @@
 #include <media/stagefright/foundation/ColorUtils.h>
 #include <media/stagefright/foundation/avc_utils.h>
 #include <media/stagefright/foundation/hexdump.h>
+#include <media/stagefright/foundation/OpusHeader.h>
 #include <media/stagefright/MediaBufferGroup.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MetaDataBase.h>
@@ -1735,15 +1736,21 @@
             AMediaFormat_setInt32(mLastTrack->meta, AMEDIAFORMAT_KEY_SAMPLE_RATE, sample_rate);
 
             if (chunk_type == FOURCC("Opus")) {
-                uint8_t opusInfo[19];
+                uint8_t opusInfo[AOPUS_OPUSHEAD_MAXSIZE];
                 data_offset += sizeof(buffer);
+                size_t opusInfoSize = chunk_data_size - sizeof(buffer);
+
+                if (opusInfoSize < AOPUS_OPUSHEAD_MINSIZE ||
+                    opusInfoSize > AOPUS_OPUSHEAD_MAXSIZE) {
+                    return ERROR_MALFORMED;
+                }
                 // Read Opus Header
                 if (mDataSource->readAt(
-                        data_offset, opusInfo, sizeof(opusInfo)) < (ssize_t)sizeof(opusInfo)) {
+                        data_offset, opusInfo, opusInfoSize) < (ssize_t)opusInfoSize) {
                     return ERROR_IO;
                 }
 
-                // OpusHeader must start with this magic sequence
+                // OpusHeader must start with this magic sequence; overwrite the first 8 bytes.
                 // http://wiki.xiph.org/OggOpus#ID_Header
                 strncpy((char *)opusInfo, "OpusHead", 8);
 
@@ -1760,17 +1767,18 @@
                 memcpy(&opusInfo[opusOffset + 2], &sample_rate, sizeof(sample_rate));
                 memcpy(&opusInfo[opusOffset + 6], &out_gain, sizeof(out_gain));
 
-                int64_t codecDelay = 6500000;
-                int64_t seekPreRollNs = 80000000;  // Fixed 80 msec
+                static const int64_t kSeekPreRollNs = 80000000;  // Fixed 80 msec
+                static const int32_t kOpusSampleRate = 48000;
+                int64_t codecDelay = pre_skip * 1000000000ll / kOpusSampleRate;
 
                 AMediaFormat_setBuffer(mLastTrack->meta,
                             AMEDIAFORMAT_KEY_CSD_0, opusInfo, sizeof(opusInfo));
                 AMediaFormat_setBuffer(mLastTrack->meta,
                         AMEDIAFORMAT_KEY_CSD_1, &codecDelay, sizeof(codecDelay));
                 AMediaFormat_setBuffer(mLastTrack->meta,
-                        AMEDIAFORMAT_KEY_CSD_2, &seekPreRollNs, sizeof(seekPreRollNs));
+                        AMEDIAFORMAT_KEY_CSD_2, &kSeekPreRollNs, sizeof(kSeekPreRollNs));
 
-                data_offset += sizeof(opusInfo);
+                data_offset += opusInfoSize;
                 *offset = data_offset;
                 CHECK_EQ(*offset, stop_offset);
             }
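
The codecDelay change above derives the delay from the OpusHead pre_skip field (a sample count at the fixed 48 kHz Opus rate) instead of hard-coding 6.5 ms. A minimal standalone sketch of that arithmetic, using an illustrative pre_skip of 312 samples (a common encoder default, which happens to reproduce the previously hard-coded value):

    #include <cstdint>
    #include <cstdio>

    int main() {
        static const int32_t kOpusSampleRate = 48000;
        uint16_t preSkip = 312;  // hypothetical value read from the OpusHead box
        int64_t codecDelayNs = preSkip * 1000000000ll / kOpusSampleRate;
        printf("codec delay: %lld ns\n", (long long)codecDelayNs);  // prints 6500000
        return 0;
    }
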
@@ -1964,6 +1972,8 @@
                 return err;
             }
 
+            adjustRawDefaultFrameSize();
+
             size_t max_size;
             err = mLastTrack->sampleTable->getMaxSampleSize(&max_size);
 
@@ -4297,6 +4307,77 @@
         return ERROR_MALFORMED;
     }
 
+    if (objectTypeIndication == 0xdd) {
+        // vorbis audio
+        if (csd[0] != 0x02) {
+            return ERROR_MALFORMED;
+        }
+
+        // codecInfo starts with two lengths, len1 and len2, that are
+        // "Xiph-style-lacing encoded"..
+
+        size_t offset = 1;
+        size_t len1 = 0;
+        while (offset < csd_size && csd[offset] == 0xff) {
+            if (__builtin_add_overflow(len1, 0xff, &len1)) {
+                return ERROR_MALFORMED;
+            }
+            ++offset;
+        }
+        if (offset >= csd_size) {
+            return ERROR_MALFORMED;
+        }
+        if (__builtin_add_overflow(len1, csd[offset], &len1)) {
+            return ERROR_MALFORMED;
+        }
+        ++offset;
+        if (len1 == 0) {
+            return ERROR_MALFORMED;
+        }
+
+        size_t len2 = 0;
+        while (offset < csd_size && csd[offset] == 0xff) {
+            if (__builtin_add_overflow(len2, 0xff, &len2)) {
+                return ERROR_MALFORMED;
+            }
+            ++offset;
+        }
+        if (offset >= csd_size) {
+            return ERROR_MALFORMED;
+        }
+        if (__builtin_add_overflow(len2, csd[offset], &len2)) {
+            return ERROR_MALFORMED;
+        }
+        ++offset;
+        if (len2 == 0) {
+            return ERROR_MALFORMED;
+        }
+        if (offset >= csd_size || csd[offset] != 0x01) {
+            return ERROR_MALFORMED;
+        }
+        // formerly kKeyVorbisInfo
+        AMediaFormat_setBuffer(mLastTrack->meta,
+                AMEDIAFORMAT_KEY_CSD_0, &csd[offset], len1);
+
+        if (__builtin_add_overflow(offset, len1, &offset) ||
+                offset >= csd_size || csd[offset] != 0x03) {
+            return ERROR_MALFORMED;
+        }
+
+        if (__builtin_add_overflow(offset, len2, &offset) ||
+                offset >= csd_size || csd[offset] != 0x05) {
+            return ERROR_MALFORMED;
+        }
+
+        // formerly kKeyVorbisBooks
+        AMediaFormat_setBuffer(mLastTrack->meta,
+                AMEDIAFORMAT_KEY_CSD_1, &csd[offset], csd_size - offset);
+        AMediaFormat_setString(mLastTrack->meta,
+                AMEDIAFORMAT_KEY_MIME, MEDIA_MIMETYPE_AUDIO_VORBIS);
+
+        return OK;
+    }
+
     static uint32_t kSamplingRate[] = {
         96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050,
         16000, 12000, 11025, 8000, 7350
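
The len1/len2 parsing added above follows Xiph-style lacing: a length is the sum of consecutive 0xff bytes plus the first byte below 0xff that terminates it. A self-contained sketch of one such length read (the helper name is illustrative, not part of the extractor; the patch additionally guards the additions with __builtin_add_overflow):

    #include <cstddef>
    #include <cstdint>

    // Returns false if the buffer ends before the length terminates.
    static bool readXiphLacedLength(const uint8_t *data, size_t size,
                                    size_t *offset, size_t *lenOut) {
        size_t len = 0;
        while (*offset < size && data[*offset] == 0xff) {
            len += 0xff;       // each 0xff byte contributes 255
            ++(*offset);
        }
        if (*offset >= size) {
            return false;
        }
        len += data[(*offset)++];  // terminating byte (< 0xff) completes the length
        *lenOut = len;
        return true;
    }
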
@@ -4527,6 +4608,20 @@
     return OK;
 }
 
+void MPEG4Extractor::adjustRawDefaultFrameSize() {
+    int32_t chanCount = 0;
+    int32_t bitWidth = 0;
+    const char *mimeStr = NULL;
+
+    if (AMediaFormat_getString(mLastTrack->meta, AMEDIAFORMAT_KEY_MIME, &mimeStr) &&
+        !strcasecmp(mimeStr, MEDIA_MIMETYPE_AUDIO_RAW) &&
+        AMediaFormat_getInt32(mLastTrack->meta, AMEDIAFORMAT_KEY_CHANNEL_COUNT, &chanCount) &&
+        AMediaFormat_getInt32(mLastTrack->meta, AMEDIAFORMAT_KEY_BITS_PER_SAMPLE, &bitWidth)) {
+        // samplesize in stsz may not be right, so update the default samplesize
+        mLastTrack->sampleTable->setPredictSampleSize(chanCount * bitWidth / 8);
+    }
+}
+
 ////////////////////////////////////////////////////////////////////////////////
 
 MPEG4Source::MPEG4Source(
@@ -4893,8 +4988,11 @@
 }
 
 status_t MPEG4Source::parseSampleAuxiliaryInformationSizes(
-        off64_t offset, off64_t /* size */) {
+        off64_t offset, off64_t size) {
     ALOGV("parseSampleAuxiliaryInformationSizes");
+    if (size < 9) {
+        return -EINVAL;
+    }
     // 14496-12 8.7.12
     uint8_t version;
     if (mDataSource->readAt(
@@ -4907,25 +5005,32 @@
         return ERROR_UNSUPPORTED;
     }
     offset++;
+    size--;
 
     uint32_t flags;
     if (!mDataSource->getUInt24(offset, &flags)) {
         return ERROR_IO;
     }
     offset += 3;
+    size -= 3;
 
     if (flags & 1) {
+        if (size < 13) {
+            return -EINVAL;
+        }
         uint32_t tmp;
         if (!mDataSource->getUInt32(offset, &tmp)) {
             return ERROR_MALFORMED;
         }
         mCurrentAuxInfoType = tmp;
         offset += 4;
+        size -= 4;
         if (!mDataSource->getUInt32(offset, &tmp)) {
             return ERROR_MALFORMED;
         }
         mCurrentAuxInfoTypeParameter = tmp;
         offset += 4;
+        size -= 4;
     }
 
     uint8_t defsize;
@@ -4934,6 +5039,7 @@
     }
     mCurrentDefaultSampleInfoSize = defsize;
     offset++;
+    size--;
 
     uint32_t smplcnt;
     if (!mDataSource->getUInt32(offset, &smplcnt)) {
@@ -4941,7 +5047,12 @@
     }
     mCurrentSampleInfoCount = smplcnt;
     offset += 4;
-
+    size -= 4;
+    if (smplcnt > size) {
+        ALOGW("b/124525515 - smplcnt(%u) > size(%lu)", (unsigned int)smplcnt, (unsigned long)size);
+        android_errorWriteLog(0x534e4554, "124525515");
+        return -EINVAL;
+    }
     if (mCurrentDefaultSampleInfoSize != 0) {
         ALOGV("@@@@ using default sample info size of %d", mCurrentDefaultSampleInfoSize);
         return OK;
@@ -4961,26 +5072,32 @@
 }
 
 status_t MPEG4Source::parseSampleAuxiliaryInformationOffsets(
-        off64_t offset, off64_t /* size */) {
+        off64_t offset, off64_t size) {
     ALOGV("parseSampleAuxiliaryInformationOffsets");
+    if (size < 8) {
+        return -EINVAL;
+    }
     // 14496-12 8.7.13
     uint8_t version;
     if (mDataSource->readAt(offset, &version, sizeof(version)) != 1) {
         return ERROR_IO;
     }
     offset++;
+    size--;
 
     uint32_t flags;
     if (!mDataSource->getUInt24(offset, &flags)) {
         return ERROR_IO;
     }
     offset += 3;
+    size -= 3;
 
     uint32_t entrycount;
     if (!mDataSource->getUInt32(offset, &entrycount)) {
         return ERROR_IO;
     }
     offset += 4;
+    size -= 4;
     if (entrycount == 0) {
         return OK;
     }
@@ -5006,19 +5123,31 @@
 
     for (size_t i = 0; i < entrycount; i++) {
         if (version == 0) {
+            if (size < 4) {
+                ALOGW("b/124526959");
+                android_errorWriteLog(0x534e4554, "124526959");
+                return -EINVAL;
+            }
             uint32_t tmp;
             if (!mDataSource->getUInt32(offset, &tmp)) {
                 return ERROR_IO;
             }
             mCurrentSampleInfoOffsets[i] = tmp;
             offset += 4;
+            size -= 4;
         } else {
+            if (size < 8) {
+                ALOGW("b/124526959");
+                android_errorWriteLog(0x534e4554, "124526959");
+                return -EINVAL;
+            }
             uint64_t tmp;
             if (!mDataSource->getUInt64(offset, &tmp)) {
                 return ERROR_IO;
             }
             mCurrentSampleInfoOffsets[i] = tmp;
             offset += 8;
+            size -= 8;
         }
     }
 
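
The saiz/saio changes above thread the box size through the parse and decrement it as each field is consumed, bailing out before any read could pass the end of the box. A minimal sketch of that pattern, assuming nothing about the surrounding classes:

    #include <cstdint>

    struct BoxCursor {
        int64_t offset;     // absolute source offset
        int64_t remaining;  // bytes left in the box

        // Advance by n bytes only if they are still inside the box.
        bool consume(int64_t n) {
            if (n > remaining) {
                return false;  // caller treats this as a malformed or truncated box
            }
            offset += n;
            remaining -= n;
            return true;
        }
    };
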
diff --git a/media/extractors/mp4/MPEG4Extractor.h b/media/extractors/mp4/MPEG4Extractor.h
index 031e793..e10bf8a 100644
--- a/media/extractors/mp4/MPEG4Extractor.h
+++ b/media/extractors/mp4/MPEG4Extractor.h
@@ -179,6 +179,7 @@
     status_t parseAC3SpecificBox(off64_t offset);
     status_t parseEAC3SpecificBox(off64_t offset);
     status_t parseAC4SpecificBox(off64_t offset);
+    void adjustRawDefaultFrameSize();
 
     MPEG4Extractor(const MPEG4Extractor &);
     MPEG4Extractor &operator=(const MPEG4Extractor &);
diff --git a/media/extractors/mp4/SampleTable.cpp b/media/extractors/mp4/SampleTable.cpp
index bf29bf1..e7e8901 100644
--- a/media/extractors/mp4/SampleTable.cpp
+++ b/media/extractors/mp4/SampleTable.cpp
@@ -391,20 +391,11 @@
     }
 
     mTimeToSampleCount = U32_AT(&header[4]);
-    if (mTimeToSampleCount > UINT32_MAX / (2 * sizeof(uint32_t))) {
-        // Choose this bound because
-        // 1) 2 * sizeof(uint32_t) is the amount of memory needed for one
-        //    time-to-sample entry in the time-to-sample table.
-        // 2) mTimeToSampleCount is the number of entries of the time-to-sample
-        //    table.
-        // 3) We hope that the table size does not exceed UINT32_MAX.
+    if (mTimeToSampleCount > (data_size - 8) / (2 * sizeof(uint32_t))) {
         ALOGE("Time-to-sample table size too large.");
         return ERROR_OUT_OF_RANGE;
     }
 
-    // Note: At this point, we know that mTimeToSampleCount * 2 will not
-    // overflow because of the above condition.
-
     uint64_t allocSize = (uint64_t)mTimeToSampleCount * 2 * sizeof(uint32_t);
     mTotalSize += allocSize;
     if (mTotalSize > kMaxTotalSize) {
@@ -540,6 +531,12 @@
     }
 
     uint64_t allocSize = (uint64_t)numSyncSamples * sizeof(uint32_t);
+    if (allocSize > data_size - 8) {
+        ALOGW("b/124771364 - allocSize(%lu) > size(%lu)",
+                (unsigned long)allocSize, (unsigned long)(data_size - 8));
+        android_errorWriteLog(0x534e4554, "124771364");
+        return ERROR_MALFORMED;
+    }
     if (allocSize > kMaxTotalSize) {
         ALOGE("Sync sample table size too large.");
         return ERROR_OUT_OF_RANGE;
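
The new stts bound above replaces the UINT32_MAX heuristic with the tight limit implied by the box itself: 8 bytes of header, then 8 bytes (two uint32_t) per entry. Quick arithmetic with an illustrative box size:

    #include <cstdint>
    #include <cstdio>

    int main() {
        uint64_t data_size = 4104;  // hypothetical stts payload size in bytes
        uint64_t maxEntries = (data_size - 8) / (2 * sizeof(uint32_t));
        printf("max time-to-sample entries: %llu\n",
               (unsigned long long)maxEntries);  // prints 512
        return 0;
    }
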
diff --git a/media/extractors/mp4/SampleTable.h b/media/extractors/mp4/SampleTable.h
index 57f6e62..076f4c3 100644
--- a/media/extractors/mp4/SampleTable.h
+++ b/media/extractors/mp4/SampleTable.h
@@ -89,6 +89,10 @@
 
     status_t findThumbnailSample(uint32_t *sample_index);
 
+    void setPredictSampleSize(uint32_t sampleSize) {
+        mDefaultSampleSize = sampleSize;
+    }
+
 protected:
     ~SampleTable();
 
diff --git a/media/libaaudio/src/binding/IAAudioService.cpp b/media/libaaudio/src/binding/IAAudioService.cpp
index 9b32543..97ad2b0 100644
--- a/media/libaaudio/src/binding/IAAudioService.cpp
+++ b/media/libaaudio/src/binding/IAAudioService.cpp
@@ -251,8 +251,15 @@
             CHECK_INTERFACE(IAAudioService, data, reply);
             sp<IAAudioClient> client = interface_cast<IAAudioClient>(
                     data.readStrongBinder());
-            registerClient(client);
-            return NO_ERROR;
+            // readStrongBinder() can return null
+            if (client.get() == nullptr) {
+                ALOGE("BnAAudioService::%s(REGISTER_CLIENT) client is NULL!", __func__);
+                android_errorWriteLog(0x534e4554, "116230453");
+                return DEAD_OBJECT;
+            } else {
+                registerClient(client);
+                return NO_ERROR;
+            }
         } break;
 
         case OPEN_STREAM: {
diff --git a/media/libaaudio/src/legacy/AudioStreamRecord.cpp b/media/libaaudio/src/legacy/AudioStreamRecord.cpp
index 4a65fc9..71efc30 100644
--- a/media/libaaudio/src/legacy/AudioStreamRecord.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamRecord.cpp
@@ -63,7 +63,9 @@
     // TODO Support UNSPECIFIED in AudioRecord. For now, use stereo if unspecified.
     int32_t samplesPerFrame = (getSamplesPerFrame() == AAUDIO_UNSPECIFIED)
                               ? 2 : getSamplesPerFrame();
-    audio_channel_mask_t channelMask = audio_channel_in_mask_from_count(samplesPerFrame);
+    audio_channel_mask_t channelMask = samplesPerFrame <= 2 ?
+                               audio_channel_in_mask_from_count(samplesPerFrame) :
+                               audio_channel_mask_for_index_assignment_from_count(samplesPerFrame);
 
     size_t frameCount = (builder.getBufferCapacity() == AAUDIO_UNSPECIFIED) ? 0
                         : builder.getBufferCapacity();
diff --git a/media/libaaudio/src/legacy/AudioStreamTrack.cpp b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
index d628bf7..094cdd1 100644
--- a/media/libaaudio/src/legacy/AudioStreamTrack.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
@@ -66,7 +66,9 @@
     // Use stereo if unspecified.
     int32_t samplesPerFrame = (getSamplesPerFrame() == AAUDIO_UNSPECIFIED)
                               ? 2 : getSamplesPerFrame();
-    audio_channel_mask_t channelMask = audio_channel_out_mask_from_count(samplesPerFrame);
+    audio_channel_mask_t channelMask = samplesPerFrame <= 2 ?
+                            audio_channel_out_mask_from_count(samplesPerFrame) :
+                            audio_channel_mask_for_index_assignment_from_count(samplesPerFrame);
 
     audio_output_flags_t flags;
     aaudio_performance_mode_t perfMode = getPerformanceMode();
diff --git a/media/libaudioclient/AudioPolicy.cpp b/media/libaudioclient/AudioPolicy.cpp
index 65e797f..3cdf095 100644
--- a/media/libaudioclient/AudioPolicy.cpp
+++ b/media/libaudioclient/AudioPolicy.cpp
@@ -159,4 +159,29 @@
     mCriteria.add(crit);
 }
 
+bool AudioMix::hasUidRule(bool match, uid_t uid) const {
+    const uint32_t rule = match ? RULE_MATCH_UID : RULE_EXCLUDE_UID;
+    for (size_t i = 0; i < mCriteria.size(); i++) {
+        if (mCriteria[i].mRule == rule
+                && mCriteria[i].mValue.mUid == uid) {
+            return true;
+        }
+    }
+    return false;
+}
+
+bool AudioMix::hasMatchUidRule() const {
+    for (size_t i = 0; i < mCriteria.size(); i++) {
+        if (mCriteria[i].mRule == RULE_MATCH_UID) {
+            return true;
+        }
+    }
+    return false;
+}
+
+bool AudioMix::isDeviceAffinityCompatible() const {
+    return ((mMixType == MIX_TYPE_PLAYERS)
+            && (mRouteFlags == MIX_ROUTE_FLAG_RENDER));
+}
+
 } // namespace android
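
The three AudioMix helpers added above are small predicates over mCriteria and the mix flags. A hypothetical caller-side sketch (the function below is illustrative, not part of the patch) of how they might be combined when validating uid-device affinity routing:

    #include <sys/types.h>
    #include <media/AudioPolicy.h>

    // Illustrative only: accept a mix for uid routing when it renders player audio
    // and does not carry an explicit RULE_EXCLUDE_UID entry for this uid.
    static bool canRouteUidThroughMix(const android::AudioMix &mix, uid_t uid) {
        return mix.isDeviceAffinityCompatible()
                && !mix.hasUidRule(false /* match */, uid);
    }
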
diff --git a/media/libaudioclient/AudioRecord.cpp b/media/libaudioclient/AudioRecord.cpp
index 0cce5bc..a1b04ca 100644
--- a/media/libaudioclient/AudioRecord.cpp
+++ b/media/libaudioclient/AudioRecord.cpp
@@ -177,7 +177,7 @@
         }
         // No lock here: worst case we remove a NULL callback which will be a nop
         if (mDeviceCallback != 0 && mInput != AUDIO_IO_HANDLE_NONE) {
-            AudioSystem::removeAudioDeviceCallback(this, mInput);
+            AudioSystem::removeAudioDeviceCallback(this, mInput, mPortId);
         }
         IInterface::asBinder(mAudioRecord)->unlinkToDeath(mDeathNotifier, this);
         mAudioRecord.clear();
@@ -790,14 +790,13 @@
     mAudioRecord = record;
     mCblkMemory = output.cblk;
     mBufferMemory = output.buffers;
-    mPortId = output.portId;
     IPCThreadState::self()->flushCommands();
 
     mCblk = cblk;
     // note that output.frameCount is the (possibly revised) value of mReqFrameCount
     if (output.frameCount < mReqFrameCount || (mReqFrameCount == 0 && output.frameCount == 0)) {
         ALOGW("%s(%d): Requested frameCount %zu but received frameCount %zu",
-              __func__, mPortId,
+              __func__, output.portId,
               mReqFrameCount,  output.frameCount);
     }
 
@@ -805,19 +804,20 @@
     // The computation is done on server side.
     if (mNotificationFramesReq > 0 && output.notificationFrameCount != mNotificationFramesReq) {
         ALOGW("%s(%d): Server adjusted notificationFrames from %u to %zu for frameCount %zu",
-                __func__, mPortId,
+                __func__, output.portId,
                 mNotificationFramesReq, output.notificationFrameCount, output.frameCount);
     }
     mNotificationFramesAct = (uint32_t)output.notificationFrameCount;
 
     //mInput != input includes the case where mInput == AUDIO_IO_HANDLE_NONE for first creation
-    if (mDeviceCallback != 0 && mInput != output.inputId) {
+    if (mDeviceCallback != 0) {
         if (mInput != AUDIO_IO_HANDLE_NONE) {
-            AudioSystem::removeAudioDeviceCallback(this, mInput);
+            AudioSystem::removeAudioDeviceCallback(this, mInput, mPortId);
         }
-        AudioSystem::addAudioDeviceCallback(this, output.inputId);
+        AudioSystem::addAudioDeviceCallback(this, output.inputId, output.portId);
     }
 
+    mPortId = output.portId;
     // We retain a copy of the I/O handle, but don't own the reference
     mInput = output.inputId;
     mRefreshRemaining = true;
@@ -1332,9 +1332,9 @@
     if (mInput != AUDIO_IO_HANDLE_NONE) {
         if (mDeviceCallback != 0) {
             ALOGW("%s(%d): callback already present!", __func__, mPortId);
-            AudioSystem::removeAudioDeviceCallback(this, mInput);
+            AudioSystem::removeAudioDeviceCallback(this, mInput, mPortId);
         }
-        status = AudioSystem::addAudioDeviceCallback(this, mInput);
+        status = AudioSystem::addAudioDeviceCallback(this, mInput, mPortId);
     }
     mDeviceCallback = callback;
     return status;
@@ -1354,7 +1354,7 @@
     }
     mDeviceCallback.clear();
     if (mInput != AUDIO_IO_HANDLE_NONE) {
-        AudioSystem::removeAudioDeviceCallback(this, mInput);
+        AudioSystem::removeAudioDeviceCallback(this, mInput, mPortId);
     }
     return NO_ERROR;
 }
diff --git a/media/libaudioclient/AudioSystem.cpp b/media/libaudioclient/AudioSystem.cpp
index 47e2c28..1e7f9fa 100644
--- a/media/libaudioclient/AudioSystem.cpp
+++ b/media/libaudioclient/AudioSystem.cpp
@@ -523,12 +523,10 @@
     if (ioDesc == 0 || ioDesc->mIoHandle == AUDIO_IO_HANDLE_NONE) return;
 
     audio_port_handle_t deviceId = AUDIO_PORT_HANDLE_NONE;
-    Vector<sp<AudioDeviceCallback>> callbacksToCall;
+    std::vector<sp<AudioDeviceCallback>> callbacksToCall;
     {
         Mutex::Autolock _l(mLock);
-        bool deviceValidOrChanged = false;
-        bool sendCallbacks = false;
-        ssize_t ioIndex = -1;
+        auto callbacks = std::map<audio_port_handle_t, wp<AudioDeviceCallback>>();
 
         switch (event) {
         case AUDIO_OUTPUT_OPENED:
@@ -546,17 +544,11 @@
             if (ioDesc->getDeviceId() != AUDIO_PORT_HANDLE_NONE) {
                 deviceId = ioDesc->getDeviceId();
                 if (event == AUDIO_OUTPUT_OPENED || event == AUDIO_INPUT_OPENED) {
-                    ioIndex = mAudioDeviceCallbackProxies.indexOfKey(ioDesc->mIoHandle);
-                    if (ioIndex >= 0) {
-                        sendCallbacks = true;
-                        deviceValidOrChanged = true;
+                    auto it = mAudioDeviceCallbacks.find(ioDesc->mIoHandle);
+                    if (it != mAudioDeviceCallbacks.end()) {
+                        callbacks = it->second;
                     }
                 }
-                if (event == AUDIO_OUTPUT_REGISTERED || event == AUDIO_INPUT_REGISTERED) {
-                    ioIndex = mAudioDeviceCallbackProxies.indexOfKey(ioDesc->mIoHandle);
-                    sendCallbacks = (ioIndex >= 0)
-                            && !mAudioDeviceCallbackProxies.valueAt(ioIndex).notifiedOnce();
-                }
             }
             ALOGV("ioConfigChanged() new %s %s %d samplingRate %u, format %#x channel mask %#x "
                     "frameCount %zu deviceId %d",
@@ -578,7 +570,7 @@
                   event == AUDIO_OUTPUT_CLOSED ? "output" : "input", ioDesc->mIoHandle);
 
             mIoDescriptors.removeItem(ioDesc->mIoHandle);
-            mAudioDeviceCallbackProxies.removeItem(ioDesc->mIoHandle);
+            mAudioDeviceCallbacks.erase(ioDesc->mIoHandle);
             } break;
 
         case AUDIO_OUTPUT_CONFIG_CHANGED:
@@ -593,10 +585,11 @@
             mIoDescriptors.replaceValueFor(ioDesc->mIoHandle, ioDesc);
 
             if (deviceId != ioDesc->getDeviceId()) {
-                deviceValidOrChanged = true;
                 deviceId = ioDesc->getDeviceId();
-                ioIndex = mAudioDeviceCallbackProxies.indexOfKey(ioDesc->mIoHandle);
-                sendCallbacks = ioIndex >= 0;
+                auto it = mAudioDeviceCallbacks.find(ioDesc->mIoHandle);
+                if (it != mAudioDeviceCallbacks.end()) {
+                    callbacks = it->second;
+                }
             }
             ALOGV("ioConfigChanged() new config for %s %d samplingRate %u, format %#x "
                     "channel mask %#x frameCount %zu frameCountHAL %zu deviceId %d",
@@ -606,35 +599,40 @@
                     ioDesc->getDeviceId());
 
         } break;
-        }
-
-        // sendCallbacks true =>  ioDesc->mIoHandle and deviceId are valid
-        if (sendCallbacks) {
-            AudioDeviceCallbackProxies &callbackProxies =
-                mAudioDeviceCallbackProxies.editValueAt(ioIndex);
-            for (size_t i = 0; i < callbackProxies.size(); ) {
-                sp<AudioDeviceCallback> callback = callbackProxies[i]->callback();
-                if (callback.get() != nullptr) {
-                    // Call the callback only if the device actually changed, the input or output
-                    // was opened or closed or the client was newly registered and the callback
-                    // was never called
-                    if (!callbackProxies[i]->notifiedOnce() || deviceValidOrChanged) {
-                        callbacksToCall.add(callback);
-                        callbackProxies[i]->setNotifiedOnce();
-                    }
-                    i++;
-                } else {
-                    callbackProxies.removeAt(i);
+        case AUDIO_CLIENT_STARTED: {
+            sp<AudioIoDescriptor> oldDesc = getIoDescriptor_l(ioDesc->mIoHandle);
+            if (oldDesc == 0) {
+                ALOGW("ioConfigChanged() start client on unknown io! %d", ioDesc->mIoHandle);
+                break;
+            }
+            ALOGV("ioConfigChanged() AUDIO_CLIENT_STARTED  io %d port %d num callbacks %zu",
+                ioDesc->mIoHandle, ioDesc->mPortId, mAudioDeviceCallbacks.size());
+            oldDesc->mPatch = ioDesc->mPatch;
+            auto it = mAudioDeviceCallbacks.find(ioDesc->mIoHandle);
+            if (it != mAudioDeviceCallbacks.end()) {
+                auto cbks = it->second;
+                auto it2 = cbks.find(ioDesc->mPortId);
+                if (it2 != cbks.end()) {
+                    callbacks.emplace(ioDesc->mPortId, it2->second);
+                    deviceId = oldDesc->getDeviceId();
                 }
             }
-            callbackProxies.setNotifiedOnce();
+        } break;
+        }
+
+        for (auto wpCbk : callbacks) {
+            sp<AudioDeviceCallback> spCbk = wpCbk.second.promote();
+            if (spCbk != nullptr) {
+                callbacksToCall.push_back(spCbk);
+            }
         }
     }
 
     // Callbacks must be called without mLock held. May lead to dead lock if calling for
     // example getRoutedDevice that updates the device and tries to acquire mLock.
-    for (size_t i = 0; i < callbacksToCall.size(); i++) {
-        callbacksToCall[i]->onAudioDeviceUpdate(ioDesc->mIoHandle, deviceId);
+    for (auto cb : callbacksToCall) {
+        // If callbacksToCall is not empty, it implies ioDesc->mIoHandle and deviceId are valid
+        cb->onAudioDeviceUpdate(ioDesc->mIoHandle, deviceId);
     }
 }
 
@@ -687,51 +685,34 @@
 }
 
 status_t AudioSystem::AudioFlingerClient::addAudioDeviceCallback(
-        const wp<AudioDeviceCallback>& callback, audio_io_handle_t audioIo)
+        const wp<AudioDeviceCallback>& callback, audio_io_handle_t audioIo,
+        audio_port_handle_t portId)
 {
+    ALOGV("%s audioIo %d portId %d", __func__, audioIo, portId);
     Mutex::Autolock _l(mLock);
-    AudioDeviceCallbackProxies callbackProxies;
-    ssize_t ioIndex = mAudioDeviceCallbackProxies.indexOfKey(audioIo);
-    if (ioIndex >= 0) {
-        callbackProxies = mAudioDeviceCallbackProxies.valueAt(ioIndex);
+    auto& callbacks = mAudioDeviceCallbacks.emplace(audioIo, std::map<audio_port_handle_t, wp<AudioDeviceCallback>>()).first->second;
+    auto result = callbacks.try_emplace(portId, callback);
+    if (!result.second) {
+        return INVALID_OPERATION;
     }
-
-    for (size_t cbIndex = 0; cbIndex < callbackProxies.size(); cbIndex++) {
-        sp<AudioDeviceCallback> cbk = callbackProxies[cbIndex]->callback();
-        if (cbk.get() == callback.unsafe_get()) {
-            return INVALID_OPERATION;
-        }
-    }
-    callbackProxies.add(new AudioDeviceCallbackProxy(callback));
-    callbackProxies.resetNotifiedOnce();
-    mAudioDeviceCallbackProxies.replaceValueFor(audioIo, callbackProxies);
     return NO_ERROR;
 }
 
 status_t AudioSystem::AudioFlingerClient::removeAudioDeviceCallback(
-        const wp<AudioDeviceCallback>& callback, audio_io_handle_t audioIo)
+        const wp<AudioDeviceCallback>& callback __unused, audio_io_handle_t audioIo,
+        audio_port_handle_t portId)
 {
+    ALOGV("%s audioIo %d portId %d", __func__, audioIo, portId);
     Mutex::Autolock _l(mLock);
-    ssize_t ioIndex = mAudioDeviceCallbackProxies.indexOfKey(audioIo);
-    if (ioIndex < 0) {
+    auto it = mAudioDeviceCallbacks.find(audioIo);
+    if (it == mAudioDeviceCallbacks.end()) {
         return INVALID_OPERATION;
     }
-    AudioDeviceCallbackProxies callbackProxies = mAudioDeviceCallbackProxies.valueAt(ioIndex);
-    size_t cbIndex;
-    for (cbIndex = 0; cbIndex < callbackProxies.size(); cbIndex++) {
-        sp<AudioDeviceCallback> cbk = callbackProxies[cbIndex]->callback();
-        if (cbk.get() == callback.unsafe_get()) {
-            break;
-        }
-    }
-    if (cbIndex == callbackProxies.size()) {
+    if (it->second.erase(portId) == 0) {
         return INVALID_OPERATION;
     }
-    callbackProxies.removeAt(cbIndex);
-    if (callbackProxies.size() != 0) {
-        mAudioDeviceCallbackProxies.replaceValueFor(audioIo, callbackProxies);
-    } else {
-        mAudioDeviceCallbackProxies.removeItem(audioIo);
+    if (it->second.size() == 0) {
+        mAudioDeviceCallbacks.erase(audioIo);
     }
     return NO_ERROR;
 }
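
The reworked registration above keys device callbacks by (audioIo, portId) in nested std::maps and relies on try_emplace to reject duplicate registrations. A tiny standalone illustration of that behaviour, with plain ints standing in for the handle types:

    #include <cstdio>
    #include <map>

    int main() {
        std::map<int, std::map<int, const char *>> callbacks;  // audioIo -> (portId -> cb)
        auto &perIo = callbacks.emplace(7 /* audioIo */,
                                        std::map<int, const char *>()).first->second;
        bool first  = perIo.try_emplace(42 /* portId */, "cb").second;    // true: inserted
        bool second = perIo.try_emplace(42 /* portId */, "other").second; // false -> INVALID_OPERATION
        printf("first=%d second=%d\n", first, second);
        return 0;
    }
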
@@ -883,7 +864,7 @@
     return aps->getOutput(stream);
 }
 
-status_t AudioSystem::getOutputForAttr(const audio_attributes_t *attr,
+status_t AudioSystem::getOutputForAttr(audio_attributes_t *attr,
                                         audio_io_handle_t *output,
                                         audio_session_t session,
                                         audio_stream_type_t *stream,
@@ -1271,13 +1252,14 @@
 }
 
 status_t AudioSystem::addAudioDeviceCallback(
-        const wp<AudioDeviceCallback>& callback, audio_io_handle_t audioIo)
+        const wp<AudioDeviceCallback>& callback, audio_io_handle_t audioIo,
+        audio_port_handle_t portId)
 {
     const sp<AudioFlingerClient> afc = getAudioFlingerClient();
     if (afc == 0) {
         return NO_INIT;
     }
-    status_t status = afc->addAudioDeviceCallback(callback, audioIo);
+    status_t status = afc->addAudioDeviceCallback(callback, audioIo, portId);
     if (status == NO_ERROR) {
         const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
         if (af != 0) {
@@ -1288,13 +1270,14 @@
 }
 
 status_t AudioSystem::removeAudioDeviceCallback(
-        const wp<AudioDeviceCallback>& callback, audio_io_handle_t audioIo)
+        const wp<AudioDeviceCallback>& callback, audio_io_handle_t audioIo,
+        audio_port_handle_t portId)
 {
     const sp<AudioFlingerClient> afc = getAudioFlingerClient();
     if (afc == 0) {
         return NO_INIT;
     }
-    return afc->removeAudioDeviceCallback(callback, audioIo);
+    return afc->removeAudioDeviceCallback(callback, audioIo, portId);
 }
 
 audio_port_handle_t AudioSystem::getDeviceIdForIo(audio_io_handle_t audioIo)
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index bd48f56..4a80cd3 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -309,7 +309,7 @@
         }
         // No lock here: worst case we remove a NULL callback which will be a nop
         if (mDeviceCallback != 0 && mOutput != AUDIO_IO_HANDLE_NONE) {
-            AudioSystem::removeAudioDeviceCallback(this, mOutput);
+            AudioSystem::removeAudioDeviceCallback(this, mOutput, mPortId);
         }
         IInterface::asBinder(mAudioTrack)->unlinkToDeath(mDeathNotifier, this);
         mAudioTrack.clear();
@@ -1495,7 +1495,6 @@
     mAfFrameCount = output.afFrameCount;
     mAfSampleRate = output.afSampleRate;
     mAfLatency = output.afLatencyMs;
-    mPortId = output.portId;
 
     mLatency = mAfLatency + (1000LL * mFrameCount) / mSampleRate;
 
@@ -1543,14 +1542,15 @@
     mFlags = output.flags;
 
     //mOutput != output includes the case where mOutput == AUDIO_IO_HANDLE_NONE for first creation
-    if (mDeviceCallback != 0 && mOutput != output.outputId) {
+    if (mDeviceCallback != 0) {
         if (mOutput != AUDIO_IO_HANDLE_NONE) {
-            AudioSystem::removeAudioDeviceCallback(this, mOutput);
+            AudioSystem::removeAudioDeviceCallback(this, mOutput, mPortId);
         }
-        AudioSystem::addAudioDeviceCallback(this, output.outputId);
+        AudioSystem::addAudioDeviceCallback(this, output.outputId, output.portId);
         callbackAdded = true;
     }
 
+    mPortId = output.portId;
     // We retain a copy of the I/O handle, but don't own the reference
     mOutput = output.outputId;
     mRefreshRemaining = true;
@@ -1615,7 +1615,7 @@
 exit:
     if (status != NO_ERROR && callbackAdded) {
         // note: mOutput is always valid if callbackAdded is true
-        AudioSystem::removeAudioDeviceCallback(this, mOutput);
+        AudioSystem::removeAudioDeviceCallback(this, mOutput, mPortId);
     }
 
     mStatus = status;
@@ -2144,9 +2144,27 @@
                     const nsecs_t timeNow = systemTime();
                     ns = max((nsecs_t)0, ns - (timeNow - timeAfterCallbacks));
                 }
-                nsecs_t myns = framesToNanoseconds(mRemainingFrames - avail, sampleRate, speed);
-                if (ns < 0 /* NS_WHENEVER */ || myns < ns) {
-                    ns = myns;
+
+                // delayNs is first computed from the additional frames required in the buffer.
+                nsecs_t delayNs = framesToNanoseconds(
+                        mRemainingFrames - avail, sampleRate, speed);
+
+                // afNs is the AudioFlinger mixer period in ns.
+                const nsecs_t afNs = framesToNanoseconds(mAfFrameCount, mAfSampleRate, speed);
+
+                // If the AudioTrack is double buffered based on the AudioFlinger mixer period,
+                // we may have a race if we wait based on the number of frames desired.
+                // This is a possible issue with resampling and AAudio.
+                //
+                // The granularity of audioflinger processing is one mixer period; if
+                // our wait time is less than one mixer period, wait at most half the period.
+                if (delayNs < afNs) {
+                    delayNs = std::min(delayNs, afNs / 2);
+                }
+
+                // adjust our ns wait by delayNs.
+                if (ns < 0 /* NS_WHENEVER */ || delayNs < ns) {
+                    ns = delayNs;
                 }
                 return ns;
             }
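
The wait-time change above only matters when the frame-based wait is shorter than one AudioFlinger mixer period; in that case the track waits at most half a period so it cannot race the mixer. Worked numbers (illustrative) for that clamp:

    #include <algorithm>
    #include <cstdint>
    #include <cstdio>

    static int64_t clampWaitNs(int64_t delayNs, int64_t afNs) {
        if (delayNs < afNs) {
            delayNs = std::min(delayNs, afNs / 2);  // at most half a mixer period
        }
        return delayNs;
    }

    int main() {
        const int64_t afNs = 20000000;  // hypothetical 20 ms mixer period
        printf("%lld\n", (long long)clampWaitNs(15000000, afNs));  // 10000000 (clamped)
        printf("%lld\n", (long long)clampWaitNs(30000000, afNs));  // 30000000 (unchanged)
        return 0;
    }
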
@@ -2922,6 +2940,7 @@
 
 status_t AudioTrack::addAudioDeviceCallback(const sp<AudioSystem::AudioDeviceCallback>& callback)
 {
+
     if (callback == 0) {
         ALOGW("%s(%d): adding NULL callback!", __func__, mPortId);
         return BAD_VALUE;
@@ -2935,9 +2954,9 @@
     if (mOutput != AUDIO_IO_HANDLE_NONE) {
         if (mDeviceCallback != 0) {
             ALOGW("%s(%d): callback already present!", __func__, mPortId);
-            AudioSystem::removeAudioDeviceCallback(this, mOutput);
+            AudioSystem::removeAudioDeviceCallback(this, mOutput, mPortId);
         }
-        status = AudioSystem::addAudioDeviceCallback(this, mOutput);
+        status = AudioSystem::addAudioDeviceCallback(this, mOutput, mPortId);
     }
     mDeviceCallback = callback;
     return status;
@@ -2957,7 +2976,7 @@
     }
     mDeviceCallback.clear();
     if (mOutput != AUDIO_IO_HANDLE_NONE) {
-        AudioSystem::removeAudioDeviceCallback(this, mOutput);
+        AudioSystem::removeAudioDeviceCallback(this, mOutput, mPortId);
     }
     return NO_ERROR;
 }
@@ -2979,6 +2998,7 @@
             mRoutedDeviceId = deviceId;
         }
     }
+
     if (callback.get() != nullptr) {
         callback->onAudioDeviceUpdate(mOutput, mRoutedDeviceId);
     }
diff --git a/media/libaudioclient/IAudioFlingerClient.cpp b/media/libaudioclient/IAudioFlingerClient.cpp
index b2dbc4c..47eb7dc 100644
--- a/media/libaudioclient/IAudioFlingerClient.cpp
+++ b/media/libaudioclient/IAudioFlingerClient.cpp
@@ -52,6 +52,7 @@
         data.writeInt64(ioDesc->mFrameCount);
         data.writeInt64(ioDesc->mFrameCountHAL);
         data.writeInt32(ioDesc->mLatency);
+        data.writeInt32(ioDesc->mPortId);
         remote()->transact(IO_CONFIG_CHANGED, data, &reply, IBinder::FLAG_ONEWAY);
     }
 };
@@ -76,6 +77,7 @@
             ioDesc->mFrameCount = data.readInt64();
             ioDesc->mFrameCountHAL = data.readInt64();
             ioDesc->mLatency = data.readInt32();
+            ioDesc->mPortId = data.readInt32();
             ioConfigChanged(event, ioDesc);
             return NO_ERROR;
         } break;
diff --git a/media/libaudioclient/IAudioPolicyService.cpp b/media/libaudioclient/IAudioPolicyService.cpp
index 9b4221c..c548457 100644
--- a/media/libaudioclient/IAudioPolicyService.cpp
+++ b/media/libaudioclient/IAudioPolicyService.cpp
@@ -198,7 +198,7 @@
         return static_cast <audio_io_handle_t> (reply.readInt32());
     }
 
-    status_t getOutputForAttr(const audio_attributes_t *attr,
+    status_t getOutputForAttr(audio_attributes_t *attr,
                               audio_io_handle_t *output,
                               audio_session_t session,
                               audio_stream_type_t *stream,
@@ -212,38 +212,27 @@
         {
             Parcel data, reply;
             data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-            if (attr == NULL) {
-                if (stream == NULL) {
-                    ALOGE("getOutputForAttr(): NULL audio attributes and stream type");
-                    return BAD_VALUE;
-                }
-                if (*stream == AUDIO_STREAM_DEFAULT) {
-                    ALOGE("getOutputForAttr unspecified stream type");
-                    return BAD_VALUE;
-                }
-            }
-            if (output == NULL) {
-                ALOGE("getOutputForAttr NULL output - shouldn't happen");
+            if (attr == nullptr) {
+                ALOGE("%s NULL audio attributes", __func__);
                 return BAD_VALUE;
             }
-            if (selectedDeviceId == NULL) {
-                ALOGE("getOutputForAttr NULL selectedDeviceId - shouldn't happen");
+            if (output == nullptr) {
+                ALOGE("%s NULL output - shouldn't happen", __func__);
                 return BAD_VALUE;
             }
-            if (portId == NULL) {
-                ALOGE("getOutputForAttr NULL portId - shouldn't happen");
+            if (selectedDeviceId == nullptr) {
+                ALOGE("%s NULL selectedDeviceId - shouldn't happen", __func__);
                 return BAD_VALUE;
             }
-            if (secondaryOutputs == NULL) {
-                ALOGE("getOutputForAttr NULL secondaryOutputs - shouldn't happen");
+            if (portId == nullptr) {
+                ALOGE("%s NULL portId - shouldn't happen", __func__);
                 return BAD_VALUE;
             }
-            if (attr == NULL) {
-                data.writeInt32(0);
-            } else {
-                data.writeInt32(1);
-                data.write(attr, sizeof(audio_attributes_t));
+            if (secondaryOutputs == nullptr) {
+                ALOGE("%s NULL secondaryOutputs - shouldn't happen", __func__);
+                return BAD_VALUE;
             }
+            data.write(attr, sizeof(audio_attributes_t));
             data.writeInt32(session);
             if (stream == NULL) {
                 data.writeInt32(0);
@@ -265,6 +254,10 @@
             if (status != NO_ERROR) {
                 return status;
             }
+            status = (status_t)reply.read(attr, sizeof(audio_attributes_t));
+            if (status != NO_ERROR) {
+                return status;
+            }
             *output = (audio_io_handle_t)reply.readInt32();
             audio_stream_type_t lStream = (audio_stream_type_t)reply.readInt32();
             if (stream != NULL) {
@@ -1449,12 +1442,12 @@
 
         case GET_OUTPUT_FOR_ATTR: {
             CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            audio_attributes_t attr = {};
-            bool hasAttributes = data.readInt32() != 0;
-            if (hasAttributes) {
-                data.read(&attr, sizeof(audio_attributes_t));
-                sanetizeAudioAttributes(&attr);
+            audio_attributes_t attr = AUDIO_ATTRIBUTES_INITIALIZER;
+            status_t status = data.read(&attr, sizeof(audio_attributes_t));
+            if (status != NO_ERROR) {
+                return status;
             }
+            sanetizeAudioAttributes(&attr);
             audio_session_t session = (audio_session_t)data.readInt32();
             audio_stream_type_t stream = AUDIO_STREAM_DEFAULT;
             bool hasStream = data.readInt32() != 0;
@@ -1472,11 +1465,15 @@
             audio_port_handle_t portId = (audio_port_handle_t)data.readInt32();
             audio_io_handle_t output = 0;
             std::vector<audio_io_handle_t> secondaryOutputs;
-            status_t status = getOutputForAttr(hasAttributes ? &attr : NULL,
+            status = getOutputForAttr(&attr,
                     &output, session, &stream, pid, uid,
                     &config,
                     flags, &selectedDeviceId, &portId, &secondaryOutputs);
             reply->writeInt32(status);
+            status = reply->write(&attr, sizeof(audio_attributes_t));
+            if (status != NO_ERROR) {
+                return status;
+            }
             reply->writeInt32(output);
             reply->writeInt32(stream);
             reply->writeInt32(selectedDeviceId);
diff --git a/media/libaudioclient/RecordingActivityTracker.cpp b/media/libaudioclient/RecordingActivityTracker.cpp
index bd10895..42d4361 100644
--- a/media/libaudioclient/RecordingActivityTracker.cpp
+++ b/media/libaudioclient/RecordingActivityTracker.cpp
@@ -36,6 +36,9 @@
 
 RecordingActivityTracker::~RecordingActivityTracker()
 {
+    if (mRIId != RECORD_RIID_INVALID && mAudioManager) {
+        mAudioManager->releaseRecorder(mRIId);
+    }
 }
 
 audio_unique_id_t RecordingActivityTracker::getRiid()
diff --git a/media/libaudioclient/include/media/AudioIoDescriptor.h b/media/libaudioclient/include/media/AudioIoDescriptor.h
index 859f1a9..981d33a 100644
--- a/media/libaudioclient/include/media/AudioIoDescriptor.h
+++ b/media/libaudioclient/include/media/AudioIoDescriptor.h
@@ -28,6 +28,7 @@
     AUDIO_INPUT_OPENED,
     AUDIO_INPUT_CLOSED,
     AUDIO_INPUT_CONFIG_CHANGED,
+    AUDIO_CLIENT_STARTED,
 };
 
 // audio input/output descriptor used to cache output configurations in client process to avoid
@@ -37,7 +38,7 @@
     AudioIoDescriptor() :
         mIoHandle(AUDIO_IO_HANDLE_NONE),
         mSamplingRate(0), mFormat(AUDIO_FORMAT_DEFAULT), mChannelMask(AUDIO_CHANNEL_NONE),
-        mFrameCount(0), mFrameCountHAL(0), mLatency(0)
+        mFrameCount(0), mFrameCountHAL(0), mLatency(0), mPortId(AUDIO_PORT_HANDLE_NONE)
     {
         memset(&mPatch, 0, sizeof(struct audio_patch));
     }
@@ -66,6 +67,7 @@
     size_t                  mFrameCount;
     size_t                  mFrameCountHAL;
     uint32_t                mLatency;   // only valid for output
+    audio_port_handle_t     mPortId;    // valid for event AUDIO_CLIENT_STARTED
 };
 
 
diff --git a/media/libaudioclient/include/media/AudioPolicy.h b/media/libaudioclient/include/media/AudioPolicy.h
index a40e019..ef39fd1 100644
--- a/media/libaudioclient/include/media/AudioPolicy.h
+++ b/media/libaudioclient/include/media/AudioPolicy.h
@@ -106,6 +106,12 @@
 
     void setExcludeUid(uid_t uid) const;
     void setMatchUid(uid_t uid) const;
+    /** returns true if this mix has a rule to match or exclude the given uid */
+    bool hasUidRule(bool match, uid_t uid) const;
+    /** returns true if this mix has a rule for uid match (any uid) */
+    bool hasMatchUidRule() const;
+    /** returns true if this mix can be used for uid-device affinity routing */
+    bool isDeviceAffinityCompatible() const;
 
     mutable Vector<AudioMixMatchCriterion> mCriteria;
     uint32_t        mMixType;
diff --git a/media/libaudioclient/include/media/AudioSystem.h b/media/libaudioclient/include/media/AudioSystem.h
index d3035da..56c69f6 100644
--- a/media/libaudioclient/include/media/AudioSystem.h
+++ b/media/libaudioclient/include/media/AudioSystem.h
@@ -223,7 +223,7 @@
     static status_t setForceUse(audio_policy_force_use_t usage, audio_policy_forced_cfg_t config);
     static audio_policy_forced_cfg_t getForceUse(audio_policy_force_use_t usage);
 
-    static status_t getOutputForAttr(const audio_attributes_t *attr,
+    static status_t getOutputForAttr(audio_attributes_t *attr,
                                      audio_io_handle_t *output,
                                      audio_session_t session,
                                      audio_stream_type_t *stream,
@@ -438,32 +438,12 @@
                                          audio_port_handle_t deviceId) = 0;
     };
 
-    class AudioDeviceCallbackProxy : public RefBase
-    {
-    public:
-
-          AudioDeviceCallbackProxy(wp<AudioDeviceCallback> callback)
-              : mCallback(callback) {}
-          ~AudioDeviceCallbackProxy() override {}
-
-          sp<AudioDeviceCallback> callback() const { return mCallback.promote(); };
-
-          bool notifiedOnce() const { return mNotifiedOnce; }
-          void setNotifiedOnce() { mNotifiedOnce = true; }
-    private:
-        /**
-         * @brief mNotifiedOnce it forces the callback to be called at least once when
-         * registered with a VALID AudioDevice, and allows not to flood other listeners
-         * on this iohandle that already know the valid device.
-         */
-         bool mNotifiedOnce = false;
-         wp<AudioDeviceCallback> mCallback;
-    };
-
     static status_t addAudioDeviceCallback(const wp<AudioDeviceCallback>& callback,
-                                           audio_io_handle_t audioIo);
+                                           audio_io_handle_t audioIo,
+                                           audio_port_handle_t portId);
     static status_t removeAudioDeviceCallback(const wp<AudioDeviceCallback>& callback,
-                                              audio_io_handle_t audioIo);
+                                              audio_io_handle_t audioIo,
+                                              audio_port_handle_t portId);
 
     static audio_port_handle_t getDeviceIdForIo(audio_io_handle_t audioIo);
 
@@ -494,9 +474,11 @@
 
 
         status_t addAudioDeviceCallback(const wp<AudioDeviceCallback>& callback,
-                                               audio_io_handle_t audioIo);
+                                               audio_io_handle_t audioIo,
+                                               audio_port_handle_t portId);
         status_t removeAudioDeviceCallback(const wp<AudioDeviceCallback>& callback,
-                                           audio_io_handle_t audioIo);
+                                           audio_io_handle_t audioIo,
+                                           audio_port_handle_t portId);
 
         audio_port_handle_t getDeviceIdForIo(audio_io_handle_t audioIo);
 
@@ -504,26 +486,8 @@
         Mutex                               mLock;
         DefaultKeyedVector<audio_io_handle_t, sp<AudioIoDescriptor> >   mIoDescriptors;
 
-        class AudioDeviceCallbackProxies : public Vector<sp<AudioDeviceCallbackProxy>>
-        {
-        public:
-            /**
-             * @brief notifiedOnce ensures that if a client adds a callback, it must at least be
-             * called once with the device on which it will be routed to.
-             * @return true if already notified or nobody waits for a callback, false otherwise.
-             */
-            bool notifiedOnce() const { return (size() == 0) || mNotifiedOnce; }
-            void setNotifiedOnce() { mNotifiedOnce = true; }
-            void resetNotifiedOnce() { mNotifiedOnce = false; }
-        private:
-            /**
-             * @brief mNotifiedOnce it forces each callback to be called at least once when
-             * registered with a VALID AudioDevice
-             */
-            bool mNotifiedOnce = false;
-        };
-        DefaultKeyedVector<audio_io_handle_t, AudioDeviceCallbackProxies>
-                mAudioDeviceCallbackProxies;
+        std::map<audio_io_handle_t, std::map<audio_port_handle_t, wp<AudioDeviceCallback>>>
+                mAudioDeviceCallbacks;
         // cached values for recording getInputBufferSize() queries
         size_t                              mInBuffSize;    // zero indicates cache is invalid
         uint32_t                            mInSamplingRate;
diff --git a/media/libaudioclient/include/media/IAudioPolicyService.h b/media/libaudioclient/include/media/IAudioPolicyService.h
index b639044..33ab1f9 100644
--- a/media/libaudioclient/include/media/IAudioPolicyService.h
+++ b/media/libaudioclient/include/media/IAudioPolicyService.h
@@ -57,7 +57,7 @@
                                     audio_policy_forced_cfg_t config) = 0;
     virtual audio_policy_forced_cfg_t getForceUse(audio_policy_force_use_t usage) = 0;
     virtual audio_io_handle_t getOutput(audio_stream_type_t stream) = 0;
-    virtual status_t getOutputForAttr(const audio_attributes_t *attr,
+    virtual status_t getOutputForAttr(audio_attributes_t *attr,
                                       audio_io_handle_t *output,
                                       audio_session_t session,
                                       audio_stream_type_t *stream,
diff --git a/media/libmedia/IMediaMetadataRetriever.cpp b/media/libmedia/IMediaMetadataRetriever.cpp
index f9fa86e..028bea1 100644
--- a/media/libmedia/IMediaMetadataRetriever.cpp
+++ b/media/libmedia/IMediaMetadataRetriever.cpp
@@ -109,7 +109,7 @@
             data.writeInt32(0);
         } else {
             // serialize the headers
-            data.writeInt64(headers->size());
+            data.writeInt32(headers->size());
             for (size_t i = 0; i < headers->size(); ++i) {
                 data.writeString8(headers->keyAt(i));
                 data.writeString8(headers->valueAt(i));
@@ -318,11 +318,22 @@
             }
 
             KeyedVector<String8, String8> headers;
-            size_t numHeaders = (size_t) data.readInt64();
+            size_t numHeaders = (size_t) data.readInt32();
             for (size_t i = 0; i < numHeaders; ++i) {
-                String8 key = data.readString8();
-                String8 value = data.readString8();
-                headers.add(key, value);
+                String8 key;
+                String8 value;
+                status_t status;
+                status = data.readString8(&key);
+                if (status != OK) {
+                    return status;
+                }
+                status = data.readString8(&value);
+                if (status != OK) {
+                    return status;
+                }
+                if (headers.add(key, value) < 0) {
+                    return UNKNOWN_ERROR;
+                }
             }
 
             reply->writeInt32(
diff --git a/media/libmedia/include/media/DrmHal.h b/media/libmedia/include/media/DrmHal.h
index a630bfd..bdf1b30 100644
--- a/media/libmedia/include/media/DrmHal.h
+++ b/media/libmedia/include/media/DrmHal.h
@@ -37,14 +37,15 @@
 using drm::V1_0::IDrmFactory;
 using drm::V1_0::IDrmPlugin;
 using drm::V1_0::IDrmPluginListener;
-using drm::V1_0::KeyStatus;
 using drm::V1_1::SecurityLevel;
+using drm::V1_2::KeyStatus;
 using drm::V1_2::OfflineLicenseState;
 using ::android::hardware::hidl_vec;
 using ::android::hardware::Return;
 using ::android::hardware::Void;
 
 typedef drm::V1_2::IDrmPluginListener IDrmPluginListener_V1_2;
+typedef drm::V1_0::KeyStatus KeyStatus_V1_0;
 
 namespace android {
 
@@ -56,7 +57,7 @@
 }
 
 struct DrmHal : public BnDrm,
-             public IBinder::DeathRecipient,
+                public IBinder::DeathRecipient,
                 public IDrmPluginListener_V1_2 {
     DrmHal();
     virtual ~DrmHal();
@@ -180,6 +181,9 @@
             int64_t expiryTimeInMS);
 
     Return<void> sendKeysChange(const hidl_vec<uint8_t>& sessionId,
+            const hidl_vec<KeyStatus_V1_0>& keyStatusList, bool hasNewUsableKey);
+
+    Return<void> sendKeysChange_1_2(const hidl_vec<uint8_t>& sessionId,
             const hidl_vec<KeyStatus>& keyStatusList, bool hasNewUsableKey);
 
     Return<void> sendSessionLostState(const hidl_vec<uint8_t>& sessionId);
diff --git a/media/libmedia/include/media/DrmMetrics.h b/media/libmedia/include/media/DrmMetrics.h
index 261998f..6f132bf 100644
--- a/media/libmedia/include/media/DrmMetrics.h
+++ b/media/libmedia/include/media/DrmMetrics.h
@@ -21,6 +21,7 @@
 
 #include <android/hardware/drm/1.0/types.h>
 #include <android/hardware/drm/1.1/types.h>
+#include <android/hardware/drm/1.2/types.h>
 #include <binder/PersistableBundle.h>
 #include <media/CounterMetric.h>
 #include <media/EventMetric.h>
@@ -50,7 +51,7 @@
   CounterMetric<status_t> mProvideProvisionResponseCounter;
 
   // Count of key status events broken out by status type.
-  CounterMetric<::android::hardware::drm::V1_0::KeyStatusType>
+  CounterMetric<::android::hardware::drm::V1_2::KeyStatusType>
       mKeyStatusChangeCounter;
   // Count of events broken out by event type
   CounterMetric<::android::hardware::drm::V1_0::EventType> mEventCounter;
diff --git a/media/libmedia/include/media/omx/1.0/Conversion.h b/media/libmedia/include/media/omx/1.0/Conversion.h
index babda22..80e8f3a 100644
--- a/media/libmedia/include/media/omx/1.0/Conversion.h
+++ b/media/libmedia/include/media/omx/1.0/Conversion.h
@@ -14,8 +14,8 @@
  * limitations under the License.
  */
 
-#ifndef ANDROID_HARDWARE_MEDIA_OMX_V1_0__CONVERSION_H
-#define ANDROID_HARDWARE_MEDIA_OMX_V1_0__CONVERSION_H
+#ifndef ANDROID_HARDWARE_MEDIA_OMX_V1_0_UTILS_CONVERSION_H
+#define ANDROID_HARDWARE_MEDIA_OMX_V1_0_UTILS_CONVERSION_H
 
 #include <vector>
 #include <list>
@@ -258,7 +258,12 @@
  */
 // convert: Status -> status_t
 inline status_t toStatusT(Return<Status> const& t) {
-    return t.isOk() ? toStatusT(static_cast<Status>(t)) : UNKNOWN_ERROR;
+    if (t.isOk()) {
+        return toStatusT(static_cast<Status>(t));
+    } else if (t.isDeadObject()) {
+        return DEAD_OBJECT;
+    }
+    return UNKNOWN_ERROR;
 }
 
 /**
@@ -938,4 +943,4 @@
 }  // namespace hardware
 }  // namespace android
 
-#endif  // ANDROID_HARDWARE_MEDIA_OMX_V1_0__CONVERSION_H
+#endif  // ANDROID_HARDWARE_MEDIA_OMX_V1_0_UTILS_CONVERSION_H
diff --git a/media/libmedia/xsd/api/current.txt b/media/libmedia/xsd/api/current.txt
index 05e8a49..73b5f8d 100644
--- a/media/libmedia/xsd/api/current.txt
+++ b/media/libmedia/xsd/api/current.txt
@@ -44,20 +44,20 @@
   public class CamcorderProfiles {
     ctor public CamcorderProfiles();
     method public int getCameraId();
-    method public java.util.List<media.profiles.EncoderProfile> getEncoderProfile();
-    method public java.util.List<media.profiles.CamcorderProfiles.ImageDecoding> getImageDecoding();
-    method public java.util.List<media.profiles.CamcorderProfiles.ImageEncoding> getImageEncoding();
+    method public java.util.List<media.profiles.EncoderProfile> getEncoderProfile_optional();
+    method public java.util.List<media.profiles.CamcorderProfiles.ImageDecodingOptional> getImageDecoding_optional();
+    method public java.util.List<media.profiles.CamcorderProfiles.ImageEncodingOptional> getImageEncoding_optional();
     method public void setCameraId(int);
   }
 
-  public static class CamcorderProfiles.ImageDecoding {
-    ctor public CamcorderProfiles.ImageDecoding();
+  public static class CamcorderProfiles.ImageDecodingOptional {
+    ctor public CamcorderProfiles.ImageDecodingOptional();
     method public int getMemCap();
     method public void setMemCap(int);
   }
 
-  public static class CamcorderProfiles.ImageEncoding {
-    ctor public CamcorderProfiles.ImageEncoding();
+  public static class CamcorderProfiles.ImageEncodingOptional {
+    ctor public CamcorderProfiles.ImageEncodingOptional();
     method public int getQuality();
     method public void setQuality(int);
   }
diff --git a/media/libmedia/xsd/media_profiles.xsd b/media/libmedia/xsd/media_profiles.xsd
index a02252a..9664456 100644
--- a/media/libmedia/xsd/media_profiles.xsd
+++ b/media/libmedia/xsd/media_profiles.xsd
@@ -35,19 +35,19 @@
         </xs:complexType>
     </xs:element>
     <xs:complexType name="CamcorderProfiles">
-        <xs:sequence>
-            <xs:element name="EncoderProfile" type="EncoderProfile" minOccurs="0" maxOccurs="unbounded"/>
-            <xs:element name="ImageEncoding" minOccurs="0" maxOccurs="unbounded">
+        <xs:choice minOccurs="0" maxOccurs="unbounded">
+            <xs:element name="EncoderProfile" type="EncoderProfile"/>
+            <xs:element name="ImageEncoding">
                 <xs:complexType>
                     <xs:attribute name="quality" type="xs:int"/>
                 </xs:complexType>
             </xs:element>
-            <xs:element name="ImageDecoding" minOccurs="0" maxOccurs="unbounded">
+            <xs:element name="ImageDecoding">
                 <xs:complexType>
                     <xs:attribute name="memCap" type="xs:int"/>
                 </xs:complexType>
             </xs:element>
-        </xs:sequence>
+        </xs:choice>
         <xs:attribute name="cameraId" type="xs:int"/>
     </xs:complexType>
     <xs:complexType name="EncoderProfile">
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
index 2b813e7..865cb2a 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
@@ -635,7 +635,7 @@
 
         // re-init in case we prepare() and start() again.
         delete mAnalyticsItem ;
-        mAnalyticsItem = MediaAnalyticsItem::create("nuplayer");
+        mAnalyticsItem = MediaAnalyticsItem::create(kKeyPlayer);
         if (mAnalyticsItem) {
             mAnalyticsItem->setUid(mClientUid);
         }
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 317b5ec..3d67c91 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -1138,6 +1138,8 @@
         return err;
     }
 
+    static_cast<Surface *>(mNativeWindow.get())->setDequeueTimeout(-1);
+
     // Exits here for tunneled video playback codecs -- i.e. skips native window
     // buffer allocation step as this is managed by the tunneled OMX component
     // itself and explicitly sets def.nBufferCountActual to 0.
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index 5932518..9170805 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -212,7 +212,6 @@
         "libstagefright_mediafilter",
         "libstagefright_webm",
         "libstagefright_timedtext",
-        "libvpx",
         "libogg",
         "libwebm",
         "libstagefright_esds",
diff --git a/media/libstagefright/MediaExtractorFactory.cpp b/media/libstagefright/MediaExtractorFactory.cpp
index d97591f..4c8be1f 100644
--- a/media/libstagefright/MediaExtractorFactory.cpp
+++ b/media/libstagefright/MediaExtractorFactory.cpp
@@ -71,8 +71,6 @@
 
     ALOGV("MediaExtractorFactory::CreateFromService %s", mime);
 
-    UpdateExtractors();
-
     // initialize source decryption if needed
     source->DrmInitialization(nullptr /* mime */);
 
@@ -270,7 +268,7 @@
 static std::unordered_set<std::string> gSupportedExtensions;
 
 // static
-void MediaExtractorFactory::UpdateExtractors() {
+void MediaExtractorFactory::LoadExtractors() {
     Mutex::Autolock autoLock(gPluginMutex);
 
     if (gPluginsRegistered) {
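
Taken together, the two hunks above stop the per-request path from triggering plugin discovery and rename the one-shot loader accordingly. A minimal sketch of that pattern with a hypothetical ExtractorRegistry (not the real factory):

    #include <mutex>

    class ExtractorRegistry {
    public:
        // Analogous to LoadExtractors(): idempotent, meant to run once at
        // service startup; mirrors the gPluginsRegistered early-out above.
        void loadExtractors() {
            std::lock_guard<std::mutex> lock(mMutex);
            if (mPluginsRegistered) {
                return;
            }
            // ... scan plugin directories and register sniffers here ...
            mPluginsRegistered = true;
        }

        // Analogous to CreateFromService(): relies on loadExtractors() having
        // already run instead of calling it (the removed UpdateExtractors() call).
        bool pluginsReady() const {
            std::lock_guard<std::mutex> lock(mMutex);
            return mPluginsRegistered;
        }

    private:
        mutable std::mutex mMutex;
        bool mPluginsRegistered = false;
    };
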
diff --git a/media/libstagefright/StagefrightPluginLoader.cpp b/media/libstagefright/StagefrightPluginLoader.cpp
index b90649c..fb03c5e 100644
--- a/media/libstagefright/StagefrightPluginLoader.cpp
+++ b/media/libstagefright/StagefrightPluginLoader.cpp
@@ -35,7 +35,7 @@
 }  // unnamed namespace
 
 StagefrightPluginLoader::StagefrightPluginLoader(const char *libPath) {
-    if (android::base::GetIntProperty("debug.media.codec2", 0) == 0) {
+    if (android::base::GetIntProperty("debug.stagefright.ccodec", 1) == 0) {
         ALOGD("CCodec is disabled.");
         return;
     }
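
A minimal sketch of the new gate, assuming android-base's GetIntProperty(): with the renamed property and a default of 1, CCodec is enabled unless "debug.stagefright.ccodec" is explicitly set to 0, whereas the old "debug.media.codec2" defaulted to disabled. isCCodecEnabled() is a hypothetical helper.

    #include <android-base/properties.h>

    bool isCCodecEnabled() {
        // GetIntProperty(key, default_value): yields 1 when the property is unset.
        return android::base::GetIntProperty("debug.stagefright.ccodec", 1) != 0;
    }
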
diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp
index 3de934f..135151f 100644
--- a/media/libstagefright/Utils.cpp
+++ b/media/libstagefright/Utils.cpp
@@ -39,6 +39,7 @@
 #include <media/stagefright/foundation/ByteUtils.h>
 #include <media/stagefright/foundation/OpusHeader.h>
 #include <media/stagefright/MetaData.h>
+#include <media/stagefright/MediaCodecConstants.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/AudioSystem.h>
 #include <media/MediaPlayerInterface.h>
@@ -573,6 +574,68 @@
     }
 }
 
+static void parseAV1ProfileLevelFromCsd(const sp<ABuffer> &csd, sp<AMessage> &format) {
+    // Parse CSD structure to extract profile level information
+    // https://aomediacodec.github.io/av1-isobmff/#av1codecconfigurationbox
+    const uint8_t *data = csd->data();
+    size_t remaining = csd->size();
+    if (remaining < 4 || data[0] != 0x81) {  // configurationVersion == 1
+        return;
+    }
+    uint8_t profileData = (data[1] & 0xE0) >> 5;
+    uint8_t levelData = data[1] & 0x1F;
+    uint8_t highBitDepth = (data[2] & 0x40) >> 6;
+
+    const static ALookup<std::pair<uint8_t, uint8_t>, int32_t> profiles {
+        { { 0, 0 }, AV1ProfileMain8 },
+        { { 1, 0 }, AV1ProfileMain10 },
+    };
+
+    int32_t profile;
+    if (profiles.map(std::make_pair(highBitDepth, profileData), &profile)) {
+        // bump to HDR profile
+        if (isHdr(format) && profile == AV1ProfileMain10) {
+            if (format->contains("hdr10-plus-info")) {
+                profile = AV1ProfileMain10HDR10Plus;
+            } else {
+                profile = AV1ProfileMain10HDR10;
+            }
+        }
+        format->setInt32("profile", profile);
+    }
+    const static ALookup<uint8_t, int32_t> levels {
+        { 0, AV1Level2   },
+        { 1, AV1Level21  },
+        { 2, AV1Level22  },
+        { 3, AV1Level23  },
+        { 4, AV1Level3   },
+        { 5, AV1Level31  },
+        { 6, AV1Level32  },
+        { 7, AV1Level33  },
+        { 8, AV1Level4   },
+        { 9, AV1Level41  },
+        { 10, AV1Level42  },
+        { 11, AV1Level43  },
+        { 12, AV1Level5   },
+        { 13, AV1Level51  },
+        { 14, AV1Level52  },
+        { 15, AV1Level53  },
+        { 16, AV1Level6   },
+        { 17, AV1Level61  },
+        { 18, AV1Level62  },
+        { 19, AV1Level63  },
+        { 20, AV1Level7   },
+        { 21, AV1Level71  },
+        { 22, AV1Level72  },
+        { 23, AV1Level73  },
+    };
+
+    int32_t level;
+    if (levels.map(levelData, &level)) {
+        format->setInt32("level", level);
+    }
+}
+
 
 static std::vector<std::pair<const char *, uint32_t>> stringMappings {
     {
@@ -1234,6 +1297,7 @@
         buffer->meta()->setInt32("csd", true);
         buffer->meta()->setInt64("timeUs", 0);
         msg->setBuffer("csd-0", buffer);
+        parseAV1ProfileLevelFromCsd(buffer, msg);
     } else if (meta->findData(kKeyESDS, &type, &data, &size)) {
         ESDS esds((const char *)data, size);
         if (esds.InitCheck() != (status_t)OK) {
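
A worked example of the bit extraction done by the parseAV1ProfileLevelFromCsd() helper added above, using hand-picked CSD bytes (illustrative values, not from a real stream); the field layout follows the AV1CodecConfigurationBox reference linked in the code comment:

    #include <cassert>
    #include <cstdint>

    int main() {
        // data[0]: marker(1) | version(7)   -> 0x81 = marker 1, version 1
        // data[1]: seq_profile(3) | seq_level_idx_0(5)
        // data[2]: seq_tier_0(1) | high_bitdepth(1) | ...
        const uint8_t csd[4] = { 0x81, 0x0D, 0x40, 0x00 };
        uint8_t profileData  = (csd[1] & 0xE0) >> 5;  // 0 -> main profile
        uint8_t levelData    =  csd[1] & 0x1F;        // 13 -> AV1Level51
        uint8_t highBitDepth = (csd[2] & 0x40) >> 6;  // 1 -> 10-bit
        assert(profileData == 0 && levelData == 13 && highBitDepth == 1);
        // The {highBitDepth, profileData} pair {1, 0} maps to AV1ProfileMain10,
        // bumped to an HDR10/HDR10+ profile only when the format is HDR.
        return 0;
    }
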
diff --git a/media/libstagefright/bqhelper/Android.bp b/media/libstagefright/bqhelper/Android.bp
index 218fe15..db67034 100644
--- a/media/libstagefright/bqhelper/Android.bp
+++ b/media/libstagefright/bqhelper/Android.bp
@@ -6,10 +6,8 @@
     },
     double_loadable: true,
     srcs: [
-        "Conversion.cpp",
         "FrameDropper.cpp",
         "GraphicBufferSource.cpp",
-        "WProducerListener.cpp",
     ],
 
     export_include_dirs: [
@@ -27,7 +25,6 @@
     shared_libs: [
         "libbinder",
         "libcutils",
-        "libgui",
         "libhidlbase",
         "libhidlmemory",
         "libhidltransport",
@@ -37,12 +34,25 @@
         "libutils",
 
         "android.hardware.graphics.bufferqueue@1.0",
+        // Following libs are from libgui_bufferqueue_static
+        "android.hardware.graphics.bufferqueue@2.0",
+        "android.hidl.token@1.0-utils",
+        "libbase",
+        "libEGL",
+        "libhwbinder",
+        "libnativewindow",
+        "libvndksupport",
+    ],
+
+    static_libs: [
+        "libgui_bufferqueue_static"
     ],
 
     export_shared_lib_headers: [
-        "libgui",
         "libhidlmemory",
         "libstagefright_foundation",
+        "android.hardware.graphics.bufferqueue@1.0",
+        "android.hardware.graphics.bufferqueue@2.0",
     ],
 
     cflags: [
diff --git a/media/libstagefright/bqhelper/Conversion.cpp b/media/libstagefright/bqhelper/Conversion.cpp
deleted file mode 100644
index 91d7c74..0000000
--- a/media/libstagefright/bqhelper/Conversion.cpp
+++ /dev/null
@@ -1,1542 +0,0 @@
-/*
- * Copyright 2018, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <media/stagefright/bqhelper/Conversion.h>
-
-namespace android {
-namespace conversion {
-
-// native_handle_t helper functions.
-
-/**
- * \brief Take an fd and create a native handle containing only the given fd.
- * The created handle will need to be deleted manually with
- * `native_handle_delete()`.
- *
- * \param[in] fd The source file descriptor (of type `int`).
- * \return The created `native_handle_t*` that contains the given \p fd. If the
- * supplied \p fd is negative, the created native handle will contain no file
- * descriptors.
- *
- * If the native handle cannot be created, the return value will be
- * `nullptr`.
- *
- * This function does not duplicate the file descriptor.
- */
-native_handle_t* native_handle_create_from_fd(int fd) {
-    if (fd < 2) {
-        return native_handle_create(0, 0);
-    }
-    native_handle_t* nh = native_handle_create(1, 0);
-    if (nh == nullptr) {
-        return nullptr;
-    }
-    nh->data[0] = fd;
-    return nh;
-}
-
-/**
- * \brief Extract a file descriptor from a native handle.
- *
- * \param[in] nh The source `native_handle_t*`.
- * \param[in] index The index of the file descriptor in \p nh to read from. This
- * input has the default value of `0`.
- * \return The `index`-th file descriptor in \p nh. If \p nh does not have
- * enough file descriptors, the returned value will be `-1`.
- *
- * This function does not duplicate the file descriptor.
- */
-int native_handle_read_fd(native_handle_t const* nh, int index) {
-    return ((nh == nullptr) || (nh->numFds == 0) ||
-            (nh->numFds <= index) || (index < 0)) ?
-            -1 : nh->data[index];
-}
-
-/**
- * Conversion functions
- * ====================
- *
- * There are two main directions of conversion:
- * - `inTargetType(...)`: Create a wrapper whose lifetime depends on the
- *   input. The wrapper has type `TargetType`.
- * - `toTargetType(...)`: Create a standalone object of type `TargetType` that
- *   corresponds to the input. The lifetime of the output does not depend on the
- *   lifetime of the input.
- * - `wrapIn(TargetType*, ...)`: Same as `inTargetType()`, but for `TargetType`
- *   that cannot be copied and/or moved efficiently, or when there are multiple
- *   output arguments.
- * - `convertTo(TargetType*, ...)`: Same as `toTargetType()`, but for
- *   `TargetType` that cannot be copied and/or moved efficiently, or when there
- *   are multiple output arguments.
- *
- * `wrapIn()` and `convertTo()` functions will take output arguments before
- * input arguments. Some of these functions might return a value to indicate
- * success or error.
- *
- * In converting or wrapping something as a Treble type that contains a
- * `hidl_handle`, `native_handle_t*` will need to be created and returned as
- * an additional output argument, hence only `wrapIn()` or `convertTo()` would
- * be available. The caller must call `native_handle_delete()` to deallocate the
- * returned native handle when it is no longer needed.
- *
- * For types that contain file descriptors, `inTargetType()` and `wrapAs()` do
- * not perform duplication of file descriptors, while `toTargetType()` and
- * `convertTo()` do.
- */
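
A toy illustration of the naming convention documented in the (removed) comment block above; RawBuffer, HidlishBuffer and OwnedBuffer are hypothetical types, not part of this file. The "in" flavour borrows the input, while the "to"/"convertTo" flavours produce an independent copy:

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    struct RawBuffer     { const uint8_t *data; size_t size; };  // source type
    struct HidlishBuffer { const uint8_t *data; size_t size; };  // non-owning wrapper
    struct OwnedBuffer   { std::vector<uint8_t> bytes; };        // standalone copy

    // "inTargetType": result is only valid while `l` stays alive.
    HidlishBuffer inHidlishBuffer(const RawBuffer &l) {
        return HidlishBuffer{l.data, l.size};
    }

    // "toTargetType": result owns its data, independent of `l`'s lifetime.
    OwnedBuffer toOwnedBuffer(const RawBuffer &l) {
        return OwnedBuffer{std::vector<uint8_t>(l.data, l.data + l.size)};
    }

    // "convertTo": same ownership rule, but with an output parameter and a
    // success flag, for targets that are awkward to return by value.
    bool convertTo(OwnedBuffer *t, const RawBuffer &l) {
        t->bytes.assign(l.data, l.data + l.size);
        return true;
    }
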
-
-/**
- * \brief Convert `Return<void>` to `status_t`. This is for legacy binder calls.
- *
- * \param[in] t The source `Return<void>`.
- * \return The corresponding `status_t`.
- */
-// convert: Return<void> -> status_t
-status_t toStatusT(Return<void> const& t) {
-    return t.isOk() ? OK : (t.isDeadObject() ? DEAD_OBJECT : UNKNOWN_ERROR);
-}
-
-/**
- * \brief Convert `Return<void>` to `binder::Status`.
- *
- * \param[in] t The source `Return<void>`.
- * \return The corresponding `binder::Status`.
- */
-// convert: Return<void> -> ::android::binder::Status
-::android::binder::Status toBinderStatus(
-        Return<void> const& t) {
-    return ::android::binder::Status::fromExceptionCode(
-            toStatusT(t),
-            t.description().c_str());
-}
-
-/**
- * \brief Wrap `native_handle_t*` in `hidl_handle`.
- *
- * \param[in] nh The source `native_handle_t*`.
- * \return The `hidl_handle` that points to \p nh.
- */
-// wrap: native_handle_t* -> hidl_handle
-hidl_handle inHidlHandle(native_handle_t const* nh) {
-    return hidl_handle(nh);
-}
-
-/**
- * \brief Convert `int32_t` to `Dataspace`.
- *
- * \param[in] l The source `int32_t`.
- * \result The corresponding `Dataspace`.
- */
-// convert: int32_t -> Dataspace
-Dataspace toHardwareDataspace(int32_t l) {
-    return static_cast<Dataspace>(l);
-}
-
-/**
- * \brief Convert `Dataspace` to `int32_t`.
- *
- * \param[in] t The source `Dataspace`.
- * \result The corresponding `int32_t`.
- */
-// convert: Dataspace -> int32_t
-int32_t toRawDataspace(Dataspace const& t) {
-    return static_cast<int32_t>(t);
-}
-
-/**
- * \brief Wrap an opaque buffer inside a `hidl_vec<uint8_t>`.
- *
- * \param[in] l The pointer to the beginning of the opaque buffer.
- * \param[in] size The size of the buffer.
- * \return A `hidl_vec<uint8_t>` that points to the buffer.
- */
-// wrap: void*, size_t -> hidl_vec<uint8_t>
-hidl_vec<uint8_t> inHidlBytes(void const* l, size_t size) {
-    hidl_vec<uint8_t> t;
-    t.setToExternal(static_cast<uint8_t*>(const_cast<void*>(l)), size, false);
-    return t;
-}
-
-/**
- * \brief Create a `hidl_vec<uint8_t>` that is a copy of an opaque buffer.
- *
- * \param[in] l The pointer to the beginning of the opaque buffer.
- * \param[in] size The size of the buffer.
- * \return A `hidl_vec<uint8_t>` that is a copy of the input buffer.
- */
-// convert: void*, size_t -> hidl_vec<uint8_t>
-hidl_vec<uint8_t> toHidlBytes(void const* l, size_t size) {
-    hidl_vec<uint8_t> t;
-    t.resize(size);
-    uint8_t const* src = static_cast<uint8_t const*>(l);
-    std::copy(src, src + size, t.data());
-    return t;
-}
-
-/**
- * \brief Wrap `GraphicBuffer` in `AnwBuffer`.
- *
- * \param[out] t The wrapper of type `AnwBuffer`.
- * \param[in] l The source `GraphicBuffer`.
- */
-// wrap: GraphicBuffer -> AnwBuffer
-void wrapAs(AnwBuffer* t, GraphicBuffer const& l) {
-    t->attr.width = l.getWidth();
-    t->attr.height = l.getHeight();
-    t->attr.stride = l.getStride();
-    t->attr.format = static_cast<PixelFormat>(l.getPixelFormat());
-    t->attr.layerCount = l.getLayerCount();
-    t->attr.usage = l.getUsage();
-    t->attr.id = l.getId();
-    t->attr.generationNumber = l.getGenerationNumber();
-    t->nativeHandle = hidl_handle(l.handle);
-}
-
-/**
- * \brief Convert `AnwBuffer` to `GraphicBuffer`.
- *
- * \param[out] l The destination `GraphicBuffer`.
- * \param[in] t The source `AnwBuffer`.
- *
- * This function will duplicate all file descriptors in \p t.
- */
-// convert: AnwBuffer -> GraphicBuffer
-// Ref: frameworks/native/libs/ui/GraphicBuffer.cpp: GraphicBuffer::flatten
-bool convertTo(GraphicBuffer* l, AnwBuffer const& t) {
-    native_handle_t* handle = t.nativeHandle == nullptr ?
-            nullptr : native_handle_clone(t.nativeHandle);
-
-    size_t const numInts = 12 + (handle ? handle->numInts : 0);
-    int32_t* ints = new int32_t[numInts];
-
-    size_t numFds = static_cast<size_t>(handle ? handle->numFds : 0);
-    int* fds = new int[numFds];
-
-    ints[0] = 'GBFR';
-    ints[1] = static_cast<int32_t>(t.attr.width);
-    ints[2] = static_cast<int32_t>(t.attr.height);
-    ints[3] = static_cast<int32_t>(t.attr.stride);
-    ints[4] = static_cast<int32_t>(t.attr.format);
-    ints[5] = static_cast<int32_t>(t.attr.layerCount);
-    ints[6] = static_cast<int32_t>(t.attr.usage);
-    ints[7] = static_cast<int32_t>(t.attr.id >> 32);
-    ints[8] = static_cast<int32_t>(t.attr.id & 0xFFFFFFFF);
-    ints[9] = static_cast<int32_t>(t.attr.generationNumber);
-    ints[10] = 0;
-    ints[11] = 0;
-    if (handle) {
-        ints[10] = static_cast<int32_t>(handle->numFds);
-        ints[11] = static_cast<int32_t>(handle->numInts);
-        int* intsStart = handle->data + handle->numFds;
-        std::copy(handle->data, intsStart, fds);
-        std::copy(intsStart, intsStart + handle->numInts, &ints[12]);
-    }
-
-    void const* constBuffer = static_cast<void const*>(ints);
-    size_t size = numInts * sizeof(int32_t);
-    int const* constFds = static_cast<int const*>(fds);
-    status_t status = l->unflatten(constBuffer, size, constFds, numFds);
-
-    delete [] fds;
-    delete [] ints;
-    native_handle_delete(handle);
-    return status == NO_ERROR;
-}
-
-/**
- * Conversion functions for types outside media
- * ============================================
- *
- * Some objects in libui and libgui that were made to go through binder calls do
- * not expose ways to read or write their fields to the public. To pass an
- * object of this kind through the HIDL boundary, translation functions need to
- * work around the access restriction by using the publicly available
- * `flatten()` and `unflatten()` functions.
- *
- * All `flatten()` and `unflatten()` overloads follow the same convention as
- * follows:
- *
- *     status_t flatten(ObjectType const& object,
- *                      [OtherType const& other, ...]
- *                      void*& buffer, size_t& size,
- *                      int*& fds, size_t& numFds)
- *
- *     status_t unflatten(ObjectType* object,
- *                        [OtherType* other, ...,]
- *                        void*& buffer, size_t& size,
- *                        int*& fds, size_t& numFds)
- *
- * The number of `other` parameters varies depending on the `ObjectType`. For
- * example, in the process of unflattening an object that contains
- * `hidl_handle`, `other` is needed to hold `native_handle_t` objects that will
- * be created.
- *
- * The last four parameters always work the same way in all overloads of
- * `flatten()` and `unflatten()`:
- * - For `flatten()`, `buffer` is the pointer to the non-fd buffer to be filled,
- *   `size` is the size (in bytes) of the non-fd buffer pointed to by `buffer`,
- *   `fds` is the pointer to the fd buffer to be filled, and `numFds` is the
- *   size (in ints) of the fd buffer pointed to by `fds`.
- * - For `unflatten()`, `buffer` is the pointer to the non-fd buffer to be read
- *   from, `size` is the size (in bytes) of the non-fd buffer pointed to by
- *   `buffer`, `fds` is the pointer to the fd buffer to be read from, and
- *   `numFds` is the size (in ints) of the fd buffer pointed to by `fds`.
- * - After a successful call to `flatten()` or `unflatten()`, `buffer` and `fds`
- *   will be advanced, while `size` and `numFds` will be decreased to reflect
- *   how much storage/data of the two buffers (fd and non-fd) have been used.
- * - After an unsuccessful call, the values of `buffer`, `size`, `fds` and
- *   `numFds` are invalid.
- *
- * The return value of a successful `flatten()` or `unflatten()` call will be
- * `OK` (also aliased as `NO_ERROR`). Any other values indicate a failure.
- *
- * For each object type that supports flattening, there will be two accompanying
- * functions: `getFlattenedSize()` and `getFdCount()`. `getFlattenedSize()` will
- * return the size of the non-fd buffer that the object will need for
- * flattening. `getFdCount()` will return the size of the fd buffer that the
- * object will need for flattening.
- *
- * The set of these four functions, `getFlattenedSize()`, `getFdCount()`,
- * `flatten()` and `unflatten()`, are similar to functions of the same name in
- * the abstract class `Flattenable`. The only difference is that functions in
- * this file are not member functions of the object type. For example, we write
- *
- *     flatten(x, buffer, size, fds, numFds)
- *
- * instead of
- *
- *     x.flatten(buffer, size, fds, numFds)
- *
- * because we cannot modify the type of `x`.
- *
- * There is one exception to the naming convention: `hidl_handle` that
- * represents a fence. The four functions for this "Fence" type have the word
- * "Fence" attched to their names because the object type, which is
- * `hidl_handle`, does not carry the special meaning that the object itself can
- * only contain zero or one file descriptor.
- */
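
A toy illustration of the flatten()/unflatten() cursor contract described in the (removed) comment block above; PodType and the int status codes are hypothetical stand-ins. The buffer pointer is advanced and the remaining size decremented in place, which is the same contract the fence and frame-event helpers below rely on when chaining sub-objects:

    #include <cstddef>
    #include <cstdint>
    #include <cstring>

    struct PodType { int32_t a; int32_t b; };

    size_t getFlattenedSize(PodType const &) { return 2 * sizeof(int32_t); }

    int flatten(PodType const &t, void *&buffer, size_t &size) {
        if (size < getFlattenedSize(t)) return -1;          // NO_MEMORY in the real code
        std::memcpy(buffer, &t, getFlattenedSize(t));
        buffer = static_cast<uint8_t *>(buffer) + getFlattenedSize(t);
        size -= getFlattenedSize(t);                        // cursor advanced, size shrunk
        return 0;                                           // NO_ERROR
    }

    int unflatten(PodType *t, void const *&buffer, size_t &size) {
        if (size < getFlattenedSize(*t)) return -1;
        std::memcpy(t, buffer, getFlattenedSize(*t));
        buffer = static_cast<uint8_t const *>(buffer) + getFlattenedSize(*t);
        size -= getFlattenedSize(*t);
        return 0;
    }
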
-
-// Ref: frameworks/native/libs/ui/Fence.cpp
-
-/**
- * \brief Return the size of the non-fd buffer required to flatten a fence.
- *
- * \param[in] fence The input fence of type `hidl_handle`.
- * \return The required size of the flat buffer.
- *
- * The current version of this function always returns 4, which is the number of
- * bytes required to store the number of file descriptors contained in the fd
- * part of the flat buffer.
- */
-size_t getFenceFlattenedSize(hidl_handle const& /* fence */) {
-    return 4;
-};
-
-/**
- * \brief Return the number of file descriptors contained in a fence.
- *
- * \param[in] fence The input fence of type `hidl_handle`.
- * \return `0` if \p fence does not contain a valid file descriptor, or `1`
- * otherwise.
- */
-size_t getFenceFdCount(hidl_handle const& fence) {
-    return native_handle_read_fd(fence) == -1 ? 0 : 1;
-}
-
-/**
- * \brief Unflatten `Fence` to `hidl_handle`.
- *
- * \param[out] fence The destination `hidl_handle`.
- * \param[out] nh The underlying native handle.
- * \param[in,out] buffer The pointer to the flat non-fd buffer.
- * \param[in,out] size The size of the flat non-fd buffer.
- * \param[in,out] fds The pointer to the flat fd buffer.
- * \param[in,out] numFds The size of the flat fd buffer.
- * \return `NO_ERROR` on success; other value on failure.
- *
- * If the return value is `NO_ERROR`, \p nh will point to a newly created
- * native handle, which needs to be deleted with `native_handle_delete()`
- * afterwards.
- */
-status_t unflattenFence(hidl_handle* fence, native_handle_t** nh,
-        void const*& buffer, size_t& size, int const*& fds, size_t& numFds) {
-    if (size < 4) {
-        return NO_MEMORY;
-    }
-
-    uint32_t numFdsInHandle;
-    FlattenableUtils::read(buffer, size, numFdsInHandle);
-
-    if (numFdsInHandle > 1) {
-        return BAD_VALUE;
-    }
-
-    if (numFds < numFdsInHandle) {
-        return NO_MEMORY;
-    }
-
-    if (numFdsInHandle) {
-        *nh = native_handle_create_from_fd(*fds);
-        if (*nh == nullptr) {
-            return NO_MEMORY;
-        }
-        *fence = *nh;
-        ++fds;
-        --numFds;
-    } else {
-        *nh = nullptr;
-        *fence = hidl_handle();
-    }
-
-    return NO_ERROR;
-}
-
-/**
- * \brief Flatten `hidl_handle` as `Fence`.
- *
- * \param[in] t The source `hidl_handle`.
- * \param[in,out] buffer The pointer to the flat non-fd buffer.
- * \param[in,out] size The size of the flat non-fd buffer.
- * \param[in,out] fds The pointer to the flat fd buffer.
- * \param[in,out] numFds The size of the flat fd buffer.
- * \return `NO_ERROR` on success; other value on failure.
- */
-status_t flattenFence(hidl_handle const& fence,
-        void*& buffer, size_t& size, int*& fds, size_t& numFds) {
-    if (size < getFenceFlattenedSize(fence) ||
-            numFds < getFenceFdCount(fence)) {
-        return NO_MEMORY;
-    }
-    // Cast to uint32_t since the size of a size_t can vary between 32- and
-    // 64-bit processes
-    FlattenableUtils::write(buffer, size,
-            static_cast<uint32_t>(getFenceFdCount(fence)));
-    int fd = native_handle_read_fd(fence);
-    if (fd != -1) {
-        *fds = fd;
-        ++fds;
-        --numFds;
-    }
-    return NO_ERROR;
-}
-
-/**
- * \brief Wrap `Fence` in `hidl_handle`.
- *
- * \param[out] t The wrapper of type `hidl_handle`.
- * \param[out] nh The native handle pointed to by \p t.
- * \param[in] l The source `Fence`.
- *
- * On success, \p nh will hold a newly created native handle, which must be
- * deleted manually with `native_handle_delete()` afterwards.
- */
-// wrap: Fence -> hidl_handle
-bool wrapAs(hidl_handle* t, native_handle_t** nh, Fence const& l) {
-    size_t const baseSize = l.getFlattenedSize();
-    std::unique_ptr<uint8_t[]> baseBuffer(
-            new (std::nothrow) uint8_t[baseSize]);
-    if (!baseBuffer) {
-        return false;
-    }
-
-    size_t const baseNumFds = l.getFdCount();
-    std::unique_ptr<int[]> baseFds(
-            new (std::nothrow) int[baseNumFds]);
-    if (!baseFds) {
-        return false;
-    }
-
-    void* buffer = static_cast<void*>(baseBuffer.get());
-    size_t size = baseSize;
-    int* fds = static_cast<int*>(baseFds.get());
-    size_t numFds = baseNumFds;
-    if (l.flatten(buffer, size, fds, numFds) != NO_ERROR) {
-        return false;
-    }
-
-    void const* constBuffer = static_cast<void const*>(baseBuffer.get());
-    size = baseSize;
-    int const* constFds = static_cast<int const*>(baseFds.get());
-    numFds = baseNumFds;
-    if (unflattenFence(t, nh, constBuffer, size, constFds, numFds)
-            != NO_ERROR) {
-        return false;
-    }
-
-    return true;
-}
-
-/**
- * \brief Convert `hidl_handle` to `Fence`.
- *
- * \param[out] l The destination `Fence`. `l` must not have been used
- * (`l->isValid()` must return `false`) before this function is called.
- * \param[in] t The source `hidl_handle`.
- *
- * If \p t contains a valid file descriptor, it will be duplicated.
- */
-// convert: hidl_handle -> Fence
-bool convertTo(Fence* l, hidl_handle const& t) {
-    int fd = native_handle_read_fd(t);
-    if (fd != -1) {
-        fd = dup(fd);
-        if (fd == -1) {
-            return false;
-        }
-    }
-    native_handle_t* nh = native_handle_create_from_fd(fd);
-    if (nh == nullptr) {
-        if (fd != -1) {
-            close(fd);
-        }
-        return false;
-    }
-
-    size_t const baseSize = getFenceFlattenedSize(t);
-    std::unique_ptr<uint8_t[]> baseBuffer(
-            new (std::nothrow) uint8_t[baseSize]);
-    if (!baseBuffer) {
-        native_handle_delete(nh);
-        return false;
-    }
-
-    size_t const baseNumFds = getFenceFdCount(t);
-    std::unique_ptr<int[]> baseFds(
-            new (std::nothrow) int[baseNumFds]);
-    if (!baseFds) {
-        native_handle_delete(nh);
-        return false;
-    }
-
-    void* buffer = static_cast<void*>(baseBuffer.get());
-    size_t size = baseSize;
-    int* fds = static_cast<int*>(baseFds.get());
-    size_t numFds = baseNumFds;
-    if (flattenFence(hidl_handle(nh), buffer, size, fds, numFds) != NO_ERROR) {
-        native_handle_delete(nh);
-        return false;
-    }
-    native_handle_delete(nh);
-
-    void const* constBuffer = static_cast<void const*>(baseBuffer.get());
-    size = baseSize;
-    int const* constFds = static_cast<int const*>(baseFds.get());
-    numFds = baseNumFds;
-    if (l->unflatten(constBuffer, size, constFds, numFds) != NO_ERROR) {
-        return false;
-    }
-
-    return true;
-}
-
-// Ref: frameworks/native/libs/ui/FenceTime.cpp: FenceTime::Snapshot
-
-/**
- * \brief Return the size of the non-fd buffer required to flatten
- * `FenceTimeSnapshot`.
- *
- * \param[in] t The input `FenceTimeSnapshot`.
- * \return The required size of the flat buffer.
- */
-size_t getFlattenedSize(
-        HGraphicBufferProducer::FenceTimeSnapshot const& t) {
-    constexpr size_t min = sizeof(t.state);
-    switch (t.state) {
-        case HGraphicBufferProducer::FenceTimeSnapshot::State::EMPTY:
-            return min;
-        case HGraphicBufferProducer::FenceTimeSnapshot::State::FENCE:
-            return min + getFenceFlattenedSize(t.fence);
-        case HGraphicBufferProducer::FenceTimeSnapshot::State::SIGNAL_TIME:
-            return min + sizeof(
-                    ::android::FenceTime::Snapshot::signalTime);
-    }
-    return 0;
-}
-
-/**
- * \brief Return the number of file descriptors contained in
- * `FenceTimeSnapshot`.
- *
- * \param[in] t The input `FenceTimeSnapshot`.
- * \return The number of file descriptors contained in \p snapshot.
- */
-size_t getFdCount(
-        HGraphicBufferProducer::FenceTimeSnapshot const& t) {
-    return t.state ==
-            HGraphicBufferProducer::FenceTimeSnapshot::State::FENCE ?
-            getFenceFdCount(t.fence) : 0;
-}
-
-/**
- * \brief Flatten `FenceTimeSnapshot`.
- *
- * \param[in] t The source `FenceTimeSnapshot`.
- * \param[in,out] buffer The pointer to the flat non-fd buffer.
- * \param[in,out] size The size of the flat non-fd buffer.
- * \param[in,out] fds The pointer to the flat fd buffer.
- * \param[in,out] numFds The size of the flat fd buffer.
- * \return `NO_ERROR` on success; other value on failure.
- *
- * This function will duplicate the file descriptor in `t.fence` if `t.state ==
- * FENCE`.
- */
-status_t flatten(HGraphicBufferProducer::FenceTimeSnapshot const& t,
-        void*& buffer, size_t& size, int*& fds, size_t& numFds) {
-    if (size < getFlattenedSize(t)) {
-        return NO_MEMORY;
-    }
-
-    switch (t.state) {
-        case HGraphicBufferProducer::FenceTimeSnapshot::State::EMPTY:
-            FlattenableUtils::write(buffer, size,
-                    ::android::FenceTime::Snapshot::State::EMPTY);
-            return NO_ERROR;
-        case HGraphicBufferProducer::FenceTimeSnapshot::State::FENCE:
-            FlattenableUtils::write(buffer, size,
-                    ::android::FenceTime::Snapshot::State::FENCE);
-            return flattenFence(t.fence, buffer, size, fds, numFds);
-        case HGraphicBufferProducer::FenceTimeSnapshot::State::SIGNAL_TIME:
-            FlattenableUtils::write(buffer, size,
-                    ::android::FenceTime::Snapshot::State::SIGNAL_TIME);
-            FlattenableUtils::write(buffer, size, t.signalTimeNs);
-            return NO_ERROR;
-    }
-    return NO_ERROR;
-}
-
-/**
- * \brief Unflatten `FenceTimeSnapshot`.
- *
- * \param[out] t The destination `FenceTimeSnapshot`.
- * \param[out] nh The underlying native handle.
- * \param[in,out] buffer The pointer to the flat non-fd buffer.
- * \param[in,out] size The size of the flat non-fd buffer.
- * \param[in,out] fds The pointer to the flat fd buffer.
- * \param[in,out] numFds The size of the flat fd buffer.
- * \return `NO_ERROR` on success; other value on failure.
- *
- * If the return value is `NO_ERROR` and the constructed snapshot contains a
- * file descriptor, \p nh will be created to hold that file descriptor. In this
- * case, \p nh needs to be deleted with `native_handle_delete()` afterwards.
- */
-status_t unflatten(
-        HGraphicBufferProducer::FenceTimeSnapshot* t, native_handle_t** nh,
-        void const*& buffer, size_t& size, int const*& fds, size_t& numFds) {
-    if (size < sizeof(t->state)) {
-        return NO_MEMORY;
-    }
-
-    *nh = nullptr;
-    ::android::FenceTime::Snapshot::State state;
-    FlattenableUtils::read(buffer, size, state);
-    switch (state) {
-        case ::android::FenceTime::Snapshot::State::EMPTY:
-            t->state = HGraphicBufferProducer::FenceTimeSnapshot::State::EMPTY;
-            return NO_ERROR;
-        case ::android::FenceTime::Snapshot::State::FENCE:
-            t->state = HGraphicBufferProducer::FenceTimeSnapshot::State::FENCE;
-            return unflattenFence(&t->fence, nh, buffer, size, fds, numFds);
-        case ::android::FenceTime::Snapshot::State::SIGNAL_TIME:
-            t->state = HGraphicBufferProducer::FenceTimeSnapshot::State::SIGNAL_TIME;
-            if (size < sizeof(t->signalTimeNs)) {
-                return NO_MEMORY;
-            }
-            FlattenableUtils::read(buffer, size, t->signalTimeNs);
-            return NO_ERROR;
-    }
-    return NO_ERROR;
-}
-
-// Ref: frameworks/native/libs/gui/FrameTimestamps.cpp: FrameEventsDelta
-
-/**
- * \brief Return a lower bound on the size of the non-fd buffer required to
- * flatten `FrameEventsDelta`.
- *
- * \param[in] t The input `FrameEventsDelta`.
- * \return A lower bound on the size of the flat buffer.
- */
-constexpr size_t minFlattenedSize(
-        HGraphicBufferProducer::FrameEventsDelta const& /* t */) {
-    return sizeof(uint64_t) + // mFrameNumber
-            sizeof(uint8_t) + // mIndex
-            sizeof(uint8_t) + // mAddPostCompositeCalled
-            sizeof(uint8_t) + // mAddRetireCalled
-            sizeof(uint8_t) + // mAddReleaseCalled
-            sizeof(nsecs_t) + // mPostedTime
-            sizeof(nsecs_t) + // mRequestedPresentTime
-            sizeof(nsecs_t) + // mLatchTime
-            sizeof(nsecs_t) + // mFirstRefreshStartTime
-            sizeof(nsecs_t); // mLastRefreshStartTime
-}
-
-/**
- * \brief Return the size of the non-fd buffer required to flatten
- * `FrameEventsDelta`.
- *
- * \param[in] t The input `FrameEventsDelta`.
- * \return The required size of the flat buffer.
- */
-size_t getFlattenedSize(
-        HGraphicBufferProducer::FrameEventsDelta const& t) {
-    return minFlattenedSize(t) +
-            getFlattenedSize(t.gpuCompositionDoneFence) +
-            getFlattenedSize(t.displayPresentFence) +
-            getFlattenedSize(t.displayRetireFence) +
-            getFlattenedSize(t.releaseFence);
-};
-
-/**
- * \brief Return the number of file descriptors contained in
- * `FrameEventsDelta`.
- *
- * \param[in] t The input `FrameEventsDelta`.
- * \return The number of file descriptors contained in \p t.
- */
-size_t getFdCount(
-        HGraphicBufferProducer::FrameEventsDelta const& t) {
-    return getFdCount(t.gpuCompositionDoneFence) +
-            getFdCount(t.displayPresentFence) +
-            getFdCount(t.displayRetireFence) +
-            getFdCount(t.releaseFence);
-};
-
-/**
- * \brief Unflatten `FrameEventsDelta`.
- *
- * \param[out] t The destination `FrameEventsDelta`.
- * \param[out] nh The underlying array of native handles.
- * \param[in,out] buffer The pointer to the flat non-fd buffer.
- * \param[in,out] size The size of the flat non-fd buffer.
- * \param[in,out] fds The pointer to the flat fd buffer.
- * \param[in,out] numFds The size of the flat fd buffer.
- * \return `NO_ERROR` on success; other value on failure.
- *
- * If the return value is `NO_ERROR`, \p nh will have length 4, and it will be
- * populated with `nullptr` or newly created handles. Each non-null slot in \p
- * nh will need to be deleted manually with `native_handle_delete()`.
- */
-status_t unflatten(HGraphicBufferProducer::FrameEventsDelta* t,
-        std::vector<native_handle_t*>* nh,
-        void const*& buffer, size_t& size, int const*& fds, size_t& numFds) {
-    if (size < minFlattenedSize(*t)) {
-        return NO_MEMORY;
-    }
-    FlattenableUtils::read(buffer, size, t->frameNumber);
-
-    // These were written as uint8_t for alignment.
-    uint8_t temp = 0;
-    FlattenableUtils::read(buffer, size, temp);
-    size_t index = static_cast<size_t>(temp);
-    if (index >= ::android::FrameEventHistory::MAX_FRAME_HISTORY) {
-        return BAD_VALUE;
-    }
-    t->index = static_cast<uint32_t>(index);
-
-    FlattenableUtils::read(buffer, size, temp);
-    t->addPostCompositeCalled = static_cast<bool>(temp);
-    FlattenableUtils::read(buffer, size, temp);
-    t->addRetireCalled = static_cast<bool>(temp);
-    FlattenableUtils::read(buffer, size, temp);
-    t->addReleaseCalled = static_cast<bool>(temp);
-
-    FlattenableUtils::read(buffer, size, t->postedTimeNs);
-    FlattenableUtils::read(buffer, size, t->requestedPresentTimeNs);
-    FlattenableUtils::read(buffer, size, t->latchTimeNs);
-    FlattenableUtils::read(buffer, size, t->firstRefreshStartTimeNs);
-    FlattenableUtils::read(buffer, size, t->lastRefreshStartTimeNs);
-    FlattenableUtils::read(buffer, size, t->dequeueReadyTime);
-
-    // Fences
-    HGraphicBufferProducer::FenceTimeSnapshot* tSnapshot[4];
-    tSnapshot[0] = &t->gpuCompositionDoneFence;
-    tSnapshot[1] = &t->displayPresentFence;
-    tSnapshot[2] = &t->displayRetireFence;
-    tSnapshot[3] = &t->releaseFence;
-    nh->resize(4);
-    for (size_t snapshotIndex = 0; snapshotIndex < 4; ++snapshotIndex) {
-        status_t status = unflatten(
-                tSnapshot[snapshotIndex], &((*nh)[snapshotIndex]),
-                buffer, size, fds, numFds);
-        if (status != NO_ERROR) {
-            while (snapshotIndex > 0) {
-                --snapshotIndex;
-                if ((*nh)[snapshotIndex] != nullptr) {
-                    native_handle_delete((*nh)[snapshotIndex]);
-                }
-            }
-            return status;
-        }
-    }
-    return NO_ERROR;
-}
-
-/**
- * \brief Flatten `FrameEventsDelta`.
- *
- * \param[in] t The source `FrameEventsDelta`.
- * \param[in,out] buffer The pointer to the flat non-fd buffer.
- * \param[in,out] size The size of the flat non-fd buffer.
- * \param[in,out] fds The pointer to the flat fd buffer.
- * \param[in,out] numFds The size of the flat fd buffer.
- * \return `NO_ERROR` on success; other value on failure.
- *
- * This function will duplicate file descriptors contained in \p t.
- */
-// Ref: frameworks/native/libs/gui/FrameTimestamp.cpp:
-//      FrameEventsDelta::flatten
-status_t flatten(HGraphicBufferProducer::FrameEventsDelta const& t,
-        void*& buffer, size_t& size, int*& fds, size_t numFds) {
-    // Check that t.index is within a valid range.
-    if (t.index >= static_cast<uint32_t>(FrameEventHistory::MAX_FRAME_HISTORY)
-            || t.index > std::numeric_limits<uint8_t>::max()) {
-        return BAD_VALUE;
-    }
-
-    FlattenableUtils::write(buffer, size, t.frameNumber);
-
-    // These are static_cast to uint8_t for alignment.
-    FlattenableUtils::write(buffer, size, static_cast<uint8_t>(t.index));
-    FlattenableUtils::write(
-            buffer, size, static_cast<uint8_t>(t.addPostCompositeCalled));
-    FlattenableUtils::write(
-            buffer, size, static_cast<uint8_t>(t.addRetireCalled));
-    FlattenableUtils::write(
-            buffer, size, static_cast<uint8_t>(t.addReleaseCalled));
-
-    FlattenableUtils::write(buffer, size, t.postedTimeNs);
-    FlattenableUtils::write(buffer, size, t.requestedPresentTimeNs);
-    FlattenableUtils::write(buffer, size, t.latchTimeNs);
-    FlattenableUtils::write(buffer, size, t.firstRefreshStartTimeNs);
-    FlattenableUtils::write(buffer, size, t.lastRefreshStartTimeNs);
-    FlattenableUtils::write(buffer, size, t.dequeueReadyTime);
-
-    // Fences
-    HGraphicBufferProducer::FenceTimeSnapshot const* tSnapshot[4];
-    tSnapshot[0] = &t.gpuCompositionDoneFence;
-    tSnapshot[1] = &t.displayPresentFence;
-    tSnapshot[2] = &t.displayRetireFence;
-    tSnapshot[3] = &t.releaseFence;
-    for (size_t snapshotIndex = 0; snapshotIndex < 4; ++snapshotIndex) {
-        status_t status = flatten(
-                *(tSnapshot[snapshotIndex]), buffer, size, fds, numFds);
-        if (status != NO_ERROR) {
-            return status;
-        }
-    }
-    return NO_ERROR;
-}
-
-// Ref: frameworks/native/libs/gui/FrameTimestamps.cpp: FrameEventHistoryDelta
-
-/**
- * \brief Return the size of the non-fd buffer required to flatten
- * `HGraphicBufferProducer::FrameEventHistoryDelta`.
- *
- * \param[in] t The input `HGraphicBufferProducer::FrameEventHistoryDelta`.
- * \return The required size of the flat buffer.
- */
-size_t getFlattenedSize(
-        HGraphicBufferProducer::FrameEventHistoryDelta const& t) {
-    size_t size = 4 + // mDeltas.size()
-            sizeof(t.compositorTiming);
-    for (size_t i = 0; i < t.deltas.size(); ++i) {
-        size += getFlattenedSize(t.deltas[i]);
-    }
-    return size;
-}
-
-/**
- * \brief Return the number of file descriptors contained in
- * `HGraphicBufferProducer::FrameEventHistoryDelta`.
- *
- * \param[in] t The input `HGraphicBufferProducer::FrameEventHistoryDelta`.
- * \return The number of file descriptors contained in \p t.
- */
-size_t getFdCount(
-        HGraphicBufferProducer::FrameEventHistoryDelta const& t) {
-    size_t numFds = 0;
-    for (size_t i = 0; i < t.deltas.size(); ++i) {
-        numFds += getFdCount(t.deltas[i]);
-    }
-    return numFds;
-}
-
-/**
- * \brief Unflatten `FrameEventHistoryDelta`.
- *
- * \param[out] t The destination `FrameEventHistoryDelta`.
- * \param[out] nh The underlying array of arrays of native handles.
- * \param[in,out] buffer The pointer to the flat non-fd buffer.
- * \param[in,out] size The size of the flat non-fd buffer.
- * \param[in,out] fds The pointer to the flat fd buffer.
- * \param[in,out] numFds The size of the flat fd buffer.
- * \return `NO_ERROR` on success; other value on failure.
- *
- * If the return value is `NO_ERROR`, \p nh will be populated with `nullptr` or
- * newly created handles. The second dimension of \p nh will be 4. Each non-null
- * slot in \p nh will need to be deleted manually with `native_handle_delete()`.
- */
-status_t unflatten(
-        HGraphicBufferProducer::FrameEventHistoryDelta* t,
-        std::vector<std::vector<native_handle_t*> >* nh,
-        void const*& buffer, size_t& size, int const*& fds, size_t& numFds) {
-    if (size < 4) {
-        return NO_MEMORY;
-    }
-
-    FlattenableUtils::read(buffer, size, t->compositorTiming);
-
-    uint32_t deltaCount = 0;
-    FlattenableUtils::read(buffer, size, deltaCount);
-    if (static_cast<size_t>(deltaCount) >
-            ::android::FrameEventHistory::MAX_FRAME_HISTORY) {
-        return BAD_VALUE;
-    }
-    t->deltas.resize(deltaCount);
-    nh->resize(deltaCount);
-    for (size_t deltaIndex = 0; deltaIndex < deltaCount; ++deltaIndex) {
-        status_t status = unflatten(
-                &(t->deltas[deltaIndex]), &((*nh)[deltaIndex]),
-                buffer, size, fds, numFds);
-        if (status != NO_ERROR) {
-            return status;
-        }
-    }
-    return NO_ERROR;
-}
-
-/**
- * \brief Flatten `FrameEventHistoryDelta`.
- *
- * \param[in] t The source `FrameEventHistoryDelta`.
- * \param[in,out] buffer The pointer to the flat non-fd buffer.
- * \param[in,out] size The size of the flat non-fd buffer.
- * \param[in,out] fds The pointer to the flat fd buffer.
- * \param[in,out] numFds The size of the flat fd buffer.
- * \return `NO_ERROR` on success; other value on failure.
- *
- * This function will duplicate file descriptors contained in \p t.
- */
-status_t flatten(
-        HGraphicBufferProducer::FrameEventHistoryDelta const& t,
-        void*& buffer, size_t& size, int*& fds, size_t& numFds) {
-    if (t.deltas.size() > ::android::FrameEventHistory::MAX_FRAME_HISTORY) {
-        return BAD_VALUE;
-    }
-    if (size < getFlattenedSize(t)) {
-        return NO_MEMORY;
-    }
-
-    FlattenableUtils::write(buffer, size, t.compositorTiming);
-
-    FlattenableUtils::write(buffer, size, static_cast<uint32_t>(t.deltas.size()));
-    for (size_t deltaIndex = 0; deltaIndex < t.deltas.size(); ++deltaIndex) {
-        status_t status = flatten(t.deltas[deltaIndex], buffer, size, fds, numFds);
-        if (status != NO_ERROR) {
-            return status;
-        }
-    }
-    return NO_ERROR;
-}
-
-/**
- * \brief Wrap `::android::FrameEventHistoryData` in
- * `HGraphicBufferProducer::FrameEventHistoryDelta`.
- *
- * \param[out] t The wrapper of type
- * `HGraphicBufferProducer::FrameEventHistoryDelta`.
- * \param[out] nh The array of array of native handles that are referred to by
- * members of \p t.
- * \param[in] l The source `::android::FrameEventHistoryDelta`.
- *
- * On success, each member of \p nh will be either `nullptr` or a newly created
- * native handle. All the non-`nullptr` elements must be deleted individually
- * with `native_handle_delete()`.
- */
-bool wrapAs(HGraphicBufferProducer::FrameEventHistoryDelta* t,
-        std::vector<std::vector<native_handle_t*> >* nh,
-        ::android::FrameEventHistoryDelta const& l) {
-
-    size_t const baseSize = l.getFlattenedSize();
-    std::unique_ptr<uint8_t[]> baseBuffer(
-            new (std::nothrow) uint8_t[baseSize]);
-    if (!baseBuffer) {
-        return false;
-    }
-
-    size_t const baseNumFds = l.getFdCount();
-    std::unique_ptr<int[]> baseFds(
-            new (std::nothrow) int[baseNumFds]);
-    if (!baseFds) {
-        return false;
-    }
-
-    void* buffer = static_cast<void*>(baseBuffer.get());
-    size_t size = baseSize;
-    int* fds = baseFds.get();
-    size_t numFds = baseNumFds;
-    if (l.flatten(buffer, size, fds, numFds) != NO_ERROR) {
-        return false;
-    }
-
-    void const* constBuffer = static_cast<void const*>(baseBuffer.get());
-    size = baseSize;
-    int const* constFds = static_cast<int const*>(baseFds.get());
-    numFds = baseNumFds;
-    if (unflatten(t, nh, constBuffer, size, constFds, numFds) != NO_ERROR) {
-        return false;
-    }
-
-    return true;
-}
-
-/**
- * \brief Convert `HGraphicBufferProducer::FrameEventHistoryDelta` to
- * `::android::FrameEventHistoryDelta`.
- *
- * \param[out] l The destination `::android::FrameEventHistoryDelta`.
- * \param[in] t The source `HGraphicBufferProducer::FrameEventHistoryDelta`.
- *
- * This function will duplicate all file descriptors contained in \p t.
- */
-bool convertTo(
-        ::android::FrameEventHistoryDelta* l,
-        HGraphicBufferProducer::FrameEventHistoryDelta const& t) {
-
-    size_t const baseSize = getFlattenedSize(t);
-    std::unique_ptr<uint8_t[]> baseBuffer(
-            new (std::nothrow) uint8_t[baseSize]);
-    if (!baseBuffer) {
-        return false;
-    }
-
-    size_t const baseNumFds = getFdCount(t);
-    std::unique_ptr<int[]> baseFds(
-            new (std::nothrow) int[baseNumFds]);
-    if (!baseFds) {
-        return false;
-    }
-
-    void* buffer = static_cast<void*>(baseBuffer.get());
-    size_t size = baseSize;
-    int* fds = static_cast<int*>(baseFds.get());
-    size_t numFds = baseNumFds;
-    if (flatten(t, buffer, size, fds, numFds) != NO_ERROR) {
-        return false;
-    }
-
-    void const* constBuffer = static_cast<void const*>(baseBuffer.get());
-    size = baseSize;
-    int const* constFds = static_cast<int const*>(baseFds.get());
-    numFds = baseNumFds;
-    if (l->unflatten(constBuffer, size, constFds, numFds) != NO_ERROR) {
-        return false;
-    }
-
-    return true;
-}
-
-// Ref: frameworks/native/libs/ui/Region.cpp
-
-/**
- * \brief Return the size of the buffer required to flatten `Region`.
- *
- * \param[in] t The input `Region`.
- * \return The required size of the flat buffer.
- */
-size_t getFlattenedSize(Region const& t) {
-    return sizeof(uint32_t) + t.size() * sizeof(::android::Rect);
-}
-
-/**
- * \brief Unflatten `Region`.
- *
- * \param[out] t The destination `Region`.
- * \param[in,out] buffer The pointer to the flat buffer.
- * \param[in,out] size The size of the flat buffer.
- * \return `NO_ERROR` on success; other value on failure.
- */
-status_t unflatten(Region* t, void const*& buffer, size_t& size) {
-    if (size < sizeof(uint32_t)) {
-        return NO_MEMORY;
-    }
-
-    uint32_t numRects = 0;
-    FlattenableUtils::read(buffer, size, numRects);
-    if (size < numRects * sizeof(Rect)) {
-        return NO_MEMORY;
-    }
-    if (numRects > (UINT32_MAX / sizeof(Rect))) {
-        return NO_MEMORY;
-    }
-
-    t->resize(numRects);
-    for (size_t r = 0; r < numRects; ++r) {
-        ::android::Rect rect(::android::Rect::EMPTY_RECT);
-        status_t status = rect.unflatten(buffer, size);
-        if (status != NO_ERROR) {
-            return status;
-        }
-        FlattenableUtils::advance(buffer, size, sizeof(rect));
-        (*t)[r] = Rect{
-                static_cast<int32_t>(rect.left),
-                static_cast<int32_t>(rect.top),
-                static_cast<int32_t>(rect.right),
-                static_cast<int32_t>(rect.bottom)};
-    }
-    return NO_ERROR;
-}
-
-/**
- * \brief Flatten `Region`.
- *
- * \param[in] t The source `Region`.
- * \param[in,out] buffer The pointer to the flat buffer.
- * \param[in,out] size The size of the flat buffer.
- * \return `NO_ERROR` on success; other value on failure.
- */
-status_t flatten(Region const& t, void*& buffer, size_t& size) {
-    if (size < getFlattenedSize(t)) {
-        return NO_MEMORY;
-    }
-
-    FlattenableUtils::write(buffer, size, static_cast<uint32_t>(t.size()));
-    for (size_t r = 0; r < t.size(); ++r) {
-        ::android::Rect rect(
-                static_cast<int32_t>(t[r].left),
-                static_cast<int32_t>(t[r].top),
-                static_cast<int32_t>(t[r].right),
-                static_cast<int32_t>(t[r].bottom));
-        status_t status = rect.flatten(buffer, size);
-        if (status != NO_ERROR) {
-            return status;
-        }
-        FlattenableUtils::advance(buffer, size, sizeof(rect));
-    }
-    return NO_ERROR;
-}
-
-/**
- * \brief Convert `::android::Region` to `Region`.
- *
- * \param[out] t The destination `Region`.
- * \param[in] l The source `::android::Region`.
- */
-// convert: ::android::Region -> Region
-bool convertTo(Region* t, ::android::Region const& l) {
-    size_t const baseSize = l.getFlattenedSize();
-    std::unique_ptr<uint8_t[]> baseBuffer(
-            new (std::nothrow) uint8_t[baseSize]);
-    if (!baseBuffer) {
-        return false;
-    }
-
-    void* buffer = static_cast<void*>(baseBuffer.get());
-    size_t size = baseSize;
-    if (l.flatten(buffer, size) != NO_ERROR) {
-        return false;
-    }
-
-    void const* constBuffer = static_cast<void const*>(baseBuffer.get());
-    size = baseSize;
-    if (unflatten(t, constBuffer, size) != NO_ERROR) {
-        return false;
-    }
-
-    return true;
-}
-
-/**
- * \brief Convert `Region` to `::android::Region`.
- *
- * \param[out] l The destination `::android::Region`.
- * \param[in] t The source `Region`.
- */
-// convert: Region -> ::android::Region
-bool convertTo(::android::Region* l, Region const& t) {
-    size_t const baseSize = getFlattenedSize(t);
-    std::unique_ptr<uint8_t[]> baseBuffer(
-            new (std::nothrow) uint8_t[baseSize]);
-    if (!baseBuffer) {
-        return false;
-    }
-
-    void* buffer = static_cast<void*>(baseBuffer.get());
-    size_t size = baseSize;
-    if (flatten(t, buffer, size) != NO_ERROR) {
-        return false;
-    }
-
-    void const* constBuffer = static_cast<void const*>(baseBuffer.get());
-    size = baseSize;
-    if (l->unflatten(constBuffer, size) != NO_ERROR) {
-        return false;
-    }
-
-    return true;
-}
-
-// Ref: frameworks/native/libs/gui/BGraphicBufferProducer.cpp:
-//      BGraphicBufferProducer::QueueBufferInput
-
-/**
- * \brief Return a lower bound on the size of the buffer required to flatten
- * `HGraphicBufferProducer::QueueBufferInput`.
- *
- * \param[in] t The input `HGraphicBufferProducer::QueueBufferInput`.
- * \return A lower bound on the size of the flat buffer.
- */
-constexpr size_t minFlattenedSize(
-        HGraphicBufferProducer::QueueBufferInput const& /* t */) {
-    return sizeof(int64_t) + // timestamp
-            sizeof(int) + // isAutoTimestamp
-            sizeof(android_dataspace) + // dataSpace
-            sizeof(::android::Rect) + // crop
-            sizeof(int) + // scalingMode
-            sizeof(uint32_t) + // transform
-            sizeof(uint32_t) + // stickyTransform
-            sizeof(bool); // getFrameTimestamps
-}
-
-/**
- * \brief Return the size of the buffer required to flatten
- * `HGraphicBufferProducer::QueueBufferInput`.
- *
- * \param[in] t The input `HGraphicBufferProducer::QueueBufferInput`.
- * \return The required size of the flat buffer.
- */
-size_t getFlattenedSize(HGraphicBufferProducer::QueueBufferInput const& t) {
-    return minFlattenedSize(t) +
-            getFenceFlattenedSize(t.fence) +
-            getFlattenedSize(t.surfaceDamage) +
-            sizeof(HdrMetadata::validTypes);
-}
-
-/**
- * \brief Return the number of file descriptors contained in
- * `HGraphicBufferProducer::QueueBufferInput`.
- *
- * \param[in] t The input `HGraphicBufferProducer::QueueBufferInput`.
- * \return The number of file descriptors contained in \p t.
- */
-size_t getFdCount(
-        HGraphicBufferProducer::QueueBufferInput const& t) {
-    return getFenceFdCount(t.fence);
-}
-
-/**
- * \brief Flatten `HGraphicBufferProducer::QueueBufferInput`.
- *
- * \param[in] t The source `HGraphicBufferProducer::QueueBufferInput`.
- * \param[out] nh The native handle cloned from `t.fence`.
- * \param[in,out] buffer The pointer to the flat non-fd buffer.
- * \param[in,out] size The size of the flat non-fd buffer.
- * \param[in,out] fds The pointer to the flat fd buffer.
- * \param[in,out] numFds The size of the flat fd buffer.
- * \return `NO_ERROR` on success; other value on failure.
- *
- * This function will duplicate the file descriptor in `t.fence`. */
-status_t flatten(HGraphicBufferProducer::QueueBufferInput const& t,
-        native_handle_t** nh,
-        void*& buffer, size_t& size, int*& fds, size_t& numFds) {
-    if (size < getFlattenedSize(t)) {
-        return NO_MEMORY;
-    }
-
-    FlattenableUtils::write(buffer, size, t.timestamp);
-    FlattenableUtils::write(buffer, size, static_cast<int>(t.isAutoTimestamp));
-    FlattenableUtils::write(buffer, size,
-            static_cast<android_dataspace_t>(t.dataSpace));
-    FlattenableUtils::write(buffer, size, ::android::Rect(
-            static_cast<int32_t>(t.crop.left),
-            static_cast<int32_t>(t.crop.top),
-            static_cast<int32_t>(t.crop.right),
-            static_cast<int32_t>(t.crop.bottom)));
-    FlattenableUtils::write(buffer, size, static_cast<int>(t.scalingMode));
-    FlattenableUtils::write(buffer, size, t.transform);
-    FlattenableUtils::write(buffer, size, t.stickyTransform);
-    FlattenableUtils::write(buffer, size, t.getFrameTimestamps);
-
-    *nh = t.fence.getNativeHandle() == nullptr ?
-            nullptr : native_handle_clone(t.fence);
-    status_t status = flattenFence(hidl_handle(*nh), buffer, size, fds, numFds);
-    if (status != NO_ERROR) {
-        return status;
-    }
-    status = flatten(t.surfaceDamage, buffer, size);
-    if (status != NO_ERROR) {
-        return status;
-    }
-    FlattenableUtils::write(buffer, size, decltype(HdrMetadata::validTypes)(0));
-    return NO_ERROR;
-}
-
-/**
- * \brief Unflatten `HGraphicBufferProducer::QueueBufferInput`.
- *
- * \param[out] t The destination `HGraphicBufferProducer::QueueBufferInput`.
- * \param[out] nh The underlying native handle for `t->fence`.
- * \param[in,out] buffer The pointer to the flat non-fd buffer.
- * \param[in,out] size The size of the flat non-fd buffer.
- * \param[in,out] fds The pointer to the flat fd buffer.
- * \param[in,out] numFds The size of the flat fd buffer.
- * \return `NO_ERROR` on success; other value on failure.
- *
- * If the return value is `NO_ERROR` and `t->fence` contains a valid file
- * descriptor, \p nh will be a newly created native handle holding that file
- * descriptor. \p nh needs to be deleted with `native_handle_delete()`
- * afterwards.
- */
-status_t unflatten(
-        HGraphicBufferProducer::QueueBufferInput* t, native_handle_t** nh,
-        void const*& buffer, size_t& size, int const*& fds, size_t& numFds) {
-    if (size < minFlattenedSize(*t)) {
-        return NO_MEMORY;
-    }
-
-    FlattenableUtils::read(buffer, size, t->timestamp);
-    int lIsAutoTimestamp;
-    FlattenableUtils::read(buffer, size, lIsAutoTimestamp);
-    t->isAutoTimestamp = static_cast<int32_t>(lIsAutoTimestamp);
-    android_dataspace_t lDataSpace;
-    FlattenableUtils::read(buffer, size, lDataSpace);
-    t->dataSpace = static_cast<Dataspace>(lDataSpace);
-    Rect lCrop;
-    FlattenableUtils::read(buffer, size, lCrop);
-    t->crop = Rect{
-            static_cast<int32_t>(lCrop.left),
-            static_cast<int32_t>(lCrop.top),
-            static_cast<int32_t>(lCrop.right),
-            static_cast<int32_t>(lCrop.bottom)};
-    int lScalingMode;
-    FlattenableUtils::read(buffer, size, lScalingMode);
-    t->scalingMode = static_cast<int32_t>(lScalingMode);
-    FlattenableUtils::read(buffer, size, t->transform);
-    FlattenableUtils::read(buffer, size, t->stickyTransform);
-    FlattenableUtils::read(buffer, size, t->getFrameTimestamps);
-
-    status_t status = unflattenFence(&(t->fence), nh,
-            buffer, size, fds, numFds);
-    if (status != NO_ERROR) {
-        return status;
-    }
-    // HdrMetadata ignored
-    return unflatten(&(t->surfaceDamage), buffer, size);
-}
-
-/**
- * \brief Wrap `BGraphicBufferProducer::QueueBufferInput` in
- * `HGraphicBufferProducer::QueueBufferInput`.
- *
- * \param[out] t The wrapper of type
- * `HGraphicBufferProducer::QueueBufferInput`.
- * \param[out] nh The underlying native handle for `t->fence`.
- * \param[in] l The source `BGraphicBufferProducer::QueueBufferInput`.
- *
- * If the return value is `true` and `t->fence` contains a valid file
- * descriptor, \p nh will be a newly created native handle holding that file
- * descriptor. \p nh needs to be deleted with `native_handle_delete()`
- * afterwards.
- */
-bool wrapAs(
-        HGraphicBufferProducer::QueueBufferInput* t,
-        native_handle_t** nh,
-        BGraphicBufferProducer::QueueBufferInput const& l) {
-
-    size_t const baseSize = l.getFlattenedSize();
-    std::unique_ptr<uint8_t[]> baseBuffer(
-            new (std::nothrow) uint8_t[baseSize]);
-    if (!baseBuffer) {
-        return false;
-    }
-
-    size_t const baseNumFds = l.getFdCount();
-    std::unique_ptr<int[]> baseFds(
-            new (std::nothrow) int[baseNumFds]);
-    if (!baseFds) {
-        return false;
-    }
-
-    void* buffer = static_cast<void*>(baseBuffer.get());
-    size_t size = baseSize;
-    int* fds = baseFds.get();
-    size_t numFds = baseNumFds;
-    if (l.flatten(buffer, size, fds, numFds) != NO_ERROR) {
-        return false;
-    }
-
-    void const* constBuffer = static_cast<void const*>(baseBuffer.get());
-    size = baseSize;
-    int const* constFds = static_cast<int const*>(baseFds.get());
-    numFds = baseNumFds;
-    if (unflatten(t, nh, constBuffer, size, constFds, numFds) != NO_ERROR) {
-        return false;
-    }
-
-    return true;
-}
-
-/**
- * \brief Convert `HGraphicBufferProducer::QueueBufferInput` to
- * `BGraphicBufferProducer::QueueBufferInput`.
- *
- * \param[out] l The destination `BGraphicBufferProducer::QueueBufferInput`.
- * \param[in] t The source `HGraphicBufferProducer::QueueBufferInput`.
- *
- * If `t.fence` has a valid file descriptor, it will be duplicated.
- */
-bool convertTo(
-        BGraphicBufferProducer::QueueBufferInput* l,
-        HGraphicBufferProducer::QueueBufferInput const& t) {
-
-    size_t const baseSize = getFlattenedSize(t);
-    std::unique_ptr<uint8_t[]> baseBuffer(
-            new (std::nothrow) uint8_t[baseSize]);
-    if (!baseBuffer) {
-        return false;
-    }
-
-    size_t const baseNumFds = getFdCount(t);
-    std::unique_ptr<int[]> baseFds(
-            new (std::nothrow) int[baseNumFds]);
-    if (!baseFds) {
-        return false;
-    }
-
-    void* buffer = static_cast<void*>(baseBuffer.get());
-    size_t size = baseSize;
-    int* fds = baseFds.get();
-    size_t numFds = baseNumFds;
-    native_handle_t* nh;
-    if (flatten(t, &nh, buffer, size, fds, numFds) != NO_ERROR) {
-        return false;
-    }
-
-    void const* constBuffer = static_cast<void const*>(baseBuffer.get());
-    size = baseSize;
-    int const* constFds = static_cast<int const*>(baseFds.get());
-    numFds = baseNumFds;
-    if (l->unflatten(constBuffer, size, constFds, numFds) != NO_ERROR) {
-        if (nh != nullptr) {
-            native_handle_close(nh);
-            native_handle_delete(nh);
-        }
-        return false;
-    }
-
-    native_handle_delete(nh);
-    return true;
-}
-
-// Ref: frameworks/native/libs/gui/BGraphicBufferProducer.cpp:
-//      BGraphicBufferProducer::QueueBufferOutput
-
-/**
- * \brief Wrap `BGraphicBufferProducer::QueueBufferOutput` in
- * `HGraphicBufferProducer::QueueBufferOutput`.
- *
- * \param[out] t The wrapper of type
- * `HGraphicBufferProducer::QueueBufferOutput`.
- * \param[out] nh The array of array of native handles that are referred to by
- * members of \p t.
- * \param[in] l The source `BGraphicBufferProducer::QueueBufferOutput`.
- *
- * On success, each member of \p nh will be either `nullptr` or a newly created
- * native handle. All the non-`nullptr` elements must be deleted individually
- * with `native_handle_delete()`.
- */
-// wrap: BGraphicBufferProducer::QueueBufferOutput ->
-// HGraphicBufferProducer::QueueBufferOutput
-bool wrapAs(HGraphicBufferProducer::QueueBufferOutput* t,
-        std::vector<std::vector<native_handle_t*> >* nh,
-        BGraphicBufferProducer::QueueBufferOutput const& l) {
-    if (!wrapAs(&(t->frameTimestamps), nh, l.frameTimestamps)) {
-        return false;
-    }
-    t->width = l.width;
-    t->height = l.height;
-    t->transformHint = l.transformHint;
-    t->numPendingBuffers = l.numPendingBuffers;
-    t->nextFrameNumber = l.nextFrameNumber;
-    t->bufferReplaced = l.bufferReplaced;
-    return true;
-}
-
-/**
- * \brief Convert `HGraphicBufferProducer::QueueBufferOutput` to
- * `BGraphicBufferProducer::QueueBufferOutput`.
- *
- * \param[out] l The destination `BGraphicBufferProducer::QueueBufferOutput`.
- * \param[in] t The source `HGraphicBufferProducer::QueueBufferOutput`.
- *
- * This function will duplicate all file descriptors contained in \p t.
- */
-// convert: HGraphicBufferProducer::QueueBufferOutput ->
-// BGraphicBufferProducer::QueueBufferOutput
-bool convertTo(
-        BGraphicBufferProducer::QueueBufferOutput* l,
-        HGraphicBufferProducer::QueueBufferOutput const& t) {
-    if (!convertTo(&(l->frameTimestamps), t.frameTimestamps)) {
-        return false;
-    }
-    l->width = t.width;
-    l->height = t.height;
-    l->transformHint = t.transformHint;
-    l->numPendingBuffers = t.numPendingBuffers;
-    l->nextFrameNumber = t.nextFrameNumber;
-    l->bufferReplaced = t.bufferReplaced;
-    return true;
-}
-
-/**
- * \brief Convert `BGraphicBufferProducer::DisconnectMode` to
- * `HGraphicBufferProducer::DisconnectMode`.
- *
- * \param[in] l The source `BGraphicBufferProducer::DisconnectMode`.
- * \return The corresponding `HGraphicBufferProducer::DisconnectMode`.
- */
-HGraphicBufferProducer::DisconnectMode toHidlDisconnectMode(
-        BGraphicBufferProducer::DisconnectMode l) {
-    switch (l) {
-        case BGraphicBufferProducer::DisconnectMode::Api:
-            return HGraphicBufferProducer::DisconnectMode::API;
-        case BGraphicBufferProducer::DisconnectMode::AllLocal:
-            return HGraphicBufferProducer::DisconnectMode::ALL_LOCAL;
-    }
-    return HGraphicBufferProducer::DisconnectMode::API;
-}
-
-/**
- * \brief Convert `HGraphicBufferProducer::DisconnectMode` to
- * `BGraphicBufferProducer::DisconnectMode`.
- *
- * \param[in] l The source `HGraphicBufferProducer::DisconnectMode`.
- * \return The corresponding `BGraphicBufferProducer::DisconnectMode`.
- */
-BGraphicBufferProducer::DisconnectMode toGuiDisconnectMode(
-        HGraphicBufferProducer::DisconnectMode t) {
-    switch (t) {
-        case HGraphicBufferProducer::DisconnectMode::API:
-            return BGraphicBufferProducer::DisconnectMode::Api;
-        case HGraphicBufferProducer::DisconnectMode::ALL_LOCAL:
-            return BGraphicBufferProducer::DisconnectMode::AllLocal;
-    }
-    return BGraphicBufferProducer::DisconnectMode::Api;
-}
-
-}  // namespace conversion
-}  // namespace android
-
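The deleted wrapAs()/convertTo() helpers above all follow one round trip: size the object with getFlattenedSize()/getFdCount(), flatten it into a heap-allocated byte buffer plus fd array, then unflatten const views of those buffers into the destination type. Below is a minimal sketch of that pattern using a hypothetical flattenable type with the same four-cursor signatures; the names and status codes are illustrative stand-ins, not the AOSP types.

#include <cstddef>
#include <cstdint>
#include <cstring>
#include <memory>
#include <new>

// Hypothetical status convention mirroring the status_t idiom.
using my_status_t = int32_t;
static constexpr my_status_t MY_NO_ERROR = 0;
static constexpr my_status_t MY_BAD_VALUE = -22;

// Toy flattenable type with the flatten/unflatten cursor signatures used above.
struct MyInput {
    int32_t value = 0;

    size_t getFlattenedSize() const { return sizeof(value); }
    size_t getFdCount() const { return 0; }

    my_status_t flatten(void*& buffer, size_t& size, int*& /*fds*/, size_t& /*numFds*/) const {
        if (size < sizeof(value)) return MY_BAD_VALUE;
        std::memcpy(buffer, &value, sizeof(value));
        buffer = static_cast<uint8_t*>(buffer) + sizeof(value);
        size -= sizeof(value);
        return MY_NO_ERROR;
    }

    my_status_t unflatten(void const*& buffer, size_t& size, int const*& /*fds*/, size_t& /*numFds*/) {
        if (size < sizeof(value)) return MY_BAD_VALUE;
        std::memcpy(&value, buffer, sizeof(value));
        buffer = static_cast<uint8_t const*>(buffer) + sizeof(value);
        size -= sizeof(value);
        return MY_NO_ERROR;
    }
};

// Round trip in the same shape as the deleted convertTo() helpers: allocate,
// flatten the source, then unflatten const views into the destination.
bool convertViaFlatten(MyInput* dst, MyInput const& src) {
    size_t const baseSize = src.getFlattenedSize();
    std::unique_ptr<uint8_t[]> baseBuffer(new (std::nothrow) uint8_t[baseSize]);
    if (!baseBuffer) return false;

    size_t const baseNumFds = src.getFdCount();
    std::unique_ptr<int[]> baseFds(new (std::nothrow) int[baseNumFds]);
    if (baseNumFds != 0 && !baseFds) return false;

    void* buffer = baseBuffer.get();
    size_t size = baseSize;
    int* fds = baseFds.get();
    size_t numFds = baseNumFds;
    if (src.flatten(buffer, size, fds, numFds) != MY_NO_ERROR) return false;

    void const* constBuffer = baseBuffer.get();
    size = baseSize;
    int const* constFds = baseFds.get();
    numFds = baseNumFds;
    return dst->unflatten(constBuffer, size, constFds, numFds) == MY_NO_ERROR;
}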
diff --git a/media/libstagefright/bqhelper/GraphicBufferSource.cpp b/media/libstagefright/bqhelper/GraphicBufferSource.cpp
index 68abcb5..59317e7 100644
--- a/media/libstagefright/bqhelper/GraphicBufferSource.cpp
+++ b/media/libstagefright/bqhelper/GraphicBufferSource.cpp
@@ -32,7 +32,11 @@
 #include <media/hardware/MetadataBufferType.h>
 #include <ui/GraphicBuffer.h>
 #include <gui/BufferItem.h>
+#include <gui/BufferQueue.h>
+#include <gui/bufferqueue/1.0/WGraphicBufferProducer.h>
 #include <gui/bufferqueue/2.0/B2HGraphicBufferProducer.h>
+#include <gui/IGraphicBufferProducer.h>
+#include <gui/IGraphicBufferConsumer.h>
 #include <media/hardware/HardwareAPI.h>
 
 #include <inttypes.h>
@@ -389,6 +393,18 @@
     }
 }
 
+sp<IGraphicBufferProducer> GraphicBufferSource::getIGraphicBufferProducer() const {
+    return mProducer;
+}
+
+sp<::android::hardware::graphics::bufferqueue::V1_0::IGraphicBufferProducer>
+GraphicBufferSource::getHGraphicBufferProducer_V1_0() const {
+    using TWGraphicBufferProducer = ::android::TWGraphicBufferProducer<
+        ::android::hardware::graphics::bufferqueue::V1_0::IGraphicBufferProducer>;
+
+    return new TWGraphicBufferProducer(getIGraphicBufferProducer());
+}
+
 sp<::android::hardware::graphics::bufferqueue::V2_0::IGraphicBufferProducer>
 GraphicBufferSource::getHGraphicBufferProducer() const {
     return new ::android::hardware::graphics::bufferqueue::V2_0::utils::
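With the new accessor, callers that still speak the bufferqueue@1.0 HAL can obtain a Treble-facing handle to the same BufferQueue that getIGraphicBufferProducer() exposes. A rough caller-side sketch, assuming the bqhelper header above is on the include path and that GraphicBufferSource is held in an sp<> as elsewhere in the framework:

// Hypothetical usage sketch; not part of the patch.
#include <media/stagefright/bqhelper/GraphicBufferSource.h>

using HProducer1_0 =
        ::android::hardware::graphics::bufferqueue::V1_0::IGraphicBufferProducer;

::android::sp<HProducer1_0> makeLegacyHalProducer(
        const ::android::sp<::android::GraphicBufferSource>& source) {
    // Wraps the same binder producer returned by getIGraphicBufferProducer(),
    // so buffers queued through either interface reach the same source.
    return source->getHGraphicBufferProducer_V1_0();
}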
diff --git a/media/libstagefright/bqhelper/WProducerListener.cpp b/media/libstagefright/bqhelper/WProducerListener.cpp
deleted file mode 100644
index 2ca13be..0000000
--- a/media/libstagefright/bqhelper/WProducerListener.cpp
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Copyright 2016, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <media/stagefright/bqhelper/WProducerListener.h>
-
-namespace android {
-
-// TWProducerListener
-TWProducerListener::TWProducerListener(
-        sp<BProducerListener> const& base):
-    mBase(base) {
-}
-
-Return<void> TWProducerListener::onBufferReleased() {
-    mBase->onBufferReleased();
-    return Void();
-}
-
-Return<bool> TWProducerListener::needsReleaseNotify() {
-    return mBase->needsReleaseNotify();
-}
-
-// LWProducerListener
-LWProducerListener::LWProducerListener(
-        sp<HProducerListener> const& base):
-    mBase(base) {
-}
-
-void LWProducerListener::onBufferReleased() {
-    mBase->onBufferReleased();
-}
-
-bool LWProducerListener::needsReleaseNotify() {
-    return static_cast<bool>(mBase->needsReleaseNotify());
-}
-
-}  // namespace android
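The file removed above implemented the usual two-way adapter pair used throughout these helpers: a "TW" (Treble wrapper) class exposes a binder-side listener through the HIDL interface, and an "LW" (legacy wrapper) class does the reverse, each simply forwarding calls to the wrapped object. A stripped-down sketch of the same pattern with hypothetical single-method interfaces and std::shared_ptr in place of sp<>:

#include <memory>
#include <utility>

// Hypothetical stand-ins for the binder-side and HIDL-side listener interfaces.
struct BListener {                 // "binder" side
    virtual ~BListener() = default;
    virtual void onBufferReleased() = 0;
};
struct HListener {                 // "HIDL" side
    virtual ~HListener() = default;
    virtual void onBufferReleased() = 0;
};

// TW*: make a binder-side listener usable where a HIDL listener is expected.
class TWListener : public HListener {
public:
    explicit TWListener(std::shared_ptr<BListener> base) : mBase(std::move(base)) {}
    void onBufferReleased() override { mBase->onBufferReleased(); }
private:
    std::shared_ptr<BListener> mBase;
};

// LW*: make a HIDL-side listener usable where a binder listener is expected.
class LWListener : public BListener {
public:
    explicit LWListener(std::shared_ptr<HListener> base) : mBase(std::move(base)) {}
    void onBufferReleased() override { mBase->onBufferReleased(); }
private:
    std::shared_ptr<HListener> mBase;
};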
diff --git a/media/libstagefright/bqhelper/include/media/stagefright/bqhelper/Conversion.h b/media/libstagefright/bqhelper/include/media/stagefright/bqhelper/Conversion.h
deleted file mode 100644
index 60d8c1e..0000000
--- a/media/libstagefright/bqhelper/include/media/stagefright/bqhelper/Conversion.h
+++ /dev/null
@@ -1,769 +0,0 @@
-/*
- * Copyright 2016, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef MEDIA_STAGEFRIGHT_BQHELPER_CONVERSION_H_
-#define MEDIA_STAGEFRIGHT_BQHELPER_CONVERSION_H_
-
-#include <vector>
-#include <list>
-
-#include <unistd.h>
-
-#include <hidl/MQDescriptor.h>
-#include <hidl/Status.h>
-#include <hidlmemory/mapping.h>
-
-#include <binder/Binder.h>
-#include <binder/Status.h>
-#include <ui/FenceTime.h>
-#include <cutils/native_handle.h>
-#include <gui/IGraphicBufferProducer.h>
-
-#include <media/hardware/VideoAPI.h>
-
-#include <android/hidl/memory/1.0/IMemory.h>
-#include <android/hardware/graphics/bufferqueue/1.0/IProducerListener.h>
-
-namespace android {
-namespace conversion {
-
-using ::android::hardware::hidl_array;
-using ::android::hardware::hidl_string;
-using ::android::hardware::hidl_vec;
-using ::android::hardware::hidl_handle;
-using ::android::hardware::Return;
-using ::android::hardware::Void;
-using ::android::sp;
-using ::android::status_t;
-
-using ::android::String8;
-
-using ::android::hardware::media::V1_0::Rect;
-using ::android::hardware::media::V1_0::Region;
-
-using ::android::hardware::graphics::common::V1_0::Dataspace;
-
-using ::android::hardware::graphics::common::V1_0::PixelFormat;
-
-using ::android::hardware::media::V1_0::AnwBuffer;
-using ::android::GraphicBuffer;
-
-typedef ::android::hardware::graphics::bufferqueue::V1_0::IGraphicBufferProducer
-        HGraphicBufferProducer;
-typedef ::android::IGraphicBufferProducer
-        BGraphicBufferProducer;
-
-// native_handle_t helper functions.
-
-/**
- * \brief Take an fd and create a native handle containing only the given fd.
- * The created handle will need to be deleted manually with
- * `native_handle_delete()`.
- *
- * \param[in] fd The source file descriptor (of type `int`).
- * \return The created `native_handle_t*` that contains the given \p fd. If the
- * supplied \p fd is negative, the created native handle will contain no file
- * descriptors.
- *
- * If the native handle cannot be created, the return value will be
- * `nullptr`.
- *
- * This function does not duplicate the file descriptor.
- */
-native_handle_t* native_handle_create_from_fd(int fd);
-
-/**
- * \brief Extract a file descriptor from a native handle.
- *
- * \param[in] nh The source `native_handle_t*`.
- * \param[in] index The index of the file descriptor in \p nh to read from. This
- * input has the default value of `0`.
- * \return The `index`-th file descriptor in \p nh. If \p nh does not have
- * enough file descriptors, the returned value will be `-1`.
- *
- * This function does not duplicate the file descriptor.
- */
-int native_handle_read_fd(native_handle_t const* nh, int index = 0);
-
-/**
- * Conversion functions
- * ====================
- *
- * There are two main directions of conversion:
- * - `inTargetType(...)`: Create a wrapper whose lifetime depends on the
- *   input. The wrapper has type `TargetType`.
- * - `toTargetType(...)`: Create a standalone object of type `TargetType` that
- *   corresponds to the input. The lifetime of the output does not depend on the
- *   lifetime of the input.
- * - `wrapIn(TargetType*, ...)`: Same as `inTargetType()`, but for `TargetType`
- *   that cannot be copied and/or moved efficiently, or when there are multiple
- *   output arguments.
- * - `convertTo(TargetType*, ...)`: Same as `toTargetType()`, but for
- *   `TargetType` that cannot be copied and/or moved efficiently, or when there
- *   are multiple output arguments.
- *
- * `wrapIn()` and `convertTo()` functions will take output arguments before
- * input arguments. Some of these functions might return a value to indicate
- * success or error.
- *
- * In converting or wrapping something as a Treble type that contains a
- * `hidl_handle`, `native_handle_t*` will need to be created and returned as
- * an additional output argument, hence only `wrapIn()` or `convertTo()` would
- * be available. The caller must call `native_handle_delete()` to deallocate the
- * returned native handle when it is no longer needed.
- *
- * For types that contain file descriptors, `inTargetType()` and `wrapAs()` do
- * not perform duplication of file descriptors, while `toTargetType()` and
- * `convertTo()` do.
- */
-
-/**
- * \brief Convert `Return<void>` to `binder::Status`.
- *
- * \param[in] t The source `Return<void>`.
- * \return The corresponding `binder::Status`.
- */
-// convert: Return<void> -> ::android::binder::Status
-::android::binder::Status toBinderStatus(Return<void> const& t);
-
-/**
- * \brief Convert `Return<void>` to `status_t`. This is for legacy binder calls.
- *
- * \param[in] t The source `Return<void>`.
- * \return The corresponding `status_t`.
- */
-// convert: Return<void> -> status_t
-status_t toStatusT(Return<void> const& t);
-
-/**
- * \brief Wrap `native_handle_t*` in `hidl_handle`.
- *
- * \param[in] nh The source `native_handle_t*`.
- * \return The `hidl_handle` that points to \p nh.
- */
-// wrap: native_handle_t* -> hidl_handle
-hidl_handle inHidlHandle(native_handle_t const* nh);
-
-/**
- * \brief Convert `int32_t` to `Dataspace`.
- *
- * \param[in] l The source `int32_t`.
- * \result The corresponding `Dataspace`.
- */
-// convert: int32_t -> Dataspace
-Dataspace toHardwareDataspace(int32_t l);
-
-/**
- * \brief Convert `Dataspace` to `int32_t`.
- *
- * \param[in] t The source `Dataspace`.
- * \result The corresponding `int32_t`.
- */
-// convert: Dataspace -> int32_t
-int32_t toRawDataspace(Dataspace const& t);
-
-/**
- * \brief Wrap an opaque buffer inside a `hidl_vec<uint8_t>`.
- *
- * \param[in] l The pointer to the beginning of the opaque buffer.
- * \param[in] size The size of the buffer.
- * \return A `hidl_vec<uint8_t>` that points to the buffer.
- */
-// wrap: void*, size_t -> hidl_vec<uint8_t>
-hidl_vec<uint8_t> inHidlBytes(void const* l, size_t size);
-
-/**
- * \brief Create a `hidl_vec<uint8_t>` that is a copy of an opaque buffer.
- *
- * \param[in] l The pointer to the beginning of the opaque buffer.
- * \param[in] size The size of the buffer.
- * \return A `hidl_vec<uint8_t>` that is a copy of the input buffer.
- */
-// convert: void*, size_t -> hidl_vec<uint8_t>
-hidl_vec<uint8_t> toHidlBytes(void const* l, size_t size);
-
-/**
- * \brief Wrap `GraphicBuffer` in `AnwBuffer`.
- *
- * \param[out] t The wrapper of type `AnwBuffer`.
- * \param[in] l The source `GraphicBuffer`.
- */
-// wrap: GraphicBuffer -> AnwBuffer
-void wrapAs(AnwBuffer* t, GraphicBuffer const& l);
-
-/**
- * \brief Convert `AnwBuffer` to `GraphicBuffer`.
- *
- * \param[out] l The destination `GraphicBuffer`.
- * \param[in] t The source `AnwBuffer`.
- *
- * This function will duplicate all file descriptors in \p t.
- */
-// convert: AnwBuffer -> GraphicBuffer
-// Ref: frameworks/native/libs/ui/GraphicBuffer.cpp: GraphicBuffer::flatten
-bool convertTo(GraphicBuffer* l, AnwBuffer const& t);
-
-/**
- * Conversion functions for types outside media
- * ============================================
- *
- * Some objects in libui and libgui that were made to go through binder calls do
- * not expose ways to read or write their fields to the public. To pass an
- * object of this kind through the HIDL boundary, translation functions need to
- * work around the access restriction by using the publicly available
- * `flatten()` and `unflatten()` functions.
- *
- * All `flatten()` and `unflatten()` overloads follow the same convention as
- * follows:
- *
- *     status_t flatten(ObjectType const& object,
- *                      [OtherType const& other, ...]
- *                      void*& buffer, size_t& size,
- *                      int*& fds, size_t& numFds)
- *
- *     status_t unflatten(ObjectType* object,
- *                        [OtherType* other, ...,]
- *                        void*& buffer, size_t& size,
- *                        int*& fds, size_t& numFds)
- *
- * The number of `other` parameters varies depending on the `ObjectType`. For
- * example, in the process of unflattening an object that contains
- * `hidl_handle`, `other` is needed to hold `native_handle_t` objects that will
- * be created.
- *
- * The last four parameters always work the same way in all overloads of
- * `flatten()` and `unflatten()`:
- * - For `flatten()`, `buffer` is the pointer to the non-fd buffer to be filled,
- *   `size` is the size (in bytes) of the non-fd buffer pointed to by `buffer`,
- *   `fds` is the pointer to the fd buffer to be filled, and `numFds` is the
- *   size (in ints) of the fd buffer pointed to by `fds`.
- * - For `unflatten()`, `buffer` is the pointer to the non-fd buffer to be read
- *   from, `size` is the size (in bytes) of the non-fd buffer pointed to by
- *   `buffer`, `fds` is the pointer to the fd buffer to be read from, and
- *   `numFds` is the size (in ints) of the fd buffer pointed to by `fds`.
- * - After a successful call to `flatten()` or `unflatten()`, `buffer` and `fds`
- *   will be advanced, while `size` and `numFds` will be decreased to reflect
- *   how much storage/data of the two buffers (fd and non-fd) have been used.
- * - After an unsuccessful call, the values of `buffer`, `size`, `fds` and
- *   `numFds` are invalid.
- *
- * The return value of a successful `flatten()` or `unflatten()` call will be
- * `OK` (also aliased as `NO_ERROR`). Any other values indicate a failure.
- *
- * For each object type that supports flattening, there will be two accompanying
- * functions: `getFlattenedSize()` and `getFdCount()`. `getFlattenedSize()` will
- * return the size of the non-fd buffer that the object will need for
- * flattening. `getFdCount()` will return the size of the fd buffer that the
- * object will need for flattening.
- *
- * The set of these four functions, `getFlattenedSize()`, `getFdCount()`,
- * `flatten()` and `unflatten()`, are similar to functions of the same name in
- * the abstract class `Flattenable`. The only difference is that functions in
- * this file are not member functions of the object type. For example, we write
- *
- *     flatten(x, buffer, size, fds, numFds)
- *
- * instead of
- *
- *     x.flatten(buffer, size, fds, numFds)
- *
- * because we cannot modify the type of `x`.
- *
- * There is one exception to the naming convention: `hidl_handle` that
- * represents a fence. The four functions for this "Fence" type have the word
- * "Fence" attched to their names because the object type, which is
- * `hidl_handle`, does not carry the special meaning that the object itself can
- * only contain zero or one file descriptor.
- */
-
-// Ref: frameworks/native/libs/ui/Fence.cpp
-
-/**
- * \brief Return the size of the non-fd buffer required to flatten a fence.
- *
- * \param[in] fence The input fence of type `hidl_handle`.
- * \return The required size of the flat buffer.
- *
- * The current version of this function always returns 4, which is the number of
- * bytes required to store the number of file descriptors contained in the fd
- * part of the flat buffer.
- */
-size_t getFenceFlattenedSize(hidl_handle const& fence);
-
-/**
- * \brief Return the number of file descriptors contained in a fence.
- *
- * \param[in] fence The input fence of type `hidl_handle`.
- * \return `0` if \p fence does not contain a valid file descriptor, or `1`
- * otherwise.
- */
-size_t getFenceFdCount(hidl_handle const& fence);
-
-/**
- * \brief Unflatten `Fence` to `hidl_handle`.
- *
- * \param[out] fence The destination `hidl_handle`.
- * \param[out] nh The underlying native handle.
- * \param[in,out] buffer The pointer to the flat non-fd buffer.
- * \param[in,out] size The size of the flat non-fd buffer.
- * \param[in,out] fds The pointer to the flat fd buffer.
- * \param[in,out] numFds The size of the flat fd buffer.
- * \return `NO_ERROR` on success; other value on failure.
- *
- * If the return value is `NO_ERROR`, \p nh will point to a newly created
- * native handle, which needs to be deleted with `native_handle_delete()`
- * afterwards.
- */
-status_t unflattenFence(hidl_handle* fence, native_handle_t** nh,
-        void const*& buffer, size_t& size, int const*& fds, size_t& numFds);
-
-/**
- * \brief Flatten `hidl_handle` as `Fence`.
- *
- * \param[in] t The source `hidl_handle`.
- * \param[in,out] buffer The pointer to the flat non-fd buffer.
- * \param[in,out] size The size of the flat non-fd buffer.
- * \param[in,out] fds The pointer to the flat fd buffer.
- * \param[in,out] numFds The size of the flat fd buffer.
- * \return `NO_ERROR` on success; other value on failure.
- */
-status_t flattenFence(hidl_handle const& fence,
-        void*& buffer, size_t& size, int*& fds, size_t& numFds);
-
-/**
- * \brief Wrap `Fence` in `hidl_handle`.
- *
- * \param[out] t The wrapper of type `hidl_handle`.
- * \param[out] nh The native handle pointed to by \p t.
- * \param[in] l The source `Fence`.
- *
- * On success, \p nh will hold a newly created native handle, which must be
- * deleted manually with `native_handle_delete()` afterwards.
- */
-// wrap: Fence -> hidl_handle
-bool wrapAs(hidl_handle* t, native_handle_t** nh, Fence const& l);
-
-/**
- * \brief Convert `hidl_handle` to `Fence`.
- *
- * \param[out] l The destination `Fence`. `l` must not have been used
- * (`l->isValid()` must return `false`) before this function is called.
- * \param[in] t The source `hidl_handle`.
- *
- * If \p t contains a valid file descriptor, it will be duplicated.
- */
-// convert: hidl_handle -> Fence
-bool convertTo(Fence* l, hidl_handle const& t);
-
-// Ref: frameworks/native/libs/ui/FenceTime.cpp: FenceTime::Snapshot
-
-/**
- * \brief Return the size of the non-fd buffer required to flatten
- * `FenceTimeSnapshot`.
- *
- * \param[in] t The input `FenceTimeSnapshot`.
- * \return The required size of the flat buffer.
- */
-size_t getFlattenedSize(HGraphicBufferProducer::FenceTimeSnapshot const& t);
-
-/**
- * \brief Return the number of file descriptors contained in
- * `FenceTimeSnapshot`.
- *
- * \param[in] t The input `FenceTimeSnapshot`.
- * \return The number of file descriptors contained in \p t.
- */
-size_t getFdCount(HGraphicBufferProducer::FenceTimeSnapshot const& t);
-
-/**
- * \brief Flatten `FenceTimeSnapshot`.
- *
- * \param[in] t The source `FenceTimeSnapshot`.
- * \param[in,out] buffer The pointer to the flat non-fd buffer.
- * \param[in,out] size The size of the flat non-fd buffer.
- * \param[in,out] fds The pointer to the flat fd buffer.
- * \param[in,out] numFds The size of the flat fd buffer.
- * \return `NO_ERROR` on success; other value on failure.
- *
- * This function will duplicate the file descriptor in `t.fence` if `t.state ==
- * FENCE`.
- */
-status_t flatten(HGraphicBufferProducer::FenceTimeSnapshot const& t,
-        void*& buffer, size_t& size, int*& fds, size_t& numFds);
-
-/**
- * \brief Unflatten `FenceTimeSnapshot`.
- *
- * \param[out] t The destination `FenceTimeSnapshot`.
- * \param[out] nh The underlying native handle.
- * \param[in,out] buffer The pointer to the flat non-fd buffer.
- * \param[in,out] size The size of the flat non-fd buffer.
- * \param[in,out] fds The pointer to the flat fd buffer.
- * \param[in,out] numFds The size of the flat fd buffer.
- * \return `NO_ERROR` on success; other value on failure.
- *
- * If the return value is `NO_ERROR` and the constructed snapshot contains a
- * file descriptor, \p nh will be created to hold that file descriptor. In this
- * case, \p nh needs to be deleted with `native_handle_delete()` afterwards.
- */
-status_t unflatten(
-        HGraphicBufferProducer::FenceTimeSnapshot* t, native_handle_t** nh,
-        void const*& buffer, size_t& size, int const*& fds, size_t& numFds);
-
-// Ref: frameworks/native/libs/gui/FrameTimestamps.cpp: FrameEventsDelta
-
-/**
- * \brief Return the size of the non-fd buffer required to flatten
- * `FrameEventsDelta`.
- *
- * \param[in] t The input `FrameEventsDelta`.
- * \return The required size of the flat buffer.
- */
-size_t getFlattenedSize(HGraphicBufferProducer::FrameEventsDelta const& t);
-
-/**
- * \brief Return the number of file descriptors contained in
- * `FrameEventsDelta`.
- *
- * \param[in] t The input `FrameEventsDelta`.
- * \return The number of file descriptors contained in \p t.
- */
-size_t getFdCount(HGraphicBufferProducer::FrameEventsDelta const& t);
-
-/**
- * \brief Unflatten `FrameEventsDelta`.
- *
- * \param[out] t The destination `FrameEventsDelta`.
- * \param[out] nh The underlying array of native handles.
- * \param[in,out] buffer The pointer to the flat non-fd buffer.
- * \param[in,out] size The size of the flat non-fd buffer.
- * \param[in,out] fds The pointer to the flat fd buffer.
- * \param[in,out] numFds The size of the flat fd buffer.
- * \return `NO_ERROR` on success; other value on failure.
- *
- * If the return value is `NO_ERROR`, \p nh will have length 4, and it will be
- * populated with `nullptr` or newly created handles. Each non-null slot in \p
- * nh will need to be deleted manually with `native_handle_delete()`.
- */
-status_t unflatten(HGraphicBufferProducer::FrameEventsDelta* t,
-        std::vector<native_handle_t*>* nh,
-        void const*& buffer, size_t& size, int const*& fds, size_t& numFds);
-
-/**
- * \brief Flatten `FrameEventsDelta`.
- *
- * \param[in] t The source `FrameEventsDelta`.
- * \param[in,out] buffer The pointer to the flat non-fd buffer.
- * \param[in,out] size The size of the flat non-fd buffer.
- * \param[in,out] fds The pointer to the flat fd buffer.
- * \param[in,out] numFds The size of the flat fd buffer.
- * \return `NO_ERROR` on success; other value on failure.
- *
- * This function will duplicate file descriptors contained in \p t.
- */
-// Ref: frameworks/native/libs/gui/FrameTimestamp.cpp:
-//      FrameEventsDelta::flatten
-status_t flatten(HGraphicBufferProducer::FrameEventsDelta const& t,
-        void*& buffer, size_t& size, int*& fds, size_t numFds);
-
-// Ref: frameworks/native/libs/gui/FrameTimestamps.cpp: FrameEventHistoryDelta
-
-/**
- * \brief Return the size of the non-fd buffer required to flatten
- * `HGraphicBufferProducer::FrameEventHistoryDelta`.
- *
- * \param[in] t The input `HGraphicBufferProducer::FrameEventHistoryDelta`.
- * \return The required size of the flat buffer.
- */
-size_t getFlattenedSize(
-        HGraphicBufferProducer::FrameEventHistoryDelta const& t);
-
-/**
- * \brief Return the number of file descriptors contained in
- * `HGraphicBufferProducer::FrameEventHistoryDelta`.
- *
- * \param[in] t The input `HGraphicBufferProducer::FrameEventHistoryDelta`.
- * \return The number of file descriptors contained in \p t.
- */
-size_t getFdCount(
-        HGraphicBufferProducer::FrameEventHistoryDelta const& t);
-
-/**
- * \brief Unflatten `FrameEventHistoryDelta`.
- *
- * \param[out] t The destination `FrameEventHistoryDelta`.
- * \param[out] nh The underlying array of arrays of native handles.
- * \param[in,out] buffer The pointer to the flat non-fd buffer.
- * \param[in,out] size The size of the flat non-fd buffer.
- * \param[in,out] fds The pointer to the flat fd buffer.
- * \param[in,out] numFds The size of the flat fd buffer.
- * \return `NO_ERROR` on success; other value on failure.
- *
- * If the return value is `NO_ERROR`, \p nh will be populated with `nullptr` or
- * newly created handles. The second dimension of \p nh will be 4. Each non-null
- * slot in \p nh will need to be deleted manually with `native_handle_delete()`.
- */
-status_t unflatten(
-        HGraphicBufferProducer::FrameEventHistoryDelta* t,
-        std::vector<std::vector<native_handle_t*> >* nh,
-        void const*& buffer, size_t& size, int const*& fds, size_t& numFds);
-
-/**
- * \brief Flatten `FrameEventHistoryDelta`.
- *
- * \param[in] t The source `FrameEventHistoryDelta`.
- * \param[in,out] buffer The pointer to the flat non-fd buffer.
- * \param[in,out] size The size of the flat non-fd buffer.
- * \param[in,out] fds The pointer to the flat fd buffer.
- * \param[in,out] numFds The size of the flat fd buffer.
- * \return `NO_ERROR` on success; other value on failure.
- *
- * This function will duplicate file descriptors contained in \p t.
- */
-status_t flatten(
-        HGraphicBufferProducer::FrameEventHistoryDelta const& t,
-        void*& buffer, size_t& size, int*& fds, size_t& numFds);
-
-/**
- * \brief Wrap `::android::FrameEventHistoryData` in
- * `HGraphicBufferProducer::FrameEventHistoryDelta`.
- *
- * \param[out] t The wrapper of type
- * `HGraphicBufferProducer::FrameEventHistoryDelta`.
- * \param[out] nh The array of array of native handles that are referred to by
- * members of \p t.
- * \param[in] l The source `::android::FrameEventHistoryDelta`.
- *
- * On success, each member of \p nh will be either `nullptr` or a newly created
- * native handle. All the non-`nullptr` elements must be deleted individually
- * with `native_handle_delete()`.
- */
-bool wrapAs(HGraphicBufferProducer::FrameEventHistoryDelta* t,
-        std::vector<std::vector<native_handle_t*> >* nh,
-        ::android::FrameEventHistoryDelta const& l);
-
-/**
- * \brief Convert `HGraphicBufferProducer::FrameEventHistoryDelta` to
- * `::android::FrameEventHistoryDelta`.
- *
- * \param[out] l The destination `::android::FrameEventHistoryDelta`.
- * \param[in] t The source `HGraphicBufferProducer::FrameEventHistoryDelta`.
- *
- * This function will duplicate all file descriptors contained in \p t.
- */
-bool convertTo(
-        ::android::FrameEventHistoryDelta* l,
-        HGraphicBufferProducer::FrameEventHistoryDelta const& t);
-
-// Ref: frameworks/native/libs/ui/Region.cpp
-
-/**
- * \brief Return the size of the buffer required to flatten `Region`.
- *
- * \param[in] t The input `Region`.
- * \return The required size of the flat buffer.
- */
-size_t getFlattenedSize(Region const& t);
-
-/**
- * \brief Unflatten `Region`.
- *
- * \param[out] t The destination `Region`.
- * \param[in,out] buffer The pointer to the flat buffer.
- * \param[in,out] size The size of the flat buffer.
- * \return `NO_ERROR` on success; other value on failure.
- */
-status_t unflatten(Region* t, void const*& buffer, size_t& size);
-
-/**
- * \brief Flatten `Region`.
- *
- * \param[in] t The source `Region`.
- * \param[in,out] buffer The pointer to the flat buffer.
- * \param[in,out] size The size of the flat buffer.
- * \return `NO_ERROR` on success; other value on failure.
- */
-status_t flatten(Region const& t, void*& buffer, size_t& size);
-
-/**
- * \brief Convert `::android::Region` to `Region`.
- *
- * \param[out] t The destination `Region`.
- * \param[in] l The source `::android::Region`.
- */
-// convert: ::android::Region -> Region
-bool convertTo(Region* t, ::android::Region const& l);
-
-/**
- * \brief Convert `Region` to `::android::Region`.
- *
- * \param[out] l The destination `::android::Region`.
- * \param[in] t The source `Region`.
- */
-// convert: Region -> ::android::Region
-bool convertTo(::android::Region* l, Region const& t);
-
-// Ref: frameworks/native/libs/gui/BGraphicBufferProducer.cpp:
-//      BGraphicBufferProducer::QueueBufferInput
-
-/**
- * \brief Return the size of the buffer required to flatten
- * `HGraphicBufferProducer::QueueBufferInput`.
- *
- * \param[in] t The input `HGraphicBufferProducer::QueueBufferInput`.
- * \return The required size of the flat buffer.
- */
-size_t getFlattenedSize(HGraphicBufferProducer::QueueBufferInput const& t);
-
-/**
- * \brief Return the number of file descriptors contained in
- * `HGraphicBufferProducer::QueueBufferInput`.
- *
- * \param[in] t The input `HGraphicBufferProducer::QueueBufferInput`.
- * \return The number of file descriptors contained in \p t.
- */
-size_t getFdCount(
-        HGraphicBufferProducer::QueueBufferInput const& t);
-/**
- * \brief Flatten `HGraphicBufferProducer::QueueBufferInput`.
- *
- * \param[in] t The source `HGraphicBufferProducer::QueueBufferInput`.
- * \param[out] nh The native handle cloned from `t.fence`.
- * \param[in,out] buffer The pointer to the flat non-fd buffer.
- * \param[in,out] size The size of the flat non-fd buffer.
- * \param[in,out] fds The pointer to the flat fd buffer.
- * \param[in,out] numFds The size of the flat fd buffer.
- * \return `NO_ERROR` on success; other value on failure.
- *
- * This function will duplicate the file descriptor in `t.fence`. */
-status_t flatten(HGraphicBufferProducer::QueueBufferInput const& t,
-        native_handle_t** nh,
-        void*& buffer, size_t& size, int*& fds, size_t& numFds);
-
-/**
- * \brief Unflatten `HGraphicBufferProducer::QueueBufferInput`.
- *
- * \param[out] t The destination `HGraphicBufferProducer::QueueBufferInput`.
- * \param[out] nh The underlying native handle for `t->fence`.
- * \param[in,out] buffer The pointer to the flat non-fd buffer.
- * \param[in,out] size The size of the flat non-fd buffer.
- * \param[in,out] fds The pointer to the flat fd buffer.
- * \param[in,out] numFds The size of the flat fd buffer.
- * \return `NO_ERROR` on success; other value on failure.
- *
- * If the return value is `NO_ERROR` and `t->fence` contains a valid file
- * descriptor, \p nh will be a newly created native handle holding that file
- * descriptor. \p nh needs to be deleted with `native_handle_delete()`
- * afterwards.
- */
-status_t unflatten(
-        HGraphicBufferProducer::QueueBufferInput* t, native_handle_t** nh,
-        void const*& buffer, size_t& size, int const*& fds, size_t& numFds);
-
-/**
- * \brief Wrap `BGraphicBufferProducer::QueueBufferInput` in
- * `HGraphicBufferProducer::QueueBufferInput`.
- *
- * \param[out] t The wrapper of type
- * `HGraphicBufferProducer::QueueBufferInput`.
- * \param[out] nh The underlying native handle for `t->fence`.
- * \param[in] l The source `BGraphicBufferProducer::QueueBufferInput`.
- *
- * If the return value is `true` and `t->fence` contains a valid file
- * descriptor, \p nh will be a newly created native handle holding that file
- * descriptor. \p nh needs to be deleted with `native_handle_delete()`
- * afterwards.
- */
-bool wrapAs(
-        HGraphicBufferProducer::QueueBufferInput* t,
-        native_handle_t** nh,
-        BGraphicBufferProducer::QueueBufferInput const& l);
-
-/**
- * \brief Convert `HGraphicBufferProducer::QueueBufferInput` to
- * `BGraphicBufferProducer::QueueBufferInput`.
- *
- * \param[out] l The destination `BGraphicBufferProducer::QueueBufferInput`.
- * \param[in] t The source `HGraphicBufferProducer::QueueBufferInput`.
- *
- * If `t.fence` has a valid file descriptor, it will be duplicated.
- */
-bool convertTo(
-        BGraphicBufferProducer::QueueBufferInput* l,
-        HGraphicBufferProducer::QueueBufferInput const& t);
-
-// Ref: frameworks/native/libs/gui/BGraphicBufferProducer.cpp:
-//      BGraphicBufferProducer::QueueBufferOutput
-
-/**
- * \brief Wrap `BGraphicBufferProducer::QueueBufferOutput` in
- * `HGraphicBufferProducer::QueueBufferOutput`.
- *
- * \param[out] t The wrapper of type
- * `HGraphicBufferProducer::QueueBufferOutput`.
- * \param[out] nh The array of array of native handles that are referred to by
- * members of \p t.
- * \param[in] l The source `BGraphicBufferProducer::QueueBufferOutput`.
- *
- * On success, each member of \p nh will be either `nullptr` or a newly created
- * native handle. All the non-`nullptr` elements must be deleted individually
- * with `native_handle_delete()`.
- */
-// wrap: BGraphicBufferProducer::QueueBufferOutput ->
-// HGraphicBufferProducer::QueueBufferOutput
-bool wrapAs(HGraphicBufferProducer::QueueBufferOutput* t,
-        std::vector<std::vector<native_handle_t*> >* nh,
-        BGraphicBufferProducer::QueueBufferOutput const& l);
-
-/**
- * \brief Convert `HGraphicBufferProducer::QueueBufferOutput` to
- * `BGraphicBufferProducer::QueueBufferOutput`.
- *
- * \param[out] l The destination `BGraphicBufferProducer::QueueBufferOutput`.
- * \param[in] t The source `HGraphicBufferProducer::QueueBufferOutput`.
- *
- * This function will duplicate all file descriptors contained in \p t.
- */
-// convert: HGraphicBufferProducer::QueueBufferOutput ->
-// BGraphicBufferProducer::QueueBufferOutput
-bool convertTo(
-        BGraphicBufferProducer::QueueBufferOutput* l,
-        HGraphicBufferProducer::QueueBufferOutput const& t);
-
-/**
- * \brief Convert `BGraphicBufferProducer::DisconnectMode` to
- * `HGraphicBufferProducer::DisconnectMode`.
- *
- * \param[in] l The source `BGraphicBufferProducer::DisconnectMode`.
- * \return The corresponding `HGraphicBufferProducer::DisconnectMode`.
- */
-HGraphicBufferProducer::DisconnectMode toHidlDisconnectMode(
-        BGraphicBufferProducer::DisconnectMode l);
-
-/**
- * \brief Convert `HGraphicBufferProducer::DisconnectMode` to
- * `BGraphicBufferProducer::DisconnectMode`.
- *
- * \param[in] l The source `HGraphicBufferProducer::DisconnectMode`.
- * \return The corresponding `BGraphicBufferProducer::DisconnectMode`.
- */
-BGraphicBufferProducer::DisconnectMode toGuiDisconnectMode(
-        HGraphicBufferProducer::DisconnectMode t);
-
-}  // namespace conversion
-}  // namespace android
-
-#endif  // MEDIA_STAGEFRIGHT_BQHELPER_CONVERSION_H_
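The long comment block in the deleted header documents the convention that every flatten()/unflatten() call advances `buffer`/`fds` and shrinks `size`/`numFds` by exactly what it consumed, so calls can be chained over a single flat allocation. A small illustration of that advance-and-shrink convention with a hypothetical read helper (not the real FlattenableUtils):

#include <cstddef>
#include <cstdint>
#include <cstring>

// Hypothetical helper: copy one trivially copyable value out of the flat
// buffer, then move the cursor past it and shrink the remaining size.
template <typename T>
bool readFromFlatBuffer(void const*& buffer, size_t& size, T* out) {
    if (size < sizeof(T)) {
        return false;                       // not enough flat data left
    }
    std::memcpy(out, buffer, sizeof(T));
    buffer = static_cast<uint8_t const*>(buffer) + sizeof(T);
    size -= sizeof(T);
    return true;
}

// Chained use over one flat allocation: each call picks up where the previous
// one left off, which is why the cursor arguments are references.
bool unflattenTwoFields(int32_t* a, int64_t* b, void const*& buffer, size_t& size) {
    return readFromFlatBuffer(buffer, size, a) &&
           readFromFlatBuffer(buffer, size, b);
}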
diff --git a/media/libstagefright/bqhelper/include/media/stagefright/bqhelper/GraphicBufferSource.h b/media/libstagefright/bqhelper/include/media/stagefright/bqhelper/GraphicBufferSource.h
index bf329b9..b4c4d4a 100644
--- a/media/libstagefright/bqhelper/include/media/stagefright/bqhelper/GraphicBufferSource.h
+++ b/media/libstagefright/bqhelper/include/media/stagefright/bqhelper/GraphicBufferSource.h
@@ -19,8 +19,6 @@
 #define GRAPHIC_BUFFER_SOURCE_H_
 
 #include <binder/Status.h>
-#include <gui/BufferQueue.h>
-#include <gui/IGraphicBufferProducer.h>
 #include <utils/RefBase.h>
 
 #include <media/hardware/VideoAPI.h>
@@ -28,13 +26,17 @@
 #include <media/stagefright/foundation/AHandlerReflector.h>
 #include <media/stagefright/foundation/ALooper.h>
 #include <media/stagefright/bqhelper/ComponentWrapper.h>
+#include <android/hardware/graphics/bufferqueue/1.0/IGraphicBufferProducer.h>
+#include <android/hardware/graphics/bufferqueue/2.0/IGraphicBufferProducer.h>
 
 namespace android {
 
 using ::android::binder::Status;
 
 struct FrameDropper;
-
+class BufferItem;
+class IGraphicBufferProducer;
+class IGraphicBufferConsumer;
 /*
  * This class is used to feed codecs from a Surface via BufferQueue or
  * HW producer.
@@ -82,9 +84,12 @@
 
     // Returns the handle to the producer side of the BufferQueue.  Buffers
     // queued on this will be received by GraphicBufferSource.
-    sp<IGraphicBufferProducer> getIGraphicBufferProducer() const {
-        return mProducer;
-    }
+    sp<IGraphicBufferProducer> getIGraphicBufferProducer() const;
+
+    // Returns the handle to the bufferqueue HAL (V1_0) producer side of the BufferQueue.
+    // Buffers queued on this will be received by GraphicBufferSource.
+    sp<::android::hardware::graphics::bufferqueue::V1_0::IGraphicBufferProducer>
+        getHGraphicBufferProducer_V1_0() const;
 
     // Returns the handle to the bufferqueue HAL producer side of the BufferQueue.
     // Buffers queued on this will be received by GraphicBufferSource.
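The hunk above also swaps the <gui/...> includes for forward declarations of types the header only names through pointers and sp<> handles, which keeps libgui headers out of the transitive include set of bqhelper clients. A minimal sketch of the technique with placeholder types:

// widget_source.h: hypothetical header illustrating the same trick.
#pragma once
#include <memory>

class Widget;            // forward declaration: the full definition is not
                         // needed because only smart pointers are stored here

class WidgetSource {
public:
    std::shared_ptr<Widget> getWidget() const;   // defined in the .cpp, which
                                                 // includes widget.h itself
private:
    std::shared_ptr<Widget> mWidget;
};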
diff --git a/media/libstagefright/bqhelper/include/media/stagefright/bqhelper/WGraphicBufferProducer.h b/media/libstagefright/bqhelper/include/media/stagefright/bqhelper/WGraphicBufferProducer.h
deleted file mode 100644
index 8ddf20f..0000000
--- a/media/libstagefright/bqhelper/include/media/stagefright/bqhelper/WGraphicBufferProducer.h
+++ /dev/null
@@ -1,380 +0,0 @@
-/*
- * Copyright 2016, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef MEDIA_STAGEFRIGHT_WGRAPHICBUFFERPRODUCER_H_
-#define MEDIA_STAGEFRIGHT_WGRAPHICBUFFERPRODUCER_H_
-
-#include <hidl/MQDescriptor.h>
-#include <hidl/Status.h>
-
-#include <binder/Binder.h>
-#include <gui/IGraphicBufferProducer.h>
-#include <gui/IProducerListener.h>
-#include <media/stagefright/bqhelper/Conversion.h>
-#include <media/stagefright/bqhelper/WProducerListener.h>
-#include <system/window.h>
-
-#include <android/hardware/graphics/bufferqueue/1.0/IGraphicBufferProducer.h>
-
-namespace android {
-
-using ::android::hardware::media::V1_0::AnwBuffer;
-using ::android::hidl::base::V1_0::IBase;
-using ::android::hardware::hidl_array;
-using ::android::hardware::hidl_handle;
-using ::android::hardware::hidl_memory;
-using ::android::hardware::hidl_string;
-using ::android::hardware::hidl_vec;
-using ::android::hardware::Return;
-using ::android::hardware::Void;
-using ::android::sp;
-
-typedef ::android::hardware::graphics::bufferqueue::V1_0::
-        IGraphicBufferProducer HGraphicBufferProducer;
-typedef ::android::hardware::graphics::bufferqueue::V1_0::
-        IProducerListener HProducerListener;
-
-typedef ::android::IGraphicBufferProducer BGraphicBufferProducer;
-typedef ::android::IProducerListener BProducerListener;
-using ::android::BnGraphicBufferProducer;
-
-#ifndef LOG
-struct LOG_dummy {
-    template <typename T>
-    LOG_dummy& operator<< (const T&) { return *this; }
-};
-
-#define LOG(x)  LOG_dummy()
-#endif
-
-// Instantiate only if HGraphicBufferProducer is base of BASE.
-template <typename BASE,
-          typename = typename std::enable_if<std::is_base_of<HGraphicBufferProducer, BASE>::value>::type>
-struct TWGraphicBufferProducer : public BASE {
-    TWGraphicBufferProducer(sp<BGraphicBufferProducer> const& base) : mBase(base) {}
-    Return<void> requestBuffer(int32_t slot, HGraphicBufferProducer::requestBuffer_cb _hidl_cb) override {
-        sp<GraphicBuffer> buf;
-        status_t status = mBase->requestBuffer(slot, &buf);
-        AnwBuffer anwBuffer;
-        if (buf != nullptr) {
-            ::android::conversion::wrapAs(&anwBuffer, *buf);
-        }
-        _hidl_cb(static_cast<int32_t>(status), anwBuffer);
-        return Void();
-    }
-
-    Return<int32_t> setMaxDequeuedBufferCount(int32_t maxDequeuedBuffers) override {
-        return static_cast<int32_t>(mBase->setMaxDequeuedBufferCount(
-                static_cast<int>(maxDequeuedBuffers)));
-    }
-
-    Return<int32_t> setAsyncMode(bool async) override {
-        return static_cast<int32_t>(mBase->setAsyncMode(async));
-    }
-
-    Return<void> dequeueBuffer(
-            uint32_t width, uint32_t height,
-            ::android::hardware::graphics::common::V1_0::PixelFormat format, uint32_t usage,
-            bool getFrameTimestamps, HGraphicBufferProducer::dequeueBuffer_cb _hidl_cb) override {
-        int slot;
-        sp<Fence> fence;
-        ::android::FrameEventHistoryDelta outTimestamps;
-        status_t status = mBase->dequeueBuffer(
-            &slot, &fence, width, height,
-            static_cast<::android::PixelFormat>(format), usage, nullptr,
-            getFrameTimestamps ? &outTimestamps : nullptr);
-        hidl_handle tFence;
-        HGraphicBufferProducer::FrameEventHistoryDelta tOutTimestamps;
-
-        native_handle_t* nh = nullptr;
-        if ((fence == nullptr) || !::android::conversion::wrapAs(&tFence, &nh, *fence)) {
-            LOG(ERROR) << "TWGraphicBufferProducer::dequeueBuffer - "
-                    "Invalid output fence";
-            _hidl_cb(static_cast<int32_t>(status),
-                     static_cast<int32_t>(slot),
-                     tFence,
-                     tOutTimestamps);
-            return Void();
-        }
-        std::vector<std::vector<native_handle_t*> > nhAA;
-        if (getFrameTimestamps && !::android::conversion::wrapAs(&tOutTimestamps, &nhAA, outTimestamps)) {
-            LOG(ERROR) << "TWGraphicBufferProducer::dequeueBuffer - "
-                    "Invalid output timestamps";
-            _hidl_cb(static_cast<int32_t>(status),
-                     static_cast<int32_t>(slot),
-                     tFence,
-                     tOutTimestamps);
-            native_handle_delete(nh);
-            return Void();
-        }
-
-        _hidl_cb(static_cast<int32_t>(status),
-                static_cast<int32_t>(slot),
-                tFence,
-                tOutTimestamps);
-        native_handle_delete(nh);
-        if (getFrameTimestamps) {
-            for (auto& nhA : nhAA) {
-                for (auto& handle : nhA) {
-                    native_handle_delete(handle);
-                }
-            }
-        }
-        return Void();
-    }
-
-    Return<int32_t> detachBuffer(int32_t slot) override {
-        return static_cast<int32_t>(mBase->detachBuffer(slot));
-    }
-
-    Return<void> detachNextBuffer(HGraphicBufferProducer::detachNextBuffer_cb _hidl_cb) override {
-        sp<GraphicBuffer> outBuffer;
-        sp<Fence> outFence;
-        status_t status = mBase->detachNextBuffer(&outBuffer, &outFence);
-        AnwBuffer tBuffer;
-        hidl_handle tFence;
-
-        if (outBuffer == nullptr) {
-            LOG(ERROR) << "TWGraphicBufferProducer::detachNextBuffer - "
-                    "Invalid output buffer";
-            _hidl_cb(static_cast<int32_t>(status), tBuffer, tFence);
-            return Void();
-        }
-        ::android::conversion::wrapAs(&tBuffer, *outBuffer);
-        native_handle_t* nh = nullptr;
-        if ((outFence != nullptr) && !::android::conversion::wrapAs(&tFence, &nh, *outFence)) {
-            LOG(ERROR) << "TWGraphicBufferProducer::detachNextBuffer - "
-                    "Invalid output fence";
-            _hidl_cb(static_cast<int32_t>(status), tBuffer, tFence);
-            return Void();
-        }
-
-        _hidl_cb(static_cast<int32_t>(status), tBuffer, tFence);
-        native_handle_delete(nh);
-        return Void();
-    }
-
-    Return<void> attachBuffer(const AnwBuffer& buffer, HGraphicBufferProducer::attachBuffer_cb _hidl_cb) override {
-        int outSlot;
-        sp<GraphicBuffer> lBuffer = new GraphicBuffer();
-        if (!::android::conversion::convertTo(lBuffer.get(), buffer)) {
-            LOG(ERROR) << "TWGraphicBufferProducer::attachBuffer - "
-                    "Invalid input native window buffer";
-            _hidl_cb(static_cast<int32_t>(BAD_VALUE), -1);
-            return Void();
-        }
-        status_t status = mBase->attachBuffer(&outSlot, lBuffer);
-
-        _hidl_cb(static_cast<int32_t>(status), static_cast<int32_t>(outSlot));
-        return Void();
-    }
-
-    Return<void> queueBuffer(
-            int32_t slot, const HGraphicBufferProducer::QueueBufferInput& input,
-            HGraphicBufferProducer::queueBuffer_cb _hidl_cb) override {
-        HGraphicBufferProducer::QueueBufferOutput tOutput;
-        BGraphicBufferProducer::QueueBufferInput lInput(
-                0, false, HAL_DATASPACE_UNKNOWN,
-                ::android::Rect(0, 0, 1, 1),
-                NATIVE_WINDOW_SCALING_MODE_FREEZE,
-                0, ::android::Fence::NO_FENCE);
-        if (!::android::conversion::convertTo(&lInput, input)) {
-            LOG(ERROR) << "TWGraphicBufferProducer::queueBuffer - "
-                    "Invalid input";
-            _hidl_cb(static_cast<int32_t>(BAD_VALUE), tOutput);
-            return Void();
-        }
-        BGraphicBufferProducer::QueueBufferOutput lOutput;
-        status_t status = mBase->queueBuffer(
-                static_cast<int>(slot), lInput, &lOutput);
-
-        std::vector<std::vector<native_handle_t*> > nhAA;
-        if (!::android::conversion::wrapAs(&tOutput, &nhAA, lOutput)) {
-            LOG(ERROR) << "TWGraphicBufferProducer::queueBuffer - "
-                    "Invalid output";
-            _hidl_cb(static_cast<int32_t>(BAD_VALUE), tOutput);
-            return Void();
-        }
-
-        _hidl_cb(static_cast<int32_t>(status), tOutput);
-        for (auto& nhA : nhAA) {
-            for (auto& nh : nhA) {
-                native_handle_delete(nh);
-            }
-        }
-        return Void();
-    }
-
-    Return<int32_t> cancelBuffer(int32_t slot, const hidl_handle& fence) override {
-        sp<Fence> lFence = new Fence();
-        if (!::android::conversion::convertTo(lFence.get(), fence)) {
-            LOG(ERROR) << "TWGraphicBufferProducer::cancelBuffer - "
-                    "Invalid input fence";
-            return static_cast<int32_t>(BAD_VALUE);
-        }
-        return static_cast<int32_t>(mBase->cancelBuffer(static_cast<int>(slot), lFence));
-    }
-
-    Return<void> query(int32_t what, HGraphicBufferProducer::query_cb _hidl_cb) override {
-        int lValue;
-        int lReturn = mBase->query(static_cast<int>(what), &lValue);
-        _hidl_cb(static_cast<int32_t>(lReturn), static_cast<int32_t>(lValue));
-        return Void();
-    }
-
-    Return<void> connect(const sp<HProducerListener>& listener,
-            int32_t api, bool producerControlledByApp,
-            HGraphicBufferProducer::connect_cb _hidl_cb) override {
-        sp<BProducerListener> lListener = listener == nullptr ?
-                nullptr : new LWProducerListener(listener);
-        BGraphicBufferProducer::QueueBufferOutput lOutput;
-        status_t status = mBase->connect(lListener,
-                static_cast<int>(api),
-                producerControlledByApp,
-                &lOutput);
-
-        HGraphicBufferProducer::QueueBufferOutput tOutput;
-        std::vector<std::vector<native_handle_t*> > nhAA;
-        if (!::android::conversion::wrapAs(&tOutput, &nhAA, lOutput)) {
-            LOG(ERROR) << "TWGraphicBufferProducer::connect - "
-                    "Invalid output";
-            _hidl_cb(static_cast<int32_t>(status), tOutput);
-            return Void();
-        }
-
-        _hidl_cb(static_cast<int32_t>(status), tOutput);
-        for (auto& nhA : nhAA) {
-            for (auto& nh : nhA) {
-                native_handle_delete(nh);
-            }
-        }
-        return Void();
-    }
-
-    Return<int32_t> disconnect(
-            int32_t api,
-            HGraphicBufferProducer::DisconnectMode mode) override {
-        return static_cast<int32_t>(mBase->disconnect(
-                static_cast<int>(api),
-                ::android::conversion::toGuiDisconnectMode(mode)));
-    }
-
-    Return<int32_t> setSidebandStream(const hidl_handle& stream) override {
-        return static_cast<int32_t>(mBase->setSidebandStream(NativeHandle::create(
-                stream ? native_handle_clone(stream) : NULL, true)));
-    }
-
-    Return<void> allocateBuffers(
-            uint32_t width, uint32_t height,
-            ::android::hardware::graphics::common::V1_0::PixelFormat format,
-            uint32_t usage) override {
-        mBase->allocateBuffers(
-                width, height,
-                static_cast<::android::PixelFormat>(format),
-                usage);
-        return Void();
-    }
-
-    Return<int32_t> allowAllocation(bool allow) override {
-        return static_cast<int32_t>(mBase->allowAllocation(allow));
-    }
-
-    Return<int32_t> setGenerationNumber(uint32_t generationNumber) override {
-        return static_cast<int32_t>(mBase->setGenerationNumber(generationNumber));
-    }
-
-    Return<void> getConsumerName(HGraphicBufferProducer::getConsumerName_cb _hidl_cb) override {
-        _hidl_cb(mBase->getConsumerName().string());
-        return Void();
-    }
-
-    Return<int32_t> setSharedBufferMode(bool sharedBufferMode) override {
-        return static_cast<int32_t>(mBase->setSharedBufferMode(sharedBufferMode));
-    }
-
-    Return<int32_t> setAutoRefresh(bool autoRefresh) override {
-        return static_cast<int32_t>(mBase->setAutoRefresh(autoRefresh));
-    }
-
-    Return<int32_t> setDequeueTimeout(int64_t timeoutNs) override {
-        return static_cast<int32_t>(mBase->setDequeueTimeout(timeoutNs));
-    }
-
-    Return<void> getLastQueuedBuffer(HGraphicBufferProducer::getLastQueuedBuffer_cb _hidl_cb) override {
-        sp<GraphicBuffer> lOutBuffer = new GraphicBuffer();
-        sp<Fence> lOutFence = new Fence();
-        float lOutTransformMatrix[16];
-        status_t status = mBase->getLastQueuedBuffer(
-                &lOutBuffer, &lOutFence, lOutTransformMatrix);
-
-        AnwBuffer tOutBuffer;
-        if (lOutBuffer != nullptr) {
-            ::android::conversion::wrapAs(&tOutBuffer, *lOutBuffer);
-        }
-        hidl_handle tOutFence;
-        native_handle_t* nh = nullptr;
-        if ((lOutFence == nullptr) || !::android::conversion::wrapAs(&tOutFence, &nh, *lOutFence)) {
-            LOG(ERROR) << "TWGraphicBufferProducer::getLastQueuedBuffer - "
-                    "Invalid output fence";
-            _hidl_cb(static_cast<int32_t>(status),
-                    tOutBuffer,
-                    tOutFence,
-                    hidl_array<float, 16>());
-            return Void();
-        }
-        hidl_array<float, 16> tOutTransformMatrix(lOutTransformMatrix);
-
-        _hidl_cb(static_cast<int32_t>(status), tOutBuffer, tOutFence, tOutTransformMatrix);
-        native_handle_delete(nh);
-        return Void();
-    }
-
-    Return<void> getFrameTimestamps(HGraphicBufferProducer::getFrameTimestamps_cb _hidl_cb) override {
-        ::android::FrameEventHistoryDelta lDelta;
-        mBase->getFrameTimestamps(&lDelta);
-
-        HGraphicBufferProducer::FrameEventHistoryDelta tDelta;
-        std::vector<std::vector<native_handle_t*> > nhAA;
-        if (!::android::conversion::wrapAs(&tDelta, &nhAA, lDelta)) {
-            LOG(ERROR) << "TWGraphicBufferProducer::getFrameTimestamps - "
-                    "Invalid output frame timestamps";
-            _hidl_cb(tDelta);
-            return Void();
-        }
-
-        _hidl_cb(tDelta);
-        for (auto& nhA : nhAA) {
-            for (auto& nh : nhA) {
-                native_handle_delete(nh);
-            }
-        }
-        return Void();
-    }
-
-    Return<void> getUniqueId(HGraphicBufferProducer::getUniqueId_cb _hidl_cb) override {
-        uint64_t outId;
-        status_t status = mBase->getUniqueId(&outId);
-        _hidl_cb(static_cast<int32_t>(status), outId);
-        return Void();
-    }
-
-private:
-    sp<BGraphicBufferProducer> mBase;
-};
-
-}  // namespace android
-
-#endif  // MEDIA_STAGEFRIGHT_WGRAPHICBUFFERPRODUCER_H_
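The deleted wrapper above was a class template constrained with std::enable_if and std::is_base_of so that it could only be instantiated for HIDL interfaces derived from the bufferqueue@1.0 IGraphicBufferProducer, which is how the GraphicBufferSource.cpp change reuses it for the plain V1_0 interface. A condensed sketch of that constraint pattern with placeholder types and std::shared_ptr standing in for sp<>:

#include <memory>
#include <type_traits>
#include <utility>

// Placeholder for the HIDL interface family the wrapper may expose.
struct HalProducerBase {
    virtual ~HalProducerBase() = default;
    virtual int setAsyncMode(bool async) = 0;
};

// Placeholder for the wrapped binder-side producer.
struct BinderProducer {
    int setAsyncMode(bool /*async*/) { return 0; }   // pretend success
};

// Instantiable only when BASE derives from HalProducerBase, mirroring the
// enable_if/is_base_of guard on the deleted TWGraphicBufferProducer.
template <typename BASE,
          typename = typename std::enable_if<
                  std::is_base_of<HalProducerBase, BASE>::value>::type>
struct HalProducerWrapper : public BASE {
    explicit HalProducerWrapper(std::shared_ptr<BinderProducer> base)
        : mBase(std::move(base)) {}
    int setAsyncMode(bool async) override { return mBase->setAsyncMode(async); }
private:
    std::shared_ptr<BinderProducer> mBase;
};

// Instantiation with a type derived from HalProducerBase succeeds; any other
// BASE fails to substitute and is rejected at compile time.
std::shared_ptr<HalProducerBase> wrapForHal(std::shared_ptr<BinderProducer> base) {
    return std::make_shared<HalProducerWrapper<HalProducerBase>>(std::move(base));
}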
diff --git a/media/libstagefright/bqhelper/include/media/stagefright/bqhelper/WProducerListener.h b/media/libstagefright/bqhelper/include/media/stagefright/bqhelper/WProducerListener.h
deleted file mode 100644
index febba87..0000000
--- a/media/libstagefright/bqhelper/include/media/stagefright/bqhelper/WProducerListener.h
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Copyright 2016, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef MEDIA_STAGEFRIGHT_WPRODUCERLISTENER_H_
-#define MEDIA_STAGEFRIGHT_WPRODUCERLISTENER_H_
-
-#include <hidl/MQDescriptor.h>
-#include <hidl/Status.h>
-
-#include <binder/IBinder.h>
-#include <gui/IProducerListener.h>
-
-#include <android/hardware/graphics/bufferqueue/1.0/IProducerListener.h>
-
-namespace android {
-
-using ::android::hidl::base::V1_0::IBase;
-using ::android::hardware::hidl_array;
-using ::android::hardware::hidl_memory;
-using ::android::hardware::hidl_string;
-using ::android::hardware::hidl_vec;
-using ::android::hardware::Return;
-using ::android::hardware::Void;
-using ::android::sp;
-
-typedef ::android::hardware::graphics::bufferqueue::V1_0::IProducerListener
-        HProducerListener;
-typedef ::android::IProducerListener
-        BProducerListener;
-using ::android::BnProducerListener;
-
-struct TWProducerListener : public HProducerListener {
-    sp<BProducerListener> mBase;
-    TWProducerListener(sp<BProducerListener> const& base);
-    Return<void> onBufferReleased() override;
-    Return<bool> needsReleaseNotify() override;
-};
-
-class LWProducerListener : public BnProducerListener {
-public:
-    sp<HProducerListener> mBase;
-    LWProducerListener(sp<HProducerListener> const& base);
-    void onBufferReleased() override;
-    bool needsReleaseNotify() override;
-};
-
-}  // namespace android
-
-#endif  // MEDIA_STAGEFRIGHT_WPRODUCERLISTENER_H_
diff --git a/media/libstagefright/codecs/amrnb/common/Android.bp b/media/libstagefright/codecs/amrnb/common/Android.bp
index 5177593..772ebf9 100644
--- a/media/libstagefright/codecs/amrnb/common/Android.bp
+++ b/media/libstagefright/codecs/amrnb/common/Android.bp
@@ -1,9 +1,6 @@
 cc_library_shared {
     name: "libstagefright_amrnb_common",
     vendor_available: true,
-    vndk: {
-        enabled: true,
-    },
 
     srcs: [
         "src/add.cpp",
diff --git a/media/libstagefright/codecs/amrwb/src/deemphasis_32.cpp b/media/libstagefright/codecs/amrwb/src/deemphasis_32.cpp
index b80555b..c0e4c51 100644
--- a/media/libstagefright/codecs/amrwb/src/deemphasis_32.cpp
+++ b/media/libstagefright/codecs/amrwb/src/deemphasis_32.cpp
@@ -131,7 +131,7 @@
     int16 lo, hi;
 
     L_tmp  = ((int32)x_hi[0]) << 16;
-    L_tmp += ((int32)x_lo[0]) << 4;
+    L_tmp += (((int32)x_lo[0]) << 4) & 0xFFFF;
     L_tmp  = shl_int32(L_tmp, 3);
 
     L_tmp = fxp_mac_16by16(*mem, mu, L_tmp),
@@ -144,7 +144,7 @@
     for (i = 1; i < L - 1; i++)
     {
         L_tmp  = ((int32)hi) << 16;
-        L_tmp += ((int32)lo) << 4;
+        L_tmp += (((int32)lo) << 4) & 0xFFFF;
         L_tmp  = shl_int32(L_tmp, 3);
         L_tmp  = fxp_mac_16by16(y[i - 1], mu, L_tmp),
                  L_tmp  = shl_int32(L_tmp, 1);           /* saturation can occur here */
@@ -153,7 +153,7 @@
         hi     = x_hi[i+1];
     }
     L_tmp  = ((int32)hi) << 16;
-    L_tmp += ((int32)lo) << 4;
+    L_tmp += (((int32)lo) << 4) & 0xFFFF;
     L_tmp  = shl_int32(L_tmp, 3);
     L_tmp  = fxp_mac_16by16(y[i - 1], mu, L_tmp),
              L_tmp  = shl_int32(L_tmp, 1);           /* saturation can occur here */
diff --git a/media/libstagefright/codecs/amrwbenc/Android.bp b/media/libstagefright/codecs/amrwbenc/Android.bp
index 3beed66..084be0a 100644
--- a/media/libstagefright/codecs/amrwbenc/Android.bp
+++ b/media/libstagefright/codecs/amrwbenc/Android.bp
@@ -129,6 +129,7 @@
 
     shared_libs: [
         "libstagefright_enc_common",
+        "liblog",
     ],
 
     cflags: ["-Werror"],
diff --git a/media/libstagefright/codecs/amrwbenc/SampleCode/Android.bp b/media/libstagefright/codecs/amrwbenc/SampleCode/Android.bp
index 95f9494..9442fc4 100644
--- a/media/libstagefright/codecs/amrwbenc/SampleCode/Android.bp
+++ b/media/libstagefright/codecs/amrwbenc/SampleCode/Android.bp
@@ -14,6 +14,7 @@
 
     shared_libs: [
         "libdl",
+        "liblog",
     ],
 
     static_libs: [
diff --git a/media/libstagefright/codecs/amrwbenc/src/c4t64fx.c b/media/libstagefright/codecs/amrwbenc/src/c4t64fx.c
index 8cebb09..f2e28c4 100644
--- a/media/libstagefright/codecs/amrwbenc/src/c4t64fx.c
+++ b/media/libstagefright/codecs/amrwbenc/src/c4t64fx.c
@@ -47,6 +47,10 @@
 
 #include "q_pulse.h"
 
+#undef LOG_TAG
+#define LOG_TAG "amrwbenc"
+#include "log/log.h"
+
 static Word16 tipos[36] = {
     0, 1, 2, 3,                            /* starting point &ipos[0], 1st iter */
     1, 2, 3, 0,                            /* starting point &ipos[4], 2nd iter */
@@ -745,11 +749,16 @@
 
         i = (Word16)((vo_L_mult(track, NPMAXPT) >> 1));
 
-        while (ind[i] >= 0)
+        while (i < NPMAXPT * NB_TRACK && ind[i] >= 0)
         {
             i += 1;
         }
-        ind[i] = index;
+        if (i < NPMAXPT * NB_TRACK) {
+            ind[i] = index;
+        } else {
+            ALOGE("b/132647222, OOB access in ind array track=%d i=%d", track, i);
+            android_errorWriteLog(0x534e4554, "132647222");
+        }
     }
 
     k = 0;
diff --git a/media/libstagefright/codecs/avcdec/SoftAVCDec.cpp b/media/libstagefright/codecs/avcdec/SoftAVCDec.cpp
index bf5e243..5a4b2f8 100644
--- a/media/libstagefright/codecs/avcdec/SoftAVCDec.cpp
+++ b/media/libstagefright/codecs/avcdec/SoftAVCDec.cpp
@@ -580,7 +580,7 @@
             status = ivdec_api_function(mCodecCtx, (void *)&s_dec_ip, (void *)&s_dec_op);
 
             bool unsupportedResolution =
-                (IVD_STREAM_WIDTH_HEIGHT_NOT_SUPPORTED == (s_dec_op.u4_error_code & 0xFF));
+                (IVD_STREAM_WIDTH_HEIGHT_NOT_SUPPORTED == (s_dec_op.u4_error_code & IVD_ERROR_MASK));
 
             /* Check for unsupported dimensions */
             if (unsupportedResolution) {
@@ -590,7 +590,7 @@
                 return;
             }
 
-            bool allocationFailed = (IVD_MEM_ALLOC_FAILED == (s_dec_op.u4_error_code & 0xFF));
+            bool allocationFailed = (IVD_MEM_ALLOC_FAILED == (s_dec_op.u4_error_code & IVD_ERROR_MASK));
             if (allocationFailed) {
                 ALOGE("Allocation failure in decoder");
                 notify(OMX_EventError, OMX_ErrorUnsupportedSetting, 0, NULL);
@@ -598,7 +598,14 @@
                 return;
             }
 
-            bool resChanged = (IVD_RES_CHANGED == (s_dec_op.u4_error_code & 0xFF));
+            if (IS_IVD_FATAL_ERROR(s_dec_op.u4_error_code)) {
+                ALOGE("Fatal Error : 0x%x", s_dec_op.u4_error_code);
+                notify(OMX_EventError, OMX_ErrorUnsupportedSetting, 0, NULL);
+                mSignalledError = true;
+                return;
+            }
+
+            bool resChanged = (IVD_RES_CHANGED == (s_dec_op.u4_error_code & IVD_ERROR_MASK));
 
             getVUIParams();
 
diff --git a/media/libstagefright/codecs/avcenc/SoftAVCEnc.h b/media/libstagefright/codecs/avcenc/SoftAVCEnc.h
index 8253b7d..6d2e084 100644
--- a/media/libstagefright/codecs/avcenc/SoftAVCEnc.h
+++ b/media/libstagefright/codecs/avcenc/SoftAVCEnc.h
@@ -36,7 +36,7 @@
 #define DEFAULT_MAX_REORDER_FRM     0
 #define DEFAULT_QP_MIN              10
 #define DEFAULT_QP_MAX              40
-#define DEFAULT_MAX_BITRATE         20000000
+#define DEFAULT_MAX_BITRATE         240000000
 #define DEFAULT_MAX_SRCH_RANGE_X    256
 #define DEFAULT_MAX_SRCH_RANGE_Y    256
 #define DEFAULT_MAX_FRAMERATE       120000
diff --git a/media/libstagefright/codecs/common/Android.bp b/media/libstagefright/codecs/common/Android.bp
index 3726922..c5a076a 100644
--- a/media/libstagefright/codecs/common/Android.bp
+++ b/media/libstagefright/codecs/common/Android.bp
@@ -1,9 +1,6 @@
 cc_library {
     name: "libstagefright_enc_common",
     vendor_available: true,
-    vndk: {
-        enabled: true,
-    },
 
     srcs: ["cmnMemory.c"],
 
diff --git a/media/libstagefright/codecs/hevcdec/SoftHEVC.cpp b/media/libstagefright/codecs/hevcdec/SoftHEVC.cpp
index bb7d361..f6ae1f4 100644
--- a/media/libstagefright/codecs/hevcdec/SoftHEVC.cpp
+++ b/media/libstagefright/codecs/hevcdec/SoftHEVC.cpp
@@ -569,7 +569,7 @@
             status = ivdec_api_function(mCodecCtx, (void *)&s_dec_ip, (void *)&s_dec_op);
 
             bool unsupportedResolution =
-                (IVD_STREAM_WIDTH_HEIGHT_NOT_SUPPORTED == (s_dec_op.u4_error_code & 0xFF));
+                (IVD_STREAM_WIDTH_HEIGHT_NOT_SUPPORTED == (s_dec_op.u4_error_code & IVD_ERROR_MASK));
 
             /* Check for unsupported dimensions */
             if (unsupportedResolution) {
@@ -579,7 +579,8 @@
                 return;
             }
 
-            bool allocationFailed = (IVD_MEM_ALLOC_FAILED == (s_dec_op.u4_error_code & 0xFF));
+            bool allocationFailed =
+                (IVD_MEM_ALLOC_FAILED == (s_dec_op.u4_error_code & IVD_ERROR_MASK));
             if (allocationFailed) {
                 ALOGE("Allocation failure in decoder");
                 notify(OMX_EventError, OMX_ErrorUnsupportedSetting, 0, NULL);
@@ -587,7 +588,14 @@
                 return;
             }
 
-            bool resChanged = (IVD_RES_CHANGED == (s_dec_op.u4_error_code & 0xFF));
+            if (IS_IVD_FATAL_ERROR(s_dec_op.u4_error_code)) {
+                ALOGE("Fatal Error : 0x%x", s_dec_op.u4_error_code);
+                notify(OMX_EventError, OMX_ErrorUnsupportedSetting, 0, NULL);
+                mSignalledError = true;
+                return;
+            }
+
+            bool resChanged = (IVD_RES_CHANGED == (s_dec_op.u4_error_code & IVD_ERROR_MASK));
 
             getVUIParams();
 
diff --git a/media/libstagefright/codecs/on2/dec/Android.bp b/media/libstagefright/codecs/on2/dec/Android.bp
index 577231c..82bb8d1 100644
--- a/media/libstagefright/codecs/on2/dec/Android.bp
+++ b/media/libstagefright/codecs/on2/dec/Android.bp
@@ -4,7 +4,7 @@
 
     srcs: ["SoftVPX.cpp"],
 
-    static_libs: ["libvpx"],
+    shared_libs: ["libvpx"],
 
     version_script: "exports.lds",
 
diff --git a/media/libstagefright/codecs/on2/enc/Android.bp b/media/libstagefright/codecs/on2/enc/Android.bp
index 82c215e..cd69e0d 100644
--- a/media/libstagefright/codecs/on2/enc/Android.bp
+++ b/media/libstagefright/codecs/on2/enc/Android.bp
@@ -20,5 +20,5 @@
         cfi: true,
     },
 
-    static_libs: ["libvpx"],
+    shared_libs: ["libvpx"],
 }
diff --git a/media/libstagefright/colorconversion/ColorConverter.cpp b/media/libstagefright/colorconversion/ColorConverter.cpp
index d136d9e..d685321 100644
--- a/media/libstagefright/colorconversion/ColorConverter.cpp
+++ b/media/libstagefright/colorconversion/ColorConverter.cpp
@@ -395,11 +395,16 @@
                 dst.mStride, src.cropWidth(), src.cropHeight());
         break;
 
-    case OMX_COLOR_Format32BitRGBA8888:
+    case OMX_COLOR_Format32bitBGRA8888:
         libyuv::NV12ToARGB(src_y, src.mStride, src_u, src.mStride, (uint8 *)dst_ptr,
                 dst.mStride, src.cropWidth(), src.cropHeight());
         break;
 
+    case OMX_COLOR_Format32BitRGBA8888:
+        libyuv::NV12ToABGR(src_y, src.mStride, src_u, src.mStride, (uint8 *)dst_ptr,
+                dst.mStride, src.cropWidth(), src.cropHeight());
+        break;
+
     default:
         return ERROR_UNSUPPORTED;
    }
diff --git a/media/libstagefright/data/Android.bp b/media/libstagefright/data/Android.bp
new file mode 100644
index 0000000..616b4b3
--- /dev/null
+++ b/media/libstagefright/data/Android.bp
@@ -0,0 +1,6 @@
+prebuilt_etc {
+    name: "mediaswcodec.xml",
+    src: "media_codecs_sw.xml",
+    filename: "media_codecs.xml",
+    installable: false,
+}
diff --git a/media/libstagefright/data/media_codecs_google_c2_video.xml b/media/libstagefright/data/media_codecs_google_c2_video.xml
index f785bfa..04041eb 100644
--- a/media/libstagefright/data/media_codecs_google_c2_video.xml
+++ b/media/libstagefright/data/media_codecs_google_c2_video.xml
@@ -109,11 +109,12 @@
         </MediaCodec>
         <MediaCodec name="c2.android.hevc.encoder" type="video/hevc">
             <!-- profiles and levels:  ProfileMain : MainTierLevel51 -->
-            <Limit name="size" min="320x128" max="512x512" />
+            <Limit name="size" min="2x2" max="512x512" />
             <Limit name="alignment" value="2x2" />
             <Limit name="block-size" value="8x8" />
             <Limit name="block-count" range="1-4096" /> <!-- max 512x512 -->
             <Limit name="blocks-per-second" range="1-122880" />
+            <Limit name="frame-rate" range="1-120" />
             <Limit name="bitrate" range="1-10000000" />
             <Limit name="complexity" range="0-10"  default="0" />
             <Limit name="quality" range="0-100"  default="80" />
diff --git a/media/libstagefright/data/media_codecs_sw.xml b/media/libstagefright/data/media_codecs_sw.xml
new file mode 100644
index 0000000..37f3f61
--- /dev/null
+++ b/media/libstagefright/data/media_codecs_sw.xml
@@ -0,0 +1,322 @@
+<?xml version="1.0" encoding="utf-8" ?>
+<!-- Copyright (C) 2014 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<MediaCodecs>
+    <Settings>
+        <!-- disable TV and telephony domains by default. These must be opted in by OEMs -->
+        <Domain name="telephony" enabled="false" />
+        <Domain name="tv" enabled="false" />
+        <Variant name="slow-cpu" enabled="false" />
+    </Settings>
+    <Decoders>
+        <MediaCodec name="c2.android.mp3.decoder" type="audio/mpeg">
+            <Alias name="OMX.google.mp3.decoder" />
+            <Limit name="channel-count" max="2" />
+            <Limit name="sample-rate" ranges="8000,11025,12000,16000,22050,24000,32000,44100,48000" />
+            <Limit name="bitrate" range="8000-320000" />
+        </MediaCodec>
+        <MediaCodec name="c2.android.amrnb.decoder" type="audio/3gpp">
+            <Alias name="OMX.google.amrnb.decoder" />
+            <Limit name="channel-count" max="1" />
+            <Limit name="sample-rate" ranges="8000" />
+            <Limit name="bitrate" range="4750-12200" />
+        </MediaCodec>
+        <MediaCodec name="c2.android.amrwb.decoder" type="audio/amr-wb">
+            <Alias name="OMX.google.amrwb.decoder" />
+            <Limit name="channel-count" max="1" />
+            <Limit name="sample-rate" ranges="16000" />
+            <Limit name="bitrate" range="6600-23850" />
+        </MediaCodec>
+        <MediaCodec name="c2.android.aac.decoder" type="audio/mp4a-latm">
+            <Alias name="OMX.google.aac.decoder" />
+            <Limit name="channel-count" max="8" />
+            <Limit name="sample-rate" ranges="7350,8000,11025,12000,16000,22050,24000,32000,44100,48000" />
+            <Limit name="bitrate" range="8000-960000" />
+        </MediaCodec>
+        <MediaCodec name="c2.android.g711.alaw.decoder" type="audio/g711-alaw">
+            <Alias name="OMX.google.g711.alaw.decoder" />
+            <Limit name="channel-count" max="1" />
+            <Limit name="sample-rate" ranges="8000-48000" />
+            <Limit name="bitrate" range="64000" />
+        </MediaCodec>
+        <MediaCodec name="c2.android.g711.mlaw.decoder" type="audio/g711-mlaw">
+            <Alias name="OMX.google.g711.mlaw.decoder" />
+            <Limit name="channel-count" max="1" />
+            <Limit name="sample-rate" ranges="8000-48000" />
+            <Limit name="bitrate" range="64000" />
+        </MediaCodec>
+        <MediaCodec name="c2.android.vorbis.decoder" type="audio/vorbis">
+            <Alias name="OMX.google.vorbis.decoder" />
+            <Limit name="channel-count" max="8" />
+            <Limit name="sample-rate" ranges="8000-96000" />
+            <Limit name="bitrate" range="32000-500000" />
+        </MediaCodec>
+        <MediaCodec name="c2.android.opus.decoder" type="audio/opus">
+            <Alias name="OMX.google.opus.decoder" />
+            <Limit name="channel-count" max="8" />
+            <Limit name="sample-rate" ranges="48000" />
+            <Limit name="bitrate" range="6000-510000" />
+        </MediaCodec>
+        <MediaCodec name="c2.android.raw.decoder" type="audio/raw">
+            <Alias name="OMX.google.raw.decoder" />
+            <Limit name="channel-count" max="8" />
+            <Limit name="sample-rate" ranges="8000-96000" />
+            <Limit name="bitrate" range="1-10000000" />
+        </MediaCodec>
+        <MediaCodec name="c2.android.flac.decoder" type="audio/flac">
+            <Alias name="OMX.google.flac.decoder" />
+            <Limit name="channel-count" max="8" />
+            <Limit name="sample-rate" ranges="1-655350" />
+            <Limit name="bitrate" range="1-21000000" />
+        </MediaCodec>
+        <MediaCodec name="c2.android.gsm.decoder" type="audio/gsm" domain="telephony">
+            <Alias name="OMX.google.gsm.decoder" />
+            <Limit name="channel-count" max="1" />
+            <Limit name="sample-rate" ranges="8000" />
+            <Limit name="bitrate" range="13000" />
+        </MediaCodec>
+        <MediaCodec name="c2.android.mpeg4.decoder" type="video/mp4v-es">
+            <Alias name="OMX.google.mpeg4.decoder" />
+            <!-- profiles and levels:  ProfileSimple : Level3 -->
+            <Limit name="size" min="2x2" max="352x288" />
+            <Limit name="alignment" value="2x2" />
+            <Limit name="block-size" value="16x16" />
+            <Limit name="blocks-per-second" range="12-11880" />
+            <Limit name="bitrate" range="1-384000" />
+            <Feature name="adaptive-playback" />
+        </MediaCodec>
+        <MediaCodec name="c2.android.h263.decoder" type="video/3gpp">
+            <Alias name="OMX.google.h263.decoder" />
+            <!-- profiles and levels:  ProfileBaseline : Level30, ProfileBaseline : Level45
+                    ProfileISWV2 : Level30, ProfileISWV2 : Level45 -->
+            <Limit name="size" min="2x2" max="352x288" />
+            <Limit name="alignment" value="2x2" />
+            <Limit name="bitrate" range="1-384000" />
+            <Feature name="adaptive-playback" />
+        </MediaCodec>
+        <MediaCodec name="c2.android.avc.decoder" type="video/avc" variant="slow-cpu,!slow-cpu">
+            <Alias name="OMX.google.h264.decoder" />
+            <Limit name="alignment" value="2x2" />
+            <Limit name="block-size" value="16x16" />
+            <Variant name="!slow-cpu">
+                <Limit name="size" min="2x2" max="4080x4080" />
+                <!-- profiles and levels:  ProfileHigh : Level52 -->
+                <Limit name="block-count" range="1-32768" /> <!-- max 4096x2048 equivalent -->
+                <Limit name="blocks-per-second" range="1-1966080" />
+                <Limit name="bitrate" range="1-48000000" />
+            </Variant>
+            <Variant name="slow-cpu">
+                <Limit name="size" min="2x2" max="2048x2048" />
+                <!-- profiles and levels:  ProfileHigh : Level51 -->
+                <Limit name="block-count" range="1-16384" />
+                <Limit name="blocks-per-second" range="1-491520" />
+                <Limit name="bitrate" range="1-40000000" />
+            </Variant>
+            <Feature name="adaptive-playback" />
+        </MediaCodec>
+        <MediaCodec name="c2.android.hevc.decoder" type="video/hevc" variant="slow-cpu,!slow-cpu">
+            <Alias name="OMX.google.hevc.decoder" />
+            <Limit name="alignment" value="2x2" />
+            <Limit name="block-size" value="8x8" />
+            <Variant name="!slow-cpu">
+                <Limit name="size" min="2x2" max="4096x4096" />
+                <!-- profiles and levels:  ProfileMain : MainTierLevel51 -->
+                <Limit name="block-count" range="1-196608" /> <!-- max 4096x3072 -->
+                <Limit name="blocks-per-second" range="1-2000000" />
+                <Limit name="bitrate" range="1-10000000" />
+            </Variant>
+            <Variant name="slow-cpu">
+                <Limit name="size" min="2x2" max="2048x2048" />
+                <!-- profiles and levels:  ProfileMain : MainTierLevel51 -->
+                <Limit name="block-count" range="1-65536" />
+                <Limit name="blocks-per-second" range="1-491520" />
+                <Limit name="bitrate" range="1-5000000" />
+            </Variant>
+            <Feature name="adaptive-playback" />
+        </MediaCodec>
+        <MediaCodec name="c2.android.vp8.decoder" type="video/x-vnd.on2.vp8" variant="slow-cpu,!slow-cpu">
+            <Alias name="OMX.google.vp8.decoder" />
+            <Limit name="size" min="2x2" max="2048x2048" />
+            <Limit name="alignment" value="2x2" />
+            <Limit name="block-size" value="16x16" />
+            <Variant name="!slow-cpu">
+                <Limit name="block-count" range="1-16384" />
+                <Limit name="blocks-per-second" range="1-1000000" />
+                <Limit name="bitrate" range="1-40000000" />
+            </Variant>
+            <Variant name="slow-cpu">
+                <Limit name="block-count" range="1-8192" /> <!-- max 2048x1024 -->
+                <Limit name="blocks-per-second" range="1-1000000" />
+                <Limit name="bitrate" range="1-40000000" />
+            </Variant>
+            <Feature name="adaptive-playback" />
+        </MediaCodec>
+        <MediaCodec name="c2.android.vp9.decoder" type="video/x-vnd.on2.vp9" variant="slow-cpu,!slow-cpu">
+            <Alias name="OMX.google.vp9.decoder" />
+            <Limit name="alignment" value="2x2" />
+            <Limit name="block-size" value="16x16" />
+            <Variant name="!slow-cpu">
+                <Limit name="size" min="2x2" max="2048x2048" />
+                <Limit name="block-count" range="1-16384" />
+                <Limit name="blocks-per-second" range="1-500000" />
+                <Limit name="bitrate" range="1-40000000" />
+            </Variant>
+            <Variant name="slow-cpu">
+                <Limit name="size" min="2x2" max="1280x1280" />
+                <Limit name="block-count" range="1-3600" /> <!-- max 1280x720 -->
+                <Limit name="blocks-per-second" range="1-108000" />
+                <Limit name="bitrate" range="1-5000000" />
+            </Variant>
+            <Feature name="adaptive-playback" />
+        </MediaCodec>
+        <MediaCodec name="c2.android.av1.decoder" type="video/av01" variant="!slow-cpu">
+            <Limit name="size" min="2x2" max="1920x1080" />
+            <Limit name="alignment" value="2x2" />
+            <Limit name="block-size" value="16x16" />
+            <Limit name="block-count" range="1-16384" />
+            <Limit name="blocks-per-second" range="1-2073600" />
+            <Limit name="bitrate" range="1-120000000" />
+            <Feature name="adaptive-playback" />
+        </MediaCodec>
+        <MediaCodec name="c2.android.mpeg2.decoder" type="video/mpeg2" domain="tv">
+            <Alias name="OMX.google.mpeg2.decoder" />
+            <!-- profiles and levels:  ProfileMain : LevelHL -->
+            <Limit name="size" min="16x16" max="1920x1088" />
+            <Limit name="alignment" value="2x2" />
+            <Limit name="block-size" value="16x16" />
+            <Limit name="blocks-per-second" range="1-244800" />
+            <Limit name="bitrate" range="1-20000000" />
+            <Feature name="adaptive-playback" />
+        </MediaCodec>
+    </Decoders>
+    <Encoders>
+        <MediaCodec name="c2.android.aac.encoder" type="audio/mp4a-latm">
+            <Alias name="OMX.google.aac.encoder" />
+            <Limit name="channel-count" max="6" />
+            <Limit name="sample-rate" ranges="8000,11025,12000,16000,22050,24000,32000,44100,48000" />
+            <!-- also may support 64000, 88200  and 96000 Hz -->
+            <Limit name="bitrate" range="8000-960000" />
+        </MediaCodec>
+        <MediaCodec name="c2.android.amrnb.encoder" type="audio/3gpp">
+            <Alias name="OMX.google.amrnb.encoder" />
+            <Limit name="channel-count" max="1" />
+            <Limit name="sample-rate" ranges="8000" />
+            <Limit name="bitrate" range="4750-12200" />
+            <Feature name="bitrate-modes" value="CBR" />
+        </MediaCodec>
+        <MediaCodec name="c2.android.amrwb.encoder" type="audio/amr-wb">
+            <Alias name="OMX.google.amrwb.encoder" />
+            <Limit name="channel-count" max="1" />
+            <Limit name="sample-rate" ranges="16000" />
+            <Limit name="bitrate" range="6600-23850" />
+            <Feature name="bitrate-modes" value="CBR" />
+        </MediaCodec>
+        <MediaCodec name="c2.android.flac.encoder" type="audio/flac">
+            <Alias name="OMX.google.flac.encoder" />
+            <Limit name="channel-count" max="2" />
+            <Limit name="sample-rate" ranges="1-655350" />
+            <Limit name="bitrate" range="1-21000000" />
+            <Limit name="complexity" range="0-8"  default="5" />
+            <Feature name="bitrate-modes" value="CQ" />
+        </MediaCodec>
+        <MediaCodec name="c2.android.opus.encoder" type="audio/opus">
+            <Limit name="channel-count" max="2" />
+            <Limit name="sample-rate" ranges="8000,12000,16000,24000,48000" />
+            <Limit name="bitrate" range="500-512000" />
+            <Limit name="complexity" range="0-10"  default="5" />
+            <Feature name="bitrate-modes" value="CQ" />
+        </MediaCodec>
+        <MediaCodec name="c2.android.h263.encoder" type="video/3gpp">
+            <Alias name="OMX.google.h263.encoder" />
+            <!-- profiles and levels:  ProfileBaseline : Level45 -->
+            <Limit name="size" min="176x144" max="176x144" />
+            <Limit name="alignment" value="16x16" />
+            <Limit name="bitrate" range="1-128000" />
+        </MediaCodec>
+        <MediaCodec name="c2.android.mpeg4.encoder" type="video/mp4v-es">
+            <Alias name="OMX.google.mpeg4.encoder" />
+            <!-- profiles and levels:  ProfileCore : Level2 -->
+            <Limit name="size" min="16x16" max="176x144" />
+            <Limit name="alignment" value="16x16" />
+            <Limit name="block-size" value="16x16" />
+            <Limit name="blocks-per-second" range="12-1485" />
+            <Limit name="bitrate" range="1-64000" />
+        </MediaCodec>
+        <MediaCodec name="c2.android.avc.encoder" type="video/avc" variant="slow-cpu,!slow-cpu">
+            <Alias name="OMX.google.h264.encoder" />
+            <Limit name="alignment" value="2x2" />
+            <Limit name="block-size" value="16x16" />
+            <Variant name="!slow-cpu">
+                <Limit name="size" min="16x16" max="2048x2048" />
+                <!-- profiles and levels:  ProfileBaseline : Level41 -->
+                <Limit name="block-count" range="1-8192" /> <!-- max 2048x1024 -->
+                <Limit name="blocks-per-second" range="1-245760" />
+                <Limit name="bitrate" range="1-12000000" />
+            </Variant>
+            <Variant name="slow-cpu">
+                <Limit name="size" min="16x16" max="1808x1808" />
+                <!-- profiles and levels:  ProfileBaseline : Level3 -->
+                <Limit name="block-count" range="1-1620" />
+                <Limit name="blocks-per-second" range="1-40500" />
+                <Limit name="bitrate" range="1-2000000" />
+            </Variant>
+            <Feature name="intra-refresh" />
+        </MediaCodec>
+        <MediaCodec name="c2.android.vp8.encoder" type="video/x-vnd.on2.vp8" variant="slow-cpu,!slow-cpu">
+            <Alias name="OMX.google.vp8.encoder" />
+            <Limit name="alignment" value="2x2" />
+            <Limit name="block-size" value="16x16" />
+            <Variant name="!slow-cpu">
+                <Limit name="size" min="2x2" max="2048x2048" />
+                <!-- profiles and levels:  ProfileMain : Level_Version0-3 -->
+                <!-- 2016 devices can encode at about 10fps at this block count -->
+                <Limit name="block-count" range="1-16384" />
+                <Limit name="bitrate" range="1-40000000" />
+            </Variant>
+            <Variant name="slow-cpu">
+                <Limit name="size" min="2x2" max="1280x1280" />
+                <!-- profiles and levels:  ProfileMain : Level_Version0-3 -->
+                <Limit name="block-count" range="1-3600" /> <!-- max 1280x720 -->
+                <Limit name="bitrate" range="1-20000000" />
+            </Variant>
+            <Feature name="bitrate-modes" value="VBR,CBR" />
+        </MediaCodec>
+        <MediaCodec name="c2.android.hevc.encoder" type="video/hevc" variant="!slow-cpu">
+            <!-- profiles and levels:  ProfileMain : MainTierLevel51 -->
+            <Limit name="size" min="2x2" max="512x512" />
+            <Limit name="alignment" value="2x2" />
+            <Limit name="block-size" value="8x8" />
+            <Limit name="block-count" range="1-4096" /> <!-- max 512x512 -->
+            <Limit name="blocks-per-second" range="1-122880" />
+            <Limit name="frame-rate" range="1-120" />
+            <Limit name="bitrate" range="1-10000000" />
+            <Limit name="complexity" range="0-10"  default="0" />
+            <Limit name="quality" range="0-100"  default="80" />
+            <Feature name="bitrate-modes" value="VBR,CBR,CQ" />
+        </MediaCodec>
+        <MediaCodec name="c2.android.vp9.encoder" type="video/x-vnd.on2.vp9" variant="!slow-cpu">
+            <Alias name="OMX.google.vp9.encoder" />
+            <!-- profiles and levels:  ProfileMain : Level_Version0-3 -->
+            <Limit name="size" min="2x2" max="2048x2048" />
+            <Limit name="alignment" value="2x2" />
+            <Limit name="block-size" value="16x16" />
+            <!-- 2016 devices can encode at about 8fps at this block count -->
+            <Limit name="block-count" range="1-3600" /> <!-- max 1280x720 -->
+            <Limit name="bitrate" range="1-40000000" />
+            <Feature name="bitrate-modes" value="VBR,CBR" />
+        </MediaCodec>
+    </Encoders>
+</MediaCodecs>
diff --git a/media/libstagefright/flac/dec/Android.bp b/media/libstagefright/flac/dec/Android.bp
index b270808..b494e16 100644
--- a/media/libstagefright/flac/dec/Android.bp
+++ b/media/libstagefright/flac/dec/Android.bp
@@ -1,9 +1,6 @@
 cc_library {
     name: "libstagefright_flacdec",
     vendor_available: true,
-    vndk: {
-        enabled: true,
-    },
 
     srcs: [
         "FLACDecoder.cpp",
diff --git a/media/libstagefright/foundation/avc_utils.cpp b/media/libstagefright/foundation/avc_utils.cpp
index e8a6083..f53d2c9 100644
--- a/media/libstagefright/foundation/avc_utils.cpp
+++ b/media/libstagefright/foundation/avc_utils.cpp
@@ -166,10 +166,21 @@
     unsigned pic_height_in_map_units_minus1 = parseUE(&br);
     unsigned frame_mbs_only_flag = br.getBits(1);
 
-    *width = pic_width_in_mbs_minus1 * 16 + 16;
+    //    *width = pic_width_in_mbs_minus1 * 16 + 16;
+    if (__builtin_mul_overflow(pic_width_in_mbs_minus1, 16, &pic_width_in_mbs_minus1) ||
+        __builtin_add_overflow(pic_width_in_mbs_minus1, 16, width)) {
+        *width = 0;
+    }
 
-    *height = (2 - frame_mbs_only_flag)
-        * (pic_height_in_map_units_minus1 * 16 + 16);
+    //    *height = (2 - frame_mbs_only_flag) * (pic_height_in_map_units_minus1 * 16 + 16);
+    if (__builtin_mul_overflow(
+                pic_height_in_map_units_minus1, 16, &pic_height_in_map_units_minus1) ||
+        __builtin_add_overflow(
+                pic_height_in_map_units_minus1, 16, &pic_height_in_map_units_minus1) ||
+        __builtin_mul_overflow(
+                pic_height_in_map_units_minus1, (2 - frame_mbs_only_flag), height)) {
+        *height = 0;
+    }
 
     if (!frame_mbs_only_flag) {
         br.getBits(1);  // mb_adaptive_frame_field_flag
@@ -202,17 +213,19 @@
 
 
         // *width -= (frame_crop_left_offset + frame_crop_right_offset) * cropUnitX;
-        if(__builtin_add_overflow(frame_crop_left_offset, frame_crop_right_offset, &frame_crop_left_offset) ||
-            __builtin_mul_overflow(frame_crop_left_offset, cropUnitX, &frame_crop_left_offset) ||
-            __builtin_sub_overflow(*width, frame_crop_left_offset, width) ||
+        if(__builtin_add_overflow(
+                   frame_crop_left_offset, frame_crop_right_offset, &frame_crop_left_offset) ||
+           __builtin_mul_overflow(frame_crop_left_offset, cropUnitX, &frame_crop_left_offset) ||
+           __builtin_sub_overflow(*width, frame_crop_left_offset, width) ||
             *width < 0) {
             *width = 0;
         }
 
         //*height -= (frame_crop_top_offset + frame_crop_bottom_offset) * cropUnitY;
-        if(__builtin_add_overflow(frame_crop_top_offset, frame_crop_bottom_offset, &frame_crop_top_offset) ||
-            __builtin_mul_overflow(frame_crop_top_offset, cropUnitY, &frame_crop_top_offset) ||
-            __builtin_sub_overflow(*height, frame_crop_top_offset, height) ||
+        if(__builtin_add_overflow(
+                   frame_crop_top_offset, frame_crop_bottom_offset, &frame_crop_top_offset) ||
+           __builtin_mul_overflow(frame_crop_top_offset, cropUnitY, &frame_crop_top_offset) ||
+           __builtin_sub_overflow(*height, frame_crop_top_offset, height) ||
             *height < 0) {
             *height = 0;
         }
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/ADebug.h b/media/libstagefright/foundation/include/media/stagefright/foundation/ADebug.h
index a8b88fd..ab17a02 100644
--- a/media/libstagefright/foundation/include/media/stagefright/foundation/ADebug.h
+++ b/media/libstagefright/foundation/include/media/stagefright/foundation/ADebug.h
@@ -53,9 +53,11 @@
 #define LITERAL_TO_STRING_INTERNAL(x)    #x
 #define LITERAL_TO_STRING(x) LITERAL_TO_STRING_INTERNAL(x)
 
-#ifdef CHECK
-#undef CHECK
-#endif
+// Allow the use of CHECK_OP from android-base/logging.h
+// TODO: longterm replace this with android-base/logging.h, but there are some nuances, e.g.
+// android-base CHECK_OP requires a copy constructor, whereas we don't.
+#ifndef CHECK_OP
+
 #define CHECK(condition)                                \
     LOG_ALWAYS_FATAL_IF(                                \
             !(condition),                               \
@@ -63,10 +65,6 @@
             __FILE__ ":" LITERAL_TO_STRING(__LINE__)    \
             " CHECK(" #condition ") failed.")
 
-#ifdef CHECK_OP
-#undef CHECK_OP
-#endif
-
 #define CHECK_OP(x,y,suffix,op)                                         \
     do {                                                                \
         const auto &a = x;                                              \
@@ -82,15 +80,6 @@
         }                                                               \
     } while (false)
 
-#ifdef CHECK_EQ
-#undef CHECK_EQ
-#undef CHECK_NE
-#undef CHECK_LE
-#undef CHECK_LT
-#undef CHECK_GE
-#undef CHECK_GT
-#endif
-
 #define CHECK_EQ(x,y)   CHECK_OP(x,y,EQ,==)
 #define CHECK_NE(x,y)   CHECK_OP(x,y,NE,!=)
 #define CHECK_LE(x,y)   CHECK_OP(x,y,LE,<=)
@@ -98,6 +87,8 @@
 #define CHECK_GE(x,y)   CHECK_OP(x,y,GE,>=)
 #define CHECK_GT(x,y)   CHECK_OP(x,y,GT,>)
 
+#endif
+
 #define TRESPASS(...) \
         LOG_ALWAYS_FATAL(                                       \
             __FILE__ ":" LITERAL_TO_STRING(__LINE__)            \
diff --git a/media/libstagefright/httplive/LiveSession.cpp b/media/libstagefright/httplive/LiveSession.cpp
index 5e7f90a..9cf97c7 100644
--- a/media/libstagefright/httplive/LiveSession.cpp
+++ b/media/libstagefright/httplive/LiveSession.cpp
@@ -1384,7 +1384,7 @@
         while (index > lowestBandwidth) {
             // be conservative (70%) to avoid overestimating and immediately
             // switching down again.
-            size_t adjustedBandwidthBps = bandwidthBps * 7 / 10;
+            size_t adjustedBandwidthBps = bandwidthBps * .7f;
             const BandwidthItem &item = mBandwidthItems[index];
             if (item.mBandwidth <= adjustedBandwidthBps
                     && isBandwidthValid(item)) {
diff --git a/media/libstagefright/httplive/PlaylistFetcher.cpp b/media/libstagefright/httplive/PlaylistFetcher.cpp
index 635ecfe..0950db0 100644
--- a/media/libstagefright/httplive/PlaylistFetcher.cpp
+++ b/media/libstagefright/httplive/PlaylistFetcher.cpp
@@ -2160,7 +2160,9 @@
             return ERROR_MALFORMED;
         }
 
-        CHECK_LE(offset + aac_frame_length, buffer->size());
+        if (aac_frame_length > buffer->size() - offset) {
+            return ERROR_MALFORMED;
+        }
 
         int64_t unitTimeUs = timeUs + numSamples * 1000000LL / sampleRate;
         offset += aac_frame_length;
diff --git a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
index c84614a..50d7724 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
@@ -356,16 +356,18 @@
     }
 }
 
-constexpr int32_t AV1Profile0 = 0x01;
-constexpr int32_t AV1Profile1 = 0x02;
-constexpr int32_t AV1Profile2 = 0x04;
+constexpr int32_t AV1ProfileMain8 = 0x1;
+constexpr int32_t AV1ProfileMain10 = 0x2;
+constexpr int32_t AV1ProfileMain10HDR10 = 0x1000;
+constexpr int32_t AV1ProfileMain10HDR10Plus = 0x2000;
 
 inline static const char *asString_AV1Profile(int32_t i, const char *def = "??") {
     switch (i) {
-        case AV1Profile0:       return "0";
-        case AV1Profile1:       return "1";
-        case AV1Profile2:       return "2";
-        default:                return def;
+        case AV1ProfileMain8:           return "Main8";
+        case AV1ProfileMain10:          return "Main10";
+        case AV1ProfileMain10HDR10:     return "Main10HDR10";
+        case AV1ProfileMain10HDR10Plus: return "Main10HDR10Plus";
+        default:                        return def;
     }
 }
 
diff --git a/media/libstagefright/include/media/stagefright/MediaExtractorFactory.h b/media/libstagefright/include/media/stagefright/MediaExtractorFactory.h
index ea87948..2ab98e1 100644
--- a/media/libstagefright/include/media/stagefright/MediaExtractorFactory.h
+++ b/media/libstagefright/include/media/stagefright/MediaExtractorFactory.h
@@ -37,6 +37,7 @@
             const sp<DataSource> &source, const char *mime = NULL);
     static status_t dump(int fd, const Vector<String16>& args);
     static std::unordered_set<std::string> getSupportedTypes();
+    static void LoadExtractors();
 
 private:
     static Mutex gPluginMutex;
@@ -53,8 +54,6 @@
     static void *sniff(const sp<DataSource> &source,
             float *confidence, void **meta, FreeMetaFunc *freeMeta,
             sp<ExtractorPlugin> &plugin, uint32_t *creatorVersion);
-
-    static void UpdateExtractors();
 };
 
 }  // namespace android
diff --git a/media/libstagefright/omx/1.0/Omx.cpp b/media/libstagefright/omx/1.0/Omx.cpp
index 121bb1a..eef9ce3 100644
--- a/media/libstagefright/omx/1.0/Omx.cpp
+++ b/media/libstagefright/omx/1.0/Omx.cpp
@@ -18,7 +18,6 @@
 #include <list>
 
 #include <android-base/logging.h>
-#include <gui/IGraphicBufferProducer.h>
 #include <media/openmax/OMX_Core.h>
 #include <media/openmax/OMX_AsString.h>
 
@@ -28,7 +27,6 @@
 
 #include <media/stagefright/omx/1.0/WOmxNode.h>
 #include <media/stagefright/omx/1.0/WOmxObserver.h>
-#include <media/stagefright/omx/1.0/WGraphicBufferProducer.h>
 #include <media/stagefright/omx/1.0/WGraphicBufferSource.h>
 #include <media/stagefright/omx/1.0/Conversion.h>
 #include <media/stagefright/omx/1.0/Omx.h>
@@ -45,6 +43,8 @@
 Omx::Omx() :
     mMaster(new OMXMaster()),
     mParser() {
+    (void)mParser.parseXmlFilesInSearchDirs();
+    (void)mParser.parseXmlPath(mParser.defaultProfilingResultsXmlPath);
 }
 
 Omx::~Omx() {
@@ -146,8 +146,6 @@
 }
 
 Return<void> Omx::createInputSurface(createInputSurface_cb _hidl_cb) {
-    sp<::android::IGraphicBufferProducer> bufferProducer;
-
     sp<OmxGraphicBufferSource> graphicBufferSource = new OmxGraphicBufferSource();
     status_t err = graphicBufferSource->initCheck();
     if (err != OK) {
@@ -157,10 +155,9 @@
         _hidl_cb(toStatus(err), nullptr, nullptr);
         return Void();
     }
-    bufferProducer = graphicBufferSource->getIGraphicBufferProducer();
 
     _hidl_cb(toStatus(OK),
-            new TWGraphicBufferProducer(bufferProducer),
+            graphicBufferSource->getHGraphicBufferProducer_V1_0(),
             new TWGraphicBufferSource(graphicBufferSource));
     return Void();
 }
diff --git a/media/libstagefright/omx/1.0/OmxStore.cpp b/media/libstagefright/omx/1.0/OmxStore.cpp
index 2e041e3..67f478e 100644
--- a/media/libstagefright/omx/1.0/OmxStore.cpp
+++ b/media/libstagefright/omx/1.0/OmxStore.cpp
@@ -17,6 +17,8 @@
 #include <ios>
 #include <list>
 
+#define LOG_TAG "OmxStore"
+
 #include <android-base/logging.h>
 
 #include <media/stagefright/omx/1.0/Conversion.h>
@@ -30,16 +32,33 @@
 namespace V1_0 {
 namespace implementation {
 
+using ::android::hardware::media::omx::V1_0::Status;
+using ::android::hardware::media::omx::V1_0::IOmx;
+
 OmxStore::OmxStore(
+        const sp<IOmx> &omx,
         const char* owner,
-        const char* const* searchDirs,
-        const char* mainXmlName,
-        const char* performanceXmlName,
+        const std::vector<std::string> &searchDirs,
+        const std::vector<std::string> &xmlNames,
         const char* profilingResultsXmlPath) {
-    MediaCodecsXmlParser parser(searchDirs,
-            mainXmlName,
-            performanceXmlName,
-            profilingResultsXmlPath);
+    // retrieve list of omx nodes
+    std::set<std::string> nodes;
+    if (omx != nullptr) {
+        omx->listNodes([&nodes](const Status &status,
+                                const hidl_vec<IOmx::ComponentInfo> &nodeList) {
+            if (status == Status::OK) {
+                for (const IOmx::ComponentInfo& info : nodeList) {
+                    nodes.emplace(info.mName.c_str());
+                }
+            }
+        });
+    }
+
+    MediaCodecsXmlParser parser;
+    parser.parseXmlFilesInSearchDirs(xmlNames, searchDirs);
+    if (profilingResultsXmlPath != nullptr) {
+        parser.parseXmlPath(profilingResultsXmlPath);
+    }
     mParsingStatus = toStatus(parser.getParsingStatus());
 
     const auto& serviceAttributeMap = parser.getServiceAttributeMap();
@@ -66,6 +85,13 @@
         nodeList.resize(rolePair.second.nodeList.size());
         size_t j = 0;
         for (const auto& nodePair : rolePair.second.nodeList) {
+            if (!nodes.count(nodePair.second.name)) {
+                // not supported by this OMX instance
+                if (!strncasecmp(nodePair.second.name.c_str(), "omx.", 4)) {
+                    LOG(INFO) << "node [" << nodePair.second.name.c_str() << "] not found in IOmx";
+                }
+                continue;
+            }
             NodeInfo node;
             node.name = nodePair.second.name;
             node.owner = owner;
@@ -82,6 +108,7 @@
             nodeList[j] = std::move(node);
             ++j;
         }
+        nodeList.resize(j);
         mRoleList[i] = std::move(role);
         ++i;
     }
diff --git a/media/libstagefright/omx/Android.bp b/media/libstagefright/omx/Android.bp
index 0c50752..e260cae 100644
--- a/media/libstagefright/omx/Android.bp
+++ b/media/libstagefright/omx/Android.bp
@@ -41,7 +41,6 @@
         "libcutils",
         "libstagefright_foundation",
         "libstagefright_bufferqueue_helper",
-        "libstagefright_softomx",
         "libstagefright_xmlparser",
         "libdl",
         "libhidlbase",
@@ -79,14 +78,10 @@
 cc_library_shared {
     name: "libstagefright_softomx",
     vendor_available: true,
-    vndk: {
-        enabled: true,
-    },
 
     srcs: [
         "SimpleSoftOMXComponent.cpp",
         "SoftOMXComponent.cpp",
-        "SoftOMXPlugin.cpp",
         "SoftVideoDecoderOMXComponent.cpp",
         "SoftVideoEncoderOMXComponent.cpp",
     ],
@@ -126,6 +121,49 @@
     },
 }
 
+cc_library_shared {
+    name: "libstagefright_softomx_plugin",
+    vendor_available: true,
+
+    srcs: [
+        "SoftOMXPlugin.cpp",
+    ],
+
+    export_include_dirs: [
+        "include",
+    ],
+
+    header_libs: [
+        "media_plugin_headers",
+    ],
+
+    export_header_lib_headers: [
+        "media_plugin_headers",
+    ],
+
+    shared_libs: [
+        "libstagefright_softomx",
+        "libstagefright_foundation",
+        "liblog",
+        "libutils",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+        "-Wno-unused-parameter",
+        "-Wno-documentation",
+    ],
+
+    sanitize: {
+        misc_undefined: [
+            "signed-integer-overflow",
+            "unsigned-integer-overflow",
+        ],
+        cfi: true,
+    },
+}
+
 cc_defaults {
     name: "libstagefright_softomx-defaults",
     vendor_available: true,
diff --git a/media/libstagefright/omx/OMXMaster.cpp b/media/libstagefright/omx/OMXMaster.cpp
index 0967b5f..8c5ca6e 100644
--- a/media/libstagefright/omx/OMXMaster.cpp
+++ b/media/libstagefright/omx/OMXMaster.cpp
@@ -29,8 +29,7 @@
 
 namespace android {
 
-OMXMaster::OMXMaster()
-    : mVendorLibHandle(NULL) {
+OMXMaster::OMXMaster() {
 
     pid_t pid = getpid();
     char filename[20];
@@ -52,47 +51,52 @@
     }
 
     addVendorPlugin();
-    addPlugin(new SoftOMXPlugin);
+    addPlatformPlugin();
 }
 
 OMXMaster::~OMXMaster() {
     clearPlugins();
-
-    if (mVendorLibHandle != NULL) {
-        dlclose(mVendorLibHandle);
-        mVendorLibHandle = NULL;
-    }
 }
 
 void OMXMaster::addVendorPlugin() {
     addPlugin("libstagefrighthw.so");
 }
 
-void OMXMaster::addPlugin(const char *libname) {
-    mVendorLibHandle = android_load_sphal_library(libname, RTLD_NOW);
+void OMXMaster::addPlatformPlugin() {
+    addPlugin("libstagefright_softomx_plugin.so");
+}
 
-    if (mVendorLibHandle == NULL) {
+void OMXMaster::addPlugin(const char *libname) {
+    void *libHandle = android_load_sphal_library(libname, RTLD_NOW);
+
+    if (libHandle == NULL) {
         return;
     }
 
     typedef OMXPluginBase *(*CreateOMXPluginFunc)();
     CreateOMXPluginFunc createOMXPlugin =
         (CreateOMXPluginFunc)dlsym(
-                mVendorLibHandle, "createOMXPlugin");
+                libHandle, "createOMXPlugin");
     if (!createOMXPlugin)
         createOMXPlugin = (CreateOMXPluginFunc)dlsym(
-                mVendorLibHandle, "_ZN7android15createOMXPluginEv");
+                libHandle, "_ZN7android15createOMXPluginEv");
 
+    OMXPluginBase *plugin = nullptr;
     if (createOMXPlugin) {
-        addPlugin((*createOMXPlugin)());
+        plugin = (*createOMXPlugin)();
+    }
+
+    if (plugin) {
+        mPlugins.push_back({ plugin, libHandle });
+        addPlugin(plugin);
+    } else {
+        android_unload_sphal_library(libHandle);
     }
 }
 
 void OMXMaster::addPlugin(OMXPluginBase *plugin) {
     Mutex::Autolock autoLock(mLock);
 
-    mPlugins.push_back(plugin);
-
     OMX_U32 index = 0;
 
     char name[128];
@@ -120,20 +124,20 @@
 void OMXMaster::clearPlugins() {
     Mutex::Autolock autoLock(mLock);
 
-    typedef void (*DestroyOMXPluginFunc)(OMXPluginBase*);
-    DestroyOMXPluginFunc destroyOMXPlugin =
-        (DestroyOMXPluginFunc)dlsym(
-                mVendorLibHandle, "destroyOMXPlugin");
-
     mPluginByComponentName.clear();
+    mPluginByInstance.clear();
 
-    for (List<OMXPluginBase *>::iterator it = mPlugins.begin();
-            it != mPlugins.end(); ++it) {
+    typedef void (*DestroyOMXPluginFunc)(OMXPluginBase*);
+    for (const Plugin &plugin : mPlugins) {
+        DestroyOMXPluginFunc destroyOMXPlugin =
+            (DestroyOMXPluginFunc)dlsym(
+                    plugin.mLibHandle, "destroyOMXPlugin");
         if (destroyOMXPlugin)
-            destroyOMXPlugin(*it);
+            destroyOMXPlugin(plugin.mOmx);
         else
-            delete *it;
-        *it = NULL;
+            delete plugin.mOmx;
+
+        android_unload_sphal_library(plugin.mLibHandle);
     }
 
     mPlugins.clear();
diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp
index d7aacff..ddb4ba0 100644
--- a/media/libstagefright/omx/OMXNodeInstance.cpp
+++ b/media/libstagefright/omx/OMXNodeInstance.cpp
@@ -33,12 +33,13 @@
 
 #include <binder/IMemory.h>
 #include <cutils/properties.h>
-#include <gui/BufferQueue.h>
 #include <media/hardware/HardwareAPI.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ColorUtils.h>
 #include <media/stagefright/MediaErrors.h>
+#include <ui/GraphicBuffer.h>
+#include <ui/Fence.h>
 #include <utils/misc.h>
 #include <utils/NativeHandle.h>
 #include <media/OMXBuffer.h>
diff --git a/media/libstagefright/omx/SoftOMXPlugin.cpp b/media/libstagefright/omx/SoftOMXPlugin.cpp
index 1f3e8c1..a720bc9 100644
--- a/media/libstagefright/omx/SoftOMXPlugin.cpp
+++ b/media/libstagefright/omx/SoftOMXPlugin.cpp
@@ -71,6 +71,16 @@
 static const size_t kNumComponents =
     sizeof(kComponents) / sizeof(kComponents[0]);
 
+extern "C" OMXPluginBase* createOMXPlugin() {
+    ALOGI("createOMXPlugin");
+    return new SoftOMXPlugin();
+}
+
+extern "C" void destroyOMXPlugin(OMXPluginBase* plugin) {
+    ALOGI("destroyOMXPlugin");
+    delete plugin;
+}
+
 SoftOMXPlugin::SoftOMXPlugin() {
 }
 
diff --git a/media/libstagefright/omx/include/media/stagefright/omx/1.0/Conversion.h b/media/libstagefright/omx/include/media/stagefright/omx/1.0/Conversion.h
index 0f229f7..9669677 100644
--- a/media/libstagefright/omx/include/media/stagefright/omx/1.0/Conversion.h
+++ b/media/libstagefright/omx/include/media/stagefright/omx/1.0/Conversion.h
@@ -14,8 +14,8 @@
  * limitations under the License.
  */
 
-#ifndef ANDROID_HARDWARE_MEDIA_OMX_V1_0__CONVERSION_H
-#define ANDROID_HARDWARE_MEDIA_OMX_V1_0__CONVERSION_H
+#ifndef ANDROID_HARDWARE_MEDIA_OMX_V1_0_IMPL_CONVERSION_H
+#define ANDROID_HARDWARE_MEDIA_OMX_V1_0_IMPL_CONVERSION_H
 
 #include <vector>
 #include <list>
@@ -29,15 +29,15 @@
 
 #include <binder/Binder.h>
 #include <binder/Status.h>
+#include <ui/BufferQueueDefs.h>
 #include <ui/FenceTime.h>
 #include <cutils/native_handle.h>
-#include <gui/IGraphicBufferProducer.h>
 
 #include <media/OMXFenceParcelable.h>
 #include <media/OMXBuffer.h>
+#include <media/omx/1.0/Conversion.h>
 #include <media/hardware/VideoAPI.h>
 #include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/bqhelper/Conversion.h>
 
 #include <android/hidl/memory/1.0/IMemory.h>
 #include <android/hardware/graphics/bufferqueue/1.0/IProducerListener.h>
@@ -99,16 +99,9 @@
 
 typedef ::android::hardware::graphics::bufferqueue::V1_0::IGraphicBufferProducer
         HGraphicBufferProducer;
-typedef ::android::IGraphicBufferProducer
-        BGraphicBufferProducer;
 
-// We want to use all functions declared in ::android::conversion
-using namespace ::android::conversion;
-
-// Now specifically inject these two functions here, because we're going to
-// declare functions with the same name in this namespace.
-using ::android::conversion::convertTo;
-using ::android::conversion::toStatusT;
+// Use the conversion functions from libmedia_omx so that we don't need libgui
+using namespace ::android::hardware::media::omx::V1_0::utils;
 
 /**
  * Conversion functions
@@ -143,62 +136,6 @@
  */
 
 /**
- * \brief Convert `Status` to `status_t`. This is for legacy binder calls.
- *
- * \param[in] t The source `Status`.
- * \return the corresponding `status_t`.
- */
-// convert: Status -> status_t
-inline status_t toStatusT(Status const& t) {
-    switch (t) {
-    case Status::NO_ERROR:
-    case Status::NAME_NOT_FOUND:
-    case Status::WOULD_BLOCK:
-    case Status::NO_MEMORY:
-    case Status::ALREADY_EXISTS:
-    case Status::NO_INIT:
-    case Status::BAD_VALUE:
-    case Status::DEAD_OBJECT:
-    case Status::INVALID_OPERATION:
-    case Status::TIMED_OUT:
-    case Status::ERROR_UNSUPPORTED:
-    case Status::UNKNOWN_ERROR:
-    case Status::RELEASE_ALL_BUFFERS:
-        return static_cast<status_t>(t);
-    case Status::BUFFER_NEEDS_REALLOCATION:
-        return NOT_ENOUGH_DATA;
-    default:
-        ALOGW("Unrecognized status value: %" PRId32, static_cast<int32_t>(t));
-        return static_cast<status_t>(t);
-    }
-}
-
-/**
- * \brief Convert `Return<Status>` to `status_t`. This is for legacy binder
- * calls.
- *
- * \param[in] t The source `Return<Status>`.
- * \return The corresponding `status_t`.
- *
- * This function first check if \p t has a transport error. If it does, then the
- * return value is the transport error code. Otherwise, the return value is
- * converted from `Status` contained inside \p t.
- *
- * Note:
- * - This `Status` is omx-specific. It is defined in `types.hal`.
- * - The name of this function is not `convert`.
- */
-// convert: Status -> status_t
-inline status_t toStatusT(Return<Status> const& t) {
-    if (t.isOk()) {
-        return toStatusT(static_cast<Status>(t));
-    } else if (t.isDeadObject()) {
-        return DEAD_OBJECT;
-    }
-    return UNKNOWN_ERROR;
-}
-
-/**
  * \brief Convert `status_t` to `Status`.
  *
  * \param[in] l The source `status_t`.
@@ -219,8 +156,8 @@
     case TIMED_OUT:
     case ERROR_UNSUPPORTED:
     case UNKNOWN_ERROR:
-    case IGraphicBufferProducer::RELEASE_ALL_BUFFERS:
-    case IGraphicBufferProducer::BUFFER_NEEDS_REALLOCATION:
+    case BufferQueueDefs::RELEASE_ALL_BUFFERS:
+    case BufferQueueDefs::BUFFER_NEEDS_REALLOCATION:
         return static_cast<Status>(l);
     case NOT_ENOUGH_DATA:
         return Status::BUFFER_NEEDS_REALLOCATION;
@@ -572,7 +509,8 @@
             anwBuffer.nativeHandle = t.nativeHandle;
             anwBuffer.attr = t.attr.anwBuffer;
             sp<GraphicBuffer> graphicBuffer = new GraphicBuffer();
-            if (!convertTo(graphicBuffer.get(), anwBuffer)) {
+            if (!::android::hardware::media::omx::V1_0::utils::convertTo(
+                    graphicBuffer.get(), anwBuffer)) {
                 return false;
             }
             *l = OMXBuffer(graphicBuffer);
@@ -756,4 +694,4 @@
 }  // namespace hardware
 }  // namespace android
 
-#endif  // ANDROID_HARDWARE_MEDIA_OMX_V1_0__CONVERSION_H
+#endif  // ANDROID_HARDWARE_MEDIA_OMX_V1_0_IMPL_CONVERSION_H
diff --git a/media/libstagefright/omx/include/media/stagefright/omx/1.0/OmxStore.h b/media/libstagefright/omx/include/media/stagefright/omx/1.0/OmxStore.h
index 006d2d9..15d46991 100644
--- a/media/libstagefright/omx/include/media/stagefright/omx/1.0/OmxStore.h
+++ b/media/libstagefright/omx/include/media/stagefright/omx/1.0/OmxStore.h
@@ -20,6 +20,7 @@
 #include <hidl/MQDescriptor.h>
 #include <hidl/Status.h>
 
+#include <android/hardware/media/omx/1.0/IOmx.h>
 #include <android/hardware/media/omx/1.0/IOmxStore.h>
 #include <media/stagefright/xmlparser/MediaCodecsXmlParser.h>
 
@@ -43,15 +44,14 @@
 
 struct OmxStore : public IOmxStore {
     OmxStore(
+            const sp<IOmx> &omx = nullptr,
             const char* owner = "default",
-            const char* const* searchDirs
-                = MediaCodecsXmlParser::defaultSearchDirs,
-            const char* mainXmlName
-                = MediaCodecsXmlParser::defaultMainXmlName,
-            const char* performanceXmlName
-                = MediaCodecsXmlParser::defaultPerformanceXmlName,
-            const char* profilingResultsXmlPath
-                = MediaCodecsXmlParser::defaultProfilingResultsXmlPath);
+            const std::vector<std::string> &searchDirs =
+                MediaCodecsXmlParser::getDefaultSearchDirs(),
+            const std::vector<std::string> &xmlFiles =
+                MediaCodecsXmlParser::getDefaultXmlNames(),
+            const char *xmlProfilingResultsPath =
+                MediaCodecsXmlParser::defaultProfilingResultsXmlPath);
 
     virtual ~OmxStore();
 
diff --git a/media/libstagefright/omx/include/media/stagefright/omx/1.0/WGraphicBufferProducer.h b/media/libstagefright/omx/include/media/stagefright/omx/1.0/WGraphicBufferProducer.h
deleted file mode 100644
index 322a699..0000000
--- a/media/libstagefright/omx/include/media/stagefright/omx/1.0/WGraphicBufferProducer.h
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Copyright 2018, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_HARDWARE_MEDIA_OMX_V1_0_WGRAPHICBUFFERPRODUCER_H
-#define ANDROID_HARDWARE_MEDIA_OMX_V1_0_WGRAPHICBUFFERPRODUCER_H
-
-#include <media/stagefright/bqhelper/WGraphicBufferProducer.h>
-
-namespace android {
-namespace hardware {
-namespace media {
-namespace omx {
-namespace V1_0 {
-namespace implementation {
-
-using TWGraphicBufferProducer = ::android::TWGraphicBufferProducer<
-    ::android::hardware::graphics::bufferqueue::V1_0::IGraphicBufferProducer>;
-
-}  // namespace implementation
-}  // namespace V1_0
-}  // namespace omx
-}  // namespace media
-}  // namespace hardware
-}  // namespace android
-
-#endif  // ANDROID_HARDWARE_MEDIA_OMX_V1_0_WOMXBUFFERPRODUCER_H
diff --git a/media/libstagefright/omx/include/media/stagefright/omx/OMXMaster.h b/media/libstagefright/omx/include/media/stagefright/omx/OMXMaster.h
index 897f287..93eaef1 100644
--- a/media/libstagefright/omx/include/media/stagefright/omx/OMXMaster.h
+++ b/media/libstagefright/omx/include/media/stagefright/omx/OMXMaster.h
@@ -52,13 +52,16 @@
 private:
     char mProcessName[16];
     Mutex mLock;
-    List<OMXPluginBase *> mPlugins;
+    struct Plugin {
+        OMXPluginBase *mOmx;
+        void *mLibHandle;
+    };
+    List<Plugin> mPlugins;
     KeyedVector<String8, OMXPluginBase *> mPluginByComponentName;
     KeyedVector<OMX_COMPONENTTYPE *, OMXPluginBase *> mPluginByInstance;
 
-    void *mVendorLibHandle;
-
     void addVendorPlugin();
+    void addPlatformPlugin();
     void addPlugin(const char *libname);
     void addPlugin(OMXPluginBase *plugin);
     void clearPlugins();
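The new Plugin struct keeps each OMXPluginBase pointer next to the dlopen handle it came from, so teardown can unload each library individually instead of tracking a single mVendorLibHandle. A simplified sketch of that bookkeeping; makePlugin and the surrounding types are placeholders, not the real OMX plugin entry points:

// Sketch of per-plugin handle bookkeeping (placeholder names, not the OMX ABI).
#include <dlfcn.h>
#include <list>

struct PluginBase { virtual ~PluginBase() = default; };

struct LoadedPlugin {
    PluginBase *mPlugin;
    void *mLibHandle;   // handle to dlclose() when this plugin is torn down
};

static std::list<LoadedPlugin> sPlugins;

void addPlugin(const char *libname) {
    void *handle = dlopen(libname, RTLD_NOW);
    if (handle == nullptr) {
        return;  // library not present; nothing to register
    }
    typedef PluginBase *(*CreateFn)();
    CreateFn create = reinterpret_cast<CreateFn>(dlsym(handle, "makePlugin"));
    if (create == nullptr) {
        dlclose(handle);
        return;
    }
    // keep the handle next to the plugin so each library can be closed on its own
    sPlugins.push_back({create(), handle});
}

void clearPlugins() {
    for (LoadedPlugin &p : sPlugins) {
        delete p.mPlugin;
        if (p.mLibHandle != nullptr) {
            dlclose(p.mLibHandle);
        }
    }
    sPlugins.clear();
}

int main() {
    // library names are for illustration; a missing library is simply skipped
    addPlugin("libstagefrighthw.so");
    clearPlugins();
    return 0;
}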
diff --git a/media/libstagefright/timedtext/TextDescriptions.cpp b/media/libstagefright/timedtext/TextDescriptions.cpp
index 088eaae..0dc7722 100644
--- a/media/libstagefright/timedtext/TextDescriptions.cpp
+++ b/media/libstagefright/timedtext/TextDescriptions.cpp
@@ -383,7 +383,7 @@
         tmpData += 8;
         size_t remaining = size - 8;
 
-        if (size < chunkSize) {
+        if (chunkSize <= 8 || size < chunkSize) {
             return OK;
         }
         switch(chunkType) {
diff --git a/media/libstagefright/timedtext/TextDescriptions2.cpp b/media/libstagefright/timedtext/TextDescriptions2.cpp
index f48eacc..fd42d3a 100644
--- a/media/libstagefright/timedtext/TextDescriptions2.cpp
+++ b/media/libstagefright/timedtext/TextDescriptions2.cpp
@@ -145,7 +145,7 @@
         tmpData += 8;
         size_t remaining = size - 8;
 
-        if (size < chunkSize) {
+        if (chunkSize <= 8 || size < chunkSize) {
             return OK;
         }
         switch(chunkType) {
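Both TextDescriptions hunks add the same guard: the 8-byte chunk header (size plus type) has already been consumed, so a declared chunkSize of 8 or less would make later chunkSize - 8 arithmetic wrap around an unsigned type, and anything larger than size would overread the buffer. A self-contained sketch of the check (simplified parsing, not the 3GPP timed-text code):

// Sketch of the bounds check: reject chunkSize <= 8 before computing chunkSize - 8.
#include <cstdint>
#include <cstdio>

bool parseChunk(const uint8_t *data, size_t size) {
    if (size < 8) {
        return false;                        // not even a full chunk header
    }
    uint32_t chunkSize = (uint32_t(data[0]) << 24) | (uint32_t(data[1]) << 16)
            | (uint32_t(data[2]) << 8) | data[3];
    size_t remaining = size - 8;             // payload bytes actually present

    if (chunkSize <= 8 || size < chunkSize) {
        return false;                        // would underflow or overread below
    }
    size_t payload = chunkSize - 8;          // safe: chunkSize > 8
    return payload <= remaining;
}

int main() {
    uint8_t bogus[8] = {0, 0, 0, 4, 't', 'x', '3', 'g'};    // declares a 4-byte chunk
    std::printf("%d\n", parseChunk(bogus, sizeof(bogus)));  // prints 0 (rejected)
    return 0;
}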
diff --git a/media/libstagefright/webm/WebmFrame.cpp b/media/libstagefright/webm/WebmFrame.cpp
index 4b0d47c..52c30ec 100644
--- a/media/libstagefright/webm/WebmFrame.cpp
+++ b/media/libstagefright/webm/WebmFrame.cpp
@@ -62,6 +62,14 @@
             mData);
 }
 
+uint64_t WebmFrame::getAbsTimecode() {
+    return mAbsTimecode;
+}
+
+void WebmFrame::updateAbsTimecode(uint64_t newAbsTimecode) {
+    mAbsTimecode = newAbsTimecode;
+}
+
 bool WebmFrame::operator<(const WebmFrame &other) const {
     if (this->mEos) {
         return false;
diff --git a/media/libstagefright/webm/WebmFrame.h b/media/libstagefright/webm/WebmFrame.h
index a410a87..47f2523 100644
--- a/media/libstagefright/webm/WebmFrame.h
+++ b/media/libstagefright/webm/WebmFrame.h
@@ -25,7 +25,7 @@
 public:
     const int mType;
     const bool mKey;
-    const uint64_t mAbsTimecode;
+    uint64_t mAbsTimecode;
     const sp<ABuffer> mData;
     const bool mEos;
 
@@ -33,6 +33,8 @@
     WebmFrame(int type, bool key, uint64_t absTimecode, MediaBufferBase *buf);
     ~WebmFrame() {}
 
+    uint64_t getAbsTimecode();
+    void updateAbsTimecode(uint64_t newAbsTimecode);
     sp<WebmElement> SimpleBlock(uint64_t baseTimecode) const;
 
     bool operator<(const WebmFrame &other) const;
diff --git a/media/libstagefright/webm/WebmFrameThread.cpp b/media/libstagefright/webm/WebmFrameThread.cpp
index 4b6f928..631a2ab 100644
--- a/media/libstagefright/webm/WebmFrameThread.cpp
+++ b/media/libstagefright/webm/WebmFrameThread.cpp
@@ -78,6 +78,7 @@
       mVideoFrames(videoThread->mSink),
       mAudioFrames(audioThread->mSink),
       mCues(cues),
+      mStartOffsetTimecode(UINT64_MAX),
       mDone(true) {
 }
 
@@ -92,6 +93,7 @@
       mVideoFrames(videoSource),
       mAudioFrames(audioSource),
       mCues(cues),
+      mStartOffsetTimecode(UINT64_MAX),
       mDone(true) {
 }
 
@@ -213,6 +215,11 @@
         const sp<WebmFrame> audioFrame = mAudioFrames.peek();
         ALOGV("a frame: %p", audioFrame.get());
 
+        if (mStartOffsetTimecode == UINT64_MAX) {
+            mStartOffsetTimecode =
+                    std::min(audioFrame->getAbsTimecode(), videoFrame->getAbsTimecode());
+        }
+
         if (videoFrame->mEos && audioFrame->mEos) {
             break;
         }
@@ -220,10 +227,12 @@
         if (*audioFrame < *videoFrame) {
             ALOGV("take a frame");
             mAudioFrames.take();
+            audioFrame->updateAbsTimecode(audioFrame->getAbsTimecode() - mStartOffsetTimecode);
             outstandingFrames.push_back(audioFrame);
         } else {
             ALOGV("take v frame");
             mVideoFrames.take();
+            videoFrame->updateAbsTimecode(videoFrame->getAbsTimecode() - mStartOffsetTimecode);
             outstandingFrames.push_back(videoFrame);
             if (videoFrame->mKey)
                 numVideoKeyFrames++;
@@ -350,7 +359,6 @@
         if (mStartTimeUs == kUninitialized) {
             mStartTimeUs = timestampUs;
         }
-        timestampUs -= mStartTimeUs;
 
         if (mPaused && !mResumed) {
             lastDurationUs = timestampUs - lastTimestampUs;
diff --git a/media/libstagefright/webm/WebmFrameThread.h b/media/libstagefright/webm/WebmFrameThread.h
index 76c91f1..1ddaf9a 100644
--- a/media/libstagefright/webm/WebmFrameThread.h
+++ b/media/libstagefright/webm/WebmFrameThread.h
@@ -83,6 +83,7 @@
     LinkedBlockingQueue<const sp<WebmFrame> >& mVideoFrames;
     LinkedBlockingQueue<const sp<WebmFrame> >& mAudioFrames;
     List<sp<WebmElement> >& mCues;
+    uint64_t mStartOffsetTimecode;
 
     volatile bool mDone;
 
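Taken together, the WebmFrame/WebmFrameThread hunks move start-time normalization out of the per-track source threads (the removed timestampUs -= mStartTimeUs) and into the sink loop: the first time both queues have a frame, the smaller absolute timecode becomes mStartOffsetTimecode, and each frame is rebased against it as it is taken, so the later-starting track keeps its relative offset. A simplified, self-contained sketch of that rebasing loop (stand-in Frame/deque types and made-up timecodes):

// Sketch of sink-side rebasing; not the real WebmFrame/LinkedBlockingQueue types.
#include <algorithm>
#include <cstdint>
#include <deque>
#include <iostream>

struct Frame { uint64_t absTimecode; };   // stand-in for WebmFrame

int main() {
    std::deque<Frame> video{{100}, {133}, {166}};   // video starts first in this example
    std::deque<Frame> audio{{120}, {140}, {160}};

    uint64_t startOffset = UINT64_MAX;              // "not yet determined" sentinel

    while (!video.empty() || !audio.empty()) {
        // anchor on the earlier of the two first frames, exactly once
        if (startOffset == UINT64_MAX && !video.empty() && !audio.empty()) {
            startOffset = std::min(video.front().absTimecode, audio.front().absTimecode);
        }

        // take whichever frame comes first (mirrors the a/v interleaving loop)
        bool takeAudio = !audio.empty() &&
                (video.empty() || audio.front().absTimecode < video.front().absTimecode);
        Frame f = takeAudio ? audio.front() : video.front();
        (takeAudio ? audio : video).pop_front();

        f.absTimecode -= startOffset;               // rebase; later track keeps its offset
        std::cout << (takeAudio ? "a " : "v ") << f.absTimecode << "\n";
    }
    return 0;
}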
diff --git a/media/libstagefright/webm/WebmWriter.cpp b/media/libstagefright/webm/WebmWriter.cpp
index 26e0884..5eaadbd 100644
--- a/media/libstagefright/webm/WebmWriter.cpp
+++ b/media/libstagefright/webm/WebmWriter.cpp
@@ -124,6 +124,11 @@
         return NULL;
     }
 
+    int32_t bitsPerSample = 0;
+    if (!md->findInt32(kKeyBitsPerSample, &bitsPerSample)) {
+        ALOGV("kKeyBitsPerSample not available");
+    }
+
     if (!strncasecmp(mimeType, MEDIA_MIMETYPE_AUDIO_OPUS, strlen(MEDIA_MIMETYPE_AUDIO_OPUS))) {
         // Opus in WebM is a well-known, yet under-documented, format. The codec private data
         // of the track is an Opus Ogg header (https://tools.ietf.org/html/rfc7845#section-5.1)
@@ -164,8 +169,8 @@
         uint8_t* codecPrivateData = codecPrivateBuf->data();
 
         memcpy(codecPrivateData + off, (uint8_t*)header_data, headerSize);
-        sp<WebmElement> entry =
-                WebmElement::AudioTrackEntry("A_OPUS", nChannels, samplerate, codecPrivateBuf);
+        sp<WebmElement> entry = WebmElement::AudioTrackEntry("A_OPUS", nChannels, samplerate,
+                                                             codecPrivateBuf, bitsPerSample);
         return entry;
     } else if (!strncasecmp(mimeType,
                             MEDIA_MIMETYPE_AUDIO_VORBIS,
@@ -203,8 +208,8 @@
         off += headerSize2;
         memcpy(codecPrivateData + off, headerData3, headerSize3);
 
-        sp<WebmElement> entry =
-                WebmElement::AudioTrackEntry("A_VORBIS", nChannels, samplerate, codecPrivateBuf);
+        sp<WebmElement> entry = WebmElement::AudioTrackEntry("A_VORBIS", nChannels, samplerate,
+                                                             codecPrivateBuf, bitsPerSample);
         return entry;
     } else {
         ALOGE("Track (%s) is not a supported audio format", mimeType);
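The writer now also queries kKeyBitsPerSample before building the audio track entry, defaulting to 0 when the key is absent so the extra AudioTrackEntry argument is harmless for formats that do not signal a bit depth. A tiny sketch of that optional-lookup-with-fallback shape, using a stand-in metadata map rather than the real MetaData API:

// Stand-in metadata map; only the fallback-to-zero pattern mirrors the diff.
#include <cstdint>
#include <iostream>
#include <map>
#include <string>

struct MetaMap {
    std::map<std::string, int32_t> entries;
    bool findInt32(const std::string &key, int32_t *out) const {
        auto it = entries.find(key);
        if (it == entries.end()) return false;
        *out = it->second;
        return true;
    }
};

void makeAudioTrackEntry(const MetaMap &md) {
    int32_t bitsPerSample = 0;                       // 0 means "not signalled"
    if (!md.findInt32("bits-per-sample", &bitsPerSample)) {
        std::cout << "bits-per-sample not available\n";
    }
    std::cout << "BitDepth=" << bitsPerSample << "\n";
}

int main() {
    makeAudioTrackEntry(MetaMap{{{"bits-per-sample", 16}}});  // prints BitDepth=16
    makeAudioTrackEntry(MetaMap{});                           // falls back to 0
    return 0;
}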
diff --git a/media/libstagefright/xmlparser/Android.bp b/media/libstagefright/xmlparser/Android.bp
index 202e964..38de831 100644
--- a/media/libstagefright/xmlparser/Android.bp
+++ b/media/libstagefright/xmlparser/Android.bp
@@ -20,15 +20,12 @@
     ],
 
     shared_libs: [
+        "libbase",
         "libexpat",
         "liblog",
         "libstagefright_omx_utils",
     ],
 
-    header_libs: [
-        "libbase_headers",
-    ],
-
     cflags: [
         "-Werror",
         "-Wall",
diff --git a/media/libstagefright/xmlparser/MediaCodecsXmlParser.cpp b/media/libstagefright/xmlparser/MediaCodecsXmlParser.cpp
index 7046f61..9783e9b 100644
--- a/media/libstagefright/xmlparser/MediaCodecsXmlParser.cpp
+++ b/media/libstagefright/xmlparser/MediaCodecsXmlParser.cpp
@@ -19,12 +19,18 @@
 
 #include <media/stagefright/xmlparser/MediaCodecsXmlParser.h>
 
+#include <android-base/logging.h>
 #include <android-base/macros.h>
 #include <utils/Log.h>
+
 #include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/omx/OMXUtils.h>
-#include <sys/stat.h>
+
 #include <expat.h>
+#include <stdio.h>
+#include <string.h>
+#include <sys/stat.h>
 
 #include <algorithm>
 #include <cctype>
@@ -32,8 +38,15 @@
 
 namespace android {
 
+using MCXP = MediaCodecsXmlParser;
+
 namespace {
 
+bool fileExists(const std::string &path) {
+    struct stat fileStat;
+    return stat(path.c_str(), &fileStat) == 0 && S_ISREG(fileStat.st_mode);
+}
+
 /**
  * Search for a file in a list of search directories.
  *
@@ -44,7 +57,7 @@
- * search continues until the `nullptr` element in `searchDirs` is reached, at
- * which point the function returns `false`.
+ * search continues until every entry of `searchDirs` has been tried, at
+ * which point the function returns `false`.
  *
- * \param[in] searchDirs Null-terminated array of search paths.
+ * \param[in] searchDirs Vector of search paths.
  * \param[in] fileName Name of the file to search.
  * \param[out] outPath Full path of the file. `outPath` will hold a valid file
  * name if the return value of this function is `true`.
@@ -52,14 +65,13 @@
  * valid file name; `false` otherwise.
  */
 bool findFileInDirs(
-        const char* const* searchDirs,
-        const char *fileName,
+        const std::vector<std::string> &searchDirs,
+        const std::string &fileName,
         std::string *outPath) {
-    for (; *searchDirs != nullptr; ++searchDirs) {
-        *outPath = std::string(*searchDirs) + "/" + fileName;
-        struct stat fileStat;
-        if (stat(outPath->c_str(), &fileStat) == 0 &&
-                S_ISREG(fileStat.st_mode)) {
+    for (const std::string &searchDir : searchDirs) {
+        std::string path = searchDir + "/" + fileName;
+        if (fileExists(path)) {
+            *outPath = path;
             return true;
         }
     }
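With searchDirs now a std::vector<std::string>, the nullptr-terminated array walk disappears and *outPath is only written on success. For reference, an equivalent standalone version of the helper plus a call site (the example paths are arbitrary):

// Standalone version of the directory-search helper above (same shape, not the AOSP build).
#include <string>
#include <sys/stat.h>
#include <vector>

static bool fileExists(const std::string &path) {
    struct stat st;
    return stat(path.c_str(), &st) == 0 && S_ISREG(st.st_mode);
}

static bool findFileInDirs(
        const std::vector<std::string> &searchDirs,
        const std::string &fileName,
        std::string *outPath) {
    for (const std::string &dir : searchDirs) {
        std::string path = dir + "/" + fileName;
        if (fileExists(path)) {
            *outPath = path;    // only set on success
            return true;
        }
    }
    return false;               // outPath left untouched
}

int main() {
    std::string path;
    if (findFileInDirs({"/etc", "/usr/share"}, "hosts", &path)) {
        // path now holds e.g. "/etc/hosts" on most Unix systems
    }
    return 0;
}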
@@ -85,123 +97,435 @@
 bool parseBoolean(const char* s) {
     return striEq(s, "y") ||
             striEq(s, "yes") ||
+            striEq(s, "enabled") ||
             striEq(s, "t") ||
             striEq(s, "true") ||
             striEq(s, "1");
 }
 
-status_t limitFoundMissingAttr(const char* name, const char *attr, bool found = true) {
-    ALOGE("limit '%s' with %s'%s' attribute", name,
-            (found ? "" : "no "), attr);
-    return BAD_VALUE;
+
+status_t combineStatus(status_t a, status_t b) {
+    if (a == NO_INIT) {
+        return b;
+    } else if ((a == OK && (b == NAME_NOT_FOUND || b == ALREADY_EXISTS || b == NO_INIT))
+            || (b == OK && (a == NAME_NOT_FOUND || a == ALREADY_EXISTS))) {
+        // ignore NAME_NOT_FOUND and ALREADY_EXISTS errors as long as the other error is OK
+        // also handle OK + NO_INIT here
+        return OK;
+    } else {
+        // prefer the first error result
+        return a ? : b;
+    }
 }
 
-status_t limitError(const char* name, const char *msg) {
-    ALOGE("limit '%s' %s", name, msg);
-    return BAD_VALUE;
+MCXP::StringSet parseCommaSeparatedStringSet(const char *s) {
+    MCXP::StringSet result;
+    for (const char *ptr = s ? : ""; *ptr; ) {
+        const char *end = strchrnul(ptr, ',');
+        if (ptr != end) { // skip empty values
+            result.emplace(ptr, end - ptr);
+        }
+        ptr = end + ('\0' != *end);
+    }
+    return result;
 }
 
-status_t limitInvalidAttr(const char* name, const char* attr, const char* value) {
-    ALOGE("limit '%s' with invalid '%s' attribute (%s)", name,
-            attr, value);
-    return BAD_VALUE;
-}
+#define PLOGD(msg, ...) \
+        ALOGD(msg " at line %zu of %s", ##__VA_ARGS__, \
+                (size_t)::XML_GetCurrentLineNumber(mParser.get()), mPath.c_str());
 
-}; // unnamed namespace
+}  // unnamed namespace
 
-constexpr char const* MediaCodecsXmlParser::defaultSearchDirs[];
-constexpr char const* MediaCodecsXmlParser::defaultMainXmlName;
-constexpr char const* MediaCodecsXmlParser::defaultPerformanceXmlName;
+struct MediaCodecsXmlParser::Impl {
+    // status + error message
+    struct Result {
+    private:
+        status_t mStatus;
+        std::string mError;
+
+    public:
+        Result(status_t s, std::string error = "")
+            : mStatus(s),
+              mError(error) {
+            if (error.empty() && s) {
+                mError = "Failed (" + std::string(asString(s)) + ")";
+            }
+        }
+        operator status_t() const { return mStatus; }
+        std::string error() const { return mError; }
+    };
+
+
+    // Parsed data
+    struct Data {
+        // Service attributes
+        AttributeMap mServiceAttributeMap;
+        CodecMap mCodecMap;
+        Result addGlobal(std::string key, std::string value, bool updating);
+    };
+
+    enum Section {
+        SECTION_TOPLEVEL,
+        SECTION_SETTINGS,
+        SECTION_DECODERS,
+        SECTION_DECODER,
+        SECTION_DECODER_TYPE,
+        SECTION_ENCODERS,
+        SECTION_ENCODER,
+        SECTION_ENCODER_TYPE,
+        SECTION_INCLUDE,
+        SECTION_VARIANT,
+        SECTION_UNKNOWN,
+    };
+
+    // XML parsing state
+    struct State {
+    private:
+        Data *mData;
+
+        // current codec and/or type, plus whether we are updating
+        struct CodecAndType {
+            std::string mName;
+            CodecMap::iterator mCodec;
+            TypeMap::iterator mType;
+            bool mUpdating;
+        };
+
+        // using vectors as we need to reset their sizes
+        std::vector<std::string> mIncludeStack;
+        std::vector<Section> mSectionStack;
+        std::vector<StringSet> mVariantsStack;
+        std::vector<CodecAndType> mCurrent;
+
+    public:
+        State(Data *data);
+
+        Data &data() { return *mData; }
+
+        // used to restore parsing state at XML include boundaries, in case parsing the included
+        // file fails.
+        struct RestorePoint {
+            size_t numIncludes;
+            size_t numSections;
+            size_t numVariantSets;
+            size_t numCodecAndTypes;
+        };
+
+        // methods manipulating restore points (all state stacks)
+        RestorePoint createRestorePoint() const {
+            return {
+                mIncludeStack.size(), mSectionStack.size(), mVariantsStack.size(), mCurrent.size()
+            };
+        }
+
+        void restore(RestorePoint rp) {
+            CHECK_GE(mIncludeStack.size(), rp.numIncludes);
+            CHECK_GE(mSectionStack.size(), rp.numSections);
+            CHECK_GE(mVariantsStack.size(), rp.numVariantSets);
+            CHECK_GE(mCurrent.size(), rp.numCodecAndTypes);
+
+            mIncludeStack.resize(rp.numIncludes);
+            mSectionStack.resize(rp.numSections);
+            mVariantsStack.resize(rp.numVariantSets);
+            mCurrent.resize(rp.numCodecAndTypes);
+        }
+
+        // methods manipulating the include stack
+        Result enterInclude(const std::string &path);
+        void exitInclude() {
+            mIncludeStack.pop_back();
+        }
+
+        // methods manipulating the codec/type stack/state
+        bool inCodec() const {
+            return !mCurrent.empty() && mCurrent.back().mCodec != mData->mCodecMap.end();
+        }
+
+        bool inType() const {
+            return inCodec()
+                    && mCurrent.back().mType != mCurrent.back().mCodec->second.typeMap.end();
+        }
+
+        Result enterMediaCodec(bool encoder, const char *name, const char *type, bool update);
+        Result enterType(const char *name, bool update);
+        void exitCodecOrType() {
+            mCurrent.pop_back();
+        }
+
+        // can only be called when inCodec()
+        MediaCodecsXmlParser::CodecProperties &codec() {
+            return mCurrent.back().mCodec->second;
+        }
+        // can only be called when inCodec()
+        std::string codecName() const {
+            return mCurrent.back().mName;
+        }
+        // can only be called when inCodec()
+        bool updating() const {
+            return mCurrent.back().mUpdating;
+        }
+        // can only be called when inType()
+        MediaCodecsXmlParser::AttributeMap &type() {
+            return mCurrent.back().mType->second;
+        }
+
+        // methods manipulating the section stack
+        Section section() const {
+            return mSectionStack.back();
+        }
+        Section lastNonIncludeSection() const;
+        void enterSection(Section s) {
+            mSectionStack.push_back(s);
+        }
+        void exitSection() {
+            mSectionStack.pop_back();
+            CHECK(!mSectionStack.empty());
+        }
+
+        // methods manipulating the variants stack
+        StringSet variants() const {
+            return mVariantsStack.back();
+        }
+        void enterVariants(StringSet variants) {
+            mVariantsStack.push_back(variants);
+        }
+        void exitVariants() {
+            mVariantsStack.pop_back();
+        }
+
+        // utility methods
+
+        // updates rank, domains, variants and enabledness on the current codec/type
+        Result updateCodec(
+                const char *rank, StringSet domains, StringSet variants, const char *enabled);
+        // adds a key-value attribute detail to the current type of the current codec
+        void addDetail(const std::string &key, const std::string &value);
+    };
+
+    /** XML Parser (state) */
+    struct Parser {
+        State *mState;
+
+        Parser(State *state, std::string path);
+
+        // keep track of the parser state
+        std::shared_ptr<XML_ParserStruct> mParser;
+        std::string mPath;
+        std::string mHrefBase;
+        status_t mStatus;
+
+        void parseXmlFile();
+
+        // XML parser callbacks
+        static void StartElementHandlerWrapper(void *me, const char *name, const char **attrs);
+        static void EndElementHandlerWrapper(void *me, const char *name);
+
+        void startElementHandler(const char *name, const char **attrs);
+        void endElementHandler(const char *name);
+
+        void updateStatus(status_t status);
+        void logAnyErrors(const Result &status) const;
+        status_t getStatus() const { return mStatus; }
+
+        status_t addAlias(const char **attrs);
+        status_t addFeature(const char **attrs);
+        status_t addLimit(const char **attrs);
+        status_t addQuirk(const char **attrs, const char *prefix = nullptr);
+        status_t addSetting(const char **attrs, const char *prefix = nullptr);
+        status_t enterMediaCodec(const char **attrs, bool encoder);
+        status_t enterType(const char **attrs);
+        status_t includeXmlFile(const char **attrs);
+        status_t limitVariants(const char **attrs);
+
+        status_t updateMediaCodec(
+                const char *rank, const StringSet &domains, const StringSet &variants,
+                const char *enabled);
+    };
+
+    status_t parseXmlFilesInSearchDirs(
+        const std::vector<std::string> &fileNames,
+        const std::vector<std::string> &searchDirs);
+
+    status_t parseXmlPath(const std::string &path);
+
+    // Parsed data and parsing state
+    Data mData;
+    State mState;
+
+    // Computed longest common prefix and role map
+    mutable std::string mCommonPrefix;
+    mutable RoleMap mRoleMap;
+    mutable std::mutex mLock;
+
+    status_t mParsingStatus;
+
+    Impl()
+        : mState(&mData),
+          mParsingStatus(NO_INIT) {
+    }
+
+    void generateRoleMap() const;
+    void generateCommonPrefix() const;
+
+    const AttributeMap& getServiceAttributeMap() const {
+        std::lock_guard<std::mutex> guard(mLock);
+        return mData.mServiceAttributeMap;
+    }
+
+    const CodecMap& getCodecMap() const {
+        std::lock_guard<std::mutex> guard(mLock);
+        return mData.mCodecMap;
+    }
+
+    const RoleMap& getRoleMap() const;
+    const char* getCommonPrefix() const;
+
+    status_t getParsingStatus() const {
+        std::lock_guard<std::mutex> guard(mLock);
+        return mParsingStatus;
+    }
+};
+
 constexpr char const* MediaCodecsXmlParser::defaultProfilingResultsXmlPath;
 
-MediaCodecsXmlParser::MediaCodecsXmlParser(
-        const char* const* searchDirs,
-        const char* mainXmlName,
-        const char* performanceXmlName,
-        const char* profilingResultsXmlPath) :
-    mParsingStatus(NO_INIT),
-    mUpdate(false),
-    mCodecCounter(0) {
-    std::string path;
-    if (findFileInDirs(searchDirs, mainXmlName, &path)) {
-        parseTopLevelXMLFile(path.c_str(), false);
-    } else {
-        ALOGE("Cannot find %s", mainXmlName);
-        mParsingStatus = NAME_NOT_FOUND;
-    }
-    if (findFileInDirs(searchDirs, performanceXmlName, &path)) {
-        parseTopLevelXMLFile(path.c_str(), true);
-    }
-    if (profilingResultsXmlPath != nullptr) {
-        parseTopLevelXMLFile(profilingResultsXmlPath, true);
-    }
+MediaCodecsXmlParser::MediaCodecsXmlParser()
+    : mImpl(new Impl()) {
 }
 
-bool MediaCodecsXmlParser::parseTopLevelXMLFile(
-        const char *codecs_xml,
-        bool ignore_errors) {
-    // get href_base
-    const char *href_base_end = strrchr(codecs_xml, '/');
-    if (href_base_end != nullptr) {
-        mHrefBase = std::string(codecs_xml, href_base_end - codecs_xml + 1);
-    }
+status_t MediaCodecsXmlParser::parseXmlFilesInSearchDirs(
+        const std::vector<std::string> &fileNames,
+        const std::vector<std::string> &searchDirs) {
+    return mImpl->parseXmlFilesInSearchDirs(fileNames, searchDirs);
+}
 
-    mParsingStatus = OK; // keeping this here for safety
-    mCurrentSection = SECTION_TOPLEVEL;
+status_t MediaCodecsXmlParser::parseXmlPath(const std::string &path) {
+    return mImpl->parseXmlPath(path);
+}
 
-    parseXMLFile(codecs_xml);
-
-    if (mParsingStatus != OK) {
-        ALOGW("parseTopLevelXMLFile(%s) failed", codecs_xml);
-        if (ignore_errors) {
-            mParsingStatus = OK;
-            return false;
+status_t MediaCodecsXmlParser::Impl::parseXmlFilesInSearchDirs(
+        const std::vector<std::string> &fileNames,
+        const std::vector<std::string> &searchDirs) {
+    status_t res = NO_INIT;
+    for (const std::string &fileName : fileNames) {
+        status_t err = NO_INIT;
+        std::string path;
+        if (findFileInDirs(searchDirs, fileName, &path)) {
+            err = parseXmlPath(path);
+        } else {
+            ALOGD("Cannot find %s", fileName.c_str());
         }
-        mCodecMap.clear();
-        return false;
+        res = combineStatus(res, err);
     }
-    return true;
+    return res;
+}
+
+status_t MediaCodecsXmlParser::Impl::parseXmlPath(const std::string &path) {
+    std::lock_guard<std::mutex> guard(mLock);
+    if (!fileExists(path)) {
+        ALOGD("Cannot find %s", path.c_str());
+        mParsingStatus = combineStatus(mParsingStatus, NAME_NOT_FOUND);
+        return NAME_NOT_FOUND;
+    }
+
+    // save state (even though we should always be at toplevel here)
+    State::RestorePoint rp = mState.createRestorePoint();
+    Parser parser(&mState, path);
+    parser.parseXmlFile();
+    mState.restore(rp);
+
+    if (parser.getStatus() != OK) {
+        ALOGD("parseXmlPath(%s) failed with %s", path.c_str(), asString(parser.getStatus()));
+    }
+    mParsingStatus = combineStatus(mParsingStatus, parser.getStatus());
+    return parser.getStatus();
 }
 
 MediaCodecsXmlParser::~MediaCodecsXmlParser() {
 }
 
-void MediaCodecsXmlParser::parseXMLFile(const char *path) {
+MediaCodecsXmlParser::Impl::State::State(MediaCodecsXmlParser::Impl::Data *data)
+    : mData(data) {
+    mSectionStack.emplace_back(SECTION_TOPLEVEL);
+}
+
+MediaCodecsXmlParser::Impl::Section
+MediaCodecsXmlParser::Impl::State::lastNonIncludeSection() const {
+    for (auto it = mSectionStack.end(); it != mSectionStack.begin(); --it) {
+        if (it[-1] != SECTION_INCLUDE) {
+            return it[-1];
+        }
+    }
+    TRESPASS("must have non-include section");
+}
+
+void MediaCodecsXmlParser::Impl::Parser::updateStatus(status_t status) {
+    mStatus = combineStatus(mStatus, status);
+}
+
+void MediaCodecsXmlParser::Impl::Parser::logAnyErrors(const Result &status) const {
+    if (status) {
+        if (status.error().empty()) {
+            PLOGD("error %s", asString((status_t)status));
+        } else {
+            PLOGD("%s", status.error().c_str());
+        }
+    }
+}
+
+MediaCodecsXmlParser::Impl::Parser::Parser(State *state, std::string path)
+    : mState(state),
+      mPath(path),
+      mStatus(NO_INIT) {
+    // determine href_base
+    std::string::size_type end = path.rfind("/");
+    if (end != std::string::npos) {
+        mHrefBase = path.substr(0, end + 1);
+    }
+}
+
+void MediaCodecsXmlParser::Impl::Parser::parseXmlFile() {
+    const char *path = mPath.c_str();
+    ALOGD("parsing %s...", path);
     FILE *file = fopen(path, "r");
 
     if (file == nullptr) {
-        ALOGW("unable to open media codecs configuration xml file: %s", path);
-        mParsingStatus = NAME_NOT_FOUND;
+        ALOGD("unable to open media codecs configuration xml file: %s", path);
+        mStatus = NAME_NOT_FOUND;
         return;
     }
 
-    XML_Parser parser = ::XML_ParserCreate(nullptr);
-    LOG_FATAL_IF(parser == nullptr, "XML_MediaCodecsXmlParserCreate() failed.");
+    mParser = std::shared_ptr<XML_ParserStruct>(
+        ::XML_ParserCreate(nullptr),
+        [](XML_ParserStruct *parser) { ::XML_ParserFree(parser); });
+    LOG_FATAL_IF(!mParser, "XML_ParserCreate() failed.");
 
-    ::XML_SetUserData(parser, this);
-    ::XML_SetElementHandler(
-            parser, StartElementHandlerWrapper, EndElementHandlerWrapper);
+    ::XML_SetUserData(mParser.get(), this);
+    ::XML_SetElementHandler(mParser.get(), StartElementHandlerWrapper, EndElementHandlerWrapper);
 
     static constexpr int BUFF_SIZE = 512;
-    while (mParsingStatus == OK) {
-        void *buff = ::XML_GetBuffer(parser, BUFF_SIZE);
+    // mark parsing as started unless an earlier error was already recorded
+    if (mStatus == NO_INIT) {
+        mStatus = OK;
+    }
+    while (mStatus == OK) {
+        void *buff = ::XML_GetBuffer(mParser.get(), BUFF_SIZE);
         if (buff == nullptr) {
-            ALOGE("failed in call to XML_GetBuffer()");
-            mParsingStatus = UNKNOWN_ERROR;
+            ALOGD("failed in call to XML_GetBuffer()");
+            mStatus = UNKNOWN_ERROR;
             break;
         }
 
         int bytes_read = ::fread(buff, 1, BUFF_SIZE, file);
         if (bytes_read < 0) {
-            ALOGE("failed in call to read");
-            mParsingStatus = ERROR_IO;
+            ALOGD("failed in call to read");
+            mStatus = ERROR_IO;
             break;
         }
 
-        XML_Status status = ::XML_ParseBuffer(parser, bytes_read, bytes_read == 0);
+        XML_Status status = ::XML_ParseBuffer(mParser.get(), bytes_read, bytes_read == 0);
         if (status != XML_STATUS_OK) {
-            ALOGE("malformed (%s)", ::XML_ErrorString(::XML_GetErrorCode(parser)));
-            mParsingStatus = ERROR_MALFORMED;
+            PLOGD("malformed (%s)", ::XML_ErrorString(::XML_GetErrorCode(mParser.get())));
+            mStatus = ERROR_MALFORMED;
             break;
         }
 
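Two small helpers introduced in this hunk carry most of the new error model: combineStatus lets a missing optional XML file (NAME_NOT_FOUND) or a duplicate definition (ALREADY_EXISTS) be absorbed as long as some other result is OK, and parseCommaSeparatedStringSet splits the new domain/variant attribute values. A worked, self-contained example with stand-in status constants (not the real status_t values):

// Worked example of the status-merging and comma-splitting helpers; enum values are stand-ins.
#include <cstring>   // strchrnul is a glibc/bionic extension used by the original code
#include <iostream>
#include <set>
#include <string>

enum Status { OK = 0, NO_INIT = 1, NAME_NOT_FOUND = 2, ALREADY_EXISTS = 3, BAD_VALUE = 4 };

// keep OK if the other side is only "missing"/"duplicate"; otherwise the first error wins
Status combineStatus(Status a, Status b) {
    if (a == NO_INIT) {
        return b;
    }
    if ((a == OK && (b == NAME_NOT_FOUND || b == ALREADY_EXISTS || b == NO_INIT))
            || (b == OK && (a == NAME_NOT_FOUND || a == ALREADY_EXISTS))) {
        return OK;
    }
    return a != OK ? a : b;
}

std::set<std::string> parseCommaSeparatedStringSet(const char *s) {
    std::set<std::string> result;
    for (const char *ptr = s ? s : ""; *ptr; ) {
        const char *end = strchrnul(ptr, ',');
        if (ptr != end) {                    // skip empty values such as ",,"
            result.emplace(ptr, end - ptr);
        }
        ptr = end + ('\0' != *end);
    }
    return result;
}

int main() {
    // one optional XML missing plus one parsed fine => overall OK
    Status res = NO_INIT;
    res = combineStatus(res, NAME_NOT_FOUND);
    res = combineStatus(res, OK);
    std::cout << "overall: " << res << "\n";             // prints 0 (OK)

    for (const std::string &d : parseCommaSeparatedStringSet("telephony,!tunneled,,video")) {
        std::cout << d << "\n";                          // !tunneled, telephony, video
    }
    return 0;
}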
@@ -210,39 +534,47 @@
         }
     }
 
-    ::XML_ParserFree(parser);
+    mParser.reset();
 
     fclose(file);
     file = nullptr;
 }
 
 // static
-void MediaCodecsXmlParser::StartElementHandlerWrapper(
+void MediaCodecsXmlParser::Impl::Parser::StartElementHandlerWrapper(
         void *me, const char *name, const char **attrs) {
-    static_cast<MediaCodecsXmlParser*>(me)->startElementHandler(name, attrs);
+    static_cast<MediaCodecsXmlParser::Impl::Parser*>(me)->startElementHandler(name, attrs);
 }
 
 // static
-void MediaCodecsXmlParser::EndElementHandlerWrapper(void *me, const char *name) {
-    static_cast<MediaCodecsXmlParser*>(me)->endElementHandler(name);
+void MediaCodecsXmlParser::Impl::Parser::EndElementHandlerWrapper(void *me, const char *name) {
+    static_cast<MediaCodecsXmlParser::Impl::Parser*>(me)->endElementHandler(name);
 }
 
-status_t MediaCodecsXmlParser::includeXMLFile(const char **attrs) {
+status_t MediaCodecsXmlParser::Impl::Parser::includeXmlFile(const char **attrs) {
     const char *href = nullptr;
     size_t i = 0;
     while (attrs[i] != nullptr) {
-        if (strEq(attrs[i], "href")) {
-            if (attrs[++i] == nullptr) {
-                return BAD_VALUE;
-            }
-            href = attrs[i];
-        } else {
-            ALOGE("includeXMLFile: unrecognized attribute: %s", attrs[i]);
+        CHECK((i & 1) == 0);
+        if (attrs[i + 1] == nullptr) {
+            PLOGD("Include: attribute '%s' is null", attrs[i]);
             return BAD_VALUE;
         }
+
+        if (strEq(attrs[i], "href")) {
+            href = attrs[++i];
+        } else {
+            PLOGD("Include: ignoring unrecognized attribute '%s'", attrs[i]);
+            ++i;
+        }
         ++i;
     }
 
+    if (href == nullptr) {
+        PLOGD("Include with no 'href' attribute");
+        return BAD_VALUE;
+    }
+
     // For security reasons and for simplicity, file names can only contain
     // [a-zA-Z0-9_.] and must start with  media_codecs_ and end with .xml
     for (i = 0; href[i] != '\0'; i++) {
@@ -252,74 +584,114 @@
                 (href[i] >= 'a' && href[i] <= 'z')) {
             continue;
         }
-        ALOGE("invalid include file name: %s", href);
+        PLOGD("invalid include file name: %s", href);
         return BAD_VALUE;
     }
 
     std::string filename = href;
     if (filename.compare(0, 13, "media_codecs_") != 0 ||
             filename.compare(filename.size() - 4, 4, ".xml") != 0) {
-        ALOGE("invalid include file name: %s", href);
+        PLOGD("invalid include file name: %s", href);
         return BAD_VALUE;
     }
     filename.insert(0, mHrefBase);
 
-    status_t oldParsingStatus = mParsingStatus;
+    Result res = mState->enterInclude(filename);
+    if (res) {
+        logAnyErrors(res);
+        return res;
+    }
 
-    parseXMLFile(filename.c_str());
-
-    status_t newParsingStatus = mParsingStatus;
-    mParsingStatus = oldParsingStatus;
-    return newParsingStatus;
+    // save state so that we can resume even if XML parsing of the included file failed midway
+    State::RestorePoint rp = mState->createRestorePoint();
+    Parser parser(mState, filename);
+    parser.parseXmlFile();
+    mState->restore(rp);
+    mState->exitInclude();
+    return parser.getStatus();
 }
 
-void MediaCodecsXmlParser::startElementHandler(
+MediaCodecsXmlParser::Impl::Result
+MediaCodecsXmlParser::Impl::State::enterInclude(const std::string &fileName) {
+    if (std::find(mIncludeStack.begin(), mIncludeStack.end(), fileName)
+            != mIncludeStack.end()) {
+        return { BAD_VALUE, "recursive include chain" };
+    }
+    mIncludeStack.emplace_back(fileName);
+    return OK;
+}
+
+void MediaCodecsXmlParser::Impl::Parser::startElementHandler(
         const char *name, const char **attrs) {
     bool inType = true;
+    Result err = NO_INIT;
 
+    Section section = mState->section();
+
+    // handle include at any level
     if (strEq(name, "Include")) {
-        if (includeXMLFile(attrs) == OK) {
-            mSectionStack.push_back(mCurrentSection);
-            mCurrentSection = SECTION_INCLUDE;
-        }
+        mState->enterSection(SECTION_INCLUDE);
+        updateStatus(includeXmlFile(attrs));
         return;
     }
 
-    switch (mCurrentSection) {
+    // handle include section (top level)
+    if (section == SECTION_INCLUDE) {
+        if (strEq(name, "Included")) {
+            return;
+        }
+        // imitate prior level
+        section = mState->lastNonIncludeSection();
+    }
+
+    switch (section) {
         case SECTION_TOPLEVEL:
         {
+            Section nextSection;
             if (strEq(name, "Decoders")) {
-                mCurrentSection = SECTION_DECODERS;
+                nextSection = SECTION_DECODERS;
             } else if (strEq(name, "Encoders")) {
-                mCurrentSection = SECTION_ENCODERS;
+                nextSection = SECTION_ENCODERS;
             } else if (strEq(name, "Settings")) {
-                mCurrentSection = SECTION_SETTINGS;
+                nextSection = SECTION_SETTINGS;
+            } else if (strEq(name, "MediaCodecs") || strEq(name, "Included")) {
+                return;
+            } else {
+                break;
             }
-            break;
+            mState->enterSection(nextSection);
+            return;
         }
 
         case SECTION_SETTINGS:
         {
             if (strEq(name, "Setting")) {
-                (void)addSettingFromAttributes(attrs);
+                err = addSetting(attrs);
+            } else if (strEq(name, "Variant")) {
+                err = addSetting(attrs, "variant-");
+            } else if (strEq(name, "Domain")) {
+                err = addSetting(attrs, "domain-");
+            } else {
+                break;
             }
-            break;
+            updateStatus(err);
+            return;
         }
 
         case SECTION_DECODERS:
-        {
-            if (strEq(name, "MediaCodec")) {
-                (void)addMediaCodecFromAttributes(false /* encoder */, attrs);
-                mCurrentSection = SECTION_DECODER;
-            }
-            break;
-        }
-
         case SECTION_ENCODERS:
         {
             if (strEq(name, "MediaCodec")) {
-                (void)addMediaCodecFromAttributes(true /* encoder */, attrs);
-                mCurrentSection = SECTION_ENCODER;
+                err = enterMediaCodec(attrs, section == SECTION_ENCODERS);
+                updateStatus(err);
+                if (err != OK) { // skip this element on error
+                    mState->enterSection(SECTION_UNKNOWN);
+                } else {
+                    mState->enterVariants(mState->codec().variantSet);
+                    mState->enterSection(
+                            section == SECTION_DECODERS ? SECTION_DECODER : SECTION_ENCODER);
+                }
+                return;
             }
             break;
         }
@@ -327,14 +699,21 @@
         case SECTION_DECODER:
         case SECTION_ENCODER:
         {
-            if (strEq(name, "Quirk") || strEq(name, "Attribute")) {
-                (void)addQuirk(attrs, name);
+            if (strEq(name, "Quirk")) {
+                err = addQuirk(attrs, "quirk::");
+            } else if (strEq(name, "Attribute")) {
+                err = addQuirk(attrs, "attribute::");
+            } else if (strEq(name, "Alias")) {
+                err = addAlias(attrs);
             } else if (strEq(name, "Type")) {
-                (void)addTypeFromAttributes(attrs,
-                        (mCurrentSection == SECTION_ENCODER));
-                mCurrentSection =
-                        (mCurrentSection == SECTION_DECODER ?
-                        SECTION_DECODER_TYPE : SECTION_ENCODER_TYPE);
+                err = enterType(attrs);
+                if (err != OK) { // skip this element on error
+                    mState->enterSection(SECTION_UNKNOWN);
+                } else {
+                    mState->enterSection(
+                            section == SECTION_DECODER
+                                    ? SECTION_DECODER_TYPE : SECTION_ENCODER_TYPE);
+                }
             }
         }
         inType = false;
@@ -342,91 +721,102 @@
 
         case SECTION_DECODER_TYPE:
         case SECTION_ENCODER_TYPE:
+        case SECTION_VARIANT:
         {
             // ignore limits and features specified outside of type
-            bool outside = !inType &&
-                    mCurrentType == mCurrentCodec->second.typeMap.end();
-            if (outside &&
-                    (strEq(name, "Limit") || strEq(name, "Feature"))) {
-                ALOGW("ignoring %s specified outside of a Type", name);
-            } else if (strEq(name, "Alias")) {
-                (void)addAlias(attrs);
+            if (!mState->inType()
+                    && (strEq(name, "Limit") || strEq(name, "Feature") || strEq(name, "Variant"))) {
+                PLOGD("ignoring %s specified outside of a Type", name);
+                return;
             } else if (strEq(name, "Limit")) {
-                (void)addLimit(attrs);
+                err = addLimit(attrs);
             } else if (strEq(name, "Feature")) {
-                (void)addFeature(attrs);
+                err = addFeature(attrs);
+            } else if (strEq(name, "Variant") && section != SECTION_VARIANT) {
+                err = limitVariants(attrs);
+                mState->enterSection(err == OK ? SECTION_VARIANT : SECTION_UNKNOWN);
+            } else if (inType
+                    && (strEq(name, "Alias") || strEq(name, "Attribute") || strEq(name, "Quirk"))) {
+                PLOGD("ignoring %s specified not directly in a MediaCodec", name);
+                return;
+            } else if (err == NO_INIT) {
+                break;
             }
-            break;
+            updateStatus(err);
+            return;
         }
 
         default:
             break;
     }
 
+    if (section != SECTION_UNKNOWN) {
+        PLOGD("Ignoring unrecognized tag <%s>", name);
+    }
+    mState->enterSection(SECTION_UNKNOWN);
 }
 
-void MediaCodecsXmlParser::endElementHandler(const char *name) {
-    switch (mCurrentSection) {
+void MediaCodecsXmlParser::Impl::Parser::endElementHandler(const char *name) {
+    // XMLParser handles tag matching, so we really just need to handle the section state here
+    Section section = mState->section();
+    switch (section) {
+        case SECTION_INCLUDE:
+        {
+            // this could also be any of: Included, MediaCodecs
+            if (strEq(name, "Include")) {
+                mState->exitSection();
+                return;
+            }
+            break;
+        }
+
         case SECTION_SETTINGS:
         {
+            // this could also be any of: Domain, Variant, Setting
             if (strEq(name, "Settings")) {
-                mCurrentSection = SECTION_TOPLEVEL;
+                mState->exitSection();
             }
             break;
         }
 
         case SECTION_DECODERS:
-        {
-            if (strEq(name, "Decoders")) {
-                mCurrentSection = SECTION_TOPLEVEL;
-            }
-            break;
-        }
-
         case SECTION_ENCODERS:
+        case SECTION_UNKNOWN:
         {
-            if (strEq(name, "Encoders")) {
-                mCurrentSection = SECTION_TOPLEVEL;
-            }
+            mState->exitSection();
             break;
         }
 
         case SECTION_DECODER_TYPE:
         case SECTION_ENCODER_TYPE:
         {
+            // this could also be any of: Alias, Limit, Feature
             if (strEq(name, "Type")) {
-                mCurrentSection =
-                        (mCurrentSection == SECTION_DECODER_TYPE ?
-                        SECTION_DECODER : SECTION_ENCODER);
-
-                mCurrentType = mCurrentCodec->second.typeMap.end();
+                mState->exitSection();
+                mState->exitCodecOrType();
             }
             break;
         }
 
         case SECTION_DECODER:
-        {
-            if (strEq(name, "MediaCodec")) {
-                mCurrentSection = SECTION_DECODERS;
-                mCurrentName.clear();
-            }
-            break;
-        }
-
         case SECTION_ENCODER:
         {
+            // this could also be any of: Alias, Limit, Quirk, Variant
             if (strEq(name, "MediaCodec")) {
-                mCurrentSection = SECTION_ENCODERS;
-                mCurrentName.clear();
+                mState->exitSection();
+                mState->exitCodecOrType();
+                mState->exitVariants();
             }
             break;
         }
 
-        case SECTION_INCLUDE:
+        case SECTION_VARIANT:
         {
-            if (strEq(name, "Include") && (mSectionStack.size() > 0)) {
-                mCurrentSection = mSectionStack.back();
-                mSectionStack.pop_back();
+            // this could also be any of: Alias, Limit, Quirk
+            if (strEq(name, "Variant")) {
+                mState->exitSection();
+                mState->exitVariants();
+                return;
             }
             break;
         }
@@ -434,264 +824,302 @@
         default:
             break;
     }
-
 }
 
-status_t MediaCodecsXmlParser::addSettingFromAttributes(const char **attrs) {
-    const char *name = nullptr;
-    const char *value = nullptr;
-    const char *update = nullptr;
+status_t MediaCodecsXmlParser::Impl::Parser::addSetting(const char **attrs, const char *prefix) {
+    const char *a_name = nullptr;
+    const char *a_value = nullptr;
+    const char *a_update = nullptr;
+    bool isBoolean = false;
 
     size_t i = 0;
     while (attrs[i] != nullptr) {
-        if (strEq(attrs[i], "name")) {
-            if (attrs[++i] == nullptr) {
-                ALOGE("addSettingFromAttributes: name is null");
-                return BAD_VALUE;
-            }
-            name = attrs[i];
-        } else if (strEq(attrs[i], "value")) {
-            if (attrs[++i] == nullptr) {
-                ALOGE("addSettingFromAttributes: value is null");
-                return BAD_VALUE;
-            }
-            value = attrs[i];
-        } else if (strEq(attrs[i], "update")) {
-            if (attrs[++i] == nullptr) {
-                ALOGE("addSettingFromAttributes: update is null");
-                return BAD_VALUE;
-            }
-            update = attrs[i];
-        } else {
-            ALOGE("addSettingFromAttributes: unrecognized attribute: %s", attrs[i]);
+        CHECK((i & 1) == 0);
+        if (attrs[i + 1] == nullptr) {
+            PLOGD("Setting: attribute '%s' is null", attrs[i]);
             return BAD_VALUE;
         }
+
+        if (strEq(attrs[i], "name")) {
+            a_name = attrs[++i];
+        } else if (strEq(attrs[i], "value") || strEq(attrs[i], "enabled")) {
+            if (a_value) {
+                PLOGD("Setting: redundant attribute '%s'", attrs[i]);
+                return BAD_VALUE;
+            }
+            isBoolean = strEq(attrs[i], "enabled");
+            a_value = attrs[++i];
+        } else if (strEq(attrs[i], "update")) {
+            a_update = attrs[++i];
+        } else {
+            PLOGD("Setting: ignoring unrecognized attribute '%s'", attrs[i]);
+            ++i;
+        }
         ++i;
     }
 
-    if (name == nullptr || value == nullptr) {
-        ALOGE("addSettingFromAttributes: name or value unspecified");
+    if (a_name == nullptr || a_value == nullptr) {
+        PLOGD("Setting with no 'name' or 'value' attribute");
         return BAD_VALUE;
     }
 
     // Boolean values are converted to "0" or "1".
-    if (strHasPrefix(name, "supports-")) {
-        value = parseBoolean(value) ? "1" : "0";
+    if (strHasPrefix(a_name, "supports-") || isBoolean) {
+        a_value = parseBoolean(a_value) ? "1" : "0";
     }
 
-    mUpdate = (update != nullptr) && parseBoolean(update);
-    auto attribute = mServiceAttributeMap.find(name);
+    bool update = (a_update != nullptr) && parseBoolean(a_update);
+    Result res = mState->data().addGlobal(std::string(prefix ? : "") + a_name, a_value, update);
+    if (res != OK) {
+        PLOGD("Setting: %s", res.error().c_str());
+    }
+    return res;
+}
+
+MediaCodecsXmlParser::Impl::Result MediaCodecsXmlParser::Impl::Data::addGlobal(
+        std::string key, std::string value, bool updating) {
+    auto attribute = mServiceAttributeMap.find(key);
     if (attribute == mServiceAttributeMap.end()) { // New attribute name
-        if (mUpdate) {
-            ALOGE("addSettingFromAttributes: updating non-existing setting");
-            return BAD_VALUE;
+        if (updating) {
+            return { NAME_NOT_FOUND, "cannot update non-existing setting" };
         }
-        mServiceAttributeMap.insert(Attribute(name, value));
+        mServiceAttributeMap.insert(Attribute(key, value));
     } else { // Existing attribute name
-        if (!mUpdate) {
-            ALOGE("addSettingFromAttributes: adding existing setting");
-        }
         attribute->second = value;
+        if (!updating) {
+            return { ALREADY_EXISTS, "updating existing setting" };
+        }
     }
 
     return OK;
 }
 
-status_t MediaCodecsXmlParser::addMediaCodecFromAttributes(
-        bool encoder, const char **attrs) {
-    const char *name = nullptr;
-    const char *type = nullptr;
-    const char *update = nullptr;
-    const char *rank = nullptr;
+status_t MediaCodecsXmlParser::Impl::Parser::enterMediaCodec(
+        const char **attrs, bool encoder) {
+    const char *a_name = nullptr;
+    const char *a_type = nullptr;
+    const char *a_update = nullptr;
+    const char *a_rank = nullptr;
+    const char *a_domain = nullptr;
+    const char *a_variant = nullptr;
+    const char *a_enabled = nullptr;
 
     size_t i = 0;
     while (attrs[i] != nullptr) {
-        if (strEq(attrs[i], "name")) {
-            if (attrs[++i] == nullptr) {
-                ALOGE("addMediaCodecFromAttributes: name is null");
-                return BAD_VALUE;
-            }
-            name = attrs[i];
-        } else if (strEq(attrs[i], "type")) {
-            if (attrs[++i] == nullptr) {
-                ALOGE("addMediaCodecFromAttributes: type is null");
-                return BAD_VALUE;
-            }
-            type = attrs[i];
-        } else if (strEq(attrs[i], "update")) {
-            if (attrs[++i] == nullptr) {
-                ALOGE("addMediaCodecFromAttributes: update is null");
-                return BAD_VALUE;
-            }
-            update = attrs[i];
-        } else if (strEq(attrs[i], "rank")) {
-            if (attrs[++i] == nullptr) {
-                ALOGE("addMediaCodecFromAttributes: rank is null");
-                return BAD_VALUE;
-            }
-            rank = attrs[i];
-        } else {
-            ALOGE("addMediaCodecFromAttributes: unrecognized attribute: %s", attrs[i]);
+        CHECK((i & 1) == 0);
+        if (attrs[i + 1] == nullptr) {
+            PLOGD("MediaCodec: attribute '%s' is null", attrs[i]);
             return BAD_VALUE;
         }
+
+        if (strEq(attrs[i], "name")) {
+            a_name = attrs[++i];
+        } else if (strEq(attrs[i], "type")) {
+            a_type = attrs[++i];
+        } else if (strEq(attrs[i], "update")) {
+            a_update = attrs[++i];
+        } else if (strEq(attrs[i], "rank")) {
+            a_rank = attrs[++i];
+        } else if (strEq(attrs[i], "domain")) {
+            a_domain = attrs[++i];
+        } else if (strEq(attrs[i], "variant")) {
+            a_variant = attrs[++i];
+        } else if (strEq(attrs[i], "enabled")) {
+            a_enabled = attrs[++i];
+        } else {
+            PLOGD("MediaCodec: ignoring unrecognized attribute '%s'", attrs[i]);
+            ++i;
+        }
         ++i;
     }
 
-    if (name == nullptr) {
-        ALOGE("addMediaCodecFromAttributes: name not found");
+    if (a_name == nullptr) {
+        PLOGD("MediaCodec with no 'name' attribute");
         return BAD_VALUE;
     }
 
-    mUpdate = (update != nullptr) && parseBoolean(update);
-    mCurrentCodec = mCodecMap.find(name);
-    if (mCurrentCodec == mCodecMap.end()) { // New codec name
-        if (mUpdate) {
-            ALOGW("addMediaCodecFromAttributes: cannot update "
-                  "non-existing codec \"%s\".", name);
-            return BAD_VALUE;
+    bool update = (a_update != nullptr) && parseBoolean(a_update);
+    if (a_domain != nullptr) {
+        // disable codecs with domain by default (unless updating)
+        if (!a_enabled && !update) {
+            a_enabled = "false";
+        }
+    }
+
+    Result res = mState->enterMediaCodec(encoder, a_name, a_type, update);
+    if (res != OK) {
+        logAnyErrors(res);
+        return res;
+    }
+
+    return updateMediaCodec(
+            a_rank, parseCommaSeparatedStringSet(a_domain),
+            parseCommaSeparatedStringSet(a_variant), a_enabled);
+}
+
+MediaCodecsXmlParser::Impl::Result
+MediaCodecsXmlParser::Impl::State::enterMediaCodec(
+        bool encoder, const char *name, const char *type, bool updating) {
+    // store name even in case of an error
+    CodecMap::iterator codecIt = mData->mCodecMap.find(name);
+    TypeMap::iterator typeIt;
+    if (codecIt == mData->mCodecMap.end()) { // New codec name
+        if (updating) {
+            return { NAME_NOT_FOUND, "MediaCodec: cannot update non-existing codec" };
         }
         // Create a new codec in mCodecMap
-        mCurrentCodec = mCodecMap.insert(
-                Codec(name, CodecProperties())).first;
+        codecIt = mData->mCodecMap.insert(Codec(name, CodecProperties())).first;
         if (type != nullptr) {
-            mCurrentType = mCurrentCodec->second.typeMap.insert(
-                    Type(type, AttributeMap())).first;
+            typeIt = codecIt->second.typeMap.insert(Type(type, AttributeMap())).first;
         } else {
-            mCurrentType = mCurrentCodec->second.typeMap.end();
+            typeIt = codecIt->second.typeMap.end();
         }
-        mCurrentCodec->second.isEncoder = encoder;
-        mCurrentCodec->second.order = mCodecCounter++;
+        codecIt->second.isEncoder = encoder;
+        codecIt->second.order = mData->mCodecMap.size();
     } else { // Existing codec name
-        if (!mUpdate) {
-            ALOGW("addMediaCodecFromAttributes: trying to add "
-                  "existing codec \"%s\"", name);
-            return ALREADY_EXISTS;
+        if (!updating) {
+            return { ALREADY_EXISTS, "MediaCodec: cannot add existing codec" };
         }
         if (type != nullptr) {
-            mCurrentType = mCurrentCodec->second.typeMap.find(type);
-            if (mCurrentType == mCurrentCodec->second.typeMap.end()) {
-                ALOGE("addMediaCodecFromAttributes: cannot update "
-                      "non-existing type \"%s\" for codec \"%s\"",
-                        type, name);
-                return BAD_VALUE;
+            typeIt = codecIt->second.typeMap.find(type);
+            if (typeIt == codecIt->second.typeMap.end()) {
+                return { NAME_NOT_FOUND, "MediaCodec: cannot update non-existing type for codec" };
             }
         } else {
             // This should happen only when the codec has at most one type.
-            mCurrentType = mCurrentCodec->second.typeMap.begin();
-            if (mCurrentType == mCurrentCodec->second.typeMap.end()) {
-                ALOGE("addMediaCodecFromAttributes: cannot update "
-                      "codec \"%s\" without type specified", name);
-                return BAD_VALUE;
+            typeIt = codecIt->second.typeMap.begin();
+            if (typeIt == codecIt->second.typeMap.end()
+                    || codecIt->second.typeMap.size() != 1) {
+                return { BAD_VALUE, "MediaCodec: cannot update codec without type specified" };
             }
         }
     }
+    mCurrent.emplace_back(CodecAndType{name, codecIt, typeIt, updating});
+    return OK;
+}
+
+status_t MediaCodecsXmlParser::Impl::Parser::updateMediaCodec(
+        const char *rank, const StringSet &domains, const StringSet &variants,
+        const char *enabled) {
+    CHECK(mState->inCodec());
+    CodecProperties &codec = mState->codec();
 
     if (rank != nullptr) {
-        if (!mCurrentCodec->second.rank.empty() && mCurrentCodec->second.rank != rank) {
-            ALOGE("addMediaCodecFromAttributes: code \"%s\" rank changed from \"%s\" to \"%s\"",
-                    name, mCurrentCodec->second.rank.c_str(), rank);
-            return BAD_VALUE;
-        }
-        mCurrentCodec->second.rank = rank;
+        ALOGD_IF(!codec.rank.empty() && codec.rank != rank,
+                "codec '%s' rank changed from '%s' to '%s'",
+                mState->codecName().c_str(), codec.rank.c_str(), rank);
+        codec.rank = rank;
     }
 
+    codec.variantSet = variants;
+
+    for (const std::string &domain : domains) {
+        if (domain.size() && domain.at(0) == '!') {
+            codec.domainSet.erase(domain.substr(1));
+        } else {
+            codec.domainSet.emplace(domain);
+        }
+    }
+
+    if (enabled != nullptr) {
+        if (parseBoolean(enabled)) {
+            codec.quirkSet.erase("attribute::disabled");
+            ALOGD("enabling %s", mState->codecName().c_str());
+        } else {
+            codec.quirkSet.emplace("attribute::disabled");
+            ALOGD("disabling %s", mState->codecName().c_str());
+        }
+    }
     return OK;
 }
 
-status_t MediaCodecsXmlParser::addQuirk(const char **attrs, const char *tag) {
-    if (mCurrentCodec == mCodecMap.end()) {
-        return BAD_VALUE;
-    }
-
-    const char *name = nullptr;
+status_t MediaCodecsXmlParser::Impl::Parser::addQuirk(const char **attrs, const char *prefix) {
+    CHECK(mState->inCodec());
+    const char *a_name = nullptr;
 
     size_t i = 0;
     while (attrs[i] != nullptr) {
-        if (strEq(attrs[i], "name")) {
-            if (attrs[++i] == nullptr) {
-                ALOGE("addQuirk: name is null");
-                return BAD_VALUE;
-            }
-            name = attrs[i];
-        } else {
-            ALOGE("addQuirk: unrecognized attribute: %s", attrs[i]);
+        CHECK((i & 1) == 0);
+        if (attrs[i + 1] == nullptr) {
+            PLOGD("Quirk: attribute '%s' is null", attrs[i]);
             return BAD_VALUE;
         }
+
+        if (strEq(attrs[i], "name")) {
+            a_name = attrs[++i];
+        } else {
+            PLOGD("Quirk: ignoring unrecognized attribute '%s'", attrs[i]);
+            ++i;
+        }
         ++i;
     }
 
-    if (name == nullptr) {
-        ALOGE("addQuirk: name not found");
+    if (a_name == nullptr) {
+        PLOGD("Quirk with no 'name' attribute");
         return BAD_VALUE;
     }
 
-    std::string tagString = tag;
-    std::transform(tagString.begin(), tagString.end(), tagString.begin(), ::tolower);
-    tagString.append("::");
-    tagString.append(name);
-    mCurrentCodec->second.quirkSet.emplace(tagString.c_str());
-    ALOGI("adding %s to %s", tagString.c_str(), mCurrentCodec->first.c_str());
+    std::string key = std::string(prefix ? : "") + a_name;
+    mState->codec().quirkSet.emplace(key);
+    ALOGV("adding %s to %s", key.c_str(), mState->codecName().c_str());
     return OK;
 }
 
-status_t MediaCodecsXmlParser::addTypeFromAttributes(const char **attrs, bool encoder) {
-    if (mCurrentCodec == mCodecMap.end()) {
-        return BAD_VALUE;
-    }
+status_t MediaCodecsXmlParser::Impl::Parser::enterType(const char **attrs) {
+    CHECK(mState->inCodec());
 
-    const char *name = nullptr;
-    const char *update = nullptr;
+    const char *a_name = nullptr;
+    const char *a_update = nullptr;
 
     size_t i = 0;
     while (attrs[i] != nullptr) {
+        CHECK((i & 1) == 0);
+        if (attrs[i + 1] == nullptr) {
+            PLOGD("Type: attribute '%s' is null", attrs[i]);
+            return BAD_VALUE;
+        }
+
         if (strEq(attrs[i], "name")) {
-            if (attrs[++i] == nullptr) {
-                ALOGE("addTypeFromAttributes: name is null");
-                return BAD_VALUE;
-            }
-            name = attrs[i];
+            a_name = attrs[++i];
         } else if (strEq(attrs[i], "update")) {
-            if (attrs[++i] == nullptr) {
-                ALOGE("addTypeFromAttributes: update is null");
-                return BAD_VALUE;
-            }
-            update = attrs[i];
+            a_update = attrs[++i];
         } else {
-            ALOGE("addTypeFromAttributes: unrecognized attribute: %s", attrs[i]);
-            return BAD_VALUE;
+            PLOGD("Type: ignoring unrecognized attribute '%s'", attrs[i]);
+            ++i;
         }
         ++i;
     }
 
-    if (name == nullptr) {
+    if (a_name == nullptr) {
+        PLOGD("Type with no 'name' attribute");
         return BAD_VALUE;
     }
 
-    mCurrentCodec->second.isEncoder = encoder;
-    mCurrentType = mCurrentCodec->second.typeMap.find(name);
-    if (!mUpdate) {
-        if (mCurrentType != mCurrentCodec->second.typeMap.end()) {
-            ALOGW("addTypeFromAttributes: trying to update "
-                  "existing type \"%s\"", name);
-            return ALREADY_EXISTS;
+    bool update = (a_update != nullptr) && parseBoolean(a_update);
+    return mState->enterType(a_name, update);
+}
+
+MediaCodecsXmlParser::Impl::Result
+MediaCodecsXmlParser::Impl::State::enterType(const char *name, bool update) {
+    update = update || updating(); // inherit the parent section's update mode
+
+    CodecMap::iterator codecIt = mCurrent.back().mCodec;
+    TypeMap::iterator typeIt = codecIt->second.typeMap.find(name);
+    if (!update) {
+        if (typeIt != codecIt->second.typeMap.end()) {
+            return { ALREADY_EXISTS, "trying to update existing type '" + std::string(name) + "'" };
         }
-        mCurrentType = mCurrentCodec->second.typeMap.insert(
-                Type(name, AttributeMap())).first;
-    } else if (mCurrentType == mCurrentCodec->second.typeMap.end()) {
-        ALOGE("addTypeFromAttributes: updating non-existing type");
-        return BAD_VALUE;
+        typeIt = codecIt->second.typeMap.insert(Type(name, AttributeMap())).first;
+    } else if (typeIt == codecIt->second.typeMap.end()) {
+        return { NAME_NOT_FOUND, "trying to update non-existing type '" + std::string(name) + "'" };
     }
+    mCurrent.push_back({ codecName(), codecIt, typeIt, update });
+    CHECK(inType());
     return OK;
 }
 
-status_t MediaCodecsXmlParser::addLimit(const char **attrs) {
-    if (mCurrentCodec == mCodecMap.end()) {
-        return BAD_VALUE;
-    }
-    if (mCurrentType == mCurrentCodec->second.typeMap.end()) {
-        return BAD_VALUE;
-    }
-
+status_t MediaCodecsXmlParser::Impl::Parser::addLimit(const char **attrs) {
+    CHECK(mState->inType());
     const char* a_name = nullptr;
     const char* a_default = nullptr;
     const char* a_in = nullptr;
@@ -704,69 +1132,39 @@
 
     size_t i = 0;
     while (attrs[i] != nullptr) {
-        if (strEq(attrs[i], "name")) {
-            if (attrs[++i] == nullptr) {
-                ALOGE("addLimit: name is null");
-                return BAD_VALUE;
-            }
-            a_name = attrs[i];
-        } else if (strEq(attrs[i], "default")) {
-            if (attrs[++i] == nullptr) {
-                ALOGE("addLimit: default is null");
-                return BAD_VALUE;
-            }
-            a_default = attrs[i];
-        } else if (strEq(attrs[i], "in")) {
-            if (attrs[++i] == nullptr) {
-                ALOGE("addLimit: in is null");
-                return BAD_VALUE;
-            }
-            a_in = attrs[i];
-        } else if (strEq(attrs[i], "max")) {
-            if (attrs[++i] == nullptr) {
-                ALOGE("addLimit: max is null");
-                return BAD_VALUE;
-            }
-            a_max = attrs[i];
-        } else if (strEq(attrs[i], "min")) {
-            if (attrs[++i] == nullptr) {
-                ALOGE("addLimit: min is null");
-                return BAD_VALUE;
-            }
-            a_min = attrs[i];
-        } else if (strEq(attrs[i], "range")) {
-            if (attrs[++i] == nullptr) {
-                ALOGE("addLimit: range is null");
-                return BAD_VALUE;
-            }
-            a_range = attrs[i];
-        } else if (strEq(attrs[i], "ranges")) {
-            if (attrs[++i] == nullptr) {
-                ALOGE("addLimit: ranges is null");
-                return BAD_VALUE;
-            }
-            a_ranges = attrs[i];
-        } else if (strEq(attrs[i], "scale")) {
-            if (attrs[++i] == nullptr) {
-                ALOGE("addLimit: scale is null");
-                return BAD_VALUE;
-            }
-            a_scale = attrs[i];
-        } else if (strEq(attrs[i], "value")) {
-            if (attrs[++i] == nullptr) {
-                ALOGE("addLimit: value is null");
-                return BAD_VALUE;
-            }
-            a_value = attrs[i];
-        } else {
-            ALOGE("addLimit: unrecognized limit: %s", attrs[i]);
+        CHECK((i & 1) == 0);
+        if (attrs[i + 1] == nullptr) {
+            PLOGD("Limit: attribute '%s' is null", attrs[i]);
             return BAD_VALUE;
         }
+
+        if (strEq(attrs[i], "name")) {
+            a_name = attrs[++i];
+        } else if (strEq(attrs[i], "default")) {
+            a_default = attrs[++i];
+        } else if (strEq(attrs[i], "in")) {
+            a_in = attrs[++i];
+        } else if (strEq(attrs[i], "max")) {
+            a_max = attrs[++i];
+        } else if (strEq(attrs[i], "min")) {
+            a_min = attrs[++i];
+        } else if (strEq(attrs[i], "range")) {
+            a_range = attrs[++i];
+        } else if (strEq(attrs[i], "ranges")) {
+            a_ranges = attrs[++i];
+        } else if (strEq(attrs[i], "scale")) {
+            a_scale = attrs[++i];
+        } else if (strEq(attrs[i], "value")) {
+            a_value = attrs[++i];
+        } else {
+            PLOGD("Limit: ignoring unrecognized limit: %s", attrs[i]);
+            ++i;
+        }
         ++i;
     }
 
     if (a_name == nullptr) {
-        ALOGE("limit with no 'name' attribute");
+        PLOGD("Limit with no 'name' attribute");
         return BAD_VALUE;
     }
 
@@ -774,8 +1172,50 @@
     // measured-frame-rate, measured-blocks-per-second: range
     // quality: range + default + [scale]
     // complexity: range + default
-    std::string range;
-    if (strEq(a_name, "aspect-ratio") ||
+    std::string key = a_name, value;
+
+    // don't allow specifying more than one of value, range or min/max
+    if ((a_value != nullptr) + (a_range != nullptr) + (a_ranges != nullptr)
+            + (a_min != nullptr || a_max != nullptr) > 1) {
+        PLOGD("Limit '%s' has multiple 'min'/'max', 'range', 'ranges' or 'value' attributes",
+                a_name);
+        return BAD_VALUE;
+    }
+
+    // Min/max limits (only containing min or max attribute)
+    //
+    // Current "max" limits are "channel-count", "concurrent-instances".
+    // There are currently no "min" limits.
+    //
+    // Range limits. "range" is specified in exactly one of the following forms:
+    // 1) min-max
+    // 2) value-value
+    // 3) range
+    //
+    // Current range limits are "aspect-ratio", "bitrate", "block-count", "blocks-per-second",
+    // "complexity", "frame-rate", "quality", "size", "measured-blocks-per-second",
+    // "performance-point-*", "measured-frame-rate-*"
+    //
+    // Other limits (containing only value or ranges)
+    //
+    // Current ranges limit is "sample-rate"
+    if ((a_min != nullptr) ^ (a_max != nullptr)) {
+        // min/max limit
+        if (a_max != nullptr) {
+            key = "max-" + key;
+            value = a_max;
+        } else if (a_min != nullptr) {
+            key = "min-" + key;
+            value = a_min;
+        }
+    } else if (a_min != nullptr && a_max != nullptr) {
+        // min-max
+        key += "-range";
+        value = a_min + std::string("-") + a_max;
+    } else if (a_value != nullptr) {
+        // value-value or value
+        value = a_value;
+        if (strEq(a_name, "aspect-ratio") ||
             strEq(a_name, "bitrate") ||
             strEq(a_name, "block-count") ||
             strEq(a_name, "blocks-per-second") ||
@@ -786,249 +1226,199 @@
             strEq(a_name, "measured-blocks-per-second") ||
             strHasPrefix(a_name, "performance-point-") ||
             strHasPrefix(a_name, "measured-frame-rate-")) {
-        // "range" is specified in exactly one of the following forms:
-        // 1) min-max
-        // 2) value-value
-        // 3) range
-        if (a_min != nullptr && a_max != nullptr) {
-            // min-max
-            if (a_range != nullptr || a_value != nullptr) {
-                return limitError(a_name, "has 'min' and 'max' as well as 'range' or "
-                        "'value' attributes");
-            }
-            range = a_min;
-            range += '-';
-            range += a_max;
-        } else if (a_min != nullptr || a_max != nullptr) {
-            return limitError(a_name, "has only 'min' or 'max' attribute");
-        } else if (a_value != nullptr) {
-            // value-value
-            if (a_range != nullptr) {
-                return limitError(a_name, "has both 'range' and 'value' attributes");
-            }
-            range = a_value;
-            range += '-';
-            range += a_value;
-        } else if (a_range == nullptr) {
-            return limitError(a_name, "with no 'range', 'value' or 'min'/'max' attributes");
-        } else {
-            // range
-            range = a_range;
+            key += "-range";
+            value += std::string("-") + a_value;
         }
-
-        // "aspect-ratio" requires some special treatment.
-        if (strEq(a_name, "aspect-ratio")) {
-            // "aspect-ratio" must have "in".
-            if (a_in == nullptr) {
-                return limitFoundMissingAttr(a_name, "in", false);
-            }
-            // "in" must be either "pixels" or "blocks".
-            if (!strEq(a_in, "pixels") && !strEq(a_in, "blocks")) {
-                return limitInvalidAttr(a_name, "in", a_in);
-            }
-            // name will be "pixel-aspect-ratio-range" or
-            // "block-aspect-ratio-range".
-            mCurrentType->second[
-                    std::string(a_in).substr(0, strlen(a_in) - 1) +
-                    "-aspect-ratio-range"] = range;
-        } else {
-            // For everything else (apart from "aspect-ratio"), simply append
-            // "-range" to the name for the range-type property.
-            mCurrentType->second[std::string(a_name) + "-range"] = range;
-
-            // Only "quality" may have "scale".
-            if (!strEq(a_name, "quality") && a_scale != nullptr) {
-                return limitFoundMissingAttr(a_name, "scale");
-            } else if (strEq(a_name, "quality")) {
-                // The default value of "quality-scale" is "linear".
-                mCurrentType->second["quality-scale"] = a_scale == nullptr ?
-                        "linear" : a_scale;
-            }
-
-            // "quality" and "complexity" must have "default".
-            // Other limits must not have "default".
-            if (strEq(a_name, "quality") || strEq(a_name, "complexity")) {
-                if (a_default == nullptr) {
-                    return limitFoundMissingAttr(a_name, "default", false);
-                }
-                // name will be "quality-default" or "complexity-default".
-                mCurrentType->second[std::string(a_name) + "-default"] = a_default;
-            } else if (a_default != nullptr) {
-                return limitFoundMissingAttr(a_name, "default", true);
-            }
-        }
+    } else if (a_range != nullptr) {
+        // range
+        key += "-range";
+        value = a_range;
+    } else if (a_ranges != nullptr) {
+        // ranges
+        key += "-ranges";
+        value = a_ranges;
     } else {
-        if (a_default != nullptr) {
-            return limitFoundMissingAttr(a_name, "default");
-        }
-        if (a_in != nullptr) {
-            return limitFoundMissingAttr(a_name, "in");
-        }
-        if (a_scale != nullptr) {
-            return limitFoundMissingAttr(a_name, "scale");
-        }
-        if (a_range != nullptr) {
-            return limitFoundMissingAttr(a_name, "range");
-        }
-        if (a_min != nullptr) {
-            return limitFoundMissingAttr(a_name, "min");
-        }
-
-        if (a_max != nullptr) {
-            // "max" must exist if and only if name is "channel-count" or
-            // "concurrent-instances".
-            // "min" is not ncessary.
-            if (strEq(a_name, "channel-count") ||
-                    strEq(a_name, "concurrent-instances")) {
-                mCurrentType->second[std::string("max-") + a_name] = a_max;
-            } else {
-                return limitFoundMissingAttr(a_name, "max", false);
-            }
-        } else if (strEq(a_name, "channel-count") ||
-                strEq(a_name, "concurrent-instances")) {
-            return limitFoundMissingAttr(a_name, "max");
-        }
-
-        if (a_ranges != nullptr) {
-            // "ranges" must exist if and only if name is "sample-rate".
-            if (strEq(a_name, "sample-rate")) {
-                mCurrentType->second["sample-rate-ranges"] = a_ranges;
-            } else {
-                return limitFoundMissingAttr(a_name, "ranges", false);
-            }
-        } else if (strEq(a_name, "sample-rate")) {
-            return limitFoundMissingAttr(a_name, "ranges");
-        }
-
-        if (a_value != nullptr) {
-            // "value" must exist if and only if name is "alignment" or
-            // "block-size".
-            if (strEq(a_name, "alignment") || strEq(a_name, "block-size")) {
-                mCurrentType->second[a_name] = a_value;
-            } else {
-                return limitFoundMissingAttr(a_name, "value", false);
-            }
-        } else if (strEq(a_name, "alignment") || strEq(a_name, "block-size")) {
-            return limitFoundMissingAttr(a_name, "value", false);
-        }
-
+        PLOGD("Limit '%s' with no 'range', 'value' or 'min'/'max' attributes", a_name);
+        return BAD_VALUE;
     }
 
+    // handle 'in' attribute - this changes the key
+    if (a_in != nullptr) {
+        // Currently "aspect-ratio" uses in attribute
+        const size_t a_in_len = strlen(a_in);
+        key = std::string(a_in, a_in_len - a_in[a_in_len] == 's') + '-' + key;
+    }
+
+    // handle 'scale' attribute - this adds a new detail
+    if (a_scale != nullptr) {
+        mState->addDetail(a_name + std::string("-scale"), a_scale);
+    } else if (strEq(a_name, "quality")) {
+        // The default value of "quality-scale" is "linear" even if unspecified.
+        mState->addDetail(a_name + std::string("-scale"), "linear");
+    }
+
+    // handle 'default' attribute - this adds a new detail
+    if (a_default != nullptr) {
+        mState->addDetail(a_name + std::string("-default"), a_default);
+    }
+
+    mState->addDetail(key, value);
     return OK;
 }
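The key derivation above collapses the old per-limit special cases into one rule set. A
standalone sketch (not part of the patch) of the min/max, range and ranges cases only,
with illustrative limit names and values:

    #include <cassert>
    #include <string>

    // Mirrors how addLimit() derives the AttributeMap key from the attributes present.
    static std::string limitKey(const std::string &name,
                                bool hasMin, bool hasMax, bool hasRange, bool hasRanges) {
        if (hasMin != hasMax) return (hasMax ? "max-" : "min-") + name;  // single-sided limit
        if (hasMin && hasMax) return name + "-range";                    // min-max pair
        if (hasRange)         return name + "-range";                    // explicit range
        if (hasRanges)        return name + "-ranges";                   // list of ranges
        return name;                                                     // plain value
    }

    int main() {
        // <Limit name="channel-count" max="8"/>              -> "max-channel-count"
        assert(limitKey("channel-count", false, true, false, false) == "max-channel-count");
        // <Limit name="size" min="2x2" max="2048x2048"/>     -> "size-range"
        assert(limitKey("size", true, true, false, false) == "size-range");
        // <Limit name="sample-rate" ranges="8000-96000"/>    -> "sample-rate-ranges"
        assert(limitKey("sample-rate", false, false, false, true) == "sample-rate-ranges");
        return 0;
    }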
 
-status_t MediaCodecsXmlParser::addFeature(const char **attrs) {
-    if (mCurrentCodec == mCodecMap.end()) {
-        return BAD_VALUE;
+void MediaCodecsXmlParser::Impl::State::addDetail(
+        const std::string &key, const std::string &value) {
+    CHECK(inType());
+    ALOGI("limit: %s = %s", key.c_str(), value.c_str());
+    const StringSet &variants = mVariantsStack.back();
+    if (variants.empty()) {
+        type()[key] = value;
+    } else {
+        for (const std::string &variant : variants) {
+            type()[variant + ":::" + key] = value;
+        }
     }
-    if (mCurrentType == mCurrentCodec->second.typeMap.end()) {
+}
+
+status_t MediaCodecsXmlParser::Impl::Parser::limitVariants(const char **attrs) {
+    const char* a_name = nullptr;
+
+    size_t i = 0;
+    while (attrs[i] != nullptr) {
+        CHECK((i & 1) == 0);
+        if (attrs[i + 1] == nullptr) {
+            PLOGD("Variant: attribute '%s' is null", attrs[i]);
+            return BAD_VALUE;
+        }
+        if (strEq(attrs[i], "name")) {
+            a_name = attrs[++i];
+        } else {
+            PLOGD("Variant: ignoring unrecognized attribute: %s", attrs[i]);
+            ++i;
+        }
+        ++i;
+    }
+
+    if (a_name == nullptr || *a_name == '\0') {
+        PLOGD("Variant with no or empty 'name' attribute");
         return BAD_VALUE;
     }
 
+    StringSet variants;
+    for (const std::string &variant : parseCommaSeparatedStringSet(a_name)) {
+        if (mState->variants().count(variant)) {
+            variants.emplace(variant);
+        } else {
+            PLOGD("Variant: variant '%s' not in parent variants", variant.c_str());
+            return BAD_VALUE;
+        }
+    }
+    mState->enterVariants(variants);
+    return OK;
+}
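Details added while inside a <Variant> block are stored once per selected variant, using
the "variant:::key" form from addDetail() above. A standalone sketch (not part of the
patch), with illustrative variant and limit names:

    #include <cstdio>
    #include <map>
    #include <set>
    #include <string>

    int main() {
        std::map<std::string, std::string> type;  // stands in for the type's AttributeMap
        std::set<std::string> variants = { "slow-cpu", "slow-gpu" };

        const std::string key = "max-concurrent-instances";
        const std::string value = "4";
        if (variants.empty()) {
            type[key] = value;                        // no variant selected: plain key
        } else {
            for (const std::string &variant : variants) {
                type[variant + ":::" + key] = value;  // same prefixing as addDetail()
            }
        }
        for (const auto &kv : type) {
            printf("%s = %s\n", kv.first.c_str(), kv.second.c_str());
        }
        return 0;
    }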
+
+status_t MediaCodecsXmlParser::Impl::Parser::addFeature(const char **attrs) {
+    CHECK(mState->inType());
     size_t i = 0;
-    const char *name = nullptr;
+    const char *a_name = nullptr;
     int32_t optional = -1;
     int32_t required = -1;
-    const char *value = nullptr;
+    const char *a_value = nullptr;
 
     while (attrs[i] != nullptr) {
-        if (strEq(attrs[i], "name")) {
-            if (attrs[++i] == nullptr) {
-                ALOGE("addFeature: name is null");
-                return BAD_VALUE;
-            }
-            name = attrs[i];
-        } else if (strEq(attrs[i], "optional")) {
-            if (attrs[++i] == nullptr) {
-                ALOGE("addFeature: optional is null");
-                return BAD_VALUE;
-            }
-            optional = parseBoolean(attrs[i]) ? 1 : 0;
-        } else if (strEq(attrs[i], "required")) {
-            if (attrs[++i] == nullptr) {
-                ALOGE("addFeature: required is null");
-                return BAD_VALUE;
-            }
-            required = parseBoolean(attrs[i]) ? 1 : 0;
-        } else if (strEq(attrs[i], "value")) {
-            if (attrs[++i] == nullptr) {
-                ALOGE("addFeature: value is null");
-                return BAD_VALUE;
-            }
-            value = attrs[i];
-        } else {
-            ALOGE("addFeature: unrecognized attribute: %s", attrs[i]);
+        CHECK((i & 1) == 0);
+        if (attrs[i + 1] == nullptr) {
+            PLOGD("Feature: attribute '%s' is null", attrs[i]);
             return BAD_VALUE;
         }
+
+        if (strEq(attrs[i], "name")) {
+            a_name = attrs[++i];
+        } else if (strEq(attrs[i], "optional")) {
+            optional = parseBoolean(attrs[++i]) ? 1 : 0;
+        } else if (strEq(attrs[i], "required")) {
+            required = parseBoolean(attrs[++i]) ? 1 : 0;
+        } else if (strEq(attrs[i], "value")) {
+            a_value = attrs[++i];
+        } else {
+            PLOGD("Feature: ignoring unrecognized attribute '%s'", attrs[i]);
+            ++i;
+        }
         ++i;
     }
 
     // Every feature must have a name.
-    if (name == nullptr) {
-        ALOGE("feature with no 'name' attribute");
+    if (a_name == nullptr) {
+        PLOGD("Feature with no 'name' attribute");
         return BAD_VALUE;
     }
 
-    if ((optional != -1) || (required != -1)) {
-        if (optional == required) {
-            ALOGE("feature '%s' is both/neither optional and required", name);
+    if (a_value != nullptr) {
+        if (optional != -1 || required != -1) {
+            PLOGD("Feature '%s' has both value and optional/required attributes", a_name);
             return BAD_VALUE;
         }
-        if ((optional == 1) || (required == 1)) {
-            if (value != nullptr) {
-                ALOGE("feature '%s' cannot have extra 'value'", name);
-                return BAD_VALUE;
-            }
-            mCurrentType->second[std::string("feature-") + name] =
-                    optional == 1 ? "0" : "1";
-            return OK;
+    } else {
+        if (optional == required && optional != -1) {
+            PLOGD("Feature '%s' is both/neither optional and required", a_name);
+            return BAD_VALUE;
         }
+        a_value = (required == 1 || optional == 0) ? "1" : "0";
     }
-    mCurrentType->second[std::string("feature-") + name] = value == nullptr ?
-            "0" : value;
+
+    mState->addDetail(std::string("feature-") + a_name, a_value ? : "0");
     return OK;
 }
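The value stored under "feature-<name>" follows a small truth table: an explicit value is
stored as-is (and may not be combined with optional/required), required features map to
"1", and optional or unmarked features map to "0". A standalone sketch (not part of the
patch), with illustrative feature names in the comments:

    #include <cassert>
    #include <string>

    // Mirrors the value selection in addFeature(); -1 means the attribute was not given.
    static std::string featureValue(const char *value, int optional, int required) {
        if (value != nullptr) return value;                   // explicit value stored as-is
        return (required == 1 || optional == 0) ? "1" : "0";  // "1" = required, "0" = optional
    }

    int main() {
        assert(featureValue(nullptr, -1,  1) == "1");         // <Feature name="..." required="true"/>
        assert(featureValue(nullptr,  1, -1) == "0");         // <Feature name="..." optional="true"/>
        assert(featureValue(nullptr, -1, -1) == "0");         // bare <Feature name="..."/>
        assert(featureValue("VBR,CBR", -1, -1) == "VBR,CBR"); // value attribute passed through
        return 0;
    }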
 
-status_t MediaCodecsXmlParser::addAlias(const char **attrs) {
+status_t MediaCodecsXmlParser::Impl::Parser::addAlias(const char **attrs) {
+    CHECK(mState->inCodec());
     size_t i = 0;
-    const char *name = nullptr;
+    const char *a_name = nullptr;
 
     while (attrs[i] != nullptr) {
-        if (strEq(attrs[i], "name")) {
-            if (attrs[++i] == nullptr) {
-                ALOGE("addAlias: name is null");
-                return BAD_VALUE;
-            }
-            name = attrs[i];
-        } else {
-            ALOGE("addAlias: unrecognized attribute: %s", attrs[i]);
+        CHECK((i & 1) == 0);
+        if (attrs[i + 1] == nullptr) {
+            PLOGD("Alias: attribute '%s' is null", attrs[i]);
             return BAD_VALUE;
         }
+
+        if (strEq(attrs[i], "name")) {
+            a_name = attrs[++i];
+        } else {
+            PLOGD("Alias: ignoring unrecognized attribute '%s'", attrs[i]);
+            ++i;
+        }
         ++i;
     }
 
     // Every alias must have a name.
-    if (name == nullptr) {
-        ALOGE("alias with no 'name' attribute");
+    if (a_name == nullptr) {
+        PLOGD("Alias with no 'name' attribute");
         return BAD_VALUE;
     }
 
-    mCurrentCodec->second.aliases.emplace_back(name);
+    mState->codec().aliases.emplace_back(a_name);
     return OK;
 }
 
 const MediaCodecsXmlParser::AttributeMap&
-        MediaCodecsXmlParser::getServiceAttributeMap() const {
-    return mServiceAttributeMap;
+MediaCodecsXmlParser::getServiceAttributeMap() const {
+    return mImpl->getServiceAttributeMap();
 }
 
 const MediaCodecsXmlParser::CodecMap&
-        MediaCodecsXmlParser::getCodecMap() const {
-    return mCodecMap;
+MediaCodecsXmlParser::getCodecMap() const {
+    return mImpl->getCodecMap();
 }
 
 const MediaCodecsXmlParser::RoleMap&
-        MediaCodecsXmlParser::getRoleMap() const {
+MediaCodecsXmlParser::getRoleMap() const {
+    return mImpl->getRoleMap();
+}
+
+const MediaCodecsXmlParser::RoleMap&
+MediaCodecsXmlParser::Impl::getRoleMap() const {
+    std::lock_guard<std::mutex> guard(mLock);
     if (mRoleMap.empty()) {
         generateRoleMap();
     }
@@ -1036,6 +1426,11 @@
 }
 
 const char* MediaCodecsXmlParser::getCommonPrefix() const {
+    return mImpl->getCommonPrefix();
+}
+
+const char* MediaCodecsXmlParser::Impl::getCommonPrefix() const {
+    std::lock_guard<std::mutex> guard(mLock);
     if (mCommonPrefix.empty()) {
         generateCommonPrefix();
     }
@@ -1043,12 +1438,15 @@
 }
 
 status_t MediaCodecsXmlParser::getParsingStatus() const {
-    return mParsingStatus;
+    return mImpl->getParsingStatus();
 }
 
-void MediaCodecsXmlParser::generateRoleMap() const {
-    for (const auto& codec : mCodecMap) {
-        const auto& codecName = codec.first;
+void MediaCodecsXmlParser::Impl::generateRoleMap() const {
+    for (const auto& codec : mData.mCodecMap) {
+        const auto &codecName = codec.first;
+        if (codecName == "<dummy>") {
+            continue;
+        }
         bool isEncoder = codec.second.isEncoder;
         size_t order = codec.second.order;
         std::string rank = codec.second.rank;
@@ -1116,14 +1514,14 @@
     }
 }
 
-void MediaCodecsXmlParser::generateCommonPrefix() const {
-    if (mCodecMap.empty()) {
+void MediaCodecsXmlParser::Impl::generateCommonPrefix() const {
+    if (mData.mCodecMap.empty()) {
         return;
     }
-    auto i = mCodecMap.cbegin();
+    auto i = mData.mCodecMap.cbegin();
     auto first = i->first.cbegin();
     auto last = i->first.cend();
-    for (++i; i != mCodecMap.cend(); ++i) {
+    for (++i; i != mData.mCodecMap.cend(); ++i) {
         last = std::mismatch(
                 first, last, i->first.cbegin(), i->first.cend()).first;
     }
@@ -1131,4 +1529,3 @@
 }
 
 } // namespace android
-
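generateCommonPrefix() computes the longest common prefix of all codec names with
std::mismatch, folding each successive map key into the running prefix. A standalone
sketch (not part of the patch), with illustrative codec names:

    #include <algorithm>
    #include <cassert>
    #include <map>
    #include <string>

    int main() {
        std::map<std::string, int> codecMap = {
            { "OMX.google.aac.decoder", 0 },
            { "OMX.google.amrnb.decoder", 1 },
            { "OMX.google.avc.encoder", 2 },
        };
        auto i = codecMap.cbegin();
        auto first = i->first.cbegin();
        auto last = i->first.cend();
        for (++i; i != codecMap.cend(); ++i) {
            // Shrink [first, last) to the part that still matches the next name.
            last = std::mismatch(first, last, i->first.cbegin(), i->first.cend()).first;
        }
        assert(std::string(first, last) == "OMX.google.a");
        return 0;
    }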
diff --git a/media/libstagefright/xmlparser/include/media/stagefright/xmlparser/MediaCodecsXmlParser.h b/media/libstagefright/xmlparser/include/media/stagefright/xmlparser/MediaCodecsXmlParser.h
index 7a986b7..b666de4 100644
--- a/media/libstagefright/xmlparser/include/media/stagefright/xmlparser/MediaCodecsXmlParser.h
+++ b/media/libstagefright/xmlparser/include/media/stagefright/xmlparser/MediaCodecsXmlParser.h
@@ -19,34 +19,31 @@
 
 #include <sys/types.h>
 #include <utils/Errors.h>
-#include <utils/Vector.h>
-#include <utils/StrongPointer.h>
 
-#include <string>
-#include <set>
 #include <map>
+#include <mutex>
+#include <set>
+#include <string>
 #include <vector>
 
+struct XML_ParserStruct; // from expat library
+
 namespace android {
 
 class MediaCodecsXmlParser {
 public:
 
     // Treblized media codec list will be located in /odm/etc or /vendor/etc.
-    static constexpr char const* defaultSearchDirs[] =
-            {"/odm/etc", "/vendor/etc", "/etc", nullptr};
-    static constexpr char const* defaultMainXmlName =
-            "media_codecs.xml";
-    static constexpr char const* defaultPerformanceXmlName =
-            "media_codecs_performance.xml";
+    static std::vector<std::string> getDefaultSearchDirs() {
+        return { "/odm/etc", "/vendor/etc", "/etc" };
+    }
+    static std::vector<std::string> getDefaultXmlNames() {
+        return { "media_codecs.xml", "media_codecs_performance.xml" };
+    }
     static constexpr char const* defaultProfilingResultsXmlPath =
             "/data/misc/media/media_codecs_profiling_results.xml";
 
-    MediaCodecsXmlParser(
-            const char* const* searchDirs = defaultSearchDirs,
-            const char* mainXmlName = defaultMainXmlName,
-            const char* performanceXmlName = defaultPerformanceXmlName,
-            const char* profilingResultsXmlPath = defaultProfilingResultsXmlPath);
+    MediaCodecsXmlParser();
     ~MediaCodecsXmlParser();
 
     typedef std::pair<std::string, std::string> Attribute;
@@ -55,7 +52,7 @@
     typedef std::pair<std::string, AttributeMap> Type;
     typedef std::map<std::string, AttributeMap> TypeMap;
 
-    typedef std::set<std::string> QuirkSet;
+    typedef std::set<std::string> StringSet;
 
     /**
      * Properties of a codec (node)
@@ -63,7 +60,9 @@
     struct CodecProperties {
         bool isEncoder;    ///< Whether this codec is an encoder or a decoder
         size_t order;      ///< Order of appearance in the file (starting from 0)
-        QuirkSet quirkSet; ///< Set of quirks requested by this codec
+        StringSet quirkSet; ///< Set of quirks requested by this codec
+        StringSet domainSet; ///< Set of domains this codec is in
+        StringSet variantSet; ///< Set of variants this codec is enabled on
         TypeMap typeMap;   ///< Map of types supported by this codec
         std::vector<std::string> aliases; ///< Name aliases for this codec
         std::string rank;  ///< Rank of this codec. This is a numeric string.
@@ -119,70 +118,31 @@
 
     status_t getParsingStatus() const;
 
+    /**
+     * Parse top level XML files from a group of search directories.
+     *
+     * @param xmlFiles ordered list of XML file names (no paths)
+     * @param searchDirs ordered list of paths to consider
+     *
+     * @return parsing status
+     */
+    status_t parseXmlFilesInSearchDirs(
+            const std::vector<std::string> &xmlFiles = getDefaultXmlNames(),
+            const std::vector<std::string> &searchDirs = getDefaultSearchDirs());
+
+    /**
+     * Parse a top level XML file.
+     *
+     * @param path XML file path
+     *
+     * @return parsing status
+     */
+    status_t parseXmlPath(const std::string &path);
+
 private:
-    enum Section {
-        SECTION_TOPLEVEL,
-        SECTION_SETTINGS,
-        SECTION_DECODERS,
-        SECTION_DECODER,
-        SECTION_DECODER_TYPE,
-        SECTION_ENCODERS,
-        SECTION_ENCODER,
-        SECTION_ENCODER_TYPE,
-        SECTION_INCLUDE,
-    };
-
-    status_t mParsingStatus;
-    Section mCurrentSection;
-    bool mUpdate;
-    std::vector<Section> mSectionStack;
-    std::string mHrefBase;
-
-    // Service attributes
-    AttributeMap mServiceAttributeMap;
-
-    // Codec attributes
-    std::string mCurrentName;
-    std::set<std::string> mCodecSet;
-    Codec mCodecListTemp[2048];
-    CodecMap mCodecMap;
-    size_t mCodecCounter;
-    CodecMap::iterator mCurrentCodec;
-    TypeMap::iterator mCurrentType;
-
-    // Role map
-    mutable RoleMap mRoleMap;
-
-    // Computed longest common prefix
-    mutable std::string mCommonPrefix;
-
-    bool parseTopLevelXMLFile(const char *path, bool ignore_errors = false);
-
-    void parseXMLFile(const char *path);
-
-    static void StartElementHandlerWrapper(
-            void *me, const char *name, const char **attrs);
-
-    static void EndElementHandlerWrapper(void *me, const char *name);
-
-    void startElementHandler(const char *name, const char **attrs);
-    void endElementHandler(const char *name);
-
-    status_t includeXMLFile(const char **attrs);
-    status_t addSettingFromAttributes(const char **attrs);
-    status_t addMediaCodecFromAttributes(bool encoder, const char **attrs);
-    void addMediaCodec(bool encoder, const char *name,
-            const char *type = nullptr);
-
-    status_t addQuirk(const char **attrs, const char *tag);
-    status_t addTypeFromAttributes(const char **attrs, bool encoder);
-    status_t addAlias(const char **attrs);
-    status_t addLimit(const char **attrs);
-    status_t addFeature(const char **attrs);
-    void addType(const char *name);
-
-    void generateRoleMap() const;
-    void generateCommonPrefix() const;
+    struct Impl;
+    std::shared_ptr<Impl> mImpl;
 
     MediaCodecsXmlParser(const MediaCodecsXmlParser&) = delete;
     MediaCodecsXmlParser& operator=(const MediaCodecsXmlParser&) = delete;
@@ -191,4 +151,3 @@
 } // namespace android
 
 #endif // MEDIA_STAGEFRIGHT_XMLPARSER_H_
-
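With the search directories and XML names now baked into the parser, callers construct a
MediaCodecsXmlParser and invoke the parse methods explicitly instead of passing paths to
the constructor. A usage sketch (assumes a build inside the AOSP tree where this header
and the xmlparser library are available; error handling elided):

    #include <media/stagefright/xmlparser/MediaCodecsXmlParser.h>

    size_t countParsedCodecs() {
        android::MediaCodecsXmlParser parser;
        // Looks for each of getDefaultXmlNames() under getDefaultSearchDirs(), in order.
        if (parser.parseXmlFilesInSearchDirs() != android::OK) {
            return 0;
        }
        // Profiling results still live at a fixed path and can be parsed on top.
        parser.parseXmlPath(android::MediaCodecsXmlParser::defaultProfilingResultsXmlPath);
        return parser.getCodecMap().size();
    }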
diff --git a/media/ndk/Android.bp b/media/ndk/Android.bp
index 7d1c88b..a3cabd8 100644
--- a/media/ndk/Android.bp
+++ b/media/ndk/Android.bp
@@ -74,6 +74,7 @@
         "android.hardware.graphics.bufferqueue@1.0",
         "android.hidl.token@1.0-utils",
         "libandroid_runtime_lazy",
+        "libbase",
         "libbinder",
         "libmedia",
         "libmedia_omx",
diff --git a/media/ndk/NdkMediaDrm.cpp b/media/ndk/NdkMediaDrm.cpp
index 2deb1a4..85dbffe 100644
--- a/media/ndk/NdkMediaDrm.cpp
+++ b/media/ndk/NdkMediaDrm.cpp
@@ -18,6 +18,7 @@
 #define LOG_TAG "NdkMediaDrm"
 
 #include <inttypes.h>
+#include <unistd.h>
 
 #include <media/NdkMediaDrm.h>
 
@@ -26,6 +27,8 @@
 #include <utils/StrongPointer.h>
 #include <gui/Surface.h>
 
+#include <android-base/properties.h>
+#include <binder/PermissionController.h>
 #include <media/IDrm.h>
 #include <media/IDrmClient.h>
 #include <media/stagefright/MediaErrors.h>
@@ -231,6 +234,39 @@
     return result;
 }
 
+static bool ShouldGetAppPackageName(void) {
+    // Check what this device's first API level was.
+    int32_t firstApiLevel = android::base::GetIntProperty<int32_t>("ro.product.first_api_level", 0);
+    if (firstApiLevel == 0) {
+        // The first API level is reported as 0 on factory ROMs; in that case the
+        // current SDK version is the best available estimate of the first API level.
+        firstApiLevel = android::base::GetIntProperty<int32_t>("ro.build.version.sdk", 0);
+    }
+    return firstApiLevel >= 29;  // Android Q
+}
+
+static status_t GetAppPackageName(String8 *packageName) {
+    sp<IServiceManager> serviceManager = defaultServiceManager();
+    sp<IBinder> binder = serviceManager->getService(String16("permission"));
+
+    sp<IPermissionController> permissionControl = interface_cast<IPermissionController>(binder);
+    if (permissionControl == NULL) {
+        ALOGE("Failed to get permission service");
+        return UNKNOWN_ERROR;
+    }
+
+    Vector<String16> packages;
+    permissionControl->getPackagesForUid(getuid(), packages);
+
+    if (packages.isEmpty()) {
+        ALOGE("Unable to get package name for current UID");
+        return UNKNOWN_ERROR;
+    }
+
+    *packageName = String8(packages[0]);
+    return OK;
+}
+
 static sp<IDrm> CreateDrm() {
     sp<IServiceManager> sm = defaultServiceManager();
     sp<IBinder> binder = sm->getService(String16("media.drm"));
@@ -255,8 +291,16 @@
         return NULL;
     }
 
-    String8 nullPackageName;
-    status_t err = drm->createPlugin(uuid, nullPackageName);
+    String8 packageName;
+    if (ShouldGetAppPackageName()) {
+        status_t err = GetAppPackageName(&packageName);
+
+        if (err != OK) {
+            return NULL;
+        }
+    }
+
+    status_t err = drm->createPlugin(uuid, packageName);
 
     if (err != OK) {
         return NULL;
diff --git a/media/utils/Android.bp b/media/utils/Android.bp
index 3adb40f..d81cde8 100644
--- a/media/utils/Android.bp
+++ b/media/utils/Android.bp
@@ -25,11 +25,13 @@
         "ServiceUtilities.cpp",
         "TimeCheck.cpp",
     ],
+    static_libs: [
+        "libc_malloc_debug_backtrace",
+    ],
     shared_libs: [
         "libbinder",
         "liblog",
         "libutils",
-        "libmemunreachable",
         "libhidlbase",
         "android.hardware.graphics.bufferqueue@1.0",
         "android.hidl.token@1.0-utils",
diff --git a/media/utils/ServiceUtilities.cpp b/media/utils/ServiceUtilities.cpp
index 768cd1e..16fdeaf 100644
--- a/media/utils/ServiceUtilities.cpp
+++ b/media/utils/ServiceUtilities.cpp
@@ -35,6 +35,8 @@
 namespace android {
 
 static const String16 sAndroidPermissionRecordAudio("android.permission.RECORD_AUDIO");
+static const String16 sModifyPhoneState("android.permission.MODIFY_PHONE_STATE");
+static const String16 sModifyAudioRouting("android.permission.MODIFY_AUDIO_ROUTING");
 
 static String16 resolveCallingPackage(PermissionController& permissionController,
         const String16& opPackageName, uid_t uid) {
@@ -162,9 +164,8 @@
 }
 
 bool modifyAudioRoutingAllowed() {
-    static const String16 sModifyAudioRoutingAllowed("android.permission.MODIFY_AUDIO_ROUTING");
     // IMPORTANT: Use PermissionCache - not a runtime permission and may not change.
-    bool ok = PermissionCache::checkCallingPermission(sModifyAudioRoutingAllowed);
+    bool ok = PermissionCache::checkCallingPermission(sModifyAudioRouting);
     if (!ok) ALOGE("android.permission.MODIFY_AUDIO_ROUTING");
     return ok;
 }
@@ -200,9 +201,19 @@
 }
 
 bool modifyPhoneStateAllowed(pid_t pid, uid_t uid) {
-    static const String16 sModifyPhoneState("android.permission.MODIFY_PHONE_STATE");
     bool ok = PermissionCache::checkPermission(sModifyPhoneState, pid, uid);
-    if (!ok) ALOGE("Request requires android.permission.MODIFY_PHONE_STATE");
+    ALOGE_IF(!ok, "Request requires %s", String8(sModifyPhoneState).c_str());
+    return ok;
+}
+
+// privileged behavior needed by Dialer, Settings, SetupWizard and CellBroadcastReceiver
+bool bypassInterruptionPolicyAllowed(pid_t pid, uid_t uid) {
+    static const String16 sWriteSecureSettings("android.permission.WRITE_SECURE_SETTINGS");
+    bool ok = PermissionCache::checkPermission(sModifyPhoneState, pid, uid)
+        || PermissionCache::checkPermission(sWriteSecureSettings, pid, uid)
+        || PermissionCache::checkPermission(sModifyAudioRouting, pid, uid);
+    ALOGE_IF(!ok, "Request requires %s or %s",
+             String8(sModifyPhoneState).c_str(), String8(sWriteSecureSettings).c_str());
     return ok;
 }
 
diff --git a/media/utils/include/mediautils/ServiceUtilities.h b/media/utils/include/mediautils/ServiceUtilities.h
index c5fe05f..2a6e609 100644
--- a/media/utils/include/mediautils/ServiceUtilities.h
+++ b/media/utils/include/mediautils/ServiceUtilities.h
@@ -81,6 +81,8 @@
 bool modifyDefaultAudioEffectsAllowed();
 bool dumpAllowed();
 bool modifyPhoneStateAllowed(pid_t pid, uid_t uid);
+bool bypassInterruptionPolicyAllowed(pid_t pid, uid_t uid);
+
 status_t checkIMemory(const sp<IMemory>& iMemory);
 
 class MediaPackageManager {
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 0f03b7e..0b745ac 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -296,13 +296,15 @@
     audio_stream_type_t streamType = AUDIO_STREAM_DEFAULT;
     audio_io_handle_t io = AUDIO_IO_HANDLE_NONE;
     audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
+    audio_attributes_t localAttr = *attr;
     if (direction == MmapStreamInterface::DIRECTION_OUTPUT) {
         audio_config_t fullConfig = AUDIO_CONFIG_INITIALIZER;
         fullConfig.sample_rate = config->sample_rate;
         fullConfig.channel_mask = config->channel_mask;
         fullConfig.format = config->format;
         std::vector<audio_io_handle_t> secondaryOutputs;
-        ret = AudioSystem::getOutputForAttr(attr, &io,
+
+        ret = AudioSystem::getOutputForAttr(&localAttr, &io,
                                             actualSessionId,
                                             &streamType, client.clientPid, client.clientUid,
                                             &fullConfig,
@@ -312,7 +314,7 @@
         ALOGW_IF(!secondaryOutputs.empty(),
                  "%s does not support secondary outputs, ignoring them", __func__);
     } else {
-        ret = AudioSystem::getInputForAttr(attr, &io,
+        ret = AudioSystem::getInputForAttr(&localAttr, &io,
                                               RECORD_RIID_INVALID,
                                               actualSessionId,
                                               client.clientPid,
@@ -330,7 +332,7 @@
     sp<MmapThread> thread = mMmapThreads.valueFor(io);
     if (thread != 0) {
         interface = new MmapThreadHandle(thread);
-        thread->configure(attr, streamType, actualSessionId, callback, *deviceId, portId);
+        thread->configure(&localAttr, streamType, actualSessionId, callback, *deviceId, portId);
         *handle = portId;
         *sessionId = actualSessionId;
     } else {
@@ -691,7 +693,7 @@
     uid_t clientUid = input.clientInfo.clientUid;
     audio_io_handle_t effectThreadId = AUDIO_IO_HANDLE_NONE;
     std::vector<int> effectIds;
-
+    audio_attributes_t localAttr = input.attr;
 
     if (!isAudioServerOrMediaServerUid(callingUid)) {
         ALOGW_IF(clientUid != callingUid,
@@ -701,8 +703,8 @@
         updatePid = true;
     }
     pid_t clientPid = input.clientInfo.clientPid;
+    const pid_t callingPid = IPCThreadState::self()->getCallingPid();
     if (updatePid) {
-        const pid_t callingPid = IPCThreadState::self()->getCallingPid();
         ALOGW_IF(clientPid != -1 && clientPid != callingPid,
                  "%s uid %d pid %d tried to pass itself off as pid %d",
                  __func__, callingUid, callingPid, clientPid);
@@ -720,8 +722,7 @@
     output.sessionId = sessionId;
     output.outputId = AUDIO_IO_HANDLE_NONE;
     output.selectedDeviceId = input.selectedDeviceId;
-
-    lStatus = AudioSystem::getOutputForAttr(&input.attr, &output.outputId, sessionId, &streamType,
+    lStatus = AudioSystem::getOutputForAttr(&localAttr, &output.outputId, sessionId, &streamType,
                                             clientPid, clientUid, &input.config, input.flags,
                                             &output.selectedDeviceId, &portId, &secondaryOutputs);
 
@@ -782,12 +783,13 @@
         output.notificationFrameCount = input.notificationFrameCount;
         output.flags = input.flags;
 
-        track = thread->createTrack_l(client, streamType, input.attr, &output.sampleRate,
+        track = thread->createTrack_l(client, streamType, localAttr, &output.sampleRate,
                                       input.config.format, input.config.channel_mask,
                                       &output.frameCount, &output.notificationFrameCount,
                                       input.notificationsPerBuffer, input.speed,
                                       input.sharedBuffer, sessionId, &output.flags,
-                                      input.clientInfo.clientTid, clientUid, &lStatus, portId);
+                                      callingPid, input.clientInfo.clientTid, clientUid,
+                                      &lStatus, portId);
         LOG_ALWAYS_FATAL_IF((lStatus == NO_ERROR) && (track == 0));
         // we don't abort yet if lStatus != NO_ERROR; there is still work to be done regardless
 
@@ -826,6 +828,12 @@
                     ALOGE("Secondary output patchRecord init failed: %d", status);
                     continue;
                 }
+
+                // TODO: We could check compatibility of the secondaryThread with the PatchTrack
+                // for fast usage: thread has fast mixer, sample rate matches, etc.;
+                // for now, we exclude fast tracks by removing the Fast flag.
+                const audio_output_flags_t outputFlags =
+                        (audio_output_flags_t)(output.flags & ~AUDIO_OUTPUT_FLAG_FAST);
                 sp patchTrack = new PlaybackThread::PatchTrack(secondaryThread,
                                                                streamType,
                                                                output.sampleRate,
@@ -834,7 +842,7 @@
                                                                frameCount,
                                                                patchRecord->buffer(),
                                                                patchRecord->bufferSize(),
-                                                               output.flags,
+                                                               outputFlags,
                                                                0ns /* timeout */);
                 status = patchTrack->initCheck();
                 if (status != NO_ERROR) {
@@ -1841,8 +1849,8 @@
         updatePid = true;
     }
     pid_t clientPid = input.clientInfo.clientPid;
+    const pid_t callingPid = IPCThreadState::self()->getCallingPid();
     if (updatePid) {
-        const pid_t callingPid = IPCThreadState::self()->getCallingPid();
         ALOGW_IF(clientPid != -1 && clientPid != callingPid,
                  "%s uid %d pid %d tried to pass itself off as pid %d",
                  __func__, callingUid, callingPid, clientPid);
@@ -1899,12 +1907,16 @@
                                       input.opPackageName,
                                       &input.config,
                                       output.flags, &output.selectedDeviceId, &portId);
+    if (lStatus != NO_ERROR) {
+        ALOGE("createRecord() getInputForAttr return error %d", lStatus);
+        goto Exit;
+    }
 
     {
         Mutex::Autolock _l(mLock);
         RecordThread *thread = checkRecordThread_l(output.inputId);
         if (thread == NULL) {
-            ALOGE("createRecord() checkRecordThread_l failed");
+            ALOGE("createRecord() checkRecordThread_l failed, input handle %d", output.inputId);
             lStatus = BAD_VALUE;
             goto Exit;
         }
@@ -1919,7 +1931,7 @@
                                                   input.config.format, input.config.channel_mask,
                                                   &output.frameCount, sessionId,
                                                   &output.notificationFrameCount,
-                                                  clientUid, &output.flags,
+                                                  callingPid, clientUid, &output.flags,
                                                   input.clientInfo.clientTid,
                                                   &lStatus, portId);
         LOG_ALWAYS_FATAL_IF((lStatus == NO_ERROR) && (recordTrack == 0));
diff --git a/services/audioflinger/MmapTracks.h b/services/audioflinger/MmapTracks.h
index 968d5aa..b83f6b5 100644
--- a/services/audioflinger/MmapTracks.h
+++ b/services/audioflinger/MmapTracks.h
@@ -31,6 +31,7 @@
                             bool isOut,
                             uid_t uid,
                             pid_t pid,
+                            pid_t creatorPid,
                             audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE);
     virtual             ~MmapTrack();
 
diff --git a/services/audioflinger/PatchPanel.cpp b/services/audioflinger/PatchPanel.cpp
index a210a6d..edb331d 100644
--- a/services/audioflinger/PatchPanel.cpp
+++ b/services/audioflinger/PatchPanel.cpp
@@ -506,7 +506,10 @@
     }
     if (mPlayback.thread()->hasFastMixer()) {
         // Create a fast track if the playback thread has fast mixer to get better performance.
+        // Note: we should have matching channel mask, sample rate, and format by the logic above.
         outputFlags = (audio_output_flags_t) (outputFlags | AUDIO_OUTPUT_FLAG_FAST);
+    } else {
+        outputFlags = (audio_output_flags_t) (outputFlags & ~AUDIO_OUTPUT_FLAG_FAST);
     }
 
     // create a special playback track to render to playback thread.
diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h
index 56be433..7008cee 100644
--- a/services/audioflinger/PlaybackTracks.h
+++ b/services/audioflinger/PlaybackTracks.h
@@ -26,7 +26,7 @@
     bool hasOpPlayAudio() const;
 
     static sp<OpPlayAudioMonitor> createIfNeeded(
-            uid_t uid, audio_usage_t usage, int id, audio_stream_type_t streamType);
+            uid_t uid, const audio_attributes_t& attr, int id, audio_stream_type_t streamType);
 
 private:
     OpPlayAudioMonitor(uid_t uid, audio_usage_t usage, int id);
@@ -69,6 +69,7 @@
                                 size_t bufferSize,
                                 const sp<IMemory>& sharedBuffer,
                                 audio_session_t sessionId,
+                                pid_t creatorPid,
                                 uid_t uid,
                                 audio_output_flags_t flags,
                                 track_type type,
diff --git a/services/audioflinger/RecordTracks.h b/services/audioflinger/RecordTracks.h
index ec1f86c..08660dd 100644
--- a/services/audioflinger/RecordTracks.h
+++ b/services/audioflinger/RecordTracks.h
@@ -32,6 +32,7 @@
                                 void *buffer,
                                 size_t bufferSize,
                                 audio_session_t sessionId,
+                                pid_t creatorPid,
                                 uid_t uid,
                                 audio_input_flags_t flags,
                                 track_type type,
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index f3d537b..31ef15d 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -566,14 +566,16 @@
     return status;
 }
 
-void AudioFlinger::ThreadBase::sendIoConfigEvent(audio_io_config_event event, pid_t pid)
+void AudioFlinger::ThreadBase::sendIoConfigEvent(audio_io_config_event event, pid_t pid,
+                                                 audio_port_handle_t portId)
 {
     Mutex::Autolock _l(mLock);
-    sendIoConfigEvent_l(event, pid);
+    sendIoConfigEvent_l(event, pid, portId);
 }
 
 // sendIoConfigEvent_l() must be called with ThreadBase::mLock held
-void AudioFlinger::ThreadBase::sendIoConfigEvent_l(audio_io_config_event event, pid_t pid)
+void AudioFlinger::ThreadBase::sendIoConfigEvent_l(audio_io_config_event event, pid_t pid,
+                                                   audio_port_handle_t portId)
 {
     // The audio statistics history is exponentially weighted to forget events
     // about five or more seconds in the past.  In order to have
@@ -584,7 +586,7 @@
     mProcessTimeMs.reset();
     mTimestampVerifier.discontinuity();
 
-    sp<ConfigEvent> configEvent = (ConfigEvent *)new IoConfigEvent(event, pid);
+    sp<ConfigEvent> configEvent = (ConfigEvent *)new IoConfigEvent(event, pid, portId);
     sendConfigEvent_l(configEvent);
 }
 
@@ -667,7 +669,7 @@
         } break;
         case CFG_EVENT_IO: {
             IoConfigEventData *data = (IoConfigEventData *)event->mData.get();
-            ioConfigChanged(data->mEvent, data->mPid);
+            ioConfigChanged(data->mEvent, data->mPid, data->mPortId);
         } break;
         case CFG_EVENT_SET_PARAMETER: {
             SetParameterConfigEventData *data = (SetParameterConfigEventData *)event->mData.get();
@@ -1952,6 +1954,7 @@
         const sp<IMemory>& sharedBuffer,
         audio_session_t sessionId,
         audio_output_flags_t *flags,
+        pid_t creatorPid,
         pid_t tid,
         uid_t uid,
         status_t *status,
@@ -2235,7 +2238,7 @@
         track = new Track(this, client, streamType, attr, sampleRate, format,
                           channelMask, frameCount,
                           nullptr /* buffer */, (size_t)0 /* bufferSize */, sharedBuffer,
-                          sessionId, uid, *flags, TrackBase::TYPE_DEFAULT, portId);
+                          sessionId, creatorPid, uid, *flags, TrackBase::TYPE_DEFAULT, portId);
 
         lStatus = track != 0 ? track->initCheck() : (status_t) NO_MEMORY;
         if (lStatus != NO_ERROR) {
@@ -2391,6 +2394,7 @@
             // to track the speaker usage
             addBatteryData(IMediaPlayerService::kBatteryDataAudioFlingerStart);
 #endif
+            sendIoConfigEvent_l(AUDIO_CLIENT_STARTED, track->creatorPid(), track->portId());
         }
 
         // set retry count for buffer fill
@@ -2499,7 +2503,8 @@
     return mOutput->stream->selectPresentation(presentationId, programId);
 }
 
-void AudioFlinger::PlaybackThread::ioConfigChanged(audio_io_config_event event, pid_t pid) {
+void AudioFlinger::PlaybackThread::ioConfigChanged(audio_io_config_event event, pid_t pid,
+                                                   audio_port_handle_t portId) {
     sp<AudioIoDescriptor> desc = new AudioIoDescriptor();
     ALOGV("PlaybackThread::ioConfigChanged, thread %p, event %d", this, event);
 
@@ -2518,7 +2523,10 @@
         desc->mFrameCountHAL = mFrameCount;
         desc->mLatency = latency_l();
         break;
-
+    case AUDIO_CLIENT_STARTED:
+        desc->mPatch = mPatch;
+        desc->mPortId = portId;
+        break;
     case AUDIO_OUTPUT_CLOSED:
     default:
         break;
@@ -3268,6 +3276,8 @@
         cpuStats.sample(myName);
 
         Vector< sp<EffectChain> > effectChains;
+        audio_session_t activeHapticSessionId = AUDIO_SESSION_NONE;
+        std::vector<sp<Track>> activeTracks;
 
         // If the device is AUDIO_DEVICE_OUT_BUS, check for downstream latency.
         //
@@ -3541,6 +3551,25 @@
             // during mixing and effect process as the audio buffers could be deleted
             // or modified if an effect is created or deleted
             lockEffectChains_l(effectChains);
+
+            // Determine which session to pick up haptic data.
+            // This must be done under the same lock as prepareTracks_l().
+            // TODO: Write haptic data directly to sink buffer when mixing.
+            if (mHapticChannelCount > 0 && effectChains.size() > 0) {
+                for (const auto& track : mActiveTracks) {
+                    if (track->getHapticPlaybackEnabled()) {
+                        activeHapticSessionId = track->sessionId();
+                        break;
+                    }
+                }
+            }
+
+            // Acquire a local copy of active tracks with lock (release w/o lock).
+            //
+            // Control methods on the track acquire the ThreadBase lock (e.g. start(),
+            // stop(), pause(), etc.), but the threadLoop is entitled to call audio
+            // data / buffer methods on tracks from activeTracks without the ThreadBase lock.
+            activeTracks.insert(activeTracks.end(), mActiveTracks.begin(), mActiveTracks.end());
         } // mLock scope ends
 
         if (mBytesRemaining == 0) {
@@ -3555,6 +3584,13 @@
                 threadLoop_sleepTime();
                 if (mSleepTimeUs == 0) {
                     mCurrentWriteLength = mSinkBufferSize;
+
+                    // Tally underrun frames as we are inserting 0s here.
+                    for (const auto& track : activeTracks) {
+                        if (track->mFillingUpStatus == Track::FS_ACTIVE) {
+                            track->mAudioTrackServerProxy->tallyUnderrunFrames(mNormalFrameCount);
+                        }
+                    }
                 }
             }
             // Either threadLoop_mix() or threadLoop_sleepTime() should have set
@@ -3613,20 +3649,11 @@
 
             // only process effects if we're going to write
             if (mSleepTimeUs == 0 && mType != OFFLOAD) {
-                audio_session_t activeHapticId = AUDIO_SESSION_NONE;
-                if (mHapticChannelCount > 0 && effectChains.size() > 0) {
-                    for (auto track : mActiveTracks) {
-                        if (track->getHapticPlaybackEnabled()) {
-                            activeHapticId = track->sessionId();
-                            break;
-                        }
-                    }
-                }
                 for (size_t i = 0; i < effectChains.size(); i ++) {
                     effectChains[i]->process_l();
                     // TODO: Write haptic data directly to sink buffer when mixing.
-                    if (activeHapticId != AUDIO_SESSION_NONE
-                            && activeHapticId == effectChains[i]->sessionId()) {
+                    if (activeHapticSessionId != AUDIO_SESSION_NONE
+                            && activeHapticSessionId == effectChains[i]->sessionId()) {
                         // Haptic data is active in this case, copy it directly from
                         // in buffer to out buffer.
                         const size_t audioBufferSize = mNormalFrameCount
@@ -4591,6 +4618,10 @@
 
         // process fast tracks
         if (track->isFastTrack()) {
+            LOG_ALWAYS_FATAL_IF(mFastMixer.get() == nullptr,
+                    "%s(%d): FastTrack(%d) present without FastMixer",
+                     __func__, id(), track->id());
+
             if (track->getHapticPlaybackEnabled()) {
                 noFastHapticTrack = false;
             }
@@ -7405,6 +7436,7 @@
         size_t *pFrameCount,
         audio_session_t sessionId,
         size_t *pNotificationFrameCount,
+        pid_t creatorPid,
         uid_t uid,
         audio_input_flags_t *flags,
         pid_t tid,
@@ -7542,7 +7574,7 @@
 
         track = new RecordTrack(this, client, attr, sampleRate,
                       format, channelMask, frameCount,
-                      nullptr /* buffer */, (size_t)0 /* bufferSize */, sessionId, uid,
+                      nullptr /* buffer */, (size_t)0 /* bufferSize */, sessionId, creatorPid, uid,
                       *flags, TrackBase::TYPE_DEFAULT, portId);
 
         lStatus = track->initCheck();
@@ -7650,6 +7682,8 @@
                 recordTrack->clearSyncStartEvent();
                 return status;
             }
+            sendIoConfigEvent_l(
+                AUDIO_CLIENT_STARTED, recordTrack->creatorPid(), recordTrack->portId());
         }
         // Catch up with current buffer indices if thread is already running.
         // This is what makes a new client discard all buffered data.  If the track's mRsmpInFront
@@ -8131,7 +8165,8 @@
     return String8();
 }
 
-void AudioFlinger::RecordThread::ioConfigChanged(audio_io_config_event event, pid_t pid) {
+void AudioFlinger::RecordThread::ioConfigChanged(audio_io_config_event event, pid_t pid,
+                                                 audio_port_handle_t portId) {
     sp<AudioIoDescriptor> desc = new AudioIoDescriptor();
 
     desc->mIoHandle = mId;
@@ -8148,7 +8183,10 @@
         desc->mFrameCountHAL = mFrameCount;
         desc->mLatency = 0;
         break;
-
+    case AUDIO_CLIENT_STARTED:
+        desc->mPatch = mPatch;
+        desc->mPortId = portId;
+        break;
     case AUDIO_INPUT_CLOSED:
     default:
         break;
@@ -8606,7 +8644,8 @@
 
     // Given that MmapThread::mAttr is mutable, should a MmapTrack have attributes ?
     sp<MmapTrack> track = new MmapTrack(this, mAttr, mSampleRate, mFormat, mChannelMask, mSessionId,
-                                        isOutput(), client.clientUid, client.clientPid, portId);
+                                        isOutput(), client.clientUid, client.clientPid,
+                                        IPCThreadState::self()->getCallingPid(), portId);
 
     if (isOutput()) {
         // force volume update when a new track is added
@@ -8832,7 +8871,8 @@
     return String8();
 }
 
-void AudioFlinger::MmapThread::ioConfigChanged(audio_io_config_event event, pid_t pid) {
+void AudioFlinger::MmapThread::ioConfigChanged(audio_io_config_event event, pid_t pid,
+                                               audio_port_handle_t portId __unused) {
     sp<AudioIoDescriptor> desc = new AudioIoDescriptor();
 
     desc->mIoHandle = mId;
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index 37b2d08..336c2b4 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -103,8 +103,9 @@
 
     class IoConfigEventData : public ConfigEventData {
     public:
-        IoConfigEventData(audio_io_config_event event, pid_t pid) :
-            mEvent(event), mPid(pid) {}
+        IoConfigEventData(audio_io_config_event event, pid_t pid,
+                          audio_port_handle_t portId) :
+            mEvent(event), mPid(pid), mPortId(portId) {}
 
         virtual  void dump(char *buffer, size_t size) {
             snprintf(buffer, size, "IO event: event %d\n", mEvent);
@@ -112,13 +113,14 @@
 
         const audio_io_config_event mEvent;
         const pid_t                 mPid;
+        const audio_port_handle_t   mPortId;
     };
 
     class IoConfigEvent : public ConfigEvent {
     public:
-        IoConfigEvent(audio_io_config_event event, pid_t pid) :
+        IoConfigEvent(audio_io_config_event event, pid_t pid, audio_port_handle_t portId) :
             ConfigEvent(CFG_EVENT_IO) {
-            mData = new IoConfigEventData(event, pid);
+            mData = new IoConfigEventData(event, pid, portId);
         }
         virtual ~IoConfigEvent() {}
     };
@@ -260,13 +262,16 @@
                                                     status_t& status) = 0;
     virtual     status_t    setParameters(const String8& keyValuePairs);
     virtual     String8     getParameters(const String8& keys) = 0;
-    virtual     void        ioConfigChanged(audio_io_config_event event, pid_t pid = 0) = 0;
+    virtual     void        ioConfigChanged(audio_io_config_event event, pid_t pid = 0,
+                                        audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE) = 0;
                 // sendConfigEvent_l() must be called with ThreadBase::mLock held
                 // Can temporarily release the lock if waiting for a reply from
                 // processConfigEvents_l().
                 status_t    sendConfigEvent_l(sp<ConfigEvent>& event);
-                void        sendIoConfigEvent(audio_io_config_event event, pid_t pid = 0);
-                void        sendIoConfigEvent_l(audio_io_config_event event, pid_t pid = 0);
+                void        sendIoConfigEvent(audio_io_config_event event, pid_t pid = 0,
+                                              audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE);
+                void        sendIoConfigEvent_l(audio_io_config_event event, pid_t pid = 0,
+                                            audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE);
                 void        sendPrioConfigEvent(pid_t pid, pid_t tid, int32_t prio, bool forApp);
                 void        sendPrioConfigEvent_l(pid_t pid, pid_t tid, int32_t prio, bool forApp);
                 status_t    sendSetParameterConfigEvent_l(const String8& keyValuePair);
@@ -802,6 +807,7 @@
                                 const sp<IMemory>& sharedBuffer,
                                 audio_session_t sessionId,
                                 audio_output_flags_t *flags,
+                                pid_t creatorPid,
                                 pid_t tid,
                                 uid_t uid,
                                 status_t *status /*non-NULL*/,
@@ -825,7 +831,8 @@
                                 { return android_atomic_acquire_load(&mSuspended) > 0; }
 
     virtual     String8     getParameters(const String8& keys);
-    virtual     void        ioConfigChanged(audio_io_config_event event, pid_t pid = 0);
+    virtual     void        ioConfigChanged(audio_io_config_event event, pid_t pid = 0,
+                                            audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE);
                 status_t    getRenderPosition(uint32_t *halFrames, uint32_t *dspFrames);
                 // Consider also removing and passing an explicit mMainBuffer initialization
                 // parameter to AF::PlaybackThread::Track::Track().
@@ -1540,6 +1547,7 @@
                     size_t *pFrameCount,
                     audio_session_t sessionId,
                     size_t *pNotificationFrameCount,
+                    pid_t creatorPid,
                     uid_t uid,
                     audio_input_flags_t *flags,
                     pid_t tid,
@@ -1562,7 +1570,8 @@
                                                status_t& status);
     virtual void        cacheParameters_l() {}
     virtual String8     getParameters(const String8& keys);
-    virtual void        ioConfigChanged(audio_io_config_event event, pid_t pid = 0);
+    virtual void        ioConfigChanged(audio_io_config_event event, pid_t pid = 0,
+                                        audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE);
     virtual status_t    createAudioPatch_l(const struct audio_patch *patch,
                                            audio_patch_handle_t *handle);
     virtual status_t    releaseAudioPatch_l(const audio_patch_handle_t handle);
@@ -1743,7 +1752,8 @@
     virtual     bool        checkForNewParameter_l(const String8& keyValuePair,
                                                     status_t& status);
     virtual     String8     getParameters(const String8& keys);
-    virtual     void        ioConfigChanged(audio_io_config_event event, pid_t pid = 0);
+    virtual     void        ioConfigChanged(audio_io_config_event event, pid_t pid = 0,
+                                            audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE);
                 void        readHalParameters_l();
     virtual     void        cacheParameters_l() {}
     virtual     status_t    createAudioPatch_l(const struct audio_patch *patch,
diff --git a/services/audioflinger/TrackBase.h b/services/audioflinger/TrackBase.h
index 4402d99..8f720b5 100644
--- a/services/audioflinger/TrackBase.h
+++ b/services/audioflinger/TrackBase.h
@@ -64,6 +64,7 @@
                                 void *buffer,
                                 size_t bufferSize,
                                 audio_session_t sessionId,
+                                pid_t creatorPid,
                                 uid_t uid,
                                 bool isOut,
                                 alloc_type alloc = ALLOC_CBLK,
@@ -79,6 +80,8 @@
             audio_track_cblk_t* cblk() const { return mCblk; }
             audio_session_t sessionId() const { return mSessionId; }
             uid_t       uid() const { return mUid; }
+            pid_t       creatorPid() const { return mCreatorPid; }
+
             audio_port_handle_t portId() const { return mPortId; }
     virtual status_t    setSyncEvent(const sp<SyncEvent>& event);
 
@@ -310,6 +313,8 @@
     std::atomic<bool>   mServerLatencyFromTrack{}; // latency from track or server timestamp.
     std::atomic<double> mServerLatencyMs{};        // last latency pushed from server thread.
     std::atomic<FrameTime> mKernelFrameTime{};     // last frame time on kernel side.
+    const pid_t         mCreatorPid;  // can be different from mClient->pid(), for instance
+                                      // when the track is created by NuPlayer on behalf of a client
 };
 
 // PatchProxyBufferProvider interface is implemented by PatchTrack and PatchRecord.
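
mCreatorPid records which process actually constructed the track, which can differ from the client owning the audio, as the comment above notes for NuPlayer. A standalone sketch of keeping that pid immutable on the track object; the names here are hypothetical:

    // Illustrative sketch only, hypothetical names.
    #include <sys/types.h>
    #include <unistd.h>

    class TrackInfo {
    public:
        TrackInfo(uid_t clientUid, pid_t creatorPid)
            : mClientUid(clientUid), mCreatorPid(creatorPid) {}
        pid_t creatorPid() const { return mCreatorPid; }
    private:
        const uid_t mClientUid;   // owner of the audio content
        const pid_t mCreatorPid;  // process that constructed the track, fixed at creation
    };

    // A server-side creation site captures its own pid at the moment the track is made.
    TrackInfo makeLocalTrack(uid_t uid) { return TrackInfo(uid, getpid()); }

    int main() {
        TrackInfo t = makeLocalTrack(/*uid=*/10001);
        return t.creatorPid() > 0 ? 0 : 1;
    }
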
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index 8d59431..b0817ed 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -73,6 +73,7 @@
             void *buffer,
             size_t bufferSize,
             audio_session_t sessionId,
+            pid_t creatorPid,
             uid_t clientUid,
             bool isOut,
             alloc_type alloc,
@@ -101,7 +102,8 @@
         mType(type),
         mThreadIoHandle(thread ? thread->id() : AUDIO_IO_HANDLE_NONE),
         mPortId(portId),
-        mIsInvalid(false)
+        mIsInvalid(false),
+        mCreatorPid(creatorPid)
 {
     const uid_t callingUid = IPCThreadState::self()->getCallingUid();
     if (!isAudioServerOrMediaServerUid(callingUid) || clientUid == AUDIO_UID_INVALID) {
@@ -385,18 +387,24 @@
 // static
 sp<AudioFlinger::PlaybackThread::OpPlayAudioMonitor>
 AudioFlinger::PlaybackThread::OpPlayAudioMonitor::createIfNeeded(
-            uid_t uid, audio_usage_t usage, int id, audio_stream_type_t streamType)
+            uid_t uid, const audio_attributes_t& attr, int id, audio_stream_type_t streamType)
 {
     if (isAudioServerOrRootUid(uid)) {
-        ALOGD("OpPlayAudio: not muting track:%d usage:%d root or audioserver", id, usage);
+        ALOGD("OpPlayAudio: not muting track:%d usage:%d root or audioserver", id, attr.usage);
         return nullptr;
     }
     // stream type has been filtered by audio policy to indicate whether it can be muted
     if (streamType == AUDIO_STREAM_ENFORCED_AUDIBLE) {
-        ALOGD("OpPlayAudio: not muting track:%d usage:%d ENFORCED_AUDIBLE", id, usage);
+        ALOGD("OpPlayAudio: not muting track:%d usage:%d ENFORCED_AUDIBLE", id, attr.usage);
         return nullptr;
     }
-    return new OpPlayAudioMonitor(uid, usage, id);
+    if ((attr.flags & AUDIO_FLAG_BYPASS_INTERRUPTION_POLICY)
+            == AUDIO_FLAG_BYPASS_INTERRUPTION_POLICY) {
+        ALOGD("OpPlayAudio: not muting track:%d flags %#x have FLAG_BYPASS_INTERRUPTION_POLICY",
+            id, attr.flags);
+        return nullptr;
+    }
+    return new OpPlayAudioMonitor(uid, attr.usage, id);
 }
 
 AudioFlinger::PlaybackThread::OpPlayAudioMonitor::OpPlayAudioMonitor(
@@ -485,6 +493,7 @@
             size_t bufferSize,
             const sp<IMemory>& sharedBuffer,
             audio_session_t sessionId,
+            pid_t creatorPid,
             uid_t uid,
             audio_output_flags_t flags,
             track_type type,
@@ -492,7 +501,7 @@
     :   TrackBase(thread, client, attr, sampleRate, format, channelMask, frameCount,
                   (sharedBuffer != 0) ? sharedBuffer->pointer() : buffer,
                   (sharedBuffer != 0) ? sharedBuffer->size() : bufferSize,
-                  sessionId, uid, true /*isOut*/,
+                  sessionId, creatorPid, uid, true /*isOut*/,
                   (type == TYPE_PATCH) ? ( buffer == NULL ? ALLOC_LOCAL : ALLOC_NONE) : ALLOC_CBLK,
                   type, portId),
     mFillingUpStatus(FS_INVALID),
@@ -505,7 +514,7 @@
     mPresentationCompleteFrames(0),
     mFrameMap(16 /* sink-frame-to-track-frame map memory */),
     mVolumeHandler(new media::VolumeHandler(sampleRate)),
-    mOpPlayAudioMonitor(OpPlayAudioMonitor::createIfNeeded(uid, attr.usage, id(), streamType)),
+    mOpPlayAudioMonitor(OpPlayAudioMonitor::createIfNeeded(uid, attr, id(), streamType)),
     // mSinkTimestamp
     mFastIndex(-1),
     mCachedVolume(1.0),
@@ -1543,7 +1552,7 @@
               audio_attributes_t{} /* currently unused for output track */,
               sampleRate, format, channelMask, frameCount,
               nullptr /* buffer */, (size_t)0 /* bufferSize */, nullptr /* sharedBuffer */,
-              AUDIO_SESSION_NONE, uid, AUDIO_OUTPUT_FLAG_NONE,
+              AUDIO_SESSION_NONE, getpid(), uid, AUDIO_OUTPUT_FLAG_NONE,
               TYPE_OUTPUT),
     mActive(false), mSourceThread(sourceThread)
 {
@@ -1772,7 +1781,7 @@
               audio_attributes_t{} /* currently unused for patch track */,
               sampleRate, format, channelMask, frameCount,
               buffer, bufferSize, nullptr /* sharedBuffer */,
-              AUDIO_SESSION_NONE, AID_AUDIOSERVER, flags, TYPE_PATCH),
+              AUDIO_SESSION_NONE, getpid(), AID_AUDIOSERVER, flags, TYPE_PATCH),
         PatchTrackBase(new ClientProxy(mCblk, mBuffer, frameCount, mFrameSize, true, true),
                        *playbackThread, timeout)
 {
@@ -1927,12 +1936,14 @@
             void *buffer,
             size_t bufferSize,
             audio_session_t sessionId,
+            pid_t creatorPid,
             uid_t uid,
             audio_input_flags_t flags,
             track_type type,
             audio_port_handle_t portId)
     :   TrackBase(thread, client, attr, sampleRate, format,
-                  channelMask, frameCount, buffer, bufferSize, sessionId, uid, false /*isOut*/,
+                  channelMask, frameCount, buffer, bufferSize, sessionId,
+                  creatorPid, uid, false /*isOut*/,
                   (type == TYPE_DEFAULT) ?
                           ((flags & AUDIO_INPUT_FLAG_FAST) ? ALLOC_PIPE : ALLOC_CBLK) :
                           ((buffer == NULL) ? ALLOC_LOCAL : ALLOC_NONE),
@@ -2242,7 +2253,7 @@
     :   RecordTrack(recordThread, NULL,
                 audio_attributes_t{} /* currently unused for patch track */,
                 sampleRate, format, channelMask, frameCount,
-                buffer, bufferSize, AUDIO_SESSION_NONE, AID_AUDIOSERVER,
+                buffer, bufferSize, AUDIO_SESSION_NONE, getpid(), AID_AUDIOSERVER,
                 flags, TYPE_PATCH),
         PatchTrackBase(new ClientProxy(mCblk, mBuffer, frameCount, mFrameSize, false, true),
                        *recordThread, timeout)
@@ -2310,11 +2321,12 @@
         bool isOut,
         uid_t uid,
         pid_t pid,
+        pid_t creatorPid,
         audio_port_handle_t portId)
     :   TrackBase(thread, NULL, attr, sampleRate, format,
                   channelMask, (size_t)0 /* frameCount */,
                   nullptr /* buffer */, (size_t)0 /* bufferSize */,
-                  sessionId, uid, isOut,
+                  sessionId, creatorPid, uid, isOut,
                   ALLOC_NONE,
                   TYPE_DEFAULT, portId),
         mPid(pid), mSilenced(false), mSilencedNotified(false)
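
OpPlayAudioMonitor::createIfNeeded() above declines to create a monitor when the attributes carry AUDIO_FLAG_BYPASS_INTERRUPTION_POLICY, testing the flag with the (flags & F) == F form. A standalone sketch with made-up flag values; for a single-bit flag this is equivalent to a non-zero test, but it stays correct if the flag ever becomes a combination of bits:

    // Illustrative sketch only, made-up flag values.
    #include <cstdio>

    enum : unsigned {
        FLAG_BYPASS_MUTE                = 1u << 0,
        FLAG_BYPASS_INTERRUPTION_POLICY = 1u << 1,
    };

    // true only when every bit of |mask| is present in |flags|
    bool hasAllBits(unsigned flags, unsigned mask) { return (flags & mask) == mask; }

    int main() {
        unsigned attrFlags = FLAG_BYPASS_INTERRUPTION_POLICY;
        if (hasAllBits(attrFlags, FLAG_BYPASS_INTERRUPTION_POLICY)) {
            std::puts("skip creating the play-audio monitor for this track");
        }
        return 0;
    }
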
diff --git a/services/audiopolicy/AudioPolicyInterface.h b/services/audiopolicy/AudioPolicyInterface.h
index 49937f0..30f29d6 100644
--- a/services/audiopolicy/AudioPolicyInterface.h
+++ b/services/audiopolicy/AudioPolicyInterface.h
@@ -258,7 +258,7 @@
     virtual status_t getHwOffloadEncodingFormatsSupportedForA2DP(
                 std::vector<audio_format_t> *formats) = 0;
 
-    virtual void     setAppState(uid_t uid, app_state_t state);
+    virtual void     setAppState(uid_t uid, app_state_t state) = 0;
 
     virtual status_t listAudioProductStrategies(AudioProductStrategyVector &strategies) = 0;
 
diff --git a/services/audiopolicy/audio_policy.conf b/services/audiopolicy/audio_policy.conf
deleted file mode 100644
index 9b83fef..0000000
--- a/services/audiopolicy/audio_policy.conf
+++ /dev/null
@@ -1,145 +0,0 @@
-#
-# Template audio policy configuration file
-#
-
-# Global configuration section:
-# - before audio HAL version 3.0:
-#   lists input and output devices always present on the device
-#   as well as the output device selected by default.
-#   Devices are designated by a string that corresponds to the enum in audio.h
-#
-#  global_configuration {
-#    attached_output_devices AUDIO_DEVICE_OUT_SPEAKER
-#    default_output_device AUDIO_DEVICE_OUT_SPEAKER
-#    attached_input_devices AUDIO_DEVICE_IN_BUILTIN_MIC|AUDIO_DEVICE_IN_REMOTE_SUBMIX
-#  }
-#
-# - after and including audio HAL 3.0 the global_configuration section is included in each
-#   hardware module section.
-#   it also includes the audio HAL version of this hw module:
-#  global_configuration {
-#    ...
-#     audio_hal_version <major.minor>  # audio HAL version in e.g. 3.0
-#  }
-#   other attributes (attached devices, default device) have to be included in the
-#   global_configuration section of each hardware module
-
-
-# audio hardware module section: contains descriptors for all audio hw modules present on the
-# device. Each hw module node is named after the corresponding hw module library base name.
-# For instance, "primary" corresponds to audio.primary.<device>.so.
-# The "primary" module is mandatory and must include at least one output with
-# AUDIO_OUTPUT_FLAG_PRIMARY flag.
-# Each module descriptor contains one or more output profile descriptors and zero or more
-# input profile descriptors. Each profile lists all the parameters supported by a given output
-# or input stream category.
-# The "channel_masks", "formats", "devices" and "flags" are specified using strings corresponding
-# to enums in audio.h and audio_policy.h. They are concatenated by use of "|" without space or "\n".
-#
-# For audio HAL version posterior to 3.0 the following sections or sub sections can be present in
-# a hw module section:
-# - A "global_configuration" section: see above
-# - Optionally a "devices" section:
-#   This section contains descriptors for audio devices with attributes like an address or a
-#   gain controller. The syntax for the devices section and device descriptor is as follows:
-#    devices {
-#      <device name> {              # <device name>: any string without space
-#        type <device type>         # <device type> e.g. AUDIO_DEVICE_OUT_SPEAKER
-#        address <address>          # optional: device address, char string less than 64 in length
-#      }
-#    }
-# - one or more "gains" sections can be present in a device descriptor section.
-#   If present, they describe the capabilities of gain controllers attached to this input or
-#   output device. e.g. :
-#   <device name> {                  # <device name>: any string without space
-#     type <device type>             # <device type> e.g. AUDIO_DEVICE_OUT_SPEAKER
-#     address <address>              # optional: device address, char string less than 64 in length
-#     gains {
-#       <gain name> {
-#         mode <gain modes supported>              # e.g. AUDIO_GAIN_MODE_CHANNELS
-#         channel_mask <controlled channels>       # needed if mode AUDIO_GAIN_MODE_CHANNELS
-#         min_value_mB <min value in millibel>
-#         max_value_mB <max value in millibel>
-#         default_value_mB <default value in millibel>
-#         step_value_mB <step value in millibel>
-#         min_ramp_ms <min duration in ms>         # needed if mode AUDIO_GAIN_MODE_RAMP
-#         max_ramp_ms <max duration ms>            # needed if mode AUDIO_GAIN_MODE_RAMP
-#       }
-#     }
-#   }
-# - when a device descriptor is present, output and input profiles can refer to this device by
-# its name in their "devices" section instead of specifying a device type. e.g. :
-#   outputs {
-#     primary {
-#       sampling_rates 44100
-#       channel_masks AUDIO_CHANNEL_OUT_STEREO
-#       formats AUDIO_FORMAT_PCM_16_BIT
-#       devices <device name>
-#       flags AUDIO_OUTPUT_FLAG_PRIMARY
-#     }
-#   }
-# sample audio_policy.conf file below
-
-audio_hw_modules {
-  primary {
-    global_configuration {
-      attached_output_devices AUDIO_DEVICE_OUT_SPEAKER
-      default_output_device AUDIO_DEVICE_OUT_SPEAKER
-      attached_input_devices AUDIO_DEVICE_IN_BUILTIN_MIC
-      audio_hal_version 3.0
-    }
-    devices {
-      speaker {
-        type AUDIO_DEVICE_OUT_SPEAKER
-        gains {
-          gain_1 {
-            mode AUDIO_GAIN_MODE_JOINT
-            min_value_mB -8400
-            max_value_mB 4000
-            default_value_mB 0
-            step_value_mB 100
-          }
-        }
-      }
-    }
-    outputs {
-      primary {
-        sampling_rates 48000
-        channel_masks AUDIO_CHANNEL_OUT_STEREO
-        formats AUDIO_FORMAT_PCM_16_BIT
-        devices speaker
-        flags AUDIO_OUTPUT_FLAG_PRIMARY
-      }
-    }
-    inputs {
-      primary {
-        sampling_rates 8000|16000
-        channel_masks AUDIO_CHANNEL_IN_MONO
-        formats AUDIO_FORMAT_PCM_16_BIT
-        devices AUDIO_DEVICE_IN_BUILTIN_MIC
-      }
-    }
-  }
-  r_submix {
-    global_configuration {
-      attached_input_devices AUDIO_DEVICE_IN_REMOTE_SUBMIX
-      audio_hal_version 2.0
-    }
-    outputs {
-      submix {
-        sampling_rates 48000
-        channel_masks AUDIO_CHANNEL_OUT_STEREO
-        formats AUDIO_FORMAT_PCM_16_BIT
-        devices AUDIO_DEVICE_OUT_REMOTE_SUBMIX
-      }
-    }
-    inputs {
-      submix {
-        sampling_rates 48000
-        channel_masks AUDIO_CHANNEL_IN_STEREO
-        formats AUDIO_FORMAT_PCM_16_BIT
-        devices AUDIO_DEVICE_IN_REMOTE_SUBMIX
-      }
-    }
-  }
-}
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
index f2b51d9..12b5e7d 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
@@ -89,6 +89,16 @@
 
     status_t getInputMixForAttr(audio_attributes_t attr, sp<AudioPolicyMix> *policyMix);
 
+    /**
+     * Updates the mix rules in order to make streams associated with the given uid
+     * be routed to the given audio devices.
+     * @param uid the uid for which the device affinity is set
+     * @param devices the vector of devices that this uid may be routed to. A typical
+     *    use is to pass the devices associated with a given zone in a multi-zone setup.
+     * @return NO_ERROR if the update was successful, INVALID_OPERATION otherwise.
+     *    An example of failure is when there are already rules in place to restrict
+     *    a mix to the given uid (i.e. when a MATCH_UID rule was set for it).
+     */
     status_t setUidDeviceAffinities(uid_t uid, const Vector<AudioDeviceTypeAddr>& devices);
     status_t removeUidDeviceAffinities(uid_t uid);
     status_t getDevicesForUid(uid_t uid, Vector<AudioDeviceTypeAddr>& devices) const;
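
The comment above describes uid-device affinities: streams of a given uid are only routed to the listed devices, a typical use being one zone of a multi-zone setup, and the update fails if a mix already has a MATCH_UID rule for that uid. A standalone model of that check and of the exclude-rule update; the types and device names are made up, not the AOSP ones:

    // Illustrative sketch only, simplified stand-in types.
    #include <set>
    #include <string>
    #include <vector>

    enum Status { STATUS_OK, STATUS_INVALID_OPERATION };

    struct Mix {
        std::set<int> matchUids;     // uids explicitly matched by this mix
        std::set<int> excludeUids;   // uids explicitly excluded from this mix
        std::string   device;        // device this mix renders to
    };

    Status setUidDeviceAffinity(std::vector<Mix>& mixes, int uid,
                                const std::set<std::string>& allowedDevices) {
        for (const Mix& m : mixes) {
            if (m.matchUids.count(uid)) return STATUS_INVALID_OPERATION;  // contradictory rule
        }
        for (Mix& m : mixes) {
            if (!allowedDevices.count(m.device)) m.excludeUids.insert(uid);  // keep the uid out
            else m.excludeUids.erase(uid);                                   // uid may use it
        }
        return STATUS_OK;
    }

    int main() {
        std::vector<Mix> mixes = { {{}, {}, "BUS00_MEDIA"}, {{}, {}, "SPEAKER"} };
        return setUidDeviceAffinity(mixes, /*uid=*/10123, {"BUS00_MEDIA"}) == STATUS_OK ? 0 : 1;
    }
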
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioPort.h b/services/audiopolicy/common/managerdefinitions/include/AudioPort.h
index 2d182bd..d906f11 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioPort.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioPort.h
@@ -112,6 +112,9 @@
     static bool isBetterFormatMatch(audio_format_t newFormat,
                                         audio_format_t currentFormat,
                                         audio_format_t targetFormat);
+    static uint32_t formatDistance(audio_format_t format1,
+                                   audio_format_t format2);
+    static const uint32_t kFormatDistanceMax = 4;
 
     audio_module_handle_t getModuleHandle() const;
     uint32_t getModuleVersionMajor() const;
diff --git a/services/audiopolicy/common/managerdefinitions/include/audio_policy_conf.h b/services/audiopolicy/common/managerdefinitions/include/audio_policy_conf.h
deleted file mode 100644
index 0a27947..0000000
--- a/services/audiopolicy/common/managerdefinitions/include/audio_policy_conf.h
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#pragma once
-
-
-/////////////////////////////////////////////////
-//      Definitions for audio policy configuration file (audio_policy.conf)
-/////////////////////////////////////////////////
-
-#define AUDIO_HARDWARE_MODULE_ID_MAX_LEN 32
-
-#define AUDIO_POLICY_CONFIG_FILE "/system/etc/audio_policy.conf"
-#define AUDIO_POLICY_VENDOR_CONFIG_FILE "/vendor/etc/audio_policy.conf"
-
-// global configuration
-#define GLOBAL_CONFIG_TAG "global_configuration"
-
-#define ATTACHED_OUTPUT_DEVICES_TAG "attached_output_devices"
-#define DEFAULT_OUTPUT_DEVICE_TAG "default_output_device"
-#define ATTACHED_INPUT_DEVICES_TAG "attached_input_devices"
-#define SPEAKER_DRC_ENABLED_TAG "speaker_drc_enabled"
-#define AUDIO_HAL_VERSION_TAG "audio_hal_version"
-
-// hw modules descriptions
-#define AUDIO_HW_MODULE_TAG "audio_hw_modules"
-
-#define OUTPUTS_TAG "outputs"
-#define INPUTS_TAG "inputs"
-
-#define SAMPLING_RATES_TAG "sampling_rates"
-#define FORMATS_TAG "formats"
-#define CHANNELS_TAG "channel_masks"
-#define DEVICES_TAG "devices"
-#define FLAGS_TAG "flags"
-
-#define APM_DEVICES_TAG "devices"
-#define APM_DEVICE_TYPE "type"
-#define APM_DEVICE_ADDRESS "address"
-
-#define MIXERS_TAG "mixers"
-#define MIXER_TYPE "type"
-#define MIXER_TYPE_MUX "mux"
-#define MIXER_TYPE_MIX "mix"
-
-#define GAINS_TAG "gains"
-#define GAIN_MODE "mode"
-#define GAIN_CHANNELS "channel_mask"
-#define GAIN_MIN_VALUE "min_value_mB"
-#define GAIN_MAX_VALUE "max_value_mB"
-#define GAIN_DEFAULT_VALUE "default_value_mB"
-#define GAIN_STEP_VALUE "step_value_mB"
-#define GAIN_MIN_RAMP_MS "min_ramp_ms"
-#define GAIN_MAX_RAMP_MS "max_ramp_ms"
-
-#define DYNAMIC_VALUE_TAG "dynamic" // special value for "channel_masks", "sampling_rates" and
-                                    // "formats" in outputs descriptors indicating that supported
-                                    // values should be queried after opening the output.
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
index 26bb354..98a7800 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
@@ -400,13 +400,29 @@
 
 status_t AudioPolicyMixCollection::setUidDeviceAffinities(uid_t uid,
         const Vector<AudioDeviceTypeAddr>& devices) {
+    // verify feasibility: for each player mix: if it already contains a
+    //    "match uid" rule for this uid, return an error
+    //    (adding a uid-device affinity would result in contradictory rules)
+    for (size_t i = 0; i < size(); i++) {
+        const AudioPolicyMix* mix = valueAt(i).get();
+        if (!mix->isDeviceAffinityCompatible()) {
+            continue;
+        }
+        if (mix->hasUidRule(true /*match*/, uid)) {
+            return INVALID_OPERATION;
+        }
+    }
+
     // remove existing rules for this uid
     removeUidDeviceAffinities(uid);
 
-    // for each player mix: add a rule to match or exclude the uid based on the device
+    // for each player mix:
+    //   IF    device is not a target for the mix,
+    //     AND it doesn't have a "match uid" rule
+    //   THEN add a rule to exclude the uid
     for (size_t i = 0; i < size(); i++) {
         const AudioPolicyMix *mix = valueAt(i).get();
-        if (mix->mMixType != MIX_TYPE_PLAYERS) {
+        if (!mix->isDeviceAffinityCompatible()) {
             continue;
         }
         // check if this mix goes to a device in the list of devices
@@ -418,12 +434,14 @@
                 break;
             }
         }
-        if (deviceMatch) {
-            mix->setMatchUid(uid);
-        } else {
+        if (!deviceMatch && !mix->hasMatchUidRule()) {
             // this mix doesn't go to one of the listed devices for the given uid,
+            // and it doesn't already restrict playback to a specific uid,
             // modify its rules to exclude the uid
-            mix->setExcludeUid(uid);
+            if (!mix->hasUidRule(false /*match*/, uid)) {
+                // no need to do it again if uid is already excluded
+                mix->setExcludeUid(uid);
+            }
         }
     }
 
@@ -435,14 +453,15 @@
     for (size_t i = 0; i < size(); i++) {
         bool foundUidRule = false;
         const AudioPolicyMix *mix = valueAt(i).get();
-        if (mix->mMixType != MIX_TYPE_PLAYERS) {
+        if (!mix->isDeviceAffinityCompatible()) {
             continue;
         }
         std::vector<size_t> criteriaToRemove;
         for (size_t j = 0; j < mix->mCriteria.size(); j++) {
             const uint32_t rule = mix->mCriteria[j].mRule;
-            // is this rule affecting the uid?
-            if ((rule == RULE_EXCLUDE_UID || rule == RULE_MATCH_UID)
+            // is this rule excluding the uid? (not considering uid match rules
+            // as those are not used for uid-device affinity)
+            if (rule == RULE_EXCLUDE_UID
                     && uid == mix->mCriteria[j].mValue.mUid) {
                 foundUidRule = true;
                 criteriaToRemove.insert(criteriaToRemove.begin(), j);
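
In removeUidDeviceAffinities() above, indices of criteria to delete are inserted at the front of criteriaToRemove, so they end up in descending order and can be erased without shifting the indices still pending. A standalone illustration of that erase-by-index pattern:

    // Illustrative sketch only: erase vector elements by index from the back.
    #include <cstdio>
    #include <vector>

    int main() {
        std::vector<int> rules = {10, 20, 30, 40, 50};
        std::vector<size_t> toRemove;
        for (size_t j = 0; j < rules.size(); ++j) {
            if (rules[j] % 20 == 0) {
                toRemove.insert(toRemove.begin(), j);   // keep indices in descending order
            }
        }
        for (size_t j : toRemove) {
            rules.erase(rules.begin() + j);             // later indices were erased first
        }
        for (int r : rules) std::printf("%d ", r);      // prints: 10 30 50
        std::puts("");
        return 0;
    }
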
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioPort.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioPort.cpp
index a66c695..c11490a 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioPort.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioPort.cpp
@@ -282,30 +282,25 @@
     return index1 - index2;
 }
 
+uint32_t AudioPort::formatDistance(audio_format_t format1, audio_format_t format2)
+{
+    if (format1 == format2) {
+        return 0;
+    }
+    if (format1 == AUDIO_FORMAT_INVALID || format2 == AUDIO_FORMAT_INVALID) {
+        return kFormatDistanceMax;
+    }
+    int diffBytes = (int)audio_bytes_per_sample(format1) -
+            audio_bytes_per_sample(format2);
+
+    return abs(diffBytes);
+}
+
 bool AudioPort::isBetterFormatMatch(audio_format_t newFormat,
                                     audio_format_t currentFormat,
                                     audio_format_t targetFormat)
 {
-    if (newFormat == currentFormat) {
-        return false;
-    }
-    if (currentFormat == AUDIO_FORMAT_INVALID) {
-        return true;
-    }
-    if (newFormat == targetFormat) {
-        return true;
-    }
-    int currentDiffBytes = (int)audio_bytes_per_sample(targetFormat) -
-            audio_bytes_per_sample(currentFormat);
-    int newDiffBytes = (int)audio_bytes_per_sample(targetFormat) -
-            audio_bytes_per_sample(newFormat);
-
-    if (abs(newDiffBytes) < abs(currentDiffBytes)) {
-        return true;
-    } else if (abs(newDiffBytes) == abs(currentDiffBytes)) {
-        return (newDiffBytes >= 0);
-    }
-    return false;
+    return formatDistance(newFormat, targetFormat) < formatDistance(currentFormat, targetFormat);
 }
 
 void AudioPort::pickAudioProfile(uint32_t &samplingRate,
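
isBetterFormatMatch() above is now expressed through formatDistance(): the distance between two linear PCM formats is the difference in bytes per sample, with AUDIO_FORMAT_INVALID treated as maximally distant (kFormatDistanceMax). A standalone model with a worked comparison; the format table is made up:

    // Illustrative sketch only, simplified format model.
    #include <cstdio>
    #include <cstdlib>

    constexpr unsigned kDistanceMax = 4;

    struct Format { const char* name; unsigned bytesPerSample; };   // 0 means invalid

    unsigned formatDistance(const Format& a, const Format& b) {
        if (a.bytesPerSample == b.bytesPerSample) return 0;
        if (a.bytesPerSample == 0 || b.bytesPerSample == 0) return kDistanceMax;
        return static_cast<unsigned>(std::abs(
                static_cast<int>(a.bytesPerSample) - static_cast<int>(b.bytesPerSample)));
    }

    int main() {
        const Format target = {"PCM_FLOAT", 4};
        const Format pcm16  = {"PCM_16_BIT", 2};
        const Format pcm32  = {"PCM_32_BIT", 4};
        // distance(pcm16, target) == 2, distance(pcm32, target) == 0: pcm32 is the better match
        std::printf("%u %u\n", formatDistance(pcm16, target), formatDistance(pcm32, target));
        return 0;
    }
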
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioProfile.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioProfile.cpp
index a645e02..69d6b0c 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioProfile.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioProfile.cpp
@@ -178,6 +178,7 @@
     const bool isRecordThread = portType == AUDIO_PORT_TYPE_MIX && portRole == AUDIO_PORT_ROLE_SINK;
     const bool isIndex = audio_channel_mask_get_representation(channelMask)
             == AUDIO_CHANNEL_REPRESENTATION_INDEX;
+    const uint32_t channelCount = audio_channel_count_from_in_mask(channelMask);
     int bestMatch = 0;
     for (size_t i = 0; i < mChannelMasks.size(); i ++) {
         audio_channel_mask_t supported = mChannelMasks[i];
@@ -201,11 +202,15 @@
             // OR
             // match score += 100 if the channel mask representations match
             // match score += number of channels matched.
+            // match score += 100 if the channel mask representations DO NOT match
+            //   but the supported mask is positional with no more than 2 channels.
+            //   This follows the audio HAL convention of not listing index masks
+            //   for 2 channels or fewer.
             //
             // If there are no matched channels, the mask may still be accepted
             // but the playback or record will be silent.
             const bool isSupportedIndex = (audio_channel_mask_get_representation(supported)
                     == AUDIO_CHANNEL_REPRESENTATION_INDEX);
+            const uint32_t supportedChannelCount = audio_channel_count_from_in_mask(supported);
             int match;
             if (isIndex && isSupportedIndex) {
                 // index equivalence
@@ -213,13 +218,14 @@
                         audio_channel_mask_get_bits(channelMask)
                             & audio_channel_mask_get_bits(supported));
             } else if (isIndex && !isSupportedIndex) {
-                const uint32_t equivalentBits =
-                        (1 << audio_channel_count_from_in_mask(supported)) - 1 ;
+                const uint32_t equivalentBits = (1 << supportedChannelCount) - 1 ;
                 match = __builtin_popcount(
                         audio_channel_mask_get_bits(channelMask) & equivalentBits);
+                if (supportedChannelCount <= FCC_2) {
+                    match += 100;
+                }
             } else if (!isIndex && isSupportedIndex) {
-                const uint32_t equivalentBits =
-                        (1 << audio_channel_count_from_in_mask(channelMask)) - 1;
+                const uint32_t equivalentBits = (1 << channelCount) - 1;
                 match = __builtin_popcount(
                         equivalentBits & audio_channel_mask_get_bits(supported));
             } else {
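
The scoring above favors supported channel masks that share the most channels with the requested mask and adds a large bonus when the representations are compatible. A simplified standalone sketch of that idea; it leaves out the index/positional bit translation and the bonus for small positional masks:

    // Illustrative sketch only, simplified scoring.
    #include <cstdint>
    #include <cstdio>

    struct Mask { uint32_t bits; bool isIndex; };

    int matchScore(const Mask& requested, const Mask& supported) {
        int score = __builtin_popcount(requested.bits & supported.bits);  // channels in common
        if (requested.isIndex == supported.isIndex) {
            score += 100;                     // same representation is strongly preferred
        }
        return score;
    }

    int main() {
        const Mask requested = {0x3, /*isIndex=*/true};               // 2-channel index mask
        const Mask candidates[] = {{0x3, true}, {0x3, false}, {0xF, true}};
        int best = -1, bestIdx = -1;
        for (int i = 0; i < 3; ++i) {
            const int s = matchScore(requested, candidates[i]);
            if (s > best) { best = s; bestIdx = i; }
        }
        std::printf("best candidate %d with score %d\n", bestIdx, best);  // candidate 0, 102
        return 0;
    }
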
diff --git a/services/audiopolicy/engine/common/src/EngineBase.cpp b/services/audiopolicy/engine/common/src/EngineBase.cpp
index 530a2e4..07a7e65 100644
--- a/services/audiopolicy/engine/common/src/EngineBase.cpp
+++ b/services/audiopolicy/engine/common/src/EngineBase.cpp
@@ -70,20 +70,7 @@
 
 audio_stream_type_t EngineBase::getStreamTypeForAttributes(const audio_attributes_t &attr) const
 {
-    audio_stream_type_t engineStream = mProductStrategies.getStreamTypeForAttributes(attr);
-    // ensure the audibility flag for sonification is honored for stream types
-    // Note this is typically implemented in the product strategy configuration files, but is
-    //   duplicated here for safety.
-    if (attr.usage == AUDIO_USAGE_ASSISTANCE_SONIFICATION
-            && ((attr.flags & AUDIO_FLAG_AUDIBILITY_ENFORCED) != 0)) {
-        engineStream = AUDIO_STREAM_ENFORCED_AUDIBLE;
-    }
-    // ensure the ENFORCED_AUDIBLE stream type reflects the "force use" setting:
-    if ((getForceUse(AUDIO_POLICY_FORCE_FOR_SYSTEM) != AUDIO_POLICY_FORCE_SYSTEM_ENFORCED)
-            && (engineStream == AUDIO_STREAM_ENFORCED_AUDIBLE)) {
-        return AUDIO_STREAM_SYSTEM;
-    }
-    return engineStream;
+    return mProductStrategies.getStreamTypeForAttributes(attr);
 }
 
 audio_attributes_t EngineBase::getAttributesForStreamType(audio_stream_type_t stream) const
diff --git a/services/audiopolicy/managerdefault/Android.mk b/services/audiopolicy/managerdefault/Android.mk
index 684fc9f..c5921c3 100644
--- a/services/audiopolicy/managerdefault/Android.mk
+++ b/services/audiopolicy/managerdefault/Android.mk
@@ -15,10 +15,6 @@
 
 ifeq ($(USE_CONFIGURABLE_AUDIO_POLICY), 1)
 
-ifneq ($(USE_XML_AUDIO_POLICY_CONF), 1)
-$(error Configurable policy does not support legacy conf file)
-endif #ifneq ($(USE_XML_AUDIO_POLICY_CONF), 1)
-
 LOCAL_SHARED_LIBRARIES += libaudiopolicyengineconfigurable
 
 else
@@ -43,10 +39,6 @@
 
 LOCAL_SHARED_LIBRARIES += libbinder libhidlbase libxml2
 
-ifeq ($(USE_XML_AUDIO_POLICY_CONF), 1)
-LOCAL_CFLAGS += -DUSE_XML_AUDIO_POLICY_CONF
-endif #ifeq ($(USE_XML_AUDIO_POLICY_CONF), 1)
-
 LOCAL_CFLAGS += -Wall -Werror
 
 LOCAL_MULTILIB := $(AUDIOSERVER_MULTILIB)
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index c0ca440..c48742d 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -36,12 +36,12 @@
 #define AUDIO_POLICY_BLUETOOTH_LEGACY_HAL_XML_CONFIG_FILE_NAME \
         "audio_policy_configuration_bluetooth_legacy_hal.xml"
 
+#include <algorithm>
 #include <inttypes.h>
 #include <math.h>
 #include <set>
 #include <unordered_set>
 #include <vector>
-
 #include <AudioPolicyManagerInterface.h>
 #include <AudioPolicyEngineInstance.h>
 #include <cutils/properties.h>
@@ -50,7 +50,6 @@
 #include <private/android_filesystem_config.h>
 #include <soundtrigger/SoundTrigger.h>
 #include <system/audio.h>
-#include <audio_policy_conf.h>
 #include "AudioPolicyManager.h"
 #include <Serializer.h>
 #include "TypeConverter.h"
@@ -592,7 +591,7 @@
                     AUDIO_DEVICE_OUT_TELEPHONY_TX, String8(), AUDIO_FORMAT_DEFAULT);
     SortedVector<audio_io_handle_t> outputs =
             getOutputsForDevices(DeviceVector(outputDevice), mOutputs);
-    audio_io_handle_t output = selectOutput(outputs, AUDIO_OUTPUT_FLAG_NONE, AUDIO_FORMAT_INVALID);
+    const audio_io_handle_t output = selectOutput(outputs);
     // request to reuse existing output stream if one is already opened to reach the target device
     if (output != AUDIO_IO_HANDLE_NONE) {
         sp<AudioOutputDescriptor> outputDesc = mOutputs.valueFor(output);
@@ -777,6 +776,12 @@
     // check for device and output changes triggered by new force usage
     checkForDeviceAndOutputChanges();
 
+    // force client reconnection to reevaluate flag AUDIO_FLAG_AUDIBILITY_ENFORCED
+    if (usage == AUDIO_POLICY_FORCE_FOR_SYSTEM) {
+        mpClientInterface->invalidateStream(AUDIO_STREAM_SYSTEM);
+        mpClientInterface->invalidateStream(AUDIO_STREAM_ENFORCED_AUDIBLE);
+    }
+
     //FIXME: workaround for truncated touch sounds
     // to be removed when the problem is handled by system UI
     uint32_t delayMs = 0;
@@ -883,7 +888,7 @@
     // and AudioSystem::getOutputSamplingRate().
 
     SortedVector<audio_io_handle_t> outputs = getOutputsForDevices(devices, mOutputs);
-    audio_io_handle_t output = selectOutput(outputs, AUDIO_OUTPUT_FLAG_NONE, AUDIO_FORMAT_INVALID);
+    const audio_io_handle_t output = selectOutput(outputs);
 
     ALOGV("getOutput() stream %d selected devices %s, output %d", stream,
           devices.toString().c_str(), output);
@@ -910,6 +915,13 @@
         }
         *dstAttr = mEngine->getAttributesForStreamType(srcStream);
     }
+
+    // Only honor audibility enforced when required. The client will be
+    // forced to reconnect if the forced usage changes.
+    if (mEngine->getForceUse(AUDIO_POLICY_FORCE_FOR_SYSTEM) != AUDIO_POLICY_FORCE_SYSTEM_ENFORCED) {
+        dstAttr->flags &= ~AUDIO_FLAG_AUDIBILITY_ENFORCED;
+    }
+
     return NO_ERROR;
 }
 
@@ -1019,7 +1031,7 @@
     }
     if (*output == AUDIO_IO_HANDLE_NONE) {
         *output = getOutputForDevices(outputDevices, session, *stream, config,
-                flags, attr->flags & AUDIO_FLAG_MUTE_HAPTIC);
+                flags, resultAttr->flags & AUDIO_FLAG_MUTE_HAPTIC);
     }
     if (*output == AUDIO_IO_HANDLE_NONE) {
         return INVALID_OPERATION;
@@ -1430,108 +1442,125 @@
                                                        audio_channel_mask_t channelMask,
                                                        uint32_t samplingRate)
 {
+    LOG_ALWAYS_FATAL_IF(!(format == AUDIO_FORMAT_INVALID || audio_is_linear_pcm(format)),
+        "%s called with format %#x", __func__, format);
+
+    // Flags disqualifying an output: the match must happen before calling selectOutput()
+    static const audio_output_flags_t kExcludedFlags = (audio_output_flags_t)
+        (AUDIO_OUTPUT_FLAG_HW_AV_SYNC | AUDIO_OUTPUT_FLAG_MMAP_NOIRQ | AUDIO_OUTPUT_FLAG_DIRECT);
+
+    // Flags expressing a functional request: must be honored in priority over
+    // other criteria
+    static const audio_output_flags_t kFunctionalFlags = (audio_output_flags_t)
+        (AUDIO_OUTPUT_FLAG_VOIP_RX | AUDIO_OUTPUT_FLAG_INCALL_MUSIC |
+            AUDIO_OUTPUT_FLAG_TTS | AUDIO_OUTPUT_FLAG_DIRECT_PCM);
+    // Flags expressing a performance request: have lower priority than serving
+    // requested sampling rate or channel mask
+    static const audio_output_flags_t kPerformanceFlags = (audio_output_flags_t)
+        (AUDIO_OUTPUT_FLAG_FAST | AUDIO_OUTPUT_FLAG_DEEP_BUFFER |
+            AUDIO_OUTPUT_FLAG_RAW | AUDIO_OUTPUT_FLAG_SYNC);
+
+    const audio_output_flags_t functionalFlags =
+        (audio_output_flags_t)(flags & kFunctionalFlags);
+    const audio_output_flags_t performanceFlags =
+        (audio_output_flags_t)(flags & kPerformanceFlags);
+
+    audio_io_handle_t bestOutput = (outputs.size() == 0) ? AUDIO_IO_HANDLE_NONE : outputs[0];
+
     // select one output among several that provide a path to a particular device or set of
     // devices (the list was previously build by getOutputsForDevices()).
     // The priority is as follows:
     // 1: the output supporting haptic playback when requesting haptic playback
-    // 2: the output with the highest number of requested policy flags
-    // 3: the output with the bit depth the closest to the requested one
-    // 4: the primary output
-    // 5: the first output in the list
+    // 2: the output with the highest number of requested functional flags
+    // 3: the output supporting the exact channel mask
+    // 4: the output with a higher channel count than requested
+    // 5: the output with a higher sampling rate than requested
+    // 6: the output with the highest number of requested performance flags
+    // 7: the output with the bit depth the closest to the requested one
+    // 8: the primary output
+    // 9: the first output in the list
 
-    if (outputs.size() == 0) {
-        return AUDIO_IO_HANDLE_NONE;
-    }
-    if (outputs.size() == 1) {
-        return outputs[0];
-    }
+    // matching criteria values in priority order for best matching output so far
+    std::vector<uint32_t> bestMatchCriteria(8, 0);
 
-    int maxCommonFlags = 0;
-    const size_t hapticChannelCount = audio_channel_count_from_out_mask(
-            channelMask & AUDIO_CHANNEL_HAPTIC_ALL);
-    audio_io_handle_t outputForFlags = AUDIO_IO_HANDLE_NONE;
-    audio_io_handle_t outputForPrimary = AUDIO_IO_HANDLE_NONE;
-    audio_io_handle_t outputForFormat = AUDIO_IO_HANDLE_NONE;
-    audio_format_t bestFormat = AUDIO_FORMAT_INVALID;
-    audio_format_t bestFormatForFlags = AUDIO_FORMAT_INVALID;
-
-    // Flags which must be present on both the request and the selected output
-    static const audio_output_flags_t kMandatedFlags = (audio_output_flags_t)
-        (AUDIO_OUTPUT_FLAG_HW_AV_SYNC | AUDIO_OUTPUT_FLAG_MMAP_NOIRQ);
+    const uint32_t channelCount = audio_channel_count_from_out_mask(channelMask);
+    const uint32_t hapticChannelCount = audio_channel_count_from_out_mask(
+        channelMask & AUDIO_CHANNEL_HAPTIC_ALL);
 
     for (audio_io_handle_t output : outputs) {
         sp<SwAudioOutputDescriptor> outputDesc = mOutputs.valueFor(output);
-        if (!outputDesc->isDuplicated()) {
-            if (outputDesc->mFlags & AUDIO_OUTPUT_FLAG_DIRECT) {
-                continue;
-            }
-            // If haptic channel is specified, use the haptic output if present.
-            // When using haptic output, same audio format and sample rate are required.
-            if (hapticChannelCount > 0) {
-                // If haptic channel is specified, use the first output that
-                // support haptic playback.
-                if (audio_channel_count_from_out_mask(
-                        outputDesc->mChannelMask & AUDIO_CHANNEL_HAPTIC_ALL) >= hapticChannelCount
-                        && format == outputDesc->mFormat
-                        && samplingRate == outputDesc->mSamplingRate) {
-                    return output;
-                }
-            } else {
-                // When haptic channel is not specified, skip haptic output.
-                if (outputDesc->mChannelMask & AUDIO_CHANNEL_HAPTIC_ALL) {
-                    continue;
-                }
-            }
-            if ((kMandatedFlags & flags) !=
-                (kMandatedFlags & outputDesc->mProfile->getFlags())) {
-                continue;
-            }
+        // matching criteria values in priority order for current output
+        std::vector<uint32_t> currentMatchCriteria(8, 0);
 
-            // if a valid format is specified, skip output if not compatible
-            if (format != AUDIO_FORMAT_INVALID) {
-                if (!audio_is_linear_pcm(format)) {
-                    continue;
-                }
-                if (AudioPort::isBetterFormatMatch(
-                        outputDesc->mFormat, bestFormat, format)) {
-                    outputForFormat = output;
-                    bestFormat = outputDesc->mFormat;
-                }
-            }
+        if (outputDesc->isDuplicated()) {
+            continue;
+        }
+        if ((kExcludedFlags & outputDesc->mFlags) != 0) {
+            continue;
+        }
 
-            int commonFlags = popcount(outputDesc->mProfile->getFlags() & flags);
-            if (commonFlags >= maxCommonFlags) {
-                if (commonFlags == maxCommonFlags) {
-                    if (format != AUDIO_FORMAT_INVALID
-                            && AudioPort::isBetterFormatMatch(
-                                    outputDesc->mFormat, bestFormatForFlags, format)) {
-                        outputForFlags = output;
-                        bestFormatForFlags = outputDesc->mFormat;
-                    }
-                } else {
-                    outputForFlags = output;
-                    maxCommonFlags = commonFlags;
-                    bestFormatForFlags = outputDesc->mFormat;
-                }
-                ALOGV("selectOutput() commonFlags for output %d, %04x", output, commonFlags);
+        // If haptic channel is specified, use the haptic output if present.
+        // When using haptic output, same audio format and sample rate are required.
+        const uint32_t outputHapticChannelCount = audio_channel_count_from_out_mask(
+            outputDesc->mChannelMask & AUDIO_CHANNEL_HAPTIC_ALL);
+        if ((hapticChannelCount == 0) != (outputHapticChannelCount == 0)) {
+            continue;
+        }
+        if (outputHapticChannelCount >= hapticChannelCount
+            && format == outputDesc->mFormat
+            && samplingRate == outputDesc->mSamplingRate) {
+                currentMatchCriteria[0] = outputHapticChannelCount;
+        }
+
+        // functional flags match
+        currentMatchCriteria[1] = popcount(outputDesc->mFlags & functionalFlags);
+
+        // channel mask and channel count match
+        uint32_t outputChannelCount = audio_channel_count_from_out_mask(outputDesc->mChannelMask);
+        if (channelMask != AUDIO_CHANNEL_NONE && channelCount > 2 &&
+            channelCount <= outputChannelCount) {
+            if ((audio_channel_mask_get_representation(channelMask) ==
+                    audio_channel_mask_get_representation(outputDesc->mChannelMask)) &&
+                    ((channelMask & outputDesc->mChannelMask) == channelMask)) {
+                currentMatchCriteria[2] = outputChannelCount;
             }
-            if (outputDesc->mProfile->getFlags() & AUDIO_OUTPUT_FLAG_PRIMARY) {
-                outputForPrimary = output;
-            }
+            currentMatchCriteria[3] = outputChannelCount;
+        }
+
+        // sampling rate match
+        if (samplingRate > SAMPLE_RATE_HZ_DEFAULT &&
+                samplingRate <= outputDesc->mSamplingRate) {
+            currentMatchCriteria[4] = outputDesc->mSamplingRate;
+        }
+
+        // performance flags match
+        currentMatchCriteria[5] = popcount(outputDesc->mFlags & performanceFlags);
+
+        // format match
+        if (format != AUDIO_FORMAT_INVALID) {
+            currentMatchCriteria[6] =
+                AudioPort::kFormatDistanceMax -
+                AudioPort::formatDistance(format, outputDesc->mFormat);
+        }
+
+        // primary output match
+        currentMatchCriteria[7] = outputDesc->mFlags & AUDIO_OUTPUT_FLAG_PRIMARY;
+
+        // compare match criteria by priority then value
+        if (std::lexicographical_compare(bestMatchCriteria.begin(), bestMatchCriteria.end(),
+                currentMatchCriteria.begin(), currentMatchCriteria.end())) {
+            bestMatchCriteria = currentMatchCriteria;
+            bestOutput = output;
+
+            std::stringstream result;
+            std::copy(bestMatchCriteria.begin(), bestMatchCriteria.end(),
+                std::ostream_iterator<int>(result, " "));
+            ALOGV("%s new bestOutput %d criteria %s",
+                __func__, bestOutput, result.str().c_str());
         }
     }
 
-    if (outputForFlags != AUDIO_IO_HANDLE_NONE) {
-        return outputForFlags;
-    }
-    if (outputForFormat != AUDIO_IO_HANDLE_NONE) {
-        return outputForFormat;
-    }
-    if (outputForPrimary != AUDIO_IO_HANDLE_NONE) {
-        return outputForPrimary;
-    }
-
-    return outputs[0];
+    return bestOutput;
 }
 
 status_t AudioPolicyManager::startOutput(audio_port_handle_t portId)
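
selectOutput() above now fills one criteria vector per candidate output, ordered from the highest-priority criterion to the lowest, and keeps the candidate that wins under std::lexicographical_compare. A standalone sketch of that ranking; the criteria subset and values are made up:

    // Illustrative sketch only, made-up criteria values.
    #include <algorithm>
    #include <cstdint>
    #include <cstdio>
    #include <vector>

    int main() {
        // criteria per candidate, highest priority first:
        // { functional flags matched, exact channel mask, channel count, sampling rate,
        //   performance flags matched }
        std::vector<std::vector<uint32_t>> candidates = {
            {0, 0, 2, 48000, 2},   // output A
            {1, 0, 2, 44100, 0},   // output B: matches one functional flag, so it wins
            {0, 1, 6, 48000, 1},   // output C
        };
        size_t best = 0;
        for (size_t i = 1; i < candidates.size(); ++i) {
            if (std::lexicographical_compare(candidates[best].begin(), candidates[best].end(),
                                             candidates[i].begin(), candidates[i].end())) {
                best = i;   // candidate i beats the best so far on the first differing criterion
            }
        }
        std::printf("best output index: %zu\n", best);   // prints 1 (output B)
        return 0;
    }
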
@@ -1974,16 +2003,25 @@
             strncmp(attributes.tags, "addr=", strlen("addr=")) == 0) {
         status = mPolicyMixes.getInputMixForAttr(attributes, &policyMix);
         if (status != NO_ERROR) {
+            ALOGW("%s could not find input mix for attr %s",
+                    __func__, toString(attributes).c_str());
             goto error;
         }
+        device = mAvailableInputDevices.getDevice(AUDIO_DEVICE_IN_REMOTE_SUBMIX,
+                                                  String8(attr->tags + strlen("addr=")),
+                                                  AUDIO_FORMAT_DEFAULT);
+        if (device == nullptr) {
+            ALOGW("%s could not find in Remote Submix device for source %d, tags %s",
+                    __func__, attributes.source, attributes.tags);
+            status = BAD_VALUE;
+            goto error;
+        }
+
         if (is_mix_loopback_render(policyMix->mRouteFlags)) {
             *inputType = API_INPUT_MIX_PUBLIC_CAPTURE_PLAYBACK;
         } else {
             *inputType = API_INPUT_MIX_EXT_POLICY_REROUTE;
         }
-        device = mAvailableInputDevices.getDevice(AUDIO_DEVICE_IN_REMOTE_SUBMIX,
-                                                  String8(attr->tags + strlen("addr=")),
-                                                  AUDIO_FORMAT_DEFAULT);
     } else {
         if (explicitRoutingDevice != nullptr) {
             device = explicitRoutingDevice;
@@ -2025,7 +2063,7 @@
                 device->getId() : AUDIO_PORT_HANDLE_NONE;
 
     isSoundTrigger = attributes.source == AUDIO_SOURCE_HOTWORD &&
-        mSoundTriggerSessions.indexOfKey(session) > 0;
+        mSoundTriggerSessions.indexOfKey(session) >= 0;
     *portId = AudioPort::getNextUniqueId();
 
     clientDesc = new RecordClientDescriptor(*portId, riid, uid, session, attributes, *config,
@@ -2110,6 +2148,7 @@
         for (size_t i = 0; i < mInputs.size(); ) {
             sp <AudioInputDescriptor> desc = mInputs.valueAt(i);
             if (desc->mProfile != profile) {
+                i++;
                 continue;
             }
             // if sound trigger, reuse input if used by other sound trigger on same session
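
The i++ added above fixes a loop whose index is advanced manually because inputs can be removed inside the body; without it, a continue on the first non-matching input never advances and the loop spins forever. A standalone illustration of the pattern:

    // Illustrative sketch only: manual index advance in a loop that also erases elements.
    #include <cstdio>
    #include <vector>

    int main() {
        std::vector<int> inputs = {1, 2, 2, 3};
        for (size_t i = 0; i < inputs.size(); ) {
            if (inputs[i] != 2) {
                i++;            // without this increment the loop would never terminate
                continue;
            }
            inputs.erase(inputs.begin() + i);   // erasing already advances the scan
        }
        std::printf("remaining: %zu elements\n", inputs.size());   // prints 2
        return 0;
    }
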
@@ -2850,14 +2889,12 @@
             rSubmixModule->addInputProfile(address, &inputConfig,
                     AUDIO_DEVICE_IN_REMOTE_SUBMIX, address);
 
-            if (mix.mMixType == MIX_TYPE_PLAYERS) {
-                setDeviceConnectionStateInt(AUDIO_DEVICE_IN_REMOTE_SUBMIX,
-                        AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
-                        address.string(), "remote-submix", AUDIO_FORMAT_DEFAULT);
-            } else {
-                setDeviceConnectionStateInt(AUDIO_DEVICE_OUT_REMOTE_SUBMIX,
-                        AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
-                        address.string(), "remote-submix", AUDIO_FORMAT_DEFAULT);
+            if ((res = setDeviceConnectionStateInt(mix.mDeviceType,
+                    AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+                    address.string(), "remote-submix", AUDIO_FORMAT_DEFAULT)) != NO_ERROR) {
+                ALOGE("Failed to set remote submix device available, type %u, address %s",
+                        mix.mDeviceType, address.string());
+                break;
             }
         } else if ((mix.mRouteFlags & MIX_ROUTE_FLAG_RENDER) == MIX_ROUTE_FLAG_RENDER) {
             String8 address = mix.mDeviceAddress;
@@ -2933,17 +2970,17 @@
                 continue;
             }
 
-            if (getDeviceConnectionState(AUDIO_DEVICE_IN_REMOTE_SUBMIX, address.string()) ==
-                    AUDIO_POLICY_DEVICE_STATE_AVAILABLE)  {
-                setDeviceConnectionStateInt(AUDIO_DEVICE_IN_REMOTE_SUBMIX,
-                        AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
-                        address.string(), "remote-submix", AUDIO_FORMAT_DEFAULT);
-            }
-            if (getDeviceConnectionState(AUDIO_DEVICE_OUT_REMOTE_SUBMIX, address.string()) ==
-                    AUDIO_POLICY_DEVICE_STATE_AVAILABLE)  {
-                setDeviceConnectionStateInt(AUDIO_DEVICE_OUT_REMOTE_SUBMIX,
-                        AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
-                        address.string(), "remote-submix", AUDIO_FORMAT_DEFAULT);
+            for (auto device : {AUDIO_DEVICE_IN_REMOTE_SUBMIX, AUDIO_DEVICE_OUT_REMOTE_SUBMIX}) {
+                if (getDeviceConnectionState(device, address.string()) ==
+                        AUDIO_POLICY_DEVICE_STATE_AVAILABLE)  {
+                    res = setDeviceConnectionStateInt(device, AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+                                                      address.string(), "remote-submix",
+                                                      AUDIO_FORMAT_DEFAULT);
+                    if (res != OK) {
+                        ALOGE("Error making RemoteSubmix device unavailable for mix "
+                              "with type %d, address %s", device, address.string());
+                    }
+                }
             }
             rSubmixModule->removeOutputProfile(address);
             rSubmixModule->removeInputProfile(address);
@@ -3003,22 +3040,11 @@
 
 status_t AudioPolicyManager::removeUidDeviceAffinities(uid_t uid) {
     ALOGV("%s() uid=%d", __FUNCTION__, uid);
-    Vector<AudioDeviceTypeAddr> devices;
-    status_t res =  mPolicyMixes.getDevicesForUid(uid, devices);
-    if (res == NO_ERROR) {
-        // reevaluate outputs for all found devices
-        for (size_t i = 0; i < devices.size(); i++) {
-            sp<DeviceDescriptor> devDesc = mHwModules.getDeviceDescriptor(
-                    devices[i].mType, devices[i].mAddress, String8(),
-                    AUDIO_FORMAT_DEFAULT);
-            SortedVector<audio_io_handle_t> outputs;
-            if (checkOutputsForDevice(devDesc, AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
-                    outputs) != NO_ERROR) {
-                ALOGE("%s() error in checkOutputsForDevice for device=%08x addr=%s",
-                        __FUNCTION__, devices[i].mType, devices[i].mAddress.string());
-                return INVALID_OPERATION;
-            }
-        }
+    status_t res = mPolicyMixes.removeUidDeviceAffinities(uid);
+    if (res != NO_ERROR) {
+        ALOGE("%s() Could not remove all device affinities fo uid = %d",
+            __FUNCTION__, uid);
+        return INVALID_OPERATION;
     }
 
     return res;
@@ -3485,7 +3511,7 @@
                             getOutputsForDevices(DeviceVector(sinkDevice), mOutputs);
                     // if the sink device is reachable via an opened output stream, request to go via
                     // this output stream by adding a second source to the patch description
-                    audio_io_handle_t output = selectOutput(outputs);
+                    const audio_io_handle_t output = selectOutput(outputs);
                     if (output != AUDIO_IO_HANDLE_NONE) {
                         sp<AudioOutputDescriptor> outputDesc = mOutputs.valueFor(output);
                         if (outputDesc->isDuplicated()) {
@@ -4263,14 +4289,6 @@
     initialize();
 }
 
-//  This check is to catch any legacy platform updating to Q without having
-//  switched to XML since its deprecation on O.
-// TODO: after Q release, remove this check and flag as XML is now the only
-//        option and all legacy platform should have transitioned to XML.
-#ifndef USE_XML_AUDIO_POLICY_CONF
-#error Audio policy no longer supports legacy .conf configuration format
-#endif
-
 void AudioPolicyManager::loadConfig() {
     if (deserializeAudioPolicyXmlConfig(getConfig()) != NO_ERROR) {
         ALOGE("could not load audio policy configuration file, setting defaults");
@@ -5679,7 +5697,16 @@
         const float maxVoiceVolDb =
                 computeVolume(voiceCurves, callVolumeSrc, voiceVolumeIndex, device)
                 + IN_CALL_EARPIECE_HEADROOM_DB;
-        if (volumeDb > maxVoiceVolDb) {
+        // FIXME: Workaround for call screening applications until a proper audio mode is defined
+        // to support this scenario : Exempt the RING stream from the audio cap if the audio was
+        // programmatically muted.
+        // VOICE_CALL stream has minVolumeIndex > 0 : Users cannot set the volume of voice calls to
+        // 0. We don't want to cap volume when the system has programmatically muted the voice call
+        // stream. See setVolumeCurveIndex() for more information.
+        bool exemptFromCapping = (volumeSource == ringVolumeSrc) && (voiceVolumeIndex == 0);
+        ALOGV_IF(exemptFromCapping, "%s volume source %d at vol=%f not capped", __func__,
+                 volumeSource, volumeDb);
+        if ((volumeDb > maxVoiceVolDb) && !exemptFromCapping) {
             ALOGV("%s volume source %d at vol=%f overriden by volume group %d at vol=%f", __func__,
                   volumeSource, volumeDb, callVolumeSrc, maxVoiceVolDb);
             volumeDb = maxVoiceVolDb;
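
The capping decision above can be read as a small pure function. This is only an illustrative sketch of the logic in the hunk, not the AudioPolicyManager code itself:

// Sketch: RING volume is capped at the voice-call ceiling unless the
// voice-call stream was programmatically muted (volume index 0).
#include <algorithm>

float capRingVolumeDb(float ringVolumeDb, float maxVoiceVolDb,
                      bool isRingSource, int voiceVolumeIndex) {
    const bool exemptFromCapping = isRingSource && (voiceVolumeIndex == 0);
    if (exemptFromCapping) {
        return ringVolumeDb;                      // leave the ring volume untouched
    }
    return std::min(ringVolumeDb, maxVoiceVolDb); // apply the in-call headroom cap
}
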
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index 06e68a9..b0a8541 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -166,7 +166,7 @@
     return mAudioPolicyManager->getOutput(stream);
 }
 
-status_t AudioPolicyService::getOutputForAttr(const audio_attributes_t *originalAttr,
+status_t AudioPolicyService::getOutputForAttr(audio_attributes_t *attr,
                                               audio_io_handle_t *output,
                                               audio_session_t session,
                                               audio_stream_type_t *stream,
@@ -190,13 +190,15 @@
                 "%s uid %d tried to pass itself off as %d", __FUNCTION__, callingUid, uid);
         uid = callingUid;
     }
-    audio_attributes_t attr = *originalAttr;
     if (!mPackageManager.allowPlaybackCapture(uid)) {
-        attr.flags |= AUDIO_FLAG_NO_MEDIA_PROJECTION;
+        attr->flags |= AUDIO_FLAG_NO_MEDIA_PROJECTION;
+    }
+    if (!bypassInterruptionPolicyAllowed(pid, uid)) {
+        attr->flags &= ~(AUDIO_FLAG_BYPASS_INTERRUPTION_POLICY|AUDIO_FLAG_BYPASS_MUTE);
     }
     audio_output_flags_t originalFlags = flags;
     AutoCallerClear acc;
-    status_t result = mAudioPolicyManager->getOutputForAttr(&attr, output, session, stream, uid,
+    status_t result = mAudioPolicyManager->getOutputForAttr(attr, output, session, stream, uid,
                                                  config,
                                                  &flags, selectedDeviceId, portId,
                                                  secondaryOutputs);
@@ -212,14 +214,14 @@
         *selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
         *portId = AUDIO_PORT_HANDLE_NONE;
         secondaryOutputs->clear();
-        result = mAudioPolicyManager->getOutputForAttr(&attr, output, session, stream, uid, config,
+        result = mAudioPolicyManager->getOutputForAttr(attr, output, session, stream, uid, config,
                                                        &flags, selectedDeviceId, portId,
                                                        secondaryOutputs);
     }
 
     if (result == NO_ERROR) {
         sp <AudioPlaybackClient> client =
-            new AudioPlaybackClient(attr, *output, uid, pid, session, *selectedDeviceId, *stream);
+            new AudioPlaybackClient(*attr, *output, uid, pid, session, *selectedDeviceId, *stream);
         mAudioPlaybackClients.add(*portId, client);
     }
     return result;
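
The attributes are now sanitized in place before they reach the policy engine. A minimal sketch of that pattern, assuming illustrative flag values (not the real AUDIO_FLAG_* constants):

// Sketch: strip privileged flags from callers that lack the corresponding
// capability, mutating the attributes the policy engine will see.
#include <cstdint>

constexpr uint32_t kNoMediaProjection        = 0x1;  // hypothetical bit values
constexpr uint32_t kBypassInterruptionPolicy = 0x2;
constexpr uint32_t kBypassMute               = 0x4;

struct AttributesSketch { uint32_t flags = 0; };

void sanitizeFlags(AttributesSketch* attr, bool allowPlaybackCapture,
                   bool allowInterruptionBypass) {
    if (!allowPlaybackCapture) {
        attr->flags |= kNoMediaProjection;  // opt the client out of capture
    }
    if (!allowInterruptionBypass) {
        attr->flags &= ~(kBypassInterruptionPolicy | kBypassMute);
    }
}
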
@@ -511,6 +513,7 @@
     }
 
     // including successes gets very verbose
+    // but once we cut over to westworld, log them all.
     if (status != NO_ERROR) {
 
         static constexpr char kAudioPolicy[] = "audiopolicy";
@@ -571,6 +574,9 @@
             delete item;
             item = NULL;
         }
+    }
+
+    if (status != NO_ERROR) {
         client->active = false;
         client->startTimeNs = 0;
         updateUidStates_l();
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index 5389c08..e3c72ae 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -411,19 +411,19 @@
 //    OR The client is the assistant
 //        AND an accessibility service is on TOP
 //               AND the source is VOICE_RECOGNITION or HOTWORD
-//        OR uses VOICE_RECOGNITION AND is on TOP OR latest started
+//        OR uses VOICE_RECOGNITION AND is on TOP
 //               OR uses HOTWORD
 //            AND there is no active privacy sensitive capture or call
 //                OR client has CAPTURE_AUDIO_OUTPUT privileged permission
 //    OR The client is an accessibility service
-//        AND is on TOP OR latest started
+//        AND is on TOP
 //        AND the source is VOICE_RECOGNITION or HOTWORD
 //    OR the client source is virtual (remote submix, call audio TX or RX...)
-//    OR Any other client
+//    OR Any client
 //        AND The assistant is not on TOP
+//        AND is on TOP or latest started
 //        AND there is no active privacy sensitive capture or call
 //                OR client has CAPTURE_AUDIO_OUTPUT privileged permission
-//TODO: mamanage pre processing effects according to use case priority
 
     sp<AudioRecordClient> topActive;
     sp<AudioRecordClient> latestActive;
@@ -505,9 +505,11 @@
 
         // By default allow capture if:
         //     The assistant is not on TOP
+        //     AND is on TOP or latest started
         //     AND there is no active privacy sensitive capture or call
         //             OR client has CAPTURE_AUDIO_OUTPUT privileged permission
         bool allowCapture = !isAssistantOnTop
+                && ((isTopOrLatestActive && !isLatestSensitive) || isLatestSensitive)
                 && !(isSensitiveActive && !(isLatestSensitive || current->canCaptureOutput))
                 && !(isInCall && !current->canCaptureOutput);
 
@@ -518,7 +520,7 @@
             // For assistant allow capture if:
             //     An accessibility service is on TOP
             //            AND the source is VOICE_RECOGNITION or HOTWORD
-            //     OR is on TOP OR latest started AND uses VOICE_RECOGNITION
+            //     OR is on TOP AND uses VOICE_RECOGNITION
             //            OR uses HOTWORD
             //         AND there is no active privacy sensitive capture or call
             //             OR client has CAPTURE_AUDIO_OUTPUT privileged permission
@@ -527,7 +529,7 @@
                     allowCapture = true;
                 }
             } else {
-                if (((isTopOrLatestActive && source == AUDIO_SOURCE_VOICE_RECOGNITION) ||
+                if (((isAssistantOnTop && source == AUDIO_SOURCE_VOICE_RECOGNITION) ||
                         source == AUDIO_SOURCE_HOTWORD) &&
                         (!(isSensitiveActive || isInCall) || current->canCaptureOutput)) {
                     allowCapture = true;
@@ -535,9 +537,9 @@
             }
         } else if (mUidPolicy->isA11yUid(current->uid)) {
             // For accessibility service allow capture if:
-            //     Is on TOP OR latest started
+            //     Is on TOP
             //     AND the source is VOICE_RECOGNITION or HOTWORD
-            if (isTopOrLatestActive &&
+            if (isA11yOnTop &&
                     (source == AUDIO_SOURCE_VOICE_RECOGNITION || source == AUDIO_SOURCE_HOTWORD)) {
                 allowCapture = true;
             }
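
For reference, the reworked default-capture condition can be written as a standalone predicate. This mirrors the boolean expression added in the hunk above; parameter names follow the locals used there:

// Sketch of the default gate: capture is allowed only when the assistant is
// not on top, the client is on top or latest started (or is itself the latest
// sensitive client), and no privacy-sensitive capture or call is active unless
// the client holds CAPTURE_AUDIO_OUTPUT.
bool defaultAllowCapture(bool isAssistantOnTop, bool isTopOrLatestActive,
                         bool isLatestSensitive, bool isSensitiveActive,
                         bool isInCall, bool canCaptureOutput) {
    return !isAssistantOnTop
            && ((isTopOrLatestActive && !isLatestSensitive) || isLatestSensitive)
            && !(isSensitiveActive && !(isLatestSensitive || canCaptureOutput))
            && !(isInCall && !canCaptureOutput);
}
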
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index 58256f7..8db63a5 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -75,7 +75,7 @@
     virtual status_t setForceUse(audio_policy_force_use_t usage, audio_policy_forced_cfg_t config);
     virtual audio_policy_forced_cfg_t getForceUse(audio_policy_force_use_t usage);
     virtual audio_io_handle_t getOutput(audio_stream_type_t stream);
-    status_t getOutputForAttr(const audio_attributes_t *attr,
+    status_t getOutputForAttr(audio_attributes_t *attr,
                               audio_io_handle_t *output,
                               audio_session_t session,
                               audio_stream_type_t *stream,
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index fc6d6be..a87ebdf 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -117,6 +117,11 @@
 
 static const String16 sManageCameraPermission("android.permission.MANAGE_CAMERA");
 
+// Matches with PERCEPTIBLE_APP_ADJ in ProcessList.java
+static constexpr int32_t kVendorClientScore = 200;
+// Matches with PROCESS_STATE_PERSISTENT_UI in ActivityManager.java
+static constexpr int32_t kVendorClientState = 1;
+
 Mutex CameraService::sProxyMutex;
 sp<hardware::ICameraServiceProxy> CameraService::sCameraServiceProxy;
 
@@ -1120,7 +1125,8 @@
         std::map<int,resource_policy::ClientPriority> pidToPriorityMap;
         for (size_t i = 0; i < ownerPids.size() - 1; i++) {
             pidToPriorityMap.emplace(ownerPids[i],
-                    resource_policy::ClientPriority(priorityScores[i], states[i]));
+                    resource_policy::ClientPriority(priorityScores[i], states[i],
+                            /* isVendorClient won't get copied over */ false));
         }
         mActiveClientManager.updatePriorities(pidToPriorityMap);
 
@@ -2980,8 +2986,12 @@
         const std::set<String8>& conflictingKeys, int32_t score, int32_t ownerId,
         int32_t state) {
 
+    bool isVendorClient = hardware::IPCThreadState::self()->isServingCall();
+    int32_t score_adj = isVendorClient ? kVendorClientScore : score;
+    int32_t state_adj = isVendorClient ? kVendorClientState : state;
+
     return std::make_shared<resource_policy::ClientDescriptor<String8, sp<BasicClient>>>(
-            key, value, cost, conflictingKeys, score, ownerId, state);
+            key, value, cost, conflictingKeys, score_adj, ownerId, state_adj, isVendorClient);
 }
 
 CameraService::DescriptorPtr CameraService::CameraClientManager::makeClientDescriptor(
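
The vendor-client override above boils down to swapping in fixed score/state constants when the binder call originates from a HAL process. A compact illustrative helper (a sketch, not CameraService code):

#include <cstdint>
#include <utility>

// Vendor clients always get the PERCEPTIBLE_APP_ADJ-equivalent score and the
// PROCESS_STATE_PERSISTENT_UI-equivalent state; everyone else keeps the values
// supplied by ActivityManagerService.
std::pair<int32_t, int32_t> adjustPriority(bool isVendorClient,
                                           int32_t score, int32_t state) {
    constexpr int32_t kVendorScore = 200;  // PERCEPTIBLE_APP_ADJ
    constexpr int32_t kVendorState = 1;    // PROCESS_STATE_PERSISTENT_UI
    return isVendorClient ? std::make_pair(kVendorScore, kVendorState)
                          : std::make_pair(score, state);
}
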
diff --git a/services/camera/libcameraservice/common/DepthPhotoProcessor.cpp b/services/camera/libcameraservice/common/DepthPhotoProcessor.cpp
index 6d96163..fc79150 100644
--- a/services/camera/libcameraservice/common/DepthPhotoProcessor.cpp
+++ b/services/camera/libcameraservice/common/DepthPhotoProcessor.cpp
@@ -64,6 +64,10 @@
 namespace android {
 namespace camera3 {
 
+// Depth samples with low confidence can skew the
+// near/far values and impact the range inverse coding.
+static const float CONFIDENCE_THRESHOLD = .15f;
+
 ExifOrientation getExifOrientation(const unsigned char *jpegBuffer, size_t jpegBufferSize) {
     if ((jpegBuffer == nullptr) || (jpegBufferSize == 0)) {
         return ExifOrientation::ORIENTATION_UNDEFINED;
@@ -238,6 +242,9 @@
     auto conf = (value >> 13) & 0x7;
     float normConfidence = (conf == 0) ? 1.f : (static_cast<float>(conf) - 1) / 7.f;
     confidence->push_back(normConfidence);
+    if (normConfidence < CONFIDENCE_THRESHOLD) {
+        return;
+    }
 
     if (*near > point) {
         *near = point;
@@ -358,8 +365,12 @@
     auto pointIt = points.begin();
     auto confidenceIt = confidence.begin();
     while ((pointIt != points.end()) && (confidenceIt != confidence.end())) {
-        pointsQuantized.push_back(floorf(((far * (*pointIt - near)) /
-                (*pointIt * (far - near))) * 255.0f));
+        auto point = *pointIt;
+        if ((*confidenceIt) < CONFIDENCE_THRESHOLD) {
+            point = std::clamp(point, near, far);
+        }
+        pointsQuantized.push_back(floorf(((far * (point - near)) /
+                (point * (far - near))) * 255.0f));
         confidenceQuantized.push_back(floorf(*confidenceIt * 255.0f));
         confidenceIt++; pointIt++;
     }
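
The quantization above encodes depth with a range-inverse mapping; the new clamp keeps low-confidence samples from landing outside [near, far] and producing out-of-range codes. A standalone sketch of one sample (assumes near > 0, and that confident samples already lie within [near, far], as in the hunk):

#include <algorithm>
#include <cmath>
#include <cstdint>

uint8_t quantizeDepthSample(float point, float confidence,
                            float near, float far,
                            float confidenceThreshold = 0.15f) {
    if (confidence < confidenceThreshold) {
        point = std::clamp(point, near, far);  // keep unreliable samples in range
    }
    const float coded = (far * (point - near)) / (point * (far - near));
    return static_cast<uint8_t>(std::floor(coded * 255.0f));
}
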
diff --git a/services/camera/libcameraservice/common/FrameProcessorBase.cpp b/services/camera/libcameraservice/common/FrameProcessorBase.cpp
index 41c953b..3d56cd2 100644
--- a/services/camera/libcameraservice/common/FrameProcessorBase.cpp
+++ b/services/camera/libcameraservice/common/FrameProcessorBase.cpp
@@ -89,12 +89,27 @@
     write(fd, result.string(), result.size());
 
     CameraMetadata lastFrame;
+    std::map<std::string, CameraMetadata> lastPhysicalFrames;
     {
         // Don't race while dumping metadata
         Mutex::Autolock al(mLastFrameMutex);
         lastFrame = CameraMetadata(mLastFrame);
+
+        for (const auto& physicalFrame : mLastPhysicalFrames) {
+            lastPhysicalFrames.emplace(String8(physicalFrame.mPhysicalCameraId),
+                    physicalFrame.mPhysicalCameraMetadata);
+        }
     }
-    lastFrame.dump(fd, 2, 6);
+    lastFrame.dump(fd, /*verbosity*/2, /*indentation*/6);
+
+    for (const auto& physicalFrame : lastPhysicalFrames) {
+        result = String8::format("   Latest received frame for physical camera %s:\n",
+                physicalFrame.first.c_str());
+        write(fd, result.string(), result.size());
+        CameraMetadata lastPhysicalMetadata = CameraMetadata(physicalFrame.second);
+        lastPhysicalMetadata.sort();
+        lastPhysicalMetadata.dump(fd, /*verbosity*/2, /*indentation*/6);
+    }
 }
 
 bool FrameProcessorBase::threadLoop() {
@@ -145,6 +160,8 @@
         if (!result.mMetadata.isEmpty()) {
             Mutex::Autolock al(mLastFrameMutex);
             mLastFrame.acquire(result.mMetadata);
+
+            mLastPhysicalFrames = std::move(result.mPhysicalMetadatas);
         }
     }
     if (res != NOT_ENOUGH_DATA) {
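
The dump path above copies the shared metadata while holding mLastFrameMutex and only formats it afterwards, so a slow dumpsys client cannot block the frame-processing thread. A simplified sketch of that pattern (stand-in types, not CameraMetadata):

#include <map>
#include <mutex>
#include <string>

struct MetadataSketch { std::string blob; };

struct FrameDumperSketch {
    std::mutex lastFrameMutex;
    MetadataSketch lastFrame;
    std::map<std::string, MetadataSketch> lastPhysicalFrames;

    std::string dump() {
        MetadataSketch frameCopy;
        std::map<std::string, MetadataSketch> physicalCopy;
        {
            std::lock_guard<std::mutex> lock(lastFrameMutex);  // copy under the lock
            frameCopy = lastFrame;
            physicalCopy = lastPhysicalFrames;
        }
        std::string out = frameCopy.blob + "\n";               // format outside the lock
        for (const auto& entry : physicalCopy) {
            out += "physical camera " + entry.first + ": " + entry.second.blob + "\n";
        }
        return out;
    }
};
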
diff --git a/services/camera/libcameraservice/common/FrameProcessorBase.h b/services/camera/libcameraservice/common/FrameProcessorBase.h
index 00763a4..ae6d15d 100644
--- a/services/camera/libcameraservice/common/FrameProcessorBase.h
+++ b/services/camera/libcameraservice/common/FrameProcessorBase.h
@@ -83,6 +83,8 @@
                               const sp<CameraDeviceBase> &device);
 
     CameraMetadata mLastFrame;
+    std::vector<PhysicalCaptureResultInfo> mLastPhysicalFrames;
+
 };
 
 
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 415b2d8..00f0d86 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -77,8 +77,10 @@
         mTimestampOffset(0),
         mNextResultFrameNumber(0),
         mNextReprocessResultFrameNumber(0),
+        mNextZslStillResultFrameNumber(0),
         mNextShutterFrameNumber(0),
         mNextReprocessShutterFrameNumber(0),
+        mNextZslStillShutterFrameNumber(0),
         mListener(NULL),
         mVendorTagId(CAMERA_METADATA_INVALID_VENDOR_ID),
         mLastTemplateId(-1),
@@ -1306,7 +1308,7 @@
 void Camera3Device::processOneCaptureResultLocked(
         const hardware::camera::device::V3_2::CaptureResult& result,
         const hardware::hidl_vec<
-                hardware::camera::device::V3_4::PhysicalCameraMetadata> physicalCameraMetadatas) {
+                hardware::camera::device::V3_4::PhysicalCameraMetadata> physicalCameraMetadata) {
     camera3_capture_result r;
     status_t res;
     r.frame_number = result.frameNumber;
@@ -1322,21 +1324,21 @@
     r.result = reinterpret_cast<const camera_metadata_t*>(resultMetadata.data());
 
     // Read and validate physical camera metadata
-    size_t physResultCount = physicalCameraMetadatas.size();
+    size_t physResultCount = physicalCameraMetadata.size();
     std::vector<const char*> physCamIds(physResultCount);
     std::vector<const camera_metadata_t *> phyCamMetadatas(physResultCount);
     std::vector<hardware::camera::device::V3_2::CameraMetadata> physResultMetadata;
     physResultMetadata.resize(physResultCount);
-    for (size_t i = 0; i < physicalCameraMetadatas.size(); i++) {
-        res = readOneCameraMetadataLocked(physicalCameraMetadatas[i].fmqMetadataSize,
-                physResultMetadata[i], physicalCameraMetadatas[i].metadata);
+    for (size_t i = 0; i < physicalCameraMetadata.size(); i++) {
+        res = readOneCameraMetadataLocked(physicalCameraMetadata[i].fmqMetadataSize,
+                physResultMetadata[i], physicalCameraMetadata[i].metadata);
         if (res != OK) {
             ALOGE("%s: Frame %d: Failed to read capture result metadata for camera %s",
                     __FUNCTION__, result.frameNumber,
-                    physicalCameraMetadatas[i].physicalCameraId.c_str());
+                    physicalCameraMetadata[i].physicalCameraId.c_str());
             return;
         }
-        physCamIds[i] = physicalCameraMetadatas[i].physicalCameraId.c_str();
+        physCamIds[i] = physicalCameraMetadata[i].physicalCameraId.c_str();
         phyCamMetadatas[i] = reinterpret_cast<const camera_metadata_t*>(
                 physResultMetadata[i].data());
     }
@@ -2522,6 +2524,9 @@
         CLOGE("Stream %d is unknown", streamId);
         return BAD_VALUE;
     }
+
+    // isConsumerConfigurationDeferred() returns false once setConsumers() runs, so sample it first
+    bool isDeferred = stream->isConsumerConfigurationDeferred();
     status_t res = stream->setConsumers(consumers);
     if (res != OK) {
         CLOGE("Stream %d set consumer failed (error %d %s) ", streamId, res, strerror(-res));
@@ -2537,7 +2542,7 @@
         surfaceIds->push_back(id);
     }
 
-    if (stream->isConsumerConfigurationDeferred()) {
+    if (isDeferred) {
         if (!stream->isConfiguring()) {
             CLOGE("Stream %d was already fully configured.", streamId);
             return INVALID_OPERATION;
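
The key point in the hunk above is ordering: the deferred-consumer flag must be sampled before setConsumers() clears it. A tiny sketch of that ordering with illustrative names:

struct StreamSketch {
    bool consumerDeferred = true;
    bool configuring = true;
    int setConsumers() { consumerDeferred = false; return 0; }  // clears the flag
};

int attachConsumers(StreamSketch& stream) {
    const bool wasDeferred = stream.consumerDeferred;  // sample before mutation
    if (stream.setConsumers() != 0) return -1;
    if (wasDeferred && !stream.configuring) {
        return -2;  // mirrors the "already fully configured" INVALID_OPERATION path
    }
    return 0;
}
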
@@ -2612,7 +2617,6 @@
 sp<Camera3Device::CaptureRequest> Camera3Device::createCaptureRequest(
         const PhysicalCameraSettingsList &request, const SurfaceMap &surfaceMap) {
     ATRACE_CALL();
-    status_t res;
 
     sp<CaptureRequest> newRequest = new CaptureRequest;
     newRequest->mSettingsList = request;
@@ -2626,16 +2630,11 @@
                     inputStreams.data.u8[0]);
             return NULL;
         }
-        // Lazy completion of stream configuration (allocation/registration)
-        // on first use
+
         if (mInputStream->isConfiguring()) {
-            res = mInputStream->finishConfiguration();
-            if (res != OK) {
-                SET_ERR_L("Unable to finish configuring input stream %d:"
-                        " %s (%d)",
-                        mInputStream->getId(), strerror(-res), res);
-                return NULL;
-            }
+            SET_ERR_L("%s: input stream %d is not configured!",
+                    __FUNCTION__, mInputStream->getId());
+            return NULL;
         }
         // Check if stream prepare is blocking requests.
         if (mInputStream->isBlockedByPrepare()) {
@@ -2675,15 +2674,9 @@
             newRequest->mOutputSurfaces[streams.data.i32[i]] = surfaces;
         }
 
-        // Lazy completion of stream configuration (allocation/registration)
-        // on first use
         if (stream->isConfiguring()) {
-            res = stream->finishConfiguration();
-            if (res != OK) {
-                SET_ERR_L("Unable to finish configuring stream %d: %s (%d)",
-                        stream->getId(), strerror(-res), res);
-                return NULL;
-            }
+            SET_ERR_L("%s: stream %d is not configured!", __FUNCTION__, stream->getId());
+            return NULL;
         }
         // Check if stream prepare is blocking requests.
         if (stream->isBlockedByPrepare()) {
@@ -2908,7 +2901,8 @@
     // faster
 
     if (mInputStream != NULL && mInputStream->isConfiguring()) {
-        res = mInputStream->finishConfiguration();
+        bool streamReConfigured = false;
+        res = mInputStream->finishConfiguration(&streamReConfigured);
         if (res != OK) {
             CLOGE("Can't finish configuring input stream %d: %s (%d)",
                     mInputStream->getId(), strerror(-res), res);
@@ -2918,12 +2912,16 @@
             }
             return BAD_VALUE;
         }
+        if (streamReConfigured) {
+            mInterface->onStreamReConfigured(mInputStream->getId());
+        }
     }
 
     for (size_t i = 0; i < mOutputStreams.size(); i++) {
         sp<Camera3OutputStreamInterface> outputStream = mOutputStreams[i];
         if (outputStream->isConfiguring() && !outputStream->isConsumerConfigurationDeferred()) {
-            res = outputStream->finishConfiguration();
+            bool streamReConfigured = false;
+            res = outputStream->finishConfiguration(&streamReConfigured);
             if (res != OK) {
                 CLOGE("Can't finish configuring output stream %d: %s (%d)",
                         outputStream->getId(), strerror(-res), res);
@@ -2933,6 +2931,9 @@
                 }
                 return BAD_VALUE;
             }
+            if (streamReConfigured) {
+                mInterface->onStreamReConfigured(outputStream->getId());
+            }
         }
     }
 
@@ -3420,6 +3421,14 @@
         return;
     }
 
+    // Update vendor tag id for physical metadata
+    for (auto& physicalMetadata : result->mPhysicalMetadatas) {
+        camera_metadata_t *pmeta = const_cast<camera_metadata_t *>(
+                physicalMetadata.mPhysicalCameraMetadata.getAndLock());
+        set_camera_metadata_vendor_id(pmeta, mVendorTagId);
+        physicalMetadata.mPhysicalCameraMetadata.unlock(pmeta);
+    }
+
     // Valid result, insert into queue
     List<CaptureResult>::iterator queuedResult =
             mResultQueue.insert(mResultQueue.end(), CaptureResult(*result));
@@ -3457,7 +3466,7 @@
         CaptureResultExtras &resultExtras,
         CameraMetadata &collectedPartialResult,
         uint32_t frameNumber,
-        bool reprocess,
+        bool reprocess, bool zslStillCapture,
         const std::vector<PhysicalCaptureResultInfo>& physicalMetadatas) {
     ATRACE_CALL();
     if (pendingMetadata.isEmpty())
@@ -3474,6 +3483,14 @@
             return;
         }
         mNextReprocessResultFrameNumber = frameNumber + 1;
+    } else if (zslStillCapture) {
+        if (frameNumber < mNextZslStillResultFrameNumber) {
+            SET_ERR("Out-of-order ZSL still capture result metadata submitted! "
+                "(got frame number %d, expecting %d)",
+                frameNumber, mNextZslStillResultFrameNumber);
+            return;
+        }
+        mNextZslStillResultFrameNumber = frameNumber + 1;
     } else {
         if (frameNumber < mNextResultFrameNumber) {
             SET_ERR("Out-of-order capture result metadata submitted! "
@@ -3551,8 +3568,14 @@
         }
     }
 
+    std::unordered_map<std::string, CameraMetadata> monitoredPhysicalMetadata;
+    for (auto& m : physicalMetadatas) {
+        monitoredPhysicalMetadata.emplace(String8(m.mPhysicalCameraId).string(),
+                CameraMetadata(m.mPhysicalCameraMetadata));
+    }
     mTagMonitor.monitorMetadata(TagMonitor::RESULT,
-            frameNumber, timestamp.data.i64[0], captureResult.mMetadata);
+            frameNumber, timestamp.data.i64[0], captureResult.mMetadata,
+            monitoredPhysicalMetadata);
 
     insertResultLocked(&captureResult, frameNumber);
 }
@@ -3728,7 +3751,8 @@
                 metadata = result->result;
                 sendCaptureResult(metadata, request.resultExtras,
                     collectedPartialResult, frameNumber,
-                    hasInputBufferInRequest, request.physicalMetadatas);
+                    hasInputBufferInRequest, request.zslCapture && request.stillCapture,
+                    request.physicalMetadatas);
             }
         }
 
@@ -3906,12 +3930,20 @@
                 // TODO: need to track errors for tighter bounds on expected frame number.
                 if (r.hasInputBuffer) {
                     if (msg.frame_number < mNextReprocessShutterFrameNumber) {
-                        SET_ERR("Shutter notification out-of-order. Expected "
+                        SET_ERR("Reprocess shutter notification out-of-order. Expected "
                                 "notification for frame %d, got frame %d",
                                 mNextReprocessShutterFrameNumber, msg.frame_number);
                         return;
                     }
                     mNextReprocessShutterFrameNumber = msg.frame_number + 1;
+                } else if (r.zslCapture && r.stillCapture) {
+                    if (msg.frame_number < mNextZslStillShutterFrameNumber) {
+                        SET_ERR("ZSL still capture shutter notification out-of-order. Expected "
+                                "notification for frame %d, got frame %d",
+                                mNextZslStillShutterFrameNumber, msg.frame_number);
+                        return;
+                    }
+                    mNextZslStillShutterFrameNumber = msg.frame_number + 1;
                 } else {
                     if (msg.frame_number < mNextShutterFrameNumber) {
                         SET_ERR("Shutter notification out-of-order. Expected "
@@ -3935,7 +3967,8 @@
                 // send pending result and buffers
                 sendCaptureResult(r.pendingMetadata, r.resultExtras,
                     r.collectedPartialResult, msg.frame_number,
-                    r.hasInputBuffer, r.physicalMetadatas);
+                    r.hasInputBuffer, r.zslCapture && r.stillCapture,
+                    r.physicalMetadatas);
             }
             bool timestampIncreasing = !(r.zslCapture || r.hasInputBuffer);
             returnOutputBuffers(r.pendingOutputBuffers.array(),
@@ -3966,8 +3999,11 @@
 
 
 void Camera3Device::monitorMetadata(TagMonitor::eventSource source,
-        int64_t frameNumber, nsecs_t timestamp, const CameraMetadata& metadata) {
-    mTagMonitor.monitorMetadata(source, frameNumber, timestamp, metadata);
+        int64_t frameNumber, nsecs_t timestamp, const CameraMetadata& metadata,
+        const std::unordered_map<std::string, CameraMetadata>& physicalMetadata) {
+
+    mTagMonitor.monitorMetadata(source, frameNumber, timestamp, metadata,
+            physicalMetadata);
 }
 
 /**
@@ -4016,10 +4052,6 @@
     mHidlSession.clear();
 }
 
-bool Camera3Device::HalInterface::supportBatchRequest() {
-    return mHidlSession != nullptr;
-}
-
 status_t Camera3Device::HalInterface::constructDefaultRequestSettings(
         camera3_request_template_t templateId,
         /*out*/ camera_metadata_t **requestTemplate) {
@@ -4369,11 +4401,12 @@
 
 status_t Camera3Device::HalInterface::wrapAsHidlRequest(camera3_capture_request_t* request,
         /*out*/device::V3_2::CaptureRequest* captureRequest,
-        /*out*/std::vector<native_handle_t*>* handlesCreated) {
+        /*out*/std::vector<native_handle_t*>* handlesCreated,
+        /*out*/std::vector<std::pair<int32_t, int32_t>>* inflightBuffers) {
     ATRACE_CALL();
-    if (captureRequest == nullptr || handlesCreated == nullptr) {
-        ALOGE("%s: captureRequest (%p) and handlesCreated (%p) must not be null",
-                __FUNCTION__, captureRequest, handlesCreated);
+    if (captureRequest == nullptr || handlesCreated == nullptr || inflightBuffers == nullptr) {
+        ALOGE("%s: captureRequest (%p), handlesCreated (%p), and inflightBuffers(%p) "
+                "must not be null", __FUNCTION__, captureRequest, handlesCreated, inflightBuffers);
         return BAD_VALUE;
     }
 
@@ -4403,8 +4436,8 @@
             captureRequest->inputBuffer.releaseFence = nullptr;
 
             pushInflightBufferLocked(captureRequest->frameNumber, streamId,
-                    request->input_buffer->buffer,
-                    request->input_buffer->acquire_fence);
+                    request->input_buffer->buffer);
+            inflightBuffers->push_back(std::make_pair(captureRequest->frameNumber, streamId));
         } else {
             captureRequest->inputBuffer.streamId = -1;
             captureRequest->inputBuffer.bufferId = BUFFER_ID_NO_BUFFER;
@@ -4443,14 +4476,31 @@
 
             // Output buffers are empty when using HAL buffer manager
             if (!mUseHalBufManager) {
-                pushInflightBufferLocked(captureRequest->frameNumber, streamId,
-                        src->buffer, src->acquire_fence);
+                pushInflightBufferLocked(captureRequest->frameNumber, streamId, src->buffer);
+                inflightBuffers->push_back(std::make_pair(captureRequest->frameNumber, streamId));
             }
         }
     }
     return OK;
 }
 
+void Camera3Device::HalInterface::cleanupNativeHandles(
+        std::vector<native_handle_t*> *handles, bool closeFd) {
+    if (handles == nullptr) {
+        return;
+    }
+    if (closeFd) {
+        for (auto& handle : *handles) {
+            native_handle_close(handle);
+        }
+    }
+    for (auto& handle : *handles) {
+        native_handle_delete(handle);
+    }
+    handles->clear();
+    return;
+}
+
 status_t Camera3Device::HalInterface::processBatchCaptureRequests(
         std::vector<camera3_capture_request_t*>& requests,/*out*/uint32_t* numRequestProcessed) {
     ATRACE_NAME("CameraHal::processBatchCaptureRequests");
@@ -4471,17 +4521,20 @@
         captureRequests.resize(batchSize);
     }
     std::vector<native_handle_t*> handlesCreated;
+    std::vector<std::pair<int32_t, int32_t>> inflightBuffers;
 
     status_t res = OK;
     for (size_t i = 0; i < batchSize; i++) {
         if (hidlSession_3_4 != nullptr) {
             res = wrapAsHidlRequest(requests[i], /*out*/&captureRequests_3_4[i].v3_2,
-                    /*out*/&handlesCreated);
+                    /*out*/&handlesCreated, /*out*/&inflightBuffers);
         } else {
-            res = wrapAsHidlRequest(requests[i],
-                    /*out*/&captureRequests[i], /*out*/&handlesCreated);
+            res = wrapAsHidlRequest(requests[i], /*out*/&captureRequests[i],
+                    /*out*/&handlesCreated, /*out*/&inflightBuffers);
         }
         if (res != OK) {
+            popInflightBuffers(inflightBuffers);
+            cleanupNativeHandles(&handlesCreated);
             return res;
         }
     }
@@ -4578,31 +4631,29 @@
     }
     if (!err.isOk()) {
         ALOGE("%s: Transaction error: %s", __FUNCTION__, err.description().c_str());
-        return DEAD_OBJECT;
+        status = common::V1_0::Status::CAMERA_DISCONNECTED;
     }
+
     if (status == common::V1_0::Status::OK && *numRequestProcessed != batchSize) {
         ALOGE("%s: processCaptureRequest returns OK but processed %d/%zu requests",
                 __FUNCTION__, *numRequestProcessed, batchSize);
         status = common::V1_0::Status::INTERNAL_ERROR;
     }
 
-    for (auto& handle : handlesCreated) {
-        native_handle_delete(handle);
+    res = CameraProviderManager::mapToStatusT(status);
+    if (res == OK) {
+        if (mHidlSession->isRemote()) {
+            // Only close acquire fence FDs when the HIDL transaction succeeds (so the FDs have been
+            // sent to camera HAL processes)
+            cleanupNativeHandles(&handlesCreated, /*closeFd*/true);
+        } else {
+            // In passthrough mode the FDs are now owned by HAL
+            cleanupNativeHandles(&handlesCreated);
+        }
+    } else {
+        popInflightBuffers(inflightBuffers);
+        cleanupNativeHandles(&handlesCreated);
     }
-    return CameraProviderManager::mapToStatusT(status);
-}
-
-status_t Camera3Device::HalInterface::processCaptureRequest(
-        camera3_capture_request_t *request) {
-    ATRACE_NAME("CameraHal::processCaptureRequest");
-    if (!valid()) return INVALID_OPERATION;
-    status_t res = OK;
-
-    uint32_t numRequestProcessed = 0;
-    std::vector<camera3_capture_request_t*> requests(1);
-    requests[0] = request;
-    res = processBatchCaptureRequests(requests, &numRequestProcessed);
-
     return res;
 }
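
The success/failure paths above encode the fence-ownership rule also documented in Camera3Device.h: close the local acquire-fence FDs only after a successful transaction to a remote HAL (the receiving process holds its own duplicates), hand them over untouched in passthrough mode, and leave them with the caller on failure. A simplified sketch of that rule:

#include <unistd.h>
#include <vector>

void releaseAcquireFences(std::vector<int>& fenceFds, bool transactionOk,
                          bool halIsRemote) {
    if (!transactionOk) {
        return;                    // caller still owns the FDs and cleans them up
    }
    if (halIsRemote) {
        for (int fd : fenceFds) {  // remote HAL received duplicated FDs
            if (fd >= 0) ::close(fd);
        }
    }
    fenceFds.clear();              // in passthrough mode the in-process HAL owns them
}
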
 
@@ -4685,10 +4736,9 @@
 }
 
 status_t Camera3Device::HalInterface::pushInflightBufferLocked(
-        int32_t frameNumber, int32_t streamId, buffer_handle_t *buffer, int acquireFence) {
+        int32_t frameNumber, int32_t streamId, buffer_handle_t *buffer) {
     uint64_t key = static_cast<uint64_t>(frameNumber) << 32 | static_cast<uint64_t>(streamId);
-    auto pair = std::make_pair(buffer, acquireFence);
-    mInflightBufferMap[key] = pair;
+    mInflightBufferMap[key] = buffer;
     return OK;
 }
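
The inflight map keys above pack frameNumber and streamId into a single 64-bit value; the new popInflightBuffers path simply rebuilds those keys from the (frameNumber, streamId) pairs recorded at wrap time. For reference, a standalone sketch of the packing:

#include <cstdint>

uint64_t makeInflightKey(int32_t frameNumber, int32_t streamId) {
    return (static_cast<uint64_t>(frameNumber) << 32) |
            static_cast<uint32_t>(streamId);
}

int32_t frameNumberOf(uint64_t key) { return static_cast<int32_t>(key >> 32); }
int32_t streamIdOf(uint64_t key)    { return static_cast<int32_t>(key & 0xffffffffu); }
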
 
@@ -4700,16 +4750,22 @@
     uint64_t key = static_cast<uint64_t>(frameNumber) << 32 | static_cast<uint64_t>(streamId);
     auto it = mInflightBufferMap.find(key);
     if (it == mInflightBufferMap.end()) return NAME_NOT_FOUND;
-    auto pair = it->second;
-    *buffer = pair.first;
-    int acquireFence = pair.second;
-    if (acquireFence > 0) {
-        ::close(acquireFence);
+    if (buffer != nullptr) {
+        *buffer = it->second;
     }
     mInflightBufferMap.erase(it);
     return OK;
 }
 
+void Camera3Device::HalInterface::popInflightBuffers(
+        const std::vector<std::pair<int32_t, int32_t>>& buffers) {
+    for (const auto& pair : buffers) {
+        int32_t frameNumber = pair.first;
+        int32_t streamId = pair.second;
+        popInflightBuffer(frameNumber, streamId, nullptr);
+    }
+}
+
 status_t Camera3Device::HalInterface::pushInflightRequestBuffer(
         uint64_t bufferId, buffer_handle_t* buf, int32_t streamId) {
     std::lock_guard<std::mutex> lock(mRequestedBuffersLock);
@@ -4780,7 +4836,7 @@
                 __FUNCTION__, handle, streamId);
         return;
     } else {
-        bufferId =  it->second;
+        bufferId = it->second;
         bIdMap.erase(it);
         ALOGV("%s: stream %d now have %zu buffer caches after removing buf %p",
                 __FUNCTION__, streamId, bIdMap.size(), handle);
@@ -4788,6 +4844,22 @@
     mFreedBuffers.push_back(std::make_pair(streamId, bufferId));
 }
 
+void Camera3Device::HalInterface::onStreamReConfigured(int streamId) {
+    std::lock_guard<std::mutex> lock(mBufferIdMapLock);
+    auto mapIt = mBufferIdMaps.find(streamId);
+    if (mapIt == mBufferIdMaps.end()) {
+        ALOGE("%s: streamId %d not found!", __FUNCTION__, streamId);
+        return;
+    }
+
+    BufferIdMap& bIdMap = mapIt->second;
+    for (const auto& it : bIdMap) {
+        uint64_t bufferId = it.second;
+        mFreedBuffers.push_back(std::make_pair(streamId, bufferId));
+    }
+    bIdMap.clear();
+}
+
 /**
  * RequestThread inner class methods
  */
@@ -5107,28 +5179,7 @@
         NextRequest& nextRequest = mNextRequests.editItemAt(i);
         nextRequest.submitted = true;
 
-
-        // Update the latest request sent to HAL
-        if (nextRequest.halRequest.settings != NULL) { // Don't update if they were unchanged
-            Mutex::Autolock al(mLatestRequestMutex);
-
-            camera_metadata_t* cloned = clone_camera_metadata(nextRequest.halRequest.settings);
-            mLatestRequest.acquire(cloned);
-
-            sp<Camera3Device> parent = mParent.promote();
-            if (parent != NULL) {
-                parent->monitorMetadata(TagMonitor::REQUEST,
-                        nextRequest.halRequest.frame_number,
-                        0, mLatestRequest);
-            }
-        }
-
-        if (nextRequest.halRequest.settings != NULL) {
-            nextRequest.captureRequest->mSettingsList.begin()->metadata.unlock(
-                    nextRequest.halRequest.settings);
-        }
-
-        cleanupPhysicalSettings(nextRequest.captureRequest, &nextRequest.halRequest);
+        updateNextRequest(nextRequest);
 
         if (!triggerRemoveFailed) {
             // Remove any previously queued triggers (after unlock)
@@ -5161,62 +5212,6 @@
     return true;
 }
 
-bool Camera3Device::RequestThread::sendRequestsOneByOne() {
-    status_t res;
-
-    for (auto& nextRequest : mNextRequests) {
-        // Submit request and block until ready for next one
-        ATRACE_ASYNC_BEGIN("frame capture", nextRequest.halRequest.frame_number);
-        res = mInterface->processCaptureRequest(&nextRequest.halRequest);
-
-        if (res != OK) {
-            // Should only get a failure here for malformed requests or device-level
-            // errors, so consider all errors fatal.  Bad metadata failures should
-            // come through notify.
-            SET_ERR("RequestThread: Unable to submit capture request %d to HAL"
-                    " device: %s (%d)", nextRequest.halRequest.frame_number, strerror(-res),
-                    res);
-            cleanUpFailedRequests(/*sendRequestError*/ false);
-            return false;
-        }
-
-        // Mark that the request has be submitted successfully.
-        nextRequest.submitted = true;
-
-        // Update the latest request sent to HAL
-        if (nextRequest.halRequest.settings != NULL) { // Don't update if they were unchanged
-            Mutex::Autolock al(mLatestRequestMutex);
-
-            camera_metadata_t* cloned = clone_camera_metadata(nextRequest.halRequest.settings);
-            mLatestRequest.acquire(cloned);
-
-            sp<Camera3Device> parent = mParent.promote();
-            if (parent != NULL) {
-                parent->monitorMetadata(TagMonitor::REQUEST, nextRequest.halRequest.frame_number,
-                        0, mLatestRequest);
-            }
-        }
-
-        if (nextRequest.halRequest.settings != NULL) {
-            nextRequest.captureRequest->mSettingsList.begin()->metadata.unlock(
-                    nextRequest.halRequest.settings);
-        }
-
-        cleanupPhysicalSettings(nextRequest.captureRequest, &nextRequest.halRequest);
-
-        // Remove any previously queued triggers (after unlock)
-        res = removeTriggers(mPrevRequest);
-        if (res != OK) {
-            SET_ERR("RequestThread: Unable to remove triggers "
-                  "(capture request %d, HAL device: %s (%d)",
-                  nextRequest.halRequest.frame_number, strerror(-res), res);
-            cleanUpFailedRequests(/*sendRequestError*/ false);
-            return false;
-        }
-    }
-    return true;
-}
-
 nsecs_t Camera3Device::RequestThread::calculateMaxExpectedDuration(const camera_metadata_t *request) {
     nsecs_t maxExpectedDuration = kDefaultExpectedDuration;
     camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
@@ -5264,6 +5259,37 @@
     return false;
 }
 
+void Camera3Device::RequestThread::updateNextRequest(NextRequest& nextRequest) {
+    // Update the latest request sent to HAL
+    if (nextRequest.halRequest.settings != NULL) { // Don't update if they were unchanged
+        Mutex::Autolock al(mLatestRequestMutex);
+
+        camera_metadata_t* cloned = clone_camera_metadata(nextRequest.halRequest.settings);
+        mLatestRequest.acquire(cloned);
+
+        mLatestPhysicalRequest.clear();
+        for (uint32_t i = 0; i < nextRequest.halRequest.num_physcam_settings; i++) {
+            cloned = clone_camera_metadata(nextRequest.halRequest.physcam_settings[i]);
+            mLatestPhysicalRequest.emplace(nextRequest.halRequest.physcam_id[i],
+                    CameraMetadata(cloned));
+        }
+
+        sp<Camera3Device> parent = mParent.promote();
+        if (parent != NULL) {
+            parent->monitorMetadata(TagMonitor::REQUEST,
+                    nextRequest.halRequest.frame_number,
+                    0, mLatestRequest, mLatestPhysicalRequest);
+        }
+    }
+
+    if (nextRequest.halRequest.settings != NULL) {
+        nextRequest.captureRequest->mSettingsList.begin()->metadata.unlock(
+                nextRequest.halRequest.settings);
+    }
+
+    cleanupPhysicalSettings(nextRequest.captureRequest, &nextRequest.halRequest);
+}
+
 bool Camera3Device::RequestThread::updateSessionParameters(const CameraMetadata& settings) {
     ATRACE_CALL();
     bool updatesDetected = false;
@@ -5439,11 +5465,8 @@
 
     bool submitRequestSuccess = false;
     nsecs_t tRequestStart = systemTime(SYSTEM_TIME_MONOTONIC);
-    if (mInterface->supportBatchRequest()) {
-        submitRequestSuccess = sendRequestsBatch();
-    } else {
-        submitRequestSuccess = sendRequestsOneByOne();
-    }
+    submitRequestSuccess = sendRequestsBatch();
+
     nsecs_t tRequestEnd = systemTime(SYSTEM_TIME_MONOTONIC);
     mRequestLatency.add(tRequestStart, tRequestEnd);
 
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index f8245df..6e8ac84 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -289,9 +289,6 @@
         // Reset this HalInterface object (does not call close())
         void clear();
 
-        // Check if HalInterface support sending requests in batch
-        bool supportBatchRequest();
-
         // Calls into the HAL interface
 
         // Caller takes ownership of requestTemplate
@@ -300,7 +297,11 @@
         status_t configureStreams(const camera_metadata_t *sessionParams,
                 /*inout*/ camera3_stream_configuration *config,
                 const std::vector<uint32_t>& bufferSizes);
-        status_t processCaptureRequest(camera3_capture_request_t *request);
+
+        // When the call succeeds, the ownership of the acquire fences in the requests is
+        // transferred to HalInterface. More specifically, the current implementation sends the
+        // fences to the HAL process and closes the FDs in the cameraserver process. When the call
+        // fails, ownership of the acquire fences remains with the caller.
         status_t processBatchCaptureRequests(
                 std::vector<camera3_capture_request_t*>& requests,
                 /*out*/uint32_t* numRequestProcessed);
@@ -336,6 +337,8 @@
         // Get a vector of bufferId of currently inflight buffers
         void getInflightRequestBufferKeys(std::vector<uint64_t>* out);
 
+        void onStreamReConfigured(int streamId);
+
         static const uint64_t BUFFER_ID_NO_BUFFER = 0;
       private:
         // Always valid
@@ -355,13 +358,21 @@
         // Do not free input camera3_capture_request_t before output HIDL request
         status_t wrapAsHidlRequest(camera3_capture_request_t* in,
                 /*out*/hardware::camera::device::V3_2::CaptureRequest* out,
-                /*out*/std::vector<native_handle_t*>* handlesCreated);
+                /*out*/std::vector<native_handle_t*>* handlesCreated,
+                /*out*/std::vector<std::pair<int32_t, int32_t>>* inflightBuffers);
 
         status_t pushInflightBufferLocked(int32_t frameNumber, int32_t streamId,
-                buffer_handle_t *buffer, int acquireFence);
+                buffer_handle_t *buffer);
+
+        // Pop inflight buffers based on pairs of (frameNumber, streamId)
+        void popInflightBuffers(const std::vector<std::pair<int32_t, int32_t>>& buffers);
+
         // Cache of buffer handles keyed off (frameNumber << 32 | streamId)
-        // value is a pair of (buffer_handle_t*, acquire_fence FD)
-        std::unordered_map<uint64_t, std::pair<buffer_handle_t*, int>> mInflightBufferMap;
+        std::unordered_map<uint64_t, buffer_handle_t*> mInflightBufferMap;
+
+        // Delete and optionally close native handles and clear the input vector afterward
+        static void cleanupNativeHandles(
+                std::vector<native_handle_t*> *handles, bool closeFd = false);
 
         struct BufferHasher {
             size_t operator()(const buffer_handle_t& buf) const {
@@ -554,7 +565,7 @@
     void processOneCaptureResultLocked(
             const hardware::camera::device::V3_2::CaptureResult& result,
             const hardware::hidl_vec<
-            hardware::camera::device::V3_4::PhysicalCameraMetadata> physicalCameraMetadatas);
+            hardware::camera::device::V3_4::PhysicalCameraMetadata> physicalCameraMetadata);
     status_t readOneCameraMetadataLocked(uint64_t fmqResultSize,
             hardware::camera::device::V3_2::CameraMetadata& resultMetadata,
             const hardware::camera::device::V3_2::CameraMetadata& result);
@@ -893,9 +904,6 @@
         // Clear repeating requests. Must be called with mRequestLock held.
         status_t clearRepeatingRequestsLocked(/*out*/ int64_t *lastFrameNumber = NULL);
 
-        // send request in mNextRequests to HAL one by one. Return true = sucssess
-        bool sendRequestsOneByOne();
-
         // send request in mNextRequests to HAL in a batch. Return true = sucssess
         bool sendRequestsBatch();
 
@@ -910,8 +918,8 @@
         bool skipHFRTargetFPSUpdate(int32_t tag, const camera_metadata_ro_entry_t& newEntry,
                 const camera_metadata_entry_t& currentEntry);
 
-        // Re-configure camera using the latest session parameters.
-        bool reconfigureCamera();
+        // Update next request sent to HAL
+        void updateNextRequest(NextRequest& nextRequest);
 
         wp<Camera3Device>  mParent;
         wp<camera3::StatusTracker>  mStatusTracker;
@@ -955,6 +963,7 @@
         // android.request.id for latest process_capture_request
         int32_t            mLatestRequestId;
         CameraMetadata     mLatestRequest;
+        std::unordered_map<std::string, CameraMetadata> mLatestPhysicalRequest;
 
         typedef KeyedVector<uint32_t/*tag*/, RequestTrigger> TriggerMap;
         Mutex              mTriggerMutex;
@@ -1183,10 +1192,14 @@
     uint32_t               mNextResultFrameNumber;
     // the minimal frame number of the next reprocess result
     uint32_t               mNextReprocessResultFrameNumber;
+    // the minimal frame number of the next ZSL still capture result
+    uint32_t               mNextZslStillResultFrameNumber;
     // the minimal frame number of the next non-reprocess shutter
     uint32_t               mNextShutterFrameNumber;
     // the minimal frame number of the next reprocess shutter
     uint32_t               mNextReprocessShutterFrameNumber;
+    // the minimal frame number of the next ZSL still capture shutter
+    uint32_t               mNextZslStillShutterFrameNumber;
     List<CaptureResult>   mResultQueue;
     Condition              mResultSignal;
     wp<NotificationListener>  mListener;
@@ -1223,7 +1236,8 @@
     void sendCaptureResult(CameraMetadata &pendingMetadata,
             CaptureResultExtras &resultExtras,
             CameraMetadata &collectedPartialResult, uint32_t frameNumber,
-            bool reprocess, const std::vector<PhysicalCaptureResultInfo>& physicalMetadatas);
+            bool reprocess, bool zslStillCapture,
+            const std::vector<PhysicalCaptureResultInfo>& physicalMetadatas);
 
     bool isLastFullResult(const InFlightRequest& inFlightRequest);
 
@@ -1263,7 +1277,8 @@
     TagMonitor mTagMonitor;
 
     void monitorMetadata(TagMonitor::eventSource source, int64_t frameNumber,
-            nsecs_t timestamp, const CameraMetadata& metadata);
+            nsecs_t timestamp, const CameraMetadata& metadata,
+            const std::unordered_map<std::string, CameraMetadata>& physicalMetadata);
 
     metadata_vendor_id_t mVendorTagId;
 
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
index 2e909a0..ef0d919 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
@@ -82,6 +82,9 @@
     lines.appendFormat("      Max size: %zu\n", mMaxSize);
     lines.appendFormat("      Combined usage: %" PRIu64 ", max HAL buffers: %d\n",
             mUsage | consumerUsage, camera3_stream::max_buffers);
+    if (strlen(camera3_stream::physical_camera_id) > 0) {
+        lines.appendFormat("      Physical camera id: %s\n", camera3_stream::physical_camera_id);
+    }
     lines.appendFormat("      Frames produced: %d, last timestamp: %" PRId64 " ns\n",
             mFrameCount, mLastTimestamp);
     lines.appendFormat("      Total buffers: %zu, currently dequeued: %zu\n",
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp
index 12ff130..d73a2f9 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp
@@ -287,8 +287,11 @@
     return (mState == STATE_IN_CONFIG) || (mState == STATE_IN_RECONFIG);
 }
 
-status_t Camera3Stream::finishConfiguration() {
+status_t Camera3Stream::finishConfiguration(/*out*/bool* streamReconfigured) {
     ATRACE_CALL();
+    if (streamReconfigured != nullptr) {
+        *streamReconfigured = false;
+    }
     Mutex::Autolock l(mLock);
     switch (mState) {
         case STATE_ERROR:
@@ -313,7 +316,7 @@
 
     // Register for idle tracking
     sp<StatusTracker> statusTracker = mStatusTracker.promote();
-    if (statusTracker != 0) {
+    if (statusTracker != 0 && mStatusId == StatusTracker::NO_STATUS_ID) {
         mStatusId = statusTracker->addComponent();
     }
 
@@ -332,6 +335,7 @@
     mPrepareBlockRequest = true;
     mStreamUnpreparable = false;
 
+    bool reconfiguring = (mState == STATE_IN_RECONFIG);
     status_t res;
     res = configureQueueLocked();
     // configureQueueLocked could return error in case of abandoned surface.
@@ -348,6 +352,9 @@
         return res;
     }
 
+    if (reconfiguring && streamReconfigured != nullptr) {
+        *streamReconfigured = true;
+    }
     mState = STATE_CONFIGURED;
 
     return res;
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.h b/services/camera/libcameraservice/device3/Camera3Stream.h
index 3d21029..c916fe8 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.h
+++ b/services/camera/libcameraservice/device3/Camera3Stream.h
@@ -197,6 +197,8 @@
      * after this call, but can still be read until the destruction of the
      * stream.
      *
+     * streamReconfigured: set to true when a stream is being reconfigured.
+     *
      * Returns:
      *   OK on a successful configuration
      *   NO_INIT in case of a serious error from the HAL device
@@ -204,7 +206,7 @@
      *   INVALID_OPERATION in case connecting to the consumer failed or consumer
      *       doesn't exist yet.
      */
-    status_t         finishConfiguration();
+    status_t         finishConfiguration(/*out*/bool* streamReconfigured = nullptr);
 
     /**
      * Cancels the stream configuration process. This returns the stream to the
diff --git a/services/camera/libcameraservice/device3/Camera3StreamInterface.h b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
index 5cd11b7..73f501a 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
@@ -130,13 +130,15 @@
      * modified after this call, but can still be read until the destruction of
      * the stream.
      *
+     * streamReconfigured: set to true when a stream is being reconfigured.
+     *
      * Returns:
      *   OK on a successful configuration
      *   NO_INIT in case of a serious error from the HAL device
      *   NO_MEMORY in case of an error registering buffers
      *   INVALID_OPERATION in case connecting to the consumer failed
      */
-    virtual status_t finishConfiguration() = 0;
+    virtual status_t finishConfiguration(/*out*/bool* streamReconfigured = nullptr) = 0;
 
     /**
      * Cancels the stream configuration process. This returns the stream to the
diff --git a/services/camera/libcameraservice/utils/ClientManager.h b/services/camera/libcameraservice/utils/ClientManager.h
index d7135f1..ec6f01c 100644
--- a/services/camera/libcameraservice/utils/ClientManager.h
+++ b/services/camera/libcameraservice/utils/ClientManager.h
@@ -33,12 +33,38 @@
 
 class ClientPriority {
 public:
-    ClientPriority(int32_t score, int32_t state) :
-        mScore(score), mState(state) {}
+    /**
+     * Choosing to set mIsVendorClient through a parameter instead of calling
+     * hardware::IPCThreadState::self()->isServingCall() to protect against the
+     * case where the construction is offloaded to another thread which isn't a
+     * hwbinder thread.
+     */
+    ClientPriority(int32_t score, int32_t state, bool isVendorClient) :
+            mScore(score), mState(state), mIsVendorClient(isVendorClient) { }
 
     int32_t getScore() const { return mScore; }
     int32_t getState() const { return mState; }
 
+    void setScore(int32_t score) {
+        // For vendor clients, the score is set once and for all during
+        // construction. Otherwise, it can get reset each time cameraserver
+        // queries ActivityManagerService for oom_adj scores / states.
+        if (!mIsVendorClient) {
+            mScore = score;
+        }
+    }
+
+    void setState(int32_t state) {
+        // For vendor clients, the state is set once and for all during
+        // construction. Otherwise, it can get reset each time cameraserver
+        // queries ActivityManagerService for oom_adj scores / states
+        // (ActivityManagerService returns a vendor process' state as
+        // PROCESS_STATE_NONEXISTENT).
+        if (!mIsVendorClient) {
+            mState = state;
+        }
+    }
+
     bool operator==(const ClientPriority& rhs) const {
         return (this->mScore == rhs.mScore) && (this->mState == rhs.mState);
     }
@@ -66,6 +92,7 @@
 private:
         int32_t mScore;
         int32_t mState;
+        bool mIsVendorClient = false;
 };
 
 // --------------------------------------------------------------------------------
@@ -82,9 +109,10 @@
 class ClientDescriptor final {
 public:
     ClientDescriptor(const KEY& key, const VALUE& value, int32_t cost,
-            const std::set<KEY>& conflictingKeys, int32_t score, int32_t ownerId, int32_t state);
+            const std::set<KEY>& conflictingKeys, int32_t score, int32_t ownerId, int32_t state,
+            bool isVendorClient);
     ClientDescriptor(KEY&& key, VALUE&& value, int32_t cost, std::set<KEY>&& conflictingKeys,
-            int32_t score, int32_t ownerId, int32_t state);
+            int32_t score, int32_t ownerId, int32_t state, bool isVendorClient);
 
     ~ClientDescriptor();
 
@@ -148,17 +176,19 @@
 
 template<class KEY, class VALUE>
 ClientDescriptor<KEY, VALUE>::ClientDescriptor(const KEY& key, const VALUE& value, int32_t cost,
-        const std::set<KEY>& conflictingKeys, int32_t score, int32_t ownerId, int32_t state) :
+        const std::set<KEY>& conflictingKeys, int32_t score, int32_t ownerId, int32_t state,
+        bool isVendorClient) :
         mKey{key}, mValue{value}, mCost{cost}, mConflicting{conflictingKeys},
-        mPriority(score, state),
+        mPriority(score, state, isVendorClient),
         mOwnerId{ownerId} {}
 
 template<class KEY, class VALUE>
 ClientDescriptor<KEY, VALUE>::ClientDescriptor(KEY&& key, VALUE&& value, int32_t cost,
-        std::set<KEY>&& conflictingKeys, int32_t score, int32_t ownerId, int32_t state) :
+        std::set<KEY>&& conflictingKeys, int32_t score, int32_t ownerId, int32_t state,
+        bool isVendorClient) :
         mKey{std::forward<KEY>(key)}, mValue{std::forward<VALUE>(value)}, mCost{cost},
         mConflicting{std::forward<std::set<KEY>>(conflictingKeys)},
-        mPriority(score, state), mOwnerId{ownerId} {}
+        mPriority(score, state, isVendorClient), mOwnerId{ownerId} {}
 
 template<class KEY, class VALUE>
 ClientDescriptor<KEY, VALUE>::~ClientDescriptor() {}
@@ -204,7 +234,13 @@
 
 template<class KEY, class VALUE>
 void ClientDescriptor<KEY, VALUE>::setPriority(const ClientPriority& priority) {
-    mPriority = priority;
+    // We don't use the usual copy constructor here since we want to remember
+    // whether a client is a vendor client or not. This could have been wiped
+    // off in the incoming priority argument since an AIDL thread might have
+    // called hardware::IPCThreadState::self()->isServingCall() after refreshing
+    // priorities for old clients through ProcessInfoService::getProcessStatesScoresFromPids().
+    mPriority.setScore(priority.getScore());
+    mPriority.setState(priority.getState());
 }
 
 // --------------------------------------------------------------------------------
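
The net effect of the ClientPriority changes is a "sticky" vendor priority: once constructed as a vendor client, later refreshes from ActivityManagerService cannot overwrite the score or state. A compact sketch of that behavior:

#include <cstdint>

class PrioritySketch {
  public:
    PrioritySketch(int32_t score, int32_t state, bool isVendor)
        : mScore(score), mState(state), mIsVendor(isVendor) {}

    void setScore(int32_t score) { if (!mIsVendor) mScore = score; }
    void setState(int32_t state) { if (!mIsVendor) mState = state; }

    int32_t score() const { return mScore; }
    int32_t state() const { return mState; }

  private:
    int32_t mScore;
    int32_t mState;
    bool mIsVendor;
};
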
diff --git a/services/camera/libcameraservice/utils/TagMonitor.cpp b/services/camera/libcameraservice/utils/TagMonitor.cpp
index f4c49ec..4037a66 100644
--- a/services/camera/libcameraservice/utils/TagMonitor.cpp
+++ b/services/camera/libcameraservice/utils/TagMonitor.cpp
@@ -100,10 +100,13 @@
     mMonitoringEnabled = false;
     mLastMonitoredRequestValues.clear();
     mLastMonitoredResultValues.clear();
+    mLastMonitoredPhysicalRequestKeys.clear();
+    mLastMonitoredPhysicalResultKeys.clear();
 }
 
 void TagMonitor::monitorMetadata(eventSource source, int64_t frameNumber, nsecs_t timestamp,
-        const CameraMetadata& metadata) {
+        const CameraMetadata& metadata,
+        const std::unordered_map<std::string, CameraMetadata>& physicalMetadata) {
     if (!mMonitoringEnabled) return;
 
     std::lock_guard<std::mutex> lock(mMonitorMutex);
@@ -112,62 +115,77 @@
         timestamp = systemTime(SYSTEM_TIME_BOOTTIME);
     }
 
+    std::string emptyId;
     for (auto tag : mMonitoredTagList) {
-        camera_metadata_ro_entry entry = metadata.find(tag);
-        CameraMetadata &lastValues = (source == REQUEST) ?
-                mLastMonitoredRequestValues : mLastMonitoredResultValues;
-        if (lastValues.isEmpty()) {
-            lastValues = CameraMetadata(mMonitoredTagList.size());
-            const camera_metadata_t *metaBuffer =
-                    lastValues.getAndLock();
-            set_camera_metadata_vendor_id(
-                    const_cast<camera_metadata_t *> (metaBuffer), mVendorTagId);
-            lastValues.unlock(metaBuffer);
+        monitorSingleMetadata(source, frameNumber, timestamp, emptyId, tag, metadata);
+
+        for (auto& m : physicalMetadata) {
+            monitorSingleMetadata(source, frameNumber, timestamp, m.first, tag, m.second);
         }
+    }
+}
 
-        camera_metadata_entry lastEntry = lastValues.find(tag);
+void TagMonitor::monitorSingleMetadata(eventSource source, int64_t frameNumber, nsecs_t timestamp,
+        const std::string& cameraId, uint32_t tag, const CameraMetadata& metadata) {
 
-        if (entry.count > 0) {
-            bool isDifferent = false;
-            if (lastEntry.count > 0) {
-                // Have a last value, compare to see if changed
-                if (lastEntry.type == entry.type &&
-                        lastEntry.count == entry.count) {
-                    // Same type and count, compare values
-                    size_t bytesPerValue = camera_metadata_type_size[lastEntry.type];
-                    size_t entryBytes = bytesPerValue * lastEntry.count;
-                    int cmp = memcmp(entry.data.u8, lastEntry.data.u8, entryBytes);
-                    if (cmp != 0) {
-                        isDifferent = true;
-                    }
-                } else {
-                    // Count or type has changed
+    CameraMetadata &lastValues = (source == REQUEST) ?
+            (cameraId.empty() ? mLastMonitoredRequestValues :
+                    mLastMonitoredPhysicalRequestKeys[cameraId]) :
+            (cameraId.empty() ? mLastMonitoredResultValues :
+                    mLastMonitoredPhysicalResultKeys[cameraId]);
+
+    camera_metadata_ro_entry entry = metadata.find(tag);
+    if (lastValues.isEmpty()) {
+        lastValues = CameraMetadata(mMonitoredTagList.size());
+        const camera_metadata_t *metaBuffer =
+                lastValues.getAndLock();
+        set_camera_metadata_vendor_id(
+                const_cast<camera_metadata_t *> (metaBuffer), mVendorTagId);
+        lastValues.unlock(metaBuffer);
+    }
+
+    camera_metadata_entry lastEntry = lastValues.find(tag);
+
+    if (entry.count > 0) {
+        bool isDifferent = false;
+        if (lastEntry.count > 0) {
+            // Have a last value, compare to see if changed
+            if (lastEntry.type == entry.type &&
+                    lastEntry.count == entry.count) {
+                // Same type and count, compare values
+                size_t bytesPerValue = camera_metadata_type_size[lastEntry.type];
+                size_t entryBytes = bytesPerValue * lastEntry.count;
+                int cmp = memcmp(entry.data.u8, lastEntry.data.u8, entryBytes);
+                if (cmp != 0) {
                     isDifferent = true;
                 }
             } else {
-                // No last entry, so always consider to be different
+                // Count or type has changed
                 isDifferent = true;
             }
+        } else {
+            // No last entry, so always consider to be different
+            isDifferent = true;
+        }
 
-            if (isDifferent) {
-                ALOGV("%s: Tag %s changed", __FUNCTION__,
-                      get_local_camera_metadata_tag_name_vendor_id(
-                              tag, mVendorTagId));
-                lastValues.update(entry);
-                mMonitoringEvents.emplace(source, frameNumber, timestamp, entry);
-            }
-        } else if (lastEntry.count > 0) {
-            // Value has been removed
-            ALOGV("%s: Tag %s removed", __FUNCTION__,
+        if (isDifferent) {
+            ALOGV("%s: Tag %s changed", __FUNCTION__,
                   get_local_camera_metadata_tag_name_vendor_id(
                           tag, mVendorTagId));
-            lastValues.erase(tag);
-            entry.tag = tag;
-            entry.type = get_local_camera_metadata_tag_type_vendor_id(tag,
-                    mVendorTagId);
-            entry.count = 0;
-            mMonitoringEvents.emplace(source, frameNumber, timestamp, entry);
+            lastValues.update(entry);
+            mMonitoringEvents.emplace(source, frameNumber, timestamp, entry, cameraId);
         }
+    } else if (lastEntry.count > 0) {
+        // Value has been removed
+        ALOGV("%s: Tag %s removed", __FUNCTION__,
+              get_local_camera_metadata_tag_name_vendor_id(
+                      tag, mVendorTagId));
+        lastValues.erase(tag);
+        entry.tag = tag;
+        entry.type = get_local_camera_metadata_tag_type_vendor_id(tag,
+                mVendorTagId);
+        entry.count = 0;
+        mMonitoringEvents.emplace(source, frameNumber, timestamp, entry, cameraId);
     }
 }
 
@@ -190,8 +208,9 @@
         dprintf(fd, "     Monitored tag event log:\n");
         for (const auto& event : mMonitoringEvents) {
             int indentation = (event.source == REQUEST) ? 15 : 30;
-            dprintf(fd, "        f%d:%" PRId64 "ns: %*s%s.%s: ",
+            dprintf(fd, "        f%d:%" PRId64 "ns:%*s%*s%s.%s: ",
                     event.frameNumber, event.timestamp,
+                    2, event.cameraId.c_str(),
                     indentation,
                     event.source == REQUEST ? "REQ:" : "RES:",
                     get_local_camera_metadata_section_name_vendor_id(event.tag,
@@ -296,13 +315,14 @@
 
 template<typename T>
 TagMonitor::MonitorEvent::MonitorEvent(eventSource src, uint32_t frameNumber, nsecs_t timestamp,
-        const T &value) :
+        const T &value, const std::string& cameraId) :
         source(src),
         frameNumber(frameNumber),
         timestamp(timestamp),
         tag(value.tag),
         type(value.type),
-        newData(value.data.u8, value.data.u8 + camera_metadata_type_size[value.type] * value.count) {
+        newData(value.data.u8, value.data.u8 + camera_metadata_type_size[value.type] * value.count),
+        cameraId(cameraId) {
 }
 
 TagMonitor::MonitorEvent::~MonitorEvent() {
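With monitorSingleMetadata() factored out above, a single monitorMetadata() call now covers the logical stream plus every physical sub-camera. A minimal caller sketch, assuming TagMonitor.h is on the include path; the function and variable names here are hypothetical:

    #include <string>
    #include <unordered_map>
    #include "TagMonitor.h"   // assumed include path within libcameraservice

    using android::CameraMetadata;
    using android::TagMonitor;

    // Hypothetical caller; logicalResult/physicalResults stand in for the
    // metadata a capture result carries for a logical multi-camera.
    void reportResultSketch(TagMonitor& monitor, int64_t frameNumber, nsecs_t timestamp,
            const CameraMetadata& logicalResult,
            const std::unordered_map<std::string, CameraMetadata>& physicalResults) {
        // The monitor keys its "last seen" values by physical camera id; the
        // empty id is reserved for the logical metadata.
        monitor.monitorMetadata(TagMonitor::RESULT, frameNumber, timestamp,
                logicalResult, physicalResults);
    }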
diff --git a/services/camera/libcameraservice/utils/TagMonitor.h b/services/camera/libcameraservice/utils/TagMonitor.h
index 2dece62..1b7b033 100644
--- a/services/camera/libcameraservice/utils/TagMonitor.h
+++ b/services/camera/libcameraservice/utils/TagMonitor.h
@@ -20,6 +20,7 @@
 #include <vector>
 #include <atomic>
 #include <mutex>
+#include <unordered_map>
 
 #include <utils/RefBase.h>
 #include <utils/String8.h>
@@ -62,7 +63,8 @@
 
     // Scan through the metadata and update the monitoring information
     void monitorMetadata(eventSource source, int64_t frameNumber,
-            nsecs_t timestamp, const CameraMetadata& metadata);
+            nsecs_t timestamp, const CameraMetadata& metadata,
+            const std::unordered_map<std::string, CameraMetadata>& physicalMetadata);
 
     // Dump current event log to the provided fd
     void dumpMonitoredMetadata(int fd);
@@ -72,6 +74,10 @@
     static void printData(int fd, const uint8_t *data_ptr, uint32_t tag,
             int type, int count, int indentation);
 
+    void monitorSingleMetadata(TagMonitor::eventSource source, int64_t frameNumber,
+            nsecs_t timestamp, const std::string& cameraId, uint32_t tag,
+            const CameraMetadata& metadata);
+
     std::atomic<bool> mMonitoringEnabled;
     std::mutex mMonitorMutex;
 
@@ -82,6 +88,9 @@
     CameraMetadata mLastMonitoredRequestValues;
     CameraMetadata mLastMonitoredResultValues;
 
+    std::unordered_map<std::string, CameraMetadata> mLastMonitoredPhysicalRequestKeys;
+    std::unordered_map<std::string, CameraMetadata> mLastMonitoredPhysicalResultKeys;
+
     /**
      * A monitoring event
      * Stores a new metadata field value and the timestamp at which it changed.
@@ -90,7 +99,7 @@
     struct MonitorEvent {
         template<typename T>
         MonitorEvent(eventSource src, uint32_t frameNumber, nsecs_t timestamp,
-                const T &newValue);
+                const T &newValue, const std::string& cameraId);
         ~MonitorEvent();
 
         eventSource source;
@@ -99,6 +108,7 @@
         uint32_t tag;
         uint8_t type;
         std::vector<uint8_t> newData;
+        std::string cameraId;
     };
 
     // A ring buffer for tracking the last kMaxMonitorEvents metadata changes
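The per-camera maps declared above rely on unordered_map::operator[] default-constructing an empty CameraMetadata for an unseen physical id, which monitorSingleMetadata() then detects via isEmpty() and initializes on first use. A self-contained analog of that lazy-initialization pattern, with std::string standing in for CameraMetadata:

    #include <cassert>
    #include <string>
    #include <unordered_map>

    int main() {
        std::unordered_map<std::string, std::string> lastValues;
        std::string& entry = lastValues["2"];  // default-constructed on first access
        assert(entry.empty());                 // analogous to lastValues.isEmpty()
        entry = "initialized";                 // analogous to sizing + vendor-id tagging
        assert(lastValues.at("2") == "initialized");
        return 0;
    }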
diff --git a/services/mediaanalytics/Android.bp b/services/mediaanalytics/Android.bp
index c93c120..72f4b52 100644
--- a/services/mediaanalytics/Android.bp
+++ b/services/mediaanalytics/Android.bp
@@ -7,8 +7,22 @@
     srcs: [
         "main_mediametrics.cpp",
         "MediaAnalyticsService.cpp",
+        "iface_statsd.cpp",
+        "statsd_audiopolicy.cpp",
+        "statsd_audiorecord.cpp",
+        "statsd_audiothread.cpp",
+        "statsd_audiotrack.cpp",
+        "statsd_codec.cpp",
+        "statsd_drm.cpp",
+        "statsd_extractor.cpp",
+        "statsd_nuplayer.cpp",
+        "statsd_recorder.cpp",
     ],
 
+    proto: {
+        type: "lite",
+    },
+
     shared_libs: [
         "libcutils",
         "liblog",
@@ -21,10 +35,15 @@
         "libmediautils",
         "libmediametrics",
         "libstagefright_foundation",
+        "libstatslog",
         "libutils",
+        "libprotobuf-cpp-lite",
     ],
 
-    static_libs: ["libregistermsext"],
+    static_libs: [
+        "libplatformprotos",
+        "libregistermsext",
+    ],
 
     include_dirs: [
         "frameworks/av/media/libstagefright/include",
diff --git a/services/mediaanalytics/MediaAnalyticsService.cpp b/services/mediaanalytics/MediaAnalyticsService.cpp
index 06baac9..3626ad1 100644
--- a/services/mediaanalytics/MediaAnalyticsService.cpp
+++ b/services/mediaanalytics/MediaAnalyticsService.cpp
@@ -210,21 +210,24 @@
 
     // XXX: if we have a sessionid in the new record, look to make
     // sure it doesn't appear in the finalized list.
-    // XXX: this is for security / DOS prevention.
-    // may also require that we persist the unique sessionIDs
-    // across boots [instead of within a single boot]
 
     if (item->count() == 0) {
-        // drop empty records
+        ALOGV("dropping empty record...");
         delete item;
         item = NULL;
         return MediaAnalyticsItem::SessionIDInvalid;
     }
 
     // save the new record
+    //
+    // send a copy to statsd
+    dump2Statsd(item);
+
+    // and keep our copy for dumpsys
     MediaAnalyticsItem::SessionID_t id = item->getSessionID();
     saveItem(item);
     mItemsFinalized++;
+
     return id;
 }
 
diff --git a/services/mediaanalytics/MediaAnalyticsService.h b/services/mediaanalytics/MediaAnalyticsService.h
index 632c692..6c9cbaa 100644
--- a/services/mediaanalytics/MediaAnalyticsService.h
+++ b/services/mediaanalytics/MediaAnalyticsService.h
@@ -112,6 +112,9 @@
 
 };
 
+// hook to send things off to the statsd subsystem
+extern bool dump2Statsd(MediaAnalyticsItem *item);
+
 // ----------------------------------------------------------------------------
 
 }; // namespace android
diff --git a/services/mediaanalytics/iface_statsd.cpp b/services/mediaanalytics/iface_statsd.cpp
new file mode 100644
index 0000000..6845f06
--- /dev/null
+++ b/services/mediaanalytics/iface_statsd.cpp
@@ -0,0 +1,92 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "iface_statsd"
+#include <utils/Log.h>
+
+#include <stdint.h>
+#include <inttypes.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <sys/time.h>
+#include <dirent.h>
+#include <pthread.h>
+#include <unistd.h>
+
+#include <string.h>
+#include <pwd.h>
+
+#include "MediaAnalyticsService.h"
+#include "iface_statsd.h"
+
+#include <statslog.h>
+
+namespace android {
+
+// set of routines that crack a MediaAnalyticsItem
+// and send it off to statsd with the appropriate hooks
+//
+// each MediaAnalyticsItem type (extractor, codec, nuplayer, etc)
+// has its own routine to handle this.
+//
+
+bool enabled_statsd = true;
+
+struct statsd_hooks {
+    const char *key;
+    bool (*handler)(MediaAnalyticsItem *);
+};
+
+// keep this sorted for readability; the lookup below is a simple linear scan
+struct statsd_hooks  statsd_handlers[] =
+{
+    { "audiopolicy", statsd_audiopolicy },
+    { "audiorecord", statsd_audiorecord },
+    { "audiothread", statsd_audiothread },
+    { "audiotrack", statsd_audiotrack },
+    { "codec", statsd_codec},
+    { "drm.vendor.Google.WidevineCDM", statsd_widevineCDM },
+    { "extractor", statsd_extractor },
+    { "mediadrm", statsd_mediadrm },
+    { "nuplayer", statsd_nuplayer },
+    { "nuplayer2", statsd_nuplayer },
+    { "recorder", statsd_recorder },
+};
+
+
+// give me a record, I'll look at the type and upload appropriately
+bool dump2Statsd(MediaAnalyticsItem *item) {
+    if (item == NULL) return false;
+
+    // get the key
+    std::string key = item->getKey();
+
+    if (!enabled_statsd) {
+        ALOGV("statsd logging disabled for record key=%s", key.c_str());
+        return false;
+    }
+
+    size_t i;
+    for (i = 0; i < sizeof(statsd_handlers) / sizeof(statsd_handlers[0]); i++) {
+        if (key == statsd_handlers[i].key) {
+            return (*statsd_handlers[i].handler)(item);
+        }
+    }
+    return false;
+}
+
+} // namespace android
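dump2Statsd() above routes a record purely on its key through a small fixed table. A self-contained analog of that dispatch; Record, Hook and the handler names are illustrative, not the MediaAnalyticsItem API:

    #include <cstdio>
    #include <string>

    struct Record { std::string key; };

    static bool handleCodec(const Record&)     { std::puts("codec handled");     return true; }
    static bool handleExtractor(const Record&) { std::puts("extractor handled"); return true; }

    struct Hook { const char *key; bool (*handler)(const Record&); };

    // Small fixed table, scanned linearly; unknown keys fall through, and the
    // caller keeps its own copy of the record regardless of the return value.
    static const Hook kHooks[] = {
        { "codec",     handleCodec },
        { "extractor", handleExtractor },
    };

    static bool dispatch(const Record& r) {
        for (const Hook& h : kHooks) {
            if (r.key == h.key) return h.handler(r);
        }
        return false;
    }

    int main() {
        return dispatch(Record{"extractor"}) ? 0 : 1;
    }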
diff --git a/services/mediaanalytics/iface_statsd.h b/services/mediaanalytics/iface_statsd.h
new file mode 100644
index 0000000..f85d303
--- /dev/null
+++ b/services/mediaanalytics/iface_statsd.h
@@ -0,0 +1,34 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+namespace android {
+
+extern bool enabled_statsd;
+
+// component specific dumpers
+extern bool statsd_audiopolicy(MediaAnalyticsItem *);
+extern bool statsd_audiorecord(MediaAnalyticsItem *);
+extern bool statsd_audiothread(MediaAnalyticsItem *);
+extern bool statsd_audiotrack(MediaAnalyticsItem *);
+extern bool statsd_codec(MediaAnalyticsItem *);
+extern bool statsd_extractor(MediaAnalyticsItem *);
+extern bool statsd_nuplayer(MediaAnalyticsItem *);
+extern bool statsd_recorder(MediaAnalyticsItem *);
+
+extern bool statsd_mediadrm(MediaAnalyticsItem *);
+extern bool statsd_widevineCDM(MediaAnalyticsItem *);
+
+} // namespace android
diff --git a/services/mediaanalytics/statsd_audiopolicy.cpp b/services/mediaanalytics/statsd_audiopolicy.cpp
new file mode 100644
index 0000000..06c4dde
--- /dev/null
+++ b/services/mediaanalytics/statsd_audiopolicy.cpp
@@ -0,0 +1,133 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "statsd_audiopolicy"
+#include <utils/Log.h>
+
+#include <dirent.h>
+#include <inttypes.h>
+#include <pthread.h>
+#include <pwd.h>
+#include <stdint.h>
+#include <string.h>
+#include <sys/stat.h>
+#include <sys/time.h>
+#include <sys/types.h>
+#include <unistd.h>
+
+#include <statslog.h>
+
+#include "MediaAnalyticsService.h"
+#include "frameworks/base/core/proto/android/stats/mediametrics/mediametrics.pb.h"
+#include "iface_statsd.h"
+
+namespace android {
+
+bool statsd_audiopolicy(MediaAnalyticsItem *item)
+{
+    if (item == NULL) return false;
+
+    // these go into the statsd wrapper
+    nsecs_t timestamp = item->getTimestamp();
+    std::string pkgName = item->getPkgName();
+    int64_t pkgVersionCode = item->getPkgVersionCode();
+    int64_t mediaApexVersion = 0;
+
+
+    // the rest into our own proto
+    //
+    ::android::stats::mediametrics::AudioPolicyData metrics_proto;
+
+    // flesh out the protobuf we'll hand off with our data
+    //
+    //int32 char kAudioPolicyStatus[] = "android.media.audiopolicy.status";
+    int32_t status = -1;
+    if (item->getInt32("android.media.audiopolicy.status", &status)) {
+        metrics_proto.set_status(status);
+    }
+    //string char kAudioPolicyRqstSrc[] = "android.media.audiopolicy.rqst.src";
+    char *rqst_src = NULL;
+    if (item->getCString("android.media.audiopolicy.rqst.src", &rqst_src)) {
+        metrics_proto.set_request_source(rqst_src);
+    }
+    //string char kAudioPolicyRqstPkg[] = "android.media.audiopolicy.rqst.pkg";
+    char *rqst_pkg = NULL;
+    if (item->getCString("android.media.audiopolicy.rqst.pkg", &rqst_pkg)) {
+        metrics_proto.set_request_package(rqst_pkg);
+    }
+    //int32 char kAudioPolicyRqstSession[] = "android.media.audiopolicy.rqst.session";
+    int32_t rqst_session = -1;
+    if (item->getInt32("android.media.audiopolicy.rqst.session", &rqst_session)) {
+        metrics_proto.set_request_session(rqst_session);
+    }
+    //string char kAudioPolicyRqstDevice[] = "android.media.audiopolicy.rqst.device";
+    char *rqst_device = NULL;
+    if (item->getCString("android.media.audiopolicy.rqst.device", &rqst_device)) {
+        metrics_proto.set_request_device(rqst_device);
+    }
+
+    //string char kAudioPolicyActiveSrc[] = "android.media.audiopolicy.active.src";
+    char *active_src = NULL;
+    if (item->getCString("android.media.audiopolicy.active.src", &active_src)) {
+        metrics_proto.set_active_source(active_src);
+    }
+    //string char kAudioPolicyActivePkg[] = "android.media.audiopolicy.active.pkg";
+    char *active_pkg = NULL;
+    if (item->getCString("android.media.audiopolicy.active.pkg", &active_pkg)) {
+        metrics_proto.set_active_package(active_pkg);
+    }
+    //int32 char kAudioPolicyActiveSession[] = "android.media.audiopolicy.active.session";
+    int32_t active_session = -1;
+    if (item->getInt32("android.media.audiopolicy.active.session", &active_session)) {
+        metrics_proto.set_active_session(active_session);
+    }
+    //string char kAudioPolicyActiveDevice[] = "android.media.audiopolicy.active.device";
+    char *active_device = NULL;
+    if (item->getCString("android.media.audiopolicy.active.device", &active_device)) {
+        metrics_proto.set_active_device(active_device);
+    }
+
+
+    std::string serialized;
+    if (!metrics_proto.SerializeToString(&serialized)) {
+        ALOGE("Failed to serialize audipolicy metrics");
+        return false;
+    }
+
+    if (enabled_statsd) {
+        android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
+        (void)android::util::stats_write(android::util::MEDIAMETRICS_AUDIOPOLICY_REPORTED,
+                                   timestamp, pkgName.c_str(), pkgVersionCode,
+                                   mediaApexVersion,
+                                   bf_serialized);
+
+    } else {
+        ALOGV("NOT sending: private data (len=%zu)", strlen(serialized.c_str()));
+    }
+
+    // must free the strings that we were given
+    free(rqst_src);
+    free(rqst_pkg);
+    free(rqst_device);
+    free(active_src);
+    free(active_pkg);
+    free(active_device);
+
+    return true;
+}
+
+};
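Each statsd_* handler above follows the same ownership contract: getCString() hands back a heap-allocated copy, the proto setters copy the data, and the handler frees every string it was given, including ones that stayed NULL. A self-contained analog of that contract; getCStringAnalog is a stand-in, not the real MediaAnalyticsItem API:

    #include <cstdlib>
    #include <cstring>
    #include <string>

    static bool getCStringAnalog(const char *stored, char **out) {
        if (stored == nullptr) return false;
        *out = strdup(stored);      // caller now owns this allocation
        return *out != nullptr;
    }

    int main() {
        char *rqst_src = nullptr;
        std::string proto_field;                 // proto setters copy their input
        if (getCStringAnalog("mic", &rqst_src)) {
            proto_field = rqst_src;
        }
        free(rqst_src);                          // safe even if it stayed nullptr
        return proto_field.empty() ? 1 : 0;
    }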
diff --git a/services/mediaanalytics/statsd_audiorecord.cpp b/services/mediaanalytics/statsd_audiorecord.cpp
new file mode 100644
index 0000000..c9edb27
--- /dev/null
+++ b/services/mediaanalytics/statsd_audiorecord.cpp
@@ -0,0 +1,164 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "statsd_audiorecord"
+#include <utils/Log.h>
+
+#include <dirent.h>
+#include <inttypes.h>
+#include <pthread.h>
+#include <pwd.h>
+#include <stdint.h>
+#include <string.h>
+#include <sys/stat.h>
+#include <sys/time.h>
+#include <sys/types.h>
+#include <unistd.h>
+
+#include <statslog.h>
+
+#include "MediaAnalyticsService.h"
+#include "frameworks/base/core/proto/android/stats/mediametrics/mediametrics.pb.h"
+#include "iface_statsd.h"
+
+namespace android {
+
+bool statsd_audiorecord(MediaAnalyticsItem *item)
+{
+    if (item == NULL) return false;
+
+    // these go into the statsd wrapper
+    nsecs_t timestamp = item->getTimestamp();
+    std::string pkgName = item->getPkgName();
+    int64_t pkgVersionCode = item->getPkgVersionCode();
+    int64_t mediaApexVersion = 0;
+
+
+    // the rest into our own proto
+    //
+    ::android::stats::mediametrics::AudioRecordData metrics_proto;
+
+    // flesh out the protobuf we'll hand off with our data
+    //
+    char *encoding = NULL;
+    if (item->getCString("android.media.audiorecord.encoding", &encoding)) {
+        metrics_proto.set_encoding(encoding);
+    }
+
+    char *source = NULL;
+    if (item->getCString("android.media.audiorecord.source", &source)) {
+        metrics_proto.set_source(source);
+    }
+
+    int32_t latency = -1;
+    if (item->getInt32("android.media.audiorecord.latency", &latency)) {
+        metrics_proto.set_latency(latency);
+    }
+
+    int32_t samplerate = -1;
+    if (item->getInt32("android.media.audiorecord.samplerate", &samplerate)) {
+        metrics_proto.set_samplerate(samplerate);
+    }
+
+    int32_t channels = -1;
+    if (item->getInt32("android.media.audiorecord.channels", &channels)) {
+        metrics_proto.set_channels(channels);
+    }
+
+    int64_t createdMs = -1;
+    if (item->getInt64("android.media.audiorecord.createdMs", &createdMs)) {
+        metrics_proto.set_created_millis(createdMs);
+    }
+
+    int64_t durationMs = -1;
+    if (item->getInt64("android.media.audiorecord.durationMs", &durationMs)) {
+        metrics_proto.set_duration_millis(durationMs);
+    }
+
+    int32_t count = -1;
+    if (item->getInt32("android.media.audiorecord.n", &count)) {
+        metrics_proto.set_count(count);
+    }
+
+    int32_t errcode = -1;
+    if (item->getInt32("android.media.audiorecord.errcode", &errcode)) {
+        metrics_proto.set_error_code(errcode);
+    } else if (item->getInt32("android.media.audiorecord.lastError.code", &errcode)) {
+        metrics_proto.set_error_code(errcode);
+    }
+
+    char *errfunc = NULL;
+    if (item->getCString("android.media.audiorecord.errfunc", &errfunc)) {
+        metrics_proto.set_error_function(errfunc);
+    } else if (item->getCString("android.media.audiorecord.lastError.at", &errfunc)) {
+        metrics_proto.set_error_function(errfunc);
+    }
+
+    // portId (int32)
+    int32_t port_id = -1;
+    if (item->getInt32("android.media.audiorecord.portId", &port_id)) {
+        metrics_proto.set_port_id(port_id);
+    }
+    // frameCount (int32)
+    int32_t frameCount = -1;
+    if (item->getInt32("android.media.audiorecord.frameCount", &frameCount)) {
+        metrics_proto.set_frame_count(frameCount);
+    }
+    // attributes (string)
+    char *attributes = NULL;
+    if (item->getCString("android.media.audiorecord.attributes", &attributes)) {
+        metrics_proto.set_attributes(attributes);
+    }
+    // channelMask (int64)
+    int64_t channelMask = -1;
+    if (item->getInt64("android.media.audiorecord.channelMask", &channelMask)) {
+        metrics_proto.set_channel_mask(channelMask);
+    }
+    // startcount (int64)
+    int64_t startcount = -1;
+    if (item->getInt64("android.media.audiorecord.startcount", &startcount)) {
+        metrics_proto.set_start_count(startcount);
+    }
+
+
+    std::string serialized;
+    if (!metrics_proto.SerializeToString(&serialized)) {
+        ALOGE("Failed to serialize audiorecord metrics");
+        return false;
+    }
+
+    if (enabled_statsd) {
+        android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
+        (void)android::util::stats_write(android::util::MEDIAMETRICS_AUDIORECORD_REPORTED,
+                                   timestamp, pkgName.c_str(), pkgVersionCode,
+                                   mediaApexVersion,
+                                   bf_serialized);
+
+    } else {
+        ALOGV("NOT sending: private data (len=%zu)", strlen(serialized.c_str()));
+    }
+
+    // must free the strings that we were given
+    free(encoding);
+    free(source);
+    free(errfunc);
+    free(attributes);
+
+    return true;
+}
+
+};
diff --git a/services/mediaanalytics/statsd_audiothread.cpp b/services/mediaanalytics/statsd_audiothread.cpp
new file mode 100644
index 0000000..8232424
--- /dev/null
+++ b/services/mediaanalytics/statsd_audiothread.cpp
@@ -0,0 +1,217 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "statsd_audiothread"
+#include <utils/Log.h>
+
+#include <dirent.h>
+#include <inttypes.h>
+#include <pthread.h>
+#include <pwd.h>
+#include <stdint.h>
+#include <string.h>
+#include <sys/stat.h>
+#include <sys/time.h>
+#include <sys/types.h>
+#include <unistd.h>
+
+#include <statslog.h>
+
+#include "MediaAnalyticsService.h"
+#include "frameworks/base/core/proto/android/stats/mediametrics/mediametrics.pb.h"
+#include "iface_statsd.h"
+
+namespace android {
+
+bool statsd_audiothread(MediaAnalyticsItem *item)
+{
+    if (item == NULL) return false;
+
+    // these go into the statsd wrapper
+    nsecs_t timestamp = item->getTimestamp();
+    std::string pkgName = item->getPkgName();
+    int64_t pkgVersionCode = item->getPkgVersionCode();
+    int64_t mediaApexVersion = 0;
+
+
+    // the rest into our own proto
+    //
+    ::android::stats::mediametrics::AudioThreadData metrics_proto;
+
+#define	MM_PREFIX "android.media.audiothread."
+
+    // flesh out the protobuf we'll hand off with our data
+    //
+    char *mytype = NULL;
+    if (item->getCString(MM_PREFIX "type", &mytype)) {
+        metrics_proto.set_type(mytype);
+    }
+    int32_t framecount = -1;
+    if (item->getInt32(MM_PREFIX "framecount", &framecount)) {
+        metrics_proto.set_framecount(framecount);
+    }
+    int32_t samplerate = -1;
+    if (item->getInt32(MM_PREFIX "samplerate", &samplerate)) {
+        metrics_proto.set_samplerate(samplerate);
+    }
+    char *workhist = NULL;
+    if (item->getCString(MM_PREFIX "workMs.hist", &workhist)) {
+        metrics_proto.set_work_millis_hist(workhist);
+    }
+    char *latencyhist = NULL;
+    if (item->getCString(MM_PREFIX "latencyMs.hist", &latencyhist)) {
+        metrics_proto.set_latency_millis_hist(latencyhist);
+    }
+    char *warmuphist = NULL;
+    if (item->getCString(MM_PREFIX "warmupMs.hist", &warmuphist)) {
+        metrics_proto.set_warmup_millis_hist(warmuphist);
+    }
+    int64_t underruns = -1;
+    if (item->getInt64(MM_PREFIX "underruns", &underruns)) {
+        metrics_proto.set_underruns(underruns);
+    }
+    int64_t overruns = -1;
+    if (item->getInt64(MM_PREFIX "overruns", &overruns)) {
+        metrics_proto.set_overruns(overruns);
+    }
+    int64_t activeMs = -1;
+    if (item->getInt64(MM_PREFIX "activeMs", &activeMs)) {
+        metrics_proto.set_active_millis(activeMs);
+    }
+    int64_t durationMs = -1;
+    if (item->getInt64(MM_PREFIX "durationMs", &durationMs)) {
+        metrics_proto.set_duration_millis(durationMs);
+    }
+
+    // item->setInt32(MM_PREFIX "id", (int32_t)mId); // IO handle
+    int32_t id = -1;
+    if (item->getInt32(MM_PREFIX "id", &id)) {
+        metrics_proto.set_id(id);
+    }
+    // item->setInt32(MM_PREFIX "portId", (int32_t)mPortId);
+    int32_t port_id = -1;
+    if (item->getInt32(MM_PREFIX "portId", &id)) {
+        metrics_proto.set_port_id(port_id);
+    }
+    // item->setCString(MM_PREFIX "type", threadTypeToString(mType));
+    char *type = NULL;
+    if (item->getCString(MM_PREFIX "type", &type)) {
+        metrics_proto.set_type(type);
+    }
+    // item->setInt32(MM_PREFIX "sampleRate", (int32_t)mSampleRate);
+    int32_t sample_rate = -1;
+    if (item->getInt32(MM_PREFIX "sampleRate", &sample_rate)) {
+        metrics_proto.set_sample_rate(sample_rate);
+    }
+    // item->setInt64(MM_PREFIX "channelMask", (int64_t)mChannelMask);
+    int64_t channel_mask = -1;
+    if (item->getInt64(MM_PREFIX "channelMask", &channel_mask)) {
+        metrics_proto.set_channel_mask(channel_mask);
+    }
+    // item->setCString(MM_PREFIX "encoding", toString(mFormat).c_str());
+    char *encoding = NULL;
+    if (item->getCString(MM_PREFIX "encoding", &encoding)) {
+        metrics_proto.set_encoding(encoding);
+    }
+    // item->setInt32(MM_PREFIX "frameCount", (int32_t)mFrameCount);
+    int32_t frame_count = -1;
+    if (item->getInt32(MM_PREFIX "frameCount", &frame_count)) {
+        metrics_proto.set_frame_count(frame_count);
+    }
+    // item->setCString(MM_PREFIX "outDevice", toString(mOutDevice).c_str());
+    char *outDevice = NULL;
+    if (item->getCString(MM_PREFIX "outDevice", &outDevice)) {
+        metrics_proto.set_output_device(outDevice);
+    }
+    // item->setCString(MM_PREFIX "inDevice", toString(mInDevice).c_str());
+    char *inDevice = NULL;
+    if (item->getCString(MM_PREFIX "inDevice", &inDevice)) {
+        metrics_proto.set_input_device(inDevice);
+    }
+    // item->setDouble(MM_PREFIX "ioJitterMs.mean", mIoJitterMs.getMean());
+    double iojitters_ms_mean = -1;
+    if (item->getDouble(MM_PREFIX "ioJitterMs.mean", &iojitters_ms_mean)) {
+        metrics_proto.set_io_jitter_mean_millis(iojitters_ms_mean);
+    }
+    // item->setDouble(MM_PREFIX "ioJitterMs.std", mIoJitterMs.getStdDev());
+    double iojitters_ms_std = -1;
+    if (item->getDouble(MM_PREFIX "ioJitterMs.std", &iojitters_ms_std)) {
+        metrics_proto.set_io_jitter_stddev_millis(iojitters_ms_std);
+    }
+    // item->setDouble(MM_PREFIX "processTimeMs.mean", mProcessTimeMs.getMean());
+    double process_time_ms_mean = -1;
+    if (item->getDouble(MM_PREFIX "processTimeMs.mean", &process_time_ms_mean)) {
+        metrics_proto.set_process_time_mean_millis(process_time_ms_mean);
+    }
+    // item->setDouble(MM_PREFIX "processTimeMs.std", mProcessTimeMs.getStdDev());
+    double process_time_ms_std = -1;
+    if (item->getDouble(MM_PREFIX "processTimeMs.std", &process_time_ms_std)) {
+        metrics_proto.set_process_time_stddev_millis(process_time_ms_std);
+    }
+    // item->setDouble(MM_PREFIX "timestampJitterMs.mean", tsjitter.getMean());
+    double timestamp_jitter_ms_mean = -1;
+    if (item->getDouble(MM_PREFIX "timestampJitterMs.mean", &timestamp_jitter_ms_mean)) {
+        metrics_proto.set_timestamp_jitter_mean_millis(timestamp_jitter_ms_mean);
+    }
+    // item->setDouble(MM_PREFIX "timestampJitterMs.std", tsjitter.getStdDev());
+    double timestamp_jitter_ms_stddev = -1;
+    if (item->getDouble(MM_PREFIX "timestampJitterMs.std", &timestamp_jitter_ms_stddev)) {
+        metrics_proto.set_timestamp_jitter_stddev_millis(timestamp_jitter_ms_stddev);
+    }
+    // item->setDouble(MM_PREFIX "latencyMs.mean", mLatencyMs.getMean());
+    double latency_ms_mean = -1;
+    if (item->getDouble(MM_PREFIX "latencyMs.mean", &latency_ms_mean)) {
+        metrics_proto.set_latency_mean_millis(latency_ms_mean);
+    }
+    // item->setDouble(MM_PREFIX "latencyMs.std", mLatencyMs.getStdDev());
+    double latency_ms_stddev = -1;
+    if (item->getDouble(MM_PREFIX "latencyMs.std", &latency_ms_stddev)) {
+        metrics_proto.set_latency_stddev_millis(latency_ms_stddev);
+    }
+
+
+    std::string serialized;
+    if (!metrics_proto.SerializeToString(&serialized)) {
+        ALOGE("Failed to serialize audiothread metrics");
+        return false;
+    }
+
+    if (enabled_statsd) {
+        android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
+        (void)android::util::stats_write(android::util::MEDIAMETRICS_AUDIOTHREAD_REPORTED,
+                                   timestamp, pkgName.c_str(), pkgVersionCode,
+                                   mediaApexVersion,
+                                   bf_serialized);
+
+    } else {
+        ALOGV("NOT sending: private data (len=%zu)", strlen(serialized.c_str()));
+    }
+
+    // must free the strings that we were given
+    free(mytype);
+    free(workhist);
+    free(latencyhist);
+    free(warmuphist);
+    free(type);
+    free(encoding);
+    free(inDevice);
+    free(outDevice);
+
+    return true;
+}
+
+};
diff --git a/services/mediaanalytics/statsd_audiotrack.cpp b/services/mediaanalytics/statsd_audiotrack.cpp
new file mode 100644
index 0000000..f250ced
--- /dev/null
+++ b/services/mediaanalytics/statsd_audiotrack.cpp
@@ -0,0 +1,156 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "statsd_audiotrack"
+#include <utils/Log.h>
+
+#include <dirent.h>
+#include <inttypes.h>
+#include <pthread.h>
+#include <pwd.h>
+#include <stdint.h>
+#include <string.h>
+#include <sys/stat.h>
+#include <sys/time.h>
+#include <sys/types.h>
+#include <unistd.h>
+
+#include <statslog.h>
+
+#include "MediaAnalyticsService.h"
+#include "frameworks/base/core/proto/android/stats/mediametrics/mediametrics.pb.h"
+#include "iface_statsd.h"
+
+namespace android {
+
+bool statsd_audiotrack(MediaAnalyticsItem *item)
+{
+    if (item == NULL) return false;
+
+    // these go into the statsd wrapper
+    nsecs_t timestamp = item->getTimestamp();
+    std::string pkgName = item->getPkgName();
+    int64_t pkgVersionCode = item->getPkgVersionCode();
+    int64_t mediaApexVersion = 0;
+
+
+    // the rest into our own proto
+    //
+    ::android::stats::mediametrics::AudioTrackData metrics_proto;
+
+    // flesh out the protobuf we'll hand off with our data
+    //
+
+    // static constexpr char kAudioTrackStreamType[] = "android.media.audiotrack.streamtype";
+    // optional string streamType;
+    char *streamtype = NULL;
+    if (item->getCString("android.media.audiotrack.streamtype", &streamtype)) {
+        metrics_proto.set_stream_type(streamtype);
+    }
+
+    // static constexpr char kAudioTrackContentType[] = "android.media.audiotrack.type";
+    // optional string contentType;
+    char *contenttype = NULL;
+    if (item->getCString("android.media.audiotrack.type", &contenttype)) {
+        metrics_proto.set_content_type(contenttype);
+    }
+
+    // static constexpr char kAudioTrackUsage[] = "android.media.audiotrack.usage";
+    // optional string trackUsage;
+    char *trackusage = NULL;
+    if (item->getCString("android.media.audiotrack.usage", &trackusage)) {
+        metrics_proto.set_track_usage(trackusage);
+    }
+
+    // static constexpr char kAudioTrackSampleRate[] = "android.media.audiotrack.samplerate";
+    // optional int32 samplerate;
+    int32_t samplerate = -1;
+    if (item->getInt32("android.media.audiotrack.samplerate", &samplerate)) {
+        metrics_proto.set_sample_rate(samplerate);
+    }
+
+    // static constexpr char kAudioTrackChannelMask[] = "android.media.audiotrack.channelmask";
+    // optional int64 channelMask;
+    int64_t channelMask = -1;
+    if (item->getInt64("android.media.audiotrack.channelmask", &channelMask)) {
+        metrics_proto.set_channel_mask(channelMask);
+    }
+
+    // NB: These are not yet exposed as public Java API constants.
+    // static constexpr char kAudioTrackUnderrunFrames[] = "android.media.audiotrack.underrunframes";
+    // optional int32 underrunframes;
+    int32_t underrunframes = -1;
+    if (item->getInt32("android.media.audiotrack.underrunframes", &underrunframes)) {
+        metrics_proto.set_underrun_frames(underrunframes);
+    }
+
+    // static constexpr char kAudioTrackStartupGlitch[] = "android.media.audiotrack.glitch.startup";
+    // optional int32 startupglitch;
+    int32_t startupglitch = -1;
+    if (item->getInt32("android.media.audiotrack.glitch.startup", &startupglitch)) {
+        metrics_proto.set_startup_glitch(startupglitch);
+    }
+
+    // portId (int32)
+    int32_t port_id = -1;
+    if (item->getInt32("android.media.audiotrack.portId", &port_id)) {
+        metrics_proto.set_port_id(port_id);
+    }
+    // encoding (string)
+    char *encoding = NULL;
+    if (item->getCString("android.media.audiotrack.encoding", &encoding)) {
+        metrics_proto.set_encoding(encoding);
+    }
+    // frameCount (int32)
+    int32_t frame_count = -1;
+    if (item->getInt32("android.media.audiotrack.frameCount", &frame_count)) {
+        metrics_proto.set_frame_count(frame_count);
+    }
+    // attributes (string)
+    char *attributes = NULL;
+    if (item->getCString("android.media.audiotrack.attributes", &attributes)) {
+        metrics_proto.set_attributes(attributes);
+    }
+
+    std::string serialized;
+    if (!metrics_proto.SerializeToString(&serialized)) {
+        ALOGE("Failed to serialize audiotrack metrics");
+        return false;
+    }
+
+    if (enabled_statsd) {
+        android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
+        (void)android::util::stats_write(android::util::MEDIAMETRICS_AUDIOTRACK_REPORTED,
+                                   timestamp, pkgName.c_str(), pkgVersionCode,
+                                   mediaApexVersion,
+                                   bf_serialized);
+
+    } else {
+        ALOGV("NOT sending: private data (len=%zu)", strlen(serialized.c_str()));
+    }
+
+    // must free the strings that we were given
+    free(streamtype);
+    free(contenttype);
+    free(trackusage);
+    free(encoding);
+    free(attributes);
+
+    return true;
+}
+
+};
diff --git a/services/mediaanalytics/statsd_codec.cpp b/services/mediaanalytics/statsd_codec.cpp
new file mode 100644
index 0000000..dc8e4ef
--- /dev/null
+++ b/services/mediaanalytics/statsd_codec.cpp
@@ -0,0 +1,185 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "statsd_codec"
+#include <utils/Log.h>
+
+#include <dirent.h>
+#include <inttypes.h>
+#include <pthread.h>
+#include <pwd.h>
+#include <stdint.h>
+#include <string.h>
+#include <sys/stat.h>
+#include <sys/time.h>
+#include <sys/types.h>
+#include <unistd.h>
+
+#include <statslog.h>
+
+#include "MediaAnalyticsService.h"
+#include "frameworks/base/core/proto/android/stats/mediametrics/mediametrics.pb.h"
+#include "iface_statsd.h"
+
+namespace android {
+
+bool statsd_codec(MediaAnalyticsItem *item)
+{
+    if (item == NULL) return false;
+
+    // these go into the statsd wrapper
+    nsecs_t timestamp = item->getTimestamp();
+    std::string pkgName = item->getPkgName();
+    int64_t pkgVersionCode = item->getPkgVersionCode();
+    int64_t mediaApexVersion = 0;
+
+
+    // the rest into our own proto
+    //
+    ::android::stats::mediametrics::CodecData metrics_proto;
+
+    // flesh out the protobuf we'll hand off with our data
+    //
+    // android.media.mediacodec.codec   string
+    char *codec = NULL;
+    if (item->getCString("android.media.mediacodec.codec", &codec)) {
+        metrics_proto.set_codec(codec);
+    }
+    // android.media.mediacodec.mime    string
+    char *mime = NULL;
+    if (item->getCString("android.media.mediacodec.mime", &mime)) {
+        metrics_proto.set_mime(mime);
+    }
+    // android.media.mediacodec.mode    string
+    char *mode = NULL;
+    if ( item->getCString("android.media.mediacodec.mode", &mode)) {
+        metrics_proto.set_mode(mode);
+    }
+    // android.media.mediacodec.encoder int32
+    int32_t encoder = -1;
+    if ( item->getInt32("android.media.mediacodec.encoder", &encoder)) {
+        metrics_proto.set_encoder(encoder);
+    }
+    // android.media.mediacodec.secure  int32
+    int32_t secure = -1;
+    if ( item->getInt32("android.media.mediacodec.secure", &secure)) {
+        metrics_proto.set_secure(secure);
+    }
+    // android.media.mediacodec.width   int32
+    int32_t width = -1;
+    if ( item->getInt32("android.media.mediacodec.width", &width)) {
+        metrics_proto.set_width(width);
+    }
+    // android.media.mediacodec.height  int32
+    int32_t height = -1;
+    if ( item->getInt32("android.media.mediacodec.height", &height)) {
+        metrics_proto.set_height(height);
+    }
+    // android.media.mediacodec.rotation-degrees        int32
+    int32_t rotation = -1;
+    if ( item->getInt32("android.media.mediacodec.rotation-degrees", &rotation)) {
+        metrics_proto.set_rotation(rotation);
+    }
+    // android.media.mediacodec.crypto  int32 (although missing if not needed)
+    int32_t crypto = -1;
+    if ( item->getInt32("android.media.mediacodec.crypto", &crypto)) {
+        metrics_proto.set_crypto(crypto);
+    }
+    // android.media.mediacodec.profile int32
+    int32_t profile = -1;
+    if ( item->getInt32("android.media.mediacodec.profile", &profile)) {
+        metrics_proto.set_profile(profile);
+    }
+    // android.media.mediacodec.level   int32
+    int32_t level = -1;
+    if ( item->getInt32("android.media.mediacodec.level", &level)) {
+        metrics_proto.set_level(level);
+    }
+    // android.media.mediacodec.maxwidth        int32
+    int32_t maxwidth = -1;
+    if ( item->getInt32("android.media.mediacodec.maxwidth", &maxwidth)) {
+        metrics_proto.set_max_width(maxwidth);
+    }
+    // android.media.mediacodec.maxheight       int32
+    int32_t maxheight = -1;
+    if ( item->getInt32("android.media.mediacodec.maxheight", &maxheight)) {
+        metrics_proto.set_max_height(maxheight);
+    }
+    // android.media.mediacodec.errcode         int32
+    int32_t errcode = -1;
+    if ( item->getInt32("android.media.mediacodec.errcode", &errcode)) {
+        metrics_proto.set_error_code(errcode);
+    }
+    // android.media.mediacodec.errstate        string
+    char *errstate = NULL;
+    if ( item->getCString("android.media.mediacodec.errstate", &errstate)) {
+        metrics_proto.set_error_state(errstate);
+    }
+    // android.media.mediacodec.latency.max  int64
+    int64_t latency_max = -1;
+    if ( item->getInt64("android.media.mediacodec.latency.max", &latency_max)) {
+        metrics_proto.set_latency_max(latency_max);
+    }
+    // android.media.mediacodec.latency.min  int64
+    int64_t latency_min = -1;
+    if ( item->getInt64("android.media.mediacodec.latency.min", &latency_min)) {
+        metrics_proto.set_latency_min(latency_min);
+    }
+    // android.media.mediacodec.latency.avg  int64
+    int64_t latency_avg = -1;
+    if ( item->getInt64("android.media.mediacodec.latency.avg", &latency_avg)) {
+        metrics_proto.set_latency_avg(latency_avg);
+    }
+    // android.media.mediacodec.latency.n    int64
+    int64_t latency_count = -1;
+    if ( item->getInt64("android.media.mediacodec.latency.n", &latency_count)) {
+        metrics_proto.set_latency_count(latency_count);
+    }
+    // android.media.mediacodec.latency.unknown    int64
+    int64_t latency_unknown = -1;
+    if ( item->getInt64("android.media.mediacodec.latency.unknown", &latency_unknown)) {
+        metrics_proto.set_latency_unknown(latency_unknown);
+    }
+    // android.media.mediacodec.latency.hist    NOT EMITTED
+
+    std::string serialized;
+    if (!metrics_proto.SerializeToString(&serialized)) {
+        ALOGE("Failed to serialize codec metrics");
+        return false;
+    }
+
+    if (enabled_statsd) {
+        android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
+        (void)android::util::stats_write(android::util::MEDIAMETRICS_CODEC_REPORTED,
+                                   timestamp, pkgName.c_str(), pkgVersionCode,
+                                   mediaApexVersion,
+                                   bf_serialized);
+
+    } else {
+        ALOGV("NOT sending: private data (len=%zu)", strlen(serialized.c_str()));
+    }
+
+    // must free the strings that we were given
+    free(codec);
+    free(mime);
+    free(mode);
+    free(errstate);
+
+    return true;
+}
+
+};
diff --git a/services/mediaanalytics/statsd_drm.cpp b/services/mediaanalytics/statsd_drm.cpp
new file mode 100644
index 0000000..902483a
--- /dev/null
+++ b/services/mediaanalytics/statsd_drm.cpp
@@ -0,0 +1,107 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "statsd_drm"
+#include <utils/Log.h>
+
+#include <stdint.h>
+#include <inttypes.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <sys/time.h>
+#include <dirent.h>
+#include <pthread.h>
+#include <unistd.h>
+
+#include <string.h>
+#include <pwd.h>
+
+#include "MediaAnalyticsService.h"
+#include "iface_statsd.h"
+
+#include <statslog.h>
+
+namespace android {
+
+// mediadrm
+bool statsd_mediadrm(MediaAnalyticsItem *item)
+{
+    if (item == NULL) return false;
+
+    nsecs_t timestamp = item->getTimestamp();
+    std::string pkgName = item->getPkgName();
+    int64_t pkgVersionCode = item->getPkgVersionCode();
+    int64_t mediaApexVersion = 0;
+
+    char *vendor = NULL;
+    (void) item->getCString("vendor", &vendor);
+    char *description = NULL;
+    (void) item->getCString("description", &description);
+    char *serialized_metrics = NULL;
+    (void) item->getCString("serialized_metrics", &serialized_metrics);
+
+    if (enabled_statsd) {
+        android::util::BytesField bf_serialized(serialized_metrics ? serialized_metrics : NULL,
+                                                serialized_metrics ? strlen(serialized_metrics)
+                                                                   : 0);
+        android::util::stats_write(android::util::MEDIAMETRICS_MEDIADRM_REPORTED,
+                                   timestamp, pkgName.c_str(), pkgVersionCode,
+                                   mediaApexVersion,
+                                   vendor, description,
+                                   bf_serialized);
+    } else {
+        ALOGV("NOT sending: mediadrm private data (len=%zu)",
+              serialized_metrics ? strlen(serialized_metrics) : 0);
+    }
+
+    free(vendor);
+    free(description);
+    free(serialized_metrics);
+    return true;
+}
+
+// widevineCDM
+bool statsd_widevineCDM(MediaAnalyticsItem *item)
+{
+    if (item == NULL) return false;
+
+    nsecs_t timestamp = item->getTimestamp();
+    std::string pkgName = item->getPkgName();
+    int64_t pkgVersionCode = item->getPkgVersionCode();
+    int64_t mediaApexVersion = 0;
+
+    char *serialized_metrics = NULL;
+    (void) item->getCString("serialized_metrics", &serialized_metrics);
+
+    if (enabled_statsd) {
+        android::util::BytesField bf_serialized(serialized_metrics ? serialized_metrics : NULL,
+                                                serialized_metrics ? strlen(serialized_metrics)
+                                                                   : 0);
+        android::util::stats_write(android::util::MEDIAMETRICS_DRM_WIDEVINE_REPORTED,
+                                   timestamp, pkgName.c_str(), pkgVersionCode,
+                                   mediaApexVersion,
+                                   bf_serialized);
+    } else {
+        ALOGV("NOT sending: widevine private data (len=%zu)",
+              serialized_metrics ? strlen(serialized_metrics) : 0);
+    }
+
+    free(serialized_metrics);
+    return true;
+}
+
+} // namespace android
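Unlike the proto-backed handlers, the two drm dumpers above forward the vendor-supplied serialized_metrics blob directly, so the pointer and the length both have to degrade gracefully when no metrics were captured. A self-contained analog of that null-safe handling:

    #include <cstdio>
    #include <cstring>

    static void reportSketch(const char *serialized_metrics) {
        // Mirrors the BytesField construction above: a NULL pointer and a zero
        // length when the item carried no serialized metrics.
        size_t len = serialized_metrics ? strlen(serialized_metrics) : 0;
        std::printf("would send %zu bytes (%s)\n", len,
                    serialized_metrics ? "present" : "absent");
    }

    int main() {
        reportSketch(nullptr);                 // no metrics captured
        reportSketch("vendor-metrics-blob");   // metrics present
        return 0;
    }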
diff --git a/services/mediaanalytics/statsd_extractor.cpp b/services/mediaanalytics/statsd_extractor.cpp
new file mode 100644
index 0000000..395c912
--- /dev/null
+++ b/services/mediaanalytics/statsd_extractor.cpp
@@ -0,0 +1,98 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "statsd_extractor"
+#include <utils/Log.h>
+
+#include <dirent.h>
+#include <inttypes.h>
+#include <pthread.h>
+#include <pwd.h>
+#include <stdint.h>
+#include <string.h>
+#include <sys/stat.h>
+#include <sys/time.h>
+#include <sys/types.h>
+#include <unistd.h>
+
+#include <statslog.h>
+
+#include "MediaAnalyticsService.h"
+#include "frameworks/base/core/proto/android/stats/mediametrics/mediametrics.pb.h"
+#include "iface_statsd.h"
+
+namespace android {
+
+bool statsd_extractor(MediaAnalyticsItem *item)
+{
+    if (item == NULL) return false;
+
+    // these go into the statsd wrapper
+    nsecs_t timestamp = item->getTimestamp();
+    std::string pkgName = item->getPkgName();
+    int64_t pkgVersionCode = item->getPkgVersionCode();
+    int64_t mediaApexVersion = 0;
+
+
+    // the rest into our own proto
+    //
+    ::android::stats::mediametrics::ExtractorData metrics_proto;
+
+    // flesh out the protobuf we'll hand off with our data
+    //
+
+    // android.media.mediaextractor.fmt         string
+    char *fmt = NULL;
+    if (item->getCString("android.media.mediaextractor.fmt", &fmt)) {
+        metrics_proto.set_format(fmt);
+    }
+    // android.media.mediaextractor.mime        string
+    char *mime = NULL;
+    if (item->getCString("android.media.mediaextractor.mime", &mime)) {
+        metrics_proto.set_mime(mime);
+    }
+    // android.media.mediaextractor.ntrk        int32
+    int32_t ntrk = -1;
+    if (item->getInt32("android.media.mediaextractor.ntrk", &ntrk)) {
+        metrics_proto.set_tracks(ntrk);
+    }
+
+    std::string serialized;
+    if (!metrics_proto.SerializeToString(&serialized)) {
+        ALOGE("Failed to serialize extractor metrics");
+        return false;
+    }
+
+    if (enabled_statsd) {
+        android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
+        (void)android::util::stats_write(android::util::MEDIAMETRICS_EXTRACTOR_REPORTED,
+                                   timestamp, pkgName.c_str(), pkgVersionCode,
+                                   mediaApexVersion,
+                                   bf_serialized);
+
+    } else {
+        ALOGV("NOT sending: private data (len=%zu)", strlen(serialized.c_str()));
+    }
+
+    // must free the strings that we were given
+    free(fmt);
+    free(mime);
+
+    return true;
+}
+
+};
diff --git a/services/mediaanalytics/statsd_nuplayer.cpp b/services/mediaanalytics/statsd_nuplayer.cpp
new file mode 100644
index 0000000..5ec118a
--- /dev/null
+++ b/services/mediaanalytics/statsd_nuplayer.cpp
@@ -0,0 +1,178 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "statsd_nuplayer"
+#include <utils/Log.h>
+
+#include <dirent.h>
+#include <inttypes.h>
+#include <pthread.h>
+#include <pwd.h>
+#include <stdint.h>
+#include <string.h>
+#include <sys/stat.h>
+#include <sys/time.h>
+#include <sys/types.h>
+#include <unistd.h>
+
+#include <statslog.h>
+
+#include "MediaAnalyticsService.h"
+#include "frameworks/base/core/proto/android/stats/mediametrics/mediametrics.pb.h"
+#include "iface_statsd.h"
+
+namespace android {
+
+/*
+ *  handles nuplayer AND nuplayer2
+ *  checks for the union of what the two players generate
+ */
+bool statsd_nuplayer(MediaAnalyticsItem *item)
+{
+    if (item == NULL) return false;
+
+    // these go into the statsd wrapper
+    nsecs_t timestamp = item->getTimestamp();
+    std::string pkgName = item->getPkgName();
+    int64_t pkgVersionCode = item->getPkgVersionCode();
+    int64_t mediaApexVersion = 0;
+
+
+    // the rest into our own proto
+    //
+    ::android::stats::mediametrics::NuPlayerData metrics_proto;
+
+    // flesh out the protobuf we'll hand off with our data
+    //
+
+    // differentiate between nuplayer and nuplayer2
+    metrics_proto.set_whichplayer(item->getKey().c_str());
+
+    char *video_mime = NULL;
+    if (item->getCString("android.media.mediaplayer.video.mime", &video_mime)) {
+        metrics_proto.set_video_mime(video_mime);
+    }
+    char *video_codec = NULL;
+    if (item->getCString("android.media.mediaplayer.video.codec", &video_codec)) {
+        metrics_proto.set_video_codec(video_codec);
+    }
+
+    int32_t width = -1;
+    if (item->getInt32("android.media.mediaplayer.width", &width)) {
+        metrics_proto.set_width(width);
+    }
+    int32_t height = -1;
+    if (item->getInt32("android.media.mediaplayer.height", &height)) {
+        metrics_proto.set_height(height);
+    }
+
+    int64_t frames = -1;
+    if (item->getInt64("android.media.mediaplayer.frames", &frames)) {
+        metrics_proto.set_frames(frames);
+    }
+    int64_t frames_dropped = -1;
+    if (item->getInt64("android.media.mediaplayer.dropped", &frames_dropped)) {
+        metrics_proto.set_frames_dropped(frames_dropped);
+    }
+    int64_t frames_dropped_startup = -1;
+    if (item->getInt64("android.media.mediaplayer.startupdropped", &frames_dropped_startup)) {
+        metrics_proto.set_frames_dropped_startup(frames_dropped_startup);
+    }
+    double fps = -1.0;
+    if (item->getDouble("android.media.mediaplayer.fps", &fps)) {
+        metrics_proto.set_framerate(fps);
+    }
+
+    char *audio_mime = NULL;
+    if (item->getCString("android.media.mediaplayer.audio.mime", &audio_mime)) {
+        metrics_proto.set_audio_mime(audio_mime);
+    }
+    char *audio_codec = NULL;
+    if (item->getCString("android.media.mediaplayer.audio.codec", &audio_codec)) {
+        metrics_proto.set_audio_codec(audio_codec);
+    }
+
+    int64_t duration_ms = -1;
+    if (item->getInt64("android.media.mediaplayer.durationMs", &duration_ms)) {
+        metrics_proto.set_duration_millis(duration_ms);
+    }
+    int64_t playing_ms = -1;
+    if (item->getInt64("android.media.mediaplayer.playingMs", &playing_ms)) {
+        metrics_proto.set_playing_millis(playing_ms);
+    }
+
+    int32_t err = -1;
+    if (item->getInt32("android.media.mediaplayer.err", &err)) {
+        metrics_proto.set_error(err);
+    }
+    int32_t error_code = -1;
+    if (item->getInt32("android.media.mediaplayer.errcode", &error_code)) {
+        metrics_proto.set_error_code(error_code);
+    }
+    char *error_state = NULL;
+    if (item->getCString("android.media.mediaplayer.errstate", &error_state)) {
+        metrics_proto.set_error_state(error_state);
+    }
+
+    char *data_source_type = NULL;
+    if (item->getCString("android.media.mediaplayer.dataSource", &data_source_type)) {
+        metrics_proto.set_data_source_type(data_source_type);
+    }
+
+    int64_t rebufferingMs = -1;
+    if (item->getInt64("android.media.mediaplayer.rebufferingMs", &rebufferingMs)) {
+        metrics_proto.set_rebuffering_millis(rebufferingMs);
+    }
+    int32_t rebuffers = -1;
+    if (item->getInt32("android.media.mediaplayer.rebuffers", &rebuffers)) {
+        metrics_proto.set_rebuffers(rebuffers);
+    }
+    int32_t rebufferExit = -1;
+    if (item->getInt32("android.media.mediaplayer.rebufferExit", &rebufferExit)) {
+        metrics_proto.set_rebuffer_at_exit(rebufferExit);
+    }
+
+
+    std::string serialized;
+    if (!metrics_proto.SerializeToString(&serialized)) {
+        ALOGE("Failed to serialize nuplayer metrics");
+        return false;
+    }
+
+    if (enabled_statsd) {
+        android::util::BytesField bf_serialized(serialized.c_str(), serialized.size());
+        (void)android::util::stats_write(android::util::MEDIAMETRICS_NUPLAYER_REPORTED,
+                                   timestamp, pkgName.c_str(), pkgVersionCode,
+                                   mediaApexVersion,
+                                   bf_serialized);
+
+    } else {
+        ALOGV("NOT sending: private data (len=%zu)", strlen(serialized.c_str()));
+    }
+
+    // must free the strings that we were given
+    free(video_mime);
+    free(video_codec);
+    free(audio_mime);
+    free(audio_codec);
+    free(error_state);
+    free(data_source_type);
+
+    return true;
+}
+
+} // namespace android
diff --git a/services/mediaanalytics/statsd_recorder.cpp b/services/mediaanalytics/statsd_recorder.cpp
new file mode 100644
index 0000000..4d981b4
--- /dev/null
+++ b/services/mediaanalytics/statsd_recorder.cpp
@@ -0,0 +1,193 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "statsd_recorder"
+#include <utils/Log.h>
+
+#include <dirent.h>
+#include <inttypes.h>
+#include <pthread.h>
+#include <pwd.h>
+#include <stdint.h>
+#include <string.h>
+#include <sys/stat.h>
+#include <sys/time.h>
+#include <sys/types.h>
+#include <unistd.h>
+
+#include <statslog.h>
+
+#include "MediaAnalyticsService.h"
+#include "frameworks/base/core/proto/android/stats/mediametrics/mediametrics.pb.h"
+#include "iface_statsd.h"
+
+namespace android {
+
+bool statsd_recorder(MediaAnalyticsItem *item)
+{
+    if (item == NULL) return false;
+
+    // these go into the statsd wrapper
+    nsecs_t timestamp = item->getTimestamp();
+    std::string pkgName = item->getPkgName();
+    int64_t pkgVersionCode = item->getPkgVersionCode();
+    int64_t mediaApexVersion = 0;
+
+
+    // the rest into our own proto
+    //
+    ::android::stats::mediametrics::RecorderData metrics_proto;
+
+    // flesh out the protobuf we'll hand off with our data
+    //
+
+    // string kRecorderAudioMime = "android.media.mediarecorder.audio.mime";
+    char *audio_mime = NULL;
+    if (item->getCString("android.media.mediarecorder.audio.mime", &audio_mime)) {
+        metrics_proto.set_audio_mime(audio_mime);
+    }
+    // string kRecorderVideoMime = "android.media.mediarecorder.video.mime";
+    char *video_mime = NULL;
+    if (item->getCString("android.media.mediarecorder.video.mime", &video_mime)) {
+        metrics_proto.set_video_mime(video_mime);
+    }
+    // int32 kRecorderVideoProfile = "android.media.mediarecorder.video-encoder-profile";
+    int32_t videoProfile = -1;
+    if (item->getInt32("android.media.mediarecorder.video-encoder-profile", &videoProfile)) {
+        metrics_proto.set_video_profile(videoProfile);
+    }
+    // int32 kRecorderVideoLevel = "android.media.mediarecorder.video-encoder-level";
+    int32_t videoLevel = -1;
+    if (item->getInt32("android.media.mediarecorder.video-encoder-level", &videoLevel)) {
+        metrics_proto.set_video_level(videoLevel);
+    }
+    // int32 kRecorderWidth = "android.media.mediarecorder.width";
+    int32_t width = -1;
+    if (item->getInt32("android.media.mediarecorder.width", &width)) {
+        metrics_proto.set_width(width);
+    }
+    // int32 kRecorderHeight = "android.media.mediarecorder.height";
+    int32_t height = -1;
+    if (item->getInt32("android.media.mediarecorder.height", &height)) {
+        metrics_proto.set_height(height);
+    }
+    // int32 kRecorderRotation = "android.media.mediarecorder.rotation";
+    int32_t rotation = -1;                      // default to 0?
+    if (item->getInt32("android.media.mediarecorder.rotation", &rotation)) {
+        metrics_proto.set_rotation(rotation);
+    }
+    // int32 kRecorderFrameRate = "android.media.mediarecorder.frame-rate";
+    int32_t framerate = -1;
+    if (item->getInt32("android.media.mediarecorder.frame-rate", &framerate)) {
+        metrics_proto.set_framerate(framerate);
+    }
+
+    // int32 kRecorderCaptureFps = "android.media.mediarecorder.capture-fps";
+    int32_t captureFps = -1;
+    if (item->getInt32("android.media.mediarecorder.capture-fps", &captureFps)) {
+        metrics_proto.set_capture_fps(captureFps);
+    }
+    // double kRecorderCaptureFpsEnable = "android.media.mediarecorder.capture-fpsenable";
+    double captureFpsEnable = -1;
+    if (item->getDouble("android.media.mediarecorder.capture-fpsenable", &captureFpsEnable)) {
+        metrics_proto.set_capture_fps_enable(captureFpsEnable);
+    }
+
+    // int64 kRecorderDurationMs = "android.media.mediarecorder.durationMs";
+    int64_t durationMs = -1;
+    if (item->getInt64("android.media.mediarecorder.durationMs", &durationMs)) {
+        metrics_proto.set_duration_millis(durationMs);
+    }
+    // int64 kRecorderPaused = "android.media.mediarecorder.pausedMs";
+    int64_t pausedMs = -1;
+    if (item->getInt64("android.media.mediarecorder.pausedMs", &pausedMs)) {
+        metrics_proto.set_paused_millis(pausedMs);
+    }
+    // int32 kRecorderNumPauses = "android.media.mediarecorder.NPauses";
+    int32_t pausedCount = -1;
+    if (item->getInt32("android.media.mediarecorder.NPauses", &pausedCount)) {
+        metrics_proto.set_paused_count(pausedCount);
+    }
+
+    // int32 kRecorderAudioBitrate = "android.media.mediarecorder.audio-bitrate";
+    int32_t audioBitrate = -1;
+    if (item->getInt32("android.media.mediarecorder.audio-bitrate", &audioBitrate)) {
+        metrics_proto.set_audio_bitrate(audioBitrate);
+    }
+    // int32 kRecorderAudioChannels = "android.media.mediarecorder.audio-channels";
+    int32_t audioChannels = -1;
+    if (item->getInt32("android.media.mediarecorder.audio-channels", &audioChannels)) {
+        metrics_proto.set_audio_channels(audioChannels);
+    }
+    // int32 kRecorderAudioSampleRate = "android.media.mediarecorder.audio-samplerate";
+    int32_t audioSampleRate = -1;
+    if (item->getInt32("android.media.mediarecorder.audio-samplerate", &audioSampleRate)) {
+        metrics_proto.set_audio_samplerate(audioSampleRate);
+    }
+
+    // int32 kRecorderMovieTimescale = "android.media.mediarecorder.movie-timescale";
+    int32_t movieTimescale = -1;
+    if (item->getInt32("android.media.mediarecorder.movie-timescale", &movieTimescale)) {
+        metrics_proto.set_movie_timescale(movieTimescale);
+    }
+    // int32 kRecorderAudioTimescale = "android.media.mediarecorder.audio-timescale";
+    int32_t audioTimescale = -1;
+    if (item->getInt32("android.media.mediarecorder.audio-timescale", &audioTimescale)) {
+        metrics_proto.set_audio_timescale(audioTimescale);
+    }
+    // int32 kRecorderVideoTimescale = "android.media.mediarecorder.video-timescale";
+    int32_t videoTimescale = -1;
+    if (item->getInt32("android.media.mediarecorder.video-timescale", &videoTimescale)) {
+        metrics_proto.set_video_timescale(videoTimescale);
+    }
+
+    // int32 kRecorderVideoBitrate = "android.media.mediarecorder.video-bitrate";
+    int32_t videoBitRate = -1;
+    if (item->getInt32("android.media.mediarecorder.video-bitrate", &videoBitRate)) {
+        metrics_proto.set_video_bitrate(videoBitRate);
+    }
+    // int32 kRecorderVideoIframeInterval = "android.media.mediarecorder.video-iframe-interval";
+    int32_t iFrameInterval = -1;
+    if (item->getInt32("android.media.mediarecorder.video-iframe-interval", &iFrameInterval)) {
+        metrics_proto.set_iframe_interval(iFrameInterval);
+    }
+
+    std::string serialized;
+    if (!metrics_proto.SerializeToString(&serialized)) {
+        ALOGE("Failed to serialize recorder metrics");
+        return false;
+    }
+
+    if (enabled_statsd) {
+        android::util::BytesField bf_serialized(serialized.c_str(), serialized.size());
+        (void)android::util::stats_write(android::util::MEDIAMETRICS_RECORDER_REPORTED,
+                                   timestamp, pkgName.c_str(), pkgVersionCode,
+                                   mediaApexVersion,
+                                   bf_serialized);
+
+    } else {
+        ALOGV("NOT sending: private data (len=%zu)", strlen(serialized.c_str()));
+    }
+
+    // must free the strings that we were given
+    free(audio_mime);
+    free(video_mime);
+
+    return true;
+}
+
+} // namespace android
diff --git a/services/mediacodec/Android.mk b/services/mediacodec/Android.mk
index 473e21c..ecd437b 100644
--- a/services/mediacodec/Android.mk
+++ b/services/mediacodec/Android.mk
@@ -22,6 +22,7 @@
     libstagefright_soft_vorbisdec \
     libstagefright_soft_vpxdec \
     libstagefright_soft_vpxenc \
+    libstagefright_softomx_plugin \
 
 # service executable
 include $(CLEAR_VARS)
diff --git a/services/mediacodec/main_codecservice.cpp b/services/mediacodec/main_codecservice.cpp
index 6ffbd26..f668c33 100644
--- a/services/mediacodec/main_codecservice.cpp
+++ b/services/mediacodec/main_codecservice.cpp
@@ -49,12 +49,6 @@
 
     // Default codec services
     using namespace ::android::hardware::media::omx::V1_0;
-    sp<IOmxStore> omxStore = new implementation::OmxStore();
-    if (omxStore == nullptr) {
-        LOG(ERROR) << "Cannot create IOmxStore HAL service.";
-    } else if (omxStore->registerAsService() != OK) {
-        LOG(ERROR) << "Cannot register IOmxStore HAL service.";
-    }
     sp<IOmx> omx = new implementation::Omx();
     if (omx == nullptr) {
         LOG(ERROR) << "Cannot create IOmx HAL service.";
@@ -63,6 +57,12 @@
     } else {
         LOG(INFO) << "IOmx HAL service created.";
     }
+    sp<IOmxStore> omxStore = new implementation::OmxStore(omx);
+    if (omxStore == nullptr) {
+        LOG(ERROR) << "Cannot create IOmxStore HAL service.";
+    } else if (omxStore->registerAsService() != OK) {
+        LOG(ERROR) << "Cannot register IOmxStore HAL service.";
+    }
 
     ::android::hardware::joinRpcThreadpool();
 }
diff --git a/services/mediacodec/registrant/Android.bp b/services/mediacodec/registrant/Android.bp
index 1470de2..17c2e02 100644
--- a/services/mediacodec/registrant/Android.bp
+++ b/services/mediacodec/registrant/Android.bp
@@ -49,7 +49,6 @@
         "libcodec2_soft_flacdec",
         "libcodec2_soft_flacenc",
         "libcodec2_soft_gsmdec",
-        "libcodec2_soft_xaacdec",
     ],
 }
 
diff --git a/services/mediacodec/seccomp_policy/mediaswcodec-arm.policy b/services/mediacodec/seccomp_policy/mediaswcodec-arm.policy
index 02cedba..9042cd7 100644
--- a/services/mediacodec/seccomp_policy/mediaswcodec-arm.policy
+++ b/services/mediacodec/seccomp_policy/mediaswcodec-arm.policy
@@ -42,6 +42,7 @@
 lseek: 1
 rt_sigprocmask: 1
 openat: 1
+open: 1
 fstat64: 1
 write: 1
 nanosleep: 1
@@ -49,6 +50,7 @@
 set_tid_address: 1
 getdents64: 1
 readlinkat: 1
+readlink: 1
 read: 1
 pread64: 1
 fstatfs64: 1
@@ -61,3 +63,24 @@
 restart_syscall: 1
 rt_sigreturn: 1
 getrandom: 1
+madvise: 1
+
+# crash dump policy additions
+sigreturn: 1
+clock_gettime: 1
+futex: 1
+getpid: 1
+gettid: 1
+pipe2: 1
+recvmsg: 1
+process_vm_readv: 1
+tgkill: 1
+rt_sigaction: 1
+rt_tgsigqueueinfo: 1
+#prctl: arg0 == PR_GET_NO_NEW_PRIVS || arg0 == 0x53564d41
+#mprotect: arg2 in 0x1|0x2
+#mmap2: arg2 in 0x1|0x2
+geteuid32: 1
+getgid32: 1
+getegid32: 1
+getgroups32: 1
diff --git a/services/mediaextractor/Android.bp b/services/mediaextractor/Android.bp
new file mode 100644
index 0000000..98cc69f
--- /dev/null
+++ b/services/mediaextractor/Android.bp
@@ -0,0 +1,72 @@
+// service library
+cc_library_shared {
+    name: "libmediaextractorservice",
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
+    srcs: ["MediaExtractorService.cpp"],
+
+    shared_libs: [
+        "libmedia",
+        "libstagefright",
+        "libbinder",
+        "libutils",
+        "liblog",
+    ],
+}
+
+// service executable
+cc_binary {
+    name: "mediaextractor",
+
+    srcs: ["main_extractorservice.cpp"],
+    shared_libs: [
+        "libmedia",
+        "libmediaextractorservice",
+        "libbinder",
+        "libutils",
+        "liblog",
+        "libavservices_minijail",
+    ],
+    target: {
+        android: {
+            product_variables: {
+                malloc_not_svelte: {
+                    // Scudo increases memory footprint, so only enable on
+                    // non-svelte devices.
+                    shared_libs: ["libc_scudo"],
+                },
+            },
+        },
+    },
+    init_rc: ["mediaextractor.rc"],
+
+    include_dirs: ["frameworks/av/media/libmedia"],
+
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
+}
+
+prebuilt_etc {
+    name: "mediaextractor.policy",
+    sub_dir: "seccomp_policy",
+    arch: {
+        arm: {
+            src: "seccomp_policy/mediaextractor-arm.policy",
+        },
+        arm64: {
+            src: "seccomp_policy/mediaextractor-arm64.policy",
+        },
+        x86: {
+            src: "seccomp_policy/mediaextractor-x86.policy",
+        },
+        x86_64: {
+            src: "seccomp_policy/mediaextractor-x86_64.policy",
+        },
+    },
+    required: ["crash_dump.policy"],
+}
+
diff --git a/services/mediaextractor/Android.mk b/services/mediaextractor/Android.mk
deleted file mode 100644
index e8a59df..0000000
--- a/services/mediaextractor/Android.mk
+++ /dev/null
@@ -1,43 +0,0 @@
-LOCAL_PATH := $(call my-dir)
-
-# service library
-include $(CLEAR_VARS)
-LOCAL_CFLAGS := -Wall -Werror
-LOCAL_SRC_FILES := \
-    MediaExtractorService.cpp
-
-LOCAL_SHARED_LIBRARIES := libmedia libstagefright libbinder libutils
-LOCAL_MODULE:= libmediaextractorservice
-include $(BUILD_SHARED_LIBRARY)
-
-
-# service executable
-include $(CLEAR_VARS)
-# seccomp filters are defined for the following architectures:
-LOCAL_REQUIRED_MODULES_arm := crash_dump.policy mediaextractor.policy
-LOCAL_REQUIRED_MODULES_arm64 := crash_dump.policy mediaextractor.policy
-LOCAL_REQUIRED_MODULES_x86 := crash_dump.policy mediaextractor.policy
-LOCAL_REQUIRED_MODULES_x86_64 := crash_dump.policy mediaextractor.policy
-
-LOCAL_SRC_FILES := main_extractorservice.cpp
-ifneq (true, $(filter true, $(MALLOC_SVELTE)))
-# Scudo increases memory footprint, so only use on non-svelte configs.
-LOCAL_SHARED_LIBRARIES := libc_scudo
-endif
-LOCAL_SHARED_LIBRARIES += libmedia libmediaextractorservice libbinder libutils \
-    liblog libavservices_minijail
-LOCAL_MODULE:= mediaextractor
-LOCAL_INIT_RC := mediaextractor.rc
-LOCAL_C_INCLUDES := frameworks/av/media/libmedia
-LOCAL_CFLAGS := -Wall -Werror
-include $(BUILD_EXECUTABLE)
-
-# service seccomp filter
-ifeq ($(TARGET_ARCH), $(filter $(TARGET_ARCH), arm arm64 x86 x86_64))
-include $(CLEAR_VARS)
-LOCAL_MODULE := mediaextractor.policy
-LOCAL_MODULE_CLASS := ETC
-LOCAL_MODULE_PATH := $(TARGET_OUT)/etc/seccomp_policy
-LOCAL_SRC_FILES := seccomp_policy/mediaextractor-$(TARGET_ARCH).policy
-include $(BUILD_PREBUILT)
-endif
diff --git a/services/mediaextractor/MediaExtractorService.cpp b/services/mediaextractor/MediaExtractorService.cpp
index de5c3e4..36e084b 100644
--- a/services/mediaextractor/MediaExtractorService.cpp
+++ b/services/mediaextractor/MediaExtractorService.cpp
@@ -30,7 +30,9 @@
 namespace android {
 
 MediaExtractorService::MediaExtractorService()
-        : BnMediaExtractorService() { }
+        : BnMediaExtractorService() {
+    MediaExtractorFactory::LoadExtractors();
+}
 
 sp<IMediaExtractor> MediaExtractorService::makeExtractor(
         const sp<IDataSource> &remoteSource, const char *mime) {
diff --git a/services/mediaextractor/main_extractorservice.cpp b/services/mediaextractor/main_extractorservice.cpp
index 4a85140..3c4125b 100644
--- a/services/mediaextractor/main_extractorservice.cpp
+++ b/services/mediaextractor/main_extractorservice.cpp
@@ -36,7 +36,7 @@
 using namespace android;
 
 static const char kSystemSeccompPolicyPath[] =
-        "/system/etc/seccomp_policy/mediaextractor.policy";
+        "/apex/com.android.media/etc/seccomp_policy/mediaextractor.policy";
 static const char kVendorSeccompPolicyPath[] =
         "/vendor/etc/seccomp_policy/mediaextractor.policy";
 
diff --git a/services/oboeservice/AAudioClientTracker.cpp b/services/oboeservice/AAudioClientTracker.cpp
index 83704ba..8572561 100644
--- a/services/oboeservice/AAudioClientTracker.cpp
+++ b/services/oboeservice/AAudioClientTracker.cpp
@@ -67,6 +67,12 @@
                                          const sp<IAAudioClient>& client) {
     ALOGV("registerClient(), calling pid = %d, getpid() = %d\n", pid, getpid());
 
+    if (client.get() == nullptr) {
+        ALOGE("AAudioClientTracker::%s() client is NULL!", __func__);
+        android_errorWriteLog(0x534e4554, "116230453");
+        return AAUDIO_ERROR_NULL;
+    }
+
     std::lock_guard<std::mutex> lock(mLock);
     if (mNotificationClients.count(pid) == 0) {
         sp<NotificationClient> notificationClient = new NotificationClient(pid);
diff --git a/services/oboeservice/Android.bp b/services/oboeservice/Android.bp
new file mode 100644
index 0000000..655f017
--- /dev/null
+++ b/services/oboeservice/Android.bp
@@ -0,0 +1,57 @@
+// Copyright (C) 2019 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+cc_library_shared {
+
+    name: "libaaudioservice",
+
+    srcs: [
+        "AAudioClientTracker.cpp",
+        "AAudioEndpointManager.cpp",
+        "AAudioMixer.cpp",
+        "AAudioService.cpp",
+        "AAudioServiceEndpoint.cpp",
+        "AAudioServiceEndpointCapture.cpp",
+        "AAudioServiceEndpointMMAP.cpp",
+        "AAudioServiceEndpointPlay.cpp",
+        "AAudioServiceEndpointShared.cpp",
+        "AAudioServiceStreamBase.cpp",
+        "AAudioServiceStreamMMAP.cpp",
+        "AAudioServiceStreamShared.cpp",
+        "AAudioStreamTracker.cpp",
+        "AAudioThread.cpp",
+        "SharedMemoryProxy.cpp",
+        "SharedRingBuffer.cpp",
+        "TimestampScheduler.cpp",
+    ],
+
+    cflags: [
+        "-Wno-unused-parameter",
+        "-Wall",
+        "-Werror",
+    ],
+
+    shared_libs: [
+        "libaaudio",
+        "libaudioclient",
+        "libaudioflinger",
+        "libbase",
+        "libbinder",
+        "libcutils",
+        "liblog",
+        "libmediautils",
+        "libutils",
+    ],
+
+}
diff --git a/services/oboeservice/Android.mk b/services/oboeservice/Android.mk
deleted file mode 100644
index 3d5f140..0000000
--- a/services/oboeservice/Android.mk
+++ /dev/null
@@ -1,61 +0,0 @@
-LOCAL_PATH:= $(call my-dir)
-
-# AAudio Service
-include $(CLEAR_VARS)
-
-LOCAL_MODULE := libaaudioservice
-LOCAL_MODULE_TAGS := optional
-
-LIBAAUDIO_DIR := ../../media/libaaudio
-LIBAAUDIO_SRC_DIR := $(LIBAAUDIO_DIR)/src
-
-LOCAL_C_INCLUDES := \
-    $(TOPDIR)frameworks/av/services/audioflinger \
-    $(call include-path-for, audio-utils) \
-    frameworks/native/include \
-    system/core/base/include \
-    $(TOP)/frameworks/native/media/libaaudio/include/include \
-    $(TOP)/frameworks/av/media/libaaudio/include \
-    $(TOP)/frameworks/av/media/utils/include \
-    frameworks/native/include \
-    $(TOP)/external/tinyalsa/include \
-    $(TOP)/frameworks/av/media/libaaudio/src
-
-LOCAL_SRC_FILES += \
-    SharedMemoryProxy.cpp \
-    SharedRingBuffer.cpp \
-    AAudioClientTracker.cpp \
-    AAudioEndpointManager.cpp \
-    AAudioMixer.cpp \
-    AAudioService.cpp \
-    AAudioServiceEndpoint.cpp \
-    AAudioServiceEndpointCapture.cpp \
-    AAudioServiceEndpointMMAP.cpp \
-    AAudioServiceEndpointPlay.cpp \
-    AAudioServiceEndpointShared.cpp \
-    AAudioServiceStreamBase.cpp \
-    AAudioServiceStreamMMAP.cpp \
-    AAudioServiceStreamShared.cpp \
-    AAudioStreamTracker.cpp \
-    TimestampScheduler.cpp \
-    AAudioThread.cpp
-
-LOCAL_MULTILIB := $(AUDIOSERVER_MULTILIB)
-
-# LOCAL_CFLAGS += -fvisibility=hidden
-LOCAL_CFLAGS += -Wno-unused-parameter
-LOCAL_CFLAGS += -Wall -Werror
-
-LOCAL_SHARED_LIBRARIES :=  \
-    libaaudio \
-    libaudioflinger \
-    libaudioclient \
-    libbinder \
-    libcutils \
-    libmediautils \
-    libutils \
-    liblog
-
-include $(BUILD_SHARED_LIBRARY)
-
-
diff --git a/tools/resampler_tools/Android.bp b/tools/resampler_tools/Android.bp
new file mode 100644
index 0000000..7549359
--- /dev/null
+++ b/tools/resampler_tools/Android.bp
@@ -0,0 +1,15 @@
+// Copyright 2005 The Android Open Source Project
+//
+// Android.bp for resampler_tools
+//
+
+cc_binary_host {
+    name: "fir",
+
+    srcs: ["fir.cpp"],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+}
diff --git a/tools/resampler_tools/Android.mk b/tools/resampler_tools/Android.mk
deleted file mode 100644
index bba5199..0000000
--- a/tools/resampler_tools/Android.mk
+++ /dev/null
@@ -1,17 +0,0 @@
-# Copyright 2005 The Android Open Source Project
-#
-# Android.mk for resampler_tools
-#
-
-
-LOCAL_PATH:= $(call my-dir)
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES := \
-	fir.cpp
-
-LOCAL_MODULE := fir
-
-LOCAL_CFLAGS := -Werror -Wall
-
-include $(BUILD_HOST_EXECUTABLE)