Merge "Transcoder: Name transcoder threads for easier trace identification." into sc-dev
diff --git a/apex/mediatranscoding.rc b/apex/mediatranscoding.rc
index fa4acf8..24306a2 100644
--- a/apex/mediatranscoding.rc
+++ b/apex/mediatranscoding.rc
@@ -6,5 +6,6 @@
     user media
     group media
     ioprio rt 4
-    task_profiles ProcessCapacityHigh HighPerformance
+    # Restrict to little cores only with system-background cpuset.
+    writepid /dev/cpuset/system-background/tasks
     disabled
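For context, init's `writepid` directive writes the service's pid into the listed file at startup, which is what confines mediatranscoding (and the threads it spawns) to the little cores. A minimal C++ sketch of the equivalent operation, with the path taken from the rc entry above and error handling simplified:

```cpp
// Sketch only: mirrors what init's `writepid` does for this service.
#include <fcntl.h>
#include <unistd.h>
#include <cstdio>

static bool joinSystemBackgroundCpuset() {
    // Path comes from the rc change above; writing a pid here moves the whole
    // process into the system-background (little cores) cpuset.
    const char* kTasksFile = "/dev/cpuset/system-background/tasks";
    int fd = open(kTasksFile, O_WRONLY | O_CLOEXEC);
    if (fd < 0) {
        perror("open cpuset tasks file");
        return false;
    }
    char pidStr[16];
    int len = snprintf(pidStr, sizeof(pidStr), "%d", getpid());
    bool ok = write(fd, pidStr, len) == len;
    close(fd);
    return ok;
}
```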
diff --git a/camera/aidl/android/hardware/ICameraService.aidl b/camera/aidl/android/hardware/ICameraService.aidl
index 8af704d..459ad15 100644
--- a/camera/aidl/android/hardware/ICameraService.aidl
+++ b/camera/aidl/android/hardware/ICameraService.aidl
@@ -174,6 +174,13 @@
     oneway void notifySystemEvent(int eventId, in int[] args);
 
     /**
+     * Notify the camera service of a display configuration change.
+     *
+     * Callers require the android.permission.CAMERA_SEND_SYSTEM_EVENTS permission.
+     */
+    oneway void notifyDisplayConfigurationChange();
+
+    /**
      * Notify the camera service of a device physical status change. May only be called from
      * a privileged process.
      *
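A hedged sketch of how a privileged system component might invoke the new oneway method. The "media.camera" service name and the AIDL-generated C++ header path are assumptions, and the caller must hold android.permission.CAMERA_SEND_SYSTEM_EVENTS:

```cpp
// Sketch only; not part of this patch.
#include <android/hardware/ICameraService.h>  // AIDL-generated interface (assumed path)
#include <binder/IServiceManager.h>
#include <binder/IInterface.h>

using android::sp;
using android::String16;

void notifyCameraServiceOfDisplayChange() {
    sp<android::IBinder> binder =
            android::defaultServiceManager()->getService(String16("media.camera"));
    if (binder == nullptr) return;
    sp<android::hardware::ICameraService> cs =
            android::interface_cast<android::hardware::ICameraService>(binder);
    // oneway call: returns immediately without waiting for a reply.
    cs->notifyDisplayConfigurationChange();
}
```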
diff --git a/camera/ndk/impl/ACameraMetadata.cpp b/camera/ndk/impl/ACameraMetadata.cpp
index 7387442..dab2fef 100644
--- a/camera/ndk/impl/ACameraMetadata.cpp
+++ b/camera/ndk/impl/ACameraMetadata.cpp
@@ -24,6 +24,28 @@
 
 using namespace android;
 
+// Formats not listed in the public API, but still available to AImageReader
+// Enum value must match corresponding enum in ui/PublicFormat.h (which is not
+// available to VNDK)
+enum AIMAGE_PRIVATE_FORMATS {
+    /**
+     * Unprocessed implementation-dependent raw
+     * depth measurements, opaque with 16-bit
+     * samples.
+     *
+     */
+
+    AIMAGE_FORMAT_RAW_DEPTH = 0x1002,
+
+    /**
+     * Device-specific 10-bit depth RAW image format.
+     *
+     * <p>Unprocessed implementation-dependent raw depth measurements, opaque with 10-bit samples
+     * and a device-specific bit layout.</p>
+     */
+    AIMAGE_FORMAT_RAW_DEPTH10 = 0x1003,
+};
+
 /**
  * ACameraMetadata Implementation
  */
@@ -290,6 +312,10 @@
             format = AIMAGE_FORMAT_DEPTH_POINT_CLOUD;
         } else if (format == HAL_PIXEL_FORMAT_Y16) {
             format = AIMAGE_FORMAT_DEPTH16;
+        } else if (format == HAL_PIXEL_FORMAT_RAW16) {
+            format = static_cast<int32_t>(AIMAGE_FORMAT_RAW_DEPTH);
+        } else if (format == HAL_PIXEL_FORMAT_RAW10) {
+            format = static_cast<int32_t>(AIMAGE_FORMAT_RAW_DEPTH10);
         }
 
         filteredDepthStreamConfigs.push_back(format);
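With this mapping in place, a client can look for the private RAW depth formats in the filtered depth stream configurations. A minimal sketch, assuming a valid camera characteristics object:

```cpp
// Sketch only. Each depth stream configuration entry is a
// (format, width, height, isInput) tuple of int32 values.
#include <camera/NdkCameraMetadata.h>
#include <camera/NdkCameraMetadataTags.h>

bool supportsRawDepth(const ACameraMetadata* chars) {
    ACameraMetadata_const_entry entry = {};
    if (ACameraMetadata_getConstEntry(
            chars, ACAMERA_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS, &entry) != ACAMERA_OK) {
        return false;  // no depth stream configurations advertised
    }
    for (uint32_t i = 0; i + 3 < entry.count; i += 4) {
        int32_t format = entry.data.i32[i];
        if (format == 0x1002 /* AIMAGE_FORMAT_RAW_DEPTH */ ||
            format == 0x1003 /* AIMAGE_FORMAT_RAW_DEPTH10 */) {
            return true;
        }
    }
    return false;
}
```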
diff --git a/camera/ndk/include/camera/NdkCameraCaptureSession.h b/camera/ndk/include/camera/NdkCameraCaptureSession.h
index 6c1cf33..2b7f040 100644
--- a/camera/ndk/include/camera/NdkCameraCaptureSession.h
+++ b/camera/ndk/include/camera/NdkCameraCaptureSession.h
@@ -61,6 +61,10 @@
  */
 typedef void (*ACameraCaptureSession_stateCallback)(void* context, ACameraCaptureSession *session);
 
+/**
+ * Capture session state callbacks used in {@link ACameraDevice_createCaptureSession} and
+ * {@link ACameraDevice_createCaptureSessionWithSessionParameters}
+ */
 typedef struct ACameraCaptureSession_stateCallbacks {
     /// optional application context.
     void*                               context;
@@ -246,6 +250,10 @@
         void* context, ACameraCaptureSession* session,
         ACaptureRequest* request, ACameraWindowType* window, int64_t frameNumber);
 
+/**
+ * ACameraCaptureSession_captureCallbacks structure used in
+ * {@link ACameraCaptureSession_capture} and {@link ACameraCaptureSession_setRepeatingRequest}.
+ */
 typedef struct ACameraCaptureSession_captureCallbacks {
     /// optional application context.
     void*                                               context;
@@ -413,7 +421,10 @@
  */
 void ACameraCaptureSession_close(ACameraCaptureSession* session);
 
-struct ACameraDevice;
+/**
+ * ACameraDevice is an opaque type that provides access to a camera device.
+ * A pointer can be obtained using the {@link ACameraManager_openCamera} method.
+ */
 typedef struct ACameraDevice ACameraDevice;
 
 /**
@@ -591,6 +602,10 @@
 camera_status_t ACameraCaptureSession_abortCaptures(ACameraCaptureSession* session)
         __INTRODUCED_IN(24);
 
+/**
+ * Opaque object for a capture session output; use {@link ACaptureSessionOutput_create} or
+ * {@link ACaptureSessionSharedOutput_create} to create an instance.
+ */
 typedef struct ACaptureSessionOutput ACaptureSessionOutput;
 
 /**
@@ -604,9 +619,9 @@
  *
  * <p>Native windows that get removed must not be part of any active repeating or single/burst
  * request or have any pending results. Consider updating repeating requests via
- * {@link ACaptureSessionOutput_setRepeatingRequest} and then wait for the last frame number
+ * {@link ACameraCaptureSession_setRepeatingRequest} and then wait for the last frame number
  * when the sequence completes
- * {@link ACameraCaptureSession_captureCallback#onCaptureSequenceCompleted}.</p>
+ * {@link ACameraCaptureSession_captureCallbacks#onCaptureSequenceCompleted}.</p>
  *
  * <p>Native windows that get added must not be part of any other registered ACaptureSessionOutput
  * and must be compatible. Compatible windows must have matching format, rotation and
@@ -713,7 +728,15 @@
      * Same as ACameraCaptureSession_captureCallbacks
      */
     void*                                               context;
+
+    /**
+     * Same as {@link ACameraCaptureSession_captureCallbacks#onCaptureStarted}.
+     */
     ACameraCaptureSession_captureCallback_start         onCaptureStarted;
+
+    /**
+     * Same as {@link ACameraCaptureSession_captureCallbacks#onCaptureProgressed}.
+     */
     ACameraCaptureSession_captureCallback_result        onCaptureProgressed;
 
     /**
@@ -751,10 +774,18 @@
     ACameraCaptureSession_logicalCamera_captureCallback_failed onLogicalCameraCaptureFailed;
 
     /**
-     * Same as ACameraCaptureSession_captureCallbacks
+     * Same as {@link ACameraCaptureSession_captureCallbacks#onCaptureSequenceCompleted}.
      */
     ACameraCaptureSession_captureCallback_sequenceEnd   onCaptureSequenceCompleted;
+
+    /**
+     * Same as {@link ACameraCaptureSession_captureCallbacks#onCaptureSequenceAborted}.
+     */
     ACameraCaptureSession_captureCallback_sequenceAbort onCaptureSequenceAborted;
+
+    /**
+     * Same as {@link ACameraCaptureSession_captureCallbacks#onCaptureBufferLost}.
+     */
     ACameraCaptureSession_captureCallback_bufferLost    onCaptureBufferLost;
 } ACameraCaptureSession_logicalCamera_captureCallbacks;
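A minimal sketch of wiring up the now-documented ACameraCaptureSession_captureCallbacks struct and submitting a single capture; the session and request are assumed to be set up already, and unused callbacks are simply left null:

```cpp
#include <camera/NdkCameraCaptureSession.h>

static void onCaptureCompleted(void* /*context*/, ACameraCaptureSession* /*session*/,
                               ACaptureRequest* /*request*/, const ACameraMetadata* /*result*/) {
    // Inspect the final capture result here.
}

camera_status_t submitOneCapture(ACameraCaptureSession* session, ACaptureRequest* request) {
    ACameraCaptureSession_captureCallbacks cbs = {};  // zero-init: unused callbacks stay null
    cbs.context = nullptr;
    cbs.onCaptureCompleted = onCaptureCompleted;

    int sequenceId = 0;
    return ACameraCaptureSession_capture(session, &cbs, /*numRequests=*/1,
                                         &request, &sequenceId);
}
```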
 
diff --git a/camera/ndk/include/camera/NdkCameraDevice.h b/camera/ndk/include/camera/NdkCameraDevice.h
index f72fe8d..7be4bd3 100644
--- a/camera/ndk/include/camera/NdkCameraDevice.h
+++ b/camera/ndk/include/camera/NdkCameraDevice.h
@@ -124,6 +124,10 @@
  */
 typedef void (*ACameraDevice_ErrorStateCallback)(void* context, ACameraDevice* device, int error);
 
+/**
+ * Application callbacks for camera device state changes, registered via
+ * {@link ACameraManager_openCamera}.
+ */
 typedef struct ACameraDevice_StateCallbacks {
     /// optional application context.
     void*                             context;
@@ -198,6 +202,10 @@
  */
 const char* ACameraDevice_getId(const ACameraDevice* device) __INTRODUCED_IN(24);
 
+/**
+ * Pre-defined capture request template types, used in {@link ACameraDevice_createCaptureRequest}
+ * and {@link ACameraDevice_createCaptureRequest_withPhysicalIds}.
+ */
 typedef enum {
     /**
      * Create a request suitable for a camera preview window. Specifically, this
@@ -301,10 +309,12 @@
         const ACameraDevice* device, ACameraDevice_request_template templateId,
         /*out*/ACaptureRequest** request) __INTRODUCED_IN(24);
 
-
+/**
+ * Opaque object for the capture session output container; use
+ * {@link ACaptureSessionOutputContainer_create} to create an instance.
+ */
 typedef struct ACaptureSessionOutputContainer ACaptureSessionOutputContainer;
 
-typedef struct ACaptureSessionOutput ACaptureSessionOutput;
 
 /**
  * Create a capture session output container.
@@ -844,7 +854,7 @@
         /*out*/ACaptureRequest** request) __INTRODUCED_IN(29);
 
 /**
- * Check whether a particular {@ACaptureSessionOutputContainer} is supported by
+ * Check whether a particular {@link ACaptureSessionOutputContainer} is supported by
  * the camera device.
  *
  * <p>This method performs a runtime check of a given {@link
@@ -875,6 +885,7 @@
  *                                                         device.</li>
  *        <li>{@link ACAMERA_ERROR_UNSUPPORTED_OPERATION} if the query operation is not
  *                                                        supported by the camera device.</li>
+ *        </ul>
  */
 camera_status_t ACameraDevice_isSessionConfigurationSupported(
         const ACameraDevice* device,
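A minimal sketch of the runtime query documented above, assuming `device` and `window` (an ANativeWindow) are already valid; the container and output are freed on all paths:

```cpp
#include <camera/NdkCameraDevice.h>

bool isConfigSupported(ACameraDevice* device, ACameraWindowType* window) {
    ACaptureSessionOutputContainer* container = nullptr;
    ACaptureSessionOutput* output = nullptr;
    bool supported = false;

    if (ACaptureSessionOutputContainer_create(&container) != ACAMERA_OK) return false;
    if (ACaptureSessionOutput_create(window, &output) == ACAMERA_OK &&
        ACaptureSessionOutputContainer_add(container, output) == ACAMERA_OK) {
        camera_status_t status =
                ACameraDevice_isSessionConfigurationSupported(device, container);
        // ACAMERA_ERROR_UNSUPPORTED_OPERATION means the query itself is unsupported,
        // not that the configuration is invalid.
        supported = (status == ACAMERA_OK);
    }
    if (output != nullptr) ACaptureSessionOutput_free(output);
    ACaptureSessionOutputContainer_free(container);
    return supported;
}
```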
diff --git a/camera/ndk/include/camera/NdkCameraError.h b/camera/ndk/include/camera/NdkCameraError.h
index 9d77eb4..26db7f2 100644
--- a/camera/ndk/include/camera/NdkCameraError.h
+++ b/camera/ndk/include/camera/NdkCameraError.h
@@ -40,7 +40,13 @@
 
 __BEGIN_DECLS
 
+/**
+ * Camera status enum types.
+ */
 typedef enum {
+    /**
+     * Camera operation has succeeded.
+     */
     ACAMERA_OK = 0,
 
     ACAMERA_ERROR_BASE                  = -10000,
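The usual error-handling pattern for these status codes, sketched against the camera manager API (the log tag is illustrative): every NDK camera call returns ACAMERA_OK on success and a negative ACAMERA_ERROR_* value otherwise.

```cpp
#include <camera/NdkCameraManager.h>
#include <android/log.h>

ACameraManager* createManagerOrLog() {
    ACameraManager* manager = ACameraManager_create();
    ACameraIdList* idList = nullptr;
    camera_status_t status = ACameraManager_getCameraIdList(manager, &idList);
    if (status != ACAMERA_OK) {
        __android_log_print(ANDROID_LOG_ERROR, "CameraSample",
                            "getCameraIdList failed: %d", status);
        ACameraManager_delete(manager);
        return nullptr;
    }
    ACameraManager_deleteCameraIdList(idList);
    return manager;
}
```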
diff --git a/camera/ndk/include/camera/NdkCameraManager.h b/camera/ndk/include/camera/NdkCameraManager.h
index be32b11..729182e 100644
--- a/camera/ndk/include/camera/NdkCameraManager.h
+++ b/camera/ndk/include/camera/NdkCameraManager.h
@@ -326,7 +326,7 @@
  * @see ACameraManager_registerExtendedAvailabilityCallback
  */
 typedef struct ACameraManager_ExtendedAvailabilityListener {
-    ///
+    /// Called when a camera becomes available or unavailable
     ACameraManager_AvailabilityCallbacks availabilityCallbacks;
 
     /// Called when there is camera access permission change
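A minimal sketch of registering this extended availability listener; callback slots that are not needed are left null:

```cpp
#include <camera/NdkCameraManager.h>

static void onAvailable(void* /*context*/, const char* cameraId) {
    (void)cameraId;  // camera just became available for opening
}

static void onUnavailable(void* /*context*/, const char* cameraId) {
    (void)cameraId;  // camera is no longer available
}

camera_status_t registerAvailability(ACameraManager* manager) {
    ACameraManager_ExtendedAvailabilityCallbacks cbs = {};
    cbs.availabilityCallbacks.context = nullptr;
    cbs.availabilityCallbacks.onCameraAvailable = onAvailable;
    cbs.availabilityCallbacks.onCameraUnavailable = onUnavailable;
    return ACameraManager_registerExtendedAvailabilityCallback(manager, &cbs);
}
```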
diff --git a/camera/ndk/include/camera/NdkCameraMetadata.h b/camera/ndk/include/camera/NdkCameraMetadata.h
index 0d5e6c4..b331d50 100644
--- a/camera/ndk/include/camera/NdkCameraMetadata.h
+++ b/camera/ndk/include/camera/NdkCameraMetadata.h
@@ -256,10 +256,12 @@
 
 /**
  * Return a {@link ACameraMetadata} that references the same data as
- * {@link cameraMetadata}, which is an instance of
- * {@link android.hardware.camera2.CameraMetadata} (e.g., a
- * {@link android.hardware.camera2.CameraCharacteristics} or
- * {@link android.hardware.camera2.CaptureResult}).
+ * <a href="/reference/android/hardware/camera2/CameraMetadata">
+ *     android.hardware.camera2.CameraMetadata</a> from Java API. (e.g., a
+ * <a href="/reference/android/hardware/camera2/CameraCharacteristics">
+ *     android.hardware.camera2.CameraCharacteristics</a>
+ * or <a href="/reference/android/hardware/camera2/CaptureResult">
+ *     android.hardware.camera2.CaptureResult</a>).
  *
  * <p>The returned ACameraMetadata must be freed by the application by {@link ACameraMetadata_free}
  * after application is done using it.</p>
@@ -269,11 +271,13 @@
  * the Java metadata is garbage collected.
  *
  * @param env the JNI environment.
- * @param cameraMetadata the source {@link android.hardware.camera2.CameraMetadata} from which the
+ * @param cameraMetadata the source <a href="/reference/android/hardware/camera2/CameraMetadata">
+ *                       android.hardware.camera2.CameraMetadata</a> from which the
  *                       returned {@link ACameraMetadata} is a view.
  *
- * @return a valid ACameraMetadata pointer or NULL if {@link cameraMetadata} is null or not a valid
- *         instance of {@link android.hardware.camera2.CameraMetadata}.
+ * @return a valid ACameraMetadata pointer or NULL if cameraMetadata is null or not a valid
+ *         instance of <a href="/reference/android/hardware/camera2/CameraMetadata">
+ *         android.hardware.camera2.CameraMetadata</a>.
  *
  */
 ACameraMetadata* ACameraMetadata_fromCameraMetadata(JNIEnv* env, jobject cameraMetadata)
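A sketch of the JNI interop path described above, assuming a Java CameraCharacteristics object (a CameraMetadata subclass) is passed down from the app:

```cpp
#include <camera/NdkCameraMetadata.h>
#include <jni.h>

void readSensorOrientation(JNIEnv* env, jobject javaCameraCharacteristics) {
    ACameraMetadata* chars =
            ACameraMetadata_fromCameraMetadata(env, javaCameraCharacteristics);
    if (chars == nullptr) {
        return;  // not a valid CameraMetadata instance
    }
    ACameraMetadata_const_entry entry = {};
    if (ACameraMetadata_getConstEntry(chars, ACAMERA_SENSOR_ORIENTATION, &entry) == ACAMERA_OK
            && entry.count > 0) {
        int32_t orientationDegrees = entry.data.i32[0];
        (void)orientationDegrees;
    }
    ACameraMetadata_free(chars);  // the view must be freed by the application
}
```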
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index 70ce864..20ffd48 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -1868,7 +1868,7 @@
      * <li>If the camera device has BURST_CAPTURE capability, the frame rate requirement of
      * BURST_CAPTURE must still be met.</li>
      * <li>All streams not larger than the maximum streaming dimension for BOKEH_STILL_CAPTURE mode
-     * (queried via {@link ACAMERA_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_CAPABILITIES })
+     * (queried via {@link ACAMERA_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_MAX_SIZES })
      * will have preview bokeh effect applied.</li>
      * </ul>
      * <p>When set to BOKEH_CONTINUOUS mode, configured streams dimension should not exceed this mode's
@@ -3502,7 +3502,7 @@
      * preCorrectionActiveArraySize covers the camera device's field of view "after" zoom.  See
      * ACAMERA_CONTROL_ZOOM_RATIO for details.</p>
      * <p>For camera devices with the
-     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
      * capability, ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION /
      * ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION must be used as the
      * coordinate system for requests where ACAMERA_SENSOR_PIXEL_MODE is set to
@@ -3964,7 +3964,7 @@
      * configurations which belong to this physical camera, and it will advertise and will only
      * advertise the maximum supported resolutions for a particular format.</p>
      * <p>If this camera device isn't a physical camera device constituting a logical camera,
-     * but a standalone <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+     * but a standalone <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
      * camera, this field represents the multi-resolution input/output stream configurations of
      * default mode and max resolution modes. The sizes will be the maximum resolution of a
      * particular format for default mode and max resolution mode.</p>
@@ -4867,12 +4867,12 @@
      * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_DEFAULT">CameraMetadata#SENSOR_PIXEL_MODE_DEFAULT</a> mode.
      * When operating in
      * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_DEFAULT">CameraMetadata#SENSOR_PIXEL_MODE_DEFAULT</a> mode, sensors
-     * with <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+     * with <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
      * capability would typically perform pixel binning in order to improve low light
      * performance, noise reduction etc. However, in
      * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>
      * mode (supported only
-     * by <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+     * by <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
      * sensors), sensors typically operate in unbinned mode allowing for a larger image size.
      * The stream configurations supported in
      * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>
@@ -4905,7 +4905,7 @@
      * </ul></p>
      *
     * <p>This key will only be present in devices advertising the
-     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
      * capability which also advertise <code>REMOSAIC_REPROCESSING</code> capability. On all other devices
      * RAW targets will have a regular bayer pattern.</p>
      */
@@ -5231,7 +5231,7 @@
      * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>
      * counterparts.
      * This key will only be present for devices which advertise the
-     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
      * capability.</p>
      * <p>The data representation is <code>int[4]</code>, which maps to <code>(left, top, width, height)</code>.</p>
      *
@@ -5263,7 +5263,7 @@
      * is, when ACAMERA_SENSOR_PIXEL_MODE is set to
      * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.
      * This key will only be present for devices which advertise the
-     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
      * capability.</p>
      *
      * @see ACAMERA_SENSOR_INFO_PHYSICAL_SIZE
@@ -5291,7 +5291,7 @@
      * when ACAMERA_SENSOR_PIXEL_MODE is set to
      * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.
      * This key will only be present for devices which advertise the
-     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
      * capability.</p>
      * <p>The data representation is <code>int[4]</code>, which maps to <code>(left, top, width, height)</code>.</p>
      *
@@ -5321,7 +5321,7 @@
      * <p>This key will not be present if REMOSAIC_REPROCESSING is not supported, since RAW images
      * will have a regular bayer pattern.</p>
      * <p>This key will not be present for sensors which don't have the
-     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
      * capability.</p>
      */
     ACAMERA_SENSOR_INFO_BINNING_FACTOR =                        // int32[2]
@@ -9264,13 +9264,13 @@
     /**
      * <p>This is the default sensor pixel mode. This is the only sensor pixel mode
      * supported unless a camera device advertises
-     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>.</p>
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>.</p>
      */
     ACAMERA_SENSOR_PIXEL_MODE_DEFAULT                                = 0,
 
     /**
      * <p>This sensor pixel mode is offered by devices with capability
-     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILTIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>.
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>.
     * In this mode, sensors typically do not bin pixels, and as a result can offer larger
      * image sizes.</p>
      */
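A sketch that ties the corrected capability links to the NDK tags: request maximum-resolution pixel mode only when the ULTRA_HIGH_RESOLUTION_SENSOR capability is advertised. The characteristics and request objects are assumed valid, and the enum names are the NDK counterparts of the Java constants referenced above:

```cpp
#include <camera/NdkCameraMetadata.h>
#include <camera/NdkCaptureRequest.h>

bool requestMaxResolutionIfSupported(const ACameraMetadata* chars, ACaptureRequest* request) {
    ACameraMetadata_const_entry caps = {};
    if (ACameraMetadata_getConstEntry(chars, ACAMERA_REQUEST_AVAILABLE_CAPABILITIES, &caps)
            != ACAMERA_OK) {
        return false;
    }
    bool uhrSensor = false;
    for (uint32_t i = 0; i < caps.count; ++i) {
        if (caps.data.u8[i] ==
                ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR) {
            uhrSensor = true;
            break;
        }
    }
    if (!uhrSensor) {
        return false;  // only ACAMERA_SENSOR_PIXEL_MODE_DEFAULT is supported
    }
    uint8_t mode = ACAMERA_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION;
    return ACaptureRequest_setEntry_u8(request, ACAMERA_SENSOR_PIXEL_MODE, 1, &mode) == ACAMERA_OK;
}
```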
diff --git a/camera/ndk/include/camera/NdkCaptureRequest.h b/camera/ndk/include/camera/NdkCaptureRequest.h
index a4dc374..d83c5b3 100644
--- a/camera/ndk/include/camera/NdkCaptureRequest.h
+++ b/camera/ndk/include/camera/NdkCaptureRequest.h
@@ -44,10 +44,10 @@
 
 __BEGIN_DECLS
 
-// Container for output targets
+/** Container for output targets */
 typedef struct ACameraOutputTargets ACameraOutputTargets;
 
-// Container for a single output target
+/** Container for a single output target */
 typedef struct ACameraOutputTarget ACameraOutputTarget;
 
 /**
@@ -383,10 +383,10 @@
  * Set/change a camera capture control entry with unsigned 8 bits data type for
  * a physical camera backing a logical multi-camera device.
  *
- * <p>Same as ACaptureRequest_setEntry_u8, except that if {@link tag} is contained
+ * <p>Same as ACaptureRequest_setEntry_u8, except that if tag is contained
  * in {@link ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS}, this function
  * sets the entry for a particular physical sub-camera backing the logical multi-camera.
- * If {@link tag} is not contained in
+ * If tag is not contained in
  * {@link ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS}, the key will be ignored
  * by the camera device.</p>
  *
@@ -413,10 +413,10 @@
  * Set/change a camera capture control entry with signed 32 bits data type for
  * a physical camera of a logical multi-camera device.
  *
- * <p>Same as ACaptureRequest_setEntry_i32, except that if {@link tag} is contained
+ * <p>Same as ACaptureRequest_setEntry_i32, except that if tag is contained
  * in {@link ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS}, this function
  * sets the entry for a particular physical sub-camera backing the logical multi-camera.
- * If {@link tag} is not contained in
+ * If tag is not contained in
  * {@link ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS}, the key will be ignored
  * by the camera device.</p>
  *
@@ -443,10 +443,10 @@
  * Set/change a camera capture control entry with float data type for
  * a physical camera of a logical multi-camera device.
  *
- * <p>Same as ACaptureRequest_setEntry_float, except that if {@link tag} is contained
+ * <p>Same as ACaptureRequest_setEntry_float, except that if tag is contained
  * in {@link ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS}, this function
  * sets the entry for a particular physical sub-camera backing the logical multi-camera.
- * If {@link tag} is not contained in
+ * If tag is not contained in
  * {@link ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS}, the key will be ignored
  * by the camera device.</p>
  *
@@ -473,10 +473,10 @@
  * Set/change a camera capture control entry with signed 64 bits data type for
  * a physical camera of a logical multi-camera device.
  *
- * <p>Same as ACaptureRequest_setEntry_i64, except that if {@link tag} is contained
+ * <p>Same as ACaptureRequest_setEntry_i64, except that if tag is contained
  * in {@link ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS}, this function
  * sets the entry for a particular physical sub-camera backing the logical multi-camera.
- * If {@link tag} is not contained in
+ * If tag is not contained in
  * {@link ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS}, the key will be ignored
  * by the camera device.</p>
  *
@@ -503,10 +503,10 @@
  * Set/change a camera capture control entry with double data type for
  * a physical camera of a logical multi-camera device.
  *
- * <p>Same as ACaptureRequest_setEntry_double, except that if {@link tag} is contained
+ * <p>Same as ACaptureRequest_setEntry_double, except that if tag is contained
  * in {@link ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS}, this function
  * sets the entry for a particular physical sub-camera backing the logical multi-camera.
- * If {@link tag} is not contained in
+ * If tag is not contained in
  * {@link ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS}, the key will be ignored
  * by the camera device.</p>
  *
@@ -533,10 +533,10 @@
  * Set/change a camera capture control entry with rational data type for
  * a physical camera of a logical multi-camera device.
  *
- * <p>Same as ACaptureRequest_setEntry_rational, except that if {@link tag} is contained
+ * <p>Same as ACaptureRequest_setEntry_rational, except that if tag is contained
  * in {@link ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS}, this function
  * sets the entry for a particular physical sub-camera backing the logical multi-camera.
- * If {@link tag} is not contained in
+ * If tag is not contained in
  * {@link ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS}, the key will be ignored
  * by the camera device.</p>
  *
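A minimal sketch of the per-physical-camera setters described above; `physicalId` is assumed to be one of the logical camera's physical camera ids, and the tag must be listed in ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS for the entry to take effect:

```cpp
#include <camera/NdkCaptureRequest.h>

camera_status_t setPhysicalExposure(ACaptureRequest* request, const char* physicalId,
                                    int64_t exposureTimeNs) {
    // Signed 64-bit entry targeted at a single physical sub-camera of the logical device.
    return ACaptureRequest_setEntry_physicalCamera_i64(
            request, physicalId, ACAMERA_SENSOR_EXPOSURE_TIME, 1, &exposureTimeNs);
}
```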
diff --git a/media/codec2/components/aac/C2SoftAacEnc.cpp b/media/codec2/components/aac/C2SoftAacEnc.cpp
index ea76cbb..d865ab2 100644
--- a/media/codec2/components/aac/C2SoftAacEnc.cpp
+++ b/media/codec2/components/aac/C2SoftAacEnc.cpp
@@ -272,8 +272,9 @@
         return UNKNOWN_ERROR;
     }
 
-    if (sbrMode != -1 && aacProfile == C2Config::PROFILE_AAC_ELD) {
-        if (AACENC_OK != aacEncoder_SetParam(mAACEncoder, AACENC_SBR_MODE, sbrMode)) {
+    if (sbrMode != C2Config::AAC_SBR_AUTO && aacProfile == C2Config::PROFILE_AAC_ELD) {
+        int aacSbrMode = sbrMode != C2Config::AAC_SBR_OFF;
+        if (AACENC_OK != aacEncoder_SetParam(mAACEncoder, AACENC_SBR_MODE, aacSbrMode)) {
             ALOGE("Failed to set AAC encoder parameters");
             return UNKNOWN_ERROR;
         }
diff --git a/media/codec2/components/avc/C2SoftAvcDec.cpp b/media/codec2/components/avc/C2SoftAvcDec.cpp
index f4a6e17..e8287f9 100644
--- a/media/codec2/components/avc/C2SoftAvcDec.cpp
+++ b/media/codec2/components/avc/C2SoftAvcDec.cpp
@@ -22,7 +22,6 @@
 
 #include <C2Debug.h>
 #include <C2PlatformSupport.h>
-#include <Codec2BufferUtils.h>
 #include <Codec2Mapper.h>
 #include <SimpleC2Interface.h>
 
@@ -332,14 +331,6 @@
     free(mem);
 }
 
-static IV_COLOR_FORMAT_T GetIvColorFormat() {
-    static IV_COLOR_FORMAT_T sColorFormat =
-        (GetYuv420FlexibleLayout() == FLEX_LAYOUT_SEMIPLANAR_UV) ? IV_YUV_420SP_UV :
-        (GetYuv420FlexibleLayout() == FLEX_LAYOUT_SEMIPLANAR_VU) ? IV_YUV_420SP_VU :
-        IV_YUV_420P;
-    return sColorFormat;
-}
-
 C2SoftAvcDec::C2SoftAvcDec(
         const char *name,
         c2_node_id_t id,
@@ -348,6 +339,7 @@
       mIntf(intfImpl),
       mDecHandle(nullptr),
       mOutBufferFlush(nullptr),
+      mIvColorFormat(IV_YUV_420P),
       mOutputDelay(kDefaultOutputDelay),
       mWidth(320),
       mHeight(240),
@@ -426,13 +418,7 @@
     s_create_ip.s_ivd_create_ip_t.u4_size = sizeof(ivdext_create_ip_t);
     s_create_ip.s_ivd_create_ip_t.e_cmd = IVD_CMD_CREATE;
     s_create_ip.s_ivd_create_ip_t.u4_share_disp_buf = 0;
-    s_create_ip.s_ivd_create_ip_t.e_output_format = GetIvColorFormat();
-    switch (s_create_ip.s_ivd_create_ip_t.e_output_format) {
-        case IV_YUV_420P:       ALOGD("Flex Planar");           break;
-        case IV_YUV_420SP_UV:   ALOGD("Flex Semi-planar UV");   break;
-        case IV_YUV_420SP_VU:   ALOGD("Flex Semi-planar VU");   break;
-        default:                ALOGD("Unknown");               break;
-    }
+    s_create_ip.s_ivd_create_ip_t.e_output_format = mIvColorFormat;
     s_create_ip.s_ivd_create_ip_t.pf_aligned_alloc = ivd_aligned_malloc;
     s_create_ip.s_ivd_create_ip_t.pf_aligned_free = ivd_aligned_free;
     s_create_ip.s_ivd_create_ip_t.pv_mem_ctxt = nullptr;
@@ -569,12 +555,8 @@
         ps_decode_ip->u4_num_Bytes = 0;
     }
     ps_decode_ip->s_out_buffer.u4_min_out_buf_size[0] = lumaSize;
-    if (GetIvColorFormat() == IV_YUV_420P) {
-        ps_decode_ip->s_out_buffer.u4_min_out_buf_size[1] = chromaSize;
-        ps_decode_ip->s_out_buffer.u4_min_out_buf_size[2] = chromaSize;
-    } else {
-        ps_decode_ip->s_out_buffer.u4_min_out_buf_size[1] = chromaSize * 2;
-    }
+    ps_decode_ip->s_out_buffer.u4_min_out_buf_size[1] = chromaSize;
+    ps_decode_ip->s_out_buffer.u4_min_out_buf_size[2] = chromaSize;
     if (outBuffer) {
         if (outBuffer->height() < displayHeight) {
             ALOGE("Output buffer too small: provided (%dx%d) required (%ux%u)",
@@ -583,23 +565,13 @@
         }
         ps_decode_ip->s_out_buffer.pu1_bufs[0] = outBuffer->data()[C2PlanarLayout::PLANE_Y];
         ps_decode_ip->s_out_buffer.pu1_bufs[1] = outBuffer->data()[C2PlanarLayout::PLANE_U];
-        if (GetIvColorFormat() == IV_YUV_420P) {
-            ps_decode_ip->s_out_buffer.pu1_bufs[2] = outBuffer->data()[C2PlanarLayout::PLANE_V];
-        } else if (GetIvColorFormat() == IV_YUV_420SP_VU) {
-            ps_decode_ip->s_out_buffer.pu1_bufs[1] = outBuffer->data()[C2PlanarLayout::PLANE_V];
-        }
+        ps_decode_ip->s_out_buffer.pu1_bufs[2] = outBuffer->data()[C2PlanarLayout::PLANE_V];
     } else {
         ps_decode_ip->s_out_buffer.pu1_bufs[0] = mOutBufferFlush;
         ps_decode_ip->s_out_buffer.pu1_bufs[1] = mOutBufferFlush + lumaSize;
-        if (GetIvColorFormat() == IV_YUV_420P) {
-            ps_decode_ip->s_out_buffer.pu1_bufs[2] = mOutBufferFlush + lumaSize + chromaSize;
-        }
+        ps_decode_ip->s_out_buffer.pu1_bufs[2] = mOutBufferFlush + lumaSize + chromaSize;
     }
-    if (GetIvColorFormat() == IV_YUV_420P) {
-        ps_decode_ip->s_out_buffer.u4_num_bufs = 3;
-    } else {
-        ps_decode_ip->s_out_buffer.u4_num_bufs = 2;
-    }
+    ps_decode_ip->s_out_buffer.u4_num_bufs = 3;
     ps_decode_op->u4_size = sizeof(ih264d_video_decode_op_t);
 
     return true;
@@ -809,7 +781,7 @@
         mOutBlock.reset();
     }
     if (!mOutBlock) {
-        uint32_t format = HAL_PIXEL_FORMAT_YCBCR_420_888;
+        uint32_t format = HAL_PIXEL_FORMAT_YV12;
         C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
         c2_status_t err =
             pool->fetchGraphicBlock(ALIGN32(mWidth), mHeight, format, usage, &mOutBlock);
@@ -825,6 +797,8 @@
 }
 
 // TODO: can overall error checking be improved?
+// TODO: allow configuration of color format and usage for graphic buffers instead
+//       of hard coding them to HAL_PIXEL_FORMAT_YV12
 // TODO: pass coloraspects information to surface
 // TODO: test support for dynamic change in resolution
 // TODO: verify if the decoder sent back all frames
diff --git a/media/codec2/components/avc/C2SoftAvcDec.h b/media/codec2/components/avc/C2SoftAvcDec.h
index ed99ad1..5c07d29 100644
--- a/media/codec2/components/avc/C2SoftAvcDec.h
+++ b/media/codec2/components/avc/C2SoftAvcDec.h
@@ -155,6 +155,7 @@
     uint8_t *mOutBufferFlush;
 
     size_t mNumCores;
+    IV_COLOR_FORMAT_T mIvColorFormat;
     uint32_t mOutputDelay;
     uint32_t mWidth;
     uint32_t mHeight;
diff --git a/media/codec2/components/avc/C2SoftAvcEnc.cpp b/media/codec2/components/avc/C2SoftAvcEnc.cpp
index fc5b75d..bab651f 100644
--- a/media/codec2/components/avc/C2SoftAvcEnc.cpp
+++ b/media/codec2/components/avc/C2SoftAvcEnc.cpp
@@ -454,19 +454,11 @@
 
 }  // namespace
 
-static IV_COLOR_FORMAT_T GetIvColorFormat() {
-    static IV_COLOR_FORMAT_T sColorFormat =
-        (GetYuv420FlexibleLayout() == FLEX_LAYOUT_SEMIPLANAR_UV) ? IV_YUV_420SP_UV :
-        (GetYuv420FlexibleLayout() == FLEX_LAYOUT_SEMIPLANAR_VU) ? IV_YUV_420SP_VU :
-        IV_YUV_420P;
-    return sColorFormat;
-}
-
 C2SoftAvcEnc::C2SoftAvcEnc(
         const char *name, c2_node_id_t id, const std::shared_ptr<IntfImpl> &intfImpl)
     : SimpleC2Component(std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
       mIntf(intfImpl),
-      mIvVideoColorFormat(GetIvColorFormat()),
+      mIvVideoColorFormat(IV_YUV_420P),
       mAVCEncProfile(IV_PROFILE_BASE),
       mAVCEncLevel(41),
       mStarted(false),
@@ -1034,7 +1026,8 @@
     // Assume worst case output buffer size to be equal to number of bytes in input
     mOutBufferSize = std::max(width * height * 3 / 2, kMinOutBufferSize);
 
-    mIvVideoColorFormat = GetIvColorFormat();
+    // TODO: allow configuration of the color format instead of hard coding IV_YUV_420P
+    mIvVideoColorFormat = IV_YUV_420P;
 
     ALOGD("Params width %d height %d level %d colorFormat %d bframes %d", width,
             height, mAVCEncLevel, mIvVideoColorFormat, mBframes);
@@ -1332,6 +1325,7 @@
               mSize->width, input->height(), mSize->height);
         return C2_BAD_VALUE;
     }
+    ALOGV("width = %d, height = %d", input->width(), input->height());
     const C2PlanarLayout &layout = input->layout();
     uint8_t *yPlane = const_cast<uint8_t *>(input->data()[C2PlanarLayout::PLANE_Y]);
     uint8_t *uPlane = const_cast<uint8_t *>(input->data()[C2PlanarLayout::PLANE_U]);
@@ -1368,8 +1362,7 @@
                 return C2_BAD_VALUE;
             }
 
-            if (mIvVideoColorFormat == IV_YUV_420P
-                    && layout.planes[layout.PLANE_Y].colInc == 1
+            if (layout.planes[layout.PLANE_Y].colInc == 1
                     && layout.planes[layout.PLANE_U].colInc == 1
                     && layout.planes[layout.PLANE_V].colInc == 1
                     && uStride == vStride
@@ -1377,61 +1370,21 @@
                 // I420 compatible - already set up above
                 break;
             }
-            if (mIvVideoColorFormat == IV_YUV_420SP_UV
-                    && layout.planes[layout.PLANE_Y].colInc == 1
-                    && layout.planes[layout.PLANE_U].colInc == 2
-                    && layout.planes[layout.PLANE_V].colInc == 2
-                    && uStride == vStride
-                    && yStride == vStride
-                    && uPlane + 1 == vPlane) {
-                // NV12 compatible - already set up above
-                break;
-            }
-            if (mIvVideoColorFormat == IV_YUV_420SP_VU
-                    && layout.planes[layout.PLANE_Y].colInc == 1
-                    && layout.planes[layout.PLANE_U].colInc == 2
-                    && layout.planes[layout.PLANE_V].colInc == 2
-                    && uStride == vStride
-                    && yStride == vStride
-                    && uPlane == vPlane + 1) {
-                // NV21 compatible - already set up above
-                break;
-            }
 
             // copy to I420
             yStride = width;
             uStride = vStride = yStride / 2;
             MemoryBlock conversionBuffer = mConversionBuffers.fetch(yPlaneSize * 3 / 2);
             mConversionBuffersInUse.emplace(conversionBuffer.data(), conversionBuffer);
-            MediaImage2 img;
-            switch (mIvVideoColorFormat) {
-                case IV_YUV_420P:
-                    img = CreateYUV420PlanarMediaImage2(width, height, yStride, height);
-                    yPlane = conversionBuffer.data();
-                    uPlane = yPlane + yPlaneSize;
-                    vPlane = uPlane + yPlaneSize / 4;
-                    break;
-                case IV_YUV_420SP_VU:
-                    img = CreateYUV420SemiPlanarMediaImage2(width, height, yStride, height);
-                    img.mPlane[MediaImage2::U].mOffset++;
-                    img.mPlane[MediaImage2::V].mOffset--;
-                    yPlane = conversionBuffer.data();
-                    vPlane = yPlane + yPlaneSize;
-                    uPlane = vPlane + 1;
-                    break;
-                case IV_YUV_420SP_UV:
-                default:
-                    img = CreateYUV420SemiPlanarMediaImage2(width, height, yStride, height);
-                    yPlane = conversionBuffer.data();
-                    uPlane = yPlane + yPlaneSize;
-                    vPlane = uPlane + 1;
-                    break;
-            }
+            MediaImage2 img = CreateYUV420PlanarMediaImage2(width, height, yStride, height);
             status_t err = ImageCopy(conversionBuffer.data(), &img, *input);
             if (err != OK) {
                 ALOGE("Buffer conversion failed: %d", err);
                 return C2_BAD_VALUE;
             }
+            yPlane = conversionBuffer.data();
+            uPlane = yPlane + yPlaneSize;
+            vPlane = uPlane + yPlaneSize / 4;
             break;
 
         }
@@ -1477,17 +1430,15 @@
             break;
         }
 
-        case IV_YUV_420SP_VU:
-            uPlane = vPlane;
-            [[fallthrough]];
         case IV_YUV_420SP_UV:
+        case IV_YUV_420SP_VU:
         default:
         {
             ps_inp_raw_buf->apv_bufs[0] = yPlane;
             ps_inp_raw_buf->apv_bufs[1] = uPlane;
 
             ps_inp_raw_buf->au4_wd[0] = mSize->width;
-            ps_inp_raw_buf->au4_wd[1] = mSize->width / 2;
+            ps_inp_raw_buf->au4_wd[1] = mSize->width;
 
             ps_inp_raw_buf->au4_ht[0] = mSize->height;
             ps_inp_raw_buf->au4_ht[1] = mSize->height / 2;
diff --git a/media/codec2/components/base/SimpleC2Component.cpp b/media/codec2/components/base/SimpleC2Component.cpp
index fb3fbd0..dfad226 100644
--- a/media/codec2/components/base/SimpleC2Component.cpp
+++ b/media/codec2/components/base/SimpleC2Component.cpp
@@ -110,17 +110,20 @@
         }
         case kWhatStop: {
             int32_t err = thiz->onStop();
+            thiz->mOutputBlockPool.reset();
             Reply(msg, &err);
             break;
         }
         case kWhatReset: {
             thiz->onReset();
+            thiz->mOutputBlockPool.reset();
             mRunning = false;
             Reply(msg);
             break;
         }
         case kWhatRelease: {
             thiz->onRelease();
+            thiz->mOutputBlockPool.reset();
             mRunning = false;
             Reply(msg);
             break;
diff --git a/media/codec2/hidl/client/client.cpp b/media/codec2/hidl/client/client.cpp
index d49141c..71857e0 100644
--- a/media/codec2/hidl/client/client.cpp
+++ b/media/codec2/hidl/client/client.cpp
@@ -1482,7 +1482,8 @@
 c2_status_t Codec2Client::Component::setOutputSurface(
         C2BlockPool::local_id_t blockPoolId,
         const sp<IGraphicBufferProducer>& surface,
-        uint32_t generation) {
+        uint32_t generation,
+        int maxDequeueCount) {
     uint64_t bqId = 0;
     sp<IGraphicBufferProducer> nullIgbp;
     sp<HGraphicBufferProducer2> nullHgbp;
@@ -1496,14 +1497,15 @@
     std::shared_ptr<SurfaceSyncObj> syncObj;
 
     if (!surface) {
-        mOutputBufferQueue->configure(nullIgbp, generation, 0, nullptr);
+        mOutputBufferQueue->configure(nullIgbp, generation, 0, maxDequeueCount, nullptr);
     } else if (surface->getUniqueId(&bqId) != OK) {
         LOG(ERROR) << "setOutputSurface -- "
                    "cannot obtain bufferqueue id.";
         bqId = 0;
-        mOutputBufferQueue->configure(nullIgbp, generation, 0, nullptr);
+        mOutputBufferQueue->configure(nullIgbp, generation, 0, maxDequeueCount, nullptr);
     } else {
-        mOutputBufferQueue->configure(surface, generation, bqId, nullptr);
+        mOutputBufferQueue->configure(surface, generation, bqId, maxDequeueCount, mBase1_2 ?
+                                      &syncObj : nullptr);
     }
     ALOGD("surface generation remote change %u HAL ver: %s",
           generation, syncObj ? "1.2" : "1.0");
diff --git a/media/codec2/hidl/client/include/codec2/hidl/client.h b/media/codec2/hidl/client/include/codec2/hidl/client.h
index eca268e..347e58a 100644
--- a/media/codec2/hidl/client/include/codec2/hidl/client.h
+++ b/media/codec2/hidl/client/include/codec2/hidl/client.h
@@ -384,7 +384,8 @@
     c2_status_t setOutputSurface(
             C2BlockPool::local_id_t blockPoolId,
             const sp<IGraphicBufferProducer>& surface,
-            uint32_t generation);
+            uint32_t generation,
+            int maxDequeueBufferCount);
 
     // Extract a slot number from of the block, then call
     // IGraphicBufferProducer::queueBuffer().
diff --git a/media/codec2/hidl/client/include/codec2/hidl/output.h b/media/codec2/hidl/client/include/codec2/hidl/output.h
index 0f03b36..877148a 100644
--- a/media/codec2/hidl/client/include/codec2/hidl/output.h
+++ b/media/codec2/hidl/client/include/codec2/hidl/output.h
@@ -47,6 +47,7 @@
     bool configure(const sp<IGraphicBufferProducer>& igbp,
                    uint32_t generation,
                    uint64_t bqId,
+                   int maxDequeueBufferCount,
                    std::shared_ptr<V1_2::SurfaceSyncObj> *syncObj);
 
     // Render a graphic block to current surface.
diff --git a/media/codec2/hidl/client/output.cpp b/media/codec2/hidl/client/output.cpp
index 7df0da2..283ed8d 100644
--- a/media/codec2/hidl/client/output.cpp
+++ b/media/codec2/hidl/client/output.cpp
@@ -178,6 +178,7 @@
 bool OutputBufferQueue::configure(const sp<IGraphicBufferProducer>& igbp,
                                   uint32_t generation,
                                   uint64_t bqId,
+                                  int maxDequeueBufferCount,
                                   std::shared_ptr<V1_2::SurfaceSyncObj> *syncObj) {
     uint64_t consumerUsage = 0;
     if (igbp->getConsumerUsage(&consumerUsage) != OK) {
@@ -219,6 +220,20 @@
     {
         std::scoped_lock<std::mutex> l(mMutex);
         if (generation == mGeneration) {
+            // case of old BlockPool destruction
+            C2SyncVariables *var = mSyncMem ? mSyncMem->mem() : nullptr;
+            if (var) {
+                *syncObj = std::make_shared<V1_2::SurfaceSyncObj>();
+                (*syncObj)->bqId = bqId;
+                (*syncObj)->syncMemory = mSyncMem->handle();
+                (*syncObj)->generationId = generation;
+                (*syncObj)->consumerUsage = consumerUsage;
+                mMaxDequeueBufferCount = maxDequeueBufferCount;
+                var->lock();
+                var->setSyncStatusLocked(C2SyncVariables::STATUS_INIT);
+                var->setInitialDequeueCountLocked(mMaxDequeueBufferCount, 0);
+                var->unlock();
+            }
             return false;
         }
         std::shared_ptr<C2SurfaceSyncMemory> oldMem = mSyncMem;
@@ -238,6 +253,7 @@
         mGeneration = generation;
         mBqId = bqId;
         mOwner = std::make_shared<int>(0);
+        mMaxDequeueBufferCount = maxDequeueBufferCount;
         for (int i = 0; i < BufferQueueDefs::NUM_BUFFER_SLOTS; ++i) {
             if (mBqId == 0 || !mBuffers[i]) {
                 continue;
@@ -288,7 +304,9 @@
             mPoolDatas[i] = poolDatas[i];
         }
         if (newSync) {
-            newSync->setInitialDequeueCount(mMaxDequeueBufferCount, success);
+            newSync->lock();
+            newSync->setInitialDequeueCountLocked(mMaxDequeueBufferCount, success);
+            newSync->unlock();
         }
     }
     ALOGD("remote graphic buffer migration %zu/%zu",
@@ -452,6 +470,7 @@
         syncVar->unlock();
     }
     mMutex.unlock();
+    ALOGD("set max dequeue count %d from update", maxDequeueBufferCount);
 }
 
 }  // namespace c2
diff --git a/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.2-default-arm64.policy b/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.2-default-arm64.policy
index f701987..5d0284f 100644
--- a/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.2-default-arm64.policy
+++ b/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.2-default-arm64.policy
@@ -35,7 +35,7 @@
 # on ARM is statically loaded at 0xffff 0000. See
 # http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.ddi0211h/Babfeega.html
 # for more details.
-mremap: arg3 == 3
+mremap: arg3 == 3 || arg3 == MREMAP_MAYMOVE
 munmap: 1
 prctl: 1
 writev: 1
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index 63ae5cd..15d2989 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -1798,17 +1798,19 @@
 }
 
 status_t CCodec::setSurface(const sp<Surface> &surface) {
-    Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
-    const std::unique_ptr<Config> &config = *configLocked;
-    if (config->mTunneled && config->mSidebandHandle != nullptr) {
-        sp<ANativeWindow> nativeWindow = static_cast<ANativeWindow *>(surface.get());
-        status_t err = native_window_set_sideband_stream(
-                nativeWindow.get(),
-                const_cast<native_handle_t *>(config->mSidebandHandle->handle()));
-        if (err != OK) {
-            ALOGE("NativeWindow(%p) native_window_set_sideband_stream(%p) failed! (err %d).",
-                    nativeWindow.get(), config->mSidebandHandle->handle(), err);
-            return err;
+    {
+        Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+        const std::unique_ptr<Config> &config = *configLocked;
+        if (config->mTunneled && config->mSidebandHandle != nullptr) {
+            sp<ANativeWindow> nativeWindow = static_cast<ANativeWindow *>(surface.get());
+            status_t err = native_window_set_sideband_stream(
+                    nativeWindow.get(),
+                    const_cast<native_handle_t *>(config->mSidebandHandle->handle()));
+            if (err != OK) {
+                ALOGE("NativeWindow(%p) native_window_set_sideband_stream(%p) failed! (err %d).",
+                        nativeWindow.get(), config->mSidebandHandle->handle(), err);
+                return err;
+            }
         }
     }
     return mChannel->setSurface(surface);
@@ -2149,80 +2151,92 @@
             }
 
             // handle configuration changes in work done
-            Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
-            const std::unique_ptr<Config> &config = *configLocked;
-            Config::Watcher<C2StreamInitDataInfo::output> initData =
-                config->watch<C2StreamInitDataInfo::output>();
-            if (!work->worklets.empty()
-                    && (work->worklets.front()->output.flags
-                            & C2FrameData::FLAG_DISCARD_FRAME) == 0) {
+            std::unique_ptr<C2Param> initData;
+            sp<AMessage> outputFormat = nullptr;
+            {
+                Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+                const std::unique_ptr<Config> &config = *configLocked;
+                Config::Watcher<C2StreamInitDataInfo::output> initDataWatcher =
+                    config->watch<C2StreamInitDataInfo::output>();
+                if (!work->worklets.empty()
+                        && (work->worklets.front()->output.flags
+                                & C2FrameData::FLAG_DISCARD_FRAME) == 0) {
 
-                // copy buffer info to config
-                std::vector<std::unique_ptr<C2Param>> updates;
-                for (const std::unique_ptr<C2Param> &param
-                        : work->worklets.front()->output.configUpdate) {
-                    updates.push_back(C2Param::Copy(*param));
-                }
-                unsigned stream = 0;
-                for (const std::shared_ptr<C2Buffer> &buf : work->worklets.front()->output.buffers) {
-                    for (const std::shared_ptr<const C2Info> &info : buf->info()) {
-                        // move all info into output-stream #0 domain
-                        updates.emplace_back(C2Param::CopyAsStream(*info, true /* output */, stream));
+                    // copy buffer info to config
+                    std::vector<std::unique_ptr<C2Param>> updates;
+                    for (const std::unique_ptr<C2Param> &param
+                            : work->worklets.front()->output.configUpdate) {
+                        updates.push_back(C2Param::Copy(*param));
+                    }
+                    unsigned stream = 0;
+                    std::vector<std::shared_ptr<C2Buffer>> &outputBuffers =
+                        work->worklets.front()->output.buffers;
+                    for (const std::shared_ptr<C2Buffer> &buf : outputBuffers) {
+                        for (const std::shared_ptr<const C2Info> &info : buf->info()) {
+                            // move all info into output-stream #0 domain
+                            updates.emplace_back(
+                                    C2Param::CopyAsStream(*info, true /* output */, stream));
+                        }
+
+                        const std::vector<C2ConstGraphicBlock> blocks = buf->data().graphicBlocks();
+                        // for now only do the first block
+                        if (!blocks.empty()) {
+                            // ALOGV("got output buffer with crop %u,%u+%u,%u and size %u,%u",
+                            //      block.crop().left, block.crop().top,
+                            //      block.crop().width, block.crop().height,
+                            //      block.width(), block.height());
+                            const C2ConstGraphicBlock &block = blocks[0];
+                            updates.emplace_back(new C2StreamCropRectInfo::output(
+                                    stream, block.crop()));
+                            updates.emplace_back(new C2StreamPictureSizeInfo::output(
+                                    stream, block.crop().width, block.crop().height));
+                        }
+                        ++stream;
                     }
 
-                    const std::vector<C2ConstGraphicBlock> blocks = buf->data().graphicBlocks();
-                    // for now only do the first block
-                    if (!blocks.empty()) {
-                        // ALOGV("got output buffer with crop %u,%u+%u,%u and size %u,%u",
-                        //      block.crop().left, block.crop().top,
-                        //      block.crop().width, block.crop().height,
-                        //      block.width(), block.height());
-                        const C2ConstGraphicBlock &block = blocks[0];
-                        updates.emplace_back(new C2StreamCropRectInfo::output(stream, block.crop()));
-                        updates.emplace_back(new C2StreamPictureSizeInfo::output(
-                                stream, block.crop().width, block.crop().height));
-                    }
-                    ++stream;
-                }
+                    sp<AMessage> oldFormat = config->mOutputFormat;
+                    config->updateConfiguration(updates, config->mOutputDomain);
+                    RevertOutputFormatIfNeeded(oldFormat, config->mOutputFormat);
 
-                sp<AMessage> outputFormat = config->mOutputFormat;
-                config->updateConfiguration(updates, config->mOutputDomain);
-                RevertOutputFormatIfNeeded(outputFormat, config->mOutputFormat);
-
-                // copy standard infos to graphic buffers if not already present (otherwise, we
-                // may overwrite the actual intermediate value with a final value)
-                stream = 0;
-                const static C2Param::Index stdGfxInfos[] = {
-                    C2StreamRotationInfo::output::PARAM_TYPE,
-                    C2StreamColorAspectsInfo::output::PARAM_TYPE,
-                    C2StreamDataSpaceInfo::output::PARAM_TYPE,
-                    C2StreamHdrStaticInfo::output::PARAM_TYPE,
-                    C2StreamHdr10PlusInfo::output::PARAM_TYPE,
-                    C2StreamPixelAspectRatioInfo::output::PARAM_TYPE,
-                    C2StreamSurfaceScalingInfo::output::PARAM_TYPE
-                };
-                for (const std::shared_ptr<C2Buffer> &buf : work->worklets.front()->output.buffers) {
-                    if (buf->data().graphicBlocks().size()) {
-                        for (C2Param::Index ix : stdGfxInfos) {
-                            if (!buf->hasInfo(ix)) {
-                                const C2Param *param =
-                                    config->getConfigParameterValue(ix.withStream(stream));
-                                if (param) {
-                                    std::shared_ptr<C2Param> info(C2Param::Copy(*param));
-                                    buf->setInfo(std::static_pointer_cast<C2Info>(info));
+                    // copy standard infos to graphic buffers if not already present (otherwise, we
+                    // may overwrite the actual intermediate value with a final value)
+                    stream = 0;
+                    const static C2Param::Index stdGfxInfos[] = {
+                        C2StreamRotationInfo::output::PARAM_TYPE,
+                        C2StreamColorAspectsInfo::output::PARAM_TYPE,
+                        C2StreamDataSpaceInfo::output::PARAM_TYPE,
+                        C2StreamHdrStaticInfo::output::PARAM_TYPE,
+                        C2StreamHdr10PlusInfo::output::PARAM_TYPE,
+                        C2StreamPixelAspectRatioInfo::output::PARAM_TYPE,
+                        C2StreamSurfaceScalingInfo::output::PARAM_TYPE
+                    };
+                    for (const std::shared_ptr<C2Buffer> &buf : outputBuffers) {
+                        if (buf->data().graphicBlocks().size()) {
+                            for (C2Param::Index ix : stdGfxInfos) {
+                                if (!buf->hasInfo(ix)) {
+                                    const C2Param *param =
+                                        config->getConfigParameterValue(ix.withStream(stream));
+                                    if (param) {
+                                        std::shared_ptr<C2Param> info(C2Param::Copy(*param));
+                                        buf->setInfo(std::static_pointer_cast<C2Info>(info));
+                                    }
                                 }
                             }
                         }
+                        ++stream;
                     }
-                    ++stream;
                 }
-            }
-            if (config->mInputSurface) {
-                config->mInputSurface->onInputBufferDone(work->input.ordinal.frameIndex);
+                if (config->mInputSurface) {
+                    config->mInputSurface->onInputBufferDone(work->input.ordinal.frameIndex);
+                }
+                if (initDataWatcher.hasChanged()) {
+                    initData = C2Param::Copy(*initDataWatcher.update().get());
+                }
+                outputFormat = config->mOutputFormat;
             }
             mChannel->onWorkDone(
-                    std::move(work), config->mOutputFormat,
-                    initData.hasChanged() ? initData.update().get() : nullptr);
+                    std::move(work), outputFormat,
+                    initData ? (C2StreamInitDataInfo::output *)initData.get() : nullptr);
             break;
         }
         case kWhatWatch: {
@@ -2307,9 +2321,13 @@
             pendingDeadline = true;
         }
     }
-    Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
-    const std::unique_ptr<Config> &config = *configLocked;
-    if (config->mTunneled == false && name.empty()) {
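+    // Read mTunneled in a narrow config-lock scope so the lock is not held during the
+    // elapsed-time check below.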
+    bool tunneled = false;
+    {
+        Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+        const std::unique_ptr<Config> &config = *configLocked;
+        tunneled = config->mTunneled;
+    }
+    if (!tunneled && name.empty()) {
         constexpr std::chrono::steady_clock::duration kWorkDurationThreshold = 3s;
         std::chrono::steady_clock::duration elapsed = mChannel->elapsed();
         if (elapsed >= kWorkDurationThreshold) {
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index c4f9d84..e33a5ba 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -1189,9 +1189,6 @@
             }
             outputGeneration = output->generation;
         }
-        if (maxDequeueCount > 0) {
-            mComponent->setOutputSurfaceMaxDequeueCount(maxDequeueCount);
-        }
 
         bool graphic = (oStreamFormat.value == C2BufferData::GRAPHIC);
         C2BlockPool::local_id_t outputPoolId_;
@@ -1331,7 +1328,8 @@
             mComponent->setOutputSurface(
                     outputPoolId_,
                     outputSurface,
-                    outputGeneration);
+                    outputGeneration,
+                    maxDequeueCount);
         }
 
         if (oStreamFormat.value == C2BufferData::LINEAR) {
@@ -1368,7 +1366,7 @@
     // about buffers from the previous generation do not interfere with the
     // newly initialized pipeline capacity.
 
-    {
+    if (inputFormat || outputFormat) {
         Mutexed<PipelineWatcher>::Locked watcher(mPipelineWatcher);
         watcher->inputDelay(inputDelayValue)
                 .pipelineDelay(pipelineDelayValue)
@@ -1468,14 +1466,14 @@
 void CCodecBufferChannel::stop() {
     mSync.stop();
     mFirstValidFrameIndex = mFrameIndex.load(std::memory_order_relaxed);
-    if (mInputSurface != nullptr) {
-        mInputSurface.reset();
-    }
-    mPipelineWatcher.lock()->flush();
 }
 
 void CCodecBufferChannel::reset() {
     stop();
+    if (mInputSurface != nullptr) {
+        mInputSurface.reset();
+    }
+    mPipelineWatcher.lock()->flush();
     {
         Mutexed<Input>::Locked input(mInput);
         input->buffers.reset(new DummyInputBuffers(""));
@@ -1503,8 +1501,10 @@
 
 void CCodecBufferChannel::flush(const std::list<std::unique_ptr<C2Work>> &flushedWork) {
     ALOGV("[%s] flush", mName);
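+    // Remember the frame indices of the flushed work so the pipeline watcher can retire
+    // them individually below.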
+    std::vector<uint64_t> indices;
     std::list<std::unique_ptr<C2Work>> configs;
     for (const std::unique_ptr<C2Work> &work : flushedWork) {
+        indices.push_back(work->input.ordinal.frameIndex.peeku());
         if (!(work->input.flags & C2FrameData::FLAG_CODEC_CONFIG)) {
             continue;
         }
@@ -1517,6 +1517,7 @@
         std::unique_ptr<C2Work> copy(new C2Work);
         copy->input.flags = C2FrameData::flags_t(work->input.flags | C2FrameData::FLAG_DROP_FRAME);
         copy->input.ordinal = work->input.ordinal;
+        copy->input.ordinal.frameIndex = mFrameIndex++;
         copy->input.buffers.insert(
                 copy->input.buffers.begin(),
                 work->input.buffers.begin(),
@@ -1545,7 +1546,12 @@
             output->buffers->flushStash();
         }
     }
-    mPipelineWatcher.lock()->flush();
+    {
+        Mutexed<PipelineWatcher>::Locked watcher(mPipelineWatcher);
+        for (uint64_t index : indices) {
+            watcher->onWorkDone(index);
+        }
+    }
 }
 
 void CCodecBufferChannel::onWorkDone(
@@ -1939,10 +1945,11 @@
                 & ((1 << 10) - 1));
 
     sp<IGraphicBufferProducer> producer;
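+    // Read the max dequeue count once so the same value is applied to the new surface and
+    // passed to the component below.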
+    int maxDequeueCount = mOutputSurface.lock()->maxDequeueBuffers;
     if (newSurface) {
         newSurface->setScalingMode(NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
         newSurface->setDequeueTimeout(kDequeueTimeoutNs);
-        newSurface->setMaxDequeuedBufferCount(mOutputSurface.lock()->maxDequeueBuffers);
+        newSurface->setMaxDequeuedBufferCount(maxDequeueCount);
         producer = newSurface->getIGraphicBufferProducer();
         producer->setGenerationNumber(generation);
     } else {
@@ -1962,7 +1969,8 @@
         if (mComponent->setOutputSurface(
                 outputPoolId,
                 producer,
-                generation) != C2_OK) {
+                generation,
+                maxDequeueCount) != C2_OK) {
             ALOGI("[%s] setSurface: component setOutputSurface failed", mName);
             return INVALID_OPERATION;
         }
diff --git a/media/codec2/sfplugin/CCodecConfig.cpp b/media/codec2/sfplugin/CCodecConfig.cpp
index 7969a6f..27e87e6 100644
--- a/media/codec2/sfplugin/CCodecConfig.cpp
+++ b/media/codec2/sfplugin/CCodecConfig.cpp
@@ -362,7 +362,10 @@
         .limitTo(D::OUTPUT & D::READ));
 
     add(ConfigMapper(KEY_BIT_RATE, C2_PARAMKEY_BITRATE, "value")
-        .limitTo(D::ENCODER & D::OUTPUT));
+        .limitTo(D::ENCODER & D::CODED));
+    // Some audio decoders require bitrate information to be set
+    add(ConfigMapper(KEY_BIT_RATE, C2_PARAMKEY_BITRATE, "value")
+        .limitTo(D::AUDIO & D::DECODER & D::CODED));
     // we also need to put the bitrate in the max bitrate field
     add(ConfigMapper(KEY_MAX_BIT_RATE, C2_PARAMKEY_BITRATE, "value")
         .limitTo(D::ENCODER & D::READ & D::OUTPUT));
@@ -730,6 +733,17 @@
             return C2Value();
         }));
 
+    add(ConfigMapper(KEY_AAC_PROFILE, C2_PARAMKEY_PROFILE_LEVEL, "profile")
+        .limitTo(D::AUDIO & D::ENCODER & (D::CONFIG | D::PARAM))
+        .withMapper([mapper](C2Value v) -> C2Value {
+            C2Config::profile_t c2 = PROFILE_UNUSED;
+            int32_t sdk;
+            if (mapper && v.get(&sdk) && mapper->mapProfile(sdk, &c2)) {
+                return c2;
+            }
+            return PROFILE_UNUSED;
+        }));
+
     // convert to dBFS and add default
     add(ConfigMapper(KEY_AAC_DRC_TARGET_REFERENCE_LEVEL, C2_PARAMKEY_DRC_TARGET_REFERENCE_LEVEL, "value")
         .limitTo(D::AUDIO & D::DECODER & (D::CONFIG | D::PARAM | D::READ))
@@ -1322,6 +1336,14 @@
         }
     }
 
+    // Remove KEY_AAC_SBR_MODE from the SDK message if it is outside the supported range,
+    // as the SDK has no way to signal a default SBR mode based on profile; the key must be
+    // absent from the format to signal that.
+    int sbrMode;
+    if (msg->findInt32(KEY_AAC_SBR_MODE, &sbrMode) && (sbrMode < 0 || sbrMode > 2)) {
+        msg->removeEntryAt(msg->findEntryByName(KEY_AAC_SBR_MODE));
+    }
+
     { // convert color info
         // move default color to color aspect if not read from the component
         int32_t tmp;
diff --git a/media/codec2/sfplugin/PipelineWatcher.cpp b/media/codec2/sfplugin/PipelineWatcher.cpp
index 0ee9056..bc9197c 100644
--- a/media/codec2/sfplugin/PipelineWatcher.cpp
+++ b/media/codec2/sfplugin/PipelineWatcher.cpp
@@ -95,6 +95,7 @@
 }
 
 void PipelineWatcher::flush() {
+    ALOGV("flush");
     mFramesInPipeline.clear();
 }
 
diff --git a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
index a78d811..0966988 100644
--- a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
+++ b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
@@ -346,7 +346,7 @@
     }
     return (img->mPlane[1].mColInc == 2
             && img->mPlane[2].mColInc == 2
-            && (img->mPlane[2].mOffset - img->mPlane[1].mOffset == 1));
+            && (img->mPlane[2].mOffset == img->mPlane[1].mOffset + 1));
 }
 
 bool IsNV21(const MediaImage2 *img) {
@@ -355,7 +355,7 @@
     }
     return (img->mPlane[1].mColInc == 2
             && img->mPlane[2].mColInc == 2
-            && (img->mPlane[1].mOffset - img->mPlane[2].mOffset == 1));
+            && (img->mPlane[1].mOffset == img->mPlane[2].mOffset + 1));
 }
 
 bool IsI420(const MediaImage2 *img) {
diff --git a/media/codec2/vndk/include/C2SurfaceSyncObj.h b/media/codec2/vndk/include/C2SurfaceSyncObj.h
index 16e9a9d..ac87fe4 100644
--- a/media/codec2/vndk/include/C2SurfaceSyncObj.h
+++ b/media/codec2/vndk/include/C2SurfaceSyncObj.h
@@ -53,7 +53,7 @@
      * \param maxDequeueCount           Initial value of # of max dequeued buffer count
      * \param curDequeueCount           Initial value of # of current dequeued buffer count
      */
-    void setInitialDequeueCount(int32_t maxDequeueCount, int32_t curDequeueCount);
+    void setInitialDequeueCountLocked(int32_t maxDequeueCount, int32_t curDequeueCount);
 
     /**
      * Get a waitId which will be used to implement fence.
diff --git a/media/codec2/vndk/platform/C2BqBuffer.cpp b/media/codec2/vndk/platform/C2BqBuffer.cpp
index 2944925..169de0c 100644
--- a/media/codec2/vndk/platform/C2BqBuffer.cpp
+++ b/media/codec2/vndk/platform/C2BqBuffer.cpp
@@ -316,12 +316,15 @@
                     }
                     return C2_BLOCKING;
                 }
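+                // Count this dequeue up front and release the sync lock before calling the
+                // producer; roll the count back below if dequeueBuffer fails.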
+                syncVar->notifyDequeuedLocked();
+                syncVar->unlock();
                 c2Status = dequeueBuffer(width, height, format, androidUsage,
                               &slot, &bufferNeedsReallocation, &fence);
-                if (c2Status == C2_OK) {
-                    syncVar->notifyDequeuedLocked();
+                if (c2Status != C2_OK) {
+                    syncVar->lock();
+                    syncVar->notifyQueuedLocked();
+                    syncVar->unlock();
                 }
-                syncVar->unlock();
             } else {
                 c2Status = dequeueBuffer(width, height, format, usage,
                               &slot, &bufferNeedsReallocation, &fence);
@@ -789,7 +792,7 @@
         sp<GraphicBuffer> newBuffer = new GraphicBuffer(
             graphicBuffer->handle, GraphicBuffer::CLONE_HANDLE,
             graphicBuffer->width, graphicBuffer->height, graphicBuffer->format,
-            graphicBuffer->layerCount, toUsage, graphicBuffer->stride);
+            graphicBuffer->layerCount, toUsage | graphicBuffer->getUsage(), graphicBuffer->stride);
         if (newBuffer->initCheck() == android::NO_ERROR) {
             graphicBuffer = std::move(newBuffer);
         } else {
diff --git a/media/codec2/vndk/platform/C2SurfaceSyncObj.cpp b/media/codec2/vndk/platform/C2SurfaceSyncObj.cpp
index 587992e..e55bdc0 100644
--- a/media/codec2/vndk/platform/C2SurfaceSyncObj.cpp
+++ b/media/codec2/vndk/platform/C2SurfaceSyncObj.cpp
@@ -157,12 +157,10 @@
     return 0;
 }
 
-void C2SyncVariables::setInitialDequeueCount(
+void C2SyncVariables::setInitialDequeueCountLocked(
         int32_t maxDequeueCount, int32_t curDequeueCount) {
-    lock();
     mMaxDequeueCount = maxDequeueCount;
     mCurDequeueCount = curDequeueCount;
-    unlock();
 }
 
 uint32_t C2SyncVariables::getWaitIdLocked() {
diff --git a/media/libaaudio/src/client/AudioStreamInternalCapture.cpp b/media/libaaudio/src/client/AudioStreamInternalCapture.cpp
index 1bbe443..f4a40a8 100644
--- a/media/libaaudio/src/client/AudioStreamInternalCapture.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternalCapture.cpp
@@ -20,6 +20,7 @@
 #include <algorithm>
 #include <audio_utils/primitives.h>
 #include <aaudio/AAudio.h>
+#include <media/MediaMetricsItem.h>
 
 #include "client/AudioStreamInternalCapture.h"
 #include "utility/AudioClock.h"
diff --git a/media/libaaudio/src/client/AudioStreamInternalPlay.cpp b/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
index 3f17e6b..71bde90 100644
--- a/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
@@ -19,6 +19,7 @@
 
 #define ATRACE_TAG ATRACE_TAG_AUDIO
 
+#include <media/MediaMetricsItem.h>
 #include <utils/Trace.h>
 
 #include "client/AudioStreamInternalPlay.h"
diff --git a/media/libaaudio/src/core/AudioStream.cpp b/media/libaaudio/src/core/AudioStream.cpp
index e8f71be..ef83c8e 100644
--- a/media/libaaudio/src/core/AudioStream.cpp
+++ b/media/libaaudio/src/core/AudioStream.cpp
@@ -59,6 +59,10 @@
     if (!mMetricsId.empty()) {
         android::mediametrics::LogItem(mMetricsId)
                 .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_ENDAAUDIOSTREAM)
+                .set(AMEDIAMETRICS_PROP_ENCODINGREQUESTED,
+                     android::toString(mDeviceFormat).c_str())
+                .set(AMEDIAMETRICS_PROP_PERFORMANCEMODEACTUAL,
+                     AudioGlobal_convertPerformanceModeToText(getPerformanceMode()))
                 .record();
     }
 
@@ -124,7 +128,12 @@
             .set(AMEDIAMETRICS_PROP_PERFORMANCEMODE,
                 AudioGlobal_convertPerformanceModeToText(getPerformanceMode()))
             .set(AMEDIAMETRICS_PROP_SHARINGMODE,
-                AudioGlobal_convertSharingModeToText(getSharingMode()));
+                AudioGlobal_convertSharingModeToText(getSharingMode()))
+            .set(AMEDIAMETRICS_PROP_BUFFERCAPACITYFRAMES, getBufferCapacity())
+            .set(AMEDIAMETRICS_PROP_BURSTFRAMES, getFramesPerBurst())
+            .set(AMEDIAMETRICS_PROP_DIRECTION,
+                AudioGlobal_convertDirectionToText(getDirection()));
+
         if (getDirection() == AAUDIO_DIRECTION_OUTPUT) {
             item.set(AMEDIAMETRICS_PROP_PLAYERIID, mPlayerBase->getPlayerIId());
         }
@@ -338,6 +347,22 @@
     return AAUDIO_OK;
 }
 
+void AudioStream::close_l() {
+    // Releasing the stream will set the state to CLOSING.
+    assert(getState() == AAUDIO_STREAM_STATE_CLOSING);
+    // setState() prevents a transition from CLOSING to any state other than CLOSED.
+    // State is checked by destructor.
+    setState(AAUDIO_STREAM_STATE_CLOSED);
+
+    if (!mMetricsId.empty()) {
+        android::mediametrics::LogItem(mMetricsId)
+                .set(AMEDIAMETRICS_PROP_FRAMESTRANSFERRED,
+                        getDirection() == AAUDIO_DIRECTION_INPUT ? getFramesWritten()
+                                                                 : getFramesRead())
+                .record();
+    }
+}
+
 void AudioStream::setState(aaudio_stream_state_t state) {
     ALOGD("%s(s#%d) from %d to %d", __func__, getId(), mState, state);
     if (state == mState) {
diff --git a/media/libaaudio/src/core/AudioStream.h b/media/libaaudio/src/core/AudioStream.h
index abf62f3..3930964 100644
--- a/media/libaaudio/src/core/AudioStream.h
+++ b/media/libaaudio/src/core/AudioStream.h
@@ -146,13 +146,7 @@
      * Free any resources not already freed by release_l().
      * Assume release_l() already called.
      */
-    virtual void close_l() REQUIRES(mStreamLock) {
-        // Releasing the stream will set the state to CLOSING.
-        assert(getState() == AAUDIO_STREAM_STATE_CLOSING);
-        // setState() prevents a transition from CLOSING to any state other than CLOSED.
-        // State is checked by destructor.
-        setState(AAUDIO_STREAM_STATE_CLOSED);
-    }
+    virtual void close_l() REQUIRES(mStreamLock);
 
 public:
     // This is only used to identify a stream in the logs without
diff --git a/media/libaaudio/src/legacy/AudioStreamRecord.cpp b/media/libaaudio/src/legacy/AudioStreamRecord.cpp
index 7733a04..e3ac6ff 100644
--- a/media/libaaudio/src/legacy/AudioStreamRecord.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamRecord.cpp
@@ -308,11 +308,19 @@
 }
 
 void AudioStreamRecord::close_l() {
+    // The callbacks are normally joined in the AudioRecord destructor.
+    // But if another object has a reference to the AudioRecord then
+    // it will not get deleted here.
+    // So we should join callbacks explicitly before returning.
+    // Unlock around the join to avoid deadlocks if the callback tries to lock.
+    // This can happen if the callback returns AAUDIO_CALLBACK_RESULT_STOP.
+    mStreamLock.unlock();
+    mAudioRecord->stopAndJoinCallbacks();
+    mStreamLock.lock();
+
     mAudioRecord.clear();
-    // Do not close mFixedBlockWriter because a data callback
-    // thread might still be running if someone else has a reference
-    // to mAudioRecord.
-    // It has a unique_ptr to its buffer so it will clean up by itself.
+    // Do not close mFixedBlockReader. It has a unique_ptr to its buffer
+    // so it will clean up by itself.
     AudioStream::close_l();
 }
 
diff --git a/media/libaaudio/src/legacy/AudioStreamTrack.cpp b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
index 142a85c..df97658 100644
--- a/media/libaaudio/src/legacy/AudioStreamTrack.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
@@ -259,12 +259,18 @@
 }
 
 void AudioStreamTrack::close_l() {
-    // Stop callbacks before deleting mFixedBlockReader memory.
+    // The callbacks are normally joined in the AudioTrack destructor.
+    // But if another object has a reference to the AudioTrack then
+    // it will not get deleted here.
+    // So we should join callbacks explicitly before returning.
+    // Unlock around the join to avoid deadlocks if the callback tries to lock.
+    // This can happen if the callback returns AAUDIO_CALLBACK_RESULT_STOP.
+    mStreamLock.unlock();
+    mAudioTrack->stopAndJoinCallbacks();
+    mStreamLock.lock();
     mAudioTrack.clear();
-    // Do not close mFixedBlockReader because a data callback
-    // thread might still be running if someone else has a reference
-    // to mAudioRecord.
-    // It has a unique_ptr to its buffer so it will clean up by itself.
+    // Do not close mFixedBlockReader. It has a unique_ptr to its buffer
+    // so it will clean up by itself.
     AudioStream::close_l();
 }
 
diff --git a/media/libaaudio/tests/Android.bp b/media/libaaudio/tests/Android.bp
index f9eebd7..98e9727 100644
--- a/media/libaaudio/tests/Android.bp
+++ b/media/libaaudio/tests/Android.bp
@@ -209,9 +209,9 @@
 }
 
 cc_test {
-    name: "test_stop_hang",
+    name: "test_callback_race",
     defaults: ["libaaudio_tests_defaults"],
-    srcs: ["test_stop_hang.cpp"],
+    srcs: ["test_callback_race.cpp"],
     shared_libs: [
         "libaaudio",
         "libbinder",
diff --git a/media/libaaudio/tests/test_callback_race.cpp b/media/libaaudio/tests/test_callback_race.cpp
new file mode 100644
index 0000000..843d5d7
--- /dev/null
+++ b/media/libaaudio/tests/test_callback_race.cpp
@@ -0,0 +1,209 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Test whether the callback is joined before the close finishes.
+ *
+ * Start a stream with a callback.
+ * The callback just sleeps for a long time.
+ * While the callback is sleeping, close() the stream from the main thread.
+ * Then check to make sure the callback was joined before the close() returns.
+ *
+ * This can hang if there are deadlocks. So make sure you get a PASSED result.
+ */
+
+#include <atomic>
+#include <stdio.h>
+#include <unistd.h>
+
+#include <gtest/gtest.h>
+
+#include <aaudio/AAudio.h>
+
+// Sleep long enough that the foreground has a chance to call close.
+static constexpr int kCallbackSleepMicros = 600 * 1000;
+
+class AudioEngine {
+public:
+
+    // Check for a crash or late callback if we close without stopping.
+    void checkCloseJoins(aaudio_direction_t direction,
+                             aaudio_performance_mode_t perfMode,
+                             aaudio_data_callback_result_t callbackResult) {
+
+        // Make printf print immediately so that debug info is not stuck
+        // in a buffer if we hang or crash.
+        setvbuf(stdout, nullptr, _IONBF, (size_t) 0);
+
+        mCallbackResult = callbackResult;
+        startStreamForStall(direction, perfMode);
+        // When the callback starts it will go to sleep.
+        waitForCallbackToStart();
+
+        printf("call AAudioStream_close()\n");
+        ASSERT_FALSE(mCallbackFinished); // Still sleeping?
+        aaudio_result_t result = AAudioStream_close(mStream); // May hang here!
+        ASSERT_TRUE(mCallbackFinished);
+        ASSERT_EQ(AAUDIO_OK, result);
+        printf("AAudioStream_close() returned %d\n", result);
+
+        ASSERT_EQ(AAUDIO_OK, mError.load());
+        // Did calling stop() from callback fail? It should have.
+        ASSERT_NE(AAUDIO_OK, mStopResult.load());
+    }
+
+private:
+    void startStreamForStall(aaudio_direction_t direction,
+                             aaudio_performance_mode_t perfMode) {
+        AAudioStreamBuilder* builder = nullptr;
+        aaudio_result_t result = AAUDIO_OK;
+
+        // Use an AAudioStreamBuilder to contain requested parameters.
+        result = AAudio_createStreamBuilder(&builder);
+        ASSERT_EQ(AAUDIO_OK, result);
+
+        // Request stream properties.
+        AAudioStreamBuilder_setDirection(builder, direction);
+        AAudioStreamBuilder_setPerformanceMode(builder, perfMode);
+        AAudioStreamBuilder_setDataCallback(builder, s_myDataCallbackProc, this);
+        AAudioStreamBuilder_setErrorCallback(builder, s_myErrorCallbackProc, this);
+
+        // Create an AAudioStream using the Builder.
+        result = AAudioStreamBuilder_openStream(builder, &mStream);
+        AAudioStreamBuilder_delete(builder);
+        ASSERT_EQ(AAUDIO_OK, result);
+
+        // Check to see what kind of stream we actually got.
+        int32_t deviceId = AAudioStream_getDeviceId(mStream);
+        aaudio_performance_mode_t
+            actualPerfMode = AAudioStream_getPerformanceMode(mStream);
+        printf("-------- opened: deviceId = %3d, perfMode = %d\n",
+               deviceId,
+               actualPerfMode);
+
+        // Start stream.
+        result = AAudioStream_requestStart(mStream);
+        ASSERT_EQ(AAUDIO_OK, result);
+    }
+
+    void waitForCallbackToStart() {
+        // Wait for callback to say it has been called.
+        int countDownMillis = 2000;
+        constexpr int countDownPeriodMillis = 50;
+        while (!mCallbackStarted && countDownMillis > 0) {
+            printf("Waiting for callback to start, %d\n", countDownMillis);
+            usleep(countDownPeriodMillis * 1000);
+            countDownMillis -= countDownPeriodMillis;
+        }
+        ASSERT_LT(0, countDownMillis);
+        ASSERT_TRUE(mCallbackStarted);
+    }
+
+    // Data callback that sleeps, then tries to stop the stream from within the callback.
+    static aaudio_data_callback_result_t s_myDataCallbackProc(
+            AAudioStream *stream,
+            void *userData,
+            void * /*audioData */,
+            int32_t /* numFrames */
+    ) {
+        AudioEngine* engine = (AudioEngine*) userData;
+        engine->mCallbackStarted = true;
+        usleep(kCallbackSleepMicros);
+        // It is illegal to call stop() from the callback. It should
+        // return an error and not hang.
+        engine->mStopResult = AAudioStream_requestStop(stream);
+        engine->mCallbackFinished = true;
+        return engine->mCallbackResult;
+    }
+
+    static void s_myErrorCallbackProc(
+                AAudioStream * /* stream */,
+                void *userData,
+                aaudio_result_t error) {
+        AudioEngine *engine = (AudioEngine *)userData;
+        engine->mError = error;
+    }
+
+    AAudioStream* mStream = nullptr;
+
+    std::atomic<aaudio_result_t> mError{AAUDIO_OK}; // written by error callback
+    std::atomic<bool> mCallbackStarted{false};   // written by data callback
+    std::atomic<bool> mCallbackFinished{false};  // written by data callback
+    std::atomic<aaudio_data_callback_result_t> mCallbackResult{AAUDIO_CALLBACK_RESULT_CONTINUE};
+    std::atomic<aaudio_result_t> mStopResult{AAUDIO_OK};
+};
+
+/*********************************************************************/
+// Tell the callback to return AAUDIO_CALLBACK_RESULT_CONTINUE.
+
+TEST(test_close_timing, aaudio_close_joins_input_none) {
+    AudioEngine engine;
+    engine.checkCloseJoins(AAUDIO_DIRECTION_INPUT,
+        AAUDIO_PERFORMANCE_MODE_NONE,
+        AAUDIO_CALLBACK_RESULT_CONTINUE);
+}
+
+TEST(test_close_timing, aaudio_close_joins_output_none) {
+    AudioEngine engine;
+    engine.checkCloseJoins(AAUDIO_DIRECTION_OUTPUT,
+        AAUDIO_PERFORMANCE_MODE_NONE,
+        AAUDIO_CALLBACK_RESULT_CONTINUE);
+}
+
+TEST(test_close_timing, aaudio_close_joins_input_lowlat) {
+    AudioEngine engine;
+    engine.checkCloseJoins(AAUDIO_DIRECTION_INPUT,
+        AAUDIO_PERFORMANCE_MODE_LOW_LATENCY,
+        AAUDIO_CALLBACK_RESULT_CONTINUE);
+}
+
+TEST(test_close_timing, aaudio_close_joins_output_lowlat) {
+    AudioEngine engine;
+    engine.checkCloseJoins(AAUDIO_DIRECTION_OUTPUT,
+        AAUDIO_PERFORMANCE_MODE_LOW_LATENCY,
+        AAUDIO_CALLBACK_RESULT_CONTINUE);
+}
+
+/*********************************************************************/
+// Tell the callback to return AAUDIO_CALLBACK_RESULT_STOP.
+
+TEST(test_close_timing, aaudio_close_joins_input_lowlat_stop) {
+    AudioEngine engine;
+    engine.checkCloseJoins(AAUDIO_DIRECTION_INPUT,
+        AAUDIO_PERFORMANCE_MODE_LOW_LATENCY,
+        AAUDIO_CALLBACK_RESULT_STOP);
+}
+
+TEST(test_close_timing, aaudio_close_joins_output_lowlat_stop) {
+    AudioEngine engine;
+    engine.checkCloseJoins(AAUDIO_DIRECTION_OUTPUT,
+        AAUDIO_PERFORMANCE_MODE_LOW_LATENCY,
+        AAUDIO_CALLBACK_RESULT_STOP);
+}
+
+TEST(test_close_timing, aaudio_close_joins_output_none_stop) {
+    AudioEngine engine;
+    engine.checkCloseJoins(AAUDIO_DIRECTION_OUTPUT,
+        AAUDIO_PERFORMANCE_MODE_NONE,
+        AAUDIO_CALLBACK_RESULT_STOP);
+}
+
+TEST(test_close_timing, aaudio_close_joins_input_none_stop) {
+    AudioEngine engine;
+    engine.checkCloseJoins(AAUDIO_DIRECTION_INPUT,
+        AAUDIO_PERFORMANCE_MODE_NONE,
+        AAUDIO_CALLBACK_RESULT_STOP);
+}
diff --git a/media/libaaudio/tests/test_stop_hang.cpp b/media/libaaudio/tests/test_stop_hang.cpp
deleted file mode 100644
index 982ff4a..0000000
--- a/media/libaaudio/tests/test_stop_hang.cpp
+++ /dev/null
@@ -1,159 +0,0 @@
-/*
- * Copyright (C) 2019 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * Return stop from the callback
- * and then close the stream immediately.
- */
-
-#include <atomic>
-#include <mutex>
-#include <stdio.h>
-#include <thread>
-#include <unistd.h>
-
-#include <aaudio/AAudio.h>
-
-#define DURATION_SECONDS   5
-
-struct AudioEngine {
-    AAudioStreamBuilder *builder = nullptr;
-    AAudioStream        *stream = nullptr;
-    std::thread         *thread = nullptr;
-
-    std::atomic<bool>   started{false};
-    std::mutex          doneLock; // Use a mutex so we can sleep on it while join()ing.
-    std::atomic<bool>   done{false};
-
-    aaudio_result_t join() {
-        aaudio_result_t result = AAUDIO_ERROR_INVALID_STATE;
-        if (stream != nullptr) {
-            while (true) {
-                {
-                    // Will block if the thread is running.
-                    // This mutex is used to close() immediately after the callback returns
-                    // and before the requestStop_l() is called.
-                    std::lock_guard<std::mutex> lock(doneLock);
-                    if (done) break;
-                }
-                printf("join() got mutex but stream not done!");
-                usleep(10 * 1000); // sleep then check again
-            }
-            result = AAudioStream_close(stream);
-            stream = nullptr;
-        }
-        return result;
-    }
-};
-
-// Callback function that fills the audio output buffer.
-static aaudio_data_callback_result_t s_myDataCallbackProc(
-        AAudioStream *stream,
-        void *userData,
-        void *audioData,
-        int32_t numFrames
-) {
-    (void) stream;
-    (void) audioData;
-    (void) numFrames;
-    AudioEngine *engine = (struct AudioEngine *)userData;
-    std::lock_guard<std::mutex> lock(engine->doneLock);
-    engine->started = true;
-    usleep(DURATION_SECONDS * 1000 * 1000); // Mimic SynthMark procedure.
-    engine->done = true;
-    return AAUDIO_CALLBACK_RESULT_STOP;
-}
-
-static void s_myErrorCallbackProc(
-    AAudioStream *stream __unused,
-    void *userData __unused,
-    aaudio_result_t error) {
-    printf("%s() - error = %d\n", __func__, error);
-}
-
-static aaudio_result_t s_OpenAudioStream(struct AudioEngine *engine) {
-    // Use an AAudioStreamBuilder to contain requested parameters.
-    aaudio_result_t result = AAudio_createStreamBuilder(&engine->builder);
-    if (result != AAUDIO_OK) {
-        printf("AAudio_createStreamBuilder returned %s",
-               AAudio_convertResultToText(result));
-        return result;
-    }
-
-    // Request stream properties.
-    AAudioStreamBuilder_setPerformanceMode(engine->builder, AAUDIO_PERFORMANCE_MODE_LOW_LATENCY);
-    AAudioStreamBuilder_setDataCallback(engine->builder, s_myDataCallbackProc, engine);
-    AAudioStreamBuilder_setErrorCallback(engine->builder, s_myErrorCallbackProc, engine);
-
-    // Create an AAudioStream using the Builder.
-    result = AAudioStreamBuilder_openStream(engine->builder, &engine->stream);
-    if (result != AAUDIO_OK) {
-        printf("AAudioStreamBuilder_openStream returned %s",
-               AAudio_convertResultToText(result));
-        return result;
-    }
-
-    return result;
-}
-
-int main(int argc, char **argv) {
-    (void) argc;
-    (void) argv;
-    struct AudioEngine engine;
-    aaudio_result_t result = AAUDIO_OK;
-    int errorCount = 0;
-
-    // Make printf print immediately so that debug info is not stuck
-    // in a buffer if we hang or crash.
-    setvbuf(stdout, nullptr, _IONBF, (size_t) 0);
-
-    printf("Test Return Stop Hang V1.0\n");
-
-    result = s_OpenAudioStream(&engine);
-    if (result != AAUDIO_OK) {
-        printf("s_OpenAudioStream returned %s\n",
-               AAudio_convertResultToText(result));
-        errorCount++;
-    }
-
-    // Check to see what kind of stream we actually got.
-    int32_t deviceId = AAudioStream_getDeviceId(engine.stream);
-    aaudio_performance_mode_t actualPerfMode = AAudioStream_getPerformanceMode(engine.stream);
-    printf("-------- opened: deviceId = %3d, perfMode = %d\n", deviceId, actualPerfMode);
-
-    // Start stream.
-    result = AAudioStream_requestStart(engine.stream);
-    printf("AAudioStream_requestStart() returned %d >>>>>>>>>>>>>>>>>>>>>>\n", result);
-    if (result != AAUDIO_OK) {
-        errorCount++;
-    } else {
-        int counter = 0;
-        while (!engine.started) {
-            printf("Waiting for stream to start, %d\n", counter++);
-            usleep(5 * 1000);
-        }
-        printf("You should see more messages %d seconds after this. If not then the test failed!\n",
-               DURATION_SECONDS);
-        result = engine.join(); // This might hang!
-        AAudioStreamBuilder_delete(engine.builder);
-        engine.builder = nullptr;
-    }
-
-    printf("aaudio result = %d = %s\n", result, AAudio_convertResultToText(result));
-    printf("test %s\n", errorCount ? "FAILED" : "PASSED");
-
-    return errorCount ? EXIT_FAILURE : EXIT_SUCCESS;
-}
diff --git a/media/libaudioclient/AudioRecord.cpp b/media/libaudioclient/AudioRecord.cpp
index e15ef3d..1a4bde9 100644
--- a/media/libaudioclient/AudioRecord.cpp
+++ b/media/libaudioclient/AudioRecord.cpp
@@ -181,21 +181,9 @@
         .set(AMEDIAMETRICS_PROP_STATUS, (int32_t)mStatus)
         .record();
 
+    stopAndJoinCallbacks(); // checks mStatus
+
     if (mStatus == NO_ERROR) {
-        // Make sure that callback function exits in the case where
-        // it is looping on buffer empty condition in obtainBuffer().
-        // Otherwise the callback thread will never exit.
-        stop();
-        if (mAudioRecordThread != 0) {
-            mProxy->interrupt();
-            mAudioRecordThread->requestExit();  // see comment in AudioRecord.h
-            mAudioRecordThread->requestExitAndWait();
-            mAudioRecordThread.clear();
-        }
-        // No lock here: worst case we remove a NULL callback which will be a nop
-        if (mDeviceCallback != 0 && mInput != AUDIO_IO_HANDLE_NONE) {
-            AudioSystem::removeAudioDeviceCallback(this, mInput, mPortId);
-        }
         IInterface::asBinder(mAudioRecord)->unlinkToDeath(mDeathNotifier, this);
         mAudioRecord.clear();
         mCblkMemory.clear();
@@ -208,6 +196,27 @@
     }
 }
 
+void AudioRecord::stopAndJoinCallbacks() {
+    // Prevent nullptr crash if it did not open properly.
+    if (mStatus != NO_ERROR) return;
+
+    // Make sure that callback function exits in the case where
+    // it is looping on buffer empty condition in obtainBuffer().
+    // Otherwise the callback thread will never exit.
+    stop();
+    if (mAudioRecordThread != 0) {
+        mProxy->interrupt();
+        mAudioRecordThread->requestExit();  // see comment in AudioRecord.h
+        mAudioRecordThread->requestExitAndWait();
+        mAudioRecordThread.clear();
+    }
+    // No lock here: worst case we remove a NULL callback which will be a nop
+    if (mDeviceCallback != 0 && mInput != AUDIO_IO_HANDLE_NONE) {
+        // This may not stop all of these device callbacks!
+        // TODO: Add some sort of protection.
+        AudioSystem::removeAudioDeviceCallback(this, mInput, mPortId);
+    }
+}
 status_t AudioRecord::set(
         audio_source_t inputSource,
         uint32_t sampleRate,
@@ -226,7 +235,8 @@
         const audio_attributes_t* pAttributes,
         audio_port_handle_t selectedDeviceId,
         audio_microphone_direction_t selectedMicDirection,
-        float microphoneFieldDimension)
+        float microphoneFieldDimension,
+        int32_t maxSharedAudioHistoryMs)
 {
     status_t status = NO_ERROR;
     uint32_t channelCount;
@@ -259,6 +269,7 @@
     mSelectedDeviceId = selectedDeviceId;
     mSelectedMicDirection = selectedMicDirection;
     mSelectedMicFieldDimension = microphoneFieldDimension;
+    mMaxSharedAudioHistoryMs = maxSharedAudioHistoryMs;
 
     switch (transferType) {
     case TRANSFER_DEFAULT:
@@ -807,6 +818,7 @@
     input.selectedDeviceId = mSelectedDeviceId;
     input.sessionId = mSessionId;
     originalSessionId = mSessionId;
+    input.maxSharedAudioHistoryMs = mMaxSharedAudioHistoryMs;
 
     do {
         media::CreateRecordResponse response;
@@ -828,7 +840,7 @@
         usleep((20 + rand() % 30) * 10000);
     } while (1);
 
-    ALOG_ASSERT(record != 0);
+    ALOG_ASSERT(output.audioRecord != 0);
 
     // AudioFlinger now owns the reference to the I/O handle,
     // so we are no longer responsible for releasing it.
@@ -916,6 +928,10 @@
         AudioSystem::addAudioDeviceCallback(this, output.inputId, output.portId);
     }
 
+    if (!mSharedAudioPackageName.empty()) {
+        mAudioRecord->shareAudioHistory(mSharedAudioPackageName, mSharedAudioStartMs);
+    }
+
     mPortId = output.portId;
     // We retain a copy of the I/O handle, but don't own the reference
     mInput = output.inputId;
@@ -1569,7 +1585,7 @@
 
 void AudioRecord::setLogSessionId(const char *logSessionId)
 {
-     AutoMutex lock(mLock);
+    AutoMutex lock(mLock);
     if (logSessionId == nullptr) logSessionId = "";  // an empty string is an unset session id.
     if (mLogSessionId == logSessionId) return;
 
@@ -1580,6 +1596,22 @@
          .record();
 }
 
+status_t AudioRecord::shareAudioHistory(const std::string& sharedPackageName,
+                                        int64_t sharedStartMs)
+{
+    AutoMutex lock(mLock);
+    if (mAudioRecord == 0) {
+        return NO_INIT;
+    }
+    status_t status = statusTFromBinderStatus(
+            mAudioRecord->shareAudioHistory(sharedPackageName, sharedStartMs));
+    if (status == NO_ERROR) {
+        mSharedAudioPackageName = sharedPackageName;
+        mSharedAudioStartMs = sharedStartMs;
+    }
+    return status;
+}
+
 // =========================================================================
 
 void AudioRecord::DeathNotifier::binderDied(const wp<IBinder>& who __unused)
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index 6c9e85c..1bc3baa 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -327,21 +327,9 @@
         .set(AMEDIAMETRICS_PROP_STATUS, (int32_t)mStatus)
         .record();
 
+    stopAndJoinCallbacks(); // checks mStatus
+
     if (mStatus == NO_ERROR) {
-        // Make sure that callback function exits in the case where
-        // it is looping on buffer full condition in obtainBuffer().
-        // Otherwise the callback thread will never exit.
-        stop();
-        if (mAudioTrackThread != 0) {
-            mProxy->interrupt();
-            mAudioTrackThread->requestExit();   // see comment in AudioTrack.h
-            mAudioTrackThread->requestExitAndWait();
-            mAudioTrackThread.clear();
-        }
-        // No lock here: worst case we remove a NULL callback which will be a nop
-        if (mDeviceCallback != 0 && mOutput != AUDIO_IO_HANDLE_NONE) {
-            AudioSystem::removeAudioDeviceCallback(this, mOutput, mPortId);
-        }
         IInterface::asBinder(mAudioTrack)->unlinkToDeath(mDeathNotifier, this);
         mAudioTrack.clear();
         mCblkMemory.clear();
@@ -355,6 +343,29 @@
     }
 }
 
+void AudioTrack::stopAndJoinCallbacks() {
+    // Prevent nullptr crash if it did not open properly.
+    if (mStatus != NO_ERROR) return;
+
+    // Make sure that callback function exits in the case where
+    // it is looping on buffer full condition in obtainBuffer().
+    // Otherwise the callback thread will never exit.
+    stop();
+    if (mAudioTrackThread != 0) { // not thread safe
+        mProxy->interrupt();
+        mAudioTrackThread->requestExit();   // see comment in AudioTrack.h
+        mAudioTrackThread->requestExitAndWait();
+        mAudioTrackThread.clear();
+    }
+    // No lock here: worst case we remove a NULL callback which will be a nop
+    if (mDeviceCallback != 0 && mOutput != AUDIO_IO_HANDLE_NONE) {
+        // This may not stop all of these device callbacks!
+        // TODO: Add some sort of protection.
+        AudioSystem::removeAudioDeviceCallback(this, mOutput, mPortId);
+        mDeviceCallback.clear();
+    }
+}
+
 status_t AudioTrack::set(
         audio_stream_type_t streamType,
         uint32_t sampleRate,
diff --git a/media/libaudioclient/IAudioFlinger.cpp b/media/libaudioclient/IAudioFlinger.cpp
index 0feafc5..7656307 100644
--- a/media/libaudioclient/IAudioFlinger.cpp
+++ b/media/libaudioclient/IAudioFlinger.cpp
@@ -138,6 +138,8 @@
     aidl.config = VALUE_OR_RETURN(legacy2aidl_audio_config_base_t_AudioConfigBase(config));
     aidl.clientInfo = VALUE_OR_RETURN(legacy2aidl_AudioClient_AudioClient(clientInfo));
     aidl.riid = VALUE_OR_RETURN(legacy2aidl_audio_unique_id_t_int32_t(riid));
+    aidl.maxSharedAudioHistoryMs = VALUE_OR_RETURN(
+            convertIntegral<int32_t>(maxSharedAudioHistoryMs));
     aidl.flags = VALUE_OR_RETURN(legacy2aidl_audio_input_flags_t_int32_t_mask(flags));
     aidl.frameCount = VALUE_OR_RETURN(convertIntegral<int64_t>(frameCount));
     aidl.notificationFrameCount = VALUE_OR_RETURN(convertIntegral<int64_t>(notificationFrameCount));
@@ -151,10 +153,13 @@
 IAudioFlinger::CreateRecordInput::fromAidl(
         const media::CreateRecordRequest& aidl) {
     IAudioFlinger::CreateRecordInput legacy;
-    legacy.attr = VALUE_OR_RETURN(aidl2legacy_AudioAttributesInternal_audio_attributes_t(aidl.attr));
+    legacy.attr = VALUE_OR_RETURN(
+            aidl2legacy_AudioAttributesInternal_audio_attributes_t(aidl.attr));
     legacy.config = VALUE_OR_RETURN(aidl2legacy_AudioConfigBase_audio_config_base_t(aidl.config));
     legacy.clientInfo = VALUE_OR_RETURN(aidl2legacy_AudioClient_AudioClient(aidl.clientInfo));
     legacy.riid = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_unique_id_t(aidl.riid));
+    legacy.maxSharedAudioHistoryMs = VALUE_OR_RETURN(
+            convertIntegral<int32_t>(aidl.maxSharedAudioHistoryMs));
     legacy.flags = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_input_flags_t_mask(aidl.flags));
     legacy.frameCount = VALUE_OR_RETURN(convertIntegral<size_t>(aidl.frameCount));
     legacy.notificationFrameCount = VALUE_OR_RETURN(
diff --git a/media/libaudioclient/aidl/android/media/CreateRecordRequest.aidl b/media/libaudioclient/aidl/android/media/CreateRecordRequest.aidl
index 62007da..5b26d22 100644
--- a/media/libaudioclient/aidl/android/media/CreateRecordRequest.aidl
+++ b/media/libaudioclient/aidl/android/media/CreateRecordRequest.aidl
@@ -34,6 +34,7 @@
     AudioClient clientInfo;
     /** Interpreted as audio_unique_id_t. */
     int riid;
+    int maxSharedAudioHistoryMs;
     /** Bitmask, indexed by AudioInputFlags. */
     int flags;
     long frameCount;
diff --git a/media/libaudioclient/aidl/android/media/IAudioRecord.aidl b/media/libaudioclient/aidl/android/media/IAudioRecord.aidl
index 1772653..44ef80b 100644
--- a/media/libaudioclient/aidl/android/media/IAudioRecord.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioRecord.aidl
@@ -48,4 +48,6 @@
   /* Set the microphone zoom (for processing).
    */
   void setPreferredMicrophoneFieldDimension(float zoom);
+
+  void shareAudioHistory(@utf8InCpp String sharedAudioPackageName, long sharedAudioStartMs);
 }
diff --git a/media/libaudioclient/include/media/AudioRecord.h b/media/libaudioclient/include/media/AudioRecord.h
index 82a29d4..9965e25 100644
--- a/media/libaudioclient/include/media/AudioRecord.h
+++ b/media/libaudioclient/include/media/AudioRecord.h
@@ -241,7 +241,8 @@
                             audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE,
                             audio_microphone_direction_t
                                 selectedMicDirection = MIC_DIRECTION_UNSPECIFIED,
-                            float selectedMicFieldDimension = MIC_FIELD_DIMENSION_DEFAULT);
+                            float selectedMicFieldDimension = MIC_FIELD_DIMENSION_DEFAULT,
+                            int32_t maxSharedAudioHistoryMs = 0);
 
     /* Result of constructing the AudioRecord. This must be checked for successful initialization
      * before using any AudioRecord API (except for set()), because using
@@ -303,6 +304,19 @@
             void        stop();
             bool        stopped() const;
 
+    /* Calls stop() and then waits for all of the callbacks to return.
+     * It is safe to call this if stop() or pause() has already been called.
+     *
+     * This function is called from the destructor. But since AudioRecord
+     * is ref counted, the destructor may be called later than desired.
+     * This can be called explicitly as part of closing an AudioRecord
+     * if you want to be certain that callbacks have completely finished.
+     *
+     * This is not thread safe and should only be called from one thread,
+     * ideally as the AudioRecord is being closed.
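+     *
+     * A minimal usage sketch (hypothetical caller code):
+     *     sp<AudioRecord> record = ...;
+     *     record->stopAndJoinCallbacks(); // blocks until the callback thread has exited
+     *     record.clear();                 // now safe to drop this reference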
+     */
+            void        stopAndJoinCallbacks();
+
     /* Return the sink sample rate for this record track in Hz.
      * If specified as zero in constructor or set(), this will be the source sample rate.
      * Unlike AudioTrack, the sample rate is const after initialization, so doesn't need a lock.
@@ -583,6 +597,10 @@
      */
             void setLogSessionId(const char *logSessionId);
 
+
+            status_t shareAudioHistory(const std::string& sharedPackageName,
+                                       int64_t sharedStartMs);
+
      /*
       * Dumps the state of an audio record.
       */
@@ -766,6 +784,10 @@
     audio_microphone_direction_t mSelectedMicDirection;
     float mSelectedMicFieldDimension;
 
+    int32_t                    mMaxSharedAudioHistoryMs = 0;
+    std::string                mSharedAudioPackageName = {};
+    int64_t                    mSharedAudioStartMs = 0;
+
 private:
     class MediaMetrics {
       public:
diff --git a/media/libaudioclient/include/media/AudioTrack.h b/media/libaudioclient/include/media/AudioTrack.h
index d167c40..c293343 100644
--- a/media/libaudioclient/include/media/AudioTrack.h
+++ b/media/libaudioclient/include/media/AudioTrack.h
@@ -479,6 +479,19 @@
             void        stop();
             bool        stopped() const;
 
+    /* Call stop() and then wait for all of the callbacks to return.
+     * It is safe to call this if stop() or pause() has already been called.
+     *
+     * This function is called from the destructor. But since AudioTrack
+     * is ref counted, the destructor may be called later than desired.
+     * This can be called explicitly as part of closing an AudioTrack
+     * if you want to be certain that callbacks have completely finished.
+     *
+     * This is not thread safe and should only be called from one thread,
+     * ideally as the AudioTrack is being closed.
+     */
+            void        stopAndJoinCallbacks();
+
     /* Flush a stopped or paused track. All previously buffered data is discarded immediately.
      * This has the effect of draining the buffers without mixing or output.
      * Flush is intended for streaming mode, for example before switching to non-contiguous content.
diff --git a/media/libaudioclient/include/media/IAudioFlinger.h b/media/libaudioclient/include/media/IAudioFlinger.h
index 7f7ca85..3a5d164 100644
--- a/media/libaudioclient/include/media/IAudioFlinger.h
+++ b/media/libaudioclient/include/media/IAudioFlinger.h
@@ -130,6 +130,7 @@
         AudioClient clientInfo;
         media::permission::Identity identity;
         audio_unique_id_t riid;
+        int32_t maxSharedAudioHistoryMs;
 
         /* input/output */
         audio_input_flags_t flags;
diff --git a/media/libeffects/lvm/lib/Android.bp b/media/libeffects/lvm/lib/Android.bp
index 5d75055..7998879 100644
--- a/media/libeffects/lvm/lib/Android.bp
+++ b/media/libeffects/lvm/lib/Android.bp
@@ -63,7 +63,6 @@
         "Common/src/DC_2I_D16_TRC_WRA_01_Init.cpp",
         "Common/src/Copy_16.cpp",
         "Common/src/MonoTo2I_32.cpp",
-        "Common/src/LoadConst_32.cpp",
         "Common/src/dB_to_Lin32.cpp",
         "Common/src/Shift_Sat_v16xv16.cpp",
         "Common/src/Shift_Sat_v32xv32.cpp",
@@ -148,7 +147,6 @@
         "Reverb/src/LVREV_Process.cpp",
         "Reverb/src/LVREV_SetControlParameters.cpp",
         "Reverb/src/LVREV_Tables.cpp",
-        "Common/src/LoadConst_32.cpp",
         "Common/src/From2iToMono_32.cpp",
         "Common/src/Mult3s_32x16.cpp",
         "Common/src/Copy_16.cpp",
diff --git a/media/libeffects/lvm/lib/Bundle/src/LVM_Init.cpp b/media/libeffects/lvm/lib/Bundle/src/LVM_Init.cpp
index 9f5f448..12b86f3 100644
--- a/media/libeffects/lvm/lib/Bundle/src/LVM_Init.cpp
+++ b/media/libeffects/lvm/lib/Bundle/src/LVM_Init.cpp
@@ -137,9 +137,9 @@
 
         pInstance->pBufferManagement->pScratch = (LVM_FLOAT*)pInstance->pScratch;
 
-        LoadConst_Float(0, /* Clear the input delay buffer */
-                        (LVM_FLOAT*)&pInstance->pBufferManagement->InDelayBuffer,
-                        (LVM_INT16)(LVM_MAX_CHANNELS * MIN_INTERNAL_BLOCKSIZE));
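+        /* Clear the input delay buffer */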
+        memset(pInstance->pBufferManagement->InDelayBuffer, 0,
+                LVM_MAX_CHANNELS * MIN_INTERNAL_BLOCKSIZE *
+                sizeof(pInstance->pBufferManagement->InDelayBuffer[0]));
         pInstance->pBufferManagement->InDelaySamples =
                 MIN_INTERNAL_BLOCKSIZE;                    /* Set the number of delay samples */
         pInstance->pBufferManagement->OutDelaySamples = 0; /* No samples in the output buffer */
diff --git a/media/libeffects/lvm/lib/Common/lib/VectorArithmetic.h b/media/libeffects/lvm/lib/Common/lib/VectorArithmetic.h
index 18de85b..10f351e 100644
--- a/media/libeffects/lvm/lib/Common/lib/VectorArithmetic.h
+++ b/media/libeffects/lvm/lib/Common/lib/VectorArithmetic.h
@@ -24,8 +24,6 @@
     VARIOUS FUNCTIONS
 ***********************************************************************************/
 
-void LoadConst_Float(const LVM_FLOAT val, LVM_FLOAT* dst, LVM_INT16 n);
-
 void Copy_Float(const LVM_FLOAT* src, LVM_FLOAT* dst, LVM_INT16 n);
 void Copy_Float_Mc_Stereo(const LVM_FLOAT* src, LVM_FLOAT* dst, LVM_INT16 NrFrames,
                           LVM_INT32 NrChannels);
diff --git a/media/libeffects/lvm/lib/Common/src/LVC_MixSoft_1St_D16C31_SAT.cpp b/media/libeffects/lvm/lib/Common/src/LVC_MixSoft_1St_D16C31_SAT.cpp
index be19fa0..5a67bda 100644
--- a/media/libeffects/lvm/lib/Common/src/LVC_MixSoft_1St_D16C31_SAT.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVC_MixSoft_1St_D16C31_SAT.cpp
@@ -19,6 +19,7 @@
    INCLUDE FILES
 ***********************************************************************************/
 
+#include <string.h>
 #include "LVC_Mixer_Private.h"
 #include "VectorArithmetic.h"
 #include "ScalarArithmetic.h"
@@ -68,7 +69,7 @@
 
     if (HardMixing) {
         if (pInstance->Target == 0)
-            LoadConst_Float(0.0, dst, n);
+            memset(dst, 0, n * sizeof(*dst));
         else {
             if ((pInstance->Target) != 1.0f)
                 Mult3s_Float(src, (pInstance->Target), dst, n);
@@ -150,7 +151,7 @@
 
     if (HardMixing) {
         if (pInstance->Target == 0)
-            LoadConst_Float(0.0, dst, NrFrames * NrChannels);
+            memset(dst, 0, NrFrames * NrChannels * sizeof(*dst));
         else {
             if ((pInstance->Target) != 1.0f)
                 Mult3s_Float(src, (pInstance->Target), dst, NrFrames * NrChannels);
diff --git a/media/libeffects/lvm/lib/Common/src/LoadConst_32.cpp b/media/libeffects/lvm/lib/Common/src/LoadConst_32.cpp
deleted file mode 100644
index df7a558..0000000
--- a/media/libeffects/lvm/lib/Common/src/LoadConst_32.cpp
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**********************************************************************************
-   INCLUDE FILES
-***********************************************************************************/
-
-#include "VectorArithmetic.h"
-
-/**********************************************************************************
-   FUNCTION LoadConst_32
-***********************************************************************************/
-void LoadConst_Float(const LVM_FLOAT val, LVM_FLOAT* dst, LVM_INT16 n) {
-    LVM_INT16 ii;
-
-    for (ii = n; ii != 0; ii--) {
-        *dst = val;
-        dst++;
-    }
-
-    return;
-}
-
-/**********************************************************************************/
diff --git a/media/libeffects/lvm/lib/Common/src/MixSoft_1St_D32C31_WRA.cpp b/media/libeffects/lvm/lib/Common/src/MixSoft_1St_D32C31_WRA.cpp
index 8408962..58a9102 100644
--- a/media/libeffects/lvm/lib/Common/src/MixSoft_1St_D32C31_WRA.cpp
+++ b/media/libeffects/lvm/lib/Common/src/MixSoft_1St_D32C31_WRA.cpp
@@ -19,6 +19,7 @@
    INCLUDE FILES
 ***********************************************************************************/
 
+#include <string.h>
 #include "Mixer_private.h"
 #include "VectorArithmetic.h"
 
@@ -61,7 +62,7 @@
 
     if (HardMixing) {
         if (pInstance->Target == 0)
-            LoadConst_Float(0, dst, n);
+            memset(dst, 0, n * sizeof(*dst));
         else if ((pInstance->Target) == 1.0f) {
             if (src != dst) Copy_Float((LVM_FLOAT*)src, (LVM_FLOAT*)dst, (LVM_INT16)(n));
         } else
diff --git a/media/libeffects/lvm/lib/Reverb/src/LVREV_ClearAudioBuffers.cpp b/media/libeffects/lvm/lib/Reverb/src/LVREV_ClearAudioBuffers.cpp
index d4b321f..be3505f 100644
--- a/media/libeffects/lvm/lib/Reverb/src/LVREV_ClearAudioBuffers.cpp
+++ b/media/libeffects/lvm/lib/Reverb/src/LVREV_ClearAudioBuffers.cpp
@@ -60,7 +60,8 @@
     pLVREV_Private->pRevLPFBiquad->clear();
     for (size_t i = 0; i < pLVREV_Private->InstanceParams.NumDelays; i++) {
         pLVREV_Private->revLPFBiquad[i]->clear();
-        LoadConst_Float(0, pLVREV_Private->pDelay_T[i], LVREV_MAX_T_DELAY[i]);
+        memset(pLVREV_Private->pDelay_T[i], 0, LVREV_MAX_T_DELAY[i] *
+                sizeof(pLVREV_Private->pDelay_T[i][0]));
     }
     return LVREV_SUCCESS;
 }
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_ReverbGenerator.cpp b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_ReverbGenerator.cpp
index c5b6598..de23d07 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_ReverbGenerator.cpp
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_ReverbGenerator.cpp
@@ -81,10 +81,7 @@
         pConfig->DelaySize =
                 (pParams->NrChannels == FCC_1) ? (LVM_INT16)Delay : (LVM_INT16)(FCC_2 * Delay);
         pConfig->DelayOffset = 0;
-        LoadConst_Float(0,                                      /* Value */
-                        (LVM_FLOAT*)&pConfig->StereoSamples[0], /* Destination */
-                        /* Number of words */
-                        (LVM_UINT16)(sizeof(pConfig->StereoSamples) / sizeof(LVM_FLOAT)));
+        memset(pConfig->StereoSamples, 0, sizeof(pConfig->StereoSamples));
         /*
          * Setup the filters
          */
diff --git a/media/libmediaformatshaper/Android.bp b/media/libmediaformatshaper/Android.bp
index 3107e12..bdd1465 100644
--- a/media/libmediaformatshaper/Android.bp
+++ b/media/libmediaformatshaper/Android.bp
@@ -56,6 +56,10 @@
         "include",
     ],
 
+    header_libs: [
+        "libstagefright_headers",
+    ],
+
     shared_libs: [
         "liblog",
         "libutils",
diff --git a/media/libmediaformatshaper/CodecProperties.cpp b/media/libmediaformatshaper/CodecProperties.cpp
index e6b3c46..315b3ec 100644
--- a/media/libmediaformatshaper/CodecProperties.cpp
+++ b/media/libmediaformatshaper/CodecProperties.cpp
@@ -21,7 +21,9 @@
 #include <string>
 #include <stdlib.h>
 
-#include <media/formatshaper/CodecProperties.h>
+#include "CodecProperties.h"
+
+#include <media/stagefright/MediaCodecConstants.h>
 
 
 // we aren't going to mess with shaping points dimensions beyond this
@@ -68,11 +70,11 @@
     ALOGD("setFeatureValue(%s,%d)", key.c_str(), value);
     mFeatures.insert({key, value});
 
-    if (!strcmp(key.c_str(), "qp-bounds")) {               // official key
+    if (!strcmp(key.c_str(), FEATURE_QpBounds)) {
         setSupportsQp(1);
-    } else if (!strcmp(key.c_str(), "vq-supports-qp")) {   // key from prototyping
-        setSupportsQp(1);
-    } else if (!strcmp(key.c_str(), "vq-minimum-quality")) {
+    } else if (!strcmp(key.c_str(), "video-minimum-quality")) {
+        setSupportedMinimumQuality(1);
+    } else if (!strcmp(key.c_str(), "vq-minimum-quality")) { // from prototyping
         setSupportedMinimumQuality(1);
     }
 }
diff --git a/media/libmediaformatshaper/include/media/formatshaper/CodecProperties.h b/media/libmediaformatshaper/CodecProperties.h
similarity index 100%
rename from media/libmediaformatshaper/include/media/formatshaper/CodecProperties.h
rename to media/libmediaformatshaper/CodecProperties.h
diff --git a/media/libmediaformatshaper/CodecSeeding.cpp b/media/libmediaformatshaper/CodecSeeding.cpp
index a7fcc66..7fe1075 100644
--- a/media/libmediaformatshaper/CodecSeeding.cpp
+++ b/media/libmediaformatshaper/CodecSeeding.cpp
@@ -20,7 +20,7 @@
 
 #include <string>
 
-#include <media/formatshaper/CodecProperties.h>
+#include "CodecProperties.h"
 
 namespace android {
 namespace mediaformatshaper {
@@ -49,7 +49,7 @@
  */
 
 static preloadTuning_t featuresAvc[] = {
-      {true, "vq-target-bpp", "2.45"},
+      // {true, "vq-target-bpp", "2.45"},
       {true, "vq-target-bpp-1080p", "2.40"},
       {true, "vq-target-bpp-540p", "2.60"},
       {true, "vq-target-bpp-480p", "3.00"},
@@ -58,8 +58,11 @@
 };
 
 static preloadTuning_t featuresHevc[] = {
-      {true, "vq-target-bpp", "2.30"},
-      {true, "vq-target-qpmax", "40"}, // nop, since hevc codecs don't declare qp support
+      // {true, "vq-target-bpp", "1.80"},
+      {true, "vq-target-bpp-1080p", "1.50"},
+      {true, "vq-target-bpp-720p", "1.80"},
+      {true, "vq-target-bpp-540p", "2.10"},
+      // no qp for hevc, at least for now
       {true, nullptr, 0}
 };
 
diff --git a/media/libmediaformatshaper/FormatShaper.cpp b/media/libmediaformatshaper/FormatShaper.cpp
index 42502e0..451f772 100644
--- a/media/libmediaformatshaper/FormatShaper.cpp
+++ b/media/libmediaformatshaper/FormatShaper.cpp
@@ -23,10 +23,11 @@
 
 #include <media/NdkMediaFormat.h>
 
-#include <media/formatshaper/VQops.h>
-#include <media/formatshaper/CodecProperties.h>
+#include "CodecProperties.h"
+#include "VideoShaper.h"
+#include "VQops.h"
+
 #include <media/formatshaper/FormatShaper.h>
-#include <media/formatshaper/VideoShaper.h>
 
 namespace android {
 namespace mediaformatshaper {
diff --git a/media/libmediaformatshaper/ManageShapingCodecs.cpp b/media/libmediaformatshaper/ManageShapingCodecs.cpp
index bdc395f..3061d0b 100644
--- a/media/libmediaformatshaper/ManageShapingCodecs.cpp
+++ b/media/libmediaformatshaper/ManageShapingCodecs.cpp
@@ -23,7 +23,8 @@
 #include <inttypes.h>
 
 #include <media/NdkMediaFormat.h>
-#include <media/formatshaper/CodecProperties.h>
+
+#include "CodecProperties.h"
 
 namespace android {
 namespace mediaformatshaper {
diff --git a/media/libmediaformatshaper/VQApply.cpp b/media/libmediaformatshaper/VQApply.cpp
index 08e23cc..4f6a6c3 100644
--- a/media/libmediaformatshaper/VQApply.cpp
+++ b/media/libmediaformatshaper/VQApply.cpp
@@ -23,9 +23,9 @@
 
 #include <media/NdkMediaFormat.h>
 
-#include <media/formatshaper/VQops.h>
-#include <media/formatshaper/CodecProperties.h>
-#include <media/formatshaper/VideoShaper.h>
+#include "VQops.h"
+#include "CodecProperties.h"
+#include "VideoShaper.h"
 
 namespace android {
 namespace mediaformatshaper {
@@ -51,17 +51,18 @@
 
 // constants we use within the calculations
 //
-constexpr double BITRATE_LEAVE_UNTOUCHED = 2.0;
-constexpr double BITRATE_QP_UNAVAILABLE = 1.20;
-// 10% didn't work so hot on bonito (with no QP support)
-// 15% is next.. still leaves a few short
-// 20% ? this is on the edge of what I want do do
+constexpr double BITRATE_LEAVE_UNTOUCHED = 1.75;
+
+// 20% bump if QP is configured but it is unavailable
+constexpr double BITRATE_QP_UNAVAILABLE_BOOST = 0.20;
+
 
 //
 // Caller retains ownership of and responsibility for inFormat
 //
 int VQApply(CodecProperties *codec, vqOps_t *info, AMediaFormat* inFormat, int flags) {
     ALOGV("codecName %s inFormat %p flags x%x", codec->getName().c_str(), inFormat, flags);
+    (void) info; // unused for now
 
     int32_t bitRateMode = -1;
     if (AMediaFormat_getInt32(inFormat, AMEDIAFORMAT_KEY_BITRATE_MODE, &bitRateMode)
@@ -99,14 +100,16 @@
     double minimumBpp = codec->getBpp(width, height);
 
     int64_t bitrateFloor = pixels * minimumBpp;
+    int64_t bitrateCeiling = bitrateFloor * BITRATE_LEAVE_UNTOUCHED;
     if (bitrateFloor > INT32_MAX) bitrateFloor = INT32_MAX;
+    if (bitrateCeiling > INT32_MAX) bitrateCeiling = INT32_MAX;
 
     // if we are far enough above the target bpp, leave it alone
     //
     ALOGV("bitrate: configured %" PRId64 " floor %" PRId64, bitrateConfigured, bitrateFloor);
-    if (bitrateConfigured >= BITRATE_LEAVE_UNTOUCHED * bitrateFloor) {
-        ALOGV("high enough bitrate: configured %" PRId64 " >= %f * floor %" PRId64,
-                bitrateConfigured, BITRATE_LEAVE_UNTOUCHED, bitrateFloor);
+    if (bitrateConfigured >= bitrateCeiling) {
+        ALOGV("high enough bitrate: configured %" PRId64 " >= ceiling %" PRId64,
+                bitrateConfigured, bitrateCeiling);
         return 0;
     }
 
@@ -117,38 +120,54 @@
         bitrateChosen = bitrateFloor;
     }
 
-    bool qpPresent = hasQp(inFormat);
+    bool qpPresent = hasQpMax(inFormat);
 
-    // add QP, if not already present
+    // calculate a target QP value
+    int32_t qpmax = codec->targetQpMax();
     if (!qpPresent) {
-        int32_t qpmax = codec->targetQpMax();
+        // user didn't, so shaper wins
         if (qpmax != INT32_MAX) {
             ALOGV("choosing qp=%d", qpmax);
             qpChosen = qpmax;
         }
+    } else if (qpmax == INT32_MAX) {
+        // shaper didn't, so user wins
+        qpChosen = INT32_MAX;
+        AMediaFormat_getInt32(inFormat, AMEDIAFORMAT_VIDEO_QP_MAX, &qpChosen);
+    } else {
+        // both sides want it, choose most restrictive
+        int32_t value = INT32_MAX;
+        AMediaFormat_getInt32(inFormat, AMEDIAFORMAT_VIDEO_QP_MAX, &value);
+        qpChosen = std::min(qpmax, value);
     }
 
     // if QP is desired but not supported, compensate with additional bits
     if (!codec->supportsQp()) {
-        if (qpPresent || qpChosen != INT32_MAX) {
-            ALOGD("minquality: desired QP, but unsupported, boost bitrate %" PRId64 " to %" PRId64,
-                bitrateChosen, (int64_t)(bitrateChosen * BITRATE_QP_UNAVAILABLE));
-            bitrateChosen =  bitrateChosen * BITRATE_QP_UNAVAILABLE;
+        if (qpChosen != INT32_MAX) {
+            int64_t boost = 0;
+            boost = bitrateChosen * BITRATE_QP_UNAVAILABLE_BOOST;
+            ALOGD("minquality: requested QP unsupported, boost bitrate %" PRId64 " by %" PRId64,
+                bitrateChosen, boost);
+            bitrateChosen =  bitrateChosen + boost;
             qpChosen = INT32_MAX;
         }
     }
 
+    // limits
     // apply our chosen values
     //
     if (qpChosen != INT32_MAX) {
         ALOGD("minquality by QP: inject %s=%d", AMEDIAFORMAT_VIDEO_QP_MAX, qpChosen);
         AMediaFormat_setInt32(inFormat, AMEDIAFORMAT_VIDEO_QP_MAX, qpChosen);
 
-        // force spreading the QP across frame types, since we are imposing a value
-        qpSpreadMaxPerFrameType(inFormat, info->qpDelta, info->qpMax, /* override */ true);
+        // caller (VideoShaper) handles spreading this across the subframes
     }
 
     if (bitrateChosen != bitrateConfigured) {
+        if (bitrateChosen > bitrateCeiling) {
+            ALOGD("minquality: bitrate clamped at ceiling %" PRId64,  bitrateCeiling);
+            bitrateChosen = bitrateCeiling;
+        }
         ALOGD("minquality/target bitrate raised from %" PRId64 " to %" PRId64 " bps",
               bitrateConfigured, bitrateChosen);
         AMediaFormat_setInt32(inFormat, AMEDIAFORMAT_KEY_BIT_RATE, (int32_t)bitrateChosen);
@@ -158,7 +177,7 @@
 }
 
 
-bool hasQpPerFrameType(AMediaFormat *format) {
+bool hasQpMaxPerFrameType(AMediaFormat *format) {
     int32_t value;
 
     if (AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_I_MAX, &value)
@@ -176,19 +195,29 @@
     return false;
 }
 
-bool hasQp(AMediaFormat *format) {
+bool hasQpMaxGlobal(AMediaFormat *format) {
     int32_t value;
     if (AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_MAX, &value)
         || AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_MIN, &value)) {
         return true;
     }
-    return hasQpPerFrameType(format);
+    return false;
+}
+
+bool hasQpMax(AMediaFormat *format) {
+    if (hasQpMaxGlobal(format)) {
+        return true;
+    }
+    return hasQpMaxPerFrameType(format);
 }
 
 void qpSpreadPerFrameType(AMediaFormat *format, int delta,
                            int qplow, int qphigh, bool override) {
-     qpSpreadMaxPerFrameType(format, delta, qphigh, override);
+
      qpSpreadMinPerFrameType(format, qplow, override);
+     qpSpreadMaxPerFrameType(format, delta, qphigh, override);
+     // make sure that min <= max for all the QP fields.
+     qpVerifyMinMaxOrdering(format);
 }
 
 void qpSpreadMaxPerFrameType(AMediaFormat *format, int delta, int qphigh, bool override) {
@@ -196,20 +225,26 @@
 
     int32_t qpOffered = 0;
     if (AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_MAX, &qpOffered)) {
-        // propagate to otherwise unspecified frame-specific keys
-        int32_t maxI;
-        if (override || !AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_I_MAX, &maxI)) {
-            int32_t value = std::min(qphigh, qpOffered);
+        // propagate to frame-specific keys, choosing most restrictive
+        // ensure that we don't violate min<=max rules
+        {
+            int32_t maxI = INT32_MAX;
+            AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_I_MAX, &maxI);
+            int32_t value = std::min({qpOffered, qphigh, maxI});
             AMediaFormat_setInt32(format, AMEDIAFORMAT_VIDEO_QP_I_MAX, value);
         }
-        int32_t maxP;
-        if (override || !AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_P_MAX, &maxP)) {
-            int32_t value = std::min(qphigh, (std::min(qpOffered, INT32_MAX-delta) + delta));
+        {
+            int32_t maxP = INT32_MAX;
+            AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_P_MAX, &maxP);
+            int32_t value = std::min({(std::min(qpOffered, INT32_MAX-1*delta) + 1*delta),
+                                     qphigh, maxP});
             AMediaFormat_setInt32(format, AMEDIAFORMAT_VIDEO_QP_P_MAX, value);
         }
-        int32_t maxB;
-        if (override || !AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_B_MAX, &maxB)) {
-            int32_t value = std::min(qphigh, (std::min(qpOffered, INT32_MAX-2*delta) + 2*delta));
+        {
+            int32_t maxB = INT32_MAX;
+            AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_B_MAX, &maxB);
+            int32_t value = std::min({(std::min(qpOffered, INT32_MAX-2*delta) + 2*delta),
+                                     qphigh, maxB});
             AMediaFormat_setInt32(format, AMEDIAFORMAT_VIDEO_QP_B_MAX, value);
         }
     }
@@ -221,19 +256,47 @@
     int32_t qpOffered = 0;
     if (AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_MIN, &qpOffered)) {
         int value = std::max(qplow, qpOffered);
-        // propagate to otherwise unspecified frame-specific keys
-        int32_t minI;
-        if (!AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_I_MIN, &minI)) {
-            AMediaFormat_setInt32(format, AMEDIAFORMAT_VIDEO_QP_I_MIN, value);
-        }
-        int32_t minP;
-        if (!AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_P_MIN, &minP)) {
-            AMediaFormat_setInt32(format, AMEDIAFORMAT_VIDEO_QP_P_MIN, value);
-        }
-        int32_t minB;
-        if (!AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_B_MIN, &minB)) {
-            AMediaFormat_setInt32(format, AMEDIAFORMAT_VIDEO_QP_B_MIN, value);
-        }
+        // propagate to frame-specific keys, use lowest of this and existing per-frame value
+        int32_t minI = INT32_MAX;
+        AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_I_MIN, &minI);
+        int32_t setI = std::min(value, minI);
+        AMediaFormat_setInt32(format, AMEDIAFORMAT_VIDEO_QP_I_MIN, setI);
+
+        int32_t minP = INT32_MAX;
+        AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_P_MIN, &minP);
+        int32_t setP = std::min(value, minP);
+        AMediaFormat_setInt32(format, AMEDIAFORMAT_VIDEO_QP_P_MIN, setP);
+
+        int32_t minB = INT32_MAX;
+        AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_B_MIN, &minB);
+        int32_t setB = std::min(value, minB);
+        AMediaFormat_setInt32(format, AMEDIAFORMAT_VIDEO_QP_B_MIN, setB);
+    }
+}
+
+// XXX whether we allow min==max, or if we'll insist that min<max
+void qpVerifyMinMaxOrdering(AMediaFormat *format) {
+    // ensure that we don't violate min<=max rules
+    int32_t maxI = INT32_MAX;
+    int32_t minI = INT32_MIN;
+    if (AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_I_MAX, &maxI)
+        && AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_I_MIN, &minI)
+        && minI > maxI) {
+        AMediaFormat_setInt32(format, AMEDIAFORMAT_VIDEO_QP_I_MIN, maxI);
+    }
+    int32_t maxP = INT32_MAX;
+    int32_t minP = INT32_MIN;
+    if (AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_P_MAX, &maxP)
+        && AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_P_MIN, &minP)
+        && minP > maxP) {
+        AMediaFormat_setInt32(format, AMEDIAFORMAT_VIDEO_QP_P_MIN, maxP);
+    }
+    int32_t maxB = INT32_MAX;
+    int32_t minB = INT32_MIN;
+    if (AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_B_MAX, &maxB)
+        && AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_B_MIN, &minB)
+        && minB > maxB) {
+        AMediaFormat_setInt32(format, AMEDIAFORMAT_VIDEO_QP_B_MIN, maxB);
     }
 }
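
Taken together, the VQApply() changes above define a bitrate window per resolution: a floor of pixels * bpp, a ceiling of 1.75x the floor (BITRATE_LEAVE_UNTOUCHED), and a 20% boost (BITRATE_QP_UNAVAILABLE_BOOST) when a QP cap was requested but the codec cannot enforce one. A worked sketch under those constants (the helper and its flow are simplified, not the patch code):

```cpp
#include <algorithm>
#include <cstdint>
#include <cstdio>

// Simplified bitrate shaping: raise to the floor, boost if QP is wanted but
// unsupported, never exceed the ceiling, and leave high bitrates untouched.
int64_t shapeBitrate(int64_t configured, int64_t pixels, double bpp,
                     bool wantsQp, bool codecSupportsQp) {
    int64_t floorBps   = static_cast<int64_t>(pixels * bpp);
    int64_t ceilingBps = static_cast<int64_t>(floorBps * 1.75);  // BITRATE_LEAVE_UNTOUCHED
    if (configured >= ceilingBps) return configured;             // already high enough
    int64_t chosen = std::max(configured, floorBps);
    if (wantsQp && !codecSupportsQp) {
        chosen += static_cast<int64_t>(chosen * 0.20);           // BITRATE_QP_UNAVAILABLE_BOOST
    }
    return std::min(chosen, ceilingBps);                         // clamp at the ceiling
}

int main() {
    // 1080p at the new HEVC seed of 1.50 bpp: floor ~3.1 Mbps, ceiling ~5.4 Mbps.
    int64_t out = shapeBitrate(2'000'000, 1920 * 1080, 1.50,
                               /*wantsQp=*/true, /*codecSupportsQp=*/false);
    printf("shaped bitrate: %lld bps\n", static_cast<long long>(out));  // ~3.7 Mbps
    return 0;
}
```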
 
diff --git a/media/libmediaformatshaper/include/media/formatshaper/VQops.h b/media/libmediaformatshaper/VQops.h
similarity index 88%
rename from media/libmediaformatshaper/include/media/formatshaper/VQops.h
rename to media/libmediaformatshaper/VQops.h
index 807e8af..74cce18 100644
--- a/media/libmediaformatshaper/include/media/formatshaper/VQops.h
+++ b/media/libmediaformatshaper/VQops.h
@@ -17,7 +17,7 @@
 #ifndef LIBMEDIAFORMATSHAPER_VQOPS_H_
 #define LIBMEDIAFORMATSHAPER_VQOPS_H_
 
-#include <media/formatshaper/CodecProperties.h>
+#include "CodecProperties.h"
 #include <media/NdkMediaFormat.h>
 
 namespace android {
@@ -39,10 +39,12 @@
 void qpSpreadPerFrameType(AMediaFormat *format, int delta, int qplow, int qphigh, bool override);
 void qpSpreadMaxPerFrameType(AMediaFormat *format, int delta, int qphigh, bool override);
 void qpSpreadMinPerFrameType(AMediaFormat *format, int qplow, bool override);
+void qpVerifyMinMaxOrdering(AMediaFormat *format);
 
 // does the format have QP bounding entries
-bool hasQp(AMediaFormat *format);
-bool hasQpPerFrameType(AMediaFormat *format);
+bool hasQpMax(AMediaFormat *format);
+bool hasQpMaxGlobal(AMediaFormat *format);
+bool hasQpMaxPerFrameType(AMediaFormat *format);
 
 }  // namespace mediaformatshaper
 }  // namespace android
diff --git a/media/libmediaformatshaper/VideoShaper.cpp b/media/libmediaformatshaper/VideoShaper.cpp
index f772a66..cf8b50f 100644
--- a/media/libmediaformatshaper/VideoShaper.cpp
+++ b/media/libmediaformatshaper/VideoShaper.cpp
@@ -23,9 +23,9 @@
 
 #include <media/NdkMediaFormat.h>
 
-#include <media/formatshaper/VQops.h>
-#include <media/formatshaper/CodecProperties.h>
-#include <media/formatshaper/VideoShaper.h>
+#include "CodecProperties.h"
+#include "VideoShaper.h"
+#include "VQops.h"
 
 namespace android {
 namespace mediaformatshaper {
@@ -83,10 +83,10 @@
     // apply any quality transforms in here..
     (void) VQApply(codec, info, inFormat, flags);
 
-    // We must always spread any QP parameters.
+    // We always spread any QP parameters.
     // Sometimes it's something we inserted here, sometimes it's a value that the user injected.
     //
-    qpSpreadPerFrameType(inFormat, info->qpDelta, info->qpMin, info->qpMax, /* override */ false);
+    qpSpreadPerFrameType(inFormat, info->qpDelta, info->qpMin, info->qpMax, /* override */ true);
 
     //
     return 0;
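
With override now forced to true, qpSpreadPerFrameType() always propagates the global QP bounds to the I/P/B keys: P and B get the global max plus one and two deltas, everything is capped at qphigh (and at any pre-existing per-frame value, omitted here), and qpVerifyMinMaxOrdering() then lowers any min that ended up above its max. A simplified sketch of the max spread, assuming the caller set no per-frame values:

```cpp
#include <algorithm>
#include <cstdio>

struct QpMaxSpread { int iMax, pMax, bMax; };

// Spread a global QP max across frame types, capped at qphigh.
static QpMaxSpread spreadMax(int qpMax, int delta, int qphigh) {
    return { std::min(qpMax,             qphigh),
             std::min(qpMax + 1 * delta, qphigh),
             std::min(qpMax + 2 * delta, qphigh) };
}

int main() {
    QpMaxSpread s = spreadMax(/*qpMax=*/38, /*delta=*/3, /*qphigh=*/45);
    printf("I=%d P=%d B=%d\n", s.iMax, s.pMax, s.bMax);  // I=38 P=41 B=44
    return 0;
}
```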
diff --git a/media/libmediaformatshaper/include/media/formatshaper/VideoShaper.h b/media/libmediaformatshaper/VideoShaper.h
similarity index 100%
rename from media/libmediaformatshaper/include/media/formatshaper/VideoShaper.h
rename to media/libmediaformatshaper/VideoShaper.h
diff --git a/media/libmediametrics/include/MediaMetricsConstants.h b/media/libmediametrics/include/MediaMetricsConstants.h
index de4f8d4..383bae8 100644
--- a/media/libmediametrics/include/MediaMetricsConstants.h
+++ b/media/libmediametrics/include/MediaMetricsConstants.h
@@ -160,6 +160,12 @@
 #define AMEDIAMETRICS_PROP_VOLUME_LEFT    "volume.left"    // double (AudioTrack)
 #define AMEDIAMETRICS_PROP_VOLUME_RIGHT   "volume.right"   // double (AudioTrack)
 #define AMEDIAMETRICS_PROP_WHERE          "where"          // string value
+// EncodingRequested is the encoding format requested by the app
+#define AMEDIAMETRICS_PROP_ENCODINGREQUESTED "encodingRequested" // string
+// PerformanceModeActual is the actual selected performance mode; it can be "none", "lowLatency",
+// or "powerSaving"
+#define AMEDIAMETRICS_PROP_PERFORMANCEMODEACTUAL "performanceModeActual" // string
+#define AMEDIAMETRICS_PROP_FRAMESTRANSFERRED "framesTransferred" // int64_t, transferred frames
 
 // Timing values: millisecond values are suffixed with MS and the type is double
 // nanosecond values are suffixed with NS and the type is int64.
diff --git a/media/libmediatranscoding/TEST_MAPPING b/media/libmediatranscoding/TEST_MAPPING
index f8a9db9..40f7b21 100644
--- a/media/libmediatranscoding/TEST_MAPPING
+++ b/media/libmediatranscoding/TEST_MAPPING
@@ -26,6 +26,9 @@
         },
         {
             "name": "VideoTrackTranscoderTests"
+        },
+        {
+            "name": "CtsMediaTranscodingTestCases"
         }
     ]
 }
diff --git a/media/libmediatranscoding/TranscodingSessionController.cpp b/media/libmediatranscoding/TranscodingSessionController.cpp
index 68e2875..9705f3c 100644
--- a/media/libmediatranscoding/TranscodingSessionController.cpp
+++ b/media/libmediatranscoding/TranscodingSessionController.cpp
@@ -19,6 +19,7 @@
 
 #define VALIDATE_STATE 1
 
+#include <android/permission_manager.h>
 #include <inttypes.h>
 #include <media/TranscodingSessionController.h>
 #include <media/TranscodingUidPolicy.h>
@@ -193,7 +194,7 @@
 
     ~Pacer() = default;
 
-    bool onSessionStarted(uid_t uid);
+    bool onSessionStarted(uid_t uid, uid_t callingUid);
     void onSessionCompleted(uid_t uid, std::chrono::microseconds runningTime);
     void onSessionCancelled(uid_t uid);
 
@@ -212,9 +213,49 @@
         std::chrono::steady_clock::time_point lastCompletedTime;
     };
     std::map<uid_t, UidHistoryEntry> mUidHistoryMap;
+    std::unordered_set<uid_t> mMtpUids;
+    std::unordered_set<uid_t> mNonMtpUids;
+
+    bool isSubjectToQuota(uid_t uid, uid_t callingUid);
 };
 
-bool TranscodingSessionController::Pacer::onSessionStarted(uid_t uid) {
+bool TranscodingSessionController::Pacer::isSubjectToQuota(uid_t uid, uid_t callingUid) {
+    // Submitting with self uid is not limited (which can only happen if it's used as an
+    // app-facing API). MediaProvider usage always submits on behalf of other uids.
+    if (uid == callingUid) {
+        return false;
+    }
+
+    if (mMtpUids.find(uid) != mMtpUids.end()) {
+        return false;
+    }
+
+    if (mNonMtpUids.find(uid) != mNonMtpUids.end()) {
+        return true;
+    }
+
+    // We don't have MTP permission info about this uid yet, check permission and save the result.
+    int32_t result;
+    if (__builtin_available(android __TRANSCODING_MIN_API__, *)) {
+        if (APermissionManager_checkPermission("android.permission.ACCESS_MTP", -1 /*pid*/, uid,
+                                               &result) == PERMISSION_MANAGER_STATUS_OK &&
+            result == PERMISSION_MANAGER_PERMISSION_GRANTED) {
+            mMtpUids.insert(uid);
+            return false;
+        }
+    }
+
+    mNonMtpUids.insert(uid);
+    return true;
+}
+
+bool TranscodingSessionController::Pacer::onSessionStarted(uid_t uid, uid_t callingUid) {
+    if (!isSubjectToQuota(uid, callingUid)) {
+        ALOGI("Pacer::onSessionStarted: uid %d (caling uid: %d): not subject to quota", uid,
+              callingUid);
+        return true;
+    }
+
     // If uid doesn't exist, only insert the entry and mark session active. Skip quota checking.
     if (mUidHistoryMap.find(uid) == mUidHistoryMap.end()) {
         mUidHistoryMap.emplace(uid, UidHistoryEntry{});
@@ -494,7 +535,7 @@
             // Check if at least one client has quota to start the session.
             bool keepForClient = false;
             for (uid_t uid : topSession->allClientUids) {
-                if (mPacer->onSessionStarted(uid)) {
+                if (mPacer->onSessionStarted(uid, topSession->callingUid)) {
                     keepForClient = true;
                     // DO NOT break here, because book-keeping still needs to happen
                     // for the other uids.
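
The Pacer's quota exemption above follows a cache-then-check pattern: self-submissions are never limited, previously resolved uids are answered from the mMtpUids/mNonMtpUids sets, and only unknown uids pay for a permission lookup. A minimal sketch of that pattern (checkMtpPermission() is a stand-in for the APermissionManager_checkPermission call, not a real API):

```cpp
#include <sys/types.h>
#include <unordered_set>

class QuotaPolicy {
public:
    bool isSubjectToQuota(uid_t uid, uid_t callingUid) {
        if (uid == callingUid) return false;          // self-submission: never limited
        if (mExempt.count(uid)) return false;         // cached: holds ACCESS_MTP
        if (mLimited.count(uid)) return true;         // cached: no ACCESS_MTP
        if (checkMtpPermission(uid)) {                // cache miss: ask once, remember
            mExempt.insert(uid);
            return false;
        }
        mLimited.insert(uid);
        return true;
    }

private:
    // Placeholder for the real permission-manager query.
    static bool checkMtpPermission(uid_t) { return false; }
    std::unordered_set<uid_t> mExempt, mLimited;
};
```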
diff --git a/media/libmediatranscoding/tests/TranscodingSessionController_tests.cpp b/media/libmediatranscoding/tests/TranscodingSessionController_tests.cpp
index 9e7fa95..ef9c4f8 100644
--- a/media/libmediatranscoding/tests/TranscodingSessionController_tests.cpp
+++ b/media/libmediatranscoding/tests/TranscodingSessionController_tests.cpp
@@ -339,19 +339,37 @@
         EXPECT_EQ(mTranscoder.use_count(), 2);
     }
 
-    void testPacerHelper(int numSubmits, int sessionDurationMs, int expectedSuccess,
-                         bool pauseLastSuccessSession = false) {
+    void testPacerHelper(int numSubmits, int sessionDurationMs, int expectedSuccess) {
         testPacerHelper(numSubmits, sessionDurationMs, expectedSuccess, mClientCallback0, {},
-                        pauseLastSuccessSession);
+                        false /*pauseLastSuccessSession*/, true /*useRealCallingUid*/);
+    }
+
+    void testPacerHelperWithPause(int numSubmits, int sessionDurationMs, int expectedSuccess) {
+        testPacerHelper(numSubmits, sessionDurationMs, expectedSuccess, mClientCallback0, {},
+                        true /*pauseLastSuccessSession*/, true /*useRealCallingUid*/);
+    }
+
+    void testPacerHelperWithMultipleUids(int numSubmits, int sessionDurationMs, int expectedSuccess,
+                                         const std::shared_ptr<TestClientCallback>& client,
+                                         const std::vector<int>& additionalClientUids) {
+        testPacerHelper(numSubmits, sessionDurationMs, expectedSuccess, client,
+                        additionalClientUids, false /*pauseLastSuccessSession*/,
+                        true /*useRealCallingUid*/);
+    }
+
+    void testPacerHelperWithSelfUid(int numSubmits, int sessionDurationMs, int expectedSuccess) {
+        testPacerHelper(numSubmits, sessionDurationMs, expectedSuccess, mClientCallback0, {},
+                        false /*pauseLastSuccessSession*/, false /*useRealCallingUid*/);
     }
 
     void testPacerHelper(int numSubmits, int sessionDurationMs, int expectedSuccess,
                          const std::shared_ptr<TestClientCallback>& client,
-                         const std::vector<int>& additionalClientUids,
-                         bool pauseLastSuccessSession) {
+                         const std::vector<int>& additionalClientUids, bool pauseLastSuccessSession,
+                         bool useRealCallingUid) {
+        uid_t callingUid = useRealCallingUid ? ::getuid() : client->clientUid();
         for (int i = 0; i < numSubmits; i++) {
-            mController->submit(client->clientId(), SESSION(i), client->clientUid(),
-                                client->clientUid(), mRealtimeRequest, client);
+            mController->submit(client->clientId(), SESSION(i), callingUid, client->clientUid(),
+                                mRealtimeRequest, client);
             for (int additionalUid : additionalClientUids) {
                 mController->addClientUid(client->clientId(), SESSION(i), additionalUid);
             }
@@ -1294,8 +1312,7 @@
 
 TEST_F(TranscodingSessionControllerTest, TestTranscoderPacerWithPause) {
     ALOGD("TestTranscoderPacerDuringPause");
-    testPacerHelper(12 /*numSubmits*/, 400 /*sessionDurationMs*/, 10 /*expectedSuccess*/,
-                    true /*pauseLastSuccessSession*/);
+    testPacerHelperWithPause(12 /*numSubmits*/, 400 /*sessionDurationMs*/, 10 /*expectedSuccess*/);
 }
 
 /*
@@ -1305,17 +1322,26 @@
 TEST_F(TranscodingSessionControllerTest, TestTranscoderPacerMultipleUids) {
     ALOGD("TestTranscoderPacerMultipleUids");
     // First, run mClientCallback0 to the point of no quota.
-    testPacerHelper(12 /*numSubmits*/, 400 /*sessionDurationMs*/, 10 /*expectedSuccess*/,
-                    mClientCallback0, {}, false /*pauseLastSuccessSession*/);
+    testPacerHelperWithMultipleUids(12 /*numSubmits*/, 400 /*sessionDurationMs*/,
+                                    10 /*expectedSuccess*/, mClientCallback0, {});
     // Make UID(0) block on Client1's sessions too, Client1's quota should not be affected.
-    testPacerHelper(12 /*numSubmits*/, 400 /*sessionDurationMs*/, 10 /*expectedSuccess*/,
-                    mClientCallback1, {UID(0)}, false /*pauseLastSuccessSession*/);
+    testPacerHelperWithMultipleUids(12 /*numSubmits*/, 400 /*sessionDurationMs*/,
+                                    10 /*expectedSuccess*/, mClientCallback1, {UID(0)});
     // Make UID(10) block on Client2's sessions. We expect to see 11 succeeds (instead of 10),
     // because the addClientUid() is called after the submit, and first session is already
     // started by the time UID(10) is added. UID(10) allowed us to run the 11th session,
     // after that both UID(10) and UID(2) are out of quota.
-    testPacerHelper(12 /*numSubmits*/, 400 /*sessionDurationMs*/, 11 /*expectedSuccess*/,
-                    mClientCallback2, {UID(10)}, false /*pauseLastSuccessSession*/);
+    testPacerHelperWithMultipleUids(12 /*numSubmits*/, 400 /*sessionDurationMs*/,
+                                    11 /*expectedSuccess*/, mClientCallback2, {UID(10)});
+}
+
+/*
+ * Use the same uid for clientUid and callingUid; sessions should not be limited by quota.
+ */
+TEST_F(TranscodingSessionControllerTest, TestTranscoderPacerSelfUid) {
+    ALOGD("TestTranscoderPacerSelfUid");
+    testPacerHelperWithSelfUid(12 /*numSubmits*/, 400 /*sessionDurationMs*/,
+                               12 /*expectedSuccess*/);
 }
 
 }  // namespace android
diff --git a/media/libmediatranscoding/transcoder/VideoTrackTranscoder.cpp b/media/libmediatranscoding/transcoder/VideoTrackTranscoder.cpp
index 6ed45ed..af4ef69 100644
--- a/media/libmediatranscoding/transcoder/VideoTrackTranscoder.cpp
+++ b/media/libmediatranscoding/transcoder/VideoTrackTranscoder.cpp
@@ -220,16 +220,15 @@
         return AMEDIA_ERROR_INVALID_PARAMETER;
     }
 
-    int32_t bitrate;
-    if (!AMediaFormat_getInt32(encoderFormat, AMEDIAFORMAT_KEY_BIT_RATE, &bitrate)) {
-        status = mMediaSampleReader->getEstimatedBitrateForTrack(mTrackIndex, &bitrate);
+    if (!AMediaFormat_getInt32(encoderFormat, AMEDIAFORMAT_KEY_BIT_RATE, &mConfiguredBitrate)) {
+        status = mMediaSampleReader->getEstimatedBitrateForTrack(mTrackIndex, &mConfiguredBitrate);
         if (status != AMEDIA_OK) {
             LOG(ERROR) << "Unable to estimate bitrate. Using default " << kDefaultBitrateMbps;
-            bitrate = kDefaultBitrateMbps;
+            mConfiguredBitrate = kDefaultBitrateMbps;
         }
 
-        LOG(INFO) << "Configuring bitrate " << bitrate;
-        AMediaFormat_setInt32(encoderFormat, AMEDIAFORMAT_KEY_BIT_RATE, bitrate);
+        LOG(INFO) << "Configuring bitrate " << mConfiguredBitrate;
+        AMediaFormat_setInt32(encoderFormat, AMEDIAFORMAT_KEY_BIT_RATE, mConfiguredBitrate);
     }
 
     SetDefaultFormatValueFloat(AMEDIAFORMAT_KEY_I_FRAME_INTERVAL, encoderFormat,
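
Recording the value in mConfiguredBitrate makes the fallback chain above observable in tests: use the caller-supplied bitrate if the destination format has one, otherwise the reader's estimate, otherwise the fixed default. A sketch of that selection (names are illustrative, not the transcoder API):

```cpp
#include <cstdint>
#include <optional>

// Pick the encoder bitrate: requested value, then estimate, then default.
int32_t pickBitrate(std::optional<int32_t> requested,
                    std::optional<int32_t> estimated,
                    int32_t defaultBitrate) {
    if (requested) return *requested;   // AMEDIAFORMAT_KEY_BIT_RATE already set
    if (estimated) return *estimated;   // getEstimatedBitrateForTrack() succeeded
    return defaultBitrate;              // kDefaultBitrateMbps fallback
}
```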
diff --git a/media/libmediatranscoding/transcoder/include/media/VideoTrackTranscoder.h b/media/libmediatranscoding/transcoder/include/media/VideoTrackTranscoder.h
index 8a506a0..3e72882 100644
--- a/media/libmediatranscoding/transcoder/include/media/VideoTrackTranscoder.h
+++ b/media/libmediatranscoding/transcoder/include/media/VideoTrackTranscoder.h
@@ -45,6 +45,7 @@
 
 private:
     friend struct AsyncCodecCallbackDispatch;
+    friend class VideoTrackTranscoderTests;
 
     // Minimal blocking queue used as a message queue by VideoTrackTranscoder.
     template <typename T>
@@ -101,6 +102,7 @@
     uid_t mUid;
     uint64_t mInputFrameCount = 0;
     uint64_t mOutputFrameCount = 0;
+    int32_t mConfiguredBitrate = 0;
 };
 
 }  // namespace android
diff --git a/media/libmediatranscoding/transcoder/tests/AndroidTestTemplate.xml b/media/libmediatranscoding/transcoder/tests/AndroidTestTemplate.xml
index e40a507..c3a0ced 100644
--- a/media/libmediatranscoding/transcoder/tests/AndroidTestTemplate.xml
+++ b/media/libmediatranscoding/transcoder/tests/AndroidTestTemplate.xml
@@ -24,7 +24,7 @@
     <test class="com.android.tradefed.testtype.GTest" >
         <option name="native-test-device-path" value="/data/local/tmp" />
         <option name="module-name" value="{MODULE}" />
-        <option name="native-test-timeout" value="10m" />
+        <option name="native-test-timeout" value="30m" />
     </test>
 </configuration>
 
diff --git a/media/libmediatranscoding/transcoder/tests/VideoTrackTranscoderTests.cpp b/media/libmediatranscoding/transcoder/tests/VideoTrackTranscoderTests.cpp
index 1f9ec77..88c3fd3 100644
--- a/media/libmediatranscoding/transcoder/tests/VideoTrackTranscoderTests.cpp
+++ b/media/libmediatranscoding/transcoder/tests/VideoTrackTranscoderTests.cpp
@@ -86,6 +86,10 @@
 
     ~VideoTrackTranscoderTests() { LOG(DEBUG) << "VideoTrackTranscoderTests destroyed"; }
 
+    static int32_t getConfiguredBitrate(const std::shared_ptr<VideoTrackTranscoder>& transcoder) {
+        return transcoder->mConfiguredBitrate;
+    }
+
     std::shared_ptr<MediaSampleReader> mMediaSampleReader;
     int mTrackIndex;
     std::shared_ptr<AMediaFormat> mSourceFormat;
@@ -140,7 +144,7 @@
 TEST_F(VideoTrackTranscoderTests, PreserveBitrate) {
     LOG(DEBUG) << "Testing PreserveBitrate";
     auto callback = std::make_shared<TestTrackTranscoderCallback>();
-    std::shared_ptr<MediaTrackTranscoder> transcoder = VideoTrackTranscoder::create(callback);
+    auto transcoder = VideoTrackTranscoder::create(callback);
 
     auto destFormat = TrackTranscoderTestUtils::getDefaultVideoDestinationFormat(
             mSourceFormat.get(), false /* includeBitrate*/);
@@ -155,15 +159,11 @@
     ASSERT_TRUE(transcoder->start());
 
     callback->waitUntilTrackFormatAvailable();
-
-    auto outputFormat = transcoder->getOutputFormat();
-    ASSERT_NE(outputFormat, nullptr);
-
     transcoder->stop();
     EXPECT_EQ(callback->waitUntilFinished(), AMEDIA_OK);
 
-    int32_t outBitrate;
-    EXPECT_TRUE(AMediaFormat_getInt32(outputFormat.get(), AMEDIAFORMAT_KEY_BIT_RATE, &outBitrate));
+    int32_t outBitrate = getConfiguredBitrate(transcoder);
+    ASSERT_GT(outBitrate, 0);
 
     EXPECT_EQ(srcBitrate, outBitrate);
 }
diff --git a/media/libstagefright/FrameDecoder.cpp b/media/libstagefright/FrameDecoder.cpp
index 01190b5..0fd4ef2 100644
--- a/media/libstagefright/FrameDecoder.cpp
+++ b/media/libstagefright/FrameDecoder.cpp
@@ -113,7 +113,7 @@
         return NULL;
     }
     sp<IMemory> frameMem = new MemoryBase(heap, 0, size);
-    if (frameMem == NULL) {
+    if (frameMem == NULL || frameMem->unsecurePointer() == NULL) {
         ALOGE("not enough memory for VideoFrame size=%zu", size);
         return NULL;
     }
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index c21ea8f..50ebeef 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -29,7 +29,6 @@
 #include <C2Buffer.h>
 
 #include "include/SoftwareRenderer.h"
-#include "PlaybackDurationAccumulator.h"
 
 #include <android/hardware/cas/native/1.0/IDescrambler.h>
 #include <android/hardware/media/omx/1.0/IGraphicBufferSource.h>
@@ -140,8 +139,6 @@
 static const char *kCodecRecentLatencyAvg = "android.media.mediacodec.recent.avg";      /* in us */
 static const char *kCodecRecentLatencyCount = "android.media.mediacodec.recent.n";
 static const char *kCodecRecentLatencyHist = "android.media.mediacodec.recent.hist";    /* in us */
-static const char *kCodecPlaybackDuration =
-        "android.media.mediacodec.playback-duration"; /* in sec */
 
 static const char *kCodecShapingEnhanced = "android.media.mediacodec.shaped";    /* 0/1 */
 
@@ -722,8 +719,6 @@
       mHaveInputSurface(false),
       mHavePendingInputBuffers(false),
       mCpuBoostRequested(false),
-      mPlaybackDurationAccumulator(new PlaybackDurationAccumulator()),
-      mIsSurfaceToScreen(false),
       mLatencyUnknown(0),
       mBytesEncoded(0),
       mEarliestEncodedPtsUs(INT64_MAX),
@@ -830,10 +825,6 @@
     if (mLatencyUnknown > 0) {
         mediametrics_setInt64(mMetricsHandle, kCodecLatencyUnknown, mLatencyUnknown);
     }
-    int64_t playbackDuration = mPlaybackDurationAccumulator->getDurationInSeconds();
-    if (playbackDuration > 0) {
-        mediametrics_setInt64(mMetricsHandle, kCodecPlaybackDuration, playbackDuration);
-    }
     if (mLifetimeStartNs > 0) {
         nsecs_t lifetime = systemTime(SYSTEM_TIME_MONOTONIC) - mLifetimeStartNs;
         lifetime = lifetime / (1000 * 1000);    // emitted in ms, truncated not rounded
@@ -971,22 +962,6 @@
     ALOGV("Ignoring tunnel-peek=%d for %s", tunnelPeek, asString(mTunnelPeekState));
 }
 
-void MediaCodec::updatePlaybackDuration(const sp<AMessage> &msg) {
-    if (msg->what() != kWhatOutputFramesRendered) {
-        ALOGE("updatePlaybackDuration: expected kWhatOuputFramesRendered (%d)", msg->what());
-        return;
-    }
-    // Playback duration only counts if the buffers are going to the screen.
-    if (!mIsSurfaceToScreen) {
-        return;
-    }
-    int64_t renderTimeNs;
-    size_t index = 0;
-    while (msg->findInt64(AStringPrintf("%zu-system-nano", index++).c_str(), &renderTimeNs)) {
-        mPlaybackDurationAccumulator->processRenderTime(renderTimeNs);
-    }
-}
-
 bool MediaCodec::Histogram::setup(int nbuckets, int64_t width, int64_t floor)
 {
     if (nbuckets <= 0 || width <= 0) {
@@ -3149,7 +3124,6 @@
                     ALOGV("TunnelPeekState: %s -> %s",
                           asString(previousState),
                           asString(TunnelPeekState::kBufferRendered));
-                    updatePlaybackDuration(msg);
                     // check that we have a notification set
                     if (mOnFrameRenderedNotification != NULL) {
                         sp<AMessage> notify = mOnFrameRenderedNotification->dup();
@@ -4850,10 +4824,6 @@
             return ALREADY_EXISTS;
         }
 
-        // in case we don't connect, ensure that we don't signal the surface is
-        // connected to the screen
-        mIsSurfaceToScreen = false;
-
         err = nativeWindowConnect(surface.get(), "connectToSurface");
         if (err == OK) {
             // Require a fresh set of buffers after each connect by using a unique generation
@@ -4879,10 +4849,6 @@
             if (!mAllowFrameDroppingBySurface) {
                 disableLegacyBufferDropPostQ(surface);
             }
-            // keep track whether or not the buffers of the connected surface go to the screen
-            int result = 0;
-            surface->query(NATIVE_WINDOW_QUEUES_TO_WINDOW_COMPOSER, &result);
-            mIsSurfaceToScreen = result != 0;
         }
     }
     // do not return ALREADY_EXISTS unless surfaces are the same
@@ -4900,7 +4866,6 @@
         }
         // assume disconnected even on error
         mSurface.clear();
-        mIsSurfaceToScreen = false;
     }
     return err;
 }
diff --git a/media/libstagefright/PlaybackDurationAccumulator.h b/media/libstagefright/PlaybackDurationAccumulator.h
deleted file mode 100644
index cb5f0c4..0000000
--- a/media/libstagefright/PlaybackDurationAccumulator.h
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * Copyright 2021 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef PLAYBACK_DURATION_ACCUMULATOR_H_
-
-namespace android {
-
-// Accumulates playback duration by processing render times of individual frames and by ignoring
-// frames rendered during inactive playbacks such as seeking, pausing, or re-buffering.
-class PlaybackDurationAccumulator {
-private:
-    // Controls the maximum delta between render times before considering the playback is not
-    // active and has stalled.
-    static const int64_t MAX_PRESENTATION_DURATION_NS = 500 * 1000 * 1000;
-
-public:
-    PlaybackDurationAccumulator() {
-        mPlaybackDurationNs = 0;
-        mPreviousRenderTimeNs = 0;
-    }
-
-    // Process a render time expressed in nanoseconds.
-    void processRenderTime(int64_t newRenderTimeNs) {
-        // If we detect wrap-around or out of order frames, just ignore the duration for this
-        // and the next frame.
-        if (newRenderTimeNs < mPreviousRenderTimeNs) {
-            mPreviousRenderTimeNs = 0;
-        }
-        if (mPreviousRenderTimeNs > 0) {
-            int64_t presentationDurationNs = newRenderTimeNs - mPreviousRenderTimeNs;
-            if (presentationDurationNs < MAX_PRESENTATION_DURATION_NS) {
-                mPlaybackDurationNs += presentationDurationNs;
-            }
-        }
-        mPreviousRenderTimeNs = newRenderTimeNs;
-    }
-
-    int64_t getDurationInSeconds() {
-        return mPlaybackDurationNs / 1000 / 1000 / 1000; // Nanoseconds to seconds.
-    }
-
-private:
-    // The playback duration accumulated so far.
-    int64_t mPlaybackDurationNs;
-    // The previous render time used to compute the next presentation duration.
-    int64_t mPreviousRenderTimeNs;
-};
-
-}
-
-#endif
-
diff --git a/media/libstagefright/TEST_MAPPING b/media/libstagefright/TEST_MAPPING
index dff7b22..7ce2968 100644
--- a/media/libstagefright/TEST_MAPPING
+++ b/media/libstagefright/TEST_MAPPING
@@ -34,6 +34,9 @@
   "presubmit": [
     {
       "name": "mediacodecTest"
+    },
+    {
+      "name": "CtsMediaTranscodingTestCases"
     }
   ],
   "postsubmit": [
diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp
index 04a9b17..b381033 100644
--- a/media/libstagefright/Utils.cpp
+++ b/media/libstagefright/Utils.cpp
@@ -1678,7 +1678,7 @@
     if (msg->findString("mime", &mime)) {
         meta->setCString(kKeyMIMEType, mime.c_str());
     } else {
-        ALOGI("did not find mime type");
+        ALOGV("did not find mime type");
         return BAD_VALUE;
     }
 
@@ -1728,7 +1728,7 @@
             meta->setInt32(kKeyWidth, width);
             meta->setInt32(kKeyHeight, height);
         } else {
-            ALOGI("did not find width and/or height");
+            ALOGV("did not find width and/or height");
             return BAD_VALUE;
         }
 
@@ -1817,7 +1817,7 @@
         int32_t numChannels, sampleRate;
         if (!msg->findInt32("channel-count", &numChannels) ||
                 !msg->findInt32("sample-rate", &sampleRate)) {
-            ALOGI("did not find channel-count and/or sample-rate");
+            ALOGV("did not find channel-count and/or sample-rate");
             return BAD_VALUE;
         }
         meta->setInt32(kKeyChannelCount, numChannels);
diff --git a/media/libstagefright/foundation/OpusHeader.cpp b/media/libstagefright/foundation/OpusHeader.cpp
index 784e802..30d0ae6 100644
--- a/media/libstagefright/foundation/OpusHeader.cpp
+++ b/media/libstagefright/foundation/OpusHeader.cpp
@@ -146,6 +146,10 @@
 int WriteOpusHeader(const OpusHeader &header, int input_sample_rate,
                     uint8_t* output, size_t output_size) {
     // See https://wiki.xiph.org/OggOpus#ID_Header.
+    if (header.channels < 1 || header.channels > kMaxChannels) {
+        ALOGE("Invalid channel count: %d", header.channels);
+        return -1;
+    }
     const size_t total_size = kOpusHeaderStreamMapOffset + header.channels;
     if (output_size < total_size) {
         ALOGE("Output buffer too small for header.");
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index d7b1794..3f93e6d 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -58,7 +58,6 @@
 struct PersistentSurface;
 class SoftwareRenderer;
 class Surface;
-class PlaybackDurationAccumulator;
 namespace hardware {
 namespace cas {
 namespace native {
@@ -414,7 +413,6 @@
     void updateLowLatency(const sp<AMessage> &msg);
     constexpr const char *asString(TunnelPeekState state, const char *default_string="?");
     void updateTunnelPeek(const sp<AMessage> &msg);
-    void updatePlaybackDuration(const sp<AMessage> &msg);
 
     sp<AMessage> mOutputFormat;
     sp<AMessage> mInputFormat;
@@ -482,9 +480,6 @@
 
     std::shared_ptr<BufferChannelBase> mBufferChannel;
 
-    PlaybackDurationAccumulator * mPlaybackDurationAccumulator;
-    bool mIsSurfaceToScreen;
-
     MediaCodec(
             const sp<ALooper> &looper, pid_t pid, uid_t uid,
             std::function<sp<CodecBase>(const AString &, const char *)> getCodecBase = nullptr,
diff --git a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
index 1a5609a..6371769 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
@@ -684,6 +684,7 @@
 constexpr char FEATURE_AdaptivePlayback[]       = "adaptive-playback";
 constexpr char FEATURE_IntraRefresh[] = "intra-refresh";
 constexpr char FEATURE_PartialFrame[] = "partial-frame";
+constexpr char FEATURE_QpBounds[] = "qp-bounds";
 constexpr char FEATURE_SecurePlayback[]         = "secure-playback";
 constexpr char FEATURE_TunneledPlayback[]       = "tunneled-playback";
 
diff --git a/media/ndk/NdkImagePriv.h b/media/ndk/NdkImagePriv.h
index b019448..05115b9 100644
--- a/media/ndk/NdkImagePriv.h
+++ b/media/ndk/NdkImagePriv.h
@@ -40,6 +40,14 @@
      */
 
     AIMAGE_FORMAT_RAW_DEPTH = 0x1002,
+
+    /**
+     * Device specific 10 bits depth RAW image format.
+     *
+     * <p>Unprocessed implementation-dependent raw depth measurements, opaque with 10 bit samples
+     * and device specific bit layout.</p>
+     */
+    AIMAGE_FORMAT_RAW_DEPTH10 = 0x1003,
 };
 
 // TODO: this only supports ImageReader
diff --git a/media/ndk/NdkImageReader.cpp b/media/ndk/NdkImageReader.cpp
index b75901a..1067e24 100644
--- a/media/ndk/NdkImageReader.cpp
+++ b/media/ndk/NdkImageReader.cpp
@@ -72,6 +72,7 @@
         case AIMAGE_FORMAT_Y8:
         case AIMAGE_FORMAT_HEIC:
         case AIMAGE_FORMAT_DEPTH_JPEG:
+        case AIMAGE_FORMAT_RAW_DEPTH10:
             return true;
         case AIMAGE_FORMAT_PRIVATE:
             // For private format, cpu usage is prohibited.
@@ -102,6 +103,7 @@
         case AIMAGE_FORMAT_Y8:
         case AIMAGE_FORMAT_HEIC:
         case AIMAGE_FORMAT_DEPTH_JPEG:
+        case AIMAGE_FORMAT_RAW_DEPTH10:
             return 1;
         case AIMAGE_FORMAT_PRIVATE:
             return 0;
diff --git a/media/ndk/include/media/NdkImage.h b/media/ndk/include/media/NdkImage.h
index e19dd3a..71bc6d9 100644
--- a/media/ndk/include/media/NdkImage.h
+++ b/media/ndk/include/media/NdkImage.h
@@ -50,7 +50,10 @@
  */
 typedef struct AImage AImage;
 
-// Formats not listed here will not be supported by AImageReader
+/**
+ * AImage supported formats: AImageReader only guarantees support for the formats
+ * listed here.
+ */
 enum AIMAGE_FORMATS {
     /**
      * 32 bits RGBA format, 8 bits for each of the four channels.
@@ -813,7 +816,7 @@
  * Available since API level 26.
  *
  * @param image the {@link AImage} of interest.
- * @param outBuffer The memory area pointed to by buffer will contain the acquired AHardwareBuffer
+ * @param buffer The memory area pointed to by buffer will contain the acquired AHardwareBuffer
  *         handle.
  * @return <ul>
  *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
diff --git a/media/ndk/include/media/NdkImageReader.h b/media/ndk/include/media/NdkImageReader.h
index d86f3c7..4bd7f2a 100644
--- a/media/ndk/include/media/NdkImageReader.h
+++ b/media/ndk/include/media/NdkImageReader.h
@@ -328,10 +328,10 @@
  * still acquire images from this {@link AImageReader} and access {@link AHardwareBuffer} via
  * {@link AImage_getHardwareBuffer()}. The {@link AHardwareBuffer} gained this way can then
  * be passed back to hardware (such as GPU or hardware encoder if supported) for future processing.
- * For example, you can obtain an {@link EGLClientBuffer} from the {@link AHardwareBuffer} by using
- * {@link eglGetNativeClientBufferANDROID} extension and pass that {@link EGLClientBuffer} to {@link
- * eglCreateImageKHR} to create an {@link EGLImage} resource type, which may then be bound to a
- * texture via {@link glEGLImageTargetTexture2DOES} on supported devices. This can be useful for
+ * For example, you can obtain an EGLClientBuffer from the {@link AHardwareBuffer} by using
+ * eglGetNativeClientBufferANDROID extension and pass that EGLClientBuffer to
+ * eglCreateImageKHR to create an EGLImage resource type, which may then be bound to a
+ * texture via glEGLImageTargetTexture2DOES on supported devices. This can be useful for
  * transporting textures that may be shared cross-process.</p>
  * <p>In general, when software access to image data is not necessary, an {@link AImageReader}
  * created with {@link AIMAGE_FORMAT_PRIVATE} format is more efficient, compared with {@link
@@ -339,7 +339,7 @@
  *
  * <p>Note that not all format and usage flag combination is supported by the {@link AImageReader},
  * especially if \c format is {@link AIMAGE_FORMAT_PRIVATE}, \c usage must not include either
- * {@link AHARDWAREBUFFER_USAGE_READ_RARELY} or {@link AHARDWAREBUFFER_USAGE_READ_OFTEN}</p>
+ * {@link AHARDWAREBUFFER_USAGE_CPU_READ_RARELY} or {@link AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN}</p>
  *
  * @param width The default width in pixels of the Images that this reader will produce.
  * @param height The default height in pixels of the Images that this reader will produce.
@@ -358,7 +358,7 @@
  *   <th>Compatible usage flags</th>
  * </tr>
  * <tr>
- *   <td>non-{@link AIMAGE_FORMAT_PRIVATE PRIVATE} formats defined in {@link AImage.h}
+ *   <td>non-{@link AIMAGE_FORMAT_PRIVATE} formats defined in {@link NdkImage.h}
  * </td>
  *   <td>{@link AHARDWAREBUFFER_USAGE_CPU_READ_RARELY} or
  *   {@link AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN}</td>
@@ -441,6 +441,10 @@
         AImageReader* reader,
         AHardwareBuffer* buffer);
 
+/**
+ * A listener for AHardwareBuffer removal events. Use
+ * {@link AImageReader_setBufferRemovedListener} to register the listener object with the AImageReader.
+ */
 typedef struct AImageReader_BufferRemovedListener {
     /// Optional application context passed as the first parameter of the callback.
     void*                      context;
diff --git a/media/ndk/include/media/NdkMediaError.h b/media/ndk/include/media/NdkMediaError.h
index 2be1d6e..02fdc79 100644
--- a/media/ndk/include/media/NdkMediaError.h
+++ b/media/ndk/include/media/NdkMediaError.h
@@ -40,7 +40,11 @@
 
 __BEGIN_DECLS
 
+/**
+ * Media error message types returned from NDK media functions.
+ */
 typedef enum {
+    /** The requested media operation completed successfully. */
     AMEDIA_OK = 0,
 
     /**
@@ -55,14 +59,34 @@
     AMEDIACODEC_ERROR_RECLAIMED             = 1101,
 
     AMEDIA_ERROR_BASE                  = -10000,
+
+    /** The called media function failed with an unknown error. */
     AMEDIA_ERROR_UNKNOWN               = AMEDIA_ERROR_BASE,
+
+    /** The input media data is corrupt or incomplete. */
     AMEDIA_ERROR_MALFORMED             = AMEDIA_ERROR_BASE - 1,
+
+    /** The required operation or media formats are not supported. */
     AMEDIA_ERROR_UNSUPPORTED           = AMEDIA_ERROR_BASE - 2,
+
+    /** An invalid (or already closed) object is used in the function call. */
     AMEDIA_ERROR_INVALID_OBJECT        = AMEDIA_ERROR_BASE - 3,
+
+    /** At least one of the passed parameters is invalid. */
     AMEDIA_ERROR_INVALID_PARAMETER     = AMEDIA_ERROR_BASE - 4,
+
+    /** The media object is not in the right state for the required operation. */
     AMEDIA_ERROR_INVALID_OPERATION     = AMEDIA_ERROR_BASE - 5,
+
+    /** The media stream ended while processing the requested operation. */
     AMEDIA_ERROR_END_OF_STREAM         = AMEDIA_ERROR_BASE - 6,
+
+    /** An error occurred while the media object was carrying out an I/O operation. */
     AMEDIA_ERROR_IO                    = AMEDIA_ERROR_BASE - 7,
+
+    /** The required operation would have to block (on I/O or otherwise),
+     *   but blocking is not enabled.
+     */
     AMEDIA_ERROR_WOULD_BLOCK           = AMEDIA_ERROR_BASE - 8,
 
     AMEDIA_DRM_ERROR_BASE              = -20000,
@@ -77,10 +101,20 @@
     AMEDIA_DRM_LICENSE_EXPIRED         = AMEDIA_DRM_ERROR_BASE - 9,
 
     AMEDIA_IMGREADER_ERROR_BASE          = -30000,
+
+    /** There are no more image buffers to read/write image data. */
     AMEDIA_IMGREADER_NO_BUFFER_AVAILABLE = AMEDIA_IMGREADER_ERROR_BASE - 1,
+
+    /** The maximum allowed number of image buffers has already been acquired. */
     AMEDIA_IMGREADER_MAX_IMAGES_ACQUIRED = AMEDIA_IMGREADER_ERROR_BASE - 2,
+
+    /** The required image buffer could not be locked to read. */
     AMEDIA_IMGREADER_CANNOT_LOCK_IMAGE   = AMEDIA_IMGREADER_ERROR_BASE - 3,
+
+    /** The media data or buffer could not be unlocked. */
     AMEDIA_IMGREADER_CANNOT_UNLOCK_IMAGE = AMEDIA_IMGREADER_ERROR_BASE - 4,
+
+    /** The media/buffer needs to be locked to perform the required operation. */
     AMEDIA_IMGREADER_IMAGE_NOT_LOCKED    = AMEDIA_IMGREADER_ERROR_BASE - 5,
 
 } media_status_t;
diff --git a/media/ndk/include/media/NdkMediaMuxer.h b/media/ndk/include/media/NdkMediaMuxer.h
index 866ebfd..d7eccb8 100644
--- a/media/ndk/include/media/NdkMediaMuxer.h
+++ b/media/ndk/include/media/NdkMediaMuxer.h
@@ -152,12 +152,17 @@
 /**
  * Creates a new media muxer for appending data to an existing MPEG4 file.
  * This is a synchronous API call and could take a while to return if the existing file is large.
- * Works for only MPEG4 files that contain a) a single audio track, b) a single video track,
- * c) a single audio and a single video track.
- * @param(fd): needs to be opened with read and write permission.  Does not take ownership of
+ * Only works for MPEG4 files matching one of the following characteristics:
+ * <ul>
+ *    <li>a single audio track.</li>
+ *    <li>a single video track.</li>
+ *    <li>a single audio and a single video track.</li>
+ * </ul>
+ * @param fd Must be opened with read and write permission. Does not take ownership of
  * this fd i.e., caller is responsible for closing fd.
- * @param(mode): AppendMode is an enum that specifies one of the modes of appending data.
- * @return : Pointer to AMediaMuxer if the file(fd) has tracks already, otherwise, nullptr.
+ * @param mode Specifies how data will be appended; the AppendMode enum describes
+ *             the possible methods for appending.
+ * @return Pointer to AMediaMuxer if the file(fd) has tracks already, otherwise, nullptr.
  * {@link AMediaMuxer_delete} should be used to free the returned pointer.
  *
  * Available since API level 31.
diff --git a/media/utils/Android.bp b/media/utils/Android.bp
index 52dc0cf..9e48c1f 100644
--- a/media/utils/Android.bp
+++ b/media/utils/Android.bp
@@ -46,6 +46,7 @@
         "libbinder",
         "libcutils",
         "liblog",
+        "libpermission",
         "libutils",
         "libhidlbase",
         "android.hardware.graphics.bufferqueue@1.0",
diff --git a/media/utils/ServiceUtilities.cpp b/media/utils/ServiceUtilities.cpp
index e2e1043..68a5059 100644
--- a/media/utils/ServiceUtilities.cpp
+++ b/media/utils/ServiceUtilities.cpp
@@ -212,8 +212,10 @@
 
     if (ok) {
         static const String16 sCaptureHotwordAllowed("android.permission.CAPTURE_AUDIO_HOTWORD");
-        // IMPORTANT: Use PermissionCache - not a runtime permission and may not change.
-        ok = PermissionCache::checkPermission(sCaptureHotwordAllowed, pid, uid);
+        //TODO: b/185972521: see if permission cache can be used with shell identity for CTS
+        PermissionController permissionController;
+        ok = permissionController.checkPermission(sCaptureHotwordAllowed,
+            pid, uid);
     }
     if (!ok) ALOGV("android.permission.CAPTURE_AUDIO_HOTWORD");
     return ok;
diff --git a/services/audioflinger/Android.bp b/services/audioflinger/Android.bp
index 2294c49..a7d47fb 100644
--- a/services/audioflinger/Android.bp
+++ b/services/audioflinger/Android.bp
@@ -74,6 +74,7 @@
         "libmediautils",
         "libnbaio",
         "libnblog",
+        "libpermission",
         "libpowermanager",
         "libmediautils",
         "libmemunreachable",
@@ -95,6 +96,7 @@
     ],
 
     export_shared_lib_headers: [
+        "libpermission",
         "media_permission-aidl-cpp",
     ],
 
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 7cdac30..20812bf 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -163,6 +163,33 @@
     }
 };
 
+// TODO b/182392769: use identity util
+/* static */
+media::permission::Identity AudioFlinger::checkIdentityPackage(
+        const media::permission::Identity& identity) {
+    Vector<String16> packages;
+    PermissionController{}.getPackagesForUid(identity.uid, packages);
+
+    Identity checkedIdentity = identity;
+    if (!identity.packageName.has_value() || identity.packageName.value().size() == 0) {
+        if (!packages.isEmpty()) {
+            checkedIdentity.packageName =
+                std::move(legacy2aidl_String16_string(packages[0]).value());
+        }
+    } else {
+        String16 opPackageLegacy = VALUE_OR_FATAL(
+            aidl2legacy_string_view_String16(identity.packageName.value_or("")));
+        if (std::find_if(packages.begin(), packages.end(),
+                [&opPackageLegacy](const auto& package) {
+                return opPackageLegacy == package; }) == packages.end()) {
+            ALOGW("The package name(%s) provided does not correspond to the uid %d",
+                    identity.packageName.value_or("").c_str(), identity.uid);
+            checkedIdentity.packageName = std::optional<std::string>();
+        }
+    }
+    return checkedIdentity;
+}
+
 // ----------------------------------------------------------------------------
 
 std::string formatToString(audio_format_t format) {
@@ -2157,7 +2184,7 @@
                                                   &output.notificationFrameCount,
                                                   callingPid, adjIdentity, &output.flags,
                                                   input.clientInfo.clientTid,
-                                                  &lStatus, portId);
+                                                  &lStatus, portId, input.maxSharedAudioHistoryMs);
         LOG_ALWAYS_FATAL_IF((lStatus == NO_ERROR) && (recordTrack == 0));
 
         // lStatus == BAD_TYPE means FAST flag was rejected: request a new input from
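
checkIdentityPackage() above enforces a simple rule: an empty package name is filled in from the uid's first known package, and a claimed package the uid does not own is cleared (with a warning) rather than trusted. A simplified sketch of that rule, with plain standard-library types standing in for Identity and PermissionController:

```cpp
#include <algorithm>
#include <optional>
#include <string>
#include <vector>

std::optional<std::string> checkedPackage(std::optional<std::string> claimed,
                                          const std::vector<std::string>& uidPackages) {
    if (!claimed || claimed->empty()) {
        // No package claimed: adopt the first package known for this uid, if any.
        if (!uidPackages.empty()) return uidPackages.front();
        return std::nullopt;
    }
    // Package claimed: keep it only if the uid actually owns it.
    bool owned = std::find(uidPackages.begin(), uidPackages.end(), *claimed)
                 != uidPackages.end();
    return owned ? claimed : std::nullopt;
}
```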
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index a980752..c66ecb0 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -129,6 +129,9 @@
 public:
     static void instantiate() ANDROID_API;
 
+    static media::permission::Identity checkIdentityPackage(
+                                            const media::permission::Identity& identity);
+
     status_t dump(int fd, const Vector<String16>& args) override;
 
     // IAudioFlinger interface, in binder opcode order
@@ -673,6 +676,8 @@
         virtual binder::Status   setPreferredMicrophoneDirection(
                 int /*audio_microphone_direction_t*/ direction);
         virtual binder::Status   setPreferredMicrophoneFieldDimension(float zoom);
+        virtual binder::Status   shareAudioHistory(const std::string& sharedAudioPackageName,
+                                                   int64_t sharedAudioStartMs);
 
     private:
         const sp<RecordThread::RecordTrack> mRecordTrack;
@@ -978,6 +983,9 @@
     std::vector<media::AudioVibratorInfo> mAudioVibratorInfos;
 
     static inline constexpr const char *mMetricsId = AMEDIAMETRICS_KEY_AUDIO_FLINGER;
+
+    // Keep in sync with java definition in media/java/android/media/AudioRecord.java
+    static constexpr int32_t kMaxSharedAudioHistoryMs = 5000;
 };
 
 #undef INCLUDING_FROM_AUDIOFLINGER_H
diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h
index 2e59baa..2436248 100644
--- a/services/audioflinger/PlaybackTracks.h
+++ b/services/audioflinger/PlaybackTracks.h
@@ -219,6 +219,10 @@
     void flushAck();
     bool isResumePending();
     void resumeAck();
+    // For direct or offloaded tracks ensure that the pause state is acknowledged
+    // by the playback thread in case of an immediate flush.
+    bool isPausePending() const { return mPauseHwPending; }
+    void pauseAck();
     void updateTrackFrameInfo(int64_t trackFramesReleased, int64_t sinkFramesWritten,
             uint32_t halSampleRate, const ExtendedTimestamp &timeStamp);
 
@@ -314,6 +318,7 @@
     sp<AudioTrackServerProxy>  mAudioTrackServerProxy;
     bool                mResumeToStopping; // track was paused in stopping state.
     bool                mFlushHwPending; // track requests for thread flush
+    bool                mPauseHwPending = false; // direct/offload track request for thread pause
     audio_output_flags_t mFlags;
     // If the last track change was notified to the client with readAndClearHasChanged
     std::atomic_flag     mChangeNotified = ATOMIC_FLAG_INIT;
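The new mPauseHwPending flag implements a small request/acknowledge handshake: the track records that a pause was requested on a direct or offloaded output, and the playback thread acknowledges it before deciding, from the current state, whether the track should still move to paused and whether the hardware pause must still be issued. A stripped-down sketch of that handshake with toy classes (not the AudioFlinger ones; the per-track "last" check is omitted):

#include <cstdio>

// Illustrative only: a track that remembers a pending pause request, and a
// thread prepare step that acknowledges it before acting on the current state.
class ToyTrack {
public:
    enum State { ACTIVE, PAUSING, PAUSED, FLUSHED };

    void pause()        { if (mState == ACTIVE) { mState = PAUSING; mPauseHwPending = true; } }
    void flush()        { mState = FLUSHED; }          // may race ahead of the thread loop
    bool isPausePending() const { return mPauseHwPending; }
    void pauseAck()     { mPauseHwPending = false; }
    bool isPausing() const { return mState == PAUSING; }
    void setPaused()    { mState = PAUSED; }

private:
    State mState = ACTIVE;
    bool  mPauseHwPending = false;
};

// One pass of a playback-thread prepare step.
void prepare(ToyTrack& track, bool hwSupportsPause, bool& hwPaused) {
    if (track.isPausePending()) {
        track.pauseAck();
        // The track may have been flushed or stopped since pause() was called,
        // so only move to PAUSED if it is still pausing.
        if (track.isPausing()) {
            track.setPaused();
        }
        // Still request the hardware pause: an immediate flush changed the
        // track state, but the pause itself must reach the HAL.
        if (hwSupportsPause && !hwPaused) {
            hwPaused = true;   // stand-in for mOutput->stream->pause()
        }
    }
}

int main() {
    ToyTrack t;
    bool hwPaused = false;
    t.pause();
    t.flush();                 // flush arrives before the thread loop runs
    prepare(t, /*hwSupportsPause=*/true, hwPaused);
    std::printf("hwPaused=%d pending=%d\n", hwPaused, t.isPausePending());
    return 0;
}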
diff --git a/services/audioflinger/RecordTracks.h b/services/audioflinger/RecordTracks.h
index 5f248e1..b953c0b 100644
--- a/services/audioflinger/RecordTracks.h
+++ b/services/audioflinger/RecordTracks.h
@@ -69,7 +69,8 @@
                                 const media::permission::Identity& identity,
                                 audio_input_flags_t flags,
                                 track_type type,
-                                audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE);
+                                audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE,
+                                int64_t startTimeMs = -1);
     virtual             ~RecordTrack();
     virtual status_t    initCheck() const;
 
@@ -107,6 +108,9 @@
 
             status_t    setPreferredMicrophoneDirection(audio_microphone_direction_t direction);
             status_t    setPreferredMicrophoneFieldDimension(float zoom);
+            status_t    shareAudioHistory(const std::string& sharedAudioPackageName,
+                                          int64_t sharedAudioStartMs);
+            int64_t     startTimeMs() { return mStartTimeMs; }
 
     static  bool        checkServerLatencySupported(
                                 audio_format_t format, audio_input_flags_t flags) {
@@ -146,8 +150,9 @@
             bool                               mSilenced;
 
             // used to enforce OP_RECORD_AUDIO
-            uid_t                              mUid;
             sp<OpRecordAudioMonitor>           mOpRecordAudioMonitor;
+            std::string                        mSharedAudioPackageName = {};
+            int64_t                            mStartTimeMs = -1;
 };
 
 // playback track, used by PatchPanel
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index c83fc80..6da4543 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -700,6 +700,13 @@
     return sendConfigEvent_l(configEvent);
 }
 
+void AudioFlinger::ThreadBase::sendResizeBufferConfigEvent_l(int32_t maxSharedAudioHistoryMs)
+{
+    ALOG_ASSERT(type() == RECORD, "sendResizeBufferConfigEvent_l() called on non record thread");
+    sp<ConfigEvent> configEvent =
+            (ConfigEvent *)new ResizeBufferConfigEvent(maxSharedAudioHistoryMs);
+    sendConfigEvent_l(configEvent);
+}
 
 // post condition: mConfigEvents.isEmpty()
 void AudioFlinger::ThreadBase::processConfigEvents_l()
@@ -758,6 +765,11 @@
                     (UpdateOutDevicesConfigEventData *)event->mData.get();
             updateOutDevices(data->mOutDevices);
         } break;
+        case CFG_EVENT_RESIZE_BUFFER: {
+            ResizeBufferConfigEventData *data =
+                    (ResizeBufferConfigEventData *)event->mData.get();
+            resizeInputBuffer_l(data->mMaxSharedAudioHistoryMs);
+        } break;
         default:
             ALOG_ASSERT(false, "processConfigEvents_l() unknown event type %d", event->mType);
             break;
@@ -1079,6 +1091,11 @@
     ALOGE("%s should only be called in RecordThread", __func__);
 }
 
+void AudioFlinger::ThreadBase::resizeInputBuffer_l(int32_t maxSharedAudioHistoryMs __unused)
+{
+    ALOGE("%s should only be called in RecordThread", __func__);
+}
+
 void AudioFlinger::ThreadBase::PMDeathRecipient::binderDied(const wp<IBinder>& who __unused)
 {
     sp<ThreadBase> thread = mThread.promote();
@@ -5880,8 +5897,15 @@
         sp<Track> l = mActiveTracks.getLatest();
         bool last = l.get() == track;
 
-        if (track->isPausing()) {
-            track->setPaused();
+        if (track->isPausePending()) {
+            track->pauseAck();
+            // It is possible a track might have been flushed or stopped.
+            // Other operations such as flush pending might occur on the next prepare.
+            if (track->isPausing()) {
+                track->setPaused();
+            }
+            // Always perform pause, as an immediate flush will change
+            // the pause state to be no longer isPausing().
             if (mHwSupportsPause && last && !mHwPaused) {
                 doHwPause = true;
                 mHwPaused = true;
@@ -6423,8 +6447,15 @@
             continue;
         }
 
-        if (track->isPausing()) {
-            track->setPaused();
+        if (track->isPausePending()) {
+            track->pauseAck();
+            // It is possible a track might have been flushed or stopped.
+            // Other operations such as flush pending might occur on the next prepare.
+            if (track->isPausing()) {
+                track->setPaused();
+            }
+            // Always perform pause if last, as an immediate flush will change
+            // the pause state to be no longer isPausing().
             if (last) {
                 if (mHwSupportsPause && !mHwPaused) {
                     doHwPause = true;
@@ -7752,7 +7783,8 @@
         audio_input_flags_t *flags,
         pid_t tid,
         status_t *status,
-        audio_port_handle_t portId)
+        audio_port_handle_t portId,
+        int32_t maxSharedAudioHistoryMs)
 {
     size_t frameCount = *pFrameCount;
     size_t notificationFrameCount = *pNotificationFrameCount;
@@ -7761,6 +7793,7 @@
     audio_input_flags_t inputFlags = mInput->flags;
     audio_input_flags_t requestedFlags = *flags;
     uint32_t sampleRate;
+    Identity checkedIdentity = AudioFlinger::checkIdentityPackage(identity);
 
     lStatus = initCheck();
     if (lStatus != NO_ERROR) {
@@ -7774,6 +7807,23 @@
         goto Exit;
     }
 
+    if (maxSharedAudioHistoryMs != 0) {
+        if (!captureHotwordAllowed(checkedIdentity)) {
+            lStatus = PERMISSION_DENIED;
+            goto Exit;
+        }
+        //TODO: b/185972521 allow resampling buffer resizing on fast mixers by pausing
+        // the fast mixer thread while resizing the buffer in the normal thread
+        if (hasFastCapture()) {
+            lStatus = BAD_VALUE;
+            goto Exit;
+        }
+        if (maxSharedAudioHistoryMs < 0
+                || maxSharedAudioHistoryMs > AudioFlinger::kMaxSharedAudioHistoryMs) {
+            lStatus = BAD_VALUE;
+            goto Exit;
+        }
+    }
     if (*pSampleRate == 0) {
         *pSampleRate = mSampleRate;
     }
@@ -7882,11 +7932,18 @@
 
     { // scope for mLock
         Mutex::Autolock _l(mLock);
+        long startTimeMs = -1;
+        if (!mSharedAudioPackageName.empty()
+                && mSharedAudioPackageName == checkedIdentity.packageName
+                && mSharedAudioSessionId == sessionId
+                && captureHotwordAllowed(checkedIdentity)) {
+            startTimeMs = mSharedAudioStartMs;
+        }
 
         track = new RecordTrack(this, client, attr, sampleRate,
                       format, channelMask, frameCount,
                       nullptr /* buffer */, (size_t)0 /* bufferSize */, sessionId, creatorPid,
-                      identity, *flags, TrackBase::TYPE_DEFAULT, portId);
+                      checkedIdentity, *flags, TrackBase::TYPE_DEFAULT, portId, startTimeMs);
 
         lStatus = track->initCheck();
         if (lStatus != NO_ERROR) {
@@ -7902,6 +7959,11 @@
             // so ask activity manager to do this on our behalf
             sendPrioConfigEvent_l(callingPid, tid, kPriorityAudioApp, true /*forApp*/);
         }
+
+        if (maxSharedAudioHistoryMs != 0) {
+            sendResizeBufferConfigEvent_l(maxSharedAudioHistoryMs);
+        }
+
     }
 
     lStatus = NO_ERROR;
@@ -8094,6 +8156,9 @@
 {
     ALOGV("RecordThread::getActiveMicrophones");
     AutoMutex _l(mLock);
+    if (mInput == nullptr || mInput->stream == nullptr) {
+        return NO_INIT;
+    }
     status_t status = mInput->stream->getActiveMicrophones(activeMicrophones);
     return status;
 }
@@ -8103,6 +8168,9 @@
 {
     ALOGV("setPreferredMicrophoneDirection(%d)", direction);
     AutoMutex _l(mLock);
+    if (mInput == nullptr || mInput->stream == nullptr) {
+        return NO_INIT;
+    }
     return mInput->stream->setPreferredMicrophoneDirection(direction);
 }
 
@@ -8110,9 +8178,43 @@
 {
     ALOGV("setPreferredMicrophoneFieldDimension(%f)", zoom);
     AutoMutex _l(mLock);
+    if (mInput == nullptr || mInput->stream == nullptr) {
+        return NO_INIT;
+    }
     return mInput->stream->setPreferredMicrophoneFieldDimension(zoom);
 }
 
+status_t AudioFlinger::RecordThread::shareAudioHistory(
+        const std::string& sharedAudioPackageName, audio_session_t sharedSessionId,
+        int64_t sharedAudioStartMs) {
+    AutoMutex _l(mLock);
+    return shareAudioHistory_l(sharedAudioPackageName, sharedSessionId, sharedAudioStartMs);
+}
+
+status_t AudioFlinger::RecordThread::shareAudioHistory_l(
+        const std::string& sharedAudioPackageName, audio_session_t sharedSessionId,
+        int64_t sharedAudioStartMs) {
+    if (hasFastCapture()) {
+        return BAD_VALUE;
+    }
+    if ((hasAudioSession_l(sharedSessionId) & ThreadBase::TRACK_SESSION) == 0) {
+        return BAD_VALUE;
+    }
+    if (sharedAudioStartMs < 0 || sharedAudioStartMs * mSampleRate / 1000 > mRsmpInRear) {
+        return BAD_VALUE;
+    }
+
+    mSharedAudioPackageName = sharedAudioPackageName;
+    if (mSharedAudioPackageName.empty()) {
+        mSharedAudioSessionId = AUDIO_SESSION_NONE;
+        mSharedAudioStartMs = -1;
+    } else {
+        mSharedAudioSessionId = sharedSessionId;
+        mSharedAudioStartMs = sharedAudioStartMs;
+    }
+    return NO_ERROR;
+}
+
 void AudioFlinger::RecordThread::updateMetadata_l()
 {
     if (mInput == nullptr || mInput->stream == nullptr ||
@@ -8144,6 +8246,7 @@
 {
     track->terminate();
     track->mState = TrackBase::STOPPED;
+
     // active tracks are removed by threadLoop()
     if (mActiveTracks.indexOf(track) < 0) {
         removeTrack_l(track);
@@ -8251,8 +8354,23 @@
 {
     sp<ThreadBase> threadBase = mRecordTrack->mThread.promote();
     RecordThread *recordThread = (RecordThread *) threadBase.get();
-    mRsmpInFront = recordThread->mRsmpInRear;
     mRsmpInUnrel = 0;
+    const int32_t rear = recordThread->mRsmpInRear;
+    ssize_t deltaFrames = 0;
+    if (mRecordTrack->startTimeMs() >= 0) {
+        int32_t startFrames = mRecordTrack->startTimeMs() * recordThread->sampleRate() / 1000;
+        // start frame has to be in the past
+        //TODO: b/185972521 fix in case rear or startFrames wrap around
+        if (startFrames > rear) {
+            startFrames = rear;
+        }
+        deltaFrames = rear - startFrames;
+        // start frame cannot be further in the past than start of resampling buffer
+        if ((size_t) deltaFrames > recordThread->mRsmpInFrames) {
+            deltaFrames = recordThread->mRsmpInFrames;
+        }
+    }
+    mRsmpInFront = audio_utils::safe_sub_overflow(rear, static_cast<int32_t>(deltaFrames));
 }
 
 void AudioFlinger::RecordThread::ResamplerBufferProvider::sync(
@@ -8517,31 +8635,10 @@
     ALOGV("%p RecordThread params: mChannelCount=%u, mFormat=%#x, mFrameSize=%zu, "
             "mBufferSize=%zu, mFrameCount=%zu",
             this, mChannelCount, mFormat, mFrameSize, mBufferSize, mFrameCount);
-    // This is the formula for calculating the temporary buffer size.
-    // With 7 HAL buffers, we can guarantee ability to down-sample the input by ratio of 6:1 to
-    // 1 full output buffer, regardless of the alignment of the available input.
-    // The value is somewhat arbitrary, and could probably be even larger.
-    // A larger value should allow more old data to be read after a track calls start(),
-    // without increasing latency.
-    //
-    // Note this is independent of the maximum downsampling ratio permitted for capture.
-    mRsmpInFrames = mFrameCount * 7;
-    mRsmpInFramesP2 = roundup(mRsmpInFrames);
-    free(mRsmpInBuffer);
-    mRsmpInBuffer = NULL;
 
-    // TODO optimize audio capture buffer sizes ...
-    // Here we calculate the size of the sliding buffer used as a source
-    // for resampling.  mRsmpInFramesP2 is currently roundup(mFrameCount * 7).
-    // For current HAL frame counts, this is usually 2048 = 40 ms.  It would
-    // be better to have it derived from the pipe depth in the long term.
-    // The current value is higher than necessary.  However it should not add to latency.
-
-    // Over-allocate beyond mRsmpInFramesP2 to permit a HAL read past end of buffer
-    mRsmpInFramesOA = mRsmpInFramesP2 + mFrameCount - 1;
-    (void)posix_memalign(&mRsmpInBuffer, 32, mRsmpInFramesOA * mFrameSize);
-    // if posix_memalign fails, will segv here.
-    memset(mRsmpInBuffer, 0, mRsmpInFramesOA * mFrameSize);
+    // mRsmpInFrames must be 0 before calling resizeInputBuffer_l for the first time
+    mRsmpInFrames = 0;
+    resizeInputBuffer_l();
 
     // AudioRecord mSampleRate and mChannelCount are constant due to AudioRecord API constraints.
     // But if thread's mSampleRate or mChannelCount changes, how will that affect active tracks?
@@ -8724,6 +8821,124 @@
     }
 }
 
+int32_t AudioFlinger::RecordThread::getOldestFront_l()
+{
+    if (mTracks.size() == 0) {
+        return 0;
+    }
+    //TODO: b/185972521 fix in case of wrap around on one track:
+    //  want the max(rear - front) for all tracks.
+    int32_t front = INT_MAX;
+    for (size_t i = 0; i < mTracks.size(); i++) {
+        front = std::min(front, mTracks[i]->mResamplerBufferProvider->getFront());
+    }
+    // discard any audio past the buffer size
+    if (audio_utils::safe_add_overflow(front, (int32_t)mRsmpInFrames) < mRsmpInRear) {
+        front = audio_utils::safe_sub_overflow(mRsmpInRear, (int32_t)mRsmpInFrames);
+    }
+    return front;
+}
+
+void AudioFlinger::RecordThread::updateFronts_l(int32_t offset)
+{
+    if (offset == 0) {
+        return;
+    }
+    for (size_t i = 0; i < mTracks.size(); i++) {
+        int32_t front = mTracks[i]->mResamplerBufferProvider->getFront();
+        front = audio_utils::safe_sub_overflow(front, offset);
+        mTracks[i]->mResamplerBufferProvider->setFront(front);
+    }
+}
+
+void AudioFlinger::RecordThread::resizeInputBuffer_l(int32_t maxSharedAudioHistoryMs)
+{
+    // This is the formula for calculating the temporary buffer size.
+    // With 7 HAL buffers, we can guarantee ability to down-sample the input by ratio of 6:1 to
+    // 1 full output buffer, regardless of the alignment of the available input.
+    // The value is somewhat arbitrary, and could probably be even larger.
+    // A larger value should allow more old data to be read after a track calls start(),
+    // without increasing latency.
+    //
+    // Note this is independent of the maximum downsampling ratio permitted for capture.
+    size_t minRsmpInFrames = mFrameCount * 7;
+
+    // maxSharedAudioHistoryMs != 0 indicates a request to possibly make some part of the audio
+    // capture history available to another client using the same session ID:
+    // dimension the resampler input buffer accordingly.
+
+    // Get oldest client read position:  getOldestFront_l() must be called before altering
+    // mRsmpInRear, or mRsmpInFrames
+    int32_t previousFront = getOldestFront_l();
+    size_t previousRsmpInFramesP2 = mRsmpInFramesP2;
+    int32_t previousRear = mRsmpInRear;
+    mRsmpInRear = 0;
+
+    if (maxSharedAudioHistoryMs != 0) {
+        // resizeInputBuffer_l should never be called with a non zero shared history if the
+        // buffer was not already allocated
+        ALOG_ASSERT(mRsmpInBuffer != nullptr && mRsmpInFrames != 0,
+                "resizeInputBuffer_l() called with shared history and unallocated buffer");
+        size_t rsmpInFrames = (size_t)maxSharedAudioHistoryMs * mSampleRate / 1000;
+        // never reduce resampler input buffer size
+        if (rsmpInFrames < mRsmpInFrames) {
+            return;
+        }
+        mRsmpInFrames = rsmpInFrames;
+    }
+    // Note: mRsmpInFrames is 0 when called with maxSharedAudioHistoryMs equal to 0, so it is
+    // always initialized here
+    if (mRsmpInFrames < minRsmpInFrames) {
+        mRsmpInFrames = minRsmpInFrames;
+    }
+    mRsmpInFramesP2 = roundup(mRsmpInFrames);
+
+    // TODO optimize audio capture buffer sizes ...
+    // Here we calculate the size of the sliding buffer used as a source
+    // for resampling.  mRsmpInFramesP2 is currently roundup(mFrameCount * 7).
+    // For current HAL frame counts, this is usually 2048 = 40 ms.  It would
+    // be better to have it derived from the pipe depth in the long term.
+    // The current value is higher than necessary.  However it should not add to latency.
+
+    // Over-allocate beyond mRsmpInFramesP2 to permit a HAL read past end of buffer
+    mRsmpInFramesOA = mRsmpInFramesP2 + mFrameCount - 1;
+
+    void *rsmpInBuffer;
+    (void)posix_memalign(&rsmpInBuffer, 32, mRsmpInFramesOA * mFrameSize);
+    // if posix_memalign fails, will segv here.
+    memset(rsmpInBuffer, 0, mRsmpInFramesOA * mFrameSize);
+
+    // Copy audio history if any from old buffer before freeing it
+    if (previousRear != 0) {
+        ALOG_ASSERT(mRsmpInBuffer != nullptr,
+                "resizeInputBuffer_l() called with null buffer but frames already read from HAL");
+
+        ssize_t unread = audio_utils::safe_sub_overflow(previousRear, previousFront);
+        previousFront &= previousRsmpInFramesP2 - 1;
+        size_t part1 = previousRsmpInFramesP2 - previousFront;
+        if (part1 > (size_t) unread) {
+            part1 = unread;
+        }
+        if (part1 != 0) {
+            memcpy(rsmpInBuffer, (const uint8_t*)mRsmpInBuffer + previousFront * mFrameSize,
+                   part1 * mFrameSize);
+            mRsmpInRear = part1;
+            part1 = unread - part1;
+            if (part1 != 0) {
+                memcpy((uint8_t*)rsmpInBuffer + mRsmpInRear * mFrameSize,
+                       (const uint8_t*)mRsmpInBuffer, part1 * mFrameSize);
+                mRsmpInRear += part1;
+            }
+        }
+        // Update front for all clients according to new rear
+        updateFronts_l(audio_utils::safe_sub_overflow(previousRear, mRsmpInRear));
+    } else {
+        mRsmpInRear = 0;
+    }
+    free(mRsmpInBuffer);
+    mRsmpInBuffer = rsmpInBuffer;
+}
+
 void AudioFlinger::RecordThread::addPatchTrack(const sp<PatchRecord>& record)
 {
     Mutex::Autolock _l(mLock);
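The reworked resizeInputBuffer_l() above grows the resampler ring buffer while preserving the frames no client has consumed yet: the unread region between the oldest front and the rear is copied in at most two memcpy() parts, since the old buffer size is a power of two, and the rear is rebased to the new buffer. A compact sketch of just that copy step, assuming a power-of-two old capacity, int32_t frames, and a new capacity at least as large as the unread region:

#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <cstdlib>
#include <cstring>

// Grow a power-of-two ring buffer of int32_t frames, keeping the unread region
// [front, rear). Returns the new buffer; *rear is rebased so the preserved
// frames occupy [0, *rear). Counters are monotonic; allocation-failure and
// wrap-around handling are omitted for brevity.
int32_t* growRing(const int32_t* oldBuf, size_t oldCapPow2,
                  int32_t front, int32_t* rear, size_t newCap) {
    int32_t* newBuf = static_cast<int32_t*>(calloc(newCap, sizeof(int32_t)));
    size_t unread = static_cast<size_t>(*rear - front);
    size_t oldFront = static_cast<size_t>(front) & (oldCapPow2 - 1);
    size_t part1 = oldCapPow2 - oldFront;            // up to the end of the old ring
    if (part1 > unread) part1 = unread;
    memcpy(newBuf, oldBuf + oldFront, part1 * sizeof(int32_t));
    memcpy(newBuf + part1, oldBuf, (unread - part1) * sizeof(int32_t));  // wrapped tail
    *rear = static_cast<int32_t>(unread);            // history now starts at frame 0
    return newBuf;
}

int main() {
    // Old ring of capacity 8 holding frames 3..10; frames 6..10 are unread.
    int32_t old[8] = {8, 9, 10, 3, 4, 5, 6, 7};
    int32_t rear = 11, front = 6;
    int32_t* grown = growRing(old, 8, front, &rear, 32);
    for (int32_t i = 0; i < rear; i++) std::printf("%d ", grown[i]);  // 6 7 8 9 10
    std::printf("\n");
    free(grown);
    return 0;
}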
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index e63642b..03ed6fd 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -53,6 +53,7 @@
         CFG_EVENT_CREATE_AUDIO_PATCH,
         CFG_EVENT_RELEASE_AUDIO_PATCH,
         CFG_EVENT_UPDATE_OUT_DEVICE,
+        CFG_EVENT_RESIZE_BUFFER
     };
 
     class ConfigEventData: public RefBase {
@@ -242,6 +243,28 @@
         virtual ~UpdateOutDevicesConfigEvent();
     };
 
+    class ResizeBufferConfigEventData : public ConfigEventData {
+    public:
+        explicit ResizeBufferConfigEventData(int32_t maxSharedAudioHistoryMs) :
+            mMaxSharedAudioHistoryMs(maxSharedAudioHistoryMs) {}
+
+        virtual void dump(char *buffer, size_t size) {
+            snprintf(buffer, size, "mMaxSharedAudioHistoryMs: %d", mMaxSharedAudioHistoryMs);
+        }
+
+        int32_t mMaxSharedAudioHistoryMs;
+    };
+
+    class ResizeBufferConfigEvent : public ConfigEvent {
+    public:
+        explicit ResizeBufferConfigEvent(int32_t maxSharedAudioHistoryMs) :
+            ConfigEvent(CFG_EVENT_RESIZE_BUFFER) {
+            mData = new ResizeBufferConfigEventData(maxSharedAudioHistoryMs);
+        }
+
+        virtual ~ResizeBufferConfigEvent() {}
+    };
+
     class PMDeathRecipient : public IBinder::DeathRecipient {
     public:
         explicit    PMDeathRecipient(const wp<ThreadBase>& thread) : mThread(thread) {}
@@ -306,6 +329,7 @@
                 status_t    sendReleaseAudioPatchConfigEvent(audio_patch_handle_t handle);
                 status_t    sendUpdateOutDeviceConfigEvent(
                                     const DeviceDescriptorBaseVector& outDevices);
+                void        sendResizeBufferConfigEvent_l(int32_t maxSharedAudioHistoryMs);
                 void        processConfigEvents_l();
     virtual     void        cacheParameters_l() = 0;
     virtual     status_t    createAudioPatch_l(const struct audio_patch *patch,
@@ -314,6 +338,9 @@
     virtual     void        updateOutDevices(const DeviceDescriptorBaseVector& outDevices);
     virtual     void        toAudioPortConfig(struct audio_port_config *config) = 0;
 
+    virtual     void        resizeInputBuffer_l(int32_t maxSharedAudioHistoryMs = 0);
+
+
 
                 // see note at declaration of mStandby, mOutDevice and mInDevice
                 bool        standby() const { return mStandby; }
@@ -1613,6 +1640,9 @@
         // AudioBufferProvider interface
         virtual status_t    getNextBuffer(AudioBufferProvider::Buffer* buffer);
         virtual void        releaseBuffer(AudioBufferProvider::Buffer* buffer);
+
+                int32_t     getFront() const { return mRsmpInFront; }
+                void        setFront(int32_t front) { mRsmpInFront = front; }
     private:
         RecordTrack * const mRecordTrack;
         size_t              mRsmpInUnrel;   // unreleased frames remaining from
@@ -1662,7 +1692,8 @@
                     audio_input_flags_t *flags,
                     pid_t tid,
                     status_t *status /*non-NULL*/,
-                    audio_port_handle_t portId);
+                    audio_port_handle_t portId,
+                    int32_t maxSharedAudioHistoryMs);
 
             status_t    start(RecordTrack* recordTrack,
                               AudioSystem::sync_event_t event,
@@ -1686,6 +1717,7 @@
                                            audio_patch_handle_t *handle);
     virtual status_t    releaseAudioPatch_l(const audio_patch_handle_t handle);
             void        updateOutDevices(const DeviceDescriptorBaseVector& outDevices) override;
+            void        resizeInputBuffer_l(int32_t maxSharedAudioHistoryMs = 0) override;
 
             void        addPatchTrack(const sp<PatchRecord>& record);
             void        deletePatchTrack(const sp<PatchRecord>& record);
@@ -1741,6 +1773,13 @@
                                     && inDeviceType() == mTimestampCorrectedDevice;
                         }
 
+            status_t    shareAudioHistory(const std::string& sharedAudioPackageName,
+                                          audio_session_t sharedSessionId = AUDIO_SESSION_NONE,
+                                          int64_t sharedAudioStartMs = -1);
+            status_t    shareAudioHistory_l(const std::string& sharedAudioPackageName,
+                                          audio_session_t sharedSessionId = AUDIO_SESSION_NONE,
+                                          int64_t sharedAudioStartMs = -1);
+
 protected:
             void        dumpInternals_l(int fd, const Vector<String16>& args) override;
             void        dumpTracks_l(int fd, const Vector<String16>& args) override;
@@ -1754,6 +1793,9 @@
 
             void    checkBtNrec_l();
 
+            int32_t getOldestFront_l();
+            void    updateFronts_l(int32_t offset);
+
             AudioStreamIn                       *mInput;
             Source                              *mSource;
             SortedVector < sp<RecordTrack> >    mTracks;
@@ -1819,6 +1861,10 @@
             int64_t                             mFramesRead = 0;    // continuous running counter.
 
             DeviceDescriptorBaseVector          mOutDevices;
+
+            std::string                         mSharedAudioPackageName = {};
+            long                                mSharedAudioStartMs = 0;
+            audio_session_t                     mSharedAudioSessionId = AUDIO_SESSION_NONE;
 };
 
 class MmapThread : public ThreadBase
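ResizeBufferConfigEvent follows the existing ThreadBase config-event shape: a caller on binder context enqueues a small data-carrying event, and the thread later drains its queue and applies the change on its own loop (here via resizeInputBuffer_l). A generic, simplified sketch of that producer/consumer pattern, not the AudioFlinger classes themselves (AudioFlinger processes events with the thread lock held and can block the sender; this toy version does neither):

#include <cstdint>
#include <cstdio>
#include <deque>
#include <mutex>

// Minimal config-event queue: callers post small payloads, the owning thread
// drains and applies them on its own loop. Illustrative sketch only.
struct ResizeBufferEvent { int32_t maxSharedAudioHistoryMs; };

class ToyThread {
public:
    // Called from binder/client context.
    void sendResizeBufferEvent(int32_t maxMs) {
        std::lock_guard<std::mutex> lg(mLock);
        mEvents.push_back({maxMs});
    }

    // Called from the thread's own loop.
    void processEvents() {
        std::deque<ResizeBufferEvent> pending;
        {
            std::lock_guard<std::mutex> lg(mLock);
            pending.swap(mEvents);
        }
        for (const auto& ev : pending) {
            resizeInputBuffer(ev.maxSharedAudioHistoryMs);
        }
    }

private:
    void resizeInputBuffer(int32_t maxMs) {
        std::printf("resizing input buffer for %d ms of history\n", maxMs);
    }

    std::mutex mLock;
    std::deque<ResizeBufferEvent> mEvents;
};

int main() {
    ToyThread t;
    t.sendResizeBufferEvent(2000);
    t.processEvents();
    return 0;
}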
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index db7528d..6549236 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -525,31 +525,8 @@
         return nullptr;
     }
 
-    // TODO b/182392769: use identity util
-    std::optional<std::string> opPackageNameStr = identity.packageName;
-    if (!identity.packageName.has_value()) {
-        // If no package name is provided by the client, use the first associated with the uid
-        if (!packages.isEmpty()) {
-            opPackageNameStr =
-                VALUE_OR_FATAL(legacy2aidl_String16_string(packages[0]));
-        }
-    } else {
-        // If the provided package name is invalid, we force app ops denial by clearing the package
-        // name passed to OpPlayAudioMonitor
-        String16 opPackageLegacy = VALUE_OR_FATAL(
-            aidl2legacy_string_view_String16(opPackageNameStr.value_or("")));
-        if (std::find_if(packages.begin(), packages.end(),
-                [&opPackageLegacy](const auto& package) {
-                return opPackageLegacy == package; }) == packages.end()) {
-            ALOGW("The package name(%s) provided does not correspond to the uid %d, "
-                  "force muting the track", opPackageNameStr.value().c_str(), uid);
-            // Set null package name so hasOpPlayAudio will always return false.
-            opPackageNameStr = std::optional<std::string>();
-        }
-    }
-    Identity adjIdentity = identity;
-    adjIdentity.packageName = opPackageNameStr;
-    return new OpPlayAudioMonitor(adjIdentity, attr.usage, id);
+    Identity checkedIdentity = AudioFlinger::checkIdentityPackage(identity);
+    return new OpPlayAudioMonitor(checkedIdentity, attr.usage, id);
 }
 
 AudioFlinger::PlaybackThread::OpPlayAudioMonitor::OpPlayAudioMonitor(
@@ -1219,6 +1196,9 @@
             mState = PAUSING;
             ALOGV("%s(%d): ACTIVE/RESUMING => PAUSING on thread %d",
                     __func__, mId, (int)mThreadIoHandle);
+            if (isOffloadedOrDirect()) {
+                mPauseHwPending = true;
+            }
             playbackThread->broadcast_l();
             break;
 
@@ -1306,6 +1286,11 @@
     mFlushHwPending = false;
 }
 
+void AudioFlinger::PlaybackThread::Track::pauseAck()
+{
+    mPauseHwPending = false;
+}
+
 void AudioFlinger::PlaybackThread::Track::reset()
 {
     // Do not reset twice to avoid discarding data written just after a flush and before
@@ -2235,24 +2220,12 @@
         return nullptr;
     }
 
-    if (!identity.packageName.has_value() || identity.packageName.value().size() == 0) {
-        Vector<String16> packages;
-        // no package name, happens with SL ES clients
-        // query package manager to find one
-        PermissionController permissionController;
-        permissionController.getPackagesForUid(identity.uid, packages);
-        if (packages.isEmpty()) {
-            return nullptr;
-        } else {
-            Identity adjIdentity = identity;
-            adjIdentity.packageName =
-                VALUE_OR_FATAL(legacy2aidl_String16_string(packages[0]));
-            ALOGV("using identity:%s", adjIdentity.toString().c_str());
-            return new OpRecordAudioMonitor(adjIdentity);
-        }
+    Identity checkedIdentity = AudioFlinger::checkIdentityPackage(identity);
+    if (!checkedIdentity.packageName.has_value()
+            || checkedIdentity.packageName.value().size() == 0) {
+        return nullptr;
     }
-
-    return new OpRecordAudioMonitor(identity);
+    return new OpRecordAudioMonitor(checkedIdentity);
 }
 
 AudioFlinger::RecordThread::OpRecordAudioMonitor::OpRecordAudioMonitor(
@@ -2378,6 +2351,12 @@
     return binderStatusFromStatusT(mRecordTrack->setPreferredMicrophoneFieldDimension(zoom));
 }
 
+binder::Status AudioFlinger::RecordHandle::shareAudioHistory(
+        const std::string& sharedAudioPackageName, int64_t sharedAudioStartMs) {
+    return binderStatusFromStatusT(
+            mRecordTrack->shareAudioHistory(sharedAudioPackageName, sharedAudioStartMs));
+}
+
 // ----------------------------------------------------------------------------
 #undef LOG_TAG
 #define LOG_TAG "AF::RecordTrack"
@@ -2398,7 +2377,8 @@
             const Identity& identity,
             audio_input_flags_t flags,
             track_type type,
-            audio_port_handle_t portId)
+            audio_port_handle_t portId,
+            int64_t startTimeMs)
     :   TrackBase(thread, client, attr, sampleRate, format,
                   channelMask, frameCount, buffer, bufferSize, sessionId,
                   creatorPid,
@@ -2415,7 +2395,8 @@
         mRecordBufferConverter(NULL),
         mFlags(flags),
         mSilenced(false),
-        mOpRecordAudioMonitor(OpRecordAudioMonitor::createIfNeeded(identity, attr))
+        mOpRecordAudioMonitor(OpRecordAudioMonitor::createIfNeeded(identity, attr)),
+        mStartTimeMs(startTimeMs)
 {
     if (mCblk == NULL) {
         return;
@@ -2525,6 +2506,9 @@
             Mutex::Autolock _l(thread->mLock);
             RecordThread *recordThread = (RecordThread *) thread.get();
             priorState = mState;
+            if (!mSharedAudioPackageName.empty()) {
+                recordThread->shareAudioHistory_l("");
+            }
             recordThread->destroyTrack_l(this); // move mState to STOPPED, terminate
         }
         // APM portid/client management done outside of lock.
@@ -2711,6 +2695,37 @@
     }
 }
 
+status_t AudioFlinger::RecordThread::RecordTrack::shareAudioHistory(
+        const std::string& sharedAudioPackageName, int64_t sharedAudioStartMs) {
+
+    const uid_t callingUid = IPCThreadState::self()->getCallingUid();
+    const pid_t callingPid = IPCThreadState::self()->getCallingPid();
+    if (callingUid != mUid || callingPid != mCreatorPid) {
+        return PERMISSION_DENIED;
+    }
+
+    Identity identity{};
+    identity.uid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(callingUid));
+    identity.pid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(callingPid));
+    if (!captureHotwordAllowed(identity)) {
+        return PERMISSION_DENIED;
+    }
+
+    sp<ThreadBase> thread = mThread.promote();
+    if (thread != 0) {
+        RecordThread *recordThread = (RecordThread *)thread.get();
+        status_t status = recordThread->shareAudioHistory(
+                sharedAudioPackageName, mSessionId, sharedAudioStartMs);
+        if (status == NO_ERROR) {
+            mSharedAudioPackageName = sharedAudioPackageName;
+        }
+        return status;
+    } else {
+        return BAD_VALUE;
+    }
+}
+
+
 // ----------------------------------------------------------------------------
 #undef LOG_TAG
 #define LOG_TAG "AF::PatchRecord"
diff --git a/services/audiopolicy/common/include/policy.h b/services/audiopolicy/common/include/policy.h
index 0537365..552919d 100644
--- a/services/audiopolicy/common/include/policy.h
+++ b/services/audiopolicy/common/include/policy.h
@@ -226,6 +226,8 @@
             return AUDIO_DEVICE_OUT_SPEAKER_SAFE;
         } else if (deviceTypes.count(AUDIO_DEVICE_OUT_HDMI_ARC) != 0) {
             return AUDIO_DEVICE_OUT_HDMI_ARC;
+        } else if (deviceTypes.count(AUDIO_DEVICE_OUT_HDMI_EARC) != 0) {
+            return AUDIO_DEVICE_OUT_HDMI_EARC;
         } else if (deviceTypes.count(AUDIO_DEVICE_OUT_AUX_LINE) != 0) {
             return AUDIO_DEVICE_OUT_AUX_LINE;
         } else if (deviceTypes.count(AUDIO_DEVICE_OUT_SPDIF) != 0) {
@@ -240,4 +242,4 @@
             return a2dpDevices.empty() ? AUDIO_DEVICE_NONE : a2dpDevices[0];
         }
     }
-}
\ No newline at end of file
+}
diff --git a/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp b/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp
index 562c213..84ed656 100644
--- a/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp
@@ -253,6 +253,18 @@
     // Children: ModulesTraits, VolumeTraits, SurroundSoundTraits (optional)
 };
 
+// Deleter using free() for use with std::unique_ptr<>. See also UniqueCPtr<> below.
+struct FreeDelete {
+    // NOTE: Deleting a const object is valid but free() takes a non-const pointer.
+    void operator()(const void* ptr) const {
+        free(const_cast<void*>(ptr));
+    }
+};
+
+// Alias for std::unique_ptr<> that uses the C function free() to delete objects.
+template <typename T>
+using UniqueCPtr = std::unique_ptr<T, FreeDelete>;
+
 template <class T>
 constexpr void (*xmlDeleter)(T* t);
 template <>
@@ -608,7 +620,7 @@
     }
     // Tokenize and Convert Sources name to port pointer
     PolicyAudioPortVector sources;
-    std::unique_ptr<char[]> sourcesLiteral{strndup(
+    UniqueCPtr<char> sourcesLiteral{strndup(
                 sourcesAttr.c_str(), strlen(sourcesAttr.c_str()))};
     char *devTag = strtok(sourcesLiteral.get(), ",");
     while (devTag != NULL) {
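The FreeDelete/UniqueCPtr pair introduced above is the usual way to put malloc()-allocated memory, here the strndup() copy that strtok() mutates, under RAII without mixing free() and delete. A tiny self-contained usage sketch using only the C and C++ standard libraries:

#include <cstdio>
#include <cstdlib>
#include <cstring>
#include <memory>

// Deleter that releases malloc()/strdup() memory with free().
struct FreeDelete {
    void operator()(const void* ptr) const { free(const_cast<void*>(ptr)); }
};
template <typename T>
using UniqueCPtr = std::unique_ptr<T, FreeDelete>;

int main() {
    const char* sources = "speaker,wired_headset,bt_a2dp";
    // strtok() needs a writable copy; strndup() allocates with malloc(), so it
    // must be released with free() - which UniqueCPtr guarantees on scope exit.
    UniqueCPtr<char> copy{strndup(sources, strlen(sources))};
    for (char* tag = strtok(copy.get(), ","); tag != nullptr; tag = strtok(nullptr, ",")) {
        std::printf("source: %s\n", tag);
    }
    return 0;
}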
diff --git a/services/audiopolicy/enginedefault/src/Engine.cpp b/services/audiopolicy/enginedefault/src/Engine.cpp
index 27f89e3..edcdf5a 100644
--- a/services/audiopolicy/enginedefault/src/Engine.cpp
+++ b/services/audiopolicy/enginedefault/src/Engine.cpp
@@ -196,7 +196,7 @@
             if (desc->isActive() && !audio_is_linear_pcm(desc->getFormat())) {
                 availableOutputDevices.remove(desc->devices().getDevicesFromTypes({
                         AUDIO_DEVICE_OUT_HDMI, AUDIO_DEVICE_OUT_SPDIF,
-                        AUDIO_DEVICE_OUT_HDMI_ARC}));
+                        AUDIO_DEVICE_OUT_HDMI_ARC, AUDIO_DEVICE_OUT_HDMI_EARC}));
             }
         }
         } break;
@@ -366,7 +366,9 @@
         if (strategy == STRATEGY_MEDIA) {
             // ARC, SPDIF and AUX_LINE can co-exist with others.
             devices3 = availableOutputDevices.getDevicesFromTypes({
-                    AUDIO_DEVICE_OUT_HDMI_ARC, AUDIO_DEVICE_OUT_SPDIF, AUDIO_DEVICE_OUT_AUX_LINE});
+                    AUDIO_DEVICE_OUT_HDMI_ARC, AUDIO_DEVICE_OUT_HDMI_EARC,
+                    AUDIO_DEVICE_OUT_SPDIF, AUDIO_DEVICE_OUT_AUX_LINE,
+                    });
         }
 
         devices2.add(devices3);
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index dd44c54..485188a 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -2229,7 +2229,8 @@
             // Prevent from storing invalid requested device id in clients
             requestedDeviceId = AUDIO_PORT_HANDLE_NONE;
             device = mEngine->getInputDeviceForAttributes(attributes, uid, &policyMix);
-            ALOGV("%s found device type is 0x%X", __FUNCTION__, device->type());
+            ALOGV_IF(device != nullptr, "%s found device type is 0x%X",
+                __FUNCTION__, device->type());
         }
         if (device == nullptr) {
             ALOGW("getInputForAttr() could not find device for source %d", attributes.source);
@@ -2347,6 +2348,21 @@
         return input;
     }
 
+    // Reuse an already opened input if a client with the same session ID already exists
+    // on that input
+    for (size_t i = 0; i < mInputs.size(); i++) {
+        sp <AudioInputDescriptor> desc = mInputs.valueAt(i);
+        if (desc->mProfile != profile) {
+            continue;
+        }
+        RecordClientVector clients = desc->clientsList();
+        for (const auto &client : clients) {
+            if (session == client->session()) {
+                return desc->mIoHandle;
+            }
+        }
+    }
+
     if (!profile->canOpenNewIo()) {
         for (size_t i = 0; i < mInputs.size(); ) {
             sp<AudioInputDescriptor> desc = mInputs.valueAt(i);
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index 639fa58..551013f 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -197,6 +197,7 @@
     mAudioPolicyManager->setPhoneState(state);
     mPhoneState = state;
     mPhoneStateOwnerUid = uid;
+    updateUidStates_l();
     return Status::ok();
 }
 
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index 4ffa9cc..b5eb98f 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -537,35 +537,34 @@
 {
 //    Go over all active clients and allow capture (does not force silence) in the
 //    following cases:
-//    The client source is virtual (remote submix, call audio TX or RX...)
-//    OR The user the client is running in has microphone sensor privacy disabled
-//        AND The client is the assistant
-//                AND an accessibility service is on TOP or a RTT call is active
-//                        AND the source is VOICE_RECOGNITION or HOTWORD
-//                    OR uses VOICE_RECOGNITION AND is on TOP
-//                        OR uses HOTWORD
-//                    AND there is no active privacy sensitive capture or call
-//                        OR client has CAPTURE_AUDIO_OUTPUT privileged permission
-//            OR The client is an accessibility service
-//                AND Is on TOP
-//                        AND the source is VOICE_RECOGNITION or HOTWORD
-//                    OR The assistant is not on TOP
-//                        AND there is no active privacy sensitive capture or call
-//                            OR client has CAPTURE_AUDIO_OUTPUT privileged permission
-//                AND is on TOP
+//    The client is the assistant
+//        AND an accessibility service is on TOP or a RTT call is active
 //                AND the source is VOICE_RECOGNITION or HOTWORD
-//            OR the client source is HOTWORD
-//                AND is on TOP
-//                    OR all active clients are using HOTWORD source
-//                AND no call is active
-//                    OR client has CAPTURE_AUDIO_OUTPUT privileged permission
-//            OR the client is the current InputMethodService
-//                AND a RTT call is active AND the source is VOICE_RECOGNITION
-//            OR Any client
-//                AND The assistant is not on TOP
-//                AND is on TOP or latest started
+//            OR uses VOICE_RECOGNITION AND is on TOP
+//                OR uses HOTWORD
+//            AND there is no active privacy sensitive capture or call
+//                OR client has CAPTURE_AUDIO_OUTPUT privileged permission
+//    OR The client is an accessibility service
+//        AND Is on TOP
+//                AND the source is VOICE_RECOGNITION or HOTWORD
+//            OR The assistant is not on TOP
 //                AND there is no active privacy sensitive capture or call
 //                    OR client has CAPTURE_AUDIO_OUTPUT privileged permission
+//        AND is on TOP
+//        AND the source is VOICE_RECOGNITION or HOTWORD
+//    OR the client source is virtual (remote submix, call audio TX or RX...)
+//    OR the client source is HOTWORD
+//        AND is on TOP
+//            OR all active clients are using HOTWORD source
+//        AND no call is active
+//            OR client has CAPTURE_AUDIO_OUTPUT privileged permission
+//    OR the client is the current InputMethodService
+//        AND a RTT call is active AND the source is VOICE_RECOGNITION
+//    OR Any client
+//        AND The assistant is not on TOP
+//        AND is on TOP or latest started
+//        AND there is no active privacy sensitive capture or call
+//            OR client has CAPTURE_AUDIO_OUTPUT privileged permission
 
 
     sp<AudioRecordClient> topActive;
@@ -596,8 +595,7 @@
     for (size_t i =0; i < mAudioRecordClients.size(); i++) {
         sp<AudioRecordClient> current = mAudioRecordClients[i];
         uid_t currentUid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(current->identity.uid));
-        if (!current->active || (!isVirtualSource(current->attributes.source)
-                && isUserSensorPrivacyEnabledForUid(currentUid))) {
+        if (!current->active) {
             continue;
         }
 
@@ -734,9 +732,6 @@
         if (isVirtualSource(source)) {
             // Allow capture for virtual (remote submix, call audio TX or RX...) sources
             allowCapture = true;
-        } else if (isUserSensorPrivacyEnabledForUid(currentUid)) {
-            // If sensor privacy is enabled, don't allow capture
-            allowCapture = false;
         } else if (mUidPolicy->isAssistantUid(currentUid)) {
             // For assistant allow capture if:
             //     An accessibility service is on TOP or a RTT call is active
@@ -1145,16 +1140,6 @@
     return NO_INIT;
 }
 
-bool AudioPolicyService::isUserSensorPrivacyEnabledForUid(uid_t uid) {
-    userid_t userId = multiuser_get_user_id(uid);
-    if (mMicrophoneSensorPrivacyPolicies.find(userId) == mMicrophoneSensorPrivacyPolicies.end()) {
-        sp<SensorPrivacyPolicy> userPolicy = new SensorPrivacyPolicy(this);
-        userPolicy->registerSelfForMicrophoneOnly(userId);
-        mMicrophoneSensorPrivacyPolicies[userId] = userPolicy;
-    }
-    return mMicrophoneSensorPrivacyPolicies[userId]->isSensorPrivacyEnabled();
-}
-
 status_t AudioPolicyService::printHelp(int out) {
     return dprintf(out, "Audio policy service commands:\n"
         "  get-uid-state <PACKAGE> [--user USER_ID] gets the uid state\n"
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index 145ba06..00d9670 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -346,8 +346,6 @@
     status_t validateUsage(audio_usage_t usage);
     status_t validateUsage(audio_usage_t usage, const media::permission::Identity& identity);
 
-    bool isUserSensorPrivacyEnabledForUid(uid_t uid);
-
     void updateUidStates();
     void updateUidStates_l() REQUIRES(mLock);
 
@@ -908,8 +906,6 @@
     void *mLibraryHandle = nullptr;
     CreateAudioPolicyManagerInstance mCreateAudioPolicyManager;
     DestroyAudioPolicyManagerInstance mDestroyAudioPolicyManager;
-
-    std::map<userid_t, sp<SensorPrivacyPolicy>> mMicrophoneSensorPrivacyPolicies;
 };
 
 } // namespace android
diff --git a/services/camera/libcameraservice/Android.bp b/services/camera/libcameraservice/Android.bp
index 32c0267..07c889b 100644
--- a/services/camera/libcameraservice/Android.bp
+++ b/services/camera/libcameraservice/Android.bp
@@ -114,6 +114,7 @@
         "libutils",
         "libbinder",
         "libactivitymanager_aidl",
+        "libpermission",
         "libcutils",
         "libmedia",
         "libmediautils",
@@ -163,6 +164,7 @@
     export_shared_lib_headers: [
         "libbinder",
         "libactivitymanager_aidl",
+        "libpermission",
         "libcamera_client",
         "libfmq",
         "libsensorprivacy",
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index eb24a93..b0f386d 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -29,6 +29,7 @@
 #include <inttypes.h>
 #include <pthread.h>
 
+#include <android/content/pm/IPackageManagerNative.h>
 #include <android/hardware/ICamera.h>
 #include <android/hardware/ICameraClient.h>
 
@@ -145,6 +146,7 @@
 
 void CameraService::onFirstRef()
 {
+
     ALOGI("CameraService process starting");
 
     BnCameraService::onFirstRef();
@@ -242,10 +244,6 @@
     VendorTagDescriptor::clearGlobalVendorTagDescriptor();
     mUidPolicy->unregisterSelf();
     mSensorPrivacyPolicy->unregisterSelf();
-
-    for (auto const& [_, policy] : mCameraSensorPrivacyPolicies) {
-        policy->unregisterSelf();
-    }
 }
 
 void CameraService::onNewProviderRegistered() {
@@ -755,6 +753,10 @@
     return Status::ok();
 }
 
+void CameraService::clearCachedVariables() {
+    BasicClient::BasicClient::sCameraService = nullptr;
+}
+
 int CameraService::getDeviceVersion(const String8& cameraId, int* facing, int* orientation) {
     ATRACE_CALL();
 
@@ -800,8 +802,8 @@
 Status CameraService::makeClient(const sp<CameraService>& cameraService,
         const sp<IInterface>& cameraCb, const String16& packageName,
         const std::optional<String16>& featureId,  const String8& cameraId,
-        int api1CameraId, int facing, int clientPid, uid_t clientUid, int servicePid,
-        int deviceVersion, apiLevel effectiveApiLevel,
+        int api1CameraId, int facing, int sensorOrientation, int clientPid, uid_t clientUid,
+        int servicePid, int deviceVersion, apiLevel effectiveApiLevel,
         /*out*/sp<BasicClient>* client) {
 
     // Create CameraClient based on device version reported by the HAL.
@@ -824,13 +826,13 @@
                 sp<ICameraClient> tmp = static_cast<ICameraClient*>(cameraCb.get());
                 *client = new Camera2Client(cameraService, tmp, packageName, featureId,
                         cameraId, api1CameraId,
-                        facing, clientPid, clientUid,
+                        facing, sensorOrientation, clientPid, clientUid,
                         servicePid);
             } else { // Camera2 API route
                 sp<hardware::camera2::ICameraDeviceCallbacks> tmp =
                         static_cast<hardware::camera2::ICameraDeviceCallbacks*>(cameraCb.get());
                 *client = new CameraDeviceClient(cameraService, tmp, packageName, featureId,
-                        cameraId, facing, clientPid, clientUid, servicePid);
+                        cameraId, facing, sensorOrientation, clientPid, clientUid, servicePid);
             }
             break;
         default:
@@ -1645,7 +1647,7 @@
 
         sp<BasicClient> tmp = nullptr;
         if(!(ret = makeClient(this, cameraCb, clientPackageName, clientFeatureId,
-                cameraId, api1CameraId, facing,
+                cameraId, api1CameraId, facing, orientation,
                 clientPid, clientUid, getpid(),
                 deviceVersion, effectiveApiLevel,
                 /*out*/&tmp)).isOk()) {
@@ -1711,8 +1713,9 @@
 
         // Set camera muting behavior
         if (client->supportsCameraMute()) {
-            client->setCameraMute(mOverrideCameraMuteMode ||
-                    isUserSensorPrivacyEnabledForUid(clientUid));
+            bool isCameraPrivacyEnabled =
+                    mSensorPrivacyPolicy->isCameraPrivacyEnabled(multiuser_get_user_id(clientUid));
+            client->setCameraMute(mOverrideCameraMuteMode || isCameraPrivacyEnabled);
         }
 
         if (shimUpdateOnly) {
@@ -2030,7 +2033,50 @@
     return Status::ok();
 }
 
- Status CameraService::getConcurrentCameraIds(
+Status CameraService::notifyDisplayConfigurationChange() {
+    ATRACE_CALL();
+    const int callingPid = CameraThreadState::getCallingPid();
+    const int selfPid = getpid();
+
+    // Permission checks
+    if (callingPid != selfPid) {
+        // Ensure we're being called by system_server, or similar process with
+        // permissions to notify the camera service about system events
+        if (!checkCallingPermission(sCameraSendSystemEventsPermission)) {
+            const int uid = CameraThreadState::getCallingUid();
+            ALOGE("Permission Denial: cannot send updates to camera service about orientation"
+                    " changes from pid=%d, uid=%d", callingPid, uid);
+            return STATUS_ERROR_FMT(ERROR_PERMISSION_DENIED,
+                    "No permission to send updates to camera service about orientation"
+                    " changes from pid=%d, uid=%d", callingPid, uid);
+        }
+    }
+
+    Mutex::Autolock lock(mServiceLock);
+
+    // Don't do anything if rotate-and-crop override via cmd is active
+    if (mOverrideRotateAndCropMode != ANDROID_SCALER_ROTATE_AND_CROP_AUTO) return Status::ok();
+
+    const auto clients = mActiveClientManager.getAll();
+    for (auto& current : clients) {
+        if (current != nullptr) {
+            const auto basicClient = current->getValue();
+            if (basicClient.get() != nullptr) {
+                if (CameraServiceProxyWrapper::isRotateAndCropOverrideNeeded(
+                            basicClient->getPackageName(), basicClient->getCameraOrientation(),
+                            basicClient->getCameraFacing())) {
+                    basicClient->setRotateAndCropOverride(ANDROID_SCALER_ROTATE_AND_CROP_90);
+                } else {
+                    basicClient->setRotateAndCropOverride(ANDROID_SCALER_ROTATE_AND_CROP_NONE);
+                }
+            }
+        }
+    }
+
+    return Status::ok();
+}
+
+Status CameraService::getConcurrentCameraIds(
         std::vector<ConcurrentCameraIdCombination>* concurrentCameraIds) {
     ATRACE_CALL();
     if (!concurrentCameraIds) {
@@ -2113,10 +2159,15 @@
     return addListenerHelper(listener, cameraStatuses);
 }
 
+binder::Status CameraService::addListenerTest(const sp<hardware::ICameraServiceListener>& listener,
+            std::vector<hardware::CameraStatus>* cameraStatuses) {
+    return addListenerHelper(listener, cameraStatuses, false, true);
+}
+
 Status CameraService::addListenerHelper(const sp<ICameraServiceListener>& listener,
         /*out*/
         std::vector<hardware::CameraStatus> *cameraStatuses,
-        bool isVendorListener) {
+        bool isVendorListener, bool isProcessLocalTest) {
 
     ATRACE_CALL();
 
@@ -2147,7 +2198,7 @@
         sp<ServiceListener> serviceListener =
                 new ServiceListener(this, listener, clientUid, clientPid, isVendorListener,
                         openCloseCallbackAllowed);
-        auto ret = serviceListener->initialize();
+        auto ret = serviceListener->initialize(isProcessLocalTest);
         if (ret != NO_ERROR) {
             String8 msg = String8::format("Failed to initialize service listener: %s (%d)",
                     strerror(-ret), ret);
@@ -2690,13 +2741,13 @@
         const String16& clientPackageName,
         const std::optional<String16>& clientFeatureId,
         const String8& cameraIdStr,
-        int api1CameraId, int cameraFacing,
+        int api1CameraId, int cameraFacing, int sensorOrientation,
         int clientPid, uid_t clientUid,
         int servicePid) :
         CameraService::BasicClient(cameraService,
                 IInterface::asBinder(cameraClient),
                 clientPackageName, clientFeatureId,
-                cameraIdStr, cameraFacing,
+                cameraIdStr, cameraFacing, sensorOrientation,
                 clientPid, clientUid,
                 servicePid),
         mCameraId(api1CameraId)
@@ -2726,22 +2777,23 @@
 CameraService::BasicClient::BasicClient(const sp<CameraService>& cameraService,
         const sp<IBinder>& remoteCallback,
         const String16& clientPackageName, const std::optional<String16>& clientFeatureId,
-        const String8& cameraIdStr, int cameraFacing,
+        const String8& cameraIdStr, int cameraFacing, int sensorOrientation,
         int clientPid, uid_t clientUid,
         int servicePid):
-        mCameraIdStr(cameraIdStr), mCameraFacing(cameraFacing),
+        mDestructionStarted(false),
+        mCameraIdStr(cameraIdStr), mCameraFacing(cameraFacing), mOrientation(sensorOrientation),
         mClientPackageName(clientPackageName), mClientFeatureId(clientFeatureId),
         mClientPid(clientPid), mClientUid(clientUid),
         mServicePid(servicePid),
         mDisconnected(false), mUidIsTrusted(false),
         mAudioRestriction(hardware::camera2::ICameraDeviceUser::AUDIO_RESTRICTION_NONE),
-        mRemoteBinder(remoteCallback)
+        mRemoteBinder(remoteCallback),
+        mOpsActive(false),
+        mOpsStreaming(false)
 {
     if (sCameraService == nullptr) {
         sCameraService = cameraService;
     }
-    mOpsActive = false;
-    mDestructionStarted = false;
 
     // In some cases the calling code has no access to the package it runs under.
     // For example, NDK camera API.
@@ -2826,6 +2878,13 @@
     return mClientPackageName;
 }
 
+int CameraService::BasicClient::getCameraFacing() const {
+    return mCameraFacing;
+}
+
+int CameraService::BasicClient::getCameraOrientation() const {
+    return mOrientation;
+}
 
 int CameraService::BasicClient::getClientPid() const {
     return mClientPid;
@@ -2869,6 +2928,29 @@
     }
 }
 
+status_t CameraService::BasicClient::handleAppOpMode(int32_t mode) {
+    if (mode == AppOpsManager::MODE_ERRORED) {
+        ALOGI("Camera %s: Access for \"%s\" has been revoked",
+                mCameraIdStr.string(), String8(mClientPackageName).string());
+        return PERMISSION_DENIED;
+    } else if (!mUidIsTrusted && mode == AppOpsManager::MODE_IGNORED) {
+        // If the calling Uid is trusted (a native service), the AppOpsManager could
+        // return MODE_IGNORED. Do not treat such case as error.
+        bool isUidActive = sCameraService->mUidPolicy->isUidActive(mClientUid,
+                mClientPackageName);
+        bool isCameraPrivacyEnabled =
+                sCameraService->mSensorPrivacyPolicy->isCameraPrivacyEnabled(
+                    multiuser_get_user_id(mClientUid));
+        if (!isUidActive || !isCameraPrivacyEnabled) {
+            ALOGI("Camera %s: Access for \"%s\" has been restricted",
+                    mCameraIdStr.string(), String8(mClientPackageName).string());
+            // Return the same error as for device policy manager rejection
+            return -EACCES;
+        }
+    }
+    return OK;
+}
+
 status_t CameraService::BasicClient::startCameraOps() {
     ATRACE_CALL();
 
@@ -2879,26 +2961,16 @@
     if (mAppOpsManager != nullptr) {
         // Notify app ops that the camera is not available
         mOpsCallback = new OpsCallback(this);
-        int32_t res;
         mAppOpsManager->startWatchingMode(AppOpsManager::OP_CAMERA,
                 mClientPackageName, mOpsCallback);
-        res = mAppOpsManager->startOpNoThrow(AppOpsManager::OP_CAMERA, mClientUid,
-                mClientPackageName, /*startIfModeDefault*/ false, mClientFeatureId,
-                String16("start camera ") + String16(mCameraIdStr));
 
-        if (res == AppOpsManager::MODE_ERRORED) {
-            ALOGI("Camera %s: Access for \"%s\" has been revoked",
-                    mCameraIdStr.string(), String8(mClientPackageName).string());
-            return PERMISSION_DENIED;
-        }
-
-        // If the calling Uid is trusted (a native service), the AppOpsManager could
-        // return MODE_IGNORED. Do not treat such case as error.
-        if (!mUidIsTrusted && res == AppOpsManager::MODE_IGNORED) {
-            ALOGI("Camera %s: Access for \"%s\" has been restricted",
-                    mCameraIdStr.string(), String8(mClientPackageName).string());
-            // Return the same error as for device policy manager rejection
-            return -EACCES;
+        // Just check for camera access here on open - delay startOp until
+        // camera frames start streaming in startCameraStreamingOps
+        int32_t mode = mAppOpsManager->checkOp(AppOpsManager::OP_CAMERA, mClientUid,
+                mClientPackageName);
+        status_t res = handleAppOpMode(mode);
+        if (res != OK) {
+            return res;
         }
     }
 
@@ -2915,17 +2987,69 @@
     return OK;
 }
 
+status_t CameraService::BasicClient::startCameraStreamingOps() {
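+    // Promote the open-time checkOp into a tracked startOp now that frames are
+    // about to stream; appops can still reject the transition here.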
+    ATRACE_CALL();
+
+    if (!mOpsActive) {
+        ALOGE("%s: Calling streaming start when not yet active", __FUNCTION__);
+        return INVALID_OPERATION;
+    }
+    if (mOpsStreaming) {
+        ALOGV("%s: Streaming already active!", __FUNCTION__);
+        return OK;
+    }
+
+    ALOGV("%s: Start camera streaming ops, package name = %s, client UID = %d",
+            __FUNCTION__, String8(mClientPackageName).string(), mClientUid);
+
+    if (mAppOpsManager != nullptr) {
+        int32_t mode = mAppOpsManager->startOpNoThrow(AppOpsManager::OP_CAMERA, mClientUid,
+                mClientPackageName, /*startIfModeDefault*/ false, mClientFeatureId,
+                String16("start camera ") + String16(mCameraIdStr));
+        status_t res = handleAppOpMode(mode);
+        if (res != OK) {
+            return res;
+        }
+    }
+
+    mOpsStreaming = true;
+
+    return OK;
+}
+
+status_t CameraService::BasicClient::finishCameraStreamingOps() {
+    ATRACE_CALL();
+
+    if (!mOpsActive) {
+        ALOGE("%s: Calling streaming start when not yet active", __FUNCTION__);
+        return INVALID_OPERATION;
+    }
+    if (!mOpsStreaming) {
+        ALOGV("%s: Streaming not active!", __FUNCTION__);
+        return OK;
+    }
+
+    if (mAppOpsManager != nullptr) {
+        mAppOpsManager->finishOp(AppOpsManager::OP_CAMERA, mClientUid,
+                mClientPackageName, mClientFeatureId);
+        mOpsStreaming = false;
+    }
+
+    return OK;
+}
+
 status_t CameraService::BasicClient::finishCameraOps() {
     ATRACE_CALL();
 
+    if (mOpsStreaming) {
+        // Make sure we've notified everyone about camera stopping
+        finishCameraStreamingOps();
+    }
+
     // Check if startCameraOps succeeded, and if so, finish the camera op
     if (mOpsActive) {
-        // Notify app ops that the camera is available again
-        if (mAppOpsManager != nullptr) {
-            mAppOpsManager->finishOp(AppOpsManager::OP_CAMERA, mClientUid,
-                    mClientPackageName, mClientFeatureId);
-            mOpsActive = false;
-        }
+        mOpsActive = false;
+
         // This function is called when a client disconnects. This should
         // release the camera, but actually only if it was in a proper
         // functional state, i.e. with status NOT_AVAILABLE
@@ -2976,15 +3100,22 @@
         block();
     } else if (res == AppOpsManager::MODE_IGNORED) {
         bool isUidActive = sCameraService->mUidPolicy->isUidActive(mClientUid, mClientPackageName);
+        bool isCameraPrivacyEnabled =
+                sCameraService->mSensorPrivacyPolicy->isCameraPrivacyEnabled(
+                        multiuser_get_user_id(mClientUid));
         ALOGI("Camera %s: Access for \"%s\" has been restricted, isUidTrusted %d, isUidActive %d",
                 mCameraIdStr.string(), String8(mClientPackageName).string(),
                 mUidIsTrusted, isUidActive);
         // If the calling Uid is trusted (a native service), or the client Uid is active (WAR for
         // b/175320666), the AppOpsManager could return MODE_IGNORED. Do not treat such cases as
         // error.
-        if (!mUidIsTrusted && !isUidActive) {
+        if (!mUidIsTrusted && isUidActive && isCameraPrivacyEnabled) {
+            setCameraMute(true);
+        } else if (!mUidIsTrusted && !isUidActive) {
             block();
         }
+    } else if (res == AppOpsManager::MODE_ALLOWED) {
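+        // Op allowed again: fall back to the service-wide camera mute override.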
+        setCameraMute(sCameraService->mOverrideCameraMuteMode);
     }
 }
 
@@ -3257,6 +3388,7 @@
     if (mRegistered) {
         return;
     }
+    hasCameraPrivacyFeature(); // Called so the result is cached
     mSpm.addSensorPrivacyListener(this);
     mSensorPrivacyEnabled = mSpm.isSensorPrivacyEnabled();
     status_t res = mSpm.linkToDeath(this);
@@ -3266,39 +3398,6 @@
     }
 }
 
-status_t CameraService::SensorPrivacyPolicy::registerSelfForIndividual(int userId) {
-    Mutex::Autolock _l(mSensorPrivacyLock);
-    if (mRegistered) {
-        return OK;
-    }
-
-    status_t res = mSpm.addIndividualSensorPrivacyListener(userId,
-            SensorPrivacyManager::INDIVIDUAL_SENSOR_CAMERA, this);
-    if (res != OK) {
-        ALOGE("Unable to register camera privacy listener: %s (%d)", strerror(-res), res);
-        return res;
-    }
-
-    res = mSpm.isIndividualSensorPrivacyEnabled(userId,
-        SensorPrivacyManager::INDIVIDUAL_SENSOR_CAMERA, mSensorPrivacyEnabled);
-    if (res != OK) {
-        ALOGE("Unable to check camera privacy: %s (%d)", strerror(-res), res);
-        return res;
-    }
-
-    res = mSpm.linkToDeath(this);
-    if (res != OK) {
-        ALOGE("Register link to death failed for sensor privacy: %s (%d)", strerror(-res), res);
-        return res;
-    }
-
-    mRegistered = true;
-    mIsIndividual = true;
-    mUserId = userId;
-    ALOGV("SensorPrivacyPolicy: Registered with SensorPrivacyManager");
-    return OK;
-}
-
 void CameraService::SensorPrivacyPolicy::unregisterSelf() {
     Mutex::Autolock _l(mSensorPrivacyLock);
     mSpm.removeSensorPrivacyListener(this);
@@ -3312,20 +3411,24 @@
     return mSensorPrivacyEnabled;
 }
 
+bool CameraService::SensorPrivacyPolicy::isCameraPrivacyEnabled(userid_t userId) {
+    if (!hasCameraPrivacyFeature()) {
+        return false;
+    }
+    return mSpm.isIndividualSensorPrivacyEnabled(userId,
+        SensorPrivacyManager::INDIVIDUAL_SENSOR_CAMERA);
+}
+
 binder::Status CameraService::SensorPrivacyPolicy::onSensorPrivacyChanged(bool enabled) {
     {
         Mutex::Autolock _l(mSensorPrivacyLock);
         mSensorPrivacyEnabled = enabled;
     }
     // if sensor privacy is enabled then block all clients from accessing the camera
-    sp<CameraService> service = mService.promote();
-    if (service != nullptr) {
-        if (mIsIndividual) {
-            service->setMuteForAllClients(mUserId, enabled);
-        } else {
-            if (enabled) {
-                service->blockAllClients();
-            }
+    if (enabled) {
+        sp<CameraService> service = mService.promote();
+        if (service != nullptr) {
+            service->blockAllClients();
         }
     }
     return binder::Status::ok();
@@ -3337,6 +3440,31 @@
     mRegistered = false;
 }
 
+bool CameraService::SensorPrivacyPolicy::hasCameraPrivacyFeature() {
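+    // Query package manager for the camera privacy toggle feature once and
+    // cache the answer; retry only if the binder call itself failed.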
+    if (!mNeedToCheckCameraPrivacyFeature) {
+        return mHasCameraPrivacyFeature;
+    }
+    bool hasCameraPrivacyFeature = false;
+    sp<IBinder> binder = defaultServiceManager()->getService(String16("package_native"));
+    if (binder != nullptr) {
+        sp<content::pm::IPackageManagerNative> packageManager =
+                interface_cast<content::pm::IPackageManagerNative>(binder);
+        if (packageManager != nullptr) {
+            binder::Status status = packageManager->hasSystemFeature(
+                    String16("android.hardware.camera.toggle"), 0, &hasCameraPrivacyFeature);
+
+            if (status.isOk()) {
+                mNeedToCheckCameraPrivacyFeature = false;
+                mHasCameraPrivacyFeature = hasCameraPrivacyFeature;
+            } else {
+                ALOGE("Unable to check if camera privacy feature is supported");
+            }
+        }
+    }
+
+    return hasCameraPrivacyFeature;
+}
+
 // ----------------------------------------------------------------------------
 //                  CameraState
 // ----------------------------------------------------------------------------
@@ -3956,19 +4084,6 @@
     }
 }
 
-void CameraService::setMuteForAllClients(userid_t userId, bool enabled) {
-    const auto clients = mActiveClientManager.getAll();
-    for (auto& current : clients) {
-        if (current != nullptr) {
-            const auto basicClient = current->getValue();
-            if (basicClient.get() != nullptr
-                    && multiuser_get_user_id(basicClient->getClientUid()) == userId) {
-                basicClient->setCameraMute(enabled);
-            }
-        }
-    }
-}
-
 // NOTE: This is a remote API - make sure all args are validated
 status_t CameraService::shellCommand(int in, int out, int err, const Vector<String16>& args) {
     if (!checkCallingPermission(sManageCameraPermission, nullptr, nullptr)) {
@@ -4175,16 +4290,4 @@
     return mode;
 }
 
-bool CameraService::isUserSensorPrivacyEnabledForUid(uid_t uid) {
-    userid_t userId = multiuser_get_user_id(uid);
-    if (mCameraSensorPrivacyPolicies.find(userId) == mCameraSensorPrivacyPolicies.end()) {
-        sp<SensorPrivacyPolicy> userPolicy = new SensorPrivacyPolicy(this);
-        if (userPolicy->registerSelfForIndividual(userId) != OK) {
-            return false;
-        }
-        mCameraSensorPrivacyPolicies[userId] = userPolicy;
-    }
-    return mCameraSensorPrivacyPolicies[userId]->isSensorPrivacyEnabled();
-}
-
 }; // namespace android
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index 98d4500..2b4f9a2 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -167,6 +167,8 @@
 
     virtual binder::Status    notifyDeviceStateChange(int64_t newState);
 
+    virtual binder::Status    notifyDisplayConfigurationChange();
+
     // OK = supports api of that version, -EOPNOTSUPP = does not support
     virtual binder::Status    supportsCameraApi(
             const String16& cameraId, int32_t apiVersion,
@@ -188,7 +190,8 @@
 
     binder::Status      addListenerHelper(const sp<hardware::ICameraServiceListener>& listener,
             /*out*/
-            std::vector<hardware::CameraStatus>* cameraStatuses, bool isVendor = false);
+            std::vector<hardware::CameraStatus>* cameraStatuses, bool isVendor = false,
+            bool isProcessLocalTest = false);
 
     // Monitored UIDs availability notification
     void                notifyMonitoredUids();
@@ -217,6 +220,19 @@
             int* orientation = nullptr);
 
     /////////////////////////////////////////////////////////////////////
+    // Methods to be used in CameraService class tests only
+    //
+    // CameraService class test method only - clear static variables in the
+    // cameraserver process, which otherwise might affect multiple test runs.
+    void                clearCachedVariables();
+
+    // Add a test listener; linkToDeath won't be called since this is for
+    // process-local testing.
+    binder::Status    addListenerTest(const sp<hardware::ICameraServiceListener>& listener,
+            /*out*/
+            std::vector<hardware::CameraStatus>* cameraStatuses);
+
+    /////////////////////////////////////////////////////////////////////
     // Shared utilities
     static binder::Status filterGetInfoErrorCode(status_t err);
 
@@ -224,6 +240,7 @@
     // CameraClient functionality
 
     class BasicClient : public virtual RefBase {
+    friend class CameraService;
     public:
         virtual status_t       initialize(sp<CameraProviderManager> manager,
                 const String8& monitorTags) = 0;
@@ -246,6 +263,12 @@
         // Return the package name for this client
         virtual String16 getPackageName() const;
 
+        // Return the camera facing for this client
+        virtual int getCameraFacing() const;
+
+        // Return the camera orientation for this client
+        virtual int getCameraOrientation() const;
+
         // Notify client about a fatal error
         virtual void notifyError(int32_t errorCode,
                 const CaptureResultExtras& resultExtras) = 0;
@@ -292,6 +315,7 @@
                 const std::optional<String16>& clientFeatureId,
                 const String8& cameraIdStr,
                 int cameraFacing,
+                int sensorOrientation,
                 int clientPid,
                 uid_t clientUid,
                 int servicePid);
@@ -308,6 +332,7 @@
         static sp<CameraService>        sCameraService;
         const String8                   mCameraIdStr;
         const int                       mCameraFacing;
+        const int                       mOrientation;
         String16                        mClientPackageName;
         std::optional<String16>         mClientFeatureId;
         pid_t                           mClientPid;
@@ -322,9 +347,18 @@
         // - The app-side Binder interface to receive callbacks from us
         sp<IBinder>                     mRemoteBinder;   // immutable after constructor
 
-        // permissions management
+        // Permissions management methods for camera lifecycle
+
+        // Notify rest of system/apps about camera opening, and check appops
         virtual status_t                startCameraOps();
+        // Notify rest of system/apps about camera starting to stream data, and confirm appops
+        virtual status_t                startCameraStreamingOps();
+        // Notify rest of system/apps about camera stopping streaming data
+        virtual status_t                finishCameraStreamingOps();
+        // Notify rest of system/apps about camera closing
         virtual status_t                finishCameraOps();
+        // Handle errors for start/checkOps
+        virtual status_t                handleAppOpMode(int32_t mode);
 
         std::unique_ptr<AppOpsManager>  mAppOpsManager = nullptr;
 
@@ -339,9 +373,12 @@
         }; // class OpsCallback
 
         sp<OpsCallback> mOpsCallback;
-        // Track whether startCameraOps was called successfully, to avoid
-        // finishing what we didn't start.
+        // Track whether the camera op was checked successfully on camera open,
+        // to avoid finishing what we didn't start.
         bool            mOpsActive;
+        // Track whether startOps was called successfully on start of
+        // camera streaming.
+        bool            mOpsStreaming;
 
         // IAppOpsCallback interface, indirected through opListener
         virtual void opChanged(int32_t op, const String16& packageName);
@@ -385,6 +422,7 @@
                 const String8& cameraIdStr,
                 int api1CameraId,
                 int cameraFacing,
+                int sensorOrientation,
                 int clientPid,
                 uid_t clientUid,
                 int servicePid);
@@ -639,13 +677,13 @@
         public:
             explicit SensorPrivacyPolicy(wp<CameraService> service)
                     : mService(service), mSensorPrivacyEnabled(false), mRegistered(false),
-                      mIsIndividual(false), mUserId(0) {}
+                    mHasCameraPrivacyFeature(false), mNeedToCheckCameraPrivacyFeature(true) {}
 
             void registerSelf();
-            status_t registerSelfForIndividual(int userId);
             void unregisterSelf();
 
             bool isSensorPrivacyEnabled();
+            bool isCameraPrivacyEnabled(userid_t userId);
 
             binder::Status onSensorPrivacyChanged(bool enabled);
 
@@ -658,8 +696,10 @@
             Mutex mSensorPrivacyLock;
             bool mSensorPrivacyEnabled;
             bool mRegistered;
-            bool mIsIndividual;
-            userid_t mUserId;
+            bool mHasCameraPrivacyFeature;
+            bool mNeedToCheckCameraPrivacyFeature;
+
+            bool hasCameraPrivacyFeature();
     };
 
     sp<UidPolicy> mUidPolicy;
@@ -915,7 +955,10 @@
                       mIsVendorListener(isVendorClient),
                       mOpenCloseCallbackAllowed(openCloseCallbackAllowed) { }
 
-            status_t initialize() {
+            status_t initialize(bool isProcessLocalTest) {
+                if (isProcessLocalTest) {
+                    return OK;
+                }
                 return IInterface::asBinder(mListener)->linkToDeath(this);
             }
 
@@ -1035,9 +1078,6 @@
     // Blocks all active clients.
     void blockAllClients();
 
-    // Mutes all active clients for a user.
-    void setMuteForAllClients(userid_t userId, bool enabled);
-
     // Overrides the UID state as if it is idle
     status_t handleSetUidState(const Vector<String16>& args, int err);
 
@@ -1073,7 +1113,7 @@
     static binder::Status makeClient(const sp<CameraService>& cameraService,
             const sp<IInterface>& cameraCb, const String16& packageName,
             const std::optional<String16>& featureId, const String8& cameraId, int api1CameraId,
-            int facing, int clientPid, uid_t clientUid, int servicePid,
+            int facing, int sensorOrientation, int clientPid, uid_t clientUid, int servicePid,
             int deviceVersion, apiLevel effectiveApiLevel,
             /*out*/sp<BasicClient>* client);
 
@@ -1102,7 +1142,7 @@
     // Aggreated audio restriction mode for all camera clients
     int32_t mAudioRestriction;
 
-    // Current override rotate-and-crop mode
+    // Current rotate-and-crop override set via shell cmd; AUTO means no override
     uint8_t mOverrideRotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_AUTO;
 
     // Current image dump mask
@@ -1110,12 +1150,6 @@
 
     // Current camera mute mode
     bool mOverrideCameraMuteMode = false;
-
-    // Map from user to sensor privacy policy
-    std::map<userid_t, sp<SensorPrivacyPolicy>> mCameraSensorPrivacyPolicies;
-
-    // Checks if the sensor privacy is enabled for the uid
-    bool isUserSensorPrivacyEnabledForUid(uid_t uid);
 };
 
 } // namespace android
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index 31cfed6..72b3c40 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -55,11 +55,12 @@
         const String8& cameraDeviceId,
         int api1CameraId,
         int cameraFacing,
+        int sensorOrientation,
         int clientPid,
         uid_t clientUid,
         int servicePid):
         Camera2ClientBase(cameraService, cameraClient, clientPackageName, clientFeatureId,
-                cameraDeviceId, api1CameraId, cameraFacing,
+                cameraDeviceId, api1CameraId, cameraFacing, sensorOrientation,
                 clientPid, clientUid, servicePid),
         mParameters(api1CameraId, cameraFacing)
 {
diff --git a/services/camera/libcameraservice/api1/Camera2Client.h b/services/camera/libcameraservice/api1/Camera2Client.h
index 4d667e3..d16b242 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.h
+++ b/services/camera/libcameraservice/api1/Camera2Client.h
@@ -101,6 +101,7 @@
             const String8& cameraDeviceId,
             int api1CameraId,
             int cameraFacing,
+            int sensorOrientation,
             int clientPid,
             uid_t clientUid,
             int servicePid);
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index 1b65d1a..343f4a7 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -61,6 +61,7 @@
         const String8& cameraId,
         int api1CameraId,
         int cameraFacing,
+        int sensorOrientation,
         int clientPid,
         uid_t clientUid,
         int servicePid) :
@@ -70,6 +71,7 @@
             clientFeatureId,
             cameraId,
             cameraFacing,
+            sensorOrientation,
             clientPid,
             clientUid,
             servicePid),
@@ -86,12 +88,13 @@
         const std::optional<String16>& clientFeatureId,
         const String8& cameraId,
         int cameraFacing,
+        int sensorOrientation,
         int clientPid,
         uid_t clientUid,
         int servicePid) :
     Camera2ClientBase(cameraService, remoteCallback, clientPackageName, clientFeatureId,
                 cameraId, /*API1 camera ID*/ -1,
-                cameraFacing, clientPid, clientUid, servicePid),
+                cameraFacing, sensorOrientation, clientPid, clientUid, servicePid),
     mInputStream(),
     mStreamingRequestId(REQUEST_ID_NONE),
     mRequestIdCounter(0) {
@@ -1711,7 +1714,8 @@
     if (offlineSession.get() != nullptr) {
         offlineClient = new CameraOfflineSessionClient(sCameraService,
                 offlineSession, offlineCompositeStreamMap, cameraCb, mClientPackageName,
-                mClientFeatureId, mCameraIdStr, mCameraFacing, mClientPid, mClientUid, mServicePid);
+                mClientFeatureId, mCameraIdStr, mCameraFacing, mOrientation, mClientPid, mClientUid,
+                mServicePid);
         ret = sCameraService->addOfflineClient(mCameraIdStr, offlineClient);
     }
 
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index adedf92..44ffeef 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -55,6 +55,7 @@
             const String8& cameraId,
             int api1CameraId,
             int cameraFacing,
+            int sensorOrientation,
             int clientPid,
             uid_t clientUid,
             int servicePid);
@@ -181,6 +182,7 @@
             const std::optional<String16>& clientFeatureId,
             const String8& cameraId,
             int cameraFacing,
+            int sensorOrientation,
             int clientPid,
             uid_t clientUid,
             int servicePid);
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
index 6765c3b..ef15f2d 100644
--- a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
@@ -275,12 +275,17 @@
     }
 }
 
+status_t CameraOfflineSessionClient::notifyActive() {
+    return startCameraStreamingOps();
+}
+
 void CameraOfflineSessionClient::notifyIdle(
         int64_t /*requestCount*/, int64_t /*resultErrorCount*/, bool /*deviceError*/,
         const std::vector<hardware::CameraStreamStats>& /*streamStats*/) {
     if (mRemoteCallback.get() != nullptr) {
         mRemoteCallback->onDeviceIdle();
     }
+    finishCameraStreamingOps();
 }
 
 void CameraOfflineSessionClient::notifyAutoFocus(uint8_t newState, int triggerId) {
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
index 5c5fcda..b219a4c 100644
--- a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
@@ -49,13 +49,13 @@
             const sp<ICameraDeviceCallbacks>& remoteCallback,
             const String16& clientPackageName,
             const std::optional<String16>& clientFeatureId,
-            const String8& cameraIdStr, int cameraFacing,
+            const String8& cameraIdStr, int cameraFacing, int sensorOrientation,
             int clientPid, uid_t clientUid, int servicePid) :
             CameraService::BasicClient(
                     cameraService,
                     IInterface::asBinder(remoteCallback),
                     clientPackageName, clientFeatureId,
-                    cameraIdStr, cameraFacing, clientPid, clientUid, servicePid),
+                    cameraIdStr, cameraFacing, sensorOrientation, clientPid, clientUid, servicePid),
             mRemoteCallback(remoteCallback), mOfflineSession(session),
             mCompositeStreamMap(offlineCompositeStreamMap) {}
 
@@ -89,6 +89,7 @@
     // NotificationListener API
     void notifyError(int32_t errorCode, const CaptureResultExtras& resultExtras) override;
     void notifyShutter(const CaptureResultExtras& resultExtras, nsecs_t timestamp) override;
+    status_t notifyActive() override;
     void notifyIdle(int64_t requestCount, int64_t resultErrorCount, bool deviceError,
             const std::vector<hardware::CameraStreamStats>& streamStats) override;
     void notifyAutoFocus(uint8_t newState, int triggerId) override;
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
index 6fd8d45..ce479a1 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
@@ -51,11 +51,13 @@
         const String8& cameraId,
         int api1CameraId,
         int cameraFacing,
+        int sensorOrientation,
         int clientPid,
         uid_t clientUid,
         int servicePid):
         TClientBase(cameraService, remoteCallback, clientPackageName, clientFeatureId,
-                cameraId, api1CameraId, cameraFacing, clientPid, clientUid, servicePid),
+                cameraId, api1CameraId, cameraFacing, sensorOrientation, clientPid, clientUid,
+                servicePid),
         mSharedCameraCallbacks(remoteCallback),
         mDeviceVersion(cameraService->getDeviceVersion(TClientBase::mCameraIdStr)),
         mDevice(new Camera3Device(cameraId)),
@@ -248,10 +250,32 @@
 }
 
 template <typename TClientBase>
+status_t Camera2ClientBase<TClientBase>::notifyActive() {
+    if (!mDeviceActive) {
+        status_t res = TClientBase::startCameraStreamingOps();
+        if (res != OK) {
+            ALOGE("%s: Camera %s: Error starting camera streaming ops: %d", __FUNCTION__,
+                    TClientBase::mCameraIdStr.string(), res);
+            return res;
+        }
+        CameraServiceProxyWrapper::logActive(TClientBase::mCameraIdStr);
+    }
+    mDeviceActive = true;
+
+    ALOGV("Camera device is now active");
+    return OK;
+}
+
+template <typename TClientBase>
 void Camera2ClientBase<TClientBase>::notifyIdle(
         int64_t requestCount, int64_t resultErrorCount, bool deviceError,
         const std::vector<hardware::CameraStreamStats>& streamStats) {
     if (mDeviceActive) {
+        status_t res = TClientBase::finishCameraStreamingOps();
+        if (res != OK) {
+            ALOGE("%s: Camera %s: Error finishing streaming ops: %d", __FUNCTION__,
+                    TClientBase::mCameraIdStr.string(), res);
+        }
         CameraServiceProxyWrapper::logIdle(TClientBase::mCameraIdStr,
                 requestCount, resultErrorCount, deviceError, streamStats);
     }
@@ -266,11 +290,6 @@
     (void)resultExtras;
     (void)timestamp;
 
-    if (!mDeviceActive) {
-        CameraServiceProxyWrapper::logActive(TClientBase::mCameraIdStr);
-    }
-    mDeviceActive = true;
-
     ALOGV("%s: Shutter notification for request id %" PRId32 " at time %" PRId64,
             __FUNCTION__, resultExtras.requestId, timestamp);
 }
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.h b/services/camera/libcameraservice/common/Camera2ClientBase.h
index 1ce4393..b3a38a2 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.h
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.h
@@ -52,6 +52,7 @@
                       const String8& cameraId,
                       int api1CameraId,
                       int cameraFacing,
+                      int sensorOrientation,
                       int clientPid,
                       uid_t clientUid,
                       int servicePid);
@@ -66,6 +67,7 @@
 
     virtual void          notifyError(int32_t errorCode,
                                       const CaptureResultExtras& resultExtras);
+    virtual status_t      notifyActive();  // Returns errors on app ops permission failures
     virtual void          notifyIdle(int64_t requestCount, int64_t resultErrorCount,
                                      bool deviceError,
                                      const std::vector<hardware::CameraStreamStats>& streamStats);
diff --git a/services/camera/libcameraservice/common/CameraOfflineSessionBase.h b/services/camera/libcameraservice/common/CameraOfflineSessionBase.h
index e02e146..54e42a6 100644
--- a/services/camera/libcameraservice/common/CameraOfflineSessionBase.h
+++ b/services/camera/libcameraservice/common/CameraOfflineSessionBase.h
@@ -40,10 +40,11 @@
     // Required for API 1 and 2
     virtual void notifyError(int32_t errorCode,
                              const CaptureResultExtras &resultExtras) = 0;
-
-    // Required only for API2
+    virtual status_t notifyActive() = 0; // May return an error since it checks appops
     virtual void notifyIdle(int64_t requestCount, int64_t resultError, bool deviceError,
             const std::vector<hardware::CameraStreamStats>& streamStats) = 0;
+
+    // Required only for API2
     virtual void notifyShutter(const CaptureResultExtras &resultExtras,
             nsecs_t timestamp) = 0;
     virtual void notifyPrepared(int streamId) = 0;
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.cpp b/services/camera/libcameraservice/common/CameraProviderManager.cpp
index 8942d05..6dffc5d 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.cpp
+++ b/services/camera/libcameraservice/common/CameraProviderManager.cpp
@@ -474,12 +474,12 @@
 hardware::Return<void> CameraProviderManager::onRegistration(
         const hardware::hidl_string& /*fqName*/,
         const hardware::hidl_string& name,
-        bool /*preexisting*/) {
+        bool preexisting) {
     std::lock_guard<std::mutex> providerLock(mProviderLifecycleLock);
     {
         std::lock_guard<std::mutex> lock(mInterfaceMutex);
 
-        addProviderLocked(name);
+        addProviderLocked(name, preexisting);
     }
 
     sp<StatusListener> listener = getStatusListener();
@@ -1230,33 +1230,53 @@
     return falseRet;
 }
 
-status_t CameraProviderManager::addProviderLocked(const std::string& newProvider) {
-    for (const auto& providerInfo : mProviders) {
-        if (providerInfo->mProviderName == newProvider) {
-            ALOGW("%s: Camera provider HAL with name '%s' already registered", __FUNCTION__,
-                    newProvider.c_str());
-            return ALREADY_EXISTS;
-        }
-    }
-
+status_t CameraProviderManager::tryToInitializeProviderLocked(
+        const std::string& providerName, const sp<ProviderInfo>& providerInfo) {
     sp<provider::V2_4::ICameraProvider> interface;
-    interface = mServiceProxy->tryGetService(newProvider);
+    interface = mServiceProxy->tryGetService(providerName);
 
     if (interface == nullptr) {
         // The interface may not be started yet. In that case, this is not a
         // fatal error.
         ALOGW("%s: Camera provider HAL '%s' is not actually available", __FUNCTION__,
-                newProvider.c_str());
+                providerName.c_str());
         return BAD_VALUE;
     }
 
-    sp<ProviderInfo> providerInfo = new ProviderInfo(newProvider, this);
-    status_t res = providerInfo->initialize(interface, mDeviceState);
-    if (res != OK) {
-        return res;
+    return providerInfo->initialize(interface, mDeviceState);
+}
+
+status_t CameraProviderManager::addProviderLocked(const std::string& newProvider,
+        bool preexisting) {
+    // Several camera provider instances can be temporarily present.
+    // Defer initialization of a new instance until the older instance is properly removed.
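+    // Instance names get a monotonically increasing "-<id>" suffix so that
+    // removeProvider() can distinguish the dying instance from its replacement.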
+    auto providerInstance = newProvider + "-" + std::to_string(mProviderInstanceId);
+    bool providerPresent = false;
+    for (const auto& providerInfo : mProviders) {
+        if (providerInfo->mProviderName == newProvider) {
+            ALOGW("%s: Camera provider HAL with name '%s' already registered",
+                    __FUNCTION__, newProvider.c_str());
+            if (preexisting) {
+                return ALREADY_EXISTS;
+            } else {
+                ALOGW("%s: The new provider instance will get initialized immediately after the"
+                        " currently present instance is removed!", __FUNCTION__);
+                providerPresent = true;
+                break;
+            }
+        }
+    }
+
+    sp<ProviderInfo> providerInfo = new ProviderInfo(newProvider, providerInstance, this);
+    if (!providerPresent) {
+        status_t res = tryToInitializeProviderLocked(newProvider, providerInfo);
+        if (res != OK) {
+            return res;
+        }
     }
 
     mProviders.push_back(providerInfo);
+    mProviderInstanceId++;
 
     return OK;
 }
@@ -1266,12 +1286,14 @@
     std::unique_lock<std::mutex> lock(mInterfaceMutex);
     std::vector<String8> removedDeviceIds;
     status_t res = NAME_NOT_FOUND;
+    std::string removedProviderName;
     for (auto it = mProviders.begin(); it != mProviders.end(); it++) {
-        if ((*it)->mProviderName == provider) {
+        if ((*it)->mProviderInstance == provider) {
             removedDeviceIds.reserve((*it)->mDevices.size());
             for (auto& deviceInfo : (*it)->mDevices) {
                 removedDeviceIds.push_back(String8(deviceInfo->mId.c_str()));
             }
+            removedProviderName = (*it)->mProviderName;
             mProviders.erase(it);
             res = OK;
             break;
@@ -1281,6 +1303,14 @@
         ALOGW("%s: Camera provider HAL with name '%s' is not registered", __FUNCTION__,
                 provider.c_str());
     } else {
+        // Check if there are any newer camera instances from the same provider and try to
+        // initialize.
+        for (const auto& providerInfo : mProviders) {
+            if (providerInfo->mProviderName == removedProviderName) {
+                return tryToInitializeProviderLocked(removedProviderName, providerInfo);
+            }
+        }
+
         // Inform camera service of loss of presence for all the devices from this provider,
         // without lock held for reentrancy
         sp<StatusListener> listener = getStatusListener();
@@ -1289,7 +1319,9 @@
             for (auto& id : removedDeviceIds) {
                 listener->onDeviceStatusChanged(id, CameraDeviceStatus::NOT_PRESENT);
             }
+            lock.lock();
         }
+
     }
     return res;
 }
@@ -1303,8 +1335,10 @@
 
 CameraProviderManager::ProviderInfo::ProviderInfo(
         const std::string &providerName,
+        const std::string &providerInstance,
         CameraProviderManager *manager) :
         mProviderName(providerName),
+        mProviderInstance(providerInstance),
         mProviderTagid(generateVendorTagId(providerName)),
         mUniqueDeviceCount(0),
         mManager(manager) {
@@ -1628,7 +1662,7 @@
 
 status_t CameraProviderManager::ProviderInfo::dump(int fd, const Vector<String16>&) const {
     dprintf(fd, "== Camera Provider HAL %s (v2.%d, %s) static info: %zu devices: ==\n",
-            mProviderName.c_str(),
+            mProviderInstance.c_str(),
             mMinorVersion,
             mIsRemote ? "remote" : "passthrough",
             mDevices.size());
@@ -1944,12 +1978,12 @@
 void CameraProviderManager::ProviderInfo::serviceDied(uint64_t cookie,
         const wp<hidl::base::V1_0::IBase>& who) {
     (void) who;
-    ALOGI("Camera provider '%s' has died; removing it", mProviderName.c_str());
+    ALOGI("Camera provider '%s' has died; removing it", mProviderInstance.c_str());
     if (cookie != mId) {
         ALOGW("%s: Unexpected serviceDied cookie %" PRIu64 ", expected %" PRIu32,
                 __FUNCTION__, cookie, mId);
     }
-    mManager->removeProvider(mProviderName);
+    mManager->removeProvider(mProviderInstance);
 }
 
 status_t CameraProviderManager::ProviderInfo::setUpVendorTags() {
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.h b/services/camera/libcameraservice/common/CameraProviderManager.h
index 12bda9b..5531dd7 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.h
+++ b/services/camera/libcameraservice/common/CameraProviderManager.h
@@ -365,6 +365,7 @@
             virtual public hardware::hidl_death_recipient
     {
         const std::string mProviderName;
+        const std::string mProviderInstance;
         const metadata_vendor_id_t mProviderTagid;
         int mMinorVersion;
         sp<VendorTagDescriptor> mVendorTagDescriptor;
@@ -379,7 +380,7 @@
 
         sp<hardware::camera::provider::V2_4::ICameraProvider> mSavedInterface;
 
-        ProviderInfo(const std::string &providerName,
+        ProviderInfo(const std::string &providerName, const std::string &providerInstance,
                 CameraProviderManager *manager);
         ~ProviderInfo();
 
@@ -657,7 +658,10 @@
             hardware::hidl_version minVersion = hardware::hidl_version{0,0},
             hardware::hidl_version maxVersion = hardware::hidl_version{1000,0}) const;
 
-    status_t addProviderLocked(const std::string& newProvider);
+    status_t addProviderLocked(const std::string& newProvider, bool preexisting = false);
+
+    status_t tryToInitializeProviderLocked(const std::string& providerName,
+            const sp<ProviderInfo>& providerInfo);
 
     bool isLogicalCameraLocked(const std::string& id, std::vector<std::string>* physicalCameraIds);
 
@@ -666,6 +670,7 @@
 
     bool isValidDeviceLocked(const std::string &id, uint16_t majorVersion) const;
 
+    size_t mProviderInstanceId = 0;
     std::vector<sp<ProviderInfo>> mProviders;
 
     void addProviderToMap(
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index bf7e597..d93b9e5 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -2189,31 +2189,40 @@
         std::lock_guard<std::mutex> l(mOutputLock);
         listener = mListener.promote();
     }
-    if (idle && listener != NULL) {
-        // Get session stats from the builder, and notify the listener.
-        int64_t requestCount, resultErrorCount;
-        bool deviceError;
-        std::map<int, StreamStats> streamStatsMap;
-        mSessionStatsBuilder.buildAndReset(&requestCount, &resultErrorCount,
-                &deviceError, &streamStatsMap);
-        for (size_t i = 0; i < streamIds.size(); i++) {
-            int streamId = streamIds[i];
-            auto stats = streamStatsMap.find(streamId);
-            if (stats != streamStatsMap.end()) {
-                streamStats[i].mRequestCount = stats->second.mRequestedFrameCount;
-                streamStats[i].mErrorCount = stats->second.mDroppedFrameCount;
-                streamStats[i].mStartLatencyMs = stats->second.mStartLatencyMs;
-                streamStats[i].mHistogramType =
-                        hardware::CameraStreamStats::HISTOGRAM_TYPE_CAPTURE_LATENCY;
-                streamStats[i].mHistogramBins.assign(
-                        stats->second.mCaptureLatencyBins.begin(),
-                        stats->second.mCaptureLatencyBins.end());
-                streamStats[i].mHistogramCounts.assign(
-                        stats->second.mCaptureLatencyHistogram.begin(),
-                        stats->second.mCaptureLatencyHistogram.end());
+    status_t res = OK;
+    if (listener != nullptr) {
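+        // Forward the idle/active transition to the status listener; notifyActive()
+        // can fail if the client has lost the camera app op mid-session.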
+        if (idle) {
+            // Get session stats from the builder, and notify the listener.
+            int64_t requestCount, resultErrorCount;
+            bool deviceError;
+            std::map<int, StreamStats> streamStatsMap;
+            mSessionStatsBuilder.buildAndReset(&requestCount, &resultErrorCount,
+                    &deviceError, &streamStatsMap);
+            for (size_t i = 0; i < streamIds.size(); i++) {
+                int streamId = streamIds[i];
+                auto stats = streamStatsMap.find(streamId);
+                if (stats != streamStatsMap.end()) {
+                    streamStats[i].mRequestCount = stats->second.mRequestedFrameCount;
+                    streamStats[i].mErrorCount = stats->second.mDroppedFrameCount;
+                    streamStats[i].mStartLatencyMs = stats->second.mStartLatencyMs;
+                    streamStats[i].mHistogramType =
+                            hardware::CameraStreamStats::HISTOGRAM_TYPE_CAPTURE_LATENCY;
+                    streamStats[i].mHistogramBins.assign(
+                            stats->second.mCaptureLatencyBins.begin(),
+                            stats->second.mCaptureLatencyBins.end());
+                    streamStats[i].mHistogramCounts.assign(
+                           stats->second.mCaptureLatencyHistogram.begin(),
+                           stats->second.mCaptureLatencyHistogram.end());
+                }
             }
+            listener->notifyIdle(requestCount, resultErrorCount, deviceError, streamStats);
+        } else {
+            res = listener->notifyActive();
         }
-        listener->notifyIdle(requestCount, resultErrorCount, deviceError, streamStats);
+    }
+    if (res != OK) {
+        SET_ERR("Camera access permission lost mid-operation: %s (%d)",
+                strerror(-res), res);
     }
 }
 
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp b/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
index c7d7c4b..3d74f0b 100644
--- a/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
@@ -44,6 +44,7 @@
         "libcutils",
         "libcameraservice",
         "libcamera_client",
+        "liblog",
         "libui",
         "libgui",
         "android.hardware.camera.common@1.0",
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp b/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
index 54550a5..985b2f8 100644
--- a/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
@@ -18,8 +18,18 @@
  * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
  */
 
+#define LOG_TAG "CameraServiceFuzzer"
+//#define LOG_NDEBUG 0
+
 #include <CameraService.h>
+#include <device3/Camera3StreamInterface.h>
+#include <android/hardware/BnCameraServiceListener.h>
+#include <android/hardware/camera2/BnCameraDeviceCallbacks.h>
 #include <android/hardware/ICameraServiceListener.h>
+#include <android/hardware/camera2/ICameraDeviceUser.h>
+#include <camera/camera2/OutputConfiguration.h>
+#include <gui/BufferItemConsumer.h>
+#include <gui/IGraphicBufferProducer.h>
 #include <gui/Surface.h>
 #include <gui/SurfaceComposerClient.h>
 #include <private/android_filesystem_config.h>
@@ -30,6 +40,7 @@
 using namespace std;
 
 const int32_t kPreviewThreshold = 8;
+const int32_t kNumRequestsTested = 8;
 const nsecs_t kPreviewTimeout = 5000000000;  // 5 [s.]
 const nsecs_t kEventTimeout = 10000000000;   // 10 [s.]
 const size_t kMaxNumLines = USHRT_MAX;
@@ -39,6 +50,23 @@
                             hardware::ICameraService::CAMERA_TYPE_ALL};
 const int kCameraApiVersion[] = {android::CameraService::API_VERSION_1,
                                  android::CameraService::API_VERSION_2};
+const uint8_t kSensorPixelModes[] = {ANDROID_SENSOR_PIXEL_MODE_DEFAULT,
+        ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION};
+const int32_t kRequestTemplates[] = {
+    hardware::camera2::ICameraDeviceUser::TEMPLATE_PREVIEW,
+    hardware::camera2::ICameraDeviceUser::TEMPLATE_STILL_CAPTURE,
+    hardware::camera2::ICameraDeviceUser::TEMPLATE_RECORD,
+    hardware::camera2::ICameraDeviceUser::TEMPLATE_VIDEO_SNAPSHOT,
+    hardware::camera2::ICameraDeviceUser::TEMPLATE_MANUAL,
+    hardware::camera2::ICameraDeviceUser::TEMPLATE_ZERO_SHUTTER_LAG
+};
+
+const int32_t kRotations[] = {
+    camera3::CAMERA_STREAM_ROTATION_0,
+    camera3::CAMERA_STREAM_ROTATION_90,
+    camera3::CAMERA_STREAM_ROTATION_270
+};
+
 const int kLayerMetadata[] = {
     0x00100000 /*GRALLOC_USAGE_RENDERSCRIPT*/, 0x00000003 /*GRALLOC_USAGE_SW_READ_OFTEN*/,
     0x00000100 /*GRALLOC_USAGE_HW_TEXTURE*/,   0x00000800 /*GRALLOC_USAGE_HW_COMPOSER*/,
@@ -69,15 +97,15 @@
 
 class CameraFuzzer : public ::android::hardware::BnCameraClient {
    public:
-    CameraFuzzer() = default;
+    CameraFuzzer(sp<CameraService> cs, std::shared_ptr<FuzzedDataProvider> fp) :
+          mCameraService(cs), mFuzzedDataProvider(fp) {};
     ~CameraFuzzer() { deInit(); }
-    bool init();
-    void process(const uint8_t *data, size_t size);
+    void process();
     void deInit();
 
    private:
-    FuzzedDataProvider *mFuzzedDataProvider = nullptr;
     sp<CameraService> mCameraService = nullptr;
+    std::shared_ptr<FuzzedDataProvider> mFuzzedDataProvider = nullptr;
     sp<SurfaceComposerClient> mComposerClient = nullptr;
     int32_t mNumCameras = 0;
     size_t mPreviewBufferCount = 0;
@@ -167,19 +195,7 @@
     return rc;
 }
 
-bool CameraFuzzer::init() {
-    setuid(AID_MEDIA);
-    mCameraService = new CameraService();
-    if (mCameraService) {
-        return true;
-    }
-    return false;
-}
-
 void CameraFuzzer::deInit() {
-    if (mCameraService) {
-        mCameraService = nullptr;
-    }
     if (mComposerClient) {
         mComposerClient->dispose();
     }
@@ -298,12 +314,12 @@
     for (int32_t cameraId = 0; cameraId < mNumCameras; ++cameraId) {
         getCameraInformation(cameraId);
 
-        const String16 opPackageName("com.fuzzer.poc");
         ::android::binder::Status rc;
         sp<ICamera> cameraDevice;
 
-        rc = mCameraService->connect(this, cameraId, opPackageName, AID_MEDIA, AID_ROOT,
-                                     &cameraDevice);
+        rc = mCameraService->connect(this, cameraId, String16(),
+                android::CameraService::USE_CALLING_UID, android::CameraService::USE_CALLING_PID,
+                &cameraDevice);
         if (!rc.isOk()) {
             // camera not connected
             return;
@@ -405,8 +421,7 @@
     }
 }
 
-void CameraFuzzer::process(const uint8_t *data, size_t size) {
-    mFuzzedDataProvider = new FuzzedDataProvider(data, size);
+void CameraFuzzer::process() {
     getNumCameras();
     invokeCameraSound();
     if (mNumCameras > 0) {
@@ -415,19 +430,169 @@
     invokeDump();
     invokeShellCommand();
     invokeNotifyCalls();
-    delete mFuzzedDataProvider;
+}
+
+class TestCameraServiceListener : public hardware::BnCameraServiceListener {
+public:
+    virtual ~TestCameraServiceListener() {};
+
+    virtual binder::Status onStatusChanged(int32_t, const String16&) {
+        return binder::Status::ok();
+    };
+
+    virtual binder::Status onPhysicalCameraStatusChanged(int32_t /*status*/,
+            const String16& /*cameraId*/, const String16& /*physicalCameraId*/) {
+        // No op
+        return binder::Status::ok();
+    };
+
+    virtual binder::Status onTorchStatusChanged(int32_t /*status*/, const String16& /*cameraId*/) {
+        return binder::Status::ok();
+    };
+
+    virtual binder::Status onCameraAccessPrioritiesChanged() {
+        // No op
+        return binder::Status::ok();
+    }
+
+    virtual binder::Status onCameraOpened(const String16& /*cameraId*/,
+            const String16& /*clientPackageName*/) {
+        // No op
+        return binder::Status::ok();
+    }
+
+    virtual binder::Status onCameraClosed(const String16& /*cameraId*/) {
+        // No op
+        return binder::Status::ok();
+    }
+};
+
+class TestCameraDeviceCallbacks : public hardware::camera2::BnCameraDeviceCallbacks {
+public:
+    TestCameraDeviceCallbacks() {}
+
+    virtual ~TestCameraDeviceCallbacks() {}
+
+    virtual binder::Status onDeviceError(int /*errorCode*/,
+            const CaptureResultExtras& /*resultExtras*/) {
+        return binder::Status::ok();
+    }
+
+    virtual binder::Status onDeviceIdle() {
+        return binder::Status::ok();
+    }
+
+    virtual binder::Status onCaptureStarted(const CaptureResultExtras& /*resultExtras*/,
+            int64_t /*timestamp*/) {
+        return binder::Status::ok();
+    }
+
+    virtual binder::Status onResultReceived(const CameraMetadata& /*metadata*/,
+            const CaptureResultExtras& /*resultExtras*/,
+            const std::vector<PhysicalCaptureResultInfo>& /*physicalResultInfos*/) {
+        return binder::Status::ok();
+    }
+
+    virtual binder::Status onPrepared(int /*streamId*/) {
+        return binder::Status::ok();
+    }
+
+    virtual binder::Status onRepeatingRequestError(
+            int64_t /*lastFrameNumber*/, int32_t /*stoppedSequenceId*/) {
+        return binder::Status::ok();
+    }
+
+    virtual binder::Status onRequestQueueEmpty() {
+        return binder::Status::ok();
+    }
+};
+
+class Camera2Fuzzer {
+   public:
+    Camera2Fuzzer(sp<CameraService> cs, std::shared_ptr<FuzzedDataProvider> fp) :
+          mCameraService(cs), mFuzzedDataProvider(fp) { };
+    ~Camera2Fuzzer() {}
+    void process();
+   private:
+    sp<CameraService> mCameraService = nullptr;
+    std::shared_ptr<FuzzedDataProvider> mFuzzedDataProvider = nullptr;
+};
+
+void Camera2Fuzzer::process() {
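+    // Walk every camera reported by the test listener, open it through the
+    // camera2 device interface, and submit a few fuzz-driven capture requests.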
+    sp<TestCameraServiceListener> listener = new TestCameraServiceListener();
+    std::vector<hardware::CameraStatus> statuses;
+    mCameraService->addListenerTest(listener, &statuses);
+    for (auto s : statuses) {
+        sp<TestCameraDeviceCallbacks> callbacks(new TestCameraDeviceCallbacks());
+        sp<hardware::camera2::ICameraDeviceUser> device;
+        mCameraService->connectDevice(callbacks, String16(s.cameraId), String16(), {},
+                android::CameraService::USE_CALLING_UID, &device);
+        if (device == nullptr) {
+            continue;
+        }
+        device->beginConfigure();
+        sp<IGraphicBufferProducer> gbProducer;
+        sp<IGraphicBufferConsumer> gbConsumer;
+        BufferQueue::createBufferQueue(&gbProducer, &gbConsumer);
+        sp<BufferItemConsumer> opaqueConsumer = new BufferItemConsumer(gbConsumer,
+                GRALLOC_USAGE_SW_READ_NEVER, /*maxImages*/8, /*controlledByApp*/true);
+        opaqueConsumer->setName(String8("Roger"));
+
+        // Set to VGA dimension for default, as that is guaranteed to be present
+        gbConsumer->setDefaultBufferSize(640, 480);
+        gbConsumer->setDefaultBufferFormat(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
+
+        sp<Surface> surface(new Surface(gbProducer, /*controlledByApp*/false));
+
+        String16 noPhysicalId;
+        size_t rotations = sizeof(kRotations) / sizeof(int32_t) - 1;
+        OutputConfiguration output(gbProducer,
+                kRotations[mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(0, rotations)],
+                noPhysicalId);
+        int streamId;
+        device->createStream(output, &streamId);
+        CameraMetadata sessionParams;
+        std::vector<int> offlineStreamIds;
+        device->endConfigure(/*isConstrainedHighSpeed*/ mFuzzedDataProvider->ConsumeBool(),
+                sessionParams, ns2ms(systemTime()), &offlineStreamIds);
+
+        CameraMetadata requestTemplate;
+        size_t requestTemplatesSize = sizeof(kRequestTemplates) / sizeof(int32_t) - 1;
+        device->createDefaultRequest(kRequestTemplates[
+                mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(0, requestTemplatesSize)],
+                /*out*/&requestTemplate);
+        hardware::camera2::CaptureRequest request;
+        request.mSurfaceList.add(surface);
+        request.mIsReprocess = false;
+        hardware::camera2::utils::SubmitInfo info;
+        for (int i = 0; i < kNumRequestsTested; i++) {
+            uint8_t sensorPixelMode =
+                    kSensorPixelModes[mFuzzedDataProvider->ConsumeBool() ? 1 : 0];
+            requestTemplate.update(ANDROID_SENSOR_PIXEL_MODE, &sensorPixelMode, 1);
+            request.mPhysicalCameraSettings.clear();
+            request.mPhysicalCameraSettings.push_back({s.cameraId.string(), requestTemplate});
+            device->submitRequest(request, /*streaming*/false, /*out*/&info);
+            ALOGV("%s : camera id %s submit request id %d",__FUNCTION__, s.cameraId.string(),
+                    info.mRequestId);
+        }
+        device->disconnect();
+    }
 }
 
 extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
     if (size < 1) {
         return 0;
     }
-    sp<CameraFuzzer> camerafuzzer = new CameraFuzzer();
+    setuid(AID_CAMERASERVER);
+    std::shared_ptr<FuzzedDataProvider> fp = std::make_shared<FuzzedDataProvider>(data, size);
+    sp<CameraService> cs = new CameraService();
+    cs->clearCachedVariables();
+    sp<CameraFuzzer> camerafuzzer = new CameraFuzzer(cs, fp);
     if (!camerafuzzer) {
         return 0;
     }
-    if (camerafuzzer->init()) {
-        camerafuzzer->process(data, size);
-    }
+    camerafuzzer->process();
+    Camera2Fuzzer camera2fuzzer(cs, fp);
+    camera2fuzzer.process();
     return 0;
 }
diff --git a/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp b/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
index 855b5ab..a74fd9d 100644
--- a/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
+++ b/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
@@ -23,7 +23,9 @@
 #include <android/hardware/camera/device/3.2/ICameraDeviceCallback.h>
 #include <android/hardware/camera/device/3.2/ICameraDeviceSession.h>
 #include <camera_metadata_hidden.h>
+#include <hidl/HidlBinderSupport.h>
 #include <gtest/gtest.h>
+#include <utility>
 
 using namespace android;
 using namespace android::hardware::camera;
@@ -173,6 +175,25 @@
         return hardware::Void();
     }
 
+    virtual ::android::hardware::Return<bool> linkToDeath(
+            const ::android::sp<::android::hardware::hidl_death_recipient>& recipient,
+            uint64_t cookie) {
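+        // Remember only the first registered death recipient so the test can
+        // later trigger it via signalInitialBinderDeathRecipient().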
+        if (mInitialDeathRecipient.get() == nullptr) {
+            mInitialDeathRecipient =
+                std::make_unique<::android::hardware::hidl_binder_death_recipient>(recipient,
+                        cookie, this);
+        }
+        return true;
+    }
+
+    void signalInitialBinderDeathRecipient() {
+        if (mInitialDeathRecipient.get() != nullptr) {
+            mInitialDeathRecipient->binderDied(nullptr /*who*/);
+        }
+    }
+
+    std::unique_ptr<::android::hardware::hidl_binder_death_recipient> mInitialDeathRecipient;
+
     enum MethodNames {
         SET_CALLBACK,
         GET_VENDOR_TAGS,
@@ -567,3 +588,47 @@
     ASSERT_EQ(serviceProxy.mLastRequestedServiceNames.back(), testProviderInstanceName) <<
             "Incorrect instance requested from service manager";
 }
+
+// Test that CameraProviderManager can handle races between provider death notifications and
+// provider registration callbacks
+TEST(CameraProviderManagerTest, BinderDeathRegistrationRaceTest) {
+
+    std::vector<hardware::hidl_string> deviceNames;
+    deviceNames.push_back("device@3.2/test/0");
+    deviceNames.push_back("device@3.2/test/1");
+    hardware::hidl_vec<common::V1_0::VendorTagSection> vendorSection;
+    status_t res;
+
+    sp<CameraProviderManager> providerManager = new CameraProviderManager();
+    sp<TestStatusListener> statusListener = new TestStatusListener();
+    TestInteractionProxy serviceProxy;
+    sp<TestICameraProvider> provider =  new TestICameraProvider(deviceNames,
+            vendorSection);
+
+    // Not setting up provider in the service proxy yet, to test cases where a
+    // HAL isn't starting right
+    res = providerManager->initialize(statusListener, &serviceProxy);
+    ASSERT_EQ(res, OK) << "Unable to initialize provider manager";
+
+    // Now set up provider and trigger a registration
+    serviceProxy.setProvider(provider);
+
+    hardware::hidl_string testProviderFqInterfaceName =
+            "android.hardware.camera.provider@2.4::ICameraProvider";
+    hardware::hidl_string testProviderInstanceName = "test/0";
+    serviceProxy.mManagerNotificationInterface->onRegistration(
+            testProviderFqInterfaceName,
+            testProviderInstanceName, false);
+
+    // Simulate artificial delay of the registration callback which arrives before the
+    // death notification
+    serviceProxy.mManagerNotificationInterface->onRegistration(
+            testProviderFqInterfaceName,
+            testProviderInstanceName, false);
+
+    provider->signalInitialBinderDeathRecipient();
+
+    auto deviceCount = static_cast<unsigned> (providerManager->getCameraCount().second);
+    ASSERT_EQ(deviceCount, deviceNames.size()) <<
+            "Unexpected amount of camera devices";
+}
diff --git a/services/mediametrics/AudioAnalytics.cpp b/services/mediametrics/AudioAnalytics.cpp
index ca918a9..dbc68b2 100644
--- a/services/mediametrics/AudioAnalytics.cpp
+++ b/services/mediametrics/AudioAnalytics.cpp
@@ -207,8 +207,10 @@
     return { result, ss.str() };
 }
 
-AudioAnalytics::AudioAnalytics()
+AudioAnalytics::AudioAnalytics(const std::shared_ptr<StatsdLog>& statsdLog)
     : mDeliverStatistics(property_get_bool(PROP_AUDIO_ANALYTICS_CLOUD_ENABLED, true))
+    , mStatsdLog(statsdLog)
+    , mAudioPowerUsage(this, statsdLog)
 {
     SetMinimumLogSeverity(android::base::DEBUG); // for LOG().
     ALOGD("%s", __func__);
@@ -243,7 +245,7 @@
                 });
             }));
 
-    // Handle legacy aaudio stream statistics
+    // Handle legacy aaudio playback stream statistics
     mActions.addAction(
         AMEDIAMETRICS_KEY_PREFIX_AUDIO_TRACK "*." AMEDIAMETRICS_PROP_EVENT,
         std::string(AMEDIAMETRICS_PROP_EVENT_VALUE_ENDAAUDIOSTREAM),
@@ -252,6 +254,15 @@
                 mAAudioStreamInfo.endAAudioStream(item, AAudioStreamInfo::CALLER_PATH_LEGACY);
             }));
 
+    // Handle legacy aaudio capture stream statistics
+    mActions.addAction(
+        AMEDIAMETRICS_KEY_PREFIX_AUDIO_RECORD "*." AMEDIAMETRICS_PROP_EVENT,
+        std::string(AMEDIAMETRICS_PROP_EVENT_VALUE_ENDAAUDIOSTREAM),
+        std::make_shared<AnalyticsActions::Function>(
+            [this](const std::shared_ptr<const android::mediametrics::Item> &item) {
+                mAAudioStreamInfo.endAAudioStream(item, AAudioStreamInfo::CALLER_PATH_LEGACY);
+            }));
+
     // Handle mmap aaudio stream statistics
     mActions.addAction(
         AMEDIAMETRICS_KEY_PREFIX_AUDIO_STREAM "*." AMEDIAMETRICS_PROP_EVENT,
@@ -407,20 +418,6 @@
         ll -= l;
     }
 
-    if (ll > 0) {
-        // Print the statsd atoms we sent out.
-        const std::string statsd = mStatsdLog.dumpToString("  " /* prefix */, ll - 1);
-        const size_t n = std::count(statsd.begin(), statsd.end(), '\n') + 1; // we control this.
-        if ((size_t)ll >= n) {
-            if (n == 1) {
-                ss << "Statsd atoms: empty or truncated\n";
-            } else {
-                ss << "Statsd atoms:\n" << statsd;
-            }
-            ll -= (int32_t)n;
-        }
-    }
-
     if (ll > 0 && prefix == nullptr) {
         auto [s, l] = mAudioPowerUsage.dump(ll);
         ss << s;
@@ -602,7 +599,8 @@
                     , logSessionIdForStats.c_str()
                     );
             ALOGV("%s: statsd %s", __func__, str.c_str());
-            mAudioAnalytics.mStatsdLog.log("%s", str.c_str());
+            mAudioAnalytics.mStatsdLog->log(
+                    android::util::MEDIAMETRICS_AUDIORECORDDEVICEUSAGE_REPORTED, str);
         }
     } break;
     case THREAD: {
@@ -650,7 +648,8 @@
                 , ENUM_EXTRACT(typeForStats)
             );
             ALOGV("%s: statsd %s", __func__, str.c_str());
-            mAudioAnalytics.mStatsdLog.log("%s", str.c_str());
+            mAudioAnalytics.mStatsdLog->log(
+                    android::util::MEDIAMETRICS_AUDIOTHREADDEVICEUSAGE_REPORTED, str);
         }
     } break;
     case TRACK: {
@@ -770,7 +769,8 @@
                     , logSessionIdForStats.c_str()
                     );
             ALOGV("%s: statsd %s", __func__, str.c_str());
-            mAudioAnalytics.mStatsdLog.log("%s", str.c_str());
+            mAudioAnalytics.mStatsdLog->log(
+                    android::util::MEDIAMETRICS_AUDIOTRACKDEVICEUSAGE_REPORTED, str);
         }
         } break;
     }
@@ -846,7 +846,8 @@
                     , /* connection_count */ 1
                     );
             ALOGV("%s: statsd %s", __func__, str.c_str());
-            mAudioAnalytics.mStatsdLog.log("%s", str.c_str());
+            mAudioAnalytics.mStatsdLog->log(
+                    android::util::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED, str);
         }
     }
 }
@@ -899,7 +900,8 @@
                     , /* connection_count */ 1
                     );
             ALOGV("%s: statsd %s", __func__, str.c_str());
-            mAudioAnalytics.mStatsdLog.log("%s", str.c_str());
+            mAudioAnalytics.mStatsdLog->log(
+                    android::util::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED, str);
         }
         return;
     }
@@ -925,7 +927,8 @@
                 , /* connection_count */ 1
                 );
         ALOGV("%s: statsd %s", __func__, str.c_str());
-        mAudioAnalytics.mStatsdLog.log("%s", str.c_str());
+        mAudioAnalytics.mStatsdLog->log(
+                android::util::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED, str);
     }
 }
 
@@ -955,7 +958,8 @@
             key, AMEDIAMETRICS_PROP_CHANNELCOUNT, &channelCount);
 
     int64_t totalFramesTransferred = -1;
-    // TODO: log and get total frames transferred
+    mAudioAnalytics.mAnalyticsState->timeMachine().get(
+            key, AMEDIAMETRICS_PROP_FRAMESTRANSFERRED, &totalFramesTransferred);
 
     std::string perfModeRequestedStr;
     mAudioAnalytics.mAnalyticsState->timeMachine().get(
@@ -963,8 +967,11 @@
     const auto perfModeRequested =
             types::lookup<types::AAUDIO_PERFORMANCE_MODE, int32_t>(perfModeRequestedStr);
 
-    int32_t perfModeActual = 0;
-    // TODO: log and get actual performance mode
+    std::string perfModeActualStr;
+    mAudioAnalytics.mAnalyticsState->timeMachine().get(
+            key, AMEDIAMETRICS_PROP_PERFORMANCEMODEACTUAL, &perfModeActualStr);
+    const auto perfModeActual =
+            types::lookup<types::AAUDIO_PERFORMANCE_MODE, int32_t>(perfModeActualStr);
 
     std::string sharingModeStr;
     mAudioAnalytics.mAnalyticsState->timeMachine().get(
@@ -978,8 +985,10 @@
     std::string serializedDeviceTypes;
     // TODO: only routed device id is logged, but no device type
 
-    int32_t formatApp = 0;
-    // TODO: log format from app
+    std::string formatAppStr;
+    mAudioAnalytics.mAnalyticsState->timeMachine().get(
+            key, AMEDIAMETRICS_PROP_ENCODINGREQUESTED, &formatAppStr);
+    const auto formatApp = types::lookup<types::ENCODING, int32_t>(formatAppStr);
 
     std::string formatDeviceStr;
     mAudioAnalytics.mAnalyticsState->timeMachine().get(
@@ -987,7 +996,8 @@
     const auto formatDevice = types::lookup<types::ENCODING, int32_t>(formatDeviceStr);
 
     std::string logSessionId;
-    // TODO: log logSessionId
+    mAudioAnalytics.mAnalyticsState->timeMachine().get(
+            key, AMEDIAMETRICS_PROP_LOGSESSIONID, &logSessionId);
 
     int32_t sampleRate = 0;
     mAudioAnalytics.mAnalyticsState->timeMachine().get(
@@ -1007,11 +1017,11 @@
             << " channel_count:" << channelCount
             << " total_frames_transferred:" << totalFramesTransferred
             << " perf_mode_requested:" << perfModeRequested << "(" << perfModeRequestedStr << ")"
-            << " perf_mode_actual:" << perfModeActual
+            << " perf_mode_actual:" << perfModeActual << "(" << perfModeActualStr << ")"
             << " sharing:" << sharingMode << "(" << sharingModeStr << ")"
             << " xrun_count:" << xrunCount
             << " device_type:" << serializedDeviceTypes
-            << " format_app:" << formatApp
+            << " format_app:" << formatApp << "(" << formatAppStr << ")"
             << " format_device: " << formatDevice << "(" << formatDeviceStr << ")"
             << " log_session_id: " << logSessionId
             << " sample_rate: " << sampleRate
@@ -1065,7 +1075,7 @@
         ss << " " << fieldsStr;
         std::string str = ss.str();
         ALOGV("%s: statsd %s", __func__, str.c_str());
-        mAudioAnalytics.mStatsdLog.log("%s", str.c_str());
+        mAudioAnalytics.mStatsdLog->log(android::util::MEDIAMETRICS_AAUDIOSTREAM_REPORTED, str);
     }
 }
 
diff --git a/services/mediametrics/AudioAnalytics.h b/services/mediametrics/AudioAnalytics.h
index 07872ef..2b41a95 100644
--- a/services/mediametrics/AudioAnalytics.h
+++ b/services/mediametrics/AudioAnalytics.h
@@ -17,10 +17,10 @@
 #pragma once
 
 #include <android-base/thread_annotations.h>
-#include <audio_utils/SimpleLog.h>
 #include "AnalyticsActions.h"
 #include "AnalyticsState.h"
 #include "AudioPowerUsage.h"
+#include "StatsdLog.h"
 #include "TimedAction.h"
 #include "Wrap.h"
 
@@ -32,7 +32,7 @@
     friend AudioPowerUsage;
 
 public:
-    AudioAnalytics();
+    explicit AudioAnalytics(const std::shared_ptr<StatsdLog>& statsdLog);
     ~AudioAnalytics();
 
     /**
@@ -122,8 +122,7 @@
     SharedPtrWrap<AnalyticsState> mPreviousAnalyticsState;
 
     TimedAction mTimedAction; // locked internally
-
-    SimpleLog mStatsdLog{16 /* log lines */}; // locked internally
+    const std::shared_ptr<StatsdLog> mStatsdLog; // locked internally, ok for multiple threads.
 
     // DeviceUse is a nested class which handles audio device usage accounting.
     // We define this class at the end to ensure prior variables all properly constructed.
@@ -212,7 +211,7 @@
         AudioAnalytics &mAudioAnalytics;
     } mAAudioStreamInfo{*this};
 
-    AudioPowerUsage mAudioPowerUsage{this};
+    AudioPowerUsage mAudioPowerUsage;
 };
 
 } // namespace android::mediametrics
diff --git a/services/mediametrics/AudioPowerUsage.cpp b/services/mediametrics/AudioPowerUsage.cpp
index e584f12..ab74c8e 100644
--- a/services/mediametrics/AudioPowerUsage.cpp
+++ b/services/mediametrics/AudioPowerUsage.cpp
@@ -127,14 +127,13 @@
     return deviceMask;
 }
 
-/* static */
-void AudioPowerUsage::sendItem(const std::shared_ptr<const mediametrics::Item>& item)
+void AudioPowerUsage::sendItem(const std::shared_ptr<const mediametrics::Item>& item) const
 {
     int32_t type;
     if (!item->getInt32(AUDIO_POWER_USAGE_PROP_TYPE, &type)) return;
 
-    int32_t device;
-    if (!item->getInt32(AUDIO_POWER_USAGE_PROP_DEVICE, &device)) return;
+    int32_t audio_device;
+    if (!item->getInt32(AUDIO_POWER_USAGE_PROP_DEVICE, &audio_device)) return;
 
     int64_t duration_ns;
     if (!item->getInt64(AUDIO_POWER_USAGE_PROP_DURATION_NS, &duration_ns)) return;
@@ -142,11 +141,24 @@
     double volume;
     if (!item->getDouble(AUDIO_POWER_USAGE_PROP_VOLUME, &volume)) return;
 
-    (void)android::util::stats_write(android::util::AUDIO_POWER_USAGE_DATA_REPORTED,
-                                         device,
-                                         (int32_t)(duration_ns / NANOS_PER_SECOND),
-                                         (float)volume,
+    const int32_t duration_secs = (int32_t)(duration_ns / NANOS_PER_SECOND);
+    const float average_volume = (float)volume;
+    const int result = android::util::stats_write(android::util::AUDIO_POWER_USAGE_DATA_REPORTED,
+                                         audio_device,
+                                         duration_secs,
+                                         average_volume,
                                          type);
+
+    std::stringstream log;
+    log << "result:" << result << " {"
+            << " mediametrics_audio_power_usage_data_reported:"
+            << android::util::AUDIO_POWER_USAGE_DATA_REPORTED
+            << " audio_device:" << audio_device
+            << " duration_secs:" << duration_secs
+            << " average_volume:" << average_volume
+            << " type:" << type
+            << " }";
+    mStatsdLog->log(android::util::AUDIO_POWER_USAGE_DATA_REPORTED, log.str());
 }
 
 bool AudioPowerUsage::saveAsItem_l(
@@ -360,8 +372,10 @@
     mPrimaryDevice = device;
 }
 
-AudioPowerUsage::AudioPowerUsage(AudioAnalytics *audioAnalytics)
+AudioPowerUsage::AudioPowerUsage(
+        AudioAnalytics *audioAnalytics, const std::shared_ptr<StatsdLog>& statsdLog)
     : mAudioAnalytics(audioAnalytics)
+    , mStatsdLog(statsdLog)
     , mDisabled(property_get_bool(PROP_AUDIO_METRICS_DISABLED, AUDIO_METRICS_DISABLED_DEFAULT))
     , mIntervalHours(property_get_int32(PROP_AUDIO_METRICS_INTERVAL_HR, INTERVAL_HR_DEFAULT))
 {
diff --git a/services/mediametrics/AudioPowerUsage.h b/services/mediametrics/AudioPowerUsage.h
index b705a6a..7021902 100644
--- a/services/mediametrics/AudioPowerUsage.h
+++ b/services/mediametrics/AudioPowerUsage.h
@@ -22,13 +22,15 @@
 #include <mutex>
 #include <thread>
 
+#include "StatsdLog.h"
+
 namespace android::mediametrics {
 
 class AudioAnalytics;
 
 class AudioPowerUsage {
 public:
-    explicit AudioPowerUsage(AudioAnalytics *audioAnalytics);
+    AudioPowerUsage(AudioAnalytics *audioAnalytics, const std::shared_ptr<StatsdLog>& statsdLog);
     ~AudioPowerUsage();
 
     void checkTrackRecord(const std::shared_ptr<const mediametrics::Item>& item, bool isTrack);
@@ -83,12 +85,13 @@
 private:
     bool saveAsItem_l(int32_t device, int64_t duration, int32_t type, double average_vol)
          REQUIRES(mLock);
-    static void sendItem(const std::shared_ptr<const mediametrics::Item>& item);
+    void sendItem(const std::shared_ptr<const mediametrics::Item>& item) const;
     void collect();
     bool saveAsItems_l(int32_t device, int64_t duration, int32_t type, double average_vol)
          REQUIRES(mLock);
 
     AudioAnalytics * const mAudioAnalytics;
+    const std::shared_ptr<StatsdLog> mStatsdLog;  // mStatsdLog is internally locked
     const bool mDisabled;
     const int32_t mIntervalHours;
 
diff --git a/services/mediametrics/AudioTypes.cpp b/services/mediametrics/AudioTypes.cpp
index 1756c98..838cdd5 100644
--- a/services/mediametrics/AudioTypes.cpp
+++ b/services/mediametrics/AudioTypes.cpp
@@ -77,6 +77,7 @@
         {"AUDIO_DEVICE_IN_DEFAULT",                1LL << 28},
         // R values above.
         {"AUDIO_DEVICE_IN_BLE_HEADSET",            1LL << 29},
+        {"AUDIO_DEVICE_IN_HDMI_EARC",              1LL << 30},
     };
     return map;
 }
@@ -123,7 +124,8 @@
         {"AUDIO_DEVICE_OUT_DEFAULT",                   1LL << 30},
         // R values above.
         {"AUDIO_DEVICE_OUT_BLE_HEADSET",               1LL << 31},
-        {"AUDIO_DEVICE_OUT_BLE_SPAEKER",               1LL << 32},
+        {"AUDIO_DEVICE_OUT_BLE_SPEAKER",               1LL << 32},
+        {"AUDIO_DEVICE_OUT_HDMI_EARC",                 1LL << 33},
     };
     return map;
 }
diff --git a/services/mediametrics/MediaMetricsService.cpp b/services/mediametrics/MediaMetricsService.cpp
index 7ee731e..1d64878 100644
--- a/services/mediametrics/MediaMetricsService.cpp
+++ b/services/mediametrics/MediaMetricsService.cpp
@@ -206,7 +206,7 @@
 
     (void)mAudioAnalytics.submit(sitem, isTrusted);
 
-    (void)dump2Statsd(sitem);  // failure should be logged in function.
+    (void)dump2Statsd(sitem, mStatsdLog);  // failure should be logged in function.
     saveItem(sitem);
     return NO_ERROR;
 }
@@ -308,6 +308,11 @@
             if (lines == linesToDump) {
                 result << "-- some lines may be truncated --\n";
             }
+
+            // Dump the statsd atoms we sent out.
+            result << "Statsd atoms:\n"
+                   << mStatsdLog->dumpToString("  " /* prefix */,
+                           all ? STATSD_LOG_LINES_MAX : STATSD_LOG_LINES_DUMP);
         }
     }
     const std::string str = result.str();
@@ -540,12 +545,13 @@
         return AStatsManager_PULL_SKIP;
     }
     std::lock_guard _l(mLock);
+    bool dumped = false;
     for (auto &item : mPullableItems[key]) {
         if (const auto sitem = item.lock()) {
-            dump2Statsd(sitem, data);
+            dumped |= dump2Statsd(sitem, data, mStatsdLog);
         }
     }
     mPullableItems[key].clear();
-    return AStatsManager_PULL_SUCCESS;
+    return dumped ? AStatsManager_PULL_SUCCESS : AStatsManager_PULL_SKIP;
 }
 } // namespace android
diff --git a/services/mediametrics/MediaMetricsService.h b/services/mediametrics/MediaMetricsService.h
index 6234656..8d0b1cf 100644
--- a/services/mediametrics/MediaMetricsService.h
+++ b/services/mediametrics/MediaMetricsService.h
@@ -124,7 +124,14 @@
 
     std::atomic<int64_t> mItemsSubmitted{}; // accessed outside of lock.
 
-    mediametrics::AudioAnalytics mAudioAnalytics; // mAudioAnalytics is locked internally.
+    // mStatsdLog is locked internally (thread-safe) and shows the last atoms logged
+    static constexpr size_t STATSD_LOG_LINES_MAX = 30; // recent log lines to keep
+    static constexpr size_t STATSD_LOG_LINES_DUMP = 4; // normal amount of lines to dump
+    const std::shared_ptr<mediametrics::StatsdLog> mStatsdLog{
+            std::make_shared<mediametrics::StatsdLog>(STATSD_LOG_LINES_MAX)};
+
+    // mAudioAnalytics is locked internally.
+    mediametrics::AudioAnalytics mAudioAnalytics{mStatsdLog};
 
     std::mutex mLock;
     // statistics about our analytics
diff --git a/services/mediametrics/StatsdLog.h b/services/mediametrics/StatsdLog.h
new file mode 100644
index 0000000..e207bac
--- /dev/null
+++ b/services/mediametrics/StatsdLog.h
@@ -0,0 +1,75 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <android-base/thread_annotations.h>
+#include <audio_utils/SimpleLog.h>
+#include <map>
+#include <mutex>
+#include <sstream>
+#include <string>
+
+namespace android::mediametrics {
+
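+/**
+ * StatsdLog is a thread-safe record of the statsd atoms recently sent: it keeps
+ * a per-atom-id count and a rolling log of the formatted atom strings for dumpsys.
+ */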
+class StatsdLog {
+public:
+    explicit StatsdLog(size_t lines) : mSimpleLog(lines) {}
+
+    void log(int atom, const std::string& string) {
+        {
+            std::lock_guard lock(mLock);
+            ++mCountMap[atom];
+        }
+        mSimpleLog.log("%s", string.c_str());
+    }
+
+    std::string dumpToString(const char *prefix = "", size_t logLines = 0) const {
+        std::stringstream ss;
+
+        {   // first print out the atom counts
+            std::lock_guard lock(mLock);
+
+            size_t col = 0;
+            for (const auto& count : mCountMap) {
+                if (col == 8) {
+                    col = 0;
+                    ss << "\n" << prefix;
+                } else {
+                    ss << " ";
+                }
+                ss << "[ " << count.first << " : " << count.second << " ]";
+                ++col;
+            }
+            ss << "\n";
+        }
+
+        // then print out the log lines
+        ss << mSimpleLog.dumpToString(prefix, logLines);
+        return ss.str();
+    }
+
+private:
+    SimpleLog mSimpleLog; // internally locked
+    std::map<int /* atom */, size_t /* count */> mCountMap GUARDED_BY(mLock); // sorted
+    mutable std::mutex mLock;
+};
+
+} // namespace android::mediametrics
diff --git a/services/mediametrics/StringUtils.h b/services/mediametrics/StringUtils.h
index 37ed173..01034d9 100644
--- a/services/mediametrics/StringUtils.h
+++ b/services/mediametrics/StringUtils.h
@@ -16,6 +16,10 @@
 
 #pragma once
 
+#include <algorithm>
+#include <cstdint>
+#include <iomanip>
+#include <sstream>
 #include <string>
 #include <vector>
 
@@ -146,4 +150,25 @@
     return {}; // if not a logSessionId, return an empty string.
 }
 
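+// Returns the bytes as a hex string, e.g. "{ 0a 1f }", showing at most maxSize
+// bytes; "..." indicates truncation and an empty vector returns "{}".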
+inline std::string bytesToString(const std::vector<uint8_t>& bytes, size_t maxSize = SIZE_MAX) {
+    if (bytes.size() == 0) {
+        return "{}";
+    }
+    std::stringstream ss;
+    ss << "{";
+    ss << std::hex << std::setfill('0');
+    maxSize = std::min(maxSize, bytes.size());
+    for (size_t i = 0; i < maxSize; ++i) {
+        ss << " " << std::setw(2) << (int)bytes[i];
+    }
+    if (maxSize != bytes.size()) {
+        ss << " ... }";
+    } else {
+        ss << " }";
+    }
+    return ss.str();
+}
+
 } // namespace android::mediametrics::stringutils
diff --git a/services/mediametrics/fuzzer/mediametrics_service_fuzzer.cpp b/services/mediametrics/fuzzer/mediametrics_service_fuzzer.cpp
index 0cb2594..8b0b479 100644
--- a/services/mediametrics/fuzzer/mediametrics_service_fuzzer.cpp
+++ b/services/mediametrics/fuzzer/mediametrics_service_fuzzer.cpp
@@ -320,7 +320,9 @@
 
 void MediaMetricsServiceFuzzer::invokeAudioAnalytics(const uint8_t *data, size_t size) {
     FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
-    android::mediametrics::AudioAnalytics audioAnalytics;
+    std::shared_ptr<android::mediametrics::StatsdLog> statsdLog =
+            std::make_shared<android::mediametrics::StatsdLog>(10);
+    android::mediametrics::AudioAnalytics audioAnalytics{statsdLog};
 
     while (fdp.remaining_bytes()) {
         auto item = std::make_shared<mediametrics::Item>(fdp.ConsumeRandomLengthString().c_str());
diff --git a/services/mediametrics/iface_statsd.cpp b/services/mediametrics/iface_statsd.cpp
index b7c5296..776f878 100644
--- a/services/mediametrics/iface_statsd.cpp
+++ b/services/mediametrics/iface_statsd.cpp
@@ -48,10 +48,7 @@
 // has its own routine to handle this.
 //
 
-bool enabled_statsd = true;
-
-using statsd_pusher = bool (*)(const mediametrics::Item *);
-using statsd_puller = bool (*)(const mediametrics::Item *, AStatsEventList *);
+static bool enabled_statsd = true;
 
 namespace {
 template<typename Handler, typename... Args>
@@ -68,15 +65,17 @@
     }
 
     if (handlers.count(key)) {
-        return (handlers.at(key))(item.get(), args...);
+        return (handlers.at(key))(item, args...);
     }
     return false;
 }
 } // namespace
 
 // give me a record, I'll look at the type and upload appropriately
-bool dump2Statsd(const std::shared_ptr<const mediametrics::Item>& item) {
-    static const std::map<std::string, statsd_pusher> statsd_pushers =
+bool dump2Statsd(
+        const std::shared_ptr<const mediametrics::Item>& item,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog) {
+    static const std::map<std::string, statsd_pusher*> statsd_pushers =
     {
         { "audiopolicy", statsd_audiopolicy },
         { "audiorecord", statsd_audiorecord },
@@ -91,15 +90,16 @@
         { "nuplayer2", statsd_nuplayer },
         { "recorder", statsd_recorder },
     };
-    return dump2StatsdInternal(statsd_pushers, item);
+    return dump2StatsdInternal(statsd_pushers, item, statsdLog);
 }
 
-bool dump2Statsd(const std::shared_ptr<const mediametrics::Item>& item, AStatsEventList* out) {
-    static const std::map<std::string, statsd_puller> statsd_pullers =
+bool dump2Statsd(const std::shared_ptr<const mediametrics::Item>& item, AStatsEventList* out,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog) {
+    static const std::map<std::string, statsd_puller*> statsd_pullers =
     {
         { "mediadrm", statsd_mediadrm_puller },
     };
-    return dump2StatsdInternal(statsd_pullers, item, out);
+    return dump2StatsdInternal(statsd_pullers, item, out, statsdLog);
 }
 
 } // namespace android
diff --git a/services/mediametrics/iface_statsd.h b/services/mediametrics/iface_statsd.h
index 1b6c79a..c2a8b3c 100644
--- a/services/mediametrics/iface_statsd.h
+++ b/services/mediametrics/iface_statsd.h
@@ -22,26 +22,32 @@
 class Item;
+class StatsdLog;
 }
 
-extern bool enabled_statsd;
-
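+// A statsd_pusher serializes a metrics Item, writes the atom to statsd and
+// records a summary of what was sent in the shared StatsdLog.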
+using statsd_pusher = bool (const std::shared_ptr<const mediametrics::Item>& item,
+         const std::shared_ptr<mediametrics::StatsdLog>& statsdLog);
 // component specific dumpers
-extern bool statsd_audiopolicy(const mediametrics::Item *);
-extern bool statsd_audiorecord(const mediametrics::Item *);
-extern bool statsd_audiothread(const mediametrics::Item *);
-extern bool statsd_audiotrack(const mediametrics::Item *);
-extern bool statsd_codec(const mediametrics::Item *);
-extern bool statsd_extractor(const mediametrics::Item *);
-extern bool statsd_mediaparser(const mediametrics::Item *);
-extern bool statsd_nuplayer(const mediametrics::Item *);
-extern bool statsd_recorder(const mediametrics::Item *);
+extern statsd_pusher statsd_audiopolicy;
+extern statsd_pusher statsd_audiorecord;
+extern statsd_pusher statsd_audiothread;
+extern statsd_pusher statsd_audiotrack;
+extern statsd_pusher statsd_codec;
+extern statsd_pusher statsd_extractor;
+extern statsd_pusher statsd_mediaparser;
 
-extern bool statsd_mediadrm(const mediametrics::Item *);
-extern bool statsd_drmmanager(const mediametrics::Item *);
+extern statsd_pusher statsd_nuplayer;
+extern statsd_pusher statsd_recorder;
+extern statsd_pusher statsd_mediadrm;
+extern statsd_pusher statsd_drmmanager;
 
+using statsd_puller = bool (const std::shared_ptr<const mediametrics::Item>& item,
+        AStatsEventList *, const std::shared_ptr<mediametrics::StatsdLog>& statsdLog);
 // component specific pullers
-extern bool statsd_mediadrm_puller(const mediametrics::Item *, AStatsEventList *);
+extern statsd_puller statsd_mediadrm_puller;
 
-bool dump2Statsd(const std::shared_ptr<const mediametrics::Item>& item);
-bool dump2Statsd(const std::shared_ptr<const mediametrics::Item>& item, AStatsEventList* out);
-
+bool dump2Statsd(const std::shared_ptr<const mediametrics::Item>& item,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog);
+bool dump2Statsd(const std::shared_ptr<const mediametrics::Item>& item, AStatsEventList* out,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog);
 } // namespace android
diff --git a/services/mediametrics/statsd_audiopolicy.cpp b/services/mediametrics/statsd_audiopolicy.cpp
index 6ef2f2c..f44b7c4 100644
--- a/services/mediametrics/statsd_audiopolicy.cpp
+++ b/services/mediametrics/statsd_audiopolicy.cpp
@@ -37,16 +37,16 @@
 
 namespace android {
 
-bool statsd_audiopolicy(const mediametrics::Item *item)
+bool statsd_audiopolicy(const std::shared_ptr<const mediametrics::Item>& item,
+       const std::shared_ptr<mediametrics::StatsdLog>& statsdLog)
 {
     if (item == nullptr) return false;
 
     // these go into the statsd wrapper
-    const nsecs_t timestamp = MediaMetricsService::roundTime(item->getTimestamp());
-    std::string pkgName = item->getPkgName();
-    int64_t pkgVersionCode = item->getPkgVersionCode();
-    int64_t mediaApexVersion = 0;
-
+    const nsecs_t timestamp_nanos = MediaMetricsService::roundTime(item->getTimestamp());
+    const std::string package_name = item->getPkgName();
+    const int64_t package_version_code = item->getPkgVersionCode();
+    const int64_t media_apex_version = 0;
 
     // the rest into our own proto
     //
@@ -60,35 +60,35 @@
         metrics_proto.set_status(status);
     }
     //string char kAudioPolicyRqstSrc[] = "android.media.audiopolicy.rqst.src";
-    std::string rqst_src;
-    if (item->getString("android.media.audiopolicy.rqst.src", &rqst_src)) {
-        metrics_proto.set_request_source(std::move(rqst_src));
+    std::string request_source;
+    if (item->getString("android.media.audiopolicy.rqst.src", &request_source)) {
+        metrics_proto.set_request_source(request_source);
     }
     //string char kAudioPolicyRqstPkg[] = "android.media.audiopolicy.rqst.pkg";
-    std::string rqst_pkg;
-    if (item->getString("android.media.audiopolicy.rqst.pkg", &rqst_pkg)) {
-        metrics_proto.set_request_package(std::move(rqst_pkg));
+    std::string request_package;
+    if (item->getString("android.media.audiopolicy.rqst.pkg", &request_package)) {
+        metrics_proto.set_request_package(request_package);
     }
     //int32 char kAudioPolicyRqstSession[] = "android.media.audiopolicy.rqst.session";
-    int32_t rqst_session = -1;
-    if (item->getInt32("android.media.audiopolicy.rqst.session", &rqst_session)) {
-        metrics_proto.set_request_session(rqst_session);
+    int32_t request_session = -1;
+    if (item->getInt32("android.media.audiopolicy.rqst.session", &request_session)) {
+        metrics_proto.set_request_session(request_session);
     }
     //string char kAudioPolicyRqstDevice[] = "android.media.audiopolicy.rqst.device";
-    std::string rqst_device;
-    if (item->getString("android.media.audiopolicy.rqst.device", &rqst_device)) {
-        metrics_proto.set_request_device(std::move(rqst_device));
+    std::string request_device;
+    if (item->getString("android.media.audiopolicy.rqst.device", &request_device)) {
+        metrics_proto.set_request_device(request_device);
     }
 
     //string char kAudioPolicyActiveSrc[] = "android.media.audiopolicy.active.src";
-    std::string active_src;
-    if (item->getString("android.media.audiopolicy.active.src", &active_src)) {
-        metrics_proto.set_active_source(std::move(active_src));
+    std::string active_source;
+    if (item->getString("android.media.audiopolicy.active.src", &active_source)) {
+        metrics_proto.set_active_source(active_source);
     }
     //string char kAudioPolicyActivePkg[] = "android.media.audiopolicy.active.pkg";
-    std::string active_pkg;
-    if (item->getString("android.media.audiopolicy.active.pkg", &active_pkg)) {
-        metrics_proto.set_active_package(std::move(active_pkg));
+    std::string active_package;
+    if (item->getString("android.media.audiopolicy.active.pkg", &active_package)) {
+        metrics_proto.set_active_package(active_package);
     }
     //int32 char kAudioPolicyActiveSession[] = "android.media.audiopolicy.active.session";
     int32_t active_session = -1;
@@ -98,27 +98,40 @@
     //string char kAudioPolicyActiveDevice[] = "android.media.audiopolicy.active.device";
     std::string active_device;
     if (item->getString("android.media.audiopolicy.active.device", &active_device)) {
-        metrics_proto.set_active_device(std::move(active_device));
+        metrics_proto.set_active_device(active_device);
     }
 
-
     std::string serialized;
     if (!metrics_proto.SerializeToString(&serialized)) {
         ALOGE("Failed to serialize audipolicy metrics");
         return false;
     }
 
-    if (enabled_statsd) {
-        android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
-        (void)android::util::stats_write(android::util::MEDIAMETRICS_AUDIOPOLICY_REPORTED,
-                                   timestamp, pkgName.c_str(), pkgVersionCode,
-                                   mediaApexVersion,
-                                   bf_serialized);
+    android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
+    int result = android::util::stats_write(android::util::MEDIAMETRICS_AUDIOPOLICY_REPORTED,
+        timestamp_nanos, package_name.c_str(), package_version_code,
+        media_apex_version,
+        bf_serialized);
+    std::stringstream log;
+    log << "result:" << result << " {"
+            << " mediametrics_audiopolicy_reported:"
+            << android::util::MEDIAMETRICS_AUDIOPOLICY_REPORTED
+            << " timestamp_nanos:" << timestamp_nanos
+            << " package_name:" << package_name
+            << " package_version_code:" << package_version_code
+            << " media_apex_version:" << media_apex_version
 
-    } else {
-        ALOGV("NOT sending: private data (len=%zu)", strlen(serialized.c_str()));
-    }
-
+            << " status:" << status
+            << " request_source:" << request_source
+            << " request_package:" << request_package
+            << " request_session:" << request_session
+            << " request_device:" << request_device
+            << " active_source:" << active_source
+            << " active_package:" << active_package
+            << " active_session:" << active_session
+            << " active_device:" << active_device
+            << " }";
+    statsdLog->log(android::util::MEDIAMETRICS_AUDIOPOLICY_REPORTED, log.str());
     return true;
 }
 
diff --git a/services/mediametrics/statsd_audiorecord.cpp b/services/mediametrics/statsd_audiorecord.cpp
index db809dc..70a67ae 100644
--- a/services/mediametrics/statsd_audiorecord.cpp
+++ b/services/mediametrics/statsd_audiorecord.cpp
@@ -38,16 +38,15 @@
 
 namespace android {
 
-bool statsd_audiorecord(const mediametrics::Item *item)
-{
+bool statsd_audiorecord(const std::shared_ptr<const mediametrics::Item>& item,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog) {
     if (item == nullptr) return false;
 
     // these go into the statsd wrapper
-    const nsecs_t timestamp = MediaMetricsService::roundTime(item->getTimestamp());
-    std::string pkgName = item->getPkgName();
-    int64_t pkgVersionCode = item->getPkgVersionCode();
-    int64_t mediaApexVersion = 0;
-
+    const nsecs_t timestamp_nanos = MediaMetricsService::roundTime(item->getTimestamp());
+    const std::string package_name = item->getPkgName();
+    const int64_t package_version_code = item->getPkgVersionCode();
+    const int64_t media_apex_version = 0;
 
     // the rest into our own proto
     //
@@ -57,12 +56,12 @@
     //
     std::string encoding;
     if (item->getString("android.media.audiorecord.encoding", &encoding)) {
-        metrics_proto.set_encoding(std::move(encoding));
+        metrics_proto.set_encoding(encoding);
     }
 
     std::string source;
     if (item->getString("android.media.audiorecord.source", &source)) {
-        metrics_proto.set_source(std::move(source));
+        metrics_proto.set_source(source);
     }
 
     int32_t latency = -1;
@@ -80,14 +79,14 @@
         metrics_proto.set_channels(channels);
     }
 
-    int64_t createdMs = -1;
-    if (item->getInt64("android.media.audiorecord.createdMs", &createdMs)) {
-        metrics_proto.set_created_millis(createdMs);
+    int64_t created_millis = -1;
+    if (item->getInt64("android.media.audiorecord.createdMs", &created_millis)) {
+        metrics_proto.set_created_millis(created_millis);
     }
 
-    int64_t durationMs = -1;
-    if (item->getInt64("android.media.audiorecord.durationMs", &durationMs)) {
-        metrics_proto.set_duration_millis(durationMs);
+    int64_t duration_millis = -1;
+    if (item->getInt64("android.media.audiorecord.durationMs", &duration_millis)) {
+        metrics_proto.set_duration_millis(duration_millis);
     }
 
     int32_t count = -1;
@@ -95,44 +94,43 @@
         metrics_proto.set_count(count);
     }
 
-    int32_t errcode = -1;
-    if (item->getInt32("android.media.audiorecord.errcode", &errcode)) {
-        metrics_proto.set_error_code(errcode);
-    } else if (item->getInt32("android.media.audiorecord.lastError.code", &errcode)) {
-        metrics_proto.set_error_code(errcode);
+    int32_t error_code = -1;
+    if (item->getInt32("android.media.audiorecord.errcode", &error_code)) {
+        metrics_proto.set_error_code(error_code);
+    } else if (item->getInt32("android.media.audiorecord.lastError.code", &error_code)) {
+        metrics_proto.set_error_code(error_code);
     }
 
-    std::string errfunc;
-    if (item->getString("android.media.audiorecord.errfunc", &errfunc)) {
-        metrics_proto.set_error_function(std::move(errfunc));
-    } else if (item->getString("android.media.audiorecord.lastError.at", &errfunc)) {
-        metrics_proto.set_error_function(std::move(errfunc));
+    std::string error_function;
+    if (item->getString("android.media.audiorecord.errfunc", &error_function)) {
+        metrics_proto.set_error_function(error_function);
+    } else if (item->getString("android.media.audiorecord.lastError.at", &error_function)) {
+        metrics_proto.set_error_function(error_function);
     }
 
-    // portId (int32)
     int32_t port_id = -1;
     if (item->getInt32("android.media.audiorecord.portId", &port_id)) {
-        metrics_proto.set_port_id(count);
+        metrics_proto.set_port_id(port_id);
     }
-    // frameCount (int32)
-    int32_t frameCount = -1;
-    if (item->getInt32("android.media.audiorecord.frameCount", &frameCount)) {
-        metrics_proto.set_frame_count(frameCount);
+
+    int32_t frame_count = -1;
+    if (item->getInt32("android.media.audiorecord.frameCount", &frame_count)) {
+        metrics_proto.set_frame_count(frame_count);
     }
-    // attributes (string)
+
     std::string attributes;
     if (item->getString("android.media.audiorecord.attributes", &attributes)) {
-        metrics_proto.set_attributes(std::move(attributes));
+        metrics_proto.set_attributes(attributes);
     }
-    // channelMask (int64)
-    int64_t channelMask = -1;
-    if (item->getInt64("android.media.audiorecord.channelMask", &channelMask)) {
-        metrics_proto.set_channel_mask(channelMask);
+
+    int64_t channel_mask = -1;
+    if (item->getInt64("android.media.audiorecord.channelMask", &channel_mask)) {
+        metrics_proto.set_channel_mask(channel_mask);
     }
-    // startcount (int64)
-    int64_t startcount = -1;
-    if (item->getInt64("android.media.audiorecord.startcount", &startcount)) {
-        metrics_proto.set_start_count(startcount);
+
+    int64_t start_count = -1;
+    if (item->getInt64("android.media.audiorecord.startcount", &start_count)) {
+        metrics_proto.set_start_count(start_count);
     }
 
     std::string serialized;
@@ -145,21 +143,44 @@
     // log_session_id (string)
     std::string logSessionId;
     (void)item->getString("android.media.audiorecord.logSessionId", &logSessionId);
-    const auto logSessionIdForStats =
+    const auto log_session_id =
             mediametrics::stringutils::sanitizeLogSessionId(logSessionId);
 
-    if (enabled_statsd) {
-        android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
-        (void)android::util::stats_write(android::util::MEDIAMETRICS_AUDIORECORD_REPORTED,
-                                   timestamp, pkgName.c_str(), pkgVersionCode,
-                                   mediaApexVersion,
-                                   bf_serialized,
-                                   logSessionIdForStats.c_str());
+    android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
+    int result = android::util::stats_write(android::util::MEDIAMETRICS_AUDIORECORD_REPORTED,
+        timestamp_nanos, package_name.c_str(), package_version_code,
+        media_apex_version,
+        bf_serialized,
+        log_session_id.c_str());
+    std::stringstream log;
+    log << "result:" << result << " {"
+            << " mediametrics_audiorecord_reported:"
+            << android::util::MEDIAMETRICS_AUDIORECORD_REPORTED
+            << " timestamp_nanos:" << timestamp_nanos
+            << " package_name:" << package_name
+            << " package_version_code:" << package_version_code
+            << " media_apex_version:" << media_apex_version
 
-    } else {
-        ALOGV("NOT sending: private data (len=%zu)", strlen(serialized.c_str()));
-    }
+            << " encoding:" << encoding
+            << " source:" << source
+            << " latency:" << latency
+            << " samplerate:" << samplerate
+            << " channels:" << channels
+            << " created_millis:" << created_millis
+            << " duration_millis:" << duration_millis
+            << " count:" << count
+            << " error_code:" << error_code
+            << " error_function:" << error_function
 
+            << " port_id:" << port_id
+            << " frame_count:" << frame_count
+            << " attributes:" << attributes
+            << " channel_mask:" << channel_mask
+            << " start_count:" << start_count
+
+            << " log_session_id:" << log_session_id
+            << " }";
+    statsdLog->log(android::util::MEDIAMETRICS_AUDIORECORD_REPORTED, log.str());
     return true;
 }
 
diff --git a/services/mediametrics/statsd_audiothread.cpp b/services/mediametrics/statsd_audiothread.cpp
index 2ad2562..34cc923 100644
--- a/services/mediametrics/statsd_audiothread.cpp
+++ b/services/mediametrics/statsd_audiothread.cpp
@@ -37,16 +37,16 @@
 
 namespace android {
 
-bool statsd_audiothread(const mediametrics::Item *item)
+bool statsd_audiothread(const std::shared_ptr<const mediametrics::Item>& item,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog)
 {
     if (item == nullptr) return false;
 
     // these go into the statsd wrapper
-    const nsecs_t timestamp = MediaMetricsService::roundTime(item->getTimestamp());
-    std::string pkgName = item->getPkgName();
-    int64_t pkgVersionCode = item->getPkgVersionCode();
-    int64_t mediaApexVersion = 0;
-
+    const nsecs_t timestamp_nanos = MediaMetricsService::roundTime(item->getTimestamp());
+    const std::string package_name = item->getPkgName();
+    const int64_t package_version_code = item->getPkgVersionCode();
+    const int64_t media_apex_version = 0;
 
     // the rest into our own proto
     //
@@ -68,17 +68,17 @@
     if (item->getInt32(MM_PREFIX "samplerate", &samplerate)) {
         metrics_proto.set_samplerate(samplerate);
     }
-    std::string workhist;
-    if (item->getString(MM_PREFIX "workMs.hist", &workhist)) {
-        metrics_proto.set_work_millis_hist(std::move(workhist));
+    std::string work_millis_hist;
+    if (item->getString(MM_PREFIX "workMs.hist", &work_millis_hist)) {
+        metrics_proto.set_work_millis_hist(work_millis_hist);
     }
-    std::string latencyhist;
-    if (item->getString(MM_PREFIX "latencyMs.hist", &latencyhist)) {
-        metrics_proto.set_latency_millis_hist(std::move(latencyhist));
+    std::string latency_millis_hist;
+    if (item->getString(MM_PREFIX "latencyMs.hist", &latency_millis_hist)) {
+        metrics_proto.set_latency_millis_hist(latency_millis_hist);
     }
-    std::string warmuphist;
-    if (item->getString(MM_PREFIX "warmupMs.hist", &warmuphist)) {
-        metrics_proto.set_warmup_millis_hist(std::move(warmuphist));
+    std::string warmup_millis_hist;
+    if (item->getString(MM_PREFIX "warmupMs.hist", &warmup_millis_hist)) {
+        metrics_proto.set_warmup_millis_hist(warmup_millis_hist);
     }
     int64_t underruns = -1;
     if (item->getInt64(MM_PREFIX "underruns", &underruns)) {
@@ -88,101 +88,99 @@
     if (item->getInt64(MM_PREFIX "overruns", &overruns)) {
         metrics_proto.set_overruns(overruns);
     }
-    int64_t activeMs = -1;
-    if (item->getInt64(MM_PREFIX "activeMs", &activeMs)) {
-        metrics_proto.set_active_millis(activeMs);
+    int64_t active_millis = -1;
+    if (item->getInt64(MM_PREFIX "activeMs", &active_millis)) {
+        metrics_proto.set_active_millis(active_millis);
     }
-    int64_t durationMs = -1;
-    if (item->getInt64(MM_PREFIX "durationMs", &durationMs)) {
-        metrics_proto.set_duration_millis(durationMs);
+    int64_t duration_millis = -1;
+    if (item->getInt64(MM_PREFIX "durationMs", &duration_millis)) {
+        metrics_proto.set_duration_millis(duration_millis);
     }
 
-    // item->setInt32(MM_PREFIX "id", (int32_t)mId); // IO handle
     int32_t id = -1;
     if (item->getInt32(MM_PREFIX "id", &id)) {
         metrics_proto.set_id(id);
     }
-    // item->setInt32(MM_PREFIX "portId", (int32_t)mPortId);
+
     int32_t port_id = -1;
-    if (item->getInt32(MM_PREFIX "portId", &id)) {
+    if (item->getInt32(MM_PREFIX "portId", &port_id)) {
         metrics_proto.set_port_id(port_id);
     }
     // item->setCString(MM_PREFIX "type", threadTypeToString(mType));
     std::string type;
     if (item->getString(MM_PREFIX "type", &type)) {
-        metrics_proto.set_type(std::move(type));
+        metrics_proto.set_type(type);
     }
-    // item->setInt32(MM_PREFIX "sampleRate", (int32_t)mSampleRate);
+
     int32_t sample_rate = -1;
     if (item->getInt32(MM_PREFIX "sampleRate", &sample_rate)) {
         metrics_proto.set_sample_rate(sample_rate);
     }
-    // item->setInt64(MM_PREFIX "channelMask", (int64_t)mChannelMask);
+
     int32_t channel_mask = -1;
     if (item->getInt32(MM_PREFIX "channelMask", &channel_mask)) {
         metrics_proto.set_channel_mask(channel_mask);
     }
-    // item->setCString(MM_PREFIX "encoding", toString(mFormat).c_str());
+
     std::string encoding;
     if (item->getString(MM_PREFIX "encoding", &encoding)) {
-        metrics_proto.set_encoding(std::move(encoding));
+        metrics_proto.set_encoding(encoding);
     }
-    // item->setInt32(MM_PREFIX "frameCount", (int32_t)mFrameCount);
+
     int32_t frame_count = -1;
     if (item->getInt32(MM_PREFIX "frameCount", &frame_count)) {
         metrics_proto.set_frame_count(frame_count);
     }
-    // item->setCString(MM_PREFIX "outDevice", toString(mOutDevice).c_str());
-    std::string outDevice;
-    if (item->getString(MM_PREFIX "outDevice", &outDevice)) {
-        metrics_proto.set_output_device(std::move(outDevice));
-    }
-    // item->setCString(MM_PREFIX "inDevice", toString(mInDevice).c_str());
-    std::string inDevice;
-    if (item->getString(MM_PREFIX "inDevice", &inDevice)) {
-        metrics_proto.set_input_device(std::move(inDevice));
-    }
-    // item->setDouble(MM_PREFIX "ioJitterMs.mean", mIoJitterMs.getMean());
-    double iojitters_ms_mean = -1;
-    if (item->getDouble(MM_PREFIX "ioJitterMs.mean", &iojitters_ms_mean)) {
-        metrics_proto.set_io_jitter_mean_millis(iojitters_ms_mean);
-    }
-    // item->setDouble(MM_PREFIX "ioJitterMs.std", mIoJitterMs.getStdDev());
-    double iojitters_ms_std = -1;
-    if (item->getDouble(MM_PREFIX "ioJitterMs.std", &iojitters_ms_std)) {
-        metrics_proto.set_io_jitter_stddev_millis(iojitters_ms_std);
-    }
-    // item->setDouble(MM_PREFIX "processTimeMs.mean", mProcessTimeMs.getMean());
-    double process_time_ms_mean = -1;
-    if (item->getDouble(MM_PREFIX "processTimeMs.mean", &process_time_ms_mean)) {
-        metrics_proto.set_process_time_mean_millis(process_time_ms_mean);
-    }
-    // item->setDouble(MM_PREFIX "processTimeMs.std", mProcessTimeMs.getStdDev());
-    double process_time_ms_std = -1;
-    if (item->getDouble(MM_PREFIX "processTimeMs.std", &process_time_ms_std)) {
-        metrics_proto.set_process_time_stddev_millis(process_time_ms_std);
-    }
-    // item->setDouble(MM_PREFIX "timestampJitterMs.mean", tsjitter.getMean());
-    double timestamp_jitter_ms_mean = -1;
-    if (item->getDouble(MM_PREFIX "timestampJitterMs.mean", &timestamp_jitter_ms_mean)) {
-        metrics_proto.set_timestamp_jitter_mean_millis(timestamp_jitter_ms_mean);
-    }
-    // item->setDouble(MM_PREFIX "timestampJitterMs.std", tsjitter.getStdDev());
-    double timestamp_jitter_ms_stddev = -1;
-    if (item->getDouble(MM_PREFIX "timestampJitterMs.std", &timestamp_jitter_ms_stddev)) {
-        metrics_proto.set_timestamp_jitter_stddev_millis(timestamp_jitter_ms_stddev);
-    }
-    // item->setDouble(MM_PREFIX "latencyMs.mean", mLatencyMs.getMean());
-    double latency_ms_mean = -1;
-    if (item->getDouble(MM_PREFIX "latencyMs.mean", &latency_ms_mean)) {
-        metrics_proto.set_latency_mean_millis(latency_ms_mean);
-    }
-    // item->setDouble(MM_PREFIX "latencyMs.std", mLatencyMs.getStdDev());
-    double latency_ms_stddev = -1;
-    if (item->getDouble(MM_PREFIX "latencyMs.std", &latency_ms_stddev)) {
-        metrics_proto.set_latency_stddev_millis(latency_ms_stddev);
+
+    std::string output_device;
+    if (item->getString(MM_PREFIX "outDevice", &output_device)) {
+        metrics_proto.set_output_device(output_device);
     }
 
+    std::string input_device;
+    if (item->getString(MM_PREFIX "inDevice", &input_device)) {
+        metrics_proto.set_input_device(input_device);
+    }
+
+    double io_jitter_mean_millis = -1;
+    if (item->getDouble(MM_PREFIX "ioJitterMs.mean", &io_jitter_mean_millis)) {
+        metrics_proto.set_io_jitter_mean_millis(io_jitter_mean_millis);
+    }
+
+    double io_jitter_stddev_millis = -1;
+    if (item->getDouble(MM_PREFIX "ioJitterMs.std", &io_jitter_stddev_millis)) {
+        metrics_proto.set_io_jitter_stddev_millis(io_jitter_stddev_millis);
+    }
+
+    double process_time_mean_millis = -1;
+    if (item->getDouble(MM_PREFIX "processTimeMs.mean", &process_time_mean_millis)) {
+        metrics_proto.set_process_time_mean_millis(process_time_mean_millis);
+    }
+
+    double process_time_stddev_millis = -1;
+    if (item->getDouble(MM_PREFIX "processTimeMs.std", &process_time_stddev_millis)) {
+        metrics_proto.set_process_time_stddev_millis(process_time_stddev_millis);
+    }
+
+    double timestamp_jitter_mean_millis = -1;
+    if (item->getDouble(MM_PREFIX "timestampJitterMs.mean", &timestamp_jitter_mean_millis)) {
+        metrics_proto.set_timestamp_jitter_mean_millis(timestamp_jitter_mean_millis);
+    }
+
+    double timestamp_jitter_stddev_millis = -1;
+    if (item->getDouble(MM_PREFIX "timestampJitterMs.std", &timestamp_jitter_stddev_millis)) {
+        metrics_proto.set_timestamp_jitter_stddev_millis(timestamp_jitter_stddev_millis);
+    }
+
+    double latency_mean_millis = -1;
+    if (item->getDouble(MM_PREFIX "latencyMs.mean", &latency_mean_millis)) {
+        metrics_proto.set_latency_mean_millis(latency_mean_millis);
+    }
+
+    double latency_stddev_millis = -1;
+    if (item->getDouble(MM_PREFIX "latencyMs.std", &latency_stddev_millis)) {
+        metrics_proto.set_latency_stddev_millis(latency_stddev_millis);
+    }
 
     std::string serialized;
     if (!metrics_proto.SerializeToString(&serialized)) {
@@ -190,17 +188,50 @@
         return false;
     }
 
-    if (enabled_statsd) {
-        android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
-        (void)android::util::stats_write(android::util::MEDIAMETRICS_AUDIOTHREAD_REPORTED,
-                                   timestamp, pkgName.c_str(), pkgVersionCode,
-                                   mediaApexVersion,
-                                   bf_serialized);
+    android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
+    int result = android::util::stats_write(android::util::MEDIAMETRICS_AUDIOTHREAD_REPORTED,
+        timestamp_nanos, package_name.c_str(), package_version_code,
+        media_apex_version,
+        bf_serialized);
+    std::stringstream log;
+    log << "result:" << result << " {"
+            << " mediametrics_audiothread_reported:"
+            << android::util::MEDIAMETRICS_AUDIOTHREAD_REPORTED
+            << " timestamp_nanos:" << timestamp_nanos
+            << " package_name:" << package_name
+            << " package_version_code:" << package_version_code
+            << " media_apex_version:" << media_apex_version
 
-    } else {
-        ALOGV("NOT sending: private data (len=%zu)", strlen(serialized.c_str()));
-    }
+            << " type:" << type
+            << " framecount:" << framecount
+            << " samplerate:" << samplerate
+            << " work_millis_hist:" << work_millis_hist
+            << " latency_millis_hist:" << latency_millis_hist
+            << " warmup_millis_hist:" << warmup_millis_hist
+            << " underruns:" << underruns
+            << " overruns:" << overruns
+            << " active_millis:" << active_millis
+            << " duration_millis:" << duration_millis
 
+            << " id:" << id
+            << " port_id:" << port_id
+            << " sample_rate:" << sample_rate
+            << " channel_mask:" << channel_mask
+            << " encoding:" << encoding
+            << " frame_count:" << frame_count
+            << " output_device:" << output_device
+            << " input_device:" << input_device
+            << " io_jitter_mean_millis:" << io_jitter_mean_millis
+            << " io_jitter_stddev_millis:" << io_jitter_stddev_millis
+
+            << " process_time_mean_millis:" << process_time_mean_millis
+            << " process_time_stddev_millis:" << process_time_stddev_millis
+            << " timestamp_jitter_mean_millis:" << timestamp_jitter_mean_millis
+            << " timestamp_jitter_stddev_millis:" << timestamp_jitter_stddev_millis
+            << " latency_mean_millis:" << latency_mean_millis
+            << " latency_stddev_millis:" << latency_stddev_millis
+            << " }";
+    statsdLog->log(android::util::MEDIAMETRICS_AUDIOTHREAD_REPORTED, log.str());
     return true;
 }
 
diff --git a/services/mediametrics/statsd_audiotrack.cpp b/services/mediametrics/statsd_audiotrack.cpp
index fd809c8..fe269a1 100644
--- a/services/mediametrics/statsd_audiotrack.cpp
+++ b/services/mediametrics/statsd_audiotrack.cpp
@@ -38,16 +38,16 @@
 
 namespace android {
 
-bool statsd_audiotrack(const mediametrics::Item *item)
+bool statsd_audiotrack(const std::shared_ptr<const mediametrics::Item>& item,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog)
 {
     if (item == nullptr) return false;
 
     // these go into the statsd wrapper
-    const nsecs_t timestamp = MediaMetricsService::roundTime(item->getTimestamp());
-    std::string pkgName = item->getPkgName();
-    int64_t pkgVersionCode = item->getPkgVersionCode();
-    int64_t mediaApexVersion = 0;
-
+    const nsecs_t timestamp_nanos = MediaMetricsService::roundTime(item->getTimestamp());
+    const std::string package_name = item->getPkgName();
+    const int64_t package_version_code = item->getPkgVersionCode();
+    const int64_t media_apex_version = 0;
 
     // the rest into our own proto
     //
@@ -58,52 +58,52 @@
 
     // static constexpr char kAudioTrackStreamType[] = "android.media.audiotrack.streamtype";
     // optional string streamType;
-    std::string streamtype;
-    if (item->getString("android.media.audiotrack.streamtype", &streamtype)) {
-        metrics_proto.set_stream_type(std::move(streamtype));
+    std::string stream_type;
+    if (item->getString("android.media.audiotrack.streamtype", &stream_type)) {
+        metrics_proto.set_stream_type(stream_type);
     }
 
     // static constexpr char kAudioTrackContentType[] = "android.media.audiotrack.type";
     // optional string contentType;
-    std::string contenttype;
-    if (item->getString("android.media.audiotrack.type", &contenttype)) {
-        metrics_proto.set_content_type(std::move(contenttype));
+    std::string content_type;
+    if (item->getString("android.media.audiotrack.type", &content_type)) {
+        metrics_proto.set_content_type(content_type);
     }
 
     // static constexpr char kAudioTrackUsage[] = "android.media.audiotrack.usage";
     // optional string trackUsage;
-    std::string trackusage;
-    if (item->getString("android.media.audiotrack.usage", &trackusage)) {
-        metrics_proto.set_track_usage(std::move(trackusage));
+    std::string track_usage;
+    if (item->getString("android.media.audiotrack.usage", &track_usage)) {
+        metrics_proto.set_track_usage(track_usage);
     }
 
     // static constexpr char kAudioTrackSampleRate[] = "android.media.audiotrack.samplerate";
     // optional int32 samplerate;
-    int32_t samplerate = -1;
-    if (item->getInt32("android.media.audiotrack.samplerate", &samplerate)) {
-        metrics_proto.set_sample_rate(samplerate);
+    int32_t sample_rate = -1;
+    if (item->getInt32("android.media.audiotrack.samplerate", &sample_rate)) {
+        metrics_proto.set_sample_rate(sample_rate);
     }
 
     // static constexpr char kAudioTrackChannelMask[] = "android.media.audiotrack.channelmask";
     // optional int64 channelMask;
-    int64_t channelMask = -1;
-    if (item->getInt64("android.media.audiotrack.channelmask", &channelMask)) {
-        metrics_proto.set_channel_mask(channelMask);
+    int64_t channel_mask = -1;
+    if (item->getInt64("android.media.audiotrack.channelmask", &channel_mask)) {
+        metrics_proto.set_channel_mask(channel_mask);
     }
 
     // NB: These are not yet exposed as public Java API constants.
     // static constexpr char kAudioTrackUnderrunFrames[] = "android.media.audiotrack.underrunframes";
     // optional int32 underrunframes;
-    int32_t underrunframes = -1;
-    if (item->getInt32("android.media.audiotrack.underrunframes", &underrunframes)) {
-        metrics_proto.set_underrun_frames(underrunframes);
+    int32_t underrun_frames = -1;
+    if (item->getInt32("android.media.audiotrack.underrunframes", &underrun_frames)) {
+        metrics_proto.set_underrun_frames(underrun_frames);
     }
 
     // static constexpr char kAudioTrackStartupGlitch[] = "android.media.audiotrack.glitch.startup";
     // optional int32 startupglitch;
-    int32_t startupglitch = -1;
-    if (item->getInt32("android.media.audiotrack.glitch.startup", &startupglitch)) {
-        metrics_proto.set_startup_glitch(startupglitch);
+    int32_t startup_glitch = -1;
+    if (item->getInt32("android.media.audiotrack.glitch.startup", &startup_glitch)) {
+        metrics_proto.set_startup_glitch(startup_glitch);
     }
 
     // portId (int32)
@@ -114,7 +114,7 @@
     // encoding (string)
     std::string encoding;
     if (item->getString("android.media.audiotrack.encoding", &encoding)) {
-        metrics_proto.set_encoding(std::move(encoding));
+        metrics_proto.set_encoding(encoding);
     }
     // frameCount (int32)
     int32_t frame_count = -1;
@@ -124,7 +124,7 @@
     // attributes (string)
     std::string attributes;
     if (item->getString("android.media.audiotrack.attributes", &attributes)) {
-        metrics_proto.set_attributes(std::move(attributes));
+        metrics_proto.set_attributes(attributes);
     }
 
     std::string serialized;
@@ -137,21 +137,40 @@
     // log_session_id (string)
     std::string logSessionId;
     (void)item->getString("android.media.audiotrack.logSessionId", &logSessionId);
-    const auto logSessionIdForStats =
+    const auto log_session_id =
             mediametrics::stringutils::sanitizeLogSessionId(logSessionId);
 
-    if (enabled_statsd) {
-        android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
-        (void)android::util::stats_write(android::util::MEDIAMETRICS_AUDIOTRACK_REPORTED,
-                                   timestamp, pkgName.c_str(), pkgVersionCode,
-                                   mediaApexVersion,
-                                   bf_serialized,
-                                   logSessionIdForStats.c_str());
+    android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
+    int result = android::util::stats_write(android::util::MEDIAMETRICS_AUDIOTRACK_REPORTED,
+                               timestamp_nanos, package_name.c_str(), package_version_code,
+                               media_apex_version,
+                               bf_serialized,
+                               log_session_id.c_str());
+    std::stringstream log;
+    log << "result:" << result << " {"
+            << " mediametrics_audiotrack_reported:"
+            << android::util::MEDIAMETRICS_AUDIOTRACK_REPORTED
+            << " timestamp_nanos:" << timestamp_nanos
+            << " package_name:" << package_name
+            << " package_version_code:" << package_version_code
+            << " media_apex_version:" << media_apex_version
 
-    } else {
-        ALOGV("NOT sending: private data (len=%zu)", strlen(serialized.c_str()));
-    }
+            << " stream_type:" << stream_type
+            << " content_type:" << content_type
+            << " track_usage:" << track_usage
+            << " sample_rate:" << sample_rate
+            << " channel_mask:" << channel_mask
+            << " underrun_frames:" << underrun_frames
+            << " startup_glitch:" << startup_glitch
+            << " port_id:" << port_id
+            << " encoding:" << encoding
+            << " frame_count:" << frame_count
 
+            << " attributes:" << attributes
+
+            << " log_session_id:" << log_session_id
+            << " }";
+    statsdLog->log(android::util::MEDIAMETRICS_AUDIOTRACK_REPORTED, log.str());
     return true;
 }
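
The change repeated across these statsd_*.cpp handlers has the same shape: the stats_write() push becomes unconditional, its return code is captured, and a human-readable "key:value" rendering of the pushed fields is handed to a shared mediametrics::StatsdLog. Only two things about that class are visible in this patch (a line-count constructor in the tests and the log(atom, string) call here), so the sketch below is a guess at the idea rather than the real implementation: a small, mutex-protected ring buffer of the most recent pushes that a later dump can print.

    // Minimal sketch of a bounded "last N statsd pushes" log. Approximation only;
    // the real mediametrics::StatsdLog interface may differ.
    #include <deque>
    #include <iostream>
    #include <mutex>
    #include <sstream>
    #include <string>

    class SimpleStatsdLog {
    public:
        explicit SimpleStatsdLog(size_t maxLines) : mMaxLines(maxLines) {}

        void log(int atomId, const std::string& message) {
            std::lock_guard<std::mutex> lock(mMutex);
            std::ostringstream line;
            line << "atom:" << atomId << " " << message;
            mLines.push_back(line.str());
            if (mLines.size() > mMaxLines) {
                mLines.pop_front();  // keep only the newest mMaxLines entries
            }
        }

        std::string dump() const {
            std::lock_guard<std::mutex> lock(mMutex);
            std::ostringstream out;
            for (const auto& line : mLines) {
                out << line << "\n";
            }
            return out.str();
        }

    private:
        const size_t mMaxLines;
        mutable std::mutex mMutex;
        std::deque<std::string> mLines;
    };

    int main() {
        SimpleStatsdLog statsdLog(10);   // same spirit as make_shared<StatsdLog>(10) in the tests

        std::stringstream log;
        log << "result:" << 0 << " {"
            << " sample_rate:" << 48000
            << " channel_mask:" << 3
            << " }";
        statsdLog.log(/* hypothetical atom id */ 101, log.str());

        std::cout << statsdLog.dump();
        return 0;
    }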
 
diff --git a/services/mediametrics/statsd_codec.cpp b/services/mediametrics/statsd_codec.cpp
index 1c5ab77..381f441 100644
--- a/services/mediametrics/statsd_codec.cpp
+++ b/services/mediametrics/statsd_codec.cpp
@@ -33,64 +33,64 @@
 
 #include "cleaner.h"
 #include "MediaMetricsService.h"
-#include "frameworks/proto_logging/stats/enums/stats/mediametrics/mediametrics.pb.h"
+#include "frameworks/proto_logging/stats/message/mediametrics_message.pb.h"
 #include "iface_statsd.h"
 
 namespace android {
 
-bool statsd_codec(const mediametrics::Item *item)
+bool statsd_codec(const std::shared_ptr<const mediametrics::Item>& item,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog)
 {
     if (item == nullptr) return false;
 
     // these go into the statsd wrapper
-    const nsecs_t timestamp = MediaMetricsService::roundTime(item->getTimestamp());
-    std::string pkgName = item->getPkgName();
-    int64_t pkgVersionCode = item->getPkgVersionCode();
-    int64_t mediaApexVersion = 0;
-
+    const nsecs_t timestamp_nanos = MediaMetricsService::roundTime(item->getTimestamp());
+    const std::string package_name = item->getPkgName();
+    const int64_t package_version_code = item->getPkgVersionCode();
+    const int64_t media_apex_version = 0;
 
     // the rest into our own proto
     //
-    ::android::stats::mediametrics::CodecData metrics_proto;
+    ::android::stats::mediametrics_message::CodecData metrics_proto;
 
     // flesh out the protobuf we'll hand off with our data
     //
     // android.media.mediacodec.codec   string
     std::string codec;
     if (item->getString("android.media.mediacodec.codec", &codec)) {
-        metrics_proto.set_codec(std::move(codec));
+        metrics_proto.set_codec(codec);
     }
-    // android.media.mediacodec.mime    string
+
     std::string mime;
     if (item->getString("android.media.mediacodec.mime", &mime)) {
-        metrics_proto.set_mime(std::move(mime));
+        metrics_proto.set_mime(mime);
     }
-    // android.media.mediacodec.mode    string
+
     std::string mode;
     if ( item->getString("android.media.mediacodec.mode", &mode)) {
-        metrics_proto.set_mode(std::move(mode));
+        metrics_proto.set_mode(mode);
     }
-    // android.media.mediacodec.encoder int32
+
     int32_t encoder = -1;
     if ( item->getInt32("android.media.mediacodec.encoder", &encoder)) {
         metrics_proto.set_encoder(encoder);
     }
-    // android.media.mediacodec.secure  int32
+
     int32_t secure = -1;
     if ( item->getInt32("android.media.mediacodec.secure", &secure)) {
         metrics_proto.set_secure(secure);
     }
-    // android.media.mediacodec.width   int32
+
     int32_t width = -1;
     if ( item->getInt32("android.media.mediacodec.width", &width)) {
         metrics_proto.set_width(width);
     }
-    // android.media.mediacodec.height  int32
+
     int32_t height = -1;
     if ( item->getInt32("android.media.mediacodec.height", &height)) {
         metrics_proto.set_height(height);
     }
-    // android.media.mediacodec.rotation-degrees        int32
+
     int32_t rotation = -1;
     if ( item->getInt32("android.media.mediacodec.rotation-degrees", &rotation)) {
         metrics_proto.set_rotation(rotation);
@@ -100,90 +100,89 @@
     if ( item->getInt32("android.media.mediacodec.crypto", &crypto)) {
         metrics_proto.set_crypto(crypto);
     }
-    // android.media.mediacodec.profile int32
+
     int32_t profile = -1;
     if ( item->getInt32("android.media.mediacodec.profile", &profile)) {
         metrics_proto.set_profile(profile);
     }
-    // android.media.mediacodec.level   int32
+
     int32_t level = -1;
     if ( item->getInt32("android.media.mediacodec.level", &level)) {
         metrics_proto.set_level(level);
     }
-    // android.media.mediacodec.maxwidth        int32
-    int32_t maxwidth = -1;
-    if ( item->getInt32("android.media.mediacodec.maxwidth", &maxwidth)) {
-        metrics_proto.set_max_width(maxwidth);
+
+    int32_t max_width = -1;
+    if ( item->getInt32("android.media.mediacodec.maxwidth", &max_width)) {
+        metrics_proto.set_max_width(max_width);
     }
-    // android.media.mediacodec.maxheight       int32
-    int32_t maxheight = -1;
-    if ( item->getInt32("android.media.mediacodec.maxheight", &maxheight)) {
-        metrics_proto.set_max_height(maxheight);
+
+    int32_t max_height = -1;
+    if ( item->getInt32("android.media.mediacodec.maxheight", &max_height)) {
+        metrics_proto.set_max_height(max_height);
     }
-    // android.media.mediacodec.errcode         int32
-    int32_t errcode = -1;
-    if ( item->getInt32("android.media.mediacodec.errcode", &errcode)) {
-        metrics_proto.set_error_code(errcode);
+
+    int32_t error_code = -1;
+    if ( item->getInt32("android.media.mediacodec.errcode", &error_code)) {
+        metrics_proto.set_error_code(error_code);
     }
-    // android.media.mediacodec.errstate        string
-    std::string errstate;
-    if ( item->getString("android.media.mediacodec.errstate", &errstate)) {
-        metrics_proto.set_error_state(std::move(errstate));
+
+    std::string error_state;
+    if ( item->getString("android.media.mediacodec.errstate", &error_state)) {
+        metrics_proto.set_error_state(error_state);
     }
-    // android.media.mediacodec.latency.max  int64
+
     int64_t latency_max = -1;
     if ( item->getInt64("android.media.mediacodec.latency.max", &latency_max)) {
         metrics_proto.set_latency_max(latency_max);
     }
-    // android.media.mediacodec.latency.min  int64
+
     int64_t latency_min = -1;
     if ( item->getInt64("android.media.mediacodec.latency.min", &latency_min)) {
         metrics_proto.set_latency_min(latency_min);
     }
-    // android.media.mediacodec.latency.avg  int64
+
     int64_t latency_avg = -1;
     if ( item->getInt64("android.media.mediacodec.latency.avg", &latency_avg)) {
         metrics_proto.set_latency_avg(latency_avg);
     }
-    // android.media.mediacodec.latency.n    int64
+
     int64_t latency_count = -1;
     if ( item->getInt64("android.media.mediacodec.latency.n", &latency_count)) {
         metrics_proto.set_latency_count(latency_count);
     }
-    // android.media.mediacodec.latency.unknown    int64
+
     int64_t latency_unknown = -1;
     if ( item->getInt64("android.media.mediacodec.latency.unknown", &latency_unknown)) {
         metrics_proto.set_latency_unknown(latency_unknown);
     }
-    // android.media.mediacodec.queueSecureInputBufferError  int32
-    if (int32_t queueSecureInputBufferError = -1;
-        item->getInt32("android.media.mediacodec.queueSecureInputBufferError",
-                &queueSecureInputBufferError)) {
-        metrics_proto.set_queue_secure_input_buffer_error(queueSecureInputBufferError);
+
+    int32_t queue_secure_input_buffer_error = -1;
+    if (item->getInt32("android.media.mediacodec.queueSecureInputBufferError",
+                &queue_secure_input_buffer_error)) {
+        metrics_proto.set_queue_secure_input_buffer_error(queue_secure_input_buffer_error);
     }
-    // android.media.mediacodec.queueInputBufferError  int32
-    if (int32_t queueInputBufferError = -1;
-        item->getInt32("android.media.mediacodec.queueInputBufferError",
-                &queueInputBufferError)) {
-        metrics_proto.set_queue_input_buffer_error(queueInputBufferError);
+
+    int32_t queue_input_buffer_error = -1;
+    if (item->getInt32("android.media.mediacodec.queueInputBufferError",
+                &queue_input_buffer_error)) {
+        metrics_proto.set_queue_input_buffer_error(queue_input_buffer_error);
     }
     // android.media.mediacodec.latency.hist    NOT EMITTED
 
-    // android.media.mediacodec.bitrate_mode string
     std::string bitrate_mode;
     if (item->getString("android.media.mediacodec.bitrate_mode", &bitrate_mode)) {
-        metrics_proto.set_bitrate_mode(std::move(bitrate_mode));
+        metrics_proto.set_bitrate_mode(bitrate_mode);
     }
-    // android.media.mediacodec.bitrate int32
+
     int32_t bitrate = -1;
     if (item->getInt32("android.media.mediacodec.bitrate", &bitrate)) {
         metrics_proto.set_bitrate(bitrate);
     }
-    // android.media.mediacodec.lifetimeMs int64
-    int64_t lifetimeMs = -1;
-    if ( item->getInt64("android.media.mediacodec.lifetimeMs", &lifetimeMs)) {
-        lifetimeMs = mediametrics::bucket_time_minutes(lifetimeMs);
-        metrics_proto.set_lifetime_millis(lifetimeMs);
+
+    int64_t lifetime_millis = -1;
+    if (item->getInt64("android.media.mediacodec.lifetimeMs", &lifetime_millis)) {
+        lifetime_millis = mediametrics::bucket_time_minutes(lifetime_millis);
+        metrics_proto.set_lifetime_millis(lifetime_millis);
     }
 
     // new for S; need to plumb through to westworld
@@ -201,18 +200,51 @@
         ALOGE("Failed to serialize codec metrics");
         return false;
     }
+    android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
+    int result = android::util::stats_write(android::util::MEDIAMETRICS_CODEC_REPORTED,
+                               timestamp_nanos, package_name.c_str(), package_version_code,
+                               media_apex_version,
+                               bf_serialized);
+    std::stringstream log;
+    log << "result:" << result << " {"
+            << " mediametrics_codec_reported:"
+            << android::util::MEDIAMETRICS_CODEC_REPORTED
+            << " timestamp_nanos:" << timestamp_nanos
+            << " package_name:" << package_name
+            << " package_version_code:" << package_version_code
+            << " media_apex_version:" << media_apex_version
 
-    if (enabled_statsd) {
-        android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
-        (void)android::util::stats_write(android::util::MEDIAMETRICS_CODEC_REPORTED,
-                                   timestamp, pkgName.c_str(), pkgVersionCode,
-                                   mediaApexVersion,
-                                   bf_serialized);
+            << " codec:" << codec
+            << " mime:" << mime
+            << " mode:" << mode
+            << " encoder:" << encoder
+            << " secure:" << secure
+            << " width:" << width
+            << " height:" << height
+            << " rotation:" << rotation
+            << " crypto:" << crypto
+            << " profile:" << profile
 
-    } else {
-        ALOGV("NOT sending: private data (len=%zu)", strlen(serialized.c_str()));
-    }
+            << " level:" << level
+            << " max_width:" << max_width
+            << " max_height:" << max_height
+            << " error_code:" << error_code
+            << " error_state:" << error_state
+            << " latency_max:" << latency_max
+            << " latency_min:" << latency_min
+            << " latency_avg:" << latency_avg
+            << " latency_count:" << latency_count
+            << " latency_unknown:" << latency_unknown
 
+            << " queue_input_buffer_error:" << queue_input_buffer_error
+            << " queue_secure_input_buffer_error:" << queue_secure_input_buffer_error
+            << " bitrate_mode:" << bitrate_mode
+            << " bitrate:" << bitrate
+            << " lifetime_millis:" << lifetime_millis
+            // TODO: add when log_session_id is merged.
+            // << " log_session_id:" << log_session_id
+            << " }";
+    statsdLog->log(android::util::MEDIAMETRICS_CODEC_REPORTED, log.str());
     return true;
 }
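
A detail that is easy to miss in these hunks: every set_xxx(std::move(local)) becomes set_xxx(local). That is not just part of the rename pass. The same locals are now streamed into the dump string after the proto is filled, and a moved-from std::string is left in a valid but unspecified state, so the dump could silently lose the value. A minimal illustration follows; set_codec here is a stand-in by-value setter, not the generated protobuf API.

    #include <iostream>
    #include <string>
    #include <utility>

    // Stand-in for a generated protobuf setter that takes its argument by value.
    static std::string gStored;
    static void set_codec(std::string value) { gStored = std::move(value); }

    int main() {
        std::string codec = "c2.android.avc.decoder";

        // Moving would leave `codec` in a valid but unspecified state, so the
        // log line below might print nothing useful:
        //     set_codec(std::move(codec));
        set_codec(codec);   // copy: `codec` stays intact for the dump string

        std::cout << "codec:" << codec << "\n";   // mirrors the dump-string usage above
        return 0;
    }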
 
diff --git a/services/mediametrics/statsd_drm.cpp b/services/mediametrics/statsd_drm.cpp
index 071c549..73b8872 100644
--- a/services/mediametrics/statsd_drm.cpp
+++ b/services/mediametrics/statsd_drm.cpp
@@ -32,6 +32,7 @@
 #include <pwd.h>
 
 #include "MediaMetricsService.h"
+#include "StringUtils.h"
 #include "iface_statsd.h"
 
 #include <statslog.h>
@@ -43,53 +44,60 @@
 namespace android {
 
 // mediadrm
-bool statsd_mediadrm(const mediametrics::Item *item)
+bool statsd_mediadrm(const std::shared_ptr<const mediametrics::Item>& item,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog)
 {
     if (item == nullptr) return false;
 
-    const nsecs_t timestamp = MediaMetricsService::roundTime(item->getTimestamp());
-    std::string pkgName = item->getPkgName();
-    int64_t pkgVersionCode = item->getPkgVersionCode();
-    int64_t mediaApexVersion = 0;
+    const nsecs_t timestamp_nanos = MediaMetricsService::roundTime(item->getTimestamp());
+    const std::string package_name = item->getPkgName();
+    const int64_t package_version_code = item->getPkgVersionCode();
+    const int64_t media_apex_version = 0;
 
     std::string vendor;
     (void) item->getString("vendor", &vendor);
     std::string description;
     (void) item->getString("description", &description);
 
-    if (enabled_statsd) {
-        // This field is left here for backward compatibility.
-        // This field is not used anymore.
-        const std::string  kUnusedField("unused");
-        android::util::BytesField bf_serialized(kUnusedField.c_str(), kUnusedField.size());
-        android::util::stats_write(android::util::MEDIAMETRICS_MEDIADRM_REPORTED,
-                                   timestamp, pkgName.c_str(), pkgVersionCode,
-                                   mediaApexVersion,
-                                   vendor.c_str(),
-                                   description.c_str(),
-                                   bf_serialized);
-    } else {
-        ALOGV("NOT sending: mediadrm data(%s, %s)", vendor.c_str(), description.c_str());
-    }
+    // This field is left here for backward compatibility.
+    // This field is not used anymore.
+    const std::string  kUnusedField("unused");
+    android::util::BytesField bf_serialized(kUnusedField.c_str(), kUnusedField.size());
+    int result = android::util::stats_write(android::util::MEDIAMETRICS_MEDIADRM_REPORTED,
+        timestamp_nanos, package_name.c_str(), package_version_code,
+        media_apex_version,
+        vendor.c_str(),
+        description.c_str(),
+        bf_serialized);
 
+    std::stringstream log;
+    log << "result:" << result << " {"
+            << " mediametrics_mediadrm_reported:"
+            << android::util::MEDIAMETRICS_MEDIADRM_REPORTED
+            << " timestamp_nanos:" << timestamp_nanos
+            << " package_name:" << package_name
+            << " package_version_code:" << package_version_code
+            << " media_apex_version:" << media_apex_version
+
+            << " vendor:" << vendor
+            << " description:" << description
+            // omitting serialized
+            << " }";
+    statsdLog->log(android::util::MEDIAMETRICS_MEDIADRM_REPORTED, log.str());
     return true;
 }
 
 // drmmanager
-bool statsd_drmmanager(const mediametrics::Item *item)
+bool statsd_drmmanager(const std::shared_ptr<const mediametrics::Item>& item,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog)
 {
     using namespace std::string_literals;
     if (item == nullptr) return false;
 
-    if (!enabled_statsd) {
-        ALOGV("NOT sending: drmmanager data");
-        return true;
-    }
-
-    const nsecs_t timestamp = MediaMetricsService::roundTime(item->getTimestamp());
-    std::string pkgName = item->getPkgName();
-    int64_t pkgVersionCode = item->getPkgVersionCode();
-    int64_t mediaApexVersion = 0;
+    const nsecs_t timestamp_nanos = MediaMetricsService::roundTime(item->getTimestamp());
+    const std::string package_name = item->getPkgName();
+    const int64_t package_version_code = item->getPkgVersionCode();
+    const int64_t media_apex_version = 0;
 
     std::string plugin_id;
     (void) item->getString("plugin_id", &plugin_id);
@@ -107,8 +115,9 @@
         item->getInt64(("method"s + std::to_string(i)).c_str(), &methodCounts[i]);
     }
 
-    android::util::stats_write(android::util::MEDIAMETRICS_DRMMANAGER_REPORTED,
-                               timestamp, pkgName.c_str(), pkgVersionCode, mediaApexVersion,
+    const int result = android::util::stats_write(android::util::MEDIAMETRICS_DRMMANAGER_REPORTED,
+                               timestamp_nanos, package_name.c_str(), package_version_code,
+                               media_apex_version,
                                plugin_id.c_str(), description.c_str(),
                                method_id, mime_types.c_str(),
                                methodCounts[0], methodCounts[1], methodCounts[2],
@@ -117,6 +126,25 @@
                                methodCounts[9], methodCounts[10], methodCounts[11],
                                methodCounts[12]);
 
+    std::stringstream log;
+    log << "result:" << result << " {"
+            << " mediametrics_drmmanager_reported:"
+            << android::util::MEDIAMETRICS_DRMMANAGER_REPORTED
+            << " timestamp_nanos:" << timestamp_nanos
+            << " package_name:" << package_name
+            << " package_version_code:" << package_version_code
+            << " media_apex_version:" << media_apex_version
+
+            << " plugin_id:" << plugin_id
+            << " description:" << description
+            << " method_id:" << method_id
+            << " mime_types:" << mime_types;
+
+    for (size_t i = 0; i < methodCounts.size(); ++i) {
+        log << " method_" << i << ":" << methodCounts[i];
+    }
+    log << " }";
+    statsdLog->log(android::util::MEDIAMETRICS_DRMMANAGER_REPORTED, log.str());
     return true;
 }
 
@@ -144,17 +172,14 @@
 } // namespace
 
 // |out| and its contents are memory-managed by statsd.
-bool statsd_mediadrm_puller(const mediametrics::Item* item, AStatsEventList* out)
+bool statsd_mediadrm_puller(
+        const std::shared_ptr<const mediametrics::Item>& item, AStatsEventList* out,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog)
 {
     if (item == nullptr) {
         return false;
     }
 
-    if (!enabled_statsd) {
-        ALOGV("NOT pulling: mediadrm activity");
-        return true;
-    }
-
     std::string serialized_metrics;
     (void) item->getString("serialized_metrics", &serialized_metrics);
     const auto framework_raw(base64DecodeNoPad(serialized_metrics));
@@ -163,6 +188,11 @@
     (void) item->getString("plugin_metrics", &plugin_metrics);
     const auto plugin_raw(base64DecodeNoPad(plugin_metrics));
 
+    if (serialized_metrics.size() == 0 && plugin_metrics.size() == 0) {
+        ALOGD("statsd_mediadrm_puller skipping empty entry");
+        return false;
+    }
+
     std::string vendor;
     (void) item->getString("vendor", &vendor);
     std::string description;
@@ -178,6 +208,19 @@
     AStatsEvent_writeByteArray(event, framework_raw.data(), framework_raw.size());
     AStatsEvent_writeByteArray(event, plugin_raw.data(), plugin_raw.size());
     AStatsEvent_build(event);
+
+    std::stringstream log;
+    log << "pulled:" << " {"
+            << " media_drm_activity_info:"
+            << android::util::MEDIA_DRM_ACTIVITY_INFO
+            << " package_name:" << item->getPkgName()
+            << " package_version_code:" << item->getPkgVersionCode()
+            << " vendor:" << vendor
+            << " description:" << description
+            << " framework_metrics:" << mediametrics::stringutils::bytesToString(framework_raw, 8)
+            << " vendor_metrics:" <<  mediametrics::stringutils::bytesToString(plugin_raw, 8)
+            << " }";
+    statsdLog->log(android::util::MEDIA_DRM_ACTIVITY_INFO, log.str());
     return true;
 }
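
The puller now also records a short preview of the pulled byte arrays via mediametrics::stringutils::bytesToString(raw, 8). That helper comes from the newly included StringUtils.h, whose contents are not part of this patch, so the following is only an assumed-behavior sketch: render at most a fixed number of leading bytes as hex so a large serialized blob does not flood the dump log.

    #include <algorithm>
    #include <cstdint>
    #include <cstdio>
    #include <string>
    #include <vector>

    // Hypothetical stand-in for bytesToString(); the real signature and formatting
    // may differ. The point is only the truncation to a small prefix.
    static std::string bytesToStringSketch(const std::vector<uint8_t>& bytes, size_t limit) {
        std::string out;
        const size_t n = std::min(bytes.size(), limit);
        char buf[4];
        for (size_t i = 0; i < n; ++i) {
            snprintf(buf, sizeof(buf), "%02x", bytes[i]);
            out += buf;
        }
        if (bytes.size() > limit) {
            out += "...";   // mark that the blob was truncated for logging
        }
        return out;
    }

    int main() {
        const std::vector<uint8_t> plugin_raw = {0x0a, 0x10, 0xde, 0xad, 0xbe, 0xef,
                                                 0x01, 0x02, 0x03, 0x04};
        printf("vendor_metrics:%s\n", bytesToStringSketch(plugin_raw, 8).c_str());
        return 0;
    }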
 
diff --git a/services/mediametrics/statsd_extractor.cpp b/services/mediametrics/statsd_extractor.cpp
index 4180e0c..e228f07 100644
--- a/services/mediametrics/statsd_extractor.cpp
+++ b/services/mediametrics/statsd_extractor.cpp
@@ -37,16 +37,16 @@
 
 namespace android {
 
-bool statsd_extractor(const mediametrics::Item *item)
+bool statsd_extractor(const std::shared_ptr<const mediametrics::Item>& item,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog)
 {
     if (item == nullptr) return false;
 
     // these go into the statsd wrapper
-    const nsecs_t timestamp = MediaMetricsService::roundTime(item->getTimestamp());
-    std::string pkgName = item->getPkgName();
-    int64_t pkgVersionCode = item->getPkgVersionCode();
-    int64_t mediaApexVersion = 0;
-
+    const nsecs_t timestamp_nanos = MediaMetricsService::roundTime(item->getTimestamp());
+    const std::string package_name = item->getPkgName();
+    const int64_t package_version_code = item->getPkgVersionCode();
+    const int64_t media_apex_version = 0;
 
     // the rest into our own proto
     //
@@ -55,26 +55,25 @@
     // flesh out the protobuf we'll hand off with our data
     //
 
-    // android.media.mediaextractor.fmt         string
-    std::string fmt;
-    if (item->getString("android.media.mediaextractor.fmt", &fmt)) {
-        metrics_proto.set_format(std::move(fmt));
-    }
-    // android.media.mediaextractor.mime        string
-    std::string mime;
-    if (item->getString("android.media.mediaextractor.mime", &mime)) {
-        metrics_proto.set_mime(std::move(mime));
-    }
-    // android.media.mediaextractor.ntrk        int32
-    int32_t ntrk = -1;
-    if (item->getInt32("android.media.mediaextractor.ntrk", &ntrk)) {
-        metrics_proto.set_tracks(ntrk);
+    std::string format;
+    if (item->getString("android.media.mediaextractor.fmt", &format)) {
+        metrics_proto.set_format(format);
     }
 
-    // android.media.mediaextractor.entry       string
+    std::string mime;
+    if (item->getString("android.media.mediaextractor.mime", &mime)) {
+        metrics_proto.set_mime(mime);
+    }
+
+    int32_t tracks = -1;
+    if (item->getInt32("android.media.mediaextractor.ntrk", &tracks)) {
+        metrics_proto.set_tracks(tracks);
+    }
+
     std::string entry_point_string;
+    stats::mediametrics::ExtractorData::EntryPoint entry_point =
+            stats::mediametrics::ExtractorData_EntryPoint_OTHER;
     if (item->getString("android.media.mediaextractor.entry", &entry_point_string)) {
-      stats::mediametrics::ExtractorData::EntryPoint entry_point;
       if (entry_point_string == "sdk") {
         entry_point = stats::mediametrics::ExtractorData_EntryPoint_SDK;
       } else if (entry_point_string == "ndk-with-jvm") {
@@ -93,17 +92,30 @@
         return false;
     }
 
-    if (enabled_statsd) {
-        android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
-        (void)android::util::stats_write(android::util::MEDIAMETRICS_EXTRACTOR_REPORTED,
-                                   timestamp, pkgName.c_str(), pkgVersionCode,
-                                   mediaApexVersion,
-                                   bf_serialized);
+    android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
+    int result = android::util::stats_write(android::util::MEDIAMETRICS_EXTRACTOR_REPORTED,
+        timestamp_nanos, package_name.c_str(), package_version_code,
+        media_apex_version,
+        bf_serialized);
+    std::stringstream log;
+    log << "result:" << result << " {"
+            << " mediametrics_extractor_reported:"
+            << android::util::MEDIAMETRICS_EXTRACTOR_REPORTED
+            << " timestamp_nanos:" << timestamp_nanos
+            << " package_name:" << package_name
+            << " package_version_code:" << package_version_code
+            << " media_apex_version:" << media_apex_version
 
-    } else {
-        ALOGV("NOT sending: private data (len=%zu)", strlen(serialized.c_str()));
-    }
+            << " format:" << format
+            << " mime:" << mime
+            << " tracks:" << tracks
+            << " entry_point:" << entry_point_string << "(" << entry_point << ")"
 
+            // TODO: Add MediaExtractor log_session_id
+            // << " log_session_id:" << log_session_id
+
+            << " }";
+    statsdLog->log(android::util::MEDIAMETRICS_EXTRACTOR_REPORTED, log.str());
     return true;
 }
 
diff --git a/services/mediametrics/statsd_mediaparser.cpp b/services/mediametrics/statsd_mediaparser.cpp
index 262b2ae..f543425 100644
--- a/services/mediametrics/statsd_mediaparser.cpp
+++ b/services/mediametrics/statsd_mediaparser.cpp
@@ -36,16 +36,15 @@
 
 namespace android {
 
-bool statsd_mediaparser(const mediametrics::Item *item)
+bool statsd_mediaparser(const std::shared_ptr<const mediametrics::Item>& item,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog)
 {
-    if (item == nullptr) {
-        return false;
-    }
+    static constexpr bool enabled_statsd = true; // TODO: Remove, dup with dump2StatsdInternal().
+    if (item == nullptr) return false;
 
-    // statsd wrapper data.
-    const nsecs_t timestamp = MediaMetricsService::roundTime(item->getTimestamp());
-    std::string pkgName = item->getPkgName();
-    int64_t pkgVersionCode = item->getPkgVersionCode();
+    const nsecs_t timestamp_nanos = MediaMetricsService::roundTime(item->getTimestamp());
+    const std::string package_name = item->getPkgName();
+    const int64_t package_version_code = item->getPkgVersionCode();
 
     std::string parserName;
     item->getString("android.media.mediaparser.parserName", &parserName);
@@ -82,9 +81,9 @@
 
     if (enabled_statsd) {
         (void) android::util::stats_write(android::util::MEDIAMETRICS_MEDIAPARSER_REPORTED,
-                                   timestamp,
-                                   pkgName.c_str(),
-                                   pkgVersionCode,
+                                   timestamp_nanos,
+                                   package_name.c_str(),
+                                   package_version_code,
                                    parserName.c_str(),
                                    createdByName,
                                    parserPool.c_str(),
@@ -99,7 +98,29 @@
     } else {
         ALOGV("NOT sending MediaParser media metrics.");
     }
-
+    // TODO: Cleanup after playback_id is merged.
+    std::stringstream log;
+    log << "result:" << "(result)" << " {"
+            << " mediametrics_mediaparser_reported:"
+            << android::util::MEDIAMETRICS_MEDIAPARSER_REPORTED
+            << " timestamp_nanos:" << timestamp_nanos
+            << " package_name:" << package_name
+            << " package_version_code:" << package_version_code
+            << " parser_name:" << parserName
+            << " created_by_name:" << createdByName
+            << " parser_pool:" << parserPool
+            << " last_exception:" << lastException
+            << " resource_byte_count:" << resourceByteCount
+            << " duration_millis:" << durationMillis
+            << " track_mime_types:" << trackMimeTypes
+            << " track_codecs:" << trackCodecs
+            << " altered_parameters:" << alteredParameters
+            << " video_width:" << videoWidth
+            << " video_height:" << videoHeight
+            // TODO: Add MediaParser playback_id
+            // << " playback_id:" << playbackId
+            << " }";
+    statsdLog->log(android::util::MEDIAMETRICS_MEDIAPARSER_REPORTED, log.str());
     return true;
 }
 
diff --git a/services/mediametrics/statsd_nuplayer.cpp b/services/mediametrics/statsd_nuplayer.cpp
index a8d0f55..33da81e 100644
--- a/services/mediametrics/statsd_nuplayer.cpp
+++ b/services/mediametrics/statsd_nuplayer.cpp
@@ -41,16 +41,16 @@
  *  handles nuplayer AND nuplayer2
  *  checks for the union of what the two players generate
  */
-bool statsd_nuplayer(const mediametrics::Item *item)
+bool statsd_nuplayer(const std::shared_ptr<const mediametrics::Item>& item,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog)
 {
     if (item == nullptr) return false;
 
     // these go into the statsd wrapper
-    const nsecs_t timestamp = MediaMetricsService::roundTime(item->getTimestamp());
-    std::string pkgName = item->getPkgName();
-    int64_t pkgVersionCode = item->getPkgVersionCode();
-    int64_t mediaApexVersion = 0;
-
+    const nsecs_t timestamp_nanos = MediaMetricsService::roundTime(item->getTimestamp());
+    const std::string package_name = item->getPkgName();
+    const int64_t package_version_code = item->getPkgVersionCode();
+    const int64_t media_apex_version = 0;
 
     // the rest into our own proto
     //
@@ -60,15 +60,16 @@
     //
 
     // differentiate between nuplayer and nuplayer2
-    metrics_proto.set_whichplayer(item->getKey().c_str());
+    std::string whichPlayer = item->getKey();
+    metrics_proto.set_whichplayer(whichPlayer.c_str());
 
     std::string video_mime;
     if (item->getString("android.media.mediaplayer.video.mime", &video_mime)) {
-        metrics_proto.set_video_mime(std::move(video_mime));
+        metrics_proto.set_video_mime(video_mime);
     }
     std::string video_codec;
     if (item->getString("android.media.mediaplayer.video.codec", &video_codec)) {
-        metrics_proto.set_video_codec(std::move(video_codec));
+        metrics_proto.set_video_codec(video_codec);
     }
 
     int32_t width = -1;
@@ -92,32 +93,32 @@
     if (item->getInt64("android.media.mediaplayer.startupdropped", &frames_dropped_startup)) {
         metrics_proto.set_frames_dropped_startup(frames_dropped_startup);
     }
-    double fps = -1.0;
-    if (item->getDouble("android.media.mediaplayer.fps", &fps)) {
-        metrics_proto.set_framerate(fps);
+    double framerate = -1.0;
+    if (item->getDouble("android.media.mediaplayer.fps", &framerate)) {
+        metrics_proto.set_framerate(framerate);
     }
 
     std::string audio_mime;
     if (item->getString("android.media.mediaplayer.audio.mime", &audio_mime)) {
-        metrics_proto.set_audio_mime(std::move(audio_mime));
+        metrics_proto.set_audio_mime(audio_mime);
     }
     std::string audio_codec;
     if (item->getString("android.media.mediaplayer.audio.codec", &audio_codec)) {
-        metrics_proto.set_audio_codec(std::move(audio_codec));
+        metrics_proto.set_audio_codec(audio_codec);
     }
 
-    int64_t duration_ms = -1;
-    if (item->getInt64("android.media.mediaplayer.durationMs", &duration_ms)) {
-        metrics_proto.set_duration_millis(duration_ms);
+    int64_t duration_millis = -1;
+    if (item->getInt64("android.media.mediaplayer.durationMs", &duration_millis)) {
+        metrics_proto.set_duration_millis(duration_millis);
     }
-    int64_t playing_ms = -1;
-    if (item->getInt64("android.media.mediaplayer.playingMs", &playing_ms)) {
-        metrics_proto.set_playing_millis(playing_ms);
+    int64_t playing_millis = -1;
+    if (item->getInt64("android.media.mediaplayer.playingMs", &playing_millis)) {
+        metrics_proto.set_playing_millis(playing_millis);
     }
 
-    int32_t err = -1;
-    if (item->getInt32("android.media.mediaplayer.err", &err)) {
-        metrics_proto.set_error(err);
+    int32_t error = -1;
+    if (item->getInt32("android.media.mediaplayer.err", &error)) {
+        metrics_proto.set_error(error);
     }
     int32_t error_code = -1;
     if (item->getInt32("android.media.mediaplayer.errcode", &error_code)) {
@@ -125,45 +126,74 @@
     }
     std::string error_state;
     if (item->getString("android.media.mediaplayer.errstate", &error_state)) {
-        metrics_proto.set_error_state(std::move(error_state));
+        metrics_proto.set_error_state(error_state);
     }
 
     std::string data_source_type;
     if (item->getString("android.media.mediaplayer.dataSource", &data_source_type)) {
-        metrics_proto.set_data_source_type(std::move(data_source_type));
+        metrics_proto.set_data_source_type(data_source_type);
     }
 
-    int64_t rebufferingMs = -1;
-    if (item->getInt64("android.media.mediaplayer.rebufferingMs", &rebufferingMs)) {
-        metrics_proto.set_rebuffering_millis(rebufferingMs);
+    int64_t rebuffering_millis = -1;
+    if (item->getInt64("android.media.mediaplayer.rebufferingMs", &rebuffering_millis)) {
+        metrics_proto.set_rebuffering_millis(rebuffering_millis);
     }
     int32_t rebuffers = -1;
     if (item->getInt32("android.media.mediaplayer.rebuffers", &rebuffers)) {
         metrics_proto.set_rebuffers(rebuffers);
     }
-    int32_t rebufferExit = -1;
-    if (item->getInt32("android.media.mediaplayer.rebufferExit", &rebufferExit)) {
-        metrics_proto.set_rebuffer_at_exit(rebufferExit);
+    int32_t rebuffer_at_exit = -1;
+    if (item->getInt32("android.media.mediaplayer.rebufferExit", &rebuffer_at_exit)) {
+        metrics_proto.set_rebuffer_at_exit(rebuffer_at_exit);
     }
 
-
     std::string serialized;
     if (!metrics_proto.SerializeToString(&serialized)) {
         ALOGE("Failed to serialize nuplayer metrics");
         return false;
     }
 
-    if (enabled_statsd) {
-        android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
-        (void)android::util::stats_write(android::util::MEDIAMETRICS_NUPLAYER_REPORTED,
-                                   timestamp, pkgName.c_str(), pkgVersionCode,
-                                   mediaApexVersion,
-                                   bf_serialized);
+    android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
+    int result = android::util::stats_write(android::util::MEDIAMETRICS_NUPLAYER_REPORTED,
+        timestamp_nanos, package_name.c_str(), package_version_code,
+        media_apex_version,
+        bf_serialized);
 
-    } else {
-        ALOGV("NOT sending: private data (len=%zu)", strlen(serialized.c_str()));
-    }
+    std::stringstream log;
+    log << "result:" << result << " {"
+            << " mediametrics_nuplayer_reported:"
+            << android::util::MEDIAMETRICS_NUPLAYER_REPORTED
+            << " timestamp_nanos:" << timestamp_nanos
+            << " package_name:" << package_name
+            << " package_version_code:" << package_version_code
+            << " media_apex_version:" << media_apex_version
 
+            << " whichPlayer:" << whichPlayer
+            << " video_mime:" << video_mime
+            << " video_codec:" << video_codec
+            << " width:" << width
+            << " height:" << height
+            << " frames:" << frames
+            << " frames_dropped:" << frames_dropped
+            << " framerate:" << framerate
+            << " audio_mime:" << audio_mime
+            << " audio_codec:" << media_apex_version
+
+            << " duration_millis:" << duration_millis
+            << " playing_millis:" << playing_millis
+            << " error:" << error
+            << " error_code:" << error_code
+            << " error_state:" << error_state
+            << " data_source_type:" << data_source_type
+            << " rebuffering_millis:" << rebuffering_millis
+            << " rebuffers:" << rebuffers
+            << " rebuffer_at_exit:" << rebuffer_at_exit
+            << " frames_dropped_startup:" << frames_dropped_startup
+
+            // TODO NuPlayer - add log_session_id
+            // << " log_session_id:" << log_session_id
+            << " }";
+    statsdLog->log(android::util::MEDIAMETRICS_NUPLAYER_REPORTED, log.str());
     return true;
 }
 
diff --git a/services/mediametrics/statsd_recorder.cpp b/services/mediametrics/statsd_recorder.cpp
index 2e5ada4..23b884f 100644
--- a/services/mediametrics/statsd_recorder.cpp
+++ b/services/mediametrics/statsd_recorder.cpp
@@ -37,16 +37,16 @@
 
 namespace android {
 
-bool statsd_recorder(const mediametrics::Item *item)
+bool statsd_recorder(const std::shared_ptr<const mediametrics::Item>& item,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog)
 {
     if (item == nullptr) return false;
 
     // these go into the statsd wrapper
-    const nsecs_t timestamp = MediaMetricsService::roundTime(item->getTimestamp());
-    std::string pkgName = item->getPkgName();
-    int64_t pkgVersionCode = item->getPkgVersionCode();
-    int64_t mediaApexVersion = 0;
-
+    const nsecs_t timestamp_nanos = MediaMetricsService::roundTime(item->getTimestamp());
+    const std::string package_name = item->getPkgName();
+    const int64_t package_version_code = item->getPkgVersionCode();
+    const int64_t media_apex_version = 0;
 
     // the rest into our own proto
     //
@@ -58,22 +58,22 @@
     // string kRecorderAudioMime = "android.media.mediarecorder.audio.mime";
     std::string audio_mime;
     if (item->getString("android.media.mediarecorder.audio.mime", &audio_mime)) {
-        metrics_proto.set_audio_mime(std::move(audio_mime));
+        metrics_proto.set_audio_mime(audio_mime);
     }
     // string kRecorderVideoMime = "android.media.mediarecorder.video.mime";
     std::string video_mime;
     if (item->getString("android.media.mediarecorder.video.mime", &video_mime)) {
-        metrics_proto.set_video_mime(std::move(video_mime));
+        metrics_proto.set_video_mime(video_mime);
     }
     // int32 kRecorderVideoProfile = "android.media.mediarecorder.video-encoder-profile";
-    int32_t videoProfile = -1;
-    if (item->getInt32("android.media.mediarecorder.video-encoder-profile", &videoProfile)) {
-        metrics_proto.set_video_profile(videoProfile);
+    int32_t video_profile = -1;
+    if (item->getInt32("android.media.mediarecorder.video-encoder-profile", &video_profile)) {
+        metrics_proto.set_video_profile(video_profile);
     }
     // int32 kRecorderVideoLevel = "android.media.mediarecorder.video-encoder-level";
-    int32_t videoLevel = -1;
-    if (item->getInt32("android.media.mediarecorder.video-encoder-level", &videoLevel)) {
-        metrics_proto.set_video_level(videoLevel);
+    int32_t video_level = -1;
+    if (item->getInt32("android.media.mediarecorder.video-encoder-level", &video_level)) {
+        metrics_proto.set_video_level(video_level);
     }
     // int32 kRecorderWidth = "android.media.mediarecorder.width";
     int32_t width = -1;
@@ -97,73 +97,73 @@
     }
 
     // int32 kRecorderCaptureFps = "android.media.mediarecorder.capture-fps";
-    int32_t captureFps = -1;
-    if (item->getInt32("android.media.mediarecorder.capture-fps", &captureFps)) {
-        metrics_proto.set_capture_fps(captureFps);
+    int32_t capture_fps = -1;
+    if (item->getInt32("android.media.mediarecorder.capture-fps", &capture_fps)) {
+        metrics_proto.set_capture_fps(capture_fps);
     }
     // double kRecorderCaptureFpsEnable = "android.media.mediarecorder.capture-fpsenable";
-    double captureFpsEnable = -1;
-    if (item->getDouble("android.media.mediarecorder.capture-fpsenable", &captureFpsEnable)) {
-        metrics_proto.set_capture_fps_enable(captureFpsEnable);
+    double capture_fps_enable = -1;
+    if (item->getDouble("android.media.mediarecorder.capture-fpsenable", &capture_fps_enable)) {
+        metrics_proto.set_capture_fps_enable(capture_fps_enable);
     }
 
     // int64 kRecorderDurationMs = "android.media.mediarecorder.durationMs";
-    int64_t durationMs = -1;
-    if (item->getInt64("android.media.mediarecorder.durationMs", &durationMs)) {
-        metrics_proto.set_duration_millis(durationMs);
+    int64_t duration_millis = -1;
+    if (item->getInt64("android.media.mediarecorder.durationMs", &duration_millis)) {
+        metrics_proto.set_duration_millis(duration_millis);
     }
     // int64 kRecorderPaused = "android.media.mediarecorder.pausedMs";
-    int64_t pausedMs = -1;
-    if (item->getInt64("android.media.mediarecorder.pausedMs", &pausedMs)) {
-        metrics_proto.set_paused_millis(pausedMs);
+    int64_t paused_millis = -1;
+    if (item->getInt64("android.media.mediarecorder.pausedMs", &paused_millis)) {
+        metrics_proto.set_paused_millis(paused_millis);
     }
     // int32 kRecorderNumPauses = "android.media.mediarecorder.NPauses";
-    int32_t pausedCount = -1;
-    if (item->getInt32("android.media.mediarecorder.NPauses", &pausedCount)) {
-        metrics_proto.set_paused_count(pausedCount);
+    int32_t paused_count = -1;
+    if (item->getInt32("android.media.mediarecorder.NPauses", &paused_count)) {
+        metrics_proto.set_paused_count(paused_count);
     }
 
     // int32 kRecorderAudioBitrate = "android.media.mediarecorder.audio-bitrate";
-    int32_t audioBitrate = -1;
-    if (item->getInt32("android.media.mediarecorder.audio-bitrate", &audioBitrate)) {
-        metrics_proto.set_audio_bitrate(audioBitrate);
+    int32_t audio_bitrate = -1;
+    if (item->getInt32("android.media.mediarecorder.audio-bitrate", &audio_bitrate)) {
+        metrics_proto.set_audio_bitrate(audio_bitrate);
     }
     // int32 kRecorderAudioChannels = "android.media.mediarecorder.audio-channels";
-    int32_t audioChannels = -1;
-    if (item->getInt32("android.media.mediarecorder.audio-channels", &audioChannels)) {
-        metrics_proto.set_audio_channels(audioChannels);
+    int32_t audio_channels = -1;
+    if (item->getInt32("android.media.mediarecorder.audio-channels", &audio_channels)) {
+        metrics_proto.set_audio_channels(audio_channels);
     }
     // int32 kRecorderAudioSampleRate = "android.media.mediarecorder.audio-samplerate";
-    int32_t audioSampleRate = -1;
-    if (item->getInt32("android.media.mediarecorder.audio-samplerate", &audioSampleRate)) {
-        metrics_proto.set_audio_samplerate(audioSampleRate);
+    int32_t audio_samplerate = -1;
+    if (item->getInt32("android.media.mediarecorder.audio-samplerate", &audio_samplerate)) {
+        metrics_proto.set_audio_samplerate(audio_samplerate);
     }
 
     // int32 kRecorderMovieTimescale = "android.media.mediarecorder.movie-timescale";
-    int32_t movieTimescale = -1;
-    if (item->getInt32("android.media.mediarecorder.movie-timescale", &movieTimescale)) {
-        metrics_proto.set_movie_timescale(movieTimescale);
+    int32_t movie_timescale = -1;
+    if (item->getInt32("android.media.mediarecorder.movie-timescale", &movie_timescale)) {
+        metrics_proto.set_movie_timescale(movie_timescale);
     }
     // int32 kRecorderAudioTimescale = "android.media.mediarecorder.audio-timescale";
-    int32_t audioTimescale = -1;
-    if (item->getInt32("android.media.mediarecorder.audio-timescale", &audioTimescale)) {
-        metrics_proto.set_audio_timescale(audioTimescale);
+    int32_t audio_timescale = -1;
+    if (item->getInt32("android.media.mediarecorder.audio-timescale", &audio_timescale)) {
+        metrics_proto.set_audio_timescale(audio_timescale);
     }
     // int32 kRecorderVideoTimescale = "android.media.mediarecorder.video-timescale";
-    int32_t videoTimescale = -1;
-    if (item->getInt32("android.media.mediarecorder.video-timescale", &videoTimescale)) {
-        metrics_proto.set_video_timescale(videoTimescale);
+    int32_t video_timescale = -1;
+    if (item->getInt32("android.media.mediarecorder.video-timescale", &video_timescale)) {
+        metrics_proto.set_video_timescale(video_timescale);
     }
 
     // int32 kRecorderVideoBitrate = "android.media.mediarecorder.video-bitrate";
-    int32_t videoBitRate = -1;
-    if (item->getInt32("android.media.mediarecorder.video-bitrate", &videoBitRate)) {
-        metrics_proto.set_video_bitrate(videoBitRate);
+    int32_t video_bitrate = -1;
+    if (item->getInt32("android.media.mediarecorder.video-bitrate", &video_bitrate)) {
+        metrics_proto.set_video_bitrate(video_bitrate);
     }
     // int32 kRecorderVideoIframeInterval = "android.media.mediarecorder.video-iframe-interval";
-    int32_t iFrameInterval = -1;
-    if (item->getInt32("android.media.mediarecorder.video-iframe-interval", &iFrameInterval)) {
-        metrics_proto.set_iframe_interval(iFrameInterval);
+    int32_t iframe_interval = -1;
+    if (item->getInt32("android.media.mediarecorder.video-iframe-interval", &iframe_interval)) {
+        metrics_proto.set_iframe_interval(iframe_interval);
     }
 
     std::string serialized;
@@ -172,17 +172,47 @@
         return false;
     }
 
-    if (enabled_statsd) {
-        android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
-        (void)android::util::stats_write(android::util::MEDIAMETRICS_RECORDER_REPORTED,
-                                   timestamp, pkgName.c_str(), pkgVersionCode,
-                                   mediaApexVersion,
-                                   bf_serialized);
+    android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
+    int result = android::util::stats_write(android::util::MEDIAMETRICS_RECORDER_REPORTED,
+        timestamp_nanos, package_name.c_str(), package_version_code,
+        media_apex_version,
+        bf_serialized);
+    std::stringstream log;
+    log << "result:" << result << " {"
+            << " mediametrics_recorder_reported:"
+            << android::util::MEDIAMETRICS_RECORDER_REPORTED
+            << " timestamp_nanos:" << timestamp_nanos
+            << " package_name:" << package_name
+            << " package_version_code:" << package_version_code
+            << " media_apex_version:" << media_apex_version
 
-    } else {
-        ALOGV("NOT sending: private data (len=%zu)", strlen(serialized.c_str()));
-    }
+            << " audio_mime:" << audio_mime
+            << " video_mime:" << video_mime
+            << " video_profile:" << video_profile
+            << " video_level:" << video_level
+            << " width:" << width
+            << " height:" << height
+            << " rotation:" << rotation
+            << " framerate:" << framerate
+            << " capture_fps:" << capture_fps
+            << " capture_fps_enable:" << capture_fps_enable
 
+            << " duration_millis:" << duration_millis
+            << " paused_millis:" << paused_millis
+            << " paused_count:" << paused_count
+            << " audio_bitrate:" << audio_bitrate
+            << " audio_channels:" << audio_channels
+            << " audio_samplerate:" << audio_samplerate
+            << " movie_timescale:" << movie_timescale
+            << " audio_timescale:" << audio_timescale
+            << " video_timescale:" << video_timescale
+            << " video_bitrate:" << video_bitrate
+
+            << " iframe_interval:" << iframe_interval
+            // TODO Recorder - add log_session_id
+            // << " log_session_id:" << log_session_id
+            << " }";
+    statsdLog->log(android::util::MEDIAMETRICS_RECORDER_REPORTED, log.str());
     return true;
 }
 
diff --git a/services/mediametrics/tests/mediametrics_tests.cpp b/services/mediametrics/tests/mediametrics_tests.cpp
index ac9c7fa..2336d6f 100644
--- a/services/mediametrics/tests/mediametrics_tests.cpp
+++ b/services/mediametrics/tests/mediametrics_tests.cpp
@@ -809,7 +809,9 @@
   (*item3).set("four", (int32_t)4)
           .setTimestamp(12);
 
-  android::mediametrics::AudioAnalytics audioAnalytics;
+  std::shared_ptr<mediametrics::StatsdLog> statsdLog =
+          std::make_shared<mediametrics::StatsdLog>(10);
+  android::mediametrics::AudioAnalytics audioAnalytics{statsdLog};
 
   // untrusted entities cannot create a new key.
   ASSERT_EQ(PERMISSION_DENIED, audioAnalytics.submit(item, false /* isTrusted */));
@@ -817,7 +819,7 @@
 
   // TODO: Verify contents of AudioAnalytics.
   // Currently there is no getter API in AudioAnalytics besides dump.
-  ASSERT_EQ(11, audioAnalytics.dump(1000).second /* lines */);
+  ASSERT_EQ(10, audioAnalytics.dump(1000).second /* lines */);
 
   ASSERT_EQ(NO_ERROR, audioAnalytics.submit(item, true /* isTrusted */));
   // untrusted entities can add to an existing key
@@ -845,7 +847,9 @@
   (*item3).set("four", (int32_t)4)
           .setTimestamp(12);
 
-  android::mediametrics::AudioAnalytics audioAnalytics;
+  std::shared_ptr<mediametrics::StatsdLog> statsdLog =
+          std::make_shared<mediametrics::StatsdLog>(10);
+  android::mediametrics::AudioAnalytics audioAnalytics{statsdLog};
 
   // untrusted entities cannot create a new key.
   ASSERT_EQ(PERMISSION_DENIED, audioAnalytics.submit(item, false /* isTrusted */));
@@ -853,7 +857,7 @@
 
   // TODO: Verify contents of AudioAnalytics.
   // Currently there is no getter API in AudioAnalytics besides dump.
-  ASSERT_EQ(11, audioAnalytics.dump(1000).second /* lines */);
+  ASSERT_EQ(10, audioAnalytics.dump(1000).second /* lines */);
 
   ASSERT_EQ(NO_ERROR, audioAnalytics.submit(item, true /* isTrusted */));
   // untrusted entities can add to an existing key
@@ -877,7 +881,9 @@
   (*item3).set("four", (int32_t)4)
           .setTimestamp(12);
 
-  android::mediametrics::AudioAnalytics audioAnalytics;
+  std::shared_ptr<mediametrics::StatsdLog> statsdLog =
+          std::make_shared<mediametrics::StatsdLog>(10);
+  android::mediametrics::AudioAnalytics audioAnalytics{statsdLog};
 
   ASSERT_EQ(NO_ERROR, audioAnalytics.submit(item, true /* isTrusted */));
   // untrusted entities can add to an existing key
diff --git a/services/oboeservice/AAudioServiceEndpoint.cpp b/services/oboeservice/AAudioServiceEndpoint.cpp
index faea58f..13dd3d3 100644
--- a/services/oboeservice/AAudioServiceEndpoint.cpp
+++ b/services/oboeservice/AAudioServiceEndpoint.cpp
@@ -38,6 +38,10 @@
 using namespace android;  // TODO just import names needed
 using namespace aaudio;   // TODO just import names needed
 
+AAudioServiceEndpoint::~AAudioServiceEndpoint() {
+    ALOGD("%s() called", __func__);
+}
+
 std::string AAudioServiceEndpoint::dump() const NO_THREAD_SAFETY_ANALYSIS {
     std::stringstream result;
 
diff --git a/services/oboeservice/AAudioServiceEndpoint.h b/services/oboeservice/AAudioServiceEndpoint.h
index 72090c2..a7f63d3 100644
--- a/services/oboeservice/AAudioServiceEndpoint.h
+++ b/services/oboeservice/AAudioServiceEndpoint.h
@@ -43,7 +43,7 @@
         , public AAudioStreamParameters {
 public:
 
-    virtual ~AAudioServiceEndpoint() = default;
+    virtual ~AAudioServiceEndpoint();
 
     virtual std::string dump() const;
 
diff --git a/services/oboeservice/AAudioServiceEndpointMMAP.cpp b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
index 556710d..7294a58 100644
--- a/services/oboeservice/AAudioServiceEndpointMMAP.cpp
+++ b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
@@ -51,8 +51,6 @@
         : mMmapStream(nullptr)
         , mAAudioService(audioService) {}
 
-AAudioServiceEndpointMMAP::~AAudioServiceEndpointMMAP() {}
-
 std::string AAudioServiceEndpointMMAP::dump() const {
     std::stringstream result;
 
@@ -357,7 +355,10 @@
 // This is called by AudioFlinger when it wants to destroy a stream.
 void AAudioServiceEndpointMMAP::onTearDown(audio_port_handle_t portHandle) {
     ALOGD("%s(portHandle = %d) called", __func__, portHandle);
-    std::thread asyncTask(&AAudioServiceEndpointMMAP::handleTearDownAsync, this, portHandle);
+    android::sp<AAudioServiceEndpointMMAP> holdEndpoint(this);
+    std::thread asyncTask([holdEndpoint, portHandle]() {
+        holdEndpoint->handleTearDownAsync(portHandle);
+    });
     asyncTask.detach();
 }
 
@@ -378,9 +379,11 @@
     ALOGD("%s() called with dev %d, old = %d", __func__, deviceId, getDeviceId());
     if (getDeviceId() != deviceId) {
         if (getDeviceId() != AUDIO_PORT_HANDLE_NONE) {
-            std::thread asyncTask([this, deviceId]() {
-                disconnectRegisteredStreams();
-                setDeviceId(deviceId);
+            android::sp<AAudioServiceEndpointMMAP> holdEndpoint(this);
+            std::thread asyncTask([holdEndpoint, deviceId]() {
+                ALOGD("onRoutingChanged() asyncTask launched");
+                holdEndpoint->disconnectRegisteredStreams();
+                holdEndpoint->setDeviceId(deviceId);
             });
             asyncTask.detach();
         } else {
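
Both asyncTask lambdas in this file now capture holdEndpoint, a strong android::sp to the endpoint, instead of a raw this; the threads are detached, so nothing else guarantees the endpoint outlives them. The sketch below shows the same idea with std::shared_ptr purely to stay self-contained and runnable; the real code uses android::sp and RefBase.

    #include <chrono>
    #include <cstdio>
    #include <memory>
    #include <thread>

    class Endpoint : public std::enable_shared_from_this<Endpoint> {
    public:
        ~Endpoint() { printf("Endpoint destroyed\n"); }

        void onTearDown(int portHandle) {
            // Capturing holdEndpoint keeps *this alive until the async task is done,
            // even if every other owner drops its reference in the meantime.
            std::shared_ptr<Endpoint> holdEndpoint = shared_from_this();
            std::thread asyncTask([holdEndpoint, portHandle]() {
                std::this_thread::sleep_for(std::chrono::milliseconds(20));
                holdEndpoint->handleTearDownAsync(portHandle);
            });
            asyncTask.detach();
        }

        void handleTearDownAsync(int portHandle) {
            printf("tearing down port %d\n", portHandle);
        }
    };

    int main() {
        {
            auto endpoint = std::make_shared<Endpoint>();
            endpoint->onTearDown(42);
            // endpoint goes out of scope here, but the detached task still holds a
            // reference, so the Endpoint is destroyed only after the task finishes.
        }
        std::this_thread::sleep_for(std::chrono::milliseconds(100));  // let the task run
        return 0;
    }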
diff --git a/services/oboeservice/AAudioServiceEndpointMMAP.h b/services/oboeservice/AAudioServiceEndpointMMAP.h
index 24b161d..5a53885 100644
--- a/services/oboeservice/AAudioServiceEndpointMMAP.h
+++ b/services/oboeservice/AAudioServiceEndpointMMAP.h
@@ -44,7 +44,7 @@
 public:
     explicit AAudioServiceEndpointMMAP(android::AAudioService &audioService);
 
-    virtual ~AAudioServiceEndpointMMAP();
+    virtual ~AAudioServiceEndpointMMAP() = default;
 
     std::string dump() const override;
 
diff --git a/services/oboeservice/AAudioServiceStreamBase.cpp b/services/oboeservice/AAudioServiceStreamBase.cpp
index 694094c..dbacd75 100644
--- a/services/oboeservice/AAudioServiceStreamBase.cpp
+++ b/services/oboeservice/AAudioServiceStreamBase.cpp
@@ -67,8 +67,7 @@
     // If the stream is deleted when OPEN or in use then audio resources will leak.
     // This would indicate an internal error. So we want to find this ASAP.
     LOG_ALWAYS_FATAL_IF(!(getState() == AAUDIO_STREAM_STATE_CLOSED
-                        || getState() == AAUDIO_STREAM_STATE_UNINITIALIZED
-                        || getState() == AAUDIO_STREAM_STATE_DISCONNECTED),
+                        || getState() == AAUDIO_STREAM_STATE_UNINITIALIZED),
                         "service stream %p still open, state = %d",
                         this, getState());
 }
@@ -229,7 +228,7 @@
     aaudio_result_t result = AAUDIO_OK;
 
     if (auto state = getState();
-        state == AAUDIO_STREAM_STATE_CLOSED || state == AAUDIO_STREAM_STATE_DISCONNECTED) {
+        state == AAUDIO_STREAM_STATE_CLOSED || isDisconnected_l()) {
         ALOGW("%s() already CLOSED, returns INVALID_STATE, handle = %d",
                 __func__, getHandle());
         return AAUDIO_ERROR_INVALID_STATE;
@@ -261,8 +260,14 @@
     sendServiceEvent(AAUDIO_SERVICE_EVENT_STARTED);
     setState(AAUDIO_STREAM_STATE_STARTED);
     mThreadEnabled.store(true);
+    // Make sure this object does not get deleted before the run() method
+    // can protect it by making a strong pointer.
+    incStrong(nullptr); // See run() method.
     result = mTimestampThread.start(this);
-    if (result != AAUDIO_OK) goto error;
+    if (result != AAUDIO_OK) {
+        decStrong(nullptr); // run() can't do it so we have to do it here.
+        goto error;
+    }
 
     return result;
 
@@ -291,10 +296,6 @@
             .set(AMEDIAMETRICS_PROP_STATUS, (int32_t)result)
             .record(); });
 
-    // Send it now because the timestamp gets rounded up when stopStream() is called below.
-    // Also we don't need the timestamps while we are shutting down.
-    sendCurrentTimestamp();
-
     result = stopTimestampThread();
     if (result != AAUDIO_OK) {
         disconnect_l();
@@ -340,10 +341,12 @@
 
     setState(AAUDIO_STREAM_STATE_STOPPING);
 
-    // Send it now because the timestamp gets rounded up when stopStream() is called below.
-    // Also we don't need the timestamps while we are shutting down.
-    sendCurrentTimestamp(); // warning - this calls a virtual function
+    // Temporarily unlock because we are joining the timestamp thread and it may try
+    // to acquire mLock.
+    mLock.unlock();
     result = stopTimestampThread();
+    mLock.lock();
+
     if (result != AAUDIO_OK) {
         disconnect_l();
         return result;
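stop_l() now drops mLock around stopTimestampThread() because joining the timestamp thread while holding the lock can deadlock if that thread is blocked trying to acquire mLock (for example inside sendCurrentTimestamp()). A minimal sketch of the unlock/join/relock pattern, with illustrative names (Worker, mThread, mRunning):

#include <atomic>
#include <mutex>
#include <thread>

class Worker {
public:
    void stop() {
        std::unique_lock<std::mutex> lock(mLock);
        mRunning = false;         // tell the worker loop to exit
        lock.unlock();            // the loop may need mLock to finish its last iteration
        if (mThread.joinable()) {
            mThread.join();       // would deadlock if we still held mLock here
        }
        lock.lock();              // re-acquire before touching guarded state
        // ... update state that is guarded by mLock ...
    }

private:
    std::mutex mLock;
    std::atomic<bool> mRunning{true};
    std::thread mThread;
};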
@@ -403,15 +406,21 @@
 __attribute__((no_sanitize("integer")))
 void AAudioServiceStreamBase::run() {
     ALOGD("%s() %s entering >>>>>>>>>>>>>> TIMESTAMPS", __func__, getTypeText());
+    // Hold onto the ref-counted stream until the end.
+    android::sp<AAudioServiceStreamBase> holdStream(this);
     TimestampScheduler timestampScheduler;
+    // Balance the incStrong from when the thread was launched.
+    holdStream->decStrong(nullptr);
+
     timestampScheduler.setBurstPeriod(mFramesPerBurst, getSampleRate());
     timestampScheduler.start(AudioClock::getNanoseconds());
     int64_t nextTime = timestampScheduler.nextAbsoluteTime();
     int32_t loopCount = 0;
+    aaudio_result_t result = AAUDIO_OK;
     while(mThreadEnabled.load()) {
         loopCount++;
         if (AudioClock::getNanoseconds() >= nextTime) {
-            aaudio_result_t result = sendCurrentTimestamp();
+            result = sendCurrentTimestamp();
             if (result != AAUDIO_OK) {
                 ALOGE("%s() timestamp thread got result = %d", __func__, result);
                 break;
@@ -423,6 +432,11 @@
             AudioClock::sleepUntilNanoTime(nextTime);
         }
     }
+    // This call was moved here from stop_l() and pause_l(), where it could cause a deadlock
+    // if it resulted in a call to disconnect().
+    if (result == AAUDIO_OK) {
+        (void) sendCurrentTimestamp();
+    }
     ALOGD("%s() %s exiting after %d loops <<<<<<<<<<<<<< TIMESTAMPS",
           __func__, getTypeText(), loopCount);
 }
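With the sendCurrentTimestamp() calls removed from pause_l() and stop_l() above, the last timestamp is now sent from the timestamp thread itself after its loop exits, so a send that ends up calling disconnect() never runs while the control thread is holding mLock. A simplified sketch of that loop shape; TimestampLoop and sendTimestamp are illustrative, the real loop uses TimestampScheduler and AudioClock:

#include <atomic>
#include <chrono>
#include <thread>

class TimestampLoop {
public:
    void run() {
        int result = 0;                          // 0 means OK in this sketch
        while (mEnabled.load()) {
            result = sendTimestamp();
            if (result != 0) break;              // client queue is gone; stop sending
            std::this_thread::sleep_for(std::chrono::milliseconds(100));
        }
        // Send one final timestamp from this thread, not from stop(), so a send
        // that triggers teardown never runs while the caller holds its lock.
        if (result == 0) {
            (void) sendTimestamp();
        }
    }

    void stop() { mEnabled.store(false); }       // safe to call while holding other locks

private:
    int sendTimestamp() { return 0; }            // stand-in for writing to the client queue
    std::atomic<bool> mEnabled{true};
};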
@@ -433,8 +447,7 @@
 }
 
 void AAudioServiceStreamBase::disconnect_l() {
-    if (getState() != AAUDIO_STREAM_STATE_DISCONNECTED
-        && getState() != AAUDIO_STREAM_STATE_CLOSED) {
+    if (!isDisconnected_l() && getState() != AAUDIO_STREAM_STATE_CLOSED) {
 
         mediametrics::LogItem(mMetricsId)
             .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_DISCONNECT)
@@ -442,7 +455,7 @@
             .record();
 
         sendServiceEvent(AAUDIO_SERVICE_EVENT_DISCONNECTED);
-        setState(AAUDIO_STREAM_STATE_DISCONNECTED);
+        setDisconnected_l(true);
     }
 }
 
diff --git a/services/oboeservice/AAudioServiceStreamBase.h b/services/oboeservice/AAudioServiceStreamBase.h
index 06c9f21..c42df0f 100644
--- a/services/oboeservice/AAudioServiceStreamBase.h
+++ b/services/oboeservice/AAudioServiceStreamBase.h
@@ -80,7 +80,7 @@
     // because we had to wait until we generated the handle.
     void logOpen(aaudio_handle_t streamHandle);
 
-    aaudio_result_t close();
+    aaudio_result_t close() EXCLUDES(mLock);
 
     /**
      * Start the flow of audio data.
@@ -88,7 +88,7 @@
      * This is not guaranteed to be synchronous but it currently is.
      * An AAUDIO_SERVICE_EVENT_STARTED will be sent to the client when complete.
      */
-    aaudio_result_t start();
+    aaudio_result_t start() EXCLUDES(mLock);
 
     /**
      * Stop the flow of data so that start() can resume without loss of data.
@@ -96,7 +96,7 @@
      * This is not guaranteed to be synchronous but it currently is.
      * An AAUDIO_SERVICE_EVENT_PAUSED will be sent to the client when complete.
     */
-    aaudio_result_t pause();
+    aaudio_result_t pause() EXCLUDES(mLock);
 
     /**
      * Stop the flow of data after the currently queued data has finished playing.
@@ -105,14 +105,14 @@
      * An AAUDIO_SERVICE_EVENT_STOPPED will be sent to the client when complete.
      *
      */
-    aaudio_result_t stop();
+    aaudio_result_t stop() EXCLUDES(mLock);
 
     /**
      * Discard any data held by the underlying HAL or Service.
      *
      * An AAUDIO_SERVICE_EVENT_FLUSHED will be sent to the client when complete.
      */
-    aaudio_result_t flush();
+    aaudio_result_t flush() EXCLUDES(mLock);
 
     virtual aaudio_result_t startClient(const android::AudioClient& client,
                                         const audio_attributes_t *attr __unused,
@@ -126,9 +126,9 @@
         return AAUDIO_ERROR_UNAVAILABLE;
     }
 
-    aaudio_result_t registerAudioThread(pid_t clientThreadId, int priority);
+    aaudio_result_t registerAudioThread(pid_t clientThreadId, int priority) EXCLUDES(mLock);
 
-    aaudio_result_t unregisterAudioThread(pid_t clientThreadId);
+    aaudio_result_t unregisterAudioThread(pid_t clientThreadId) EXCLUDES(mLock);
 
     bool isRunning() const {
         return mState == AAUDIO_STREAM_STATE_STARTED;
@@ -137,7 +137,7 @@
     /**
      * Fill in a parcelable description of stream.
      */
-    aaudio_result_t getDescription(AudioEndpointParcelable &parcelable);
+    aaudio_result_t getDescription(AudioEndpointParcelable &parcelable) EXCLUDES(mLock);
 
     void setRegisteredThread(pid_t pid) {
         mRegisteredClientThread = pid;
@@ -153,7 +153,7 @@
 
     void run() override; // to implement Runnable
 
-    void disconnect();
+    void disconnect() EXCLUDES(mLock);
 
     const android::AudioClient &getAudioClient() {
         return mMmapClient;
@@ -248,7 +248,7 @@
 
     aaudio_result_t writeUpMessageQueue(AAudioServiceMessage *command);
 
-    aaudio_result_t sendCurrentTimestamp();
+    aaudio_result_t sendCurrentTimestamp() EXCLUDES(mLock);
 
     aaudio_result_t sendXRunCount(int32_t xRunCount);
 
@@ -265,6 +265,13 @@
 
     aaudio_stream_state_t   mState = AAUDIO_STREAM_STATE_UNINITIALIZED;
 
+    bool isDisconnected_l() const REQUIRES(mLock) {
+        return mDisconnected;
+    }
+    void setDisconnected_l(bool flag) REQUIRES(mLock) {
+        mDisconnected = flag;
+    }
+
     pid_t                   mRegisteredClientThread = ILLEGAL_THREAD_ID;
 
     std::mutex              mUpMessageQueueLock;
@@ -322,6 +329,8 @@
     // for example a full message queue. Note that this atomic is unrelated to mCloseNeeded.
     std::atomic<bool>       mSuspended{false};
 
+    bool                    mDisconnected GUARDED_BY(mLock) {false};
+
 protected:
     // Locking order is important.
     // Acquire mLock before acquiring AAudioServiceEndpoint::mLockStreams
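The header changes above add Clang thread-safety annotations: EXCLUDES(mLock) on the public entry points that take the lock themselves, REQUIRES(mLock) on the _l helpers that must be called with it held, and GUARDED_BY(mLock) on the new mDisconnected flag. In AOSP these macros come from a shared header (for example android-base/thread_annotations.h); the self-contained sketch below spells them out locally so it can be checked with clang++ -Wthread-safety:

#include <mutex>

#define CAPABILITY(x)   __attribute__((capability(x)))
#define GUARDED_BY(x)   __attribute__((guarded_by(x)))
#define REQUIRES(...)   __attribute__((requires_capability(__VA_ARGS__)))
#define EXCLUDES(...)   __attribute__((locks_excluded(__VA_ARGS__)))
#define ACQUIRE(...)    __attribute__((acquire_capability(__VA_ARGS__)))
#define RELEASE(...)    __attribute__((release_capability(__VA_ARGS__)))

class CAPABILITY("mutex") Mutex {
public:
    void lock() ACQUIRE() { mStd.lock(); }
    void unlock() RELEASE() { mStd.unlock(); }
private:
    std::mutex mStd;
};

class Stream {
public:
    // Caller must NOT already hold mLock; close() acquires it itself.
    void close() EXCLUDES(mLock) {
        mLock.lock();
        close_l();
        mLock.unlock();
    }

private:
    // Caller MUST hold mLock; the analysis flags any call site that does not.
    void close_l() REQUIRES(mLock) { mDisconnected = true; }

    Mutex mLock;
    bool  mDisconnected GUARDED_BY(mLock) {false};
};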
diff --git a/services/oboeservice/AAudioServiceStreamMMAP.h b/services/oboeservice/AAudioServiceStreamMMAP.h
index 6ba1725..667465a 100644
--- a/services/oboeservice/AAudioServiceStreamMMAP.h
+++ b/services/oboeservice/AAudioServiceStreamMMAP.h
@@ -73,7 +73,8 @@
 
     aaudio_result_t getAudioDataDescription(AudioEndpointParcelable &parcelable) override;
 
-    aaudio_result_t getFreeRunningPosition(int64_t *positionFrames, int64_t *timeNanos) override;
+    aaudio_result_t getFreeRunningPosition(int64_t *positionFrames,
+            int64_t *timeNanos) EXCLUDES(mLock) override;
 
     aaudio_result_t getHardwareTimestamp(int64_t *positionFrames, int64_t *timeNanos) override;
 
diff --git a/services/tuner/TunerDescrambler.cpp b/services/tuner/TunerDescrambler.cpp
index bdf826c..b7ae167 100644
--- a/services/tuner/TunerDescrambler.cpp
+++ b/services/tuner/TunerDescrambler.cpp
@@ -111,11 +111,11 @@
     DemuxPid hidlPid;
     switch (pid.getTag()) {
         case TunerDemuxPid::tPid: {
-            hidlPid.tPid((uint16_t)pid.tPid);
+            hidlPid.tPid((uint16_t)pid.get<TunerDemuxPid::tPid>());
             break;
         }
         case TunerDemuxPid::mmtpPid: {
-            hidlPid.mmtpPid((uint16_t)pid.mmtpPid);
+            hidlPid.mmtpPid((uint16_t)pid.get<TunerDemuxPid::mmtpPid>());
             break;
         }
     }
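The fix above switches from pid.tPid / pid.mmtpPid, which resolve to the union's tag enumerators rather than the stored value, to the get<Tag>() accessors that the AIDL compiler generates for C++ unions. A hand-written stand-in for such a union, only to show the getTag()/get<Tag>() access pattern; the real TunerDemuxPid is generated code and does not look like this internally:

#include <cassert>
#include <cstdint>
#include <variant>

class DemuxPidUnion {
public:
    enum Tag { tPid = 0, mmtpPid = 1 };

    template <Tag T>
    void set(int32_t value) { mValue.emplace<static_cast<size_t>(T)>(value); }

    Tag getTag() const { return static_cast<Tag>(mValue.index()); }

    template <Tag T>
    int32_t get() const { return std::get<static_cast<size_t>(T)>(mValue); }

private:
    std::variant<int32_t, int32_t> mValue;  // one alternative per union field
};

int main() {
    DemuxPidUnion pid;
    pid.set<DemuxPidUnion::tPid>(0x1FFF);

    // Dispatch on the active member, then read it with get<Tag>().
    switch (pid.getTag()) {
        case DemuxPidUnion::tPid:
            assert(pid.get<DemuxPidUnion::tPid>() == 0x1FFF);
            break;
        case DemuxPidUnion::mmtpPid:
            break;
    }
    return 0;
}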
diff --git a/services/tuner/TunerFilter.cpp b/services/tuner/TunerFilter.cpp
index 39a6723..039fd31 100644
--- a/services/tuner/TunerFilter.cpp
+++ b/services/tuner/TunerFilter.cpp
@@ -57,10 +57,10 @@
         return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
     }
 
-    MQDesc dvrMQDesc;
+    MQDesc filterMQDesc;
     Result res;
     mFilter->getQueueDesc([&](Result r, const MQDesc& desc) {
-        dvrMQDesc = desc;
+        filterMQDesc = desc;
         res = r;
     });
     if (res != Result::SUCCESS) {
@@ -69,7 +69,7 @@
 
     AidlMQDesc aidlMQDesc;
     unsafeHidlToAidlMQDescriptor<uint8_t, int8_t, SynchronizedReadWrite>(
-                dvrMQDesc,  &aidlMQDesc);
+                filterMQDesc,  &aidlMQDesc);
     *_aidl_return = move(aidlMQDesc);
     return Status::ok();
 }
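The rename from dvrMQDesc to filterMQDesc is purely for clarity; the descriptor comes from the filter's message queue, not a DVR. The surrounding code also shows the usual HIDL idiom of returning results through a synchronous callback lambda that fills in locals before the proxy call returns; a sketch of that idiom with stand-in types (IFilterLike, Result, Desc):

#include <functional>
#include <string>

enum class Result { SUCCESS, UNAVAILABLE };
struct Desc { std::string name; };

struct IFilterLike {
    // The generated HIDL signature passes results only through the callback.
    void getQueueDesc(const std::function<void(Result, const Desc&)>& cb) {
        cb(Result::SUCCESS, Desc{"filterMQ"});
    }
};

bool fetchDesc(IFilterLike& filter, Desc* out) {
    Result res = Result::UNAVAILABLE;
    Desc filterMQDesc;                       // locals filled in by the callback
    filter.getQueueDesc([&](Result r, const Desc& d) {
        filterMQDesc = d;
        res = r;
    });                                      // the callback has already run here
    if (res != Result::SUCCESS) {
        return false;
    }
    *out = filterMQDesc;
    return true;
}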
@@ -471,7 +471,7 @@
         res = r;
         if (res == Result::SUCCESS) {
             TunerFilterSharedHandleInfo info{
-                .handle = dupToAidl(hidl_handle(avMemory.getNativeHandle())),
+                .handle = dupToAidl(avMemory),
                 .size = static_cast<int64_t>(avMemSize),
             };
             *_aidl_return = move(info);
@@ -480,7 +480,10 @@
         }
     });
 
-    return Status::fromServiceSpecificError(static_cast<int32_t>(res));
+    if (res != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+    return Status::ok();
 }
 
 Status TunerFilter::releaseAvHandle(
@@ -497,7 +500,6 @@
     return Status::ok();
 }
 
-
 Status TunerFilter::start() {
     if (mFilter == nullptr) {
         ALOGE("IFilter is not initialized");
@@ -683,7 +685,7 @@
         DemuxFilterMediaEvent mediaEvent = e.media();
         TunerFilterMediaEvent tunerMedia;
 
-        tunerMedia.streamId = static_cast<int>(mediaEvent.streamId);
+        tunerMedia.streamId = static_cast<char16_t>(mediaEvent.streamId);
         tunerMedia.isPtsPresent = mediaEvent.isPtsPresent;
         tunerMedia.pts = static_cast<long>(mediaEvent.pts);
         tunerMedia.dataLength = static_cast<int>(mediaEvent.dataLength);
@@ -730,10 +732,10 @@
         DemuxFilterSectionEvent sectionEvent = e.section();
         TunerFilterSectionEvent tunerSection;
 
-        tunerSection.tableId = static_cast<char>(sectionEvent.tableId);
-        tunerSection.version = static_cast<char>(sectionEvent.version);
-        tunerSection.sectionNum = static_cast<char>(sectionEvent.sectionNum);
-        tunerSection.dataLength = static_cast<char>(sectionEvent.dataLength);
+        tunerSection.tableId = static_cast<char16_t>(sectionEvent.tableId);
+        tunerSection.version = static_cast<char16_t>(sectionEvent.version);
+        tunerSection.sectionNum = static_cast<char16_t>(sectionEvent.sectionNum);
+        tunerSection.dataLength = static_cast<char16_t>(sectionEvent.dataLength);
 
         TunerFilterEvent tunerEvent;
         tunerEvent.set<TunerFilterEvent::section>(move(tunerSection));
@@ -747,8 +749,8 @@
         DemuxFilterPesEvent pesEvent = e.pes();
         TunerFilterPesEvent tunerPes;
 
-        tunerPes.streamId = static_cast<char>(pesEvent.streamId);
-        tunerPes.dataLength = static_cast<int>(pesEvent.dataLength);
+        tunerPes.streamId = static_cast<char16_t>(pesEvent.streamId);
+        tunerPes.dataLength = static_cast<char16_t>(pesEvent.dataLength);
         tunerPes.mpuSequenceNumber = static_cast<int>(pesEvent.mpuSequenceNumber);
 
         TunerFilterEvent tunerEvent;
@@ -775,9 +777,9 @@
         }
 
         if (tsRecordEvent.pid.getDiscriminator() == DemuxPid::hidl_discriminator::tPid) {
-            tunerTsRecord.pid = static_cast<char>(tsRecordEvent.pid.tPid());
+            tunerTsRecord.pid = static_cast<char16_t>(tsRecordEvent.pid.tPid());
         } else {
-            tunerTsRecord.pid = static_cast<char>(Constant::INVALID_TS_PID);
+            tunerTsRecord.pid = static_cast<char16_t>(Constant::INVALID_TS_PID);
         }
 
         tunerTsRecord.scIndexMask = scIndexMask;
@@ -837,7 +839,7 @@
         tunerDownload.itemFragmentIndex = static_cast<int>(downloadEvent.itemFragmentIndex);
         tunerDownload.mpuSequenceNumber = static_cast<int>(downloadEvent.mpuSequenceNumber);
         tunerDownload.lastItemFragmentIndex = static_cast<int>(downloadEvent.lastItemFragmentIndex);
-        tunerDownload.dataLength = static_cast<char>(downloadEvent.dataLength);
+        tunerDownload.dataLength = static_cast<char16_t>(downloadEvent.dataLength);
 
         TunerFilterEvent tunerEvent;
         tunerEvent.set<TunerFilterEvent::download>(move(tunerDownload));
@@ -851,7 +853,7 @@
         DemuxFilterIpPayloadEvent ipPayloadEvent = e.ipPayload();
         TunerFilterIpPayloadEvent tunerIpPayload;
 
-        tunerIpPayload.dataLength = static_cast<char>(ipPayloadEvent.dataLength);
+        tunerIpPayload.dataLength = static_cast<char16_t>(ipPayloadEvent.dataLength);
 
         TunerFilterEvent tunerEvent;
         tunerEvent.set<TunerFilterEvent::ipPayload>(move(tunerIpPayload));
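The casts in this file change from char to char16_t to match the AIDL changes further below, where fields such as streamId and dataLength move from int to char: AIDL's char is a 16-bit type that maps to char16_t in the generated C++ (and char in Java), so it is wide enough for a 13-bit MPEG-TS PID, whereas casting through an 8-bit C++ char would truncate values above 0xFF. A small sketch of the difference (values are illustrative):

#include <cassert>
#include <cstdint>

int main() {
    uint16_t halPid = 0x1FFF;                               // 13-bit MPEG-TS PID from the HAL

    char16_t aidlPid  = static_cast<char16_t>(halPid);      // what the fixed code does
    char     narrowed = static_cast<char>(halPid);          // what the old cast did

    assert(aidlPid == 0x1FFF);                               // full PID preserved
    assert(static_cast<unsigned char>(narrowed) == 0xFF);    // high bits lost
    return 0;
}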
diff --git a/services/tuner/TunerLnb.cpp b/services/tuner/TunerLnb.cpp
index 4a5acf5..77248d4 100644
--- a/services/tuner/TunerLnb.cpp
+++ b/services/tuner/TunerLnb.cpp
@@ -48,7 +48,10 @@
 
     sp<ILnbCallback> lnbCallback = new LnbCallback(tunerLnbCallback);
     Result status = mLnb->setCallback(lnbCallback);
-    return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    if (status != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    }
+    return Status::ok();
 }
 
 Status TunerLnb::setVoltage(int voltage) {
@@ -58,7 +61,10 @@
     }
 
     Result status = mLnb->setVoltage(static_cast<LnbVoltage>(voltage));
-    return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    if (status != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    }
+    return Status::ok();
 }
 
 Status TunerLnb::setTone(int tone) {
@@ -68,7 +74,10 @@
     }
 
     Result status = mLnb->setTone(static_cast<LnbTone>(tone));
-    return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    if (status != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    }
+    return Status::ok();
 }
 
 Status TunerLnb::setSatellitePosition(int position) {
@@ -78,7 +87,10 @@
     }
 
     Result status = mLnb->setSatellitePosition(static_cast<LnbPosition>(position));
-    return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    if (status != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    }
+    return Status::ok();
 }
 
 Status TunerLnb::sendDiseqcMessage(const vector<uint8_t>& diseqcMessage) {
@@ -88,7 +100,10 @@
     }
 
     Result status = mLnb->sendDiseqcMessage(diseqcMessage);
-    return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    if (status != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    }
+    return Status::ok();
 }
 
 Status TunerLnb::close() {
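Each of the setters above previously returned Status::fromServiceSpecificError() unconditionally; because that constructor always produces a service-specific exception, even Result::SUCCESS was reported to the caller as a failure. The fix returns Status::ok() on success and only wraps genuine errors, the same pattern applied in TunerFilter and TunerTimeFilter. A sketch of the conversion as a helper; toStatus is a hypothetical name and Result stands in for the HIDL enum:

#include <binder/Status.h>

using android::binder::Status;

enum class Result : int32_t { SUCCESS = 0, UNAVAILABLE = 1, INVALID_STATE = 2 };

static Status toStatus(Result res) {
    if (res != Result::SUCCESS) {
        return Status::fromServiceSpecificError(static_cast<int32_t>(res));
    }
    return Status::ok();  // the old code skipped this and reported success as an error
}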
diff --git a/services/tuner/TunerTimeFilter.cpp b/services/tuner/TunerTimeFilter.cpp
index 25e1ad9..ea9da30 100644
--- a/services/tuner/TunerTimeFilter.cpp
+++ b/services/tuner/TunerTimeFilter.cpp
@@ -38,7 +38,10 @@
     }
 
     Result status = mTimeFilter->setTimeStamp(timeStamp);
-    return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    if (status != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    }
+    return Status::ok();
 }
 
 Status TunerTimeFilter::clearTimeStamp() {
@@ -48,7 +51,10 @@
     }
 
     Result status = mTimeFilter->clearTimeStamp();
-    return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    if (status != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    }
+    return Status::ok();
 }
 
 Status TunerTimeFilter::getSourceTime(int64_t* _aidl_return) {
@@ -66,8 +72,9 @@
             });
     if (status != Result::SUCCESS) {
         *_aidl_return = (int64_t)Constant64Bit::INVALID_PRESENTATION_TIME_STAMP;
+        return Status::fromServiceSpecificError(static_cast<int32_t>(status));
     }
-    return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    return Status::ok();
 }
 
 Status TunerTimeFilter::getTimeStamp(int64_t* _aidl_return) {
@@ -85,8 +92,9 @@
             });
     if (status != Result::SUCCESS) {
         *_aidl_return = (int64_t)Constant64Bit::INVALID_PRESENTATION_TIME_STAMP;
+        return Status::fromServiceSpecificError(static_cast<int32_t>(status));
     }
-    return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    return Status::ok();
 }
 
 Status TunerTimeFilter::close() {
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerDemuxPid.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerDemuxPid.aidl
index 51c6378..8b238b6 100644
--- a/services/tuner/aidl/android/media/tv/tuner/TunerDemuxPid.aidl
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerDemuxPid.aidl
@@ -22,7 +22,7 @@
  * {@hide}
  */
 union TunerDemuxPid {
-    int tPid;
+    char tPid;
 
-    int mmtpPid;
+    char mmtpPid;
 }
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFilterMediaEvent.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFilterMediaEvent.aidl
index 5842c0d..c3dbce9 100644
--- a/services/tuner/aidl/android/media/tv/tuner/TunerFilterMediaEvent.aidl
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFilterMediaEvent.aidl
@@ -25,7 +25,7 @@
  * {@hide}
  */
 parcelable TunerFilterMediaEvent {
-    int streamId;
+    char streamId;
 
     /**
      * true if PTS is present in PES header.
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFilterPesEvent.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFilterPesEvent.aidl
index f7ee286..dc1ecc6 100644
--- a/services/tuner/aidl/android/media/tv/tuner/TunerFilterPesEvent.aidl
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFilterPesEvent.aidl
@@ -27,7 +27,7 @@
     /**
      * Data size in bytes of PES data
      */
-    int dataLength;
+    char dataLength;
 
     /**
      * MPU sequence number of filtered data
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbs3Settings.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbs3Settings.aidl
index 0923868..9a11fd5 100644
--- a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbs3Settings.aidl
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbs3Settings.aidl
@@ -27,7 +27,7 @@
      */
     int frequency;
 
-    int streamId;
+    char streamId;
 
     int streamIdType;
 
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbsSettings.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbsSettings.aidl
index 2ae9092..dff9f4a 100644
--- a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbsSettings.aidl
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbsSettings.aidl
@@ -27,7 +27,7 @@
      */
     int frequency;
 
-    int streamId;
+    char streamId;
 
     int streamIdType;