Merge "Camera: Connect activity resizing state to camera service" into sc-dev
diff --git a/apex/Android.bp b/apex/Android.bp
index aacb9a8..6c45749 100644
--- a/apex/Android.bp
+++ b/apex/Android.bp
@@ -51,7 +51,10 @@
         },
     },
     // JNI
-    native_shared_libs: ["libmediaparser-jni"],
+    native_shared_libs: [
+        "libmediaparser-jni",
+        "libmediaformatshaper",
+    ],
     compile_multilib: "both",
     prebuilts: [
         "code_coverage.policy",
@@ -74,6 +77,9 @@
     // - build artifacts (lib/javalib/bin) against Android 10 SDK
     //   so that the artifacts can run.
     min_sdk_version: "29",
+    // Indicates that pre-installed version of this apex can be compressed.
+    // Whether it actually will be compressed is controlled on per-device basis.
+    compressible: true,
 }
 
 apex {
@@ -107,6 +113,7 @@
     native_shared_libs: [
         "libcodec2_hidl@1.0",
         "libcodec2_hidl@1.1",
+        "libcodec2_hidl@1.2",
         "libstagefright_foundation",
     ],
     prebuilts: [
@@ -131,6 +138,9 @@
     // - build artifacts (lib/javalib/bin) against Android 10 SDK
     //   so that the artifacts can run.
     min_sdk_version: "29",
+    // Indicates that pre-installed version of this apex can be compressed.
+    // Whether it actually will be compressed is controlled on per-device basis.
+    compressible: true,
 }
 
 prebuilt_etc {
diff --git a/camera/camera2/OutputConfiguration.cpp b/camera/camera2/OutputConfiguration.cpp
index 2f6bc30..d6642f3 100644
--- a/camera/camera2/OutputConfiguration.cpp
+++ b/camera/camera2/OutputConfiguration.cpp
@@ -72,6 +72,10 @@
     return mIsMultiResolution;
 }
 
+const std::vector<int32_t> &OutputConfiguration::getSensorPixelModesUsed() const {
+    return mSensorPixelModesUsed;
+}
+
 OutputConfiguration::OutputConfiguration() :
         mRotation(INVALID_ROTATION),
         mSurfaceSetID(INVALID_SET_ID),
@@ -156,6 +160,11 @@
         return err;
     }
 
+    std::vector<int32_t> sensorPixelModesUsed;
+    if ((err = parcel->readParcelableVector(&sensorPixelModesUsed)) != OK) {
+        ALOGE("%s: Failed to read sensor pixel mode(s) from parcel", __FUNCTION__);
+        return err;
+    }
     mRotation = rotation;
     mSurfaceSetID = setID;
     mSurfaceType = surfaceType;
@@ -171,6 +180,8 @@
         mGbps.push_back(surface.graphicBufferProducer);
     }
 
+    mSensorPixelModesUsed = std::move(sensorPixelModesUsed);
+
     ALOGV("%s: OutputConfiguration: rotation = %d, setId = %d, surfaceType = %d,"
           " physicalCameraId = %s, isMultiResolution = %d", __FUNCTION__, mRotation,
           mSurfaceSetID, mSurfaceType, String8(mPhysicalCameraId).string(), mIsMultiResolution);
@@ -240,24 +251,51 @@
     err = parcel->writeInt32(mIsMultiResolution ? 1 : 0);
     if (err != OK) return err;
 
+    err = parcel->writeParcelableVector(mSensorPixelModesUsed);
+    if (err != OK) return err;
+
     return OK;
 }
 
+template <typename T>
+static bool simpleVectorsEqual(T first, T second) {
+    if (first.size() != second.size()) {
+        return false;
+    }
+
+    for (size_t i = 0; i < first.size(); i++) {
+        if (first[i] != second[i]) {
+            return false;
+        }
+    }
+    return true;
+}
+
 bool OutputConfiguration::gbpsEqual(const OutputConfiguration& other) const {
     const std::vector<sp<IGraphicBufferProducer> >& otherGbps =
             other.getGraphicBufferProducers();
+    return simpleVectorsEqual(otherGbps, mGbps);
+}
 
-    if (mGbps.size() != otherGbps.size()) {
-        return false;
+bool OutputConfiguration::sensorPixelModesUsedEqual(const OutputConfiguration& other) const {
+    const std::vector<int32_t>& othersensorPixelModesUsed = other.getSensorPixelModesUsed();
+    return simpleVectorsEqual(othersensorPixelModesUsed, mSensorPixelModesUsed);
+}
+
+bool OutputConfiguration::sensorPixelModesUsedLessThan(const OutputConfiguration& other) const {
+    const std::vector<int32_t>& spms = other.getSensorPixelModesUsed();
+
+    if (mSensorPixelModesUsed.size() !=  spms.size()) {
+        return mSensorPixelModesUsed.size() < spms.size();
     }
 
-    for (size_t i = 0; i < mGbps.size(); i++) {
-        if (mGbps[i] != otherGbps[i]) {
-            return false;
+    for (size_t i = 0; i < spms.size(); i++) {
+        if (mSensorPixelModesUsed[i] != spms[i]) {
+            return mSensorPixelModesUsed[i] < spms[i];
         }
     }
 
-    return true;
+    return false;
 }
 
 bool OutputConfiguration::gbpsLessThan(const OutputConfiguration& other) const {
diff --git a/camera/include/camera/camera2/OutputConfiguration.h b/camera/include/camera/camera2/OutputConfiguration.h
index 6009370..f80ed3a 100644
--- a/camera/include/camera/camera2/OutputConfiguration.h
+++ b/camera/include/camera/camera2/OutputConfiguration.h
@@ -49,6 +49,8 @@
     String16                   getPhysicalCameraId() const;
     bool                       isMultiResolution() const;
 
+    // Set of sensor pixel modes allowed: {MAX_RESOLUTION, DEFAULT_MODE}.
+    const std::vector<int32_t>&            getSensorPixelModesUsed() const;
     /**
      * Keep impl up-to-date with OutputConfiguration.java in frameworks/base
      */
@@ -86,7 +88,8 @@
                 mIsShared == other.mIsShared &&
                 gbpsEqual(other) &&
                 mPhysicalCameraId == other.mPhysicalCameraId &&
-                mIsMultiResolution == other.mIsMultiResolution);
+                mIsMultiResolution == other.mIsMultiResolution &&
+                sensorPixelModesUsedEqual(other));
     }
     bool operator != (const OutputConfiguration& other) const {
         return !(*this == other);
@@ -120,13 +123,19 @@
         if (mIsMultiResolution != other.mIsMultiResolution) {
             return mIsMultiResolution < other.mIsMultiResolution;
         }
+        if (!sensorPixelModesUsedEqual(other)) {
+            return sensorPixelModesUsedLessThan(other);
+        }
         return gbpsLessThan(other);
     }
+
     bool operator > (const OutputConfiguration& other) const {
         return (*this != other && !(*this < other));
     }
 
     bool gbpsEqual(const OutputConfiguration& other) const;
+    bool sensorPixelModesUsedEqual(const OutputConfiguration& other) const;
+    bool sensorPixelModesUsedLessThan(const OutputConfiguration& other) const;
     bool gbpsLessThan(const OutputConfiguration& other) const;
     void addGraphicProducer(sp<IGraphicBufferProducer> gbp) {mGbps.push_back(gbp);}
 private:
@@ -140,6 +149,7 @@
     bool                       mIsShared;
     String16                   mPhysicalCameraId;
     bool                       mIsMultiResolution;
+    std::vector<int32_t>       mSensorPixelModesUsed;
 };
 } // namespace params
 } // namespace camera2
diff --git a/camera/ndk/impl/ACameraMetadata.cpp b/camera/ndk/impl/ACameraMetadata.cpp
index 895514e..7387442 100644
--- a/camera/ndk/impl/ACameraMetadata.cpp
+++ b/camera/ndk/impl/ACameraMetadata.cpp
@@ -534,6 +534,7 @@
         case ACAMERA_SENSOR_SENSITIVITY:
         case ACAMERA_SENSOR_TEST_PATTERN_DATA:
         case ACAMERA_SENSOR_TEST_PATTERN_MODE:
+        case ACAMERA_SENSOR_PIXEL_MODE:
         case ACAMERA_SHADING_MODE:
         case ACAMERA_STATISTICS_FACE_DETECT_MODE:
         case ACAMERA_STATISTICS_HOT_PIXEL_MAP_MODE:
@@ -584,6 +585,7 @@
     ANDROID_SENSOR_PROFILE_HUE_SAT_MAP,
     ANDROID_SENSOR_PROFILE_TONE_CURVE,
     ANDROID_SENSOR_OPAQUE_RAW_SIZE,
+    ANDROID_SENSOR_OPAQUE_RAW_SIZE_MAXIMUM_RESOLUTION,
     ANDROID_SHADING_STRENGTH,
     ANDROID_STATISTICS_HISTOGRAM_MODE,
     ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index 4e07c5c..70ce864 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -527,6 +527,13 @@
      * scene as they do before. See ACAMERA_CONTROL_ZOOM_RATIO for details. Whether to use
      * activeArraySize or preCorrectionActiveArraySize still depends on distortion correction
      * mode.</p>
+     * <p>For camera devices with the
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+     * capability,
+     * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION /
+     * ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION must be used as the
+     * coordinate system for requests where ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
      * <p>The data representation is <code>int[5 * area_count]</code>.
      * Every five elements represent a metering region of <code>(xmin, ymin, xmax, ymax, weight)</code>.
      * The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and
@@ -536,7 +543,10 @@
      * @see ACAMERA_DISTORTION_CORRECTION_MODE
      * @see ACAMERA_SCALER_CROP_REGION
      * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
+     * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION
      * @see ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE
+     * @see ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION
+     * @see ACAMERA_SENSOR_PIXEL_MODE
      */
     ACAMERA_CONTROL_AE_REGIONS =                                // int32[5*area_count]
             ACAMERA_CONTROL_START + 4,
@@ -718,6 +728,12 @@
      * scene as they do before. See ACAMERA_CONTROL_ZOOM_RATIO for details. Whether to use
      * activeArraySize or preCorrectionActiveArraySize still depends on distortion correction
      * mode.</p>
+     * <p>For camera devices with the
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+     * capability, ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION /
+     * ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION must be used as the
+     * coordinate system for requests where ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
      * <p>The data representation is <code>int[5 * area_count]</code>.
      * Every five elements represent a metering region of <code>(xmin, ymin, xmax, ymax, weight)</code>.
      * The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and
@@ -727,7 +743,10 @@
      * @see ACAMERA_DISTORTION_CORRECTION_MODE
      * @see ACAMERA_SCALER_CROP_REGION
      * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
+     * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION
      * @see ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE
+     * @see ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION
+     * @see ACAMERA_SENSOR_PIXEL_MODE
      */
     ACAMERA_CONTROL_AF_REGIONS =                                // int32[5*area_count]
             ACAMERA_CONTROL_START + 8,
@@ -904,6 +923,12 @@
      * the scene as they do before. See ACAMERA_CONTROL_ZOOM_RATIO for details. Whether to use
      * activeArraySize or preCorrectionActiveArraySize still depends on distortion correction
      * mode.</p>
+     * <p>For camera devices with the
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+     * capability, ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION /
+     * ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION must be used as the
+     * coordinate system for requests where ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
      * <p>The data representation is <code>int[5 * area_count]</code>.
      * Every five elements represent a metering region of <code>(xmin, ymin, xmax, ymax, weight)</code>.
      * The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and
@@ -913,7 +938,10 @@
      * @see ACAMERA_DISTORTION_CORRECTION_MODE
      * @see ACAMERA_SCALER_CROP_REGION
      * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
+     * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION
      * @see ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE
+     * @see ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION
+     * @see ACAMERA_SENSOR_PIXEL_MODE
      */
     ACAMERA_CONTROL_AWB_REGIONS =                               // int32[5*area_count]
             ACAMERA_CONTROL_START + 12,
@@ -2801,6 +2829,51 @@
      */
     ACAMERA_LENS_DISTORTION =                                   // float[5]
             ACAMERA_LENS_START + 13,
+    /**
+     * <p>The correction coefficients to correct for this camera device's
+     * radial and tangential lens distortion for a
+     * CaptureRequest with ACAMERA_SENSOR_PIXEL_MODE set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     *
+     * <p>Type: float[5]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Analogous to ACAMERA_LENS_DISTORTION, when ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_LENS_DISTORTION
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     */
+    ACAMERA_LENS_DISTORTION_MAXIMUM_RESOLUTION =                // float[5]
+            ACAMERA_LENS_START + 14,
+    /**
+     * <p>The parameters for this camera device's intrinsic
+     * calibration when ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     *
+     * <p>Type: float[5]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Analogous to ACAMERA_LENS_INTRINSIC_CALIBRATION, when ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_LENS_INTRINSIC_CALIBRATION
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     */
+    ACAMERA_LENS_INTRINSIC_CALIBRATION_MAXIMUM_RESOLUTION =     // float[5]
+            ACAMERA_LENS_START + 15,
     ACAMERA_LENS_END,
 
     /**
@@ -3428,6 +3501,12 @@
      * coordinate system is post-zoom, meaning that the activeArraySize or
      * preCorrectionActiveArraySize covers the camera device's field of view "after" zoom.  See
      * ACAMERA_CONTROL_ZOOM_RATIO for details.</p>
+     * <p>For camera devices with the
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+     * capability, ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION /
+     * ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION must be used as the
+     * coordinate system for requests where ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
      * <p>The data representation is int[4], which maps to (left, top, width, height).</p>
      *
      * @see ACAMERA_CONTROL_AE_TARGET_FPS_RANGE
@@ -3436,7 +3515,10 @@
      * @see ACAMERA_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM
      * @see ACAMERA_SCALER_CROPPING_TYPE
      * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
+     * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION
      * @see ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE
+     * @see ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION
+     * @see ACAMERA_SENSOR_PIXEL_MODE
      */
     ACAMERA_SCALER_CROP_REGION =                                // int32[4]
             ACAMERA_SCALER_START,
@@ -3538,8 +3620,6 @@
      * set to either OFF or FAST.</p>
      * <p>When multiple streams are used in a request, the minimum frame
      * duration will be max(individual stream min durations).</p>
-     * <p>The minimum frame duration of a stream (of a particular format, size)
-     * is the same regardless of whether the stream is input or output.</p>
      * <p>See ACAMERA_SENSOR_FRAME_DURATION and
      * ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS for more details about
      * calculating the max frame rate.</p>
@@ -3884,10 +3964,10 @@
      * configurations which belong to this physical camera, and it will advertise and will only
      * advertise the maximum supported resolutions for a particular format.</p>
      * <p>If this camera device isn't a physical camera device constituting a logical camera,
-     * but a standalone ULTRA_HIGH_RESOLUTION_SENSOR camera, this field represents the
-     * multi-resolution input/output stream configurations of default mode and max resolution
-     * modes. The sizes will be the maximum resolution of a particular format for default mode
-     * and max resolution mode.</p>
+     * but a standalone <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+     * camera, this field represents the multi-resolution input/output stream configurations of
+     * default mode and max resolution modes. The sizes will be the maximum resolution of a
+     * particular format for default mode and max resolution mode.</p>
      * <p>This field will only be advertised if the device is a physical camera of a
      * logical multi-camera device or an ultra high resolution sensor camera. For a logical
      * multi-camera, the camera API will derive the logical camera’s multi-resolution stream
@@ -3897,6 +3977,93 @@
     ACAMERA_SCALER_PHYSICAL_CAMERA_MULTI_RESOLUTION_STREAM_CONFIGURATIONS = 
                                                                 // int32[n*4] (acamera_metadata_enum_android_scaler_physical_camera_multi_resolution_stream_configurations_t)
             ACAMERA_SCALER_START + 19,
+    /**
+     * <p>The available stream configurations that this
+     * camera device supports (i.e. format, width, height, output/input stream) for a
+     * CaptureRequest with ACAMERA_SENSOR_PIXEL_MODE set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     *
+     * <p>Type: int32[n*4] (acamera_metadata_enum_android_scaler_available_stream_configurations_maximum_resolution_t)</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Analogous to ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, for configurations
+     * which are applicable when ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     * <p>Not all output formats may be supported in a configuration with
+     * an input stream of a particular format. For more details, see
+     * android.scaler.availableInputOutputFormatsMapMaximumResolution.</p>
+     *
+     * @see ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     */
+    ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION = 
+                                                                // int32[n*4] (acamera_metadata_enum_android_scaler_available_stream_configurations_maximum_resolution_t)
+            ACAMERA_SCALER_START + 20,
+    /**
+     * <p>This lists the minimum frame duration for each
+     * format/size combination when the camera device is sent a CaptureRequest with
+     * ACAMERA_SENSOR_PIXEL_MODE set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     *
+     * <p>Type: int64[4*n]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Analogous to ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, for configurations
+     * which are applicable when ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     * <p>When multiple streams are used in a request (if supported, when ACAMERA_SENSOR_PIXEL_MODE
+     * is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>), the
+     * minimum frame duration will be max(individual stream min durations).</p>
+     * <p>See ACAMERA_SENSOR_FRAME_DURATION and
+     * ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS_MAXIMUM_RESOLUTION for more details about
+     * calculating the max frame rate.</p>
+     *
+     * @see ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS
+     * @see ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS_MAXIMUM_RESOLUTION
+     * @see ACAMERA_SENSOR_FRAME_DURATION
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     */
+    ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION = 
+                                                                // int64[4*n]
+            ACAMERA_SCALER_START + 21,
+    /**
+     * <p>This lists the maximum stall duration for each
+     * output format/size combination when CaptureRequests are submitted with
+     * ACAMERA_SENSOR_PIXEL_MODE set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a></p>
+     *
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     *
+     * <p>Type: int64[4*n]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Analogous to ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS, for configurations
+     * which are applicable when ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     */
+    ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS_MAXIMUM_RESOLUTION = 
+                                                                // int64[4*n]
+            ACAMERA_SCALER_START + 22,
     ACAMERA_SCALER_END,
 
     /**
@@ -4683,6 +4850,67 @@
      */
     ACAMERA_SENSOR_DYNAMIC_WHITE_LEVEL =                        // int32
             ACAMERA_SENSOR_START + 29,
+    /**
+     * <p>Switches sensor pixel mode between maximum resolution mode and default mode.</p>
+     *
+     * <p>Type: byte (acamera_metadata_enum_android_sensor_pixel_mode_t)</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+     *   <li>ACaptureRequest</li>
+     * </ul></p>
+     *
+     * <p>This key controls whether the camera sensor operates in
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>
+     * mode or not. By default, all camera devices operate in
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_DEFAULT">CameraMetadata#SENSOR_PIXEL_MODE_DEFAULT</a> mode.
+     * When operating in
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_DEFAULT">CameraMetadata#SENSOR_PIXEL_MODE_DEFAULT</a> mode, sensors
+     * with <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+     * capability would typically perform pixel binning in order to improve low light
+     * performance, noise reduction etc. However, in
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>
+     * mode (supported only
+     * by <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+     * sensors), sensors typically operate in unbinned mode allowing for a larger image size.
+     * The stream configurations supported in
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>
+     * mode are also different from those of
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_DEFAULT">CameraMetadata#SENSOR_PIXEL_MODE_DEFAULT</a> mode.
+     * They can be queried through
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#get">CameraCharacteristics#get</a> with
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#SCALER_STREAM_CONFIGURATION_MAP_MAXIMUM_RESOLUTION">CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP_MAXIMUM_RESOLUTION</a>.
+     * Unless reported by both
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html">StreamConfigurationMap</a>s, the outputs from
+     * <code>android.scaler.streamConfigurationMapMaximumResolution</code> and
+     * <code>android.scaler.streamConfigurationMap</code>
+     * must not be mixed in the same CaptureRequest. In other words, these outputs are
+     * exclusive to each other.
+     * This key does not need to be set for reprocess requests.</p>
+     */
+    ACAMERA_SENSOR_PIXEL_MODE =                                 // byte (acamera_metadata_enum_android_sensor_pixel_mode_t)
+            ACAMERA_SENSOR_START + 32,
+    /**
+     * <p>Whether <code>RAW</code> images requested have their bayer pattern as described by
+     * ACAMERA_SENSOR_INFO_BINNING_FACTOR.</p>
+     *
+     * @see ACAMERA_SENSOR_INFO_BINNING_FACTOR
+     *
+     * <p>Type: byte (acamera_metadata_enum_android_sensor_raw_binning_factor_used_t)</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+     * </ul></p>
+     *
+     * <p>This key will only be present in devices advertising the
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+     * capability which also advertise <code>REMOSAIC_REPROCESSING</code> capability. On all other devices
+     * RAW targets will have a regular bayer pattern.</p>
+     */
+    ACAMERA_SENSOR_RAW_BINNING_FACTOR_USED =                    // byte (acamera_metadata_enum_android_sensor_raw_binning_factor_used_t)
+            ACAMERA_SENSOR_START + 33,
     ACAMERA_SENSOR_END,
 
     /**
@@ -4984,6 +5212,120 @@
      */
     ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE =      // int32[4]
             ACAMERA_SENSOR_INFO_START + 10,
+    /**
+     * <p>The area of the image sensor which corresponds to active pixels after any geometric
+     * distortion correction has been applied, when the sensor runs in maximum resolution mode.</p>
+     *
+     * <p>Type: int32[4]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Analogous to ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, when ACAMERA_SENSOR_PIXEL_MODE
+     * is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.
+     * Refer to ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE for details, with sensor array related keys
+     * replaced with their
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>
+     * counterparts.
+     * This key will only be present for devices which advertise the
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+     * capability.</p>
+     * <p>The data representation is <code>int[4]</code>, which maps to <code>(left, top, width, height)</code>.</p>
+     *
+     * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     */
+    ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION =  // int32[4]
+            ACAMERA_SENSOR_INFO_START + 11,
+    /**
+     * <p>Dimensions of the full pixel array, possibly
+     * including black calibration pixels, when the sensor runs in maximum resolution mode.
+     * Analogous to ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE, when ACAMERA_SENSOR_PIXEL_MODE is
+     * set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     *
+     * <p>Type: int32[2]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>The pixel count of the full pixel array of the image sensor, which covers
+     * ACAMERA_SENSOR_INFO_PHYSICAL_SIZE area. This represents the full pixel dimensions of
+     * the raw buffers produced by this sensor, when it runs in maximum resolution mode. That
+     * is, when ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.
+     * This key will only be present for devices which advertise the
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+     * capability.</p>
+     *
+     * @see ACAMERA_SENSOR_INFO_PHYSICAL_SIZE
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     */
+    ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE_MAXIMUM_RESOLUTION =   // int32[2]
+            ACAMERA_SENSOR_INFO_START + 12,
+    /**
+     * <p>The area of the image sensor which corresponds to active pixels prior to the
+     * application of any geometric distortion correction, when the sensor runs in maximum
+     * resolution mode. This key must be used for crop / metering regions, only when
+     * ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     *
+     * <p>Type: int32[4]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Analogous to ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE,
+     * when ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.
+     * This key will only be present for devices which advertise the
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+     * capability.</p>
+     * <p>The data representation is <code>int[4]</code>, which maps to <code>(left, top, width, height)</code>.</p>
+     *
+     * @see ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     */
+    ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION = 
+                                                                // int32[4]
+            ACAMERA_SENSOR_INFO_START + 13,
+    /**
+     * <p>Dimensions of the group of pixels which are under the same color filter.
+     * This specifies the width and height (pair of integers) of the group of pixels which fall
+     * under the same color filter for ULTRA_HIGH_RESOLUTION sensors.</p>
+     *
+     * <p>Type: int32[2]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Sensors can have pixels grouped together under the same color filter in order
+     * to improve various aspects of imaging such as noise reduction, low light
+     * performance etc. These groups can be of various sizes such as 2X2 (quad bayer),
+     * 3X3 (nona-bayer). This key specifies the length and width of the pixels grouped under
+     * the same color filter.</p>
+     * <p>This key will not be present if REMOSAIC_REPROCESSING is not supported, since RAW images
+     * will have a regular bayer pattern.</p>
+     * <p>This key will not be present for sensors which don't have the
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+     * capability.</p>
+     */
+    ACAMERA_SENSOR_INFO_BINNING_FACTOR =                        // int32[2]
+            ACAMERA_SENSOR_INFO_START + 14,
     ACAMERA_SENSOR_INFO_END,
 
     /**
@@ -5384,7 +5726,7 @@
      * </ul></p>
      *
      * <p>Since optical image stabilization generally involves motion much faster than the duration
-     * of individualq image exposure, multiple OIS samples can be included for a single capture
+     * of individual image exposure, multiple OIS samples can be included for a single capture
      * result. For example, if the OIS reporting operates at 200 Hz, a typical camera operating
      * at 30fps may have 6-7 OIS samples per capture result. This information can be combined
      * with the rolling shutter skew to account for lens motion during image exposure in
@@ -6189,6 +6531,162 @@
      */
     ACAMERA_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS =     // int64[4*n]
             ACAMERA_DEPTH_START + 8,
+    /**
+     * <p>The available depth dataspace stream
+     * configurations that this camera device supports
+     * (i.e. format, width, height, output/input stream) when a CaptureRequest is submitted with
+     * ACAMERA_SENSOR_PIXEL_MODE set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     *
+     * <p>Type: int32[n*4] (acamera_metadata_enum_android_depth_available_depth_stream_configurations_maximum_resolution_t)</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Analogous to ACAMERA_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS, for configurations which
+     * are applicable when ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     */
+    ACAMERA_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION = 
+                                                                // int32[n*4] (acamera_metadata_enum_android_depth_available_depth_stream_configurations_maximum_resolution_t)
+            ACAMERA_DEPTH_START + 9,
+    /**
+     * <p>This lists the minimum frame duration for each
+     * format/size combination for depth output formats when a CaptureRequest is submitted with
+     * ACAMERA_SENSOR_PIXEL_MODE set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     *
+     * <p>Type: int64[4*n]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Analogous to ACAMERA_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS, for configurations which
+     * are applicable when ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     * <p>See ACAMERA_SENSOR_FRAME_DURATION and
+     * ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS_MAXIMUM_RESOLUTION for more details about
+     * calculating the max frame rate.</p>
+     *
+     * @see ACAMERA_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS
+     * @see ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS_MAXIMUM_RESOLUTION
+     * @see ACAMERA_SENSOR_FRAME_DURATION
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     */
+    ACAMERA_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION = 
+                                                                // int64[4*n]
+            ACAMERA_DEPTH_START + 10,
+    /**
+     * <p>This lists the maximum stall duration for each
+     * output format/size combination for depth streams for CaptureRequests where
+     * ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     *
+     * <p>Type: int64[4*n]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Analogous to ACAMERA_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS, for configurations which
+     * are applicable when ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     */
+    ACAMERA_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS_MAXIMUM_RESOLUTION = 
+                                                                // int64[4*n]
+            ACAMERA_DEPTH_START + 11,
+    /**
+     * <p>The available dynamic depth dataspace stream
+     * configurations that this camera device supports (i.e. format, width, height,
+     * output/input stream) for CaptureRequests where ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     *
+     * <p>Type: int32[n*4] (acamera_metadata_enum_android_depth_available_dynamic_depth_stream_configurations_maximum_resolution_t)</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Analogous to ACAMERA_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS, for configurations
+     * which are applicable when ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     */
+    ACAMERA_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION = 
+                                                                // int32[n*4] (acamera_metadata_enum_android_depth_available_dynamic_depth_stream_configurations_maximum_resolution_t)
+            ACAMERA_DEPTH_START + 12,
+    /**
+     * <p>This lists the minimum frame duration for each
+     * format/size combination for dynamic depth output streams for CaptureRequests where
+     * ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     *
+     * <p>Type: int64[4*n]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Analogous to ACAMERA_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS, for configurations
+     * which are applicable when ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     */
+    ACAMERA_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION = 
+                                                                // int64[4*n]
+            ACAMERA_DEPTH_START + 13,
+    /**
+     * <p>This lists the maximum stall duration for each
+     * output format/size combination for dynamic depth streams for CaptureRequests where
+     * ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     *
+     * <p>Type: int64[4*n]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Analogous to ACAMERA_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS, for configurations
+     * which are applicable when ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     */
+    ACAMERA_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS_MAXIMUM_RESOLUTION = 
+                                                                // int64[4*n]
+            ACAMERA_DEPTH_START + 14,
     ACAMERA_DEPTH_END,
 
     /**
@@ -6409,6 +6907,71 @@
      */
     ACAMERA_HEIC_AVAILABLE_HEIC_STALL_DURATIONS =               // int64[4*n]
             ACAMERA_HEIC_START + 2,
+    /**
+     * <p>The available HEIC (ISO/IEC 23008-12) stream
+     * configurations that this camera device supports
+     * (i.e. format, width, height, output/input stream).</p>
+     *
+     * <p>Type: int32[n*4] (acamera_metadata_enum_android_heic_available_heic_stream_configurations_maximum_resolution_t)</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Refer to ACAMERA_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS for details.</p>
+     * <p>All the configuration tuples <code>(format, width, height, input?)</code> will contain
+     * AIMAGE_FORMAT_HEIC format as OUTPUT only.</p>
+     *
+     * @see ACAMERA_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS
+     */
+    ACAMERA_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION = 
+                                                                // int32[n*4] (acamera_metadata_enum_android_heic_available_heic_stream_configurations_maximum_resolution_t)
+            ACAMERA_HEIC_START + 3,
+    /**
+     * <p>This lists the minimum frame duration for each
+     * format/size combination for HEIC output formats for CaptureRequests where
+     * ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     *
+     * <p>Type: int64[4*n]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Refer to ACAMERA_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS for details.</p>
+     *
+     * @see ACAMERA_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS
+     */
+    ACAMERA_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION = 
+                                                                // int64[4*n]
+            ACAMERA_HEIC_START + 4,
+    /**
+     * <p>This lists the maximum stall duration for each
+     * output format/size combination for HEIC streams for CaptureRequests where
+     * ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     *
+     * <p>Type: int64[4*n]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Refer to ACAMERA_HEIC_AVAILABLE_HEIC_STALL_DURATIONS for details.</p>
+     *
+     * @see ACAMERA_HEIC_AVAILABLE_HEIC_STALL_DURATIONS
+     */
+    ACAMERA_HEIC_AVAILABLE_HEIC_STALL_DURATIONS_MAXIMUM_RESOLUTION = 
+                                                                // int64[4*n]
+            ACAMERA_HEIC_START + 5,
     ACAMERA_HEIC_END,
 
 } acamera_metadata_tag_t;
@@ -8359,6 +8922,20 @@
      */
     ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_SYSTEM_CAMERA             = 14,
 
+    /**
+     * <p>This camera device is capable of producing ultra high resolution images in
+     * addition to the image sizes described in the
+     * android.scaler.streamConfigurationMap.
+     * It can operate in 'default' mode and 'max resolution' mode. It generally does this
+     * by binning pixels in 'default' mode and not binning them in 'max resolution' mode.
+     * <code>android.scaler.streamConfigurationMap</code> describes the streams supported in 'default'
+     * mode.
+     * The stream configurations supported in 'max resolution' mode are described by
+     * <code>android.scaler.streamConfigurationMapMaximumResolution</code>.</p>
+     */
+    ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR
+                                                                      = 16,
+
 } acamera_metadata_enum_android_request_available_capabilities_t;
 
 
@@ -8514,6 +9091,16 @@
 
 } acamera_metadata_enum_android_scaler_physical_camera_multi_resolution_stream_configurations_t;
 
+// ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION
+typedef enum acamera_metadata_enum_acamera_scaler_available_stream_configurations_maximum_resolution {
+    ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION_OUTPUT
+                                                                      = 0,
+
+    ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION_INPUT
+                                                                      = 1,
+
+} acamera_metadata_enum_android_scaler_available_stream_configurations_maximum_resolution_t;
+
 
 // ACAMERA_SENSOR_REFERENCE_ILLUMINANT1
 typedef enum acamera_metadata_enum_acamera_sensor_reference_illuminant1 {
@@ -8672,6 +9259,42 @@
 
 } acamera_metadata_enum_android_sensor_test_pattern_mode_t;
 
+// ACAMERA_SENSOR_PIXEL_MODE
+typedef enum acamera_metadata_enum_acamera_sensor_pixel_mode {
+    /**
+     * <p>This is the default sensor pixel mode. This is the only sensor pixel mode
+     * supported unless a camera device advertises
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>.</p>
+     */
+    ACAMERA_SENSOR_PIXEL_MODE_DEFAULT                                = 0,
+
+    /**
+     * <p>This sensor pixel mode is offered by devices with capability
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>.
+     * In this mode, sensors typically do not bin pixels, as a result can offer larger
+     * image sizes.</p>
+     */
+    ACAMERA_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION                     = 1,
+
+} acamera_metadata_enum_android_sensor_pixel_mode_t;
+
+// ACAMERA_SENSOR_RAW_BINNING_FACTOR_USED
+typedef enum acamera_metadata_enum_acamera_sensor_raw_binning_factor_used {
+    /**
+     * <p>The <code>RAW</code> targets in this capture have ACAMERA_SENSOR_INFO_BINNING_FACTOR as the
+     * bayer pattern.</p>
+     *
+     * @see ACAMERA_SENSOR_INFO_BINNING_FACTOR
+     */
+    ACAMERA_SENSOR_RAW_BINNING_FACTOR_USED_TRUE                      = 0,
+
+    /**
+     * <p>The <code>RAW</code> targets have a regular bayer pattern in this capture.</p>
+     */
+    ACAMERA_SENSOR_RAW_BINNING_FACTOR_USED_FALSE                     = 1,
+
+} acamera_metadata_enum_android_sensor_raw_binning_factor_used_t;
+
 
 // ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT
 typedef enum acamera_metadata_enum_acamera_sensor_info_color_filter_arrangement {
@@ -9156,6 +9779,26 @@
 
 } acamera_metadata_enum_android_depth_available_dynamic_depth_stream_configurations_t;
 
+// ACAMERA_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION
+typedef enum acamera_metadata_enum_acamera_depth_available_depth_stream_configurations_maximum_resolution {
+    ACAMERA_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION_OUTPUT
+                                                                      = 0,
+
+    ACAMERA_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION_INPUT
+                                                                      = 1,
+
+} acamera_metadata_enum_android_depth_available_depth_stream_configurations_maximum_resolution_t;
+
+// ACAMERA_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION
+typedef enum acamera_metadata_enum_acamera_depth_available_dynamic_depth_stream_configurations_maximum_resolution {
+    ACAMERA_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION_OUTPUT
+                                                                      = 0,
+
+    ACAMERA_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION_INPUT
+                                                                      = 1,
+
+} acamera_metadata_enum_android_depth_available_dynamic_depth_stream_configurations_maximum_resolution_t;
+
 
 // ACAMERA_LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE
 typedef enum acamera_metadata_enum_acamera_logical_multi_camera_sensor_sync_type {
@@ -9207,6 +9850,16 @@
 
 } acamera_metadata_enum_android_heic_available_heic_stream_configurations_t;
 
+// ACAMERA_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION
+typedef enum acamera_metadata_enum_acamera_heic_available_heic_stream_configurations_maximum_resolution {
+    ACAMERA_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION_OUTPUT
+                                                                      = 0,
+
+    ACAMERA_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION_INPUT
+                                                                      = 1,
+
+} acamera_metadata_enum_android_heic_available_heic_stream_configurations_maximum_resolution_t;
+
 
 
 
diff --git a/include/private/media/AudioTrackShared.h b/include/private/media/AudioTrackShared.h
index 9cabd8b..200e92d 100644
--- a/include/private/media/AudioTrackShared.h
+++ b/include/private/media/AudioTrackShared.h
@@ -182,6 +182,7 @@
                 // This is set by AudioTrack.setBufferSizeInFrames().
                 // A write will not fill the buffer above this limit.
     volatile    uint32_t   mBufferSizeInFrames;  // effective size of the buffer
+    volatile    uint32_t   mStartThresholdInFrames; // min frames in buffer to start streaming
 
 public:
 
@@ -216,6 +217,8 @@
     };
 
     size_t frameCount() const { return mFrameCount; }
+    uint32_t getStartThresholdInFrames() const;
+    uint32_t setStartThresholdInFrames(uint32_t startThresholdInFrames);
 
 protected:
     // These refer to shared memory, and are virtual addresses with respect to the current process.
diff --git a/media/codec2/components/aac/C2SoftAacDec.cpp b/media/codec2/components/aac/C2SoftAacDec.cpp
index 3e6b0ff..332696d 100644
--- a/media/codec2/components/aac/C2SoftAacDec.cpp
+++ b/media/codec2/components/aac/C2SoftAacDec.cpp
@@ -55,6 +55,8 @@
 namespace android {
 
 constexpr char COMPONENT_NAME[] = "c2.android.aac.decoder";
+constexpr size_t kDefaultOutputPortDelay = 2;
+constexpr size_t kMaxOutputPortDelay = 16;
 
 class C2SoftAacDec::IntfImpl : public SimpleInterface<void>::BaseParams {
 public:
@@ -73,7 +75,9 @@
 
         addParameter(
                 DefineParam(mActualOutputDelay, C2_PARAMKEY_OUTPUT_DELAY)
-                .withConstValue(new C2PortActualDelayTuning::output(2u))
+                .withDefault(new C2PortActualDelayTuning::output(kDefaultOutputPortDelay))
+                .withFields({C2F(mActualOutputDelay, value).inRange(0, kMaxOutputPortDelay)})
+                .withSetter(Setter<decltype(*mActualOutputDelay)>::StrictValueWithNoDeps)
                 .build());
 
         addParameter(
@@ -263,6 +267,7 @@
       mAACDecoder(nullptr),
       mStreamInfo(nullptr),
       mSignalledError(false),
+      mOutputPortDelay(kDefaultOutputPortDelay),
       mOutputDelayRingBuffer(nullptr) {
 }
 
@@ -915,6 +920,29 @@
 
     int32_t outputDelay = mStreamInfo->outputDelay * mStreamInfo->numChannels;
 
+    size_t numSamplesInOutput = mStreamInfo->frameSize * mStreamInfo->numChannels;
+    if (numSamplesInOutput > 0) {
+        size_t actualOutputPortDelay = (outputDelay + numSamplesInOutput - 1) / numSamplesInOutput;
+        if (actualOutputPortDelay > mOutputPortDelay) {
+            mOutputPortDelay = actualOutputPortDelay;
+            ALOGV("New Output port delay %zu ", mOutputPortDelay);
+
+            C2PortActualDelayTuning::output outputPortDelay(mOutputPortDelay);
+            std::vector<std::unique_ptr<C2SettingResult>> failures;
+            c2_status_t err =
+                mIntf->config({&outputPortDelay}, C2_MAY_BLOCK, &failures);
+            if (err == OK) {
+                work->worklets.front()->output.configUpdate.push_back(
+                    C2Param::Copy(outputPortDelay));
+            } else {
+                ALOGE("Cannot set output delay");
+                mSignalledError = true;
+                work->workletsProcessed = 1u;
+                work->result = C2_CORRUPTED;
+                return;
+            }
+        }
+    }
     mBuffersInfo.push_back(std::move(inInfo));
     work->workletsProcessed = 0u;
     if (!eos && mOutputDelayCompensated < outputDelay) {
diff --git a/media/codec2/components/aac/C2SoftAacDec.h b/media/codec2/components/aac/C2SoftAacDec.h
index 965c29e..986187c 100644
--- a/media/codec2/components/aac/C2SoftAacDec.h
+++ b/media/codec2/components/aac/C2SoftAacDec.h
@@ -57,6 +57,7 @@
     size_t mInputBufferCount;
     size_t mOutputBufferCount;
     bool mSignalledError;
+    size_t mOutputPortDelay;
     struct Info {
         uint64_t frameIndex;
         size_t bufferSize;
diff --git a/media/codec2/components/avc/C2SoftAvcEnc.cpp b/media/codec2/components/avc/C2SoftAvcEnc.cpp
index 43b2c14..940f57c 100644
--- a/media/codec2/components/avc/C2SoftAvcEnc.cpp
+++ b/media/codec2/components/avc/C2SoftAvcEnc.cpp
@@ -19,6 +19,8 @@
 #include <log/log.h>
 #include <utils/misc.h>
 
+#include <algorithm>
+
 #include <media/hardware/VideoAPI.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaErrors.h>
@@ -121,6 +123,19 @@
                 .build());
 
         addParameter(
+                DefineParam(mPictureQuantization, C2_PARAMKEY_PICTURE_QUANTIZATION)
+                .withDefault(C2StreamPictureQuantizationTuning::output::AllocShared(
+                        0 /* flexCount */, 0u /* stream */))
+                .withFields({C2F(mPictureQuantization, m.values[0].type_).oneOf(
+                                {C2Config::picture_type_t(I_FRAME),
+                                  C2Config::picture_type_t(P_FRAME),
+                                  C2Config::picture_type_t(B_FRAME)}),
+                             C2F(mPictureQuantization, m.values[0].min).any(),
+                             C2F(mPictureQuantization, m.values[0].max).any()})
+                .withSetter(PictureQuantizationSetter)
+                .build());
+
+        addParameter(
                 DefineParam(mActualInputDelay, C2_PARAMKEY_INPUT_DELAY)
                 .withDefault(new C2PortActualDelayTuning::input(DEFAULT_B_FRAMES))
                 .withFields({C2F(mActualInputDelay, value).inRange(0, MAX_B_FRAMES)})
@@ -186,23 +201,6 @@
                 .build());
 
         addParameter(
-                DefineParam(mQuantization, C2_PARAMKEY_QUANTIZATION)
-                .withDefault(new C2StreamQuantizationInfo::output(0u,
-                                      DEFAULT_QP_MAX, DEFAULT_QP_MIN,
-                                      DEFAULT_QP_MAX, DEFAULT_QP_MIN,
-                                      DEFAULT_QP_MAX, DEFAULT_QP_MIN))
-                .withFields({
-                        C2F(mQuantization, iMax).inRange(1, 51),
-                        C2F(mQuantization, iMin).inRange(1, 51),
-                        C2F(mQuantization, pMax).inRange(1, 51),
-                        C2F(mQuantization, pMin).inRange(1, 51),
-                        C2F(mQuantization, bMax).inRange(1, 51),
-                        C2F(mQuantization, bMin).inRange(1, 51),
-                 })
-                .withSetter(QuantizationSetter)
-                .build());
-
-        addParameter(
                 DefineParam(mRequestSync, C2_PARAMKEY_REQUEST_SYNC_FRAME)
                 .withDefault(new C2StreamRequestSyncFrameTuning::output(0u, C2_FALSE))
                 .withFields({C2F(mRequestSync, value).oneOf({ C2_FALSE, C2_TRUE }) })
@@ -237,70 +235,6 @@
         return res;
     }
 
-    static C2R QuantizationSetter(bool mayBlock, C2P<C2StreamQuantizationInfo::output> &me) {
-        (void)mayBlock;
-        (void)me;
-        C2R res = C2R::Ok();
-
-        ALOGV("QuantizationSetter enters max/min i %d/%d p %d/%d b %d/%d",
-              me.v.iMax, me.v.iMin, me.v.pMax, me.v.pMin, me.v.bMax, me.v.bMin);
-
-        // bounds checking
-        constexpr int qp_lowest = 1;
-        constexpr int qp_highest = 51;
-
-        if (me.v.iMax < qp_lowest) {
-            me.set().iMax = qp_lowest;
-        } else if (me.v.iMax > qp_highest) {
-            me.set().iMax = qp_highest;
-        }
-
-        if (me.v.iMin < qp_lowest) {
-            me.set().iMin = qp_lowest;
-        } else if (me.v.iMin > qp_highest) {
-            me.set().iMin = qp_highest;
-        }
-
-        if (me.v.pMax < qp_lowest) {
-            me.set().pMax = qp_lowest;
-        } else if (me.v.pMax > qp_highest) {
-            me.set().pMax = qp_highest;
-        }
-
-        if (me.v.pMin < qp_lowest) {
-            me.set().pMin = qp_lowest;
-        } else if (me.v.pMin > qp_highest) {
-            me.set().pMin = qp_highest;
-        }
-
-        if (me.v.bMax < qp_lowest) {
-            me.set().bMax = qp_lowest;
-        } else if (me.v.bMax > qp_highest) {
-            me.set().bMax = qp_highest;
-        }
-
-        if (me.v.bMin < qp_lowest) {
-            me.set().bMin = qp_lowest;
-        } else if (me.v.bMin > qp_highest) {
-            me.set().bMin = qp_highest;
-        }
-
-        // consistency checking, e.g. min<max
-        //
-        if (me.v.iMax < me.v.iMin) {
-            me.set().iMax = me.v.iMin;
-        }
-        if (me.v.pMax < me.v.pMin) {
-            me.set().pMax = me.v.pMin;
-        }
-        if (me.v.bMax < me.v.bMin) {
-            me.set().bMax = me.v.bMin;
-        }
-
-        // TODO: enforce any sort of i_max < p_max < b_max?
-
-        return res;
-    }
 
     static C2R SizeSetter(bool mayBlock, const C2P<C2StreamPictureSizeInfo::input> &oldMe,
                           C2P<C2StreamPictureSizeInfo::input> &me) {
@@ -418,6 +352,13 @@
         return C2R::Ok();
     }
 
+    static C2R PictureQuantizationSetter(bool mayBlock,
+                                         C2P<C2StreamPictureQuantizationTuning::output> &me) {
+        (void)mayBlock;
+        (void)me;
+        return C2R::Ok();
+    }
+
     IV_PROFILE_T getProfile_l() const {
         switch (mProfileLevel->profile) {
         case PROFILE_AVC_CONSTRAINED_BASELINE:  [[fallthrough]];
@@ -475,7 +416,8 @@
     std::shared_ptr<C2StreamBitrateInfo::output> getBitrate_l() const { return mBitrate; }
     std::shared_ptr<C2StreamRequestSyncFrameTuning::output> getRequestSync_l() const { return mRequestSync; }
     std::shared_ptr<C2StreamGopTuning::output> getGop_l() const { return mGop; }
-    std::shared_ptr<C2StreamQuantizationInfo::output> getQuantization_l() const { return mQuantization; }
+    std::shared_ptr<C2StreamPictureQuantizationTuning::output> getPictureQuantization_l() const
+    { return mPictureQuantization; }
 
 private:
     std::shared_ptr<C2StreamUsageTuning::input> mUsage;
@@ -487,7 +429,7 @@
     std::shared_ptr<C2StreamProfileLevelInfo::output> mProfileLevel;
     std::shared_ptr<C2StreamSyncFrameIntervalTuning::output> mSyncFramePeriod;
     std::shared_ptr<C2StreamGopTuning::output> mGop;
-    std::shared_ptr<C2StreamQuantizationInfo::output> mQuantization;
+    std::shared_ptr<C2StreamPictureQuantizationTuning::output> mPictureQuantization;
 };
 
 #define ive_api_function  ih264e_api_function
@@ -748,37 +690,68 @@
     ive_ctl_set_qp_op_t s_qp_op;
     IV_STATUS_T status;
 
+    ALOGV("in setQp()");
+
     // set the defaults
     s_qp_ip.e_cmd = IVE_CMD_VIDEO_CTL;
     s_qp_ip.e_sub_cmd = IVE_CMD_CTL_SET_QP;
 
-    s_qp_ip.u4_i_qp = DEFAULT_I_QP;
-    s_qp_ip.u4_i_qp_max = DEFAULT_QP_MAX;
-    s_qp_ip.u4_i_qp_min = DEFAULT_QP_MIN;
+    // These are the ones we're going to set, so we want them to default
+    // to the DEFAULT values for the codec instead of CODEC_ bounding.
+    int32_t iMin = INT32_MIN, pMin = INT32_MIN, bMin = INT32_MIN;
+    int32_t iMax = INT32_MAX, pMax = INT32_MAX, bMax = INT32_MAX;
 
-    s_qp_ip.u4_p_qp = DEFAULT_P_QP;
-    s_qp_ip.u4_p_qp_max = DEFAULT_QP_MAX;
-    s_qp_ip.u4_p_qp_min = DEFAULT_QP_MIN;
+    std::shared_ptr<C2StreamPictureQuantizationTuning::output> qp =
+                    mIntf->getPictureQuantization_l();
+    for (size_t i = 0; i < qp->flexCount(); ++i) {
+        const C2PictureQuantizationStruct &layer = qp->m.values[i];
 
-    s_qp_ip.u4_b_qp = DEFAULT_P_QP;
-    s_qp_ip.u4_b_qp_max = DEFAULT_QP_MAX;
-    s_qp_ip.u4_b_qp_min = DEFAULT_QP_MIN;
-
-    // parameter parsing ensured proper range 1..51, so only worry about ordering
-    bool valid = true;
-    if (mQuantization->iMax < mQuantization->iMin) valid = false;
-    if (mQuantization->pMax < mQuantization->pMin) valid = false;
-    if (mQuantization->bMax < mQuantization->bMin) valid = false;
-
-    if (valid) {
-        s_qp_ip.u4_i_qp_max = mQuantization->iMax;
-        s_qp_ip.u4_i_qp_min = mQuantization->iMin;
-        s_qp_ip.u4_p_qp_max = mQuantization->pMax;
-        s_qp_ip.u4_p_qp_min = mQuantization->pMin;
-        s_qp_ip.u4_b_qp_max = mQuantization->bMax;
-        s_qp_ip.u4_b_qp_min = mQuantization->bMin;
+        if (layer.type_ == C2Config::picture_type_t(I_FRAME)) {
+            iMax = layer.max;
+            iMin = layer.min;
+            ALOGV("iMin %d iMax %d", iMin, iMax);
+        } else if (layer.type_ == C2Config::picture_type_t(P_FRAME)) {
+            pMax = layer.max;
+            pMin = layer.min;
+            ALOGV("pMin %d pMax %d", pMin, pMax);
+        } else if (layer.type_ == C2Config::picture_type_t(B_FRAME)) {
+            bMax = layer.max;
+            bMin = layer.min;
+            ALOGV("bMin %d bMax %d", bMin, bMax);
+        }
     }
 
+    // INT32_{MIN,MAX} means unspecified, so use the codec's default
+    if (iMax == INT32_MAX) iMax = DEFAULT_I_QP_MAX;
+    if (iMin == INT32_MIN) iMin = DEFAULT_I_QP_MIN;
+    if (pMax == INT32_MAX) pMax = DEFAULT_P_QP_MAX;
+    if (pMin == INT32_MIN) pMin = DEFAULT_P_QP_MIN;
+    if (bMax == INT32_MAX) bMax = DEFAULT_B_QP_MAX;
+    if (bMin == INT32_MIN) bMin = DEFAULT_B_QP_MIN;
+
+    // ensure we have legal values
+    iMax = std::clamp(iMax, CODEC_QP_MIN, CODEC_QP_MAX);
+    iMin = std::clamp(iMin, CODEC_QP_MIN, CODEC_QP_MAX);
+    pMax = std::clamp(pMax, CODEC_QP_MIN, CODEC_QP_MAX);
+    pMin = std::clamp(pMin, CODEC_QP_MIN, CODEC_QP_MAX);
+    bMax = std::clamp(bMax, CODEC_QP_MIN, CODEC_QP_MAX);
+    bMin = std::clamp(bMin, CODEC_QP_MIN, CODEC_QP_MAX);
+
+    s_qp_ip.u4_i_qp_max = iMax;
+    s_qp_ip.u4_i_qp_min = iMin;
+    s_qp_ip.u4_p_qp_max = pMax;
+    s_qp_ip.u4_p_qp_min = pMin;
+    s_qp_ip.u4_b_qp_max = bMax;
+    s_qp_ip.u4_b_qp_min = bMin;
+
+    // ensure initial qp values are within our newly configured bounds...
+    s_qp_ip.u4_i_qp = std::clamp(DEFAULT_I_QP, iMin, iMax);
+    s_qp_ip.u4_p_qp = std::clamp(DEFAULT_P_QP, pMin, pMax);
+    s_qp_ip.u4_b_qp = std::clamp(DEFAULT_B_QP, bMin, bMax);
+
+    ALOGV("setting QP: i %d-%d p %d-%d b %d-%d", iMin, iMax, pMin, pMax, bMin, bMax);
+
+
     s_qp_ip.u4_timestamp_high = -1;
     s_qp_ip.u4_timestamp_low = -1;
 
@@ -1026,7 +999,6 @@
         mIInterval = mIntf->getSyncFramePeriod_l();
         mIDRInterval = mIntf->getSyncFramePeriod_l();
         gop = mIntf->getGop_l();
-        mQuantization = mIntf->getQuantization_l();
     }
     if (gop && gop->flexCount() > 0) {
         uint32_t syncInterval = 1;
diff --git a/media/codec2/components/avc/C2SoftAvcEnc.h b/media/codec2/components/avc/C2SoftAvcEnc.h
index e4bf0b0..ee6d47a 100644
--- a/media/codec2/components/avc/C2SoftAvcEnc.h
+++ b/media/codec2/components/avc/C2SoftAvcEnc.h
@@ -100,6 +100,11 @@
 #define STRLENGTH                   500
 #define DEFAULT_CONSTRAINED_INTRA   0
 
+/** limits as specified by h264 */
+#define CODEC_QP_MIN                0
+#define CODEC_QP_MAX                51
+
+
 #define MIN(a, b) ((a) < (b))? (a) : (b)
 #define MAX(a, b) ((a) > (b))? (a) : (b)
 #define ALIGN16(x) ((((x) + 15) >> 4) << 4)
@@ -192,7 +197,6 @@
     std::shared_ptr<C2StreamFrameRateInfo::output> mFrameRate;
     std::shared_ptr<C2StreamBitrateInfo::output> mBitrate;
     std::shared_ptr<C2StreamRequestSyncFrameTuning::output> mRequestSync;
-    std::shared_ptr<C2StreamQuantizationInfo::output> mQuantization;
 
     uint32_t mOutBufferSize;
     UWORD32 mHeaderGenerated;
diff --git a/media/codec2/core/include/C2Buffer.h b/media/codec2/core/include/C2Buffer.h
index fe37b05..a5d6fbf 100644
--- a/media/codec2/core/include/C2Buffer.h
+++ b/media/codec2/core/include/C2Buffer.h
@@ -642,7 +642,8 @@
      * \retval C2_REFUSED   no permission to complete the allocation
      * \retval C2_BAD_VALUE capacity or usage are not supported (invalid) (caller error)
      * \retval C2_OMITTED   this allocator does not support 1D allocations
-     * \retval C2_CORRUPTED some unknown, unrecoverable error occured during allocation (unexpected)
+     * \retval C2_CORRUPTED some unknown, unrecoverable error occurred during allocation
+     *                      (unexpected)
      */
     virtual c2_status_t newLinearAllocation(
             uint32_t capacity __unused, C2MemoryUsage usage __unused,
@@ -666,7 +667,8 @@
      * \retval C2_REFUSED   no permission to recreate the allocation
      * \retval C2_BAD_VALUE invalid handle (caller error)
      * \retval C2_OMITTED   this allocator does not support 1D allocations
-     * \retval C2_CORRUPTED some unknown, unrecoverable error occured during allocation (unexpected)
+     * \retval C2_CORRUPTED some unknown, unrecoverable error occurred during allocation
+     *                      (unexpected)
      */
     virtual c2_status_t priorLinearAllocation(
             const C2Handle *handle __unused,
@@ -699,7 +701,8 @@
      * \retval C2_REFUSED   no permission to complete the allocation
      * \retval C2_BAD_VALUE width, height, format or usage are not supported (invalid) (caller error)
      * \retval C2_OMITTED   this allocator does not support 2D allocations
-     * \retval C2_CORRUPTED some unknown, unrecoverable error occured during allocation (unexpected)
+     * \retval C2_CORRUPTED some unknown, unrecoverable error occurred during allocation
+     *                      (unexpected)
      */
     virtual c2_status_t newGraphicAllocation(
             uint32_t width __unused, uint32_t height __unused, uint32_t format __unused,
@@ -724,7 +727,8 @@
      * \retval C2_REFUSED   no permission to recreate the allocation
      * \retval C2_BAD_VALUE invalid handle (caller error)
      * \retval C2_OMITTED   this allocator does not support 2D allocations
-     * \retval C2_CORRUPTED some unknown, unrecoverable error occured during recreation (unexpected)
+     * \retval C2_CORRUPTED some unknown, unrecoverable error occurred during recreation
+     *                      (unexpected)
      */
     virtual c2_status_t priorGraphicAllocation(
             const C2Handle *handle __unused,
@@ -908,7 +912,8 @@
      * \retval C2_REFUSED   no permission to complete any required allocation
      * \retval C2_BAD_VALUE capacity or usage are not supported (invalid) (caller error)
      * \retval C2_OMITTED   this pool does not support linear blocks
-     * \retval C2_CORRUPTED some unknown, unrecoverable error occured during operation (unexpected)
+     * \retval C2_CORRUPTED some unknown, unrecoverable error occurred during operation
+     *                      (unexpected)
      */
     virtual c2_status_t fetchLinearBlock(
             uint32_t capacity __unused, C2MemoryUsage usage __unused,
@@ -937,7 +942,8 @@
      * \retval C2_REFUSED   no permission to complete any required allocation
      * \retval C2_BAD_VALUE capacity or usage are not supported (invalid) (caller error)
      * \retval C2_OMITTED   this pool does not support circular blocks
-     * \retval C2_CORRUPTED some unknown, unrecoverable error occured during operation (unexpected)
+     * \retval C2_CORRUPTED some unknown, unrecoverable error occurred during operation
+     *                      (unexpected)
      */
     virtual c2_status_t fetchCircularBlock(
             uint32_t capacity __unused, C2MemoryUsage usage __unused,
@@ -969,7 +975,8 @@
      * \retval C2_BAD_VALUE width, height, format or usage are not supported (invalid) (caller
      *                      error)
      * \retval C2_OMITTED   this pool does not support 2D blocks
-     * \retval C2_CORRUPTED some unknown, unrecoverable error occured during operation (unexpected)
+     * \retval C2_CORRUPTED some unknown, unrecoverable error occurred during operation
+     *                      (unexpected)
      */
     virtual c2_status_t fetchGraphicBlock(
             uint32_t width __unused, uint32_t height __unused, uint32_t format __unused,
@@ -980,6 +987,90 @@
     }
 
     virtual ~C2BlockPool() = default;
+
+    /**
+     * Blocking fetch for linear block. Obtains a linear writable block of given |capacity|
+     * and |usage|. If a block can be successfully obtained, the block is stored in |block|,
+     * |fence| is set to a null-fence and C2_OK is returned.
+     *
+     * If a block cannot be temporarily obtained, |block| is set to nullptr, a waitable fence
+     * is stored into |fence| and C2_BLOCKING is returned.
+     *
+     * Otherwise, |block| is set to nullptr and |fence| is set to a null-fence. The waitable
+     * fence is signalled when the temporary restriction on fetch is lifted.
+     * e.g. more memory is available to fetch because some memory or prior blocks were released.
+     *
+     * \param capacity the size of requested block.
+     * \param usage    the memory usage info for the requested block. Returned blocks will be
+     *                 optimized for this usage, but may be used with any usage. One exception:
+     *                 protected blocks/buffers can only be used in a protected scenario.
+     * \param block    pointer to where the obtained block shall be stored on success. nullptr will
+     *                 be stored here on failure
+     * \param fence    pointer to where the fence shall be stored on C2_BLOCKING error.
+     *
+     * \retval C2_OK        the operation was successful
+     * \retval C2_NO_MEMORY not enough memory to complete any required allocation
+     * \retval C2_TIMED_OUT the operation timed out
+     * \retval C2_BLOCKING  the operation is blocked
+     * \retval C2_REFUSED   no permission to complete any required allocation
+     * \retval C2_BAD_VALUE capacity or usage are not supported (invalid) (caller error)
+     * \retval C2_OMITTED   this pool does not support linear blocks or fences.
+     * \retval C2_CORRUPTED some unknown, unrecoverable error occurred during operation
+     *                      (unexpected)
+     */
+    virtual c2_status_t fetchLinearBlock(
+            uint32_t capacity __unused, C2MemoryUsage usage __unused,
+            std::shared_ptr<C2LinearBlock> *block /* nonnull */,
+            C2Fence *fence /* nonnull */) {
+        *block = nullptr;
+        (void) fence;
+        return C2_OMITTED;
+    }
+
+    /**
+     * Blocking fetch for 2D graphic block. Obtains a 2D graphic writable block of given |capacity|
+     * and |usage|. If a block can be successfully obtained, the block is stored in |block|,
+     * |fence| is set to a null-fence and C2_OK is returned.
+     *
+     * If a block cannot be temporarily obtained, |block| is set to nullptr, a waitable fence
+     * is stored into |fence| and C2_BLOCKING is returned.
+     *
+     * Otherwise, |block| is set to nullptr and |fence| is set to a null-fence. The waitable
+     * fence is signalled when the temporary restriction on fetch is lifted.
+     * e.g. more memory is available to fetch because some memory or prior blocks were released.
+     *
+     * \param width  the width of requested block (the obtained block could be slightly larger, e.g.
+     *               to accommodate any system-required alignment)
+     * \param height the height of requested block (the obtained block could be slightly larger,
+     *               e.g. to accommodate any system-required alignment)
+     * \param format the pixel format of requested block. This could be a vendor specific format.
+     * \param usage  the memory usage info for the requested block. Returned blocks will be
+     *               optimized for this usage, but may be used with any usage. One exception:
+     *               protected blocks/buffers can only be used in a protected scenario.
+     * \param block  pointer to where the obtained block shall be stored on success. nullptr
+     *               will be stored here on failure
+     * \param fence  pointer to where the fence shall be stored on C2_BLOCKING error.
+     *
+     * \retval C2_OK        the operation was successful
+     * \retval C2_NO_MEMORY not enough memory to complete any required allocation
+     * \retval C2_TIMED_OUT the operation timed out
+     * \retval C2_BLOCKING  the operation is blocked
+     * \retval C2_REFUSED   no permission to complete any required allocation
+     * \retval C2_BAD_VALUE width, height, format or usage are not supported (invalid) (caller
+     *                      error)
+     * \retval C2_OMITTED   this pool does not support 2D blocks
+     * \retval C2_CORRUPTED some unknown, unrecoverable error occurred during operation
+     *                      (unexpected)
+     */
+    virtual c2_status_t fetchGraphicBlock(
+            uint32_t width __unused, uint32_t height __unused, uint32_t format __unused,
+            C2MemoryUsage usage __unused,
+            std::shared_ptr<C2GraphicBlock> *block /* nonnull */,
+            C2Fence *fence /* nonnull */) {
+        *block = nullptr;
+        (void) fence;
+        return C2_OMITTED;
+    }
 protected:
     C2BlockPool() = default;
 };
diff --git a/media/codec2/core/include/C2Config.h b/media/codec2/core/include/C2Config.h
index 8e8a08b..f8aa672 100644
--- a/media/codec2/core/include/C2Config.h
+++ b/media/codec2/core/include/C2Config.h
@@ -187,7 +187,7 @@
     kParamIndexPictureType,
     kParamIndexHdr10PlusMetadata,
 
-    kParamIndexQuantization,
+    kParamIndexPictureQuantization,
 
     /* ------------------------------------ video components ------------------------------------ */
 
@@ -710,38 +710,6 @@
         C2StreamProfileLevelInfo;
 constexpr char C2_PARAMKEY_PROFILE_LEVEL[] = "coded.pl";
 
-struct C2QuantizationStruct {
-    int32_t iMax;  ///< max/min for I frames
-    int32_t iMin;
-    int32_t pMax;  ///< max/min for P frames
-    int32_t pMin;
-    int32_t bMax;  ///< max/min for B frames
-    int32_t bMin;
-
-    C2QuantizationStruct(
-            int32_t iMax_ = INT32_MAX,
-            int32_t iMin_ = INT32_MIN,
-            int32_t pMax_ = INT32_MAX,
-            int32_t pMin_ = INT32_MIN,
-            int32_t bMax_ = INT32_MAX,
-            int32_t bMin_ = INT32_MIN)
-        : iMax(iMax_), iMin(iMin_),
-          pMax(pMax_), pMin(pMin_),
-          bMax(bMax_), bMin(bMin_) { }
-
-    DEFINE_AND_DESCRIBE_C2STRUCT(Quantization)          // reference?
-    C2FIELD(iMax, "i-max")
-    C2FIELD(iMin, "i-min")
-    C2FIELD(pMax, "p-max")
-    C2FIELD(pMin, "p-min")
-    C2FIELD(bMax, "b-max")
-    C2FIELD(bMin, "b-min")
-};
-
-typedef C2StreamParam<C2Info, C2QuantizationStruct, kParamIndexQuantization>
-        C2StreamQuantizationInfo;
-constexpr char C2_PARAMKEY_QUANTIZATION[] = "coded.qp";
-
 /**
  * Codec-specific initialization data.
  *
@@ -1733,6 +1701,31 @@
 constexpr char C2_PARAMKEY_GOP[] = "coding.gop";
 
 /**
+ * Quantization
+ * min/max for each picture type
+ *
+ */
+struct C2PictureQuantizationStruct {
+    C2PictureQuantizationStruct() : type_((C2Config::picture_type_t)0),
+                                         min(INT32_MIN), max(INT32_MAX) {}
+    C2PictureQuantizationStruct(C2Config::picture_type_t type, int32_t min_, int32_t max_)
+        : type_(type), min(min_), max(max_) { }
+
+    C2Config::picture_type_t type_;
+    int32_t min;      // INT32_MIN == 'no lower bound specified'
+    int32_t max;      // INT32_MAX == 'no upper bound specified'
+
+    DEFINE_AND_DESCRIBE_C2STRUCT(PictureQuantization)
+    C2FIELD(type_, "type")
+    C2FIELD(min, "min")
+    C2FIELD(max, "max")
+};
+
+typedef C2StreamParam<C2Tuning, C2SimpleArrayStruct<C2PictureQuantizationStruct>,
+        kParamIndexPictureQuantization> C2StreamPictureQuantizationTuning;
+constexpr char C2_PARAMKEY_PICTURE_QUANTIZATION[] = "coding.qp";
+
+/**
  * Sync frame can be requested on demand by the client.
  *
  * If true, the next I frame shall be encoded as a sync frame. This config can be passed
diff --git a/media/codec2/hidl/1.0/utils/Android.bp b/media/codec2/hidl/1.0/utils/Android.bp
index 008def8..122aacd 100644
--- a/media/codec2/hidl/1.0/utils/Android.bp
+++ b/media/codec2/hidl/1.0/utils/Android.bp
@@ -15,7 +15,6 @@
     defaults: ["hidl_defaults"],
 
     srcs: [
-        "OutputBufferQueue.cpp",
         "types.cpp",
     ],
 
diff --git a/media/codec2/hidl/1.1/utils/Android.bp b/media/codec2/hidl/1.1/utils/Android.bp
index 839a910..0eeedb6 100644
--- a/media/codec2/hidl/1.1/utils/Android.bp
+++ b/media/codec2/hidl/1.1/utils/Android.bp
@@ -15,7 +15,6 @@
     defaults: ["hidl_defaults"],
 
     srcs: [
-        "OutputBufferQueue.cpp",
         "types.cpp",
     ],
 
@@ -176,14 +175,3 @@
     ],
 }
 
-// Alias to the latest "defaults" for Codec 2.0 HAL service implementations
-cc_defaults {
-    name: "libcodec2-hidl-defaults",
-    defaults: ["libcodec2-hidl-defaults@1.1"],
-}
-
-// Alias to the latest "defaults" for Codec 2.0 HAL client
-cc_defaults {
-    name: "libcodec2-hidl-client-defaults",
-    defaults: ["libcodec2-hidl-client-defaults@1.1"],
-}
diff --git a/media/codec2/hidl/1.1/utils/ComponentStore.cpp b/media/codec2/hidl/1.1/utils/ComponentStore.cpp
index 163686d..d47abdd 100644
--- a/media/codec2/hidl/1.1/utils/ComponentStore.cpp
+++ b/media/codec2/hidl/1.1/utils/ComponentStore.cpp
@@ -366,6 +366,9 @@
             mStore->createComponent(name, &c2component));
 
     if (status == Status::OK) {
+#ifndef __ANDROID_APEX__
+        c2component = GetFilterWrapper()->maybeWrapComponent(c2component);
+#endif
         onInterfaceLoaded(c2component->intf());
         component = new Component(c2component, listener, this, pool);
         if (!component) {
diff --git a/media/codec2/hidl/1.2/utils/Android.bp b/media/codec2/hidl/1.2/utils/Android.bp
new file mode 100644
index 0000000..e4e4ad5
--- /dev/null
+++ b/media/codec2/hidl/1.2/utils/Android.bp
@@ -0,0 +1,206 @@
+// DO NOT DEPEND ON THIS DIRECTLY
+// use libcodec2-hidl-client-defaults instead
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_library {
+    name: "libcodec2_hidl_client@1.2",
+
+    defaults: ["hidl_defaults"],
+
+    srcs: [
+        "types.cpp",
+    ],
+
+    header_libs: [
+        "libcodec2_internal", // private
+    ],
+
+    shared_libs: [
+        "android.hardware.media.bufferpool@2.0",
+        "android.hardware.media.c2@1.0",
+        "android.hardware.media.c2@1.1",
+        "android.hardware.media.c2@1.2",
+        "libbase",
+        "libcodec2",
+        "libcodec2_hidl_client@1.0",
+        "libcodec2_hidl_client@1.1",
+        "libcodec2_vndk",
+        "libcutils",
+        "libgui",
+        "libhidlbase",
+        "liblog",
+        "libstagefright_bufferpool@2.0.1",
+        "libui",
+        "libutils",
+    ],
+
+    export_include_dirs: [
+        "include",
+    ],
+
+    export_shared_lib_headers: [
+        "android.hardware.media.c2@1.0",
+        "android.hardware.media.c2@1.1",
+        "android.hardware.media.c2@1.2",
+        "libcodec2",
+        "libcodec2_hidl_client@1.0",
+        "libcodec2_hidl_client@1.1",
+        "libgui",
+        "libstagefright_bufferpool@2.0.1",
+        "libui",
+    ],
+
+    // Device does not boot when global ThinLTO is enabled for this library.
+    // http://b/170595429
+    lto: {
+        never: true,
+    },
+}
+
+
+// DO NOT DEPEND ON THIS DIRECTLY
+// use libcodec2-hidl-defaults instead
+cc_library {
+    name: "libcodec2_hidl@1.2",
+    vendor_available: true,
+    min_sdk_version: "29",
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media.swcodec",
+        "test_com.android.media.swcodec",
+    ],
+
+    defaults: ["hidl_defaults"],
+
+    srcs: [
+        "Component.cpp",
+        "ComponentInterface.cpp",
+        "ComponentStore.cpp",
+        "Configurable.cpp",
+        "InputBufferManager.cpp",
+        "InputSurface.cpp",
+        "InputSurfaceConnection.cpp",
+        "types.cpp",
+    ],
+
+    header_libs: [
+        "libbinder_headers",
+        "libsystem_headers",
+        "libcodec2_internal", // private
+    ],
+
+    shared_libs: [
+        "android.hardware.graphics.bufferqueue@1.0",
+        "android.hardware.graphics.bufferqueue@2.0",
+        "android.hardware.graphics.common@1.0",
+        "android.hardware.media@1.0",
+        "android.hardware.media.bufferpool@2.0",
+        "android.hardware.media.c2@1.0",
+        "android.hardware.media.c2@1.1",
+        "android.hardware.media.c2@1.2",
+        "android.hardware.media.omx@1.0",
+        "libbase",
+        "libcodec2",
+        "libcodec2_hidl@1.0",
+        "libcodec2_hidl@1.1",
+        "libcodec2_hidl_plugin_stub",
+        "libcodec2_vndk",
+        "libcutils",
+        "libhidlbase",
+        "liblog",
+        "libstagefright_bufferpool@2.0.1",
+        "libstagefright_bufferqueue_helper_novndk",
+        "libui",
+        "libutils",
+    ],
+
+    target: {
+        vendor: {
+            exclude_shared_libs: [
+                "libstagefright_bufferqueue_helper_novndk",
+                "libcodec2_hidl_plugin_stub",
+            ],
+            shared_libs: [
+                "libstagefright_bufferqueue_helper",
+                "libcodec2_hidl_plugin",
+            ],
+        },
+        apex: {
+            exclude_shared_libs: [
+                "libcodec2_hidl_plugin_stub",
+                "libcodec2_hidl_plugin",
+            ],
+        },
+    },
+
+    export_include_dirs: [
+        "include",
+    ],
+
+    export_shared_lib_headers: [
+        "android.hardware.media.c2@1.0",
+        "android.hardware.media.c2@1.1",
+        "android.hardware.media.c2@1.2",
+        "libcodec2",
+        "libcodec2_hidl@1.0",
+        "libcodec2_hidl@1.1",
+        "libcodec2_vndk",
+        "libhidlbase",
+        "libstagefright_bufferpool@2.0.1",
+        "libui",
+    ],
+}
+
+// public dependency for Codec 2.0 HAL service implementations
+cc_defaults {
+    name: "libcodec2-hidl-defaults@1.2",
+    defaults: ["libcodec2-impl-defaults"],
+
+    shared_libs: [
+        "android.hardware.media.c2@1.0",
+        "android.hardware.media.c2@1.1",
+        "android.hardware.media.c2@1.2",
+        "libcodec2_hidl@1.0",
+        "libcodec2_hidl@1.1",
+        "libcodec2_hidl@1.2",
+        "libcodec2_vndk",
+        "libhidlbase",
+    ],
+}
+
+// public dependency for Codec 2.0 HAL client
+cc_defaults {
+    name: "libcodec2-hidl-client-defaults@1.2",
+    defaults: ["libcodec2-impl-defaults"],
+
+    shared_libs: [
+        "android.hardware.media.c2@1.0",
+        "android.hardware.media.c2@1.1",
+        "android.hardware.media.c2@1.2",
+        "libcodec2_hidl_client@1.0",
+        "libcodec2_hidl_client@1.1",
+        "libcodec2_hidl_client@1.2",
+        "libcodec2_vndk",
+        "libhidlbase",
+    ],
+}
+
+// Alias to the latest "defaults" for Codec 2.0 HAL service implementations
+cc_defaults {
+    name: "libcodec2-hidl-defaults",
+    defaults: ["libcodec2-hidl-defaults@1.2"],
+}
+
+// Alias to the latest "defaults" for Codec 2.0 HAL client
+cc_defaults {
+    name: "libcodec2-hidl-client-defaults",
+    defaults: ["libcodec2-hidl-client-defaults@1.2"],
+}
+
diff --git a/media/codec2/hidl/1.2/utils/Component.cpp b/media/codec2/hidl/1.2/utils/Component.cpp
new file mode 100644
index 0000000..8924e6d
--- /dev/null
+++ b/media/codec2/hidl/1.2/utils/Component.cpp
@@ -0,0 +1,535 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "Codec2-Component@1.2"
+#include <android-base/logging.h>
+
+#include <codec2/hidl/1.2/Component.h>
+#include <codec2/hidl/1.2/ComponentStore.h>
+#include <codec2/hidl/1.2/InputBufferManager.h>
+
+#ifndef __ANDROID_APEX__
+#include <FilterWrapper.h>
+#endif
+
+#include <hidl/HidlBinderSupport.h>
+#include <utils/Timers.h>
+
+#include <C2BqBufferPriv.h>
+#include <C2Debug.h>
+#include <C2PlatformSupport.h>
+
+#include <chrono>
+#include <thread>
+
+namespace android {
+namespace hardware {
+namespace media {
+namespace c2 {
+namespace V1_2 {
+namespace utils {
+
+using namespace ::android;
+
+// ComponentListener wrapper
+struct Component::Listener : public C2Component::Listener {
+
+    Listener(const sp<Component>& component) :
+        mComponent(component),
+        mListener(component->mListener) {
+    }
+
+    virtual void onError_nb(
+            std::weak_ptr<C2Component> /* c2component */,
+            uint32_t errorCode) override {
+        sp<IComponentListener> listener = mListener.promote();
+        if (listener) {
+            Return<void> transStatus = listener->onError(Status::OK, errorCode);
+            if (!transStatus.isOk()) {
+                LOG(ERROR) << "Component::Listener::onError_nb -- "
+                           << "transaction failed.";
+            }
+        }
+    }
+
+    virtual void onTripped_nb(
+            std::weak_ptr<C2Component> /* c2component */,
+            std::vector<std::shared_ptr<C2SettingResult>> c2settingResult
+            ) override {
+        sp<IComponentListener> listener = mListener.promote();
+        if (listener) {
+            hidl_vec<SettingResult> settingResults(c2settingResult.size());
+            size_t ix = 0;
+            for (const std::shared_ptr<C2SettingResult> &c2result :
+                    c2settingResult) {
+                if (c2result) {
+                    if (!objcpy(&settingResults[ix++], *c2result)) {
+                        break;
+                    }
+                }
+            }
+            settingResults.resize(ix);
+            Return<void> transStatus = listener->onTripped(settingResults);
+            if (!transStatus.isOk()) {
+                LOG(ERROR) << "Component::Listener::onTripped_nb -- "
+                           << "transaction failed.";
+            }
+        }
+    }
+
+    virtual void onWorkDone_nb(
+            std::weak_ptr<C2Component> /* c2component */,
+            std::list<std::unique_ptr<C2Work>> c2workItems) override {
+        for (const std::unique_ptr<C2Work>& work : c2workItems) {
+            if (work) {
+                if (work->worklets.empty()
+                        || !work->worklets.back()
+                        || (work->worklets.back()->output.flags &
+                            C2FrameData::FLAG_INCOMPLETE) == 0) {
+                    InputBufferManager::
+                            unregisterFrameData(mListener, work->input);
+                }
+            }
+        }
+
+        sp<IComponentListener> listener = mListener.promote();
+        if (listener) {
+            WorkBundle workBundle;
+
+            sp<Component> strongComponent = mComponent.promote();
+            beginTransferBufferQueueBlocks(c2workItems, true);
+            if (!objcpy(&workBundle, c2workItems, strongComponent ?
+                    &strongComponent->mBufferPoolSender : nullptr)) {
+                LOG(ERROR) << "Component::Listener::onWorkDone_nb -- "
+                           << "received corrupted work items.";
+                endTransferBufferQueueBlocks(c2workItems, false, true);
+                return;
+            }
+            Return<void> transStatus = listener->onWorkDone(workBundle);
+            if (!transStatus.isOk()) {
+                LOG(ERROR) << "Component::Listener::onWorkDone_nb -- "
+                           << "transaction failed.";
+                endTransferBufferQueueBlocks(c2workItems, false, true);
+                return;
+            }
+            endTransferBufferQueueBlocks(c2workItems, true, true);
+        }
+    }
+
+protected:
+    wp<Component> mComponent;
+    wp<IComponentListener> mListener;
+};
+
+// Component::Sink
+struct Component::Sink : public IInputSink {
+    std::shared_ptr<Component> mComponent;
+    sp<IConfigurable> mConfigurable;
+
+    virtual Return<Status> queue(const WorkBundle& workBundle) override {
+        return mComponent->queue(workBundle);
+    }
+
+    virtual Return<sp<IConfigurable>> getConfigurable() override {
+        return mConfigurable;
+    }
+
+    Sink(const std::shared_ptr<Component>& component);
+    virtual ~Sink() override;
+
+    // Process-wide map: Component::Sink -> C2Component.
+    static std::mutex sSink2ComponentMutex;
+    static std::map<IInputSink*, std::weak_ptr<C2Component>> sSink2Component;
+
+    static std::shared_ptr<C2Component> findLocalComponent(
+            const sp<IInputSink>& sink);
+};
+
+std::mutex
+        Component::Sink::sSink2ComponentMutex{};
+std::map<IInputSink*, std::weak_ptr<C2Component>>
+        Component::Sink::sSink2Component{};
+
+Component::Sink::Sink(const std::shared_ptr<Component>& component)
+        : mComponent{component},
+          mConfigurable{[&component]() -> sp<IConfigurable> {
+              Return<sp<IComponentInterface>> ret1 = component->getInterface();
+              if (!ret1.isOk()) {
+                  LOG(ERROR) << "Sink::Sink -- component's transaction failed.";
+                  return nullptr;
+              }
+              Return<sp<IConfigurable>> ret2 =
+                      static_cast<sp<IComponentInterface>>(ret1)->
+                      getConfigurable();
+              if (!ret2.isOk()) {
+                  LOG(ERROR) << "Sink::Sink -- interface's transaction failed.";
+                  return nullptr;
+              }
+              return static_cast<sp<IConfigurable>>(ret2);
+          }()} {
+    std::lock_guard<std::mutex> lock(sSink2ComponentMutex);
+    sSink2Component.emplace(this, component->mComponent);
+}
+
+Component::Sink::~Sink() {
+    std::lock_guard<std::mutex> lock(sSink2ComponentMutex);
+    sSink2Component.erase(this);
+}
+
+std::shared_ptr<C2Component> Component::Sink::findLocalComponent(
+        const sp<IInputSink>& sink) {
+    std::lock_guard<std::mutex> lock(sSink2ComponentMutex);
+    auto i = sSink2Component.find(sink.get());
+    if (i == sSink2Component.end()) {
+        return nullptr;
+    }
+    return i->second.lock();
+}
+
+// Component
+Component::Component(
+        const std::shared_ptr<C2Component>& component,
+        const sp<IComponentListener>& listener,
+        const sp<ComponentStore>& store,
+        const sp<::android::hardware::media::bufferpool::V2_0::
+        IClientManager>& clientPoolManager)
+      : mComponent{component},
+        mInterface{new ComponentInterface(component->intf(),
+                                          store->getParameterCache())},
+        mListener{listener},
+        mStore{store},
+        mBufferPoolSender{clientPoolManager} {
+    // Retrieve supported parameters from store
+    // TODO: We could cache this per component/interface type
+    mInit = mInterface->status();
+}
+
+c2_status_t Component::status() const {
+    return mInit;
+}
+
+// Methods from ::android::hardware::media::c2::V1_2::IComponent
+Return<Status> Component::queue(const WorkBundle& workBundle) {
+    std::list<std::unique_ptr<C2Work>> c2works;
+
+    if (!objcpy(&c2works, workBundle)) {
+        return Status::CORRUPTED;
+    }
+
+    // Register input buffers.
+    for (const std::unique_ptr<C2Work>& work : c2works) {
+        if (work) {
+            InputBufferManager::
+                    registerFrameData(mListener, work->input);
+        }
+    }
+
+    return static_cast<Status>(mComponent->queue_nb(&c2works));
+}
+
+Return<void> Component::flush(flush_cb _hidl_cb) {
+    std::list<std::unique_ptr<C2Work>> c2flushedWorks;
+    c2_status_t c2res = mComponent->flush_sm(
+            C2Component::FLUSH_COMPONENT,
+            &c2flushedWorks);
+
+    // Unregister input buffers.
+    for (const std::unique_ptr<C2Work>& work : c2flushedWorks) {
+        if (work) {
+            if (work->worklets.empty()
+                    || !work->worklets.back()
+                    || (work->worklets.back()->output.flags &
+                        C2FrameData::FLAG_INCOMPLETE) == 0) {
+                InputBufferManager::
+                        unregisterFrameData(mListener, work->input);
+            }
+        }
+    }
+
+    WorkBundle flushedWorkBundle;
+    Status res = static_cast<Status>(c2res);
+    beginTransferBufferQueueBlocks(c2flushedWorks, true);
+    if (c2res == C2_OK) {
+        if (!objcpy(&flushedWorkBundle, c2flushedWorks, &mBufferPoolSender)) {
+            res = Status::CORRUPTED;
+        }
+    }
+    _hidl_cb(res, flushedWorkBundle);
+    endTransferBufferQueueBlocks(c2flushedWorks, true, true);
+    return Void();
+}
+
+Return<Status> Component::drain(bool withEos) {
+    return static_cast<Status>(mComponent->drain_nb(withEos ?
+            C2Component::DRAIN_COMPONENT_WITH_EOS :
+            C2Component::DRAIN_COMPONENT_NO_EOS));
+}
+
+Return<Status> Component::setOutputSurface(
+        uint64_t blockPoolId,
+        const sp<HGraphicBufferProducer2>& surface) {
+    std::shared_ptr<C2BlockPool> pool;
+    GetCodec2BlockPool(blockPoolId, mComponent, &pool);
+    if (pool && pool->getAllocatorId() == C2PlatformAllocatorStore::BUFFERQUEUE) {
+        std::shared_ptr<C2BufferQueueBlockPool> bqPool =
+                std::static_pointer_cast<C2BufferQueueBlockPool>(pool);
+        C2BufferQueueBlockPool::OnRenderCallback cb =
+            [this](uint64_t producer, int32_t slot, int64_t nsecs) {
+                // TODO: batch this
+                hidl_vec<IComponentListener::RenderedFrame> rendered;
+                rendered.resize(1);
+                rendered[0] = { producer, slot, nsecs };
+                (void)mListener->onFramesRendered(rendered).isOk();
+        };
+        if (bqPool) {
+            bqPool->setRenderCallback(cb);
+            bqPool->configureProducer(surface);
+        }
+    }
+    return Status::OK;
+}
+
+Return<void> Component::connectToInputSurface(
+        const sp<IInputSurface>& inputSurface,
+        connectToInputSurface_cb _hidl_cb) {
+    Status status;
+    sp<IInputSurfaceConnection> connection;
+    auto transStatus = inputSurface->connect(
+            asInputSink(),
+            [&status, &connection](
+                    Status s, const sp<IInputSurfaceConnection>& c) {
+                status = s;
+                connection = c;
+            }
+        );
+    _hidl_cb(status, connection);
+    return Void();
+}
+
+Return<void> Component::connectToOmxInputSurface(
+        const sp<HGraphicBufferProducer1>& producer,
+        const sp<::android::hardware::media::omx::V1_0::
+        IGraphicBufferSource>& source,
+        connectToOmxInputSurface_cb _hidl_cb) {
+    (void)producer;
+    (void)source;
+    (void)_hidl_cb;
+    return Void();
+}
+
+Return<Status> Component::disconnectFromInputSurface() {
+    // TODO implement
+    return Status::OK;
+}
+
+namespace /* unnamed */ {
+
+struct BlockPoolIntf : public ConfigurableC2Intf {
+    BlockPoolIntf(const std::shared_ptr<C2BlockPool>& pool)
+          : ConfigurableC2Intf{
+                "C2BlockPool:" +
+                    (pool ? std::to_string(pool->getLocalId()) : "null"),
+                0},
+            mPool{pool} {
+    }
+
+    virtual c2_status_t config(
+            const std::vector<C2Param*>& params,
+            c2_blocking_t mayBlock,
+            std::vector<std::unique_ptr<C2SettingResult>>* const failures
+            ) override {
+        (void)params;
+        (void)mayBlock;
+        (void)failures;
+        return C2_OK;
+    }
+
+    virtual c2_status_t query(
+            const std::vector<C2Param::Index>& indices,
+            c2_blocking_t mayBlock,
+            std::vector<std::unique_ptr<C2Param>>* const params
+            ) const override {
+        (void)indices;
+        (void)mayBlock;
+        (void)params;
+        return C2_OK;
+    }
+
+    virtual c2_status_t querySupportedParams(
+            std::vector<std::shared_ptr<C2ParamDescriptor>>* const params
+            ) const override {
+        (void)params;
+        return C2_OK;
+    }
+
+    virtual c2_status_t querySupportedValues(
+            std::vector<C2FieldSupportedValuesQuery>& fields,
+            c2_blocking_t mayBlock) const override {
+        (void)fields;
+        (void)mayBlock;
+        return C2_OK;
+    }
+
+protected:
+    std::shared_ptr<C2BlockPool> mPool;
+};
+
+} // unnamed namespace
+
+Return<void> Component::createBlockPool(
+        uint32_t allocatorId,
+        createBlockPool_cb _hidl_cb) {
+    std::shared_ptr<C2BlockPool> blockPool;
+#ifdef __ANDROID_APEX__
+    c2_status_t status = CreateCodec2BlockPool(
+            static_cast<C2PlatformAllocatorStore::id_t>(allocatorId),
+            mComponent,
+            &blockPool);
+#else
+    c2_status_t status = ComponentStore::GetFilterWrapper()->createBlockPool(
+            static_cast<C2PlatformAllocatorStore::id_t>(allocatorId),
+            mComponent,
+            &blockPool);
+#endif
+    if (status != C2_OK) {
+        blockPool = nullptr;
+    }
+    if (blockPool) {
+        mBlockPoolsMutex.lock();
+        mBlockPools.emplace(blockPool->getLocalId(), blockPool);
+        mBlockPoolsMutex.unlock();
+    } else if (status == C2_OK) {
+        status = C2_CORRUPTED;
+    }
+
+    _hidl_cb(static_cast<Status>(status),
+            blockPool ? blockPool->getLocalId() : 0,
+            new CachedConfigurable(
+            std::make_unique<BlockPoolIntf>(blockPool)));
+    return Void();
+}
+
+Return<Status> Component::destroyBlockPool(uint64_t blockPoolId) {
+    std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
+    return mBlockPools.erase(blockPoolId) == 1 ?
+            Status::OK : Status::CORRUPTED;
+}
+
+Return<Status> Component::start() {
+    return static_cast<Status>(mComponent->start());
+}
+
+Return<Status> Component::stop() {
+    InputBufferManager::unregisterFrameData(mListener);
+    return static_cast<Status>(mComponent->stop());
+}
+
+Return<Status> Component::reset() {
+    Status status = static_cast<Status>(mComponent->reset());
+    {
+        std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
+        mBlockPools.clear();
+    }
+    InputBufferManager::unregisterFrameData(mListener);
+    return status;
+}
+
+Return<Status> Component::release() {
+    Status status = static_cast<Status>(mComponent->release());
+    {
+        std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
+        mBlockPools.clear();
+    }
+    InputBufferManager::unregisterFrameData(mListener);
+    return status;
+}
+
+Return<sp<IComponentInterface>> Component::getInterface() {
+    return sp<IComponentInterface>(mInterface);
+}
+
+Return<sp<IInputSink>> Component::asInputSink() {
+    std::lock_guard<std::mutex> lock(mSinkMutex);
+    if (!mSink) {
+        mSink = new Sink(shared_from_this());
+    }
+    return {mSink};
+}
+
+Return<void> Component::configureVideoTunnel(
+        uint32_t avSyncHwId, configureVideoTunnel_cb _hidl_cb) {
+    (void)avSyncHwId;
+    _hidl_cb(Status::OMITTED, hidl_handle{});
+    return Void();
+}
+
+Return<Status> Component::setOutputSurfaceWithSyncObj(
+        uint64_t blockPoolId, const sp<HGraphicBufferProducer2>& surface,
+        const SurfaceSyncObj& syncObject) {
+    std::shared_ptr<C2BlockPool> pool;
+    GetCodec2BlockPool(blockPoolId, mComponent, &pool);
+    if (pool && pool->getAllocatorId() == C2PlatformAllocatorStore::BUFFERQUEUE) {
+        std::shared_ptr<C2BufferQueueBlockPool> bqPool =
+                std::static_pointer_cast<C2BufferQueueBlockPool>(pool);
+        C2BufferQueueBlockPool::OnRenderCallback cb =
+            [this](uint64_t producer, int32_t slot, int64_t nsecs) {
+                // TODO: batch this
+                hidl_vec<IComponentListener::RenderedFrame> rendered;
+                rendered.resize(1);
+                rendered[0] = { producer, slot, nsecs };
+                (void)mListener->onFramesRendered(rendered).isOk();
+        };
+        if (bqPool) {
+            const native_handle_t *h = syncObject.syncMemory;
+            native_handle_t *syncMemory = h ? native_handle_clone(h) : nullptr;
+            uint64_t bqId = syncObject.bqId;
+            uint32_t generationId = syncObject.generationId;
+            uint64_t consumerUsage = syncObject.consumerUsage;
+
+            bqPool->setRenderCallback(cb);
+            bqPool->configureProducer(surface, syncMemory, bqId,
+                                      generationId, consumerUsage);
+        }
+    }
+    return Status::OK;
+}
+
+std::shared_ptr<C2Component> Component::findLocalComponent(
+        const sp<IInputSink>& sink) {
+    return Component::Sink::findLocalComponent(sink);
+}
+
+void Component::initListener(const sp<Component>& self) {
+    std::shared_ptr<C2Component::Listener> c2listener =
+            std::make_shared<Listener>(self);
+    c2_status_t res = mComponent->setListener_vb(c2listener, C2_DONT_BLOCK);
+    if (res != C2_OK) {
+        mInit = res;
+    }
+}
+
+Component::~Component() {
+    InputBufferManager::unregisterFrameData(mListener);
+    mStore->reportComponentDeath(this);
+}
+
+} // namespace utils
+} // namespace V1_2
+} // namespace c2
+} // namespace media
+} // namespace hardware
+} // namespace android
diff --git a/media/codec2/hidl/1.1/utils/OutputBufferQueue.cpp b/media/codec2/hidl/1.2/utils/ComponentInterface.cpp
similarity index 84%
rename from media/codec2/hidl/1.1/utils/OutputBufferQueue.cpp
rename to media/codec2/hidl/1.2/utils/ComponentInterface.cpp
index 65756e8..30fe4d6 100644
--- a/media/codec2/hidl/1.1/utils/OutputBufferQueue.cpp
+++ b/media/codec2/hidl/1.2/utils/ComponentInterface.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright 2019 The Android Open Source Project
+ * Copyright 2021 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -14,4 +14,4 @@
  * limitations under the License.
  */
 
-#include <codec2/hidl/1.1/OutputBufferQueue.h>
+#include <codec2/hidl/1.2/ComponentInterface.h>
diff --git a/media/codec2/hidl/1.2/utils/ComponentStore.cpp b/media/codec2/hidl/1.2/utils/ComponentStore.cpp
new file mode 100644
index 0000000..9fac5d5
--- /dev/null
+++ b/media/codec2/hidl/1.2/utils/ComponentStore.cpp
@@ -0,0 +1,562 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "Codec2-ComponentStore@1.2"
+#include <android-base/logging.h>
+
+#include <codec2/hidl/1.2/ComponentStore.h>
+#include <codec2/hidl/1.2/InputSurface.h>
+#include <codec2/hidl/1.2/types.h>
+
+#include <android-base/file.h>
+#include <media/stagefright/bqhelper/GraphicBufferSource.h>
+#include <utils/Errors.h>
+
+#include <C2PlatformSupport.h>
+#include <util/C2InterfaceHelper.h>
+
+#include <chrono>
+#include <ctime>
+#include <iomanip>
+#include <ostream>
+#include <sstream>
+
+#ifndef __ANDROID_APEX__
+#include <codec2/hidl/plugin/FilterPlugin.h>
+#include <dlfcn.h>
+#include <C2Config.h>
+#include <DefaultFilterPlugin.h>
+#include <FilterWrapper.h>
+#endif
+
+namespace android {
+namespace hardware {
+namespace media {
+namespace c2 {
+namespace V1_2 {
+namespace utils {
+
+using namespace ::android;
+using ::android::GraphicBufferSource;
+using namespace ::android::hardware::media::bufferpool::V2_0::implementation;
+
+namespace /* unnamed */ {
+
+struct StoreIntf : public ConfigurableC2Intf {
+    StoreIntf(const std::shared_ptr<C2ComponentStore>& store)
+          : ConfigurableC2Intf{store ? store->getName() : "", 0},
+            mStore{store} {
+    }
+
+    virtual c2_status_t config(
+            const std::vector<C2Param*> &params,
+            c2_blocking_t mayBlock,
+            std::vector<std::unique_ptr<C2SettingResult>> *const failures
+            ) override {
+        // Assume all params are blocking
+        // TODO: Filter for supported params
+        if (mayBlock == C2_DONT_BLOCK && params.size() != 0) {
+            return C2_BLOCKING;
+        }
+        return mStore->config_sm(params, failures);
+    }
+
+    virtual c2_status_t query(
+            const std::vector<C2Param::Index> &indices,
+            c2_blocking_t mayBlock,
+            std::vector<std::unique_ptr<C2Param>> *const params) const override {
+        // Assume all params are blocking
+        // TODO: Filter for supported params
+        if (mayBlock == C2_DONT_BLOCK && indices.size() != 0) {
+            return C2_BLOCKING;
+        }
+        return mStore->query_sm({}, indices, params);
+    }
+
+    virtual c2_status_t querySupportedParams(
+            std::vector<std::shared_ptr<C2ParamDescriptor>> *const params
+            ) const override {
+        return mStore->querySupportedParams_nb(params);
+    }
+
+    virtual c2_status_t querySupportedValues(
+            std::vector<C2FieldSupportedValuesQuery> &fields,
+            c2_blocking_t mayBlock) const override {
+        // Assume all params are blocking
+        // TODO: Filter for supported params
+        if (mayBlock == C2_DONT_BLOCK && fields.size() != 0) {
+            return C2_BLOCKING;
+        }
+        return mStore->querySupportedValues_sm(fields);
+    }
+
+protected:
+    std::shared_ptr<C2ComponentStore> mStore;
+};
+
+} // unnamed namespace
+
+struct ComponentStore::StoreParameterCache : public ParameterCache {
+    std::mutex mStoreMutex;
+    ComponentStore* mStore;
+
+    StoreParameterCache(ComponentStore* store): mStore{store} {
+    }
+
+    virtual c2_status_t validate(
+            const std::vector<std::shared_ptr<C2ParamDescriptor>>& params
+            ) override {
+        std::scoped_lock _lock(mStoreMutex);
+        return mStore ? mStore->validateSupportedParams(params) : C2_NO_INIT;
+    }
+
+    void onStoreDestroyed() {
+        std::scoped_lock _lock(mStoreMutex);
+        mStore = nullptr;
+    }
+};
+
+ComponentStore::ComponentStore(const std::shared_ptr<C2ComponentStore>& store)
+      : mConfigurable{new CachedConfigurable(std::make_unique<StoreIntf>(store))},
+        mParameterCache{std::make_shared<StoreParameterCache>(this)},
+        mStore{store} {
+
+    std::shared_ptr<C2ComponentStore> platformStore = android::GetCodec2PlatformComponentStore();
+    SetPreferredCodec2ComponentStore(store);
+
+    // Retrieve struct descriptors
+    mParamReflector = mStore->getParamReflector();
+
+    // Retrieve supported parameters from store
+    using namespace std::placeholders;
+    mInit = mConfigurable->init(mParameterCache);
+}
+
+ComponentStore::~ComponentStore() {
+    mParameterCache->onStoreDestroyed();
+}
+
+c2_status_t ComponentStore::status() const {
+    return mInit;
+}
+
+c2_status_t ComponentStore::validateSupportedParams(
+        const std::vector<std::shared_ptr<C2ParamDescriptor>>& params) {
+    c2_status_t res = C2_OK;
+
+    for (const std::shared_ptr<C2ParamDescriptor> &desc : params) {
+        if (!desc) {
+            // All descriptors should be valid
+            res = res ? res : C2_BAD_VALUE;
+            continue;
+        }
+        C2Param::CoreIndex coreIndex = desc->index().coreIndex();
+        std::lock_guard<std::mutex> lock(mStructDescriptorsMutex);
+        auto it = mStructDescriptors.find(coreIndex);
+        if (it == mStructDescriptors.end()) {
+            std::shared_ptr<C2StructDescriptor> structDesc =
+                    mParamReflector->describe(coreIndex);
+            if (!structDesc) {
+                // All supported params must be described
+                res = C2_BAD_INDEX;
+            }
+            mStructDescriptors.insert({ coreIndex, structDesc });
+        }
+    }
+    return res;
+}
+
+std::shared_ptr<ParameterCache> ComponentStore::getParameterCache() const {
+    return mParameterCache;
+}
+
+#ifndef __ANDROID_APEX__
+// static
+std::shared_ptr<FilterWrapper> ComponentStore::GetFilterWrapper() {
+    constexpr const char kPluginPath[] = "libc2filterplugin.so";
+    static std::shared_ptr<FilterWrapper> wrapper = FilterWrapper::Create(
+            std::make_unique<DefaultFilterPlugin>(kPluginPath));
+    return wrapper;
+}
+#endif
+
+// Methods from ::android::hardware::media::c2::V1_0::IComponentStore
+Return<void> ComponentStore::createComponent(
+        const hidl_string& name,
+        const sp<IComponentListener>& listener,
+        const sp<IClientManager>& pool,
+        createComponent_cb _hidl_cb) {
+
+    sp<Component> component;
+    std::shared_ptr<C2Component> c2component;
+    Status status = static_cast<Status>(
+            mStore->createComponent(name, &c2component));
+
+    if (status == Status::OK) {
+#ifndef __ANDROID_APEX__
+        c2component = GetFilterWrapper()->maybeWrapComponent(c2component);
+#endif
+        onInterfaceLoaded(c2component->intf());
+        component = new Component(c2component, listener, this, pool);
+        if (!component) {
+            status = Status::CORRUPTED;
+        } else {
+            reportComponentBirth(component.get());
+            if (component->status() != C2_OK) {
+                status = static_cast<Status>(component->status());
+            } else {
+                component->initListener(component);
+                if (component->status() != C2_OK) {
+                    status = static_cast<Status>(component->status());
+                }
+            }
+        }
+    }
+    _hidl_cb(status, component);
+    return Void();
+}
+
+Return<void> ComponentStore::createInterface(
+        const hidl_string& name,
+        createInterface_cb _hidl_cb) {
+    std::shared_ptr<C2ComponentInterface> c2interface;
+    c2_status_t res = mStore->createInterface(name, &c2interface);
+    sp<IComponentInterface> interface;
+    if (res == C2_OK) {
+#ifndef __ANDROID_APEX__
+        c2interface = GetFilterWrapper()->maybeWrapInterface(c2interface);
+#endif
+        onInterfaceLoaded(c2interface);
+        interface = new ComponentInterface(c2interface, mParameterCache);
+    }
+    _hidl_cb(static_cast<Status>(res), interface);
+    return Void();
+}
+
+Return<void> ComponentStore::listComponents(listComponents_cb _hidl_cb) {
+    std::vector<std::shared_ptr<const C2Component::Traits>> c2traits =
+            mStore->listComponents();
+    hidl_vec<IComponentStore::ComponentTraits> traits(c2traits.size());
+    size_t ix = 0;
+    for (const std::shared_ptr<const C2Component::Traits> &c2trait : c2traits) {
+        if (c2trait) {
+            if (objcpy(&traits[ix], *c2trait)) {
+                ++ix;
+            } else {
+                break;
+            }
+        }
+    }
+    traits.resize(ix);
+    _hidl_cb(Status::OK, traits);
+    return Void();
+}
+
+Return<void> ComponentStore::createInputSurface(createInputSurface_cb _hidl_cb) {
+    sp<GraphicBufferSource> source = new GraphicBufferSource();
+    if (source->initCheck() != OK) {
+        _hidl_cb(Status::CORRUPTED, nullptr);
+        return Void();
+    }
+    using namespace std::placeholders;
+    sp<InputSurface> inputSurface = new InputSurface(
+            mParameterCache,
+            std::make_shared<C2ReflectorHelper>(),
+            source->getHGraphicBufferProducer(),
+            source);
+    _hidl_cb(inputSurface ? Status::OK : Status::NO_MEMORY,
+             inputSurface);
+    return Void();
+}
+
+void ComponentStore::onInterfaceLoaded(const std::shared_ptr<C2ComponentInterface> &intf) {
+    // invalidate unsupported struct descriptors if a new interface is loaded as it may have
+    // exposed new descriptors
+    std::lock_guard<std::mutex> lock(mStructDescriptorsMutex);
+    if (!mLoadedInterfaces.count(intf->getName())) {
+        mUnsupportedStructDescriptors.clear();
+        mLoadedInterfaces.emplace(intf->getName());
+    }
+}
+
+Return<void> ComponentStore::getStructDescriptors(
+        const hidl_vec<uint32_t>& indices,
+        getStructDescriptors_cb _hidl_cb) {
+    hidl_vec<StructDescriptor> descriptors(indices.size());
+    size_t dstIx = 0;
+    Status res = Status::OK;
+    for (size_t srcIx = 0; srcIx < indices.size(); ++srcIx) {
+        std::lock_guard<std::mutex> lock(mStructDescriptorsMutex);
+        const C2Param::CoreIndex coreIndex = C2Param::CoreIndex(indices[srcIx]).coreIndex();
+        const auto item = mStructDescriptors.find(coreIndex);
+        if (item == mStructDescriptors.end()) {
+            // not in the cache, and not known to be unsupported, query local reflector
+            if (!mUnsupportedStructDescriptors.count(coreIndex)) {
+                std::shared_ptr<C2StructDescriptor> structDesc =
+                    mParamReflector->describe(coreIndex);
+                if (!structDesc) {
+                    mUnsupportedStructDescriptors.emplace(coreIndex);
+                } else {
+                    mStructDescriptors.insert({ coreIndex, structDesc });
+                    if (objcpy(&descriptors[dstIx], *structDesc)) {
+                        ++dstIx;
+                        continue;
+                    }
+                    res = Status::CORRUPTED;
+                    break;
+                }
+            }
+            res = Status::NOT_FOUND;
+        } else if (item->second) {
+            if (objcpy(&descriptors[dstIx], *item->second)) {
+                ++dstIx;
+                continue;
+            }
+            res = Status::CORRUPTED;
+            break;
+        } else {
+            res = Status::NO_MEMORY;
+            break;
+        }
+    }
+    descriptors.resize(dstIx);
+    _hidl_cb(res, descriptors);
+    return Void();
+}
+
+Return<sp<IClientManager>> ComponentStore::getPoolClientManager() {
+    return ClientManager::getInstance();
+}
+
+Return<Status> ComponentStore::copyBuffer(const Buffer& src, const Buffer& dst) {
+    // TODO implement
+    (void)src;
+    (void)dst;
+    return Status::OMITTED;
+}
+
+Return<sp<IConfigurable>> ComponentStore::getConfigurable() {
+    return mConfigurable;
+}
+
+// Methods from ::android::hardware::media::c2::V1_1::IComponentStore
+Return<void> ComponentStore::createComponent_1_1(
+        const hidl_string& name,
+        const sp<IComponentListener>& listener,
+        const sp<IClientManager>& pool,
+        createComponent_1_1_cb _hidl_cb) {
+
+    sp<Component> component;
+    std::shared_ptr<C2Component> c2component;
+    Status status = static_cast<Status>(
+            mStore->createComponent(name, &c2component));
+
+    if (status == Status::OK) {
+#ifndef __ANDROID_APEX__
+        c2component = GetFilterWrapper()->maybeWrapComponent(c2component);
+#endif
+        onInterfaceLoaded(c2component->intf());
+        component = new Component(c2component, listener, this, pool);
+        if (!component) {
+            status = Status::CORRUPTED;
+        } else {
+            reportComponentBirth(component.get());
+            if (component->status() != C2_OK) {
+                status = static_cast<Status>(component->status());
+            } else {
+                component->initListener(component);
+                if (component->status() != C2_OK) {
+                    status = static_cast<Status>(component->status());
+                }
+            }
+        }
+    }
+    _hidl_cb(status, component);
+    return Void();
+}
+
+// Methods from ::android::hardware::media::c2::V1_2::IComponentStore
+Return<void> ComponentStore::createComponent_1_2(
+        const hidl_string& name,
+        const sp<IComponentListener>& listener,
+        const sp<IClientManager>& pool,
+        createComponent_1_2_cb _hidl_cb) {
+
+    sp<Component> component;
+    std::shared_ptr<C2Component> c2component;
+    Status status = static_cast<Status>(
+            mStore->createComponent(name, &c2component));
+
+    if (status == Status::OK) {
+#ifndef __ANDROID_APEX__
+        c2component = GetFilterWrapper()->maybeWrapComponent(c2component);
+#endif
+        onInterfaceLoaded(c2component->intf());
+        component = new Component(c2component, listener, this, pool);
+        if (!component) {
+            status = Status::CORRUPTED;
+        } else {
+            reportComponentBirth(component.get());
+            if (component->status() != C2_OK) {
+                status = static_cast<Status>(component->status());
+            } else {
+                component->initListener(component);
+                if (component->status() != C2_OK) {
+                    status = static_cast<Status>(component->status());
+                }
+            }
+        }
+    }
+    _hidl_cb(status, component);
+    return Void();
+}
+
+// Called from createComponent() after a successful creation of `component`.
+void ComponentStore::reportComponentBirth(Component* component) {
+    ComponentStatus componentStatus;
+    componentStatus.c2Component = component->mComponent;
+    componentStatus.birthTime = std::chrono::system_clock::now();
+
+    std::lock_guard<std::mutex> lock(mComponentRosterMutex);
+    mComponentRoster.emplace(component, componentStatus);
+}
+
+// Called from within the destructor of `component`. No virtual function calls
+// are made on `component` here.
+void ComponentStore::reportComponentDeath(Component* component) {
+    std::lock_guard<std::mutex> lock(mComponentRosterMutex);
+    mComponentRoster.erase(component);
+}
+
+// Dumps component traits.
+std::ostream& ComponentStore::dump(
+        std::ostream& out,
+        const std::shared_ptr<const C2Component::Traits>& comp) {
+
+    constexpr const char indent[] = "    ";
+
+    out << indent << "name: " << comp->name << std::endl;
+    out << indent << "domain: " << comp->domain << std::endl;
+    out << indent << "kind: " << comp->kind << std::endl;
+    out << indent << "rank: " << comp->rank << std::endl;
+    out << indent << "mediaType: " << comp->mediaType << std::endl;
+    out << indent << "aliases:";
+    for (const auto& alias : comp->aliases) {
+        out << ' ' << alias;
+    }
+    out << std::endl;
+
+    return out;
+}
+
+// Dumps component status.
+std::ostream& ComponentStore::dump(
+        std::ostream& out,
+        ComponentStatus& compStatus) {
+
+    constexpr const char indent[] = "    ";
+
+    // Print birth time.
+    std::chrono::milliseconds ms =
+            std::chrono::duration_cast<std::chrono::milliseconds>(
+                compStatus.birthTime.time_since_epoch());
+    std::time_t birthTime = std::chrono::system_clock::to_time_t(
+            compStatus.birthTime);
+    std::tm tm = *std::localtime(&birthTime);
+    out << indent << "Creation time: "
+        << std::put_time(&tm, "%Y-%m-%d %H:%M:%S")
+        << '.' << std::setfill('0') << std::setw(3) << ms.count() % 1000
+        << std::endl;
+
+    // Print name and id.
+    std::shared_ptr<C2ComponentInterface> intf = compStatus.c2Component->intf();
+    if (!intf) {
+        out << indent << "Unknown component -- null interface" << std::endl;
+        return out;
+    }
+    out << indent << "Name: " << intf->getName() << std::endl;
+    out << indent << "Id: " << intf->getId() << std::endl;
+
+    return out;
+}
+
+// Dumps information when lshal is called.
+Return<void> ComponentStore::debug(
+        const hidl_handle& handle,
+        const hidl_vec<hidl_string>& /* args */) {
+    LOG(INFO) << "debug -- dumping...";
+    const native_handle_t *h = handle.getNativeHandle();
+    if (!h || h->numFds != 1) {
+       LOG(ERROR) << "debug -- dumping failed -- "
+               "invalid file descriptor to dump to";
+       return Void();
+    }
+    std::ostringstream out;
+
+    { // Populate "out".
+
+        constexpr const char indent[] = "  ";
+
+        // Show name.
+        out << "Beginning of dump -- C2ComponentStore: "
+                << mStore->getName() << std::endl << std::endl;
+
+        // Retrieve the list of supported components.
+        std::vector<std::shared_ptr<const C2Component::Traits>> traitsList =
+                mStore->listComponents();
+
+        // Dump the traits of supported components.
+        out << indent << "Supported components:" << std::endl << std::endl;
+        if (traitsList.size() == 0) {
+            out << indent << indent << "NONE" << std::endl << std::endl;
+        } else {
+            for (const auto& traits : traitsList) {
+                dump(out, traits) << std::endl;
+            }
+        }
+
+        // Dump active components.
+        {
+            out << indent << "Active components:" << std::endl << std::endl;
+            std::lock_guard<std::mutex> lock(mComponentRosterMutex);
+            if (mComponentRoster.size() == 0) {
+                out << indent << indent << "NONE" << std::endl << std::endl;
+            } else {
+                for (auto& pair : mComponentRoster) {
+                    dump(out, pair.second) << std::endl;
+                }
+            }
+        }
+
+        out << "End of dump -- C2ComponentStore: "
+                << mStore->getName() << std::endl;
+    }
+
+    if (!android::base::WriteStringToFd(out.str(), h->data[0])) {
+        PLOG(WARNING) << "debug -- dumping failed -- write()";
+    } else {
+        LOG(INFO) << "debug -- dumping succeeded";
+    }
+    return Void();
+}
+
+} // namespace utils
+} // namespace V1_2
+} // namespace c2
+} // namespace media
+} // namespace hardware
+} // namespace android
diff --git a/media/codec2/hidl/1.1/utils/OutputBufferQueue.cpp b/media/codec2/hidl/1.2/utils/Configurable.cpp
similarity index 84%
copy from media/codec2/hidl/1.1/utils/OutputBufferQueue.cpp
copy to media/codec2/hidl/1.2/utils/Configurable.cpp
index 65756e8..243870e 100644
--- a/media/codec2/hidl/1.1/utils/OutputBufferQueue.cpp
+++ b/media/codec2/hidl/1.2/utils/Configurable.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright 2019 The Android Open Source Project
+ * Copyright 2021 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -14,4 +14,4 @@
  * limitations under the License.
  */
 
-#include <codec2/hidl/1.1/OutputBufferQueue.h>
+#include <codec2/hidl/1.2/Configurable.h>
diff --git a/media/codec2/hidl/1.1/utils/OutputBufferQueue.cpp b/media/codec2/hidl/1.2/utils/InputBufferManager.cpp
similarity index 84%
copy from media/codec2/hidl/1.1/utils/OutputBufferQueue.cpp
copy to media/codec2/hidl/1.2/utils/InputBufferManager.cpp
index 65756e8..1120075 100644
--- a/media/codec2/hidl/1.1/utils/OutputBufferQueue.cpp
+++ b/media/codec2/hidl/1.2/utils/InputBufferManager.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright 2019 The Android Open Source Project
+ * Copyright 2021 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -14,4 +14,4 @@
  * limitations under the License.
  */
 
-#include <codec2/hidl/1.1/OutputBufferQueue.h>
+#include <codec2/hidl/1.2/InputBufferManager.h>
diff --git a/media/codec2/hidl/1.1/utils/OutputBufferQueue.cpp b/media/codec2/hidl/1.2/utils/InputSurface.cpp
similarity index 84%
copy from media/codec2/hidl/1.1/utils/OutputBufferQueue.cpp
copy to media/codec2/hidl/1.2/utils/InputSurface.cpp
index 65756e8..7c4d28b 100644
--- a/media/codec2/hidl/1.1/utils/OutputBufferQueue.cpp
+++ b/media/codec2/hidl/1.2/utils/InputSurface.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright 2019 The Android Open Source Project
+ * Copyright 2021 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -14,4 +14,4 @@
  * limitations under the License.
  */
 
-#include <codec2/hidl/1.1/OutputBufferQueue.h>
+#include <codec2/hidl/1.2/InputSurface.h>
diff --git a/media/codec2/hidl/1.1/utils/OutputBufferQueue.cpp b/media/codec2/hidl/1.2/utils/InputSurfaceConnection.cpp
similarity index 84%
copy from media/codec2/hidl/1.1/utils/OutputBufferQueue.cpp
copy to media/codec2/hidl/1.2/utils/InputSurfaceConnection.cpp
index 65756e8..1bd58c2 100644
--- a/media/codec2/hidl/1.1/utils/OutputBufferQueue.cpp
+++ b/media/codec2/hidl/1.2/utils/InputSurfaceConnection.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright 2019 The Android Open Source Project
+ * Copyright 2021 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -14,4 +14,4 @@
  * limitations under the License.
  */
 
-#include <codec2/hidl/1.1/OutputBufferQueue.h>
+#include <codec2/hidl/1.2/InputSurfaceConnection.h>
diff --git a/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/Component.h b/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/Component.h
new file mode 100644
index 0000000..7937664
--- /dev/null
+++ b/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/Component.h
@@ -0,0 +1,154 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef CODEC2_HIDL_V1_2_UTILS_COMPONENT_H
+#define CODEC2_HIDL_V1_2_UTILS_COMPONENT_H
+
+#include <android/hardware/media/bufferpool/2.0/IClientManager.h>
+#include <android/hardware/media/c2/1.2/IComponent.h>
+#include <android/hardware/media/c2/1.0/IComponentInterface.h>
+#include <android/hardware/media/c2/1.0/IComponentListener.h>
+#include <android/hardware/media/c2/1.2/IComponentStore.h>
+#include <android/hardware/media/c2/1.0/IInputSink.h>
+#include <codec2/hidl/1.2/ComponentInterface.h>
+#include <codec2/hidl/1.2/Configurable.h>
+#include <codec2/hidl/1.2/types.h>
+#include <hidl/Status.h>
+#include <hwbinder/IBinder.h>
+
+#include <C2Component.h>
+#include <C2Buffer.h>
+#include <C2.h>
+
+#include <map>
+#include <memory>
+#include <mutex>
+
+namespace android {
+namespace hardware {
+namespace media {
+namespace c2 {
+namespace V1_2 {
+
+using ::android::hardware::media::c2::V1_2::IComponent;
+using ::android::hardware::media::c2::V1_0::IComponentListener;
+
+namespace utils {
+
+using ::android::hardware::hidl_array;
+using ::android::hardware::hidl_memory;
+using ::android::hardware::hidl_string;
+using ::android::hardware::hidl_vec;
+using ::android::hardware::Return;
+using ::android::hardware::Void;
+using ::android::hardware::IBinder;
+using ::android::sp;
+using ::android::wp;
+
+struct ComponentStore;
+
+struct Component : public IComponent,
+                   public std::enable_shared_from_this<Component> {
+    Component(
+            const std::shared_ptr<C2Component>&,
+            const sp<IComponentListener>& listener,
+            const sp<ComponentStore>& store,
+            const sp<::android::hardware::media::bufferpool::V2_0::
+                IClientManager>& clientPoolManager);
+    c2_status_t status() const;
+
+    typedef ::android::hardware::graphics::bufferqueue::V1_0::
+            IGraphicBufferProducer HGraphicBufferProducer1;
+    typedef ::android::hardware::graphics::bufferqueue::V2_0::
+            IGraphicBufferProducer HGraphicBufferProducer2;
+
+    // Methods from IComponent follow.
+    virtual Return<Status> queue(const WorkBundle& workBundle) override;
+    virtual Return<void> flush(flush_cb _hidl_cb) override;
+    virtual Return<Status> drain(bool withEos) override;
+    virtual Return<Status> setOutputSurface(
+            uint64_t blockPoolId,
+            const sp<HGraphicBufferProducer2>& surface) override;
+    virtual Return<void> connectToInputSurface(
+            const sp<IInputSurface>& inputSurface,
+            connectToInputSurface_cb _hidl_cb) override;
+    virtual Return<void> connectToOmxInputSurface(
+            const sp<HGraphicBufferProducer1>& producer,
+            const sp<::android::hardware::media::omx::V1_0::
+            IGraphicBufferSource>& source,
+            connectToOmxInputSurface_cb _hidl_cb) override;
+    virtual Return<Status> disconnectFromInputSurface() override;
+    virtual Return<void> createBlockPool(
+            uint32_t allocatorId,
+            createBlockPool_cb _hidl_cb) override;
+    virtual Return<Status> destroyBlockPool(uint64_t blockPoolId) override;
+    virtual Return<Status> start() override;
+    virtual Return<Status> stop() override;
+    virtual Return<Status> reset() override;
+    virtual Return<Status> release() override;
+    virtual Return<sp<IComponentInterface>> getInterface() override;
+    virtual Return<sp<IInputSink>> asInputSink() override;
+    virtual Return<void> configureVideoTunnel(
+            uint32_t avSyncHwId, configureVideoTunnel_cb _hidl_cb) override;
+    virtual Return<Status> setOutputSurfaceWithSyncObj(
+            uint64_t blockPoolId,
+            const sp<HGraphicBufferProducer2>& surface,
+            const SurfaceSyncObj& syncObject) override;
+
+
+    // Returns a C2Component associated to the given sink if the sink is indeed
+    // a local component. Returns nullptr otherwise.
+    //
+    // This function is used by InputSurface::connect().
+    static std::shared_ptr<C2Component> findLocalComponent(
+            const sp<IInputSink>& sink);
+
+protected:
+    c2_status_t mInit;
+    std::shared_ptr<C2Component> mComponent;
+    sp<ComponentInterface> mInterface;
+    sp<IComponentListener> mListener;
+    sp<ComponentStore> mStore;
+    ::android::hardware::media::c2::V1_2::utils::DefaultBufferPoolSender
+            mBufferPoolSender;
+
+    struct Sink;
+    std::mutex mSinkMutex;
+    sp<Sink> mSink;
+
+    std::mutex mBlockPoolsMutex;
+    // This map keeps C2BlockPool objects that are created by createBlockPool()
+    // alive. These C2BlockPool objects can be deleted by calling
+    // destroyBlockPool(), reset() or release(), or by destroying the component.
+    std::map<uint64_t, std::shared_ptr<C2BlockPool>> mBlockPools;
+
+    void initListener(const sp<Component>& self);
+
+    virtual ~Component() override;
+
+    friend struct ComponentStore;
+
+    struct Listener;
+};
+
+} // namespace utils
+} // namespace V1_2
+} // namespace c2
+} // namespace media
+} // namespace hardware
+} // namespace android
+
+#endif // CODEC2_HIDL_V1_2_UTILS_COMPONENT_H
diff --git a/media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/OutputBufferQueue.h b/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/ComponentInterface.h
similarity index 65%
copy from media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/OutputBufferQueue.h
copy to media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/ComponentInterface.h
index f77852d..09d9f93 100644
--- a/media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/OutputBufferQueue.h
+++ b/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/ComponentInterface.h
@@ -1,5 +1,5 @@
 /*
- * Copyright 2019 The Android Open Source Project
+ * Copyright 2021 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -14,26 +14,26 @@
  * limitations under the License.
  */
 
-#ifndef CODEC2_HIDL_V1_1_UTILS_OUTPUT_BUFFER_QUEUE
-#define CODEC2_HIDL_V1_1_UTILS_OUTPUT_BUFFER_QUEUE
+#ifndef CODEC2_HIDL_V1_2_UTILS_COMPONENT_INTERFACE_H
+#define CODEC2_HIDL_V1_2_UTILS_COMPONENT_INTERFACE_H
 
-#include <codec2/hidl/1.0/OutputBufferQueue.h>
-#include <codec2/hidl/1.1/types.h>
+#include <codec2/hidl/1.0/ComponentInterface.h>
+#include <codec2/hidl/1.2/types.h>
 
 namespace android {
 namespace hardware {
 namespace media {
 namespace c2 {
-namespace V1_1 {
+namespace V1_2 {
 namespace utils {
 
-using ::android::hardware::media::c2::V1_0::utils::OutputBufferQueue;
+using ::android::hardware::media::c2::V1_0::utils::ComponentInterface;
 
 } // namespace utils
-} // namespace V1_1
+} // namespace V1_2
 } // namespace c2
 } // namespace media
 } // namespace hardware
 } // namespace android
 
-#endif // CODEC2_HIDL_V1_1_UTILS_OUTPUT_BUFFER_QUEUE
+#endif // CODEC2_HIDL_V1_2_UTILS_COMPONENT_INTERFACE_H
diff --git a/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/ComponentStore.h b/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/ComponentStore.h
new file mode 100644
index 0000000..e95a651
--- /dev/null
+++ b/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/ComponentStore.h
@@ -0,0 +1,177 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef CODEC2_HIDL_V1_2_UTILS_COMPONENT_STORE_H
+#define CODEC2_HIDL_V1_2_UTILS_COMPONENT_STORE_H
+
+#include <codec2/hidl/1.2/Component.h>
+#include <codec2/hidl/1.2/ComponentInterface.h>
+#include <codec2/hidl/1.2/Configurable.h>
+#include <codec2/hidl/1.2/types.h>
+
+#include <android/hardware/media/bufferpool/2.0/IClientManager.h>
+#include <android/hardware/media/c2/1.2/IComponentStore.h>
+#include <hidl/Status.h>
+
+#include <C2Component.h>
+#include <C2Param.h>
+#include <C2.h>
+
+#include <chrono>
+#include <map>
+#include <memory>
+#include <mutex>
+#include <set>
+#include <vector>
+
+namespace android {
+class FilterWrapper;
+
+namespace hardware {
+namespace media {
+namespace c2 {
+namespace V1_2 {
+namespace utils {
+
+using ::android::hardware::media::bufferpool::V2_0::IClientManager;
+
+using ::android::hardware::hidl_handle;
+using ::android::hardware::hidl_string;
+using ::android::hardware::hidl_vec;
+using ::android::hardware::Return;
+using ::android::hardware::Void;
+using ::android::sp;
+
+struct ComponentStore : public IComponentStore {
+    ComponentStore(const std::shared_ptr<C2ComponentStore>& store);
+    virtual ~ComponentStore();
+
+    /**
+     * Returns the status of the construction of this object.
+     */
+    c2_status_t status() const;
+
+    /**
+     * This function is called by CachedConfigurable::init() to validate
+     * supported parameters.
+     */
+    c2_status_t validateSupportedParams(
+            const std::vector<std::shared_ptr<C2ParamDescriptor>>& params);
+
+    /**
+     * Returns the store's ParameterCache. This is used for validation by
+     * Configurable::init().
+     */
+    std::shared_ptr<ParameterCache> getParameterCache() const;
+
+    static std::shared_ptr<FilterWrapper> GetFilterWrapper();
+
+    // Methods from ::android::hardware::media::c2::V1_0::IComponentStore.
+    virtual Return<void> createComponent(
+            const hidl_string& name,
+            const sp<IComponentListener>& listener,
+            const sp<IClientManager>& pool,
+            createComponent_cb _hidl_cb) override;
+    virtual Return<void> createInterface(
+            const hidl_string& name,
+            createInterface_cb _hidl_cb) override;
+    virtual Return<void> listComponents(listComponents_cb _hidl_cb) override;
+    virtual Return<void> createInputSurface(
+            createInputSurface_cb _hidl_cb) override;
+    virtual Return<void> getStructDescriptors(
+            const hidl_vec<uint32_t>& indices,
+            getStructDescriptors_cb _hidl_cb) override;
+    virtual Return<sp<IClientManager>> getPoolClientManager() override;
+    virtual Return<Status> copyBuffer(
+            const Buffer& src,
+            const Buffer& dst) override;
+    virtual Return<sp<IConfigurable>> getConfigurable() override;
+
+    // Methods from ::android::hardware::media::c2::V1_1::IComponentStore.
+    virtual Return<void> createComponent_1_1(
+            const hidl_string& name,
+            const sp<IComponentListener>& listener,
+            const sp<IClientManager>& pool,
+            createComponent_1_1_cb _hidl_cb) override;
+
+    // Methods from ::android::hardware::media::c2::V1_2::IComponentStore.
+    virtual Return<void> createComponent_1_2(
+            const hidl_string& name,
+            const sp<IComponentListener>& listener,
+            const sp<IClientManager>& pool,
+            createComponent_1_2_cb _hidl_cb) override;
+
+    /**
+     * Dumps information when lshal is called.
+     */
+    virtual Return<void> debug(
+            const hidl_handle& handle,
+            const hidl_vec<hidl_string>& args) override;
+
+protected:
+    sp<CachedConfigurable> mConfigurable;
+    struct StoreParameterCache;
+    std::shared_ptr<StoreParameterCache> mParameterCache;
+
+    // Does bookkeeping for an interface that has been loaded.
+    void onInterfaceLoaded(const std::shared_ptr<C2ComponentInterface> &intf);
+
+    c2_status_t mInit;
+    std::shared_ptr<C2ComponentStore> mStore;
+    std::shared_ptr<C2ParamReflector> mParamReflector;
+
+    std::map<C2Param::CoreIndex, std::shared_ptr<C2StructDescriptor>> mStructDescriptors;
+    std::set<C2Param::CoreIndex> mUnsupportedStructDescriptors;
+    std::set<C2String> mLoadedInterfaces;
+    mutable std::mutex mStructDescriptorsMutex;
+
+    // ComponentStore keeps track of live Components.
+
+    struct ComponentStatus {
+        std::shared_ptr<C2Component> c2Component;
+        std::chrono::system_clock::time_point birthTime;
+    };
+
+    mutable std::mutex mComponentRosterMutex;
+    std::map<Component*, ComponentStatus> mComponentRoster;
+
+    // Called whenever Component is created.
+    void reportComponentBirth(Component* component);
+    // Called only from the destructor of Component.
+    void reportComponentDeath(Component* component);
+
+    friend Component;
+
+    // Helper functions for dumping.
+
+    std::ostream& dump(
+            std::ostream& out,
+            const std::shared_ptr<const C2Component::Traits>& comp);
+
+    std::ostream& dump(
+            std::ostream& out,
+            ComponentStatus& compStatus);
+
+};
+
+} // namespace utils
+} // namespace V1_2
+} // namespace c2
+} // namespace media
+} // namespace hardware
+} // namespace android
+
+#endif // CODEC2_HIDL_V1_2_UTILS_COMPONENT_STORE_H
diff --git a/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/Configurable.h b/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/Configurable.h
new file mode 100644
index 0000000..2efad31
--- /dev/null
+++ b/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/Configurable.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef CODEC2_HIDL_V1_2_UTILS_CONFIGURABLE_H
+#define CODEC2_HIDL_V1_2_UTILS_CONFIGURABLE_H
+
+#include <codec2/hidl/1.0/Configurable.h>
+#include <codec2/hidl/1.2/types.h>
+
+namespace android {
+namespace hardware {
+namespace media {
+namespace c2 {
+namespace V1_2 {
+namespace utils {
+
+using ::android::hardware::media::c2::V1_0::utils::ConfigurableC2Intf;
+using ::android::hardware::media::c2::V1_0::utils::ParameterCache;
+using ::android::hardware::media::c2::V1_0::utils::CachedConfigurable;
+
+} // namespace utils
+} // namespace V1_2
+} // namespace c2
+} // namespace media
+} // namespace hardware
+} // namespace android
+
+#endif // CODEC2_HIDL_V1_2_UTILS_CONFIGURABLE_H
diff --git a/media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/OutputBufferQueue.h b/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/InputBufferManager.h
similarity index 65%
copy from media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/OutputBufferQueue.h
copy to media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/InputBufferManager.h
index f77852d..e4a5db4 100644
--- a/media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/OutputBufferQueue.h
+++ b/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/InputBufferManager.h
@@ -1,5 +1,5 @@
 /*
- * Copyright 2019 The Android Open Source Project
+ * Copyright 2021 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -14,26 +14,26 @@
  * limitations under the License.
  */
 
-#ifndef CODEC2_HIDL_V1_1_UTILS_OUTPUT_BUFFER_QUEUE
-#define CODEC2_HIDL_V1_1_UTILS_OUTPUT_BUFFER_QUEUE
+#ifndef CODEC2_HIDL_V1_2_UTILS_INPUT_BUFFER_MANAGER_H
+#define CODEC2_HIDL_V1_2_UTILS_INPUT_BUFFER_MANAGER_H
 
-#include <codec2/hidl/1.0/OutputBufferQueue.h>
-#include <codec2/hidl/1.1/types.h>
+#include <codec2/hidl/1.0/InputBufferManager.h>
+#include <codec2/hidl/1.2/types.h>
 
 namespace android {
 namespace hardware {
 namespace media {
 namespace c2 {
-namespace V1_1 {
+namespace V1_2 {
 namespace utils {
 
-using ::android::hardware::media::c2::V1_0::utils::OutputBufferQueue;
+using ::android::hardware::media::c2::V1_0::utils::InputBufferManager;
 
 } // namespace utils
-} // namespace V1_1
+} // namespace V1_2
 } // namespace c2
 } // namespace media
 } // namespace hardware
 } // namespace android
 
-#endif // CODEC2_HIDL_V1_1_UTILS_OUTPUT_BUFFER_QUEUE
+#endif // CODEC2_HIDL_V1_2_UTILS_INPUT_BUFFER_MANAGER_H
diff --git a/media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/OutputBufferQueue.h b/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/InputSurface.h
similarity index 65%
rename from media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/OutputBufferQueue.h
rename to media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/InputSurface.h
index f77852d..3fae86b 100644
--- a/media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/OutputBufferQueue.h
+++ b/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/InputSurface.h
@@ -1,5 +1,5 @@
 /*
- * Copyright 2019 The Android Open Source Project
+ * Copyright 2021 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -14,26 +14,26 @@
  * limitations under the License.
  */
 
-#ifndef CODEC2_HIDL_V1_1_UTILS_OUTPUT_BUFFER_QUEUE
-#define CODEC2_HIDL_V1_1_UTILS_OUTPUT_BUFFER_QUEUE
+#ifndef CODEC2_HIDL_V1_2_UTILS_INPUT_SURFACE_H
+#define CODEC2_HIDL_V1_2_UTILS_INPUT_SURFACE_H
 
-#include <codec2/hidl/1.0/OutputBufferQueue.h>
-#include <codec2/hidl/1.1/types.h>
+#include <codec2/hidl/1.0/InputSurface.h>
+#include <codec2/hidl/1.2/types.h>
 
 namespace android {
 namespace hardware {
 namespace media {
 namespace c2 {
-namespace V1_1 {
+namespace V1_2 {
 namespace utils {
 
-using ::android::hardware::media::c2::V1_0::utils::OutputBufferQueue;
+using ::android::hardware::media::c2::V1_0::utils::InputSurface;
 
 } // namespace utils
-} // namespace V1_1
+} // namespace V1_2
 } // namespace c2
 } // namespace media
 } // namespace hardware
 } // namespace android
 
-#endif // CODEC2_HIDL_V1_1_UTILS_OUTPUT_BUFFER_QUEUE
+#endif // CODEC2_HIDL_V1_2_UTILS_INPUT_SURFACE_H
diff --git a/media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/OutputBufferQueue.h b/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/InputSurfaceConnection.h
similarity index 63%
copy from media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/OutputBufferQueue.h
copy to media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/InputSurfaceConnection.h
index f77852d..13a8a61 100644
--- a/media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/OutputBufferQueue.h
+++ b/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/InputSurfaceConnection.h
@@ -1,5 +1,5 @@
 /*
- * Copyright 2019 The Android Open Source Project
+ * Copyright 2021 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -14,26 +14,26 @@
  * limitations under the License.
  */
 
-#ifndef CODEC2_HIDL_V1_1_UTILS_OUTPUT_BUFFER_QUEUE
-#define CODEC2_HIDL_V1_1_UTILS_OUTPUT_BUFFER_QUEUE
+#ifndef CODEC2_HIDL_V1_2_UTILS_INPUT_SURFACE_CONNECTION_H
+#define CODEC2_HIDL_V1_2_UTILS_INPUT_SURFACE_CONNECTION_H
 
-#include <codec2/hidl/1.0/OutputBufferQueue.h>
-#include <codec2/hidl/1.1/types.h>
+#include <codec2/hidl/1.0/InputSurfaceConnection.h>
+#include <codec2/hidl/1.2/types.h>
 
 namespace android {
 namespace hardware {
 namespace media {
 namespace c2 {
-namespace V1_1 {
+namespace V1_2 {
 namespace utils {
 
-using ::android::hardware::media::c2::V1_0::utils::OutputBufferQueue;
+using ::android::hardware::media::c2::V1_0::utils::InputSurfaceConnection;
 
 } // namespace utils
-} // namespace V1_1
+} // namespace V1_2
 } // namespace c2
 } // namespace media
 } // namespace hardware
 } // namespace android
 
-#endif // CODEC2_HIDL_V1_1_UTILS_OUTPUT_BUFFER_QUEUE
+#endif // CODEC2_HIDL_V1_2_UTILS_INPUT_SURFACE_CONNECTION_H
diff --git a/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/types.h b/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/types.h
new file mode 100644
index 0000000..d3180b0
--- /dev/null
+++ b/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/types.h
@@ -0,0 +1,105 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef CODEC2_HIDL_V1_2_UTILS_TYPES_H
+#define CODEC2_HIDL_V1_2_UTILS_TYPES_H
+
+#include <android/hardware/media/c2/1.2/IComponent.h>
+#include <android/hardware/media/c2/1.0/IComponentInterface.h>
+#include <android/hardware/media/c2/1.0/IComponentListener.h>
+#include <android/hardware/media/c2/1.2/IComponentStore.h>
+#include <android/hardware/media/c2/1.0/IConfigurable.h>
+#include <android/hardware/media/c2/1.0/IInputSink.h>
+#include <android/hardware/media/c2/1.0/IInputSurface.h>
+#include <android/hardware/media/c2/1.0/IInputSurfaceConnection.h>
+
+#include <codec2/hidl/1.0/types.h>
+#include <android/hardware/media/c2/1.2/types.h>
+
+namespace android {
+namespace hardware {
+namespace media {
+namespace c2 {
+namespace V1_2 {
+
+using ::android::hardware::media::c2::V1_0::BaseBlock;
+using ::android::hardware::media::c2::V1_0::Block;
+using ::android::hardware::media::c2::V1_0::Buffer;
+using ::android::hardware::media::c2::V1_0::FieldDescriptor;
+using ::android::hardware::media::c2::V1_0::FieldId;
+using ::android::hardware::media::c2::V1_0::FieldSupportedValues;
+using ::android::hardware::media::c2::V1_0::FieldSupportedValuesQuery;
+using ::android::hardware::media::c2::V1_0::FieldSupportedValuesQueryResult;
+using ::android::hardware::media::c2::V1_0::FrameData;
+using ::android::hardware::media::c2::V1_0::InfoBuffer;
+using ::android::hardware::media::c2::V1_0::ParamDescriptor;
+using ::android::hardware::media::c2::V1_0::ParamField;
+using ::android::hardware::media::c2::V1_0::ParamFieldValues;
+using ::android::hardware::media::c2::V1_0::ParamIndex;
+using ::android::hardware::media::c2::V1_0::Params;
+using ::android::hardware::media::c2::V1_0::PrimitiveValue;
+using ::android::hardware::media::c2::V1_0::SettingResult;
+using ::android::hardware::media::c2::V1_0::Status;
+using ::android::hardware::media::c2::V1_0::StructDescriptor;
+using ::android::hardware::media::c2::V1_0::ValueRange;
+using ::android::hardware::media::c2::V1_0::Work;
+using ::android::hardware::media::c2::V1_0::WorkBundle;
+using ::android::hardware::media::c2::V1_0::WorkOrdinal;
+using ::android::hardware::media::c2::V1_0::Worklet;
+
+using ::android::hardware::media::c2::V1_2::SurfaceSyncObj;
+
+using ::android::hardware::media::c2::V1_0::IComponentInterface;
+using ::android::hardware::media::c2::V1_0::IComponentListener;
+using ::android::hardware::media::c2::V1_0::IConfigurable;
+using ::android::hardware::media::c2::V1_0::IInputSink;
+using ::android::hardware::media::c2::V1_0::IInputSurface;
+using ::android::hardware::media::c2::V1_0::IInputSurfaceConnection;
+
+namespace utils {
+
+using ::android::hardware::media::c2::V1_0::utils::toC2Status;
+
+using ::android::hardware::media::c2::V1_0::utils::C2Hidl_Range;
+using ::android::hardware::media::c2::V1_0::utils::C2Hidl_RangeInfo;
+using ::android::hardware::media::c2::V1_0::utils::C2Hidl_Rect;
+using ::android::hardware::media::c2::V1_0::utils::C2Hidl_RectInfo;
+
+using ::android::hardware::media::c2::V1_0::utils::objcpy;
+using ::android::hardware::media::c2::V1_0::utils::parseParamsBlob;
+using ::android::hardware::media::c2::V1_0::utils::createParamsBlob;
+using ::android::hardware::media::c2::V1_0::utils::copyParamsFromBlob;
+using ::android::hardware::media::c2::V1_0::utils::updateParamsFromBlob;
+
+using ::android::hardware::media::c2::V1_0::utils::BufferPoolSender;
+using ::android::hardware::media::c2::V1_0::utils::DefaultBufferPoolSender;
+
+using ::android::hardware::media::c2::V1_0::utils::beginTransferBufferQueueBlock;
+using ::android::hardware::media::c2::V1_0::utils::beginTransferBufferQueueBlocks;
+using ::android::hardware::media::c2::V1_0::utils::endTransferBufferQueueBlock;
+using ::android::hardware::media::c2::V1_0::utils::endTransferBufferQueueBlocks;
+using ::android::hardware::media::c2::V1_0::utils::displayBufferQueueBlock;
+
+using ::android::hardware::media::c2::V1_0::utils::operator<<;
+
+} // namespace utils
+} // namespace V1_2
+} // namespace c2
+} // namespace media
+} // namespace hardware
+} // namespace android
+
+#endif // CODEC2_HIDL_V1_2_UTILS_TYPES_H
diff --git a/media/codec2/hidl/1.1/utils/OutputBufferQueue.cpp b/media/codec2/hidl/1.2/utils/types.cpp
similarity index 84%
copy from media/codec2/hidl/1.1/utils/OutputBufferQueue.cpp
copy to media/codec2/hidl/1.2/utils/types.cpp
index 65756e8..9e0a08b 100644
--- a/media/codec2/hidl/1.1/utils/OutputBufferQueue.cpp
+++ b/media/codec2/hidl/1.2/utils/types.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright 2019 The Android Open Source Project
+ * Copyright 2021 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -14,4 +14,4 @@
  * limitations under the License.
  */
 
-#include <codec2/hidl/1.1/OutputBufferQueue.h>
+#include <codec2/hidl/1.2/types.h>
diff --git a/media/codec2/hidl/client/Android.bp b/media/codec2/hidl/client/Android.bp
index 5a34c30..0e52813 100644
--- a/media/codec2/hidl/client/Android.bp
+++ b/media/codec2/hidl/client/Android.bp
@@ -12,6 +12,11 @@
 
     srcs: [
         "client.cpp",
+        "output.cpp",
+    ],
+
+    header_libs: [
+        "libcodec2_internal", // private
     ],
 
     shared_libs: [
@@ -19,11 +24,13 @@
         "android.hardware.media.bufferpool@2.0",
         "android.hardware.media.c2@1.0",
         "android.hardware.media.c2@1.1",
+        "android.hardware.media.c2@1.2",
         "libbase",
         "libbinder",
         "libcodec2",
         "libcodec2_hidl_client@1.0",
         "libcodec2_hidl_client@1.1",
+        "libcodec2_hidl_client@1.2",
         "libcodec2_vndk",
         "libcutils",
         "libgui",
@@ -41,9 +48,11 @@
     export_shared_lib_headers: [
         "android.hardware.media.c2@1.0",
         "android.hardware.media.c2@1.1",
+        "android.hardware.media.c2@1.2",
         "libcodec2",
         "libcodec2_hidl_client@1.0",
         "libcodec2_hidl_client@1.1",
+        "libcodec2_hidl_client@1.2",
         "libcodec2_vndk",
     ],
 
diff --git a/media/codec2/hidl/client/client.cpp b/media/codec2/hidl/client/client.cpp
index 341a577..0296004 100644
--- a/media/codec2/hidl/client/client.cpp
+++ b/media/codec2/hidl/client/client.cpp
@@ -33,16 +33,17 @@
 
 #include <android-base/properties.h>
 #include <bufferpool/ClientManager.h>
-#include <codec2/hidl/1.0/OutputBufferQueue.h>
 #include <codec2/hidl/1.0/types.h>
-#include <codec2/hidl/1.1/OutputBufferQueue.h>
 #include <codec2/hidl/1.1/types.h>
+#include <codec2/hidl/1.2/types.h>
+#include <codec2/hidl/output.h>
 
 #include <cutils/native_handle.h>
 #include <gui/bufferqueue/2.0/B2HGraphicBufferProducer.h>
 #include <gui/bufferqueue/2.0/H2BGraphicBufferProducer.h>
 #include <hidl/HidlSupport.h>
 
+
 #include <deque>
 #include <iterator>
 #include <limits>
@@ -73,6 +74,7 @@
         V2_0::utils::B2HGraphicBufferProducer;
 using H2BGraphicBufferProducer2 = ::android::hardware::graphics::bufferqueue::
         V2_0::utils::H2BGraphicBufferProducer;
+using ::android::hardware::media::c2::V1_2::SurfaceSyncObj;
 
 namespace /* unnamed */ {
 
@@ -592,9 +594,9 @@
 
 // Codec2Client::Component::OutputBufferQueue
 struct Codec2Client::Component::OutputBufferQueue :
-        hardware::media::c2::V1_1::utils::OutputBufferQueue {
+        hardware::media::c2::OutputBufferQueue {
     OutputBufferQueue()
-          : hardware::media::c2::V1_1::utils::OutputBufferQueue() {
+          : hardware::media::c2::OutputBufferQueue() {
     }
 };
 
@@ -612,6 +614,7 @@
         },
         mBase1_0{base},
         mBase1_1{Base1_1::castFrom(base)},
+        mBase1_2{Base1_2::castFrom(base)},
         mServiceIndex{serviceIndex} {
     Return<sp<IClientManager>> transResult = base->getPoolClientManager();
     if (!transResult.isOk()) {
@@ -633,6 +636,10 @@
     return mBase1_1;
 }
 
+sp<Codec2Client::Base1_2> const& Codec2Client::getBase1_2() const {
+    return mBase1_2;
+}
+
 std::string const& Codec2Client::getServiceName() const {
     return GetServiceNames()[mServiceIndex];
 }
@@ -645,8 +652,9 @@
     c2_status_t status;
     sp<Component::HidlListener> hidlListener = new Component::HidlListener{};
     hidlListener->base = listener;
-    Return<void> transStatus = mBase1_1 ?
-        mBase1_1->createComponent_1_1(
+    Return<void> transStatus;
+    if (mBase1_2) {
+        transStatus = mBase1_2->createComponent_1_2(
             name,
             hidlListener,
             ClientManager::getInstance(),
@@ -659,8 +667,25 @@
                 }
                 *component = std::make_shared<Codec2Client::Component>(c);
                 hidlListener->component = *component;
-            }) :
-        mBase1_0->createComponent(
+            });
+    }
+    else if (mBase1_1) {
+        transStatus = mBase1_1->createComponent_1_1(
+            name,
+            hidlListener,
+            ClientManager::getInstance(),
+            [&status, component, hidlListener](
+                    Status s,
+                    const sp<IComponent>& c) {
+                status = static_cast<c2_status_t>(s);
+                if (status != C2_OK) {
+                    return;
+                }
+                *component = std::make_shared<Codec2Client::Component>(c);
+                hidlListener->component = *component;
+            });
+    } else if (mBase1_0) { // ver1_0
+        transStatus = mBase1_0->createComponent(
             name,
             hidlListener,
             ClientManager::getInstance(),
@@ -674,6 +699,9 @@
                 *component = std::make_shared<Codec2Client::Component>(c);
                 hidlListener->component = *component;
             });
+    } else {
+        status = C2_CORRUPTED;
+    }
     if (!transStatus.isOk()) {
         LOG(ERROR) << "createComponent(" << name.c_str()
                    << ") -- transaction failed.";
@@ -1193,6 +1221,7 @@
         },
         mBase1_0{base},
         mBase1_1{Base1_1::castFrom(base)},
+        mBase1_2{Base1_2::castFrom(base)},
         mBufferPoolSender{std::make_unique<BufferPoolSender>()},
         mOutputBufferQueue{std::make_unique<OutputBufferQueue>()} {
 }
@@ -1215,6 +1244,30 @@
         },
         mBase1_0{base},
         mBase1_1{base},
+        mBase1_2{Base1_2::castFrom(base)},
+        mBufferPoolSender{std::make_unique<BufferPoolSender>()},
+        mOutputBufferQueue{std::make_unique<OutputBufferQueue>()} {
+}
+
+Codec2Client::Component::Component(const sp<Base1_2>& base)
+      : Configurable{
+            [base]() -> sp<IConfigurable> {
+                Return<sp<IComponentInterface>> transResult1 =
+                        base->getInterface();
+                if (!transResult1.isOk()) {
+                    return nullptr;
+                }
+                Return<sp<IConfigurable>> transResult2 =
+                        static_cast<sp<IComponentInterface>>(transResult1)->
+                        getConfigurable();
+                return transResult2.isOk() ?
+                        static_cast<sp<IConfigurable>>(transResult2) :
+                        nullptr;
+            }()
+        },
+        mBase1_0{base},
+        mBase1_1{base},
+        mBase1_2{base},
         mBufferPoolSender{std::make_unique<BufferPoolSender>()},
         mOutputBufferQueue{std::make_unique<OutputBufferQueue>()} {
 }
@@ -1440,21 +1493,29 @@
         igbp = new B2HGraphicBufferProducer2(surface);
     }
 
+    std::shared_ptr<SurfaceSyncObj> syncObj;
+
     if (!surface) {
-        mOutputBufferQueue->configure(nullIgbp, generation, 0);
+        mOutputBufferQueue->configure(nullIgbp, generation, 0, nullptr);
     } else if (surface->getUniqueId(&bqId) != OK) {
         LOG(ERROR) << "setOutputSurface -- "
                    "cannot obtain bufferqueue id.";
         bqId = 0;
-        mOutputBufferQueue->configure(nullIgbp, generation, 0);
+        mOutputBufferQueue->configure(nullIgbp, generation, 0, nullptr);
     } else {
-        mOutputBufferQueue->configure(surface, generation, bqId);
+        mOutputBufferQueue->configure(surface, generation, bqId,
+                                      mBase1_2 ? &syncObj : nullptr);
     }
-    ALOGD("generation remote change %u", generation);
+    ALOGD("surface generation remote change %u HAL ver: %s",
+          generation, syncObj ? "1.2" : "1.0");
 
-    Return<Status> transStatus = mBase1_0->setOutputSurface(
-            static_cast<uint64_t>(blockPoolId),
-            bqId == 0 ? nullHgbp : igbp);
+    Return<Status> transStatus = syncObj ?
+            mBase1_2->setOutputSurfaceWithSyncObj(
+                    static_cast<uint64_t>(blockPoolId),
+                    bqId == 0 ? nullHgbp : igbp, *syncObj) :
+            mBase1_0->setOutputSurface(
+                    static_cast<uint64_t>(blockPoolId),
+                    bqId == 0 ? nullHgbp : igbp);
     if (!transStatus.isOk()) {
         LOG(ERROR) << "setOutputSurface -- transaction failed.";
         return C2_TRANSACTION_FAILED;
@@ -1464,6 +1525,7 @@
     if (status != C2_OK) {
         LOG(DEBUG) << "setOutputSurface -- call failed: " << status << ".";
     }
+    ALOGD("Surface configure completed");
     return status;
 }
 
@@ -1474,6 +1536,11 @@
     return mOutputBufferQueue->outputBuffer(block, input, output);
 }
 
+void Codec2Client::Component::setOutputSurfaceMaxDequeueCount(
+        int maxDequeueCount) {
+    mOutputBufferQueue->updateMaxDequeueBufferCount(maxDequeueCount);
+}
+
 c2_status_t Codec2Client::Component::connectToInputSurface(
         const std::shared_ptr<InputSurface>& inputSurface,
         std::shared_ptr<InputSurfaceConnection>* connection) {
diff --git a/media/codec2/hidl/client/include/codec2/hidl/client.h b/media/codec2/hidl/client/include/codec2/hidl/client.h
index bbb2b96..eca268e 100644
--- a/media/codec2/hidl/client/include/codec2/hidl/client.h
+++ b/media/codec2/hidl/client/include/codec2/hidl/client.h
@@ -78,6 +78,11 @@
 struct IComponentStore;
 }  // namespace android::hardware::media::c2::V1_1
 
+namespace android::hardware::media::c2::V1_2 {
+struct IComponent;
+struct IComponentStore;
+}  // namespace android::hardware::media::c2::V1_2
+
 namespace android::hardware::media::bufferpool::V2_0 {
 struct IClientManager;
 }  // namespace android::hardware::media::bufferpool::V2_0
@@ -137,6 +142,7 @@
 
     typedef ::android::hardware::media::c2::V1_0::IComponentStore Base1_0;
     typedef ::android::hardware::media::c2::V1_1::IComponentStore Base1_1;
+    typedef ::android::hardware::media::c2::V1_2::IComponentStore Base1_2;
     typedef Base1_0 Base;
 
     struct Listener;
@@ -156,6 +162,7 @@
     sp<Base> const& getBase() const;
     sp<Base1_0> const& getBase1_0() const;
     sp<Base1_1> const& getBase1_1() const;
+    sp<Base1_2> const& getBase1_2() const;
 
     std::string const& getServiceName() const;
 
@@ -228,6 +235,7 @@
 protected:
     sp<Base1_0> mBase1_0;
     sp<Base1_1> mBase1_1;
+    sp<Base1_2> mBase1_2;
 
     // Finds the first store where the predicate returns C2_OK and returns the
     // last predicate result. The predicate will be tried on all stores. The
@@ -318,6 +326,7 @@
 
     typedef ::android::hardware::media::c2::V1_0::IComponent Base1_0;
     typedef ::android::hardware::media::c2::V1_1::IComponent Base1_1;
+    typedef ::android::hardware::media::c2::V1_2::IComponent Base1_2;
     typedef Base1_0 Base;
 
     c2_status_t createBlockPool(
@@ -398,6 +407,9 @@
             const QueueBufferInput& input,
             QueueBufferOutput* output);
 
+    // Set max dequeue count for output surface.
+    void setOutputSurfaceMaxDequeueCount(int maxDequeueCount);
+
     // Connect to a given InputSurface.
     c2_status_t connectToInputSurface(
             const std::shared_ptr<InputSurface>& inputSurface,
@@ -413,12 +425,14 @@
     // base cannot be null.
     Component(const sp<Base>& base);
     Component(const sp<Base1_1>& base);
+    Component(const sp<Base1_2>& base);
 
     ~Component();
 
 protected:
     sp<Base1_0> mBase1_0;
     sp<Base1_1> mBase1_1;
+    sp<Base1_2> mBase1_2;
 
     struct BufferPoolSender;
     std::unique_ptr<BufferPoolSender> mBufferPoolSender;
diff --git a/media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/OutputBufferQueue.h b/media/codec2/hidl/client/include/codec2/hidl/output.h
similarity index 83%
rename from media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/OutputBufferQueue.h
rename to media/codec2/hidl/client/include/codec2/hidl/output.h
index 80368f7..0f03b36 100644
--- a/media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/OutputBufferQueue.h
+++ b/media/codec2/hidl/client/include/codec2/hidl/output.h
@@ -19,16 +19,17 @@
 
 #include <gui/IGraphicBufferProducer.h>
 #include <codec2/hidl/1.0/types.h>
+#include <codec2/hidl/1.2/types.h>
 #include <C2Work.h>
 
 struct C2_HIDE _C2BlockPoolData;
+class C2SurfaceSyncMemory;
 
 namespace android {
 namespace hardware {
 namespace media {
 namespace c2 {
-namespace V1_0 {
-namespace utils {
+
 
 // BufferQueue-Based Block Operations
 // ==================================
@@ -45,7 +46,8 @@
     // Graphic blocks from older surface will be migrated to new surface.
     bool configure(const sp<IGraphicBufferProducer>& igbp,
                    uint32_t generation,
-                   uint64_t bqId);
+                   uint64_t bqId,
+                   std::shared_ptr<V1_2::SurfaceSyncObj> *syncObj);
 
     // Render a graphic block to current surface.
     status_t outputBuffer(
@@ -61,22 +63,27 @@
     void holdBufferQueueBlocks(
             const std::list<std::unique_ptr<C2Work>>& workList);
 
+    // Update # of max dequeue buffer from BQ. If # of max dequeued buffer is shared
+    // via shared memory between HAL and framework, Update # of max dequeued buffer
+    // and synchronize.
+    void updateMaxDequeueBufferCount(int maxDequeueBufferCount);
+
 private:
 
     std::mutex mMutex;
     sp<IGraphicBufferProducer> mIgbp;
     uint32_t mGeneration;
     uint64_t mBqId;
+    int32_t mMaxDequeueBufferCount;
     std::shared_ptr<int> mOwner;
     // To migrate existing buffers
     sp<GraphicBuffer> mBuffers[BufferQueueDefs::NUM_BUFFER_SLOTS]; // find a better way
     std::weak_ptr<_C2BlockPoolData> mPoolDatas[BufferQueueDefs::NUM_BUFFER_SLOTS];
+    std::shared_ptr<C2SurfaceSyncMemory> mSyncMem;
 
     bool registerBuffer(const C2ConstGraphicBlock& block);
 };
 
-}  // namespace utils
-}  // namespace V1_0
 }  // namespace c2
 }  // namespace media
 }  // namespace hardware
diff --git a/media/codec2/hidl/1.0/utils/OutputBufferQueue.cpp b/media/codec2/hidl/client/output.cpp
similarity index 71%
rename from media/codec2/hidl/1.0/utils/OutputBufferQueue.cpp
rename to media/codec2/hidl/client/output.cpp
index 2b235f2..7df0da2 100644
--- a/media/codec2/hidl/1.0/utils/OutputBufferQueue.cpp
+++ b/media/codec2/hidl/client/output.cpp
@@ -19,13 +19,16 @@
 #include <android-base/logging.h>
 
 #include <android/hardware/graphics/bufferqueue/2.0/IGraphicBufferProducer.h>
-#include <codec2/hidl/1.0/OutputBufferQueue.h>
+#include <codec2/hidl/output.h>
+#include <cutils/ashmem.h>
 #include <gui/bufferqueue/2.0/B2HGraphicBufferProducer.h>
+#include <sys/mman.h>
 
 #include <C2AllocatorGralloc.h>
 #include <C2BlockInternal.h>
 #include <C2Buffer.h>
 #include <C2PlatformSupport.h>
+#include <C2SurfaceSyncObj.h>
 
 #include <iomanip>
 
@@ -33,8 +36,6 @@
 namespace hardware {
 namespace media {
 namespace c2 {
-namespace V1_0 {
-namespace utils {
 
 using HGraphicBufferProducer = ::android::hardware::graphics::bufferqueue::
         V2_0::IGraphicBufferProducer;
@@ -105,7 +106,8 @@
 status_t attachToBufferQueue(const C2ConstGraphicBlock& block,
                              const sp<IGraphicBufferProducer>& igbp,
                              uint32_t generation,
-                             int32_t* bqSlot) {
+                             int32_t* bqSlot,
+                             std::shared_ptr<C2SurfaceSyncMemory> syncMem) {
     if (!igbp) {
         LOG(WARNING) << "attachToBufferQueue -- null producer.";
         return NO_INIT;
@@ -126,7 +128,25 @@
             << ", stride " << graphicBuffer->getStride()
             << ", generation " << graphicBuffer->getGenerationNumber();
 
-    status_t result = igbp->attachBuffer(bqSlot, graphicBuffer);
+    C2SyncVariables *syncVar = syncMem ? syncMem->mem() : nullptr;
+    status_t result = OK;
+    if (syncVar) {
+        syncVar->lock();
+        if (!syncVar->isDequeueableLocked() ||
+            syncVar->getSyncStatusLocked() == C2SyncVariables::STATUS_SWITCHING) {
+            syncVar->unlock();
+            LOG(WARNING) << "attachToBufferQueue -- attachBuffer failed: "
+                            "status = " << INVALID_OPERATION << ".";
+            return INVALID_OPERATION;
+        }
+        result = igbp->attachBuffer(bqSlot, graphicBuffer);
+        if (result == OK) {
+            syncVar->notifyDequeuedLocked();
+        }
+        syncVar->unlock();
+    } else {
+        result = igbp->attachBuffer(bqSlot, graphicBuffer);
+    }
     if (result != OK) {
         LOG(WARNING) << "attachToBufferQueue -- attachBuffer failed: "
                         "status = " << result << ".";
@@ -157,12 +177,40 @@
 
 bool OutputBufferQueue::configure(const sp<IGraphicBufferProducer>& igbp,
                                   uint32_t generation,
-                                  uint64_t bqId) {
+                                  uint64_t bqId,
+                                  std::shared_ptr<V1_2::SurfaceSyncObj> *syncObj) {
     uint64_t consumerUsage = 0;
     if (igbp->getConsumerUsage(&consumerUsage) != OK) {
         ALOGW("failed to get consumer usage");
     }
 
+    // TODO : Abstract creation process into C2SurfaceSyncMemory class.
+    // use C2LinearBlock instead ashmem.
+    std::shared_ptr<C2SurfaceSyncMemory> syncMem;
+    if (syncObj && igbp) {
+        bool mapped = false;
+        int memFd = ashmem_create_region("C2SurfaceMem", sizeof(C2SyncVariables));
+        size_t memSize = memFd < 0 ? 0 : ashmem_get_size_region(memFd);
+        if (memSize > 0) {
+            syncMem = C2SurfaceSyncMemory::Create(memFd, memSize);
+            if (syncMem) {
+                mapped = true;
+                *syncObj = std::make_shared<V1_2::SurfaceSyncObj>();
+                (*syncObj)->syncMemory = syncMem->handle();
+                (*syncObj)->bqId = bqId;
+                (*syncObj)->generationId = generation;
+                (*syncObj)->consumerUsage = consumerUsage;
+                ALOGD("C2SurfaceSyncMemory created %zu(%zu)", sizeof(C2SyncVariables), memSize);
+            }
+        }
+        if (!mapped) {
+            if (memFd >= 0) {
+                ::close(memFd);
+            }
+            ALOGW("SurfaceSyncObj creation failure");
+        }
+    }
+
     size_t tryNum = 0;
     size_t success = 0;
     sp<GraphicBuffer> buffers[BufferQueueDefs::NUM_BUFFER_SLOTS];
@@ -173,6 +221,19 @@
         if (generation == mGeneration) {
             return false;
         }
+        std::shared_ptr<C2SurfaceSyncMemory> oldMem = mSyncMem;
+        C2SyncVariables *oldSync = mSyncMem ? mSyncMem->mem() : nullptr;
+        if (oldSync) {
+            oldSync->lock();
+            oldSync->setSyncStatusLocked(C2SyncVariables::STATUS_SWITCHING);
+            oldSync->unlock();
+        }
+        mSyncMem.reset();
+        if (syncMem) {
+            mSyncMem = syncMem;
+        }
+        C2SyncVariables *newSync = mSyncMem ? mSyncMem->mem() : nullptr;
+
         mIgbp = igbp;
         mGeneration = generation;
         mBqId = bqId;
@@ -212,7 +273,7 @@
             }
             bool attach =
                     _C2BlockFactory::EndAttachBlockToBufferQueue(
-                            data, mOwner, getHgbp(mIgbp),
+                            data, mOwner, getHgbp(mIgbp), mSyncMem,
                             generation, bqId, bqSlot);
             if (!attach) {
                 igbp->cancelBuffer(bqSlot, Fence::NO_FENCE);
@@ -226,8 +287,12 @@
             mBuffers[i] = buffers[i];
             mPoolDatas[i] = poolDatas[i];
         }
+        if (newSync) {
+            newSync->setInitialDequeueCount(mMaxDequeueBufferCount, success);
+        }
     }
-    ALOGD("remote graphic buffer migration %zu/%zu", success, tryNum);
+    ALOGD("remote graphic buffer migration %zu/%zu",
+          success, tryNum);
     return true;
 }
 
@@ -258,7 +323,7 @@
                      << ", bqSlot " << oldSlot
                      << ", generation " << mGeneration
                      << ".";
-        _C2BlockFactory::HoldBlockFromBufferQueue(data, mOwner, getHgbp(mIgbp));
+        _C2BlockFactory::HoldBlockFromBufferQueue(data, mOwner, getHgbp(mIgbp), mSyncMem);
         mPoolDatas[oldSlot] = data;
         mBuffers[oldSlot] = createGraphicBuffer(block);
         mBuffers[oldSlot]->setGenerationNumber(mGeneration);
@@ -278,25 +343,39 @@
     uint32_t generation;
     uint64_t bqId;
     int32_t bqSlot;
-    bool display = displayBufferQueueBlock(block);
+    bool display = V1_0::utils::displayBufferQueueBlock(block);
     if (!getBufferQueueAssignment(block, &generation, &bqId, &bqSlot) ||
         bqId == 0) {
         // Block not from bufferqueue -- it must be attached before queuing.
 
+        std::shared_ptr<C2SurfaceSyncMemory> syncMem;
         mMutex.lock();
         sp<IGraphicBufferProducer> outputIgbp = mIgbp;
         uint32_t outputGeneration = mGeneration;
+        syncMem = mSyncMem;
         mMutex.unlock();
 
         status_t status = attachToBufferQueue(
-                block, outputIgbp, outputGeneration, &bqSlot);
+                block, outputIgbp, outputGeneration, &bqSlot, syncMem);
+
         if (status != OK) {
             LOG(WARNING) << "outputBuffer -- attaching failed.";
             return INVALID_OPERATION;
         }
 
-        status = outputIgbp->queueBuffer(static_cast<int>(bqSlot),
-                                     input, output);
+        auto syncVar = syncMem ? syncMem->mem() : nullptr;
+        if(syncVar) {
+            syncVar->lock();
+            status = outputIgbp->queueBuffer(static_cast<int>(bqSlot),
+                                         input, output);
+            if (status == OK) {
+                syncVar->notifyQueuedLocked();
+            }
+            syncVar->unlock();
+        } else {
+            status = outputIgbp->queueBuffer(static_cast<int>(bqSlot),
+                                         input, output);
+        }
         if (status != OK) {
             LOG(ERROR) << "outputBuffer -- queueBuffer() failed "
                        "on non-bufferqueue-based block. "
@@ -306,10 +385,12 @@
         return OK;
     }
 
+    std::shared_ptr<C2SurfaceSyncMemory> syncMem;
     mMutex.lock();
     sp<IGraphicBufferProducer> outputIgbp = mIgbp;
     uint32_t outputGeneration = mGeneration;
     uint64_t outputBqId = mBqId;
+    syncMem = mSyncMem;
     mMutex.unlock();
 
     if (!outputIgbp) {
@@ -330,8 +411,21 @@
         return DEAD_OBJECT;
     }
 
-    status_t status = outputIgbp->queueBuffer(static_cast<int>(bqSlot),
-                                          input, output);
+    auto syncVar = syncMem ? syncMem->mem() : nullptr;
+    status_t status = OK;
+    if (syncVar) {
+        syncVar->lock();
+        status = outputIgbp->queueBuffer(static_cast<int>(bqSlot),
+                                                  input, output);
+        if (status == OK) {
+            syncVar->notifyQueuedLocked();
+        }
+        syncVar->unlock();
+    } else {
+        status = outputIgbp->queueBuffer(static_cast<int>(bqSlot),
+                                                  input, output);
+    }
+
     if (status != OK) {
         LOG(ERROR) << "outputBuffer -- queueBuffer() failed "
                    "on bufferqueue-based block. "
@@ -348,8 +442,18 @@
                            this, std::placeholders::_1));
 }
 
-}  // namespace utils
-}  // namespace V1_0
+void OutputBufferQueue::updateMaxDequeueBufferCount(int maxDequeueBufferCount) {
+    mMutex.lock();
+    mMaxDequeueBufferCount = maxDequeueBufferCount;
+    auto syncVar = mSyncMem ? mSyncMem->mem() : nullptr;
+    if (syncVar) {
+        syncVar->lock();
+        syncVar->updateMaxDequeueCountLocked(maxDequeueBufferCount);
+        syncVar->unlock();
+    }
+    mMutex.unlock();
+}
+
 }  // namespace c2
 }  // namespace media
 }  // namespace hardware
diff --git a/media/codec2/hidl/services/Android.bp b/media/codec2/hidl/services/Android.bp
index 5a23217..bb9f51f 100644
--- a/media/codec2/hidl/services/Android.bp
+++ b/media/codec2/hidl/services/Android.bp
@@ -39,11 +39,11 @@
 }
 
 cc_binary {
-    name: "android.hardware.media.c2@1.1-default-service",
+    name: "android.hardware.media.c2@1.2-default-service",
     vendor: true,
     relative_install_path: "hw",
 
-    init_rc: ["android.hardware.media.c2@1.1-default-service.rc"],
+    init_rc: ["android.hardware.media.c2@1.2-default-service.rc"],
 
     defaults: ["libcodec2-hidl-defaults"],
     srcs: [
@@ -55,7 +55,7 @@
         "libavservices_minijail_vendor",
         "libbinder",
     ],
-    required: ["android.hardware.media.c2@1.1-default-seccomp_policy"],
+    required: ["android.hardware.media.c2@1.2-default-seccomp_policy"],
 
     // The content in manifest_media_c2_V1_1_default.xml can be included
     // directly in the main device manifest.xml file or via vintf_fragments.
@@ -73,23 +73,23 @@
 // Files in the "seccomp_policy" subdirectory are only provided as examples.
 // They may not work on some devices and/or architectures without modification.
 prebuilt_etc {
-    name: "android.hardware.media.c2@1.1-default-seccomp_policy",
+    name: "android.hardware.media.c2@1.2-default-seccomp_policy",
     vendor: true,
     sub_dir: "seccomp_policy",
 
     // If a specific architecture is targeted, multiple choices are not needed.
     arch: {
         arm: {
-            src: "seccomp_policy/android.hardware.media.c2@1.1-default-arm.policy",
+            src: "seccomp_policy/android.hardware.media.c2@1.2-default-arm.policy",
         },
         arm64: {
-            src: "seccomp_policy/android.hardware.media.c2@1.1-default-arm64.policy",
+            src: "seccomp_policy/android.hardware.media.c2@1.2-default-arm64.policy",
         },
         x86: {
-            src: "seccomp_policy/android.hardware.media.c2@1.1-default-x86.policy",
+            src: "seccomp_policy/android.hardware.media.c2@1.2-default-x86.policy",
         },
         x86_64: {
-            src: "seccomp_policy/android.hardware.media.c2@1.1-default-x86_64.policy",
+            src: "seccomp_policy/android.hardware.media.c2@1.2-default-x86_64.policy",
         },
     },
 
diff --git a/media/codec2/hidl/services/android.hardware.media.c2@1.1-default-service.rc b/media/codec2/hidl/services/android.hardware.media.c2@1.1-default-service.rc
deleted file mode 100644
index 44f2d8e..0000000
--- a/media/codec2/hidl/services/android.hardware.media.c2@1.1-default-service.rc
+++ /dev/null
@@ -1,7 +0,0 @@
-service android-hardware-media-c2-hal-1-1 /vendor/bin/hw/android.hardware.media.c2@1.1-default-service
-    class hal
-    user mediacodec
-    group camera mediadrm drmrpc
-    ioprio rt 4
-    writepid /dev/cpuset/foreground/tasks
-
diff --git a/media/codec2/hidl/services/android.hardware.media.c2@1.2-default-service.rc b/media/codec2/hidl/services/android.hardware.media.c2@1.2-default-service.rc
new file mode 100644
index 0000000..03f6e3d
--- /dev/null
+++ b/media/codec2/hidl/services/android.hardware.media.c2@1.2-default-service.rc
@@ -0,0 +1,7 @@
+service android-hardware-media-c2-hal-1-2 /vendor/bin/hw/android.hardware.media.c2@1.2-default-service
+    class hal
+    user mediacodec
+    group camera mediadrm drmrpc
+    ioprio rt 4
+    writepid /dev/cpuset/foreground/tasks
+
diff --git a/media/codec2/hidl/services/manifest_media_c2_V1_2_default.xml b/media/codec2/hidl/services/manifest_media_c2_V1_2_default.xml
new file mode 100644
index 0000000..a5e8d87
--- /dev/null
+++ b/media/codec2/hidl/services/manifest_media_c2_V1_2_default.xml
@@ -0,0 +1,11 @@
+<manifest version="1.0" type="device">
+    <hal>
+        <name>android.hardware.media.c2</name>
+        <transport>hwbinder</transport>
+        <version>1.2</version>
+        <interface>
+            <name>IComponentStore</name>
+            <instance>default</instance>
+        </interface>
+    </hal>
+</manifest>
diff --git a/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.1-default-x86_64.policy b/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.1-default-x86_64.policy
deleted file mode 100644
index d9c4045..0000000
--- a/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.1-default-x86_64.policy
+++ /dev/null
@@ -1,71 +0,0 @@
-# Copyright (C) 2017 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-read: 1
-mprotect: 1
-prctl: 1
-openat: 1
-open: 1
-getuid32: 1
-getuid: 1
-getrlimit: 1
-writev: 1
-ioctl: 1
-close: 1
-mmap2: 1
-mmap: 1
-fstat64: 1
-fstat: 1
-stat64: 1
-statfs64: 1
-madvise: 1
-fstatat64: 1
-newfstatat: 1
-futex: 1
-munmap: 1
-faccessat: 1
-_llseek: 1
-lseek: 1
-clone: 1
-sigaltstack: 1
-setpriority: 1
-restart_syscall: 1
-exit: 1
-exit_group: 1
-rt_sigreturn: 1
-ugetrlimit: 1
-readlink: 1
-readlinkat: 1
-_llseek: 1
-fstatfs64: 1
-fstatfs: 1
-pread64: 1
-mremap: 1
-dup: 1
-set_tid_address: 1
-write: 1
-nanosleep: 1
-sched_setscheduler: 1
-uname: 1
-memfd_create: 1
-ftruncate: 1
-ftruncate64: 1
-
-# Required by AddressSanitizer
-gettid: 1
-sched_yield: 1
-getpid: 1
-gettid: 1
-
-@include /system/etc/seccomp_policy/crash_dump.x86.policy
diff --git a/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.1-default-arm.policy b/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.2-default-arm.policy
similarity index 100%
rename from media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.1-default-arm.policy
rename to media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.2-default-arm.policy
diff --git a/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.1-default-arm64.policy b/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.2-default-arm64.policy
similarity index 96%
rename from media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.1-default-arm64.policy
rename to media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.2-default-arm64.policy
index 4faf8b2..f701987 100644
--- a/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.1-default-arm64.policy
+++ b/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.2-default-arm64.policy
@@ -1,4 +1,4 @@
-# Copyright (C) 2019 The Android Open Source Project
+# Copyright (C) 2021 The Android Open Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.1-default-x86.policy b/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.2-default-x86.policy
similarity index 95%
rename from media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.1-default-x86.policy
rename to media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.2-default-x86.policy
index d9c4045..056c690 100644
--- a/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.1-default-x86.policy
+++ b/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.2-default-x86.policy
@@ -1,4 +1,4 @@
-# Copyright (C) 2017 The Android Open Source Project
+# Copyright (C) 2021 The Android Open Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.1-default-x86.policy b/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.2-default-x86_64.policy
similarity index 95%
copy from media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.1-default-x86.policy
copy to media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.2-default-x86_64.policy
index d9c4045..056c690 100644
--- a/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.1-default-x86.policy
+++ b/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.2-default-x86_64.policy
@@ -1,4 +1,4 @@
-# Copyright (C) 2017 The Android Open Source Project
+# Copyright (C) 2021 The Android Open Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/media/codec2/hidl/services/vendor.cpp b/media/codec2/hidl/services/vendor.cpp
index 3ddb039..0d0684d 100644
--- a/media/codec2/hidl/services/vendor.cpp
+++ b/media/codec2/hidl/services/vendor.cpp
@@ -15,11 +15,11 @@
  */
 
 //#define LOG_NDEBUG 0
-#define LOG_TAG "android.hardware.media.c2@1.1-service"
+#define LOG_TAG "android.hardware.media.c2@1.2-service"
 
 #include <android-base/logging.h>
 #include <binder/ProcessState.h>
-#include <codec2/hidl/1.1/ComponentStore.h>
+#include <codec2/hidl/1.2/ComponentStore.h>
 #include <hidl/HidlTransportSupport.h>
 #include <minijail.h>
 
@@ -31,13 +31,13 @@
 // "android.hardware.media.c2@1.1-default-seccomp_policy" in Android.bp.
 static constexpr char kBaseSeccompPolicyPath[] =
         "/vendor/etc/seccomp_policy/"
-        "android.hardware.media.c2@1.1-default-seccomp-policy";
+        "android.hardware.media.c2@1.2-default-seccomp-policy";
 
 // Additional seccomp permissions can be added in this file.
 // This file does not exist by default.
 static constexpr char kExtSeccompPolicyPath[] =
         "/vendor/etc/seccomp_policy/"
-        "android.hardware.media.c2@1.1-extended-seccomp-policy";
+        "android.hardware.media.c2@1.2-extended-seccomp-policy";
 
 class StoreImpl : public C2ComponentStore {
 public:
@@ -164,7 +164,7 @@
 
 int main(int /* argc */, char** /* argv */) {
     using namespace ::android;
-    LOG(DEBUG) << "android.hardware.media.c2@1.1-service starting...";
+    LOG(DEBUG) << "android.hardware.media.c2@1.2-service starting...";
 
     // Set up minijail to limit system calls.
     signal(SIGPIPE, SIG_IGN);
@@ -180,7 +180,7 @@
 
     // Create IComponentStore service.
     {
-        using namespace ::android::hardware::media::c2::V1_1;
+        using namespace ::android::hardware::media::c2::V1_2;
         sp<IComponentStore> store;
 
         // TODO: Replace this with
diff --git a/media/codec2/sfplugin/C2OMXNode.cpp b/media/codec2/sfplugin/C2OMXNode.cpp
index 3a7af10..f66dc11 100644
--- a/media/codec2/sfplugin/C2OMXNode.cpp
+++ b/media/codec2/sfplugin/C2OMXNode.cpp
@@ -33,12 +33,14 @@
 #include <OMX_IndexExt.h>
 
 #include <android/fdsan.h>
+#include <media/stagefright/foundation/ColorUtils.h>
 #include <media/stagefright/omx/OMXUtils.h>
 #include <media/stagefright/MediaErrors.h>
 #include <ui/Fence.h>
 #include <ui/GraphicBuffer.h>
 #include <utils/Thread.h>
 
+#include "utils/Codec2Mapper.h"
 #include "C2OMXNode.h"
 
 namespace android {
@@ -71,6 +73,23 @@
         jobs->cond.broadcast();
     }
 
+    void setDataspace(android_dataspace dataspace) {
+        Mutexed<Jobs>::Locked jobs(mJobs);
+        ColorUtils::convertDataSpaceToV0(dataspace);
+        jobs->configUpdate.emplace_back(new C2StreamDataSpaceInfo::input(0u, dataspace));
+        int32_t standard;
+        int32_t transfer;
+        int32_t range;
+        ColorUtils::getColorConfigFromDataSpace(dataspace, &range, &standard, &transfer);
+        std::unique_ptr<C2StreamColorAspectsInfo::input> colorAspects =
+            std::make_unique<C2StreamColorAspectsInfo::input>(0u);
+        if (C2Mapper::map(standard, &colorAspects->primaries, &colorAspects->matrix)
+                && C2Mapper::map(transfer, &colorAspects->transfer)
+                && C2Mapper::map(range, &colorAspects->range)) {
+            jobs->configUpdate.push_back(std::move(colorAspects));
+        }
+    }
+
 protected:
     bool threadLoop() override {
         constexpr nsecs_t kIntervalNs = nsecs_t(10) * 1000 * 1000;  // 10ms
@@ -102,6 +121,9 @@
                     uniqueFds.push_back(std::move(queue.workList.front().fd1));
                     queue.workList.pop_front();
                 }
+                for (const std::unique_ptr<C2Param> &param : jobs->configUpdate) {
+                    items.front()->input.configUpdate.emplace_back(C2Param::Copy(*param));
+                }
 
                 jobs.unlock();
                 for (int fenceFd : fenceFds) {
@@ -119,6 +141,7 @@
                 queued = true;
             }
             if (queued) {
+                jobs->configUpdate.clear();
                 return true;
             }
             if (i == 0) {
@@ -161,6 +184,7 @@
         std::map<std::weak_ptr<Codec2Client::Component>,
                  Queue,
                  std::owner_less<std::weak_ptr<Codec2Client::Component>>> queues;
+        std::vector<std::unique_ptr<C2Param>> configUpdate;
         Condition cond;
     };
     Mutexed<Jobs> mJobs;
@@ -172,6 +196,9 @@
       mQueueThread(new QueueThread) {
     android_fdsan_set_error_level(ANDROID_FDSAN_ERROR_LEVEL_WARN_ALWAYS);
     mQueueThread->run("C2OMXNode", PRIORITY_AUDIO);
+
+    Mutexed<android_dataspace>::Locked ds(mDataspace);
+    *ds = HAL_DATASPACE_UNKNOWN;
 }
 
 status_t C2OMXNode::freeNode() {
@@ -461,8 +488,11 @@
     android_dataspace dataSpace = (android_dataspace)msg.u.event_data.data1;
     uint32_t pixelFormat = msg.u.event_data.data3;
 
-    // TODO: set dataspace on component to see if it impacts color aspects
     ALOGD("dataspace changed to %#x pixel format: %#x", dataSpace, pixelFormat);
+    mQueueThread->setDataspace(dataSpace);
+
+    Mutexed<android_dataspace>::Locked ds(mDataspace);
+    *ds = dataSpace;
     return OK;
 }
 
@@ -495,4 +525,8 @@
     (void)mBufferSource->onInputBufferEmptied(bufferId, -1);
 }
 
+android_dataspace C2OMXNode::getDataspace() {
+    return *mDataspace.lock();
+}
+
 }  // namespace android
diff --git a/media/codec2/sfplugin/C2OMXNode.h b/media/codec2/sfplugin/C2OMXNode.h
index 1717c96..9c04969 100644
--- a/media/codec2/sfplugin/C2OMXNode.h
+++ b/media/codec2/sfplugin/C2OMXNode.h
@@ -93,6 +93,11 @@
      */
     void onInputBufferDone(c2_cntr64_t index);
 
+    /**
+     * Returns dataspace information from GraphicBufferSource.
+     */
+    android_dataspace getDataspace();
+
 private:
     std::weak_ptr<Codec2Client::Component> mComp;
     sp<IOMXBufferSource> mBufferSource;
@@ -101,6 +106,7 @@
     uint32_t mWidth;
     uint32_t mHeight;
     uint64_t mUsage;
+    Mutexed<android_dataspace> mDataspace;
 
     // WORKAROUND: timestamp adjustment
 
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index af11592..02f7cb8 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -211,8 +211,6 @@
                 (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits,
                 &usage, sizeof(usage));
 
-        // NOTE: we do not use/pass through color aspects from GraphicBufferSource as we
-        // communicate that directly to the component.
         mSource->configure(
                 mOmxNode, static_cast<hardware::graphics::common::V1_0::Dataspace>(mDataSpace));
         return OK;
@@ -411,6 +409,10 @@
         mNode->onInputBufferDone(index);
     }
 
+    android_dataspace getDataspace() override {
+        return mNode->getDataspace();
+    }
+
 private:
     sp<HGraphicBufferSource> mSource;
     sp<C2OMXNode> mNode;
@@ -1086,6 +1088,45 @@
             configUpdate.push_back(std::move(gop));
         }
 
+        if ((config->mDomain & Config::IS_ENCODER)
+                && (config->mDomain & Config::IS_VIDEO)) {
+            // we may not use all 3 of these entries
+            std::unique_ptr<C2StreamPictureQuantizationTuning::output> qp =
+                C2StreamPictureQuantizationTuning::output::AllocUnique(3 /* flexCount */,
+                                                                       0u /* stream */);
+
+            int ix = 0;
+
+            int32_t iMax = INT32_MAX;
+            int32_t iMin = INT32_MIN;
+            (void) sdkParams->findInt32(KEY_VIDEO_QP_I_MAX, &iMax);
+            (void) sdkParams->findInt32(KEY_VIDEO_QP_I_MIN, &iMin);
+            if (iMax != INT32_MAX || iMin != INT32_MIN) {
+                qp->m.values[ix++] = {I_FRAME, iMin, iMax};
+            }
+
+            int32_t pMax = INT32_MAX;
+            int32_t pMin = INT32_MIN;
+            (void) sdkParams->findInt32(KEY_VIDEO_QP_P_MAX, &pMax);
+            (void) sdkParams->findInt32(KEY_VIDEO_QP_P_MIN, &pMin);
+            if (pMax != INT32_MAX || pMin != INT32_MIN) {
+                qp->m.values[ix++] = {P_FRAME, pMin, pMax};
+            }
+
+            int32_t bMax = INT32_MAX;
+            int32_t bMin = INT32_MIN;
+            (void) sdkParams->findInt32(KEY_VIDEO_QP_B_MAX, &bMax);
+            (void) sdkParams->findInt32(KEY_VIDEO_QP_B_MIN, &bMin);
+            if (bMax != INT32_MAX || bMin != INT32_MIN) {
+                qp->m.values[ix++] = {B_FRAME, bMin, bMax};
+            }
+
+            // adjust to reflect actual use.
+            qp->setFlexCount(ix);
+
+            configUpdate.push_back(std::move(qp));
+        }
+
         err = config->setParameters(comp, configUpdate, C2_DONT_BLOCK);
         if (err != OK) {
             ALOGW("failed to configure c2 params");
@@ -1573,6 +1614,7 @@
         outputFormat = config->mOutputFormat = config->mOutputFormat->dup();
         if (config->mInputSurface) {
             err2 = config->mInputSurface->start();
+            config->mInputSurfaceDataspace = config->mInputSurface->getDataspace();
         }
         buffersBoundToCodec = config->mBuffersBoundToCodec;
     }
@@ -1660,6 +1702,7 @@
         if (config->mInputSurface) {
             config->mInputSurface->disconnect();
             config->mInputSurface = nullptr;
+            config->mInputSurfaceDataspace = HAL_DATASPACE_UNKNOWN;
         }
     }
     {
@@ -1709,6 +1752,7 @@
         if (config->mInputSurface) {
             config->mInputSurface->disconnect();
             config->mInputSurface = nullptr;
+            config->mInputSurfaceDataspace = HAL_DATASPACE_UNKNOWN;
         }
     }
 
@@ -1962,6 +2006,39 @@
     config->setParameters(comp, params, C2_MAY_BLOCK);
 }
 
+status_t CCodec::querySupportedParameters(std::vector<std::string> *names) {
+    Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+    const std::unique_ptr<Config> &config = *configLocked;
+    return config->querySupportedParameters(names);
+}
+
+status_t CCodec::describeParameter(
+        const std::string &name, CodecParameterDescriptor *desc) {
+    Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+    const std::unique_ptr<Config> &config = *configLocked;
+    return config->describe(name, desc);
+}
+
+status_t CCodec::subscribeToParameters(const std::vector<std::string> &names) {
+    std::shared_ptr<Codec2Client::Component> comp = mState.lock()->comp;
+    if (!comp) {
+        return INVALID_OPERATION;
+    }
+    Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+    const std::unique_ptr<Config> &config = *configLocked;
+    return config->subscribeToVendorConfigUpdate(comp, names);
+}
+
+status_t CCodec::unsubscribeFromParameters(const std::vector<std::string> &names) {
+    std::shared_ptr<Codec2Client::Component> comp = mState.lock()->comp;
+    if (!comp) {
+        return INVALID_OPERATION;
+    }
+    Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+    const std::unique_ptr<Config> &config = *configLocked;
+    return config->unsubscribeFromVendorConfigUpdate(comp, names);
+}
+
 void CCodec::onWorkDone(std::list<std::unique_ptr<C2Work>> &workItems) {
     if (!workItems.empty()) {
         Mutexed<std::list<std::unique_ptr<C2Work>>>::Locked queue(mWorkDoneQueue);
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index 3f717c9..c4f9d84 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -1177,9 +1177,10 @@
     if (outputFormat != nullptr) {
         sp<IGraphicBufferProducer> outputSurface;
         uint32_t outputGeneration;
+        int maxDequeueCount = 0;
         {
             Mutexed<OutputSurface>::Locked output(mOutputSurface);
-            output->maxDequeueBuffers = numOutputSlots +
+            maxDequeueCount = output->maxDequeueBuffers = numOutputSlots +
                     reorderDepth.value + kRenderingDepth;
             outputSurface = output->surface ?
                     output->surface->getIGraphicBufferProducer() : nullptr;
@@ -1188,6 +1189,9 @@
             }
             outputGeneration = output->generation;
         }
+        if (maxDequeueCount > 0) {
+            mComponent->setOutputSurfaceMaxDequeueCount(maxDequeueCount);
+        }
 
         bool graphic = (oStreamFormat.value == C2BufferData::GRAPHIC);
         C2BlockPool::local_id_t outputPoolId_;
@@ -1766,15 +1770,22 @@
     if (needMaxDequeueBufferCountUpdate) {
         size_t numOutputSlots = 0;
         uint32_t reorderDepth = 0;
+        int maxDequeueCount = 0;
         {
             Mutexed<Output>::Locked output(mOutput);
             numOutputSlots = output->numSlots;
             reorderDepth = output->buffers->getReorderDepth();
         }
-        Mutexed<OutputSurface>::Locked output(mOutputSurface);
-        output->maxDequeueBuffers = numOutputSlots + reorderDepth + kRenderingDepth;
-        if (output->surface) {
-            output->surface->setMaxDequeuedBufferCount(output->maxDequeueBuffers);
+        {
+            Mutexed<OutputSurface>::Locked output(mOutputSurface);
+            maxDequeueCount = output->maxDequeueBuffers =
+                    numOutputSlots + reorderDepth + kRenderingDepth;
+            if (output->surface) {
+                output->surface->setMaxDequeuedBufferCount(output->maxDequeueBuffers);
+            }
+        }
+        if (maxDequeueCount > 0) {
+            mComponent->setOutputSurfaceMaxDequeueCount(maxDequeueCount);
         }
     }
 
diff --git a/media/codec2/sfplugin/CCodecConfig.cpp b/media/codec2/sfplugin/CCodecConfig.cpp
index 5de7539..ad28545 100644
--- a/media/codec2/sfplugin/CCodecConfig.cpp
+++ b/media/codec2/sfplugin/CCodecConfig.cpp
@@ -24,6 +24,7 @@
 #include <C2Param.h>
 #include <util/C2InterfaceHelper.h>
 
+#include <media/stagefright/CodecBase.h>
 #include <media/stagefright/MediaCodecConstants.h>
 
 #include "CCodecConfig.h"
@@ -290,8 +291,8 @@
     std::vector<std::string> getPathsForDomain(
             Domain any, Domain all = Domain::ALL) const {
         std::vector<std::string> res;
-        for (const std::pair<std::string, std::vector<ConfigMapper>> &el : mConfigMappers) {
-            for (const ConfigMapper &cm : el.second) {
+        for (const auto &[key, mappers] : mConfigMappers) {
+            for (const ConfigMapper &cm : mappers) {
                 ALOGV("filtering %s %x %x %x %x", cm.path().c_str(), cm.domain(), any,
                         (cm.domain() & any), (cm.domain() & any & all));
                 if ((cm.domain() & any) && ((cm.domain() & any & all) == (any & all))) {
@@ -729,19 +730,6 @@
             return C2Value();
         }));
 
-    add(ConfigMapper(KEY_VIDEO_QP_I_MAX, C2_PARAMKEY_QUANTIZATION, "i-max")
-        .limitTo(D::VIDEO & D::ENCODER));
-    add(ConfigMapper(KEY_VIDEO_QP_I_MIN, C2_PARAMKEY_QUANTIZATION, "i-min")
-        .limitTo(D::VIDEO & D::ENCODER));
-    add(ConfigMapper(KEY_VIDEO_QP_P_MAX, C2_PARAMKEY_QUANTIZATION, "p-max")
-        .limitTo(D::VIDEO & D::ENCODER));
-    add(ConfigMapper(KEY_VIDEO_QP_P_MIN, C2_PARAMKEY_QUANTIZATION, "p-min")
-        .limitTo(D::VIDEO & D::ENCODER));
-    add(ConfigMapper(KEY_VIDEO_QP_B_MAX, C2_PARAMKEY_QUANTIZATION, "b-max")
-        .limitTo(D::VIDEO & D::ENCODER));
-    add(ConfigMapper(KEY_VIDEO_QP_B_MIN, C2_PARAMKEY_QUANTIZATION, "b-min")
-        .limitTo(D::VIDEO & D::ENCODER));
-
     // convert to dBFS and add default
     add(ConfigMapper(KEY_AAC_DRC_TARGET_REFERENCE_LEVEL, C2_PARAMKEY_DRC_TARGET_REFERENCE_LEVEL, "value")
         .limitTo(D::AUDIO & D::DECODER & (D::CONFIG | D::PARAM | D::READ))
@@ -1074,7 +1062,7 @@
             std::vector<std::string> keys;
             mParamUpdater->getKeysForParamIndex(desc->index(), &keys);
             for (const std::string &key : keys) {
-                mVendorParamIndices.insert_or_assign(key, desc->index());
+                mVendorParams.insert_or_assign(key, desc);
             }
         }
     }
@@ -1141,6 +1129,12 @@
             insertion.first->second = std::move(p);
         }
     }
+    if (mInputSurface
+            && (domain & mOutputDomain)
+            && mInputSurfaceDataspace != mInputSurface->getDataspace()) {
+        changed = true;
+        mInputSurfaceDataspace = mInputSurface->getDataspace();
+    }
 
     ALOGV("updated configuration has %zu params (%s)", mCurrentConfig.size(),
             changed ? "CHANGED" : "no change");
@@ -1206,8 +1200,8 @@
         const ReflectedParamUpdater::Dict &reflected,
         Domain portDomain) const {
     sp<AMessage> msg = new AMessage;
-    for (const std::pair<std::string, std::vector<ConfigMapper>> &el : mStandardParams->getKeys()) {
-        for (const ConfigMapper &cm : el.second) {
+    for (const auto &[key, mappers] : mStandardParams->getKeys()) {
+        for (const ConfigMapper &cm : mappers) {
             if ((cm.domain() & portDomain) == 0 // input-output-coded-raw
                 || (cm.domain() & mDomain) != mDomain // component domain + kind (these must match)
                 || (cm.domain() & IS_READ) == 0) {
@@ -1231,26 +1225,26 @@
                 ALOGD("unexpected untyped query value for key: %s", cm.path().c_str());
                 continue;
             }
-            msg->setItem(el.first.c_str(), item);
+            msg->setItem(key.c_str(), item);
         }
     }
 
     bool input = (portDomain & Domain::IS_INPUT);
     std::vector<std::string> vendorKeys;
-    for (const std::pair<std::string, ReflectedParamUpdater::Value> &entry : reflected) {
-        auto it = mVendorParamIndices.find(entry.first);
-        if (it == mVendorParamIndices.end()) {
+    for (const auto &[key, value] : reflected) {
+        auto it = mVendorParams.find(key);
+        if (it == mVendorParams.end()) {
             continue;
         }
-        if (mSubscribedIndices.count(it->second) == 0) {
+        C2Param::Index index = it->second->index();
+        if (mSubscribedIndices.count(index) == 0) {
             continue;
         }
         // For vendor parameters, we only care about direction
-        if ((input && !it->second.forInput())
-                || (!input && !it->second.forOutput())) {
+        if ((input && !index.forInput())
+                || (!input && !index.forOutput())) {
             continue;
         }
-        const ReflectedParamUpdater::Value &value = entry.second;
         C2Value c2Value;
         sp<ABuffer> bufValue;
         AString strValue;
@@ -1262,10 +1256,10 @@
         } else if (value.find(&strValue)) {
             item.set(strValue);
         } else {
-            ALOGD("unexpected untyped query value for key: %s", entry.first.c_str());
+            ALOGD("unexpected untyped query value for key: %s", key.c_str());
             continue;
         }
-        msg->setItem(entry.first.c_str(), item);
+        msg->setItem(key.c_str(), item);
     }
 
     { // convert from Codec 2.0 rect to MediaFormat rect and add crop rect if not present
@@ -1369,7 +1363,6 @@
             msg->removeEntryAt(msg->findEntryByName("color-matrix"));
         }
 
-
         // calculate dataspace for raw graphic buffers if not specified by component, or if
         // using surface with unspecified aspects (as those must be defaulted which may change
         // the dataspace)
@@ -1407,6 +1400,23 @@
             }
         }
 
+        if (mInputSurface) {
+            android_dataspace dataspace = mInputSurface->getDataspace();
+            ColorUtils::convertDataSpaceToV0(dataspace);
+            int32_t standard;
+            ColorUtils::getColorConfigFromDataSpace(dataspace, &range, &standard, &transfer);
+            if (range != 0) {
+                msg->setInt32(KEY_COLOR_RANGE, range);
+            }
+            if (standard != 0) {
+                msg->setInt32(KEY_COLOR_STANDARD, standard);
+            }
+            if (transfer != 0) {
+                msg->setInt32(KEY_COLOR_TRANSFER, transfer);
+            }
+            msg->setInt32("android._dataspace", dataspace);
+        }
+
         // HDR static info
 
         C2HdrStaticMetadataStruct hdr;
@@ -1824,8 +1834,81 @@
 status_t CCodecConfig::subscribeToAllVendorParams(
         const std::shared_ptr<Codec2Client::Configurable> &configurable,
         c2_blocking_t blocking) {
-    for (const std::pair<std::string, C2Param::Index> &entry : mVendorParamIndices) {
-        mSubscribedIndices.insert(entry.second);
+    for (const auto &[path, desc] : mVendorParams) {
+        mSubscribedIndices.insert(desc->index());
+    }
+    return subscribeToConfigUpdate(configurable, {}, blocking);
+}
+
+status_t CCodecConfig::querySupportedParameters(std::vector<std::string> *names) {
+    if (!names) {
+        return BAD_VALUE;
+    }
+    names->clear();
+    // TODO: expand to standard params
+    for (const auto &[key, desc] : mVendorParams) {
+        names->push_back(key);
+    }
+    return OK;
+}
+
+status_t CCodecConfig::describe(const std::string &name, CodecParameterDescriptor *desc) {
+    if (!desc) {
+        return BAD_VALUE;
+    }
+    // TODO: expand to standard params
+    desc->name = name;
+    switch (mParamUpdater->getTypeForKey(name)) {
+        case C2FieldDescriptor::INT32:
+        case C2FieldDescriptor::UINT32:
+        case C2FieldDescriptor::CNTR32:
+            desc->type = AMessage::kTypeInt32;
+            return OK;
+        case C2FieldDescriptor::INT64:
+        case C2FieldDescriptor::UINT64:
+        case C2FieldDescriptor::CNTR64:
+            desc->type = AMessage::kTypeInt64;
+            return OK;
+        case C2FieldDescriptor::FLOAT:
+            desc->type = AMessage::kTypeFloat;
+            return OK;
+        case C2FieldDescriptor::STRING:
+            desc->type = AMessage::kTypeString;
+            return OK;
+        case C2FieldDescriptor::BLOB:
+            desc->type = AMessage::kTypeBuffer;
+            return OK;
+        default:
+            return NAME_NOT_FOUND;
+    }
+}
+
+status_t CCodecConfig::subscribeToVendorConfigUpdate(
+        const std::shared_ptr<Codec2Client::Configurable> &configurable,
+        const std::vector<std::string> &names,
+        c2_blocking_t blocking) {
+    for (const std::string &name : names) {
+        auto it = mVendorParams.find(name);
+        if (it == mVendorParams.end()) {
+            ALOGD("%s is not a recognized vendor parameter; ignored.", name.c_str());
+            continue;
+        }
+        mSubscribedIndices.insert(it->second->index());
+    }
+    return subscribeToConfigUpdate(configurable, {}, blocking);
+}
+
+status_t CCodecConfig::unsubscribeFromVendorConfigUpdate(
+        const std::shared_ptr<Codec2Client::Configurable> &configurable,
+        const std::vector<std::string> &names,
+        c2_blocking_t blocking) {
+    for (const std::string &name : names) {
+        auto it = mVendorParams.find(name);
+        if (it == mVendorParams.end()) {
+            ALOGD("%s is not a recognized vendor parameter; ignored.", name.c_str());
+            continue;
+        }
+        mSubscribedIndices.erase(it->second->index());
     }
     return subscribeToConfigUpdate(configurable, {}, blocking);
 }
diff --git a/media/codec2/sfplugin/CCodecConfig.h b/media/codec2/sfplugin/CCodecConfig.h
index 7e060f6..417b773 100644
--- a/media/codec2/sfplugin/CCodecConfig.h
+++ b/media/codec2/sfplugin/CCodecConfig.h
@@ -35,6 +35,7 @@
 namespace android {
 
 struct AMessage;
+struct CodecParameterDescriptor;
 class NativeHandle;
 struct StandardParams;
 
@@ -124,6 +125,7 @@
 
     std::shared_ptr<InputSurfaceWrapper> mInputSurface;
     std::unique_ptr<InputSurfaceWrapper::Config> mISConfig;
+    android_dataspace mInputSurfaceDataspace;
 
     /// the current configuration. Updated after configure() and based on configUpdate in
     /// onWorkDone
@@ -137,8 +139,8 @@
     /// For now support a validation function.
     std::map<C2Param::Index, LocalParamValidator> mLocalParams;
 
-    /// Vendor field name -> index map.
-    std::map<std::string, C2Param::Index> mVendorParamIndices;
+    /// Vendor field name -> desc map.
+    std::map<std::string, std::shared_ptr<C2ParamDescriptor>> mVendorParams;
 
     std::set<std::string> mLastConfig;
 
@@ -326,6 +328,41 @@
         return Watcher<T>(index, this);
     }
 
+    /**
+     * Queries supported parameters and put the keys to |names|.
+     * TODO: currently this method queries vendor parameter keys only.
+     *
+     * \return OK if successful.
+     *         BAD_VALUE if |names| is nullptr.
+     */
+    status_t querySupportedParameters(std::vector<std::string> *names);
+
+    /**
+     * Describe the parameter with |name|, filling the information into |desc|
+     * TODO: currently this method works only for vendor parameters.
+     *
+     * \return OK if successful.
+     *         BAD_VALUE if |desc| is nullptr.
+     *         NAME_NOT_FOUND if |name| is not a recognized parameter name.
+     */
+    status_t describe(const std::string &name, CodecParameterDescriptor *desc);
+
+    /**
+     * Find corresponding indices for |names| and subscribe to them.
+     */
+    status_t subscribeToVendorConfigUpdate(
+            const std::shared_ptr<Codec2Client::Configurable> &configurable,
+            const std::vector<std::string> &names,
+            c2_blocking_t blocking = C2_DONT_BLOCK);
+
+    /**
+     * Find corresponding indices for |names| and unsubscribe from them.
+     */
+    status_t unsubscribeFromVendorConfigUpdate(
+            const std::shared_ptr<Codec2Client::Configurable> &configurable,
+            const std::vector<std::string> &names,
+            c2_blocking_t blocking = C2_DONT_BLOCK);
+
 private:
 
     /// initializes the standard MediaCodec to Codec 2.0 params mapping
diff --git a/media/codec2/sfplugin/Codec2InfoBuilder.cpp b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
index a26f89e..77a63a7 100644
--- a/media/codec2/sfplugin/Codec2InfoBuilder.cpp
+++ b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
@@ -338,6 +338,59 @@
     // parse default XML files
     parser.parseXmlFilesInSearchDirs();
 
+    // The mainline modules for media may optionally include some codec shaping information.
+    // Based on vendor partition SDK, and the brand/product/device information
+    // (expected to be empty in almost all cases)
+    //
+    {
+        // get build info so we know what file to search
+        // ro.vendor.build.fingerprint
+        std::string fingerprint = base::GetProperty("ro.vendor.build.fingerprint",
+                                               "brand/product/device:");
+        ALOGV("property_get for ro.vendor.build.fingerprint == '%s'", fingerprint.c_str());
+
+        // ro.vendor.build.version.sdk
+        std::string sdk = base::GetProperty("ro.vendor.build.version.sdk", "0");
+        ALOGV("property_get for ro.vendor.build.version.sdk == '%s'", sdk.c_str());
+
+        std::string brand;
+        std::string product;
+        std::string device;
+        size_t pos1;
+        pos1 = fingerprint.find('/');
+        if (pos1 != std::string::npos) {
+            brand = fingerprint.substr(0, pos1);
+            size_t pos2 = fingerprint.find('/', pos1+1);
+            if (pos2 != std::string::npos) {
+                product = fingerprint.substr(pos1+1, pos2 - pos1 - 1);
+                size_t pos3 = fingerprint.find('/', pos2+1);
+                if (pos3 != std::string::npos) {
+                    device = fingerprint.substr(pos2+1, pos3 - pos2 - 1);
+                    size_t pos4 = device.find(':');
+                    if (pos4 != std::string::npos) {
+                        device.resize(pos4);
+                    }
+                }
+            }
+        }
+
+        ALOGV("parsed: sdk '%s' brand '%s' product '%s' device '%s'",
+            sdk.c_str(), brand.c_str(), product.c_str(), device.c_str());
+
+        std::string base = "/apex/com.android.media/etc/formatshaper";
+
+        // looking in these directories within the apex
+        const std::vector<std::string> modulePathnames = {
+            base + "/" + sdk + "/" + brand + "/" + product + "/" + device,
+            base + "/" + sdk + "/" + brand + "/" + product,
+            base + "/" + sdk + "/" + brand,
+            base + "/" + sdk,
+            base
+        };
+
+        parser.parseXmlFilesInSearchDirs( { "media_codecs_shaping.xml" }, modulePathnames);
+    }
+
     if (parser.getParsingStatus() != OK) {
         ALOGD("XML parser no good");
         return OK;
diff --git a/media/codec2/sfplugin/InputSurfaceWrapper.h b/media/codec2/sfplugin/InputSurfaceWrapper.h
index 479acb1..50d600c 100644
--- a/media/codec2/sfplugin/InputSurfaceWrapper.h
+++ b/media/codec2/sfplugin/InputSurfaceWrapper.h
@@ -106,6 +106,11 @@
      */
     virtual void onInputBufferDone(c2_cntr64_t /* index */) {}
 
+    /**
+     * Returns dataspace information from GraphicBufferSource.
+     */
+    virtual android_dataspace getDataspace() { return mDataSpace; }
+
 protected:
     android_dataspace mDataSpace;
 };
diff --git a/media/codec2/sfplugin/ReflectedParamUpdater.cpp b/media/codec2/sfplugin/ReflectedParamUpdater.cpp
index f39051b..d14b9b0 100644
--- a/media/codec2/sfplugin/ReflectedParamUpdater.cpp
+++ b/media/codec2/sfplugin/ReflectedParamUpdater.cpp
@@ -288,6 +288,20 @@
     }
 }
 
+C2FieldDescriptor::type_t ReflectedParamUpdater::getTypeForKey(
+        const std::string &key) const {
+    auto it = mMap.find(key);
+    if (it == mMap.end()) {
+        return C2FieldDescriptor::type_t(~0);
+    }
+
+    if (it->second.fieldDesc) {
+        return it->second.fieldDesc->type();
+    }
+    // whole param is exposed as a blob
+    return C2FieldDescriptor::BLOB;
+}
+
 void ReflectedParamUpdater::updateParamsFromMessage(
         const Dict &params,
         std::vector<std::unique_ptr<C2Param>> *vec /* nonnull */) const {
diff --git a/media/codec2/sfplugin/ReflectedParamUpdater.h b/media/codec2/sfplugin/ReflectedParamUpdater.h
index 752c7e4..6dcf2a3 100644
--- a/media/codec2/sfplugin/ReflectedParamUpdater.h
+++ b/media/codec2/sfplugin/ReflectedParamUpdater.h
@@ -176,6 +176,14 @@
             std::vector<std::string> *keys /* nonnull */) const;
 
     /**
+     * Get field type for the given name
+     *
+     * \param name[in]  field name
+     * \return type of the field, or type_t(~0) if not found.
+     */
+    C2FieldDescriptor::type_t getTypeForKey(const std::string &name) const;
+
+    /**
      * Update C2Param objects from field name and value in AMessage object.
      *
      * \param params[in]    Dict object with field name to value pairs.
diff --git a/media/codec2/sfplugin/include/media/stagefright/CCodec.h b/media/codec2/sfplugin/include/media/stagefright/CCodec.h
index ba69d7e..ec18128 100644
--- a/media/codec2/sfplugin/include/media/stagefright/CCodec.h
+++ b/media/codec2/sfplugin/include/media/stagefright/CCodec.h
@@ -65,6 +65,12 @@
     virtual void signalEndOfInputStream() override;
     virtual void signalRequestIDRFrame() override;
 
+    virtual status_t querySupportedParameters(std::vector<std::string> *names) override;
+    virtual status_t describeParameter(
+            const std::string &name, CodecParameterDescriptor *desc) override;
+    virtual status_t subscribeToParameters(const std::vector<std::string> &names) override;
+    virtual status_t unsubscribeFromParameters(const std::vector<std::string> &names) override;
+
     void initiateReleaseIfStuck();
     void onWorkDone(std::list<std::unique_ptr<C2Work>> &workItems);
     void onInputBufferDone(uint64_t frameIndex, size_t arrayIndex);
diff --git a/media/codec2/sfplugin/tests/CCodecConfig_test.cpp b/media/codec2/sfplugin/tests/CCodecConfig_test.cpp
index c9caa01..7c660dc 100644
--- a/media/codec2/sfplugin/tests/CCodecConfig_test.cpp
+++ b/media/codec2/sfplugin/tests/CCodecConfig_test.cpp
@@ -208,6 +208,24 @@
                         .withSetter(Setter<C2StreamPixelAspectRatioInfo::output>)
                         .build());
 
+                if (isEncoder) {
+                    addParameter(
+                            DefineParam(mInputBitrate, C2_PARAMKEY_BITRATE)
+                            .withDefault(new C2StreamBitrateInfo::input(0u))
+                            .withFields({C2F(mInputBitrate, value).any()})
+                            .withSetter(Setter<C2StreamBitrateInfo::input>)
+                            .build());
+
+                    addParameter(
+                            DefineParam(mOutputBitrate, C2_PARAMKEY_BITRATE)
+                            .withDefault(new C2StreamBitrateInfo::output(0u))
+                            .withFields({C2F(mOutputBitrate, value).any()})
+                            .calculatedAs(
+                                Copy<C2StreamBitrateInfo::output, C2StreamBitrateInfo::input>,
+                                mInputBitrate)
+                            .build());
+                }
+
                 // TODO: more SDK params
             }
         private:
@@ -221,11 +239,19 @@
             std::shared_ptr<C2StreamVendorInt64Info::output> mInt64Output;
             std::shared_ptr<C2PortVendorStringInfo::input> mStringInput;
             std::shared_ptr<C2StreamPixelAspectRatioInfo::output> mPixelAspectRatio;
+            std::shared_ptr<C2StreamBitrateInfo::input> mInputBitrate;
+            std::shared_ptr<C2StreamBitrateInfo::output> mOutputBitrate;
 
             template<typename T>
             static C2R Setter(bool, C2P<T> &) {
                 return C2R::Ok();
             }
+
+            template<typename ME, typename DEP>
+            static C2R Copy(bool, C2P<ME> &me, const C2P<DEP> &dep) {
+                me.set().value = dep.v.value;
+                return C2R::Ok();
+            }
         };
 
         Impl mImpl;
@@ -457,4 +483,97 @@
             << "mInputFormat = " << mConfig.mInputFormat->debugString().c_str();
 }
 
+TEST_F(CCodecConfigTest, DataspaceUpdate) {
+    init(C2Component::DOMAIN_VIDEO, C2Component::KIND_ENCODER, MIMETYPE_VIDEO_AVC);
+
+    ASSERT_EQ(OK, mConfig.initialize(mReflector, mConfigurable));
+    class InputSurfaceStub : public InputSurfaceWrapper {
+    public:
+        ~InputSurfaceStub() override = default;
+        status_t connect(const std::shared_ptr<Codec2Client::Component> &) override {
+            return OK;
+        }
+        void disconnect() override {}
+        status_t start() override { return OK; }
+        status_t signalEndOfInputStream() override { return OK; }
+        status_t configure(Config &) override { return OK; }
+    };
+    mConfig.mInputSurface = std::make_shared<InputSurfaceStub>();
+
+    sp<AMessage> format{new AMessage};
+    format->setInt32(KEY_COLOR_RANGE, COLOR_RANGE_LIMITED);
+    format->setInt32(KEY_COLOR_STANDARD, COLOR_STANDARD_BT709);
+    format->setInt32(KEY_COLOR_TRANSFER, COLOR_TRANSFER_SDR_VIDEO);
+    format->setInt32(KEY_BIT_RATE, 100);
+
+    std::vector<std::unique_ptr<C2Param>> configUpdate;
+    ASSERT_EQ(OK, mConfig.getConfigUpdateFromSdkParams(
+            mConfigurable, format, D::ALL, C2_MAY_BLOCK, &configUpdate));
+    ASSERT_TRUE(mConfig.updateConfiguration(configUpdate, D::ALL));
+
+    int32_t range{0};
+    ASSERT_TRUE(mConfig.mOutputFormat->findInt32(KEY_COLOR_RANGE, &range))
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+    EXPECT_EQ(COLOR_RANGE_LIMITED, range)
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+
+    int32_t standard{0};
+    ASSERT_TRUE(mConfig.mOutputFormat->findInt32(KEY_COLOR_STANDARD, &standard))
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+    EXPECT_EQ(COLOR_STANDARD_BT709, standard)
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+
+    int32_t transfer{0};
+    ASSERT_TRUE(mConfig.mOutputFormat->findInt32(KEY_COLOR_TRANSFER, &transfer))
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+    EXPECT_EQ(COLOR_TRANSFER_SDR_VIDEO, transfer)
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+
+    mConfig.mInputSurface->setDataSpace(HAL_DATASPACE_BT2020_PQ);
+
+    // Dataspace from input surface should override the configured setting
+    mConfig.updateFormats(D::ALL);
+
+    ASSERT_TRUE(mConfig.mOutputFormat->findInt32(KEY_COLOR_RANGE, &range))
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+    EXPECT_EQ(COLOR_RANGE_FULL, range)
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+
+    ASSERT_TRUE(mConfig.mOutputFormat->findInt32(KEY_COLOR_STANDARD, &standard))
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+    EXPECT_EQ(COLOR_STANDARD_BT2020, standard)
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+
+    ASSERT_TRUE(mConfig.mOutputFormat->findInt32(KEY_COLOR_TRANSFER, &transfer))
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+    EXPECT_EQ(COLOR_TRANSFER_ST2084, transfer)
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+
+    // Simulate bitrate update
+    format = new AMessage;
+    format->setInt32(KEY_BIT_RATE, 200);
+    configUpdate.clear();
+    ASSERT_EQ(OK, mConfig.getConfigUpdateFromSdkParams(
+            mConfigurable, format, D::ALL, C2_MAY_BLOCK, &configUpdate));
+    ASSERT_EQ(OK, mConfig.setParameters(mConfigurable, configUpdate, C2_MAY_BLOCK));
+
+    // Color information should remain the same
+    mConfig.updateFormats(D::ALL);
+
+    ASSERT_TRUE(mConfig.mOutputFormat->findInt32(KEY_COLOR_RANGE, &range))
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+    EXPECT_EQ(COLOR_RANGE_FULL, range)
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+
+    ASSERT_TRUE(mConfig.mOutputFormat->findInt32(KEY_COLOR_STANDARD, &standard))
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+    EXPECT_EQ(COLOR_STANDARD_BT2020, standard)
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+
+    ASSERT_TRUE(mConfig.mOutputFormat->findInt32(KEY_COLOR_TRANSFER, &transfer))
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+    EXPECT_EQ(COLOR_TRANSFER_ST2084, transfer)
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+}
+
 } // namespace android
diff --git a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
index bf2a07e..a54af83 100644
--- a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
+++ b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
@@ -121,34 +121,46 @@
     if (view.crop().width != img->mWidth || view.crop().height != img->mHeight) {
         return BAD_VALUE;
     }
+    const uint8_t* src_y = view.data()[0];
+    const uint8_t* src_u = view.data()[1];
+    const uint8_t* src_v = view.data()[2];
+    int32_t src_stride_y = view.layout().planes[0].rowInc;
+    int32_t src_stride_u = view.layout().planes[1].rowInc;
+    int32_t src_stride_v = view.layout().planes[2].rowInc;
+    uint8_t* dst_y = imgBase + img->mPlane[0].mOffset;
+    uint8_t* dst_u = imgBase + img->mPlane[1].mOffset;
+    uint8_t* dst_v = imgBase + img->mPlane[2].mOffset;
+    int32_t dst_stride_y = img->mPlane[0].mRowInc;
+    int32_t dst_stride_u = img->mPlane[1].mRowInc;
+    int32_t dst_stride_v = img->mPlane[2].mRowInc;
+    int width = view.crop().width;
+    int height = view.crop().height;
+
     if ((IsNV12(view) && IsI420(img)) || (IsI420(view) && IsNV12(img))) {
         // Take shortcuts to use libyuv functions between NV12 and I420 conversion.
-        const uint8_t* src_y = view.data()[0];
-        const uint8_t* src_u = view.data()[1];
-        const uint8_t* src_v = view.data()[2];
-        int32_t src_stride_y = view.layout().planes[0].rowInc;
-        int32_t src_stride_u = view.layout().planes[1].rowInc;
-        int32_t src_stride_v = view.layout().planes[2].rowInc;
-        uint8_t* dst_y = imgBase + img->mPlane[0].mOffset;
-        uint8_t* dst_u = imgBase + img->mPlane[1].mOffset;
-        uint8_t* dst_v = imgBase + img->mPlane[2].mOffset;
-        int32_t dst_stride_y = img->mPlane[0].mRowInc;
-        int32_t dst_stride_u = img->mPlane[1].mRowInc;
-        int32_t dst_stride_v = img->mPlane[2].mRowInc;
         if (IsNV12(view) && IsI420(img)) {
             if (!libyuv::NV12ToI420(src_y, src_stride_y, src_u, src_stride_u, dst_y, dst_stride_y,
-                                    dst_u, dst_stride_u, dst_v, dst_stride_v, view.crop().width,
-                                    view.crop().height)) {
+                                    dst_u, dst_stride_u, dst_v, dst_stride_v, width, height)) {
                 return OK;
             }
         } else {
             if (!libyuv::I420ToNV12(src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v,
-                                    dst_y, dst_stride_y, dst_u, dst_stride_u, view.crop().width,
-                                    view.crop().height)) {
+                                    dst_y, dst_stride_y, dst_u, dst_stride_u, width, height)) {
                 return OK;
             }
         }
     }
+    if (IsNV12(view) && IsNV12(img)) {
+        libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
+        libyuv::CopyPlane(src_u, src_stride_u, dst_u, dst_stride_u, width, height / 2);
+        return OK;
+    }
+    if (IsI420(view) && IsI420(img)) {
+        libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
+        libyuv::CopyPlane(src_u, src_stride_u, dst_u, dst_stride_u, width / 2, height / 2);
+        libyuv::CopyPlane(src_v, src_stride_v, dst_v, dst_stride_v, width / 2, height / 2);
+        return OK;
+    }
     return _ImageCopy<true>(view, img, imgBase);
 }
 
@@ -156,34 +168,47 @@
     if (view.crop().width != img->mWidth || view.crop().height != img->mHeight) {
         return BAD_VALUE;
     }
+    const uint8_t* src_y = imgBase + img->mPlane[0].mOffset;
+    const uint8_t* src_u = imgBase + img->mPlane[1].mOffset;
+    const uint8_t* src_v = imgBase + img->mPlane[2].mOffset;
+    int32_t src_stride_y = img->mPlane[0].mRowInc;
+    int32_t src_stride_u = img->mPlane[1].mRowInc;
+    int32_t src_stride_v = img->mPlane[2].mRowInc;
+    uint8_t* dst_y = view.data()[0];
+    uint8_t* dst_u = view.data()[1];
+    uint8_t* dst_v = view.data()[2];
+    int32_t dst_stride_y = view.layout().planes[0].rowInc;
+    int32_t dst_stride_u = view.layout().planes[1].rowInc;
+    int32_t dst_stride_v = view.layout().planes[2].rowInc;
+    int width = view.crop().width;
+    int height = view.crop().height;
     if ((IsNV12(img) && IsI420(view)) || (IsI420(img) && IsNV12(view))) {
         // Take shortcuts to use libyuv functions between NV12 and I420 conversion.
-        const uint8_t* src_y = imgBase + img->mPlane[0].mOffset;
-        const uint8_t* src_u = imgBase + img->mPlane[1].mOffset;
-        const uint8_t* src_v = imgBase + img->mPlane[2].mOffset;
-        int32_t src_stride_y = img->mPlane[0].mRowInc;
-        int32_t src_stride_u = img->mPlane[1].mRowInc;
-        int32_t src_stride_v = img->mPlane[2].mRowInc;
-        uint8_t* dst_y = view.data()[0];
-        uint8_t* dst_u = view.data()[1];
-        uint8_t* dst_v = view.data()[2];
-        int32_t dst_stride_y = view.layout().planes[0].rowInc;
-        int32_t dst_stride_u = view.layout().planes[1].rowInc;
-        int32_t dst_stride_v = view.layout().planes[2].rowInc;
         if (IsNV12(img) && IsI420(view)) {
             if (!libyuv::NV12ToI420(src_y, src_stride_y, src_u, src_stride_u, dst_y, dst_stride_y,
-                                    dst_u, dst_stride_u, dst_v, dst_stride_v, view.width(),
-                                    view.height())) {
+                                    dst_u, dst_stride_u, dst_v, dst_stride_v, width, height)) {
                 return OK;
             }
         } else {
             if (!libyuv::I420ToNV12(src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v,
-                                    dst_y, dst_stride_y, dst_u, dst_stride_u, view.width(),
-                                    view.height())) {
+                                    dst_y, dst_stride_y, dst_u, dst_stride_u, width, height)) {
                 return OK;
             }
         }
     }
+    if (IsNV12(img) && IsNV12(view)) {
+        // For NV12, copy Y and UV plane
+        libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
+        libyuv::CopyPlane(src_u, src_stride_u, dst_u, dst_stride_u, width, height / 2);
+        return OK;
+    }
+    if (IsI420(img) && IsI420(view)) {
+        // For I420, copy Y, U and V plane.
+        libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
+        libyuv::CopyPlane(src_u, src_stride_u, dst_u, dst_stride_u, width / 2, height / 2);
+        libyuv::CopyPlane(src_v, src_stride_v, dst_v, dst_stride_v, width / 2, height / 2);
+        return OK;
+    }
     return _ImageCopy<false>(view, img, imgBase);
 }
 
diff --git a/media/codec2/vndk/Android.bp b/media/codec2/vndk/Android.bp
index 0401c1d..be81c84 100644
--- a/media/codec2/vndk/Android.bp
+++ b/media/codec2/vndk/Android.bp
@@ -36,9 +36,11 @@
         "C2Buffer.cpp",
         "C2Config.cpp",
         "C2DmaBufAllocator.cpp",
+        "C2Fence.cpp",
         "C2PlatformStorePluginLoader.cpp",
         "C2Store.cpp",
         "platform/C2BqBuffer.cpp",
+        "platform/C2SurfaceSyncObj.cpp",
         "types.cpp",
         "util/C2Debug.cpp",
         "util/C2InterfaceHelper.cpp",
diff --git a/media/codec2/vndk/C2AllocatorIon.cpp b/media/codec2/vndk/C2AllocatorIon.cpp
index 85623b8..12f4027 100644
--- a/media/codec2/vndk/C2AllocatorIon.cpp
+++ b/media/codec2/vndk/C2AllocatorIon.cpp
@@ -30,10 +30,15 @@
 #include <C2ErrnoUtils.h>
 #include <C2HandleIonInternal.h>
 
+#include <android-base/properties.h>
+
 namespace android {
 
 namespace {
     constexpr size_t USAGE_LRU_CACHE_SIZE = 1024;
+
+    // max padding after ion/dmabuf allocations in bytes
+    constexpr uint32_t MAX_PADDING = 0x8000; // 32KB
 }
 
 /* size_t <=> int(lo), int(hi) conversions */
@@ -376,14 +381,34 @@
         unsigned heapMask, unsigned flags, C2Allocator::id_t id) {
     int bufferFd = -1;
     ion_user_handle_t buffer = -1;
-    size_t alignedSize = align == 0 ? size : (size + align - 1) & ~(align - 1);
+    // NOTE: read this property directly from the property as this code has to run on
+    // Android Q, but the sysprop was only introduced in Android S.
+    static size_t sPadding =
+        base::GetUintProperty("media.c2.dmabuf.padding", (uint32_t)0, MAX_PADDING);
+    if (sPadding > SIZE_MAX - size) {
+        ALOGD("ion_alloc: size %#zx cannot accommodate padding %#zx", size, sPadding);
+        // use ImplV2 as there is no allocation anyways
+        return new ImplV2(ionFd, size, -1, id, -ENOMEM);
+    }
+
+    size_t allocSize = size + sPadding;
+    if (align) {
+        if (align - 1 > SIZE_MAX - allocSize) {
+            ALOGD("ion_alloc: size %#zx cannot accommodate padding %#zx and alignment %#zx",
+                  size, sPadding, align);
+            // use ImplV2 as there is no allocation anyways
+            return new ImplV2(ionFd, size, -1, id, -ENOMEM);
+        }
+        allocSize += align - 1;
+        allocSize &= ~(align - 1);
+    }
     int ret;
 
     if (ion_is_legacy(ionFd)) {
-        ret = ion_alloc(ionFd, alignedSize, align, heapMask, flags, &buffer);
+        ret = ion_alloc(ionFd, allocSize, align, heapMask, flags, &buffer);
         ALOGV("ion_alloc(ionFd = %d, size = %zu, align = %zu, prot = %d, flags = %d) "
               "returned (%d) ; buffer = %d",
-              ionFd, alignedSize, align, heapMask, flags, ret, buffer);
+              ionFd, allocSize, align, heapMask, flags, ret, buffer);
         if (ret == 0) {
             // get buffer fd for native handle constructor
             ret = ion_share(ionFd, buffer, &bufferFd);
@@ -392,15 +417,15 @@
                 buffer = -1;
             }
         }
-        return new Impl(ionFd, alignedSize, bufferFd, buffer, id, ret);
+        return new Impl(ionFd, size, bufferFd, buffer, id, ret);
 
     } else {
-        ret = ion_alloc_fd(ionFd, alignedSize, align, heapMask, flags, &bufferFd);
+        ret = ion_alloc_fd(ionFd, allocSize, align, heapMask, flags, &bufferFd);
         ALOGV("ion_alloc_fd(ionFd = %d, size = %zu, align = %zu, prot = %d, flags = %d) "
               "returned (%d) ; bufferFd = %d",
-              ionFd, alignedSize, align, heapMask, flags, ret, bufferFd);
+              ionFd, allocSize, align, heapMask, flags, ret, bufferFd);
 
-        return new ImplV2(ionFd, alignedSize, bufferFd, id, ret);
+        return new ImplV2(ionFd, size, bufferFd, id, ret);
     }
 }
 
diff --git a/media/codec2/vndk/C2DmaBufAllocator.cpp b/media/codec2/vndk/C2DmaBufAllocator.cpp
index 750aa31..7c8999b 100644
--- a/media/codec2/vndk/C2DmaBufAllocator.cpp
+++ b/media/codec2/vndk/C2DmaBufAllocator.cpp
@@ -16,11 +16,13 @@
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "C2DmaBufAllocator"
+
 #include <BufferAllocator/BufferAllocator.h>
 #include <C2Buffer.h>
 #include <C2Debug.h>
 #include <C2DmaBufAllocator.h>
 #include <C2ErrnoUtils.h>
+
 #include <linux/ion.h>
 #include <sys/mman.h>
 #include <unistd.h>  // getpagesize, size_t, close, dup
@@ -28,14 +30,15 @@
 
 #include <list>
 
-#ifdef __ANDROID_APEX__
 #include <android-base/properties.h>
-#endif
 
 namespace android {
 
 namespace {
-constexpr size_t USAGE_LRU_CACHE_SIZE = 1024;
+    constexpr size_t USAGE_LRU_CACHE_SIZE = 1024;
+
+    // max padding after ion/dmabuf allocations in bytes
+    constexpr uint32_t MAX_PADDING = 0x8000; // 32KB
 }
 
 /* =========================== BUFFER HANDLE =========================== */
@@ -249,9 +252,23 @@
     int bufferFd = -1;
     int ret = 0;
 
-    bufferFd = alloc.Alloc(heap_name, size, flags);
-    if (bufferFd < 0) ret = bufferFd;
+    // NOTE: read this property directly from the property as this code has to run on
+    // Android Q, but the sysprop was only introduced in Android S.
+    static size_t sPadding =
+        base::GetUintProperty("media.c2.dmabuf.padding", (uint32_t)0, MAX_PADDING);
+    if (sPadding > SIZE_MAX - size) {
+        // size would overflow
+        ALOGD("dmabuf_alloc: size #%zx cannot accommodate padding #%zx", size, sPadding);
+        ret = -ENOMEM;
+    } else {
+        size_t allocSize = size + sPadding;
+        bufferFd = alloc.Alloc(heap_name, allocSize, flags);
+        if (bufferFd < 0) {
+            ret = bufferFd;
+        }
+    }
 
+    // this may be a non-working handle if bufferFd is negative
     mHandle = C2HandleBuf(bufferFd, size);
     mId = id;
     mInit = c2_status_t(c2_map_errno<ENOMEM, EACCES, EINVAL>(ret));
diff --git a/media/codec2/vndk/C2Fence.cpp b/media/codec2/vndk/C2Fence.cpp
new file mode 100644
index 0000000..9c5183e
--- /dev/null
+++ b/media/codec2/vndk/C2Fence.cpp
@@ -0,0 +1,145 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2FenceFactory"
+#include <utils/Log.h>
+
+#include <C2FenceFactory.h>
+#include <C2SurfaceSyncObj.h>
+
+class C2Fence::Impl {
+public:
+    virtual c2_status_t wait(c2_nsecs_t timeoutNs) = 0;
+
+    virtual bool valid() const = 0;
+
+    virtual bool ready() const = 0;
+
+    virtual int fd() const = 0;
+
+    virtual bool isHW() const = 0;
+
+    virtual ~Impl() = default;
+
+    Impl() = default;
+};
+
+c2_status_t C2Fence::wait(c2_nsecs_t timeoutNs) {
+    if (mImpl) {
+        return mImpl->wait(timeoutNs);
+    }
+    // null fence is always signalled.
+    return C2_OK;
+}
+
+bool C2Fence::valid() const {
+    if (mImpl) {
+        return mImpl->valid();
+    }
+    // null fence is always valid.
+    return true;
+}
+
+bool C2Fence::ready() const {
+    if (mImpl) {
+        return mImpl->ready();
+    }
+    // null fence is always signalled.
+    return true;
+}
+
+int C2Fence::fd() const {
+    if (mImpl) {
+        return mImpl->fd();
+    }
+    // null fence does not have fd.
+    return -1;
+}
+
+bool C2Fence::isHW() const {
+    if (mImpl) {
+        return mImpl->isHW();
+    }
+    return false;
+}
+
+/**
+ * Fence implementation for C2BufferQueueBlockPool based block allocation.
+ * The implementation supports all C2Fence interface except fd().
+ */
+class _C2FenceFactory::SurfaceFenceImpl: public C2Fence::Impl {
+public:
+    virtual c2_status_t wait(c2_nsecs_t timeoutNs) {
+        if (mPtr) {
+            return mPtr->waitForChange(mWaitId, timeoutNs);
+        }
+        return C2_OK;
+    }
+
+    virtual bool valid() const {
+        return mPtr;
+    }
+
+    virtual bool ready() const {
+        uint32_t status;
+        if (mPtr) {
+            mPtr->lock();
+            status = mPtr->getWaitIdLocked();
+            mPtr->unlock();
+
+            return status != mWaitId;
+        }
+        return true;
+    }
+
+    virtual int fd() const {
+        // does not support fd, since this is shared mem and futex based
+        return -1;
+    }
+
+    virtual bool isHW() const {
+        return false;
+    }
+
+    virtual ~SurfaceFenceImpl() {};
+
+    SurfaceFenceImpl(std::shared_ptr<C2SurfaceSyncMemory> syncMem, uint32_t waitId) :
+            mSyncMem(syncMem),
+            mPtr(syncMem ? syncMem->mem() : nullptr),
+            mWaitId(syncMem ? waitId : 0) {}
+private:
+    const std::shared_ptr<const C2SurfaceSyncMemory> mSyncMem; // This is for life-cycle guarantee
+    C2SyncVariables *const mPtr;
+    const uint32_t mWaitId;
+};
+
+C2Fence::C2Fence(std::shared_ptr<Impl> impl) : mImpl(impl) {}
+
+C2Fence _C2FenceFactory::CreateSurfaceFence(
+        std::shared_ptr<C2SurfaceSyncMemory> syncMem,
+        uint32_t waitId) {
+    if (syncMem) {
+        C2Fence::Impl *p
+                = new _C2FenceFactory::SurfaceFenceImpl(syncMem, waitId);
+        if (p->valid()) {
+            return C2Fence(std::shared_ptr<C2Fence::Impl>(p));
+        } else {
+            delete p;
+        }
+    }
+    return C2Fence();
+}
diff --git a/media/codec2/vndk/include/C2BqBufferPriv.h b/media/codec2/vndk/include/C2BqBufferPriv.h
index 066f1e1..b2636e9 100644
--- a/media/codec2/vndk/include/C2BqBufferPriv.h
+++ b/media/codec2/vndk/include/C2BqBufferPriv.h
@@ -49,6 +49,14 @@
             C2MemoryUsage usage,
             std::shared_ptr<C2GraphicBlock> *block /* nonnull */) override;
 
+    virtual c2_status_t fetchGraphicBlock(
+            uint32_t width,
+            uint32_t height,
+            uint32_t format,
+            C2MemoryUsage usage,
+            std::shared_ptr<C2GraphicBlock> *block /* nonnull */,
+            C2Fence *fence /* nonnull */) override;
+
     typedef std::function<void(uint64_t producer, int32_t slot, int64_t nsecs)> OnRenderCallback;
 
     /**
@@ -72,6 +80,27 @@
      */
     virtual void configureProducer(const android::sp<HGraphicBufferProducer> &producer);
 
+    /**
+     * Configures an IGBP in order to create blocks. A newly created block is
+     * dequeued from the configured IGBP. Unique Id of IGBP and the slot number of
+     * blocks are passed via native_handle. Managing the IGBP is the responsibility of the caller.
+     * When IGBP is not configured, block will be created via allocator.
+     * Since zero is not used for Unique Id of IGBP, if IGBP is not configured or producer
+     * is configured as nullptr, unique id which is bundled in native_handle is zero.
+     *
+     * \param producer      the IGBP, which will be used to fetch blocks
+     * \param syncMemory    Shared memory for synchronization of allocation & deallocation.
+     * \param bqId          Id of IGBP
+     * \param generationId  Generation Id for rendering output
+     * \param consumerUsage consumerUsage flag of the IGBP
+     */
+    virtual void configureProducer(
+            const android::sp<HGraphicBufferProducer> &producer,
+            native_handle_t *syncMemory,
+            uint64_t bqId,
+            uint32_t generationId,
+            uint64_t consumerUsage);
+
 private:
     const std::shared_ptr<C2Allocator> mAllocator;
     const local_id_t mLocalId;
@@ -82,6 +111,7 @@
     friend struct C2BufferQueueBlockPoolData;
 };
 
+class C2SurfaceSyncMemory;
 
 struct C2BufferQueueBlockPoolData : public _C2BlockPoolData {
 public:
@@ -97,7 +127,8 @@
     // Create a local BlockPoolData.
     C2BufferQueueBlockPoolData(
             uint32_t generation, uint64_t bqId, int32_t bqSlot,
-            const android::sp<HGraphicBufferProducer>& producer);
+            const android::sp<HGraphicBufferProducer>& producer,
+            std::shared_ptr<C2SurfaceSyncMemory>, int noUse);
 
     virtual ~C2BufferQueueBlockPoolData() override;
 
@@ -105,7 +136,8 @@
 
     int migrate(const android::sp<HGraphicBufferProducer>& producer,
                 uint32_t toGeneration, uint64_t toUsage, uint64_t toBqId,
-                android::sp<android::GraphicBuffer>& graphicBuffer, uint32_t oldGeneration);
+                android::sp<android::GraphicBuffer>& graphicBuffer, uint32_t oldGeneration,
+                std::shared_ptr<C2SurfaceSyncMemory> syncMem);
 
 private:
     friend struct _C2BlockFactory;
@@ -113,12 +145,14 @@
     // Methods delegated from _C2BlockFactory.
     void getBufferQueueData(uint32_t* generation, uint64_t* bqId, int32_t* bqSlot) const;
     bool holdBlockFromBufferQueue(const std::shared_ptr<int>& owner,
-                                  const android::sp<HGraphicBufferProducer>& igbp);
+                                  const android::sp<HGraphicBufferProducer>& igbp,
+                                  std::shared_ptr<C2SurfaceSyncMemory> syncMem);
     bool beginTransferBlockToClient();
     bool endTransferBlockToClient(bool transfer);
     bool beginAttachBlockToBufferQueue();
     bool endAttachBlockToBufferQueue(const std::shared_ptr<int>& owner,
                                      const android::sp<HGraphicBufferProducer>& igbp,
+                                     std::shared_ptr<C2SurfaceSyncMemory> syncMem,
                                      uint32_t generation, uint64_t bqId, int32_t bqSlot);
     bool displayBlockToBufferQueue();
 
@@ -141,6 +175,7 @@
     bool mDisplay; // display on remote;
     std::weak_ptr<int> mOwner;
     android::sp<HGraphicBufferProducer> mIgbp;
+    std::shared_ptr<C2SurfaceSyncMemory> mSyncMem;
     mutable std::mutex mLock;
 };
 
diff --git a/media/codec2/vndk/include/C2FenceFactory.h b/media/codec2/vndk/include/C2FenceFactory.h
new file mode 100644
index 0000000..d4bed26
--- /dev/null
+++ b/media/codec2/vndk/include/C2FenceFactory.h
@@ -0,0 +1,44 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef STAGEFRIGHT_CODEC2_FENCE_FACTORY_H_
+#define STAGEFRIGHT_CODEC2_FENCE_FACTORY_H_
+
+
+#include <C2Buffer.h>
+
+class C2SurfaceSyncMemory;
+
+/**
+ * C2Fence implementation factory
+ */
+struct _C2FenceFactory {
+
+    class SurfaceFenceImpl;
+
+    /*
+     * Create C2Fence for BufferQueueBased blockpool.
+     *
+     * \param syncMem           Shared memory object for synchronization between processes.
+     * \param waitId            wait id for tracking status change for C2Fence.
+     */
+    static C2Fence CreateSurfaceFence(
+            std::shared_ptr<C2SurfaceSyncMemory> syncMem,
+            uint32_t waitId);
+};
+
+
+#endif // STAGEFRIGHT_CODEC2_FENCE_FACTORY_H_
diff --git a/media/codec2/vndk/include/C2SurfaceSyncObj.h b/media/codec2/vndk/include/C2SurfaceSyncObj.h
new file mode 100644
index 0000000..16e9a9d
--- /dev/null
+++ b/media/codec2/vndk/include/C2SurfaceSyncObj.h
@@ -0,0 +1,232 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef STAGEFRIGHT_CODEC2_SURFACE_SYNC_OBJ_H_
+#define STAGEFRIGHT_CODEC2_SURFACE_SYNC_OBJ_H_
+
+#include <cutils/native_handle.h>
+#include <memory>
+#include <atomic>
+
+#include <C2Buffer.h>
+
+/**
+ * Futex based lock / wait implementation for sharing output buffer allocation
+ * information between Framework and HAL.
+ */
+struct C2SyncVariables {
+    enum SyncStatus : uint32_t {
+           STATUS_INIT = 0,         // When surface configuration starts.
+           STATUS_ACTIVE = 1,       // When surface configuration finishes.
+                                    // STATUS_INIT -> STATUS_ACTIVE
+           STATUS_SWITCHING = 2,    // When the surface is replaced by a new surface
+                                    // during surface configuration.
+                                    // STATUS_ACTIVE -> STATUS_SWITCHING
+    };
+
+    /**
+     * Lock the memory region
+     */
+    int lock();
+
+    /**
+     * Unlock the memory region
+     */
+    int unlock();
+
+    /**
+     * Set initial dequeued buffer count.
+     *
+     * \param maxDequeueCount           Initial value of # of max dequeued buffer count
+     * \param curDequeueCount           Initial value of # of current dequeued buffer count
+     */
+    void setInitialDequeueCount(int32_t maxDequeueCount, int32_t curDequeueCount);
+
+    /**
+     * Get a waitId which will be used to implement fence.
+     */
+    uint32_t getWaitIdLocked();
+
+    /**
+     * Return whether the upcoming dequeue operation is not blocked.
+     * If it's blocked and waitId is non-null, waitId is returned to be used for waiting.
+     *
+     * \retval false    dequeue operation is blocked now.
+     * \retval true     dequeue operation is possible.
+     */
+    bool isDequeueableLocked(uint32_t *waitId = nullptr);
+
+    /**
+     * Notify a buffer is queued. Return whether the upcoming dequeue operation
+     * is not blocked. If it's blocked and waitId is non-null, waitId is returned
+     * to be used for waiting.
+     *
+     * \retval false    dequeue operation is blocked now.
+     * \retval true     dequeue operation is possible.
+     */
+    bool notifyQueuedLocked(uint32_t *waitId = nullptr);
+
+    /**
+     * Notify a buffer is dequeued.
+     */
+    void notifyDequeuedLocked();
+
+    /**
+     * Set sync status.
+     */
+    void setSyncStatusLocked(SyncStatus status);
+
+    /**
+     * Get sync status.
+     */
+    C2SyncVariables::SyncStatus getSyncStatusLocked();
+
+    /**
+     * Update current max dequeue count.
+     */
+    void updateMaxDequeueCountLocked(int32_t maxDequeueCount);
+
+    /**
+     * Wait until status is no longer equal to waitId, or until timeout.
+     *
+     * \param waitId            internal status for waiting until it is changed.
+     * \param timeoutNs          nanoseconds until timeout.
+     *
+     * \retval C2_TIMEDOUT      change does not happen during waiting.
+     * \retval C2_BAD_VALUE     invalid event waiting.
+     * \retval C2_OK            change was signalled.
+     */
+    c2_status_t waitForChange(uint32_t waitId, c2_nsecs_t timeoutNs);
+
+    C2SyncVariables() {}
+
+private:
+    /**
+     * signal one waiter to wake up.
+     */
+    int signal();
+
+    /**
+     * signal all waiter to wake up.
+     */
+    int broadcast();
+
+    /**
+     * wait for signal or broadcast.
+     */
+    int wait();
+
+    std::atomic<uint32_t> mLock;
+    std::atomic<uint32_t> mCond;
+    int32_t mMaxDequeueCount;
+    int32_t mCurDequeueCount;
+    SyncStatus mStatus;
+};
+
+/**
+ * Shared memory in order to synchronize information for Surface(IGBP)
+ * based output buffer allocation.
+ */
+class C2SurfaceSyncMemory {
+public:
+    /**
+     * Shared memory handle in order to synchronize information for
+     * Surface based output buffer allocation.
+     */
+    struct HandleSyncMem : public native_handle_t {
+        HandleSyncMem(int fd, size_t size) :
+            native_handle_t(cHeader),
+            mFds{fd},
+            mInts{int(size & 0xFFFFFFFF),
+                int((uint64_t(size) >> 32) & 0xFFFFFFFF), kMagic} {}
+
+        /** Returns a file descriptor of the shared memory
+         * \return a file descriptor representing the shared memory
+         */
+        int memFd() const {return mFds.mMem;}
+
+        /** Returns the size of the shared memory */
+        size_t size() const {
+            return size_t(unsigned(mInts.mSizeLo))
+                    | size_t(uint64_t(unsigned(mInts.mSizeHi)) << 32);
+        }
+
+        /** Check whether the native handle is in the form of HandleSyncMem
+         *
+         * \return whether the native handle is compatible
+         */
+        static bool isValid(const native_handle_t * const o);
+
+    protected:
+        struct {
+            int mMem;
+        } mFds;
+        struct {
+            int mSizeLo;
+            int mSizeHi;
+            int mMagic;
+        } mInts;
+    private:
+        enum {
+            kMagic = 'ssm\x00',
+            numFds = sizeof(mFds) / sizeof(int),
+            numInts = sizeof(mInts) / sizeof(int),
+            version = sizeof(native_handle_t)
+        };
+        const static native_handle_t cHeader;
+    };
+
+    /**
+     * Imports a shared memory object from a native handle (the shared memory already exists).
+     * This is usually used after native_handle_t is passed via RPC.
+     *
+     * \param handle        handle representing shared memory for output buffer allocation.
+     */
+    static std::shared_ptr<C2SurfaceSyncMemory> Import(native_handle_t *handle);
+
+    /**
+     * Creates a shared memory object for synchronization of output buffer allocation.
+     * Shared memory creation should be done explicitly.
+     *
+     * \param fd            file descriptor to shared memory
+     * \param size          size of the shared memory
+     */
+    static std::shared_ptr<C2SurfaceSyncMemory> Create(int fd, size_t size);
+
+    /**
+     * Returns a handle representing the shared memory for synchronization of
+     * output buffer allocation.
+     */
+    native_handle_t *handle();
+
+    /**
+     * Returns synchronization object which will provide synchronization primitives.
+     *
+     * \return a ptr to synchronization primitive class
+     */
+    C2SyncVariables *mem();
+
+    ~C2SurfaceSyncMemory();
+
+private:
+    bool mInit;
+    HandleSyncMem *mHandle;
+    C2SyncVariables *mMem;
+
+    C2SurfaceSyncMemory();
+};
+
+#endif // STAGEFRIGHT_CODEC2_SURFACE_SYNC_OBJ_H_
diff --git a/media/codec2/vndk/internal/C2BlockInternal.h b/media/codec2/vndk/internal/C2BlockInternal.h
index 4ae946a..c510fca 100644
--- a/media/codec2/vndk/internal/C2BlockInternal.h
+++ b/media/codec2/vndk/internal/C2BlockInternal.h
@@ -52,6 +52,8 @@
 
 struct C2BufferQueueBlockPoolData;
 
+class C2SurfaceSyncMemory;
+
 /**
  * Internal only interface for creating blocks by block pool/buffer passing implementations.
  *
@@ -279,6 +281,8 @@
      *                 anymore.
      * \param igbp     \c IGraphicBufferProducer instance to be assigned to the
      *                 block. This is not needed when the block is local.
+     * \param syncMem  Memory block which will support synchronization
+     *                 between Framework and HAL.
      *
      * \return The previous held status.
      */
@@ -287,7 +291,8 @@
             const std::shared_ptr<_C2BlockPoolData>& poolData,
             const std::shared_ptr<int>& owner,
             const ::android::sp<::android::hardware::graphics::bufferqueue::
-                                V2_0::IGraphicBufferProducer>& igbp = nullptr);
+                                V2_0::IGraphicBufferProducer>& igbp = nullptr,
+            std::shared_ptr<C2SurfaceSyncMemory> syncMem = nullptr);
 
     /**
      * Prepare a block to be transferred to other process. This blocks
@@ -358,6 +363,7 @@
             const std::shared_ptr<int>& owner,
             const ::android::sp<::android::hardware::graphics::bufferqueue::
                                 V2_0::IGraphicBufferProducer>& igbp,
+            std::shared_ptr<C2SurfaceSyncMemory>,
             uint32_t generation,
             uint64_t bqId,
             int32_t bqSlot);
diff --git a/media/codec2/vndk/platform/C2BqBuffer.cpp b/media/codec2/vndk/platform/C2BqBuffer.cpp
index 3f6fa7d..8b53b24 100644
--- a/media/codec2/vndk/platform/C2BqBuffer.cpp
+++ b/media/codec2/vndk/platform/C2BqBuffer.cpp
@@ -29,6 +29,8 @@
 #include <C2AllocatorGralloc.h>
 #include <C2BqBufferPriv.h>
 #include <C2BlockInternal.h>
+#include <C2FenceFactory.h>
+#include <C2SurfaceSyncObj.h>
 
 #include <list>
 #include <map>
@@ -69,10 +71,11 @@
 bool _C2BlockFactory::HoldBlockFromBufferQueue(
         const std::shared_ptr<_C2BlockPoolData>& data,
         const std::shared_ptr<int>& owner,
-        const sp<HGraphicBufferProducer>& igbp) {
+        const sp<HGraphicBufferProducer>& igbp,
+        std::shared_ptr<C2SurfaceSyncMemory> syncMem) {
     const std::shared_ptr<C2BufferQueueBlockPoolData> poolData =
             std::static_pointer_cast<C2BufferQueueBlockPoolData>(data);
-    return poolData->holdBlockFromBufferQueue(owner, igbp);
+    return poolData->holdBlockFromBufferQueue(owner, igbp, syncMem);
 }
 
 bool _C2BlockFactory::BeginTransferBlockToClient(
@@ -102,12 +105,13 @@
         const std::shared_ptr<_C2BlockPoolData>& data,
         const std::shared_ptr<int>& owner,
         const sp<HGraphicBufferProducer>& igbp,
+        std::shared_ptr<C2SurfaceSyncMemory> syncMem,
         uint32_t generation,
         uint64_t bqId,
         int32_t bqSlot) {
     const std::shared_ptr<C2BufferQueueBlockPoolData> poolData =
             std::static_pointer_cast<C2BufferQueueBlockPoolData>(data);
-    return poolData->endAttachBlockToBufferQueue(owner, igbp, generation, bqId, bqSlot);
+    return poolData->endAttachBlockToBufferQueue(owner, igbp, syncMem, generation, bqId, bqSlot);
 }
 
 bool _C2BlockFactory::DisplayBlockToBufferQueue(
@@ -231,12 +235,58 @@
 class C2BufferQueueBlockPool::Impl
         : public std::enable_shared_from_this<C2BufferQueueBlockPool::Impl> {
 private:
+    c2_status_t dequeueBuffer(
+            uint32_t width,
+            uint32_t height,
+            uint32_t format,
+            C2AndroidMemoryUsage androidUsage,
+            int *slot, bool *needsRealloc, sp<Fence> *fence) {
+        status_t status{};
+        using Input = HGraphicBufferProducer::DequeueBufferInput;
+        using Output = HGraphicBufferProducer::DequeueBufferOutput;
+        Return<void> transResult = mProducer->dequeueBuffer(
+                Input{
+                    width,
+                    height,
+                    format,
+                    androidUsage.asGrallocUsage()},
+                [&status, slot, needsRealloc,
+                 fence](HStatus hStatus,
+                         int32_t hSlot,
+                         Output const& hOutput) {
+                    *slot = static_cast<int>(hSlot);
+                    if (!h2b(hStatus, &status) ||
+                            !h2b(hOutput.fence, fence)) {
+                        status = ::android::BAD_VALUE;
+                    } else {
+                        *needsRealloc =
+                                hOutput.bufferNeedsReallocation;
+                    }
+                });
+        if (!transResult.isOk() || status != android::OK) {
+            if (transResult.isOk()) {
+                ++mDqFailure;
+                if (status == android::INVALID_OPERATION ||
+                    status == android::TIMED_OUT ||
+                    status == android::WOULD_BLOCK) {
+                    // Dequeue buffer is blocked temporarily. Retrying is
+                    // required.
+                    return C2_BLOCKING;
+                }
+            }
+            ALOGD("cannot dequeue buffer %d", status);
+            return C2_BAD_VALUE;
+        }
+        return C2_OK;
+    }
+
     c2_status_t fetchFromIgbp_l(
             uint32_t width,
             uint32_t height,
             uint32_t format,
             C2MemoryUsage usage,
-            std::shared_ptr<C2GraphicBlock> *block /* nonnull */) {
+            std::shared_ptr<C2GraphicBlock> *block /* nonnull */,
+            C2Fence *c2Fence) {
         // We have an IGBP now.
         C2AndroidMemoryUsage androidUsage = usage;
         status_t status{};
@@ -245,41 +295,39 @@
         sp<Fence> fence = new Fence();
         ALOGV("tries to dequeue buffer");
 
+        C2SyncVariables *syncVar = mSyncMem ? mSyncMem->mem(): nullptr;
         { // Call dequeueBuffer().
-            using Input = HGraphicBufferProducer::DequeueBufferInput;
-            using Output = HGraphicBufferProducer::DequeueBufferOutput;
-            Return<void> transResult = mProducer->dequeueBuffer(
-                    Input{
-                        width,
-                        height,
-                        format,
-                        androidUsage.asGrallocUsage()},
-                    [&status, &slot, &bufferNeedsReallocation,
-                     &fence](HStatus hStatus,
-                             int32_t hSlot,
-                             Output const& hOutput) {
-                        slot = static_cast<int>(hSlot);
-                        if (!h2b(hStatus, &status) ||
-                                !h2b(hOutput.fence, &fence)) {
-                            status = ::android::BAD_VALUE;
-                        } else {
-                            bufferNeedsReallocation =
-                                    hOutput.bufferNeedsReallocation;
-                        }
-                    });
-            if (!transResult.isOk() || status != android::OK) {
-                if (transResult.isOk()) {
-                    ++mDqFailure;
-                    if (status == android::INVALID_OPERATION ||
-                        status == android::TIMED_OUT ||
-                        status == android::WOULD_BLOCK) {
-                        // Dequeue buffer is blocked temporarily. Retrying is
-                        // required.
-                        return C2_BLOCKING;
+            c2_status_t c2Status;
+            if (syncVar) {
+                uint32_t waitId;
+                syncVar->lock();
+                if (!syncVar->isDequeueableLocked(&waitId)) {
+                    syncVar->unlock();
+                    if (c2Fence) {
+                        *c2Fence = _C2FenceFactory::CreateSurfaceFence(mSyncMem, waitId);
                     }
+                    return C2_BLOCKING;
                 }
-                ALOGD("cannot dequeue buffer %d", status);
-                return C2_BAD_VALUE;
+                if (syncVar->getSyncStatusLocked() != C2SyncVariables::STATUS_ACTIVE) {
+                    waitId = syncVar->getWaitIdLocked();
+                    syncVar->unlock();
+                    if (c2Fence) {
+                        *c2Fence = _C2FenceFactory::CreateSurfaceFence(mSyncMem, waitId);
+                    }
+                    return C2_BLOCKING;
+                }
+                c2Status = dequeueBuffer(width, height, format, androidUsage,
+                              &slot, &bufferNeedsReallocation, &fence);
+                if (c2Status == C2_OK) {
+                    syncVar->notifyDequeuedLocked();
+                }
+                syncVar->unlock();
+            } else {
+                c2Status = dequeueBuffer(width, height, format, usage,
+                              &slot, &bufferNeedsReallocation, &fence);
+            }
+            if (c2Status != C2_OK) {
+                return c2Status;
             }
             mDqFailure = 0;
             mLastDqTs = getTimestampNow();
@@ -290,18 +338,41 @@
             return C2_BAD_VALUE;
         }
         ALOGV("dequeued a buffer successfully");
+        bool dequeueable = false;
+        uint32_t waitId;
         if (fence) {
             static constexpr int kFenceWaitTimeMs = 10;
 
             status_t status = fence->wait(kFenceWaitTimeMs);
             if (status == -ETIME) {
                 // fence is not signalled yet.
-                (void)mProducer->cancelBuffer(slot, hFenceWrapper.getHandle()).isOk();
+                if (syncVar) {
+                    syncVar->lock();
+                    (void)mProducer->cancelBuffer(slot, hFenceWrapper.getHandle()).isOk();
+                    dequeueable = syncVar->notifyQueuedLocked(&waitId);
+                    syncVar->unlock();
+                    if (c2Fence) {
+                        *c2Fence = dequeueable ? C2Fence() :
+                                _C2FenceFactory::CreateSurfaceFence(mSyncMem, waitId);
+                    }
+                } else {
+                    (void)mProducer->cancelBuffer(slot, hFenceWrapper.getHandle()).isOk();
+                }
                 return C2_BLOCKING;
             }
             if (status != android::NO_ERROR) {
                 ALOGD("buffer fence wait error %d", status);
-                (void)mProducer->cancelBuffer(slot, hFenceWrapper.getHandle()).isOk();
+                if (syncVar) {
+                    syncVar->lock();
+                    (void)mProducer->cancelBuffer(slot, hFenceWrapper.getHandle()).isOk();
+                    syncVar->notifyQueuedLocked();
+                    syncVar->unlock();
+                    if (c2Fence) {
+                        *c2Fence = C2Fence();
+                    }
+                } else {
+                    (void)mProducer->cancelBuffer(slot, hFenceWrapper.getHandle()).isOk();
+                }
                 return C2_BAD_VALUE;
             } else if (mRenderCallback) {
                 nsecs_t signalTime = fence->getSignalTime();
@@ -341,7 +412,17 @@
                 return C2_BAD_VALUE;
             } else if (status != android::NO_ERROR) {
                 slotBuffer.clear();
-                (void)mProducer->cancelBuffer(slot, hFenceWrapper.getHandle()).isOk();
+                if (syncVar) {
+                    syncVar->lock();
+                    (void)mProducer->cancelBuffer(slot, hFenceWrapper.getHandle()).isOk();
+                    syncVar->notifyQueuedLocked();
+                    syncVar->unlock();
+                    if (c2Fence) {
+                        *c2Fence = C2Fence();
+                    }
+                } else {
+                    (void)mProducer->cancelBuffer(slot, hFenceWrapper.getHandle()).isOk();
+                }
                 return C2_BAD_VALUE;
             }
             if (mGeneration == 0) {
@@ -372,14 +453,28 @@
                         std::make_shared<C2BufferQueueBlockPoolData>(
                                 slotBuffer->getGenerationNumber(),
                                 mProducerId, slot,
-                                mProducer);
+                                mProducer, mSyncMem, 0);
                 mPoolDatas[slot] = poolData;
                 *block = _C2BlockFactory::CreateGraphicBlock(alloc, poolData);
                 return C2_OK;
             }
             // Block was not created. call requestBuffer# again next time.
             slotBuffer.clear();
-            (void)mProducer->cancelBuffer(slot, hFenceWrapper.getHandle()).isOk();
+            if (syncVar) {
+                syncVar->lock();
+                (void)mProducer->cancelBuffer(slot, hFenceWrapper.getHandle()).isOk();
+                syncVar->notifyQueuedLocked();
+                syncVar->unlock();
+                if (c2Fence) {
+                    *c2Fence = C2Fence();
+                }
+            } else {
+                (void)mProducer->cancelBuffer(slot, hFenceWrapper.getHandle()).isOk();
+            }
+            return C2_BAD_VALUE;
+        }
+        if (c2Fence) {
+            *c2Fence = C2Fence();
         }
         return C2_BAD_VALUE;
     }
@@ -409,7 +504,8 @@
             uint32_t height,
             uint32_t format,
             C2MemoryUsage usage,
-            std::shared_ptr<C2GraphicBlock> *block /* nonnull */) {
+            std::shared_ptr<C2GraphicBlock> *block /* nonnull */,
+            C2Fence *fence) {
         block->reset();
         if (mInit != C2_OK) {
             return mInit;
@@ -440,17 +536,19 @@
             }
             std::shared_ptr<C2BufferQueueBlockPoolData> poolData =
                     std::make_shared<C2BufferQueueBlockPoolData>(
-                            0, (uint64_t)0, ~0, nullptr);
+                            0, (uint64_t)0, ~0, nullptr, nullptr, 0);
             *block = _C2BlockFactory::CreateGraphicBlock(alloc, poolData);
             ALOGV("allocated a buffer successfully");
 
             return C2_OK;
         }
-        c2_status_t status = fetchFromIgbp_l(width, height, format, usage, block);
+        c2_status_t status = fetchFromIgbp_l(width, height, format, usage, block, fence);
         if (status == C2_BLOCKING) {
             lock.unlock();
-            // in order not to drain cpu from component's spinning
-            ::usleep(kMaxIgbpRetryDelayUs);
+            if (!fence) {
+                // in order not to drain cpu from component's spinning
+                ::usleep(kMaxIgbpRetryDelayUs);
+            }
         }
         return status;
     }
@@ -460,11 +558,12 @@
         mRenderCallback = renderCallback;
     }
 
+    /* This is for Old HAL request for compatibility */
     void configureProducer(const sp<HGraphicBufferProducer> &producer) {
         uint64_t producerId = 0;
         uint32_t generation = 0;
         uint64_t usage = 0;
-        bool haveGeneration = false;
+        bool bqInformation = false;
         if (producer) {
             Return<uint64_t> transResult = producer->getUniqueId();
             if (!transResult.isOk()) {
@@ -472,14 +571,32 @@
                 return;
             }
             producerId = static_cast<uint64_t>(transResult);
-            // TODO: provide gneration number from parameter.
-            haveGeneration = getGenerationNumberAndUsage(producer, &generation, &usage);
-            if (!haveGeneration) {
+            bqInformation = getGenerationNumberAndUsage(producer, &generation, &usage);
+            if (!bqInformation) {
                 ALOGW("get generationNumber failed %llu",
                       (unsigned long long)producerId);
             }
         }
+        configureProducer(producer, nullptr, producerId, generation, usage, bqInformation);
+    }
+
+    void configureProducer(const sp<HGraphicBufferProducer> &producer,
+                           native_handle_t *syncHandle,
+                           uint64_t producerId,
+                           uint32_t generation,
+                           uint64_t usage,
+                           bool bqInformation) {
+        std::shared_ptr<C2SurfaceSyncMemory> c2SyncMem;
+        if (syncHandle) {
+            if (!producer) {
+                native_handle_close(syncHandle);
+                native_handle_delete(syncHandle);
+            } else {
+                c2SyncMem = C2SurfaceSyncMemory::Import(syncHandle);
+            }
+        }
         int migrated = 0;
+        std::shared_ptr<C2SurfaceSyncMemory> oldMem;
         // poolDatas dtor should not be called during lock is held.
         std::shared_ptr<C2BufferQueueBlockPoolData>
                 poolDatas[NUM_BUFFER_SLOTS];
@@ -499,22 +616,30 @@
             if (producer) {
                 mProducer = producer;
                 mProducerId = producerId;
-                mGeneration = haveGeneration ? generation : 0;
+                mGeneration = bqInformation ? generation : 0;
             } else {
                 mProducer = nullptr;
                 mProducerId = 0;
                 mGeneration = 0;
                 ALOGW("invalid producer producer(%d), generation(%d)",
-                      (bool)producer, haveGeneration);
+                      (bool)producer, bqInformation);
             }
-            if (mProducer && haveGeneration) { // migrate buffers
+            oldMem = mSyncMem; // prevent destruction while lock is held.
+            mSyncMem = c2SyncMem;
+            C2SyncVariables *syncVar = mSyncMem ? mSyncMem->mem() : nullptr;
+            if (syncVar) {
+                syncVar->lock();
+                syncVar->setSyncStatusLocked(C2SyncVariables::STATUS_ACTIVE);
+                syncVar->unlock();
+            }
+            if (mProducer && bqInformation) { // migrate buffers
                 for (int i = 0; i < NUM_BUFFER_SLOTS; ++i) {
                     std::shared_ptr<C2BufferQueueBlockPoolData> data =
                             mPoolDatas[i].lock();
                     if (data) {
                         int slot = data->migrate(
                                 mProducer, generation, usage,
-                                producerId, mBuffers[i], oldGeneration);
+                                producerId, mBuffers[i], oldGeneration, mSyncMem);
                         if (slot >= 0) {
                             buffers[slot] = mBuffers[i];
                             poolDatas[slot] = data;
@@ -528,7 +653,7 @@
                 mPoolDatas[i] = poolDatas[i];
             }
         }
-        if (producer && haveGeneration) {
+        if (producer && bqInformation) {
             ALOGD("local generation change %u , "
                   "bqId: %llu migrated buffers # %d",
                   generation, (unsigned long long)producerId, migrated);
@@ -555,6 +680,8 @@
 
     sp<GraphicBuffer> mBuffers[NUM_BUFFER_SLOTS];
     std::weak_ptr<C2BufferQueueBlockPoolData> mPoolDatas[NUM_BUFFER_SLOTS];
+
+    std::shared_ptr<C2SurfaceSyncMemory> mSyncMem;
 };
 
 C2BufferQueueBlockPoolData::C2BufferQueueBlockPoolData(
@@ -570,11 +697,14 @@
 
 C2BufferQueueBlockPoolData::C2BufferQueueBlockPoolData(
         uint32_t generation, uint64_t bqId, int32_t bqSlot,
-        const android::sp<HGraphicBufferProducer>& producer) :
+        const android::sp<HGraphicBufferProducer>& producer,
+        std::shared_ptr<C2SurfaceSyncMemory> syncMem, int noUse) :
         mLocal(true), mHeld(true),
         mGeneration(generation), mBqId(bqId), mBqSlot(bqSlot),
         mCurrentGeneration(generation), mCurrentBqId(bqId),
-        mTransfer(false), mAttach(false), mDisplay(false), mIgbp(producer) {
+        mTransfer(false), mAttach(false), mDisplay(false),
+        mIgbp(producer), mSyncMem(syncMem) {
+            (void)noUse;
 }
 
 C2BufferQueueBlockPoolData::~C2BufferQueueBlockPoolData() {
@@ -584,10 +714,30 @@
 
     if (mLocal) {
         if (mGeneration == mCurrentGeneration && mBqId == mCurrentBqId) {
-            mIgbp->cancelBuffer(mBqSlot, hidl_handle{}).isOk();
+            C2SyncVariables *syncVar = mSyncMem ? mSyncMem->mem() : nullptr;
+            if (syncVar) {
+                syncVar->lock();
+                if (syncVar->getSyncStatusLocked() == C2SyncVariables::STATUS_ACTIVE) {
+                    mIgbp->cancelBuffer(mBqSlot, hidl_handle{}).isOk();
+                    syncVar->notifyQueuedLocked();
+                }
+                syncVar->unlock();
+            } else {
+                mIgbp->cancelBuffer(mBqSlot, hidl_handle{}).isOk();
+            }
         }
     } else if (!mOwner.expired()) {
-        mIgbp->cancelBuffer(mBqSlot, hidl_handle{}).isOk();
+        C2SyncVariables *syncVar = mSyncMem ? mSyncMem->mem() : nullptr;
+        if (syncVar) {
+            syncVar->lock();
+            if (syncVar->getSyncStatusLocked() != C2SyncVariables::STATUS_SWITCHING) {
+                mIgbp->cancelBuffer(mBqSlot, hidl_handle{}).isOk();
+                syncVar->notifyQueuedLocked();
+            }
+            syncVar->unlock();
+        } else {
+            mIgbp->cancelBuffer(mBqSlot, hidl_handle{}).isOk();
+        }
     }
 }
 
@@ -598,7 +748,8 @@
 int C2BufferQueueBlockPoolData::migrate(
         const sp<HGraphicBufferProducer>& producer,
         uint32_t toGeneration, uint64_t toUsage, uint64_t toBqId,
-        sp<GraphicBuffer>& graphicBuffer, uint32_t oldGeneration) {
+        sp<GraphicBuffer>& graphicBuffer, uint32_t oldGeneration,
+        std::shared_ptr<C2SurfaceSyncMemory> syncMem) {
     std::scoped_lock<std::mutex> l(mLock);
 
     mCurrentBqId = toBqId;
@@ -678,6 +829,14 @@
     mGeneration = toGeneration;
     mBqId = toBqId;
     mBqSlot = slot;
+    mSyncMem = syncMem;
+
+    C2SyncVariables *syncVar = syncMem ? syncMem->mem() : nullptr;
+    if (syncVar) {
+        syncVar->lock();
+        syncVar->notifyDequeuedLocked();
+        syncVar->unlock();
+    }
     return slot;
 }
 
@@ -697,11 +856,13 @@
 
 bool C2BufferQueueBlockPoolData::holdBlockFromBufferQueue(
         const std::shared_ptr<int>& owner,
-        const sp<HGraphicBufferProducer>& igbp) {
+        const sp<HGraphicBufferProducer>& igbp,
+        std::shared_ptr<C2SurfaceSyncMemory> syncMem) {
     std::scoped_lock<std::mutex> lock(mLock);
     if (!mLocal) {
         mOwner = owner;
         mIgbp = igbp;
+        mSyncMem = syncMem;
     }
     if (mHeld) {
         return false;
@@ -741,6 +902,7 @@
 bool C2BufferQueueBlockPoolData::endAttachBlockToBufferQueue(
         const std::shared_ptr<int>& owner,
         const sp<HGraphicBufferProducer>& igbp,
+        std::shared_ptr<C2SurfaceSyncMemory> syncMem,
         uint32_t generation,
         uint64_t bqId,
         int32_t bqSlot) {
@@ -757,6 +919,7 @@
     mHeld = true;
     mOwner = owner;
     mIgbp = igbp;
+    mSyncMem = syncMem;
     mGeneration = generation;
     mBqId = bqId;
     mBqSlot = bqSlot;
@@ -792,7 +955,20 @@
         C2MemoryUsage usage,
         std::shared_ptr<C2GraphicBlock> *block /* nonnull */) {
     if (mImpl) {
-        return mImpl->fetchGraphicBlock(width, height, format, usage, block);
+        return mImpl->fetchGraphicBlock(width, height, format, usage, block, nullptr);
+    }
+    return C2_CORRUPTED;
+}
+
+c2_status_t C2BufferQueueBlockPool::fetchGraphicBlock(
+        uint32_t width,
+        uint32_t height,
+        uint32_t format,
+        C2MemoryUsage usage,
+        std::shared_ptr<C2GraphicBlock> *block /* nonnull */,
+        C2Fence *fence /* nonnull */) {
+    if (mImpl) {
+        return mImpl->fetchGraphicBlock(width, height, format, usage, block, fence);
     }
     return C2_CORRUPTED;
 }
@@ -803,6 +979,18 @@
     }
 }
 
+void C2BufferQueueBlockPool::configureProducer(
+        const sp<HGraphicBufferProducer> &producer,
+        native_handle_t *syncMemory,
+        uint64_t bqId,
+        uint32_t generationId,
+        uint64_t consumerUsage) {
+    if (mImpl) {
+        mImpl->configureProducer(
+               producer, syncMemory, bqId, generationId, consumerUsage, true);
+    }
+}
+
 void C2BufferQueueBlockPool::setRenderCallback(const OnRenderCallback &renderCallback) {
     if (mImpl) {
         mImpl->setRenderCallback(renderCallback);
diff --git a/media/codec2/vndk/platform/C2SurfaceSyncObj.cpp b/media/codec2/vndk/platform/C2SurfaceSyncObj.cpp
new file mode 100644
index 0000000..587992e
--- /dev/null
+++ b/media/codec2/vndk/platform/C2SurfaceSyncObj.cpp
@@ -0,0 +1,265 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2SurfaceSyncObj"
+#include <limits.h>
+#include <linux/futex.h>
+#include <sys/mman.h>
+#include <sys/syscall.h>
+#include <sys/time.h>
+#include <utils/Log.h>
+
+#include <chrono>
+#include <C2SurfaceSyncObj.h>
+
+const native_handle_t C2SurfaceSyncMemory::HandleSyncMem::cHeader = {
+    C2SurfaceSyncMemory::HandleSyncMem::version,
+    C2SurfaceSyncMemory::HandleSyncMem::numFds,
+    C2SurfaceSyncMemory::HandleSyncMem::numInts,
+    {}
+};
+
+bool C2SurfaceSyncMemory::HandleSyncMem::isValid(const native_handle_t * const o) {
+    if (!o || memcmp(o, &cHeader, sizeof(cHeader))) {
+        return false;
+    }
+
+    const HandleSyncMem *other = static_cast<const HandleSyncMem*>(o);
+    return other->mInts.mMagic == kMagic;
+}
+
+C2SurfaceSyncMemory::C2SurfaceSyncMemory()
+    : mInit(false), mHandle(nullptr), mMem(nullptr) {}
+
+C2SurfaceSyncMemory::~C2SurfaceSyncMemory() {
+    if (mInit) {
+        if (mMem) {
+            munmap(static_cast<void *>(mMem), mHandle->size());
+        }
+        if (mHandle) {
+            native_handle_close(mHandle);
+            native_handle_delete(mHandle);
+        }
+    }
+}
+
+std::shared_ptr<C2SurfaceSyncMemory> C2SurfaceSyncMemory::Import(
+        native_handle_t *handle) {
+    if (!HandleSyncMem::isValid(handle)) {
+        return nullptr;
+    }
+
+    HandleSyncMem *o = static_cast<HandleSyncMem*>(handle);
+    void *ptr = mmap(NULL, o->size(), PROT_READ | PROT_WRITE, MAP_SHARED, o->memFd(), 0);
+
+    if (ptr == MAP_FAILED) {
+        native_handle_close(handle);
+        native_handle_delete(handle);
+        return nullptr;
+    }
+
+    std::shared_ptr<C2SurfaceSyncMemory> syncMem(new C2SurfaceSyncMemory);
+    syncMem->mInit = true;
+    syncMem->mHandle = o;
+    syncMem->mMem = static_cast<C2SyncVariables*>(ptr);
+    return syncMem;
+}
+
+std::shared_ptr<C2SurfaceSyncMemory> C2SurfaceSyncMemory::Create(int fd, size_t size) {
+    if (fd < 0 || size == 0) {
+        return nullptr;
+    }
+    HandleSyncMem *handle = new HandleSyncMem(fd, size);
+
+    void *ptr = mmap(NULL, size, PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0);
+    if (ptr == MAP_FAILED) {
+        native_handle_close(handle);
+        native_handle_delete(handle);
+        return nullptr;
+    }
+    memset(ptr, 0, size);
+
+    std::shared_ptr<C2SurfaceSyncMemory> syncMem(new C2SurfaceSyncMemory);
+    syncMem->mInit = true;
+    syncMem->mHandle = handle;
+    syncMem->mMem = static_cast<C2SyncVariables*>(ptr);
+    return syncMem;
+}
+
+native_handle_t *C2SurfaceSyncMemory::handle() {
+    return !mInit ? nullptr : mHandle;
+}
+
+C2SyncVariables *C2SurfaceSyncMemory::mem() {
+    return !mInit ? nullptr : mMem;
+}
+
+namespace {
+    constexpr int kSpinNumForLock = 100;
+    constexpr int kSpinNumForUnlock = 200;
+
+    enum : uint32_t {
+        FUTEX_UNLOCKED = 0,
+        FUTEX_LOCKED_UNCONTENDED = 1,  // user-space locking
+        FUTEX_LOCKED_CONTENDED = 2,    // futex locking
+    };
+}
+
+int C2SyncVariables::lock() {
+    uint32_t old;
+    for (int i = 0; i < kSpinNumForLock; i++) {
+        old = 0;
+        if (mLock.compare_exchange_strong(old, FUTEX_LOCKED_UNCONTENDED)) {
+            return 0;
+        }
+        sched_yield();
+    }
+
+    if (old == FUTEX_LOCKED_UNCONTENDED)
+        old = mLock.exchange(FUTEX_LOCKED_CONTENDED);
+
+    while (old) {
+        (void) syscall(__NR_futex, &mLock, FUTEX_WAIT, FUTEX_LOCKED_CONTENDED, NULL, NULL, 0);
+        old = mLock.exchange(FUTEX_LOCKED_CONTENDED);
+    }
+    return 0;
+}
+
+int C2SyncVariables::unlock() {
+    if (mLock.exchange(FUTEX_UNLOCKED) == FUTEX_LOCKED_UNCONTENDED) return 0;
+
+    for (int i = 0; i < kSpinNumForUnlock; i++) {
+        if (mLock.load()) {
+            uint32_t old = FUTEX_LOCKED_UNCONTENDED;
+            mLock.compare_exchange_strong(old, FUTEX_LOCKED_CONTENDED);
+            if (old) {
+                return 0;
+            }
+        }
+        sched_yield();
+    }
+
+    (void) syscall(__NR_futex, &mLock, FUTEX_WAKE, 1, NULL, NULL, 0);
+    return 0;
+}
+
+void C2SyncVariables::setInitialDequeueCount(
+        int32_t maxDequeueCount, int32_t curDequeueCount) {
+    lock();
+    mMaxDequeueCount = maxDequeueCount;
+    mCurDequeueCount = curDequeueCount;
+    unlock();
+}
+
+uint32_t C2SyncVariables::getWaitIdLocked() {
+    return mCond.load();
+}
+
+bool C2SyncVariables::isDequeueableLocked(uint32_t *waitId) {
+    if (mMaxDequeueCount <= mCurDequeueCount) {
+        if (waitId) {
+            *waitId = getWaitIdLocked();
+        }
+        return false;
+    }
+    return true;
+}
+
+bool C2SyncVariables::notifyQueuedLocked(uint32_t *waitId) {
+    // Note: thundering herds may occur. Edge-triggered signalling.
+    // But one waiter is guaranteed to dequeue. Others may wait again.
+    // Minimize futex syscalls (traps) for the main use case (the one-waiter case).
+    if (mMaxDequeueCount == mCurDequeueCount--) {
+        broadcast();
+        return true;
+    }
+
+    if (mCurDequeueCount >= mMaxDequeueCount) {
+        if (waitId) {
+            *waitId = getWaitIdLocked();
+        }
+        ALOGV("dequeue blocked %d/%d", mCurDequeueCount, mMaxDequeueCount);
+        return false;
+    }
+    return true;
+}
+
+void C2SyncVariables::notifyDequeuedLocked() {
+    mCurDequeueCount++;
+    ALOGV("dequeue successful %d/%d", mCurDequeueCount, mMaxDequeueCount);
+}
+
+void C2SyncVariables::setSyncStatusLocked(SyncStatus status) {
+    mStatus = status;
+    if (mStatus == STATUS_ACTIVE) {
+        broadcast();
+    }
+}
+
+C2SyncVariables::SyncStatus C2SyncVariables::getSyncStatusLocked() {
+    return mStatus;
+}
+
+void C2SyncVariables::updateMaxDequeueCountLocked(int32_t maxDequeueCount) {
+    mMaxDequeueCount = maxDequeueCount;
+    if (mStatus == STATUS_ACTIVE) {
+        broadcast();
+    }
+}
+
+c2_status_t C2SyncVariables::waitForChange(uint32_t waitId, c2_nsecs_t timeoutNs) {
+    if (timeoutNs < 0) {
+        timeoutNs = 0;
+    }
+    struct timespec tv;
+    tv.tv_sec = timeoutNs / 1000000000;
+    tv.tv_nsec = timeoutNs % 1000000000;
+
+    int ret =  syscall(__NR_futex, &mCond, FUTEX_WAIT, waitId, &tv, NULL, 0);
+    if (ret == 0 || ret == EAGAIN) {
+        return C2_OK;
+    }
+    if (ret == EINTR || ret == ETIMEDOUT) {
+        return C2_TIMED_OUT;
+    }
+    return C2_BAD_VALUE;
+}
+
+int C2SyncVariables::signal() {
+    mCond++;
+
+    (void) syscall(__NR_futex, &mCond, FUTEX_WAKE, 1, NULL, NULL, 0);
+    return 0;
+}
+
+int C2SyncVariables::broadcast() {
+    mCond++;
+
+    (void) syscall(__NR_futex, &mCond, FUTEX_REQUEUE, 1, (void *)INT_MAX, &mLock, 0);
+    return 0;
+}
+
+int C2SyncVariables::wait() {
+    uint32_t old = mCond.load();
+    unlock();
+
+    (void) syscall(__NR_futex, &mCond, FUTEX_WAIT, old, NULL, NULL, 0);
+    while (mLock.exchange(FUTEX_LOCKED_CONTENDED)) {
+        (void) syscall(__NR_futex, &mLock, FUTEX_WAIT, FUTEX_LOCKED_CONTENDED, NULL, NULL, 0);
+    }
+    return 0;
+}
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index 8788a86..6c9e85c 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -1283,6 +1283,46 @@
     return finalBufferSize;
 }
 
+ssize_t AudioTrack::getStartThresholdInFrames() const
+{
+    AutoMutex lock(mLock);
+    if (mOutput == AUDIO_IO_HANDLE_NONE || mProxy.get() == 0) {
+        return NO_INIT;
+    }
+    return (ssize_t) mProxy->getStartThresholdInFrames();
+}
+
+ssize_t AudioTrack::setStartThresholdInFrames(size_t startThresholdInFrames)
+{
+    if (startThresholdInFrames > INT32_MAX || startThresholdInFrames == 0) {
+        // contractually we could simply return the current threshold in frames
+        // to indicate the request was ignored, but we return an error here.
+        return BAD_VALUE;
+    }
+    AutoMutex lock(mLock);
+    // We do not permit calling setStartThresholdInFrames() between the AudioTrack
+    // default ctor AudioTrack() and set(...) but rather fail such an attempt.
+    // (To do so would require a cached mOrigStartThresholdInFrames and we may
+    // not have proper validation for the actual set value).
+    if (mOutput == AUDIO_IO_HANDLE_NONE || mProxy.get() == 0) {
+        return NO_INIT;
+    }
+    const uint32_t original = mProxy->getStartThresholdInFrames();
+    const uint32_t final = mProxy->setStartThresholdInFrames(startThresholdInFrames);
+    if (original != final) {
+        android::mediametrics::LogItem(mMetricsId)
+                .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_SETSTARTTHRESHOLD)
+                .set(AMEDIAMETRICS_PROP_STARTTHRESHOLDFRAMES, (int32_t)final)
+                .record();
+        if (original > final) {
+            // restart track if it was disabled by audioflinger due to previous underrun
+            // and we reduced the number of frames for the threshold.
+            restartIfDisabled();
+        }
+    }
+    return final;
+}
+
 status_t AudioTrack::setLoop(uint32_t loopStart, uint32_t loopEnd, int loopCount)
 {
     if (mSharedBuffer == 0 || isOffloadedOrDirect()) {
@@ -2602,6 +2642,10 @@
         staticPosition = mStaticProxy->getPosition().unsignedValue();
     }
 
+    // save the old startThreshold and frame count
+    const uint32_t originalStartThresholdInFrames = mProxy->getStartThresholdInFrames();
+    const uint32_t originalFrameCount = mProxy->frameCount();
+
     // See b/74409267. Connecting to a BT A2DP device supporting multiple codecs
     // causes a lot of churn on the service side, and it can reject starting
     // playback of a previously created track. May also apply to other cases.
@@ -2662,6 +2706,18 @@
             return status;
         });
 
+        // restore the original start threshold if different than frameCount.
+        if (originalStartThresholdInFrames != originalFrameCount) {
+            // Note: mProxy->setStartThresholdInFrames() call is in the Proxy
+            // and does not trigger a restart.
+            // (Also CBLK_DISABLED is not set, buffers are empty after track recreation).
+            // Any start would be triggered on the mState == ACTIVE check below.
+            const uint32_t currentThreshold =
+                    mProxy->setStartThresholdInFrames(originalStartThresholdInFrames);
+            ALOGD_IF(originalStartThresholdInFrames != currentThreshold,
+                    "%s(%d) startThresholdInFrames changing from %u to %u",
+                    __func__, mPortId, originalStartThresholdInFrames, currentThreshold);
+        }
         if (mState == STATE_ACTIVE) {
             mAudioTrack->start(&result);
         }
diff --git a/media/libaudioclient/AudioTrackShared.cpp b/media/libaudioclient/AudioTrackShared.cpp
index e2c9698..35719be 100644
--- a/media/libaudioclient/AudioTrackShared.cpp
+++ b/media/libaudioclient/AudioTrackShared.cpp
@@ -17,6 +17,7 @@
 #define LOG_TAG "AudioTrackShared"
 //#define LOG_NDEBUG 0
 
+#include <atomic>
 #include <android-base/macros.h>
 #include <private/media/AudioTrackShared.h>
 #include <utils/Log.h>
@@ -33,6 +34,21 @@
     return sizeof(T) > sizeof(size_t) && x > (T) SIZE_MAX ? SIZE_MAX : x < 0 ? 0 : (size_t) x;
 }
 
+// compile-time safe atomics. TODO: update all methods to use it
+template <typename T>
+T android_atomic_load(const volatile T* addr) {
+    static_assert(sizeof(T) == sizeof(std::atomic<T>)); // no extra sync data required.
+    static_assert(std::atomic<T>::is_always_lock_free); // no hash lock somewhere.
+    return atomic_load((std::atomic<T>*)addr);          // memory_order_seq_cst
+}
+
+template <typename T>
+void android_atomic_store(const volatile T* addr, T value) {
+    static_assert(sizeof(T) == sizeof(std::atomic<T>)); // no extra sync data required.
+    static_assert(std::atomic<T>::is_always_lock_free); // no hash lock somewhere.
+    atomic_store((std::atomic<T>*)addr, value);         // memory_order_seq_cst
+}
+
 // incrementSequence is used to determine the next sequence value
 // for the loop and position sequence counters.  It should return
 // a value between "other" + 1 and "other" + INT32_MAX, the choice of
@@ -51,6 +67,7 @@
     : mServer(0), mFutex(0), mMinimum(0)
     , mVolumeLR(GAIN_MINIFLOAT_PACKED_UNITY), mSampleRate(0), mSendLevel(0)
     , mBufferSizeInFrames(0)
+    , mStartThresholdInFrames(0) // filled in by the server.
     , mFlags(0)
 {
     memset(&u, 0, sizeof(u));
@@ -66,6 +83,26 @@
 {
 }
 
+uint32_t Proxy::getStartThresholdInFrames() const
+{
+    const uint32_t startThresholdInFrames =
+           android_atomic_load(&mCblk->mStartThresholdInFrames);
+    if (startThresholdInFrames == 0 || startThresholdInFrames > mFrameCount) {
+        ALOGD("%s: startThresholdInFrames %u not between 1 and frameCount %zu, "
+                "setting to frameCount",
+                __func__, startThresholdInFrames, mFrameCount);
+        return mFrameCount;
+    }
+    return startThresholdInFrames;
+}
+
+uint32_t Proxy::setStartThresholdInFrames(uint32_t startThresholdInFrames)
+{
+    const uint32_t actual = std::min((size_t)startThresholdInFrames, frameCount());
+    android_atomic_store(&mCblk->mStartThresholdInFrames, actual);
+    return actual;
+}
+
 // ---------------------------------------------------------------------------
 
 ClientProxy::ClientProxy(audio_track_cblk_t* cblk, void *buffers, size_t frameCount,
@@ -663,6 +700,7 @@
     , mTimestampMutator(&cblk->mExtendedTimestampQueue)
 {
     cblk->mBufferSizeInFrames = frameCount;
+    cblk->mStartThresholdInFrames = frameCount;
 }
 
 __attribute__((no_sanitize("integer")))
diff --git a/media/libaudioclient/include/media/AudioTrack.h b/media/libaudioclient/include/media/AudioTrack.h
index b1650ed..d167c40 100644
--- a/media/libaudioclient/include/media/AudioTrack.h
+++ b/media/libaudioclient/include/media/AudioTrack.h
@@ -429,6 +429,19 @@
      */
             ssize_t     setBufferSizeInFrames(size_t size);
 
+    /* Returns the start threshold on the buffer for audio streaming
+     * or a negative value if the AudioTrack is not initialized.
+     */
+            ssize_t     getStartThresholdInFrames() const;
+
+    /* Sets the start threshold in frames on the buffer for audio streaming.
+     *
+     * May be clamped internally. Returns the actual value set, or a negative
+     * value if the AudioTrack is not initialized or if the input
+     * is zero or greater than INT_MAX.
+     */
+            ssize_t     setStartThresholdInFrames(size_t startThresholdInFrames);
+
     /* Return the static buffer specified in constructor or set(), or 0 for streaming mode */
             sp<IMemory> sharedBuffer() const { return mSharedBuffer; }
 
diff --git a/media/libaudiohal/impl/DeviceHalHidl.cpp b/media/libaudiohal/impl/DeviceHalHidl.cpp
index b9a3e29..03a0d86 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalHidl.cpp
@@ -246,6 +246,10 @@
         return status;
     }
     CoreUtils::AudioInputFlags hidlFlags;
+#if MAJOR_VERSION <= 5
+    // Some flags were specific to framework and must not leak to the HAL.
+    flags = static_cast<audio_input_flags_t>(flags & ~AUDIO_INPUT_FLAG_DIRECT);
+#endif
     if (status_t status = CoreUtils::audioInputFlagsFromHal(flags, &hidlFlags); status != OK) {
         return status;
     }
@@ -278,10 +282,6 @@
         sinkMetadata.tracks[0].destination.device(std::move(hidlOutputDevice));
     }
 #endif
-#if MAJOR_VERSION <= 5
-    // Some flags were specific to framework and must not leak to the HAL.
-    flags = static_cast<audio_input_flags_t>(flags & ~AUDIO_INPUT_FLAG_DIRECT);
-#endif
     Return<void> ret = mDevice->openInputStream(
             handle, hidlDevice, hidlConfig, hidlFlags, sinkMetadata,
             [&](Result r, const sp<IStreamIn>& result, const AudioConfig& suggestedConfig) {
diff --git a/media/libeffects/preprocessing/Android.bp b/media/libeffects/preprocessing/Android.bp
index 87ed8b6..c6e036a 100644
--- a/media/libeffects/preprocessing/Android.bp
+++ b/media/libeffects/preprocessing/Android.bp
@@ -18,15 +18,10 @@
     ],
 }
 
-cc_library {
-    name: "libaudiopreprocessing",
+cc_defaults {
+    name: "libaudiopreprocessing-defaults",
     vendor: true,
-    relative_install_path: "soundfx",
     host_supported: true,
-    srcs: ["PreProcessing.cpp"],
-    local_include_dirs: [
-        ".",
-    ],
     cflags: [
         "-Wall",
         "-Werror",
@@ -46,7 +41,6 @@
     header_libs: [
         "libaudioeffects",
         "libhardware_headers",
-        "libwebrtc_absl_headers",
     ],
     target: {
         darwin: {
@@ -54,3 +48,13 @@
         },
     },
 }
+
+cc_library {
+    name: "libaudiopreprocessing",
+    defaults: ["libaudiopreprocessing-defaults"],
+    relative_install_path: "soundfx",
+    srcs: ["PreProcessing.cpp"],
+    header_libs: [
+        "libwebrtc_absl_headers",
+    ],
+}
diff --git a/media/libeffects/preprocessing/PreProcessing.cpp b/media/libeffects/preprocessing/PreProcessing.cpp
index 3b0b6d6..19a8b2f 100644
--- a/media/libeffects/preprocessing/PreProcessing.cpp
+++ b/media/libeffects/preprocessing/PreProcessing.cpp
@@ -105,9 +105,8 @@
     webrtc::AudioProcessing* apm;  // handle on webRTC audio processing module (APM)
     // Audio Processing module builder
     webrtc::AudioProcessingBuilder ap_builder;
-    size_t apmFrameCount;      // buffer size for webRTC process (10 ms)
-    uint32_t apmSamplingRate;  // webRTC APM sampling rate (8/16 or 32 kHz)
-    size_t frameCount;         // buffer size before input resampler ( <=> apmFrameCount)
+    // frameCount represents the size of the buffers used for processing, and must represent 10ms.
+    size_t frameCount;
     uint32_t samplingRate;     // sampling rate at effect process interface
     uint32_t inChannelCount;   // input channel count
     uint32_t outChannelCount;  // output channel count
@@ -119,21 +118,12 @@
     webrtc::AudioProcessing::Config config;
     webrtc::StreamConfig inputConfig;   // input stream configuration
     webrtc::StreamConfig outputConfig;  // output stream configuration
-    int16_t* inBuf;    // input buffer used when resampling
-    size_t inBufSize;  // input buffer size in frames
-    size_t framesIn;   // number of frames in input buffer
-    int16_t* outBuf;    // output buffer used when resampling
-    size_t outBufSize;  // output buffer size in frames
-    size_t framesOut;   // number of frames in output buffer
     uint32_t revChannelCount;  // number of channels on reverse stream
     uint32_t revEnabledMsk;    // bit field containing IDs of enabled pre processors
                                // with reverse channel
     uint32_t revProcessedMsk;  // bit field containing IDs of pre processors with reverse
                                // channel already processed in current round
     webrtc::StreamConfig revConfig;     // reverse stream configuration.
-    int16_t* revBuf;    // reverse channel input buffer
-    size_t revBufSize;  // reverse channel input buffer size
-    size_t framesRev;   // number of frames in reverse channel input buffer
 };
 
 #ifdef DUAL_MIC_TEST
@@ -862,9 +852,7 @@
             ALOGW("Session_CreateEffect could not get apm engine");
             goto error;
         }
-        session->apmSamplingRate = kPreprocDefaultSr;
-        session->apmFrameCount = (kPreprocDefaultSr) / 100;
-        session->frameCount = session->apmFrameCount;
+        session->frameCount = kPreprocDefaultSr / 100;
         session->samplingRate = kPreprocDefaultSr;
         session->inChannelCount = kPreProcDefaultCnl;
         session->outChannelCount = kPreProcDefaultCnl;
@@ -879,12 +867,6 @@
         session->processedMsk = 0;
         session->revEnabledMsk = 0;
         session->revProcessedMsk = 0;
-        session->inBuf = NULL;
-        session->inBufSize = 0;
-        session->outBuf = NULL;
-        session->outBufSize = 0;
-        session->revBuf = NULL;
-        session->revBufSize = 0;
     }
     status = Effect_Create(&session->effects[procId], session, interface);
     if (status < 0) {
@@ -908,13 +890,6 @@
     if (session->createdMsk == 0) {
         delete session->apm;
         session->apm = NULL;
-        delete session->inBuf;
-        session->inBuf = NULL;
-        free(session->outBuf);
-        session->outBuf = NULL;
-        delete session->revBuf;
-        session->revBuf = NULL;
-
         session->id = 0;
     }
 
@@ -934,24 +909,8 @@
     ALOGV("Session_SetConfig sr %d cnl %08x", config->inputCfg.samplingRate,
           config->inputCfg.channels);
 
-    // AEC implementation is limited to 16kHz
-    if (config->inputCfg.samplingRate >= 32000 && !(session->createdMsk & (1 << PREPROC_AEC))) {
-        session->apmSamplingRate = 32000;
-    } else if (config->inputCfg.samplingRate >= 16000) {
-        session->apmSamplingRate = 16000;
-    } else if (config->inputCfg.samplingRate >= 8000) {
-        session->apmSamplingRate = 8000;
-    }
-
-
     session->samplingRate = config->inputCfg.samplingRate;
-    session->apmFrameCount = session->apmSamplingRate / 100;
-    if (session->samplingRate == session->apmSamplingRate) {
-        session->frameCount = session->apmFrameCount;
-    } else {
-        session->frameCount =
-                (session->apmFrameCount * session->samplingRate) / session->apmSamplingRate;
-    }
+    session->frameCount = session->samplingRate / 100;
     session->inChannelCount = inCnl;
     session->outChannelCount = outCnl;
     session->inputConfig.set_sample_rate_hz(session->samplingRate);
@@ -963,13 +922,6 @@
     session->revConfig.set_sample_rate_hz(session->samplingRate);
     session->revConfig.set_num_channels(inCnl);
 
-    // force process buffer reallocation
-    session->inBufSize = 0;
-    session->outBufSize = 0;
-    session->framesIn = 0;
-    session->framesOut = 0;
-
-
     session->state = PREPROC_SESSION_STATE_CONFIG;
     return 0;
 }
@@ -1004,9 +956,6 @@
     }
     uint32_t inCnl = audio_channel_count_from_out_mask(config->inputCfg.channels);
     session->revChannelCount = inCnl;
-    // force process buffer reallocation
-    session->revBufSize = 0;
-    session->framesRev = 0;
 
     return 0;
 }
@@ -1023,12 +972,8 @@
 
 void Session_SetProcEnabled(preproc_session_t* session, uint32_t procId, bool enabled) {
     if (enabled) {
-        if (session->enabledMsk == 0) {
-            session->framesIn = 0;
-        }
         session->enabledMsk |= (1 << procId);
         if (HasReverseStream(procId)) {
-            session->framesRev = 0;
             session->revEnabledMsk |= (1 << procId);
         }
     } else {
@@ -1117,43 +1062,24 @@
         return -EINVAL;
     }
 
+    if (inBuffer->frameCount != outBuffer->frameCount) {
+        ALOGW("inBuffer->frameCount %zu is not equal to outBuffer->frameCount %zu",
+              inBuffer->frameCount, outBuffer->frameCount);
+        return -EINVAL;
+    }
+
+    if (inBuffer->frameCount != session->frameCount) {
+        ALOGW("inBuffer->frameCount %zu != %zu representing 10ms at sampling rate %d",
+              inBuffer->frameCount, session->frameCount, session->samplingRate);
+        return -EINVAL;
+    }
+
     session->processedMsk |= (1 << effect->procId);
 
     //    ALOGV("PreProcessingFx_Process In %d frames enabledMsk %08x processedMsk %08x",
     //         inBuffer->frameCount, session->enabledMsk, session->processedMsk);
-
     if ((session->processedMsk & session->enabledMsk) == session->enabledMsk) {
         effect->session->processedMsk = 0;
-        size_t framesRq = outBuffer->frameCount;
-        size_t framesWr = 0;
-        if (session->framesOut) {
-            size_t fr = session->framesOut;
-            if (outBuffer->frameCount < fr) {
-                fr = outBuffer->frameCount;
-            }
-            memcpy(outBuffer->s16, session->outBuf,
-                   fr * session->outChannelCount * sizeof(int16_t));
-            memmove(session->outBuf, session->outBuf + fr * session->outChannelCount,
-                    (session->framesOut - fr) * session->outChannelCount * sizeof(int16_t));
-            session->framesOut -= fr;
-            framesWr += fr;
-        }
-        outBuffer->frameCount = framesWr;
-        if (framesWr == framesRq) {
-            inBuffer->frameCount = 0;
-            return 0;
-        }
-
-        size_t fr = session->frameCount - session->framesIn;
-        if (inBuffer->frameCount < fr) {
-            fr = inBuffer->frameCount;
-        }
-        session->framesIn += fr;
-        inBuffer->frameCount = fr;
-        if (session->framesIn < session->frameCount) {
-            return 0;
-        }
-        session->framesIn = 0;
         if (int status = effect->session->apm->ProcessStream(
                     (const int16_t* const)inBuffer->s16,
                     (const webrtc::StreamConfig)effect->session->inputConfig,
@@ -1163,34 +1089,6 @@
             ALOGE("Process Stream failed with error %d\n", status);
             return status;
         }
-        outBuffer->frameCount = inBuffer->frameCount;
-
-        if (session->outBufSize < session->framesOut + session->frameCount) {
-            int16_t* buf;
-            session->outBufSize = session->framesOut + session->frameCount;
-            buf = (int16_t*)realloc(
-                    session->outBuf,
-                    session->outBufSize * session->outChannelCount * sizeof(int16_t));
-            if (buf == NULL) {
-                session->framesOut = 0;
-                free(session->outBuf);
-                session->outBuf = NULL;
-                return -ENOMEM;
-            }
-            session->outBuf = buf;
-        }
-
-        fr = session->framesOut;
-        if (framesRq - framesWr < fr) {
-            fr = framesRq - framesWr;
-        }
-        memcpy(outBuffer->s16 + framesWr * session->outChannelCount, session->outBuf,
-               fr * session->outChannelCount * sizeof(int16_t));
-        memmove(session->outBuf, session->outBuf + fr * session->outChannelCount,
-                (session->framesOut - fr) * session->outChannelCount * sizeof(int16_t));
-        session->framesOut -= fr;
-        outBuffer->frameCount += fr;
-
         return 0;
     } else {
         return -ENODATA;
@@ -1565,6 +1463,18 @@
         return -EINVAL;
     }
 
+    if (inBuffer->frameCount != outBuffer->frameCount) {
+        ALOGW("inBuffer->frameCount %zu is not equal to outBuffer->frameCount %zu",
+              inBuffer->frameCount, outBuffer->frameCount);
+        return -EINVAL;
+    }
+
+    if (inBuffer->frameCount != session->frameCount) {
+        ALOGW("inBuffer->frameCount %zu != %zu representing 10ms at sampling rate %d",
+              inBuffer->frameCount, session->frameCount, session->samplingRate);
+        return -EINVAL;
+    }
+
     session->revProcessedMsk |= (1 << effect->procId);
 
     //    ALOGV("PreProcessingFx_ProcessReverse In %d frames revEnabledMsk %08x revProcessedMsk
@@ -1573,16 +1483,6 @@
 
     if ((session->revProcessedMsk & session->revEnabledMsk) == session->revEnabledMsk) {
         effect->session->revProcessedMsk = 0;
-        size_t fr = session->frameCount - session->framesRev;
-        if (inBuffer->frameCount < fr) {
-            fr = inBuffer->frameCount;
-        }
-        session->framesRev += fr;
-        inBuffer->frameCount = fr;
-        if (session->framesRev < session->frameCount) {
-            return 0;
-        }
-        session->framesRev = 0;
         if (int status = effect->session->apm->ProcessReverseStream(
                     (const int16_t* const)inBuffer->s16,
                     (const webrtc::StreamConfig)effect->session->revConfig,
diff --git a/media/libeffects/preprocessing/README.md b/media/libeffects/preprocessing/README.md
new file mode 100644
index 0000000..af46376
--- /dev/null
+++ b/media/libeffects/preprocessing/README.md
@@ -0,0 +1,7 @@
+# Preprocessing effects
+
+## Limitations
+- Preprocessing effects currently work on 10ms worth of data and do not support
+  arbitrary frame counts. This limitation comes from the underlying effects in
+  webrtc modules
+- There is currently no API to communicate this requirement.
diff --git a/media/libeffects/preprocessing/benchmarks/Android.bp b/media/libeffects/preprocessing/benchmarks/Android.bp
index c1b2295..fbbcab4 100644
--- a/media/libeffects/preprocessing/benchmarks/Android.bp
+++ b/media/libeffects/preprocessing/benchmarks/Android.bp
@@ -11,27 +11,10 @@
 
 cc_benchmark {
     name: "preprocessing_benchmark",
-    vendor: true,
+    defaults: ["libaudiopreprocessing-defaults"],
     srcs: ["preprocessing_benchmark.cpp"],
-    shared_libs: [
-        "libaudioutils",
-        "liblog",
-        "libutils",
-    ],
     static_libs: [
         "libaudiopreprocessing",
-        "webrtc_audio_processing",
-    ],
-    cflags: [
-        "-DWEBRTC_POSIX",
-        "-fvisibility=default",
-        "-Wall",
-        "-Werror",
-        "-Wextra",
-    ],
-    header_libs: [
-        "libaudioeffects",
-        "libhardware_headers",
-        "libwebrtc_absl_headers",
+        "libaudioutils",
     ],
 }
diff --git a/media/libeffects/preprocessing/tests/Android.bp b/media/libeffects/preprocessing/tests/Android.bp
index 18c6c98..d80b135 100644
--- a/media/libeffects/preprocessing/tests/Android.bp
+++ b/media/libeffects/preprocessing/tests/Android.bp
@@ -12,9 +12,8 @@
 
 cc_test {
     name: "EffectPreprocessingTest",
-    vendor: true,
+    defaults: ["libaudiopreprocessing-defaults"],
     gtest: true,
-    host_supported: true,
     test_suites: ["device-tests"],
     srcs: [
         "EffectPreprocessingTest.cpp",
@@ -23,46 +22,18 @@
     static_libs: [
         "libaudiopreprocessing",
         "libaudioutils",
-        "webrtc_audio_processing",
     ],
-    shared_libs: [
-        "liblog",
-    ],
-    header_libs: [
-        "libaudioeffects",
-        "libhardware_headers",
-    ],
-    target: {
-        darwin: {
-            enabled: false,
-        },
-    },
 }
 
 cc_test {
     name: "AudioPreProcessingTest",
-    vendor: true,
-    host_supported: true,
+    defaults: ["libaudiopreprocessing-defaults"],
     gtest: false,
     srcs: ["PreProcessingTest.cpp"],
-    shared_libs: [
-        "libaudioutils",
-        "liblog",
-        "libutils",
-    ],
     static_libs: [
         "libaudiopreprocessing",
-        "webrtc_audio_processing",
+        "libaudioutils",
     ],
-    header_libs: [
-        "libaudioeffects",
-        "libhardware_headers",
-    ],
-    target: {
-        darwin: {
-            enabled: false,
-        },
-    },
 }
 
 cc_test {
diff --git a/media/libeffects/preprocessing/tests/EffectTestHelper.h b/media/libeffects/preprocessing/tests/EffectTestHelper.h
index db06823..117cf7b 100644
--- a/media/libeffects/preprocessing/tests/EffectTestHelper.h
+++ b/media/libeffects/preprocessing/tests/EffectTestHelper.h
@@ -88,7 +88,8 @@
 
     static constexpr size_t kNumChMasks = std::size(kChMasks);
 
-    static constexpr size_t kSampleRates[] = {8000, 16000, 24000, 32000, 48000};
+    static constexpr size_t kSampleRates[] = {8000,  11025, 12000, 16000, 22050,
+                                              24000, 32000, 44100, 48000};
 
     static constexpr size_t kNumSampleRates = std::size(kSampleRates);
 
diff --git a/media/libeffects/preprocessing/tests/PreProcessingTest.cpp b/media/libeffects/preprocessing/tests/PreProcessingTest.cpp
index e0025fe..3bd93f8 100644
--- a/media/libeffects/preprocessing/tests/PreProcessingTest.cpp
+++ b/media/libeffects/preprocessing/tests/PreProcessingTest.cpp
@@ -451,8 +451,8 @@
         }
         audio_buffer_t inputBuffer, outputBuffer;
         audio_buffer_t farInBuffer{};
-        inputBuffer.frameCount = samplesRead;
-        outputBuffer.frameCount = samplesRead;
+        inputBuffer.frameCount = frameLength;
+        outputBuffer.frameCount = frameLength;
         inputBuffer.s16 = in.data();
         outputBuffer.s16 = out.data();
 
@@ -472,7 +472,7 @@
                 }
             }
 
-            farInBuffer.frameCount = samplesRead;
+            farInBuffer.frameCount = frameLength;
             farInBuffer.s16 = farIn.data();
         }
 
@@ -519,6 +519,7 @@
         }
         frameCounter += frameLength;
     }
+    printf("frameCounter: [%d]\n", frameCounter);
     // Release all the effect handles created
     for (int i = 0; i < PREPROC_NUM_EFFECTS; i++) {
         if (int status = AUDIO_EFFECT_LIBRARY_INFO_SYM.release_effect(effectHandle[i]);
diff --git a/media/libeffects/preprocessing/tests/build_and_run_all_unit_tests.sh b/media/libeffects/preprocessing/tests/build_and_run_all_unit_tests.sh
index 942f2ec..35da13e 100755
--- a/media/libeffects/preprocessing/tests/build_and_run_all_unit_tests.sh
+++ b/media/libeffects/preprocessing/tests/build_and_run_all_unit_tests.sh
@@ -59,9 +59,13 @@
 
 fs_arr=(
     8000
+    11025
+    12000
     16000
+    22050
     24000
     32000
+    44100
     48000
 )
 
diff --git a/media/libeffects/proxy/EffectProxy.cpp b/media/libeffects/proxy/EffectProxy.cpp
index c010d68..be9f8c0 100644
--- a/media/libeffects/proxy/EffectProxy.cpp
+++ b/media/libeffects/proxy/EffectProxy.cpp
@@ -116,6 +116,16 @@
         pContext->sube[SUB_FX_OFFLOAD] = sube[1];
         pContext->desc[SUB_FX_OFFLOAD] = desc[1];
         pContext->aeli[SUB_FX_OFFLOAD] = aeli[1];
+    } else {
+        ALOGE("Both effects have (or don't have) EFFECT_FLAG_HW_ACC_TUNNEL flag");
+        delete[] sube;
+        delete[] desc;
+        delete[] aeli;
+        delete[] pContext->sube;
+        delete[] pContext->desc;
+        delete[] pContext->aeli;
+        delete pContext;
+        return -EINVAL;
     }
     delete[] desc;
     delete[] aeli;
diff --git a/media/libmedia/Android.bp b/media/libmedia/Android.bp
index 9e33610..b2056ad 100644
--- a/media/libmedia/Android.bp
+++ b/media/libmedia/Android.bp
@@ -432,5 +432,8 @@
         },
     },
 
-    apex_available: ["com.android.media"],
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media"
+    ],
 }
diff --git a/media/libmediaformatshaper/Android.bp b/media/libmediaformatshaper/Android.bp
new file mode 100644
index 0000000..731ff4c
--- /dev/null
+++ b/media/libmediaformatshaper/Android.bp
@@ -0,0 +1,91 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+// these headers include the structure of needed function pointers
+cc_library_headers {
+    name: "libmediaformatshaper_headers",
+    export_include_dirs: ["include"],
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media",
+    ],
+    min_sdk_version: "29",
+    host_supported: true,
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
+}
+
+cc_defaults {
+    name: "libmediaformatshaper_defaults",
+    srcs: [
+        "CodecProperties.cpp",
+        "FormatShaper.cpp",
+        "ManageShapingCodecs.cpp",
+        "VideoShaper.cpp",
+        "VQApply.cpp",
+    ],
+
+    local_include_dirs: [
+        "include",
+    ],
+
+    shared_libs: [
+        "liblog",
+        "libutils",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+        "-fvisibility=hidden",
+        "-Wthread-safety",                      // enables GUARDED_BY()
+    ],
+
+    target: {
+        android: {
+            shared_libs: [
+                "libmediandk#29",
+            ],
+        },
+    },
+
+    sanitize: {
+        cfi: true,
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+    },
+}
+
+cc_library {
+    name: "libmediaformatshaper",
+    defaults: ["libmediaformatshaper_defaults"],
+
+    min_sdk_version: "29",
+
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media",
+    ],
+
+    version_script: "exports.lds",
+
+}
diff --git a/media/libmediaformatshaper/CodecProperties.cpp b/media/libmediaformatshaper/CodecProperties.cpp
new file mode 100644
index 0000000..dccfd95
--- /dev/null
+++ b/media/libmediaformatshaper/CodecProperties.cpp
@@ -0,0 +1,143 @@
+/*
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "CodecProperties"
+#include <utils/Log.h>
+
+#include <string>
+
+#include <media/formatshaper/CodecProperties.h>
+
+namespace android {
+namespace mediaformatshaper {
+
+CodecProperties::CodecProperties(std::string name, std::string mediaType) {
+    mName = name;
+    mMediaType = mediaType;
+}
+
+std::string CodecProperties::getName(){
+    return mName;
+}
+
+std::string CodecProperties::getMediaType(){
+    return mMediaType;
+}
+
+int CodecProperties::supportedMinimumQuality() {
+    return mMinimumQuality;
+}
+void CodecProperties::setSupportedMinimumQuality(int vmaf) {
+    mMinimumQuality = vmaf;
+}
+
+int CodecProperties::targetQpMax() {
+    return mTargetQpMax;
+}
+void CodecProperties::setTargetQpMax(int qpMax) {
+    mTargetQpMax = qpMax;
+}
+
+// what API is this codec set up for (e.g. API of the associated partition)
+// vendor-side (OEM) codecs may be older, due to 'vendor freeze' and treble
+int CodecProperties::supportedApi() {
+    return mApi;
+}
+
+std::string CodecProperties::getMapping(std::string key, std::string kind) {
+    ALOGV("getMapping(key %s, kind %s )", key.c_str(), kind.c_str());
+    //play with mMappings
+    auto mapped = mMappings.find(kind + "-" + key);
+    if (mapped != mMappings.end()) {
+        std::string result = mapped->second;
+        ALOGV("getMapping(%s, %s) -> %s", key.c_str(), kind.c_str(), result.c_str());
+        return result;
+    }
+    ALOGV("nope, return unchanged key");
+    return key;
+}
+
+
+// really a bit of debugging code here.
+void CodecProperties::showMappings() {
+    ALOGD("Mappings:");
+    int count = 0;
+    for (const auto& [key, value] : mMappings) {
+         count++;
+         ALOGD("'%s' -> '%s'", key.c_str(), value.c_str());
+    }
+    ALOGD("total %d mappings", count);
+}
+
+void CodecProperties::setMapping(std::string kind, std::string key, std::string value) {
+    ALOGV("setMapping(%s,%s,%s)", kind.c_str(), key.c_str(), value.c_str());
+    std::string metaKey = kind + "-" + key;
+    mMappings.insert({metaKey, value});
+}
+
+const char **CodecProperties::getMappings(std::string kind, bool reverse) {
+    ALOGV("getMappings(kind %s, reverse %d", kind.c_str(), reverse);
+    // how many do we need?
+    int count = mMappings.size();
+    if (count == 0) {
+        ALOGV("empty mappings");
+        return nullptr;
+    }
+    size_t size = sizeof(char *) * (2 * count + 2);
+    const char **result = (const char **)malloc(size);
+    if (result == nullptr) {
+        ALOGW("no memory to return mappings");
+        return nullptr;
+    }
+    memset(result, '\0', size);
+
+    const char **pp = result;
+    for (const auto& [key, value] : mMappings) {
+        // split out the kind/key
+        size_t pos = key.find('-');
+        if (pos == std::string::npos) {
+            ALOGD("ignoring malformed key: %s", key.c_str());
+            continue;
+        }
+        std::string actualKind = key.substr(0,pos);
+        if (kind.length() != 0 && kind != actualKind) {
+            ALOGD("kinds don't match: want '%s' got '%s'", kind.c_str(), actualKind.c_str());
+            continue;
+        }
+        if (reverse) {
+            // codec specific -> std aka 'unmapping'
+            pp[0] = strdup( value.c_str());
+            pp[1] = strdup( key.substr(pos+1).c_str());
+        } else {
+            // std -> codec specific
+            pp[0] = strdup( key.substr(pos+1).c_str());
+            pp[1] = strdup( value.c_str());
+        }
+        ALOGV(" %s -> %s", pp[0], pp[1]);
+        pp += 2;
+    }
+
+    pp[0] = nullptr;
+    pp[1] = nullptr;
+
+    return result;
+}
+
+
+} // namespace mediaformatshaper
+} // namespace android
+
diff --git a/media/libmediaformatshaper/FormatShaper.cpp b/media/libmediaformatshaper/FormatShaper.cpp
new file mode 100644
index 0000000..ca4dc72
--- /dev/null
+++ b/media/libmediaformatshaper/FormatShaper.cpp
@@ -0,0 +1,184 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "FormatShaper"
+#include <utils/Log.h>
+
+#include <string>
+#include <inttypes.h>
+
+#include <media/NdkMediaFormat.h>
+
+#include <media/formatshaper/VQops.h>
+#include <media/formatshaper/CodecProperties.h>
+#include <media/formatshaper/FormatShaper.h>
+#include <media/formatshaper/VideoShaper.h>
+
+namespace android {
+namespace mediaformatshaper {
+
+//
+// Caller retains ownership of and responsibility for inFormat
+//
+
+//
+// the interface to the outside
+//
+
+int shapeFormat(shaperHandle_t shaper, AMediaFormat* inFormat, int flags) {
+    CodecProperties *codec = (CodecProperties*) shaper;
+    if (codec == nullptr) {
+        return -1;
+    }
+    if (!codec->isRegistered()) {
+        return -1;
+    }
+
+    // run through the list of possible transformations
+    //
+
+    std::string mediaType = codec->getMediaType();
+    if (strncmp(mediaType.c_str(), "video/", 6) == 0) {
+        // video specific shaping
+        (void) videoShaper(codec, inFormat, flags);
+
+    } else if (strncmp(mediaType.c_str(), "audio/", 6) == 0) {
+        // audio specific shaping
+
+    } else {
+        ALOGV("unknown mediatype '%s', left untouched", mediaType.c_str());
+
+    }
+
+    return 0;
+}
+
+int setMap(shaperHandle_t shaper,  const char *kind, const char *key, const char *value) {
+    ALOGV("setMap: kind %s key %s -> value %s", kind, key, value);
+    CodecProperties *codec = (CodecProperties*) shaper;
+    if (codec == nullptr) {
+        return -1;
+    }
+    // must not yet be registered
+    if (codec->isRegistered()) {
+        return -1;
+    }
+
+    codec->setMapping(kind, key, value);
+    return 0;
+}
+
+int setFeature(shaperHandle_t shaper, const char *feature, int value) {
+    ALOGV("set_feature: feature %s value %d", feature, value);
+    CodecProperties *codec = (CodecProperties*) shaper;
+    if (codec == nullptr) {
+        return -1;
+    }
+    // must not yet be registered
+    if (codec->isRegistered()) {
+        return -1;
+    }
+
+    if (!strcmp(feature, "vq-minimum-quality")) {
+        codec->setSupportedMinimumQuality(value);
+    } else if (!strcmp(feature, "vq-supports-qp")) {
+        codec->setSupportsQp(value != 0);
+    } else if (!strcmp(feature, "vq-target-qpmax")) {
+        codec->setTargetQpMax(value);
+    } else if (!strcmp(feature, "vq-target-bppx100")) {
+        double bpp = value / 100.0;
+        codec->setBpp(bpp);
+    } else {
+        // changed nothing, don't mark as configured
+        return 0;
+    }
+    return 0;
+}
+
+/*
+ * The routines that manage finding, creating, and registering the shapers.
+ */
+
+shaperHandle_t findShaper(const char *codecName, const char *mediaType) {
+    CodecProperties *codec = findCodec(codecName, mediaType);
+    return (shaperHandle_t) codec;
+}
+
+shaperHandle_t createShaper(const char *codecName, const char *mediaType) {
+    CodecProperties *codec = new CodecProperties(codecName, mediaType);
+    return (shaperHandle_t) codec;
+}
+
+shaperHandle_t registerShaper(shaperHandle_t shaper, const char *codecName, const char *mediaType) {
+    ALOGV("registerShaper(handle, codecName %s, mediaType %s", codecName, mediaType);
+    CodecProperties *codec = (CodecProperties*) shaper;
+    if (codec == nullptr) {
+        return nullptr;
+    }
+    // must not yet be registered
+    if (codec->isRegistered()) {
+        return nullptr;
+    }
+
+    codec = registerCodec(codec, codecName, mediaType);
+    return (shaperHandle_t) codec;
+}
+
+// mapping & unmapping
+// give me the mappings for 'kind'.
+// kind==null (or empty string), means *all* mappings
+
+const char **getMappings(shaperHandle_t shaper, const char *kind) {
+    CodecProperties *codec = (CodecProperties*) shaper;
+    if (codec == nullptr)
+        return nullptr;
+    if (kind == nullptr)
+        kind = "";
+
+    return codec->getMappings(kind, /* reverse */ false);
+}
+
+const char **getReverseMappings(shaperHandle_t shaper, const char *kind) {
+    CodecProperties *codec = (CodecProperties*) shaper;
+    if (codec == nullptr)
+        return nullptr;
+    if (kind == nullptr)
+        kind = "";
+
+    return codec->getMappings(kind, /* reverse */ true);
+}
+
+
+// the system grabs this structure
+__attribute__ ((visibility ("default")))
+extern "C" FormatShaperOps_t shaper_ops = {
+    .version = SHAPER_VERSION_V1,
+
+    .findShaper = findShaper,
+    .createShaper = createShaper,
+    .setMap = setMap,
+    .setFeature = setFeature,
+    .registerShaper = registerShaper,
+
+    .shapeFormat = shapeFormat,
+    .getMappings = getMappings,
+    .getReverseMappings = getReverseMappings,
+};
+
+}  // namespace mediaformatshaper
+}  // namespace android
+
diff --git a/media/libmediaformatshaper/ManageShapingCodecs.cpp b/media/libmediaformatshaper/ManageShapingCodecs.cpp
new file mode 100644
index 0000000..bdc395f
--- /dev/null
+++ b/media/libmediaformatshaper/ManageShapingCodecs.cpp
@@ -0,0 +1,103 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ManageShapingCodecs"
+#include <utils/Log.h>
+
+#include <mutex>
+#include <string>
+#include <inttypes.h>
+
+#include <media/NdkMediaFormat.h>
+#include <media/formatshaper/CodecProperties.h>
+
+namespace android {
+namespace mediaformatshaper {
+
+// manage the list of codec information.
+//
+// XXX: the mutex here is too heavy; rework that.
+//
+
+static std::mutex sCodecMutex;
+static std::map<std::string, CodecProperties*> sCodecTraits;
+
+CodecProperties *findCodec(const char *codecName, const char *mediaType) {
+    CodecProperties *codec = nullptr;
+
+    // synthesize a name from both codecName + mediaType
+    // some codecs support multiple media types and may have different capabilities
+    // for each media type
+    //
+    std::string codecKey = codecName;
+    codecKey += "-";
+    codecKey += mediaType;
+
+    std::lock_guard  _l(sCodecMutex);
+
+    auto it = sCodecTraits.find(codecKey);
+    if (it != sCodecTraits.end()) {
+        codec = it->second;
+    }
+
+    return codec;
+}
+
+CodecProperties *registerCodec(CodecProperties *codec, const char *codecName,
+                               const char *mediaType) {
+
+    CodecProperties *registeredCodec = nullptr;
+
+    if (codec->isRegistered()) {
+        return nullptr;
+    }
+
+    // synthesize a name from both codecName + mediaType
+    // some codecs support multiple media types and may have different capabilities
+    // for each media type
+    //
+    std::string codecKey = codecName;
+    codecKey += "-";
+    codecKey += mediaType;
+
+    std::lock_guard  _l(sCodecMutex);
+
+    auto it = sCodecTraits.find(codecKey);
+    if (it != sCodecTraits.end()) {
+        registeredCodec = it->second;
+    }
+
+    if (registeredCodec == nullptr) {
+        // register the one that was passed to us
+        ALOGV("Creating entry for codec %s, mediaType %s, key %s", codecName, mediaType,
+              codecKey.c_str());
+        sCodecTraits.insert({codecKey, codec});
+        registeredCodec = codec;
+        codec->setRegistered(true);
+    } else {
+        // one has already been registered, use that
+        // and discard the candidate
+        delete codec;
+        codec = nullptr;
+    }
+
+    return registeredCodec;
+}
+
+}  // namespace mediaformatshaper
+}  // namespace android
+
diff --git a/media/libmediaformatshaper/VQApply.cpp b/media/libmediaformatshaper/VQApply.cpp
new file mode 100644
index 0000000..6f6f33c
--- /dev/null
+++ b/media/libmediaformatshaper/VQApply.cpp
@@ -0,0 +1,202 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "VQApply"
+#include <utils/Log.h>
+
+#include <string>
+#include <inttypes.h>
+
+#include <media/NdkMediaFormat.h>
+
+#include <media/formatshaper/VQops.h>
+#include <media/formatshaper/CodecProperties.h>
+#include <media/formatshaper/VideoShaper.h>
+
+namespace android {
+namespace mediaformatshaper {
+
+
+// these are all NDK#31 and we run as NDK#29 (to be within the module)
+// the __builtin_available(android 31, *) constructs didn't work for me.
+//
+#define	AMEDIAFORMAT_VIDEO_QP_MAX	"video-qp-max"
+#define	AMEDIAFORMAT_VIDEO_QP_MIN	"video-qp-min"
+
+#define	AMEDIAFORMAT_VIDEO_QP_B_MAX	"video-qp-b-max"
+#define	AMEDIAFORMAT_VIDEO_QP_B_MIN	"video-qp-b-min"
+#define	AMEDIAFORMAT_VIDEO_QP_I_MAX	"video-qp-i-max"
+#define	AMEDIAFORMAT_VIDEO_QP_I_MIN	"video-qp-i-min"
+#define	AMEDIAFORMAT_VIDEO_QP_P_MAX	"video-qp-p-max"
+#define	AMEDIAFORMAT_VIDEO_QP_P_MIN	"video-qp-p-min"
+
+//
+// Caller retains ownership of and responsibility for inFormat
+//
+int VQApply(CodecProperties *codec, vqOps_t *info, AMediaFormat* inFormat, int flags) {
+    ALOGV("codecName %s inFormat %p flags x%x", codec->getName().c_str(), inFormat, flags);
+
+    if (codec->supportedMinimumQuality() > 0) {
+        // allow the codec provided minimum quality behavior to work at it
+        ALOGD("minquality(codec): codec says %d", codec->supportedMinimumQuality());
+        return 0;
+    }
+
+    ALOGD("considering other ways to improve quality...");
+
+    //
+    // apply any and all tools that we have.
+    // -- qp
+    // -- minimum bits-per-pixel
+    //
+    if (codec->supportsQp()) {
+        // use a (configurable) QP value to force better quality
+        //
+        // XXX: augment this so that we don't lower an existing QP setting
+        // (e.g. if user set it to 40, we don't want to set it back to 45)
+        int qpmax = codec->targetQpMax();
+        if (qpmax <= 0) {
+                qpmax = 45;
+                ALOGD("use default substitute QpMax == %d", qpmax);
+        }
+        ALOGD("minquality by QP: inject %s=%d", AMEDIAFORMAT_VIDEO_QP_MAX, qpmax);
+        AMediaFormat_setInt32(inFormat, AMEDIAFORMAT_VIDEO_QP_MAX, qpmax);
+
+        // force spreading the QP across frame types, since we imposing a value
+        qpSpreadMaxPerFrameType(inFormat, info->qpDelta, info->qpMax, /* override */ true);
+    } else {
+        ALOGD("codec %s: no qp bounding", codec->getName().c_str());
+    }
+
+    double bpp = codec->getBpp();
+    if (bpp > 0.0) {
+        // if we've decided to use bits-per-pixel (per second) to drive the quality
+        //
+        // (properly phrased as 'bits per second per pixel' so that it's resolution
+        // and framerate agnostic)
+        //
+        // all of this is structured so that a missing value cleanly gets us to a
+        // non-faulting value of '0' for the minimum bits-per-pixel.
+        //
+        int32_t width = 0;
+        (void) AMediaFormat_getInt32(inFormat, AMEDIAFORMAT_KEY_WIDTH, &width);
+        int32_t height = 0;
+        (void) AMediaFormat_getInt32(inFormat, AMEDIAFORMAT_KEY_HEIGHT, &height);
+        int32_t bitrateConfigured = 0;
+        (void) AMediaFormat_getInt32(inFormat, AMEDIAFORMAT_KEY_BIT_RATE, &bitrateConfigured);
+
+        int64_t pixels = ((int64_t)width) * height;
+        int64_t bitrateFloor = pixels * bpp;
+
+        if (bitrateFloor > INT32_MAX) bitrateFloor = INT32_MAX;
+
+        ALOGD("minquality/bitrate: target %d floor %" PRId64 "(%.3f bpp * (%d w * %d h)",
+              bitrateConfigured, bitrateFloor, codec->getBpp(), height, width);
+
+        if (bitrateConfigured < bitrateFloor) {
+            ALOGD("minquality/target bitrate raised from %d to %" PRId64 " to maintain quality",
+                  bitrateConfigured, bitrateFloor);
+            AMediaFormat_setInt32(inFormat, AMEDIAFORMAT_KEY_BIT_RATE, (int32_t)bitrateFloor);
+        }
+    }
+
+    return 0;
+}
+
+
+bool hasQpPerFrameType(AMediaFormat *format) {
+    int32_t value;
+
+    if (!AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_I_MAX, &value)
+        || !AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_I_MIN, &value)) {
+        return true;
+    }
+    if (!AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_P_MAX, &value)
+        || !AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_P_MIN, &value)) {
+        return true;
+    }
+    if (!AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_B_MAX, &value)
+        || !AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_B_MIN, &value)) {
+        return true;
+    }
+    return false;
+}
+
+bool hasQp(AMediaFormat *format) {
+    int32_t value;
+    if (!AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_MAX, &value)
+        || !AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_MIN, &value)) {
+        return true;
+    }
+    return hasQpPerFrameType(format);
+}
+
+void qpSpreadPerFrameType(AMediaFormat *format, int delta,
+                           int qplow, int qphigh, bool override) {
+     qpSpreadMaxPerFrameType(format, delta, qphigh, override);
+     qpSpreadMinPerFrameType(format, qplow, override);
+}
+
+void qpSpreadMaxPerFrameType(AMediaFormat *format, int delta, int qphigh, bool override) {
+    ALOGV("format %p delta %d  hi %d override %d", format, delta, qphigh, override);
+
+    int32_t qpOffered = 0;
+    if (AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_MAX, &qpOffered)) {
+        // propagate to otherwise unspecified frame-specific keys
+        int32_t maxI;
+        if (override || !AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_I_MAX, &maxI)) {
+            int32_t value = std::min(qphigh, qpOffered);
+            AMediaFormat_setInt32(format, AMEDIAFORMAT_VIDEO_QP_I_MAX, value);
+        }
+        int32_t maxP;
+        if (override || !AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_P_MAX, &maxP)) {
+            int32_t value = std::min(qphigh, (std::min(qpOffered, INT32_MAX-delta) + delta));
+            AMediaFormat_setInt32(format, AMEDIAFORMAT_VIDEO_QP_P_MAX, value);
+        }
+        int32_t maxB;
+        if (override || !AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_B_MAX, &maxB)) {
+            int32_t value = std::min(qphigh, (std::min(qpOffered, INT32_MAX-2*delta) + 2*delta));
+            AMediaFormat_setInt32(format, AMEDIAFORMAT_VIDEO_QP_B_MAX, value);
+        }
+    }
+}
+
+void qpSpreadMinPerFrameType(AMediaFormat *format, int qplow, bool override) {
+    ALOGV("format %p lo %d override %d", format, qplow, override);
+
+    int32_t qpOffered = 0;
+    if (AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_MIN, &qpOffered)) {
+        int value = std::max(qplow, qpOffered);
+        // propagate to otherwise unspecified frame-specific keys
+        int32_t minI;
+        if (!AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_I_MIN, &minI)) {
+            AMediaFormat_setInt32(format, AMEDIAFORMAT_VIDEO_QP_I_MIN, value);
+        }
+        int32_t minP;
+        if (!AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_P_MIN, &minP)) {
+            AMediaFormat_setInt32(format, AMEDIAFORMAT_VIDEO_QP_P_MIN, value);
+        }
+        int32_t minB;
+        if (!AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_B_MIN, &minB)) {
+            AMediaFormat_setInt32(format, AMEDIAFORMAT_VIDEO_QP_B_MIN, value);
+        }
+    }
+}
+
+}  // namespace mediaformatshaper
+}  // namespace android
+
diff --git a/media/libmediaformatshaper/VideoShaper.cpp b/media/libmediaformatshaper/VideoShaper.cpp
new file mode 100644
index 0000000..fecd3a1
--- /dev/null
+++ b/media/libmediaformatshaper/VideoShaper.cpp
@@ -0,0 +1,98 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "VideoShaper"
+#include <utils/Log.h>
+
+#include <string>
+#include <inttypes.h>
+
+#include <media/NdkMediaFormat.h>
+
+#include <media/formatshaper/VQops.h>
+#include <media/formatshaper/CodecProperties.h>
+#include <media/formatshaper/VideoShaper.h>
+
+namespace android {
+namespace mediaformatshaper {
+
+// mediatype-specific operations
+
+vqOps_t mediaInfo[] = {
+    {
+        .mediaType = "video/avc",
+        .qpMin = 0,
+        .qpMax = 51,
+        .qpDelta = 3,
+    },
+    {
+        .mediaType = "video/hevc",
+        .qpMin = 0,
+        .qpMax = 51,
+        .qpDelta = 3,
+    },
+    {
+        .mediaType = NULL,                // matches everything, it must come last
+        .qpMin = INT32_MIN,
+        .qpMax = INT32_MAX,
+        .qpDelta = 3,
+    }
+};
+int nMediaInfos = sizeof(mediaInfo) / sizeof(mediaInfo[0]);
+
+//
+// Caller retains ownership of and responsibility for inFormat
+//
+
+int videoShaper(CodecProperties *codec, AMediaFormat* inFormat, int flags) {
+    if (codec == nullptr) {
+        return -1;
+    }
+    ALOGV("codec %s inFormat %p flags x%x", codec->getName().c_str(), inFormat, flags);
+
+    int ix;
+
+    std::string mediaType = codec->getMediaType();
+    // we should always come out of this with a selection, because the final entry
+    // is deliberately a NULL -- so that it will act as a default
+    for(ix = 0; mediaInfo[ix].mediaType != NULL; ix++) {
+        if (strcmp(mediaType.c_str(), mediaInfo[ix].mediaType) == 0) {
+            break;
+        }
+    }
+    if (ix >= nMediaInfos) {
+        // shouldn't happen, but if it does .....
+    }
+
+    vqOps_t *info = &mediaInfo[ix];
+
+    // apply any quality transforms in here..
+    (void) VQApply(codec, info, inFormat, flags);
+
+    // We must always spread and map any QP parameters.
+    // Sometimes it's something we inserted here, sometimes it's a value that the user injected.
+    //
+    qpSpreadPerFrameType(inFormat, info->qpDelta, info->qpMin, info->qpMax, /* override */ false);
+
+    //
+    return 0;
+
+}
+
+}  // namespace mediaformatshaper
+}  // namespace android
+
diff --git a/media/libmediaformatshaper/exports.lds b/media/libmediaformatshaper/exports.lds
new file mode 100644
index 0000000..a29cadb
--- /dev/null
+++ b/media/libmediaformatshaper/exports.lds
@@ -0,0 +1,6 @@
+{
+    global:
+        shaper_ops;
+    local:
+        *;
+};
diff --git a/media/libmediaformatshaper/include/media/formatshaper/CodecProperties.h b/media/libmediaformatshaper/include/media/formatshaper/CodecProperties.h
new file mode 100644
index 0000000..f7177a4
--- /dev/null
+++ b/media/libmediaformatshaper/include/media/formatshaper/CodecProperties.h
@@ -0,0 +1,106 @@
+/*
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _LIBMEDIAFORMATSHAPER_CODECPROPERTIES_H_
+#define _LIBMEDIAFORMATSHAPER_CODECPROPERTIES_H_
+
+#include <map>
+#include <mutex>
+#include <string>
+
+#include <utils/RefBase.h>
+
+namespace android {
+namespace mediaformatshaper {
+
+class CodecProperties {
+
+  public:
+    CodecProperties(std::string name, std::string mediaType);
+
+    std::string getName();
+    std::string getMediaType();
+
+    // establish a mapping from standard 'key' to non-standard 'value' in the namespace 'kind'
+    void setMapping(std::string kind, std::string key, std::string value);
+
+    // translate from standard key to non-standard key
+    // return original standard key if there is no mapping
+    std::string getMapping(std::string key, std::string kind);
+
+    // returns an array of char *, which are paired "from" and "to" values
+    // for mapping (or unmapping). it's always expressed as from->to
+    // and 'reverse' describes which strings are to be on which side.
+    const char **getMappings(std::string kind, bool reverse);
+
+    // debugging of what's in the mapping dictionary
+    void showMappings();
+
+    // does the codec support the Android S minimum quality rules
+    void setSupportedMinimumQuality(int vmaf);
+    int supportedMinimumQuality();
+
+    // qp max bound used to compensate when SupportedMinimumQuality == 0
+    // 0 == let a system default handle it
+    void setTargetQpMax(int qpmax);
+    int targetQpMax();
+
+    // target bits-per-pixel (per second) for encoding operations.
+    // This is used to calculate a minimum bitrate for any particular resolution.
+    // A 1080p (1920*1080 = 2073600 pixels) to be encoded at 5Mbps has a bpp == 2.41
+    void setBpp(double bpp) { mBpp = bpp;}
+    double getBpp() {return mBpp;}
+
+    // Does this codec support QP bounding
+    // The getMapping() methods provide any needed mapping to non-standard keys.
+    void setSupportsQp(bool supported) { mSupportsQp = supported;}
+    bool supportsQp() { return mSupportsQp;}
+
+    int  supportedApi();
+
+    // a codec is not usable until it has been registered with its
+    // name/mediaType.
+    bool isRegistered() { return mIsRegistered;}
+    void setRegistered(bool registered) { mIsRegistered = registered;}
+
+  private:
+    std::string mName;
+    std::string mMediaType;
+    int mApi = 0;
+    int mMinimumQuality = 0;
+    int mTargetQpMax = 0;
+    bool mSupportsQp = false;
+    double mBpp = 0.0;
+
+    std::mutex mMappingLock;
+    // XXX figure out why I'm having problems getting compiler to like GUARDED_BY
+    std::map<std::string, std::string> mMappings /*GUARDED_BY(mMappingLock)*/ ;
+    std::map<std::string, std::string> mUnMappings /*GUARDED_BY(mMappingLock)*/ ;
+
+    bool mIsRegistered = false;
+
+    // DISALLOW_EVIL_CONSTRUCTORS(CodecProperties);
+};
+
+extern CodecProperties *findCodec(const char *codecName, const char *mediaType);
+extern CodecProperties *registerCodec(CodecProperties *codec, const char *codecName,
+                               const char *mediaType);
+
+
+} // namespace mediaformatshaper
+} // namespace android
+
+#endif  //  _LIBMEDIAFORMATSHAPER_CODECPROPERTIES_H_
diff --git a/media/libmediaformatshaper/include/media/formatshaper/FormatShaper.h b/media/libmediaformatshaper/include/media/formatshaper/FormatShaper.h
new file mode 100644
index 0000000..8ad81cd
--- /dev/null
+++ b/media/libmediaformatshaper/include/media/formatshaper/FormatShaper.h
@@ -0,0 +1,130 @@
+/*
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/*
+ * structure defining the function pointers that system-side folks
+ * use to invoke operations within the MediaFormat shaping library
+ *
+ * This is the include file the outside world uses.
+ */
+
+#ifndef LIBMEDIAFORMATSHAPER_FORMATSHAPER_H_
+#define LIBMEDIAFORMATSHAPER_FORMATSHAPER_H_
+
+namespace android {
+namespace mediaformatshaper {
+
+/*
+ * An opaque handle clients use to refer to codec+mediatype being shaped.
+ */
+typedef void (*shaperHandle_t);
+
+/*
+ * shapeFormat applies any re-shaping on the passed AMediaFormat.
+ * The updated format is returned in-place.
+ */
+typedef int (*shapeFormat_t)(shaperHandle_t shaperHandle,
+                             AMediaFormat* inFormat, int flags);
+
+/*
+ * getMappings returns any mappings from standard keys to codec-specific keys.
+ * The return is a vector of const char* which are set up in pairs
+ * of "from", and "to".
+ * This array is always finished with a pair of nulls (to indicate a null from
+ * and a null to)
+ */
+
+typedef const char **(*getMappings_t)(shaperHandle_t shaperHandle, const char *kind);
+
+/*
+ * Returns a handle to the shaperHandle for the specified codec and mediatype.
+ * If none exists, it returns null.
+ */
+typedef shaperHandle_t (*findShaper_t)(const char *codecName, const char *mediaType);
+
+/*
+ * Creates and returns an empty shaperHandle that the client can populate using the
+ * setFeature() and setMap() operations.
+ */
+typedef shaperHandle_t (*createShaper_t)(const char *codecName, const char *mediaType);
+
+/*
+ * Registers the indicated shaperHandle for the indicated codec and mediatype.
+ * This call returns the shaperHandle that is to be used for further shaper operations.
+ * The returned value may be different than the one passed as an argument if another
+ * shaperinfo was registered while the passed one was being configured.
+ */
+typedef shaperHandle_t (*registerShaper_t)(shaperHandle_t shaper, const char *codecName,
+                                         const char *mediaType);
+
+/*
+ * establishes a mapping between the standard key "from" and the codec-specific key "to"
+ * in the "kind" namespace. This mapping is specific to the indicated codecName when
+ * encoding for the indicated mediaType.
+ */
+typedef int (*setMap_t)(shaperHandle_t shaper, const char *kind, const char *from, const char *to);
+
+/*
+ * establishes that codec "codecName" encoding for "mediaType" supports the indicated
+ * feature at the indicated value
+ */
+typedef int (*setFeature_t)(shaperHandle_t shaper, const char *feature, int value);
+
+/*
+ * The expectation is that the client will implement a flow similar to the following when
+ * setting up an encoding.
+ *
+ * if ((shaper=formatShaperops->findShaper(codecName, mediaType)) == NULL) {
+ *     for (all codec features) {
+ *         get feature name, feature value
+ *         formatShaperops->setFeature(shaper, featurename, featurevalue)
+ *     }
+ *     for (all codec mappings) {
+ *         get mapping 'kind', mapping 'from', mapping 'to'
+ *         formatShaperops->setMap(shaper, kind, from, to)
+ *     }
+ * }
+ *
+ */
+
+typedef struct FormatShaperOps {
+    const uint32_t version;
+
+    /*
+     * find, create, setup, and register the shaper info
+     */
+    findShaper_t findShaper;
+    createShaper_t createShaper;
+    setMap_t setMap;
+    setFeature_t setFeature;
+    registerShaper_t registerShaper;
+
+    /*
+     * use the shaper info
+     */
+    shapeFormat_t shapeFormat;
+    getMappings_t getMappings;
+    getMappings_t getReverseMappings;
+} FormatShaperOps_t;
+
+// versioning information
+const uint32_t SHAPER_VERSION_UNKNOWN = 0;
+const uint32_t SHAPER_VERSION_V1 = 1;
+
+}  // namespace mediaformatshaper
+}  // namespace android
+
+#endif  // LIBMEDIAFORMATSHAPER_FORMATSHAPER_H_
diff --git a/media/libmediaformatshaper/include/media/formatshaper/VQops.h b/media/libmediaformatshaper/include/media/formatshaper/VQops.h
new file mode 100644
index 0000000..807e8af
--- /dev/null
+++ b/media/libmediaformatshaper/include/media/formatshaper/VQops.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef LIBMEDIAFORMATSHAPER_VQOPS_H_
+#define LIBMEDIAFORMATSHAPER_VQOPS_H_
+
+#include <media/formatshaper/CodecProperties.h>
+#include <media/NdkMediaFormat.h>
+
+namespace android {
+namespace mediaformatshaper {
+
+// parameterized info for the different mediaType types
+typedef struct {
+    const char *mediaType;
+
+    int32_t qpMin;      // codec type limit (e.g. h264, not c2.android.avc.encoder)
+    int32_t qpMax;
+    int32_t qpDelta;    // from I to P to B
+
+} vqOps_t;
+
+int VQApply(CodecProperties *codec, vqOps_t *info, AMediaFormat* inFormat, int flags);
+
+// spread the overall QP setting to any un-set per-frame-type settings
+void qpSpreadPerFrameType(AMediaFormat *format, int delta, int qplow, int qphigh, bool override);
+void qpSpreadMaxPerFrameType(AMediaFormat *format, int delta, int qphigh, bool override);
+void qpSpreadMinPerFrameType(AMediaFormat *format, int qplow, bool override);
+
+// does the format have QP bounding entries
+bool hasQp(AMediaFormat *format);
+bool hasQpPerFrameType(AMediaFormat *format);
+
+}  // namespace mediaformatshaper
+}  // namespace android
+
+#endif  // LIBMEDIAFORMATSHAPER_VQOPS_H_
diff --git a/media/libmediaformatshaper/include/media/formatshaper/VideoShaper.h b/media/libmediaformatshaper/include/media/formatshaper/VideoShaper.h
new file mode 100644
index 0000000..53f1b13
--- /dev/null
+++ b/media/libmediaformatshaper/include/media/formatshaper/VideoShaper.h
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef LIBMEDIAFORMATSHAPER_VIDEOSHAPER_H_
+#define LIBMEDIAFORMATSHAPER_VIDEOSHAPER_H_
+
+namespace android {
+namespace mediaformatshaper {
+
+/*
+ * runs through video-specific shaping operations for the codec/format combination.
+ * updates inFormat in place.
+ */
+int videoShaper(CodecProperties *codec,  AMediaFormat* inFormat, int flags);
+
+}  // namespace mediaformatshaper
+}  // namespace android
+
+#endif  // LIBMEDIAFORMATSHAPER_VIDEOSHAPER_H_
diff --git a/media/libmediametrics/include/MediaMetricsConstants.h b/media/libmediametrics/include/MediaMetricsConstants.h
index 0f80d35..de4f8d4 100644
--- a/media/libmediametrics/include/MediaMetricsConstants.h
+++ b/media/libmediametrics/include/MediaMetricsConstants.h
@@ -142,6 +142,7 @@
 #define AMEDIAMETRICS_PROP_SESSIONID      "sessionId"      // int32
 #define AMEDIAMETRICS_PROP_SHARINGMODE    "sharingMode"    // string value, "exclusive", shared"
 #define AMEDIAMETRICS_PROP_SOURCE         "source"         // string (AudioAttributes)
+#define AMEDIAMETRICS_PROP_STARTTHRESHOLDFRAMES "startThresholdFrames" // int32 (AudioTrack)
 #define AMEDIAMETRICS_PROP_STARTUPMS      "startupMs"      // double value
 // State is "ACTIVE" or "STOPPED" for AudioRecord
 #define AMEDIAMETRICS_PROP_STATE          "state"          // string
@@ -187,6 +188,7 @@
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_SETLOGSESSIONID  "setLogSessionId" // AudioTrack, Record
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_SETPLAYBACKPARAM "setPlaybackParam" // AudioTrack
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_SETPLAYERIID "setPlayerIId" // AudioTrack
+#define AMEDIAMETRICS_PROP_EVENT_VALUE_SETSTARTTHRESHOLD "setStartThreshold" // AudioTrack
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_SETVOICEVOLUME   "setVoiceVolume" // AudioFlinger
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_SETVOLUME  "setVolume"  // AudioTrack
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_START      "start"  // AudioTrack, AudioRecord
diff --git a/media/libmediatranscoding/TranscoderWrapper.cpp b/media/libmediatranscoding/TranscoderWrapper.cpp
index d9c98c6..b19e711 100644
--- a/media/libmediatranscoding/TranscoderWrapper.cpp
+++ b/media/libmediatranscoding/TranscoderWrapper.cpp
@@ -22,6 +22,7 @@
 #include <media/MediaTranscoder.h>
 #include <media/NdkCommon.h>
 #include <media/TranscoderWrapper.h>
+#include <media/TranscodingRequest.h>
 #include <utils/Log.h>
 
 #include <thread>
@@ -221,9 +222,10 @@
 }
 
 void TranscoderWrapper::start(ClientIdType clientId, SessionIdType sessionId,
-                              const TranscodingRequestParcel& request, uid_t callingUid,
+                              const TranscodingRequestParcel& requestParcel, uid_t callingUid,
                               const std::shared_ptr<ITranscodingClientCallback>& clientCb) {
-    queueEvent(Event::Start, clientId, sessionId, [=, &request] {
+    TranscodingRequest request{requestParcel};
+    queueEvent(Event::Start, clientId, sessionId, [=] {
         media_status_t err = handleStart(clientId, sessionId, request, callingUid, clientCb);
         if (err != AMEDIA_OK) {
             cleanup();
@@ -255,9 +257,10 @@
 }
 
 void TranscoderWrapper::resume(ClientIdType clientId, SessionIdType sessionId,
-                               const TranscodingRequestParcel& request, uid_t callingUid,
+                               const TranscodingRequestParcel& requestParcel, uid_t callingUid,
                                const std::shared_ptr<ITranscodingClientCallback>& clientCb) {
-    queueEvent(Event::Resume, clientId, sessionId, [=, &request] {
+    TranscodingRequest request{requestParcel};
+    queueEvent(Event::Resume, clientId, sessionId, [=] {
         media_status_t err = handleResume(clientId, sessionId, request, callingUid, clientCb);
         if (err != AMEDIA_OK) {
             cleanup();
@@ -363,6 +366,12 @@
         return AMEDIA_ERROR_INVALID_OPERATION;
     }
 
+    // Unwrap the callback and send heartbeats to the client after each operation during setup.
+    auto callback = mCallback.lock();
+    if (callback == nullptr) {
+        return AMEDIA_ERROR_INVALID_OPERATION;
+    }
+
     Status status;
     ::ndk::ScopedFileDescriptor srcFd, dstFd;
     int srcFdInt = request.sourceFd.get();
@@ -376,6 +385,8 @@
         srcFdInt = srcFd.get();
     }
 
+    callback->onHeartBeat(clientId, sessionId);
+
     int dstFdInt = request.destinationFd.get();
     if (dstFdInt < 0) {
         // Open dest file with "rw", as the transcoder could potentially reuse part of it
@@ -390,6 +401,8 @@
         dstFdInt = dstFd.get();
     }
 
+    callback->onHeartBeat(clientId, sessionId);
+
     mCurrentClientId = clientId;
     mCurrentSessionId = sessionId;
     mCurrentCallingUid = callingUid;
@@ -402,6 +415,8 @@
         return AMEDIA_ERROR_UNKNOWN;
     }
 
+    callback->onHeartBeat(clientId, sessionId);
+
     media_status_t err = mTranscoder->configureSource(srcFdInt);
     if (err != AMEDIA_OK) {
         ALOGE("failed to configure source: %d", err);
@@ -409,6 +424,8 @@
         return err;
     }
 
+    callback->onHeartBeat(clientId, sessionId);
+
     std::vector<std::shared_ptr<AMediaFormat>> trackFormats = mTranscoder->getTrackFormats();
     if (trackFormats.size() == 0) {
         ALOGE("failed to get track formats!");
@@ -416,6 +433,8 @@
         return AMEDIA_ERROR_MALFORMED;
     }
 
+    callback->onHeartBeat(clientId, sessionId);
+
     for (int i = 0; i < trackFormats.size(); ++i) {
         std::shared_ptr<AMediaFormat> format;
         const char* mime = nullptr;
@@ -434,6 +453,8 @@
             *failureReason = TranscodingLogger::SessionEndedReason::CONFIG_TRACK_FAILED;
             return err;
         }
+
+        callback->onHeartBeat(clientId, sessionId);
     }
 
     err = mTranscoder->configureDestination(dstFdInt);
@@ -443,6 +464,8 @@
         return err;
     }
 
+    callback->onHeartBeat(clientId, sessionId);
+
     return AMEDIA_OK;
 }
 
diff --git a/media/libmediatranscoding/include/media/TranscodingRequest.h b/media/libmediatranscoding/include/media/TranscodingRequest.h
index 16f4cc0..d38fc59 100644
--- a/media/libmediatranscoding/include/media/TranscodingRequest.h
+++ b/media/libmediatranscoding/include/media/TranscodingRequest.h
@@ -18,11 +18,15 @@
 #define ANDROID_MEDIA_TRANSCODING_REQUEST_H
 
 #include <aidl/android/media/TranscodingRequestParcel.h>
+#include <android/binder_parcel.h>
 
 namespace android {
 
 using ::aidl::android::media::TranscodingRequestParcel;
 
+// TODO: replace __ANDROID_API_FUTURE__ with 31 when it's official (b/178144708)
+#define __TRANSCODING_MIN_API__ __ANDROID_API_FUTURE__
+
 // Helper class for duplicating a TranscodingRequestParcel
 class TranscodingRequest : public TranscodingRequestParcel {
 public:
@@ -36,20 +40,28 @@
 
 private:
     void setTo(const TranscodingRequestParcel& parcel) {
-        sourceFilePath = parcel.sourceFilePath;
-        sourceFd = ndk::ScopedFileDescriptor(dup(parcel.sourceFd.get()));
-        destinationFilePath = parcel.destinationFilePath;
-        destinationFd = ndk::ScopedFileDescriptor(dup(parcel.destinationFd.get()));
-        clientUid = parcel.clientUid;
-        clientPid = parcel.clientPid;
-        clientPackageName = parcel.clientPackageName;
-        transcodingType = parcel.transcodingType;
-        requestedVideoTrackFormat = parcel.requestedVideoTrackFormat;
-        priority = parcel.priority;
-        requestProgressUpdate = parcel.requestProgressUpdate;
-        requestSessionEventUpdate = parcel.requestSessionEventUpdate;
-        isForTesting = parcel.isForTesting;
-        testConfig = parcel.testConfig;
+        if (__builtin_available(android __TRANSCODING_MIN_API__, *)) {
+            AParcel* p = AParcel_create();
+            parcel.writeToParcel(p);
+            AParcel_setDataPosition(p, 0);
+            readFromParcel(p);
+            AParcel_delete(p);
+        } else {
+            sourceFilePath = parcel.sourceFilePath;
+            sourceFd = ndk::ScopedFileDescriptor(dup(parcel.sourceFd.get()));
+            destinationFilePath = parcel.destinationFilePath;
+            destinationFd = ndk::ScopedFileDescriptor(dup(parcel.destinationFd.get()));
+            clientUid = parcel.clientUid;
+            clientPid = parcel.clientPid;
+            clientPackageName = parcel.clientPackageName;
+            transcodingType = parcel.transcodingType;
+            requestedVideoTrackFormat = parcel.requestedVideoTrackFormat;
+            priority = parcel.priority;
+            requestProgressUpdate = parcel.requestProgressUpdate;
+            requestSessionEventUpdate = parcel.requestSessionEventUpdate;
+            isForTesting = parcel.isForTesting;
+            testConfig = parcel.testConfig;
+        }
     }
 };
 
diff --git a/media/libmediatranscoding/transcoder/MediaSampleWriter.cpp b/media/libmediatranscoding/transcoder/MediaSampleWriter.cpp
index 88c1c42..10b2e80 100644
--- a/media/libmediatranscoding/transcoder/MediaSampleWriter.cpp
+++ b/media/libmediatranscoding/transcoder/MediaSampleWriter.cpp
@@ -328,8 +328,8 @@
                 }
                 lastProgressUpdate = progress;
             }
-            progressSinceLastReport = true;
         }
+        progressSinceLastReport = true;
     }
 
     return AMEDIA_OK;
diff --git a/media/libmediatranscoding/transcoder/MediaTranscoder.cpp b/media/libmediatranscoding/transcoder/MediaTranscoder.cpp
index 413f049..879241e 100644
--- a/media/libmediatranscoding/transcoder/MediaTranscoder.cpp
+++ b/media/libmediatranscoding/transcoder/MediaTranscoder.cpp
@@ -158,6 +158,11 @@
         return;
     }
 
+    // The sample writer is not yet started so notify the caller that progress is still made.
+    if (mHeartBeatIntervalUs > 0) {
+        mCallbacks->onHeartBeat(this);
+    }
+
     MediaTrackTranscoder* mutableTranscoder = const_cast<MediaTrackTranscoder*>(transcoder);
     mutableTranscoder->setSampleConsumer(consumer);
 
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 71a4ad8..1aa1848 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -74,6 +74,12 @@
     kMaxIndicesToCheck = 32, // used when enumerating supported formats and profiles
 };
 
+namespace {
+
+constexpr char TUNNEL_PEEK_KEY[] = "android._trigger-tunnel-peek";
+
+}
+
 // OMX errors are directly mapped into status_t range if
 // there is no corresponding MediaError status code.
 // Use the statusFromOMXError(int32_t omxError) function.
@@ -1465,6 +1471,10 @@
     mCallback->onOutputFramesRendered(done);
 }
 
+void ACodec::onFirstTunnelFrameReady() {
+    mCallback->onFirstTunnelFrameReady();
+}
+
 ACodec::BufferInfo *ACodec::dequeueBufferFromNativeWindow() {
     ANativeWindowBuffer *buf;
     CHECK(mNativeWindow.get() != NULL);
@@ -2460,6 +2470,30 @@
     return err;
 }
 
+status_t ACodec::setTunnelPeek(int32_t tunnelPeek) {
+    if (mIsEncoder) {
+        ALOGE("encoder does not support %s", TUNNEL_PEEK_KEY);
+        return BAD_VALUE;
+    }
+    if (!mTunneled) {
+        ALOGE("%s is only supported in tunnel mode", TUNNEL_PEEK_KEY);
+        return BAD_VALUE;
+    }
+
+    OMX_CONFIG_BOOLEANTYPE config;
+    InitOMXParams(&config);
+    config.bEnabled = (OMX_BOOL)(tunnelPeek != 0);
+    status_t err = mOMXNode->setConfig(
+            (OMX_INDEXTYPE)OMX_IndexConfigAndroidTunnelPeek,
+            &config, sizeof(config));
+    if (err != OK) {
+        ALOGE("decoder cannot set %s to %d (err %d)",
+              TUNNEL_PEEK_KEY, tunnelPeek, err);
+    }
+
+    return err;
+}
+
 status_t ACodec::setAudioPresentation(int32_t presentationId, int32_t programId) {
     OMX_AUDIO_CONFIG_ANDROID_AUDIOPRESENTATION config;
     InitOMXParams(&config);
@@ -5683,15 +5717,18 @@
     int32_t range, standard, transfer;
     convertCodecColorAspectsToPlatformAspects(aspects, &range, &standard, &transfer);
 
+    int32_t dsRange, dsStandard, dsTransfer;
+    getColorConfigFromDataSpace(dataSpace, &dsRange, &dsStandard, &dsTransfer);
+
     // if some aspects are unspecified, use dataspace fields
     if (range == 0) {
-        range = (dataSpace & HAL_DATASPACE_RANGE_MASK) >> HAL_DATASPACE_RANGE_SHIFT;
+        range = dsRange;
     }
     if (standard == 0) {
-        standard = (dataSpace & HAL_DATASPACE_STANDARD_MASK) >> HAL_DATASPACE_STANDARD_SHIFT;
+        standard = dsStandard;
     }
     if (transfer == 0) {
-        transfer = (dataSpace & HAL_DATASPACE_TRANSFER_MASK) >> HAL_DATASPACE_TRANSFER_SHIFT;
+        transfer = dsTransfer;
     }
 
     mOutputFormat = mOutputFormat->dup(); // trigger an output format changed event
@@ -7890,6 +7927,15 @@
                 &presentation, sizeof(presentation));
         }
     }
+
+    int32_t tunnelPeek = 0;
+    if (params->findInt32(TUNNEL_PEEK_KEY, &tunnelPeek)) {
+        status_t err = setTunnelPeek(tunnelPeek);
+        if (err != OK) {
+            return err;
+        }
+    }
+
     return setVendorParameters(params);
 }
 
@@ -8355,6 +8401,12 @@
             return true;
         }
 
+        case OMX_EventOnFirstTunnelFrameReady:
+        {
+            mCodec->onFirstTunnelFrameReady();
+            return true;
+        }
+
         default:
             return BaseState::onOMXEvent(event, data1, data2);
     }
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index 930bc0f..52434b3 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -352,6 +352,8 @@
         "libwebm",
         "libstagefright_id3",
         "media_permission-aidl-cpp",
+        "libmediandk_format",
+        "libmedia_ndkformatpriv",
     ],
 
     header_libs:[
@@ -359,6 +361,7 @@
         "libnativeloader-headers",
         "libstagefright_xmlparser_headers",
         "media_ndk_headers",
+        "libmediaformatshaper_headers",
     ],
 
     export_shared_lib_headers: [
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index f80b22f..be21a5d 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -20,9 +20,11 @@
 #include <utils/Log.h>
 
 #include <set>
+#include <stdlib.h>
 
 #include <inttypes.h>
 #include <stdlib.h>
+#include <dlfcn.h>
 
 #include <C2Buffer.h>
 
@@ -35,6 +37,7 @@
 #include <aidl/android/media/IResourceManagerService.h>
 #include <android/binder_ibinder.h>
 #include <android/binder_manager.h>
+#include <android/dlext.h>
 #include <binder/IMemory.h>
 #include <binder/MemoryDealer.h>
 #include <cutils/properties.h>
@@ -47,6 +50,10 @@
 #include <media/MediaCodecInfo.h>
 #include <media/MediaMetricsItem.h>
 #include <media/MediaResource.h>
+#include <media/NdkMediaErrorPriv.h>
+#include <media/NdkMediaFormat.h>
+#include <media/NdkMediaFormatPriv.h>
+#include <media/formatshaper/FormatShaper.h>
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AMessage.h>
@@ -68,6 +75,7 @@
 #include <media/stagefright/OMXClient.h>
 #include <media/stagefright/PersistentSurface.h>
 #include <media/stagefright/SurfaceUtils.h>
+#include <nativeloader/dlext_namespaces.h>
 #include <private/android_filesystem_config.h>
 #include <utils/Singleton.h>
 
@@ -416,6 +424,7 @@
     kWhatSignaledInputEOS    = 'seos',
     kWhatOutputFramesRendered = 'outR',
     kWhatOutputBuffersChanged = 'outC',
+    kWhatFirstTunnelFrameReady = 'ftfR',
 };
 
 class BufferCallback : public CodecBase::BufferCallback {
@@ -478,6 +487,7 @@
     virtual void onSignaledInputEOS(status_t err) override;
     virtual void onOutputFramesRendered(const std::list<FrameRenderTracker::Info> &done) override;
     virtual void onOutputBuffersChanged() override;
+    virtual void onFirstTunnelFrameReady() override;
 private:
     const sp<AMessage> mNotify;
 };
@@ -598,6 +608,12 @@
     notify->post();
 }
 
+void CodecCallback::onFirstTunnelFrameReady() {
+    sp<AMessage> notify(mNotify->dup());
+    notify->setInt32("what", kWhatFirstTunnelFrameReady);
+    notify->post();
+}
+
 }  // namespace
 
 ////////////////////////////////////////////////////////////////////////////////
@@ -696,6 +712,7 @@
       mTunneledInputWidth(0),
       mTunneledInputHeight(0),
       mTunneled(false),
+      mTunnelPeekState(TunnelPeekState::kEnabledNoBuffer),
       mHaveInputSurface(false),
       mHavePendingInputBuffers(false),
       mCpuBoostRequested(false),
@@ -901,6 +918,47 @@
     }
 }
 
+constexpr const char *MediaCodec::asString(TunnelPeekState state, const char *default_string){
+    switch(state) {
+        case TunnelPeekState::kEnabledNoBuffer:
+            return "EnabledNoBuffer";
+        case TunnelPeekState::kDisabledNoBuffer:
+            return "DisabledNoBuffer";
+        case TunnelPeekState::kBufferDecoded:
+            return "BufferDecoded";
+        case TunnelPeekState::kBufferRendered:
+            return "BufferRendered";
+        default:
+            return default_string;
+    }
+}
+
+void MediaCodec::updateTunnelPeek(const sp<AMessage> &msg) {
+    int32_t tunnelPeek = 0;
+    if (!msg->findInt32("tunnel-peek", &tunnelPeek)){
+        return;
+    }
+    if(tunnelPeek == 0){
+        if (mTunnelPeekState == TunnelPeekState::kEnabledNoBuffer) {
+            mTunnelPeekState = TunnelPeekState::kDisabledNoBuffer;
+            ALOGV("TunnelPeekState: %s -> %s",
+                  asString(TunnelPeekState::kEnabledNoBuffer),
+                  asString(TunnelPeekState::kDisabledNoBuffer));
+            return;
+        }
+    } else {
+        if (mTunnelPeekState == TunnelPeekState::kDisabledNoBuffer) {
+            mTunnelPeekState = TunnelPeekState::kEnabledNoBuffer;
+            ALOGV("TunnelPeekState: %s -> %s",
+                  asString(TunnelPeekState::kDisabledNoBuffer),
+                  asString(TunnelPeekState::kEnabledNoBuffer));
+            return;
+        }
+    }
+
+    ALOGV("Ignoring tunnel-peek=%d for %s", tunnelPeek, asString(mTunnelPeekState));
+}
+
 bool MediaCodec::Histogram::setup(int nbuckets, int64_t width, int64_t floor)
 {
     if (nbuckets <= 0 || width <= 0) {
@@ -1326,6 +1384,15 @@
     return msg->post();
 }
 
+status_t MediaCodec::setOnFirstTunnelFrameReadyNotification(const sp<AMessage> &notify) {
+    sp<AMessage> msg = new AMessage(kWhatSetNotification, this);
+    msg->setMessage("first-tunnel-frame-ready", notify);
+    return msg->post();
+}
+
+static void mapFormat(AString componentName, const sp<AMessage> &format, const char *kind,
+                      bool reverse);
+
 status_t MediaCodec::configure(
         const sp<AMessage> &format,
         const sp<Surface> &nativeWindow,
@@ -1382,6 +1449,7 @@
             ALOGE("Invalid size(s), width=%d, height=%d", mVideoWidth, mVideoHeight);
             return BAD_VALUE;
         }
+
     } else {
         if (mMetricsHandle != 0) {
             int32_t channelCount;
@@ -1395,6 +1463,19 @@
         }
     }
 
+    // apply framework level modifications to the mediaformat for encoding
+    // XXX: default off for a while during dogfooding
+    static const char *enable_property = "debug.stagefright.enableshaping";
+    int8_t enableShaping = property_get_bool(enable_property, 0);
+    if (!enableShaping) {
+        ALOGD("format shaping disabled via property '%s'", enable_property);
+    } else {
+        if (flags & CONFIGURE_FLAG_ENCODE) {
+            (void) shapeMediaFormat(format, flags);
+            mapFormat(mComponentName, format, nullptr, false);
+        }
+    }
+
     updateLowLatency(format);
 
     msg->setMessage("format", format);
@@ -1464,6 +1545,316 @@
     return err;
 }
 
+// Media Format Shaping support
+//
+
+static android::mediaformatshaper::FormatShaperOps_t *sShaperOps = NULL;
+
+static bool connectFormatShaper() {
+    static std::once_flag sCheckOnce;
+    static void *libHandle = NULL;
+
+    std::call_once(sCheckOnce, [&](){
+
+        // prefer any copy in the mainline module
+        //
+        android_namespace_t *mediaNs = android_get_exported_namespace("com_android_media");
+        AString libraryName = "libmediaformatshaper.so";
+
+        if (mediaNs != NULL) {
+            static const android_dlextinfo dlextinfo = {
+                .flags = ANDROID_DLEXT_USE_NAMESPACE,
+                .library_namespace = mediaNs,
+            };
+
+            AString libraryMainline = "/apex/com.android.media/";
+#if __LP64__
+            libraryMainline.append("lib64/");
+#else
+            libraryMainline.append("lib/");
+#endif
+            libraryMainline.append(libraryName);
+
+            libHandle = android_dlopen_ext(libraryMainline.c_str(), RTLD_NOW|RTLD_NODELETE,
+                                                 &dlextinfo);
+
+            if (libHandle != NULL) {
+                sShaperOps = (android::mediaformatshaper::FormatShaperOps_t*)
+                                dlsym(libHandle, "shaper_ops");
+            } else {
+                ALOGW("connectFormatShaper: unable to load mainline formatshaper %s",
+                      libraryMainline.c_str());
+            }
+        } else {
+            ALOGV("connectFormatShaper: couldn't find media namespace.");
+        }
+
+        // fall back to the system partition, if present.
+        //
+        if (sShaperOps == NULL) {
+
+            libHandle = dlopen(libraryName.c_str(), RTLD_NOW|RTLD_NODELETE);
+
+            if (libHandle != NULL) {
+                sShaperOps = (android::mediaformatshaper::FormatShaperOps_t*)
+                                dlsym(libHandle, "shaper_ops");
+            } else {
+                ALOGW("connectFormatShaper: unable to load formatshaper %s", libraryName.c_str());
+            }
+        }
+
+        if (sShaperOps != nullptr
+            && sShaperOps->version != android::mediaformatshaper::SHAPER_VERSION_V1) {
+            ALOGW("connectFormatShaper: unhandled version ShaperOps: %d, DISABLED",
+                  sShaperOps->version);
+            sShaperOps = nullptr;
+        }
+
+        if (sShaperOps != nullptr) {
+            ALOGV("connectFormatShaper: connected to library %s", libraryName.c_str());
+        }
+
+    });
+
+    return true;
+}
+
+// a construct to force the above dlopen() to run very early.
+// goal: so the dlopen() doesn't happen on critical path of latency sensitive apps
+// failure of this means that cold start of those apps is slower by the time to dlopen()
+//
+static bool forceEarlyLoadingShaper = connectFormatShaper();
+
+// parse the codec's properties: mapping, whether it meets min quality, etc
+// and pass them into the video quality code
+//
+status_t MediaCodec::setupFormatShaper(AString mediaType) {
+    ALOGV("setupFormatShaper: initializing shaper data for codec %s mediaType %s",
+          mComponentName.c_str(), mediaType.c_str());
+
+    nsecs_t mapping_started = systemTime(SYSTEM_TIME_MONOTONIC);
+
+    // see if the shaper is already present, if so return
+    mediaformatshaper::shaperHandle_t shaperHandle;
+    shaperHandle = sShaperOps->findShaper(mComponentName.c_str(), mediaType.c_str());
+    if (shaperHandle != nullptr) {
+        ALOGV("shaperhandle %p -- no initialization needed", shaperHandle);
+        return OK;
+    }
+
+    // not there, so we get to build & register one
+    shaperHandle = sShaperOps->createShaper(mComponentName.c_str(), mediaType.c_str());
+    if (shaperHandle == nullptr) {
+        ALOGW("unable to create a shaper for codec %s mediaType %s",
+              mComponentName.c_str(), mediaType.c_str());
+        return OK;
+    }
+
+    sp<MediaCodecInfo::Capabilities> capabilities =
+                    mCodecInfo->getCapabilitiesFor(mediaType.c_str());
+    if (capabilities == nullptr) {
+        ALOGI("no capabilities as part of the codec?");
+    } else {
+        const sp<AMessage> &details = capabilities->getDetails();
+        AString mapTarget;
+        int count = details->countEntries();
+        for(int ix = 0; ix < count; ix++) {
+            AMessage::Type entryType;
+            const char *mapSrc = details->getEntryNameAt(ix, &entryType);
+            // XXX: re-use ix from getEntryAt() to avoid additional findXXX() invocation
+            //
+            static const char *featurePrefix = "feature-";
+            static const int featurePrefixLen = strlen(featurePrefix);
+            static const char *mappingPrefix = "mapping-";
+            static const int mappingPrefixLen = strlen(mappingPrefix);
+
+            if (mapSrc == NULL) {
+                continue;
+            } else if (!strncmp(mapSrc, featurePrefix, featurePrefixLen)) {
+                int32_t intValue;
+                if (details->findInt32(mapSrc, &intValue)) {
+                    ALOGV("-- feature '%s' -> %d", mapSrc, intValue);
+                    (void)(sShaperOps->setFeature)(shaperHandle, &mapSrc[featurePrefixLen],
+                                                   intValue);
+                }
+                continue;
+            } else if (!strncmp(mapSrc, mappingPrefix, mappingPrefixLen)) {
+                AString target;
+                if (details->findString(mapSrc, &target)) {
+                    ALOGV("-- mapping %s: map %s to %s", mapSrc, &mapSrc[mappingPrefixLen],
+                          target.c_str());
+                    // key is really "kind-key"
+                    // separate that, so setMap() sees the triple  kind, key, value
+                    const char *kind = &mapSrc[mappingPrefixLen];
+                    const char *sep = strchr(kind, '-');
+                    if (sep != NULL) {
+                        const char *key = sep + 1;  // safe: sep checked non-NULL
+                        std::string xkind = std::string(kind, sep - kind);
+                        (void)(sShaperOps->setMap)(shaperHandle, xkind.c_str(),
+                                                   key, target.c_str());
+                    }
+                }
+            }
+        }
+    }
+    shaperHandle = sShaperOps->registerShaper(shaperHandle,
+                                              mComponentName.c_str(), mediaType.c_str());
+
+    nsecs_t mapping_finished = systemTime(SYSTEM_TIME_MONOTONIC);
+    ALOGD("setupFormatShaper: populated shaper node for codec %s: %" PRId64 " us",
+          mComponentName.c_str(), (mapping_finished - mapping_started)/1000);
+
+    return OK;
+}
+
+
+// Format Shaping
+//      Mapping and Manipulation of encoding parameters
+//
+
+status_t MediaCodec::shapeMediaFormat(
+            const sp<AMessage> &format,
+            uint32_t flags) {
+    ALOGV("shapeMediaFormat entry");
+
+    if (!(flags & CONFIGURE_FLAG_ENCODE)) {
+        ALOGW("shapeMediaFormat: not encoder");
+        return OK;
+    }
+    if (mCodecInfo == NULL) {
+        ALOGW("shapeMediaFormat: no codecinfo");
+        return OK;
+    }
+
+    AString mediaType;
+    if (!format->findString("mime", &mediaType)) {
+        ALOGW("shapeMediaFormat: no mediaType information");
+        return OK;
+    }
+
+    // make sure we have the function entry points for the shaper library
+    //
+
+    connectFormatShaper();
+    if (sShaperOps == nullptr) {
+        ALOGW("shapeMediaFormat: no MediaFormatShaper hooks available");
+        return OK;
+    }
+
+    // find the shaper information for this codec+mediaType pair
+    //
+    mediaformatshaper::shaperHandle_t shaperHandle;
+    shaperHandle = sShaperOps->findShaper(mComponentName.c_str(), mediaType.c_str());
+    if (shaperHandle == nullptr)  {
+        setupFormatShaper(mediaType);
+        shaperHandle = sShaperOps->findShaper(mComponentName.c_str(), mediaType.c_str());
+    }
+    if (shaperHandle == nullptr) {
+        ALOGW("shapeMediaFormat: no handler for codec %s mediatype %s",
+              mComponentName.c_str(), mediaType.c_str());
+        return OK;
+    }
+
+    // run the shaper
+    //
+
+    ALOGV("Shaping input: %s", format->debugString(0).c_str());
+
+    sp<AMessage> updatedFormat = format->dup();
+    AMediaFormat *updatedNdkFormat = AMediaFormat_fromMsg(&updatedFormat);
+
+    int result = (*sShaperOps->shapeFormat)(shaperHandle, updatedNdkFormat, flags);
+    if (result == 0) {
+        AMediaFormat_getFormat(updatedNdkFormat, &updatedFormat);
+
+        sp<AMessage> deltas = updatedFormat->changesFrom(format, false /* deep */);
+        ALOGD("shapeMediaFormat: deltas: %s", deltas->debugString(2).c_str());
+
+        // note that this means that for anything in both, the copy in deltas wins
+        format->extend(deltas);
+    }
+
+    AMediaFormat_delete(updatedNdkFormat);
+    return OK;
+}
+
+static void mapFormat(AString componentName, const sp<AMessage> &format, const char *kind,
+                      bool reverse) {
+    AString mediaType;
+    if (!format->findString("mime", &mediaType)) {
+        ALOGW("mapFormat: no mediaType information");
+        return;
+    }
+    ALOGV("mapFormat: codec %s mediatype %s kind %s reverse %d", componentName.c_str(),
+          mediaType.c_str(), kind ? kind : "<all>", reverse);
+
+    // make sure we have the function entry points for the shaper library
+    //
+
+    connectFormatShaper();
+    if (sShaperOps == nullptr) {
+        ALOGW("mapFormat: no MediaFormatShaper hooks available");
+        return;
+    }
+
+    // find the shaper information for this codec+mediaType pair
+    //
+    mediaformatshaper::shaperHandle_t shaperHandle;
+    shaperHandle = sShaperOps->findShaper(componentName.c_str(), mediaType.c_str());
+    if (shaperHandle == nullptr) {
+        ALOGV("mapFormat: no shaper handle");
+        return;
+    }
+
+    const char **mappings;
+    if (reverse)
+        mappings = sShaperOps->getReverseMappings(shaperHandle, kind);
+    else
+        mappings = sShaperOps->getMappings(shaperHandle, kind);
+
+    if (mappings == nullptr) {
+        ALOGV("no mappings returned");
+        return;
+    }
+
+    ALOGV("Pre-mapping: %s",  format->debugString(2).c_str());
+    // do the mapping
+    //
+    size_t entries = format->countEntries();
+    for (int i = 0; ; i += 2) {
+        if (mappings[i] == nullptr) {
+            break;
+        }
+
+        size_t ix = format->findEntryByName(mappings[i]);
+        if (ix < entries) {
+            ALOGV("map '%s' to '%s'", mappings[i], mappings[i+1]);
+            status_t status = format->setEntryNameAt(ix, mappings[i+1]);
+            if (status != OK) {
+                ALOGW("Unable to map from '%s' to '%s': status %d",
+                      mappings[i], mappings[i+1], status);
+            }
+        }
+    }
+    ALOGV("Post-mapping: %s",  format->debugString(2).c_str());
+
+
+    // reclaim the mapping memory
+    for (int i = 0; ; i += 2) {
+        if (mappings[i] == nullptr) {
+            break;
+        }
+        free((void*)mappings[i]);
+        free((void*)mappings[i + 1]);
+    }
+    free(mappings);
+    mappings = nullptr;
+}
+
+//
+// end of Format Shaping hooks within MediaCodec
+//
+
 status_t MediaCodec::releaseCrypto()
 {
     ALOGV("releaseCrypto");
@@ -2074,6 +2465,22 @@
     return OK;
 }
 
+status_t MediaCodec::querySupportedVendorParameters(std::vector<std::string> *names) {
+    return mCodec->querySupportedParameters(names);
+}
+
+status_t MediaCodec::describeParameter(const std::string &name, CodecParameterDescriptor *desc) {
+    return mCodec->describeParameter(name, desc);
+}
+
+status_t MediaCodec::subscribeToVendorParameters(const std::vector<std::string> &names) {
+    return mCodec->subscribeToParameters(names);
+}
+
+status_t MediaCodec::unsubscribeFromVendorParameters(const std::vector<std::string> &names) {
+    return mCodec->unsubscribeFromParameters(names);
+}
+
 void MediaCodec::requestActivityNotification(const sp<AMessage> &notify) {
     sp<AMessage> msg = new AMessage(kWhatRequestActivityNotification, this);
     msg->setMessage("notify", notify);
@@ -2658,9 +3065,17 @@
 
                 case kWhatOutputFramesRendered:
                 {
-                    // ignore these in all states except running, and check that we have a
-                    // notification set
-                    if (mState == STARTED && mOnFrameRenderedNotification != NULL) {
+                    // ignore these in all states except running
+                    if (mState != STARTED) {
+                        break;
+                    }
+                    TunnelPeekState previousState = mTunnelPeekState;
+                    mTunnelPeekState = TunnelPeekState::kBufferRendered;
+                    ALOGV("TunnelPeekState: %s -> %s",
+                          asString(previousState),
+                          asString(TunnelPeekState::kBufferRendered));
+                    // check that we have a notification set
+                    if (mOnFrameRenderedNotification != NULL) {
                         sp<AMessage> notify = mOnFrameRenderedNotification->dup();
                         notify->setMessage("data", msg);
                         notify->post();
@@ -2668,6 +3083,41 @@
                     break;
                 }
 
+                case kWhatFirstTunnelFrameReady:
+                {
+                    if (mState != STARTED) {
+                        break;
+                    }
+                    switch(mTunnelPeekState) {
+                        case TunnelPeekState::kDisabledNoBuffer:
+                            mTunnelPeekState = TunnelPeekState::kBufferDecoded;
+                            ALOGV("TunnelPeekState: %s -> %s",
+                                  asString(TunnelPeekState::kDisabledNoBuffer),
+                                  asString(TunnelPeekState::kBufferDecoded));
+                            break;
+                        case TunnelPeekState::kEnabledNoBuffer:
+                            mTunnelPeekState = TunnelPeekState::kBufferDecoded;
+                            ALOGV("TunnelPeekState: %s -> %s",
+                                  asString(TunnelPeekState::kEnabledNoBuffer),
+                                  asString(TunnelPeekState::kBufferDecoded));
+                            {
+                                sp<AMessage> parameters = new AMessage();
+                                parameters->setInt32("android._trigger-tunnel-peek", 1);
+                                mCodec->signalSetParameters(parameters);
+                            }
+                            break;
+                        default:
+                            break;
+                    }
+
+                    if (mOnFirstTunnelFrameReadyNotification != nullptr) {
+                        sp<AMessage> notify = mOnFirstTunnelFrameReadyNotification->dup();
+                        notify->setMessage("data", msg);
+                        notify->post();
+                    }
+                    break;
+                }
+
                 case kWhatFillThisBuffer:
                 {
                     /* size_t index = */updateBuffers(kPortIndexInput, msg);
@@ -2886,6 +3336,9 @@
             if (msg->findMessage("on-frame-rendered", &notify)) {
                 mOnFrameRenderedNotification = notify;
             }
+            if (msg->findMessage("first-tunnel-frame-ready", &notify)) {
+                mOnFirstTunnelFrameReadyNotification = notify;
+            }
             break;
         }
 
@@ -3127,6 +3580,11 @@
             }
             sp<AReplyToken> replyID;
             CHECK(msg->senderAwaitsResponse(&replyID));
+            TunnelPeekState previousState = mTunnelPeekState;
+            mTunnelPeekState = TunnelPeekState::kEnabledNoBuffer;
+            ALOGV("TunnelPeekState: %s -> %s",
+                  asString(previousState),
+                  asString(TunnelPeekState::kEnabledNoBuffer));
 
             mReplyID = replyID;
             setState(STARTING);
@@ -3561,6 +4019,11 @@
 
             mCodec->signalFlush();
             returnBuffersToCodec();
+            TunnelPeekState previousState = mTunnelPeekState;
+            mTunnelPeekState = TunnelPeekState::kEnabledNoBuffer;
+            ALOGV("TunnelPeekState: %s -> %s",
+                  asString(previousState),
+                  asString(TunnelPeekState::kEnabledNoBuffer));
             break;
         }
 
@@ -3691,6 +4154,7 @@
         buffer->meta()->setObject("changedKeys", changedKeys);
     }
     mOutputFormat = format;
+    mapFormat(mComponentName, format, nullptr, true);
     ALOGV("[%s] output format changed to: %s",
             mComponentName.c_str(), mOutputFormat->debugString(4).c_str());
 
@@ -4469,6 +4933,8 @@
 
 status_t MediaCodec::onSetParameters(const sp<AMessage> &params) {
     updateLowLatency(params);
+    mapFormat(mComponentName, params, nullptr, false);
+    updateTunnelPeek(params);
     mCodec->signalSetParameters(params);
 
     return OK;
diff --git a/media/libstagefright/MediaCodecSource.cpp b/media/libstagefright/MediaCodecSource.cpp
index 0f7df24..876d06c 100644
--- a/media/libstagefright/MediaCodecSource.cpp
+++ b/media/libstagefright/MediaCodecSource.cpp
@@ -775,12 +775,9 @@
                     android_dataspace dataspace = static_cast<android_dataspace>(ds);
                     ColorUtils::convertDataSpaceToV0(dataspace);
                     ALOGD("Updating dataspace to %x", dataspace);
-                    int32_t standard = (int32_t(dataspace) & HAL_DATASPACE_STANDARD_MASK)
-                        >> HAL_DATASPACE_STANDARD_SHIFT;
-                    int32_t transfer = (int32_t(dataspace) & HAL_DATASPACE_TRANSFER_MASK)
-                        >> HAL_DATASPACE_TRANSFER_SHIFT;
-                    int32_t range = (int32_t(dataspace) & HAL_DATASPACE_RANGE_MASK)
-                        >> HAL_DATASPACE_RANGE_SHIFT;
+                    int32_t standard, transfer, range;
+                    ColorUtils::getColorConfigFromDataSpace(
+                            dataspace, &range, &standard, &transfer);
                     sp<AMessage> msg = new AMessage;
                     msg->setInt32(KEY_COLOR_STANDARD, standard);
                     msg->setInt32(KEY_COLOR_TRANSFER, transfer);
diff --git a/media/libstagefright/MediaExtractorFactory.cpp b/media/libstagefright/MediaExtractorFactory.cpp
index d77845f..2520e2a 100644
--- a/media/libstagefright/MediaExtractorFactory.cpp
+++ b/media/libstagefright/MediaExtractorFactory.cpp
@@ -253,6 +253,7 @@
                 (GetExtractorDef) dlsym(libHandle, "GETEXTRACTORDEF");
             if (getDef == nullptr) {
                 ALOGI("no sniffer found in %s", libPath.string());
+                dlclose(libHandle);
                 continue;
             }
 
diff --git a/media/libstagefright/foundation/ColorUtils.cpp b/media/libstagefright/foundation/ColorUtils.cpp
index 070e325..3812afe 100644
--- a/media/libstagefright/foundation/ColorUtils.cpp
+++ b/media/libstagefright/foundation/ColorUtils.cpp
@@ -613,6 +613,35 @@
 }
 
 // static
+void ColorUtils::getColorConfigFromDataSpace(
+        const android_dataspace &dataspace, int32_t *range, int32_t *standard, int32_t *transfer) {
+    uint32_t gfxRange =
+        (dataspace & HAL_DATASPACE_RANGE_MASK) >> HAL_DATASPACE_RANGE_SHIFT;
+    uint32_t gfxStandard =
+        (dataspace & HAL_DATASPACE_STANDARD_MASK) >> HAL_DATASPACE_STANDARD_SHIFT;
+    uint32_t gfxTransfer =
+        (dataspace & HAL_DATASPACE_TRANSFER_MASK) >> HAL_DATASPACE_TRANSFER_SHIFT;
+
+    // assume 1-to-1 mapping to HAL values (to deal with potential vendor extensions)
+    CU::ColorRange    cuRange    = CU::kColorRangeUnspecified;
+    CU::ColorStandard cuStandard = CU::kColorStandardUnspecified;
+    CU::ColorTransfer cuTransfer = CU::kColorTransferUnspecified;
+    // TRICKY: use & to ensure all three mappings are completed
+    if (!(sGfxRanges.map(gfxRange, &cuRange) & sGfxStandards.map(gfxStandard, &cuStandard)
+            & sGfxTransfers.map(gfxTransfer, &cuTransfer))) {
+        ALOGW("could not safely map graphics dataspace (R:%u S:%u T:%u) to "
+              "platform color aspects (R:%u(%s) S:%u(%s) T:%u(%s)",
+              gfxRange, gfxStandard, gfxTransfer,
+              cuRange,    asString(cuRange),
+              cuStandard, asString(cuStandard),
+              cuTransfer, asString(cuTransfer));
+    }
+    *range    = cuRange;
+    *standard = cuStandard;
+    *transfer = cuTransfer;
+}
+
+// static
 void ColorUtils::getColorConfigFromFormat(
         const sp<AMessage> &format, int32_t *range, int32_t *standard, int32_t *transfer) {
     if (!format->findInt32("color-range", range)) {
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/ColorUtils.h b/media/libstagefright/foundation/include/media/stagefright/foundation/ColorUtils.h
index cd0af2b..9e3f718 100644
--- a/media/libstagefright/foundation/include/media/stagefright/foundation/ColorUtils.h
+++ b/media/libstagefright/foundation/include/media/stagefright/foundation/ColorUtils.h
@@ -156,6 +156,10 @@
     // suited to blending. This requires implicit color space conversion on part of the device.
     static android_dataspace getDataSpaceForColorAspects(ColorAspects &aspects, bool mayExpand);
 
+    // it returns the platform color configs from given |dataspace|.
+    static void getColorConfigFromDataSpace(
+            const android_dataspace &dataspace, int32_t *range, int32_t *standard, int32_t *transfer);
+
     // converts |dataSpace| to a V0 enum, and returns true if dataSpace is an aspect-only value
     static bool convertDataSpaceToV0(android_dataspace &dataSpace);
 
diff --git a/media/libstagefright/include/media/stagefright/ACodec.h b/media/libstagefright/include/media/stagefright/ACodec.h
index 30bc44e..c84cc10 100644
--- a/media/libstagefright/include/media/stagefright/ACodec.h
+++ b/media/libstagefright/include/media/stagefright/ACodec.h
@@ -518,6 +518,7 @@
     status_t setLowLatency(int32_t lowLatency);
     status_t setLatency(uint32_t latency);
     status_t getLatency(uint32_t *latency);
+    status_t setTunnelPeek(int32_t tunnelPeek);
     status_t setAudioPresentation(int32_t presentationId, int32_t programId);
     status_t setOperatingRate(float rateFloat, bool isVideo);
     status_t getIntraRefreshPeriod(uint32_t *intraRefreshPeriod);
@@ -578,6 +579,8 @@
     void notifyOfRenderedFrames(
             bool dropIncomplete = false, FrameRenderTracker::Info *until = NULL);
 
+    void onFirstTunnelFrameReady();
+
     // Pass |expectedFormat| to print a warning if the format differs from it.
     // Using sp<> instead of const sp<>& because expectedFormat is likely the current mOutputFormat
     // which will get updated inside.
diff --git a/media/libstagefright/include/media/stagefright/CodecBase.h b/media/libstagefright/include/media/stagefright/CodecBase.h
index dd6df90..efb2f86 100644
--- a/media/libstagefright/include/media/stagefright/CodecBase.h
+++ b/media/libstagefright/include/media/stagefright/CodecBase.h
@@ -61,6 +61,11 @@
 
 using hardware::cas::native::V1_0::IDescrambler;
 
+struct CodecParameterDescriptor {
+    std::string name;
+    AMessage::Type type;
+};
+
 struct CodecBase : public AHandler, /* static */ ColorUtils {
     /**
      * This interface defines events firing from CodecBase back to MediaCodec.
@@ -173,6 +178,10 @@
          * Notify MediaCodec that output buffers are changed.
          */
         virtual void onOutputBuffersChanged() = 0;
+        /**
+         * Notify MediaCodec that the first tunnel frame is ready.
+         */
+        virtual void onFirstTunnelFrameReady() = 0;
     };
 
     /**
@@ -233,6 +242,64 @@
     virtual void signalSetParameters(const sp<AMessage> &msg) = 0;
     virtual void signalEndOfInputStream() = 0;
 
+    /**
+     * Query supported parameters from this instance, and fill |names| with the
+     * names of the parameters.
+     *
+     * \param names string vector to fill with supported parameters.
+     * \return OK if successful;
+     *         BAD_VALUE if |names| is null;
+     *         INVALID_OPERATION if already released;
+     *         ERROR_UNSUPPORTED if not supported.
+     */
+    virtual status_t querySupportedParameters([[maybe_unused]] std::vector<std::string> *names) {
+        return ERROR_UNSUPPORTED;
+    }
+    /**
+     * Fill |desc| with description of the parameter with |name|.
+     *
+     * \param name name of the parameter to describe
+     * \param desc pointer to CodecParameterDescriptor to be filled
+     * \return OK if successful;
+     *         BAD_VALUE if |desc| is null;
+     *         NAME_NOT_FOUND if |name| is not recognized by the component;
+     *         INVALID_OPERATION if already released;
+     *         ERROR_UNSUPPORTED if not supported.
+     */
+    virtual status_t describeParameter(
+            [[maybe_unused]] const std::string &name,
+            [[maybe_unused]] CodecParameterDescriptor *desc) {
+        return ERROR_UNSUPPORTED;
+    }
+    /**
+     * Subscribe to parameters in |names| and get output format change event
+     * when they change.
+     * Unrecognized / already subscribed parameters are ignored.
+     *
+     * \param names names of parameters to subscribe
+     * \return OK if successful;
+     *         INVALID_OPERATION if already released;
+     *         ERROR_UNSUPPORTED if not supported.
+     */
+    virtual status_t subscribeToParameters(
+            [[maybe_unused]] const std::vector<std::string> &names) {
+        return ERROR_UNSUPPORTED;
+    }
+    /**
+     * Unsubscribe from parameters in |names| and no longer get
+     * output format change event when they change.
+     * Unrecognized / already unsubscribed parameters are ignored.
+     *
+     * \param names names of parameters to unsubscribe
+     * \return OK if successful;
+     *         INVALID_OPERATION if already released;
+     *         ERROR_UNSUPPORTED if not supported.
+     */
+    virtual status_t unsubscribeFromParameters(
+            [[maybe_unused]] const std::vector<std::string> &names) {
+        return ERROR_UNSUPPORTED;
+    }
+
     typedef CodecBase *(*CreateCodecFunc)(void);
     typedef PersistentSurface *(*CreateInputSurfaceFunc)(void);
 
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index 5f64686..3f93e6d 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -50,6 +50,7 @@
 struct BatteryChecker;
 class BufferChannelBase;
 struct CodecBase;
+struct CodecParameterDescriptor;
 class IBatteryStats;
 struct ICrypto;
 class MediaCodecBuffer;
@@ -128,6 +129,8 @@
 
     status_t setOnFrameRenderedNotification(const sp<AMessage> &notify);
 
+    status_t setOnFirstTunnelFrameReadyNotification(const sp<AMessage> &notify);
+
     status_t createInputSurface(sp<IGraphicBufferProducer>* bufferProducer);
 
     status_t setInputSurface(const sp<PersistentSurface> &surface);
@@ -246,6 +249,11 @@
 
     status_t setParameters(const sp<AMessage> &params);
 
+    status_t querySupportedVendorParameters(std::vector<std::string> *names);
+    status_t describeParameter(const std::string &name, CodecParameterDescriptor *desc);
+    status_t subscribeToVendorParameters(const std::vector<std::string> &names);
+    status_t unsubscribeFromVendorParameters(const std::vector<std::string> &names);
+
     // Create a MediaCodec notification message from a list of rendered or dropped render infos
     // by adding rendered frame information to a base notification message. Returns the number
     // of frames that were rendered.
@@ -361,6 +369,22 @@
         bool mOwnedByClient;
     };
 
+    // This type is used to track the tunnel mode video peek state machine:
+    //
+    // DisabledNoBuffer -> EnabledNoBuffer  when tunnel-peek = true
+    // EnabledNoBuffer  -> DisabledNoBuffer when tunnel-peek = false
+    // DisabledNoBuffer -> BufferDecoded    when kWhatFirstTunnelFrameReady
+    // EnabledNoBuffer  -> BufferDecoded    when kWhatFirstTunnelFrameReady
+    // BufferDecoded    -> BufferRendered   when kWhatFrameRendered
+    // <all states>     -> EnabledNoBuffer  when flush
+    // <all states>     -> EnabledNoBuffer  when stop then configure then start
+    enum struct TunnelPeekState {
+        kDisabledNoBuffer,
+        kEnabledNoBuffer,
+        kBufferDecoded,
+        kBufferRendered,
+    };
+
     struct ResourceManagerServiceProxy;
 
     State mState;
@@ -387,12 +411,15 @@
     void flushMediametrics();
     void updateEphemeralMediametrics(mediametrics_handle_t item);
     void updateLowLatency(const sp<AMessage> &msg);
+    constexpr const char *asString(TunnelPeekState state, const char *default_string="?");
+    void updateTunnelPeek(const sp<AMessage> &msg);
 
     sp<AMessage> mOutputFormat;
     sp<AMessage> mInputFormat;
     sp<AMessage> mCallback;
     sp<AMessage> mOnFrameRenderedNotification;
     sp<AMessage> mAsyncReleaseCompleteNotification;
+    sp<AMessage> mOnFirstTunnelFrameReadyNotification;
 
     sp<ResourceManagerServiceProxy> mResourceManagerProxy;
 
@@ -408,6 +435,17 @@
     // configure parameter
     sp<AMessage> mConfigureMsg;
 
+    // rewrites the format description during configure() for encoding.
+    // format and flags as they exist within configure()
+    // the (possibly) updated format is returned in place.
+    status_t shapeMediaFormat(
+            const sp<AMessage> &format,
+            uint32_t flags);
+
+    // populate the format shaper library with information for this codec encoding
+    // for the indicated media type
+    status_t setupFormatShaper(AString mediaType);
+
     // Used only to synchronize asynchronous getBufferAndFormat
     // across all the other (synchronous) buffer state change
     // operations, such as de/queueIn/OutputBuffer, start and
@@ -428,6 +466,7 @@
     int32_t mTunneledInputWidth;
     int32_t mTunneledInputHeight;
     bool mTunneled;
+    TunnelPeekState mTunnelPeekState;
 
     sp<IDescrambler> mDescrambler;
 
diff --git a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
index 8fb4db2..1a5609a 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
@@ -857,9 +857,9 @@
 constexpr char PARAMETER_KEY_REQUEST_SYNC_FRAME[] = "request-sync";
 constexpr char PARAMETER_KEY_SUSPEND[] = "drop-input-frames";
 constexpr char PARAMETER_KEY_SUSPEND_TIME[] = "drop-start-time-us";
+constexpr char PARAMETER_KEY_TUNNEL_PEEK[] = "tunnel-peek";
 constexpr char PARAMETER_KEY_VIDEO_BITRATE[] = "video-bitrate";
 
 }
 
 #endif  // MEDIA_CODEC_CONSTANTS_H_
-
diff --git a/media/libstagefright/renderfright/gl/GLESRenderEngine.cpp b/media/libstagefright/renderfright/gl/GLESRenderEngine.cpp
index b7f8cb4..04da9a5 100644
--- a/media/libstagefright/renderfright/gl/GLESRenderEngine.cpp
+++ b/media/libstagefright/renderfright/gl/GLESRenderEngine.cpp
@@ -404,7 +404,7 @@
     }
 
     mImageManager = std::make_unique<ImageManager>(this);
-    mImageManager->initThread();
+    mImageManager->initThread(args.realtime);
     mDrawingBuffer = createFramebuffer();
     sp<GraphicBuffer> buf =
             new GraphicBuffer(1, 1, PIXEL_FORMAT_RGBA_8888, 1,
diff --git a/media/libstagefright/renderfright/gl/ImageManager.cpp b/media/libstagefright/renderfright/gl/ImageManager.cpp
index 6256649..5b0cf52 100644
--- a/media/libstagefright/renderfright/gl/ImageManager.cpp
+++ b/media/libstagefright/renderfright/gl/ImageManager.cpp
@@ -32,14 +32,16 @@
 
 ImageManager::ImageManager(GLESRenderEngine* engine) : mEngine(engine) {}
 
-void ImageManager::initThread() {
+void ImageManager::initThread(bool realtime) {
     mThread = std::thread([this]() { threadMain(); });
     pthread_setname_np(mThread.native_handle(), "ImageManager");
-    // Use SCHED_FIFO to minimize jitter
-    struct sched_param param = {0};
-    param.sched_priority = 2;
-    if (pthread_setschedparam(mThread.native_handle(), SCHED_FIFO, &param) != 0) {
-        ALOGE("Couldn't set SCHED_FIFO for ImageManager");
+    if (realtime) {
+        // Use SCHED_FIFO to minimize jitter
+        struct sched_param param = {0};
+        param.sched_priority = 2;
+        if (pthread_setschedparam(mThread.native_handle(), SCHED_FIFO, &param) != 0) {
+            ALOGE("Couldn't set SCHED_FIFO for ImageManager");
+        }
     }
 }
 
diff --git a/media/libstagefright/renderfright/gl/ImageManager.h b/media/libstagefright/renderfright/gl/ImageManager.h
index be67de8..6be8e3c 100644
--- a/media/libstagefright/renderfright/gl/ImageManager.h
+++ b/media/libstagefright/renderfright/gl/ImageManager.h
@@ -42,7 +42,7 @@
     // Starts the background thread for the ImageManager
     // We need this to guarantee that the class is fully-constructed before the
     // thread begins running.
-    void initThread();
+    void initThread(bool realtime);
     void cacheAsync(const sp<GraphicBuffer>& buffer, const std::shared_ptr<Barrier>& barrier)
             EXCLUDES(mMutex);
     status_t cache(const sp<GraphicBuffer>& buffer);
diff --git a/media/libstagefright/renderfright/include/renderengine/RenderEngine.h b/media/libstagefright/renderfright/include/renderengine/RenderEngine.h
index af2870f..373d07b 100644
--- a/media/libstagefright/renderfright/include/renderengine/RenderEngine.h
+++ b/media/libstagefright/renderfright/include/renderengine/RenderEngine.h
@@ -206,6 +206,7 @@
     bool supportsBackgroundBlur;
     RenderEngine::ContextPriority contextPriority;
     RenderEngine::RenderEngineType renderEngineType;
+    bool realtime;
 
     struct Builder;
 
@@ -215,7 +216,8 @@
                              bool _enableProtectedContext, bool _precacheToneMapperShaderOnly,
                              bool _supportsBackgroundBlur,
                              RenderEngine::ContextPriority _contextPriority,
-                             RenderEngine::RenderEngineType _renderEngineType)
+                             RenderEngine::RenderEngineType _renderEngineType,
+                             bool _realtime)
           : pixelFormat(_pixelFormat),
             imageCacheSize(_imageCacheSize),
             useColorManagement(_useColorManagement),
@@ -223,7 +225,8 @@
             precacheToneMapperShaderOnly(_precacheToneMapperShaderOnly),
             supportsBackgroundBlur(_supportsBackgroundBlur),
             contextPriority(_contextPriority),
-            renderEngineType(_renderEngineType) {}
+            renderEngineType(_renderEngineType),
+            realtime(_realtime) {}
     RenderEngineCreationArgs() = delete;
 };
 
@@ -262,10 +265,15 @@
         this->renderEngineType = renderEngineType;
         return *this;
     }
+    Builder& setRealtime(bool realtime) {
+        this->realtime = realtime;
+        return *this;
+    }
     RenderEngineCreationArgs build() const {
         return RenderEngineCreationArgs(pixelFormat, imageCacheSize, useColorManagement,
                                         enableProtectedContext, precacheToneMapperShaderOnly,
-                                        supportsBackgroundBlur, contextPriority, renderEngineType);
+                                        supportsBackgroundBlur, contextPriority, renderEngineType,
+                                        realtime);
     }
 
 private:
@@ -278,6 +286,7 @@
     bool supportsBackgroundBlur = false;
     RenderEngine::ContextPriority contextPriority = RenderEngine::ContextPriority::MEDIUM;
     RenderEngine::RenderEngineType renderEngineType = RenderEngine::RenderEngineType::GLES;
+    bool realtime = true;
 };
 
 class BindNativeBufferAsFramebuffer {
diff --git a/media/mediaserver/manifest_media_c2_software.xml b/media/mediaserver/manifest_media_c2_software.xml
index f23ed44..a5b4896 100644
--- a/media/mediaserver/manifest_media_c2_software.xml
+++ b/media/mediaserver/manifest_media_c2_software.xml
@@ -2,7 +2,7 @@
     <hal>
         <name>android.hardware.media.c2</name>
         <transport>hwbinder</transport>
-        <version>1.1</version>
+        <version>1.2</version>
         <interface>
             <name>IComponentStore</name>
             <instance>software</instance>
diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp
index 392d339..031e0cf 100644
--- a/services/audioflinger/Effects.cpp
+++ b/services/audioflinger/Effects.cpp
@@ -261,6 +261,12 @@
         }
         registered = mPolicyRegistered;
         enabled = mPolicyEnabled;
+        // The simultaneous release of two EffectHandles with the same EffectModule
+        // may cause us to call this method at the same time.
+        // This may deadlock under some circumstances (b/180941720).  Avoid this.
+        if (!doRegister && !(registered && doEnable)) {
+            return NO_ERROR;
+        }
         mPolicyLock.lock();
     }
     ALOGV("%s name %s id %d session %d doRegister %d registered %d doEnable %d enabled %d",
diff --git a/services/audioflinger/FastMixer.cpp b/services/audioflinger/FastMixer.cpp
index cd3c743..13e2ced 100644
--- a/services/audioflinger/FastMixer.cpp
+++ b/services/audioflinger/FastMixer.cpp
@@ -353,7 +353,8 @@
 #endif
         //ALOGD("Eric FastMixer::onWork() mIsWarm");
     } else {
-        dumpState->mTimestampVerifier.discontinuity();
+        dumpState->mTimestampVerifier.discontinuity(
+            dumpState->mTimestampVerifier.DISCONTINUITY_MODE_CONTINUOUS);
         // See comment in if block.
 #ifdef FASTMIXER_LOG_HIST_TS
         LOG_AUDIO_STATE();
diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h
index a5b3077..ca9b747 100644
--- a/services/audioflinger/PlaybackTracks.h
+++ b/services/audioflinger/PlaybackTracks.h
@@ -286,8 +286,6 @@
     };
     sp<AudioVibrationController> mAudioVibrationController;
     sp<os::ExternalVibration>    mExternalVibration;
-    /** How many frames should be in the buffer before the track is considered ready */
-    const size_t        mFrameCountToBeReady;
 
     audio_dual_mono_mode_t mDualMonoMode = AUDIO_DUAL_MONO_MODE_OFF;
     float               mAudioDescriptionMixLevel = -std::numeric_limits<float>::infinity();
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 90f7a61..7f91a54 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -626,7 +626,7 @@
     mIoJitterMs.reset();
     mLatencyMs.reset();
     mProcessTimeMs.reset();
-    mTimestampVerifier.discontinuity();
+    mTimestampVerifier.discontinuity(mTimestampVerifier.DISCONTINUITY_MODE_CONTINUOUS);
 
     sp<ConfigEvent> configEvent = (ConfigEvent *)new IoConfigEvent(event, pid, portId);
     sendConfigEvent_l(configEvent);
@@ -2748,7 +2748,7 @@
         // the timestamp frame position to reset to 0 for direct and offload threads.
         // (Out of sequence requests are ignored, since the discontinuity would be handled
         // elsewhere, e.g. in flush).
-        mTimestampVerifier.discontinuity();
+        mTimestampVerifier.discontinuity(mTimestampVerifier.DISCONTINUITY_MODE_ZERO);
         mDrainSequence &= ~1;
         mWaitWorkCV.signal();
     }
@@ -3440,7 +3440,6 @@
 
     mStandbyTimeNs = systemTime();
     int64_t lastLoopCountWritten = -2; // never matches "previous" loop, when loopCount = 0.
-    int64_t lastFramesWritten = -1;    // track changes in timestamp server frames written
 
     // MIXER
     nsecs_t lastWarning = 0;
@@ -3476,14 +3475,6 @@
 
     checkSilentMode_l();
 
-    // DIRECT and OFFLOAD threads should reset frame count to zero on stop/flush
-    // TODO: add confirmation checks:
-    // 1) DIRECT threads and linear PCM format really resets to 0?
-    // 2) Is frame count really valid if not linear pcm?
-    // 3) Are all 64 bits of position returned, not just lowest 32 bits?
-    if (mType == OFFLOAD || mType == DIRECT) {
-        mTimestampVerifier.setDiscontinuityMode(mTimestampVerifier.DISCONTINUITY_MODE_ZERO);
-    }
     audio_patch_handle_t lastDownstreamPatchHandle = AUDIO_PATCH_HANDLE_NONE;
 
     // loopCount is used for statistics and diagnostics.
@@ -3555,135 +3546,8 @@
                 logString = NULL;
             }
 
-            // Collect timestamp statistics for the Playback Thread types that support it.
-            if (mType == MIXER
-                    || mType == DUPLICATING
-                    || mType == DIRECT
-                    || mType == OFFLOAD) { // no indentation
-            // Gather the framesReleased counters for all active tracks,
-            // and associate with the sink frames written out.  We need
-            // this to convert the sink timestamp to the track timestamp.
-            bool kernelLocationUpdate = false;
-            ExtendedTimestamp timestamp; // use private copy to fetch
-            if (mStandby) {
-                mTimestampVerifier.discontinuity();
-            } else if (threadloop_getHalTimestamp_l(&timestamp) == OK) {
-                mTimestampVerifier.add(timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL],
-                        timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL],
-                        mSampleRate);
+            collectTimestamps_l();
 
-                if (isTimestampCorrectionEnabled()) {
-                    ALOGVV("TS_BEFORE: %d %lld %lld", id(),
-                            (long long)timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL],
-                            (long long)timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]);
-                    auto correctedTimestamp = mTimestampVerifier.getLastCorrectedTimestamp();
-                    timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]
-                            = correctedTimestamp.mFrames;
-                    timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL]
-                            = correctedTimestamp.mTimeNs;
-                    ALOGVV("TS_AFTER: %d %lld %lld", id(),
-                            (long long)timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL],
-                            (long long)timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]);
-
-                    // Note: Downstream latency only added if timestamp correction enabled.
-                    if (mDownstreamLatencyStatMs.getN() > 0) { // we have latency info.
-                        const int64_t newPosition =
-                                timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]
-                                - int64_t(mDownstreamLatencyStatMs.getMean() * mSampleRate * 1e-3);
-                        // prevent retrograde
-                        timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL] = max(
-                                newPosition,
-                                (mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]
-                                        - mSuspendedFrames));
-                    }
-                }
-
-                // We always fetch the timestamp here because often the downstream
-                // sink will block while writing.
-
-                // We keep track of the last valid kernel position in case we are in underrun
-                // and the normal mixer period is the same as the fast mixer period, or there
-                // is some error from the HAL.
-                if (mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL] >= 0) {
-                    mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL_LASTKERNELOK] =
-                            mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL];
-                    mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL_LASTKERNELOK] =
-                            mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL];
-
-                    mTimestamp.mPosition[ExtendedTimestamp::LOCATION_SERVER_LASTKERNELOK] =
-                            mTimestamp.mPosition[ExtendedTimestamp::LOCATION_SERVER];
-                    mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_SERVER_LASTKERNELOK] =
-                            mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_SERVER];
-                }
-
-                if (timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL] >= 0) {
-                    kernelLocationUpdate = true;
-                } else {
-                    ALOGVV("getTimestamp error - no valid kernel position");
-                }
-
-                // copy over kernel info
-                mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL] =
-                        timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]
-                        + mSuspendedFrames; // add frames discarded when suspended
-                mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL] =
-                        timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL];
-            } else {
-                mTimestampVerifier.error();
-            }
-
-            // mFramesWritten for non-offloaded tracks are contiguous
-            // even after standby() is called. This is useful for the track frame
-            // to sink frame mapping.
-            bool serverLocationUpdate = false;
-            if (mFramesWritten != lastFramesWritten) {
-                serverLocationUpdate = true;
-                lastFramesWritten = mFramesWritten;
-            }
-            // Only update timestamps if there is a meaningful change.
-            // Either the kernel timestamp must be valid or we have written something.
-            if (kernelLocationUpdate || serverLocationUpdate) {
-                if (serverLocationUpdate) {
-                    // use the time before we called the HAL write - it is a bit more accurate
-                    // to when the server last read data than the current time here.
-                    //
-                    // If we haven't written anything, mLastIoBeginNs will be -1
-                    // and we use systemTime().
-                    mTimestamp.mPosition[ExtendedTimestamp::LOCATION_SERVER] = mFramesWritten;
-                    mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_SERVER] = mLastIoBeginNs == -1
-                            ? systemTime() : mLastIoBeginNs;
-                }
-
-                for (const sp<Track> &t : mActiveTracks) {
-                    if (!t->isFastTrack()) {
-                        t->updateTrackFrameInfo(
-                                t->mAudioTrackServerProxy->framesReleased(),
-                                mFramesWritten,
-                                mSampleRate,
-                                mTimestamp);
-                    }
-                }
-            }
-
-            if (audio_has_proportional_frames(mFormat)) {
-                const double latencyMs = mTimestamp.getOutputServerLatencyMs(mSampleRate);
-                if (latencyMs != 0.) { // note 0. means timestamp is empty.
-                    mLatencyMs.add(latencyMs);
-                }
-            }
-
-            } // if (mType ... ) { // no indentation
-#if 0
-            // logFormat example
-            if (z % 100 == 0) {
-                timespec ts;
-                clock_gettime(CLOCK_MONOTONIC, &ts);
-                LOGT("This is an integer %d, this is a float %f, this is my "
-                    "pid %p %% %s %t", 42, 3.14, "and this is a timestamp", ts);
-                LOGT("A deceptive null-terminated string %\0");
-            }
-            ++z;
-#endif
             saveOutputTracks();
             if (mSignalPending) {
                 // A signal was raised while we were unlocked
@@ -4129,6 +3993,148 @@
     return false;
 }
 
+void AudioFlinger::PlaybackThread::collectTimestamps_l()
+{
+    // Collect timestamp statistics for the Playback Thread types that support it.
+    if (mType != MIXER
+            && mType != DUPLICATING
+            && mType != DIRECT
+            && mType != OFFLOAD) {
+        return;
+    }
+    if (mStandby) {
+        mTimestampVerifier.discontinuity(discontinuityForStandbyOrFlush());
+        return;
+    } else if (mHwPaused) {
+        mTimestampVerifier.discontinuity(mTimestampVerifier.DISCONTINUITY_MODE_CONTINUOUS);
+        return;
+    }
+
+    // Gather the framesReleased counters for all active tracks,
+    // and associate with the sink frames written out.  We need
+    // this to convert the sink timestamp to the track timestamp.
+    bool kernelLocationUpdate = false;
+    ExtendedTimestamp timestamp; // use private copy to fetch
+
+    // Always query HAL timestamp and update timestamp verifier. In standby or pause,
+    // HAL may be draining some small duration buffered data for fade out.
+    if (threadloop_getHalTimestamp_l(&timestamp) == OK) {
+        mTimestampVerifier.add(timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL],
+                timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL],
+                mSampleRate);
+
+        if (isTimestampCorrectionEnabled()) {
+            ALOGVV("TS_BEFORE: %d %lld %lld", id(),
+                    (long long)timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL],
+                    (long long)timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]);
+            auto correctedTimestamp = mTimestampVerifier.getLastCorrectedTimestamp();
+            timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]
+                    = correctedTimestamp.mFrames;
+            timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL]
+                    = correctedTimestamp.mTimeNs;
+            ALOGVV("TS_AFTER: %d %lld %lld", id(),
+                    (long long)timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL],
+                    (long long)timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]);
+
+            // Note: Downstream latency only added if timestamp correction enabled.
+            if (mDownstreamLatencyStatMs.getN() > 0) { // we have latency info.
+                const int64_t newPosition =
+                        timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]
+                        - int64_t(mDownstreamLatencyStatMs.getMean() * mSampleRate * 1e-3);
+                // prevent retrograde
+                timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL] = max(
+                        newPosition,
+                        (mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]
+                                - mSuspendedFrames));
+            }
+        }
+
+        // We always fetch the timestamp here because often the downstream
+        // sink will block while writing.
+
+        // We keep track of the last valid kernel position in case we are in underrun
+        // and the normal mixer period is the same as the fast mixer period, or there
+        // is some error from the HAL.
+        if (mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL] >= 0) {
+            mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL_LASTKERNELOK] =
+                    mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL];
+            mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL_LASTKERNELOK] =
+                    mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL];
+
+            mTimestamp.mPosition[ExtendedTimestamp::LOCATION_SERVER_LASTKERNELOK] =
+                    mTimestamp.mPosition[ExtendedTimestamp::LOCATION_SERVER];
+            mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_SERVER_LASTKERNELOK] =
+                    mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_SERVER];
+        }
+
+        if (timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL] >= 0) {
+            kernelLocationUpdate = true;
+        } else {
+            ALOGVV("getTimestamp error - no valid kernel position");
+        }
+
+        // copy over kernel info
+        mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL] =
+                timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]
+                + mSuspendedFrames; // add frames discarded when suspended
+        mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL] =
+                timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL];
+    } else {
+        mTimestampVerifier.error();
+    }
+
+    // mFramesWritten for non-offloaded tracks are contiguous
+    // even after standby() is called. This is useful for the track frame
+    // to sink frame mapping.
+    bool serverLocationUpdate = false;
+    if (mFramesWritten != mLastFramesWritten) {
+        serverLocationUpdate = true;
+        mLastFramesWritten = mFramesWritten;
+    }
+    // Only update timestamps if there is a meaningful change.
+    // Either the kernel timestamp must be valid or we have written something.
+    if (kernelLocationUpdate || serverLocationUpdate) {
+        if (serverLocationUpdate) {
+            // use the time before we called the HAL write - it is a bit more accurate
+            // to when the server last read data than the current time here.
+            //
+            // If we haven't written anything, mLastIoBeginNs will be -1
+            // and we use systemTime().
+            mTimestamp.mPosition[ExtendedTimestamp::LOCATION_SERVER] = mFramesWritten;
+            mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_SERVER] = mLastIoBeginNs == -1
+                    ? systemTime() : mLastIoBeginNs;
+        }
+
+        for (const sp<Track> &t : mActiveTracks) {
+            if (!t->isFastTrack()) {
+                t->updateTrackFrameInfo(
+                        t->mAudioTrackServerProxy->framesReleased(),
+                        mFramesWritten,
+                        mSampleRate,
+                        mTimestamp);
+            }
+        }
+    }
+
+    if (audio_has_proportional_frames(mFormat)) {
+        const double latencyMs = mTimestamp.getOutputServerLatencyMs(mSampleRate);
+        if (latencyMs != 0.) { // note 0. means timestamp is empty.
+            mLatencyMs.add(latencyMs);
+        }
+    }
+#if 0
+    // logFormat example
+    if (z % 100 == 0) {
+        timespec ts;
+        clock_gettime(CLOCK_MONOTONIC, &ts);
+        LOGT("This is an integer %d, this is a float %f, this is my "
+            "pid %p %% %s %t", 42, 3.14, "and this is a timestamp", ts);
+        LOGT("A deceptive null-terminated string %\0");
+    }
+    ++z;
+#endif
+}
+
 // removeTracks_l() must be called with ThreadBase::mLock held
 void AudioFlinger::PlaybackThread::removeTracks_l(const Vector< sp<Track> >& tracksToRemove)
 {
@@ -4182,20 +4188,15 @@
         return status;
     }
     if ((mType == OFFLOAD || mType == DIRECT) && mOutput != NULL) {
-        uint64_t position64;
-        if (mOutput->getPresentationPosition(&position64, &timestamp.mTime) == OK) {
-            timestamp.mPosition = (uint32_t)position64;
-            if (mDownstreamLatencyStatMs.getN() > 0) {
-                const uint32_t positionOffset =
-                    (uint32_t)(mDownstreamLatencyStatMs.getMean() * mSampleRate * 1e-3);
-                if (positionOffset > timestamp.mPosition) {
-                    timestamp.mPosition = 0;
-                } else {
-                    timestamp.mPosition -= positionOffset;
-                }
-            }
-            return NO_ERROR;
+        collectTimestamps_l();
+        if (mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL] <= 0) {
+            return INVALID_OPERATION;
         }
+        timestamp.mPosition = mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL];
+        const int64_t timeNs = mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL];
+        timestamp.mTime.tv_sec = timeNs / NANOS_PER_SECOND;
+        timestamp.mTime.tv_nsec = timeNs - (timestamp.mTime.tv_sec * NANOS_PER_SECOND);
+        return NO_ERROR;
     }
     return INVALID_OPERATION;
 }
@@ -5596,8 +5597,6 @@
                                                        status_t& status)
 {
     bool reconfig = false;
-    bool a2dpDeviceChanged = false;
-
     status = NO_ERROR;
 
     AutoPark<FastMixer> park(mFastMixer);
@@ -5669,7 +5668,7 @@
         }
     }
 
-    return reconfig || a2dpDeviceChanged;
+    return reconfig;
 }
 
 
@@ -6130,8 +6129,6 @@
                                                               status_t& status)
 {
     bool reconfig = false;
-    bool a2dpDeviceChanged = false;
-
     status = NO_ERROR;
 
     AudioParameter param = AudioParameter(keyValuePair);
@@ -6166,7 +6163,7 @@
         }
     }
 
-    return reconfig || a2dpDeviceChanged;
+    return reconfig;
 }
 
 uint32_t AudioFlinger::DirectOutputThread::activeSleepTimeUs() const
@@ -6223,7 +6220,7 @@
     mOutput->flush();
     mHwPaused = false;
     mFlushPending = false;
-    mTimestampVerifier.discontinuity(); // DIRECT and OFFLOADED flush resets frame count.
+    mTimestampVerifier.discontinuity(discontinuityForStandbyOrFlush());
     mTimestamp.clear();
 }
 
@@ -6559,13 +6556,14 @@
                     track->presentationComplete(framesWritten, audioHALFrames);
                     track->reset();
                     tracksToRemove->add(track);
-                    // DIRECT and OFFLOADED stop resets frame counts.
+                    // OFFLOADED stop resets frame counts.
                     if (!mUseAsyncWrite) {
                         // If we don't get explicit drain notification we must
                         // register discontinuity regardless of whether this is
                         // the previous (!last) or the upcoming (last) track
                         // to avoid skipping the discontinuity.
-                        mTimestampVerifier.discontinuity();
+                        mTimestampVerifier.discontinuity(
+                                mTimestampVerifier.DISCONTINUITY_MODE_ZERO);
                     }
                 }
             } else {
@@ -7429,7 +7427,9 @@
         if (mPipeSource.get() == nullptr /* don't obtain for FastCapture, could block */) {
             int64_t position, time;
             if (mStandby) {
-                mTimestampVerifier.discontinuity();
+                mTimestampVerifier.discontinuity(audio_is_linear_pcm(mFormat) ?
+                    mTimestampVerifier.DISCONTINUITY_MODE_CONTINUOUS :
+                    mTimestampVerifier.DISCONTINUITY_MODE_ZERO);
             } else if (mSource->getCapturePosition(&position, &time) == NO_ERROR
                     && time > mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL]) {
 
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index 5aa5169..e63642b 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -612,6 +612,11 @@
                 ExtendedTimestamp       mTimestamp;
                 TimestampVerifier< // For timestamp statistics.
                         int64_t /* frame count */, int64_t /* time ns */> mTimestampVerifier;
+                // DIRECT and OFFLOAD threads should reset frame count to zero on stop/flush
+                // TODO: add confirmation checks:
+                // 1) DIRECT threads and linear PCM format really resets to 0?
+                // 2) Is frame count really valid if not linear pcm?
+                // 3) Are all 64 bits of position returned, not just lowest 32 bits?
                 // Timestamp corrected device should be a single device.
                 audio_devices_t         mTimestampCorrectedDevice = AUDIO_DEVICE_NONE;
 
@@ -1051,6 +1056,8 @@
 
     int64_t                         mBytesWritten;
     int64_t                         mFramesWritten; // not reset on standby
+    int64_t                         mLastFramesWritten = -1; // track changes in timestamp
+                                                             // server frames written.
     int64_t                         mSuspendedFrames; // not reset on standby
 
     // mHapticChannelMask and mHapticChannelCount will only be valid when the thread support
@@ -1063,6 +1070,14 @@
     // copy rather than the one in AudioFlinger.  This optimization saves a lock.
     bool                            mMasterMute;
                 void        setMasterMute_l(bool muted) { mMasterMute = muted; }
+
+                auto discontinuityForStandbyOrFlush() const { // call on threadLoop or with lock.
+                    return ((mType == DIRECT && !audio_is_linear_pcm(mFormat))
+                                    || mType == OFFLOAD)
+                            ? mTimestampVerifier.DISCONTINUITY_MODE_ZERO
+                            : mTimestampVerifier.DISCONTINUITY_MODE_CONTINUOUS;
+                }
+
 protected:
     ActiveTracks<Track>     mActiveTracks;
 
@@ -1114,6 +1129,8 @@
     void        updateMetadata_l() final;
     virtual void sendMetadataToBackend_l(const StreamOutHalInterface::SourceMetadata& metadata);
 
+    void        collectTimestamps_l();
+
     // The Tracks class manages tracks added and removed from the Thread.
     template <typename T>
     class Tracks {
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index a86102f..efcdb51 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -673,7 +673,6 @@
     mOpPlayAudioMonitor(OpPlayAudioMonitor::createIfNeeded(identity, attr, id(),
         streamType)),
     // mSinkTimestamp
-    mFrameCountToBeReady(frameCountToBeReady),
     mFastIndex(-1),
     mCachedVolume(1.0),
     /* The track might not play immediately after being active, similarly as if its volume was 0.
@@ -708,6 +707,7 @@
                 mFrameSize, sampleRate);
     }
     mServerProxy = mAudioTrackServerProxy;
+    mServerProxy->setStartThresholdInFrames(frameCountToBeReady); // update the Cblk value
 
     // only allocate a fast track index if we were able to allocate a normal track name
     if (flags & AUDIO_OUTPUT_FLAG_FAST) {
@@ -1040,7 +1040,10 @@
     }
 
     size_t bufferSizeInFrames = mServerProxy->getBufferSizeInFrames();
-    size_t framesToBeReady = std::min(mFrameCountToBeReady, bufferSizeInFrames);
+    // Note: mServerProxy->getStartThresholdInFrames() is clamped.
+    const size_t startThresholdInFrames = mServerProxy->getStartThresholdInFrames();
+    const size_t framesToBeReady = std::clamp(  // clamp again to validate client values.
+            std::min(startThresholdInFrames, bufferSizeInFrames), size_t(1), mFrameCount);
 
     if (framesReady() >= framesToBeReady || (mCblk->mFlags & CBLK_FORCEREADY)) {
         ALOGV("%s(%d): consider track ready with %zu/%zu, target was %zu)",
diff --git a/services/audiopolicy/common/managerdefinitions/include/PolicyAudioPort.h b/services/audiopolicy/common/managerdefinitions/include/PolicyAudioPort.h
index e6eef24..ab33b38 100644
--- a/services/audiopolicy/common/managerdefinitions/include/PolicyAudioPort.h
+++ b/services/audiopolicy/common/managerdefinitions/include/PolicyAudioPort.h
@@ -44,7 +44,7 @@
 
     bool equals(const sp<PolicyAudioPort> &right) const
     {
-        return getTagName() == right->getTagName();
+        return right != 0 && getTagName() == right->getTagName();
     }
 
     virtual sp<AudioPort> asAudioPort() const = 0;
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioRoute.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioRoute.cpp
index c8e4e76..866417e 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioRoute.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioRoute.cpp
@@ -39,7 +39,7 @@
 bool AudioRoute::supportsPatch(const sp<PolicyAudioPort> &srcPort,
                                const sp<PolicyAudioPort> &dstPort) const
 {
-    if (mSink == 0 || dstPort == 0 || !dstPort->equals(mSink)) {
+    if (mSink == 0 || srcPort == 0 || dstPort == 0 || !dstPort->equals(mSink)) {
         return false;
     }
     ALOGV("%s: sinks %s matching", __FUNCTION__, mSink->getTagName().c_str());
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index c91750e..6cd20a1 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -128,10 +128,9 @@
 static const String16 sCameraOpenCloseListenerPermission(
         "android.permission.CAMERA_OPEN_CLOSE_LISTENER");
 
-// Matches with PERCEPTIBLE_APP_ADJ in ProcessList.java
-static constexpr int32_t kVendorClientScore = 200;
-// Matches with PROCESS_STATE_PERSISTENT_UI in ActivityManager.java
-static constexpr int32_t kVendorClientState = 1;
+static constexpr int32_t kVendorClientScore = resource_policy::PERCEPTIBLE_APP_ADJ;
+static constexpr int32_t kVendorClientState = ActivityManager::PROCESS_STATE_PERSISTENT_UI;
+
 const String8 CameraService::kOfflineDevice("offline-");
 
 CameraService::CameraService() :
diff --git a/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp b/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp
index 4c3ded6..ee764ec 100644
--- a/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp
@@ -158,7 +158,7 @@
         res = device->createStream(mCallbackWindow,
                 params.previewWidth, params.previewHeight, callbackFormat,
                 HAL_DATASPACE_V0_JFIF, CAMERA_STREAM_ROTATION_0, &mCallbackStreamId,
-                String8());
+                String8(), std::unordered_set<int32_t>{ANDROID_SENSOR_PIXEL_MODE_DEFAULT});
         if (res != OK) {
             ALOGE("%s: Camera %d: Can't create output stream for callbacks: "
                     "%s (%d)", __FUNCTION__, mId,
diff --git a/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp b/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
index ff2e398..eed2654 100755
--- a/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
@@ -151,7 +151,7 @@
                 params.pictureWidth, params.pictureHeight,
                 HAL_PIXEL_FORMAT_BLOB, HAL_DATASPACE_V0_JFIF,
                 CAMERA_STREAM_ROTATION_0, &mCaptureStreamId,
-                String8());
+                String8(), std::unordered_set<int32_t>{ANDROID_SENSOR_PIXEL_MODE_DEFAULT});
         if (res != OK) {
             ALOGE("%s: Camera %d: Can't create output stream for capture: "
                     "%s (%d)", __FUNCTION__, mId,
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.h b/services/camera/libcameraservice/api1/client2/Parameters.h
index 3a709c9..02ac638 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.h
+++ b/services/camera/libcameraservice/api1/client2/Parameters.h
@@ -56,7 +56,7 @@
     int previewTransform; // set by CAMERA_CMD_SET_DISPLAY_ORIENTATION
 
     int pictureWidth, pictureHeight;
-    // Store the picture size before they are overriden by video snapshot
+    // Store the picture size before they are overridden by video snapshot
     int pictureWidthLastSet, pictureHeightLastSet;
     bool pictureSizeOverriden;
 
diff --git a/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp b/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp
index 8b1eb28..2d3597c 100644
--- a/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp
@@ -198,7 +198,8 @@
         res = device->createStream(mPreviewWindow,
                 params.previewWidth, params.previewHeight,
                 CAMERA2_HAL_PIXEL_FORMAT_OPAQUE, HAL_DATASPACE_UNKNOWN,
-                CAMERA_STREAM_ROTATION_0, &mPreviewStreamId, String8());
+                CAMERA_STREAM_ROTATION_0, &mPreviewStreamId, String8(),
+                std::unordered_set<int32_t>{ANDROID_SENSOR_PIXEL_MODE_DEFAULT});
         if (res != OK) {
             ALOGE("%s: Camera %d: Unable to create preview stream: %s (%d)",
                     __FUNCTION__, mId, strerror(-res), res);
@@ -384,7 +385,7 @@
                 params.videoWidth, params.videoHeight,
                 params.videoFormat, params.videoDataSpace,
                 CAMERA_STREAM_ROTATION_0, &mRecordingStreamId,
-                String8());
+                String8(), std::unordered_set<int32_t>{ANDROID_SENSOR_PIXEL_MODE_DEFAULT});
         if (res != OK) {
             ALOGE("%s: Camera %d: Can't create output stream for recording: "
                     "%s (%d)", __FUNCTION__, mId,
diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
index 9fdc727..8e598f1 100644
--- a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
@@ -261,7 +261,7 @@
         res = device->createStream(outSurface, params.fastInfo.maxZslSize.width,
             params.fastInfo.maxZslSize.height, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
             HAL_DATASPACE_UNKNOWN, CAMERA_STREAM_ROTATION_0, &mZslStreamId,
-            String8());
+            String8(), std::unordered_set<int32_t>{ANDROID_SENSOR_PIXEL_MODE_DEFAULT});
         if (res != OK) {
             ALOGE("%s: Camera %d: Can't create ZSL stream: "
                     "%s (%d)", __FUNCTION__, client->getCameraId(),
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index 8cccbb1..1b65d1a 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -125,8 +125,8 @@
                                       /*listener*/this,
                                       /*sendPartials*/true);
 
-    auto deviceInfo = mDevice->info();
-    camera_metadata_entry_t physicalKeysEntry = deviceInfo.find(
+    const CameraMetadata &deviceInfo = mDevice->info();
+    camera_metadata_ro_entry_t physicalKeysEntry = deviceInfo.find(
             ANDROID_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS);
     if (physicalKeysEntry.count > 0) {
         mSupportedPhysicalRequestKeys.insert(mSupportedPhysicalRequestKeys.begin(),
@@ -135,6 +135,17 @@
     }
 
     mProviderManager = providerPtr;
+    // Cache physical camera ids corresponding to this device and also the high
+    // resolution sensors in this device + physical camera ids
+    mProviderManager->isLogicalCamera(mCameraIdStr.string(), &mPhysicalCameraIds);
+    if (isUltraHighResolutionSensor(mCameraIdStr)) {
+        mHighResolutionSensors.insert(mCameraIdStr.string());
+    }
+    for (auto &physicalId : mPhysicalCameraIds) {
+        if (isUltraHighResolutionSensor(String8(physicalId.c_str()))) {
+            mHighResolutionSensors.insert(physicalId.c_str());
+        }
+    }
     return OK;
 }
 
@@ -186,6 +197,17 @@
     return binder::Status::ok();
 }
 
+static std::list<int> getIntersection(const std::unordered_set<int> &streamIdsForThisCamera,
+        const Vector<int> &streamIdsForThisRequest) {
+    std::list<int> intersection;
+    for (auto &streamId : streamIdsForThisRequest) {
+        if (streamIdsForThisCamera.find(streamId) != streamIdsForThisCamera.end()) {
+            intersection.emplace_back(streamId);
+        }
+    }
+    return intersection;
+}
+
 binder::Status CameraDeviceClient::submitRequestList(
         const std::vector<hardware::camera2::CaptureRequest>& requests,
         bool streaming,
@@ -332,6 +354,24 @@
                         "Request settings are empty");
             }
 
+            // Check whether the physical / logical stream has settings
+            // consistent with the sensor pixel mode(s) it was configured with.
+            // mCameraIdToStreamSet will only have ids that are high resolution
+            const auto streamIdSetIt = mHighResolutionCameraIdToStreamIdSet.find(it.id);
+            if (streamIdSetIt != mHighResolutionCameraIdToStreamIdSet.end()) {
+                std::list<int> streamIdsUsedInRequest = getIntersection(streamIdSetIt->second,
+                        outputStreamIds);
+                if (!request.mIsReprocess &&
+                        !isSensorPixelModeConsistent(streamIdsUsedInRequest, it.settings)) {
+                     ALOGE("%s: Camera %s: Request settings CONTROL_SENSOR_PIXEL_MODE not "
+                            "consistent with configured streams. Rejecting request.",
+                            __FUNCTION__, it.id.c_str());
+                    return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+                        "Request settings CONTROL_SENSOR_PIXEL_MODE are not consistent with "
+                        "streams configured");
+                }
+            }
+
             String8 physicalId(it.id.c_str());
             if (physicalId != mDevice->getId()) {
                 auto found = std::find(requestedPhysicalIds.begin(), requestedPhysicalIds.end(),
@@ -494,7 +534,7 @@
         return STATUS_ERROR(CameraService::ERROR_DISCONNECTED, "Camera device no longer alive");
     }
 
-    res = SessionConfigurationUtils::checkOperatingMode(operatingMode, mDevice->info(),
+    res = camera3::SessionConfigurationUtils::checkOperatingMode(operatingMode, mDevice->info(),
             mCameraIdStr);
     if (!res.isOk()) {
         return res;
@@ -560,8 +600,8 @@
 
 binder::Status CameraDeviceClient::isSessionConfigurationSupported(
         const SessionConfiguration& sessionConfiguration, bool *status /*out*/) {
-    ATRACE_CALL();
 
+    ATRACE_CALL();
     binder::Status res;
     status_t ret = OK;
     if (!(res = checkPidStatus(__FUNCTION__)).isOk()) return res;
@@ -573,7 +613,7 @@
     }
 
     auto operatingMode = sessionConfiguration.getOperatingMode();
-    res = SessionConfigurationUtils::checkOperatingMode(operatingMode, mDevice->info(),
+    res = camera3::SessionConfigurationUtils::checkOperatingMode(operatingMode, mDevice->info(),
             mCameraIdStr);
     if (!res.isOk()) {
         return res;
@@ -589,7 +629,7 @@
     metadataGetter getMetadata = [this](const String8 &id) {return mDevice->infoPhysical(id);};
     std::vector<std::string> physicalCameraIds;
     mProviderManager->isLogicalCamera(mCameraIdStr.string(), &physicalCameraIds);
-    res = SessionConfigurationUtils::convertToHALStreamCombination(sessionConfiguration,
+    res = camera3::SessionConfigurationUtils::convertToHALStreamCombination(sessionConfiguration,
             mCameraIdStr, mDevice->info(), getMetadata, physicalCameraIds, streamConfiguration,
             &earlyExit);
     if (!res.isOk()) {
@@ -714,6 +754,13 @@
                 }
                 mCompositeStreamMap.removeItemsAt(compositeIndex);
             }
+            for (auto &mapIt: mHighResolutionCameraIdToStreamIdSet) {
+                auto &streamSet = mapIt.second;
+                if (streamSet.find(streamId) != streamSet.end()) {
+                    streamSet.erase(streamId);
+                    break;
+                }
+            }
         }
     }
 
@@ -740,7 +787,7 @@
     bool deferredConsumerOnly = deferredConsumer && numBufferProducers == 0;
     bool isMultiResolution = outputConfiguration.isMultiResolution();
 
-    res = SessionConfigurationUtils::checkSurfaceType(numBufferProducers, deferredConsumer,
+    res = camera3::SessionConfigurationUtils::checkSurfaceType(numBufferProducers, deferredConsumer,
             outputConfiguration.getSurfaceType());
     if (!res.isOk()) {
         return res;
@@ -749,10 +796,8 @@
     if (!mDevice.get()) {
         return STATUS_ERROR(CameraService::ERROR_DISCONNECTED, "Camera device no longer alive");
     }
-    std::vector<std::string> physicalCameraIds;
-    mProviderManager->isLogicalCamera(mCameraIdStr.string(), &physicalCameraIds);
-    res = SessionConfigurationUtils::checkPhysicalCameraId(physicalCameraIds, physicalCameraId,
-            mCameraIdStr);
+    res = camera3::SessionConfigurationUtils::checkPhysicalCameraId(mPhysicalCameraIds,
+            physicalCameraId, mCameraIdStr);
     if (!res.isOk()) {
         return res;
     }
@@ -768,6 +813,8 @@
 
     OutputStreamInfo streamInfo;
     bool isStreamInfoValid = false;
+    const std::vector<int32_t> &sensorPixelModesUsed =
+            outputConfiguration.getSensorPixelModesUsed();
     for (auto& bufferProducer : bufferProducers) {
         // Don't create multiple streams for the same target surface
         sp<IBinder> binder = IInterface::asBinder(bufferProducer);
@@ -780,8 +827,9 @@
         }
 
         sp<Surface> surface;
-        res = SessionConfigurationUtils::createSurfaceFromGbp(streamInfo, isStreamInfoValid,
-                surface, bufferProducer, mCameraIdStr, mDevice->infoPhysical(physicalCameraId));
+        res = camera3::SessionConfigurationUtils::createSurfaceFromGbp(streamInfo,
+                isStreamInfoValid, surface, bufferProducer, mCameraIdStr,
+                mDevice->infoPhysical(physicalCameraId), sensorPixelModesUsed);
 
         if (!res.isOk())
             return res;
@@ -793,10 +841,10 @@
         binders.push_back(IInterface::asBinder(bufferProducer));
         surfaces.push_back(surface);
     }
-
     int streamId = camera3::CAMERA3_STREAM_ID_INVALID;
     std::vector<int> surfaceIds;
-    bool isDepthCompositeStream = camera3::DepthCompositeStream::isDepthCompositeStream(surfaces[0]);
+    bool isDepthCompositeStream =
+            camera3::DepthCompositeStream::isDepthCompositeStream(surfaces[0]);
     bool isHeicCompisiteStream = camera3::HeicCompositeStream::isHeicCompositeStream(surfaces[0]);
     if (isDepthCompositeStream || isHeicCompisiteStream) {
         sp<CompositeStream> compositeStream;
@@ -809,8 +857,8 @@
         err = compositeStream->createStream(surfaces, deferredConsumer, streamInfo.width,
                 streamInfo.height, streamInfo.format,
                 static_cast<camera_stream_rotation_t>(outputConfiguration.getRotation()),
-                &streamId, physicalCameraId, &surfaceIds, outputConfiguration.getSurfaceSetID(),
-                isShared, isMultiResolution);
+                &streamId, physicalCameraId, streamInfo.sensorPixelModesUsed, &surfaceIds,
+                outputConfiguration.getSurfaceSetID(), isShared, isMultiResolution);
         if (err == OK) {
             mCompositeStreamMap.add(IInterface::asBinder(surfaces[0]->getIGraphicBufferProducer()),
                     compositeStream);
@@ -819,8 +867,8 @@
         err = mDevice->createStream(surfaces, deferredConsumer, streamInfo.width,
                 streamInfo.height, streamInfo.format, streamInfo.dataSpace,
                 static_cast<camera_stream_rotation_t>(outputConfiguration.getRotation()),
-                &streamId, physicalCameraId, &surfaceIds, outputConfiguration.getSurfaceSetID(),
-                isShared, isMultiResolution);
+                &streamId, physicalCameraId, streamInfo.sensorPixelModesUsed, &surfaceIds,
+                outputConfiguration.getSurfaceSetID(), isShared, isMultiResolution);
     }
 
     if (err != OK) {
@@ -848,6 +896,16 @@
         // Set transform flags to ensure preview to be rotated correctly.
         res = setStreamTransformLocked(streamId);
 
+        // Fill in mHighResolutionCameraIdToStreamIdSet map
+        const String8 &cameraIdUsed =
+                physicalCameraId.size() != 0 ? physicalCameraId : mCameraIdStr;
+        const char *cameraIdUsedCStr = cameraIdUsed.string();
+        // Only needed for high resolution sensors
+        if (mHighResolutionSensors.find(cameraIdUsedCStr) !=
+                mHighResolutionSensors.end()) {
+            mHighResolutionCameraIdToStreamIdSet[cameraIdUsedCStr].insert(streamId);
+        }
+
         *newStreamId = streamId;
     }
 
@@ -884,10 +942,25 @@
     std::vector<sp<Surface>> noSurface;
     std::vector<int> surfaceIds;
     String8 physicalCameraId(outputConfiguration.getPhysicalCameraId());
+    const String8 &cameraIdUsed =
+            physicalCameraId.size() != 0 ? physicalCameraId : mCameraIdStr;
+    // Here, we override sensor pixel modes
+    std::unordered_set<int32_t> overriddenSensorPixelModesUsed;
+    const std::vector<int32_t> &sensorPixelModesUsed =
+            outputConfiguration.getSensorPixelModesUsed();
+    if (camera3::SessionConfigurationUtils::checkAndOverrideSensorPixelModesUsed(
+            sensorPixelModesUsed, format, width, height, getStaticInfo(cameraIdUsed),
+            /*allowRounding*/ false, &overriddenSensorPixelModesUsed) != OK) {
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+                "sensor pixel modes used not valid for deferred stream");
+    }
+
     err = mDevice->createStream(noSurface, /*hasDeferredConsumer*/true, width,
             height, format, dataSpace,
             static_cast<camera_stream_rotation_t>(outputConfiguration.getRotation()),
-            &streamId, physicalCameraId, &surfaceIds,
+            &streamId, physicalCameraId,
+            overriddenSensorPixelModesUsed,
+            &surfaceIds,
             outputConfiguration.getSurfaceSetID(), isShared,
             outputConfiguration.isMultiResolution(), consumerUsage);
 
@@ -900,9 +973,9 @@
         // a separate list to track. Once the deferred surface is set, this id will be
         // relocated to mStreamMap.
         mDeferredStreams.push_back(streamId);
-
         mStreamInfoMap.emplace(std::piecewise_construct, std::forward_as_tuple(streamId),
-                std::forward_as_tuple(width, height, format, dataSpace, consumerUsage));
+                std::forward_as_tuple(width, height, format, dataSpace, consumerUsage,
+                        overriddenSensorPixelModesUsed));
 
         ALOGV("%s: Camera %s: Successfully created a new stream ID %d for a deferred surface"
                 " (%d x %d) stream with format 0x%x.",
@@ -912,6 +985,13 @@
         res = setStreamTransformLocked(streamId);
 
         *newStreamId = streamId;
+        // Fill in mHighResolutionCameraIdToStreamIdSet
+        const char *cameraIdUsedCStr = cameraIdUsed.string();
+        // Only needed for high resolution sensors
+        if (mHighResolutionSensors.find(cameraIdUsedCStr) !=
+                mHighResolutionSensors.end()) {
+            mHighResolutionCameraIdToStreamIdSet[cameraIdUsed.string()].insert(streamId);
+        }
     }
     return res;
 }
@@ -1081,13 +1161,15 @@
             newOutputsMap.removeItemsAt(idx);
         }
     }
+    const std::vector<int32_t> &sensorPixelModesUsed =
+            outputConfiguration.getSensorPixelModesUsed();
 
     for (size_t i = 0; i < newOutputsMap.size(); i++) {
         OutputStreamInfo outInfo;
         sp<Surface> surface;
-        res = SessionConfigurationUtils::createSurfaceFromGbp(outInfo, /*isStreamInfoValid*/ false,
-                surface, newOutputsMap.valueAt(i), mCameraIdStr,
-                mDevice->infoPhysical(physicalCameraId));
+        res = camera3::SessionConfigurationUtils::createSurfaceFromGbp(outInfo,
+                /*isStreamInfoValid*/ false, surface, newOutputsMap.valueAt(i), mCameraIdStr,
+                mDevice->infoPhysical(physicalCameraId), sensorPixelModesUsed);
         if (!res.isOk())
             return res;
 
@@ -1442,6 +1524,8 @@
     }
 
     std::vector<sp<Surface>> consumerSurfaces;
+    const std::vector<int32_t> &sensorPixelModesUsed =
+            outputConfiguration.getSensorPixelModesUsed();
     for (auto& bufferProducer : bufferProducers) {
         // Don't create multiple streams for the same target surface
         ssize_t index = mStreamMap.indexOfKey(IInterface::asBinder(bufferProducer));
@@ -1452,9 +1536,9 @@
         }
 
         sp<Surface> surface;
-        res = SessionConfigurationUtils::createSurfaceFromGbp(mStreamInfoMap[streamId],
+        res = camera3::SessionConfigurationUtils::createSurfaceFromGbp(mStreamInfoMap[streamId],
                 true /*isStreamInfoValid*/, surface, bufferProducer, mCameraIdStr,
-                mDevice->infoPhysical(physicalId));
+                mDevice->infoPhysical(physicalId), sensorPixelModesUsed);
 
         if (!res.isOk())
             return res;
@@ -1936,4 +2020,54 @@
 
     return ret;
 }
+
+const CameraMetadata &CameraDeviceClient::getStaticInfo(const String8 &cameraId) {
+    if (mDevice->getId() == cameraId) {
+        return mDevice->info();
+    }
+    return mDevice->infoPhysical(cameraId);
+}
+
+bool CameraDeviceClient::isUltraHighResolutionSensor(const String8 &cameraId) {
+    const CameraMetadata &deviceInfo = getStaticInfo(cameraId);
+    return camera3::SessionConfigurationUtils::isUltraHighResolutionSensor(deviceInfo);
+}
+
+bool CameraDeviceClient::isSensorPixelModeConsistent(
+        const std::list<int> &streamIdList, const CameraMetadata &settings) {
+    // First we get the sensorPixelMode from the settings metadata.
+    int32_t sensorPixelMode = ANDROID_SENSOR_PIXEL_MODE_DEFAULT;
+    camera_metadata_ro_entry sensorPixelModeEntry = settings.find(ANDROID_SENSOR_PIXEL_MODE);
+    if (sensorPixelModeEntry.count != 0) {
+        sensorPixelMode = sensorPixelModeEntry.data.u8[0];
+        if (sensorPixelMode != ANDROID_SENSOR_PIXEL_MODE_DEFAULT &&
+            sensorPixelMode != ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) {
+            ALOGE("%s: Request sensor pixel mode not is not one of the valid values %d",
+                      __FUNCTION__, sensorPixelMode);
+            return false;
+        }
+    }
+    // Check whether each stream has max resolution allowed.
+    bool consistent = true;
+    for (auto it : streamIdList) {
+        auto const streamInfoIt = mStreamInfoMap.find(it);
+        if (streamInfoIt == mStreamInfoMap.end()) {
+            ALOGE("%s: stream id %d not created, skipping", __FUNCTION__, it);
+            return false;
+        }
+        consistent =
+                streamInfoIt->second.sensorPixelModesUsed.find(sensorPixelMode) !=
+                        streamInfoIt->second.sensorPixelModesUsed.end();
+        if (!consistent) {
+            ALOGE("sensorPixelMode used %i not consistent with configured modes", sensorPixelMode);
+            for (auto m : streamInfoIt->second.sensorPixelModesUsed) {
+                ALOGE("sensor pixel mode used list: %i", m);
+            }
+            break;
+        }
+    }
+
+    return consistent;
+}
+
 } // namespace android
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index 9f7a4af..adedf92 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -28,6 +28,7 @@
 #include "common/FrameProcessorBase.h"
 #include "common/Camera2ClientBase.h"
 #include "CompositeStream.h"
+#include "utils/SessionConfigurationUtils.h"
 
 using android::camera3::OutputStreamInfo;
 using android::camera3::CompositeStream;
@@ -222,6 +223,13 @@
     // Calculate the ANativeWindow transform from android.sensor.orientation
     status_t              getRotationTransformLocked(/*out*/int32_t* transform);
 
+    bool isUltraHighResolutionSensor(const String8 &cameraId);
+
+    bool isSensorPixelModeConsistent(const std::list<int> &streamIdList,
+            const CameraMetadata &settings);
+
+    const CameraMetadata &getStaticInfo(const String8 &cameraId);
+
 private:
     // StreamSurfaceId encapsulates streamId + surfaceId for a particular surface.
     // streamId specifies the index of the stream the surface belongs to, and the
@@ -305,6 +313,8 @@
 
     int32_t mRequestIdCounter;
 
+    std::vector<std::string> mPhysicalCameraIds;
+
     // The list of output streams whose surfaces are deferred. We have to track them separately
     // as there are no surfaces available and can not be put into mStreamMap. Once the deferred
     // Surface is configured, the stream id will be moved to mStreamMap.
@@ -313,6 +323,12 @@
     // stream ID -> outputStreamInfo mapping
     std::unordered_map<int32_t, OutputStreamInfo> mStreamInfoMap;
 
+    // map high resolution camera id (logical / physical) -> list of stream ids configured
+    std::unordered_map<std::string, std::unordered_set<int>> mHighResolutionCameraIdToStreamIdSet;
+
+    // set of high resolution camera id (logical / physical)
+    std::unordered_set<std::string> mHighResolutionSensors;
+
     KeyedVector<sp<IBinder>, sp<CompositeStream>> mCompositeStreamMap;
 
     sp<CameraProviderManager> mProviderManager;
diff --git a/services/camera/libcameraservice/api2/CompositeStream.cpp b/services/camera/libcameraservice/api2/CompositeStream.cpp
index 515b7f2..4b840fc 100644
--- a/services/camera/libcameraservice/api2/CompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/CompositeStream.cpp
@@ -47,7 +47,9 @@
 status_t CompositeStream::createStream(const std::vector<sp<Surface>>& consumers,
         bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
         camera_stream_rotation_t rotation, int * id, const String8& physicalCameraId,
-        std::vector<int> * surfaceIds, int streamSetId, bool isShared, bool isMultiResolution) {
+        const std::unordered_set<int32_t> &sensorPixelModesUsed,
+        std::vector<int> * surfaceIds,
+        int streamSetId, bool isShared, bool isMultiResolution) {
     if (hasDeferredConsumer) {
         ALOGE("%s: Deferred consumers not supported in case of composite streams!",
                 __FUNCTION__);
@@ -72,8 +74,8 @@
         return BAD_VALUE;
     }
 
-    return createInternalStreams(consumers, hasDeferredConsumer, width, height, format, rotation, id,
-            physicalCameraId, surfaceIds, streamSetId, isShared);
+    return createInternalStreams(consumers, hasDeferredConsumer, width, height, format, rotation,
+            id, physicalCameraId, sensorPixelModesUsed, surfaceIds, streamSetId, isShared);
 }
 
 status_t CompositeStream::deleteStream() {
diff --git a/services/camera/libcameraservice/api2/CompositeStream.h b/services/camera/libcameraservice/api2/CompositeStream.h
index 1bf137a..600bd28 100644
--- a/services/camera/libcameraservice/api2/CompositeStream.h
+++ b/services/camera/libcameraservice/api2/CompositeStream.h
@@ -44,7 +44,9 @@
     status_t createStream(const std::vector<sp<Surface>>& consumers,
             bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
             camera_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
-            std::vector<int> *surfaceIds, int streamSetId, bool isShared, bool isMultiResolution);
+            const std::unordered_set<int32_t> &sensorPixelModesUsed,
+            std::vector<int> *surfaceIds,
+            int streamSetId, bool isShared, bool isMultiResolution);
 
     status_t deleteStream();
 
@@ -55,7 +57,9 @@
     virtual status_t createInternalStreams(const std::vector<sp<Surface>>& consumers,
             bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
             camera_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
-            std::vector<int> *surfaceIds, int streamSetId, bool isShared) = 0;
+            const std::unordered_set<int32_t> &sensorPixelModesUsed,
+            std::vector<int> *surfaceIds,
+            int streamSetId, bool isShared) = 0;
 
     // Release all internal streams and corresponding resources.
     virtual status_t deleteInternalStreams() = 0;
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
index 2c553f3..19b54e0 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
@@ -20,6 +20,7 @@
 
 #include "api1/client2/JpegProcessor.h"
 #include "common/CameraProviderManager.h"
+#include "utils/SessionConfigurationUtils.h"
 #include <gui/Surface.h>
 #include <utils/Log.h>
 #include <utils/Trace.h>
@@ -78,7 +79,10 @@
             }
         }
 
-        getSupportedDepthSizes(staticInfo, &mSupportedDepthSizes);
+        getSupportedDepthSizes(staticInfo, /*maxResolution*/false, &mSupportedDepthSizes);
+        if (SessionConfigurationUtils::isUltraHighResolutionSensor(staticInfo)) {
+            getSupportedDepthSizes(staticInfo, true, &mSupportedDepthSizesMaximumResolution);
+        }
     }
 }
 
@@ -484,17 +488,82 @@
     return false;
 }
 
+static bool setContains(std::unordered_set<int32_t> containerSet, int32_t value) {
+    return containerSet.find(value) != containerSet.end();
+}
+
+status_t DepthCompositeStream::checkAndGetMatchingDepthSize(size_t width, size_t height,
+        const std::vector<std::tuple<size_t, size_t>> &depthSizes,
+        const std::vector<std::tuple<size_t, size_t>> &depthSizesMaximumResolution,
+        const std::unordered_set<int32_t> &sensorPixelModesUsed,
+        size_t *depthWidth, size_t *depthHeight) {
+    if (depthWidth == nullptr || depthHeight == nullptr) {
+        return BAD_VALUE;
+    }
+    size_t chosenDepthWidth = 0, chosenDepthHeight = 0;
+    bool hasDefaultSensorPixelMode =
+            setContains(sensorPixelModesUsed, ANDROID_SENSOR_PIXEL_MODE_DEFAULT);
+
+    bool hasMaximumResolutionSensorPixelMode =
+        setContains(sensorPixelModesUsed, ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);
+
+    if (!hasDefaultSensorPixelMode && !hasMaximumResolutionSensorPixelMode) {
+        ALOGE("%s: sensor pixel modes don't contain either maximum resolution or default modes",
+                __FUNCTION__);
+        return BAD_VALUE;
+    }
+
+    if (hasDefaultSensorPixelMode) {
+        auto ret = getMatchingDepthSize(width, height, depthSizes, &chosenDepthWidth,
+                &chosenDepthHeight);
+        if (ret != OK) {
+            ALOGE("%s: No matching depth stream size found", __FUNCTION__);
+            return ret;
+        }
+    }
+
+    if (hasMaximumResolutionSensorPixelMode) {
+        size_t depthWidth = 0, depthHeight = 0;
+        auto ret = getMatchingDepthSize(width, height,
+                depthSizesMaximumResolution, &depthWidth, &depthHeight);
+        if (ret != OK) {
+            ALOGE("%s: No matching max resolution depth stream size found", __FUNCTION__);
+            return ret;
+        }
+        // Both matching depth sizes should be the same.
+        if (chosenDepthWidth != 0 && chosenDepthWidth != depthWidth &&
+                chosenDepthHeight != depthHeight) {
+            ALOGE("%s: Maximum resolution sensor pixel mode and default sensor pixel mode don't"
+                    " have matching depth sizes", __FUNCTION__);
+            return BAD_VALUE;
+        }
+        if (chosenDepthWidth == 0) {
+            chosenDepthWidth = depthWidth;
+            chosenDepthHeight = depthHeight;
+        }
+    }
+    *depthWidth = chosenDepthWidth;
+    *depthHeight = chosenDepthHeight;
+    return OK;
+}
+
+
 status_t DepthCompositeStream::createInternalStreams(const std::vector<sp<Surface>>& consumers,
         bool /*hasDeferredConsumer*/, uint32_t width, uint32_t height, int format,
         camera_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
-        std::vector<int> *surfaceIds, int /*streamSetId*/, bool /*isShared*/) {
+        const std::unordered_set<int32_t> &sensorPixelModesUsed,
+        std::vector<int> *surfaceIds,
+        int /*streamSetId*/, bool /*isShared*/) {
     if (mSupportedDepthSizes.empty()) {
         ALOGE("%s: This camera device doesn't support any depth map streams!", __FUNCTION__);
         return INVALID_OPERATION;
     }
 
     size_t depthWidth, depthHeight;
-    auto ret = getMatchingDepthSize(width, height, mSupportedDepthSizes, &depthWidth, &depthHeight);
+    auto ret =
+            checkAndGetMatchingDepthSize(width, height, mSupportedDepthSizes,
+                    mSupportedDepthSizesMaximumResolution, sensorPixelModesUsed, &depthWidth,
+                    &depthHeight);
     if (ret != OK) {
         ALOGE("%s: Failed to find an appropriate depth stream size!", __FUNCTION__);
         return ret;
@@ -515,7 +584,7 @@
     mBlobSurface = new Surface(producer);
 
     ret = device->createStream(mBlobSurface, width, height, format, kJpegDataSpace, rotation,
-            id, physicalCameraId, surfaceIds);
+            id, physicalCameraId, sensorPixelModesUsed, surfaceIds);
     if (ret == OK) {
         mBlobStreamId = *id;
         mBlobSurfaceId = (*surfaceIds)[0];
@@ -531,7 +600,8 @@
     mDepthSurface = new Surface(producer);
     std::vector<int> depthSurfaceId;
     ret = device->createStream(mDepthSurface, depthWidth, depthHeight, kDepthMapPixelFormat,
-            kDepthMapDataSpace, rotation, &mDepthStreamId, physicalCameraId, &depthSurfaceId);
+            kDepthMapDataSpace, rotation, &mDepthStreamId, physicalCameraId, sensorPixelModesUsed,
+            &depthSurfaceId);
     if (ret == OK) {
         mDepthSurfaceId = depthSurfaceId[0];
     } else {
@@ -749,13 +819,15 @@
     return ((*depthWidth > 0) && (*depthHeight > 0)) ? OK : BAD_VALUE;
 }
 
-void DepthCompositeStream::getSupportedDepthSizes(const CameraMetadata& ch,
+void DepthCompositeStream::getSupportedDepthSizes(const CameraMetadata& ch, bool maxResolution,
         std::vector<std::tuple<size_t, size_t>>* depthSizes /*out*/) {
     if (depthSizes == nullptr) {
         return;
     }
 
-    auto entry = ch.find(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS);
+    auto entry = ch.find(
+            camera3::SessionConfigurationUtils::getAppropriateModeTag(
+                    ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS, maxResolution));
     if (entry.count > 0) {
         // Depth stream dimensions have four int32_t components
         // (pixelformat, width, height, type)
@@ -779,30 +851,43 @@
     }
 
     std::vector<std::tuple<size_t, size_t>> depthSizes;
-    getSupportedDepthSizes(ch, &depthSizes);
+    std::vector<std::tuple<size_t, size_t>> depthSizesMaximumResolution;
+    getSupportedDepthSizes(ch, /*maxResolution*/false, &depthSizes);
     if (depthSizes.empty()) {
         ALOGE("%s: No depth stream configurations present", __FUNCTION__);
         return BAD_VALUE;
     }
 
-    size_t depthWidth, depthHeight;
-    auto ret = getMatchingDepthSize(streamInfo.width, streamInfo.height, depthSizes, &depthWidth,
-            &depthHeight);
+    if (SessionConfigurationUtils::isUltraHighResolutionSensor(ch)) {
+        getSupportedDepthSizes(ch, /*maxResolution*/true, &depthSizesMaximumResolution);
+        if (depthSizesMaximumResolution.empty()) {
+            ALOGE("%s: No depth stream configurations for maximum resolution present",
+                    __FUNCTION__);
+            return BAD_VALUE;
+        }
+    }
+
+    size_t chosenDepthWidth = 0, chosenDepthHeight = 0;
+    auto ret = checkAndGetMatchingDepthSize(streamInfo.width, streamInfo.height, depthSizes,
+            depthSizesMaximumResolution, streamInfo.sensorPixelModesUsed, &chosenDepthWidth,
+            &chosenDepthHeight);
+
     if (ret != OK) {
-        ALOGE("%s: No matching depth stream size found", __FUNCTION__);
+        ALOGE("%s: Couldn't get matching depth sizes", __FUNCTION__);
         return ret;
     }
 
     compositeOutput->clear();
     compositeOutput->insert(compositeOutput->end(), 2, streamInfo);
 
+    // Sensor pixel modes should stay the same here. They're already overridden.
     // Jpeg/Blob stream info
     (*compositeOutput)[0].dataSpace = kJpegDataSpace;
     (*compositeOutput)[0].consumerUsage = GRALLOC_USAGE_SW_READ_OFTEN;
 
     // Depth stream info
-    (*compositeOutput)[1].width = depthWidth;
-    (*compositeOutput)[1].height = depthHeight;
+    (*compositeOutput)[1].width = chosenDepthWidth;
+    (*compositeOutput)[1].height = chosenDepthHeight;
     (*compositeOutput)[1].format = kDepthMapPixelFormat;
     (*compositeOutput)[1].dataSpace = kDepthMapDataSpace;
     (*compositeOutput)[1].consumerUsage = GRALLOC_USAGE_SW_READ_OFTEN;
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.h b/services/camera/libcameraservice/api2/DepthCompositeStream.h
index 05bc504..a520bbf 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.h
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.h
@@ -51,7 +51,9 @@
     status_t createInternalStreams(const std::vector<sp<Surface>>& consumers,
             bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
             camera_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
-            std::vector<int> *surfaceIds, int streamSetId, bool isShared) override;
+            const std::unordered_set<int32_t> &sensorPixelModesUsed,
+            std::vector<int> *surfaceIds,
+            int streamSetId, bool isShared) override;
     status_t deleteInternalStreams() override;
     status_t configureStream() override;
     status_t insertGbp(SurfaceMap* /*out*/outSurfaceMap, Vector<int32_t>* /*out*/outputStreamIds,
@@ -86,11 +88,17 @@
     };
 
     // Helper methods
-    static void getSupportedDepthSizes(const CameraMetadata& ch,
+    static void getSupportedDepthSizes(const CameraMetadata& ch, bool maxResolution,
             std::vector<std::tuple<size_t, size_t>>* depthSizes /*out*/);
     static status_t getMatchingDepthSize(size_t width, size_t height,
             const std::vector<std::tuple<size_t, size_t>>& supporedDepthSizes,
             size_t *depthWidth /*out*/, size_t *depthHeight /*out*/);
+    static status_t checkAndGetMatchingDepthSize(size_t width, size_t height,
+        const std::vector<std::tuple<size_t, size_t>> &depthSizes,
+        const std::vector<std::tuple<size_t, size_t>> &depthSizesMaximumResolution,
+        const std::unordered_set<int32_t> &sensorPixelModesUsed,
+        size_t *depthWidth /*out*/, size_t *depthHeight /*out*/);
+
 
     // Dynamic depth processing
     status_t encodeGrayscaleJpeg(size_t width, size_t height, uint8_t *in, void *out,
@@ -126,6 +134,7 @@
 
     ssize_t              mMaxJpegSize;
     std::vector<std::tuple<size_t, size_t>> mSupportedDepthSizes;
+    std::vector<std::tuple<size_t, size_t>> mSupportedDepthSizesMaximumResolution;
     std::vector<float>   mIntrinsicCalibration, mLensDistortion;
     bool                 mIsLogicalCamera;
 
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
index 7d68485..582001d 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
@@ -36,6 +36,7 @@
 
 #include "common/CameraDeviceBase.h"
 #include "utils/ExifUtils.h"
+#include "utils/SessionConfigurationUtils.h"
 #include "HeicEncoderInfoManager.h"
 #include "HeicCompositeStream.h"
 
@@ -115,7 +116,9 @@
 status_t HeicCompositeStream::createInternalStreams(const std::vector<sp<Surface>>& consumers,
         bool /*hasDeferredConsumer*/, uint32_t width, uint32_t height, int format,
         camera_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
-        std::vector<int> *surfaceIds, int /*streamSetId*/, bool /*isShared*/) {
+        const std::unordered_set<int32_t> &sensorPixelModesUsed,
+        std::vector<int> *surfaceIds,
+        int /*streamSetId*/, bool /*isShared*/) {
 
     sp<CameraDeviceBase> device = mDevice.promote();
     if (!device.get()) {
@@ -141,7 +144,8 @@
     mStaticInfo = device->info();
 
     res = device->createStream(mAppSegmentSurface, mAppSegmentMaxSize, 1, format,
-            kAppSegmentDataSpace, rotation, &mAppSegmentStreamId, physicalCameraId, surfaceIds);
+            kAppSegmentDataSpace, rotation, &mAppSegmentStreamId, physicalCameraId,
+            sensorPixelModesUsed, surfaceIds);
     if (res == OK) {
         mAppSegmentSurfaceId = (*surfaceIds)[0];
     } else {
@@ -177,7 +181,7 @@
     int srcStreamFmt = mUseGrid ? HAL_PIXEL_FORMAT_YCbCr_420_888 :
             HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
     res = device->createStream(mMainImageSurface, width, height, srcStreamFmt, kHeifDataSpace,
-            rotation, id, physicalCameraId, &sourceSurfaceId);
+            rotation, id, physicalCameraId, sensorPixelModesUsed, &sourceSurfaceId);
     if (res == OK) {
         mMainImageSurfaceId = sourceSurfaceId[0];
         mMainImageStreamId = *id;
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.h b/services/camera/libcameraservice/api2/HeicCompositeStream.h
index cbd9d21..1077a1f 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.h
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.h
@@ -46,7 +46,9 @@
     status_t createInternalStreams(const std::vector<sp<Surface>>& consumers,
             bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
             camera_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
-            std::vector<int> *surfaceIds, int streamSetId, bool isShared) override;
+            const std::unordered_set<int32_t> &sensorPixelModesUsed,
+            std::vector<int> *surfaceIds,
+            int streamSetId, bool isShared) override;
 
     status_t deleteInternalStreams() override;
 
diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h
index 5acbb99..85b0cc2 100644
--- a/services/camera/libcameraservice/common/CameraDeviceBase.h
+++ b/services/camera/libcameraservice/common/CameraDeviceBase.h
@@ -164,6 +164,7 @@
             uint32_t width, uint32_t height, int format,
             android_dataspace dataSpace, camera_stream_rotation_t rotation, int *id,
             const String8& physicalCameraId,
+            const std::unordered_set<int32_t> &sensorPixelModesUsed,
             std::vector<int> *surfaceIds = nullptr,
             int streamSetId = camera3::CAMERA3_STREAM_SET_ID_INVALID,
             bool isShared = false, bool isMultiResolution = false,
@@ -180,6 +181,7 @@
             bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
             android_dataspace dataSpace, camera_stream_rotation_t rotation, int *id,
             const String8& physicalCameraId,
+            const std::unordered_set<int32_t> &sensorPixelModesUsed,
             std::vector<int> *surfaceIds = nullptr,
             int streamSetId = camera3::CAMERA3_STREAM_SET_ID_INVALID,
             bool isShared = false, bool isMultiResolution = false,
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.cpp b/services/camera/libcameraservice/common/CameraProviderManager.cpp
index dfe2409..62fc18f 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.cpp
+++ b/services/camera/libcameraservice/common/CameraProviderManager.cpp
@@ -686,9 +686,39 @@
     }
 }
 
-status_t CameraProviderManager::ProviderInfo::DeviceInfo3::addDynamicDepthTags() {
-    uint32_t depthExclTag = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE;
-    uint32_t depthSizesTag = ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS;
+status_t CameraProviderManager::ProviderInfo::DeviceInfo3::addDynamicDepthTags(
+        bool maxResolution) {
+    const int32_t depthExclTag = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE;
+
+    const int32_t scalerSizesTag =
+              camera3::SessionConfigurationUtils::getAppropriateModeTag(
+                      ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, maxResolution);
+    const int32_t scalerMinFrameDurationsTag =
+            ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS;
+    const int32_t scalerStallDurationsTag =
+                 camera3::SessionConfigurationUtils::getAppropriateModeTag(
+                        ANDROID_SCALER_AVAILABLE_STALL_DURATIONS, maxResolution);
+
+    const int32_t depthSizesTag =
+            camera3::SessionConfigurationUtils::getAppropriateModeTag(
+                    ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS, maxResolution);
+    const int32_t depthStallDurationsTag =
+            camera3::SessionConfigurationUtils::getAppropriateModeTag(
+                    ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS, maxResolution);
+    const int32_t depthMinFrameDurationsTag =
+            camera3::SessionConfigurationUtils::getAppropriateModeTag(
+                    ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS, maxResolution);
+
+    const int32_t dynamicDepthSizesTag =
+            camera3::SessionConfigurationUtils::getAppropriateModeTag(
+                    ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS, maxResolution);
+    const int32_t dynamicDepthStallDurationsTag =
+            camera3::SessionConfigurationUtils::getAppropriateModeTag(
+                    ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS, maxResolution);
+    const int32_t dynamicDepthMinFrameDurationsTag =
+            camera3::SessionConfigurationUtils::getAppropriateModeTag(
+                 ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS, maxResolution);
+
     auto& c = mCameraCharacteristics;
     std::vector<std::tuple<size_t, size_t>> supportedBlobSizes, supportedDepthSizes,
             supportedDynamicDepthSizes, internalDepthSizes;
@@ -718,7 +748,7 @@
         return BAD_VALUE;
     }
 
-    getSupportedSizes(c, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, HAL_PIXEL_FORMAT_BLOB,
+    getSupportedSizes(c, scalerSizesTag, HAL_PIXEL_FORMAT_BLOB,
             &supportedBlobSizes);
     getSupportedSizes(c, depthSizesTag, HAL_PIXEL_FORMAT_Y16, &supportedDepthSizes);
     if (supportedBlobSizes.empty() || supportedDepthSizes.empty()) {
@@ -745,10 +775,10 @@
     std::vector<int64_t> blobMinDurations, blobStallDurations;
     std::vector<int64_t> dynamicDepthMinDurations, dynamicDepthStallDurations;
 
-    getSupportedDurations(c, ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
-            HAL_PIXEL_FORMAT_Y16, internalDepthSizes, &depthMinDurations);
-    getSupportedDurations(c, ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
-            HAL_PIXEL_FORMAT_BLOB, supportedDynamicDepthSizes, &blobMinDurations);
+    getSupportedDurations(c, depthMinFrameDurationsTag, HAL_PIXEL_FORMAT_Y16, internalDepthSizes,
+                          &depthMinDurations);
+    getSupportedDurations(c, scalerMinFrameDurationsTag, HAL_PIXEL_FORMAT_BLOB,
+                          supportedDynamicDepthSizes, &blobMinDurations);
     if (blobMinDurations.empty() || depthMinDurations.empty() ||
             (depthMinDurations.size() != blobMinDurations.size())) {
         ALOGE("%s: Unexpected number of available depth min durations! %zu vs. %zu",
@@ -756,10 +786,10 @@
         return BAD_VALUE;
     }
 
-    getSupportedDurations(c, ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
-            HAL_PIXEL_FORMAT_Y16, internalDepthSizes, &depthStallDurations);
-    getSupportedDurations(c, ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
-            HAL_PIXEL_FORMAT_BLOB, supportedDynamicDepthSizes, &blobStallDurations);
+    getSupportedDurations(c, depthStallDurationsTag, HAL_PIXEL_FORMAT_Y16, internalDepthSizes,
+            &depthStallDurations);
+    getSupportedDurations(c, scalerStallDurationsTag, HAL_PIXEL_FORMAT_BLOB,
+            supportedDynamicDepthSizes, &blobStallDurations);
     if (blobStallDurations.empty() || depthStallDurations.empty() ||
             (depthStallDurations.size() != blobStallDurations.size())) {
         ALOGE("%s: Unexpected number of available depth stall durations! %zu vs. %zu",
@@ -804,15 +834,14 @@
     supportedChTags.reserve(chTags.count + 3);
     supportedChTags.insert(supportedChTags.end(), chTags.data.i32,
             chTags.data.i32 + chTags.count);
-    supportedChTags.push_back(ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS);
-    supportedChTags.push_back(ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS);
-    supportedChTags.push_back(ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS);
-    c.update(ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS,
-            dynamicDepthEntries.data(), dynamicDepthEntries.size());
-    c.update(ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS,
-            dynamicDepthMinDurationEntries.data(), dynamicDepthMinDurationEntries.size());
-    c.update(ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS,
-            dynamicDepthStallDurationEntries.data(), dynamicDepthStallDurationEntries.size());
+    supportedChTags.push_back(dynamicDepthSizesTag);
+    supportedChTags.push_back(dynamicDepthMinFrameDurationsTag);
+    supportedChTags.push_back(dynamicDepthStallDurationsTag);
+    c.update(dynamicDepthSizesTag, dynamicDepthEntries.data(), dynamicDepthEntries.size());
+    c.update(dynamicDepthMinFrameDurationsTag, dynamicDepthMinDurationEntries.data(),
+            dynamicDepthMinDurationEntries.size());
+    c.update(dynamicDepthStallDurationsTag, dynamicDepthStallDurationEntries.data(),
+             dynamicDepthStallDurationEntries.size());
     c.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, supportedChTags.data(),
             supportedChTags.size());
 
@@ -1046,7 +1075,24 @@
     return OK;
 }
 
-status_t CameraProviderManager::ProviderInfo::DeviceInfo3::deriveHeicTags() {
+status_t CameraProviderManager::ProviderInfo::DeviceInfo3::deriveHeicTags(bool maxResolution) {
+    int32_t scalerStreamSizesTag =
+            camera3::SessionConfigurationUtils::getAppropriateModeTag(
+                    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, maxResolution);
+    int32_t scalerMinFrameDurationsTag =
+            camera3::SessionConfigurationUtils::getAppropriateModeTag(
+                    ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, maxResolution);
+
+    int32_t heicStreamSizesTag =
+            camera3::SessionConfigurationUtils::getAppropriateModeTag(
+                    ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS, maxResolution);
+    int32_t heicMinFrameDurationsTag =
+            camera3::SessionConfigurationUtils::getAppropriateModeTag(
+                    ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS, maxResolution);
+    int32_t heicStallDurationsTag =
+            camera3::SessionConfigurationUtils::getAppropriateModeTag(
+                    ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS, maxResolution);
+
     auto& c = mCameraCharacteristics;
 
     camera_metadata_entry halHeicSupport = c.find(ANDROID_HEIC_INFO_SUPPORTED);
@@ -1075,10 +1121,8 @@
     std::vector<int64_t> heicDurations;
     std::vector<int64_t> heicStallDurations;
 
-    camera_metadata_entry halStreamConfigs =
-            c.find(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
-    camera_metadata_entry minFrameDurations =
-            c.find(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS);
+    camera_metadata_entry halStreamConfigs = c.find(scalerStreamSizesTag);
+    camera_metadata_entry minFrameDurations = c.find(scalerMinFrameDurationsTag);
 
     status_t res = fillHeicStreamCombinations(&heicOutputs, &heicDurations, &heicStallDurations,
             halStreamConfigs, minFrameDurations);
@@ -1088,12 +1132,9 @@
         return res;
     }
 
-    c.update(ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS,
-           heicOutputs.data(), heicOutputs.size());
-    c.update(ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS,
-            heicDurations.data(), heicDurations.size());
-    c.update(ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS,
-            heicStallDurations.data(), heicStallDurations.size());
+    c.update(heicStreamSizesTag, heicOutputs.data(), heicOutputs.size());
+    c.update(heicMinFrameDurationsTag, heicDurations.data(), heicDurations.size());
+    c.update(heicStallDurationsTag, heicStallDurations.data(), heicStallDurations.size());
 
     return OK;
 }
@@ -2005,16 +2046,20 @@
                 size_t numStreams = halCameraIdsAndStreamCombinations.size();
                 halCameraIdsAndStreamCombinations_2_6.resize(numStreams);
                 for (size_t i = 0; i < numStreams; i++) {
+                    using namespace camera3;
                     auto const& combination = halCameraIdsAndStreamCombinations[i];
                     halCameraIdsAndStreamCombinations_2_6[i].cameraId = combination.cameraId;
                     bool success =
                             SessionConfigurationUtils::convertHALStreamCombinationFromV37ToV34(
-                            halCameraIdsAndStreamCombinations_2_6[i].streamConfiguration,
-                            combination.streamConfiguration);
+                                    halCameraIdsAndStreamCombinations_2_6[i].streamConfiguration,
+                                    combination.streamConfiguration);
                     if (!success) {
                         *isSupported = false;
                         return OK;
                     }
+                    camera3::SessionConfigurationUtils::convertHALStreamCombinationFromV37ToV34(
+                            halCameraIdsAndStreamCombinations_2_6[i].streamConfiguration,
+                            combination.streamConfiguration);
                 }
                 ret = interface_2_6->isConcurrentStreamCombinationSupported(
                         halCameraIdsAndStreamCombinations_2_6, cb);
@@ -2220,6 +2265,21 @@
         ALOGE("%s: Unable to derive HEIC tags based on camera and media capabilities: %s (%d)",
                 __FUNCTION__, strerror(-res), res);
     }
+
+    if (camera3::SessionConfigurationUtils::isUltraHighResolutionSensor(mCameraCharacteristics)) {
+        status_t status = addDynamicDepthTags(/*maxResolution*/true);
+        if (OK != status) {
+            ALOGE("%s: Failed appending dynamic depth tags for maximum resolution mode: %s (%d)",
+                    __FUNCTION__, strerror(-status), status);
+        }
+
+        status = deriveHeicTags(/*maxResolution*/true);
+        if (OK != status) {
+            ALOGE("%s: Unable to derive HEIC tags based on camera and media capabilities for "
+                    "maximum resolution mode: %s (%d)", __FUNCTION__, strerror(-status), status);
+        }
+    }
+
     res = addRotateCropTags();
     if (OK != res) {
         ALOGE("%s: Unable to add default SCALER_ROTATE_AND_CROP tags: %s (%d)", __FUNCTION__,
@@ -2426,26 +2486,22 @@
     status_t res;
     Status callStatus;
     ::android::hardware::Return<void> ret;
-    if (interface_3_7 != nullptr) {
-        ret = interface_3_7->isStreamCombinationSupported_3_7(configuration,
+    auto halCb =
             [&callStatus, &status] (Status s, bool combStatus) {
                 callStatus = s;
                 *status = combStatus;
-            });
+            };
+    if (interface_3_7 != nullptr) {
+        ret = interface_3_7->isStreamCombinationSupported_3_7(configuration, halCb);
     } else if (interface_3_5 != nullptr) {
         hardware::camera::device::V3_4::StreamConfiguration configuration_3_4;
-        bool success = SessionConfigurationUtils::convertHALStreamCombinationFromV37ToV34(
+        bool success = camera3::SessionConfigurationUtils::convertHALStreamCombinationFromV37ToV34(
                 configuration_3_4, configuration);
         if (!success) {
             *status = false;
             return OK;
         }
-
-        ret = interface_3_5->isStreamCombinationSupported(configuration_3_4,
-            [&callStatus, &status] (Status s, bool combStatus) {
-                callStatus = s;
-                *status = combStatus;
-            });
+        ret = interface_3_5->isStreamCombinationSupported(configuration_3_4, halCb);
     } else {
         return INVALID_OPERATION;
     }
@@ -2829,7 +2885,7 @@
         if (res != OK) {
             return res;
         }
-        metadataGetter getMetadata =
+        camera3::metadataGetter getMetadata =
                 [this](const String8 &id) {
                     CameraMetadata physicalDeviceInfo;
                     getCameraCharacteristicsLocked(id.string(), &physicalDeviceInfo);
@@ -2838,7 +2894,7 @@
         std::vector<std::string> physicalCameraIds;
         isLogicalCameraLocked(cameraIdAndSessionConfig.mCameraId, &physicalCameraIds);
         bStatus =
-            SessionConfigurationUtils::convertToHALStreamCombination(
+            camera3::SessionConfigurationUtils::convertToHALStreamCombination(
                     cameraIdAndSessionConfig.mSessionConfiguration,
                     String8(cameraIdAndSessionConfig.mCameraId.c_str()), deviceInfo, getMetadata,
                     physicalCameraIds, streamConfiguration, &shouldExit);
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.h b/services/camera/libcameraservice/common/CameraProviderManager.h
index fa9cc1c..12bda9b 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.h
+++ b/services/camera/libcameraservice/common/CameraProviderManager.h
@@ -556,8 +556,8 @@
             void queryPhysicalCameraIds();
             SystemCameraKind getSystemCameraKind();
             status_t fixupMonochromeTags();
-            status_t addDynamicDepthTags();
-            status_t deriveHeicTags();
+            status_t addDynamicDepthTags(bool maxResolution = false);
+            status_t deriveHeicTags(bool maxResolution = false);
             status_t addRotateCropTags();
             status_t addPreCorrectionActiveArraySize();
 
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 35a06d8..bf7e597 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -60,6 +60,7 @@
 #include "device3/Camera3SharedOutputStream.h"
 #include "CameraService.h"
 #include "utils/CameraThreadState.h"
+#include "utils/SessionConfigurationUtils.h"
 #include "utils/TraceHFR.h"
 #include "utils/CameraServiceProxyWrapper.h"
 
@@ -69,6 +70,7 @@
 using namespace android::camera3;
 using namespace android::hardware::camera;
 using namespace android::hardware::camera::device::V3_2;
+using android::hardware::camera::metadata::V3_6::CameraMetadataEnumAndroidSensorPixelMode;
 
 namespace android {
 
@@ -489,8 +491,13 @@
     const int STREAM_WIDTH_OFFSET = 1;
     const int STREAM_HEIGHT_OFFSET = 2;
     const int STREAM_IS_INPUT_OFFSET = 3;
+    bool isHighResolutionSensor =
+            camera3::SessionConfigurationUtils::isUltraHighResolutionSensor(mDeviceInfo);
+    int32_t scalerSizesTag = isHighResolutionSensor ?
+            ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION :
+                    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS;
     camera_metadata_ro_entry_t availableStreamConfigs =
-            mDeviceInfo.find(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
+            mDeviceInfo.find(scalerSizesTag);
     if (availableStreamConfigs.count == 0 ||
             availableStreamConfigs.count % STREAM_CONFIGURATION_SIZE != 0) {
         return camera3::Size(0, 0);
@@ -628,6 +635,8 @@
     ssize_t jpegBufferSize = scaleFactor * (maxJpegBufferSize - kMinJpegBufferSize) +
             kMinJpegBufferSize;
     if (jpegBufferSize > maxJpegBufferSize) {
+        ALOGI("%s: jpeg buffer size calculated is > maxJpeg bufferSize(%zd), clamping",
+                  __FUNCTION__, maxJpegBufferSize);
         jpegBufferSize = maxJpegBufferSize;
     }
 
@@ -647,13 +656,17 @@
     return maxBytesForPointCloud;
 }
 
-ssize_t Camera3Device::getRawOpaqueBufferSize(int32_t width, int32_t height) const {
+ssize_t Camera3Device::getRawOpaqueBufferSize(int32_t width, int32_t height,
+        bool maxResolution) const {
     const int PER_CONFIGURATION_SIZE = 3;
     const int WIDTH_OFFSET = 0;
     const int HEIGHT_OFFSET = 1;
     const int SIZE_OFFSET = 2;
     camera_metadata_ro_entry rawOpaqueSizes =
-        mDeviceInfo.find(ANDROID_SENSOR_OPAQUE_RAW_SIZE);
+        mDeviceInfo.find(
+            camera3::SessionConfigurationUtils::getAppropriateModeTag(
+                    ANDROID_SENSOR_OPAQUE_RAW_SIZE,
+                    maxResolution));
     size_t count = rawOpaqueSizes.count;
     if (count == 0 || (count % PER_CONFIGURATION_SIZE)) {
         ALOGE("%s: Camera %s: bad opaque RAW size static metadata length(%zu)!",
@@ -1325,8 +1338,9 @@
             uint32_t width, uint32_t height, int format,
             android_dataspace dataSpace, camera_stream_rotation_t rotation, int *id,
             const String8& physicalCameraId,
-            std::vector<int> *surfaceIds, int streamSetId, bool isShared,
-            bool isMultiResolution, uint64_t consumerUsage) {
+            const std::unordered_set<int32_t> &sensorPixelModesUsed,
+            std::vector<int> *surfaceIds, int streamSetId, bool isShared, bool isMultiResolution,
+            uint64_t consumerUsage) {
     ATRACE_CALL();
 
     if (consumer == nullptr) {
@@ -1338,14 +1352,26 @@
     consumers.push_back(consumer);
 
     return createStream(consumers, /*hasDeferredConsumer*/ false, width, height,
-            format, dataSpace, rotation, id, physicalCameraId, surfaceIds, streamSetId,
-            isShared, isMultiResolution, consumerUsage);
+            format, dataSpace, rotation, id, physicalCameraId, sensorPixelModesUsed, surfaceIds,
+            streamSetId, isShared, isMultiResolution, consumerUsage);
+}
+
+static bool isRawFormat(int format) {
+    switch (format) {
+        case HAL_PIXEL_FORMAT_RAW16:
+        case HAL_PIXEL_FORMAT_RAW12:
+        case HAL_PIXEL_FORMAT_RAW10:
+        case HAL_PIXEL_FORMAT_RAW_OPAQUE:
+            return true;
+        default:
+            return false;
+    }
 }
 
 status_t Camera3Device::createStream(const std::vector<sp<Surface>>& consumers,
         bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
         android_dataspace dataSpace, camera_stream_rotation_t rotation, int *id,
-        const String8& physicalCameraId,
+        const String8& physicalCameraId, const std::unordered_set<int32_t> &sensorPixelModesUsed,
         std::vector<int> *surfaceIds, int streamSetId, bool isShared, bool isMultiResolution,
         uint64_t consumerUsage) {
     ATRACE_CALL();
@@ -1399,6 +1425,12 @@
         return BAD_VALUE;
     }
 
+    if (isRawFormat(format) && sensorPixelModesUsed.size() > 1) {
+        // We can't use one stream with a raw format in both sensor pixel modes since it's going to
+        // be found in only one sensor pixel mode.
+        ALOGE("%s: RAW format stream cannot be used with > 1 sensor pixel modes", __FUNCTION__);
+        return BAD_VALUE;
+    }
     if (format == HAL_PIXEL_FORMAT_BLOB) {
         ssize_t blobBufferSize;
         if (dataSpace == HAL_DATASPACE_DEPTH) {
@@ -1418,28 +1450,36 @@
         }
         newStream = new Camera3OutputStream(mNextStreamId, consumers[0],
                 width, height, blobBufferSize, format, dataSpace, rotation,
-                mTimestampOffset, physicalCameraId, streamSetId, isMultiResolution);
+                mTimestampOffset, physicalCameraId, sensorPixelModesUsed, streamSetId,
+                isMultiResolution);
     } else if (format == HAL_PIXEL_FORMAT_RAW_OPAQUE) {
-        ssize_t rawOpaqueBufferSize = getRawOpaqueBufferSize(width, height);
+        bool maxResolution =
+                sensorPixelModesUsed.find(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) !=
+                        sensorPixelModesUsed.end();
+        ssize_t rawOpaqueBufferSize = getRawOpaqueBufferSize(width, height, maxResolution);
         if (rawOpaqueBufferSize <= 0) {
             SET_ERR_L("Invalid RAW opaque buffer size %zd", rawOpaqueBufferSize);
             return BAD_VALUE;
         }
         newStream = new Camera3OutputStream(mNextStreamId, consumers[0],
                 width, height, rawOpaqueBufferSize, format, dataSpace, rotation,
-                mTimestampOffset, physicalCameraId, streamSetId, isMultiResolution);
+                mTimestampOffset, physicalCameraId, sensorPixelModesUsed, streamSetId,
+                isMultiResolution);
     } else if (isShared) {
         newStream = new Camera3SharedOutputStream(mNextStreamId, consumers,
                 width, height, format, consumerUsage, dataSpace, rotation,
-                mTimestampOffset, physicalCameraId, streamSetId, mUseHalBufManager);
+                mTimestampOffset, physicalCameraId, sensorPixelModesUsed, streamSetId,
+                mUseHalBufManager);
     } else if (consumers.size() == 0 && hasDeferredConsumer) {
         newStream = new Camera3OutputStream(mNextStreamId,
                 width, height, format, consumerUsage, dataSpace, rotation,
-                mTimestampOffset, physicalCameraId, streamSetId, isMultiResolution);
+                mTimestampOffset, physicalCameraId, sensorPixelModesUsed, streamSetId,
+                isMultiResolution);
     } else {
         newStream = new Camera3OutputStream(mNextStreamId, consumers[0],
                 width, height, format, dataSpace, rotation,
-                mTimestampOffset, physicalCameraId, streamSetId, isMultiResolution);
+                mTimestampOffset, physicalCameraId, sensorPixelModesUsed, streamSetId,
+                isMultiResolution);
     }
 
     size_t consumerCount = consumers.size();
@@ -2611,6 +2651,7 @@
         config.input_is_multi_resolution = mIsInputStreamMultiResolution;
     }
 
+    mGroupIdPhysicalCameraMap.clear();
     for (size_t i = 0; i < mOutputStreams.size(); i++) {
 
         // Don't configure bidi streams twice, nor add them twice to the list
@@ -2644,6 +2685,12 @@
                         __FUNCTION__, outputStream->data_space);
             }
         }
+
+        if (mOutputStreams[i]->isMultiResolution()) {
+            int32_t streamGroupId = mOutputStreams[i]->getHalStreamGroupId();
+            const String8& physicalCameraId = mOutputStreams[i]->getPhysicalCameraId();
+            mGroupIdPhysicalCameraMap[streamGroupId].insert(physicalCameraId);
+        }
     }
 
     config.streams = streams.editArray();
@@ -2714,7 +2761,8 @@
     // Request thread needs to know to avoid using repeat-last-settings protocol
     // across configure_streams() calls
     if (notifyRequestThread) {
-        mRequestThread->configurationComplete(mIsConstrainedHighSpeedConfiguration, sessionParams);
+        mRequestThread->configurationComplete(mIsConstrainedHighSpeedConfiguration,
+                sessionParams, mGroupIdPhysicalCameraMap);
     }
 
     char value[PROPERTY_VALUE_MAX];
@@ -2887,8 +2935,9 @@
 status_t Camera3Device::registerInFlight(uint32_t frameNumber,
         int32_t numBuffers, CaptureResultExtras resultExtras, bool hasInput,
         bool hasAppCallback, nsecs_t maxExpectedDuration,
-        std::set<String8>& physicalCameraIds, bool isStillCapture,
-        bool isZslCapture, bool rotateAndCropAuto, const std::set<std::string>& cameraIdsWithZoom,
+        const std::set<std::set<String8>>& physicalCameraIds,
+        bool isStillCapture, bool isZslCapture, bool rotateAndCropAuto,
+        const std::set<std::string>& cameraIdsWithZoom,
         const SurfaceMap& outputSurfaces, nsecs_t requestTimeNs) {
     ATRACE_CALL();
     std::lock_guard<std::mutex> l(mInFlightLock);
@@ -3212,7 +3261,7 @@
         dst3_2.usage = mapToConsumerUsage(cam3stream->getUsage());
         dst3_2.rotation = mapToStreamRotation((camera_stream_rotation_t) src->rotation);
         // For HidlSession version 3.5 or newer, the format and dataSpace sent
-        // to HAL are original, not the overriden ones.
+        // to HAL are original, not the overridden ones.
         if (mHidlSession_3_5 != nullptr) {
             dst3_2.format = mapToPixelFormat(cam3stream->isFormatOverridden() ?
                     cam3stream->getOriginalFormat() : src->format);
@@ -3229,7 +3278,12 @@
         }
         dst3_7.v3_4 = dst3_4;
         dst3_7.groupId = cam3stream->getHalStreamGroupId();
-
+        dst3_7.sensorPixelModesUsed.resize(src->sensor_pixel_modes_used.size());
+        size_t j = 0;
+        for (int mode : src->sensor_pixel_modes_used) {
+            dst3_7.sensorPixelModesUsed[j++] =
+                    static_cast<CameraMetadataEnumAndroidSensorPixelMode>(mode);
+        }
         activeStreams.insert(streamId);
         // Create Buffer ID map if necessary
         mBufferRecords.tryCreateBufferCache(streamId);
@@ -3246,13 +3300,14 @@
     }
     requestedConfiguration3_2.operationMode = operationMode;
     requestedConfiguration3_4.operationMode = operationMode;
+    size_t sessionParamSize = get_camera_metadata_size(sessionParams);
     requestedConfiguration3_4.sessionParams.setToExternal(
             reinterpret_cast<uint8_t*>(const_cast<camera_metadata_t*>(sessionParams)),
-            get_camera_metadata_size(sessionParams));
+            sessionParamSize);
     requestedConfiguration3_7.operationMode = operationMode;
     requestedConfiguration3_7.sessionParams.setToExternal(
             reinterpret_cast<uint8_t*>(const_cast<camera_metadata_t*>(sessionParams)),
-            get_camera_metadata_size(sessionParams));
+            sessionParamSize);
 
     // Invoke configureStreams
     device::V3_3::HalStreamConfiguration finalConfiguration;
@@ -3962,11 +4018,13 @@
 }
 
 void Camera3Device::RequestThread::configurationComplete(bool isConstrainedHighSpeed,
-        const CameraMetadata& sessionParams) {
+        const CameraMetadata& sessionParams,
+        const std::map<int32_t, std::set<String8>>& groupIdPhysicalCameraMap) {
     ATRACE_CALL();
     Mutex::Autolock l(mRequestLock);
     mReconfigured = true;
     mLatestSessionParams = sessionParams;
+    mGroupIdPhysicalCameraMap = groupIdPhysicalCameraMap;
     // Prepare video stream for high speed recording.
     mPrepareVideoStream = isConstrainedHighSpeed;
     mConstrainedMode = isConstrainedHighSpeed;
@@ -4725,7 +4783,7 @@
         outputBuffers->insertAt(camera_stream_buffer_t(), 0,
                 captureRequest->mOutputStreams.size());
         halRequest->output_buffers = outputBuffers->array();
-        std::set<String8> requestedPhysicalCameras;
+        std::set<std::set<String8>> requestedPhysicalCameras;
 
         sp<Camera3Device> parent = mParent.promote();
         if (parent == NULL) {
@@ -4820,8 +4878,11 @@
             }
 
             String8 physicalCameraId = outputStream->getPhysicalCameraId();
-            if (!physicalCameraId.isEmpty()) {
-                requestedPhysicalCameras.insert(physicalCameraId);
+            int32_t streamGroupId = outputStream->getHalStreamGroupId();
+            if (streamGroupId != -1 && mGroupIdPhysicalCameraMap.count(streamGroupId) == 1) {
+                requestedPhysicalCameras.insert(mGroupIdPhysicalCameraMap[streamGroupId]);
+            } else if (!physicalCameraId.isEmpty()) {
+                requestedPhysicalCameras.insert(std::set<String8>({physicalCameraId}));
             }
             halRequest->num_output_buffers++;
         }
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 018dbe5..d9e89fd 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -132,14 +132,17 @@
             uint32_t width, uint32_t height, int format,
             android_dataspace dataSpace, camera_stream_rotation_t rotation, int *id,
             const String8& physicalCameraId,
+            const std::unordered_set<int32_t> &sensorPixelModesUsed,
             std::vector<int> *surfaceIds = nullptr,
             int streamSetId = camera3::CAMERA3_STREAM_SET_ID_INVALID,
             bool isShared = false, bool isMultiResolution = false,
             uint64_t consumerUsage = 0) override;
+
     status_t createStream(const std::vector<sp<Surface>>& consumers,
             bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
             android_dataspace dataSpace, camera_stream_rotation_t rotation, int *id,
             const String8& physicalCameraId,
+            const std::unordered_set<int32_t> &sensorPixelModesUsed,
             std::vector<int> *surfaceIds = nullptr,
             int streamSetId = camera3::CAMERA3_STREAM_SET_ID_INVALID,
             bool isShared = false, bool isMultiResolution = false,
@@ -190,7 +193,7 @@
 
     ssize_t getJpegBufferSize(uint32_t width, uint32_t height) const override;
     ssize_t getPointCloudBufferSize() const;
-    ssize_t getRawOpaqueBufferSize(int32_t width, int32_t height) const;
+    ssize_t getRawOpaqueBufferSize(int32_t width, int32_t height, bool maxResolution) const;
 
     // Methods called by subclasses
     void             notifyStatus(bool idle); // updates from StatusTracker
@@ -497,6 +500,8 @@
     sp<camera3::Camera3Stream> mInputStream;
     bool                       mIsInputStreamMultiResolution;
     SessionStatsBuilder        mSessionStatsBuilder;
+    // Map from stream group ID to physical cameras backing the stream group
+    std::map<int32_t, std::set<String8>> mGroupIdPhysicalCameraMap;
 
     int                        mNextStreamId;
     bool                       mNeedConfig;
@@ -800,7 +805,8 @@
          * Call after stream (re)-configuration is completed.
          */
         void     configurationComplete(bool isConstrainedHighSpeed,
-                const CameraMetadata& sessionParams);
+                const CameraMetadata& sessionParams,
+                const std::map<int32_t, std::set<String8>>& groupIdPhysicalCameraMap);
 
         /**
          * Set or clear the list of repeating requests. Does not block
@@ -1057,6 +1063,8 @@
         Vector<int32_t>    mSessionParamKeys;
         CameraMetadata     mLatestSessionParams;
 
+        std::map<int32_t, std::set<String8>> mGroupIdPhysicalCameraMap;
+
         const bool         mUseHalBufManager;
     };
     sp<RequestThread> mRequestThread;
@@ -1076,7 +1084,8 @@
 
     status_t registerInFlight(uint32_t frameNumber,
             int32_t numBuffers, CaptureResultExtras resultExtras, bool hasInput,
-            bool callback, nsecs_t maxExpectedDuration, std::set<String8>& physicalCameraIds,
+            bool callback, nsecs_t maxExpectedDuration,
+            const std::set<std::set<String8>>& physicalCameraIds,
             bool isStillCapture, bool isZslCapture, bool rotateAndCropAuto,
             const std::set<std::string>& cameraIdsWithZoom, const SurfaceMap& outputSurfaces,
             nsecs_t requestTimeNs);
diff --git a/services/camera/libcameraservice/device3/Camera3FakeStream.cpp b/services/camera/libcameraservice/device3/Camera3FakeStream.cpp
index 2196c7d..8cc6833 100644
--- a/services/camera/libcameraservice/device3/Camera3FakeStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3FakeStream.cpp
@@ -31,7 +31,7 @@
 Camera3FakeStream::Camera3FakeStream(int id) :
         Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, FAKE_WIDTH, FAKE_HEIGHT,
                 /*maxSize*/0, FAKE_FORMAT, FAKE_DATASPACE, FAKE_ROTATION,
-                FAKE_ID) {
+                FAKE_ID, std::unordered_set<int32_t>{ANDROID_SENSOR_PIXEL_MODE_DEFAULT}) {
 
 }
 
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
index a837900..0204d49 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
@@ -32,10 +32,12 @@
 Camera3IOStreamBase::Camera3IOStreamBase(int id, camera_stream_type_t type,
         uint32_t width, uint32_t height, size_t maxSize, int format,
         android_dataspace dataSpace, camera_stream_rotation_t rotation,
-        const String8& physicalCameraId, int setId, bool isMultiResolution) :
+        const String8& physicalCameraId,
+        const std::unordered_set<int32_t> &sensorPixelModesUsed,
+        int setId, bool isMultiResolution) :
         Camera3Stream(id, type,
                 width, height, maxSize, format, dataSpace, rotation,
-                physicalCameraId, setId, isMultiResolution),
+                physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution),
         mTotalBufferCount(0),
         mHandoutTotalBufferCount(0),
         mHandoutOutputBufferCount(0),
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.h b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
index 2e744ee..90c8a7b 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
@@ -36,6 +36,7 @@
             uint32_t width, uint32_t height, size_t maxSize, int format,
             android_dataspace dataSpace, camera_stream_rotation_t rotation,
             const String8& physicalCameraId,
+            const std::unordered_set<int32_t> &sensorPixelModesUsed,
             int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false);
 
   public:
diff --git a/services/camera/libcameraservice/device3/Camera3InputStream.cpp b/services/camera/libcameraservice/device3/Camera3InputStream.cpp
index b00a963..6d8317b 100644
--- a/services/camera/libcameraservice/device3/Camera3InputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3InputStream.cpp
@@ -33,7 +33,8 @@
         uint32_t width, uint32_t height, int format) :
         Camera3IOStreamBase(id, CAMERA_STREAM_INPUT, width, height, /*maxSize*/0,
                             format, HAL_DATASPACE_UNKNOWN, CAMERA_STREAM_ROTATION_0,
-                            FAKE_ID) {
+                            FAKE_ID,
+                            std::unordered_set<int32_t>{ANDROID_SENSOR_PIXEL_MODE_DEFAULT}) {
 
     if (format == HAL_PIXEL_FORMAT_BLOB) {
         ALOGE("%s: Bad format, BLOB not supported", __FUNCTION__);
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index 3ec3b6b..221bebb 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -44,10 +44,11 @@
         uint32_t width, uint32_t height, int format,
         android_dataspace dataSpace, camera_stream_rotation_t rotation,
         nsecs_t timestampOffset, const String8& physicalCameraId,
+        const std::unordered_set<int32_t> &sensorPixelModesUsed,
         int setId, bool isMultiResolution) :
         Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height,
                             /*maxSize*/0, format, dataSpace, rotation,
-                            physicalCameraId, setId, isMultiResolution),
+                            physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution),
         mConsumer(consumer),
         mTransform(0),
         mTraceFirstBuffer(true),
@@ -70,11 +71,12 @@
         sp<Surface> consumer,
         uint32_t width, uint32_t height, size_t maxSize, int format,
         android_dataspace dataSpace, camera_stream_rotation_t rotation,
-        nsecs_t timestampOffset, const String8& physicalCameraId, int setId,
-        bool isMultiResolution) :
+        nsecs_t timestampOffset, const String8& physicalCameraId,
+        const std::unordered_set<int32_t> &sensorPixelModesUsed,
+        int setId, bool isMultiResolution) :
         Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height, maxSize,
-                            format, dataSpace, rotation, physicalCameraId, setId,
-                            isMultiResolution),
+                            format, dataSpace, rotation, physicalCameraId, sensorPixelModesUsed,
+                            setId, isMultiResolution),
         mConsumer(consumer),
         mTransform(0),
         mTraceFirstBuffer(true),
@@ -104,10 +106,12 @@
         uint32_t width, uint32_t height, int format,
         uint64_t consumerUsage, android_dataspace dataSpace,
         camera_stream_rotation_t rotation, nsecs_t timestampOffset,
-        const String8& physicalCameraId, int setId, bool isMultiResolution) :
+        const String8& physicalCameraId,
+        const std::unordered_set<int32_t> &sensorPixelModesUsed,
+        int setId, bool isMultiResolution) :
         Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height,
                             /*maxSize*/0, format, dataSpace, rotation,
-                            physicalCameraId, setId, isMultiResolution),
+                            physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution),
         mConsumer(nullptr),
         mTransform(0),
         mTraceFirstBuffer(true),
@@ -142,12 +146,13 @@
                                          android_dataspace dataSpace,
                                          camera_stream_rotation_t rotation,
                                          const String8& physicalCameraId,
+                                        const std::unordered_set<int32_t> &sensorPixelModesUsed,
                                          uint64_t consumerUsage, nsecs_t timestampOffset,
                                          int setId, bool isMultiResolution) :
         Camera3IOStreamBase(id, type, width, height,
                             /*maxSize*/0,
                             format, dataSpace, rotation,
-                            physicalCameraId, setId, isMultiResolution),
+                            physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution),
         mTransform(0),
         mTraceFirstBuffer(true),
         mUseMonoTimestamp(false),
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.h b/services/camera/libcameraservice/device3/Camera3OutputStream.h
index c82f2a6..00e4854 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.h
@@ -87,8 +87,8 @@
             uint32_t width, uint32_t height, int format,
             android_dataspace dataSpace, camera_stream_rotation_t rotation,
             nsecs_t timestampOffset, const String8& physicalCameraId,
+            const std::unordered_set<int32_t> &sensorPixelModesUsed,
             int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false);
-
     /**
      * Set up a stream for formats that have a variable buffer size for the same
      * dimensions, such as compressed JPEG.
@@ -99,8 +99,8 @@
             uint32_t width, uint32_t height, size_t maxSize, int format,
             android_dataspace dataSpace, camera_stream_rotation_t rotation,
             nsecs_t timestampOffset, const String8& physicalCameraId,
+            const std::unordered_set<int32_t> &sensorPixelModesUsed,
             int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false);
-
     /**
      * Set up a stream with deferred consumer for formats that have 2 dimensions, such as
      * RAW and YUV. The consumer must be set before using this stream for output. A valid
@@ -110,6 +110,7 @@
             uint64_t consumerUsage, android_dataspace dataSpace,
             camera_stream_rotation_t rotation, nsecs_t timestampOffset,
             const String8& physicalCameraId,
+            const std::unordered_set<int32_t> &sensorPixelModesUsed,
             int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false);
 
     virtual ~Camera3OutputStream();
@@ -234,6 +235,7 @@
             uint32_t width, uint32_t height, int format,
             android_dataspace dataSpace, camera_stream_rotation_t rotation,
             const String8& physicalCameraId,
+            const std::unordered_set<int32_t> &sensorPixelModesUsed,
             uint64_t consumerUsage = 0, nsecs_t timestampOffset = 0,
             int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false);
 
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
index 384c2c6..9f225d0 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
@@ -484,6 +484,20 @@
     states.inflightIntf.checkInflightMapLengthLocked();
 }
 
+// Erase the subset of physicalCameraIds that contains id
+bool erasePhysicalCameraIdSet(
+        std::set<std::set<String8>>& physicalCameraIds, const String8& id) {
+    bool found = false;
+    for (auto iter = physicalCameraIds.begin(); iter != physicalCameraIds.end(); iter++) {
+        if (iter->count(id) == 1) {
+            physicalCameraIds.erase(iter);
+            found = true;
+            break;
+        }
+    }
+    return found;
+}
+
 void processCaptureResult(CaptureOutputStates& states, const camera_capture_result *result) {
     ATRACE_CALL();
 
@@ -583,12 +597,10 @@
             }
             for (uint32_t i = 0; i < result->num_physcam_metadata; i++) {
                 String8 physicalId(result->physcam_ids[i]);
-                std::set<String8>::iterator cameraIdIter =
-                        request.physicalCameraIds.find(physicalId);
-                if (cameraIdIter != request.physicalCameraIds.end()) {
-                    request.physicalCameraIds.erase(cameraIdIter);
-                } else {
-                    SET_ERR("Total result for frame %d has already returned for camera %s",
+                bool validPhysicalCameraMetadata =
+                        erasePhysicalCameraIdSet(request.physicalCameraIds, physicalId);
+                if (!validPhysicalCameraMetadata) {
+                    SET_ERR("Unexpected total result for frame %d camera %s",
                             frameNumber, physicalId.c_str());
                     return;
                 }
@@ -1083,14 +1095,14 @@
                             errorCode) {
                         if (physicalCameraId.size() > 0) {
                             String8 cameraId(physicalCameraId);
-                            auto iter = r.physicalCameraIds.find(cameraId);
-                            if (iter == r.physicalCameraIds.end()) {
+                            bool validPhysicalCameraId =
+                                    erasePhysicalCameraIdSet(r.physicalCameraIds, cameraId);
+                            if (!validPhysicalCameraId) {
                                 ALOGE("%s: Reported result failure for physical camera device: %s "
                                         " which is not part of the respective request!",
                                         __FUNCTION__, cameraId.string());
                                 break;
                             }
-                            r.physicalCameraIds.erase(iter);
                             resultExtras.errorPhysicalCameraId = physicalCameraId;
                             physicalDeviceResultError = true;
                         }
diff --git a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
index 8aa5f1a..15cf7f4 100644
--- a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
@@ -32,9 +32,10 @@
         uint64_t consumerUsage, android_dataspace dataSpace,
         camera_stream_rotation_t rotation,
         nsecs_t timestampOffset, const String8& physicalCameraId,
+        const std::unordered_set<int32_t> &sensorPixelModesUsed,
         int setId, bool useHalBufManager) :
         Camera3OutputStream(id, CAMERA_STREAM_OUTPUT, width, height,
-                            format, dataSpace, rotation, physicalCameraId,
+                            format, dataSpace, rotation, physicalCameraId, sensorPixelModesUsed,
                             consumerUsage, timestampOffset, setId),
         mUseHalBufManager(useHalBufManager) {
     size_t consumerCount = std::min(surfaces.size(), kMaxOutputs);
diff --git a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
index a61316c..4b6341b 100644
--- a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
@@ -38,6 +38,7 @@
             uint64_t consumerUsage, android_dataspace dataSpace,
             camera_stream_rotation_t rotation, nsecs_t timestampOffset,
             const String8& physicalCameraId,
+            const std::unordered_set<int32_t> &sensorPixelModesUsed,
             int setId = CAMERA3_STREAM_SET_ID_INVALID,
             bool useHalBufManager = false);
 
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp
index c6e7002..02b6585 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp
@@ -49,7 +49,9 @@
         camera_stream_type type,
         uint32_t width, uint32_t height, size_t maxSize, int format,
         android_dataspace dataSpace, camera_stream_rotation_t rotation,
-        const String8& physicalCameraId, int setId, bool isMultiResolution) :
+        const String8& physicalCameraId,
+        const std::unordered_set<int32_t> &sensorPixelModesUsed,
+        int setId, bool isMultiResolution) :
     camera_stream(),
     mId(id),
     mSetId(setId),
@@ -84,6 +86,7 @@
     camera_stream::rotation = rotation;
     camera_stream::max_buffers = 0;
     camera_stream::physical_camera_id = mPhysicalCameraId.string();
+    camera_stream::sensor_pixel_modes_used = sensorPixelModesUsed;
 
     if ((format == HAL_PIXEL_FORMAT_BLOB || format == HAL_PIXEL_FORMAT_RAW_OPAQUE) &&
             maxSize == 0) {
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.h b/services/camera/libcameraservice/device3/Camera3Stream.h
index 45d8478..5a364ab 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.h
+++ b/services/camera/libcameraservice/device3/Camera3Stream.h
@@ -498,7 +498,9 @@
     Camera3Stream(int id, camera_stream_type type,
             uint32_t width, uint32_t height, size_t maxSize, int format,
             android_dataspace dataSpace, camera_stream_rotation_t rotation,
-            const String8& physicalCameraId, int setId, bool isMultiResolution);
+            const String8& physicalCameraId,
+            const std::unordered_set<int32_t> &sensorPixelModesUsed,
+            int setId, bool isMultiResolution);
 
     wp<Camera3StreamBufferFreedListener> mBufferFreedListener;
 
diff --git a/services/camera/libcameraservice/device3/Camera3StreamInterface.h b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
index a567cb4..2d3397c 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
@@ -62,6 +62,8 @@
     android_dataspace_t data_space;
     camera_stream_rotation_t rotation;
     const char* physical_camera_id;
+
+    std::unordered_set<int32_t> sensor_pixel_modes_used;
 } camera_stream_t;
 
 typedef struct camera_stream_buffer {
@@ -104,13 +106,15 @@
         uint64_t consumerUsage;
         bool finalized = false;
         bool supportsOffline = false;
+        std::unordered_set<int32_t> sensorPixelModesUsed;
         OutputStreamInfo() :
             width(-1), height(-1), format(-1), dataSpace(HAL_DATASPACE_UNKNOWN),
             consumerUsage(0) {}
         OutputStreamInfo(int _width, int _height, int _format, android_dataspace _dataSpace,
-                uint64_t _consumerUsage) :
+                uint64_t _consumerUsage, const std::unordered_set<int32_t>& _sensorPixelModesUsed) :
             width(_width), height(_height), format(_format),
-            dataSpace(_dataSpace), consumerUsage(_consumerUsage) {}
+            dataSpace(_dataSpace), consumerUsage(_consumerUsage),
+            sensorPixelModesUsed(_sensorPixelModesUsed) {}
 };
 
 /**
@@ -135,6 +139,16 @@
     virtual int      getStreamSetId() const = 0;
 
     /**
+     * Is this stream part of a multi-resolution stream set
+     */
+    virtual bool     isMultiResolution() const = 0;
+
+    /**
+     * Get the HAL stream group id for a multi-resolution stream set
+     */
+    virtual int      getHalStreamGroupId() const = 0;
+
+    /**
      * Get the stream's dimensions and format
      */
     virtual uint32_t getWidth() const = 0;
diff --git a/services/camera/libcameraservice/device3/DistortionMapper.cpp b/services/camera/libcameraservice/device3/DistortionMapper.cpp
index 316303e..89dd115 100644
--- a/services/camera/libcameraservice/device3/DistortionMapper.cpp
+++ b/services/camera/libcameraservice/device3/DistortionMapper.cpp
@@ -22,13 +22,14 @@
 #include <cmath>
 
 #include "device3/DistortionMapper.h"
+#include "utils/SessionConfigurationUtils.h"
 
 namespace android {
 
 namespace camera3 {
 
 
-DistortionMapper::DistortionMapper() : mValidMapping(false), mValidGrids(false) {
+DistortionMapper::DistortionMapper() {
     initRemappedKeys();
 }
 
@@ -61,41 +62,81 @@
 
 status_t DistortionMapper::setupStaticInfo(const CameraMetadata &deviceInfo) {
     std::lock_guard<std::mutex> lock(mMutex);
+    status_t res = setupStaticInfoLocked(deviceInfo, /*maxResolution*/false);
+    if (res != OK) {
+        return res;
+    }
+
+    mMaxResolution = SessionConfigurationUtils::isUltraHighResolutionSensor(deviceInfo);
+    if (mMaxResolution) {
+        res = setupStaticInfoLocked(deviceInfo, /*maxResolution*/true);
+    }
+    return res;
+}
+
+status_t DistortionMapper::setupStaticInfoLocked(const CameraMetadata &deviceInfo,
+        bool maxResolution) {
+    DistortionMapperInfo *mapperInfo = maxResolution ? &mDistortionMapperInfoMaximumResolution :
+            &mDistortionMapperInfo;
+
     camera_metadata_ro_entry_t array;
 
-    array = deviceInfo.find(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE);
+    array = deviceInfo.find(
+        SessionConfigurationUtils::getAppropriateModeTag(
+                ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE, maxResolution));
     if (array.count != 4) return BAD_VALUE;
 
     float arrayX = static_cast<float>(array.data.i32[0]);
     float arrayY = static_cast<float>(array.data.i32[1]);
-    mArrayWidth = static_cast<float>(array.data.i32[2]);
-    mArrayHeight = static_cast<float>(array.data.i32[3]);
+    mapperInfo->mArrayWidth = static_cast<float>(array.data.i32[2]);
+    mapperInfo->mArrayHeight = static_cast<float>(array.data.i32[3]);
 
-    array = deviceInfo.find(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE);
+    array = deviceInfo.find(
+            SessionConfigurationUtils::getAppropriateModeTag(
+                    ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE, maxResolution));
     if (array.count != 4) return BAD_VALUE;
 
     float activeX = static_cast<float>(array.data.i32[0]);
     float activeY = static_cast<float>(array.data.i32[1]);
-    mActiveWidth = static_cast<float>(array.data.i32[2]);
-    mActiveHeight = static_cast<float>(array.data.i32[3]);
+    mapperInfo->mActiveWidth = static_cast<float>(array.data.i32[2]);
+    mapperInfo->mActiveHeight = static_cast<float>(array.data.i32[3]);
 
-    mArrayDiffX = activeX - arrayX;
-    mArrayDiffY = activeY - arrayY;
+    mapperInfo->mArrayDiffX = activeX - arrayX;
+    mapperInfo->mArrayDiffY = activeY - arrayY;
 
-    return updateCalibration(deviceInfo);
+    return updateCalibration(deviceInfo, /*isStatic*/ true, maxResolution);
+}
+
+static bool doesSettingsHaveMaxResolution(const CameraMetadata *settings) {
+    if (settings == nullptr) {
+        return false;
+    }
+    // First we get the sensorPixelMode from the settings metadata.
+    camera_metadata_ro_entry sensorPixelModeEntry = settings->find(ANDROID_SENSOR_PIXEL_MODE);
+    if (sensorPixelModeEntry.count != 0) {
+        return (sensorPixelModeEntry.data.u8[0] == ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);
+    }
+    return false;
 }
 
 bool DistortionMapper::calibrationValid() const {
     std::lock_guard<std::mutex> lock(mMutex);
-
-    return mValidMapping;
+    bool isValid = mDistortionMapperInfo.mValidMapping;
+    if (mMaxResolution) {
+        isValid = isValid && mDistortionMapperInfoMaximumResolution.mValidMapping;
+    }
+    return isValid;
 }
 
 status_t DistortionMapper::correctCaptureRequest(CameraMetadata *request) {
     std::lock_guard<std::mutex> lock(mMutex);
     status_t res;
 
-    if (!mValidMapping) return OK;
+    bool maxResolution = doesSettingsHaveMaxResolution(request);
+    DistortionMapperInfo *mapperInfo = maxResolution ? &mDistortionMapperInfoMaximumResolution :
+            &mDistortionMapperInfo;
+
+    if (!mapperInfo->mValidMapping) return OK;
 
     camera_metadata_entry_t e;
     e = request->find(ANDROID_DISTORTION_CORRECTION_MODE);
@@ -107,27 +148,30 @@
                 if (weight == 0) {
                     continue;
                 }
-                res = mapCorrectedToRaw(e.data.i32 + j, 2, /*clamp*/true);
+                res = mapCorrectedToRaw(e.data.i32 + j, 2, mapperInfo, /*clamp*/true);
                 if (res != OK) return res;
             }
         }
         for (auto rect : kRectsToCorrect) {
             e = request->find(rect);
-            res = mapCorrectedRectToRaw(e.data.i32, e.count / 4, /*clamp*/true);
+            res = mapCorrectedRectToRaw(e.data.i32, e.count / 4, mapperInfo, /*clamp*/true);
             if (res != OK) return res;
         }
     }
-
     return OK;
 }
 
 status_t DistortionMapper::correctCaptureResult(CameraMetadata *result) {
     std::lock_guard<std::mutex> lock(mMutex);
+
+    bool maxResolution = doesSettingsHaveMaxResolution(result);
+    DistortionMapperInfo *mapperInfo = maxResolution ? &mDistortionMapperInfoMaximumResolution :
+            &mDistortionMapperInfo;
     status_t res;
 
-    if (!mValidMapping) return OK;
+    if (!mapperInfo->mValidMapping) return OK;
 
-    res = updateCalibration(*result);
+    res = updateCalibration(*result, /*isStatic*/ false, maxResolution);
     if (res != OK) {
         ALOGE("Failure to update lens calibration information");
         return INVALID_OPERATION;
@@ -143,18 +187,18 @@
                 if (weight == 0) {
                     continue;
                 }
-                res = mapRawToCorrected(e.data.i32 + j, 2, /*clamp*/true);
+                res = mapRawToCorrected(e.data.i32 + j, 2, mapperInfo, /*clamp*/true);
                 if (res != OK) return res;
             }
         }
         for (auto rect : kRectsToCorrect) {
             e = result->find(rect);
-            res = mapRawRectToCorrected(e.data.i32, e.count / 4, /*clamp*/true);
+            res = mapRawRectToCorrected(e.data.i32, e.count / 4, mapperInfo, /*clamp*/true);
             if (res != OK) return res;
         }
         for (auto pts : kResultPointsToCorrectNoClamp) {
             e = result->find(pts);
-            res = mapRawToCorrected(e.data.i32, e.count / 2, /*clamp*/false);
+            res = mapRawToCorrected(e.data.i32, e.count / 2, mapperInfo, /*clamp*/false);
             if (res != OK) return res;
         }
     }
@@ -164,25 +208,37 @@
 
 // Utility methods; not guarded by mutex
 
-status_t DistortionMapper::updateCalibration(const CameraMetadata &result) {
+status_t DistortionMapper::updateCalibration(const CameraMetadata &result, bool isStatic,
+        bool maxResolution) {
     camera_metadata_ro_entry_t calib, distortion;
+    DistortionMapperInfo *mapperInfo =
+            maxResolution ? &mDistortionMapperInfoMaximumResolution : &mDistortionMapperInfo;
+    // We only need maximum resolution version of LENS_INTRINSIC_CALIBRATION and
+    // LENS_DISTORTION since CaptureResults would still use the same key
+    // regardless of sensor pixel mode.
+    int calibrationKey =
+        SessionConfigurationUtils::getAppropriateModeTag(ANDROID_LENS_INTRINSIC_CALIBRATION,
+                maxResolution && isStatic);
+    int distortionKey =
+        SessionConfigurationUtils::getAppropriateModeTag(ANDROID_LENS_DISTORTION,
+                maxResolution && isStatic);
 
-    calib = result.find(ANDROID_LENS_INTRINSIC_CALIBRATION);
-    distortion = result.find(ANDROID_LENS_DISTORTION);
+    calib = result.find(calibrationKey);
+    distortion = result.find(distortionKey);
 
     if (calib.count != 5) return BAD_VALUE;
     if (distortion.count != 5) return BAD_VALUE;
 
     // Skip redoing work if no change to calibration fields
-    if (mValidMapping &&
-            mFx == calib.data.f[0] &&
-            mFy == calib.data.f[1] &&
-            mCx == calib.data.f[2] &&
-            mCy == calib.data.f[3] &&
-            mS == calib.data.f[4]) {
+    if (mapperInfo->mValidMapping &&
+            mapperInfo->mFx == calib.data.f[0] &&
+            mapperInfo->mFy == calib.data.f[1] &&
+            mapperInfo->mCx == calib.data.f[2] &&
+            mapperInfo->mCy == calib.data.f[3] &&
+            mapperInfo->mS == calib.data.f[4]) {
         bool noChange = true;
         for (size_t i = 0; i < distortion.count; i++) {
-            if (mK[i] != distortion.data.f[i]) {
+            if (mapperInfo->mK[i] != distortion.data.f[i]) {
                 noChange = false;
                 break;
             }
@@ -190,39 +246,39 @@
         if (noChange) return OK;
     }
 
-    mFx = calib.data.f[0];
-    mFy = calib.data.f[1];
-    mCx = calib.data.f[2];
-    mCy = calib.data.f[3];
-    mS = calib.data.f[4];
+    mapperInfo->mFx = calib.data.f[0];
+    mapperInfo->mFy = calib.data.f[1];
+    mapperInfo->mCx = calib.data.f[2];
+    mapperInfo->mCy = calib.data.f[3];
+    mapperInfo->mS = calib.data.f[4];
 
-    mInvFx = 1 / mFx;
-    mInvFy = 1 / mFy;
+    mapperInfo->mInvFx = 1 / mapperInfo->mFx;
+    mapperInfo->mInvFy = 1 / mapperInfo->mFy;
 
     for (size_t i = 0; i < distortion.count; i++) {
-        mK[i] = distortion.data.f[i];
+        mapperInfo->mK[i] = distortion.data.f[i];
     }
 
-    mValidMapping = true;
+    mapperInfo->mValidMapping = true;
     // Need to recalculate grid
-    mValidGrids = false;
+    mapperInfo->mValidGrids = false;
 
     return OK;
 }
 
 status_t DistortionMapper::mapRawToCorrected(int32_t *coordPairs, int coordCount,
-        bool clamp, bool simple) {
-    if (!mValidMapping) return INVALID_OPERATION;
+        DistortionMapperInfo *mapperInfo, bool clamp, bool simple) {
+    if (!mapperInfo->mValidMapping) return INVALID_OPERATION;
 
-    if (simple) return mapRawToCorrectedSimple(coordPairs, coordCount, clamp);
+    if (simple) return mapRawToCorrectedSimple(coordPairs, coordCount, mapperInfo, clamp);
 
-    if (!mValidGrids) {
-        status_t res = buildGrids();
+    if (!mapperInfo->mValidGrids) {
+        status_t res = buildGrids(mapperInfo);
         if (res != OK) return res;
     }
 
     for (int i = 0; i < coordCount * 2; i += 2) {
-        const GridQuad *quad = findEnclosingQuad(coordPairs + i, mDistortedGrid);
+        const GridQuad *quad = findEnclosingQuad(coordPairs + i, mapperInfo->mDistortedGrid);
         if (quad == nullptr) {
             ALOGE("Raw to corrected mapping failure: No quad found for (%d, %d)",
                     *(coordPairs + i), *(coordPairs + i + 1));
@@ -258,8 +314,8 @@
 
         // Clamp to within active array
         if (clamp) {
-            corrX = std::min(mActiveWidth - 1, std::max(0.f, corrX));
-            corrY = std::min(mActiveHeight - 1, std::max(0.f, corrY));
+            corrX = std::min(mapperInfo->mActiveWidth - 1, std::max(0.f, corrX));
+            corrY = std::min(mapperInfo->mActiveHeight - 1, std::max(0.f, corrY));
         }
 
         coordPairs[i] = static_cast<int32_t>(std::round(corrX));
@@ -270,19 +326,19 @@
 }
 
 status_t DistortionMapper::mapRawToCorrectedSimple(int32_t *coordPairs, int coordCount,
-        bool clamp) const {
-    if (!mValidMapping) return INVALID_OPERATION;
+       const DistortionMapperInfo *mapperInfo, bool clamp) const {
+    if (!mapperInfo->mValidMapping) return INVALID_OPERATION;
 
-    float scaleX = mActiveWidth / mArrayWidth;
-    float scaleY = mActiveHeight / mArrayHeight;
+    float scaleX = mapperInfo->mActiveWidth / mapperInfo->mArrayWidth;
+    float scaleY = mapperInfo->mActiveHeight / mapperInfo->mArrayHeight;
     for (int i = 0; i < coordCount * 2; i += 2) {
         float x = coordPairs[i];
         float y = coordPairs[i + 1];
         float corrX = x * scaleX;
         float corrY = y * scaleY;
         if (clamp) {
-            corrX = std::min(mActiveWidth - 1, std::max(0.f, corrX));
-            corrY = std::min(mActiveHeight - 1, std::max(0.f, corrY));
+            corrX = std::min(mapperInfo->mActiveWidth - 1, std::max(0.f, corrX));
+            corrY = std::min(mapperInfo->mActiveHeight - 1, std::max(0.f, corrY));
         }
         coordPairs[i] = static_cast<int32_t>(std::round(corrX));
         coordPairs[i + 1] = static_cast<int32_t>(std::round(corrY));
@@ -291,9 +347,9 @@
     return OK;
 }
 
-status_t DistortionMapper::mapRawRectToCorrected(int32_t *rects, int rectCount, bool clamp,
-        bool simple) {
-    if (!mValidMapping) return INVALID_OPERATION;
+status_t DistortionMapper::mapRawRectToCorrected(int32_t *rects, int rectCount,
+       DistortionMapperInfo *mapperInfo, bool clamp, bool simple) {
+    if (!mapperInfo->mValidMapping) return INVALID_OPERATION;
     for (int i = 0; i < rectCount * 4; i += 4) {
         // Map from (l, t, width, height) to (l, t, r, b)
         int32_t coords[4] = {
@@ -303,7 +359,7 @@
             rects[i + 1] + rects[i + 3] - 1
         };
 
-        mapRawToCorrected(coords, 2, clamp, simple);
+        mapRawToCorrected(coords, 2, mapperInfo, clamp, simple);
 
         // Map back to (l, t, width, height)
         rects[i] = coords[0];
@@ -315,60 +371,60 @@
     return OK;
 }
 
-status_t DistortionMapper::mapCorrectedToRaw(int32_t *coordPairs, int coordCount, bool clamp,
-        bool simple) const {
-    return mapCorrectedToRawImpl(coordPairs, coordCount, clamp, simple);
+status_t DistortionMapper::mapCorrectedToRaw(int32_t *coordPairs, int coordCount,
+       const DistortionMapperInfo *mapperInfo, bool clamp, bool simple) const {
+    return mapCorrectedToRawImpl(coordPairs, coordCount, mapperInfo, clamp, simple);
 }
 
 template<typename T>
-status_t DistortionMapper::mapCorrectedToRawImpl(T *coordPairs, int coordCount, bool clamp,
-        bool simple) const {
-    if (!mValidMapping) return INVALID_OPERATION;
+status_t DistortionMapper::mapCorrectedToRawImpl(T *coordPairs, int coordCount,
+       const DistortionMapperInfo *mapperInfo, bool clamp, bool simple) const {
+    if (!mapperInfo->mValidMapping) return INVALID_OPERATION;
 
-    if (simple) return mapCorrectedToRawImplSimple(coordPairs, coordCount, clamp);
+    if (simple) return mapCorrectedToRawImplSimple(coordPairs, coordCount, mapperInfo, clamp);
 
-    float activeCx = mCx - mArrayDiffX;
-    float activeCy = mCy - mArrayDiffY;
+    float activeCx = mapperInfo->mCx - mapperInfo->mArrayDiffX;
+    float activeCy = mapperInfo->mCy - mapperInfo->mArrayDiffY;
     for (int i = 0; i < coordCount * 2; i += 2) {
         // Move to normalized space from active array space
-        float ywi = (coordPairs[i + 1] - activeCy) * mInvFy;
-        float xwi = (coordPairs[i] - activeCx - mS * ywi) * mInvFx;
+        float ywi = (coordPairs[i + 1] - activeCy) * mapperInfo->mInvFy;
+        float xwi = (coordPairs[i] - activeCx - mapperInfo->mS * ywi) * mapperInfo->mInvFx;
         // Apply distortion model to calculate raw image coordinates
+        const std::array<float, 5> &kK = mapperInfo->mK;
         float rSq = xwi * xwi + ywi * ywi;
-        float Fr = 1.f + (mK[0] * rSq) + (mK[1] * rSq * rSq) + (mK[2] * rSq * rSq * rSq);
-        float xc = xwi * Fr + (mK[3] * 2 * xwi * ywi) + mK[4] * (rSq + 2 * xwi * xwi);
-        float yc = ywi * Fr + (mK[4] * 2 * xwi * ywi) + mK[3] * (rSq + 2 * ywi * ywi);
+        float Fr = 1.f + (kK[0] * rSq) + (kK[1] * rSq * rSq) + (kK[2] * rSq * rSq * rSq);
+        float xc = xwi * Fr + (kK[3] * 2 * xwi * ywi) + kK[4] * (rSq + 2 * xwi * xwi);
+        float yc = ywi * Fr + (kK[4] * 2 * xwi * ywi) + kK[3] * (rSq + 2 * ywi * ywi);
         // Move back to image space
-        float xr = mFx * xc + mS * yc + mCx;
-        float yr = mFy * yc + mCy;
+        float xr = mapperInfo->mFx * xc + mapperInfo->mS * yc + mapperInfo->mCx;
+        float yr = mapperInfo->mFy * yc + mapperInfo->mCy;
         // Clamp to within pre-correction active array
         if (clamp) {
-            xr = std::min(mArrayWidth - 1, std::max(0.f, xr));
-            yr = std::min(mArrayHeight - 1, std::max(0.f, yr));
+            xr = std::min(mapperInfo->mArrayWidth - 1, std::max(0.f, xr));
+            yr = std::min(mapperInfo->mArrayHeight - 1, std::max(0.f, yr));
         }
 
         coordPairs[i] = static_cast<T>(std::round(xr));
         coordPairs[i + 1] = static_cast<T>(std::round(yr));
     }
-
     return OK;
 }
 
 template<typename T>
 status_t DistortionMapper::mapCorrectedToRawImplSimple(T *coordPairs, int coordCount,
-        bool clamp) const {
-    if (!mValidMapping) return INVALID_OPERATION;
+       const DistortionMapperInfo *mapperInfo, bool clamp) const {
+    if (!mapperInfo->mValidMapping) return INVALID_OPERATION;
 
-    float scaleX = mArrayWidth / mActiveWidth;
-    float scaleY = mArrayHeight / mActiveHeight;
+    float scaleX = mapperInfo->mArrayWidth / mapperInfo->mActiveWidth;
+    float scaleY = mapperInfo->mArrayHeight / mapperInfo->mActiveHeight;
     for (int i = 0; i < coordCount * 2; i += 2) {
         float x = coordPairs[i];
         float y = coordPairs[i + 1];
         float rawX = x * scaleX;
         float rawY = y * scaleY;
         if (clamp) {
-            rawX = std::min(mArrayWidth - 1, std::max(0.f, rawX));
-            rawY = std::min(mArrayHeight - 1, std::max(0.f, rawY));
+            rawX = std::min(mapperInfo->mArrayWidth - 1, std::max(0.f, rawX));
+            rawY = std::min(mapperInfo->mArrayHeight - 1, std::max(0.f, rawY));
         }
         coordPairs[i] = static_cast<T>(std::round(rawX));
         coordPairs[i + 1] = static_cast<T>(std::round(rawY));
@@ -377,9 +433,9 @@
     return OK;
 }
 
-status_t DistortionMapper::mapCorrectedRectToRaw(int32_t *rects, int rectCount, bool clamp,
-        bool simple) const {
-    if (!mValidMapping) return INVALID_OPERATION;
+status_t DistortionMapper::mapCorrectedRectToRaw(int32_t *rects, int rectCount,
+       const DistortionMapperInfo *mapperInfo, bool clamp, bool simple) const {
+    if (!mapperInfo->mValidMapping) return INVALID_OPERATION;
 
     for (int i = 0; i < rectCount * 4; i += 4) {
         // Map from (l, t, width, height) to (l, t, r, b)
@@ -390,7 +446,7 @@
             rects[i + 1] + rects[i + 3] - 1
         };
 
-        mapCorrectedToRaw(coords, 2, clamp, simple);
+        mapCorrectedToRaw(coords, 2, mapperInfo, clamp, simple);
 
         // Map back to (l, t, width, height)
         rects[i] = coords[0];
@@ -402,37 +458,37 @@
     return OK;
 }
 
-status_t DistortionMapper::buildGrids() {
-    if (mCorrectedGrid.size() != kGridSize * kGridSize) {
-        mCorrectedGrid.resize(kGridSize * kGridSize);
-        mDistortedGrid.resize(kGridSize * kGridSize);
+status_t DistortionMapper::buildGrids(DistortionMapperInfo *mapperInfo) {
+    if (mapperInfo->mCorrectedGrid.size() != kGridSize * kGridSize) {
+        mapperInfo->mCorrectedGrid.resize(kGridSize * kGridSize);
+        mapperInfo->mDistortedGrid.resize(kGridSize * kGridSize);
     }
 
-    float gridMargin = mArrayWidth * kGridMargin;
-    float gridSpacingX = (mArrayWidth + 2 * gridMargin) / kGridSize;
-    float gridSpacingY = (mArrayHeight + 2 * gridMargin) / kGridSize;
+    float gridMargin = mapperInfo->mArrayWidth * kGridMargin;
+    float gridSpacingX = (mapperInfo->mArrayWidth + 2 * gridMargin) / kGridSize;
+    float gridSpacingY = (mapperInfo->mArrayHeight + 2 * gridMargin) / kGridSize;
 
     size_t index = 0;
     float x = -gridMargin;
     for (size_t i = 0; i < kGridSize; i++, x += gridSpacingX) {
         float y = -gridMargin;
         for (size_t j = 0; j < kGridSize; j++, y += gridSpacingY, index++) {
-            mCorrectedGrid[index].src = nullptr;
-            mCorrectedGrid[index].coords = {
+            mapperInfo->mCorrectedGrid[index].src = nullptr;
+            mapperInfo->mCorrectedGrid[index].coords = {
                 x, y,
                 x + gridSpacingX, y,
                 x + gridSpacingX, y + gridSpacingY,
                 x, y + gridSpacingY
             };
-            mDistortedGrid[index].src = &mCorrectedGrid[index];
-            mDistortedGrid[index].coords = mCorrectedGrid[index].coords;
-            status_t res = mapCorrectedToRawImpl(mDistortedGrid[index].coords.data(), 4,
-                    /*clamp*/false, /*simple*/false);
+            mapperInfo->mDistortedGrid[index].src = &(mapperInfo->mCorrectedGrid[index]);
+            mapperInfo->mDistortedGrid[index].coords = mapperInfo->mCorrectedGrid[index].coords;
+            status_t res = mapCorrectedToRawImpl(mapperInfo->mDistortedGrid[index].coords.data(), 4,
+                    mapperInfo, /*clamp*/false, /*simple*/false);
             if (res != OK) return res;
         }
     }
 
-    mValidGrids = true;
+    mapperInfo->mValidGrids = true;
     return OK;
 }
 
diff --git a/services/camera/libcameraservice/device3/DistortionMapper.h b/services/camera/libcameraservice/device3/DistortionMapper.h
index 5027bd0..96f4fda 100644
--- a/services/camera/libcameraservice/device3/DistortionMapper.h
+++ b/services/camera/libcameraservice/device3/DistortionMapper.h
@@ -37,13 +37,8 @@
     DistortionMapper();
 
     DistortionMapper(const DistortionMapper& other) :
-            mValidMapping(other.mValidMapping), mValidGrids(other.mValidGrids),
-            mFx(other.mFx), mFy(other.mFy), mCx(other.mCx), mCy(other.mCy), mS(other.mS),
-            mInvFx(other.mInvFx), mInvFy(other.mInvFy), mK(other.mK),
-            mArrayWidth(other.mArrayWidth), mArrayHeight(other.mArrayHeight),
-            mActiveWidth(other.mActiveWidth), mActiveHeight(other.mActiveHeight),
-            mArrayDiffX(other.mArrayDiffX), mArrayDiffY(other.mArrayDiffY),
-            mCorrectedGrid(other.mCorrectedGrid), mDistortedGrid(other.mDistortedGrid) {
+            mDistortionMapperInfo(other.mDistortionMapperInfo),
+            mDistortionMapperInfoMaximumResolution(other.mDistortionMapperInfoMaximumResolution) {
             initRemappedKeys(); }
 
     void initRemappedKeys() override;
@@ -75,10 +70,14 @@
 
 
   public: // Visible for testing. Not guarded by mutex; do not use concurrently
+
+    struct DistortionMapperInfo;
+
     /**
      * Update lens calibration from capture results or equivalent
      */
-    status_t updateCalibration(const CameraMetadata &result);
+    status_t updateCalibration(const CameraMetadata &result, bool isStatic = false,
+            bool maxResolution = false);
 
     /**
      * Transform from distorted (original) to corrected (warped) coordinates.
@@ -89,8 +88,8 @@
      *   clamp: Whether to clamp the result to the bounds of the active array
      *   simple: Whether to do complex correction or just a simple linear map
      */
-    status_t mapRawToCorrected(int32_t *coordPairs, int coordCount, bool clamp,
-            bool simple = true);
+    status_t mapRawToCorrected(int32_t *coordPairs, int coordCount,
+            DistortionMapperInfo *mapperInfo, bool clamp, bool simple = true);
 
     /**
      * Transform from distorted (original) to corrected (warped) coordinates.
@@ -101,8 +100,8 @@
      *   clamp: Whether to clamp the result to the bounds of the active array
      *   simple: Whether to do complex correction or just a simple linear map
      */
-    status_t mapRawRectToCorrected(int32_t *rects, int rectCount, bool clamp,
-            bool simple = true);
+    status_t mapRawRectToCorrected(int32_t *rects, int rectCount,
+          DistortionMapperInfo *mapperInfo, bool clamp, bool simple = true);
 
     /**
      * Transform from corrected (warped) to distorted (original) coordinates.
@@ -113,8 +112,8 @@
      *   clamp: Whether to clamp the result to the bounds of the precorrection active array
      *   simple: Whether to do complex correction or just a simple linear map
      */
-    status_t mapCorrectedToRaw(int32_t* coordPairs, int coordCount, bool clamp,
-            bool simple = true) const;
+    status_t mapCorrectedToRaw(int32_t* coordPairs, int coordCount,
+            const DistortionMapperInfo *mapperInfo, bool clamp, bool simple = true) const;
 
     /**
      * Transform from corrected (warped) to distorted (original) coordinates.
@@ -125,8 +124,8 @@
      *   clamp: Whether to clamp the result to the bounds of the precorrection active array
      *   simple: Whether to do complex correction or just a simple linear map
      */
-    status_t mapCorrectedRectToRaw(int32_t *rects, int rectCount, bool clamp,
-            bool simple = true) const;
+    status_t mapCorrectedRectToRaw(int32_t *rects, int rectCount,
+           const DistortionMapperInfo *mapperInfo, bool clamp, bool simple = true) const;
 
     struct GridQuad {
         // Source grid quad, or null
@@ -136,6 +135,28 @@
         std::array<float, 8> coords;
     };
 
+    struct DistortionMapperInfo {
+        bool mValidMapping = false;
+        bool mValidGrids = false;
+
+        // intrinsic parameters, in pixels
+        float mFx, mFy, mCx, mCy, mS;
+        // pre-calculated inverses for speed
+        float mInvFx, mInvFy;
+        // radial/tangential distortion parameters
+        std::array<float, 5> mK;
+
+        // pre-correction active array dimensions
+        float mArrayWidth, mArrayHeight;
+        // active array dimensions
+        float mActiveWidth, mActiveHeight;
+        // corner offsets between pre-correction and active arrays
+        float mArrayDiffX, mArrayDiffY;
+
+        std::vector<GridQuad> mCorrectedGrid;
+        std::vector<GridQuad> mDistortedGrid;
+    };
+
     // Find which grid quad encloses the point; returns null if none do
     static const GridQuad* findEnclosingQuad(
             const int32_t pt[2], const std::vector<GridQuad>& grid);
@@ -153,6 +174,11 @@
     // if it is false, then an interpolation coordinate for edges E14 and E23 is found.
     static float calculateUorV(const int32_t pt[2], const GridQuad& quad, bool calculateU);
 
+    DistortionMapperInfo *getMapperInfo(bool maxResolution = false) {
+          return maxResolution ? &mDistortionMapperInfoMaximumResolution :
+                  &mDistortionMapperInfo;
+    };
+
   private:
     mutable std::mutex mMutex;
 
@@ -163,39 +189,28 @@
     // Fuzziness for float inequality tests
     constexpr static float kFloatFuzz = 1e-4;
 
+    bool mMaxResolution = false;
+
+    status_t setupStaticInfoLocked(const CameraMetadata &deviceInfo, bool maxResolution);
+
     // Single implementation for various mapCorrectedToRaw methods
     template<typename T>
-    status_t mapCorrectedToRawImpl(T* coordPairs, int coordCount, bool clamp, bool simple) const;
+    status_t mapCorrectedToRawImpl(T* coordPairs, int coordCount,
+            const DistortionMapperInfo *mapperInfo, bool clamp, bool simple) const;
 
     // Simple linear interpolation option
     template<typename T>
-    status_t mapCorrectedToRawImplSimple(T* coordPairs, int coordCount, bool clamp) const;
+    status_t mapCorrectedToRawImplSimple(T* coordPairs, int coordCount,
+            const DistortionMapperInfo *mapperInfo, bool clamp) const;
 
-    status_t mapRawToCorrectedSimple(int32_t *coordPairs, int coordCount, bool clamp) const;
+    status_t mapRawToCorrectedSimple(int32_t *coordPairs, int coordCount,
+            const DistortionMapperInfo *mapperInfo, bool clamp) const;
 
     // Utility to create reverse mapping grids
-    status_t buildGrids();
+    status_t buildGrids(DistortionMapperInfo *mapperInfo);
 
-
-    bool mValidMapping;
-    bool mValidGrids;
-
-    // intrisic parameters, in pixels
-    float mFx, mFy, mCx, mCy, mS;
-    // pre-calculated inverses for speed
-    float mInvFx, mInvFy;
-    // radial/tangential distortion parameters
-    std::array<float, 5> mK;
-
-    // pre-correction active array dimensions
-    float mArrayWidth, mArrayHeight;
-    // active array dimensions
-    float mActiveWidth, mActiveHeight;
-    // corner offsets between pre-correction and active arrays
-    float mArrayDiffX, mArrayDiffY;
-
-    std::vector<GridQuad> mCorrectedGrid;
-    std::vector<GridQuad> mDistortedGrid;
+    DistortionMapperInfo mDistortionMapperInfo;
+    DistortionMapperInfo mDistortionMapperInfoMaximumResolution;
 
 }; // class DistortionMapper
 
diff --git a/services/camera/libcameraservice/device3/InFlightRequest.h b/services/camera/libcameraservice/device3/InFlightRequest.h
index e3aaf44..523a2c7 100644
--- a/services/camera/libcameraservice/device3/InFlightRequest.h
+++ b/services/camera/libcameraservice/device3/InFlightRequest.h
@@ -96,7 +96,10 @@
     ERROR_BUF_STRATEGY errorBufStrategy;
 
     // The physical camera ids being requested.
-    std::set<String8> physicalCameraIds;
+    // For request on a physical camera stream, the inside set contains one Id
+    // For request on a stream group containing physical camera streams, the
+    // inside set contains all stream Ids in the group.
+    std::set<std::set<String8>> physicalCameraIds;
 
     // Map of physicalCameraId <-> Metadata
     std::vector<PhysicalCaptureResultInfo> physicalMetadatas;
@@ -142,7 +145,7 @@
 
     InFlightRequest(int numBuffers, CaptureResultExtras extras, bool hasInput,
             bool hasAppCallback, nsecs_t maxDuration,
-            const std::set<String8>& physicalCameraIdSet, bool isStillCapture,
+            const std::set<std::set<String8>>& physicalCameraIdSet, bool isStillCapture,
             bool isZslCapture, bool rotateAndCropAuto, const std::set<std::string>& idsWithZoom,
             nsecs_t requestNs, const SurfaceMap& outSurfaces = SurfaceMap{}) :
             shutterTimestamp(0),
diff --git a/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp b/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp
index 1bc2081..1a39510 100644
--- a/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp
+++ b/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp
@@ -20,6 +20,7 @@
 #include <algorithm>
 
 #include "device3/ZoomRatioMapper.h"
+#include "utils/SessionConfigurationUtils.h"
 
 namespace android {
 
@@ -128,43 +129,120 @@
     return OK;
 }
 
+static bool getArrayWidthAndHeight(const CameraMetadata *deviceInfo,
+        int32_t arrayTag, int32_t *width, int32_t *height) {
+    if (width == nullptr || height == nullptr) {
+        ALOGE("%s: width / height nullptr", __FUNCTION__);
+        return false;
+    }
+    camera_metadata_ro_entry_t entry;
+    entry = deviceInfo->find(arrayTag);
+    if (entry.count != 4) return false;
+    *width = entry.data.i32[2];
+    *height = entry.data.i32[3];
+    return true;
+}
+
 ZoomRatioMapper::ZoomRatioMapper(const CameraMetadata* deviceInfo,
         bool supportNativeZoomRatio, bool usePrecorrectArray) {
     initRemappedKeys();
 
-    camera_metadata_ro_entry_t entry;
+    int32_t arrayW = 0;
+    int32_t arrayH = 0;
+    int32_t arrayMaximumResolutionW = 0;
+    int32_t arrayMaximumResolutionH = 0;
+    int32_t activeW = 0;
+    int32_t activeH = 0;
+    int32_t activeMaximumResolutionW = 0;
+    int32_t activeMaximumResolutionH = 0;
 
-    entry = deviceInfo->find(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE);
-    if (entry.count != 4) return;
-    int32_t arrayW = entry.data.i32[2];
-    int32_t arrayH = entry.data.i32[3];
+    if (!getArrayWidthAndHeight(deviceInfo, ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE,
+            &arrayW, &arrayH)) {
+        ALOGE("%s: Couldn't get pre correction active array size", __FUNCTION__);
+        return;
+    }
+    if (!getArrayWidthAndHeight(deviceInfo, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
+            &activeW, &activeH)) {
+        ALOGE("%s: Couldn't get active array size", __FUNCTION__);
+        return;
+    }
 
-    entry = deviceInfo->find(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE);
-    if (entry.count != 4) return;
-    int32_t activeW = entry.data.i32[2];
-    int32_t activeH = entry.data.i32[3];
+    bool isUltraHighResolutionSensor =
+            camera3::SessionConfigurationUtils::isUltraHighResolutionSensor(*deviceInfo);
+    if (isUltraHighResolutionSensor) {
+        if (!getArrayWidthAndHeight(deviceInfo,
+                ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION,
+                &arrayMaximumResolutionW, &arrayMaximumResolutionH)) {
+            ALOGE("%s: Couldn't get maximum resolution pre correction active array size",
+                    __FUNCTION__);
+            return;
+        }
+        if (!getArrayWidthAndHeight(deviceInfo,
+                ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION,
+                &activeMaximumResolutionW, &activeMaximumResolutionH)) {
+            ALOGE("%s: Couldn't get maximum resolution active array size",
+                    __FUNCTION__);
+            return;
+        }
+    }
 
     if (usePrecorrectArray) {
         mArrayWidth = arrayW;
         mArrayHeight = arrayH;
+        mArrayWidthMaximumResolution = arrayMaximumResolutionW;
+        mArrayHeightMaximumResolution = arrayMaximumResolutionH;
     } else {
         mArrayWidth = activeW;
         mArrayHeight = activeH;
+        mArrayWidthMaximumResolution = activeMaximumResolutionW;
+        mArrayHeightMaximumResolution = activeMaximumResolutionH;
     }
     mHalSupportsZoomRatio = supportNativeZoomRatio;
 
-    ALOGV("%s: array size: %d x %d, mHalSupportsZoomRatio %d",
-            __FUNCTION__, mArrayWidth, mArrayHeight, mHalSupportsZoomRatio);
+    ALOGV("%s: array size: %d x %d, full res array size: %d x %d,  mHalSupportsZoomRatio %d",
+            __FUNCTION__, mArrayWidth, mArrayHeight, mArrayWidthMaximumResolution,
+            mArrayHeightMaximumResolution, mHalSupportsZoomRatio);
     mIsValid = true;
 }
 
+status_t ZoomRatioMapper::getArrayDimensionsToBeUsed(const CameraMetadata *settings,
+        int32_t *arrayWidth, int32_t *arrayHeight) {
+    if (settings == nullptr || arrayWidth == nullptr || arrayHeight == nullptr) {
+        return BAD_VALUE;
+    }
+    // First we get the sensorPixelMode from the settings metadata.
+    int32_t sensorPixelMode = ANDROID_SENSOR_PIXEL_MODE_DEFAULT;
+    camera_metadata_ro_entry sensorPixelModeEntry = settings->find(ANDROID_SENSOR_PIXEL_MODE);
+    if (sensorPixelModeEntry.count != 0) {
+        sensorPixelMode = sensorPixelModeEntry.data.u8[0];
+        if (sensorPixelMode != ANDROID_SENSOR_PIXEL_MODE_DEFAULT &&
+            sensorPixelMode != ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) {
+            ALOGE("%s: Request sensor pixel mode is not one of the valid values %d",
+                      __FUNCTION__, sensorPixelMode);
+            return BAD_VALUE;
+        }
+    }
+    if (sensorPixelMode == ANDROID_SENSOR_PIXEL_MODE_DEFAULT) {
+        *arrayWidth = mArrayWidth;
+        *arrayHeight = mArrayHeight;
+    } else {
+        *arrayWidth = mArrayWidthMaximumResolution;
+        *arrayHeight = mArrayHeightMaximumResolution;
+    }
+    return OK;
+}
+
 status_t ZoomRatioMapper::updateCaptureRequest(CameraMetadata* request) {
     if (!mIsValid) return INVALID_OPERATION;
 
     status_t res = OK;
     bool zoomRatioIs1 = true;
     camera_metadata_entry_t entry;
-
+    int arrayHeight = 0, arrayWidth = 0;
+    res = getArrayDimensionsToBeUsed(request, &arrayWidth, &arrayHeight);
+    if (res != OK) {
+        return res;
+    }
     entry = request->find(ANDROID_CONTROL_ZOOM_RATIO);
     if (entry.count == 1 && entry.data.f[0] != 1.0f) {
         zoomRatioIs1 = false;
@@ -174,19 +252,19 @@
         if (cropRegionEntry.count == 4) {
             int cropWidth = cropRegionEntry.data.i32[2];
             int cropHeight = cropRegionEntry.data.i32[3];
-            if (cropWidth < mArrayWidth && cropHeight < mArrayHeight) {
+            if (cropWidth < arrayWidth && cropHeight < arrayHeight) {
                 cropRegionEntry.data.i32[0] = 0;
                 cropRegionEntry.data.i32[1] = 0;
-                cropRegionEntry.data.i32[2] = mArrayWidth;
-                cropRegionEntry.data.i32[3] = mArrayHeight;
+                cropRegionEntry.data.i32[2] = arrayWidth;
+                cropRegionEntry.data.i32[3] = arrayHeight;
             }
         }
     }
 
     if (mHalSupportsZoomRatio && zoomRatioIs1) {
-        res = separateZoomFromCropLocked(request, false/*isResult*/);
+        res = separateZoomFromCropLocked(request, false/*isResult*/, arrayWidth, arrayHeight);
     } else if (!mHalSupportsZoomRatio && !zoomRatioIs1) {
-        res = combineZoomAndCropLocked(request, false/*isResult*/);
+        res = combineZoomAndCropLocked(request, false/*isResult*/, arrayWidth, arrayHeight);
     }
 
     // If CONTROL_ZOOM_RATIO is in request, but HAL doesn't support
@@ -203,10 +281,15 @@
 
     status_t res = OK;
 
+    int arrayHeight = 0, arrayWidth = 0;
+    res = getArrayDimensionsToBeUsed(result, &arrayWidth, &arrayHeight);
+    if (res != OK) {
+        return res;
+    }
     if (mHalSupportsZoomRatio && requestedZoomRatioIs1) {
-        res = combineZoomAndCropLocked(result, true/*isResult*/);
+        res = combineZoomAndCropLocked(result, true/*isResult*/, arrayWidth, arrayHeight);
     } else if (!mHalSupportsZoomRatio && !requestedZoomRatioIs1) {
-        res = separateZoomFromCropLocked(result, true/*isResult*/);
+        res = separateZoomFromCropLocked(result, true/*isResult*/, arrayWidth, arrayHeight);
     } else {
         camera_metadata_entry_t entry = result->find(ANDROID_CONTROL_ZOOM_RATIO);
         if (entry.count == 0) {
@@ -218,16 +301,22 @@
     return res;
 }
 
-float ZoomRatioMapper::deriveZoomRatio(const CameraMetadata* metadata) {
+status_t ZoomRatioMapper::deriveZoomRatio(const CameraMetadata* metadata, float *zoomRatioRet,
+        int arrayWidth, int arrayHeight) {
+    if (metadata == nullptr || zoomRatioRet == nullptr) {
+        return BAD_VALUE;
+    }
     float zoomRatio = 1.0;
 
     camera_metadata_ro_entry_t entry;
     entry = metadata->find(ANDROID_SCALER_CROP_REGION);
-    if (entry.count != 4) return zoomRatio;
-
+    if (entry.count != 4) {
+        *zoomRatioRet = 1;
+        return OK;
+    }
     // Center of the preCorrection/active size
-    float arrayCenterX = mArrayWidth / 2.0;
-    float arrayCenterY = mArrayHeight / 2.0;
+    float arrayCenterX = arrayWidth / 2.0;
+    float arrayCenterY = arrayHeight / 2.0;
 
     // Re-map crop region to coordinate system centered to (arrayCenterX,
     // arrayCenterY).
@@ -237,22 +326,30 @@
     float cropRegionBottom = entry.data.i32[1] + entry.data.i32[3] - arrayCenterY;
 
     // Calculate the scaling factor for left, top, bottom, right
-    float zoomRatioLeft = std::max(mArrayWidth / (2 * cropRegionLeft), 1.0f);
-    float zoomRatioTop = std::max(mArrayHeight / (2 * cropRegionTop), 1.0f);
-    float zoomRatioRight = std::max(mArrayWidth / (2 * cropRegionRight), 1.0f);
-    float zoomRatioBottom = std::max(mArrayHeight / (2 * cropRegionBottom), 1.0f);
+    float zoomRatioLeft = std::max(arrayWidth / (2 * cropRegionLeft), 1.0f);
+    float zoomRatioTop = std::max(arrayHeight / (2 * cropRegionTop), 1.0f);
+    float zoomRatioRight = std::max(arrayWidth / (2 * cropRegionRight), 1.0f);
+    float zoomRatioBottom = std::max(arrayHeight / (2 * cropRegionBottom), 1.0f);
 
     // Use minimum scaling factor to handle letterboxing or pillarboxing
     zoomRatio = std::min(std::min(zoomRatioLeft, zoomRatioRight),
             std::min(zoomRatioTop, zoomRatioBottom));
 
     ALOGV("%s: derived zoomRatio is %f", __FUNCTION__, zoomRatio);
-    return zoomRatio;
+    *zoomRatioRet = zoomRatio;
+    return OK;
 }
 
-status_t ZoomRatioMapper::separateZoomFromCropLocked(CameraMetadata* metadata, bool isResult) {
-    status_t res;
-    float zoomRatio = deriveZoomRatio(metadata);
+status_t ZoomRatioMapper::separateZoomFromCropLocked(CameraMetadata* metadata, bool isResult,
+        int arrayWidth, int arrayHeight) {
+    float zoomRatio = 1.0;
+    status_t res = deriveZoomRatio(metadata, &zoomRatio, arrayWidth, arrayHeight);
+
+    if (res != OK) {
+        ALOGE("%s: Failed to derive zoom ratio: %s(%d)",
+                __FUNCTION__, strerror(-res), res);
+        return res;
+    }
 
     // Update zoomRatio metadata tag
     res = metadata->update(ANDROID_CONTROL_ZOOM_RATIO, &zoomRatio, 1);
@@ -272,12 +369,14 @@
                 continue;
             }
             // Top left (inclusive)
-            scaleCoordinates(entry.data.i32 + j, 1, zoomRatio, true /*clamp*/);
+            scaleCoordinates(entry.data.i32 + j, 1, zoomRatio, true /*clamp*/, arrayWidth,
+                    arrayHeight);
             // Bottom right (exclusive): Use adjacent inclusive pixel to
             // calculate.
             entry.data.i32[j+2] -= 1;
             entry.data.i32[j+3] -= 1;
-            scaleCoordinates(entry.data.i32 + j + 2, 1, zoomRatio, true /*clamp*/);
+            scaleCoordinates(entry.data.i32 + j + 2, 1, zoomRatio, true /*clamp*/, arrayWidth,
+                    arrayHeight);
             entry.data.i32[j+2] += 1;
             entry.data.i32[j+3] += 1;
         }
@@ -285,20 +384,22 @@
 
     for (auto rect : kRectsToCorrect) {
         entry = metadata->find(rect);
-        scaleRects(entry.data.i32, entry.count / 4, zoomRatio);
+        scaleRects(entry.data.i32, entry.count / 4, zoomRatio, arrayWidth, arrayHeight);
     }
 
     if (isResult) {
         for (auto pts : kResultPointsToCorrectNoClamp) {
             entry = metadata->find(pts);
-            scaleCoordinates(entry.data.i32, entry.count / 2, zoomRatio, false /*clamp*/);
+            scaleCoordinates(entry.data.i32, entry.count / 2, zoomRatio, false /*clamp*/,
+                    arrayWidth, arrayHeight);
         }
     }
 
     return OK;
 }
 
-status_t ZoomRatioMapper::combineZoomAndCropLocked(CameraMetadata* metadata, bool isResult) {
+status_t ZoomRatioMapper::combineZoomAndCropLocked(CameraMetadata* metadata, bool isResult,
+        int arrayWidth, int arrayHeight) {
     float zoomRatio = 1.0f;
     camera_metadata_entry_t entry;
     entry = metadata->find(ANDROID_CONTROL_ZOOM_RATIO);
@@ -307,7 +408,6 @@
     }
 
     // Unscale regions with zoomRatio
-    status_t res;
     for (auto region : kMeteringRegionsToCorrect) {
         entry = metadata->find(region);
         for (size_t j = 0; j < entry.count; j += 5) {
@@ -316,29 +416,32 @@
                 continue;
             }
             // Top-left (inclusive)
-            scaleCoordinates(entry.data.i32 + j, 1, 1.0 / zoomRatio, true /*clamp*/);
+            scaleCoordinates(entry.data.i32 + j, 1, 1.0 / zoomRatio, true /*clamp*/, arrayWidth,
+                    arrayHeight);
             // Bottom-right (exclusive): Use adjacent inclusive pixel to
             // calculate.
             entry.data.i32[j+2] -= 1;
             entry.data.i32[j+3] -= 1;
-            scaleCoordinates(entry.data.i32 + j + 2, 1, 1.0 / zoomRatio, true /*clamp*/);
+            scaleCoordinates(entry.data.i32 + j + 2, 1, 1.0 / zoomRatio, true /*clamp*/, arrayWidth,
+                    arrayHeight);
             entry.data.i32[j+2] += 1;
             entry.data.i32[j+3] += 1;
         }
     }
     for (auto rect : kRectsToCorrect) {
         entry = metadata->find(rect);
-        scaleRects(entry.data.i32, entry.count / 4, 1.0 / zoomRatio);
+        scaleRects(entry.data.i32, entry.count / 4, 1.0 / zoomRatio, arrayWidth, arrayHeight);
     }
     if (isResult) {
         for (auto pts : kResultPointsToCorrectNoClamp) {
             entry = metadata->find(pts);
-            scaleCoordinates(entry.data.i32, entry.count / 2, 1.0 / zoomRatio, false /*clamp*/);
+            scaleCoordinates(entry.data.i32, entry.count / 2, 1.0 / zoomRatio, false /*clamp*/,
+                    arrayWidth, arrayHeight);
         }
     }
 
     zoomRatio = 1.0;
-    res = metadata->update(ANDROID_CONTROL_ZOOM_RATIO, &zoomRatio, 1);
+    status_t res = metadata->update(ANDROID_CONTROL_ZOOM_RATIO, &zoomRatio, 1);
     if (res != OK) {
         return res;
     }
@@ -347,7 +450,7 @@
 }
 
 void ZoomRatioMapper::scaleCoordinates(int32_t* coordPairs, int coordCount,
-        float scaleRatio, bool clamp) {
+        float scaleRatio, bool clamp, int32_t arrayWidth, int32_t arrayHeight) {
     // A pixel's coordinate is represented by the position of its top-left corner.
     // To avoid the rounding error, we use the coordinate for the center of the
     // pixel instead:
@@ -360,18 +463,18 @@
     for (int i = 0; i < coordCount * 2; i += 2) {
         float x = coordPairs[i];
         float y = coordPairs[i + 1];
-        float xCentered = x - (mArrayWidth - 2) / 2;
-        float yCentered = y - (mArrayHeight - 2) / 2;
+        float xCentered = x - (arrayWidth - 2) / 2;
+        float yCentered = y - (arrayHeight - 2) / 2;
         float scaledX = xCentered * scaleRatio;
         float scaledY = yCentered * scaleRatio;
-        scaledX += (mArrayWidth - 2) / 2;
-        scaledY += (mArrayHeight - 2) / 2;
+        scaledX += (arrayWidth - 2) / 2;
+        scaledY += (arrayHeight - 2) / 2;
         coordPairs[i] = static_cast<int32_t>(std::round(scaledX));
         coordPairs[i+1] = static_cast<int32_t>(std::round(scaledY));
         // Clamp to within activeArray/preCorrectionActiveArray
         if (clamp) {
-            int32_t right = mArrayWidth - 1;
-            int32_t bottom = mArrayHeight - 1;
+            int32_t right = arrayWidth - 1;
+            int32_t bottom = arrayHeight - 1;
             coordPairs[i] =
                     std::min(right, std::max(0, coordPairs[i]));
             coordPairs[i+1] =
@@ -382,7 +485,7 @@
 }
 
 void ZoomRatioMapper::scaleRects(int32_t* rects, int rectCount,
-        float scaleRatio) {
+        float scaleRatio, int32_t arrayWidth, int32_t arrayHeight) {
     for (int i = 0; i < rectCount * 4; i += 4) {
         // Map from (l, t, width, height) to (l, t, l+width-1, t+height-1),
         // where both top-left and bottom-right are inclusive.
@@ -394,9 +497,9 @@
         };
 
         // top-left
-        scaleCoordinates(coords, 1, scaleRatio, true /*clamp*/);
+        scaleCoordinates(coords, 1, scaleRatio, true /*clamp*/, arrayWidth, arrayHeight);
         // bottom-right
-        scaleCoordinates(coords+2, 1, scaleRatio, true /*clamp*/);
+        scaleCoordinates(coords+2, 1, scaleRatio, true /*clamp*/, arrayWidth, arrayHeight);
 
         // Map back to (l, t, width, height)
         rects[i] = coords[0];
diff --git a/services/camera/libcameraservice/device3/ZoomRatioMapper.h b/services/camera/libcameraservice/device3/ZoomRatioMapper.h
index 3769299..b7a9e41 100644
--- a/services/camera/libcameraservice/device3/ZoomRatioMapper.h
+++ b/services/camera/libcameraservice/device3/ZoomRatioMapper.h
@@ -68,22 +68,31 @@
 
   public: // Visible for testing. Do not use concurently.
     void scaleCoordinates(int32_t* coordPairs, int coordCount,
-            float scaleRatio, bool clamp);
+            float scaleRatio, bool clamp, int32_t arrayWidth, int32_t arrayHeight);
 
     bool isValid() { return mIsValid; }
   private:
     // const after construction
     bool mHalSupportsZoomRatio;
-    // active array / pre-correction array dimension
+
+    // active array / pre-correction array dimension for default and maximum
+    // resolution modes.
     int32_t mArrayWidth, mArrayHeight;
+    int32_t mArrayWidthMaximumResolution, mArrayHeightMaximumResolution;
 
     bool mIsValid = false;
 
-    float deriveZoomRatio(const CameraMetadata* metadata);
-    void scaleRects(int32_t* rects, int rectCount, float scaleRatio);
+    status_t deriveZoomRatio(const CameraMetadata* metadata, float *zoomRatio, int arrayWidth,
+            int arrayHeight);
+    void scaleRects(int32_t* rects, int rectCount, float scaleRatio, int32_t arrayWidth,
+            int32_t arrayHeight);
 
-    status_t separateZoomFromCropLocked(CameraMetadata* metadata, bool isResult);
-    status_t combineZoomAndCropLocked(CameraMetadata* metadata, bool isResult);
+    status_t separateZoomFromCropLocked(CameraMetadata* metadata, bool isResult, int arrayWidth,
+            int arrayHeight);
+    status_t combineZoomAndCropLocked(CameraMetadata* metadata, bool isResult, int arrayWidth,
+            int arrayHeight);
+    status_t getArrayDimensionsToBeUsed(const CameraMetadata *settings, int32_t *arrayWidth,
+            int32_t *arrayHeight);
 };
 
 } // namespace camera3
diff --git a/services/camera/libcameraservice/fuzzer/DistortionMapperFuzzer.cpp b/services/camera/libcameraservice/fuzzer/DistortionMapperFuzzer.cpp
index 96bab4e..88ec85c 100644
--- a/services/camera/libcameraservice/fuzzer/DistortionMapperFuzzer.cpp
+++ b/services/camera/libcameraservice/fuzzer/DistortionMapperFuzzer.cpp
@@ -23,6 +23,7 @@
 
 using namespace android;
 using namespace android::camera3;
+using DistortionMapperInfo = android::camera3::DistortionMapper::DistortionMapperInfo;
 
 int32_t testActiveArray[] = {100, 100, 1000, 750};
 float testICal[] = { 1000.f, 1000.f, 500.f, 500.f, 0.f };
@@ -62,10 +63,10 @@
     for (int index = 0; fdp.remaining_bytes() > 0; index++) {
         input.push_back(fdp.ConsumeIntegral<int32_t>());
     }
-
+    DistortionMapperInfo *mapperInfo = m.getMapperInfo();
     // The size argument counts how many coordinate pairs there are, so
     // it is expected to be 1/2 the size of the input.
-    m.mapCorrectedToRaw(input.data(), input.size()/2,  clamp, simple);
+    m.mapCorrectedToRaw(input.data(), input.size()/2,  mapperInfo, clamp, simple);
 
     return 0;
 }
diff --git a/services/camera/libcameraservice/tests/ClientManagerTest.cpp b/services/camera/libcameraservice/tests/ClientManagerTest.cpp
index 6a38427..037c5c2 100644
--- a/services/camera/libcameraservice/tests/ClientManagerTest.cpp
+++ b/services/camera/libcameraservice/tests/ClientManagerTest.cpp
@@ -17,10 +17,13 @@
 #define LOG_NDEBUG 0
 #define LOG_TAG "ClientManagerTest"
 
+#include <binder/ActivityManager.h>
+
 #include "../utils/ClientManager.h"
 #include <gtest/gtest.h>
 
 using namespace android::resource_policy;
+using namespace android;
 
 struct TestClient {
     TestClient(int id, int32_t cost, const std::set<int>& conflictingKeys, int32_t ownerId,
@@ -59,13 +62,15 @@
 
     TestClientManager cm;
     TestClient cam0Client(/*ID*/0, /*cost*/100, /*conflicts*/{1},
-            /*ownerId*/ 1000, /*score*/50, /*state*/ 1, /*isVendorClient*/ false);
+            /*ownerId*/ 1000, PERCEPTIBLE_RECENT_FOREGROUND_APP_ADJ,
+            ActivityManager::PROCESS_STATE_PERSISTENT_UI, /*isVendorClient*/ false);
     auto cam0Desc = makeDescFromTestClient(cam0Client);
     auto evicted = cm.addAndEvict(cam0Desc);
     ASSERT_EQ(evicted.size(), 0u) << "Evicted list must be empty";
 
     TestClient cam1Client(/*ID*/1, /*cost*/100, /*conflicts*/{0},
-            /*ownerId*/ 1000, /*score*/50, /*state*/ 1, /*isVendorClient*/ false);
+            /*ownerId*/ 1000, PERCEPTIBLE_RECENT_FOREGROUND_APP_ADJ,
+            ActivityManager::PROCESS_STATE_PERSISTENT_UI, /*isVendorClient*/ false);
     auto cam1Desc = makeDescFromTestClient(cam1Client);
 
     // 1. Check with conflicting devices, new client would be evicted
@@ -76,13 +81,15 @@
     cm.removeAll();
 
     TestClient cam2Client(/*ID*/2, /*cost*/100, /*conflicts*/{},
-            /*ownerId*/ 1000, /*score*/50, /*state*/ 1, /*isVendorClient*/ false);
+            /*ownerId*/ 1000, PERCEPTIBLE_RECENT_FOREGROUND_APP_ADJ,
+            ActivityManager::PROCESS_STATE_PERSISTENT_UI, /*isVendorClient*/ false);
     auto cam2Desc = makeDescFromTestClient(cam2Client);
     evicted = cm.addAndEvict(cam2Desc);
     ASSERT_EQ(evicted.size(), 0u) << "Evicted list must be empty";
 
     TestClient cam3Client(/*ID*/3, /*cost*/100, /*conflicts*/{},
-            /*ownerId*/ 1000, /*score*/50, /*state*/ 1, /*isVendorClient*/ false);
+            /*ownerId*/ 1000, PERCEPTIBLE_RECENT_FOREGROUND_APP_ADJ,
+            ActivityManager::PROCESS_STATE_PERSISTENT_UI, /*isVendorClient*/ false);
     auto cam3Desc = makeDescFromTestClient(cam3Client);
 
     // 2. Check without conflicting devices, the pre-existing client won't be evicted
@@ -97,12 +104,42 @@
     ASSERT_EQ(evicted.size(), 0u) << "Evicted list must be empty";
 
     TestClient cam0ClientNew(/*ID*/0, /*cost*/100, /*conflicts*/{1},
-            /*ownerId*/ 1000, /*score*/50, /*state*/ 1, /*isVendorClient*/ false);
+            /*ownerId*/ 1000, PERCEPTIBLE_RECENT_FOREGROUND_APP_ADJ,
+            ActivityManager::PROCESS_STATE_PERSISTENT_UI, /*isVendorClient*/ false);
     auto cam0DescNew = makeDescFromTestClient(cam0ClientNew);
     wouldBeEvicted = cm.wouldEvict(cam0DescNew);
 
     // 3. Check opening the same camera twice will evict the older client
     ASSERT_EQ(wouldBeEvicted.size(), 1u) << "Evicted list length must be 1";
     ASSERT_EQ(wouldBeEvicted[0], cam0Desc) << "cam0 (old) must be evicted";
-}
 
+    // 4. Check that an invalid client (dead process) will be evicted
+
+    cm.removeAll();
+
+    TestClient camDeadClient(/*ID*/ 0, /*cost*/100, /*conflicts*/{},
+            /*ownerId*/ 1000, INVALID_ADJ,
+            ActivityManager::PROCESS_STATE_NONEXISTENT, /*isVendorClient*/ false);
+    auto camDeadDesc = makeDescFromTestClient(camDeadClient);
+    evicted = cm.addAndEvict(camDeadDesc);
+    wouldBeEvicted = cm.wouldEvict(cam0Desc);
+
+    ASSERT_EQ(evicted.size(), 0u) << "Evicted list must be empty";
+    ASSERT_EQ(wouldBeEvicted.size(), 1u) << "Evicted list length must be 1";
+    ASSERT_EQ(wouldBeEvicted[0], camDeadDesc) << "dead cam must be evicted";
+
+    // 5. Check that a more important client will win
+
+    TestClient cam0ForegroundClient(/*ID*/0, /*cost*/100, /*conflicts*/{1},
+            /*ownerId*/ 1000, FOREGROUND_APP_ADJ,
+            ActivityManager::PROCESS_STATE_PERSISTENT_UI, /*isVendorClient*/ false);
+    auto cam0FgDesc = makeDescFromTestClient(cam0ForegroundClient);
+
+    cm.removeAll();
+    evicted = cm.addAndEvict(cam0Desc);
+    wouldBeEvicted = cm.wouldEvict(cam0FgDesc);
+
+    ASSERT_EQ(evicted.size(), 0u);
+    ASSERT_EQ(wouldBeEvicted.size(), 1u);
+    ASSERT_EQ(wouldBeEvicted[0], cam0Desc) << "less important cam0 must be evicted";
+}
diff --git a/services/camera/libcameraservice/tests/DistortionMapperTest.cpp b/services/camera/libcameraservice/tests/DistortionMapperTest.cpp
index 54935c9..8331136 100644
--- a/services/camera/libcameraservice/tests/DistortionMapperTest.cpp
+++ b/services/camera/libcameraservice/tests/DistortionMapperTest.cpp
@@ -27,7 +27,7 @@
 
 using namespace android;
 using namespace android::camera3;
-
+using DistortionMapperInfo = android::camera3::DistortionMapper::DistortionMapperInfo;
 
 int32_t testActiveArray[] = {100, 100, 1000, 750};
 int32_t testPreCorrActiveArray[] = {90, 90, 1020, 770};
@@ -132,14 +132,15 @@
             /*preCorrectionActiveArray*/ testActiveArray);
 
     auto coords = basicCoords;
-    res = m.mapCorrectedToRaw(coords.data(), 5,  /*clamp*/true);
+    DistortionMapperInfo *mapperInfo = m.getMapperInfo();
+    res = m.mapCorrectedToRaw(coords.data(), 5, mapperInfo, /*clamp*/true);
     ASSERT_EQ(res, OK);
 
     for (size_t i = 0; i < coords.size(); i++) {
         EXPECT_EQ(coords[i], basicCoords[i]);
     }
 
-    res = m.mapRawToCorrected(coords.data(), 5, /*clamp*/true);
+    res = m.mapRawToCorrected(coords.data(), 5, mapperInfo, /*clamp*/true);
     ASSERT_EQ(res, OK);
 
     for (size_t i = 0; i < coords.size(); i++) {
@@ -152,14 +153,14 @@
     };
 
     auto rectsOrig = rects;
-    res = m.mapCorrectedRectToRaw(rects.data(), 2, /*clamp*/true);
+    res = m.mapCorrectedRectToRaw(rects.data(), 2, mapperInfo, /*clamp*/true);
     ASSERT_EQ(res, OK);
 
     for (size_t i = 0; i < rects.size(); i++) {
         EXPECT_EQ(rects[i], rectsOrig[i]);
     }
 
-    res = m.mapRawRectToCorrected(rects.data(), 2, /*clamp*/true);
+    res = m.mapRawRectToCorrected(rects.data(), 2, mapperInfo, /*clamp*/true);
     ASSERT_EQ(res, OK);
 
     for (size_t i = 0; i < rects.size(); i++) {
@@ -176,14 +177,17 @@
             /*preCorrectionActiveArray*/ activeArray.data());
 
     auto rectsOrig = activeArray;
-    res = m.mapCorrectedRectToRaw(activeArray.data(), 1, /*clamp*/true, /*simple*/ true);
+    DistortionMapperInfo *mapperInfo = m.getMapperInfo();
+    res = m.mapCorrectedRectToRaw(activeArray.data(), 1, mapperInfo, /*clamp*/true,
+            /*simple*/ true);
     ASSERT_EQ(res, OK);
 
     for (size_t i = 0; i < activeArray.size(); i++) {
         EXPECT_EQ(activeArray[i], rectsOrig[i]);
     }
 
-    res = m.mapRawRectToCorrected(activeArray.data(), 1, /*clamp*/true, /*simple*/ true);
+    res = m.mapRawRectToCorrected(activeArray.data(), 1, mapperInfo, /*clamp*/true,
+            /*simple*/ true);
     ASSERT_EQ(res, OK);
 
     for (size_t i = 0; i < activeArray.size(); i++) {
@@ -200,7 +204,8 @@
             /*preCorrectionActiveArray*/ testPreCorrActiveArray);
 
     auto coords = basicCoords;
-    res = m.mapCorrectedToRaw(coords.data(), 5,  /*clamp*/true, /*simple*/true);
+    DistortionMapperInfo *mapperInfo = m.getMapperInfo();
+    res = m.mapCorrectedToRaw(coords.data(), 5, mapperInfo, /*clamp*/true, /*simple*/true);
     ASSERT_EQ(res, OK);
 
     ASSERT_EQ(coords[0], 0); ASSERT_EQ(coords[1], 0);
@@ -237,12 +242,13 @@
     auto origCoords = randCoords;
 
     base::Timer correctedToRawTimer;
-    res = m.mapCorrectedToRaw(randCoords.data(), randCoords.size() / 2, clamp, simple);
+    DistortionMapperInfo *mapperInfo = m.getMapperInfo();
+    res = m.mapCorrectedToRaw(randCoords.data(), randCoords.size() / 2, mapperInfo, clamp, simple);
     auto correctedToRawDurationMs = correctedToRawTimer.duration();
     EXPECT_EQ(res, OK);
 
     base::Timer rawToCorrectedTimer;
-    res = m.mapRawToCorrected(randCoords.data(), randCoords.size() / 2, clamp, simple);
+    res = m.mapRawToCorrected(randCoords.data(), randCoords.size() / 2, mapperInfo, clamp, simple);
     auto rawToCorrectedDurationMs = rawToCorrectedTimer.duration();
     EXPECT_EQ(res, OK);
 
@@ -363,7 +369,8 @@
 
     using namespace openCvData;
 
-    res = m.mapRawToCorrected(rawCoords.data(), rawCoords.size() / 2, /*clamp*/false,
+    DistortionMapperInfo *mapperInfo = m.getMapperInfo();
+    res = m.mapRawToCorrected(rawCoords.data(), rawCoords.size() / 2, mapperInfo, /*clamp*/false,
             /*simple*/false);
 
     for (size_t i = 0; i < rawCoords.size(); i+=2) {
diff --git a/services/camera/libcameraservice/tests/ZoomRatioTest.cpp b/services/camera/libcameraservice/tests/ZoomRatioTest.cpp
index 4e94991..ff7aafd 100644
--- a/services/camera/libcameraservice/tests/ZoomRatioTest.cpp
+++ b/services/camera/libcameraservice/tests/ZoomRatioTest.cpp
@@ -182,7 +182,7 @@
 
     // Verify 1.0x zoom doesn't change the coordinates
     auto coords = originalCoords;
-    mapper.scaleCoordinates(coords.data(), coords.size()/2, 1.0f, false /*clamp*/);
+    mapper.scaleCoordinates(coords.data(), coords.size()/2, 1.0f, false /*clamp*/, width, height);
     for (size_t i = 0; i < coords.size(); i++) {
         EXPECT_EQ(coords[i], originalCoords[i]);
     }
@@ -199,7 +199,7 @@
             (width - 1) * 5.0f / 4.0f, (height - 1) / 2.0f, // middle-right after 1.33x zoom
     };
     coords = originalCoords;
-    mapper.scaleCoordinates(coords.data(), coords.size()/2, 2.0f, false /*clamp*/);
+    mapper.scaleCoordinates(coords.data(), coords.size()/2, 2.0f, false /*clamp*/, width, height);
     for (size_t i = 0; i < coords.size(); i++) {
         EXPECT_LE(std::abs(coords[i] - expected2xCoords[i]), kMaxAllowedPixelError);
     }
@@ -216,7 +216,7 @@
             width - 1.0f,  (height - 1) / 2.0f, // middle-right after 1.33x zoom
     };
     coords = originalCoords;
-    mapper.scaleCoordinates(coords.data(), coords.size()/2, 2.0f, true /*clamp*/);
+    mapper.scaleCoordinates(coords.data(), coords.size()/2, 2.0f, true /*clamp*/, width, height);
     for (size_t i = 0; i < coords.size(); i++) {
         EXPECT_LE(std::abs(coords[i] - expected2xCoordsClampedInc[i]), kMaxAllowedPixelError);
     }
@@ -233,7 +233,7 @@
             width - 1.0f,  height / 2.0f, // middle-right after 1.33x zoom
     };
     coords = originalCoords;
-    mapper.scaleCoordinates(coords.data(), coords.size()/2, 2.0f, true /*clamp*/);
+    mapper.scaleCoordinates(coords.data(), coords.size()/2, 2.0f, true /*clamp*/, width, height);
     for (size_t i = 0; i < coords.size(); i++) {
         EXPECT_LE(std::abs(coords[i] - expected2xCoordsClampedExc[i]), kMaxAllowedPixelError);
     }
@@ -250,7 +250,7 @@
             (width - 1) * 5 / 8.0f, (height - 1) / 2.0f, // middle-right after 1.33x zoom-in
     };
     coords = originalCoords;
-    mapper.scaleCoordinates(coords.data(), coords.size()/2, 1.0f/3, false /*clamp*/);
+    mapper.scaleCoordinates(coords.data(), coords.size()/2, 1.0f/3, false /*clamp*/, width, height);
     for (size_t i = 0; i < coords.size(); i++) {
         EXPECT_LE(std::abs(coords[i] - expectedZoomOutCoords[i]), kMaxAllowedPixelError);
     }
diff --git a/services/camera/libcameraservice/utils/ClientManager.h b/services/camera/libcameraservice/utils/ClientManager.h
index 64be6c5..09258ef 100644
--- a/services/camera/libcameraservice/utils/ClientManager.h
+++ b/services/camera/libcameraservice/utils/ClientManager.h
@@ -31,6 +31,31 @@
 namespace android {
 namespace resource_policy {
 
+// Values from frameworks/base/services/core/java/com/android/server/am/ProcessList.java
+const int32_t INVALID_ADJ = -10000;
+const int32_t UNKNOWN_ADJ = 1001;
+const int32_t CACHED_APP_MAX_ADJ = 999;
+const int32_t CACHED_APP_MIN_ADJ = 900;
+const int32_t CACHED_APP_LMK_FIRST_ADJ = 950;
+const int32_t CACHED_APP_IMPORTANCE_LEVELS = 5;
+const int32_t SERVICE_B_ADJ = 800;
+const int32_t PREVIOUS_APP_ADJ = 700;
+const int32_t HOME_APP_ADJ = 600;
+const int32_t SERVICE_ADJ = 500;
+const int32_t HEAVY_WEIGHT_APP_ADJ = 400;
+const int32_t BACKUP_APP_ADJ = 300;
+const int32_t PERCEPTIBLE_LOW_APP_ADJ = 250;
+const int32_t PERCEPTIBLE_MEDIUM_APP_ADJ = 225;
+const int32_t PERCEPTIBLE_APP_ADJ = 200;
+const int32_t VISIBLE_APP_ADJ = 100;
+const int32_t VISIBLE_APP_LAYER_MAX = PERCEPTIBLE_APP_ADJ - VISIBLE_APP_ADJ - 1;
+const int32_t PERCEPTIBLE_RECENT_FOREGROUND_APP_ADJ = 50;
+const int32_t FOREGROUND_APP_ADJ = 0;
+const int32_t PERSISTENT_SERVICE_ADJ = -700;
+const int32_t PERSISTENT_PROC_ADJ = -800;
+const int32_t SYSTEM_ADJ = -900;
+const int32_t NATIVE_ADJ = -1000;
+
 class ClientPriority {
 public:
     /**
@@ -40,7 +65,9 @@
      * hwbinder thread.
      */
     ClientPriority(int32_t score, int32_t state, bool isVendorClient) :
-            mScore(score), mState(state), mIsVendorClient(isVendorClient) { }
+            mScore((score == INVALID_ADJ) ? UNKNOWN_ADJ : score),
+            mState(state),
+            mIsVendorClient(isVendorClient) { }
 
     int32_t getScore() const { return mScore; }
     int32_t getState() const { return mState; }
@@ -50,7 +77,7 @@
         // construction. Otherwise, it can get reset each time cameraserver
         // queries ActivityManagerService for oom_adj scores / states .
         if (!mIsVendorClient) {
-            mScore = score;
+            mScore = (score == INVALID_ADJ) ? UNKNOWN_ADJ : score;
         }
     }
 
diff --git a/services/camera/libcameraservice/utils/ExifUtils.cpp b/services/camera/libcameraservice/utils/ExifUtils.cpp
index 8a0303a..485705c 100644
--- a/services/camera/libcameraservice/utils/ExifUtils.cpp
+++ b/services/camera/libcameraservice/utils/ExifUtils.cpp
@@ -916,11 +916,25 @@
         ALOGV("%s: Cannot find focal length in metadata.", __FUNCTION__);
     }
 
+    int32_t sensorPixelMode = ANDROID_SENSOR_PIXEL_MODE_DEFAULT;
+    camera_metadata_ro_entry sensorPixelModeEntry = metadata.find(ANDROID_SENSOR_PIXEL_MODE);
+    if (sensorPixelModeEntry.count != 0) {
+        sensorPixelMode = sensorPixelModeEntry.data.u8[0];
+        if (sensorPixelMode != ANDROID_SENSOR_PIXEL_MODE_DEFAULT &&
+            sensorPixelMode != ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) {
+            ALOGE("%s: Request sensor pixel mode is not one of the valid values %d",
+                      __FUNCTION__, sensorPixelMode);
+            return false;
+        }
+    }
+    int32_t activeArrayTag = sensorPixelMode == ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION ?
+            ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION :
+                    ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE;
     if (metadata.exists(ANDROID_SCALER_CROP_REGION) &&
-            staticInfo.exists(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE)) {
+            staticInfo.exists(activeArrayTag)) {
         entry = metadata.find(ANDROID_SCALER_CROP_REGION);
         camera_metadata_ro_entry activeArrayEntry =
-                staticInfo.find(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE);
+                staticInfo.find(activeArrayTag);
 
         if (!setDigitalZoomRatio(entry.data.i32[2], entry.data.i32[3],
                 activeArrayEntry.data.i32[2], activeArrayEntry.data.i32[3])) {
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
index 8f42a85..6dcf440 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
@@ -21,22 +21,115 @@
 #include "device3/Camera3Device.h"
 #include "device3/Camera3OutputStream.h"
 
-// Convenience methods for constructing binder::Status objects for error returns
-
-#define STATUS_ERROR(errorCode, errorString) \
-    binder::Status::fromServiceSpecificError(errorCode, \
-            String8::format("%s:%d: %s", __FUNCTION__, __LINE__, errorString))
-
-#define STATUS_ERROR_FMT(errorCode, errorString, ...) \
-    binder::Status::fromServiceSpecificError(errorCode, \
-            String8::format("%s:%d: " errorString, __FUNCTION__, __LINE__, \
-                    __VA_ARGS__))
-
 using android::camera3::OutputStreamInfo;
 using android::camera3::OutputStreamInfo;
 using android::hardware::camera2::ICameraDeviceUser;
+using android::hardware::camera::metadata::V3_6::CameraMetadataEnumAndroidSensorPixelMode;
 
 namespace android {
+namespace camera3 {
+
+void StreamConfiguration::getStreamConfigurations(
+        const CameraMetadata &staticInfo, int configuration,
+        std::unordered_map<int, std::vector<StreamConfiguration>> *scm) {
+    if (scm == nullptr) {
+        ALOGE("%s: StreamConfigurationMap nullptr", __FUNCTION__);
+        return;
+    }
+    const int STREAM_FORMAT_OFFSET = 0;
+    const int STREAM_WIDTH_OFFSET = 1;
+    const int STREAM_HEIGHT_OFFSET = 2;
+    const int STREAM_IS_INPUT_OFFSET = 3;
+
+    camera_metadata_ro_entry availableStreamConfigs = staticInfo.find(configuration);
+    for (size_t i = 0; i < availableStreamConfigs.count; i += 4) {
+        int32_t format = availableStreamConfigs.data.i32[i + STREAM_FORMAT_OFFSET];
+        int32_t width = availableStreamConfigs.data.i32[i + STREAM_WIDTH_OFFSET];
+        int32_t height = availableStreamConfigs.data.i32[i + STREAM_HEIGHT_OFFSET];
+        int32_t isInput = availableStreamConfigs.data.i32[i + STREAM_IS_INPUT_OFFSET];
+        StreamConfiguration sc = {format, width, height, isInput};
+        (*scm)[format].push_back(sc);
+    }
+}
+
+void StreamConfiguration::getStreamConfigurations(
+        const CameraMetadata &staticInfo, bool maxRes,
+        std::unordered_map<int, std::vector<StreamConfiguration>> *scm) {
+    int32_t scalerKey =
+            SessionConfigurationUtils::getAppropriateModeTag(
+                    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, maxRes);
+
+    int32_t depthKey =
+            SessionConfigurationUtils::getAppropriateModeTag(
+                    ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS, maxRes);
+
+    int32_t dynamicDepthKey =
+            SessionConfigurationUtils::getAppropriateModeTag(
+                    ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS, maxRes);
+
+    int32_t heicKey =
+            SessionConfigurationUtils::getAppropriateModeTag(
+                    ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS, maxRes);
+
+    getStreamConfigurations(staticInfo, scalerKey, scm);
+    getStreamConfigurations(staticInfo, depthKey, scm);
+    getStreamConfigurations(staticInfo, dynamicDepthKey, scm);
+    getStreamConfigurations(staticInfo, heicKey, scm);
+}
+
+int32_t SessionConfigurationUtils::getAppropriateModeTag(int32_t defaultTag, bool maxResolution) {
+    if (!maxResolution) {
+        return defaultTag;
+    }
+    switch (defaultTag) {
+        case ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS:
+            return ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION;
+        case ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS:
+            return ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION;
+        case ANDROID_SCALER_AVAILABLE_STALL_DURATIONS:
+            return ANDROID_SCALER_AVAILABLE_STALL_DURATIONS_MAXIMUM_RESOLUTION;
+        case ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS:
+            return ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION;
+        case ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS:
+            return ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION;
+        case ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS:
+            return ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS_MAXIMUM_RESOLUTION;
+        case ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS:
+            return ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION;
+        case ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS:
+            return ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION;
+        case ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS:
+            return ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS_MAXIMUM_RESOLUTION;
+        case ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS:
+            return ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION;
+        case ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS:
+            return ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION;
+        case ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS:
+            return ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS_MAXIMUM_RESOLUTION;
+        case ANDROID_SENSOR_OPAQUE_RAW_SIZE:
+            return ANDROID_SENSOR_OPAQUE_RAW_SIZE_MAXIMUM_RESOLUTION;
+        case ANDROID_LENS_INTRINSIC_CALIBRATION:
+            return ANDROID_LENS_INTRINSIC_CALIBRATION_MAXIMUM_RESOLUTION;
+        case ANDROID_LENS_DISTORTION:
+            return ANDROID_LENS_DISTORTION_MAXIMUM_RESOLUTION;
+        default:
+            ALOGE("%s: Tag %d doesn't have a maximum resolution counterpart", __FUNCTION__,
+                    defaultTag);
+            return -1;
+    }
+    return -1;
+}
+
+
+StreamConfigurationPair
+SessionConfigurationUtils::getStreamConfigurationPair(const CameraMetadata &staticInfo) {
+    camera3::StreamConfigurationPair streamConfigurationPair;
+    camera3::StreamConfiguration::getStreamConfigurations(staticInfo, false,
+            &streamConfigurationPair.mDefaultStreamConfigurationMap);
+    camera3::StreamConfiguration::getStreamConfigurations(staticInfo, true,
+            &streamConfigurationPair.mMaximumResolutionStreamConfigurationMap);
+    return streamConfigurationPair;
+}
 
 int64_t SessionConfigurationUtils::euclidDistSquare(int32_t x0, int32_t y0, int32_t x1, int32_t y1) {
     int64_t d0 = x0 - x1;
@@ -45,15 +138,22 @@
 }
 
 bool SessionConfigurationUtils::roundBufferDimensionNearest(int32_t width, int32_t height,
-        int32_t format, android_dataspace dataSpace, const CameraMetadata& info,
-        /*out*/int32_t* outWidth, /*out*/int32_t* outHeight) {
+        int32_t format, android_dataspace dataSpace,
+        const CameraMetadata& info, bool maxResolution, /*out*/int32_t* outWidth,
+        /*out*/int32_t* outHeight) {
+    const int32_t depthSizesTag =
+            getAppropriateModeTag(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
+                    maxResolution);
+    const int32_t scalerSizesTag =
+            getAppropriateModeTag(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, maxResolution);
+    const int32_t heicSizesTag =
+            getAppropriateModeTag(ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS, maxResolution);
 
     camera_metadata_ro_entry streamConfigs =
-            (dataSpace == HAL_DATASPACE_DEPTH) ?
-            info.find(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS) :
+            (dataSpace == HAL_DATASPACE_DEPTH) ? info.find(depthSizesTag) :
             (dataSpace == static_cast<android_dataspace>(HAL_DATASPACE_HEIF)) ?
-            info.find(ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS) :
-            info.find(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
+            info.find(heicSizesTag) :
+            info.find(scalerSizesTag);
 
     int32_t bestWidth = -1;
     int32_t bestHeight = -1;
@@ -128,11 +228,11 @@
 binder::Status SessionConfigurationUtils::createSurfaceFromGbp(
         OutputStreamInfo& streamInfo, bool isStreamInfoValid,
         sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp,
-        const String8 &cameraId, const CameraMetadata &physicalCameraMetadata) {
-
+        const String8 &logicalCameraId, const CameraMetadata &physicalCameraMetadata,
+        const std::vector<int32_t> &sensorPixelModesUsed){
     // bufferProducer must be non-null
     if (gbp == nullptr) {
-        String8 msg = String8::format("Camera %s: Surface is NULL", cameraId.string());
+        String8 msg = String8::format("Camera %s: Surface is NULL", logicalCameraId.string());
         ALOGW("%s: %s", __FUNCTION__, msg.string());
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
     }
@@ -144,13 +244,13 @@
     status_t err;
     if ((err = gbp->getConsumerUsage(&consumerUsage)) != OK) {
         String8 msg = String8::format("Camera %s: Failed to query Surface consumer usage: %s (%d)",
-                cameraId.string(), strerror(-err), err);
+                logicalCameraId.string(), strerror(-err), err);
         ALOGE("%s: %s", __FUNCTION__, msg.string());
         return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
     }
     if (consumerUsage & GraphicBuffer::USAGE_HW_TEXTURE) {
         ALOGW("%s: Camera %s with consumer usage flag: %" PRIu64 ": Forcing asynchronous mode for"
-                "stream", __FUNCTION__, cameraId.string(), consumerUsage);
+                "stream", __FUNCTION__, logicalCameraId.string(), consumerUsage);
         useAsync = true;
     }
 
@@ -169,26 +269,26 @@
     android_dataspace dataSpace;
     if ((err = anw->query(anw, NATIVE_WINDOW_WIDTH, &width)) != OK) {
         String8 msg = String8::format("Camera %s: Failed to query Surface width: %s (%d)",
-                 cameraId.string(), strerror(-err), err);
+                 logicalCameraId.string(), strerror(-err), err);
         ALOGE("%s: %s", __FUNCTION__, msg.string());
         return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
     }
     if ((err = anw->query(anw, NATIVE_WINDOW_HEIGHT, &height)) != OK) {
         String8 msg = String8::format("Camera %s: Failed to query Surface height: %s (%d)",
-                cameraId.string(), strerror(-err), err);
+                logicalCameraId.string(), strerror(-err), err);
         ALOGE("%s: %s", __FUNCTION__, msg.string());
         return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
     }
     if ((err = anw->query(anw, NATIVE_WINDOW_FORMAT, &format)) != OK) {
         String8 msg = String8::format("Camera %s: Failed to query Surface format: %s (%d)",
-                cameraId.string(), strerror(-err), err);
+                logicalCameraId.string(), strerror(-err), err);
         ALOGE("%s: %s", __FUNCTION__, msg.string());
         return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
     }
     if ((err = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE,
             reinterpret_cast<int*>(&dataSpace))) != OK) {
         String8 msg = String8::format("Camera %s: Failed to query Surface dataspace: %s (%d)",
-                cameraId.string(), strerror(-err), err);
+                logicalCameraId.string(), strerror(-err), err);
         ALOGE("%s: %s", __FUNCTION__, msg.string());
         return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
     }
@@ -199,16 +299,31 @@
             ((consumerUsage & GRALLOC_USAGE_HW_MASK) &&
              ((consumerUsage & GRALLOC_USAGE_SW_READ_MASK) == 0))) {
         ALOGW("%s: Camera %s: Overriding format %#x to IMPLEMENTATION_DEFINED",
-                __FUNCTION__, cameraId.string(), format);
+                __FUNCTION__, logicalCameraId.string(), format);
         format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
     }
+    std::unordered_set<int32_t> overriddenSensorPixelModes;
+    if (checkAndOverrideSensorPixelModesUsed(sensorPixelModesUsed, format, width, height,
+            physicalCameraMetadata, flexibleConsumer, &overriddenSensorPixelModes) != OK) {
+        String8 msg = String8::format("Camera %s: sensor pixel modes for stream with "
+                "format %#x are not valid",logicalCameraId.string(), format);
+        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+    }
+    bool foundInMaxRes = false;
+    if (overriddenSensorPixelModes.find(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) !=
+            overriddenSensorPixelModes.end()) {
+        // the size belongs to the maximum resolution stream configuration map
+        foundInMaxRes = true;
+    }
     // Round dimensions to the nearest dimensions available for this format
     if (flexibleConsumer && isPublicFormat(format) &&
             !SessionConfigurationUtils::roundBufferDimensionNearest(width, height,
-            format, dataSpace, physicalCameraMetadata, /*out*/&width, /*out*/&height)) {
+            format, dataSpace, physicalCameraMetadata, foundInMaxRes, /*out*/&width,
+            /*out*/&height)) {
         String8 msg = String8::format("Camera %s: No supported stream configurations with "
                 "format %#x defined, failed to create output stream",
-                cameraId.string(), format);
+                logicalCameraId.string(), format);
         ALOGE("%s: %s", __FUNCTION__, msg.string());
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
     }
@@ -219,30 +334,31 @@
         streamInfo.format = format;
         streamInfo.dataSpace = dataSpace;
         streamInfo.consumerUsage = consumerUsage;
+        streamInfo.sensorPixelModesUsed = overriddenSensorPixelModes;
         return binder::Status::ok();
     }
     if (width != streamInfo.width) {
         String8 msg = String8::format("Camera %s:Surface width doesn't match: %d vs %d",
-                cameraId.string(), width, streamInfo.width);
+                logicalCameraId.string(), width, streamInfo.width);
         ALOGE("%s: %s", __FUNCTION__, msg.string());
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
     }
     if (height != streamInfo.height) {
         String8 msg = String8::format("Camera %s:Surface height doesn't match: %d vs %d",
-                 cameraId.string(), height, streamInfo.height);
+                 logicalCameraId.string(), height, streamInfo.height);
         ALOGE("%s: %s", __FUNCTION__, msg.string());
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
     }
     if (format != streamInfo.format) {
         String8 msg = String8::format("Camera %s:Surface format doesn't match: %d vs %d",
-                 cameraId.string(), format, streamInfo.format);
+                 logicalCameraId.string(), format, streamInfo.format);
         ALOGE("%s: %s", __FUNCTION__, msg.string());
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
     }
     if (format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
         if (dataSpace != streamInfo.dataSpace) {
             String8 msg = String8::format("Camera %s:Surface dataSpace doesn't match: %d vs %d",
-                    cameraId.string(), dataSpace, streamInfo.dataSpace);
+                    logicalCameraId.string(), dataSpace, streamInfo.dataSpace);
             ALOGE("%s: %s", __FUNCTION__, msg.string());
             return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
         }
@@ -251,7 +367,7 @@
         if (consumerUsage != streamInfo.consumerUsage) {
             String8 msg = String8::format(
                     "Camera %s:Surface usage flag doesn't match %" PRIu64 " vs %" PRIu64 "",
-                    cameraId.string(), consumerUsage, streamInfo.consumerUsage);
+                    logicalCameraId.string(), consumerUsage, streamInfo.consumerUsage);
             ALOGE("%s: %s", __FUNCTION__, msg.string());
             return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
         }
@@ -259,7 +375,6 @@
     return binder::Status::ok();
 }
 
-
 void SessionConfigurationUtils::mapStreamInfo(const OutputStreamInfo &streamInfo,
             camera3::camera_stream_rotation_t rotation, String8 physicalId,
             int32_t groupId, hardware::camera::device::V3_7::Stream *stream /*out*/) {
@@ -280,6 +395,12 @@
     stream->v3_4.physicalCameraId = std::string(physicalId.string());
     stream->v3_4.bufferSize = 0;
     stream->groupId = groupId;
+    stream->sensorPixelModesUsed.resize(streamInfo.sensorPixelModesUsed.size());
+    size_t idx = 0;
+    for (auto mode : streamInfo.sensorPixelModesUsed) {
+        stream->sensorPixelModesUsed[idx++] =
+                static_cast<CameraMetadataEnumAndroidSensorPixelMode>(mode);
+    }
 }
 
 binder::Status SessionConfigurationUtils::checkPhysicalCameraId(
@@ -394,6 +515,11 @@
     streamConfiguration.streams.resize(streamCount);
     size_t streamIdx = 0;
     if (isInputValid) {
+        hardware::hidl_vec<CameraMetadataEnumAndroidSensorPixelMode> defaultSensorPixelModes;
+        defaultSensorPixelModes.resize(1);
+        defaultSensorPixelModes[0] =
+                static_cast<CameraMetadataEnumAndroidSensorPixelMode>(
+                        ANDROID_SENSOR_PIXEL_MODE_DEFAULT);
         streamConfiguration.streams[streamIdx++] = {{{/*streamId*/0,
                 hardware::camera::device::V3_2::StreamType::INPUT,
                 static_cast<uint32_t> (sessionConfiguration.getInputWidth()),
@@ -401,7 +527,7 @@
                 Camera3Device::mapToPixelFormat(sessionConfiguration.getInputFormat()),
                 /*usage*/ 0, HAL_DATASPACE_UNKNOWN,
                 hardware::camera::device::V3_2::StreamRotation::ROTATION_0},
-                /*physicalId*/ nullptr, /*bufferSize*/0}, /*groupId*/-1};
+                /*physicalId*/ nullptr, /*bufferSize*/0}, /*groupId*/-1, defaultSensorPixelModes};
         streamConfiguration.multiResolutionInputImage =
                 sessionConfiguration.inputIsMultiResolution();
     }
@@ -411,6 +537,12 @@
             it.getGraphicBufferProducers();
         bool deferredConsumer = it.isDeferred();
         String8 physicalCameraId = String8(it.getPhysicalCameraId());
+
+        std::vector<int32_t> sensorPixelModesUsed = it.getSensorPixelModesUsed();
+        const CameraMetadata &physicalDeviceInfo = getMetadata(physicalCameraId);
+        const CameraMetadata &metadataChosen =
+                physicalCameraId.size() > 0 ? physicalDeviceInfo : deviceInfo;
+
         size_t numBufferProducers = bufferProducers.size();
         bool isStreamInfoValid = false;
         int32_t groupId = it.isMultiResolution() ? it.getSurfaceSetID() : -1;
@@ -436,6 +568,15 @@
             if (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW) {
                 streamInfo.consumerUsage |= GraphicBuffer::USAGE_HW_COMPOSER;
             }
+            if (checkAndOverrideSensorPixelModesUsed(sensorPixelModesUsed,
+                    streamInfo.format, streamInfo.width,
+                    streamInfo.height, metadataChosen, false /*flexibleConsumer*/,
+                    &streamInfo.sensorPixelModesUsed) != OK) {
+                        ALOGE("%s: Deferred surface sensor pixel modes not valid",
+                                __FUNCTION__);
+                        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+                                "Deferred surface sensor pixel modes not valid");
+            }
             mapStreamInfo(streamInfo, camera3::CAMERA_STREAM_ROTATION_0, physicalCameraId, groupId,
                     &streamConfiguration.streams[streamIdx++]);
             isStreamInfoValid = true;
@@ -447,10 +588,8 @@
 
         for (auto& bufferProducer : bufferProducers) {
             sp<Surface> surface;
-            const CameraMetadata &physicalDeviceInfo = getMetadata(physicalCameraId);
             res = createSurfaceFromGbp(streamInfo, isStreamInfoValid, surface, bufferProducer,
-                    logicalCameraId,
-                    physicalCameraId.size() > 0 ? physicalDeviceInfo : deviceInfo );
+                    logicalCameraId, metadataChosen, sensorPixelModesUsed);
 
             if (!res.isOk())
                 return res;
@@ -465,6 +604,7 @@
                     // additional internal camera streams.
                     std::vector<OutputStreamInfo> compositeStreams;
                     if (isDepthCompositeStream) {
+                      // TODO: Take care of composite streams.
                         ret = camera3::DepthCompositeStream::getCompositeStreamInfo(streamInfo,
                                 deviceInfo, &compositeStreams);
                     } else {
@@ -505,7 +645,97 @@
         }
     }
     return binder::Status::ok();
+}
 
+static bool inStreamConfigurationMap(int format, int width, int height,
+        const std::unordered_map<int, std::vector<camera3::StreamConfiguration>> &sm) {
+    auto scs = sm.find(format);
+    if (scs == sm.end()) {
+        return false;
+    }
+    for (auto &sc : scs->second) {
+        if (sc.width == width && sc.height == height && sc.isInput == 0) {
+            return true;
+        }
+    }
+    return false;
+}
+
+static std::unordered_set<int32_t> convertToSet(const std::vector<int32_t> &sensorPixelModesUsed) {
+    return std::unordered_set<int32_t>(sensorPixelModesUsed.begin(), sensorPixelModesUsed.end());
+}
+
+status_t SessionConfigurationUtils::checkAndOverrideSensorPixelModesUsed(
+        const std::vector<int32_t> &sensorPixelModesUsed, int format, int width, int height,
+        const CameraMetadata &staticInfo, bool flexibleConsumer,
+        std::unordered_set<int32_t> *overriddenSensorPixelModesUsed) {
+    if (!isUltraHighResolutionSensor(staticInfo)) {
+        overriddenSensorPixelModesUsed->clear();
+        overriddenSensorPixelModesUsed->insert(ANDROID_SENSOR_PIXEL_MODE_DEFAULT);
+        return OK;
+    }
+
+    StreamConfigurationPair streamConfigurationPair = getStreamConfigurationPair(staticInfo);
+    const std::unordered_set<int32_t> &sensorPixelModesUsedSet =
+            convertToSet(sensorPixelModesUsed);
+    bool isInDefaultStreamConfigurationMap =
+            inStreamConfigurationMap(format, width, height,
+                    streamConfigurationPair.mDefaultStreamConfigurationMap);
+
+    bool isInMaximumResolutionStreamConfigurationMap =
+            inStreamConfigurationMap(format, width, height,
+                    streamConfigurationPair.mMaximumResolutionStreamConfigurationMap);
+
+    // Case 1: The client has not changed the sensor mode defaults. In this case, we check if the
+    // size + format of the OutputConfiguration is found exclusively in 1.
+    // If yes, add that sensorPixelMode to overriddenSensorPixelModes.
+    // If no, add 'DEFAULT' to sensorPixelMode. This maintains backwards
+    // compatibility.
+    if (sensorPixelModesUsedSet.size() == 0) {
+        // Ambiguous case, default to only 'DEFAULT' mode.
+        if (isInDefaultStreamConfigurationMap && isInMaximumResolutionStreamConfigurationMap) {
+            overriddenSensorPixelModesUsed->insert(ANDROID_SENSOR_PIXEL_MODE_DEFAULT);
+            return OK;
+        }
+        // We don't allow flexible consumer for max resolution mode.
+        if (isInMaximumResolutionStreamConfigurationMap) {
+            overriddenSensorPixelModesUsed->insert(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);
+            return OK;
+        }
+        if (isInDefaultStreamConfigurationMap || (flexibleConsumer && width < ROUNDING_WIDTH_CAP)) {
+            overriddenSensorPixelModesUsed->insert(ANDROID_SENSOR_PIXEL_MODE_DEFAULT);
+            return OK;
+        }
+        return BAD_VALUE;
+    }
+
+    // Case2: The app has set sensorPixelModesUsed, we need to verify that they
+    // are valid / err out.
+    if (sensorPixelModesUsedSet.find(ANDROID_SENSOR_PIXEL_MODE_DEFAULT) !=
+            sensorPixelModesUsedSet.end() && !isInDefaultStreamConfigurationMap) {
+        return BAD_VALUE;
+    }
+
+    if (sensorPixelModesUsedSet.find(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) !=
+            sensorPixelModesUsedSet.end() && !isInMaximumResolutionStreamConfigurationMap) {
+        return BAD_VALUE;
+    }
+    *overriddenSensorPixelModesUsed = sensorPixelModesUsedSet;
+    return OK;
+}
+
+bool SessionConfigurationUtils::isUltraHighResolutionSensor(const CameraMetadata &deviceInfo) {
+    camera_metadata_ro_entry_t entryCap;
+    entryCap = deviceInfo.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
+    // Go through the capabilities and check if it has
+    // ANDROID_REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR
+    for (size_t i = 0; i < entryCap.count; ++i) {
+        uint8_t capability = entryCap.data.u8[i];
+        if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR) {
+            return true;
+        }
+    }
+    return false;
 }
 
 bool SessionConfigurationUtils::convertHALStreamCombinationFromV37ToV34(
@@ -531,4 +761,5 @@
     return true;
 }
 
-}// namespace android
+} // namespace camera3
+} // namespace android
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
index 36e1dd7..863a0cd 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
@@ -22,24 +22,60 @@
 #include <camera/camera2/SessionConfiguration.h>
 #include <camera/camera2/SubmitInfo.h>
 #include <android/hardware/camera/device/3.7/types.h>
+#include <android/hardware/camera/device/3.4/ICameraDeviceSession.h>
+#include <android/hardware/camera/device/3.7/ICameraDeviceSession.h>
 
 #include <device3/Camera3StreamInterface.h>
 
 #include <stdint.h>
 
+// Convenience methods for constructing binder::Status objects for error returns
+
+#define STATUS_ERROR(errorCode, errorString) \
+    binder::Status::fromServiceSpecificError(errorCode, \
+            String8::format("%s:%d: %s", __FUNCTION__, __LINE__, errorString))
+
+#define STATUS_ERROR_FMT(errorCode, errorString, ...) \
+    binder::Status::fromServiceSpecificError(errorCode, \
+            String8::format("%s:%d: " errorString, __FUNCTION__, __LINE__, \
+                    __VA_ARGS__))
+
 namespace android {
+namespace camera3 {
 
 typedef std::function<CameraMetadata (const String8 &)> metadataGetter;
 
+class StreamConfiguration {
+public:
+    int32_t format;
+    int32_t width;
+    int32_t height;
+    int32_t isInput;
+    static void getStreamConfigurations(
+            const CameraMetadata &static_info, bool maxRes,
+            std::unordered_map<int, std::vector<StreamConfiguration>> *scm);
+    static void getStreamConfigurations(
+            const CameraMetadata &static_info, int configuration,
+            std::unordered_map<int, std::vector<StreamConfiguration>> *scm);
+};
+
+// Holds the default StreamConfigurationMap and Maximum resolution
+// StreamConfigurationMap for a camera device.
+struct StreamConfigurationPair {
+    std::unordered_map<int, std::vector<camera3::StreamConfiguration>>
+            mDefaultStreamConfigurationMap;
+    std::unordered_map<int, std::vector<camera3::StreamConfiguration>>
+            mMaximumResolutionStreamConfigurationMap;
+};
+
 class SessionConfigurationUtils {
 public:
-
     static int64_t euclidDistSquare(int32_t x0, int32_t y0, int32_t x1, int32_t y1);
 
     // Find the closest dimensions for a given format in available stream configurations with
     // a width <= ROUNDING_WIDTH_CAP
     static bool roundBufferDimensionNearest(int32_t width, int32_t height, int32_t format,
-            android_dataspace dataSpace, const CameraMetadata& info,
+            android_dataspace dataSpace, const CameraMetadata& info, bool maxResolution,
             /*out*/int32_t* outWidth, /*out*/int32_t* outHeight);
 
     //check if format is not custom format
@@ -50,7 +86,8 @@
     static binder::Status createSurfaceFromGbp(
         camera3::OutputStreamInfo& streamInfo, bool isStreamInfoValid,
         sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp,
-        const String8 &cameraId, const CameraMetadata &physicalCameraMetadata);
+        const String8 &logicalCameraId, const CameraMetadata &physicalCameraMetadata,
+        const std::vector<int32_t> &sensorPixelModesUsed);
 
     static void mapStreamInfo(const camera3::OutputStreamInfo &streamInfo,
             camera3::camera_stream_rotation_t rotation, String8 physicalId, int32_t groupId,
@@ -86,10 +123,23 @@
             hardware::camera::device::V3_4::StreamConfiguration &streamConfigV34,
             const hardware::camera::device::V3_7::StreamConfiguration &streamConfigV37);
 
+    static StreamConfigurationPair getStreamConfigurationPair(const CameraMetadata &metadata);
+
+    static status_t checkAndOverrideSensorPixelModesUsed(
+            const std::vector<int32_t> &sensorPixelModesUsed, int format, int width, int height,
+            const CameraMetadata &staticInfo, bool flexibleConsumer,
+            std::unordered_set<int32_t> *overriddenSensorPixelModesUsed);
+
+    static bool isUltraHighResolutionSensor(const CameraMetadata &deviceInfo);
+
+    static int32_t getAppropriateModeTag(int32_t defaultTag, bool maxResolution = false);
+
     static const int32_t MAX_SURFACES_PER_STREAM = 4;
 
     static const int32_t ROUNDING_WIDTH_CAP = 1920;
+
 };
 
+} // camera3
 } // android
 #endif
diff --git a/services/mediacodec/registrant/CodecServiceRegistrant.cpp b/services/mediacodec/registrant/CodecServiceRegistrant.cpp
index 184251a..b479433 100644
--- a/services/mediacodec/registrant/CodecServiceRegistrant.cpp
+++ b/services/mediacodec/registrant/CodecServiceRegistrant.cpp
@@ -25,8 +25,9 @@
 #include <C2PlatformSupport.h>
 #include <codec2/hidl/1.0/ComponentStore.h>
 #include <codec2/hidl/1.1/ComponentStore.h>
-#include <codec2/hidl/1.1/Configurable.h>
-#include <codec2/hidl/1.1/types.h>
+#include <codec2/hidl/1.2/ComponentStore.h>
+#include <codec2/hidl/1.2/Configurable.h>
+#include <codec2/hidl/1.2/types.h>
 #include <hidl/HidlSupport.h>
 #include <media/CodecServiceRegistrant.h>
 
@@ -37,8 +38,8 @@
 using ::android::hardware::Return;
 using ::android::hardware::Void;
 using ::android::sp;
-using namespace ::android::hardware::media::c2::V1_1;
-using namespace ::android::hardware::media::c2::V1_1::utils;
+using namespace ::android::hardware::media::c2::V1_2;
+using namespace ::android::hardware::media::c2::V1_2::utils;
 
 constexpr c2_status_t C2_TRANSACTION_FAILED = C2_CORRUPTED;
 
@@ -420,11 +421,20 @@
     // STOPSHIP: Remove code name checking once platform version bumps up to 30.
     std::string codeName =
         android::base::GetProperty("ro.build.version.codename", "");
-    if (codeName == "R") {
-        platformVersion = 30;
+    if (codeName == "S") {
+        platformVersion = 31;
     }
 
     switch (platformVersion) {
+        case 31: {
+            android::sp<V1_2::IComponentStore> storeV1_2 =
+                new V1_2::utils::ComponentStore(store);
+            if (storeV1_2->registerAsService("software") != android::OK) {
+                LOG(ERROR) << "Cannot register software Codec2 v1.2 service.";
+                return;
+            }
+            break;
+        }
         case 30: {
             android::sp<V1_1::IComponentStore> storeV1_1 =
                 new V1_1::utils::ComponentStore(store);
diff --git a/services/mediametrics/Android.bp b/services/mediametrics/Android.bp
index 443d339..5989181 100644
--- a/services/mediametrics/Android.bp
+++ b/services/mediametrics/Android.bp
@@ -61,6 +61,7 @@
     "-bugprone-unhandled-self-assignment", // found in TimeMachine.h
     "-bugprone-suspicious-string-compare", // found in TimeMachine.h
     "-cert-oop54-cpp", // found in TransactionLog.h
+    "-bugprone-narrowing-conversions", // b/182410845
 ]
 
 cc_defaults {
diff --git a/services/mediaresourcemanager/ResourceObserverService.cpp b/services/mediaresourcemanager/ResourceObserverService.cpp
index 9cc6fe4..4e97406 100644
--- a/services/mediaresourcemanager/ResourceObserverService.cpp
+++ b/services/mediaresourcemanager/ResourceObserverService.cpp
@@ -165,6 +165,10 @@
         return Status::fromServiceSpecificError(PERMISSION_DENIED);
     }
 
+    if (in_observer == nullptr) {
+        return Status::fromServiceSpecificError(BAD_VALUE);
+    }
+
     ::ndk::SpAIBinder binder = in_observer->asBinder();
 
     {
@@ -220,6 +224,10 @@
         return Status::fromServiceSpecificError(PERMISSION_DENIED);
     }
 
+    if (in_observer == nullptr) {
+        return Status::fromServiceSpecificError(BAD_VALUE);
+    }
+
     ::ndk::SpAIBinder binder = in_observer->asBinder();
 
     {
diff --git a/services/mediaresourcemanager/test/ResourceObserverService_test.cpp b/services/mediaresourcemanager/test/ResourceObserverService_test.cpp
index e3d3e78..acd9df1 100644
--- a/services/mediaresourcemanager/test/ResourceObserverService_test.cpp
+++ b/services/mediaresourcemanager/test/ResourceObserverService_test.cpp
@@ -182,6 +182,11 @@
     std::vector<MediaObservableFilter> filters1;
     Status status;
 
+    // Register with null observer should fail.
+    status = mObserverService->registerObserver(nullptr, filters1);
+    EXPECT_FALSE(status.isOk());
+    EXPECT_EQ(status.getServiceSpecificError(), BAD_VALUE);
+
     // Register with empty observables should fail.
     status = mObserverService->registerObserver(mTestObserver1, filters1);
     EXPECT_FALSE(status.isOk());