Handle ANDROID_JPEG_MAX_SIZE for ultra high res sensors.

DepthCompositeStream conservatively allocates buffers in
processInputFrame() based on ANDROID_JPEG_MAX_SIZE. For ultra high
resolution sensors, ANDROID_JPEG_MAX_SIZE was set to the maximum
buffer size for max res mode JPEG captures.

This could lead to even default size input frames being allocated
max res mode sized buffers, which is wasteful.

Here we go back to the original definition of ANDROID_JPEG_MAX_SIZE:
the maximum JPEG buffer size for default mode captures. The maximum
JPEG buffer size for max res mode captures is estimated separately
instead.
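
The estimate is computed by
SessionConfigurationUtils::getUHRMaxJpegBufferSize(), whose body is
not part of this diff. As an illustrative sketch only (the helper name
estimateUHRMaxJpegBufferSize and the scaling rule are assumptions, not
necessarily the shipped implementation), the max res mode buffer size
can be derived by scaling the default mode ANDROID_JPEG_MAX_SIZE by
the pixel count ratio of the two maximum JPEG resolutions:

    // Sketch: scale the default mode JPEG buffer size by the ratio of
    // max res mode pixels to default mode pixels. Assumes camera3::Size
    // exposes width/height members, as used elsewhere in this change.
    size_t estimateUHRMaxJpegBufferSize(camera3::Size uhrMaxJpegSize,
            camera3::Size defaultMaxJpegSize,
            size_t defaultMaxJpegBufferSize) {
        float pixelRatio =
                static_cast<float>(uhrMaxJpegSize.width * uhrMaxJpegSize.height) /
                (defaultMaxJpegSize.width * defaultMaxJpegSize.height);
        return static_cast<size_t>(pixelRatio * defaultMaxJpegBufferSize);
    }

processInputFrame() then picks between this estimate and the default
ANDROID_JPEG_MAX_SIZE value depending on whether the input frame is
larger than the default mode maximum JPEG resolution (see the diff
below).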

Bug: 193346383

Test: Camera CTS

Change-Id: I4decf1430d38219c666ea11dfe109587f7fff1ba
Signed-off-by: Jayant Chowdhary <jchowdhary@google.com>
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
index 19b54e0..a66a592 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
@@ -42,17 +42,29 @@
         mDepthBufferAcquired(false),
         mBlobBufferAcquired(false),
         mProducerListener(new ProducerListener()),
-        mMaxJpegSize(-1),
+        mMaxJpegBufferSize(-1),
+        mUHRMaxJpegBufferSize(-1),
         mIsLogicalCamera(false) {
     if (device != nullptr) {
         CameraMetadata staticInfo = device->info();
         auto entry = staticInfo.find(ANDROID_JPEG_MAX_SIZE);
         if (entry.count > 0) {
-            mMaxJpegSize = entry.data.i32[0];
+            mMaxJpegBufferSize = entry.data.i32[0];
         } else {
             ALOGW("%s: Maximum jpeg size absent from camera characteristics", __FUNCTION__);
         }
 
+        mUHRMaxJpegSize =
+                SessionConfigurationUtils::getMaxJpegResolution(staticInfo,
+                        /*ultraHighResolution*/true);
+        mDefaultMaxJpegSize =
+                SessionConfigurationUtils::getMaxJpegResolution(staticInfo,
+                        /*ultraHighResolution*/false);
+
+        mUHRMaxJpegBufferSize =
+            SessionConfigurationUtils::getUHRMaxJpegBufferSize(mUHRMaxJpegSize, mDefaultMaxJpegSize,
+                    mMaxJpegBufferSize);
+
         entry = staticInfo.find(ANDROID_LENS_INTRINSIC_CALIBRATION);
         if (entry.count == 5) {
             mIntrinsicCalibration.reserve(5);
@@ -243,13 +255,22 @@
         jpegSize = inputFrame.jpegBuffer.width;
     }
 
-    size_t maxDepthJpegSize;
-    if (mMaxJpegSize > 0) {
-        maxDepthJpegSize = mMaxJpegSize;
+    size_t maxDepthJpegBufferSize = 0;
+    if (mMaxJpegBufferSize > 0) {
+        // If this is an ultra high resolution sensor and the input frame's
+        // size is > default res jpeg.
+        if (mUHRMaxJpegSize.width != 0 &&
+                inputFrame.jpegBuffer.width * inputFrame.jpegBuffer.height >
+                mDefaultMaxJpegSize.width * mDefaultMaxJpegSize.height) {
+            maxDepthJpegBufferSize = mUHRMaxJpegBufferSize;
+        } else {
+            maxDepthJpegBufferSize = mMaxJpegBufferSize;
+        }
     } else {
-        maxDepthJpegSize = std::max<size_t> (jpegSize,
+        maxDepthJpegBufferSize = std::max<size_t> (jpegSize,
                 inputFrame.depthBuffer.width * inputFrame.depthBuffer.height * 3 / 2);
     }
+
     uint8_t jpegQuality = 100;
     auto entry = inputFrame.result.find(ANDROID_JPEG_QUALITY);
     if (entry.count > 0) {
@@ -259,7 +280,7 @@
     // The final depth photo will consist of the main jpeg buffer, the depth map buffer (also in
     // jpeg format) and confidence map (jpeg as well). Assume worst case that all 3 jpeg need
     // max jpeg size.
-    size_t finalJpegBufferSize = maxDepthJpegSize * 3;
+    size_t finalJpegBufferSize = maxDepthJpegBufferSize * 3;
 
     if ((res = native_window_set_buffers_dimensions(mOutputSurface.get(), finalJpegBufferSize, 1))
             != OK) {
@@ -302,7 +323,7 @@
     depthPhoto.mDepthMapStride = inputFrame.depthBuffer.stride;
     depthPhoto.mJpegQuality = jpegQuality;
     depthPhoto.mIsLogical = mIsLogicalCamera;
-    depthPhoto.mMaxJpegSize = maxDepthJpegSize;
+    depthPhoto.mMaxJpegSize = maxDepthJpegBufferSize;
     // The camera intrinsic calibration layout is as follows:
     // [focalLengthX, focalLengthY, opticalCenterX, opticalCenterY, skew]
     if (mIntrinsicCalibration.size() == 5) {
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.h b/services/camera/libcameraservice/api2/DepthCompositeStream.h
index a520bbf..c1c75c1 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.h
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.h
@@ -132,7 +132,12 @@
     sp<Surface>          mDepthSurface, mBlobSurface, mOutputSurface;
     sp<ProducerListener> mProducerListener;
 
-    ssize_t              mMaxJpegSize;
+    ssize_t              mMaxJpegBufferSize;
+    ssize_t              mUHRMaxJpegBufferSize;
+
+    camera3::Size        mDefaultMaxJpegSize;
+    camera3::Size        mUHRMaxJpegSize;
+
     std::vector<std::tuple<size_t, size_t>> mSupportedDepthSizes;
     std::vector<std::tuple<size_t, size_t>> mSupportedDepthSizesMaximumResolution;
     std::vector<float>   mIntrinsicCalibration, mLensDistortion;