Merge "hdr: enable VP9 profile2 in SoftVPX"
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index d9fdfe3..14ea2a8 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -551,6 +551,8 @@
       mDescribeHDRStaticInfoIndex((OMX_INDEXTYPE)0),
       mStateGeneration(0),
       mVendorExtensionsStatus(kExtensionsUnchecked) {
+    memset(&mLastHDRStaticInfo, 0, sizeof(mLastHDRStaticInfo));
+
     mUninitializedState = new UninitializedState(this);
     mLoadedState = new LoadedState(this);
     mLoadedToIdleState = new LoadedToIdleState(this);
@@ -6103,6 +6105,14 @@
             mCodec->mLastNativeWindowDataSpace = dataSpace;
             ALOGW_IF(err != NO_ERROR, "failed to set dataspace: %d", err);
         }
+        if (buffer->format()->contains("hdr-static-info")) {
+            HDRStaticInfo info;
+            if (ColorUtils::getHDRStaticInfoFromFormat(buffer->format(), &info)
+                && memcmp(&mCodec->mLastHDRStaticInfo, &info, sizeof(info))) {
+                setNativeWindowHdrMetadata(mCodec->mNativeWindow.get(), &info);
+                mCodec->mLastHDRStaticInfo = info;
+            }
+        }
 
         // save buffers sent to the surface so we can get render time when they return
         int64_t mediaTimeUs = -1;
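
Both the ACodec change above and the MediaCodec change below read HDR static info
back out of the output format under the "hdr-static-info" key and forward it to the
surface. For reference, a minimal sketch of the producer side that would put it
there, assuming ColorUtils::setHDRStaticInfoIntoFormat() and an sp<AMessage> output
format; the helper name attachHdrStaticInfo and the sample values are illustrative
only, not part of the patch.

    #include <media/hardware/VideoAPI.h>                  // HDRStaticInfo
    #include <media/stagefright/foundation/AMessage.h>
    #include <media/stagefright/foundation/ColorUtils.h>  // ColorUtils

    // Illustrative: attach SMPTE ST 2086 / CTA-861.3 values to an output format
    // so the "hdr-static-info" checks above find them.
    void attachHdrStaticInfo(const android::sp<android::AMessage> &format) {
        android::HDRStaticInfo info = {};             // zero-init: Type 1, all fields 0
        info.sType1.mMaxDisplayLuminance = 1000;      // cd/m^2
        info.sType1.mMinDisplayLuminance = 50;        // in 0.0001 cd/m^2 units
        info.sType1.mMaxContentLightLevel = 1000;     // cd/m^2
        info.sType1.mMaxFrameAverageLightLevel = 120; // cd/m^2
        android::ColorUtils::setHDRStaticInfoIntoFormat(info, format);
    }
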
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 7cfa4ce..77d9ce4 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -1832,6 +1832,12 @@
                                         mSurface.get(), (android_dataspace)dataSpace);
                                 ALOGW_IF(err != 0, "failed to set dataspace on surface (%d)", err);
                             }
+                            if (mOutputFormat->contains("hdr-static-info")) {
+                                HDRStaticInfo info;
+                                if (ColorUtils::getHDRStaticInfoFromFormat(mOutputFormat, &info)) {
+                                    setNativeWindowHdrMetadata(mSurface.get(), &info);
+                                }
+                            }
 
                             if (mime.startsWithIgnoreCase("video/")) {
                                 mSoftRenderer = new SoftwareRenderer(mSurface, mRotationDegrees);
diff --git a/media/libstagefright/SurfaceUtils.cpp b/media/libstagefright/SurfaceUtils.cpp
index b7c1598..9e11a94 100644
--- a/media/libstagefright/SurfaceUtils.cpp
+++ b/media/libstagefright/SurfaceUtils.cpp
@@ -18,8 +18,8 @@
 #define LOG_TAG "SurfaceUtils"
 #include <utils/Log.h>
 
+#include <media/hardware/VideoAPI.h>
 #include <media/stagefright/SurfaceUtils.h>
-
 #include <gui/Surface.h>
 
 namespace android {
@@ -128,6 +128,40 @@
     return NO_ERROR;
 }
 
+void setNativeWindowHdrMetadata(ANativeWindow *nativeWindow, HDRStaticInfo *info) {
+    struct android_smpte2086_metadata smpte2086_meta = {
+            .displayPrimaryRed = {
+                    info->sType1.mR.x * 0.00002f,
+                    info->sType1.mR.y * 0.00002f
+            },
+            .displayPrimaryGreen = {
+                    info->sType1.mG.x * 0.00002f,
+                    info->sType1.mG.y * 0.00002f
+            },
+            .displayPrimaryBlue = {
+                    info->sType1.mB.x * 0.00002f,
+                    info->sType1.mB.y * 0.00002f
+            },
+            .whitePoint = {
+                    info->sType1.mW.x * 0.00002f,
+                    info->sType1.mW.y * 0.00002f
+            },
+            .maxLuminance = (float) info->sType1.mMaxDisplayLuminance,
+            .minLuminance = info->sType1.mMinDisplayLuminance * 0.0001f
+    };
+
+    int err = native_window_set_buffers_smpte2086_metadata(nativeWindow, &smpte2086_meta);
+    ALOGW_IF(err != 0, "failed to set smpte2086 metadata on surface (%d)", err);
+
+    struct android_cta861_3_metadata cta861_meta = {
+            .maxContentLightLevel = (float) info->sType1.mMaxContentLightLevel,
+            .maxFrameAverageLightLevel = (float) info->sType1.mMaxFrameAverageLightLevel
+    };
+
+    err = native_window_set_buffers_cta861_3_metadata(nativeWindow, &cta861_meta);
+    ALOGW_IF(err != 0, "failed to set cta861_3 metadata on surface (%d)", err);
+}
+
 status_t pushBlankBuffersToNativeWindow(ANativeWindow *nativeWindow /* nonnull */) {
     status_t err = NO_ERROR;
     ANativeWindowBuffer* anb = NULL;
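
The setNativeWindowHdrMetadata() helper added above converts HDRStaticInfo's integer
fields into the float units expected by the SMPTE ST 2086 / CTA-861.3 window
metadata: chromaticities are stored in steps of 0.00002 and the minimum display
luminance in steps of 0.0001 cd/m^2, while maximum luminance and the light levels
are whole cd/m^2. A worked example under those scalings (the function name and the
BT.2020 sample values are illustrative, not part of the patch):

    #include <media/hardware/VideoAPI.h>         // HDRStaticInfo
    #include <media/stagefright/SurfaceUtils.h>  // setNativeWindowHdrMetadata
    #include <system/window.h>                   // ANativeWindow

    // Illustrative: BT.2020 primaries on a 1000-nit panel. The helper scales
    // these integers to floats as shown on the right.
    void exampleHdrMetadata(ANativeWindow *window /* assumed valid */) {
        android::HDRStaticInfo info = {};
        info.sType1.mR = { 35400, 14600 };  // -> displayPrimaryRed   = {0.708,  0.292}
        info.sType1.mG = {  8500, 39850 };  // -> displayPrimaryGreen = {0.170,  0.797}
        info.sType1.mB = {  6550,  2300 };  // -> displayPrimaryBlue  = {0.131,  0.046}
        info.sType1.mW = { 15635, 16450 };  // -> whitePoint          = {0.3127, 0.3290}
        info.sType1.mMaxDisplayLuminance = 1000;  // -> maxLuminance = 1000.0 cd/m^2
        info.sType1.mMinDisplayLuminance = 50;    // -> minLuminance = 0.005 cd/m^2
        android::setNativeWindowHdrMetadata(window, &info);
    }
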
diff --git a/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp b/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp
index 39b67ab..fda7028 100644
--- a/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp
+++ b/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp
@@ -353,7 +353,8 @@
     bool portWillReset = false;
     const bool fakeStride = true;
     SoftVideoDecoderOMXComponent::handlePortSettingsChange(
-            &portWillReset, buf_width, buf_height, cropSettingsMode, fakeStride);
+            &portWillReset, buf_width, buf_height,
+            OMX_COLOR_FormatYUV420Planar, cropSettingsMode, fakeStride);
     if (portWillReset) {
         if (mMode == MODE_H263) {
             PVCleanUpVideoDecoder(mHandle);
diff --git a/media/libstagefright/codecs/on2/dec/SoftVPX.cpp b/media/libstagefright/codecs/on2/dec/SoftVPX.cpp
index 3490008..8d5f3e7 100644
--- a/media/libstagefright/codecs/on2/dec/SoftVPX.cpp
+++ b/media/libstagefright/codecs/on2/dec/SoftVPX.cpp
@@ -30,7 +30,9 @@
 
 // Only need to declare the highest supported profile and level here.
 static const CodecProfileLevel kVP9ProfileLevels[] = {
-    { OMX_VIDEO_VP9Profile0, OMX_VIDEO_VP9Level5  },
+    { OMX_VIDEO_VP9Profile0, OMX_VIDEO_VP9Level5 },
+    { OMX_VIDEO_VP9Profile2, OMX_VIDEO_VP9Level5 },
+    { OMX_VIDEO_VP9Profile2HDR, OMX_VIDEO_VP9Level5 },
 };
 
 SoftVPX::SoftVPX(
@@ -78,6 +80,10 @@
     return cpuCoreCount;
 }
 
+bool SoftVPX::supportDescribeHdrStaticInfo() {
+    return true;
+}
+
 status_t SoftVPX::initDecoder() {
     mCtx = new vpx_codec_ctx_t;
     vpx_codec_err_t vpx_err;
@@ -146,15 +152,21 @@
         uint32_t height = mImg->d_h;
         outInfo = *outQueue.begin();
         outHeader = outInfo->mHeader;
-        CHECK_EQ(mImg->fmt, VPX_IMG_FMT_I420);
-        handlePortSettingsChange(portWillReset, width, height);
+        CHECK(mImg->fmt == VPX_IMG_FMT_I420 || mImg->fmt == VPX_IMG_FMT_I42016);
+        OMX_COLOR_FORMATTYPE outputColorFormat = OMX_COLOR_FormatYUV420Planar;
+        int32_t bpp = 1;
+        if (mImg->fmt == VPX_IMG_FMT_I42016) {
+            outputColorFormat = OMX_COLOR_FormatYUV420Planar16;
+            bpp = 2;
+        }
+        handlePortSettingsChange(portWillReset, width, height, outputColorFormat);
         if (*portWillReset) {
             return true;
         }
 
         outHeader->nOffset = 0;
         outHeader->nFlags = 0;
-        outHeader->nFilledLen = (outputBufferWidth() * outputBufferHeight() * 3) / 2;
+        outHeader->nFilledLen = (outputBufferWidth() * outputBufferHeight() * bpp * 3) / 2;
         outHeader->nTimeStamp = *(OMX_TICKS *)mImg->user_priv;
         if (outputBufferSafe(outHeader)) {
             uint8_t *dst = outHeader->pBuffer;
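
With VP9 profile 2 enabled above, libvpx can return VPX_IMG_FMT_I42016 frames, i.e.
10-bit samples stored in 16-bit containers, which is why nFilledLen is now scaled by
bpp. A quick sanity check of that sizing rule (the helper is illustrative only):

    #include <stddef.h>

    // Same rule as the nFilledLen computation above: YUV 4:2:0 holds 1.5 samples
    // per pixel, and each sample is bpp bytes (2 for I42016, 1 for I420).
    static size_t yuv420FrameBytes(size_t width, size_t height, size_t bpp) {
        return (width * height * bpp * 3) / 2;
    }
    // yuv420FrameBytes(1920, 1080, 1) == 3110400   (8-bit I420)
    // yuv420FrameBytes(1920, 1080, 2) == 6220800   (10-bit, 16-bit containers)
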
diff --git a/media/libstagefright/codecs/on2/dec/SoftVPX.h b/media/libstagefright/codecs/on2/dec/SoftVPX.h
index d6bb902..a01a4f3 100644
--- a/media/libstagefright/codecs/on2/dec/SoftVPX.h
+++ b/media/libstagefright/codecs/on2/dec/SoftVPX.h
@@ -40,6 +40,7 @@
     virtual void onQueueFilled(OMX_U32 portIndex);
     virtual void onPortFlushCompleted(OMX_U32 portIndex);
     virtual void onReset();
+    virtual bool supportDescribeHdrStaticInfo();
 
 private:
     enum {
diff --git a/media/libstagefright/colorconversion/ColorConverter.cpp b/media/libstagefright/colorconversion/ColorConverter.cpp
index cbb38fd..c7f9001 100644
--- a/media/libstagefright/colorconversion/ColorConverter.cpp
+++ b/media/libstagefright/colorconversion/ColorConverter.cpp
@@ -19,13 +19,28 @@
 #include <utils/Log.h>
 
 #include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/ALooper.h>
 #include <media/stagefright/ColorConverter.h>
 #include <media/stagefright/MediaErrors.h>
 
 #include "libyuv/convert_from.h"
 #include "libyuv/video_common.h"
 
+#include <sys/time.h>
+
 #define USE_LIBYUV
+#define PERF_PROFILING 0
+
+
+#if defined(__aarch64__) || defined(__ARM_NEON__)
+#define USE_NEON_Y410 1
+#else
+#define USE_NEON_Y410 0
+#endif
+
+#if USE_NEON_Y410
+#include <arm_neon.h>
+#endif
 
 namespace android {
 
@@ -48,6 +63,9 @@
                     || mDstFormat == OMX_COLOR_Format32BitRGBA8888
                     || mDstFormat == OMX_COLOR_Format32bitBGRA8888;
 
+        case OMX_COLOR_FormatYUV420Planar16:
+            return mDstFormat == OMX_COLOR_Format32BitRGBA1010102;
+
         case OMX_COLOR_FormatCbYCrY:
         case OMX_QCOM_COLOR_FormatYVU420SemiPlanar:
         case OMX_COLOR_FormatYUV420SemiPlanar:
@@ -81,10 +99,16 @@
 
     case OMX_COLOR_Format32bitBGRA8888:
     case OMX_COLOR_Format32BitRGBA8888:
+    case OMX_COLOR_Format32BitRGBA1010102:
         mBpp = 4;
         mStride = 4 * mWidth;
         break;
 
+    case OMX_COLOR_FormatYUV420Planar16:
+        mBpp = 2;
+        mStride = 2 * mWidth;
+        break;
+
     case OMX_COLOR_FormatYUV420Planar:
     case OMX_COLOR_FormatCbYCrY:
     case OMX_QCOM_COLOR_FormatYVU420SemiPlanar:
@@ -146,6 +170,19 @@
 #endif
             break;
 
+        case OMX_COLOR_FormatYUV420Planar16:
+        {
+#if PERF_PROFILING
+            int64_t startTimeUs = ALooper::GetNowUs();
+#endif
+            err = convertYUV420Planar16(src, dst);
+#if PERF_PROFILING
+            int64_t endTimeUs = ALooper::GetNowUs();
+            ALOGD("convertYUV420Planar16 took %lld us", (long long) (endTimeUs - startTimeUs));
+#endif
+            break;
+        }
+
         case OMX_COLOR_FormatCbYCrY:
             err = convertCbYCrY(src, dst);
             break;
@@ -406,6 +443,234 @@
     return OK;
 }
 
+/*
+ * Pack 10-bit YUV into RGBA_1010102.
+ *
+ * Media sends 10-bit YUV in an RGBA_1010102 format buffer. SurfaceFlinger
+ * handles the conversion to RGB using its RenderEngine fallback.
+ *
+ * We do not perform a YUV->RGB conversion here; the BT.2020 limited-range to
+ * full-range conversion is listed below for reference:
+ *
+ *   B = 1.168  *(Y - 64) + 2.148  *(U - 512)
+ *   G = 1.168  *(Y - 64) - 0.652  *(V - 512) - 0.188  *(U - 512)
+ *   R = 1.168  *(Y - 64) + 1.683  *(V - 512)
+ *
+ *   B = 1196/1024  *(Y - 64) + 2200/1024  *(U - 512)
+ *   G = .................... -  668/1024  *(V - 512) - 192/1024  *(U - 512)
+ *   R = .................... + 1723/1024  *(V - 512)
+ *
+ *   min_B = (1196  *(- 64) + 2200  *(- 512)) / 1024 = -1175
+ *   min_G = (1196  *(- 64) - 668  *(1023 - 512) - 192  *(1023 - 512)) / 1024 = -504
+ *   min_R = (1196  *(- 64) + 1723  *(- 512)) / 1024 = -937
+ *
+ *   max_B = (1196  *(1023 - 64) + 2200  *(1023 - 512)) / 1024 = 2218
+ *   max_G = (1196  *(1023 - 64) - 668  *(- 512) - 192  *(- 512)) / 1024 = 1551
+ *   max_R = (1196  *(1023 - 64) + 1723  *(1023 - 512)) / 1024 = 1980
+ *
+ *   clip range -1175 .. 2218
+ *
+ */
+
+#if !USE_NEON_Y410
+
+status_t ColorConverter::convertYUV420Planar16(
+        const BitmapParams &src, const BitmapParams &dst) {
+    uint8_t *dst_ptr = (uint8_t *)dst.mBits
+        + dst.mCropTop * dst.mStride + dst.mCropLeft * dst.mBpp;
+
+    const uint8_t *src_y =
+        (const uint8_t *)src.mBits + src.mCropTop * src.mStride + src.mCropLeft * src.mBpp;
+
+    const uint8_t *src_u =
+        (const uint8_t *)src.mBits + src.mStride * src.mHeight
+        + (src.mCropTop / 2) * (src.mStride / 2) + (src.mCropLeft / 2) * src.mBpp;
+
+    const uint8_t *src_v =
+        src_u + (src.mStride / 2) * (src.mHeight / 2);
+
+    // Converting two lines at a time is slightly faster.
+    for (size_t y = 0; y < src.cropHeight(); y += 2) {
+        uint32_t *dst_top = (uint32_t *) dst_ptr;
+        uint32_t *dst_bot = (uint32_t *) (dst_ptr + dst.mStride);
+        uint16_t *ptr_ytop = (uint16_t*) src_y;
+        uint16_t *ptr_ybot = (uint16_t*) (src_y + src.mStride);
+        uint16_t *ptr_u = (uint16_t*) src_u;
+        uint16_t *ptr_v = (uint16_t*) src_v;
+
+        uint32_t u01, v01, y01, y23, y45, y67, uv0, uv1;
+        size_t x = 0;
+        for (; x < src.cropWidth() - 3; x += 4) {
+            u01 = *((uint32_t*)ptr_u); ptr_u += 2;
+            v01 = *((uint32_t*)ptr_v); ptr_v += 2;
+
+            y01 = *((uint32_t*)ptr_ytop); ptr_ytop += 2;
+            y23 = *((uint32_t*)ptr_ytop); ptr_ytop += 2;
+            y45 = *((uint32_t*)ptr_ybot); ptr_ybot += 2;
+            y67 = *((uint32_t*)ptr_ybot); ptr_ybot += 2;
+
+            uv0 = (u01 & 0x3FF) | ((v01 & 0x3FF) << 20);
+            uv1 = (u01 >> 16) | ((v01 >> 16) << 20);
+
+            *dst_top++ = ((y01 & 0x3FF) << 10) | uv0;
+            *dst_top++ = ((y01 >> 16) << 10) | uv0;
+            *dst_top++ = ((y23 & 0x3FF) << 10) | uv1;
+            *dst_top++ = ((y23 >> 16) << 10) | uv1;
+
+            *dst_bot++ = ((y45 & 0x3FF) << 10) | uv0;
+            *dst_bot++ = ((y45 >> 16) << 10) | uv0;
+            *dst_bot++ = ((y67 & 0x3FF) << 10) | uv1;
+            *dst_bot++ = ((y67 >> 16) << 10) | uv1;
+        }
+
+        // There are at most 2 more pixels to process. Note that we don't need
+        // to handle an odd width, as the buffer is always aligned to an even size.
+        if (x < src.cropWidth()) {
+            u01 = *ptr_u;
+            v01 = *ptr_v;
+            y01 = *((uint32_t*)ptr_ytop);
+            y45 = *((uint32_t*)ptr_ybot);
+            uv0 = (u01 & 0x3FF) | ((v01 & 0x3FF) << 20);
+            *dst_top++ = ((y01 & 0x3FF) << 10) | uv0;
+            *dst_top++ = ((y01 >> 16) << 10) | uv0;
+            *dst_bot++ = ((y45 & 0x3FF) << 10) | uv0;
+            *dst_bot++ = ((y45 >> 16) << 10) | uv0;
+        }
+
+        src_y += src.mStride * 2;
+        src_u += src.mStride / 2;
+        src_v += src.mStride / 2;
+        dst_ptr += dst.mStride * 2;
+    }
+
+    return OK;
+}
+
+#else
+
+status_t ColorConverter::convertYUV420Planar16(
+        const BitmapParams &src, const BitmapParams &dst) {
+    uint8_t *out = (uint8_t *)dst.mBits
+        + dst.mCropTop * dst.mStride + dst.mCropLeft * dst.mBpp;
+
+    const uint8_t *src_y =
+        (const uint8_t *)src.mBits + src.mCropTop * src.mStride + src.mCropLeft * src.mBpp;
+
+    const uint8_t *src_u =
+        (const uint8_t *)src.mBits + src.mStride * src.mHeight
+        + (src.mCropTop / 2) * (src.mStride / 2) + (src.mCropLeft / 2) * src.mBpp;
+
+    const uint8_t *src_v =
+        src_u + (src.mStride / 2) * (src.mHeight / 2);
+
+    for (size_t y = 0; y < src.cropHeight(); y++) {
+        uint16_t *ptr_y = (uint16_t*) src_y;
+        uint16_t *ptr_u = (uint16_t*) src_u;
+        uint16_t *ptr_v = (uint16_t*) src_v;
+        uint32_t *ptr_out = (uint32_t *) out;
+
+        // Process 16 pixels at a time.
+        uint32_t *ptr_limit = ptr_out + (src.cropWidth() & ~15);
+        while (ptr_out < ptr_limit) {
+            uint16x4_t u0123 = vld1_u16(ptr_u); ptr_u += 4;
+            uint16x4_t u4567 = vld1_u16(ptr_u); ptr_u += 4;
+            uint16x4_t v0123 = vld1_u16(ptr_v); ptr_v += 4;
+            uint16x4_t v4567 = vld1_u16(ptr_v); ptr_v += 4;
+            uint16x4_t y0123 = vld1_u16(ptr_y); ptr_y += 4;
+            uint16x4_t y4567 = vld1_u16(ptr_y); ptr_y += 4;
+            uint16x4_t y89ab = vld1_u16(ptr_y); ptr_y += 4;
+            uint16x4_t ycdef = vld1_u16(ptr_y); ptr_y += 4;
+
+            uint32x2_t uvtempl;
+            uint32x4_t uvtempq;
+
+            uvtempq = vaddw_u16(vshlq_n_u32(vmovl_u16(v0123), 20), u0123);
+
+            uvtempl = vget_low_u32(uvtempq);
+            uint32x4_t uv0011 = vreinterpretq_u32_u64(
+                    vaddw_u32(vshll_n_u32(uvtempl, 32), uvtempl));
+
+            uvtempl = vget_high_u32(uvtempq);
+            uint32x4_t uv2233 = vreinterpretq_u32_u64(
+                    vaddw_u32(vshll_n_u32(uvtempl, 32), uvtempl));
+
+            uvtempq = vaddw_u16(vshlq_n_u32(vmovl_u16(v4567), 20), u4567);
+
+            uvtempl = vget_low_u32(uvtempq);
+            uint32x4_t uv4455 = vreinterpretq_u32_u64(
+                    vaddw_u32(vshll_n_u32(uvtempl, 32), uvtempl));
+
+            uvtempl = vget_high_u32(uvtempq);
+            uint32x4_t uv6677 = vreinterpretq_u32_u64(
+                    vaddw_u32(vshll_n_u32(uvtempl, 32), uvtempl));
+
+            uint32x4_t dsttemp;
+
+            dsttemp = vorrq_u32(uv0011, vshll_n_u16(y0123, 10));
+            vst1q_u32(ptr_out, dsttemp); ptr_out += 4;
+
+            dsttemp = vorrq_u32(uv2233, vshll_n_u16(y4567, 10));
+            vst1q_u32(ptr_out, dsttemp); ptr_out += 4;
+
+            dsttemp = vorrq_u32(uv4455, vshll_n_u16(y89ab, 10));
+            vst1q_u32(ptr_out, dsttemp); ptr_out += 4;
+
+            dsttemp = vorrq_u32(uv6677, vshll_n_u16(ycdef, 10));
+            vst1q_u32(ptr_out, dsttemp); ptr_out += 4;
+        }
+
+        src_y += src.mStride;
+        if (y & 1) {
+            src_u += src.mStride / 2;
+            src_v += src.mStride / 2;
+        }
+        out += dst.mStride;
+    }
+
+    // Process the left-over columns outside the main loop, 2 pixels at a time.
+    // We don't need to handle an odd width, as the buffer is always aligned to even.
+    if (src.cropWidth() & 15) {
+        size_t xstart = (src.cropWidth() & ~15);
+
+        uint8_t *out = (uint8_t *)dst.mBits + dst.mCropTop * dst.mStride
+                + (dst.mCropLeft + xstart) * dst.mBpp;
+
+        const uint8_t *src_y = (const uint8_t *)src.mBits + src.mCropTop * src.mStride
+                + (src.mCropLeft + xstart) * src.mBpp;
+
+        const uint8_t *src_u = (const uint8_t *)src.mBits + src.mStride * src.mHeight
+            + (src.mCropTop / 2) * (src.mStride / 2)
+            + ((src.mCropLeft + xstart) / 2) * src.mBpp;
+
+        const uint8_t *src_v = src_u + (src.mStride / 2) * (src.mHeight / 2);
+
+        for (size_t y = 0; y < src.cropHeight(); y++) {
+            uint16_t *ptr_y = (uint16_t*) src_y;
+            uint16_t *ptr_u = (uint16_t*) src_u;
+            uint16_t *ptr_v = (uint16_t*) src_v;
+            uint32_t *ptr_out = (uint32_t *) out;
+            for (size_t x = xstart; x < src.cropWidth(); x += 2) {
+                uint16_t u = *ptr_u++;
+                uint16_t v = *ptr_v++;
+                uint32_t y01 = *((uint32_t*)ptr_y); ptr_y += 2;
+                uint32_t uv = u | (((uint32_t)v) << 20);
+                *ptr_out++ = ((y01 & 0x3FF) << 10) | uv;
+                *ptr_out++ = ((y01 >> 16) << 10) | uv;
+            }
+            src_y += src.mStride;
+            if (y & 1) {
+                src_u += src.mStride / 2;
+                src_v += src.mStride / 2;
+            }
+            out += dst.mStride;
+        }
+    }
+
+    return OK;
+}
+
+#endif // USE_NEON_Y410
+
 status_t ColorConverter::convertQCOMYUV420SemiPlanar(
         const BitmapParams &src, const BitmapParams &dst) {
     uint8_t *kAdjustedClip = initClip();
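
Both the scalar and the NEON paths of convertYUV420Planar16() above emit the same
32-bit word per pixel: the 10-bit U sample in bits 0-9, Y in bits 10-19, V in bits
20-29, with the two alpha bits left at zero as described in the comment block. A
single-pixel sketch of that packing (illustrative, not part of the patch):

    #include <stdint.h>

    // U[0:9] | Y[10:19] | V[20:29]; bits 30-31 (alpha) stay zero, matching the
    // buffer layout handed to the RGBA_1010102 surface.
    static inline uint32_t packYuv10Word(uint16_t y, uint16_t u, uint16_t v) {
        return (uint32_t)(u & 0x3FF)
                | ((uint32_t)(y & 0x3FF) << 10)
                | ((uint32_t)(v & 0x3FF) << 20);
    }
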
diff --git a/media/libstagefright/colorconversion/SoftwareRenderer.cpp b/media/libstagefright/colorconversion/SoftwareRenderer.cpp
index a07787a..fca9c09 100644
--- a/media/libstagefright/colorconversion/SoftwareRenderer.cpp
+++ b/media/libstagefright/colorconversion/SoftwareRenderer.cpp
@@ -18,10 +18,11 @@
 #include <utils/Log.h>
 
 #include "../include/SoftwareRenderer.h"
-
 #include <cutils/properties.h> // for property_get
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/ColorUtils.h>
+#include <media/stagefright/SurfaceUtils.h>
 #include <system/window.h>
 #include <ui/Fence.h>
 #include <ui/GraphicBufferMapper.h>
@@ -30,7 +31,6 @@
 
 namespace android {
 
-
 static int ALIGN(int x, int y) {
     // y must be a power of 2.
     return (x + y - 1) & ~(y - 1);
@@ -50,7 +50,9 @@
       mCropBottom(0),
       mCropWidth(0),
       mCropHeight(0),
-      mRotationDegrees(rotation) {
+      mRotationDegrees(rotation),
+      mDataSpace(HAL_DATASPACE_UNKNOWN) {
+    memset(&mHDRStaticInfo, 0, sizeof(mHDRStaticInfo));
 }
 
 SoftwareRenderer::~SoftwareRenderer() {
@@ -130,6 +132,13 @@
                 bufHeight = (mCropHeight + 1) & ~1;
                 break;
             }
+            case OMX_COLOR_FormatYUV420Planar16:
+            {
+                halFormat = HAL_PIXEL_FORMAT_RGBA_1010102;
+                bufWidth = (mCropWidth + 1) & ~1;
+                bufHeight = (mCropHeight + 1) & ~1;
+                break;
+            }
             default:
             {
                 break;
@@ -141,6 +150,10 @@
         mConverter = new ColorConverter(
                 mColorFormat, OMX_COLOR_Format16bitRGB565);
         CHECK(mConverter->isValid());
+    } else if (mColorFormat == OMX_COLOR_FormatYUV420Planar16) {
+        mConverter = new ColorConverter(
+                mColorFormat, OMX_COLOR_Format32BitRGBA1010102);
+        CHECK(mConverter->isValid());
     }
 
     CHECK(mNativeWindow != NULL);
@@ -365,12 +378,29 @@
     // color conversion to RGB. For now, just mark dataspace for YUV rendering.
     android_dataspace dataSpace;
     if (format->findInt32("android._dataspace", (int32_t *)&dataSpace) && dataSpace != mDataSpace) {
+        mDataSpace = dataSpace;
+
+        if (mConverter != NULL) {
+            // Graphics only supports full-range RGB. ColorConverter should have
+            // converted any YUV to full range already.
+            dataSpace = (android_dataspace)
+                    ((dataSpace & ~HAL_DATASPACE_RANGE_MASK) | HAL_DATASPACE_RANGE_FULL);
+        }
+
         ALOGD("setting dataspace on output surface to #%x", dataSpace);
         if ((err = native_window_set_buffers_data_space(mNativeWindow.get(), dataSpace))) {
             ALOGW("failed to set dataspace on surface (%d)", err);
         }
-        mDataSpace = dataSpace;
     }
+    if (format->contains("hdr-static-info")) {
+        HDRStaticInfo info;
+        if (ColorUtils::getHDRStaticInfoFromFormat(format, &info)
+            && memcmp(&mHDRStaticInfo, &info, sizeof(info))) {
+            setNativeWindowHdrMetadata(mNativeWindow.get(), &info);
+            mHDRStaticInfo = info;
+        }
+    }
+
     if ((err = mNativeWindow->queueBuffer(mNativeWindow.get(), buf, -1)) != 0) {
         ALOGW("Surface::queueBuffer returned error %d", err);
     } else {
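
When a ColorConverter is active, the renderer above only overrides the range bits of
the dataspace before passing it to the surface, keeping the decoder's standard and
transfer bits. A small sketch of that masking using the HAL dataspace constants (the
function name is illustrative):

    #include <system/graphics.h>

    // Force the range component to FULL; standard and transfer are untouched.
    // e.g. STANDARD_BT2020 | TRANSFER_ST2084 | RANGE_LIMITED
    //   -> STANDARD_BT2020 | TRANSFER_ST2084 | RANGE_FULL
    static android_dataspace forceFullRange(android_dataspace ds) {
        return (android_dataspace)
                (((uint32_t)ds & ~HAL_DATASPACE_RANGE_MASK) | HAL_DATASPACE_RANGE_FULL);
    }
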
diff --git a/media/libstagefright/include/SoftwareRenderer.h b/media/libstagefright/include/SoftwareRenderer.h
index 258511a..e04b59f 100644
--- a/media/libstagefright/include/SoftwareRenderer.h
+++ b/media/libstagefright/include/SoftwareRenderer.h
@@ -22,6 +22,7 @@
 #include <media/stagefright/FrameRenderTracker.h>
 #include <utils/RefBase.h>
 #include <system/window.h>
+#include <media/hardware/VideoAPI.h>
 
 #include <list>
 
@@ -55,6 +56,7 @@
     int32_t mCropWidth, mCropHeight;
     int32_t mRotationDegrees;
     android_dataspace mDataSpace;
+    HDRStaticInfo mHDRStaticInfo;
     FrameRenderTracker mRenderTracker;
 
     SoftwareRenderer(const SoftwareRenderer &);
diff --git a/media/libstagefright/include/media/stagefright/ACodec.h b/media/libstagefright/include/media/stagefright/ACodec.h
index d1a9d25..3196b10 100644
--- a/media/libstagefright/include/media/stagefright/ACodec.h
+++ b/media/libstagefright/include/media/stagefright/ACodec.h
@@ -235,6 +235,7 @@
     int mNativeWindowUsageBits;
     android_native_rect_t mLastNativeWindowCrop;
     int32_t mLastNativeWindowDataSpace;
+    HDRStaticInfo mLastHDRStaticInfo;
     sp<AMessage> mConfigFormat;
     sp<AMessage> mInputFormat;
     sp<AMessage> mOutputFormat;
diff --git a/media/libstagefright/include/media/stagefright/ColorConverter.h b/media/libstagefright/include/media/stagefright/ColorConverter.h
index 7ac9b37..f6bd353 100644
--- a/media/libstagefright/include/media/stagefright/ColorConverter.h
+++ b/media/libstagefright/include/media/stagefright/ColorConverter.h
@@ -73,6 +73,9 @@
     status_t convertYUV420Planar(
             const BitmapParams &src, const BitmapParams &dst);
 
+    status_t convertYUV420Planar16(
+            const BitmapParams &src, const BitmapParams &dst);
+
     status_t convertYUV420PlanarUseLibYUV(
             const BitmapParams &src, const BitmapParams &dst);
 
diff --git a/media/libstagefright/include/media/stagefright/SurfaceUtils.h b/media/libstagefright/include/media/stagefright/SurfaceUtils.h
index a7747c7..689e458 100644
--- a/media/libstagefright/include/media/stagefright/SurfaceUtils.h
+++ b/media/libstagefright/include/media/stagefright/SurfaceUtils.h
@@ -24,6 +24,8 @@
 
 namespace android {
 
+struct HDRStaticInfo;
+
 /**
  * Configures |nativeWindow| for given |width|x|height|, pixel |format|, |rotation| and |usage|.
  * If |reconnect| is true, reconnects to the native window before hand.
@@ -32,6 +34,8 @@
 status_t setNativeWindowSizeFormatAndUsage(
         ANativeWindow *nativeWindow /* nonnull */,
         int width, int height, int format, int rotation, int usage, bool reconnect);
+void setNativeWindowHdrMetadata(
+        ANativeWindow *nativeWindow /* nonnull */, HDRStaticInfo *info /* nonnull */);
 status_t pushBlankBuffersToNativeWindow(ANativeWindow *nativeWindow /* nonnull */);
 status_t nativeWindowConnect(ANativeWindow *surface, const char *reason);
 status_t nativeWindowDisconnect(ANativeWindow *surface, const char *reason);
diff --git a/media/libstagefright/omx/OMXUtils.cpp b/media/libstagefright/omx/OMXUtils.cpp
index e032985..f597e02 100644
--- a/media/libstagefright/omx/OMXUtils.cpp
+++ b/media/libstagefright/omx/OMXUtils.cpp
@@ -215,6 +215,9 @@
         fmt != OMX_COLOR_FormatYUV420PackedSemiPlanar &&
         fmt != (OMX_COLOR_FORMATTYPE)HAL_PIXEL_FORMAT_YV12) {
         ALOGW("do not know color format 0x%x = %d", fmt, fmt);
+        if (fmt == OMX_COLOR_FormatYUV420Planar16) {
+            ALOGW("Cannot describe color format OMX_COLOR_FormatYUV420Planar16");
+        }
         return false;
     }
 
diff --git a/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp b/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp
index 8e92539..8ef7620 100644
--- a/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp
+++ b/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp
@@ -61,6 +61,7 @@
         mCropTop(0),
         mCropWidth(width),
         mCropHeight(height),
+        mOutputFormat(OMX_COLOR_FormatYUV420Planar),
         mOutputPortSettingsChange(NONE),
         mUpdateColorAspects(false),
         mMinInputBufferSize(384), // arbitrary, using one uncompressed macroblock
@@ -74,6 +75,7 @@
     memset(&mDefaultColorAspects, 0, sizeof(ColorAspects));
     memset(&mBitstreamColorAspects, 0, sizeof(ColorAspects));
     memset(&mFinalColorAspects, 0, sizeof(ColorAspects));
+    memset(&mHdrStaticInfo, 0, sizeof(HDRStaticInfo));
 }
 
 void SoftVideoDecoderOMXComponent::initPorts(
@@ -140,7 +142,6 @@
     def.format.video.xFramerate = 0;
     def.format.video.bFlagErrorConcealment = OMX_FALSE;
     def.format.video.eCompressionFormat = OMX_VIDEO_CodingUnused;
-    def.format.video.eColorFormat = OMX_COLOR_FormatYUV420Planar;
     def.format.video.pNativeWindow = NULL;
 
     addPort(def);
@@ -152,11 +153,13 @@
     OMX_PARAM_PORTDEFINITIONTYPE *outDef = &editPortInfo(kOutputPortIndex)->mDef;
     outDef->format.video.nFrameWidth = outputBufferWidth();
     outDef->format.video.nFrameHeight = outputBufferHeight();
+    outDef->format.video.eColorFormat = mOutputFormat;
     outDef->format.video.nStride = outDef->format.video.nFrameWidth;
     outDef->format.video.nSliceHeight = outDef->format.video.nFrameHeight;
 
+    int32_t bpp = (mOutputFormat == OMX_COLOR_FormatYUV420Planar16) ? 2 : 1;
     outDef->nBufferSize =
-        (outDef->format.video.nStride * outDef->format.video.nSliceHeight * 3) / 2;
+        (outDef->format.video.nStride * outDef->format.video.nSliceHeight * bpp * 3) / 2;
 
     OMX_PARAM_PORTDEFINITIONTYPE *inDef = &editPortInfo(kInputPortIndex)->mDef;
     inDef->format.video.nFrameWidth = mWidth;
@@ -191,9 +194,11 @@
 
 void SoftVideoDecoderOMXComponent::handlePortSettingsChange(
         bool *portWillReset, uint32_t width, uint32_t height,
+        OMX_COLOR_FORMATTYPE outputFormat,
         CropSettingsMode cropSettingsMode, bool fakeStride) {
     *portWillReset = false;
     bool sizeChanged = (width != mWidth || height != mHeight);
+    bool formatChanged = (outputFormat != mOutputFormat);
     bool updateCrop = (cropSettingsMode == kCropUnSet);
     bool cropChanged = (cropSettingsMode == kCropChanged);
     bool strideChanged = false;
@@ -205,13 +210,18 @@
         }
     }
 
-    if (sizeChanged || cropChanged || strideChanged) {
+    if (formatChanged || sizeChanged || cropChanged || strideChanged) {
+        if (formatChanged) {
+            ALOGD("formatChanged: 0x%08x -> 0x%08x", mOutputFormat, outputFormat);
+        }
+        mOutputFormat = outputFormat;
         mWidth = width;
         mHeight = height;
 
         if ((sizeChanged && !mIsAdaptive)
             || width > mAdaptiveMaxWidth
-            || height > mAdaptiveMaxHeight) {
+            || height > mAdaptiveMaxHeight
+            || formatChanged) {
             if (mIsAdaptive) {
                 if (width > mAdaptiveMaxWidth) {
                     mAdaptiveMaxWidth = width;
@@ -305,27 +315,30 @@
 void SoftVideoDecoderOMXComponent::copyYV12FrameToOutputBuffer(
         uint8_t *dst, const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV,
         size_t srcYStride, size_t srcUStride, size_t srcVStride) {
-    size_t dstYStride = outputBufferWidth();
+    OMX_PARAM_PORTDEFINITIONTYPE *outDef = &editPortInfo(kOutputPortIndex)->mDef;
+    int32_t bpp = (outDef->format.video.eColorFormat == OMX_COLOR_FormatYUV420Planar16) ? 2 : 1;
+
+    size_t dstYStride = outputBufferWidth() * bpp;
     size_t dstUVStride = dstYStride / 2;
     size_t dstHeight = outputBufferHeight();
     uint8_t *dstStart = dst;
 
     for (size_t i = 0; i < mHeight; ++i) {
-         memcpy(dst, srcY, mWidth);
+         memcpy(dst, srcY, mWidth * bpp);
          srcY += srcYStride;
          dst += dstYStride;
     }
 
     dst = dstStart + dstYStride * dstHeight;
     for (size_t i = 0; i < mHeight / 2; ++i) {
-         memcpy(dst, srcU, mWidth / 2);
+         memcpy(dst, srcU, mWidth / 2 * bpp);
          srcU += srcUStride;
          dst += dstUVStride;
     }
 
     dst = dstStart + (5 * dstYStride * dstHeight) / 4;
     for (size_t i = 0; i < mHeight / 2; ++i) {
-         memcpy(dst, srcV, mWidth / 2);
+         memcpy(dst, srcV, mWidth / 2 * bpp);
          srcV += srcVStride;
          dst += dstUVStride;
     }
@@ -562,6 +575,24 @@
             return OMX_ErrorNone;
         }
 
+        case kDescribeHdrStaticInfoIndex:
+        {
+            if (!supportDescribeHdrStaticInfo()) {
+                return OMX_ErrorUnsupportedIndex;
+            }
+
+            DescribeHDRStaticInfoParams* hdrStaticInfoParams =
+                    (DescribeHDRStaticInfoParams *)params;
+
+            if (hdrStaticInfoParams->nPortIndex != kOutputPortIndex) {
+                return OMX_ErrorBadPortIndex;
+            }
+
+            hdrStaticInfoParams->sInfo = mHdrStaticInfo;
+
+            return OMX_ErrorNone;
+        }
+
         default:
             return OMX_ErrorUnsupportedIndex;
     }
@@ -595,6 +626,28 @@
             return OMX_ErrorNone;
         }
 
+        case kDescribeHdrStaticInfoIndex:
+        {
+            if (!supportDescribeHdrStaticInfo()) {
+                return OMX_ErrorUnsupportedIndex;
+            }
+
+            const DescribeHDRStaticInfoParams* hdrStaticInfoParams =
+                    (DescribeHDRStaticInfoParams *)params;
+
+            if (hdrStaticInfoParams->nPortIndex != kOutputPortIndex) {
+                return OMX_ErrorBadPortIndex;
+            }
+
+            // Record the HDR static info and switch the output port to the
+            // 10-bit format; getConfig() and the port definition will reflect this.
+            mOutputFormat = OMX_COLOR_FormatYUV420Planar16;
+            mHdrStaticInfo = hdrStaticInfoParams->sInfo;
+            updatePortDefinitions(false);
+
+            return OMX_ErrorNone;
+        }
+
         default:
             return OMX_ErrorUnsupportedIndex;
     }
@@ -610,6 +663,10 @@
                 && supportsDescribeColorAspects()) {
         *(int32_t*)index = kDescribeColorAspectsIndex;
         return OMX_ErrorNone;
+    } else if (!strcmp(name, "OMX.google.android.index.describeHDRStaticInfo")
+            && supportDescribeHdrStaticInfo()) {
+        *(int32_t*)index = kDescribeHdrStaticInfoIndex;
+        return OMX_ErrorNone;
     }
 
     return SimpleSoftOMXComponent::getExtensionIndex(name, index);
@@ -623,6 +680,10 @@
     return kNotSupported;
 }
 
+bool SoftVideoDecoderOMXComponent::supportDescribeHdrStaticInfo() {
+    return false;
+}
+
 void SoftVideoDecoderOMXComponent::onReset() {
     mOutputPortSettingsChange = NONE;
 }
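
The new kDescribeHdrStaticInfoIndex is only reachable through the
"OMX.google.android.index.describeHDRStaticInfo" extension, mirroring the
color-aspects index handled just above it. A hedged sketch of querying it through
the raw OMX IL entry points (the framework actually goes through IOMXNode;
component is an assumed OMX_HANDLETYPE, and InitOMXParams is the usual stagefright
helper that fills nSize/nVersion):

    OMX_INDEXTYPE index;
    OMX_ERRORTYPE err = OMX_GetExtensionIndex(
            component, (OMX_STRING)"OMX.google.android.index.describeHDRStaticInfo",
            &index);
    if (err == OMX_ErrorNone) {
        DescribeHDRStaticInfoParams params;   // from media/hardware/HardwareAPI.h
        InitOMXParams(&params);               // assumed helper: sets nSize/nVersion
        params.nPortIndex = 1;                // kOutputPortIndex
        if (OMX_GetConfig(component, index, &params) == OMX_ErrorNone) {
            // params.sInfo now carries the decoder's HDRStaticInfo.
        }
    }
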
diff --git a/media/libstagefright/omx/include/media/stagefright/omx/SoftVideoDecoderOMXComponent.h b/media/libstagefright/omx/include/media/stagefright/omx/SoftVideoDecoderOMXComponent.h
index c9fd745..56fc691 100644
--- a/media/libstagefright/omx/include/media/stagefright/omx/SoftVideoDecoderOMXComponent.h
+++ b/media/libstagefright/omx/include/media/stagefright/omx/SoftVideoDecoderOMXComponent.h
@@ -23,6 +23,7 @@
 #include <media/stagefright/foundation/AHandlerReflector.h>
 #include <media/stagefright/foundation/ColorUtils.h>
 #include <media/IOMX.h>
+#include <media/hardware/HardwareAPI.h>
 
 #include <utils/RefBase.h>
 #include <utils/threads.h>
@@ -46,6 +47,7 @@
 protected:
     enum {
         kDescribeColorAspectsIndex = kPrepareForAdaptivePlaybackIndex + 1,
+        kDescribeHdrStaticInfoIndex = kPrepareForAdaptivePlaybackIndex + 2,
     };
 
     enum {
@@ -76,6 +78,8 @@
 
     virtual int getColorAspectPreference();
 
+    virtual bool supportDescribeHdrStaticInfo();
+
     // This function sets both minimum buffer count and actual buffer count of
     // input port to be |numInputBuffers|. It will also set both minimum buffer
     // count and actual buffer count of output port to be |numOutputBuffers|.
@@ -113,7 +117,9 @@
     // It will trigger OMX_EventPortSettingsChanged event if necessary.
     void handlePortSettingsChange(
             bool *portWillReset, uint32_t width, uint32_t height,
-            CropSettingsMode cropSettingsMode = kCropUnSet, bool fakeStride = false);
+            OMX_COLOR_FORMATTYPE outputFormat = OMX_COLOR_FormatYUV420Planar,
+            CropSettingsMode cropSettingsMode = kCropUnSet,
+            bool fakeStride = false);
 
     void copyYV12FrameToOutputBuffer(
             uint8_t *dst, const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV,
@@ -129,7 +135,8 @@
     uint32_t mAdaptiveMaxWidth, mAdaptiveMaxHeight;
     uint32_t mWidth, mHeight;
     uint32_t mCropLeft, mCropTop, mCropWidth, mCropHeight;
-
+    OMX_COLOR_FORMATTYPE mOutputFormat;
+    HDRStaticInfo mHdrStaticInfo;
     enum {
         NONE,
         AWAITING_DISABLED,